diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,134472 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 3.0, + "eval_steps": 500, + "global_step": 19206, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0, + "grad_norm": 531.315111998153, + "learning_rate": 3.466204506065858e-08, + "loss": 5.0363, + "step": 1 + }, + { + "epoch": 0.0, + "grad_norm": 292.2964725663925, + "learning_rate": 6.932409012131716e-08, + "loss": 4.6321, + "step": 2 + }, + { + "epoch": 0.0, + "grad_norm": 264.51170744342073, + "learning_rate": 1.0398613518197575e-07, + "loss": 4.7518, + "step": 3 + }, + { + "epoch": 0.0, + "grad_norm": 65.54403336268102, + "learning_rate": 1.386481802426343e-07, + "loss": 4.7551, + "step": 4 + }, + { + "epoch": 0.0, + "grad_norm": 71.62633373356861, + "learning_rate": 1.733102253032929e-07, + "loss": 4.9667, + "step": 5 + }, + { + "epoch": 0.0, + "grad_norm": 93.71744734115397, + "learning_rate": 2.079722703639515e-07, + "loss": 4.4864, + "step": 6 + }, + { + "epoch": 0.0, + "grad_norm": 79.13891063463899, + "learning_rate": 2.426343154246101e-07, + "loss": 4.3112, + "step": 7 + }, + { + "epoch": 0.0, + "grad_norm": 130.9824844691712, + "learning_rate": 2.772963604852686e-07, + "loss": 4.7113, + "step": 8 + }, + { + "epoch": 0.0, + "grad_norm": 62.15746440823066, + "learning_rate": 3.119584055459272e-07, + "loss": 4.5315, + "step": 9 + }, + { + "epoch": 0.0, + "grad_norm": 63.06252314526499, + "learning_rate": 3.466204506065858e-07, + "loss": 4.1675, + "step": 10 + }, + { + "epoch": 0.0, + "grad_norm": 63.181877274700796, + "learning_rate": 3.8128249566724436e-07, + "loss": 4.7039, + "step": 11 + }, + { + "epoch": 0.0, + "grad_norm": 77.56295637293104, + "learning_rate": 4.15944540727903e-07, + "loss": 4.7308, + "step": 12 + }, + { + "epoch": 0.0, + "grad_norm": 90.59912217138084, + "learning_rate": 4.5060658578856156e-07, + "loss": 4.268, + "step": 13 + }, + { + "epoch": 0.0, + "grad_norm": 60.27261208385014, + "learning_rate": 4.852686308492202e-07, + "loss": 4.0223, + "step": 14 + }, + { + "epoch": 0.0, + "grad_norm": 53.092113647032356, + "learning_rate": 5.199306759098788e-07, + "loss": 4.0547, + "step": 15 + }, + { + "epoch": 0.0, + "grad_norm": 194.42885862477203, + "learning_rate": 5.545927209705372e-07, + "loss": 4.1719, + "step": 16 + }, + { + "epoch": 0.0, + "grad_norm": 192.72886528486674, + "learning_rate": 5.89254766031196e-07, + "loss": 3.9482, + "step": 17 + }, + { + "epoch": 0.0, + "grad_norm": 52.923874944802805, + "learning_rate": 6.239168110918544e-07, + "loss": 4.0473, + "step": 18 + }, + { + "epoch": 0.0, + "grad_norm": 243.21030215791566, + "learning_rate": 6.58578856152513e-07, + "loss": 3.7208, + "step": 19 + }, + { + "epoch": 0.0, + "grad_norm": 199.76016694550418, + "learning_rate": 6.932409012131716e-07, + "loss": 3.5057, + "step": 20 + }, + { + "epoch": 0.0, + "grad_norm": 219.9332157291184, + "learning_rate": 7.279029462738301e-07, + "loss": 3.8037, + "step": 21 + }, + { + "epoch": 0.0, + "grad_norm": 224.8460521206362, + "learning_rate": 7.625649913344887e-07, + "loss": 3.6361, + "step": 22 + }, + { + "epoch": 0.0, + "grad_norm": 52.96817437593409, + "learning_rate": 7.972270363951473e-07, + "loss": 3.451, + "step": 23 + }, + { + "epoch": 0.0, + "grad_norm": 176.07245204045535, + "learning_rate": 8.31889081455806e-07, + "loss": 3.1902, + "step": 24 + }, + { + 
"epoch": 0.0, + "grad_norm": 196.34398136614035, + "learning_rate": 8.665511265164646e-07, + "loss": 2.9131, + "step": 25 + }, + { + "epoch": 0.0, + "grad_norm": 32.43285611524579, + "learning_rate": 9.012131715771231e-07, + "loss": 2.8942, + "step": 26 + }, + { + "epoch": 0.0, + "grad_norm": 130.1069982354433, + "learning_rate": 9.358752166377817e-07, + "loss": 2.521, + "step": 27 + }, + { + "epoch": 0.0, + "grad_norm": 30.298152068413035, + "learning_rate": 9.705372616984403e-07, + "loss": 2.6266, + "step": 28 + }, + { + "epoch": 0.0, + "grad_norm": 23.695189025738316, + "learning_rate": 1.0051993067590988e-06, + "loss": 2.5869, + "step": 29 + }, + { + "epoch": 0.0, + "grad_norm": 28.64249612592944, + "learning_rate": 1.0398613518197575e-06, + "loss": 2.4856, + "step": 30 + }, + { + "epoch": 0.0, + "grad_norm": 23.675691242861983, + "learning_rate": 1.074523396880416e-06, + "loss": 2.4556, + "step": 31 + }, + { + "epoch": 0.0, + "grad_norm": 23.53407161761756, + "learning_rate": 1.1091854419410745e-06, + "loss": 2.318, + "step": 32 + }, + { + "epoch": 0.01, + "grad_norm": 96.18604569702083, + "learning_rate": 1.1438474870017332e-06, + "loss": 2.3074, + "step": 33 + }, + { + "epoch": 0.01, + "grad_norm": 141.04263290664068, + "learning_rate": 1.178509532062392e-06, + "loss": 2.1149, + "step": 34 + }, + { + "epoch": 0.01, + "grad_norm": 24.26582216230617, + "learning_rate": 1.2131715771230504e-06, + "loss": 2.0832, + "step": 35 + }, + { + "epoch": 0.01, + "grad_norm": 28.986545105175942, + "learning_rate": 1.2478336221837089e-06, + "loss": 2.2623, + "step": 36 + }, + { + "epoch": 0.01, + "grad_norm": 80.66247728466575, + "learning_rate": 1.2824956672443676e-06, + "loss": 2.1439, + "step": 37 + }, + { + "epoch": 0.01, + "grad_norm": 21.463273681199404, + "learning_rate": 1.317157712305026e-06, + "loss": 1.9362, + "step": 38 + }, + { + "epoch": 0.01, + "grad_norm": 84.87951823415129, + "learning_rate": 1.3518197573656846e-06, + "loss": 2.054, + "step": 39 + }, + { + "epoch": 0.01, + "grad_norm": 18.529187478472057, + "learning_rate": 1.3864818024263433e-06, + "loss": 2.0543, + "step": 40 + }, + { + "epoch": 0.01, + "grad_norm": 57.58101304987958, + "learning_rate": 1.4211438474870018e-06, + "loss": 1.9546, + "step": 41 + }, + { + "epoch": 0.01, + "grad_norm": 59.388993477434354, + "learning_rate": 1.4558058925476603e-06, + "loss": 2.0146, + "step": 42 + }, + { + "epoch": 0.01, + "grad_norm": 16.39079349038478, + "learning_rate": 1.490467937608319e-06, + "loss": 2.0469, + "step": 43 + }, + { + "epoch": 0.01, + "grad_norm": 17.011224687411563, + "learning_rate": 1.5251299826689774e-06, + "loss": 1.8853, + "step": 44 + }, + { + "epoch": 0.01, + "grad_norm": 57.65040422903312, + "learning_rate": 1.5597920277296362e-06, + "loss": 1.8947, + "step": 45 + }, + { + "epoch": 0.01, + "grad_norm": 18.209375496951544, + "learning_rate": 1.5944540727902946e-06, + "loss": 1.8545, + "step": 46 + }, + { + "epoch": 0.01, + "grad_norm": 44.473361066444234, + "learning_rate": 1.6291161178509536e-06, + "loss": 1.7729, + "step": 47 + }, + { + "epoch": 0.01, + "grad_norm": 59.65041968269149, + "learning_rate": 1.663778162911612e-06, + "loss": 1.9303, + "step": 48 + }, + { + "epoch": 0.01, + "grad_norm": 47.739525868265204, + "learning_rate": 1.6984402079722705e-06, + "loss": 1.8462, + "step": 49 + }, + { + "epoch": 0.01, + "grad_norm": 64.66907700879658, + "learning_rate": 1.7331022530329292e-06, + "loss": 1.8848, + "step": 50 + }, + { + "epoch": 0.01, + "grad_norm": 59.48751044620968, + "learning_rate": 
1.7677642980935877e-06, + "loss": 1.8173, + "step": 51 + }, + { + "epoch": 0.01, + "grad_norm": 14.627369497177005, + "learning_rate": 1.8024263431542462e-06, + "loss": 1.7882, + "step": 52 + }, + { + "epoch": 0.01, + "grad_norm": 55.94019378697077, + "learning_rate": 1.837088388214905e-06, + "loss": 1.8156, + "step": 53 + }, + { + "epoch": 0.01, + "grad_norm": 14.79310617919457, + "learning_rate": 1.8717504332755634e-06, + "loss": 1.7695, + "step": 54 + }, + { + "epoch": 0.01, + "grad_norm": 12.858413094829237, + "learning_rate": 1.906412478336222e-06, + "loss": 1.6991, + "step": 55 + }, + { + "epoch": 0.01, + "grad_norm": 12.585260260538933, + "learning_rate": 1.9410745233968806e-06, + "loss": 1.6335, + "step": 56 + }, + { + "epoch": 0.01, + "grad_norm": 65.95190152730467, + "learning_rate": 1.9757365684575393e-06, + "loss": 1.8287, + "step": 57 + }, + { + "epoch": 0.01, + "grad_norm": 28.052890780604574, + "learning_rate": 2.0103986135181976e-06, + "loss": 1.5994, + "step": 58 + }, + { + "epoch": 0.01, + "grad_norm": 52.96283885189271, + "learning_rate": 2.0450606585788563e-06, + "loss": 1.7196, + "step": 59 + }, + { + "epoch": 0.01, + "grad_norm": 56.70262797924363, + "learning_rate": 2.079722703639515e-06, + "loss": 1.7748, + "step": 60 + }, + { + "epoch": 0.01, + "grad_norm": 37.195107480088836, + "learning_rate": 2.1143847487001733e-06, + "loss": 1.6626, + "step": 61 + }, + { + "epoch": 0.01, + "grad_norm": 63.72151010848557, + "learning_rate": 2.149046793760832e-06, + "loss": 1.7699, + "step": 62 + }, + { + "epoch": 0.01, + "grad_norm": 38.40982890234492, + "learning_rate": 2.1837088388214907e-06, + "loss": 1.6679, + "step": 63 + }, + { + "epoch": 0.01, + "grad_norm": 43.77033326258082, + "learning_rate": 2.218370883882149e-06, + "loss": 1.8076, + "step": 64 + }, + { + "epoch": 0.01, + "grad_norm": 39.80828537183788, + "learning_rate": 2.2530329289428077e-06, + "loss": 1.6574, + "step": 65 + }, + { + "epoch": 0.01, + "grad_norm": 39.0141134889743, + "learning_rate": 2.2876949740034664e-06, + "loss": 1.7012, + "step": 66 + }, + { + "epoch": 0.01, + "grad_norm": 8.550052719832058, + "learning_rate": 2.322357019064125e-06, + "loss": 1.495, + "step": 67 + }, + { + "epoch": 0.01, + "grad_norm": 42.31678881948093, + "learning_rate": 2.357019064124784e-06, + "loss": 1.6645, + "step": 68 + }, + { + "epoch": 0.01, + "grad_norm": 44.22241947059129, + "learning_rate": 2.391681109185442e-06, + "loss": 1.6002, + "step": 69 + }, + { + "epoch": 0.01, + "grad_norm": 11.846153629350137, + "learning_rate": 2.4263431542461008e-06, + "loss": 1.7184, + "step": 70 + }, + { + "epoch": 0.01, + "grad_norm": 46.693782384986555, + "learning_rate": 2.4610051993067595e-06, + "loss": 1.5383, + "step": 71 + }, + { + "epoch": 0.01, + "grad_norm": 65.46057084460385, + "learning_rate": 2.4956672443674178e-06, + "loss": 1.5847, + "step": 72 + }, + { + "epoch": 0.01, + "grad_norm": 48.29185609030196, + "learning_rate": 2.530329289428076e-06, + "loss": 1.609, + "step": 73 + }, + { + "epoch": 0.01, + "grad_norm": 23.895269717082964, + "learning_rate": 2.564991334488735e-06, + "loss": 1.4175, + "step": 74 + }, + { + "epoch": 0.01, + "grad_norm": 45.28353952262731, + "learning_rate": 2.599653379549394e-06, + "loss": 1.5396, + "step": 75 + }, + { + "epoch": 0.01, + "grad_norm": 41.48833822367313, + "learning_rate": 2.634315424610052e-06, + "loss": 1.537, + "step": 76 + }, + { + "epoch": 0.01, + "grad_norm": 33.32863144952529, + "learning_rate": 2.668977469670711e-06, + "loss": 1.3886, + "step": 77 + }, + { + "epoch": 
0.01, + "grad_norm": 34.59213100046043, + "learning_rate": 2.703639514731369e-06, + "loss": 1.6539, + "step": 78 + }, + { + "epoch": 0.01, + "grad_norm": 31.78870746070221, + "learning_rate": 2.7383015597920283e-06, + "loss": 1.5099, + "step": 79 + }, + { + "epoch": 0.01, + "grad_norm": 32.31520248932208, + "learning_rate": 2.7729636048526865e-06, + "loss": 1.5438, + "step": 80 + }, + { + "epoch": 0.01, + "grad_norm": 42.59625680645732, + "learning_rate": 2.8076256499133452e-06, + "loss": 1.5118, + "step": 81 + }, + { + "epoch": 0.01, + "grad_norm": 56.6037912144551, + "learning_rate": 2.8422876949740035e-06, + "loss": 1.6787, + "step": 82 + }, + { + "epoch": 0.01, + "grad_norm": 35.51296773654188, + "learning_rate": 2.8769497400346622e-06, + "loss": 1.6533, + "step": 83 + }, + { + "epoch": 0.01, + "grad_norm": 65.8743678692378, + "learning_rate": 2.9116117850953205e-06, + "loss": 1.4466, + "step": 84 + }, + { + "epoch": 0.01, + "grad_norm": 7.660212235000567, + "learning_rate": 2.9462738301559796e-06, + "loss": 1.5002, + "step": 85 + }, + { + "epoch": 0.01, + "grad_norm": 8.080726230549912, + "learning_rate": 2.980935875216638e-06, + "loss": 1.5862, + "step": 86 + }, + { + "epoch": 0.01, + "grad_norm": 36.809914237432615, + "learning_rate": 3.0155979202772966e-06, + "loss": 1.4045, + "step": 87 + }, + { + "epoch": 0.01, + "grad_norm": 26.38411280641693, + "learning_rate": 3.050259965337955e-06, + "loss": 1.4151, + "step": 88 + }, + { + "epoch": 0.01, + "grad_norm": 31.09621377509721, + "learning_rate": 3.084922010398614e-06, + "loss": 1.3729, + "step": 89 + }, + { + "epoch": 0.01, + "grad_norm": 56.80399612113042, + "learning_rate": 3.1195840554592723e-06, + "loss": 1.4924, + "step": 90 + }, + { + "epoch": 0.01, + "grad_norm": 33.03162578163297, + "learning_rate": 3.154246100519931e-06, + "loss": 1.2896, + "step": 91 + }, + { + "epoch": 0.01, + "grad_norm": 46.05831648463537, + "learning_rate": 3.1889081455805893e-06, + "loss": 1.5209, + "step": 92 + }, + { + "epoch": 0.01, + "grad_norm": 35.381577279632964, + "learning_rate": 3.223570190641248e-06, + "loss": 1.2987, + "step": 93 + }, + { + "epoch": 0.01, + "grad_norm": 50.67155855969887, + "learning_rate": 3.258232235701907e-06, + "loss": 1.4132, + "step": 94 + }, + { + "epoch": 0.01, + "grad_norm": 27.14601135720065, + "learning_rate": 3.2928942807625654e-06, + "loss": 1.3058, + "step": 95 + }, + { + "epoch": 0.01, + "grad_norm": 52.753150739630186, + "learning_rate": 3.327556325823224e-06, + "loss": 1.5529, + "step": 96 + }, + { + "epoch": 0.02, + "grad_norm": 41.63375309679603, + "learning_rate": 3.3622183708838824e-06, + "loss": 1.3607, + "step": 97 + }, + { + "epoch": 0.02, + "grad_norm": 38.060557282953845, + "learning_rate": 3.396880415944541e-06, + "loss": 1.5151, + "step": 98 + }, + { + "epoch": 0.02, + "grad_norm": 36.341843029383874, + "learning_rate": 3.4315424610051994e-06, + "loss": 1.4832, + "step": 99 + }, + { + "epoch": 0.02, + "grad_norm": 32.71183195182694, + "learning_rate": 3.4662045060658585e-06, + "loss": 1.3898, + "step": 100 + }, + { + "epoch": 0.02, + "grad_norm": 35.364513544716985, + "learning_rate": 3.5008665511265168e-06, + "loss": 1.2867, + "step": 101 + }, + { + "epoch": 0.02, + "grad_norm": 34.96477526522823, + "learning_rate": 3.5355285961871755e-06, + "loss": 1.3614, + "step": 102 + }, + { + "epoch": 0.02, + "grad_norm": 54.31042409667524, + "learning_rate": 3.5701906412478338e-06, + "loss": 1.3225, + "step": 103 + }, + { + "epoch": 0.02, + "grad_norm": 37.162779470946084, + "learning_rate": 
3.6048526863084925e-06, + "loss": 1.4249, + "step": 104 + }, + { + "epoch": 0.02, + "grad_norm": 29.29934761904572, + "learning_rate": 3.6395147313691507e-06, + "loss": 1.2458, + "step": 105 + }, + { + "epoch": 0.02, + "grad_norm": 34.47241906103389, + "learning_rate": 3.67417677642981e-06, + "loss": 1.4181, + "step": 106 + }, + { + "epoch": 0.02, + "grad_norm": 42.256401884767726, + "learning_rate": 3.708838821490468e-06, + "loss": 1.4879, + "step": 107 + }, + { + "epoch": 0.02, + "grad_norm": 43.925025114520146, + "learning_rate": 3.743500866551127e-06, + "loss": 1.3717, + "step": 108 + }, + { + "epoch": 0.02, + "grad_norm": 29.95344397624258, + "learning_rate": 3.778162911611785e-06, + "loss": 1.2605, + "step": 109 + }, + { + "epoch": 0.02, + "grad_norm": 44.758083549766795, + "learning_rate": 3.812824956672444e-06, + "loss": 1.4066, + "step": 110 + }, + { + "epoch": 0.02, + "grad_norm": 53.9073924565535, + "learning_rate": 3.8474870017331025e-06, + "loss": 1.3514, + "step": 111 + }, + { + "epoch": 0.02, + "grad_norm": 41.10458204572215, + "learning_rate": 3.882149046793761e-06, + "loss": 1.2649, + "step": 112 + }, + { + "epoch": 0.02, + "grad_norm": 53.05955697604764, + "learning_rate": 3.91681109185442e-06, + "loss": 1.4632, + "step": 113 + }, + { + "epoch": 0.02, + "grad_norm": 10.46621436256183, + "learning_rate": 3.951473136915079e-06, + "loss": 1.4173, + "step": 114 + }, + { + "epoch": 0.02, + "grad_norm": 9.335895509505711, + "learning_rate": 3.986135181975737e-06, + "loss": 1.2472, + "step": 115 + }, + { + "epoch": 0.02, + "grad_norm": 53.528985041276094, + "learning_rate": 4.020797227036395e-06, + "loss": 1.3109, + "step": 116 + }, + { + "epoch": 0.02, + "grad_norm": 44.74688266972805, + "learning_rate": 4.055459272097054e-06, + "loss": 1.5135, + "step": 117 + }, + { + "epoch": 0.02, + "grad_norm": 39.26028178330096, + "learning_rate": 4.090121317157713e-06, + "loss": 1.3563, + "step": 118 + }, + { + "epoch": 0.02, + "grad_norm": 44.316768802456025, + "learning_rate": 4.124783362218371e-06, + "loss": 1.3292, + "step": 119 + }, + { + "epoch": 0.02, + "grad_norm": 29.574670927485485, + "learning_rate": 4.15944540727903e-06, + "loss": 1.1467, + "step": 120 + }, + { + "epoch": 0.02, + "grad_norm": 45.81860769341073, + "learning_rate": 4.194107452339689e-06, + "loss": 1.3843, + "step": 121 + }, + { + "epoch": 0.02, + "grad_norm": 57.43559259894182, + "learning_rate": 4.228769497400347e-06, + "loss": 1.4919, + "step": 122 + }, + { + "epoch": 0.02, + "grad_norm": 5.415087089635171, + "learning_rate": 4.263431542461005e-06, + "loss": 1.1586, + "step": 123 + }, + { + "epoch": 0.02, + "grad_norm": 13.678235459508098, + "learning_rate": 4.298093587521664e-06, + "loss": 1.3752, + "step": 124 + }, + { + "epoch": 0.02, + "grad_norm": 44.934836609796854, + "learning_rate": 4.332755632582323e-06, + "loss": 1.294, + "step": 125 + }, + { + "epoch": 0.02, + "grad_norm": 33.2067769266012, + "learning_rate": 4.367417677642981e-06, + "loss": 1.2223, + "step": 126 + }, + { + "epoch": 0.02, + "grad_norm": 31.158718020945006, + "learning_rate": 4.40207972270364e-06, + "loss": 1.2844, + "step": 127 + }, + { + "epoch": 0.02, + "grad_norm": 33.88473113813146, + "learning_rate": 4.436741767764298e-06, + "loss": 1.3532, + "step": 128 + }, + { + "epoch": 0.02, + "grad_norm": 35.647074162323214, + "learning_rate": 4.471403812824957e-06, + "loss": 1.3752, + "step": 129 + }, + { + "epoch": 0.02, + "grad_norm": 36.23125015937893, + "learning_rate": 4.506065857885615e-06, + "loss": 1.2189, + "step": 130 + }, + { 
+ "epoch": 0.02, + "grad_norm": 41.133569313035245, + "learning_rate": 4.540727902946274e-06, + "loss": 1.4709, + "step": 131 + }, + { + "epoch": 0.02, + "grad_norm": 36.33225915991336, + "learning_rate": 4.575389948006933e-06, + "loss": 1.4186, + "step": 132 + }, + { + "epoch": 0.02, + "grad_norm": 35.414972508619215, + "learning_rate": 4.6100519930675915e-06, + "loss": 1.3723, + "step": 133 + }, + { + "epoch": 0.02, + "grad_norm": 33.35428797151585, + "learning_rate": 4.64471403812825e-06, + "loss": 1.3848, + "step": 134 + }, + { + "epoch": 0.02, + "grad_norm": 39.34666713235231, + "learning_rate": 4.679376083188908e-06, + "loss": 1.3439, + "step": 135 + }, + { + "epoch": 0.02, + "grad_norm": 29.078741925934583, + "learning_rate": 4.714038128249568e-06, + "loss": 1.3974, + "step": 136 + }, + { + "epoch": 0.02, + "grad_norm": 26.90329138232109, + "learning_rate": 4.7487001733102254e-06, + "loss": 1.1737, + "step": 137 + }, + { + "epoch": 0.02, + "grad_norm": 7.292796210039388, + "learning_rate": 4.783362218370884e-06, + "loss": 1.3312, + "step": 138 + }, + { + "epoch": 0.02, + "grad_norm": 37.7018850322623, + "learning_rate": 4.818024263431543e-06, + "loss": 1.2078, + "step": 139 + }, + { + "epoch": 0.02, + "grad_norm": 45.284279681800335, + "learning_rate": 4.8526863084922016e-06, + "loss": 1.2895, + "step": 140 + }, + { + "epoch": 0.02, + "grad_norm": 51.891835734719514, + "learning_rate": 4.88734835355286e-06, + "loss": 1.4174, + "step": 141 + }, + { + "epoch": 0.02, + "grad_norm": 22.47660619461644, + "learning_rate": 4.922010398613519e-06, + "loss": 1.2463, + "step": 142 + }, + { + "epoch": 0.02, + "grad_norm": 29.76010427665964, + "learning_rate": 4.956672443674177e-06, + "loss": 1.381, + "step": 143 + }, + { + "epoch": 0.02, + "grad_norm": 35.70995866748017, + "learning_rate": 4.9913344887348355e-06, + "loss": 1.3176, + "step": 144 + }, + { + "epoch": 0.02, + "grad_norm": 39.20985658428473, + "learning_rate": 5.025996533795494e-06, + "loss": 1.2154, + "step": 145 + }, + { + "epoch": 0.02, + "grad_norm": 39.78268072622544, + "learning_rate": 5.060658578856152e-06, + "loss": 1.3954, + "step": 146 + }, + { + "epoch": 0.02, + "grad_norm": 37.29081364869007, + "learning_rate": 5.095320623916812e-06, + "loss": 1.2138, + "step": 147 + }, + { + "epoch": 0.02, + "grad_norm": 42.82941003268194, + "learning_rate": 5.12998266897747e-06, + "loss": 1.2528, + "step": 148 + }, + { + "epoch": 0.02, + "grad_norm": 29.308513069270575, + "learning_rate": 5.164644714038128e-06, + "loss": 1.3094, + "step": 149 + }, + { + "epoch": 0.02, + "grad_norm": 37.716104024319435, + "learning_rate": 5.199306759098788e-06, + "loss": 1.197, + "step": 150 + }, + { + "epoch": 0.02, + "grad_norm": 44.569028238421126, + "learning_rate": 5.2339688041594464e-06, + "loss": 1.2321, + "step": 151 + }, + { + "epoch": 0.02, + "grad_norm": 30.039305431241836, + "learning_rate": 5.268630849220104e-06, + "loss": 1.225, + "step": 152 + }, + { + "epoch": 0.02, + "grad_norm": 30.957562751900497, + "learning_rate": 5.303292894280763e-06, + "loss": 1.2773, + "step": 153 + }, + { + "epoch": 0.02, + "grad_norm": 28.24003345089031, + "learning_rate": 5.337954939341422e-06, + "loss": 1.1255, + "step": 154 + }, + { + "epoch": 0.02, + "grad_norm": 7.444971755777517, + "learning_rate": 5.37261698440208e-06, + "loss": 1.1667, + "step": 155 + }, + { + "epoch": 0.02, + "grad_norm": 46.517308003223164, + "learning_rate": 5.407279029462738e-06, + "loss": 1.3639, + "step": 156 + }, + { + "epoch": 0.02, + "grad_norm": 44.84407460430838, + 
"learning_rate": 5.441941074523397e-06, + "loss": 1.3066, + "step": 157 + }, + { + "epoch": 0.02, + "grad_norm": 27.237931197168404, + "learning_rate": 5.4766031195840565e-06, + "loss": 1.2181, + "step": 158 + }, + { + "epoch": 0.02, + "grad_norm": 42.470842864262124, + "learning_rate": 5.511265164644714e-06, + "loss": 1.2999, + "step": 159 + }, + { + "epoch": 0.02, + "grad_norm": 51.05946680302711, + "learning_rate": 5.545927209705373e-06, + "loss": 1.4571, + "step": 160 + }, + { + "epoch": 0.03, + "grad_norm": 22.331443568035873, + "learning_rate": 5.580589254766031e-06, + "loss": 1.17, + "step": 161 + }, + { + "epoch": 0.03, + "grad_norm": 31.660909220397603, + "learning_rate": 5.6152512998266905e-06, + "loss": 1.1699, + "step": 162 + }, + { + "epoch": 0.03, + "grad_norm": 39.83129881241251, + "learning_rate": 5.649913344887349e-06, + "loss": 1.3412, + "step": 163 + }, + { + "epoch": 0.03, + "grad_norm": 41.582122805500425, + "learning_rate": 5.684575389948007e-06, + "loss": 1.2571, + "step": 164 + }, + { + "epoch": 0.03, + "grad_norm": 34.92599689727244, + "learning_rate": 5.719237435008666e-06, + "loss": 1.3948, + "step": 165 + }, + { + "epoch": 0.03, + "grad_norm": 35.588642926181166, + "learning_rate": 5.7538994800693245e-06, + "loss": 1.3808, + "step": 166 + }, + { + "epoch": 0.03, + "grad_norm": 32.375995972310854, + "learning_rate": 5.788561525129983e-06, + "loss": 1.2056, + "step": 167 + }, + { + "epoch": 0.03, + "grad_norm": 7.049289160851054, + "learning_rate": 5.823223570190641e-06, + "loss": 1.1249, + "step": 168 + }, + { + "epoch": 0.03, + "grad_norm": 30.980834700967822, + "learning_rate": 5.8578856152513006e-06, + "loss": 1.2299, + "step": 169 + }, + { + "epoch": 0.03, + "grad_norm": 7.81297797838982, + "learning_rate": 5.892547660311959e-06, + "loss": 1.2217, + "step": 170 + }, + { + "epoch": 0.03, + "grad_norm": 24.62331531360481, + "learning_rate": 5.927209705372617e-06, + "loss": 1.1349, + "step": 171 + }, + { + "epoch": 0.03, + "grad_norm": 38.67789488822055, + "learning_rate": 5.961871750433276e-06, + "loss": 1.1445, + "step": 172 + }, + { + "epoch": 0.03, + "grad_norm": 36.04467601914481, + "learning_rate": 5.996533795493935e-06, + "loss": 1.234, + "step": 173 + }, + { + "epoch": 0.03, + "grad_norm": 39.50498866391282, + "learning_rate": 6.031195840554593e-06, + "loss": 1.3125, + "step": 174 + }, + { + "epoch": 0.03, + "grad_norm": 25.497032026697326, + "learning_rate": 6.065857885615252e-06, + "loss": 1.3066, + "step": 175 + }, + { + "epoch": 0.03, + "grad_norm": 33.757574274360955, + "learning_rate": 6.10051993067591e-06, + "loss": 1.1588, + "step": 176 + }, + { + "epoch": 0.03, + "grad_norm": 37.29514032396307, + "learning_rate": 6.135181975736569e-06, + "loss": 1.2298, + "step": 177 + }, + { + "epoch": 0.03, + "grad_norm": 40.78940463621289, + "learning_rate": 6.169844020797228e-06, + "loss": 1.3352, + "step": 178 + }, + { + "epoch": 0.03, + "grad_norm": 23.524942823101373, + "learning_rate": 6.204506065857886e-06, + "loss": 1.1822, + "step": 179 + }, + { + "epoch": 0.03, + "grad_norm": 36.69774783822274, + "learning_rate": 6.239168110918545e-06, + "loss": 1.2495, + "step": 180 + }, + { + "epoch": 0.03, + "grad_norm": 41.3084844464238, + "learning_rate": 6.273830155979203e-06, + "loss": 1.1862, + "step": 181 + }, + { + "epoch": 0.03, + "grad_norm": 34.43259940959318, + "learning_rate": 6.308492201039862e-06, + "loss": 1.2432, + "step": 182 + }, + { + "epoch": 0.03, + "grad_norm": 38.24744721071966, + "learning_rate": 6.34315424610052e-06, + "loss": 1.1918, + 
"step": 183 + }, + { + "epoch": 0.03, + "grad_norm": 41.74737424574073, + "learning_rate": 6.377816291161179e-06, + "loss": 1.1328, + "step": 184 + }, + { + "epoch": 0.03, + "grad_norm": 25.256152917656, + "learning_rate": 6.412478336221838e-06, + "loss": 1.1292, + "step": 185 + }, + { + "epoch": 0.03, + "grad_norm": 10.65204682336752, + "learning_rate": 6.447140381282496e-06, + "loss": 1.3824, + "step": 186 + }, + { + "epoch": 0.03, + "grad_norm": 35.39689166268264, + "learning_rate": 6.481802426343155e-06, + "loss": 1.2992, + "step": 187 + }, + { + "epoch": 0.03, + "grad_norm": 43.78390060477098, + "learning_rate": 6.516464471403814e-06, + "loss": 1.3357, + "step": 188 + }, + { + "epoch": 0.03, + "grad_norm": 29.22914734988391, + "learning_rate": 6.551126516464472e-06, + "loss": 1.225, + "step": 189 + }, + { + "epoch": 0.03, + "grad_norm": 23.597944332714018, + "learning_rate": 6.585788561525131e-06, + "loss": 1.2756, + "step": 190 + }, + { + "epoch": 0.03, + "grad_norm": 419.35612647258307, + "learning_rate": 6.620450606585789e-06, + "loss": 1.2129, + "step": 191 + }, + { + "epoch": 0.03, + "grad_norm": 46.917815459962284, + "learning_rate": 6.655112651646448e-06, + "loss": 1.2689, + "step": 192 + }, + { + "epoch": 0.03, + "grad_norm": 31.56025284047614, + "learning_rate": 6.689774696707106e-06, + "loss": 1.1644, + "step": 193 + }, + { + "epoch": 0.03, + "grad_norm": 6.313856039421796, + "learning_rate": 6.724436741767765e-06, + "loss": 1.1856, + "step": 194 + }, + { + "epoch": 0.03, + "grad_norm": 30.530156384407103, + "learning_rate": 6.759098786828423e-06, + "loss": 1.2193, + "step": 195 + }, + { + "epoch": 0.03, + "grad_norm": 36.97730140906862, + "learning_rate": 6.793760831889082e-06, + "loss": 1.177, + "step": 196 + }, + { + "epoch": 0.03, + "grad_norm": 7.392897250545163, + "learning_rate": 6.828422876949741e-06, + "loss": 1.231, + "step": 197 + }, + { + "epoch": 0.03, + "grad_norm": 32.92835037394705, + "learning_rate": 6.863084922010399e-06, + "loss": 1.146, + "step": 198 + }, + { + "epoch": 0.03, + "grad_norm": 19.552219517077233, + "learning_rate": 6.8977469670710574e-06, + "loss": 1.0737, + "step": 199 + }, + { + "epoch": 0.03, + "grad_norm": 26.903193126268295, + "learning_rate": 6.932409012131717e-06, + "loss": 1.1864, + "step": 200 + }, + { + "epoch": 0.03, + "grad_norm": 38.42820331679975, + "learning_rate": 6.967071057192375e-06, + "loss": 1.1963, + "step": 201 + }, + { + "epoch": 0.03, + "grad_norm": 6.780253042602195, + "learning_rate": 7.0017331022530336e-06, + "loss": 1.2246, + "step": 202 + }, + { + "epoch": 0.03, + "grad_norm": 41.88507083311122, + "learning_rate": 7.036395147313691e-06, + "loss": 1.253, + "step": 203 + }, + { + "epoch": 0.03, + "grad_norm": 28.588234394474906, + "learning_rate": 7.071057192374351e-06, + "loss": 1.1283, + "step": 204 + }, + { + "epoch": 0.03, + "grad_norm": 7.2968956254753365, + "learning_rate": 7.10571923743501e-06, + "loss": 1.2647, + "step": 205 + }, + { + "epoch": 0.03, + "grad_norm": 6.7423780177881625, + "learning_rate": 7.1403812824956675e-06, + "loss": 1.1301, + "step": 206 + }, + { + "epoch": 0.03, + "grad_norm": 40.2782497605626, + "learning_rate": 7.175043327556327e-06, + "loss": 1.416, + "step": 207 + }, + { + "epoch": 0.03, + "grad_norm": 32.16224681685655, + "learning_rate": 7.209705372616985e-06, + "loss": 1.1316, + "step": 208 + }, + { + "epoch": 0.03, + "grad_norm": 36.03994387862284, + "learning_rate": 7.244367417677644e-06, + "loss": 1.0575, + "step": 209 + }, + { + "epoch": 0.03, + "grad_norm": 
45.38313356254118, + "learning_rate": 7.2790294627383015e-06, + "loss": 1.2639, + "step": 210 + }, + { + "epoch": 0.03, + "grad_norm": 29.74022686936235, + "learning_rate": 7.313691507798961e-06, + "loss": 1.2833, + "step": 211 + }, + { + "epoch": 0.03, + "grad_norm": 36.19193833892053, + "learning_rate": 7.34835355285962e-06, + "loss": 1.271, + "step": 212 + }, + { + "epoch": 0.03, + "grad_norm": 32.62994222002236, + "learning_rate": 7.383015597920278e-06, + "loss": 1.1843, + "step": 213 + }, + { + "epoch": 0.03, + "grad_norm": 36.961412282025826, + "learning_rate": 7.417677642980936e-06, + "loss": 1.1769, + "step": 214 + }, + { + "epoch": 0.03, + "grad_norm": 20.229385803664805, + "learning_rate": 7.452339688041596e-06, + "loss": 1.1415, + "step": 215 + }, + { + "epoch": 0.03, + "grad_norm": 49.63053238151394, + "learning_rate": 7.487001733102254e-06, + "loss": 1.2606, + "step": 216 + }, + { + "epoch": 0.03, + "grad_norm": 36.18979464060009, + "learning_rate": 7.521663778162912e-06, + "loss": 1.0297, + "step": 217 + }, + { + "epoch": 0.03, + "grad_norm": 37.33706410383136, + "learning_rate": 7.55632582322357e-06, + "loss": 1.2468, + "step": 218 + }, + { + "epoch": 0.03, + "grad_norm": 37.466290187188605, + "learning_rate": 7.59098786828423e-06, + "loss": 1.2497, + "step": 219 + }, + { + "epoch": 0.03, + "grad_norm": 31.785716426582937, + "learning_rate": 7.625649913344888e-06, + "loss": 1.148, + "step": 220 + }, + { + "epoch": 0.03, + "grad_norm": 6.186234469992598, + "learning_rate": 7.660311958405546e-06, + "loss": 1.1191, + "step": 221 + }, + { + "epoch": 0.03, + "grad_norm": 21.136424431825276, + "learning_rate": 7.694974003466205e-06, + "loss": 1.1053, + "step": 222 + }, + { + "epoch": 0.03, + "grad_norm": 36.5331661009021, + "learning_rate": 7.729636048526865e-06, + "loss": 1.1752, + "step": 223 + }, + { + "epoch": 0.03, + "grad_norm": 11.825170021241153, + "learning_rate": 7.764298093587522e-06, + "loss": 1.1805, + "step": 224 + }, + { + "epoch": 0.04, + "grad_norm": 49.98371478649682, + "learning_rate": 7.79896013864818e-06, + "loss": 1.2036, + "step": 225 + }, + { + "epoch": 0.04, + "grad_norm": 25.76367878778229, + "learning_rate": 7.83362218370884e-06, + "loss": 1.1512, + "step": 226 + }, + { + "epoch": 0.04, + "grad_norm": 6.343654337053652, + "learning_rate": 7.868284228769498e-06, + "loss": 1.2117, + "step": 227 + }, + { + "epoch": 0.04, + "grad_norm": 33.74923088998774, + "learning_rate": 7.902946273830157e-06, + "loss": 1.1138, + "step": 228 + }, + { + "epoch": 0.04, + "grad_norm": 36.786056645264466, + "learning_rate": 7.937608318890815e-06, + "loss": 1.1355, + "step": 229 + }, + { + "epoch": 0.04, + "grad_norm": 48.2760855668748, + "learning_rate": 7.972270363951475e-06, + "loss": 1.2969, + "step": 230 + }, + { + "epoch": 0.04, + "grad_norm": 30.061946352264094, + "learning_rate": 8.006932409012133e-06, + "loss": 1.2115, + "step": 231 + }, + { + "epoch": 0.04, + "grad_norm": 28.30631954006009, + "learning_rate": 8.04159445407279e-06, + "loss": 1.0828, + "step": 232 + }, + { + "epoch": 0.04, + "grad_norm": 36.30636139112351, + "learning_rate": 8.076256499133448e-06, + "loss": 1.2671, + "step": 233 + }, + { + "epoch": 0.04, + "grad_norm": 25.827200155898385, + "learning_rate": 8.110918544194108e-06, + "loss": 1.2521, + "step": 234 + }, + { + "epoch": 0.04, + "grad_norm": 30.46692480831601, + "learning_rate": 8.145580589254767e-06, + "loss": 1.1863, + "step": 235 + }, + { + "epoch": 0.04, + "grad_norm": 33.95893658100235, + "learning_rate": 8.180242634315425e-06, + "loss": 
1.1368, + "step": 236 + }, + { + "epoch": 0.04, + "grad_norm": 6.481549417206551, + "learning_rate": 8.214904679376083e-06, + "loss": 1.2642, + "step": 237 + }, + { + "epoch": 0.04, + "grad_norm": 26.55777034389227, + "learning_rate": 8.249566724436743e-06, + "loss": 1.1898, + "step": 238 + }, + { + "epoch": 0.04, + "grad_norm": 31.537696253165546, + "learning_rate": 8.2842287694974e-06, + "loss": 1.1685, + "step": 239 + }, + { + "epoch": 0.04, + "grad_norm": 40.98763564530311, + "learning_rate": 8.31889081455806e-06, + "loss": 1.1621, + "step": 240 + }, + { + "epoch": 0.04, + "grad_norm": 22.768020223733394, + "learning_rate": 8.353552859618718e-06, + "loss": 1.0423, + "step": 241 + }, + { + "epoch": 0.04, + "grad_norm": 30.991154264281352, + "learning_rate": 8.388214904679377e-06, + "loss": 1.1565, + "step": 242 + }, + { + "epoch": 0.04, + "grad_norm": 35.33399157297096, + "learning_rate": 8.422876949740035e-06, + "loss": 1.1692, + "step": 243 + }, + { + "epoch": 0.04, + "grad_norm": 30.435295857435555, + "learning_rate": 8.457538994800693e-06, + "loss": 1.0559, + "step": 244 + }, + { + "epoch": 0.04, + "grad_norm": 26.817123491290523, + "learning_rate": 8.492201039861353e-06, + "loss": 1.1404, + "step": 245 + }, + { + "epoch": 0.04, + "grad_norm": 35.386770881490634, + "learning_rate": 8.52686308492201e-06, + "loss": 1.2227, + "step": 246 + }, + { + "epoch": 0.04, + "grad_norm": 45.657356716165744, + "learning_rate": 8.56152512998267e-06, + "loss": 1.2336, + "step": 247 + }, + { + "epoch": 0.04, + "grad_norm": 8.792431903517928, + "learning_rate": 8.596187175043328e-06, + "loss": 1.1526, + "step": 248 + }, + { + "epoch": 0.04, + "grad_norm": 43.45146147895134, + "learning_rate": 8.630849220103988e-06, + "loss": 1.3344, + "step": 249 + }, + { + "epoch": 0.04, + "grad_norm": 40.00593644734271, + "learning_rate": 8.665511265164645e-06, + "loss": 1.3203, + "step": 250 + }, + { + "epoch": 0.04, + "grad_norm": 27.483074029490002, + "learning_rate": 8.700173310225303e-06, + "loss": 1.1013, + "step": 251 + }, + { + "epoch": 0.04, + "grad_norm": 32.86979396650612, + "learning_rate": 8.734835355285963e-06, + "loss": 1.2232, + "step": 252 + }, + { + "epoch": 0.04, + "grad_norm": 61.6369277483482, + "learning_rate": 8.769497400346622e-06, + "loss": 1.3066, + "step": 253 + }, + { + "epoch": 0.04, + "grad_norm": 32.427511507502366, + "learning_rate": 8.80415944540728e-06, + "loss": 1.0331, + "step": 254 + }, + { + "epoch": 0.04, + "grad_norm": 32.41941191543784, + "learning_rate": 8.838821490467938e-06, + "loss": 1.193, + "step": 255 + }, + { + "epoch": 0.04, + "grad_norm": 32.89093912658737, + "learning_rate": 8.873483535528596e-06, + "loss": 1.188, + "step": 256 + }, + { + "epoch": 0.04, + "grad_norm": 6.7963108025890575, + "learning_rate": 8.908145580589255e-06, + "loss": 1.0219, + "step": 257 + }, + { + "epoch": 0.04, + "grad_norm": 39.48217111225304, + "learning_rate": 8.942807625649913e-06, + "loss": 1.287, + "step": 258 + }, + { + "epoch": 0.04, + "grad_norm": 17.591255719236734, + "learning_rate": 8.977469670710573e-06, + "loss": 0.993, + "step": 259 + }, + { + "epoch": 0.04, + "grad_norm": 30.271428492673127, + "learning_rate": 9.01213171577123e-06, + "loss": 1.0541, + "step": 260 + }, + { + "epoch": 0.04, + "grad_norm": 20.960546409549913, + "learning_rate": 9.04679376083189e-06, + "loss": 1.114, + "step": 261 + }, + { + "epoch": 0.04, + "grad_norm": 23.37091576688421, + "learning_rate": 9.081455805892548e-06, + "loss": 1.0278, + "step": 262 + }, + { + "epoch": 0.04, + "grad_norm": 
32.44645666671875, + "learning_rate": 9.116117850953206e-06, + "loss": 1.0882, + "step": 263 + }, + { + "epoch": 0.04, + "grad_norm": 29.394409995098258, + "learning_rate": 9.150779896013866e-06, + "loss": 1.098, + "step": 264 + }, + { + "epoch": 0.04, + "grad_norm": 32.84000688314728, + "learning_rate": 9.185441941074525e-06, + "loss": 1.1333, + "step": 265 + }, + { + "epoch": 0.04, + "grad_norm": 31.90842895091659, + "learning_rate": 9.220103986135183e-06, + "loss": 1.1737, + "step": 266 + }, + { + "epoch": 0.04, + "grad_norm": 29.000285479151675, + "learning_rate": 9.25476603119584e-06, + "loss": 1.1174, + "step": 267 + }, + { + "epoch": 0.04, + "grad_norm": 39.56155290117829, + "learning_rate": 9.2894280762565e-06, + "loss": 1.1217, + "step": 268 + }, + { + "epoch": 0.04, + "grad_norm": 133.62511300634418, + "learning_rate": 9.324090121317158e-06, + "loss": 1.2347, + "step": 269 + }, + { + "epoch": 0.04, + "grad_norm": 28.648703538269523, + "learning_rate": 9.358752166377816e-06, + "loss": 1.2441, + "step": 270 + }, + { + "epoch": 0.04, + "grad_norm": 26.332707601116592, + "learning_rate": 9.393414211438476e-06, + "loss": 1.1396, + "step": 271 + }, + { + "epoch": 0.04, + "grad_norm": 22.044852545885977, + "learning_rate": 9.428076256499135e-06, + "loss": 1.2006, + "step": 272 + }, + { + "epoch": 0.04, + "grad_norm": 6.57984200583586, + "learning_rate": 9.462738301559793e-06, + "loss": 1.1745, + "step": 273 + }, + { + "epoch": 0.04, + "grad_norm": 37.63920262944199, + "learning_rate": 9.497400346620451e-06, + "loss": 1.1589, + "step": 274 + }, + { + "epoch": 0.04, + "grad_norm": 20.540283066738453, + "learning_rate": 9.532062391681109e-06, + "loss": 0.986, + "step": 275 + }, + { + "epoch": 0.04, + "grad_norm": 39.58208785113726, + "learning_rate": 9.566724436741768e-06, + "loss": 1.2153, + "step": 276 + }, + { + "epoch": 0.04, + "grad_norm": 7.028840029223187, + "learning_rate": 9.601386481802428e-06, + "loss": 1.179, + "step": 277 + }, + { + "epoch": 0.04, + "grad_norm": 27.1549390118637, + "learning_rate": 9.636048526863086e-06, + "loss": 1.1244, + "step": 278 + }, + { + "epoch": 0.04, + "grad_norm": 36.940692187597406, + "learning_rate": 9.670710571923744e-06, + "loss": 1.3229, + "step": 279 + }, + { + "epoch": 0.04, + "grad_norm": 39.666642069642, + "learning_rate": 9.705372616984403e-06, + "loss": 1.1767, + "step": 280 + }, + { + "epoch": 0.04, + "grad_norm": 47.50090416624944, + "learning_rate": 9.740034662045061e-06, + "loss": 1.2691, + "step": 281 + }, + { + "epoch": 0.04, + "grad_norm": 19.807914091835052, + "learning_rate": 9.77469670710572e-06, + "loss": 1.0357, + "step": 282 + }, + { + "epoch": 0.04, + "grad_norm": 22.087099286974343, + "learning_rate": 9.809358752166378e-06, + "loss": 1.208, + "step": 283 + }, + { + "epoch": 0.04, + "grad_norm": 36.14497004182539, + "learning_rate": 9.844020797227038e-06, + "loss": 1.1656, + "step": 284 + }, + { + "epoch": 0.04, + "grad_norm": 30.562239852531384, + "learning_rate": 9.878682842287696e-06, + "loss": 1.2537, + "step": 285 + }, + { + "epoch": 0.04, + "grad_norm": 28.507637794902266, + "learning_rate": 9.913344887348354e-06, + "loss": 1.0488, + "step": 286 + }, + { + "epoch": 0.04, + "grad_norm": 30.378105736734124, + "learning_rate": 9.948006932409013e-06, + "loss": 1.222, + "step": 287 + }, + { + "epoch": 0.04, + "grad_norm": 33.37978941506294, + "learning_rate": 9.982668977469671e-06, + "loss": 1.1733, + "step": 288 + }, + { + "epoch": 0.05, + "grad_norm": 32.412566171836765, + "learning_rate": 1.001733102253033e-05, + 
"loss": 1.2059, + "step": 289 + }, + { + "epoch": 0.05, + "grad_norm": 28.658334281858885, + "learning_rate": 1.0051993067590988e-05, + "loss": 1.1613, + "step": 290 + }, + { + "epoch": 0.05, + "grad_norm": 34.339859763827214, + "learning_rate": 1.0086655112651646e-05, + "loss": 1.2065, + "step": 291 + }, + { + "epoch": 0.05, + "grad_norm": 20.60806040298398, + "learning_rate": 1.0121317157712304e-05, + "loss": 1.1927, + "step": 292 + }, + { + "epoch": 0.05, + "grad_norm": 28.020750138656787, + "learning_rate": 1.0155979202772965e-05, + "loss": 1.1303, + "step": 293 + }, + { + "epoch": 0.05, + "grad_norm": 32.55648462321204, + "learning_rate": 1.0190641247833623e-05, + "loss": 1.2168, + "step": 294 + }, + { + "epoch": 0.05, + "grad_norm": 28.584010805483356, + "learning_rate": 1.0225303292894281e-05, + "loss": 1.1082, + "step": 295 + }, + { + "epoch": 0.05, + "grad_norm": 27.66448217384437, + "learning_rate": 1.025996533795494e-05, + "loss": 1.0592, + "step": 296 + }, + { + "epoch": 0.05, + "grad_norm": 33.284408458864455, + "learning_rate": 1.0294627383015599e-05, + "loss": 1.0929, + "step": 297 + }, + { + "epoch": 0.05, + "grad_norm": 31.98890294225414, + "learning_rate": 1.0329289428076256e-05, + "loss": 1.169, + "step": 298 + }, + { + "epoch": 0.05, + "grad_norm": 30.00410883601317, + "learning_rate": 1.0363951473136914e-05, + "loss": 1.058, + "step": 299 + }, + { + "epoch": 0.05, + "grad_norm": 27.910588839190368, + "learning_rate": 1.0398613518197575e-05, + "loss": 1.1519, + "step": 300 + }, + { + "epoch": 0.05, + "grad_norm": 36.227928446836465, + "learning_rate": 1.0433275563258233e-05, + "loss": 1.0383, + "step": 301 + }, + { + "epoch": 0.05, + "grad_norm": 26.57893303187256, + "learning_rate": 1.0467937608318893e-05, + "loss": 1.2146, + "step": 302 + }, + { + "epoch": 0.05, + "grad_norm": 26.806770479481848, + "learning_rate": 1.050259965337955e-05, + "loss": 1.1068, + "step": 303 + }, + { + "epoch": 0.05, + "grad_norm": 26.19032893913023, + "learning_rate": 1.0537261698440209e-05, + "loss": 1.0901, + "step": 304 + }, + { + "epoch": 0.05, + "grad_norm": 28.416192711982273, + "learning_rate": 1.0571923743500866e-05, + "loss": 1.0974, + "step": 305 + }, + { + "epoch": 0.05, + "grad_norm": 23.870990538838257, + "learning_rate": 1.0606585788561526e-05, + "loss": 1.0428, + "step": 306 + }, + { + "epoch": 0.05, + "grad_norm": 26.31028593040287, + "learning_rate": 1.0641247833622184e-05, + "loss": 1.0996, + "step": 307 + }, + { + "epoch": 0.05, + "grad_norm": 20.12638582913621, + "learning_rate": 1.0675909878682843e-05, + "loss": 1.0697, + "step": 308 + }, + { + "epoch": 0.05, + "grad_norm": 26.229273797125238, + "learning_rate": 1.0710571923743503e-05, + "loss": 1.1433, + "step": 309 + }, + { + "epoch": 0.05, + "grad_norm": 26.102329681320533, + "learning_rate": 1.074523396880416e-05, + "loss": 1.105, + "step": 310 + }, + { + "epoch": 0.05, + "grad_norm": 26.304508250537534, + "learning_rate": 1.0779896013864819e-05, + "loss": 1.1613, + "step": 311 + }, + { + "epoch": 0.05, + "grad_norm": 45.92040450216617, + "learning_rate": 1.0814558058925477e-05, + "loss": 1.1904, + "step": 312 + }, + { + "epoch": 0.05, + "grad_norm": 26.0916480782526, + "learning_rate": 1.0849220103986136e-05, + "loss": 1.0855, + "step": 313 + }, + { + "epoch": 0.05, + "grad_norm": 43.67952947804621, + "learning_rate": 1.0883882149046794e-05, + "loss": 1.3718, + "step": 314 + }, + { + "epoch": 0.05, + "grad_norm": 21.554504791956532, + "learning_rate": 1.0918544194107452e-05, + "loss": 0.9619, + "step": 315 + }, + 
{ + "epoch": 0.05, + "grad_norm": 22.608862729388665, + "learning_rate": 1.0953206239168113e-05, + "loss": 1.0781, + "step": 316 + }, + { + "epoch": 0.05, + "grad_norm": 41.32110408068733, + "learning_rate": 1.0987868284228771e-05, + "loss": 1.1264, + "step": 317 + }, + { + "epoch": 0.05, + "grad_norm": 30.537403696445477, + "learning_rate": 1.1022530329289429e-05, + "loss": 1.1479, + "step": 318 + }, + { + "epoch": 0.05, + "grad_norm": 22.88435630109961, + "learning_rate": 1.1057192374350088e-05, + "loss": 1.1415, + "step": 319 + }, + { + "epoch": 0.05, + "grad_norm": 34.390235825883956, + "learning_rate": 1.1091854419410746e-05, + "loss": 1.0666, + "step": 320 + }, + { + "epoch": 0.05, + "grad_norm": 19.016075798543312, + "learning_rate": 1.1126516464471404e-05, + "loss": 1.0004, + "step": 321 + }, + { + "epoch": 0.05, + "grad_norm": 30.738052474932843, + "learning_rate": 1.1161178509532062e-05, + "loss": 1.0154, + "step": 322 + }, + { + "epoch": 0.05, + "grad_norm": 38.4046694746161, + "learning_rate": 1.1195840554592723e-05, + "loss": 1.2249, + "step": 323 + }, + { + "epoch": 0.05, + "grad_norm": 5.28829747954234, + "learning_rate": 1.1230502599653381e-05, + "loss": 1.1102, + "step": 324 + }, + { + "epoch": 0.05, + "grad_norm": 23.448439893190514, + "learning_rate": 1.1265164644714039e-05, + "loss": 1.0582, + "step": 325 + }, + { + "epoch": 0.05, + "grad_norm": 23.83991091682964, + "learning_rate": 1.1299826689774698e-05, + "loss": 1.1886, + "step": 326 + }, + { + "epoch": 0.05, + "grad_norm": 36.654051427770966, + "learning_rate": 1.1334488734835356e-05, + "loss": 1.1055, + "step": 327 + }, + { + "epoch": 0.05, + "grad_norm": 26.0320110581874, + "learning_rate": 1.1369150779896014e-05, + "loss": 0.9618, + "step": 328 + }, + { + "epoch": 0.05, + "grad_norm": 33.79684069416141, + "learning_rate": 1.1403812824956672e-05, + "loss": 1.2244, + "step": 329 + }, + { + "epoch": 0.05, + "grad_norm": 27.228386863544692, + "learning_rate": 1.1438474870017332e-05, + "loss": 1.077, + "step": 330 + }, + { + "epoch": 0.05, + "grad_norm": 31.836119884132362, + "learning_rate": 1.1473136915077991e-05, + "loss": 1.0394, + "step": 331 + }, + { + "epoch": 0.05, + "grad_norm": 25.110325038671736, + "learning_rate": 1.1507798960138649e-05, + "loss": 1.2516, + "step": 332 + }, + { + "epoch": 0.05, + "grad_norm": 34.72559738791931, + "learning_rate": 1.1542461005199308e-05, + "loss": 1.1126, + "step": 333 + }, + { + "epoch": 0.05, + "grad_norm": 29.478969825496325, + "learning_rate": 1.1577123050259966e-05, + "loss": 0.9767, + "step": 334 + }, + { + "epoch": 0.05, + "grad_norm": 20.48757251280962, + "learning_rate": 1.1611785095320624e-05, + "loss": 1.2302, + "step": 335 + }, + { + "epoch": 0.05, + "grad_norm": 43.73903702776825, + "learning_rate": 1.1646447140381282e-05, + "loss": 1.153, + "step": 336 + }, + { + "epoch": 0.05, + "grad_norm": 36.286243693056086, + "learning_rate": 1.1681109185441942e-05, + "loss": 1.1822, + "step": 337 + }, + { + "epoch": 0.05, + "grad_norm": 23.34518100764557, + "learning_rate": 1.1715771230502601e-05, + "loss": 1.0814, + "step": 338 + }, + { + "epoch": 0.05, + "grad_norm": 25.58957807542444, + "learning_rate": 1.175043327556326e-05, + "loss": 1.1042, + "step": 339 + }, + { + "epoch": 0.05, + "grad_norm": 20.286081019374777, + "learning_rate": 1.1785095320623919e-05, + "loss": 1.0859, + "step": 340 + }, + { + "epoch": 0.05, + "grad_norm": 32.51010524518503, + "learning_rate": 1.1819757365684576e-05, + "loss": 1.0435, + "step": 341 + }, + { + "epoch": 0.05, + "grad_norm": 
23.181531261302148, + "learning_rate": 1.1854419410745234e-05, + "loss": 1.107, + "step": 342 + }, + { + "epoch": 0.05, + "grad_norm": 35.47835515156875, + "learning_rate": 1.1889081455805894e-05, + "loss": 1.0314, + "step": 343 + }, + { + "epoch": 0.05, + "grad_norm": 24.60361534656248, + "learning_rate": 1.1923743500866552e-05, + "loss": 1.0581, + "step": 344 + }, + { + "epoch": 0.05, + "grad_norm": 29.603447091746283, + "learning_rate": 1.195840554592721e-05, + "loss": 1.0639, + "step": 345 + }, + { + "epoch": 0.05, + "grad_norm": 42.005933401381604, + "learning_rate": 1.199306759098787e-05, + "loss": 1.119, + "step": 346 + }, + { + "epoch": 0.05, + "grad_norm": 31.325769802994927, + "learning_rate": 1.2027729636048529e-05, + "loss": 1.0979, + "step": 347 + }, + { + "epoch": 0.05, + "grad_norm": 46.88281582331888, + "learning_rate": 1.2062391681109186e-05, + "loss": 1.1392, + "step": 348 + }, + { + "epoch": 0.05, + "grad_norm": 31.996595149395624, + "learning_rate": 1.2097053726169844e-05, + "loss": 1.0662, + "step": 349 + }, + { + "epoch": 0.05, + "grad_norm": 32.306233032599955, + "learning_rate": 1.2131715771230504e-05, + "loss": 1.0391, + "step": 350 + }, + { + "epoch": 0.05, + "grad_norm": 33.385801572063706, + "learning_rate": 1.2166377816291162e-05, + "loss": 1.2242, + "step": 351 + }, + { + "epoch": 0.05, + "grad_norm": 22.43179488371909, + "learning_rate": 1.220103986135182e-05, + "loss": 1.1773, + "step": 352 + }, + { + "epoch": 0.06, + "grad_norm": 33.50602676407605, + "learning_rate": 1.2235701906412477e-05, + "loss": 1.0898, + "step": 353 + }, + { + "epoch": 0.06, + "grad_norm": 19.631638215145294, + "learning_rate": 1.2270363951473139e-05, + "loss": 1.0632, + "step": 354 + }, + { + "epoch": 0.06, + "grad_norm": 25.926041056193505, + "learning_rate": 1.2305025996533797e-05, + "loss": 0.9388, + "step": 355 + }, + { + "epoch": 0.06, + "grad_norm": 26.932012060913973, + "learning_rate": 1.2339688041594456e-05, + "loss": 1.2701, + "step": 356 + }, + { + "epoch": 0.06, + "grad_norm": 26.65525816696688, + "learning_rate": 1.2374350086655114e-05, + "loss": 0.9718, + "step": 357 + }, + { + "epoch": 0.06, + "grad_norm": 22.096018642350085, + "learning_rate": 1.2409012131715772e-05, + "loss": 0.9923, + "step": 358 + }, + { + "epoch": 0.06, + "grad_norm": 23.929265322745625, + "learning_rate": 1.244367417677643e-05, + "loss": 1.1121, + "step": 359 + }, + { + "epoch": 0.06, + "grad_norm": 28.227952983086723, + "learning_rate": 1.247833622183709e-05, + "loss": 1.1241, + "step": 360 + }, + { + "epoch": 0.06, + "grad_norm": 21.766410493745894, + "learning_rate": 1.2512998266897749e-05, + "loss": 1.1074, + "step": 361 + }, + { + "epoch": 0.06, + "grad_norm": 26.994232906235982, + "learning_rate": 1.2547660311958407e-05, + "loss": 1.1329, + "step": 362 + }, + { + "epoch": 0.06, + "grad_norm": 28.510957715316916, + "learning_rate": 1.2582322357019066e-05, + "loss": 1.0699, + "step": 363 + }, + { + "epoch": 0.06, + "grad_norm": 24.935369772262348, + "learning_rate": 1.2616984402079724e-05, + "loss": 1.1374, + "step": 364 + }, + { + "epoch": 0.06, + "grad_norm": 36.271485986247896, + "learning_rate": 1.2651646447140382e-05, + "loss": 1.0808, + "step": 365 + }, + { + "epoch": 0.06, + "grad_norm": 28.99020265257391, + "learning_rate": 1.268630849220104e-05, + "loss": 1.0781, + "step": 366 + }, + { + "epoch": 0.06, + "grad_norm": 37.36522216302166, + "learning_rate": 1.27209705372617e-05, + "loss": 1.0592, + "step": 367 + }, + { + "epoch": 0.06, + "grad_norm": 31.96383324342438, + 
"learning_rate": 1.2755632582322357e-05, + "loss": 0.9815, + "step": 368 + }, + { + "epoch": 0.06, + "grad_norm": 33.51334153868519, + "learning_rate": 1.2790294627383017e-05, + "loss": 1.1268, + "step": 369 + }, + { + "epoch": 0.06, + "grad_norm": 5.693184440634166, + "learning_rate": 1.2824956672443676e-05, + "loss": 1.0338, + "step": 370 + }, + { + "epoch": 0.06, + "grad_norm": 30.48413772334245, + "learning_rate": 1.2859618717504334e-05, + "loss": 1.2143, + "step": 371 + }, + { + "epoch": 0.06, + "grad_norm": 26.667324067868737, + "learning_rate": 1.2894280762564992e-05, + "loss": 1.0773, + "step": 372 + }, + { + "epoch": 0.06, + "grad_norm": 32.613217908327364, + "learning_rate": 1.2928942807625652e-05, + "loss": 1.0994, + "step": 373 + }, + { + "epoch": 0.06, + "grad_norm": 25.86133089764893, + "learning_rate": 1.296360485268631e-05, + "loss": 1.0735, + "step": 374 + }, + { + "epoch": 0.06, + "grad_norm": 31.189272337112506, + "learning_rate": 1.2998266897746967e-05, + "loss": 1.1056, + "step": 375 + }, + { + "epoch": 0.06, + "grad_norm": 19.21329440551253, + "learning_rate": 1.3032928942807628e-05, + "loss": 1.0915, + "step": 376 + }, + { + "epoch": 0.06, + "grad_norm": 29.346472574260808, + "learning_rate": 1.3067590987868286e-05, + "loss": 1.1536, + "step": 377 + }, + { + "epoch": 0.06, + "grad_norm": 45.79309598200069, + "learning_rate": 1.3102253032928944e-05, + "loss": 1.1109, + "step": 378 + }, + { + "epoch": 0.06, + "grad_norm": 29.733980734491936, + "learning_rate": 1.3136915077989602e-05, + "loss": 1.0806, + "step": 379 + }, + { + "epoch": 0.06, + "grad_norm": 26.973894157979053, + "learning_rate": 1.3171577123050262e-05, + "loss": 1.093, + "step": 380 + }, + { + "epoch": 0.06, + "grad_norm": 27.54901906636627, + "learning_rate": 1.320623916811092e-05, + "loss": 1.0221, + "step": 381 + }, + { + "epoch": 0.06, + "grad_norm": 22.908550257544434, + "learning_rate": 1.3240901213171577e-05, + "loss": 1.0472, + "step": 382 + }, + { + "epoch": 0.06, + "grad_norm": 27.727359550673427, + "learning_rate": 1.3275563258232235e-05, + "loss": 1.0048, + "step": 383 + }, + { + "epoch": 0.06, + "grad_norm": 29.224659504713923, + "learning_rate": 1.3310225303292896e-05, + "loss": 1.1251, + "step": 384 + }, + { + "epoch": 0.06, + "grad_norm": 37.47984058913266, + "learning_rate": 1.3344887348353554e-05, + "loss": 1.0694, + "step": 385 + }, + { + "epoch": 0.06, + "grad_norm": 34.247752152418364, + "learning_rate": 1.3379549393414212e-05, + "loss": 1.2244, + "step": 386 + }, + { + "epoch": 0.06, + "grad_norm": 26.877945774559905, + "learning_rate": 1.3414211438474872e-05, + "loss": 1.0694, + "step": 387 + }, + { + "epoch": 0.06, + "grad_norm": 27.140336946003078, + "learning_rate": 1.344887348353553e-05, + "loss": 1.2015, + "step": 388 + }, + { + "epoch": 0.06, + "grad_norm": 27.9027450262441, + "learning_rate": 1.3483535528596187e-05, + "loss": 1.086, + "step": 389 + }, + { + "epoch": 0.06, + "grad_norm": 27.171382283179494, + "learning_rate": 1.3518197573656845e-05, + "loss": 1.1288, + "step": 390 + }, + { + "epoch": 0.06, + "grad_norm": 21.796142295773222, + "learning_rate": 1.3552859618717506e-05, + "loss": 1.1133, + "step": 391 + }, + { + "epoch": 0.06, + "grad_norm": 37.9444241544439, + "learning_rate": 1.3587521663778164e-05, + "loss": 1.1072, + "step": 392 + }, + { + "epoch": 0.06, + "grad_norm": 28.169641143832457, + "learning_rate": 1.3622183708838824e-05, + "loss": 1.0657, + "step": 393 + }, + { + "epoch": 0.06, + "grad_norm": 34.603979212792716, + "learning_rate": 
1.3656845753899482e-05, + "loss": 1.2969, + "step": 394 + }, + { + "epoch": 0.06, + "grad_norm": 32.377532200853615, + "learning_rate": 1.369150779896014e-05, + "loss": 1.0666, + "step": 395 + }, + { + "epoch": 0.06, + "grad_norm": 17.266813074204492, + "learning_rate": 1.3726169844020797e-05, + "loss": 1.0658, + "step": 396 + }, + { + "epoch": 0.06, + "grad_norm": 34.32559842190941, + "learning_rate": 1.3760831889081457e-05, + "loss": 1.0985, + "step": 397 + }, + { + "epoch": 0.06, + "grad_norm": 34.371715400867, + "learning_rate": 1.3795493934142115e-05, + "loss": 1.3105, + "step": 398 + }, + { + "epoch": 0.06, + "grad_norm": 41.63190000424927, + "learning_rate": 1.3830155979202774e-05, + "loss": 1.0433, + "step": 399 + }, + { + "epoch": 0.06, + "grad_norm": 27.90569959847415, + "learning_rate": 1.3864818024263434e-05, + "loss": 1.0908, + "step": 400 + }, + { + "epoch": 0.06, + "grad_norm": 21.6998805764918, + "learning_rate": 1.3899480069324092e-05, + "loss": 1.0031, + "step": 401 + }, + { + "epoch": 0.06, + "grad_norm": 30.075689023941184, + "learning_rate": 1.393414211438475e-05, + "loss": 1.0568, + "step": 402 + }, + { + "epoch": 0.06, + "grad_norm": 37.3133373278367, + "learning_rate": 1.3968804159445408e-05, + "loss": 1.1699, + "step": 403 + }, + { + "epoch": 0.06, + "grad_norm": 27.4763675646989, + "learning_rate": 1.4003466204506067e-05, + "loss": 1.2245, + "step": 404 + }, + { + "epoch": 0.06, + "grad_norm": 31.103725701166116, + "learning_rate": 1.4038128249566725e-05, + "loss": 1.0939, + "step": 405 + }, + { + "epoch": 0.06, + "grad_norm": 16.757187190093457, + "learning_rate": 1.4072790294627383e-05, + "loss": 1.0749, + "step": 406 + }, + { + "epoch": 0.06, + "grad_norm": 22.835036799977743, + "learning_rate": 1.4107452339688044e-05, + "loss": 1.031, + "step": 407 + }, + { + "epoch": 0.06, + "grad_norm": 42.772867923933184, + "learning_rate": 1.4142114384748702e-05, + "loss": 1.1827, + "step": 408 + }, + { + "epoch": 0.06, + "grad_norm": 37.04203269843442, + "learning_rate": 1.417677642980936e-05, + "loss": 1.2603, + "step": 409 + }, + { + "epoch": 0.06, + "grad_norm": 27.941152088039125, + "learning_rate": 1.421143847487002e-05, + "loss": 1.0402, + "step": 410 + }, + { + "epoch": 0.06, + "grad_norm": 30.012488225595373, + "learning_rate": 1.4246100519930677e-05, + "loss": 1.0436, + "step": 411 + }, + { + "epoch": 0.06, + "grad_norm": 34.68960934660663, + "learning_rate": 1.4280762564991335e-05, + "loss": 1.0056, + "step": 412 + }, + { + "epoch": 0.06, + "grad_norm": 23.378956623070927, + "learning_rate": 1.4315424610051993e-05, + "loss": 1.1198, + "step": 413 + }, + { + "epoch": 0.06, + "grad_norm": 28.51518555812069, + "learning_rate": 1.4350086655112654e-05, + "loss": 1.1016, + "step": 414 + }, + { + "epoch": 0.06, + "grad_norm": 20.430786335206683, + "learning_rate": 1.4384748700173312e-05, + "loss": 1.0082, + "step": 415 + }, + { + "epoch": 0.06, + "grad_norm": 26.425432187951778, + "learning_rate": 1.441941074523397e-05, + "loss": 1.0608, + "step": 416 + }, + { + "epoch": 0.07, + "grad_norm": 20.56158873892592, + "learning_rate": 1.445407279029463e-05, + "loss": 1.0575, + "step": 417 + }, + { + "epoch": 0.07, + "grad_norm": 5.993745699464292, + "learning_rate": 1.4488734835355287e-05, + "loss": 1.1353, + "step": 418 + }, + { + "epoch": 0.07, + "grad_norm": 33.698564188109685, + "learning_rate": 1.4523396880415945e-05, + "loss": 1.1354, + "step": 419 + }, + { + "epoch": 0.07, + "grad_norm": 13.738939505418845, + "learning_rate": 1.4558058925476603e-05, + "loss": 1.0725, 
+ "step": 420 + }, + { + "epoch": 0.07, + "grad_norm": 30.872998443969035, + "learning_rate": 1.4592720970537263e-05, + "loss": 1.1156, + "step": 421 + }, + { + "epoch": 0.07, + "grad_norm": 18.35365116093291, + "learning_rate": 1.4627383015597922e-05, + "loss": 1.0658, + "step": 422 + }, + { + "epoch": 0.07, + "grad_norm": 22.853792234356817, + "learning_rate": 1.466204506065858e-05, + "loss": 1.0421, + "step": 423 + }, + { + "epoch": 0.07, + "grad_norm": 32.3637560867717, + "learning_rate": 1.469670710571924e-05, + "loss": 1.13, + "step": 424 + }, + { + "epoch": 0.07, + "grad_norm": 24.375143306873333, + "learning_rate": 1.4731369150779897e-05, + "loss": 1.1187, + "step": 425 + }, + { + "epoch": 0.07, + "grad_norm": 24.315598036728183, + "learning_rate": 1.4766031195840555e-05, + "loss": 1.0749, + "step": 426 + }, + { + "epoch": 0.07, + "grad_norm": 31.578187560218176, + "learning_rate": 1.4800693240901213e-05, + "loss": 1.0663, + "step": 427 + }, + { + "epoch": 0.07, + "grad_norm": 29.277862572733177, + "learning_rate": 1.4835355285961873e-05, + "loss": 1.0366, + "step": 428 + }, + { + "epoch": 0.07, + "grad_norm": 27.773854849425298, + "learning_rate": 1.4870017331022532e-05, + "loss": 1.0361, + "step": 429 + }, + { + "epoch": 0.07, + "grad_norm": 30.07149383967753, + "learning_rate": 1.4904679376083192e-05, + "loss": 1.1342, + "step": 430 + }, + { + "epoch": 0.07, + "grad_norm": 28.905491040550434, + "learning_rate": 1.493934142114385e-05, + "loss": 1.2152, + "step": 431 + }, + { + "epoch": 0.07, + "grad_norm": 18.66915981630242, + "learning_rate": 1.4974003466204507e-05, + "loss": 1.0779, + "step": 432 + }, + { + "epoch": 0.07, + "grad_norm": 31.81603142861209, + "learning_rate": 1.5008665511265165e-05, + "loss": 1.0872, + "step": 433 + }, + { + "epoch": 0.07, + "grad_norm": 34.11991082135092, + "learning_rate": 1.5043327556325825e-05, + "loss": 0.9974, + "step": 434 + }, + { + "epoch": 0.07, + "grad_norm": 26.143410269225893, + "learning_rate": 1.5077989601386483e-05, + "loss": 1.1808, + "step": 435 + }, + { + "epoch": 0.07, + "grad_norm": 25.641715173600016, + "learning_rate": 1.511265164644714e-05, + "loss": 1.1308, + "step": 436 + }, + { + "epoch": 0.07, + "grad_norm": 33.98886923001349, + "learning_rate": 1.5147313691507802e-05, + "loss": 1.1661, + "step": 437 + }, + { + "epoch": 0.07, + "grad_norm": 18.572967687726788, + "learning_rate": 1.518197573656846e-05, + "loss": 1.0132, + "step": 438 + }, + { + "epoch": 0.07, + "grad_norm": 26.362442319213617, + "learning_rate": 1.5216637781629117e-05, + "loss": 1.0149, + "step": 439 + }, + { + "epoch": 0.07, + "grad_norm": 27.28730137556571, + "learning_rate": 1.5251299826689775e-05, + "loss": 1.0923, + "step": 440 + }, + { + "epoch": 0.07, + "grad_norm": 19.457901932680777, + "learning_rate": 1.5285961871750435e-05, + "loss": 0.9266, + "step": 441 + }, + { + "epoch": 0.07, + "grad_norm": 26.42685430960279, + "learning_rate": 1.532062391681109e-05, + "loss": 1.0687, + "step": 442 + }, + { + "epoch": 0.07, + "grad_norm": 25.110720276179347, + "learning_rate": 1.535528596187175e-05, + "loss": 1.022, + "step": 443 + }, + { + "epoch": 0.07, + "grad_norm": 27.072955662218703, + "learning_rate": 1.538994800693241e-05, + "loss": 1.0953, + "step": 444 + }, + { + "epoch": 0.07, + "grad_norm": 20.11605472079033, + "learning_rate": 1.542461005199307e-05, + "loss": 0.9869, + "step": 445 + }, + { + "epoch": 0.07, + "grad_norm": 34.431652101331636, + "learning_rate": 1.545927209705373e-05, + "loss": 1.3131, + "step": 446 + }, + { + "epoch": 0.07, + 
"grad_norm": 33.46294148670095, + "learning_rate": 1.5493934142114385e-05, + "loss": 0.9587, + "step": 447 + }, + { + "epoch": 0.07, + "grad_norm": 25.8286323566764, + "learning_rate": 1.5528596187175045e-05, + "loss": 1.1004, + "step": 448 + }, + { + "epoch": 0.07, + "grad_norm": 19.15366308486822, + "learning_rate": 1.5563258232235705e-05, + "loss": 0.9616, + "step": 449 + }, + { + "epoch": 0.07, + "grad_norm": 17.821871415050428, + "learning_rate": 1.559792027729636e-05, + "loss": 0.8986, + "step": 450 + }, + { + "epoch": 0.07, + "grad_norm": 35.99595845901309, + "learning_rate": 1.563258232235702e-05, + "loss": 1.2051, + "step": 451 + }, + { + "epoch": 0.07, + "grad_norm": 20.096743531257545, + "learning_rate": 1.566724436741768e-05, + "loss": 0.9922, + "step": 452 + }, + { + "epoch": 0.07, + "grad_norm": 5.701941811616413, + "learning_rate": 1.570190641247834e-05, + "loss": 0.9759, + "step": 453 + }, + { + "epoch": 0.07, + "grad_norm": 19.0741338296344, + "learning_rate": 1.5736568457538996e-05, + "loss": 1.0567, + "step": 454 + }, + { + "epoch": 0.07, + "grad_norm": 5.2711761486261555, + "learning_rate": 1.5771230502599655e-05, + "loss": 1.1068, + "step": 455 + }, + { + "epoch": 0.07, + "grad_norm": 22.120557194163602, + "learning_rate": 1.5805892547660315e-05, + "loss": 0.9328, + "step": 456 + }, + { + "epoch": 0.07, + "grad_norm": 25.54513505382531, + "learning_rate": 1.584055459272097e-05, + "loss": 1.1424, + "step": 457 + }, + { + "epoch": 0.07, + "grad_norm": 29.169606155817124, + "learning_rate": 1.587521663778163e-05, + "loss": 0.9743, + "step": 458 + }, + { + "epoch": 0.07, + "grad_norm": 23.73216498446794, + "learning_rate": 1.5909878682842286e-05, + "loss": 0.9859, + "step": 459 + }, + { + "epoch": 0.07, + "grad_norm": 20.04924244626764, + "learning_rate": 1.594454072790295e-05, + "loss": 1.0824, + "step": 460 + }, + { + "epoch": 0.07, + "grad_norm": 26.32800409759019, + "learning_rate": 1.5979202772963606e-05, + "loss": 1.0437, + "step": 461 + }, + { + "epoch": 0.07, + "grad_norm": 21.947383935631013, + "learning_rate": 1.6013864818024265e-05, + "loss": 1.0657, + "step": 462 + }, + { + "epoch": 0.07, + "grad_norm": 42.90413702383083, + "learning_rate": 1.6048526863084925e-05, + "loss": 1.1631, + "step": 463 + }, + { + "epoch": 0.07, + "grad_norm": 22.11730306306142, + "learning_rate": 1.608318890814558e-05, + "loss": 1.2399, + "step": 464 + }, + { + "epoch": 0.07, + "grad_norm": 25.04111791854184, + "learning_rate": 1.611785095320624e-05, + "loss": 0.952, + "step": 465 + }, + { + "epoch": 0.07, + "grad_norm": 15.07962514994669, + "learning_rate": 1.6152512998266897e-05, + "loss": 0.9327, + "step": 466 + }, + { + "epoch": 0.07, + "grad_norm": 24.38630126346466, + "learning_rate": 1.618717504332756e-05, + "loss": 1.0544, + "step": 467 + }, + { + "epoch": 0.07, + "grad_norm": 32.06929738788105, + "learning_rate": 1.6221837088388216e-05, + "loss": 1.1331, + "step": 468 + }, + { + "epoch": 0.07, + "grad_norm": 20.664537084714688, + "learning_rate": 1.6256499133448875e-05, + "loss": 0.9988, + "step": 469 + }, + { + "epoch": 0.07, + "grad_norm": 21.206259630000375, + "learning_rate": 1.6291161178509535e-05, + "loss": 1.0925, + "step": 470 + }, + { + "epoch": 0.07, + "grad_norm": 36.306049239295874, + "learning_rate": 1.632582322357019e-05, + "loss": 1.0819, + "step": 471 + }, + { + "epoch": 0.07, + "grad_norm": 27.65730606476609, + "learning_rate": 1.636048526863085e-05, + "loss": 1.0467, + "step": 472 + }, + { + "epoch": 0.07, + "grad_norm": 31.947233780879255, + 
"learning_rate": 1.639514731369151e-05, + "loss": 1.0235, + "step": 473 + }, + { + "epoch": 0.07, + "grad_norm": 26.346397050271698, + "learning_rate": 1.6429809358752166e-05, + "loss": 0.9507, + "step": 474 + }, + { + "epoch": 0.07, + "grad_norm": 32.20872637450095, + "learning_rate": 1.6464471403812826e-05, + "loss": 1.1141, + "step": 475 + }, + { + "epoch": 0.07, + "grad_norm": 40.187479039923865, + "learning_rate": 1.6499133448873485e-05, + "loss": 1.0833, + "step": 476 + }, + { + "epoch": 0.07, + "grad_norm": 27.63407629745231, + "learning_rate": 1.6533795493934145e-05, + "loss": 1.0913, + "step": 477 + }, + { + "epoch": 0.07, + "grad_norm": 26.55700920799157, + "learning_rate": 1.65684575389948e-05, + "loss": 1.0217, + "step": 478 + }, + { + "epoch": 0.07, + "grad_norm": 21.303006357296574, + "learning_rate": 1.660311958405546e-05, + "loss": 0.9185, + "step": 479 + }, + { + "epoch": 0.07, + "grad_norm": 34.83074933968313, + "learning_rate": 1.663778162911612e-05, + "loss": 1.1405, + "step": 480 + }, + { + "epoch": 0.08, + "grad_norm": 21.905102922878694, + "learning_rate": 1.6672443674176776e-05, + "loss": 1.0552, + "step": 481 + }, + { + "epoch": 0.08, + "grad_norm": 22.5374601706932, + "learning_rate": 1.6707105719237436e-05, + "loss": 1.0506, + "step": 482 + }, + { + "epoch": 0.08, + "grad_norm": 30.868805725347034, + "learning_rate": 1.6741767764298095e-05, + "loss": 1.2388, + "step": 483 + }, + { + "epoch": 0.08, + "grad_norm": 31.809013872207533, + "learning_rate": 1.6776429809358755e-05, + "loss": 1.1573, + "step": 484 + }, + { + "epoch": 0.08, + "grad_norm": 22.584330183913444, + "learning_rate": 1.681109185441941e-05, + "loss": 1.0159, + "step": 485 + }, + { + "epoch": 0.08, + "grad_norm": 18.359652479793855, + "learning_rate": 1.684575389948007e-05, + "loss": 0.9319, + "step": 486 + }, + { + "epoch": 0.08, + "grad_norm": 24.035349084560117, + "learning_rate": 1.688041594454073e-05, + "loss": 1.192, + "step": 487 + }, + { + "epoch": 0.08, + "grad_norm": 21.091984735728435, + "learning_rate": 1.6915077989601386e-05, + "loss": 0.993, + "step": 488 + }, + { + "epoch": 0.08, + "grad_norm": 25.26835354784831, + "learning_rate": 1.6949740034662046e-05, + "loss": 1.1226, + "step": 489 + }, + { + "epoch": 0.08, + "grad_norm": 26.006399020570456, + "learning_rate": 1.6984402079722705e-05, + "loss": 0.9572, + "step": 490 + }, + { + "epoch": 0.08, + "grad_norm": 21.67672143114982, + "learning_rate": 1.7019064124783365e-05, + "loss": 1.0041, + "step": 491 + }, + { + "epoch": 0.08, + "grad_norm": 35.59097800039987, + "learning_rate": 1.705372616984402e-05, + "loss": 1.0966, + "step": 492 + }, + { + "epoch": 0.08, + "grad_norm": 24.007039133085687, + "learning_rate": 1.708838821490468e-05, + "loss": 1.0351, + "step": 493 + }, + { + "epoch": 0.08, + "grad_norm": 16.6685605548639, + "learning_rate": 1.712305025996534e-05, + "loss": 0.9102, + "step": 494 + }, + { + "epoch": 0.08, + "grad_norm": 23.69384850275509, + "learning_rate": 1.7157712305025996e-05, + "loss": 1.0426, + "step": 495 + }, + { + "epoch": 0.08, + "grad_norm": 29.480033542751137, + "learning_rate": 1.7192374350086656e-05, + "loss": 1.0882, + "step": 496 + }, + { + "epoch": 0.08, + "grad_norm": 40.07920094211409, + "learning_rate": 1.7227036395147316e-05, + "loss": 1.0148, + "step": 497 + }, + { + "epoch": 0.08, + "grad_norm": 29.233060679818937, + "learning_rate": 1.7261698440207975e-05, + "loss": 1.0744, + "step": 498 + }, + { + "epoch": 0.08, + "grad_norm": 35.182566386669606, + "learning_rate": 1.729636048526863e-05, + 
"loss": 0.9997, + "step": 499 + }, + { + "epoch": 0.08, + "grad_norm": 30.269976589123253, + "learning_rate": 1.733102253032929e-05, + "loss": 1.1004, + "step": 500 + }, + { + "epoch": 0.08, + "grad_norm": 24.245753203984723, + "learning_rate": 1.736568457538995e-05, + "loss": 1.0803, + "step": 501 + }, + { + "epoch": 0.08, + "grad_norm": 39.04873017456366, + "learning_rate": 1.7400346620450606e-05, + "loss": 1.1467, + "step": 502 + }, + { + "epoch": 0.08, + "grad_norm": 24.792476515138087, + "learning_rate": 1.7435008665511266e-05, + "loss": 1.0947, + "step": 503 + }, + { + "epoch": 0.08, + "grad_norm": 30.2379304968761, + "learning_rate": 1.7469670710571926e-05, + "loss": 1.1267, + "step": 504 + }, + { + "epoch": 0.08, + "grad_norm": 33.65685682891974, + "learning_rate": 1.7504332755632585e-05, + "loss": 1.1169, + "step": 505 + }, + { + "epoch": 0.08, + "grad_norm": 24.56601044270328, + "learning_rate": 1.7538994800693245e-05, + "loss": 1.0628, + "step": 506 + }, + { + "epoch": 0.08, + "grad_norm": 21.60060010011299, + "learning_rate": 1.75736568457539e-05, + "loss": 1.0078, + "step": 507 + }, + { + "epoch": 0.08, + "grad_norm": 24.11059699605151, + "learning_rate": 1.760831889081456e-05, + "loss": 1.046, + "step": 508 + }, + { + "epoch": 0.08, + "grad_norm": 34.78563216246366, + "learning_rate": 1.7642980935875217e-05, + "loss": 1.2045, + "step": 509 + }, + { + "epoch": 0.08, + "grad_norm": 27.789317608451398, + "learning_rate": 1.7677642980935876e-05, + "loss": 0.9659, + "step": 510 + }, + { + "epoch": 0.08, + "grad_norm": 27.727447236338673, + "learning_rate": 1.7712305025996536e-05, + "loss": 1.0327, + "step": 511 + }, + { + "epoch": 0.08, + "grad_norm": 25.80408909837495, + "learning_rate": 1.7746967071057192e-05, + "loss": 1.0152, + "step": 512 + }, + { + "epoch": 0.08, + "grad_norm": 18.355020428703057, + "learning_rate": 1.7781629116117855e-05, + "loss": 1.0644, + "step": 513 + }, + { + "epoch": 0.08, + "grad_norm": 22.682545495976207, + "learning_rate": 1.781629116117851e-05, + "loss": 1.0329, + "step": 514 + }, + { + "epoch": 0.08, + "grad_norm": 25.989554582298343, + "learning_rate": 1.785095320623917e-05, + "loss": 1.1096, + "step": 515 + }, + { + "epoch": 0.08, + "grad_norm": 26.79673932655621, + "learning_rate": 1.7885615251299827e-05, + "loss": 1.1577, + "step": 516 + }, + { + "epoch": 0.08, + "grad_norm": 25.84256013993326, + "learning_rate": 1.7920277296360486e-05, + "loss": 1.0689, + "step": 517 + }, + { + "epoch": 0.08, + "grad_norm": 24.533577428342877, + "learning_rate": 1.7954939341421146e-05, + "loss": 0.8793, + "step": 518 + }, + { + "epoch": 0.08, + "grad_norm": 23.370691730497956, + "learning_rate": 1.7989601386481802e-05, + "loss": 1.0736, + "step": 519 + }, + { + "epoch": 0.08, + "grad_norm": 24.610226379684057, + "learning_rate": 1.802426343154246e-05, + "loss": 0.987, + "step": 520 + }, + { + "epoch": 0.08, + "grad_norm": 20.44208765087892, + "learning_rate": 1.805892547660312e-05, + "loss": 1.0504, + "step": 521 + }, + { + "epoch": 0.08, + "grad_norm": 18.144404591753734, + "learning_rate": 1.809358752166378e-05, + "loss": 1.0604, + "step": 522 + }, + { + "epoch": 0.08, + "grad_norm": 16.877103069616282, + "learning_rate": 1.812824956672444e-05, + "loss": 0.9898, + "step": 523 + }, + { + "epoch": 0.08, + "grad_norm": 23.737747217762195, + "learning_rate": 1.8162911611785096e-05, + "loss": 1.0038, + "step": 524 + }, + { + "epoch": 0.08, + "grad_norm": 16.25951547791112, + "learning_rate": 1.8197573656845756e-05, + "loss": 1.0588, + "step": 525 + }, + { + 
"epoch": 0.08, + "grad_norm": 27.599355801073518, + "learning_rate": 1.8232235701906412e-05, + "loss": 0.9713, + "step": 526 + }, + { + "epoch": 0.08, + "grad_norm": 33.122993457048636, + "learning_rate": 1.826689774696707e-05, + "loss": 1.0201, + "step": 527 + }, + { + "epoch": 0.08, + "grad_norm": 24.223397469833518, + "learning_rate": 1.830155979202773e-05, + "loss": 1.0946, + "step": 528 + }, + { + "epoch": 0.08, + "grad_norm": 29.740458575239316, + "learning_rate": 1.833622183708839e-05, + "loss": 0.9838, + "step": 529 + }, + { + "epoch": 0.08, + "grad_norm": 21.341621904414357, + "learning_rate": 1.837088388214905e-05, + "loss": 1.0443, + "step": 530 + }, + { + "epoch": 0.08, + "grad_norm": 26.34550028831831, + "learning_rate": 1.8405545927209706e-05, + "loss": 1.0417, + "step": 531 + }, + { + "epoch": 0.08, + "grad_norm": 22.180889743363394, + "learning_rate": 1.8440207972270366e-05, + "loss": 1.1049, + "step": 532 + }, + { + "epoch": 0.08, + "grad_norm": 25.271695696375218, + "learning_rate": 1.8474870017331022e-05, + "loss": 1.0915, + "step": 533 + }, + { + "epoch": 0.08, + "grad_norm": 23.83622464957578, + "learning_rate": 1.850953206239168e-05, + "loss": 1.1916, + "step": 534 + }, + { + "epoch": 0.08, + "grad_norm": 23.2894505708719, + "learning_rate": 1.854419410745234e-05, + "loss": 1.073, + "step": 535 + }, + { + "epoch": 0.08, + "grad_norm": 28.2656482092286, + "learning_rate": 1.8578856152513e-05, + "loss": 1.0285, + "step": 536 + }, + { + "epoch": 0.08, + "grad_norm": 20.59298381914133, + "learning_rate": 1.861351819757366e-05, + "loss": 1.1132, + "step": 537 + }, + { + "epoch": 0.08, + "grad_norm": 23.60061650304393, + "learning_rate": 1.8648180242634316e-05, + "loss": 1.1276, + "step": 538 + }, + { + "epoch": 0.08, + "grad_norm": 35.98301939371637, + "learning_rate": 1.8682842287694976e-05, + "loss": 1.159, + "step": 539 + }, + { + "epoch": 0.08, + "grad_norm": 4.4888981427578205, + "learning_rate": 1.8717504332755632e-05, + "loss": 1.0028, + "step": 540 + }, + { + "epoch": 0.08, + "grad_norm": 36.99781273324386, + "learning_rate": 1.8752166377816292e-05, + "loss": 1.1377, + "step": 541 + }, + { + "epoch": 0.08, + "grad_norm": 15.939578263032633, + "learning_rate": 1.878682842287695e-05, + "loss": 0.999, + "step": 542 + }, + { + "epoch": 0.08, + "grad_norm": 22.85660085708705, + "learning_rate": 1.882149046793761e-05, + "loss": 1.0134, + "step": 543 + }, + { + "epoch": 0.08, + "grad_norm": 17.241163366830648, + "learning_rate": 1.885615251299827e-05, + "loss": 1.0081, + "step": 544 + }, + { + "epoch": 0.09, + "grad_norm": 25.474464093786764, + "learning_rate": 1.8890814558058927e-05, + "loss": 1.1213, + "step": 545 + }, + { + "epoch": 0.09, + "grad_norm": 33.11335194940052, + "learning_rate": 1.8925476603119586e-05, + "loss": 1.0378, + "step": 546 + }, + { + "epoch": 0.09, + "grad_norm": 31.546985247203498, + "learning_rate": 1.8960138648180246e-05, + "loss": 1.0594, + "step": 547 + }, + { + "epoch": 0.09, + "grad_norm": 27.353861552798367, + "learning_rate": 1.8994800693240902e-05, + "loss": 1.0908, + "step": 548 + }, + { + "epoch": 0.09, + "grad_norm": 18.9522994570028, + "learning_rate": 1.902946273830156e-05, + "loss": 0.9971, + "step": 549 + }, + { + "epoch": 0.09, + "grad_norm": 26.431519891655817, + "learning_rate": 1.9064124783362217e-05, + "loss": 1.1073, + "step": 550 + }, + { + "epoch": 0.09, + "grad_norm": 24.854707471528116, + "learning_rate": 1.909878682842288e-05, + "loss": 1.0473, + "step": 551 + }, + { + "epoch": 0.09, + "grad_norm": 19.821719178716894, 
+ "learning_rate": 1.9133448873483537e-05, + "loss": 0.987, + "step": 552 + }, + { + "epoch": 0.09, + "grad_norm": 19.670015765342832, + "learning_rate": 1.9168110918544196e-05, + "loss": 1.0873, + "step": 553 + }, + { + "epoch": 0.09, + "grad_norm": 20.380559817983787, + "learning_rate": 1.9202772963604856e-05, + "loss": 0.9886, + "step": 554 + }, + { + "epoch": 0.09, + "grad_norm": 27.144432556811733, + "learning_rate": 1.9237435008665512e-05, + "loss": 0.9142, + "step": 555 + }, + { + "epoch": 0.09, + "grad_norm": 26.895634272720514, + "learning_rate": 1.927209705372617e-05, + "loss": 1.1352, + "step": 556 + }, + { + "epoch": 0.09, + "grad_norm": 22.938473895183744, + "learning_rate": 1.9306759098786828e-05, + "loss": 1.1945, + "step": 557 + }, + { + "epoch": 0.09, + "grad_norm": 19.095345260358535, + "learning_rate": 1.9341421143847487e-05, + "loss": 1.0434, + "step": 558 + }, + { + "epoch": 0.09, + "grad_norm": 41.15274760623393, + "learning_rate": 1.9376083188908147e-05, + "loss": 1.1775, + "step": 559 + }, + { + "epoch": 0.09, + "grad_norm": 23.886517844676984, + "learning_rate": 1.9410745233968806e-05, + "loss": 1.0895, + "step": 560 + }, + { + "epoch": 0.09, + "grad_norm": 22.219768592031212, + "learning_rate": 1.9445407279029466e-05, + "loss": 1.022, + "step": 561 + }, + { + "epoch": 0.09, + "grad_norm": 21.961321974964306, + "learning_rate": 1.9480069324090122e-05, + "loss": 1.0518, + "step": 562 + }, + { + "epoch": 0.09, + "grad_norm": 32.005822631718004, + "learning_rate": 1.951473136915078e-05, + "loss": 1.0212, + "step": 563 + }, + { + "epoch": 0.09, + "grad_norm": 21.961686493739755, + "learning_rate": 1.954939341421144e-05, + "loss": 1.0504, + "step": 564 + }, + { + "epoch": 0.09, + "grad_norm": 29.25877929944018, + "learning_rate": 1.9584055459272097e-05, + "loss": 1.0929, + "step": 565 + }, + { + "epoch": 0.09, + "grad_norm": 24.163975500572413, + "learning_rate": 1.9618717504332757e-05, + "loss": 1.0706, + "step": 566 + }, + { + "epoch": 0.09, + "grad_norm": 26.611705318472513, + "learning_rate": 1.9653379549393416e-05, + "loss": 0.9836, + "step": 567 + }, + { + "epoch": 0.09, + "grad_norm": 28.184492445566853, + "learning_rate": 1.9688041594454076e-05, + "loss": 0.9916, + "step": 568 + }, + { + "epoch": 0.09, + "grad_norm": 25.68156266800803, + "learning_rate": 1.9722703639514732e-05, + "loss": 1.1573, + "step": 569 + }, + { + "epoch": 0.09, + "grad_norm": 28.378879126518083, + "learning_rate": 1.975736568457539e-05, + "loss": 0.9497, + "step": 570 + }, + { + "epoch": 0.09, + "grad_norm": 19.356736233473026, + "learning_rate": 1.979202772963605e-05, + "loss": 0.9251, + "step": 571 + }, + { + "epoch": 0.09, + "grad_norm": 25.592310467899786, + "learning_rate": 1.9826689774696707e-05, + "loss": 1.0297, + "step": 572 + }, + { + "epoch": 0.09, + "grad_norm": 27.14714174210939, + "learning_rate": 1.9861351819757367e-05, + "loss": 1.0315, + "step": 573 + }, + { + "epoch": 0.09, + "grad_norm": 28.635279627490927, + "learning_rate": 1.9896013864818026e-05, + "loss": 1.0889, + "step": 574 + }, + { + "epoch": 0.09, + "grad_norm": 24.14251563807322, + "learning_rate": 1.9930675909878686e-05, + "loss": 1.0212, + "step": 575 + }, + { + "epoch": 0.09, + "grad_norm": 25.605607340399068, + "learning_rate": 1.9965337954939342e-05, + "loss": 1.055, + "step": 576 + }, + { + "epoch": 0.09, + "grad_norm": 22.861064176734494, + "learning_rate": 2e-05, + "loss": 0.9411, + "step": 577 + }, + { + "epoch": 0.09, + "grad_norm": 32.708221310020775, + "learning_rate": 1.9999999857802926e-05, + 
"loss": 1.0222, + "step": 578 + }, + { + "epoch": 0.09, + "grad_norm": 25.77872904734455, + "learning_rate": 1.9999999431211706e-05, + "loss": 1.0681, + "step": 579 + }, + { + "epoch": 0.09, + "grad_norm": 20.068217319997437, + "learning_rate": 1.999999872022635e-05, + "loss": 1.1066, + "step": 580 + }, + { + "epoch": 0.09, + "grad_norm": 31.490787994547425, + "learning_rate": 1.9999997724846883e-05, + "loss": 1.0383, + "step": 581 + }, + { + "epoch": 0.09, + "grad_norm": 34.98645871334549, + "learning_rate": 1.9999996445073327e-05, + "loss": 1.1206, + "step": 582 + }, + { + "epoch": 0.09, + "grad_norm": 35.632614390703424, + "learning_rate": 1.9999994880905726e-05, + "loss": 1.0542, + "step": 583 + }, + { + "epoch": 0.09, + "grad_norm": 25.23781468122964, + "learning_rate": 1.9999993032344115e-05, + "loss": 1.0387, + "step": 584 + }, + { + "epoch": 0.09, + "grad_norm": 27.61966249849483, + "learning_rate": 1.9999990899388556e-05, + "loss": 1.0255, + "step": 585 + }, + { + "epoch": 0.09, + "grad_norm": 24.504192646772623, + "learning_rate": 1.9999988482039104e-05, + "loss": 1.0037, + "step": 586 + }, + { + "epoch": 0.09, + "grad_norm": 21.648244447373916, + "learning_rate": 1.999998578029583e-05, + "loss": 1.2731, + "step": 587 + }, + { + "epoch": 0.09, + "grad_norm": 21.92577731908766, + "learning_rate": 1.999998279415881e-05, + "loss": 1.0188, + "step": 588 + }, + { + "epoch": 0.09, + "grad_norm": 23.714237684191083, + "learning_rate": 1.999997952362813e-05, + "loss": 1.0955, + "step": 589 + }, + { + "epoch": 0.09, + "grad_norm": 18.875496663575905, + "learning_rate": 1.999997596870388e-05, + "loss": 0.9669, + "step": 590 + }, + { + "epoch": 0.09, + "grad_norm": 17.41305678438287, + "learning_rate": 1.9999972129386165e-05, + "loss": 0.9065, + "step": 591 + }, + { + "epoch": 0.09, + "grad_norm": 20.794859932565107, + "learning_rate": 1.999996800567509e-05, + "loss": 1.0044, + "step": 592 + }, + { + "epoch": 0.09, + "grad_norm": 23.429124072996856, + "learning_rate": 1.999996359757078e-05, + "loss": 1.0774, + "step": 593 + }, + { + "epoch": 0.09, + "grad_norm": 23.424359437948585, + "learning_rate": 1.9999958905073352e-05, + "loss": 1.0994, + "step": 594 + }, + { + "epoch": 0.09, + "grad_norm": 18.345026243969027, + "learning_rate": 1.9999953928182942e-05, + "loss": 1.0255, + "step": 595 + }, + { + "epoch": 0.09, + "grad_norm": 30.414133266324512, + "learning_rate": 1.9999948666899695e-05, + "loss": 1.0921, + "step": 596 + }, + { + "epoch": 0.09, + "grad_norm": 24.384991495585798, + "learning_rate": 1.9999943121223753e-05, + "loss": 1.1104, + "step": 597 + }, + { + "epoch": 0.09, + "grad_norm": 18.39591885961205, + "learning_rate": 1.999993729115528e-05, + "loss": 1.0308, + "step": 598 + }, + { + "epoch": 0.09, + "grad_norm": 36.55302360842166, + "learning_rate": 1.999993117669444e-05, + "loss": 1.1033, + "step": 599 + }, + { + "epoch": 0.09, + "grad_norm": 27.528707666170977, + "learning_rate": 1.999992477784141e-05, + "loss": 1.002, + "step": 600 + }, + { + "epoch": 0.09, + "grad_norm": 39.0482325048875, + "learning_rate": 1.999991809459637e-05, + "loss": 0.9924, + "step": 601 + }, + { + "epoch": 0.09, + "grad_norm": 31.36379419691972, + "learning_rate": 1.9999911126959503e-05, + "loss": 1.0878, + "step": 602 + }, + { + "epoch": 0.09, + "grad_norm": 20.230094375873414, + "learning_rate": 1.9999903874931017e-05, + "loss": 0.9643, + "step": 603 + }, + { + "epoch": 0.09, + "grad_norm": 17.097369602337533, + "learning_rate": 1.9999896338511117e-05, + "loss": 1.0073, + "step": 604 + }, + { + 
"epoch": 0.09, + "grad_norm": 22.538517270030404, + "learning_rate": 1.999988851770001e-05, + "loss": 1.0647, + "step": 605 + }, + { + "epoch": 0.09, + "grad_norm": 25.087550676595903, + "learning_rate": 1.9999880412497927e-05, + "loss": 1.1512, + "step": 606 + }, + { + "epoch": 0.09, + "grad_norm": 22.644576876286553, + "learning_rate": 1.9999872022905094e-05, + "loss": 1.0329, + "step": 607 + }, + { + "epoch": 0.09, + "grad_norm": 25.57077115583161, + "learning_rate": 1.9999863348921748e-05, + "loss": 1.0519, + "step": 608 + }, + { + "epoch": 0.1, + "grad_norm": 22.806895674814548, + "learning_rate": 1.999985439054814e-05, + "loss": 1.0923, + "step": 609 + }, + { + "epoch": 0.1, + "grad_norm": 17.1697596113921, + "learning_rate": 1.9999845147784526e-05, + "loss": 1.0938, + "step": 610 + }, + { + "epoch": 0.1, + "grad_norm": 34.984784936143015, + "learning_rate": 1.999983562063116e-05, + "loss": 1.0604, + "step": 611 + }, + { + "epoch": 0.1, + "grad_norm": 17.31641290786309, + "learning_rate": 1.999982580908832e-05, + "loss": 1.0975, + "step": 612 + }, + { + "epoch": 0.1, + "grad_norm": 28.700540192601025, + "learning_rate": 1.9999815713156285e-05, + "loss": 1.1786, + "step": 613 + }, + { + "epoch": 0.1, + "grad_norm": 21.784075607083317, + "learning_rate": 1.9999805332835344e-05, + "loss": 1.1711, + "step": 614 + }, + { + "epoch": 0.1, + "grad_norm": 31.566909129873377, + "learning_rate": 1.9999794668125784e-05, + "loss": 0.979, + "step": 615 + }, + { + "epoch": 0.1, + "grad_norm": 32.344774729394906, + "learning_rate": 1.9999783719027913e-05, + "loss": 1.0891, + "step": 616 + }, + { + "epoch": 0.1, + "grad_norm": 27.350961898141318, + "learning_rate": 1.9999772485542048e-05, + "loss": 1.0251, + "step": 617 + }, + { + "epoch": 0.1, + "grad_norm": 30.323243079901548, + "learning_rate": 1.99997609676685e-05, + "loss": 1.0448, + "step": 618 + }, + { + "epoch": 0.1, + "grad_norm": 26.559370164481408, + "learning_rate": 1.99997491654076e-05, + "loss": 1.019, + "step": 619 + }, + { + "epoch": 0.1, + "grad_norm": 25.116834511656986, + "learning_rate": 1.9999737078759684e-05, + "loss": 1.0316, + "step": 620 + }, + { + "epoch": 0.1, + "grad_norm": 38.23014262882626, + "learning_rate": 1.999972470772509e-05, + "loss": 1.0279, + "step": 621 + }, + { + "epoch": 0.1, + "grad_norm": 27.264683982109712, + "learning_rate": 1.999971205230418e-05, + "loss": 1.0609, + "step": 622 + }, + { + "epoch": 0.1, + "grad_norm": 37.16865727262475, + "learning_rate": 1.999969911249731e-05, + "loss": 0.9348, + "step": 623 + }, + { + "epoch": 0.1, + "grad_norm": 15.669900577725162, + "learning_rate": 1.9999685888304844e-05, + "loss": 1.0294, + "step": 624 + }, + { + "epoch": 0.1, + "grad_norm": 25.08810207445628, + "learning_rate": 1.9999672379727165e-05, + "loss": 1.0945, + "step": 625 + }, + { + "epoch": 0.1, + "grad_norm": 5.214393464839559, + "learning_rate": 1.999965858676465e-05, + "loss": 0.9841, + "step": 626 + }, + { + "epoch": 0.1, + "grad_norm": 31.103198322971277, + "learning_rate": 1.9999644509417694e-05, + "loss": 1.0875, + "step": 627 + }, + { + "epoch": 0.1, + "grad_norm": 20.915001681033875, + "learning_rate": 1.99996301476867e-05, + "loss": 0.9758, + "step": 628 + }, + { + "epoch": 0.1, + "grad_norm": 28.30079704900265, + "learning_rate": 1.9999615501572073e-05, + "loss": 1.0322, + "step": 629 + }, + { + "epoch": 0.1, + "grad_norm": 38.04965149385254, + "learning_rate": 1.999960057107423e-05, + "loss": 1.0074, + "step": 630 + }, + { + "epoch": 0.1, + "grad_norm": 25.414347805622835, + "learning_rate": 
1.9999585356193597e-05, + "loss": 0.9241, + "step": 631 + }, + { + "epoch": 0.1, + "grad_norm": 28.90992076557785, + "learning_rate": 1.9999569856930604e-05, + "loss": 1.0194, + "step": 632 + }, + { + "epoch": 0.1, + "grad_norm": 25.110853662612588, + "learning_rate": 1.9999554073285695e-05, + "loss": 1.0284, + "step": 633 + }, + { + "epoch": 0.1, + "grad_norm": 22.778630374216906, + "learning_rate": 1.999953800525932e-05, + "loss": 1.1036, + "step": 634 + }, + { + "epoch": 0.1, + "grad_norm": 24.904941753025636, + "learning_rate": 1.9999521652851933e-05, + "loss": 0.9953, + "step": 635 + }, + { + "epoch": 0.1, + "grad_norm": 28.34261364640905, + "learning_rate": 1.9999505016063998e-05, + "loss": 1.012, + "step": 636 + }, + { + "epoch": 0.1, + "grad_norm": 8.114153859611994, + "learning_rate": 1.9999488094895992e-05, + "loss": 0.9224, + "step": 637 + }, + { + "epoch": 0.1, + "grad_norm": 9.201733390049078, + "learning_rate": 1.9999470889348394e-05, + "loss": 1.0225, + "step": 638 + }, + { + "epoch": 0.1, + "grad_norm": 16.33693891034599, + "learning_rate": 1.9999453399421692e-05, + "loss": 0.9935, + "step": 639 + }, + { + "epoch": 0.1, + "grad_norm": 18.755063072714822, + "learning_rate": 1.9999435625116383e-05, + "loss": 0.8937, + "step": 640 + }, + { + "epoch": 0.1, + "grad_norm": 31.135516419968262, + "learning_rate": 1.999941756643298e-05, + "loss": 1.0474, + "step": 641 + }, + { + "epoch": 0.1, + "grad_norm": 22.210619093269454, + "learning_rate": 1.9999399223371988e-05, + "loss": 0.9777, + "step": 642 + }, + { + "epoch": 0.1, + "grad_norm": 27.874397763131537, + "learning_rate": 1.999938059593393e-05, + "loss": 1.0197, + "step": 643 + }, + { + "epoch": 0.1, + "grad_norm": 23.09541794840785, + "learning_rate": 1.999936168411934e-05, + "loss": 1.0258, + "step": 644 + }, + { + "epoch": 0.1, + "grad_norm": 19.123838546802762, + "learning_rate": 1.999934248792875e-05, + "loss": 0.9936, + "step": 645 + }, + { + "epoch": 0.1, + "grad_norm": 20.733233766116115, + "learning_rate": 1.9999323007362708e-05, + "loss": 0.9803, + "step": 646 + }, + { + "epoch": 0.1, + "grad_norm": 26.073300094137274, + "learning_rate": 1.9999303242421773e-05, + "loss": 0.9174, + "step": 647 + }, + { + "epoch": 0.1, + "grad_norm": 18.286722855667904, + "learning_rate": 1.9999283193106504e-05, + "loss": 0.944, + "step": 648 + }, + { + "epoch": 0.1, + "grad_norm": 32.56092169939891, + "learning_rate": 1.9999262859417466e-05, + "loss": 1.019, + "step": 649 + }, + { + "epoch": 0.1, + "grad_norm": 17.95125505355276, + "learning_rate": 1.9999242241355247e-05, + "loss": 0.8869, + "step": 650 + }, + { + "epoch": 0.1, + "grad_norm": 32.132816929611685, + "learning_rate": 1.9999221338920424e-05, + "loss": 1.1021, + "step": 651 + }, + { + "epoch": 0.1, + "grad_norm": 17.434045022076624, + "learning_rate": 1.9999200152113595e-05, + "loss": 0.938, + "step": 652 + }, + { + "epoch": 0.1, + "grad_norm": 32.50571419843277, + "learning_rate": 1.9999178680935366e-05, + "loss": 1.0048, + "step": 653 + }, + { + "epoch": 0.1, + "grad_norm": 16.556043944205204, + "learning_rate": 1.9999156925386344e-05, + "loss": 0.9398, + "step": 654 + }, + { + "epoch": 0.1, + "grad_norm": 18.766440333330717, + "learning_rate": 1.9999134885467148e-05, + "loss": 0.9585, + "step": 655 + }, + { + "epoch": 0.1, + "grad_norm": 9.354974463671109, + "learning_rate": 1.9999112561178402e-05, + "loss": 1.074, + "step": 656 + }, + { + "epoch": 0.1, + "grad_norm": 28.99983671056059, + "learning_rate": 1.9999089952520746e-05, + "loss": 1.1193, + "step": 657 + }, + { 
+ "epoch": 0.1, + "grad_norm": 31.083429746626358, + "learning_rate": 1.9999067059494824e-05, + "loss": 0.9585, + "step": 658 + }, + { + "epoch": 0.1, + "grad_norm": 22.075384931081132, + "learning_rate": 1.999904388210128e-05, + "loss": 0.9859, + "step": 659 + }, + { + "epoch": 0.1, + "grad_norm": 23.781617919943116, + "learning_rate": 1.9999020420340782e-05, + "loss": 0.9583, + "step": 660 + }, + { + "epoch": 0.1, + "grad_norm": 31.966704287289733, + "learning_rate": 1.999899667421399e-05, + "loss": 1.098, + "step": 661 + }, + { + "epoch": 0.1, + "grad_norm": 25.534238631631656, + "learning_rate": 1.999897264372158e-05, + "loss": 1.0995, + "step": 662 + }, + { + "epoch": 0.1, + "grad_norm": 23.068356514703574, + "learning_rate": 1.999894832886424e-05, + "loss": 0.984, + "step": 663 + }, + { + "epoch": 0.1, + "grad_norm": 20.9964647538874, + "learning_rate": 1.9998923729642657e-05, + "loss": 0.998, + "step": 664 + }, + { + "epoch": 0.1, + "grad_norm": 6.393814403269237, + "learning_rate": 1.999889884605753e-05, + "loss": 0.9782, + "step": 665 + }, + { + "epoch": 0.1, + "grad_norm": 24.982013960016147, + "learning_rate": 1.9998873678109575e-05, + "loss": 1.0013, + "step": 666 + }, + { + "epoch": 0.1, + "grad_norm": 18.385080431888543, + "learning_rate": 1.99988482257995e-05, + "loss": 1.0315, + "step": 667 + }, + { + "epoch": 0.1, + "grad_norm": 21.403720561477744, + "learning_rate": 1.9998822489128028e-05, + "loss": 0.937, + "step": 668 + }, + { + "epoch": 0.1, + "grad_norm": 32.37210180407703, + "learning_rate": 1.9998796468095897e-05, + "loss": 1.0377, + "step": 669 + }, + { + "epoch": 0.1, + "grad_norm": 16.621343531451927, + "learning_rate": 1.999877016270384e-05, + "loss": 0.999, + "step": 670 + }, + { + "epoch": 0.1, + "grad_norm": 27.807004663220358, + "learning_rate": 1.9998743572952608e-05, + "loss": 0.9875, + "step": 671 + }, + { + "epoch": 0.1, + "grad_norm": 27.530692159822845, + "learning_rate": 1.999871669884296e-05, + "loss": 1.0033, + "step": 672 + }, + { + "epoch": 0.11, + "grad_norm": 17.477830139147436, + "learning_rate": 1.999868954037566e-05, + "loss": 0.9291, + "step": 673 + }, + { + "epoch": 0.11, + "grad_norm": 26.30354990763825, + "learning_rate": 1.9998662097551475e-05, + "loss": 0.9682, + "step": 674 + }, + { + "epoch": 0.11, + "grad_norm": 26.701506649907408, + "learning_rate": 1.9998634370371192e-05, + "loss": 1.0579, + "step": 675 + }, + { + "epoch": 0.11, + "grad_norm": 16.09844673750356, + "learning_rate": 1.9998606358835596e-05, + "loss": 1.0579, + "step": 676 + }, + { + "epoch": 0.11, + "grad_norm": 26.042153693159076, + "learning_rate": 1.9998578062945483e-05, + "loss": 1.1038, + "step": 677 + }, + { + "epoch": 0.11, + "grad_norm": 17.87003977935117, + "learning_rate": 1.999854948270166e-05, + "loss": 0.9282, + "step": 678 + }, + { + "epoch": 0.11, + "grad_norm": 1471.0013591518905, + "learning_rate": 1.999852061810494e-05, + "loss": 1.2118, + "step": 679 + }, + { + "epoch": 0.11, + "grad_norm": 35.0744775570728, + "learning_rate": 1.9998491469156137e-05, + "loss": 1.0116, + "step": 680 + }, + { + "epoch": 0.11, + "grad_norm": 28.13299186170543, + "learning_rate": 1.999846203585609e-05, + "loss": 0.9564, + "step": 681 + }, + { + "epoch": 0.11, + "grad_norm": 23.11076190518904, + "learning_rate": 1.9998432318205632e-05, + "loss": 0.9886, + "step": 682 + }, + { + "epoch": 0.11, + "grad_norm": 21.001125157958516, + "learning_rate": 1.9998402316205606e-05, + "loss": 1.0623, + "step": 683 + }, + { + "epoch": 0.11, + "grad_norm": 28.818401774857087, + 
"learning_rate": 1.9998372029856866e-05, + "loss": 1.0083, + "step": 684 + }, + { + "epoch": 0.11, + "grad_norm": 20.18902011512905, + "learning_rate": 1.9998341459160277e-05, + "loss": 0.9179, + "step": 685 + }, + { + "epoch": 0.11, + "grad_norm": 35.677553504388904, + "learning_rate": 1.9998310604116704e-05, + "loss": 1.0216, + "step": 686 + }, + { + "epoch": 0.11, + "grad_norm": 20.768781962357206, + "learning_rate": 1.999827946472703e-05, + "loss": 0.9849, + "step": 687 + }, + { + "epoch": 0.11, + "grad_norm": 31.64937660972853, + "learning_rate": 1.999824804099213e-05, + "loss": 0.9782, + "step": 688 + }, + { + "epoch": 0.11, + "grad_norm": 24.918096941199437, + "learning_rate": 1.9998216332912908e-05, + "loss": 1.0171, + "step": 689 + }, + { + "epoch": 0.11, + "grad_norm": 20.61013836708593, + "learning_rate": 1.9998184340490264e-05, + "loss": 0.885, + "step": 690 + }, + { + "epoch": 0.11, + "grad_norm": 22.565527580076296, + "learning_rate": 1.9998152063725107e-05, + "loss": 1.0084, + "step": 691 + }, + { + "epoch": 0.11, + "grad_norm": 20.157218866202182, + "learning_rate": 1.9998119502618353e-05, + "loss": 0.9549, + "step": 692 + }, + { + "epoch": 0.11, + "grad_norm": 17.942288719847813, + "learning_rate": 1.9998086657170925e-05, + "loss": 0.9984, + "step": 693 + }, + { + "epoch": 0.11, + "grad_norm": 18.81983038671551, + "learning_rate": 1.9998053527383766e-05, + "loss": 1.1064, + "step": 694 + }, + { + "epoch": 0.11, + "grad_norm": 17.702851369532596, + "learning_rate": 1.999802011325781e-05, + "loss": 0.9075, + "step": 695 + }, + { + "epoch": 0.11, + "grad_norm": 31.219935790826188, + "learning_rate": 1.9997986414794012e-05, + "loss": 1.0162, + "step": 696 + }, + { + "epoch": 0.11, + "grad_norm": 14.062142120681791, + "learning_rate": 1.999795243199333e-05, + "loss": 0.9574, + "step": 697 + }, + { + "epoch": 0.11, + "grad_norm": 16.163135072367126, + "learning_rate": 1.9997918164856728e-05, + "loss": 0.9591, + "step": 698 + }, + { + "epoch": 0.11, + "grad_norm": 22.240807100852727, + "learning_rate": 1.9997883613385184e-05, + "loss": 0.9251, + "step": 699 + }, + { + "epoch": 0.11, + "grad_norm": 28.1818260352272, + "learning_rate": 1.999784877757968e-05, + "loss": 0.9941, + "step": 700 + }, + { + "epoch": 0.11, + "grad_norm": 20.27154914499387, + "learning_rate": 1.99978136574412e-05, + "loss": 0.9307, + "step": 701 + }, + { + "epoch": 0.11, + "grad_norm": 25.716527018880733, + "learning_rate": 1.999777825297075e-05, + "loss": 1.0928, + "step": 702 + }, + { + "epoch": 0.11, + "grad_norm": 20.930286027268007, + "learning_rate": 1.9997742564169335e-05, + "loss": 0.9775, + "step": 703 + }, + { + "epoch": 0.11, + "grad_norm": 32.953649056374836, + "learning_rate": 1.999770659103797e-05, + "loss": 1.0377, + "step": 704 + }, + { + "epoch": 0.11, + "grad_norm": 31.720956645746604, + "learning_rate": 1.999767033357768e-05, + "loss": 1.136, + "step": 705 + }, + { + "epoch": 0.11, + "grad_norm": 23.675923263181794, + "learning_rate": 1.9997633791789496e-05, + "loss": 0.9852, + "step": 706 + }, + { + "epoch": 0.11, + "grad_norm": 19.159710022438254, + "learning_rate": 1.999759696567445e-05, + "loss": 0.9578, + "step": 707 + }, + { + "epoch": 0.11, + "grad_norm": 20.615629888993382, + "learning_rate": 1.9997559855233596e-05, + "loss": 0.983, + "step": 708 + }, + { + "epoch": 0.11, + "grad_norm": 33.146591727613576, + "learning_rate": 1.999752246046799e-05, + "loss": 1.1633, + "step": 709 + }, + { + "epoch": 0.11, + "grad_norm": 18.228318464618354, + "learning_rate": 1.9997484781378694e-05, 
+ "loss": 1.0657, + "step": 710 + }, + { + "epoch": 0.11, + "grad_norm": 18.935915363694495, + "learning_rate": 1.9997446817966776e-05, + "loss": 1.0634, + "step": 711 + }, + { + "epoch": 0.11, + "grad_norm": 23.081959929447248, + "learning_rate": 1.9997408570233322e-05, + "loss": 1.0599, + "step": 712 + }, + { + "epoch": 0.11, + "grad_norm": 24.53073736736404, + "learning_rate": 1.9997370038179414e-05, + "loss": 1.1398, + "step": 713 + }, + { + "epoch": 0.11, + "grad_norm": 21.904704015913204, + "learning_rate": 1.9997331221806152e-05, + "loss": 0.9441, + "step": 714 + }, + { + "epoch": 0.11, + "grad_norm": 19.297954312924443, + "learning_rate": 1.999729212111464e-05, + "loss": 0.9148, + "step": 715 + }, + { + "epoch": 0.11, + "grad_norm": 21.77771079126166, + "learning_rate": 1.9997252736105985e-05, + "loss": 0.9018, + "step": 716 + }, + { + "epoch": 0.11, + "grad_norm": 21.4628075742039, + "learning_rate": 1.9997213066781312e-05, + "loss": 0.9501, + "step": 717 + }, + { + "epoch": 0.11, + "grad_norm": 25.887196421443424, + "learning_rate": 1.9997173113141747e-05, + "loss": 1.0388, + "step": 718 + }, + { + "epoch": 0.11, + "grad_norm": 20.88366988841301, + "learning_rate": 1.9997132875188427e-05, + "loss": 0.8158, + "step": 719 + }, + { + "epoch": 0.11, + "grad_norm": 37.05347981778112, + "learning_rate": 1.9997092352922495e-05, + "loss": 1.176, + "step": 720 + }, + { + "epoch": 0.11, + "grad_norm": 31.72231868155152, + "learning_rate": 1.999705154634511e-05, + "loss": 1.2076, + "step": 721 + }, + { + "epoch": 0.11, + "grad_norm": 24.056440302541347, + "learning_rate": 1.999701045545742e-05, + "loss": 1.0438, + "step": 722 + }, + { + "epoch": 0.11, + "grad_norm": 28.075445910281495, + "learning_rate": 1.9996969080260604e-05, + "loss": 1.1965, + "step": 723 + }, + { + "epoch": 0.11, + "grad_norm": 26.17635262973931, + "learning_rate": 1.999692742075584e-05, + "loss": 1.0819, + "step": 724 + }, + { + "epoch": 0.11, + "grad_norm": 32.90122458402218, + "learning_rate": 1.99968854769443e-05, + "loss": 0.9422, + "step": 725 + }, + { + "epoch": 0.11, + "grad_norm": 25.10317667173797, + "learning_rate": 1.999684324882719e-05, + "loss": 1.031, + "step": 726 + }, + { + "epoch": 0.11, + "grad_norm": 24.35435118631165, + "learning_rate": 1.9996800736405702e-05, + "loss": 1.0282, + "step": 727 + }, + { + "epoch": 0.11, + "grad_norm": 33.61465806374499, + "learning_rate": 1.9996757939681052e-05, + "loss": 1.1021, + "step": 728 + }, + { + "epoch": 0.11, + "grad_norm": 29.424962678920473, + "learning_rate": 1.9996714858654456e-05, + "loss": 1.095, + "step": 729 + }, + { + "epoch": 0.11, + "grad_norm": 23.8259090862828, + "learning_rate": 1.9996671493327135e-05, + "loss": 0.9476, + "step": 730 + }, + { + "epoch": 0.11, + "grad_norm": 17.119920837066687, + "learning_rate": 1.9996627843700325e-05, + "loss": 0.9104, + "step": 731 + }, + { + "epoch": 0.11, + "grad_norm": 27.554752234049488, + "learning_rate": 1.9996583909775267e-05, + "loss": 1.0307, + "step": 732 + }, + { + "epoch": 0.11, + "grad_norm": 18.391961434436325, + "learning_rate": 1.999653969155321e-05, + "loss": 1.0621, + "step": 733 + }, + { + "epoch": 0.11, + "grad_norm": 44.55982382056265, + "learning_rate": 1.999649518903541e-05, + "loss": 1.0835, + "step": 734 + }, + { + "epoch": 0.11, + "grad_norm": 19.406889639247517, + "learning_rate": 1.9996450402223137e-05, + "loss": 0.9581, + "step": 735 + }, + { + "epoch": 0.11, + "grad_norm": 20.53286648400594, + "learning_rate": 1.9996405331117662e-05, + "loss": 1.0734, + "step": 736 + }, + { + 
"epoch": 0.12, + "grad_norm": 20.201748348929918, + "learning_rate": 1.999635997572027e-05, + "loss": 1.0315, + "step": 737 + }, + { + "epoch": 0.12, + "grad_norm": 31.566733431409812, + "learning_rate": 1.9996314336032243e-05, + "loss": 1.0203, + "step": 738 + }, + { + "epoch": 0.12, + "grad_norm": 25.12963293665336, + "learning_rate": 1.9996268412054887e-05, + "loss": 0.9966, + "step": 739 + }, + { + "epoch": 0.12, + "grad_norm": 6.609661739748793, + "learning_rate": 1.9996222203789504e-05, + "loss": 0.9146, + "step": 740 + }, + { + "epoch": 0.12, + "grad_norm": 25.767601571223842, + "learning_rate": 1.9996175711237406e-05, + "loss": 1.0671, + "step": 741 + }, + { + "epoch": 0.12, + "grad_norm": 38.12139600385158, + "learning_rate": 1.9996128934399923e-05, + "loss": 0.9169, + "step": 742 + }, + { + "epoch": 0.12, + "grad_norm": 36.19068369218754, + "learning_rate": 1.999608187327838e-05, + "loss": 1.0309, + "step": 743 + }, + { + "epoch": 0.12, + "grad_norm": 38.01410823168409, + "learning_rate": 1.9996034527874117e-05, + "loss": 0.951, + "step": 744 + }, + { + "epoch": 0.12, + "grad_norm": 35.589617285805026, + "learning_rate": 1.9995986898188477e-05, + "loss": 1.0413, + "step": 745 + }, + { + "epoch": 0.12, + "grad_norm": 29.23197348215027, + "learning_rate": 1.9995938984222815e-05, + "loss": 1.0136, + "step": 746 + }, + { + "epoch": 0.12, + "grad_norm": 34.717249418117866, + "learning_rate": 1.99958907859785e-05, + "loss": 1.1298, + "step": 747 + }, + { + "epoch": 0.12, + "grad_norm": 30.097320466640863, + "learning_rate": 1.99958423034569e-05, + "loss": 0.9868, + "step": 748 + }, + { + "epoch": 0.12, + "grad_norm": 30.448825783240018, + "learning_rate": 1.9995793536659388e-05, + "loss": 1.1305, + "step": 749 + }, + { + "epoch": 0.12, + "grad_norm": 25.343293259970014, + "learning_rate": 1.9995744485587356e-05, + "loss": 0.9858, + "step": 750 + }, + { + "epoch": 0.12, + "grad_norm": 22.89076918989473, + "learning_rate": 1.99956951502422e-05, + "loss": 0.9851, + "step": 751 + }, + { + "epoch": 0.12, + "grad_norm": 39.1549199883858, + "learning_rate": 1.999564553062532e-05, + "loss": 1.0863, + "step": 752 + }, + { + "epoch": 0.12, + "grad_norm": 24.20988632682434, + "learning_rate": 1.9995595626738128e-05, + "loss": 0.9575, + "step": 753 + }, + { + "epoch": 0.12, + "grad_norm": 28.49339147910342, + "learning_rate": 1.9995545438582044e-05, + "loss": 0.9938, + "step": 754 + }, + { + "epoch": 0.12, + "grad_norm": 19.020720661897847, + "learning_rate": 1.9995494966158494e-05, + "loss": 0.9864, + "step": 755 + }, + { + "epoch": 0.12, + "grad_norm": 27.414747939478183, + "learning_rate": 1.9995444209468916e-05, + "loss": 1.0082, + "step": 756 + }, + { + "epoch": 0.12, + "grad_norm": 20.755829020368772, + "learning_rate": 1.999539316851475e-05, + "loss": 0.9256, + "step": 757 + }, + { + "epoch": 0.12, + "grad_norm": 24.231491864276187, + "learning_rate": 1.999534184329745e-05, + "loss": 0.9442, + "step": 758 + }, + { + "epoch": 0.12, + "grad_norm": 25.269750093204454, + "learning_rate": 1.9995290233818475e-05, + "loss": 0.9794, + "step": 759 + }, + { + "epoch": 0.12, + "grad_norm": 25.29033883226424, + "learning_rate": 1.9995238340079295e-05, + "loss": 1.0591, + "step": 760 + }, + { + "epoch": 0.12, + "grad_norm": 31.637327954914582, + "learning_rate": 1.9995186162081384e-05, + "loss": 1.1424, + "step": 761 + }, + { + "epoch": 0.12, + "grad_norm": 28.39544905269339, + "learning_rate": 1.9995133699826222e-05, + "loss": 1.1801, + "step": 762 + }, + { + "epoch": 0.12, + "grad_norm": 
37.082381837224915, + "learning_rate": 1.999508095331531e-05, + "loss": 1.0525, + "step": 763 + }, + { + "epoch": 0.12, + "grad_norm": 31.00075740331179, + "learning_rate": 1.9995027922550137e-05, + "loss": 0.9518, + "step": 764 + }, + { + "epoch": 0.12, + "grad_norm": 28.91964572175146, + "learning_rate": 1.999497460753222e-05, + "loss": 1.0819, + "step": 765 + }, + { + "epoch": 0.12, + "grad_norm": 15.666236160603669, + "learning_rate": 1.9994921008263072e-05, + "loss": 0.8689, + "step": 766 + }, + { + "epoch": 0.12, + "grad_norm": 16.07462339621955, + "learning_rate": 1.9994867124744216e-05, + "loss": 0.8584, + "step": 767 + }, + { + "epoch": 0.12, + "grad_norm": 29.995975931807806, + "learning_rate": 1.9994812956977183e-05, + "loss": 1.0508, + "step": 768 + }, + { + "epoch": 0.12, + "grad_norm": 15.138358958316012, + "learning_rate": 1.9994758504963522e-05, + "loss": 0.9026, + "step": 769 + }, + { + "epoch": 0.12, + "grad_norm": 41.30043257083839, + "learning_rate": 1.9994703768704773e-05, + "loss": 1.0871, + "step": 770 + }, + { + "epoch": 0.12, + "grad_norm": 20.84981941206687, + "learning_rate": 1.9994648748202493e-05, + "loss": 0.9018, + "step": 771 + }, + { + "epoch": 0.12, + "grad_norm": 19.056270868284336, + "learning_rate": 1.9994593443458252e-05, + "loss": 0.9941, + "step": 772 + }, + { + "epoch": 0.12, + "grad_norm": 18.231511167349787, + "learning_rate": 1.999453785447362e-05, + "loss": 0.9491, + "step": 773 + }, + { + "epoch": 0.12, + "grad_norm": 19.790377846130333, + "learning_rate": 1.999448198125018e-05, + "loss": 0.957, + "step": 774 + }, + { + "epoch": 0.12, + "grad_norm": 32.21334604847917, + "learning_rate": 1.9994425823789517e-05, + "loss": 1.1117, + "step": 775 + }, + { + "epoch": 0.12, + "grad_norm": 40.859264248174526, + "learning_rate": 1.999436938209323e-05, + "loss": 1.0816, + "step": 776 + }, + { + "epoch": 0.12, + "grad_norm": 22.681576869897665, + "learning_rate": 1.9994312656162928e-05, + "loss": 0.9506, + "step": 777 + }, + { + "epoch": 0.12, + "grad_norm": 15.761793379861027, + "learning_rate": 1.9994255646000217e-05, + "loss": 0.9511, + "step": 778 + }, + { + "epoch": 0.12, + "grad_norm": 23.009516921325208, + "learning_rate": 1.999419835160672e-05, + "loss": 1.1356, + "step": 779 + }, + { + "epoch": 0.12, + "grad_norm": 29.058749703471978, + "learning_rate": 1.999414077298407e-05, + "loss": 0.9984, + "step": 780 + }, + { + "epoch": 0.12, + "grad_norm": 12.967857301390442, + "learning_rate": 1.9994082910133903e-05, + "loss": 0.9311, + "step": 781 + }, + { + "epoch": 0.12, + "grad_norm": 25.28353516797878, + "learning_rate": 1.9994024763057865e-05, + "loss": 0.9967, + "step": 782 + }, + { + "epoch": 0.12, + "grad_norm": 28.361055374804256, + "learning_rate": 1.9993966331757607e-05, + "loss": 0.9965, + "step": 783 + }, + { + "epoch": 0.12, + "grad_norm": 17.399942802295453, + "learning_rate": 1.9993907616234796e-05, + "loss": 1.1351, + "step": 784 + }, + { + "epoch": 0.12, + "grad_norm": 27.1752552356069, + "learning_rate": 1.9993848616491097e-05, + "loss": 1.0399, + "step": 785 + }, + { + "epoch": 0.12, + "grad_norm": 19.665572190710133, + "learning_rate": 1.9993789332528193e-05, + "loss": 0.9757, + "step": 786 + }, + { + "epoch": 0.12, + "grad_norm": 17.87341129638875, + "learning_rate": 1.9993729764347763e-05, + "loss": 0.9788, + "step": 787 + }, + { + "epoch": 0.12, + "grad_norm": 13.404118302845683, + "learning_rate": 1.9993669911951504e-05, + "loss": 0.9547, + "step": 788 + }, + { + "epoch": 0.12, + "grad_norm": 23.788661705048366, + 
"learning_rate": 1.999360977534112e-05, + "loss": 0.9624, + "step": 789 + }, + { + "epoch": 0.12, + "grad_norm": 26.020252454309638, + "learning_rate": 1.999354935451832e-05, + "loss": 1.0428, + "step": 790 + }, + { + "epoch": 0.12, + "grad_norm": 17.643653534481263, + "learning_rate": 1.999348864948482e-05, + "loss": 1.0729, + "step": 791 + }, + { + "epoch": 0.12, + "grad_norm": 29.491574148911653, + "learning_rate": 1.9993427660242356e-05, + "loss": 1.0208, + "step": 792 + }, + { + "epoch": 0.12, + "grad_norm": 21.411371135016022, + "learning_rate": 1.999336638679265e-05, + "loss": 1.0212, + "step": 793 + }, + { + "epoch": 0.12, + "grad_norm": 14.16741036518218, + "learning_rate": 1.999330482913745e-05, + "loss": 0.9471, + "step": 794 + }, + { + "epoch": 0.12, + "grad_norm": 26.060560856406358, + "learning_rate": 1.9993242987278508e-05, + "loss": 1.0415, + "step": 795 + }, + { + "epoch": 0.12, + "grad_norm": 24.240362704137674, + "learning_rate": 1.999318086121758e-05, + "loss": 0.8658, + "step": 796 + }, + { + "epoch": 0.12, + "grad_norm": 33.59682973451131, + "learning_rate": 1.9993118450956434e-05, + "loss": 1.0131, + "step": 797 + }, + { + "epoch": 0.12, + "grad_norm": 24.413119608867582, + "learning_rate": 1.9993055756496845e-05, + "loss": 0.8949, + "step": 798 + }, + { + "epoch": 0.12, + "grad_norm": 22.063337245324533, + "learning_rate": 1.9992992777840596e-05, + "loss": 1.0127, + "step": 799 + }, + { + "epoch": 0.12, + "grad_norm": 32.5013716032295, + "learning_rate": 1.999292951498948e-05, + "loss": 0.9796, + "step": 800 + }, + { + "epoch": 0.13, + "grad_norm": 21.4678596495198, + "learning_rate": 1.9992865967945295e-05, + "loss": 0.8518, + "step": 801 + }, + { + "epoch": 0.13, + "grad_norm": 24.86441435108581, + "learning_rate": 1.9992802136709842e-05, + "loss": 0.9583, + "step": 802 + }, + { + "epoch": 0.13, + "grad_norm": 33.19805786961399, + "learning_rate": 1.999273802128495e-05, + "loss": 1.0568, + "step": 803 + }, + { + "epoch": 0.13, + "grad_norm": 30.22292863501949, + "learning_rate": 1.9992673621672427e-05, + "loss": 1.0313, + "step": 804 + }, + { + "epoch": 0.13, + "grad_norm": 23.81481619456222, + "learning_rate": 1.9992608937874115e-05, + "loss": 1.0087, + "step": 805 + }, + { + "epoch": 0.13, + "grad_norm": 20.22193855322229, + "learning_rate": 1.999254396989185e-05, + "loss": 1.0421, + "step": 806 + }, + { + "epoch": 0.13, + "grad_norm": 27.403442756654155, + "learning_rate": 1.9992478717727478e-05, + "loss": 1.0045, + "step": 807 + }, + { + "epoch": 0.13, + "grad_norm": 28.41463838234398, + "learning_rate": 1.999241318138286e-05, + "loss": 0.9212, + "step": 808 + }, + { + "epoch": 0.13, + "grad_norm": 22.336751760909568, + "learning_rate": 1.9992347360859858e-05, + "loss": 0.9983, + "step": 809 + }, + { + "epoch": 0.13, + "grad_norm": 21.56880793631396, + "learning_rate": 1.9992281256160337e-05, + "loss": 0.9342, + "step": 810 + }, + { + "epoch": 0.13, + "grad_norm": 15.966927639020398, + "learning_rate": 1.9992214867286182e-05, + "loss": 0.8636, + "step": 811 + }, + { + "epoch": 0.13, + "grad_norm": 15.393939609188461, + "learning_rate": 1.9992148194239287e-05, + "loss": 0.9692, + "step": 812 + }, + { + "epoch": 0.13, + "grad_norm": 19.863124719263677, + "learning_rate": 1.999208123702154e-05, + "loss": 1.0059, + "step": 813 + }, + { + "epoch": 0.13, + "grad_norm": 25.308754605893263, + "learning_rate": 1.999201399563485e-05, + "loss": 1.0584, + "step": 814 + }, + { + "epoch": 0.13, + "grad_norm": 19.94069254585308, + "learning_rate": 1.9991946470081124e-05, + 
"loss": 0.9221, + "step": 815 + }, + { + "epoch": 0.13, + "grad_norm": 26.48242480544358, + "learning_rate": 1.9991878660362285e-05, + "loss": 1.0481, + "step": 816 + }, + { + "epoch": 0.13, + "grad_norm": 28.650056717809818, + "learning_rate": 1.9991810566480264e-05, + "loss": 0.9757, + "step": 817 + }, + { + "epoch": 0.13, + "grad_norm": 20.304980313698515, + "learning_rate": 1.9991742188436992e-05, + "loss": 0.9911, + "step": 818 + }, + { + "epoch": 0.13, + "grad_norm": 26.18954040059897, + "learning_rate": 1.999167352623442e-05, + "loss": 1.0247, + "step": 819 + }, + { + "epoch": 0.13, + "grad_norm": 29.615591504789602, + "learning_rate": 1.99916045798745e-05, + "loss": 0.9229, + "step": 820 + }, + { + "epoch": 0.13, + "grad_norm": 15.042097360141371, + "learning_rate": 1.999153534935919e-05, + "loss": 0.9028, + "step": 821 + }, + { + "epoch": 0.13, + "grad_norm": 19.36765037774426, + "learning_rate": 1.999146583469046e-05, + "loss": 0.9344, + "step": 822 + }, + { + "epoch": 0.13, + "grad_norm": 21.649725520853114, + "learning_rate": 1.9991396035870282e-05, + "loss": 0.9665, + "step": 823 + }, + { + "epoch": 0.13, + "grad_norm": 4.752910489352573, + "learning_rate": 1.999132595290065e-05, + "loss": 1.0209, + "step": 824 + }, + { + "epoch": 0.13, + "grad_norm": 20.927958070642642, + "learning_rate": 1.9991255585783547e-05, + "loss": 1.0627, + "step": 825 + }, + { + "epoch": 0.13, + "grad_norm": 24.393859252958695, + "learning_rate": 1.9991184934520987e-05, + "loss": 0.9001, + "step": 826 + }, + { + "epoch": 0.13, + "grad_norm": 17.021183249807795, + "learning_rate": 1.9991113999114966e-05, + "loss": 0.9242, + "step": 827 + }, + { + "epoch": 0.13, + "grad_norm": 5.814606630902126, + "learning_rate": 1.9991042779567512e-05, + "loss": 1.0353, + "step": 828 + }, + { + "epoch": 0.13, + "grad_norm": 21.908477894047138, + "learning_rate": 1.9990971275880643e-05, + "loss": 0.883, + "step": 829 + }, + { + "epoch": 0.13, + "grad_norm": 24.140590699180752, + "learning_rate": 1.9990899488056396e-05, + "loss": 0.9375, + "step": 830 + }, + { + "epoch": 0.13, + "grad_norm": 21.82090466464421, + "learning_rate": 1.9990827416096813e-05, + "loss": 0.9946, + "step": 831 + }, + { + "epoch": 0.13, + "grad_norm": 20.062615461106372, + "learning_rate": 1.9990755060003945e-05, + "loss": 0.939, + "step": 832 + }, + { + "epoch": 0.13, + "grad_norm": 24.180641766823623, + "learning_rate": 1.9990682419779844e-05, + "loss": 0.9904, + "step": 833 + }, + { + "epoch": 0.13, + "grad_norm": 16.594210069190982, + "learning_rate": 1.9990609495426582e-05, + "loss": 0.8201, + "step": 834 + }, + { + "epoch": 0.13, + "grad_norm": 23.217124454000132, + "learning_rate": 1.9990536286946227e-05, + "loss": 0.9544, + "step": 835 + }, + { + "epoch": 0.13, + "grad_norm": 17.726451528781027, + "learning_rate": 1.9990462794340864e-05, + "loss": 0.9834, + "step": 836 + }, + { + "epoch": 0.13, + "grad_norm": 17.424770998493283, + "learning_rate": 1.9990389017612587e-05, + "loss": 0.9333, + "step": 837 + }, + { + "epoch": 0.13, + "grad_norm": 31.88116710317783, + "learning_rate": 1.999031495676349e-05, + "loss": 1.0023, + "step": 838 + }, + { + "epoch": 0.13, + "grad_norm": 29.989587294002025, + "learning_rate": 1.999024061179568e-05, + "loss": 0.9795, + "step": 839 + }, + { + "epoch": 0.13, + "grad_norm": 19.427036379858826, + "learning_rate": 1.9990165982711266e-05, + "loss": 0.9978, + "step": 840 + }, + { + "epoch": 0.13, + "grad_norm": 18.00849579116745, + "learning_rate": 1.999009106951238e-05, + "loss": 0.932, + "step": 841 + }, + { 
+ "epoch": 0.13, + "grad_norm": 16.269670381901356, + "learning_rate": 1.9990015872201147e-05, + "loss": 1.0195, + "step": 842 + }, + { + "epoch": 0.13, + "grad_norm": 23.72704759716728, + "learning_rate": 1.998994039077971e-05, + "loss": 0.9288, + "step": 843 + }, + { + "epoch": 0.13, + "grad_norm": 21.77555586998277, + "learning_rate": 1.9989864625250206e-05, + "loss": 0.91, + "step": 844 + }, + { + "epoch": 0.13, + "grad_norm": 17.9001177653043, + "learning_rate": 1.99897885756148e-05, + "loss": 0.9871, + "step": 845 + }, + { + "epoch": 0.13, + "grad_norm": 18.365308538926303, + "learning_rate": 1.998971224187565e-05, + "loss": 0.9918, + "step": 846 + }, + { + "epoch": 0.13, + "grad_norm": 6.15001487553259, + "learning_rate": 1.998963562403493e-05, + "loss": 0.9024, + "step": 847 + }, + { + "epoch": 0.13, + "grad_norm": 30.6216208734604, + "learning_rate": 1.9989558722094813e-05, + "loss": 1.1355, + "step": 848 + }, + { + "epoch": 0.13, + "grad_norm": 17.538922120278034, + "learning_rate": 1.998948153605749e-05, + "loss": 0.9668, + "step": 849 + }, + { + "epoch": 0.13, + "grad_norm": 23.142611519432652, + "learning_rate": 1.9989404065925157e-05, + "loss": 0.9824, + "step": 850 + }, + { + "epoch": 0.13, + "grad_norm": 18.998781814548558, + "learning_rate": 1.9989326311700016e-05, + "loss": 0.9729, + "step": 851 + }, + { + "epoch": 0.13, + "grad_norm": 24.815911225495, + "learning_rate": 1.9989248273384278e-05, + "loss": 1.1024, + "step": 852 + }, + { + "epoch": 0.13, + "grad_norm": 22.568448812719364, + "learning_rate": 1.998916995098016e-05, + "loss": 1.0269, + "step": 853 + }, + { + "epoch": 0.13, + "grad_norm": 37.97529794878723, + "learning_rate": 1.9989091344489896e-05, + "loss": 1.1285, + "step": 854 + }, + { + "epoch": 0.13, + "grad_norm": 18.093833307130453, + "learning_rate": 1.9989012453915718e-05, + "loss": 0.9447, + "step": 855 + }, + { + "epoch": 0.13, + "grad_norm": 21.977681582043218, + "learning_rate": 1.998893327925987e-05, + "loss": 1.0056, + "step": 856 + }, + { + "epoch": 0.13, + "grad_norm": 26.978096634156046, + "learning_rate": 1.9988853820524596e-05, + "loss": 0.9743, + "step": 857 + }, + { + "epoch": 0.13, + "grad_norm": 11.30994013082935, + "learning_rate": 1.998877407771217e-05, + "loss": 1.0, + "step": 858 + }, + { + "epoch": 0.13, + "grad_norm": 22.29930427913624, + "learning_rate": 1.9988694050824847e-05, + "loss": 1.0081, + "step": 859 + }, + { + "epoch": 0.13, + "grad_norm": 22.667376292754565, + "learning_rate": 1.998861373986491e-05, + "loss": 0.8904, + "step": 860 + }, + { + "epoch": 0.13, + "grad_norm": 25.86367086933168, + "learning_rate": 1.9988533144834642e-05, + "loss": 1.1446, + "step": 861 + }, + { + "epoch": 0.13, + "grad_norm": 29.355136942558662, + "learning_rate": 1.9988452265736335e-05, + "loss": 0.9395, + "step": 862 + }, + { + "epoch": 0.13, + "grad_norm": 27.30893269829037, + "learning_rate": 1.998837110257229e-05, + "loss": 0.961, + "step": 863 + }, + { + "epoch": 0.13, + "grad_norm": 23.567106449316867, + "learning_rate": 1.9988289655344814e-05, + "loss": 0.9266, + "step": 864 + }, + { + "epoch": 0.14, + "grad_norm": 25.23541728308947, + "learning_rate": 1.998820792405622e-05, + "loss": 0.8713, + "step": 865 + }, + { + "epoch": 0.14, + "grad_norm": 17.765799393213108, + "learning_rate": 1.998812590870884e-05, + "loss": 0.9929, + "step": 866 + }, + { + "epoch": 0.14, + "grad_norm": 27.135648330436652, + "learning_rate": 1.9988043609304995e-05, + "loss": 0.9062, + "step": 867 + }, + { + "epoch": 0.14, + "grad_norm": 29.160797839762104, + 
"learning_rate": 1.9987961025847037e-05, + "loss": 1.0473, + "step": 868 + }, + { + "epoch": 0.14, + "grad_norm": 28.63223994100243, + "learning_rate": 1.998787815833731e-05, + "loss": 0.9395, + "step": 869 + }, + { + "epoch": 0.14, + "grad_norm": 37.465919881683654, + "learning_rate": 1.9987795006778173e-05, + "loss": 0.9055, + "step": 870 + }, + { + "epoch": 0.14, + "grad_norm": 19.221696657207115, + "learning_rate": 1.9987711571171987e-05, + "loss": 1.0072, + "step": 871 + }, + { + "epoch": 0.14, + "grad_norm": 34.105648468036904, + "learning_rate": 1.9987627851521126e-05, + "loss": 1.0029, + "step": 872 + }, + { + "epoch": 0.14, + "grad_norm": 28.93263817186818, + "learning_rate": 1.998754384782797e-05, + "loss": 1.0123, + "step": 873 + }, + { + "epoch": 0.14, + "grad_norm": 24.908467970607894, + "learning_rate": 1.998745956009491e-05, + "loss": 0.9502, + "step": 874 + }, + { + "epoch": 0.14, + "grad_norm": 13.42165320838223, + "learning_rate": 1.9987374988324345e-05, + "loss": 0.9002, + "step": 875 + }, + { + "epoch": 0.14, + "grad_norm": 20.078146314795195, + "learning_rate": 1.9987290132518677e-05, + "loss": 0.9438, + "step": 876 + }, + { + "epoch": 0.14, + "grad_norm": 33.27848961893949, + "learning_rate": 1.9987204992680322e-05, + "loss": 0.9485, + "step": 877 + }, + { + "epoch": 0.14, + "grad_norm": 17.26684856394545, + "learning_rate": 1.9987119568811693e-05, + "loss": 1.0242, + "step": 878 + }, + { + "epoch": 0.14, + "grad_norm": 20.897137022108495, + "learning_rate": 1.998703386091523e-05, + "loss": 0.868, + "step": 879 + }, + { + "epoch": 0.14, + "grad_norm": 20.083151933156167, + "learning_rate": 1.9986947868993366e-05, + "loss": 0.9719, + "step": 880 + }, + { + "epoch": 0.14, + "grad_norm": 23.88446296027853, + "learning_rate": 1.998686159304855e-05, + "loss": 0.8459, + "step": 881 + }, + { + "epoch": 0.14, + "grad_norm": 22.437631533617076, + "learning_rate": 1.9986775033083225e-05, + "loss": 0.861, + "step": 882 + }, + { + "epoch": 0.14, + "grad_norm": 18.935485884994645, + "learning_rate": 1.9986688189099865e-05, + "loss": 0.9921, + "step": 883 + }, + { + "epoch": 0.14, + "grad_norm": 21.621648396422263, + "learning_rate": 1.9986601061100934e-05, + "loss": 0.9493, + "step": 884 + }, + { + "epoch": 0.14, + "grad_norm": 18.00615028190211, + "learning_rate": 1.9986513649088915e-05, + "loss": 0.9605, + "step": 885 + }, + { + "epoch": 0.14, + "grad_norm": 25.535952044605807, + "learning_rate": 1.9986425953066284e-05, + "loss": 0.9192, + "step": 886 + }, + { + "epoch": 0.14, + "grad_norm": 28.33890047192677, + "learning_rate": 1.9986337973035542e-05, + "loss": 1.1137, + "step": 887 + }, + { + "epoch": 0.14, + "grad_norm": 28.633470350257046, + "learning_rate": 1.9986249708999194e-05, + "loss": 1.0084, + "step": 888 + }, + { + "epoch": 0.14, + "grad_norm": 30.547819182250514, + "learning_rate": 1.998616116095974e-05, + "loss": 0.8784, + "step": 889 + }, + { + "epoch": 0.14, + "grad_norm": 20.728594514284588, + "learning_rate": 1.9986072328919707e-05, + "loss": 0.929, + "step": 890 + }, + { + "epoch": 0.14, + "grad_norm": 20.206547964184086, + "learning_rate": 1.9985983212881618e-05, + "loss": 1.0093, + "step": 891 + }, + { + "epoch": 0.14, + "grad_norm": 27.868262481702807, + "learning_rate": 1.9985893812848007e-05, + "loss": 0.9966, + "step": 892 + }, + { + "epoch": 0.14, + "grad_norm": 18.120365907755325, + "learning_rate": 1.998580412882142e-05, + "loss": 0.9585, + "step": 893 + }, + { + "epoch": 0.14, + "grad_norm": 23.40060378094494, + "learning_rate": 
1.9985714160804406e-05, + "loss": 0.951, + "step": 894 + }, + { + "epoch": 0.14, + "grad_norm": 63.25595737001749, + "learning_rate": 1.9985623908799518e-05, + "loss": 1.0075, + "step": 895 + }, + { + "epoch": 0.14, + "grad_norm": 21.184525813103157, + "learning_rate": 1.9985533372809335e-05, + "loss": 0.9598, + "step": 896 + }, + { + "epoch": 0.14, + "grad_norm": 25.37430623943003, + "learning_rate": 1.9985442552836417e-05, + "loss": 1.0131, + "step": 897 + }, + { + "epoch": 0.14, + "grad_norm": 15.614081869721268, + "learning_rate": 1.9985351448883358e-05, + "loss": 0.8407, + "step": 898 + }, + { + "epoch": 0.14, + "grad_norm": 40.66036947009204, + "learning_rate": 1.9985260060952742e-05, + "loss": 1.1067, + "step": 899 + }, + { + "epoch": 0.14, + "grad_norm": 23.771311306654475, + "learning_rate": 1.9985168389047175e-05, + "loss": 0.9295, + "step": 900 + }, + { + "epoch": 0.14, + "grad_norm": 19.97576504675759, + "learning_rate": 1.9985076433169258e-05, + "loss": 0.9276, + "step": 901 + }, + { + "epoch": 0.14, + "grad_norm": 33.185904152894416, + "learning_rate": 1.9984984193321608e-05, + "loss": 0.9348, + "step": 902 + }, + { + "epoch": 0.14, + "grad_norm": 25.139107118380018, + "learning_rate": 1.9984891669506847e-05, + "loss": 1.0378, + "step": 903 + }, + { + "epoch": 0.14, + "grad_norm": 29.13446842949091, + "learning_rate": 1.9984798861727614e-05, + "loss": 1.1061, + "step": 904 + }, + { + "epoch": 0.14, + "grad_norm": 33.56381521010012, + "learning_rate": 1.9984705769986538e-05, + "loss": 0.9512, + "step": 905 + }, + { + "epoch": 0.14, + "grad_norm": 18.831056176741296, + "learning_rate": 1.998461239428627e-05, + "loss": 0.9442, + "step": 906 + }, + { + "epoch": 0.14, + "grad_norm": 25.284614438729086, + "learning_rate": 1.9984518734629467e-05, + "loss": 1.0857, + "step": 907 + }, + { + "epoch": 0.14, + "grad_norm": 4.470259374024849, + "learning_rate": 1.998442479101879e-05, + "loss": 0.8212, + "step": 908 + }, + { + "epoch": 0.14, + "grad_norm": 18.079067228749622, + "learning_rate": 1.9984330563456918e-05, + "loss": 1.0071, + "step": 909 + }, + { + "epoch": 0.14, + "grad_norm": 18.548201359288534, + "learning_rate": 1.998423605194652e-05, + "loss": 0.9289, + "step": 910 + }, + { + "epoch": 0.14, + "grad_norm": 22.867643656089143, + "learning_rate": 1.9984141256490294e-05, + "loss": 0.8817, + "step": 911 + }, + { + "epoch": 0.14, + "grad_norm": 25.75903194116444, + "learning_rate": 1.9984046177090926e-05, + "loss": 0.9227, + "step": 912 + }, + { + "epoch": 0.14, + "grad_norm": 20.776527642640172, + "learning_rate": 1.998395081375113e-05, + "loss": 0.9352, + "step": 913 + }, + { + "epoch": 0.14, + "grad_norm": 17.641440914128953, + "learning_rate": 1.9983855166473613e-05, + "loss": 0.9707, + "step": 914 + }, + { + "epoch": 0.14, + "grad_norm": 16.1738760286084, + "learning_rate": 1.998375923526109e-05, + "loss": 0.9254, + "step": 915 + }, + { + "epoch": 0.14, + "grad_norm": 25.244305857370346, + "learning_rate": 1.99836630201163e-05, + "loss": 0.9369, + "step": 916 + }, + { + "epoch": 0.14, + "grad_norm": 19.632949080259692, + "learning_rate": 1.9983566521041973e-05, + "loss": 1.0031, + "step": 917 + }, + { + "epoch": 0.14, + "grad_norm": 26.584330628760394, + "learning_rate": 1.9983469738040852e-05, + "loss": 0.9407, + "step": 918 + }, + { + "epoch": 0.14, + "grad_norm": 26.7209982779378, + "learning_rate": 1.9983372671115696e-05, + "loss": 0.8765, + "step": 919 + }, + { + "epoch": 0.14, + "grad_norm": 20.68512370905215, + "learning_rate": 1.998327532026926e-05, + "loss": 
0.9519, + "step": 920 + }, + { + "epoch": 0.14, + "grad_norm": 20.818700823930502, + "learning_rate": 1.9983177685504312e-05, + "loss": 1.0332, + "step": 921 + }, + { + "epoch": 0.14, + "grad_norm": 22.28754939570171, + "learning_rate": 1.998307976682363e-05, + "loss": 0.9153, + "step": 922 + }, + { + "epoch": 0.14, + "grad_norm": 26.84776960948699, + "learning_rate": 1.998298156423e-05, + "loss": 0.9509, + "step": 923 + }, + { + "epoch": 0.14, + "grad_norm": 17.2049697740919, + "learning_rate": 1.9982883077726218e-05, + "loss": 0.9338, + "step": 924 + }, + { + "epoch": 0.14, + "grad_norm": 18.359839754106535, + "learning_rate": 1.998278430731508e-05, + "loss": 0.9355, + "step": 925 + }, + { + "epoch": 0.14, + "grad_norm": 25.97638165661955, + "learning_rate": 1.9982685252999393e-05, + "loss": 0.8382, + "step": 926 + }, + { + "epoch": 0.14, + "grad_norm": 17.32619890080394, + "learning_rate": 1.998258591478198e-05, + "loss": 0.959, + "step": 927 + }, + { + "epoch": 0.14, + "grad_norm": 20.89415149625322, + "learning_rate": 1.998248629266566e-05, + "loss": 0.8957, + "step": 928 + }, + { + "epoch": 0.15, + "grad_norm": 31.943558460292774, + "learning_rate": 1.9982386386653273e-05, + "loss": 0.9798, + "step": 929 + }, + { + "epoch": 0.15, + "grad_norm": 24.017375857712963, + "learning_rate": 1.9982286196747653e-05, + "loss": 0.8992, + "step": 930 + }, + { + "epoch": 0.15, + "grad_norm": 42.161691614895396, + "learning_rate": 1.9982185722951655e-05, + "loss": 1.0017, + "step": 931 + }, + { + "epoch": 0.15, + "grad_norm": 20.98815800644647, + "learning_rate": 1.9982084965268134e-05, + "loss": 0.9948, + "step": 932 + }, + { + "epoch": 0.15, + "grad_norm": 19.64485681263662, + "learning_rate": 1.9981983923699956e-05, + "loss": 0.9194, + "step": 933 + }, + { + "epoch": 0.15, + "grad_norm": 16.91240020274571, + "learning_rate": 1.9981882598249996e-05, + "loss": 0.9707, + "step": 934 + }, + { + "epoch": 0.15, + "grad_norm": 35.70244139970563, + "learning_rate": 1.998178098892113e-05, + "loss": 0.9954, + "step": 935 + }, + { + "epoch": 0.15, + "grad_norm": 19.285258117678723, + "learning_rate": 1.998167909571626e-05, + "loss": 0.9304, + "step": 936 + }, + { + "epoch": 0.15, + "grad_norm": 14.17099899625401, + "learning_rate": 1.998157691863827e-05, + "loss": 0.9262, + "step": 937 + }, + { + "epoch": 0.15, + "grad_norm": 19.25734694190907, + "learning_rate": 1.998147445769007e-05, + "loss": 1.0516, + "step": 938 + }, + { + "epoch": 0.15, + "grad_norm": 22.78016361673385, + "learning_rate": 1.9981371712874576e-05, + "loss": 0.8487, + "step": 939 + }, + { + "epoch": 0.15, + "grad_norm": 25.60884712680659, + "learning_rate": 1.9981268684194712e-05, + "loss": 0.8976, + "step": 940 + }, + { + "epoch": 0.15, + "grad_norm": 20.543851541489303, + "learning_rate": 1.9981165371653404e-05, + "loss": 1.0043, + "step": 941 + }, + { + "epoch": 0.15, + "grad_norm": 23.880149045154408, + "learning_rate": 1.998106177525359e-05, + "loss": 0.9128, + "step": 942 + }, + { + "epoch": 0.15, + "grad_norm": 22.143342445306903, + "learning_rate": 1.998095789499822e-05, + "loss": 1.0114, + "step": 943 + }, + { + "epoch": 0.15, + "grad_norm": 19.71753376324862, + "learning_rate": 1.9980853730890248e-05, + "loss": 0.9462, + "step": 944 + }, + { + "epoch": 0.15, + "grad_norm": 15.723056235586299, + "learning_rate": 1.998074928293263e-05, + "loss": 0.853, + "step": 945 + }, + { + "epoch": 0.15, + "grad_norm": 15.305508550183559, + "learning_rate": 1.9980644551128347e-05, + "loss": 0.947, + "step": 946 + }, + { + "epoch": 0.15, + 
"grad_norm": 26.7884693475016, + "learning_rate": 1.9980539535480365e-05, + "loss": 0.9528, + "step": 947 + }, + { + "epoch": 0.15, + "grad_norm": 17.684553332254552, + "learning_rate": 1.998043423599168e-05, + "loss": 0.9229, + "step": 948 + }, + { + "epoch": 0.15, + "grad_norm": 19.45037526225891, + "learning_rate": 1.9980328652665285e-05, + "loss": 0.93, + "step": 949 + }, + { + "epoch": 0.15, + "grad_norm": 22.342525647370522, + "learning_rate": 1.9980222785504178e-05, + "loss": 0.8307, + "step": 950 + }, + { + "epoch": 0.15, + "grad_norm": 15.949869235113345, + "learning_rate": 1.9980116634511377e-05, + "loss": 0.9576, + "step": 951 + }, + { + "epoch": 0.15, + "grad_norm": 25.881216256166415, + "learning_rate": 1.9980010199689893e-05, + "loss": 0.9439, + "step": 952 + }, + { + "epoch": 0.15, + "grad_norm": 21.92329329418154, + "learning_rate": 1.997990348104276e-05, + "loss": 1.0542, + "step": 953 + }, + { + "epoch": 0.15, + "grad_norm": 15.721333365378953, + "learning_rate": 1.997979647857301e-05, + "loss": 0.9466, + "step": 954 + }, + { + "epoch": 0.15, + "grad_norm": 19.140817454402058, + "learning_rate": 1.9979689192283683e-05, + "loss": 1.0371, + "step": 955 + }, + { + "epoch": 0.15, + "grad_norm": 27.79666640560971, + "learning_rate": 1.997958162217784e-05, + "loss": 0.9012, + "step": 956 + }, + { + "epoch": 0.15, + "grad_norm": 20.34264524598088, + "learning_rate": 1.9979473768258528e-05, + "loss": 0.9864, + "step": 957 + }, + { + "epoch": 0.15, + "grad_norm": 13.877202936832614, + "learning_rate": 1.9979365630528824e-05, + "loss": 0.8326, + "step": 958 + }, + { + "epoch": 0.15, + "grad_norm": 18.292802280839137, + "learning_rate": 1.9979257208991796e-05, + "loss": 1.0353, + "step": 959 + }, + { + "epoch": 0.15, + "grad_norm": 21.998349341868792, + "learning_rate": 1.9979148503650528e-05, + "loss": 0.9579, + "step": 960 + }, + { + "epoch": 0.15, + "grad_norm": 21.46610453602181, + "learning_rate": 1.997903951450812e-05, + "loss": 0.8989, + "step": 961 + }, + { + "epoch": 0.15, + "grad_norm": 19.396557091882116, + "learning_rate": 1.9978930241567662e-05, + "loss": 1.072, + "step": 962 + }, + { + "epoch": 0.15, + "grad_norm": 22.311744914247736, + "learning_rate": 1.9978820684832268e-05, + "loss": 0.8628, + "step": 963 + }, + { + "epoch": 0.15, + "grad_norm": 21.23925123443074, + "learning_rate": 1.997871084430505e-05, + "loss": 0.9007, + "step": 964 + }, + { + "epoch": 0.15, + "grad_norm": 15.155163351411675, + "learning_rate": 1.997860071998913e-05, + "loss": 0.8953, + "step": 965 + }, + { + "epoch": 0.15, + "grad_norm": 27.784837092670617, + "learning_rate": 1.997849031188765e-05, + "loss": 0.9702, + "step": 966 + }, + { + "epoch": 0.15, + "grad_norm": 22.56300106113026, + "learning_rate": 1.9978379620003737e-05, + "loss": 0.9037, + "step": 967 + }, + { + "epoch": 0.15, + "grad_norm": 30.88380557334971, + "learning_rate": 1.9978268644340545e-05, + "loss": 0.9354, + "step": 968 + }, + { + "epoch": 0.15, + "grad_norm": 24.44037004968756, + "learning_rate": 1.997815738490123e-05, + "loss": 0.9041, + "step": 969 + }, + { + "epoch": 0.15, + "grad_norm": 15.061041783148795, + "learning_rate": 1.997804584168896e-05, + "loss": 0.8689, + "step": 970 + }, + { + "epoch": 0.15, + "grad_norm": 23.00842304682578, + "learning_rate": 1.9977934014706898e-05, + "loss": 1.0025, + "step": 971 + }, + { + "epoch": 0.15, + "grad_norm": 22.164024982279454, + "learning_rate": 1.997782190395823e-05, + "loss": 1.036, + "step": 972 + }, + { + "epoch": 0.15, + "grad_norm": 23.160050762989524, + 
"learning_rate": 1.9977709509446148e-05, + "loss": 1.0333, + "step": 973 + }, + { + "epoch": 0.15, + "grad_norm": 28.18236742319079, + "learning_rate": 1.9977596831173843e-05, + "loss": 0.9852, + "step": 974 + }, + { + "epoch": 0.15, + "grad_norm": 21.215368577298406, + "learning_rate": 1.9977483869144522e-05, + "loss": 0.8775, + "step": 975 + }, + { + "epoch": 0.15, + "grad_norm": 27.592001121889975, + "learning_rate": 1.9977370623361393e-05, + "loss": 1.0141, + "step": 976 + }, + { + "epoch": 0.15, + "grad_norm": 22.88561976656229, + "learning_rate": 1.997725709382768e-05, + "loss": 0.9635, + "step": 977 + }, + { + "epoch": 0.15, + "grad_norm": 24.50692665461813, + "learning_rate": 1.9977143280546616e-05, + "loss": 1.0061, + "step": 978 + }, + { + "epoch": 0.15, + "grad_norm": 25.888740135974892, + "learning_rate": 1.997702918352143e-05, + "loss": 1.0239, + "step": 979 + }, + { + "epoch": 0.15, + "grad_norm": 26.65698507822647, + "learning_rate": 1.997691480275537e-05, + "loss": 0.8671, + "step": 980 + }, + { + "epoch": 0.15, + "grad_norm": 24.87193977279463, + "learning_rate": 1.997680013825169e-05, + "loss": 0.8638, + "step": 981 + }, + { + "epoch": 0.15, + "grad_norm": 20.821167913617344, + "learning_rate": 1.997668519001365e-05, + "loss": 0.9667, + "step": 982 + }, + { + "epoch": 0.15, + "grad_norm": 29.648484524776272, + "learning_rate": 1.9976569958044523e-05, + "loss": 0.985, + "step": 983 + }, + { + "epoch": 0.15, + "grad_norm": 19.75908343510108, + "learning_rate": 1.997645444234758e-05, + "loss": 0.9432, + "step": 984 + }, + { + "epoch": 0.15, + "grad_norm": 22.14695877945067, + "learning_rate": 1.997633864292611e-05, + "loss": 0.9516, + "step": 985 + }, + { + "epoch": 0.15, + "grad_norm": 22.29064336987488, + "learning_rate": 1.99762225597834e-05, + "loss": 0.9803, + "step": 986 + }, + { + "epoch": 0.15, + "grad_norm": 17.519886680918606, + "learning_rate": 1.997610619292276e-05, + "loss": 1.0261, + "step": 987 + }, + { + "epoch": 0.15, + "grad_norm": 26.018074294943958, + "learning_rate": 1.9975989542347495e-05, + "loss": 1.0793, + "step": 988 + }, + { + "epoch": 0.15, + "grad_norm": 19.608446275899777, + "learning_rate": 1.9975872608060927e-05, + "loss": 0.8461, + "step": 989 + }, + { + "epoch": 0.15, + "grad_norm": 20.69574199675815, + "learning_rate": 1.9975755390066373e-05, + "loss": 1.0362, + "step": 990 + }, + { + "epoch": 0.15, + "grad_norm": 18.82663372196337, + "learning_rate": 1.9975637888367173e-05, + "loss": 1.0071, + "step": 991 + }, + { + "epoch": 0.15, + "grad_norm": 20.896183397888986, + "learning_rate": 1.9975520102966667e-05, + "loss": 1.0048, + "step": 992 + }, + { + "epoch": 0.16, + "grad_norm": 19.794927310425322, + "learning_rate": 1.9975402033868207e-05, + "loss": 0.9567, + "step": 993 + }, + { + "epoch": 0.16, + "grad_norm": 27.72274353370509, + "learning_rate": 1.9975283681075142e-05, + "loss": 0.9253, + "step": 994 + }, + { + "epoch": 0.16, + "grad_norm": 25.81541402234388, + "learning_rate": 1.9975165044590853e-05, + "loss": 0.9425, + "step": 995 + }, + { + "epoch": 0.16, + "grad_norm": 24.675916738713358, + "learning_rate": 1.99750461244187e-05, + "loss": 1.0206, + "step": 996 + }, + { + "epoch": 0.16, + "grad_norm": 25.905428859595194, + "learning_rate": 1.997492692056207e-05, + "loss": 0.9438, + "step": 997 + }, + { + "epoch": 0.16, + "grad_norm": 28.522451249444654, + "learning_rate": 1.9974807433024357e-05, + "loss": 0.9945, + "step": 998 + }, + { + "epoch": 0.16, + "grad_norm": 19.718876927446388, + "learning_rate": 1.9974687661808954e-05, + 
"loss": 0.9148, + "step": 999 + }, + { + "epoch": 0.16, + "grad_norm": 22.091939756214508, + "learning_rate": 1.997456760691927e-05, + "loss": 0.9874, + "step": 1000 + }, + { + "epoch": 0.16, + "grad_norm": 23.11514146058491, + "learning_rate": 1.9974447268358715e-05, + "loss": 0.9015, + "step": 1001 + }, + { + "epoch": 0.16, + "grad_norm": 17.804668716079288, + "learning_rate": 1.9974326646130716e-05, + "loss": 0.8472, + "step": 1002 + }, + { + "epoch": 0.16, + "grad_norm": 16.656475030728036, + "learning_rate": 1.9974205740238704e-05, + "loss": 0.8391, + "step": 1003 + }, + { + "epoch": 0.16, + "grad_norm": 21.558633175644697, + "learning_rate": 1.9974084550686116e-05, + "loss": 0.9494, + "step": 1004 + }, + { + "epoch": 0.16, + "grad_norm": 27.818598239427306, + "learning_rate": 1.9973963077476394e-05, + "loss": 1.0851, + "step": 1005 + }, + { + "epoch": 0.16, + "grad_norm": 14.655754652375885, + "learning_rate": 1.9973841320613e-05, + "loss": 0.9687, + "step": 1006 + }, + { + "epoch": 0.16, + "grad_norm": 24.40657534730464, + "learning_rate": 1.997371928009939e-05, + "loss": 1.0059, + "step": 1007 + }, + { + "epoch": 0.16, + "grad_norm": 18.40173965431018, + "learning_rate": 1.9973596955939043e-05, + "loss": 1.0217, + "step": 1008 + }, + { + "epoch": 0.16, + "grad_norm": 24.04782350799938, + "learning_rate": 1.997347434813543e-05, + "loss": 0.9063, + "step": 1009 + }, + { + "epoch": 0.16, + "grad_norm": 25.56124047007055, + "learning_rate": 1.997335145669204e-05, + "loss": 0.8219, + "step": 1010 + }, + { + "epoch": 0.16, + "grad_norm": 19.831400271602874, + "learning_rate": 1.997322828161237e-05, + "loss": 1.0246, + "step": 1011 + }, + { + "epoch": 0.16, + "grad_norm": 17.31427635365123, + "learning_rate": 1.9973104822899923e-05, + "loss": 0.9491, + "step": 1012 + }, + { + "epoch": 0.16, + "grad_norm": 19.734833288421257, + "learning_rate": 1.9972981080558208e-05, + "loss": 1.1044, + "step": 1013 + }, + { + "epoch": 0.16, + "grad_norm": 59.49551648289499, + "learning_rate": 1.9972857054590744e-05, + "loss": 1.0689, + "step": 1014 + }, + { + "epoch": 0.16, + "grad_norm": 14.384798855558653, + "learning_rate": 1.997273274500106e-05, + "loss": 0.8931, + "step": 1015 + }, + { + "epoch": 0.16, + "grad_norm": 25.769802069531156, + "learning_rate": 1.997260815179269e-05, + "loss": 1.1256, + "step": 1016 + }, + { + "epoch": 0.16, + "grad_norm": 22.464246187071343, + "learning_rate": 1.997248327496918e-05, + "loss": 1.0696, + "step": 1017 + }, + { + "epoch": 0.16, + "grad_norm": 25.956797254439103, + "learning_rate": 1.9972358114534074e-05, + "loss": 0.9252, + "step": 1018 + }, + { + "epoch": 0.16, + "grad_norm": 25.760436841003276, + "learning_rate": 1.9972232670490945e-05, + "loss": 0.8813, + "step": 1019 + }, + { + "epoch": 0.16, + "grad_norm": 18.583151045220244, + "learning_rate": 1.997210694284335e-05, + "loss": 0.8999, + "step": 1020 + }, + { + "epoch": 0.16, + "grad_norm": 24.44323274196326, + "learning_rate": 1.9971980931594865e-05, + "loss": 0.9655, + "step": 1021 + }, + { + "epoch": 0.16, + "grad_norm": 16.494483652381803, + "learning_rate": 1.9971854636749075e-05, + "loss": 0.9329, + "step": 1022 + }, + { + "epoch": 0.16, + "grad_norm": 21.267103554759874, + "learning_rate": 1.9971728058309572e-05, + "loss": 0.987, + "step": 1023 + }, + { + "epoch": 0.16, + "grad_norm": 22.118992572482618, + "learning_rate": 1.9971601196279963e-05, + "loss": 0.9983, + "step": 1024 + }, + { + "epoch": 0.16, + "grad_norm": 19.842380598831724, + "learning_rate": 1.997147405066384e-05, + "loss": 0.9695, 
+ "step": 1025 + }, + { + "epoch": 0.16, + "grad_norm": 14.65298669391427, + "learning_rate": 1.9971346621464833e-05, + "loss": 0.9535, + "step": 1026 + }, + { + "epoch": 0.16, + "grad_norm": 27.512256742170667, + "learning_rate": 1.9971218908686563e-05, + "loss": 0.9578, + "step": 1027 + }, + { + "epoch": 0.16, + "grad_norm": 24.574149753059544, + "learning_rate": 1.9971090912332657e-05, + "loss": 0.9132, + "step": 1028 + }, + { + "epoch": 0.16, + "grad_norm": 29.33446986848635, + "learning_rate": 1.9970962632406757e-05, + "loss": 1.0906, + "step": 1029 + }, + { + "epoch": 0.16, + "grad_norm": 20.516292745642946, + "learning_rate": 1.9970834068912515e-05, + "loss": 0.9737, + "step": 1030 + }, + { + "epoch": 0.16, + "grad_norm": 20.62443621688516, + "learning_rate": 1.9970705221853584e-05, + "loss": 0.9541, + "step": 1031 + }, + { + "epoch": 0.16, + "grad_norm": 22.22644135185706, + "learning_rate": 1.9970576091233633e-05, + "loss": 0.9547, + "step": 1032 + }, + { + "epoch": 0.16, + "grad_norm": 18.726208484949435, + "learning_rate": 1.9970446677056325e-05, + "loss": 0.8472, + "step": 1033 + }, + { + "epoch": 0.16, + "grad_norm": 16.621540227646772, + "learning_rate": 1.997031697932535e-05, + "loss": 0.9316, + "step": 1034 + }, + { + "epoch": 0.16, + "grad_norm": 21.326850876810315, + "learning_rate": 1.9970186998044392e-05, + "loss": 0.8996, + "step": 1035 + }, + { + "epoch": 0.16, + "grad_norm": 18.65304831806845, + "learning_rate": 1.9970056733217147e-05, + "loss": 0.9552, + "step": 1036 + }, + { + "epoch": 0.16, + "grad_norm": 18.16398263318951, + "learning_rate": 1.996992618484732e-05, + "loss": 0.8852, + "step": 1037 + }, + { + "epoch": 0.16, + "grad_norm": 16.376545159769574, + "learning_rate": 1.9969795352938625e-05, + "loss": 0.9859, + "step": 1038 + }, + { + "epoch": 0.16, + "grad_norm": 22.187092733003354, + "learning_rate": 1.9969664237494785e-05, + "loss": 1.0501, + "step": 1039 + }, + { + "epoch": 0.16, + "grad_norm": 17.335407337507426, + "learning_rate": 1.9969532838519524e-05, + "loss": 0.865, + "step": 1040 + }, + { + "epoch": 0.16, + "grad_norm": 21.160217237182188, + "learning_rate": 1.996940115601658e-05, + "loss": 1.1142, + "step": 1041 + }, + { + "epoch": 0.16, + "grad_norm": 18.337951351272025, + "learning_rate": 1.99692691899897e-05, + "loss": 1.0136, + "step": 1042 + }, + { + "epoch": 0.16, + "grad_norm": 27.48985530503833, + "learning_rate": 1.9969136940442636e-05, + "loss": 0.9256, + "step": 1043 + }, + { + "epoch": 0.16, + "grad_norm": 4.359953718842061, + "learning_rate": 1.996900440737915e-05, + "loss": 0.9117, + "step": 1044 + }, + { + "epoch": 0.16, + "grad_norm": 16.059047708332876, + "learning_rate": 1.996887159080301e-05, + "loss": 0.9515, + "step": 1045 + }, + { + "epoch": 0.16, + "grad_norm": 14.98626266797299, + "learning_rate": 1.9968738490717992e-05, + "loss": 0.9373, + "step": 1046 + }, + { + "epoch": 0.16, + "grad_norm": 23.413081707680554, + "learning_rate": 1.9968605107127886e-05, + "loss": 0.9226, + "step": 1047 + }, + { + "epoch": 0.16, + "grad_norm": 26.178540115576087, + "learning_rate": 1.996847144003648e-05, + "loss": 0.897, + "step": 1048 + }, + { + "epoch": 0.16, + "grad_norm": 19.96063426285729, + "learning_rate": 1.9968337489447578e-05, + "loss": 0.9423, + "step": 1049 + }, + { + "epoch": 0.16, + "grad_norm": 22.224057065412012, + "learning_rate": 1.996820325536499e-05, + "loss": 0.8617, + "step": 1050 + }, + { + "epoch": 0.16, + "grad_norm": 15.948927645674216, + "learning_rate": 1.996806873779253e-05, + "loss": 0.9121, + "step": 1051 
+ }, + { + "epoch": 0.16, + "grad_norm": 26.049640858786617, + "learning_rate": 1.9967933936734027e-05, + "loss": 0.9288, + "step": 1052 + }, + { + "epoch": 0.16, + "grad_norm": 21.044905889730735, + "learning_rate": 1.996779885219332e-05, + "loss": 0.8825, + "step": 1053 + }, + { + "epoch": 0.16, + "grad_norm": 30.567094353191365, + "learning_rate": 1.9967663484174235e-05, + "loss": 0.8961, + "step": 1054 + }, + { + "epoch": 0.16, + "grad_norm": 19.90970061560877, + "learning_rate": 1.9967527832680637e-05, + "loss": 0.9373, + "step": 1055 + }, + { + "epoch": 0.16, + "grad_norm": 26.129488848548302, + "learning_rate": 1.996739189771638e-05, + "loss": 0.9245, + "step": 1056 + }, + { + "epoch": 0.17, + "grad_norm": 25.387900679932635, + "learning_rate": 1.9967255679285324e-05, + "loss": 0.9912, + "step": 1057 + }, + { + "epoch": 0.17, + "grad_norm": 7.449547174792245, + "learning_rate": 1.9967119177391348e-05, + "loss": 0.9457, + "step": 1058 + }, + { + "epoch": 0.17, + "grad_norm": 20.37084016139191, + "learning_rate": 1.9966982392038335e-05, + "loss": 1.0184, + "step": 1059 + }, + { + "epoch": 0.17, + "grad_norm": 77.16469358476006, + "learning_rate": 1.996684532323017e-05, + "loss": 0.9994, + "step": 1060 + }, + { + "epoch": 0.17, + "grad_norm": 16.957716178638062, + "learning_rate": 1.9966707970970756e-05, + "loss": 0.9131, + "step": 1061 + }, + { + "epoch": 0.17, + "grad_norm": 21.762517296446227, + "learning_rate": 1.9966570335264e-05, + "loss": 0.8824, + "step": 1062 + }, + { + "epoch": 0.17, + "grad_norm": 20.06801353583546, + "learning_rate": 1.9966432416113812e-05, + "loss": 0.8856, + "step": 1063 + }, + { + "epoch": 0.17, + "grad_norm": 19.171373317220162, + "learning_rate": 1.9966294213524117e-05, + "loss": 0.8615, + "step": 1064 + }, + { + "epoch": 0.17, + "grad_norm": 21.024850750346925, + "learning_rate": 1.9966155727498843e-05, + "loss": 0.9013, + "step": 1065 + }, + { + "epoch": 0.17, + "grad_norm": 22.164824136200476, + "learning_rate": 1.9966016958041933e-05, + "loss": 1.0358, + "step": 1066 + }, + { + "epoch": 0.17, + "grad_norm": 41.681919746720965, + "learning_rate": 1.996587790515733e-05, + "loss": 0.8274, + "step": 1067 + }, + { + "epoch": 0.17, + "grad_norm": 15.945059858878466, + "learning_rate": 1.9965738568848986e-05, + "loss": 0.9953, + "step": 1068 + }, + { + "epoch": 0.17, + "grad_norm": 22.953576014056228, + "learning_rate": 1.996559894912087e-05, + "loss": 1.0221, + "step": 1069 + }, + { + "epoch": 0.17, + "grad_norm": 17.972018525935646, + "learning_rate": 1.996545904597695e-05, + "loss": 0.9417, + "step": 1070 + }, + { + "epoch": 0.17, + "grad_norm": 15.388072707490695, + "learning_rate": 1.9965318859421203e-05, + "loss": 0.9903, + "step": 1071 + }, + { + "epoch": 0.17, + "grad_norm": 23.47781751645356, + "learning_rate": 1.9965178389457617e-05, + "loss": 0.8525, + "step": 1072 + }, + { + "epoch": 0.17, + "grad_norm": 20.860240417558686, + "learning_rate": 1.9965037636090187e-05, + "loss": 0.9391, + "step": 1073 + }, + { + "epoch": 0.17, + "grad_norm": 26.294374092027503, + "learning_rate": 1.9964896599322917e-05, + "loss": 0.9244, + "step": 1074 + }, + { + "epoch": 0.17, + "grad_norm": 19.660765057313604, + "learning_rate": 1.9964755279159816e-05, + "loss": 0.8199, + "step": 1075 + }, + { + "epoch": 0.17, + "grad_norm": 31.299620579344733, + "learning_rate": 1.9964613675604904e-05, + "loss": 0.9738, + "step": 1076 + }, + { + "epoch": 0.17, + "grad_norm": 22.760483900897643, + "learning_rate": 1.996447178866221e-05, + "loss": 0.9592, + "step": 1077 + }, + { 
+ "epoch": 0.17, + "grad_norm": 14.650392983694106, + "learning_rate": 1.9964329618335766e-05, + "loss": 0.9399, + "step": 1078 + }, + { + "epoch": 0.17, + "grad_norm": 28.959922380667738, + "learning_rate": 1.9964187164629617e-05, + "loss": 0.976, + "step": 1079 + }, + { + "epoch": 0.17, + "grad_norm": 25.05245508419107, + "learning_rate": 1.996404442754781e-05, + "loss": 0.9548, + "step": 1080 + }, + { + "epoch": 0.17, + "grad_norm": 26.07033700544596, + "learning_rate": 1.9963901407094415e-05, + "loss": 0.9631, + "step": 1081 + }, + { + "epoch": 0.17, + "grad_norm": 23.19212974742126, + "learning_rate": 1.996375810327349e-05, + "loss": 1.0318, + "step": 1082 + }, + { + "epoch": 0.17, + "grad_norm": 14.661445227363606, + "learning_rate": 1.9963614516089112e-05, + "loss": 0.9228, + "step": 1083 + }, + { + "epoch": 0.17, + "grad_norm": 21.85009642256209, + "learning_rate": 1.9963470645545365e-05, + "loss": 1.0432, + "step": 1084 + }, + { + "epoch": 0.17, + "grad_norm": 20.32518228904275, + "learning_rate": 1.9963326491646346e-05, + "loss": 0.9614, + "step": 1085 + }, + { + "epoch": 0.17, + "grad_norm": 20.71690331977722, + "learning_rate": 1.9963182054396144e-05, + "loss": 0.8802, + "step": 1086 + }, + { + "epoch": 0.17, + "grad_norm": 30.1355175023656, + "learning_rate": 1.9963037333798876e-05, + "loss": 1.0874, + "step": 1087 + }, + { + "epoch": 0.17, + "grad_norm": 15.292365728501006, + "learning_rate": 1.9962892329858655e-05, + "loss": 0.876, + "step": 1088 + }, + { + "epoch": 0.17, + "grad_norm": 21.214769426839712, + "learning_rate": 1.99627470425796e-05, + "loss": 0.8733, + "step": 1089 + }, + { + "epoch": 0.17, + "grad_norm": 17.379554352322735, + "learning_rate": 1.9962601471965854e-05, + "loss": 0.9827, + "step": 1090 + }, + { + "epoch": 0.17, + "grad_norm": 29.110130115979604, + "learning_rate": 1.9962455618021545e-05, + "loss": 0.8946, + "step": 1091 + }, + { + "epoch": 0.17, + "grad_norm": 19.795895266959587, + "learning_rate": 1.9962309480750827e-05, + "loss": 1.029, + "step": 1092 + }, + { + "epoch": 0.17, + "grad_norm": 21.176858967088037, + "learning_rate": 1.9962163060157854e-05, + "loss": 1.0959, + "step": 1093 + }, + { + "epoch": 0.17, + "grad_norm": 36.20100217748054, + "learning_rate": 1.9962016356246794e-05, + "loss": 0.9817, + "step": 1094 + }, + { + "epoch": 0.17, + "grad_norm": 17.19508124130565, + "learning_rate": 1.9961869369021814e-05, + "loss": 0.8772, + "step": 1095 + }, + { + "epoch": 0.17, + "grad_norm": 23.21362086243986, + "learning_rate": 1.9961722098487098e-05, + "loss": 0.9472, + "step": 1096 + }, + { + "epoch": 0.17, + "grad_norm": 19.058492890904425, + "learning_rate": 1.9961574544646835e-05, + "loss": 0.8941, + "step": 1097 + }, + { + "epoch": 0.17, + "grad_norm": 195.05581634123945, + "learning_rate": 1.9961426707505217e-05, + "loss": 0.8981, + "step": 1098 + }, + { + "epoch": 0.17, + "grad_norm": 39.32973305071126, + "learning_rate": 1.996127858706645e-05, + "loss": 1.0174, + "step": 1099 + }, + { + "epoch": 0.17, + "grad_norm": 24.445888327195416, + "learning_rate": 1.996113018333475e-05, + "loss": 0.9646, + "step": 1100 + }, + { + "epoch": 0.17, + "grad_norm": 23.171377947074845, + "learning_rate": 1.996098149631433e-05, + "loss": 0.9686, + "step": 1101 + }, + { + "epoch": 0.17, + "grad_norm": 24.874052487351488, + "learning_rate": 1.9960832526009427e-05, + "loss": 1.0018, + "step": 1102 + }, + { + "epoch": 0.17, + "grad_norm": 20.598162584973974, + "learning_rate": 1.996068327242427e-05, + "loss": 0.9975, + "step": 1103 + }, + { + "epoch": 
0.17, + "grad_norm": 21.004610504949117, + "learning_rate": 1.9960533735563113e-05, + "loss": 0.8323, + "step": 1104 + }, + { + "epoch": 0.17, + "grad_norm": 21.582257565653794, + "learning_rate": 1.99603839154302e-05, + "loss": 0.7476, + "step": 1105 + }, + { + "epoch": 0.17, + "grad_norm": 19.541556438261914, + "learning_rate": 1.9960233812029798e-05, + "loss": 0.8683, + "step": 1106 + }, + { + "epoch": 0.17, + "grad_norm": 38.115180145076536, + "learning_rate": 1.996008342536617e-05, + "loss": 0.9944, + "step": 1107 + }, + { + "epoch": 0.17, + "grad_norm": 5.129276843311849, + "learning_rate": 1.9959932755443596e-05, + "loss": 0.8668, + "step": 1108 + }, + { + "epoch": 0.17, + "grad_norm": 16.878704671929537, + "learning_rate": 1.995978180226636e-05, + "loss": 0.9611, + "step": 1109 + }, + { + "epoch": 0.17, + "grad_norm": 30.11822019357628, + "learning_rate": 1.9959630565838758e-05, + "loss": 1.0163, + "step": 1110 + }, + { + "epoch": 0.17, + "grad_norm": 21.82556571920938, + "learning_rate": 1.9959479046165088e-05, + "loss": 1.0279, + "step": 1111 + }, + { + "epoch": 0.17, + "grad_norm": 15.952955102622985, + "learning_rate": 1.9959327243249662e-05, + "loss": 0.8513, + "step": 1112 + }, + { + "epoch": 0.17, + "grad_norm": 17.68419848419876, + "learning_rate": 1.9959175157096794e-05, + "loss": 0.9806, + "step": 1113 + }, + { + "epoch": 0.17, + "grad_norm": 24.483035655291626, + "learning_rate": 1.995902278771081e-05, + "loss": 0.9821, + "step": 1114 + }, + { + "epoch": 0.17, + "grad_norm": 15.279112038316622, + "learning_rate": 1.9958870135096044e-05, + "loss": 0.9228, + "step": 1115 + }, + { + "epoch": 0.17, + "grad_norm": 23.95422021734671, + "learning_rate": 1.9958717199256833e-05, + "loss": 0.9523, + "step": 1116 + }, + { + "epoch": 0.17, + "grad_norm": 13.930983981026705, + "learning_rate": 1.9958563980197536e-05, + "loss": 0.9921, + "step": 1117 + }, + { + "epoch": 0.17, + "grad_norm": 23.897593610216855, + "learning_rate": 1.9958410477922504e-05, + "loss": 0.8384, + "step": 1118 + }, + { + "epoch": 0.17, + "grad_norm": 25.011398549303884, + "learning_rate": 1.9958256692436103e-05, + "loss": 0.9378, + "step": 1119 + }, + { + "epoch": 0.17, + "grad_norm": 30.645943474306943, + "learning_rate": 1.995810262374271e-05, + "loss": 0.9036, + "step": 1120 + }, + { + "epoch": 0.18, + "grad_norm": 14.178524056826843, + "learning_rate": 1.99579482718467e-05, + "loss": 0.7769, + "step": 1121 + }, + { + "epoch": 0.18, + "grad_norm": 23.82688153633873, + "learning_rate": 1.9957793636752466e-05, + "loss": 0.8656, + "step": 1122 + }, + { + "epoch": 0.18, + "grad_norm": 28.657159114881118, + "learning_rate": 1.9957638718464405e-05, + "loss": 1.0352, + "step": 1123 + }, + { + "epoch": 0.18, + "grad_norm": 25.372462772966394, + "learning_rate": 1.995748351698693e-05, + "loss": 1.0062, + "step": 1124 + }, + { + "epoch": 0.18, + "grad_norm": 31.516940010486838, + "learning_rate": 1.995732803232444e-05, + "loss": 0.9846, + "step": 1125 + }, + { + "epoch": 0.18, + "grad_norm": 21.428904983246213, + "learning_rate": 1.995717226448137e-05, + "loss": 1.0084, + "step": 1126 + }, + { + "epoch": 0.18, + "grad_norm": 26.96075121589446, + "learning_rate": 1.995701621346215e-05, + "loss": 0.9589, + "step": 1127 + }, + { + "epoch": 0.18, + "grad_norm": 21.407298769632664, + "learning_rate": 1.9956859879271207e-05, + "loss": 0.891, + "step": 1128 + }, + { + "epoch": 0.18, + "grad_norm": 23.547919443225897, + "learning_rate": 1.9956703261913e-05, + "loss": 0.8514, + "step": 1129 + }, + { + "epoch": 0.18, + 
"grad_norm": 19.761614780186456, + "learning_rate": 1.995654636139197e-05, + "loss": 0.9503, + "step": 1130 + }, + { + "epoch": 0.18, + "grad_norm": 20.186137096558895, + "learning_rate": 1.9956389177712592e-05, + "loss": 1.0059, + "step": 1131 + }, + { + "epoch": 0.18, + "grad_norm": 20.500806487315653, + "learning_rate": 1.9956231710879325e-05, + "loss": 1.0018, + "step": 1132 + }, + { + "epoch": 0.18, + "grad_norm": 25.397915764093298, + "learning_rate": 1.9956073960896652e-05, + "loss": 0.8797, + "step": 1133 + }, + { + "epoch": 0.18, + "grad_norm": 25.37684573662613, + "learning_rate": 1.9955915927769066e-05, + "loss": 0.9778, + "step": 1134 + }, + { + "epoch": 0.18, + "grad_norm": 20.25531568966331, + "learning_rate": 1.995575761150105e-05, + "loss": 1.0669, + "step": 1135 + }, + { + "epoch": 0.18, + "grad_norm": 13.679965697748836, + "learning_rate": 1.995559901209711e-05, + "loss": 0.896, + "step": 1136 + }, + { + "epoch": 0.18, + "grad_norm": 23.073898200835217, + "learning_rate": 1.995544012956176e-05, + "loss": 0.9498, + "step": 1137 + }, + { + "epoch": 0.18, + "grad_norm": 25.17758573778675, + "learning_rate": 1.995528096389952e-05, + "loss": 0.9395, + "step": 1138 + }, + { + "epoch": 0.18, + "grad_norm": 19.8285652424402, + "learning_rate": 1.995512151511491e-05, + "loss": 0.9703, + "step": 1139 + }, + { + "epoch": 0.18, + "grad_norm": 18.66749442919373, + "learning_rate": 1.995496178321247e-05, + "loss": 1.0551, + "step": 1140 + }, + { + "epoch": 0.18, + "grad_norm": 23.76111052344706, + "learning_rate": 1.9954801768196738e-05, + "loss": 0.8594, + "step": 1141 + }, + { + "epoch": 0.18, + "grad_norm": 16.19758400764559, + "learning_rate": 1.9954641470072267e-05, + "loss": 0.8637, + "step": 1142 + }, + { + "epoch": 0.18, + "grad_norm": 17.223655637944525, + "learning_rate": 1.9954480888843614e-05, + "loss": 1.0662, + "step": 1143 + }, + { + "epoch": 0.18, + "grad_norm": 18.395337053731495, + "learning_rate": 1.995432002451535e-05, + "loss": 0.9162, + "step": 1144 + }, + { + "epoch": 0.18, + "grad_norm": 27.209186781300918, + "learning_rate": 1.995415887709205e-05, + "loss": 0.8881, + "step": 1145 + }, + { + "epoch": 0.18, + "grad_norm": 24.438681956827622, + "learning_rate": 1.995399744657829e-05, + "loss": 0.9011, + "step": 1146 + }, + { + "epoch": 0.18, + "grad_norm": 17.813041791707164, + "learning_rate": 1.9953835732978668e-05, + "loss": 0.8796, + "step": 1147 + }, + { + "epoch": 0.18, + "grad_norm": 24.719100036988042, + "learning_rate": 1.995367373629778e-05, + "loss": 0.9395, + "step": 1148 + }, + { + "epoch": 0.18, + "grad_norm": 18.08496977604367, + "learning_rate": 1.9953511456540233e-05, + "loss": 0.9645, + "step": 1149 + }, + { + "epoch": 0.18, + "grad_norm": 20.237033522061687, + "learning_rate": 1.9953348893710645e-05, + "loss": 0.91, + "step": 1150 + }, + { + "epoch": 0.18, + "grad_norm": 13.488883503013803, + "learning_rate": 1.9953186047813637e-05, + "loss": 0.8988, + "step": 1151 + }, + { + "epoch": 0.18, + "grad_norm": 36.55713919113992, + "learning_rate": 1.9953022918853837e-05, + "loss": 0.9923, + "step": 1152 + }, + { + "epoch": 0.18, + "grad_norm": 30.13020995109024, + "learning_rate": 1.9952859506835887e-05, + "loss": 1.0171, + "step": 1153 + }, + { + "epoch": 0.18, + "grad_norm": 19.313885660675833, + "learning_rate": 1.995269581176444e-05, + "loss": 0.925, + "step": 1154 + }, + { + "epoch": 0.18, + "grad_norm": 25.380561423525133, + "learning_rate": 1.9952531833644145e-05, + "loss": 0.9817, + "step": 1155 + }, + { + "epoch": 0.18, + "grad_norm": 
33.27406470233171, + "learning_rate": 1.9952367572479664e-05, + "loss": 1.102, + "step": 1156 + }, + { + "epoch": 0.18, + "grad_norm": 27.73252246633459, + "learning_rate": 1.995220302827567e-05, + "loss": 0.9488, + "step": 1157 + }, + { + "epoch": 0.18, + "grad_norm": 34.5026125177882, + "learning_rate": 1.9952038201036848e-05, + "loss": 1.0748, + "step": 1158 + }, + { + "epoch": 0.18, + "grad_norm": 28.870464497190003, + "learning_rate": 1.995187309076788e-05, + "loss": 0.8552, + "step": 1159 + }, + { + "epoch": 0.18, + "grad_norm": 14.031554636165179, + "learning_rate": 1.9951707697473465e-05, + "loss": 0.8569, + "step": 1160 + }, + { + "epoch": 0.18, + "grad_norm": 26.63769626711829, + "learning_rate": 1.99515420211583e-05, + "loss": 1.0523, + "step": 1161 + }, + { + "epoch": 0.18, + "grad_norm": 21.890918687515374, + "learning_rate": 1.9951376061827104e-05, + "loss": 0.9819, + "step": 1162 + }, + { + "epoch": 0.18, + "grad_norm": 22.280379834779016, + "learning_rate": 1.995120981948459e-05, + "loss": 0.966, + "step": 1163 + }, + { + "epoch": 0.18, + "grad_norm": 17.07628401442733, + "learning_rate": 1.9951043294135498e-05, + "loss": 0.8894, + "step": 1164 + }, + { + "epoch": 0.18, + "grad_norm": 5.540979091455273, + "learning_rate": 1.995087648578455e-05, + "loss": 0.8823, + "step": 1165 + }, + { + "epoch": 0.18, + "grad_norm": 19.13708005658681, + "learning_rate": 1.9950709394436497e-05, + "loss": 1.0229, + "step": 1166 + }, + { + "epoch": 0.18, + "grad_norm": 21.64973108975881, + "learning_rate": 1.9950542020096094e-05, + "loss": 0.9317, + "step": 1167 + }, + { + "epoch": 0.18, + "grad_norm": 21.215047049702857, + "learning_rate": 1.995037436276809e-05, + "loss": 0.8126, + "step": 1168 + }, + { + "epoch": 0.18, + "grad_norm": 23.32932582071813, + "learning_rate": 1.9950206422457263e-05, + "loss": 0.9556, + "step": 1169 + }, + { + "epoch": 0.18, + "grad_norm": 18.990490524916908, + "learning_rate": 1.9950038199168387e-05, + "loss": 1.0087, + "step": 1170 + }, + { + "epoch": 0.18, + "grad_norm": 26.641587413955936, + "learning_rate": 1.9949869692906245e-05, + "loss": 0.9149, + "step": 1171 + }, + { + "epoch": 0.18, + "grad_norm": 26.106088595271515, + "learning_rate": 1.994970090367563e-05, + "loss": 0.9971, + "step": 1172 + }, + { + "epoch": 0.18, + "grad_norm": 23.41581929549556, + "learning_rate": 1.994953183148134e-05, + "loss": 0.856, + "step": 1173 + }, + { + "epoch": 0.18, + "grad_norm": 47.672523506627265, + "learning_rate": 1.9949362476328184e-05, + "loss": 1.0399, + "step": 1174 + }, + { + "epoch": 0.18, + "grad_norm": 13.695419951962345, + "learning_rate": 1.9949192838220982e-05, + "loss": 0.8773, + "step": 1175 + }, + { + "epoch": 0.18, + "grad_norm": 23.926550004091787, + "learning_rate": 1.9949022917164556e-05, + "loss": 0.8676, + "step": 1176 + }, + { + "epoch": 0.18, + "grad_norm": 30.105655016483126, + "learning_rate": 1.9948852713163737e-05, + "loss": 0.9021, + "step": 1177 + }, + { + "epoch": 0.18, + "grad_norm": 18.316633414260945, + "learning_rate": 1.9948682226223366e-05, + "loss": 0.9636, + "step": 1178 + }, + { + "epoch": 0.18, + "grad_norm": 23.736809104705028, + "learning_rate": 1.9948511456348295e-05, + "loss": 0.9362, + "step": 1179 + }, + { + "epoch": 0.18, + "grad_norm": 23.08183740911144, + "learning_rate": 1.9948340403543375e-05, + "loss": 1.0331, + "step": 1180 + }, + { + "epoch": 0.18, + "grad_norm": 21.57154819026003, + "learning_rate": 1.9948169067813473e-05, + "loss": 0.983, + "step": 1181 + }, + { + "epoch": 0.18, + "grad_norm": 
18.184887788579513, + "learning_rate": 1.9947997449163467e-05, + "loss": 1.0024, + "step": 1182 + }, + { + "epoch": 0.18, + "grad_norm": 21.90526965392405, + "learning_rate": 1.9947825547598226e-05, + "loss": 0.9809, + "step": 1183 + }, + { + "epoch": 0.18, + "grad_norm": 29.46472945046384, + "learning_rate": 1.994765336312265e-05, + "loss": 0.8987, + "step": 1184 + }, + { + "epoch": 0.19, + "grad_norm": 22.38233590675501, + "learning_rate": 1.994748089574163e-05, + "loss": 0.9454, + "step": 1185 + }, + { + "epoch": 0.19, + "grad_norm": 23.310221537703704, + "learning_rate": 1.9947308145460073e-05, + "loss": 0.9608, + "step": 1186 + }, + { + "epoch": 0.19, + "grad_norm": 24.30178722481752, + "learning_rate": 1.9947135112282894e-05, + "loss": 0.9571, + "step": 1187 + }, + { + "epoch": 0.19, + "grad_norm": 20.044473009121155, + "learning_rate": 1.9946961796215006e-05, + "loss": 0.8899, + "step": 1188 + }, + { + "epoch": 0.19, + "grad_norm": 14.507704191568795, + "learning_rate": 1.994678819726135e-05, + "loss": 0.8034, + "step": 1189 + }, + { + "epoch": 0.19, + "grad_norm": 17.730711127433754, + "learning_rate": 1.994661431542685e-05, + "loss": 0.9218, + "step": 1190 + }, + { + "epoch": 0.19, + "grad_norm": 21.246142138176882, + "learning_rate": 1.994644015071646e-05, + "loss": 0.8613, + "step": 1191 + }, + { + "epoch": 0.19, + "grad_norm": 22.45738185901497, + "learning_rate": 1.994626570313513e-05, + "loss": 0.9275, + "step": 1192 + }, + { + "epoch": 0.19, + "grad_norm": 23.320499622610207, + "learning_rate": 1.9946090972687827e-05, + "loss": 1.0286, + "step": 1193 + }, + { + "epoch": 0.19, + "grad_norm": 23.75997874661781, + "learning_rate": 1.994591595937951e-05, + "loss": 0.7884, + "step": 1194 + }, + { + "epoch": 0.19, + "grad_norm": 16.398642040283757, + "learning_rate": 1.994574066321516e-05, + "loss": 0.9726, + "step": 1195 + }, + { + "epoch": 0.19, + "grad_norm": 20.392093268692573, + "learning_rate": 1.9945565084199766e-05, + "loss": 0.9629, + "step": 1196 + }, + { + "epoch": 0.19, + "grad_norm": 13.446296983902577, + "learning_rate": 1.994538922233832e-05, + "loss": 0.9321, + "step": 1197 + }, + { + "epoch": 0.19, + "grad_norm": 21.243092139911507, + "learning_rate": 1.994521307763582e-05, + "loss": 0.8464, + "step": 1198 + }, + { + "epoch": 0.19, + "grad_norm": 32.68458859395483, + "learning_rate": 1.994503665009728e-05, + "loss": 0.953, + "step": 1199 + }, + { + "epoch": 0.19, + "grad_norm": 25.726231668744806, + "learning_rate": 1.9944859939727714e-05, + "loss": 0.9653, + "step": 1200 + }, + { + "epoch": 0.19, + "grad_norm": 16.352486735112418, + "learning_rate": 1.994468294653215e-05, + "loss": 0.7899, + "step": 1201 + }, + { + "epoch": 0.19, + "grad_norm": 16.72271390253157, + "learning_rate": 1.994450567051562e-05, + "loss": 0.9373, + "step": 1202 + }, + { + "epoch": 0.19, + "grad_norm": 19.202867844926203, + "learning_rate": 1.9944328111683166e-05, + "loss": 0.9778, + "step": 1203 + }, + { + "epoch": 0.19, + "grad_norm": 14.555628186642041, + "learning_rate": 1.9944150270039837e-05, + "loss": 0.9264, + "step": 1204 + }, + { + "epoch": 0.19, + "grad_norm": 31.11643087054771, + "learning_rate": 1.994397214559069e-05, + "loss": 0.8862, + "step": 1205 + }, + { + "epoch": 0.19, + "grad_norm": 16.276235170561964, + "learning_rate": 1.9943793738340796e-05, + "loss": 0.9889, + "step": 1206 + }, + { + "epoch": 0.19, + "grad_norm": 21.994496461120395, + "learning_rate": 1.9943615048295224e-05, + "loss": 1.0607, + "step": 1207 + }, + { + "epoch": 0.19, + "grad_norm": 
31.373176717403403, + "learning_rate": 1.9943436075459056e-05, + "loss": 0.9986, + "step": 1208 + }, + { + "epoch": 0.19, + "grad_norm": 21.367489900837658, + "learning_rate": 1.994325681983738e-05, + "loss": 0.8561, + "step": 1209 + }, + { + "epoch": 0.19, + "grad_norm": 20.57649557531624, + "learning_rate": 1.9943077281435302e-05, + "loss": 0.9088, + "step": 1210 + }, + { + "epoch": 0.19, + "grad_norm": 19.22798190056659, + "learning_rate": 1.994289746025792e-05, + "loss": 0.9386, + "step": 1211 + }, + { + "epoch": 0.19, + "grad_norm": 20.051370076626796, + "learning_rate": 1.994271735631035e-05, + "loss": 0.9943, + "step": 1212 + }, + { + "epoch": 0.19, + "grad_norm": 24.49291742688194, + "learning_rate": 1.9942536969597713e-05, + "loss": 0.9867, + "step": 1213 + }, + { + "epoch": 0.19, + "grad_norm": 23.380714742365978, + "learning_rate": 1.9942356300125146e-05, + "loss": 1.01, + "step": 1214 + }, + { + "epoch": 0.19, + "grad_norm": 17.769314347622814, + "learning_rate": 1.994217534789778e-05, + "loss": 1.009, + "step": 1215 + }, + { + "epoch": 0.19, + "grad_norm": 25.32979480338763, + "learning_rate": 1.994199411292076e-05, + "loss": 0.97, + "step": 1216 + }, + { + "epoch": 0.19, + "grad_norm": 23.86239682642862, + "learning_rate": 1.9941812595199246e-05, + "loss": 0.9607, + "step": 1217 + }, + { + "epoch": 0.19, + "grad_norm": 24.97029570902465, + "learning_rate": 1.99416307947384e-05, + "loss": 0.8529, + "step": 1218 + }, + { + "epoch": 0.19, + "grad_norm": 22.543302660121476, + "learning_rate": 1.9941448711543386e-05, + "loss": 0.9853, + "step": 1219 + }, + { + "epoch": 0.19, + "grad_norm": 16.608061158043856, + "learning_rate": 1.994126634561939e-05, + "loss": 0.9182, + "step": 1220 + }, + { + "epoch": 0.19, + "grad_norm": 22.731217702565235, + "learning_rate": 1.994108369697159e-05, + "loss": 0.9641, + "step": 1221 + }, + { + "epoch": 0.19, + "grad_norm": 33.05863261724831, + "learning_rate": 1.9940900765605187e-05, + "loss": 1.0102, + "step": 1222 + }, + { + "epoch": 0.19, + "grad_norm": 39.6450780495734, + "learning_rate": 1.994071755152538e-05, + "loss": 1.0147, + "step": 1223 + }, + { + "epoch": 0.19, + "grad_norm": 7.845016571956475, + "learning_rate": 1.9940534054737383e-05, + "loss": 1.0116, + "step": 1224 + }, + { + "epoch": 0.19, + "grad_norm": 23.703273159160023, + "learning_rate": 1.9940350275246412e-05, + "loss": 0.8897, + "step": 1225 + }, + { + "epoch": 0.19, + "grad_norm": 20.373733486995256, + "learning_rate": 1.9940166213057693e-05, + "loss": 0.9425, + "step": 1226 + }, + { + "epoch": 0.19, + "grad_norm": 22.455188548883385, + "learning_rate": 1.9939981868176462e-05, + "loss": 1.0323, + "step": 1227 + }, + { + "epoch": 0.19, + "grad_norm": 28.822589758446107, + "learning_rate": 1.9939797240607966e-05, + "loss": 0.9633, + "step": 1228 + }, + { + "epoch": 0.19, + "grad_norm": 16.82033621737185, + "learning_rate": 1.9939612330357446e-05, + "loss": 0.9004, + "step": 1229 + }, + { + "epoch": 0.19, + "grad_norm": 16.329713536063725, + "learning_rate": 1.993942713743017e-05, + "loss": 0.9775, + "step": 1230 + }, + { + "epoch": 0.19, + "grad_norm": 19.118670545951, + "learning_rate": 1.9939241661831398e-05, + "loss": 0.8798, + "step": 1231 + }, + { + "epoch": 0.19, + "grad_norm": 27.92895927653587, + "learning_rate": 1.9939055903566403e-05, + "loss": 0.826, + "step": 1232 + }, + { + "epoch": 0.19, + "grad_norm": 24.968752351958887, + "learning_rate": 1.9938869862640476e-05, + "loss": 0.9785, + "step": 1233 + }, + { + "epoch": 0.19, + "grad_norm": 20.587930629160464, + 
"learning_rate": 1.9938683539058906e-05, + "loss": 0.9025, + "step": 1234 + }, + { + "epoch": 0.19, + "grad_norm": 21.091215742957534, + "learning_rate": 1.9938496932826988e-05, + "loss": 0.8837, + "step": 1235 + }, + { + "epoch": 0.19, + "grad_norm": 22.754134579278293, + "learning_rate": 1.993831004395003e-05, + "loss": 1.0415, + "step": 1236 + }, + { + "epoch": 0.19, + "grad_norm": 11.773719017875198, + "learning_rate": 1.9938122872433353e-05, + "loss": 0.7607, + "step": 1237 + }, + { + "epoch": 0.19, + "grad_norm": 7.629002864946615, + "learning_rate": 1.9937935418282274e-05, + "loss": 0.8634, + "step": 1238 + }, + { + "epoch": 0.19, + "grad_norm": 18.14983628556474, + "learning_rate": 1.9937747681502122e-05, + "loss": 0.878, + "step": 1239 + }, + { + "epoch": 0.19, + "grad_norm": 28.180830389525347, + "learning_rate": 1.993755966209824e-05, + "loss": 1.002, + "step": 1240 + }, + { + "epoch": 0.19, + "grad_norm": 16.552035578828285, + "learning_rate": 1.9937371360075977e-05, + "loss": 0.9009, + "step": 1241 + }, + { + "epoch": 0.19, + "grad_norm": 22.728857726390583, + "learning_rate": 1.9937182775440683e-05, + "loss": 1.0715, + "step": 1242 + }, + { + "epoch": 0.19, + "grad_norm": 21.58531103284727, + "learning_rate": 1.9936993908197725e-05, + "loss": 0.9315, + "step": 1243 + }, + { + "epoch": 0.19, + "grad_norm": 37.56450138694873, + "learning_rate": 1.9936804758352473e-05, + "loss": 1.0043, + "step": 1244 + }, + { + "epoch": 0.19, + "grad_norm": 18.127500708486227, + "learning_rate": 1.9936615325910308e-05, + "loss": 0.8986, + "step": 1245 + }, + { + "epoch": 0.19, + "grad_norm": 23.70305438398933, + "learning_rate": 1.9936425610876614e-05, + "loss": 0.9206, + "step": 1246 + }, + { + "epoch": 0.19, + "grad_norm": 23.75094908712745, + "learning_rate": 1.993623561325679e-05, + "loss": 0.9542, + "step": 1247 + }, + { + "epoch": 0.19, + "grad_norm": 24.105175353425047, + "learning_rate": 1.9936045333056235e-05, + "loss": 0.854, + "step": 1248 + }, + { + "epoch": 0.2, + "grad_norm": 27.642600995924038, + "learning_rate": 1.9935854770280362e-05, + "loss": 0.8935, + "step": 1249 + }, + { + "epoch": 0.2, + "grad_norm": 14.381684001037932, + "learning_rate": 1.9935663924934596e-05, + "loss": 0.8901, + "step": 1250 + }, + { + "epoch": 0.2, + "grad_norm": 12.971888514001433, + "learning_rate": 1.993547279702436e-05, + "loss": 0.8007, + "step": 1251 + }, + { + "epoch": 0.2, + "grad_norm": 16.821220718931485, + "learning_rate": 1.9935281386555087e-05, + "loss": 0.9687, + "step": 1252 + }, + { + "epoch": 0.2, + "grad_norm": 20.53065080363474, + "learning_rate": 1.9935089693532225e-05, + "loss": 0.9721, + "step": 1253 + }, + { + "epoch": 0.2, + "grad_norm": 16.75973181592774, + "learning_rate": 1.9934897717961223e-05, + "loss": 0.8984, + "step": 1254 + }, + { + "epoch": 0.2, + "grad_norm": 19.57298354615714, + "learning_rate": 1.9934705459847544e-05, + "loss": 0.9365, + "step": 1255 + }, + { + "epoch": 0.2, + "grad_norm": 16.953908912711572, + "learning_rate": 1.9934512919196647e-05, + "loss": 0.8488, + "step": 1256 + }, + { + "epoch": 0.2, + "grad_norm": 28.33725745547857, + "learning_rate": 1.9934320096014022e-05, + "loss": 0.9324, + "step": 1257 + }, + { + "epoch": 0.2, + "grad_norm": 21.281497199769635, + "learning_rate": 1.9934126990305142e-05, + "loss": 0.8084, + "step": 1258 + }, + { + "epoch": 0.2, + "grad_norm": 7.136670117142064, + "learning_rate": 1.99339336020755e-05, + "loss": 0.9868, + "step": 1259 + }, + { + "epoch": 0.2, + "grad_norm": 18.091622239433082, + "learning_rate": 
1.99337399313306e-05, + "loss": 0.8913, + "step": 1260 + }, + { + "epoch": 0.2, + "grad_norm": 18.635426654212395, + "learning_rate": 1.9933545978075942e-05, + "loss": 0.8554, + "step": 1261 + }, + { + "epoch": 0.2, + "grad_norm": 21.474915783037556, + "learning_rate": 1.9933351742317053e-05, + "loss": 0.983, + "step": 1262 + }, + { + "epoch": 0.2, + "grad_norm": 29.234804159467206, + "learning_rate": 1.993315722405945e-05, + "loss": 0.966, + "step": 1263 + }, + { + "epoch": 0.2, + "grad_norm": 11.672568471062904, + "learning_rate": 1.9932962423308666e-05, + "loss": 0.8412, + "step": 1264 + }, + { + "epoch": 0.2, + "grad_norm": 14.449711114972242, + "learning_rate": 1.9932767340070243e-05, + "loss": 0.9385, + "step": 1265 + }, + { + "epoch": 0.2, + "grad_norm": 18.669071223225533, + "learning_rate": 1.9932571974349724e-05, + "loss": 0.8169, + "step": 1266 + }, + { + "epoch": 0.2, + "grad_norm": 19.992868998436204, + "learning_rate": 1.9932376326152672e-05, + "loss": 0.9201, + "step": 1267 + }, + { + "epoch": 0.2, + "grad_norm": 27.272789298085467, + "learning_rate": 1.9932180395484644e-05, + "loss": 0.9691, + "step": 1268 + }, + { + "epoch": 0.2, + "grad_norm": 20.28794206515459, + "learning_rate": 1.993198418235122e-05, + "loss": 0.9517, + "step": 1269 + }, + { + "epoch": 0.2, + "grad_norm": 17.36011207674172, + "learning_rate": 1.993178768675797e-05, + "loss": 1.0202, + "step": 1270 + }, + { + "epoch": 0.2, + "grad_norm": 21.541887003351604, + "learning_rate": 1.9931590908710495e-05, + "loss": 0.9676, + "step": 1271 + }, + { + "epoch": 0.2, + "grad_norm": 18.141704428640352, + "learning_rate": 1.9931393848214376e-05, + "loss": 0.8703, + "step": 1272 + }, + { + "epoch": 0.2, + "grad_norm": 20.695246094783986, + "learning_rate": 1.9931196505275233e-05, + "loss": 1.0039, + "step": 1273 + }, + { + "epoch": 0.2, + "grad_norm": 8.346715071676691, + "learning_rate": 1.993099887989867e-05, + "loss": 0.8852, + "step": 1274 + }, + { + "epoch": 0.2, + "grad_norm": 25.672661702505575, + "learning_rate": 1.9930800972090307e-05, + "loss": 0.9104, + "step": 1275 + }, + { + "epoch": 0.2, + "grad_norm": 25.618545684106746, + "learning_rate": 1.9930602781855772e-05, + "loss": 0.8934, + "step": 1276 + }, + { + "epoch": 0.2, + "grad_norm": 37.476588063711446, + "learning_rate": 1.9930404309200706e-05, + "loss": 0.9254, + "step": 1277 + }, + { + "epoch": 0.2, + "grad_norm": 18.0360428722522, + "learning_rate": 1.9930205554130748e-05, + "loss": 0.9899, + "step": 1278 + }, + { + "epoch": 0.2, + "grad_norm": 33.663050132821624, + "learning_rate": 1.9930006516651555e-05, + "loss": 0.9813, + "step": 1279 + }, + { + "epoch": 0.2, + "grad_norm": 22.84790344808927, + "learning_rate": 1.9929807196768785e-05, + "loss": 0.9439, + "step": 1280 + }, + { + "epoch": 0.2, + "grad_norm": 17.98229335110451, + "learning_rate": 1.9929607594488104e-05, + "loss": 0.8303, + "step": 1281 + }, + { + "epoch": 0.2, + "grad_norm": 30.035470062652568, + "learning_rate": 1.9929407709815197e-05, + "loss": 0.9388, + "step": 1282 + }, + { + "epoch": 0.2, + "grad_norm": 23.215968611692105, + "learning_rate": 1.992920754275574e-05, + "loss": 0.8859, + "step": 1283 + }, + { + "epoch": 0.2, + "grad_norm": 4.943706562053954, + "learning_rate": 1.992900709331543e-05, + "loss": 0.9961, + "step": 1284 + }, + { + "epoch": 0.2, + "grad_norm": 23.8796046794665, + "learning_rate": 1.9928806361499965e-05, + "loss": 0.8896, + "step": 1285 + }, + { + "epoch": 0.2, + "grad_norm": 14.615686040819751, + "learning_rate": 1.9928605347315057e-05, + "loss": 
0.9176, + "step": 1286 + }, + { + "epoch": 0.2, + "grad_norm": 17.386918984886165, + "learning_rate": 1.992840405076642e-05, + "loss": 0.8916, + "step": 1287 + }, + { + "epoch": 0.2, + "grad_norm": 24.019404373913307, + "learning_rate": 1.992820247185978e-05, + "loss": 1.0235, + "step": 1288 + }, + { + "epoch": 0.2, + "grad_norm": 23.637710906025376, + "learning_rate": 1.992800061060087e-05, + "loss": 0.9788, + "step": 1289 + }, + { + "epoch": 0.2, + "grad_norm": 19.1480366265285, + "learning_rate": 1.992779846699543e-05, + "loss": 0.9326, + "step": 1290 + }, + { + "epoch": 0.2, + "grad_norm": 27.699055472836726, + "learning_rate": 1.992759604104921e-05, + "loss": 0.9894, + "step": 1291 + }, + { + "epoch": 0.2, + "grad_norm": 28.074772851204248, + "learning_rate": 1.9927393332767965e-05, + "loss": 1.0129, + "step": 1292 + }, + { + "epoch": 0.2, + "grad_norm": 22.953500482332803, + "learning_rate": 1.9927190342157462e-05, + "loss": 1.0021, + "step": 1293 + }, + { + "epoch": 0.2, + "grad_norm": 21.84006127194746, + "learning_rate": 1.9926987069223475e-05, + "loss": 0.9293, + "step": 1294 + }, + { + "epoch": 0.2, + "grad_norm": 20.89379450621844, + "learning_rate": 1.9926783513971777e-05, + "loss": 0.9532, + "step": 1295 + }, + { + "epoch": 0.2, + "grad_norm": 35.378460087236796, + "learning_rate": 1.9926579676408168e-05, + "loss": 1.0128, + "step": 1296 + }, + { + "epoch": 0.2, + "grad_norm": 17.318854205624405, + "learning_rate": 1.992637555653844e-05, + "loss": 0.9178, + "step": 1297 + }, + { + "epoch": 0.2, + "grad_norm": 19.897438525977122, + "learning_rate": 1.9926171154368393e-05, + "loss": 0.8841, + "step": 1298 + }, + { + "epoch": 0.2, + "grad_norm": 16.500584955485103, + "learning_rate": 1.992596646990385e-05, + "loss": 1.0633, + "step": 1299 + }, + { + "epoch": 0.2, + "grad_norm": 34.49422380219095, + "learning_rate": 1.992576150315062e-05, + "loss": 0.7963, + "step": 1300 + }, + { + "epoch": 0.2, + "grad_norm": 19.81966630425843, + "learning_rate": 1.9925556254114547e-05, + "loss": 0.8769, + "step": 1301 + }, + { + "epoch": 0.2, + "grad_norm": 29.164915812711097, + "learning_rate": 1.9925350722801458e-05, + "loss": 0.9607, + "step": 1302 + }, + { + "epoch": 0.2, + "grad_norm": 25.58943896263914, + "learning_rate": 1.9925144909217197e-05, + "loss": 0.9324, + "step": 1303 + }, + { + "epoch": 0.2, + "grad_norm": 25.13568356508364, + "learning_rate": 1.9924938813367626e-05, + "loss": 0.8545, + "step": 1304 + }, + { + "epoch": 0.2, + "grad_norm": 25.45426296385659, + "learning_rate": 1.99247324352586e-05, + "loss": 1.0189, + "step": 1305 + }, + { + "epoch": 0.2, + "grad_norm": 31.72773203400263, + "learning_rate": 1.9924525774895986e-05, + "loss": 0.9538, + "step": 1306 + }, + { + "epoch": 0.2, + "grad_norm": 27.50644130479907, + "learning_rate": 1.9924318832285667e-05, + "loss": 1.0431, + "step": 1307 + }, + { + "epoch": 0.2, + "grad_norm": 20.018262046216933, + "learning_rate": 1.9924111607433528e-05, + "loss": 0.9644, + "step": 1308 + }, + { + "epoch": 0.2, + "grad_norm": 16.072629528814268, + "learning_rate": 1.992390410034546e-05, + "loss": 0.9031, + "step": 1309 + }, + { + "epoch": 0.2, + "grad_norm": 24.638114951632378, + "learning_rate": 1.992369631102736e-05, + "loss": 1.0506, + "step": 1310 + }, + { + "epoch": 0.2, + "grad_norm": 21.19323599795892, + "learning_rate": 1.9923488239485146e-05, + "loss": 0.9868, + "step": 1311 + }, + { + "epoch": 0.2, + "grad_norm": 23.022933296867507, + "learning_rate": 1.992327988572473e-05, + "loss": 1.0442, + "step": 1312 + }, + { + "epoch": 
0.21, + "grad_norm": 38.60077075343159, + "learning_rate": 1.9923071249752038e-05, + "loss": 0.9415, + "step": 1313 + }, + { + "epoch": 0.21, + "grad_norm": 16.935327725801237, + "learning_rate": 1.9922862331573007e-05, + "loss": 0.8662, + "step": 1314 + }, + { + "epoch": 0.21, + "grad_norm": 21.549857823175042, + "learning_rate": 1.9922653131193575e-05, + "loss": 0.9445, + "step": 1315 + }, + { + "epoch": 0.21, + "grad_norm": 16.42482200842557, + "learning_rate": 1.9922443648619696e-05, + "loss": 0.8671, + "step": 1316 + }, + { + "epoch": 0.21, + "grad_norm": 31.910562303238194, + "learning_rate": 1.992223388385732e-05, + "loss": 0.8879, + "step": 1317 + }, + { + "epoch": 0.21, + "grad_norm": 15.252798493002622, + "learning_rate": 1.9922023836912418e-05, + "loss": 0.8011, + "step": 1318 + }, + { + "epoch": 0.21, + "grad_norm": 14.150754687231103, + "learning_rate": 1.992181350779096e-05, + "loss": 0.8793, + "step": 1319 + }, + { + "epoch": 0.21, + "grad_norm": 17.31881387900814, + "learning_rate": 1.9921602896498934e-05, + "loss": 0.8197, + "step": 1320 + }, + { + "epoch": 0.21, + "grad_norm": 19.93742356016639, + "learning_rate": 1.9921392003042322e-05, + "loss": 0.9119, + "step": 1321 + }, + { + "epoch": 0.21, + "grad_norm": 13.878665614920763, + "learning_rate": 1.9921180827427126e-05, + "loss": 0.8363, + "step": 1322 + }, + { + "epoch": 0.21, + "grad_norm": 4.5208541018877675, + "learning_rate": 1.9920969369659354e-05, + "loss": 1.0458, + "step": 1323 + }, + { + "epoch": 0.21, + "grad_norm": 21.1772940981394, + "learning_rate": 1.9920757629745015e-05, + "loss": 0.9305, + "step": 1324 + }, + { + "epoch": 0.21, + "grad_norm": 17.442677599117722, + "learning_rate": 1.992054560769013e-05, + "loss": 0.9774, + "step": 1325 + }, + { + "epoch": 0.21, + "grad_norm": 22.854559087963043, + "learning_rate": 1.9920333303500734e-05, + "loss": 0.9629, + "step": 1326 + }, + { + "epoch": 0.21, + "grad_norm": 35.30302763377855, + "learning_rate": 1.992012071718286e-05, + "loss": 0.9763, + "step": 1327 + }, + { + "epoch": 0.21, + "grad_norm": 22.153192085498706, + "learning_rate": 1.9919907848742556e-05, + "loss": 0.9926, + "step": 1328 + }, + { + "epoch": 0.21, + "grad_norm": 17.260157248420533, + "learning_rate": 1.991969469818588e-05, + "loss": 0.9913, + "step": 1329 + }, + { + "epoch": 0.21, + "grad_norm": 19.96376624247682, + "learning_rate": 1.9919481265518885e-05, + "loss": 0.9022, + "step": 1330 + }, + { + "epoch": 0.21, + "grad_norm": 34.18917028402202, + "learning_rate": 1.9919267550747647e-05, + "loss": 0.9388, + "step": 1331 + }, + { + "epoch": 0.21, + "grad_norm": 14.98196083705158, + "learning_rate": 1.991905355387824e-05, + "loss": 0.8616, + "step": 1332 + }, + { + "epoch": 0.21, + "grad_norm": 16.595385975033494, + "learning_rate": 1.9918839274916757e-05, + "loss": 0.8426, + "step": 1333 + }, + { + "epoch": 0.21, + "grad_norm": 21.307162634521717, + "learning_rate": 1.9918624713869287e-05, + "loss": 0.895, + "step": 1334 + }, + { + "epoch": 0.21, + "grad_norm": 16.081120207404, + "learning_rate": 1.9918409870741928e-05, + "loss": 0.7825, + "step": 1335 + }, + { + "epoch": 0.21, + "grad_norm": 19.539929219347595, + "learning_rate": 1.9918194745540798e-05, + "loss": 0.8481, + "step": 1336 + }, + { + "epoch": 0.21, + "grad_norm": 33.29577674826702, + "learning_rate": 1.991797933827201e-05, + "loss": 0.895, + "step": 1337 + }, + { + "epoch": 0.21, + "grad_norm": 22.000071211179446, + "learning_rate": 1.9917763648941692e-05, + "loss": 1.023, + "step": 1338 + }, + { + "epoch": 0.21, + 
"grad_norm": 16.508815965063587, + "learning_rate": 1.991754767755598e-05, + "loss": 0.8633, + "step": 1339 + }, + { + "epoch": 0.21, + "grad_norm": 17.74053182565702, + "learning_rate": 1.991733142412101e-05, + "loss": 0.9433, + "step": 1340 + }, + { + "epoch": 0.21, + "grad_norm": 15.644052303855188, + "learning_rate": 1.9917114888642937e-05, + "loss": 0.8993, + "step": 1341 + }, + { + "epoch": 0.21, + "grad_norm": 23.775108462957714, + "learning_rate": 1.9916898071127915e-05, + "loss": 0.9787, + "step": 1342 + }, + { + "epoch": 0.21, + "grad_norm": 22.003199432061866, + "learning_rate": 1.9916680971582116e-05, + "loss": 0.9631, + "step": 1343 + }, + { + "epoch": 0.21, + "grad_norm": 14.829538243320078, + "learning_rate": 1.9916463590011713e-05, + "loss": 0.8365, + "step": 1344 + }, + { + "epoch": 0.21, + "grad_norm": 24.878333483592566, + "learning_rate": 1.9916245926422883e-05, + "loss": 0.9273, + "step": 1345 + }, + { + "epoch": 0.21, + "grad_norm": 16.257384510446926, + "learning_rate": 1.9916027980821823e-05, + "loss": 0.8607, + "step": 1346 + }, + { + "epoch": 0.21, + "grad_norm": 17.07316436252357, + "learning_rate": 1.9915809753214725e-05, + "loss": 0.8915, + "step": 1347 + }, + { + "epoch": 0.21, + "grad_norm": 13.959282926977835, + "learning_rate": 1.99155912436078e-05, + "loss": 0.741, + "step": 1348 + }, + { + "epoch": 0.21, + "grad_norm": 20.978979063158075, + "learning_rate": 1.991537245200726e-05, + "loss": 0.8688, + "step": 1349 + }, + { + "epoch": 0.21, + "grad_norm": 27.744051462182586, + "learning_rate": 1.991515337841933e-05, + "loss": 1.12, + "step": 1350 + }, + { + "epoch": 0.21, + "grad_norm": 15.25120064531158, + "learning_rate": 1.9914934022850233e-05, + "loss": 0.8152, + "step": 1351 + }, + { + "epoch": 0.21, + "grad_norm": 23.29446485142575, + "learning_rate": 1.9914714385306217e-05, + "loss": 0.9278, + "step": 1352 + }, + { + "epoch": 0.21, + "grad_norm": 16.049437390735815, + "learning_rate": 1.9914494465793522e-05, + "loss": 0.8964, + "step": 1353 + }, + { + "epoch": 0.21, + "grad_norm": 21.324527902453276, + "learning_rate": 1.9914274264318402e-05, + "loss": 0.9232, + "step": 1354 + }, + { + "epoch": 0.21, + "grad_norm": 12.591530142767086, + "learning_rate": 1.9914053780887122e-05, + "loss": 0.8671, + "step": 1355 + }, + { + "epoch": 0.21, + "grad_norm": 21.597951831475005, + "learning_rate": 1.9913833015505957e-05, + "loss": 0.9686, + "step": 1356 + }, + { + "epoch": 0.21, + "grad_norm": 20.237243762700256, + "learning_rate": 1.9913611968181178e-05, + "loss": 0.925, + "step": 1357 + }, + { + "epoch": 0.21, + "grad_norm": 18.727077098296117, + "learning_rate": 1.9913390638919074e-05, + "loss": 0.879, + "step": 1358 + }, + { + "epoch": 0.21, + "grad_norm": 27.987888497407347, + "learning_rate": 1.9913169027725936e-05, + "loss": 1.0334, + "step": 1359 + }, + { + "epoch": 0.21, + "grad_norm": 30.733868300949965, + "learning_rate": 1.9912947134608073e-05, + "loss": 0.9026, + "step": 1360 + }, + { + "epoch": 0.21, + "grad_norm": 16.173910454258483, + "learning_rate": 1.9912724959571793e-05, + "loss": 0.9208, + "step": 1361 + }, + { + "epoch": 0.21, + "grad_norm": 26.230580679171098, + "learning_rate": 1.9912502502623413e-05, + "loss": 0.9103, + "step": 1362 + }, + { + "epoch": 0.21, + "grad_norm": 19.052439439329678, + "learning_rate": 1.991227976376926e-05, + "loss": 0.824, + "step": 1363 + }, + { + "epoch": 0.21, + "grad_norm": 17.76117529959484, + "learning_rate": 1.991205674301567e-05, + "loss": 0.8669, + "step": 1364 + }, + { + "epoch": 0.21, + 
"grad_norm": 17.210222016837943, + "learning_rate": 1.9911833440368982e-05, + "loss": 0.7947, + "step": 1365 + }, + { + "epoch": 0.21, + "grad_norm": 33.605481570448795, + "learning_rate": 1.9911609855835553e-05, + "loss": 0.9422, + "step": 1366 + }, + { + "epoch": 0.21, + "grad_norm": 18.18952214004595, + "learning_rate": 1.9911385989421736e-05, + "loss": 0.8542, + "step": 1367 + }, + { + "epoch": 0.21, + "grad_norm": 21.163667112580974, + "learning_rate": 1.9911161841133898e-05, + "loss": 0.8978, + "step": 1368 + }, + { + "epoch": 0.21, + "grad_norm": 30.03465533772165, + "learning_rate": 1.9910937410978418e-05, + "loss": 0.8659, + "step": 1369 + }, + { + "epoch": 0.21, + "grad_norm": 14.56129051678673, + "learning_rate": 1.9910712698961673e-05, + "loss": 0.8997, + "step": 1370 + }, + { + "epoch": 0.21, + "grad_norm": 21.7784257478019, + "learning_rate": 1.991048770509006e-05, + "loss": 0.9045, + "step": 1371 + }, + { + "epoch": 0.21, + "grad_norm": 14.464371663980677, + "learning_rate": 1.991026242936997e-05, + "loss": 0.8788, + "step": 1372 + }, + { + "epoch": 0.21, + "grad_norm": 23.28065532590489, + "learning_rate": 1.9910036871807817e-05, + "loss": 1.0487, + "step": 1373 + }, + { + "epoch": 0.21, + "grad_norm": 26.378756413259477, + "learning_rate": 1.9909811032410012e-05, + "loss": 0.8824, + "step": 1374 + }, + { + "epoch": 0.21, + "grad_norm": 24.43542564349211, + "learning_rate": 1.9909584911182977e-05, + "loss": 0.9517, + "step": 1375 + }, + { + "epoch": 0.21, + "grad_norm": 26.59946889494999, + "learning_rate": 1.9909358508133145e-05, + "loss": 0.9472, + "step": 1376 + }, + { + "epoch": 0.22, + "grad_norm": 24.575767925465115, + "learning_rate": 1.990913182326695e-05, + "loss": 0.9322, + "step": 1377 + }, + { + "epoch": 0.22, + "grad_norm": 24.283341835010557, + "learning_rate": 1.990890485659085e-05, + "loss": 0.877, + "step": 1378 + }, + { + "epoch": 0.22, + "grad_norm": 24.015921680538895, + "learning_rate": 1.9908677608111287e-05, + "loss": 0.9034, + "step": 1379 + }, + { + "epoch": 0.22, + "grad_norm": 32.386463181298566, + "learning_rate": 1.9908450077834732e-05, + "loss": 1.018, + "step": 1380 + }, + { + "epoch": 0.22, + "grad_norm": 23.62281652293308, + "learning_rate": 1.9908222265767653e-05, + "loss": 0.9683, + "step": 1381 + }, + { + "epoch": 0.22, + "grad_norm": 12.893433320441366, + "learning_rate": 1.9907994171916524e-05, + "loss": 0.9804, + "step": 1382 + }, + { + "epoch": 0.22, + "grad_norm": 19.677590592003387, + "learning_rate": 1.9907765796287844e-05, + "loss": 0.9403, + "step": 1383 + }, + { + "epoch": 0.22, + "grad_norm": 22.765449230010383, + "learning_rate": 1.9907537138888097e-05, + "loss": 0.9905, + "step": 1384 + }, + { + "epoch": 0.22, + "grad_norm": 18.90626779981043, + "learning_rate": 1.990730819972379e-05, + "loss": 0.8958, + "step": 1385 + }, + { + "epoch": 0.22, + "grad_norm": 22.605297923163597, + "learning_rate": 1.9907078978801432e-05, + "loss": 0.8924, + "step": 1386 + }, + { + "epoch": 0.22, + "grad_norm": 22.121787663737475, + "learning_rate": 1.9906849476127545e-05, + "loss": 0.8506, + "step": 1387 + }, + { + "epoch": 0.22, + "grad_norm": 25.914299010726207, + "learning_rate": 1.9906619691708652e-05, + "loss": 0.9328, + "step": 1388 + }, + { + "epoch": 0.22, + "grad_norm": 23.406456402733113, + "learning_rate": 1.9906389625551294e-05, + "loss": 0.9968, + "step": 1389 + }, + { + "epoch": 0.22, + "grad_norm": 21.848600183369733, + "learning_rate": 1.990615927766201e-05, + "loss": 0.9835, + "step": 1390 + }, + { + "epoch": 0.22, + 
"grad_norm": 26.99942841962401, + "learning_rate": 1.9905928648047346e-05, + "loss": 0.9886, + "step": 1391 + }, + { + "epoch": 0.22, + "grad_norm": 14.034098767161495, + "learning_rate": 1.990569773671387e-05, + "loss": 0.9693, + "step": 1392 + }, + { + "epoch": 0.22, + "grad_norm": 35.71487357663943, + "learning_rate": 1.9905466543668143e-05, + "loss": 1.1704, + "step": 1393 + }, + { + "epoch": 0.22, + "grad_norm": 41.45938206197676, + "learning_rate": 1.9905235068916745e-05, + "loss": 0.9234, + "step": 1394 + }, + { + "epoch": 0.22, + "grad_norm": 22.495776853003562, + "learning_rate": 1.9905003312466255e-05, + "loss": 0.9915, + "step": 1395 + }, + { + "epoch": 0.22, + "grad_norm": 13.212968858485853, + "learning_rate": 1.9904771274323262e-05, + "loss": 0.8888, + "step": 1396 + }, + { + "epoch": 0.22, + "grad_norm": 19.54383243693455, + "learning_rate": 1.990453895449437e-05, + "loss": 0.9925, + "step": 1397 + }, + { + "epoch": 0.22, + "grad_norm": 13.750532358874487, + "learning_rate": 1.9904306352986183e-05, + "loss": 0.8366, + "step": 1398 + }, + { + "epoch": 0.22, + "grad_norm": 19.479072838232593, + "learning_rate": 1.990407346980532e-05, + "loss": 0.959, + "step": 1399 + }, + { + "epoch": 0.22, + "grad_norm": 28.19172560606223, + "learning_rate": 1.99038403049584e-05, + "loss": 0.8447, + "step": 1400 + }, + { + "epoch": 0.22, + "grad_norm": 27.135755440848115, + "learning_rate": 1.9903606858452056e-05, + "loss": 0.879, + "step": 1401 + }, + { + "epoch": 0.22, + "grad_norm": 23.06888160182277, + "learning_rate": 1.9903373130292928e-05, + "loss": 0.9948, + "step": 1402 + }, + { + "epoch": 0.22, + "grad_norm": 17.61414992076343, + "learning_rate": 1.9903139120487654e-05, + "loss": 0.8504, + "step": 1403 + }, + { + "epoch": 0.22, + "grad_norm": 16.871522748279556, + "learning_rate": 1.9902904829042902e-05, + "loss": 0.9806, + "step": 1404 + }, + { + "epoch": 0.22, + "grad_norm": 29.369601845853666, + "learning_rate": 1.9902670255965333e-05, + "loss": 0.9354, + "step": 1405 + }, + { + "epoch": 0.22, + "grad_norm": 19.74389234094852, + "learning_rate": 1.9902435401261608e-05, + "loss": 0.8639, + "step": 1406 + }, + { + "epoch": 0.22, + "grad_norm": 17.603213032638298, + "learning_rate": 1.990220026493842e-05, + "loss": 0.9023, + "step": 1407 + }, + { + "epoch": 0.22, + "grad_norm": 20.97678157365228, + "learning_rate": 1.9901964847002442e-05, + "loss": 1.014, + "step": 1408 + }, + { + "epoch": 0.22, + "grad_norm": 16.73880095593121, + "learning_rate": 1.990172914746038e-05, + "loss": 0.9541, + "step": 1409 + }, + { + "epoch": 0.22, + "grad_norm": 15.593196662706202, + "learning_rate": 1.9901493166318934e-05, + "loss": 0.8117, + "step": 1410 + }, + { + "epoch": 0.22, + "grad_norm": 20.13659180417085, + "learning_rate": 1.9901256903584812e-05, + "loss": 0.8954, + "step": 1411 + }, + { + "epoch": 0.22, + "grad_norm": 24.86775771340296, + "learning_rate": 1.9901020359264738e-05, + "loss": 1.0768, + "step": 1412 + }, + { + "epoch": 0.22, + "grad_norm": 18.893536761352216, + "learning_rate": 1.9900783533365437e-05, + "loss": 0.8369, + "step": 1413 + }, + { + "epoch": 0.22, + "grad_norm": 30.969588877520685, + "learning_rate": 1.9900546425893644e-05, + "loss": 0.851, + "step": 1414 + }, + { + "epoch": 0.22, + "grad_norm": 30.364502580785054, + "learning_rate": 1.99003090368561e-05, + "loss": 0.9364, + "step": 1415 + }, + { + "epoch": 0.22, + "grad_norm": 25.64482005553018, + "learning_rate": 1.990007136625956e-05, + "loss": 0.9606, + "step": 1416 + }, + { + "epoch": 0.22, + "grad_norm": 
23.860730601099355, + "learning_rate": 1.989983341411078e-05, + "loss": 0.8902, + "step": 1417 + }, + { + "epoch": 0.22, + "grad_norm": 26.945570604122867, + "learning_rate": 1.9899595180416532e-05, + "loss": 1.036, + "step": 1418 + }, + { + "epoch": 0.22, + "grad_norm": 22.585343567181834, + "learning_rate": 1.9899356665183584e-05, + "loss": 0.9409, + "step": 1419 + }, + { + "epoch": 0.22, + "grad_norm": 19.592404568031984, + "learning_rate": 1.989911786841873e-05, + "loss": 0.8498, + "step": 1420 + }, + { + "epoch": 0.22, + "grad_norm": 19.34782284981256, + "learning_rate": 1.9898878790128752e-05, + "loss": 0.862, + "step": 1421 + }, + { + "epoch": 0.22, + "grad_norm": 22.330109461349725, + "learning_rate": 1.9898639430320448e-05, + "loss": 0.9395, + "step": 1422 + }, + { + "epoch": 0.22, + "grad_norm": 17.958950574966494, + "learning_rate": 1.9898399789000633e-05, + "loss": 0.8653, + "step": 1423 + }, + { + "epoch": 0.22, + "grad_norm": 27.022031537393666, + "learning_rate": 1.9898159866176115e-05, + "loss": 0.8856, + "step": 1424 + }, + { + "epoch": 0.22, + "grad_norm": 21.055090281468015, + "learning_rate": 1.9897919661853723e-05, + "loss": 0.9617, + "step": 1425 + }, + { + "epoch": 0.22, + "grad_norm": 19.808843135298243, + "learning_rate": 1.9897679176040284e-05, + "loss": 0.8153, + "step": 1426 + }, + { + "epoch": 0.22, + "grad_norm": 26.98273301223038, + "learning_rate": 1.9897438408742644e-05, + "loss": 0.9232, + "step": 1427 + }, + { + "epoch": 0.22, + "grad_norm": 29.467110019854843, + "learning_rate": 1.9897197359967642e-05, + "loss": 0.899, + "step": 1428 + }, + { + "epoch": 0.22, + "grad_norm": 26.321039026185428, + "learning_rate": 1.9896956029722138e-05, + "loss": 1.0245, + "step": 1429 + }, + { + "epoch": 0.22, + "grad_norm": 29.247209941626622, + "learning_rate": 1.9896714418012993e-05, + "loss": 0.8231, + "step": 1430 + }, + { + "epoch": 0.22, + "grad_norm": 26.348569622001488, + "learning_rate": 1.9896472524847078e-05, + "loss": 0.961, + "step": 1431 + }, + { + "epoch": 0.22, + "grad_norm": 37.56406826166652, + "learning_rate": 1.9896230350231274e-05, + "loss": 0.8571, + "step": 1432 + }, + { + "epoch": 0.22, + "grad_norm": 20.53945002910316, + "learning_rate": 1.9895987894172474e-05, + "loss": 0.8621, + "step": 1433 + }, + { + "epoch": 0.22, + "grad_norm": 24.920721265341765, + "learning_rate": 1.989574515667756e-05, + "loss": 1.022, + "step": 1434 + }, + { + "epoch": 0.22, + "grad_norm": 16.206127859641747, + "learning_rate": 1.9895502137753448e-05, + "loss": 0.8861, + "step": 1435 + }, + { + "epoch": 0.22, + "grad_norm": 17.513001768723406, + "learning_rate": 1.989525883740704e-05, + "loss": 0.8813, + "step": 1436 + }, + { + "epoch": 0.22, + "grad_norm": 25.080606141750987, + "learning_rate": 1.989501525564526e-05, + "loss": 0.9389, + "step": 1437 + }, + { + "epoch": 0.22, + "grad_norm": 26.902371726278396, + "learning_rate": 1.9894771392475036e-05, + "loss": 1.035, + "step": 1438 + }, + { + "epoch": 0.22, + "grad_norm": 29.802421383683235, + "learning_rate": 1.98945272479033e-05, + "loss": 0.9444, + "step": 1439 + }, + { + "epoch": 0.22, + "grad_norm": 17.20849078913029, + "learning_rate": 1.9894282821936995e-05, + "loss": 0.7965, + "step": 1440 + }, + { + "epoch": 0.23, + "grad_norm": 23.171240186323416, + "learning_rate": 1.989403811458308e-05, + "loss": 0.9591, + "step": 1441 + }, + { + "epoch": 0.23, + "grad_norm": 39.5019219963036, + "learning_rate": 1.9893793125848506e-05, + "loss": 1.0449, + "step": 1442 + }, + { + "epoch": 0.23, + "grad_norm": 
25.663329008585723, + "learning_rate": 1.9893547855740243e-05, + "loss": 0.9246, + "step": 1443 + }, + { + "epoch": 0.23, + "grad_norm": 24.112498416673166, + "learning_rate": 1.9893302304265267e-05, + "loss": 0.8921, + "step": 1444 + }, + { + "epoch": 0.23, + "grad_norm": 20.039729486857773, + "learning_rate": 1.989305647143056e-05, + "loss": 0.8282, + "step": 1445 + }, + { + "epoch": 0.23, + "grad_norm": 20.691181532359877, + "learning_rate": 1.9892810357243116e-05, + "loss": 0.8882, + "step": 1446 + }, + { + "epoch": 0.23, + "grad_norm": 19.225087008849833, + "learning_rate": 1.989256396170993e-05, + "loss": 0.8138, + "step": 1447 + }, + { + "epoch": 0.23, + "grad_norm": 24.41466549670376, + "learning_rate": 1.9892317284838016e-05, + "loss": 1.0354, + "step": 1448 + }, + { + "epoch": 0.23, + "grad_norm": 15.987971303075874, + "learning_rate": 1.9892070326634383e-05, + "loss": 0.9045, + "step": 1449 + }, + { + "epoch": 0.23, + "grad_norm": 33.14052844179611, + "learning_rate": 1.9891823087106057e-05, + "loss": 0.9687, + "step": 1450 + }, + { + "epoch": 0.23, + "grad_norm": 18.543345362147758, + "learning_rate": 1.9891575566260068e-05, + "loss": 0.8688, + "step": 1451 + }, + { + "epoch": 0.23, + "grad_norm": 31.82596494286104, + "learning_rate": 1.9891327764103456e-05, + "loss": 0.8913, + "step": 1452 + }, + { + "epoch": 0.23, + "grad_norm": 15.102029975724584, + "learning_rate": 1.989107968064327e-05, + "loss": 0.8815, + "step": 1453 + }, + { + "epoch": 0.23, + "grad_norm": 25.800652465672165, + "learning_rate": 1.9890831315886566e-05, + "loss": 0.9337, + "step": 1454 + }, + { + "epoch": 0.23, + "grad_norm": 23.492450865502047, + "learning_rate": 1.98905826698404e-05, + "loss": 0.9639, + "step": 1455 + }, + { + "epoch": 0.23, + "grad_norm": 36.36599028160405, + "learning_rate": 1.989033374251185e-05, + "loss": 0.9794, + "step": 1456 + }, + { + "epoch": 0.23, + "grad_norm": 18.531828198249837, + "learning_rate": 1.9890084533907998e-05, + "loss": 0.836, + "step": 1457 + }, + { + "epoch": 0.23, + "grad_norm": 16.454867441883128, + "learning_rate": 1.9889835044035925e-05, + "loss": 0.9107, + "step": 1458 + }, + { + "epoch": 0.23, + "grad_norm": 29.831723663035085, + "learning_rate": 1.988958527290273e-05, + "loss": 0.9699, + "step": 1459 + }, + { + "epoch": 0.23, + "grad_norm": 30.106522068630024, + "learning_rate": 1.9889335220515512e-05, + "loss": 0.902, + "step": 1460 + }, + { + "epoch": 0.23, + "grad_norm": 17.6693855491818, + "learning_rate": 1.9889084886881387e-05, + "loss": 0.843, + "step": 1461 + }, + { + "epoch": 0.23, + "grad_norm": 18.156992833080583, + "learning_rate": 1.9888834272007475e-05, + "loss": 0.8507, + "step": 1462 + }, + { + "epoch": 0.23, + "grad_norm": 16.58564273789724, + "learning_rate": 1.9888583375900897e-05, + "loss": 0.8502, + "step": 1463 + }, + { + "epoch": 0.23, + "grad_norm": 20.48048829094658, + "learning_rate": 1.9888332198568794e-05, + "loss": 0.8605, + "step": 1464 + }, + { + "epoch": 0.23, + "grad_norm": 24.514160969565783, + "learning_rate": 1.988808074001831e-05, + "loss": 0.8924, + "step": 1465 + }, + { + "epoch": 0.23, + "grad_norm": 23.93567949722883, + "learning_rate": 1.9887829000256592e-05, + "loss": 0.8573, + "step": 1466 + }, + { + "epoch": 0.23, + "grad_norm": 18.38912034665384, + "learning_rate": 1.9887576979290803e-05, + "loss": 0.8493, + "step": 1467 + }, + { + "epoch": 0.23, + "grad_norm": 26.441573061468294, + "learning_rate": 1.9887324677128107e-05, + "loss": 0.8848, + "step": 1468 + }, + { + "epoch": 0.23, + "grad_norm": 
20.710959270815522, + "learning_rate": 1.988707209377568e-05, + "loss": 1.0571, + "step": 1469 + }, + { + "epoch": 0.23, + "grad_norm": 14.66331877496578, + "learning_rate": 1.9886819229240708e-05, + "loss": 0.763, + "step": 1470 + }, + { + "epoch": 0.23, + "grad_norm": 20.84368962246273, + "learning_rate": 1.9886566083530382e-05, + "loss": 0.9172, + "step": 1471 + }, + { + "epoch": 0.23, + "grad_norm": 18.960246015749057, + "learning_rate": 1.9886312656651897e-05, + "loss": 0.9751, + "step": 1472 + }, + { + "epoch": 0.23, + "grad_norm": 13.7503540177721, + "learning_rate": 1.9886058948612466e-05, + "loss": 0.827, + "step": 1473 + }, + { + "epoch": 0.23, + "grad_norm": 18.23471947012008, + "learning_rate": 1.98858049594193e-05, + "loss": 0.8813, + "step": 1474 + }, + { + "epoch": 0.23, + "grad_norm": 27.443001082697744, + "learning_rate": 1.9885550689079624e-05, + "loss": 0.9107, + "step": 1475 + }, + { + "epoch": 0.23, + "grad_norm": 23.963675917420925, + "learning_rate": 1.988529613760067e-05, + "loss": 1.0011, + "step": 1476 + }, + { + "epoch": 0.23, + "grad_norm": 15.62294501476367, + "learning_rate": 1.9885041304989682e-05, + "loss": 0.8256, + "step": 1477 + }, + { + "epoch": 0.23, + "grad_norm": 22.379305682327313, + "learning_rate": 1.9884786191253895e-05, + "loss": 0.9107, + "step": 1478 + }, + { + "epoch": 0.23, + "grad_norm": 17.226568236750126, + "learning_rate": 1.988453079640057e-05, + "loss": 0.9533, + "step": 1479 + }, + { + "epoch": 0.23, + "grad_norm": 22.931625383750685, + "learning_rate": 1.9884275120436976e-05, + "loss": 0.8969, + "step": 1480 + }, + { + "epoch": 0.23, + "grad_norm": 28.762895368604905, + "learning_rate": 1.988401916337038e-05, + "loss": 0.947, + "step": 1481 + }, + { + "epoch": 0.23, + "grad_norm": 17.57032215038423, + "learning_rate": 1.9883762925208058e-05, + "loss": 0.8551, + "step": 1482 + }, + { + "epoch": 0.23, + "grad_norm": 24.22208747140622, + "learning_rate": 1.98835064059573e-05, + "loss": 0.8877, + "step": 1483 + }, + { + "epoch": 0.23, + "grad_norm": 37.12283804808617, + "learning_rate": 1.9883249605625403e-05, + "loss": 1.0072, + "step": 1484 + }, + { + "epoch": 0.23, + "grad_norm": 14.543397514422557, + "learning_rate": 1.988299252421967e-05, + "loss": 0.9646, + "step": 1485 + }, + { + "epoch": 0.23, + "grad_norm": 31.81226664028502, + "learning_rate": 1.9882735161747407e-05, + "loss": 0.8968, + "step": 1486 + }, + { + "epoch": 0.23, + "grad_norm": 16.148444937919745, + "learning_rate": 1.9882477518215938e-05, + "loss": 0.8184, + "step": 1487 + }, + { + "epoch": 0.23, + "grad_norm": 22.605271616238163, + "learning_rate": 1.9882219593632595e-05, + "loss": 0.8806, + "step": 1488 + }, + { + "epoch": 0.23, + "grad_norm": 22.401248724863805, + "learning_rate": 1.9881961388004702e-05, + "loss": 0.8844, + "step": 1489 + }, + { + "epoch": 0.23, + "grad_norm": 16.85256456659565, + "learning_rate": 1.988170290133961e-05, + "loss": 0.8483, + "step": 1490 + }, + { + "epoch": 0.23, + "grad_norm": 22.243625712281947, + "learning_rate": 1.9881444133644668e-05, + "loss": 0.918, + "step": 1491 + }, + { + "epoch": 0.23, + "grad_norm": 19.86633504313018, + "learning_rate": 1.9881185084927234e-05, + "loss": 0.914, + "step": 1492 + }, + { + "epoch": 0.23, + "grad_norm": 23.082115977569647, + "learning_rate": 1.9880925755194677e-05, + "loss": 0.9888, + "step": 1493 + }, + { + "epoch": 0.23, + "grad_norm": 18.567946105511787, + "learning_rate": 1.9880666144454372e-05, + "loss": 0.8667, + "step": 1494 + }, + { + "epoch": 0.23, + "grad_norm": 18.103235278591296, 
+ "learning_rate": 1.9880406252713703e-05, + "loss": 0.8576, + "step": 1495 + }, + { + "epoch": 0.23, + "grad_norm": 32.05414053766156, + "learning_rate": 1.988014607998006e-05, + "loss": 0.9628, + "step": 1496 + }, + { + "epoch": 0.23, + "grad_norm": 20.314446426447713, + "learning_rate": 1.987988562626084e-05, + "loss": 1.0049, + "step": 1497 + }, + { + "epoch": 0.23, + "grad_norm": 22.489690572149616, + "learning_rate": 1.9879624891563456e-05, + "loss": 0.8725, + "step": 1498 + }, + { + "epoch": 0.23, + "grad_norm": 19.839622826800472, + "learning_rate": 1.9879363875895317e-05, + "loss": 0.9889, + "step": 1499 + }, + { + "epoch": 0.23, + "grad_norm": 25.44151166791262, + "learning_rate": 1.987910257926385e-05, + "loss": 0.9919, + "step": 1500 + }, + { + "epoch": 0.23, + "grad_norm": 21.1580910476791, + "learning_rate": 1.9878841001676484e-05, + "loss": 0.846, + "step": 1501 + }, + { + "epoch": 0.23, + "grad_norm": 12.442634325784066, + "learning_rate": 1.9878579143140656e-05, + "loss": 0.8613, + "step": 1502 + }, + { + "epoch": 0.23, + "grad_norm": 25.65284426453401, + "learning_rate": 1.987831700366382e-05, + "loss": 0.9577, + "step": 1503 + }, + { + "epoch": 0.23, + "grad_norm": 23.49152889666891, + "learning_rate": 1.9878054583253426e-05, + "loss": 0.8942, + "step": 1504 + }, + { + "epoch": 0.24, + "grad_norm": 17.020052578138305, + "learning_rate": 1.9877791881916935e-05, + "loss": 0.8903, + "step": 1505 + }, + { + "epoch": 0.24, + "grad_norm": 16.929546441871096, + "learning_rate": 1.9877528899661826e-05, + "loss": 0.8664, + "step": 1506 + }, + { + "epoch": 0.24, + "grad_norm": 16.08767658742868, + "learning_rate": 1.9877265636495574e-05, + "loss": 0.8503, + "step": 1507 + }, + { + "epoch": 0.24, + "grad_norm": 24.611948125225855, + "learning_rate": 1.987700209242566e-05, + "loss": 0.9032, + "step": 1508 + }, + { + "epoch": 0.24, + "grad_norm": 17.24670966874891, + "learning_rate": 1.9876738267459587e-05, + "loss": 0.8426, + "step": 1509 + }, + { + "epoch": 0.24, + "grad_norm": 17.202767041853868, + "learning_rate": 1.9876474161604858e-05, + "loss": 0.8817, + "step": 1510 + }, + { + "epoch": 0.24, + "grad_norm": 21.355531537159216, + "learning_rate": 1.9876209774868975e-05, + "loss": 0.9919, + "step": 1511 + }, + { + "epoch": 0.24, + "grad_norm": 18.44822621871114, + "learning_rate": 1.9875945107259467e-05, + "loss": 0.9514, + "step": 1512 + }, + { + "epoch": 0.24, + "grad_norm": 15.600634870616602, + "learning_rate": 1.987568015878386e-05, + "loss": 0.8976, + "step": 1513 + }, + { + "epoch": 0.24, + "grad_norm": 42.65773799033624, + "learning_rate": 1.9875414929449684e-05, + "loss": 0.9613, + "step": 1514 + }, + { + "epoch": 0.24, + "grad_norm": 23.868428440579983, + "learning_rate": 1.9875149419264483e-05, + "loss": 0.8894, + "step": 1515 + }, + { + "epoch": 0.24, + "grad_norm": 21.1866930186212, + "learning_rate": 1.987488362823581e-05, + "loss": 0.8675, + "step": 1516 + }, + { + "epoch": 0.24, + "grad_norm": 19.6598789347875, + "learning_rate": 1.9874617556371224e-05, + "loss": 0.8718, + "step": 1517 + }, + { + "epoch": 0.24, + "grad_norm": 27.132711136285618, + "learning_rate": 1.987435120367829e-05, + "loss": 0.9302, + "step": 1518 + }, + { + "epoch": 0.24, + "grad_norm": 28.074359770835063, + "learning_rate": 1.987408457016459e-05, + "loss": 0.8548, + "step": 1519 + }, + { + "epoch": 0.24, + "grad_norm": 18.889135313023882, + "learning_rate": 1.9873817655837695e-05, + "loss": 0.877, + "step": 1520 + }, + { + "epoch": 0.24, + "grad_norm": 20.15093699153894, + "learning_rate": 
1.9873550460705203e-05, + "loss": 0.7814, + "step": 1521 + }, + { + "epoch": 0.24, + "grad_norm": 41.35476704008727, + "learning_rate": 1.987328298477471e-05, + "loss": 0.8904, + "step": 1522 + }, + { + "epoch": 0.24, + "grad_norm": 16.969927016775248, + "learning_rate": 1.9873015228053828e-05, + "loss": 0.8934, + "step": 1523 + }, + { + "epoch": 0.24, + "grad_norm": 20.84439129393353, + "learning_rate": 1.9872747190550167e-05, + "loss": 0.987, + "step": 1524 + }, + { + "epoch": 0.24, + "grad_norm": 16.562192149270235, + "learning_rate": 1.987247887227135e-05, + "loss": 0.8728, + "step": 1525 + }, + { + "epoch": 0.24, + "grad_norm": 20.610440172458183, + "learning_rate": 1.9872210273225012e-05, + "loss": 0.845, + "step": 1526 + }, + { + "epoch": 0.24, + "grad_norm": 28.046246185266774, + "learning_rate": 1.9871941393418788e-05, + "loss": 0.8761, + "step": 1527 + }, + { + "epoch": 0.24, + "grad_norm": 24.19092754233467, + "learning_rate": 1.9871672232860323e-05, + "loss": 0.9944, + "step": 1528 + }, + { + "epoch": 0.24, + "grad_norm": 14.301384599738446, + "learning_rate": 1.9871402791557276e-05, + "loss": 0.9345, + "step": 1529 + }, + { + "epoch": 0.24, + "grad_norm": 9.007479441077614, + "learning_rate": 1.987113306951731e-05, + "loss": 0.9111, + "step": 1530 + }, + { + "epoch": 0.24, + "grad_norm": 18.403992620522057, + "learning_rate": 1.9870863066748092e-05, + "loss": 0.8233, + "step": 1531 + }, + { + "epoch": 0.24, + "grad_norm": 24.482426596277037, + "learning_rate": 1.9870592783257303e-05, + "loss": 0.8811, + "step": 1532 + }, + { + "epoch": 0.24, + "grad_norm": 18.977830127577914, + "learning_rate": 1.987032221905263e-05, + "loss": 0.8781, + "step": 1533 + }, + { + "epoch": 0.24, + "grad_norm": 14.989255471340897, + "learning_rate": 1.9870051374141765e-05, + "loss": 0.7822, + "step": 1534 + }, + { + "epoch": 0.24, + "grad_norm": 24.703784181026588, + "learning_rate": 1.9869780248532416e-05, + "loss": 0.963, + "step": 1535 + }, + { + "epoch": 0.24, + "grad_norm": 21.42849824799235, + "learning_rate": 1.986950884223229e-05, + "loss": 0.8673, + "step": 1536 + }, + { + "epoch": 0.24, + "grad_norm": 19.104362050169783, + "learning_rate": 1.9869237155249105e-05, + "loss": 0.9335, + "step": 1537 + }, + { + "epoch": 0.24, + "grad_norm": 19.762347184048217, + "learning_rate": 1.9868965187590588e-05, + "loss": 0.9469, + "step": 1538 + }, + { + "epoch": 0.24, + "grad_norm": 26.174756447752902, + "learning_rate": 1.9868692939264474e-05, + "loss": 0.9615, + "step": 1539 + }, + { + "epoch": 0.24, + "grad_norm": 27.08999698213284, + "learning_rate": 1.9868420410278506e-05, + "loss": 0.9476, + "step": 1540 + }, + { + "epoch": 0.24, + "grad_norm": 23.4324296667565, + "learning_rate": 1.9868147600640433e-05, + "loss": 0.8804, + "step": 1541 + }, + { + "epoch": 0.24, + "grad_norm": 15.979844326593657, + "learning_rate": 1.9867874510358017e-05, + "loss": 0.8384, + "step": 1542 + }, + { + "epoch": 0.24, + "grad_norm": 19.081608419897385, + "learning_rate": 1.9867601139439023e-05, + "loss": 0.8873, + "step": 1543 + }, + { + "epoch": 0.24, + "grad_norm": 24.779687846062483, + "learning_rate": 1.9867327487891224e-05, + "loss": 0.9797, + "step": 1544 + }, + { + "epoch": 0.24, + "grad_norm": 24.391452958535215, + "learning_rate": 1.9867053555722406e-05, + "loss": 0.8472, + "step": 1545 + }, + { + "epoch": 0.24, + "grad_norm": 24.469499017498077, + "learning_rate": 1.9866779342940354e-05, + "loss": 0.9956, + "step": 1546 + }, + { + "epoch": 0.24, + "grad_norm": 21.310513414299503, + "learning_rate": 
1.986650484955287e-05, + "loss": 0.9349, + "step": 1547 + }, + { + "epoch": 0.24, + "grad_norm": 14.279051936045528, + "learning_rate": 1.986623007556776e-05, + "loss": 0.8944, + "step": 1548 + }, + { + "epoch": 0.24, + "grad_norm": 18.777165605094, + "learning_rate": 1.9865955020992837e-05, + "loss": 0.9278, + "step": 1549 + }, + { + "epoch": 0.24, + "grad_norm": 24.286095031490465, + "learning_rate": 1.9865679685835924e-05, + "loss": 0.8482, + "step": 1550 + }, + { + "epoch": 0.24, + "grad_norm": 21.06437172765661, + "learning_rate": 1.9865404070104857e-05, + "loss": 0.8321, + "step": 1551 + }, + { + "epoch": 0.24, + "grad_norm": 24.569456568911974, + "learning_rate": 1.9865128173807465e-05, + "loss": 0.9967, + "step": 1552 + }, + { + "epoch": 0.24, + "grad_norm": 23.787221183987047, + "learning_rate": 1.98648519969516e-05, + "loss": 0.978, + "step": 1553 + }, + { + "epoch": 0.24, + "grad_norm": 25.99072945819066, + "learning_rate": 1.9864575539545115e-05, + "loss": 0.8469, + "step": 1554 + }, + { + "epoch": 0.24, + "grad_norm": 16.420302041685954, + "learning_rate": 1.986429880159587e-05, + "loss": 0.9421, + "step": 1555 + }, + { + "epoch": 0.24, + "grad_norm": 27.393284233906577, + "learning_rate": 1.986402178311174e-05, + "loss": 0.9912, + "step": 1556 + }, + { + "epoch": 0.24, + "grad_norm": 19.083219956199336, + "learning_rate": 1.98637444841006e-05, + "loss": 1.0749, + "step": 1557 + }, + { + "epoch": 0.24, + "grad_norm": 24.371095586728668, + "learning_rate": 1.9863466904570338e-05, + "loss": 0.9149, + "step": 1558 + }, + { + "epoch": 0.24, + "grad_norm": 18.69632820410229, + "learning_rate": 1.9863189044528847e-05, + "loss": 0.8413, + "step": 1559 + }, + { + "epoch": 0.24, + "grad_norm": 19.669799542191065, + "learning_rate": 1.986291090398403e-05, + "loss": 0.9167, + "step": 1560 + }, + { + "epoch": 0.24, + "grad_norm": 21.127502376430776, + "learning_rate": 1.9862632482943796e-05, + "loss": 0.9223, + "step": 1561 + }, + { + "epoch": 0.24, + "grad_norm": 31.630367421031174, + "learning_rate": 1.9862353781416062e-05, + "loss": 0.8649, + "step": 1562 + }, + { + "epoch": 0.24, + "grad_norm": 19.87502752758015, + "learning_rate": 1.9862074799408757e-05, + "loss": 0.8531, + "step": 1563 + }, + { + "epoch": 0.24, + "grad_norm": 39.3331372197143, + "learning_rate": 1.9861795536929814e-05, + "loss": 0.9515, + "step": 1564 + }, + { + "epoch": 0.24, + "grad_norm": 21.271972655549494, + "learning_rate": 1.9861515993987174e-05, + "loss": 0.8518, + "step": 1565 + }, + { + "epoch": 0.24, + "grad_norm": 18.494631913935663, + "learning_rate": 1.986123617058879e-05, + "loss": 0.801, + "step": 1566 + }, + { + "epoch": 0.24, + "grad_norm": 17.919234674150502, + "learning_rate": 1.9860956066742613e-05, + "loss": 1.007, + "step": 1567 + }, + { + "epoch": 0.24, + "grad_norm": 24.858569040426747, + "learning_rate": 1.986067568245662e-05, + "loss": 0.8046, + "step": 1568 + }, + { + "epoch": 0.25, + "grad_norm": 23.534347097329405, + "learning_rate": 1.9860395017738778e-05, + "loss": 0.82, + "step": 1569 + }, + { + "epoch": 0.25, + "grad_norm": 22.73581702617474, + "learning_rate": 1.9860114072597064e-05, + "loss": 0.8114, + "step": 1570 + }, + { + "epoch": 0.25, + "grad_norm": 22.757802254792043, + "learning_rate": 1.985983284703948e-05, + "loss": 0.8302, + "step": 1571 + }, + { + "epoch": 0.25, + "grad_norm": 23.301606886146196, + "learning_rate": 1.9859551341074016e-05, + "loss": 0.9559, + "step": 1572 + }, + { + "epoch": 0.25, + "grad_norm": 28.235940291350914, + "learning_rate": 
1.985926955470868e-05, + "loss": 0.894, + "step": 1573 + }, + { + "epoch": 0.25, + "grad_norm": 25.379316689200724, + "learning_rate": 1.9858987487951485e-05, + "loss": 0.9196, + "step": 1574 + }, + { + "epoch": 0.25, + "grad_norm": 19.96398671967239, + "learning_rate": 1.9858705140810453e-05, + "loss": 0.9733, + "step": 1575 + }, + { + "epoch": 0.25, + "grad_norm": 33.444085546192184, + "learning_rate": 1.985842251329361e-05, + "loss": 0.9762, + "step": 1576 + }, + { + "epoch": 0.25, + "grad_norm": 22.0310834093762, + "learning_rate": 1.9858139605409005e-05, + "loss": 0.9813, + "step": 1577 + }, + { + "epoch": 0.25, + "grad_norm": 28.530349919890558, + "learning_rate": 1.985785641716467e-05, + "loss": 0.9046, + "step": 1578 + }, + { + "epoch": 0.25, + "grad_norm": 20.473781409203127, + "learning_rate": 1.985757294856867e-05, + "loss": 0.891, + "step": 1579 + }, + { + "epoch": 0.25, + "grad_norm": 24.782652740656243, + "learning_rate": 1.985728919962906e-05, + "loss": 0.969, + "step": 1580 + }, + { + "epoch": 0.25, + "grad_norm": 27.928155643095884, + "learning_rate": 1.985700517035391e-05, + "loss": 0.9972, + "step": 1581 + }, + { + "epoch": 0.25, + "grad_norm": 15.171645769485439, + "learning_rate": 1.98567208607513e-05, + "loss": 0.779, + "step": 1582 + }, + { + "epoch": 0.25, + "grad_norm": 16.15141236688541, + "learning_rate": 1.9856436270829315e-05, + "loss": 0.8682, + "step": 1583 + }, + { + "epoch": 0.25, + "grad_norm": 30.34893832160513, + "learning_rate": 1.985615140059605e-05, + "loss": 0.8446, + "step": 1584 + }, + { + "epoch": 0.25, + "grad_norm": 14.90170651294565, + "learning_rate": 1.9855866250059604e-05, + "loss": 0.9663, + "step": 1585 + }, + { + "epoch": 0.25, + "grad_norm": 20.814766740364764, + "learning_rate": 1.9855580819228087e-05, + "loss": 0.9283, + "step": 1586 + }, + { + "epoch": 0.25, + "grad_norm": 25.826641225339994, + "learning_rate": 1.985529510810962e-05, + "loss": 0.9141, + "step": 1587 + }, + { + "epoch": 0.25, + "grad_norm": 24.461194642870954, + "learning_rate": 1.9855009116712318e-05, + "loss": 0.9014, + "step": 1588 + }, + { + "epoch": 0.25, + "grad_norm": 16.67600278512863, + "learning_rate": 1.985472284504433e-05, + "loss": 0.9292, + "step": 1589 + }, + { + "epoch": 0.25, + "grad_norm": 18.464522902281754, + "learning_rate": 1.9854436293113782e-05, + "loss": 0.9108, + "step": 1590 + }, + { + "epoch": 0.25, + "grad_norm": 27.952304762485202, + "learning_rate": 1.9854149460928832e-05, + "loss": 0.7976, + "step": 1591 + }, + { + "epoch": 0.25, + "grad_norm": 13.89888782104047, + "learning_rate": 1.9853862348497638e-05, + "loss": 0.8497, + "step": 1592 + }, + { + "epoch": 0.25, + "grad_norm": 20.30094980944647, + "learning_rate": 1.9853574955828364e-05, + "loss": 0.919, + "step": 1593 + }, + { + "epoch": 0.25, + "grad_norm": 29.379491346000943, + "learning_rate": 1.985328728292918e-05, + "loss": 0.8353, + "step": 1594 + }, + { + "epoch": 0.25, + "grad_norm": 24.51259496756525, + "learning_rate": 1.985299932980827e-05, + "loss": 0.9382, + "step": 1595 + }, + { + "epoch": 0.25, + "grad_norm": 19.055782837566728, + "learning_rate": 1.9852711096473824e-05, + "loss": 1.0492, + "step": 1596 + }, + { + "epoch": 0.25, + "grad_norm": 17.092632275464698, + "learning_rate": 1.9852422582934037e-05, + "loss": 0.8701, + "step": 1597 + }, + { + "epoch": 0.25, + "grad_norm": 14.837682850336902, + "learning_rate": 1.9852133789197117e-05, + "loss": 0.7924, + "step": 1598 + }, + { + "epoch": 0.25, + "grad_norm": 24.70587231115513, + "learning_rate": 
1.9851844715271272e-05, + "loss": 0.9711, + "step": 1599 + }, + { + "epoch": 0.25, + "grad_norm": 26.768006772182794, + "learning_rate": 1.985155536116473e-05, + "loss": 0.913, + "step": 1600 + }, + { + "epoch": 0.25, + "grad_norm": 18.889000930105173, + "learning_rate": 1.9851265726885716e-05, + "loss": 0.9164, + "step": 1601 + }, + { + "epoch": 0.25, + "grad_norm": 29.56957839094889, + "learning_rate": 1.9850975812442465e-05, + "loss": 0.9676, + "step": 1602 + }, + { + "epoch": 0.25, + "grad_norm": 20.131166672112233, + "learning_rate": 1.9850685617843225e-05, + "loss": 0.929, + "step": 1603 + }, + { + "epoch": 0.25, + "grad_norm": 20.227839084474525, + "learning_rate": 1.985039514309625e-05, + "loss": 0.85, + "step": 1604 + }, + { + "epoch": 0.25, + "grad_norm": 15.506373025882, + "learning_rate": 1.9850104388209802e-05, + "loss": 0.87, + "step": 1605 + }, + { + "epoch": 0.25, + "grad_norm": 17.749856350545816, + "learning_rate": 1.9849813353192142e-05, + "loss": 0.7983, + "step": 1606 + }, + { + "epoch": 0.25, + "grad_norm": 22.040139069914062, + "learning_rate": 1.9849522038051553e-05, + "loss": 0.8483, + "step": 1607 + }, + { + "epoch": 0.25, + "grad_norm": 21.499774206716868, + "learning_rate": 1.9849230442796324e-05, + "loss": 0.9599, + "step": 1608 + }, + { + "epoch": 0.25, + "grad_norm": 17.094841699068112, + "learning_rate": 1.9848938567434737e-05, + "loss": 0.9141, + "step": 1609 + }, + { + "epoch": 0.25, + "grad_norm": 16.47469423698453, + "learning_rate": 1.98486464119751e-05, + "loss": 0.899, + "step": 1610 + }, + { + "epoch": 0.25, + "grad_norm": 19.056744094228375, + "learning_rate": 1.9848353976425723e-05, + "loss": 0.876, + "step": 1611 + }, + { + "epoch": 0.25, + "grad_norm": 16.517360578017563, + "learning_rate": 1.984806126079492e-05, + "loss": 0.8606, + "step": 1612 + }, + { + "epoch": 0.25, + "grad_norm": 21.184089523301797, + "learning_rate": 1.9847768265091015e-05, + "loss": 0.8799, + "step": 1613 + }, + { + "epoch": 0.25, + "grad_norm": 21.220808366496115, + "learning_rate": 1.984747498932234e-05, + "loss": 0.9153, + "step": 1614 + }, + { + "epoch": 0.25, + "grad_norm": 20.191952781195642, + "learning_rate": 1.984718143349724e-05, + "loss": 0.9018, + "step": 1615 + }, + { + "epoch": 0.25, + "grad_norm": 20.696585650972292, + "learning_rate": 1.9846887597624054e-05, + "loss": 0.8602, + "step": 1616 + }, + { + "epoch": 0.25, + "grad_norm": 19.645189871587714, + "learning_rate": 1.984659348171115e-05, + "loss": 0.9109, + "step": 1617 + }, + { + "epoch": 0.25, + "grad_norm": 21.80176946317184, + "learning_rate": 1.9846299085766888e-05, + "loss": 0.7923, + "step": 1618 + }, + { + "epoch": 0.25, + "grad_norm": 12.40837045415924, + "learning_rate": 1.9846004409799636e-05, + "loss": 0.8333, + "step": 1619 + }, + { + "epoch": 0.25, + "grad_norm": 18.21026819611962, + "learning_rate": 1.984570945381778e-05, + "loss": 0.9582, + "step": 1620 + }, + { + "epoch": 0.25, + "grad_norm": 21.057845668281168, + "learning_rate": 1.9845414217829708e-05, + "loss": 0.8639, + "step": 1621 + }, + { + "epoch": 0.25, + "grad_norm": 16.807790616028218, + "learning_rate": 1.9845118701843816e-05, + "loss": 0.8879, + "step": 1622 + }, + { + "epoch": 0.25, + "grad_norm": 20.871412538595905, + "learning_rate": 1.9844822905868507e-05, + "loss": 0.9686, + "step": 1623 + }, + { + "epoch": 0.25, + "grad_norm": 21.205071110481367, + "learning_rate": 1.984452682991219e-05, + "loss": 0.9883, + "step": 1624 + }, + { + "epoch": 0.25, + "grad_norm": 19.853528600595627, + "learning_rate": 
1.984423047398329e-05, + "loss": 0.9944, + "step": 1625 + }, + { + "epoch": 0.25, + "grad_norm": 17.91780237453702, + "learning_rate": 1.9843933838090236e-05, + "loss": 0.8593, + "step": 1626 + }, + { + "epoch": 0.25, + "grad_norm": 21.217180569043126, + "learning_rate": 1.9843636922241463e-05, + "loss": 0.9105, + "step": 1627 + }, + { + "epoch": 0.25, + "grad_norm": 13.632563895879928, + "learning_rate": 1.984333972644541e-05, + "loss": 0.818, + "step": 1628 + }, + { + "epoch": 0.25, + "grad_norm": 18.235173435921375, + "learning_rate": 1.984304225071054e-05, + "loss": 0.8356, + "step": 1629 + }, + { + "epoch": 0.25, + "grad_norm": 20.182657224892584, + "learning_rate": 1.98427444950453e-05, + "loss": 0.7999, + "step": 1630 + }, + { + "epoch": 0.25, + "grad_norm": 15.394914966399828, + "learning_rate": 1.9842446459458166e-05, + "loss": 0.8587, + "step": 1631 + }, + { + "epoch": 0.25, + "grad_norm": 18.2234754879083, + "learning_rate": 1.9842148143957612e-05, + "loss": 0.9409, + "step": 1632 + }, + { + "epoch": 0.26, + "grad_norm": 20.972849386188827, + "learning_rate": 1.9841849548552124e-05, + "loss": 0.8701, + "step": 1633 + }, + { + "epoch": 0.26, + "grad_norm": 20.238464828067983, + "learning_rate": 1.9841550673250192e-05, + "loss": 0.8618, + "step": 1634 + }, + { + "epoch": 0.26, + "grad_norm": 21.860763723830463, + "learning_rate": 1.9841251518060314e-05, + "loss": 0.9003, + "step": 1635 + }, + { + "epoch": 0.26, + "grad_norm": 16.63649729993102, + "learning_rate": 1.9840952082991e-05, + "loss": 0.8897, + "step": 1636 + }, + { + "epoch": 0.26, + "grad_norm": 15.24084487468591, + "learning_rate": 1.984065236805077e-05, + "loss": 0.8665, + "step": 1637 + }, + { + "epoch": 0.26, + "grad_norm": 18.135258355377267, + "learning_rate": 1.984035237324814e-05, + "loss": 0.8578, + "step": 1638 + }, + { + "epoch": 0.26, + "grad_norm": 28.447627242906936, + "learning_rate": 1.9840052098591645e-05, + "loss": 0.9812, + "step": 1639 + }, + { + "epoch": 0.26, + "grad_norm": 17.872861871021367, + "learning_rate": 1.9839751544089827e-05, + "loss": 0.8685, + "step": 1640 + }, + { + "epoch": 0.26, + "grad_norm": 7.810502189781224, + "learning_rate": 1.983945070975123e-05, + "loss": 0.834, + "step": 1641 + }, + { + "epoch": 0.26, + "grad_norm": 19.15290131322366, + "learning_rate": 1.983914959558441e-05, + "loss": 0.9041, + "step": 1642 + }, + { + "epoch": 0.26, + "grad_norm": 16.15181768591629, + "learning_rate": 1.983884820159793e-05, + "loss": 0.878, + "step": 1643 + }, + { + "epoch": 0.26, + "grad_norm": 20.195868435407395, + "learning_rate": 1.9838546527800364e-05, + "loss": 0.8713, + "step": 1644 + }, + { + "epoch": 0.26, + "grad_norm": 34.692844200851354, + "learning_rate": 1.983824457420029e-05, + "loss": 0.984, + "step": 1645 + }, + { + "epoch": 0.26, + "grad_norm": 18.70194492425561, + "learning_rate": 1.983794234080629e-05, + "loss": 0.9004, + "step": 1646 + }, + { + "epoch": 0.26, + "grad_norm": 18.841944503640804, + "learning_rate": 1.9837639827626973e-05, + "loss": 0.8657, + "step": 1647 + }, + { + "epoch": 0.26, + "grad_norm": 29.375172863613965, + "learning_rate": 1.983733703467093e-05, + "loss": 0.9512, + "step": 1648 + }, + { + "epoch": 0.26, + "grad_norm": 25.117005789234447, + "learning_rate": 1.983703396194678e-05, + "loss": 0.9423, + "step": 1649 + }, + { + "epoch": 0.26, + "grad_norm": 22.010961772423034, + "learning_rate": 1.9836730609463134e-05, + "loss": 0.8295, + "step": 1650 + }, + { + "epoch": 0.26, + "grad_norm": 17.48029181875695, + "learning_rate": 1.983642697722863e-05, 
+ "loss": 0.8895, + "step": 1651 + }, + { + "epoch": 0.26, + "grad_norm": 18.08662586900136, + "learning_rate": 1.9836123065251892e-05, + "loss": 0.8811, + "step": 1652 + }, + { + "epoch": 0.26, + "grad_norm": 14.468787104680578, + "learning_rate": 1.9835818873541572e-05, + "loss": 0.8907, + "step": 1653 + }, + { + "epoch": 0.26, + "grad_norm": 42.75017693549901, + "learning_rate": 1.983551440210631e-05, + "loss": 1.0122, + "step": 1654 + }, + { + "epoch": 0.26, + "grad_norm": 37.603883522249845, + "learning_rate": 1.9835209650954775e-05, + "loss": 1.016, + "step": 1655 + }, + { + "epoch": 0.26, + "grad_norm": 16.902028193039648, + "learning_rate": 1.9834904620095636e-05, + "loss": 0.8202, + "step": 1656 + }, + { + "epoch": 0.26, + "grad_norm": 20.05601838766581, + "learning_rate": 1.9834599309537557e-05, + "loss": 0.8901, + "step": 1657 + }, + { + "epoch": 0.26, + "grad_norm": 21.000108851990827, + "learning_rate": 1.9834293719289227e-05, + "loss": 0.9947, + "step": 1658 + }, + { + "epoch": 0.26, + "grad_norm": 52.55156834831746, + "learning_rate": 1.9833987849359338e-05, + "loss": 1.0453, + "step": 1659 + }, + { + "epoch": 0.26, + "grad_norm": 18.137705460376477, + "learning_rate": 1.9833681699756592e-05, + "loss": 0.8133, + "step": 1660 + }, + { + "epoch": 0.26, + "grad_norm": 24.64539404819129, + "learning_rate": 1.9833375270489682e-05, + "loss": 0.9252, + "step": 1661 + }, + { + "epoch": 0.26, + "grad_norm": 29.720110765826092, + "learning_rate": 1.983306856156734e-05, + "loss": 0.8509, + "step": 1662 + }, + { + "epoch": 0.26, + "grad_norm": 10.061173508564595, + "learning_rate": 1.9832761572998274e-05, + "loss": 0.8002, + "step": 1663 + }, + { + "epoch": 0.26, + "grad_norm": 19.487527124277147, + "learning_rate": 1.9832454304791224e-05, + "loss": 0.9104, + "step": 1664 + }, + { + "epoch": 0.26, + "grad_norm": 19.2215853506947, + "learning_rate": 1.9832146756954925e-05, + "loss": 0.9163, + "step": 1665 + }, + { + "epoch": 0.26, + "grad_norm": 16.120777669608227, + "learning_rate": 1.9831838929498123e-05, + "loss": 0.855, + "step": 1666 + }, + { + "epoch": 0.26, + "grad_norm": 33.39743699387739, + "learning_rate": 1.983153082242957e-05, + "loss": 0.8794, + "step": 1667 + }, + { + "epoch": 0.26, + "grad_norm": 16.19784093810489, + "learning_rate": 1.983122243575804e-05, + "loss": 0.7899, + "step": 1668 + }, + { + "epoch": 0.26, + "grad_norm": 19.899923464553627, + "learning_rate": 1.9830913769492288e-05, + "loss": 0.896, + "step": 1669 + }, + { + "epoch": 0.26, + "grad_norm": 17.8511313622163, + "learning_rate": 1.9830604823641103e-05, + "loss": 0.985, + "step": 1670 + }, + { + "epoch": 0.26, + "grad_norm": 31.44816884401661, + "learning_rate": 1.9830295598213263e-05, + "loss": 0.8494, + "step": 1671 + }, + { + "epoch": 0.26, + "grad_norm": 21.58686807623665, + "learning_rate": 1.9829986093217575e-05, + "loss": 0.9452, + "step": 1672 + }, + { + "epoch": 0.26, + "grad_norm": 29.097783068819563, + "learning_rate": 1.9829676308662826e-05, + "loss": 0.9155, + "step": 1673 + }, + { + "epoch": 0.26, + "grad_norm": 15.029132243761168, + "learning_rate": 1.9829366244557837e-05, + "loss": 0.8357, + "step": 1674 + }, + { + "epoch": 0.26, + "grad_norm": 26.770385605535186, + "learning_rate": 1.982905590091142e-05, + "loss": 0.8228, + "step": 1675 + }, + { + "epoch": 0.26, + "grad_norm": 16.883739146354582, + "learning_rate": 1.98287452777324e-05, + "loss": 0.9237, + "step": 1676 + }, + { + "epoch": 0.26, + "grad_norm": 18.522797857862518, + "learning_rate": 1.982843437502962e-05, + "loss": 0.9413, 
+ "step": 1677 + }, + { + "epoch": 0.26, + "grad_norm": 15.670813278734217, + "learning_rate": 1.982812319281191e-05, + "loss": 0.7638, + "step": 1678 + }, + { + "epoch": 0.26, + "grad_norm": 22.824570396700686, + "learning_rate": 1.982781173108813e-05, + "loss": 0.8331, + "step": 1679 + }, + { + "epoch": 0.26, + "grad_norm": 6.102055059619059, + "learning_rate": 1.982749998986713e-05, + "loss": 0.9727, + "step": 1680 + }, + { + "epoch": 0.26, + "grad_norm": 18.613612673391664, + "learning_rate": 1.982718796915778e-05, + "loss": 0.9464, + "step": 1681 + }, + { + "epoch": 0.26, + "grad_norm": 21.58938208456188, + "learning_rate": 1.9826875668968956e-05, + "loss": 0.8991, + "step": 1682 + }, + { + "epoch": 0.26, + "grad_norm": 13.565050239136012, + "learning_rate": 1.9826563089309534e-05, + "loss": 0.8076, + "step": 1683 + }, + { + "epoch": 0.26, + "grad_norm": 33.96244031969681, + "learning_rate": 1.982625023018841e-05, + "loss": 1.0186, + "step": 1684 + }, + { + "epoch": 0.26, + "grad_norm": 18.330985058321044, + "learning_rate": 1.982593709161447e-05, + "loss": 0.8724, + "step": 1685 + }, + { + "epoch": 0.26, + "grad_norm": 16.11666479201521, + "learning_rate": 1.9825623673596634e-05, + "loss": 0.7933, + "step": 1686 + }, + { + "epoch": 0.26, + "grad_norm": 15.77564466044492, + "learning_rate": 1.9825309976143807e-05, + "loss": 0.897, + "step": 1687 + }, + { + "epoch": 0.26, + "grad_norm": 19.416525951954807, + "learning_rate": 1.982499599926491e-05, + "loss": 0.798, + "step": 1688 + }, + { + "epoch": 0.26, + "grad_norm": 26.160292368889735, + "learning_rate": 1.9824681742968876e-05, + "loss": 0.812, + "step": 1689 + }, + { + "epoch": 0.26, + "grad_norm": 34.88096240990854, + "learning_rate": 1.9824367207264638e-05, + "loss": 0.9091, + "step": 1690 + }, + { + "epoch": 0.26, + "grad_norm": 36.20317577149408, + "learning_rate": 1.9824052392161148e-05, + "loss": 0.9105, + "step": 1691 + }, + { + "epoch": 0.26, + "grad_norm": 20.68920813017233, + "learning_rate": 1.982373729766735e-05, + "loss": 0.8965, + "step": 1692 + }, + { + "epoch": 0.26, + "grad_norm": 21.33353376344843, + "learning_rate": 1.9823421923792213e-05, + "loss": 0.9168, + "step": 1693 + }, + { + "epoch": 0.26, + "grad_norm": 25.1289374411941, + "learning_rate": 1.98231062705447e-05, + "loss": 0.9557, + "step": 1694 + }, + { + "epoch": 0.26, + "grad_norm": 22.92384134919337, + "learning_rate": 1.9822790337933795e-05, + "loss": 0.9023, + "step": 1695 + }, + { + "epoch": 0.26, + "grad_norm": 18.142957059410243, + "learning_rate": 1.9822474125968478e-05, + "loss": 0.7806, + "step": 1696 + }, + { + "epoch": 0.27, + "grad_norm": 18.00885413543742, + "learning_rate": 1.9822157634657742e-05, + "loss": 0.8202, + "step": 1697 + }, + { + "epoch": 0.27, + "grad_norm": 20.582682493009642, + "learning_rate": 1.9821840864010588e-05, + "loss": 0.8614, + "step": 1698 + }, + { + "epoch": 0.27, + "grad_norm": 17.35402551523379, + "learning_rate": 1.9821523814036026e-05, + "loss": 0.8824, + "step": 1699 + }, + { + "epoch": 0.27, + "grad_norm": 16.520273603716277, + "learning_rate": 1.9821206484743073e-05, + "loss": 0.8615, + "step": 1700 + }, + { + "epoch": 0.27, + "grad_norm": 12.00684525162505, + "learning_rate": 1.9820888876140752e-05, + "loss": 0.8243, + "step": 1701 + }, + { + "epoch": 0.27, + "grad_norm": 20.61042011017325, + "learning_rate": 1.9820570988238096e-05, + "loss": 0.8826, + "step": 1702 + }, + { + "epoch": 0.27, + "grad_norm": 21.07278778761485, + "learning_rate": 1.982025282104415e-05, + "loss": 0.7674, + "step": 1703 + }, + { 
+ "epoch": 0.27, + "grad_norm": 14.739100708209826, + "learning_rate": 1.981993437456795e-05, + "loss": 0.8416, + "step": 1704 + }, + { + "epoch": 0.27, + "grad_norm": 17.387826597579927, + "learning_rate": 1.9819615648818568e-05, + "loss": 0.8929, + "step": 1705 + }, + { + "epoch": 0.27, + "grad_norm": 24.54800838449656, + "learning_rate": 1.9819296643805058e-05, + "loss": 0.947, + "step": 1706 + }, + { + "epoch": 0.27, + "grad_norm": 27.16360430129431, + "learning_rate": 1.98189773595365e-05, + "loss": 0.9132, + "step": 1707 + }, + { + "epoch": 0.27, + "grad_norm": 21.937256023756593, + "learning_rate": 1.9818657796021963e-05, + "loss": 0.8758, + "step": 1708 + }, + { + "epoch": 0.27, + "grad_norm": 21.491781477037854, + "learning_rate": 1.9818337953270544e-05, + "loss": 0.9212, + "step": 1709 + }, + { + "epoch": 0.27, + "grad_norm": 22.04077979195327, + "learning_rate": 1.981801783129134e-05, + "loss": 0.8816, + "step": 1710 + }, + { + "epoch": 0.27, + "grad_norm": 18.636449283785787, + "learning_rate": 1.981769743009345e-05, + "loss": 0.8137, + "step": 1711 + }, + { + "epoch": 0.27, + "grad_norm": 29.107738028061473, + "learning_rate": 1.9817376749685994e-05, + "loss": 0.8802, + "step": 1712 + }, + { + "epoch": 0.27, + "grad_norm": 26.23181844435767, + "learning_rate": 1.981705579007808e-05, + "loss": 1.1021, + "step": 1713 + }, + { + "epoch": 0.27, + "grad_norm": 17.399339037603546, + "learning_rate": 1.9816734551278843e-05, + "loss": 0.8471, + "step": 1714 + }, + { + "epoch": 0.27, + "grad_norm": 16.343173807355683, + "learning_rate": 1.981641303329742e-05, + "loss": 0.938, + "step": 1715 + }, + { + "epoch": 0.27, + "grad_norm": 19.64286212425664, + "learning_rate": 1.981609123614295e-05, + "loss": 0.9954, + "step": 1716 + }, + { + "epoch": 0.27, + "grad_norm": 30.577373505150504, + "learning_rate": 1.981576915982459e-05, + "loss": 0.9168, + "step": 1717 + }, + { + "epoch": 0.27, + "grad_norm": 22.085219672148874, + "learning_rate": 1.9815446804351495e-05, + "loss": 0.8749, + "step": 1718 + }, + { + "epoch": 0.27, + "grad_norm": 20.99618885817167, + "learning_rate": 1.9815124169732834e-05, + "loss": 0.8716, + "step": 1719 + }, + { + "epoch": 0.27, + "grad_norm": 24.651759864107493, + "learning_rate": 1.9814801255977786e-05, + "loss": 0.9354, + "step": 1720 + }, + { + "epoch": 0.27, + "grad_norm": 23.447190975021126, + "learning_rate": 1.9814478063095533e-05, + "loss": 1.0228, + "step": 1721 + }, + { + "epoch": 0.27, + "grad_norm": 27.748623575943913, + "learning_rate": 1.9814154591095262e-05, + "loss": 0.8155, + "step": 1722 + }, + { + "epoch": 0.27, + "grad_norm": 15.932456202019589, + "learning_rate": 1.9813830839986177e-05, + "loss": 0.8719, + "step": 1723 + }, + { + "epoch": 0.27, + "grad_norm": 16.80784905725702, + "learning_rate": 1.9813506809777483e-05, + "loss": 0.9, + "step": 1724 + }, + { + "epoch": 0.27, + "grad_norm": 28.68492081003693, + "learning_rate": 1.9813182500478395e-05, + "loss": 0.9751, + "step": 1725 + }, + { + "epoch": 0.27, + "grad_norm": 18.15240916369025, + "learning_rate": 1.981285791209814e-05, + "loss": 0.8592, + "step": 1726 + }, + { + "epoch": 0.27, + "grad_norm": 28.619245552542825, + "learning_rate": 1.9812533044645945e-05, + "loss": 0.9925, + "step": 1727 + }, + { + "epoch": 0.27, + "grad_norm": 32.54050154142809, + "learning_rate": 1.981220789813105e-05, + "loss": 0.9115, + "step": 1728 + }, + { + "epoch": 0.27, + "grad_norm": 18.369549080638826, + "learning_rate": 1.9811882472562703e-05, + "loss": 0.9084, + "step": 1729 + }, + { + "epoch": 0.27, + 
"grad_norm": 28.557472124537792, + "learning_rate": 1.9811556767950158e-05, + "loss": 0.9147, + "step": 1730 + }, + { + "epoch": 0.27, + "grad_norm": 20.197237848997407, + "learning_rate": 1.981123078430268e-05, + "loss": 0.8611, + "step": 1731 + }, + { + "epoch": 0.27, + "grad_norm": 23.070902991884388, + "learning_rate": 1.9810904521629534e-05, + "loss": 0.813, + "step": 1732 + }, + { + "epoch": 0.27, + "grad_norm": 20.363541672221512, + "learning_rate": 1.9810577979940006e-05, + "loss": 0.8972, + "step": 1733 + }, + { + "epoch": 0.27, + "grad_norm": 20.100392094047965, + "learning_rate": 1.9810251159243378e-05, + "loss": 0.9072, + "step": 1734 + }, + { + "epoch": 0.27, + "grad_norm": 23.17521346846764, + "learning_rate": 1.9809924059548944e-05, + "loss": 0.8894, + "step": 1735 + }, + { + "epoch": 0.27, + "grad_norm": 14.59224159932445, + "learning_rate": 1.980959668086601e-05, + "loss": 0.9422, + "step": 1736 + }, + { + "epoch": 0.27, + "grad_norm": 26.431800013074078, + "learning_rate": 1.9809269023203887e-05, + "loss": 0.8817, + "step": 1737 + }, + { + "epoch": 0.27, + "grad_norm": 21.204420327753652, + "learning_rate": 1.9808941086571888e-05, + "loss": 0.8591, + "step": 1738 + }, + { + "epoch": 0.27, + "grad_norm": 24.24064240124899, + "learning_rate": 1.9808612870979346e-05, + "loss": 0.8312, + "step": 1739 + }, + { + "epoch": 0.27, + "grad_norm": 16.829441547252312, + "learning_rate": 1.9808284376435588e-05, + "loss": 0.7915, + "step": 1740 + }, + { + "epoch": 0.27, + "grad_norm": 16.05777680683614, + "learning_rate": 1.9807955602949962e-05, + "loss": 0.8875, + "step": 1741 + }, + { + "epoch": 0.27, + "grad_norm": 28.82942523608552, + "learning_rate": 1.9807626550531816e-05, + "loss": 0.9477, + "step": 1742 + }, + { + "epoch": 0.27, + "grad_norm": 13.526141031429328, + "learning_rate": 1.9807297219190508e-05, + "loss": 0.865, + "step": 1743 + }, + { + "epoch": 0.27, + "grad_norm": 21.71654978111612, + "learning_rate": 1.9806967608935403e-05, + "loss": 0.8723, + "step": 1744 + }, + { + "epoch": 0.27, + "grad_norm": 20.122209608729346, + "learning_rate": 1.980663771977588e-05, + "loss": 0.8894, + "step": 1745 + }, + { + "epoch": 0.27, + "grad_norm": 18.36792613612658, + "learning_rate": 1.9806307551721313e-05, + "loss": 0.9009, + "step": 1746 + }, + { + "epoch": 0.27, + "grad_norm": 15.668873128133317, + "learning_rate": 1.9805977104781096e-05, + "loss": 0.858, + "step": 1747 + }, + { + "epoch": 0.27, + "grad_norm": 21.70445764376114, + "learning_rate": 1.9805646378964626e-05, + "loss": 0.8944, + "step": 1748 + }, + { + "epoch": 0.27, + "grad_norm": 24.62086584393321, + "learning_rate": 1.9805315374281314e-05, + "loss": 0.8313, + "step": 1749 + }, + { + "epoch": 0.27, + "grad_norm": 26.815314747600738, + "learning_rate": 1.9804984090740563e-05, + "loss": 0.9241, + "step": 1750 + }, + { + "epoch": 0.27, + "grad_norm": 31.07602689554703, + "learning_rate": 1.9804652528351806e-05, + "loss": 0.8719, + "step": 1751 + }, + { + "epoch": 0.27, + "grad_norm": 4.43906421023547, + "learning_rate": 1.980432068712446e-05, + "loss": 0.9143, + "step": 1752 + }, + { + "epoch": 0.27, + "grad_norm": 17.522436014708436, + "learning_rate": 1.9803988567067975e-05, + "loss": 0.9915, + "step": 1753 + }, + { + "epoch": 0.27, + "grad_norm": 22.50613093822696, + "learning_rate": 1.9803656168191786e-05, + "loss": 0.901, + "step": 1754 + }, + { + "epoch": 0.27, + "grad_norm": 17.236064247078225, + "learning_rate": 1.9803323490505352e-05, + "loss": 0.8827, + "step": 1755 + }, + { + "epoch": 0.27, + "grad_norm": 
17.635590551628425, + "learning_rate": 1.9802990534018135e-05, + "loss": 0.8642, + "step": 1756 + }, + { + "epoch": 0.27, + "grad_norm": 18.513616355557605, + "learning_rate": 1.9802657298739598e-05, + "loss": 0.9179, + "step": 1757 + }, + { + "epoch": 0.27, + "grad_norm": 14.952587424952117, + "learning_rate": 1.9802323784679225e-05, + "loss": 0.8081, + "step": 1758 + }, + { + "epoch": 0.27, + "grad_norm": 34.995843557082345, + "learning_rate": 1.9801989991846495e-05, + "loss": 0.9588, + "step": 1759 + }, + { + "epoch": 0.27, + "grad_norm": 18.433058171333993, + "learning_rate": 1.9801655920250903e-05, + "loss": 0.9047, + "step": 1760 + }, + { + "epoch": 0.28, + "grad_norm": 27.19085417697937, + "learning_rate": 1.9801321569901955e-05, + "loss": 0.8589, + "step": 1761 + }, + { + "epoch": 0.28, + "grad_norm": 11.490056017876276, + "learning_rate": 1.980098694080915e-05, + "loss": 0.7968, + "step": 1762 + }, + { + "epoch": 0.28, + "grad_norm": 10.330236244397957, + "learning_rate": 1.9800652032982013e-05, + "loss": 1.0385, + "step": 1763 + }, + { + "epoch": 0.28, + "grad_norm": 24.55127119426707, + "learning_rate": 1.9800316846430064e-05, + "loss": 0.9348, + "step": 1764 + }, + { + "epoch": 0.28, + "grad_norm": 21.18259629186154, + "learning_rate": 1.979998138116284e-05, + "loss": 0.9195, + "step": 1765 + }, + { + "epoch": 0.28, + "grad_norm": 20.289223537753408, + "learning_rate": 1.9799645637189875e-05, + "loss": 0.9477, + "step": 1766 + }, + { + "epoch": 0.28, + "grad_norm": 14.693346282413629, + "learning_rate": 1.979930961452072e-05, + "loss": 0.783, + "step": 1767 + }, + { + "epoch": 0.28, + "grad_norm": 25.326541681708854, + "learning_rate": 1.9798973313164934e-05, + "loss": 0.881, + "step": 1768 + }, + { + "epoch": 0.28, + "grad_norm": 31.889067523030125, + "learning_rate": 1.979863673313208e-05, + "loss": 0.9513, + "step": 1769 + }, + { + "epoch": 0.28, + "grad_norm": 21.741223782349362, + "learning_rate": 1.979829987443173e-05, + "loss": 0.881, + "step": 1770 + }, + { + "epoch": 0.28, + "grad_norm": 15.742427767299679, + "learning_rate": 1.9797962737073456e-05, + "loss": 0.8532, + "step": 1771 + }, + { + "epoch": 0.28, + "grad_norm": 26.50449467949628, + "learning_rate": 1.9797625321066863e-05, + "loss": 1.0252, + "step": 1772 + }, + { + "epoch": 0.28, + "grad_norm": 21.619867725984435, + "learning_rate": 1.9797287626421534e-05, + "loss": 0.8524, + "step": 1773 + }, + { + "epoch": 0.28, + "grad_norm": 26.785282296602777, + "learning_rate": 1.9796949653147073e-05, + "loss": 0.8381, + "step": 1774 + }, + { + "epoch": 0.28, + "grad_norm": 19.71690646061289, + "learning_rate": 1.97966114012531e-05, + "loss": 0.9848, + "step": 1775 + }, + { + "epoch": 0.28, + "grad_norm": 14.441185843013914, + "learning_rate": 1.9796272870749225e-05, + "loss": 0.8482, + "step": 1776 + }, + { + "epoch": 0.28, + "grad_norm": 19.0942923208159, + "learning_rate": 1.9795934061645084e-05, + "loss": 0.8812, + "step": 1777 + }, + { + "epoch": 0.28, + "grad_norm": 12.787196562122961, + "learning_rate": 1.9795594973950305e-05, + "loss": 0.9134, + "step": 1778 + }, + { + "epoch": 0.28, + "grad_norm": 25.1654330487038, + "learning_rate": 1.979525560767454e-05, + "loss": 0.9075, + "step": 1779 + }, + { + "epoch": 0.28, + "grad_norm": 16.38385296156733, + "learning_rate": 1.9794915962827427e-05, + "loss": 0.7823, + "step": 1780 + }, + { + "epoch": 0.28, + "grad_norm": 22.93106760878871, + "learning_rate": 1.9794576039418643e-05, + "loss": 0.8569, + "step": 1781 + }, + { + "epoch": 0.28, + "grad_norm": 
17.028411733726802, + "learning_rate": 1.979423583745784e-05, + "loss": 0.8344, + "step": 1782 + }, + { + "epoch": 0.28, + "grad_norm": 19.801500937619682, + "learning_rate": 1.9793895356954704e-05, + "loss": 0.8412, + "step": 1783 + }, + { + "epoch": 0.28, + "grad_norm": 21.849405129691057, + "learning_rate": 1.9793554597918908e-05, + "loss": 0.9314, + "step": 1784 + }, + { + "epoch": 0.28, + "grad_norm": 19.8297964628564, + "learning_rate": 1.9793213560360153e-05, + "loss": 0.8966, + "step": 1785 + }, + { + "epoch": 0.28, + "grad_norm": 19.357410765379026, + "learning_rate": 1.9792872244288132e-05, + "loss": 0.8239, + "step": 1786 + }, + { + "epoch": 0.28, + "grad_norm": 17.137299185445123, + "learning_rate": 1.9792530649712552e-05, + "loss": 0.8157, + "step": 1787 + }, + { + "epoch": 0.28, + "grad_norm": 15.96843058682852, + "learning_rate": 1.979218877664313e-05, + "loss": 0.8441, + "step": 1788 + }, + { + "epoch": 0.28, + "grad_norm": 22.4538187763964, + "learning_rate": 1.9791846625089588e-05, + "loss": 0.7619, + "step": 1789 + }, + { + "epoch": 0.28, + "grad_norm": 29.816475872016984, + "learning_rate": 1.9791504195061655e-05, + "loss": 0.8856, + "step": 1790 + }, + { + "epoch": 0.28, + "grad_norm": 19.022937439097497, + "learning_rate": 1.9791161486569072e-05, + "loss": 0.9113, + "step": 1791 + }, + { + "epoch": 0.28, + "grad_norm": 24.92024545155266, + "learning_rate": 1.979081849962158e-05, + "loss": 0.984, + "step": 1792 + }, + { + "epoch": 0.28, + "grad_norm": 15.34857719844794, + "learning_rate": 1.979047523422894e-05, + "loss": 0.8896, + "step": 1793 + }, + { + "epoch": 0.28, + "grad_norm": 23.357809398328623, + "learning_rate": 1.9790131690400913e-05, + "loss": 0.9505, + "step": 1794 + }, + { + "epoch": 0.28, + "grad_norm": 13.204969814461755, + "learning_rate": 1.9789787868147265e-05, + "loss": 0.7692, + "step": 1795 + }, + { + "epoch": 0.28, + "grad_norm": 19.921512275834388, + "learning_rate": 1.9789443767477778e-05, + "loss": 0.9257, + "step": 1796 + }, + { + "epoch": 0.28, + "grad_norm": 21.092290704193367, + "learning_rate": 1.978909938840224e-05, + "loss": 0.8839, + "step": 1797 + }, + { + "epoch": 0.28, + "grad_norm": 21.555757245780182, + "learning_rate": 1.9788754730930436e-05, + "loss": 1.0024, + "step": 1798 + }, + { + "epoch": 0.28, + "grad_norm": 16.588832850024062, + "learning_rate": 1.9788409795072175e-05, + "loss": 0.8748, + "step": 1799 + }, + { + "epoch": 0.28, + "grad_norm": 17.08744310269793, + "learning_rate": 1.978806458083727e-05, + "loss": 0.9688, + "step": 1800 + }, + { + "epoch": 0.28, + "grad_norm": 13.231640853636327, + "learning_rate": 1.978771908823553e-05, + "loss": 0.8112, + "step": 1801 + }, + { + "epoch": 0.28, + "grad_norm": 22.850053553523697, + "learning_rate": 1.978737331727679e-05, + "loss": 0.9087, + "step": 1802 + }, + { + "epoch": 0.28, + "grad_norm": 24.854705950518575, + "learning_rate": 1.9787027267970873e-05, + "loss": 0.8585, + "step": 1803 + }, + { + "epoch": 0.28, + "grad_norm": 26.570283879099517, + "learning_rate": 1.9786680940327626e-05, + "loss": 0.9648, + "step": 1804 + }, + { + "epoch": 0.28, + "grad_norm": 24.773776774758392, + "learning_rate": 1.97863343343569e-05, + "loss": 0.762, + "step": 1805 + }, + { + "epoch": 0.28, + "grad_norm": 21.047349295762505, + "learning_rate": 1.978598745006855e-05, + "loss": 0.8808, + "step": 1806 + }, + { + "epoch": 0.28, + "grad_norm": 22.56834834060168, + "learning_rate": 1.978564028747244e-05, + "loss": 0.889, + "step": 1807 + }, + { + "epoch": 0.28, + "grad_norm": 
23.922200495363604, + "learning_rate": 1.9785292846578446e-05, + "loss": 0.8565, + "step": 1808 + }, + { + "epoch": 0.28, + "grad_norm": 27.401746957361137, + "learning_rate": 1.9784945127396446e-05, + "loss": 0.9528, + "step": 1809 + }, + { + "epoch": 0.28, + "grad_norm": 14.761854402797546, + "learning_rate": 1.9784597129936333e-05, + "loss": 0.753, + "step": 1810 + }, + { + "epoch": 0.28, + "grad_norm": 14.52069704734337, + "learning_rate": 1.9784248854208e-05, + "loss": 0.9209, + "step": 1811 + }, + { + "epoch": 0.28, + "grad_norm": 10.558703475252177, + "learning_rate": 1.9783900300221354e-05, + "loss": 0.7836, + "step": 1812 + }, + { + "epoch": 0.28, + "grad_norm": 20.251583693824134, + "learning_rate": 1.9783551467986307e-05, + "loss": 0.873, + "step": 1813 + }, + { + "epoch": 0.28, + "grad_norm": 26.622178118444687, + "learning_rate": 1.978320235751278e-05, + "loss": 0.9453, + "step": 1814 + }, + { + "epoch": 0.28, + "grad_norm": 19.12187551550468, + "learning_rate": 1.97828529688107e-05, + "loss": 0.9581, + "step": 1815 + }, + { + "epoch": 0.28, + "grad_norm": 23.286965899324226, + "learning_rate": 1.9782503301890004e-05, + "loss": 0.9024, + "step": 1816 + }, + { + "epoch": 0.28, + "grad_norm": 22.827516362290183, + "learning_rate": 1.9782153356760635e-05, + "loss": 0.8556, + "step": 1817 + }, + { + "epoch": 0.28, + "grad_norm": 21.34215166787253, + "learning_rate": 1.978180313343255e-05, + "loss": 0.8911, + "step": 1818 + }, + { + "epoch": 0.28, + "grad_norm": 18.600726753311086, + "learning_rate": 1.9781452631915705e-05, + "loss": 0.8825, + "step": 1819 + }, + { + "epoch": 0.28, + "grad_norm": 13.253950137496588, + "learning_rate": 1.9781101852220074e-05, + "loss": 0.8998, + "step": 1820 + }, + { + "epoch": 0.28, + "grad_norm": 22.370409260145085, + "learning_rate": 1.978075079435562e-05, + "loss": 0.9154, + "step": 1821 + }, + { + "epoch": 0.28, + "grad_norm": 19.678942426605634, + "learning_rate": 1.978039945833234e-05, + "loss": 0.9338, + "step": 1822 + }, + { + "epoch": 0.28, + "grad_norm": 17.99321255846877, + "learning_rate": 1.978004784416022e-05, + "loss": 0.9495, + "step": 1823 + }, + { + "epoch": 0.28, + "grad_norm": 15.751243995365353, + "learning_rate": 1.977969595184926e-05, + "loss": 0.895, + "step": 1824 + }, + { + "epoch": 0.29, + "grad_norm": 17.825625049592, + "learning_rate": 1.9779343781409466e-05, + "loss": 0.8204, + "step": 1825 + }, + { + "epoch": 0.29, + "grad_norm": 20.513645984582833, + "learning_rate": 1.9778991332850857e-05, + "loss": 0.8813, + "step": 1826 + }, + { + "epoch": 0.29, + "grad_norm": 20.849047481639463, + "learning_rate": 1.9778638606183453e-05, + "loss": 1.0108, + "step": 1827 + }, + { + "epoch": 0.29, + "grad_norm": 15.838968960727696, + "learning_rate": 1.977828560141729e-05, + "loss": 0.8283, + "step": 1828 + }, + { + "epoch": 0.29, + "grad_norm": 24.336703018712903, + "learning_rate": 1.9777932318562407e-05, + "loss": 0.9114, + "step": 1829 + }, + { + "epoch": 0.29, + "grad_norm": 23.559767149740388, + "learning_rate": 1.9777578757628845e-05, + "loss": 0.9584, + "step": 1830 + }, + { + "epoch": 0.29, + "grad_norm": 18.806822963396797, + "learning_rate": 1.9777224918626662e-05, + "loss": 0.8323, + "step": 1831 + }, + { + "epoch": 0.29, + "grad_norm": 19.578962190177567, + "learning_rate": 1.9776870801565924e-05, + "loss": 0.9118, + "step": 1832 + }, + { + "epoch": 0.29, + "grad_norm": 23.522935496446518, + "learning_rate": 1.97765164064567e-05, + "loss": 0.8278, + "step": 1833 + }, + { + "epoch": 0.29, + "grad_norm": 
19.73905184759165, + "learning_rate": 1.977616173330907e-05, + "loss": 0.8432, + "step": 1834 + }, + { + "epoch": 0.29, + "grad_norm": 15.574097636000438, + "learning_rate": 1.9775806782133114e-05, + "loss": 0.7777, + "step": 1835 + }, + { + "epoch": 0.29, + "grad_norm": 32.37142113747569, + "learning_rate": 1.9775451552938937e-05, + "loss": 0.9256, + "step": 1836 + }, + { + "epoch": 0.29, + "grad_norm": 23.42497999339028, + "learning_rate": 1.9775096045736634e-05, + "loss": 0.8323, + "step": 1837 + }, + { + "epoch": 0.29, + "grad_norm": 21.867717619627314, + "learning_rate": 1.9774740260536318e-05, + "loss": 0.7863, + "step": 1838 + }, + { + "epoch": 0.29, + "grad_norm": 17.43668407151477, + "learning_rate": 1.977438419734811e-05, + "loss": 0.8583, + "step": 1839 + }, + { + "epoch": 0.29, + "grad_norm": 23.98656454871355, + "learning_rate": 1.977402785618213e-05, + "loss": 0.9369, + "step": 1840 + }, + { + "epoch": 0.29, + "grad_norm": 18.480920694582508, + "learning_rate": 1.9773671237048514e-05, + "loss": 0.8054, + "step": 1841 + }, + { + "epoch": 0.29, + "grad_norm": 26.789256845271577, + "learning_rate": 1.977331433995741e-05, + "loss": 0.9995, + "step": 1842 + }, + { + "epoch": 0.29, + "grad_norm": 24.08763435503969, + "learning_rate": 1.977295716491896e-05, + "loss": 0.8417, + "step": 1843 + }, + { + "epoch": 0.29, + "grad_norm": 42.645517418034, + "learning_rate": 1.9772599711943323e-05, + "loss": 0.9394, + "step": 1844 + }, + { + "epoch": 0.29, + "grad_norm": 17.063569339318114, + "learning_rate": 1.977224198104067e-05, + "loss": 0.864, + "step": 1845 + }, + { + "epoch": 0.29, + "grad_norm": 21.15715535845419, + "learning_rate": 1.9771883972221174e-05, + "loss": 0.8319, + "step": 1846 + }, + { + "epoch": 0.29, + "grad_norm": 27.82915968741137, + "learning_rate": 1.977152568549501e-05, + "loss": 0.9725, + "step": 1847 + }, + { + "epoch": 0.29, + "grad_norm": 33.0771888236441, + "learning_rate": 1.9771167120872375e-05, + "loss": 0.8645, + "step": 1848 + }, + { + "epoch": 0.29, + "grad_norm": 20.287435605199015, + "learning_rate": 1.977080827836346e-05, + "loss": 0.9815, + "step": 1849 + }, + { + "epoch": 0.29, + "grad_norm": 12.977668332700398, + "learning_rate": 1.9770449157978475e-05, + "loss": 0.7676, + "step": 1850 + }, + { + "epoch": 0.29, + "grad_norm": 16.51914198257357, + "learning_rate": 1.9770089759727634e-05, + "loss": 0.8214, + "step": 1851 + }, + { + "epoch": 0.29, + "grad_norm": 22.3756017737208, + "learning_rate": 1.976973008362115e-05, + "loss": 0.8112, + "step": 1852 + }, + { + "epoch": 0.29, + "grad_norm": 22.287185008240183, + "learning_rate": 1.976937012966926e-05, + "loss": 0.7605, + "step": 1853 + }, + { + "epoch": 0.29, + "grad_norm": 17.322853428520098, + "learning_rate": 1.97690098978822e-05, + "loss": 0.9402, + "step": 1854 + }, + { + "epoch": 0.29, + "grad_norm": 38.27820211694408, + "learning_rate": 1.9768649388270213e-05, + "loss": 0.8972, + "step": 1855 + }, + { + "epoch": 0.29, + "grad_norm": 17.129150240127114, + "learning_rate": 1.976828860084355e-05, + "loss": 0.87, + "step": 1856 + }, + { + "epoch": 0.29, + "grad_norm": 18.448595222631848, + "learning_rate": 1.9767927535612473e-05, + "loss": 0.8432, + "step": 1857 + }, + { + "epoch": 0.29, + "grad_norm": 16.16610402139183, + "learning_rate": 1.9767566192587253e-05, + "loss": 0.8021, + "step": 1858 + }, + { + "epoch": 0.29, + "grad_norm": 25.94836865062978, + "learning_rate": 1.9767204571778163e-05, + "loss": 0.9226, + "step": 1859 + }, + { + "epoch": 0.29, + "grad_norm": 20.758907658204592, + 
"learning_rate": 1.976684267319549e-05, + "loss": 0.8218, + "step": 1860 + }, + { + "epoch": 0.29, + "grad_norm": 26.600530844694067, + "learning_rate": 1.9766480496849527e-05, + "loss": 0.8165, + "step": 1861 + }, + { + "epoch": 0.29, + "grad_norm": 19.589273431906122, + "learning_rate": 1.9766118042750566e-05, + "loss": 0.7289, + "step": 1862 + }, + { + "epoch": 0.29, + "grad_norm": 20.449142527324344, + "learning_rate": 1.9765755310908925e-05, + "loss": 0.8735, + "step": 1863 + }, + { + "epoch": 0.29, + "grad_norm": 19.13288460527496, + "learning_rate": 1.9765392301334912e-05, + "loss": 0.8423, + "step": 1864 + }, + { + "epoch": 0.29, + "grad_norm": 24.610665193675995, + "learning_rate": 1.976502901403886e-05, + "loss": 0.9759, + "step": 1865 + }, + { + "epoch": 0.29, + "grad_norm": 17.8569561545966, + "learning_rate": 1.9764665449031093e-05, + "loss": 0.9654, + "step": 1866 + }, + { + "epoch": 0.29, + "grad_norm": 25.353380535986272, + "learning_rate": 1.976430160632195e-05, + "loss": 0.8497, + "step": 1867 + }, + { + "epoch": 0.29, + "grad_norm": 30.508107354647823, + "learning_rate": 1.9763937485921785e-05, + "loss": 0.9529, + "step": 1868 + }, + { + "epoch": 0.29, + "grad_norm": 22.969245304229776, + "learning_rate": 1.976357308784095e-05, + "loss": 0.8159, + "step": 1869 + }, + { + "epoch": 0.29, + "grad_norm": 37.45412320798896, + "learning_rate": 1.9763208412089802e-05, + "loss": 0.785, + "step": 1870 + }, + { + "epoch": 0.29, + "grad_norm": 16.87231431358377, + "learning_rate": 1.9762843458678723e-05, + "loss": 0.8465, + "step": 1871 + }, + { + "epoch": 0.29, + "grad_norm": 25.83016601028346, + "learning_rate": 1.976247822761809e-05, + "loss": 0.8687, + "step": 1872 + }, + { + "epoch": 0.29, + "grad_norm": 18.515231235357167, + "learning_rate": 1.976211271891828e-05, + "loss": 0.9696, + "step": 1873 + }, + { + "epoch": 0.29, + "grad_norm": 20.620431983707377, + "learning_rate": 1.97617469325897e-05, + "loss": 0.8897, + "step": 1874 + }, + { + "epoch": 0.29, + "grad_norm": 28.371820075238965, + "learning_rate": 1.9761380868642745e-05, + "loss": 1.0011, + "step": 1875 + }, + { + "epoch": 0.29, + "grad_norm": 14.977174386233976, + "learning_rate": 1.9761014527087828e-05, + "loss": 0.9175, + "step": 1876 + }, + { + "epoch": 0.29, + "grad_norm": 19.64465399427102, + "learning_rate": 1.9760647907935372e-05, + "loss": 0.7933, + "step": 1877 + }, + { + "epoch": 0.29, + "grad_norm": 18.79532450577986, + "learning_rate": 1.9760281011195793e-05, + "loss": 0.8207, + "step": 1878 + }, + { + "epoch": 0.29, + "grad_norm": 24.920172704359203, + "learning_rate": 1.9759913836879535e-05, + "loss": 0.8858, + "step": 1879 + }, + { + "epoch": 0.29, + "grad_norm": 19.638220166399208, + "learning_rate": 1.9759546384997042e-05, + "loss": 0.9311, + "step": 1880 + }, + { + "epoch": 0.29, + "grad_norm": 14.781828814539178, + "learning_rate": 1.975917865555875e-05, + "loss": 0.8638, + "step": 1881 + }, + { + "epoch": 0.29, + "grad_norm": 21.054452944353176, + "learning_rate": 1.9758810648575132e-05, + "loss": 0.8943, + "step": 1882 + }, + { + "epoch": 0.29, + "grad_norm": 26.283434521357467, + "learning_rate": 1.9758442364056643e-05, + "loss": 0.873, + "step": 1883 + }, + { + "epoch": 0.29, + "grad_norm": 15.699712329327701, + "learning_rate": 1.975807380201377e-05, + "loss": 0.8444, + "step": 1884 + }, + { + "epoch": 0.29, + "grad_norm": 14.492083412886533, + "learning_rate": 1.9757704962456978e-05, + "loss": 0.9192, + "step": 1885 + }, + { + "epoch": 0.29, + "grad_norm": 27.698089892390986, + 
"learning_rate": 1.9757335845396768e-05, + "loss": 0.898, + "step": 1886 + }, + { + "epoch": 0.29, + "grad_norm": 35.97715392895058, + "learning_rate": 1.9756966450843635e-05, + "loss": 0.9176, + "step": 1887 + }, + { + "epoch": 0.29, + "grad_norm": 18.065809891214382, + "learning_rate": 1.9756596778808085e-05, + "loss": 0.862, + "step": 1888 + }, + { + "epoch": 0.3, + "grad_norm": 29.38781764166271, + "learning_rate": 1.9756226829300628e-05, + "loss": 0.8988, + "step": 1889 + }, + { + "epoch": 0.3, + "grad_norm": 16.457850544725364, + "learning_rate": 1.9755856602331788e-05, + "loss": 0.8768, + "step": 1890 + }, + { + "epoch": 0.3, + "grad_norm": 13.788459257811533, + "learning_rate": 1.9755486097912092e-05, + "loss": 0.8156, + "step": 1891 + }, + { + "epoch": 0.3, + "grad_norm": 23.2677302948353, + "learning_rate": 1.9755115316052076e-05, + "loss": 0.7801, + "step": 1892 + }, + { + "epoch": 0.3, + "grad_norm": 22.212946392362642, + "learning_rate": 1.9754744256762293e-05, + "loss": 0.8462, + "step": 1893 + }, + { + "epoch": 0.3, + "grad_norm": 24.414810513443292, + "learning_rate": 1.9754372920053284e-05, + "loss": 0.9012, + "step": 1894 + }, + { + "epoch": 0.3, + "grad_norm": 19.37392721590177, + "learning_rate": 1.975400130593562e-05, + "loss": 0.8598, + "step": 1895 + }, + { + "epoch": 0.3, + "grad_norm": 25.597424221282452, + "learning_rate": 1.975362941441986e-05, + "loss": 0.9248, + "step": 1896 + }, + { + "epoch": 0.3, + "grad_norm": 15.675500822089871, + "learning_rate": 1.975325724551659e-05, + "loss": 0.9456, + "step": 1897 + }, + { + "epoch": 0.3, + "grad_norm": 15.55818149337485, + "learning_rate": 1.9752884799236387e-05, + "loss": 0.7981, + "step": 1898 + }, + { + "epoch": 0.3, + "grad_norm": 25.309976139340897, + "learning_rate": 1.9752512075589844e-05, + "loss": 0.9044, + "step": 1899 + }, + { + "epoch": 0.3, + "grad_norm": 22.143745146314007, + "learning_rate": 1.9752139074587563e-05, + "loss": 0.8209, + "step": 1900 + }, + { + "epoch": 0.3, + "grad_norm": 20.12003667117563, + "learning_rate": 1.9751765796240153e-05, + "loss": 0.8165, + "step": 1901 + }, + { + "epoch": 0.3, + "grad_norm": 14.426752350094178, + "learning_rate": 1.9751392240558228e-05, + "loss": 0.9043, + "step": 1902 + }, + { + "epoch": 0.3, + "grad_norm": 20.94066089000282, + "learning_rate": 1.9751018407552417e-05, + "loss": 0.9336, + "step": 1903 + }, + { + "epoch": 0.3, + "grad_norm": 18.89384598324315, + "learning_rate": 1.9750644297233342e-05, + "loss": 0.8236, + "step": 1904 + }, + { + "epoch": 0.3, + "grad_norm": 19.065600030567253, + "learning_rate": 1.9750269909611645e-05, + "loss": 0.9207, + "step": 1905 + }, + { + "epoch": 0.3, + "grad_norm": 19.548708685621445, + "learning_rate": 1.9749895244697978e-05, + "loss": 0.8082, + "step": 1906 + }, + { + "epoch": 0.3, + "grad_norm": 18.02616298028141, + "learning_rate": 1.9749520302502993e-05, + "loss": 0.7811, + "step": 1907 + }, + { + "epoch": 0.3, + "grad_norm": 10.92885723191646, + "learning_rate": 1.9749145083037354e-05, + "loss": 0.8268, + "step": 1908 + }, + { + "epoch": 0.3, + "grad_norm": 21.275090474531414, + "learning_rate": 1.9748769586311732e-05, + "loss": 0.7387, + "step": 1909 + }, + { + "epoch": 0.3, + "grad_norm": 17.556192712972138, + "learning_rate": 1.9748393812336807e-05, + "loss": 0.798, + "step": 1910 + }, + { + "epoch": 0.3, + "grad_norm": 29.336131973739484, + "learning_rate": 1.9748017761123265e-05, + "loss": 0.8965, + "step": 1911 + }, + { + "epoch": 0.3, + "grad_norm": 22.347891191708836, + "learning_rate": 
1.97476414326818e-05, + "loss": 0.925, + "step": 1912 + }, + { + "epoch": 0.3, + "grad_norm": 26.474903759362682, + "learning_rate": 1.9747264827023113e-05, + "loss": 0.8234, + "step": 1913 + }, + { + "epoch": 0.3, + "grad_norm": 20.728841426987962, + "learning_rate": 1.974688794415792e-05, + "loss": 0.9392, + "step": 1914 + }, + { + "epoch": 0.3, + "grad_norm": 18.138012152042396, + "learning_rate": 1.9746510784096928e-05, + "loss": 0.7939, + "step": 1915 + }, + { + "epoch": 0.3, + "grad_norm": 16.234423786012925, + "learning_rate": 1.974613334685088e-05, + "loss": 0.8185, + "step": 1916 + }, + { + "epoch": 0.3, + "grad_norm": 22.95480920560591, + "learning_rate": 1.9745755632430496e-05, + "loss": 0.925, + "step": 1917 + }, + { + "epoch": 0.3, + "grad_norm": 21.165180219028983, + "learning_rate": 1.9745377640846523e-05, + "loss": 0.9045, + "step": 1918 + }, + { + "epoch": 0.3, + "grad_norm": 16.59620959701154, + "learning_rate": 1.974499937210971e-05, + "loss": 0.8652, + "step": 1919 + }, + { + "epoch": 0.3, + "grad_norm": 14.839764076434742, + "learning_rate": 1.974462082623082e-05, + "loss": 0.7006, + "step": 1920 + }, + { + "epoch": 0.3, + "grad_norm": 34.00717524065735, + "learning_rate": 1.9744242003220607e-05, + "loss": 0.9767, + "step": 1921 + }, + { + "epoch": 0.3, + "grad_norm": 38.31057661931976, + "learning_rate": 1.9743862903089857e-05, + "loss": 0.9409, + "step": 1922 + }, + { + "epoch": 0.3, + "grad_norm": 25.645945820431308, + "learning_rate": 1.9743483525849345e-05, + "loss": 0.8992, + "step": 1923 + }, + { + "epoch": 0.3, + "grad_norm": 20.038228969081157, + "learning_rate": 1.974310387150986e-05, + "loss": 0.8825, + "step": 1924 + }, + { + "epoch": 0.3, + "grad_norm": 22.159937852966443, + "learning_rate": 1.9742723940082203e-05, + "loss": 0.8062, + "step": 1925 + }, + { + "epoch": 0.3, + "grad_norm": 29.16880505023091, + "learning_rate": 1.9742343731577173e-05, + "loss": 0.8642, + "step": 1926 + }, + { + "epoch": 0.3, + "grad_norm": 15.390617242801474, + "learning_rate": 1.9741963246005592e-05, + "loss": 0.8687, + "step": 1927 + }, + { + "epoch": 0.3, + "grad_norm": 18.389926695388457, + "learning_rate": 1.974158248337827e-05, + "loss": 0.8399, + "step": 1928 + }, + { + "epoch": 0.3, + "grad_norm": 20.723959075932196, + "learning_rate": 1.9741201443706042e-05, + "loss": 0.7871, + "step": 1929 + }, + { + "epoch": 0.3, + "grad_norm": 17.139668398846528, + "learning_rate": 1.9740820126999743e-05, + "loss": 0.9488, + "step": 1930 + }, + { + "epoch": 0.3, + "grad_norm": 26.27604148544649, + "learning_rate": 1.9740438533270218e-05, + "loss": 0.8128, + "step": 1931 + }, + { + "epoch": 0.3, + "grad_norm": 17.72623610921065, + "learning_rate": 1.9740056662528318e-05, + "loss": 0.8657, + "step": 1932 + }, + { + "epoch": 0.3, + "grad_norm": 24.8832880342997, + "learning_rate": 1.9739674514784906e-05, + "loss": 0.8405, + "step": 1933 + }, + { + "epoch": 0.3, + "grad_norm": 29.337330406431807, + "learning_rate": 1.973929209005085e-05, + "loss": 0.918, + "step": 1934 + }, + { + "epoch": 0.3, + "grad_norm": 30.514234999501607, + "learning_rate": 1.9738909388337023e-05, + "loss": 0.9271, + "step": 1935 + }, + { + "epoch": 0.3, + "grad_norm": 16.198109752315794, + "learning_rate": 1.973852640965431e-05, + "loss": 0.7544, + "step": 1936 + }, + { + "epoch": 0.3, + "grad_norm": 29.291033052491546, + "learning_rate": 1.9738143154013603e-05, + "loss": 0.9277, + "step": 1937 + }, + { + "epoch": 0.3, + "grad_norm": 23.874831114790798, + "learning_rate": 1.9737759621425797e-05, + "loss": 
0.8836, + "step": 1938 + }, + { + "epoch": 0.3, + "grad_norm": 17.85217318481541, + "learning_rate": 1.973737581190181e-05, + "loss": 0.8899, + "step": 1939 + }, + { + "epoch": 0.3, + "grad_norm": 17.54350282921674, + "learning_rate": 1.973699172545255e-05, + "loss": 0.7964, + "step": 1940 + }, + { + "epoch": 0.3, + "grad_norm": 26.825471069158436, + "learning_rate": 1.973660736208894e-05, + "loss": 0.9965, + "step": 1941 + }, + { + "epoch": 0.3, + "grad_norm": 30.974556993406168, + "learning_rate": 1.973622272182191e-05, + "loss": 0.9037, + "step": 1942 + }, + { + "epoch": 0.3, + "grad_norm": 22.06262056315065, + "learning_rate": 1.9735837804662404e-05, + "loss": 0.9677, + "step": 1943 + }, + { + "epoch": 0.3, + "grad_norm": 15.281524040440718, + "learning_rate": 1.9735452610621366e-05, + "loss": 0.7922, + "step": 1944 + }, + { + "epoch": 0.3, + "grad_norm": 23.790383033869862, + "learning_rate": 1.9735067139709753e-05, + "loss": 0.8615, + "step": 1945 + }, + { + "epoch": 0.3, + "grad_norm": 28.5259578759439, + "learning_rate": 1.973468139193852e-05, + "loss": 0.9008, + "step": 1946 + }, + { + "epoch": 0.3, + "grad_norm": 30.496352395834368, + "learning_rate": 1.9734295367318648e-05, + "loss": 0.9198, + "step": 1947 + }, + { + "epoch": 0.3, + "grad_norm": 15.299607119627156, + "learning_rate": 1.9733909065861107e-05, + "loss": 0.8284, + "step": 1948 + }, + { + "epoch": 0.3, + "grad_norm": 20.589582219757954, + "learning_rate": 1.9733522487576884e-05, + "loss": 0.8235, + "step": 1949 + }, + { + "epoch": 0.3, + "grad_norm": 25.360756131942562, + "learning_rate": 1.973313563247698e-05, + "loss": 0.8538, + "step": 1950 + }, + { + "epoch": 0.3, + "grad_norm": 23.774548061178905, + "learning_rate": 1.9732748500572395e-05, + "loss": 0.8256, + "step": 1951 + }, + { + "epoch": 0.3, + "grad_norm": 13.7938465135125, + "learning_rate": 1.973236109187413e-05, + "loss": 0.8148, + "step": 1952 + }, + { + "epoch": 0.31, + "grad_norm": 20.9068540534322, + "learning_rate": 1.973197340639321e-05, + "loss": 0.8526, + "step": 1953 + }, + { + "epoch": 0.31, + "grad_norm": 24.386475881564493, + "learning_rate": 1.973158544414066e-05, + "loss": 0.8164, + "step": 1954 + }, + { + "epoch": 0.31, + "grad_norm": 17.91021109918913, + "learning_rate": 1.9731197205127514e-05, + "loss": 0.7654, + "step": 1955 + }, + { + "epoch": 0.31, + "grad_norm": 15.011544735068918, + "learning_rate": 1.973080868936481e-05, + "loss": 0.8016, + "step": 1956 + }, + { + "epoch": 0.31, + "grad_norm": 16.43234175575187, + "learning_rate": 1.9730419896863603e-05, + "loss": 0.7588, + "step": 1957 + }, + { + "epoch": 0.31, + "grad_norm": 21.63898731879432, + "learning_rate": 1.9730030827634942e-05, + "loss": 0.9247, + "step": 1958 + }, + { + "epoch": 0.31, + "grad_norm": 20.99724500329837, + "learning_rate": 1.9729641481689897e-05, + "loss": 0.8635, + "step": 1959 + }, + { + "epoch": 0.31, + "grad_norm": 20.48537461833763, + "learning_rate": 1.972925185903954e-05, + "loss": 0.8497, + "step": 1960 + }, + { + "epoch": 0.31, + "grad_norm": 22.691299742929573, + "learning_rate": 1.9728861959694957e-05, + "loss": 0.8473, + "step": 1961 + }, + { + "epoch": 0.31, + "grad_norm": 17.4133431263369, + "learning_rate": 1.9728471783667226e-05, + "loss": 0.7965, + "step": 1962 + }, + { + "epoch": 0.31, + "grad_norm": 15.797636299579123, + "learning_rate": 1.9728081330967448e-05, + "loss": 0.8817, + "step": 1963 + }, + { + "epoch": 0.31, + "grad_norm": 21.551570678047835, + "learning_rate": 1.972769060160673e-05, + "loss": 0.9161, + "step": 1964 + }, + { + 
"epoch": 0.31, + "grad_norm": 24.364141629164624, + "learning_rate": 1.9727299595596178e-05, + "loss": 0.9401, + "step": 1965 + }, + { + "epoch": 0.31, + "grad_norm": 19.65152058387248, + "learning_rate": 1.9726908312946918e-05, + "loss": 0.7555, + "step": 1966 + }, + { + "epoch": 0.31, + "grad_norm": 19.519633299946516, + "learning_rate": 1.9726516753670078e-05, + "loss": 0.9234, + "step": 1967 + }, + { + "epoch": 0.31, + "grad_norm": 20.187621768784943, + "learning_rate": 1.972612491777679e-05, + "loss": 0.8401, + "step": 1968 + }, + { + "epoch": 0.31, + "grad_norm": 21.113322729939835, + "learning_rate": 1.9725732805278198e-05, + "loss": 0.8784, + "step": 1969 + }, + { + "epoch": 0.31, + "grad_norm": 24.121764926298493, + "learning_rate": 1.9725340416185456e-05, + "loss": 0.7902, + "step": 1970 + }, + { + "epoch": 0.31, + "grad_norm": 14.984586533215857, + "learning_rate": 1.9724947750509718e-05, + "loss": 0.8074, + "step": 1971 + }, + { + "epoch": 0.31, + "grad_norm": 19.058733451536096, + "learning_rate": 1.9724554808262157e-05, + "loss": 0.9017, + "step": 1972 + }, + { + "epoch": 0.31, + "grad_norm": 32.59790955222826, + "learning_rate": 1.9724161589453948e-05, + "loss": 1.061, + "step": 1973 + }, + { + "epoch": 0.31, + "grad_norm": 20.22872112332218, + "learning_rate": 1.9723768094096268e-05, + "loss": 0.8682, + "step": 1974 + }, + { + "epoch": 0.31, + "grad_norm": 4.163242602012352, + "learning_rate": 1.9723374322200315e-05, + "loss": 0.8198, + "step": 1975 + }, + { + "epoch": 0.31, + "grad_norm": 21.033946309405977, + "learning_rate": 1.9722980273777284e-05, + "loss": 0.7993, + "step": 1976 + }, + { + "epoch": 0.31, + "grad_norm": 28.124299212043177, + "learning_rate": 1.9722585948838378e-05, + "loss": 0.7894, + "step": 1977 + }, + { + "epoch": 0.31, + "grad_norm": 22.55154649185102, + "learning_rate": 1.972219134739482e-05, + "loss": 0.8277, + "step": 1978 + }, + { + "epoch": 0.31, + "grad_norm": 21.439044298984843, + "learning_rate": 1.9721796469457827e-05, + "loss": 0.8154, + "step": 1979 + }, + { + "epoch": 0.31, + "grad_norm": 24.263510828712764, + "learning_rate": 1.9721401315038628e-05, + "loss": 0.867, + "step": 1980 + }, + { + "epoch": 0.31, + "grad_norm": 22.413829884031742, + "learning_rate": 1.972100588414846e-05, + "loss": 0.8369, + "step": 1981 + }, + { + "epoch": 0.31, + "grad_norm": 21.421277764785366, + "learning_rate": 1.9720610176798574e-05, + "loss": 0.9176, + "step": 1982 + }, + { + "epoch": 0.31, + "grad_norm": 16.591302858287282, + "learning_rate": 1.972021419300022e-05, + "loss": 0.8975, + "step": 1983 + }, + { + "epoch": 0.31, + "grad_norm": 17.44606981628151, + "learning_rate": 1.9719817932764662e-05, + "loss": 0.7829, + "step": 1984 + }, + { + "epoch": 0.31, + "grad_norm": 25.60036458020331, + "learning_rate": 1.9719421396103167e-05, + "loss": 0.7589, + "step": 1985 + }, + { + "epoch": 0.31, + "grad_norm": 19.488065013806025, + "learning_rate": 1.9719024583027012e-05, + "loss": 0.9107, + "step": 1986 + }, + { + "epoch": 0.31, + "grad_norm": 19.800749859434116, + "learning_rate": 1.9718627493547486e-05, + "loss": 0.744, + "step": 1987 + }, + { + "epoch": 0.31, + "grad_norm": 19.87122036874998, + "learning_rate": 1.9718230127675877e-05, + "loss": 0.8792, + "step": 1988 + }, + { + "epoch": 0.31, + "grad_norm": 11.451930796982744, + "learning_rate": 1.971783248542349e-05, + "loss": 0.7478, + "step": 1989 + }, + { + "epoch": 0.31, + "grad_norm": 23.369709856981036, + "learning_rate": 1.9717434566801633e-05, + "loss": 0.9079, + "step": 1990 + }, + { + "epoch": 
0.31, + "grad_norm": 10.6883050569356, + "learning_rate": 1.9717036371821615e-05, + "loss": 0.8945, + "step": 1991 + }, + { + "epoch": 0.31, + "grad_norm": 18.590627946723554, + "learning_rate": 1.9716637900494775e-05, + "loss": 0.739, + "step": 1992 + }, + { + "epoch": 0.31, + "grad_norm": 32.17525068408096, + "learning_rate": 1.9716239152832434e-05, + "loss": 0.9047, + "step": 1993 + }, + { + "epoch": 0.31, + "grad_norm": 18.223546792008175, + "learning_rate": 1.9715840128845934e-05, + "loss": 0.746, + "step": 1994 + }, + { + "epoch": 0.31, + "grad_norm": 26.474822254232627, + "learning_rate": 1.9715440828546626e-05, + "loss": 0.9029, + "step": 1995 + }, + { + "epoch": 0.31, + "grad_norm": 12.990517024875519, + "learning_rate": 1.9715041251945863e-05, + "loss": 0.7252, + "step": 1996 + }, + { + "epoch": 0.31, + "grad_norm": 23.633635011972196, + "learning_rate": 1.9714641399055007e-05, + "loss": 0.8276, + "step": 1997 + }, + { + "epoch": 0.31, + "grad_norm": 30.676383652935883, + "learning_rate": 1.9714241269885436e-05, + "loss": 0.9074, + "step": 1998 + }, + { + "epoch": 0.31, + "grad_norm": 18.616173736210115, + "learning_rate": 1.9713840864448528e-05, + "loss": 0.8446, + "step": 1999 + }, + { + "epoch": 0.31, + "grad_norm": 18.878151480522458, + "learning_rate": 1.9713440182755665e-05, + "loss": 0.8643, + "step": 2000 + }, + { + "epoch": 0.31, + "grad_norm": 17.67374095374624, + "learning_rate": 1.9713039224818247e-05, + "loss": 0.8124, + "step": 2001 + }, + { + "epoch": 0.31, + "grad_norm": 19.68957343006568, + "learning_rate": 1.9712637990647674e-05, + "loss": 0.8725, + "step": 2002 + }, + { + "epoch": 0.31, + "grad_norm": 16.224064555261798, + "learning_rate": 1.971223648025536e-05, + "loss": 0.8174, + "step": 2003 + }, + { + "epoch": 0.31, + "grad_norm": 18.320630612895478, + "learning_rate": 1.9711834693652722e-05, + "loss": 0.8064, + "step": 2004 + }, + { + "epoch": 0.31, + "grad_norm": 24.548316005124242, + "learning_rate": 1.9711432630851182e-05, + "loss": 0.8951, + "step": 2005 + }, + { + "epoch": 0.31, + "grad_norm": 22.668361165424816, + "learning_rate": 1.9711030291862187e-05, + "loss": 0.8711, + "step": 2006 + }, + { + "epoch": 0.31, + "grad_norm": 16.07097755932726, + "learning_rate": 1.9710627676697167e-05, + "loss": 0.8192, + "step": 2007 + }, + { + "epoch": 0.31, + "grad_norm": 20.01017858545481, + "learning_rate": 1.9710224785367575e-05, + "loss": 0.8742, + "step": 2008 + }, + { + "epoch": 0.31, + "grad_norm": 17.438593319146708, + "learning_rate": 1.9709821617884873e-05, + "loss": 0.9124, + "step": 2009 + }, + { + "epoch": 0.31, + "grad_norm": 20.11446710507202, + "learning_rate": 1.9709418174260523e-05, + "loss": 0.8485, + "step": 2010 + }, + { + "epoch": 0.31, + "grad_norm": 20.509390048068454, + "learning_rate": 1.9709014454506e-05, + "loss": 0.8336, + "step": 2011 + }, + { + "epoch": 0.31, + "grad_norm": 16.8540006601597, + "learning_rate": 1.9708610458632783e-05, + "loss": 0.9212, + "step": 2012 + }, + { + "epoch": 0.31, + "grad_norm": 17.426905372360444, + "learning_rate": 1.970820618665237e-05, + "loss": 0.8895, + "step": 2013 + }, + { + "epoch": 0.31, + "grad_norm": 21.478918339061252, + "learning_rate": 1.9707801638576246e-05, + "loss": 0.8906, + "step": 2014 + }, + { + "epoch": 0.31, + "grad_norm": 22.198459180637528, + "learning_rate": 1.9707396814415927e-05, + "loss": 0.8595, + "step": 2015 + }, + { + "epoch": 0.31, + "grad_norm": 23.220622442841528, + "learning_rate": 1.970699171418292e-05, + "loss": 0.8944, + "step": 2016 + }, + { + "epoch": 0.32, + 
"grad_norm": 19.090429001609277, + "learning_rate": 1.9706586337888744e-05, + "loss": 0.9746, + "step": 2017 + }, + { + "epoch": 0.32, + "grad_norm": 37.80171079558046, + "learning_rate": 1.9706180685544933e-05, + "loss": 0.9084, + "step": 2018 + }, + { + "epoch": 0.32, + "grad_norm": 23.693300957325114, + "learning_rate": 1.9705774757163022e-05, + "loss": 0.7714, + "step": 2019 + }, + { + "epoch": 0.32, + "grad_norm": 36.16578761922622, + "learning_rate": 1.9705368552754554e-05, + "loss": 0.9715, + "step": 2020 + }, + { + "epoch": 0.32, + "grad_norm": 30.37952921600066, + "learning_rate": 1.970496207233108e-05, + "loss": 1.0402, + "step": 2021 + }, + { + "epoch": 0.32, + "grad_norm": 13.97949704174473, + "learning_rate": 1.9704555315904164e-05, + "loss": 0.7923, + "step": 2022 + }, + { + "epoch": 0.32, + "grad_norm": 23.425513981446954, + "learning_rate": 1.9704148283485374e-05, + "loss": 0.8088, + "step": 2023 + }, + { + "epoch": 0.32, + "grad_norm": 17.737425028472487, + "learning_rate": 1.9703740975086282e-05, + "loss": 0.9466, + "step": 2024 + }, + { + "epoch": 0.32, + "grad_norm": 15.688795316457627, + "learning_rate": 1.970333339071847e-05, + "loss": 0.788, + "step": 2025 + }, + { + "epoch": 0.32, + "grad_norm": 18.67685110749523, + "learning_rate": 1.9702925530393535e-05, + "loss": 0.8963, + "step": 2026 + }, + { + "epoch": 0.32, + "grad_norm": 20.37720486713858, + "learning_rate": 1.970251739412307e-05, + "loss": 0.8601, + "step": 2027 + }, + { + "epoch": 0.32, + "grad_norm": 21.424242169193125, + "learning_rate": 1.9702108981918693e-05, + "loss": 0.8474, + "step": 2028 + }, + { + "epoch": 0.32, + "grad_norm": 28.64494758115085, + "learning_rate": 1.970170029379201e-05, + "loss": 0.9745, + "step": 2029 + }, + { + "epoch": 0.32, + "grad_norm": 16.170713737276607, + "learning_rate": 1.9701291329754645e-05, + "loss": 0.7781, + "step": 2030 + }, + { + "epoch": 0.32, + "grad_norm": 26.473651958448663, + "learning_rate": 1.970088208981823e-05, + "loss": 0.8505, + "step": 2031 + }, + { + "epoch": 0.32, + "grad_norm": 25.099027657055245, + "learning_rate": 1.9700472573994403e-05, + "loss": 0.8165, + "step": 2032 + }, + { + "epoch": 0.32, + "grad_norm": 16.389292650100625, + "learning_rate": 1.970006278229481e-05, + "loss": 0.8138, + "step": 2033 + }, + { + "epoch": 0.32, + "grad_norm": 28.028228690501816, + "learning_rate": 1.9699652714731106e-05, + "loss": 0.8045, + "step": 2034 + }, + { + "epoch": 0.32, + "grad_norm": 22.785452550579784, + "learning_rate": 1.9699242371314952e-05, + "loss": 0.8292, + "step": 2035 + }, + { + "epoch": 0.32, + "grad_norm": 19.201463608848353, + "learning_rate": 1.9698831752058023e-05, + "loss": 0.863, + "step": 2036 + }, + { + "epoch": 0.32, + "grad_norm": 18.340812277280527, + "learning_rate": 1.969842085697199e-05, + "loss": 0.938, + "step": 2037 + }, + { + "epoch": 0.32, + "grad_norm": 17.485254576668932, + "learning_rate": 1.969800968606854e-05, + "loss": 0.8792, + "step": 2038 + }, + { + "epoch": 0.32, + "grad_norm": 23.50361377495418, + "learning_rate": 1.9697598239359368e-05, + "loss": 0.9366, + "step": 2039 + }, + { + "epoch": 0.32, + "grad_norm": 28.004593364380916, + "learning_rate": 1.9697186516856177e-05, + "loss": 0.8729, + "step": 2040 + }, + { + "epoch": 0.32, + "grad_norm": 18.27001093393149, + "learning_rate": 1.9696774518570673e-05, + "loss": 0.81, + "step": 2041 + }, + { + "epoch": 0.32, + "grad_norm": 20.275109917781354, + "learning_rate": 1.9696362244514576e-05, + "loss": 0.8853, + "step": 2042 + }, + { + "epoch": 0.32, + "grad_norm": 
15.747320829069553, + "learning_rate": 1.9695949694699604e-05, + "loss": 0.8787, + "step": 2043 + }, + { + "epoch": 0.32, + "grad_norm": 18.324589701361806, + "learning_rate": 1.96955368691375e-05, + "loss": 0.9621, + "step": 2044 + }, + { + "epoch": 0.32, + "grad_norm": 12.62062350748662, + "learning_rate": 1.9695123767839995e-05, + "loss": 0.7259, + "step": 2045 + }, + { + "epoch": 0.32, + "grad_norm": 15.868806449305948, + "learning_rate": 1.9694710390818844e-05, + "loss": 0.8744, + "step": 2046 + }, + { + "epoch": 0.32, + "grad_norm": 20.289745679185543, + "learning_rate": 1.9694296738085802e-05, + "loss": 0.8401, + "step": 2047 + }, + { + "epoch": 0.32, + "grad_norm": 14.319830790116374, + "learning_rate": 1.9693882809652626e-05, + "loss": 0.8339, + "step": 2048 + }, + { + "epoch": 0.32, + "grad_norm": 19.35813448628382, + "learning_rate": 1.96934686055311e-05, + "loss": 0.8886, + "step": 2049 + }, + { + "epoch": 0.32, + "grad_norm": 28.498450702459422, + "learning_rate": 1.9693054125732995e-05, + "loss": 0.9545, + "step": 2050 + }, + { + "epoch": 0.32, + "grad_norm": 23.41585665636211, + "learning_rate": 1.96926393702701e-05, + "loss": 0.7897, + "step": 2051 + }, + { + "epoch": 0.32, + "grad_norm": 20.60940300575881, + "learning_rate": 1.9692224339154215e-05, + "loss": 0.8198, + "step": 2052 + }, + { + "epoch": 0.32, + "grad_norm": 13.23249166491358, + "learning_rate": 1.9691809032397135e-05, + "loss": 0.7522, + "step": 2053 + }, + { + "epoch": 0.32, + "grad_norm": 15.991811089588687, + "learning_rate": 1.9691393450010675e-05, + "loss": 0.8621, + "step": 2054 + }, + { + "epoch": 0.32, + "grad_norm": 26.00682109812143, + "learning_rate": 1.969097759200666e-05, + "loss": 0.9021, + "step": 2055 + }, + { + "epoch": 0.32, + "grad_norm": 14.863507928510778, + "learning_rate": 1.969056145839691e-05, + "loss": 0.8484, + "step": 2056 + }, + { + "epoch": 0.32, + "grad_norm": 28.320363565333135, + "learning_rate": 1.9690145049193257e-05, + "loss": 0.8124, + "step": 2057 + }, + { + "epoch": 0.32, + "grad_norm": 24.337468244865462, + "learning_rate": 1.968972836440755e-05, + "loss": 1.0166, + "step": 2058 + }, + { + "epoch": 0.32, + "grad_norm": 23.886848211054595, + "learning_rate": 1.9689311404051638e-05, + "loss": 0.9355, + "step": 2059 + }, + { + "epoch": 0.32, + "grad_norm": 20.150662392893672, + "learning_rate": 1.9688894168137372e-05, + "loss": 0.8225, + "step": 2060 + }, + { + "epoch": 0.32, + "grad_norm": 16.100422275118852, + "learning_rate": 1.9688476656676628e-05, + "loss": 0.8357, + "step": 2061 + }, + { + "epoch": 0.32, + "grad_norm": 15.912121041966605, + "learning_rate": 1.9688058869681277e-05, + "loss": 0.7751, + "step": 2062 + }, + { + "epoch": 0.32, + "grad_norm": 22.385547289120343, + "learning_rate": 1.96876408071632e-05, + "loss": 0.9013, + "step": 2063 + }, + { + "epoch": 0.32, + "grad_norm": 20.674005406574164, + "learning_rate": 1.968722246913428e-05, + "loss": 0.8803, + "step": 2064 + }, + { + "epoch": 0.32, + "grad_norm": 14.651857143774105, + "learning_rate": 1.9686803855606422e-05, + "loss": 0.8493, + "step": 2065 + }, + { + "epoch": 0.32, + "grad_norm": 20.83345569066038, + "learning_rate": 1.968638496659153e-05, + "loss": 0.8327, + "step": 2066 + }, + { + "epoch": 0.32, + "grad_norm": 14.981675972967828, + "learning_rate": 1.9685965802101517e-05, + "loss": 0.8561, + "step": 2067 + }, + { + "epoch": 0.32, + "grad_norm": 20.980884605410015, + "learning_rate": 1.96855463621483e-05, + "loss": 1.0125, + "step": 2068 + }, + { + "epoch": 0.32, + "grad_norm": 
16.170432441982427, + "learning_rate": 1.9685126646743814e-05, + "loss": 0.7933, + "step": 2069 + }, + { + "epoch": 0.32, + "grad_norm": 25.31564892738416, + "learning_rate": 1.9684706655899988e-05, + "loss": 1.0028, + "step": 2070 + }, + { + "epoch": 0.32, + "grad_norm": 27.402884714954798, + "learning_rate": 1.968428638962877e-05, + "loss": 0.8449, + "step": 2071 + }, + { + "epoch": 0.32, + "grad_norm": 20.701878523820394, + "learning_rate": 1.9683865847942116e-05, + "loss": 0.8012, + "step": 2072 + }, + { + "epoch": 0.32, + "grad_norm": 19.698104536417045, + "learning_rate": 1.968344503085198e-05, + "loss": 0.9933, + "step": 2073 + }, + { + "epoch": 0.32, + "grad_norm": 21.286533086458956, + "learning_rate": 1.968302393837033e-05, + "loss": 0.8337, + "step": 2074 + }, + { + "epoch": 0.32, + "grad_norm": 17.971950636545188, + "learning_rate": 1.9682602570509147e-05, + "loss": 0.8593, + "step": 2075 + }, + { + "epoch": 0.32, + "grad_norm": 17.314689908288504, + "learning_rate": 1.968218092728041e-05, + "loss": 0.9049, + "step": 2076 + }, + { + "epoch": 0.32, + "grad_norm": 22.8035843946017, + "learning_rate": 1.968175900869611e-05, + "loss": 0.8038, + "step": 2077 + }, + { + "epoch": 0.32, + "grad_norm": 23.762809406054114, + "learning_rate": 1.968133681476825e-05, + "loss": 0.9007, + "step": 2078 + }, + { + "epoch": 0.32, + "grad_norm": 18.792393175444175, + "learning_rate": 1.9680914345508834e-05, + "loss": 0.8621, + "step": 2079 + }, + { + "epoch": 0.32, + "grad_norm": 15.112575483961884, + "learning_rate": 1.9680491600929874e-05, + "loss": 0.9009, + "step": 2080 + }, + { + "epoch": 0.33, + "grad_norm": 18.51078749449631, + "learning_rate": 1.9680068581043396e-05, + "loss": 0.8398, + "step": 2081 + }, + { + "epoch": 0.33, + "grad_norm": 20.781832085251615, + "learning_rate": 1.9679645285861433e-05, + "loss": 0.8318, + "step": 2082 + }, + { + "epoch": 0.33, + "grad_norm": 17.82291140287787, + "learning_rate": 1.9679221715396018e-05, + "loss": 0.8048, + "step": 2083 + }, + { + "epoch": 0.33, + "grad_norm": 32.944377575626525, + "learning_rate": 1.9678797869659204e-05, + "loss": 0.9733, + "step": 2084 + }, + { + "epoch": 0.33, + "grad_norm": 15.676625373293733, + "learning_rate": 1.9678373748663037e-05, + "loss": 0.8363, + "step": 2085 + }, + { + "epoch": 0.33, + "grad_norm": 30.685482164690686, + "learning_rate": 1.967794935241958e-05, + "loss": 0.95, + "step": 2086 + }, + { + "epoch": 0.33, + "grad_norm": 21.79509052642431, + "learning_rate": 1.967752468094091e-05, + "loss": 0.8782, + "step": 2087 + }, + { + "epoch": 0.33, + "grad_norm": 23.101179041048873, + "learning_rate": 1.9677099734239098e-05, + "loss": 0.9321, + "step": 2088 + }, + { + "epoch": 0.33, + "grad_norm": 23.373758789247308, + "learning_rate": 1.967667451232623e-05, + "loss": 0.8857, + "step": 2089 + }, + { + "epoch": 0.33, + "grad_norm": 15.729766105341012, + "learning_rate": 1.9676249015214396e-05, + "loss": 0.7793, + "step": 2090 + }, + { + "epoch": 0.33, + "grad_norm": 45.513667037997, + "learning_rate": 1.9675823242915705e-05, + "loss": 0.8488, + "step": 2091 + }, + { + "epoch": 0.33, + "grad_norm": 16.170709642632474, + "learning_rate": 1.967539719544226e-05, + "loss": 0.8632, + "step": 2092 + }, + { + "epoch": 0.33, + "grad_norm": 23.873762529006765, + "learning_rate": 1.967497087280618e-05, + "loss": 0.8697, + "step": 2093 + }, + { + "epoch": 0.33, + "grad_norm": 15.146375161618987, + "learning_rate": 1.9674544275019583e-05, + "loss": 0.8427, + "step": 2094 + }, + { + "epoch": 0.33, + "grad_norm": 
18.39465103304834, + "learning_rate": 1.9674117402094612e-05, + "loss": 0.7769, + "step": 2095 + }, + { + "epoch": 0.33, + "grad_norm": 30.994700505673816, + "learning_rate": 1.96736902540434e-05, + "loss": 0.8662, + "step": 2096 + }, + { + "epoch": 0.33, + "grad_norm": 17.297744865219787, + "learning_rate": 1.9673262830878094e-05, + "loss": 0.8444, + "step": 2097 + }, + { + "epoch": 0.33, + "grad_norm": 18.50302667844549, + "learning_rate": 1.9672835132610856e-05, + "loss": 0.8278, + "step": 2098 + }, + { + "epoch": 0.33, + "grad_norm": 20.231258831369892, + "learning_rate": 1.967240715925384e-05, + "loss": 0.9185, + "step": 2099 + }, + { + "epoch": 0.33, + "grad_norm": 17.586851130579518, + "learning_rate": 1.9671978910819225e-05, + "loss": 0.8729, + "step": 2100 + }, + { + "epoch": 0.33, + "grad_norm": 17.596507171474503, + "learning_rate": 1.9671550387319188e-05, + "loss": 0.7587, + "step": 2101 + }, + { + "epoch": 0.33, + "grad_norm": 21.54986398351388, + "learning_rate": 1.9671121588765913e-05, + "loss": 0.8134, + "step": 2102 + }, + { + "epoch": 0.33, + "grad_norm": 21.44040746963521, + "learning_rate": 1.96706925151716e-05, + "loss": 0.9036, + "step": 2103 + }, + { + "epoch": 0.33, + "grad_norm": 19.642517391678766, + "learning_rate": 1.967026316654845e-05, + "loss": 0.9083, + "step": 2104 + }, + { + "epoch": 0.33, + "grad_norm": 25.546774618435716, + "learning_rate": 1.966983354290867e-05, + "loss": 0.8881, + "step": 2105 + }, + { + "epoch": 0.33, + "grad_norm": 12.580669462025064, + "learning_rate": 1.9669403644264485e-05, + "loss": 0.8202, + "step": 2106 + }, + { + "epoch": 0.33, + "grad_norm": 3.9494752545919876, + "learning_rate": 1.966897347062811e-05, + "loss": 0.8271, + "step": 2107 + }, + { + "epoch": 0.33, + "grad_norm": 28.918628136324678, + "learning_rate": 1.9668543022011792e-05, + "loss": 0.8817, + "step": 2108 + }, + { + "epoch": 0.33, + "grad_norm": 30.775240789369356, + "learning_rate": 1.9668112298427764e-05, + "loss": 0.9015, + "step": 2109 + }, + { + "epoch": 0.33, + "grad_norm": 19.737897706509454, + "learning_rate": 1.9667681299888277e-05, + "loss": 0.9715, + "step": 2110 + }, + { + "epoch": 0.33, + "grad_norm": 23.843163249344535, + "learning_rate": 1.966725002640559e-05, + "loss": 0.8514, + "step": 2111 + }, + { + "epoch": 0.33, + "grad_norm": 29.238157946085167, + "learning_rate": 1.9666818477991967e-05, + "loss": 0.8035, + "step": 2112 + }, + { + "epoch": 0.33, + "grad_norm": 25.25716850174795, + "learning_rate": 1.9666386654659683e-05, + "loss": 0.7557, + "step": 2113 + }, + { + "epoch": 0.33, + "grad_norm": 23.080429387928188, + "learning_rate": 1.9665954556421014e-05, + "loss": 0.8524, + "step": 2114 + }, + { + "epoch": 0.33, + "grad_norm": 23.46195452223196, + "learning_rate": 1.9665522183288253e-05, + "loss": 0.9517, + "step": 2115 + }, + { + "epoch": 0.33, + "grad_norm": 14.22997900690526, + "learning_rate": 1.9665089535273698e-05, + "loss": 0.7616, + "step": 2116 + }, + { + "epoch": 0.33, + "grad_norm": 14.71597199941219, + "learning_rate": 1.966465661238965e-05, + "loss": 0.7954, + "step": 2117 + }, + { + "epoch": 0.33, + "grad_norm": 16.747367255209674, + "learning_rate": 1.966422341464842e-05, + "loss": 0.819, + "step": 2118 + }, + { + "epoch": 0.33, + "grad_norm": 25.337880005125236, + "learning_rate": 1.966378994206233e-05, + "loss": 0.8782, + "step": 2119 + }, + { + "epoch": 0.33, + "grad_norm": 21.6161545450506, + "learning_rate": 1.9663356194643704e-05, + "loss": 0.7597, + "step": 2120 + }, + { + "epoch": 0.33, + "grad_norm": 
18.478990774292374, + "learning_rate": 1.9662922172404887e-05, + "loss": 0.8034, + "step": 2121 + }, + { + "epoch": 0.33, + "grad_norm": 21.93094670261835, + "learning_rate": 1.966248787535821e-05, + "loss": 0.8329, + "step": 2122 + }, + { + "epoch": 0.33, + "grad_norm": 18.203284656659473, + "learning_rate": 1.9662053303516036e-05, + "loss": 0.9106, + "step": 2123 + }, + { + "epoch": 0.33, + "grad_norm": 18.648103820474752, + "learning_rate": 1.9661618456890714e-05, + "loss": 0.7647, + "step": 2124 + }, + { + "epoch": 0.33, + "grad_norm": 16.951290720050416, + "learning_rate": 1.9661183335494617e-05, + "loss": 0.8088, + "step": 2125 + }, + { + "epoch": 0.33, + "grad_norm": 20.791425707541396, + "learning_rate": 1.9660747939340116e-05, + "loss": 0.8276, + "step": 2126 + }, + { + "epoch": 0.33, + "grad_norm": 17.346590899726824, + "learning_rate": 1.9660312268439593e-05, + "loss": 0.8467, + "step": 2127 + }, + { + "epoch": 0.33, + "grad_norm": 31.015011462333717, + "learning_rate": 1.9659876322805448e-05, + "loss": 0.852, + "step": 2128 + }, + { + "epoch": 0.33, + "grad_norm": 20.85371623488054, + "learning_rate": 1.965944010245006e-05, + "loss": 0.7844, + "step": 2129 + }, + { + "epoch": 0.33, + "grad_norm": 20.3166608686285, + "learning_rate": 1.9659003607385857e-05, + "loss": 0.731, + "step": 2130 + }, + { + "epoch": 0.33, + "grad_norm": 18.230175135283112, + "learning_rate": 1.9658566837625236e-05, + "loss": 0.8378, + "step": 2131 + }, + { + "epoch": 0.33, + "grad_norm": 20.268960548845886, + "learning_rate": 1.9658129793180625e-05, + "loss": 0.8499, + "step": 2132 + }, + { + "epoch": 0.33, + "grad_norm": 21.393021354586935, + "learning_rate": 1.965769247406445e-05, + "loss": 0.8268, + "step": 2133 + }, + { + "epoch": 0.33, + "grad_norm": 24.690563458490438, + "learning_rate": 1.9657254880289155e-05, + "loss": 0.8692, + "step": 2134 + }, + { + "epoch": 0.33, + "grad_norm": 18.730910040437305, + "learning_rate": 1.965681701186718e-05, + "loss": 0.8778, + "step": 2135 + }, + { + "epoch": 0.33, + "grad_norm": 34.76191543504597, + "learning_rate": 1.9656378868810978e-05, + "loss": 0.9182, + "step": 2136 + }, + { + "epoch": 0.33, + "grad_norm": 22.923119795595042, + "learning_rate": 1.965594045113301e-05, + "loss": 0.8778, + "step": 2137 + }, + { + "epoch": 0.33, + "grad_norm": 35.14006837969686, + "learning_rate": 1.965550175884574e-05, + "loss": 0.9905, + "step": 2138 + }, + { + "epoch": 0.33, + "grad_norm": 20.81176056135414, + "learning_rate": 1.9655062791961654e-05, + "loss": 0.9221, + "step": 2139 + }, + { + "epoch": 0.33, + "grad_norm": 12.778697089127505, + "learning_rate": 1.9654623550493227e-05, + "loss": 0.7828, + "step": 2140 + }, + { + "epoch": 0.33, + "grad_norm": 13.075896231015909, + "learning_rate": 1.9654184034452955e-05, + "loss": 0.7573, + "step": 2141 + }, + { + "epoch": 0.33, + "grad_norm": 18.250945541957204, + "learning_rate": 1.9653744243853333e-05, + "loss": 0.7862, + "step": 2142 + }, + { + "epoch": 0.33, + "grad_norm": 17.914711142341105, + "learning_rate": 1.9653304178706877e-05, + "loss": 0.8773, + "step": 2143 + }, + { + "epoch": 0.33, + "grad_norm": 13.355644536765228, + "learning_rate": 1.96528638390261e-05, + "loss": 0.8611, + "step": 2144 + }, + { + "epoch": 0.34, + "grad_norm": 19.78372677028951, + "learning_rate": 1.9652423224823515e-05, + "loss": 0.8258, + "step": 2145 + }, + { + "epoch": 0.34, + "grad_norm": 13.400282524238868, + "learning_rate": 1.965198233611166e-05, + "loss": 0.8081, + "step": 2146 + }, + { + "epoch": 0.34, + "grad_norm": 
22.44229291155315, + "learning_rate": 1.9651541172903076e-05, + "loss": 0.8738, + "step": 2147 + }, + { + "epoch": 0.34, + "grad_norm": 17.966595726243217, + "learning_rate": 1.965109973521031e-05, + "loss": 0.8329, + "step": 2148 + }, + { + "epoch": 0.34, + "grad_norm": 18.382324163627537, + "learning_rate": 1.965065802304591e-05, + "loss": 0.7689, + "step": 2149 + }, + { + "epoch": 0.34, + "grad_norm": 18.824299573279532, + "learning_rate": 1.965021603642244e-05, + "loss": 0.8131, + "step": 2150 + }, + { + "epoch": 0.34, + "grad_norm": 18.696418335757354, + "learning_rate": 1.9649773775352475e-05, + "loss": 0.7632, + "step": 2151 + }, + { + "epoch": 0.34, + "grad_norm": 22.731280950027507, + "learning_rate": 1.9649331239848587e-05, + "loss": 0.8744, + "step": 2152 + }, + { + "epoch": 0.34, + "grad_norm": 14.206521976165025, + "learning_rate": 1.9648888429923364e-05, + "loss": 0.7901, + "step": 2153 + }, + { + "epoch": 0.34, + "grad_norm": 16.19688452062193, + "learning_rate": 1.9648445345589395e-05, + "loss": 0.8523, + "step": 2154 + }, + { + "epoch": 0.34, + "grad_norm": 14.333005006512693, + "learning_rate": 1.9648001986859288e-05, + "loss": 0.7507, + "step": 2155 + }, + { + "epoch": 0.34, + "grad_norm": 21.647414526418764, + "learning_rate": 1.9647558353745645e-05, + "loss": 0.8648, + "step": 2156 + }, + { + "epoch": 0.34, + "grad_norm": 25.029609753829504, + "learning_rate": 1.964711444626109e-05, + "loss": 0.8985, + "step": 2157 + }, + { + "epoch": 0.34, + "grad_norm": 35.82979041602923, + "learning_rate": 1.9646670264418242e-05, + "loss": 0.8636, + "step": 2158 + }, + { + "epoch": 0.34, + "grad_norm": 14.343582252070068, + "learning_rate": 1.9646225808229734e-05, + "loss": 0.7577, + "step": 2159 + }, + { + "epoch": 0.34, + "grad_norm": 18.348562087888574, + "learning_rate": 1.9645781077708208e-05, + "loss": 0.7974, + "step": 2160 + }, + { + "epoch": 0.34, + "grad_norm": 21.810396517951595, + "learning_rate": 1.9645336072866312e-05, + "loss": 0.8364, + "step": 2161 + }, + { + "epoch": 0.34, + "grad_norm": 17.723141038253587, + "learning_rate": 1.9644890793716697e-05, + "loss": 0.9236, + "step": 2162 + }, + { + "epoch": 0.34, + "grad_norm": 25.359374407699494, + "learning_rate": 1.9644445240272033e-05, + "loss": 0.9071, + "step": 2163 + }, + { + "epoch": 0.34, + "grad_norm": 36.453060436232484, + "learning_rate": 1.964399941254499e-05, + "loss": 0.87, + "step": 2164 + }, + { + "epoch": 0.34, + "grad_norm": 19.467608515584082, + "learning_rate": 1.964355331054824e-05, + "loss": 0.9243, + "step": 2165 + }, + { + "epoch": 0.34, + "grad_norm": 19.248844364392504, + "learning_rate": 1.9643106934294476e-05, + "loss": 0.8163, + "step": 2166 + }, + { + "epoch": 0.34, + "grad_norm": 19.122933555051425, + "learning_rate": 1.9642660283796395e-05, + "loss": 0.952, + "step": 2167 + }, + { + "epoch": 0.34, + "grad_norm": 21.93321172529305, + "learning_rate": 1.9642213359066694e-05, + "loss": 0.8702, + "step": 2168 + }, + { + "epoch": 0.34, + "grad_norm": 22.994449727132253, + "learning_rate": 1.9641766160118086e-05, + "loss": 1.0528, + "step": 2169 + }, + { + "epoch": 0.34, + "grad_norm": 29.239068988388972, + "learning_rate": 1.964131868696329e-05, + "loss": 0.8495, + "step": 2170 + }, + { + "epoch": 0.34, + "grad_norm": 28.44738303728948, + "learning_rate": 1.964087093961503e-05, + "loss": 0.9238, + "step": 2171 + }, + { + "epoch": 0.34, + "grad_norm": 26.948207012360225, + "learning_rate": 1.9640422918086038e-05, + "loss": 0.8579, + "step": 2172 + }, + { + "epoch": 0.34, + "grad_norm": 
20.89967049998885, + "learning_rate": 1.963997462238906e-05, + "loss": 0.7927, + "step": 2173 + }, + { + "epoch": 0.34, + "grad_norm": 17.8201842213537, + "learning_rate": 1.963952605253684e-05, + "loss": 0.7746, + "step": 2174 + }, + { + "epoch": 0.34, + "grad_norm": 19.545274448273872, + "learning_rate": 1.963907720854214e-05, + "loss": 0.9211, + "step": 2175 + }, + { + "epoch": 0.34, + "grad_norm": 23.93813130975505, + "learning_rate": 1.9638628090417727e-05, + "loss": 0.8126, + "step": 2176 + }, + { + "epoch": 0.34, + "grad_norm": 4.84867541245477, + "learning_rate": 1.9638178698176364e-05, + "loss": 0.8616, + "step": 2177 + }, + { + "epoch": 0.34, + "grad_norm": 21.82145434641559, + "learning_rate": 1.963772903183084e-05, + "loss": 0.844, + "step": 2178 + }, + { + "epoch": 0.34, + "grad_norm": 23.43618542481379, + "learning_rate": 1.963727909139394e-05, + "loss": 0.782, + "step": 2179 + }, + { + "epoch": 0.34, + "grad_norm": 22.283206081398543, + "learning_rate": 1.9636828876878456e-05, + "loss": 0.8548, + "step": 2180 + }, + { + "epoch": 0.34, + "grad_norm": 16.47930205510977, + "learning_rate": 1.96363783882972e-05, + "loss": 0.7673, + "step": 2181 + }, + { + "epoch": 0.34, + "grad_norm": 24.63215972502464, + "learning_rate": 1.963592762566298e-05, + "loss": 0.8863, + "step": 2182 + }, + { + "epoch": 0.34, + "grad_norm": 27.53223359410934, + "learning_rate": 1.9635476588988615e-05, + "loss": 0.8786, + "step": 2183 + }, + { + "epoch": 0.34, + "grad_norm": 29.538060661131315, + "learning_rate": 1.963502527828693e-05, + "loss": 0.8706, + "step": 2184 + }, + { + "epoch": 0.34, + "grad_norm": 57.84225302005477, + "learning_rate": 1.9634573693570767e-05, + "loss": 0.8658, + "step": 2185 + }, + { + "epoch": 0.34, + "grad_norm": 23.793039716665785, + "learning_rate": 1.963412183485296e-05, + "loss": 0.7691, + "step": 2186 + }, + { + "epoch": 0.34, + "grad_norm": 16.26578418496224, + "learning_rate": 1.9633669702146366e-05, + "loss": 0.8299, + "step": 2187 + }, + { + "epoch": 0.34, + "grad_norm": 21.478741276842545, + "learning_rate": 1.963321729546384e-05, + "loss": 0.8549, + "step": 2188 + }, + { + "epoch": 0.34, + "grad_norm": 37.167273619461376, + "learning_rate": 1.963276461481825e-05, + "loss": 0.9106, + "step": 2189 + }, + { + "epoch": 0.34, + "grad_norm": 56.31049650419636, + "learning_rate": 1.963231166022247e-05, + "loss": 0.861, + "step": 2190 + }, + { + "epoch": 0.34, + "grad_norm": 23.201976639815516, + "learning_rate": 1.9631858431689382e-05, + "loss": 0.8063, + "step": 2191 + }, + { + "epoch": 0.34, + "grad_norm": 16.645263961030828, + "learning_rate": 1.963140492923187e-05, + "loss": 0.8363, + "step": 2192 + }, + { + "epoch": 0.34, + "grad_norm": 20.49710724500291, + "learning_rate": 1.963095115286284e-05, + "loss": 0.8837, + "step": 2193 + }, + { + "epoch": 0.34, + "grad_norm": 15.65235460289088, + "learning_rate": 1.963049710259519e-05, + "loss": 0.9756, + "step": 2194 + }, + { + "epoch": 0.34, + "grad_norm": 29.373686438471058, + "learning_rate": 1.963004277844184e-05, + "loss": 0.8135, + "step": 2195 + }, + { + "epoch": 0.34, + "grad_norm": 4.171045728122307, + "learning_rate": 1.9629588180415707e-05, + "loss": 0.8234, + "step": 2196 + }, + { + "epoch": 0.34, + "grad_norm": 19.958338018415482, + "learning_rate": 1.9629133308529717e-05, + "loss": 0.9029, + "step": 2197 + }, + { + "epoch": 0.34, + "grad_norm": 22.054987007780277, + "learning_rate": 1.962867816279681e-05, + "loss": 0.8789, + "step": 2198 + }, + { + "epoch": 0.34, + "grad_norm": 22.724933992763166, + 
"learning_rate": 1.9628222743229927e-05, + "loss": 0.8741, + "step": 2199 + }, + { + "epoch": 0.34, + "grad_norm": 21.22813346884525, + "learning_rate": 1.9627767049842022e-05, + "loss": 0.9541, + "step": 2200 + }, + { + "epoch": 0.34, + "grad_norm": 19.565110368362504, + "learning_rate": 1.9627311082646053e-05, + "loss": 0.8735, + "step": 2201 + }, + { + "epoch": 0.34, + "grad_norm": 18.17061207401581, + "learning_rate": 1.962685484165499e-05, + "loss": 0.7636, + "step": 2202 + }, + { + "epoch": 0.34, + "grad_norm": 21.33147770307174, + "learning_rate": 1.9626398326881805e-05, + "loss": 0.8396, + "step": 2203 + }, + { + "epoch": 0.34, + "grad_norm": 22.692971774283702, + "learning_rate": 1.9625941538339484e-05, + "loss": 0.8401, + "step": 2204 + }, + { + "epoch": 0.34, + "grad_norm": 15.696540156952329, + "learning_rate": 1.962548447604102e-05, + "loss": 0.8128, + "step": 2205 + }, + { + "epoch": 0.34, + "grad_norm": 14.082897626280321, + "learning_rate": 1.9625027139999404e-05, + "loss": 0.7894, + "step": 2206 + }, + { + "epoch": 0.34, + "grad_norm": 12.58972804448063, + "learning_rate": 1.962456953022765e-05, + "loss": 0.7516, + "step": 2207 + }, + { + "epoch": 0.34, + "grad_norm": 24.880836772182267, + "learning_rate": 1.9624111646738767e-05, + "loss": 0.9179, + "step": 2208 + }, + { + "epoch": 0.35, + "grad_norm": 17.28436038367192, + "learning_rate": 1.962365348954578e-05, + "loss": 0.8, + "step": 2209 + }, + { + "epoch": 0.35, + "grad_norm": 23.63477719802138, + "learning_rate": 1.9623195058661716e-05, + "loss": 0.8046, + "step": 2210 + }, + { + "epoch": 0.35, + "grad_norm": 17.092068807439325, + "learning_rate": 1.9622736354099612e-05, + "loss": 0.7285, + "step": 2211 + }, + { + "epoch": 0.35, + "grad_norm": 25.19775477201366, + "learning_rate": 1.9622277375872518e-05, + "loss": 0.8251, + "step": 2212 + }, + { + "epoch": 0.35, + "grad_norm": 21.554167467807773, + "learning_rate": 1.9621818123993482e-05, + "loss": 0.9185, + "step": 2213 + }, + { + "epoch": 0.35, + "grad_norm": 29.398751526067016, + "learning_rate": 1.962135859847557e-05, + "loss": 0.9394, + "step": 2214 + }, + { + "epoch": 0.35, + "grad_norm": 23.06735841237785, + "learning_rate": 1.9620898799331848e-05, + "loss": 0.9402, + "step": 2215 + }, + { + "epoch": 0.35, + "grad_norm": 30.64704539316894, + "learning_rate": 1.962043872657539e-05, + "loss": 0.989, + "step": 2216 + }, + { + "epoch": 0.35, + "grad_norm": 21.050664734708178, + "learning_rate": 1.9619978380219282e-05, + "loss": 0.8904, + "step": 2217 + }, + { + "epoch": 0.35, + "grad_norm": 16.489511197724184, + "learning_rate": 1.9619517760276618e-05, + "loss": 0.889, + "step": 2218 + }, + { + "epoch": 0.35, + "grad_norm": 27.021809114490775, + "learning_rate": 1.9619056866760497e-05, + "loss": 0.8951, + "step": 2219 + }, + { + "epoch": 0.35, + "grad_norm": 21.09823309624137, + "learning_rate": 1.9618595699684026e-05, + "loss": 0.8278, + "step": 2220 + }, + { + "epoch": 0.35, + "grad_norm": 20.472222781964913, + "learning_rate": 1.9618134259060318e-05, + "loss": 0.8743, + "step": 2221 + }, + { + "epoch": 0.35, + "grad_norm": 27.254923119860816, + "learning_rate": 1.96176725449025e-05, + "loss": 0.9064, + "step": 2222 + }, + { + "epoch": 0.35, + "grad_norm": 23.05791563768528, + "learning_rate": 1.9617210557223697e-05, + "loss": 0.8709, + "step": 2223 + }, + { + "epoch": 0.35, + "grad_norm": 16.543076767281683, + "learning_rate": 1.9616748296037052e-05, + "loss": 0.8798, + "step": 2224 + }, + { + "epoch": 0.35, + "grad_norm": 22.666738148373785, + "learning_rate": 
1.9616285761355717e-05, + "loss": 0.895, + "step": 2225 + }, + { + "epoch": 0.35, + "grad_norm": 16.746350567830284, + "learning_rate": 1.9615822953192833e-05, + "loss": 0.8131, + "step": 2226 + }, + { + "epoch": 0.35, + "grad_norm": 15.436412622403108, + "learning_rate": 1.9615359871561574e-05, + "loss": 0.7679, + "step": 2227 + }, + { + "epoch": 0.35, + "grad_norm": 16.810434706482013, + "learning_rate": 1.9614896516475107e-05, + "loss": 0.79, + "step": 2228 + }, + { + "epoch": 0.35, + "grad_norm": 35.932532497907, + "learning_rate": 1.96144328879466e-05, + "loss": 0.8941, + "step": 2229 + }, + { + "epoch": 0.35, + "grad_norm": 27.831174199343533, + "learning_rate": 1.9613968985989253e-05, + "loss": 1.0721, + "step": 2230 + }, + { + "epoch": 0.35, + "grad_norm": 25.270256096950266, + "learning_rate": 1.961350481061625e-05, + "loss": 0.9382, + "step": 2231 + }, + { + "epoch": 0.35, + "grad_norm": 24.38343000500924, + "learning_rate": 1.9613040361840792e-05, + "loss": 0.9236, + "step": 2232 + }, + { + "epoch": 0.35, + "grad_norm": 20.443885365676206, + "learning_rate": 1.961257563967609e-05, + "loss": 0.759, + "step": 2233 + }, + { + "epoch": 0.35, + "grad_norm": 19.715840613201532, + "learning_rate": 1.9612110644135365e-05, + "loss": 0.8116, + "step": 2234 + }, + { + "epoch": 0.35, + "grad_norm": 20.429731530895157, + "learning_rate": 1.961164537523183e-05, + "loss": 0.9412, + "step": 2235 + }, + { + "epoch": 0.35, + "grad_norm": 17.716173067865874, + "learning_rate": 1.9611179832978725e-05, + "loss": 0.8309, + "step": 2236 + }, + { + "epoch": 0.35, + "grad_norm": 28.72478583669382, + "learning_rate": 1.961071401738929e-05, + "loss": 0.9226, + "step": 2237 + }, + { + "epoch": 0.35, + "grad_norm": 13.306935361831256, + "learning_rate": 1.961024792847677e-05, + "loss": 0.8484, + "step": 2238 + }, + { + "epoch": 0.35, + "grad_norm": 17.03491843244981, + "learning_rate": 1.960978156625442e-05, + "loss": 0.8104, + "step": 2239 + }, + { + "epoch": 0.35, + "grad_norm": 18.021771311788267, + "learning_rate": 1.96093149307355e-05, + "loss": 0.8226, + "step": 2240 + }, + { + "epoch": 0.35, + "grad_norm": 21.19508629061962, + "learning_rate": 1.960884802193329e-05, + "loss": 0.8646, + "step": 2241 + }, + { + "epoch": 0.35, + "grad_norm": 17.193580278916478, + "learning_rate": 1.960838083986106e-05, + "loss": 0.8639, + "step": 2242 + }, + { + "epoch": 0.35, + "grad_norm": 18.843368560416156, + "learning_rate": 1.9607913384532102e-05, + "loss": 0.886, + "step": 2243 + }, + { + "epoch": 0.35, + "grad_norm": 13.13145673744327, + "learning_rate": 1.9607445655959707e-05, + "loss": 0.8167, + "step": 2244 + }, + { + "epoch": 0.35, + "grad_norm": 19.90926852762867, + "learning_rate": 1.9606977654157176e-05, + "loss": 0.763, + "step": 2245 + }, + { + "epoch": 0.35, + "grad_norm": 12.687757201124638, + "learning_rate": 1.9606509379137823e-05, + "loss": 0.7763, + "step": 2246 + }, + { + "epoch": 0.35, + "grad_norm": 20.68120718669408, + "learning_rate": 1.960604083091496e-05, + "loss": 0.877, + "step": 2247 + }, + { + "epoch": 0.35, + "grad_norm": 13.947481220770838, + "learning_rate": 1.9605572009501918e-05, + "loss": 0.7808, + "step": 2248 + }, + { + "epoch": 0.35, + "grad_norm": 24.670105893666836, + "learning_rate": 1.9605102914912025e-05, + "loss": 0.8162, + "step": 2249 + }, + { + "epoch": 0.35, + "grad_norm": 13.617112052826652, + "learning_rate": 1.960463354715863e-05, + "loss": 0.8944, + "step": 2250 + }, + { + "epoch": 0.35, + "grad_norm": 28.60815847009406, + "learning_rate": 1.9604163906255068e-05, 
+ "loss": 0.8596, + "step": 2251 + }, + { + "epoch": 0.35, + "grad_norm": 19.786423520382826, + "learning_rate": 1.9603693992214707e-05, + "loss": 0.8678, + "step": 2252 + }, + { + "epoch": 0.35, + "grad_norm": 18.845369301044503, + "learning_rate": 1.9603223805050903e-05, + "loss": 0.8616, + "step": 2253 + }, + { + "epoch": 0.35, + "grad_norm": 21.754836853200725, + "learning_rate": 1.9602753344777036e-05, + "loss": 0.8292, + "step": 2254 + }, + { + "epoch": 0.35, + "grad_norm": 26.716550201073716, + "learning_rate": 1.960228261140648e-05, + "loss": 0.9063, + "step": 2255 + }, + { + "epoch": 0.35, + "grad_norm": 18.451365058301914, + "learning_rate": 1.960181160495262e-05, + "loss": 0.8574, + "step": 2256 + }, + { + "epoch": 0.35, + "grad_norm": 21.877343921425027, + "learning_rate": 1.960134032542886e-05, + "loss": 0.8011, + "step": 2257 + }, + { + "epoch": 0.35, + "grad_norm": 15.849301502671569, + "learning_rate": 1.9600868772848595e-05, + "loss": 0.8001, + "step": 2258 + }, + { + "epoch": 0.35, + "grad_norm": 19.364172779712053, + "learning_rate": 1.9600396947225238e-05, + "loss": 0.8061, + "step": 2259 + }, + { + "epoch": 0.35, + "grad_norm": 17.25579954351772, + "learning_rate": 1.959992484857221e-05, + "loss": 0.8852, + "step": 2260 + }, + { + "epoch": 0.35, + "grad_norm": 18.53819251681691, + "learning_rate": 1.9599452476902934e-05, + "loss": 0.7397, + "step": 2261 + }, + { + "epoch": 0.35, + "grad_norm": 17.778921804544083, + "learning_rate": 1.9598979832230846e-05, + "loss": 0.8344, + "step": 2262 + }, + { + "epoch": 0.35, + "grad_norm": 19.35354442268991, + "learning_rate": 1.9598506914569383e-05, + "loss": 0.789, + "step": 2263 + }, + { + "epoch": 0.35, + "grad_norm": 14.017006439206416, + "learning_rate": 1.9598033723932004e-05, + "loss": 0.8317, + "step": 2264 + }, + { + "epoch": 0.35, + "grad_norm": 28.88808198024425, + "learning_rate": 1.959756026033216e-05, + "loss": 0.8481, + "step": 2265 + }, + { + "epoch": 0.35, + "grad_norm": 21.361581158227146, + "learning_rate": 1.9597086523783312e-05, + "loss": 0.9174, + "step": 2266 + }, + { + "epoch": 0.35, + "grad_norm": 21.32743899042107, + "learning_rate": 1.959661251429894e-05, + "loss": 0.9045, + "step": 2267 + }, + { + "epoch": 0.35, + "grad_norm": 43.97896255880699, + "learning_rate": 1.959613823189252e-05, + "loss": 1.0044, + "step": 2268 + }, + { + "epoch": 0.35, + "grad_norm": 19.26575233791744, + "learning_rate": 1.9595663676577543e-05, + "loss": 0.8442, + "step": 2269 + }, + { + "epoch": 0.35, + "grad_norm": 18.3437053194103, + "learning_rate": 1.9595188848367507e-05, + "loss": 0.7827, + "step": 2270 + }, + { + "epoch": 0.35, + "grad_norm": 17.962052756168983, + "learning_rate": 1.959471374727591e-05, + "loss": 0.7672, + "step": 2271 + }, + { + "epoch": 0.35, + "grad_norm": 29.265136007937627, + "learning_rate": 1.959423837331627e-05, + "loss": 0.9122, + "step": 2272 + }, + { + "epoch": 0.36, + "grad_norm": 26.90766591102539, + "learning_rate": 1.95937627265021e-05, + "loss": 0.9845, + "step": 2273 + }, + { + "epoch": 0.36, + "grad_norm": 22.351767860703873, + "learning_rate": 1.9593286806846935e-05, + "loss": 0.6728, + "step": 2274 + }, + { + "epoch": 0.36, + "grad_norm": 28.274015619945366, + "learning_rate": 1.95928106143643e-05, + "loss": 0.9657, + "step": 2275 + }, + { + "epoch": 0.36, + "grad_norm": 21.033006236249136, + "learning_rate": 1.9592334149067745e-05, + "loss": 0.7861, + "step": 2276 + }, + { + "epoch": 0.36, + "grad_norm": 27.176641004022738, + "learning_rate": 1.9591857410970818e-05, + "loss": 
0.8808, + "step": 2277 + }, + { + "epoch": 0.36, + "grad_norm": 22.834020162842634, + "learning_rate": 1.9591380400087077e-05, + "loss": 0.9017, + "step": 2278 + }, + { + "epoch": 0.36, + "grad_norm": 20.08037626915052, + "learning_rate": 1.9590903116430087e-05, + "loss": 0.8966, + "step": 2279 + }, + { + "epoch": 0.36, + "grad_norm": 20.240594464788924, + "learning_rate": 1.9590425560013427e-05, + "loss": 0.8006, + "step": 2280 + }, + { + "epoch": 0.36, + "grad_norm": 16.109508313127826, + "learning_rate": 1.9589947730850668e-05, + "loss": 0.7795, + "step": 2281 + }, + { + "epoch": 0.36, + "grad_norm": 23.75527198715787, + "learning_rate": 1.958946962895541e-05, + "loss": 0.7473, + "step": 2282 + }, + { + "epoch": 0.36, + "grad_norm": 24.926261129035797, + "learning_rate": 1.9588991254341243e-05, + "loss": 0.8775, + "step": 2283 + }, + { + "epoch": 0.36, + "grad_norm": 17.210590480261835, + "learning_rate": 1.9588512607021777e-05, + "loss": 0.7519, + "step": 2284 + }, + { + "epoch": 0.36, + "grad_norm": 32.38915544885124, + "learning_rate": 1.958803368701062e-05, + "loss": 0.9165, + "step": 2285 + }, + { + "epoch": 0.36, + "grad_norm": 22.4473599510166, + "learning_rate": 1.9587554494321394e-05, + "loss": 0.7747, + "step": 2286 + }, + { + "epoch": 0.36, + "grad_norm": 20.650569556725742, + "learning_rate": 1.958707502896773e-05, + "loss": 0.8813, + "step": 2287 + }, + { + "epoch": 0.36, + "grad_norm": 23.36865528908284, + "learning_rate": 1.9586595290963254e-05, + "loss": 0.8279, + "step": 2288 + }, + { + "epoch": 0.36, + "grad_norm": 23.36321122746258, + "learning_rate": 1.958611528032162e-05, + "loss": 0.7433, + "step": 2289 + }, + { + "epoch": 0.36, + "grad_norm": 23.60395808911962, + "learning_rate": 1.9585634997056472e-05, + "loss": 0.9214, + "step": 2290 + }, + { + "epoch": 0.36, + "grad_norm": 19.629458455531346, + "learning_rate": 1.9585154441181475e-05, + "loss": 0.7866, + "step": 2291 + }, + { + "epoch": 0.36, + "grad_norm": 21.64594494261864, + "learning_rate": 1.9584673612710293e-05, + "loss": 0.8852, + "step": 2292 + }, + { + "epoch": 0.36, + "grad_norm": 18.88986593059227, + "learning_rate": 1.95841925116566e-05, + "loss": 0.811, + "step": 2293 + }, + { + "epoch": 0.36, + "grad_norm": 16.920206414764746, + "learning_rate": 1.9583711138034075e-05, + "loss": 0.9009, + "step": 2294 + }, + { + "epoch": 0.36, + "grad_norm": 36.281520411250064, + "learning_rate": 1.958322949185642e-05, + "loss": 0.8237, + "step": 2295 + }, + { + "epoch": 0.36, + "grad_norm": 17.502897981405205, + "learning_rate": 1.9582747573137315e-05, + "loss": 0.9108, + "step": 2296 + }, + { + "epoch": 0.36, + "grad_norm": 14.900124518863047, + "learning_rate": 1.958226538189048e-05, + "loss": 0.8748, + "step": 2297 + }, + { + "epoch": 0.36, + "grad_norm": 19.353509063519404, + "learning_rate": 1.958178291812962e-05, + "loss": 0.9391, + "step": 2298 + }, + { + "epoch": 0.36, + "grad_norm": 22.069704368787633, + "learning_rate": 1.9581300181868462e-05, + "loss": 0.7822, + "step": 2299 + }, + { + "epoch": 0.36, + "grad_norm": 19.563794908993, + "learning_rate": 1.958081717312073e-05, + "loss": 0.9021, + "step": 2300 + }, + { + "epoch": 0.36, + "grad_norm": 20.7725747077412, + "learning_rate": 1.9580333891900164e-05, + "loss": 0.8624, + "step": 2301 + }, + { + "epoch": 0.36, + "grad_norm": 15.52007982411239, + "learning_rate": 1.9579850338220502e-05, + "loss": 0.8048, + "step": 2302 + }, + { + "epoch": 0.36, + "grad_norm": 31.01456514921061, + "learning_rate": 1.9579366512095504e-05, + "loss": 0.8553, + "step": 
2303 + }, + { + "epoch": 0.36, + "grad_norm": 21.39003111946264, + "learning_rate": 1.9578882413538928e-05, + "loss": 0.8755, + "step": 2304 + }, + { + "epoch": 0.36, + "grad_norm": 21.87317373622345, + "learning_rate": 1.9578398042564534e-05, + "loss": 0.8392, + "step": 2305 + }, + { + "epoch": 0.36, + "grad_norm": 28.639158294850937, + "learning_rate": 1.9577913399186106e-05, + "loss": 0.8533, + "step": 2306 + }, + { + "epoch": 0.36, + "grad_norm": 16.981001838335168, + "learning_rate": 1.9577428483417424e-05, + "loss": 0.8814, + "step": 2307 + }, + { + "epoch": 0.36, + "grad_norm": 17.767328364359244, + "learning_rate": 1.957694329527228e-05, + "loss": 0.9141, + "step": 2308 + }, + { + "epoch": 0.36, + "grad_norm": 19.60064072758327, + "learning_rate": 1.9576457834764468e-05, + "loss": 0.8803, + "step": 2309 + }, + { + "epoch": 0.36, + "grad_norm": 13.785497650368177, + "learning_rate": 1.9575972101907798e-05, + "loss": 0.8541, + "step": 2310 + }, + { + "epoch": 0.36, + "grad_norm": 18.31662293800399, + "learning_rate": 1.9575486096716086e-05, + "loss": 0.8953, + "step": 2311 + }, + { + "epoch": 0.36, + "grad_norm": 21.52787343403029, + "learning_rate": 1.957499981920315e-05, + "loss": 0.7918, + "step": 2312 + }, + { + "epoch": 0.36, + "grad_norm": 21.865508904039164, + "learning_rate": 1.957451326938282e-05, + "loss": 0.8077, + "step": 2313 + }, + { + "epoch": 0.36, + "grad_norm": 25.315322527875505, + "learning_rate": 1.957402644726893e-05, + "loss": 0.8776, + "step": 2314 + }, + { + "epoch": 0.36, + "grad_norm": 11.644909527203188, + "learning_rate": 1.9573539352875333e-05, + "loss": 0.9013, + "step": 2315 + }, + { + "epoch": 0.36, + "grad_norm": 21.987844389972054, + "learning_rate": 1.9573051986215873e-05, + "loss": 0.7833, + "step": 2316 + }, + { + "epoch": 0.36, + "grad_norm": 21.81496192809406, + "learning_rate": 1.9572564347304418e-05, + "loss": 0.7088, + "step": 2317 + }, + { + "epoch": 0.36, + "grad_norm": 14.478514597867123, + "learning_rate": 1.957207643615483e-05, + "loss": 0.8507, + "step": 2318 + }, + { + "epoch": 0.36, + "grad_norm": 16.714268848564387, + "learning_rate": 1.9571588252780992e-05, + "loss": 0.8358, + "step": 2319 + }, + { + "epoch": 0.36, + "grad_norm": 24.02872563318518, + "learning_rate": 1.9571099797196778e-05, + "loss": 0.7872, + "step": 2320 + }, + { + "epoch": 0.36, + "grad_norm": 21.76587062007009, + "learning_rate": 1.957061106941609e-05, + "loss": 0.8648, + "step": 2321 + }, + { + "epoch": 0.36, + "grad_norm": 18.02049166892051, + "learning_rate": 1.9570122069452817e-05, + "loss": 0.8226, + "step": 2322 + }, + { + "epoch": 0.36, + "grad_norm": 17.835083095121952, + "learning_rate": 1.9569632797320876e-05, + "loss": 0.7784, + "step": 2323 + }, + { + "epoch": 0.36, + "grad_norm": 20.276543367213247, + "learning_rate": 1.9569143253034172e-05, + "loss": 0.8056, + "step": 2324 + }, + { + "epoch": 0.36, + "grad_norm": 18.884539515346667, + "learning_rate": 1.9568653436606632e-05, + "loss": 0.9212, + "step": 2325 + }, + { + "epoch": 0.36, + "grad_norm": 17.54858209589304, + "learning_rate": 1.9568163348052186e-05, + "loss": 0.7636, + "step": 2326 + }, + { + "epoch": 0.36, + "grad_norm": 23.923779145843948, + "learning_rate": 1.9567672987384774e-05, + "loss": 0.9018, + "step": 2327 + }, + { + "epoch": 0.36, + "grad_norm": 22.46457753946557, + "learning_rate": 1.9567182354618338e-05, + "loss": 0.9344, + "step": 2328 + }, + { + "epoch": 0.36, + "grad_norm": 29.285314327737893, + "learning_rate": 1.956669144976683e-05, + "loss": 0.8852, + "step": 2329 + }, 
+ { + "epoch": 0.36, + "grad_norm": 10.760738511625377, + "learning_rate": 1.9566200272844216e-05, + "loss": 0.716, + "step": 2330 + }, + { + "epoch": 0.36, + "grad_norm": 19.21331647187897, + "learning_rate": 1.956570882386446e-05, + "loss": 0.8791, + "step": 2331 + }, + { + "epoch": 0.36, + "grad_norm": 19.41563576630927, + "learning_rate": 1.9565217102841543e-05, + "loss": 0.8382, + "step": 2332 + }, + { + "epoch": 0.36, + "grad_norm": 17.750899110356364, + "learning_rate": 1.9564725109789447e-05, + "loss": 0.7417, + "step": 2333 + }, + { + "epoch": 0.36, + "grad_norm": 20.760458215179696, + "learning_rate": 1.9564232844722166e-05, + "loss": 0.7939, + "step": 2334 + }, + { + "epoch": 0.36, + "grad_norm": 16.114660816668017, + "learning_rate": 1.9563740307653693e-05, + "loss": 0.8228, + "step": 2335 + }, + { + "epoch": 0.36, + "grad_norm": 20.373569290927932, + "learning_rate": 1.9563247498598045e-05, + "loss": 0.8, + "step": 2336 + }, + { + "epoch": 0.37, + "grad_norm": 18.201653685477662, + "learning_rate": 1.956275441756923e-05, + "loss": 0.8496, + "step": 2337 + }, + { + "epoch": 0.37, + "grad_norm": 19.45357286711351, + "learning_rate": 1.9562261064581272e-05, + "loss": 0.8678, + "step": 2338 + }, + { + "epoch": 0.37, + "grad_norm": 18.698160175413193, + "learning_rate": 1.956176743964821e-05, + "loss": 0.9822, + "step": 2339 + }, + { + "epoch": 0.37, + "grad_norm": 17.29147152809738, + "learning_rate": 1.9561273542784067e-05, + "loss": 0.8554, + "step": 2340 + }, + { + "epoch": 0.37, + "grad_norm": 19.468092203891214, + "learning_rate": 1.9560779374002902e-05, + "loss": 0.9422, + "step": 2341 + }, + { + "epoch": 0.37, + "grad_norm": 16.90812846481586, + "learning_rate": 1.9560284933318763e-05, + "loss": 0.9153, + "step": 2342 + }, + { + "epoch": 0.37, + "grad_norm": 14.881066794651561, + "learning_rate": 1.9559790220745715e-05, + "loss": 0.7362, + "step": 2343 + }, + { + "epoch": 0.37, + "grad_norm": 18.834799798238674, + "learning_rate": 1.9559295236297822e-05, + "loss": 0.8588, + "step": 2344 + }, + { + "epoch": 0.37, + "grad_norm": 19.44682119945334, + "learning_rate": 1.9558799979989165e-05, + "loss": 0.8622, + "step": 2345 + }, + { + "epoch": 0.37, + "grad_norm": 19.7818948774698, + "learning_rate": 1.9558304451833828e-05, + "loss": 0.9244, + "step": 2346 + }, + { + "epoch": 0.37, + "grad_norm": 32.15240395829633, + "learning_rate": 1.955780865184591e-05, + "loss": 0.8391, + "step": 2347 + }, + { + "epoch": 0.37, + "grad_norm": 19.52928040606101, + "learning_rate": 1.95573125800395e-05, + "loss": 0.8631, + "step": 2348 + }, + { + "epoch": 0.37, + "grad_norm": 13.014948379107024, + "learning_rate": 1.955681623642871e-05, + "loss": 0.7669, + "step": 2349 + }, + { + "epoch": 0.37, + "grad_norm": 7.3606479985429365, + "learning_rate": 1.955631962102766e-05, + "loss": 0.9177, + "step": 2350 + }, + { + "epoch": 0.37, + "grad_norm": 16.70675317168515, + "learning_rate": 1.955582273385047e-05, + "loss": 0.7743, + "step": 2351 + }, + { + "epoch": 0.37, + "grad_norm": 16.959628858998226, + "learning_rate": 1.955532557491127e-05, + "loss": 0.7911, + "step": 2352 + }, + { + "epoch": 0.37, + "grad_norm": 22.870408728266078, + "learning_rate": 1.95548281442242e-05, + "loss": 0.7857, + "step": 2353 + }, + { + "epoch": 0.37, + "grad_norm": 16.12068729290322, + "learning_rate": 1.9554330441803406e-05, + "loss": 0.8328, + "step": 2354 + }, + { + "epoch": 0.37, + "grad_norm": 21.39010542509534, + "learning_rate": 1.9553832467663048e-05, + "loss": 0.8622, + "step": 2355 + }, + { + "epoch": 0.37, 
+ "grad_norm": 18.484470397819127, + "learning_rate": 1.955333422181728e-05, + "loss": 0.9214, + "step": 2356 + }, + { + "epoch": 0.37, + "grad_norm": 13.757632950277799, + "learning_rate": 1.955283570428027e-05, + "loss": 0.8439, + "step": 2357 + }, + { + "epoch": 0.37, + "grad_norm": 18.790934236685473, + "learning_rate": 1.955233691506621e-05, + "loss": 0.9286, + "step": 2358 + }, + { + "epoch": 0.37, + "grad_norm": 20.639654567005557, + "learning_rate": 1.9551837854189274e-05, + "loss": 0.8256, + "step": 2359 + }, + { + "epoch": 0.37, + "grad_norm": 15.339632657461078, + "learning_rate": 1.9551338521663656e-05, + "loss": 0.8338, + "step": 2360 + }, + { + "epoch": 0.37, + "grad_norm": 20.92224329452618, + "learning_rate": 1.9550838917503557e-05, + "loss": 0.937, + "step": 2361 + }, + { + "epoch": 0.37, + "grad_norm": 24.22799428542671, + "learning_rate": 1.9550339041723188e-05, + "loss": 1.0115, + "step": 2362 + }, + { + "epoch": 0.37, + "grad_norm": 15.104370162075357, + "learning_rate": 1.954983889433676e-05, + "loss": 0.7641, + "step": 2363 + }, + { + "epoch": 0.37, + "grad_norm": 26.204035727222347, + "learning_rate": 1.9549338475358505e-05, + "loss": 0.9896, + "step": 2364 + }, + { + "epoch": 0.37, + "grad_norm": 15.977562247710003, + "learning_rate": 1.9548837784802646e-05, + "loss": 0.7483, + "step": 2365 + }, + { + "epoch": 0.37, + "grad_norm": 18.580521959104235, + "learning_rate": 1.9548336822683426e-05, + "loss": 0.8389, + "step": 2366 + }, + { + "epoch": 0.37, + "grad_norm": 14.841327111116088, + "learning_rate": 1.9547835589015095e-05, + "loss": 0.7908, + "step": 2367 + }, + { + "epoch": 0.37, + "grad_norm": 19.442799154015507, + "learning_rate": 1.9547334083811906e-05, + "loss": 0.9086, + "step": 2368 + }, + { + "epoch": 0.37, + "grad_norm": 23.129577828623827, + "learning_rate": 1.954683230708812e-05, + "loss": 0.8035, + "step": 2369 + }, + { + "epoch": 0.37, + "grad_norm": 16.687118091034925, + "learning_rate": 1.9546330258858007e-05, + "loss": 0.9282, + "step": 2370 + }, + { + "epoch": 0.37, + "grad_norm": 18.267640544320162, + "learning_rate": 1.9545827939135846e-05, + "loss": 0.8739, + "step": 2371 + }, + { + "epoch": 0.37, + "grad_norm": 21.876459420089844, + "learning_rate": 1.9545325347935923e-05, + "loss": 0.8541, + "step": 2372 + }, + { + "epoch": 0.37, + "grad_norm": 15.80787492667919, + "learning_rate": 1.9544822485272528e-05, + "loss": 0.8464, + "step": 2373 + }, + { + "epoch": 0.37, + "grad_norm": 19.987684051973616, + "learning_rate": 1.954431935115997e-05, + "loss": 0.876, + "step": 2374 + }, + { + "epoch": 0.37, + "grad_norm": 19.834476586173817, + "learning_rate": 1.9543815945612548e-05, + "loss": 0.8347, + "step": 2375 + }, + { + "epoch": 0.37, + "grad_norm": 22.49566676745921, + "learning_rate": 1.9543312268644586e-05, + "loss": 0.9176, + "step": 2376 + }, + { + "epoch": 0.37, + "grad_norm": 11.844859224781588, + "learning_rate": 1.9542808320270405e-05, + "loss": 0.7221, + "step": 2377 + }, + { + "epoch": 0.37, + "grad_norm": 21.313890675370274, + "learning_rate": 1.954230410050434e-05, + "loss": 0.8976, + "step": 2378 + }, + { + "epoch": 0.37, + "grad_norm": 16.046858249402987, + "learning_rate": 1.9541799609360727e-05, + "loss": 0.8804, + "step": 2379 + }, + { + "epoch": 0.37, + "grad_norm": 15.667118069864024, + "learning_rate": 1.9541294846853914e-05, + "loss": 0.7766, + "step": 2380 + }, + { + "epoch": 0.37, + "grad_norm": 17.285024985122114, + "learning_rate": 1.954078981299826e-05, + "loss": 0.7479, + "step": 2381 + }, + { + "epoch": 0.37, + 
"grad_norm": 18.918423588302655, + "learning_rate": 1.954028450780812e-05, + "loss": 0.8779, + "step": 2382 + }, + { + "epoch": 0.37, + "grad_norm": 16.615739365173884, + "learning_rate": 1.9539778931297874e-05, + "loss": 0.7884, + "step": 2383 + }, + { + "epoch": 0.37, + "grad_norm": 20.350674587510465, + "learning_rate": 1.9539273083481894e-05, + "loss": 0.8458, + "step": 2384 + }, + { + "epoch": 0.37, + "grad_norm": 15.364339834697589, + "learning_rate": 1.953876696437457e-05, + "loss": 0.7645, + "step": 2385 + }, + { + "epoch": 0.37, + "grad_norm": 22.216220994129927, + "learning_rate": 1.953826057399029e-05, + "loss": 0.833, + "step": 2386 + }, + { + "epoch": 0.37, + "grad_norm": 18.280495522490607, + "learning_rate": 1.953775391234346e-05, + "loss": 0.8504, + "step": 2387 + }, + { + "epoch": 0.37, + "grad_norm": 19.523350656292592, + "learning_rate": 1.953724697944849e-05, + "loss": 0.83, + "step": 2388 + }, + { + "epoch": 0.37, + "grad_norm": 16.70498830699112, + "learning_rate": 1.9536739775319793e-05, + "loss": 0.7669, + "step": 2389 + }, + { + "epoch": 0.37, + "grad_norm": 16.407102977740667, + "learning_rate": 1.9536232299971797e-05, + "loss": 0.7656, + "step": 2390 + }, + { + "epoch": 0.37, + "grad_norm": 18.86788595577168, + "learning_rate": 1.953572455341893e-05, + "loss": 0.9123, + "step": 2391 + }, + { + "epoch": 0.37, + "grad_norm": 23.757444997301512, + "learning_rate": 1.9535216535675636e-05, + "loss": 0.8602, + "step": 2392 + }, + { + "epoch": 0.37, + "grad_norm": 19.303482902670957, + "learning_rate": 1.9534708246756364e-05, + "loss": 0.8665, + "step": 2393 + }, + { + "epoch": 0.37, + "grad_norm": 26.802081116741597, + "learning_rate": 1.9534199686675567e-05, + "loss": 0.8638, + "step": 2394 + }, + { + "epoch": 0.37, + "grad_norm": 19.25971291935905, + "learning_rate": 1.9533690855447706e-05, + "loss": 0.7855, + "step": 2395 + }, + { + "epoch": 0.37, + "grad_norm": 39.078096568216615, + "learning_rate": 1.9533181753087255e-05, + "loss": 0.8725, + "step": 2396 + }, + { + "epoch": 0.37, + "grad_norm": 22.16279272140576, + "learning_rate": 1.953267237960869e-05, + "loss": 0.9653, + "step": 2397 + }, + { + "epoch": 0.37, + "grad_norm": 23.145180519751115, + "learning_rate": 1.9532162735026503e-05, + "loss": 0.8258, + "step": 2398 + }, + { + "epoch": 0.37, + "grad_norm": 19.526106660730825, + "learning_rate": 1.9531652819355184e-05, + "loss": 0.795, + "step": 2399 + }, + { + "epoch": 0.37, + "grad_norm": 21.05502578024802, + "learning_rate": 1.953114263260923e-05, + "loss": 0.7586, + "step": 2400 + }, + { + "epoch": 0.38, + "grad_norm": 23.07608855269181, + "learning_rate": 1.9530632174803158e-05, + "loss": 0.8031, + "step": 2401 + }, + { + "epoch": 0.38, + "grad_norm": 18.335398720951122, + "learning_rate": 1.953012144595148e-05, + "loss": 0.8231, + "step": 2402 + }, + { + "epoch": 0.38, + "grad_norm": 20.26518463444549, + "learning_rate": 1.9529610446068727e-05, + "loss": 0.7897, + "step": 2403 + }, + { + "epoch": 0.38, + "grad_norm": 17.46569208530518, + "learning_rate": 1.9529099175169425e-05, + "loss": 0.8447, + "step": 2404 + }, + { + "epoch": 0.38, + "grad_norm": 12.051389937276333, + "learning_rate": 1.952858763326812e-05, + "loss": 0.7447, + "step": 2405 + }, + { + "epoch": 0.38, + "grad_norm": 13.646038065046703, + "learning_rate": 1.9528075820379353e-05, + "loss": 0.732, + "step": 2406 + }, + { + "epoch": 0.38, + "grad_norm": 19.79895137956602, + "learning_rate": 1.9527563736517683e-05, + "loss": 0.788, + "step": 2407 + }, + { + "epoch": 0.38, + "grad_norm": 
20.013451145876225, + "learning_rate": 1.952705138169768e-05, + "loss": 0.8152, + "step": 2408 + }, + { + "epoch": 0.38, + "grad_norm": 16.993917869946696, + "learning_rate": 1.95265387559339e-05, + "loss": 0.7993, + "step": 2409 + }, + { + "epoch": 0.38, + "grad_norm": 27.754758625511705, + "learning_rate": 1.9526025859240936e-05, + "loss": 0.8487, + "step": 2410 + }, + { + "epoch": 0.38, + "grad_norm": 15.465520094814222, + "learning_rate": 1.952551269163337e-05, + "loss": 0.9287, + "step": 2411 + }, + { + "epoch": 0.38, + "grad_norm": 19.208621756966533, + "learning_rate": 1.9524999253125792e-05, + "loss": 0.7842, + "step": 2412 + }, + { + "epoch": 0.38, + "grad_norm": 22.480105149736534, + "learning_rate": 1.952448554373281e-05, + "loss": 0.9469, + "step": 2413 + }, + { + "epoch": 0.38, + "grad_norm": 21.46800276512284, + "learning_rate": 1.9523971563469026e-05, + "loss": 0.9319, + "step": 2414 + }, + { + "epoch": 0.38, + "grad_norm": 28.447086995510453, + "learning_rate": 1.9523457312349067e-05, + "loss": 0.832, + "step": 2415 + }, + { + "epoch": 0.38, + "grad_norm": 30.189566063717486, + "learning_rate": 1.9522942790387552e-05, + "loss": 0.9581, + "step": 2416 + }, + { + "epoch": 0.38, + "grad_norm": 14.866945686719145, + "learning_rate": 1.9522427997599113e-05, + "loss": 0.7988, + "step": 2417 + }, + { + "epoch": 0.38, + "grad_norm": 21.18456878190828, + "learning_rate": 1.9521912933998392e-05, + "loss": 0.9309, + "step": 2418 + }, + { + "epoch": 0.38, + "grad_norm": 24.084679660748574, + "learning_rate": 1.9521397599600042e-05, + "loss": 0.788, + "step": 2419 + }, + { + "epoch": 0.38, + "grad_norm": 24.8868244206621, + "learning_rate": 1.952088199441871e-05, + "loss": 0.8429, + "step": 2420 + }, + { + "epoch": 0.38, + "grad_norm": 20.5815178602056, + "learning_rate": 1.9520366118469064e-05, + "loss": 0.9069, + "step": 2421 + }, + { + "epoch": 0.38, + "grad_norm": 19.245618282799214, + "learning_rate": 1.9519849971765775e-05, + "loss": 0.8289, + "step": 2422 + }, + { + "epoch": 0.38, + "grad_norm": 19.110846508041238, + "learning_rate": 1.951933355432352e-05, + "loss": 0.9091, + "step": 2423 + }, + { + "epoch": 0.38, + "grad_norm": 18.453560795753642, + "learning_rate": 1.9518816866156994e-05, + "loss": 0.844, + "step": 2424 + }, + { + "epoch": 0.38, + "grad_norm": 23.633751101179815, + "learning_rate": 1.951829990728088e-05, + "loss": 0.8443, + "step": 2425 + }, + { + "epoch": 0.38, + "grad_norm": 18.87846786728831, + "learning_rate": 1.951778267770988e-05, + "loss": 0.8341, + "step": 2426 + }, + { + "epoch": 0.38, + "grad_norm": 21.67956006674215, + "learning_rate": 1.9517265177458716e-05, + "loss": 0.8208, + "step": 2427 + }, + { + "epoch": 0.38, + "grad_norm": 27.744037822142193, + "learning_rate": 1.9516747406542097e-05, + "loss": 0.789, + "step": 2428 + }, + { + "epoch": 0.38, + "grad_norm": 19.707457612097446, + "learning_rate": 1.9516229364974748e-05, + "loss": 0.8049, + "step": 2429 + }, + { + "epoch": 0.38, + "grad_norm": 21.061050355247907, + "learning_rate": 1.95157110527714e-05, + "loss": 0.9428, + "step": 2430 + }, + { + "epoch": 0.38, + "grad_norm": 17.28179708653034, + "learning_rate": 1.95151924699468e-05, + "loss": 0.8558, + "step": 2431 + }, + { + "epoch": 0.38, + "grad_norm": 18.939199239990796, + "learning_rate": 1.9514673616515686e-05, + "loss": 0.8189, + "step": 2432 + }, + { + "epoch": 0.38, + "grad_norm": 25.907680305183728, + "learning_rate": 1.9514154492492827e-05, + "loss": 0.7942, + "step": 2433 + }, + { + "epoch": 0.38, + "grad_norm": 
23.527868800517016, + "learning_rate": 1.9513635097892975e-05, + "loss": 0.9862, + "step": 2434 + }, + { + "epoch": 0.38, + "grad_norm": 26.291971377888153, + "learning_rate": 1.951311543273091e-05, + "loss": 0.859, + "step": 2435 + }, + { + "epoch": 0.38, + "grad_norm": 21.189825695882313, + "learning_rate": 1.9512595497021406e-05, + "loss": 0.8974, + "step": 2436 + }, + { + "epoch": 0.38, + "grad_norm": 21.843778903543672, + "learning_rate": 1.951207529077925e-05, + "loss": 0.8477, + "step": 2437 + }, + { + "epoch": 0.38, + "grad_norm": 27.592198517817323, + "learning_rate": 1.9511554814019237e-05, + "loss": 0.8692, + "step": 2438 + }, + { + "epoch": 0.38, + "grad_norm": 29.270361096022043, + "learning_rate": 1.951103406675617e-05, + "loss": 0.8998, + "step": 2439 + }, + { + "epoch": 0.38, + "grad_norm": 27.333147997392555, + "learning_rate": 1.951051304900486e-05, + "loss": 0.8525, + "step": 2440 + }, + { + "epoch": 0.38, + "grad_norm": 16.34717099325237, + "learning_rate": 1.9509991760780117e-05, + "loss": 0.8892, + "step": 2441 + }, + { + "epoch": 0.38, + "grad_norm": 23.99948084662135, + "learning_rate": 1.9509470202096774e-05, + "loss": 0.8871, + "step": 2442 + }, + { + "epoch": 0.38, + "grad_norm": 19.181937820280993, + "learning_rate": 1.9508948372969664e-05, + "loss": 0.9362, + "step": 2443 + }, + { + "epoch": 0.38, + "grad_norm": 21.371574580522392, + "learning_rate": 1.950842627341362e-05, + "loss": 0.8514, + "step": 2444 + }, + { + "epoch": 0.38, + "grad_norm": 21.015313346849275, + "learning_rate": 1.95079039034435e-05, + "loss": 0.8166, + "step": 2445 + }, + { + "epoch": 0.38, + "grad_norm": 22.168714169644574, + "learning_rate": 1.9507381263074153e-05, + "loss": 0.7802, + "step": 2446 + }, + { + "epoch": 0.38, + "grad_norm": 17.710394869090457, + "learning_rate": 1.9506858352320444e-05, + "loss": 0.8928, + "step": 2447 + }, + { + "epoch": 0.38, + "grad_norm": 24.27983543026391, + "learning_rate": 1.9506335171197243e-05, + "loss": 0.8349, + "step": 2448 + }, + { + "epoch": 0.38, + "grad_norm": 16.263049157155898, + "learning_rate": 1.9505811719719435e-05, + "loss": 0.8699, + "step": 2449 + }, + { + "epoch": 0.38, + "grad_norm": 12.720897475693256, + "learning_rate": 1.95052879979019e-05, + "loss": 0.8266, + "step": 2450 + }, + { + "epoch": 0.38, + "grad_norm": 15.566849225297409, + "learning_rate": 1.9504764005759534e-05, + "loss": 0.8193, + "step": 2451 + }, + { + "epoch": 0.38, + "grad_norm": 14.10746439220069, + "learning_rate": 1.950423974330724e-05, + "loss": 0.7747, + "step": 2452 + }, + { + "epoch": 0.38, + "grad_norm": 26.526712596026236, + "learning_rate": 1.9503715210559928e-05, + "loss": 0.8887, + "step": 2453 + }, + { + "epoch": 0.38, + "grad_norm": 20.950628753504276, + "learning_rate": 1.9503190407532514e-05, + "loss": 0.8675, + "step": 2454 + }, + { + "epoch": 0.38, + "grad_norm": 16.83666826684625, + "learning_rate": 1.9502665334239926e-05, + "loss": 0.7574, + "step": 2455 + }, + { + "epoch": 0.38, + "grad_norm": 21.02049736775882, + "learning_rate": 1.9502139990697095e-05, + "loss": 0.8922, + "step": 2456 + }, + { + "epoch": 0.38, + "grad_norm": 22.486613397471082, + "learning_rate": 1.9501614376918963e-05, + "loss": 0.8107, + "step": 2457 + }, + { + "epoch": 0.38, + "grad_norm": 25.76034383741656, + "learning_rate": 1.9501088492920472e-05, + "loss": 0.7128, + "step": 2458 + }, + { + "epoch": 0.38, + "grad_norm": 21.59133177161005, + "learning_rate": 1.9500562338716584e-05, + "loss": 0.892, + "step": 2459 + }, + { + "epoch": 0.38, + "grad_norm": 
14.671197644036523, + "learning_rate": 1.9500035914322263e-05, + "loss": 0.7916, + "step": 2460 + }, + { + "epoch": 0.38, + "grad_norm": 24.787109359369694, + "learning_rate": 1.9499509219752473e-05, + "loss": 0.8417, + "step": 2461 + }, + { + "epoch": 0.38, + "grad_norm": 15.881299648109724, + "learning_rate": 1.94989822550222e-05, + "loss": 0.8222, + "step": 2462 + }, + { + "epoch": 0.38, + "grad_norm": 31.415744304819388, + "learning_rate": 1.9498455020146428e-05, + "loss": 0.8926, + "step": 2463 + }, + { + "epoch": 0.38, + "grad_norm": 29.347522763218166, + "learning_rate": 1.9497927515140155e-05, + "loss": 0.9163, + "step": 2464 + }, + { + "epoch": 0.39, + "grad_norm": 14.668232378260434, + "learning_rate": 1.9497399740018376e-05, + "loss": 0.7106, + "step": 2465 + }, + { + "epoch": 0.39, + "grad_norm": 19.630541805620698, + "learning_rate": 1.9496871694796106e-05, + "loss": 0.7943, + "step": 2466 + }, + { + "epoch": 0.39, + "grad_norm": 23.891356055337113, + "learning_rate": 1.949634337948836e-05, + "loss": 0.8403, + "step": 2467 + }, + { + "epoch": 0.39, + "grad_norm": 16.755547365964734, + "learning_rate": 1.9495814794110165e-05, + "loss": 0.6969, + "step": 2468 + }, + { + "epoch": 0.39, + "grad_norm": 21.946137380188254, + "learning_rate": 1.949528593867655e-05, + "loss": 0.8803, + "step": 2469 + }, + { + "epoch": 0.39, + "grad_norm": 25.55641275230682, + "learning_rate": 1.9494756813202558e-05, + "loss": 0.8794, + "step": 2470 + }, + { + "epoch": 0.39, + "grad_norm": 16.42324417424368, + "learning_rate": 1.9494227417703237e-05, + "loss": 0.8305, + "step": 2471 + }, + { + "epoch": 0.39, + "grad_norm": 38.59207416980971, + "learning_rate": 1.949369775219364e-05, + "loss": 0.9413, + "step": 2472 + }, + { + "epoch": 0.39, + "grad_norm": 16.52672246064103, + "learning_rate": 1.9493167816688837e-05, + "loss": 0.8335, + "step": 2473 + }, + { + "epoch": 0.39, + "grad_norm": 37.97518438284426, + "learning_rate": 1.949263761120389e-05, + "loss": 0.9332, + "step": 2474 + }, + { + "epoch": 0.39, + "grad_norm": 26.705251099045753, + "learning_rate": 1.9492107135753884e-05, + "loss": 0.7934, + "step": 2475 + }, + { + "epoch": 0.39, + "grad_norm": 29.23079161444644, + "learning_rate": 1.9491576390353906e-05, + "loss": 0.9349, + "step": 2476 + }, + { + "epoch": 0.39, + "grad_norm": 25.89753302696428, + "learning_rate": 1.9491045375019043e-05, + "loss": 0.8761, + "step": 2477 + }, + { + "epoch": 0.39, + "grad_norm": 29.637885229490248, + "learning_rate": 1.949051408976441e-05, + "loss": 0.7511, + "step": 2478 + }, + { + "epoch": 0.39, + "grad_norm": 16.165437200175504, + "learning_rate": 1.9489982534605098e-05, + "loss": 0.7051, + "step": 2479 + }, + { + "epoch": 0.39, + "grad_norm": 23.88011490371271, + "learning_rate": 1.948945070955624e-05, + "loss": 0.7667, + "step": 2480 + }, + { + "epoch": 0.39, + "grad_norm": 18.006794537053363, + "learning_rate": 1.9488918614632953e-05, + "loss": 0.6903, + "step": 2481 + }, + { + "epoch": 0.39, + "grad_norm": 22.41213134631574, + "learning_rate": 1.9488386249850372e-05, + "loss": 0.8073, + "step": 2482 + }, + { + "epoch": 0.39, + "grad_norm": 24.74164763735822, + "learning_rate": 1.9487853615223636e-05, + "loss": 0.8618, + "step": 2483 + }, + { + "epoch": 0.39, + "grad_norm": 28.02158039608608, + "learning_rate": 1.9487320710767898e-05, + "loss": 0.9661, + "step": 2484 + }, + { + "epoch": 0.39, + "grad_norm": 25.41546536046868, + "learning_rate": 1.9486787536498304e-05, + "loss": 0.8838, + "step": 2485 + }, + { + "epoch": 0.39, + "grad_norm": 
16.02531547412535, + "learning_rate": 1.9486254092430022e-05, + "loss": 0.9066, + "step": 2486 + }, + { + "epoch": 0.39, + "grad_norm": 25.248648046662286, + "learning_rate": 1.9485720378578225e-05, + "loss": 0.8172, + "step": 2487 + }, + { + "epoch": 0.39, + "grad_norm": 21.50523668204783, + "learning_rate": 1.948518639495809e-05, + "loss": 0.7969, + "step": 2488 + }, + { + "epoch": 0.39, + "grad_norm": 34.12783320336865, + "learning_rate": 1.94846521415848e-05, + "loss": 0.9122, + "step": 2489 + }, + { + "epoch": 0.39, + "grad_norm": 30.81649489649323, + "learning_rate": 1.9484117618473553e-05, + "loss": 0.8958, + "step": 2490 + }, + { + "epoch": 0.39, + "grad_norm": 29.140569533430803, + "learning_rate": 1.948358282563955e-05, + "loss": 0.8575, + "step": 2491 + }, + { + "epoch": 0.39, + "grad_norm": 17.109909442061948, + "learning_rate": 1.9483047763097994e-05, + "loss": 0.8475, + "step": 2492 + }, + { + "epoch": 0.39, + "grad_norm": 29.933604053862425, + "learning_rate": 1.9482512430864113e-05, + "loss": 0.8546, + "step": 2493 + }, + { + "epoch": 0.39, + "grad_norm": 21.782840037220737, + "learning_rate": 1.9481976828953126e-05, + "loss": 0.848, + "step": 2494 + }, + { + "epoch": 0.39, + "grad_norm": 22.515427145793463, + "learning_rate": 1.9481440957380258e-05, + "loss": 0.8328, + "step": 2495 + }, + { + "epoch": 0.39, + "grad_norm": 21.258223283970217, + "learning_rate": 1.948090481616076e-05, + "loss": 0.9235, + "step": 2496 + }, + { + "epoch": 0.39, + "grad_norm": 17.32457242172472, + "learning_rate": 1.9480368405309876e-05, + "loss": 0.8043, + "step": 2497 + }, + { + "epoch": 0.39, + "grad_norm": 28.00501584092179, + "learning_rate": 1.9479831724842862e-05, + "loss": 0.8732, + "step": 2498 + }, + { + "epoch": 0.39, + "grad_norm": 15.98044102978033, + "learning_rate": 1.9479294774774974e-05, + "loss": 0.8157, + "step": 2499 + }, + { + "epoch": 0.39, + "grad_norm": 20.99142914663275, + "learning_rate": 1.9478757555121493e-05, + "loss": 0.8674, + "step": 2500 + }, + { + "epoch": 0.39, + "grad_norm": 15.768989350516703, + "learning_rate": 1.9478220065897687e-05, + "loss": 0.7731, + "step": 2501 + }, + { + "epoch": 0.39, + "grad_norm": 21.897985533324526, + "learning_rate": 1.947768230711885e-05, + "loss": 0.9334, + "step": 2502 + }, + { + "epoch": 0.39, + "grad_norm": 27.057438971284018, + "learning_rate": 1.9477144278800275e-05, + "loss": 0.8583, + "step": 2503 + }, + { + "epoch": 0.39, + "grad_norm": 23.44126465952835, + "learning_rate": 1.9476605980957258e-05, + "loss": 0.8965, + "step": 2504 + }, + { + "epoch": 0.39, + "grad_norm": 13.897611667484993, + "learning_rate": 1.947606741360511e-05, + "loss": 0.7554, + "step": 2505 + }, + { + "epoch": 0.39, + "grad_norm": 32.64812153674515, + "learning_rate": 1.9475528576759147e-05, + "loss": 0.8532, + "step": 2506 + }, + { + "epoch": 0.39, + "grad_norm": 33.427141424698995, + "learning_rate": 1.9474989470434695e-05, + "loss": 0.7965, + "step": 2507 + }, + { + "epoch": 0.39, + "grad_norm": 16.03586781371496, + "learning_rate": 1.9474450094647087e-05, + "loss": 0.7687, + "step": 2508 + }, + { + "epoch": 0.39, + "grad_norm": 22.1414637793719, + "learning_rate": 1.947391044941166e-05, + "loss": 0.7306, + "step": 2509 + }, + { + "epoch": 0.39, + "grad_norm": 16.98787337028632, + "learning_rate": 1.947337053474376e-05, + "loss": 0.9005, + "step": 2510 + }, + { + "epoch": 0.39, + "grad_norm": 13.887055852476287, + "learning_rate": 1.9472830350658748e-05, + "loss": 0.7767, + "step": 2511 + }, + { + "epoch": 0.39, + "grad_norm": 
17.22012925811569, + "learning_rate": 1.9472289897171978e-05, + "loss": 0.8296, + "step": 2512 + }, + { + "epoch": 0.39, + "grad_norm": 27.639276062421043, + "learning_rate": 1.9471749174298827e-05, + "loss": 0.8719, + "step": 2513 + }, + { + "epoch": 0.39, + "grad_norm": 13.703361496140543, + "learning_rate": 1.9471208182054668e-05, + "loss": 0.735, + "step": 2514 + }, + { + "epoch": 0.39, + "grad_norm": 14.538841045503059, + "learning_rate": 1.947066692045489e-05, + "loss": 0.8326, + "step": 2515 + }, + { + "epoch": 0.39, + "grad_norm": 16.81425147168844, + "learning_rate": 1.9470125389514884e-05, + "loss": 0.813, + "step": 2516 + }, + { + "epoch": 0.39, + "grad_norm": 19.290948836591507, + "learning_rate": 1.9469583589250055e-05, + "loss": 0.8913, + "step": 2517 + }, + { + "epoch": 0.39, + "grad_norm": 20.048454680311696, + "learning_rate": 1.9469041519675806e-05, + "loss": 0.8645, + "step": 2518 + }, + { + "epoch": 0.39, + "grad_norm": 24.305522683022993, + "learning_rate": 1.9468499180807554e-05, + "loss": 0.8648, + "step": 2519 + }, + { + "epoch": 0.39, + "grad_norm": 19.836333264507807, + "learning_rate": 1.9467956572660727e-05, + "loss": 0.7702, + "step": 2520 + }, + { + "epoch": 0.39, + "grad_norm": 20.05402738118547, + "learning_rate": 1.9467413695250753e-05, + "loss": 0.9407, + "step": 2521 + }, + { + "epoch": 0.39, + "grad_norm": 18.635108099384144, + "learning_rate": 1.946687054859307e-05, + "loss": 0.9345, + "step": 2522 + }, + { + "epoch": 0.39, + "grad_norm": 29.09553264643005, + "learning_rate": 1.9466327132703128e-05, + "loss": 0.8608, + "step": 2523 + }, + { + "epoch": 0.39, + "grad_norm": 20.853735737980077, + "learning_rate": 1.946578344759638e-05, + "loss": 0.8349, + "step": 2524 + }, + { + "epoch": 0.39, + "grad_norm": 21.318348150735257, + "learning_rate": 1.946523949328829e-05, + "loss": 0.9876, + "step": 2525 + }, + { + "epoch": 0.39, + "grad_norm": 17.541723103066253, + "learning_rate": 1.9464695269794322e-05, + "loss": 0.8498, + "step": 2526 + }, + { + "epoch": 0.39, + "grad_norm": 18.278090788039147, + "learning_rate": 1.9464150777129956e-05, + "loss": 0.848, + "step": 2527 + }, + { + "epoch": 0.39, + "grad_norm": 17.57808443785332, + "learning_rate": 1.9463606015310684e-05, + "loss": 0.8322, + "step": 2528 + }, + { + "epoch": 0.4, + "grad_norm": 24.491282169821478, + "learning_rate": 1.9463060984351988e-05, + "loss": 0.8076, + "step": 2529 + }, + { + "epoch": 0.4, + "grad_norm": 19.797416678848354, + "learning_rate": 1.946251568426938e-05, + "loss": 0.8709, + "step": 2530 + }, + { + "epoch": 0.4, + "grad_norm": 24.77472292459336, + "learning_rate": 1.9461970115078356e-05, + "loss": 0.8743, + "step": 2531 + }, + { + "epoch": 0.4, + "grad_norm": 25.31539705440156, + "learning_rate": 1.9461424276794435e-05, + "loss": 0.8091, + "step": 2532 + }, + { + "epoch": 0.4, + "grad_norm": 28.366568187486834, + "learning_rate": 1.9460878169433147e-05, + "loss": 1.0113, + "step": 2533 + }, + { + "epoch": 0.4, + "grad_norm": 32.48096707042649, + "learning_rate": 1.9460331793010018e-05, + "loss": 0.9509, + "step": 2534 + }, + { + "epoch": 0.4, + "grad_norm": 20.78370720239767, + "learning_rate": 1.9459785147540585e-05, + "loss": 0.9013, + "step": 2535 + }, + { + "epoch": 0.4, + "grad_norm": 16.215463192254308, + "learning_rate": 1.9459238233040393e-05, + "loss": 0.7608, + "step": 2536 + }, + { + "epoch": 0.4, + "grad_norm": 29.078353551943156, + "learning_rate": 1.9458691049525003e-05, + "loss": 0.7252, + "step": 2537 + }, + { + "epoch": 0.4, + "grad_norm": 23.396372730432347, 
+ "learning_rate": 1.9458143597009974e-05, + "loss": 0.7558, + "step": 2538 + }, + { + "epoch": 0.4, + "grad_norm": 21.206016682786856, + "learning_rate": 1.9457595875510874e-05, + "loss": 0.8054, + "step": 2539 + }, + { + "epoch": 0.4, + "grad_norm": 14.420907792507922, + "learning_rate": 1.9457047885043275e-05, + "loss": 0.7454, + "step": 2540 + }, + { + "epoch": 0.4, + "grad_norm": 31.388612438205207, + "learning_rate": 1.945649962562277e-05, + "loss": 0.8577, + "step": 2541 + }, + { + "epoch": 0.4, + "grad_norm": 19.179107481660534, + "learning_rate": 1.9455951097264947e-05, + "loss": 0.8072, + "step": 2542 + }, + { + "epoch": 0.4, + "grad_norm": 27.118587854484836, + "learning_rate": 1.9455402299985408e-05, + "loss": 0.8226, + "step": 2543 + }, + { + "epoch": 0.4, + "grad_norm": 34.098309812510784, + "learning_rate": 1.9454853233799756e-05, + "loss": 0.8972, + "step": 2544 + }, + { + "epoch": 0.4, + "grad_norm": 16.84101018093514, + "learning_rate": 1.9454303898723613e-05, + "loss": 0.8312, + "step": 2545 + }, + { + "epoch": 0.4, + "grad_norm": 16.21854358745712, + "learning_rate": 1.9453754294772593e-05, + "loss": 0.8243, + "step": 2546 + }, + { + "epoch": 0.4, + "grad_norm": 27.23783987916674, + "learning_rate": 1.9453204421962334e-05, + "loss": 0.846, + "step": 2547 + }, + { + "epoch": 0.4, + "grad_norm": 22.92782513595387, + "learning_rate": 1.945265428030847e-05, + "loss": 0.8237, + "step": 2548 + }, + { + "epoch": 0.4, + "grad_norm": 20.45996879303278, + "learning_rate": 1.945210386982665e-05, + "loss": 0.9023, + "step": 2549 + }, + { + "epoch": 0.4, + "grad_norm": 16.74723628226207, + "learning_rate": 1.9451553190532523e-05, + "loss": 0.7803, + "step": 2550 + }, + { + "epoch": 0.4, + "grad_norm": 21.662748966228154, + "learning_rate": 1.945100224244175e-05, + "loss": 0.8573, + "step": 2551 + }, + { + "epoch": 0.4, + "grad_norm": 26.167088374355984, + "learning_rate": 1.9450451025570006e-05, + "loss": 0.8045, + "step": 2552 + }, + { + "epoch": 0.4, + "grad_norm": 17.369647147470108, + "learning_rate": 1.9449899539932962e-05, + "loss": 0.8749, + "step": 2553 + }, + { + "epoch": 0.4, + "grad_norm": 14.154262547420428, + "learning_rate": 1.9449347785546303e-05, + "loss": 0.8593, + "step": 2554 + }, + { + "epoch": 0.4, + "grad_norm": 22.46712800738409, + "learning_rate": 1.9448795762425722e-05, + "loss": 0.8296, + "step": 2555 + }, + { + "epoch": 0.4, + "grad_norm": 17.195440759834735, + "learning_rate": 1.9448243470586913e-05, + "loss": 0.8126, + "step": 2556 + }, + { + "epoch": 0.4, + "grad_norm": 16.662371809438405, + "learning_rate": 1.944769091004559e-05, + "loss": 0.8672, + "step": 2557 + }, + { + "epoch": 0.4, + "grad_norm": 17.915135451291725, + "learning_rate": 1.9447138080817466e-05, + "loss": 0.8059, + "step": 2558 + }, + { + "epoch": 0.4, + "grad_norm": 33.48498326513195, + "learning_rate": 1.9446584982918256e-05, + "loss": 0.9164, + "step": 2559 + }, + { + "epoch": 0.4, + "grad_norm": 22.587054317708027, + "learning_rate": 1.9446031616363702e-05, + "loss": 0.9321, + "step": 2560 + }, + { + "epoch": 0.4, + "grad_norm": 19.981016468882483, + "learning_rate": 1.9445477981169528e-05, + "loss": 0.7125, + "step": 2561 + }, + { + "epoch": 0.4, + "grad_norm": 12.959233508270154, + "learning_rate": 1.944492407735149e-05, + "loss": 0.7989, + "step": 2562 + }, + { + "epoch": 0.4, + "grad_norm": 21.3023220892182, + "learning_rate": 1.9444369904925337e-05, + "loss": 0.9101, + "step": 2563 + }, + { + "epoch": 0.4, + "grad_norm": 28.232542782831064, + "learning_rate": 
1.9443815463906826e-05, + "loss": 0.8185, + "step": 2564 + }, + { + "epoch": 0.4, + "grad_norm": 18.32370665246752, + "learning_rate": 1.9443260754311728e-05, + "loss": 0.8727, + "step": 2565 + }, + { + "epoch": 0.4, + "grad_norm": 23.90503022940415, + "learning_rate": 1.944270577615582e-05, + "loss": 0.8984, + "step": 2566 + }, + { + "epoch": 0.4, + "grad_norm": 17.48925276003296, + "learning_rate": 1.944215052945488e-05, + "loss": 0.8324, + "step": 2567 + }, + { + "epoch": 0.4, + "grad_norm": 25.314506234030222, + "learning_rate": 1.9441595014224702e-05, + "loss": 0.8118, + "step": 2568 + }, + { + "epoch": 0.4, + "grad_norm": 15.248241293835397, + "learning_rate": 1.944103923048109e-05, + "loss": 0.8533, + "step": 2569 + }, + { + "epoch": 0.4, + "grad_norm": 21.064490976242134, + "learning_rate": 1.944048317823984e-05, + "loss": 0.8003, + "step": 2570 + }, + { + "epoch": 0.4, + "grad_norm": 23.62154523721757, + "learning_rate": 1.9439926857516777e-05, + "loss": 0.8221, + "step": 2571 + }, + { + "epoch": 0.4, + "grad_norm": 21.815765695892782, + "learning_rate": 1.9439370268327712e-05, + "loss": 0.9321, + "step": 2572 + }, + { + "epoch": 0.4, + "grad_norm": 17.541433321944204, + "learning_rate": 1.9438813410688478e-05, + "loss": 0.7801, + "step": 2573 + }, + { + "epoch": 0.4, + "grad_norm": 16.184646929430254, + "learning_rate": 1.9438256284614915e-05, + "loss": 0.8541, + "step": 2574 + }, + { + "epoch": 0.4, + "grad_norm": 20.487637588092507, + "learning_rate": 1.943769889012286e-05, + "loss": 0.9258, + "step": 2575 + }, + { + "epoch": 0.4, + "grad_norm": 24.381051461654067, + "learning_rate": 1.9437141227228175e-05, + "loss": 0.8346, + "step": 2576 + }, + { + "epoch": 0.4, + "grad_norm": 27.422308540720923, + "learning_rate": 1.943658329594671e-05, + "loss": 0.9246, + "step": 2577 + }, + { + "epoch": 0.4, + "grad_norm": 16.992872255931847, + "learning_rate": 1.9436025096294337e-05, + "loss": 0.8081, + "step": 2578 + }, + { + "epoch": 0.4, + "grad_norm": 14.661583718384255, + "learning_rate": 1.9435466628286933e-05, + "loss": 0.7142, + "step": 2579 + }, + { + "epoch": 0.4, + "grad_norm": 13.723917551601362, + "learning_rate": 1.9434907891940376e-05, + "loss": 0.7743, + "step": 2580 + }, + { + "epoch": 0.4, + "grad_norm": 23.244948795001786, + "learning_rate": 1.943434888727056e-05, + "loss": 0.8379, + "step": 2581 + }, + { + "epoch": 0.4, + "grad_norm": 22.210386887312627, + "learning_rate": 1.9433789614293376e-05, + "loss": 0.8381, + "step": 2582 + }, + { + "epoch": 0.4, + "grad_norm": 15.886106102683641, + "learning_rate": 1.9433230073024737e-05, + "loss": 0.8676, + "step": 2583 + }, + { + "epoch": 0.4, + "grad_norm": 18.00643830713997, + "learning_rate": 1.943267026348055e-05, + "loss": 0.7641, + "step": 2584 + }, + { + "epoch": 0.4, + "grad_norm": 19.577011405528797, + "learning_rate": 1.943211018567674e-05, + "loss": 0.745, + "step": 2585 + }, + { + "epoch": 0.4, + "grad_norm": 18.638061927585586, + "learning_rate": 1.9431549839629235e-05, + "loss": 0.7064, + "step": 2586 + }, + { + "epoch": 0.4, + "grad_norm": 19.29325120004328, + "learning_rate": 1.943098922535397e-05, + "loss": 0.7488, + "step": 2587 + }, + { + "epoch": 0.4, + "grad_norm": 22.640773912233634, + "learning_rate": 1.9430428342866888e-05, + "loss": 0.7827, + "step": 2588 + }, + { + "epoch": 0.4, + "grad_norm": 18.03750906162206, + "learning_rate": 1.942986719218394e-05, + "loss": 0.7597, + "step": 2589 + }, + { + "epoch": 0.4, + "grad_norm": 18.373635234477586, + "learning_rate": 1.9429305773321085e-05, + "loss": 
0.834, + "step": 2590 + }, + { + "epoch": 0.4, + "grad_norm": 26.200292549633932, + "learning_rate": 1.9428744086294293e-05, + "loss": 0.8667, + "step": 2591 + }, + { + "epoch": 0.4, + "grad_norm": 23.750456096009135, + "learning_rate": 1.942818213111953e-05, + "loss": 0.8944, + "step": 2592 + }, + { + "epoch": 0.41, + "grad_norm": 21.175852747481397, + "learning_rate": 1.9427619907812788e-05, + "loss": 0.8616, + "step": 2593 + }, + { + "epoch": 0.41, + "grad_norm": 21.656538642126716, + "learning_rate": 1.9427057416390048e-05, + "loss": 0.8284, + "step": 2594 + }, + { + "epoch": 0.41, + "grad_norm": 24.168777550937367, + "learning_rate": 1.942649465686731e-05, + "loss": 0.9372, + "step": 2595 + }, + { + "epoch": 0.41, + "grad_norm": 21.447199351089612, + "learning_rate": 1.9425931629260578e-05, + "loss": 0.8351, + "step": 2596 + }, + { + "epoch": 0.41, + "grad_norm": 23.96381596581886, + "learning_rate": 1.9425368333585862e-05, + "loss": 0.8431, + "step": 2597 + }, + { + "epoch": 0.41, + "grad_norm": 18.22198750003726, + "learning_rate": 1.9424804769859188e-05, + "loss": 0.8937, + "step": 2598 + }, + { + "epoch": 0.41, + "grad_norm": 19.90344687896506, + "learning_rate": 1.942424093809658e-05, + "loss": 0.8646, + "step": 2599 + }, + { + "epoch": 0.41, + "grad_norm": 18.041467011567903, + "learning_rate": 1.9423676838314067e-05, + "loss": 0.8619, + "step": 2600 + }, + { + "epoch": 0.41, + "grad_norm": 22.060994326514724, + "learning_rate": 1.9423112470527703e-05, + "loss": 0.9244, + "step": 2601 + }, + { + "epoch": 0.41, + "grad_norm": 27.994162290491946, + "learning_rate": 1.9422547834753532e-05, + "loss": 0.9067, + "step": 2602 + }, + { + "epoch": 0.41, + "grad_norm": 17.774520219794255, + "learning_rate": 1.942198293100761e-05, + "loss": 0.9444, + "step": 2603 + }, + { + "epoch": 0.41, + "grad_norm": 15.34939937930872, + "learning_rate": 1.9421417759306006e-05, + "loss": 0.8641, + "step": 2604 + }, + { + "epoch": 0.41, + "grad_norm": 19.431907047704055, + "learning_rate": 1.9420852319664797e-05, + "loss": 0.8796, + "step": 2605 + }, + { + "epoch": 0.41, + "grad_norm": 28.351289316323932, + "learning_rate": 1.9420286612100054e-05, + "loss": 0.8465, + "step": 2606 + }, + { + "epoch": 0.41, + "grad_norm": 16.141287664490484, + "learning_rate": 1.9419720636627874e-05, + "loss": 0.7885, + "step": 2607 + }, + { + "epoch": 0.41, + "grad_norm": 14.238178940393663, + "learning_rate": 1.9419154393264346e-05, + "loss": 0.7726, + "step": 2608 + }, + { + "epoch": 0.41, + "grad_norm": 16.774143510626377, + "learning_rate": 1.9418587882025578e-05, + "loss": 0.8392, + "step": 2609 + }, + { + "epoch": 0.41, + "grad_norm": 18.471464637756934, + "learning_rate": 1.941802110292768e-05, + "loss": 0.8838, + "step": 2610 + }, + { + "epoch": 0.41, + "grad_norm": 17.16966640337463, + "learning_rate": 1.941745405598677e-05, + "loss": 0.6908, + "step": 2611 + }, + { + "epoch": 0.41, + "grad_norm": 15.489898343229815, + "learning_rate": 1.941688674121898e-05, + "loss": 0.8888, + "step": 2612 + }, + { + "epoch": 0.41, + "grad_norm": 16.863847662501758, + "learning_rate": 1.941631915864044e-05, + "loss": 0.7215, + "step": 2613 + }, + { + "epoch": 0.41, + "grad_norm": 21.77807440109385, + "learning_rate": 1.9415751308267284e-05, + "loss": 0.8139, + "step": 2614 + }, + { + "epoch": 0.41, + "grad_norm": 18.827306565822177, + "learning_rate": 1.9415183190115678e-05, + "loss": 0.8615, + "step": 2615 + }, + { + "epoch": 0.41, + "grad_norm": 27.65043099442133, + "learning_rate": 1.9414614804201764e-05, + "loss": 0.8808, + 
"step": 2616 + }, + { + "epoch": 0.41, + "grad_norm": 20.045503889813485, + "learning_rate": 1.9414046150541712e-05, + "loss": 0.8402, + "step": 2617 + }, + { + "epoch": 0.41, + "grad_norm": 24.587246551792816, + "learning_rate": 1.94134772291517e-05, + "loss": 0.7904, + "step": 2618 + }, + { + "epoch": 0.41, + "grad_norm": 21.43876005868107, + "learning_rate": 1.9412908040047894e-05, + "loss": 0.8568, + "step": 2619 + }, + { + "epoch": 0.41, + "grad_norm": 16.123789396234184, + "learning_rate": 1.94123385832465e-05, + "loss": 0.8212, + "step": 2620 + }, + { + "epoch": 0.41, + "grad_norm": 17.576751786741397, + "learning_rate": 1.9411768858763695e-05, + "loss": 0.8884, + "step": 2621 + }, + { + "epoch": 0.41, + "grad_norm": 13.457738781891761, + "learning_rate": 1.941119886661569e-05, + "loss": 0.7221, + "step": 2622 + }, + { + "epoch": 0.41, + "grad_norm": 21.87818287717947, + "learning_rate": 1.9410628606818696e-05, + "loss": 0.7998, + "step": 2623 + }, + { + "epoch": 0.41, + "grad_norm": 26.376370772903343, + "learning_rate": 1.9410058079388933e-05, + "loss": 0.8713, + "step": 2624 + }, + { + "epoch": 0.41, + "grad_norm": 22.514787003821464, + "learning_rate": 1.940948728434262e-05, + "loss": 0.7811, + "step": 2625 + }, + { + "epoch": 0.41, + "grad_norm": 18.618198144833002, + "learning_rate": 1.940891622169599e-05, + "loss": 0.8024, + "step": 2626 + }, + { + "epoch": 0.41, + "grad_norm": 19.111914775500992, + "learning_rate": 1.940834489146529e-05, + "loss": 0.8077, + "step": 2627 + }, + { + "epoch": 0.41, + "grad_norm": 13.8638801579698, + "learning_rate": 1.9407773293666764e-05, + "loss": 0.813, + "step": 2628 + }, + { + "epoch": 0.41, + "grad_norm": 15.381499617072896, + "learning_rate": 1.940720142831667e-05, + "loss": 0.8414, + "step": 2629 + }, + { + "epoch": 0.41, + "grad_norm": 5.59328042022629, + "learning_rate": 1.940662929543127e-05, + "loss": 0.8662, + "step": 2630 + }, + { + "epoch": 0.41, + "grad_norm": 20.48884477740405, + "learning_rate": 1.9406056895026837e-05, + "loss": 0.887, + "step": 2631 + }, + { + "epoch": 0.41, + "grad_norm": 24.64496365595773, + "learning_rate": 1.9405484227119646e-05, + "loss": 0.7966, + "step": 2632 + }, + { + "epoch": 0.41, + "grad_norm": 18.36209585684469, + "learning_rate": 1.9404911291725985e-05, + "loss": 0.8656, + "step": 2633 + }, + { + "epoch": 0.41, + "grad_norm": 13.859547374754367, + "learning_rate": 1.9404338088862152e-05, + "loss": 0.8752, + "step": 2634 + }, + { + "epoch": 0.41, + "grad_norm": 14.376115107822649, + "learning_rate": 1.940376461854444e-05, + "loss": 0.827, + "step": 2635 + }, + { + "epoch": 0.41, + "grad_norm": 23.630232947310596, + "learning_rate": 1.940319088078917e-05, + "loss": 0.826, + "step": 2636 + }, + { + "epoch": 0.41, + "grad_norm": 25.305490575839116, + "learning_rate": 1.9402616875612645e-05, + "loss": 0.8631, + "step": 2637 + }, + { + "epoch": 0.41, + "grad_norm": 42.794256984693206, + "learning_rate": 1.9402042603031202e-05, + "loss": 0.8284, + "step": 2638 + }, + { + "epoch": 0.41, + "grad_norm": 46.730669959461885, + "learning_rate": 1.9401468063061165e-05, + "loss": 0.7978, + "step": 2639 + }, + { + "epoch": 0.41, + "grad_norm": 23.69849547185854, + "learning_rate": 1.9400893255718874e-05, + "loss": 0.852, + "step": 2640 + }, + { + "epoch": 0.41, + "grad_norm": 20.864571354155228, + "learning_rate": 1.9400318181020682e-05, + "loss": 0.7427, + "step": 2641 + }, + { + "epoch": 0.41, + "grad_norm": 22.975606694813866, + "learning_rate": 1.9399742838982937e-05, + "loss": 0.8502, + "step": 2642 + }, + 
{ + "epoch": 0.41, + "grad_norm": 23.738481673925747, + "learning_rate": 1.9399167229622008e-05, + "loss": 0.8133, + "step": 2643 + }, + { + "epoch": 0.41, + "grad_norm": 18.74722582957297, + "learning_rate": 1.9398591352954258e-05, + "loss": 0.8737, + "step": 2644 + }, + { + "epoch": 0.41, + "grad_norm": 21.801209265631115, + "learning_rate": 1.939801520899607e-05, + "loss": 0.8404, + "step": 2645 + }, + { + "epoch": 0.41, + "grad_norm": 18.29072114103525, + "learning_rate": 1.9397438797763825e-05, + "loss": 0.8134, + "step": 2646 + }, + { + "epoch": 0.41, + "grad_norm": 14.701624742579497, + "learning_rate": 1.9396862119273918e-05, + "loss": 0.805, + "step": 2647 + }, + { + "epoch": 0.41, + "grad_norm": 15.658906172625787, + "learning_rate": 1.939628517354275e-05, + "loss": 0.8634, + "step": 2648 + }, + { + "epoch": 0.41, + "grad_norm": 15.305061373737125, + "learning_rate": 1.939570796058673e-05, + "loss": 0.8961, + "step": 2649 + }, + { + "epoch": 0.41, + "grad_norm": 21.0478111361923, + "learning_rate": 1.939513048042227e-05, + "loss": 0.8788, + "step": 2650 + }, + { + "epoch": 0.41, + "grad_norm": 22.228470704640216, + "learning_rate": 1.9394552733065797e-05, + "loss": 0.8253, + "step": 2651 + }, + { + "epoch": 0.41, + "grad_norm": 23.60915031174712, + "learning_rate": 1.939397471853374e-05, + "loss": 0.8131, + "step": 2652 + }, + { + "epoch": 0.41, + "grad_norm": 17.542882152081166, + "learning_rate": 1.9393396436842537e-05, + "loss": 0.8406, + "step": 2653 + }, + { + "epoch": 0.41, + "grad_norm": 22.779320073894784, + "learning_rate": 1.9392817888008632e-05, + "loss": 0.8619, + "step": 2654 + }, + { + "epoch": 0.41, + "grad_norm": 22.323311619922016, + "learning_rate": 1.939223907204848e-05, + "loss": 0.8435, + "step": 2655 + }, + { + "epoch": 0.41, + "grad_norm": 16.03902363482122, + "learning_rate": 1.9391659988978546e-05, + "loss": 0.8012, + "step": 2656 + }, + { + "epoch": 0.42, + "grad_norm": 18.83149713891515, + "learning_rate": 1.9391080638815295e-05, + "loss": 0.7867, + "step": 2657 + }, + { + "epoch": 0.42, + "grad_norm": 20.66758118725523, + "learning_rate": 1.9390501021575203e-05, + "loss": 0.8648, + "step": 2658 + }, + { + "epoch": 0.42, + "grad_norm": 15.167484054660793, + "learning_rate": 1.938992113727476e-05, + "loss": 0.7431, + "step": 2659 + }, + { + "epoch": 0.42, + "grad_norm": 21.54112188354704, + "learning_rate": 1.9389340985930447e-05, + "loss": 0.8308, + "step": 2660 + }, + { + "epoch": 0.42, + "grad_norm": 22.94534245710849, + "learning_rate": 1.938876056755877e-05, + "loss": 0.8617, + "step": 2661 + }, + { + "epoch": 0.42, + "grad_norm": 17.41685642053697, + "learning_rate": 1.9388179882176237e-05, + "loss": 0.7977, + "step": 2662 + }, + { + "epoch": 0.42, + "grad_norm": 19.65406110649243, + "learning_rate": 1.938759892979936e-05, + "loss": 0.9213, + "step": 2663 + }, + { + "epoch": 0.42, + "grad_norm": 16.703268422098926, + "learning_rate": 1.9387017710444662e-05, + "loss": 0.8146, + "step": 2664 + }, + { + "epoch": 0.42, + "grad_norm": 15.150328875169098, + "learning_rate": 1.9386436224128668e-05, + "loss": 0.7223, + "step": 2665 + }, + { + "epoch": 0.42, + "grad_norm": 18.439974196960556, + "learning_rate": 1.938585447086792e-05, + "loss": 0.8488, + "step": 2666 + }, + { + "epoch": 0.42, + "grad_norm": 15.982365029709545, + "learning_rate": 1.9385272450678966e-05, + "loss": 0.7891, + "step": 2667 + }, + { + "epoch": 0.42, + "grad_norm": 18.709679270988122, + "learning_rate": 1.938469016357835e-05, + "loss": 0.7952, + "step": 2668 + }, + { + "epoch": 
0.42, + "grad_norm": 23.691875524818492, + "learning_rate": 1.938410760958263e-05, + "loss": 0.8298, + "step": 2669 + }, + { + "epoch": 0.42, + "grad_norm": 14.818480974198497, + "learning_rate": 1.9383524788708387e-05, + "loss": 0.8908, + "step": 2670 + }, + { + "epoch": 0.42, + "grad_norm": 18.902835379961296, + "learning_rate": 1.9382941700972188e-05, + "loss": 0.7914, + "step": 2671 + }, + { + "epoch": 0.42, + "grad_norm": 18.726659806356395, + "learning_rate": 1.938235834639061e-05, + "loss": 0.8642, + "step": 2672 + }, + { + "epoch": 0.42, + "grad_norm": 28.178781863414006, + "learning_rate": 1.9381774724980253e-05, + "loss": 0.8986, + "step": 2673 + }, + { + "epoch": 0.42, + "grad_norm": 21.916274957607293, + "learning_rate": 1.9381190836757712e-05, + "loss": 0.7313, + "step": 2674 + }, + { + "epoch": 0.42, + "grad_norm": 15.844817039102114, + "learning_rate": 1.938060668173959e-05, + "loss": 0.8021, + "step": 2675 + }, + { + "epoch": 0.42, + "grad_norm": 19.15385254455125, + "learning_rate": 1.93800222599425e-05, + "loss": 0.8223, + "step": 2676 + }, + { + "epoch": 0.42, + "grad_norm": 25.65244877768913, + "learning_rate": 1.9379437571383067e-05, + "loss": 0.7941, + "step": 2677 + }, + { + "epoch": 0.42, + "grad_norm": 19.381240955530274, + "learning_rate": 1.9378852616077915e-05, + "loss": 0.8056, + "step": 2678 + }, + { + "epoch": 0.42, + "grad_norm": 20.230558983335847, + "learning_rate": 1.9378267394043678e-05, + "loss": 0.8657, + "step": 2679 + }, + { + "epoch": 0.42, + "grad_norm": 23.617910331165994, + "learning_rate": 1.9377681905297007e-05, + "loss": 0.9116, + "step": 2680 + }, + { + "epoch": 0.42, + "grad_norm": 22.23549219837821, + "learning_rate": 1.937709614985455e-05, + "loss": 0.9101, + "step": 2681 + }, + { + "epoch": 0.42, + "grad_norm": 23.55923682796077, + "learning_rate": 1.937651012773296e-05, + "loss": 0.8404, + "step": 2682 + }, + { + "epoch": 0.42, + "grad_norm": 24.6342227567183, + "learning_rate": 1.9375923838948907e-05, + "loss": 0.8992, + "step": 2683 + }, + { + "epoch": 0.42, + "grad_norm": 15.826724038442235, + "learning_rate": 1.9375337283519067e-05, + "loss": 0.7664, + "step": 2684 + }, + { + "epoch": 0.42, + "grad_norm": 17.281880268499513, + "learning_rate": 1.9374750461460117e-05, + "loss": 0.7582, + "step": 2685 + }, + { + "epoch": 0.42, + "grad_norm": 17.387846112902928, + "learning_rate": 1.9374163372788748e-05, + "loss": 0.8163, + "step": 2686 + }, + { + "epoch": 0.42, + "grad_norm": 21.980031414030144, + "learning_rate": 1.9373576017521657e-05, + "loss": 0.8424, + "step": 2687 + }, + { + "epoch": 0.42, + "grad_norm": 18.226036465604434, + "learning_rate": 1.9372988395675547e-05, + "loss": 0.7509, + "step": 2688 + }, + { + "epoch": 0.42, + "grad_norm": 17.09676331821948, + "learning_rate": 1.9372400507267132e-05, + "loss": 0.9732, + "step": 2689 + }, + { + "epoch": 0.42, + "grad_norm": 20.227840404579943, + "learning_rate": 1.9371812352313125e-05, + "loss": 0.8197, + "step": 2690 + }, + { + "epoch": 0.42, + "grad_norm": 16.38703806842231, + "learning_rate": 1.9371223930830264e-05, + "loss": 0.848, + "step": 2691 + }, + { + "epoch": 0.42, + "grad_norm": 30.472192629947614, + "learning_rate": 1.9370635242835272e-05, + "loss": 1.0408, + "step": 2692 + }, + { + "epoch": 0.42, + "grad_norm": 18.427443078613916, + "learning_rate": 1.9370046288344894e-05, + "loss": 0.784, + "step": 2693 + }, + { + "epoch": 0.42, + "grad_norm": 22.309832968515117, + "learning_rate": 1.9369457067375884e-05, + "loss": 0.8382, + "step": 2694 + }, + { + "epoch": 0.42, + 
"grad_norm": 15.275171236141343, + "learning_rate": 1.9368867579944994e-05, + "loss": 0.7785, + "step": 2695 + }, + { + "epoch": 0.42, + "grad_norm": 15.936757177918732, + "learning_rate": 1.936827782606899e-05, + "loss": 0.802, + "step": 2696 + }, + { + "epoch": 0.42, + "grad_norm": 21.114803473183333, + "learning_rate": 1.9367687805764647e-05, + "loss": 0.9395, + "step": 2697 + }, + { + "epoch": 0.42, + "grad_norm": 22.621051054466136, + "learning_rate": 1.936709751904874e-05, + "loss": 0.8338, + "step": 2698 + }, + { + "epoch": 0.42, + "grad_norm": 22.16405750203337, + "learning_rate": 1.936650696593806e-05, + "loss": 0.9549, + "step": 2699 + }, + { + "epoch": 0.42, + "grad_norm": 17.09112454742946, + "learning_rate": 1.9365916146449405e-05, + "loss": 0.872, + "step": 2700 + }, + { + "epoch": 0.42, + "grad_norm": 22.780151092780315, + "learning_rate": 1.936532506059957e-05, + "loss": 0.938, + "step": 2701 + }, + { + "epoch": 0.42, + "grad_norm": 23.783845257351054, + "learning_rate": 1.936473370840537e-05, + "loss": 0.9177, + "step": 2702 + }, + { + "epoch": 0.42, + "grad_norm": 20.529436400045263, + "learning_rate": 1.936414208988362e-05, + "loss": 0.8504, + "step": 2703 + }, + { + "epoch": 0.42, + "grad_norm": 27.865737750064977, + "learning_rate": 1.936355020505115e-05, + "loss": 0.9391, + "step": 2704 + }, + { + "epoch": 0.42, + "grad_norm": 19.11100780236159, + "learning_rate": 1.9362958053924786e-05, + "loss": 0.8241, + "step": 2705 + }, + { + "epoch": 0.42, + "grad_norm": 16.930240394988935, + "learning_rate": 1.9362365636521377e-05, + "loss": 0.8171, + "step": 2706 + }, + { + "epoch": 0.42, + "grad_norm": 18.082579719991905, + "learning_rate": 1.9361772952857762e-05, + "loss": 0.8348, + "step": 2707 + }, + { + "epoch": 0.42, + "grad_norm": 15.997880650018738, + "learning_rate": 1.93611800029508e-05, + "loss": 0.7933, + "step": 2708 + }, + { + "epoch": 0.42, + "grad_norm": 25.995473620715693, + "learning_rate": 1.9360586786817355e-05, + "loss": 0.8625, + "step": 2709 + }, + { + "epoch": 0.42, + "grad_norm": 22.979720218080338, + "learning_rate": 1.9359993304474302e-05, + "loss": 0.8743, + "step": 2710 + }, + { + "epoch": 0.42, + "grad_norm": 16.680965757860424, + "learning_rate": 1.935939955593851e-05, + "loss": 0.8663, + "step": 2711 + }, + { + "epoch": 0.42, + "grad_norm": 30.115294095895603, + "learning_rate": 1.9358805541226872e-05, + "loss": 0.8895, + "step": 2712 + }, + { + "epoch": 0.42, + "grad_norm": 18.827004143683194, + "learning_rate": 1.9358211260356282e-05, + "loss": 0.8517, + "step": 2713 + }, + { + "epoch": 0.42, + "grad_norm": 19.58248676962657, + "learning_rate": 1.9357616713343633e-05, + "loss": 0.857, + "step": 2714 + }, + { + "epoch": 0.42, + "grad_norm": 17.93136013431971, + "learning_rate": 1.935702190020584e-05, + "loss": 0.8615, + "step": 2715 + }, + { + "epoch": 0.42, + "grad_norm": 15.677871383670924, + "learning_rate": 1.9356426820959817e-05, + "loss": 0.8615, + "step": 2716 + }, + { + "epoch": 0.42, + "grad_norm": 18.538824027642022, + "learning_rate": 1.935583147562249e-05, + "loss": 0.9242, + "step": 2717 + }, + { + "epoch": 0.42, + "grad_norm": 26.46955974493218, + "learning_rate": 1.9355235864210792e-05, + "loss": 1.0344, + "step": 2718 + }, + { + "epoch": 0.42, + "grad_norm": 20.194503384287984, + "learning_rate": 1.9354639986741653e-05, + "loss": 0.9864, + "step": 2719 + }, + { + "epoch": 0.42, + "grad_norm": 24.302758743922624, + "learning_rate": 1.9354043843232028e-05, + "loss": 0.9412, + "step": 2720 + }, + { + "epoch": 0.43, + "grad_norm": 
16.898592952387137, + "learning_rate": 1.935344743369887e-05, + "loss": 0.8174, + "step": 2721 + }, + { + "epoch": 0.43, + "grad_norm": 19.005242735707103, + "learning_rate": 1.9352850758159136e-05, + "loss": 0.8357, + "step": 2722 + }, + { + "epoch": 0.43, + "grad_norm": 28.29533521537663, + "learning_rate": 1.9352253816629796e-05, + "loss": 0.8839, + "step": 2723 + }, + { + "epoch": 0.43, + "grad_norm": 13.290798532477393, + "learning_rate": 1.9351656609127833e-05, + "loss": 0.758, + "step": 2724 + }, + { + "epoch": 0.43, + "grad_norm": 14.259434209592417, + "learning_rate": 1.9351059135670222e-05, + "loss": 0.827, + "step": 2725 + }, + { + "epoch": 0.43, + "grad_norm": 27.97107893874645, + "learning_rate": 1.9350461396273963e-05, + "loss": 0.8841, + "step": 2726 + }, + { + "epoch": 0.43, + "grad_norm": 23.08908614714832, + "learning_rate": 1.934986339095605e-05, + "loss": 0.8445, + "step": 2727 + }, + { + "epoch": 0.43, + "grad_norm": 17.594918757657876, + "learning_rate": 1.934926511973349e-05, + "loss": 0.7765, + "step": 2728 + }, + { + "epoch": 0.43, + "grad_norm": 20.439456423537294, + "learning_rate": 1.9348666582623302e-05, + "loss": 0.7948, + "step": 2729 + }, + { + "epoch": 0.43, + "grad_norm": 19.495766741869282, + "learning_rate": 1.9348067779642506e-05, + "loss": 0.8457, + "step": 2730 + }, + { + "epoch": 0.43, + "grad_norm": 23.42110042932668, + "learning_rate": 1.9347468710808128e-05, + "loss": 0.8191, + "step": 2731 + }, + { + "epoch": 0.43, + "grad_norm": 20.988561329125673, + "learning_rate": 1.9346869376137206e-05, + "loss": 0.8307, + "step": 2732 + }, + { + "epoch": 0.43, + "grad_norm": 22.340376920452577, + "learning_rate": 1.9346269775646793e-05, + "loss": 0.8083, + "step": 2733 + }, + { + "epoch": 0.43, + "grad_norm": 22.13230733836942, + "learning_rate": 1.9345669909353934e-05, + "loss": 0.8327, + "step": 2734 + }, + { + "epoch": 0.43, + "grad_norm": 14.948771506687264, + "learning_rate": 1.9345069777275685e-05, + "loss": 0.7678, + "step": 2735 + }, + { + "epoch": 0.43, + "grad_norm": 17.722435713386524, + "learning_rate": 1.934446937942912e-05, + "loss": 0.7494, + "step": 2736 + }, + { + "epoch": 0.43, + "grad_norm": 16.34788575734836, + "learning_rate": 1.9343868715831313e-05, + "loss": 0.7902, + "step": 2737 + }, + { + "epoch": 0.43, + "grad_norm": 23.809935082445637, + "learning_rate": 1.9343267786499346e-05, + "loss": 0.7964, + "step": 2738 + }, + { + "epoch": 0.43, + "grad_norm": 21.479036582074325, + "learning_rate": 1.9342666591450307e-05, + "loss": 0.8675, + "step": 2739 + }, + { + "epoch": 0.43, + "grad_norm": 21.61778473520191, + "learning_rate": 1.9342065130701297e-05, + "loss": 0.8963, + "step": 2740 + }, + { + "epoch": 0.43, + "grad_norm": 23.35420822044292, + "learning_rate": 1.9341463404269422e-05, + "loss": 0.8754, + "step": 2741 + }, + { + "epoch": 0.43, + "grad_norm": 19.1031477576541, + "learning_rate": 1.934086141217179e-05, + "loss": 0.7699, + "step": 2742 + }, + { + "epoch": 0.43, + "grad_norm": 14.799582350019515, + "learning_rate": 1.9340259154425524e-05, + "loss": 0.7482, + "step": 2743 + }, + { + "epoch": 0.43, + "grad_norm": 26.268775707852193, + "learning_rate": 1.933965663104775e-05, + "loss": 0.878, + "step": 2744 + }, + { + "epoch": 0.43, + "grad_norm": 18.606657994922337, + "learning_rate": 1.9339053842055606e-05, + "loss": 0.8324, + "step": 2745 + }, + { + "epoch": 0.43, + "grad_norm": 23.30469492574248, + "learning_rate": 1.9338450787466234e-05, + "loss": 0.7594, + "step": 2746 + }, + { + "epoch": 0.43, + "grad_norm": 
18.01005121246038, + "learning_rate": 1.9337847467296783e-05, + "loss": 0.7594, + "step": 2747 + }, + { + "epoch": 0.43, + "grad_norm": 20.100149317739525, + "learning_rate": 1.9337243881564417e-05, + "loss": 0.7973, + "step": 2748 + }, + { + "epoch": 0.43, + "grad_norm": 22.149178918071595, + "learning_rate": 1.9336640030286293e-05, + "loss": 0.8795, + "step": 2749 + }, + { + "epoch": 0.43, + "grad_norm": 23.784110354669203, + "learning_rate": 1.933603591347959e-05, + "loss": 0.9595, + "step": 2750 + }, + { + "epoch": 0.43, + "grad_norm": 3.477578478672543, + "learning_rate": 1.9335431531161486e-05, + "loss": 0.7808, + "step": 2751 + }, + { + "epoch": 0.43, + "grad_norm": 15.147766612888061, + "learning_rate": 1.933482688334917e-05, + "loss": 0.7993, + "step": 2752 + }, + { + "epoch": 0.43, + "grad_norm": 15.406671432201627, + "learning_rate": 1.9334221970059837e-05, + "loss": 0.7119, + "step": 2753 + }, + { + "epoch": 0.43, + "grad_norm": 15.062424950640239, + "learning_rate": 1.9333616791310696e-05, + "loss": 0.7349, + "step": 2754 + }, + { + "epoch": 0.43, + "grad_norm": 17.54979717570123, + "learning_rate": 1.9333011347118953e-05, + "loss": 0.9561, + "step": 2755 + }, + { + "epoch": 0.43, + "grad_norm": 22.768112161300735, + "learning_rate": 1.9332405637501823e-05, + "loss": 0.8222, + "step": 2756 + }, + { + "epoch": 0.43, + "grad_norm": 20.354478960966798, + "learning_rate": 1.9331799662476537e-05, + "loss": 0.945, + "step": 2757 + }, + { + "epoch": 0.43, + "grad_norm": 16.542335764767387, + "learning_rate": 1.933119342206033e-05, + "loss": 0.8322, + "step": 2758 + }, + { + "epoch": 0.43, + "grad_norm": 16.29031479227513, + "learning_rate": 1.933058691627044e-05, + "loss": 0.8184, + "step": 2759 + }, + { + "epoch": 0.43, + "grad_norm": 18.46496870572795, + "learning_rate": 1.9329980145124115e-05, + "loss": 0.8366, + "step": 2760 + }, + { + "epoch": 0.43, + "grad_norm": 18.949423400994245, + "learning_rate": 1.9329373108638614e-05, + "loss": 0.7697, + "step": 2761 + }, + { + "epoch": 0.43, + "grad_norm": 18.251941269839598, + "learning_rate": 1.93287658068312e-05, + "loss": 0.8186, + "step": 2762 + }, + { + "epoch": 0.43, + "grad_norm": 19.620956735969866, + "learning_rate": 1.932815823971914e-05, + "loss": 0.7985, + "step": 2763 + }, + { + "epoch": 0.43, + "grad_norm": 22.906531339026152, + "learning_rate": 1.9327550407319717e-05, + "loss": 0.9988, + "step": 2764 + }, + { + "epoch": 0.43, + "grad_norm": 17.057307608259677, + "learning_rate": 1.932694230965022e-05, + "loss": 0.7492, + "step": 2765 + }, + { + "epoch": 0.43, + "grad_norm": 22.8766000479919, + "learning_rate": 1.9326333946727938e-05, + "loss": 0.8733, + "step": 2766 + }, + { + "epoch": 0.43, + "grad_norm": 22.366252670778806, + "learning_rate": 1.932572531857017e-05, + "loss": 0.8315, + "step": 2767 + }, + { + "epoch": 0.43, + "grad_norm": 22.97704605480904, + "learning_rate": 1.9325116425194235e-05, + "loss": 0.873, + "step": 2768 + }, + { + "epoch": 0.43, + "grad_norm": 17.93085267014662, + "learning_rate": 1.9324507266617444e-05, + "loss": 0.864, + "step": 2769 + }, + { + "epoch": 0.43, + "grad_norm": 25.14933070516083, + "learning_rate": 1.932389784285712e-05, + "loss": 0.9263, + "step": 2770 + }, + { + "epoch": 0.43, + "grad_norm": 19.647679178912497, + "learning_rate": 1.9323288153930595e-05, + "loss": 0.7297, + "step": 2771 + }, + { + "epoch": 0.43, + "grad_norm": 14.797743218961239, + "learning_rate": 1.932267819985521e-05, + "loss": 0.8237, + "step": 2772 + }, + { + "epoch": 0.43, + "grad_norm": 
21.2180337131879, + "learning_rate": 1.932206798064831e-05, + "loss": 0.8558, + "step": 2773 + }, + { + "epoch": 0.43, + "grad_norm": 19.404121098252574, + "learning_rate": 1.932145749632725e-05, + "loss": 0.8191, + "step": 2774 + }, + { + "epoch": 0.43, + "grad_norm": 20.558964219507182, + "learning_rate": 1.932084674690939e-05, + "loss": 0.796, + "step": 2775 + }, + { + "epoch": 0.43, + "grad_norm": 25.654297185751886, + "learning_rate": 1.9320235732412104e-05, + "loss": 0.9271, + "step": 2776 + }, + { + "epoch": 0.43, + "grad_norm": 15.460692308822745, + "learning_rate": 1.9319624452852765e-05, + "loss": 0.7587, + "step": 2777 + }, + { + "epoch": 0.43, + "grad_norm": 25.55434798502312, + "learning_rate": 1.931901290824876e-05, + "loss": 0.8581, + "step": 2778 + }, + { + "epoch": 0.43, + "grad_norm": 17.45949459984029, + "learning_rate": 1.9318401098617475e-05, + "loss": 0.782, + "step": 2779 + }, + { + "epoch": 0.43, + "grad_norm": 16.010583702854948, + "learning_rate": 1.9317789023976314e-05, + "loss": 0.7699, + "step": 2780 + }, + { + "epoch": 0.43, + "grad_norm": 19.76329166300339, + "learning_rate": 1.9317176684342685e-05, + "loss": 0.8075, + "step": 2781 + }, + { + "epoch": 0.43, + "grad_norm": 23.381196447646076, + "learning_rate": 1.9316564079734005e-05, + "loss": 0.813, + "step": 2782 + }, + { + "epoch": 0.43, + "grad_norm": 14.620988882648282, + "learning_rate": 1.931595121016769e-05, + "loss": 0.8484, + "step": 2783 + }, + { + "epoch": 0.43, + "grad_norm": 23.305390716618515, + "learning_rate": 1.9315338075661172e-05, + "loss": 0.8336, + "step": 2784 + }, + { + "epoch": 0.44, + "grad_norm": 17.44467630605795, + "learning_rate": 1.931472467623189e-05, + "loss": 0.772, + "step": 2785 + }, + { + "epoch": 0.44, + "grad_norm": 18.793004956683315, + "learning_rate": 1.9314111011897285e-05, + "loss": 0.9619, + "step": 2786 + }, + { + "epoch": 0.44, + "grad_norm": 23.540675940737646, + "learning_rate": 1.9313497082674813e-05, + "loss": 0.9043, + "step": 2787 + }, + { + "epoch": 0.44, + "grad_norm": 21.164699910261472, + "learning_rate": 1.931288288858193e-05, + "loss": 0.8337, + "step": 2788 + }, + { + "epoch": 0.44, + "grad_norm": 13.454483928343135, + "learning_rate": 1.9312268429636108e-05, + "loss": 0.6702, + "step": 2789 + }, + { + "epoch": 0.44, + "grad_norm": 20.001847195258666, + "learning_rate": 1.9311653705854817e-05, + "loss": 0.7726, + "step": 2790 + }, + { + "epoch": 0.44, + "grad_norm": 29.434434865819178, + "learning_rate": 1.9311038717255542e-05, + "loss": 1.0484, + "step": 2791 + }, + { + "epoch": 0.44, + "grad_norm": 19.90567106486018, + "learning_rate": 1.9310423463855774e-05, + "loss": 0.9265, + "step": 2792 + }, + { + "epoch": 0.44, + "grad_norm": 26.145564394716416, + "learning_rate": 1.930980794567301e-05, + "loss": 0.8952, + "step": 2793 + }, + { + "epoch": 0.44, + "grad_norm": 42.86813095108527, + "learning_rate": 1.9309192162724756e-05, + "loss": 0.9466, + "step": 2794 + }, + { + "epoch": 0.44, + "grad_norm": 14.99347205769062, + "learning_rate": 1.9308576115028515e-05, + "loss": 0.7671, + "step": 2795 + }, + { + "epoch": 0.44, + "grad_norm": 27.096805186308767, + "learning_rate": 1.9307959802601824e-05, + "loss": 0.856, + "step": 2796 + }, + { + "epoch": 0.44, + "grad_norm": 17.20832649231591, + "learning_rate": 1.9307343225462195e-05, + "loss": 0.7815, + "step": 2797 + }, + { + "epoch": 0.44, + "grad_norm": 20.836475105773456, + "learning_rate": 1.930672638362717e-05, + "loss": 0.8059, + "step": 2798 + }, + { + "epoch": 0.44, + "grad_norm": 
20.827430523571735, + "learning_rate": 1.9306109277114292e-05, + "loss": 0.8531, + "step": 2799 + }, + { + "epoch": 0.44, + "grad_norm": 23.487774256797177, + "learning_rate": 1.930549190594111e-05, + "loss": 0.7303, + "step": 2800 + }, + { + "epoch": 0.44, + "grad_norm": 23.2695687591317, + "learning_rate": 1.930487427012518e-05, + "loss": 0.9313, + "step": 2801 + }, + { + "epoch": 0.44, + "grad_norm": 23.644877286452623, + "learning_rate": 1.930425636968407e-05, + "loss": 0.8546, + "step": 2802 + }, + { + "epoch": 0.44, + "grad_norm": 29.70245533147644, + "learning_rate": 1.9303638204635354e-05, + "loss": 0.8352, + "step": 2803 + }, + { + "epoch": 0.44, + "grad_norm": 18.042241174530986, + "learning_rate": 1.930301977499661e-05, + "loss": 0.8288, + "step": 2804 + }, + { + "epoch": 0.44, + "grad_norm": 22.242139636016038, + "learning_rate": 1.930240108078542e-05, + "loss": 0.8485, + "step": 2805 + }, + { + "epoch": 0.44, + "grad_norm": 14.388418784413442, + "learning_rate": 1.9301782122019392e-05, + "loss": 0.7612, + "step": 2806 + }, + { + "epoch": 0.44, + "grad_norm": 13.451465471103637, + "learning_rate": 1.9301162898716116e-05, + "loss": 0.8578, + "step": 2807 + }, + { + "epoch": 0.44, + "grad_norm": 19.405939431400558, + "learning_rate": 1.9300543410893213e-05, + "loss": 0.8356, + "step": 2808 + }, + { + "epoch": 0.44, + "grad_norm": 27.165890540818857, + "learning_rate": 1.9299923658568294e-05, + "loss": 0.7887, + "step": 2809 + }, + { + "epoch": 0.44, + "grad_norm": 18.407376016101875, + "learning_rate": 1.9299303641758986e-05, + "loss": 0.7502, + "step": 2810 + }, + { + "epoch": 0.44, + "grad_norm": 16.465288114829953, + "learning_rate": 1.9298683360482923e-05, + "loss": 0.8396, + "step": 2811 + }, + { + "epoch": 0.44, + "grad_norm": 34.92772760867393, + "learning_rate": 1.9298062814757746e-05, + "loss": 0.9241, + "step": 2812 + }, + { + "epoch": 0.44, + "grad_norm": 18.657996869052813, + "learning_rate": 1.92974420046011e-05, + "loss": 0.7611, + "step": 2813 + }, + { + "epoch": 0.44, + "grad_norm": 13.418309645230106, + "learning_rate": 1.9296820930030642e-05, + "loss": 0.7252, + "step": 2814 + }, + { + "epoch": 0.44, + "grad_norm": 14.719098678739265, + "learning_rate": 1.9296199591064036e-05, + "loss": 0.7518, + "step": 2815 + }, + { + "epoch": 0.44, + "grad_norm": 13.900043727099296, + "learning_rate": 1.9295577987718953e-05, + "loss": 0.8918, + "step": 2816 + }, + { + "epoch": 0.44, + "grad_norm": 17.353443178660008, + "learning_rate": 1.929495612001307e-05, + "loss": 0.8403, + "step": 2817 + }, + { + "epoch": 0.44, + "grad_norm": 29.80150160354507, + "learning_rate": 1.9294333987964067e-05, + "loss": 0.8056, + "step": 2818 + }, + { + "epoch": 0.44, + "grad_norm": 15.495019609607217, + "learning_rate": 1.9293711591589645e-05, + "loss": 0.8668, + "step": 2819 + }, + { + "epoch": 0.44, + "grad_norm": 20.93091527891047, + "learning_rate": 1.9293088930907505e-05, + "loss": 0.7623, + "step": 2820 + }, + { + "epoch": 0.44, + "grad_norm": 15.685629673804485, + "learning_rate": 1.9292466005935352e-05, + "loss": 0.7573, + "step": 2821 + }, + { + "epoch": 0.44, + "grad_norm": 17.229591706205877, + "learning_rate": 1.9291842816690898e-05, + "loss": 0.7855, + "step": 2822 + }, + { + "epoch": 0.44, + "grad_norm": 14.20478784208699, + "learning_rate": 1.9291219363191873e-05, + "loss": 0.8322, + "step": 2823 + }, + { + "epoch": 0.44, + "grad_norm": 14.574667659646149, + "learning_rate": 1.9290595645456003e-05, + "loss": 0.7588, + "step": 2824 + }, + { + "epoch": 0.44, + "grad_norm": 
13.720982019705515, + "learning_rate": 1.9289971663501027e-05, + "loss": 0.8363, + "step": 2825 + }, + { + "epoch": 0.44, + "grad_norm": 20.736009845229447, + "learning_rate": 1.9289347417344694e-05, + "loss": 0.768, + "step": 2826 + }, + { + "epoch": 0.44, + "grad_norm": 23.70867224969513, + "learning_rate": 1.928872290700475e-05, + "loss": 0.8619, + "step": 2827 + }, + { + "epoch": 0.44, + "grad_norm": 35.94005813864722, + "learning_rate": 1.9288098132498966e-05, + "loss": 0.817, + "step": 2828 + }, + { + "epoch": 0.44, + "grad_norm": 15.729340832316538, + "learning_rate": 1.9287473093845102e-05, + "loss": 0.7662, + "step": 2829 + }, + { + "epoch": 0.44, + "grad_norm": 25.33096850540217, + "learning_rate": 1.9286847791060937e-05, + "loss": 0.8533, + "step": 2830 + }, + { + "epoch": 0.44, + "grad_norm": 3.731406791945428, + "learning_rate": 1.928622222416425e-05, + "loss": 0.7649, + "step": 2831 + }, + { + "epoch": 0.44, + "grad_norm": 25.22571892330583, + "learning_rate": 1.928559639317284e-05, + "loss": 0.8109, + "step": 2832 + }, + { + "epoch": 0.44, + "grad_norm": 21.07722226960523, + "learning_rate": 1.9284970298104495e-05, + "loss": 0.7884, + "step": 2833 + }, + { + "epoch": 0.44, + "grad_norm": 16.24392876091876, + "learning_rate": 1.928434393897703e-05, + "loss": 0.8451, + "step": 2834 + }, + { + "epoch": 0.44, + "grad_norm": 17.055815044775574, + "learning_rate": 1.9283717315808255e-05, + "loss": 0.8385, + "step": 2835 + }, + { + "epoch": 0.44, + "grad_norm": 23.294156363632272, + "learning_rate": 1.928309042861599e-05, + "loss": 0.7455, + "step": 2836 + }, + { + "epoch": 0.44, + "grad_norm": 21.476296005177545, + "learning_rate": 1.9282463277418062e-05, + "loss": 0.911, + "step": 2837 + }, + { + "epoch": 0.44, + "grad_norm": 16.64769102971277, + "learning_rate": 1.928183586223231e-05, + "loss": 0.8264, + "step": 2838 + }, + { + "epoch": 0.44, + "grad_norm": 18.59383549390099, + "learning_rate": 1.9281208183076576e-05, + "loss": 0.8206, + "step": 2839 + }, + { + "epoch": 0.44, + "grad_norm": 22.910436317962915, + "learning_rate": 1.9280580239968708e-05, + "loss": 0.8734, + "step": 2840 + }, + { + "epoch": 0.44, + "grad_norm": 22.43049350806382, + "learning_rate": 1.927995203292657e-05, + "loss": 0.88, + "step": 2841 + }, + { + "epoch": 0.44, + "grad_norm": 15.987091362221824, + "learning_rate": 1.9279323561968024e-05, + "loss": 0.7627, + "step": 2842 + }, + { + "epoch": 0.44, + "grad_norm": 12.835625789244995, + "learning_rate": 1.927869482711094e-05, + "loss": 0.7323, + "step": 2843 + }, + { + "epoch": 0.44, + "grad_norm": 21.991106596922414, + "learning_rate": 1.927806582837321e-05, + "loss": 0.8016, + "step": 2844 + }, + { + "epoch": 0.44, + "grad_norm": 22.10333686844645, + "learning_rate": 1.927743656577271e-05, + "loss": 0.9001, + "step": 2845 + }, + { + "epoch": 0.44, + "grad_norm": 28.852537134557487, + "learning_rate": 1.9276807039327344e-05, + "loss": 0.9389, + "step": 2846 + }, + { + "epoch": 0.44, + "grad_norm": 13.031735257963781, + "learning_rate": 1.9276177249055012e-05, + "loss": 0.7254, + "step": 2847 + }, + { + "epoch": 0.44, + "grad_norm": 11.636012338566934, + "learning_rate": 1.9275547194973626e-05, + "loss": 0.879, + "step": 2848 + }, + { + "epoch": 0.45, + "grad_norm": 19.620915853524366, + "learning_rate": 1.9274916877101104e-05, + "loss": 0.843, + "step": 2849 + }, + { + "epoch": 0.45, + "grad_norm": 15.696749001023305, + "learning_rate": 1.9274286295455373e-05, + "loss": 0.6976, + "step": 2850 + }, + { + "epoch": 0.45, + "grad_norm": 27.36699915098846, + 
"learning_rate": 1.927365545005436e-05, + "loss": 0.8912, + "step": 2851 + }, + { + "epoch": 0.45, + "grad_norm": 30.587851371664442, + "learning_rate": 1.9273024340916015e-05, + "loss": 0.9167, + "step": 2852 + }, + { + "epoch": 0.45, + "grad_norm": 16.504500437316278, + "learning_rate": 1.9272392968058282e-05, + "loss": 0.7861, + "step": 2853 + }, + { + "epoch": 0.45, + "grad_norm": 32.04225498861467, + "learning_rate": 1.9271761331499114e-05, + "loss": 0.9072, + "step": 2854 + }, + { + "epoch": 0.45, + "grad_norm": 17.428991956182465, + "learning_rate": 1.927112943125648e-05, + "loss": 0.8342, + "step": 2855 + }, + { + "epoch": 0.45, + "grad_norm": 30.74141216754505, + "learning_rate": 1.9270497267348348e-05, + "loss": 0.8617, + "step": 2856 + }, + { + "epoch": 0.45, + "grad_norm": 19.899881867271972, + "learning_rate": 1.9269864839792697e-05, + "loss": 0.8082, + "step": 2857 + }, + { + "epoch": 0.45, + "grad_norm": 23.883514953630044, + "learning_rate": 1.9269232148607515e-05, + "loss": 0.8591, + "step": 2858 + }, + { + "epoch": 0.45, + "grad_norm": 13.003423591312604, + "learning_rate": 1.926859919381079e-05, + "loss": 0.7978, + "step": 2859 + }, + { + "epoch": 0.45, + "grad_norm": 15.679820444582184, + "learning_rate": 1.926796597542053e-05, + "loss": 0.9388, + "step": 2860 + }, + { + "epoch": 0.45, + "grad_norm": 22.481095958348472, + "learning_rate": 1.9267332493454732e-05, + "loss": 0.7668, + "step": 2861 + }, + { + "epoch": 0.45, + "grad_norm": 18.287048395966394, + "learning_rate": 1.9266698747931425e-05, + "loss": 0.7206, + "step": 2862 + }, + { + "epoch": 0.45, + "grad_norm": 19.923573275715178, + "learning_rate": 1.9266064738868625e-05, + "loss": 0.847, + "step": 2863 + }, + { + "epoch": 0.45, + "grad_norm": 23.292903726689474, + "learning_rate": 1.9265430466284362e-05, + "loss": 0.871, + "step": 2864 + }, + { + "epoch": 0.45, + "grad_norm": 32.18323858162131, + "learning_rate": 1.9264795930196677e-05, + "loss": 0.9068, + "step": 2865 + }, + { + "epoch": 0.45, + "grad_norm": 17.809343413020226, + "learning_rate": 1.9264161130623618e-05, + "loss": 0.9016, + "step": 2866 + }, + { + "epoch": 0.45, + "grad_norm": 26.926370773381045, + "learning_rate": 1.9263526067583235e-05, + "loss": 0.8996, + "step": 2867 + }, + { + "epoch": 0.45, + "grad_norm": 15.92374671214326, + "learning_rate": 1.926289074109359e-05, + "loss": 0.7603, + "step": 2868 + }, + { + "epoch": 0.45, + "grad_norm": 26.096887386183415, + "learning_rate": 1.9262255151172752e-05, + "loss": 0.8825, + "step": 2869 + }, + { + "epoch": 0.45, + "grad_norm": 14.555201177905374, + "learning_rate": 1.9261619297838794e-05, + "loss": 0.8921, + "step": 2870 + }, + { + "epoch": 0.45, + "grad_norm": 17.373893600938462, + "learning_rate": 1.92609831811098e-05, + "loss": 0.8113, + "step": 2871 + }, + { + "epoch": 0.45, + "grad_norm": 18.639356349355918, + "learning_rate": 1.9260346801003864e-05, + "loss": 0.8037, + "step": 2872 + }, + { + "epoch": 0.45, + "grad_norm": 16.585625110995945, + "learning_rate": 1.925971015753908e-05, + "loss": 0.8173, + "step": 2873 + }, + { + "epoch": 0.45, + "grad_norm": 21.404036637200182, + "learning_rate": 1.925907325073356e-05, + "loss": 0.8761, + "step": 2874 + }, + { + "epoch": 0.45, + "grad_norm": 26.06670253819081, + "learning_rate": 1.925843608060541e-05, + "loss": 0.938, + "step": 2875 + }, + { + "epoch": 0.45, + "grad_norm": 15.330411233051962, + "learning_rate": 1.925779864717275e-05, + "loss": 0.7903, + "step": 2876 + }, + { + "epoch": 0.45, + "grad_norm": 21.969001946964816, + 
"learning_rate": 1.925716095045372e-05, + "loss": 0.8028, + "step": 2877 + }, + { + "epoch": 0.45, + "grad_norm": 19.030716748432223, + "learning_rate": 1.9256522990466445e-05, + "loss": 0.8031, + "step": 2878 + }, + { + "epoch": 0.45, + "grad_norm": 20.625710126905126, + "learning_rate": 1.925588476722907e-05, + "loss": 0.8059, + "step": 2879 + }, + { + "epoch": 0.45, + "grad_norm": 36.493411691443036, + "learning_rate": 1.9255246280759747e-05, + "loss": 0.802, + "step": 2880 + }, + { + "epoch": 0.45, + "grad_norm": 12.679559905608317, + "learning_rate": 1.9254607531076633e-05, + "loss": 0.735, + "step": 2881 + }, + { + "epoch": 0.45, + "grad_norm": 19.48163898319547, + "learning_rate": 1.9253968518197896e-05, + "loss": 0.8374, + "step": 2882 + }, + { + "epoch": 0.45, + "grad_norm": 22.901193054065537, + "learning_rate": 1.925332924214171e-05, + "loss": 0.8883, + "step": 2883 + }, + { + "epoch": 0.45, + "grad_norm": 19.677948101766702, + "learning_rate": 1.925268970292625e-05, + "loss": 0.8174, + "step": 2884 + }, + { + "epoch": 0.45, + "grad_norm": 25.600996014098943, + "learning_rate": 1.9252049900569707e-05, + "loss": 0.8718, + "step": 2885 + }, + { + "epoch": 0.45, + "grad_norm": 20.30245772040695, + "learning_rate": 1.925140983509028e-05, + "loss": 0.7605, + "step": 2886 + }, + { + "epoch": 0.45, + "grad_norm": 19.85453456772631, + "learning_rate": 1.9250769506506164e-05, + "loss": 0.8221, + "step": 2887 + }, + { + "epoch": 0.45, + "grad_norm": 21.577647359604168, + "learning_rate": 1.925012891483558e-05, + "loss": 0.7792, + "step": 2888 + }, + { + "epoch": 0.45, + "grad_norm": 18.794540209930005, + "learning_rate": 1.924948806009674e-05, + "loss": 0.7336, + "step": 2889 + }, + { + "epoch": 0.45, + "grad_norm": 16.356493493907163, + "learning_rate": 1.9248846942307867e-05, + "loss": 0.7764, + "step": 2890 + }, + { + "epoch": 0.45, + "grad_norm": 15.396885541591361, + "learning_rate": 1.92482055614872e-05, + "loss": 0.8077, + "step": 2891 + }, + { + "epoch": 0.45, + "grad_norm": 29.39138071912793, + "learning_rate": 1.9247563917652978e-05, + "loss": 0.74, + "step": 2892 + }, + { + "epoch": 0.45, + "grad_norm": 23.058876910531712, + "learning_rate": 1.9246922010823445e-05, + "loss": 0.7889, + "step": 2893 + }, + { + "epoch": 0.45, + "grad_norm": 20.35058201205367, + "learning_rate": 1.924627984101686e-05, + "loss": 0.8713, + "step": 2894 + }, + { + "epoch": 0.45, + "grad_norm": 23.303365962775334, + "learning_rate": 1.924563740825149e-05, + "loss": 0.7908, + "step": 2895 + }, + { + "epoch": 0.45, + "grad_norm": 19.218575881541433, + "learning_rate": 1.9244994712545596e-05, + "loss": 0.7346, + "step": 2896 + }, + { + "epoch": 0.45, + "grad_norm": 20.45784247951282, + "learning_rate": 1.924435175391746e-05, + "loss": 0.8048, + "step": 2897 + }, + { + "epoch": 0.45, + "grad_norm": 18.769205526806456, + "learning_rate": 1.924370853238537e-05, + "loss": 0.7424, + "step": 2898 + }, + { + "epoch": 0.45, + "grad_norm": 16.916617140223753, + "learning_rate": 1.9243065047967614e-05, + "loss": 0.7788, + "step": 2899 + }, + { + "epoch": 0.45, + "grad_norm": 20.888614085555783, + "learning_rate": 1.92424213006825e-05, + "loss": 0.8761, + "step": 2900 + }, + { + "epoch": 0.45, + "grad_norm": 25.87728224239912, + "learning_rate": 1.924177729054833e-05, + "loss": 0.894, + "step": 2901 + }, + { + "epoch": 0.45, + "grad_norm": 38.50258540399339, + "learning_rate": 1.9241133017583416e-05, + "loss": 1.0238, + "step": 2902 + }, + { + "epoch": 0.45, + "grad_norm": 16.942786794876184, + "learning_rate": 
1.9240488481806086e-05, + "loss": 0.772, + "step": 2903 + }, + { + "epoch": 0.45, + "grad_norm": 16.247008721428116, + "learning_rate": 1.923984368323467e-05, + "loss": 0.7697, + "step": 2904 + }, + { + "epoch": 0.45, + "grad_norm": 27.128511498190324, + "learning_rate": 1.9239198621887505e-05, + "loss": 0.8602, + "step": 2905 + }, + { + "epoch": 0.45, + "grad_norm": 23.063971694778907, + "learning_rate": 1.9238553297782937e-05, + "loss": 0.7644, + "step": 2906 + }, + { + "epoch": 0.45, + "grad_norm": 16.009838367951684, + "learning_rate": 1.9237907710939317e-05, + "loss": 0.7811, + "step": 2907 + }, + { + "epoch": 0.45, + "grad_norm": 19.240008564197918, + "learning_rate": 1.9237261861375004e-05, + "loss": 0.9248, + "step": 2908 + }, + { + "epoch": 0.45, + "grad_norm": 27.23992104588431, + "learning_rate": 1.923661574910837e-05, + "loss": 0.7512, + "step": 2909 + }, + { + "epoch": 0.45, + "grad_norm": 21.939374063872858, + "learning_rate": 1.9235969374157786e-05, + "loss": 0.9421, + "step": 2910 + }, + { + "epoch": 0.45, + "grad_norm": 17.181644877669257, + "learning_rate": 1.9235322736541635e-05, + "loss": 0.8945, + "step": 2911 + }, + { + "epoch": 0.45, + "grad_norm": 22.533710042537482, + "learning_rate": 1.9234675836278308e-05, + "loss": 0.846, + "step": 2912 + }, + { + "epoch": 0.46, + "grad_norm": 5.960547458816901, + "learning_rate": 1.9234028673386205e-05, + "loss": 0.7696, + "step": 2913 + }, + { + "epoch": 0.46, + "grad_norm": 18.396502906181674, + "learning_rate": 1.9233381247883724e-05, + "loss": 0.8041, + "step": 2914 + }, + { + "epoch": 0.46, + "grad_norm": 18.387321258067743, + "learning_rate": 1.9232733559789286e-05, + "loss": 0.796, + "step": 2915 + }, + { + "epoch": 0.46, + "grad_norm": 16.071487851572833, + "learning_rate": 1.9232085609121305e-05, + "loss": 0.8013, + "step": 2916 + }, + { + "epoch": 0.46, + "grad_norm": 14.002491058816016, + "learning_rate": 1.9231437395898207e-05, + "loss": 0.7197, + "step": 2917 + }, + { + "epoch": 0.46, + "grad_norm": 17.68117358451436, + "learning_rate": 1.9230788920138433e-05, + "loss": 0.8017, + "step": 2918 + }, + { + "epoch": 0.46, + "grad_norm": 21.04612653430889, + "learning_rate": 1.923014018186042e-05, + "loss": 0.7759, + "step": 2919 + }, + { + "epoch": 0.46, + "grad_norm": 19.44459704258811, + "learning_rate": 1.922949118108262e-05, + "loss": 0.8843, + "step": 2920 + }, + { + "epoch": 0.46, + "grad_norm": 22.341203785582618, + "learning_rate": 1.9228841917823492e-05, + "loss": 0.8406, + "step": 2921 + }, + { + "epoch": 0.46, + "grad_norm": 20.879511315024136, + "learning_rate": 1.92281923921015e-05, + "loss": 0.8005, + "step": 2922 + }, + { + "epoch": 0.46, + "grad_norm": 15.180477080078573, + "learning_rate": 1.922754260393511e-05, + "loss": 0.7988, + "step": 2923 + }, + { + "epoch": 0.46, + "grad_norm": 15.55693439175142, + "learning_rate": 1.9226892553342808e-05, + "loss": 0.7678, + "step": 2924 + }, + { + "epoch": 0.46, + "grad_norm": 23.63735886531153, + "learning_rate": 1.9226242240343082e-05, + "loss": 0.8642, + "step": 2925 + }, + { + "epoch": 0.46, + "grad_norm": 17.689179542921874, + "learning_rate": 1.9225591664954423e-05, + "loss": 0.7621, + "step": 2926 + }, + { + "epoch": 0.46, + "grad_norm": 23.62090388367294, + "learning_rate": 1.922494082719533e-05, + "loss": 0.8511, + "step": 2927 + }, + { + "epoch": 0.46, + "grad_norm": 18.44901606835908, + "learning_rate": 1.922428972708432e-05, + "loss": 0.8152, + "step": 2928 + }, + { + "epoch": 0.46, + "grad_norm": 273.49283357951964, + "learning_rate": 
1.9223638364639902e-05, + "loss": 0.8089, + "step": 2929 + }, + { + "epoch": 0.46, + "grad_norm": 26.336963973047034, + "learning_rate": 1.9222986739880607e-05, + "loss": 0.8745, + "step": 2930 + }, + { + "epoch": 0.46, + "grad_norm": 15.416894450523952, + "learning_rate": 1.9222334852824966e-05, + "loss": 0.7592, + "step": 2931 + }, + { + "epoch": 0.46, + "grad_norm": 30.967808432945866, + "learning_rate": 1.922168270349152e-05, + "loss": 0.8561, + "step": 2932 + }, + { + "epoch": 0.46, + "grad_norm": 27.312385426811293, + "learning_rate": 1.9221030291898802e-05, + "loss": 0.8278, + "step": 2933 + }, + { + "epoch": 0.46, + "grad_norm": 16.97397118798427, + "learning_rate": 1.9220377618065383e-05, + "loss": 0.7038, + "step": 2934 + }, + { + "epoch": 0.46, + "grad_norm": 21.4901065990765, + "learning_rate": 1.921972468200982e-05, + "loss": 0.7889, + "step": 2935 + }, + { + "epoch": 0.46, + "grad_norm": 22.625603199526246, + "learning_rate": 1.9219071483750678e-05, + "loss": 0.8768, + "step": 2936 + }, + { + "epoch": 0.46, + "grad_norm": 16.20967454333816, + "learning_rate": 1.9218418023306536e-05, + "loss": 0.8274, + "step": 2937 + }, + { + "epoch": 0.46, + "grad_norm": 24.141432391247864, + "learning_rate": 1.9217764300695977e-05, + "loss": 0.7748, + "step": 2938 + }, + { + "epoch": 0.46, + "grad_norm": 21.316808523052693, + "learning_rate": 1.9217110315937597e-05, + "loss": 0.835, + "step": 2939 + }, + { + "epoch": 0.46, + "grad_norm": 30.469695912863475, + "learning_rate": 1.921645606904999e-05, + "loss": 0.891, + "step": 2940 + }, + { + "epoch": 0.46, + "grad_norm": 21.706821970594095, + "learning_rate": 1.9215801560051764e-05, + "loss": 0.8276, + "step": 2941 + }, + { + "epoch": 0.46, + "grad_norm": 14.769118284834967, + "learning_rate": 1.921514678896153e-05, + "loss": 0.7584, + "step": 2942 + }, + { + "epoch": 0.46, + "grad_norm": 11.253685453527048, + "learning_rate": 1.9214491755797916e-05, + "loss": 0.7512, + "step": 2943 + }, + { + "epoch": 0.46, + "grad_norm": 23.465678317509454, + "learning_rate": 1.9213836460579546e-05, + "loss": 0.8682, + "step": 2944 + }, + { + "epoch": 0.46, + "grad_norm": 15.11941303362085, + "learning_rate": 1.9213180903325056e-05, + "loss": 0.8247, + "step": 2945 + }, + { + "epoch": 0.46, + "grad_norm": 24.692209704748254, + "learning_rate": 1.921252508405309e-05, + "loss": 0.8497, + "step": 2946 + }, + { + "epoch": 0.46, + "grad_norm": 19.01600024794213, + "learning_rate": 1.92118690027823e-05, + "loss": 0.8728, + "step": 2947 + }, + { + "epoch": 0.46, + "grad_norm": 21.891205517207375, + "learning_rate": 1.9211212659531345e-05, + "loss": 0.809, + "step": 2948 + }, + { + "epoch": 0.46, + "grad_norm": 21.101412431874632, + "learning_rate": 1.9210556054318886e-05, + "loss": 0.7563, + "step": 2949 + }, + { + "epoch": 0.46, + "grad_norm": 16.680172858223028, + "learning_rate": 1.9209899187163606e-05, + "loss": 0.8506, + "step": 2950 + }, + { + "epoch": 0.46, + "grad_norm": 15.491085455744027, + "learning_rate": 1.920924205808418e-05, + "loss": 0.7501, + "step": 2951 + }, + { + "epoch": 0.46, + "grad_norm": 17.866830131057615, + "learning_rate": 1.92085846670993e-05, + "loss": 0.7544, + "step": 2952 + }, + { + "epoch": 0.46, + "grad_norm": 17.84464375166633, + "learning_rate": 1.9207927014227653e-05, + "loss": 0.8491, + "step": 2953 + }, + { + "epoch": 0.46, + "grad_norm": 16.44809408698163, + "learning_rate": 1.9207269099487953e-05, + "loss": 0.8276, + "step": 2954 + }, + { + "epoch": 0.46, + "grad_norm": 14.07766197027408, + "learning_rate": 
1.9206610922898904e-05, + "loss": 0.7609, + "step": 2955 + }, + { + "epoch": 0.46, + "grad_norm": 23.54914848905122, + "learning_rate": 1.9205952484479225e-05, + "loss": 0.8494, + "step": 2956 + }, + { + "epoch": 0.46, + "grad_norm": 26.0218185979425, + "learning_rate": 1.920529378424764e-05, + "loss": 0.8645, + "step": 2957 + }, + { + "epoch": 0.46, + "grad_norm": 21.94251281096728, + "learning_rate": 1.920463482222289e-05, + "loss": 0.8819, + "step": 2958 + }, + { + "epoch": 0.46, + "grad_norm": 16.004192332047143, + "learning_rate": 1.920397559842371e-05, + "loss": 0.8254, + "step": 2959 + }, + { + "epoch": 0.46, + "grad_norm": 17.4700718577476, + "learning_rate": 1.9203316112868844e-05, + "loss": 0.7932, + "step": 2960 + }, + { + "epoch": 0.46, + "grad_norm": 23.28652544928423, + "learning_rate": 1.9202656365577056e-05, + "loss": 0.7781, + "step": 2961 + }, + { + "epoch": 0.46, + "grad_norm": 40.843648733817595, + "learning_rate": 1.9201996356567104e-05, + "loss": 0.9355, + "step": 2962 + }, + { + "epoch": 0.46, + "grad_norm": 15.72787100895518, + "learning_rate": 1.9201336085857757e-05, + "loss": 0.8533, + "step": 2963 + }, + { + "epoch": 0.46, + "grad_norm": 17.437005000967584, + "learning_rate": 1.9200675553467793e-05, + "loss": 0.7738, + "step": 2964 + }, + { + "epoch": 0.46, + "grad_norm": 17.514193885590164, + "learning_rate": 1.9200014759416002e-05, + "loss": 0.7954, + "step": 2965 + }, + { + "epoch": 0.46, + "grad_norm": 14.800568750003228, + "learning_rate": 1.919935370372117e-05, + "loss": 0.8311, + "step": 2966 + }, + { + "epoch": 0.46, + "grad_norm": 15.624515791542912, + "learning_rate": 1.91986923864021e-05, + "loss": 0.7354, + "step": 2967 + }, + { + "epoch": 0.46, + "grad_norm": 18.092666715845724, + "learning_rate": 1.91980308074776e-05, + "loss": 0.909, + "step": 2968 + }, + { + "epoch": 0.46, + "grad_norm": 20.732371659651548, + "learning_rate": 1.9197368966966486e-05, + "loss": 0.8271, + "step": 2969 + }, + { + "epoch": 0.46, + "grad_norm": 24.69417819706931, + "learning_rate": 1.919670686488758e-05, + "loss": 0.7414, + "step": 2970 + }, + { + "epoch": 0.46, + "grad_norm": 20.122450704110175, + "learning_rate": 1.9196044501259706e-05, + "loss": 0.8145, + "step": 2971 + }, + { + "epoch": 0.46, + "grad_norm": 17.592437767195474, + "learning_rate": 1.919538187610171e-05, + "loss": 0.7751, + "step": 2972 + }, + { + "epoch": 0.46, + "grad_norm": 15.147962863759687, + "learning_rate": 1.9194718989432434e-05, + "loss": 0.7556, + "step": 2973 + }, + { + "epoch": 0.46, + "grad_norm": 13.081973025903448, + "learning_rate": 1.9194055841270724e-05, + "loss": 0.7244, + "step": 2974 + }, + { + "epoch": 0.46, + "grad_norm": 22.151120993107924, + "learning_rate": 1.9193392431635447e-05, + "loss": 0.7764, + "step": 2975 + }, + { + "epoch": 0.46, + "grad_norm": 18.013099177799887, + "learning_rate": 1.9192728760545466e-05, + "loss": 0.8906, + "step": 2976 + }, + { + "epoch": 0.47, + "grad_norm": 23.82556209101294, + "learning_rate": 1.9192064828019657e-05, + "loss": 0.8022, + "step": 2977 + }, + { + "epoch": 0.47, + "grad_norm": 29.16419849113684, + "learning_rate": 1.91914006340769e-05, + "loss": 0.8661, + "step": 2978 + }, + { + "epoch": 0.47, + "grad_norm": 17.058282397280657, + "learning_rate": 1.919073617873609e-05, + "loss": 0.7733, + "step": 2979 + }, + { + "epoch": 0.47, + "grad_norm": 20.393799984046787, + "learning_rate": 1.9190071462016115e-05, + "loss": 0.7316, + "step": 2980 + }, + { + "epoch": 0.47, + "grad_norm": 25.690095128816516, + "learning_rate": 
1.918940648393589e-05, + "loss": 0.8779, + "step": 2981 + }, + { + "epoch": 0.47, + "grad_norm": 16.530499667083717, + "learning_rate": 1.9188741244514312e-05, + "loss": 0.8052, + "step": 2982 + }, + { + "epoch": 0.47, + "grad_norm": 22.38641859368205, + "learning_rate": 1.9188075743770312e-05, + "loss": 0.8937, + "step": 2983 + }, + { + "epoch": 0.47, + "grad_norm": 25.43112931390468, + "learning_rate": 1.9187409981722814e-05, + "loss": 0.801, + "step": 2984 + }, + { + "epoch": 0.47, + "grad_norm": 16.866776106651184, + "learning_rate": 1.918674395839075e-05, + "loss": 0.7824, + "step": 2985 + }, + { + "epoch": 0.47, + "grad_norm": 26.97578799817641, + "learning_rate": 1.9186077673793062e-05, + "loss": 0.8566, + "step": 2986 + }, + { + "epoch": 0.47, + "grad_norm": 25.05341934632158, + "learning_rate": 1.9185411127948695e-05, + "loss": 0.9179, + "step": 2987 + }, + { + "epoch": 0.47, + "grad_norm": 18.067672739124912, + "learning_rate": 1.9184744320876612e-05, + "loss": 0.8418, + "step": 2988 + }, + { + "epoch": 0.47, + "grad_norm": 19.47557472848811, + "learning_rate": 1.9184077252595777e-05, + "loss": 0.8147, + "step": 2989 + }, + { + "epoch": 0.47, + "grad_norm": 15.80401669538672, + "learning_rate": 1.918340992312515e-05, + "loss": 0.8732, + "step": 2990 + }, + { + "epoch": 0.47, + "grad_norm": 13.328644617154742, + "learning_rate": 1.9182742332483722e-05, + "loss": 0.715, + "step": 2991 + }, + { + "epoch": 0.47, + "grad_norm": 18.63828946756317, + "learning_rate": 1.9182074480690472e-05, + "loss": 0.7943, + "step": 2992 + }, + { + "epoch": 0.47, + "grad_norm": 13.833982534164743, + "learning_rate": 1.9181406367764398e-05, + "loss": 0.7129, + "step": 2993 + }, + { + "epoch": 0.47, + "grad_norm": 21.7322433730387, + "learning_rate": 1.9180737993724494e-05, + "loss": 0.8075, + "step": 2994 + }, + { + "epoch": 0.47, + "grad_norm": 14.652519000273282, + "learning_rate": 1.9180069358589773e-05, + "loss": 0.7774, + "step": 2995 + }, + { + "epoch": 0.47, + "grad_norm": 21.711949560619978, + "learning_rate": 1.9179400462379248e-05, + "loss": 0.8935, + "step": 2996 + }, + { + "epoch": 0.47, + "grad_norm": 19.270920943284427, + "learning_rate": 1.9178731305111952e-05, + "loss": 0.7492, + "step": 2997 + }, + { + "epoch": 0.47, + "grad_norm": 20.175697308021366, + "learning_rate": 1.91780618868069e-05, + "loss": 0.8474, + "step": 2998 + }, + { + "epoch": 0.47, + "grad_norm": 24.968055430767055, + "learning_rate": 1.9177392207483138e-05, + "loss": 0.7235, + "step": 2999 + }, + { + "epoch": 0.47, + "grad_norm": 15.887994508659474, + "learning_rate": 1.9176722267159706e-05, + "loss": 0.8417, + "step": 3000 + }, + { + "epoch": 0.47, + "grad_norm": 19.83000504108188, + "learning_rate": 1.9176052065855666e-05, + "loss": 0.8184, + "step": 3001 + }, + { + "epoch": 0.47, + "grad_norm": 26.695328402940113, + "learning_rate": 1.917538160359007e-05, + "loss": 0.8408, + "step": 3002 + }, + { + "epoch": 0.47, + "grad_norm": 25.982270747971615, + "learning_rate": 1.9174710880381995e-05, + "loss": 0.8644, + "step": 3003 + }, + { + "epoch": 0.47, + "grad_norm": 22.912944427613844, + "learning_rate": 1.9174039896250505e-05, + "loss": 0.7888, + "step": 3004 + }, + { + "epoch": 0.47, + "grad_norm": 20.032150266921676, + "learning_rate": 1.9173368651214686e-05, + "loss": 0.7972, + "step": 3005 + }, + { + "epoch": 0.47, + "grad_norm": 19.035552993951598, + "learning_rate": 1.917269714529363e-05, + "loss": 0.7865, + "step": 3006 + }, + { + "epoch": 0.47, + "grad_norm": 22.93616797064195, + "learning_rate": 
1.9172025378506434e-05, + "loss": 0.7995, + "step": 3007 + }, + { + "epoch": 0.47, + "grad_norm": 21.41459353378958, + "learning_rate": 1.9171353350872203e-05, + "loss": 0.767, + "step": 3008 + }, + { + "epoch": 0.47, + "grad_norm": 20.700018681023064, + "learning_rate": 1.9170681062410042e-05, + "loss": 0.7218, + "step": 3009 + }, + { + "epoch": 0.47, + "grad_norm": 31.0410510740856, + "learning_rate": 1.917000851313908e-05, + "loss": 0.8386, + "step": 3010 + }, + { + "epoch": 0.47, + "grad_norm": 23.574986941661635, + "learning_rate": 1.9169335703078443e-05, + "loss": 0.757, + "step": 3011 + }, + { + "epoch": 0.47, + "grad_norm": 21.38841900934235, + "learning_rate": 1.916866263224726e-05, + "loss": 0.7719, + "step": 3012 + }, + { + "epoch": 0.47, + "grad_norm": 22.931303081752528, + "learning_rate": 1.9167989300664675e-05, + "loss": 0.8328, + "step": 3013 + }, + { + "epoch": 0.47, + "grad_norm": 20.136696520403664, + "learning_rate": 1.9167315708349835e-05, + "loss": 0.8111, + "step": 3014 + }, + { + "epoch": 0.47, + "grad_norm": 28.187525535622676, + "learning_rate": 1.9166641855321902e-05, + "loss": 0.7314, + "step": 3015 + }, + { + "epoch": 0.47, + "grad_norm": 20.280160694669522, + "learning_rate": 1.9165967741600038e-05, + "loss": 0.7884, + "step": 3016 + }, + { + "epoch": 0.47, + "grad_norm": 14.690755728793388, + "learning_rate": 1.916529336720341e-05, + "loss": 0.74, + "step": 3017 + }, + { + "epoch": 0.47, + "grad_norm": 17.09338758509844, + "learning_rate": 1.9164618732151202e-05, + "loss": 0.835, + "step": 3018 + }, + { + "epoch": 0.47, + "grad_norm": 48.7374454853749, + "learning_rate": 1.91639438364626e-05, + "loss": 0.8324, + "step": 3019 + }, + { + "epoch": 0.47, + "grad_norm": 27.207459095194455, + "learning_rate": 1.9163268680156793e-05, + "loss": 0.879, + "step": 3020 + }, + { + "epoch": 0.47, + "grad_norm": 24.994638515091175, + "learning_rate": 1.9162593263252988e-05, + "loss": 0.7789, + "step": 3021 + }, + { + "epoch": 0.47, + "grad_norm": 17.173834867637698, + "learning_rate": 1.916191758577039e-05, + "loss": 0.8177, + "step": 3022 + }, + { + "epoch": 0.47, + "grad_norm": 78.70199739861276, + "learning_rate": 1.9161241647728213e-05, + "loss": 1.0178, + "step": 3023 + }, + { + "epoch": 0.47, + "grad_norm": 23.1262693230769, + "learning_rate": 1.9160565449145687e-05, + "loss": 0.787, + "step": 3024 + }, + { + "epoch": 0.47, + "grad_norm": 13.807728325083367, + "learning_rate": 1.9159888990042033e-05, + "loss": 0.708, + "step": 3025 + }, + { + "epoch": 0.47, + "grad_norm": 10.274863449463217, + "learning_rate": 1.91592122704365e-05, + "loss": 0.6912, + "step": 3026 + }, + { + "epoch": 0.47, + "grad_norm": 13.52421657493265, + "learning_rate": 1.915853529034832e-05, + "loss": 0.8269, + "step": 3027 + }, + { + "epoch": 0.47, + "grad_norm": 19.22484300063159, + "learning_rate": 1.915785804979676e-05, + "loss": 0.7822, + "step": 3028 + }, + { + "epoch": 0.47, + "grad_norm": 29.935498456404957, + "learning_rate": 1.915718054880107e-05, + "loss": 0.9521, + "step": 3029 + }, + { + "epoch": 0.47, + "grad_norm": 28.4065199819226, + "learning_rate": 1.9156502787380527e-05, + "loss": 0.7704, + "step": 3030 + }, + { + "epoch": 0.47, + "grad_norm": 21.14438303372769, + "learning_rate": 1.91558247655544e-05, + "loss": 0.8861, + "step": 3031 + }, + { + "epoch": 0.47, + "grad_norm": 188.43611629193143, + "learning_rate": 1.9155146483341972e-05, + "loss": 0.9285, + "step": 3032 + }, + { + "epoch": 0.47, + "grad_norm": 23.070695464394742, + "learning_rate": 1.9154467940762534e-05, + 
"loss": 0.7855, + "step": 3033 + }, + { + "epoch": 0.47, + "grad_norm": 19.187993590781332, + "learning_rate": 1.9153789137835384e-05, + "loss": 0.7894, + "step": 3034 + }, + { + "epoch": 0.47, + "grad_norm": 68.40038764914176, + "learning_rate": 1.9153110074579823e-05, + "loss": 0.866, + "step": 3035 + }, + { + "epoch": 0.47, + "grad_norm": 14.83281251750027, + "learning_rate": 1.915243075101517e-05, + "loss": 0.8398, + "step": 3036 + }, + { + "epoch": 0.47, + "grad_norm": 15.808878613981356, + "learning_rate": 1.9151751167160734e-05, + "loss": 0.7947, + "step": 3037 + }, + { + "epoch": 0.47, + "grad_norm": 19.707452924922112, + "learning_rate": 1.9151071323035856e-05, + "loss": 0.7885, + "step": 3038 + }, + { + "epoch": 0.47, + "grad_norm": 22.65563795249573, + "learning_rate": 1.9150391218659855e-05, + "loss": 0.8874, + "step": 3039 + }, + { + "epoch": 0.47, + "grad_norm": 32.72912747263712, + "learning_rate": 1.9149710854052087e-05, + "loss": 0.7335, + "step": 3040 + }, + { + "epoch": 0.48, + "grad_norm": 21.214797116661902, + "learning_rate": 1.914903022923189e-05, + "loss": 0.8206, + "step": 3041 + }, + { + "epoch": 0.48, + "grad_norm": 28.518177596767742, + "learning_rate": 1.914834934421863e-05, + "loss": 0.8378, + "step": 3042 + }, + { + "epoch": 0.48, + "grad_norm": 23.781896890045363, + "learning_rate": 1.9147668199031664e-05, + "loss": 0.8055, + "step": 3043 + }, + { + "epoch": 0.48, + "grad_norm": 27.430209166027087, + "learning_rate": 1.9146986793690363e-05, + "loss": 0.9628, + "step": 3044 + }, + { + "epoch": 0.48, + "grad_norm": 18.027555733919122, + "learning_rate": 1.914630512821411e-05, + "loss": 0.8761, + "step": 3045 + }, + { + "epoch": 0.48, + "grad_norm": 22.445006547376877, + "learning_rate": 1.9145623202622293e-05, + "loss": 0.8882, + "step": 3046 + }, + { + "epoch": 0.48, + "grad_norm": 14.351071810860862, + "learning_rate": 1.91449410169343e-05, + "loss": 0.7529, + "step": 3047 + }, + { + "epoch": 0.48, + "grad_norm": 16.19633968461515, + "learning_rate": 1.9144258571169533e-05, + "loss": 0.8132, + "step": 3048 + }, + { + "epoch": 0.48, + "grad_norm": 22.80901606358736, + "learning_rate": 1.9143575865347405e-05, + "loss": 0.9359, + "step": 3049 + }, + { + "epoch": 0.48, + "grad_norm": 15.474570051261475, + "learning_rate": 1.9142892899487323e-05, + "loss": 0.8576, + "step": 3050 + }, + { + "epoch": 0.48, + "grad_norm": 16.161613733349938, + "learning_rate": 1.9142209673608717e-05, + "loss": 0.879, + "step": 3051 + }, + { + "epoch": 0.48, + "grad_norm": 25.155900803505816, + "learning_rate": 1.914152618773102e-05, + "loss": 0.8732, + "step": 3052 + }, + { + "epoch": 0.48, + "grad_norm": 24.691004289524503, + "learning_rate": 1.914084244187366e-05, + "loss": 0.8542, + "step": 3053 + }, + { + "epoch": 0.48, + "grad_norm": 23.733439208562597, + "learning_rate": 1.914015843605609e-05, + "loss": 0.7865, + "step": 3054 + }, + { + "epoch": 0.48, + "grad_norm": 30.483586455117734, + "learning_rate": 1.9139474170297764e-05, + "loss": 0.8413, + "step": 3055 + }, + { + "epoch": 0.48, + "grad_norm": 21.977388508824916, + "learning_rate": 1.9138789644618138e-05, + "loss": 0.8357, + "step": 3056 + }, + { + "epoch": 0.48, + "grad_norm": 21.508870975541917, + "learning_rate": 1.9138104859036678e-05, + "loss": 0.8364, + "step": 3057 + }, + { + "epoch": 0.48, + "grad_norm": 21.79289521148358, + "learning_rate": 1.913741981357286e-05, + "loss": 0.9342, + "step": 3058 + }, + { + "epoch": 0.48, + "grad_norm": 22.60082752919683, + "learning_rate": 1.913673450824617e-05, + "loss": 
0.7614, + "step": 3059 + }, + { + "epoch": 0.48, + "grad_norm": 14.314914554878236, + "learning_rate": 1.9136048943076098e-05, + "loss": 0.7982, + "step": 3060 + }, + { + "epoch": 0.48, + "grad_norm": 16.21496649621919, + "learning_rate": 1.9135363118082138e-05, + "loss": 0.7698, + "step": 3061 + }, + { + "epoch": 0.48, + "grad_norm": 14.68564685432869, + "learning_rate": 1.913467703328379e-05, + "loss": 0.813, + "step": 3062 + }, + { + "epoch": 0.48, + "grad_norm": 14.238265147154276, + "learning_rate": 1.9133990688700578e-05, + "loss": 0.7063, + "step": 3063 + }, + { + "epoch": 0.48, + "grad_norm": 20.684032162134578, + "learning_rate": 1.9133304084352006e-05, + "loss": 0.794, + "step": 3064 + }, + { + "epoch": 0.48, + "grad_norm": 22.306271986791536, + "learning_rate": 1.9132617220257613e-05, + "loss": 0.8492, + "step": 3065 + }, + { + "epoch": 0.48, + "grad_norm": 17.816758462426602, + "learning_rate": 1.913193009643693e-05, + "loss": 0.8228, + "step": 3066 + }, + { + "epoch": 0.48, + "grad_norm": 20.44329454946185, + "learning_rate": 1.9131242712909497e-05, + "loss": 0.913, + "step": 3067 + }, + { + "epoch": 0.48, + "grad_norm": 14.711856183312651, + "learning_rate": 1.913055506969486e-05, + "loss": 0.8675, + "step": 3068 + }, + { + "epoch": 0.48, + "grad_norm": 16.10911783078787, + "learning_rate": 1.9129867166812583e-05, + "loss": 0.7506, + "step": 3069 + }, + { + "epoch": 0.48, + "grad_norm": 22.39252835287759, + "learning_rate": 1.9129179004282218e-05, + "loss": 0.7542, + "step": 3070 + }, + { + "epoch": 0.48, + "grad_norm": 19.396309603383, + "learning_rate": 1.9128490582123346e-05, + "loss": 0.8345, + "step": 3071 + }, + { + "epoch": 0.48, + "grad_norm": 16.322095694118957, + "learning_rate": 1.912780190035554e-05, + "loss": 0.7505, + "step": 3072 + }, + { + "epoch": 0.48, + "grad_norm": 19.454810197446502, + "learning_rate": 1.9127112958998392e-05, + "loss": 0.8345, + "step": 3073 + }, + { + "epoch": 0.48, + "grad_norm": 105.19515645780156, + "learning_rate": 1.9126423758071486e-05, + "loss": 0.9433, + "step": 3074 + }, + { + "epoch": 0.48, + "grad_norm": 20.251784971534057, + "learning_rate": 1.912573429759443e-05, + "loss": 0.863, + "step": 3075 + }, + { + "epoch": 0.48, + "grad_norm": 18.192685568633546, + "learning_rate": 1.9125044577586828e-05, + "loss": 0.784, + "step": 3076 + }, + { + "epoch": 0.48, + "grad_norm": 15.968912127543344, + "learning_rate": 1.9124354598068294e-05, + "loss": 0.8748, + "step": 3077 + }, + { + "epoch": 0.48, + "grad_norm": 26.428298764865872, + "learning_rate": 1.912366435905846e-05, + "loss": 0.9027, + "step": 3078 + }, + { + "epoch": 0.48, + "grad_norm": 24.129201116824394, + "learning_rate": 1.9122973860576942e-05, + "loss": 0.9924, + "step": 3079 + }, + { + "epoch": 0.48, + "grad_norm": 18.870318116325166, + "learning_rate": 1.9122283102643383e-05, + "loss": 0.7567, + "step": 3080 + }, + { + "epoch": 0.48, + "grad_norm": 20.323786018100822, + "learning_rate": 1.9121592085277432e-05, + "loss": 0.7717, + "step": 3081 + }, + { + "epoch": 0.48, + "grad_norm": 25.112334402216227, + "learning_rate": 1.912090080849874e-05, + "loss": 0.8616, + "step": 3082 + }, + { + "epoch": 0.48, + "grad_norm": 25.934243678587745, + "learning_rate": 1.9120209272326962e-05, + "loss": 0.7796, + "step": 3083 + }, + { + "epoch": 0.48, + "grad_norm": 24.82672576550508, + "learning_rate": 1.9119517476781766e-05, + "loss": 0.8885, + "step": 3084 + }, + { + "epoch": 0.48, + "grad_norm": 28.10484160412152, + "learning_rate": 1.911882542188283e-05, + "loss": 0.8709, + 
"step": 3085 + }, + { + "epoch": 0.48, + "grad_norm": 16.074270100007478, + "learning_rate": 1.9118133107649833e-05, + "loss": 0.756, + "step": 3086 + }, + { + "epoch": 0.48, + "grad_norm": 31.769694056903443, + "learning_rate": 1.9117440534102466e-05, + "loss": 0.9146, + "step": 3087 + }, + { + "epoch": 0.48, + "grad_norm": 31.294511954370197, + "learning_rate": 1.911674770126042e-05, + "loss": 0.8921, + "step": 3088 + }, + { + "epoch": 0.48, + "grad_norm": 19.86619813370443, + "learning_rate": 1.9116054609143408e-05, + "loss": 0.8159, + "step": 3089 + }, + { + "epoch": 0.48, + "grad_norm": 19.140514074510172, + "learning_rate": 1.9115361257771133e-05, + "loss": 0.7716, + "step": 3090 + }, + { + "epoch": 0.48, + "grad_norm": 15.32158532854001, + "learning_rate": 1.911466764716331e-05, + "loss": 0.9614, + "step": 3091 + }, + { + "epoch": 0.48, + "grad_norm": 16.20811950273685, + "learning_rate": 1.911397377733968e-05, + "loss": 0.7836, + "step": 3092 + }, + { + "epoch": 0.48, + "grad_norm": 13.772607778353215, + "learning_rate": 1.9113279648319964e-05, + "loss": 0.7588, + "step": 3093 + }, + { + "epoch": 0.48, + "grad_norm": 34.73969914570977, + "learning_rate": 1.9112585260123906e-05, + "loss": 0.9183, + "step": 3094 + }, + { + "epoch": 0.48, + "grad_norm": 19.482908049519, + "learning_rate": 1.9111890612771252e-05, + "loss": 0.886, + "step": 3095 + }, + { + "epoch": 0.48, + "grad_norm": 17.53036064214237, + "learning_rate": 1.911119570628176e-05, + "loss": 0.7775, + "step": 3096 + }, + { + "epoch": 0.48, + "grad_norm": 21.3446514356776, + "learning_rate": 1.9110500540675194e-05, + "loss": 0.9258, + "step": 3097 + }, + { + "epoch": 0.48, + "grad_norm": 26.49974695458246, + "learning_rate": 1.910980511597132e-05, + "loss": 0.8614, + "step": 3098 + }, + { + "epoch": 0.48, + "grad_norm": 27.22658414251488, + "learning_rate": 1.910910943218992e-05, + "loss": 0.8753, + "step": 3099 + }, + { + "epoch": 0.48, + "grad_norm": 20.928771941239514, + "learning_rate": 1.9108413489350775e-05, + "loss": 0.8703, + "step": 3100 + }, + { + "epoch": 0.48, + "grad_norm": 22.631568721769806, + "learning_rate": 1.910771728747368e-05, + "loss": 0.9159, + "step": 3101 + }, + { + "epoch": 0.48, + "grad_norm": 19.876356522433195, + "learning_rate": 1.910702082657843e-05, + "loss": 0.7536, + "step": 3102 + }, + { + "epoch": 0.48, + "grad_norm": 17.274547291080765, + "learning_rate": 1.910632410668484e-05, + "loss": 0.6968, + "step": 3103 + }, + { + "epoch": 0.48, + "grad_norm": 14.671944836418826, + "learning_rate": 1.9105627127812717e-05, + "loss": 0.8034, + "step": 3104 + }, + { + "epoch": 0.49, + "grad_norm": 22.764355759594338, + "learning_rate": 1.910492988998188e-05, + "loss": 0.8499, + "step": 3105 + }, + { + "epoch": 0.49, + "grad_norm": 29.226975306267484, + "learning_rate": 1.910423239321217e-05, + "loss": 0.8224, + "step": 3106 + }, + { + "epoch": 0.49, + "grad_norm": 12.579159736720895, + "learning_rate": 1.9103534637523414e-05, + "loss": 0.7808, + "step": 3107 + }, + { + "epoch": 0.49, + "grad_norm": 21.72727288419119, + "learning_rate": 1.910283662293546e-05, + "loss": 0.8869, + "step": 3108 + }, + { + "epoch": 0.49, + "grad_norm": 18.856564437993306, + "learning_rate": 1.9102138349468154e-05, + "loss": 0.8029, + "step": 3109 + }, + { + "epoch": 0.49, + "grad_norm": 18.953049028249062, + "learning_rate": 1.910143981714136e-05, + "loss": 0.8347, + "step": 3110 + }, + { + "epoch": 0.49, + "grad_norm": 16.9489918808129, + "learning_rate": 1.9100741025974943e-05, + "loss": 0.7513, + "step": 3111 + }, + { 
+ "epoch": 0.49, + "grad_norm": 27.66708381117652, + "learning_rate": 1.9100041975988776e-05, + "loss": 0.8121, + "step": 3112 + }, + { + "epoch": 0.49, + "grad_norm": 21.065126588513714, + "learning_rate": 1.9099342667202733e-05, + "loss": 0.754, + "step": 3113 + }, + { + "epoch": 0.49, + "grad_norm": 15.67327893729973, + "learning_rate": 1.9098643099636714e-05, + "loss": 0.7429, + "step": 3114 + }, + { + "epoch": 0.49, + "grad_norm": 19.743642416760856, + "learning_rate": 1.9097943273310603e-05, + "loss": 0.8474, + "step": 3115 + }, + { + "epoch": 0.49, + "grad_norm": 16.69908849475767, + "learning_rate": 1.909724318824431e-05, + "loss": 0.8815, + "step": 3116 + }, + { + "epoch": 0.49, + "grad_norm": 25.1918363766617, + "learning_rate": 1.909654284445774e-05, + "loss": 0.8509, + "step": 3117 + }, + { + "epoch": 0.49, + "grad_norm": 106.24448763193921, + "learning_rate": 1.9095842241970817e-05, + "loss": 0.8482, + "step": 3118 + }, + { + "epoch": 0.49, + "grad_norm": 16.94313310015852, + "learning_rate": 1.9095141380803457e-05, + "loss": 0.7856, + "step": 3119 + }, + { + "epoch": 0.49, + "grad_norm": 16.2955491385221, + "learning_rate": 1.90944402609756e-05, + "loss": 0.7584, + "step": 3120 + }, + { + "epoch": 0.49, + "grad_norm": 27.399234865636398, + "learning_rate": 1.909373888250718e-05, + "loss": 0.8746, + "step": 3121 + }, + { + "epoch": 0.49, + "grad_norm": 19.065127674726245, + "learning_rate": 1.9093037245418147e-05, + "loss": 0.8033, + "step": 3122 + }, + { + "epoch": 0.49, + "grad_norm": 17.415516942768967, + "learning_rate": 1.909233534972845e-05, + "loss": 0.8502, + "step": 3123 + }, + { + "epoch": 0.49, + "grad_norm": 19.13339042803202, + "learning_rate": 1.9091633195458062e-05, + "loss": 0.82, + "step": 3124 + }, + { + "epoch": 0.49, + "grad_norm": 14.54999807055041, + "learning_rate": 1.9090930782626943e-05, + "loss": 0.7479, + "step": 3125 + }, + { + "epoch": 0.49, + "grad_norm": 21.961036975028698, + "learning_rate": 1.9090228111255066e-05, + "loss": 0.8619, + "step": 3126 + }, + { + "epoch": 0.49, + "grad_norm": 21.472341947734442, + "learning_rate": 1.908952518136242e-05, + "loss": 0.8502, + "step": 3127 + }, + { + "epoch": 0.49, + "grad_norm": 16.592292588739333, + "learning_rate": 1.9088821992969e-05, + "loss": 0.7517, + "step": 3128 + }, + { + "epoch": 0.49, + "grad_norm": 14.21307371670834, + "learning_rate": 1.9088118546094793e-05, + "loss": 0.8045, + "step": 3129 + }, + { + "epoch": 0.49, + "grad_norm": 19.981963037825658, + "learning_rate": 1.9087414840759812e-05, + "loss": 0.8468, + "step": 3130 + }, + { + "epoch": 0.49, + "grad_norm": 22.076005426268907, + "learning_rate": 1.9086710876984075e-05, + "loss": 0.7823, + "step": 3131 + }, + { + "epoch": 0.49, + "grad_norm": 19.660020628441846, + "learning_rate": 1.9086006654787588e-05, + "loss": 0.7982, + "step": 3132 + }, + { + "epoch": 0.49, + "grad_norm": 16.416622407082215, + "learning_rate": 1.908530217419039e-05, + "loss": 0.8989, + "step": 3133 + }, + { + "epoch": 0.49, + "grad_norm": 13.823122805150962, + "learning_rate": 1.9084597435212517e-05, + "loss": 0.8361, + "step": 3134 + }, + { + "epoch": 0.49, + "grad_norm": 17.627198711174408, + "learning_rate": 1.9083892437874004e-05, + "loss": 0.8226, + "step": 3135 + }, + { + "epoch": 0.49, + "grad_norm": 20.515210547695123, + "learning_rate": 1.90831871821949e-05, + "loss": 0.7261, + "step": 3136 + }, + { + "epoch": 0.49, + "grad_norm": 27.63891188206161, + "learning_rate": 1.9082481668195273e-05, + "loss": 0.8063, + "step": 3137 + }, + { + "epoch": 0.49, + 
"grad_norm": 21.223533788113098, + "learning_rate": 1.9081775895895177e-05, + "loss": 0.7253, + "step": 3138 + }, + { + "epoch": 0.49, + "grad_norm": 21.209520861701765, + "learning_rate": 1.9081069865314688e-05, + "loss": 0.7902, + "step": 3139 + }, + { + "epoch": 0.49, + "grad_norm": 25.65344581020031, + "learning_rate": 1.9080363576473883e-05, + "loss": 0.7909, + "step": 3140 + }, + { + "epoch": 0.49, + "grad_norm": 16.616322906054116, + "learning_rate": 1.907965702939285e-05, + "loss": 0.7744, + "step": 3141 + }, + { + "epoch": 0.49, + "grad_norm": 15.884314015271608, + "learning_rate": 1.9078950224091685e-05, + "loss": 0.8179, + "step": 3142 + }, + { + "epoch": 0.49, + "grad_norm": 18.443519655416107, + "learning_rate": 1.907824316059048e-05, + "loss": 0.8592, + "step": 3143 + }, + { + "epoch": 0.49, + "grad_norm": 18.7428535376474, + "learning_rate": 1.9077535838909356e-05, + "loss": 0.7896, + "step": 3144 + }, + { + "epoch": 0.49, + "grad_norm": 16.010459291984223, + "learning_rate": 1.9076828259068422e-05, + "loss": 0.7939, + "step": 3145 + }, + { + "epoch": 0.49, + "grad_norm": 18.17897469664532, + "learning_rate": 1.9076120421087806e-05, + "loss": 0.7815, + "step": 3146 + }, + { + "epoch": 0.49, + "grad_norm": 20.02230568097062, + "learning_rate": 1.907541232498763e-05, + "loss": 0.9493, + "step": 3147 + }, + { + "epoch": 0.49, + "grad_norm": 23.369242738149158, + "learning_rate": 1.9074703970788032e-05, + "loss": 0.9246, + "step": 3148 + }, + { + "epoch": 0.49, + "grad_norm": 20.075400649309046, + "learning_rate": 1.907399535850917e-05, + "loss": 0.8102, + "step": 3149 + }, + { + "epoch": 0.49, + "grad_norm": 21.134030874954295, + "learning_rate": 1.9073286488171184e-05, + "loss": 0.7831, + "step": 3150 + }, + { + "epoch": 0.49, + "grad_norm": 17.20100989319069, + "learning_rate": 1.9072577359794237e-05, + "loss": 0.797, + "step": 3151 + }, + { + "epoch": 0.49, + "grad_norm": 16.31817177208678, + "learning_rate": 1.9071867973398502e-05, + "loss": 0.8151, + "step": 3152 + }, + { + "epoch": 0.49, + "grad_norm": 4.3416210736141725, + "learning_rate": 1.9071158329004144e-05, + "loss": 0.8087, + "step": 3153 + }, + { + "epoch": 0.49, + "grad_norm": 18.61621300103439, + "learning_rate": 1.907044842663135e-05, + "loss": 0.7402, + "step": 3154 + }, + { + "epoch": 0.49, + "grad_norm": 16.41108041951863, + "learning_rate": 1.906973826630031e-05, + "loss": 0.865, + "step": 3155 + }, + { + "epoch": 0.49, + "grad_norm": 20.024465899899187, + "learning_rate": 1.906902784803122e-05, + "loss": 0.8306, + "step": 3156 + }, + { + "epoch": 0.49, + "grad_norm": 26.99403006507065, + "learning_rate": 1.9068317171844284e-05, + "loss": 0.9317, + "step": 3157 + }, + { + "epoch": 0.49, + "grad_norm": 9.96428759991952, + "learning_rate": 1.9067606237759712e-05, + "loss": 0.7189, + "step": 3158 + }, + { + "epoch": 0.49, + "grad_norm": 17.963645590690497, + "learning_rate": 1.906689504579772e-05, + "loss": 0.8662, + "step": 3159 + }, + { + "epoch": 0.49, + "grad_norm": 17.218922496925103, + "learning_rate": 1.906618359597854e-05, + "loss": 0.9281, + "step": 3160 + }, + { + "epoch": 0.49, + "grad_norm": 17.79946724003632, + "learning_rate": 1.90654718883224e-05, + "loss": 0.6847, + "step": 3161 + }, + { + "epoch": 0.49, + "grad_norm": 18.17157078272671, + "learning_rate": 1.9064759922849544e-05, + "loss": 0.8478, + "step": 3162 + }, + { + "epoch": 0.49, + "grad_norm": 16.78562059796633, + "learning_rate": 1.906404769958022e-05, + "loss": 0.6997, + "step": 3163 + }, + { + "epoch": 0.49, + "grad_norm": 
17.497370269004637, + "learning_rate": 1.9063335218534677e-05, + "loss": 0.7554, + "step": 3164 + }, + { + "epoch": 0.49, + "grad_norm": 16.649693669473898, + "learning_rate": 1.906262247973319e-05, + "loss": 0.7674, + "step": 3165 + }, + { + "epoch": 0.49, + "grad_norm": 15.00603353579781, + "learning_rate": 1.9061909483196012e-05, + "loss": 0.8077, + "step": 3166 + }, + { + "epoch": 0.49, + "grad_norm": 21.96696829319825, + "learning_rate": 1.9061196228943436e-05, + "loss": 0.9424, + "step": 3167 + }, + { + "epoch": 0.49, + "grad_norm": 14.666303651373918, + "learning_rate": 1.906048271699574e-05, + "loss": 0.7462, + "step": 3168 + }, + { + "epoch": 0.5, + "grad_norm": 21.56346788169085, + "learning_rate": 1.905976894737321e-05, + "loss": 0.8058, + "step": 3169 + }, + { + "epoch": 0.5, + "grad_norm": 14.983457982262538, + "learning_rate": 1.9059054920096155e-05, + "loss": 0.8306, + "step": 3170 + }, + { + "epoch": 0.5, + "grad_norm": 22.155661529398557, + "learning_rate": 1.9058340635184877e-05, + "loss": 0.8405, + "step": 3171 + }, + { + "epoch": 0.5, + "grad_norm": 14.57549470322433, + "learning_rate": 1.9057626092659692e-05, + "loss": 0.7334, + "step": 3172 + }, + { + "epoch": 0.5, + "grad_norm": 15.827567746896081, + "learning_rate": 1.9056911292540915e-05, + "loss": 0.7195, + "step": 3173 + }, + { + "epoch": 0.5, + "grad_norm": 21.972028228600916, + "learning_rate": 1.905619623484888e-05, + "loss": 0.7214, + "step": 3174 + }, + { + "epoch": 0.5, + "grad_norm": 13.866056987188042, + "learning_rate": 1.9055480919603924e-05, + "loss": 0.7584, + "step": 3175 + }, + { + "epoch": 0.5, + "grad_norm": 26.330229548889065, + "learning_rate": 1.905476534682639e-05, + "loss": 0.8473, + "step": 3176 + }, + { + "epoch": 0.5, + "grad_norm": 31.325558457872724, + "learning_rate": 1.905404951653662e-05, + "loss": 0.8722, + "step": 3177 + }, + { + "epoch": 0.5, + "grad_norm": 17.211834858164256, + "learning_rate": 1.9053333428754986e-05, + "loss": 0.7664, + "step": 3178 + }, + { + "epoch": 0.5, + "grad_norm": 15.083965391892713, + "learning_rate": 1.9052617083501837e-05, + "loss": 0.85, + "step": 3179 + }, + { + "epoch": 0.5, + "grad_norm": 17.158817002806252, + "learning_rate": 1.905190048079756e-05, + "loss": 0.8052, + "step": 3180 + }, + { + "epoch": 0.5, + "grad_norm": 12.372182607235231, + "learning_rate": 1.9051183620662526e-05, + "loss": 0.7049, + "step": 3181 + }, + { + "epoch": 0.5, + "grad_norm": 22.96415398086274, + "learning_rate": 1.9050466503117127e-05, + "loss": 0.8397, + "step": 3182 + }, + { + "epoch": 0.5, + "grad_norm": 25.250823988311357, + "learning_rate": 1.9049749128181752e-05, + "loss": 0.9214, + "step": 3183 + }, + { + "epoch": 0.5, + "grad_norm": 20.628738158483294, + "learning_rate": 1.904903149587681e-05, + "loss": 0.8246, + "step": 3184 + }, + { + "epoch": 0.5, + "grad_norm": 13.580946174331254, + "learning_rate": 1.90483136062227e-05, + "loss": 0.7727, + "step": 3185 + }, + { + "epoch": 0.5, + "grad_norm": 17.16508975837801, + "learning_rate": 1.9047595459239848e-05, + "loss": 0.7698, + "step": 3186 + }, + { + "epoch": 0.5, + "grad_norm": 17.30438426938258, + "learning_rate": 1.904687705494867e-05, + "loss": 0.7347, + "step": 3187 + }, + { + "epoch": 0.5, + "grad_norm": 21.562595660232425, + "learning_rate": 1.9046158393369608e-05, + "loss": 0.9778, + "step": 3188 + }, + { + "epoch": 0.5, + "grad_norm": 13.90905310314675, + "learning_rate": 1.9045439474523086e-05, + "loss": 0.8563, + "step": 3189 + }, + { + "epoch": 0.5, + "grad_norm": 15.126788763602624, + 
"learning_rate": 1.904472029842956e-05, + "loss": 0.7797, + "step": 3190 + }, + { + "epoch": 0.5, + "grad_norm": 26.624222759758187, + "learning_rate": 1.904400086510948e-05, + "loss": 0.8578, + "step": 3191 + }, + { + "epoch": 0.5, + "grad_norm": 24.69015382433746, + "learning_rate": 1.9043281174583305e-05, + "loss": 0.7512, + "step": 3192 + }, + { + "epoch": 0.5, + "grad_norm": 28.265565583233567, + "learning_rate": 1.9042561226871506e-05, + "loss": 0.8479, + "step": 3193 + }, + { + "epoch": 0.5, + "grad_norm": 18.815323731396376, + "learning_rate": 1.9041841021994552e-05, + "loss": 0.7862, + "step": 3194 + }, + { + "epoch": 0.5, + "grad_norm": 17.924109704560887, + "learning_rate": 1.9041120559972927e-05, + "loss": 0.7745, + "step": 3195 + }, + { + "epoch": 0.5, + "grad_norm": 3.877079630596655, + "learning_rate": 1.904039984082713e-05, + "loss": 0.8712, + "step": 3196 + }, + { + "epoch": 0.5, + "grad_norm": 18.77250016992269, + "learning_rate": 1.9039678864577642e-05, + "loss": 0.8153, + "step": 3197 + }, + { + "epoch": 0.5, + "grad_norm": 27.76538514853176, + "learning_rate": 1.903895763124498e-05, + "loss": 0.8957, + "step": 3198 + }, + { + "epoch": 0.5, + "grad_norm": 23.117283871505574, + "learning_rate": 1.9038236140849648e-05, + "loss": 0.8031, + "step": 3199 + }, + { + "epoch": 0.5, + "grad_norm": 18.047662480005094, + "learning_rate": 1.9037514393412167e-05, + "loss": 0.8145, + "step": 3200 + }, + { + "epoch": 0.5, + "grad_norm": 14.283046792237744, + "learning_rate": 1.9036792388953066e-05, + "loss": 0.7254, + "step": 3201 + }, + { + "epoch": 0.5, + "grad_norm": 23.19781966004275, + "learning_rate": 1.9036070127492875e-05, + "loss": 0.76, + "step": 3202 + }, + { + "epoch": 0.5, + "grad_norm": 20.17193392088283, + "learning_rate": 1.903534760905213e-05, + "loss": 0.8017, + "step": 3203 + }, + { + "epoch": 0.5, + "grad_norm": 16.10453323656041, + "learning_rate": 1.9034624833651393e-05, + "loss": 0.7997, + "step": 3204 + }, + { + "epoch": 0.5, + "grad_norm": 18.030486963000833, + "learning_rate": 1.9033901801311207e-05, + "loss": 0.8689, + "step": 3205 + }, + { + "epoch": 0.5, + "grad_norm": 23.46829393479594, + "learning_rate": 1.903317851205214e-05, + "loss": 0.839, + "step": 3206 + }, + { + "epoch": 0.5, + "grad_norm": 20.5378164201416, + "learning_rate": 1.903245496589476e-05, + "loss": 0.7806, + "step": 3207 + }, + { + "epoch": 0.5, + "grad_norm": 22.730279853850114, + "learning_rate": 1.9031731162859642e-05, + "loss": 0.8162, + "step": 3208 + }, + { + "epoch": 0.5, + "grad_norm": 16.84158372661201, + "learning_rate": 1.9031007102967375e-05, + "loss": 0.7447, + "step": 3209 + }, + { + "epoch": 0.5, + "grad_norm": 26.09868887228873, + "learning_rate": 1.903028278623855e-05, + "loss": 0.888, + "step": 3210 + }, + { + "epoch": 0.5, + "grad_norm": 14.788062582253868, + "learning_rate": 1.9029558212693764e-05, + "loss": 0.7907, + "step": 3211 + }, + { + "epoch": 0.5, + "grad_norm": 23.156128065789513, + "learning_rate": 1.9028833382353624e-05, + "loss": 0.8118, + "step": 3212 + }, + { + "epoch": 0.5, + "grad_norm": 13.067312722915801, + "learning_rate": 1.9028108295238745e-05, + "loss": 0.7429, + "step": 3213 + }, + { + "epoch": 0.5, + "grad_norm": 15.730143143699177, + "learning_rate": 1.902738295136975e-05, + "loss": 0.7797, + "step": 3214 + }, + { + "epoch": 0.5, + "grad_norm": 20.737672631233288, + "learning_rate": 1.902665735076726e-05, + "loss": 0.7292, + "step": 3215 + }, + { + "epoch": 0.5, + "grad_norm": 25.47696348750332, + "learning_rate": 1.9025931493451917e-05, + 
"loss": 0.8176, + "step": 3216 + }, + { + "epoch": 0.5, + "grad_norm": 18.45868765326416, + "learning_rate": 1.9025205379444362e-05, + "loss": 0.7827, + "step": 3217 + }, + { + "epoch": 0.5, + "grad_norm": 25.305542756219992, + "learning_rate": 1.9024479008765246e-05, + "loss": 0.8556, + "step": 3218 + }, + { + "epoch": 0.5, + "grad_norm": 22.36812396126054, + "learning_rate": 1.9023752381435227e-05, + "loss": 0.7753, + "step": 3219 + }, + { + "epoch": 0.5, + "grad_norm": 15.08174138604477, + "learning_rate": 1.9023025497474968e-05, + "loss": 0.7337, + "step": 3220 + }, + { + "epoch": 0.5, + "grad_norm": 42.66182033372968, + "learning_rate": 1.9022298356905146e-05, + "loss": 0.9066, + "step": 3221 + }, + { + "epoch": 0.5, + "grad_norm": 17.446188973439455, + "learning_rate": 1.902157095974643e-05, + "loss": 0.8542, + "step": 3222 + }, + { + "epoch": 0.5, + "grad_norm": 45.87349920130624, + "learning_rate": 1.902084330601952e-05, + "loss": 0.854, + "step": 3223 + }, + { + "epoch": 0.5, + "grad_norm": 19.09499898530948, + "learning_rate": 1.9020115395745098e-05, + "loss": 0.7287, + "step": 3224 + }, + { + "epoch": 0.5, + "grad_norm": 18.85777698342505, + "learning_rate": 1.9019387228943872e-05, + "loss": 0.8088, + "step": 3225 + }, + { + "epoch": 0.5, + "grad_norm": 17.911027894602718, + "learning_rate": 1.9018658805636553e-05, + "loss": 0.8078, + "step": 3226 + }, + { + "epoch": 0.5, + "grad_norm": 25.314574331483886, + "learning_rate": 1.901793012584385e-05, + "loss": 0.7852, + "step": 3227 + }, + { + "epoch": 0.5, + "grad_norm": 13.346102203471863, + "learning_rate": 1.901720118958649e-05, + "loss": 0.8695, + "step": 3228 + }, + { + "epoch": 0.5, + "grad_norm": 27.236645349492782, + "learning_rate": 1.9016471996885202e-05, + "loss": 0.839, + "step": 3229 + }, + { + "epoch": 0.5, + "grad_norm": 16.432241548763045, + "learning_rate": 1.9015742547760726e-05, + "loss": 0.9157, + "step": 3230 + }, + { + "epoch": 0.5, + "grad_norm": 24.47201569275253, + "learning_rate": 1.9015012842233807e-05, + "loss": 0.8392, + "step": 3231 + }, + { + "epoch": 0.5, + "grad_norm": 23.995745037065667, + "learning_rate": 1.9014282880325194e-05, + "loss": 0.7897, + "step": 3232 + }, + { + "epoch": 0.5, + "grad_norm": 23.31086148008573, + "learning_rate": 1.9013552662055652e-05, + "loss": 0.859, + "step": 3233 + }, + { + "epoch": 0.51, + "grad_norm": 16.7200266199151, + "learning_rate": 1.9012822187445944e-05, + "loss": 0.7593, + "step": 3234 + }, + { + "epoch": 0.51, + "grad_norm": 19.441460770125662, + "learning_rate": 1.9012091456516843e-05, + "loss": 0.8184, + "step": 3235 + }, + { + "epoch": 0.51, + "grad_norm": 23.760640379030786, + "learning_rate": 1.9011360469289138e-05, + "loss": 0.9675, + "step": 3236 + }, + { + "epoch": 0.51, + "grad_norm": 16.91864224073, + "learning_rate": 1.901062922578361e-05, + "loss": 0.7626, + "step": 3237 + }, + { + "epoch": 0.51, + "grad_norm": 16.97133714710073, + "learning_rate": 1.9009897726021058e-05, + "loss": 0.8719, + "step": 3238 + }, + { + "epoch": 0.51, + "grad_norm": 21.490950684369828, + "learning_rate": 1.9009165970022282e-05, + "loss": 0.7341, + "step": 3239 + }, + { + "epoch": 0.51, + "grad_norm": 19.52146375768583, + "learning_rate": 1.9008433957808102e-05, + "loss": 0.7873, + "step": 3240 + }, + { + "epoch": 0.51, + "grad_norm": 11.896604934104056, + "learning_rate": 1.9007701689399323e-05, + "loss": 0.7226, + "step": 3241 + }, + { + "epoch": 0.51, + "grad_norm": 20.72922314743194, + "learning_rate": 1.9006969164816784e-05, + "loss": 0.7948, + "step": 3242 + }, 
+ { + "epoch": 0.51, + "grad_norm": 19.929676465637428, + "learning_rate": 1.9006236384081306e-05, + "loss": 0.7807, + "step": 3243 + }, + { + "epoch": 0.51, + "grad_norm": 12.201355750938077, + "learning_rate": 1.9005503347213738e-05, + "loss": 0.8165, + "step": 3244 + }, + { + "epoch": 0.51, + "grad_norm": 29.4712549006632, + "learning_rate": 1.900477005423492e-05, + "loss": 0.852, + "step": 3245 + }, + { + "epoch": 0.51, + "grad_norm": 19.618645060894476, + "learning_rate": 1.9004036505165708e-05, + "loss": 1.0108, + "step": 3246 + }, + { + "epoch": 0.51, + "grad_norm": 18.086511241966033, + "learning_rate": 1.9003302700026968e-05, + "loss": 0.6943, + "step": 3247 + }, + { + "epoch": 0.51, + "grad_norm": 19.071567673811153, + "learning_rate": 1.9002568638839566e-05, + "loss": 0.8185, + "step": 3248 + }, + { + "epoch": 0.51, + "grad_norm": 33.88515531423689, + "learning_rate": 1.9001834321624378e-05, + "loss": 0.9174, + "step": 3249 + }, + { + "epoch": 0.51, + "grad_norm": 16.64538851824194, + "learning_rate": 1.9001099748402287e-05, + "loss": 0.7773, + "step": 3250 + }, + { + "epoch": 0.51, + "grad_norm": 16.382574772326507, + "learning_rate": 1.9000364919194185e-05, + "loss": 0.7932, + "step": 3251 + }, + { + "epoch": 0.51, + "grad_norm": 36.10558050811683, + "learning_rate": 1.899962983402097e-05, + "loss": 0.9106, + "step": 3252 + }, + { + "epoch": 0.51, + "grad_norm": 19.984378653824976, + "learning_rate": 1.8998894492903545e-05, + "loss": 0.8735, + "step": 3253 + }, + { + "epoch": 0.51, + "grad_norm": 24.14826342527058, + "learning_rate": 1.8998158895862827e-05, + "loss": 0.7821, + "step": 3254 + }, + { + "epoch": 0.51, + "grad_norm": 15.4967278527968, + "learning_rate": 1.8997423042919734e-05, + "loss": 0.7565, + "step": 3255 + }, + { + "epoch": 0.51, + "grad_norm": 24.466214145418185, + "learning_rate": 1.899668693409519e-05, + "loss": 0.7963, + "step": 3256 + }, + { + "epoch": 0.51, + "grad_norm": 26.11605946292533, + "learning_rate": 1.8995950569410136e-05, + "loss": 0.9111, + "step": 3257 + }, + { + "epoch": 0.51, + "grad_norm": 15.69279396543573, + "learning_rate": 1.8995213948885508e-05, + "loss": 0.7317, + "step": 3258 + }, + { + "epoch": 0.51, + "grad_norm": 15.875436113216862, + "learning_rate": 1.899447707254226e-05, + "loss": 0.7685, + "step": 3259 + }, + { + "epoch": 0.51, + "grad_norm": 17.43809216700467, + "learning_rate": 1.8993739940401342e-05, + "loss": 0.7964, + "step": 3260 + }, + { + "epoch": 0.51, + "grad_norm": 47.01185875219237, + "learning_rate": 1.8993002552483726e-05, + "loss": 0.8537, + "step": 3261 + }, + { + "epoch": 0.51, + "grad_norm": 17.80303413004833, + "learning_rate": 1.8992264908810373e-05, + "loss": 0.7608, + "step": 3262 + }, + { + "epoch": 0.51, + "grad_norm": 12.31721858616252, + "learning_rate": 1.899152700940227e-05, + "loss": 0.7321, + "step": 3263 + }, + { + "epoch": 0.51, + "grad_norm": 17.495838070581346, + "learning_rate": 1.8990788854280397e-05, + "loss": 0.7526, + "step": 3264 + }, + { + "epoch": 0.51, + "grad_norm": 16.489194545562345, + "learning_rate": 1.899005044346575e-05, + "loss": 0.7448, + "step": 3265 + }, + { + "epoch": 0.51, + "grad_norm": 12.4744470208579, + "learning_rate": 1.8989311776979326e-05, + "loss": 0.8306, + "step": 3266 + }, + { + "epoch": 0.51, + "grad_norm": 14.526798336522805, + "learning_rate": 1.8988572854842133e-05, + "loss": 0.7805, + "step": 3267 + }, + { + "epoch": 0.51, + "grad_norm": 18.05069596271059, + "learning_rate": 1.898783367707519e-05, + "loss": 0.8492, + "step": 3268 + }, + { + "epoch": 
0.51, + "grad_norm": 16.82385227201902, + "learning_rate": 1.898709424369951e-05, + "loss": 0.7442, + "step": 3269 + }, + { + "epoch": 0.51, + "grad_norm": 19.30131590781488, + "learning_rate": 1.898635455473613e-05, + "loss": 0.8237, + "step": 3270 + }, + { + "epoch": 0.51, + "grad_norm": 20.42282232759359, + "learning_rate": 1.898561461020608e-05, + "loss": 0.8917, + "step": 3271 + }, + { + "epoch": 0.51, + "grad_norm": 14.499431660115283, + "learning_rate": 1.8984874410130413e-05, + "loss": 0.7379, + "step": 3272 + }, + { + "epoch": 0.51, + "grad_norm": 15.93915737921503, + "learning_rate": 1.898413395453017e-05, + "loss": 0.8256, + "step": 3273 + }, + { + "epoch": 0.51, + "grad_norm": 15.358397824315356, + "learning_rate": 1.8983393243426413e-05, + "loss": 0.8956, + "step": 3274 + }, + { + "epoch": 0.51, + "grad_norm": 17.31304296526907, + "learning_rate": 1.8982652276840205e-05, + "loss": 0.726, + "step": 3275 + }, + { + "epoch": 0.51, + "grad_norm": 19.093361744468556, + "learning_rate": 1.8981911054792625e-05, + "loss": 0.8032, + "step": 3276 + }, + { + "epoch": 0.51, + "grad_norm": 36.918961722001136, + "learning_rate": 1.8981169577304747e-05, + "loss": 0.7639, + "step": 3277 + }, + { + "epoch": 0.51, + "grad_norm": 16.241479004072122, + "learning_rate": 1.898042784439766e-05, + "loss": 0.8295, + "step": 3278 + }, + { + "epoch": 0.51, + "grad_norm": 17.829385569119307, + "learning_rate": 1.897968585609246e-05, + "loss": 0.8646, + "step": 3279 + }, + { + "epoch": 0.51, + "grad_norm": 25.805204674449193, + "learning_rate": 1.8978943612410245e-05, + "loss": 0.8207, + "step": 3280 + }, + { + "epoch": 0.51, + "grad_norm": 18.210242630062517, + "learning_rate": 1.8978201113372128e-05, + "loss": 0.7854, + "step": 3281 + }, + { + "epoch": 0.51, + "grad_norm": 16.48862852748904, + "learning_rate": 1.8977458358999222e-05, + "loss": 0.7114, + "step": 3282 + }, + { + "epoch": 0.51, + "grad_norm": 22.770164608552005, + "learning_rate": 1.8976715349312652e-05, + "loss": 0.8092, + "step": 3283 + }, + { + "epoch": 0.51, + "grad_norm": 19.67252407677198, + "learning_rate": 1.897597208433355e-05, + "loss": 0.7627, + "step": 3284 + }, + { + "epoch": 0.51, + "grad_norm": 20.654441019912475, + "learning_rate": 1.8975228564083052e-05, + "loss": 0.8817, + "step": 3285 + }, + { + "epoch": 0.51, + "grad_norm": 24.432568941648473, + "learning_rate": 1.8974484788582303e-05, + "loss": 0.8926, + "step": 3286 + }, + { + "epoch": 0.51, + "grad_norm": 17.773174942927838, + "learning_rate": 1.897374075785246e-05, + "loss": 0.7816, + "step": 3287 + }, + { + "epoch": 0.51, + "grad_norm": 13.417549328863489, + "learning_rate": 1.8972996471914674e-05, + "loss": 0.8842, + "step": 3288 + }, + { + "epoch": 0.51, + "grad_norm": 15.497277064876052, + "learning_rate": 1.8972251930790124e-05, + "loss": 0.7477, + "step": 3289 + }, + { + "epoch": 0.51, + "grad_norm": 22.686390993504194, + "learning_rate": 1.8971507134499974e-05, + "loss": 0.7553, + "step": 3290 + }, + { + "epoch": 0.51, + "grad_norm": 33.51949868035587, + "learning_rate": 1.8970762083065408e-05, + "loss": 0.981, + "step": 3291 + }, + { + "epoch": 0.51, + "grad_norm": 17.377263269082434, + "learning_rate": 1.897001677650762e-05, + "loss": 0.8271, + "step": 3292 + }, + { + "epoch": 0.51, + "grad_norm": 15.536138019612448, + "learning_rate": 1.8969271214847795e-05, + "loss": 0.7148, + "step": 3293 + }, + { + "epoch": 0.51, + "grad_norm": 18.923094759355415, + "learning_rate": 1.896852539810715e-05, + "loss": 0.8304, + "step": 3294 + }, + { + "epoch": 0.51, + 
"grad_norm": 21.894773826650535, + "learning_rate": 1.8967779326306884e-05, + "loss": 0.9322, + "step": 3295 + }, + { + "epoch": 0.51, + "grad_norm": 19.304494596696205, + "learning_rate": 1.8967032999468226e-05, + "loss": 0.7831, + "step": 3296 + }, + { + "epoch": 0.51, + "grad_norm": 14.043791583152023, + "learning_rate": 1.8966286417612393e-05, + "loss": 0.8117, + "step": 3297 + }, + { + "epoch": 0.52, + "grad_norm": 16.07606661292513, + "learning_rate": 1.8965539580760615e-05, + "loss": 0.8522, + "step": 3298 + }, + { + "epoch": 0.52, + "grad_norm": 19.131179547515075, + "learning_rate": 1.8964792488934143e-05, + "loss": 0.8286, + "step": 3299 + }, + { + "epoch": 0.52, + "grad_norm": 15.86586056611989, + "learning_rate": 1.8964045142154212e-05, + "loss": 0.8386, + "step": 3300 + }, + { + "epoch": 0.52, + "grad_norm": 25.713210022562002, + "learning_rate": 1.896329754044208e-05, + "loss": 0.896, + "step": 3301 + }, + { + "epoch": 0.52, + "grad_norm": 24.974547955524066, + "learning_rate": 1.8962549683819013e-05, + "loss": 0.8399, + "step": 3302 + }, + { + "epoch": 0.52, + "grad_norm": 21.41597401849312, + "learning_rate": 1.8961801572306276e-05, + "loss": 0.8076, + "step": 3303 + }, + { + "epoch": 0.52, + "grad_norm": 21.818217938092527, + "learning_rate": 1.8961053205925143e-05, + "loss": 0.7319, + "step": 3304 + }, + { + "epoch": 0.52, + "grad_norm": 20.541694103242634, + "learning_rate": 1.89603045846969e-05, + "loss": 0.7611, + "step": 3305 + }, + { + "epoch": 0.52, + "grad_norm": 19.173817859470883, + "learning_rate": 1.8959555708642835e-05, + "loss": 0.8252, + "step": 3306 + }, + { + "epoch": 0.52, + "grad_norm": 20.150288053493515, + "learning_rate": 1.895880657778425e-05, + "loss": 0.7268, + "step": 3307 + }, + { + "epoch": 0.52, + "grad_norm": 19.08003356690973, + "learning_rate": 1.895805719214244e-05, + "loss": 0.7844, + "step": 3308 + }, + { + "epoch": 0.52, + "grad_norm": 19.322439721785628, + "learning_rate": 1.8957307551738727e-05, + "loss": 0.926, + "step": 3309 + }, + { + "epoch": 0.52, + "grad_norm": 19.02751792682329, + "learning_rate": 1.895655765659443e-05, + "loss": 0.7907, + "step": 3310 + }, + { + "epoch": 0.52, + "grad_norm": 33.30861495607743, + "learning_rate": 1.8955807506730872e-05, + "loss": 0.804, + "step": 3311 + }, + { + "epoch": 0.52, + "grad_norm": 21.57224672039511, + "learning_rate": 1.8955057102169385e-05, + "loss": 0.7755, + "step": 3312 + }, + { + "epoch": 0.52, + "grad_norm": 21.243166927249245, + "learning_rate": 1.8954306442931315e-05, + "loss": 0.8541, + "step": 3313 + }, + { + "epoch": 0.52, + "grad_norm": 25.078610801744933, + "learning_rate": 1.8953555529038006e-05, + "loss": 0.8728, + "step": 3314 + }, + { + "epoch": 0.52, + "grad_norm": 14.274344722894911, + "learning_rate": 1.8952804360510816e-05, + "loss": 0.8128, + "step": 3315 + }, + { + "epoch": 0.52, + "grad_norm": 18.425515175977296, + "learning_rate": 1.8952052937371105e-05, + "loss": 0.8284, + "step": 3316 + }, + { + "epoch": 0.52, + "grad_norm": 32.74106691373, + "learning_rate": 1.8951301259640252e-05, + "loss": 0.8772, + "step": 3317 + }, + { + "epoch": 0.52, + "grad_norm": 25.996978524597257, + "learning_rate": 1.895054932733962e-05, + "loss": 0.8976, + "step": 3318 + }, + { + "epoch": 0.52, + "grad_norm": 30.3681125249725, + "learning_rate": 1.8949797140490607e-05, + "loss": 0.9461, + "step": 3319 + }, + { + "epoch": 0.52, + "grad_norm": 15.100203911570139, + "learning_rate": 1.8949044699114592e-05, + "loss": 0.7677, + "step": 3320 + }, + { + "epoch": 0.52, + "grad_norm": 
20.378046988326368, + "learning_rate": 1.8948292003232987e-05, + "loss": 0.9179, + "step": 3321 + }, + { + "epoch": 0.52, + "grad_norm": 22.65943203810927, + "learning_rate": 1.894753905286719e-05, + "loss": 0.8169, + "step": 3322 + }, + { + "epoch": 0.52, + "grad_norm": 24.6908656602609, + "learning_rate": 1.8946785848038614e-05, + "loss": 0.9243, + "step": 3323 + }, + { + "epoch": 0.52, + "grad_norm": 19.642141462670683, + "learning_rate": 1.8946032388768687e-05, + "loss": 0.8762, + "step": 3324 + }, + { + "epoch": 0.52, + "grad_norm": 17.503034261356934, + "learning_rate": 1.8945278675078828e-05, + "loss": 0.8297, + "step": 3325 + }, + { + "epoch": 0.52, + "grad_norm": 18.890176946029367, + "learning_rate": 1.894452470699048e-05, + "loss": 0.817, + "step": 3326 + }, + { + "epoch": 0.52, + "grad_norm": 17.339149383852504, + "learning_rate": 1.894377048452508e-05, + "loss": 0.9539, + "step": 3327 + }, + { + "epoch": 0.52, + "grad_norm": 25.530667503206878, + "learning_rate": 1.8943016007704078e-05, + "loss": 0.8668, + "step": 3328 + }, + { + "epoch": 0.52, + "grad_norm": 18.460202050300555, + "learning_rate": 1.8942261276548932e-05, + "loss": 0.8424, + "step": 3329 + }, + { + "epoch": 0.52, + "grad_norm": 19.204140872949036, + "learning_rate": 1.894150629108111e-05, + "loss": 0.8521, + "step": 3330 + }, + { + "epoch": 0.52, + "grad_norm": 24.107550123649787, + "learning_rate": 1.8940751051322075e-05, + "loss": 0.7128, + "step": 3331 + }, + { + "epoch": 0.52, + "grad_norm": 18.54912264364053, + "learning_rate": 1.8939995557293315e-05, + "loss": 0.7438, + "step": 3332 + }, + { + "epoch": 0.52, + "grad_norm": 13.289986236868678, + "learning_rate": 1.8939239809016306e-05, + "loss": 0.7825, + "step": 3333 + }, + { + "epoch": 0.52, + "grad_norm": 15.173740732385943, + "learning_rate": 1.893848380651255e-05, + "loss": 0.8543, + "step": 3334 + }, + { + "epoch": 0.52, + "grad_norm": 16.182607373474102, + "learning_rate": 1.893772754980354e-05, + "loss": 0.7827, + "step": 3335 + }, + { + "epoch": 0.52, + "grad_norm": 18.840961295812292, + "learning_rate": 1.8936971038910792e-05, + "loss": 0.8808, + "step": 3336 + }, + { + "epoch": 0.52, + "grad_norm": 18.335262489328013, + "learning_rate": 1.8936214273855813e-05, + "loss": 0.8171, + "step": 3337 + }, + { + "epoch": 0.52, + "grad_norm": 20.641496398805156, + "learning_rate": 1.8935457254660128e-05, + "loss": 0.8496, + "step": 3338 + }, + { + "epoch": 0.52, + "grad_norm": 24.289102779439688, + "learning_rate": 1.8934699981345265e-05, + "loss": 0.806, + "step": 3339 + }, + { + "epoch": 0.52, + "grad_norm": 16.84246254797321, + "learning_rate": 1.893394245393276e-05, + "loss": 0.8426, + "step": 3340 + }, + { + "epoch": 0.52, + "grad_norm": 16.598108331705184, + "learning_rate": 1.893318467244416e-05, + "loss": 0.8253, + "step": 3341 + }, + { + "epoch": 0.52, + "grad_norm": 25.54094295632851, + "learning_rate": 1.893242663690101e-05, + "loss": 0.8221, + "step": 3342 + }, + { + "epoch": 0.52, + "grad_norm": 19.41202666202375, + "learning_rate": 1.8931668347324877e-05, + "loss": 0.887, + "step": 3343 + }, + { + "epoch": 0.52, + "grad_norm": 15.909508859381846, + "learning_rate": 1.893090980373732e-05, + "loss": 0.7678, + "step": 3344 + }, + { + "epoch": 0.52, + "grad_norm": 21.05664375927195, + "learning_rate": 1.8930151006159908e-05, + "loss": 0.7798, + "step": 3345 + }, + { + "epoch": 0.52, + "grad_norm": 17.522019220476068, + "learning_rate": 1.892939195461423e-05, + "loss": 0.7704, + "step": 3346 + }, + { + "epoch": 0.52, + "grad_norm": 
37.00418295928479, + "learning_rate": 1.8928632649121867e-05, + "loss": 0.8342, + "step": 3347 + }, + { + "epoch": 0.52, + "grad_norm": 30.24032633658489, + "learning_rate": 1.8927873089704416e-05, + "loss": 0.8104, + "step": 3348 + }, + { + "epoch": 0.52, + "grad_norm": 26.553372156383084, + "learning_rate": 1.8927113276383475e-05, + "loss": 0.8248, + "step": 3349 + }, + { + "epoch": 0.52, + "grad_norm": 25.53241335052198, + "learning_rate": 1.8926353209180655e-05, + "loss": 0.826, + "step": 3350 + }, + { + "epoch": 0.52, + "grad_norm": 13.55322187838542, + "learning_rate": 1.892559288811757e-05, + "loss": 0.8026, + "step": 3351 + }, + { + "epoch": 0.52, + "grad_norm": 26.519803613153854, + "learning_rate": 1.892483231321585e-05, + "loss": 0.9084, + "step": 3352 + }, + { + "epoch": 0.52, + "grad_norm": 21.11096696335605, + "learning_rate": 1.8924071484497114e-05, + "loss": 0.8003, + "step": 3353 + }, + { + "epoch": 0.52, + "grad_norm": 24.335539141926084, + "learning_rate": 1.892331040198301e-05, + "loss": 0.8166, + "step": 3354 + }, + { + "epoch": 0.52, + "grad_norm": 14.240287257457208, + "learning_rate": 1.8922549065695172e-05, + "loss": 0.7634, + "step": 3355 + }, + { + "epoch": 0.52, + "grad_norm": 24.75685767574243, + "learning_rate": 1.8921787475655265e-05, + "loss": 0.9598, + "step": 3356 + }, + { + "epoch": 0.52, + "grad_norm": 17.757156519389437, + "learning_rate": 1.8921025631884938e-05, + "loss": 0.8711, + "step": 3357 + }, + { + "epoch": 0.52, + "grad_norm": 17.124454988209056, + "learning_rate": 1.8920263534405858e-05, + "loss": 0.7593, + "step": 3358 + }, + { + "epoch": 0.52, + "grad_norm": 21.969145722440434, + "learning_rate": 1.8919501183239705e-05, + "loss": 0.8531, + "step": 3359 + }, + { + "epoch": 0.52, + "grad_norm": 21.07152944592918, + "learning_rate": 1.8918738578408157e-05, + "loss": 0.7959, + "step": 3360 + }, + { + "epoch": 0.52, + "grad_norm": 15.900326921630372, + "learning_rate": 1.8917975719932898e-05, + "loss": 0.7134, + "step": 3361 + }, + { + "epoch": 0.53, + "grad_norm": 17.994220511945322, + "learning_rate": 1.891721260783563e-05, + "loss": 0.7959, + "step": 3362 + }, + { + "epoch": 0.53, + "grad_norm": 15.25241374432694, + "learning_rate": 1.8916449242138047e-05, + "loss": 0.8172, + "step": 3363 + }, + { + "epoch": 0.53, + "grad_norm": 26.73083024684305, + "learning_rate": 1.8915685622861864e-05, + "loss": 0.8052, + "step": 3364 + }, + { + "epoch": 0.53, + "grad_norm": 18.64199132380459, + "learning_rate": 1.8914921750028804e-05, + "loss": 0.8448, + "step": 3365 + }, + { + "epoch": 0.53, + "grad_norm": 12.664915251452088, + "learning_rate": 1.891415762366058e-05, + "loss": 0.7772, + "step": 3366 + }, + { + "epoch": 0.53, + "grad_norm": 22.610154367840074, + "learning_rate": 1.8913393243778927e-05, + "loss": 0.7887, + "step": 3367 + }, + { + "epoch": 0.53, + "grad_norm": 15.32555711475076, + "learning_rate": 1.8912628610405585e-05, + "loss": 0.8175, + "step": 3368 + }, + { + "epoch": 0.53, + "grad_norm": 17.45858352462135, + "learning_rate": 1.89118637235623e-05, + "loss": 0.7907, + "step": 3369 + }, + { + "epoch": 0.53, + "grad_norm": 21.87556641526553, + "learning_rate": 1.891109858327082e-05, + "loss": 0.9231, + "step": 3370 + }, + { + "epoch": 0.53, + "grad_norm": 10.770753878588453, + "learning_rate": 1.8910333189552913e-05, + "loss": 0.6939, + "step": 3371 + }, + { + "epoch": 0.53, + "grad_norm": 15.903194134884835, + "learning_rate": 1.8909567542430344e-05, + "loss": 0.7623, + "step": 3372 + }, + { + "epoch": 0.53, + "grad_norm": 
15.033742888282342, + "learning_rate": 1.890880164192488e-05, + "loss": 0.7945, + "step": 3373 + }, + { + "epoch": 0.53, + "grad_norm": 11.502029831964597, + "learning_rate": 1.8908035488058316e-05, + "loss": 0.7376, + "step": 3374 + }, + { + "epoch": 0.53, + "grad_norm": 18.609909045149866, + "learning_rate": 1.8907269080852432e-05, + "loss": 0.8078, + "step": 3375 + }, + { + "epoch": 0.53, + "grad_norm": 13.357426894513708, + "learning_rate": 1.8906502420329022e-05, + "loss": 0.7522, + "step": 3376 + }, + { + "epoch": 0.53, + "grad_norm": 21.214632161433357, + "learning_rate": 1.8905735506509897e-05, + "loss": 0.8444, + "step": 3377 + }, + { + "epoch": 0.53, + "grad_norm": 21.704193312769636, + "learning_rate": 1.890496833941686e-05, + "loss": 0.8611, + "step": 3378 + }, + { + "epoch": 0.53, + "grad_norm": 28.767409683180908, + "learning_rate": 1.890420091907174e-05, + "loss": 0.8217, + "step": 3379 + }, + { + "epoch": 0.53, + "grad_norm": 13.99141770017904, + "learning_rate": 1.8903433245496348e-05, + "loss": 0.736, + "step": 3380 + }, + { + "epoch": 0.53, + "grad_norm": 24.66708461176765, + "learning_rate": 1.8902665318712527e-05, + "loss": 0.8313, + "step": 3381 + }, + { + "epoch": 0.53, + "grad_norm": 14.466920263218196, + "learning_rate": 1.8901897138742107e-05, + "loss": 0.8133, + "step": 3382 + }, + { + "epoch": 0.53, + "grad_norm": 20.826265505340814, + "learning_rate": 1.890112870560694e-05, + "loss": 0.8638, + "step": 3383 + }, + { + "epoch": 0.53, + "grad_norm": 19.936165660345143, + "learning_rate": 1.8900360019328885e-05, + "loss": 0.9087, + "step": 3384 + }, + { + "epoch": 0.53, + "grad_norm": 17.720476132451292, + "learning_rate": 1.8899591079929796e-05, + "loss": 0.7638, + "step": 3385 + }, + { + "epoch": 0.53, + "grad_norm": 32.02774678123373, + "learning_rate": 1.8898821887431543e-05, + "loss": 0.8345, + "step": 3386 + }, + { + "epoch": 0.53, + "grad_norm": 16.123609201016286, + "learning_rate": 1.8898052441855997e-05, + "loss": 0.7579, + "step": 3387 + }, + { + "epoch": 0.53, + "grad_norm": 20.596070170489416, + "learning_rate": 1.8897282743225048e-05, + "loss": 0.7905, + "step": 3388 + }, + { + "epoch": 0.53, + "grad_norm": 23.662250099283213, + "learning_rate": 1.8896512791560584e-05, + "loss": 0.761, + "step": 3389 + }, + { + "epoch": 0.53, + "grad_norm": 27.909954965665126, + "learning_rate": 1.8895742586884502e-05, + "loss": 0.8593, + "step": 3390 + }, + { + "epoch": 0.53, + "grad_norm": 36.3871593925754, + "learning_rate": 1.88949721292187e-05, + "loss": 0.6885, + "step": 3391 + }, + { + "epoch": 0.53, + "grad_norm": 18.06501881648598, + "learning_rate": 1.8894201418585094e-05, + "loss": 0.9004, + "step": 3392 + }, + { + "epoch": 0.53, + "grad_norm": 19.926497168169508, + "learning_rate": 1.8893430455005604e-05, + "loss": 0.8322, + "step": 3393 + }, + { + "epoch": 0.53, + "grad_norm": 17.99428544627486, + "learning_rate": 1.8892659238502154e-05, + "loss": 0.8215, + "step": 3394 + }, + { + "epoch": 0.53, + "grad_norm": 21.148237302133445, + "learning_rate": 1.889188776909668e-05, + "loss": 0.7444, + "step": 3395 + }, + { + "epoch": 0.53, + "grad_norm": 20.47423651160239, + "learning_rate": 1.8891116046811116e-05, + "loss": 0.8691, + "step": 3396 + }, + { + "epoch": 0.53, + "grad_norm": 26.444399667196294, + "learning_rate": 1.8890344071667415e-05, + "loss": 0.8061, + "step": 3397 + }, + { + "epoch": 0.53, + "grad_norm": 21.23073772972432, + "learning_rate": 1.888957184368753e-05, + "loss": 0.7917, + "step": 3398 + }, + { + "epoch": 0.53, + "grad_norm": 
14.70512289607133, + "learning_rate": 1.888879936289342e-05, + "loss": 0.8484, + "step": 3399 + }, + { + "epoch": 0.53, + "grad_norm": 23.84407015806765, + "learning_rate": 1.8888026629307056e-05, + "loss": 0.815, + "step": 3400 + }, + { + "epoch": 0.53, + "grad_norm": 23.48505899279683, + "learning_rate": 1.8887253642950414e-05, + "loss": 0.6795, + "step": 3401 + }, + { + "epoch": 0.53, + "grad_norm": 19.848641319132316, + "learning_rate": 1.8886480403845477e-05, + "loss": 0.8493, + "step": 3402 + }, + { + "epoch": 0.53, + "grad_norm": 15.291422213017542, + "learning_rate": 1.888570691201424e-05, + "loss": 0.7337, + "step": 3403 + }, + { + "epoch": 0.53, + "grad_norm": 21.811800274651663, + "learning_rate": 1.8884933167478688e-05, + "loss": 0.8441, + "step": 3404 + }, + { + "epoch": 0.53, + "grad_norm": 15.537586278207844, + "learning_rate": 1.888415917026084e-05, + "loss": 0.7338, + "step": 3405 + }, + { + "epoch": 0.53, + "grad_norm": 10.878420723399753, + "learning_rate": 1.8883384920382703e-05, + "loss": 0.8015, + "step": 3406 + }, + { + "epoch": 0.53, + "grad_norm": 13.533743463410543, + "learning_rate": 1.8882610417866295e-05, + "loss": 0.7818, + "step": 3407 + }, + { + "epoch": 0.53, + "grad_norm": 27.510857839030827, + "learning_rate": 1.888183566273364e-05, + "loss": 0.8128, + "step": 3408 + }, + { + "epoch": 0.53, + "grad_norm": 18.53714177437603, + "learning_rate": 1.888106065500678e-05, + "loss": 0.7775, + "step": 3409 + }, + { + "epoch": 0.53, + "grad_norm": 17.42868004840664, + "learning_rate": 1.8880285394707743e-05, + "loss": 0.7933, + "step": 3410 + }, + { + "epoch": 0.53, + "grad_norm": 17.942344776774117, + "learning_rate": 1.8879509881858587e-05, + "loss": 0.8449, + "step": 3411 + }, + { + "epoch": 0.53, + "grad_norm": 13.97399866407785, + "learning_rate": 1.8878734116481364e-05, + "loss": 0.7983, + "step": 3412 + }, + { + "epoch": 0.53, + "grad_norm": 26.976028293476137, + "learning_rate": 1.887795809859814e-05, + "loss": 0.8435, + "step": 3413 + }, + { + "epoch": 0.53, + "grad_norm": 21.749414522413606, + "learning_rate": 1.8877181828230978e-05, + "loss": 0.803, + "step": 3414 + }, + { + "epoch": 0.53, + "grad_norm": 26.371180048696683, + "learning_rate": 1.887640530540196e-05, + "loss": 0.7834, + "step": 3415 + }, + { + "epoch": 0.53, + "grad_norm": 20.418450622973566, + "learning_rate": 1.8875628530133164e-05, + "loss": 0.8457, + "step": 3416 + }, + { + "epoch": 0.53, + "grad_norm": 23.53891280145962, + "learning_rate": 1.887485150244669e-05, + "loss": 0.9259, + "step": 3417 + }, + { + "epoch": 0.53, + "grad_norm": 15.1538160886726, + "learning_rate": 1.8874074222364626e-05, + "loss": 0.7541, + "step": 3418 + }, + { + "epoch": 0.53, + "grad_norm": 23.298196457700744, + "learning_rate": 1.8873296689909084e-05, + "loss": 0.8898, + "step": 3419 + }, + { + "epoch": 0.53, + "grad_norm": 20.409744392510575, + "learning_rate": 1.8872518905102177e-05, + "loss": 0.8425, + "step": 3420 + }, + { + "epoch": 0.53, + "grad_norm": 17.777615546988404, + "learning_rate": 1.887174086796602e-05, + "loss": 0.8014, + "step": 3421 + }, + { + "epoch": 0.53, + "grad_norm": 19.753797718660437, + "learning_rate": 1.8870962578522746e-05, + "loss": 0.7904, + "step": 3422 + }, + { + "epoch": 0.53, + "grad_norm": 19.093606963144772, + "learning_rate": 1.887018403679448e-05, + "loss": 0.8219, + "step": 3423 + }, + { + "epoch": 0.53, + "grad_norm": 16.201060743220935, + "learning_rate": 1.8869405242803373e-05, + "loss": 0.7504, + "step": 3424 + }, + { + "epoch": 0.53, + "grad_norm": 
31.126851586435176, + "learning_rate": 1.886862619657157e-05, + "loss": 0.9006, + "step": 3425 + }, + { + "epoch": 0.54, + "grad_norm": 23.103589940691286, + "learning_rate": 1.8867846898121223e-05, + "loss": 0.9261, + "step": 3426 + }, + { + "epoch": 0.54, + "grad_norm": 15.898382804457848, + "learning_rate": 1.8867067347474498e-05, + "loss": 0.8594, + "step": 3427 + }, + { + "epoch": 0.54, + "grad_norm": 13.037837845812941, + "learning_rate": 1.8866287544653566e-05, + "loss": 0.7686, + "step": 3428 + }, + { + "epoch": 0.54, + "grad_norm": 24.176376983851878, + "learning_rate": 1.8865507489680602e-05, + "loss": 0.9257, + "step": 3429 + }, + { + "epoch": 0.54, + "grad_norm": 15.022398017057679, + "learning_rate": 1.8864727182577792e-05, + "loss": 0.8068, + "step": 3430 + }, + { + "epoch": 0.54, + "grad_norm": 23.161244457387706, + "learning_rate": 1.8863946623367324e-05, + "loss": 0.7708, + "step": 3431 + }, + { + "epoch": 0.54, + "grad_norm": 14.072629002916553, + "learning_rate": 1.88631658120714e-05, + "loss": 0.7199, + "step": 3432 + }, + { + "epoch": 0.54, + "grad_norm": 23.8485785428595, + "learning_rate": 1.886238474871223e-05, + "loss": 0.8088, + "step": 3433 + }, + { + "epoch": 0.54, + "grad_norm": 19.682636580650488, + "learning_rate": 1.8861603433312017e-05, + "loss": 0.8052, + "step": 3434 + }, + { + "epoch": 0.54, + "grad_norm": 16.554400508601866, + "learning_rate": 1.8860821865892988e-05, + "loss": 0.7221, + "step": 3435 + }, + { + "epoch": 0.54, + "grad_norm": 16.396140113928965, + "learning_rate": 1.8860040046477367e-05, + "loss": 0.7557, + "step": 3436 + }, + { + "epoch": 0.54, + "grad_norm": 14.612653212013303, + "learning_rate": 1.8859257975087395e-05, + "loss": 0.7959, + "step": 3437 + }, + { + "epoch": 0.54, + "grad_norm": 19.94524016854678, + "learning_rate": 1.8858475651745304e-05, + "loss": 0.8156, + "step": 3438 + }, + { + "epoch": 0.54, + "grad_norm": 18.27030400103325, + "learning_rate": 1.8857693076473348e-05, + "loss": 0.7762, + "step": 3439 + }, + { + "epoch": 0.54, + "grad_norm": 14.113184566678713, + "learning_rate": 1.8856910249293783e-05, + "loss": 0.7738, + "step": 3440 + }, + { + "epoch": 0.54, + "grad_norm": 21.12987140278447, + "learning_rate": 1.8856127170228873e-05, + "loss": 0.8317, + "step": 3441 + }, + { + "epoch": 0.54, + "grad_norm": 15.703873130546501, + "learning_rate": 1.8855343839300885e-05, + "loss": 0.7512, + "step": 3442 + }, + { + "epoch": 0.54, + "grad_norm": 21.85220304606607, + "learning_rate": 1.8854560256532098e-05, + "loss": 0.7905, + "step": 3443 + }, + { + "epoch": 0.54, + "grad_norm": 20.6116934174334, + "learning_rate": 1.88537764219448e-05, + "loss": 0.7489, + "step": 3444 + }, + { + "epoch": 0.54, + "grad_norm": 16.650546684540593, + "learning_rate": 1.885299233556128e-05, + "loss": 0.7357, + "step": 3445 + }, + { + "epoch": 0.54, + "grad_norm": 23.8245985911209, + "learning_rate": 1.8852207997403835e-05, + "loss": 0.7486, + "step": 3446 + }, + { + "epoch": 0.54, + "grad_norm": 22.30396428647669, + "learning_rate": 1.8851423407494774e-05, + "loss": 0.7502, + "step": 3447 + }, + { + "epoch": 0.54, + "grad_norm": 16.147550420567963, + "learning_rate": 1.8850638565856406e-05, + "loss": 0.7028, + "step": 3448 + }, + { + "epoch": 0.54, + "grad_norm": 25.946758679084766, + "learning_rate": 1.8849853472511057e-05, + "loss": 0.7767, + "step": 3449 + }, + { + "epoch": 0.54, + "grad_norm": 20.138954583112035, + "learning_rate": 1.8849068127481055e-05, + "loss": 0.7856, + "step": 3450 + }, + { + "epoch": 0.54, + "grad_norm": 
22.999988545539324, + "learning_rate": 1.884828253078873e-05, + "loss": 0.808, + "step": 3451 + }, + { + "epoch": 0.54, + "grad_norm": 17.264519551217536, + "learning_rate": 1.8847496682456422e-05, + "loss": 0.8131, + "step": 3452 + }, + { + "epoch": 0.54, + "grad_norm": 18.128320698075523, + "learning_rate": 1.884671058250649e-05, + "loss": 0.7629, + "step": 3453 + }, + { + "epoch": 0.54, + "grad_norm": 22.566536444575174, + "learning_rate": 1.8845924230961278e-05, + "loss": 0.8165, + "step": 3454 + }, + { + "epoch": 0.54, + "grad_norm": 35.766168226097186, + "learning_rate": 1.884513762784316e-05, + "loss": 0.8274, + "step": 3455 + }, + { + "epoch": 0.54, + "grad_norm": 28.15575892515542, + "learning_rate": 1.8844350773174502e-05, + "loss": 0.7508, + "step": 3456 + }, + { + "epoch": 0.54, + "grad_norm": 18.40047403957846, + "learning_rate": 1.884356366697768e-05, + "loss": 0.8564, + "step": 3457 + }, + { + "epoch": 0.54, + "grad_norm": 17.091814960256134, + "learning_rate": 1.8842776309275083e-05, + "loss": 0.7324, + "step": 3458 + }, + { + "epoch": 0.54, + "grad_norm": 19.41725716508165, + "learning_rate": 1.8841988700089096e-05, + "loss": 0.8842, + "step": 3459 + }, + { + "epoch": 0.54, + "grad_norm": 20.600226610318046, + "learning_rate": 1.8841200839442128e-05, + "loss": 0.8848, + "step": 3460 + }, + { + "epoch": 0.54, + "grad_norm": 16.44865655447884, + "learning_rate": 1.8840412727356576e-05, + "loss": 0.776, + "step": 3461 + }, + { + "epoch": 0.54, + "grad_norm": 27.0947566516384, + "learning_rate": 1.883962436385486e-05, + "loss": 0.7059, + "step": 3462 + }, + { + "epoch": 0.54, + "grad_norm": 34.218234190689394, + "learning_rate": 1.88388357489594e-05, + "loss": 0.8564, + "step": 3463 + }, + { + "epoch": 0.54, + "grad_norm": 15.261275124625653, + "learning_rate": 1.883804688269262e-05, + "loss": 0.8148, + "step": 3464 + }, + { + "epoch": 0.54, + "grad_norm": 19.006673130392, + "learning_rate": 1.8837257765076956e-05, + "loss": 0.8582, + "step": 3465 + }, + { + "epoch": 0.54, + "grad_norm": 21.97831617132807, + "learning_rate": 1.883646839613485e-05, + "loss": 0.8575, + "step": 3466 + }, + { + "epoch": 0.54, + "grad_norm": 19.531856205878626, + "learning_rate": 1.8835678775888752e-05, + "loss": 0.8249, + "step": 3467 + }, + { + "epoch": 0.54, + "grad_norm": 12.242212969833199, + "learning_rate": 1.883488890436112e-05, + "loss": 0.844, + "step": 3468 + }, + { + "epoch": 0.54, + "grad_norm": 23.218738505526197, + "learning_rate": 1.8834098781574415e-05, + "loss": 0.785, + "step": 3469 + }, + { + "epoch": 0.54, + "grad_norm": 14.179441163258888, + "learning_rate": 1.883330840755111e-05, + "loss": 0.7026, + "step": 3470 + }, + { + "epoch": 0.54, + "grad_norm": 18.914691652235785, + "learning_rate": 1.883251778231368e-05, + "loss": 0.837, + "step": 3471 + }, + { + "epoch": 0.54, + "grad_norm": 35.636151214867326, + "learning_rate": 1.883172690588461e-05, + "loss": 0.7955, + "step": 3472 + }, + { + "epoch": 0.54, + "grad_norm": 26.70029326860049, + "learning_rate": 1.8830935778286393e-05, + "loss": 0.782, + "step": 3473 + }, + { + "epoch": 0.54, + "grad_norm": 18.354191565379452, + "learning_rate": 1.8830144399541533e-05, + "loss": 0.7538, + "step": 3474 + }, + { + "epoch": 0.54, + "grad_norm": 25.22030474964758, + "learning_rate": 1.8829352769672525e-05, + "loss": 0.8834, + "step": 3475 + }, + { + "epoch": 0.54, + "grad_norm": 25.52685199868065, + "learning_rate": 1.8828560888701895e-05, + "loss": 0.7973, + "step": 3476 + }, + { + "epoch": 0.54, + "grad_norm": 26.280431588977667, + 
"learning_rate": 1.8827768756652155e-05, + "loss": 0.8578, + "step": 3477 + }, + { + "epoch": 0.54, + "grad_norm": 33.79368563561777, + "learning_rate": 1.8826976373545838e-05, + "loss": 0.9458, + "step": 3478 + }, + { + "epoch": 0.54, + "grad_norm": 13.586325938225523, + "learning_rate": 1.8826183739405475e-05, + "loss": 0.7706, + "step": 3479 + }, + { + "epoch": 0.54, + "grad_norm": 17.495325419543924, + "learning_rate": 1.8825390854253605e-05, + "loss": 0.8013, + "step": 3480 + }, + { + "epoch": 0.54, + "grad_norm": 17.841185596979305, + "learning_rate": 1.882459771811279e-05, + "loss": 0.7905, + "step": 3481 + }, + { + "epoch": 0.54, + "grad_norm": 27.790002438057645, + "learning_rate": 1.8823804331005573e-05, + "loss": 0.9765, + "step": 3482 + }, + { + "epoch": 0.54, + "grad_norm": 15.265754893403322, + "learning_rate": 1.8823010692954523e-05, + "loss": 0.723, + "step": 3483 + }, + { + "epoch": 0.54, + "grad_norm": 19.58633743732954, + "learning_rate": 1.8822216803982214e-05, + "loss": 0.7811, + "step": 3484 + }, + { + "epoch": 0.54, + "grad_norm": 22.22621006744884, + "learning_rate": 1.8821422664111218e-05, + "loss": 0.9039, + "step": 3485 + }, + { + "epoch": 0.54, + "grad_norm": 15.987284765591708, + "learning_rate": 1.882062827336412e-05, + "loss": 0.787, + "step": 3486 + }, + { + "epoch": 0.54, + "grad_norm": 10.46565408548851, + "learning_rate": 1.8819833631763514e-05, + "loss": 0.683, + "step": 3487 + }, + { + "epoch": 0.54, + "grad_norm": 20.111958333714732, + "learning_rate": 1.8819038739332e-05, + "loss": 0.8128, + "step": 3488 + }, + { + "epoch": 0.54, + "grad_norm": 21.848316055120737, + "learning_rate": 1.8818243596092182e-05, + "loss": 0.8297, + "step": 3489 + }, + { + "epoch": 0.55, + "grad_norm": 23.662014347126476, + "learning_rate": 1.8817448202066677e-05, + "loss": 0.88, + "step": 3490 + }, + { + "epoch": 0.55, + "grad_norm": 14.752111273733133, + "learning_rate": 1.88166525572781e-05, + "loss": 0.7116, + "step": 3491 + }, + { + "epoch": 0.55, + "grad_norm": 14.515634826229782, + "learning_rate": 1.8815856661749085e-05, + "loss": 0.7744, + "step": 3492 + }, + { + "epoch": 0.55, + "grad_norm": 15.680512253111425, + "learning_rate": 1.8815060515502262e-05, + "loss": 0.7396, + "step": 3493 + }, + { + "epoch": 0.55, + "grad_norm": 14.066291974934833, + "learning_rate": 1.8814264118560274e-05, + "loss": 0.736, + "step": 3494 + }, + { + "epoch": 0.55, + "grad_norm": 14.829860335486249, + "learning_rate": 1.8813467470945774e-05, + "loss": 0.761, + "step": 3495 + }, + { + "epoch": 0.55, + "grad_norm": 38.75692336622185, + "learning_rate": 1.8812670572681413e-05, + "loss": 0.7411, + "step": 3496 + }, + { + "epoch": 0.55, + "grad_norm": 17.85813735818634, + "learning_rate": 1.8811873423789853e-05, + "loss": 0.7823, + "step": 3497 + }, + { + "epoch": 0.55, + "grad_norm": 18.627602908166725, + "learning_rate": 1.8811076024293774e-05, + "loss": 0.8384, + "step": 3498 + }, + { + "epoch": 0.55, + "grad_norm": 33.77782872172686, + "learning_rate": 1.881027837421584e-05, + "loss": 0.819, + "step": 3499 + }, + { + "epoch": 0.55, + "grad_norm": 33.12887411315028, + "learning_rate": 1.880948047357875e-05, + "loss": 0.7636, + "step": 3500 + }, + { + "epoch": 0.55, + "grad_norm": 15.36707713713431, + "learning_rate": 1.8808682322405184e-05, + "loss": 0.7357, + "step": 3501 + }, + { + "epoch": 0.55, + "grad_norm": 15.492143628995628, + "learning_rate": 1.8807883920717844e-05, + "loss": 0.865, + "step": 3502 + }, + { + "epoch": 0.55, + "grad_norm": 15.756720148068348, + "learning_rate": 
1.8807085268539443e-05, + "loss": 0.77, + "step": 3503 + }, + { + "epoch": 0.55, + "grad_norm": 23.94826734523092, + "learning_rate": 1.8806286365892685e-05, + "loss": 0.8267, + "step": 3504 + }, + { + "epoch": 0.55, + "grad_norm": 18.55035426808554, + "learning_rate": 1.8805487212800297e-05, + "loss": 0.7403, + "step": 3505 + }, + { + "epoch": 0.55, + "grad_norm": 17.823150509122005, + "learning_rate": 1.8804687809285003e-05, + "loss": 0.8168, + "step": 3506 + }, + { + "epoch": 0.55, + "grad_norm": 15.015405160582734, + "learning_rate": 1.8803888155369538e-05, + "loss": 0.7991, + "step": 3507 + }, + { + "epoch": 0.55, + "grad_norm": 16.309295285606257, + "learning_rate": 1.8803088251076642e-05, + "loss": 0.8021, + "step": 3508 + }, + { + "epoch": 0.55, + "grad_norm": 20.422907803898124, + "learning_rate": 1.880228809642907e-05, + "loss": 0.7982, + "step": 3509 + }, + { + "epoch": 0.55, + "grad_norm": 23.902390401289402, + "learning_rate": 1.880148769144957e-05, + "loss": 0.808, + "step": 3510 + }, + { + "epoch": 0.55, + "grad_norm": 15.874141597630716, + "learning_rate": 1.880068703616091e-05, + "loss": 0.7047, + "step": 3511 + }, + { + "epoch": 0.55, + "grad_norm": 24.998691193772636, + "learning_rate": 1.8799886130585858e-05, + "loss": 0.7697, + "step": 3512 + }, + { + "epoch": 0.55, + "grad_norm": 19.20763225325, + "learning_rate": 1.8799084974747193e-05, + "loss": 0.8053, + "step": 3513 + }, + { + "epoch": 0.55, + "grad_norm": 18.560378502900267, + "learning_rate": 1.8798283568667698e-05, + "loss": 0.7729, + "step": 3514 + }, + { + "epoch": 0.55, + "grad_norm": 27.044188167107126, + "learning_rate": 1.8797481912370167e-05, + "loss": 0.7679, + "step": 3515 + }, + { + "epoch": 0.55, + "grad_norm": 18.8551137071639, + "learning_rate": 1.8796680005877397e-05, + "loss": 0.8702, + "step": 3516 + }, + { + "epoch": 0.55, + "grad_norm": 18.63964457223816, + "learning_rate": 1.879587784921219e-05, + "loss": 0.794, + "step": 3517 + }, + { + "epoch": 0.55, + "grad_norm": 14.036531375690243, + "learning_rate": 1.8795075442397367e-05, + "loss": 0.7565, + "step": 3518 + }, + { + "epoch": 0.55, + "grad_norm": 22.444531661007105, + "learning_rate": 1.879427278545574e-05, + "loss": 0.7602, + "step": 3519 + }, + { + "epoch": 0.55, + "grad_norm": 20.818201867740747, + "learning_rate": 1.8793469878410142e-05, + "loss": 0.7963, + "step": 3520 + }, + { + "epoch": 0.55, + "grad_norm": 19.78712336711334, + "learning_rate": 1.8792666721283406e-05, + "loss": 0.8398, + "step": 3521 + }, + { + "epoch": 0.55, + "grad_norm": 13.980181112045726, + "learning_rate": 1.8791863314098368e-05, + "loss": 0.8076, + "step": 3522 + }, + { + "epoch": 0.55, + "grad_norm": 20.180916078675132, + "learning_rate": 1.879105965687788e-05, + "loss": 0.8217, + "step": 3523 + }, + { + "epoch": 0.55, + "grad_norm": 18.73869338943173, + "learning_rate": 1.87902557496448e-05, + "loss": 0.7327, + "step": 3524 + }, + { + "epoch": 0.55, + "grad_norm": 17.342797615777204, + "learning_rate": 1.878945159242199e-05, + "loss": 0.7908, + "step": 3525 + }, + { + "epoch": 0.55, + "grad_norm": 20.183876213546846, + "learning_rate": 1.878864718523232e-05, + "loss": 0.7959, + "step": 3526 + }, + { + "epoch": 0.55, + "grad_norm": 13.645566801084128, + "learning_rate": 1.8787842528098657e-05, + "loss": 0.7731, + "step": 3527 + }, + { + "epoch": 0.55, + "grad_norm": 25.787845821507243, + "learning_rate": 1.87870376210439e-05, + "loss": 0.757, + "step": 3528 + }, + { + "epoch": 0.55, + "grad_norm": 23.189468052211144, + "learning_rate": 
1.878623246409093e-05, + "loss": 0.8377, + "step": 3529 + }, + { + "epoch": 0.55, + "grad_norm": 18.148868128724928, + "learning_rate": 1.878542705726265e-05, + "loss": 0.7908, + "step": 3530 + }, + { + "epoch": 0.55, + "grad_norm": 17.809226666395197, + "learning_rate": 1.8784621400581966e-05, + "loss": 0.8775, + "step": 3531 + }, + { + "epoch": 0.55, + "grad_norm": 16.48633325163417, + "learning_rate": 1.8783815494071786e-05, + "loss": 0.7944, + "step": 3532 + }, + { + "epoch": 0.55, + "grad_norm": 18.74523896647049, + "learning_rate": 1.8783009337755032e-05, + "loss": 0.8008, + "step": 3533 + }, + { + "epoch": 0.55, + "grad_norm": 20.606981366940992, + "learning_rate": 1.878220293165463e-05, + "loss": 0.8824, + "step": 3534 + }, + { + "epoch": 0.55, + "grad_norm": 20.213001332379317, + "learning_rate": 1.8781396275793516e-05, + "loss": 0.7521, + "step": 3535 + }, + { + "epoch": 0.55, + "grad_norm": 18.088980811287946, + "learning_rate": 1.8780589370194626e-05, + "loss": 0.8775, + "step": 3536 + }, + { + "epoch": 0.55, + "grad_norm": 28.292222384602326, + "learning_rate": 1.8779782214880912e-05, + "loss": 0.8227, + "step": 3537 + }, + { + "epoch": 0.55, + "grad_norm": 20.035381930205684, + "learning_rate": 1.8778974809875326e-05, + "loss": 0.8039, + "step": 3538 + }, + { + "epoch": 0.55, + "grad_norm": 16.780408127763714, + "learning_rate": 1.8778167155200836e-05, + "loss": 0.7841, + "step": 3539 + }, + { + "epoch": 0.55, + "grad_norm": 15.79215376886102, + "learning_rate": 1.8777359250880404e-05, + "loss": 0.8581, + "step": 3540 + }, + { + "epoch": 0.55, + "grad_norm": 26.351232227480317, + "learning_rate": 1.8776551096937014e-05, + "loss": 0.9017, + "step": 3541 + }, + { + "epoch": 0.55, + "grad_norm": 17.091008297288706, + "learning_rate": 1.8775742693393638e-05, + "loss": 0.8004, + "step": 3542 + }, + { + "epoch": 0.55, + "grad_norm": 18.097193069179173, + "learning_rate": 1.8774934040273278e-05, + "loss": 0.8364, + "step": 3543 + }, + { + "epoch": 0.55, + "grad_norm": 19.74878090252714, + "learning_rate": 1.8774125137598926e-05, + "loss": 0.7653, + "step": 3544 + }, + { + "epoch": 0.55, + "grad_norm": 20.47398130237187, + "learning_rate": 1.877331598539359e-05, + "loss": 0.8068, + "step": 3545 + }, + { + "epoch": 0.55, + "grad_norm": 28.430191692316722, + "learning_rate": 1.8772506583680273e-05, + "loss": 0.9048, + "step": 3546 + }, + { + "epoch": 0.55, + "grad_norm": 24.8082207946181, + "learning_rate": 1.8771696932482008e-05, + "loss": 0.8238, + "step": 3547 + }, + { + "epoch": 0.55, + "grad_norm": 24.417594390523444, + "learning_rate": 1.8770887031821813e-05, + "loss": 0.903, + "step": 3548 + }, + { + "epoch": 0.55, + "grad_norm": 20.11493896140036, + "learning_rate": 1.8770076881722717e-05, + "loss": 0.675, + "step": 3549 + }, + { + "epoch": 0.55, + "grad_norm": 18.486352414912705, + "learning_rate": 1.8769266482207766e-05, + "loss": 0.8944, + "step": 3550 + }, + { + "epoch": 0.55, + "grad_norm": 17.881971707791365, + "learning_rate": 1.8768455833300007e-05, + "loss": 0.8369, + "step": 3551 + }, + { + "epoch": 0.55, + "grad_norm": 26.010122825104496, + "learning_rate": 1.8767644935022496e-05, + "loss": 0.8127, + "step": 3552 + }, + { + "epoch": 0.55, + "grad_norm": 11.726223891806299, + "learning_rate": 1.876683378739829e-05, + "loss": 0.6649, + "step": 3553 + }, + { + "epoch": 0.56, + "grad_norm": 33.54011793628526, + "learning_rate": 1.876602239045046e-05, + "loss": 0.9012, + "step": 3554 + }, + { + "epoch": 0.56, + "grad_norm": 17.61798279990791, + "learning_rate": 
1.876521074420208e-05, + "loss": 0.7523, + "step": 3555 + }, + { + "epoch": 0.56, + "grad_norm": 13.164041866659867, + "learning_rate": 1.8764398848676232e-05, + "loss": 0.856, + "step": 3556 + }, + { + "epoch": 0.56, + "grad_norm": 17.812691038345378, + "learning_rate": 1.876358670389601e-05, + "loss": 0.8713, + "step": 3557 + }, + { + "epoch": 0.56, + "grad_norm": 19.691827570897786, + "learning_rate": 1.8762774309884508e-05, + "loss": 0.7943, + "step": 3558 + }, + { + "epoch": 0.56, + "grad_norm": 21.22391859148357, + "learning_rate": 1.876196166666483e-05, + "loss": 0.7599, + "step": 3559 + }, + { + "epoch": 0.56, + "grad_norm": 22.280253101801907, + "learning_rate": 1.876114877426009e-05, + "loss": 0.7013, + "step": 3560 + }, + { + "epoch": 0.56, + "grad_norm": 21.241165103207855, + "learning_rate": 1.8760335632693404e-05, + "loss": 0.7498, + "step": 3561 + }, + { + "epoch": 0.56, + "grad_norm": 21.492375995276703, + "learning_rate": 1.8759522241987896e-05, + "loss": 0.8936, + "step": 3562 + }, + { + "epoch": 0.56, + "grad_norm": 18.047379567577668, + "learning_rate": 1.87587086021667e-05, + "loss": 0.7544, + "step": 3563 + }, + { + "epoch": 0.56, + "grad_norm": 21.261741974576328, + "learning_rate": 1.8757894713252956e-05, + "loss": 0.7377, + "step": 3564 + }, + { + "epoch": 0.56, + "grad_norm": 12.04700849047223, + "learning_rate": 1.8757080575269808e-05, + "loss": 0.7349, + "step": 3565 + }, + { + "epoch": 0.56, + "grad_norm": 16.582964809993896, + "learning_rate": 1.8756266188240413e-05, + "loss": 0.8271, + "step": 3566 + }, + { + "epoch": 0.56, + "grad_norm": 19.764698830116274, + "learning_rate": 1.875545155218793e-05, + "loss": 0.7742, + "step": 3567 + }, + { + "epoch": 0.56, + "grad_norm": 17.89655761220839, + "learning_rate": 1.8754636667135523e-05, + "loss": 0.8057, + "step": 3568 + }, + { + "epoch": 0.56, + "grad_norm": 13.626023173192689, + "learning_rate": 1.8753821533106373e-05, + "loss": 0.8656, + "step": 3569 + }, + { + "epoch": 0.56, + "grad_norm": 24.137613945736003, + "learning_rate": 1.8753006150123662e-05, + "loss": 0.7823, + "step": 3570 + }, + { + "epoch": 0.56, + "grad_norm": 20.26057195828669, + "learning_rate": 1.8752190518210572e-05, + "loss": 0.8447, + "step": 3571 + }, + { + "epoch": 0.56, + "grad_norm": 24.026301947296844, + "learning_rate": 1.8751374637390304e-05, + "loss": 0.8068, + "step": 3572 + }, + { + "epoch": 0.56, + "grad_norm": 20.285851063941212, + "learning_rate": 1.8750558507686065e-05, + "loss": 0.8071, + "step": 3573 + }, + { + "epoch": 0.56, + "grad_norm": 24.57400438624219, + "learning_rate": 1.8749742129121058e-05, + "loss": 0.8453, + "step": 3574 + }, + { + "epoch": 0.56, + "grad_norm": 16.30709197570784, + "learning_rate": 1.87489255017185e-05, + "loss": 0.7422, + "step": 3575 + }, + { + "epoch": 0.56, + "grad_norm": 21.731462238209005, + "learning_rate": 1.8748108625501623e-05, + "loss": 0.9153, + "step": 3576 + }, + { + "epoch": 0.56, + "grad_norm": 25.09779022359157, + "learning_rate": 1.8747291500493653e-05, + "loss": 0.7667, + "step": 3577 + }, + { + "epoch": 0.56, + "grad_norm": 17.817954822661456, + "learning_rate": 1.874647412671783e-05, + "loss": 0.809, + "step": 3578 + }, + { + "epoch": 0.56, + "grad_norm": 23.494904352463458, + "learning_rate": 1.87456565041974e-05, + "loss": 0.863, + "step": 3579 + }, + { + "epoch": 0.56, + "grad_norm": 20.312299238826743, + "learning_rate": 1.8744838632955617e-05, + "loss": 0.8786, + "step": 3580 + }, + { + "epoch": 0.56, + "grad_norm": 29.869929817642944, + "learning_rate": 
1.8744020513015733e-05, + "loss": 0.915, + "step": 3581 + }, + { + "epoch": 0.56, + "grad_norm": 19.34152127146327, + "learning_rate": 1.8743202144401027e-05, + "loss": 0.7927, + "step": 3582 + }, + { + "epoch": 0.56, + "grad_norm": 19.800781097231496, + "learning_rate": 1.874238352713476e-05, + "loss": 0.8538, + "step": 3583 + }, + { + "epoch": 0.56, + "grad_norm": 16.98585337606707, + "learning_rate": 1.8741564661240223e-05, + "loss": 0.7454, + "step": 3584 + }, + { + "epoch": 0.56, + "grad_norm": 24.135990223814957, + "learning_rate": 1.87407455467407e-05, + "loss": 0.9235, + "step": 3585 + }, + { + "epoch": 0.56, + "grad_norm": 14.53984675067897, + "learning_rate": 1.8739926183659492e-05, + "loss": 0.8527, + "step": 3586 + }, + { + "epoch": 0.56, + "grad_norm": 18.051889720266622, + "learning_rate": 1.873910657201989e-05, + "loss": 0.7914, + "step": 3587 + }, + { + "epoch": 0.56, + "grad_norm": 12.075225415406303, + "learning_rate": 1.873828671184521e-05, + "loss": 0.6688, + "step": 3588 + }, + { + "epoch": 0.56, + "grad_norm": 13.689917363989505, + "learning_rate": 1.873746660315877e-05, + "loss": 0.6685, + "step": 3589 + }, + { + "epoch": 0.56, + "grad_norm": 20.073484476381477, + "learning_rate": 1.873664624598389e-05, + "loss": 0.8412, + "step": 3590 + }, + { + "epoch": 0.56, + "grad_norm": 18.233145370390293, + "learning_rate": 1.8735825640343904e-05, + "loss": 0.7767, + "step": 3591 + }, + { + "epoch": 0.56, + "grad_norm": 16.73616713990554, + "learning_rate": 1.8735004786262142e-05, + "loss": 0.7929, + "step": 3592 + }, + { + "epoch": 0.56, + "grad_norm": 19.424976564636232, + "learning_rate": 1.8734183683761957e-05, + "loss": 0.7167, + "step": 3593 + }, + { + "epoch": 0.56, + "grad_norm": 16.365261378791256, + "learning_rate": 1.8733362332866698e-05, + "loss": 0.7045, + "step": 3594 + }, + { + "epoch": 0.56, + "grad_norm": 11.050867687142146, + "learning_rate": 1.873254073359972e-05, + "loss": 0.7009, + "step": 3595 + }, + { + "epoch": 0.56, + "grad_norm": 15.590000056221823, + "learning_rate": 1.8731718885984396e-05, + "loss": 0.7316, + "step": 3596 + }, + { + "epoch": 0.56, + "grad_norm": 16.923169066313335, + "learning_rate": 1.8730896790044092e-05, + "loss": 0.8225, + "step": 3597 + }, + { + "epoch": 0.56, + "grad_norm": 15.301342875710873, + "learning_rate": 1.873007444580219e-05, + "loss": 0.6974, + "step": 3598 + }, + { + "epoch": 0.56, + "grad_norm": 19.55983581164699, + "learning_rate": 1.872925185328208e-05, + "loss": 0.8101, + "step": 3599 + }, + { + "epoch": 0.56, + "grad_norm": 21.681606536335355, + "learning_rate": 1.872842901250715e-05, + "loss": 0.8205, + "step": 3600 + }, + { + "epoch": 0.56, + "grad_norm": 18.115853660671004, + "learning_rate": 1.872760592350081e-05, + "loss": 0.7441, + "step": 3601 + }, + { + "epoch": 0.56, + "grad_norm": 15.91763958736906, + "learning_rate": 1.872678258628646e-05, + "loss": 0.7799, + "step": 3602 + }, + { + "epoch": 0.56, + "grad_norm": 30.306542040101633, + "learning_rate": 1.872595900088752e-05, + "loss": 0.8051, + "step": 3603 + }, + { + "epoch": 0.56, + "grad_norm": 27.23274052060126, + "learning_rate": 1.872513516732741e-05, + "loss": 0.847, + "step": 3604 + }, + { + "epoch": 0.56, + "grad_norm": 21.02108160886153, + "learning_rate": 1.872431108562956e-05, + "loss": 0.8093, + "step": 3605 + }, + { + "epoch": 0.56, + "grad_norm": 26.913484732152558, + "learning_rate": 1.8723486755817405e-05, + "loss": 0.7594, + "step": 3606 + }, + { + "epoch": 0.56, + "grad_norm": 20.98816288542478, + "learning_rate": 
1.872266217791439e-05, + "loss": 0.8622, + "step": 3607 + }, + { + "epoch": 0.56, + "grad_norm": 16.278605511314908, + "learning_rate": 1.8721837351943964e-05, + "loss": 0.7945, + "step": 3608 + }, + { + "epoch": 0.56, + "grad_norm": 17.05202768757929, + "learning_rate": 1.8721012277929588e-05, + "loss": 0.7563, + "step": 3609 + }, + { + "epoch": 0.56, + "grad_norm": 13.60643878756732, + "learning_rate": 1.8720186955894722e-05, + "loss": 0.7403, + "step": 3610 + }, + { + "epoch": 0.56, + "grad_norm": 23.26520798311354, + "learning_rate": 1.8719361385862843e-05, + "loss": 0.6803, + "step": 3611 + }, + { + "epoch": 0.56, + "grad_norm": 15.350552483449457, + "learning_rate": 1.8718535567857426e-05, + "loss": 0.8566, + "step": 3612 + }, + { + "epoch": 0.56, + "grad_norm": 14.610469631250647, + "learning_rate": 1.8717709501901956e-05, + "loss": 0.8075, + "step": 3613 + }, + { + "epoch": 0.56, + "grad_norm": 22.323171734891655, + "learning_rate": 1.8716883188019932e-05, + "loss": 0.733, + "step": 3614 + }, + { + "epoch": 0.56, + "grad_norm": 25.128167239825995, + "learning_rate": 1.8716056626234848e-05, + "loss": 0.9516, + "step": 3615 + }, + { + "epoch": 0.56, + "grad_norm": 18.73150825360642, + "learning_rate": 1.871522981657021e-05, + "loss": 0.8248, + "step": 3616 + }, + { + "epoch": 0.56, + "grad_norm": 22.36607078445375, + "learning_rate": 1.8714402759049535e-05, + "loss": 0.8285, + "step": 3617 + }, + { + "epoch": 0.57, + "grad_norm": 19.57147295931924, + "learning_rate": 1.8713575453696344e-05, + "loss": 0.9086, + "step": 3618 + }, + { + "epoch": 0.57, + "grad_norm": 21.40856968082394, + "learning_rate": 1.8712747900534164e-05, + "loss": 0.7795, + "step": 3619 + }, + { + "epoch": 0.57, + "grad_norm": 12.859809590037942, + "learning_rate": 1.871192009958653e-05, + "loss": 0.7441, + "step": 3620 + }, + { + "epoch": 0.57, + "grad_norm": 19.53025016819523, + "learning_rate": 1.8711092050876982e-05, + "loss": 0.9138, + "step": 3621 + }, + { + "epoch": 0.57, + "grad_norm": 34.76080812152451, + "learning_rate": 1.8710263754429076e-05, + "loss": 0.8394, + "step": 3622 + }, + { + "epoch": 0.57, + "grad_norm": 14.259602599968307, + "learning_rate": 1.870943521026636e-05, + "loss": 0.7928, + "step": 3623 + }, + { + "epoch": 0.57, + "grad_norm": 11.41764770275707, + "learning_rate": 1.8708606418412408e-05, + "loss": 0.7217, + "step": 3624 + }, + { + "epoch": 0.57, + "grad_norm": 22.228803090629167, + "learning_rate": 1.8707777378890778e-05, + "loss": 0.7196, + "step": 3625 + }, + { + "epoch": 0.57, + "grad_norm": 13.779308315302089, + "learning_rate": 1.870694809172506e-05, + "loss": 0.6867, + "step": 3626 + }, + { + "epoch": 0.57, + "grad_norm": 17.212870471543894, + "learning_rate": 1.8706118556938826e-05, + "loss": 0.8893, + "step": 3627 + }, + { + "epoch": 0.57, + "grad_norm": 21.288949869540392, + "learning_rate": 1.870528877455567e-05, + "loss": 0.7834, + "step": 3628 + }, + { + "epoch": 0.57, + "grad_norm": 27.652253198347413, + "learning_rate": 1.8704458744599202e-05, + "loss": 0.7959, + "step": 3629 + }, + { + "epoch": 0.57, + "grad_norm": 24.136165796846026, + "learning_rate": 1.8703628467093017e-05, + "loss": 0.7148, + "step": 3630 + }, + { + "epoch": 0.57, + "grad_norm": 18.19935266429516, + "learning_rate": 1.8702797942060725e-05, + "loss": 0.7861, + "step": 3631 + }, + { + "epoch": 0.57, + "grad_norm": 14.491594517855711, + "learning_rate": 1.8701967169525958e-05, + "loss": 0.7346, + "step": 3632 + }, + { + "epoch": 0.57, + "grad_norm": 25.613146952174763, + "learning_rate": 
1.8701136149512328e-05, + "loss": 0.8124, + "step": 3633 + }, + { + "epoch": 0.57, + "grad_norm": 25.47404466251857, + "learning_rate": 1.870030488204348e-05, + "loss": 0.7995, + "step": 3634 + }, + { + "epoch": 0.57, + "grad_norm": 18.611119590517344, + "learning_rate": 1.869947336714305e-05, + "loss": 0.7885, + "step": 3635 + }, + { + "epoch": 0.57, + "grad_norm": 18.579567387238896, + "learning_rate": 1.8698641604834687e-05, + "loss": 0.7388, + "step": 3636 + }, + { + "epoch": 0.57, + "grad_norm": 30.468340198010637, + "learning_rate": 1.8697809595142042e-05, + "loss": 0.8654, + "step": 3637 + }, + { + "epoch": 0.57, + "grad_norm": 21.634382934161103, + "learning_rate": 1.869697733808878e-05, + "loss": 0.7932, + "step": 3638 + }, + { + "epoch": 0.57, + "grad_norm": 15.187628459485262, + "learning_rate": 1.8696144833698574e-05, + "loss": 0.645, + "step": 3639 + }, + { + "epoch": 0.57, + "grad_norm": 22.130600509361752, + "learning_rate": 1.8695312081995096e-05, + "loss": 0.8204, + "step": 3640 + }, + { + "epoch": 0.57, + "grad_norm": 27.831058635649146, + "learning_rate": 1.8694479083002027e-05, + "loss": 0.8136, + "step": 3641 + }, + { + "epoch": 0.57, + "grad_norm": 22.409522530523994, + "learning_rate": 1.8693645836743062e-05, + "loss": 0.8237, + "step": 3642 + }, + { + "epoch": 0.57, + "grad_norm": 17.235013782217933, + "learning_rate": 1.8692812343241892e-05, + "loss": 0.7363, + "step": 3643 + }, + { + "epoch": 0.57, + "grad_norm": 17.526979839661074, + "learning_rate": 1.8691978602522226e-05, + "loss": 0.8215, + "step": 3644 + }, + { + "epoch": 0.57, + "grad_norm": 18.583651183965696, + "learning_rate": 1.869114461460777e-05, + "loss": 0.7235, + "step": 3645 + }, + { + "epoch": 0.57, + "grad_norm": 20.26246913640462, + "learning_rate": 1.8690310379522247e-05, + "loss": 0.7706, + "step": 3646 + }, + { + "epoch": 0.57, + "grad_norm": 19.183451956300864, + "learning_rate": 1.8689475897289383e-05, + "loss": 0.8115, + "step": 3647 + }, + { + "epoch": 0.57, + "grad_norm": 19.555517784457376, + "learning_rate": 1.8688641167932906e-05, + "loss": 0.7744, + "step": 3648 + }, + { + "epoch": 0.57, + "grad_norm": 17.314369426079622, + "learning_rate": 1.8687806191476555e-05, + "loss": 0.8448, + "step": 3649 + }, + { + "epoch": 0.57, + "grad_norm": 57.21910759185438, + "learning_rate": 1.8686970967944082e-05, + "loss": 0.911, + "step": 3650 + }, + { + "epoch": 0.57, + "grad_norm": 20.292246053565233, + "learning_rate": 1.8686135497359233e-05, + "loss": 0.7652, + "step": 3651 + }, + { + "epoch": 0.57, + "grad_norm": 28.760178358034118, + "learning_rate": 1.868529977974577e-05, + "loss": 0.8735, + "step": 3652 + }, + { + "epoch": 0.57, + "grad_norm": 18.26116852854488, + "learning_rate": 1.868446381512747e-05, + "loss": 0.763, + "step": 3653 + }, + { + "epoch": 0.57, + "grad_norm": 21.453039699809953, + "learning_rate": 1.868362760352809e-05, + "loss": 0.818, + "step": 3654 + }, + { + "epoch": 0.57, + "grad_norm": 26.40508058486648, + "learning_rate": 1.8682791144971426e-05, + "loss": 0.7489, + "step": 3655 + }, + { + "epoch": 0.57, + "grad_norm": 17.359591276708297, + "learning_rate": 1.8681954439481262e-05, + "loss": 0.7248, + "step": 3656 + }, + { + "epoch": 0.57, + "grad_norm": 15.872460952596, + "learning_rate": 1.868111748708139e-05, + "loss": 0.7021, + "step": 3657 + }, + { + "epoch": 0.57, + "grad_norm": 28.487588763402098, + "learning_rate": 1.8680280287795612e-05, + "loss": 0.7719, + "step": 3658 + }, + { + "epoch": 0.57, + "grad_norm": 24.57308009136288, + "learning_rate": 
1.8679442841647747e-05, + "loss": 0.7908, + "step": 3659 + }, + { + "epoch": 0.57, + "grad_norm": 21.86155825162964, + "learning_rate": 1.8678605148661602e-05, + "loss": 0.7919, + "step": 3660 + }, + { + "epoch": 0.57, + "grad_norm": 27.30456815592562, + "learning_rate": 1.8677767208861e-05, + "loss": 0.8657, + "step": 3661 + }, + { + "epoch": 0.57, + "grad_norm": 16.672394869080815, + "learning_rate": 1.867692902226978e-05, + "loss": 0.7507, + "step": 3662 + }, + { + "epoch": 0.57, + "grad_norm": 17.984745160195164, + "learning_rate": 1.8676090588911774e-05, + "loss": 0.8663, + "step": 3663 + }, + { + "epoch": 0.57, + "grad_norm": 18.687202469983326, + "learning_rate": 1.8675251908810824e-05, + "loss": 0.8457, + "step": 3664 + }, + { + "epoch": 0.57, + "grad_norm": 20.454990390052906, + "learning_rate": 1.867441298199079e-05, + "loss": 0.8896, + "step": 3665 + }, + { + "epoch": 0.57, + "grad_norm": 16.650155715851447, + "learning_rate": 1.867357380847552e-05, + "loss": 0.8179, + "step": 3666 + }, + { + "epoch": 0.57, + "grad_norm": 18.549687040618135, + "learning_rate": 1.8672734388288883e-05, + "loss": 0.8302, + "step": 3667 + }, + { + "epoch": 0.57, + "grad_norm": 36.85061458123028, + "learning_rate": 1.867189472145476e-05, + "loss": 0.8611, + "step": 3668 + }, + { + "epoch": 0.57, + "grad_norm": 25.57156035807646, + "learning_rate": 1.867105480799702e-05, + "loss": 0.8497, + "step": 3669 + }, + { + "epoch": 0.57, + "grad_norm": 22.301634964109933, + "learning_rate": 1.867021464793955e-05, + "loss": 0.9199, + "step": 3670 + }, + { + "epoch": 0.57, + "grad_norm": 28.41043453638851, + "learning_rate": 1.8669374241306256e-05, + "loss": 0.8815, + "step": 3671 + }, + { + "epoch": 0.57, + "grad_norm": 18.0655999081036, + "learning_rate": 1.8668533588121026e-05, + "loss": 0.6696, + "step": 3672 + }, + { + "epoch": 0.57, + "grad_norm": 28.456877733810106, + "learning_rate": 1.866769268840777e-05, + "loss": 0.7239, + "step": 3673 + }, + { + "epoch": 0.57, + "grad_norm": 21.17869941548231, + "learning_rate": 1.8666851542190406e-05, + "loss": 0.8522, + "step": 3674 + }, + { + "epoch": 0.57, + "grad_norm": 15.974652400007232, + "learning_rate": 1.8666010149492853e-05, + "loss": 0.8909, + "step": 3675 + }, + { + "epoch": 0.57, + "grad_norm": 21.414487590279492, + "learning_rate": 1.8665168510339043e-05, + "loss": 0.8369, + "step": 3676 + }, + { + "epoch": 0.57, + "grad_norm": 15.600487558833734, + "learning_rate": 1.866432662475291e-05, + "loss": 0.7624, + "step": 3677 + }, + { + "epoch": 0.57, + "grad_norm": 21.683341203338003, + "learning_rate": 1.8663484492758393e-05, + "loss": 0.7371, + "step": 3678 + }, + { + "epoch": 0.57, + "grad_norm": 22.098718588319226, + "learning_rate": 1.8662642114379444e-05, + "loss": 0.8288, + "step": 3679 + }, + { + "epoch": 0.57, + "grad_norm": 28.80406669918209, + "learning_rate": 1.8661799489640025e-05, + "loss": 0.8039, + "step": 3680 + }, + { + "epoch": 0.57, + "grad_norm": 30.536797920397884, + "learning_rate": 1.8660956618564095e-05, + "loss": 0.8121, + "step": 3681 + }, + { + "epoch": 0.58, + "grad_norm": 5.463780093074181, + "learning_rate": 1.8660113501175622e-05, + "loss": 0.8281, + "step": 3682 + }, + { + "epoch": 0.58, + "grad_norm": 13.134779266009994, + "learning_rate": 1.8659270137498588e-05, + "loss": 0.8206, + "step": 3683 + }, + { + "epoch": 0.58, + "grad_norm": 15.628277560276816, + "learning_rate": 1.865842652755698e-05, + "loss": 0.7461, + "step": 3684 + }, + { + "epoch": 0.58, + "grad_norm": 11.614731937008038, + "learning_rate": 
1.8657582671374787e-05, + "loss": 0.6644, + "step": 3685 + }, + { + "epoch": 0.58, + "grad_norm": 19.265353233388268, + "learning_rate": 1.8656738568976003e-05, + "loss": 0.8624, + "step": 3686 + }, + { + "epoch": 0.58, + "grad_norm": 40.75501088477457, + "learning_rate": 1.865589422038464e-05, + "loss": 0.9502, + "step": 3687 + }, + { + "epoch": 0.58, + "grad_norm": 15.772324339522127, + "learning_rate": 1.865504962562471e-05, + "loss": 0.8153, + "step": 3688 + }, + { + "epoch": 0.58, + "grad_norm": 33.304020535159474, + "learning_rate": 1.865420478472023e-05, + "loss": 0.8258, + "step": 3689 + }, + { + "epoch": 0.58, + "grad_norm": 15.301013521861792, + "learning_rate": 1.865335969769523e-05, + "loss": 0.792, + "step": 3690 + }, + { + "epoch": 0.58, + "grad_norm": 16.082822833479007, + "learning_rate": 1.865251436457374e-05, + "loss": 0.7324, + "step": 3691 + }, + { + "epoch": 0.58, + "grad_norm": 24.465329748446127, + "learning_rate": 1.8651668785379806e-05, + "loss": 0.8932, + "step": 3692 + }, + { + "epoch": 0.58, + "grad_norm": 16.513643689980302, + "learning_rate": 1.865082296013747e-05, + "loss": 0.7207, + "step": 3693 + }, + { + "epoch": 0.58, + "grad_norm": 16.25021015911653, + "learning_rate": 1.864997688887079e-05, + "loss": 0.8304, + "step": 3694 + }, + { + "epoch": 0.58, + "grad_norm": 16.24651118111633, + "learning_rate": 1.864913057160383e-05, + "loss": 0.7584, + "step": 3695 + }, + { + "epoch": 0.58, + "grad_norm": 16.393855193902642, + "learning_rate": 1.8648284008360654e-05, + "loss": 0.7448, + "step": 3696 + }, + { + "epoch": 0.58, + "grad_norm": 15.497330229931306, + "learning_rate": 1.864743719916534e-05, + "loss": 0.7827, + "step": 3697 + }, + { + "epoch": 0.58, + "grad_norm": 16.0206140060649, + "learning_rate": 1.8646590144041972e-05, + "loss": 0.7568, + "step": 3698 + }, + { + "epoch": 0.58, + "grad_norm": 18.920893952006146, + "learning_rate": 1.8645742843014635e-05, + "loss": 0.7157, + "step": 3699 + }, + { + "epoch": 0.58, + "grad_norm": 20.773234533872827, + "learning_rate": 1.8644895296107432e-05, + "loss": 0.8918, + "step": 3700 + }, + { + "epoch": 0.58, + "grad_norm": 14.331823485254711, + "learning_rate": 1.8644047503344465e-05, + "loss": 0.7402, + "step": 3701 + }, + { + "epoch": 0.58, + "grad_norm": 17.397191009440277, + "learning_rate": 1.8643199464749843e-05, + "loss": 0.8009, + "step": 3702 + }, + { + "epoch": 0.58, + "grad_norm": 17.073646718228165, + "learning_rate": 1.8642351180347682e-05, + "loss": 0.7898, + "step": 3703 + }, + { + "epoch": 0.58, + "grad_norm": 22.889933136258644, + "learning_rate": 1.864150265016211e-05, + "loss": 0.8683, + "step": 3704 + }, + { + "epoch": 0.58, + "grad_norm": 22.081720816621452, + "learning_rate": 1.8640653874217257e-05, + "loss": 0.7606, + "step": 3705 + }, + { + "epoch": 0.58, + "grad_norm": 20.892132729651188, + "learning_rate": 1.8639804852537264e-05, + "loss": 0.749, + "step": 3706 + }, + { + "epoch": 0.58, + "grad_norm": 22.552952171787748, + "learning_rate": 1.8638955585146277e-05, + "loss": 0.8249, + "step": 3707 + }, + { + "epoch": 0.58, + "grad_norm": 17.715828096238656, + "learning_rate": 1.8638106072068443e-05, + "loss": 0.8625, + "step": 3708 + }, + { + "epoch": 0.58, + "grad_norm": 24.97202987995221, + "learning_rate": 1.863725631332793e-05, + "loss": 0.7885, + "step": 3709 + }, + { + "epoch": 0.58, + "grad_norm": 20.576101891571074, + "learning_rate": 1.8636406308948894e-05, + "loss": 0.716, + "step": 3710 + }, + { + "epoch": 0.58, + "grad_norm": 18.46503447903622, + "learning_rate": 
1.863555605895552e-05, + "loss": 0.8587, + "step": 3711 + }, + { + "epoch": 0.58, + "grad_norm": 16.752132537564197, + "learning_rate": 1.863470556337198e-05, + "loss": 0.7506, + "step": 3712 + }, + { + "epoch": 0.58, + "grad_norm": 14.947742279101046, + "learning_rate": 1.8633854822222467e-05, + "loss": 0.8411, + "step": 3713 + }, + { + "epoch": 0.58, + "grad_norm": 28.89918343127105, + "learning_rate": 1.8633003835531172e-05, + "loss": 0.8768, + "step": 3714 + }, + { + "epoch": 0.58, + "grad_norm": 21.52785553286638, + "learning_rate": 1.86321526033223e-05, + "loss": 0.8796, + "step": 3715 + }, + { + "epoch": 0.58, + "grad_norm": 19.747557255120682, + "learning_rate": 1.8631301125620056e-05, + "loss": 0.887, + "step": 3716 + }, + { + "epoch": 0.58, + "grad_norm": 15.558054906778917, + "learning_rate": 1.8630449402448655e-05, + "loss": 0.8198, + "step": 3717 + }, + { + "epoch": 0.58, + "grad_norm": 12.103131864803347, + "learning_rate": 1.8629597433832326e-05, + "loss": 0.7212, + "step": 3718 + }, + { + "epoch": 0.58, + "grad_norm": 16.514996598774914, + "learning_rate": 1.8628745219795292e-05, + "loss": 0.8049, + "step": 3719 + }, + { + "epoch": 0.58, + "grad_norm": 15.416992104377439, + "learning_rate": 1.862789276036179e-05, + "loss": 0.8523, + "step": 3720 + }, + { + "epoch": 0.58, + "grad_norm": 17.69594792705124, + "learning_rate": 1.862704005555607e-05, + "loss": 0.7291, + "step": 3721 + }, + { + "epoch": 0.58, + "grad_norm": 4.649249725546298, + "learning_rate": 1.8626187105402373e-05, + "loss": 0.8157, + "step": 3722 + }, + { + "epoch": 0.58, + "grad_norm": 19.471889943738677, + "learning_rate": 1.8625333909924964e-05, + "loss": 0.8382, + "step": 3723 + }, + { + "epoch": 0.58, + "grad_norm": 10.930556376606772, + "learning_rate": 1.8624480469148103e-05, + "loss": 0.7533, + "step": 3724 + }, + { + "epoch": 0.58, + "grad_norm": 18.18798221443737, + "learning_rate": 1.862362678309606e-05, + "loss": 0.7067, + "step": 3725 + }, + { + "epoch": 0.58, + "grad_norm": 33.96096729249176, + "learning_rate": 1.862277285179312e-05, + "loss": 0.7739, + "step": 3726 + }, + { + "epoch": 0.58, + "grad_norm": 23.539033528287007, + "learning_rate": 1.8621918675263564e-05, + "loss": 0.8901, + "step": 3727 + }, + { + "epoch": 0.58, + "grad_norm": 24.943651439129837, + "learning_rate": 1.862106425353168e-05, + "loss": 0.8123, + "step": 3728 + }, + { + "epoch": 0.58, + "grad_norm": 15.068993937394005, + "learning_rate": 1.8620209586621778e-05, + "loss": 0.6734, + "step": 3729 + }, + { + "epoch": 0.58, + "grad_norm": 13.885760329232188, + "learning_rate": 1.8619354674558155e-05, + "loss": 0.7728, + "step": 3730 + }, + { + "epoch": 0.58, + "grad_norm": 20.392613563181637, + "learning_rate": 1.8618499517365127e-05, + "loss": 0.8404, + "step": 3731 + }, + { + "epoch": 0.58, + "grad_norm": 14.644081427299254, + "learning_rate": 1.8617644115067013e-05, + "loss": 0.6939, + "step": 3732 + }, + { + "epoch": 0.58, + "grad_norm": 26.357701271049024, + "learning_rate": 1.8616788467688146e-05, + "loss": 0.7877, + "step": 3733 + }, + { + "epoch": 0.58, + "grad_norm": 18.70186861458413, + "learning_rate": 1.8615932575252855e-05, + "loss": 0.8398, + "step": 3734 + }, + { + "epoch": 0.58, + "grad_norm": 15.065060152623122, + "learning_rate": 1.8615076437785474e-05, + "loss": 0.8109, + "step": 3735 + }, + { + "epoch": 0.58, + "grad_norm": 17.992245761414257, + "learning_rate": 1.8614220055310367e-05, + "loss": 0.7348, + "step": 3736 + }, + { + "epoch": 0.58, + "grad_norm": 15.539017242837538, + "learning_rate": 
1.8613363427851875e-05, + "loss": 0.77, + "step": 3737 + }, + { + "epoch": 0.58, + "grad_norm": 18.89321930033038, + "learning_rate": 1.8612506555434372e-05, + "loss": 0.748, + "step": 3738 + }, + { + "epoch": 0.58, + "grad_norm": 16.259032747631917, + "learning_rate": 1.861164943808222e-05, + "loss": 0.7775, + "step": 3739 + }, + { + "epoch": 0.58, + "grad_norm": 22.41070148109956, + "learning_rate": 1.861079207581979e-05, + "loss": 0.8158, + "step": 3740 + }, + { + "epoch": 0.58, + "grad_norm": 17.338031472144, + "learning_rate": 1.8609934468671474e-05, + "loss": 0.848, + "step": 3741 + }, + { + "epoch": 0.58, + "grad_norm": 20.376155894349836, + "learning_rate": 1.8609076616661658e-05, + "loss": 0.8179, + "step": 3742 + }, + { + "epoch": 0.58, + "grad_norm": 20.957199003573866, + "learning_rate": 1.860821851981474e-05, + "loss": 0.8948, + "step": 3743 + }, + { + "epoch": 0.58, + "grad_norm": 20.75913222270166, + "learning_rate": 1.8607360178155123e-05, + "loss": 0.7616, + "step": 3744 + }, + { + "epoch": 0.58, + "grad_norm": 12.465591898348475, + "learning_rate": 1.8606501591707217e-05, + "loss": 0.8039, + "step": 3745 + }, + { + "epoch": 0.59, + "grad_norm": 18.581807346691082, + "learning_rate": 1.860564276049544e-05, + "loss": 0.7629, + "step": 3746 + }, + { + "epoch": 0.59, + "grad_norm": 14.018448676515995, + "learning_rate": 1.860478368454422e-05, + "loss": 0.8146, + "step": 3747 + }, + { + "epoch": 0.59, + "grad_norm": 22.529173518563663, + "learning_rate": 1.860392436387798e-05, + "loss": 0.875, + "step": 3748 + }, + { + "epoch": 0.59, + "grad_norm": 21.22668664928939, + "learning_rate": 1.8603064798521168e-05, + "loss": 0.8169, + "step": 3749 + }, + { + "epoch": 0.59, + "grad_norm": 17.648865502914557, + "learning_rate": 1.8602204988498224e-05, + "loss": 0.6834, + "step": 3750 + }, + { + "epoch": 0.59, + "grad_norm": 24.235708710342703, + "learning_rate": 1.8601344933833605e-05, + "loss": 0.787, + "step": 3751 + }, + { + "epoch": 0.59, + "grad_norm": 16.519268344095945, + "learning_rate": 1.8600484634551767e-05, + "loss": 0.8487, + "step": 3752 + }, + { + "epoch": 0.59, + "grad_norm": 16.483473850314812, + "learning_rate": 1.8599624090677174e-05, + "loss": 0.8656, + "step": 3753 + }, + { + "epoch": 0.59, + "grad_norm": 22.485350888528718, + "learning_rate": 1.8598763302234306e-05, + "loss": 0.8484, + "step": 3754 + }, + { + "epoch": 0.59, + "grad_norm": 14.958964163665327, + "learning_rate": 1.859790226924764e-05, + "loss": 0.7221, + "step": 3755 + }, + { + "epoch": 0.59, + "grad_norm": 24.43862765361714, + "learning_rate": 1.8597040991741663e-05, + "loss": 0.8133, + "step": 3756 + }, + { + "epoch": 0.59, + "grad_norm": 14.426216796530715, + "learning_rate": 1.8596179469740863e-05, + "loss": 0.8019, + "step": 3757 + }, + { + "epoch": 0.59, + "grad_norm": 17.811719792830015, + "learning_rate": 1.8595317703269756e-05, + "loss": 0.8006, + "step": 3758 + }, + { + "epoch": 0.59, + "grad_norm": 29.485282200702745, + "learning_rate": 1.859445569235284e-05, + "loss": 0.7719, + "step": 3759 + }, + { + "epoch": 0.59, + "grad_norm": 18.848862857046242, + "learning_rate": 1.8593593437014627e-05, + "loss": 0.9183, + "step": 3760 + }, + { + "epoch": 0.59, + "grad_norm": 32.01885737200636, + "learning_rate": 1.8592730937279647e-05, + "loss": 0.8939, + "step": 3761 + }, + { + "epoch": 0.59, + "grad_norm": 15.799788601583767, + "learning_rate": 1.8591868193172423e-05, + "loss": 0.7175, + "step": 3762 + }, + { + "epoch": 0.59, + "grad_norm": 23.225276019629412, + "learning_rate": 
1.8591005204717498e-05, + "loss": 0.7856, + "step": 3763 + }, + { + "epoch": 0.59, + "grad_norm": 22.592885455927046, + "learning_rate": 1.859014197193941e-05, + "loss": 0.7464, + "step": 3764 + }, + { + "epoch": 0.59, + "grad_norm": 23.39088362396005, + "learning_rate": 1.8589278494862707e-05, + "loss": 0.8755, + "step": 3765 + }, + { + "epoch": 0.59, + "grad_norm": 21.106150836220248, + "learning_rate": 1.858841477351195e-05, + "loss": 0.7518, + "step": 3766 + }, + { + "epoch": 0.59, + "grad_norm": 21.785924575441783, + "learning_rate": 1.85875508079117e-05, + "loss": 0.8277, + "step": 3767 + }, + { + "epoch": 0.59, + "grad_norm": 32.84390877051048, + "learning_rate": 1.858668659808653e-05, + "loss": 0.8027, + "step": 3768 + }, + { + "epoch": 0.59, + "grad_norm": 19.61427228954143, + "learning_rate": 1.8585822144061013e-05, + "loss": 0.8436, + "step": 3769 + }, + { + "epoch": 0.59, + "grad_norm": 15.665475394438053, + "learning_rate": 1.858495744585974e-05, + "loss": 0.6795, + "step": 3770 + }, + { + "epoch": 0.59, + "grad_norm": 13.333938397397688, + "learning_rate": 1.8584092503507296e-05, + "loss": 0.7503, + "step": 3771 + }, + { + "epoch": 0.59, + "grad_norm": 15.391209292576, + "learning_rate": 1.8583227317028287e-05, + "loss": 0.7334, + "step": 3772 + }, + { + "epoch": 0.59, + "grad_norm": 21.32468939906611, + "learning_rate": 1.8582361886447315e-05, + "loss": 0.84, + "step": 3773 + }, + { + "epoch": 0.59, + "grad_norm": 21.89209096820718, + "learning_rate": 1.8581496211788985e-05, + "loss": 0.722, + "step": 3774 + }, + { + "epoch": 0.59, + "grad_norm": 15.059009598705948, + "learning_rate": 1.8580630293077925e-05, + "loss": 0.7838, + "step": 3775 + }, + { + "epoch": 0.59, + "grad_norm": 21.446796155028977, + "learning_rate": 1.857976413033876e-05, + "loss": 0.7523, + "step": 3776 + }, + { + "epoch": 0.59, + "grad_norm": 15.836283488509583, + "learning_rate": 1.8578897723596124e-05, + "loss": 0.818, + "step": 3777 + }, + { + "epoch": 0.59, + "grad_norm": 24.57292858200032, + "learning_rate": 1.8578031072874655e-05, + "loss": 0.6849, + "step": 3778 + }, + { + "epoch": 0.59, + "grad_norm": 28.16611248179845, + "learning_rate": 1.8577164178198995e-05, + "loss": 0.8842, + "step": 3779 + }, + { + "epoch": 0.59, + "grad_norm": 21.157711751917876, + "learning_rate": 1.857629703959381e-05, + "loss": 0.7568, + "step": 3780 + }, + { + "epoch": 0.59, + "grad_norm": 19.410373774042984, + "learning_rate": 1.8575429657083753e-05, + "loss": 0.7815, + "step": 3781 + }, + { + "epoch": 0.59, + "grad_norm": 22.342904453684604, + "learning_rate": 1.8574562030693492e-05, + "loss": 0.7891, + "step": 3782 + }, + { + "epoch": 0.59, + "grad_norm": 20.830380088738504, + "learning_rate": 1.85736941604477e-05, + "loss": 0.7813, + "step": 3783 + }, + { + "epoch": 0.59, + "grad_norm": 28.31186738581136, + "learning_rate": 1.8572826046371063e-05, + "loss": 0.8201, + "step": 3784 + }, + { + "epoch": 0.59, + "grad_norm": 14.20612771751097, + "learning_rate": 1.8571957688488268e-05, + "loss": 0.7892, + "step": 3785 + }, + { + "epoch": 0.59, + "grad_norm": 29.547951486546346, + "learning_rate": 1.8571089086824014e-05, + "loss": 0.9203, + "step": 3786 + }, + { + "epoch": 0.59, + "grad_norm": 18.98118268868699, + "learning_rate": 1.8570220241402996e-05, + "loss": 0.8133, + "step": 3787 + }, + { + "epoch": 0.59, + "grad_norm": 20.630320797576523, + "learning_rate": 1.8569351152249932e-05, + "loss": 0.7927, + "step": 3788 + }, + { + "epoch": 0.59, + "grad_norm": 19.285110558572953, + "learning_rate": 
1.856848181938953e-05, + "loss": 0.8186, + "step": 3789 + }, + { + "epoch": 0.59, + "grad_norm": 14.457573311308336, + "learning_rate": 1.856761224284652e-05, + "loss": 0.783, + "step": 3790 + }, + { + "epoch": 0.59, + "grad_norm": 20.97183920820715, + "learning_rate": 1.856674242264563e-05, + "loss": 0.7642, + "step": 3791 + }, + { + "epoch": 0.59, + "grad_norm": 13.592564884134067, + "learning_rate": 1.8565872358811594e-05, + "loss": 0.8134, + "step": 3792 + }, + { + "epoch": 0.59, + "grad_norm": 14.40412498552598, + "learning_rate": 1.856500205136916e-05, + "loss": 0.7384, + "step": 3793 + }, + { + "epoch": 0.59, + "grad_norm": 15.685917406879609, + "learning_rate": 1.8564131500343082e-05, + "loss": 0.8745, + "step": 3794 + }, + { + "epoch": 0.59, + "grad_norm": 17.667348752559906, + "learning_rate": 1.8563260705758114e-05, + "loss": 0.7676, + "step": 3795 + }, + { + "epoch": 0.59, + "grad_norm": 26.836344030929713, + "learning_rate": 1.8562389667639016e-05, + "loss": 0.803, + "step": 3796 + }, + { + "epoch": 0.59, + "grad_norm": 15.011330895596231, + "learning_rate": 1.8561518386010567e-05, + "loss": 0.7189, + "step": 3797 + }, + { + "epoch": 0.59, + "grad_norm": 20.048253176286806, + "learning_rate": 1.856064686089754e-05, + "loss": 0.7731, + "step": 3798 + }, + { + "epoch": 0.59, + "grad_norm": 12.730891031388188, + "learning_rate": 1.855977509232473e-05, + "loss": 0.7178, + "step": 3799 + }, + { + "epoch": 0.59, + "grad_norm": 18.19745705653203, + "learning_rate": 1.8558903080316922e-05, + "loss": 0.9754, + "step": 3800 + }, + { + "epoch": 0.59, + "grad_norm": 15.072025045541388, + "learning_rate": 1.8558030824898915e-05, + "loss": 0.8115, + "step": 3801 + }, + { + "epoch": 0.59, + "grad_norm": 17.687996764460728, + "learning_rate": 1.8557158326095515e-05, + "loss": 0.7821, + "step": 3802 + }, + { + "epoch": 0.59, + "grad_norm": 27.291128992773302, + "learning_rate": 1.8556285583931547e-05, + "loss": 0.8319, + "step": 3803 + }, + { + "epoch": 0.59, + "grad_norm": 31.95447375148788, + "learning_rate": 1.8555412598431814e-05, + "loss": 0.6964, + "step": 3804 + }, + { + "epoch": 0.59, + "grad_norm": 18.714737086269803, + "learning_rate": 1.8554539369621156e-05, + "loss": 0.9218, + "step": 3805 + }, + { + "epoch": 0.59, + "grad_norm": 33.00982659509593, + "learning_rate": 1.85536658975244e-05, + "loss": 0.8843, + "step": 3806 + }, + { + "epoch": 0.59, + "grad_norm": 16.160791135197275, + "learning_rate": 1.8552792182166393e-05, + "loss": 0.7222, + "step": 3807 + }, + { + "epoch": 0.59, + "grad_norm": 18.08386377841724, + "learning_rate": 1.8551918223571975e-05, + "loss": 0.7301, + "step": 3808 + }, + { + "epoch": 0.59, + "grad_norm": 17.394686121050924, + "learning_rate": 1.855104402176601e-05, + "loss": 0.7419, + "step": 3809 + }, + { + "epoch": 0.6, + "grad_norm": 23.767436149750548, + "learning_rate": 1.8550169576773353e-05, + "loss": 0.7551, + "step": 3810 + }, + { + "epoch": 0.6, + "grad_norm": 15.201831240165006, + "learning_rate": 1.8549294888618872e-05, + "loss": 0.7064, + "step": 3811 + }, + { + "epoch": 0.6, + "grad_norm": 21.679244004258322, + "learning_rate": 1.8548419957327454e-05, + "loss": 0.7568, + "step": 3812 + }, + { + "epoch": 0.6, + "grad_norm": 43.70672313633746, + "learning_rate": 1.8547544782923963e-05, + "loss": 0.809, + "step": 3813 + }, + { + "epoch": 0.6, + "grad_norm": 31.798978130979503, + "learning_rate": 1.8546669365433305e-05, + "loss": 0.7657, + "step": 3814 + }, + { + "epoch": 0.6, + "grad_norm": 17.444162700736577, + "learning_rate": 
1.8545793704880367e-05, + "loss": 0.8721, + "step": 3815 + }, + { + "epoch": 0.6, + "grad_norm": 26.541801941792407, + "learning_rate": 1.854491780129006e-05, + "loss": 0.6517, + "step": 3816 + }, + { + "epoch": 0.6, + "grad_norm": 23.685965498701293, + "learning_rate": 1.8544041654687282e-05, + "loss": 0.7482, + "step": 3817 + }, + { + "epoch": 0.6, + "grad_norm": 23.180410440072112, + "learning_rate": 1.8543165265096962e-05, + "loss": 0.793, + "step": 3818 + }, + { + "epoch": 0.6, + "grad_norm": 17.74174721447432, + "learning_rate": 1.854228863254402e-05, + "loss": 0.7929, + "step": 3819 + }, + { + "epoch": 0.6, + "grad_norm": 16.68191396786097, + "learning_rate": 1.854141175705338e-05, + "loss": 0.755, + "step": 3820 + }, + { + "epoch": 0.6, + "grad_norm": 18.026995382528284, + "learning_rate": 1.8540534638649994e-05, + "loss": 0.8245, + "step": 3821 + }, + { + "epoch": 0.6, + "grad_norm": 18.431292069860227, + "learning_rate": 1.8539657277358798e-05, + "loss": 0.7911, + "step": 3822 + }, + { + "epoch": 0.6, + "grad_norm": 19.52814984093385, + "learning_rate": 1.8538779673204743e-05, + "loss": 0.8422, + "step": 3823 + }, + { + "epoch": 0.6, + "grad_norm": 13.641874603503572, + "learning_rate": 1.8537901826212786e-05, + "loss": 0.7668, + "step": 3824 + }, + { + "epoch": 0.6, + "grad_norm": 15.763300835906904, + "learning_rate": 1.8537023736407898e-05, + "loss": 0.7732, + "step": 3825 + }, + { + "epoch": 0.6, + "grad_norm": 17.46968069224584, + "learning_rate": 1.853614540381505e-05, + "loss": 0.8077, + "step": 3826 + }, + { + "epoch": 0.6, + "grad_norm": 23.594141751630684, + "learning_rate": 1.853526682845922e-05, + "loss": 0.8224, + "step": 3827 + }, + { + "epoch": 0.6, + "grad_norm": 20.16988856553982, + "learning_rate": 1.8534388010365397e-05, + "loss": 0.839, + "step": 3828 + }, + { + "epoch": 0.6, + "grad_norm": 64.85110000732195, + "learning_rate": 1.8533508949558564e-05, + "loss": 0.84, + "step": 3829 + }, + { + "epoch": 0.6, + "grad_norm": 20.32852115215774, + "learning_rate": 1.8532629646063737e-05, + "loss": 0.8396, + "step": 3830 + }, + { + "epoch": 0.6, + "grad_norm": 15.524113179146838, + "learning_rate": 1.853175009990591e-05, + "loss": 0.7346, + "step": 3831 + }, + { + "epoch": 0.6, + "grad_norm": 23.23100621134276, + "learning_rate": 1.85308703111101e-05, + "loss": 0.7434, + "step": 3832 + }, + { + "epoch": 0.6, + "grad_norm": 18.980683415202517, + "learning_rate": 1.852999027970133e-05, + "loss": 0.7483, + "step": 3833 + }, + { + "epoch": 0.6, + "grad_norm": 30.459384500970334, + "learning_rate": 1.8529110005704627e-05, + "loss": 0.8256, + "step": 3834 + }, + { + "epoch": 0.6, + "grad_norm": 16.785462742635673, + "learning_rate": 1.8528229489145023e-05, + "loss": 0.7698, + "step": 3835 + }, + { + "epoch": 0.6, + "grad_norm": 14.966389276290089, + "learning_rate": 1.8527348730047563e-05, + "loss": 0.7426, + "step": 3836 + }, + { + "epoch": 0.6, + "grad_norm": 25.438100229558053, + "learning_rate": 1.8526467728437292e-05, + "loss": 0.7875, + "step": 3837 + }, + { + "epoch": 0.6, + "grad_norm": 30.353504072658048, + "learning_rate": 1.852558648433927e-05, + "loss": 0.762, + "step": 3838 + }, + { + "epoch": 0.6, + "grad_norm": 24.14142603363089, + "learning_rate": 1.852470499777855e-05, + "loss": 0.919, + "step": 3839 + }, + { + "epoch": 0.6, + "grad_norm": 21.39316143660563, + "learning_rate": 1.8523823268780214e-05, + "loss": 0.7851, + "step": 3840 + }, + { + "epoch": 0.6, + "grad_norm": 14.969173797708438, + "learning_rate": 1.8522941297369324e-05, + "loss": 0.7629, + 
"step": 3841 + }, + { + "epoch": 0.6, + "grad_norm": 14.20639261315958, + "learning_rate": 1.8522059083570972e-05, + "loss": 0.7311, + "step": 3842 + }, + { + "epoch": 0.6, + "grad_norm": 29.990470734251467, + "learning_rate": 1.8521176627410246e-05, + "loss": 0.7724, + "step": 3843 + }, + { + "epoch": 0.6, + "grad_norm": 19.021110580348523, + "learning_rate": 1.852029392891224e-05, + "loss": 0.8001, + "step": 3844 + }, + { + "epoch": 0.6, + "grad_norm": 15.79630895066451, + "learning_rate": 1.8519410988102058e-05, + "loss": 0.7395, + "step": 3845 + }, + { + "epoch": 0.6, + "grad_norm": 23.08821192734763, + "learning_rate": 1.8518527805004812e-05, + "loss": 0.7995, + "step": 3846 + }, + { + "epoch": 0.6, + "grad_norm": 15.586739605506358, + "learning_rate": 1.851764437964562e-05, + "loss": 0.7433, + "step": 3847 + }, + { + "epoch": 0.6, + "grad_norm": 16.41879667817107, + "learning_rate": 1.8516760712049603e-05, + "loss": 0.7605, + "step": 3848 + }, + { + "epoch": 0.6, + "grad_norm": 12.800195746349994, + "learning_rate": 1.8515876802241894e-05, + "loss": 0.6754, + "step": 3849 + }, + { + "epoch": 0.6, + "grad_norm": 20.735721712568434, + "learning_rate": 1.8514992650247627e-05, + "loss": 0.8175, + "step": 3850 + }, + { + "epoch": 0.6, + "grad_norm": 25.940824373759135, + "learning_rate": 1.8514108256091953e-05, + "loss": 0.8489, + "step": 3851 + }, + { + "epoch": 0.6, + "grad_norm": 14.821477867824576, + "learning_rate": 1.8513223619800022e-05, + "loss": 0.7689, + "step": 3852 + }, + { + "epoch": 0.6, + "grad_norm": 17.62874136775155, + "learning_rate": 1.851233874139699e-05, + "loss": 0.8623, + "step": 3853 + }, + { + "epoch": 0.6, + "grad_norm": 16.45067523042239, + "learning_rate": 1.8511453620908027e-05, + "loss": 0.7721, + "step": 3854 + }, + { + "epoch": 0.6, + "grad_norm": 18.27249941593221, + "learning_rate": 1.8510568258358298e-05, + "loss": 0.7848, + "step": 3855 + }, + { + "epoch": 0.6, + "grad_norm": 19.267452031925085, + "learning_rate": 1.850968265377299e-05, + "loss": 0.7377, + "step": 3856 + }, + { + "epoch": 0.6, + "grad_norm": 16.04300267727044, + "learning_rate": 1.850879680717728e-05, + "loss": 0.8161, + "step": 3857 + }, + { + "epoch": 0.6, + "grad_norm": 14.973969469370363, + "learning_rate": 1.850791071859637e-05, + "loss": 0.7655, + "step": 3858 + }, + { + "epoch": 0.6, + "grad_norm": 13.764558773012604, + "learning_rate": 1.8507024388055456e-05, + "loss": 0.8755, + "step": 3859 + }, + { + "epoch": 0.6, + "grad_norm": 20.076852204468675, + "learning_rate": 1.8506137815579744e-05, + "loss": 0.7986, + "step": 3860 + }, + { + "epoch": 0.6, + "grad_norm": 23.58434569656616, + "learning_rate": 1.8505251001194445e-05, + "loss": 0.8154, + "step": 3861 + }, + { + "epoch": 0.6, + "grad_norm": 11.40001003903859, + "learning_rate": 1.850436394492479e-05, + "loss": 0.7052, + "step": 3862 + }, + { + "epoch": 0.6, + "grad_norm": 15.656556720356061, + "learning_rate": 1.8503476646795996e-05, + "loss": 0.6883, + "step": 3863 + }, + { + "epoch": 0.6, + "grad_norm": 17.207146983455665, + "learning_rate": 1.85025891068333e-05, + "loss": 0.8264, + "step": 3864 + }, + { + "epoch": 0.6, + "grad_norm": 13.353768880110001, + "learning_rate": 1.8501701325061943e-05, + "loss": 0.7402, + "step": 3865 + }, + { + "epoch": 0.6, + "grad_norm": 15.248055625298326, + "learning_rate": 1.8500813301507174e-05, + "loss": 0.7408, + "step": 3866 + }, + { + "epoch": 0.6, + "grad_norm": 22.465835975885266, + "learning_rate": 1.8499925036194247e-05, + "loss": 0.8808, + "step": 3867 + }, + { + "epoch": 0.6, 
+ "grad_norm": 15.006840873991008, + "learning_rate": 1.849903652914843e-05, + "loss": 0.785, + "step": 3868 + }, + { + "epoch": 0.6, + "grad_norm": 16.900283448110795, + "learning_rate": 1.849814778039498e-05, + "loss": 0.8314, + "step": 3869 + }, + { + "epoch": 0.6, + "grad_norm": 21.997378637458333, + "learning_rate": 1.849725878995918e-05, + "loss": 0.8069, + "step": 3870 + }, + { + "epoch": 0.6, + "grad_norm": 18.670353523418115, + "learning_rate": 1.849636955786631e-05, + "loss": 0.7975, + "step": 3871 + }, + { + "epoch": 0.6, + "grad_norm": 12.908022661833616, + "learning_rate": 1.8495480084141662e-05, + "loss": 0.8225, + "step": 3872 + }, + { + "epoch": 0.6, + "grad_norm": 26.254595375864266, + "learning_rate": 1.849459036881053e-05, + "loss": 0.8323, + "step": 3873 + }, + { + "epoch": 0.61, + "grad_norm": 29.21850568801818, + "learning_rate": 1.8493700411898213e-05, + "loss": 0.8218, + "step": 3874 + }, + { + "epoch": 0.61, + "grad_norm": 18.41556248323299, + "learning_rate": 1.8492810213430027e-05, + "loss": 0.6852, + "step": 3875 + }, + { + "epoch": 0.61, + "grad_norm": 22.42568565445289, + "learning_rate": 1.849191977343129e-05, + "loss": 0.8137, + "step": 3876 + }, + { + "epoch": 0.61, + "grad_norm": 20.13855530497266, + "learning_rate": 1.849102909192732e-05, + "loss": 0.8085, + "step": 3877 + }, + { + "epoch": 0.61, + "grad_norm": 24.071614659543368, + "learning_rate": 1.849013816894345e-05, + "loss": 0.8206, + "step": 3878 + }, + { + "epoch": 0.61, + "grad_norm": 16.880815030354963, + "learning_rate": 1.8489247004505017e-05, + "loss": 0.8588, + "step": 3879 + }, + { + "epoch": 0.61, + "grad_norm": 20.932620809459976, + "learning_rate": 1.8488355598637362e-05, + "loss": 0.8097, + "step": 3880 + }, + { + "epoch": 0.61, + "grad_norm": 19.821513297094455, + "learning_rate": 1.8487463951365842e-05, + "loss": 0.6766, + "step": 3881 + }, + { + "epoch": 0.61, + "grad_norm": 25.381460687751073, + "learning_rate": 1.8486572062715814e-05, + "loss": 0.7676, + "step": 3882 + }, + { + "epoch": 0.61, + "grad_norm": 12.694321796201884, + "learning_rate": 1.848567993271264e-05, + "loss": 0.851, + "step": 3883 + }, + { + "epoch": 0.61, + "grad_norm": 23.91939712986684, + "learning_rate": 1.848478756138169e-05, + "loss": 0.7627, + "step": 3884 + }, + { + "epoch": 0.61, + "grad_norm": 27.575489992472782, + "learning_rate": 1.8483894948748348e-05, + "loss": 0.9076, + "step": 3885 + }, + { + "epoch": 0.61, + "grad_norm": 20.200503250736677, + "learning_rate": 1.8483002094837998e-05, + "loss": 0.7739, + "step": 3886 + }, + { + "epoch": 0.61, + "grad_norm": 24.171025317348672, + "learning_rate": 1.848210899967603e-05, + "loss": 0.7689, + "step": 3887 + }, + { + "epoch": 0.61, + "grad_norm": 18.639976547742176, + "learning_rate": 1.848121566328784e-05, + "loss": 0.7419, + "step": 3888 + }, + { + "epoch": 0.61, + "grad_norm": 25.321154784917912, + "learning_rate": 1.848032208569884e-05, + "loss": 0.8569, + "step": 3889 + }, + { + "epoch": 0.61, + "grad_norm": 20.313079087732586, + "learning_rate": 1.8479428266934442e-05, + "loss": 0.8096, + "step": 3890 + }, + { + "epoch": 0.61, + "grad_norm": 11.55765342883998, + "learning_rate": 1.8478534207020066e-05, + "loss": 0.7158, + "step": 3891 + }, + { + "epoch": 0.61, + "grad_norm": 24.29365973096387, + "learning_rate": 1.8477639905981135e-05, + "loss": 0.8127, + "step": 3892 + }, + { + "epoch": 0.61, + "grad_norm": 27.826071288049587, + "learning_rate": 1.8476745363843082e-05, + "loss": 0.7439, + "step": 3893 + }, + { + "epoch": 0.61, + "grad_norm": 
18.669471236448274, + "learning_rate": 1.847585058063135e-05, + "loss": 0.8184, + "step": 3894 + }, + { + "epoch": 0.61, + "grad_norm": 27.851623443601042, + "learning_rate": 1.847495555637139e-05, + "loss": 0.7586, + "step": 3895 + }, + { + "epoch": 0.61, + "grad_norm": 18.347098516052323, + "learning_rate": 1.8474060291088646e-05, + "loss": 0.7504, + "step": 3896 + }, + { + "epoch": 0.61, + "grad_norm": 20.727692649101545, + "learning_rate": 1.8473164784808592e-05, + "loss": 0.8458, + "step": 3897 + }, + { + "epoch": 0.61, + "grad_norm": 20.36514862589803, + "learning_rate": 1.8472269037556683e-05, + "loss": 0.7923, + "step": 3898 + }, + { + "epoch": 0.61, + "grad_norm": 30.892618550586896, + "learning_rate": 1.84713730493584e-05, + "loss": 0.7549, + "step": 3899 + }, + { + "epoch": 0.61, + "grad_norm": 16.007721708458217, + "learning_rate": 1.847047682023922e-05, + "loss": 0.7426, + "step": 3900 + }, + { + "epoch": 0.61, + "grad_norm": 20.439207536485934, + "learning_rate": 1.8469580350224636e-05, + "loss": 0.7434, + "step": 3901 + }, + { + "epoch": 0.61, + "grad_norm": 30.340700094476045, + "learning_rate": 1.8468683639340145e-05, + "loss": 0.8639, + "step": 3902 + }, + { + "epoch": 0.61, + "grad_norm": 16.80755338434516, + "learning_rate": 1.846778668761124e-05, + "loss": 0.9562, + "step": 3903 + }, + { + "epoch": 0.61, + "grad_norm": 5.444557099233771, + "learning_rate": 1.846688949506344e-05, + "loss": 0.773, + "step": 3904 + }, + { + "epoch": 0.61, + "grad_norm": 13.873548635833972, + "learning_rate": 1.8465992061722255e-05, + "loss": 0.7355, + "step": 3905 + }, + { + "epoch": 0.61, + "grad_norm": 24.731886725897116, + "learning_rate": 1.8465094387613204e-05, + "loss": 0.8077, + "step": 3906 + }, + { + "epoch": 0.61, + "grad_norm": 19.668701540451185, + "learning_rate": 1.8464196472761825e-05, + "loss": 0.9629, + "step": 3907 + }, + { + "epoch": 0.61, + "grad_norm": 13.96158451594049, + "learning_rate": 1.8463298317193647e-05, + "loss": 0.8042, + "step": 3908 + }, + { + "epoch": 0.61, + "grad_norm": 22.66219790157908, + "learning_rate": 1.846239992093422e-05, + "loss": 0.7923, + "step": 3909 + }, + { + "epoch": 0.61, + "grad_norm": 16.93281560075687, + "learning_rate": 1.8461501284009085e-05, + "loss": 0.7495, + "step": 3910 + }, + { + "epoch": 0.61, + "grad_norm": 17.829182751826917, + "learning_rate": 1.8460602406443802e-05, + "loss": 0.7514, + "step": 3911 + }, + { + "epoch": 0.61, + "grad_norm": 18.564662925341402, + "learning_rate": 1.845970328826394e-05, + "loss": 0.81, + "step": 3912 + }, + { + "epoch": 0.61, + "grad_norm": 15.968870285685993, + "learning_rate": 1.8458803929495062e-05, + "loss": 0.8361, + "step": 3913 + }, + { + "epoch": 0.61, + "grad_norm": 27.28492253503119, + "learning_rate": 1.8457904330162752e-05, + "loss": 0.8537, + "step": 3914 + }, + { + "epoch": 0.61, + "grad_norm": 18.33909896703304, + "learning_rate": 1.8457004490292588e-05, + "loss": 0.7779, + "step": 3915 + }, + { + "epoch": 0.61, + "grad_norm": 24.2744878738717, + "learning_rate": 1.8456104409910165e-05, + "loss": 0.8338, + "step": 3916 + }, + { + "epoch": 0.61, + "grad_norm": 30.352666432201893, + "learning_rate": 1.845520408904108e-05, + "loss": 0.9, + "step": 3917 + }, + { + "epoch": 0.61, + "grad_norm": 26.65282672610522, + "learning_rate": 1.8454303527710934e-05, + "loss": 0.7512, + "step": 3918 + }, + { + "epoch": 0.61, + "grad_norm": 15.784527142814259, + "learning_rate": 1.8453402725945343e-05, + "loss": 0.7653, + "step": 3919 + }, + { + "epoch": 0.61, + "grad_norm": 13.254694785066972, 
+ "learning_rate": 1.8452501683769924e-05, + "loss": 0.7724, + "step": 3920 + }, + { + "epoch": 0.61, + "grad_norm": 15.85627894130136, + "learning_rate": 1.8451600401210303e-05, + "loss": 0.7332, + "step": 3921 + }, + { + "epoch": 0.61, + "grad_norm": 26.047388374930797, + "learning_rate": 1.8450698878292106e-05, + "loss": 1.0138, + "step": 3922 + }, + { + "epoch": 0.61, + "grad_norm": 17.418785834027496, + "learning_rate": 1.844979711504098e-05, + "loss": 0.8606, + "step": 3923 + }, + { + "epoch": 0.61, + "grad_norm": 21.816517696932664, + "learning_rate": 1.8448895111482565e-05, + "loss": 0.8204, + "step": 3924 + }, + { + "epoch": 0.61, + "grad_norm": 17.214806980589778, + "learning_rate": 1.8447992867642518e-05, + "loss": 0.8153, + "step": 3925 + }, + { + "epoch": 0.61, + "grad_norm": 20.205753759461043, + "learning_rate": 1.8447090383546494e-05, + "loss": 0.689, + "step": 3926 + }, + { + "epoch": 0.61, + "grad_norm": 13.492803046007635, + "learning_rate": 1.8446187659220163e-05, + "loss": 0.698, + "step": 3927 + }, + { + "epoch": 0.61, + "grad_norm": 13.529158688303893, + "learning_rate": 1.8445284694689193e-05, + "loss": 0.8182, + "step": 3928 + }, + { + "epoch": 0.61, + "grad_norm": 15.343326230616558, + "learning_rate": 1.8444381489979267e-05, + "loss": 0.7947, + "step": 3929 + }, + { + "epoch": 0.61, + "grad_norm": 16.393176317521146, + "learning_rate": 1.8443478045116072e-05, + "loss": 0.7321, + "step": 3930 + }, + { + "epoch": 0.61, + "grad_norm": 22.799329624887452, + "learning_rate": 1.84425743601253e-05, + "loss": 0.7459, + "step": 3931 + }, + { + "epoch": 0.61, + "grad_norm": 23.987049691732953, + "learning_rate": 1.8441670435032654e-05, + "loss": 0.7579, + "step": 3932 + }, + { + "epoch": 0.61, + "grad_norm": 20.089735649065783, + "learning_rate": 1.8440766269863838e-05, + "loss": 0.7613, + "step": 3933 + }, + { + "epoch": 0.61, + "grad_norm": 24.310592131543064, + "learning_rate": 1.8439861864644567e-05, + "loss": 0.8116, + "step": 3934 + }, + { + "epoch": 0.61, + "grad_norm": 29.16116392434744, + "learning_rate": 1.843895721940056e-05, + "loss": 0.8648, + "step": 3935 + }, + { + "epoch": 0.61, + "grad_norm": 28.05762151270861, + "learning_rate": 1.843805233415755e-05, + "loss": 0.8286, + "step": 3936 + }, + { + "epoch": 0.61, + "grad_norm": 13.51066513673727, + "learning_rate": 1.8437147208941265e-05, + "loss": 0.8016, + "step": 3937 + }, + { + "epoch": 0.62, + "grad_norm": 28.732777431316652, + "learning_rate": 1.843624184377745e-05, + "loss": 0.7766, + "step": 3938 + }, + { + "epoch": 0.62, + "grad_norm": 18.46432995267984, + "learning_rate": 1.8435336238691853e-05, + "loss": 0.7877, + "step": 3939 + }, + { + "epoch": 0.62, + "grad_norm": 26.58450417602139, + "learning_rate": 1.8434430393710224e-05, + "loss": 0.8457, + "step": 3940 + }, + { + "epoch": 0.62, + "grad_norm": 22.97657099194777, + "learning_rate": 1.8433524308858334e-05, + "loss": 0.8222, + "step": 3941 + }, + { + "epoch": 0.62, + "grad_norm": 22.80400193802145, + "learning_rate": 1.843261798416194e-05, + "loss": 0.7666, + "step": 3942 + }, + { + "epoch": 0.62, + "grad_norm": 14.739879411047784, + "learning_rate": 1.8431711419646826e-05, + "loss": 0.7505, + "step": 3943 + }, + { + "epoch": 0.62, + "grad_norm": 13.493484900760645, + "learning_rate": 1.8430804615338774e-05, + "loss": 0.704, + "step": 3944 + }, + { + "epoch": 0.62, + "grad_norm": 18.62165709232176, + "learning_rate": 1.842989757126357e-05, + "loss": 0.8114, + "step": 3945 + }, + { + "epoch": 0.62, + "grad_norm": 19.9087229641836, + 
"learning_rate": 1.8428990287447006e-05, + "loss": 0.7421, + "step": 3946 + }, + { + "epoch": 0.62, + "grad_norm": 20.44888273325592, + "learning_rate": 1.8428082763914893e-05, + "loss": 0.7429, + "step": 3947 + }, + { + "epoch": 0.62, + "grad_norm": 27.407702641282988, + "learning_rate": 1.842717500069304e-05, + "loss": 0.7829, + "step": 3948 + }, + { + "epoch": 0.62, + "grad_norm": 10.696606699158387, + "learning_rate": 1.8426266997807252e-05, + "loss": 0.766, + "step": 3949 + }, + { + "epoch": 0.62, + "grad_norm": 19.182272009347923, + "learning_rate": 1.8425358755283365e-05, + "loss": 0.7823, + "step": 3950 + }, + { + "epoch": 0.62, + "grad_norm": 19.51357873522703, + "learning_rate": 1.84244502731472e-05, + "loss": 0.8084, + "step": 3951 + }, + { + "epoch": 0.62, + "grad_norm": 21.2355762588216, + "learning_rate": 1.84235415514246e-05, + "loss": 0.7781, + "step": 3952 + }, + { + "epoch": 0.62, + "grad_norm": 18.900462960225724, + "learning_rate": 1.8422632590141407e-05, + "loss": 0.7362, + "step": 3953 + }, + { + "epoch": 0.62, + "grad_norm": 15.374814147232982, + "learning_rate": 1.8421723389323468e-05, + "loss": 0.6842, + "step": 3954 + }, + { + "epoch": 0.62, + "grad_norm": 16.95554232832515, + "learning_rate": 1.8420813948996643e-05, + "loss": 0.7623, + "step": 3955 + }, + { + "epoch": 0.62, + "grad_norm": 26.167381137093408, + "learning_rate": 1.8419904269186797e-05, + "loss": 0.8981, + "step": 3956 + }, + { + "epoch": 0.62, + "grad_norm": 23.079421702367732, + "learning_rate": 1.8418994349919798e-05, + "loss": 0.9015, + "step": 3957 + }, + { + "epoch": 0.62, + "grad_norm": 21.286503494870637, + "learning_rate": 1.8418084191221522e-05, + "loss": 0.9776, + "step": 3958 + }, + { + "epoch": 0.62, + "grad_norm": 21.228907204678052, + "learning_rate": 1.8417173793117857e-05, + "loss": 0.7629, + "step": 3959 + }, + { + "epoch": 0.62, + "grad_norm": 16.949775742036135, + "learning_rate": 1.8416263155634695e-05, + "loss": 0.7898, + "step": 3960 + }, + { + "epoch": 0.62, + "grad_norm": 22.86129622296271, + "learning_rate": 1.8415352278797932e-05, + "loss": 0.8817, + "step": 3961 + }, + { + "epoch": 0.62, + "grad_norm": 15.527071654136954, + "learning_rate": 1.8414441162633473e-05, + "loss": 0.7815, + "step": 3962 + }, + { + "epoch": 0.62, + "grad_norm": 11.621064669147938, + "learning_rate": 1.8413529807167228e-05, + "loss": 0.7699, + "step": 3963 + }, + { + "epoch": 0.62, + "grad_norm": 13.06265569895257, + "learning_rate": 1.8412618212425116e-05, + "loss": 0.7837, + "step": 3964 + }, + { + "epoch": 0.62, + "grad_norm": 24.55817505382642, + "learning_rate": 1.841170637843307e-05, + "loss": 0.7721, + "step": 3965 + }, + { + "epoch": 0.62, + "grad_norm": 19.239676548274712, + "learning_rate": 1.8410794305217006e-05, + "loss": 0.7911, + "step": 3966 + }, + { + "epoch": 0.62, + "grad_norm": 22.03393289111414, + "learning_rate": 1.8409881992802882e-05, + "loss": 0.8064, + "step": 3967 + }, + { + "epoch": 0.62, + "grad_norm": 23.318176207464848, + "learning_rate": 1.8408969441216628e-05, + "loss": 0.8338, + "step": 3968 + }, + { + "epoch": 0.62, + "grad_norm": 32.12564241846807, + "learning_rate": 1.8408056650484204e-05, + "loss": 0.7723, + "step": 3969 + }, + { + "epoch": 0.62, + "grad_norm": 16.174953355677708, + "learning_rate": 1.8407143620631564e-05, + "loss": 0.7423, + "step": 3970 + }, + { + "epoch": 0.62, + "grad_norm": 18.970926900663347, + "learning_rate": 1.840623035168468e-05, + "loss": 0.7913, + "step": 3971 + }, + { + "epoch": 0.62, + "grad_norm": 14.744496493201806, + 
"learning_rate": 1.8405316843669524e-05, + "loss": 0.6713, + "step": 3972 + }, + { + "epoch": 0.62, + "grad_norm": 27.633855168561823, + "learning_rate": 1.840440309661207e-05, + "loss": 0.7697, + "step": 3973 + }, + { + "epoch": 0.62, + "grad_norm": 39.12806122126247, + "learning_rate": 1.8403489110538312e-05, + "loss": 0.7226, + "step": 3974 + }, + { + "epoch": 0.62, + "grad_norm": 29.946253078307834, + "learning_rate": 1.8402574885474238e-05, + "loss": 0.8742, + "step": 3975 + }, + { + "epoch": 0.62, + "grad_norm": 25.150682981912503, + "learning_rate": 1.840166042144585e-05, + "loss": 0.7331, + "step": 3976 + }, + { + "epoch": 0.62, + "grad_norm": 15.814120574315359, + "learning_rate": 1.8400745718479155e-05, + "loss": 0.8319, + "step": 3977 + }, + { + "epoch": 0.62, + "grad_norm": 14.955471699729365, + "learning_rate": 1.8399830776600164e-05, + "loss": 0.7965, + "step": 3978 + }, + { + "epoch": 0.62, + "grad_norm": 21.239781488610827, + "learning_rate": 1.83989155958349e-05, + "loss": 0.7555, + "step": 3979 + }, + { + "epoch": 0.62, + "grad_norm": 23.13886507426497, + "learning_rate": 1.839800017620939e-05, + "loss": 0.8598, + "step": 3980 + }, + { + "epoch": 0.62, + "grad_norm": 17.824127672720582, + "learning_rate": 1.8397084517749668e-05, + "loss": 0.8674, + "step": 3981 + }, + { + "epoch": 0.62, + "grad_norm": 24.81647318274528, + "learning_rate": 1.8396168620481772e-05, + "loss": 0.8192, + "step": 3982 + }, + { + "epoch": 0.62, + "grad_norm": 17.2916935686875, + "learning_rate": 1.8395252484431758e-05, + "loss": 0.8028, + "step": 3983 + }, + { + "epoch": 0.62, + "grad_norm": 21.95360975443012, + "learning_rate": 1.839433610962567e-05, + "loss": 0.8024, + "step": 3984 + }, + { + "epoch": 0.62, + "grad_norm": 13.174789954633832, + "learning_rate": 1.8393419496089577e-05, + "loss": 0.7694, + "step": 3985 + }, + { + "epoch": 0.62, + "grad_norm": 23.955312698652683, + "learning_rate": 1.839250264384954e-05, + "loss": 0.8709, + "step": 3986 + }, + { + "epoch": 0.62, + "grad_norm": 25.585178347674756, + "learning_rate": 1.839158555293164e-05, + "loss": 0.7702, + "step": 3987 + }, + { + "epoch": 0.62, + "grad_norm": 21.922154810201178, + "learning_rate": 1.839066822336195e-05, + "loss": 0.7046, + "step": 3988 + }, + { + "epoch": 0.62, + "grad_norm": 20.116644496070613, + "learning_rate": 1.8389750655166572e-05, + "loss": 0.7483, + "step": 3989 + }, + { + "epoch": 0.62, + "grad_norm": 31.005086219098988, + "learning_rate": 1.8388832848371588e-05, + "loss": 0.8069, + "step": 3990 + }, + { + "epoch": 0.62, + "grad_norm": 21.008058943035326, + "learning_rate": 1.8387914803003106e-05, + "loss": 0.7936, + "step": 3991 + }, + { + "epoch": 0.62, + "grad_norm": 21.647187100838746, + "learning_rate": 1.8386996519087236e-05, + "loss": 0.7547, + "step": 3992 + }, + { + "epoch": 0.62, + "grad_norm": 19.94987403505995, + "learning_rate": 1.8386077996650086e-05, + "loss": 0.7883, + "step": 3993 + }, + { + "epoch": 0.62, + "grad_norm": 13.975370734778773, + "learning_rate": 1.838515923571779e-05, + "loss": 0.8114, + "step": 3994 + }, + { + "epoch": 0.62, + "grad_norm": 20.02200142409825, + "learning_rate": 1.838424023631647e-05, + "loss": 0.8497, + "step": 3995 + }, + { + "epoch": 0.62, + "grad_norm": 13.750925009943543, + "learning_rate": 1.838332099847226e-05, + "loss": 0.6907, + "step": 3996 + }, + { + "epoch": 0.62, + "grad_norm": 27.149989611490202, + "learning_rate": 1.8382401522211304e-05, + "loss": 0.8797, + "step": 3997 + }, + { + "epoch": 0.62, + "grad_norm": 22.249574071512278, + 
"learning_rate": 1.838148180755975e-05, + "loss": 0.71, + "step": 3998 + }, + { + "epoch": 0.62, + "grad_norm": 25.632264021819047, + "learning_rate": 1.8380561854543763e-05, + "loss": 0.838, + "step": 3999 + }, + { + "epoch": 0.62, + "grad_norm": 15.405313161566008, + "learning_rate": 1.8379641663189493e-05, + "loss": 0.8205, + "step": 4000 + }, + { + "epoch": 0.62, + "grad_norm": 21.319739297686727, + "learning_rate": 1.837872123352312e-05, + "loss": 0.7407, + "step": 4001 + }, + { + "epoch": 0.63, + "grad_norm": 14.747272550442679, + "learning_rate": 1.837780056557082e-05, + "loss": 0.7633, + "step": 4002 + }, + { + "epoch": 0.63, + "grad_norm": 21.668417350866296, + "learning_rate": 1.8376879659358767e-05, + "loss": 0.7911, + "step": 4003 + }, + { + "epoch": 0.63, + "grad_norm": 16.531101403262607, + "learning_rate": 1.8375958514913158e-05, + "loss": 0.7659, + "step": 4004 + }, + { + "epoch": 0.63, + "grad_norm": 15.707772077855429, + "learning_rate": 1.8375037132260187e-05, + "loss": 0.7484, + "step": 4005 + }, + { + "epoch": 0.63, + "grad_norm": 22.826776165706086, + "learning_rate": 1.837411551142606e-05, + "loss": 0.8314, + "step": 4006 + }, + { + "epoch": 0.63, + "grad_norm": 21.591839304191762, + "learning_rate": 1.837319365243699e-05, + "loss": 0.7947, + "step": 4007 + }, + { + "epoch": 0.63, + "grad_norm": 21.507425071948727, + "learning_rate": 1.8372271555319185e-05, + "loss": 0.7932, + "step": 4008 + }, + { + "epoch": 0.63, + "grad_norm": 27.13151214310836, + "learning_rate": 1.837134922009888e-05, + "loss": 0.802, + "step": 4009 + }, + { + "epoch": 0.63, + "grad_norm": 19.195559047098417, + "learning_rate": 1.8370426646802298e-05, + "loss": 0.7496, + "step": 4010 + }, + { + "epoch": 0.63, + "grad_norm": 26.029585745047147, + "learning_rate": 1.8369503835455676e-05, + "loss": 0.9137, + "step": 4011 + }, + { + "epoch": 0.63, + "grad_norm": 18.172161446902468, + "learning_rate": 1.8368580786085265e-05, + "loss": 0.7937, + "step": 4012 + }, + { + "epoch": 0.63, + "grad_norm": 17.169397638376193, + "learning_rate": 1.836765749871731e-05, + "loss": 0.8181, + "step": 4013 + }, + { + "epoch": 0.63, + "grad_norm": 21.25379491607554, + "learning_rate": 1.8366733973378074e-05, + "loss": 0.8097, + "step": 4014 + }, + { + "epoch": 0.63, + "grad_norm": 19.523381642616236, + "learning_rate": 1.8365810210093814e-05, + "loss": 0.7363, + "step": 4015 + }, + { + "epoch": 0.63, + "grad_norm": 24.965855560785187, + "learning_rate": 1.836488620889081e-05, + "loss": 0.8354, + "step": 4016 + }, + { + "epoch": 0.63, + "grad_norm": 19.242756028629827, + "learning_rate": 1.836396196979533e-05, + "loss": 0.8586, + "step": 4017 + }, + { + "epoch": 0.63, + "grad_norm": 27.550900611148332, + "learning_rate": 1.8363037492833667e-05, + "loss": 0.9081, + "step": 4018 + }, + { + "epoch": 0.63, + "grad_norm": 27.709086276462795, + "learning_rate": 1.836211277803211e-05, + "loss": 0.7596, + "step": 4019 + }, + { + "epoch": 0.63, + "grad_norm": 12.05138982784889, + "learning_rate": 1.836118782541696e-05, + "loss": 0.7134, + "step": 4020 + }, + { + "epoch": 0.63, + "grad_norm": 44.184207096855985, + "learning_rate": 1.8360262635014513e-05, + "loss": 0.8022, + "step": 4021 + }, + { + "epoch": 0.63, + "grad_norm": 17.15848588444529, + "learning_rate": 1.8359337206851094e-05, + "loss": 0.7862, + "step": 4022 + }, + { + "epoch": 0.63, + "grad_norm": 32.793212961511045, + "learning_rate": 1.8358411540953008e-05, + "loss": 0.8714, + "step": 4023 + }, + { + "epoch": 0.63, + "grad_norm": 26.86531251475134, + 
"learning_rate": 1.835748563734659e-05, + "loss": 0.841, + "step": 4024 + }, + { + "epoch": 0.63, + "grad_norm": 24.677800799251358, + "learning_rate": 1.8356559496058172e-05, + "loss": 0.7518, + "step": 4025 + }, + { + "epoch": 0.63, + "grad_norm": 19.5701404761941, + "learning_rate": 1.835563311711409e-05, + "loss": 0.8105, + "step": 4026 + }, + { + "epoch": 0.63, + "grad_norm": 13.806335132849288, + "learning_rate": 1.835470650054069e-05, + "loss": 0.7603, + "step": 4027 + }, + { + "epoch": 0.63, + "grad_norm": 22.27433458205608, + "learning_rate": 1.8353779646364323e-05, + "loss": 0.822, + "step": 4028 + }, + { + "epoch": 0.63, + "grad_norm": 21.796653723875387, + "learning_rate": 1.835285255461135e-05, + "loss": 0.9318, + "step": 4029 + }, + { + "epoch": 0.63, + "grad_norm": 23.65776073187368, + "learning_rate": 1.8351925225308136e-05, + "loss": 0.8439, + "step": 4030 + }, + { + "epoch": 0.63, + "grad_norm": 16.733131728483166, + "learning_rate": 1.8350997658481054e-05, + "loss": 0.7087, + "step": 4031 + }, + { + "epoch": 0.63, + "grad_norm": 20.27642042085711, + "learning_rate": 1.835006985415649e-05, + "loss": 0.7264, + "step": 4032 + }, + { + "epoch": 0.63, + "grad_norm": 31.554432419541385, + "learning_rate": 1.8349141812360815e-05, + "loss": 0.8506, + "step": 4033 + }, + { + "epoch": 0.63, + "grad_norm": 20.956954534626647, + "learning_rate": 1.8348213533120437e-05, + "loss": 0.7942, + "step": 4034 + }, + { + "epoch": 0.63, + "grad_norm": 31.273529697752117, + "learning_rate": 1.8347285016461745e-05, + "loss": 0.8884, + "step": 4035 + }, + { + "epoch": 0.63, + "grad_norm": 23.249950737670268, + "learning_rate": 1.8346356262411157e-05, + "loss": 0.7631, + "step": 4036 + }, + { + "epoch": 0.63, + "grad_norm": 24.02906163759453, + "learning_rate": 1.8345427270995072e-05, + "loss": 0.7787, + "step": 4037 + }, + { + "epoch": 0.63, + "grad_norm": 21.05649738565577, + "learning_rate": 1.8344498042239922e-05, + "loss": 0.7353, + "step": 4038 + }, + { + "epoch": 0.63, + "grad_norm": 14.38100596628698, + "learning_rate": 1.8343568576172127e-05, + "loss": 0.7436, + "step": 4039 + }, + { + "epoch": 0.63, + "grad_norm": 21.546504893730834, + "learning_rate": 1.834263887281812e-05, + "loss": 0.82, + "step": 4040 + }, + { + "epoch": 0.63, + "grad_norm": 16.93560101889014, + "learning_rate": 1.834170893220435e-05, + "loss": 0.6939, + "step": 4041 + }, + { + "epoch": 0.63, + "grad_norm": 17.58899066922581, + "learning_rate": 1.8340778754357252e-05, + "loss": 0.7755, + "step": 4042 + }, + { + "epoch": 0.63, + "grad_norm": 25.40402393981906, + "learning_rate": 1.833984833930329e-05, + "loss": 0.8215, + "step": 4043 + }, + { + "epoch": 0.63, + "grad_norm": 18.259975279949824, + "learning_rate": 1.8338917687068916e-05, + "loss": 0.7213, + "step": 4044 + }, + { + "epoch": 0.63, + "grad_norm": 19.97638652598689, + "learning_rate": 1.8337986797680604e-05, + "loss": 0.8617, + "step": 4045 + }, + { + "epoch": 0.63, + "grad_norm": 16.826331171398678, + "learning_rate": 1.8337055671164826e-05, + "loss": 0.7763, + "step": 4046 + }, + { + "epoch": 0.63, + "grad_norm": 22.21938759892234, + "learning_rate": 1.833612430754806e-05, + "loss": 0.7074, + "step": 4047 + }, + { + "epoch": 0.63, + "grad_norm": 17.975197932986642, + "learning_rate": 1.8335192706856792e-05, + "loss": 0.7524, + "step": 4048 + }, + { + "epoch": 0.63, + "grad_norm": 13.644701512975637, + "learning_rate": 1.8334260869117524e-05, + "loss": 0.8038, + "step": 4049 + }, + { + "epoch": 0.63, + "grad_norm": 36.40626696966864, + "learning_rate": 
1.833332879435675e-05, + "loss": 0.7759, + "step": 4050 + }, + { + "epoch": 0.63, + "grad_norm": 29.939665329817146, + "learning_rate": 1.833239648260098e-05, + "loss": 0.7455, + "step": 4051 + }, + { + "epoch": 0.63, + "grad_norm": 19.847727211801143, + "learning_rate": 1.833146393387673e-05, + "loss": 0.7344, + "step": 4052 + }, + { + "epoch": 0.63, + "grad_norm": 17.72879469205248, + "learning_rate": 1.8330531148210516e-05, + "loss": 0.8269, + "step": 4053 + }, + { + "epoch": 0.63, + "grad_norm": 19.034760398827235, + "learning_rate": 1.8329598125628873e-05, + "loss": 0.79, + "step": 4054 + }, + { + "epoch": 0.63, + "grad_norm": 12.130774426903695, + "learning_rate": 1.8328664866158332e-05, + "loss": 0.7332, + "step": 4055 + }, + { + "epoch": 0.63, + "grad_norm": 16.734666720286576, + "learning_rate": 1.8327731369825432e-05, + "loss": 0.788, + "step": 4056 + }, + { + "epoch": 0.63, + "grad_norm": 18.344273597094165, + "learning_rate": 1.8326797636656723e-05, + "loss": 0.8264, + "step": 4057 + }, + { + "epoch": 0.63, + "grad_norm": 20.630659119575697, + "learning_rate": 1.8325863666678764e-05, + "loss": 0.7591, + "step": 4058 + }, + { + "epoch": 0.63, + "grad_norm": 36.55776862643885, + "learning_rate": 1.832492945991811e-05, + "loss": 0.7203, + "step": 4059 + }, + { + "epoch": 0.63, + "grad_norm": 13.70146592654616, + "learning_rate": 1.8323995016401332e-05, + "loss": 0.7348, + "step": 4060 + }, + { + "epoch": 0.63, + "grad_norm": 16.597404789158826, + "learning_rate": 1.8323060336155008e-05, + "loss": 0.7367, + "step": 4061 + }, + { + "epoch": 0.63, + "grad_norm": 18.41308691752615, + "learning_rate": 1.8322125419205713e-05, + "loss": 0.7802, + "step": 4062 + }, + { + "epoch": 0.63, + "grad_norm": 23.890983435397562, + "learning_rate": 1.832119026558004e-05, + "loss": 0.8569, + "step": 4063 + }, + { + "epoch": 0.63, + "grad_norm": 12.789768444191667, + "learning_rate": 1.8320254875304585e-05, + "loss": 0.7193, + "step": 4064 + }, + { + "epoch": 0.63, + "grad_norm": 17.398659155709403, + "learning_rate": 1.831931924840595e-05, + "loss": 0.6454, + "step": 4065 + }, + { + "epoch": 0.64, + "grad_norm": 16.721580559436095, + "learning_rate": 1.831838338491074e-05, + "loss": 0.7313, + "step": 4066 + }, + { + "epoch": 0.64, + "grad_norm": 19.189410233777284, + "learning_rate": 1.8317447284845573e-05, + "loss": 0.7123, + "step": 4067 + }, + { + "epoch": 0.64, + "grad_norm": 25.44081514018268, + "learning_rate": 1.831651094823707e-05, + "loss": 0.7311, + "step": 4068 + }, + { + "epoch": 0.64, + "grad_norm": 23.73241113633123, + "learning_rate": 1.831557437511186e-05, + "loss": 0.8133, + "step": 4069 + }, + { + "epoch": 0.64, + "grad_norm": 16.433668434747723, + "learning_rate": 1.8314637565496584e-05, + "loss": 0.6534, + "step": 4070 + }, + { + "epoch": 0.64, + "grad_norm": 30.62790181026401, + "learning_rate": 1.8313700519417876e-05, + "loss": 0.8467, + "step": 4071 + }, + { + "epoch": 0.64, + "grad_norm": 19.14620854273327, + "learning_rate": 1.8312763236902387e-05, + "loss": 0.8438, + "step": 4072 + }, + { + "epoch": 0.64, + "grad_norm": 17.66952688602762, + "learning_rate": 1.831182571797678e-05, + "loss": 0.7178, + "step": 4073 + }, + { + "epoch": 0.64, + "grad_norm": 16.692903620687968, + "learning_rate": 1.8310887962667706e-05, + "loss": 0.7031, + "step": 4074 + }, + { + "epoch": 0.64, + "grad_norm": 22.533743314309838, + "learning_rate": 1.830994997100184e-05, + "loss": 0.884, + "step": 4075 + }, + { + "epoch": 0.64, + "grad_norm": 23.27041583951733, + "learning_rate": 
1.8309011743005863e-05, + "loss": 0.7584, + "step": 4076 + }, + { + "epoch": 0.64, + "grad_norm": 86.65536448497078, + "learning_rate": 1.8308073278706448e-05, + "loss": 0.7623, + "step": 4077 + }, + { + "epoch": 0.64, + "grad_norm": 17.39951506978944, + "learning_rate": 1.8307134578130293e-05, + "loss": 0.7753, + "step": 4078 + }, + { + "epoch": 0.64, + "grad_norm": 16.932615527768895, + "learning_rate": 1.8306195641304088e-05, + "loss": 0.8052, + "step": 4079 + }, + { + "epoch": 0.64, + "grad_norm": 15.343993609897169, + "learning_rate": 1.8305256468254537e-05, + "loss": 0.8088, + "step": 4080 + }, + { + "epoch": 0.64, + "grad_norm": 19.93329382866461, + "learning_rate": 1.830431705900835e-05, + "loss": 0.7737, + "step": 4081 + }, + { + "epoch": 0.64, + "grad_norm": 18.29276556553434, + "learning_rate": 1.8303377413592248e-05, + "loss": 0.6871, + "step": 4082 + }, + { + "epoch": 0.64, + "grad_norm": 17.800445241228395, + "learning_rate": 1.8302437532032945e-05, + "loss": 0.7648, + "step": 4083 + }, + { + "epoch": 0.64, + "grad_norm": 21.153387648377237, + "learning_rate": 1.830149741435718e-05, + "loss": 0.8411, + "step": 4084 + }, + { + "epoch": 0.64, + "grad_norm": 19.861353873533375, + "learning_rate": 1.830055706059168e-05, + "loss": 0.8067, + "step": 4085 + }, + { + "epoch": 0.64, + "grad_norm": 18.48304642508233, + "learning_rate": 1.8299616470763196e-05, + "loss": 0.7107, + "step": 4086 + }, + { + "epoch": 0.64, + "grad_norm": 20.179646688753063, + "learning_rate": 1.829867564489847e-05, + "loss": 0.9648, + "step": 4087 + }, + { + "epoch": 0.64, + "grad_norm": 28.52022475833779, + "learning_rate": 1.8297734583024273e-05, + "loss": 0.808, + "step": 4088 + }, + { + "epoch": 0.64, + "grad_norm": 16.980345340979714, + "learning_rate": 1.8296793285167348e-05, + "loss": 0.6603, + "step": 4089 + }, + { + "epoch": 0.64, + "grad_norm": 24.50393409247012, + "learning_rate": 1.8295851751354482e-05, + "loss": 0.7467, + "step": 4090 + }, + { + "epoch": 0.64, + "grad_norm": 22.699693493333474, + "learning_rate": 1.8294909981612443e-05, + "loss": 0.7744, + "step": 4091 + }, + { + "epoch": 0.64, + "grad_norm": 24.123405910362294, + "learning_rate": 1.8293967975968016e-05, + "loss": 0.7457, + "step": 4092 + }, + { + "epoch": 0.64, + "grad_norm": 15.135385663796493, + "learning_rate": 1.8293025734447992e-05, + "loss": 0.7948, + "step": 4093 + }, + { + "epoch": 0.64, + "grad_norm": 20.12608952194189, + "learning_rate": 1.8292083257079166e-05, + "loss": 0.7302, + "step": 4094 + }, + { + "epoch": 0.64, + "grad_norm": 19.64810996948637, + "learning_rate": 1.8291140543888347e-05, + "loss": 0.8145, + "step": 4095 + }, + { + "epoch": 0.64, + "grad_norm": 23.120497737278335, + "learning_rate": 1.8290197594902337e-05, + "loss": 0.7315, + "step": 4096 + }, + { + "epoch": 0.64, + "grad_norm": 14.403996646144535, + "learning_rate": 1.8289254410147958e-05, + "loss": 0.8329, + "step": 4097 + }, + { + "epoch": 0.64, + "grad_norm": 14.843392320640191, + "learning_rate": 1.8288310989652035e-05, + "loss": 0.7139, + "step": 4098 + }, + { + "epoch": 0.64, + "grad_norm": 17.479391158151, + "learning_rate": 1.828736733344139e-05, + "loss": 0.7458, + "step": 4099 + }, + { + "epoch": 0.64, + "grad_norm": 11.982364150750014, + "learning_rate": 1.8286423441542874e-05, + "loss": 0.7826, + "step": 4100 + }, + { + "epoch": 0.64, + "grad_norm": 14.865509935831454, + "learning_rate": 1.828547931398332e-05, + "loss": 0.7012, + "step": 4101 + }, + { + "epoch": 0.64, + "grad_norm": 20.914275430842952, + "learning_rate": 
1.8284534950789578e-05, + "loss": 0.7364, + "step": 4102 + }, + { + "epoch": 0.64, + "grad_norm": 16.00571356771551, + "learning_rate": 1.828359035198851e-05, + "loss": 0.7736, + "step": 4103 + }, + { + "epoch": 0.64, + "grad_norm": 29.542125641956837, + "learning_rate": 1.8282645517606977e-05, + "loss": 0.9103, + "step": 4104 + }, + { + "epoch": 0.64, + "grad_norm": 15.097036599178933, + "learning_rate": 1.8281700447671852e-05, + "loss": 0.8486, + "step": 4105 + }, + { + "epoch": 0.64, + "grad_norm": 16.684310183173224, + "learning_rate": 1.828075514221001e-05, + "loss": 0.6891, + "step": 4106 + }, + { + "epoch": 0.64, + "grad_norm": 14.84088828107749, + "learning_rate": 1.8279809601248342e-05, + "loss": 0.831, + "step": 4107 + }, + { + "epoch": 0.64, + "grad_norm": 23.74038264025481, + "learning_rate": 1.8278863824813726e-05, + "loss": 0.8437, + "step": 4108 + }, + { + "epoch": 0.64, + "grad_norm": 16.528611614866424, + "learning_rate": 1.8277917812933068e-05, + "loss": 0.7473, + "step": 4109 + }, + { + "epoch": 0.64, + "grad_norm": 28.163180080034113, + "learning_rate": 1.827697156563327e-05, + "loss": 0.8711, + "step": 4110 + }, + { + "epoch": 0.64, + "grad_norm": 12.354580146850937, + "learning_rate": 1.827602508294124e-05, + "loss": 0.7358, + "step": 4111 + }, + { + "epoch": 0.64, + "grad_norm": 15.790304353132047, + "learning_rate": 1.82750783648839e-05, + "loss": 0.7273, + "step": 4112 + }, + { + "epoch": 0.64, + "grad_norm": 33.003127082226484, + "learning_rate": 1.8274131411488172e-05, + "loss": 0.8199, + "step": 4113 + }, + { + "epoch": 0.64, + "grad_norm": 17.171991094486625, + "learning_rate": 1.8273184222780984e-05, + "loss": 0.794, + "step": 4114 + }, + { + "epoch": 0.64, + "grad_norm": 14.696733095273618, + "learning_rate": 1.827223679878928e-05, + "loss": 0.7188, + "step": 4115 + }, + { + "epoch": 0.64, + "grad_norm": 23.425840492548925, + "learning_rate": 1.8271289139539997e-05, + "loss": 0.8428, + "step": 4116 + }, + { + "epoch": 0.64, + "grad_norm": 26.09084472929248, + "learning_rate": 1.8270341245060095e-05, + "loss": 0.822, + "step": 4117 + }, + { + "epoch": 0.64, + "grad_norm": 21.6793608611648, + "learning_rate": 1.8269393115376522e-05, + "loss": 0.8358, + "step": 4118 + }, + { + "epoch": 0.64, + "grad_norm": 36.155094100226705, + "learning_rate": 1.8268444750516246e-05, + "loss": 0.7757, + "step": 4119 + }, + { + "epoch": 0.64, + "grad_norm": 18.56333594133736, + "learning_rate": 1.8267496150506242e-05, + "loss": 0.706, + "step": 4120 + }, + { + "epoch": 0.64, + "grad_norm": 25.576181748058023, + "learning_rate": 1.826654731537348e-05, + "loss": 0.8794, + "step": 4121 + }, + { + "epoch": 0.64, + "grad_norm": 27.442736306317713, + "learning_rate": 1.826559824514495e-05, + "loss": 0.8858, + "step": 4122 + }, + { + "epoch": 0.64, + "grad_norm": 28.96593825882559, + "learning_rate": 1.8264648939847642e-05, + "loss": 0.9125, + "step": 4123 + }, + { + "epoch": 0.64, + "grad_norm": 19.547110085785135, + "learning_rate": 1.826369939950855e-05, + "loss": 0.7612, + "step": 4124 + }, + { + "epoch": 0.64, + "grad_norm": 18.927598558929994, + "learning_rate": 1.8262749624154684e-05, + "loss": 0.8409, + "step": 4125 + }, + { + "epoch": 0.64, + "grad_norm": 33.57686183261153, + "learning_rate": 1.8261799613813053e-05, + "loss": 0.7878, + "step": 4126 + }, + { + "epoch": 0.64, + "grad_norm": 14.26701776238318, + "learning_rate": 1.826084936851067e-05, + "loss": 0.6831, + "step": 4127 + }, + { + "epoch": 0.64, + "grad_norm": 23.320788959766865, + "learning_rate": 
1.8259898888274564e-05, + "loss": 0.7782, + "step": 4128 + }, + { + "epoch": 0.64, + "grad_norm": 19.862091160834396, + "learning_rate": 1.8258948173131766e-05, + "loss": 0.7341, + "step": 4129 + }, + { + "epoch": 0.65, + "grad_norm": 14.021932432436662, + "learning_rate": 1.8257997223109317e-05, + "loss": 0.7314, + "step": 4130 + }, + { + "epoch": 0.65, + "grad_norm": 22.693422991939816, + "learning_rate": 1.825704603823425e-05, + "loss": 0.7429, + "step": 4131 + }, + { + "epoch": 0.65, + "grad_norm": 19.957814172696747, + "learning_rate": 1.8256094618533633e-05, + "loss": 0.8141, + "step": 4132 + }, + { + "epoch": 0.65, + "grad_norm": 13.163449491609454, + "learning_rate": 1.825514296403451e-05, + "loss": 0.7616, + "step": 4133 + }, + { + "epoch": 0.65, + "grad_norm": 16.848775350195158, + "learning_rate": 1.825419107476395e-05, + "loss": 0.8326, + "step": 4134 + }, + { + "epoch": 0.65, + "grad_norm": 14.569001354101406, + "learning_rate": 1.8253238950749025e-05, + "loss": 0.8025, + "step": 4135 + }, + { + "epoch": 0.65, + "grad_norm": 19.710723629341924, + "learning_rate": 1.8252286592016812e-05, + "loss": 0.8281, + "step": 4136 + }, + { + "epoch": 0.65, + "grad_norm": 19.947511865867735, + "learning_rate": 1.8251333998594395e-05, + "loss": 0.7433, + "step": 4137 + }, + { + "epoch": 0.65, + "grad_norm": 13.028457690058588, + "learning_rate": 1.825038117050887e-05, + "loss": 0.7615, + "step": 4138 + }, + { + "epoch": 0.65, + "grad_norm": 26.60533198398627, + "learning_rate": 1.824942810778733e-05, + "loss": 0.7839, + "step": 4139 + }, + { + "epoch": 0.65, + "grad_norm": 17.996097847880318, + "learning_rate": 1.8248474810456874e-05, + "loss": 0.8439, + "step": 4140 + }, + { + "epoch": 0.65, + "grad_norm": 19.433477798959455, + "learning_rate": 1.8247521278544625e-05, + "loss": 0.7769, + "step": 4141 + }, + { + "epoch": 0.65, + "grad_norm": 15.530227256158849, + "learning_rate": 1.8246567512077694e-05, + "loss": 0.7159, + "step": 4142 + }, + { + "epoch": 0.65, + "grad_norm": 14.970803228492773, + "learning_rate": 1.824561351108321e-05, + "loss": 0.7677, + "step": 4143 + }, + { + "epoch": 0.65, + "grad_norm": 23.85053833675316, + "learning_rate": 1.8244659275588298e-05, + "loss": 0.8731, + "step": 4144 + }, + { + "epoch": 0.65, + "grad_norm": 24.81336237324034, + "learning_rate": 1.82437048056201e-05, + "loss": 0.7852, + "step": 4145 + }, + { + "epoch": 0.65, + "grad_norm": 17.01315384405582, + "learning_rate": 1.8242750101205757e-05, + "loss": 0.7627, + "step": 4146 + }, + { + "epoch": 0.65, + "grad_norm": 19.354121519100417, + "learning_rate": 1.8241795162372426e-05, + "loss": 0.8619, + "step": 4147 + }, + { + "epoch": 0.65, + "grad_norm": 16.814955547995996, + "learning_rate": 1.8240839989147264e-05, + "loss": 0.6823, + "step": 4148 + }, + { + "epoch": 0.65, + "grad_norm": 12.764312652754464, + "learning_rate": 1.823988458155743e-05, + "loss": 0.7452, + "step": 4149 + }, + { + "epoch": 0.65, + "grad_norm": 15.218329197629386, + "learning_rate": 1.8238928939630097e-05, + "loss": 0.7756, + "step": 4150 + }, + { + "epoch": 0.65, + "grad_norm": 24.214087248146637, + "learning_rate": 1.823797306339245e-05, + "loss": 0.7437, + "step": 4151 + }, + { + "epoch": 0.65, + "grad_norm": 27.496878017594884, + "learning_rate": 1.8237016952871664e-05, + "loss": 0.7958, + "step": 4152 + }, + { + "epoch": 0.65, + "grad_norm": 22.105387664446674, + "learning_rate": 1.8236060608094938e-05, + "loss": 0.7972, + "step": 4153 + }, + { + "epoch": 0.65, + "grad_norm": 20.260814013339807, + "learning_rate": 
1.8235104029089465e-05, + "loss": 0.8071, + "step": 4154 + }, + { + "epoch": 0.65, + "grad_norm": 23.197953402440604, + "learning_rate": 1.823414721588245e-05, + "loss": 0.7347, + "step": 4155 + }, + { + "epoch": 0.65, + "grad_norm": 24.597249801764658, + "learning_rate": 1.8233190168501104e-05, + "loss": 0.7891, + "step": 4156 + }, + { + "epoch": 0.65, + "grad_norm": 20.397370830305267, + "learning_rate": 1.8232232886972643e-05, + "loss": 0.7062, + "step": 4157 + }, + { + "epoch": 0.65, + "grad_norm": 15.84046343709538, + "learning_rate": 1.8231275371324303e-05, + "loss": 0.696, + "step": 4158 + }, + { + "epoch": 0.65, + "grad_norm": 20.091233622436835, + "learning_rate": 1.82303176215833e-05, + "loss": 0.7187, + "step": 4159 + }, + { + "epoch": 0.65, + "grad_norm": 13.506800333979673, + "learning_rate": 1.8229359637776883e-05, + "loss": 0.746, + "step": 4160 + }, + { + "epoch": 0.65, + "grad_norm": 20.212309330351744, + "learning_rate": 1.8228401419932287e-05, + "loss": 0.8117, + "step": 4161 + }, + { + "epoch": 0.65, + "grad_norm": 17.157183395940265, + "learning_rate": 1.822744296807677e-05, + "loss": 0.7365, + "step": 4162 + }, + { + "epoch": 0.65, + "grad_norm": 31.04302911307338, + "learning_rate": 1.8226484282237587e-05, + "loss": 0.768, + "step": 4163 + }, + { + "epoch": 0.65, + "grad_norm": 19.208916504686204, + "learning_rate": 1.822552536244201e-05, + "loss": 0.6897, + "step": 4164 + }, + { + "epoch": 0.65, + "grad_norm": 28.746442931343466, + "learning_rate": 1.8224566208717296e-05, + "loss": 0.7417, + "step": 4165 + }, + { + "epoch": 0.65, + "grad_norm": 20.255061158054005, + "learning_rate": 1.8223606821090733e-05, + "loss": 0.7198, + "step": 4166 + }, + { + "epoch": 0.65, + "grad_norm": 13.535106175537841, + "learning_rate": 1.82226471995896e-05, + "loss": 0.7751, + "step": 4167 + }, + { + "epoch": 0.65, + "grad_norm": 15.6580760161256, + "learning_rate": 1.8221687344241193e-05, + "loss": 0.7885, + "step": 4168 + }, + { + "epoch": 0.65, + "grad_norm": 17.306401910524993, + "learning_rate": 1.822072725507281e-05, + "loss": 0.7807, + "step": 4169 + }, + { + "epoch": 0.65, + "grad_norm": 15.240129507863395, + "learning_rate": 1.8219766932111747e-05, + "loss": 0.7656, + "step": 4170 + }, + { + "epoch": 0.65, + "grad_norm": 22.08386420108185, + "learning_rate": 1.8218806375385326e-05, + "loss": 0.8176, + "step": 4171 + }, + { + "epoch": 0.65, + "grad_norm": 18.282687719982253, + "learning_rate": 1.8217845584920858e-05, + "loss": 0.7178, + "step": 4172 + }, + { + "epoch": 0.65, + "grad_norm": 16.343011743673248, + "learning_rate": 1.821688456074567e-05, + "loss": 0.757, + "step": 4173 + }, + { + "epoch": 0.65, + "grad_norm": 17.277970841041398, + "learning_rate": 1.8215923302887085e-05, + "loss": 0.8011, + "step": 4174 + }, + { + "epoch": 0.65, + "grad_norm": 70.69089067862052, + "learning_rate": 1.8214961811372455e-05, + "loss": 0.8367, + "step": 4175 + }, + { + "epoch": 0.65, + "grad_norm": 26.88569046519337, + "learning_rate": 1.8214000086229115e-05, + "loss": 0.7145, + "step": 4176 + }, + { + "epoch": 0.65, + "grad_norm": 10.862646877911239, + "learning_rate": 1.8213038127484415e-05, + "loss": 0.724, + "step": 4177 + }, + { + "epoch": 0.65, + "grad_norm": 23.664314450660363, + "learning_rate": 1.821207593516572e-05, + "loss": 0.8284, + "step": 4178 + }, + { + "epoch": 0.65, + "grad_norm": 21.66913914893567, + "learning_rate": 1.8211113509300384e-05, + "loss": 0.9162, + "step": 4179 + }, + { + "epoch": 0.65, + "grad_norm": 13.477847513495092, + "learning_rate": 
1.8210150849915787e-05, + "loss": 0.7381, + "step": 4180 + }, + { + "epoch": 0.65, + "grad_norm": 34.222411037368175, + "learning_rate": 1.82091879570393e-05, + "loss": 0.7785, + "step": 4181 + }, + { + "epoch": 0.65, + "grad_norm": 24.01902531121858, + "learning_rate": 1.8208224830698312e-05, + "loss": 0.8747, + "step": 4182 + }, + { + "epoch": 0.65, + "grad_norm": 20.111924042495076, + "learning_rate": 1.820726147092021e-05, + "loss": 0.849, + "step": 4183 + }, + { + "epoch": 0.65, + "grad_norm": 12.247972078479735, + "learning_rate": 1.8206297877732393e-05, + "loss": 0.6994, + "step": 4184 + }, + { + "epoch": 0.65, + "grad_norm": 16.675256512218837, + "learning_rate": 1.8205334051162264e-05, + "loss": 0.7962, + "step": 4185 + }, + { + "epoch": 0.65, + "grad_norm": 18.884641425402634, + "learning_rate": 1.8204369991237237e-05, + "loss": 0.8158, + "step": 4186 + }, + { + "epoch": 0.65, + "grad_norm": 21.349592604424227, + "learning_rate": 1.8203405697984726e-05, + "loss": 0.7835, + "step": 4187 + }, + { + "epoch": 0.65, + "grad_norm": 15.53768262207262, + "learning_rate": 1.8202441171432156e-05, + "loss": 0.8492, + "step": 4188 + }, + { + "epoch": 0.65, + "grad_norm": 19.984575034237253, + "learning_rate": 1.8201476411606956e-05, + "loss": 0.7321, + "step": 4189 + }, + { + "epoch": 0.65, + "grad_norm": 18.785201187460796, + "learning_rate": 1.8200511418536563e-05, + "loss": 0.6348, + "step": 4190 + }, + { + "epoch": 0.65, + "grad_norm": 20.782846553680805, + "learning_rate": 1.819954619224843e-05, + "loss": 0.9021, + "step": 4191 + }, + { + "epoch": 0.65, + "grad_norm": 25.22696632069129, + "learning_rate": 1.8198580732769992e-05, + "loss": 0.8692, + "step": 4192 + }, + { + "epoch": 0.65, + "grad_norm": 23.90325030911037, + "learning_rate": 1.819761504012872e-05, + "loss": 0.8521, + "step": 4193 + }, + { + "epoch": 0.66, + "grad_norm": 15.209610022450809, + "learning_rate": 1.8196649114352065e-05, + "loss": 0.8209, + "step": 4194 + }, + { + "epoch": 0.66, + "grad_norm": 17.128908647560987, + "learning_rate": 1.819568295546751e-05, + "loss": 0.7317, + "step": 4195 + }, + { + "epoch": 0.66, + "grad_norm": 21.094954425899743, + "learning_rate": 1.819471656350252e-05, + "loss": 0.7932, + "step": 4196 + }, + { + "epoch": 0.66, + "grad_norm": 22.65058012686775, + "learning_rate": 1.819374993848459e-05, + "loss": 0.8494, + "step": 4197 + }, + { + "epoch": 0.66, + "grad_norm": 24.688260305744706, + "learning_rate": 1.8192783080441204e-05, + "loss": 0.8413, + "step": 4198 + }, + { + "epoch": 0.66, + "grad_norm": 17.12246524496961, + "learning_rate": 1.819181598939986e-05, + "loss": 0.7947, + "step": 4199 + }, + { + "epoch": 0.66, + "grad_norm": 17.72511579142144, + "learning_rate": 1.819084866538806e-05, + "loss": 0.8739, + "step": 4200 + }, + { + "epoch": 0.66, + "grad_norm": 23.70355082401617, + "learning_rate": 1.8189881108433317e-05, + "loss": 0.8834, + "step": 4201 + }, + { + "epoch": 0.66, + "grad_norm": 21.189093626553777, + "learning_rate": 1.8188913318563145e-05, + "loss": 0.8128, + "step": 4202 + }, + { + "epoch": 0.66, + "grad_norm": 22.59124555399027, + "learning_rate": 1.8187945295805066e-05, + "loss": 0.6995, + "step": 4203 + }, + { + "epoch": 0.66, + "grad_norm": 17.390925392819383, + "learning_rate": 1.8186977040186615e-05, + "loss": 0.8919, + "step": 4204 + }, + { + "epoch": 0.66, + "grad_norm": 14.713384502269177, + "learning_rate": 1.8186008551735327e-05, + "loss": 0.7108, + "step": 4205 + }, + { + "epoch": 0.66, + "grad_norm": 17.107845542862567, + "learning_rate": 
1.8185039830478745e-05, + "loss": 0.8084, + "step": 4206 + }, + { + "epoch": 0.66, + "grad_norm": 18.85657301601761, + "learning_rate": 1.8184070876444417e-05, + "loss": 0.7317, + "step": 4207 + }, + { + "epoch": 0.66, + "grad_norm": 24.990322371369285, + "learning_rate": 1.8183101689659902e-05, + "loss": 0.7218, + "step": 4208 + }, + { + "epoch": 0.66, + "grad_norm": 16.208851136761886, + "learning_rate": 1.818213227015276e-05, + "loss": 0.7399, + "step": 4209 + }, + { + "epoch": 0.66, + "grad_norm": 16.142991538272394, + "learning_rate": 1.8181162617950563e-05, + "loss": 0.76, + "step": 4210 + }, + { + "epoch": 0.66, + "grad_norm": 22.846959313289375, + "learning_rate": 1.8180192733080887e-05, + "loss": 0.7868, + "step": 4211 + }, + { + "epoch": 0.66, + "grad_norm": 34.90326480343949, + "learning_rate": 1.817922261557132e-05, + "loss": 0.7735, + "step": 4212 + }, + { + "epoch": 0.66, + "grad_norm": 21.493681543379306, + "learning_rate": 1.817825226544944e-05, + "loss": 0.8084, + "step": 4213 + }, + { + "epoch": 0.66, + "grad_norm": 23.45435698397495, + "learning_rate": 1.817728168274285e-05, + "loss": 0.8268, + "step": 4214 + }, + { + "epoch": 0.66, + "grad_norm": 22.28494940819731, + "learning_rate": 1.8176310867479155e-05, + "loss": 0.7959, + "step": 4215 + }, + { + "epoch": 0.66, + "grad_norm": 12.652858458670133, + "learning_rate": 1.8175339819685965e-05, + "loss": 0.6771, + "step": 4216 + }, + { + "epoch": 0.66, + "grad_norm": 14.71053667378346, + "learning_rate": 1.8174368539390888e-05, + "loss": 0.7457, + "step": 4217 + }, + { + "epoch": 0.66, + "grad_norm": 21.2796520327813, + "learning_rate": 1.8173397026621554e-05, + "loss": 0.7368, + "step": 4218 + }, + { + "epoch": 0.66, + "grad_norm": 12.584496892201871, + "learning_rate": 1.817242528140559e-05, + "loss": 0.7424, + "step": 4219 + }, + { + "epoch": 0.66, + "grad_norm": 16.700335026067325, + "learning_rate": 1.8171453303770628e-05, + "loss": 0.8336, + "step": 4220 + }, + { + "epoch": 0.66, + "grad_norm": 23.60266369823785, + "learning_rate": 1.8170481093744317e-05, + "loss": 0.7677, + "step": 4221 + }, + { + "epoch": 0.66, + "grad_norm": 20.796280385952652, + "learning_rate": 1.81695086513543e-05, + "loss": 0.825, + "step": 4222 + }, + { + "epoch": 0.66, + "grad_norm": 20.339328609160805, + "learning_rate": 1.8168535976628242e-05, + "loss": 0.8076, + "step": 4223 + }, + { + "epoch": 0.66, + "grad_norm": 20.664036607996508, + "learning_rate": 1.8167563069593796e-05, + "loss": 0.8796, + "step": 4224 + }, + { + "epoch": 0.66, + "grad_norm": 18.685501119311006, + "learning_rate": 1.8166589930278637e-05, + "loss": 0.8069, + "step": 4225 + }, + { + "epoch": 0.66, + "grad_norm": 18.0908378763667, + "learning_rate": 1.816561655871043e-05, + "loss": 0.731, + "step": 4226 + }, + { + "epoch": 0.66, + "grad_norm": 33.870150718719664, + "learning_rate": 1.816464295491687e-05, + "loss": 0.6768, + "step": 4227 + }, + { + "epoch": 0.66, + "grad_norm": 14.729729567852427, + "learning_rate": 1.816366911892564e-05, + "loss": 0.7056, + "step": 4228 + }, + { + "epoch": 0.66, + "grad_norm": 25.702781940478168, + "learning_rate": 1.8162695050764437e-05, + "loss": 0.8006, + "step": 4229 + }, + { + "epoch": 0.66, + "grad_norm": 46.41708567786615, + "learning_rate": 1.8161720750460963e-05, + "loss": 0.771, + "step": 4230 + }, + { + "epoch": 0.66, + "grad_norm": 19.36104449712154, + "learning_rate": 1.8160746218042923e-05, + "loss": 0.7014, + "step": 4231 + }, + { + "epoch": 0.66, + "grad_norm": 13.573114581859631, + "learning_rate": 
1.8159771453538034e-05, + "loss": 0.6727, + "step": 4232 + }, + { + "epoch": 0.66, + "grad_norm": 15.327036339677111, + "learning_rate": 1.8158796456974023e-05, + "loss": 0.7674, + "step": 4233 + }, + { + "epoch": 0.66, + "grad_norm": 16.592549386565253, + "learning_rate": 1.8157821228378607e-05, + "loss": 0.8118, + "step": 4234 + }, + { + "epoch": 0.66, + "grad_norm": 18.06741184879512, + "learning_rate": 1.8156845767779532e-05, + "loss": 0.8578, + "step": 4235 + }, + { + "epoch": 0.66, + "grad_norm": 19.853984133347314, + "learning_rate": 1.815587007520453e-05, + "loss": 0.7859, + "step": 4236 + }, + { + "epoch": 0.66, + "grad_norm": 23.56419704343956, + "learning_rate": 1.815489415068136e-05, + "loss": 0.7317, + "step": 4237 + }, + { + "epoch": 0.66, + "grad_norm": 15.464204674312242, + "learning_rate": 1.8153917994237765e-05, + "loss": 0.7163, + "step": 4238 + }, + { + "epoch": 0.66, + "grad_norm": 25.546874955670187, + "learning_rate": 1.8152941605901518e-05, + "loss": 0.8373, + "step": 4239 + }, + { + "epoch": 0.66, + "grad_norm": 20.286881393131456, + "learning_rate": 1.8151964985700375e-05, + "loss": 0.7966, + "step": 4240 + }, + { + "epoch": 0.66, + "grad_norm": 16.69725180837408, + "learning_rate": 1.815098813366212e-05, + "loss": 0.6484, + "step": 4241 + }, + { + "epoch": 0.66, + "grad_norm": 20.485323425701363, + "learning_rate": 1.8150011049814534e-05, + "loss": 0.7915, + "step": 4242 + }, + { + "epoch": 0.66, + "grad_norm": 16.325577534439056, + "learning_rate": 1.8149033734185396e-05, + "loss": 0.7345, + "step": 4243 + }, + { + "epoch": 0.66, + "grad_norm": 20.35399091586787, + "learning_rate": 1.814805618680251e-05, + "loss": 0.8457, + "step": 4244 + }, + { + "epoch": 0.66, + "grad_norm": 26.326481407887844, + "learning_rate": 1.814707840769367e-05, + "loss": 0.796, + "step": 4245 + }, + { + "epoch": 0.66, + "grad_norm": 19.757716482222545, + "learning_rate": 1.8146100396886685e-05, + "loss": 0.7758, + "step": 4246 + }, + { + "epoch": 0.66, + "grad_norm": 18.672847433984025, + "learning_rate": 1.814512215440937e-05, + "loss": 0.7704, + "step": 4247 + }, + { + "epoch": 0.66, + "grad_norm": 19.76956626890496, + "learning_rate": 1.8144143680289547e-05, + "loss": 0.777, + "step": 4248 + }, + { + "epoch": 0.66, + "grad_norm": 16.811800221589184, + "learning_rate": 1.8143164974555042e-05, + "loss": 0.724, + "step": 4249 + }, + { + "epoch": 0.66, + "grad_norm": 19.37250055988275, + "learning_rate": 1.8142186037233687e-05, + "loss": 0.8216, + "step": 4250 + }, + { + "epoch": 0.66, + "grad_norm": 17.467740504008308, + "learning_rate": 1.8141206868353327e-05, + "loss": 0.7109, + "step": 4251 + }, + { + "epoch": 0.66, + "grad_norm": 14.09127753365462, + "learning_rate": 1.8140227467941803e-05, + "loss": 0.6957, + "step": 4252 + }, + { + "epoch": 0.66, + "grad_norm": 17.512332261362477, + "learning_rate": 1.8139247836026977e-05, + "loss": 0.8107, + "step": 4253 + }, + { + "epoch": 0.66, + "grad_norm": 15.053714789069293, + "learning_rate": 1.8138267972636697e-05, + "loss": 0.8666, + "step": 4254 + }, + { + "epoch": 0.66, + "grad_norm": 27.58978856186665, + "learning_rate": 1.8137287877798842e-05, + "loss": 0.8388, + "step": 4255 + }, + { + "epoch": 0.66, + "grad_norm": 21.86409343623787, + "learning_rate": 1.8136307551541276e-05, + "loss": 0.8195, + "step": 4256 + }, + { + "epoch": 0.66, + "grad_norm": 17.1160706365079, + "learning_rate": 1.8135326993891886e-05, + "loss": 0.8557, + "step": 4257 + }, + { + "epoch": 0.67, + "grad_norm": 21.49182803457618, + "learning_rate": 
1.8134346204878553e-05, + "loss": 0.8223, + "step": 4258 + }, + { + "epoch": 0.67, + "grad_norm": 15.701361360959423, + "learning_rate": 1.8133365184529176e-05, + "loss": 0.7515, + "step": 4259 + }, + { + "epoch": 0.67, + "grad_norm": 31.154165733893155, + "learning_rate": 1.8132383932871645e-05, + "loss": 0.6681, + "step": 4260 + }, + { + "epoch": 0.67, + "grad_norm": 32.96153690202711, + "learning_rate": 1.8131402449933875e-05, + "loss": 0.7608, + "step": 4261 + }, + { + "epoch": 0.67, + "grad_norm": 17.93937119749214, + "learning_rate": 1.813042073574378e-05, + "loss": 0.7109, + "step": 4262 + }, + { + "epoch": 0.67, + "grad_norm": 17.42305734839047, + "learning_rate": 1.8129438790329272e-05, + "loss": 0.7843, + "step": 4263 + }, + { + "epoch": 0.67, + "grad_norm": 15.325273479441474, + "learning_rate": 1.8128456613718278e-05, + "loss": 0.7861, + "step": 4264 + }, + { + "epoch": 0.67, + "grad_norm": 18.82262801814742, + "learning_rate": 1.812747420593874e-05, + "loss": 0.7791, + "step": 4265 + }, + { + "epoch": 0.67, + "grad_norm": 20.475188254677505, + "learning_rate": 1.8126491567018582e-05, + "loss": 0.7828, + "step": 4266 + }, + { + "epoch": 0.67, + "grad_norm": 19.533285579654216, + "learning_rate": 1.8125508696985763e-05, + "loss": 0.8698, + "step": 4267 + }, + { + "epoch": 0.67, + "grad_norm": 21.120126518310595, + "learning_rate": 1.8124525595868228e-05, + "loss": 0.7757, + "step": 4268 + }, + { + "epoch": 0.67, + "grad_norm": 20.919112486785913, + "learning_rate": 1.8123542263693935e-05, + "loss": 0.7934, + "step": 4269 + }, + { + "epoch": 0.67, + "grad_norm": 11.94620908456108, + "learning_rate": 1.8122558700490857e-05, + "loss": 0.7079, + "step": 4270 + }, + { + "epoch": 0.67, + "grad_norm": 19.697023743339116, + "learning_rate": 1.812157490628696e-05, + "loss": 0.7158, + "step": 4271 + }, + { + "epoch": 0.67, + "grad_norm": 21.91522053125438, + "learning_rate": 1.812059088111022e-05, + "loss": 0.7142, + "step": 4272 + }, + { + "epoch": 0.67, + "grad_norm": 18.362811639425157, + "learning_rate": 1.8119606624988632e-05, + "loss": 0.7401, + "step": 4273 + }, + { + "epoch": 0.67, + "grad_norm": 15.109241423859642, + "learning_rate": 1.8118622137950174e-05, + "loss": 0.7763, + "step": 4274 + }, + { + "epoch": 0.67, + "grad_norm": 27.86274346394573, + "learning_rate": 1.8117637420022858e-05, + "loss": 0.8347, + "step": 4275 + }, + { + "epoch": 0.67, + "grad_norm": 15.750907907969598, + "learning_rate": 1.8116652471234682e-05, + "loss": 0.7227, + "step": 4276 + }, + { + "epoch": 0.67, + "grad_norm": 13.535649071841243, + "learning_rate": 1.8115667291613653e-05, + "loss": 0.7447, + "step": 4277 + }, + { + "epoch": 0.67, + "grad_norm": 18.85389388916283, + "learning_rate": 1.8114681881187795e-05, + "loss": 0.7143, + "step": 4278 + }, + { + "epoch": 0.67, + "grad_norm": 17.848976007142422, + "learning_rate": 1.8113696239985134e-05, + "loss": 0.8332, + "step": 4279 + }, + { + "epoch": 0.67, + "grad_norm": 12.079137332828445, + "learning_rate": 1.81127103680337e-05, + "loss": 0.7348, + "step": 4280 + }, + { + "epoch": 0.67, + "grad_norm": 14.437528922415328, + "learning_rate": 1.8111724265361525e-05, + "loss": 0.6703, + "step": 4281 + }, + { + "epoch": 0.67, + "grad_norm": 17.77181728523707, + "learning_rate": 1.8110737931996658e-05, + "loss": 0.7071, + "step": 4282 + }, + { + "epoch": 0.67, + "grad_norm": 19.94980380782242, + "learning_rate": 1.8109751367967148e-05, + "loss": 0.7469, + "step": 4283 + }, + { + "epoch": 0.67, + "grad_norm": 19.21179685497273, + "learning_rate": 
1.8108764573301054e-05, + "loss": 0.7592, + "step": 4284 + }, + { + "epoch": 0.67, + "grad_norm": 30.20020004284759, + "learning_rate": 1.810777754802644e-05, + "loss": 0.7037, + "step": 4285 + }, + { + "epoch": 0.67, + "grad_norm": 22.464562474179527, + "learning_rate": 1.8106790292171378e-05, + "loss": 0.7459, + "step": 4286 + }, + { + "epoch": 0.67, + "grad_norm": 17.493058091409495, + "learning_rate": 1.8105802805763938e-05, + "loss": 0.7197, + "step": 4287 + }, + { + "epoch": 0.67, + "grad_norm": 25.784912404683094, + "learning_rate": 1.810481508883221e-05, + "loss": 0.7689, + "step": 4288 + }, + { + "epoch": 0.67, + "grad_norm": 16.012711676814728, + "learning_rate": 1.810382714140428e-05, + "loss": 0.7586, + "step": 4289 + }, + { + "epoch": 0.67, + "grad_norm": 19.877625776276066, + "learning_rate": 1.8102838963508247e-05, + "loss": 0.7816, + "step": 4290 + }, + { + "epoch": 0.67, + "grad_norm": 26.141985244497537, + "learning_rate": 1.8101850555172214e-05, + "loss": 0.9162, + "step": 4291 + }, + { + "epoch": 0.67, + "grad_norm": 25.660256589872496, + "learning_rate": 1.8100861916424293e-05, + "loss": 0.8101, + "step": 4292 + }, + { + "epoch": 0.67, + "grad_norm": 21.79914055128638, + "learning_rate": 1.8099873047292596e-05, + "loss": 0.8228, + "step": 4293 + }, + { + "epoch": 0.67, + "grad_norm": 24.090390228412115, + "learning_rate": 1.8098883947805247e-05, + "loss": 0.8054, + "step": 4294 + }, + { + "epoch": 0.67, + "grad_norm": 29.20721872618258, + "learning_rate": 1.8097894617990378e-05, + "loss": 0.7194, + "step": 4295 + }, + { + "epoch": 0.67, + "grad_norm": 16.476324899080826, + "learning_rate": 1.809690505787612e-05, + "loss": 0.8397, + "step": 4296 + }, + { + "epoch": 0.67, + "grad_norm": 17.93750872429027, + "learning_rate": 1.8095915267490625e-05, + "loss": 0.7262, + "step": 4297 + }, + { + "epoch": 0.67, + "grad_norm": 18.124958189143264, + "learning_rate": 1.809492524686203e-05, + "loss": 0.7357, + "step": 4298 + }, + { + "epoch": 0.67, + "grad_norm": 28.13115606275349, + "learning_rate": 1.80939349960185e-05, + "loss": 0.8305, + "step": 4299 + }, + { + "epoch": 0.67, + "grad_norm": 23.035539915949922, + "learning_rate": 1.8092944514988193e-05, + "loss": 0.9096, + "step": 4300 + }, + { + "epoch": 0.67, + "grad_norm": 16.872842539960725, + "learning_rate": 1.8091953803799275e-05, + "loss": 0.6916, + "step": 4301 + }, + { + "epoch": 0.67, + "grad_norm": 20.77223099773228, + "learning_rate": 1.8090962862479927e-05, + "loss": 0.8311, + "step": 4302 + }, + { + "epoch": 0.67, + "grad_norm": 16.082046418904415, + "learning_rate": 1.8089971691058327e-05, + "loss": 0.8602, + "step": 4303 + }, + { + "epoch": 0.67, + "grad_norm": 29.242166431510118, + "learning_rate": 1.8088980289562666e-05, + "loss": 0.8083, + "step": 4304 + }, + { + "epoch": 0.67, + "grad_norm": 20.3028884639645, + "learning_rate": 1.8087988658021137e-05, + "loss": 0.7272, + "step": 4305 + }, + { + "epoch": 0.67, + "grad_norm": 16.733338198346054, + "learning_rate": 1.8086996796461943e-05, + "loss": 0.7799, + "step": 4306 + }, + { + "epoch": 0.67, + "grad_norm": 18.619174167735295, + "learning_rate": 1.808600470491329e-05, + "loss": 0.8411, + "step": 4307 + }, + { + "epoch": 0.67, + "grad_norm": 20.88761445214009, + "learning_rate": 1.8085012383403392e-05, + "loss": 0.7286, + "step": 4308 + }, + { + "epoch": 0.67, + "grad_norm": 21.059224807944098, + "learning_rate": 1.8084019831960474e-05, + "loss": 0.7332, + "step": 4309 + }, + { + "epoch": 0.67, + "grad_norm": 13.722668229425524, + "learning_rate": 
1.808302705061276e-05, + "loss": 0.6538, + "step": 4310 + }, + { + "epoch": 0.67, + "grad_norm": 16.36886703664029, + "learning_rate": 1.8082034039388486e-05, + "loss": 0.7814, + "step": 4311 + }, + { + "epoch": 0.67, + "grad_norm": 33.47505662217455, + "learning_rate": 1.808104079831589e-05, + "loss": 0.8421, + "step": 4312 + }, + { + "epoch": 0.67, + "grad_norm": 16.327617185398385, + "learning_rate": 1.808004732742322e-05, + "loss": 0.7285, + "step": 4313 + }, + { + "epoch": 0.67, + "grad_norm": 17.1307651272322, + "learning_rate": 1.8079053626738732e-05, + "loss": 0.7086, + "step": 4314 + }, + { + "epoch": 0.67, + "grad_norm": 43.19829755649961, + "learning_rate": 1.8078059696290685e-05, + "loss": 0.8283, + "step": 4315 + }, + { + "epoch": 0.67, + "grad_norm": 17.407922130239847, + "learning_rate": 1.8077065536107347e-05, + "loss": 0.8116, + "step": 4316 + }, + { + "epoch": 0.67, + "grad_norm": 15.62592827042257, + "learning_rate": 1.8076071146216988e-05, + "loss": 0.8064, + "step": 4317 + }, + { + "epoch": 0.67, + "grad_norm": 28.946234029702975, + "learning_rate": 1.8075076526647892e-05, + "loss": 0.7978, + "step": 4318 + }, + { + "epoch": 0.67, + "grad_norm": 16.55398774569627, + "learning_rate": 1.807408167742834e-05, + "loss": 0.8183, + "step": 4319 + }, + { + "epoch": 0.67, + "grad_norm": 32.310680777675756, + "learning_rate": 1.8073086598586634e-05, + "loss": 0.8057, + "step": 4320 + }, + { + "epoch": 0.67, + "grad_norm": 22.56848686112926, + "learning_rate": 1.8072091290151063e-05, + "loss": 0.8163, + "step": 4321 + }, + { + "epoch": 0.68, + "grad_norm": 28.680198467444463, + "learning_rate": 1.807109575214994e-05, + "loss": 0.7474, + "step": 4322 + }, + { + "epoch": 0.68, + "grad_norm": 14.888657405865755, + "learning_rate": 1.8070099984611575e-05, + "loss": 0.6191, + "step": 4323 + }, + { + "epoch": 0.68, + "grad_norm": 17.62726442014608, + "learning_rate": 1.8069103987564286e-05, + "loss": 0.741, + "step": 4324 + }, + { + "epoch": 0.68, + "grad_norm": 24.28482620434681, + "learning_rate": 1.8068107761036402e-05, + "loss": 0.8525, + "step": 4325 + }, + { + "epoch": 0.68, + "grad_norm": 21.900999161629514, + "learning_rate": 1.806711130505625e-05, + "loss": 0.8357, + "step": 4326 + }, + { + "epoch": 0.68, + "grad_norm": 25.085255612111194, + "learning_rate": 1.806611461965217e-05, + "loss": 0.7387, + "step": 4327 + }, + { + "epoch": 0.68, + "grad_norm": 20.54601720172465, + "learning_rate": 1.8065117704852518e-05, + "loss": 0.8159, + "step": 4328 + }, + { + "epoch": 0.68, + "grad_norm": 22.934420265192585, + "learning_rate": 1.8064120560685626e-05, + "loss": 0.813, + "step": 4329 + }, + { + "epoch": 0.68, + "grad_norm": 18.78721142610371, + "learning_rate": 1.806312318717987e-05, + "loss": 0.8022, + "step": 4330 + }, + { + "epoch": 0.68, + "grad_norm": 19.46167464757203, + "learning_rate": 1.8062125584363605e-05, + "loss": 0.8014, + "step": 4331 + }, + { + "epoch": 0.68, + "grad_norm": 19.9684230010178, + "learning_rate": 1.8061127752265203e-05, + "loss": 0.7071, + "step": 4332 + }, + { + "epoch": 0.68, + "grad_norm": 17.1168461778214, + "learning_rate": 1.8060129690913045e-05, + "loss": 0.7522, + "step": 4333 + }, + { + "epoch": 0.68, + "grad_norm": 20.285889726700216, + "learning_rate": 1.8059131400335512e-05, + "loss": 0.8363, + "step": 4334 + }, + { + "epoch": 0.68, + "grad_norm": 14.347939714102884, + "learning_rate": 1.8058132880560998e-05, + "loss": 0.7959, + "step": 4335 + }, + { + "epoch": 0.68, + "grad_norm": 25.419360362360546, + "learning_rate": 
1.80571341316179e-05, + "loss": 0.7799, + "step": 4336 + }, + { + "epoch": 0.68, + "grad_norm": 17.75484865077361, + "learning_rate": 1.8056135153534616e-05, + "loss": 0.7569, + "step": 4337 + }, + { + "epoch": 0.68, + "grad_norm": 18.53483295500399, + "learning_rate": 1.8055135946339563e-05, + "loss": 0.8076, + "step": 4338 + }, + { + "epoch": 0.68, + "grad_norm": 14.414970112993563, + "learning_rate": 1.8054136510061156e-05, + "loss": 0.7859, + "step": 4339 + }, + { + "epoch": 0.68, + "grad_norm": 15.435482671334562, + "learning_rate": 1.8053136844727822e-05, + "loss": 0.7089, + "step": 4340 + }, + { + "epoch": 0.68, + "grad_norm": 18.503406182091712, + "learning_rate": 1.8052136950367983e-05, + "loss": 0.7825, + "step": 4341 + }, + { + "epoch": 0.68, + "grad_norm": 21.00745055602217, + "learning_rate": 1.8051136827010077e-05, + "loss": 0.8883, + "step": 4342 + }, + { + "epoch": 0.68, + "grad_norm": 16.391674330898983, + "learning_rate": 1.8050136474682553e-05, + "loss": 0.7553, + "step": 4343 + }, + { + "epoch": 0.68, + "grad_norm": 18.554786039391917, + "learning_rate": 1.8049135893413858e-05, + "loss": 0.812, + "step": 4344 + }, + { + "epoch": 0.68, + "grad_norm": 12.314777588105425, + "learning_rate": 1.8048135083232446e-05, + "loss": 0.6781, + "step": 4345 + }, + { + "epoch": 0.68, + "grad_norm": 17.949065210061367, + "learning_rate": 1.804713404416678e-05, + "loss": 0.8333, + "step": 4346 + }, + { + "epoch": 0.68, + "grad_norm": 4.591601797200667, + "learning_rate": 1.8046132776245327e-05, + "loss": 0.6883, + "step": 4347 + }, + { + "epoch": 0.68, + "grad_norm": 14.932887640126669, + "learning_rate": 1.804513127949657e-05, + "loss": 0.8157, + "step": 4348 + }, + { + "epoch": 0.68, + "grad_norm": 15.288021167190276, + "learning_rate": 1.8044129553948978e-05, + "loss": 0.8966, + "step": 4349 + }, + { + "epoch": 0.68, + "grad_norm": 16.91643776899268, + "learning_rate": 1.8043127599631053e-05, + "loss": 0.7158, + "step": 4350 + }, + { + "epoch": 0.68, + "grad_norm": 20.067793257401863, + "learning_rate": 1.8042125416571284e-05, + "loss": 0.7801, + "step": 4351 + }, + { + "epoch": 0.68, + "grad_norm": 19.39998942130946, + "learning_rate": 1.8041123004798173e-05, + "loss": 0.7697, + "step": 4352 + }, + { + "epoch": 0.68, + "grad_norm": 20.100686927107667, + "learning_rate": 1.8040120364340223e-05, + "loss": 0.8217, + "step": 4353 + }, + { + "epoch": 0.68, + "grad_norm": 28.562866033299674, + "learning_rate": 1.8039117495225956e-05, + "loss": 0.9237, + "step": 4354 + }, + { + "epoch": 0.68, + "grad_norm": 13.732006388989692, + "learning_rate": 1.803811439748389e-05, + "loss": 0.7574, + "step": 4355 + }, + { + "epoch": 0.68, + "grad_norm": 21.841406363579924, + "learning_rate": 1.8037111071142552e-05, + "loss": 0.8425, + "step": 4356 + }, + { + "epoch": 0.68, + "grad_norm": 15.001851837765855, + "learning_rate": 1.8036107516230478e-05, + "loss": 0.7416, + "step": 4357 + }, + { + "epoch": 0.68, + "grad_norm": 27.63443504792, + "learning_rate": 1.8035103732776206e-05, + "loss": 0.8568, + "step": 4358 + }, + { + "epoch": 0.68, + "grad_norm": 40.96006777616897, + "learning_rate": 1.803409972080829e-05, + "loss": 0.7273, + "step": 4359 + }, + { + "epoch": 0.68, + "grad_norm": 16.101674452924147, + "learning_rate": 1.803309548035527e-05, + "loss": 0.7821, + "step": 4360 + }, + { + "epoch": 0.68, + "grad_norm": 20.56604787182817, + "learning_rate": 1.8032091011445714e-05, + "loss": 0.7877, + "step": 4361 + }, + { + "epoch": 0.68, + "grad_norm": 26.465669799806857, + "learning_rate": 
1.803108631410819e-05, + "loss": 0.7313, + "step": 4362 + }, + { + "epoch": 0.68, + "grad_norm": 3.1204724421504864, + "learning_rate": 1.803008138837127e-05, + "loss": 0.676, + "step": 4363 + }, + { + "epoch": 0.68, + "grad_norm": 40.72773430428415, + "learning_rate": 1.8029076234263534e-05, + "loss": 0.7996, + "step": 4364 + }, + { + "epoch": 0.68, + "grad_norm": 16.66358343026831, + "learning_rate": 1.8028070851813566e-05, + "loss": 0.7804, + "step": 4365 + }, + { + "epoch": 0.68, + "grad_norm": 18.6108139596374, + "learning_rate": 1.802706524104996e-05, + "loss": 0.8305, + "step": 4366 + }, + { + "epoch": 0.68, + "grad_norm": 18.040803518821154, + "learning_rate": 1.8026059402001313e-05, + "loss": 0.6919, + "step": 4367 + }, + { + "epoch": 0.68, + "grad_norm": 13.762513375167515, + "learning_rate": 1.8025053334696232e-05, + "loss": 0.7765, + "step": 4368 + }, + { + "epoch": 0.68, + "grad_norm": 16.129159322551544, + "learning_rate": 1.8024047039163325e-05, + "loss": 0.7246, + "step": 4369 + }, + { + "epoch": 0.68, + "grad_norm": 23.06298176332034, + "learning_rate": 1.802304051543122e-05, + "loss": 0.7454, + "step": 4370 + }, + { + "epoch": 0.68, + "grad_norm": 24.823940025223486, + "learning_rate": 1.8022033763528534e-05, + "loss": 0.8304, + "step": 4371 + }, + { + "epoch": 0.68, + "grad_norm": 17.53250667161487, + "learning_rate": 1.80210267834839e-05, + "loss": 0.707, + "step": 4372 + }, + { + "epoch": 0.68, + "grad_norm": 15.50934033879026, + "learning_rate": 1.802001957532596e-05, + "loss": 0.7716, + "step": 4373 + }, + { + "epoch": 0.68, + "grad_norm": 16.948848468000165, + "learning_rate": 1.801901213908335e-05, + "loss": 0.7268, + "step": 4374 + }, + { + "epoch": 0.68, + "grad_norm": 19.420163334666416, + "learning_rate": 1.801800447478473e-05, + "loss": 0.896, + "step": 4375 + }, + { + "epoch": 0.68, + "grad_norm": 26.897851962525642, + "learning_rate": 1.8016996582458746e-05, + "loss": 0.7131, + "step": 4376 + }, + { + "epoch": 0.68, + "grad_norm": 13.634898020198941, + "learning_rate": 1.8015988462134073e-05, + "loss": 0.7771, + "step": 4377 + }, + { + "epoch": 0.68, + "grad_norm": 16.821859129615284, + "learning_rate": 1.801498011383938e-05, + "loss": 0.7458, + "step": 4378 + }, + { + "epoch": 0.68, + "grad_norm": 16.127979127194294, + "learning_rate": 1.8013971537603336e-05, + "loss": 0.7777, + "step": 4379 + }, + { + "epoch": 0.68, + "grad_norm": 20.924722870477922, + "learning_rate": 1.8012962733454636e-05, + "loss": 0.7691, + "step": 4380 + }, + { + "epoch": 0.68, + "grad_norm": 14.171522156088209, + "learning_rate": 1.8011953701421957e-05, + "loss": 0.7542, + "step": 4381 + }, + { + "epoch": 0.68, + "grad_norm": 22.07723572297356, + "learning_rate": 1.8010944441534004e-05, + "loss": 0.7337, + "step": 4382 + }, + { + "epoch": 0.68, + "grad_norm": 17.69890807634116, + "learning_rate": 1.800993495381948e-05, + "loss": 0.7926, + "step": 4383 + }, + { + "epoch": 0.68, + "grad_norm": 21.605652896121875, + "learning_rate": 1.800892523830709e-05, + "loss": 0.8293, + "step": 4384 + }, + { + "epoch": 0.68, + "grad_norm": 26.276327549891764, + "learning_rate": 1.8007915295025552e-05, + "loss": 0.7653, + "step": 4385 + }, + { + "epoch": 0.69, + "grad_norm": 16.608032089735506, + "learning_rate": 1.8006905124003583e-05, + "loss": 0.8014, + "step": 4386 + }, + { + "epoch": 0.69, + "grad_norm": 17.580945152395373, + "learning_rate": 1.8005894725269918e-05, + "loss": 0.8588, + "step": 4387 + }, + { + "epoch": 0.69, + "grad_norm": 22.8424159468008, + "learning_rate": 
1.8004884098853296e-05, + "loss": 0.7619, + "step": 4388 + }, + { + "epoch": 0.69, + "grad_norm": 12.82480906982031, + "learning_rate": 1.8003873244782447e-05, + "loss": 0.7729, + "step": 4389 + }, + { + "epoch": 0.69, + "grad_norm": 15.69975581915073, + "learning_rate": 1.8002862163086127e-05, + "loss": 0.7613, + "step": 4390 + }, + { + "epoch": 0.69, + "grad_norm": 17.51018684676257, + "learning_rate": 1.8001850853793084e-05, + "loss": 0.7006, + "step": 4391 + }, + { + "epoch": 0.69, + "grad_norm": 15.82898322524861, + "learning_rate": 1.8000839316932094e-05, + "loss": 0.7644, + "step": 4392 + }, + { + "epoch": 0.69, + "grad_norm": 20.407322404147806, + "learning_rate": 1.7999827552531905e-05, + "loss": 0.7579, + "step": 4393 + }, + { + "epoch": 0.69, + "grad_norm": 15.723444178959944, + "learning_rate": 1.7998815560621305e-05, + "loss": 0.8213, + "step": 4394 + }, + { + "epoch": 0.69, + "grad_norm": 21.889634416972665, + "learning_rate": 1.7997803341229067e-05, + "loss": 0.8172, + "step": 4395 + }, + { + "epoch": 0.69, + "grad_norm": 11.723829215174922, + "learning_rate": 1.7996790894383982e-05, + "loss": 0.6714, + "step": 4396 + }, + { + "epoch": 0.69, + "grad_norm": 27.59433678373567, + "learning_rate": 1.799577822011484e-05, + "loss": 0.7723, + "step": 4397 + }, + { + "epoch": 0.69, + "grad_norm": 14.131878716031913, + "learning_rate": 1.7994765318450446e-05, + "loss": 0.7368, + "step": 4398 + }, + { + "epoch": 0.69, + "grad_norm": 22.187226373704544, + "learning_rate": 1.7993752189419602e-05, + "loss": 0.7389, + "step": 4399 + }, + { + "epoch": 0.69, + "grad_norm": 16.6787192713573, + "learning_rate": 1.7992738833051122e-05, + "loss": 0.7607, + "step": 4400 + }, + { + "epoch": 0.69, + "grad_norm": 25.246798452805926, + "learning_rate": 1.7991725249373822e-05, + "loss": 0.8209, + "step": 4401 + }, + { + "epoch": 0.69, + "grad_norm": 19.80020465075268, + "learning_rate": 1.7990711438416536e-05, + "loss": 0.7652, + "step": 4402 + }, + { + "epoch": 0.69, + "grad_norm": 18.88989684970984, + "learning_rate": 1.798969740020809e-05, + "loss": 0.7749, + "step": 4403 + }, + { + "epoch": 0.69, + "grad_norm": 26.274208877065714, + "learning_rate": 1.798868313477732e-05, + "loss": 0.6947, + "step": 4404 + }, + { + "epoch": 0.69, + "grad_norm": 14.882254386309226, + "learning_rate": 1.7987668642153075e-05, + "loss": 0.7086, + "step": 4405 + }, + { + "epoch": 0.69, + "grad_norm": 16.090227360052566, + "learning_rate": 1.798665392236421e-05, + "loss": 0.6781, + "step": 4406 + }, + { + "epoch": 0.69, + "grad_norm": 16.378504637878486, + "learning_rate": 1.7985638975439578e-05, + "loss": 0.7496, + "step": 4407 + }, + { + "epoch": 0.69, + "grad_norm": 26.369406659588574, + "learning_rate": 1.7984623801408046e-05, + "loss": 0.7258, + "step": 4408 + }, + { + "epoch": 0.69, + "grad_norm": 15.660206540360726, + "learning_rate": 1.798360840029848e-05, + "loss": 0.7535, + "step": 4409 + }, + { + "epoch": 0.69, + "grad_norm": 15.986224646399426, + "learning_rate": 1.7982592772139766e-05, + "loss": 0.7267, + "step": 4410 + }, + { + "epoch": 0.69, + "grad_norm": 15.791240883508046, + "learning_rate": 1.7981576916960784e-05, + "loss": 0.8889, + "step": 4411 + }, + { + "epoch": 0.69, + "grad_norm": 13.433677329768178, + "learning_rate": 1.798056083479042e-05, + "loss": 0.732, + "step": 4412 + }, + { + "epoch": 0.69, + "grad_norm": 15.085880260665142, + "learning_rate": 1.7979544525657578e-05, + "loss": 0.7393, + "step": 4413 + }, + { + "epoch": 0.69, + "grad_norm": 17.73487400201541, + "learning_rate": 
1.797852798959115e-05, + "loss": 0.7295, + "step": 4414 + }, + { + "epoch": 0.69, + "grad_norm": 20.464276245659956, + "learning_rate": 1.7977511226620062e-05, + "loss": 0.8915, + "step": 4415 + }, + { + "epoch": 0.69, + "grad_norm": 21.289352732438633, + "learning_rate": 1.7976494236773217e-05, + "loss": 0.8119, + "step": 4416 + }, + { + "epoch": 0.69, + "grad_norm": 18.103478341841534, + "learning_rate": 1.7975477020079544e-05, + "loss": 0.7636, + "step": 4417 + }, + { + "epoch": 0.69, + "grad_norm": 18.893535765235537, + "learning_rate": 1.7974459576567968e-05, + "loss": 0.7475, + "step": 4418 + }, + { + "epoch": 0.69, + "grad_norm": 27.040377117318236, + "learning_rate": 1.797344190626743e-05, + "loss": 0.8772, + "step": 4419 + }, + { + "epoch": 0.69, + "grad_norm": 17.792159452154106, + "learning_rate": 1.7972424009206865e-05, + "loss": 0.7556, + "step": 4420 + }, + { + "epoch": 0.69, + "grad_norm": 22.696463105651482, + "learning_rate": 1.7971405885415228e-05, + "loss": 0.7266, + "step": 4421 + }, + { + "epoch": 0.69, + "grad_norm": 16.240044561619914, + "learning_rate": 1.7970387534921467e-05, + "loss": 0.7683, + "step": 4422 + }, + { + "epoch": 0.69, + "grad_norm": 23.60420954626879, + "learning_rate": 1.796936895775455e-05, + "loss": 0.7696, + "step": 4423 + }, + { + "epoch": 0.69, + "grad_norm": 22.518673024693154, + "learning_rate": 1.7968350153943443e-05, + "loss": 0.8025, + "step": 4424 + }, + { + "epoch": 0.69, + "grad_norm": 10.974492961960626, + "learning_rate": 1.7967331123517114e-05, + "loss": 0.6361, + "step": 4425 + }, + { + "epoch": 0.69, + "grad_norm": 15.23256368530721, + "learning_rate": 1.7966311866504552e-05, + "loss": 0.7467, + "step": 4426 + }, + { + "epoch": 0.69, + "grad_norm": 14.19721673234662, + "learning_rate": 1.796529238293474e-05, + "loss": 0.8107, + "step": 4427 + }, + { + "epoch": 0.69, + "grad_norm": 27.03278740716948, + "learning_rate": 1.7964272672836675e-05, + "loss": 0.8674, + "step": 4428 + }, + { + "epoch": 0.69, + "grad_norm": 21.983059411990997, + "learning_rate": 1.7963252736239347e-05, + "loss": 0.7714, + "step": 4429 + }, + { + "epoch": 0.69, + "grad_norm": 18.646595055233774, + "learning_rate": 1.7962232573171772e-05, + "loss": 0.8475, + "step": 4430 + }, + { + "epoch": 0.69, + "grad_norm": 16.200130679363628, + "learning_rate": 1.7961212183662967e-05, + "loss": 0.7553, + "step": 4431 + }, + { + "epoch": 0.69, + "grad_norm": 19.025344381774524, + "learning_rate": 1.796019156774194e-05, + "loss": 0.8178, + "step": 4432 + }, + { + "epoch": 0.69, + "grad_norm": 247.01706348116528, + "learning_rate": 1.795917072543772e-05, + "loss": 0.7495, + "step": 4433 + }, + { + "epoch": 0.69, + "grad_norm": 11.127014079637586, + "learning_rate": 1.7958149656779343e-05, + "loss": 0.7655, + "step": 4434 + }, + { + "epoch": 0.69, + "grad_norm": 15.928762044211297, + "learning_rate": 1.7957128361795844e-05, + "loss": 0.7268, + "step": 4435 + }, + { + "epoch": 0.69, + "grad_norm": 21.02890202621121, + "learning_rate": 1.7956106840516268e-05, + "loss": 0.8222, + "step": 4436 + }, + { + "epoch": 0.69, + "grad_norm": 11.901316062245872, + "learning_rate": 1.7955085092969668e-05, + "loss": 0.7505, + "step": 4437 + }, + { + "epoch": 0.69, + "grad_norm": 17.681103566629638, + "learning_rate": 1.7954063119185105e-05, + "loss": 0.7627, + "step": 4438 + }, + { + "epoch": 0.69, + "grad_norm": 20.414639107509593, + "learning_rate": 1.7953040919191635e-05, + "loss": 0.8111, + "step": 4439 + }, + { + "epoch": 0.69, + "grad_norm": 15.012859023733897, + "learning_rate": 
1.7952018493018335e-05, + "loss": 0.7608, + "step": 4440 + }, + { + "epoch": 0.69, + "grad_norm": 18.005937312750195, + "learning_rate": 1.7950995840694282e-05, + "loss": 0.8028, + "step": 4441 + }, + { + "epoch": 0.69, + "grad_norm": 14.745034940992863, + "learning_rate": 1.794997296224856e-05, + "loss": 0.6908, + "step": 4442 + }, + { + "epoch": 0.69, + "grad_norm": 15.67019884439328, + "learning_rate": 1.7948949857710256e-05, + "loss": 0.8683, + "step": 4443 + }, + { + "epoch": 0.69, + "grad_norm": 28.939592474780678, + "learning_rate": 1.7947926527108467e-05, + "loss": 0.8034, + "step": 4444 + }, + { + "epoch": 0.69, + "grad_norm": 25.855332846946425, + "learning_rate": 1.7946902970472297e-05, + "loss": 0.7717, + "step": 4445 + }, + { + "epoch": 0.69, + "grad_norm": 13.151155115022215, + "learning_rate": 1.7945879187830857e-05, + "loss": 0.7681, + "step": 4446 + }, + { + "epoch": 0.69, + "grad_norm": 17.3831178068288, + "learning_rate": 1.794485517921326e-05, + "loss": 0.7679, + "step": 4447 + }, + { + "epoch": 0.69, + "grad_norm": 16.126139486420012, + "learning_rate": 1.794383094464863e-05, + "loss": 0.7383, + "step": 4448 + }, + { + "epoch": 0.69, + "grad_norm": 18.3920308420861, + "learning_rate": 1.7942806484166096e-05, + "loss": 0.7641, + "step": 4449 + }, + { + "epoch": 0.7, + "grad_norm": 20.317655831876813, + "learning_rate": 1.794178179779479e-05, + "loss": 0.8207, + "step": 4450 + }, + { + "epoch": 0.7, + "grad_norm": 18.12111928353769, + "learning_rate": 1.7940756885563855e-05, + "loss": 0.6349, + "step": 4451 + }, + { + "epoch": 0.7, + "grad_norm": 21.89606140772132, + "learning_rate": 1.793973174750244e-05, + "loss": 0.8081, + "step": 4452 + }, + { + "epoch": 0.7, + "grad_norm": 14.8642243536, + "learning_rate": 1.79387063836397e-05, + "loss": 0.6497, + "step": 4453 + }, + { + "epoch": 0.7, + "grad_norm": 17.988074198969503, + "learning_rate": 1.7937680794004793e-05, + "loss": 0.8457, + "step": 4454 + }, + { + "epoch": 0.7, + "grad_norm": 22.446891520665506, + "learning_rate": 1.793665497862689e-05, + "loss": 0.7774, + "step": 4455 + }, + { + "epoch": 0.7, + "grad_norm": 14.243367416713927, + "learning_rate": 1.793562893753516e-05, + "loss": 0.8724, + "step": 4456 + }, + { + "epoch": 0.7, + "grad_norm": 14.285394610783442, + "learning_rate": 1.7934602670758786e-05, + "loss": 0.7699, + "step": 4457 + }, + { + "epoch": 0.7, + "grad_norm": 21.909584013113722, + "learning_rate": 1.7933576178326952e-05, + "loss": 0.7427, + "step": 4458 + }, + { + "epoch": 0.7, + "grad_norm": 24.405535239329332, + "learning_rate": 1.7932549460268856e-05, + "loss": 0.8373, + "step": 4459 + }, + { + "epoch": 0.7, + "grad_norm": 15.428939132891044, + "learning_rate": 1.7931522516613688e-05, + "loss": 0.6933, + "step": 4460 + }, + { + "epoch": 0.7, + "grad_norm": 21.68097958812621, + "learning_rate": 1.7930495347390665e-05, + "loss": 0.8012, + "step": 4461 + }, + { + "epoch": 0.7, + "grad_norm": 20.95167454486104, + "learning_rate": 1.792946795262899e-05, + "loss": 0.756, + "step": 4462 + }, + { + "epoch": 0.7, + "grad_norm": 25.440243898720556, + "learning_rate": 1.7928440332357885e-05, + "loss": 0.8529, + "step": 4463 + }, + { + "epoch": 0.7, + "grad_norm": 15.31729846039767, + "learning_rate": 1.7927412486606578e-05, + "loss": 0.7239, + "step": 4464 + }, + { + "epoch": 0.7, + "grad_norm": 15.547319519934067, + "learning_rate": 1.7926384415404298e-05, + "loss": 0.7182, + "step": 4465 + }, + { + "epoch": 0.7, + "grad_norm": 19.749350629917217, + "learning_rate": 1.792535611878028e-05, + "loss": 
0.8117, + "step": 4466 + }, + { + "epoch": 0.7, + "grad_norm": 19.079585312386996, + "learning_rate": 1.792432759676377e-05, + "loss": 0.6649, + "step": 4467 + }, + { + "epoch": 0.7, + "grad_norm": 26.710050192703306, + "learning_rate": 1.792329884938402e-05, + "loss": 0.785, + "step": 4468 + }, + { + "epoch": 0.7, + "grad_norm": 12.633466685949253, + "learning_rate": 1.7922269876670282e-05, + "loss": 0.8311, + "step": 4469 + }, + { + "epoch": 0.7, + "grad_norm": 18.615603397575217, + "learning_rate": 1.7921240678651823e-05, + "loss": 0.7499, + "step": 4470 + }, + { + "epoch": 0.7, + "grad_norm": 14.947341529955526, + "learning_rate": 1.7920211255357918e-05, + "loss": 0.7379, + "step": 4471 + }, + { + "epoch": 0.7, + "grad_norm": 14.11531134437023, + "learning_rate": 1.7919181606817836e-05, + "loss": 0.821, + "step": 4472 + }, + { + "epoch": 0.7, + "grad_norm": 27.957349265596267, + "learning_rate": 1.791815173306086e-05, + "loss": 0.8164, + "step": 4473 + }, + { + "epoch": 0.7, + "grad_norm": 18.651135891057883, + "learning_rate": 1.7917121634116284e-05, + "loss": 0.8202, + "step": 4474 + }, + { + "epoch": 0.7, + "grad_norm": 18.631555885950647, + "learning_rate": 1.7916091310013396e-05, + "loss": 0.8001, + "step": 4475 + }, + { + "epoch": 0.7, + "grad_norm": 20.496797906155138, + "learning_rate": 1.7915060760781505e-05, + "loss": 0.7961, + "step": 4476 + }, + { + "epoch": 0.7, + "grad_norm": 17.339943354218075, + "learning_rate": 1.7914029986449916e-05, + "loss": 0.7516, + "step": 4477 + }, + { + "epoch": 0.7, + "grad_norm": 18.98117911121771, + "learning_rate": 1.791299898704794e-05, + "loss": 0.7873, + "step": 4478 + }, + { + "epoch": 0.7, + "grad_norm": 20.449011366411074, + "learning_rate": 1.7911967762604905e-05, + "loss": 0.7652, + "step": 4479 + }, + { + "epoch": 0.7, + "grad_norm": 19.8835194482637, + "learning_rate": 1.7910936313150137e-05, + "loss": 0.8287, + "step": 4480 + }, + { + "epoch": 0.7, + "grad_norm": 16.320333878908084, + "learning_rate": 1.7909904638712963e-05, + "loss": 0.7859, + "step": 4481 + }, + { + "epoch": 0.7, + "grad_norm": 16.95907926110049, + "learning_rate": 1.790887273932273e-05, + "loss": 0.8135, + "step": 4482 + }, + { + "epoch": 0.7, + "grad_norm": 18.83241715398812, + "learning_rate": 1.7907840615008787e-05, + "loss": 0.7302, + "step": 4483 + }, + { + "epoch": 0.7, + "grad_norm": 14.654744967972178, + "learning_rate": 1.790680826580048e-05, + "loss": 0.7286, + "step": 4484 + }, + { + "epoch": 0.7, + "grad_norm": 17.764342193790647, + "learning_rate": 1.7905775691727168e-05, + "loss": 0.8639, + "step": 4485 + }, + { + "epoch": 0.7, + "grad_norm": 18.18982506913134, + "learning_rate": 1.7904742892818225e-05, + "loss": 0.8222, + "step": 4486 + }, + { + "epoch": 0.7, + "grad_norm": 20.311592257830757, + "learning_rate": 1.7903709869103018e-05, + "loss": 0.7469, + "step": 4487 + }, + { + "epoch": 0.7, + "grad_norm": 17.281143091571263, + "learning_rate": 1.7902676620610925e-05, + "loss": 0.7178, + "step": 4488 + }, + { + "epoch": 0.7, + "grad_norm": 20.40254109882613, + "learning_rate": 1.7901643147371328e-05, + "loss": 0.8199, + "step": 4489 + }, + { + "epoch": 0.7, + "grad_norm": 17.87214955697475, + "learning_rate": 1.7900609449413626e-05, + "loss": 0.8005, + "step": 4490 + }, + { + "epoch": 0.7, + "grad_norm": 17.232942132534024, + "learning_rate": 1.7899575526767214e-05, + "loss": 0.7777, + "step": 4491 + }, + { + "epoch": 0.7, + "grad_norm": 16.33288877982655, + "learning_rate": 1.7898541379461494e-05, + "loss": 0.7689, + "step": 4492 + }, + { + 
"epoch": 0.7, + "grad_norm": 21.66610174799004, + "learning_rate": 1.7897507007525873e-05, + "loss": 0.7062, + "step": 4493 + }, + { + "epoch": 0.7, + "grad_norm": 15.251050478095443, + "learning_rate": 1.789647241098978e-05, + "loss": 0.8165, + "step": 4494 + }, + { + "epoch": 0.7, + "grad_norm": 23.139680554668924, + "learning_rate": 1.7895437589882627e-05, + "loss": 0.814, + "step": 4495 + }, + { + "epoch": 0.7, + "grad_norm": 19.387941302325746, + "learning_rate": 1.7894402544233846e-05, + "loss": 0.8096, + "step": 4496 + }, + { + "epoch": 0.7, + "grad_norm": 21.50296622152407, + "learning_rate": 1.789336727407288e-05, + "loss": 0.7492, + "step": 4497 + }, + { + "epoch": 0.7, + "grad_norm": 17.119100092542315, + "learning_rate": 1.7892331779429164e-05, + "loss": 0.6993, + "step": 4498 + }, + { + "epoch": 0.7, + "grad_norm": 14.749492042491251, + "learning_rate": 1.789129606033215e-05, + "loss": 0.7714, + "step": 4499 + }, + { + "epoch": 0.7, + "grad_norm": 28.973994530305404, + "learning_rate": 1.789026011681129e-05, + "loss": 0.8197, + "step": 4500 + }, + { + "epoch": 0.7, + "grad_norm": 28.70944916662741, + "learning_rate": 1.7889223948896046e-05, + "loss": 0.7195, + "step": 4501 + }, + { + "epoch": 0.7, + "grad_norm": 22.64568218703805, + "learning_rate": 1.788818755661589e-05, + "loss": 0.7309, + "step": 4502 + }, + { + "epoch": 0.7, + "grad_norm": 15.431507958326039, + "learning_rate": 1.7887150940000296e-05, + "loss": 0.722, + "step": 4503 + }, + { + "epoch": 0.7, + "grad_norm": 12.803628546058574, + "learning_rate": 1.788611409907874e-05, + "loss": 0.7555, + "step": 4504 + }, + { + "epoch": 0.7, + "grad_norm": 15.174416813826769, + "learning_rate": 1.7885077033880714e-05, + "loss": 0.6677, + "step": 4505 + }, + { + "epoch": 0.7, + "grad_norm": 18.519255028776655, + "learning_rate": 1.788403974443571e-05, + "loss": 0.7111, + "step": 4506 + }, + { + "epoch": 0.7, + "grad_norm": 18.945335141197912, + "learning_rate": 1.788300223077323e-05, + "loss": 0.7666, + "step": 4507 + }, + { + "epoch": 0.7, + "grad_norm": 30.26109322944965, + "learning_rate": 1.7881964492922774e-05, + "loss": 0.7937, + "step": 4508 + }, + { + "epoch": 0.7, + "grad_norm": 20.18380947710191, + "learning_rate": 1.7880926530913863e-05, + "loss": 0.7532, + "step": 4509 + }, + { + "epoch": 0.7, + "grad_norm": 13.311576581211785, + "learning_rate": 1.7879888344776008e-05, + "loss": 0.7534, + "step": 4510 + }, + { + "epoch": 0.7, + "grad_norm": 12.425904345629691, + "learning_rate": 1.787884993453874e-05, + "loss": 0.7623, + "step": 4511 + }, + { + "epoch": 0.7, + "grad_norm": 22.30198697712689, + "learning_rate": 1.7877811300231587e-05, + "loss": 0.7699, + "step": 4512 + }, + { + "epoch": 0.7, + "grad_norm": 20.377571393475165, + "learning_rate": 1.7876772441884093e-05, + "loss": 0.7981, + "step": 4513 + }, + { + "epoch": 0.71, + "grad_norm": 16.58466978839017, + "learning_rate": 1.7875733359525793e-05, + "loss": 0.8134, + "step": 4514 + }, + { + "epoch": 0.71, + "grad_norm": 26.9714839462326, + "learning_rate": 1.7874694053186246e-05, + "loss": 0.8238, + "step": 4515 + }, + { + "epoch": 0.71, + "grad_norm": 17.728531718417514, + "learning_rate": 1.7873654522895006e-05, + "loss": 0.8276, + "step": 4516 + }, + { + "epoch": 0.71, + "grad_norm": 17.510589664470007, + "learning_rate": 1.7872614768681638e-05, + "loss": 0.8108, + "step": 4517 + }, + { + "epoch": 0.71, + "grad_norm": 20.24816512107083, + "learning_rate": 1.7871574790575713e-05, + "loss": 0.8559, + "step": 4518 + }, + { + "epoch": 0.71, + "grad_norm": 
24.814463942464236, + "learning_rate": 1.7870534588606804e-05, + "loss": 0.8228, + "step": 4519 + }, + { + "epoch": 0.71, + "grad_norm": 21.126995740101478, + "learning_rate": 1.7869494162804492e-05, + "loss": 0.7478, + "step": 4520 + }, + { + "epoch": 0.71, + "grad_norm": 21.225116719811254, + "learning_rate": 1.7868453513198375e-05, + "loss": 0.7609, + "step": 4521 + }, + { + "epoch": 0.71, + "grad_norm": 22.06792835085101, + "learning_rate": 1.786741263981804e-05, + "loss": 0.724, + "step": 4522 + }, + { + "epoch": 0.71, + "grad_norm": 25.883283735753, + "learning_rate": 1.7866371542693094e-05, + "loss": 0.7266, + "step": 4523 + }, + { + "epoch": 0.71, + "grad_norm": 21.20164873034085, + "learning_rate": 1.786533022185314e-05, + "loss": 0.7889, + "step": 4524 + }, + { + "epoch": 0.71, + "grad_norm": 30.722225829817987, + "learning_rate": 1.78642886773278e-05, + "loss": 0.8972, + "step": 4525 + }, + { + "epoch": 0.71, + "grad_norm": 15.33453424711936, + "learning_rate": 1.7863246909146688e-05, + "loss": 0.7599, + "step": 4526 + }, + { + "epoch": 0.71, + "grad_norm": 14.690056751794366, + "learning_rate": 1.786220491733943e-05, + "loss": 0.7537, + "step": 4527 + }, + { + "epoch": 0.71, + "grad_norm": 17.930520901810457, + "learning_rate": 1.7861162701935672e-05, + "loss": 0.7096, + "step": 4528 + }, + { + "epoch": 0.71, + "grad_norm": 19.71866787237424, + "learning_rate": 1.7860120262965038e-05, + "loss": 0.7578, + "step": 4529 + }, + { + "epoch": 0.71, + "grad_norm": 27.19743173764247, + "learning_rate": 1.7859077600457184e-05, + "loss": 0.7474, + "step": 4530 + }, + { + "epoch": 0.71, + "grad_norm": 14.763969372970255, + "learning_rate": 1.7858034714441764e-05, + "loss": 0.7404, + "step": 4531 + }, + { + "epoch": 0.71, + "grad_norm": 21.289749176430142, + "learning_rate": 1.7856991604948428e-05, + "loss": 0.689, + "step": 4532 + }, + { + "epoch": 0.71, + "grad_norm": 21.31819391225716, + "learning_rate": 1.7855948272006848e-05, + "loss": 0.7654, + "step": 4533 + }, + { + "epoch": 0.71, + "grad_norm": 18.21917595537159, + "learning_rate": 1.7854904715646697e-05, + "loss": 0.7669, + "step": 4534 + }, + { + "epoch": 0.71, + "grad_norm": 18.18341888851737, + "learning_rate": 1.785386093589765e-05, + "loss": 0.7457, + "step": 4535 + }, + { + "epoch": 0.71, + "grad_norm": 22.213289459037025, + "learning_rate": 1.7852816932789392e-05, + "loss": 0.85, + "step": 4536 + }, + { + "epoch": 0.71, + "grad_norm": 14.041481135578346, + "learning_rate": 1.7851772706351614e-05, + "loss": 0.7068, + "step": 4537 + }, + { + "epoch": 0.71, + "grad_norm": 20.176504845582247, + "learning_rate": 1.785072825661401e-05, + "loss": 0.7969, + "step": 4538 + }, + { + "epoch": 0.71, + "grad_norm": 15.287263358936663, + "learning_rate": 1.7849683583606293e-05, + "loss": 0.7074, + "step": 4539 + }, + { + "epoch": 0.71, + "grad_norm": 12.276838393556602, + "learning_rate": 1.7848638687358164e-05, + "loss": 0.6571, + "step": 4540 + }, + { + "epoch": 0.71, + "grad_norm": 18.05278429987945, + "learning_rate": 1.7847593567899344e-05, + "loss": 0.7659, + "step": 4541 + }, + { + "epoch": 0.71, + "grad_norm": 15.298943066125636, + "learning_rate": 1.7846548225259553e-05, + "loss": 0.7017, + "step": 4542 + }, + { + "epoch": 0.71, + "grad_norm": 13.871929997530419, + "learning_rate": 1.7845502659468522e-05, + "loss": 0.7217, + "step": 4543 + }, + { + "epoch": 0.71, + "grad_norm": 11.840309628514953, + "learning_rate": 1.784445687055598e-05, + "loss": 0.7104, + "step": 4544 + }, + { + "epoch": 0.71, + "grad_norm": 
16.54040551532769, + "learning_rate": 1.7843410858551674e-05, + "loss": 0.6895, + "step": 4545 + }, + { + "epoch": 0.71, + "grad_norm": 23.885929705359707, + "learning_rate": 1.7842364623485356e-05, + "loss": 0.8651, + "step": 4546 + }, + { + "epoch": 0.71, + "grad_norm": 19.742020265499644, + "learning_rate": 1.784131816538677e-05, + "loss": 0.8096, + "step": 4547 + }, + { + "epoch": 0.71, + "grad_norm": 18.32213060918708, + "learning_rate": 1.7840271484285687e-05, + "loss": 0.7455, + "step": 4548 + }, + { + "epoch": 0.71, + "grad_norm": 16.294218046096653, + "learning_rate": 1.783922458021187e-05, + "loss": 0.7091, + "step": 4549 + }, + { + "epoch": 0.71, + "grad_norm": 12.886128865663519, + "learning_rate": 1.783817745319509e-05, + "loss": 0.693, + "step": 4550 + }, + { + "epoch": 0.71, + "grad_norm": 16.304773134884957, + "learning_rate": 1.783713010326513e-05, + "loss": 0.7035, + "step": 4551 + }, + { + "epoch": 0.71, + "grad_norm": 26.510328769850396, + "learning_rate": 1.7836082530451772e-05, + "loss": 0.8074, + "step": 4552 + }, + { + "epoch": 0.71, + "grad_norm": 12.01065139370198, + "learning_rate": 1.783503473478481e-05, + "loss": 0.6972, + "step": 4553 + }, + { + "epoch": 0.71, + "grad_norm": 19.235472748525677, + "learning_rate": 1.7833986716294046e-05, + "loss": 0.7201, + "step": 4554 + }, + { + "epoch": 0.71, + "grad_norm": 14.144588477717663, + "learning_rate": 1.783293847500928e-05, + "loss": 0.7781, + "step": 4555 + }, + { + "epoch": 0.71, + "grad_norm": 14.747317467243905, + "learning_rate": 1.783189001096033e-05, + "loss": 0.6988, + "step": 4556 + }, + { + "epoch": 0.71, + "grad_norm": 13.169701209631764, + "learning_rate": 1.7830841324177006e-05, + "loss": 0.7843, + "step": 4557 + }, + { + "epoch": 0.71, + "grad_norm": 17.938530348425555, + "learning_rate": 1.7829792414689135e-05, + "loss": 0.7218, + "step": 4558 + }, + { + "epoch": 0.71, + "grad_norm": 13.487101280375038, + "learning_rate": 1.782874328252655e-05, + "loss": 0.7515, + "step": 4559 + }, + { + "epoch": 0.71, + "grad_norm": 27.69630725295481, + "learning_rate": 1.7827693927719086e-05, + "loss": 0.8335, + "step": 4560 + }, + { + "epoch": 0.71, + "grad_norm": 13.951567465727559, + "learning_rate": 1.7826644350296588e-05, + "loss": 0.7452, + "step": 4561 + }, + { + "epoch": 0.71, + "grad_norm": 23.347824798761902, + "learning_rate": 1.7825594550288898e-05, + "loss": 0.7723, + "step": 4562 + }, + { + "epoch": 0.71, + "grad_norm": 22.25889149156369, + "learning_rate": 1.782454452772588e-05, + "loss": 0.719, + "step": 4563 + }, + { + "epoch": 0.71, + "grad_norm": 17.542686348007297, + "learning_rate": 1.782349428263739e-05, + "loss": 0.7425, + "step": 4564 + }, + { + "epoch": 0.71, + "grad_norm": 17.523572289632572, + "learning_rate": 1.7822443815053305e-05, + "loss": 0.848, + "step": 4565 + }, + { + "epoch": 0.71, + "grad_norm": 19.67062176082445, + "learning_rate": 1.782139312500349e-05, + "loss": 0.7283, + "step": 4566 + }, + { + "epoch": 0.71, + "grad_norm": 21.117571666016065, + "learning_rate": 1.7820342212517826e-05, + "loss": 0.714, + "step": 4567 + }, + { + "epoch": 0.71, + "grad_norm": 18.19939445039066, + "learning_rate": 1.7819291077626212e-05, + "loss": 0.8413, + "step": 4568 + }, + { + "epoch": 0.71, + "grad_norm": 15.50637241374402, + "learning_rate": 1.781823972035853e-05, + "loss": 0.8397, + "step": 4569 + }, + { + "epoch": 0.71, + "grad_norm": 13.80469806867194, + "learning_rate": 1.7817188140744682e-05, + "loss": 0.7116, + "step": 4570 + }, + { + "epoch": 0.71, + "grad_norm": 
23.981495182970207, + "learning_rate": 1.7816136338814577e-05, + "loss": 0.7207, + "step": 4571 + }, + { + "epoch": 0.71, + "grad_norm": 15.554049720698364, + "learning_rate": 1.781508431459813e-05, + "loss": 0.7284, + "step": 4572 + }, + { + "epoch": 0.71, + "grad_norm": 22.198146139689523, + "learning_rate": 1.7814032068125254e-05, + "loss": 0.8335, + "step": 4573 + }, + { + "epoch": 0.71, + "grad_norm": 13.120963710368788, + "learning_rate": 1.781297959942588e-05, + "loss": 0.7382, + "step": 4574 + }, + { + "epoch": 0.71, + "grad_norm": 18.32251608311107, + "learning_rate": 1.7811926908529934e-05, + "loss": 0.7331, + "step": 4575 + }, + { + "epoch": 0.71, + "grad_norm": 22.51043225540429, + "learning_rate": 1.781087399546736e-05, + "loss": 0.7078, + "step": 4576 + }, + { + "epoch": 0.71, + "grad_norm": 16.47989384276147, + "learning_rate": 1.7809820860268096e-05, + "loss": 0.7913, + "step": 4577 + }, + { + "epoch": 0.72, + "grad_norm": 15.634376014463157, + "learning_rate": 1.7808767502962094e-05, + "loss": 0.72, + "step": 4578 + }, + { + "epoch": 0.72, + "grad_norm": 22.849754926480593, + "learning_rate": 1.7807713923579313e-05, + "loss": 0.6631, + "step": 4579 + }, + { + "epoch": 0.72, + "grad_norm": 20.006859729488482, + "learning_rate": 1.7806660122149714e-05, + "loss": 0.7598, + "step": 4580 + }, + { + "epoch": 0.72, + "grad_norm": 14.412321651336214, + "learning_rate": 1.7805606098703273e-05, + "loss": 0.699, + "step": 4581 + }, + { + "epoch": 0.72, + "grad_norm": 24.106188192033642, + "learning_rate": 1.7804551853269957e-05, + "loss": 0.7818, + "step": 4582 + }, + { + "epoch": 0.72, + "grad_norm": 24.032803466648353, + "learning_rate": 1.7803497385879752e-05, + "loss": 0.8391, + "step": 4583 + }, + { + "epoch": 0.72, + "grad_norm": 18.95640167772439, + "learning_rate": 1.7802442696562646e-05, + "loss": 0.6838, + "step": 4584 + }, + { + "epoch": 0.72, + "grad_norm": 17.11034547457215, + "learning_rate": 1.7801387785348637e-05, + "loss": 0.7524, + "step": 4585 + }, + { + "epoch": 0.72, + "grad_norm": 14.478175168142961, + "learning_rate": 1.780033265226772e-05, + "loss": 0.6551, + "step": 4586 + }, + { + "epoch": 0.72, + "grad_norm": 13.932257244091351, + "learning_rate": 1.7799277297349908e-05, + "loss": 0.6978, + "step": 4587 + }, + { + "epoch": 0.72, + "grad_norm": 18.86384159850776, + "learning_rate": 1.7798221720625205e-05, + "loss": 0.6796, + "step": 4588 + }, + { + "epoch": 0.72, + "grad_norm": 14.21155762223803, + "learning_rate": 1.779716592212365e-05, + "loss": 0.7782, + "step": 4589 + }, + { + "epoch": 0.72, + "grad_norm": 18.086023444183674, + "learning_rate": 1.7796109901875246e-05, + "loss": 0.7912, + "step": 4590 + }, + { + "epoch": 0.72, + "grad_norm": 16.905189667765715, + "learning_rate": 1.7795053659910044e-05, + "loss": 0.822, + "step": 4591 + }, + { + "epoch": 0.72, + "grad_norm": 17.280428154633523, + "learning_rate": 1.7793997196258074e-05, + "loss": 0.8209, + "step": 4592 + }, + { + "epoch": 0.72, + "grad_norm": 14.618783952315601, + "learning_rate": 1.7792940510949383e-05, + "loss": 0.7516, + "step": 4593 + }, + { + "epoch": 0.72, + "grad_norm": 26.049443477568158, + "learning_rate": 1.7791883604014023e-05, + "loss": 0.7825, + "step": 4594 + }, + { + "epoch": 0.72, + "grad_norm": 24.4910808696523, + "learning_rate": 1.7790826475482047e-05, + "loss": 0.812, + "step": 4595 + }, + { + "epoch": 0.72, + "grad_norm": 20.17971809472022, + "learning_rate": 1.7789769125383528e-05, + "loss": 0.7576, + "step": 4596 + }, + { + "epoch": 0.72, + "grad_norm": 
14.355798234337353, + "learning_rate": 1.7788711553748533e-05, + "loss": 0.7372, + "step": 4597 + }, + { + "epoch": 0.72, + "grad_norm": 17.416064380501787, + "learning_rate": 1.7787653760607134e-05, + "loss": 0.7403, + "step": 4598 + }, + { + "epoch": 0.72, + "grad_norm": 27.892054013099575, + "learning_rate": 1.778659574598942e-05, + "loss": 0.7828, + "step": 4599 + }, + { + "epoch": 0.72, + "grad_norm": 18.44548862474979, + "learning_rate": 1.7785537509925478e-05, + "loss": 0.721, + "step": 4600 + }, + { + "epoch": 0.72, + "grad_norm": 11.884567290745816, + "learning_rate": 1.77844790524454e-05, + "loss": 0.7116, + "step": 4601 + }, + { + "epoch": 0.72, + "grad_norm": 30.758239248976416, + "learning_rate": 1.77834203735793e-05, + "loss": 0.7893, + "step": 4602 + }, + { + "epoch": 0.72, + "grad_norm": 28.94109036209768, + "learning_rate": 1.778236147335727e-05, + "loss": 0.6847, + "step": 4603 + }, + { + "epoch": 0.72, + "grad_norm": 12.537867110602596, + "learning_rate": 1.7781302351809437e-05, + "loss": 0.691, + "step": 4604 + }, + { + "epoch": 0.72, + "grad_norm": 17.825930645070017, + "learning_rate": 1.7780243008965915e-05, + "loss": 0.652, + "step": 4605 + }, + { + "epoch": 0.72, + "grad_norm": 15.45764941718774, + "learning_rate": 1.7779183444856833e-05, + "loss": 0.7485, + "step": 4606 + }, + { + "epoch": 0.72, + "grad_norm": 26.59329265824312, + "learning_rate": 1.7778123659512326e-05, + "loss": 0.7516, + "step": 4607 + }, + { + "epoch": 0.72, + "grad_norm": 17.52337750132507, + "learning_rate": 1.777706365296253e-05, + "loss": 0.6939, + "step": 4608 + }, + { + "epoch": 0.72, + "grad_norm": 18.87398120162674, + "learning_rate": 1.7776003425237592e-05, + "loss": 0.6667, + "step": 4609 + }, + { + "epoch": 0.72, + "grad_norm": 15.049088492545417, + "learning_rate": 1.7774942976367668e-05, + "loss": 0.7756, + "step": 4610 + }, + { + "epoch": 0.72, + "grad_norm": 18.44443025623951, + "learning_rate": 1.7773882306382913e-05, + "loss": 0.6874, + "step": 4611 + }, + { + "epoch": 0.72, + "grad_norm": 24.064901833986912, + "learning_rate": 1.7772821415313493e-05, + "loss": 0.7053, + "step": 4612 + }, + { + "epoch": 0.72, + "grad_norm": 15.36597587180189, + "learning_rate": 1.777176030318958e-05, + "loss": 0.7054, + "step": 4613 + }, + { + "epoch": 0.72, + "grad_norm": 17.365403532037313, + "learning_rate": 1.777069897004135e-05, + "loss": 0.7461, + "step": 4614 + }, + { + "epoch": 0.72, + "grad_norm": 24.22883278371235, + "learning_rate": 1.7769637415898982e-05, + "loss": 0.6973, + "step": 4615 + }, + { + "epoch": 0.72, + "grad_norm": 9.706301176988791, + "learning_rate": 1.776857564079268e-05, + "loss": 0.7002, + "step": 4616 + }, + { + "epoch": 0.72, + "grad_norm": 29.914397830832083, + "learning_rate": 1.7767513644752624e-05, + "loss": 0.8495, + "step": 4617 + }, + { + "epoch": 0.72, + "grad_norm": 16.378578762289482, + "learning_rate": 1.7766451427809026e-05, + "loss": 0.6789, + "step": 4618 + }, + { + "epoch": 0.72, + "grad_norm": 20.225273640111205, + "learning_rate": 1.7765388989992093e-05, + "loss": 0.7316, + "step": 4619 + }, + { + "epoch": 0.72, + "grad_norm": 23.14733087145608, + "learning_rate": 1.776432633133204e-05, + "loss": 0.7645, + "step": 4620 + }, + { + "epoch": 0.72, + "grad_norm": 16.36780120500211, + "learning_rate": 1.776326345185909e-05, + "loss": 0.7196, + "step": 4621 + }, + { + "epoch": 0.72, + "grad_norm": 18.967696973710073, + "learning_rate": 1.7762200351603465e-05, + "loss": 0.7228, + "step": 4622 + }, + { + "epoch": 0.72, + "grad_norm": 17.57145546333111, 
+ "learning_rate": 1.7761137030595406e-05, + "loss": 0.7221, + "step": 4623 + }, + { + "epoch": 0.72, + "grad_norm": 16.224495556378056, + "learning_rate": 1.776007348886515e-05, + "loss": 0.7173, + "step": 4624 + }, + { + "epoch": 0.72, + "grad_norm": 13.778919065555696, + "learning_rate": 1.775900972644294e-05, + "loss": 0.7658, + "step": 4625 + }, + { + "epoch": 0.72, + "grad_norm": 14.968809065971032, + "learning_rate": 1.7757945743359033e-05, + "loss": 0.7949, + "step": 4626 + }, + { + "epoch": 0.72, + "grad_norm": 20.726221838120367, + "learning_rate": 1.775688153964369e-05, + "loss": 0.8367, + "step": 4627 + }, + { + "epoch": 0.72, + "grad_norm": 16.838458730080607, + "learning_rate": 1.775581711532717e-05, + "loss": 0.7264, + "step": 4628 + }, + { + "epoch": 0.72, + "grad_norm": 17.315085156088408, + "learning_rate": 1.7754752470439755e-05, + "loss": 0.752, + "step": 4629 + }, + { + "epoch": 0.72, + "grad_norm": 12.852962364319751, + "learning_rate": 1.7753687605011707e-05, + "loss": 0.763, + "step": 4630 + }, + { + "epoch": 0.72, + "grad_norm": 15.063412276120008, + "learning_rate": 1.7752622519073327e-05, + "loss": 0.7749, + "step": 4631 + }, + { + "epoch": 0.72, + "grad_norm": 17.92250672784313, + "learning_rate": 1.7751557212654893e-05, + "loss": 0.7456, + "step": 4632 + }, + { + "epoch": 0.72, + "grad_norm": 17.54124495642337, + "learning_rate": 1.7750491685786708e-05, + "loss": 0.8077, + "step": 4633 + }, + { + "epoch": 0.72, + "grad_norm": 15.289543972708875, + "learning_rate": 1.7749425938499073e-05, + "loss": 0.7602, + "step": 4634 + }, + { + "epoch": 0.72, + "grad_norm": 30.640675451080558, + "learning_rate": 1.77483599708223e-05, + "loss": 0.7679, + "step": 4635 + }, + { + "epoch": 0.72, + "grad_norm": 18.525962541647345, + "learning_rate": 1.7747293782786697e-05, + "loss": 0.8328, + "step": 4636 + }, + { + "epoch": 0.72, + "grad_norm": 14.983633664461529, + "learning_rate": 1.7746227374422597e-05, + "loss": 0.7545, + "step": 4637 + }, + { + "epoch": 0.72, + "grad_norm": 29.179535389699073, + "learning_rate": 1.7745160745760317e-05, + "loss": 0.7919, + "step": 4638 + }, + { + "epoch": 0.72, + "grad_norm": 21.79949564065988, + "learning_rate": 1.7744093896830196e-05, + "loss": 0.7646, + "step": 4639 + }, + { + "epoch": 0.72, + "grad_norm": 26.00539878640754, + "learning_rate": 1.7743026827662578e-05, + "loss": 0.7665, + "step": 4640 + }, + { + "epoch": 0.72, + "grad_norm": 20.839287454604097, + "learning_rate": 1.7741959538287807e-05, + "loss": 0.7677, + "step": 4641 + }, + { + "epoch": 0.73, + "grad_norm": 24.868581926904834, + "learning_rate": 1.7740892028736233e-05, + "loss": 0.7512, + "step": 4642 + }, + { + "epoch": 0.73, + "grad_norm": 21.953774704601337, + "learning_rate": 1.7739824299038217e-05, + "loss": 0.8183, + "step": 4643 + }, + { + "epoch": 0.73, + "grad_norm": 13.993655319477101, + "learning_rate": 1.773875634922413e-05, + "loss": 0.7837, + "step": 4644 + }, + { + "epoch": 0.73, + "grad_norm": 18.672320268022368, + "learning_rate": 1.7737688179324334e-05, + "loss": 0.789, + "step": 4645 + }, + { + "epoch": 0.73, + "grad_norm": 17.89504836425524, + "learning_rate": 1.773661978936922e-05, + "loss": 0.7179, + "step": 4646 + }, + { + "epoch": 0.73, + "grad_norm": 18.98533202926206, + "learning_rate": 1.7735551179389157e-05, + "loss": 0.7097, + "step": 4647 + }, + { + "epoch": 0.73, + "grad_norm": 18.47458291286288, + "learning_rate": 1.7734482349414547e-05, + "loss": 0.7666, + "step": 4648 + }, + { + "epoch": 0.73, + "grad_norm": 18.43875464033775, + 
"learning_rate": 1.7733413299475787e-05, + "loss": 0.8031, + "step": 4649 + }, + { + "epoch": 0.73, + "grad_norm": 17.87916699853392, + "learning_rate": 1.7732344029603273e-05, + "loss": 0.7293, + "step": 4650 + }, + { + "epoch": 0.73, + "grad_norm": 19.626097062298985, + "learning_rate": 1.773127453982742e-05, + "loss": 0.9176, + "step": 4651 + }, + { + "epoch": 0.73, + "grad_norm": 19.283764035368232, + "learning_rate": 1.7730204830178638e-05, + "loss": 0.7812, + "step": 4652 + }, + { + "epoch": 0.73, + "grad_norm": 21.78481579672387, + "learning_rate": 1.7729134900687354e-05, + "loss": 0.7556, + "step": 4653 + }, + { + "epoch": 0.73, + "grad_norm": 25.059231830123245, + "learning_rate": 1.7728064751383997e-05, + "loss": 0.784, + "step": 4654 + }, + { + "epoch": 0.73, + "grad_norm": 22.44174358356745, + "learning_rate": 1.7726994382299e-05, + "loss": 0.758, + "step": 4655 + }, + { + "epoch": 0.73, + "grad_norm": 24.80834578322753, + "learning_rate": 1.7725923793462798e-05, + "loss": 0.8446, + "step": 4656 + }, + { + "epoch": 0.73, + "grad_norm": 14.215338581736098, + "learning_rate": 1.7724852984905844e-05, + "loss": 0.6924, + "step": 4657 + }, + { + "epoch": 0.73, + "grad_norm": 13.856078966059064, + "learning_rate": 1.7723781956658593e-05, + "loss": 0.7276, + "step": 4658 + }, + { + "epoch": 0.73, + "grad_norm": 21.507102864187885, + "learning_rate": 1.77227107087515e-05, + "loss": 0.7842, + "step": 4659 + }, + { + "epoch": 0.73, + "grad_norm": 19.76527188781026, + "learning_rate": 1.7721639241215034e-05, + "loss": 0.7244, + "step": 4660 + }, + { + "epoch": 0.73, + "grad_norm": 17.368469322252448, + "learning_rate": 1.772056755407966e-05, + "loss": 0.7384, + "step": 4661 + }, + { + "epoch": 0.73, + "grad_norm": 22.35696192981688, + "learning_rate": 1.771949564737587e-05, + "loss": 0.769, + "step": 4662 + }, + { + "epoch": 0.73, + "grad_norm": 13.98192081147761, + "learning_rate": 1.771842352113413e-05, + "loss": 0.7962, + "step": 4663 + }, + { + "epoch": 0.73, + "grad_norm": 19.801141597518985, + "learning_rate": 1.7717351175384948e-05, + "loss": 0.7734, + "step": 4664 + }, + { + "epoch": 0.73, + "grad_norm": 28.0424575461503, + "learning_rate": 1.7716278610158806e-05, + "loss": 0.8066, + "step": 4665 + }, + { + "epoch": 0.73, + "grad_norm": 13.600696584361431, + "learning_rate": 1.7715205825486218e-05, + "loss": 0.7815, + "step": 4666 + }, + { + "epoch": 0.73, + "grad_norm": 21.76127266345328, + "learning_rate": 1.771413282139769e-05, + "loss": 0.7115, + "step": 4667 + }, + { + "epoch": 0.73, + "grad_norm": 24.362943337772816, + "learning_rate": 1.7713059597923735e-05, + "loss": 0.7848, + "step": 4668 + }, + { + "epoch": 0.73, + "grad_norm": 21.439576638543528, + "learning_rate": 1.771198615509488e-05, + "loss": 0.756, + "step": 4669 + }, + { + "epoch": 0.73, + "grad_norm": 14.119958743836833, + "learning_rate": 1.771091249294165e-05, + "loss": 0.6665, + "step": 4670 + }, + { + "epoch": 0.73, + "grad_norm": 19.20309282801442, + "learning_rate": 1.7709838611494577e-05, + "loss": 0.7591, + "step": 4671 + }, + { + "epoch": 0.73, + "grad_norm": 18.794715676459475, + "learning_rate": 1.7708764510784205e-05, + "loss": 0.725, + "step": 4672 + }, + { + "epoch": 0.73, + "grad_norm": 18.402110985595833, + "learning_rate": 1.770769019084108e-05, + "loss": 0.6992, + "step": 4673 + }, + { + "epoch": 0.73, + "grad_norm": 33.52548618675277, + "learning_rate": 1.7706615651695754e-05, + "loss": 0.8105, + "step": 4674 + }, + { + "epoch": 0.73, + "grad_norm": 12.938989561018383, + "learning_rate": 
1.7705540893378787e-05, + "loss": 0.7831, + "step": 4675 + }, + { + "epoch": 0.73, + "grad_norm": 15.43314064498119, + "learning_rate": 1.7704465915920745e-05, + "loss": 0.815, + "step": 4676 + }, + { + "epoch": 0.73, + "grad_norm": 20.685658191031653, + "learning_rate": 1.77033907193522e-05, + "loss": 0.7443, + "step": 4677 + }, + { + "epoch": 0.73, + "grad_norm": 12.049858843156683, + "learning_rate": 1.770231530370373e-05, + "loss": 0.7456, + "step": 4678 + }, + { + "epoch": 0.73, + "grad_norm": 15.708947774163132, + "learning_rate": 1.7701239669005916e-05, + "loss": 0.8426, + "step": 4679 + }, + { + "epoch": 0.73, + "grad_norm": 19.484571068946565, + "learning_rate": 1.7700163815289353e-05, + "loss": 0.724, + "step": 4680 + }, + { + "epoch": 0.73, + "grad_norm": 17.628196660609422, + "learning_rate": 1.7699087742584636e-05, + "loss": 0.7379, + "step": 4681 + }, + { + "epoch": 0.73, + "grad_norm": 26.81608904812798, + "learning_rate": 1.769801145092237e-05, + "loss": 0.7381, + "step": 4682 + }, + { + "epoch": 0.73, + "grad_norm": 19.802430812466564, + "learning_rate": 1.7696934940333155e-05, + "loss": 0.8898, + "step": 4683 + }, + { + "epoch": 0.73, + "grad_norm": 27.12962424616862, + "learning_rate": 1.7695858210847615e-05, + "loss": 0.8014, + "step": 4684 + }, + { + "epoch": 0.73, + "grad_norm": 18.1015214166352, + "learning_rate": 1.769478126249637e-05, + "loss": 0.7168, + "step": 4685 + }, + { + "epoch": 0.73, + "grad_norm": 20.520628887616834, + "learning_rate": 1.769370409531005e-05, + "loss": 0.7749, + "step": 4686 + }, + { + "epoch": 0.73, + "grad_norm": 20.73359294572111, + "learning_rate": 1.7692626709319283e-05, + "loss": 0.7404, + "step": 4687 + }, + { + "epoch": 0.73, + "grad_norm": 26.08046606022863, + "learning_rate": 1.769154910455471e-05, + "loss": 0.7131, + "step": 4688 + }, + { + "epoch": 0.73, + "grad_norm": 19.21366844840311, + "learning_rate": 1.7690471281046982e-05, + "loss": 0.7684, + "step": 4689 + }, + { + "epoch": 0.73, + "grad_norm": 15.744214996514318, + "learning_rate": 1.768939323882675e-05, + "loss": 0.7194, + "step": 4690 + }, + { + "epoch": 0.73, + "grad_norm": 24.950315766400433, + "learning_rate": 1.7688314977924674e-05, + "loss": 0.7763, + "step": 4691 + }, + { + "epoch": 0.73, + "grad_norm": 21.612398469106186, + "learning_rate": 1.768723649837142e-05, + "loss": 0.744, + "step": 4692 + }, + { + "epoch": 0.73, + "grad_norm": 12.773618786659869, + "learning_rate": 1.7686157800197652e-05, + "loss": 0.7515, + "step": 4693 + }, + { + "epoch": 0.73, + "grad_norm": 21.05816044164395, + "learning_rate": 1.7685078883434054e-05, + "loss": 0.744, + "step": 4694 + }, + { + "epoch": 0.73, + "grad_norm": 19.406283108288324, + "learning_rate": 1.7683999748111306e-05, + "loss": 0.831, + "step": 4695 + }, + { + "epoch": 0.73, + "grad_norm": 18.617075782901715, + "learning_rate": 1.7682920394260102e-05, + "loss": 0.7737, + "step": 4696 + }, + { + "epoch": 0.73, + "grad_norm": 24.916444636448617, + "learning_rate": 1.768184082191114e-05, + "loss": 0.7374, + "step": 4697 + }, + { + "epoch": 0.73, + "grad_norm": 15.094554564256278, + "learning_rate": 1.7680761031095113e-05, + "loss": 0.6707, + "step": 4698 + }, + { + "epoch": 0.73, + "grad_norm": 16.64583492298938, + "learning_rate": 1.7679681021842738e-05, + "loss": 0.7114, + "step": 4699 + }, + { + "epoch": 0.73, + "grad_norm": 15.201698725408875, + "learning_rate": 1.767860079418473e-05, + "loss": 0.7649, + "step": 4700 + }, + { + "epoch": 0.73, + "grad_norm": 27.219623840705353, + "learning_rate": 
1.7677520348151805e-05, + "loss": 0.798, + "step": 4701 + }, + { + "epoch": 0.73, + "grad_norm": 18.068731173647688, + "learning_rate": 1.767643968377469e-05, + "loss": 0.7095, + "step": 4702 + }, + { + "epoch": 0.73, + "grad_norm": 17.347959692297604, + "learning_rate": 1.7675358801084122e-05, + "loss": 0.6746, + "step": 4703 + }, + { + "epoch": 0.73, + "grad_norm": 20.983950511220705, + "learning_rate": 1.767427770011084e-05, + "loss": 0.7586, + "step": 4704 + }, + { + "epoch": 0.73, + "grad_norm": 22.607776450884366, + "learning_rate": 1.767319638088559e-05, + "loss": 0.8404, + "step": 4705 + }, + { + "epoch": 0.74, + "grad_norm": 18.93052476896135, + "learning_rate": 1.7672114843439127e-05, + "loss": 0.7683, + "step": 4706 + }, + { + "epoch": 0.74, + "grad_norm": 25.206016053666044, + "learning_rate": 1.76710330878022e-05, + "loss": 0.8398, + "step": 4707 + }, + { + "epoch": 0.74, + "grad_norm": 16.813816645625376, + "learning_rate": 1.7669951114005584e-05, + "loss": 0.7192, + "step": 4708 + }, + { + "epoch": 0.74, + "grad_norm": 16.644285375681033, + "learning_rate": 1.766886892208004e-05, + "loss": 0.7189, + "step": 4709 + }, + { + "epoch": 0.74, + "grad_norm": 18.0204235644049, + "learning_rate": 1.7667786512056352e-05, + "loss": 0.8104, + "step": 4710 + }, + { + "epoch": 0.74, + "grad_norm": 16.506676078951678, + "learning_rate": 1.7666703883965305e-05, + "loss": 0.7505, + "step": 4711 + }, + { + "epoch": 0.74, + "grad_norm": 19.526104483491093, + "learning_rate": 1.766562103783768e-05, + "loss": 0.8487, + "step": 4712 + }, + { + "epoch": 0.74, + "grad_norm": 16.166371296326044, + "learning_rate": 1.766453797370428e-05, + "loss": 0.8902, + "step": 4713 + }, + { + "epoch": 0.74, + "grad_norm": 16.098966153656235, + "learning_rate": 1.76634546915959e-05, + "loss": 0.8063, + "step": 4714 + }, + { + "epoch": 0.74, + "grad_norm": 16.266027946684616, + "learning_rate": 1.7662371191543355e-05, + "loss": 0.7278, + "step": 4715 + }, + { + "epoch": 0.74, + "grad_norm": 28.156781562832112, + "learning_rate": 1.7661287473577457e-05, + "loss": 0.7446, + "step": 4716 + }, + { + "epoch": 0.74, + "grad_norm": 22.59327000389072, + "learning_rate": 1.766020353772902e-05, + "loss": 0.8598, + "step": 4717 + }, + { + "epoch": 0.74, + "grad_norm": 22.87572390552177, + "learning_rate": 1.765911938402888e-05, + "loss": 0.7998, + "step": 4718 + }, + { + "epoch": 0.74, + "grad_norm": 23.873782269145302, + "learning_rate": 1.7658035012507863e-05, + "loss": 0.757, + "step": 4719 + }, + { + "epoch": 0.74, + "grad_norm": 20.234898673833182, + "learning_rate": 1.7656950423196807e-05, + "loss": 0.7828, + "step": 4720 + }, + { + "epoch": 0.74, + "grad_norm": 18.95689105247914, + "learning_rate": 1.7655865616126564e-05, + "loss": 0.7172, + "step": 4721 + }, + { + "epoch": 0.74, + "grad_norm": 19.574414710725613, + "learning_rate": 1.7654780591327983e-05, + "loss": 0.7864, + "step": 4722 + }, + { + "epoch": 0.74, + "grad_norm": 14.966824203599673, + "learning_rate": 1.7653695348831915e-05, + "loss": 0.7277, + "step": 4723 + }, + { + "epoch": 0.74, + "grad_norm": 21.66688520859272, + "learning_rate": 1.7652609888669234e-05, + "loss": 0.6598, + "step": 4724 + }, + { + "epoch": 0.74, + "grad_norm": 19.200763538571433, + "learning_rate": 1.76515242108708e-05, + "loss": 0.8736, + "step": 4725 + }, + { + "epoch": 0.74, + "grad_norm": 19.356492148095814, + "learning_rate": 1.7650438315467494e-05, + "loss": 0.7293, + "step": 4726 + }, + { + "epoch": 0.74, + "grad_norm": 15.214323658311407, + "learning_rate": 
1.7649352202490198e-05, + "loss": 0.7564, + "step": 4727 + }, + { + "epoch": 0.74, + "grad_norm": 20.56004466670747, + "learning_rate": 1.7648265871969803e-05, + "loss": 0.768, + "step": 4728 + }, + { + "epoch": 0.74, + "grad_norm": 22.702671486249205, + "learning_rate": 1.76471793239372e-05, + "loss": 0.7815, + "step": 4729 + }, + { + "epoch": 0.74, + "grad_norm": 24.945495801921826, + "learning_rate": 1.7646092558423288e-05, + "loss": 0.8258, + "step": 4730 + }, + { + "epoch": 0.74, + "grad_norm": 19.69115327136434, + "learning_rate": 1.7645005575458977e-05, + "loss": 0.7168, + "step": 4731 + }, + { + "epoch": 0.74, + "grad_norm": 3.0180134369763225, + "learning_rate": 1.764391837507518e-05, + "loss": 0.6225, + "step": 4732 + }, + { + "epoch": 0.74, + "grad_norm": 14.709364330858422, + "learning_rate": 1.7642830957302815e-05, + "loss": 0.7384, + "step": 4733 + }, + { + "epoch": 0.74, + "grad_norm": 13.751418677749186, + "learning_rate": 1.7641743322172812e-05, + "loss": 0.7155, + "step": 4734 + }, + { + "epoch": 0.74, + "grad_norm": 29.490568879027517, + "learning_rate": 1.7640655469716096e-05, + "loss": 0.8194, + "step": 4735 + }, + { + "epoch": 0.74, + "grad_norm": 17.936220906422324, + "learning_rate": 1.7639567399963607e-05, + "loss": 0.7671, + "step": 4736 + }, + { + "epoch": 0.74, + "grad_norm": 20.913759602721246, + "learning_rate": 1.7638479112946294e-05, + "loss": 0.7645, + "step": 4737 + }, + { + "epoch": 0.74, + "grad_norm": 24.82486069972475, + "learning_rate": 1.7637390608695102e-05, + "loss": 0.8512, + "step": 4738 + }, + { + "epoch": 0.74, + "grad_norm": 19.809928657989634, + "learning_rate": 1.7636301887240987e-05, + "loss": 0.7897, + "step": 4739 + }, + { + "epoch": 0.74, + "grad_norm": 16.471959093668424, + "learning_rate": 1.7635212948614915e-05, + "loss": 0.6799, + "step": 4740 + }, + { + "epoch": 0.74, + "grad_norm": 18.685886680214303, + "learning_rate": 1.7634123792847854e-05, + "loss": 0.7134, + "step": 4741 + }, + { + "epoch": 0.74, + "grad_norm": 19.1684793871101, + "learning_rate": 1.7633034419970775e-05, + "loss": 0.796, + "step": 4742 + }, + { + "epoch": 0.74, + "grad_norm": 19.809886761536184, + "learning_rate": 1.7631944830014663e-05, + "loss": 0.7088, + "step": 4743 + }, + { + "epoch": 0.74, + "grad_norm": 14.96983445565587, + "learning_rate": 1.763085502301051e-05, + "loss": 0.6873, + "step": 4744 + }, + { + "epoch": 0.74, + "grad_norm": 23.76149004516207, + "learning_rate": 1.76297649989893e-05, + "loss": 0.7967, + "step": 4745 + }, + { + "epoch": 0.74, + "grad_norm": 19.463363868330706, + "learning_rate": 1.7628674757982037e-05, + "loss": 0.8489, + "step": 4746 + }, + { + "epoch": 0.74, + "grad_norm": 15.672467960747674, + "learning_rate": 1.7627584300019727e-05, + "loss": 0.6577, + "step": 4747 + }, + { + "epoch": 0.74, + "grad_norm": 25.878883422841493, + "learning_rate": 1.7626493625133377e-05, + "loss": 0.7587, + "step": 4748 + }, + { + "epoch": 0.74, + "grad_norm": 20.673150511219983, + "learning_rate": 1.7625402733354015e-05, + "loss": 0.8432, + "step": 4749 + }, + { + "epoch": 0.74, + "grad_norm": 18.784277057915933, + "learning_rate": 1.7624311624712657e-05, + "loss": 0.6969, + "step": 4750 + }, + { + "epoch": 0.74, + "grad_norm": 18.754912266190452, + "learning_rate": 1.762322029924034e-05, + "loss": 0.8593, + "step": 4751 + }, + { + "epoch": 0.74, + "grad_norm": 20.59320934205328, + "learning_rate": 1.7622128756968095e-05, + "loss": 0.9065, + "step": 4752 + }, + { + "epoch": 0.74, + "grad_norm": 16.267190841368144, + "learning_rate": 
1.762103699792697e-05, + "loss": 0.6885, + "step": 4753 + }, + { + "epoch": 0.74, + "grad_norm": 20.573975020037427, + "learning_rate": 1.7619945022148008e-05, + "loss": 0.7677, + "step": 4754 + }, + { + "epoch": 0.74, + "grad_norm": 15.137598308973239, + "learning_rate": 1.7618852829662264e-05, + "loss": 0.7774, + "step": 4755 + }, + { + "epoch": 0.74, + "grad_norm": 18.130612327390836, + "learning_rate": 1.7617760420500806e-05, + "loss": 0.7532, + "step": 4756 + }, + { + "epoch": 0.74, + "grad_norm": 19.903173049405172, + "learning_rate": 1.7616667794694697e-05, + "loss": 0.7776, + "step": 4757 + }, + { + "epoch": 0.74, + "grad_norm": 21.739616063788986, + "learning_rate": 1.7615574952275012e-05, + "loss": 0.8543, + "step": 4758 + }, + { + "epoch": 0.74, + "grad_norm": 13.746722267982145, + "learning_rate": 1.761448189327283e-05, + "loss": 0.7372, + "step": 4759 + }, + { + "epoch": 0.74, + "grad_norm": 17.923825014880418, + "learning_rate": 1.761338861771924e-05, + "loss": 0.7715, + "step": 4760 + }, + { + "epoch": 0.74, + "grad_norm": 24.34899404771331, + "learning_rate": 1.761229512564533e-05, + "loss": 0.8064, + "step": 4761 + }, + { + "epoch": 0.74, + "grad_norm": 15.528797054775271, + "learning_rate": 1.7611201417082196e-05, + "loss": 0.7003, + "step": 4762 + }, + { + "epoch": 0.74, + "grad_norm": 22.192845139928203, + "learning_rate": 1.761010749206095e-05, + "loss": 0.8327, + "step": 4763 + }, + { + "epoch": 0.74, + "grad_norm": 18.279638844594935, + "learning_rate": 1.7609013350612696e-05, + "loss": 0.6791, + "step": 4764 + }, + { + "epoch": 0.74, + "grad_norm": 20.375197317112875, + "learning_rate": 1.7607918992768554e-05, + "loss": 0.8303, + "step": 4765 + }, + { + "epoch": 0.74, + "grad_norm": 30.204422041251554, + "learning_rate": 1.7606824418559648e-05, + "loss": 0.8284, + "step": 4766 + }, + { + "epoch": 0.74, + "grad_norm": 15.738304781049953, + "learning_rate": 1.7605729628017108e-05, + "loss": 0.7289, + "step": 4767 + }, + { + "epoch": 0.74, + "grad_norm": 23.424944146577246, + "learning_rate": 1.760463462117206e-05, + "loss": 0.7679, + "step": 4768 + }, + { + "epoch": 0.74, + "grad_norm": 18.6465918798675, + "learning_rate": 1.7603539398055658e-05, + "loss": 0.7444, + "step": 4769 + }, + { + "epoch": 0.75, + "grad_norm": 25.41213395370621, + "learning_rate": 1.760244395869904e-05, + "loss": 0.7664, + "step": 4770 + }, + { + "epoch": 0.75, + "grad_norm": 18.23403088172201, + "learning_rate": 1.7601348303133364e-05, + "loss": 0.8096, + "step": 4771 + }, + { + "epoch": 0.75, + "grad_norm": 19.55227978095691, + "learning_rate": 1.760025243138979e-05, + "loss": 0.767, + "step": 4772 + }, + { + "epoch": 0.75, + "grad_norm": 25.0073222339007, + "learning_rate": 1.7599156343499482e-05, + "loss": 0.8258, + "step": 4773 + }, + { + "epoch": 0.75, + "grad_norm": 17.34440598750834, + "learning_rate": 1.7598060039493613e-05, + "loss": 0.6841, + "step": 4774 + }, + { + "epoch": 0.75, + "grad_norm": 16.792987881152875, + "learning_rate": 1.759696351940336e-05, + "loss": 0.8004, + "step": 4775 + }, + { + "epoch": 0.75, + "grad_norm": 13.556558176506321, + "learning_rate": 1.759586678325991e-05, + "loss": 0.7607, + "step": 4776 + }, + { + "epoch": 0.75, + "grad_norm": 15.958002349925415, + "learning_rate": 1.7594769831094452e-05, + "loss": 0.7425, + "step": 4777 + }, + { + "epoch": 0.75, + "grad_norm": 13.902998572006434, + "learning_rate": 1.759367266293818e-05, + "loss": 0.7837, + "step": 4778 + }, + { + "epoch": 0.75, + "grad_norm": 18.127361967484298, + "learning_rate": 
1.7592575278822304e-05, + "loss": 0.7381, + "step": 4779 + }, + { + "epoch": 0.75, + "grad_norm": 19.620887369463702, + "learning_rate": 1.7591477678778027e-05, + "loss": 0.8289, + "step": 4780 + }, + { + "epoch": 0.75, + "grad_norm": 17.46171538137161, + "learning_rate": 1.7590379862836565e-05, + "loss": 0.7354, + "step": 4781 + }, + { + "epoch": 0.75, + "grad_norm": 16.731126361314573, + "learning_rate": 1.758928183102914e-05, + "loss": 0.7713, + "step": 4782 + }, + { + "epoch": 0.75, + "grad_norm": 16.345787646147702, + "learning_rate": 1.7588183583386982e-05, + "loss": 0.7797, + "step": 4783 + }, + { + "epoch": 0.75, + "grad_norm": 18.611504364860586, + "learning_rate": 1.7587085119941318e-05, + "loss": 0.7315, + "step": 4784 + }, + { + "epoch": 0.75, + "grad_norm": 24.601739066305928, + "learning_rate": 1.758598644072339e-05, + "loss": 0.678, + "step": 4785 + }, + { + "epoch": 0.75, + "grad_norm": 19.040475408622893, + "learning_rate": 1.7584887545764452e-05, + "loss": 0.6183, + "step": 4786 + }, + { + "epoch": 0.75, + "grad_norm": 18.977348198296745, + "learning_rate": 1.7583788435095746e-05, + "loss": 0.7622, + "step": 4787 + }, + { + "epoch": 0.75, + "grad_norm": 13.658938106779566, + "learning_rate": 1.758268910874853e-05, + "loss": 0.7202, + "step": 4788 + }, + { + "epoch": 0.75, + "grad_norm": 19.002904135147123, + "learning_rate": 1.7581589566754076e-05, + "loss": 0.6931, + "step": 4789 + }, + { + "epoch": 0.75, + "grad_norm": 16.676365673898395, + "learning_rate": 1.7580489809143648e-05, + "loss": 0.6883, + "step": 4790 + }, + { + "epoch": 0.75, + "grad_norm": 14.98718536532281, + "learning_rate": 1.7579389835948525e-05, + "loss": 0.7581, + "step": 4791 + }, + { + "epoch": 0.75, + "grad_norm": 18.523750962171704, + "learning_rate": 1.757828964719999e-05, + "loss": 0.8242, + "step": 4792 + }, + { + "epoch": 0.75, + "grad_norm": 18.675163396059236, + "learning_rate": 1.7577189242929325e-05, + "loss": 0.6967, + "step": 4793 + }, + { + "epoch": 0.75, + "grad_norm": 12.879273057364701, + "learning_rate": 1.7576088623167838e-05, + "loss": 0.7332, + "step": 4794 + }, + { + "epoch": 0.75, + "grad_norm": 19.176967468374464, + "learning_rate": 1.7574987787946817e-05, + "loss": 0.87, + "step": 4795 + }, + { + "epoch": 0.75, + "grad_norm": 20.13362650544997, + "learning_rate": 1.7573886737297575e-05, + "loss": 0.7196, + "step": 4796 + }, + { + "epoch": 0.75, + "grad_norm": 21.67366215048563, + "learning_rate": 1.757278547125143e-05, + "loss": 0.8146, + "step": 4797 + }, + { + "epoch": 0.75, + "grad_norm": 22.81321570893647, + "learning_rate": 1.7571683989839693e-05, + "loss": 0.7774, + "step": 4798 + }, + { + "epoch": 0.75, + "grad_norm": 17.753856486805756, + "learning_rate": 1.757058229309369e-05, + "loss": 0.7166, + "step": 4799 + }, + { + "epoch": 0.75, + "grad_norm": 23.24128792472426, + "learning_rate": 1.7569480381044758e-05, + "loss": 0.7229, + "step": 4800 + }, + { + "epoch": 0.75, + "grad_norm": 15.485896989409332, + "learning_rate": 1.756837825372423e-05, + "loss": 0.7756, + "step": 4801 + }, + { + "epoch": 0.75, + "grad_norm": 16.792856612255914, + "learning_rate": 1.7567275911163454e-05, + "loss": 0.7584, + "step": 4802 + }, + { + "epoch": 0.75, + "grad_norm": 20.3441967373095, + "learning_rate": 1.7566173353393778e-05, + "loss": 0.7485, + "step": 4803 + }, + { + "epoch": 0.75, + "grad_norm": 20.569963981785115, + "learning_rate": 1.7565070580446556e-05, + "loss": 0.6857, + "step": 4804 + }, + { + "epoch": 0.75, + "grad_norm": 16.88349512548097, + "learning_rate": 
1.7563967592353152e-05, + "loss": 0.852, + "step": 4805 + }, + { + "epoch": 0.75, + "grad_norm": 12.22333099633842, + "learning_rate": 1.7562864389144936e-05, + "loss": 0.7922, + "step": 4806 + }, + { + "epoch": 0.75, + "grad_norm": 22.25885159312121, + "learning_rate": 1.756176097085328e-05, + "loss": 0.7922, + "step": 4807 + }, + { + "epoch": 0.75, + "grad_norm": 16.06409000577669, + "learning_rate": 1.7560657337509567e-05, + "loss": 0.826, + "step": 4808 + }, + { + "epoch": 0.75, + "grad_norm": 21.984912386875923, + "learning_rate": 1.755955348914518e-05, + "loss": 0.6804, + "step": 4809 + }, + { + "epoch": 0.75, + "grad_norm": 29.425783566771443, + "learning_rate": 1.7558449425791515e-05, + "loss": 0.7631, + "step": 4810 + }, + { + "epoch": 0.75, + "grad_norm": 16.734042760078, + "learning_rate": 1.7557345147479968e-05, + "loss": 0.7474, + "step": 4811 + }, + { + "epoch": 0.75, + "grad_norm": 33.94893108040185, + "learning_rate": 1.755624065424195e-05, + "loss": 0.7507, + "step": 4812 + }, + { + "epoch": 0.75, + "grad_norm": 17.545682669575285, + "learning_rate": 1.7555135946108866e-05, + "loss": 0.6758, + "step": 4813 + }, + { + "epoch": 0.75, + "grad_norm": 26.479266283301598, + "learning_rate": 1.7554031023112134e-05, + "loss": 0.8218, + "step": 4814 + }, + { + "epoch": 0.75, + "grad_norm": 13.218782299196178, + "learning_rate": 1.755292588528318e-05, + "loss": 0.7695, + "step": 4815 + }, + { + "epoch": 0.75, + "grad_norm": 24.94502200905209, + "learning_rate": 1.755182053265343e-05, + "loss": 0.7807, + "step": 4816 + }, + { + "epoch": 0.75, + "grad_norm": 17.34075820996803, + "learning_rate": 1.7550714965254325e-05, + "loss": 0.7112, + "step": 4817 + }, + { + "epoch": 0.75, + "grad_norm": 15.989275127157576, + "learning_rate": 1.7549609183117305e-05, + "loss": 0.7459, + "step": 4818 + }, + { + "epoch": 0.75, + "grad_norm": 15.15197623845428, + "learning_rate": 1.7548503186273812e-05, + "loss": 0.7195, + "step": 4819 + }, + { + "epoch": 0.75, + "grad_norm": 27.064862393196414, + "learning_rate": 1.7547396974755307e-05, + "loss": 0.7602, + "step": 4820 + }, + { + "epoch": 0.75, + "grad_norm": 19.261950742693383, + "learning_rate": 1.7546290548593242e-05, + "loss": 0.7356, + "step": 4821 + }, + { + "epoch": 0.75, + "grad_norm": 31.706864058009234, + "learning_rate": 1.7545183907819094e-05, + "loss": 0.7768, + "step": 4822 + }, + { + "epoch": 0.75, + "grad_norm": 21.51565134792058, + "learning_rate": 1.7544077052464327e-05, + "loss": 0.7456, + "step": 4823 + }, + { + "epoch": 0.75, + "grad_norm": 12.91997191231309, + "learning_rate": 1.7542969982560424e-05, + "loss": 0.7127, + "step": 4824 + }, + { + "epoch": 0.75, + "grad_norm": 13.831934449967568, + "learning_rate": 1.754186269813886e-05, + "loss": 0.6598, + "step": 4825 + }, + { + "epoch": 0.75, + "grad_norm": 20.17699316988014, + "learning_rate": 1.7540755199231145e-05, + "loss": 0.7775, + "step": 4826 + }, + { + "epoch": 0.75, + "grad_norm": 19.772209228806588, + "learning_rate": 1.7539647485868753e-05, + "loss": 0.6638, + "step": 4827 + }, + { + "epoch": 0.75, + "grad_norm": 14.126662285225219, + "learning_rate": 1.75385395580832e-05, + "loss": 0.8396, + "step": 4828 + }, + { + "epoch": 0.75, + "grad_norm": 23.78153301652085, + "learning_rate": 1.7537431415905995e-05, + "loss": 0.7789, + "step": 4829 + }, + { + "epoch": 0.75, + "grad_norm": 13.539708284450187, + "learning_rate": 1.7536323059368644e-05, + "loss": 0.8224, + "step": 4830 + }, + { + "epoch": 0.75, + "grad_norm": 15.345940725830468, + "learning_rate": 
1.7535214488502677e-05, + "loss": 0.6338, + "step": 4831 + }, + { + "epoch": 0.75, + "grad_norm": 22.021739634453333, + "learning_rate": 1.753410570333962e-05, + "loss": 0.7438, + "step": 4832 + }, + { + "epoch": 0.75, + "grad_norm": 20.167628832709756, + "learning_rate": 1.7532996703911002e-05, + "loss": 0.7499, + "step": 4833 + }, + { + "epoch": 0.76, + "grad_norm": 17.612009739025503, + "learning_rate": 1.7531887490248364e-05, + "loss": 0.7419, + "step": 4834 + }, + { + "epoch": 0.76, + "grad_norm": 17.86547834964394, + "learning_rate": 1.7530778062383253e-05, + "loss": 0.7748, + "step": 4835 + }, + { + "epoch": 0.76, + "grad_norm": 17.43928118037929, + "learning_rate": 1.752966842034722e-05, + "loss": 0.7467, + "step": 4836 + }, + { + "epoch": 0.76, + "grad_norm": 15.462477024758247, + "learning_rate": 1.752855856417182e-05, + "loss": 0.7031, + "step": 4837 + }, + { + "epoch": 0.76, + "grad_norm": 31.15741089797868, + "learning_rate": 1.752744849388862e-05, + "loss": 0.8256, + "step": 4838 + }, + { + "epoch": 0.76, + "grad_norm": 11.898801636149011, + "learning_rate": 1.7526338209529184e-05, + "loss": 0.7411, + "step": 4839 + }, + { + "epoch": 0.76, + "grad_norm": 18.635275707134667, + "learning_rate": 1.7525227711125098e-05, + "loss": 0.7647, + "step": 4840 + }, + { + "epoch": 0.76, + "grad_norm": 18.956492013688138, + "learning_rate": 1.7524116998707933e-05, + "loss": 0.6666, + "step": 4841 + }, + { + "epoch": 0.76, + "grad_norm": 15.724817775681023, + "learning_rate": 1.7523006072309286e-05, + "loss": 0.7866, + "step": 4842 + }, + { + "epoch": 0.76, + "grad_norm": 30.70384077252155, + "learning_rate": 1.7521894931960742e-05, + "loss": 0.8051, + "step": 4843 + }, + { + "epoch": 0.76, + "grad_norm": 15.734744386778676, + "learning_rate": 1.7520783577693912e-05, + "loss": 0.7635, + "step": 4844 + }, + { + "epoch": 0.76, + "grad_norm": 13.164976594243466, + "learning_rate": 1.7519672009540394e-05, + "loss": 0.6252, + "step": 4845 + }, + { + "epoch": 0.76, + "grad_norm": 25.772837018721546, + "learning_rate": 1.7518560227531806e-05, + "loss": 0.7612, + "step": 4846 + }, + { + "epoch": 0.76, + "grad_norm": 21.238284836671102, + "learning_rate": 1.7517448231699758e-05, + "loss": 0.6688, + "step": 4847 + }, + { + "epoch": 0.76, + "grad_norm": 29.834792932520127, + "learning_rate": 1.7516336022075883e-05, + "loss": 0.7549, + "step": 4848 + }, + { + "epoch": 0.76, + "grad_norm": 20.412196228438255, + "learning_rate": 1.751522359869181e-05, + "loss": 0.7305, + "step": 4849 + }, + { + "epoch": 0.76, + "grad_norm": 12.79203448662454, + "learning_rate": 1.751411096157917e-05, + "loss": 0.7108, + "step": 4850 + }, + { + "epoch": 0.76, + "grad_norm": 13.71893000538938, + "learning_rate": 1.7512998110769613e-05, + "loss": 0.782, + "step": 4851 + }, + { + "epoch": 0.76, + "grad_norm": 15.70798477915841, + "learning_rate": 1.7511885046294782e-05, + "loss": 0.7218, + "step": 4852 + }, + { + "epoch": 0.76, + "grad_norm": 18.088597666822793, + "learning_rate": 1.751077176818634e-05, + "loss": 0.7287, + "step": 4853 + }, + { + "epoch": 0.76, + "grad_norm": 17.38114358112564, + "learning_rate": 1.7509658276475936e-05, + "loss": 0.7659, + "step": 4854 + }, + { + "epoch": 0.76, + "grad_norm": 22.80805134834718, + "learning_rate": 1.750854457119525e-05, + "loss": 0.8347, + "step": 4855 + }, + { + "epoch": 0.76, + "grad_norm": 19.59051237553145, + "learning_rate": 1.7507430652375943e-05, + "loss": 0.7803, + "step": 4856 + }, + { + "epoch": 0.76, + "grad_norm": 21.52014780433687, + "learning_rate": 
1.7506316520049704e-05, + "loss": 0.6656, + "step": 4857 + }, + { + "epoch": 0.76, + "grad_norm": 19.592379901597482, + "learning_rate": 1.750520217424821e-05, + "loss": 0.7752, + "step": 4858 + }, + { + "epoch": 0.76, + "grad_norm": 19.259305076713584, + "learning_rate": 1.750408761500316e-05, + "loss": 0.7609, + "step": 4859 + }, + { + "epoch": 0.76, + "grad_norm": 18.62497678217625, + "learning_rate": 1.7502972842346248e-05, + "loss": 0.738, + "step": 4860 + }, + { + "epoch": 0.76, + "grad_norm": 16.26723370305961, + "learning_rate": 1.7501857856309176e-05, + "loss": 0.7739, + "step": 4861 + }, + { + "epoch": 0.76, + "grad_norm": 33.64519298872203, + "learning_rate": 1.7500742656923658e-05, + "loss": 0.7796, + "step": 4862 + }, + { + "epoch": 0.76, + "grad_norm": 20.616099507779385, + "learning_rate": 1.7499627244221403e-05, + "loss": 0.7832, + "step": 4863 + }, + { + "epoch": 0.76, + "grad_norm": 15.383847361173087, + "learning_rate": 1.7498511618234134e-05, + "loss": 0.7442, + "step": 4864 + }, + { + "epoch": 0.76, + "grad_norm": 13.867646029776123, + "learning_rate": 1.7497395778993585e-05, + "loss": 0.6778, + "step": 4865 + }, + { + "epoch": 0.76, + "grad_norm": 24.667337178461022, + "learning_rate": 1.749627972653149e-05, + "loss": 0.7646, + "step": 4866 + }, + { + "epoch": 0.76, + "grad_norm": 14.585988483431796, + "learning_rate": 1.749516346087958e-05, + "loss": 0.6406, + "step": 4867 + }, + { + "epoch": 0.76, + "grad_norm": 16.86307876541342, + "learning_rate": 1.7494046982069604e-05, + "loss": 0.7236, + "step": 4868 + }, + { + "epoch": 0.76, + "grad_norm": 15.67129283852544, + "learning_rate": 1.749293029013332e-05, + "loss": 0.7443, + "step": 4869 + }, + { + "epoch": 0.76, + "grad_norm": 16.84446944078611, + "learning_rate": 1.749181338510248e-05, + "loss": 0.7368, + "step": 4870 + }, + { + "epoch": 0.76, + "grad_norm": 20.127623775734374, + "learning_rate": 1.7490696267008845e-05, + "loss": 0.6603, + "step": 4871 + }, + { + "epoch": 0.76, + "grad_norm": 19.399211668374484, + "learning_rate": 1.74895789358842e-05, + "loss": 0.6618, + "step": 4872 + }, + { + "epoch": 0.76, + "grad_norm": 25.01011514832804, + "learning_rate": 1.7488461391760304e-05, + "loss": 0.8076, + "step": 4873 + }, + { + "epoch": 0.76, + "grad_norm": 19.397100702130807, + "learning_rate": 1.748734363466895e-05, + "loss": 0.6843, + "step": 4874 + }, + { + "epoch": 0.76, + "grad_norm": 20.45345986152622, + "learning_rate": 1.748622566464192e-05, + "loss": 0.7117, + "step": 4875 + }, + { + "epoch": 0.76, + "grad_norm": 19.40187664018827, + "learning_rate": 1.7485107481711014e-05, + "loss": 0.6721, + "step": 4876 + }, + { + "epoch": 0.76, + "grad_norm": 33.539235193912916, + "learning_rate": 1.7483989085908027e-05, + "loss": 0.8621, + "step": 4877 + }, + { + "epoch": 0.76, + "grad_norm": 20.342273169264676, + "learning_rate": 1.748287047726477e-05, + "loss": 0.7188, + "step": 4878 + }, + { + "epoch": 0.76, + "grad_norm": 25.199967304182035, + "learning_rate": 1.7481751655813056e-05, + "loss": 0.7636, + "step": 4879 + }, + { + "epoch": 0.76, + "grad_norm": 15.462494589111566, + "learning_rate": 1.74806326215847e-05, + "loss": 0.8107, + "step": 4880 + }, + { + "epoch": 0.76, + "grad_norm": 16.340582240645666, + "learning_rate": 1.747951337461153e-05, + "loss": 0.5871, + "step": 4881 + }, + { + "epoch": 0.76, + "grad_norm": 29.550130129219415, + "learning_rate": 1.747839391492537e-05, + "loss": 0.7369, + "step": 4882 + }, + { + "epoch": 0.76, + "grad_norm": 19.411405450608832, + "learning_rate": 
1.7477274242558064e-05, + "loss": 0.7911, + "step": 4883 + }, + { + "epoch": 0.76, + "grad_norm": 17.689339783440087, + "learning_rate": 1.7476154357541455e-05, + "loss": 0.8549, + "step": 4884 + }, + { + "epoch": 0.76, + "grad_norm": 17.88998864887052, + "learning_rate": 1.747503425990739e-05, + "loss": 0.8061, + "step": 4885 + }, + { + "epoch": 0.76, + "grad_norm": 14.532968591767025, + "learning_rate": 1.747391394968772e-05, + "loss": 0.7671, + "step": 4886 + }, + { + "epoch": 0.76, + "grad_norm": 14.844411140575536, + "learning_rate": 1.7472793426914313e-05, + "loss": 0.7971, + "step": 4887 + }, + { + "epoch": 0.76, + "grad_norm": 16.986351136859465, + "learning_rate": 1.7471672691619028e-05, + "loss": 0.704, + "step": 4888 + }, + { + "epoch": 0.76, + "grad_norm": 21.54876558019364, + "learning_rate": 1.7470551743833747e-05, + "loss": 0.7295, + "step": 4889 + }, + { + "epoch": 0.76, + "grad_norm": 20.1104018557556, + "learning_rate": 1.746943058359034e-05, + "loss": 0.8514, + "step": 4890 + }, + { + "epoch": 0.76, + "grad_norm": 18.648603297160975, + "learning_rate": 1.74683092109207e-05, + "loss": 0.7439, + "step": 4891 + }, + { + "epoch": 0.76, + "grad_norm": 21.10021493246341, + "learning_rate": 1.7467187625856713e-05, + "loss": 0.7427, + "step": 4892 + }, + { + "epoch": 0.76, + "grad_norm": 20.892852968974157, + "learning_rate": 1.7466065828430277e-05, + "loss": 0.7124, + "step": 4893 + }, + { + "epoch": 0.76, + "grad_norm": 18.42721033852216, + "learning_rate": 1.7464943818673302e-05, + "loss": 0.7044, + "step": 4894 + }, + { + "epoch": 0.76, + "grad_norm": 22.740172822091314, + "learning_rate": 1.7463821596617686e-05, + "loss": 0.8082, + "step": 4895 + }, + { + "epoch": 0.76, + "grad_norm": 21.424625993600092, + "learning_rate": 1.7462699162295353e-05, + "loss": 0.7107, + "step": 4896 + }, + { + "epoch": 0.76, + "grad_norm": 14.28094236211586, + "learning_rate": 1.746157651573822e-05, + "loss": 0.7964, + "step": 4897 + }, + { + "epoch": 0.77, + "grad_norm": 20.424930058330958, + "learning_rate": 1.7460453656978217e-05, + "loss": 0.7084, + "step": 4898 + }, + { + "epoch": 0.77, + "grad_norm": 12.99073109747475, + "learning_rate": 1.7459330586047273e-05, + "loss": 0.6947, + "step": 4899 + }, + { + "epoch": 0.77, + "grad_norm": 18.261263635849495, + "learning_rate": 1.7458207302977333e-05, + "loss": 0.7776, + "step": 4900 + }, + { + "epoch": 0.77, + "grad_norm": 21.060236984020634, + "learning_rate": 1.7457083807800342e-05, + "loss": 0.8081, + "step": 4901 + }, + { + "epoch": 0.77, + "grad_norm": 18.611709005554005, + "learning_rate": 1.745596010054825e-05, + "loss": 0.8036, + "step": 4902 + }, + { + "epoch": 0.77, + "grad_norm": 18.718040050439416, + "learning_rate": 1.745483618125301e-05, + "loss": 0.7284, + "step": 4903 + }, + { + "epoch": 0.77, + "grad_norm": 32.20314612982518, + "learning_rate": 1.745371204994659e-05, + "loss": 0.8614, + "step": 4904 + }, + { + "epoch": 0.77, + "grad_norm": 22.952299269384426, + "learning_rate": 1.7452587706660967e-05, + "loss": 0.7266, + "step": 4905 + }, + { + "epoch": 0.77, + "grad_norm": 15.38758085808322, + "learning_rate": 1.7451463151428104e-05, + "loss": 0.8472, + "step": 4906 + }, + { + "epoch": 0.77, + "grad_norm": 26.177508221646757, + "learning_rate": 1.7450338384279985e-05, + "loss": 0.7725, + "step": 4907 + }, + { + "epoch": 0.77, + "grad_norm": 13.94373766611307, + "learning_rate": 1.7449213405248607e-05, + "loss": 0.6757, + "step": 4908 + }, + { + "epoch": 0.77, + "grad_norm": 11.264865436291009, + "learning_rate": 
1.7448088214365953e-05, + "loss": 0.7085, + "step": 4909 + }, + { + "epoch": 0.77, + "grad_norm": 13.981295953661448, + "learning_rate": 1.744696281166403e-05, + "loss": 0.7812, + "step": 4910 + }, + { + "epoch": 0.77, + "grad_norm": 26.35133221025492, + "learning_rate": 1.7445837197174836e-05, + "loss": 0.665, + "step": 4911 + }, + { + "epoch": 0.77, + "grad_norm": 15.685280684676613, + "learning_rate": 1.744471137093039e-05, + "loss": 0.7484, + "step": 4912 + }, + { + "epoch": 0.77, + "grad_norm": 22.87073515685023, + "learning_rate": 1.7443585332962715e-05, + "loss": 0.6615, + "step": 4913 + }, + { + "epoch": 0.77, + "grad_norm": 18.15167632578306, + "learning_rate": 1.744245908330382e-05, + "loss": 0.7008, + "step": 4914 + }, + { + "epoch": 0.77, + "grad_norm": 16.078062662109442, + "learning_rate": 1.7441332621985742e-05, + "loss": 0.7807, + "step": 4915 + }, + { + "epoch": 0.77, + "grad_norm": 17.922468637256276, + "learning_rate": 1.744020594904052e-05, + "loss": 0.7061, + "step": 4916 + }, + { + "epoch": 0.77, + "grad_norm": 25.984085288414054, + "learning_rate": 1.7439079064500193e-05, + "loss": 0.7583, + "step": 4917 + }, + { + "epoch": 0.77, + "grad_norm": 375.8586389997877, + "learning_rate": 1.7437951968396808e-05, + "loss": 0.7949, + "step": 4918 + }, + { + "epoch": 0.77, + "grad_norm": 14.152516417959635, + "learning_rate": 1.743682466076242e-05, + "loss": 0.7221, + "step": 4919 + }, + { + "epoch": 0.77, + "grad_norm": 20.097579075819894, + "learning_rate": 1.7435697141629087e-05, + "loss": 0.7473, + "step": 4920 + }, + { + "epoch": 0.77, + "grad_norm": 15.952375938958086, + "learning_rate": 1.7434569411028883e-05, + "loss": 0.8087, + "step": 4921 + }, + { + "epoch": 0.77, + "grad_norm": 18.23459097673864, + "learning_rate": 1.743344146899387e-05, + "loss": 0.7221, + "step": 4922 + }, + { + "epoch": 0.77, + "grad_norm": 21.46926016388806, + "learning_rate": 1.7432313315556132e-05, + "loss": 0.7627, + "step": 4923 + }, + { + "epoch": 0.77, + "grad_norm": 12.914724127061033, + "learning_rate": 1.743118495074775e-05, + "loss": 0.6511, + "step": 4924 + }, + { + "epoch": 0.77, + "grad_norm": 14.53497233552534, + "learning_rate": 1.7430056374600813e-05, + "loss": 0.6766, + "step": 4925 + }, + { + "epoch": 0.77, + "grad_norm": 29.61839743366324, + "learning_rate": 1.7428927587147422e-05, + "loss": 0.7791, + "step": 4926 + }, + { + "epoch": 0.77, + "grad_norm": 22.54962927979832, + "learning_rate": 1.7427798588419674e-05, + "loss": 0.7249, + "step": 4927 + }, + { + "epoch": 0.77, + "grad_norm": 17.21038336777368, + "learning_rate": 1.7426669378449685e-05, + "loss": 0.7438, + "step": 4928 + }, + { + "epoch": 0.77, + "grad_norm": 19.168165871737898, + "learning_rate": 1.7425539957269557e-05, + "loss": 0.6793, + "step": 4929 + }, + { + "epoch": 0.77, + "grad_norm": 20.76502212412781, + "learning_rate": 1.742441032491142e-05, + "loss": 0.8016, + "step": 4930 + }, + { + "epoch": 0.77, + "grad_norm": 30.406634951680513, + "learning_rate": 1.7423280481407393e-05, + "loss": 0.8101, + "step": 4931 + }, + { + "epoch": 0.77, + "grad_norm": 19.897400229519402, + "learning_rate": 1.7422150426789613e-05, + "loss": 0.7459, + "step": 4932 + }, + { + "epoch": 0.77, + "grad_norm": 19.69640883254658, + "learning_rate": 1.7421020161090216e-05, + "loss": 0.6963, + "step": 4933 + }, + { + "epoch": 0.77, + "grad_norm": 27.31423682794454, + "learning_rate": 1.741988968434135e-05, + "loss": 0.7501, + "step": 4934 + }, + { + "epoch": 0.77, + "grad_norm": 18.355686037896728, + "learning_rate": 
1.741875899657516e-05, + "loss": 0.6719, + "step": 4935 + }, + { + "epoch": 0.77, + "grad_norm": 15.52960706373916, + "learning_rate": 1.7417628097823802e-05, + "loss": 0.7426, + "step": 4936 + }, + { + "epoch": 0.77, + "grad_norm": 24.38273804178698, + "learning_rate": 1.741649698811944e-05, + "loss": 0.715, + "step": 4937 + }, + { + "epoch": 0.77, + "grad_norm": 16.799724867602897, + "learning_rate": 1.7415365667494248e-05, + "loss": 0.7854, + "step": 4938 + }, + { + "epoch": 0.77, + "grad_norm": 26.92200338982838, + "learning_rate": 1.741423413598039e-05, + "loss": 0.84, + "step": 4939 + }, + { + "epoch": 0.77, + "grad_norm": 27.316727055810073, + "learning_rate": 1.741310239361005e-05, + "loss": 0.8222, + "step": 4940 + }, + { + "epoch": 0.77, + "grad_norm": 17.01069974595615, + "learning_rate": 1.7411970440415418e-05, + "loss": 0.7209, + "step": 4941 + }, + { + "epoch": 0.77, + "grad_norm": 16.595927858845673, + "learning_rate": 1.7410838276428677e-05, + "loss": 0.7543, + "step": 4942 + }, + { + "epoch": 0.77, + "grad_norm": 16.564628310031143, + "learning_rate": 1.7409705901682033e-05, + "loss": 0.804, + "step": 4943 + }, + { + "epoch": 0.77, + "grad_norm": 15.485886607309236, + "learning_rate": 1.740857331620769e-05, + "loss": 0.6732, + "step": 4944 + }, + { + "epoch": 0.77, + "grad_norm": 16.846433901650936, + "learning_rate": 1.740744052003785e-05, + "loss": 0.7495, + "step": 4945 + }, + { + "epoch": 0.77, + "grad_norm": 18.38669637237412, + "learning_rate": 1.7406307513204742e-05, + "loss": 0.7389, + "step": 4946 + }, + { + "epoch": 0.77, + "grad_norm": 11.912706189107379, + "learning_rate": 1.7405174295740577e-05, + "loss": 0.6628, + "step": 4947 + }, + { + "epoch": 0.77, + "grad_norm": 14.065252702231586, + "learning_rate": 1.7404040867677587e-05, + "loss": 0.6699, + "step": 4948 + }, + { + "epoch": 0.77, + "grad_norm": 18.96734736895513, + "learning_rate": 1.740290722904801e-05, + "loss": 0.7858, + "step": 4949 + }, + { + "epoch": 0.77, + "grad_norm": 17.967239962029407, + "learning_rate": 1.7401773379884078e-05, + "loss": 0.7964, + "step": 4950 + }, + { + "epoch": 0.77, + "grad_norm": 16.241067718393687, + "learning_rate": 1.7400639320218042e-05, + "loss": 0.7308, + "step": 4951 + }, + { + "epoch": 0.77, + "grad_norm": 11.299745099140768, + "learning_rate": 1.739950505008215e-05, + "loss": 0.6156, + "step": 4952 + }, + { + "epoch": 0.77, + "grad_norm": 27.602985599130843, + "learning_rate": 1.7398370569508667e-05, + "loss": 0.7483, + "step": 4953 + }, + { + "epoch": 0.77, + "grad_norm": 13.337192921021662, + "learning_rate": 1.739723587852985e-05, + "loss": 0.7972, + "step": 4954 + }, + { + "epoch": 0.77, + "grad_norm": 18.357646844570873, + "learning_rate": 1.7396100977177975e-05, + "loss": 0.7109, + "step": 4955 + }, + { + "epoch": 0.77, + "grad_norm": 17.871261652590274, + "learning_rate": 1.7394965865485312e-05, + "loss": 0.7369, + "step": 4956 + }, + { + "epoch": 0.77, + "grad_norm": 20.456156405465862, + "learning_rate": 1.7393830543484147e-05, + "loss": 0.7759, + "step": 4957 + }, + { + "epoch": 0.77, + "grad_norm": 25.044864504838507, + "learning_rate": 1.7392695011206768e-05, + "loss": 0.7819, + "step": 4958 + }, + { + "epoch": 0.77, + "grad_norm": 11.588033656940288, + "learning_rate": 1.7391559268685464e-05, + "loss": 0.7667, + "step": 4959 + }, + { + "epoch": 0.77, + "grad_norm": 21.153757569746126, + "learning_rate": 1.739042331595254e-05, + "loss": 0.8124, + "step": 4960 + }, + { + "epoch": 0.77, + "grad_norm": 16.839408350346112, + "learning_rate": 
1.73892871530403e-05, + "loss": 0.692, + "step": 4961 + }, + { + "epoch": 0.78, + "grad_norm": 26.906280886403078, + "learning_rate": 1.7388150779981057e-05, + "loss": 0.7894, + "step": 4962 + }, + { + "epoch": 0.78, + "grad_norm": 18.98817814852142, + "learning_rate": 1.738701419680713e-05, + "loss": 0.7946, + "step": 4963 + }, + { + "epoch": 0.78, + "grad_norm": 14.921376115637958, + "learning_rate": 1.7385877403550836e-05, + "loss": 0.7439, + "step": 4964 + }, + { + "epoch": 0.78, + "grad_norm": 18.48194463783497, + "learning_rate": 1.738474040024451e-05, + "loss": 0.7844, + "step": 4965 + }, + { + "epoch": 0.78, + "grad_norm": 25.02973950099229, + "learning_rate": 1.738360318692049e-05, + "loss": 0.7565, + "step": 4966 + }, + { + "epoch": 0.78, + "grad_norm": 10.903436595742544, + "learning_rate": 1.738246576361111e-05, + "loss": 0.6621, + "step": 4967 + }, + { + "epoch": 0.78, + "grad_norm": 14.457844870285887, + "learning_rate": 1.7381328130348727e-05, + "loss": 0.6961, + "step": 4968 + }, + { + "epoch": 0.78, + "grad_norm": 25.22048976456105, + "learning_rate": 1.7380190287165686e-05, + "loss": 0.7874, + "step": 4969 + }, + { + "epoch": 0.78, + "grad_norm": 22.782271585661586, + "learning_rate": 1.7379052234094353e-05, + "loss": 0.7587, + "step": 4970 + }, + { + "epoch": 0.78, + "grad_norm": 21.653046819361595, + "learning_rate": 1.737791397116709e-05, + "loss": 0.7627, + "step": 4971 + }, + { + "epoch": 0.78, + "grad_norm": 26.451389024828025, + "learning_rate": 1.737677549841627e-05, + "loss": 0.8129, + "step": 4972 + }, + { + "epoch": 0.78, + "grad_norm": 17.51047892184048, + "learning_rate": 1.7375636815874273e-05, + "loss": 0.7612, + "step": 4973 + }, + { + "epoch": 0.78, + "grad_norm": 15.190522751359087, + "learning_rate": 1.7374497923573473e-05, + "loss": 0.6705, + "step": 4974 + }, + { + "epoch": 0.78, + "grad_norm": 25.356071201725552, + "learning_rate": 1.7373358821546272e-05, + "loss": 0.7768, + "step": 4975 + }, + { + "epoch": 0.78, + "grad_norm": 22.612871549515546, + "learning_rate": 1.7372219509825056e-05, + "loss": 0.6925, + "step": 4976 + }, + { + "epoch": 0.78, + "grad_norm": 18.09954965700338, + "learning_rate": 1.737107998844223e-05, + "loss": 0.7263, + "step": 4977 + }, + { + "epoch": 0.78, + "grad_norm": 16.89917224533569, + "learning_rate": 1.7369940257430203e-05, + "loss": 0.6812, + "step": 4978 + }, + { + "epoch": 0.78, + "grad_norm": 34.32157037706258, + "learning_rate": 1.7368800316821387e-05, + "loss": 0.7067, + "step": 4979 + }, + { + "epoch": 0.78, + "grad_norm": 20.21420758448821, + "learning_rate": 1.7367660166648197e-05, + "loss": 0.7596, + "step": 4980 + }, + { + "epoch": 0.78, + "grad_norm": 17.927504539990547, + "learning_rate": 1.7366519806943067e-05, + "loss": 0.7706, + "step": 4981 + }, + { + "epoch": 0.78, + "grad_norm": 23.745854640258234, + "learning_rate": 1.736537923773842e-05, + "loss": 0.7822, + "step": 4982 + }, + { + "epoch": 0.78, + "grad_norm": 15.82011657499333, + "learning_rate": 1.736423845906669e-05, + "loss": 0.7312, + "step": 4983 + }, + { + "epoch": 0.78, + "grad_norm": 40.469379937486714, + "learning_rate": 1.7363097470960336e-05, + "loss": 0.7486, + "step": 4984 + }, + { + "epoch": 0.78, + "grad_norm": 21.289888085272274, + "learning_rate": 1.736195627345179e-05, + "loss": 0.7448, + "step": 4985 + }, + { + "epoch": 0.78, + "grad_norm": 20.01380473487693, + "learning_rate": 1.736081486657352e-05, + "loss": 0.7457, + "step": 4986 + }, + { + "epoch": 0.78, + "grad_norm": 16.8077806962746, + "learning_rate": 
1.7359673250357977e-05, + "loss": 0.687, + "step": 4987 + }, + { + "epoch": 0.78, + "grad_norm": 25.211705960943725, + "learning_rate": 1.7358531424837633e-05, + "loss": 0.7321, + "step": 4988 + }, + { + "epoch": 0.78, + "grad_norm": 19.987147723469857, + "learning_rate": 1.7357389390044965e-05, + "loss": 0.7628, + "step": 4989 + }, + { + "epoch": 0.78, + "grad_norm": 26.934819811715208, + "learning_rate": 1.735624714601244e-05, + "loss": 0.7881, + "step": 4990 + }, + { + "epoch": 0.78, + "grad_norm": 17.968630449477843, + "learning_rate": 1.735510469277255e-05, + "loss": 0.7534, + "step": 4991 + }, + { + "epoch": 0.78, + "grad_norm": 29.594671351864825, + "learning_rate": 1.7353962030357786e-05, + "loss": 0.8809, + "step": 4992 + }, + { + "epoch": 0.78, + "grad_norm": 29.502093776252696, + "learning_rate": 1.7352819158800646e-05, + "loss": 0.9089, + "step": 4993 + }, + { + "epoch": 0.78, + "grad_norm": 19.798676835568042, + "learning_rate": 1.735167607813363e-05, + "loss": 0.7955, + "step": 4994 + }, + { + "epoch": 0.78, + "grad_norm": 18.52557333945106, + "learning_rate": 1.7350532788389248e-05, + "loss": 0.7633, + "step": 4995 + }, + { + "epoch": 0.78, + "grad_norm": 17.960497202377574, + "learning_rate": 1.734938928960001e-05, + "loss": 0.7301, + "step": 4996 + }, + { + "epoch": 0.78, + "grad_norm": 14.114733823159321, + "learning_rate": 1.7348245581798438e-05, + "loss": 0.6224, + "step": 4997 + }, + { + "epoch": 0.78, + "grad_norm": 15.798089532073249, + "learning_rate": 1.7347101665017064e-05, + "loss": 0.6808, + "step": 4998 + }, + { + "epoch": 0.78, + "grad_norm": 15.943326265187848, + "learning_rate": 1.7345957539288417e-05, + "loss": 0.6658, + "step": 4999 + }, + { + "epoch": 0.78, + "grad_norm": 35.25788677916932, + "learning_rate": 1.7344813204645033e-05, + "loss": 0.8039, + "step": 5000 + }, + { + "epoch": 0.78, + "grad_norm": 21.701814065027627, + "learning_rate": 1.734366866111946e-05, + "loss": 0.7383, + "step": 5001 + }, + { + "epoch": 0.78, + "grad_norm": 24.47408580973461, + "learning_rate": 1.7342523908744246e-05, + "loss": 0.6607, + "step": 5002 + }, + { + "epoch": 0.78, + "grad_norm": 46.325402325204784, + "learning_rate": 1.7341378947551946e-05, + "loss": 0.7624, + "step": 5003 + }, + { + "epoch": 0.78, + "grad_norm": 21.05457513775683, + "learning_rate": 1.7340233777575125e-05, + "loss": 0.7722, + "step": 5004 + }, + { + "epoch": 0.78, + "grad_norm": 21.92265491821371, + "learning_rate": 1.733908839884635e-05, + "loss": 0.7098, + "step": 5005 + }, + { + "epoch": 0.78, + "grad_norm": 19.640089091452882, + "learning_rate": 1.733794281139819e-05, + "loss": 0.6812, + "step": 5006 + }, + { + "epoch": 0.78, + "grad_norm": 18.545110240138342, + "learning_rate": 1.733679701526323e-05, + "loss": 0.7906, + "step": 5007 + }, + { + "epoch": 0.78, + "grad_norm": 24.377396656467127, + "learning_rate": 1.7335651010474057e-05, + "loss": 0.7975, + "step": 5008 + }, + { + "epoch": 0.78, + "grad_norm": 21.033133173062968, + "learning_rate": 1.7334504797063257e-05, + "loss": 0.8084, + "step": 5009 + }, + { + "epoch": 0.78, + "grad_norm": 19.281571613547705, + "learning_rate": 1.733335837506344e-05, + "loss": 0.7777, + "step": 5010 + }, + { + "epoch": 0.78, + "grad_norm": 20.245447107638785, + "learning_rate": 1.733221174450719e-05, + "loss": 0.782, + "step": 5011 + }, + { + "epoch": 0.78, + "grad_norm": 23.19563933656489, + "learning_rate": 1.7331064905427133e-05, + "loss": 0.7788, + "step": 5012 + }, + { + "epoch": 0.78, + "grad_norm": 15.869574781846504, + "learning_rate": 
1.7329917857855875e-05, + "loss": 0.696, + "step": 5013 + }, + { + "epoch": 0.78, + "grad_norm": 19.764099954869234, + "learning_rate": 1.7328770601826047e-05, + "loss": 0.673, + "step": 5014 + }, + { + "epoch": 0.78, + "grad_norm": 14.066046134154801, + "learning_rate": 1.7327623137370265e-05, + "loss": 0.8146, + "step": 5015 + }, + { + "epoch": 0.78, + "grad_norm": 25.078479193328253, + "learning_rate": 1.732647546452117e-05, + "loss": 0.7619, + "step": 5016 + }, + { + "epoch": 0.78, + "grad_norm": 14.420310542099985, + "learning_rate": 1.7325327583311396e-05, + "loss": 0.609, + "step": 5017 + }, + { + "epoch": 0.78, + "grad_norm": 19.662384773600046, + "learning_rate": 1.7324179493773593e-05, + "loss": 0.8095, + "step": 5018 + }, + { + "epoch": 0.78, + "grad_norm": 14.057423188069045, + "learning_rate": 1.732303119594041e-05, + "loss": 0.7724, + "step": 5019 + }, + { + "epoch": 0.78, + "grad_norm": 13.762989985979699, + "learning_rate": 1.73218826898445e-05, + "loss": 0.704, + "step": 5020 + }, + { + "epoch": 0.78, + "grad_norm": 15.115908829384493, + "learning_rate": 1.7320733975518533e-05, + "loss": 0.6469, + "step": 5021 + }, + { + "epoch": 0.78, + "grad_norm": 21.836261484816877, + "learning_rate": 1.7319585052995177e-05, + "loss": 0.7122, + "step": 5022 + }, + { + "epoch": 0.78, + "grad_norm": 29.730244977642954, + "learning_rate": 1.7318435922307097e-05, + "loss": 0.6995, + "step": 5023 + }, + { + "epoch": 0.78, + "grad_norm": 19.261123366428865, + "learning_rate": 1.7317286583486983e-05, + "loss": 0.8975, + "step": 5024 + }, + { + "epoch": 0.78, + "grad_norm": 13.467894593355588, + "learning_rate": 1.7316137036567523e-05, + "loss": 0.6568, + "step": 5025 + }, + { + "epoch": 0.79, + "grad_norm": 17.87838022659654, + "learning_rate": 1.7314987281581403e-05, + "loss": 0.7126, + "step": 5026 + }, + { + "epoch": 0.79, + "grad_norm": 26.72086776237663, + "learning_rate": 1.731383731856132e-05, + "loss": 0.8774, + "step": 5027 + }, + { + "epoch": 0.79, + "grad_norm": 16.906667107057004, + "learning_rate": 1.731268714753999e-05, + "loss": 0.7681, + "step": 5028 + }, + { + "epoch": 0.79, + "grad_norm": 21.15878858084662, + "learning_rate": 1.7311536768550107e-05, + "loss": 0.7387, + "step": 5029 + }, + { + "epoch": 0.79, + "grad_norm": 24.56685865958884, + "learning_rate": 1.73103861816244e-05, + "loss": 0.9333, + "step": 5030 + }, + { + "epoch": 0.79, + "grad_norm": 16.591399114734493, + "learning_rate": 1.7309235386795585e-05, + "loss": 0.7584, + "step": 5031 + }, + { + "epoch": 0.79, + "grad_norm": 17.610174713067547, + "learning_rate": 1.7308084384096395e-05, + "loss": 0.7689, + "step": 5032 + }, + { + "epoch": 0.79, + "grad_norm": 17.458683640351946, + "learning_rate": 1.7306933173559554e-05, + "loss": 0.8298, + "step": 5033 + }, + { + "epoch": 0.79, + "grad_norm": 15.872977671443623, + "learning_rate": 1.7305781755217812e-05, + "loss": 0.6979, + "step": 5034 + }, + { + "epoch": 0.79, + "grad_norm": 33.247439795895005, + "learning_rate": 1.730463012910391e-05, + "loss": 0.7918, + "step": 5035 + }, + { + "epoch": 0.79, + "grad_norm": 28.85241462860156, + "learning_rate": 1.73034782952506e-05, + "loss": 0.8116, + "step": 5036 + }, + { + "epoch": 0.79, + "grad_norm": 13.957824468826395, + "learning_rate": 1.7302326253690643e-05, + "loss": 0.7945, + "step": 5037 + }, + { + "epoch": 0.79, + "grad_norm": 18.111495393334224, + "learning_rate": 1.7301174004456794e-05, + "loss": 0.6709, + "step": 5038 + }, + { + "epoch": 0.79, + "grad_norm": 16.74365873854793, + "learning_rate": 
1.730002154758183e-05, + "loss": 0.7245, + "step": 5039 + }, + { + "epoch": 0.79, + "grad_norm": 16.507523721912314, + "learning_rate": 1.729886888309852e-05, + "loss": 0.7429, + "step": 5040 + }, + { + "epoch": 0.79, + "grad_norm": 21.073937723373685, + "learning_rate": 1.7297716011039654e-05, + "loss": 0.8302, + "step": 5041 + }, + { + "epoch": 0.79, + "grad_norm": 14.857892900153706, + "learning_rate": 1.729656293143801e-05, + "loss": 0.7171, + "step": 5042 + }, + { + "epoch": 0.79, + "grad_norm": 15.253516744410266, + "learning_rate": 1.7295409644326387e-05, + "loss": 0.6826, + "step": 5043 + }, + { + "epoch": 0.79, + "grad_norm": 4.94960649562677, + "learning_rate": 1.7294256149737577e-05, + "loss": 0.7902, + "step": 5044 + }, + { + "epoch": 0.79, + "grad_norm": 12.635953946089323, + "learning_rate": 1.7293102447704395e-05, + "loss": 0.6542, + "step": 5045 + }, + { + "epoch": 0.79, + "grad_norm": 19.098848532733925, + "learning_rate": 1.7291948538259644e-05, + "loss": 0.687, + "step": 5046 + }, + { + "epoch": 0.79, + "grad_norm": 19.597190720623495, + "learning_rate": 1.7290794421436138e-05, + "loss": 0.7154, + "step": 5047 + }, + { + "epoch": 0.79, + "grad_norm": 17.319660975909798, + "learning_rate": 1.7289640097266712e-05, + "loss": 0.739, + "step": 5048 + }, + { + "epoch": 0.79, + "grad_norm": 17.850288444146614, + "learning_rate": 1.728848556578418e-05, + "loss": 0.8241, + "step": 5049 + }, + { + "epoch": 0.79, + "grad_norm": 18.412644354745783, + "learning_rate": 1.7287330827021382e-05, + "loss": 0.7195, + "step": 5050 + }, + { + "epoch": 0.79, + "grad_norm": 15.979657389637712, + "learning_rate": 1.7286175881011158e-05, + "loss": 0.7461, + "step": 5051 + }, + { + "epoch": 0.79, + "grad_norm": 20.853701563777296, + "learning_rate": 1.7285020727786354e-05, + "loss": 0.7505, + "step": 5052 + }, + { + "epoch": 0.79, + "grad_norm": 25.178964584961466, + "learning_rate": 1.7283865367379826e-05, + "loss": 0.7019, + "step": 5053 + }, + { + "epoch": 0.79, + "grad_norm": 18.59595817869395, + "learning_rate": 1.7282709799824428e-05, + "loss": 0.7261, + "step": 5054 + }, + { + "epoch": 0.79, + "grad_norm": 25.532895160090938, + "learning_rate": 1.7281554025153018e-05, + "loss": 0.9205, + "step": 5055 + }, + { + "epoch": 0.79, + "grad_norm": 18.24995895276614, + "learning_rate": 1.7280398043398478e-05, + "loss": 0.8256, + "step": 5056 + }, + { + "epoch": 0.79, + "grad_norm": 33.58273686706776, + "learning_rate": 1.727924185459367e-05, + "loss": 0.8316, + "step": 5057 + }, + { + "epoch": 0.79, + "grad_norm": 26.184429837137692, + "learning_rate": 1.7278085458771485e-05, + "loss": 0.7576, + "step": 5058 + }, + { + "epoch": 0.79, + "grad_norm": 26.901705563019643, + "learning_rate": 1.7276928855964805e-05, + "loss": 0.685, + "step": 5059 + }, + { + "epoch": 0.79, + "grad_norm": 23.299143703119697, + "learning_rate": 1.727577204620653e-05, + "loss": 0.716, + "step": 5060 + }, + { + "epoch": 0.79, + "grad_norm": 42.928552792021215, + "learning_rate": 1.727461502952955e-05, + "loss": 0.8264, + "step": 5061 + }, + { + "epoch": 0.79, + "grad_norm": 17.698791213334815, + "learning_rate": 1.7273457805966773e-05, + "loss": 0.8002, + "step": 5062 + }, + { + "epoch": 0.79, + "grad_norm": 33.83940608694808, + "learning_rate": 1.7272300375551116e-05, + "loss": 0.6599, + "step": 5063 + }, + { + "epoch": 0.79, + "grad_norm": 19.194257719646686, + "learning_rate": 1.7271142738315485e-05, + "loss": 0.6953, + "step": 5064 + }, + { + "epoch": 0.79, + "grad_norm": 30.302458308170536, + "learning_rate": 
1.7269984894292814e-05, + "loss": 0.8831, + "step": 5065 + }, + { + "epoch": 0.79, + "grad_norm": 16.28154251088473, + "learning_rate": 1.726882684351602e-05, + "loss": 0.7863, + "step": 5066 + }, + { + "epoch": 0.79, + "grad_norm": 17.52729123853482, + "learning_rate": 1.7267668586018044e-05, + "loss": 0.729, + "step": 5067 + }, + { + "epoch": 0.79, + "grad_norm": 22.744382800151502, + "learning_rate": 1.7266510121831824e-05, + "loss": 0.7486, + "step": 5068 + }, + { + "epoch": 0.79, + "grad_norm": 15.712542562381573, + "learning_rate": 1.726535145099031e-05, + "loss": 0.6335, + "step": 5069 + }, + { + "epoch": 0.79, + "grad_norm": 20.561469044247666, + "learning_rate": 1.726419257352645e-05, + "loss": 0.6944, + "step": 5070 + }, + { + "epoch": 0.79, + "grad_norm": 24.059782782473214, + "learning_rate": 1.7263033489473202e-05, + "loss": 0.7326, + "step": 5071 + }, + { + "epoch": 0.79, + "grad_norm": 28.439633716454974, + "learning_rate": 1.7261874198863533e-05, + "loss": 0.8421, + "step": 5072 + }, + { + "epoch": 0.79, + "grad_norm": 16.85165307626801, + "learning_rate": 1.7260714701730405e-05, + "loss": 0.6602, + "step": 5073 + }, + { + "epoch": 0.79, + "grad_norm": 24.32739867765844, + "learning_rate": 1.72595549981068e-05, + "loss": 0.7712, + "step": 5074 + }, + { + "epoch": 0.79, + "grad_norm": 18.7142273975174, + "learning_rate": 1.72583950880257e-05, + "loss": 0.763, + "step": 5075 + }, + { + "epoch": 0.79, + "grad_norm": 17.549305123802387, + "learning_rate": 1.7257234971520086e-05, + "loss": 0.7109, + "step": 5076 + }, + { + "epoch": 0.79, + "grad_norm": 21.870855238142497, + "learning_rate": 1.7256074648622958e-05, + "loss": 0.7423, + "step": 5077 + }, + { + "epoch": 0.79, + "grad_norm": 18.662565530964063, + "learning_rate": 1.725491411936731e-05, + "loss": 0.7172, + "step": 5078 + }, + { + "epoch": 0.79, + "grad_norm": 15.344096756724776, + "learning_rate": 1.7253753383786148e-05, + "loss": 0.771, + "step": 5079 + }, + { + "epoch": 0.79, + "grad_norm": 22.20295165341256, + "learning_rate": 1.7252592441912487e-05, + "loss": 0.7137, + "step": 5080 + }, + { + "epoch": 0.79, + "grad_norm": 22.099638587167057, + "learning_rate": 1.7251431293779334e-05, + "loss": 0.7776, + "step": 5081 + }, + { + "epoch": 0.79, + "grad_norm": 35.31761512252748, + "learning_rate": 1.725026993941972e-05, + "loss": 0.7658, + "step": 5082 + }, + { + "epoch": 0.79, + "grad_norm": 18.967257706907937, + "learning_rate": 1.724910837886667e-05, + "loss": 0.7261, + "step": 5083 + }, + { + "epoch": 0.79, + "grad_norm": 29.378218776803575, + "learning_rate": 1.7247946612153215e-05, + "loss": 0.8825, + "step": 5084 + }, + { + "epoch": 0.79, + "grad_norm": 18.34233699259649, + "learning_rate": 1.7246784639312403e-05, + "loss": 0.805, + "step": 5085 + }, + { + "epoch": 0.79, + "grad_norm": 19.742365402292652, + "learning_rate": 1.7245622460377274e-05, + "loss": 0.7195, + "step": 5086 + }, + { + "epoch": 0.79, + "grad_norm": 18.991661550158632, + "learning_rate": 1.724446007538088e-05, + "loss": 0.658, + "step": 5087 + }, + { + "epoch": 0.79, + "grad_norm": 19.917805067842313, + "learning_rate": 1.724329748435628e-05, + "loss": 0.7155, + "step": 5088 + }, + { + "epoch": 0.79, + "grad_norm": 18.351676272709078, + "learning_rate": 1.7242134687336535e-05, + "loss": 0.701, + "step": 5089 + }, + { + "epoch": 0.8, + "grad_norm": 19.300366654504995, + "learning_rate": 1.7240971684354717e-05, + "loss": 0.6904, + "step": 5090 + }, + { + "epoch": 0.8, + "grad_norm": 24.543106932966747, + "learning_rate": 1.72398084754439e-05, 
+ "loss": 0.8452, + "step": 5091 + }, + { + "epoch": 0.8, + "grad_norm": 19.219439792659678, + "learning_rate": 1.7238645060637165e-05, + "loss": 0.7456, + "step": 5092 + }, + { + "epoch": 0.8, + "grad_norm": 31.200695625694124, + "learning_rate": 1.7237481439967602e-05, + "loss": 0.8066, + "step": 5093 + }, + { + "epoch": 0.8, + "grad_norm": 31.05674417462991, + "learning_rate": 1.7236317613468294e-05, + "loss": 0.7463, + "step": 5094 + }, + { + "epoch": 0.8, + "grad_norm": 14.318735471840476, + "learning_rate": 1.7235153581172353e-05, + "loss": 0.7005, + "step": 5095 + }, + { + "epoch": 0.8, + "grad_norm": 16.241663489024187, + "learning_rate": 1.7233989343112872e-05, + "loss": 0.7386, + "step": 5096 + }, + { + "epoch": 0.8, + "grad_norm": 18.114435285438898, + "learning_rate": 1.723282489932297e-05, + "loss": 0.6815, + "step": 5097 + }, + { + "epoch": 0.8, + "grad_norm": 19.384308548542357, + "learning_rate": 1.7231660249835756e-05, + "loss": 0.6392, + "step": 5098 + }, + { + "epoch": 0.8, + "grad_norm": 17.817426268006322, + "learning_rate": 1.7230495394684353e-05, + "loss": 0.7207, + "step": 5099 + }, + { + "epoch": 0.8, + "grad_norm": 17.575994480720233, + "learning_rate": 1.7229330333901895e-05, + "loss": 0.7451, + "step": 5100 + }, + { + "epoch": 0.8, + "grad_norm": 13.362047778565351, + "learning_rate": 1.722816506752151e-05, + "loss": 0.6762, + "step": 5101 + }, + { + "epoch": 0.8, + "grad_norm": 27.31812986382629, + "learning_rate": 1.722699959557634e-05, + "loss": 0.6989, + "step": 5102 + }, + { + "epoch": 0.8, + "grad_norm": 29.037783050881803, + "learning_rate": 1.7225833918099527e-05, + "loss": 0.8387, + "step": 5103 + }, + { + "epoch": 0.8, + "grad_norm": 20.149734461983485, + "learning_rate": 1.7224668035124224e-05, + "loss": 0.7087, + "step": 5104 + }, + { + "epoch": 0.8, + "grad_norm": 20.376804377223998, + "learning_rate": 1.722350194668359e-05, + "loss": 0.8158, + "step": 5105 + }, + { + "epoch": 0.8, + "grad_norm": 23.28636350223484, + "learning_rate": 1.7222335652810788e-05, + "loss": 0.7634, + "step": 5106 + }, + { + "epoch": 0.8, + "grad_norm": 12.50021746565481, + "learning_rate": 1.7221169153538978e-05, + "loss": 0.7332, + "step": 5107 + }, + { + "epoch": 0.8, + "grad_norm": 17.11290429002052, + "learning_rate": 1.7220002448901346e-05, + "loss": 0.6832, + "step": 5108 + }, + { + "epoch": 0.8, + "grad_norm": 23.245410589283114, + "learning_rate": 1.721883553893107e-05, + "loss": 0.7127, + "step": 5109 + }, + { + "epoch": 0.8, + "grad_norm": 18.672782294523607, + "learning_rate": 1.7217668423661332e-05, + "loss": 0.6993, + "step": 5110 + }, + { + "epoch": 0.8, + "grad_norm": 18.904317338145937, + "learning_rate": 1.7216501103125326e-05, + "loss": 0.6789, + "step": 5111 + }, + { + "epoch": 0.8, + "grad_norm": 26.703068857597138, + "learning_rate": 1.721533357735625e-05, + "loss": 0.7675, + "step": 5112 + }, + { + "epoch": 0.8, + "grad_norm": 11.342364338243668, + "learning_rate": 1.721416584638731e-05, + "loss": 0.6744, + "step": 5113 + }, + { + "epoch": 0.8, + "grad_norm": 16.11973672871839, + "learning_rate": 1.721299791025171e-05, + "loss": 0.7176, + "step": 5114 + }, + { + "epoch": 0.8, + "grad_norm": 25.50128034049107, + "learning_rate": 1.7211829768982672e-05, + "loss": 0.8299, + "step": 5115 + }, + { + "epoch": 0.8, + "grad_norm": 19.21175173677941, + "learning_rate": 1.7210661422613412e-05, + "loss": 0.8219, + "step": 5116 + }, + { + "epoch": 0.8, + "grad_norm": 22.4106500739761, + "learning_rate": 1.720949287117716e-05, + "loss": 0.806, + "step": 5117 + }, 
+ { + "epoch": 0.8, + "grad_norm": 38.73741170044382, + "learning_rate": 1.720832411470715e-05, + "loss": 0.8398, + "step": 5118 + }, + { + "epoch": 0.8, + "grad_norm": 19.31007118908587, + "learning_rate": 1.720715515323662e-05, + "loss": 0.7324, + "step": 5119 + }, + { + "epoch": 0.8, + "grad_norm": 16.226876249847756, + "learning_rate": 1.7205985986798808e-05, + "loss": 0.8309, + "step": 5120 + }, + { + "epoch": 0.8, + "grad_norm": 15.664302245705011, + "learning_rate": 1.7204816615426972e-05, + "loss": 0.7168, + "step": 5121 + }, + { + "epoch": 0.8, + "grad_norm": 17.232590517474655, + "learning_rate": 1.7203647039154367e-05, + "loss": 0.6735, + "step": 5122 + }, + { + "epoch": 0.8, + "grad_norm": 11.68743278871682, + "learning_rate": 1.7202477258014256e-05, + "loss": 0.7258, + "step": 5123 + }, + { + "epoch": 0.8, + "grad_norm": 25.935429112939584, + "learning_rate": 1.72013072720399e-05, + "loss": 0.7146, + "step": 5124 + }, + { + "epoch": 0.8, + "grad_norm": 13.39228771974679, + "learning_rate": 1.7200137081264584e-05, + "loss": 0.7438, + "step": 5125 + }, + { + "epoch": 0.8, + "grad_norm": 23.290735597346607, + "learning_rate": 1.719896668572158e-05, + "loss": 0.805, + "step": 5126 + }, + { + "epoch": 0.8, + "grad_norm": 21.979866063634727, + "learning_rate": 1.7197796085444173e-05, + "loss": 0.7777, + "step": 5127 + }, + { + "epoch": 0.8, + "grad_norm": 21.221921175017695, + "learning_rate": 1.7196625280465656e-05, + "loss": 0.6502, + "step": 5128 + }, + { + "epoch": 0.8, + "grad_norm": 38.09581345984922, + "learning_rate": 1.7195454270819326e-05, + "loss": 0.7592, + "step": 5129 + }, + { + "epoch": 0.8, + "grad_norm": 14.941661498438886, + "learning_rate": 1.7194283056538486e-05, + "loss": 0.8059, + "step": 5130 + }, + { + "epoch": 0.8, + "grad_norm": 18.06014691975666, + "learning_rate": 1.7193111637656445e-05, + "loss": 0.697, + "step": 5131 + }, + { + "epoch": 0.8, + "grad_norm": 43.032591067507944, + "learning_rate": 1.7191940014206518e-05, + "loss": 0.7742, + "step": 5132 + }, + { + "epoch": 0.8, + "grad_norm": 14.028730004207219, + "learning_rate": 1.719076818622202e-05, + "loss": 0.7147, + "step": 5133 + }, + { + "epoch": 0.8, + "grad_norm": 17.75989389284088, + "learning_rate": 1.7189596153736285e-05, + "loss": 0.6818, + "step": 5134 + }, + { + "epoch": 0.8, + "grad_norm": 22.58132415862778, + "learning_rate": 1.7188423916782637e-05, + "loss": 0.7869, + "step": 5135 + }, + { + "epoch": 0.8, + "grad_norm": 16.37672344494824, + "learning_rate": 1.7187251475394423e-05, + "loss": 0.6452, + "step": 5136 + }, + { + "epoch": 0.8, + "grad_norm": 32.285660965811566, + "learning_rate": 1.7186078829604978e-05, + "loss": 0.7822, + "step": 5137 + }, + { + "epoch": 0.8, + "grad_norm": 17.285971889756457, + "learning_rate": 1.7184905979447655e-05, + "loss": 0.685, + "step": 5138 + }, + { + "epoch": 0.8, + "grad_norm": 18.100395532166885, + "learning_rate": 1.7183732924955808e-05, + "loss": 0.7418, + "step": 5139 + }, + { + "epoch": 0.8, + "grad_norm": 16.721195950690603, + "learning_rate": 1.7182559666162802e-05, + "loss": 0.737, + "step": 5140 + }, + { + "epoch": 0.8, + "grad_norm": 15.197672171376754, + "learning_rate": 1.7181386203102e-05, + "loss": 0.6763, + "step": 5141 + }, + { + "epoch": 0.8, + "grad_norm": 21.89372631094041, + "learning_rate": 1.7180212535806776e-05, + "loss": 0.7672, + "step": 5142 + }, + { + "epoch": 0.8, + "grad_norm": 14.410474215830988, + "learning_rate": 1.717903866431051e-05, + "loss": 0.7046, + "step": 5143 + }, + { + "epoch": 0.8, + "grad_norm": 
24.094264874727255, + "learning_rate": 1.717786458864658e-05, + "loss": 0.7272, + "step": 5144 + }, + { + "epoch": 0.8, + "grad_norm": 13.63603840510089, + "learning_rate": 1.717669030884838e-05, + "loss": 0.7827, + "step": 5145 + }, + { + "epoch": 0.8, + "grad_norm": 29.835625627140793, + "learning_rate": 1.7175515824949304e-05, + "loss": 0.721, + "step": 5146 + }, + { + "epoch": 0.8, + "grad_norm": 20.555538623113, + "learning_rate": 1.7174341136982763e-05, + "loss": 0.707, + "step": 5147 + }, + { + "epoch": 0.8, + "grad_norm": 23.694979288158706, + "learning_rate": 1.717316624498215e-05, + "loss": 0.7533, + "step": 5148 + }, + { + "epoch": 0.8, + "grad_norm": 14.30406713104073, + "learning_rate": 1.7171991148980888e-05, + "loss": 0.7635, + "step": 5149 + }, + { + "epoch": 0.8, + "grad_norm": 15.469966176659288, + "learning_rate": 1.717081584901239e-05, + "loss": 0.6965, + "step": 5150 + }, + { + "epoch": 0.8, + "grad_norm": 17.156253639248597, + "learning_rate": 1.7169640345110088e-05, + "loss": 0.7239, + "step": 5151 + }, + { + "epoch": 0.8, + "grad_norm": 13.503101974870242, + "learning_rate": 1.7168464637307408e-05, + "loss": 0.7429, + "step": 5152 + }, + { + "epoch": 0.8, + "grad_norm": 13.495866157500753, + "learning_rate": 1.7167288725637786e-05, + "loss": 0.7084, + "step": 5153 + }, + { + "epoch": 0.81, + "grad_norm": 20.819476779676247, + "learning_rate": 1.7166112610134665e-05, + "loss": 0.7504, + "step": 5154 + }, + { + "epoch": 0.81, + "grad_norm": 12.253693590638479, + "learning_rate": 1.7164936290831494e-05, + "loss": 0.7233, + "step": 5155 + }, + { + "epoch": 0.81, + "grad_norm": 14.691760306642331, + "learning_rate": 1.7163759767761727e-05, + "loss": 0.6635, + "step": 5156 + }, + { + "epoch": 0.81, + "grad_norm": 21.058269663818134, + "learning_rate": 1.716258304095882e-05, + "loss": 0.7584, + "step": 5157 + }, + { + "epoch": 0.81, + "grad_norm": 21.380375913891744, + "learning_rate": 1.716140611045625e-05, + "loss": 0.7411, + "step": 5158 + }, + { + "epoch": 0.81, + "grad_norm": 21.897040454233423, + "learning_rate": 1.716022897628747e-05, + "loss": 0.6835, + "step": 5159 + }, + { + "epoch": 0.81, + "grad_norm": 16.92237858185876, + "learning_rate": 1.7159051638485967e-05, + "loss": 0.7479, + "step": 5160 + }, + { + "epoch": 0.81, + "grad_norm": 25.31088891612536, + "learning_rate": 1.715787409708523e-05, + "loss": 0.8402, + "step": 5161 + }, + { + "epoch": 0.81, + "grad_norm": 20.15829211025891, + "learning_rate": 1.7156696352118735e-05, + "loss": 0.762, + "step": 5162 + }, + { + "epoch": 0.81, + "grad_norm": 14.895683898265137, + "learning_rate": 1.7155518403619983e-05, + "loss": 0.6637, + "step": 5163 + }, + { + "epoch": 0.81, + "grad_norm": 19.200923417126887, + "learning_rate": 1.7154340251622476e-05, + "loss": 0.7266, + "step": 5164 + }, + { + "epoch": 0.81, + "grad_norm": 15.680402059006028, + "learning_rate": 1.7153161896159717e-05, + "loss": 0.8016, + "step": 5165 + }, + { + "epoch": 0.81, + "grad_norm": 22.499018163583965, + "learning_rate": 1.7151983337265217e-05, + "loss": 0.6742, + "step": 5166 + }, + { + "epoch": 0.81, + "grad_norm": 12.139926349637031, + "learning_rate": 1.7150804574972496e-05, + "loss": 0.6492, + "step": 5167 + }, + { + "epoch": 0.81, + "grad_norm": 17.32334262778569, + "learning_rate": 1.7149625609315076e-05, + "loss": 0.7421, + "step": 5168 + }, + { + "epoch": 0.81, + "grad_norm": 15.758706509086169, + "learning_rate": 1.7148446440326485e-05, + "loss": 0.7625, + "step": 5169 + }, + { + "epoch": 0.81, + "grad_norm": 19.378582217554925, + 
"learning_rate": 1.714726706804026e-05, + "loss": 0.7941, + "step": 5170 + }, + { + "epoch": 0.81, + "grad_norm": 23.14861364873735, + "learning_rate": 1.714608749248994e-05, + "loss": 0.7745, + "step": 5171 + }, + { + "epoch": 0.81, + "grad_norm": 12.184340133993782, + "learning_rate": 1.714490771370907e-05, + "loss": 0.6667, + "step": 5172 + }, + { + "epoch": 0.81, + "grad_norm": 27.235216965958795, + "learning_rate": 1.7143727731731204e-05, + "loss": 0.8347, + "step": 5173 + }, + { + "epoch": 0.81, + "grad_norm": 19.78384076910924, + "learning_rate": 1.7142547546589903e-05, + "loss": 0.7187, + "step": 5174 + }, + { + "epoch": 0.81, + "grad_norm": 22.327430603182503, + "learning_rate": 1.714136715831873e-05, + "loss": 0.7614, + "step": 5175 + }, + { + "epoch": 0.81, + "grad_norm": 21.341016486852183, + "learning_rate": 1.7140186566951248e-05, + "loss": 0.6902, + "step": 5176 + }, + { + "epoch": 0.81, + "grad_norm": 18.309475983495677, + "learning_rate": 1.7139005772521038e-05, + "loss": 0.7205, + "step": 5177 + }, + { + "epoch": 0.81, + "grad_norm": 18.225050607200238, + "learning_rate": 1.7137824775061682e-05, + "loss": 0.735, + "step": 5178 + }, + { + "epoch": 0.81, + "grad_norm": 19.36372698497285, + "learning_rate": 1.7136643574606764e-05, + "loss": 0.7876, + "step": 5179 + }, + { + "epoch": 0.81, + "grad_norm": 13.597623428331413, + "learning_rate": 1.7135462171189877e-05, + "loss": 0.6934, + "step": 5180 + }, + { + "epoch": 0.81, + "grad_norm": 19.694557004463306, + "learning_rate": 1.713428056484462e-05, + "loss": 0.6687, + "step": 5181 + }, + { + "epoch": 0.81, + "grad_norm": 23.611076331549317, + "learning_rate": 1.7133098755604595e-05, + "loss": 0.7197, + "step": 5182 + }, + { + "epoch": 0.81, + "grad_norm": 16.26785181360096, + "learning_rate": 1.713191674350342e-05, + "loss": 0.7426, + "step": 5183 + }, + { + "epoch": 0.81, + "grad_norm": 18.46083528019795, + "learning_rate": 1.71307345285747e-05, + "loss": 0.691, + "step": 5184 + }, + { + "epoch": 0.81, + "grad_norm": 18.027515995777403, + "learning_rate": 1.7129552110852064e-05, + "loss": 0.7468, + "step": 5185 + }, + { + "epoch": 0.81, + "grad_norm": 14.067577070744028, + "learning_rate": 1.7128369490369134e-05, + "loss": 0.6679, + "step": 5186 + }, + { + "epoch": 0.81, + "grad_norm": 15.996270422140327, + "learning_rate": 1.7127186667159547e-05, + "loss": 0.7136, + "step": 5187 + }, + { + "epoch": 0.81, + "grad_norm": 28.687060999991512, + "learning_rate": 1.712600364125694e-05, + "loss": 0.7427, + "step": 5188 + }, + { + "epoch": 0.81, + "grad_norm": 20.87675124948807, + "learning_rate": 1.712482041269496e-05, + "loss": 0.7439, + "step": 5189 + }, + { + "epoch": 0.81, + "grad_norm": 23.023500437888337, + "learning_rate": 1.7123636981507256e-05, + "loss": 0.7542, + "step": 5190 + }, + { + "epoch": 0.81, + "grad_norm": 14.226060276080307, + "learning_rate": 1.712245334772748e-05, + "loss": 0.7316, + "step": 5191 + }, + { + "epoch": 0.81, + "grad_norm": 21.60619267513184, + "learning_rate": 1.7121269511389298e-05, + "loss": 0.784, + "step": 5192 + }, + { + "epoch": 0.81, + "grad_norm": 23.594555092016382, + "learning_rate": 1.7120085472526377e-05, + "loss": 0.6827, + "step": 5193 + }, + { + "epoch": 0.81, + "grad_norm": 19.276446784427318, + "learning_rate": 1.711890123117239e-05, + "loss": 0.7894, + "step": 5194 + }, + { + "epoch": 0.81, + "grad_norm": 18.86603492865205, + "learning_rate": 1.711771678736102e-05, + "loss": 0.7585, + "step": 5195 + }, + { + "epoch": 0.81, + "grad_norm": 23.78643156013516, + "learning_rate": 
1.7116532141125947e-05, + "loss": 0.7483, + "step": 5196 + }, + { + "epoch": 0.81, + "grad_norm": 18.21745251915064, + "learning_rate": 1.711534729250086e-05, + "loss": 0.7554, + "step": 5197 + }, + { + "epoch": 0.81, + "grad_norm": 20.214312822676618, + "learning_rate": 1.711416224151946e-05, + "loss": 0.6873, + "step": 5198 + }, + { + "epoch": 0.81, + "grad_norm": 25.00368890249207, + "learning_rate": 1.7112976988215445e-05, + "loss": 0.6451, + "step": 5199 + }, + { + "epoch": 0.81, + "grad_norm": 29.31716589810737, + "learning_rate": 1.711179153262253e-05, + "loss": 0.7897, + "step": 5200 + }, + { + "epoch": 0.81, + "grad_norm": 24.645334688695545, + "learning_rate": 1.711060587477442e-05, + "loss": 0.8364, + "step": 5201 + }, + { + "epoch": 0.81, + "grad_norm": 13.248071867907846, + "learning_rate": 1.710942001470484e-05, + "loss": 0.6988, + "step": 5202 + }, + { + "epoch": 0.81, + "grad_norm": 25.189003249359537, + "learning_rate": 1.7108233952447516e-05, + "loss": 0.7757, + "step": 5203 + }, + { + "epoch": 0.81, + "grad_norm": 23.825042063029482, + "learning_rate": 1.7107047688036175e-05, + "loss": 0.6276, + "step": 5204 + }, + { + "epoch": 0.81, + "grad_norm": 19.303921080303425, + "learning_rate": 1.7105861221504555e-05, + "loss": 0.7765, + "step": 5205 + }, + { + "epoch": 0.81, + "grad_norm": 20.345665834899343, + "learning_rate": 1.71046745528864e-05, + "loss": 0.7833, + "step": 5206 + }, + { + "epoch": 0.81, + "grad_norm": 22.304084153708335, + "learning_rate": 1.7103487682215454e-05, + "loss": 0.8053, + "step": 5207 + }, + { + "epoch": 0.81, + "grad_norm": 16.49489460334743, + "learning_rate": 1.710230060952548e-05, + "loss": 0.7643, + "step": 5208 + }, + { + "epoch": 0.81, + "grad_norm": 26.111518871150807, + "learning_rate": 1.7101113334850224e-05, + "loss": 0.7566, + "step": 5209 + }, + { + "epoch": 0.81, + "grad_norm": 26.556357640680986, + "learning_rate": 1.709992585822346e-05, + "loss": 0.8432, + "step": 5210 + }, + { + "epoch": 0.81, + "grad_norm": 18.020274380374257, + "learning_rate": 1.709873817967896e-05, + "loss": 0.7846, + "step": 5211 + }, + { + "epoch": 0.81, + "grad_norm": 16.61036679706035, + "learning_rate": 1.70975502992505e-05, + "loss": 0.7227, + "step": 5212 + }, + { + "epoch": 0.81, + "grad_norm": 17.708249171187916, + "learning_rate": 1.7096362216971857e-05, + "loss": 0.7751, + "step": 5213 + }, + { + "epoch": 0.81, + "grad_norm": 19.708748120169744, + "learning_rate": 1.7095173932876827e-05, + "loss": 0.7299, + "step": 5214 + }, + { + "epoch": 0.81, + "grad_norm": 18.909691180597793, + "learning_rate": 1.7093985446999203e-05, + "loss": 0.6862, + "step": 5215 + }, + { + "epoch": 0.81, + "grad_norm": 16.467779997791684, + "learning_rate": 1.7092796759372778e-05, + "loss": 0.7572, + "step": 5216 + }, + { + "epoch": 0.81, + "grad_norm": 19.466025192414246, + "learning_rate": 1.7091607870031362e-05, + "loss": 0.8308, + "step": 5217 + }, + { + "epoch": 0.82, + "grad_norm": 23.435405656064976, + "learning_rate": 1.7090418779008772e-05, + "loss": 0.6645, + "step": 5218 + }, + { + "epoch": 0.82, + "grad_norm": 17.10645770933609, + "learning_rate": 1.7089229486338815e-05, + "loss": 0.6955, + "step": 5219 + }, + { + "epoch": 0.82, + "grad_norm": 15.041225599807925, + "learning_rate": 1.708803999205532e-05, + "loss": 0.7755, + "step": 5220 + }, + { + "epoch": 0.82, + "grad_norm": 24.964851359696137, + "learning_rate": 1.7086850296192118e-05, + "loss": 0.7442, + "step": 5221 + }, + { + "epoch": 0.82, + "grad_norm": 19.45873126706079, + "learning_rate": 
1.7085660398783032e-05, + "loss": 0.7241, + "step": 5222 + }, + { + "epoch": 0.82, + "grad_norm": 25.149234360924705, + "learning_rate": 1.7084470299861915e-05, + "loss": 0.6826, + "step": 5223 + }, + { + "epoch": 0.82, + "grad_norm": 26.213680701389535, + "learning_rate": 1.7083279999462603e-05, + "loss": 0.8048, + "step": 5224 + }, + { + "epoch": 0.82, + "grad_norm": 18.257926227654465, + "learning_rate": 1.7082089497618955e-05, + "loss": 0.7918, + "step": 5225 + }, + { + "epoch": 0.82, + "grad_norm": 22.216825118832926, + "learning_rate": 1.708089879436482e-05, + "loss": 0.7294, + "step": 5226 + }, + { + "epoch": 0.82, + "grad_norm": 18.91550840486508, + "learning_rate": 1.707970788973407e-05, + "loss": 0.6509, + "step": 5227 + }, + { + "epoch": 0.82, + "grad_norm": 23.22206902174581, + "learning_rate": 1.7078516783760568e-05, + "loss": 0.7002, + "step": 5228 + }, + { + "epoch": 0.82, + "grad_norm": 21.471909689941164, + "learning_rate": 1.707732547647819e-05, + "loss": 0.7251, + "step": 5229 + }, + { + "epoch": 0.82, + "grad_norm": 24.20088935978388, + "learning_rate": 1.7076133967920813e-05, + "loss": 0.7751, + "step": 5230 + }, + { + "epoch": 0.82, + "grad_norm": 30.134268155096944, + "learning_rate": 1.7074942258122326e-05, + "loss": 0.7393, + "step": 5231 + }, + { + "epoch": 0.82, + "grad_norm": 30.24635652893284, + "learning_rate": 1.707375034711662e-05, + "loss": 0.7391, + "step": 5232 + }, + { + "epoch": 0.82, + "grad_norm": 17.511066867966253, + "learning_rate": 1.7072558234937597e-05, + "loss": 0.7057, + "step": 5233 + }, + { + "epoch": 0.82, + "grad_norm": 20.00314918847954, + "learning_rate": 1.707136592161915e-05, + "loss": 0.7347, + "step": 5234 + }, + { + "epoch": 0.82, + "grad_norm": 27.868116686264358, + "learning_rate": 1.7070173407195193e-05, + "loss": 0.8165, + "step": 5235 + }, + { + "epoch": 0.82, + "grad_norm": 18.786545019757952, + "learning_rate": 1.7068980691699638e-05, + "loss": 0.7533, + "step": 5236 + }, + { + "epoch": 0.82, + "grad_norm": 18.024889323655994, + "learning_rate": 1.7067787775166414e-05, + "loss": 0.7822, + "step": 5237 + }, + { + "epoch": 0.82, + "grad_norm": 26.497529064221553, + "learning_rate": 1.7066594657629435e-05, + "loss": 0.6936, + "step": 5238 + }, + { + "epoch": 0.82, + "grad_norm": 14.644817973230241, + "learning_rate": 1.706540133912264e-05, + "loss": 0.7498, + "step": 5239 + }, + { + "epoch": 0.82, + "grad_norm": 18.682022740892513, + "learning_rate": 1.7064207819679964e-05, + "loss": 0.8008, + "step": 5240 + }, + { + "epoch": 0.82, + "grad_norm": 15.142779933096879, + "learning_rate": 1.7063014099335353e-05, + "loss": 0.7062, + "step": 5241 + }, + { + "epoch": 0.82, + "grad_norm": 29.532680526276256, + "learning_rate": 1.7061820178122746e-05, + "loss": 0.8107, + "step": 5242 + }, + { + "epoch": 0.82, + "grad_norm": 18.24548046361106, + "learning_rate": 1.7060626056076107e-05, + "loss": 0.7246, + "step": 5243 + }, + { + "epoch": 0.82, + "grad_norm": 33.7868676935813, + "learning_rate": 1.705943173322939e-05, + "loss": 0.8513, + "step": 5244 + }, + { + "epoch": 0.82, + "grad_norm": 17.52759771671025, + "learning_rate": 1.705823720961657e-05, + "loss": 0.7322, + "step": 5245 + }, + { + "epoch": 0.82, + "grad_norm": 16.44824281848892, + "learning_rate": 1.705704248527161e-05, + "loss": 0.7418, + "step": 5246 + }, + { + "epoch": 0.82, + "grad_norm": 19.02752852306113, + "learning_rate": 1.7055847560228495e-05, + "loss": 0.7536, + "step": 5247 + }, + { + "epoch": 0.82, + "grad_norm": 22.9162490112364, + "learning_rate": 
1.7054652434521197e-05, + "loss": 0.6988, + "step": 5248 + }, + { + "epoch": 0.82, + "grad_norm": 17.729013259289736, + "learning_rate": 1.7053457108183713e-05, + "loss": 0.7439, + "step": 5249 + }, + { + "epoch": 0.82, + "grad_norm": 15.012753659661463, + "learning_rate": 1.7052261581250034e-05, + "loss": 0.7762, + "step": 5250 + }, + { + "epoch": 0.82, + "grad_norm": 20.4150008472449, + "learning_rate": 1.705106585375416e-05, + "loss": 0.7424, + "step": 5251 + }, + { + "epoch": 0.82, + "grad_norm": 28.2915625090407, + "learning_rate": 1.70498699257301e-05, + "loss": 0.7448, + "step": 5252 + }, + { + "epoch": 0.82, + "grad_norm": 19.18176275901338, + "learning_rate": 1.7048673797211863e-05, + "loss": 0.7461, + "step": 5253 + }, + { + "epoch": 0.82, + "grad_norm": 22.212802045895916, + "learning_rate": 1.7047477468233467e-05, + "loss": 0.8229, + "step": 5254 + }, + { + "epoch": 0.82, + "grad_norm": 15.20314254348072, + "learning_rate": 1.7046280938828932e-05, + "loss": 0.7776, + "step": 5255 + }, + { + "epoch": 0.82, + "grad_norm": 15.645115960251603, + "learning_rate": 1.704508420903229e-05, + "loss": 0.6577, + "step": 5256 + }, + { + "epoch": 0.82, + "grad_norm": 25.789337101271453, + "learning_rate": 1.7043887278877577e-05, + "loss": 0.6788, + "step": 5257 + }, + { + "epoch": 0.82, + "grad_norm": 20.30974265603834, + "learning_rate": 1.7042690148398825e-05, + "loss": 0.8397, + "step": 5258 + }, + { + "epoch": 0.82, + "grad_norm": 18.83332377320303, + "learning_rate": 1.704149281763009e-05, + "loss": 0.7066, + "step": 5259 + }, + { + "epoch": 0.82, + "grad_norm": 25.308804916958696, + "learning_rate": 1.7040295286605415e-05, + "loss": 0.7674, + "step": 5260 + }, + { + "epoch": 0.82, + "grad_norm": 26.81848377757063, + "learning_rate": 1.7039097555358862e-05, + "loss": 0.8434, + "step": 5261 + }, + { + "epoch": 0.82, + "grad_norm": 22.03986832890927, + "learning_rate": 1.7037899623924495e-05, + "loss": 0.7236, + "step": 5262 + }, + { + "epoch": 0.82, + "grad_norm": 22.090543859637418, + "learning_rate": 1.7036701492336378e-05, + "loss": 0.7969, + "step": 5263 + }, + { + "epoch": 0.82, + "grad_norm": 19.970197314733156, + "learning_rate": 1.7035503160628585e-05, + "loss": 0.7631, + "step": 5264 + }, + { + "epoch": 0.82, + "grad_norm": 15.222801017904905, + "learning_rate": 1.7034304628835198e-05, + "loss": 0.6801, + "step": 5265 + }, + { + "epoch": 0.82, + "grad_norm": 18.482936078396236, + "learning_rate": 1.70331058969903e-05, + "loss": 0.7057, + "step": 5266 + }, + { + "epoch": 0.82, + "grad_norm": 17.027679157388516, + "learning_rate": 1.703190696512799e-05, + "loss": 0.6521, + "step": 5267 + }, + { + "epoch": 0.82, + "grad_norm": 22.23955269185322, + "learning_rate": 1.703070783328236e-05, + "loss": 0.8073, + "step": 5268 + }, + { + "epoch": 0.82, + "grad_norm": 14.96725247722746, + "learning_rate": 1.702950850148751e-05, + "loss": 0.6897, + "step": 5269 + }, + { + "epoch": 0.82, + "grad_norm": 21.440301662020264, + "learning_rate": 1.7028308969777548e-05, + "loss": 0.7676, + "step": 5270 + }, + { + "epoch": 0.82, + "grad_norm": 18.081220980339747, + "learning_rate": 1.7027109238186593e-05, + "loss": 0.753, + "step": 5271 + }, + { + "epoch": 0.82, + "grad_norm": 16.28191990490078, + "learning_rate": 1.7025909306748757e-05, + "loss": 0.7668, + "step": 5272 + }, + { + "epoch": 0.82, + "grad_norm": 8.205375073967923, + "learning_rate": 1.7024709175498174e-05, + "loss": 0.6975, + "step": 5273 + }, + { + "epoch": 0.82, + "grad_norm": 21.929077265993786, + "learning_rate": 
1.7023508844468974e-05, + "loss": 0.6462, + "step": 5274 + }, + { + "epoch": 0.82, + "grad_norm": 18.905851414711375, + "learning_rate": 1.702230831369529e-05, + "loss": 0.8903, + "step": 5275 + }, + { + "epoch": 0.82, + "grad_norm": 16.44763775331927, + "learning_rate": 1.7021107583211266e-05, + "loss": 0.7497, + "step": 5276 + }, + { + "epoch": 0.82, + "grad_norm": 21.32928537279355, + "learning_rate": 1.7019906653051045e-05, + "loss": 0.7195, + "step": 5277 + }, + { + "epoch": 0.82, + "grad_norm": 23.683651035194888, + "learning_rate": 1.701870552324879e-05, + "loss": 0.7452, + "step": 5278 + }, + { + "epoch": 0.82, + "grad_norm": 17.664000125118573, + "learning_rate": 1.7017504193838654e-05, + "loss": 0.7115, + "step": 5279 + }, + { + "epoch": 0.82, + "grad_norm": 17.395220431268605, + "learning_rate": 1.7016302664854802e-05, + "loss": 0.728, + "step": 5280 + }, + { + "epoch": 0.82, + "grad_norm": 16.27474301998892, + "learning_rate": 1.701510093633141e-05, + "loss": 0.6826, + "step": 5281 + }, + { + "epoch": 0.83, + "grad_norm": 20.15930575118074, + "learning_rate": 1.701389900830265e-05, + "loss": 0.8046, + "step": 5282 + }, + { + "epoch": 0.83, + "grad_norm": 19.83775068066402, + "learning_rate": 1.70126968808027e-05, + "loss": 0.7946, + "step": 5283 + }, + { + "epoch": 0.83, + "grad_norm": 15.934965622871642, + "learning_rate": 1.7011494553865762e-05, + "loss": 0.7219, + "step": 5284 + }, + { + "epoch": 0.83, + "grad_norm": 18.094198983144597, + "learning_rate": 1.7010292027526016e-05, + "loss": 0.6905, + "step": 5285 + }, + { + "epoch": 0.83, + "grad_norm": 16.386529025228718, + "learning_rate": 1.7009089301817666e-05, + "loss": 0.7556, + "step": 5286 + }, + { + "epoch": 0.83, + "grad_norm": 16.50589419609408, + "learning_rate": 1.7007886376774917e-05, + "loss": 0.7015, + "step": 5287 + }, + { + "epoch": 0.83, + "grad_norm": 17.505807325138306, + "learning_rate": 1.7006683252431977e-05, + "loss": 0.6908, + "step": 5288 + }, + { + "epoch": 0.83, + "grad_norm": 18.468875830523434, + "learning_rate": 1.7005479928823066e-05, + "loss": 0.7265, + "step": 5289 + }, + { + "epoch": 0.83, + "grad_norm": 17.890223480283264, + "learning_rate": 1.7004276405982404e-05, + "loss": 0.7221, + "step": 5290 + }, + { + "epoch": 0.83, + "grad_norm": 19.138049348636994, + "learning_rate": 1.7003072683944214e-05, + "loss": 0.7783, + "step": 5291 + }, + { + "epoch": 0.83, + "grad_norm": 17.008168949672765, + "learning_rate": 1.700186876274274e-05, + "loss": 0.803, + "step": 5292 + }, + { + "epoch": 0.83, + "grad_norm": 21.046225805401793, + "learning_rate": 1.700066464241221e-05, + "loss": 0.8066, + "step": 5293 + }, + { + "epoch": 0.83, + "grad_norm": 11.41782958252252, + "learning_rate": 1.6999460322986876e-05, + "loss": 0.7059, + "step": 5294 + }, + { + "epoch": 0.83, + "grad_norm": 15.223645780850129, + "learning_rate": 1.6998255804500983e-05, + "loss": 0.7338, + "step": 5295 + }, + { + "epoch": 0.83, + "grad_norm": 18.688801233732843, + "learning_rate": 1.699705108698879e-05, + "loss": 0.7164, + "step": 5296 + }, + { + "epoch": 0.83, + "grad_norm": 14.211439051488393, + "learning_rate": 1.6995846170484554e-05, + "loss": 0.7625, + "step": 5297 + }, + { + "epoch": 0.83, + "grad_norm": 16.793240609090326, + "learning_rate": 1.699464105502255e-05, + "loss": 0.7275, + "step": 5298 + }, + { + "epoch": 0.83, + "grad_norm": 31.8135925809831, + "learning_rate": 1.6993435740637042e-05, + "loss": 0.8877, + "step": 5299 + }, + { + "epoch": 0.83, + "grad_norm": 14.932537832555084, + "learning_rate": 
1.6992230227362317e-05, + "loss": 0.6649, + "step": 5300 + }, + { + "epoch": 0.83, + "grad_norm": 14.850596382446989, + "learning_rate": 1.6991024515232652e-05, + "loss": 0.7352, + "step": 5301 + }, + { + "epoch": 0.83, + "grad_norm": 19.527752671497687, + "learning_rate": 1.698981860428234e-05, + "loss": 0.7362, + "step": 5302 + }, + { + "epoch": 0.83, + "grad_norm": 12.234730511952906, + "learning_rate": 1.6988612494545673e-05, + "loss": 0.6077, + "step": 5303 + }, + { + "epoch": 0.83, + "grad_norm": 15.151526856633746, + "learning_rate": 1.698740618605696e-05, + "loss": 0.7386, + "step": 5304 + }, + { + "epoch": 0.83, + "grad_norm": 17.161854040613125, + "learning_rate": 1.69861996788505e-05, + "loss": 0.6559, + "step": 5305 + }, + { + "epoch": 0.83, + "grad_norm": 21.160182836669758, + "learning_rate": 1.6984992972960606e-05, + "loss": 0.756, + "step": 5306 + }, + { + "epoch": 0.83, + "grad_norm": 13.711337702091864, + "learning_rate": 1.69837860684216e-05, + "loss": 0.6666, + "step": 5307 + }, + { + "epoch": 0.83, + "grad_norm": 15.52688328378112, + "learning_rate": 1.6982578965267805e-05, + "loss": 0.7041, + "step": 5308 + }, + { + "epoch": 0.83, + "grad_norm": 16.04005928769861, + "learning_rate": 1.6981371663533543e-05, + "loss": 0.7199, + "step": 5309 + }, + { + "epoch": 0.83, + "grad_norm": 15.322963190580786, + "learning_rate": 1.698016416325316e-05, + "loss": 0.7607, + "step": 5310 + }, + { + "epoch": 0.83, + "grad_norm": 23.216787203310748, + "learning_rate": 1.6978956464460993e-05, + "loss": 0.7545, + "step": 5311 + }, + { + "epoch": 0.83, + "grad_norm": 16.328090675901993, + "learning_rate": 1.6977748567191384e-05, + "loss": 0.729, + "step": 5312 + }, + { + "epoch": 0.83, + "grad_norm": 19.564781120714, + "learning_rate": 1.697654047147869e-05, + "loss": 0.7159, + "step": 5313 + }, + { + "epoch": 0.83, + "grad_norm": 22.83253972749377, + "learning_rate": 1.6975332177357258e-05, + "loss": 0.8002, + "step": 5314 + }, + { + "epoch": 0.83, + "grad_norm": 26.683272427810657, + "learning_rate": 1.697412368486147e-05, + "loss": 0.7561, + "step": 5315 + }, + { + "epoch": 0.83, + "grad_norm": 26.439257556016987, + "learning_rate": 1.697291499402568e-05, + "loss": 0.7028, + "step": 5316 + }, + { + "epoch": 0.83, + "grad_norm": 21.944124655796955, + "learning_rate": 1.6971706104884263e-05, + "loss": 0.7289, + "step": 5317 + }, + { + "epoch": 0.83, + "grad_norm": 18.98941214379324, + "learning_rate": 1.6970497017471608e-05, + "loss": 0.694, + "step": 5318 + }, + { + "epoch": 0.83, + "grad_norm": 25.85206005266668, + "learning_rate": 1.6969287731822092e-05, + "loss": 0.7589, + "step": 5319 + }, + { + "epoch": 0.83, + "grad_norm": 25.109176435557835, + "learning_rate": 1.6968078247970116e-05, + "loss": 0.8027, + "step": 5320 + }, + { + "epoch": 0.83, + "grad_norm": 17.40351651197082, + "learning_rate": 1.6966868565950062e-05, + "loss": 0.696, + "step": 5321 + }, + { + "epoch": 0.83, + "grad_norm": 33.89123415982081, + "learning_rate": 1.6965658685796347e-05, + "loss": 0.8422, + "step": 5322 + }, + { + "epoch": 0.83, + "grad_norm": 18.86977291992447, + "learning_rate": 1.6964448607543375e-05, + "loss": 0.7781, + "step": 5323 + }, + { + "epoch": 0.83, + "grad_norm": 21.757045640563643, + "learning_rate": 1.696323833122555e-05, + "loss": 0.7318, + "step": 5324 + }, + { + "epoch": 0.83, + "grad_norm": 19.32630022111386, + "learning_rate": 1.696202785687731e-05, + "loss": 0.7198, + "step": 5325 + }, + { + "epoch": 0.83, + "grad_norm": 15.799283365704994, + "learning_rate": 
1.6960817184533068e-05, + "loss": 0.7747, + "step": 5326 + }, + { + "epoch": 0.83, + "grad_norm": 20.77355076994614, + "learning_rate": 1.6959606314227254e-05, + "loss": 0.7008, + "step": 5327 + }, + { + "epoch": 0.83, + "grad_norm": 4.142022414391043, + "learning_rate": 1.695839524599431e-05, + "loss": 0.8104, + "step": 5328 + }, + { + "epoch": 0.83, + "grad_norm": 23.127038314271974, + "learning_rate": 1.6957183979868673e-05, + "loss": 0.7259, + "step": 5329 + }, + { + "epoch": 0.83, + "grad_norm": 13.896891192166652, + "learning_rate": 1.6955972515884798e-05, + "loss": 0.6573, + "step": 5330 + }, + { + "epoch": 0.83, + "grad_norm": 27.293947471736764, + "learning_rate": 1.695476085407713e-05, + "loss": 0.804, + "step": 5331 + }, + { + "epoch": 0.83, + "grad_norm": 23.47729253411391, + "learning_rate": 1.695354899448013e-05, + "loss": 0.8239, + "step": 5332 + }, + { + "epoch": 0.83, + "grad_norm": 29.055762164147385, + "learning_rate": 1.695233693712827e-05, + "loss": 0.7221, + "step": 5333 + }, + { + "epoch": 0.83, + "grad_norm": 24.08650277415116, + "learning_rate": 1.695112468205601e-05, + "loss": 0.7172, + "step": 5334 + }, + { + "epoch": 0.83, + "grad_norm": 24.250135975018978, + "learning_rate": 1.6949912229297834e-05, + "loss": 0.6906, + "step": 5335 + }, + { + "epoch": 0.83, + "grad_norm": 21.04451229679336, + "learning_rate": 1.6948699578888218e-05, + "loss": 0.6787, + "step": 5336 + }, + { + "epoch": 0.83, + "grad_norm": 11.778587713069367, + "learning_rate": 1.6947486730861652e-05, + "loss": 0.5909, + "step": 5337 + }, + { + "epoch": 0.83, + "grad_norm": 16.90332788855892, + "learning_rate": 1.694627368525263e-05, + "loss": 0.6412, + "step": 5338 + }, + { + "epoch": 0.83, + "grad_norm": 19.719465036471096, + "learning_rate": 1.6945060442095642e-05, + "loss": 0.7676, + "step": 5339 + }, + { + "epoch": 0.83, + "grad_norm": 16.381134694038188, + "learning_rate": 1.69438470014252e-05, + "loss": 0.7599, + "step": 5340 + }, + { + "epoch": 0.83, + "grad_norm": 22.198136504812283, + "learning_rate": 1.6942633363275812e-05, + "loss": 0.7734, + "step": 5341 + }, + { + "epoch": 0.83, + "grad_norm": 19.840605630082592, + "learning_rate": 1.694141952768199e-05, + "loss": 0.6985, + "step": 5342 + }, + { + "epoch": 0.83, + "grad_norm": 14.279210167727102, + "learning_rate": 1.694020549467826e-05, + "loss": 0.7811, + "step": 5343 + }, + { + "epoch": 0.83, + "grad_norm": 19.04089971795267, + "learning_rate": 1.6938991264299145e-05, + "loss": 0.8073, + "step": 5344 + }, + { + "epoch": 0.83, + "grad_norm": 15.756741233425233, + "learning_rate": 1.693777683657918e-05, + "loss": 0.7204, + "step": 5345 + }, + { + "epoch": 0.84, + "grad_norm": 21.743478395737206, + "learning_rate": 1.6936562211552895e-05, + "loss": 0.7173, + "step": 5346 + }, + { + "epoch": 0.84, + "grad_norm": 18.310044583768175, + "learning_rate": 1.693534738925484e-05, + "loss": 0.7546, + "step": 5347 + }, + { + "epoch": 0.84, + "grad_norm": 17.8291652193765, + "learning_rate": 1.6934132369719563e-05, + "loss": 0.8646, + "step": 5348 + }, + { + "epoch": 0.84, + "grad_norm": 13.072921132344042, + "learning_rate": 1.693291715298162e-05, + "loss": 0.6123, + "step": 5349 + }, + { + "epoch": 0.84, + "grad_norm": 17.40706843792547, + "learning_rate": 1.6931701739075568e-05, + "loss": 0.7233, + "step": 5350 + }, + { + "epoch": 0.84, + "grad_norm": 13.430716859603752, + "learning_rate": 1.6930486128035974e-05, + "loss": 0.7303, + "step": 5351 + }, + { + "epoch": 0.84, + "grad_norm": 17.321403549064605, + "learning_rate": 
1.6929270319897407e-05, + "loss": 0.796, + "step": 5352 + }, + { + "epoch": 0.84, + "grad_norm": 35.42729013989812, + "learning_rate": 1.6928054314694443e-05, + "loss": 0.8944, + "step": 5353 + }, + { + "epoch": 0.84, + "grad_norm": 11.109657402043945, + "learning_rate": 1.6926838112461668e-05, + "loss": 0.6913, + "step": 5354 + }, + { + "epoch": 0.84, + "grad_norm": 24.642529293682223, + "learning_rate": 1.692562171323367e-05, + "loss": 0.7497, + "step": 5355 + }, + { + "epoch": 0.84, + "grad_norm": 15.971605644649218, + "learning_rate": 1.6924405117045046e-05, + "loss": 0.7153, + "step": 5356 + }, + { + "epoch": 0.84, + "grad_norm": 23.764910216620862, + "learning_rate": 1.692318832393039e-05, + "loss": 0.7474, + "step": 5357 + }, + { + "epoch": 0.84, + "grad_norm": 17.43158527696729, + "learning_rate": 1.6921971333924304e-05, + "loss": 0.7534, + "step": 5358 + }, + { + "epoch": 0.84, + "grad_norm": 18.385573703015968, + "learning_rate": 1.6920754147061406e-05, + "loss": 0.6754, + "step": 5359 + }, + { + "epoch": 0.84, + "grad_norm": 19.13515113838597, + "learning_rate": 1.691953676337631e-05, + "loss": 0.7512, + "step": 5360 + }, + { + "epoch": 0.84, + "grad_norm": 29.899136083319725, + "learning_rate": 1.691831918290363e-05, + "loss": 0.8366, + "step": 5361 + }, + { + "epoch": 0.84, + "grad_norm": 18.289847191325563, + "learning_rate": 1.6917101405678004e-05, + "loss": 0.7825, + "step": 5362 + }, + { + "epoch": 0.84, + "grad_norm": 18.41337792309947, + "learning_rate": 1.691588343173406e-05, + "loss": 0.7244, + "step": 5363 + }, + { + "epoch": 0.84, + "grad_norm": 24.716262148435145, + "learning_rate": 1.6914665261106434e-05, + "loss": 0.8078, + "step": 5364 + }, + { + "epoch": 0.84, + "grad_norm": 18.652928943341834, + "learning_rate": 1.691344689382978e-05, + "loss": 0.7657, + "step": 5365 + }, + { + "epoch": 0.84, + "grad_norm": 20.2795215187537, + "learning_rate": 1.6912228329938734e-05, + "loss": 0.7703, + "step": 5366 + }, + { + "epoch": 0.84, + "grad_norm": 13.675518267624597, + "learning_rate": 1.691100956946796e-05, + "loss": 0.6667, + "step": 5367 + }, + { + "epoch": 0.84, + "grad_norm": 12.976858003617421, + "learning_rate": 1.6909790612452114e-05, + "loss": 0.6166, + "step": 5368 + }, + { + "epoch": 0.84, + "grad_norm": 18.270833606640224, + "learning_rate": 1.690857145892587e-05, + "loss": 0.6824, + "step": 5369 + }, + { + "epoch": 0.84, + "grad_norm": 23.864918181674533, + "learning_rate": 1.6907352108923892e-05, + "loss": 0.7064, + "step": 5370 + }, + { + "epoch": 0.84, + "grad_norm": 29.027192003091926, + "learning_rate": 1.690613256248086e-05, + "loss": 0.7339, + "step": 5371 + }, + { + "epoch": 0.84, + "grad_norm": 19.325723336958, + "learning_rate": 1.690491281963146e-05, + "loss": 0.7569, + "step": 5372 + }, + { + "epoch": 0.84, + "grad_norm": 17.778871219588932, + "learning_rate": 1.690369288041038e-05, + "loss": 0.6858, + "step": 5373 + }, + { + "epoch": 0.84, + "grad_norm": 21.28704337037985, + "learning_rate": 1.690247274485231e-05, + "loss": 0.7345, + "step": 5374 + }, + { + "epoch": 0.84, + "grad_norm": 17.14197846774523, + "learning_rate": 1.690125241299195e-05, + "loss": 0.8074, + "step": 5375 + }, + { + "epoch": 0.84, + "grad_norm": 11.866728701584494, + "learning_rate": 1.6900031884864013e-05, + "loss": 0.7064, + "step": 5376 + }, + { + "epoch": 0.84, + "grad_norm": 12.400844693638945, + "learning_rate": 1.6898811160503204e-05, + "loss": 0.7522, + "step": 5377 + }, + { + "epoch": 0.84, + "grad_norm": 28.037670620170804, + "learning_rate": 
1.6897590239944242e-05, + "loss": 0.8515, + "step": 5378 + }, + { + "epoch": 0.84, + "grad_norm": 34.703549224891866, + "learning_rate": 1.6896369123221852e-05, + "loss": 0.7814, + "step": 5379 + }, + { + "epoch": 0.84, + "grad_norm": 17.171722151099456, + "learning_rate": 1.6895147810370755e-05, + "loss": 0.6885, + "step": 5380 + }, + { + "epoch": 0.84, + "grad_norm": 21.14283453523767, + "learning_rate": 1.6893926301425685e-05, + "loss": 0.7007, + "step": 5381 + }, + { + "epoch": 0.84, + "grad_norm": 18.788615669917164, + "learning_rate": 1.6892704596421386e-05, + "loss": 0.7271, + "step": 5382 + }, + { + "epoch": 0.84, + "grad_norm": 17.446338854851334, + "learning_rate": 1.6891482695392603e-05, + "loss": 0.7586, + "step": 5383 + }, + { + "epoch": 0.84, + "grad_norm": 15.167327045008195, + "learning_rate": 1.6890260598374083e-05, + "loss": 0.6385, + "step": 5384 + }, + { + "epoch": 0.84, + "grad_norm": 9.075636376512431, + "learning_rate": 1.688903830540058e-05, + "loss": 0.5901, + "step": 5385 + }, + { + "epoch": 0.84, + "grad_norm": 14.96259297581637, + "learning_rate": 1.6887815816506858e-05, + "loss": 0.7269, + "step": 5386 + }, + { + "epoch": 0.84, + "grad_norm": 14.875292372935663, + "learning_rate": 1.6886593131727687e-05, + "loss": 0.6637, + "step": 5387 + }, + { + "epoch": 0.84, + "grad_norm": 17.162009929500407, + "learning_rate": 1.688537025109783e-05, + "loss": 0.6942, + "step": 5388 + }, + { + "epoch": 0.84, + "grad_norm": 19.860024641923033, + "learning_rate": 1.6884147174652077e-05, + "loss": 0.8074, + "step": 5389 + }, + { + "epoch": 0.84, + "grad_norm": 17.332970647263213, + "learning_rate": 1.68829239024252e-05, + "loss": 0.6906, + "step": 5390 + }, + { + "epoch": 0.84, + "grad_norm": 14.801020061311059, + "learning_rate": 1.6881700434451996e-05, + "loss": 0.6766, + "step": 5391 + }, + { + "epoch": 0.84, + "grad_norm": 30.397282887052942, + "learning_rate": 1.6880476770767256e-05, + "loss": 0.7628, + "step": 5392 + }, + { + "epoch": 0.84, + "grad_norm": 23.59440561482592, + "learning_rate": 1.6879252911405782e-05, + "loss": 0.7597, + "step": 5393 + }, + { + "epoch": 0.84, + "grad_norm": 15.572461786664956, + "learning_rate": 1.6878028856402382e-05, + "loss": 0.5419, + "step": 5394 + }, + { + "epoch": 0.84, + "grad_norm": 17.872457975420073, + "learning_rate": 1.6876804605791864e-05, + "loss": 0.7453, + "step": 5395 + }, + { + "epoch": 0.84, + "grad_norm": 19.52819200151108, + "learning_rate": 1.6875580159609044e-05, + "loss": 0.65, + "step": 5396 + }, + { + "epoch": 0.84, + "grad_norm": 12.497709574665823, + "learning_rate": 1.6874355517888747e-05, + "loss": 0.7285, + "step": 5397 + }, + { + "epoch": 0.84, + "grad_norm": 18.48034715582227, + "learning_rate": 1.6873130680665798e-05, + "loss": 0.7966, + "step": 5398 + }, + { + "epoch": 0.84, + "grad_norm": 16.77543020116055, + "learning_rate": 1.6871905647975038e-05, + "loss": 0.6778, + "step": 5399 + }, + { + "epoch": 0.84, + "grad_norm": 18.77171730148631, + "learning_rate": 1.6870680419851297e-05, + "loss": 0.7279, + "step": 5400 + }, + { + "epoch": 0.84, + "grad_norm": 23.7211224584472, + "learning_rate": 1.686945499632943e-05, + "loss": 0.8393, + "step": 5401 + }, + { + "epoch": 0.84, + "grad_norm": 24.29289405996517, + "learning_rate": 1.6868229377444276e-05, + "loss": 0.6865, + "step": 5402 + }, + { + "epoch": 0.84, + "grad_norm": 15.51783646692002, + "learning_rate": 1.6867003563230697e-05, + "loss": 0.7408, + "step": 5403 + }, + { + "epoch": 0.84, + "grad_norm": 17.786608894086974, + "learning_rate": 
1.6865777553723558e-05, + "loss": 0.676, + "step": 5404 + }, + { + "epoch": 0.84, + "grad_norm": 18.13278356432301, + "learning_rate": 1.6864551348957717e-05, + "loss": 0.723, + "step": 5405 + }, + { + "epoch": 0.84, + "grad_norm": 13.032227648738216, + "learning_rate": 1.686332494896805e-05, + "loss": 0.7232, + "step": 5406 + }, + { + "epoch": 0.84, + "grad_norm": 13.030171626454523, + "learning_rate": 1.686209835378944e-05, + "loss": 0.8027, + "step": 5407 + }, + { + "epoch": 0.84, + "grad_norm": 14.975415536603201, + "learning_rate": 1.6860871563456766e-05, + "loss": 0.7442, + "step": 5408 + }, + { + "epoch": 0.84, + "grad_norm": 19.163654531435537, + "learning_rate": 1.685964457800492e-05, + "loss": 0.7449, + "step": 5409 + }, + { + "epoch": 0.85, + "grad_norm": 13.836815275514981, + "learning_rate": 1.6858417397468792e-05, + "loss": 0.6989, + "step": 5410 + }, + { + "epoch": 0.85, + "grad_norm": 16.020592201147846, + "learning_rate": 1.6857190021883287e-05, + "loss": 0.7022, + "step": 5411 + }, + { + "epoch": 0.85, + "grad_norm": 21.64287639089137, + "learning_rate": 1.685596245128331e-05, + "loss": 0.6749, + "step": 5412 + }, + { + "epoch": 0.85, + "grad_norm": 23.36718262545153, + "learning_rate": 1.685473468570377e-05, + "loss": 0.746, + "step": 5413 + }, + { + "epoch": 0.85, + "grad_norm": 147.37386729526654, + "learning_rate": 1.6853506725179584e-05, + "loss": 0.6308, + "step": 5414 + }, + { + "epoch": 0.85, + "grad_norm": 19.43013961996076, + "learning_rate": 1.6852278569745678e-05, + "loss": 0.8726, + "step": 5415 + }, + { + "epoch": 0.85, + "grad_norm": 17.542861201709016, + "learning_rate": 1.685105021943698e-05, + "loss": 0.6851, + "step": 5416 + }, + { + "epoch": 0.85, + "grad_norm": 19.034092360070094, + "learning_rate": 1.6849821674288418e-05, + "loss": 0.6692, + "step": 5417 + }, + { + "epoch": 0.85, + "grad_norm": 21.483809558016567, + "learning_rate": 1.6848592934334934e-05, + "loss": 0.6107, + "step": 5418 + }, + { + "epoch": 0.85, + "grad_norm": 23.327963325773197, + "learning_rate": 1.6847363999611475e-05, + "loss": 0.8643, + "step": 5419 + }, + { + "epoch": 0.85, + "grad_norm": 23.43470713696328, + "learning_rate": 1.6846134870152987e-05, + "loss": 0.6726, + "step": 5420 + }, + { + "epoch": 0.85, + "grad_norm": 25.91558776474682, + "learning_rate": 1.684490554599443e-05, + "loss": 0.7237, + "step": 5421 + }, + { + "epoch": 0.85, + "grad_norm": 20.068149151594326, + "learning_rate": 1.6843676027170764e-05, + "loss": 0.7284, + "step": 5422 + }, + { + "epoch": 0.85, + "grad_norm": 22.815319879861377, + "learning_rate": 1.6842446313716957e-05, + "loss": 0.7744, + "step": 5423 + }, + { + "epoch": 0.85, + "grad_norm": 21.963717502821574, + "learning_rate": 1.6841216405667976e-05, + "loss": 0.7257, + "step": 5424 + }, + { + "epoch": 0.85, + "grad_norm": 24.63696057715217, + "learning_rate": 1.6839986303058803e-05, + "loss": 0.7225, + "step": 5425 + }, + { + "epoch": 0.85, + "grad_norm": 15.247224644113391, + "learning_rate": 1.6838756005924425e-05, + "loss": 0.7258, + "step": 5426 + }, + { + "epoch": 0.85, + "grad_norm": 14.773143098158165, + "learning_rate": 1.6837525514299823e-05, + "loss": 0.7017, + "step": 5427 + }, + { + "epoch": 0.85, + "grad_norm": 14.448104473049431, + "learning_rate": 1.6836294828219997e-05, + "loss": 0.6968, + "step": 5428 + }, + { + "epoch": 0.85, + "grad_norm": 23.374370569026592, + "learning_rate": 1.6835063947719943e-05, + "loss": 0.7632, + "step": 5429 + }, + { + "epoch": 0.85, + "grad_norm": 21.36995378960079, + "learning_rate": 
1.683383287283467e-05, + "loss": 0.7148, + "step": 5430 + }, + { + "epoch": 0.85, + "grad_norm": 18.512365207142757, + "learning_rate": 1.683260160359919e-05, + "loss": 0.7483, + "step": 5431 + }, + { + "epoch": 0.85, + "grad_norm": 47.606828254324384, + "learning_rate": 1.6831370140048513e-05, + "loss": 0.7077, + "step": 5432 + }, + { + "epoch": 0.85, + "grad_norm": 14.440612262962913, + "learning_rate": 1.6830138482217667e-05, + "loss": 0.6902, + "step": 5433 + }, + { + "epoch": 0.85, + "grad_norm": 27.612076012484934, + "learning_rate": 1.6828906630141678e-05, + "loss": 0.7381, + "step": 5434 + }, + { + "epoch": 0.85, + "grad_norm": 15.960302204889638, + "learning_rate": 1.682767458385558e-05, + "loss": 0.7493, + "step": 5435 + }, + { + "epoch": 0.85, + "grad_norm": 23.36174247061062, + "learning_rate": 1.682644234339441e-05, + "loss": 0.777, + "step": 5436 + }, + { + "epoch": 0.85, + "grad_norm": 16.07842362981971, + "learning_rate": 1.6825209908793217e-05, + "loss": 0.6903, + "step": 5437 + }, + { + "epoch": 0.85, + "grad_norm": 20.78099443880849, + "learning_rate": 1.682397728008704e-05, + "loss": 0.7493, + "step": 5438 + }, + { + "epoch": 0.85, + "grad_norm": 24.07761205111984, + "learning_rate": 1.6822744457310948e-05, + "loss": 0.8222, + "step": 5439 + }, + { + "epoch": 0.85, + "grad_norm": 16.066881692623568, + "learning_rate": 1.682151144049999e-05, + "loss": 0.7605, + "step": 5440 + }, + { + "epoch": 0.85, + "grad_norm": 20.300542475118444, + "learning_rate": 1.682027822968924e-05, + "loss": 0.7117, + "step": 5441 + }, + { + "epoch": 0.85, + "grad_norm": 32.28297065403295, + "learning_rate": 1.6819044824913762e-05, + "loss": 0.7171, + "step": 5442 + }, + { + "epoch": 0.85, + "grad_norm": 26.16915410203333, + "learning_rate": 1.6817811226208643e-05, + "loss": 0.7454, + "step": 5443 + }, + { + "epoch": 0.85, + "grad_norm": 20.004041628462364, + "learning_rate": 1.681657743360896e-05, + "loss": 0.6326, + "step": 5444 + }, + { + "epoch": 0.85, + "grad_norm": 25.7243775616582, + "learning_rate": 1.6815343447149803e-05, + "loss": 0.6992, + "step": 5445 + }, + { + "epoch": 0.85, + "grad_norm": 19.069653091375866, + "learning_rate": 1.681410926686626e-05, + "loss": 0.7899, + "step": 5446 + }, + { + "epoch": 0.85, + "grad_norm": 16.86751397237882, + "learning_rate": 1.6812874892793443e-05, + "loss": 0.6546, + "step": 5447 + }, + { + "epoch": 0.85, + "grad_norm": 14.301640517827305, + "learning_rate": 1.6811640324966446e-05, + "loss": 0.7471, + "step": 5448 + }, + { + "epoch": 0.85, + "grad_norm": 26.96025782314002, + "learning_rate": 1.681040556342038e-05, + "loss": 0.7041, + "step": 5449 + }, + { + "epoch": 0.85, + "grad_norm": 17.10642878801702, + "learning_rate": 1.680917060819037e-05, + "loss": 0.7166, + "step": 5450 + }, + { + "epoch": 0.85, + "grad_norm": 20.825284947407024, + "learning_rate": 1.6807935459311528e-05, + "loss": 0.7401, + "step": 5451 + }, + { + "epoch": 0.85, + "grad_norm": 24.84335733979094, + "learning_rate": 1.6806700116818982e-05, + "loss": 0.7844, + "step": 5452 + }, + { + "epoch": 0.85, + "grad_norm": 28.750581994592896, + "learning_rate": 1.680546458074787e-05, + "loss": 0.6945, + "step": 5453 + }, + { + "epoch": 0.85, + "grad_norm": 21.368784116409262, + "learning_rate": 1.6804228851133326e-05, + "loss": 0.7021, + "step": 5454 + }, + { + "epoch": 0.85, + "grad_norm": 12.474745071948833, + "learning_rate": 1.6802992928010496e-05, + "loss": 0.7248, + "step": 5455 + }, + { + "epoch": 0.85, + "grad_norm": 21.29534970388788, + "learning_rate": 
1.680175681141452e-05, + "loss": 0.7332, + "step": 5456 + }, + { + "epoch": 0.85, + "grad_norm": 20.158171505233792, + "learning_rate": 1.6800520501380564e-05, + "loss": 0.7238, + "step": 5457 + }, + { + "epoch": 0.85, + "grad_norm": 18.72776420089135, + "learning_rate": 1.679928399794378e-05, + "loss": 0.7926, + "step": 5458 + }, + { + "epoch": 0.85, + "grad_norm": 18.651488879121537, + "learning_rate": 1.6798047301139338e-05, + "loss": 0.7782, + "step": 5459 + }, + { + "epoch": 0.85, + "grad_norm": 21.378991828550618, + "learning_rate": 1.679681041100241e-05, + "loss": 0.692, + "step": 5460 + }, + { + "epoch": 0.85, + "grad_norm": 14.95584336348539, + "learning_rate": 1.6795573327568168e-05, + "loss": 0.7196, + "step": 5461 + }, + { + "epoch": 0.85, + "grad_norm": 23.752709687151274, + "learning_rate": 1.6794336050871797e-05, + "loss": 0.6768, + "step": 5462 + }, + { + "epoch": 0.85, + "grad_norm": 17.039901966332902, + "learning_rate": 1.6793098580948482e-05, + "loss": 0.7626, + "step": 5463 + }, + { + "epoch": 0.85, + "grad_norm": 20.38153448188531, + "learning_rate": 1.6791860917833417e-05, + "loss": 0.7007, + "step": 5464 + }, + { + "epoch": 0.85, + "grad_norm": 22.905846184974084, + "learning_rate": 1.67906230615618e-05, + "loss": 0.7494, + "step": 5465 + }, + { + "epoch": 0.85, + "grad_norm": 14.117005847887409, + "learning_rate": 1.6789385012168836e-05, + "loss": 0.7091, + "step": 5466 + }, + { + "epoch": 0.85, + "grad_norm": 16.86673968499415, + "learning_rate": 1.6788146769689734e-05, + "loss": 0.7533, + "step": 5467 + }, + { + "epoch": 0.85, + "grad_norm": 32.723153092673385, + "learning_rate": 1.678690833415971e-05, + "loss": 0.7846, + "step": 5468 + }, + { + "epoch": 0.85, + "grad_norm": 31.071843660843896, + "learning_rate": 1.678566970561398e-05, + "loss": 0.8239, + "step": 5469 + }, + { + "epoch": 0.85, + "grad_norm": 18.769982936477426, + "learning_rate": 1.678443088408778e-05, + "loss": 0.7651, + "step": 5470 + }, + { + "epoch": 0.85, + "grad_norm": 15.14182436378412, + "learning_rate": 1.6783191869616327e-05, + "loss": 0.678, + "step": 5471 + }, + { + "epoch": 0.85, + "grad_norm": 17.497020667499044, + "learning_rate": 1.678195266223487e-05, + "loss": 0.6751, + "step": 5472 + }, + { + "epoch": 0.85, + "grad_norm": 28.61302556831773, + "learning_rate": 1.6780713261978646e-05, + "loss": 0.742, + "step": 5473 + }, + { + "epoch": 0.86, + "grad_norm": 18.251215526847925, + "learning_rate": 1.67794736688829e-05, + "loss": 0.7207, + "step": 5474 + }, + { + "epoch": 0.86, + "grad_norm": 20.657820768118793, + "learning_rate": 1.6778233882982894e-05, + "loss": 0.7102, + "step": 5475 + }, + { + "epoch": 0.86, + "grad_norm": 28.817946007768445, + "learning_rate": 1.6776993904313875e-05, + "loss": 0.6995, + "step": 5476 + }, + { + "epoch": 0.86, + "grad_norm": 17.36170447777714, + "learning_rate": 1.677575373291112e-05, + "loss": 0.8064, + "step": 5477 + }, + { + "epoch": 0.86, + "grad_norm": 16.105219809241028, + "learning_rate": 1.6774513368809887e-05, + "loss": 0.8047, + "step": 5478 + }, + { + "epoch": 0.86, + "grad_norm": 17.222777176517035, + "learning_rate": 1.677327281204546e-05, + "loss": 0.6031, + "step": 5479 + }, + { + "epoch": 0.86, + "grad_norm": 17.26237220675424, + "learning_rate": 1.6772032062653115e-05, + "loss": 0.723, + "step": 5480 + }, + { + "epoch": 0.86, + "grad_norm": 26.956443900727262, + "learning_rate": 1.677079112066814e-05, + "loss": 0.6891, + "step": 5481 + }, + { + "epoch": 0.86, + "grad_norm": 27.18991411715872, + "learning_rate": 
1.6769549986125827e-05, + "loss": 0.7149, + "step": 5482 + }, + { + "epoch": 0.86, + "grad_norm": 15.601994523287907, + "learning_rate": 1.6768308659061474e-05, + "loss": 0.7284, + "step": 5483 + }, + { + "epoch": 0.86, + "grad_norm": 23.101024903307696, + "learning_rate": 1.6767067139510383e-05, + "loss": 0.8209, + "step": 5484 + }, + { + "epoch": 0.86, + "grad_norm": 14.222146911217841, + "learning_rate": 1.6765825427507855e-05, + "loss": 0.7532, + "step": 5485 + }, + { + "epoch": 0.86, + "grad_norm": 23.919897616914454, + "learning_rate": 1.6764583523089214e-05, + "loss": 0.7917, + "step": 5486 + }, + { + "epoch": 0.86, + "grad_norm": 24.025062797275268, + "learning_rate": 1.6763341426289773e-05, + "loss": 0.67, + "step": 5487 + }, + { + "epoch": 0.86, + "grad_norm": 11.989780899025297, + "learning_rate": 1.676209913714486e-05, + "loss": 0.7046, + "step": 5488 + }, + { + "epoch": 0.86, + "grad_norm": 18.214736099628603, + "learning_rate": 1.6760856655689804e-05, + "loss": 0.6675, + "step": 5489 + }, + { + "epoch": 0.86, + "grad_norm": 22.572673796557577, + "learning_rate": 1.675961398195994e-05, + "loss": 0.7588, + "step": 5490 + }, + { + "epoch": 0.86, + "grad_norm": 18.24116876277585, + "learning_rate": 1.6758371115990607e-05, + "loss": 0.7645, + "step": 5491 + }, + { + "epoch": 0.86, + "grad_norm": 13.527954935404674, + "learning_rate": 1.6757128057817154e-05, + "loss": 0.6699, + "step": 5492 + }, + { + "epoch": 0.86, + "grad_norm": 18.71155999089906, + "learning_rate": 1.675588480747493e-05, + "loss": 0.6759, + "step": 5493 + }, + { + "epoch": 0.86, + "grad_norm": 18.509173275191113, + "learning_rate": 1.6754641364999297e-05, + "loss": 0.7605, + "step": 5494 + }, + { + "epoch": 0.86, + "grad_norm": 22.03881733773878, + "learning_rate": 1.675339773042561e-05, + "loss": 0.755, + "step": 5495 + }, + { + "epoch": 0.86, + "grad_norm": 20.29853177962315, + "learning_rate": 1.6752153903789247e-05, + "loss": 0.7541, + "step": 5496 + }, + { + "epoch": 0.86, + "grad_norm": 18.967974491386407, + "learning_rate": 1.6750909885125575e-05, + "loss": 0.7504, + "step": 5497 + }, + { + "epoch": 0.86, + "grad_norm": 25.734535862408897, + "learning_rate": 1.6749665674469975e-05, + "loss": 0.7387, + "step": 5498 + }, + { + "epoch": 0.86, + "grad_norm": 12.494666704626571, + "learning_rate": 1.6748421271857835e-05, + "loss": 0.7133, + "step": 5499 + }, + { + "epoch": 0.86, + "grad_norm": 13.040793883338267, + "learning_rate": 1.6747176677324535e-05, + "loss": 0.6771, + "step": 5500 + }, + { + "epoch": 0.86, + "grad_norm": 18.44159683520143, + "learning_rate": 1.674593189090548e-05, + "loss": 0.6774, + "step": 5501 + }, + { + "epoch": 0.86, + "grad_norm": 28.33537642841377, + "learning_rate": 1.674468691263607e-05, + "loss": 0.737, + "step": 5502 + }, + { + "epoch": 0.86, + "grad_norm": 22.05528572404925, + "learning_rate": 1.674344174255171e-05, + "loss": 0.7308, + "step": 5503 + }, + { + "epoch": 0.86, + "grad_norm": 14.338982081574864, + "learning_rate": 1.674219638068781e-05, + "loss": 0.6879, + "step": 5504 + }, + { + "epoch": 0.86, + "grad_norm": 20.994028790795618, + "learning_rate": 1.6740950827079786e-05, + "loss": 0.7206, + "step": 5505 + }, + { + "epoch": 0.86, + "grad_norm": 28.307530655447184, + "learning_rate": 1.673970508176307e-05, + "loss": 0.6481, + "step": 5506 + }, + { + "epoch": 0.86, + "grad_norm": 22.46851334128871, + "learning_rate": 1.673845914477308e-05, + "loss": 0.7108, + "step": 5507 + }, + { + "epoch": 0.86, + "grad_norm": 18.1834380711694, + "learning_rate": 
1.6737213016145256e-05, + "loss": 0.6724, + "step": 5508 + }, + { + "epoch": 0.86, + "grad_norm": 20.63379417909364, + "learning_rate": 1.6735966695915036e-05, + "loss": 0.6387, + "step": 5509 + }, + { + "epoch": 0.86, + "grad_norm": 16.719482008337145, + "learning_rate": 1.673472018411786e-05, + "loss": 0.7228, + "step": 5510 + }, + { + "epoch": 0.86, + "grad_norm": 16.721704262125755, + "learning_rate": 1.6733473480789183e-05, + "loss": 0.7622, + "step": 5511 + }, + { + "epoch": 0.86, + "grad_norm": 18.955363955890217, + "learning_rate": 1.673222658596446e-05, + "loss": 0.7596, + "step": 5512 + }, + { + "epoch": 0.86, + "grad_norm": 33.99476577021787, + "learning_rate": 1.673097949967915e-05, + "loss": 0.8085, + "step": 5513 + }, + { + "epoch": 0.86, + "grad_norm": 18.60209281426596, + "learning_rate": 1.6729732221968722e-05, + "loss": 0.7025, + "step": 5514 + }, + { + "epoch": 0.86, + "grad_norm": 28.20081843509955, + "learning_rate": 1.6728484752868644e-05, + "loss": 0.6539, + "step": 5515 + }, + { + "epoch": 0.86, + "grad_norm": 16.372695521366598, + "learning_rate": 1.6727237092414397e-05, + "loss": 0.7045, + "step": 5516 + }, + { + "epoch": 0.86, + "grad_norm": 16.56112279789204, + "learning_rate": 1.6725989240641466e-05, + "loss": 0.7136, + "step": 5517 + }, + { + "epoch": 0.86, + "grad_norm": 20.575310276352525, + "learning_rate": 1.672474119758533e-05, + "loss": 0.7474, + "step": 5518 + }, + { + "epoch": 0.86, + "grad_norm": 17.956381313236854, + "learning_rate": 1.6723492963281492e-05, + "loss": 0.6584, + "step": 5519 + }, + { + "epoch": 0.86, + "grad_norm": 20.492598974159456, + "learning_rate": 1.6722244537765444e-05, + "loss": 0.7113, + "step": 5520 + }, + { + "epoch": 0.86, + "grad_norm": 24.99055338242651, + "learning_rate": 1.6720995921072698e-05, + "loss": 0.6908, + "step": 5521 + }, + { + "epoch": 0.86, + "grad_norm": 16.617679999352113, + "learning_rate": 1.6719747113238754e-05, + "loss": 0.6961, + "step": 5522 + }, + { + "epoch": 0.86, + "grad_norm": 21.938635807575874, + "learning_rate": 1.6718498114299138e-05, + "loss": 0.7129, + "step": 5523 + }, + { + "epoch": 0.86, + "grad_norm": 16.346676757710142, + "learning_rate": 1.6717248924289363e-05, + "loss": 0.7396, + "step": 5524 + }, + { + "epoch": 0.86, + "grad_norm": 16.889399115692704, + "learning_rate": 1.671599954324496e-05, + "loss": 0.8214, + "step": 5525 + }, + { + "epoch": 0.86, + "grad_norm": 15.567382454027097, + "learning_rate": 1.6714749971201457e-05, + "loss": 0.7353, + "step": 5526 + }, + { + "epoch": 0.86, + "grad_norm": 32.615304135524894, + "learning_rate": 1.6713500208194395e-05, + "loss": 0.7089, + "step": 5527 + }, + { + "epoch": 0.86, + "grad_norm": 14.31624027848153, + "learning_rate": 1.6712250254259313e-05, + "loss": 0.6713, + "step": 5528 + }, + { + "epoch": 0.86, + "grad_norm": 22.378537556372276, + "learning_rate": 1.6711000109431757e-05, + "loss": 0.7627, + "step": 5529 + }, + { + "epoch": 0.86, + "grad_norm": 13.144232575649506, + "learning_rate": 1.6709749773747292e-05, + "loss": 0.6812, + "step": 5530 + }, + { + "epoch": 0.86, + "grad_norm": 38.75782443736315, + "learning_rate": 1.670849924724146e-05, + "loss": 0.7546, + "step": 5531 + }, + { + "epoch": 0.86, + "grad_norm": 20.827908124889206, + "learning_rate": 1.670724852994984e-05, + "loss": 0.7323, + "step": 5532 + }, + { + "epoch": 0.86, + "grad_norm": 19.193446520468456, + "learning_rate": 1.6705997621907993e-05, + "loss": 0.7449, + "step": 5533 + }, + { + "epoch": 0.86, + "grad_norm": 21.531535550896763, + "learning_rate": 
1.6704746523151498e-05, + "loss": 0.7342, + "step": 5534 + }, + { + "epoch": 0.86, + "grad_norm": 19.871039653514718, + "learning_rate": 1.670349523371593e-05, + "loss": 0.8125, + "step": 5535 + }, + { + "epoch": 0.86, + "grad_norm": 18.494552064670486, + "learning_rate": 1.670224375363688e-05, + "loss": 0.7303, + "step": 5536 + }, + { + "epoch": 0.86, + "grad_norm": 16.702676063156925, + "learning_rate": 1.670099208294994e-05, + "loss": 0.7046, + "step": 5537 + }, + { + "epoch": 0.87, + "grad_norm": 19.632325735255055, + "learning_rate": 1.6699740221690706e-05, + "loss": 0.8022, + "step": 5538 + }, + { + "epoch": 0.87, + "grad_norm": 14.587098073427606, + "learning_rate": 1.669848816989478e-05, + "loss": 0.7318, + "step": 5539 + }, + { + "epoch": 0.87, + "grad_norm": 14.253821634348277, + "learning_rate": 1.6697235927597763e-05, + "loss": 0.7121, + "step": 5540 + }, + { + "epoch": 0.87, + "grad_norm": 23.327291501224117, + "learning_rate": 1.6695983494835283e-05, + "loss": 0.7967, + "step": 5541 + }, + { + "epoch": 0.87, + "grad_norm": 21.954009283318214, + "learning_rate": 1.669473087164294e-05, + "loss": 0.7708, + "step": 5542 + }, + { + "epoch": 0.87, + "grad_norm": 15.513382041747036, + "learning_rate": 1.6693478058056375e-05, + "loss": 0.7594, + "step": 5543 + }, + { + "epoch": 0.87, + "grad_norm": 13.237630234348744, + "learning_rate": 1.6692225054111207e-05, + "loss": 0.6948, + "step": 5544 + }, + { + "epoch": 0.87, + "grad_norm": 15.505037142989893, + "learning_rate": 1.669097185984307e-05, + "loss": 0.696, + "step": 5545 + }, + { + "epoch": 0.87, + "grad_norm": 19.21335438607501, + "learning_rate": 1.668971847528761e-05, + "loss": 0.6997, + "step": 5546 + }, + { + "epoch": 0.87, + "grad_norm": 14.882450275568763, + "learning_rate": 1.668846490048047e-05, + "loss": 0.677, + "step": 5547 + }, + { + "epoch": 0.87, + "grad_norm": 20.483848613025124, + "learning_rate": 1.6687211135457304e-05, + "loss": 0.8614, + "step": 5548 + }, + { + "epoch": 0.87, + "grad_norm": 18.915116410356028, + "learning_rate": 1.668595718025376e-05, + "loss": 0.7381, + "step": 5549 + }, + { + "epoch": 0.87, + "grad_norm": 20.84288955139238, + "learning_rate": 1.6684703034905507e-05, + "loss": 0.7433, + "step": 5550 + }, + { + "epoch": 0.87, + "grad_norm": 18.270744241049968, + "learning_rate": 1.668344869944821e-05, + "loss": 0.6813, + "step": 5551 + }, + { + "epoch": 0.87, + "grad_norm": 18.405522495247816, + "learning_rate": 1.6682194173917543e-05, + "loss": 0.7991, + "step": 5552 + }, + { + "epoch": 0.87, + "grad_norm": 21.065625722874085, + "learning_rate": 1.6680939458349184e-05, + "loss": 0.6658, + "step": 5553 + }, + { + "epoch": 0.87, + "grad_norm": 16.149458870886434, + "learning_rate": 1.667968455277881e-05, + "loss": 0.8056, + "step": 5554 + }, + { + "epoch": 0.87, + "grad_norm": 20.546887985921607, + "learning_rate": 1.667842945724212e-05, + "loss": 0.675, + "step": 5555 + }, + { + "epoch": 0.87, + "grad_norm": 13.620954666543469, + "learning_rate": 1.6677174171774798e-05, + "loss": 0.6429, + "step": 5556 + }, + { + "epoch": 0.87, + "grad_norm": 18.3793106010298, + "learning_rate": 1.6675918696412552e-05, + "loss": 0.6852, + "step": 5557 + }, + { + "epoch": 0.87, + "grad_norm": 14.533755819574274, + "learning_rate": 1.6674663031191084e-05, + "loss": 0.7247, + "step": 5558 + }, + { + "epoch": 0.87, + "grad_norm": 19.292760146952514, + "learning_rate": 1.66734071761461e-05, + "loss": 0.7152, + "step": 5559 + }, + { + "epoch": 0.87, + "grad_norm": 23.396429482018345, + "learning_rate": 
1.6672151131313324e-05, + "loss": 0.7848, + "step": 5560 + }, + { + "epoch": 0.87, + "grad_norm": 19.14182856454198, + "learning_rate": 1.6670894896728472e-05, + "loss": 0.7138, + "step": 5561 + }, + { + "epoch": 0.87, + "grad_norm": 22.90061012471951, + "learning_rate": 1.666963847242727e-05, + "loss": 0.7846, + "step": 5562 + }, + { + "epoch": 0.87, + "grad_norm": 18.584828852010293, + "learning_rate": 1.666838185844545e-05, + "loss": 0.7686, + "step": 5563 + }, + { + "epoch": 0.87, + "grad_norm": 19.569462244899825, + "learning_rate": 1.6667125054818756e-05, + "loss": 0.6719, + "step": 5564 + }, + { + "epoch": 0.87, + "grad_norm": 26.307846550925944, + "learning_rate": 1.6665868061582922e-05, + "loss": 0.7473, + "step": 5565 + }, + { + "epoch": 0.87, + "grad_norm": 31.89063735243966, + "learning_rate": 1.66646108787737e-05, + "loss": 0.6967, + "step": 5566 + }, + { + "epoch": 0.87, + "grad_norm": 19.311779746417574, + "learning_rate": 1.6663353506426845e-05, + "loss": 0.7495, + "step": 5567 + }, + { + "epoch": 0.87, + "grad_norm": 23.69390050301414, + "learning_rate": 1.6662095944578112e-05, + "loss": 0.6886, + "step": 5568 + }, + { + "epoch": 0.87, + "grad_norm": 19.175396334712502, + "learning_rate": 1.6660838193263268e-05, + "loss": 0.744, + "step": 5569 + }, + { + "epoch": 0.87, + "grad_norm": 13.753154560751634, + "learning_rate": 1.6659580252518083e-05, + "loss": 0.7076, + "step": 5570 + }, + { + "epoch": 0.87, + "grad_norm": 16.184822257793588, + "learning_rate": 1.665832212237833e-05, + "loss": 0.7598, + "step": 5571 + }, + { + "epoch": 0.87, + "grad_norm": 14.138235499359693, + "learning_rate": 1.6657063802879795e-05, + "loss": 0.6107, + "step": 5572 + }, + { + "epoch": 0.87, + "grad_norm": 22.665448903465165, + "learning_rate": 1.6655805294058256e-05, + "loss": 0.8026, + "step": 5573 + }, + { + "epoch": 0.87, + "grad_norm": 26.5927127239132, + "learning_rate": 1.665454659594951e-05, + "loss": 0.7968, + "step": 5574 + }, + { + "epoch": 0.87, + "grad_norm": 19.468444241270436, + "learning_rate": 1.665328770858935e-05, + "loss": 0.7041, + "step": 5575 + }, + { + "epoch": 0.87, + "grad_norm": 25.703204099191392, + "learning_rate": 1.665202863201358e-05, + "loss": 0.7069, + "step": 5576 + }, + { + "epoch": 0.87, + "grad_norm": 18.347136781098744, + "learning_rate": 1.6650769366258007e-05, + "loss": 0.6781, + "step": 5577 + }, + { + "epoch": 0.87, + "grad_norm": 19.703343744901467, + "learning_rate": 1.6649509911358444e-05, + "loss": 0.6943, + "step": 5578 + }, + { + "epoch": 0.87, + "grad_norm": 18.322211212314883, + "learning_rate": 1.6648250267350708e-05, + "loss": 0.8158, + "step": 5579 + }, + { + "epoch": 0.87, + "grad_norm": 16.781358037455213, + "learning_rate": 1.6646990434270625e-05, + "loss": 0.686, + "step": 5580 + }, + { + "epoch": 0.87, + "grad_norm": 25.481525669403204, + "learning_rate": 1.664573041215402e-05, + "loss": 0.7547, + "step": 5581 + }, + { + "epoch": 0.87, + "grad_norm": 18.18280275060542, + "learning_rate": 1.6644470201036732e-05, + "loss": 0.7663, + "step": 5582 + }, + { + "epoch": 0.87, + "grad_norm": 19.40391433849428, + "learning_rate": 1.6643209800954597e-05, + "loss": 0.7589, + "step": 5583 + }, + { + "epoch": 0.87, + "grad_norm": 22.155087411586884, + "learning_rate": 1.664194921194346e-05, + "loss": 0.6845, + "step": 5584 + }, + { + "epoch": 0.87, + "grad_norm": 19.26437062954492, + "learning_rate": 1.664068843403918e-05, + "loss": 0.8006, + "step": 5585 + }, + { + "epoch": 0.87, + "grad_norm": 16.616893719083563, + "learning_rate": 
1.6639427467277597e-05, + "loss": 0.7166, + "step": 5586 + }, + { + "epoch": 0.87, + "grad_norm": 28.234724825838313, + "learning_rate": 1.663816631169459e-05, + "loss": 0.7036, + "step": 5587 + }, + { + "epoch": 0.87, + "grad_norm": 20.511589595465793, + "learning_rate": 1.663690496732601e-05, + "loss": 0.7027, + "step": 5588 + }, + { + "epoch": 0.87, + "grad_norm": 22.159613335204206, + "learning_rate": 1.663564343420773e-05, + "loss": 0.7976, + "step": 5589 + }, + { + "epoch": 0.87, + "grad_norm": 16.108872972837347, + "learning_rate": 1.6634381712375643e-05, + "loss": 0.7092, + "step": 5590 + }, + { + "epoch": 0.87, + "grad_norm": 20.131143095134703, + "learning_rate": 1.6633119801865617e-05, + "loss": 0.7527, + "step": 5591 + }, + { + "epoch": 0.87, + "grad_norm": 20.755097063373377, + "learning_rate": 1.6631857702713544e-05, + "loss": 0.7212, + "step": 5592 + }, + { + "epoch": 0.87, + "grad_norm": 21.457837596779466, + "learning_rate": 1.6630595414955314e-05, + "loss": 0.6246, + "step": 5593 + }, + { + "epoch": 0.87, + "grad_norm": 13.411398728124018, + "learning_rate": 1.6629332938626836e-05, + "loss": 0.6982, + "step": 5594 + }, + { + "epoch": 0.87, + "grad_norm": 15.84759840415231, + "learning_rate": 1.6628070273764002e-05, + "loss": 0.7234, + "step": 5595 + }, + { + "epoch": 0.87, + "grad_norm": 25.660024459391256, + "learning_rate": 1.6626807420402727e-05, + "loss": 0.7209, + "step": 5596 + }, + { + "epoch": 0.87, + "grad_norm": 18.33829364136699, + "learning_rate": 1.662554437857893e-05, + "loss": 0.8041, + "step": 5597 + }, + { + "epoch": 0.87, + "grad_norm": 17.02438790774433, + "learning_rate": 1.662428114832852e-05, + "loss": 0.7279, + "step": 5598 + }, + { + "epoch": 0.87, + "grad_norm": 22.76716925463418, + "learning_rate": 1.6623017729687432e-05, + "loss": 0.7577, + "step": 5599 + }, + { + "epoch": 0.87, + "grad_norm": 14.27211329754222, + "learning_rate": 1.6621754122691596e-05, + "loss": 0.6983, + "step": 5600 + }, + { + "epoch": 0.87, + "grad_norm": 17.032776306599608, + "learning_rate": 1.6620490327376947e-05, + "loss": 0.721, + "step": 5601 + }, + { + "epoch": 0.88, + "grad_norm": 21.787681220196095, + "learning_rate": 1.6619226343779424e-05, + "loss": 0.6814, + "step": 5602 + }, + { + "epoch": 0.88, + "grad_norm": 23.348860665079567, + "learning_rate": 1.6617962171934975e-05, + "loss": 0.7318, + "step": 5603 + }, + { + "epoch": 0.88, + "grad_norm": 21.521396659286662, + "learning_rate": 1.6616697811879553e-05, + "loss": 0.7363, + "step": 5604 + }, + { + "epoch": 0.88, + "grad_norm": 20.860291335163293, + "learning_rate": 1.661543326364912e-05, + "loss": 0.7472, + "step": 5605 + }, + { + "epoch": 0.88, + "grad_norm": 15.75152451747405, + "learning_rate": 1.661416852727963e-05, + "loss": 0.7367, + "step": 5606 + }, + { + "epoch": 0.88, + "grad_norm": 17.039202729775827, + "learning_rate": 1.661290360280706e-05, + "loss": 0.674, + "step": 5607 + }, + { + "epoch": 0.88, + "grad_norm": 16.370202808262032, + "learning_rate": 1.6611638490267375e-05, + "loss": 0.6773, + "step": 5608 + }, + { + "epoch": 0.88, + "grad_norm": 20.7575965664809, + "learning_rate": 1.6610373189696565e-05, + "loss": 0.7473, + "step": 5609 + }, + { + "epoch": 0.88, + "grad_norm": 19.7891086912162, + "learning_rate": 1.6609107701130603e-05, + "loss": 0.6954, + "step": 5610 + }, + { + "epoch": 0.88, + "grad_norm": 22.139049392867005, + "learning_rate": 1.660784202460549e-05, + "loss": 0.7718, + "step": 5611 + }, + { + "epoch": 0.88, + "grad_norm": 23.288890370907275, + "learning_rate": 
1.6606576160157212e-05, + "loss": 0.7744, + "step": 5612 + }, + { + "epoch": 0.88, + "grad_norm": 24.609885958256594, + "learning_rate": 1.6605310107821774e-05, + "loss": 0.7104, + "step": 5613 + }, + { + "epoch": 0.88, + "grad_norm": 16.668146188800378, + "learning_rate": 1.660404386763518e-05, + "loss": 0.7293, + "step": 5614 + }, + { + "epoch": 0.88, + "grad_norm": 17.494055250018512, + "learning_rate": 1.660277743963344e-05, + "loss": 0.6627, + "step": 5615 + }, + { + "epoch": 0.88, + "grad_norm": 15.059095645956646, + "learning_rate": 1.6601510823852574e-05, + "loss": 0.7107, + "step": 5616 + }, + { + "epoch": 0.88, + "grad_norm": 15.99842022057446, + "learning_rate": 1.66002440203286e-05, + "loss": 0.7479, + "step": 5617 + }, + { + "epoch": 0.88, + "grad_norm": 15.643079812755305, + "learning_rate": 1.659897702909755e-05, + "loss": 0.8548, + "step": 5618 + }, + { + "epoch": 0.88, + "grad_norm": 25.324072378295437, + "learning_rate": 1.659770985019545e-05, + "loss": 0.7261, + "step": 5619 + }, + { + "epoch": 0.88, + "grad_norm": 19.60092128892326, + "learning_rate": 1.6596442483658345e-05, + "loss": 0.792, + "step": 5620 + }, + { + "epoch": 0.88, + "grad_norm": 21.09456805373649, + "learning_rate": 1.6595174929522273e-05, + "loss": 0.7742, + "step": 5621 + }, + { + "epoch": 0.88, + "grad_norm": 19.991880673151805, + "learning_rate": 1.6593907187823284e-05, + "loss": 0.7054, + "step": 5622 + }, + { + "epoch": 0.88, + "grad_norm": 22.437698529741215, + "learning_rate": 1.6592639258597432e-05, + "loss": 0.7062, + "step": 5623 + }, + { + "epoch": 0.88, + "grad_norm": 15.745114927499325, + "learning_rate": 1.6591371141880776e-05, + "loss": 0.7459, + "step": 5624 + }, + { + "epoch": 0.88, + "grad_norm": 13.622390470927279, + "learning_rate": 1.6590102837709382e-05, + "loss": 0.7225, + "step": 5625 + }, + { + "epoch": 0.88, + "grad_norm": 21.870437478685915, + "learning_rate": 1.6588834346119315e-05, + "loss": 0.7657, + "step": 5626 + }, + { + "epoch": 0.88, + "grad_norm": 21.268626183483757, + "learning_rate": 1.6587565667146657e-05, + "loss": 0.887, + "step": 5627 + }, + { + "epoch": 0.88, + "grad_norm": 19.831310196369493, + "learning_rate": 1.6586296800827483e-05, + "loss": 0.7503, + "step": 5628 + }, + { + "epoch": 0.88, + "grad_norm": 13.596742024491475, + "learning_rate": 1.6585027747197882e-05, + "loss": 0.76, + "step": 5629 + }, + { + "epoch": 0.88, + "grad_norm": 14.465565701814533, + "learning_rate": 1.6583758506293945e-05, + "loss": 0.6691, + "step": 5630 + }, + { + "epoch": 0.88, + "grad_norm": 18.34487366382551, + "learning_rate": 1.6582489078151765e-05, + "loss": 0.7564, + "step": 5631 + }, + { + "epoch": 0.88, + "grad_norm": 13.745352710718397, + "learning_rate": 1.6581219462807444e-05, + "loss": 0.6095, + "step": 5632 + }, + { + "epoch": 0.88, + "grad_norm": 20.98266594176781, + "learning_rate": 1.6579949660297098e-05, + "loss": 0.8123, + "step": 5633 + }, + { + "epoch": 0.88, + "grad_norm": 16.608125391575303, + "learning_rate": 1.657867967065683e-05, + "loss": 0.6883, + "step": 5634 + }, + { + "epoch": 0.88, + "grad_norm": 18.35142454758878, + "learning_rate": 1.657740949392276e-05, + "loss": 0.7156, + "step": 5635 + }, + { + "epoch": 0.88, + "grad_norm": 16.73731976555211, + "learning_rate": 1.657613913013101e-05, + "loss": 0.7523, + "step": 5636 + }, + { + "epoch": 0.88, + "grad_norm": 16.179669520627378, + "learning_rate": 1.657486857931771e-05, + "loss": 0.72, + "step": 5637 + }, + { + "epoch": 0.88, + "grad_norm": 18.431933500835687, + "learning_rate": 
1.6573597841518995e-05, + "loss": 0.6616, + "step": 5638 + }, + { + "epoch": 0.88, + "grad_norm": 16.440175977244216, + "learning_rate": 1.6572326916771008e-05, + "loss": 0.7945, + "step": 5639 + }, + { + "epoch": 0.88, + "grad_norm": 11.693828836488256, + "learning_rate": 1.657105580510988e-05, + "loss": 0.7288, + "step": 5640 + }, + { + "epoch": 0.88, + "grad_norm": 14.244733091183958, + "learning_rate": 1.6569784506571772e-05, + "loss": 0.7198, + "step": 5641 + }, + { + "epoch": 0.88, + "grad_norm": 16.140556573491953, + "learning_rate": 1.6568513021192837e-05, + "loss": 0.7317, + "step": 5642 + }, + { + "epoch": 0.88, + "grad_norm": 17.960201168688616, + "learning_rate": 1.6567241349009235e-05, + "loss": 0.6911, + "step": 5643 + }, + { + "epoch": 0.88, + "grad_norm": 17.151401328821926, + "learning_rate": 1.6565969490057128e-05, + "loss": 0.6568, + "step": 5644 + }, + { + "epoch": 0.88, + "grad_norm": 27.846305203315232, + "learning_rate": 1.656469744437269e-05, + "loss": 0.7271, + "step": 5645 + }, + { + "epoch": 0.88, + "grad_norm": 18.376494627860403, + "learning_rate": 1.65634252119921e-05, + "loss": 0.6899, + "step": 5646 + }, + { + "epoch": 0.88, + "grad_norm": 13.58101857633776, + "learning_rate": 1.6562152792951534e-05, + "loss": 0.8136, + "step": 5647 + }, + { + "epoch": 0.88, + "grad_norm": 11.968154964645576, + "learning_rate": 1.656088018728718e-05, + "loss": 0.6698, + "step": 5648 + }, + { + "epoch": 0.88, + "grad_norm": 13.635239206441062, + "learning_rate": 1.6559607395035234e-05, + "loss": 0.6785, + "step": 5649 + }, + { + "epoch": 0.88, + "grad_norm": 17.85055862458894, + "learning_rate": 1.655833441623189e-05, + "loss": 0.7595, + "step": 5650 + }, + { + "epoch": 0.88, + "grad_norm": 11.452070321654034, + "learning_rate": 1.6557061250913352e-05, + "loss": 0.7528, + "step": 5651 + }, + { + "epoch": 0.88, + "grad_norm": 12.448101144352076, + "learning_rate": 1.6555787899115827e-05, + "loss": 0.7606, + "step": 5652 + }, + { + "epoch": 0.88, + "grad_norm": 13.298406981692136, + "learning_rate": 1.6554514360875528e-05, + "loss": 0.7146, + "step": 5653 + }, + { + "epoch": 0.88, + "grad_norm": 22.357977843134766, + "learning_rate": 1.655324063622868e-05, + "loss": 0.6706, + "step": 5654 + }, + { + "epoch": 0.88, + "grad_norm": 20.054341309148466, + "learning_rate": 1.6551966725211497e-05, + "loss": 0.7434, + "step": 5655 + }, + { + "epoch": 0.88, + "grad_norm": 21.89844506270747, + "learning_rate": 1.6550692627860213e-05, + "loss": 0.6801, + "step": 5656 + }, + { + "epoch": 0.88, + "grad_norm": 12.865825831171021, + "learning_rate": 1.6549418344211066e-05, + "loss": 0.679, + "step": 5657 + }, + { + "epoch": 0.88, + "grad_norm": 12.409735980795922, + "learning_rate": 1.6548143874300292e-05, + "loss": 0.6125, + "step": 5658 + }, + { + "epoch": 0.88, + "grad_norm": 24.94414499133065, + "learning_rate": 1.654686921816413e-05, + "loss": 0.6911, + "step": 5659 + }, + { + "epoch": 0.88, + "grad_norm": 18.445974171891383, + "learning_rate": 1.6545594375838846e-05, + "loss": 0.6618, + "step": 5660 + }, + { + "epoch": 0.88, + "grad_norm": 23.57349780480745, + "learning_rate": 1.6544319347360685e-05, + "loss": 0.8199, + "step": 5661 + }, + { + "epoch": 0.88, + "grad_norm": 27.230779203626952, + "learning_rate": 1.6543044132765907e-05, + "loss": 0.6828, + "step": 5662 + }, + { + "epoch": 0.88, + "grad_norm": 18.855952965347512, + "learning_rate": 1.6541768732090784e-05, + "loss": 0.7227, + "step": 5663 + }, + { + "epoch": 0.88, + "grad_norm": 23.783346947768226, + "learning_rate": 
1.6540493145371582e-05, + "loss": 0.6645, + "step": 5664 + }, + { + "epoch": 0.88, + "grad_norm": 17.76466457619321, + "learning_rate": 1.6539217372644585e-05, + "loss": 0.6301, + "step": 5665 + }, + { + "epoch": 0.89, + "grad_norm": 16.164926600786643, + "learning_rate": 1.653794141394607e-05, + "loss": 0.7843, + "step": 5666 + }, + { + "epoch": 0.89, + "grad_norm": 17.844293454140125, + "learning_rate": 1.6536665269312324e-05, + "loss": 0.6663, + "step": 5667 + }, + { + "epoch": 0.89, + "grad_norm": 19.45526192123226, + "learning_rate": 1.6535388938779644e-05, + "loss": 0.7432, + "step": 5668 + }, + { + "epoch": 0.89, + "grad_norm": 18.74445819496282, + "learning_rate": 1.6534112422384324e-05, + "loss": 0.7317, + "step": 5669 + }, + { + "epoch": 0.89, + "grad_norm": 14.787744545486513, + "learning_rate": 1.653283572016267e-05, + "loss": 0.6547, + "step": 5670 + }, + { + "epoch": 0.89, + "grad_norm": 22.759511184284943, + "learning_rate": 1.653155883215099e-05, + "loss": 0.7599, + "step": 5671 + }, + { + "epoch": 0.89, + "grad_norm": 21.623840955587834, + "learning_rate": 1.6530281758385597e-05, + "loss": 0.7579, + "step": 5672 + }, + { + "epoch": 0.89, + "grad_norm": 14.249405641458004, + "learning_rate": 1.652900449890281e-05, + "loss": 0.7172, + "step": 5673 + }, + { + "epoch": 0.89, + "grad_norm": 18.338067834282093, + "learning_rate": 1.6527727053738957e-05, + "loss": 0.6848, + "step": 5674 + }, + { + "epoch": 0.89, + "grad_norm": 16.435249072962097, + "learning_rate": 1.6526449422930363e-05, + "loss": 0.6931, + "step": 5675 + }, + { + "epoch": 0.89, + "grad_norm": 21.02453084021368, + "learning_rate": 1.6525171606513368e-05, + "loss": 0.7353, + "step": 5676 + }, + { + "epoch": 0.89, + "grad_norm": 21.410508837247384, + "learning_rate": 1.6523893604524304e-05, + "loss": 0.6824, + "step": 5677 + }, + { + "epoch": 0.89, + "grad_norm": 16.91434361640482, + "learning_rate": 1.652261541699953e-05, + "loss": 0.6474, + "step": 5678 + }, + { + "epoch": 0.89, + "grad_norm": 19.18647938135595, + "learning_rate": 1.6521337043975384e-05, + "loss": 0.6542, + "step": 5679 + }, + { + "epoch": 0.89, + "grad_norm": 20.790304567604757, + "learning_rate": 1.652005848548823e-05, + "loss": 0.7275, + "step": 5680 + }, + { + "epoch": 0.89, + "grad_norm": 20.48159908862092, + "learning_rate": 1.6518779741574422e-05, + "loss": 0.7223, + "step": 5681 + }, + { + "epoch": 0.89, + "grad_norm": 18.67648162039196, + "learning_rate": 1.6517500812270335e-05, + "loss": 0.7433, + "step": 5682 + }, + { + "epoch": 0.89, + "grad_norm": 18.224208182662455, + "learning_rate": 1.6516221697612338e-05, + "loss": 0.6883, + "step": 5683 + }, + { + "epoch": 0.89, + "grad_norm": 28.35615649915233, + "learning_rate": 1.6514942397636807e-05, + "loss": 0.8017, + "step": 5684 + }, + { + "epoch": 0.89, + "grad_norm": 15.18832427651014, + "learning_rate": 1.6513662912380123e-05, + "loss": 0.793, + "step": 5685 + }, + { + "epoch": 0.89, + "grad_norm": 15.798340100303275, + "learning_rate": 1.651238324187868e-05, + "loss": 0.6869, + "step": 5686 + }, + { + "epoch": 0.89, + "grad_norm": 23.429544259524203, + "learning_rate": 1.6511103386168867e-05, + "loss": 0.76, + "step": 5687 + }, + { + "epoch": 0.89, + "grad_norm": 20.18241841323595, + "learning_rate": 1.650982334528708e-05, + "loss": 0.6465, + "step": 5688 + }, + { + "epoch": 0.89, + "grad_norm": 19.43709596600368, + "learning_rate": 1.6508543119269727e-05, + "loss": 0.7059, + "step": 5689 + }, + { + "epoch": 0.89, + "grad_norm": 20.585352701860142, + "learning_rate": 
1.6507262708153215e-05, + "loss": 0.7288, + "step": 5690 + }, + { + "epoch": 0.89, + "grad_norm": 17.692208607133892, + "learning_rate": 1.6505982111973956e-05, + "loss": 0.7269, + "step": 5691 + }, + { + "epoch": 0.89, + "grad_norm": 13.412397001311842, + "learning_rate": 1.6504701330768372e-05, + "loss": 0.6766, + "step": 5692 + }, + { + "epoch": 0.89, + "grad_norm": 15.154341760434438, + "learning_rate": 1.650342036457289e-05, + "loss": 0.7598, + "step": 5693 + }, + { + "epoch": 0.89, + "grad_norm": 24.8203046995012, + "learning_rate": 1.6502139213423936e-05, + "loss": 0.6902, + "step": 5694 + }, + { + "epoch": 0.89, + "grad_norm": 27.61772130210199, + "learning_rate": 1.650085787735795e-05, + "loss": 0.7521, + "step": 5695 + }, + { + "epoch": 0.89, + "grad_norm": 25.034694769455974, + "learning_rate": 1.649957635641136e-05, + "loss": 0.7226, + "step": 5696 + }, + { + "epoch": 0.89, + "grad_norm": 16.32790611567556, + "learning_rate": 1.6498294650620628e-05, + "loss": 0.6905, + "step": 5697 + }, + { + "epoch": 0.89, + "grad_norm": 20.756171411085806, + "learning_rate": 1.6497012760022197e-05, + "loss": 0.6105, + "step": 5698 + }, + { + "epoch": 0.89, + "grad_norm": 22.9530410349578, + "learning_rate": 1.649573068465252e-05, + "loss": 0.8234, + "step": 5699 + }, + { + "epoch": 0.89, + "grad_norm": 21.5295224375633, + "learning_rate": 1.6494448424548065e-05, + "loss": 0.7711, + "step": 5700 + }, + { + "epoch": 0.89, + "grad_norm": 20.882921491167963, + "learning_rate": 1.6493165979745294e-05, + "loss": 0.7079, + "step": 5701 + }, + { + "epoch": 0.89, + "grad_norm": 14.860371266449944, + "learning_rate": 1.649188335028068e-05, + "loss": 0.7249, + "step": 5702 + }, + { + "epoch": 0.89, + "grad_norm": 26.544225030604608, + "learning_rate": 1.6490600536190705e-05, + "loss": 0.8041, + "step": 5703 + }, + { + "epoch": 0.89, + "grad_norm": 18.9381631775993, + "learning_rate": 1.6489317537511846e-05, + "loss": 0.6826, + "step": 5704 + }, + { + "epoch": 0.89, + "grad_norm": 17.407314707999674, + "learning_rate": 1.6488034354280592e-05, + "loss": 0.7103, + "step": 5705 + }, + { + "epoch": 0.89, + "grad_norm": 27.182579225513994, + "learning_rate": 1.648675098653344e-05, + "loss": 0.762, + "step": 5706 + }, + { + "epoch": 0.89, + "grad_norm": 14.006120731722394, + "learning_rate": 1.648546743430688e-05, + "loss": 0.7462, + "step": 5707 + }, + { + "epoch": 0.89, + "grad_norm": 20.798014200121603, + "learning_rate": 1.648418369763742e-05, + "loss": 0.7125, + "step": 5708 + }, + { + "epoch": 0.89, + "grad_norm": 14.193339055305193, + "learning_rate": 1.648289977656157e-05, + "loss": 0.6597, + "step": 5709 + }, + { + "epoch": 0.89, + "grad_norm": 15.733403721864674, + "learning_rate": 1.6481615671115845e-05, + "loss": 0.7473, + "step": 5710 + }, + { + "epoch": 0.89, + "grad_norm": 11.467838994361564, + "learning_rate": 1.6480331381336757e-05, + "loss": 0.6704, + "step": 5711 + }, + { + "epoch": 0.89, + "grad_norm": 21.564381874933485, + "learning_rate": 1.647904690726084e-05, + "loss": 0.661, + "step": 5712 + }, + { + "epoch": 0.89, + "grad_norm": 26.289721502515405, + "learning_rate": 1.6477762248924616e-05, + "loss": 0.6799, + "step": 5713 + }, + { + "epoch": 0.89, + "grad_norm": 18.554585598715992, + "learning_rate": 1.6476477406364623e-05, + "loss": 0.7488, + "step": 5714 + }, + { + "epoch": 0.89, + "grad_norm": 17.731416276488957, + "learning_rate": 1.6475192379617405e-05, + "loss": 0.7656, + "step": 5715 + }, + { + "epoch": 0.89, + "grad_norm": 22.411782964186976, + "learning_rate": 
1.64739071687195e-05, + "loss": 0.7907, + "step": 5716 + }, + { + "epoch": 0.89, + "grad_norm": 14.706146514036433, + "learning_rate": 1.6472621773707463e-05, + "loss": 0.7054, + "step": 5717 + }, + { + "epoch": 0.89, + "grad_norm": 17.554123431838466, + "learning_rate": 1.647133619461785e-05, + "loss": 0.717, + "step": 5718 + }, + { + "epoch": 0.89, + "grad_norm": 26.95087189918306, + "learning_rate": 1.647005043148722e-05, + "loss": 0.762, + "step": 5719 + }, + { + "epoch": 0.89, + "grad_norm": 27.028863548489635, + "learning_rate": 1.646876448435214e-05, + "loss": 0.7717, + "step": 5720 + }, + { + "epoch": 0.89, + "grad_norm": 18.460620633641483, + "learning_rate": 1.6467478353249186e-05, + "loss": 0.7567, + "step": 5721 + }, + { + "epoch": 0.89, + "grad_norm": 13.78109727286138, + "learning_rate": 1.6466192038214928e-05, + "loss": 0.684, + "step": 5722 + }, + { + "epoch": 0.89, + "grad_norm": 19.78884535322842, + "learning_rate": 1.646490553928595e-05, + "loss": 0.6817, + "step": 5723 + }, + { + "epoch": 0.89, + "grad_norm": 16.732138404346333, + "learning_rate": 1.6463618856498844e-05, + "loss": 0.7988, + "step": 5724 + }, + { + "epoch": 0.89, + "grad_norm": 24.295008041801893, + "learning_rate": 1.64623319898902e-05, + "loss": 0.7215, + "step": 5725 + }, + { + "epoch": 0.89, + "grad_norm": 16.610719198450784, + "learning_rate": 1.6461044939496606e-05, + "loss": 0.6587, + "step": 5726 + }, + { + "epoch": 0.89, + "grad_norm": 17.817251600648486, + "learning_rate": 1.645975770535468e-05, + "loss": 0.6557, + "step": 5727 + }, + { + "epoch": 0.89, + "grad_norm": 17.32195410909108, + "learning_rate": 1.6458470287501024e-05, + "loss": 0.7095, + "step": 5728 + }, + { + "epoch": 0.89, + "grad_norm": 16.27110069371097, + "learning_rate": 1.6457182685972248e-05, + "loss": 0.6062, + "step": 5729 + }, + { + "epoch": 0.9, + "grad_norm": 25.095901566257552, + "learning_rate": 1.6455894900804974e-05, + "loss": 0.6872, + "step": 5730 + }, + { + "epoch": 0.9, + "grad_norm": 24.916391614298977, + "learning_rate": 1.6454606932035827e-05, + "loss": 0.7936, + "step": 5731 + }, + { + "epoch": 0.9, + "grad_norm": 15.674545463712919, + "learning_rate": 1.6453318779701434e-05, + "loss": 0.6512, + "step": 5732 + }, + { + "epoch": 0.9, + "grad_norm": 22.96112751890239, + "learning_rate": 1.645203044383843e-05, + "loss": 0.7633, + "step": 5733 + }, + { + "epoch": 0.9, + "grad_norm": 35.18483719736039, + "learning_rate": 1.645074192448346e-05, + "loss": 0.7853, + "step": 5734 + }, + { + "epoch": 0.9, + "grad_norm": 17.53176577382229, + "learning_rate": 1.6449453221673154e-05, + "loss": 0.6666, + "step": 5735 + }, + { + "epoch": 0.9, + "grad_norm": 14.473599699417615, + "learning_rate": 1.6448164335444178e-05, + "loss": 0.6325, + "step": 5736 + }, + { + "epoch": 0.9, + "grad_norm": 19.893537718634708, + "learning_rate": 1.6446875265833178e-05, + "loss": 0.6931, + "step": 5737 + }, + { + "epoch": 0.9, + "grad_norm": 22.35430756545539, + "learning_rate": 1.6445586012876814e-05, + "loss": 0.7357, + "step": 5738 + }, + { + "epoch": 0.9, + "grad_norm": 18.71951126857608, + "learning_rate": 1.644429657661176e-05, + "loss": 0.7224, + "step": 5739 + }, + { + "epoch": 0.9, + "grad_norm": 19.413789780042524, + "learning_rate": 1.6443006957074675e-05, + "loss": 0.8399, + "step": 5740 + }, + { + "epoch": 0.9, + "grad_norm": 22.443157903238202, + "learning_rate": 1.6441717154302246e-05, + "loss": 0.7918, + "step": 5741 + }, + { + "epoch": 0.9, + "grad_norm": 15.37276849941754, + "learning_rate": 1.644042716833115e-05, + 
"loss": 0.6584, + "step": 5742 + }, + { + "epoch": 0.9, + "grad_norm": 23.25333712134639, + "learning_rate": 1.6439136999198068e-05, + "loss": 0.7796, + "step": 5743 + }, + { + "epoch": 0.9, + "grad_norm": 23.405334323599018, + "learning_rate": 1.64378466469397e-05, + "loss": 0.8338, + "step": 5744 + }, + { + "epoch": 0.9, + "grad_norm": 16.330854093823742, + "learning_rate": 1.643655611159274e-05, + "loss": 0.7011, + "step": 5745 + }, + { + "epoch": 0.9, + "grad_norm": 18.47278854667994, + "learning_rate": 1.6435265393193886e-05, + "loss": 0.6948, + "step": 5746 + }, + { + "epoch": 0.9, + "grad_norm": 15.217504019519719, + "learning_rate": 1.6433974491779853e-05, + "loss": 0.7489, + "step": 5747 + }, + { + "epoch": 0.9, + "grad_norm": 21.288441049447194, + "learning_rate": 1.6432683407387346e-05, + "loss": 0.7664, + "step": 5748 + }, + { + "epoch": 0.9, + "grad_norm": 14.15909214309748, + "learning_rate": 1.6431392140053088e-05, + "loss": 0.7481, + "step": 5749 + }, + { + "epoch": 0.9, + "grad_norm": 28.147548078110866, + "learning_rate": 1.6430100689813797e-05, + "loss": 0.8429, + "step": 5750 + }, + { + "epoch": 0.9, + "grad_norm": 15.291706603134074, + "learning_rate": 1.642880905670621e-05, + "loss": 0.7116, + "step": 5751 + }, + { + "epoch": 0.9, + "grad_norm": 15.039495611518436, + "learning_rate": 1.6427517240767046e-05, + "loss": 0.6976, + "step": 5752 + }, + { + "epoch": 0.9, + "grad_norm": 16.935437611792857, + "learning_rate": 1.6426225242033055e-05, + "loss": 0.7023, + "step": 5753 + }, + { + "epoch": 0.9, + "grad_norm": 13.821594706584087, + "learning_rate": 1.6424933060540978e-05, + "loss": 0.7242, + "step": 5754 + }, + { + "epoch": 0.9, + "grad_norm": 20.602660503572892, + "learning_rate": 1.6423640696327564e-05, + "loss": 0.7304, + "step": 5755 + }, + { + "epoch": 0.9, + "grad_norm": 4.888132968633195, + "learning_rate": 1.6422348149429566e-05, + "loss": 0.694, + "step": 5756 + }, + { + "epoch": 0.9, + "grad_norm": 15.49301044136683, + "learning_rate": 1.6421055419883744e-05, + "loss": 0.6861, + "step": 5757 + }, + { + "epoch": 0.9, + "grad_norm": 13.656531414497566, + "learning_rate": 1.641976250772686e-05, + "loss": 0.7489, + "step": 5758 + }, + { + "epoch": 0.9, + "grad_norm": 26.013145738750463, + "learning_rate": 1.6418469412995685e-05, + "loss": 0.7409, + "step": 5759 + }, + { + "epoch": 0.9, + "grad_norm": 13.819613326906847, + "learning_rate": 1.6417176135726998e-05, + "loss": 0.7258, + "step": 5760 + }, + { + "epoch": 0.9, + "grad_norm": 24.179174931135993, + "learning_rate": 1.641588267595757e-05, + "loss": 0.6894, + "step": 5761 + }, + { + "epoch": 0.9, + "grad_norm": 24.704633209495057, + "learning_rate": 1.6414589033724197e-05, + "loss": 0.673, + "step": 5762 + }, + { + "epoch": 0.9, + "grad_norm": 22.870078511105422, + "learning_rate": 1.6413295209063663e-05, + "loss": 0.7022, + "step": 5763 + }, + { + "epoch": 0.9, + "grad_norm": 22.8587057441996, + "learning_rate": 1.6412001202012768e-05, + "loss": 0.744, + "step": 5764 + }, + { + "epoch": 0.9, + "grad_norm": 11.374960559996676, + "learning_rate": 1.6410707012608303e-05, + "loss": 0.6695, + "step": 5765 + }, + { + "epoch": 0.9, + "grad_norm": 19.30811657895733, + "learning_rate": 1.640941264088709e-05, + "loss": 0.7162, + "step": 5766 + }, + { + "epoch": 0.9, + "grad_norm": 14.24656049153652, + "learning_rate": 1.6408118086885925e-05, + "loss": 0.7177, + "step": 5767 + }, + { + "epoch": 0.9, + "grad_norm": 20.595495521942496, + "learning_rate": 1.6406823350641628e-05, + "loss": 0.7463, + "step": 5768 + }, + 
{ + "epoch": 0.9, + "grad_norm": 16.459095175127864, + "learning_rate": 1.6405528432191027e-05, + "loss": 0.7578, + "step": 5769 + }, + { + "epoch": 0.9, + "grad_norm": 17.044689857474776, + "learning_rate": 1.6404233331570944e-05, + "loss": 0.6874, + "step": 5770 + }, + { + "epoch": 0.9, + "grad_norm": 4.205197236482149, + "learning_rate": 1.6402938048818208e-05, + "loss": 0.6507, + "step": 5771 + }, + { + "epoch": 0.9, + "grad_norm": 20.028392994915404, + "learning_rate": 1.640164258396966e-05, + "loss": 0.7461, + "step": 5772 + }, + { + "epoch": 0.9, + "grad_norm": 27.12965333100808, + "learning_rate": 1.6400346937062147e-05, + "loss": 0.7316, + "step": 5773 + }, + { + "epoch": 0.9, + "grad_norm": 18.985086341372234, + "learning_rate": 1.6399051108132507e-05, + "loss": 0.6895, + "step": 5774 + }, + { + "epoch": 0.9, + "grad_norm": 22.008121465288855, + "learning_rate": 1.6397755097217598e-05, + "loss": 0.7771, + "step": 5775 + }, + { + "epoch": 0.9, + "grad_norm": 21.732444575985188, + "learning_rate": 1.6396458904354275e-05, + "loss": 0.7354, + "step": 5776 + }, + { + "epoch": 0.9, + "grad_norm": 25.80816470182595, + "learning_rate": 1.6395162529579406e-05, + "loss": 0.7311, + "step": 5777 + }, + { + "epoch": 0.9, + "grad_norm": 13.955277632507821, + "learning_rate": 1.6393865972929853e-05, + "loss": 0.7192, + "step": 5778 + }, + { + "epoch": 0.9, + "grad_norm": 31.729889325866502, + "learning_rate": 1.6392569234442492e-05, + "loss": 0.7409, + "step": 5779 + }, + { + "epoch": 0.9, + "grad_norm": 10.554252143990125, + "learning_rate": 1.63912723141542e-05, + "loss": 0.6934, + "step": 5780 + }, + { + "epoch": 0.9, + "grad_norm": 18.4984187532941, + "learning_rate": 1.6389975212101867e-05, + "loss": 0.7349, + "step": 5781 + }, + { + "epoch": 0.9, + "grad_norm": 24.212569607342665, + "learning_rate": 1.6388677928322376e-05, + "loss": 0.6987, + "step": 5782 + }, + { + "epoch": 0.9, + "grad_norm": 23.265658918500762, + "learning_rate": 1.638738046285262e-05, + "loss": 0.6853, + "step": 5783 + }, + { + "epoch": 0.9, + "grad_norm": 20.651439859095806, + "learning_rate": 1.63860828157295e-05, + "loss": 0.7496, + "step": 5784 + }, + { + "epoch": 0.9, + "grad_norm": 14.696158599730714, + "learning_rate": 1.6384784986989918e-05, + "loss": 0.7441, + "step": 5785 + }, + { + "epoch": 0.9, + "grad_norm": 22.1359620780321, + "learning_rate": 1.638348697667079e-05, + "loss": 0.7198, + "step": 5786 + }, + { + "epoch": 0.9, + "grad_norm": 18.99321591202826, + "learning_rate": 1.6382188784809027e-05, + "loss": 0.7325, + "step": 5787 + }, + { + "epoch": 0.9, + "grad_norm": 16.02321955580521, + "learning_rate": 1.638089041144155e-05, + "loss": 0.7557, + "step": 5788 + }, + { + "epoch": 0.9, + "grad_norm": 18.966457452291284, + "learning_rate": 1.6379591856605274e-05, + "loss": 0.7063, + "step": 5789 + }, + { + "epoch": 0.9, + "grad_norm": 17.163418201252806, + "learning_rate": 1.6378293120337144e-05, + "loss": 0.7001, + "step": 5790 + }, + { + "epoch": 0.9, + "grad_norm": 20.953390495273855, + "learning_rate": 1.637699420267409e-05, + "loss": 0.7787, + "step": 5791 + }, + { + "epoch": 0.9, + "grad_norm": 19.369755507357898, + "learning_rate": 1.6375695103653047e-05, + "loss": 0.7026, + "step": 5792 + }, + { + "epoch": 0.9, + "grad_norm": 9.90137034189558, + "learning_rate": 1.637439582331097e-05, + "loss": 0.5927, + "step": 5793 + }, + { + "epoch": 0.91, + "grad_norm": 21.616159394648477, + "learning_rate": 1.63730963616848e-05, + "loss": 0.6738, + "step": 5794 + }, + { + "epoch": 0.91, + "grad_norm": 
12.90839275544625, + "learning_rate": 1.6371796718811496e-05, + "loss": 0.6792, + "step": 5795 + }, + { + "epoch": 0.91, + "grad_norm": 25.755384194776852, + "learning_rate": 1.637049689472803e-05, + "loss": 0.8311, + "step": 5796 + }, + { + "epoch": 0.91, + "grad_norm": 26.98814318289358, + "learning_rate": 1.636919688947135e-05, + "loss": 0.8479, + "step": 5797 + }, + { + "epoch": 0.91, + "grad_norm": 28.09579774842622, + "learning_rate": 1.636789670307844e-05, + "loss": 0.7434, + "step": 5798 + }, + { + "epoch": 0.91, + "grad_norm": 23.489971238392123, + "learning_rate": 1.636659633558627e-05, + "loss": 0.7615, + "step": 5799 + }, + { + "epoch": 0.91, + "grad_norm": 13.174140411815452, + "learning_rate": 1.6365295787031827e-05, + "loss": 0.6884, + "step": 5800 + }, + { + "epoch": 0.91, + "grad_norm": 32.195328238283686, + "learning_rate": 1.6363995057452098e-05, + "loss": 0.7759, + "step": 5801 + }, + { + "epoch": 0.91, + "grad_norm": 22.505019659154755, + "learning_rate": 1.6362694146884067e-05, + "loss": 0.7392, + "step": 5802 + }, + { + "epoch": 0.91, + "grad_norm": 23.42531078884606, + "learning_rate": 1.636139305536474e-05, + "loss": 0.6837, + "step": 5803 + }, + { + "epoch": 0.91, + "grad_norm": 15.879879409041795, + "learning_rate": 1.6360091782931115e-05, + "loss": 0.7237, + "step": 5804 + }, + { + "epoch": 0.91, + "grad_norm": 13.782277755517892, + "learning_rate": 1.63587903296202e-05, + "loss": 0.6456, + "step": 5805 + }, + { + "epoch": 0.91, + "grad_norm": 12.95610790913143, + "learning_rate": 1.635748869546901e-05, + "loss": 0.6609, + "step": 5806 + }, + { + "epoch": 0.91, + "grad_norm": 21.08536098097721, + "learning_rate": 1.6356186880514556e-05, + "loss": 0.6788, + "step": 5807 + }, + { + "epoch": 0.91, + "grad_norm": 16.278555583952873, + "learning_rate": 1.635488488479387e-05, + "loss": 0.7367, + "step": 5808 + }, + { + "epoch": 0.91, + "grad_norm": 28.759383400674686, + "learning_rate": 1.635358270834397e-05, + "loss": 0.6626, + "step": 5809 + }, + { + "epoch": 0.91, + "grad_norm": 20.078319271053218, + "learning_rate": 1.6352280351201898e-05, + "loss": 0.7372, + "step": 5810 + }, + { + "epoch": 0.91, + "grad_norm": 20.144959247533215, + "learning_rate": 1.635097781340469e-05, + "loss": 0.6672, + "step": 5811 + }, + { + "epoch": 0.91, + "grad_norm": 16.14920839463731, + "learning_rate": 1.6349675094989387e-05, + "loss": 0.7133, + "step": 5812 + }, + { + "epoch": 0.91, + "grad_norm": 12.656466561101208, + "learning_rate": 1.6348372195993037e-05, + "loss": 0.7127, + "step": 5813 + }, + { + "epoch": 0.91, + "grad_norm": 18.666831581568328, + "learning_rate": 1.63470691164527e-05, + "loss": 0.712, + "step": 5814 + }, + { + "epoch": 0.91, + "grad_norm": 11.696959494882545, + "learning_rate": 1.6345765856405424e-05, + "loss": 0.6774, + "step": 5815 + }, + { + "epoch": 0.91, + "grad_norm": 18.490884041950114, + "learning_rate": 1.6344462415888283e-05, + "loss": 0.7901, + "step": 5816 + }, + { + "epoch": 0.91, + "grad_norm": 19.732721353459777, + "learning_rate": 1.6343158794938342e-05, + "loss": 0.6554, + "step": 5817 + }, + { + "epoch": 0.91, + "grad_norm": 25.560658768655045, + "learning_rate": 1.6341854993592674e-05, + "loss": 0.6604, + "step": 5818 + }, + { + "epoch": 0.91, + "grad_norm": 24.328422611942173, + "learning_rate": 1.634055101188836e-05, + "loss": 0.7234, + "step": 5819 + }, + { + "epoch": 0.91, + "grad_norm": 19.392988013327987, + "learning_rate": 1.6339246849862488e-05, + "loss": 0.8028, + "step": 5820 + }, + { + "epoch": 0.91, + "grad_norm": 
20.42406229215249, + "learning_rate": 1.633794250755214e-05, + "loss": 0.7765, + "step": 5821 + }, + { + "epoch": 0.91, + "grad_norm": 18.514115378247727, + "learning_rate": 1.6336637984994418e-05, + "loss": 0.7536, + "step": 5822 + }, + { + "epoch": 0.91, + "grad_norm": 14.812951527727318, + "learning_rate": 1.6335333282226414e-05, + "loss": 0.6994, + "step": 5823 + }, + { + "epoch": 0.91, + "grad_norm": 13.910757446580629, + "learning_rate": 1.6334028399285243e-05, + "loss": 0.6646, + "step": 5824 + }, + { + "epoch": 0.91, + "grad_norm": 23.882888151803336, + "learning_rate": 1.6332723336208007e-05, + "loss": 0.7778, + "step": 5825 + }, + { + "epoch": 0.91, + "grad_norm": 14.67503314209466, + "learning_rate": 1.633141809303182e-05, + "loss": 0.7605, + "step": 5826 + }, + { + "epoch": 0.91, + "grad_norm": 16.940135701907863, + "learning_rate": 1.633011266979381e-05, + "loss": 0.6965, + "step": 5827 + }, + { + "epoch": 0.91, + "grad_norm": 14.114629175374471, + "learning_rate": 1.6328807066531102e-05, + "loss": 0.7662, + "step": 5828 + }, + { + "epoch": 0.91, + "grad_norm": 20.789468579230917, + "learning_rate": 1.6327501283280816e-05, + "loss": 0.7287, + "step": 5829 + }, + { + "epoch": 0.91, + "grad_norm": 24.233231541498746, + "learning_rate": 1.6326195320080103e-05, + "loss": 0.6999, + "step": 5830 + }, + { + "epoch": 0.91, + "grad_norm": 29.19205879100767, + "learning_rate": 1.632488917696609e-05, + "loss": 0.6944, + "step": 5831 + }, + { + "epoch": 0.91, + "grad_norm": 17.753578174610723, + "learning_rate": 1.632358285397593e-05, + "loss": 0.7462, + "step": 5832 + }, + { + "epoch": 0.91, + "grad_norm": 20.644718473915397, + "learning_rate": 1.6322276351146774e-05, + "loss": 0.7603, + "step": 5833 + }, + { + "epoch": 0.91, + "grad_norm": 21.903770592158505, + "learning_rate": 1.6320969668515773e-05, + "loss": 0.8326, + "step": 5834 + }, + { + "epoch": 0.91, + "grad_norm": 22.879420381005456, + "learning_rate": 1.63196628061201e-05, + "loss": 0.7076, + "step": 5835 + }, + { + "epoch": 0.91, + "grad_norm": 19.43826235528824, + "learning_rate": 1.6318355763996908e-05, + "loss": 0.7249, + "step": 5836 + }, + { + "epoch": 0.91, + "grad_norm": 16.35383209283138, + "learning_rate": 1.6317048542183375e-05, + "loss": 0.7659, + "step": 5837 + }, + { + "epoch": 0.91, + "grad_norm": 17.356027427162466, + "learning_rate": 1.6315741140716675e-05, + "loss": 0.7482, + "step": 5838 + }, + { + "epoch": 0.91, + "grad_norm": 16.405609994848234, + "learning_rate": 1.6314433559633992e-05, + "loss": 0.7291, + "step": 5839 + }, + { + "epoch": 0.91, + "grad_norm": 21.420958412422095, + "learning_rate": 1.6313125798972514e-05, + "loss": 0.7589, + "step": 5840 + }, + { + "epoch": 0.91, + "grad_norm": 15.491793530351103, + "learning_rate": 1.6311817858769434e-05, + "loss": 0.6358, + "step": 5841 + }, + { + "epoch": 0.91, + "grad_norm": 23.172076856540357, + "learning_rate": 1.6310509739061946e-05, + "loss": 0.7548, + "step": 5842 + }, + { + "epoch": 0.91, + "grad_norm": 17.173773223292894, + "learning_rate": 1.6309201439887248e-05, + "loss": 0.7527, + "step": 5843 + }, + { + "epoch": 0.91, + "grad_norm": 16.80129068219147, + "learning_rate": 1.6307892961282555e-05, + "loss": 0.6585, + "step": 5844 + }, + { + "epoch": 0.91, + "grad_norm": 23.375898180116764, + "learning_rate": 1.6306584303285077e-05, + "loss": 0.6452, + "step": 5845 + }, + { + "epoch": 0.91, + "grad_norm": 19.877060956532212, + "learning_rate": 1.6305275465932027e-05, + "loss": 0.833, + "step": 5846 + }, + { + "epoch": 0.91, + "grad_norm": 
12.77071229112161, + "learning_rate": 1.6303966449260636e-05, + "loss": 0.6422, + "step": 5847 + }, + { + "epoch": 0.91, + "grad_norm": 25.207241713057428, + "learning_rate": 1.630265725330812e-05, + "loss": 0.6187, + "step": 5848 + }, + { + "epoch": 0.91, + "grad_norm": 21.220453171151078, + "learning_rate": 1.6301347878111726e-05, + "loss": 0.8335, + "step": 5849 + }, + { + "epoch": 0.91, + "grad_norm": 15.177799544773135, + "learning_rate": 1.630003832370868e-05, + "loss": 0.6804, + "step": 5850 + }, + { + "epoch": 0.91, + "grad_norm": 16.285428087688317, + "learning_rate": 1.629872859013623e-05, + "loss": 0.7265, + "step": 5851 + }, + { + "epoch": 0.91, + "grad_norm": 19.01305889244609, + "learning_rate": 1.6297418677431625e-05, + "loss": 0.6638, + "step": 5852 + }, + { + "epoch": 0.91, + "grad_norm": 34.01237027495073, + "learning_rate": 1.629610858563212e-05, + "loss": 0.6628, + "step": 5853 + }, + { + "epoch": 0.91, + "grad_norm": 18.18372891680082, + "learning_rate": 1.6294798314774963e-05, + "loss": 0.7069, + "step": 5854 + }, + { + "epoch": 0.91, + "grad_norm": 20.38925319017957, + "learning_rate": 1.6293487864897425e-05, + "loss": 0.748, + "step": 5855 + }, + { + "epoch": 0.91, + "grad_norm": 20.62885086421975, + "learning_rate": 1.6292177236036776e-05, + "loss": 0.6876, + "step": 5856 + }, + { + "epoch": 0.91, + "grad_norm": 19.8710086442742, + "learning_rate": 1.629086642823029e-05, + "loss": 0.6719, + "step": 5857 + }, + { + "epoch": 0.92, + "grad_norm": 31.722702970495018, + "learning_rate": 1.628955544151524e-05, + "loss": 0.7324, + "step": 5858 + }, + { + "epoch": 0.92, + "grad_norm": 29.46370384451972, + "learning_rate": 1.6288244275928912e-05, + "loss": 0.7323, + "step": 5859 + }, + { + "epoch": 0.92, + "grad_norm": 19.859204978125025, + "learning_rate": 1.6286932931508596e-05, + "loss": 0.7308, + "step": 5860 + }, + { + "epoch": 0.92, + "grad_norm": 17.1169048200672, + "learning_rate": 1.6285621408291584e-05, + "loss": 0.7692, + "step": 5861 + }, + { + "epoch": 0.92, + "grad_norm": 17.490830373486926, + "learning_rate": 1.6284309706315178e-05, + "loss": 0.7694, + "step": 5862 + }, + { + "epoch": 0.92, + "grad_norm": 19.378687726053442, + "learning_rate": 1.6282997825616676e-05, + "loss": 0.7079, + "step": 5863 + }, + { + "epoch": 0.92, + "grad_norm": 15.007806749159085, + "learning_rate": 1.62816857662334e-05, + "loss": 0.7497, + "step": 5864 + }, + { + "epoch": 0.92, + "grad_norm": 14.422473914666208, + "learning_rate": 1.6280373528202648e-05, + "loss": 0.7115, + "step": 5865 + }, + { + "epoch": 0.92, + "grad_norm": 18.863384409275017, + "learning_rate": 1.627906111156175e-05, + "loss": 0.7473, + "step": 5866 + }, + { + "epoch": 0.92, + "grad_norm": 14.712622773388253, + "learning_rate": 1.6277748516348025e-05, + "loss": 0.638, + "step": 5867 + }, + { + "epoch": 0.92, + "grad_norm": 13.5264682178003, + "learning_rate": 1.6276435742598807e-05, + "loss": 0.6744, + "step": 5868 + }, + { + "epoch": 0.92, + "grad_norm": 16.79368025227889, + "learning_rate": 1.6275122790351426e-05, + "loss": 0.6629, + "step": 5869 + }, + { + "epoch": 0.92, + "grad_norm": 16.75003657096813, + "learning_rate": 1.6273809659643226e-05, + "loss": 0.6624, + "step": 5870 + }, + { + "epoch": 0.92, + "grad_norm": 17.537043094562865, + "learning_rate": 1.6272496350511547e-05, + "loss": 0.7322, + "step": 5871 + }, + { + "epoch": 0.92, + "grad_norm": 19.29564125696257, + "learning_rate": 1.6271182862993743e-05, + "loss": 0.6634, + "step": 5872 + }, + { + "epoch": 0.92, + "grad_norm": 21.113132191854387, 
+ "learning_rate": 1.6269869197127162e-05, + "loss": 0.8624, + "step": 5873 + }, + { + "epoch": 0.92, + "grad_norm": 18.490609787042935, + "learning_rate": 1.6268555352949175e-05, + "loss": 0.6903, + "step": 5874 + }, + { + "epoch": 0.92, + "grad_norm": 15.2939526951468, + "learning_rate": 1.626724133049714e-05, + "loss": 0.6174, + "step": 5875 + }, + { + "epoch": 0.92, + "grad_norm": 11.409671632112559, + "learning_rate": 1.6265927129808426e-05, + "loss": 0.627, + "step": 5876 + }, + { + "epoch": 0.92, + "grad_norm": 27.136673234973063, + "learning_rate": 1.6264612750920406e-05, + "loss": 0.7712, + "step": 5877 + }, + { + "epoch": 0.92, + "grad_norm": 14.666245259950959, + "learning_rate": 1.6263298193870468e-05, + "loss": 0.6477, + "step": 5878 + }, + { + "epoch": 0.92, + "grad_norm": 17.55641363525317, + "learning_rate": 1.6261983458695996e-05, + "loss": 0.7585, + "step": 5879 + }, + { + "epoch": 0.92, + "grad_norm": 25.48309068437332, + "learning_rate": 1.6260668545434375e-05, + "loss": 0.7301, + "step": 5880 + }, + { + "epoch": 0.92, + "grad_norm": 27.784376618553992, + "learning_rate": 1.6259353454123003e-05, + "loss": 0.7419, + "step": 5881 + }, + { + "epoch": 0.92, + "grad_norm": 16.801995266137457, + "learning_rate": 1.625803818479928e-05, + "loss": 0.6598, + "step": 5882 + }, + { + "epoch": 0.92, + "grad_norm": 15.573069531722815, + "learning_rate": 1.6256722737500613e-05, + "loss": 0.7259, + "step": 5883 + }, + { + "epoch": 0.92, + "grad_norm": 16.47241078321202, + "learning_rate": 1.625540711226441e-05, + "loss": 0.7121, + "step": 5884 + }, + { + "epoch": 0.92, + "grad_norm": 13.727059569719156, + "learning_rate": 1.625409130912809e-05, + "loss": 0.7054, + "step": 5885 + }, + { + "epoch": 0.92, + "grad_norm": 22.850694802267544, + "learning_rate": 1.6252775328129067e-05, + "loss": 0.82, + "step": 5886 + }, + { + "epoch": 0.92, + "grad_norm": 16.648720053714523, + "learning_rate": 1.6251459169304776e-05, + "loss": 0.6951, + "step": 5887 + }, + { + "epoch": 0.92, + "grad_norm": 32.58796360555267, + "learning_rate": 1.6250142832692643e-05, + "loss": 0.7245, + "step": 5888 + }, + { + "epoch": 0.92, + "grad_norm": 28.586115250369748, + "learning_rate": 1.6248826318330103e-05, + "loss": 0.8198, + "step": 5889 + }, + { + "epoch": 0.92, + "grad_norm": 19.75473040052826, + "learning_rate": 1.6247509626254597e-05, + "loss": 0.767, + "step": 5890 + }, + { + "epoch": 0.92, + "grad_norm": 18.847430644488675, + "learning_rate": 1.6246192756503572e-05, + "loss": 0.6209, + "step": 5891 + }, + { + "epoch": 0.92, + "grad_norm": 24.909309793357508, + "learning_rate": 1.624487570911448e-05, + "loss": 0.7406, + "step": 5892 + }, + { + "epoch": 0.92, + "grad_norm": 14.864794776139332, + "learning_rate": 1.6243558484124778e-05, + "loss": 0.7169, + "step": 5893 + }, + { + "epoch": 0.92, + "grad_norm": 13.195730899481068, + "learning_rate": 1.6242241081571923e-05, + "loss": 0.6453, + "step": 5894 + }, + { + "epoch": 0.92, + "grad_norm": 19.801300941874892, + "learning_rate": 1.6240923501493383e-05, + "loss": 0.7624, + "step": 5895 + }, + { + "epoch": 0.92, + "grad_norm": 17.3320670730396, + "learning_rate": 1.6239605743926632e-05, + "loss": 0.8266, + "step": 5896 + }, + { + "epoch": 0.92, + "grad_norm": 16.097127002272018, + "learning_rate": 1.6238287808909137e-05, + "loss": 0.7413, + "step": 5897 + }, + { + "epoch": 0.92, + "grad_norm": 19.67196590260174, + "learning_rate": 1.6236969696478393e-05, + "loss": 0.6988, + "step": 5898 + }, + { + "epoch": 0.92, + "grad_norm": 21.623937590361077, + 
"learning_rate": 1.6235651406671875e-05, + "loss": 0.6864, + "step": 5899 + }, + { + "epoch": 0.92, + "grad_norm": 17.586423718896402, + "learning_rate": 1.623433293952708e-05, + "loss": 0.6383, + "step": 5900 + }, + { + "epoch": 0.92, + "grad_norm": 27.757080110335508, + "learning_rate": 1.6233014295081505e-05, + "loss": 0.6652, + "step": 5901 + }, + { + "epoch": 0.92, + "grad_norm": 19.259816567253296, + "learning_rate": 1.623169547337265e-05, + "loss": 0.6892, + "step": 5902 + }, + { + "epoch": 0.92, + "grad_norm": 25.2420301400005, + "learning_rate": 1.6230376474438018e-05, + "loss": 0.6876, + "step": 5903 + }, + { + "epoch": 0.92, + "grad_norm": 13.592569898485612, + "learning_rate": 1.6229057298315123e-05, + "loss": 0.7102, + "step": 5904 + }, + { + "epoch": 0.92, + "grad_norm": 12.735847455884493, + "learning_rate": 1.6227737945041485e-05, + "loss": 0.5997, + "step": 5905 + }, + { + "epoch": 0.92, + "grad_norm": 20.085673896160745, + "learning_rate": 1.6226418414654625e-05, + "loss": 0.7951, + "step": 5906 + }, + { + "epoch": 0.92, + "grad_norm": 14.890752029777746, + "learning_rate": 1.6225098707192063e-05, + "loss": 0.7078, + "step": 5907 + }, + { + "epoch": 0.92, + "grad_norm": 27.410334507826146, + "learning_rate": 1.622377882269134e-05, + "loss": 0.7008, + "step": 5908 + }, + { + "epoch": 0.92, + "grad_norm": 22.162051671703466, + "learning_rate": 1.6222458761189984e-05, + "loss": 0.7213, + "step": 5909 + }, + { + "epoch": 0.92, + "grad_norm": 20.494290652244455, + "learning_rate": 1.622113852272554e-05, + "loss": 0.6921, + "step": 5910 + }, + { + "epoch": 0.92, + "grad_norm": 24.428519991445437, + "learning_rate": 1.621981810733556e-05, + "loss": 0.7837, + "step": 5911 + }, + { + "epoch": 0.92, + "grad_norm": 20.87060602279722, + "learning_rate": 1.6218497515057593e-05, + "loss": 0.7829, + "step": 5912 + }, + { + "epoch": 0.92, + "grad_norm": 21.086965935111138, + "learning_rate": 1.621717674592919e-05, + "loss": 0.7273, + "step": 5913 + }, + { + "epoch": 0.92, + "grad_norm": 19.247245796677582, + "learning_rate": 1.621585579998792e-05, + "loss": 0.7406, + "step": 5914 + }, + { + "epoch": 0.92, + "grad_norm": 21.437697422186208, + "learning_rate": 1.6214534677271344e-05, + "loss": 0.6834, + "step": 5915 + }, + { + "epoch": 0.92, + "grad_norm": 14.005778743206983, + "learning_rate": 1.6213213377817035e-05, + "loss": 0.6712, + "step": 5916 + }, + { + "epoch": 0.92, + "grad_norm": 13.28936195148279, + "learning_rate": 1.6211891901662576e-05, + "loss": 0.6866, + "step": 5917 + }, + { + "epoch": 0.92, + "grad_norm": 17.603544973130766, + "learning_rate": 1.621057024884555e-05, + "loss": 0.7248, + "step": 5918 + }, + { + "epoch": 0.92, + "grad_norm": 15.067000101702876, + "learning_rate": 1.620924841940353e-05, + "loss": 0.6844, + "step": 5919 + }, + { + "epoch": 0.92, + "grad_norm": 21.65909563224819, + "learning_rate": 1.6207926413374124e-05, + "loss": 0.7633, + "step": 5920 + }, + { + "epoch": 0.92, + "grad_norm": 21.061215646648854, + "learning_rate": 1.620660423079492e-05, + "loss": 0.7529, + "step": 5921 + }, + { + "epoch": 0.93, + "grad_norm": 29.69768886484089, + "learning_rate": 1.620528187170352e-05, + "loss": 0.6891, + "step": 5922 + }, + { + "epoch": 0.93, + "grad_norm": 19.060081408626154, + "learning_rate": 1.6203959336137538e-05, + "loss": 0.6552, + "step": 5923 + }, + { + "epoch": 0.93, + "grad_norm": 13.476902658896494, + "learning_rate": 1.620263662413458e-05, + "loss": 0.6676, + "step": 5924 + }, + { + "epoch": 0.93, + "grad_norm": 14.676627481719384, + 
"learning_rate": 1.6201313735732265e-05, + "loss": 0.6213, + "step": 5925 + }, + { + "epoch": 0.93, + "grad_norm": 14.727930664639576, + "learning_rate": 1.619999067096821e-05, + "loss": 0.6276, + "step": 5926 + }, + { + "epoch": 0.93, + "grad_norm": 19.159879646287866, + "learning_rate": 1.6198667429880054e-05, + "loss": 0.745, + "step": 5927 + }, + { + "epoch": 0.93, + "grad_norm": 16.112949457625067, + "learning_rate": 1.619734401250542e-05, + "loss": 0.7489, + "step": 5928 + }, + { + "epoch": 0.93, + "grad_norm": 15.354114584357836, + "learning_rate": 1.6196020418881947e-05, + "loss": 0.691, + "step": 5929 + }, + { + "epoch": 0.93, + "grad_norm": 17.80471084889467, + "learning_rate": 1.6194696649047278e-05, + "loss": 0.7834, + "step": 5930 + }, + { + "epoch": 0.93, + "grad_norm": 21.076001775085388, + "learning_rate": 1.619337270303906e-05, + "loss": 0.6526, + "step": 5931 + }, + { + "epoch": 0.93, + "grad_norm": 20.21434072043704, + "learning_rate": 1.6192048580894946e-05, + "loss": 0.7788, + "step": 5932 + }, + { + "epoch": 0.93, + "grad_norm": 16.87706993176504, + "learning_rate": 1.6190724282652594e-05, + "loss": 0.7708, + "step": 5933 + }, + { + "epoch": 0.93, + "grad_norm": 17.404084460435197, + "learning_rate": 1.6189399808349663e-05, + "loss": 0.6706, + "step": 5934 + }, + { + "epoch": 0.93, + "grad_norm": 21.152339116005287, + "learning_rate": 1.6188075158023825e-05, + "loss": 0.7311, + "step": 5935 + }, + { + "epoch": 0.93, + "grad_norm": 17.15266799359661, + "learning_rate": 1.6186750331712744e-05, + "loss": 0.7385, + "step": 5936 + }, + { + "epoch": 0.93, + "grad_norm": 21.42989187876736, + "learning_rate": 1.6185425329454106e-05, + "loss": 0.7341, + "step": 5937 + }, + { + "epoch": 0.93, + "grad_norm": 17.289202157347265, + "learning_rate": 1.618410015128559e-05, + "loss": 0.7335, + "step": 5938 + }, + { + "epoch": 0.93, + "grad_norm": 14.004547217529506, + "learning_rate": 1.6182774797244882e-05, + "loss": 0.6932, + "step": 5939 + }, + { + "epoch": 0.93, + "grad_norm": 38.503908652520025, + "learning_rate": 1.6181449267369678e-05, + "loss": 0.8746, + "step": 5940 + }, + { + "epoch": 0.93, + "grad_norm": 19.426658899734946, + "learning_rate": 1.6180123561697672e-05, + "loss": 0.6522, + "step": 5941 + }, + { + "epoch": 0.93, + "grad_norm": 17.043240801405812, + "learning_rate": 1.6178797680266566e-05, + "loss": 0.6749, + "step": 5942 + }, + { + "epoch": 0.93, + "grad_norm": 21.961162540880018, + "learning_rate": 1.6177471623114068e-05, + "loss": 0.7554, + "step": 5943 + }, + { + "epoch": 0.93, + "grad_norm": 19.96801580950909, + "learning_rate": 1.6176145390277893e-05, + "loss": 0.7957, + "step": 5944 + }, + { + "epoch": 0.93, + "grad_norm": 22.16415807833147, + "learning_rate": 1.6174818981795756e-05, + "loss": 0.6562, + "step": 5945 + }, + { + "epoch": 0.93, + "grad_norm": 16.53753529643634, + "learning_rate": 1.6173492397705376e-05, + "loss": 0.7054, + "step": 5946 + }, + { + "epoch": 0.93, + "grad_norm": 17.335069969031636, + "learning_rate": 1.6172165638044486e-05, + "loss": 0.6904, + "step": 5947 + }, + { + "epoch": 0.93, + "grad_norm": 19.89637617765377, + "learning_rate": 1.6170838702850814e-05, + "loss": 0.7049, + "step": 5948 + }, + { + "epoch": 0.93, + "grad_norm": 27.271486570438373, + "learning_rate": 1.61695115921621e-05, + "loss": 0.7481, + "step": 5949 + }, + { + "epoch": 0.93, + "grad_norm": 20.494015256206872, + "learning_rate": 1.6168184306016092e-05, + "loss": 0.7544, + "step": 5950 + }, + { + "epoch": 0.93, + "grad_norm": 12.90461561884123, + 
"learning_rate": 1.6166856844450522e-05, + "loss": 0.6851, + "step": 5951 + }, + { + "epoch": 0.93, + "grad_norm": 13.308063031837321, + "learning_rate": 1.6165529207503156e-05, + "loss": 0.7029, + "step": 5952 + }, + { + "epoch": 0.93, + "grad_norm": 16.858244233132446, + "learning_rate": 1.616420139521174e-05, + "loss": 0.7325, + "step": 5953 + }, + { + "epoch": 0.93, + "grad_norm": 16.418704637893903, + "learning_rate": 1.6162873407614047e-05, + "loss": 0.7511, + "step": 5954 + }, + { + "epoch": 0.93, + "grad_norm": 21.27601236079115, + "learning_rate": 1.616154524474784e-05, + "loss": 0.7231, + "step": 5955 + }, + { + "epoch": 0.93, + "grad_norm": 20.736412367587192, + "learning_rate": 1.616021690665089e-05, + "loss": 0.7177, + "step": 5956 + }, + { + "epoch": 0.93, + "grad_norm": 15.583090055979834, + "learning_rate": 1.615888839336097e-05, + "loss": 0.6823, + "step": 5957 + }, + { + "epoch": 0.93, + "grad_norm": 30.736905367607875, + "learning_rate": 1.6157559704915873e-05, + "loss": 0.7765, + "step": 5958 + }, + { + "epoch": 0.93, + "grad_norm": 22.72996953313437, + "learning_rate": 1.6156230841353376e-05, + "loss": 0.6867, + "step": 5959 + }, + { + "epoch": 0.93, + "grad_norm": 21.616841350675035, + "learning_rate": 1.6154901802711277e-05, + "loss": 0.6481, + "step": 5960 + }, + { + "epoch": 0.93, + "grad_norm": 14.959188555290131, + "learning_rate": 1.615357258902737e-05, + "loss": 0.6785, + "step": 5961 + }, + { + "epoch": 0.93, + "grad_norm": 22.279277267440737, + "learning_rate": 1.6152243200339462e-05, + "loss": 0.7434, + "step": 5962 + }, + { + "epoch": 0.93, + "grad_norm": 19.872918304102438, + "learning_rate": 1.6150913636685356e-05, + "loss": 0.8089, + "step": 5963 + }, + { + "epoch": 0.93, + "grad_norm": 22.24059477336781, + "learning_rate": 1.614958389810286e-05, + "loss": 0.6465, + "step": 5964 + }, + { + "epoch": 0.93, + "grad_norm": 15.286999985837308, + "learning_rate": 1.6148253984629798e-05, + "loss": 0.7133, + "step": 5965 + }, + { + "epoch": 0.93, + "grad_norm": 33.91779360560661, + "learning_rate": 1.614692389630399e-05, + "loss": 0.7208, + "step": 5966 + }, + { + "epoch": 0.93, + "grad_norm": 18.756621761095253, + "learning_rate": 1.6145593633163258e-05, + "loss": 0.7476, + "step": 5967 + }, + { + "epoch": 0.93, + "grad_norm": 13.215467946248461, + "learning_rate": 1.614426319524544e-05, + "loss": 0.7045, + "step": 5968 + }, + { + "epoch": 0.93, + "grad_norm": 21.00972725245103, + "learning_rate": 1.6142932582588375e-05, + "loss": 0.7449, + "step": 5969 + }, + { + "epoch": 0.93, + "grad_norm": 17.352999536228005, + "learning_rate": 1.61416017952299e-05, + "loss": 0.7178, + "step": 5970 + }, + { + "epoch": 0.93, + "grad_norm": 16.142891198991848, + "learning_rate": 1.614027083320786e-05, + "loss": 0.6726, + "step": 5971 + }, + { + "epoch": 0.93, + "grad_norm": 20.701998655521543, + "learning_rate": 1.613893969656011e-05, + "loss": 0.805, + "step": 5972 + }, + { + "epoch": 0.93, + "grad_norm": 12.263784887695612, + "learning_rate": 1.6137608385324505e-05, + "loss": 0.6631, + "step": 5973 + }, + { + "epoch": 0.93, + "grad_norm": 17.414910505893396, + "learning_rate": 1.613627689953891e-05, + "loss": 0.6877, + "step": 5974 + }, + { + "epoch": 0.93, + "grad_norm": 11.738871387239564, + "learning_rate": 1.613494523924119e-05, + "loss": 0.6301, + "step": 5975 + }, + { + "epoch": 0.93, + "grad_norm": 21.638921087368857, + "learning_rate": 1.6133613404469216e-05, + "loss": 0.7647, + "step": 5976 + }, + { + "epoch": 0.93, + "grad_norm": 15.135337843386694, + 
"learning_rate": 1.6132281395260863e-05, + "loss": 0.6678, + "step": 5977 + }, + { + "epoch": 0.93, + "grad_norm": 27.808605120678386, + "learning_rate": 1.6130949211654017e-05, + "loss": 0.6802, + "step": 5978 + }, + { + "epoch": 0.93, + "grad_norm": 25.32361928761944, + "learning_rate": 1.612961685368656e-05, + "loss": 0.6662, + "step": 5979 + }, + { + "epoch": 0.93, + "grad_norm": 21.059270969583373, + "learning_rate": 1.6128284321396385e-05, + "loss": 0.6913, + "step": 5980 + }, + { + "epoch": 0.93, + "grad_norm": 29.265600688677793, + "learning_rate": 1.6126951614821388e-05, + "loss": 0.8176, + "step": 5981 + }, + { + "epoch": 0.93, + "grad_norm": 19.253895606148106, + "learning_rate": 1.6125618733999476e-05, + "loss": 0.7087, + "step": 5982 + }, + { + "epoch": 0.93, + "grad_norm": 26.790769623312986, + "learning_rate": 1.6124285678968546e-05, + "loss": 0.6605, + "step": 5983 + }, + { + "epoch": 0.93, + "grad_norm": 18.50153300660963, + "learning_rate": 1.6122952449766516e-05, + "loss": 0.7991, + "step": 5984 + }, + { + "epoch": 0.93, + "grad_norm": 17.855735052856446, + "learning_rate": 1.6121619046431297e-05, + "loss": 0.7114, + "step": 5985 + }, + { + "epoch": 0.94, + "grad_norm": 27.134727186948577, + "learning_rate": 1.612028546900082e-05, + "loss": 0.8299, + "step": 5986 + }, + { + "epoch": 0.94, + "grad_norm": 22.983470155317562, + "learning_rate": 1.6118951717513e-05, + "loss": 0.6333, + "step": 5987 + }, + { + "epoch": 0.94, + "grad_norm": 13.225499314885921, + "learning_rate": 1.611761779200577e-05, + "loss": 0.6733, + "step": 5988 + }, + { + "epoch": 0.94, + "grad_norm": 17.23512549465239, + "learning_rate": 1.6116283692517075e-05, + "loss": 0.7622, + "step": 5989 + }, + { + "epoch": 0.94, + "grad_norm": 16.93436108514862, + "learning_rate": 1.6114949419084846e-05, + "loss": 0.6438, + "step": 5990 + }, + { + "epoch": 0.94, + "grad_norm": 26.437615919648117, + "learning_rate": 1.6113614971747032e-05, + "loss": 0.7415, + "step": 5991 + }, + { + "epoch": 0.94, + "grad_norm": 32.42633262967853, + "learning_rate": 1.6112280350541584e-05, + "loss": 0.7304, + "step": 5992 + }, + { + "epoch": 0.94, + "grad_norm": 18.67108212467494, + "learning_rate": 1.6110945555506462e-05, + "loss": 0.69, + "step": 5993 + }, + { + "epoch": 0.94, + "grad_norm": 19.339503193389636, + "learning_rate": 1.6109610586679624e-05, + "loss": 0.7079, + "step": 5994 + }, + { + "epoch": 0.94, + "grad_norm": 29.556020902744823, + "learning_rate": 1.610827544409903e-05, + "loss": 0.7038, + "step": 5995 + }, + { + "epoch": 0.94, + "grad_norm": 16.64112554880865, + "learning_rate": 1.6106940127802658e-05, + "loss": 0.7173, + "step": 5996 + }, + { + "epoch": 0.94, + "grad_norm": 9.624462066208018, + "learning_rate": 1.610560463782848e-05, + "loss": 0.7088, + "step": 5997 + }, + { + "epoch": 0.94, + "grad_norm": 31.290247628273566, + "learning_rate": 1.610426897421448e-05, + "loss": 0.802, + "step": 5998 + }, + { + "epoch": 0.94, + "grad_norm": 13.606065118792602, + "learning_rate": 1.610293313699864e-05, + "loss": 0.6306, + "step": 5999 + }, + { + "epoch": 0.94, + "grad_norm": 29.70868308336053, + "learning_rate": 1.610159712621895e-05, + "loss": 0.6919, + "step": 6000 + }, + { + "epoch": 0.94, + "grad_norm": 16.370156368943082, + "learning_rate": 1.610026094191341e-05, + "loss": 0.7404, + "step": 6001 + }, + { + "epoch": 0.94, + "grad_norm": 15.185485222007488, + "learning_rate": 1.6098924584120017e-05, + "loss": 0.7808, + "step": 6002 + }, + { + "epoch": 0.94, + "grad_norm": 14.173276347237808, + "learning_rate": 
1.6097588052876774e-05, + "loss": 0.7459, + "step": 6003 + }, + { + "epoch": 0.94, + "grad_norm": 20.47823266964914, + "learning_rate": 1.60962513482217e-05, + "loss": 0.7217, + "step": 6004 + }, + { + "epoch": 0.94, + "grad_norm": 16.99578884853381, + "learning_rate": 1.6094914470192796e-05, + "loss": 0.656, + "step": 6005 + }, + { + "epoch": 0.94, + "grad_norm": 18.3157444486773, + "learning_rate": 1.6093577418828094e-05, + "loss": 0.6596, + "step": 6006 + }, + { + "epoch": 0.94, + "grad_norm": 26.44876873752978, + "learning_rate": 1.6092240194165618e-05, + "loss": 0.7488, + "step": 6007 + }, + { + "epoch": 0.94, + "grad_norm": 19.546222867233915, + "learning_rate": 1.6090902796243386e-05, + "loss": 0.698, + "step": 6008 + }, + { + "epoch": 0.94, + "grad_norm": 22.527975887467075, + "learning_rate": 1.608956522509945e-05, + "loss": 0.6801, + "step": 6009 + }, + { + "epoch": 0.94, + "grad_norm": 35.15430544752382, + "learning_rate": 1.6088227480771835e-05, + "loss": 0.7016, + "step": 6010 + }, + { + "epoch": 0.94, + "grad_norm": 14.079673579247963, + "learning_rate": 1.6086889563298594e-05, + "loss": 0.6352, + "step": 6011 + }, + { + "epoch": 0.94, + "grad_norm": 18.300598659846223, + "learning_rate": 1.6085551472717775e-05, + "loss": 0.6901, + "step": 6012 + }, + { + "epoch": 0.94, + "grad_norm": 17.018386023503165, + "learning_rate": 1.6084213209067433e-05, + "loss": 0.7082, + "step": 6013 + }, + { + "epoch": 0.94, + "grad_norm": 14.904186541309263, + "learning_rate": 1.6082874772385628e-05, + "loss": 0.6874, + "step": 6014 + }, + { + "epoch": 0.94, + "grad_norm": 14.714295879602227, + "learning_rate": 1.6081536162710418e-05, + "loss": 0.6371, + "step": 6015 + }, + { + "epoch": 0.94, + "grad_norm": 17.127558041170627, + "learning_rate": 1.6080197380079883e-05, + "loss": 0.7249, + "step": 6016 + }, + { + "epoch": 0.94, + "grad_norm": 20.548216729359247, + "learning_rate": 1.6078858424532088e-05, + "loss": 0.6625, + "step": 6017 + }, + { + "epoch": 0.94, + "grad_norm": 17.252166740678206, + "learning_rate": 1.6077519296105112e-05, + "loss": 0.7389, + "step": 6018 + }, + { + "epoch": 0.94, + "grad_norm": 19.756865178148285, + "learning_rate": 1.6076179994837045e-05, + "loss": 0.7765, + "step": 6019 + }, + { + "epoch": 0.94, + "grad_norm": 21.465302958969573, + "learning_rate": 1.6074840520765976e-05, + "loss": 0.7671, + "step": 6020 + }, + { + "epoch": 0.94, + "grad_norm": 16.76529910418772, + "learning_rate": 1.607350087393e-05, + "loss": 0.7215, + "step": 6021 + }, + { + "epoch": 0.94, + "grad_norm": 24.15083210407022, + "learning_rate": 1.60721610543672e-05, + "loss": 0.8208, + "step": 6022 + }, + { + "epoch": 0.94, + "grad_norm": 21.675696418363845, + "learning_rate": 1.6070821062115704e-05, + "loss": 0.7002, + "step": 6023 + }, + { + "epoch": 0.94, + "grad_norm": 17.67845007038022, + "learning_rate": 1.60694808972136e-05, + "loss": 0.7252, + "step": 6024 + }, + { + "epoch": 0.94, + "grad_norm": 11.678972298276326, + "learning_rate": 1.606814055969901e-05, + "loss": 0.5876, + "step": 6025 + }, + { + "epoch": 0.94, + "grad_norm": 16.47609256700064, + "learning_rate": 1.606680004961006e-05, + "loss": 0.6981, + "step": 6026 + }, + { + "epoch": 0.94, + "grad_norm": 20.30427774542063, + "learning_rate": 1.6065459366984856e-05, + "loss": 0.6276, + "step": 6027 + }, + { + "epoch": 0.94, + "grad_norm": 19.338619783642514, + "learning_rate": 1.6064118511861543e-05, + "loss": 0.7362, + "step": 6028 + }, + { + "epoch": 0.94, + "grad_norm": 17.555603238376158, + "learning_rate": 
1.6062777484278245e-05, + "loss": 0.6852, + "step": 6029 + }, + { + "epoch": 0.94, + "grad_norm": 21.551533864113665, + "learning_rate": 1.60614362842731e-05, + "loss": 0.7836, + "step": 6030 + }, + { + "epoch": 0.94, + "grad_norm": 17.280541871102116, + "learning_rate": 1.6060094911884255e-05, + "loss": 0.6696, + "step": 6031 + }, + { + "epoch": 0.94, + "grad_norm": 17.955626101863103, + "learning_rate": 1.6058753367149854e-05, + "loss": 0.7, + "step": 6032 + }, + { + "epoch": 0.94, + "grad_norm": 20.42183603685802, + "learning_rate": 1.6057411650108053e-05, + "loss": 0.7145, + "step": 6033 + }, + { + "epoch": 0.94, + "grad_norm": 19.09152443537757, + "learning_rate": 1.6056069760797006e-05, + "loss": 0.7615, + "step": 6034 + }, + { + "epoch": 0.94, + "grad_norm": 18.943154608542795, + "learning_rate": 1.605472769925488e-05, + "loss": 0.701, + "step": 6035 + }, + { + "epoch": 0.94, + "grad_norm": 21.687209125321623, + "learning_rate": 1.6053385465519838e-05, + "loss": 0.6862, + "step": 6036 + }, + { + "epoch": 0.94, + "grad_norm": 18.024486516594113, + "learning_rate": 1.6052043059630058e-05, + "loss": 0.699, + "step": 6037 + }, + { + "epoch": 0.94, + "grad_norm": 16.422543449254004, + "learning_rate": 1.6050700481623712e-05, + "loss": 0.6818, + "step": 6038 + }, + { + "epoch": 0.94, + "grad_norm": 24.572686157436372, + "learning_rate": 1.604935773153898e-05, + "loss": 0.7413, + "step": 6039 + }, + { + "epoch": 0.94, + "grad_norm": 18.045914884516726, + "learning_rate": 1.6048014809414057e-05, + "loss": 0.7308, + "step": 6040 + }, + { + "epoch": 0.94, + "grad_norm": 15.39324619124694, + "learning_rate": 1.604667171528713e-05, + "loss": 0.8003, + "step": 6041 + }, + { + "epoch": 0.94, + "grad_norm": 12.848160861965429, + "learning_rate": 1.60453284491964e-05, + "loss": 0.6883, + "step": 6042 + }, + { + "epoch": 0.94, + "grad_norm": 24.661372682422005, + "learning_rate": 1.6043985011180062e-05, + "loss": 0.7748, + "step": 6043 + }, + { + "epoch": 0.94, + "grad_norm": 13.149216708158821, + "learning_rate": 1.6042641401276326e-05, + "loss": 0.666, + "step": 6044 + }, + { + "epoch": 0.94, + "grad_norm": 17.574026378062864, + "learning_rate": 1.6041297619523405e-05, + "loss": 0.685, + "step": 6045 + }, + { + "epoch": 0.94, + "grad_norm": 16.945768089026576, + "learning_rate": 1.603995366595951e-05, + "loss": 0.7174, + "step": 6046 + }, + { + "epoch": 0.94, + "grad_norm": 21.517366707254787, + "learning_rate": 1.603860954062287e-05, + "loss": 0.685, + "step": 6047 + }, + { + "epoch": 0.94, + "grad_norm": 15.701941173207373, + "learning_rate": 1.6037265243551704e-05, + "loss": 0.6415, + "step": 6048 + }, + { + "epoch": 0.94, + "grad_norm": 24.461719324391474, + "learning_rate": 1.6035920774784247e-05, + "loss": 0.7319, + "step": 6049 + }, + { + "epoch": 0.95, + "grad_norm": 14.784512617949902, + "learning_rate": 1.6034576134358736e-05, + "loss": 0.7536, + "step": 6050 + }, + { + "epoch": 0.95, + "grad_norm": 21.90841418621444, + "learning_rate": 1.6033231322313406e-05, + "loss": 0.7027, + "step": 6051 + }, + { + "epoch": 0.95, + "grad_norm": 14.118060865550813, + "learning_rate": 1.603188633868651e-05, + "loss": 0.7073, + "step": 6052 + }, + { + "epoch": 0.95, + "grad_norm": 17.89676469955705, + "learning_rate": 1.6030541183516292e-05, + "loss": 0.783, + "step": 6053 + }, + { + "epoch": 0.95, + "grad_norm": 19.16690837320704, + "learning_rate": 1.6029195856841015e-05, + "loss": 0.6625, + "step": 6054 + }, + { + "epoch": 0.95, + "grad_norm": 12.620054953732446, + "learning_rate": 
1.602785035869893e-05, + "loss": 0.6994, + "step": 6055 + }, + { + "epoch": 0.95, + "grad_norm": 22.97839752050187, + "learning_rate": 1.602650468912831e-05, + "loss": 0.7622, + "step": 6056 + }, + { + "epoch": 0.95, + "grad_norm": 16.639519712070097, + "learning_rate": 1.602515884816742e-05, + "loss": 0.6513, + "step": 6057 + }, + { + "epoch": 0.95, + "grad_norm": 18.51940529456008, + "learning_rate": 1.602381283585454e-05, + "loss": 0.8177, + "step": 6058 + }, + { + "epoch": 0.95, + "grad_norm": 18.49837469842179, + "learning_rate": 1.6022466652227944e-05, + "loss": 0.7179, + "step": 6059 + }, + { + "epoch": 0.95, + "grad_norm": 18.829366897843116, + "learning_rate": 1.602112029732592e-05, + "loss": 0.7255, + "step": 6060 + }, + { + "epoch": 0.95, + "grad_norm": 18.660995152306217, + "learning_rate": 1.6019773771186756e-05, + "loss": 0.792, + "step": 6061 + }, + { + "epoch": 0.95, + "grad_norm": 17.198536408158073, + "learning_rate": 1.601842707384875e-05, + "loss": 0.7614, + "step": 6062 + }, + { + "epoch": 0.95, + "grad_norm": 21.285158852870904, + "learning_rate": 1.60170802053502e-05, + "loss": 0.7865, + "step": 6063 + }, + { + "epoch": 0.95, + "grad_norm": 15.801970257246602, + "learning_rate": 1.6015733165729406e-05, + "loss": 0.6287, + "step": 6064 + }, + { + "epoch": 0.95, + "grad_norm": 18.967032262083727, + "learning_rate": 1.601438595502468e-05, + "loss": 0.7677, + "step": 6065 + }, + { + "epoch": 0.95, + "grad_norm": 19.766403354804233, + "learning_rate": 1.6013038573274336e-05, + "loss": 0.719, + "step": 6066 + }, + { + "epoch": 0.95, + "grad_norm": 18.585718313963557, + "learning_rate": 1.6011691020516696e-05, + "loss": 0.7175, + "step": 6067 + }, + { + "epoch": 0.95, + "grad_norm": 20.55071192695079, + "learning_rate": 1.6010343296790074e-05, + "loss": 0.7438, + "step": 6068 + }, + { + "epoch": 0.95, + "grad_norm": 16.923081299536978, + "learning_rate": 1.6008995402132807e-05, + "loss": 0.7245, + "step": 6069 + }, + { + "epoch": 0.95, + "grad_norm": 19.24145848716582, + "learning_rate": 1.6007647336583227e-05, + "loss": 0.6264, + "step": 6070 + }, + { + "epoch": 0.95, + "grad_norm": 16.482393296613985, + "learning_rate": 1.600629910017967e-05, + "loss": 0.6931, + "step": 6071 + }, + { + "epoch": 0.95, + "grad_norm": 18.306325195697898, + "learning_rate": 1.600495069296048e-05, + "loss": 0.6905, + "step": 6072 + }, + { + "epoch": 0.95, + "grad_norm": 15.890905039259907, + "learning_rate": 1.6003602114964007e-05, + "loss": 0.6545, + "step": 6073 + }, + { + "epoch": 0.95, + "grad_norm": 17.400955227430988, + "learning_rate": 1.60022533662286e-05, + "loss": 0.7456, + "step": 6074 + }, + { + "epoch": 0.95, + "grad_norm": 18.88797996672957, + "learning_rate": 1.600090444679262e-05, + "loss": 0.7343, + "step": 6075 + }, + { + "epoch": 0.95, + "grad_norm": 16.141422330507574, + "learning_rate": 1.5999555356694426e-05, + "loss": 0.6491, + "step": 6076 + }, + { + "epoch": 0.95, + "grad_norm": 13.750025277764754, + "learning_rate": 1.599820609597239e-05, + "loss": 0.6739, + "step": 6077 + }, + { + "epoch": 0.95, + "grad_norm": 20.70318905442603, + "learning_rate": 1.5996856664664876e-05, + "loss": 0.6953, + "step": 6078 + }, + { + "epoch": 0.95, + "grad_norm": 16.838339184236734, + "learning_rate": 1.599550706281027e-05, + "loss": 0.8145, + "step": 6079 + }, + { + "epoch": 0.95, + "grad_norm": 43.52356748710453, + "learning_rate": 1.5994157290446953e-05, + "loss": 0.6234, + "step": 6080 + }, + { + "epoch": 0.95, + "grad_norm": 13.221769869464637, + "learning_rate": 
1.5992807347613305e-05, + "loss": 0.7163, + "step": 6081 + }, + { + "epoch": 0.95, + "grad_norm": 22.25877150012426, + "learning_rate": 1.5991457234347725e-05, + "loss": 0.7037, + "step": 6082 + }, + { + "epoch": 0.95, + "grad_norm": 17.356749499155963, + "learning_rate": 1.5990106950688605e-05, + "loss": 0.6282, + "step": 6083 + }, + { + "epoch": 0.95, + "grad_norm": 18.581752400514258, + "learning_rate": 1.5988756496674345e-05, + "loss": 0.6403, + "step": 6084 + }, + { + "epoch": 0.95, + "grad_norm": 15.290055549234074, + "learning_rate": 1.5987405872343358e-05, + "loss": 0.7299, + "step": 6085 + }, + { + "epoch": 0.95, + "grad_norm": 35.39351909164021, + "learning_rate": 1.5986055077734046e-05, + "loss": 0.6297, + "step": 6086 + }, + { + "epoch": 0.95, + "grad_norm": 15.03817085595391, + "learning_rate": 1.5984704112884834e-05, + "loss": 0.7216, + "step": 6087 + }, + { + "epoch": 0.95, + "grad_norm": 157.62999318720173, + "learning_rate": 1.5983352977834132e-05, + "loss": 0.7822, + "step": 6088 + }, + { + "epoch": 0.95, + "grad_norm": 12.102644789452551, + "learning_rate": 1.5982001672620376e-05, + "loss": 0.6764, + "step": 6089 + }, + { + "epoch": 0.95, + "grad_norm": 16.830539053822466, + "learning_rate": 1.5980650197281994e-05, + "loss": 0.784, + "step": 6090 + }, + { + "epoch": 0.95, + "grad_norm": 16.20774198726569, + "learning_rate": 1.597929855185741e-05, + "loss": 0.7037, + "step": 6091 + }, + { + "epoch": 0.95, + "grad_norm": 17.601887583958202, + "learning_rate": 1.597794673638508e-05, + "loss": 0.6495, + "step": 6092 + }, + { + "epoch": 0.95, + "grad_norm": 13.91086018416681, + "learning_rate": 1.597659475090344e-05, + "loss": 0.6718, + "step": 6093 + }, + { + "epoch": 0.95, + "grad_norm": 16.735718613066542, + "learning_rate": 1.5975242595450946e-05, + "loss": 0.6484, + "step": 6094 + }, + { + "epoch": 0.95, + "grad_norm": 21.485949560554143, + "learning_rate": 1.5973890270066042e-05, + "loss": 0.694, + "step": 6095 + }, + { + "epoch": 0.95, + "grad_norm": 13.755962155331988, + "learning_rate": 1.5972537774787194e-05, + "loss": 0.6725, + "step": 6096 + }, + { + "epoch": 0.95, + "grad_norm": 25.94634781643838, + "learning_rate": 1.597118510965287e-05, + "loss": 0.8582, + "step": 6097 + }, + { + "epoch": 0.95, + "grad_norm": 15.60803522865118, + "learning_rate": 1.5969832274701528e-05, + "loss": 0.803, + "step": 6098 + }, + { + "epoch": 0.95, + "grad_norm": 23.74992923318208, + "learning_rate": 1.5968479269971654e-05, + "loss": 0.8157, + "step": 6099 + }, + { + "epoch": 0.95, + "grad_norm": 23.218071783898644, + "learning_rate": 1.596712609550172e-05, + "loss": 0.736, + "step": 6100 + }, + { + "epoch": 0.95, + "grad_norm": 12.684420037970854, + "learning_rate": 1.5965772751330207e-05, + "loss": 0.6614, + "step": 6101 + }, + { + "epoch": 0.95, + "grad_norm": 15.545800636779923, + "learning_rate": 1.596441923749561e-05, + "loss": 0.6814, + "step": 6102 + }, + { + "epoch": 0.95, + "grad_norm": 14.094761465892764, + "learning_rate": 1.5963065554036418e-05, + "loss": 0.6432, + "step": 6103 + }, + { + "epoch": 0.95, + "grad_norm": 26.097896790077538, + "learning_rate": 1.596171170099113e-05, + "loss": 0.775, + "step": 6104 + }, + { + "epoch": 0.95, + "grad_norm": 16.60526115145929, + "learning_rate": 1.596035767839825e-05, + "loss": 0.7764, + "step": 6105 + }, + { + "epoch": 0.95, + "grad_norm": 16.457857204946226, + "learning_rate": 1.595900348629628e-05, + "loss": 0.7346, + "step": 6106 + }, + { + "epoch": 0.95, + "grad_norm": 15.03574651350869, + "learning_rate": 
1.5957649124723743e-05, + "loss": 0.6761, + "step": 6107 + }, + { + "epoch": 0.95, + "grad_norm": 18.79131792890106, + "learning_rate": 1.5956294593719147e-05, + "loss": 0.6428, + "step": 6108 + }, + { + "epoch": 0.95, + "grad_norm": 26.21022323169579, + "learning_rate": 1.5954939893321016e-05, + "loss": 0.7119, + "step": 6109 + }, + { + "epoch": 0.95, + "grad_norm": 20.838029359524974, + "learning_rate": 1.595358502356788e-05, + "loss": 0.7915, + "step": 6110 + }, + { + "epoch": 0.95, + "grad_norm": 19.35842348443619, + "learning_rate": 1.5952229984498265e-05, + "loss": 0.7209, + "step": 6111 + }, + { + "epoch": 0.95, + "grad_norm": 16.590018111377717, + "learning_rate": 1.5950874776150715e-05, + "loss": 0.7623, + "step": 6112 + }, + { + "epoch": 0.95, + "grad_norm": 18.048860509236775, + "learning_rate": 1.5949519398563766e-05, + "loss": 0.767, + "step": 6113 + }, + { + "epoch": 0.96, + "grad_norm": 30.0661945402471, + "learning_rate": 1.594816385177597e-05, + "loss": 0.6822, + "step": 6114 + }, + { + "epoch": 0.96, + "grad_norm": 13.486607023168693, + "learning_rate": 1.5946808135825866e-05, + "loss": 0.6685, + "step": 6115 + }, + { + "epoch": 0.96, + "grad_norm": 17.553040729827732, + "learning_rate": 1.5945452250752022e-05, + "loss": 0.7848, + "step": 6116 + }, + { + "epoch": 0.96, + "grad_norm": 24.321822710506687, + "learning_rate": 1.5944096196592994e-05, + "loss": 0.7468, + "step": 6117 + }, + { + "epoch": 0.96, + "grad_norm": 18.95007752930136, + "learning_rate": 1.5942739973387345e-05, + "loss": 0.6842, + "step": 6118 + }, + { + "epoch": 0.96, + "grad_norm": 16.568679781289745, + "learning_rate": 1.594138358117365e-05, + "loss": 0.782, + "step": 6119 + }, + { + "epoch": 0.96, + "grad_norm": 19.57447788772685, + "learning_rate": 1.594002701999048e-05, + "loss": 0.745, + "step": 6120 + }, + { + "epoch": 0.96, + "grad_norm": 19.92677475008517, + "learning_rate": 1.5938670289876418e-05, + "loss": 0.7654, + "step": 6121 + }, + { + "epoch": 0.96, + "grad_norm": 20.45918067710494, + "learning_rate": 1.5937313390870045e-05, + "loss": 0.666, + "step": 6122 + }, + { + "epoch": 0.96, + "grad_norm": 24.467953735978377, + "learning_rate": 1.5935956323009954e-05, + "loss": 0.7535, + "step": 6123 + }, + { + "epoch": 0.96, + "grad_norm": 13.270819109929526, + "learning_rate": 1.5934599086334733e-05, + "loss": 0.618, + "step": 6124 + }, + { + "epoch": 0.96, + "grad_norm": 20.83316831777706, + "learning_rate": 1.593324168088299e-05, + "loss": 0.7432, + "step": 6125 + }, + { + "epoch": 0.96, + "grad_norm": 11.483902816731982, + "learning_rate": 1.5931884106693326e-05, + "loss": 0.6655, + "step": 6126 + }, + { + "epoch": 0.96, + "grad_norm": 21.763631881407356, + "learning_rate": 1.5930526363804344e-05, + "loss": 0.7791, + "step": 6127 + }, + { + "epoch": 0.96, + "grad_norm": 21.520092686101282, + "learning_rate": 1.5929168452254664e-05, + "loss": 0.7591, + "step": 6128 + }, + { + "epoch": 0.96, + "grad_norm": 20.464502903706038, + "learning_rate": 1.5927810372082903e-05, + "loss": 0.7043, + "step": 6129 + }, + { + "epoch": 0.96, + "grad_norm": 13.67035408170661, + "learning_rate": 1.592645212332768e-05, + "loss": 0.674, + "step": 6130 + }, + { + "epoch": 0.96, + "grad_norm": 23.229569894486914, + "learning_rate": 1.5925093706027625e-05, + "loss": 0.6965, + "step": 6131 + }, + { + "epoch": 0.96, + "grad_norm": 24.16192466042968, + "learning_rate": 1.5923735120221372e-05, + "loss": 0.7232, + "step": 6132 + }, + { + "epoch": 0.96, + "grad_norm": 17.857956823309355, + "learning_rate": 
1.5922376365947557e-05, + "loss": 0.6704, + "step": 6133 + }, + { + "epoch": 0.96, + "grad_norm": 15.807482009906417, + "learning_rate": 1.5921017443244825e-05, + "loss": 0.7533, + "step": 6134 + }, + { + "epoch": 0.96, + "grad_norm": 30.69490538977884, + "learning_rate": 1.5919658352151818e-05, + "loss": 0.6534, + "step": 6135 + }, + { + "epoch": 0.96, + "grad_norm": 15.612192071799736, + "learning_rate": 1.5918299092707194e-05, + "loss": 0.707, + "step": 6136 + }, + { + "epoch": 0.96, + "grad_norm": 18.23309693192679, + "learning_rate": 1.5916939664949602e-05, + "loss": 0.6635, + "step": 6137 + }, + { + "epoch": 0.96, + "grad_norm": 28.64551615845766, + "learning_rate": 1.591558006891771e-05, + "loss": 0.7467, + "step": 6138 + }, + { + "epoch": 0.96, + "grad_norm": 22.691285514904354, + "learning_rate": 1.5914220304650177e-05, + "loss": 0.693, + "step": 6139 + }, + { + "epoch": 0.96, + "grad_norm": 13.408445064087257, + "learning_rate": 1.5912860372185683e-05, + "loss": 0.7431, + "step": 6140 + }, + { + "epoch": 0.96, + "grad_norm": 17.917755096572193, + "learning_rate": 1.5911500271562895e-05, + "loss": 0.726, + "step": 6141 + }, + { + "epoch": 0.96, + "grad_norm": 16.26411704320793, + "learning_rate": 1.5910140002820503e-05, + "loss": 0.5939, + "step": 6142 + }, + { + "epoch": 0.96, + "grad_norm": 21.180750087450953, + "learning_rate": 1.5908779565997182e-05, + "loss": 0.6788, + "step": 6143 + }, + { + "epoch": 0.96, + "grad_norm": 28.832142271379315, + "learning_rate": 1.5907418961131628e-05, + "loss": 0.8077, + "step": 6144 + }, + { + "epoch": 0.96, + "grad_norm": 23.37061146476504, + "learning_rate": 1.5906058188262534e-05, + "loss": 0.6322, + "step": 6145 + }, + { + "epoch": 0.96, + "grad_norm": 18.14986540833794, + "learning_rate": 1.5904697247428602e-05, + "loss": 0.6985, + "step": 6146 + }, + { + "epoch": 0.96, + "grad_norm": 16.1414003195211, + "learning_rate": 1.5903336138668532e-05, + "loss": 0.6955, + "step": 6147 + }, + { + "epoch": 0.96, + "grad_norm": 18.395505968840943, + "learning_rate": 1.5901974862021036e-05, + "loss": 0.753, + "step": 6148 + }, + { + "epoch": 0.96, + "grad_norm": 14.91349934992944, + "learning_rate": 1.5900613417524827e-05, + "loss": 0.744, + "step": 6149 + }, + { + "epoch": 0.96, + "grad_norm": 14.357660189111426, + "learning_rate": 1.5899251805218625e-05, + "loss": 0.7316, + "step": 6150 + }, + { + "epoch": 0.96, + "grad_norm": 27.979583490308016, + "learning_rate": 1.5897890025141153e-05, + "loss": 0.8225, + "step": 6151 + }, + { + "epoch": 0.96, + "grad_norm": 17.383404537791456, + "learning_rate": 1.589652807733114e-05, + "loss": 0.6453, + "step": 6152 + }, + { + "epoch": 0.96, + "grad_norm": 14.601501822409327, + "learning_rate": 1.5895165961827315e-05, + "loss": 0.6967, + "step": 6153 + }, + { + "epoch": 0.96, + "grad_norm": 26.27885179032878, + "learning_rate": 1.5893803678668424e-05, + "loss": 0.6976, + "step": 6154 + }, + { + "epoch": 0.96, + "grad_norm": 29.475166394151895, + "learning_rate": 1.5892441227893197e-05, + "loss": 0.7154, + "step": 6155 + }, + { + "epoch": 0.96, + "grad_norm": 19.082518067099613, + "learning_rate": 1.589107860954039e-05, + "loss": 0.768, + "step": 6156 + }, + { + "epoch": 0.96, + "grad_norm": 17.038815927273703, + "learning_rate": 1.588971582364876e-05, + "loss": 0.6498, + "step": 6157 + }, + { + "epoch": 0.96, + "grad_norm": 38.421322803865195, + "learning_rate": 1.588835287025705e-05, + "loss": 0.7573, + "step": 6158 + }, + { + "epoch": 0.96, + "grad_norm": 21.50476274161912, + "learning_rate": 
1.588698974940403e-05, + "loss": 0.6594, + "step": 6159 + }, + { + "epoch": 0.96, + "grad_norm": 19.396100140412013, + "learning_rate": 1.5885626461128467e-05, + "loss": 0.7322, + "step": 6160 + }, + { + "epoch": 0.96, + "grad_norm": 20.76339265350174, + "learning_rate": 1.588426300546913e-05, + "loss": 0.7459, + "step": 6161 + }, + { + "epoch": 0.96, + "grad_norm": 28.619583614843886, + "learning_rate": 1.5882899382464795e-05, + "loss": 0.7357, + "step": 6162 + }, + { + "epoch": 0.96, + "grad_norm": 18.08433901353271, + "learning_rate": 1.5881535592154245e-05, + "loss": 0.7771, + "step": 6163 + }, + { + "epoch": 0.96, + "grad_norm": 17.57934639374916, + "learning_rate": 1.5880171634576262e-05, + "loss": 0.7524, + "step": 6164 + }, + { + "epoch": 0.96, + "grad_norm": 18.70357114893932, + "learning_rate": 1.587880750976964e-05, + "loss": 0.6779, + "step": 6165 + }, + { + "epoch": 0.96, + "grad_norm": 17.966815701738245, + "learning_rate": 1.5877443217773166e-05, + "loss": 0.7679, + "step": 6166 + }, + { + "epoch": 0.96, + "grad_norm": 20.02784629787974, + "learning_rate": 1.5876078758625647e-05, + "loss": 0.7229, + "step": 6167 + }, + { + "epoch": 0.96, + "grad_norm": 14.905083016833325, + "learning_rate": 1.5874714132365887e-05, + "loss": 0.7336, + "step": 6168 + }, + { + "epoch": 0.96, + "grad_norm": 14.604266264866526, + "learning_rate": 1.5873349339032693e-05, + "loss": 0.6868, + "step": 6169 + }, + { + "epoch": 0.96, + "grad_norm": 13.742797501879371, + "learning_rate": 1.587198437866488e-05, + "loss": 0.6729, + "step": 6170 + }, + { + "epoch": 0.96, + "grad_norm": 19.6087345679823, + "learning_rate": 1.5870619251301267e-05, + "loss": 0.7424, + "step": 6171 + }, + { + "epoch": 0.96, + "grad_norm": 14.605849624038399, + "learning_rate": 1.5869253956980672e-05, + "loss": 0.6427, + "step": 6172 + }, + { + "epoch": 0.96, + "grad_norm": 26.16999020180721, + "learning_rate": 1.5867888495741934e-05, + "loss": 0.7078, + "step": 6173 + }, + { + "epoch": 0.96, + "grad_norm": 16.37528765949897, + "learning_rate": 1.5866522867623876e-05, + "loss": 0.7846, + "step": 6174 + }, + { + "epoch": 0.96, + "grad_norm": 16.227007441281597, + "learning_rate": 1.586515707266534e-05, + "loss": 0.6812, + "step": 6175 + }, + { + "epoch": 0.96, + "grad_norm": 17.90701736600108, + "learning_rate": 1.5863791110905172e-05, + "loss": 0.7431, + "step": 6176 + }, + { + "epoch": 0.96, + "grad_norm": 12.573309327177764, + "learning_rate": 1.586242498238221e-05, + "loss": 0.6916, + "step": 6177 + }, + { + "epoch": 0.97, + "grad_norm": 23.951134870883674, + "learning_rate": 1.5861058687135314e-05, + "loss": 0.8912, + "step": 6178 + }, + { + "epoch": 0.97, + "grad_norm": 14.686810916603525, + "learning_rate": 1.5859692225203335e-05, + "loss": 0.6475, + "step": 6179 + }, + { + "epoch": 0.97, + "grad_norm": 18.335740569056334, + "learning_rate": 1.585832559662514e-05, + "loss": 0.66, + "step": 6180 + }, + { + "epoch": 0.97, + "grad_norm": 15.311238841262115, + "learning_rate": 1.585695880143959e-05, + "loss": 0.7251, + "step": 6181 + }, + { + "epoch": 0.97, + "grad_norm": 19.062592071560996, + "learning_rate": 1.5855591839685556e-05, + "loss": 0.7371, + "step": 6182 + }, + { + "epoch": 0.97, + "grad_norm": 20.24589320544766, + "learning_rate": 1.585422471140192e-05, + "loss": 0.7716, + "step": 6183 + }, + { + "epoch": 0.97, + "grad_norm": 13.928098998528, + "learning_rate": 1.5852857416627552e-05, + "loss": 0.6516, + "step": 6184 + }, + { + "epoch": 0.97, + "grad_norm": 28.28717905668244, + "learning_rate": 
1.585148995540135e-05, + "loss": 0.7295, + "step": 6185 + }, + { + "epoch": 0.97, + "grad_norm": 17.73335874813438, + "learning_rate": 1.585012232776219e-05, + "loss": 0.7134, + "step": 6186 + }, + { + "epoch": 0.97, + "grad_norm": 19.872737960476485, + "learning_rate": 1.584875453374898e-05, + "loss": 0.7236, + "step": 6187 + }, + { + "epoch": 0.97, + "grad_norm": 31.06385799630569, + "learning_rate": 1.5847386573400605e-05, + "loss": 0.6823, + "step": 6188 + }, + { + "epoch": 0.97, + "grad_norm": 25.418897220507894, + "learning_rate": 1.584601844675598e-05, + "loss": 0.8347, + "step": 6189 + }, + { + "epoch": 0.97, + "grad_norm": 17.48595862593136, + "learning_rate": 1.584465015385401e-05, + "loss": 0.6605, + "step": 6190 + }, + { + "epoch": 0.97, + "grad_norm": 15.049688708968837, + "learning_rate": 1.584328169473361e-05, + "loss": 0.702, + "step": 6191 + }, + { + "epoch": 0.97, + "grad_norm": 17.301021634979367, + "learning_rate": 1.58419130694337e-05, + "loss": 0.7398, + "step": 6192 + }, + { + "epoch": 0.97, + "grad_norm": 18.44046627465891, + "learning_rate": 1.5840544277993193e-05, + "loss": 0.7179, + "step": 6193 + }, + { + "epoch": 0.97, + "grad_norm": 13.966605734474095, + "learning_rate": 1.5839175320451027e-05, + "loss": 0.7269, + "step": 6194 + }, + { + "epoch": 0.97, + "grad_norm": 25.76288541822951, + "learning_rate": 1.583780619684613e-05, + "loss": 0.6416, + "step": 6195 + }, + { + "epoch": 0.97, + "grad_norm": 21.82144262567529, + "learning_rate": 1.5836436907217438e-05, + "loss": 0.7189, + "step": 6196 + }, + { + "epoch": 0.97, + "grad_norm": 14.499975888359641, + "learning_rate": 1.58350674516039e-05, + "loss": 0.6673, + "step": 6197 + }, + { + "epoch": 0.97, + "grad_norm": 12.512830787258151, + "learning_rate": 1.583369783004445e-05, + "loss": 0.6559, + "step": 6198 + }, + { + "epoch": 0.97, + "grad_norm": 31.764767172207975, + "learning_rate": 1.5832328042578047e-05, + "loss": 0.6729, + "step": 6199 + }, + { + "epoch": 0.97, + "grad_norm": 20.18189812003746, + "learning_rate": 1.5830958089243652e-05, + "loss": 0.7005, + "step": 6200 + }, + { + "epoch": 0.97, + "grad_norm": 16.926758816958422, + "learning_rate": 1.5829587970080217e-05, + "loss": 0.6892, + "step": 6201 + }, + { + "epoch": 0.97, + "grad_norm": 14.682554361445563, + "learning_rate": 1.582821768512671e-05, + "loss": 0.6898, + "step": 6202 + }, + { + "epoch": 0.97, + "grad_norm": 13.80032111620127, + "learning_rate": 1.5826847234422102e-05, + "loss": 0.5931, + "step": 6203 + }, + { + "epoch": 0.97, + "grad_norm": 31.578395719655997, + "learning_rate": 1.5825476618005363e-05, + "loss": 0.7904, + "step": 6204 + }, + { + "epoch": 0.97, + "grad_norm": 14.478891744221988, + "learning_rate": 1.582410583591548e-05, + "loss": 0.6769, + "step": 6205 + }, + { + "epoch": 0.97, + "grad_norm": 17.466471831445197, + "learning_rate": 1.5822734888191437e-05, + "loss": 0.6986, + "step": 6206 + }, + { + "epoch": 0.97, + "grad_norm": 19.877754720031678, + "learning_rate": 1.5821363774872218e-05, + "loss": 0.7492, + "step": 6207 + }, + { + "epoch": 0.97, + "grad_norm": 20.61935295009391, + "learning_rate": 1.5819992495996818e-05, + "loss": 0.6531, + "step": 6208 + }, + { + "epoch": 0.97, + "grad_norm": 26.650594045296724, + "learning_rate": 1.5818621051604234e-05, + "loss": 0.6713, + "step": 6209 + }, + { + "epoch": 0.97, + "grad_norm": 13.13050746091519, + "learning_rate": 1.5817249441733475e-05, + "loss": 0.6609, + "step": 6210 + }, + { + "epoch": 0.97, + "grad_norm": 20.896457561928887, + "learning_rate": 
1.5815877666423543e-05, + "loss": 0.733, + "step": 6211 + }, + { + "epoch": 0.97, + "grad_norm": 23.620864088589446, + "learning_rate": 1.5814505725713454e-05, + "loss": 0.6689, + "step": 6212 + }, + { + "epoch": 0.97, + "grad_norm": 15.294414013500338, + "learning_rate": 1.581313361964222e-05, + "loss": 0.6283, + "step": 6213 + }, + { + "epoch": 0.97, + "grad_norm": 16.857289407117033, + "learning_rate": 1.5811761348248872e-05, + "loss": 0.6373, + "step": 6214 + }, + { + "epoch": 0.97, + "grad_norm": 22.443122265630272, + "learning_rate": 1.5810388911572424e-05, + "loss": 0.7406, + "step": 6215 + }, + { + "epoch": 0.97, + "grad_norm": 31.48483365011168, + "learning_rate": 1.5809016309651916e-05, + "loss": 0.7768, + "step": 6216 + }, + { + "epoch": 0.97, + "grad_norm": 14.811447672544205, + "learning_rate": 1.5807643542526387e-05, + "loss": 0.749, + "step": 6217 + }, + { + "epoch": 0.97, + "grad_norm": 24.078537042571483, + "learning_rate": 1.580627061023487e-05, + "loss": 0.6296, + "step": 6218 + }, + { + "epoch": 0.97, + "grad_norm": 17.313146821825555, + "learning_rate": 1.5804897512816416e-05, + "loss": 0.733, + "step": 6219 + }, + { + "epoch": 0.97, + "grad_norm": 22.227180493819073, + "learning_rate": 1.580352425031007e-05, + "loss": 0.6935, + "step": 6220 + }, + { + "epoch": 0.97, + "grad_norm": 20.230630329294836, + "learning_rate": 1.5802150822754888e-05, + "loss": 0.8322, + "step": 6221 + }, + { + "epoch": 0.97, + "grad_norm": 14.605318888456289, + "learning_rate": 1.580077723018993e-05, + "loss": 0.7045, + "step": 6222 + }, + { + "epoch": 0.97, + "grad_norm": 14.056639086029023, + "learning_rate": 1.5799403472654266e-05, + "loss": 0.6404, + "step": 6223 + }, + { + "epoch": 0.97, + "grad_norm": 18.59353403238485, + "learning_rate": 1.5798029550186957e-05, + "loss": 0.692, + "step": 6224 + }, + { + "epoch": 0.97, + "grad_norm": 15.638346383175973, + "learning_rate": 1.5796655462827077e-05, + "loss": 0.6574, + "step": 6225 + }, + { + "epoch": 0.97, + "grad_norm": 20.147086033456212, + "learning_rate": 1.5795281210613708e-05, + "loss": 0.6948, + "step": 6226 + }, + { + "epoch": 0.97, + "grad_norm": 15.524126123015039, + "learning_rate": 1.5793906793585935e-05, + "loss": 0.6924, + "step": 6227 + }, + { + "epoch": 0.97, + "grad_norm": 13.899402099788624, + "learning_rate": 1.5792532211782837e-05, + "loss": 0.6904, + "step": 6228 + }, + { + "epoch": 0.97, + "grad_norm": 17.832425310976404, + "learning_rate": 1.5791157465243517e-05, + "loss": 0.7059, + "step": 6229 + }, + { + "epoch": 0.97, + "grad_norm": 16.189572613082234, + "learning_rate": 1.5789782554007063e-05, + "loss": 0.7952, + "step": 6230 + }, + { + "epoch": 0.97, + "grad_norm": 25.967314395288053, + "learning_rate": 1.578840747811258e-05, + "loss": 0.7456, + "step": 6231 + }, + { + "epoch": 0.97, + "grad_norm": 18.490645421287475, + "learning_rate": 1.5787032237599173e-05, + "loss": 0.6799, + "step": 6232 + }, + { + "epoch": 0.97, + "grad_norm": 17.400697747167488, + "learning_rate": 1.5785656832505956e-05, + "loss": 0.8336, + "step": 6233 + }, + { + "epoch": 0.97, + "grad_norm": 25.58609244321543, + "learning_rate": 1.5784281262872046e-05, + "loss": 0.6934, + "step": 6234 + }, + { + "epoch": 0.97, + "grad_norm": 12.762061707343198, + "learning_rate": 1.5782905528736558e-05, + "loss": 0.7065, + "step": 6235 + }, + { + "epoch": 0.97, + "grad_norm": 20.433959511728663, + "learning_rate": 1.578152963013862e-05, + "loss": 0.6244, + "step": 6236 + }, + { + "epoch": 0.97, + "grad_norm": 14.306377118909536, + "learning_rate": 
1.578015356711736e-05, + "loss": 0.6028, + "step": 6237 + }, + { + "epoch": 0.97, + "grad_norm": 21.675394127297274, + "learning_rate": 1.5778777339711914e-05, + "loss": 0.8232, + "step": 6238 + }, + { + "epoch": 0.97, + "grad_norm": 17.718009130944154, + "learning_rate": 1.577740094796142e-05, + "loss": 0.6942, + "step": 6239 + }, + { + "epoch": 0.97, + "grad_norm": 24.662029480421435, + "learning_rate": 1.5776024391905026e-05, + "loss": 0.6917, + "step": 6240 + }, + { + "epoch": 0.97, + "grad_norm": 32.679605801722104, + "learning_rate": 1.5774647671581878e-05, + "loss": 0.7454, + "step": 6241 + }, + { + "epoch": 0.98, + "grad_norm": 22.049254008571342, + "learning_rate": 1.5773270787031124e-05, + "loss": 0.7414, + "step": 6242 + }, + { + "epoch": 0.98, + "grad_norm": 24.852855324957886, + "learning_rate": 1.577189373829193e-05, + "loss": 0.6756, + "step": 6243 + }, + { + "epoch": 0.98, + "grad_norm": 13.62704146910007, + "learning_rate": 1.5770516525403453e-05, + "loss": 0.7319, + "step": 6244 + }, + { + "epoch": 0.98, + "grad_norm": 12.562153743543485, + "learning_rate": 1.576913914840486e-05, + "loss": 0.6966, + "step": 6245 + }, + { + "epoch": 0.98, + "grad_norm": 18.28206774969318, + "learning_rate": 1.5767761607335327e-05, + "loss": 0.7001, + "step": 6246 + }, + { + "epoch": 0.98, + "grad_norm": 22.321958583315887, + "learning_rate": 1.5766383902234026e-05, + "loss": 0.6744, + "step": 6247 + }, + { + "epoch": 0.98, + "grad_norm": 23.249578056813633, + "learning_rate": 1.5765006033140142e-05, + "loss": 0.6593, + "step": 6248 + }, + { + "epoch": 0.98, + "grad_norm": 16.780097770914747, + "learning_rate": 1.5763628000092858e-05, + "loss": 0.7383, + "step": 6249 + }, + { + "epoch": 0.98, + "grad_norm": 26.861128854522114, + "learning_rate": 1.5762249803131365e-05, + "loss": 0.7002, + "step": 6250 + }, + { + "epoch": 0.98, + "grad_norm": 21.856258944628756, + "learning_rate": 1.5760871442294856e-05, + "loss": 0.8652, + "step": 6251 + }, + { + "epoch": 0.98, + "grad_norm": 11.94426362208592, + "learning_rate": 1.5759492917622537e-05, + "loss": 0.6385, + "step": 6252 + }, + { + "epoch": 0.98, + "grad_norm": 20.115648218541228, + "learning_rate": 1.5758114229153606e-05, + "loss": 0.7313, + "step": 6253 + }, + { + "epoch": 0.98, + "grad_norm": 13.829432571142346, + "learning_rate": 1.575673537692728e-05, + "loss": 0.6776, + "step": 6254 + }, + { + "epoch": 0.98, + "grad_norm": 26.05287736126642, + "learning_rate": 1.575535636098276e-05, + "loss": 0.6779, + "step": 6255 + }, + { + "epoch": 0.98, + "grad_norm": 21.580785969844754, + "learning_rate": 1.5753977181359277e-05, + "loss": 0.7603, + "step": 6256 + }, + { + "epoch": 0.98, + "grad_norm": 21.562898043745616, + "learning_rate": 1.5752597838096046e-05, + "loss": 0.6555, + "step": 6257 + }, + { + "epoch": 0.98, + "grad_norm": 19.524721865101647, + "learning_rate": 1.57512183312323e-05, + "loss": 0.693, + "step": 6258 + }, + { + "epoch": 0.98, + "grad_norm": 20.6832497298869, + "learning_rate": 1.574983866080727e-05, + "loss": 0.7587, + "step": 6259 + }, + { + "epoch": 0.98, + "grad_norm": 19.216483982755733, + "learning_rate": 1.5748458826860185e-05, + "loss": 0.683, + "step": 6260 + }, + { + "epoch": 0.98, + "grad_norm": 15.548388677261059, + "learning_rate": 1.5747078829430302e-05, + "loss": 0.7515, + "step": 6261 + }, + { + "epoch": 0.98, + "grad_norm": 21.187351731257, + "learning_rate": 1.5745698668556856e-05, + "loss": 0.7154, + "step": 6262 + }, + { + "epoch": 0.98, + "grad_norm": 15.402076516673862, + "learning_rate": 
1.5744318344279103e-05, + "loss": 0.6895, + "step": 6263 + }, + { + "epoch": 0.98, + "grad_norm": 25.176644906322444, + "learning_rate": 1.5742937856636294e-05, + "loss": 0.6538, + "step": 6264 + }, + { + "epoch": 0.98, + "grad_norm": 14.097045165379267, + "learning_rate": 1.5741557205667688e-05, + "loss": 0.697, + "step": 6265 + }, + { + "epoch": 0.98, + "grad_norm": 21.51688942833326, + "learning_rate": 1.574017639141256e-05, + "loss": 0.7147, + "step": 6266 + }, + { + "epoch": 0.98, + "grad_norm": 15.356930145712544, + "learning_rate": 1.5738795413910174e-05, + "loss": 0.674, + "step": 6267 + }, + { + "epoch": 0.98, + "grad_norm": 31.798673484233632, + "learning_rate": 1.57374142731998e-05, + "loss": 0.6925, + "step": 6268 + }, + { + "epoch": 0.98, + "grad_norm": 13.950738697464558, + "learning_rate": 1.573603296932072e-05, + "loss": 0.7904, + "step": 6269 + }, + { + "epoch": 0.98, + "grad_norm": 20.508249749744827, + "learning_rate": 1.5734651502312218e-05, + "loss": 0.7407, + "step": 6270 + }, + { + "epoch": 0.98, + "grad_norm": 16.141409384066787, + "learning_rate": 1.5733269872213583e-05, + "loss": 0.7864, + "step": 6271 + }, + { + "epoch": 0.98, + "grad_norm": 18.444272982668206, + "learning_rate": 1.5731888079064107e-05, + "loss": 0.6863, + "step": 6272 + }, + { + "epoch": 0.98, + "grad_norm": 14.12951265467563, + "learning_rate": 1.5730506122903086e-05, + "loss": 0.7873, + "step": 6273 + }, + { + "epoch": 0.98, + "grad_norm": 14.93230822650624, + "learning_rate": 1.5729124003769826e-05, + "loss": 0.7465, + "step": 6274 + }, + { + "epoch": 0.98, + "grad_norm": 17.134391908430306, + "learning_rate": 1.572774172170363e-05, + "loss": 0.7148, + "step": 6275 + }, + { + "epoch": 0.98, + "grad_norm": 15.246212959067847, + "learning_rate": 1.5726359276743808e-05, + "loss": 0.6541, + "step": 6276 + }, + { + "epoch": 0.98, + "grad_norm": 13.883252963354481, + "learning_rate": 1.5724976668929678e-05, + "loss": 0.6974, + "step": 6277 + }, + { + "epoch": 0.98, + "grad_norm": 35.41424468561854, + "learning_rate": 1.5723593898300562e-05, + "loss": 0.7017, + "step": 6278 + }, + { + "epoch": 0.98, + "grad_norm": 23.743387526486405, + "learning_rate": 1.572221096489578e-05, + "loss": 0.65, + "step": 6279 + }, + { + "epoch": 0.98, + "grad_norm": 11.916708543579952, + "learning_rate": 1.572082786875467e-05, + "loss": 0.6345, + "step": 6280 + }, + { + "epoch": 0.98, + "grad_norm": 26.015664969519534, + "learning_rate": 1.5719444609916564e-05, + "loss": 0.7147, + "step": 6281 + }, + { + "epoch": 0.98, + "grad_norm": 30.076012645710914, + "learning_rate": 1.5718061188420793e-05, + "loss": 0.7767, + "step": 6282 + }, + { + "epoch": 0.98, + "grad_norm": 14.62416221234385, + "learning_rate": 1.571667760430671e-05, + "loss": 0.7267, + "step": 6283 + }, + { + "epoch": 0.98, + "grad_norm": 14.473050718298492, + "learning_rate": 1.5715293857613662e-05, + "loss": 0.7076, + "step": 6284 + }, + { + "epoch": 0.98, + "grad_norm": 21.892231357457838, + "learning_rate": 1.5713909948380995e-05, + "loss": 0.7583, + "step": 6285 + }, + { + "epoch": 0.98, + "grad_norm": 18.017917791540224, + "learning_rate": 1.5712525876648076e-05, + "loss": 0.7583, + "step": 6286 + }, + { + "epoch": 0.98, + "grad_norm": 20.676341254821256, + "learning_rate": 1.5711141642454258e-05, + "loss": 0.7764, + "step": 6287 + }, + { + "epoch": 0.98, + "grad_norm": 20.796402382012346, + "learning_rate": 1.5709757245838918e-05, + "loss": 0.7369, + "step": 6288 + }, + { + "epoch": 0.98, + "grad_norm": 17.376078308044274, + "learning_rate": 
1.570837268684142e-05, + "loss": 0.7264, + "step": 6289 + }, + { + "epoch": 0.98, + "grad_norm": 19.632087278955975, + "learning_rate": 1.5706987965501142e-05, + "loss": 0.7711, + "step": 6290 + }, + { + "epoch": 0.98, + "grad_norm": 15.923638613941863, + "learning_rate": 1.5705603081857463e-05, + "loss": 0.6989, + "step": 6291 + }, + { + "epoch": 0.98, + "grad_norm": 18.242577810420823, + "learning_rate": 1.5704218035949773e-05, + "loss": 0.7212, + "step": 6292 + }, + { + "epoch": 0.98, + "grad_norm": 15.279137905210119, + "learning_rate": 1.570283282781746e-05, + "loss": 0.7621, + "step": 6293 + }, + { + "epoch": 0.98, + "grad_norm": 18.450926928379797, + "learning_rate": 1.5701447457499914e-05, + "loss": 0.791, + "step": 6294 + }, + { + "epoch": 0.98, + "grad_norm": 17.837420604916066, + "learning_rate": 1.570006192503654e-05, + "loss": 0.77, + "step": 6295 + }, + { + "epoch": 0.98, + "grad_norm": 18.909350604393193, + "learning_rate": 1.5698676230466735e-05, + "loss": 0.7334, + "step": 6296 + }, + { + "epoch": 0.98, + "grad_norm": 20.886295373527055, + "learning_rate": 1.5697290373829913e-05, + "loss": 0.6799, + "step": 6297 + }, + { + "epoch": 0.98, + "grad_norm": 22.233097679083436, + "learning_rate": 1.5695904355165486e-05, + "loss": 0.7532, + "step": 6298 + }, + { + "epoch": 0.98, + "grad_norm": 16.108783529765777, + "learning_rate": 1.5694518174512873e-05, + "loss": 0.6611, + "step": 6299 + }, + { + "epoch": 0.98, + "grad_norm": 17.657668291364995, + "learning_rate": 1.5693131831911494e-05, + "loss": 0.7844, + "step": 6300 + }, + { + "epoch": 0.98, + "grad_norm": 25.34681562116457, + "learning_rate": 1.5691745327400776e-05, + "loss": 0.7776, + "step": 6301 + }, + { + "epoch": 0.98, + "grad_norm": 30.17676812592073, + "learning_rate": 1.569035866102015e-05, + "loss": 0.7234, + "step": 6302 + }, + { + "epoch": 0.98, + "grad_norm": 17.39102641685156, + "learning_rate": 1.568897183280905e-05, + "loss": 0.7063, + "step": 6303 + }, + { + "epoch": 0.98, + "grad_norm": 16.93110483175266, + "learning_rate": 1.5687584842806925e-05, + "loss": 0.6177, + "step": 6304 + }, + { + "epoch": 0.98, + "grad_norm": 15.526691742169396, + "learning_rate": 1.568619769105321e-05, + "loss": 0.6045, + "step": 6305 + }, + { + "epoch": 0.99, + "grad_norm": 22.57082940305604, + "learning_rate": 1.568481037758736e-05, + "loss": 0.7763, + "step": 6306 + }, + { + "epoch": 0.99, + "grad_norm": 19.24234523855676, + "learning_rate": 1.5683422902448827e-05, + "loss": 0.7059, + "step": 6307 + }, + { + "epoch": 0.99, + "grad_norm": 20.985835218432385, + "learning_rate": 1.5682035265677074e-05, + "loss": 0.6476, + "step": 6308 + }, + { + "epoch": 0.99, + "grad_norm": 24.889131048595345, + "learning_rate": 1.568064746731156e-05, + "loss": 0.7627, + "step": 6309 + }, + { + "epoch": 0.99, + "grad_norm": 27.590260461076955, + "learning_rate": 1.5679259507391755e-05, + "loss": 0.7267, + "step": 6310 + }, + { + "epoch": 0.99, + "grad_norm": 16.37743047520126, + "learning_rate": 1.5677871385957134e-05, + "loss": 0.731, + "step": 6311 + }, + { + "epoch": 0.99, + "grad_norm": 18.17826006522586, + "learning_rate": 1.567648310304717e-05, + "loss": 0.7181, + "step": 6312 + }, + { + "epoch": 0.99, + "grad_norm": 15.444323788353781, + "learning_rate": 1.567509465870135e-05, + "loss": 0.7175, + "step": 6313 + }, + { + "epoch": 0.99, + "grad_norm": 18.491751149568916, + "learning_rate": 1.567370605295915e-05, + "loss": 0.7675, + "step": 6314 + }, + { + "epoch": 0.99, + "grad_norm": 15.453460728283682, + "learning_rate": 
1.5672317285860076e-05, + "loss": 0.6342, + "step": 6315 + }, + { + "epoch": 0.99, + "grad_norm": 22.80354555471989, + "learning_rate": 1.5670928357443617e-05, + "loss": 0.7829, + "step": 6316 + }, + { + "epoch": 0.99, + "grad_norm": 21.4134458670892, + "learning_rate": 1.566953926774927e-05, + "loss": 0.7413, + "step": 6317 + }, + { + "epoch": 0.99, + "grad_norm": 14.142592560890225, + "learning_rate": 1.5668150016816545e-05, + "loss": 0.7177, + "step": 6318 + }, + { + "epoch": 0.99, + "grad_norm": 16.270432504738718, + "learning_rate": 1.5666760604684947e-05, + "loss": 0.7444, + "step": 6319 + }, + { + "epoch": 0.99, + "grad_norm": 18.252765065559505, + "learning_rate": 1.5665371031393994e-05, + "loss": 0.8004, + "step": 6320 + }, + { + "epoch": 0.99, + "grad_norm": 22.988404591763434, + "learning_rate": 1.56639812969832e-05, + "loss": 0.7443, + "step": 6321 + }, + { + "epoch": 0.99, + "grad_norm": 16.969777669751174, + "learning_rate": 1.5662591401492096e-05, + "loss": 0.7221, + "step": 6322 + }, + { + "epoch": 0.99, + "grad_norm": 17.280438987023008, + "learning_rate": 1.5661201344960203e-05, + "loss": 0.7201, + "step": 6323 + }, + { + "epoch": 0.99, + "grad_norm": 37.57959628327843, + "learning_rate": 1.5659811127427053e-05, + "loss": 0.767, + "step": 6324 + }, + { + "epoch": 0.99, + "grad_norm": 19.437502706932296, + "learning_rate": 1.5658420748932187e-05, + "loss": 0.6743, + "step": 6325 + }, + { + "epoch": 0.99, + "grad_norm": 22.148492767628333, + "learning_rate": 1.5657030209515146e-05, + "loss": 0.7354, + "step": 6326 + }, + { + "epoch": 0.99, + "grad_norm": 20.79360772813145, + "learning_rate": 1.5655639509215476e-05, + "loss": 0.6183, + "step": 6327 + }, + { + "epoch": 0.99, + "grad_norm": 15.334730897115843, + "learning_rate": 1.565424864807273e-05, + "loss": 0.6637, + "step": 6328 + }, + { + "epoch": 0.99, + "grad_norm": 22.47819394584062, + "learning_rate": 1.565285762612645e-05, + "loss": 0.6701, + "step": 6329 + }, + { + "epoch": 0.99, + "grad_norm": 16.261798390201033, + "learning_rate": 1.5651466443416213e-05, + "loss": 0.6882, + "step": 6330 + }, + { + "epoch": 0.99, + "grad_norm": 15.828376330727263, + "learning_rate": 1.5650075099981573e-05, + "loss": 0.7075, + "step": 6331 + }, + { + "epoch": 0.99, + "grad_norm": 20.908258572557916, + "learning_rate": 1.564868359586211e-05, + "loss": 0.7339, + "step": 6332 + }, + { + "epoch": 0.99, + "grad_norm": 24.5335566250218, + "learning_rate": 1.564729193109738e-05, + "loss": 0.6724, + "step": 6333 + }, + { + "epoch": 0.99, + "grad_norm": 19.72041759943538, + "learning_rate": 1.5645900105726976e-05, + "loss": 0.6783, + "step": 6334 + }, + { + "epoch": 0.99, + "grad_norm": 25.438276170094106, + "learning_rate": 1.5644508119790477e-05, + "loss": 0.7547, + "step": 6335 + }, + { + "epoch": 0.99, + "grad_norm": 21.496643036512626, + "learning_rate": 1.5643115973327464e-05, + "loss": 0.7009, + "step": 6336 + }, + { + "epoch": 0.99, + "grad_norm": 26.56720763395691, + "learning_rate": 1.5641723666377536e-05, + "loss": 0.7125, + "step": 6337 + }, + { + "epoch": 0.99, + "grad_norm": 19.754529238248484, + "learning_rate": 1.564033119898029e-05, + "loss": 0.7256, + "step": 6338 + }, + { + "epoch": 0.99, + "grad_norm": 16.333374418697414, + "learning_rate": 1.5638938571175324e-05, + "loss": 0.6783, + "step": 6339 + }, + { + "epoch": 0.99, + "grad_norm": 16.92363618146433, + "learning_rate": 1.563754578300224e-05, + "loss": 0.6673, + "step": 6340 + }, + { + "epoch": 0.99, + "grad_norm": 21.209951178308156, + "learning_rate": 
1.5636152834500654e-05, + "loss": 0.6956, + "step": 6341 + }, + { + "epoch": 0.99, + "grad_norm": 19.734447474584446, + "learning_rate": 1.5634759725710178e-05, + "loss": 0.6897, + "step": 6342 + }, + { + "epoch": 0.99, + "grad_norm": 18.960526927169756, + "learning_rate": 1.5633366456670433e-05, + "loss": 0.7769, + "step": 6343 + }, + { + "epoch": 0.99, + "grad_norm": 15.17234637542746, + "learning_rate": 1.563197302742104e-05, + "loss": 0.6268, + "step": 6344 + }, + { + "epoch": 0.99, + "grad_norm": 13.327554166181422, + "learning_rate": 1.5630579438001626e-05, + "loss": 0.671, + "step": 6345 + }, + { + "epoch": 0.99, + "grad_norm": 21.89072928584197, + "learning_rate": 1.562918568845183e-05, + "loss": 0.6727, + "step": 6346 + }, + { + "epoch": 0.99, + "grad_norm": 24.526235828507357, + "learning_rate": 1.562779177881129e-05, + "loss": 0.6639, + "step": 6347 + }, + { + "epoch": 0.99, + "grad_norm": 13.72861549057825, + "learning_rate": 1.5626397709119638e-05, + "loss": 0.6892, + "step": 6348 + }, + { + "epoch": 0.99, + "grad_norm": 22.15658985786569, + "learning_rate": 1.562500347941653e-05, + "loss": 0.6799, + "step": 6349 + }, + { + "epoch": 0.99, + "grad_norm": 21.507176640634484, + "learning_rate": 1.5623609089741608e-05, + "loss": 0.7295, + "step": 6350 + }, + { + "epoch": 0.99, + "grad_norm": 23.77365799424598, + "learning_rate": 1.5622214540134536e-05, + "loss": 0.7676, + "step": 6351 + }, + { + "epoch": 0.99, + "grad_norm": 13.633065273269677, + "learning_rate": 1.5620819830634975e-05, + "loss": 0.72, + "step": 6352 + }, + { + "epoch": 0.99, + "grad_norm": 20.624250783461548, + "learning_rate": 1.5619424961282585e-05, + "loss": 0.6405, + "step": 6353 + }, + { + "epoch": 0.99, + "grad_norm": 15.837879893131927, + "learning_rate": 1.5618029932117035e-05, + "loss": 0.672, + "step": 6354 + }, + { + "epoch": 0.99, + "grad_norm": 16.2073531832548, + "learning_rate": 1.5616634743177996e-05, + "loss": 0.6685, + "step": 6355 + }, + { + "epoch": 0.99, + "grad_norm": 23.607158163602943, + "learning_rate": 1.5615239394505154e-05, + "loss": 0.7397, + "step": 6356 + }, + { + "epoch": 0.99, + "grad_norm": 21.430247388166993, + "learning_rate": 1.5613843886138192e-05, + "loss": 0.6731, + "step": 6357 + }, + { + "epoch": 0.99, + "grad_norm": 15.615028495457203, + "learning_rate": 1.561244821811679e-05, + "loss": 0.7111, + "step": 6358 + }, + { + "epoch": 0.99, + "grad_norm": 20.608515768526523, + "learning_rate": 1.5611052390480646e-05, + "loss": 0.7206, + "step": 6359 + }, + { + "epoch": 0.99, + "grad_norm": 12.936369934115836, + "learning_rate": 1.5609656403269452e-05, + "loss": 0.6903, + "step": 6360 + }, + { + "epoch": 0.99, + "grad_norm": 15.944315726390709, + "learning_rate": 1.560826025652291e-05, + "loss": 0.7447, + "step": 6361 + }, + { + "epoch": 0.99, + "grad_norm": 20.259277886198085, + "learning_rate": 1.560686395028073e-05, + "loss": 0.6132, + "step": 6362 + }, + { + "epoch": 0.99, + "grad_norm": 13.02463172180988, + "learning_rate": 1.560546748458262e-05, + "loss": 0.6949, + "step": 6363 + }, + { + "epoch": 0.99, + "grad_norm": 30.59667730938663, + "learning_rate": 1.5604070859468292e-05, + "loss": 0.6782, + "step": 6364 + }, + { + "epoch": 0.99, + "grad_norm": 11.042902305654117, + "learning_rate": 1.5602674074977467e-05, + "loss": 0.7063, + "step": 6365 + }, + { + "epoch": 0.99, + "grad_norm": 24.800659256294253, + "learning_rate": 1.560127713114987e-05, + "loss": 0.7687, + "step": 6366 + }, + { + "epoch": 0.99, + "grad_norm": 15.89127829661326, + "learning_rate": 
1.559988002802523e-05, + "loss": 0.6818, + "step": 6367 + }, + { + "epoch": 0.99, + "grad_norm": 14.499096024221645, + "learning_rate": 1.5598482765643273e-05, + "loss": 0.6369, + "step": 6368 + }, + { + "epoch": 0.99, + "grad_norm": 17.145687953391345, + "learning_rate": 1.5597085344043742e-05, + "loss": 0.6976, + "step": 6369 + }, + { + "epoch": 1.0, + "grad_norm": 27.72505787980115, + "learning_rate": 1.5595687763266378e-05, + "loss": 0.7195, + "step": 6370 + }, + { + "epoch": 1.0, + "grad_norm": 17.090530167227787, + "learning_rate": 1.5594290023350933e-05, + "loss": 0.6804, + "step": 6371 + }, + { + "epoch": 1.0, + "grad_norm": 23.81879930735099, + "learning_rate": 1.5592892124337145e-05, + "loss": 0.7284, + "step": 6372 + }, + { + "epoch": 1.0, + "grad_norm": 19.79588388477555, + "learning_rate": 1.559149406626478e-05, + "loss": 0.7053, + "step": 6373 + }, + { + "epoch": 1.0, + "grad_norm": 13.071680652907293, + "learning_rate": 1.5590095849173597e-05, + "loss": 0.6733, + "step": 6374 + }, + { + "epoch": 1.0, + "grad_norm": 12.068402627502268, + "learning_rate": 1.5588697473103355e-05, + "loss": 0.6621, + "step": 6375 + }, + { + "epoch": 1.0, + "grad_norm": 23.994208110113572, + "learning_rate": 1.5587298938093828e-05, + "loss": 0.6938, + "step": 6376 + }, + { + "epoch": 1.0, + "grad_norm": 27.89978263360765, + "learning_rate": 1.5585900244184785e-05, + "loss": 0.7703, + "step": 6377 + }, + { + "epoch": 1.0, + "grad_norm": 22.50729676979544, + "learning_rate": 1.5584501391416008e-05, + "loss": 0.6832, + "step": 6378 + }, + { + "epoch": 1.0, + "grad_norm": 15.379438339225747, + "learning_rate": 1.558310237982728e-05, + "loss": 0.6573, + "step": 6379 + }, + { + "epoch": 1.0, + "grad_norm": 20.45984504218609, + "learning_rate": 1.558170320945838e-05, + "loss": 0.6878, + "step": 6380 + }, + { + "epoch": 1.0, + "grad_norm": 20.599490474879698, + "learning_rate": 1.5580303880349113e-05, + "loss": 0.6284, + "step": 6381 + }, + { + "epoch": 1.0, + "grad_norm": 20.223205712944264, + "learning_rate": 1.5578904392539268e-05, + "loss": 0.6712, + "step": 6382 + }, + { + "epoch": 1.0, + "grad_norm": 35.117554979449, + "learning_rate": 1.557750474606864e-05, + "loss": 0.7686, + "step": 6383 + }, + { + "epoch": 1.0, + "grad_norm": 18.909667976630182, + "learning_rate": 1.5576104940977045e-05, + "loss": 0.6952, + "step": 6384 + }, + { + "epoch": 1.0, + "grad_norm": 21.715535244308136, + "learning_rate": 1.5574704977304286e-05, + "loss": 0.7224, + "step": 6385 + }, + { + "epoch": 1.0, + "grad_norm": 17.4192327668256, + "learning_rate": 1.557330485509018e-05, + "loss": 0.6875, + "step": 6386 + }, + { + "epoch": 1.0, + "grad_norm": 19.048427537679707, + "learning_rate": 1.5571904574374543e-05, + "loss": 0.7214, + "step": 6387 + }, + { + "epoch": 1.0, + "grad_norm": 17.260240005029573, + "learning_rate": 1.55705041351972e-05, + "loss": 0.7188, + "step": 6388 + }, + { + "epoch": 1.0, + "grad_norm": 20.199730289557138, + "learning_rate": 1.5569103537597972e-05, + "loss": 0.7141, + "step": 6389 + }, + { + "epoch": 1.0, + "grad_norm": 25.794480954963547, + "learning_rate": 1.55677027816167e-05, + "loss": 0.736, + "step": 6390 + }, + { + "epoch": 1.0, + "grad_norm": 18.265690492589275, + "learning_rate": 1.5566301867293223e-05, + "loss": 0.6804, + "step": 6391 + }, + { + "epoch": 1.0, + "grad_norm": 18.674947584000783, + "learning_rate": 1.5564900794667372e-05, + "loss": 0.6766, + "step": 6392 + }, + { + "epoch": 1.0, + "grad_norm": 14.604538746373462, + "learning_rate": 1.5563499563779002e-05, + "loss": 
0.7749, + "step": 6393 + }, + { + "epoch": 1.0, + "grad_norm": 12.775019820371973, + "learning_rate": 1.5562098174667957e-05, + "loss": 0.6711, + "step": 6394 + }, + { + "epoch": 1.0, + "grad_norm": 27.382511781573122, + "learning_rate": 1.556069662737409e-05, + "loss": 0.8826, + "step": 6395 + }, + { + "epoch": 1.0, + "grad_norm": 17.75612464784674, + "learning_rate": 1.5559294921937272e-05, + "loss": 0.7504, + "step": 6396 + }, + { + "epoch": 1.0, + "grad_norm": 18.36544359471965, + "learning_rate": 1.555789305839735e-05, + "loss": 0.5571, + "step": 6397 + }, + { + "epoch": 1.0, + "grad_norm": 15.937426314464096, + "learning_rate": 1.5556491036794204e-05, + "loss": 0.6322, + "step": 6398 + }, + { + "epoch": 1.0, + "grad_norm": 12.087657573041733, + "learning_rate": 1.5555088857167703e-05, + "loss": 0.6066, + "step": 6399 + }, + { + "epoch": 1.0, + "grad_norm": 19.06324289781649, + "learning_rate": 1.5553686519557726e-05, + "loss": 0.7523, + "step": 6400 + }, + { + "epoch": 1.0, + "grad_norm": 12.220169261789898, + "learning_rate": 1.5552284024004154e-05, + "loss": 0.5992, + "step": 6401 + }, + { + "epoch": 1.0, + "grad_norm": 8.19145050819094, + "learning_rate": 1.555088137054687e-05, + "loss": 0.6211, + "step": 6402 + }, + { + "epoch": 1.0, + "grad_norm": 16.2437545282828, + "learning_rate": 1.554947855922577e-05, + "loss": 0.7152, + "step": 6403 + }, + { + "epoch": 1.0, + "grad_norm": 27.16781968871853, + "learning_rate": 1.5548075590080745e-05, + "loss": 0.7184, + "step": 6404 + }, + { + "epoch": 1.0, + "grad_norm": 28.54908774558578, + "learning_rate": 1.5546672463151695e-05, + "loss": 0.6511, + "step": 6405 + }, + { + "epoch": 1.0, + "grad_norm": 62.210625402081355, + "learning_rate": 1.5545269178478523e-05, + "loss": 0.6291, + "step": 6406 + }, + { + "epoch": 1.0, + "grad_norm": 16.581450136064625, + "learning_rate": 1.554386573610114e-05, + "loss": 0.6526, + "step": 6407 + }, + { + "epoch": 1.0, + "grad_norm": 15.038554100261715, + "learning_rate": 1.554246213605946e-05, + "loss": 0.6116, + "step": 6408 + }, + { + "epoch": 1.0, + "grad_norm": 19.306176302260706, + "learning_rate": 1.55410583783934e-05, + "loss": 0.7377, + "step": 6409 + }, + { + "epoch": 1.0, + "grad_norm": 29.4897848532468, + "learning_rate": 1.5539654463142878e-05, + "loss": 0.6722, + "step": 6410 + }, + { + "epoch": 1.0, + "grad_norm": 24.97828358986796, + "learning_rate": 1.5538250390347825e-05, + "loss": 0.7174, + "step": 6411 + }, + { + "epoch": 1.0, + "grad_norm": 20.532038375590826, + "learning_rate": 1.5536846160048172e-05, + "loss": 0.6915, + "step": 6412 + }, + { + "epoch": 1.0, + "grad_norm": 30.187267639221538, + "learning_rate": 1.553544177228385e-05, + "loss": 0.6716, + "step": 6413 + }, + { + "epoch": 1.0, + "grad_norm": 17.411902530744527, + "learning_rate": 1.5534037227094807e-05, + "loss": 0.6789, + "step": 6414 + }, + { + "epoch": 1.0, + "grad_norm": 21.86502560255622, + "learning_rate": 1.553263252452098e-05, + "loss": 0.7738, + "step": 6415 + }, + { + "epoch": 1.0, + "grad_norm": 17.6030213927747, + "learning_rate": 1.553122766460232e-05, + "loss": 0.6946, + "step": 6416 + }, + { + "epoch": 1.0, + "grad_norm": 22.28326880563784, + "learning_rate": 1.552982264737878e-05, + "loss": 0.7725, + "step": 6417 + }, + { + "epoch": 1.0, + "grad_norm": 20.524584318138132, + "learning_rate": 1.5528417472890324e-05, + "loss": 0.6993, + "step": 6418 + }, + { + "epoch": 1.0, + "grad_norm": 19.205418527364696, + "learning_rate": 1.5527012141176904e-05, + "loss": 0.6616, + "step": 6419 + }, + { + "epoch": 
1.0, + "grad_norm": 12.703448832232999, + "learning_rate": 1.5525606652278493e-05, + "loss": 0.6215, + "step": 6420 + }, + { + "epoch": 1.0, + "grad_norm": 20.331133644745545, + "learning_rate": 1.5524201006235063e-05, + "loss": 0.7806, + "step": 6421 + }, + { + "epoch": 1.0, + "grad_norm": 11.713294211999813, + "learning_rate": 1.5522795203086584e-05, + "loss": 0.7002, + "step": 6422 + }, + { + "epoch": 1.0, + "grad_norm": 25.876963129760735, + "learning_rate": 1.5521389242873044e-05, + "loss": 0.6968, + "step": 6423 + }, + { + "epoch": 1.0, + "grad_norm": 17.581924863610826, + "learning_rate": 1.551998312563442e-05, + "loss": 0.6997, + "step": 6424 + }, + { + "epoch": 1.0, + "grad_norm": 15.814924504690554, + "learning_rate": 1.551857685141071e-05, + "loss": 0.6691, + "step": 6425 + }, + { + "epoch": 1.0, + "grad_norm": 19.574550622803667, + "learning_rate": 1.5517170420241897e-05, + "loss": 0.7012, + "step": 6426 + }, + { + "epoch": 1.0, + "grad_norm": 17.427129710839353, + "learning_rate": 1.551576383216799e-05, + "loss": 0.6208, + "step": 6427 + }, + { + "epoch": 1.0, + "grad_norm": 16.742300797727502, + "learning_rate": 1.5514357087228985e-05, + "loss": 0.7013, + "step": 6428 + }, + { + "epoch": 1.0, + "grad_norm": 12.469801133051082, + "learning_rate": 1.551295018546489e-05, + "loss": 0.556, + "step": 6429 + }, + { + "epoch": 1.0, + "grad_norm": 20.812763396449327, + "learning_rate": 1.5511543126915713e-05, + "loss": 0.6248, + "step": 6430 + }, + { + "epoch": 1.0, + "grad_norm": 25.462182426274687, + "learning_rate": 1.5510135911621474e-05, + "loss": 0.8001, + "step": 6431 + }, + { + "epoch": 1.0, + "grad_norm": 17.442260031825697, + "learning_rate": 1.5508728539622196e-05, + "loss": 0.6867, + "step": 6432 + }, + { + "epoch": 1.0, + "grad_norm": 12.667367790946475, + "learning_rate": 1.5507321010957903e-05, + "loss": 0.6411, + "step": 6433 + }, + { + "epoch": 1.0, + "grad_norm": 26.82405009618746, + "learning_rate": 1.550591332566862e-05, + "loss": 0.8276, + "step": 6434 + }, + { + "epoch": 1.01, + "grad_norm": 28.317963098612843, + "learning_rate": 1.550450548379438e-05, + "loss": 0.678, + "step": 6435 + }, + { + "epoch": 1.01, + "grad_norm": 20.657453142697225, + "learning_rate": 1.5503097485375228e-05, + "loss": 0.635, + "step": 6436 + }, + { + "epoch": 1.01, + "grad_norm": 17.13644110396231, + "learning_rate": 1.5501689330451203e-05, + "loss": 0.6308, + "step": 6437 + }, + { + "epoch": 1.01, + "grad_norm": 17.74563325137112, + "learning_rate": 1.5500281019062347e-05, + "loss": 0.6649, + "step": 6438 + }, + { + "epoch": 1.01, + "grad_norm": 21.28041175908318, + "learning_rate": 1.5498872551248722e-05, + "loss": 0.7186, + "step": 6439 + }, + { + "epoch": 1.01, + "grad_norm": 11.692231973433241, + "learning_rate": 1.5497463927050375e-05, + "loss": 0.6133, + "step": 6440 + }, + { + "epoch": 1.01, + "grad_norm": 24.996811089384508, + "learning_rate": 1.5496055146507368e-05, + "loss": 0.6893, + "step": 6441 + }, + { + "epoch": 1.01, + "grad_norm": 18.039620496579285, + "learning_rate": 1.5494646209659775e-05, + "loss": 0.6556, + "step": 6442 + }, + { + "epoch": 1.01, + "grad_norm": 18.81479343267713, + "learning_rate": 1.549323711654765e-05, + "loss": 0.6677, + "step": 6443 + }, + { + "epoch": 1.01, + "grad_norm": 16.438537720370615, + "learning_rate": 1.549182786721108e-05, + "loss": 0.5729, + "step": 6444 + }, + { + "epoch": 1.01, + "grad_norm": 13.437771074477322, + "learning_rate": 1.5490418461690137e-05, + "loss": 0.6372, + "step": 6445 + }, + { + "epoch": 1.01, + "grad_norm": 
23.703700780947685, + "learning_rate": 1.5489008900024903e-05, + "loss": 0.6979, + "step": 6446 + }, + { + "epoch": 1.01, + "grad_norm": 12.84046898071911, + "learning_rate": 1.5487599182255467e-05, + "loss": 0.7347, + "step": 6447 + }, + { + "epoch": 1.01, + "grad_norm": 25.446927423152637, + "learning_rate": 1.5486189308421922e-05, + "loss": 0.7067, + "step": 6448 + }, + { + "epoch": 1.01, + "grad_norm": 15.861818366852788, + "learning_rate": 1.5484779278564363e-05, + "loss": 0.739, + "step": 6449 + }, + { + "epoch": 1.01, + "grad_norm": 19.445507919874448, + "learning_rate": 1.5483369092722888e-05, + "loss": 0.6521, + "step": 6450 + }, + { + "epoch": 1.01, + "grad_norm": 19.52276352149899, + "learning_rate": 1.5481958750937605e-05, + "loss": 0.7105, + "step": 6451 + }, + { + "epoch": 1.01, + "grad_norm": 19.278302783551332, + "learning_rate": 1.548054825324862e-05, + "loss": 0.7706, + "step": 6452 + }, + { + "epoch": 1.01, + "grad_norm": 12.19005566113112, + "learning_rate": 1.547913759969605e-05, + "loss": 0.6834, + "step": 6453 + }, + { + "epoch": 1.01, + "grad_norm": 17.101977136359437, + "learning_rate": 1.5477726790320012e-05, + "loss": 0.6386, + "step": 6454 + }, + { + "epoch": 1.01, + "grad_norm": 20.9440510386669, + "learning_rate": 1.547631582516063e-05, + "loss": 0.6406, + "step": 6455 + }, + { + "epoch": 1.01, + "grad_norm": 14.728639640331524, + "learning_rate": 1.5474904704258027e-05, + "loss": 0.5915, + "step": 6456 + }, + { + "epoch": 1.01, + "grad_norm": 24.502157493094778, + "learning_rate": 1.5473493427652337e-05, + "loss": 0.708, + "step": 6457 + }, + { + "epoch": 1.01, + "grad_norm": 16.493871406810868, + "learning_rate": 1.54720819953837e-05, + "loss": 0.69, + "step": 6458 + }, + { + "epoch": 1.01, + "grad_norm": 17.440185088053983, + "learning_rate": 1.547067040749225e-05, + "loss": 0.702, + "step": 6459 + }, + { + "epoch": 1.01, + "grad_norm": 21.26024440494361, + "learning_rate": 1.546925866401813e-05, + "loss": 0.7452, + "step": 6460 + }, + { + "epoch": 1.01, + "grad_norm": 30.164224661248184, + "learning_rate": 1.5467846765001496e-05, + "loss": 0.7826, + "step": 6461 + }, + { + "epoch": 1.01, + "grad_norm": 20.484245985135512, + "learning_rate": 1.54664347104825e-05, + "loss": 0.6766, + "step": 6462 + }, + { + "epoch": 1.01, + "grad_norm": 21.16069597947086, + "learning_rate": 1.5465022500501294e-05, + "loss": 0.6465, + "step": 6463 + }, + { + "epoch": 1.01, + "grad_norm": 20.559388702718227, + "learning_rate": 1.5463610135098048e-05, + "loss": 0.7666, + "step": 6464 + }, + { + "epoch": 1.01, + "grad_norm": 16.008984004839487, + "learning_rate": 1.5462197614312926e-05, + "loss": 0.7048, + "step": 6465 + }, + { + "epoch": 1.01, + "grad_norm": 17.238208929654018, + "learning_rate": 1.54607849381861e-05, + "loss": 0.6362, + "step": 6466 + }, + { + "epoch": 1.01, + "grad_norm": 19.94097853826782, + "learning_rate": 1.5459372106757742e-05, + "loss": 0.7061, + "step": 6467 + }, + { + "epoch": 1.01, + "grad_norm": 17.899308479426832, + "learning_rate": 1.5457959120068036e-05, + "loss": 0.6635, + "step": 6468 + }, + { + "epoch": 1.01, + "grad_norm": 14.943075280311621, + "learning_rate": 1.5456545978157168e-05, + "loss": 0.6428, + "step": 6469 + }, + { + "epoch": 1.01, + "grad_norm": 19.13668959818432, + "learning_rate": 1.545513268106532e-05, + "loss": 0.6835, + "step": 6470 + }, + { + "epoch": 1.01, + "grad_norm": 57.91829494776299, + "learning_rate": 1.5453719228832692e-05, + "loss": 0.6759, + "step": 6471 + }, + { + "epoch": 1.01, + "grad_norm": 17.221148865546716, 
+ "learning_rate": 1.5452305621499483e-05, + "loss": 0.7349, + "step": 6472 + }, + { + "epoch": 1.01, + "grad_norm": 17.936886695518524, + "learning_rate": 1.5450891859105884e-05, + "loss": 0.7431, + "step": 6473 + }, + { + "epoch": 1.01, + "grad_norm": 15.768308261848315, + "learning_rate": 1.5449477941692114e-05, + "loss": 0.6369, + "step": 6474 + }, + { + "epoch": 1.01, + "grad_norm": 23.73339342343991, + "learning_rate": 1.544806386929838e-05, + "loss": 0.7103, + "step": 6475 + }, + { + "epoch": 1.01, + "grad_norm": 19.01587585681051, + "learning_rate": 1.5446649641964895e-05, + "loss": 0.634, + "step": 6476 + }, + { + "epoch": 1.01, + "grad_norm": 13.830770775546783, + "learning_rate": 1.544523525973188e-05, + "loss": 0.7456, + "step": 6477 + }, + { + "epoch": 1.01, + "grad_norm": 15.09747496837828, + "learning_rate": 1.544382072263956e-05, + "loss": 0.6198, + "step": 6478 + }, + { + "epoch": 1.01, + "grad_norm": 16.985267152490426, + "learning_rate": 1.5442406030728166e-05, + "loss": 0.6579, + "step": 6479 + }, + { + "epoch": 1.01, + "grad_norm": 13.73792426638232, + "learning_rate": 1.5440991184037924e-05, + "loss": 0.6999, + "step": 6480 + }, + { + "epoch": 1.01, + "grad_norm": 21.700077622907656, + "learning_rate": 1.5439576182609077e-05, + "loss": 0.7502, + "step": 6481 + }, + { + "epoch": 1.01, + "grad_norm": 22.47298910821798, + "learning_rate": 1.5438161026481866e-05, + "loss": 0.6358, + "step": 6482 + }, + { + "epoch": 1.01, + "grad_norm": 26.84028716228371, + "learning_rate": 1.5436745715696535e-05, + "loss": 0.744, + "step": 6483 + }, + { + "epoch": 1.01, + "grad_norm": 31.05960894616586, + "learning_rate": 1.543533025029334e-05, + "loss": 0.7034, + "step": 6484 + }, + { + "epoch": 1.01, + "grad_norm": 23.039469146826423, + "learning_rate": 1.5433914630312527e-05, + "loss": 0.6496, + "step": 6485 + }, + { + "epoch": 1.01, + "grad_norm": 14.360632751056993, + "learning_rate": 1.5432498855794364e-05, + "loss": 0.6024, + "step": 6486 + }, + { + "epoch": 1.01, + "grad_norm": 17.719325236245606, + "learning_rate": 1.543108292677911e-05, + "loss": 0.6599, + "step": 6487 + }, + { + "epoch": 1.01, + "grad_norm": 17.95426578067958, + "learning_rate": 1.5429666843307035e-05, + "loss": 0.6399, + "step": 6488 + }, + { + "epoch": 1.01, + "grad_norm": 13.729858326264344, + "learning_rate": 1.5428250605418414e-05, + "loss": 0.6643, + "step": 6489 + }, + { + "epoch": 1.01, + "grad_norm": 21.274789936001337, + "learning_rate": 1.542683421315352e-05, + "loss": 0.6547, + "step": 6490 + }, + { + "epoch": 1.01, + "grad_norm": 18.05461939242172, + "learning_rate": 1.5425417666552635e-05, + "loss": 0.6833, + "step": 6491 + }, + { + "epoch": 1.01, + "grad_norm": 19.758131550316673, + "learning_rate": 1.5424000965656042e-05, + "loss": 0.707, + "step": 6492 + }, + { + "epoch": 1.01, + "grad_norm": 12.878210758802147, + "learning_rate": 1.542258411050404e-05, + "loss": 0.6852, + "step": 6493 + }, + { + "epoch": 1.01, + "grad_norm": 17.76383856789012, + "learning_rate": 1.5421167101136917e-05, + "loss": 0.6823, + "step": 6494 + }, + { + "epoch": 1.01, + "grad_norm": 21.582764347105275, + "learning_rate": 1.5419749937594967e-05, + "loss": 0.7516, + "step": 6495 + }, + { + "epoch": 1.01, + "grad_norm": 16.840335613120832, + "learning_rate": 1.5418332619918507e-05, + "loss": 0.6507, + "step": 6496 + }, + { + "epoch": 1.01, + "grad_norm": 17.363139546935002, + "learning_rate": 1.541691514814783e-05, + "loss": 0.6675, + "step": 6497 + }, + { + "epoch": 1.01, + "grad_norm": 23.466826583583153, + 
"learning_rate": 1.541549752232326e-05, + "loss": 0.7122, + "step": 6498 + }, + { + "epoch": 1.02, + "grad_norm": 17.442843736145985, + "learning_rate": 1.541407974248511e-05, + "loss": 0.6983, + "step": 6499 + }, + { + "epoch": 1.02, + "grad_norm": 16.599299483498584, + "learning_rate": 1.5412661808673694e-05, + "loss": 0.6219, + "step": 6500 + }, + { + "epoch": 1.02, + "grad_norm": 12.347073533096834, + "learning_rate": 1.5411243720929342e-05, + "loss": 0.6531, + "step": 6501 + }, + { + "epoch": 1.02, + "grad_norm": 21.872325090621768, + "learning_rate": 1.5409825479292388e-05, + "loss": 0.6819, + "step": 6502 + }, + { + "epoch": 1.02, + "grad_norm": 12.181120917833676, + "learning_rate": 1.5408407083803162e-05, + "loss": 0.6597, + "step": 6503 + }, + { + "epoch": 1.02, + "grad_norm": 16.683661268486603, + "learning_rate": 1.5406988534502002e-05, + "loss": 0.6862, + "step": 6504 + }, + { + "epoch": 1.02, + "grad_norm": 28.451001613869806, + "learning_rate": 1.5405569831429247e-05, + "loss": 0.6581, + "step": 6505 + }, + { + "epoch": 1.02, + "grad_norm": 23.754498773432474, + "learning_rate": 1.5404150974625254e-05, + "loss": 0.7414, + "step": 6506 + }, + { + "epoch": 1.02, + "grad_norm": 20.520233758083858, + "learning_rate": 1.5402731964130365e-05, + "loss": 0.6471, + "step": 6507 + }, + { + "epoch": 1.02, + "grad_norm": 16.36371344976559, + "learning_rate": 1.5401312799984943e-05, + "loss": 0.5999, + "step": 6508 + }, + { + "epoch": 1.02, + "grad_norm": 21.94754732621128, + "learning_rate": 1.539989348222934e-05, + "loss": 0.6978, + "step": 6509 + }, + { + "epoch": 1.02, + "grad_norm": 16.382679134675595, + "learning_rate": 1.5398474010903927e-05, + "loss": 0.7457, + "step": 6510 + }, + { + "epoch": 1.02, + "grad_norm": 16.69835778231014, + "learning_rate": 1.5397054386049072e-05, + "loss": 0.6914, + "step": 6511 + }, + { + "epoch": 1.02, + "grad_norm": 18.07778185244271, + "learning_rate": 1.5395634607705145e-05, + "loss": 0.6611, + "step": 6512 + }, + { + "epoch": 1.02, + "grad_norm": 20.33259452530617, + "learning_rate": 1.539421467591253e-05, + "loss": 0.7345, + "step": 6513 + }, + { + "epoch": 1.02, + "grad_norm": 16.867619554428977, + "learning_rate": 1.5392794590711605e-05, + "loss": 0.6659, + "step": 6514 + }, + { + "epoch": 1.02, + "grad_norm": 14.37208631571248, + "learning_rate": 1.5391374352142752e-05, + "loss": 0.6822, + "step": 6515 + }, + { + "epoch": 1.02, + "grad_norm": 19.438450565047656, + "learning_rate": 1.538995396024637e-05, + "loss": 0.714, + "step": 6516 + }, + { + "epoch": 1.02, + "grad_norm": 27.06842814846263, + "learning_rate": 1.5388533415062848e-05, + "loss": 0.5554, + "step": 6517 + }, + { + "epoch": 1.02, + "grad_norm": 27.561295686305304, + "learning_rate": 1.5387112716632594e-05, + "loss": 0.6792, + "step": 6518 + }, + { + "epoch": 1.02, + "grad_norm": 16.8032830643274, + "learning_rate": 1.5385691864995998e-05, + "loss": 0.6521, + "step": 6519 + }, + { + "epoch": 1.02, + "grad_norm": 15.67270866080271, + "learning_rate": 1.5384270860193477e-05, + "loss": 0.6598, + "step": 6520 + }, + { + "epoch": 1.02, + "grad_norm": 10.561964563900505, + "learning_rate": 1.5382849702265447e-05, + "loss": 0.6752, + "step": 6521 + }, + { + "epoch": 1.02, + "grad_norm": 28.522186184979937, + "learning_rate": 1.538142839125232e-05, + "loss": 0.7348, + "step": 6522 + }, + { + "epoch": 1.02, + "grad_norm": 24.088772860199423, + "learning_rate": 1.538000692719451e-05, + "loss": 0.7279, + "step": 6523 + }, + { + "epoch": 1.02, + "grad_norm": 19.539333245538113, + 
"learning_rate": 1.5378585310132458e-05, + "loss": 0.7308, + "step": 6524 + }, + { + "epoch": 1.02, + "grad_norm": 14.89188983239716, + "learning_rate": 1.5377163540106582e-05, + "loss": 0.6546, + "step": 6525 + }, + { + "epoch": 1.02, + "grad_norm": 14.668147509891359, + "learning_rate": 1.5375741617157324e-05, + "loss": 0.6505, + "step": 6526 + }, + { + "epoch": 1.02, + "grad_norm": 15.063535418279532, + "learning_rate": 1.5374319541325114e-05, + "loss": 0.7976, + "step": 6527 + }, + { + "epoch": 1.02, + "grad_norm": 16.380752379373973, + "learning_rate": 1.5372897312650406e-05, + "loss": 0.7622, + "step": 6528 + }, + { + "epoch": 1.02, + "grad_norm": 26.530187130588722, + "learning_rate": 1.5371474931173638e-05, + "loss": 0.7456, + "step": 6529 + }, + { + "epoch": 1.02, + "grad_norm": 19.945712580293577, + "learning_rate": 1.5370052396935268e-05, + "loss": 0.7097, + "step": 6530 + }, + { + "epoch": 1.02, + "grad_norm": 16.622305601523422, + "learning_rate": 1.5368629709975745e-05, + "loss": 0.7191, + "step": 6531 + }, + { + "epoch": 1.02, + "grad_norm": 11.725128365690392, + "learning_rate": 1.5367206870335536e-05, + "loss": 0.6044, + "step": 6532 + }, + { + "epoch": 1.02, + "grad_norm": 13.28518469812207, + "learning_rate": 1.5365783878055103e-05, + "loss": 0.6909, + "step": 6533 + }, + { + "epoch": 1.02, + "grad_norm": 22.472772158677415, + "learning_rate": 1.5364360733174916e-05, + "loss": 0.6582, + "step": 6534 + }, + { + "epoch": 1.02, + "grad_norm": 17.542231148209925, + "learning_rate": 1.536293743573545e-05, + "loss": 0.6751, + "step": 6535 + }, + { + "epoch": 1.02, + "grad_norm": 16.47603248188668, + "learning_rate": 1.5361513985777175e-05, + "loss": 0.6323, + "step": 6536 + }, + { + "epoch": 1.02, + "grad_norm": 16.942876716357123, + "learning_rate": 1.536009038334058e-05, + "loss": 0.6564, + "step": 6537 + }, + { + "epoch": 1.02, + "grad_norm": 16.88624460991718, + "learning_rate": 1.5358666628466154e-05, + "loss": 0.6908, + "step": 6538 + }, + { + "epoch": 1.02, + "grad_norm": 14.097432343778436, + "learning_rate": 1.535724272119438e-05, + "loss": 0.6468, + "step": 6539 + }, + { + "epoch": 1.02, + "grad_norm": 16.533646284586915, + "learning_rate": 1.535581866156576e-05, + "loss": 0.6792, + "step": 6540 + }, + { + "epoch": 1.02, + "grad_norm": 16.373033787711773, + "learning_rate": 1.535439444962079e-05, + "loss": 0.6745, + "step": 6541 + }, + { + "epoch": 1.02, + "grad_norm": 16.30349281989993, + "learning_rate": 1.535297008539997e-05, + "loss": 0.6978, + "step": 6542 + }, + { + "epoch": 1.02, + "grad_norm": 18.16978710001151, + "learning_rate": 1.5351545568943818e-05, + "loss": 0.7, + "step": 6543 + }, + { + "epoch": 1.02, + "grad_norm": 20.654371946662845, + "learning_rate": 1.5350120900292833e-05, + "loss": 0.6959, + "step": 6544 + }, + { + "epoch": 1.02, + "grad_norm": 15.19838647026938, + "learning_rate": 1.5348696079487547e-05, + "loss": 0.7448, + "step": 6545 + }, + { + "epoch": 1.02, + "grad_norm": 19.045007251105677, + "learning_rate": 1.534727110656847e-05, + "loss": 0.6897, + "step": 6546 + }, + { + "epoch": 1.02, + "grad_norm": 26.992515178849985, + "learning_rate": 1.534584598157613e-05, + "loss": 0.7007, + "step": 6547 + }, + { + "epoch": 1.02, + "grad_norm": 19.780094752167326, + "learning_rate": 1.5344420704551058e-05, + "loss": 0.647, + "step": 6548 + }, + { + "epoch": 1.02, + "grad_norm": 18.98574558852048, + "learning_rate": 1.534299527553379e-05, + "loss": 0.6448, + "step": 6549 + }, + { + "epoch": 1.02, + "grad_norm": 22.06125811020938, + 
"learning_rate": 1.534156969456486e-05, + "loss": 0.683, + "step": 6550 + }, + { + "epoch": 1.02, + "grad_norm": 18.182842330895035, + "learning_rate": 1.5340143961684813e-05, + "loss": 0.6714, + "step": 6551 + }, + { + "epoch": 1.02, + "grad_norm": 21.8774928974933, + "learning_rate": 1.5338718076934195e-05, + "loss": 0.7379, + "step": 6552 + }, + { + "epoch": 1.02, + "grad_norm": 13.287842793250592, + "learning_rate": 1.5337292040353555e-05, + "loss": 0.6541, + "step": 6553 + }, + { + "epoch": 1.02, + "grad_norm": 16.490607593925503, + "learning_rate": 1.5335865851983456e-05, + "loss": 0.6495, + "step": 6554 + }, + { + "epoch": 1.02, + "grad_norm": 16.359068395388427, + "learning_rate": 1.5334439511864453e-05, + "loss": 0.7372, + "step": 6555 + }, + { + "epoch": 1.02, + "grad_norm": 15.46820770526866, + "learning_rate": 1.533301302003711e-05, + "loss": 0.6705, + "step": 6556 + }, + { + "epoch": 1.02, + "grad_norm": 18.33734881215871, + "learning_rate": 1.5331586376541997e-05, + "loss": 0.7181, + "step": 6557 + }, + { + "epoch": 1.02, + "grad_norm": 26.14072399439881, + "learning_rate": 1.5330159581419687e-05, + "loss": 0.8224, + "step": 6558 + }, + { + "epoch": 1.02, + "grad_norm": 22.334981350149064, + "learning_rate": 1.532873263471075e-05, + "loss": 0.8401, + "step": 6559 + }, + { + "epoch": 1.02, + "grad_norm": 18.26068576114692, + "learning_rate": 1.5327305536455786e-05, + "loss": 0.7529, + "step": 6560 + }, + { + "epoch": 1.02, + "grad_norm": 21.45708392616138, + "learning_rate": 1.5325878286695362e-05, + "loss": 0.6644, + "step": 6561 + }, + { + "epoch": 1.02, + "grad_norm": 19.169113866224432, + "learning_rate": 1.5324450885470078e-05, + "loss": 0.6645, + "step": 6562 + }, + { + "epoch": 1.03, + "grad_norm": 26.22562730353031, + "learning_rate": 1.5323023332820517e-05, + "loss": 0.7402, + "step": 6563 + }, + { + "epoch": 1.03, + "grad_norm": 14.552562716194432, + "learning_rate": 1.5321595628787297e-05, + "loss": 0.623, + "step": 6564 + }, + { + "epoch": 1.03, + "grad_norm": 21.47646629420186, + "learning_rate": 1.5320167773411004e-05, + "loss": 0.6306, + "step": 6565 + }, + { + "epoch": 1.03, + "grad_norm": 19.24444339180681, + "learning_rate": 1.5318739766732255e-05, + "loss": 0.6996, + "step": 6566 + }, + { + "epoch": 1.03, + "grad_norm": 13.445902434090621, + "learning_rate": 1.5317311608791656e-05, + "loss": 0.5829, + "step": 6567 + }, + { + "epoch": 1.03, + "grad_norm": 16.940875641665073, + "learning_rate": 1.5315883299629825e-05, + "loss": 0.6225, + "step": 6568 + }, + { + "epoch": 1.03, + "grad_norm": 16.88560715845788, + "learning_rate": 1.531445483928738e-05, + "loss": 0.656, + "step": 6569 + }, + { + "epoch": 1.03, + "grad_norm": 17.068383973428357, + "learning_rate": 1.531302622780496e-05, + "loss": 0.7427, + "step": 6570 + }, + { + "epoch": 1.03, + "grad_norm": 19.72585627283115, + "learning_rate": 1.5311597465223173e-05, + "loss": 0.6016, + "step": 6571 + }, + { + "epoch": 1.03, + "grad_norm": 18.24498861073896, + "learning_rate": 1.531016855158266e-05, + "loss": 0.6832, + "step": 6572 + }, + { + "epoch": 1.03, + "grad_norm": 16.651745441165993, + "learning_rate": 1.5308739486924064e-05, + "loss": 0.7824, + "step": 6573 + }, + { + "epoch": 1.03, + "grad_norm": 20.544739473709814, + "learning_rate": 1.530731027128802e-05, + "loss": 0.7702, + "step": 6574 + }, + { + "epoch": 1.03, + "grad_norm": 17.67457569626726, + "learning_rate": 1.5305880904715177e-05, + "loss": 0.6796, + "step": 6575 + }, + { + "epoch": 1.03, + "grad_norm": 17.914617544826108, + "learning_rate": 
1.530445138724619e-05, + "loss": 0.6844, + "step": 6576 + }, + { + "epoch": 1.03, + "grad_norm": 12.174804068189223, + "learning_rate": 1.53030217189217e-05, + "loss": 0.6634, + "step": 6577 + }, + { + "epoch": 1.03, + "grad_norm": 24.172706655658924, + "learning_rate": 1.5301591899782376e-05, + "loss": 0.6934, + "step": 6578 + }, + { + "epoch": 1.03, + "grad_norm": 12.762627190097165, + "learning_rate": 1.5300161929868886e-05, + "loss": 0.7323, + "step": 6579 + }, + { + "epoch": 1.03, + "grad_norm": 15.243914724334267, + "learning_rate": 1.5298731809221886e-05, + "loss": 0.6541, + "step": 6580 + }, + { + "epoch": 1.03, + "grad_norm": 21.242675904007758, + "learning_rate": 1.5297301537882055e-05, + "loss": 0.7242, + "step": 6581 + }, + { + "epoch": 1.03, + "grad_norm": 17.2606390878493, + "learning_rate": 1.5295871115890066e-05, + "loss": 0.7013, + "step": 6582 + }, + { + "epoch": 1.03, + "grad_norm": 22.768022064124573, + "learning_rate": 1.52944405432866e-05, + "loss": 0.7681, + "step": 6583 + }, + { + "epoch": 1.03, + "grad_norm": 20.229685884432772, + "learning_rate": 1.5293009820112345e-05, + "loss": 0.7712, + "step": 6584 + }, + { + "epoch": 1.03, + "grad_norm": 23.52838174973978, + "learning_rate": 1.5291578946407985e-05, + "loss": 0.7438, + "step": 6585 + }, + { + "epoch": 1.03, + "grad_norm": 25.288551781476045, + "learning_rate": 1.5290147922214212e-05, + "loss": 0.736, + "step": 6586 + }, + { + "epoch": 1.03, + "grad_norm": 18.289904916199976, + "learning_rate": 1.5288716747571735e-05, + "loss": 0.7038, + "step": 6587 + }, + { + "epoch": 1.03, + "grad_norm": 17.69966445189918, + "learning_rate": 1.5287285422521238e-05, + "loss": 0.6714, + "step": 6588 + }, + { + "epoch": 1.03, + "grad_norm": 24.50131665980954, + "learning_rate": 1.5285853947103446e-05, + "loss": 0.682, + "step": 6589 + }, + { + "epoch": 1.03, + "grad_norm": 18.383778157224622, + "learning_rate": 1.5284422321359054e-05, + "loss": 0.6443, + "step": 6590 + }, + { + "epoch": 1.03, + "grad_norm": 16.396775495388287, + "learning_rate": 1.5282990545328782e-05, + "loss": 0.6823, + "step": 6591 + }, + { + "epoch": 1.03, + "grad_norm": 21.96607641078348, + "learning_rate": 1.5281558619053353e-05, + "loss": 0.7675, + "step": 6592 + }, + { + "epoch": 1.03, + "grad_norm": 21.713466718998358, + "learning_rate": 1.5280126542573484e-05, + "loss": 0.7566, + "step": 6593 + }, + { + "epoch": 1.03, + "grad_norm": 22.48877243942122, + "learning_rate": 1.5278694315929906e-05, + "loss": 0.816, + "step": 6594 + }, + { + "epoch": 1.03, + "grad_norm": 18.04490309713365, + "learning_rate": 1.527726193916335e-05, + "loss": 0.6772, + "step": 6595 + }, + { + "epoch": 1.03, + "grad_norm": 22.346228167348286, + "learning_rate": 1.5275829412314547e-05, + "loss": 0.6375, + "step": 6596 + }, + { + "epoch": 1.03, + "grad_norm": 19.052906827864728, + "learning_rate": 1.5274396735424244e-05, + "loss": 0.664, + "step": 6597 + }, + { + "epoch": 1.03, + "grad_norm": 19.91804025692924, + "learning_rate": 1.5272963908533184e-05, + "loss": 0.6322, + "step": 6598 + }, + { + "epoch": 1.03, + "grad_norm": 17.815045410789473, + "learning_rate": 1.5271530931682116e-05, + "loss": 0.7188, + "step": 6599 + }, + { + "epoch": 1.03, + "grad_norm": 17.794946515808697, + "learning_rate": 1.5270097804911794e-05, + "loss": 0.7057, + "step": 6600 + }, + { + "epoch": 1.03, + "grad_norm": 18.807764284614507, + "learning_rate": 1.526866452826297e-05, + "loss": 0.7643, + "step": 6601 + }, + { + "epoch": 1.03, + "grad_norm": 19.98111661753733, + "learning_rate": 
1.526723110177641e-05, + "loss": 0.6432, + "step": 6602 + }, + { + "epoch": 1.03, + "grad_norm": 15.439288304104851, + "learning_rate": 1.5265797525492878e-05, + "loss": 0.7841, + "step": 6603 + }, + { + "epoch": 1.03, + "grad_norm": 19.356104020241695, + "learning_rate": 1.526436379945315e-05, + "loss": 0.6421, + "step": 6604 + }, + { + "epoch": 1.03, + "grad_norm": 17.35263202201593, + "learning_rate": 1.5262929923697986e-05, + "loss": 0.739, + "step": 6605 + }, + { + "epoch": 1.03, + "grad_norm": 15.834669111293513, + "learning_rate": 1.526149589826818e-05, + "loss": 0.6509, + "step": 6606 + }, + { + "epoch": 1.03, + "grad_norm": 16.840777058710355, + "learning_rate": 1.5260061723204506e-05, + "loss": 0.7117, + "step": 6607 + }, + { + "epoch": 1.03, + "grad_norm": 17.481618551021654, + "learning_rate": 1.5258627398547754e-05, + "loss": 0.6568, + "step": 6608 + }, + { + "epoch": 1.03, + "grad_norm": 17.80696078053773, + "learning_rate": 1.5257192924338715e-05, + "loss": 0.6788, + "step": 6609 + }, + { + "epoch": 1.03, + "grad_norm": 17.284141943746125, + "learning_rate": 1.5255758300618184e-05, + "loss": 0.7012, + "step": 6610 + }, + { + "epoch": 1.03, + "grad_norm": 23.337081794919776, + "learning_rate": 1.5254323527426964e-05, + "loss": 0.6966, + "step": 6611 + }, + { + "epoch": 1.03, + "grad_norm": 13.714745961228799, + "learning_rate": 1.5252888604805853e-05, + "loss": 0.586, + "step": 6612 + }, + { + "epoch": 1.03, + "grad_norm": 25.742324016596896, + "learning_rate": 1.5251453532795663e-05, + "loss": 0.7441, + "step": 6613 + }, + { + "epoch": 1.03, + "grad_norm": 26.83939254152738, + "learning_rate": 1.5250018311437212e-05, + "loss": 0.7659, + "step": 6614 + }, + { + "epoch": 1.03, + "grad_norm": 14.680834414274619, + "learning_rate": 1.5248582940771306e-05, + "loss": 0.7014, + "step": 6615 + }, + { + "epoch": 1.03, + "grad_norm": 16.952776197281985, + "learning_rate": 1.5247147420838774e-05, + "loss": 0.7046, + "step": 6616 + }, + { + "epoch": 1.03, + "grad_norm": 23.594671322979067, + "learning_rate": 1.5245711751680438e-05, + "loss": 0.7383, + "step": 6617 + }, + { + "epoch": 1.03, + "grad_norm": 23.595836471358915, + "learning_rate": 1.524427593333713e-05, + "loss": 0.6726, + "step": 6618 + }, + { + "epoch": 1.03, + "grad_norm": 14.10038194654784, + "learning_rate": 1.524283996584968e-05, + "loss": 0.6717, + "step": 6619 + }, + { + "epoch": 1.03, + "grad_norm": 16.71986501598634, + "learning_rate": 1.524140384925893e-05, + "loss": 0.681, + "step": 6620 + }, + { + "epoch": 1.03, + "grad_norm": 22.48799162203246, + "learning_rate": 1.5239967583605719e-05, + "loss": 0.6372, + "step": 6621 + }, + { + "epoch": 1.03, + "grad_norm": 20.083716389523538, + "learning_rate": 1.5238531168930894e-05, + "loss": 0.6297, + "step": 6622 + }, + { + "epoch": 1.03, + "grad_norm": 16.48344413153892, + "learning_rate": 1.5237094605275311e-05, + "loss": 0.6848, + "step": 6623 + }, + { + "epoch": 1.03, + "grad_norm": 19.151736963888386, + "learning_rate": 1.5235657892679818e-05, + "loss": 0.7363, + "step": 6624 + }, + { + "epoch": 1.03, + "grad_norm": 21.90782770211155, + "learning_rate": 1.523422103118528e-05, + "loss": 0.6543, + "step": 6625 + }, + { + "epoch": 1.03, + "grad_norm": 28.46399421910907, + "learning_rate": 1.5232784020832556e-05, + "loss": 0.6591, + "step": 6626 + }, + { + "epoch": 1.04, + "grad_norm": 23.43643280905722, + "learning_rate": 1.5231346861662518e-05, + "loss": 0.7411, + "step": 6627 + }, + { + "epoch": 1.04, + "grad_norm": 20.530727861752872, + "learning_rate": 
1.5229909553716032e-05, + "loss": 0.6726, + "step": 6628 + }, + { + "epoch": 1.04, + "grad_norm": 19.942418488788668, + "learning_rate": 1.5228472097033979e-05, + "loss": 0.7344, + "step": 6629 + }, + { + "epoch": 1.04, + "grad_norm": 15.667152133851134, + "learning_rate": 1.5227034491657235e-05, + "loss": 0.6592, + "step": 6630 + }, + { + "epoch": 1.04, + "grad_norm": 22.635822310648, + "learning_rate": 1.5225596737626695e-05, + "loss": 0.7934, + "step": 6631 + }, + { + "epoch": 1.04, + "grad_norm": 19.207611311074004, + "learning_rate": 1.5224158834983234e-05, + "loss": 0.6699, + "step": 6632 + }, + { + "epoch": 1.04, + "grad_norm": 30.850181217816736, + "learning_rate": 1.5222720783767755e-05, + "loss": 0.6371, + "step": 6633 + }, + { + "epoch": 1.04, + "grad_norm": 22.081180126253468, + "learning_rate": 1.522128258402115e-05, + "loss": 0.6717, + "step": 6634 + }, + { + "epoch": 1.04, + "grad_norm": 13.79146770108563, + "learning_rate": 1.5219844235784326e-05, + "loss": 0.6334, + "step": 6635 + }, + { + "epoch": 1.04, + "grad_norm": 15.525451205576074, + "learning_rate": 1.5218405739098183e-05, + "loss": 0.6134, + "step": 6636 + }, + { + "epoch": 1.04, + "grad_norm": 19.671374621780412, + "learning_rate": 1.5216967094003633e-05, + "loss": 0.6702, + "step": 6637 + }, + { + "epoch": 1.04, + "grad_norm": 17.72162200427294, + "learning_rate": 1.5215528300541593e-05, + "loss": 0.6383, + "step": 6638 + }, + { + "epoch": 1.04, + "grad_norm": 20.024548302186307, + "learning_rate": 1.5214089358752979e-05, + "loss": 0.7096, + "step": 6639 + }, + { + "epoch": 1.04, + "grad_norm": 18.861294975777007, + "learning_rate": 1.5212650268678711e-05, + "loss": 0.7022, + "step": 6640 + }, + { + "epoch": 1.04, + "grad_norm": 13.541978602912499, + "learning_rate": 1.5211211030359719e-05, + "loss": 0.6873, + "step": 6641 + }, + { + "epoch": 1.04, + "grad_norm": 18.905069210285983, + "learning_rate": 1.5209771643836937e-05, + "loss": 0.7557, + "step": 6642 + }, + { + "epoch": 1.04, + "grad_norm": 13.949176372184894, + "learning_rate": 1.5208332109151295e-05, + "loss": 0.7601, + "step": 6643 + }, + { + "epoch": 1.04, + "grad_norm": 14.168979017672584, + "learning_rate": 1.5206892426343736e-05, + "loss": 0.6954, + "step": 6644 + }, + { + "epoch": 1.04, + "grad_norm": 13.81713949689299, + "learning_rate": 1.5205452595455198e-05, + "loss": 0.6907, + "step": 6645 + }, + { + "epoch": 1.04, + "grad_norm": 12.719257058760887, + "learning_rate": 1.5204012616526637e-05, + "loss": 0.6739, + "step": 6646 + }, + { + "epoch": 1.04, + "grad_norm": 19.61770966904786, + "learning_rate": 1.5202572489598998e-05, + "loss": 0.713, + "step": 6647 + }, + { + "epoch": 1.04, + "grad_norm": 16.00530202475908, + "learning_rate": 1.5201132214713249e-05, + "loss": 0.7099, + "step": 6648 + }, + { + "epoch": 1.04, + "grad_norm": 15.75996954853697, + "learning_rate": 1.5199691791910333e-05, + "loss": 0.7586, + "step": 6649 + }, + { + "epoch": 1.04, + "grad_norm": 21.906346931052607, + "learning_rate": 1.5198251221231232e-05, + "loss": 0.7403, + "step": 6650 + }, + { + "epoch": 1.04, + "grad_norm": 13.89578159295, + "learning_rate": 1.51968105027169e-05, + "loss": 0.6511, + "step": 6651 + }, + { + "epoch": 1.04, + "grad_norm": 25.635977239549483, + "learning_rate": 1.5195369636408322e-05, + "loss": 0.7877, + "step": 6652 + }, + { + "epoch": 1.04, + "grad_norm": 16.404539385126125, + "learning_rate": 1.519392862234647e-05, + "loss": 0.5677, + "step": 6653 + }, + { + "epoch": 1.04, + "grad_norm": 20.03915373203264, + "learning_rate": 
1.5192487460572327e-05, + "loss": 0.7096, + "step": 6654 + }, + { + "epoch": 1.04, + "grad_norm": 18.096133952151117, + "learning_rate": 1.5191046151126876e-05, + "loss": 0.7408, + "step": 6655 + }, + { + "epoch": 1.04, + "grad_norm": 16.70834941359846, + "learning_rate": 1.5189604694051111e-05, + "loss": 0.6242, + "step": 6656 + }, + { + "epoch": 1.04, + "grad_norm": 17.453598259372303, + "learning_rate": 1.5188163089386022e-05, + "loss": 0.6273, + "step": 6657 + }, + { + "epoch": 1.04, + "grad_norm": 17.856765901077996, + "learning_rate": 1.5186721337172614e-05, + "loss": 0.6754, + "step": 6658 + }, + { + "epoch": 1.04, + "grad_norm": 19.852426534897962, + "learning_rate": 1.5185279437451881e-05, + "loss": 0.7661, + "step": 6659 + }, + { + "epoch": 1.04, + "grad_norm": 10.698674770647406, + "learning_rate": 1.5183837390264833e-05, + "loss": 0.6751, + "step": 6660 + }, + { + "epoch": 1.04, + "grad_norm": 21.809745454791692, + "learning_rate": 1.5182395195652487e-05, + "loss": 0.6831, + "step": 6661 + }, + { + "epoch": 1.04, + "grad_norm": 14.689005475298254, + "learning_rate": 1.518095285365585e-05, + "loss": 0.6706, + "step": 6662 + }, + { + "epoch": 1.04, + "grad_norm": 14.43701011788531, + "learning_rate": 1.5179510364315945e-05, + "loss": 0.6119, + "step": 6663 + }, + { + "epoch": 1.04, + "grad_norm": 14.094374598430834, + "learning_rate": 1.5178067727673797e-05, + "loss": 0.6518, + "step": 6664 + }, + { + "epoch": 1.04, + "grad_norm": 19.6080674989379, + "learning_rate": 1.517662494377043e-05, + "loss": 0.7072, + "step": 6665 + }, + { + "epoch": 1.04, + "grad_norm": 21.187186529997053, + "learning_rate": 1.5175182012646877e-05, + "loss": 0.6354, + "step": 6666 + }, + { + "epoch": 1.04, + "grad_norm": 12.323068565102645, + "learning_rate": 1.5173738934344178e-05, + "loss": 0.59, + "step": 6667 + }, + { + "epoch": 1.04, + "grad_norm": 17.293548632210413, + "learning_rate": 1.5172295708903368e-05, + "loss": 0.7842, + "step": 6668 + }, + { + "epoch": 1.04, + "grad_norm": 17.640274083217776, + "learning_rate": 1.5170852336365494e-05, + "loss": 0.6901, + "step": 6669 + }, + { + "epoch": 1.04, + "grad_norm": 28.419465718381375, + "learning_rate": 1.5169408816771605e-05, + "loss": 0.6439, + "step": 6670 + }, + { + "epoch": 1.04, + "grad_norm": 24.974292597093342, + "learning_rate": 1.5167965150162753e-05, + "loss": 0.6539, + "step": 6671 + }, + { + "epoch": 1.04, + "grad_norm": 17.167377202645568, + "learning_rate": 1.5166521336579996e-05, + "loss": 0.6445, + "step": 6672 + }, + { + "epoch": 1.04, + "grad_norm": 13.12948753071187, + "learning_rate": 1.5165077376064394e-05, + "loss": 0.6076, + "step": 6673 + }, + { + "epoch": 1.04, + "grad_norm": 25.008362679118434, + "learning_rate": 1.516363326865701e-05, + "loss": 0.7471, + "step": 6674 + }, + { + "epoch": 1.04, + "grad_norm": 20.526367413997082, + "learning_rate": 1.5162189014398923e-05, + "loss": 0.7342, + "step": 6675 + }, + { + "epoch": 1.04, + "grad_norm": 28.163684441387435, + "learning_rate": 1.5160744613331196e-05, + "loss": 0.6954, + "step": 6676 + }, + { + "epoch": 1.04, + "grad_norm": 27.353512265996105, + "learning_rate": 1.5159300065494912e-05, + "loss": 0.8157, + "step": 6677 + }, + { + "epoch": 1.04, + "grad_norm": 17.217737586967633, + "learning_rate": 1.515785537093115e-05, + "loss": 0.7461, + "step": 6678 + }, + { + "epoch": 1.04, + "grad_norm": 14.31736374654659, + "learning_rate": 1.5156410529681e-05, + "loss": 0.73, + "step": 6679 + }, + { + "epoch": 1.04, + "grad_norm": 28.388256885394128, + "learning_rate": 
1.5154965541785554e-05, + "loss": 0.6695, + "step": 6680 + }, + { + "epoch": 1.04, + "grad_norm": 19.050016300627473, + "learning_rate": 1.51535204072859e-05, + "loss": 0.6573, + "step": 6681 + }, + { + "epoch": 1.04, + "grad_norm": 18.55720826704021, + "learning_rate": 1.515207512622314e-05, + "loss": 0.6634, + "step": 6682 + }, + { + "epoch": 1.04, + "grad_norm": 17.29011207442339, + "learning_rate": 1.515062969863838e-05, + "loss": 0.712, + "step": 6683 + }, + { + "epoch": 1.04, + "grad_norm": 17.737950136059283, + "learning_rate": 1.514918412457272e-05, + "loss": 0.7008, + "step": 6684 + }, + { + "epoch": 1.04, + "grad_norm": 15.189511755562448, + "learning_rate": 1.5147738404067279e-05, + "loss": 0.6075, + "step": 6685 + }, + { + "epoch": 1.04, + "grad_norm": 20.262714429181987, + "learning_rate": 1.5146292537163168e-05, + "loss": 0.6746, + "step": 6686 + }, + { + "epoch": 1.04, + "grad_norm": 17.636403245099917, + "learning_rate": 1.5144846523901508e-05, + "loss": 0.7292, + "step": 6687 + }, + { + "epoch": 1.04, + "grad_norm": 17.09836613456225, + "learning_rate": 1.5143400364323424e-05, + "loss": 0.6327, + "step": 6688 + }, + { + "epoch": 1.04, + "grad_norm": 16.782418149998275, + "learning_rate": 1.5141954058470041e-05, + "loss": 0.6115, + "step": 6689 + }, + { + "epoch": 1.04, + "grad_norm": 24.250980679970176, + "learning_rate": 1.5140507606382496e-05, + "loss": 0.7613, + "step": 6690 + }, + { + "epoch": 1.05, + "grad_norm": 17.284831227840556, + "learning_rate": 1.5139061008101915e-05, + "loss": 0.6453, + "step": 6691 + }, + { + "epoch": 1.05, + "grad_norm": 30.36856014956248, + "learning_rate": 1.5137614263669451e-05, + "loss": 0.7937, + "step": 6692 + }, + { + "epoch": 1.05, + "grad_norm": 27.24527272262378, + "learning_rate": 1.5136167373126239e-05, + "loss": 0.7274, + "step": 6693 + }, + { + "epoch": 1.05, + "grad_norm": 27.303322647975598, + "learning_rate": 1.5134720336513433e-05, + "loss": 0.6853, + "step": 6694 + }, + { + "epoch": 1.05, + "grad_norm": 13.510953574668997, + "learning_rate": 1.5133273153872186e-05, + "loss": 0.6834, + "step": 6695 + }, + { + "epoch": 1.05, + "grad_norm": 17.93447534633121, + "learning_rate": 1.5131825825243651e-05, + "loss": 0.6649, + "step": 6696 + }, + { + "epoch": 1.05, + "grad_norm": 12.860722140994472, + "learning_rate": 1.5130378350668996e-05, + "loss": 0.724, + "step": 6697 + }, + { + "epoch": 1.05, + "grad_norm": 17.81838713951837, + "learning_rate": 1.5128930730189377e-05, + "loss": 0.6582, + "step": 6698 + }, + { + "epoch": 1.05, + "grad_norm": 19.702926904739805, + "learning_rate": 1.5127482963845968e-05, + "loss": 0.7186, + "step": 6699 + }, + { + "epoch": 1.05, + "grad_norm": 29.46530149899022, + "learning_rate": 1.5126035051679946e-05, + "loss": 0.7247, + "step": 6700 + }, + { + "epoch": 1.05, + "grad_norm": 17.876680611502888, + "learning_rate": 1.5124586993732483e-05, + "loss": 0.7157, + "step": 6701 + }, + { + "epoch": 1.05, + "grad_norm": 15.079512483867564, + "learning_rate": 1.5123138790044767e-05, + "loss": 0.6618, + "step": 6702 + }, + { + "epoch": 1.05, + "grad_norm": 16.463695534052768, + "learning_rate": 1.5121690440657977e-05, + "loss": 0.6404, + "step": 6703 + }, + { + "epoch": 1.05, + "grad_norm": 18.689674616117216, + "learning_rate": 1.512024194561331e-05, + "loss": 0.7385, + "step": 6704 + }, + { + "epoch": 1.05, + "grad_norm": 13.592732213822007, + "learning_rate": 1.5118793304951955e-05, + "loss": 0.6684, + "step": 6705 + }, + { + "epoch": 1.05, + "grad_norm": 15.832450765138345, + "learning_rate": 
1.5117344518715115e-05, + "loss": 0.6442, + "step": 6706 + }, + { + "epoch": 1.05, + "grad_norm": 22.80305041043113, + "learning_rate": 1.511589558694399e-05, + "loss": 0.6963, + "step": 6707 + }, + { + "epoch": 1.05, + "grad_norm": 19.56725129076971, + "learning_rate": 1.5114446509679783e-05, + "loss": 0.7202, + "step": 6708 + }, + { + "epoch": 1.05, + "grad_norm": 18.534769864974145, + "learning_rate": 1.5112997286963715e-05, + "loss": 0.6659, + "step": 6709 + }, + { + "epoch": 1.05, + "grad_norm": 17.26281817306663, + "learning_rate": 1.5111547918836994e-05, + "loss": 0.6833, + "step": 6710 + }, + { + "epoch": 1.05, + "grad_norm": 17.506014359211708, + "learning_rate": 1.5110098405340838e-05, + "loss": 0.6883, + "step": 6711 + }, + { + "epoch": 1.05, + "grad_norm": 12.400366255735799, + "learning_rate": 1.5108648746516475e-05, + "loss": 0.6027, + "step": 6712 + }, + { + "epoch": 1.05, + "grad_norm": 12.308729778132559, + "learning_rate": 1.5107198942405128e-05, + "loss": 0.6443, + "step": 6713 + }, + { + "epoch": 1.05, + "grad_norm": 22.632348640831825, + "learning_rate": 1.5105748993048032e-05, + "loss": 0.7034, + "step": 6714 + }, + { + "epoch": 1.05, + "grad_norm": 21.452454173052796, + "learning_rate": 1.5104298898486423e-05, + "loss": 0.7327, + "step": 6715 + }, + { + "epoch": 1.05, + "grad_norm": 24.4268050596143, + "learning_rate": 1.5102848658761535e-05, + "loss": 0.739, + "step": 6716 + }, + { + "epoch": 1.05, + "grad_norm": 14.614295445019987, + "learning_rate": 1.5101398273914623e-05, + "loss": 0.6471, + "step": 6717 + }, + { + "epoch": 1.05, + "grad_norm": 20.486895905852382, + "learning_rate": 1.509994774398692e-05, + "loss": 0.7768, + "step": 6718 + }, + { + "epoch": 1.05, + "grad_norm": 22.19455573091933, + "learning_rate": 1.5098497069019693e-05, + "loss": 0.794, + "step": 6719 + }, + { + "epoch": 1.05, + "grad_norm": 27.27445002489998, + "learning_rate": 1.5097046249054187e-05, + "loss": 0.7013, + "step": 6720 + }, + { + "epoch": 1.05, + "grad_norm": 15.220557597984667, + "learning_rate": 1.5095595284131674e-05, + "loss": 0.7077, + "step": 6721 + }, + { + "epoch": 1.05, + "grad_norm": 16.517174452293958, + "learning_rate": 1.5094144174293406e-05, + "loss": 0.6298, + "step": 6722 + }, + { + "epoch": 1.05, + "grad_norm": 16.60737034958921, + "learning_rate": 1.509269291958066e-05, + "loss": 0.7088, + "step": 6723 + }, + { + "epoch": 1.05, + "grad_norm": 15.646346818630947, + "learning_rate": 1.5091241520034705e-05, + "loss": 0.7184, + "step": 6724 + }, + { + "epoch": 1.05, + "grad_norm": 21.632190975079098, + "learning_rate": 1.508978997569682e-05, + "loss": 0.6687, + "step": 6725 + }, + { + "epoch": 1.05, + "grad_norm": 21.012759653616886, + "learning_rate": 1.5088338286608287e-05, + "loss": 0.7022, + "step": 6726 + }, + { + "epoch": 1.05, + "grad_norm": 21.635143369369796, + "learning_rate": 1.508688645281039e-05, + "loss": 0.7386, + "step": 6727 + }, + { + "epoch": 1.05, + "grad_norm": 13.433604095379572, + "learning_rate": 1.5085434474344416e-05, + "loss": 0.6502, + "step": 6728 + }, + { + "epoch": 1.05, + "grad_norm": 27.862097299616963, + "learning_rate": 1.508398235125166e-05, + "loss": 0.6882, + "step": 6729 + }, + { + "epoch": 1.05, + "grad_norm": 20.576157288620845, + "learning_rate": 1.5082530083573421e-05, + "loss": 0.7651, + "step": 6730 + }, + { + "epoch": 1.05, + "grad_norm": 19.890640653962166, + "learning_rate": 1.5081077671350998e-05, + "loss": 0.7228, + "step": 6731 + }, + { + "epoch": 1.05, + "grad_norm": 15.263088205884493, + "learning_rate": 
1.5079625114625701e-05, + "loss": 0.6851, + "step": 6732 + }, + { + "epoch": 1.05, + "grad_norm": 16.431106882462366, + "learning_rate": 1.5078172413438836e-05, + "loss": 0.6179, + "step": 6733 + }, + { + "epoch": 1.05, + "grad_norm": 21.37782525191314, + "learning_rate": 1.507671956783172e-05, + "loss": 0.7089, + "step": 6734 + }, + { + "epoch": 1.05, + "grad_norm": 17.936303321450204, + "learning_rate": 1.5075266577845668e-05, + "loss": 0.6976, + "step": 6735 + }, + { + "epoch": 1.05, + "grad_norm": 18.63058520146432, + "learning_rate": 1.5073813443522005e-05, + "loss": 0.6687, + "step": 6736 + }, + { + "epoch": 1.05, + "grad_norm": 14.870071077366125, + "learning_rate": 1.5072360164902052e-05, + "loss": 0.5894, + "step": 6737 + }, + { + "epoch": 1.05, + "grad_norm": 26.251515197166363, + "learning_rate": 1.5070906742027145e-05, + "loss": 0.7214, + "step": 6738 + }, + { + "epoch": 1.05, + "grad_norm": 32.10956064806358, + "learning_rate": 1.5069453174938618e-05, + "loss": 0.6366, + "step": 6739 + }, + { + "epoch": 1.05, + "grad_norm": 16.493373581536158, + "learning_rate": 1.5067999463677807e-05, + "loss": 0.7083, + "step": 6740 + }, + { + "epoch": 1.05, + "grad_norm": 16.7424982863581, + "learning_rate": 1.5066545608286056e-05, + "loss": 0.7417, + "step": 6741 + }, + { + "epoch": 1.05, + "grad_norm": 23.184725924503475, + "learning_rate": 1.5065091608804712e-05, + "loss": 0.7666, + "step": 6742 + }, + { + "epoch": 1.05, + "grad_norm": 18.12462423938081, + "learning_rate": 1.5063637465275127e-05, + "loss": 0.7064, + "step": 6743 + }, + { + "epoch": 1.05, + "grad_norm": 21.929054207961638, + "learning_rate": 1.5062183177738652e-05, + "loss": 0.6309, + "step": 6744 + }, + { + "epoch": 1.05, + "grad_norm": 18.467203612465383, + "learning_rate": 1.5060728746236648e-05, + "loss": 0.6572, + "step": 6745 + }, + { + "epoch": 1.05, + "grad_norm": 23.7776663156548, + "learning_rate": 1.5059274170810483e-05, + "loss": 0.7138, + "step": 6746 + }, + { + "epoch": 1.05, + "grad_norm": 16.73596376469182, + "learning_rate": 1.505781945150152e-05, + "loss": 0.6955, + "step": 6747 + }, + { + "epoch": 1.05, + "grad_norm": 21.814254819269365, + "learning_rate": 1.5056364588351127e-05, + "loss": 0.6977, + "step": 6748 + }, + { + "epoch": 1.05, + "grad_norm": 12.190434135687397, + "learning_rate": 1.5054909581400683e-05, + "loss": 0.6426, + "step": 6749 + }, + { + "epoch": 1.05, + "grad_norm": 24.528145942382583, + "learning_rate": 1.505345443069157e-05, + "loss": 0.6864, + "step": 6750 + }, + { + "epoch": 1.05, + "grad_norm": 18.28437851602784, + "learning_rate": 1.505199913626517e-05, + "loss": 0.6202, + "step": 6751 + }, + { + "epoch": 1.05, + "grad_norm": 12.074780002212918, + "learning_rate": 1.5050543698162865e-05, + "loss": 0.6105, + "step": 6752 + }, + { + "epoch": 1.05, + "grad_norm": 17.03098004109674, + "learning_rate": 1.5049088116426056e-05, + "loss": 0.64, + "step": 6753 + }, + { + "epoch": 1.05, + "grad_norm": 14.2625640121314, + "learning_rate": 1.5047632391096132e-05, + "loss": 0.7421, + "step": 6754 + }, + { + "epoch": 1.06, + "grad_norm": 11.196997694040093, + "learning_rate": 1.5046176522214497e-05, + "loss": 0.6098, + "step": 6755 + }, + { + "epoch": 1.06, + "grad_norm": 21.544319961933265, + "learning_rate": 1.5044720509822553e-05, + "loss": 0.6651, + "step": 6756 + }, + { + "epoch": 1.06, + "grad_norm": 15.107893537816315, + "learning_rate": 1.5043264353961711e-05, + "loss": 0.6649, + "step": 6757 + }, + { + "epoch": 1.06, + "grad_norm": 16.688034368292268, + "learning_rate": 
1.5041808054673379e-05, + "loss": 0.6494, + "step": 6758 + }, + { + "epoch": 1.06, + "grad_norm": 27.510079136349486, + "learning_rate": 1.5040351611998975e-05, + "loss": 0.698, + "step": 6759 + }, + { + "epoch": 1.06, + "grad_norm": 26.915014561283886, + "learning_rate": 1.5038895025979919e-05, + "loss": 0.7072, + "step": 6760 + }, + { + "epoch": 1.06, + "grad_norm": 15.561983282825631, + "learning_rate": 1.503743829665764e-05, + "loss": 0.6981, + "step": 6761 + }, + { + "epoch": 1.06, + "grad_norm": 19.339591041413275, + "learning_rate": 1.503598142407356e-05, + "loss": 0.5992, + "step": 6762 + }, + { + "epoch": 1.06, + "grad_norm": 16.477260059333883, + "learning_rate": 1.5034524408269115e-05, + "loss": 0.6849, + "step": 6763 + }, + { + "epoch": 1.06, + "grad_norm": 22.58687186312458, + "learning_rate": 1.503306724928574e-05, + "loss": 0.7011, + "step": 6764 + }, + { + "epoch": 1.06, + "grad_norm": 19.31629383606017, + "learning_rate": 1.5031609947164876e-05, + "loss": 0.6922, + "step": 6765 + }, + { + "epoch": 1.06, + "grad_norm": 18.86701573990697, + "learning_rate": 1.503015250194797e-05, + "loss": 0.6886, + "step": 6766 + }, + { + "epoch": 1.06, + "grad_norm": 16.194127112590767, + "learning_rate": 1.5028694913676469e-05, + "loss": 0.6928, + "step": 6767 + }, + { + "epoch": 1.06, + "grad_norm": 26.367849941550556, + "learning_rate": 1.5027237182391825e-05, + "loss": 0.7701, + "step": 6768 + }, + { + "epoch": 1.06, + "grad_norm": 17.472990147525692, + "learning_rate": 1.5025779308135499e-05, + "loss": 0.792, + "step": 6769 + }, + { + "epoch": 1.06, + "grad_norm": 14.092443508716261, + "learning_rate": 1.5024321290948948e-05, + "loss": 0.7054, + "step": 6770 + }, + { + "epoch": 1.06, + "grad_norm": 20.347386482692883, + "learning_rate": 1.502286313087364e-05, + "loss": 0.6532, + "step": 6771 + }, + { + "epoch": 1.06, + "grad_norm": 14.677010330138664, + "learning_rate": 1.5021404827951039e-05, + "loss": 0.6664, + "step": 6772 + }, + { + "epoch": 1.06, + "grad_norm": 16.927849904431202, + "learning_rate": 1.5019946382222626e-05, + "loss": 0.6334, + "step": 6773 + }, + { + "epoch": 1.06, + "grad_norm": 16.654999269114985, + "learning_rate": 1.5018487793729875e-05, + "loss": 0.6635, + "step": 6774 + }, + { + "epoch": 1.06, + "grad_norm": 20.958678695046423, + "learning_rate": 1.5017029062514263e-05, + "loss": 0.6941, + "step": 6775 + }, + { + "epoch": 1.06, + "grad_norm": 18.987532676438875, + "learning_rate": 1.5015570188617283e-05, + "loss": 0.7844, + "step": 6776 + }, + { + "epoch": 1.06, + "grad_norm": 22.552739288482833, + "learning_rate": 1.5014111172080416e-05, + "loss": 0.604, + "step": 6777 + }, + { + "epoch": 1.06, + "grad_norm": 25.16805239729693, + "learning_rate": 1.5012652012945169e-05, + "loss": 0.6479, + "step": 6778 + }, + { + "epoch": 1.06, + "grad_norm": 32.77274945324569, + "learning_rate": 1.5011192711253021e-05, + "loss": 0.7088, + "step": 6779 + }, + { + "epoch": 1.06, + "grad_norm": 12.730486535911156, + "learning_rate": 1.5009733267045492e-05, + "loss": 0.631, + "step": 6780 + }, + { + "epoch": 1.06, + "grad_norm": 15.1626431667129, + "learning_rate": 1.5008273680364074e-05, + "loss": 0.7112, + "step": 6781 + }, + { + "epoch": 1.06, + "grad_norm": 18.652303983635978, + "learning_rate": 1.5006813951250286e-05, + "loss": 0.7087, + "step": 6782 + }, + { + "epoch": 1.06, + "grad_norm": 27.013477595308252, + "learning_rate": 1.5005354079745636e-05, + "loss": 0.8159, + "step": 6783 + }, + { + "epoch": 1.06, + "grad_norm": 18.46465218051016, + "learning_rate": 
1.5003894065891644e-05, + "loss": 0.7353, + "step": 6784 + }, + { + "epoch": 1.06, + "grad_norm": 24.82916107560699, + "learning_rate": 1.5002433909729832e-05, + "loss": 0.6299, + "step": 6785 + }, + { + "epoch": 1.06, + "grad_norm": 19.352828822555992, + "learning_rate": 1.5000973611301729e-05, + "loss": 0.8654, + "step": 6786 + }, + { + "epoch": 1.06, + "grad_norm": 25.386211684563552, + "learning_rate": 1.4999513170648856e-05, + "loss": 0.6896, + "step": 6787 + }, + { + "epoch": 1.06, + "grad_norm": 31.012217609550927, + "learning_rate": 1.4998052587812759e-05, + "loss": 0.6279, + "step": 6788 + }, + { + "epoch": 1.06, + "grad_norm": 44.16336133377703, + "learning_rate": 1.4996591862834965e-05, + "loss": 0.756, + "step": 6789 + }, + { + "epoch": 1.06, + "grad_norm": 15.678696744111086, + "learning_rate": 1.4995130995757029e-05, + "loss": 0.7094, + "step": 6790 + }, + { + "epoch": 1.06, + "grad_norm": 12.709091299218962, + "learning_rate": 1.499366998662048e-05, + "loss": 0.6427, + "step": 6791 + }, + { + "epoch": 1.06, + "grad_norm": 16.878654305948793, + "learning_rate": 1.4992208835466884e-05, + "loss": 0.725, + "step": 6792 + }, + { + "epoch": 1.06, + "grad_norm": 32.343372325258606, + "learning_rate": 1.4990747542337787e-05, + "loss": 0.7414, + "step": 6793 + }, + { + "epoch": 1.06, + "grad_norm": 17.317289096279897, + "learning_rate": 1.498928610727475e-05, + "loss": 0.6178, + "step": 6794 + }, + { + "epoch": 1.06, + "grad_norm": 13.479834453502043, + "learning_rate": 1.4987824530319335e-05, + "loss": 0.7074, + "step": 6795 + }, + { + "epoch": 1.06, + "grad_norm": 22.55327039355677, + "learning_rate": 1.4986362811513107e-05, + "loss": 0.6459, + "step": 6796 + }, + { + "epoch": 1.06, + "grad_norm": 41.87650463475585, + "learning_rate": 1.4984900950897638e-05, + "loss": 0.7785, + "step": 6797 + }, + { + "epoch": 1.06, + "grad_norm": 19.145241705308184, + "learning_rate": 1.4983438948514501e-05, + "loss": 0.7369, + "step": 6798 + }, + { + "epoch": 1.06, + "grad_norm": 44.90480650013255, + "learning_rate": 1.4981976804405278e-05, + "loss": 0.76, + "step": 6799 + }, + { + "epoch": 1.06, + "grad_norm": 16.24738458163355, + "learning_rate": 1.4980514518611546e-05, + "loss": 0.6368, + "step": 6800 + }, + { + "epoch": 1.06, + "grad_norm": 19.602200210767613, + "learning_rate": 1.49790520911749e-05, + "loss": 0.7114, + "step": 6801 + }, + { + "epoch": 1.06, + "grad_norm": 28.55989867119618, + "learning_rate": 1.497758952213692e-05, + "loss": 0.6931, + "step": 6802 + }, + { + "epoch": 1.06, + "grad_norm": 17.88186985496182, + "learning_rate": 1.4976126811539207e-05, + "loss": 0.697, + "step": 6803 + }, + { + "epoch": 1.06, + "grad_norm": 23.79902455808823, + "learning_rate": 1.4974663959423358e-05, + "loss": 0.6749, + "step": 6804 + }, + { + "epoch": 1.06, + "grad_norm": 15.462700658944087, + "learning_rate": 1.497320096583098e-05, + "loss": 0.7276, + "step": 6805 + }, + { + "epoch": 1.06, + "grad_norm": 20.465791524138826, + "learning_rate": 1.497173783080367e-05, + "loss": 0.7689, + "step": 6806 + }, + { + "epoch": 1.06, + "grad_norm": 15.511313547679306, + "learning_rate": 1.497027455438305e-05, + "loss": 0.5923, + "step": 6807 + }, + { + "epoch": 1.06, + "grad_norm": 14.123060524762046, + "learning_rate": 1.4968811136610724e-05, + "loss": 0.6836, + "step": 6808 + }, + { + "epoch": 1.06, + "grad_norm": 15.695988198598952, + "learning_rate": 1.4967347577528319e-05, + "loss": 0.7611, + "step": 6809 + }, + { + "epoch": 1.06, + "grad_norm": 13.851156695683098, + "learning_rate": 
1.4965883877177454e-05, + "loss": 0.6637, + "step": 6810 + }, + { + "epoch": 1.06, + "grad_norm": 27.71474204552171, + "learning_rate": 1.4964420035599756e-05, + "loss": 0.7271, + "step": 6811 + }, + { + "epoch": 1.06, + "grad_norm": 18.816600197094832, + "learning_rate": 1.4962956052836858e-05, + "loss": 0.7371, + "step": 6812 + }, + { + "epoch": 1.06, + "grad_norm": 14.377530578813502, + "learning_rate": 1.4961491928930394e-05, + "loss": 0.6727, + "step": 6813 + }, + { + "epoch": 1.06, + "grad_norm": 16.31630213409514, + "learning_rate": 1.4960027663921996e-05, + "loss": 0.6912, + "step": 6814 + }, + { + "epoch": 1.06, + "grad_norm": 17.73835271599576, + "learning_rate": 1.495856325785332e-05, + "loss": 0.7391, + "step": 6815 + }, + { + "epoch": 1.06, + "grad_norm": 20.810605633427603, + "learning_rate": 1.4957098710766e-05, + "loss": 0.6262, + "step": 6816 + }, + { + "epoch": 1.06, + "grad_norm": 14.876832552132404, + "learning_rate": 1.4955634022701695e-05, + "loss": 0.6713, + "step": 6817 + }, + { + "epoch": 1.06, + "grad_norm": 24.96261159479596, + "learning_rate": 1.4954169193702058e-05, + "loss": 0.6871, + "step": 6818 + }, + { + "epoch": 1.07, + "grad_norm": 16.196235310255712, + "learning_rate": 1.4952704223808745e-05, + "loss": 0.6898, + "step": 6819 + }, + { + "epoch": 1.07, + "grad_norm": 29.31788666994457, + "learning_rate": 1.4951239113063425e-05, + "loss": 0.7327, + "step": 6820 + }, + { + "epoch": 1.07, + "grad_norm": 20.302621169001725, + "learning_rate": 1.4949773861507754e-05, + "loss": 0.6682, + "step": 6821 + }, + { + "epoch": 1.07, + "grad_norm": 22.85392665305955, + "learning_rate": 1.4948308469183419e-05, + "loss": 0.7063, + "step": 6822 + }, + { + "epoch": 1.07, + "grad_norm": 22.082330922027204, + "learning_rate": 1.494684293613208e-05, + "loss": 0.666, + "step": 6823 + }, + { + "epoch": 1.07, + "grad_norm": 15.04374310965196, + "learning_rate": 1.4945377262395425e-05, + "loss": 0.6601, + "step": 6824 + }, + { + "epoch": 1.07, + "grad_norm": 16.389755900554096, + "learning_rate": 1.4943911448015129e-05, + "loss": 0.6056, + "step": 6825 + }, + { + "epoch": 1.07, + "grad_norm": 15.100140596913102, + "learning_rate": 1.4942445493032887e-05, + "loss": 0.6693, + "step": 6826 + }, + { + "epoch": 1.07, + "grad_norm": 13.478692802455363, + "learning_rate": 1.4940979397490386e-05, + "loss": 0.5297, + "step": 6827 + }, + { + "epoch": 1.07, + "grad_norm": 21.680751222512853, + "learning_rate": 1.493951316142932e-05, + "loss": 0.5804, + "step": 6828 + }, + { + "epoch": 1.07, + "grad_norm": 11.251738047120982, + "learning_rate": 1.493804678489139e-05, + "loss": 0.6372, + "step": 6829 + }, + { + "epoch": 1.07, + "grad_norm": 28.469253560709763, + "learning_rate": 1.4936580267918299e-05, + "loss": 0.8205, + "step": 6830 + }, + { + "epoch": 1.07, + "grad_norm": 16.202536620085596, + "learning_rate": 1.4935113610551751e-05, + "loss": 0.6718, + "step": 6831 + }, + { + "epoch": 1.07, + "grad_norm": 14.015048666122139, + "learning_rate": 1.4933646812833461e-05, + "loss": 0.6796, + "step": 6832 + }, + { + "epoch": 1.07, + "grad_norm": 15.696228174848482, + "learning_rate": 1.493217987480514e-05, + "loss": 0.7411, + "step": 6833 + }, + { + "epoch": 1.07, + "grad_norm": 14.884923190534414, + "learning_rate": 1.4930712796508511e-05, + "loss": 0.6621, + "step": 6834 + }, + { + "epoch": 1.07, + "grad_norm": 24.478785677170265, + "learning_rate": 1.4929245577985292e-05, + "loss": 0.7677, + "step": 6835 + }, + { + "epoch": 1.07, + "grad_norm": 22.19751390149639, + "learning_rate": 
1.4927778219277214e-05, + "loss": 0.6948, + "step": 6836 + }, + { + "epoch": 1.07, + "grad_norm": 19.090736125151793, + "learning_rate": 1.4926310720426005e-05, + "loss": 0.6565, + "step": 6837 + }, + { + "epoch": 1.07, + "grad_norm": 22.064623228708854, + "learning_rate": 1.4924843081473401e-05, + "loss": 0.7807, + "step": 6838 + }, + { + "epoch": 1.07, + "grad_norm": 24.472623109604786, + "learning_rate": 1.492337530246114e-05, + "loss": 0.6843, + "step": 6839 + }, + { + "epoch": 1.07, + "grad_norm": 14.504632696364414, + "learning_rate": 1.4921907383430964e-05, + "loss": 0.7186, + "step": 6840 + }, + { + "epoch": 1.07, + "grad_norm": 19.362482820556192, + "learning_rate": 1.4920439324424627e-05, + "loss": 0.6502, + "step": 6841 + }, + { + "epoch": 1.07, + "grad_norm": 13.316981648675004, + "learning_rate": 1.491897112548387e-05, + "loss": 0.6199, + "step": 6842 + }, + { + "epoch": 1.07, + "grad_norm": 19.835712021789067, + "learning_rate": 1.4917502786650451e-05, + "loss": 0.7515, + "step": 6843 + }, + { + "epoch": 1.07, + "grad_norm": 18.10075321578142, + "learning_rate": 1.4916034307966132e-05, + "loss": 0.7038, + "step": 6844 + }, + { + "epoch": 1.07, + "grad_norm": 17.994877366130115, + "learning_rate": 1.4914565689472671e-05, + "loss": 0.7293, + "step": 6845 + }, + { + "epoch": 1.07, + "grad_norm": 16.10547906115209, + "learning_rate": 1.4913096931211839e-05, + "loss": 0.6755, + "step": 6846 + }, + { + "epoch": 1.07, + "grad_norm": 20.547108326881645, + "learning_rate": 1.4911628033225402e-05, + "loss": 0.7262, + "step": 6847 + }, + { + "epoch": 1.07, + "grad_norm": 20.29024413453594, + "learning_rate": 1.4910158995555138e-05, + "loss": 0.6144, + "step": 6848 + }, + { + "epoch": 1.07, + "grad_norm": 25.26053868031611, + "learning_rate": 1.4908689818242826e-05, + "loss": 0.7477, + "step": 6849 + }, + { + "epoch": 1.07, + "grad_norm": 15.024619263793335, + "learning_rate": 1.4907220501330245e-05, + "loss": 0.6162, + "step": 6850 + }, + { + "epoch": 1.07, + "grad_norm": 15.205784198534877, + "learning_rate": 1.4905751044859185e-05, + "loss": 0.7409, + "step": 6851 + }, + { + "epoch": 1.07, + "grad_norm": 12.162922534434607, + "learning_rate": 1.4904281448871431e-05, + "loss": 0.6259, + "step": 6852 + }, + { + "epoch": 1.07, + "grad_norm": 15.590950670662059, + "learning_rate": 1.4902811713408784e-05, + "loss": 0.6334, + "step": 6853 + }, + { + "epoch": 1.07, + "grad_norm": 15.42584840697611, + "learning_rate": 1.4901341838513044e-05, + "loss": 0.7229, + "step": 6854 + }, + { + "epoch": 1.07, + "grad_norm": 20.918537038141352, + "learning_rate": 1.4899871824226004e-05, + "loss": 0.683, + "step": 6855 + }, + { + "epoch": 1.07, + "grad_norm": 18.438816199764084, + "learning_rate": 1.4898401670589478e-05, + "loss": 0.6605, + "step": 6856 + }, + { + "epoch": 1.07, + "grad_norm": 17.56973616255662, + "learning_rate": 1.4896931377645273e-05, + "loss": 0.6847, + "step": 6857 + }, + { + "epoch": 1.07, + "grad_norm": 19.68732531901476, + "learning_rate": 1.4895460945435205e-05, + "loss": 0.6687, + "step": 6858 + }, + { + "epoch": 1.07, + "grad_norm": 14.047093463976806, + "learning_rate": 1.489399037400109e-05, + "loss": 0.6741, + "step": 6859 + }, + { + "epoch": 1.07, + "grad_norm": 24.990936668935863, + "learning_rate": 1.4892519663384755e-05, + "loss": 0.6475, + "step": 6860 + }, + { + "epoch": 1.07, + "grad_norm": 17.909315085755992, + "learning_rate": 1.489104881362802e-05, + "loss": 0.6919, + "step": 6861 + }, + { + "epoch": 1.07, + "grad_norm": 15.86212752560986, + "learning_rate": 
1.4889577824772719e-05, + "loss": 0.6404, + "step": 6862 + }, + { + "epoch": 1.07, + "grad_norm": 17.620987791412155, + "learning_rate": 1.4888106696860684e-05, + "loss": 0.6714, + "step": 6863 + }, + { + "epoch": 1.07, + "grad_norm": 18.86707473784369, + "learning_rate": 1.4886635429933756e-05, + "loss": 0.6344, + "step": 6864 + }, + { + "epoch": 1.07, + "grad_norm": 21.752094637922625, + "learning_rate": 1.4885164024033771e-05, + "loss": 0.7448, + "step": 6865 + }, + { + "epoch": 1.07, + "grad_norm": 14.351449011788484, + "learning_rate": 1.4883692479202582e-05, + "loss": 0.6105, + "step": 6866 + }, + { + "epoch": 1.07, + "grad_norm": 28.396978297255078, + "learning_rate": 1.4882220795482037e-05, + "loss": 0.7575, + "step": 6867 + }, + { + "epoch": 1.07, + "grad_norm": 17.887766752180045, + "learning_rate": 1.4880748972913987e-05, + "loss": 0.7021, + "step": 6868 + }, + { + "epoch": 1.07, + "grad_norm": 19.62241245670707, + "learning_rate": 1.4879277011540289e-05, + "loss": 0.698, + "step": 6869 + }, + { + "epoch": 1.07, + "grad_norm": 27.94651450317409, + "learning_rate": 1.4877804911402809e-05, + "loss": 0.7187, + "step": 6870 + }, + { + "epoch": 1.07, + "grad_norm": 20.232966576535382, + "learning_rate": 1.4876332672543412e-05, + "loss": 0.7409, + "step": 6871 + }, + { + "epoch": 1.07, + "grad_norm": 22.66010890127837, + "learning_rate": 1.4874860295003965e-05, + "loss": 0.6974, + "step": 6872 + }, + { + "epoch": 1.07, + "grad_norm": 14.804826238581002, + "learning_rate": 1.4873387778826344e-05, + "loss": 0.61, + "step": 6873 + }, + { + "epoch": 1.07, + "grad_norm": 9.506525522659032, + "learning_rate": 1.4871915124052426e-05, + "loss": 0.5513, + "step": 6874 + }, + { + "epoch": 1.07, + "grad_norm": 24.77126192268069, + "learning_rate": 1.4870442330724088e-05, + "loss": 0.6856, + "step": 6875 + }, + { + "epoch": 1.07, + "grad_norm": 13.463418757623485, + "learning_rate": 1.4868969398883224e-05, + "loss": 0.6515, + "step": 6876 + }, + { + "epoch": 1.07, + "grad_norm": 26.449551904890306, + "learning_rate": 1.4867496328571716e-05, + "loss": 0.6854, + "step": 6877 + }, + { + "epoch": 1.07, + "grad_norm": 20.694815587233965, + "learning_rate": 1.4866023119831461e-05, + "loss": 0.7199, + "step": 6878 + }, + { + "epoch": 1.07, + "grad_norm": 18.633412048002835, + "learning_rate": 1.4864549772704353e-05, + "loss": 0.7222, + "step": 6879 + }, + { + "epoch": 1.07, + "grad_norm": 18.048754914532285, + "learning_rate": 1.4863076287232297e-05, + "loss": 0.6516, + "step": 6880 + }, + { + "epoch": 1.07, + "grad_norm": 20.338989017140193, + "learning_rate": 1.4861602663457195e-05, + "loss": 0.702, + "step": 6881 + }, + { + "epoch": 1.07, + "grad_norm": 23.753267535878702, + "learning_rate": 1.4860128901420958e-05, + "loss": 0.7323, + "step": 6882 + }, + { + "epoch": 1.08, + "grad_norm": 14.328161696134313, + "learning_rate": 1.4858655001165498e-05, + "loss": 0.557, + "step": 6883 + }, + { + "epoch": 1.08, + "grad_norm": 19.837909827298652, + "learning_rate": 1.4857180962732731e-05, + "loss": 0.7135, + "step": 6884 + }, + { + "epoch": 1.08, + "grad_norm": 16.3281535883019, + "learning_rate": 1.4855706786164579e-05, + "loss": 0.7526, + "step": 6885 + }, + { + "epoch": 1.08, + "grad_norm": 23.341651623031126, + "learning_rate": 1.4854232471502967e-05, + "loss": 0.6791, + "step": 6886 + }, + { + "epoch": 1.08, + "grad_norm": 21.346725633794186, + "learning_rate": 1.4852758018789826e-05, + "loss": 0.6881, + "step": 6887 + }, + { + "epoch": 1.08, + "grad_norm": 17.20036394845212, + "learning_rate": 
1.4851283428067083e-05, + "loss": 0.7463, + "step": 6888 + }, + { + "epoch": 1.08, + "grad_norm": 17.835894446335644, + "learning_rate": 1.4849808699376675e-05, + "loss": 0.6627, + "step": 6889 + }, + { + "epoch": 1.08, + "grad_norm": 18.724042714175972, + "learning_rate": 1.4848333832760549e-05, + "loss": 0.6696, + "step": 6890 + }, + { + "epoch": 1.08, + "grad_norm": 18.012475708687663, + "learning_rate": 1.4846858828260642e-05, + "loss": 0.6936, + "step": 6891 + }, + { + "epoch": 1.08, + "grad_norm": 18.062443667954245, + "learning_rate": 1.4845383685918906e-05, + "loss": 0.7597, + "step": 6892 + }, + { + "epoch": 1.08, + "grad_norm": 16.44012516103373, + "learning_rate": 1.4843908405777294e-05, + "loss": 0.7649, + "step": 6893 + }, + { + "epoch": 1.08, + "grad_norm": 18.611921070592302, + "learning_rate": 1.4842432987877758e-05, + "loss": 0.716, + "step": 6894 + }, + { + "epoch": 1.08, + "grad_norm": 13.432880388216796, + "learning_rate": 1.4840957432262265e-05, + "loss": 0.6406, + "step": 6895 + }, + { + "epoch": 1.08, + "grad_norm": 16.203478788616906, + "learning_rate": 1.483948173897277e-05, + "loss": 0.7051, + "step": 6896 + }, + { + "epoch": 1.08, + "grad_norm": 20.47160488268398, + "learning_rate": 1.4838005908051245e-05, + "loss": 0.6831, + "step": 6897 + }, + { + "epoch": 1.08, + "grad_norm": 16.68274434016696, + "learning_rate": 1.4836529939539665e-05, + "loss": 0.6279, + "step": 6898 + }, + { + "epoch": 1.08, + "grad_norm": 25.558568519814205, + "learning_rate": 1.4835053833480003e-05, + "loss": 0.6641, + "step": 6899 + }, + { + "epoch": 1.08, + "grad_norm": 19.304881011551856, + "learning_rate": 1.4833577589914234e-05, + "loss": 0.6803, + "step": 6900 + }, + { + "epoch": 1.08, + "grad_norm": 18.183987779783546, + "learning_rate": 1.4832101208884352e-05, + "loss": 0.6123, + "step": 6901 + }, + { + "epoch": 1.08, + "grad_norm": 19.32523948960862, + "learning_rate": 1.4830624690432331e-05, + "loss": 0.6468, + "step": 6902 + }, + { + "epoch": 1.08, + "grad_norm": 27.145288723905725, + "learning_rate": 1.4829148034600174e-05, + "loss": 0.6132, + "step": 6903 + }, + { + "epoch": 1.08, + "grad_norm": 22.06759863853126, + "learning_rate": 1.4827671241429873e-05, + "loss": 0.6542, + "step": 6904 + }, + { + "epoch": 1.08, + "grad_norm": 19.76076857723192, + "learning_rate": 1.4826194310963424e-05, + "loss": 0.7168, + "step": 6905 + }, + { + "epoch": 1.08, + "grad_norm": 18.705553563992417, + "learning_rate": 1.4824717243242835e-05, + "loss": 0.6569, + "step": 6906 + }, + { + "epoch": 1.08, + "grad_norm": 19.847961762781033, + "learning_rate": 1.4823240038310108e-05, + "loss": 0.6935, + "step": 6907 + }, + { + "epoch": 1.08, + "grad_norm": 12.936835498837047, + "learning_rate": 1.4821762696207254e-05, + "loss": 0.5734, + "step": 6908 + }, + { + "epoch": 1.08, + "grad_norm": 21.75644804650147, + "learning_rate": 1.4820285216976288e-05, + "loss": 0.7417, + "step": 6909 + }, + { + "epoch": 1.08, + "grad_norm": 16.68685259125747, + "learning_rate": 1.4818807600659237e-05, + "loss": 0.6866, + "step": 6910 + }, + { + "epoch": 1.08, + "grad_norm": 24.248879402485336, + "learning_rate": 1.4817329847298112e-05, + "loss": 0.6538, + "step": 6911 + }, + { + "epoch": 1.08, + "grad_norm": 17.528503137326087, + "learning_rate": 1.4815851956934948e-05, + "loss": 0.6704, + "step": 6912 + }, + { + "epoch": 1.08, + "grad_norm": 4.891502665721167, + "learning_rate": 1.4814373929611768e-05, + "loss": 0.7168, + "step": 6913 + }, + { + "epoch": 1.08, + "grad_norm": 16.781617483807377, + "learning_rate": 
1.4812895765370609e-05, + "loss": 0.7302, + "step": 6914 + }, + { + "epoch": 1.08, + "grad_norm": 16.535920767769795, + "learning_rate": 1.4811417464253512e-05, + "loss": 0.6499, + "step": 6915 + }, + { + "epoch": 1.08, + "grad_norm": 23.921355271128462, + "learning_rate": 1.4809939026302518e-05, + "loss": 0.6869, + "step": 6916 + }, + { + "epoch": 1.08, + "grad_norm": 15.897900614003222, + "learning_rate": 1.4808460451559669e-05, + "loss": 0.6351, + "step": 6917 + }, + { + "epoch": 1.08, + "grad_norm": 17.154011517809057, + "learning_rate": 1.4806981740067018e-05, + "loss": 0.588, + "step": 6918 + }, + { + "epoch": 1.08, + "grad_norm": 18.81200540722553, + "learning_rate": 1.480550289186662e-05, + "loss": 0.644, + "step": 6919 + }, + { + "epoch": 1.08, + "grad_norm": 20.645432092217323, + "learning_rate": 1.480402390700053e-05, + "loss": 0.7193, + "step": 6920 + }, + { + "epoch": 1.08, + "grad_norm": 18.905216777564288, + "learning_rate": 1.4802544785510811e-05, + "loss": 0.7288, + "step": 6921 + }, + { + "epoch": 1.08, + "grad_norm": 12.696237168603231, + "learning_rate": 1.4801065527439526e-05, + "loss": 0.5994, + "step": 6922 + }, + { + "epoch": 1.08, + "grad_norm": 14.590824371024818, + "learning_rate": 1.4799586132828747e-05, + "loss": 0.6722, + "step": 6923 + }, + { + "epoch": 1.08, + "grad_norm": 22.933085539106568, + "learning_rate": 1.4798106601720545e-05, + "loss": 0.6759, + "step": 6924 + }, + { + "epoch": 1.08, + "grad_norm": 18.985911898940657, + "learning_rate": 1.4796626934157e-05, + "loss": 0.6826, + "step": 6925 + }, + { + "epoch": 1.08, + "grad_norm": 21.291782400686994, + "learning_rate": 1.4795147130180192e-05, + "loss": 0.6581, + "step": 6926 + }, + { + "epoch": 1.08, + "grad_norm": 23.520752775450195, + "learning_rate": 1.4793667189832201e-05, + "loss": 0.8795, + "step": 6927 + }, + { + "epoch": 1.08, + "grad_norm": 20.867146188830034, + "learning_rate": 1.4792187113155121e-05, + "loss": 0.6364, + "step": 6928 + }, + { + "epoch": 1.08, + "grad_norm": 19.228691108022982, + "learning_rate": 1.4790706900191043e-05, + "loss": 0.6169, + "step": 6929 + }, + { + "epoch": 1.08, + "grad_norm": 16.889255036721124, + "learning_rate": 1.4789226550982062e-05, + "loss": 0.7175, + "step": 6930 + }, + { + "epoch": 1.08, + "grad_norm": 17.9396156249684, + "learning_rate": 1.4787746065570282e-05, + "loss": 0.6739, + "step": 6931 + }, + { + "epoch": 1.08, + "grad_norm": 23.950394583679003, + "learning_rate": 1.4786265443997805e-05, + "loss": 0.6497, + "step": 6932 + }, + { + "epoch": 1.08, + "grad_norm": 19.86424577334854, + "learning_rate": 1.4784784686306737e-05, + "loss": 0.7775, + "step": 6933 + }, + { + "epoch": 1.08, + "grad_norm": 25.279852644021776, + "learning_rate": 1.478330379253919e-05, + "loss": 0.7147, + "step": 6934 + }, + { + "epoch": 1.08, + "grad_norm": 17.13845804282142, + "learning_rate": 1.4781822762737284e-05, + "loss": 0.6882, + "step": 6935 + }, + { + "epoch": 1.08, + "grad_norm": 12.626746143671477, + "learning_rate": 1.4780341596943133e-05, + "loss": 0.6497, + "step": 6936 + }, + { + "epoch": 1.08, + "grad_norm": 17.964296727852883, + "learning_rate": 1.4778860295198869e-05, + "loss": 0.644, + "step": 6937 + }, + { + "epoch": 1.08, + "grad_norm": 21.62900263410205, + "learning_rate": 1.4777378857546606e-05, + "loss": 0.6984, + "step": 6938 + }, + { + "epoch": 1.08, + "grad_norm": 21.706145554232766, + "learning_rate": 1.4775897284028491e-05, + "loss": 0.6706, + "step": 6939 + }, + { + "epoch": 1.08, + "grad_norm": 23.973669696114257, + "learning_rate": 
1.4774415574686645e-05, + "loss": 0.7294, + "step": 6940 + }, + { + "epoch": 1.08, + "grad_norm": 14.739109649954834, + "learning_rate": 1.4772933729563215e-05, + "loss": 0.6569, + "step": 6941 + }, + { + "epoch": 1.08, + "grad_norm": 13.057039554068108, + "learning_rate": 1.4771451748700344e-05, + "loss": 0.6426, + "step": 6942 + }, + { + "epoch": 1.08, + "grad_norm": 15.33816397406472, + "learning_rate": 1.4769969632140173e-05, + "loss": 0.6911, + "step": 6943 + }, + { + "epoch": 1.08, + "grad_norm": 24.74663158515733, + "learning_rate": 1.4768487379924857e-05, + "loss": 0.6781, + "step": 6944 + }, + { + "epoch": 1.08, + "grad_norm": 17.80884483756154, + "learning_rate": 1.476700499209655e-05, + "loss": 0.7354, + "step": 6945 + }, + { + "epoch": 1.08, + "grad_norm": 17.843543977182733, + "learning_rate": 1.4765522468697406e-05, + "loss": 0.7084, + "step": 6946 + }, + { + "epoch": 1.09, + "grad_norm": 15.51182711738, + "learning_rate": 1.4764039809769598e-05, + "loss": 0.7018, + "step": 6947 + }, + { + "epoch": 1.09, + "grad_norm": 16.914673627688813, + "learning_rate": 1.4762557015355278e-05, + "loss": 0.6176, + "step": 6948 + }, + { + "epoch": 1.09, + "grad_norm": 31.854363101035872, + "learning_rate": 1.4761074085496625e-05, + "loss": 0.6629, + "step": 6949 + }, + { + "epoch": 1.09, + "grad_norm": 10.88423216077385, + "learning_rate": 1.4759591020235812e-05, + "loss": 0.699, + "step": 6950 + }, + { + "epoch": 1.09, + "grad_norm": 26.588928320100546, + "learning_rate": 1.4758107819615014e-05, + "loss": 0.6286, + "step": 6951 + }, + { + "epoch": 1.09, + "grad_norm": 19.338798535155828, + "learning_rate": 1.4756624483676412e-05, + "loss": 0.6679, + "step": 6952 + }, + { + "epoch": 1.09, + "grad_norm": 20.15223280847857, + "learning_rate": 1.475514101246219e-05, + "loss": 0.6892, + "step": 6953 + }, + { + "epoch": 1.09, + "grad_norm": 16.254974697743698, + "learning_rate": 1.4753657406014545e-05, + "loss": 0.6922, + "step": 6954 + }, + { + "epoch": 1.09, + "grad_norm": 13.536834734925447, + "learning_rate": 1.475217366437566e-05, + "loss": 0.6449, + "step": 6955 + }, + { + "epoch": 1.09, + "grad_norm": 14.775998131666926, + "learning_rate": 1.4750689787587735e-05, + "loss": 0.6154, + "step": 6956 + }, + { + "epoch": 1.09, + "grad_norm": 15.990829974346735, + "learning_rate": 1.4749205775692974e-05, + "loss": 0.7237, + "step": 6957 + }, + { + "epoch": 1.09, + "grad_norm": 16.495954436179797, + "learning_rate": 1.4747721628733578e-05, + "loss": 0.6836, + "step": 6958 + }, + { + "epoch": 1.09, + "grad_norm": 19.21748560181008, + "learning_rate": 1.4746237346751757e-05, + "loss": 0.6481, + "step": 6959 + }, + { + "epoch": 1.09, + "grad_norm": 13.270806569916324, + "learning_rate": 1.4744752929789723e-05, + "loss": 0.5648, + "step": 6960 + }, + { + "epoch": 1.09, + "grad_norm": 13.705429537995972, + "learning_rate": 1.474326837788969e-05, + "loss": 0.6576, + "step": 6961 + }, + { + "epoch": 1.09, + "grad_norm": 20.46108868467707, + "learning_rate": 1.4741783691093879e-05, + "loss": 0.6488, + "step": 6962 + }, + { + "epoch": 1.09, + "grad_norm": 14.672031985733904, + "learning_rate": 1.4740298869444512e-05, + "loss": 0.6764, + "step": 6963 + }, + { + "epoch": 1.09, + "grad_norm": 11.330488535854496, + "learning_rate": 1.4738813912983823e-05, + "loss": 0.6726, + "step": 6964 + }, + { + "epoch": 1.09, + "grad_norm": 15.997999848576214, + "learning_rate": 1.4737328821754034e-05, + "loss": 0.7138, + "step": 6965 + }, + { + "epoch": 1.09, + "grad_norm": 17.873263037092144, + "learning_rate": 
1.4735843595797388e-05, + "loss": 0.656, + "step": 6966 + }, + { + "epoch": 1.09, + "grad_norm": 19.484086140844646, + "learning_rate": 1.4734358235156117e-05, + "loss": 0.6322, + "step": 6967 + }, + { + "epoch": 1.09, + "grad_norm": 20.733840856449074, + "learning_rate": 1.4732872739872469e-05, + "loss": 0.6889, + "step": 6968 + }, + { + "epoch": 1.09, + "grad_norm": 13.444808700051054, + "learning_rate": 1.4731387109988689e-05, + "loss": 0.7014, + "step": 6969 + }, + { + "epoch": 1.09, + "grad_norm": 18.971813462003162, + "learning_rate": 1.4729901345547027e-05, + "loss": 0.6809, + "step": 6970 + }, + { + "epoch": 1.09, + "grad_norm": 14.401088114612397, + "learning_rate": 1.4728415446589736e-05, + "loss": 0.6512, + "step": 6971 + }, + { + "epoch": 1.09, + "grad_norm": 16.771706887609707, + "learning_rate": 1.4726929413159077e-05, + "loss": 0.6208, + "step": 6972 + }, + { + "epoch": 1.09, + "grad_norm": 25.89304416188458, + "learning_rate": 1.472544324529731e-05, + "loss": 0.7429, + "step": 6973 + }, + { + "epoch": 1.09, + "grad_norm": 22.627261270755298, + "learning_rate": 1.4723956943046705e-05, + "loss": 0.7454, + "step": 6974 + }, + { + "epoch": 1.09, + "grad_norm": 11.86831949760473, + "learning_rate": 1.4722470506449525e-05, + "loss": 0.6715, + "step": 6975 + }, + { + "epoch": 1.09, + "grad_norm": 20.250413323455156, + "learning_rate": 1.4720983935548048e-05, + "loss": 0.7234, + "step": 6976 + }, + { + "epoch": 1.09, + "grad_norm": 17.19040646253769, + "learning_rate": 1.4719497230384548e-05, + "loss": 0.6732, + "step": 6977 + }, + { + "epoch": 1.09, + "grad_norm": 15.156288867094405, + "learning_rate": 1.4718010391001306e-05, + "loss": 0.6864, + "step": 6978 + }, + { + "epoch": 1.09, + "grad_norm": 19.736330097090796, + "learning_rate": 1.4716523417440613e-05, + "loss": 0.7096, + "step": 6979 + }, + { + "epoch": 1.09, + "grad_norm": 14.31613064249538, + "learning_rate": 1.4715036309744749e-05, + "loss": 0.6621, + "step": 6980 + }, + { + "epoch": 1.09, + "grad_norm": 20.727156947986735, + "learning_rate": 1.4713549067956011e-05, + "loss": 0.7025, + "step": 6981 + }, + { + "epoch": 1.09, + "grad_norm": 20.937049214648837, + "learning_rate": 1.4712061692116697e-05, + "loss": 0.6454, + "step": 6982 + }, + { + "epoch": 1.09, + "grad_norm": 19.215261549031933, + "learning_rate": 1.4710574182269103e-05, + "loss": 0.6641, + "step": 6983 + }, + { + "epoch": 1.09, + "grad_norm": 20.26781923507785, + "learning_rate": 1.4709086538455537e-05, + "loss": 0.5997, + "step": 6984 + }, + { + "epoch": 1.09, + "grad_norm": 28.266171831774347, + "learning_rate": 1.4707598760718305e-05, + "loss": 0.6695, + "step": 6985 + }, + { + "epoch": 1.09, + "grad_norm": 25.94063095590854, + "learning_rate": 1.4706110849099715e-05, + "loss": 0.8163, + "step": 6986 + }, + { + "epoch": 1.09, + "grad_norm": 19.96683123747085, + "learning_rate": 1.4704622803642089e-05, + "loss": 0.7017, + "step": 6987 + }, + { + "epoch": 1.09, + "grad_norm": 25.847383674981895, + "learning_rate": 1.470313462438774e-05, + "loss": 0.6739, + "step": 6988 + }, + { + "epoch": 1.09, + "grad_norm": 18.503805068844486, + "learning_rate": 1.4701646311378994e-05, + "loss": 0.7649, + "step": 6989 + }, + { + "epoch": 1.09, + "grad_norm": 18.18007624496995, + "learning_rate": 1.4700157864658176e-05, + "loss": 0.6699, + "step": 6990 + }, + { + "epoch": 1.09, + "grad_norm": 15.102198666801241, + "learning_rate": 1.4698669284267619e-05, + "loss": 0.613, + "step": 6991 + }, + { + "epoch": 1.09, + "grad_norm": 19.507259196056836, + "learning_rate": 
1.4697180570249655e-05, + "loss": 0.6672, + "step": 6992 + }, + { + "epoch": 1.09, + "grad_norm": 13.20135285938748, + "learning_rate": 1.4695691722646626e-05, + "loss": 0.6565, + "step": 6993 + }, + { + "epoch": 1.09, + "grad_norm": 17.399815078758454, + "learning_rate": 1.4694202741500871e-05, + "loss": 0.7691, + "step": 6994 + }, + { + "epoch": 1.09, + "grad_norm": 17.254121472892848, + "learning_rate": 1.4692713626854735e-05, + "loss": 0.6645, + "step": 6995 + }, + { + "epoch": 1.09, + "grad_norm": 10.544987441451005, + "learning_rate": 1.4691224378750569e-05, + "loss": 0.5736, + "step": 6996 + }, + { + "epoch": 1.09, + "grad_norm": 14.133386995122116, + "learning_rate": 1.4689734997230724e-05, + "loss": 0.6855, + "step": 6997 + }, + { + "epoch": 1.09, + "grad_norm": 17.569562101305245, + "learning_rate": 1.4688245482337565e-05, + "loss": 0.7389, + "step": 6998 + }, + { + "epoch": 1.09, + "grad_norm": 19.67870523712877, + "learning_rate": 1.468675583411344e-05, + "loss": 0.7144, + "step": 6999 + }, + { + "epoch": 1.09, + "grad_norm": 14.019001725156665, + "learning_rate": 1.4685266052600724e-05, + "loss": 0.6206, + "step": 7000 + }, + { + "epoch": 1.09, + "grad_norm": 17.342001245159917, + "learning_rate": 1.468377613784178e-05, + "loss": 0.6379, + "step": 7001 + }, + { + "epoch": 1.09, + "grad_norm": 20.443360173874197, + "learning_rate": 1.4682286089878985e-05, + "loss": 0.7224, + "step": 7002 + }, + { + "epoch": 1.09, + "grad_norm": 23.846929963695523, + "learning_rate": 1.4680795908754713e-05, + "loss": 0.703, + "step": 7003 + }, + { + "epoch": 1.09, + "grad_norm": 24.87026691275611, + "learning_rate": 1.4679305594511344e-05, + "loss": 0.7048, + "step": 7004 + }, + { + "epoch": 1.09, + "grad_norm": 18.959445866404707, + "learning_rate": 1.4677815147191256e-05, + "loss": 0.7042, + "step": 7005 + }, + { + "epoch": 1.09, + "grad_norm": 14.11895754842285, + "learning_rate": 1.4676324566836848e-05, + "loss": 0.7079, + "step": 7006 + }, + { + "epoch": 1.09, + "grad_norm": 17.83222527330517, + "learning_rate": 1.4674833853490502e-05, + "loss": 0.7459, + "step": 7007 + }, + { + "epoch": 1.09, + "grad_norm": 11.030453288455655, + "learning_rate": 1.4673343007194618e-05, + "loss": 0.6139, + "step": 7008 + }, + { + "epoch": 1.09, + "grad_norm": 19.907334622091476, + "learning_rate": 1.4671852027991589e-05, + "loss": 0.6868, + "step": 7009 + }, + { + "epoch": 1.09, + "grad_norm": 22.42533027097129, + "learning_rate": 1.4670360915923824e-05, + "loss": 0.6339, + "step": 7010 + }, + { + "epoch": 1.1, + "grad_norm": 16.657633555780176, + "learning_rate": 1.4668869671033726e-05, + "loss": 0.6438, + "step": 7011 + }, + { + "epoch": 1.1, + "grad_norm": 20.88928070322481, + "learning_rate": 1.4667378293363706e-05, + "loss": 0.7048, + "step": 7012 + }, + { + "epoch": 1.1, + "grad_norm": 16.70095618885727, + "learning_rate": 1.4665886782956178e-05, + "loss": 0.61, + "step": 7013 + }, + { + "epoch": 1.1, + "grad_norm": 19.453035742007238, + "learning_rate": 1.4664395139853558e-05, + "loss": 0.7035, + "step": 7014 + }, + { + "epoch": 1.1, + "grad_norm": 26.709407210272033, + "learning_rate": 1.466290336409827e-05, + "loss": 0.6475, + "step": 7015 + }, + { + "epoch": 1.1, + "grad_norm": 24.27223713843969, + "learning_rate": 1.4661411455732735e-05, + "loss": 0.7413, + "step": 7016 + }, + { + "epoch": 1.1, + "grad_norm": 19.31140263843974, + "learning_rate": 1.4659919414799389e-05, + "loss": 0.6015, + "step": 7017 + }, + { + "epoch": 1.1, + "grad_norm": 22.056088089260825, + "learning_rate": 
1.465842724134066e-05, + "loss": 0.6879, + "step": 7018 + }, + { + "epoch": 1.1, + "grad_norm": 20.38249931811291, + "learning_rate": 1.4656934935398983e-05, + "loss": 0.6906, + "step": 7019 + }, + { + "epoch": 1.1, + "grad_norm": 17.207129877611006, + "learning_rate": 1.4655442497016802e-05, + "loss": 0.6343, + "step": 7020 + }, + { + "epoch": 1.1, + "grad_norm": 20.646688022751317, + "learning_rate": 1.4653949926236562e-05, + "loss": 0.6299, + "step": 7021 + }, + { + "epoch": 1.1, + "grad_norm": 17.525901557415224, + "learning_rate": 1.4652457223100704e-05, + "loss": 0.7107, + "step": 7022 + }, + { + "epoch": 1.1, + "grad_norm": 21.2278939265862, + "learning_rate": 1.4650964387651692e-05, + "loss": 0.715, + "step": 7023 + }, + { + "epoch": 1.1, + "grad_norm": 22.366730467816783, + "learning_rate": 1.4649471419931964e-05, + "loss": 0.6328, + "step": 7024 + }, + { + "epoch": 1.1, + "grad_norm": 20.22701759975661, + "learning_rate": 1.4647978319983998e-05, + "loss": 0.6437, + "step": 7025 + }, + { + "epoch": 1.1, + "grad_norm": 25.290032370833387, + "learning_rate": 1.4646485087850239e-05, + "loss": 0.7371, + "step": 7026 + }, + { + "epoch": 1.1, + "grad_norm": 16.123669322687615, + "learning_rate": 1.4644991723573167e-05, + "loss": 0.7021, + "step": 7027 + }, + { + "epoch": 1.1, + "grad_norm": 17.399931850758918, + "learning_rate": 1.4643498227195247e-05, + "loss": 0.7094, + "step": 7028 + }, + { + "epoch": 1.1, + "grad_norm": 15.0638921235817, + "learning_rate": 1.4642004598758955e-05, + "loss": 0.6051, + "step": 7029 + }, + { + "epoch": 1.1, + "grad_norm": 19.992809223377687, + "learning_rate": 1.4640510838306765e-05, + "loss": 0.6004, + "step": 7030 + }, + { + "epoch": 1.1, + "grad_norm": 33.715305468849856, + "learning_rate": 1.4639016945881164e-05, + "loss": 0.7904, + "step": 7031 + }, + { + "epoch": 1.1, + "grad_norm": 28.687614468349267, + "learning_rate": 1.4637522921524632e-05, + "loss": 0.6982, + "step": 7032 + }, + { + "epoch": 1.1, + "grad_norm": 12.598117797439969, + "learning_rate": 1.4636028765279662e-05, + "loss": 0.5972, + "step": 7033 + }, + { + "epoch": 1.1, + "grad_norm": 15.615879165286403, + "learning_rate": 1.4634534477188745e-05, + "loss": 0.6154, + "step": 7034 + }, + { + "epoch": 1.1, + "grad_norm": 13.878544153149125, + "learning_rate": 1.4633040057294379e-05, + "loss": 0.7251, + "step": 7035 + }, + { + "epoch": 1.1, + "grad_norm": 16.490215179963524, + "learning_rate": 1.4631545505639066e-05, + "loss": 0.638, + "step": 7036 + }, + { + "epoch": 1.1, + "grad_norm": 21.998263878485687, + "learning_rate": 1.4630050822265306e-05, + "loss": 0.6917, + "step": 7037 + }, + { + "epoch": 1.1, + "grad_norm": 18.251567147849286, + "learning_rate": 1.4628556007215612e-05, + "loss": 0.6591, + "step": 7038 + }, + { + "epoch": 1.1, + "grad_norm": 22.090590785622393, + "learning_rate": 1.462706106053249e-05, + "loss": 0.6737, + "step": 7039 + }, + { + "epoch": 1.1, + "grad_norm": 18.11842552024782, + "learning_rate": 1.4625565982258461e-05, + "loss": 0.7529, + "step": 7040 + }, + { + "epoch": 1.1, + "grad_norm": 22.220165905724727, + "learning_rate": 1.4624070772436037e-05, + "loss": 0.6819, + "step": 7041 + }, + { + "epoch": 1.1, + "grad_norm": 45.387024255695025, + "learning_rate": 1.4622575431107748e-05, + "loss": 0.7692, + "step": 7042 + }, + { + "epoch": 1.1, + "grad_norm": 17.686625203750875, + "learning_rate": 1.4621079958316118e-05, + "loss": 0.6618, + "step": 7043 + }, + { + "epoch": 1.1, + "grad_norm": 20.755306786830555, + "learning_rate": 1.4619584354103675e-05, + 
"loss": 0.6575, + "step": 7044 + }, + { + "epoch": 1.1, + "grad_norm": 24.8441490561147, + "learning_rate": 1.4618088618512957e-05, + "loss": 0.7926, + "step": 7045 + }, + { + "epoch": 1.1, + "grad_norm": 18.99545148704661, + "learning_rate": 1.4616592751586503e-05, + "loss": 0.6647, + "step": 7046 + }, + { + "epoch": 1.1, + "grad_norm": 18.447742875621604, + "learning_rate": 1.4615096753366849e-05, + "loss": 0.715, + "step": 7047 + }, + { + "epoch": 1.1, + "grad_norm": 14.451496408826745, + "learning_rate": 1.4613600623896542e-05, + "loss": 0.6422, + "step": 7048 + }, + { + "epoch": 1.1, + "grad_norm": 16.301495811328188, + "learning_rate": 1.4612104363218132e-05, + "loss": 0.6394, + "step": 7049 + }, + { + "epoch": 1.1, + "grad_norm": 20.492657231754652, + "learning_rate": 1.4610607971374175e-05, + "loss": 0.733, + "step": 7050 + }, + { + "epoch": 1.1, + "grad_norm": 15.673587667932145, + "learning_rate": 1.460911144840722e-05, + "loss": 0.7125, + "step": 7051 + }, + { + "epoch": 1.1, + "grad_norm": 18.09370370967656, + "learning_rate": 1.4607614794359836e-05, + "loss": 0.6866, + "step": 7052 + }, + { + "epoch": 1.1, + "grad_norm": 19.41064252760542, + "learning_rate": 1.4606118009274582e-05, + "loss": 0.6593, + "step": 7053 + }, + { + "epoch": 1.1, + "grad_norm": 24.503598446450553, + "learning_rate": 1.4604621093194024e-05, + "loss": 0.684, + "step": 7054 + }, + { + "epoch": 1.1, + "grad_norm": 16.500413183706783, + "learning_rate": 1.4603124046160737e-05, + "loss": 0.7107, + "step": 7055 + }, + { + "epoch": 1.1, + "grad_norm": 17.284920481997407, + "learning_rate": 1.4601626868217296e-05, + "loss": 0.6495, + "step": 7056 + }, + { + "epoch": 1.1, + "grad_norm": 22.54281996045134, + "learning_rate": 1.4600129559406278e-05, + "loss": 0.7752, + "step": 7057 + }, + { + "epoch": 1.1, + "grad_norm": 20.973435400765425, + "learning_rate": 1.4598632119770266e-05, + "loss": 0.7222, + "step": 7058 + }, + { + "epoch": 1.1, + "grad_norm": 26.655250829102528, + "learning_rate": 1.4597134549351845e-05, + "loss": 0.6997, + "step": 7059 + }, + { + "epoch": 1.1, + "grad_norm": 24.968223785852004, + "learning_rate": 1.459563684819361e-05, + "loss": 0.7215, + "step": 7060 + }, + { + "epoch": 1.1, + "grad_norm": 16.77558051936856, + "learning_rate": 1.4594139016338149e-05, + "loss": 0.6302, + "step": 7061 + }, + { + "epoch": 1.1, + "grad_norm": 20.65315032446035, + "learning_rate": 1.4592641053828063e-05, + "loss": 0.5942, + "step": 7062 + }, + { + "epoch": 1.1, + "grad_norm": 19.807305845723146, + "learning_rate": 1.4591142960705951e-05, + "loss": 0.7034, + "step": 7063 + }, + { + "epoch": 1.1, + "grad_norm": 18.018211518192775, + "learning_rate": 1.4589644737014418e-05, + "loss": 0.6423, + "step": 7064 + }, + { + "epoch": 1.1, + "grad_norm": 21.051010367016783, + "learning_rate": 1.4588146382796075e-05, + "loss": 0.6319, + "step": 7065 + }, + { + "epoch": 1.1, + "grad_norm": 20.951180637386592, + "learning_rate": 1.4586647898093534e-05, + "loss": 0.6474, + "step": 7066 + }, + { + "epoch": 1.1, + "grad_norm": 19.7070851335649, + "learning_rate": 1.4585149282949409e-05, + "loss": 0.6891, + "step": 7067 + }, + { + "epoch": 1.1, + "grad_norm": 16.384739329077902, + "learning_rate": 1.458365053740632e-05, + "loss": 0.6133, + "step": 7068 + }, + { + "epoch": 1.1, + "grad_norm": 17.041241613600196, + "learning_rate": 1.4582151661506894e-05, + "loss": 0.5978, + "step": 7069 + }, + { + "epoch": 1.1, + "grad_norm": 18.890526369782368, + "learning_rate": 1.458065265529375e-05, + "loss": 0.6614, + "step": 7070 + }, 
+ { + "epoch": 1.1, + "grad_norm": 17.048859252621583, + "learning_rate": 1.4579153518809527e-05, + "loss": 0.7275, + "step": 7071 + }, + { + "epoch": 1.1, + "grad_norm": 13.468198100574117, + "learning_rate": 1.4577654252096856e-05, + "loss": 0.6838, + "step": 7072 + }, + { + "epoch": 1.1, + "grad_norm": 20.25671740439634, + "learning_rate": 1.4576154855198377e-05, + "loss": 0.6564, + "step": 7073 + }, + { + "epoch": 1.1, + "grad_norm": 17.427955231875117, + "learning_rate": 1.457465532815673e-05, + "loss": 0.7075, + "step": 7074 + }, + { + "epoch": 1.11, + "grad_norm": 23.81805510441936, + "learning_rate": 1.4573155671014563e-05, + "loss": 0.729, + "step": 7075 + }, + { + "epoch": 1.11, + "grad_norm": 20.555953099584958, + "learning_rate": 1.457165588381452e-05, + "loss": 0.7012, + "step": 7076 + }, + { + "epoch": 1.11, + "grad_norm": 15.117698708104072, + "learning_rate": 1.4570155966599264e-05, + "loss": 0.6668, + "step": 7077 + }, + { + "epoch": 1.11, + "grad_norm": 15.652709015139353, + "learning_rate": 1.456865591941144e-05, + "loss": 0.6553, + "step": 7078 + }, + { + "epoch": 1.11, + "grad_norm": 16.639431213934756, + "learning_rate": 1.4567155742293717e-05, + "loss": 0.6589, + "step": 7079 + }, + { + "epoch": 1.11, + "grad_norm": 28.755967781325303, + "learning_rate": 1.4565655435288756e-05, + "loss": 0.7819, + "step": 7080 + }, + { + "epoch": 1.11, + "grad_norm": 12.085008097855225, + "learning_rate": 1.4564154998439225e-05, + "loss": 0.656, + "step": 7081 + }, + { + "epoch": 1.11, + "grad_norm": 15.128384263745025, + "learning_rate": 1.4562654431787797e-05, + "loss": 0.6186, + "step": 7082 + }, + { + "epoch": 1.11, + "grad_norm": 18.361402765257665, + "learning_rate": 1.4561153735377147e-05, + "loss": 0.6589, + "step": 7083 + }, + { + "epoch": 1.11, + "grad_norm": 18.039083891368954, + "learning_rate": 1.455965290924995e-05, + "loss": 0.7627, + "step": 7084 + }, + { + "epoch": 1.11, + "grad_norm": 19.872707496763248, + "learning_rate": 1.4558151953448891e-05, + "loss": 0.7493, + "step": 7085 + }, + { + "epoch": 1.11, + "grad_norm": 20.48439826934913, + "learning_rate": 1.4556650868016661e-05, + "loss": 0.6464, + "step": 7086 + }, + { + "epoch": 1.11, + "grad_norm": 18.80123666528361, + "learning_rate": 1.4555149652995942e-05, + "loss": 0.6105, + "step": 7087 + }, + { + "epoch": 1.11, + "grad_norm": 20.48556201952367, + "learning_rate": 1.4553648308429433e-05, + "loss": 0.677, + "step": 7088 + }, + { + "epoch": 1.11, + "grad_norm": 19.585760968648692, + "learning_rate": 1.455214683435983e-05, + "loss": 0.6422, + "step": 7089 + }, + { + "epoch": 1.11, + "grad_norm": 15.374523158354936, + "learning_rate": 1.4550645230829837e-05, + "loss": 0.6592, + "step": 7090 + }, + { + "epoch": 1.11, + "grad_norm": 23.031335417745673, + "learning_rate": 1.4549143497882152e-05, + "loss": 0.6826, + "step": 7091 + }, + { + "epoch": 1.11, + "grad_norm": 19.136101080664933, + "learning_rate": 1.4547641635559488e-05, + "loss": 0.7095, + "step": 7092 + }, + { + "epoch": 1.11, + "grad_norm": 29.673752854120632, + "learning_rate": 1.4546139643904553e-05, + "loss": 0.7743, + "step": 7093 + }, + { + "epoch": 1.11, + "grad_norm": 28.004237428043275, + "learning_rate": 1.4544637522960072e-05, + "loss": 0.6392, + "step": 7094 + }, + { + "epoch": 1.11, + "grad_norm": 17.403888674916924, + "learning_rate": 1.4543135272768753e-05, + "loss": 0.7561, + "step": 7095 + }, + { + "epoch": 1.11, + "grad_norm": 22.2972724280328, + "learning_rate": 1.454163289337333e-05, + "loss": 0.7921, + "step": 7096 + }, + { + 
"epoch": 1.11, + "grad_norm": 14.257958892697793, + "learning_rate": 1.454013038481652e-05, + "loss": 0.6995, + "step": 7097 + }, + { + "epoch": 1.11, + "grad_norm": 14.417911662751203, + "learning_rate": 1.4538627747141058e-05, + "loss": 0.665, + "step": 7098 + }, + { + "epoch": 1.11, + "grad_norm": 25.90885183627213, + "learning_rate": 1.4537124980389678e-05, + "loss": 0.7621, + "step": 7099 + }, + { + "epoch": 1.11, + "grad_norm": 21.131756108475784, + "learning_rate": 1.4535622084605119e-05, + "loss": 0.6533, + "step": 7100 + }, + { + "epoch": 1.11, + "grad_norm": 28.440700924110484, + "learning_rate": 1.453411905983012e-05, + "loss": 0.6031, + "step": 7101 + }, + { + "epoch": 1.11, + "grad_norm": 15.446422980777664, + "learning_rate": 1.453261590610743e-05, + "loss": 0.6583, + "step": 7102 + }, + { + "epoch": 1.11, + "grad_norm": 17.393097887192482, + "learning_rate": 1.453111262347979e-05, + "loss": 0.6708, + "step": 7103 + }, + { + "epoch": 1.11, + "grad_norm": 25.235441142846927, + "learning_rate": 1.4529609211989962e-05, + "loss": 0.6485, + "step": 7104 + }, + { + "epoch": 1.11, + "grad_norm": 15.911867624274608, + "learning_rate": 1.4528105671680697e-05, + "loss": 0.6828, + "step": 7105 + }, + { + "epoch": 1.11, + "grad_norm": 25.534884267697496, + "learning_rate": 1.4526602002594755e-05, + "loss": 0.645, + "step": 7106 + }, + { + "epoch": 1.11, + "grad_norm": 18.25805458823257, + "learning_rate": 1.45250982047749e-05, + "loss": 0.6715, + "step": 7107 + }, + { + "epoch": 1.11, + "grad_norm": 19.48968898836983, + "learning_rate": 1.4523594278263899e-05, + "loss": 0.7343, + "step": 7108 + }, + { + "epoch": 1.11, + "grad_norm": 17.322576181327083, + "learning_rate": 1.4522090223104523e-05, + "loss": 0.6218, + "step": 7109 + }, + { + "epoch": 1.11, + "grad_norm": 11.337220602420146, + "learning_rate": 1.4520586039339545e-05, + "loss": 0.6086, + "step": 7110 + }, + { + "epoch": 1.11, + "grad_norm": 18.37134331126699, + "learning_rate": 1.4519081727011747e-05, + "loss": 0.7534, + "step": 7111 + }, + { + "epoch": 1.11, + "grad_norm": 14.238374376450317, + "learning_rate": 1.4517577286163906e-05, + "loss": 0.651, + "step": 7112 + }, + { + "epoch": 1.11, + "grad_norm": 32.46718639999983, + "learning_rate": 1.4516072716838815e-05, + "loss": 0.6485, + "step": 7113 + }, + { + "epoch": 1.11, + "grad_norm": 20.217345109327145, + "learning_rate": 1.4514568019079252e-05, + "loss": 0.8135, + "step": 7114 + }, + { + "epoch": 1.11, + "grad_norm": 17.922151751322875, + "learning_rate": 1.4513063192928016e-05, + "loss": 0.6534, + "step": 7115 + }, + { + "epoch": 1.11, + "grad_norm": 17.125813466611653, + "learning_rate": 1.4511558238427905e-05, + "loss": 0.6134, + "step": 7116 + }, + { + "epoch": 1.11, + "grad_norm": 21.48041140043487, + "learning_rate": 1.4510053155621714e-05, + "loss": 0.6565, + "step": 7117 + }, + { + "epoch": 1.11, + "grad_norm": 23.86290110105597, + "learning_rate": 1.4508547944552254e-05, + "loss": 0.6473, + "step": 7118 + }, + { + "epoch": 1.11, + "grad_norm": 24.96522334530517, + "learning_rate": 1.4507042605262324e-05, + "loss": 0.6947, + "step": 7119 + }, + { + "epoch": 1.11, + "grad_norm": 18.667638920649857, + "learning_rate": 1.450553713779474e-05, + "loss": 0.7727, + "step": 7120 + }, + { + "epoch": 1.11, + "grad_norm": 17.191781196154576, + "learning_rate": 1.4504031542192319e-05, + "loss": 0.6742, + "step": 7121 + }, + { + "epoch": 1.11, + "grad_norm": 14.86807658352979, + "learning_rate": 1.450252581849787e-05, + "loss": 0.6663, + "step": 7122 + }, + { + "epoch": 
1.11, + "grad_norm": 22.07577298831755, + "learning_rate": 1.4501019966754225e-05, + "loss": 0.6357, + "step": 7123 + }, + { + "epoch": 1.11, + "grad_norm": 26.80208455819756, + "learning_rate": 1.4499513987004201e-05, + "loss": 0.7979, + "step": 7124 + }, + { + "epoch": 1.11, + "grad_norm": 30.814780257309796, + "learning_rate": 1.4498007879290635e-05, + "loss": 0.7274, + "step": 7125 + }, + { + "epoch": 1.11, + "grad_norm": 13.112242110632847, + "learning_rate": 1.4496501643656356e-05, + "loss": 0.6785, + "step": 7126 + }, + { + "epoch": 1.11, + "grad_norm": 18.779309510510934, + "learning_rate": 1.4494995280144198e-05, + "loss": 0.5376, + "step": 7127 + }, + { + "epoch": 1.11, + "grad_norm": 19.730295629502738, + "learning_rate": 1.4493488788797006e-05, + "loss": 0.6951, + "step": 7128 + }, + { + "epoch": 1.11, + "grad_norm": 13.67425831866003, + "learning_rate": 1.449198216965762e-05, + "loss": 0.679, + "step": 7129 + }, + { + "epoch": 1.11, + "grad_norm": 43.810620102977005, + "learning_rate": 1.449047542276889e-05, + "loss": 0.7325, + "step": 7130 + }, + { + "epoch": 1.11, + "grad_norm": 15.433666699274976, + "learning_rate": 1.4488968548173662e-05, + "loss": 0.6103, + "step": 7131 + }, + { + "epoch": 1.11, + "grad_norm": 16.24231067535535, + "learning_rate": 1.4487461545914801e-05, + "loss": 0.6429, + "step": 7132 + }, + { + "epoch": 1.11, + "grad_norm": 11.28116405538859, + "learning_rate": 1.4485954416035152e-05, + "loss": 0.6813, + "step": 7133 + }, + { + "epoch": 1.11, + "grad_norm": 18.072778834871823, + "learning_rate": 1.4484447158577588e-05, + "loss": 0.6368, + "step": 7134 + }, + { + "epoch": 1.11, + "grad_norm": 25.47481399146278, + "learning_rate": 1.448293977358497e-05, + "loss": 0.6792, + "step": 7135 + }, + { + "epoch": 1.11, + "grad_norm": 14.532519097950544, + "learning_rate": 1.4481432261100168e-05, + "loss": 0.6942, + "step": 7136 + }, + { + "epoch": 1.11, + "grad_norm": 18.272208164300157, + "learning_rate": 1.4479924621166051e-05, + "loss": 0.6786, + "step": 7137 + }, + { + "epoch": 1.11, + "grad_norm": 19.632029391016218, + "learning_rate": 1.4478416853825502e-05, + "loss": 0.6437, + "step": 7138 + }, + { + "epoch": 1.12, + "grad_norm": 18.045931425652277, + "learning_rate": 1.4476908959121394e-05, + "loss": 0.7058, + "step": 7139 + }, + { + "epoch": 1.12, + "grad_norm": 16.9929101364993, + "learning_rate": 1.4475400937096617e-05, + "loss": 0.628, + "step": 7140 + }, + { + "epoch": 1.12, + "grad_norm": 19.128778170156263, + "learning_rate": 1.4473892787794053e-05, + "loss": 0.7265, + "step": 7141 + }, + { + "epoch": 1.12, + "grad_norm": 21.684913205118107, + "learning_rate": 1.4472384511256597e-05, + "loss": 0.6601, + "step": 7142 + }, + { + "epoch": 1.12, + "grad_norm": 21.101327858799248, + "learning_rate": 1.4470876107527141e-05, + "loss": 0.7012, + "step": 7143 + }, + { + "epoch": 1.12, + "grad_norm": 13.549378449755283, + "learning_rate": 1.4469367576648582e-05, + "loss": 0.683, + "step": 7144 + }, + { + "epoch": 1.12, + "grad_norm": 16.643840399672822, + "learning_rate": 1.4467858918663826e-05, + "loss": 0.652, + "step": 7145 + }, + { + "epoch": 1.12, + "grad_norm": 19.099977081906715, + "learning_rate": 1.4466350133615776e-05, + "loss": 0.6943, + "step": 7146 + }, + { + "epoch": 1.12, + "grad_norm": 18.526825632591546, + "learning_rate": 1.4464841221547339e-05, + "loss": 0.6407, + "step": 7147 + }, + { + "epoch": 1.12, + "grad_norm": 14.440793436888754, + "learning_rate": 1.4463332182501431e-05, + "loss": 0.6954, + "step": 7148 + }, + { + "epoch": 1.12, + 
"grad_norm": 19.649252492030804, + "learning_rate": 1.4461823016520967e-05, + "loss": 0.7201, + "step": 7149 + }, + { + "epoch": 1.12, + "grad_norm": 15.52141929028158, + "learning_rate": 1.4460313723648866e-05, + "loss": 0.6271, + "step": 7150 + }, + { + "epoch": 1.12, + "grad_norm": 7.022794139236988, + "learning_rate": 1.4458804303928053e-05, + "loss": 0.6861, + "step": 7151 + }, + { + "epoch": 1.12, + "grad_norm": 16.31227642761135, + "learning_rate": 1.4457294757401452e-05, + "loss": 0.6074, + "step": 7152 + }, + { + "epoch": 1.12, + "grad_norm": 30.454482830572918, + "learning_rate": 1.4455785084111997e-05, + "loss": 0.6284, + "step": 7153 + }, + { + "epoch": 1.12, + "grad_norm": 17.638165825402233, + "learning_rate": 1.445427528410262e-05, + "loss": 0.688, + "step": 7154 + }, + { + "epoch": 1.12, + "grad_norm": 24.754279296529734, + "learning_rate": 1.4452765357416261e-05, + "loss": 0.6578, + "step": 7155 + }, + { + "epoch": 1.12, + "grad_norm": 30.252084088847063, + "learning_rate": 1.4451255304095857e-05, + "loss": 0.6368, + "step": 7156 + }, + { + "epoch": 1.12, + "grad_norm": 14.34131684838313, + "learning_rate": 1.444974512418436e-05, + "loss": 0.6447, + "step": 7157 + }, + { + "epoch": 1.12, + "grad_norm": 16.375665156126406, + "learning_rate": 1.444823481772471e-05, + "loss": 0.6622, + "step": 7158 + }, + { + "epoch": 1.12, + "grad_norm": 25.45230349496516, + "learning_rate": 1.4446724384759869e-05, + "loss": 0.6687, + "step": 7159 + }, + { + "epoch": 1.12, + "grad_norm": 21.27005710284545, + "learning_rate": 1.4445213825332784e-05, + "loss": 0.6446, + "step": 7160 + }, + { + "epoch": 1.12, + "grad_norm": 15.183637495789984, + "learning_rate": 1.444370313948642e-05, + "loss": 0.5617, + "step": 7161 + }, + { + "epoch": 1.12, + "grad_norm": 20.80311784112368, + "learning_rate": 1.4442192327263737e-05, + "loss": 0.5712, + "step": 7162 + }, + { + "epoch": 1.12, + "grad_norm": 20.468068875926573, + "learning_rate": 1.4440681388707704e-05, + "loss": 0.6664, + "step": 7163 + }, + { + "epoch": 1.12, + "grad_norm": 18.19284891834503, + "learning_rate": 1.4439170323861289e-05, + "loss": 0.7225, + "step": 7164 + }, + { + "epoch": 1.12, + "grad_norm": 27.27110475766207, + "learning_rate": 1.443765913276747e-05, + "loss": 0.7459, + "step": 7165 + }, + { + "epoch": 1.12, + "grad_norm": 23.593908107935167, + "learning_rate": 1.4436147815469217e-05, + "loss": 0.6545, + "step": 7166 + }, + { + "epoch": 1.12, + "grad_norm": 20.296511303974057, + "learning_rate": 1.4434636372009516e-05, + "loss": 0.7165, + "step": 7167 + }, + { + "epoch": 1.12, + "grad_norm": 15.056383111186385, + "learning_rate": 1.4433124802431353e-05, + "loss": 0.6576, + "step": 7168 + }, + { + "epoch": 1.12, + "grad_norm": 18.743213363914144, + "learning_rate": 1.4431613106777712e-05, + "loss": 0.5647, + "step": 7169 + }, + { + "epoch": 1.12, + "grad_norm": 19.870715013580075, + "learning_rate": 1.4430101285091587e-05, + "loss": 0.6001, + "step": 7170 + }, + { + "epoch": 1.12, + "grad_norm": 15.314093600279405, + "learning_rate": 1.4428589337415975e-05, + "loss": 0.6606, + "step": 7171 + }, + { + "epoch": 1.12, + "grad_norm": 17.44392831472725, + "learning_rate": 1.442707726379387e-05, + "loss": 0.6873, + "step": 7172 + }, + { + "epoch": 1.12, + "grad_norm": 15.4437503236935, + "learning_rate": 1.4425565064268276e-05, + "loss": 0.6569, + "step": 7173 + }, + { + "epoch": 1.12, + "grad_norm": 22.267867267873793, + "learning_rate": 1.4424052738882203e-05, + "loss": 0.6765, + "step": 7174 + }, + { + "epoch": 1.12, + 
"grad_norm": 15.534561724241765, + "learning_rate": 1.442254028767866e-05, + "loss": 0.6394, + "step": 7175 + }, + { + "epoch": 1.12, + "grad_norm": 24.281917991495554, + "learning_rate": 1.4421027710700655e-05, + "loss": 0.7031, + "step": 7176 + }, + { + "epoch": 1.12, + "grad_norm": 13.474470189530354, + "learning_rate": 1.441951500799121e-05, + "loss": 0.7319, + "step": 7177 + }, + { + "epoch": 1.12, + "grad_norm": 21.35123254428233, + "learning_rate": 1.4418002179593345e-05, + "loss": 0.64, + "step": 7178 + }, + { + "epoch": 1.12, + "grad_norm": 20.71189170849506, + "learning_rate": 1.441648922555008e-05, + "loss": 0.6774, + "step": 7179 + }, + { + "epoch": 1.12, + "grad_norm": 12.746596146425777, + "learning_rate": 1.4414976145904445e-05, + "loss": 0.5948, + "step": 7180 + }, + { + "epoch": 1.12, + "grad_norm": 19.093170712107323, + "learning_rate": 1.441346294069947e-05, + "loss": 0.6454, + "step": 7181 + }, + { + "epoch": 1.12, + "grad_norm": 19.990572226482872, + "learning_rate": 1.4411949609978196e-05, + "loss": 0.6853, + "step": 7182 + }, + { + "epoch": 1.12, + "grad_norm": 21.34061084299404, + "learning_rate": 1.441043615378365e-05, + "loss": 0.6357, + "step": 7183 + }, + { + "epoch": 1.12, + "grad_norm": 14.196777537404106, + "learning_rate": 1.4408922572158883e-05, + "loss": 0.5732, + "step": 7184 + }, + { + "epoch": 1.12, + "grad_norm": 15.584729596707398, + "learning_rate": 1.4407408865146935e-05, + "loss": 0.6176, + "step": 7185 + }, + { + "epoch": 1.12, + "grad_norm": 13.18308444134311, + "learning_rate": 1.4405895032790858e-05, + "loss": 0.6675, + "step": 7186 + }, + { + "epoch": 1.12, + "grad_norm": 15.587270234188065, + "learning_rate": 1.4404381075133706e-05, + "loss": 0.6121, + "step": 7187 + }, + { + "epoch": 1.12, + "grad_norm": 20.64533149878886, + "learning_rate": 1.4402866992218529e-05, + "loss": 0.6496, + "step": 7188 + }, + { + "epoch": 1.12, + "grad_norm": 30.97312875118572, + "learning_rate": 1.4401352784088394e-05, + "loss": 0.7026, + "step": 7189 + }, + { + "epoch": 1.12, + "grad_norm": 22.50748127962067, + "learning_rate": 1.4399838450786358e-05, + "loss": 0.6849, + "step": 7190 + }, + { + "epoch": 1.12, + "grad_norm": 16.264482633260354, + "learning_rate": 1.439832399235549e-05, + "loss": 0.7039, + "step": 7191 + }, + { + "epoch": 1.12, + "grad_norm": 15.890241899626659, + "learning_rate": 1.4396809408838862e-05, + "loss": 0.588, + "step": 7192 + }, + { + "epoch": 1.12, + "grad_norm": 24.062036142584752, + "learning_rate": 1.4395294700279545e-05, + "loss": 0.5902, + "step": 7193 + }, + { + "epoch": 1.12, + "grad_norm": 23.765563902378236, + "learning_rate": 1.439377986672062e-05, + "loss": 0.7383, + "step": 7194 + }, + { + "epoch": 1.12, + "grad_norm": 13.300892123349808, + "learning_rate": 1.4392264908205165e-05, + "loss": 0.7094, + "step": 7195 + }, + { + "epoch": 1.12, + "grad_norm": 15.3762345138238, + "learning_rate": 1.4390749824776264e-05, + "loss": 0.6302, + "step": 7196 + }, + { + "epoch": 1.12, + "grad_norm": 17.207671099661752, + "learning_rate": 1.438923461647701e-05, + "loss": 0.7919, + "step": 7197 + }, + { + "epoch": 1.12, + "grad_norm": 15.203450093611192, + "learning_rate": 1.4387719283350488e-05, + "loss": 0.663, + "step": 7198 + }, + { + "epoch": 1.12, + "grad_norm": 18.132287068378385, + "learning_rate": 1.43862038254398e-05, + "loss": 0.5977, + "step": 7199 + }, + { + "epoch": 1.12, + "grad_norm": 15.118131337038932, + "learning_rate": 1.4384688242788034e-05, + "loss": 0.727, + "step": 7200 + }, + { + "epoch": 1.12, + "grad_norm": 
17.18136887261186, + "learning_rate": 1.4383172535438306e-05, + "loss": 0.6412, + "step": 7201 + }, + { + "epoch": 1.12, + "grad_norm": 21.190909346982185, + "learning_rate": 1.4381656703433713e-05, + "loss": 0.7353, + "step": 7202 + }, + { + "epoch": 1.13, + "grad_norm": 21.15093742302803, + "learning_rate": 1.4380140746817366e-05, + "loss": 0.6698, + "step": 7203 + }, + { + "epoch": 1.13, + "grad_norm": 18.195003159654558, + "learning_rate": 1.4378624665632379e-05, + "loss": 0.6264, + "step": 7204 + }, + { + "epoch": 1.13, + "grad_norm": 19.480302149440988, + "learning_rate": 1.4377108459921867e-05, + "loss": 0.6304, + "step": 7205 + }, + { + "epoch": 1.13, + "grad_norm": 16.97901000046173, + "learning_rate": 1.4375592129728952e-05, + "loss": 0.6901, + "step": 7206 + }, + { + "epoch": 1.13, + "grad_norm": 12.494333391847798, + "learning_rate": 1.4374075675096757e-05, + "loss": 0.5668, + "step": 7207 + }, + { + "epoch": 1.13, + "grad_norm": 20.394876210865057, + "learning_rate": 1.4372559096068406e-05, + "loss": 0.6654, + "step": 7208 + }, + { + "epoch": 1.13, + "grad_norm": 18.987401928186944, + "learning_rate": 1.4371042392687036e-05, + "loss": 0.7057, + "step": 7209 + }, + { + "epoch": 1.13, + "grad_norm": 24.61080116741353, + "learning_rate": 1.4369525564995774e-05, + "loss": 0.6659, + "step": 7210 + }, + { + "epoch": 1.13, + "grad_norm": 18.792474335419126, + "learning_rate": 1.4368008613037763e-05, + "loss": 0.6541, + "step": 7211 + }, + { + "epoch": 1.13, + "grad_norm": 16.998459234696274, + "learning_rate": 1.436649153685614e-05, + "loss": 0.6462, + "step": 7212 + }, + { + "epoch": 1.13, + "grad_norm": 27.13593411714945, + "learning_rate": 1.4364974336494055e-05, + "loss": 0.6775, + "step": 7213 + }, + { + "epoch": 1.13, + "grad_norm": 15.118755586011249, + "learning_rate": 1.436345701199465e-05, + "loss": 0.5968, + "step": 7214 + }, + { + "epoch": 1.13, + "grad_norm": 21.300974833288958, + "learning_rate": 1.4361939563401082e-05, + "loss": 0.6587, + "step": 7215 + }, + { + "epoch": 1.13, + "grad_norm": 22.01647242383209, + "learning_rate": 1.4360421990756506e-05, + "loss": 0.6577, + "step": 7216 + }, + { + "epoch": 1.13, + "grad_norm": 11.977488267357753, + "learning_rate": 1.4358904294104076e-05, + "loss": 0.6365, + "step": 7217 + }, + { + "epoch": 1.13, + "grad_norm": 16.596867110520396, + "learning_rate": 1.4357386473486961e-05, + "loss": 0.7022, + "step": 7218 + }, + { + "epoch": 1.13, + "grad_norm": 26.604504721962428, + "learning_rate": 1.435586852894832e-05, + "loss": 0.7153, + "step": 7219 + }, + { + "epoch": 1.13, + "grad_norm": 23.031523037796934, + "learning_rate": 1.435435046053133e-05, + "loss": 0.7289, + "step": 7220 + }, + { + "epoch": 1.13, + "grad_norm": 17.795522085783244, + "learning_rate": 1.4352832268279158e-05, + "loss": 0.7591, + "step": 7221 + }, + { + "epoch": 1.13, + "grad_norm": 19.248858315808214, + "learning_rate": 1.4351313952234985e-05, + "loss": 0.684, + "step": 7222 + }, + { + "epoch": 1.13, + "grad_norm": 14.92443869022873, + "learning_rate": 1.4349795512441984e-05, + "loss": 0.6102, + "step": 7223 + }, + { + "epoch": 1.13, + "grad_norm": 15.636637438034517, + "learning_rate": 1.434827694894335e-05, + "loss": 0.6419, + "step": 7224 + }, + { + "epoch": 1.13, + "grad_norm": 27.59543016584774, + "learning_rate": 1.4346758261782256e-05, + "loss": 0.7201, + "step": 7225 + }, + { + "epoch": 1.13, + "grad_norm": 15.881622650791597, + "learning_rate": 1.4345239451001905e-05, + "loss": 0.7057, + "step": 7226 + }, + { + "epoch": 1.13, + "grad_norm": 
16.200292531914716, + "learning_rate": 1.4343720516645482e-05, + "loss": 0.7032, + "step": 7227 + }, + { + "epoch": 1.13, + "grad_norm": 20.024773365993283, + "learning_rate": 1.4342201458756193e-05, + "loss": 0.6261, + "step": 7228 + }, + { + "epoch": 1.13, + "grad_norm": 13.231735378911848, + "learning_rate": 1.434068227737723e-05, + "loss": 0.6029, + "step": 7229 + }, + { + "epoch": 1.13, + "grad_norm": 11.76573305409677, + "learning_rate": 1.4339162972551806e-05, + "loss": 0.5652, + "step": 7230 + }, + { + "epoch": 1.13, + "grad_norm": 17.957174920035747, + "learning_rate": 1.4337643544323124e-05, + "loss": 0.6785, + "step": 7231 + }, + { + "epoch": 1.13, + "grad_norm": 18.60017781909677, + "learning_rate": 1.4336123992734396e-05, + "loss": 0.6322, + "step": 7232 + }, + { + "epoch": 1.13, + "grad_norm": 28.529308365836304, + "learning_rate": 1.433460431782884e-05, + "loss": 0.7221, + "step": 7233 + }, + { + "epoch": 1.13, + "grad_norm": 19.9133651038106, + "learning_rate": 1.4333084519649671e-05, + "loss": 0.675, + "step": 7234 + }, + { + "epoch": 1.13, + "grad_norm": 13.231114544916153, + "learning_rate": 1.4331564598240112e-05, + "loss": 0.7332, + "step": 7235 + }, + { + "epoch": 1.13, + "grad_norm": 18.315115823194645, + "learning_rate": 1.4330044553643391e-05, + "loss": 0.7017, + "step": 7236 + }, + { + "epoch": 1.13, + "grad_norm": 32.258094438063104, + "learning_rate": 1.4328524385902738e-05, + "loss": 0.6993, + "step": 7237 + }, + { + "epoch": 1.13, + "grad_norm": 21.244949787649077, + "learning_rate": 1.432700409506138e-05, + "loss": 0.632, + "step": 7238 + }, + { + "epoch": 1.13, + "grad_norm": 19.06964998762685, + "learning_rate": 1.4325483681162562e-05, + "loss": 0.6341, + "step": 7239 + }, + { + "epoch": 1.13, + "grad_norm": 17.866102818706402, + "learning_rate": 1.4323963144249514e-05, + "loss": 0.6796, + "step": 7240 + }, + { + "epoch": 1.13, + "grad_norm": 15.021833442279746, + "learning_rate": 1.4322442484365486e-05, + "loss": 0.6807, + "step": 7241 + }, + { + "epoch": 1.13, + "grad_norm": 17.452541188727096, + "learning_rate": 1.432092170155372e-05, + "loss": 0.6833, + "step": 7242 + }, + { + "epoch": 1.13, + "grad_norm": 18.42122978030149, + "learning_rate": 1.4319400795857472e-05, + "loss": 0.6333, + "step": 7243 + }, + { + "epoch": 1.13, + "grad_norm": 21.716899455771948, + "learning_rate": 1.431787976731999e-05, + "loss": 0.6892, + "step": 7244 + }, + { + "epoch": 1.13, + "grad_norm": 19.069079270938037, + "learning_rate": 1.4316358615984532e-05, + "loss": 0.6718, + "step": 7245 + }, + { + "epoch": 1.13, + "grad_norm": 13.104257318416137, + "learning_rate": 1.4314837341894361e-05, + "loss": 0.7103, + "step": 7246 + }, + { + "epoch": 1.13, + "grad_norm": 16.881999526249366, + "learning_rate": 1.431331594509274e-05, + "loss": 0.6932, + "step": 7247 + }, + { + "epoch": 1.13, + "grad_norm": 16.72241974066135, + "learning_rate": 1.4311794425622937e-05, + "loss": 0.6597, + "step": 7248 + }, + { + "epoch": 1.13, + "grad_norm": 13.534884557157698, + "learning_rate": 1.4310272783528223e-05, + "loss": 0.6009, + "step": 7249 + }, + { + "epoch": 1.13, + "grad_norm": 25.739369021894525, + "learning_rate": 1.4308751018851872e-05, + "loss": 0.7261, + "step": 7250 + }, + { + "epoch": 1.13, + "grad_norm": 13.952255815194379, + "learning_rate": 1.4307229131637163e-05, + "loss": 0.5514, + "step": 7251 + }, + { + "epoch": 1.13, + "grad_norm": 26.721540954729356, + "learning_rate": 1.4305707121927377e-05, + "loss": 0.7217, + "step": 7252 + }, + { + "epoch": 1.13, + "grad_norm": 
14.57838382215206, + "learning_rate": 1.43041849897658e-05, + "loss": 0.6106, + "step": 7253 + }, + { + "epoch": 1.13, + "grad_norm": 25.813005878857766, + "learning_rate": 1.4302662735195717e-05, + "loss": 0.7072, + "step": 7254 + }, + { + "epoch": 1.13, + "grad_norm": 19.81141440722146, + "learning_rate": 1.4301140358260426e-05, + "loss": 0.5864, + "step": 7255 + }, + { + "epoch": 1.13, + "grad_norm": 23.280836446020455, + "learning_rate": 1.4299617859003218e-05, + "loss": 0.6065, + "step": 7256 + }, + { + "epoch": 1.13, + "grad_norm": 29.162200611807993, + "learning_rate": 1.4298095237467394e-05, + "loss": 0.7162, + "step": 7257 + }, + { + "epoch": 1.13, + "grad_norm": 23.580302761666395, + "learning_rate": 1.4296572493696255e-05, + "loss": 0.7014, + "step": 7258 + }, + { + "epoch": 1.13, + "grad_norm": 18.698055707703222, + "learning_rate": 1.4295049627733107e-05, + "loss": 0.7062, + "step": 7259 + }, + { + "epoch": 1.13, + "grad_norm": 22.704559497119337, + "learning_rate": 1.4293526639621262e-05, + "loss": 0.7014, + "step": 7260 + }, + { + "epoch": 1.13, + "grad_norm": 14.237680704039285, + "learning_rate": 1.4292003529404028e-05, + "loss": 0.6506, + "step": 7261 + }, + { + "epoch": 1.13, + "grad_norm": 20.000050231751302, + "learning_rate": 1.4290480297124726e-05, + "loss": 0.7142, + "step": 7262 + }, + { + "epoch": 1.13, + "grad_norm": 15.553125203381537, + "learning_rate": 1.4288956942826674e-05, + "loss": 0.6221, + "step": 7263 + }, + { + "epoch": 1.13, + "grad_norm": 15.705421841070356, + "learning_rate": 1.4287433466553198e-05, + "loss": 0.6185, + "step": 7264 + }, + { + "epoch": 1.13, + "grad_norm": 16.954092152866856, + "learning_rate": 1.4285909868347621e-05, + "loss": 0.6653, + "step": 7265 + }, + { + "epoch": 1.13, + "grad_norm": 25.50243836086091, + "learning_rate": 1.4284386148253273e-05, + "loss": 0.6396, + "step": 7266 + }, + { + "epoch": 1.14, + "grad_norm": 19.404806212376876, + "learning_rate": 1.4282862306313488e-05, + "loss": 0.6636, + "step": 7267 + }, + { + "epoch": 1.14, + "grad_norm": 26.71300850298545, + "learning_rate": 1.4281338342571609e-05, + "loss": 0.7065, + "step": 7268 + }, + { + "epoch": 1.14, + "grad_norm": 25.39621370050001, + "learning_rate": 1.4279814257070967e-05, + "loss": 0.6672, + "step": 7269 + }, + { + "epoch": 1.14, + "grad_norm": 26.62580982619853, + "learning_rate": 1.4278290049854917e-05, + "loss": 0.7053, + "step": 7270 + }, + { + "epoch": 1.14, + "grad_norm": 18.372850870615956, + "learning_rate": 1.4276765720966797e-05, + "loss": 0.7387, + "step": 7271 + }, + { + "epoch": 1.14, + "grad_norm": 26.858369018973544, + "learning_rate": 1.4275241270449962e-05, + "loss": 0.6253, + "step": 7272 + }, + { + "epoch": 1.14, + "grad_norm": 16.310326414065248, + "learning_rate": 1.4273716698347766e-05, + "loss": 0.6862, + "step": 7273 + }, + { + "epoch": 1.14, + "grad_norm": 14.1063625762404, + "learning_rate": 1.4272192004703569e-05, + "loss": 0.5218, + "step": 7274 + }, + { + "epoch": 1.14, + "grad_norm": 13.870113432159268, + "learning_rate": 1.4270667189560727e-05, + "loss": 0.5224, + "step": 7275 + }, + { + "epoch": 1.14, + "grad_norm": 19.723494026688144, + "learning_rate": 1.426914225296261e-05, + "loss": 0.6658, + "step": 7276 + }, + { + "epoch": 1.14, + "grad_norm": 17.30637815410105, + "learning_rate": 1.4267617194952588e-05, + "loss": 0.6535, + "step": 7277 + }, + { + "epoch": 1.14, + "grad_norm": 24.63036808573288, + "learning_rate": 1.4266092015574027e-05, + "loss": 0.707, + "step": 7278 + }, + { + "epoch": 1.14, + "grad_norm": 
17.63144918739665, + "learning_rate": 1.4264566714870301e-05, + "loss": 0.6415, + "step": 7279 + }, + { + "epoch": 1.14, + "grad_norm": 27.568212890235827, + "learning_rate": 1.4263041292884795e-05, + "loss": 0.643, + "step": 7280 + }, + { + "epoch": 1.14, + "grad_norm": 27.427697458550004, + "learning_rate": 1.426151574966089e-05, + "loss": 0.7036, + "step": 7281 + }, + { + "epoch": 1.14, + "grad_norm": 23.635511133263815, + "learning_rate": 1.425999008524197e-05, + "loss": 0.6852, + "step": 7282 + }, + { + "epoch": 1.14, + "grad_norm": 14.44841963596834, + "learning_rate": 1.4258464299671425e-05, + "loss": 0.6534, + "step": 7283 + }, + { + "epoch": 1.14, + "grad_norm": 23.55068954080406, + "learning_rate": 1.4256938392992641e-05, + "loss": 0.6783, + "step": 7284 + }, + { + "epoch": 1.14, + "grad_norm": 22.273292658109778, + "learning_rate": 1.4255412365249027e-05, + "loss": 0.6484, + "step": 7285 + }, + { + "epoch": 1.14, + "grad_norm": 21.905888167398263, + "learning_rate": 1.4253886216483968e-05, + "loss": 0.7714, + "step": 7286 + }, + { + "epoch": 1.14, + "grad_norm": 41.88415731093706, + "learning_rate": 1.4252359946740877e-05, + "loss": 0.7661, + "step": 7287 + }, + { + "epoch": 1.14, + "grad_norm": 17.98892526540278, + "learning_rate": 1.4250833556063155e-05, + "loss": 0.6841, + "step": 7288 + }, + { + "epoch": 1.14, + "grad_norm": 17.74233157436609, + "learning_rate": 1.4249307044494214e-05, + "loss": 0.7232, + "step": 7289 + }, + { + "epoch": 1.14, + "grad_norm": 28.876401321252764, + "learning_rate": 1.4247780412077466e-05, + "loss": 0.8119, + "step": 7290 + }, + { + "epoch": 1.14, + "grad_norm": 19.898143076431495, + "learning_rate": 1.4246253658856329e-05, + "loss": 0.669, + "step": 7291 + }, + { + "epoch": 1.14, + "grad_norm": 19.584322122119293, + "learning_rate": 1.4244726784874221e-05, + "loss": 0.609, + "step": 7292 + }, + { + "epoch": 1.14, + "grad_norm": 17.693950548453664, + "learning_rate": 1.4243199790174566e-05, + "loss": 0.742, + "step": 7293 + }, + { + "epoch": 1.14, + "grad_norm": 17.46314194949273, + "learning_rate": 1.4241672674800791e-05, + "loss": 0.6365, + "step": 7294 + }, + { + "epoch": 1.14, + "grad_norm": 19.011452578360043, + "learning_rate": 1.4240145438796329e-05, + "loss": 0.8068, + "step": 7295 + }, + { + "epoch": 1.14, + "grad_norm": 15.842204373113004, + "learning_rate": 1.4238618082204609e-05, + "loss": 0.7043, + "step": 7296 + }, + { + "epoch": 1.14, + "grad_norm": 20.18058377223884, + "learning_rate": 1.4237090605069072e-05, + "loss": 0.6929, + "step": 7297 + }, + { + "epoch": 1.14, + "grad_norm": 13.439286518547199, + "learning_rate": 1.4235563007433153e-05, + "loss": 0.6226, + "step": 7298 + }, + { + "epoch": 1.14, + "grad_norm": 19.97953463894396, + "learning_rate": 1.4234035289340304e-05, + "loss": 0.5827, + "step": 7299 + }, + { + "epoch": 1.14, + "grad_norm": 25.155805540601076, + "learning_rate": 1.4232507450833966e-05, + "loss": 0.6884, + "step": 7300 + }, + { + "epoch": 1.14, + "grad_norm": 15.895742233843588, + "learning_rate": 1.4230979491957592e-05, + "loss": 0.7232, + "step": 7301 + }, + { + "epoch": 1.14, + "grad_norm": 22.15858242606363, + "learning_rate": 1.4229451412754638e-05, + "loss": 0.6397, + "step": 7302 + }, + { + "epoch": 1.14, + "grad_norm": 24.3522504691592, + "learning_rate": 1.4227923213268557e-05, + "loss": 0.7252, + "step": 7303 + }, + { + "epoch": 1.14, + "grad_norm": 40.257306912484495, + "learning_rate": 1.4226394893542817e-05, + "loss": 0.7522, + "step": 7304 + }, + { + "epoch": 1.14, + "grad_norm": 
19.102134458095758, + "learning_rate": 1.4224866453620875e-05, + "loss": 0.6615, + "step": 7305 + }, + { + "epoch": 1.14, + "grad_norm": 18.660909969686745, + "learning_rate": 1.4223337893546206e-05, + "loss": 0.7609, + "step": 7306 + }, + { + "epoch": 1.14, + "grad_norm": 21.064511993637204, + "learning_rate": 1.4221809213362275e-05, + "loss": 0.7111, + "step": 7307 + }, + { + "epoch": 1.14, + "grad_norm": 17.592858773860158, + "learning_rate": 1.422028041311256e-05, + "loss": 0.6496, + "step": 7308 + }, + { + "epoch": 1.14, + "grad_norm": 17.298560536797847, + "learning_rate": 1.421875149284054e-05, + "loss": 0.7658, + "step": 7309 + }, + { + "epoch": 1.14, + "grad_norm": 17.27792632629873, + "learning_rate": 1.4217222452589694e-05, + "loss": 0.6584, + "step": 7310 + }, + { + "epoch": 1.14, + "grad_norm": 29.62450832952857, + "learning_rate": 1.4215693292403508e-05, + "loss": 0.6047, + "step": 7311 + }, + { + "epoch": 1.14, + "grad_norm": 18.945433246974968, + "learning_rate": 1.4214164012325475e-05, + "loss": 0.6657, + "step": 7312 + }, + { + "epoch": 1.14, + "grad_norm": 17.511089778686312, + "learning_rate": 1.421263461239908e-05, + "loss": 0.6407, + "step": 7313 + }, + { + "epoch": 1.14, + "grad_norm": 15.651154781658498, + "learning_rate": 1.421110509266782e-05, + "loss": 0.5592, + "step": 7314 + }, + { + "epoch": 1.14, + "grad_norm": 14.131092057490608, + "learning_rate": 1.4209575453175195e-05, + "loss": 0.5954, + "step": 7315 + }, + { + "epoch": 1.14, + "grad_norm": 16.539222297112726, + "learning_rate": 1.4208045693964707e-05, + "loss": 0.5758, + "step": 7316 + }, + { + "epoch": 1.14, + "grad_norm": 21.98470227391721, + "learning_rate": 1.4206515815079862e-05, + "loss": 0.7989, + "step": 7317 + }, + { + "epoch": 1.14, + "grad_norm": 21.938613006826344, + "learning_rate": 1.4204985816564167e-05, + "loss": 0.7022, + "step": 7318 + }, + { + "epoch": 1.14, + "grad_norm": 16.651870943641747, + "learning_rate": 1.4203455698461135e-05, + "loss": 0.6421, + "step": 7319 + }, + { + "epoch": 1.14, + "grad_norm": 27.187722911140124, + "learning_rate": 1.4201925460814282e-05, + "loss": 0.7116, + "step": 7320 + }, + { + "epoch": 1.14, + "grad_norm": 24.278937345022634, + "learning_rate": 1.4200395103667126e-05, + "loss": 0.6897, + "step": 7321 + }, + { + "epoch": 1.14, + "grad_norm": 19.002033966297134, + "learning_rate": 1.4198864627063194e-05, + "loss": 0.7879, + "step": 7322 + }, + { + "epoch": 1.14, + "grad_norm": 11.94732913452051, + "learning_rate": 1.4197334031046004e-05, + "loss": 0.5438, + "step": 7323 + }, + { + "epoch": 1.14, + "grad_norm": 17.17885160212382, + "learning_rate": 1.4195803315659092e-05, + "loss": 0.6195, + "step": 7324 + }, + { + "epoch": 1.14, + "grad_norm": 24.56809037171448, + "learning_rate": 1.4194272480945987e-05, + "loss": 0.6291, + "step": 7325 + }, + { + "epoch": 1.14, + "grad_norm": 18.22303539589026, + "learning_rate": 1.4192741526950226e-05, + "loss": 0.6927, + "step": 7326 + }, + { + "epoch": 1.14, + "grad_norm": 18.75937794270382, + "learning_rate": 1.4191210453715351e-05, + "loss": 0.6513, + "step": 7327 + }, + { + "epoch": 1.14, + "grad_norm": 20.299243211425, + "learning_rate": 1.4189679261284899e-05, + "loss": 0.699, + "step": 7328 + }, + { + "epoch": 1.14, + "grad_norm": 16.46224169867028, + "learning_rate": 1.4188147949702425e-05, + "loss": 0.6191, + "step": 7329 + }, + { + "epoch": 1.14, + "grad_norm": 19.030742795884645, + "learning_rate": 1.4186616519011467e-05, + "loss": 0.7157, + "step": 7330 + }, + { + "epoch": 1.15, + "grad_norm": 
17.007342884044387, + "learning_rate": 1.4185084969255589e-05, + "loss": 0.6542, + "step": 7331 + }, + { + "epoch": 1.15, + "grad_norm": 17.038201794405612, + "learning_rate": 1.4183553300478339e-05, + "loss": 0.643, + "step": 7332 + }, + { + "epoch": 1.15, + "grad_norm": 14.23321810154991, + "learning_rate": 1.4182021512723283e-05, + "loss": 0.6499, + "step": 7333 + }, + { + "epoch": 1.15, + "grad_norm": 18.29804193438696, + "learning_rate": 1.418048960603398e-05, + "loss": 0.7239, + "step": 7334 + }, + { + "epoch": 1.15, + "grad_norm": 18.06691928320673, + "learning_rate": 1.4178957580454e-05, + "loss": 0.7259, + "step": 7335 + }, + { + "epoch": 1.15, + "grad_norm": 19.185520240060747, + "learning_rate": 1.417742543602691e-05, + "loss": 0.6354, + "step": 7336 + }, + { + "epoch": 1.15, + "grad_norm": 18.189090704259083, + "learning_rate": 1.4175893172796285e-05, + "loss": 0.5948, + "step": 7337 + }, + { + "epoch": 1.15, + "grad_norm": 12.50046685807299, + "learning_rate": 1.4174360790805699e-05, + "loss": 0.7353, + "step": 7338 + }, + { + "epoch": 1.15, + "grad_norm": 18.518134735725692, + "learning_rate": 1.417282829009874e-05, + "loss": 0.5918, + "step": 7339 + }, + { + "epoch": 1.15, + "grad_norm": 17.761593482433817, + "learning_rate": 1.4171295670718976e-05, + "loss": 0.6593, + "step": 7340 + }, + { + "epoch": 1.15, + "grad_norm": 14.163263791284107, + "learning_rate": 1.4169762932710012e-05, + "loss": 0.6502, + "step": 7341 + }, + { + "epoch": 1.15, + "grad_norm": 14.779312368198045, + "learning_rate": 1.4168230076115424e-05, + "loss": 0.5405, + "step": 7342 + }, + { + "epoch": 1.15, + "grad_norm": 16.719819136330365, + "learning_rate": 1.4166697100978812e-05, + "loss": 0.6644, + "step": 7343 + }, + { + "epoch": 1.15, + "grad_norm": 18.919184132740725, + "learning_rate": 1.4165164007343772e-05, + "loss": 0.634, + "step": 7344 + }, + { + "epoch": 1.15, + "grad_norm": 16.712967475570874, + "learning_rate": 1.4163630795253904e-05, + "loss": 0.688, + "step": 7345 + }, + { + "epoch": 1.15, + "grad_norm": 16.3363748803825, + "learning_rate": 1.4162097464752815e-05, + "loss": 0.6943, + "step": 7346 + }, + { + "epoch": 1.15, + "grad_norm": 13.560537049527449, + "learning_rate": 1.4160564015884103e-05, + "loss": 0.5626, + "step": 7347 + }, + { + "epoch": 1.15, + "grad_norm": 15.223194824677954, + "learning_rate": 1.415903044869139e-05, + "loss": 0.6359, + "step": 7348 + }, + { + "epoch": 1.15, + "grad_norm": 11.69957061072878, + "learning_rate": 1.4157496763218278e-05, + "loss": 0.6065, + "step": 7349 + }, + { + "epoch": 1.15, + "grad_norm": 20.588844511724094, + "learning_rate": 1.4155962959508396e-05, + "loss": 0.7005, + "step": 7350 + }, + { + "epoch": 1.15, + "grad_norm": 20.55260639971917, + "learning_rate": 1.4154429037605358e-05, + "loss": 0.7367, + "step": 7351 + }, + { + "epoch": 1.15, + "grad_norm": 15.867184974711531, + "learning_rate": 1.4152894997552786e-05, + "loss": 0.636, + "step": 7352 + }, + { + "epoch": 1.15, + "grad_norm": 21.56582770342848, + "learning_rate": 1.415136083939431e-05, + "loss": 0.7422, + "step": 7353 + }, + { + "epoch": 1.15, + "grad_norm": 18.27299115539783, + "learning_rate": 1.414982656317356e-05, + "loss": 0.7147, + "step": 7354 + }, + { + "epoch": 1.15, + "grad_norm": 16.87205477638366, + "learning_rate": 1.414829216893417e-05, + "loss": 0.6581, + "step": 7355 + }, + { + "epoch": 1.15, + "grad_norm": 25.777476602169482, + "learning_rate": 1.4146757656719781e-05, + "loss": 0.622, + "step": 7356 + }, + { + "epoch": 1.15, + "grad_norm": 
20.911269385805255, + "learning_rate": 1.4145223026574027e-05, + "loss": 0.6752, + "step": 7357 + }, + { + "epoch": 1.15, + "grad_norm": 26.591560678767447, + "learning_rate": 1.4143688278540554e-05, + "loss": 0.6516, + "step": 7358 + }, + { + "epoch": 1.15, + "grad_norm": 27.284983232587212, + "learning_rate": 1.4142153412663012e-05, + "loss": 0.7673, + "step": 7359 + }, + { + "epoch": 1.15, + "grad_norm": 30.947000097337614, + "learning_rate": 1.414061842898505e-05, + "loss": 0.6534, + "step": 7360 + }, + { + "epoch": 1.15, + "grad_norm": 16.162060374390233, + "learning_rate": 1.413908332755032e-05, + "loss": 0.7408, + "step": 7361 + }, + { + "epoch": 1.15, + "grad_norm": 18.071875773411218, + "learning_rate": 1.4137548108402483e-05, + "loss": 0.607, + "step": 7362 + }, + { + "epoch": 1.15, + "grad_norm": 16.91990479229881, + "learning_rate": 1.41360127715852e-05, + "loss": 0.6592, + "step": 7363 + }, + { + "epoch": 1.15, + "grad_norm": 13.913176749864778, + "learning_rate": 1.4134477317142133e-05, + "loss": 0.7209, + "step": 7364 + }, + { + "epoch": 1.15, + "grad_norm": 22.581044234735447, + "learning_rate": 1.4132941745116946e-05, + "loss": 0.6888, + "step": 7365 + }, + { + "epoch": 1.15, + "grad_norm": 22.878128883946122, + "learning_rate": 1.4131406055553316e-05, + "loss": 0.7104, + "step": 7366 + }, + { + "epoch": 1.15, + "grad_norm": 15.10784189075606, + "learning_rate": 1.4129870248494913e-05, + "loss": 0.6707, + "step": 7367 + }, + { + "epoch": 1.15, + "grad_norm": 24.9504778036491, + "learning_rate": 1.4128334323985418e-05, + "loss": 0.7455, + "step": 7368 + }, + { + "epoch": 1.15, + "grad_norm": 12.608233045903027, + "learning_rate": 1.412679828206851e-05, + "loss": 0.6429, + "step": 7369 + }, + { + "epoch": 1.15, + "grad_norm": 34.300653763023575, + "learning_rate": 1.4125262122787872e-05, + "loss": 0.621, + "step": 7370 + }, + { + "epoch": 1.15, + "grad_norm": 19.980337563510073, + "learning_rate": 1.4123725846187193e-05, + "loss": 0.6928, + "step": 7371 + }, + { + "epoch": 1.15, + "grad_norm": 13.370839955994054, + "learning_rate": 1.412218945231016e-05, + "loss": 0.6911, + "step": 7372 + }, + { + "epoch": 1.15, + "grad_norm": 17.750375623657416, + "learning_rate": 1.4120652941200477e-05, + "loss": 0.6599, + "step": 7373 + }, + { + "epoch": 1.15, + "grad_norm": 17.144720114282386, + "learning_rate": 1.4119116312901828e-05, + "loss": 0.6486, + "step": 7374 + }, + { + "epoch": 1.15, + "grad_norm": 15.228050529146017, + "learning_rate": 1.4117579567457927e-05, + "loss": 0.5787, + "step": 7375 + }, + { + "epoch": 1.15, + "grad_norm": 16.31204141916774, + "learning_rate": 1.4116042704912465e-05, + "loss": 0.7116, + "step": 7376 + }, + { + "epoch": 1.15, + "grad_norm": 19.973238864326664, + "learning_rate": 1.411450572530916e-05, + "loss": 0.686, + "step": 7377 + }, + { + "epoch": 1.15, + "grad_norm": 13.533872217040669, + "learning_rate": 1.411296862869172e-05, + "loss": 0.6718, + "step": 7378 + }, + { + "epoch": 1.15, + "grad_norm": 24.361131588851002, + "learning_rate": 1.4111431415103858e-05, + "loss": 0.716, + "step": 7379 + }, + { + "epoch": 1.15, + "grad_norm": 21.509238595237658, + "learning_rate": 1.4109894084589291e-05, + "loss": 0.6428, + "step": 7380 + }, + { + "epoch": 1.15, + "grad_norm": 15.586743325361361, + "learning_rate": 1.410835663719174e-05, + "loss": 0.6645, + "step": 7381 + }, + { + "epoch": 1.15, + "grad_norm": 21.186221632749692, + "learning_rate": 1.410681907295493e-05, + "loss": 0.6604, + "step": 7382 + }, + { + "epoch": 1.15, + "grad_norm": 
15.939096133396044, + "learning_rate": 1.410528139192259e-05, + "loss": 0.7495, + "step": 7383 + }, + { + "epoch": 1.15, + "grad_norm": 22.933165789733653, + "learning_rate": 1.4103743594138443e-05, + "loss": 0.6805, + "step": 7384 + }, + { + "epoch": 1.15, + "grad_norm": 20.45622464524285, + "learning_rate": 1.4102205679646236e-05, + "loss": 0.7013, + "step": 7385 + }, + { + "epoch": 1.15, + "grad_norm": 15.055066577056435, + "learning_rate": 1.4100667648489692e-05, + "loss": 0.6227, + "step": 7386 + }, + { + "epoch": 1.15, + "grad_norm": 17.882454329212287, + "learning_rate": 1.4099129500712562e-05, + "loss": 0.5995, + "step": 7387 + }, + { + "epoch": 1.15, + "grad_norm": 24.79465860119624, + "learning_rate": 1.4097591236358588e-05, + "loss": 0.7247, + "step": 7388 + }, + { + "epoch": 1.15, + "grad_norm": 15.272643075951262, + "learning_rate": 1.4096052855471519e-05, + "loss": 0.6833, + "step": 7389 + }, + { + "epoch": 1.15, + "grad_norm": 17.842453650218186, + "learning_rate": 1.4094514358095096e-05, + "loss": 0.6822, + "step": 7390 + }, + { + "epoch": 1.15, + "grad_norm": 17.675001128036836, + "learning_rate": 1.4092975744273082e-05, + "loss": 0.5861, + "step": 7391 + }, + { + "epoch": 1.15, + "grad_norm": 20.759574079585438, + "learning_rate": 1.4091437014049234e-05, + "loss": 0.6755, + "step": 7392 + }, + { + "epoch": 1.15, + "grad_norm": 14.376489050078144, + "learning_rate": 1.408989816746731e-05, + "loss": 0.7123, + "step": 7393 + }, + { + "epoch": 1.15, + "grad_norm": 24.247837955897612, + "learning_rate": 1.4088359204571076e-05, + "loss": 0.7154, + "step": 7394 + }, + { + "epoch": 1.16, + "grad_norm": 15.001454248068562, + "learning_rate": 1.4086820125404299e-05, + "loss": 0.6979, + "step": 7395 + }, + { + "epoch": 1.16, + "grad_norm": 23.785621788868074, + "learning_rate": 1.4085280930010745e-05, + "loss": 0.6524, + "step": 7396 + }, + { + "epoch": 1.16, + "grad_norm": 15.620393958684955, + "learning_rate": 1.4083741618434192e-05, + "loss": 0.6626, + "step": 7397 + }, + { + "epoch": 1.16, + "grad_norm": 24.826052385045198, + "learning_rate": 1.4082202190718417e-05, + "loss": 0.6586, + "step": 7398 + }, + { + "epoch": 1.16, + "grad_norm": 22.273155315649465, + "learning_rate": 1.40806626469072e-05, + "loss": 0.6882, + "step": 7399 + }, + { + "epoch": 1.16, + "grad_norm": 19.282204565720075, + "learning_rate": 1.4079122987044324e-05, + "loss": 0.6899, + "step": 7400 + }, + { + "epoch": 1.16, + "grad_norm": 15.549876274179365, + "learning_rate": 1.4077583211173575e-05, + "loss": 0.717, + "step": 7401 + }, + { + "epoch": 1.16, + "grad_norm": 18.33697810834557, + "learning_rate": 1.4076043319338748e-05, + "loss": 0.5725, + "step": 7402 + }, + { + "epoch": 1.16, + "grad_norm": 16.315063533646104, + "learning_rate": 1.407450331158363e-05, + "loss": 0.7549, + "step": 7403 + }, + { + "epoch": 1.16, + "grad_norm": 14.744429739733686, + "learning_rate": 1.4072963187952023e-05, + "loss": 0.5895, + "step": 7404 + }, + { + "epoch": 1.16, + "grad_norm": 14.463991743229743, + "learning_rate": 1.4071422948487725e-05, + "loss": 0.6726, + "step": 7405 + }, + { + "epoch": 1.16, + "grad_norm": 15.788281130249198, + "learning_rate": 1.406988259323454e-05, + "loss": 0.6566, + "step": 7406 + }, + { + "epoch": 1.16, + "grad_norm": 23.20010528951628, + "learning_rate": 1.4068342122236275e-05, + "loss": 0.7284, + "step": 7407 + }, + { + "epoch": 1.16, + "grad_norm": 28.13856353601223, + "learning_rate": 1.406680153553674e-05, + "loss": 0.7362, + "step": 7408 + }, + { + "epoch": 1.16, + "grad_norm": 
13.419744186157724, + "learning_rate": 1.4065260833179748e-05, + "loss": 0.6689, + "step": 7409 + }, + { + "epoch": 1.16, + "grad_norm": 22.797216267413933, + "learning_rate": 1.4063720015209117e-05, + "loss": 0.6516, + "step": 7410 + }, + { + "epoch": 1.16, + "grad_norm": 20.152084730649868, + "learning_rate": 1.4062179081668665e-05, + "loss": 0.6049, + "step": 7411 + }, + { + "epoch": 1.16, + "grad_norm": 18.964435438638063, + "learning_rate": 1.4060638032602218e-05, + "loss": 0.6738, + "step": 7412 + }, + { + "epoch": 1.16, + "grad_norm": 19.80899551607031, + "learning_rate": 1.4059096868053601e-05, + "loss": 0.6962, + "step": 7413 + }, + { + "epoch": 1.16, + "grad_norm": 18.636531243613337, + "learning_rate": 1.405755558806664e-05, + "loss": 0.7195, + "step": 7414 + }, + { + "epoch": 1.16, + "grad_norm": 17.075681713180828, + "learning_rate": 1.4056014192685175e-05, + "loss": 0.6948, + "step": 7415 + }, + { + "epoch": 1.16, + "grad_norm": 16.800611271161323, + "learning_rate": 1.4054472681953035e-05, + "loss": 0.6461, + "step": 7416 + }, + { + "epoch": 1.16, + "grad_norm": 18.98961262095591, + "learning_rate": 1.405293105591407e-05, + "loss": 0.6318, + "step": 7417 + }, + { + "epoch": 1.16, + "grad_norm": 20.766827578629822, + "learning_rate": 1.4051389314612112e-05, + "loss": 0.6247, + "step": 7418 + }, + { + "epoch": 1.16, + "grad_norm": 16.22760036980565, + "learning_rate": 1.4049847458091014e-05, + "loss": 0.6329, + "step": 7419 + }, + { + "epoch": 1.16, + "grad_norm": 15.506367963971051, + "learning_rate": 1.404830548639462e-05, + "loss": 0.6172, + "step": 7420 + }, + { + "epoch": 1.16, + "grad_norm": 19.971184970853464, + "learning_rate": 1.404676339956679e-05, + "loss": 0.686, + "step": 7421 + }, + { + "epoch": 1.16, + "grad_norm": 18.00354369947476, + "learning_rate": 1.4045221197651375e-05, + "loss": 0.7335, + "step": 7422 + }, + { + "epoch": 1.16, + "grad_norm": 13.898873788162456, + "learning_rate": 1.4043678880692235e-05, + "loss": 0.5801, + "step": 7423 + }, + { + "epoch": 1.16, + "grad_norm": 23.175083200681893, + "learning_rate": 1.4042136448733235e-05, + "loss": 0.6682, + "step": 7424 + }, + { + "epoch": 1.16, + "grad_norm": 14.586331558829869, + "learning_rate": 1.4040593901818239e-05, + "loss": 0.6266, + "step": 7425 + }, + { + "epoch": 1.16, + "grad_norm": 17.76058851999795, + "learning_rate": 1.403905123999111e-05, + "loss": 0.6591, + "step": 7426 + }, + { + "epoch": 1.16, + "grad_norm": 15.896263202572664, + "learning_rate": 1.4037508463295735e-05, + "loss": 0.5713, + "step": 7427 + }, + { + "epoch": 1.16, + "grad_norm": 19.76547623133555, + "learning_rate": 1.4035965571775976e-05, + "loss": 0.6287, + "step": 7428 + }, + { + "epoch": 1.16, + "grad_norm": 15.94958560604843, + "learning_rate": 1.403442256547572e-05, + "loss": 0.6983, + "step": 7429 + }, + { + "epoch": 1.16, + "grad_norm": 15.673931826429664, + "learning_rate": 1.4032879444438843e-05, + "loss": 0.6669, + "step": 7430 + }, + { + "epoch": 1.16, + "grad_norm": 23.755173992948183, + "learning_rate": 1.4031336208709236e-05, + "loss": 0.6923, + "step": 7431 + }, + { + "epoch": 1.16, + "grad_norm": 16.853443592981684, + "learning_rate": 1.4029792858330783e-05, + "loss": 0.7087, + "step": 7432 + }, + { + "epoch": 1.16, + "grad_norm": 18.04611829145561, + "learning_rate": 1.4028249393347382e-05, + "loss": 0.605, + "step": 7433 + }, + { + "epoch": 1.16, + "grad_norm": 14.575987522186704, + "learning_rate": 1.4026705813802923e-05, + "loss": 0.6012, + "step": 7434 + }, + { + "epoch": 1.16, + "grad_norm": 
18.225078286878, + "learning_rate": 1.4025162119741304e-05, + "loss": 0.5948, + "step": 7435 + }, + { + "epoch": 1.16, + "grad_norm": 20.87707332705314, + "learning_rate": 1.4023618311206432e-05, + "loss": 0.7036, + "step": 7436 + }, + { + "epoch": 1.16, + "grad_norm": 25.970216250655813, + "learning_rate": 1.4022074388242208e-05, + "loss": 0.7093, + "step": 7437 + }, + { + "epoch": 1.16, + "grad_norm": 20.026213796162736, + "learning_rate": 1.4020530350892542e-05, + "loss": 0.7042, + "step": 7438 + }, + { + "epoch": 1.16, + "grad_norm": 19.980603976272196, + "learning_rate": 1.4018986199201345e-05, + "loss": 0.761, + "step": 7439 + }, + { + "epoch": 1.16, + "grad_norm": 24.443586857918696, + "learning_rate": 1.4017441933212532e-05, + "loss": 0.6455, + "step": 7440 + }, + { + "epoch": 1.16, + "grad_norm": 18.55953781980156, + "learning_rate": 1.4015897552970017e-05, + "loss": 0.6394, + "step": 7441 + }, + { + "epoch": 1.16, + "grad_norm": 23.26280079205925, + "learning_rate": 1.401435305851773e-05, + "loss": 0.6492, + "step": 7442 + }, + { + "epoch": 1.16, + "grad_norm": 15.57450492333809, + "learning_rate": 1.4012808449899584e-05, + "loss": 0.6636, + "step": 7443 + }, + { + "epoch": 1.16, + "grad_norm": 25.879526449869036, + "learning_rate": 1.4011263727159521e-05, + "loss": 0.6698, + "step": 7444 + }, + { + "epoch": 1.16, + "grad_norm": 14.09169694808246, + "learning_rate": 1.4009718890341457e-05, + "loss": 0.6293, + "step": 7445 + }, + { + "epoch": 1.16, + "grad_norm": 28.297327064344987, + "learning_rate": 1.4008173939489338e-05, + "loss": 0.6776, + "step": 7446 + }, + { + "epoch": 1.16, + "grad_norm": 18.47554753763949, + "learning_rate": 1.4006628874647094e-05, + "loss": 0.6542, + "step": 7447 + }, + { + "epoch": 1.16, + "grad_norm": 15.084212500370837, + "learning_rate": 1.400508369585867e-05, + "loss": 0.6238, + "step": 7448 + }, + { + "epoch": 1.16, + "grad_norm": 21.652779760758925, + "learning_rate": 1.400353840316801e-05, + "loss": 0.7362, + "step": 7449 + }, + { + "epoch": 1.16, + "grad_norm": 19.954309514971452, + "learning_rate": 1.4001992996619056e-05, + "loss": 0.6846, + "step": 7450 + }, + { + "epoch": 1.16, + "grad_norm": 19.143083624679395, + "learning_rate": 1.4000447476255765e-05, + "loss": 0.7219, + "step": 7451 + }, + { + "epoch": 1.16, + "grad_norm": 17.73512113971914, + "learning_rate": 1.3998901842122088e-05, + "loss": 0.6412, + "step": 7452 + }, + { + "epoch": 1.16, + "grad_norm": 14.6357970465466, + "learning_rate": 1.3997356094261977e-05, + "loss": 0.6264, + "step": 7453 + }, + { + "epoch": 1.16, + "grad_norm": 23.022154026352275, + "learning_rate": 1.3995810232719405e-05, + "loss": 0.6172, + "step": 7454 + }, + { + "epoch": 1.16, + "grad_norm": 20.373682419762957, + "learning_rate": 1.3994264257538324e-05, + "loss": 0.696, + "step": 7455 + }, + { + "epoch": 1.16, + "grad_norm": 22.068361951330044, + "learning_rate": 1.3992718168762702e-05, + "loss": 0.5801, + "step": 7456 + }, + { + "epoch": 1.16, + "grad_norm": 18.87333762075109, + "learning_rate": 1.3991171966436513e-05, + "loss": 0.7128, + "step": 7457 + }, + { + "epoch": 1.16, + "grad_norm": 11.790901233762101, + "learning_rate": 1.3989625650603729e-05, + "loss": 0.5648, + "step": 7458 + }, + { + "epoch": 1.17, + "grad_norm": 20.56375106814535, + "learning_rate": 1.3988079221308323e-05, + "loss": 0.619, + "step": 7459 + }, + { + "epoch": 1.17, + "grad_norm": 18.73698580098692, + "learning_rate": 1.3986532678594277e-05, + "loss": 0.6288, + "step": 7460 + }, + { + "epoch": 1.17, + "grad_norm": 
21.035665876046952, + "learning_rate": 1.3984986022505578e-05, + "loss": 0.7057, + "step": 7461 + }, + { + "epoch": 1.17, + "grad_norm": 17.590239793799036, + "learning_rate": 1.3983439253086201e-05, + "loss": 0.6575, + "step": 7462 + }, + { + "epoch": 1.17, + "grad_norm": 15.885276763696012, + "learning_rate": 1.3981892370380146e-05, + "loss": 0.6823, + "step": 7463 + }, + { + "epoch": 1.17, + "grad_norm": 22.377524375366338, + "learning_rate": 1.3980345374431401e-05, + "loss": 0.7452, + "step": 7464 + }, + { + "epoch": 1.17, + "grad_norm": 18.54329056116013, + "learning_rate": 1.3978798265283962e-05, + "loss": 0.6429, + "step": 7465 + }, + { + "epoch": 1.17, + "grad_norm": 25.903684229042685, + "learning_rate": 1.3977251042981828e-05, + "loss": 0.7628, + "step": 7466 + }, + { + "epoch": 1.17, + "grad_norm": 12.999764541180905, + "learning_rate": 1.3975703707569e-05, + "loss": 0.6401, + "step": 7467 + }, + { + "epoch": 1.17, + "grad_norm": 21.223170027261705, + "learning_rate": 1.3974156259089486e-05, + "loss": 0.7002, + "step": 7468 + }, + { + "epoch": 1.17, + "grad_norm": 13.750197318135148, + "learning_rate": 1.3972608697587292e-05, + "loss": 0.6167, + "step": 7469 + }, + { + "epoch": 1.17, + "grad_norm": 18.489287971411944, + "learning_rate": 1.3971061023106428e-05, + "loss": 0.6141, + "step": 7470 + }, + { + "epoch": 1.17, + "grad_norm": 19.059324109406212, + "learning_rate": 1.3969513235690915e-05, + "loss": 0.6861, + "step": 7471 + }, + { + "epoch": 1.17, + "grad_norm": 23.256802116335816, + "learning_rate": 1.3967965335384766e-05, + "loss": 0.6624, + "step": 7472 + }, + { + "epoch": 1.17, + "grad_norm": 15.706032848147292, + "learning_rate": 1.3966417322232006e-05, + "loss": 0.6457, + "step": 7473 + }, + { + "epoch": 1.17, + "grad_norm": 17.277762907093944, + "learning_rate": 1.3964869196276654e-05, + "loss": 0.659, + "step": 7474 + }, + { + "epoch": 1.17, + "grad_norm": 20.595718087145244, + "learning_rate": 1.3963320957562743e-05, + "loss": 0.7015, + "step": 7475 + }, + { + "epoch": 1.17, + "grad_norm": 20.606199690884935, + "learning_rate": 1.3961772606134303e-05, + "loss": 0.6491, + "step": 7476 + }, + { + "epoch": 1.17, + "grad_norm": 17.200989268409902, + "learning_rate": 1.3960224142035368e-05, + "loss": 0.6213, + "step": 7477 + }, + { + "epoch": 1.17, + "grad_norm": 21.51283058533687, + "learning_rate": 1.3958675565309974e-05, + "loss": 0.6469, + "step": 7478 + }, + { + "epoch": 1.17, + "grad_norm": 16.902464307051382, + "learning_rate": 1.3957126876002164e-05, + "loss": 0.7062, + "step": 7479 + }, + { + "epoch": 1.17, + "grad_norm": 20.246684168649182, + "learning_rate": 1.3955578074155978e-05, + "loss": 0.6201, + "step": 7480 + }, + { + "epoch": 1.17, + "grad_norm": 14.684449043338914, + "learning_rate": 1.3954029159815468e-05, + "loss": 0.5924, + "step": 7481 + }, + { + "epoch": 1.17, + "grad_norm": 12.218361384951006, + "learning_rate": 1.3952480133024682e-05, + "loss": 0.6665, + "step": 7482 + }, + { + "epoch": 1.17, + "grad_norm": 23.73341601683904, + "learning_rate": 1.3950930993827671e-05, + "loss": 0.6646, + "step": 7483 + }, + { + "epoch": 1.17, + "grad_norm": 12.65514275334491, + "learning_rate": 1.3949381742268496e-05, + "loss": 0.6029, + "step": 7484 + }, + { + "epoch": 1.17, + "grad_norm": 15.227009257813034, + "learning_rate": 1.394783237839121e-05, + "loss": 0.5666, + "step": 7485 + }, + { + "epoch": 1.17, + "grad_norm": 23.874543880928986, + "learning_rate": 1.3946282902239886e-05, + "loss": 0.6344, + "step": 7486 + }, + { + "epoch": 1.17, + "grad_norm": 
18.48261663396154, + "learning_rate": 1.3944733313858583e-05, + "loss": 0.7229, + "step": 7487 + }, + { + "epoch": 1.17, + "grad_norm": 21.218143652509674, + "learning_rate": 1.3943183613291374e-05, + "loss": 0.73, + "step": 7488 + }, + { + "epoch": 1.17, + "grad_norm": 18.645593324817906, + "learning_rate": 1.3941633800582325e-05, + "loss": 0.7254, + "step": 7489 + }, + { + "epoch": 1.17, + "grad_norm": 19.409587059496285, + "learning_rate": 1.394008387577552e-05, + "loss": 0.7066, + "step": 7490 + }, + { + "epoch": 1.17, + "grad_norm": 16.0758242745071, + "learning_rate": 1.3938533838915033e-05, + "loss": 0.6805, + "step": 7491 + }, + { + "epoch": 1.17, + "grad_norm": 20.481029815308045, + "learning_rate": 1.3936983690044949e-05, + "loss": 0.6164, + "step": 7492 + }, + { + "epoch": 1.17, + "grad_norm": 15.771087089235197, + "learning_rate": 1.3935433429209352e-05, + "loss": 0.6318, + "step": 7493 + }, + { + "epoch": 1.17, + "grad_norm": 20.97081639388146, + "learning_rate": 1.3933883056452331e-05, + "loss": 0.7006, + "step": 7494 + }, + { + "epoch": 1.17, + "grad_norm": 27.4972055204725, + "learning_rate": 1.3932332571817975e-05, + "loss": 0.6624, + "step": 7495 + }, + { + "epoch": 1.17, + "grad_norm": 24.643876100745285, + "learning_rate": 1.3930781975350382e-05, + "loss": 0.6949, + "step": 7496 + }, + { + "epoch": 1.17, + "grad_norm": 20.89933613779166, + "learning_rate": 1.3929231267093646e-05, + "loss": 0.7052, + "step": 7497 + }, + { + "epoch": 1.17, + "grad_norm": 12.711042071980877, + "learning_rate": 1.3927680447091876e-05, + "loss": 0.811, + "step": 7498 + }, + { + "epoch": 1.17, + "grad_norm": 10.793785704220694, + "learning_rate": 1.3926129515389168e-05, + "loss": 0.5723, + "step": 7499 + }, + { + "epoch": 1.17, + "grad_norm": 24.612089721566733, + "learning_rate": 1.3924578472029637e-05, + "loss": 0.6871, + "step": 7500 + }, + { + "epoch": 1.17, + "grad_norm": 14.881192241790526, + "learning_rate": 1.3923027317057388e-05, + "loss": 0.7151, + "step": 7501 + }, + { + "epoch": 1.17, + "grad_norm": 32.59567292101353, + "learning_rate": 1.3921476050516538e-05, + "loss": 0.7365, + "step": 7502 + }, + { + "epoch": 1.17, + "grad_norm": 14.47455473428712, + "learning_rate": 1.3919924672451201e-05, + "loss": 0.7011, + "step": 7503 + }, + { + "epoch": 1.17, + "grad_norm": 16.764728500842555, + "learning_rate": 1.3918373182905501e-05, + "loss": 0.6463, + "step": 7504 + }, + { + "epoch": 1.17, + "grad_norm": 20.85860007557551, + "learning_rate": 1.391682158192356e-05, + "loss": 0.7198, + "step": 7505 + }, + { + "epoch": 1.17, + "grad_norm": 18.907522179228543, + "learning_rate": 1.3915269869549504e-05, + "loss": 0.6414, + "step": 7506 + }, + { + "epoch": 1.17, + "grad_norm": 15.510044998498573, + "learning_rate": 1.3913718045827462e-05, + "loss": 0.6663, + "step": 7507 + }, + { + "epoch": 1.17, + "grad_norm": 27.59371658033721, + "learning_rate": 1.391216611080157e-05, + "loss": 0.6849, + "step": 7508 + }, + { + "epoch": 1.17, + "grad_norm": 13.287951147669038, + "learning_rate": 1.3910614064515964e-05, + "loss": 0.6739, + "step": 7509 + }, + { + "epoch": 1.17, + "grad_norm": 23.478886626050734, + "learning_rate": 1.3909061907014781e-05, + "loss": 0.6944, + "step": 7510 + }, + { + "epoch": 1.17, + "grad_norm": 14.072511998812043, + "learning_rate": 1.390750963834216e-05, + "loss": 0.5905, + "step": 7511 + }, + { + "epoch": 1.17, + "grad_norm": 42.05425884238126, + "learning_rate": 1.3905957258542253e-05, + "loss": 0.7121, + "step": 7512 + }, + { + "epoch": 1.17, + "grad_norm": 
16.01832958738399, + "learning_rate": 1.390440476765921e-05, + "loss": 0.6673, + "step": 7513 + }, + { + "epoch": 1.17, + "grad_norm": 16.41394322718299, + "learning_rate": 1.3902852165737172e-05, + "loss": 0.683, + "step": 7514 + }, + { + "epoch": 1.17, + "grad_norm": 15.456126261052685, + "learning_rate": 1.390129945282031e-05, + "loss": 0.678, + "step": 7515 + }, + { + "epoch": 1.17, + "grad_norm": 19.400980989672785, + "learning_rate": 1.3899746628952766e-05, + "loss": 0.7271, + "step": 7516 + }, + { + "epoch": 1.17, + "grad_norm": 21.431509641391173, + "learning_rate": 1.3898193694178714e-05, + "loss": 0.7039, + "step": 7517 + }, + { + "epoch": 1.17, + "grad_norm": 24.610124583257402, + "learning_rate": 1.3896640648542312e-05, + "loss": 0.6537, + "step": 7518 + }, + { + "epoch": 1.17, + "grad_norm": 17.39909699504327, + "learning_rate": 1.389508749208773e-05, + "loss": 0.7293, + "step": 7519 + }, + { + "epoch": 1.17, + "grad_norm": 16.21891755594496, + "learning_rate": 1.389353422485914e-05, + "loss": 0.6648, + "step": 7520 + }, + { + "epoch": 1.17, + "grad_norm": 23.923362457703828, + "learning_rate": 1.3891980846900712e-05, + "loss": 0.7032, + "step": 7521 + }, + { + "epoch": 1.17, + "grad_norm": 19.66710044209375, + "learning_rate": 1.3890427358256626e-05, + "loss": 0.6923, + "step": 7522 + }, + { + "epoch": 1.18, + "grad_norm": 13.958698425630674, + "learning_rate": 1.388887375897106e-05, + "loss": 0.6708, + "step": 7523 + }, + { + "epoch": 1.18, + "grad_norm": 20.38599063342712, + "learning_rate": 1.3887320049088202e-05, + "loss": 0.6418, + "step": 7524 + }, + { + "epoch": 1.18, + "grad_norm": 13.83205121493252, + "learning_rate": 1.3885766228652235e-05, + "loss": 0.598, + "step": 7525 + }, + { + "epoch": 1.18, + "grad_norm": 19.79574681968676, + "learning_rate": 1.3884212297707349e-05, + "loss": 0.6237, + "step": 7526 + }, + { + "epoch": 1.18, + "grad_norm": 15.677812889038439, + "learning_rate": 1.3882658256297738e-05, + "loss": 0.7044, + "step": 7527 + }, + { + "epoch": 1.18, + "grad_norm": 14.830279366624833, + "learning_rate": 1.3881104104467598e-05, + "loss": 0.6114, + "step": 7528 + }, + { + "epoch": 1.18, + "grad_norm": 15.913860664342462, + "learning_rate": 1.3879549842261123e-05, + "loss": 0.6359, + "step": 7529 + }, + { + "epoch": 1.18, + "grad_norm": 13.892387249923937, + "learning_rate": 1.3877995469722527e-05, + "loss": 0.6188, + "step": 7530 + }, + { + "epoch": 1.18, + "grad_norm": 16.58478861911699, + "learning_rate": 1.3876440986896004e-05, + "loss": 0.6768, + "step": 7531 + }, + { + "epoch": 1.18, + "grad_norm": 24.869863317629406, + "learning_rate": 1.387488639382577e-05, + "loss": 0.7038, + "step": 7532 + }, + { + "epoch": 1.18, + "grad_norm": 30.157638454592195, + "learning_rate": 1.387333169055603e-05, + "loss": 0.6805, + "step": 7533 + }, + { + "epoch": 1.18, + "grad_norm": 21.382775782984638, + "learning_rate": 1.3871776877131007e-05, + "loss": 0.7045, + "step": 7534 + }, + { + "epoch": 1.18, + "grad_norm": 17.424394742648765, + "learning_rate": 1.3870221953594912e-05, + "loss": 0.6368, + "step": 7535 + }, + { + "epoch": 1.18, + "grad_norm": 21.88825740236287, + "learning_rate": 1.3868666919991966e-05, + "loss": 0.6372, + "step": 7536 + }, + { + "epoch": 1.18, + "grad_norm": 16.92365602822914, + "learning_rate": 1.38671117763664e-05, + "loss": 0.6643, + "step": 7537 + }, + { + "epoch": 1.18, + "grad_norm": 16.729266756545332, + "learning_rate": 1.3865556522762438e-05, + "loss": 0.679, + "step": 7538 + }, + { + "epoch": 1.18, + "grad_norm": 
13.845997522974661, + "learning_rate": 1.3864001159224306e-05, + "loss": 0.6784, + "step": 7539 + }, + { + "epoch": 1.18, + "grad_norm": 27.252948094053966, + "learning_rate": 1.3862445685796244e-05, + "loss": 0.6914, + "step": 7540 + }, + { + "epoch": 1.18, + "grad_norm": 26.367243305288532, + "learning_rate": 1.3860890102522482e-05, + "loss": 0.7281, + "step": 7541 + }, + { + "epoch": 1.18, + "grad_norm": 15.747856897861515, + "learning_rate": 1.385933440944727e-05, + "loss": 0.6155, + "step": 7542 + }, + { + "epoch": 1.18, + "grad_norm": 21.461379410839402, + "learning_rate": 1.385777860661484e-05, + "loss": 0.6093, + "step": 7543 + }, + { + "epoch": 1.18, + "grad_norm": 20.55451364925784, + "learning_rate": 1.3856222694069446e-05, + "loss": 0.6032, + "step": 7544 + }, + { + "epoch": 1.18, + "grad_norm": 26.457505973072525, + "learning_rate": 1.3854666671855334e-05, + "loss": 0.624, + "step": 7545 + }, + { + "epoch": 1.18, + "grad_norm": 15.36800395976475, + "learning_rate": 1.3853110540016759e-05, + "loss": 0.649, + "step": 7546 + }, + { + "epoch": 1.18, + "grad_norm": 20.234854057709228, + "learning_rate": 1.3851554298597971e-05, + "loss": 0.7901, + "step": 7547 + }, + { + "epoch": 1.18, + "grad_norm": 23.349697126593004, + "learning_rate": 1.384999794764323e-05, + "loss": 0.6993, + "step": 7548 + }, + { + "epoch": 1.18, + "grad_norm": 36.34435634976441, + "learning_rate": 1.3848441487196804e-05, + "loss": 0.7236, + "step": 7549 + }, + { + "epoch": 1.18, + "grad_norm": 14.937564966427612, + "learning_rate": 1.3846884917302952e-05, + "loss": 0.6597, + "step": 7550 + }, + { + "epoch": 1.18, + "grad_norm": 19.575933954124604, + "learning_rate": 1.3845328238005942e-05, + "loss": 0.7012, + "step": 7551 + }, + { + "epoch": 1.18, + "grad_norm": 28.011320616404007, + "learning_rate": 1.3843771449350047e-05, + "loss": 0.7467, + "step": 7552 + }, + { + "epoch": 1.18, + "grad_norm": 14.13286322652592, + "learning_rate": 1.3842214551379542e-05, + "loss": 0.6014, + "step": 7553 + }, + { + "epoch": 1.18, + "grad_norm": 25.82566865028165, + "learning_rate": 1.3840657544138702e-05, + "loss": 0.7456, + "step": 7554 + }, + { + "epoch": 1.18, + "grad_norm": 13.244024722615029, + "learning_rate": 1.3839100427671808e-05, + "loss": 0.6363, + "step": 7555 + }, + { + "epoch": 1.18, + "grad_norm": 13.811069034658267, + "learning_rate": 1.3837543202023143e-05, + "loss": 0.6291, + "step": 7556 + }, + { + "epoch": 1.18, + "grad_norm": 15.160090923291442, + "learning_rate": 1.3835985867236995e-05, + "loss": 0.652, + "step": 7557 + }, + { + "epoch": 1.18, + "grad_norm": 15.11189345926525, + "learning_rate": 1.383442842335765e-05, + "loss": 0.6006, + "step": 7558 + }, + { + "epoch": 1.18, + "grad_norm": 26.496299290692853, + "learning_rate": 1.383287087042941e-05, + "loss": 0.7171, + "step": 7559 + }, + { + "epoch": 1.18, + "grad_norm": 13.572744585304314, + "learning_rate": 1.383131320849656e-05, + "loss": 0.5934, + "step": 7560 + }, + { + "epoch": 1.18, + "grad_norm": 16.945321114284212, + "learning_rate": 1.3829755437603405e-05, + "loss": 0.6502, + "step": 7561 + }, + { + "epoch": 1.18, + "grad_norm": 21.953599670817265, + "learning_rate": 1.3828197557794245e-05, + "loss": 0.6227, + "step": 7562 + }, + { + "epoch": 1.18, + "grad_norm": 16.30852525159876, + "learning_rate": 1.3826639569113384e-05, + "loss": 0.5657, + "step": 7563 + }, + { + "epoch": 1.18, + "grad_norm": 27.66963524803588, + "learning_rate": 1.3825081471605137e-05, + "loss": 0.6587, + "step": 7564 + }, + { + "epoch": 1.18, + "grad_norm": 
14.762473263502336, + "learning_rate": 1.3823523265313808e-05, + "loss": 0.6205, + "step": 7565 + }, + { + "epoch": 1.18, + "grad_norm": 15.477765238289495, + "learning_rate": 1.3821964950283713e-05, + "loss": 0.5983, + "step": 7566 + }, + { + "epoch": 1.18, + "grad_norm": 14.924384916632853, + "learning_rate": 1.3820406526559169e-05, + "loss": 0.6099, + "step": 7567 + }, + { + "epoch": 1.18, + "grad_norm": 16.781352330984465, + "learning_rate": 1.38188479941845e-05, + "loss": 0.6457, + "step": 7568 + }, + { + "epoch": 1.18, + "grad_norm": 27.584528475708602, + "learning_rate": 1.381728935320403e-05, + "loss": 0.6183, + "step": 7569 + }, + { + "epoch": 1.18, + "grad_norm": 25.564806360203946, + "learning_rate": 1.3815730603662083e-05, + "loss": 0.6289, + "step": 7570 + }, + { + "epoch": 1.18, + "grad_norm": 20.089715600091765, + "learning_rate": 1.3814171745602991e-05, + "loss": 0.6958, + "step": 7571 + }, + { + "epoch": 1.18, + "grad_norm": 23.6109904551384, + "learning_rate": 1.3812612779071084e-05, + "loss": 0.7949, + "step": 7572 + }, + { + "epoch": 1.18, + "grad_norm": 21.26732133080964, + "learning_rate": 1.3811053704110697e-05, + "loss": 0.6563, + "step": 7573 + }, + { + "epoch": 1.18, + "grad_norm": 19.909600846430628, + "learning_rate": 1.3809494520766179e-05, + "loss": 0.5822, + "step": 7574 + }, + { + "epoch": 1.18, + "grad_norm": 17.463115556294614, + "learning_rate": 1.3807935229081859e-05, + "loss": 0.6293, + "step": 7575 + }, + { + "epoch": 1.18, + "grad_norm": 16.67937550398115, + "learning_rate": 1.3806375829102092e-05, + "loss": 0.6278, + "step": 7576 + }, + { + "epoch": 1.18, + "grad_norm": 17.35971127037589, + "learning_rate": 1.3804816320871221e-05, + "loss": 0.6333, + "step": 7577 + }, + { + "epoch": 1.18, + "grad_norm": 16.773158144834433, + "learning_rate": 1.38032567044336e-05, + "loss": 0.6524, + "step": 7578 + }, + { + "epoch": 1.18, + "grad_norm": 31.53549174216908, + "learning_rate": 1.3801696979833586e-05, + "loss": 0.7427, + "step": 7579 + }, + { + "epoch": 1.18, + "grad_norm": 14.63057206910241, + "learning_rate": 1.3800137147115531e-05, + "loss": 0.7508, + "step": 7580 + }, + { + "epoch": 1.18, + "grad_norm": 19.8101798712467, + "learning_rate": 1.37985772063238e-05, + "loss": 0.7527, + "step": 7581 + }, + { + "epoch": 1.18, + "grad_norm": 23.001134149949873, + "learning_rate": 1.3797017157502754e-05, + "loss": 0.6924, + "step": 7582 + }, + { + "epoch": 1.18, + "grad_norm": 31.23867863927879, + "learning_rate": 1.3795457000696762e-05, + "loss": 0.6913, + "step": 7583 + }, + { + "epoch": 1.18, + "grad_norm": 23.772363631571118, + "learning_rate": 1.3793896735950195e-05, + "loss": 0.6638, + "step": 7584 + }, + { + "epoch": 1.18, + "grad_norm": 13.612161568413985, + "learning_rate": 1.3792336363307419e-05, + "loss": 0.6242, + "step": 7585 + }, + { + "epoch": 1.18, + "grad_norm": 19.39163862709829, + "learning_rate": 1.3790775882812818e-05, + "loss": 0.7153, + "step": 7586 + }, + { + "epoch": 1.19, + "grad_norm": 28.381249904275546, + "learning_rate": 1.378921529451077e-05, + "loss": 0.7509, + "step": 7587 + }, + { + "epoch": 1.19, + "grad_norm": 16.591754432341034, + "learning_rate": 1.3787654598445656e-05, + "loss": 0.777, + "step": 7588 + }, + { + "epoch": 1.19, + "grad_norm": 20.40724545142196, + "learning_rate": 1.3786093794661858e-05, + "loss": 0.6377, + "step": 7589 + }, + { + "epoch": 1.19, + "grad_norm": 16.488630621416277, + "learning_rate": 1.3784532883203769e-05, + "loss": 0.7281, + "step": 7590 + }, + { + "epoch": 1.19, + "grad_norm": 
26.80473266926859, + "learning_rate": 1.378297186411578e-05, + "loss": 0.6796, + "step": 7591 + }, + { + "epoch": 1.19, + "grad_norm": 22.218804443536794, + "learning_rate": 1.378141073744228e-05, + "loss": 0.7338, + "step": 7592 + }, + { + "epoch": 1.19, + "grad_norm": 20.18899216318968, + "learning_rate": 1.3779849503227675e-05, + "loss": 0.5817, + "step": 7593 + }, + { + "epoch": 1.19, + "grad_norm": 20.51721928104307, + "learning_rate": 1.377828816151636e-05, + "loss": 0.7058, + "step": 7594 + }, + { + "epoch": 1.19, + "grad_norm": 19.66716454915918, + "learning_rate": 1.377672671235274e-05, + "loss": 0.7502, + "step": 7595 + }, + { + "epoch": 1.19, + "grad_norm": 17.583078490297684, + "learning_rate": 1.377516515578122e-05, + "loss": 0.6846, + "step": 7596 + }, + { + "epoch": 1.19, + "grad_norm": 39.52117862531305, + "learning_rate": 1.3773603491846213e-05, + "loss": 0.6456, + "step": 7597 + }, + { + "epoch": 1.19, + "grad_norm": 25.83714727863196, + "learning_rate": 1.3772041720592131e-05, + "loss": 0.719, + "step": 7598 + }, + { + "epoch": 1.19, + "grad_norm": 16.26837292379662, + "learning_rate": 1.3770479842063387e-05, + "loss": 0.5968, + "step": 7599 + }, + { + "epoch": 1.19, + "grad_norm": 14.494083477936199, + "learning_rate": 1.37689178563044e-05, + "loss": 0.6826, + "step": 7600 + }, + { + "epoch": 1.19, + "grad_norm": 16.190142649637103, + "learning_rate": 1.37673557633596e-05, + "loss": 0.7216, + "step": 7601 + }, + { + "epoch": 1.19, + "grad_norm": 21.799027146975, + "learning_rate": 1.3765793563273402e-05, + "loss": 0.6388, + "step": 7602 + }, + { + "epoch": 1.19, + "grad_norm": 18.595024924285916, + "learning_rate": 1.376423125609024e-05, + "loss": 0.6552, + "step": 7603 + }, + { + "epoch": 1.19, + "grad_norm": 24.91833612434623, + "learning_rate": 1.376266884185454e-05, + "loss": 0.7241, + "step": 7604 + }, + { + "epoch": 1.19, + "grad_norm": 33.587980181204244, + "learning_rate": 1.376110632061074e-05, + "loss": 0.6916, + "step": 7605 + }, + { + "epoch": 1.19, + "grad_norm": 25.88662066425963, + "learning_rate": 1.3759543692403278e-05, + "loss": 0.6966, + "step": 7606 + }, + { + "epoch": 1.19, + "grad_norm": 18.996353307665114, + "learning_rate": 1.3757980957276594e-05, + "loss": 0.7129, + "step": 7607 + }, + { + "epoch": 1.19, + "grad_norm": 18.07633098406556, + "learning_rate": 1.3756418115275128e-05, + "loss": 0.6685, + "step": 7608 + }, + { + "epoch": 1.19, + "grad_norm": 20.98864413724588, + "learning_rate": 1.3754855166443326e-05, + "loss": 0.6743, + "step": 7609 + }, + { + "epoch": 1.19, + "grad_norm": 22.32496062053142, + "learning_rate": 1.375329211082564e-05, + "loss": 0.6902, + "step": 7610 + }, + { + "epoch": 1.19, + "grad_norm": 18.280417487759227, + "learning_rate": 1.3751728948466526e-05, + "loss": 0.6503, + "step": 7611 + }, + { + "epoch": 1.19, + "grad_norm": 15.466755148350478, + "learning_rate": 1.3750165679410431e-05, + "loss": 0.6718, + "step": 7612 + }, + { + "epoch": 1.19, + "grad_norm": 13.280107986493373, + "learning_rate": 1.3748602303701821e-05, + "loss": 0.6518, + "step": 7613 + }, + { + "epoch": 1.19, + "grad_norm": 11.770372852212237, + "learning_rate": 1.3747038821385155e-05, + "loss": 0.6022, + "step": 7614 + }, + { + "epoch": 1.19, + "grad_norm": 15.899196220495579, + "learning_rate": 1.3745475232504895e-05, + "loss": 0.6409, + "step": 7615 + }, + { + "epoch": 1.19, + "grad_norm": 12.657098477968363, + "learning_rate": 1.374391153710551e-05, + "loss": 0.6713, + "step": 7616 + }, + { + "epoch": 1.19, + "grad_norm": 16.680950969218298, + 
"learning_rate": 1.374234773523147e-05, + "loss": 0.7287, + "step": 7617 + }, + { + "epoch": 1.19, + "grad_norm": 16.114378093032236, + "learning_rate": 1.3740783826927252e-05, + "loss": 0.7616, + "step": 7618 + }, + { + "epoch": 1.19, + "grad_norm": 22.38611065463507, + "learning_rate": 1.373921981223733e-05, + "loss": 0.7034, + "step": 7619 + }, + { + "epoch": 1.19, + "grad_norm": 18.81119139213464, + "learning_rate": 1.3737655691206184e-05, + "loss": 0.6361, + "step": 7620 + }, + { + "epoch": 1.19, + "grad_norm": 29.82213783823889, + "learning_rate": 1.3736091463878296e-05, + "loss": 0.6597, + "step": 7621 + }, + { + "epoch": 1.19, + "grad_norm": 31.98501895615954, + "learning_rate": 1.3734527130298153e-05, + "loss": 0.7065, + "step": 7622 + }, + { + "epoch": 1.19, + "grad_norm": 19.824982803353503, + "learning_rate": 1.373296269051024e-05, + "loss": 0.6366, + "step": 7623 + }, + { + "epoch": 1.19, + "grad_norm": 13.33518289711661, + "learning_rate": 1.3731398144559056e-05, + "loss": 0.6591, + "step": 7624 + }, + { + "epoch": 1.19, + "grad_norm": 19.55279041450296, + "learning_rate": 1.3729833492489091e-05, + "loss": 0.6656, + "step": 7625 + }, + { + "epoch": 1.19, + "grad_norm": 20.840946987019695, + "learning_rate": 1.3728268734344842e-05, + "loss": 0.6558, + "step": 7626 + }, + { + "epoch": 1.19, + "grad_norm": 18.403385587937322, + "learning_rate": 1.372670387017081e-05, + "loss": 0.7045, + "step": 7627 + }, + { + "epoch": 1.19, + "grad_norm": 13.664214010145676, + "learning_rate": 1.3725138900011503e-05, + "loss": 0.6638, + "step": 7628 + }, + { + "epoch": 1.19, + "grad_norm": 17.38587578731434, + "learning_rate": 1.372357382391142e-05, + "loss": 0.6434, + "step": 7629 + }, + { + "epoch": 1.19, + "grad_norm": 14.366065858470517, + "learning_rate": 1.3722008641915081e-05, + "loss": 0.7008, + "step": 7630 + }, + { + "epoch": 1.19, + "grad_norm": 20.743109412817446, + "learning_rate": 1.372044335406699e-05, + "loss": 0.6805, + "step": 7631 + }, + { + "epoch": 1.19, + "grad_norm": 21.621152475237675, + "learning_rate": 1.3718877960411669e-05, + "loss": 0.6004, + "step": 7632 + }, + { + "epoch": 1.19, + "grad_norm": 14.985923084486366, + "learning_rate": 1.3717312460993632e-05, + "loss": 0.6458, + "step": 7633 + }, + { + "epoch": 1.19, + "grad_norm": 14.831499571936895, + "learning_rate": 1.3715746855857407e-05, + "loss": 0.5965, + "step": 7634 + }, + { + "epoch": 1.19, + "grad_norm": 19.378227400774456, + "learning_rate": 1.3714181145047512e-05, + "loss": 0.682, + "step": 7635 + }, + { + "epoch": 1.19, + "grad_norm": 21.8385788468174, + "learning_rate": 1.3712615328608476e-05, + "loss": 0.6703, + "step": 7636 + }, + { + "epoch": 1.19, + "grad_norm": 20.541157472343233, + "learning_rate": 1.3711049406584836e-05, + "loss": 0.6786, + "step": 7637 + }, + { + "epoch": 1.19, + "grad_norm": 28.222698862644222, + "learning_rate": 1.3709483379021118e-05, + "loss": 0.6697, + "step": 7638 + }, + { + "epoch": 1.19, + "grad_norm": 19.572778544125555, + "learning_rate": 1.3707917245961864e-05, + "loss": 0.7053, + "step": 7639 + }, + { + "epoch": 1.19, + "grad_norm": 19.73125718557368, + "learning_rate": 1.3706351007451616e-05, + "loss": 0.6334, + "step": 7640 + }, + { + "epoch": 1.19, + "grad_norm": 14.35657112144822, + "learning_rate": 1.3704784663534909e-05, + "loss": 0.6475, + "step": 7641 + }, + { + "epoch": 1.19, + "grad_norm": 18.2966139470998, + "learning_rate": 1.3703218214256295e-05, + "loss": 0.7055, + "step": 7642 + }, + { + "epoch": 1.19, + "grad_norm": 19.124191457199956, + 
"learning_rate": 1.3701651659660322e-05, + "loss": 0.6314, + "step": 7643 + }, + { + "epoch": 1.19, + "grad_norm": 16.737879939036816, + "learning_rate": 1.3700084999791539e-05, + "loss": 0.6366, + "step": 7644 + }, + { + "epoch": 1.19, + "grad_norm": 15.617573194716968, + "learning_rate": 1.3698518234694507e-05, + "loss": 0.7464, + "step": 7645 + }, + { + "epoch": 1.19, + "grad_norm": 19.01912822833064, + "learning_rate": 1.3696951364413776e-05, + "loss": 0.654, + "step": 7646 + }, + { + "epoch": 1.19, + "grad_norm": 23.066088949309595, + "learning_rate": 1.3695384388993915e-05, + "loss": 0.7175, + "step": 7647 + }, + { + "epoch": 1.19, + "grad_norm": 14.099934492948222, + "learning_rate": 1.369381730847948e-05, + "loss": 0.6197, + "step": 7648 + }, + { + "epoch": 1.19, + "grad_norm": 16.964683048858276, + "learning_rate": 1.3692250122915042e-05, + "loss": 0.6001, + "step": 7649 + }, + { + "epoch": 1.19, + "grad_norm": 18.877150163643428, + "learning_rate": 1.3690682832345172e-05, + "loss": 0.6811, + "step": 7650 + }, + { + "epoch": 1.2, + "grad_norm": 16.667626102270013, + "learning_rate": 1.3689115436814442e-05, + "loss": 0.6262, + "step": 7651 + }, + { + "epoch": 1.2, + "grad_norm": 17.954731895040663, + "learning_rate": 1.3687547936367424e-05, + "loss": 0.6491, + "step": 7652 + }, + { + "epoch": 1.2, + "grad_norm": 27.787930592887133, + "learning_rate": 1.36859803310487e-05, + "loss": 0.7097, + "step": 7653 + }, + { + "epoch": 1.2, + "grad_norm": 17.17398208354797, + "learning_rate": 1.3684412620902854e-05, + "loss": 0.7381, + "step": 7654 + }, + { + "epoch": 1.2, + "grad_norm": 18.47236551599288, + "learning_rate": 1.3682844805974466e-05, + "loss": 0.7036, + "step": 7655 + }, + { + "epoch": 1.2, + "grad_norm": 23.80344222823844, + "learning_rate": 1.3681276886308129e-05, + "loss": 0.6652, + "step": 7656 + }, + { + "epoch": 1.2, + "grad_norm": 13.467410604034352, + "learning_rate": 1.3679708861948429e-05, + "loss": 0.6617, + "step": 7657 + }, + { + "epoch": 1.2, + "grad_norm": 22.298707388184887, + "learning_rate": 1.3678140732939962e-05, + "loss": 0.6192, + "step": 7658 + }, + { + "epoch": 1.2, + "grad_norm": 15.506839285419602, + "learning_rate": 1.3676572499327322e-05, + "loss": 0.677, + "step": 7659 + }, + { + "epoch": 1.2, + "grad_norm": 15.5937809117265, + "learning_rate": 1.3675004161155116e-05, + "loss": 0.688, + "step": 7660 + }, + { + "epoch": 1.2, + "grad_norm": 18.131816096625517, + "learning_rate": 1.3673435718467936e-05, + "loss": 0.6501, + "step": 7661 + }, + { + "epoch": 1.2, + "grad_norm": 16.64486544268422, + "learning_rate": 1.3671867171310398e-05, + "loss": 0.6203, + "step": 7662 + }, + { + "epoch": 1.2, + "grad_norm": 19.945936554089887, + "learning_rate": 1.36702985197271e-05, + "loss": 0.6201, + "step": 7663 + }, + { + "epoch": 1.2, + "grad_norm": 19.82345189607954, + "learning_rate": 1.3668729763762664e-05, + "loss": 0.6751, + "step": 7664 + }, + { + "epoch": 1.2, + "grad_norm": 23.04941698441705, + "learning_rate": 1.3667160903461699e-05, + "loss": 0.7807, + "step": 7665 + }, + { + "epoch": 1.2, + "grad_norm": 16.25502799540227, + "learning_rate": 1.366559193886882e-05, + "loss": 0.6261, + "step": 7666 + }, + { + "epoch": 1.2, + "grad_norm": 25.027603123063447, + "learning_rate": 1.3664022870028655e-05, + "loss": 0.7956, + "step": 7667 + }, + { + "epoch": 1.2, + "grad_norm": 19.668726061547922, + "learning_rate": 1.366245369698582e-05, + "loss": 0.6413, + "step": 7668 + }, + { + "epoch": 1.2, + "grad_norm": 30.615400659331648, + "learning_rate": 
1.3660884419784944e-05, + "loss": 0.6724, + "step": 7669 + }, + { + "epoch": 1.2, + "grad_norm": 18.61355365501805, + "learning_rate": 1.3659315038470657e-05, + "loss": 0.6764, + "step": 7670 + }, + { + "epoch": 1.2, + "grad_norm": 25.317622178655764, + "learning_rate": 1.3657745553087591e-05, + "loss": 0.6982, + "step": 7671 + }, + { + "epoch": 1.2, + "grad_norm": 16.344091877725926, + "learning_rate": 1.3656175963680385e-05, + "loss": 0.6288, + "step": 7672 + }, + { + "epoch": 1.2, + "grad_norm": 26.068582226163738, + "learning_rate": 1.3654606270293666e-05, + "loss": 0.6353, + "step": 7673 + }, + { + "epoch": 1.2, + "grad_norm": 21.689203383696714, + "learning_rate": 1.3653036472972088e-05, + "loss": 0.67, + "step": 7674 + }, + { + "epoch": 1.2, + "grad_norm": 17.55984097125743, + "learning_rate": 1.365146657176029e-05, + "loss": 0.6526, + "step": 7675 + }, + { + "epoch": 1.2, + "grad_norm": 17.987938094205383, + "learning_rate": 1.3649896566702916e-05, + "loss": 0.6221, + "step": 7676 + }, + { + "epoch": 1.2, + "grad_norm": 17.697086014618723, + "learning_rate": 1.364832645784462e-05, + "loss": 0.685, + "step": 7677 + }, + { + "epoch": 1.2, + "grad_norm": 18.614157721106924, + "learning_rate": 1.3646756245230056e-05, + "loss": 0.7203, + "step": 7678 + }, + { + "epoch": 1.2, + "grad_norm": 24.74984298431076, + "learning_rate": 1.3645185928903874e-05, + "loss": 0.5951, + "step": 7679 + }, + { + "epoch": 1.2, + "grad_norm": 17.58654666237059, + "learning_rate": 1.3643615508910734e-05, + "loss": 0.5818, + "step": 7680 + }, + { + "epoch": 1.2, + "grad_norm": 16.597103354334525, + "learning_rate": 1.3642044985295307e-05, + "loss": 0.6439, + "step": 7681 + }, + { + "epoch": 1.2, + "grad_norm": 21.33460279471346, + "learning_rate": 1.3640474358102247e-05, + "loss": 0.7883, + "step": 7682 + }, + { + "epoch": 1.2, + "grad_norm": 12.261608012690708, + "learning_rate": 1.3638903627376227e-05, + "loss": 0.66, + "step": 7683 + }, + { + "epoch": 1.2, + "grad_norm": 20.19728792220861, + "learning_rate": 1.3637332793161918e-05, + "loss": 0.7208, + "step": 7684 + }, + { + "epoch": 1.2, + "grad_norm": 23.352946889048283, + "learning_rate": 1.363576185550399e-05, + "loss": 0.6549, + "step": 7685 + }, + { + "epoch": 1.2, + "grad_norm": 14.845646273305002, + "learning_rate": 1.3634190814447124e-05, + "loss": 0.6223, + "step": 7686 + }, + { + "epoch": 1.2, + "grad_norm": 27.27922149754853, + "learning_rate": 1.3632619670035996e-05, + "loss": 0.7209, + "step": 7687 + }, + { + "epoch": 1.2, + "grad_norm": 24.391918448658416, + "learning_rate": 1.3631048422315288e-05, + "loss": 0.6488, + "step": 7688 + }, + { + "epoch": 1.2, + "grad_norm": 27.26403424560335, + "learning_rate": 1.3629477071329689e-05, + "loss": 0.5684, + "step": 7689 + }, + { + "epoch": 1.2, + "grad_norm": 14.135181514972738, + "learning_rate": 1.3627905617123884e-05, + "loss": 0.5704, + "step": 7690 + }, + { + "epoch": 1.2, + "grad_norm": 11.742921262836303, + "learning_rate": 1.3626334059742567e-05, + "loss": 0.5532, + "step": 7691 + }, + { + "epoch": 1.2, + "grad_norm": 26.149796006398997, + "learning_rate": 1.362476239923043e-05, + "loss": 0.6653, + "step": 7692 + }, + { + "epoch": 1.2, + "grad_norm": 22.8417530230639, + "learning_rate": 1.3623190635632171e-05, + "loss": 0.6207, + "step": 7693 + }, + { + "epoch": 1.2, + "grad_norm": 21.328355889939534, + "learning_rate": 1.3621618768992488e-05, + "loss": 0.5995, + "step": 7694 + }, + { + "epoch": 1.2, + "grad_norm": 12.363896644185626, + "learning_rate": 1.3620046799356088e-05, + "loss": 
0.5512, + "step": 7695 + }, + { + "epoch": 1.2, + "grad_norm": 17.73881009138261, + "learning_rate": 1.3618474726767675e-05, + "loss": 0.6583, + "step": 7696 + }, + { + "epoch": 1.2, + "grad_norm": 24.47576820942793, + "learning_rate": 1.3616902551271955e-05, + "loss": 0.7518, + "step": 7697 + }, + { + "epoch": 1.2, + "grad_norm": 14.461286948632727, + "learning_rate": 1.3615330272913643e-05, + "loss": 0.6328, + "step": 7698 + }, + { + "epoch": 1.2, + "grad_norm": 30.251965504230686, + "learning_rate": 1.3613757891737453e-05, + "loss": 0.7129, + "step": 7699 + }, + { + "epoch": 1.2, + "grad_norm": 13.70995665462603, + "learning_rate": 1.3612185407788104e-05, + "loss": 0.5815, + "step": 7700 + }, + { + "epoch": 1.2, + "grad_norm": 18.64415549573565, + "learning_rate": 1.3610612821110315e-05, + "loss": 0.6377, + "step": 7701 + }, + { + "epoch": 1.2, + "grad_norm": 27.733259576745162, + "learning_rate": 1.360904013174881e-05, + "loss": 0.6813, + "step": 7702 + }, + { + "epoch": 1.2, + "grad_norm": 18.172275078371847, + "learning_rate": 1.360746733974831e-05, + "loss": 0.7182, + "step": 7703 + }, + { + "epoch": 1.2, + "grad_norm": 21.055222941510106, + "learning_rate": 1.3605894445153556e-05, + "loss": 0.7182, + "step": 7704 + }, + { + "epoch": 1.2, + "grad_norm": 15.476187080902122, + "learning_rate": 1.3604321448009268e-05, + "loss": 0.5663, + "step": 7705 + }, + { + "epoch": 1.2, + "grad_norm": 17.040039513294065, + "learning_rate": 1.3602748348360191e-05, + "loss": 0.6797, + "step": 7706 + }, + { + "epoch": 1.2, + "grad_norm": 16.51030181028526, + "learning_rate": 1.3601175146251056e-05, + "loss": 0.5196, + "step": 7707 + }, + { + "epoch": 1.2, + "grad_norm": 18.308671933221458, + "learning_rate": 1.3599601841726608e-05, + "loss": 0.6526, + "step": 7708 + }, + { + "epoch": 1.2, + "grad_norm": 18.208077474119694, + "learning_rate": 1.3598028434831584e-05, + "loss": 0.6512, + "step": 7709 + }, + { + "epoch": 1.2, + "grad_norm": 23.929046116626616, + "learning_rate": 1.3596454925610742e-05, + "loss": 0.6529, + "step": 7710 + }, + { + "epoch": 1.2, + "grad_norm": 25.957286642294974, + "learning_rate": 1.3594881314108823e-05, + "loss": 0.7335, + "step": 7711 + }, + { + "epoch": 1.2, + "grad_norm": 22.292747626136237, + "learning_rate": 1.3593307600370585e-05, + "loss": 0.6245, + "step": 7712 + }, + { + "epoch": 1.2, + "grad_norm": 26.95357431894152, + "learning_rate": 1.359173378444078e-05, + "loss": 0.6813, + "step": 7713 + }, + { + "epoch": 1.2, + "grad_norm": 17.831349505024185, + "learning_rate": 1.3590159866364166e-05, + "loss": 0.6319, + "step": 7714 + }, + { + "epoch": 1.21, + "grad_norm": 20.648316742756453, + "learning_rate": 1.3588585846185502e-05, + "loss": 0.7271, + "step": 7715 + }, + { + "epoch": 1.21, + "grad_norm": 14.373605419492204, + "learning_rate": 1.3587011723949564e-05, + "loss": 0.5684, + "step": 7716 + }, + { + "epoch": 1.21, + "grad_norm": 28.810962346352422, + "learning_rate": 1.3585437499701104e-05, + "loss": 0.7036, + "step": 7717 + }, + { + "epoch": 1.21, + "grad_norm": 24.28804142187009, + "learning_rate": 1.3583863173484902e-05, + "loss": 0.6169, + "step": 7718 + }, + { + "epoch": 1.21, + "grad_norm": 21.76797237766549, + "learning_rate": 1.3582288745345728e-05, + "loss": 0.6861, + "step": 7719 + }, + { + "epoch": 1.21, + "grad_norm": 14.63998005565017, + "learning_rate": 1.3580714215328357e-05, + "loss": 0.5862, + "step": 7720 + }, + { + "epoch": 1.21, + "grad_norm": 24.53303063818445, + "learning_rate": 1.357913958347757e-05, + "loss": 0.6792, + "step": 7721 + 
}, + { + "epoch": 1.21, + "grad_norm": 13.826674323848138, + "learning_rate": 1.3577564849838146e-05, + "loss": 0.6953, + "step": 7722 + }, + { + "epoch": 1.21, + "grad_norm": 13.313937702809529, + "learning_rate": 1.3575990014454869e-05, + "loss": 0.5925, + "step": 7723 + }, + { + "epoch": 1.21, + "grad_norm": 23.865880868610812, + "learning_rate": 1.3574415077372527e-05, + "loss": 0.6402, + "step": 7724 + }, + { + "epoch": 1.21, + "grad_norm": 23.010248769582482, + "learning_rate": 1.3572840038635913e-05, + "loss": 0.6418, + "step": 7725 + }, + { + "epoch": 1.21, + "grad_norm": 16.122639224503757, + "learning_rate": 1.357126489828982e-05, + "loss": 0.6886, + "step": 7726 + }, + { + "epoch": 1.21, + "grad_norm": 22.33183266466506, + "learning_rate": 1.356968965637904e-05, + "loss": 0.6792, + "step": 7727 + }, + { + "epoch": 1.21, + "grad_norm": 17.824989547549205, + "learning_rate": 1.3568114312948375e-05, + "loss": 0.5982, + "step": 7728 + }, + { + "epoch": 1.21, + "grad_norm": 18.848025552165538, + "learning_rate": 1.3566538868042627e-05, + "loss": 0.6607, + "step": 7729 + }, + { + "epoch": 1.21, + "grad_norm": 24.38716803079882, + "learning_rate": 1.3564963321706597e-05, + "loss": 0.7229, + "step": 7730 + }, + { + "epoch": 1.21, + "grad_norm": 16.13679198167889, + "learning_rate": 1.35633876739851e-05, + "loss": 0.5827, + "step": 7731 + }, + { + "epoch": 1.21, + "grad_norm": 13.391859682018913, + "learning_rate": 1.3561811924922939e-05, + "loss": 0.6427, + "step": 7732 + }, + { + "epoch": 1.21, + "grad_norm": 28.225278217525755, + "learning_rate": 1.3560236074564935e-05, + "loss": 0.6938, + "step": 7733 + }, + { + "epoch": 1.21, + "grad_norm": 17.87479051137091, + "learning_rate": 1.3558660122955894e-05, + "loss": 0.5692, + "step": 7734 + }, + { + "epoch": 1.21, + "grad_norm": 16.634308373254914, + "learning_rate": 1.3557084070140644e-05, + "loss": 0.6371, + "step": 7735 + }, + { + "epoch": 1.21, + "grad_norm": 17.132658715521163, + "learning_rate": 1.3555507916164002e-05, + "loss": 0.6319, + "step": 7736 + }, + { + "epoch": 1.21, + "grad_norm": 59.79078517262928, + "learning_rate": 1.3553931661070796e-05, + "loss": 0.7993, + "step": 7737 + }, + { + "epoch": 1.21, + "grad_norm": 14.675090233356418, + "learning_rate": 1.3552355304905851e-05, + "loss": 0.6695, + "step": 7738 + }, + { + "epoch": 1.21, + "grad_norm": 18.822836955615433, + "learning_rate": 1.3550778847714001e-05, + "loss": 0.6723, + "step": 7739 + }, + { + "epoch": 1.21, + "grad_norm": 18.56423243472237, + "learning_rate": 1.3549202289540077e-05, + "loss": 0.6465, + "step": 7740 + }, + { + "epoch": 1.21, + "grad_norm": 12.799417812501762, + "learning_rate": 1.3547625630428919e-05, + "loss": 0.6235, + "step": 7741 + }, + { + "epoch": 1.21, + "grad_norm": 23.001394396734433, + "learning_rate": 1.3546048870425356e-05, + "loss": 0.7434, + "step": 7742 + }, + { + "epoch": 1.21, + "grad_norm": 16.618670841687017, + "learning_rate": 1.3544472009574243e-05, + "loss": 0.6331, + "step": 7743 + }, + { + "epoch": 1.21, + "grad_norm": 22.760969114618792, + "learning_rate": 1.354289504792042e-05, + "loss": 0.6606, + "step": 7744 + }, + { + "epoch": 1.21, + "grad_norm": 12.166953750820301, + "learning_rate": 1.3541317985508733e-05, + "loss": 0.7009, + "step": 7745 + }, + { + "epoch": 1.21, + "grad_norm": 19.23684946609206, + "learning_rate": 1.3539740822384034e-05, + "loss": 0.6593, + "step": 7746 + }, + { + "epoch": 1.21, + "grad_norm": 17.166807897073213, + "learning_rate": 1.3538163558591175e-05, + "loss": 0.6959, + "step": 7747 + }, + 
{ + "epoch": 1.21, + "grad_norm": 15.08195940191766, + "learning_rate": 1.3536586194175019e-05, + "loss": 0.7125, + "step": 7748 + }, + { + "epoch": 1.21, + "grad_norm": 17.443448226338507, + "learning_rate": 1.3535008729180414e-05, + "loss": 0.7189, + "step": 7749 + }, + { + "epoch": 1.21, + "grad_norm": 15.9016326289895, + "learning_rate": 1.3533431163652235e-05, + "loss": 0.7998, + "step": 7750 + }, + { + "epoch": 1.21, + "grad_norm": 20.443605478528706, + "learning_rate": 1.3531853497635336e-05, + "loss": 0.6639, + "step": 7751 + }, + { + "epoch": 1.21, + "grad_norm": 14.645828533533575, + "learning_rate": 1.3530275731174591e-05, + "loss": 0.694, + "step": 7752 + }, + { + "epoch": 1.21, + "grad_norm": 27.10189880268002, + "learning_rate": 1.352869786431487e-05, + "loss": 0.6703, + "step": 7753 + }, + { + "epoch": 1.21, + "grad_norm": 18.97391631528998, + "learning_rate": 1.3527119897101046e-05, + "loss": 0.6051, + "step": 7754 + }, + { + "epoch": 1.21, + "grad_norm": 16.39083757071754, + "learning_rate": 1.3525541829577997e-05, + "loss": 0.6977, + "step": 7755 + }, + { + "epoch": 1.21, + "grad_norm": 11.660695229985553, + "learning_rate": 1.3523963661790597e-05, + "loss": 0.566, + "step": 7756 + }, + { + "epoch": 1.21, + "grad_norm": 18.514916987503454, + "learning_rate": 1.3522385393783735e-05, + "loss": 0.6181, + "step": 7757 + }, + { + "epoch": 1.21, + "grad_norm": 18.44983075362788, + "learning_rate": 1.3520807025602291e-05, + "loss": 0.6142, + "step": 7758 + }, + { + "epoch": 1.21, + "grad_norm": 20.112730421360908, + "learning_rate": 1.3519228557291156e-05, + "loss": 0.7191, + "step": 7759 + }, + { + "epoch": 1.21, + "grad_norm": 23.87400981712163, + "learning_rate": 1.3517649988895221e-05, + "loss": 0.641, + "step": 7760 + }, + { + "epoch": 1.21, + "grad_norm": 19.363589170268963, + "learning_rate": 1.3516071320459374e-05, + "loss": 0.6954, + "step": 7761 + }, + { + "epoch": 1.21, + "grad_norm": 16.762579793334773, + "learning_rate": 1.3514492552028518e-05, + "loss": 0.7501, + "step": 7762 + }, + { + "epoch": 1.21, + "grad_norm": 19.25704190722339, + "learning_rate": 1.351291368364755e-05, + "loss": 0.6863, + "step": 7763 + }, + { + "epoch": 1.21, + "grad_norm": 17.79564088841133, + "learning_rate": 1.3511334715361371e-05, + "loss": 0.6296, + "step": 7764 + }, + { + "epoch": 1.21, + "grad_norm": 17.96447351733195, + "learning_rate": 1.3509755647214888e-05, + "loss": 0.6559, + "step": 7765 + }, + { + "epoch": 1.21, + "grad_norm": 22.17274124564975, + "learning_rate": 1.3508176479253003e-05, + "loss": 0.6416, + "step": 7766 + }, + { + "epoch": 1.21, + "grad_norm": 12.323950186945373, + "learning_rate": 1.3506597211520638e-05, + "loss": 0.6761, + "step": 7767 + }, + { + "epoch": 1.21, + "grad_norm": 21.553193918228306, + "learning_rate": 1.3505017844062693e-05, + "loss": 0.6436, + "step": 7768 + }, + { + "epoch": 1.21, + "grad_norm": 17.341149612035274, + "learning_rate": 1.3503438376924096e-05, + "loss": 0.6985, + "step": 7769 + }, + { + "epoch": 1.21, + "grad_norm": 18.187834744797613, + "learning_rate": 1.350185881014976e-05, + "loss": 0.7258, + "step": 7770 + }, + { + "epoch": 1.21, + "grad_norm": 21.118010345781553, + "learning_rate": 1.3500279143784606e-05, + "loss": 0.5968, + "step": 7771 + }, + { + "epoch": 1.21, + "grad_norm": 19.766183982076797, + "learning_rate": 1.3498699377873563e-05, + "loss": 0.6071, + "step": 7772 + }, + { + "epoch": 1.21, + "grad_norm": 15.648475346017172, + "learning_rate": 1.3497119512461555e-05, + "loss": 0.5594, + "step": 7773 + }, + { + 
"epoch": 1.21, + "grad_norm": 23.256902129731618, + "learning_rate": 1.3495539547593512e-05, + "loss": 0.6319, + "step": 7774 + }, + { + "epoch": 1.21, + "grad_norm": 21.95187187555169, + "learning_rate": 1.3493959483314373e-05, + "loss": 0.73, + "step": 7775 + }, + { + "epoch": 1.21, + "grad_norm": 26.797598910771534, + "learning_rate": 1.349237931966907e-05, + "loss": 0.7152, + "step": 7776 + }, + { + "epoch": 1.21, + "grad_norm": 22.137483614216, + "learning_rate": 1.3490799056702542e-05, + "loss": 0.6849, + "step": 7777 + }, + { + "epoch": 1.21, + "grad_norm": 30.073841916592357, + "learning_rate": 1.3489218694459727e-05, + "loss": 0.6337, + "step": 7778 + }, + { + "epoch": 1.22, + "grad_norm": 36.22687657442029, + "learning_rate": 1.3487638232985575e-05, + "loss": 0.7377, + "step": 7779 + }, + { + "epoch": 1.22, + "grad_norm": 17.285290256993726, + "learning_rate": 1.3486057672325035e-05, + "loss": 0.7236, + "step": 7780 + }, + { + "epoch": 1.22, + "grad_norm": 39.5672733664932, + "learning_rate": 1.348447701252305e-05, + "loss": 0.8154, + "step": 7781 + }, + { + "epoch": 1.22, + "grad_norm": 23.84282254868855, + "learning_rate": 1.348289625362458e-05, + "loss": 0.7073, + "step": 7782 + }, + { + "epoch": 1.22, + "grad_norm": 24.170662522190558, + "learning_rate": 1.3481315395674577e-05, + "loss": 0.6956, + "step": 7783 + }, + { + "epoch": 1.22, + "grad_norm": 12.927685363625908, + "learning_rate": 1.3479734438718002e-05, + "loss": 0.581, + "step": 7784 + }, + { + "epoch": 1.22, + "grad_norm": 11.836569437461584, + "learning_rate": 1.3478153382799812e-05, + "loss": 0.6611, + "step": 7785 + }, + { + "epoch": 1.22, + "grad_norm": 24.520857340134633, + "learning_rate": 1.3476572227964976e-05, + "loss": 0.6539, + "step": 7786 + }, + { + "epoch": 1.22, + "grad_norm": 12.932599178936254, + "learning_rate": 1.347499097425846e-05, + "loss": 0.6455, + "step": 7787 + }, + { + "epoch": 1.22, + "grad_norm": 14.924515073712305, + "learning_rate": 1.3473409621725233e-05, + "loss": 0.6104, + "step": 7788 + }, + { + "epoch": 1.22, + "grad_norm": 22.843174330924167, + "learning_rate": 1.3471828170410268e-05, + "loss": 0.6356, + "step": 7789 + }, + { + "epoch": 1.22, + "grad_norm": 17.22999083816796, + "learning_rate": 1.3470246620358543e-05, + "loss": 0.645, + "step": 7790 + }, + { + "epoch": 1.22, + "grad_norm": 18.155008353193594, + "learning_rate": 1.3468664971615031e-05, + "loss": 0.6822, + "step": 7791 + }, + { + "epoch": 1.22, + "grad_norm": 23.88314308827424, + "learning_rate": 1.3467083224224719e-05, + "loss": 0.692, + "step": 7792 + }, + { + "epoch": 1.22, + "grad_norm": 15.887869052967515, + "learning_rate": 1.3465501378232586e-05, + "loss": 0.7156, + "step": 7793 + }, + { + "epoch": 1.22, + "grad_norm": 19.55371340810835, + "learning_rate": 1.3463919433683624e-05, + "loss": 0.7328, + "step": 7794 + }, + { + "epoch": 1.22, + "grad_norm": 14.815173644195019, + "learning_rate": 1.3462337390622814e-05, + "loss": 0.6379, + "step": 7795 + }, + { + "epoch": 1.22, + "grad_norm": 13.7295557383263, + "learning_rate": 1.3460755249095161e-05, + "loss": 0.6373, + "step": 7796 + }, + { + "epoch": 1.22, + "grad_norm": 35.688595746460194, + "learning_rate": 1.345917300914565e-05, + "loss": 0.7018, + "step": 7797 + }, + { + "epoch": 1.22, + "grad_norm": 18.029232767640135, + "learning_rate": 1.3457590670819283e-05, + "loss": 0.6712, + "step": 7798 + }, + { + "epoch": 1.22, + "grad_norm": 16.215186818242362, + "learning_rate": 1.3456008234161057e-05, + "loss": 0.6219, + "step": 7799 + }, + { + "epoch": 1.22, 
+ "grad_norm": 28.221219862978312, + "learning_rate": 1.3454425699215982e-05, + "loss": 0.6038, + "step": 7800 + }, + { + "epoch": 1.22, + "grad_norm": 20.461458556760373, + "learning_rate": 1.3452843066029058e-05, + "loss": 0.5516, + "step": 7801 + }, + { + "epoch": 1.22, + "grad_norm": 12.928907133449648, + "learning_rate": 1.3451260334645299e-05, + "loss": 0.66, + "step": 7802 + }, + { + "epoch": 1.22, + "grad_norm": 16.87503209949902, + "learning_rate": 1.3449677505109716e-05, + "loss": 0.5877, + "step": 7803 + }, + { + "epoch": 1.22, + "grad_norm": 27.80302327163075, + "learning_rate": 1.3448094577467323e-05, + "loss": 0.6431, + "step": 7804 + }, + { + "epoch": 1.22, + "grad_norm": 39.054457456823506, + "learning_rate": 1.3446511551763133e-05, + "loss": 0.6847, + "step": 7805 + }, + { + "epoch": 1.22, + "grad_norm": 15.944321259074611, + "learning_rate": 1.3444928428042175e-05, + "loss": 0.724, + "step": 7806 + }, + { + "epoch": 1.22, + "grad_norm": 17.451371262771637, + "learning_rate": 1.3443345206349466e-05, + "loss": 0.6378, + "step": 7807 + }, + { + "epoch": 1.22, + "grad_norm": 18.279431429503372, + "learning_rate": 1.3441761886730033e-05, + "loss": 0.6591, + "step": 7808 + }, + { + "epoch": 1.22, + "grad_norm": 30.09793328981426, + "learning_rate": 1.3440178469228906e-05, + "loss": 0.7033, + "step": 7809 + }, + { + "epoch": 1.22, + "grad_norm": 25.74143649237283, + "learning_rate": 1.3438594953891113e-05, + "loss": 0.606, + "step": 7810 + }, + { + "epoch": 1.22, + "grad_norm": 23.527783235374834, + "learning_rate": 1.3437011340761697e-05, + "loss": 0.7218, + "step": 7811 + }, + { + "epoch": 1.22, + "grad_norm": 15.67283128367126, + "learning_rate": 1.3435427629885684e-05, + "loss": 0.6986, + "step": 7812 + }, + { + "epoch": 1.22, + "grad_norm": 19.904130784686412, + "learning_rate": 1.343384382130812e-05, + "loss": 0.7566, + "step": 7813 + }, + { + "epoch": 1.22, + "grad_norm": 21.393523155060652, + "learning_rate": 1.3432259915074048e-05, + "loss": 0.6636, + "step": 7814 + }, + { + "epoch": 1.22, + "grad_norm": 13.681988834630545, + "learning_rate": 1.3430675911228512e-05, + "loss": 0.5694, + "step": 7815 + }, + { + "epoch": 1.22, + "grad_norm": 17.390931557749273, + "learning_rate": 1.342909180981656e-05, + "loss": 0.6919, + "step": 7816 + }, + { + "epoch": 1.22, + "grad_norm": 17.62703428212765, + "learning_rate": 1.3427507610883242e-05, + "loss": 0.5966, + "step": 7817 + }, + { + "epoch": 1.22, + "grad_norm": 14.082857439457646, + "learning_rate": 1.342592331447361e-05, + "loss": 0.5937, + "step": 7818 + }, + { + "epoch": 1.22, + "grad_norm": 17.16480701483057, + "learning_rate": 1.342433892063273e-05, + "loss": 0.6725, + "step": 7819 + }, + { + "epoch": 1.22, + "grad_norm": 15.29070869203043, + "learning_rate": 1.3422754429405647e-05, + "loss": 0.6131, + "step": 7820 + }, + { + "epoch": 1.22, + "grad_norm": 16.209781259413884, + "learning_rate": 1.3421169840837433e-05, + "loss": 0.6267, + "step": 7821 + }, + { + "epoch": 1.22, + "grad_norm": 13.400291058307083, + "learning_rate": 1.341958515497315e-05, + "loss": 0.6059, + "step": 7822 + }, + { + "epoch": 1.22, + "grad_norm": 20.10039043456538, + "learning_rate": 1.3418000371857866e-05, + "loss": 0.7005, + "step": 7823 + }, + { + "epoch": 1.22, + "grad_norm": 40.55081024845028, + "learning_rate": 1.341641549153665e-05, + "loss": 0.7274, + "step": 7824 + }, + { + "epoch": 1.22, + "grad_norm": 18.49873915770387, + "learning_rate": 1.3414830514054578e-05, + "loss": 0.6495, + "step": 7825 + }, + { + "epoch": 1.22, + 
"grad_norm": 37.50741681027481, + "learning_rate": 1.3413245439456721e-05, + "loss": 0.5725, + "step": 7826 + }, + { + "epoch": 1.22, + "grad_norm": 17.86678812325382, + "learning_rate": 1.3411660267788163e-05, + "loss": 0.6922, + "step": 7827 + }, + { + "epoch": 1.22, + "grad_norm": 12.908302403716277, + "learning_rate": 1.341007499909398e-05, + "loss": 0.5855, + "step": 7828 + }, + { + "epoch": 1.22, + "grad_norm": 16.360341223899788, + "learning_rate": 1.3408489633419261e-05, + "loss": 0.7458, + "step": 7829 + }, + { + "epoch": 1.22, + "grad_norm": 19.839485972258483, + "learning_rate": 1.340690417080909e-05, + "loss": 0.6703, + "step": 7830 + }, + { + "epoch": 1.22, + "grad_norm": 11.7909383248384, + "learning_rate": 1.3405318611308558e-05, + "loss": 0.5847, + "step": 7831 + }, + { + "epoch": 1.22, + "grad_norm": 14.451240739427591, + "learning_rate": 1.3403732954962755e-05, + "loss": 0.6302, + "step": 7832 + }, + { + "epoch": 1.22, + "grad_norm": 12.27548092829371, + "learning_rate": 1.3402147201816779e-05, + "loss": 0.6838, + "step": 7833 + }, + { + "epoch": 1.22, + "grad_norm": 12.857950330962739, + "learning_rate": 1.3400561351915728e-05, + "loss": 0.6174, + "step": 7834 + }, + { + "epoch": 1.22, + "grad_norm": 20.737197755700027, + "learning_rate": 1.33989754053047e-05, + "loss": 0.6696, + "step": 7835 + }, + { + "epoch": 1.22, + "grad_norm": 16.101315306506343, + "learning_rate": 1.3397389362028802e-05, + "loss": 0.573, + "step": 7836 + }, + { + "epoch": 1.22, + "grad_norm": 20.274495057910308, + "learning_rate": 1.3395803222133133e-05, + "loss": 0.741, + "step": 7837 + }, + { + "epoch": 1.22, + "grad_norm": 14.494150524395614, + "learning_rate": 1.3394216985662813e-05, + "loss": 0.7125, + "step": 7838 + }, + { + "epoch": 1.22, + "grad_norm": 20.088645319283465, + "learning_rate": 1.3392630652662943e-05, + "loss": 0.6642, + "step": 7839 + }, + { + "epoch": 1.22, + "grad_norm": 19.428706659746524, + "learning_rate": 1.3391044223178641e-05, + "loss": 0.6716, + "step": 7840 + }, + { + "epoch": 1.22, + "grad_norm": 16.56534645434312, + "learning_rate": 1.338945769725503e-05, + "loss": 0.5711, + "step": 7841 + }, + { + "epoch": 1.22, + "grad_norm": 16.620159165927667, + "learning_rate": 1.3387871074937222e-05, + "loss": 0.7104, + "step": 7842 + }, + { + "epoch": 1.23, + "grad_norm": 18.010273246035972, + "learning_rate": 1.3386284356270342e-05, + "loss": 0.6918, + "step": 7843 + }, + { + "epoch": 1.23, + "grad_norm": 24.633946700862996, + "learning_rate": 1.3384697541299518e-05, + "loss": 0.6386, + "step": 7844 + }, + { + "epoch": 1.23, + "grad_norm": 18.301930476555217, + "learning_rate": 1.3383110630069874e-05, + "loss": 0.6633, + "step": 7845 + }, + { + "epoch": 1.23, + "grad_norm": 18.57836072690391, + "learning_rate": 1.3381523622626544e-05, + "loss": 0.7097, + "step": 7846 + }, + { + "epoch": 1.23, + "grad_norm": 19.97680065682913, + "learning_rate": 1.3379936519014658e-05, + "loss": 0.6742, + "step": 7847 + }, + { + "epoch": 1.23, + "grad_norm": 21.171551114944574, + "learning_rate": 1.3378349319279358e-05, + "loss": 0.6585, + "step": 7848 + }, + { + "epoch": 1.23, + "grad_norm": 25.342515185104073, + "learning_rate": 1.3376762023465775e-05, + "loss": 0.7317, + "step": 7849 + }, + { + "epoch": 1.23, + "grad_norm": 17.011294051955705, + "learning_rate": 1.337517463161906e-05, + "loss": 0.6369, + "step": 7850 + }, + { + "epoch": 1.23, + "grad_norm": 13.294206516236073, + "learning_rate": 1.337358714378435e-05, + "loss": 0.6676, + "step": 7851 + }, + { + "epoch": 1.23, + 
"grad_norm": 16.85916209683056, + "learning_rate": 1.3371999560006796e-05, + "loss": 0.6662, + "step": 7852 + }, + { + "epoch": 1.23, + "grad_norm": 15.731471469049188, + "learning_rate": 1.3370411880331549e-05, + "loss": 0.6359, + "step": 7853 + }, + { + "epoch": 1.23, + "grad_norm": 11.577569937946773, + "learning_rate": 1.3368824104803755e-05, + "loss": 0.5964, + "step": 7854 + }, + { + "epoch": 1.23, + "grad_norm": 16.292603830930343, + "learning_rate": 1.3367236233468579e-05, + "loss": 0.6249, + "step": 7855 + }, + { + "epoch": 1.23, + "grad_norm": 12.053976986132536, + "learning_rate": 1.3365648266371171e-05, + "loss": 0.6138, + "step": 7856 + }, + { + "epoch": 1.23, + "grad_norm": 20.281809365988988, + "learning_rate": 1.3364060203556697e-05, + "loss": 0.6376, + "step": 7857 + }, + { + "epoch": 1.23, + "grad_norm": 12.587373227775677, + "learning_rate": 1.3362472045070318e-05, + "loss": 0.607, + "step": 7858 + }, + { + "epoch": 1.23, + "grad_norm": 13.037851861391571, + "learning_rate": 1.3360883790957202e-05, + "loss": 0.6241, + "step": 7859 + }, + { + "epoch": 1.23, + "grad_norm": 24.69572943528869, + "learning_rate": 1.3359295441262516e-05, + "loss": 0.677, + "step": 7860 + }, + { + "epoch": 1.23, + "grad_norm": 25.246291058144273, + "learning_rate": 1.3357706996031434e-05, + "loss": 0.7144, + "step": 7861 + }, + { + "epoch": 1.23, + "grad_norm": 26.800505409439165, + "learning_rate": 1.3356118455309127e-05, + "loss": 0.6671, + "step": 7862 + }, + { + "epoch": 1.23, + "grad_norm": 21.097743590918743, + "learning_rate": 1.3354529819140777e-05, + "loss": 0.6569, + "step": 7863 + }, + { + "epoch": 1.23, + "grad_norm": 21.808594838449327, + "learning_rate": 1.3352941087571558e-05, + "loss": 0.6306, + "step": 7864 + }, + { + "epoch": 1.23, + "grad_norm": 19.444113129241654, + "learning_rate": 1.3351352260646662e-05, + "loss": 0.6454, + "step": 7865 + }, + { + "epoch": 1.23, + "grad_norm": 14.65124130678045, + "learning_rate": 1.3349763338411261e-05, + "loss": 0.6305, + "step": 7866 + }, + { + "epoch": 1.23, + "grad_norm": 17.59297812853889, + "learning_rate": 1.3348174320910554e-05, + "loss": 0.5736, + "step": 7867 + }, + { + "epoch": 1.23, + "grad_norm": 3.205577460427556, + "learning_rate": 1.3346585208189727e-05, + "loss": 0.5189, + "step": 7868 + }, + { + "epoch": 1.23, + "grad_norm": 16.536255799702765, + "learning_rate": 1.3344996000293976e-05, + "loss": 0.6064, + "step": 7869 + }, + { + "epoch": 1.23, + "grad_norm": 31.37655068788581, + "learning_rate": 1.3343406697268493e-05, + "loss": 0.7551, + "step": 7870 + }, + { + "epoch": 1.23, + "grad_norm": 22.403108360591908, + "learning_rate": 1.334181729915848e-05, + "loss": 0.6887, + "step": 7871 + }, + { + "epoch": 1.23, + "grad_norm": 17.3271059961191, + "learning_rate": 1.3340227806009138e-05, + "loss": 0.6999, + "step": 7872 + }, + { + "epoch": 1.23, + "grad_norm": 10.470230556526593, + "learning_rate": 1.3338638217865673e-05, + "loss": 0.6047, + "step": 7873 + }, + { + "epoch": 1.23, + "grad_norm": 21.523624682434654, + "learning_rate": 1.333704853477329e-05, + "loss": 0.6704, + "step": 7874 + }, + { + "epoch": 1.23, + "grad_norm": 18.07322435052991, + "learning_rate": 1.3335458756777195e-05, + "loss": 0.6204, + "step": 7875 + }, + { + "epoch": 1.23, + "grad_norm": 19.890217170444934, + "learning_rate": 1.3333868883922606e-05, + "loss": 0.6307, + "step": 7876 + }, + { + "epoch": 1.23, + "grad_norm": 23.18885976989232, + "learning_rate": 1.3332278916254739e-05, + "loss": 0.6319, + "step": 7877 + }, + { + "epoch": 1.23, + 
"grad_norm": 22.45499080585571, + "learning_rate": 1.3330688853818808e-05, + "loss": 0.6339, + "step": 7878 + }, + { + "epoch": 1.23, + "grad_norm": 18.178983751781235, + "learning_rate": 1.332909869666003e-05, + "loss": 0.5528, + "step": 7879 + }, + { + "epoch": 1.23, + "grad_norm": 14.828388373565346, + "learning_rate": 1.3327508444823637e-05, + "loss": 0.6013, + "step": 7880 + }, + { + "epoch": 1.23, + "grad_norm": 35.98520209943347, + "learning_rate": 1.3325918098354848e-05, + "loss": 0.6541, + "step": 7881 + }, + { + "epoch": 1.23, + "grad_norm": 12.243383701530203, + "learning_rate": 1.33243276572989e-05, + "loss": 0.6377, + "step": 7882 + }, + { + "epoch": 1.23, + "grad_norm": 19.21215025543746, + "learning_rate": 1.3322737121701011e-05, + "loss": 0.6345, + "step": 7883 + }, + { + "epoch": 1.23, + "grad_norm": 16.014958809743952, + "learning_rate": 1.3321146491606423e-05, + "loss": 0.6191, + "step": 7884 + }, + { + "epoch": 1.23, + "grad_norm": 20.491245505540082, + "learning_rate": 1.3319555767060373e-05, + "loss": 0.6163, + "step": 7885 + }, + { + "epoch": 1.23, + "grad_norm": 14.035706632181144, + "learning_rate": 1.33179649481081e-05, + "loss": 0.6931, + "step": 7886 + }, + { + "epoch": 1.23, + "grad_norm": 16.461097830565055, + "learning_rate": 1.3316374034794844e-05, + "loss": 0.7712, + "step": 7887 + }, + { + "epoch": 1.23, + "grad_norm": 18.399169550948923, + "learning_rate": 1.331478302716585e-05, + "loss": 0.6629, + "step": 7888 + }, + { + "epoch": 1.23, + "grad_norm": 20.88543885916358, + "learning_rate": 1.3313191925266362e-05, + "loss": 0.721, + "step": 7889 + }, + { + "epoch": 1.23, + "grad_norm": 17.65867786762948, + "learning_rate": 1.3311600729141641e-05, + "loss": 0.6391, + "step": 7890 + }, + { + "epoch": 1.23, + "grad_norm": 12.215032964594407, + "learning_rate": 1.3310009438836925e-05, + "loss": 0.6456, + "step": 7891 + }, + { + "epoch": 1.23, + "grad_norm": 13.163176239584654, + "learning_rate": 1.3308418054397485e-05, + "loss": 0.5854, + "step": 7892 + }, + { + "epoch": 1.23, + "grad_norm": 22.397403010577648, + "learning_rate": 1.3306826575868565e-05, + "loss": 0.6801, + "step": 7893 + }, + { + "epoch": 1.23, + "grad_norm": 17.50753423426254, + "learning_rate": 1.3305235003295434e-05, + "loss": 0.6, + "step": 7894 + }, + { + "epoch": 1.23, + "grad_norm": 11.560138784772928, + "learning_rate": 1.330364333672335e-05, + "loss": 0.5809, + "step": 7895 + }, + { + "epoch": 1.23, + "grad_norm": 15.55651368342618, + "learning_rate": 1.3302051576197586e-05, + "loss": 0.6925, + "step": 7896 + }, + { + "epoch": 1.23, + "grad_norm": 14.831075461221058, + "learning_rate": 1.3300459721763405e-05, + "loss": 0.6789, + "step": 7897 + }, + { + "epoch": 1.23, + "grad_norm": 20.710369055180365, + "learning_rate": 1.329886777346608e-05, + "loss": 0.6549, + "step": 7898 + }, + { + "epoch": 1.23, + "grad_norm": 21.264327182730028, + "learning_rate": 1.3297275731350885e-05, + "loss": 0.6392, + "step": 7899 + }, + { + "epoch": 1.23, + "grad_norm": 14.441937706898234, + "learning_rate": 1.3295683595463099e-05, + "loss": 0.6144, + "step": 7900 + }, + { + "epoch": 1.23, + "grad_norm": 19.786126687444444, + "learning_rate": 1.3294091365847998e-05, + "loss": 0.6627, + "step": 7901 + }, + { + "epoch": 1.23, + "grad_norm": 24.377213004717987, + "learning_rate": 1.3292499042550869e-05, + "loss": 0.7348, + "step": 7902 + }, + { + "epoch": 1.23, + "grad_norm": 23.047238160521076, + "learning_rate": 1.3290906625616988e-05, + "loss": 0.6707, + "step": 7903 + }, + { + "epoch": 1.23, + 
"grad_norm": 24.753436743419407, + "learning_rate": 1.3289314115091649e-05, + "loss": 0.6353, + "step": 7904 + }, + { + "epoch": 1.23, + "grad_norm": 18.532643287509494, + "learning_rate": 1.3287721511020145e-05, + "loss": 0.6173, + "step": 7905 + }, + { + "epoch": 1.23, + "grad_norm": 18.753650588851634, + "learning_rate": 1.3286128813447759e-05, + "loss": 0.6767, + "step": 7906 + }, + { + "epoch": 1.24, + "grad_norm": 27.803578055323058, + "learning_rate": 1.3284536022419796e-05, + "loss": 0.7903, + "step": 7907 + }, + { + "epoch": 1.24, + "grad_norm": 14.273160275093604, + "learning_rate": 1.3282943137981546e-05, + "loss": 0.6562, + "step": 7908 + }, + { + "epoch": 1.24, + "grad_norm": 18.305561522661897, + "learning_rate": 1.3281350160178317e-05, + "loss": 0.6933, + "step": 7909 + }, + { + "epoch": 1.24, + "grad_norm": 15.470618459293842, + "learning_rate": 1.3279757089055404e-05, + "loss": 0.6052, + "step": 7910 + }, + { + "epoch": 1.24, + "grad_norm": 20.91177093019644, + "learning_rate": 1.3278163924658123e-05, + "loss": 0.622, + "step": 7911 + }, + { + "epoch": 1.24, + "grad_norm": 18.094124083530875, + "learning_rate": 1.3276570667031777e-05, + "loss": 0.6626, + "step": 7912 + }, + { + "epoch": 1.24, + "grad_norm": 13.997377877039073, + "learning_rate": 1.3274977316221678e-05, + "loss": 0.6484, + "step": 7913 + }, + { + "epoch": 1.24, + "grad_norm": 34.02435372977003, + "learning_rate": 1.3273383872273137e-05, + "loss": 0.7073, + "step": 7914 + }, + { + "epoch": 1.24, + "grad_norm": 12.765552197205572, + "learning_rate": 1.3271790335231476e-05, + "loss": 0.6007, + "step": 7915 + }, + { + "epoch": 1.24, + "grad_norm": 10.841112129305039, + "learning_rate": 1.327019670514201e-05, + "loss": 0.6273, + "step": 7916 + }, + { + "epoch": 1.24, + "grad_norm": 16.69000467959322, + "learning_rate": 1.3268602982050064e-05, + "loss": 0.611, + "step": 7917 + }, + { + "epoch": 1.24, + "grad_norm": 19.28029239987682, + "learning_rate": 1.3267009166000961e-05, + "loss": 0.7759, + "step": 7918 + }, + { + "epoch": 1.24, + "grad_norm": 20.22753663293937, + "learning_rate": 1.3265415257040031e-05, + "loss": 0.7013, + "step": 7919 + }, + { + "epoch": 1.24, + "grad_norm": 13.891110487103646, + "learning_rate": 1.3263821255212598e-05, + "loss": 0.5992, + "step": 7920 + }, + { + "epoch": 1.24, + "grad_norm": 12.740556329509829, + "learning_rate": 1.3262227160564001e-05, + "loss": 0.588, + "step": 7921 + }, + { + "epoch": 1.24, + "grad_norm": 15.409217790425616, + "learning_rate": 1.3260632973139569e-05, + "loss": 0.5627, + "step": 7922 + }, + { + "epoch": 1.24, + "grad_norm": 32.057686791152825, + "learning_rate": 1.3259038692984643e-05, + "loss": 0.7109, + "step": 7923 + }, + { + "epoch": 1.24, + "grad_norm": 23.50835141204126, + "learning_rate": 1.3257444320144566e-05, + "loss": 0.6567, + "step": 7924 + }, + { + "epoch": 1.24, + "grad_norm": 18.554456563616693, + "learning_rate": 1.3255849854664674e-05, + "loss": 0.6055, + "step": 7925 + }, + { + "epoch": 1.24, + "grad_norm": 20.78730080790689, + "learning_rate": 1.325425529659032e-05, + "loss": 0.6669, + "step": 7926 + }, + { + "epoch": 1.24, + "grad_norm": 26.201441033424327, + "learning_rate": 1.3252660645966846e-05, + "loss": 0.6913, + "step": 7927 + }, + { + "epoch": 1.24, + "grad_norm": 15.291437704780622, + "learning_rate": 1.3251065902839608e-05, + "loss": 0.5889, + "step": 7928 + }, + { + "epoch": 1.24, + "grad_norm": 14.25152501407792, + "learning_rate": 1.3249471067253957e-05, + "loss": 0.6122, + "step": 7929 + }, + { + "epoch": 1.24, + 
"grad_norm": 18.182677067842906, + "learning_rate": 1.3247876139255249e-05, + "loss": 0.666, + "step": 7930 + }, + { + "epoch": 1.24, + "grad_norm": 28.480784499600276, + "learning_rate": 1.3246281118888843e-05, + "loss": 0.6199, + "step": 7931 + }, + { + "epoch": 1.24, + "grad_norm": 18.85401371600529, + "learning_rate": 1.3244686006200105e-05, + "loss": 0.7349, + "step": 7932 + }, + { + "epoch": 1.24, + "grad_norm": 13.55603224050496, + "learning_rate": 1.324309080123439e-05, + "loss": 0.6342, + "step": 7933 + }, + { + "epoch": 1.24, + "grad_norm": 14.490307000093008, + "learning_rate": 1.3241495504037074e-05, + "loss": 0.6602, + "step": 7934 + }, + { + "epoch": 1.24, + "grad_norm": 19.061803130403938, + "learning_rate": 1.323990011465352e-05, + "loss": 0.6368, + "step": 7935 + }, + { + "epoch": 1.24, + "grad_norm": 21.654416692014685, + "learning_rate": 1.3238304633129102e-05, + "loss": 0.6791, + "step": 7936 + }, + { + "epoch": 1.24, + "grad_norm": 15.013629017532137, + "learning_rate": 1.3236709059509194e-05, + "loss": 0.6531, + "step": 7937 + }, + { + "epoch": 1.24, + "grad_norm": 17.749109220651647, + "learning_rate": 1.3235113393839174e-05, + "loss": 0.6401, + "step": 7938 + }, + { + "epoch": 1.24, + "grad_norm": 13.597698698166603, + "learning_rate": 1.3233517636164423e-05, + "loss": 0.5637, + "step": 7939 + }, + { + "epoch": 1.24, + "grad_norm": 19.845483500689486, + "learning_rate": 1.3231921786530323e-05, + "loss": 0.6072, + "step": 7940 + }, + { + "epoch": 1.24, + "grad_norm": 23.05340353228807, + "learning_rate": 1.3230325844982254e-05, + "loss": 0.6468, + "step": 7941 + }, + { + "epoch": 1.24, + "grad_norm": 20.41042156977892, + "learning_rate": 1.3228729811565608e-05, + "loss": 0.6402, + "step": 7942 + }, + { + "epoch": 1.24, + "grad_norm": 19.345713386359616, + "learning_rate": 1.3227133686325777e-05, + "loss": 0.6406, + "step": 7943 + }, + { + "epoch": 1.24, + "grad_norm": 17.9661174778001, + "learning_rate": 1.3225537469308152e-05, + "loss": 0.8235, + "step": 7944 + }, + { + "epoch": 1.24, + "grad_norm": 21.523520140653662, + "learning_rate": 1.3223941160558127e-05, + "loss": 0.5726, + "step": 7945 + }, + { + "epoch": 1.24, + "grad_norm": 18.432950747673917, + "learning_rate": 1.3222344760121105e-05, + "loss": 0.6464, + "step": 7946 + }, + { + "epoch": 1.24, + "grad_norm": 22.092475980503007, + "learning_rate": 1.3220748268042478e-05, + "loss": 0.7446, + "step": 7947 + }, + { + "epoch": 1.24, + "grad_norm": 21.004634284806414, + "learning_rate": 1.3219151684367656e-05, + "loss": 0.6777, + "step": 7948 + }, + { + "epoch": 1.24, + "grad_norm": 14.929152988664814, + "learning_rate": 1.3217555009142044e-05, + "loss": 0.602, + "step": 7949 + }, + { + "epoch": 1.24, + "grad_norm": 10.8817185716788, + "learning_rate": 1.3215958242411048e-05, + "loss": 0.7217, + "step": 7950 + }, + { + "epoch": 1.24, + "grad_norm": 15.319623139418203, + "learning_rate": 1.3214361384220084e-05, + "loss": 0.6298, + "step": 7951 + }, + { + "epoch": 1.24, + "grad_norm": 21.567336582154155, + "learning_rate": 1.321276443461456e-05, + "loss": 0.7221, + "step": 7952 + }, + { + "epoch": 1.24, + "grad_norm": 29.23238888410719, + "learning_rate": 1.3211167393639898e-05, + "loss": 0.6523, + "step": 7953 + }, + { + "epoch": 1.24, + "grad_norm": 22.35537994463111, + "learning_rate": 1.320957026134151e-05, + "loss": 0.6673, + "step": 7954 + }, + { + "epoch": 1.24, + "grad_norm": 19.716767447961214, + "learning_rate": 1.3207973037764822e-05, + "loss": 0.6886, + "step": 7955 + }, + { + "epoch": 1.24, + 
"grad_norm": 13.993860901341591, + "learning_rate": 1.3206375722955261e-05, + "loss": 0.6252, + "step": 7956 + }, + { + "epoch": 1.24, + "grad_norm": 34.21098975756675, + "learning_rate": 1.3204778316958247e-05, + "loss": 0.6027, + "step": 7957 + }, + { + "epoch": 1.24, + "grad_norm": 11.75832085191696, + "learning_rate": 1.3203180819819213e-05, + "loss": 0.6426, + "step": 7958 + }, + { + "epoch": 1.24, + "grad_norm": 14.815084283267451, + "learning_rate": 1.320158323158359e-05, + "loss": 0.6695, + "step": 7959 + }, + { + "epoch": 1.24, + "grad_norm": 11.100789063992094, + "learning_rate": 1.319998555229681e-05, + "loss": 0.603, + "step": 7960 + }, + { + "epoch": 1.24, + "grad_norm": 25.813543146220944, + "learning_rate": 1.3198387782004315e-05, + "loss": 0.7146, + "step": 7961 + }, + { + "epoch": 1.24, + "grad_norm": 13.048277472308529, + "learning_rate": 1.3196789920751546e-05, + "loss": 0.6135, + "step": 7962 + }, + { + "epoch": 1.24, + "grad_norm": 25.871516541486958, + "learning_rate": 1.3195191968583938e-05, + "loss": 0.689, + "step": 7963 + }, + { + "epoch": 1.24, + "grad_norm": 20.541956828209123, + "learning_rate": 1.3193593925546941e-05, + "loss": 0.5964, + "step": 7964 + }, + { + "epoch": 1.24, + "grad_norm": 24.38098700551929, + "learning_rate": 1.3191995791686002e-05, + "loss": 0.6473, + "step": 7965 + }, + { + "epoch": 1.24, + "grad_norm": 18.753004758398816, + "learning_rate": 1.3190397567046569e-05, + "loss": 0.6561, + "step": 7966 + }, + { + "epoch": 1.24, + "grad_norm": 19.73712064953473, + "learning_rate": 1.3188799251674092e-05, + "loss": 0.6794, + "step": 7967 + }, + { + "epoch": 1.24, + "grad_norm": 16.066982157870687, + "learning_rate": 1.3187200845614035e-05, + "loss": 0.6959, + "step": 7968 + }, + { + "epoch": 1.24, + "grad_norm": 19.87475163601495, + "learning_rate": 1.3185602348911846e-05, + "loss": 0.5211, + "step": 7969 + }, + { + "epoch": 1.24, + "grad_norm": 15.02567790223042, + "learning_rate": 1.318400376161299e-05, + "loss": 0.5565, + "step": 7970 + }, + { + "epoch": 1.25, + "grad_norm": 20.72316915540688, + "learning_rate": 1.3182405083762935e-05, + "loss": 0.6656, + "step": 7971 + }, + { + "epoch": 1.25, + "grad_norm": 22.445719403217705, + "learning_rate": 1.3180806315407137e-05, + "loss": 0.6301, + "step": 7972 + }, + { + "epoch": 1.25, + "grad_norm": 13.544344864032784, + "learning_rate": 1.3179207456591067e-05, + "loss": 0.6285, + "step": 7973 + }, + { + "epoch": 1.25, + "grad_norm": 19.56743709737352, + "learning_rate": 1.31776085073602e-05, + "loss": 0.6398, + "step": 7974 + }, + { + "epoch": 1.25, + "grad_norm": 20.49886792134699, + "learning_rate": 1.3176009467760005e-05, + "loss": 0.6281, + "step": 7975 + }, + { + "epoch": 1.25, + "grad_norm": 18.215587319370943, + "learning_rate": 1.3174410337835956e-05, + "loss": 0.6139, + "step": 7976 + }, + { + "epoch": 1.25, + "grad_norm": 16.98417351690066, + "learning_rate": 1.3172811117633536e-05, + "loss": 0.622, + "step": 7977 + }, + { + "epoch": 1.25, + "grad_norm": 18.407426807415938, + "learning_rate": 1.3171211807198225e-05, + "loss": 0.6029, + "step": 7978 + }, + { + "epoch": 1.25, + "grad_norm": 14.876736337735665, + "learning_rate": 1.3169612406575501e-05, + "loss": 0.5869, + "step": 7979 + }, + { + "epoch": 1.25, + "grad_norm": 18.51903827206621, + "learning_rate": 1.316801291581086e-05, + "loss": 0.6657, + "step": 7980 + }, + { + "epoch": 1.25, + "grad_norm": 15.993202123709066, + "learning_rate": 1.316641333494978e-05, + "loss": 0.622, + "step": 7981 + }, + { + "epoch": 1.25, + "grad_norm": 
16.914745675136928, + "learning_rate": 1.316481366403776e-05, + "loss": 0.6857, + "step": 7982 + }, + { + "epoch": 1.25, + "grad_norm": 14.067066034141959, + "learning_rate": 1.316321390312029e-05, + "loss": 0.5794, + "step": 7983 + }, + { + "epoch": 1.25, + "grad_norm": 21.279088552644822, + "learning_rate": 1.3161614052242869e-05, + "loss": 0.644, + "step": 7984 + }, + { + "epoch": 1.25, + "grad_norm": 22.278151384964875, + "learning_rate": 1.3160014111450989e-05, + "loss": 0.6082, + "step": 7985 + }, + { + "epoch": 1.25, + "grad_norm": 24.751250024738408, + "learning_rate": 1.3158414080790158e-05, + "loss": 0.6466, + "step": 7986 + }, + { + "epoch": 1.25, + "grad_norm": 25.5915203357572, + "learning_rate": 1.315681396030588e-05, + "loss": 0.6963, + "step": 7987 + }, + { + "epoch": 1.25, + "grad_norm": 14.11976135716292, + "learning_rate": 1.3155213750043658e-05, + "loss": 0.6684, + "step": 7988 + }, + { + "epoch": 1.25, + "grad_norm": 20.56733587035152, + "learning_rate": 1.3153613450049006e-05, + "loss": 0.6152, + "step": 7989 + }, + { + "epoch": 1.25, + "grad_norm": 16.830153845485786, + "learning_rate": 1.3152013060367428e-05, + "loss": 0.8028, + "step": 7990 + }, + { + "epoch": 1.25, + "grad_norm": 20.26611231509897, + "learning_rate": 1.3150412581044446e-05, + "loss": 0.672, + "step": 7991 + }, + { + "epoch": 1.25, + "grad_norm": 14.139062711391306, + "learning_rate": 1.314881201212557e-05, + "loss": 0.6382, + "step": 7992 + }, + { + "epoch": 1.25, + "grad_norm": 12.39103672851601, + "learning_rate": 1.3147211353656327e-05, + "loss": 0.5999, + "step": 7993 + }, + { + "epoch": 1.25, + "grad_norm": 21.83432799329184, + "learning_rate": 1.314561060568223e-05, + "loss": 0.6059, + "step": 7994 + }, + { + "epoch": 1.25, + "grad_norm": 16.23246007104005, + "learning_rate": 1.3144009768248808e-05, + "loss": 0.655, + "step": 7995 + }, + { + "epoch": 1.25, + "grad_norm": 18.06712140910257, + "learning_rate": 1.3142408841401586e-05, + "loss": 0.6001, + "step": 7996 + }, + { + "epoch": 1.25, + "grad_norm": 15.775752023871869, + "learning_rate": 1.3140807825186098e-05, + "loss": 0.5959, + "step": 7997 + }, + { + "epoch": 1.25, + "grad_norm": 25.43728491038459, + "learning_rate": 1.313920671964787e-05, + "loss": 0.6157, + "step": 7998 + }, + { + "epoch": 1.25, + "grad_norm": 18.1862003240563, + "learning_rate": 1.3137605524832442e-05, + "loss": 0.7027, + "step": 7999 + }, + { + "epoch": 1.25, + "grad_norm": 17.532901553101294, + "learning_rate": 1.3136004240785348e-05, + "loss": 0.6413, + "step": 8000 + }, + { + "epoch": 1.25, + "grad_norm": 14.918692224404275, + "learning_rate": 1.3134402867552125e-05, + "loss": 0.6516, + "step": 8001 + }, + { + "epoch": 1.25, + "grad_norm": 26.022916189083826, + "learning_rate": 1.3132801405178322e-05, + "loss": 0.7436, + "step": 8002 + }, + { + "epoch": 1.25, + "grad_norm": 28.172128322898967, + "learning_rate": 1.3131199853709477e-05, + "loss": 0.6289, + "step": 8003 + }, + { + "epoch": 1.25, + "grad_norm": 33.08401007639743, + "learning_rate": 1.3129598213191139e-05, + "loss": 0.6346, + "step": 8004 + }, + { + "epoch": 1.25, + "grad_norm": 22.320825942732977, + "learning_rate": 1.3127996483668862e-05, + "loss": 0.6322, + "step": 8005 + }, + { + "epoch": 1.25, + "grad_norm": 13.723695398040176, + "learning_rate": 1.3126394665188191e-05, + "loss": 0.5866, + "step": 8006 + }, + { + "epoch": 1.25, + "grad_norm": 19.3880531225369, + "learning_rate": 1.3124792757794687e-05, + "loss": 0.6105, + "step": 8007 + }, + { + "epoch": 1.25, + "grad_norm": 
18.773889711396897, + "learning_rate": 1.3123190761533903e-05, + "loss": 0.6621, + "step": 8008 + }, + { + "epoch": 1.25, + "grad_norm": 19.007287362743273, + "learning_rate": 1.3121588676451405e-05, + "loss": 0.6335, + "step": 8009 + }, + { + "epoch": 1.25, + "grad_norm": 22.835750770914707, + "learning_rate": 1.3119986502592747e-05, + "loss": 0.8118, + "step": 8010 + }, + { + "epoch": 1.25, + "grad_norm": 16.21119332996405, + "learning_rate": 1.3118384240003498e-05, + "loss": 0.6193, + "step": 8011 + }, + { + "epoch": 1.25, + "grad_norm": 16.051947208073248, + "learning_rate": 1.311678188872923e-05, + "loss": 0.6799, + "step": 8012 + }, + { + "epoch": 1.25, + "grad_norm": 22.9895501395088, + "learning_rate": 1.3115179448815503e-05, + "loss": 0.7574, + "step": 8013 + }, + { + "epoch": 1.25, + "grad_norm": 15.752326133175949, + "learning_rate": 1.3113576920307896e-05, + "loss": 0.6685, + "step": 8014 + }, + { + "epoch": 1.25, + "grad_norm": 29.550321243598614, + "learning_rate": 1.3111974303251984e-05, + "loss": 0.6871, + "step": 8015 + }, + { + "epoch": 1.25, + "grad_norm": 17.699380170060376, + "learning_rate": 1.3110371597693344e-05, + "loss": 0.5708, + "step": 8016 + }, + { + "epoch": 1.25, + "grad_norm": 21.613952519430622, + "learning_rate": 1.3108768803677554e-05, + "loss": 0.6425, + "step": 8017 + }, + { + "epoch": 1.25, + "grad_norm": 17.555677728944183, + "learning_rate": 1.3107165921250197e-05, + "loss": 0.7518, + "step": 8018 + }, + { + "epoch": 1.25, + "grad_norm": 22.73028546974145, + "learning_rate": 1.3105562950456857e-05, + "loss": 0.6634, + "step": 8019 + }, + { + "epoch": 1.25, + "grad_norm": 16.786130834689537, + "learning_rate": 1.310395989134313e-05, + "loss": 0.702, + "step": 8020 + }, + { + "epoch": 1.25, + "grad_norm": 20.777253248459047, + "learning_rate": 1.3102356743954594e-05, + "loss": 0.6694, + "step": 8021 + }, + { + "epoch": 1.25, + "grad_norm": 23.333587093903443, + "learning_rate": 1.3100753508336852e-05, + "loss": 0.6967, + "step": 8022 + }, + { + "epoch": 1.25, + "grad_norm": 22.969389769343948, + "learning_rate": 1.3099150184535487e-05, + "loss": 0.6445, + "step": 8023 + }, + { + "epoch": 1.25, + "grad_norm": 23.006206591118794, + "learning_rate": 1.309754677259611e-05, + "loss": 0.6807, + "step": 8024 + }, + { + "epoch": 1.25, + "grad_norm": 20.049078108190155, + "learning_rate": 1.3095943272564312e-05, + "loss": 0.618, + "step": 8025 + }, + { + "epoch": 1.25, + "grad_norm": 18.950898794490673, + "learning_rate": 1.3094339684485698e-05, + "loss": 0.5541, + "step": 8026 + }, + { + "epoch": 1.25, + "grad_norm": 13.673019773239924, + "learning_rate": 1.3092736008405875e-05, + "loss": 0.6066, + "step": 8027 + }, + { + "epoch": 1.25, + "grad_norm": 15.261889639083932, + "learning_rate": 1.3091132244370449e-05, + "loss": 0.5696, + "step": 8028 + }, + { + "epoch": 1.25, + "grad_norm": 16.930248221286238, + "learning_rate": 1.3089528392425029e-05, + "loss": 0.581, + "step": 8029 + }, + { + "epoch": 1.25, + "grad_norm": 19.690711249989143, + "learning_rate": 1.3087924452615228e-05, + "loss": 0.7141, + "step": 8030 + }, + { + "epoch": 1.25, + "grad_norm": 34.32094288704457, + "learning_rate": 1.3086320424986662e-05, + "loss": 0.6354, + "step": 8031 + }, + { + "epoch": 1.25, + "grad_norm": 20.845228194962765, + "learning_rate": 1.308471630958495e-05, + "loss": 0.6104, + "step": 8032 + }, + { + "epoch": 1.25, + "grad_norm": 20.53215942226686, + "learning_rate": 1.308311210645571e-05, + "loss": 0.7366, + "step": 8033 + }, + { + "epoch": 1.25, + "grad_norm": 
16.13644579044986, + "learning_rate": 1.3081507815644567e-05, + "loss": 0.7059, + "step": 8034 + }, + { + "epoch": 1.26, + "grad_norm": 16.96785430537651, + "learning_rate": 1.3079903437197143e-05, + "loss": 0.6275, + "step": 8035 + }, + { + "epoch": 1.26, + "grad_norm": 20.643482824029313, + "learning_rate": 1.3078298971159067e-05, + "loss": 0.6371, + "step": 8036 + }, + { + "epoch": 1.26, + "grad_norm": 16.96079941038394, + "learning_rate": 1.307669441757597e-05, + "loss": 0.7108, + "step": 8037 + }, + { + "epoch": 1.26, + "grad_norm": 18.606243149464927, + "learning_rate": 1.3075089776493481e-05, + "loss": 0.6766, + "step": 8038 + }, + { + "epoch": 1.26, + "grad_norm": 22.976607181592534, + "learning_rate": 1.3073485047957242e-05, + "loss": 0.753, + "step": 8039 + }, + { + "epoch": 1.26, + "grad_norm": 16.59863223795007, + "learning_rate": 1.3071880232012881e-05, + "loss": 0.7284, + "step": 8040 + }, + { + "epoch": 1.26, + "grad_norm": 26.25509165928565, + "learning_rate": 1.3070275328706047e-05, + "loss": 0.6254, + "step": 8041 + }, + { + "epoch": 1.26, + "grad_norm": 18.182875356608985, + "learning_rate": 1.306867033808238e-05, + "loss": 0.591, + "step": 8042 + }, + { + "epoch": 1.26, + "grad_norm": 27.29877226011105, + "learning_rate": 1.306706526018752e-05, + "loss": 0.6691, + "step": 8043 + }, + { + "epoch": 1.26, + "grad_norm": 19.16888109006233, + "learning_rate": 1.306546009506712e-05, + "loss": 0.6923, + "step": 8044 + }, + { + "epoch": 1.26, + "grad_norm": 16.08747598980473, + "learning_rate": 1.3063854842766828e-05, + "loss": 0.6637, + "step": 8045 + }, + { + "epoch": 1.26, + "grad_norm": 28.459688069690944, + "learning_rate": 1.3062249503332294e-05, + "loss": 0.7626, + "step": 8046 + }, + { + "epoch": 1.26, + "grad_norm": 18.987700725973603, + "learning_rate": 1.3060644076809178e-05, + "loss": 0.689, + "step": 8047 + }, + { + "epoch": 1.26, + "grad_norm": 17.207565630561497, + "learning_rate": 1.3059038563243134e-05, + "loss": 0.6706, + "step": 8048 + }, + { + "epoch": 1.26, + "grad_norm": 18.32752337163448, + "learning_rate": 1.3057432962679823e-05, + "loss": 0.634, + "step": 8049 + }, + { + "epoch": 1.26, + "grad_norm": 17.357077477434743, + "learning_rate": 1.3055827275164908e-05, + "loss": 0.6368, + "step": 8050 + }, + { + "epoch": 1.26, + "grad_norm": 14.405643486569828, + "learning_rate": 1.305422150074405e-05, + "loss": 0.5992, + "step": 8051 + }, + { + "epoch": 1.26, + "grad_norm": 18.089647997557105, + "learning_rate": 1.3052615639462923e-05, + "loss": 0.6943, + "step": 8052 + }, + { + "epoch": 1.26, + "grad_norm": 23.605909577930653, + "learning_rate": 1.3051009691367191e-05, + "loss": 0.6964, + "step": 8053 + }, + { + "epoch": 1.26, + "grad_norm": 24.936611174038248, + "learning_rate": 1.304940365650253e-05, + "loss": 0.6061, + "step": 8054 + }, + { + "epoch": 1.26, + "grad_norm": 17.87232370907216, + "learning_rate": 1.3047797534914609e-05, + "loss": 0.6753, + "step": 8055 + }, + { + "epoch": 1.26, + "grad_norm": 15.715961010743476, + "learning_rate": 1.3046191326649114e-05, + "loss": 0.7218, + "step": 8056 + }, + { + "epoch": 1.26, + "grad_norm": 16.881813148601875, + "learning_rate": 1.3044585031751716e-05, + "loss": 0.6388, + "step": 8057 + }, + { + "epoch": 1.26, + "grad_norm": 19.289576357327363, + "learning_rate": 1.3042978650268102e-05, + "loss": 0.6415, + "step": 8058 + }, + { + "epoch": 1.26, + "grad_norm": 30.86277932209255, + "learning_rate": 1.3041372182243955e-05, + "loss": 0.6875, + "step": 8059 + }, + { + "epoch": 1.26, + "grad_norm": 
17.82818851325902, + "learning_rate": 1.3039765627724961e-05, + "loss": 0.6484, + "step": 8060 + }, + { + "epoch": 1.26, + "grad_norm": 12.779006779809293, + "learning_rate": 1.3038158986756812e-05, + "loss": 0.6049, + "step": 8061 + }, + { + "epoch": 1.26, + "grad_norm": 38.29816063334219, + "learning_rate": 1.30365522593852e-05, + "loss": 0.687, + "step": 8062 + }, + { + "epoch": 1.26, + "grad_norm": 23.63552919362852, + "learning_rate": 1.3034945445655813e-05, + "loss": 0.5901, + "step": 8063 + }, + { + "epoch": 1.26, + "grad_norm": 15.039137659256628, + "learning_rate": 1.303333854561436e-05, + "loss": 0.6291, + "step": 8064 + }, + { + "epoch": 1.26, + "grad_norm": 19.518064989659592, + "learning_rate": 1.3031731559306528e-05, + "loss": 0.6836, + "step": 8065 + }, + { + "epoch": 1.26, + "grad_norm": 19.162309839383838, + "learning_rate": 1.3030124486778029e-05, + "loss": 0.6957, + "step": 8066 + }, + { + "epoch": 1.26, + "grad_norm": 13.673784059481592, + "learning_rate": 1.3028517328074557e-05, + "loss": 0.5744, + "step": 8067 + }, + { + "epoch": 1.26, + "grad_norm": 19.798210789957974, + "learning_rate": 1.3026910083241825e-05, + "loss": 0.6016, + "step": 8068 + }, + { + "epoch": 1.26, + "grad_norm": 20.60857404840023, + "learning_rate": 1.3025302752325544e-05, + "loss": 0.68, + "step": 8069 + }, + { + "epoch": 1.26, + "grad_norm": 23.599724971756796, + "learning_rate": 1.302369533537142e-05, + "loss": 0.6848, + "step": 8070 + }, + { + "epoch": 1.26, + "grad_norm": 29.297590207334487, + "learning_rate": 1.302208783242517e-05, + "loss": 0.7169, + "step": 8071 + }, + { + "epoch": 1.26, + "grad_norm": 19.50109058549094, + "learning_rate": 1.302048024353251e-05, + "loss": 0.6008, + "step": 8072 + }, + { + "epoch": 1.26, + "grad_norm": 13.230349797811378, + "learning_rate": 1.3018872568739158e-05, + "loss": 0.5577, + "step": 8073 + }, + { + "epoch": 1.26, + "grad_norm": 13.740862755684619, + "learning_rate": 1.3017264808090834e-05, + "loss": 0.6639, + "step": 8074 + }, + { + "epoch": 1.26, + "grad_norm": 16.590514992000543, + "learning_rate": 1.3015656961633267e-05, + "loss": 0.6889, + "step": 8075 + }, + { + "epoch": 1.26, + "grad_norm": 12.703502957913768, + "learning_rate": 1.301404902941218e-05, + "loss": 0.6794, + "step": 8076 + }, + { + "epoch": 1.26, + "grad_norm": 13.48843429438833, + "learning_rate": 1.3012441011473302e-05, + "loss": 0.6521, + "step": 8077 + }, + { + "epoch": 1.26, + "grad_norm": 25.176779369281245, + "learning_rate": 1.3010832907862363e-05, + "loss": 0.6505, + "step": 8078 + }, + { + "epoch": 1.26, + "grad_norm": 14.06897907515434, + "learning_rate": 1.30092247186251e-05, + "loss": 0.607, + "step": 8079 + }, + { + "epoch": 1.26, + "grad_norm": 29.02435386207123, + "learning_rate": 1.300761644380724e-05, + "loss": 0.6733, + "step": 8080 + }, + { + "epoch": 1.26, + "grad_norm": 17.404721088481462, + "learning_rate": 1.3006008083454535e-05, + "loss": 0.6356, + "step": 8081 + }, + { + "epoch": 1.26, + "grad_norm": 17.623189423948606, + "learning_rate": 1.3004399637612712e-05, + "loss": 0.5975, + "step": 8082 + }, + { + "epoch": 1.26, + "grad_norm": 14.212860929356678, + "learning_rate": 1.3002791106327528e-05, + "loss": 0.6029, + "step": 8083 + }, + { + "epoch": 1.26, + "grad_norm": 14.77139714966941, + "learning_rate": 1.3001182489644715e-05, + "loss": 0.5814, + "step": 8084 + }, + { + "epoch": 1.26, + "grad_norm": 10.927539945752825, + "learning_rate": 1.2999573787610031e-05, + "loss": 0.6126, + "step": 8085 + }, + { + "epoch": 1.26, + "grad_norm": 
15.795299804758624, + "learning_rate": 1.2997965000269223e-05, + "loss": 0.5802, + "step": 8086 + }, + { + "epoch": 1.26, + "grad_norm": 15.780677363893393, + "learning_rate": 1.2996356127668044e-05, + "loss": 0.6255, + "step": 8087 + }, + { + "epoch": 1.26, + "grad_norm": 22.077401849251853, + "learning_rate": 1.2994747169852249e-05, + "loss": 0.6261, + "step": 8088 + }, + { + "epoch": 1.26, + "grad_norm": 19.559667540661923, + "learning_rate": 1.2993138126867597e-05, + "loss": 0.6299, + "step": 8089 + }, + { + "epoch": 1.26, + "grad_norm": 28.457817103440483, + "learning_rate": 1.2991528998759844e-05, + "loss": 0.6094, + "step": 8090 + }, + { + "epoch": 1.26, + "grad_norm": 27.339257345874962, + "learning_rate": 1.2989919785574764e-05, + "loss": 0.7234, + "step": 8091 + }, + { + "epoch": 1.26, + "grad_norm": 19.6435415185879, + "learning_rate": 1.2988310487358107e-05, + "loss": 0.6359, + "step": 8092 + }, + { + "epoch": 1.26, + "grad_norm": 23.987047190713486, + "learning_rate": 1.298670110415565e-05, + "loss": 0.6762, + "step": 8093 + }, + { + "epoch": 1.26, + "grad_norm": 18.850866606065082, + "learning_rate": 1.2985091636013163e-05, + "loss": 0.6225, + "step": 8094 + }, + { + "epoch": 1.26, + "grad_norm": 20.571227599151083, + "learning_rate": 1.2983482082976412e-05, + "loss": 0.6764, + "step": 8095 + }, + { + "epoch": 1.26, + "grad_norm": 38.67571502732276, + "learning_rate": 1.2981872445091179e-05, + "loss": 0.7227, + "step": 8096 + }, + { + "epoch": 1.26, + "grad_norm": 21.467369648786548, + "learning_rate": 1.2980262722403238e-05, + "loss": 0.6298, + "step": 8097 + }, + { + "epoch": 1.26, + "grad_norm": 16.703392703334963, + "learning_rate": 1.297865291495837e-05, + "loss": 0.6477, + "step": 8098 + }, + { + "epoch": 1.27, + "grad_norm": 18.284576594262212, + "learning_rate": 1.2977043022802351e-05, + "loss": 0.6747, + "step": 8099 + }, + { + "epoch": 1.27, + "grad_norm": 19.193384234372612, + "learning_rate": 1.2975433045980975e-05, + "loss": 0.6105, + "step": 8100 + }, + { + "epoch": 1.27, + "grad_norm": 15.67586365123682, + "learning_rate": 1.2973822984540018e-05, + "loss": 0.6995, + "step": 8101 + }, + { + "epoch": 1.27, + "grad_norm": 17.10651846798882, + "learning_rate": 1.2972212838525278e-05, + "loss": 0.6199, + "step": 8102 + }, + { + "epoch": 1.27, + "grad_norm": 15.45786557803001, + "learning_rate": 1.2970602607982546e-05, + "loss": 0.7185, + "step": 8103 + }, + { + "epoch": 1.27, + "grad_norm": 18.34438554224968, + "learning_rate": 1.2968992292957609e-05, + "loss": 0.5187, + "step": 8104 + }, + { + "epoch": 1.27, + "grad_norm": 21.936576690697496, + "learning_rate": 1.296738189349627e-05, + "loss": 0.6408, + "step": 8105 + }, + { + "epoch": 1.27, + "grad_norm": 19.64420570169021, + "learning_rate": 1.2965771409644328e-05, + "loss": 0.6539, + "step": 8106 + }, + { + "epoch": 1.27, + "grad_norm": 23.30442986456231, + "learning_rate": 1.2964160841447577e-05, + "loss": 0.618, + "step": 8107 + }, + { + "epoch": 1.27, + "grad_norm": 26.527710628823286, + "learning_rate": 1.2962550188951832e-05, + "loss": 0.6227, + "step": 8108 + }, + { + "epoch": 1.27, + "grad_norm": 20.65186116964494, + "learning_rate": 1.2960939452202888e-05, + "loss": 0.6377, + "step": 8109 + }, + { + "epoch": 1.27, + "grad_norm": 17.48742288029853, + "learning_rate": 1.2959328631246562e-05, + "loss": 0.6404, + "step": 8110 + }, + { + "epoch": 1.27, + "grad_norm": 17.599566165277086, + "learning_rate": 1.2957717726128656e-05, + "loss": 0.6358, + "step": 8111 + }, + { + "epoch": 1.27, + "grad_norm": 
16.049314629175925, + "learning_rate": 1.295610673689499e-05, + "loss": 0.721, + "step": 8112 + }, + { + "epoch": 1.27, + "grad_norm": 14.902807623466806, + "learning_rate": 1.295449566359138e-05, + "loss": 0.6448, + "step": 8113 + }, + { + "epoch": 1.27, + "grad_norm": 16.78937737953778, + "learning_rate": 1.2952884506263641e-05, + "loss": 0.5574, + "step": 8114 + }, + { + "epoch": 1.27, + "grad_norm": 17.79619761208542, + "learning_rate": 1.2951273264957591e-05, + "loss": 0.6552, + "step": 8115 + }, + { + "epoch": 1.27, + "grad_norm": 16.187879162107276, + "learning_rate": 1.2949661939719059e-05, + "loss": 0.7062, + "step": 8116 + }, + { + "epoch": 1.27, + "grad_norm": 22.66637270677793, + "learning_rate": 1.2948050530593863e-05, + "loss": 0.556, + "step": 8117 + }, + { + "epoch": 1.27, + "grad_norm": 16.678856294276525, + "learning_rate": 1.2946439037627838e-05, + "loss": 0.6095, + "step": 8118 + }, + { + "epoch": 1.27, + "grad_norm": 18.89260742317735, + "learning_rate": 1.2944827460866809e-05, + "loss": 0.6259, + "step": 8119 + }, + { + "epoch": 1.27, + "grad_norm": 25.78214248384012, + "learning_rate": 1.294321580035661e-05, + "loss": 0.6526, + "step": 8120 + }, + { + "epoch": 1.27, + "grad_norm": 16.834424581502493, + "learning_rate": 1.2941604056143078e-05, + "loss": 0.6312, + "step": 8121 + }, + { + "epoch": 1.27, + "grad_norm": 24.361240146110788, + "learning_rate": 1.2939992228272043e-05, + "loss": 0.69, + "step": 8122 + }, + { + "epoch": 1.27, + "grad_norm": 14.877071231404777, + "learning_rate": 1.2938380316789352e-05, + "loss": 0.5819, + "step": 8123 + }, + { + "epoch": 1.27, + "grad_norm": 14.473555748512167, + "learning_rate": 1.293676832174084e-05, + "loss": 0.6196, + "step": 8124 + }, + { + "epoch": 1.27, + "grad_norm": 16.73949488865913, + "learning_rate": 1.293515624317236e-05, + "loss": 0.6337, + "step": 8125 + }, + { + "epoch": 1.27, + "grad_norm": 15.448364607954518, + "learning_rate": 1.293354408112975e-05, + "loss": 0.5691, + "step": 8126 + }, + { + "epoch": 1.27, + "grad_norm": 16.694756018415156, + "learning_rate": 1.2931931835658864e-05, + "loss": 0.6521, + "step": 8127 + }, + { + "epoch": 1.27, + "grad_norm": 38.066722819233796, + "learning_rate": 1.293031950680555e-05, + "loss": 0.737, + "step": 8128 + }, + { + "epoch": 1.27, + "grad_norm": 21.798866402502114, + "learning_rate": 1.2928707094615665e-05, + "loss": 0.6981, + "step": 8129 + }, + { + "epoch": 1.27, + "grad_norm": 18.41769476532043, + "learning_rate": 1.2927094599135064e-05, + "loss": 0.5406, + "step": 8130 + }, + { + "epoch": 1.27, + "grad_norm": 19.64114958708201, + "learning_rate": 1.2925482020409604e-05, + "loss": 0.6444, + "step": 8131 + }, + { + "epoch": 1.27, + "grad_norm": 15.039166211848157, + "learning_rate": 1.2923869358485147e-05, + "loss": 0.6988, + "step": 8132 + }, + { + "epoch": 1.27, + "grad_norm": 17.34886971034679, + "learning_rate": 1.2922256613407553e-05, + "loss": 0.5802, + "step": 8133 + }, + { + "epoch": 1.27, + "grad_norm": 15.549011014723588, + "learning_rate": 1.292064378522269e-05, + "loss": 0.6199, + "step": 8134 + }, + { + "epoch": 1.27, + "grad_norm": 20.731477949708044, + "learning_rate": 1.2919030873976433e-05, + "loss": 0.6691, + "step": 8135 + }, + { + "epoch": 1.27, + "grad_norm": 16.514022325330988, + "learning_rate": 1.291741787971464e-05, + "loss": 0.6005, + "step": 8136 + }, + { + "epoch": 1.27, + "grad_norm": 27.042457301323097, + "learning_rate": 1.2915804802483192e-05, + "loss": 0.8196, + "step": 8137 + }, + { + "epoch": 1.27, + "grad_norm": 
30.26239914656705, + "learning_rate": 1.2914191642327958e-05, + "loss": 0.6548, + "step": 8138 + }, + { + "epoch": 1.27, + "grad_norm": 16.61272050246207, + "learning_rate": 1.2912578399294822e-05, + "loss": 0.5848, + "step": 8139 + }, + { + "epoch": 1.27, + "grad_norm": 23.194163024471106, + "learning_rate": 1.2910965073429657e-05, + "loss": 0.6923, + "step": 8140 + }, + { + "epoch": 1.27, + "grad_norm": 28.114345680387576, + "learning_rate": 1.2909351664778352e-05, + "loss": 0.689, + "step": 8141 + }, + { + "epoch": 1.27, + "grad_norm": 18.409550353111786, + "learning_rate": 1.2907738173386783e-05, + "loss": 0.6354, + "step": 8142 + }, + { + "epoch": 1.27, + "grad_norm": 17.480181081951525, + "learning_rate": 1.2906124599300842e-05, + "loss": 0.6481, + "step": 8143 + }, + { + "epoch": 1.27, + "grad_norm": 22.429377995836642, + "learning_rate": 1.2904510942566421e-05, + "loss": 0.6804, + "step": 8144 + }, + { + "epoch": 1.27, + "grad_norm": 15.722834172160264, + "learning_rate": 1.2902897203229406e-05, + "loss": 0.5646, + "step": 8145 + }, + { + "epoch": 1.27, + "grad_norm": 22.759729081395175, + "learning_rate": 1.2901283381335691e-05, + "loss": 0.6659, + "step": 8146 + }, + { + "epoch": 1.27, + "grad_norm": 21.444978165930042, + "learning_rate": 1.2899669476931176e-05, + "loss": 0.5811, + "step": 8147 + }, + { + "epoch": 1.27, + "grad_norm": 21.518306735423433, + "learning_rate": 1.2898055490061757e-05, + "loss": 0.6521, + "step": 8148 + }, + { + "epoch": 1.27, + "grad_norm": 18.733359029327055, + "learning_rate": 1.2896441420773335e-05, + "loss": 0.6341, + "step": 8149 + }, + { + "epoch": 1.27, + "grad_norm": 11.477306501553883, + "learning_rate": 1.2894827269111811e-05, + "loss": 0.6213, + "step": 8150 + }, + { + "epoch": 1.27, + "grad_norm": 17.630328221854942, + "learning_rate": 1.2893213035123096e-05, + "loss": 0.6439, + "step": 8151 + }, + { + "epoch": 1.27, + "grad_norm": 25.360006763758854, + "learning_rate": 1.2891598718853096e-05, + "loss": 0.7555, + "step": 8152 + }, + { + "epoch": 1.27, + "grad_norm": 13.679010002465652, + "learning_rate": 1.2889984320347715e-05, + "loss": 0.6289, + "step": 8153 + }, + { + "epoch": 1.27, + "grad_norm": 19.306412535242888, + "learning_rate": 1.2888369839652875e-05, + "loss": 0.6526, + "step": 8154 + }, + { + "epoch": 1.27, + "grad_norm": 17.080063890437046, + "learning_rate": 1.2886755276814484e-05, + "loss": 0.6543, + "step": 8155 + }, + { + "epoch": 1.27, + "grad_norm": 18.375367553028894, + "learning_rate": 1.2885140631878463e-05, + "loss": 0.6382, + "step": 8156 + }, + { + "epoch": 1.27, + "grad_norm": 16.28122932117928, + "learning_rate": 1.2883525904890728e-05, + "loss": 0.5818, + "step": 8157 + }, + { + "epoch": 1.27, + "grad_norm": 22.21215980378753, + "learning_rate": 1.2881911095897206e-05, + "loss": 0.5374, + "step": 8158 + }, + { + "epoch": 1.27, + "grad_norm": 18.70605472892297, + "learning_rate": 1.2880296204943816e-05, + "loss": 0.65, + "step": 8159 + }, + { + "epoch": 1.27, + "grad_norm": 22.843607854466118, + "learning_rate": 1.287868123207649e-05, + "loss": 0.6771, + "step": 8160 + }, + { + "epoch": 1.27, + "grad_norm": 18.619776545342837, + "learning_rate": 1.2877066177341148e-05, + "loss": 0.6139, + "step": 8161 + }, + { + "epoch": 1.27, + "grad_norm": 15.732893863656638, + "learning_rate": 1.287545104078373e-05, + "loss": 0.5971, + "step": 8162 + }, + { + "epoch": 1.28, + "grad_norm": 22.347713784442327, + "learning_rate": 1.2873835822450168e-05, + "loss": 0.7457, + "step": 8163 + }, + { + "epoch": 1.28, + "grad_norm": 
19.417072269309283, + "learning_rate": 1.2872220522386396e-05, + "loss": 0.6432, + "step": 8164 + }, + { + "epoch": 1.28, + "grad_norm": 12.638863247439584, + "learning_rate": 1.2870605140638352e-05, + "loss": 0.596, + "step": 8165 + }, + { + "epoch": 1.28, + "grad_norm": 18.743124205002776, + "learning_rate": 1.2868989677251976e-05, + "loss": 0.6887, + "step": 8166 + }, + { + "epoch": 1.28, + "grad_norm": 12.636283051586897, + "learning_rate": 1.2867374132273214e-05, + "loss": 0.6665, + "step": 8167 + }, + { + "epoch": 1.28, + "grad_norm": 20.321634925503275, + "learning_rate": 1.2865758505748006e-05, + "loss": 0.5936, + "step": 8168 + }, + { + "epoch": 1.28, + "grad_norm": 23.366738158522285, + "learning_rate": 1.2864142797722305e-05, + "loss": 0.6217, + "step": 8169 + }, + { + "epoch": 1.28, + "grad_norm": 16.724378762419924, + "learning_rate": 1.2862527008242056e-05, + "loss": 0.6125, + "step": 8170 + }, + { + "epoch": 1.28, + "grad_norm": 16.261670719380938, + "learning_rate": 1.2860911137353219e-05, + "loss": 0.6728, + "step": 8171 + }, + { + "epoch": 1.28, + "grad_norm": 17.35691244855049, + "learning_rate": 1.2859295185101736e-05, + "loss": 0.6272, + "step": 8172 + }, + { + "epoch": 1.28, + "grad_norm": 16.572375423177817, + "learning_rate": 1.2857679151533576e-05, + "loss": 0.7258, + "step": 8173 + }, + { + "epoch": 1.28, + "grad_norm": 20.28698023090884, + "learning_rate": 1.2856063036694688e-05, + "loss": 0.6612, + "step": 8174 + }, + { + "epoch": 1.28, + "grad_norm": 13.575975106319408, + "learning_rate": 1.2854446840631041e-05, + "loss": 0.6047, + "step": 8175 + }, + { + "epoch": 1.28, + "grad_norm": 14.183492852947104, + "learning_rate": 1.2852830563388597e-05, + "loss": 0.5923, + "step": 8176 + }, + { + "epoch": 1.28, + "grad_norm": 19.053828227886665, + "learning_rate": 1.2851214205013317e-05, + "loss": 0.6942, + "step": 8177 + }, + { + "epoch": 1.28, + "grad_norm": 14.7969470888414, + "learning_rate": 1.2849597765551171e-05, + "loss": 0.6402, + "step": 8178 + }, + { + "epoch": 1.28, + "grad_norm": 19.286436343268512, + "learning_rate": 1.2847981245048137e-05, + "loss": 0.6593, + "step": 8179 + }, + { + "epoch": 1.28, + "grad_norm": 19.417870226809434, + "learning_rate": 1.284636464355018e-05, + "loss": 0.6639, + "step": 8180 + }, + { + "epoch": 1.28, + "grad_norm": 19.517238998406224, + "learning_rate": 1.2844747961103279e-05, + "loss": 0.6272, + "step": 8181 + }, + { + "epoch": 1.28, + "grad_norm": 27.43945814826806, + "learning_rate": 1.2843131197753408e-05, + "loss": 0.6415, + "step": 8182 + }, + { + "epoch": 1.28, + "grad_norm": 26.910037510156272, + "learning_rate": 1.2841514353546549e-05, + "loss": 0.6448, + "step": 8183 + }, + { + "epoch": 1.28, + "grad_norm": 12.863559745081396, + "learning_rate": 1.2839897428528687e-05, + "loss": 0.594, + "step": 8184 + }, + { + "epoch": 1.28, + "grad_norm": 19.722278407000786, + "learning_rate": 1.2838280422745801e-05, + "loss": 0.5537, + "step": 8185 + }, + { + "epoch": 1.28, + "grad_norm": 23.28998282903744, + "learning_rate": 1.283666333624388e-05, + "loss": 0.7031, + "step": 8186 + }, + { + "epoch": 1.28, + "grad_norm": 24.678736125870845, + "learning_rate": 1.2835046169068912e-05, + "loss": 0.6506, + "step": 8187 + }, + { + "epoch": 1.28, + "grad_norm": 18.86576640145763, + "learning_rate": 1.283342892126689e-05, + "loss": 0.6597, + "step": 8188 + }, + { + "epoch": 1.28, + "grad_norm": 20.68417514707934, + "learning_rate": 1.283181159288381e-05, + "loss": 0.6305, + "step": 8189 + }, + { + "epoch": 1.28, + "grad_norm": 
14.088556117146169, + "learning_rate": 1.2830194183965663e-05, + "loss": 0.6077, + "step": 8190 + }, + { + "epoch": 1.28, + "grad_norm": 12.664025697295775, + "learning_rate": 1.2828576694558447e-05, + "loss": 0.5667, + "step": 8191 + }, + { + "epoch": 1.28, + "grad_norm": 24.491983022855344, + "learning_rate": 1.2826959124708167e-05, + "loss": 0.767, + "step": 8192 + }, + { + "epoch": 1.28, + "grad_norm": 23.348674336485477, + "learning_rate": 1.2825341474460824e-05, + "loss": 0.6835, + "step": 8193 + }, + { + "epoch": 1.28, + "grad_norm": 27.10513878849774, + "learning_rate": 1.2823723743862422e-05, + "loss": 0.7139, + "step": 8194 + }, + { + "epoch": 1.28, + "grad_norm": 17.087777241478815, + "learning_rate": 1.2822105932958964e-05, + "loss": 0.5762, + "step": 8195 + }, + { + "epoch": 1.28, + "grad_norm": 27.34988598560056, + "learning_rate": 1.282048804179647e-05, + "loss": 0.5978, + "step": 8196 + }, + { + "epoch": 1.28, + "grad_norm": 19.475743284566807, + "learning_rate": 1.2818870070420942e-05, + "loss": 0.6952, + "step": 8197 + }, + { + "epoch": 1.28, + "grad_norm": 24.90319452575244, + "learning_rate": 1.2817252018878401e-05, + "loss": 0.6428, + "step": 8198 + }, + { + "epoch": 1.28, + "grad_norm": 19.821598848943367, + "learning_rate": 1.2815633887214857e-05, + "loss": 0.6362, + "step": 8199 + }, + { + "epoch": 1.28, + "grad_norm": 14.267088470829696, + "learning_rate": 1.2814015675476336e-05, + "loss": 0.6174, + "step": 8200 + }, + { + "epoch": 1.28, + "grad_norm": 32.68403077269346, + "learning_rate": 1.2812397383708854e-05, + "loss": 0.6853, + "step": 8201 + }, + { + "epoch": 1.28, + "grad_norm": 23.39607547817, + "learning_rate": 1.2810779011958437e-05, + "loss": 0.6587, + "step": 8202 + }, + { + "epoch": 1.28, + "grad_norm": 14.088205178262257, + "learning_rate": 1.2809160560271108e-05, + "loss": 0.6183, + "step": 8203 + }, + { + "epoch": 1.28, + "grad_norm": 20.81991632310064, + "learning_rate": 1.2807542028692894e-05, + "loss": 0.7255, + "step": 8204 + }, + { + "epoch": 1.28, + "grad_norm": 17.652369874199216, + "learning_rate": 1.2805923417269828e-05, + "loss": 0.6284, + "step": 8205 + }, + { + "epoch": 1.28, + "grad_norm": 18.30337913744467, + "learning_rate": 1.2804304726047943e-05, + "loss": 0.61, + "step": 8206 + }, + { + "epoch": 1.28, + "grad_norm": 18.251616359030656, + "learning_rate": 1.2802685955073271e-05, + "loss": 0.6537, + "step": 8207 + }, + { + "epoch": 1.28, + "grad_norm": 18.35665278828155, + "learning_rate": 1.280106710439185e-05, + "loss": 0.645, + "step": 8208 + }, + { + "epoch": 1.28, + "grad_norm": 14.34360182130856, + "learning_rate": 1.279944817404972e-05, + "loss": 0.5877, + "step": 8209 + }, + { + "epoch": 1.28, + "grad_norm": 12.940694352410677, + "learning_rate": 1.279782916409292e-05, + "loss": 0.6063, + "step": 8210 + }, + { + "epoch": 1.28, + "grad_norm": 14.1460525655355, + "learning_rate": 1.2796210074567499e-05, + "loss": 0.6644, + "step": 8211 + }, + { + "epoch": 1.28, + "grad_norm": 14.391992489499236, + "learning_rate": 1.2794590905519494e-05, + "loss": 0.5939, + "step": 8212 + }, + { + "epoch": 1.28, + "grad_norm": 27.558598731004682, + "learning_rate": 1.2792971656994963e-05, + "loss": 0.6897, + "step": 8213 + }, + { + "epoch": 1.28, + "grad_norm": 21.736035336015192, + "learning_rate": 1.2791352329039948e-05, + "loss": 0.5729, + "step": 8214 + }, + { + "epoch": 1.28, + "grad_norm": 15.35966969126824, + "learning_rate": 1.278973292170051e-05, + "loss": 0.6441, + "step": 8215 + }, + { + "epoch": 1.28, + "grad_norm": 
26.81296011334332, + "learning_rate": 1.2788113435022696e-05, + "loss": 0.7154, + "step": 8216 + }, + { + "epoch": 1.28, + "grad_norm": 12.376780871287782, + "learning_rate": 1.2786493869052568e-05, + "loss": 0.5998, + "step": 8217 + }, + { + "epoch": 1.28, + "grad_norm": 29.08429770774486, + "learning_rate": 1.2784874223836184e-05, + "loss": 0.6733, + "step": 8218 + }, + { + "epoch": 1.28, + "grad_norm": 10.842001650702121, + "learning_rate": 1.2783254499419607e-05, + "loss": 0.5726, + "step": 8219 + }, + { + "epoch": 1.28, + "grad_norm": 18.339599638625465, + "learning_rate": 1.2781634695848899e-05, + "loss": 0.6483, + "step": 8220 + }, + { + "epoch": 1.28, + "grad_norm": 24.412345451094303, + "learning_rate": 1.278001481317013e-05, + "loss": 0.649, + "step": 8221 + }, + { + "epoch": 1.28, + "grad_norm": 15.097347138905876, + "learning_rate": 1.2778394851429361e-05, + "loss": 0.5526, + "step": 8222 + }, + { + "epoch": 1.28, + "grad_norm": 20.73414152023992, + "learning_rate": 1.2776774810672674e-05, + "loss": 0.632, + "step": 8223 + }, + { + "epoch": 1.28, + "grad_norm": 15.325303280716902, + "learning_rate": 1.277515469094613e-05, + "loss": 0.5003, + "step": 8224 + }, + { + "epoch": 1.28, + "grad_norm": 10.971576566258243, + "learning_rate": 1.2773534492295813e-05, + "loss": 0.5308, + "step": 8225 + }, + { + "epoch": 1.28, + "grad_norm": 15.055650524859043, + "learning_rate": 1.2771914214767798e-05, + "loss": 0.6434, + "step": 8226 + }, + { + "epoch": 1.29, + "grad_norm": 14.336058272079114, + "learning_rate": 1.2770293858408166e-05, + "loss": 0.6633, + "step": 8227 + }, + { + "epoch": 1.29, + "grad_norm": 14.182235426854886, + "learning_rate": 1.2768673423262992e-05, + "loss": 0.7339, + "step": 8228 + }, + { + "epoch": 1.29, + "grad_norm": 22.68572989959388, + "learning_rate": 1.2767052909378367e-05, + "loss": 0.7338, + "step": 8229 + }, + { + "epoch": 1.29, + "grad_norm": 19.320254660896573, + "learning_rate": 1.2765432316800379e-05, + "loss": 0.626, + "step": 8230 + }, + { + "epoch": 1.29, + "grad_norm": 19.825806270231375, + "learning_rate": 1.276381164557511e-05, + "loss": 0.6617, + "step": 8231 + }, + { + "epoch": 1.29, + "grad_norm": 23.305797496759737, + "learning_rate": 1.2762190895748657e-05, + "loss": 0.6881, + "step": 8232 + }, + { + "epoch": 1.29, + "grad_norm": 12.713697541779094, + "learning_rate": 1.2760570067367111e-05, + "loss": 0.6689, + "step": 8233 + }, + { + "epoch": 1.29, + "grad_norm": 18.242289927193006, + "learning_rate": 1.2758949160476567e-05, + "loss": 0.6251, + "step": 8234 + }, + { + "epoch": 1.29, + "grad_norm": 19.520502193029696, + "learning_rate": 1.2757328175123122e-05, + "loss": 0.5961, + "step": 8235 + }, + { + "epoch": 1.29, + "grad_norm": 23.25639740864642, + "learning_rate": 1.2755707111352877e-05, + "loss": 0.7622, + "step": 8236 + }, + { + "epoch": 1.29, + "grad_norm": 16.405445122730182, + "learning_rate": 1.2754085969211932e-05, + "loss": 0.6287, + "step": 8237 + }, + { + "epoch": 1.29, + "grad_norm": 21.009959486758213, + "learning_rate": 1.2752464748746395e-05, + "loss": 0.6654, + "step": 8238 + }, + { + "epoch": 1.29, + "grad_norm": 29.488844121426467, + "learning_rate": 1.275084345000237e-05, + "loss": 0.6218, + "step": 8239 + }, + { + "epoch": 1.29, + "grad_norm": 15.191574689186826, + "learning_rate": 1.2749222073025968e-05, + "loss": 0.5641, + "step": 8240 + }, + { + "epoch": 1.29, + "grad_norm": 22.50691927839262, + "learning_rate": 1.2747600617863296e-05, + "loss": 0.6369, + "step": 8241 + }, + { + "epoch": 1.29, + "grad_norm": 
14.675275036425337, + "learning_rate": 1.2745979084560474e-05, + "loss": 0.6113, + "step": 8242 + }, + { + "epoch": 1.29, + "grad_norm": 19.42540606595315, + "learning_rate": 1.274435747316361e-05, + "loss": 0.7135, + "step": 8243 + }, + { + "epoch": 1.29, + "grad_norm": 18.502848249262144, + "learning_rate": 1.2742735783718825e-05, + "loss": 0.6044, + "step": 8244 + }, + { + "epoch": 1.29, + "grad_norm": 12.234961526703646, + "learning_rate": 1.2741114016272239e-05, + "loss": 0.581, + "step": 8245 + }, + { + "epoch": 1.29, + "grad_norm": 20.207085752975964, + "learning_rate": 1.2739492170869973e-05, + "loss": 0.6448, + "step": 8246 + }, + { + "epoch": 1.29, + "grad_norm": 12.858919183687561, + "learning_rate": 1.2737870247558152e-05, + "loss": 0.5923, + "step": 8247 + }, + { + "epoch": 1.29, + "grad_norm": 15.561342703826355, + "learning_rate": 1.2736248246382906e-05, + "loss": 0.6666, + "step": 8248 + }, + { + "epoch": 1.29, + "grad_norm": 12.343705813715111, + "learning_rate": 1.2734626167390356e-05, + "loss": 0.5232, + "step": 8249 + }, + { + "epoch": 1.29, + "grad_norm": 17.694151070144503, + "learning_rate": 1.2733004010626639e-05, + "loss": 0.7189, + "step": 8250 + }, + { + "epoch": 1.29, + "grad_norm": 14.76889183044537, + "learning_rate": 1.273138177613789e-05, + "loss": 0.673, + "step": 8251 + }, + { + "epoch": 1.29, + "grad_norm": 19.191555895339366, + "learning_rate": 1.2729759463970238e-05, + "loss": 0.6596, + "step": 8252 + }, + { + "epoch": 1.29, + "grad_norm": 19.413788901767116, + "learning_rate": 1.2728137074169824e-05, + "loss": 0.622, + "step": 8253 + }, + { + "epoch": 1.29, + "grad_norm": 21.702806584416553, + "learning_rate": 1.2726514606782785e-05, + "loss": 0.7032, + "step": 8254 + }, + { + "epoch": 1.29, + "grad_norm": 19.454562870184347, + "learning_rate": 1.2724892061855271e-05, + "loss": 0.6099, + "step": 8255 + }, + { + "epoch": 1.29, + "grad_norm": 16.534468667838016, + "learning_rate": 1.2723269439433416e-05, + "loss": 0.5489, + "step": 8256 + }, + { + "epoch": 1.29, + "grad_norm": 15.893511429360114, + "learning_rate": 1.2721646739563378e-05, + "loss": 0.6107, + "step": 8257 + }, + { + "epoch": 1.29, + "grad_norm": 25.034105700567117, + "learning_rate": 1.2720023962291292e-05, + "loss": 0.697, + "step": 8258 + }, + { + "epoch": 1.29, + "grad_norm": 13.347027819371986, + "learning_rate": 1.2718401107663318e-05, + "loss": 0.6945, + "step": 8259 + }, + { + "epoch": 1.29, + "grad_norm": 20.475692892839113, + "learning_rate": 1.2716778175725606e-05, + "loss": 0.6346, + "step": 8260 + }, + { + "epoch": 1.29, + "grad_norm": 17.219003174438452, + "learning_rate": 1.2715155166524313e-05, + "loss": 0.6401, + "step": 8261 + }, + { + "epoch": 1.29, + "grad_norm": 21.103182003521006, + "learning_rate": 1.2713532080105595e-05, + "loss": 0.6422, + "step": 8262 + }, + { + "epoch": 1.29, + "grad_norm": 22.474250747499287, + "learning_rate": 1.2711908916515613e-05, + "loss": 0.617, + "step": 8263 + }, + { + "epoch": 1.29, + "grad_norm": 18.380625495399574, + "learning_rate": 1.2710285675800527e-05, + "loss": 0.5819, + "step": 8264 + }, + { + "epoch": 1.29, + "grad_norm": 17.53238287748573, + "learning_rate": 1.2708662358006504e-05, + "loss": 0.5705, + "step": 8265 + }, + { + "epoch": 1.29, + "grad_norm": 23.17753445404153, + "learning_rate": 1.2707038963179704e-05, + "loss": 0.717, + "step": 8266 + }, + { + "epoch": 1.29, + "grad_norm": 17.869701037519455, + "learning_rate": 1.2705415491366303e-05, + "loss": 0.6855, + "step": 8267 + }, + { + "epoch": 1.29, + "grad_norm": 
17.1292536875374, + "learning_rate": 1.2703791942612462e-05, + "loss": 0.5989, + "step": 8268 + }, + { + "epoch": 1.29, + "grad_norm": 15.419536762127363, + "learning_rate": 1.2702168316964365e-05, + "loss": 0.5862, + "step": 8269 + }, + { + "epoch": 1.29, + "grad_norm": 20.78543399158514, + "learning_rate": 1.2700544614468181e-05, + "loss": 0.6635, + "step": 8270 + }, + { + "epoch": 1.29, + "grad_norm": 18.072822686719643, + "learning_rate": 1.2698920835170088e-05, + "loss": 0.6921, + "step": 8271 + }, + { + "epoch": 1.29, + "grad_norm": 21.085295042332895, + "learning_rate": 1.2697296979116265e-05, + "loss": 0.6539, + "step": 8272 + }, + { + "epoch": 1.29, + "grad_norm": 34.33598617663096, + "learning_rate": 1.2695673046352893e-05, + "loss": 0.619, + "step": 8273 + }, + { + "epoch": 1.29, + "grad_norm": 12.727146128274091, + "learning_rate": 1.2694049036926159e-05, + "loss": 0.5424, + "step": 8274 + }, + { + "epoch": 1.29, + "grad_norm": 16.767142420036183, + "learning_rate": 1.2692424950882242e-05, + "loss": 0.6286, + "step": 8275 + }, + { + "epoch": 1.29, + "grad_norm": 20.435146818243783, + "learning_rate": 1.2690800788267338e-05, + "loss": 0.5762, + "step": 8276 + }, + { + "epoch": 1.29, + "grad_norm": 16.64200408620659, + "learning_rate": 1.2689176549127633e-05, + "loss": 0.7087, + "step": 8277 + }, + { + "epoch": 1.29, + "grad_norm": 16.426749474636896, + "learning_rate": 1.2687552233509322e-05, + "loss": 0.6936, + "step": 8278 + }, + { + "epoch": 1.29, + "grad_norm": 16.95753997236476, + "learning_rate": 1.2685927841458593e-05, + "loss": 0.6355, + "step": 8279 + }, + { + "epoch": 1.29, + "grad_norm": 19.704809227276208, + "learning_rate": 1.2684303373021651e-05, + "loss": 0.6469, + "step": 8280 + }, + { + "epoch": 1.29, + "grad_norm": 16.786845307106773, + "learning_rate": 1.268267882824469e-05, + "loss": 0.6742, + "step": 8281 + }, + { + "epoch": 1.29, + "grad_norm": 21.523714175237412, + "learning_rate": 1.2681054207173915e-05, + "loss": 0.599, + "step": 8282 + }, + { + "epoch": 1.29, + "grad_norm": 19.42877414153336, + "learning_rate": 1.2679429509855521e-05, + "loss": 0.659, + "step": 8283 + }, + { + "epoch": 1.29, + "grad_norm": 12.899460323960357, + "learning_rate": 1.2677804736335726e-05, + "loss": 0.5583, + "step": 8284 + }, + { + "epoch": 1.29, + "grad_norm": 20.53406389686106, + "learning_rate": 1.2676179886660725e-05, + "loss": 0.6189, + "step": 8285 + }, + { + "epoch": 1.29, + "grad_norm": 18.087217950500186, + "learning_rate": 1.2674554960876737e-05, + "loss": 0.6769, + "step": 8286 + }, + { + "epoch": 1.29, + "grad_norm": 24.029417775453116, + "learning_rate": 1.2672929959029969e-05, + "loss": 0.6117, + "step": 8287 + }, + { + "epoch": 1.29, + "grad_norm": 24.828266838393592, + "learning_rate": 1.2671304881166638e-05, + "loss": 0.6575, + "step": 8288 + }, + { + "epoch": 1.29, + "grad_norm": 16.012288101359115, + "learning_rate": 1.2669679727332957e-05, + "loss": 0.5936, + "step": 8289 + }, + { + "epoch": 1.29, + "grad_norm": 20.185367081076276, + "learning_rate": 1.2668054497575147e-05, + "loss": 0.6373, + "step": 8290 + }, + { + "epoch": 1.3, + "grad_norm": 17.471258515197395, + "learning_rate": 1.2666429191939428e-05, + "loss": 0.6036, + "step": 8291 + }, + { + "epoch": 1.3, + "grad_norm": 32.78390001926139, + "learning_rate": 1.2664803810472025e-05, + "loss": 0.672, + "step": 8292 + }, + { + "epoch": 1.3, + "grad_norm": 20.973422872550596, + "learning_rate": 1.2663178353219154e-05, + "loss": 0.6369, + "step": 8293 + }, + { + "epoch": 1.3, + "grad_norm": 
11.30912832403255, + "learning_rate": 1.2661552820227053e-05, + "loss": 0.6168, + "step": 8294 + }, + { + "epoch": 1.3, + "grad_norm": 20.21009902999088, + "learning_rate": 1.2659927211541946e-05, + "loss": 0.6063, + "step": 8295 + }, + { + "epoch": 1.3, + "grad_norm": 18.766984977642572, + "learning_rate": 1.2658301527210066e-05, + "loss": 0.6186, + "step": 8296 + }, + { + "epoch": 1.3, + "grad_norm": 14.254903317822391, + "learning_rate": 1.2656675767277647e-05, + "loss": 0.5818, + "step": 8297 + }, + { + "epoch": 1.3, + "grad_norm": 40.45768699840152, + "learning_rate": 1.2655049931790919e-05, + "loss": 0.6823, + "step": 8298 + }, + { + "epoch": 1.3, + "grad_norm": 16.93386568161306, + "learning_rate": 1.2653424020796128e-05, + "loss": 0.5915, + "step": 8299 + }, + { + "epoch": 1.3, + "grad_norm": 27.277270190650405, + "learning_rate": 1.2651798034339506e-05, + "loss": 0.6445, + "step": 8300 + }, + { + "epoch": 1.3, + "grad_norm": 17.89493076950185, + "learning_rate": 1.2650171972467304e-05, + "loss": 0.6528, + "step": 8301 + }, + { + "epoch": 1.3, + "grad_norm": 24.165653551312477, + "learning_rate": 1.2648545835225758e-05, + "loss": 0.6742, + "step": 8302 + }, + { + "epoch": 1.3, + "grad_norm": 15.142076057643568, + "learning_rate": 1.2646919622661119e-05, + "loss": 0.6438, + "step": 8303 + }, + { + "epoch": 1.3, + "grad_norm": 10.194847990832569, + "learning_rate": 1.2645293334819635e-05, + "loss": 0.5588, + "step": 8304 + }, + { + "epoch": 1.3, + "grad_norm": 19.47180693523747, + "learning_rate": 1.2643666971747555e-05, + "loss": 0.6949, + "step": 8305 + }, + { + "epoch": 1.3, + "grad_norm": 19.225884641457455, + "learning_rate": 1.2642040533491133e-05, + "loss": 0.6261, + "step": 8306 + }, + { + "epoch": 1.3, + "grad_norm": 20.36221919133966, + "learning_rate": 1.2640414020096626e-05, + "loss": 0.5965, + "step": 8307 + }, + { + "epoch": 1.3, + "grad_norm": 22.204110906216208, + "learning_rate": 1.2638787431610285e-05, + "loss": 0.6384, + "step": 8308 + }, + { + "epoch": 1.3, + "grad_norm": 16.597296453442517, + "learning_rate": 1.2637160768078376e-05, + "loss": 0.6544, + "step": 8309 + }, + { + "epoch": 1.3, + "grad_norm": 23.160980914446505, + "learning_rate": 1.2635534029547155e-05, + "loss": 0.6486, + "step": 8310 + }, + { + "epoch": 1.3, + "grad_norm": 15.00884000496649, + "learning_rate": 1.263390721606289e-05, + "loss": 0.6161, + "step": 8311 + }, + { + "epoch": 1.3, + "grad_norm": 23.374112095773256, + "learning_rate": 1.2632280327671842e-05, + "loss": 0.6349, + "step": 8312 + }, + { + "epoch": 1.3, + "grad_norm": 19.90759129436889, + "learning_rate": 1.2630653364420285e-05, + "loss": 0.7162, + "step": 8313 + }, + { + "epoch": 1.3, + "grad_norm": 28.444149062689792, + "learning_rate": 1.2629026326354481e-05, + "loss": 0.6268, + "step": 8314 + }, + { + "epoch": 1.3, + "grad_norm": 16.3915209329876, + "learning_rate": 1.262739921352071e-05, + "loss": 0.6736, + "step": 8315 + }, + { + "epoch": 1.3, + "grad_norm": 21.4954904953057, + "learning_rate": 1.2625772025965241e-05, + "loss": 0.6483, + "step": 8316 + }, + { + "epoch": 1.3, + "grad_norm": 17.94987849039004, + "learning_rate": 1.262414476373435e-05, + "loss": 0.644, + "step": 8317 + }, + { + "epoch": 1.3, + "grad_norm": 19.480263768799166, + "learning_rate": 1.2622517426874322e-05, + "loss": 0.6107, + "step": 8318 + }, + { + "epoch": 1.3, + "grad_norm": 13.973122043753486, + "learning_rate": 1.2620890015431427e-05, + "loss": 0.5114, + "step": 8319 + }, + { + "epoch": 1.3, + "grad_norm": 22.78530759285365, + 
"learning_rate": 1.2619262529451954e-05, + "loss": 0.6009, + "step": 8320 + }, + { + "epoch": 1.3, + "grad_norm": 16.290259431183483, + "learning_rate": 1.2617634968982189e-05, + "loss": 0.6141, + "step": 8321 + }, + { + "epoch": 1.3, + "grad_norm": 10.916252844856091, + "learning_rate": 1.2616007334068417e-05, + "loss": 0.6312, + "step": 8322 + }, + { + "epoch": 1.3, + "grad_norm": 21.10366944305978, + "learning_rate": 1.2614379624756924e-05, + "loss": 0.6477, + "step": 8323 + }, + { + "epoch": 1.3, + "grad_norm": 21.974929096587086, + "learning_rate": 1.2612751841094006e-05, + "loss": 0.6621, + "step": 8324 + }, + { + "epoch": 1.3, + "grad_norm": 44.209934620285786, + "learning_rate": 1.2611123983125951e-05, + "loss": 0.6666, + "step": 8325 + }, + { + "epoch": 1.3, + "grad_norm": 16.474935030700554, + "learning_rate": 1.2609496050899061e-05, + "loss": 0.6136, + "step": 8326 + }, + { + "epoch": 1.3, + "grad_norm": 20.948634831066823, + "learning_rate": 1.2607868044459629e-05, + "loss": 0.5512, + "step": 8327 + }, + { + "epoch": 1.3, + "grad_norm": 18.758153153662324, + "learning_rate": 1.2606239963853957e-05, + "loss": 0.645, + "step": 8328 + }, + { + "epoch": 1.3, + "grad_norm": 21.338863175005827, + "learning_rate": 1.260461180912834e-05, + "loss": 0.666, + "step": 8329 + }, + { + "epoch": 1.3, + "grad_norm": 21.871535718989445, + "learning_rate": 1.260298358032909e-05, + "loss": 0.6818, + "step": 8330 + }, + { + "epoch": 1.3, + "grad_norm": 16.4711589734319, + "learning_rate": 1.260135527750251e-05, + "loss": 0.6062, + "step": 8331 + }, + { + "epoch": 1.3, + "grad_norm": 18.597299414938952, + "learning_rate": 1.2599726900694909e-05, + "loss": 0.6556, + "step": 8332 + }, + { + "epoch": 1.3, + "grad_norm": 22.01644488202975, + "learning_rate": 1.2598098449952594e-05, + "loss": 0.6689, + "step": 8333 + }, + { + "epoch": 1.3, + "grad_norm": 22.478950747518574, + "learning_rate": 1.2596469925321877e-05, + "loss": 0.6645, + "step": 8334 + }, + { + "epoch": 1.3, + "grad_norm": 17.898143156471157, + "learning_rate": 1.2594841326849075e-05, + "loss": 0.671, + "step": 8335 + }, + { + "epoch": 1.3, + "grad_norm": 14.316397881603008, + "learning_rate": 1.2593212654580507e-05, + "loss": 0.7539, + "step": 8336 + }, + { + "epoch": 1.3, + "grad_norm": 12.919082616649789, + "learning_rate": 1.2591583908562483e-05, + "loss": 0.5255, + "step": 8337 + }, + { + "epoch": 1.3, + "grad_norm": 43.385797618939, + "learning_rate": 1.2589955088841332e-05, + "loss": 0.5732, + "step": 8338 + }, + { + "epoch": 1.3, + "grad_norm": 24.524400495887633, + "learning_rate": 1.2588326195463373e-05, + "loss": 0.6069, + "step": 8339 + }, + { + "epoch": 1.3, + "grad_norm": 17.70334773584294, + "learning_rate": 1.2586697228474931e-05, + "loss": 0.6646, + "step": 8340 + }, + { + "epoch": 1.3, + "grad_norm": 19.06490269003772, + "learning_rate": 1.2585068187922333e-05, + "loss": 0.6353, + "step": 8341 + }, + { + "epoch": 1.3, + "grad_norm": 21.61264025552755, + "learning_rate": 1.2583439073851907e-05, + "loss": 0.6275, + "step": 8342 + }, + { + "epoch": 1.3, + "grad_norm": 15.231405004863914, + "learning_rate": 1.2581809886309989e-05, + "loss": 0.5737, + "step": 8343 + }, + { + "epoch": 1.3, + "grad_norm": 17.215214434693078, + "learning_rate": 1.25801806253429e-05, + "loss": 0.5884, + "step": 8344 + }, + { + "epoch": 1.3, + "grad_norm": 19.457123827986855, + "learning_rate": 1.2578551290996993e-05, + "loss": 0.6552, + "step": 8345 + }, + { + "epoch": 1.3, + "grad_norm": 18.390290753966006, + "learning_rate": 
1.2576921883318589e-05, + "loss": 0.6073, + "step": 8346 + }, + { + "epoch": 1.3, + "grad_norm": 13.580823521516413, + "learning_rate": 1.2575292402354036e-05, + "loss": 0.6572, + "step": 8347 + }, + { + "epoch": 1.3, + "grad_norm": 19.342939219638488, + "learning_rate": 1.2573662848149674e-05, + "loss": 0.6532, + "step": 8348 + }, + { + "epoch": 1.3, + "grad_norm": 11.704701681590626, + "learning_rate": 1.2572033220751844e-05, + "loss": 0.6629, + "step": 8349 + }, + { + "epoch": 1.3, + "grad_norm": 18.744592104506047, + "learning_rate": 1.2570403520206898e-05, + "loss": 0.6081, + "step": 8350 + }, + { + "epoch": 1.3, + "grad_norm": 21.026847720474173, + "learning_rate": 1.2568773746561179e-05, + "loss": 0.5171, + "step": 8351 + }, + { + "epoch": 1.3, + "grad_norm": 15.078173749479243, + "learning_rate": 1.2567143899861031e-05, + "loss": 0.6911, + "step": 8352 + }, + { + "epoch": 1.3, + "grad_norm": 15.63746392616011, + "learning_rate": 1.2565513980152822e-05, + "loss": 0.6007, + "step": 8353 + }, + { + "epoch": 1.3, + "grad_norm": 17.722548822687365, + "learning_rate": 1.2563883987482887e-05, + "loss": 0.5726, + "step": 8354 + }, + { + "epoch": 1.31, + "grad_norm": 18.49407482065009, + "learning_rate": 1.2562253921897598e-05, + "loss": 0.6477, + "step": 8355 + }, + { + "epoch": 1.31, + "grad_norm": 14.053773822740483, + "learning_rate": 1.2560623783443302e-05, + "loss": 0.6122, + "step": 8356 + }, + { + "epoch": 1.31, + "grad_norm": 22.992982872981976, + "learning_rate": 1.2558993572166365e-05, + "loss": 0.6202, + "step": 8357 + }, + { + "epoch": 1.31, + "grad_norm": 21.554575835820508, + "learning_rate": 1.255736328811315e-05, + "loss": 0.6036, + "step": 8358 + }, + { + "epoch": 1.31, + "grad_norm": 28.007069121355418, + "learning_rate": 1.2555732931330015e-05, + "loss": 0.6098, + "step": 8359 + }, + { + "epoch": 1.31, + "grad_norm": 16.900518265884386, + "learning_rate": 1.2554102501863333e-05, + "loss": 0.5865, + "step": 8360 + }, + { + "epoch": 1.31, + "grad_norm": 17.19558522659384, + "learning_rate": 1.2552471999759469e-05, + "loss": 0.5392, + "step": 8361 + }, + { + "epoch": 1.31, + "grad_norm": 16.110822537651774, + "learning_rate": 1.2550841425064795e-05, + "loss": 0.5385, + "step": 8362 + }, + { + "epoch": 1.31, + "grad_norm": 13.938318039318656, + "learning_rate": 1.254921077782568e-05, + "loss": 0.5992, + "step": 8363 + }, + { + "epoch": 1.31, + "grad_norm": 22.815408864791664, + "learning_rate": 1.2547580058088507e-05, + "loss": 0.7195, + "step": 8364 + }, + { + "epoch": 1.31, + "grad_norm": 21.682037749183138, + "learning_rate": 1.2545949265899645e-05, + "loss": 0.6883, + "step": 8365 + }, + { + "epoch": 1.31, + "grad_norm": 21.833900433071957, + "learning_rate": 1.2544318401305476e-05, + "loss": 0.7107, + "step": 8366 + }, + { + "epoch": 1.31, + "grad_norm": 17.506565240216386, + "learning_rate": 1.2542687464352378e-05, + "loss": 0.6388, + "step": 8367 + }, + { + "epoch": 1.31, + "grad_norm": 19.664891985924683, + "learning_rate": 1.2541056455086737e-05, + "loss": 0.6404, + "step": 8368 + }, + { + "epoch": 1.31, + "grad_norm": 22.537311302131393, + "learning_rate": 1.2539425373554937e-05, + "loss": 0.6769, + "step": 8369 + }, + { + "epoch": 1.31, + "grad_norm": 18.35579949645393, + "learning_rate": 1.2537794219803368e-05, + "loss": 0.6158, + "step": 8370 + }, + { + "epoch": 1.31, + "grad_norm": 20.55089074986457, + "learning_rate": 1.2536162993878409e-05, + "loss": 0.5631, + "step": 8371 + }, + { + "epoch": 1.31, + "grad_norm": 17.08641090051207, + "learning_rate": 
1.2534531695826466e-05, + "loss": 0.66, + "step": 8372 + }, + { + "epoch": 1.31, + "grad_norm": 18.393151781789907, + "learning_rate": 1.2532900325693917e-05, + "loss": 0.6303, + "step": 8373 + }, + { + "epoch": 1.31, + "grad_norm": 24.21301103091029, + "learning_rate": 1.2531268883527168e-05, + "loss": 0.7126, + "step": 8374 + }, + { + "epoch": 1.31, + "grad_norm": 18.535537246309605, + "learning_rate": 1.2529637369372614e-05, + "loss": 0.6424, + "step": 8375 + }, + { + "epoch": 1.31, + "grad_norm": 17.814339562844467, + "learning_rate": 1.2528005783276652e-05, + "loss": 0.5745, + "step": 8376 + }, + { + "epoch": 1.31, + "grad_norm": 13.077784393687985, + "learning_rate": 1.2526374125285683e-05, + "loss": 0.5842, + "step": 8377 + }, + { + "epoch": 1.31, + "grad_norm": 17.764549310036568, + "learning_rate": 1.2524742395446112e-05, + "loss": 0.6442, + "step": 8378 + }, + { + "epoch": 1.31, + "grad_norm": 24.32706219404166, + "learning_rate": 1.2523110593804344e-05, + "loss": 0.6665, + "step": 8379 + }, + { + "epoch": 1.31, + "grad_norm": 17.587623230692884, + "learning_rate": 1.252147872040679e-05, + "loss": 0.6504, + "step": 8380 + }, + { + "epoch": 1.31, + "grad_norm": 23.64738030869867, + "learning_rate": 1.251984677529985e-05, + "loss": 0.6724, + "step": 8381 + }, + { + "epoch": 1.31, + "grad_norm": 16.66866432322756, + "learning_rate": 1.2518214758529947e-05, + "loss": 0.6555, + "step": 8382 + }, + { + "epoch": 1.31, + "grad_norm": 19.135922908489277, + "learning_rate": 1.251658267014349e-05, + "loss": 0.5536, + "step": 8383 + }, + { + "epoch": 1.31, + "grad_norm": 22.683570287004873, + "learning_rate": 1.2514950510186892e-05, + "loss": 0.5341, + "step": 8384 + }, + { + "epoch": 1.31, + "grad_norm": 19.497442436918337, + "learning_rate": 1.2513318278706573e-05, + "loss": 0.6684, + "step": 8385 + }, + { + "epoch": 1.31, + "grad_norm": 17.794121792194833, + "learning_rate": 1.2511685975748948e-05, + "loss": 0.6374, + "step": 8386 + }, + { + "epoch": 1.31, + "grad_norm": 25.733633631743633, + "learning_rate": 1.251005360136045e-05, + "loss": 0.7394, + "step": 8387 + }, + { + "epoch": 1.31, + "grad_norm": 46.83819900012865, + "learning_rate": 1.2508421155587492e-05, + "loss": 0.635, + "step": 8388 + }, + { + "epoch": 1.31, + "grad_norm": 17.8407095466611, + "learning_rate": 1.2506788638476506e-05, + "loss": 0.5833, + "step": 8389 + }, + { + "epoch": 1.31, + "grad_norm": 21.778555733521987, + "learning_rate": 1.2505156050073914e-05, + "loss": 0.5958, + "step": 8390 + }, + { + "epoch": 1.31, + "grad_norm": 27.671684704432664, + "learning_rate": 1.2503523390426153e-05, + "loss": 0.7228, + "step": 8391 + }, + { + "epoch": 1.31, + "grad_norm": 18.50419597507629, + "learning_rate": 1.2501890659579649e-05, + "loss": 0.6231, + "step": 8392 + }, + { + "epoch": 1.31, + "grad_norm": 28.585759525417487, + "learning_rate": 1.250025785758084e-05, + "loss": 0.6347, + "step": 8393 + }, + { + "epoch": 1.31, + "grad_norm": 17.109868363012563, + "learning_rate": 1.249862498447616e-05, + "loss": 0.5274, + "step": 8394 + }, + { + "epoch": 1.31, + "grad_norm": 20.593890241020958, + "learning_rate": 1.2496992040312045e-05, + "loss": 0.655, + "step": 8395 + }, + { + "epoch": 1.31, + "grad_norm": 16.28844610735789, + "learning_rate": 1.2495359025134939e-05, + "loss": 0.5972, + "step": 8396 + }, + { + "epoch": 1.31, + "grad_norm": 15.615189672600541, + "learning_rate": 1.2493725938991283e-05, + "loss": 0.5638, + "step": 8397 + }, + { + "epoch": 1.31, + "grad_norm": 19.034943350656782, + "learning_rate": 
1.2492092781927517e-05, + "loss": 0.6228, + "step": 8398 + }, + { + "epoch": 1.31, + "grad_norm": 16.604180662260138, + "learning_rate": 1.2490459553990095e-05, + "loss": 0.6158, + "step": 8399 + }, + { + "epoch": 1.31, + "grad_norm": 25.844669714295254, + "learning_rate": 1.2488826255225455e-05, + "loss": 0.6275, + "step": 8400 + }, + { + "epoch": 1.31, + "grad_norm": 15.912448924384213, + "learning_rate": 1.2487192885680053e-05, + "loss": 0.6135, + "step": 8401 + }, + { + "epoch": 1.31, + "grad_norm": 20.16203081307279, + "learning_rate": 1.2485559445400343e-05, + "loss": 0.6634, + "step": 8402 + }, + { + "epoch": 1.31, + "grad_norm": 14.671311987717258, + "learning_rate": 1.2483925934432774e-05, + "loss": 0.6482, + "step": 8403 + }, + { + "epoch": 1.31, + "grad_norm": 25.97376114848387, + "learning_rate": 1.2482292352823806e-05, + "loss": 0.6776, + "step": 8404 + }, + { + "epoch": 1.31, + "grad_norm": 25.836135582904237, + "learning_rate": 1.2480658700619891e-05, + "loss": 0.6545, + "step": 8405 + }, + { + "epoch": 1.31, + "grad_norm": 17.747727832968042, + "learning_rate": 1.24790249778675e-05, + "loss": 0.6735, + "step": 8406 + }, + { + "epoch": 1.31, + "grad_norm": 23.52613928250092, + "learning_rate": 1.2477391184613086e-05, + "loss": 0.6163, + "step": 8407 + }, + { + "epoch": 1.31, + "grad_norm": 15.685962541820313, + "learning_rate": 1.2475757320903117e-05, + "loss": 0.7279, + "step": 8408 + }, + { + "epoch": 1.31, + "grad_norm": 22.845276350411908, + "learning_rate": 1.2474123386784059e-05, + "loss": 0.6353, + "step": 8409 + }, + { + "epoch": 1.31, + "grad_norm": 16.96359114795371, + "learning_rate": 1.2472489382302377e-05, + "loss": 0.648, + "step": 8410 + }, + { + "epoch": 1.31, + "grad_norm": 19.820878426393136, + "learning_rate": 1.2470855307504544e-05, + "loss": 0.6637, + "step": 8411 + }, + { + "epoch": 1.31, + "grad_norm": 14.835597795712292, + "learning_rate": 1.2469221162437033e-05, + "loss": 0.6047, + "step": 8412 + }, + { + "epoch": 1.31, + "grad_norm": 14.10453886893205, + "learning_rate": 1.2467586947146313e-05, + "loss": 0.5672, + "step": 8413 + }, + { + "epoch": 1.31, + "grad_norm": 20.020047285174645, + "learning_rate": 1.2465952661678866e-05, + "loss": 0.6774, + "step": 8414 + }, + { + "epoch": 1.31, + "grad_norm": 23.203443262377164, + "learning_rate": 1.2464318306081167e-05, + "loss": 0.6614, + "step": 8415 + }, + { + "epoch": 1.31, + "grad_norm": 25.966718314210883, + "learning_rate": 1.24626838803997e-05, + "loss": 0.626, + "step": 8416 + }, + { + "epoch": 1.31, + "grad_norm": 20.429948645357744, + "learning_rate": 1.2461049384680938e-05, + "loss": 0.6385, + "step": 8417 + }, + { + "epoch": 1.31, + "grad_norm": 17.410800452892463, + "learning_rate": 1.2459414818971376e-05, + "loss": 0.7324, + "step": 8418 + }, + { + "epoch": 1.32, + "grad_norm": 18.28931554132851, + "learning_rate": 1.2457780183317496e-05, + "loss": 0.6756, + "step": 8419 + }, + { + "epoch": 1.32, + "grad_norm": 17.80160122486069, + "learning_rate": 1.2456145477765782e-05, + "loss": 0.6449, + "step": 8420 + }, + { + "epoch": 1.32, + "grad_norm": 26.596933002921876, + "learning_rate": 1.245451070236273e-05, + "loss": 0.7206, + "step": 8421 + }, + { + "epoch": 1.32, + "grad_norm": 14.168266110261689, + "learning_rate": 1.2452875857154827e-05, + "loss": 0.6589, + "step": 8422 + }, + { + "epoch": 1.32, + "grad_norm": 19.552074673880554, + "learning_rate": 1.2451240942188569e-05, + "loss": 0.7514, + "step": 8423 + }, + { + "epoch": 1.32, + "grad_norm": 15.089566779995492, + "learning_rate": 
1.2449605957510456e-05, + "loss": 0.5992, + "step": 8424 + }, + { + "epoch": 1.32, + "grad_norm": 18.558456475792575, + "learning_rate": 1.244797090316698e-05, + "loss": 0.7114, + "step": 8425 + }, + { + "epoch": 1.32, + "grad_norm": 21.750146685851046, + "learning_rate": 1.2446335779204647e-05, + "loss": 0.7159, + "step": 8426 + }, + { + "epoch": 1.32, + "grad_norm": 15.94879042559237, + "learning_rate": 1.2444700585669952e-05, + "loss": 0.5895, + "step": 8427 + }, + { + "epoch": 1.32, + "grad_norm": 19.606569309814386, + "learning_rate": 1.2443065322609402e-05, + "loss": 0.6628, + "step": 8428 + }, + { + "epoch": 1.32, + "grad_norm": 16.28941968393588, + "learning_rate": 1.2441429990069507e-05, + "loss": 0.6623, + "step": 8429 + }, + { + "epoch": 1.32, + "grad_norm": 19.82479284197508, + "learning_rate": 1.2439794588096767e-05, + "loss": 0.6832, + "step": 8430 + }, + { + "epoch": 1.32, + "grad_norm": 12.99441871821392, + "learning_rate": 1.24381591167377e-05, + "loss": 0.6229, + "step": 8431 + }, + { + "epoch": 1.32, + "grad_norm": 17.94365852748525, + "learning_rate": 1.243652357603881e-05, + "loss": 0.681, + "step": 8432 + }, + { + "epoch": 1.32, + "grad_norm": 31.147005642907533, + "learning_rate": 1.243488796604662e-05, + "loss": 0.6554, + "step": 8433 + }, + { + "epoch": 1.32, + "grad_norm": 28.43136384596959, + "learning_rate": 1.2433252286807635e-05, + "loss": 0.6552, + "step": 8434 + }, + { + "epoch": 1.32, + "grad_norm": 21.172208024675033, + "learning_rate": 1.2431616538368383e-05, + "loss": 0.6674, + "step": 8435 + }, + { + "epoch": 1.32, + "grad_norm": 13.567540897048898, + "learning_rate": 1.2429980720775376e-05, + "loss": 0.6981, + "step": 8436 + }, + { + "epoch": 1.32, + "grad_norm": 16.623454990605445, + "learning_rate": 1.2428344834075142e-05, + "loss": 0.6922, + "step": 8437 + }, + { + "epoch": 1.32, + "grad_norm": 21.0994253181439, + "learning_rate": 1.2426708878314197e-05, + "loss": 0.6782, + "step": 8438 + }, + { + "epoch": 1.32, + "grad_norm": 19.673935467175266, + "learning_rate": 1.2425072853539076e-05, + "loss": 0.6562, + "step": 8439 + }, + { + "epoch": 1.32, + "grad_norm": 21.8969765086303, + "learning_rate": 1.2423436759796296e-05, + "loss": 0.6404, + "step": 8440 + }, + { + "epoch": 1.32, + "grad_norm": 19.58288717159023, + "learning_rate": 1.2421800597132399e-05, + "loss": 0.6153, + "step": 8441 + }, + { + "epoch": 1.32, + "grad_norm": 19.26700704340193, + "learning_rate": 1.2420164365593903e-05, + "loss": 0.5948, + "step": 8442 + }, + { + "epoch": 1.32, + "grad_norm": 20.903703649971675, + "learning_rate": 1.2418528065227354e-05, + "loss": 0.5366, + "step": 8443 + }, + { + "epoch": 1.32, + "grad_norm": 19.758806024914684, + "learning_rate": 1.2416891696079276e-05, + "loss": 0.6088, + "step": 8444 + }, + { + "epoch": 1.32, + "grad_norm": 19.369139554869104, + "learning_rate": 1.2415255258196215e-05, + "loss": 0.6122, + "step": 8445 + }, + { + "epoch": 1.32, + "grad_norm": 24.64057546391706, + "learning_rate": 1.2413618751624708e-05, + "loss": 0.6863, + "step": 8446 + }, + { + "epoch": 1.32, + "grad_norm": 22.249161263125092, + "learning_rate": 1.2411982176411294e-05, + "loss": 0.6737, + "step": 8447 + }, + { + "epoch": 1.32, + "grad_norm": 19.616167868424817, + "learning_rate": 1.2410345532602518e-05, + "loss": 0.679, + "step": 8448 + }, + { + "epoch": 1.32, + "grad_norm": 17.542266887724022, + "learning_rate": 1.2408708820244926e-05, + "loss": 0.6725, + "step": 8449 + }, + { + "epoch": 1.32, + "grad_norm": 21.2780886429179, + "learning_rate": 
1.2407072039385064e-05, + "loss": 0.6734, + "step": 8450 + }, + { + "epoch": 1.32, + "grad_norm": 23.760429935599344, + "learning_rate": 1.2405435190069481e-05, + "loss": 0.6293, + "step": 8451 + }, + { + "epoch": 1.32, + "grad_norm": 22.352998041416104, + "learning_rate": 1.2403798272344729e-05, + "loss": 0.7218, + "step": 8452 + }, + { + "epoch": 1.32, + "grad_norm": 19.34735796232701, + "learning_rate": 1.240216128625736e-05, + "loss": 0.6762, + "step": 8453 + }, + { + "epoch": 1.32, + "grad_norm": 22.848365222889896, + "learning_rate": 1.2400524231853929e-05, + "loss": 0.6442, + "step": 8454 + }, + { + "epoch": 1.32, + "grad_norm": 17.527076288966455, + "learning_rate": 1.2398887109180992e-05, + "loss": 0.651, + "step": 8455 + }, + { + "epoch": 1.32, + "grad_norm": 19.76830119459625, + "learning_rate": 1.2397249918285113e-05, + "loss": 0.5973, + "step": 8456 + }, + { + "epoch": 1.32, + "grad_norm": 17.009791511833946, + "learning_rate": 1.2395612659212844e-05, + "loss": 0.6102, + "step": 8457 + }, + { + "epoch": 1.32, + "grad_norm": 31.919847215104042, + "learning_rate": 1.2393975332010757e-05, + "loss": 0.6027, + "step": 8458 + }, + { + "epoch": 1.32, + "grad_norm": 21.250803343229645, + "learning_rate": 1.2392337936725408e-05, + "loss": 0.6422, + "step": 8459 + }, + { + "epoch": 1.32, + "grad_norm": 9.980697480658229, + "learning_rate": 1.239070047340337e-05, + "loss": 0.5356, + "step": 8460 + }, + { + "epoch": 1.32, + "grad_norm": 36.12678984436896, + "learning_rate": 1.2389062942091209e-05, + "loss": 0.6401, + "step": 8461 + }, + { + "epoch": 1.32, + "grad_norm": 17.517682775584007, + "learning_rate": 1.2387425342835492e-05, + "loss": 0.6006, + "step": 8462 + }, + { + "epoch": 1.32, + "grad_norm": 17.33834159567959, + "learning_rate": 1.2385787675682799e-05, + "loss": 0.61, + "step": 8463 + }, + { + "epoch": 1.32, + "grad_norm": 22.59121940021831, + "learning_rate": 1.2384149940679697e-05, + "loss": 0.7406, + "step": 8464 + }, + { + "epoch": 1.32, + "grad_norm": 12.764711292442753, + "learning_rate": 1.2382512137872769e-05, + "loss": 0.6407, + "step": 8465 + }, + { + "epoch": 1.32, + "grad_norm": 19.010763261304934, + "learning_rate": 1.2380874267308586e-05, + "loss": 0.656, + "step": 8466 + }, + { + "epoch": 1.32, + "grad_norm": 15.710717990713249, + "learning_rate": 1.237923632903373e-05, + "loss": 0.6055, + "step": 8467 + }, + { + "epoch": 1.32, + "grad_norm": 34.495774283715484, + "learning_rate": 1.2377598323094788e-05, + "loss": 0.6495, + "step": 8468 + }, + { + "epoch": 1.32, + "grad_norm": 16.69431043267568, + "learning_rate": 1.2375960249538341e-05, + "loss": 0.6576, + "step": 8469 + }, + { + "epoch": 1.32, + "grad_norm": 17.511810830278588, + "learning_rate": 1.2374322108410974e-05, + "loss": 0.6334, + "step": 8470 + }, + { + "epoch": 1.32, + "grad_norm": 22.853325801537327, + "learning_rate": 1.2372683899759274e-05, + "loss": 0.6293, + "step": 8471 + }, + { + "epoch": 1.32, + "grad_norm": 16.866740655037848, + "learning_rate": 1.2371045623629834e-05, + "loss": 0.5808, + "step": 8472 + }, + { + "epoch": 1.32, + "grad_norm": 22.115381657602597, + "learning_rate": 1.2369407280069241e-05, + "loss": 0.6865, + "step": 8473 + }, + { + "epoch": 1.32, + "grad_norm": 20.801582747227275, + "learning_rate": 1.2367768869124091e-05, + "loss": 0.6638, + "step": 8474 + }, + { + "epoch": 1.32, + "grad_norm": 18.664340221147935, + "learning_rate": 1.2366130390840982e-05, + "loss": 0.6152, + "step": 8475 + }, + { + "epoch": 1.32, + "grad_norm": 21.415242830972065, + "learning_rate": 
1.2364491845266506e-05, + "loss": 0.5968, + "step": 8476 + }, + { + "epoch": 1.32, + "grad_norm": 19.067682735023382, + "learning_rate": 1.2362853232447267e-05, + "loss": 0.5581, + "step": 8477 + }, + { + "epoch": 1.32, + "grad_norm": 20.29645373881489, + "learning_rate": 1.2361214552429863e-05, + "loss": 0.6734, + "step": 8478 + }, + { + "epoch": 1.32, + "grad_norm": 20.764378467558146, + "learning_rate": 1.23595758052609e-05, + "loss": 0.6377, + "step": 8479 + }, + { + "epoch": 1.32, + "grad_norm": 26.29545478929641, + "learning_rate": 1.235793699098698e-05, + "loss": 0.5976, + "step": 8480 + }, + { + "epoch": 1.32, + "grad_norm": 20.952746884278923, + "learning_rate": 1.2356298109654712e-05, + "loss": 0.6185, + "step": 8481 + }, + { + "epoch": 1.32, + "grad_norm": 19.902903533041194, + "learning_rate": 1.2354659161310704e-05, + "loss": 0.6704, + "step": 8482 + }, + { + "epoch": 1.33, + "grad_norm": 19.132129194364175, + "learning_rate": 1.2353020146001568e-05, + "loss": 0.648, + "step": 8483 + }, + { + "epoch": 1.33, + "grad_norm": 18.529667552345707, + "learning_rate": 1.2351381063773913e-05, + "loss": 0.6833, + "step": 8484 + }, + { + "epoch": 1.33, + "grad_norm": 18.40850758105997, + "learning_rate": 1.2349741914674361e-05, + "loss": 0.6454, + "step": 8485 + }, + { + "epoch": 1.33, + "grad_norm": 19.17867539293414, + "learning_rate": 1.2348102698749518e-05, + "loss": 0.6472, + "step": 8486 + }, + { + "epoch": 1.33, + "grad_norm": 21.911434192083497, + "learning_rate": 1.234646341604601e-05, + "loss": 0.6109, + "step": 8487 + }, + { + "epoch": 1.33, + "grad_norm": 23.287101348233122, + "learning_rate": 1.2344824066610454e-05, + "loss": 0.6755, + "step": 8488 + }, + { + "epoch": 1.33, + "grad_norm": 18.26716566563346, + "learning_rate": 1.2343184650489476e-05, + "loss": 0.6091, + "step": 8489 + }, + { + "epoch": 1.33, + "grad_norm": 13.40588813351155, + "learning_rate": 1.2341545167729693e-05, + "loss": 0.5919, + "step": 8490 + }, + { + "epoch": 1.33, + "grad_norm": 20.77361262600216, + "learning_rate": 1.2339905618377739e-05, + "loss": 0.6741, + "step": 8491 + }, + { + "epoch": 1.33, + "grad_norm": 21.582979000219552, + "learning_rate": 1.2338266002480237e-05, + "loss": 0.5845, + "step": 8492 + }, + { + "epoch": 1.33, + "grad_norm": 18.768318299826905, + "learning_rate": 1.2336626320083816e-05, + "loss": 0.6034, + "step": 8493 + }, + { + "epoch": 1.33, + "grad_norm": 15.772503873865713, + "learning_rate": 1.233498657123511e-05, + "loss": 0.6968, + "step": 8494 + }, + { + "epoch": 1.33, + "grad_norm": 25.65378679074082, + "learning_rate": 1.2333346755980753e-05, + "loss": 0.7412, + "step": 8495 + }, + { + "epoch": 1.33, + "grad_norm": 15.7526211061879, + "learning_rate": 1.233170687436738e-05, + "loss": 0.6636, + "step": 8496 + }, + { + "epoch": 1.33, + "grad_norm": 24.085844378766186, + "learning_rate": 1.2330066926441626e-05, + "loss": 0.6107, + "step": 8497 + }, + { + "epoch": 1.33, + "grad_norm": 20.880085868092525, + "learning_rate": 1.232842691225013e-05, + "loss": 0.6404, + "step": 8498 + }, + { + "epoch": 1.33, + "grad_norm": 13.821906419986114, + "learning_rate": 1.2326786831839536e-05, + "loss": 0.5547, + "step": 8499 + }, + { + "epoch": 1.33, + "grad_norm": 11.788811926836328, + "learning_rate": 1.2325146685256489e-05, + "loss": 0.6134, + "step": 8500 + }, + { + "epoch": 1.33, + "grad_norm": 16.759688831202755, + "learning_rate": 1.2323506472547626e-05, + "loss": 0.6144, + "step": 8501 + }, + { + "epoch": 1.33, + "grad_norm": 24.276056399474264, + "learning_rate": 
1.2321866193759602e-05, + "loss": 0.6455, + "step": 8502 + }, + { + "epoch": 1.33, + "grad_norm": 13.869654232969632, + "learning_rate": 1.2320225848939059e-05, + "loss": 0.6269, + "step": 8503 + }, + { + "epoch": 1.33, + "grad_norm": 22.588172582308815, + "learning_rate": 1.2318585438132654e-05, + "loss": 0.6206, + "step": 8504 + }, + { + "epoch": 1.33, + "grad_norm": 23.0132422032763, + "learning_rate": 1.2316944961387028e-05, + "loss": 0.5531, + "step": 8505 + }, + { + "epoch": 1.33, + "grad_norm": 17.95920531327305, + "learning_rate": 1.2315304418748848e-05, + "loss": 0.6361, + "step": 8506 + }, + { + "epoch": 1.33, + "grad_norm": 21.701476311867843, + "learning_rate": 1.2313663810264762e-05, + "loss": 0.6137, + "step": 8507 + }, + { + "epoch": 1.33, + "grad_norm": 18.3641054747069, + "learning_rate": 1.2312023135981434e-05, + "loss": 0.6001, + "step": 8508 + }, + { + "epoch": 1.33, + "grad_norm": 16.87934186055937, + "learning_rate": 1.231038239594552e-05, + "loss": 0.616, + "step": 8509 + }, + { + "epoch": 1.33, + "grad_norm": 31.62906589022556, + "learning_rate": 1.230874159020368e-05, + "loss": 0.6292, + "step": 8510 + }, + { + "epoch": 1.33, + "grad_norm": 17.017662707481655, + "learning_rate": 1.2307100718802579e-05, + "loss": 0.5669, + "step": 8511 + }, + { + "epoch": 1.33, + "grad_norm": 15.850245509893645, + "learning_rate": 1.2305459781788885e-05, + "loss": 0.6347, + "step": 8512 + }, + { + "epoch": 1.33, + "grad_norm": 24.579043334833766, + "learning_rate": 1.2303818779209264e-05, + "loss": 0.6614, + "step": 8513 + }, + { + "epoch": 1.33, + "grad_norm": 18.089340029140562, + "learning_rate": 1.2302177711110384e-05, + "loss": 0.6952, + "step": 8514 + }, + { + "epoch": 1.33, + "grad_norm": 21.053311086757766, + "learning_rate": 1.2300536577538917e-05, + "loss": 0.6103, + "step": 8515 + }, + { + "epoch": 1.33, + "grad_norm": 19.962015982455814, + "learning_rate": 1.2298895378541536e-05, + "loss": 0.5827, + "step": 8516 + }, + { + "epoch": 1.33, + "grad_norm": 15.934177167292416, + "learning_rate": 1.2297254114164914e-05, + "loss": 0.6862, + "step": 8517 + }, + { + "epoch": 1.33, + "grad_norm": 22.84563689781987, + "learning_rate": 1.2295612784455728e-05, + "loss": 0.6806, + "step": 8518 + }, + { + "epoch": 1.33, + "grad_norm": 17.973864316809564, + "learning_rate": 1.2293971389460659e-05, + "loss": 0.6555, + "step": 8519 + }, + { + "epoch": 1.33, + "grad_norm": 19.418891846811245, + "learning_rate": 1.2292329929226385e-05, + "loss": 0.6581, + "step": 8520 + }, + { + "epoch": 1.33, + "grad_norm": 18.18932821156568, + "learning_rate": 1.229068840379959e-05, + "loss": 0.606, + "step": 8521 + }, + { + "epoch": 1.33, + "grad_norm": 13.743693873701552, + "learning_rate": 1.2289046813226954e-05, + "loss": 0.6379, + "step": 8522 + }, + { + "epoch": 1.33, + "grad_norm": 18.77122972883461, + "learning_rate": 1.2287405157555168e-05, + "loss": 0.6134, + "step": 8523 + }, + { + "epoch": 1.33, + "grad_norm": 23.03307996425988, + "learning_rate": 1.2285763436830917e-05, + "loss": 0.675, + "step": 8524 + }, + { + "epoch": 1.33, + "grad_norm": 13.659364245108755, + "learning_rate": 1.2284121651100891e-05, + "loss": 0.5764, + "step": 8525 + }, + { + "epoch": 1.33, + "grad_norm": 26.19314833728055, + "learning_rate": 1.228247980041178e-05, + "loss": 0.6735, + "step": 8526 + }, + { + "epoch": 1.33, + "grad_norm": 12.713701830037026, + "learning_rate": 1.2280837884810282e-05, + "loss": 0.591, + "step": 8527 + }, + { + "epoch": 1.33, + "grad_norm": 19.15943430902438, + "learning_rate": 
1.2279195904343084e-05, + "loss": 0.6002, + "step": 8528 + }, + { + "epoch": 1.33, + "grad_norm": 16.048214151727812, + "learning_rate": 1.2277553859056894e-05, + "loss": 0.6848, + "step": 8529 + }, + { + "epoch": 1.33, + "grad_norm": 13.690646851684782, + "learning_rate": 1.22759117489984e-05, + "loss": 0.5433, + "step": 8530 + }, + { + "epoch": 1.33, + "grad_norm": 24.987558771553786, + "learning_rate": 1.227426957421431e-05, + "loss": 0.5566, + "step": 8531 + }, + { + "epoch": 1.33, + "grad_norm": 18.66383854167598, + "learning_rate": 1.227262733475132e-05, + "loss": 0.6899, + "step": 8532 + }, + { + "epoch": 1.33, + "grad_norm": 11.022869830178324, + "learning_rate": 1.2270985030656139e-05, + "loss": 0.5529, + "step": 8533 + }, + { + "epoch": 1.33, + "grad_norm": 19.35945595900608, + "learning_rate": 1.2269342661975474e-05, + "loss": 0.6456, + "step": 8534 + }, + { + "epoch": 1.33, + "grad_norm": 16.18422387360619, + "learning_rate": 1.2267700228756033e-05, + "loss": 0.6298, + "step": 8535 + }, + { + "epoch": 1.33, + "grad_norm": 14.660415692858141, + "learning_rate": 1.2266057731044521e-05, + "loss": 0.6397, + "step": 8536 + }, + { + "epoch": 1.33, + "grad_norm": 15.309528642527022, + "learning_rate": 1.2264415168887651e-05, + "loss": 0.5893, + "step": 8537 + }, + { + "epoch": 1.33, + "grad_norm": 21.20277945083012, + "learning_rate": 1.226277254233214e-05, + "loss": 0.6605, + "step": 8538 + }, + { + "epoch": 1.33, + "grad_norm": 13.896931020350566, + "learning_rate": 1.2261129851424703e-05, + "loss": 0.5987, + "step": 8539 + }, + { + "epoch": 1.33, + "grad_norm": 27.659557426552695, + "learning_rate": 1.2259487096212055e-05, + "loss": 0.66, + "step": 8540 + }, + { + "epoch": 1.33, + "grad_norm": 17.572156330212678, + "learning_rate": 1.2257844276740916e-05, + "loss": 0.6837, + "step": 8541 + }, + { + "epoch": 1.33, + "grad_norm": 27.48955546162416, + "learning_rate": 1.2256201393058006e-05, + "loss": 0.6782, + "step": 8542 + }, + { + "epoch": 1.33, + "grad_norm": 12.208081511215612, + "learning_rate": 1.2254558445210048e-05, + "loss": 0.5595, + "step": 8543 + }, + { + "epoch": 1.33, + "grad_norm": 15.310728373998613, + "learning_rate": 1.2252915433243768e-05, + "loss": 0.5796, + "step": 8544 + }, + { + "epoch": 1.33, + "grad_norm": 18.754076669892413, + "learning_rate": 1.225127235720589e-05, + "loss": 0.6264, + "step": 8545 + }, + { + "epoch": 1.33, + "grad_norm": 14.674181956880906, + "learning_rate": 1.2249629217143143e-05, + "loss": 0.5549, + "step": 8546 + }, + { + "epoch": 1.34, + "grad_norm": 12.92216467619158, + "learning_rate": 1.2247986013102258e-05, + "loss": 0.6989, + "step": 8547 + }, + { + "epoch": 1.34, + "grad_norm": 18.064078355667892, + "learning_rate": 1.2246342745129964e-05, + "loss": 0.6911, + "step": 8548 + }, + { + "epoch": 1.34, + "grad_norm": 19.170920627284424, + "learning_rate": 1.2244699413272998e-05, + "loss": 0.6604, + "step": 8549 + }, + { + "epoch": 1.34, + "grad_norm": 15.94594602994399, + "learning_rate": 1.2243056017578095e-05, + "loss": 0.6326, + "step": 8550 + }, + { + "epoch": 1.34, + "grad_norm": 13.776293552586147, + "learning_rate": 1.2241412558091988e-05, + "loss": 0.6862, + "step": 8551 + }, + { + "epoch": 1.34, + "grad_norm": 17.479472675607774, + "learning_rate": 1.2239769034861423e-05, + "loss": 0.6175, + "step": 8552 + }, + { + "epoch": 1.34, + "grad_norm": 13.63204416552825, + "learning_rate": 1.2238125447933134e-05, + "loss": 0.6443, + "step": 8553 + }, + { + "epoch": 1.34, + "grad_norm": 20.82843613445841, + "learning_rate": 
1.2236481797353865e-05, + "loss": 0.6338, + "step": 8554 + }, + { + "epoch": 1.34, + "grad_norm": 17.37390498443152, + "learning_rate": 1.2234838083170362e-05, + "loss": 0.5719, + "step": 8555 + }, + { + "epoch": 1.34, + "grad_norm": 15.549117574979292, + "learning_rate": 1.2233194305429375e-05, + "loss": 0.6037, + "step": 8556 + }, + { + "epoch": 1.34, + "grad_norm": 19.57356127272504, + "learning_rate": 1.2231550464177646e-05, + "loss": 0.5215, + "step": 8557 + }, + { + "epoch": 1.34, + "grad_norm": 21.570649246300786, + "learning_rate": 1.2229906559461925e-05, + "loss": 0.6638, + "step": 8558 + }, + { + "epoch": 1.34, + "grad_norm": 18.945698632700093, + "learning_rate": 1.222826259132897e-05, + "loss": 0.6179, + "step": 8559 + }, + { + "epoch": 1.34, + "grad_norm": 26.915847750115947, + "learning_rate": 1.2226618559825529e-05, + "loss": 0.6985, + "step": 8560 + }, + { + "epoch": 1.34, + "grad_norm": 15.076808096148143, + "learning_rate": 1.222497446499836e-05, + "loss": 0.5829, + "step": 8561 + }, + { + "epoch": 1.34, + "grad_norm": 25.728779161227575, + "learning_rate": 1.2223330306894215e-05, + "loss": 0.6485, + "step": 8562 + }, + { + "epoch": 1.34, + "grad_norm": 19.566633584150182, + "learning_rate": 1.222168608555986e-05, + "loss": 0.6531, + "step": 8563 + }, + { + "epoch": 1.34, + "grad_norm": 15.665807427988359, + "learning_rate": 1.222004180104205e-05, + "loss": 0.6951, + "step": 8564 + }, + { + "epoch": 1.34, + "grad_norm": 18.09666819832233, + "learning_rate": 1.2218397453387551e-05, + "loss": 0.8099, + "step": 8565 + }, + { + "epoch": 1.34, + "grad_norm": 22.251314463926256, + "learning_rate": 1.2216753042643128e-05, + "loss": 0.656, + "step": 8566 + }, + { + "epoch": 1.34, + "grad_norm": 17.174028588709266, + "learning_rate": 1.2215108568855545e-05, + "loss": 0.5178, + "step": 8567 + }, + { + "epoch": 1.34, + "grad_norm": 15.88891556827831, + "learning_rate": 1.2213464032071567e-05, + "loss": 0.5962, + "step": 8568 + }, + { + "epoch": 1.34, + "grad_norm": 19.799382599683735, + "learning_rate": 1.221181943233797e-05, + "loss": 0.5768, + "step": 8569 + }, + { + "epoch": 1.34, + "grad_norm": 18.91518573033513, + "learning_rate": 1.221017476970152e-05, + "loss": 0.6383, + "step": 8570 + }, + { + "epoch": 1.34, + "grad_norm": 17.456165007982904, + "learning_rate": 1.2208530044208995e-05, + "loss": 0.6282, + "step": 8571 + }, + { + "epoch": 1.34, + "grad_norm": 16.63572919042834, + "learning_rate": 1.2206885255907163e-05, + "loss": 0.6224, + "step": 8572 + }, + { + "epoch": 1.34, + "grad_norm": 22.210550375297608, + "learning_rate": 1.2205240404842811e-05, + "loss": 0.5794, + "step": 8573 + }, + { + "epoch": 1.34, + "grad_norm": 25.877061355759512, + "learning_rate": 1.2203595491062707e-05, + "loss": 0.6282, + "step": 8574 + }, + { + "epoch": 1.34, + "grad_norm": 16.423044212995645, + "learning_rate": 1.2201950514613638e-05, + "loss": 0.6323, + "step": 8575 + }, + { + "epoch": 1.34, + "grad_norm": 21.954159829670836, + "learning_rate": 1.2200305475542385e-05, + "loss": 0.6524, + "step": 8576 + }, + { + "epoch": 1.34, + "grad_norm": 16.71081158954826, + "learning_rate": 1.2198660373895731e-05, + "loss": 0.5714, + "step": 8577 + }, + { + "epoch": 1.34, + "grad_norm": 17.267509780902145, + "learning_rate": 1.2197015209720462e-05, + "loss": 0.5689, + "step": 8578 + }, + { + "epoch": 1.34, + "grad_norm": 26.785402962476226, + "learning_rate": 1.2195369983063368e-05, + "loss": 0.6925, + "step": 8579 + }, + { + "epoch": 1.34, + "grad_norm": 21.482483312382136, + "learning_rate": 
1.2193724693971235e-05, + "loss": 0.6086, + "step": 8580 + }, + { + "epoch": 1.34, + "grad_norm": 20.127896025743652, + "learning_rate": 1.2192079342490851e-05, + "loss": 0.6026, + "step": 8581 + }, + { + "epoch": 1.34, + "grad_norm": 14.353481856921645, + "learning_rate": 1.2190433928669015e-05, + "loss": 0.5335, + "step": 8582 + }, + { + "epoch": 1.34, + "grad_norm": 19.15072360140765, + "learning_rate": 1.218878845255252e-05, + "loss": 0.6624, + "step": 8583 + }, + { + "epoch": 1.34, + "grad_norm": 11.802699797114263, + "learning_rate": 1.2187142914188161e-05, + "loss": 0.6546, + "step": 8584 + }, + { + "epoch": 1.34, + "grad_norm": 21.262017076436383, + "learning_rate": 1.218549731362274e-05, + "loss": 0.6721, + "step": 8585 + }, + { + "epoch": 1.34, + "grad_norm": 12.179441932199305, + "learning_rate": 1.218385165090305e-05, + "loss": 0.5582, + "step": 8586 + }, + { + "epoch": 1.34, + "grad_norm": 17.147349544241326, + "learning_rate": 1.2182205926075899e-05, + "loss": 0.6305, + "step": 8587 + }, + { + "epoch": 1.34, + "grad_norm": 21.79689105727175, + "learning_rate": 1.2180560139188088e-05, + "loss": 0.6199, + "step": 8588 + }, + { + "epoch": 1.34, + "grad_norm": 12.124370497161896, + "learning_rate": 1.217891429028642e-05, + "loss": 0.5722, + "step": 8589 + }, + { + "epoch": 1.34, + "grad_norm": 18.527256327436305, + "learning_rate": 1.2177268379417708e-05, + "loss": 0.6247, + "step": 8590 + }, + { + "epoch": 1.34, + "grad_norm": 19.956928681494368, + "learning_rate": 1.2175622406628754e-05, + "loss": 0.549, + "step": 8591 + }, + { + "epoch": 1.34, + "grad_norm": 29.97446677005685, + "learning_rate": 1.2173976371966372e-05, + "loss": 0.6974, + "step": 8592 + }, + { + "epoch": 1.34, + "grad_norm": 15.493586167850015, + "learning_rate": 1.2172330275477374e-05, + "loss": 0.6115, + "step": 8593 + }, + { + "epoch": 1.34, + "grad_norm": 21.941489858062244, + "learning_rate": 1.2170684117208573e-05, + "loss": 0.6264, + "step": 8594 + }, + { + "epoch": 1.34, + "grad_norm": 19.27002217046163, + "learning_rate": 1.2169037897206787e-05, + "loss": 0.6026, + "step": 8595 + }, + { + "epoch": 1.34, + "grad_norm": 21.086005392739107, + "learning_rate": 1.2167391615518831e-05, + "loss": 0.6347, + "step": 8596 + }, + { + "epoch": 1.34, + "grad_norm": 19.84576977340459, + "learning_rate": 1.2165745272191524e-05, + "loss": 0.6643, + "step": 8597 + }, + { + "epoch": 1.34, + "grad_norm": 21.689365290258255, + "learning_rate": 1.2164098867271694e-05, + "loss": 0.7244, + "step": 8598 + }, + { + "epoch": 1.34, + "grad_norm": 13.649201631581684, + "learning_rate": 1.2162452400806151e-05, + "loss": 0.5708, + "step": 8599 + }, + { + "epoch": 1.34, + "grad_norm": 29.300732506662012, + "learning_rate": 1.216080587284173e-05, + "loss": 0.6756, + "step": 8600 + }, + { + "epoch": 1.34, + "grad_norm": 18.766556312517203, + "learning_rate": 1.2159159283425254e-05, + "loss": 0.6162, + "step": 8601 + }, + { + "epoch": 1.34, + "grad_norm": 26.39327628356211, + "learning_rate": 1.2157512632603553e-05, + "loss": 0.6694, + "step": 8602 + }, + { + "epoch": 1.34, + "grad_norm": 24.74339040488667, + "learning_rate": 1.215586592042345e-05, + "loss": 0.6752, + "step": 8603 + }, + { + "epoch": 1.34, + "grad_norm": 18.849812490823286, + "learning_rate": 1.2154219146931786e-05, + "loss": 0.6391, + "step": 8604 + }, + { + "epoch": 1.34, + "grad_norm": 15.199938153408523, + "learning_rate": 1.2152572312175388e-05, + "loss": 0.579, + "step": 8605 + }, + { + "epoch": 1.34, + "grad_norm": 19.43259540466348, + "learning_rate": 
1.2150925416201091e-05, + "loss": 0.6109, + "step": 8606 + }, + { + "epoch": 1.34, + "grad_norm": 18.49210279563776, + "learning_rate": 1.2149278459055737e-05, + "loss": 0.6209, + "step": 8607 + }, + { + "epoch": 1.34, + "grad_norm": 20.70374795012466, + "learning_rate": 1.2147631440786156e-05, + "loss": 0.6015, + "step": 8608 + }, + { + "epoch": 1.34, + "grad_norm": 17.84303984924125, + "learning_rate": 1.2145984361439197e-05, + "loss": 0.6145, + "step": 8609 + }, + { + "epoch": 1.34, + "grad_norm": 18.83683998891923, + "learning_rate": 1.2144337221061697e-05, + "loss": 0.6278, + "step": 8610 + }, + { + "epoch": 1.35, + "grad_norm": 18.27728890070193, + "learning_rate": 1.2142690019700503e-05, + "loss": 0.6035, + "step": 8611 + }, + { + "epoch": 1.35, + "grad_norm": 15.829886528611222, + "learning_rate": 1.2141042757402454e-05, + "loss": 0.59, + "step": 8612 + }, + { + "epoch": 1.35, + "grad_norm": 18.899656288015105, + "learning_rate": 1.2139395434214406e-05, + "loss": 0.584, + "step": 8613 + }, + { + "epoch": 1.35, + "grad_norm": 17.5394635526347, + "learning_rate": 1.2137748050183197e-05, + "loss": 0.6026, + "step": 8614 + }, + { + "epoch": 1.35, + "grad_norm": 13.832506498603806, + "learning_rate": 1.2136100605355691e-05, + "loss": 0.618, + "step": 8615 + }, + { + "epoch": 1.35, + "grad_norm": 31.684148808281535, + "learning_rate": 1.213445309977873e-05, + "loss": 0.6659, + "step": 8616 + }, + { + "epoch": 1.35, + "grad_norm": 20.11390394060753, + "learning_rate": 1.2132805533499172e-05, + "loss": 0.6353, + "step": 8617 + }, + { + "epoch": 1.35, + "grad_norm": 16.570365225381163, + "learning_rate": 1.213115790656387e-05, + "loss": 0.6731, + "step": 8618 + }, + { + "epoch": 1.35, + "grad_norm": 18.545611620505994, + "learning_rate": 1.2129510219019686e-05, + "loss": 0.6404, + "step": 8619 + }, + { + "epoch": 1.35, + "grad_norm": 16.941143432735313, + "learning_rate": 1.2127862470913478e-05, + "loss": 0.5982, + "step": 8620 + }, + { + "epoch": 1.35, + "grad_norm": 14.405501643083866, + "learning_rate": 1.2126214662292106e-05, + "loss": 0.6401, + "step": 8621 + }, + { + "epoch": 1.35, + "grad_norm": 21.49025598247731, + "learning_rate": 1.2124566793202432e-05, + "loss": 0.6717, + "step": 8622 + }, + { + "epoch": 1.35, + "grad_norm": 20.20007527781142, + "learning_rate": 1.2122918863691322e-05, + "loss": 0.6624, + "step": 8623 + }, + { + "epoch": 1.35, + "grad_norm": 18.363786412918277, + "learning_rate": 1.2121270873805638e-05, + "loss": 0.5814, + "step": 8624 + }, + { + "epoch": 1.35, + "grad_norm": 17.313352477778924, + "learning_rate": 1.2119622823592254e-05, + "loss": 0.6372, + "step": 8625 + }, + { + "epoch": 1.35, + "grad_norm": 18.40802895622932, + "learning_rate": 1.2117974713098038e-05, + "loss": 0.6714, + "step": 8626 + }, + { + "epoch": 1.35, + "grad_norm": 17.594557231218044, + "learning_rate": 1.2116326542369859e-05, + "loss": 0.5989, + "step": 8627 + }, + { + "epoch": 1.35, + "grad_norm": 15.18178038960349, + "learning_rate": 1.211467831145459e-05, + "loss": 0.5986, + "step": 8628 + }, + { + "epoch": 1.35, + "grad_norm": 13.942473229376064, + "learning_rate": 1.2113030020399107e-05, + "loss": 0.6953, + "step": 8629 + }, + { + "epoch": 1.35, + "grad_norm": 16.832735425818143, + "learning_rate": 1.2111381669250288e-05, + "loss": 0.6188, + "step": 8630 + }, + { + "epoch": 1.35, + "grad_norm": 26.321500638717836, + "learning_rate": 1.2109733258055007e-05, + "loss": 0.6906, + "step": 8631 + }, + { + "epoch": 1.35, + "grad_norm": 21.573466512152592, + "learning_rate": 
1.210808478686015e-05, + "loss": 0.6716, + "step": 8632 + }, + { + "epoch": 1.35, + "grad_norm": 19.15335020999387, + "learning_rate": 1.210643625571259e-05, + "loss": 0.6121, + "step": 8633 + }, + { + "epoch": 1.35, + "grad_norm": 19.133454025061795, + "learning_rate": 1.2104787664659221e-05, + "loss": 0.6633, + "step": 8634 + }, + { + "epoch": 1.35, + "grad_norm": 15.280080881719982, + "learning_rate": 1.2103139013746919e-05, + "loss": 0.6343, + "step": 8635 + }, + { + "epoch": 1.35, + "grad_norm": 27.531134442791593, + "learning_rate": 1.2101490303022571e-05, + "loss": 0.702, + "step": 8636 + }, + { + "epoch": 1.35, + "grad_norm": 16.797471289585328, + "learning_rate": 1.2099841532533073e-05, + "loss": 0.6256, + "step": 8637 + }, + { + "epoch": 1.35, + "grad_norm": 18.276726431224354, + "learning_rate": 1.2098192702325309e-05, + "loss": 0.6478, + "step": 8638 + }, + { + "epoch": 1.35, + "grad_norm": 17.89552403801863, + "learning_rate": 1.209654381244617e-05, + "loss": 0.6902, + "step": 8639 + }, + { + "epoch": 1.35, + "grad_norm": 11.958346612008572, + "learning_rate": 1.2094894862942552e-05, + "loss": 0.6106, + "step": 8640 + }, + { + "epoch": 1.35, + "grad_norm": 12.678519720501198, + "learning_rate": 1.2093245853861349e-05, + "loss": 0.657, + "step": 8641 + }, + { + "epoch": 1.35, + "grad_norm": 15.707797702739459, + "learning_rate": 1.2091596785249461e-05, + "loss": 0.6076, + "step": 8642 + }, + { + "epoch": 1.35, + "grad_norm": 21.43371740228696, + "learning_rate": 1.208994765715378e-05, + "loss": 0.647, + "step": 8643 + }, + { + "epoch": 1.35, + "grad_norm": 27.12832496025796, + "learning_rate": 1.2088298469621214e-05, + "loss": 0.6718, + "step": 8644 + }, + { + "epoch": 1.35, + "grad_norm": 24.98985447894854, + "learning_rate": 1.208664922269866e-05, + "loss": 0.5796, + "step": 8645 + }, + { + "epoch": 1.35, + "grad_norm": 20.767594889857637, + "learning_rate": 1.2084999916433022e-05, + "loss": 0.5757, + "step": 8646 + }, + { + "epoch": 1.35, + "grad_norm": 18.86950795375119, + "learning_rate": 1.2083350550871206e-05, + "loss": 0.6389, + "step": 8647 + }, + { + "epoch": 1.35, + "grad_norm": 16.182343188120832, + "learning_rate": 1.2081701126060121e-05, + "loss": 0.6121, + "step": 8648 + }, + { + "epoch": 1.35, + "grad_norm": 25.74199996439996, + "learning_rate": 1.2080051642046674e-05, + "loss": 0.6565, + "step": 8649 + }, + { + "epoch": 1.35, + "grad_norm": 17.432182822891118, + "learning_rate": 1.2078402098877771e-05, + "loss": 0.6103, + "step": 8650 + }, + { + "epoch": 1.35, + "grad_norm": 17.90295544119717, + "learning_rate": 1.2076752496600333e-05, + "loss": 0.6326, + "step": 8651 + }, + { + "epoch": 1.35, + "grad_norm": 18.870214430344216, + "learning_rate": 1.2075102835261264e-05, + "loss": 0.6406, + "step": 8652 + }, + { + "epoch": 1.35, + "grad_norm": 23.562156110943068, + "learning_rate": 1.2073453114907485e-05, + "loss": 0.665, + "step": 8653 + }, + { + "epoch": 1.35, + "grad_norm": 15.820579893890073, + "learning_rate": 1.2071803335585917e-05, + "loss": 0.6558, + "step": 8654 + }, + { + "epoch": 1.35, + "grad_norm": 42.492516257039505, + "learning_rate": 1.207015349734347e-05, + "loss": 0.6626, + "step": 8655 + }, + { + "epoch": 1.35, + "grad_norm": 16.752472217332123, + "learning_rate": 1.206850360022707e-05, + "loss": 0.7339, + "step": 8656 + }, + { + "epoch": 1.35, + "grad_norm": 16.302521378499183, + "learning_rate": 1.2066853644283639e-05, + "loss": 0.6109, + "step": 8657 + }, + { + "epoch": 1.35, + "grad_norm": 15.986431140602697, + "learning_rate": 
1.2065203629560093e-05, + "loss": 0.5829, + "step": 8658 + }, + { + "epoch": 1.35, + "grad_norm": 18.375647208678277, + "learning_rate": 1.2063553556103372e-05, + "loss": 0.5178, + "step": 8659 + }, + { + "epoch": 1.35, + "grad_norm": 19.58411080173229, + "learning_rate": 1.206190342396039e-05, + "loss": 0.67, + "step": 8660 + }, + { + "epoch": 1.35, + "grad_norm": 15.888372249392383, + "learning_rate": 1.2060253233178086e-05, + "loss": 0.5886, + "step": 8661 + }, + { + "epoch": 1.35, + "grad_norm": 13.246519981283104, + "learning_rate": 1.2058602983803378e-05, + "loss": 0.6475, + "step": 8662 + }, + { + "epoch": 1.35, + "grad_norm": 12.864418853513957, + "learning_rate": 1.2056952675883208e-05, + "loss": 0.6008, + "step": 8663 + }, + { + "epoch": 1.35, + "grad_norm": 20.578250072884106, + "learning_rate": 1.2055302309464509e-05, + "loss": 0.5911, + "step": 8664 + }, + { + "epoch": 1.35, + "grad_norm": 20.716578899167196, + "learning_rate": 1.2053651884594215e-05, + "loss": 0.6514, + "step": 8665 + }, + { + "epoch": 1.35, + "grad_norm": 23.838624148899495, + "learning_rate": 1.2052001401319262e-05, + "loss": 0.6989, + "step": 8666 + }, + { + "epoch": 1.35, + "grad_norm": 18.869598103963412, + "learning_rate": 1.205035085968659e-05, + "loss": 0.7058, + "step": 8667 + }, + { + "epoch": 1.35, + "grad_norm": 25.229050145928856, + "learning_rate": 1.2048700259743136e-05, + "loss": 0.6901, + "step": 8668 + }, + { + "epoch": 1.35, + "grad_norm": 4.2123019603452105, + "learning_rate": 1.2047049601535847e-05, + "loss": 0.602, + "step": 8669 + }, + { + "epoch": 1.35, + "grad_norm": 21.594150492574343, + "learning_rate": 1.2045398885111665e-05, + "loss": 0.6425, + "step": 8670 + }, + { + "epoch": 1.35, + "grad_norm": 18.441651733548678, + "learning_rate": 1.2043748110517536e-05, + "loss": 0.6666, + "step": 8671 + }, + { + "epoch": 1.35, + "grad_norm": 15.520323122735798, + "learning_rate": 1.2042097277800406e-05, + "loss": 0.5862, + "step": 8672 + }, + { + "epoch": 1.35, + "grad_norm": 15.330987991507707, + "learning_rate": 1.2040446387007222e-05, + "loss": 0.6194, + "step": 8673 + }, + { + "epoch": 1.35, + "grad_norm": 14.146208037695654, + "learning_rate": 1.203879543818494e-05, + "loss": 0.6213, + "step": 8674 + }, + { + "epoch": 1.36, + "grad_norm": 19.66988023487055, + "learning_rate": 1.2037144431380506e-05, + "loss": 0.5863, + "step": 8675 + }, + { + "epoch": 1.36, + "grad_norm": 17.40602767817765, + "learning_rate": 1.2035493366640879e-05, + "loss": 0.5943, + "step": 8676 + }, + { + "epoch": 1.36, + "grad_norm": 23.25615465959148, + "learning_rate": 1.2033842244013006e-05, + "loss": 0.5958, + "step": 8677 + }, + { + "epoch": 1.36, + "grad_norm": 17.37008311490887, + "learning_rate": 1.2032191063543855e-05, + "loss": 0.6931, + "step": 8678 + }, + { + "epoch": 1.36, + "grad_norm": 13.431357786931859, + "learning_rate": 1.2030539825280373e-05, + "loss": 0.5809, + "step": 8679 + }, + { + "epoch": 1.36, + "grad_norm": 22.634978234535726, + "learning_rate": 1.202888852926953e-05, + "loss": 0.706, + "step": 8680 + }, + { + "epoch": 1.36, + "grad_norm": 19.180166034206074, + "learning_rate": 1.2027237175558283e-05, + "loss": 0.6727, + "step": 8681 + }, + { + "epoch": 1.36, + "grad_norm": 18.274704392230202, + "learning_rate": 1.2025585764193597e-05, + "loss": 0.6655, + "step": 8682 + }, + { + "epoch": 1.36, + "grad_norm": 15.758746647604836, + "learning_rate": 1.2023934295222437e-05, + "loss": 0.589, + "step": 8683 + }, + { + "epoch": 1.36, + "grad_norm": 22.147222840172123, + "learning_rate": 
1.202228276869177e-05, + "loss": 0.7426, + "step": 8684 + }, + { + "epoch": 1.36, + "grad_norm": 18.242841270216683, + "learning_rate": 1.202063118464856e-05, + "loss": 0.5979, + "step": 8685 + }, + { + "epoch": 1.36, + "grad_norm": 22.265055188701282, + "learning_rate": 1.2018979543139788e-05, + "loss": 0.6719, + "step": 8686 + }, + { + "epoch": 1.36, + "grad_norm": 20.970906259754297, + "learning_rate": 1.2017327844212414e-05, + "loss": 0.573, + "step": 8687 + }, + { + "epoch": 1.36, + "grad_norm": 13.97843936655783, + "learning_rate": 1.2015676087913418e-05, + "loss": 0.6731, + "step": 8688 + }, + { + "epoch": 1.36, + "grad_norm": 26.719685570471505, + "learning_rate": 1.2014024274289773e-05, + "loss": 0.7976, + "step": 8689 + }, + { + "epoch": 1.36, + "grad_norm": 17.657509474436512, + "learning_rate": 1.2012372403388457e-05, + "loss": 0.6603, + "step": 8690 + }, + { + "epoch": 1.36, + "grad_norm": 14.92491280555814, + "learning_rate": 1.2010720475256446e-05, + "loss": 0.6932, + "step": 8691 + }, + { + "epoch": 1.36, + "grad_norm": 15.058209762427303, + "learning_rate": 1.200906848994072e-05, + "loss": 0.6104, + "step": 8692 + }, + { + "epoch": 1.36, + "grad_norm": 15.012169296408299, + "learning_rate": 1.2007416447488263e-05, + "loss": 0.6158, + "step": 8693 + }, + { + "epoch": 1.36, + "grad_norm": 16.49370475224746, + "learning_rate": 1.2005764347946053e-05, + "loss": 0.6646, + "step": 8694 + }, + { + "epoch": 1.36, + "grad_norm": 19.485652090188943, + "learning_rate": 1.200411219136108e-05, + "loss": 0.5479, + "step": 8695 + }, + { + "epoch": 1.36, + "grad_norm": 14.097223393070108, + "learning_rate": 1.2002459977780331e-05, + "loss": 0.6201, + "step": 8696 + }, + { + "epoch": 1.36, + "grad_norm": 17.79586073218258, + "learning_rate": 1.200080770725079e-05, + "loss": 0.6067, + "step": 8697 + }, + { + "epoch": 1.36, + "grad_norm": 11.52462496183499, + "learning_rate": 1.1999155379819449e-05, + "loss": 0.5849, + "step": 8698 + }, + { + "epoch": 1.36, + "grad_norm": 21.27744664362888, + "learning_rate": 1.1997502995533299e-05, + "loss": 0.7032, + "step": 8699 + }, + { + "epoch": 1.36, + "grad_norm": 23.678167306299112, + "learning_rate": 1.1995850554439332e-05, + "loss": 0.6332, + "step": 8700 + }, + { + "epoch": 1.36, + "grad_norm": 27.32049155237977, + "learning_rate": 1.199419805658454e-05, + "loss": 0.7102, + "step": 8701 + }, + { + "epoch": 1.36, + "grad_norm": 15.668013411662498, + "learning_rate": 1.1992545502015923e-05, + "loss": 0.6285, + "step": 8702 + }, + { + "epoch": 1.36, + "grad_norm": 16.35178649391694, + "learning_rate": 1.1990892890780482e-05, + "loss": 0.6692, + "step": 8703 + }, + { + "epoch": 1.36, + "grad_norm": 16.67873565852468, + "learning_rate": 1.1989240222925206e-05, + "loss": 0.6064, + "step": 8704 + }, + { + "epoch": 1.36, + "grad_norm": 11.204817147330086, + "learning_rate": 1.1987587498497107e-05, + "loss": 0.6411, + "step": 8705 + }, + { + "epoch": 1.36, + "grad_norm": 14.827820825019513, + "learning_rate": 1.1985934717543178e-05, + "loss": 0.6193, + "step": 8706 + }, + { + "epoch": 1.36, + "grad_norm": 18.466607331382953, + "learning_rate": 1.1984281880110427e-05, + "loss": 0.664, + "step": 8707 + }, + { + "epoch": 1.36, + "grad_norm": 13.920055425695603, + "learning_rate": 1.198262898624586e-05, + "loss": 0.6509, + "step": 8708 + }, + { + "epoch": 1.36, + "grad_norm": 18.04460874009785, + "learning_rate": 1.1980976035996488e-05, + "loss": 0.5718, + "step": 8709 + }, + { + "epoch": 1.36, + "grad_norm": 14.65760851930483, + "learning_rate": 
1.1979323029409316e-05, + "loss": 0.6145, + "step": 8710 + }, + { + "epoch": 1.36, + "grad_norm": 18.045625629775152, + "learning_rate": 1.1977669966531353e-05, + "loss": 0.616, + "step": 8711 + }, + { + "epoch": 1.36, + "grad_norm": 20.667504673998017, + "learning_rate": 1.197601684740961e-05, + "loss": 0.6646, + "step": 8712 + }, + { + "epoch": 1.36, + "grad_norm": 16.207071307030063, + "learning_rate": 1.197436367209111e-05, + "loss": 0.6228, + "step": 8713 + }, + { + "epoch": 1.36, + "grad_norm": 21.933617073651327, + "learning_rate": 1.1972710440622858e-05, + "loss": 0.6474, + "step": 8714 + }, + { + "epoch": 1.36, + "grad_norm": 18.28544016770247, + "learning_rate": 1.1971057153051878e-05, + "loss": 0.707, + "step": 8715 + }, + { + "epoch": 1.36, + "grad_norm": 18.115497778856586, + "learning_rate": 1.1969403809425183e-05, + "loss": 0.702, + "step": 8716 + }, + { + "epoch": 1.36, + "grad_norm": 24.303170521308342, + "learning_rate": 1.1967750409789796e-05, + "loss": 0.6456, + "step": 8717 + }, + { + "epoch": 1.36, + "grad_norm": 21.41790776206668, + "learning_rate": 1.196609695419274e-05, + "loss": 0.6288, + "step": 8718 + }, + { + "epoch": 1.36, + "grad_norm": 19.29997157684022, + "learning_rate": 1.1964443442681036e-05, + "loss": 0.667, + "step": 8719 + }, + { + "epoch": 1.36, + "grad_norm": 18.997276983135546, + "learning_rate": 1.196278987530171e-05, + "loss": 0.5949, + "step": 8720 + }, + { + "epoch": 1.36, + "grad_norm": 15.344192490004842, + "learning_rate": 1.1961136252101786e-05, + "loss": 0.5479, + "step": 8721 + }, + { + "epoch": 1.36, + "grad_norm": 21.402597818887436, + "learning_rate": 1.1959482573128298e-05, + "loss": 0.659, + "step": 8722 + }, + { + "epoch": 1.36, + "grad_norm": 22.250074908346086, + "learning_rate": 1.1957828838428269e-05, + "loss": 0.6589, + "step": 8723 + }, + { + "epoch": 1.36, + "grad_norm": 14.78168894829748, + "learning_rate": 1.1956175048048734e-05, + "loss": 0.5787, + "step": 8724 + }, + { + "epoch": 1.36, + "grad_norm": 26.64106913053749, + "learning_rate": 1.1954521202036726e-05, + "loss": 0.6823, + "step": 8725 + }, + { + "epoch": 1.36, + "grad_norm": 25.159660220161182, + "learning_rate": 1.1952867300439276e-05, + "loss": 0.6809, + "step": 8726 + }, + { + "epoch": 1.36, + "grad_norm": 15.948444492476462, + "learning_rate": 1.1951213343303425e-05, + "loss": 0.7293, + "step": 8727 + }, + { + "epoch": 1.36, + "grad_norm": 18.259032591867555, + "learning_rate": 1.1949559330676209e-05, + "loss": 0.695, + "step": 8728 + }, + { + "epoch": 1.36, + "grad_norm": 12.339335834764851, + "learning_rate": 1.194790526260466e-05, + "loss": 0.6094, + "step": 8729 + }, + { + "epoch": 1.36, + "grad_norm": 20.742763378389107, + "learning_rate": 1.1946251139135831e-05, + "loss": 0.62, + "step": 8730 + }, + { + "epoch": 1.36, + "grad_norm": 18.939018681191225, + "learning_rate": 1.1944596960316755e-05, + "loss": 0.5929, + "step": 8731 + }, + { + "epoch": 1.36, + "grad_norm": 15.431779897274023, + "learning_rate": 1.1942942726194477e-05, + "loss": 0.6364, + "step": 8732 + }, + { + "epoch": 1.36, + "grad_norm": 22.683718808267724, + "learning_rate": 1.194128843681605e-05, + "loss": 0.6156, + "step": 8733 + }, + { + "epoch": 1.36, + "grad_norm": 24.38668062835554, + "learning_rate": 1.1939634092228511e-05, + "loss": 0.6481, + "step": 8734 + }, + { + "epoch": 1.36, + "grad_norm": 20.017589097508985, + "learning_rate": 1.1937979692478915e-05, + "loss": 0.6234, + "step": 8735 + }, + { + "epoch": 1.36, + "grad_norm": 20.01555311471506, + "learning_rate": 
1.1936325237614312e-05, + "loss": 0.6355, + "step": 8736 + }, + { + "epoch": 1.36, + "grad_norm": 14.86445135205235, + "learning_rate": 1.193467072768175e-05, + "loss": 0.5934, + "step": 8737 + }, + { + "epoch": 1.36, + "grad_norm": 22.803704844700196, + "learning_rate": 1.1933016162728281e-05, + "loss": 0.5933, + "step": 8738 + }, + { + "epoch": 1.37, + "grad_norm": 15.139628692497936, + "learning_rate": 1.1931361542800968e-05, + "loss": 0.5912, + "step": 8739 + }, + { + "epoch": 1.37, + "grad_norm": 12.402821710042435, + "learning_rate": 1.192970686794686e-05, + "loss": 0.5911, + "step": 8740 + }, + { + "epoch": 1.37, + "grad_norm": 17.761679372666222, + "learning_rate": 1.192805213821302e-05, + "loss": 0.6233, + "step": 8741 + }, + { + "epoch": 1.37, + "grad_norm": 16.400814986295323, + "learning_rate": 1.1926397353646501e-05, + "loss": 0.602, + "step": 8742 + }, + { + "epoch": 1.37, + "grad_norm": 22.277909358812714, + "learning_rate": 1.1924742514294371e-05, + "loss": 0.7104, + "step": 8743 + }, + { + "epoch": 1.37, + "grad_norm": 12.739870507452032, + "learning_rate": 1.1923087620203688e-05, + "loss": 0.5877, + "step": 8744 + }, + { + "epoch": 1.37, + "grad_norm": 15.39542170293932, + "learning_rate": 1.1921432671421523e-05, + "loss": 0.5963, + "step": 8745 + }, + { + "epoch": 1.37, + "grad_norm": 23.54429346999875, + "learning_rate": 1.1919777667994932e-05, + "loss": 0.5965, + "step": 8746 + }, + { + "epoch": 1.37, + "grad_norm": 27.29421663577674, + "learning_rate": 1.191812260997099e-05, + "loss": 0.6987, + "step": 8747 + }, + { + "epoch": 1.37, + "grad_norm": 14.545700450931495, + "learning_rate": 1.1916467497396759e-05, + "loss": 0.6507, + "step": 8748 + }, + { + "epoch": 1.37, + "grad_norm": 18.808383496813654, + "learning_rate": 1.1914812330319318e-05, + "loss": 0.6615, + "step": 8749 + }, + { + "epoch": 1.37, + "grad_norm": 22.436963995758067, + "learning_rate": 1.1913157108785731e-05, + "loss": 0.7119, + "step": 8750 + }, + { + "epoch": 1.37, + "grad_norm": 13.583041417119194, + "learning_rate": 1.1911501832843077e-05, + "loss": 0.6371, + "step": 8751 + }, + { + "epoch": 1.37, + "grad_norm": 12.972580252186157, + "learning_rate": 1.1909846502538429e-05, + "loss": 0.6289, + "step": 8752 + }, + { + "epoch": 1.37, + "grad_norm": 20.192975607421552, + "learning_rate": 1.1908191117918864e-05, + "loss": 0.6453, + "step": 8753 + }, + { + "epoch": 1.37, + "grad_norm": 36.280189820668895, + "learning_rate": 1.190653567903146e-05, + "loss": 0.6793, + "step": 8754 + }, + { + "epoch": 1.37, + "grad_norm": 12.86481619178849, + "learning_rate": 1.1904880185923295e-05, + "loss": 0.5888, + "step": 8755 + }, + { + "epoch": 1.37, + "grad_norm": 19.206713653849533, + "learning_rate": 1.190322463864145e-05, + "loss": 0.5912, + "step": 8756 + }, + { + "epoch": 1.37, + "grad_norm": 18.95514795879096, + "learning_rate": 1.1901569037233012e-05, + "loss": 0.6591, + "step": 8757 + }, + { + "epoch": 1.37, + "grad_norm": 16.45718708322206, + "learning_rate": 1.1899913381745062e-05, + "loss": 0.6098, + "step": 8758 + }, + { + "epoch": 1.37, + "grad_norm": 22.174229752905077, + "learning_rate": 1.189825767222469e-05, + "loss": 0.6077, + "step": 8759 + }, + { + "epoch": 1.37, + "grad_norm": 14.924854490395116, + "learning_rate": 1.1896601908718979e-05, + "loss": 0.555, + "step": 8760 + }, + { + "epoch": 1.37, + "grad_norm": 21.766709660096474, + "learning_rate": 1.1894946091275014e-05, + "loss": 0.6823, + "step": 8761 + }, + { + "epoch": 1.37, + "grad_norm": 15.711140559234362, + "learning_rate": 
1.1893290219939899e-05, + "loss": 0.6426, + "step": 8762 + }, + { + "epoch": 1.37, + "grad_norm": 21.665283668070913, + "learning_rate": 1.1891634294760713e-05, + "loss": 0.6003, + "step": 8763 + }, + { + "epoch": 1.37, + "grad_norm": 36.673144886994365, + "learning_rate": 1.1889978315784557e-05, + "loss": 0.6776, + "step": 8764 + }, + { + "epoch": 1.37, + "grad_norm": 15.903327367053109, + "learning_rate": 1.1888322283058517e-05, + "loss": 0.5077, + "step": 8765 + }, + { + "epoch": 1.37, + "grad_norm": 20.94289715722972, + "learning_rate": 1.1886666196629701e-05, + "loss": 0.6484, + "step": 8766 + }, + { + "epoch": 1.37, + "grad_norm": 25.498087574032546, + "learning_rate": 1.1885010056545204e-05, + "loss": 0.673, + "step": 8767 + }, + { + "epoch": 1.37, + "grad_norm": 12.40149669127506, + "learning_rate": 1.1883353862852121e-05, + "loss": 0.6498, + "step": 8768 + }, + { + "epoch": 1.37, + "grad_norm": 11.29558471312626, + "learning_rate": 1.1881697615597554e-05, + "loss": 0.5583, + "step": 8769 + }, + { + "epoch": 1.37, + "grad_norm": 17.698319888606086, + "learning_rate": 1.188004131482861e-05, + "loss": 0.6484, + "step": 8770 + }, + { + "epoch": 1.37, + "grad_norm": 21.05487926657686, + "learning_rate": 1.187838496059239e-05, + "loss": 0.7046, + "step": 8771 + }, + { + "epoch": 1.37, + "grad_norm": 17.347937588728534, + "learning_rate": 1.1876728552936e-05, + "loss": 0.6062, + "step": 8772 + }, + { + "epoch": 1.37, + "grad_norm": 11.477226872362207, + "learning_rate": 1.1875072091906547e-05, + "loss": 0.5326, + "step": 8773 + }, + { + "epoch": 1.37, + "grad_norm": 21.41050072044705, + "learning_rate": 1.1873415577551146e-05, + "loss": 0.6239, + "step": 8774 + }, + { + "epoch": 1.37, + "grad_norm": 29.094114957911238, + "learning_rate": 1.1871759009916897e-05, + "loss": 0.7097, + "step": 8775 + }, + { + "epoch": 1.37, + "grad_norm": 26.523528699479314, + "learning_rate": 1.1870102389050917e-05, + "loss": 0.6356, + "step": 8776 + }, + { + "epoch": 1.37, + "grad_norm": 22.427232202669828, + "learning_rate": 1.186844571500032e-05, + "loss": 0.5878, + "step": 8777 + }, + { + "epoch": 1.37, + "grad_norm": 19.328231193694823, + "learning_rate": 1.1866788987812219e-05, + "loss": 0.5914, + "step": 8778 + }, + { + "epoch": 1.37, + "grad_norm": 17.47172187406481, + "learning_rate": 1.1865132207533731e-05, + "loss": 0.6255, + "step": 8779 + }, + { + "epoch": 1.37, + "grad_norm": 12.157636511901286, + "learning_rate": 1.1863475374211974e-05, + "loss": 0.5833, + "step": 8780 + }, + { + "epoch": 1.37, + "grad_norm": 14.718110224006761, + "learning_rate": 1.1861818487894072e-05, + "loss": 0.6306, + "step": 8781 + }, + { + "epoch": 1.37, + "grad_norm": 16.783794854776737, + "learning_rate": 1.1860161548627137e-05, + "loss": 0.592, + "step": 8782 + }, + { + "epoch": 1.37, + "grad_norm": 21.493314127303073, + "learning_rate": 1.1858504556458294e-05, + "loss": 0.6051, + "step": 8783 + }, + { + "epoch": 1.37, + "grad_norm": 18.525089320586517, + "learning_rate": 1.1856847511434673e-05, + "loss": 0.575, + "step": 8784 + }, + { + "epoch": 1.37, + "grad_norm": 18.28510709241336, + "learning_rate": 1.1855190413603392e-05, + "loss": 0.605, + "step": 8785 + }, + { + "epoch": 1.37, + "grad_norm": 23.72661662166227, + "learning_rate": 1.1853533263011583e-05, + "loss": 0.6353, + "step": 8786 + }, + { + "epoch": 1.37, + "grad_norm": 17.017655812338674, + "learning_rate": 1.185187605970637e-05, + "loss": 0.6292, + "step": 8787 + }, + { + "epoch": 1.37, + "grad_norm": 21.77108743548212, + "learning_rate": 
1.1850218803734886e-05, + "loss": 0.6616, + "step": 8788 + }, + { + "epoch": 1.37, + "grad_norm": 19.064875782206286, + "learning_rate": 1.1848561495144263e-05, + "loss": 0.6098, + "step": 8789 + }, + { + "epoch": 1.37, + "grad_norm": 15.372480403586309, + "learning_rate": 1.184690413398163e-05, + "loss": 0.6271, + "step": 8790 + }, + { + "epoch": 1.37, + "grad_norm": 23.617858075109126, + "learning_rate": 1.1845246720294129e-05, + "loss": 0.5457, + "step": 8791 + }, + { + "epoch": 1.37, + "grad_norm": 26.57863554500613, + "learning_rate": 1.1843589254128884e-05, + "loss": 0.6431, + "step": 8792 + }, + { + "epoch": 1.37, + "grad_norm": 17.010458838038158, + "learning_rate": 1.1841931735533043e-05, + "loss": 0.6162, + "step": 8793 + }, + { + "epoch": 1.37, + "grad_norm": 23.40619553481973, + "learning_rate": 1.184027416455374e-05, + "loss": 0.6197, + "step": 8794 + }, + { + "epoch": 1.37, + "grad_norm": 16.926978034401095, + "learning_rate": 1.1838616541238115e-05, + "loss": 0.5773, + "step": 8795 + }, + { + "epoch": 1.37, + "grad_norm": 24.16491994313919, + "learning_rate": 1.1836958865633315e-05, + "loss": 0.6239, + "step": 8796 + }, + { + "epoch": 1.37, + "grad_norm": 16.247293061595126, + "learning_rate": 1.1835301137786476e-05, + "loss": 0.7009, + "step": 8797 + }, + { + "epoch": 1.37, + "grad_norm": 17.60317748373623, + "learning_rate": 1.1833643357744747e-05, + "loss": 0.5932, + "step": 8798 + }, + { + "epoch": 1.37, + "grad_norm": 15.256813926854264, + "learning_rate": 1.1831985525555274e-05, + "loss": 0.6461, + "step": 8799 + }, + { + "epoch": 1.37, + "grad_norm": 17.181852749825417, + "learning_rate": 1.1830327641265202e-05, + "loss": 0.6034, + "step": 8800 + }, + { + "epoch": 1.37, + "grad_norm": 10.984551365590939, + "learning_rate": 1.1828669704921685e-05, + "loss": 0.5431, + "step": 8801 + }, + { + "epoch": 1.37, + "grad_norm": 27.292590424626432, + "learning_rate": 1.182701171657187e-05, + "loss": 0.6692, + "step": 8802 + }, + { + "epoch": 1.38, + "grad_norm": 21.036929265693516, + "learning_rate": 1.1825353676262914e-05, + "loss": 0.615, + "step": 8803 + }, + { + "epoch": 1.38, + "grad_norm": 20.25751258357393, + "learning_rate": 1.1823695584041963e-05, + "loss": 0.613, + "step": 8804 + }, + { + "epoch": 1.38, + "grad_norm": 27.153551680277655, + "learning_rate": 1.1822037439956178e-05, + "loss": 0.6862, + "step": 8805 + }, + { + "epoch": 1.38, + "grad_norm": 15.63115168470582, + "learning_rate": 1.1820379244052715e-05, + "loss": 0.653, + "step": 8806 + }, + { + "epoch": 1.38, + "grad_norm": 18.23232320913089, + "learning_rate": 1.1818720996378729e-05, + "loss": 0.5638, + "step": 8807 + }, + { + "epoch": 1.38, + "grad_norm": 15.930645730054778, + "learning_rate": 1.1817062696981384e-05, + "loss": 0.6471, + "step": 8808 + }, + { + "epoch": 1.38, + "grad_norm": 21.33081934192666, + "learning_rate": 1.1815404345907837e-05, + "loss": 0.6343, + "step": 8809 + }, + { + "epoch": 1.38, + "grad_norm": 19.683788137352334, + "learning_rate": 1.1813745943205254e-05, + "loss": 0.5866, + "step": 8810 + }, + { + "epoch": 1.38, + "grad_norm": 14.684608961215451, + "learning_rate": 1.1812087488920798e-05, + "loss": 0.5788, + "step": 8811 + }, + { + "epoch": 1.38, + "grad_norm": 16.180297513575756, + "learning_rate": 1.1810428983101632e-05, + "loss": 0.6191, + "step": 8812 + }, + { + "epoch": 1.38, + "grad_norm": 14.243397929638935, + "learning_rate": 1.1808770425794927e-05, + "loss": 0.5692, + "step": 8813 + }, + { + "epoch": 1.38, + "grad_norm": 13.72967744177661, + "learning_rate": 
1.1807111817047846e-05, + "loss": 0.6357, + "step": 8814 + }, + { + "epoch": 1.38, + "grad_norm": 20.66109854889212, + "learning_rate": 1.1805453156907562e-05, + "loss": 0.6164, + "step": 8815 + }, + { + "epoch": 1.38, + "grad_norm": 21.436350847256517, + "learning_rate": 1.1803794445421251e-05, + "loss": 0.6708, + "step": 8816 + }, + { + "epoch": 1.38, + "grad_norm": 17.339018356708436, + "learning_rate": 1.1802135682636076e-05, + "loss": 0.6642, + "step": 8817 + }, + { + "epoch": 1.38, + "grad_norm": 19.560486225980682, + "learning_rate": 1.1800476868599222e-05, + "loss": 0.653, + "step": 8818 + }, + { + "epoch": 1.38, + "grad_norm": 16.680373201413264, + "learning_rate": 1.1798818003357853e-05, + "loss": 0.558, + "step": 8819 + }, + { + "epoch": 1.38, + "grad_norm": 22.154495874987695, + "learning_rate": 1.1797159086959156e-05, + "loss": 0.5709, + "step": 8820 + }, + { + "epoch": 1.38, + "grad_norm": 20.7590174255286, + "learning_rate": 1.1795500119450305e-05, + "loss": 0.6297, + "step": 8821 + }, + { + "epoch": 1.38, + "grad_norm": 15.793729425057617, + "learning_rate": 1.179384110087848e-05, + "loss": 0.602, + "step": 8822 + }, + { + "epoch": 1.38, + "grad_norm": 15.675922302858655, + "learning_rate": 1.1792182031290867e-05, + "loss": 0.6355, + "step": 8823 + }, + { + "epoch": 1.38, + "grad_norm": 25.56581067935375, + "learning_rate": 1.1790522910734638e-05, + "loss": 0.592, + "step": 8824 + }, + { + "epoch": 1.38, + "grad_norm": 21.423739930853916, + "learning_rate": 1.1788863739256992e-05, + "loss": 0.6738, + "step": 8825 + }, + { + "epoch": 1.38, + "grad_norm": 14.300026753623985, + "learning_rate": 1.1787204516905104e-05, + "loss": 0.5973, + "step": 8826 + }, + { + "epoch": 1.38, + "grad_norm": 16.190096682563716, + "learning_rate": 1.1785545243726166e-05, + "loss": 0.5915, + "step": 8827 + }, + { + "epoch": 1.38, + "grad_norm": 19.673453136647232, + "learning_rate": 1.1783885919767368e-05, + "loss": 0.6839, + "step": 8828 + }, + { + "epoch": 1.38, + "grad_norm": 18.876111351146506, + "learning_rate": 1.1782226545075896e-05, + "loss": 0.6366, + "step": 8829 + }, + { + "epoch": 1.38, + "grad_norm": 18.039812517661556, + "learning_rate": 1.1780567119698944e-05, + "loss": 0.6643, + "step": 8830 + }, + { + "epoch": 1.38, + "grad_norm": 26.43417260015495, + "learning_rate": 1.1778907643683704e-05, + "loss": 0.5339, + "step": 8831 + }, + { + "epoch": 1.38, + "grad_norm": 19.071657149789413, + "learning_rate": 1.1777248117077371e-05, + "loss": 0.59, + "step": 8832 + }, + { + "epoch": 1.38, + "grad_norm": 16.47647137885765, + "learning_rate": 1.1775588539927142e-05, + "loss": 0.4877, + "step": 8833 + }, + { + "epoch": 1.38, + "grad_norm": 21.44229360361136, + "learning_rate": 1.1773928912280213e-05, + "loss": 0.6178, + "step": 8834 + }, + { + "epoch": 1.38, + "grad_norm": 27.87886765509306, + "learning_rate": 1.1772269234183786e-05, + "loss": 0.6472, + "step": 8835 + }, + { + "epoch": 1.38, + "grad_norm": 30.0246233759278, + "learning_rate": 1.1770609505685056e-05, + "loss": 0.6759, + "step": 8836 + }, + { + "epoch": 1.38, + "grad_norm": 22.69169194991702, + "learning_rate": 1.1768949726831228e-05, + "loss": 0.6553, + "step": 8837 + }, + { + "epoch": 1.38, + "grad_norm": 17.54049871464825, + "learning_rate": 1.1767289897669505e-05, + "loss": 0.696, + "step": 8838 + }, + { + "epoch": 1.38, + "grad_norm": 24.74826716608812, + "learning_rate": 1.1765630018247089e-05, + "loss": 0.6519, + "step": 8839 + }, + { + "epoch": 1.38, + "grad_norm": 13.132074428380475, + "learning_rate": 
1.1763970088611192e-05, + "loss": 0.5551, + "step": 8840 + }, + { + "epoch": 1.38, + "grad_norm": 16.130480533561737, + "learning_rate": 1.1762310108809017e-05, + "loss": 0.6302, + "step": 8841 + }, + { + "epoch": 1.38, + "grad_norm": 18.78296848692802, + "learning_rate": 1.176065007888777e-05, + "loss": 0.6442, + "step": 8842 + }, + { + "epoch": 1.38, + "grad_norm": 21.249799158044773, + "learning_rate": 1.1758989998894667e-05, + "loss": 0.67, + "step": 8843 + }, + { + "epoch": 1.38, + "grad_norm": 18.989843346260734, + "learning_rate": 1.1757329868876917e-05, + "loss": 0.5932, + "step": 8844 + }, + { + "epoch": 1.38, + "grad_norm": 15.585443668142796, + "learning_rate": 1.1755669688881732e-05, + "loss": 0.6167, + "step": 8845 + }, + { + "epoch": 1.38, + "grad_norm": 15.87361622114679, + "learning_rate": 1.175400945895633e-05, + "loss": 0.5859, + "step": 8846 + }, + { + "epoch": 1.38, + "grad_norm": 19.710688289680192, + "learning_rate": 1.1752349179147926e-05, + "loss": 0.6055, + "step": 8847 + }, + { + "epoch": 1.38, + "grad_norm": 14.391839388802753, + "learning_rate": 1.1750688849503735e-05, + "loss": 0.6189, + "step": 8848 + }, + { + "epoch": 1.38, + "grad_norm": 12.569850741263114, + "learning_rate": 1.1749028470070975e-05, + "loss": 0.6183, + "step": 8849 + }, + { + "epoch": 1.38, + "grad_norm": 29.473755080491895, + "learning_rate": 1.1747368040896875e-05, + "loss": 0.657, + "step": 8850 + }, + { + "epoch": 1.38, + "grad_norm": 18.53333045561157, + "learning_rate": 1.1745707562028643e-05, + "loss": 0.5385, + "step": 8851 + }, + { + "epoch": 1.38, + "grad_norm": 22.16195506698316, + "learning_rate": 1.1744047033513514e-05, + "loss": 0.6817, + "step": 8852 + }, + { + "epoch": 1.38, + "grad_norm": 14.38801996331766, + "learning_rate": 1.1742386455398704e-05, + "loss": 0.5677, + "step": 8853 + }, + { + "epoch": 1.38, + "grad_norm": 12.618734703035798, + "learning_rate": 1.1740725827731446e-05, + "loss": 0.6374, + "step": 8854 + }, + { + "epoch": 1.38, + "grad_norm": 16.860257317008685, + "learning_rate": 1.1739065150558961e-05, + "loss": 0.606, + "step": 8855 + }, + { + "epoch": 1.38, + "grad_norm": 19.30032536088489, + "learning_rate": 1.1737404423928482e-05, + "loss": 0.6202, + "step": 8856 + }, + { + "epoch": 1.38, + "grad_norm": 13.39155890836942, + "learning_rate": 1.1735743647887237e-05, + "loss": 0.6448, + "step": 8857 + }, + { + "epoch": 1.38, + "grad_norm": 14.356398879559249, + "learning_rate": 1.1734082822482457e-05, + "loss": 0.5771, + "step": 8858 + }, + { + "epoch": 1.38, + "grad_norm": 17.78085065068071, + "learning_rate": 1.1732421947761377e-05, + "loss": 0.5897, + "step": 8859 + }, + { + "epoch": 1.38, + "grad_norm": 37.65702955657859, + "learning_rate": 1.173076102377123e-05, + "loss": 0.714, + "step": 8860 + }, + { + "epoch": 1.38, + "grad_norm": 15.294290677908684, + "learning_rate": 1.1729100050559252e-05, + "loss": 0.6297, + "step": 8861 + }, + { + "epoch": 1.38, + "grad_norm": 14.82242889957776, + "learning_rate": 1.1727439028172682e-05, + "loss": 0.5688, + "step": 8862 + }, + { + "epoch": 1.38, + "grad_norm": 18.457085996274138, + "learning_rate": 1.1725777956658752e-05, + "loss": 0.6314, + "step": 8863 + }, + { + "epoch": 1.38, + "grad_norm": 24.38195712133182, + "learning_rate": 1.172411683606471e-05, + "loss": 0.7306, + "step": 8864 + }, + { + "epoch": 1.38, + "grad_norm": 18.683424076152892, + "learning_rate": 1.1722455666437793e-05, + "loss": 0.643, + "step": 8865 + }, + { + "epoch": 1.38, + "grad_norm": 17.81138611707172, + "learning_rate": 
1.1720794447825245e-05, + "loss": 0.6803, + "step": 8866 + }, + { + "epoch": 1.39, + "grad_norm": 13.127828186591692, + "learning_rate": 1.171913318027431e-05, + "loss": 0.6178, + "step": 8867 + }, + { + "epoch": 1.39, + "grad_norm": 34.470922097308126, + "learning_rate": 1.1717471863832231e-05, + "loss": 0.683, + "step": 8868 + }, + { + "epoch": 1.39, + "grad_norm": 15.81591433011889, + "learning_rate": 1.1715810498546259e-05, + "loss": 0.5453, + "step": 8869 + }, + { + "epoch": 1.39, + "grad_norm": 16.681676905898172, + "learning_rate": 1.1714149084463638e-05, + "loss": 0.6379, + "step": 8870 + }, + { + "epoch": 1.39, + "grad_norm": 13.621112405574397, + "learning_rate": 1.1712487621631621e-05, + "loss": 0.6087, + "step": 8871 + }, + { + "epoch": 1.39, + "grad_norm": 17.987682156107702, + "learning_rate": 1.1710826110097457e-05, + "loss": 0.5933, + "step": 8872 + }, + { + "epoch": 1.39, + "grad_norm": 10.562717508864173, + "learning_rate": 1.17091645499084e-05, + "loss": 0.5919, + "step": 8873 + }, + { + "epoch": 1.39, + "grad_norm": 18.549489103415727, + "learning_rate": 1.1707502941111704e-05, + "loss": 0.5727, + "step": 8874 + }, + { + "epoch": 1.39, + "grad_norm": 12.998997583450262, + "learning_rate": 1.1705841283754622e-05, + "loss": 0.5683, + "step": 8875 + }, + { + "epoch": 1.39, + "grad_norm": 17.374022289081182, + "learning_rate": 1.170417957788441e-05, + "loss": 0.6948, + "step": 8876 + }, + { + "epoch": 1.39, + "grad_norm": 14.439125885229366, + "learning_rate": 1.1702517823548332e-05, + "loss": 0.6025, + "step": 8877 + }, + { + "epoch": 1.39, + "grad_norm": 21.614318817235965, + "learning_rate": 1.1700856020793639e-05, + "loss": 0.5508, + "step": 8878 + }, + { + "epoch": 1.39, + "grad_norm": 15.54863954143879, + "learning_rate": 1.1699194169667598e-05, + "loss": 0.5586, + "step": 8879 + }, + { + "epoch": 1.39, + "grad_norm": 14.1742295062101, + "learning_rate": 1.1697532270217466e-05, + "loss": 0.6709, + "step": 8880 + }, + { + "epoch": 1.39, + "grad_norm": 19.145459674676427, + "learning_rate": 1.1695870322490512e-05, + "loss": 0.5692, + "step": 8881 + }, + { + "epoch": 1.39, + "grad_norm": 15.531136037419943, + "learning_rate": 1.1694208326533997e-05, + "loss": 0.6319, + "step": 8882 + }, + { + "epoch": 1.39, + "grad_norm": 16.172624563730935, + "learning_rate": 1.169254628239519e-05, + "loss": 0.5992, + "step": 8883 + }, + { + "epoch": 1.39, + "grad_norm": 18.61402977301959, + "learning_rate": 1.1690884190121356e-05, + "loss": 0.6144, + "step": 8884 + }, + { + "epoch": 1.39, + "grad_norm": 20.10082069893346, + "learning_rate": 1.1689222049759765e-05, + "loss": 0.6038, + "step": 8885 + }, + { + "epoch": 1.39, + "grad_norm": 16.112785282709446, + "learning_rate": 1.1687559861357685e-05, + "loss": 0.6546, + "step": 8886 + }, + { + "epoch": 1.39, + "grad_norm": 12.74565906233532, + "learning_rate": 1.1685897624962392e-05, + "loss": 0.5352, + "step": 8887 + }, + { + "epoch": 1.39, + "grad_norm": 18.639195117563393, + "learning_rate": 1.1684235340621155e-05, + "loss": 0.6038, + "step": 8888 + }, + { + "epoch": 1.39, + "grad_norm": 17.48490717796426, + "learning_rate": 1.1682573008381252e-05, + "loss": 0.6278, + "step": 8889 + }, + { + "epoch": 1.39, + "grad_norm": 19.658684348570834, + "learning_rate": 1.1680910628289956e-05, + "loss": 0.6125, + "step": 8890 + }, + { + "epoch": 1.39, + "grad_norm": 19.835226346700075, + "learning_rate": 1.1679248200394546e-05, + "loss": 0.7187, + "step": 8891 + }, + { + "epoch": 1.39, + "grad_norm": 17.99453620761519, + "learning_rate": 
1.1677585724742298e-05, + "loss": 0.6022, + "step": 8892 + }, + { + "epoch": 1.39, + "grad_norm": 22.909605502560105, + "learning_rate": 1.1675923201380493e-05, + "loss": 0.622, + "step": 8893 + }, + { + "epoch": 1.39, + "grad_norm": 14.935382409610092, + "learning_rate": 1.1674260630356415e-05, + "loss": 0.5615, + "step": 8894 + }, + { + "epoch": 1.39, + "grad_norm": 21.553511302307438, + "learning_rate": 1.167259801171734e-05, + "loss": 0.627, + "step": 8895 + }, + { + "epoch": 1.39, + "grad_norm": 20.566553538810293, + "learning_rate": 1.1670935345510561e-05, + "loss": 0.5479, + "step": 8896 + }, + { + "epoch": 1.39, + "grad_norm": 17.748620073649047, + "learning_rate": 1.1669272631783354e-05, + "loss": 0.5502, + "step": 8897 + }, + { + "epoch": 1.39, + "grad_norm": 18.17891408769377, + "learning_rate": 1.1667609870583012e-05, + "loss": 0.6716, + "step": 8898 + }, + { + "epoch": 1.39, + "grad_norm": 21.30397120615585, + "learning_rate": 1.1665947061956821e-05, + "loss": 0.6013, + "step": 8899 + }, + { + "epoch": 1.39, + "grad_norm": 44.10363793773203, + "learning_rate": 1.166428420595207e-05, + "loss": 0.6198, + "step": 8900 + }, + { + "epoch": 1.39, + "grad_norm": 19.83430853108769, + "learning_rate": 1.166262130261605e-05, + "loss": 0.5914, + "step": 8901 + }, + { + "epoch": 1.39, + "grad_norm": 14.042748495398728, + "learning_rate": 1.1660958351996051e-05, + "loss": 0.6349, + "step": 8902 + }, + { + "epoch": 1.39, + "grad_norm": 16.83199374171938, + "learning_rate": 1.1659295354139368e-05, + "loss": 0.5785, + "step": 8903 + }, + { + "epoch": 1.39, + "grad_norm": 15.144907496287269, + "learning_rate": 1.1657632309093301e-05, + "loss": 0.6123, + "step": 8904 + }, + { + "epoch": 1.39, + "grad_norm": 18.796133662545763, + "learning_rate": 1.1655969216905134e-05, + "loss": 0.6385, + "step": 8905 + }, + { + "epoch": 1.39, + "grad_norm": 15.98031227577902, + "learning_rate": 1.1654306077622176e-05, + "loss": 0.5588, + "step": 8906 + }, + { + "epoch": 1.39, + "grad_norm": 14.62861061204906, + "learning_rate": 1.1652642891291717e-05, + "loss": 0.5689, + "step": 8907 + }, + { + "epoch": 1.39, + "grad_norm": 19.20944149014515, + "learning_rate": 1.1650979657961063e-05, + "loss": 0.6959, + "step": 8908 + }, + { + "epoch": 1.39, + "grad_norm": 19.67912927951192, + "learning_rate": 1.1649316377677513e-05, + "loss": 0.6151, + "step": 8909 + }, + { + "epoch": 1.39, + "grad_norm": 15.476384363588675, + "learning_rate": 1.164765305048837e-05, + "loss": 0.5469, + "step": 8910 + }, + { + "epoch": 1.39, + "grad_norm": 23.744040991437682, + "learning_rate": 1.1645989676440938e-05, + "loss": 0.6541, + "step": 8911 + }, + { + "epoch": 1.39, + "grad_norm": 28.569482728186248, + "learning_rate": 1.1644326255582523e-05, + "loss": 0.6799, + "step": 8912 + }, + { + "epoch": 1.39, + "grad_norm": 22.658019062560932, + "learning_rate": 1.164266278796043e-05, + "loss": 0.6158, + "step": 8913 + }, + { + "epoch": 1.39, + "grad_norm": 12.828817107270327, + "learning_rate": 1.1640999273621969e-05, + "loss": 0.5203, + "step": 8914 + }, + { + "epoch": 1.39, + "grad_norm": 16.081379738490785, + "learning_rate": 1.1639335712614451e-05, + "loss": 0.5795, + "step": 8915 + }, + { + "epoch": 1.39, + "grad_norm": 19.46435336575244, + "learning_rate": 1.1637672104985182e-05, + "loss": 0.6023, + "step": 8916 + }, + { + "epoch": 1.39, + "grad_norm": 24.036083264315902, + "learning_rate": 1.1636008450781475e-05, + "loss": 0.597, + "step": 8917 + }, + { + "epoch": 1.39, + "grad_norm": 18.408367795334165, + "learning_rate": 
1.1634344750050648e-05, + "loss": 0.6426, + "step": 8918 + }, + { + "epoch": 1.39, + "grad_norm": 19.13800700053045, + "learning_rate": 1.163268100284001e-05, + "loss": 0.5833, + "step": 8919 + }, + { + "epoch": 1.39, + "grad_norm": 14.642906726672628, + "learning_rate": 1.1631017209196878e-05, + "loss": 0.5988, + "step": 8920 + }, + { + "epoch": 1.39, + "grad_norm": 35.048421595169614, + "learning_rate": 1.1629353369168574e-05, + "loss": 0.6942, + "step": 8921 + }, + { + "epoch": 1.39, + "grad_norm": 16.33704772617256, + "learning_rate": 1.162768948280241e-05, + "loss": 0.5742, + "step": 8922 + }, + { + "epoch": 1.39, + "grad_norm": 19.085674109185405, + "learning_rate": 1.1626025550145714e-05, + "loss": 0.5937, + "step": 8923 + }, + { + "epoch": 1.39, + "grad_norm": 20.09362559480258, + "learning_rate": 1.16243615712458e-05, + "loss": 0.7127, + "step": 8924 + }, + { + "epoch": 1.39, + "grad_norm": 21.298571636574305, + "learning_rate": 1.1622697546149992e-05, + "loss": 0.5892, + "step": 8925 + }, + { + "epoch": 1.39, + "grad_norm": 17.749609536590906, + "learning_rate": 1.1621033474905617e-05, + "loss": 0.6755, + "step": 8926 + }, + { + "epoch": 1.39, + "grad_norm": 13.381152615711747, + "learning_rate": 1.1619369357559998e-05, + "loss": 0.533, + "step": 8927 + }, + { + "epoch": 1.39, + "grad_norm": 17.306365367498675, + "learning_rate": 1.161770519416046e-05, + "loss": 0.5943, + "step": 8928 + }, + { + "epoch": 1.39, + "grad_norm": 18.434557268926508, + "learning_rate": 1.1616040984754333e-05, + "loss": 0.6459, + "step": 8929 + }, + { + "epoch": 1.39, + "grad_norm": 16.75157432399176, + "learning_rate": 1.1614376729388946e-05, + "loss": 0.5812, + "step": 8930 + }, + { + "epoch": 1.4, + "grad_norm": 12.785748338378092, + "learning_rate": 1.161271242811163e-05, + "loss": 0.5867, + "step": 8931 + }, + { + "epoch": 1.4, + "grad_norm": 16.899215954112893, + "learning_rate": 1.1611048080969715e-05, + "loss": 0.5809, + "step": 8932 + }, + { + "epoch": 1.4, + "grad_norm": 15.941007743290774, + "learning_rate": 1.1609383688010536e-05, + "loss": 0.5935, + "step": 8933 + }, + { + "epoch": 1.4, + "grad_norm": 24.552889559950803, + "learning_rate": 1.1607719249281426e-05, + "loss": 0.6503, + "step": 8934 + }, + { + "epoch": 1.4, + "grad_norm": 26.732238530327162, + "learning_rate": 1.1606054764829723e-05, + "loss": 0.668, + "step": 8935 + }, + { + "epoch": 1.4, + "grad_norm": 18.736266633633413, + "learning_rate": 1.1604390234702758e-05, + "loss": 0.5471, + "step": 8936 + }, + { + "epoch": 1.4, + "grad_norm": 17.658529238310255, + "learning_rate": 1.1602725658947873e-05, + "loss": 0.5252, + "step": 8937 + }, + { + "epoch": 1.4, + "grad_norm": 37.67316527591635, + "learning_rate": 1.1601061037612413e-05, + "loss": 0.5912, + "step": 8938 + }, + { + "epoch": 1.4, + "grad_norm": 20.72212938866781, + "learning_rate": 1.1599396370743707e-05, + "loss": 0.6876, + "step": 8939 + }, + { + "epoch": 1.4, + "grad_norm": 12.025153776868871, + "learning_rate": 1.159773165838911e-05, + "loss": 0.5517, + "step": 8940 + }, + { + "epoch": 1.4, + "grad_norm": 26.624871202166297, + "learning_rate": 1.1596066900595954e-05, + "loss": 0.6611, + "step": 8941 + }, + { + "epoch": 1.4, + "grad_norm": 17.767632991852402, + "learning_rate": 1.1594402097411595e-05, + "loss": 0.6476, + "step": 8942 + }, + { + "epoch": 1.4, + "grad_norm": 24.77659046484016, + "learning_rate": 1.159273724888337e-05, + "loss": 0.6358, + "step": 8943 + }, + { + "epoch": 1.4, + "grad_norm": 23.103510608716086, + "learning_rate": 1.1591072355058629e-05, 
+ "loss": 0.6641, + "step": 8944 + }, + { + "epoch": 1.4, + "grad_norm": 16.655814361064117, + "learning_rate": 1.1589407415984721e-05, + "loss": 0.6319, + "step": 8945 + }, + { + "epoch": 1.4, + "grad_norm": 15.430856029348964, + "learning_rate": 1.1587742431708997e-05, + "loss": 0.5775, + "step": 8946 + }, + { + "epoch": 1.4, + "grad_norm": 20.641534294354134, + "learning_rate": 1.1586077402278804e-05, + "loss": 0.5636, + "step": 8947 + }, + { + "epoch": 1.4, + "grad_norm": 14.833329482247713, + "learning_rate": 1.1584412327741501e-05, + "loss": 0.6544, + "step": 8948 + }, + { + "epoch": 1.4, + "grad_norm": 31.28449221794735, + "learning_rate": 1.1582747208144435e-05, + "loss": 0.612, + "step": 8949 + }, + { + "epoch": 1.4, + "grad_norm": 15.982731508595581, + "learning_rate": 1.1581082043534968e-05, + "loss": 0.6152, + "step": 8950 + }, + { + "epoch": 1.4, + "grad_norm": 26.69315728320189, + "learning_rate": 1.157941683396045e-05, + "loss": 0.6447, + "step": 8951 + }, + { + "epoch": 1.4, + "grad_norm": 17.134849385511526, + "learning_rate": 1.1577751579468242e-05, + "loss": 0.6076, + "step": 8952 + }, + { + "epoch": 1.4, + "grad_norm": 25.03793340984905, + "learning_rate": 1.1576086280105702e-05, + "loss": 0.6063, + "step": 8953 + }, + { + "epoch": 1.4, + "grad_norm": 17.249844101619445, + "learning_rate": 1.1574420935920192e-05, + "loss": 0.6639, + "step": 8954 + }, + { + "epoch": 1.4, + "grad_norm": 14.4426587226952, + "learning_rate": 1.157275554695907e-05, + "loss": 0.6062, + "step": 8955 + }, + { + "epoch": 1.4, + "grad_norm": 16.042862773607467, + "learning_rate": 1.1571090113269697e-05, + "loss": 0.6121, + "step": 8956 + }, + { + "epoch": 1.4, + "grad_norm": 16.273812716384683, + "learning_rate": 1.1569424634899441e-05, + "loss": 0.6205, + "step": 8957 + }, + { + "epoch": 1.4, + "grad_norm": 18.980340108535618, + "learning_rate": 1.156775911189567e-05, + "loss": 0.5797, + "step": 8958 + }, + { + "epoch": 1.4, + "grad_norm": 14.760529708721084, + "learning_rate": 1.1566093544305747e-05, + "loss": 0.6132, + "step": 8959 + }, + { + "epoch": 1.4, + "grad_norm": 20.046573704293593, + "learning_rate": 1.1564427932177037e-05, + "loss": 0.6122, + "step": 8960 + }, + { + "epoch": 1.4, + "grad_norm": 17.435986591621763, + "learning_rate": 1.1562762275556916e-05, + "loss": 0.615, + "step": 8961 + }, + { + "epoch": 1.4, + "grad_norm": 20.714940123573943, + "learning_rate": 1.1561096574492745e-05, + "loss": 0.5803, + "step": 8962 + }, + { + "epoch": 1.4, + "grad_norm": 12.705839174480742, + "learning_rate": 1.1559430829031905e-05, + "loss": 0.5994, + "step": 8963 + }, + { + "epoch": 1.4, + "grad_norm": 14.009704724260164, + "learning_rate": 1.155776503922176e-05, + "loss": 0.5652, + "step": 8964 + }, + { + "epoch": 1.4, + "grad_norm": 17.796618555212422, + "learning_rate": 1.1556099205109694e-05, + "loss": 0.5592, + "step": 8965 + }, + { + "epoch": 1.4, + "grad_norm": 17.947415085453642, + "learning_rate": 1.155443332674307e-05, + "loss": 0.5901, + "step": 8966 + }, + { + "epoch": 1.4, + "grad_norm": 12.57799767968796, + "learning_rate": 1.1552767404169281e-05, + "loss": 0.5742, + "step": 8967 + }, + { + "epoch": 1.4, + "grad_norm": 15.846545519231139, + "learning_rate": 1.1551101437435686e-05, + "loss": 0.5587, + "step": 8968 + }, + { + "epoch": 1.4, + "grad_norm": 19.203446933385028, + "learning_rate": 1.1549435426589678e-05, + "loss": 0.6159, + "step": 8969 + }, + { + "epoch": 1.4, + "grad_norm": 17.861264901860107, + "learning_rate": 1.1547769371678632e-05, + "loss": 0.6122, + "step": 
8970 + }, + { + "epoch": 1.4, + "grad_norm": 27.995382434851084, + "learning_rate": 1.1546103272749931e-05, + "loss": 0.5866, + "step": 8971 + }, + { + "epoch": 1.4, + "grad_norm": 21.95391270082982, + "learning_rate": 1.1544437129850956e-05, + "loss": 0.515, + "step": 8972 + }, + { + "epoch": 1.4, + "grad_norm": 15.54622634895558, + "learning_rate": 1.1542770943029096e-05, + "loss": 0.5953, + "step": 8973 + }, + { + "epoch": 1.4, + "grad_norm": 17.626418754952542, + "learning_rate": 1.1541104712331728e-05, + "loss": 0.6332, + "step": 8974 + }, + { + "epoch": 1.4, + "grad_norm": 20.772675761717576, + "learning_rate": 1.1539438437806245e-05, + "loss": 0.7193, + "step": 8975 + }, + { + "epoch": 1.4, + "grad_norm": 14.878241759765308, + "learning_rate": 1.1537772119500035e-05, + "loss": 0.6371, + "step": 8976 + }, + { + "epoch": 1.4, + "grad_norm": 19.42295789116523, + "learning_rate": 1.1536105757460485e-05, + "loss": 0.5887, + "step": 8977 + }, + { + "epoch": 1.4, + "grad_norm": 16.367424480206616, + "learning_rate": 1.1534439351734986e-05, + "loss": 0.6164, + "step": 8978 + }, + { + "epoch": 1.4, + "grad_norm": 14.34381620775164, + "learning_rate": 1.1532772902370929e-05, + "loss": 0.5721, + "step": 8979 + }, + { + "epoch": 1.4, + "grad_norm": 16.17088806577735, + "learning_rate": 1.1531106409415706e-05, + "loss": 0.6079, + "step": 8980 + }, + { + "epoch": 1.4, + "grad_norm": 25.690804804924554, + "learning_rate": 1.1529439872916712e-05, + "loss": 0.6134, + "step": 8981 + }, + { + "epoch": 1.4, + "grad_norm": 16.90920049372513, + "learning_rate": 1.1527773292921347e-05, + "loss": 0.5996, + "step": 8982 + }, + { + "epoch": 1.4, + "grad_norm": 21.57414107107544, + "learning_rate": 1.1526106669476999e-05, + "loss": 0.6256, + "step": 8983 + }, + { + "epoch": 1.4, + "grad_norm": 13.237908356120657, + "learning_rate": 1.1524440002631073e-05, + "loss": 0.6122, + "step": 8984 + }, + { + "epoch": 1.4, + "grad_norm": 16.84641441723242, + "learning_rate": 1.1522773292430963e-05, + "loss": 0.6135, + "step": 8985 + }, + { + "epoch": 1.4, + "grad_norm": 22.795206548805538, + "learning_rate": 1.1521106538924071e-05, + "loss": 0.6405, + "step": 8986 + }, + { + "epoch": 1.4, + "grad_norm": 15.33282839749868, + "learning_rate": 1.15194397421578e-05, + "loss": 0.5875, + "step": 8987 + }, + { + "epoch": 1.4, + "grad_norm": 21.667473735969466, + "learning_rate": 1.1517772902179554e-05, + "loss": 0.6352, + "step": 8988 + }, + { + "epoch": 1.4, + "grad_norm": 15.271874362929152, + "learning_rate": 1.1516106019036733e-05, + "loss": 0.5384, + "step": 8989 + }, + { + "epoch": 1.4, + "grad_norm": 15.605000335146926, + "learning_rate": 1.1514439092776741e-05, + "loss": 0.6122, + "step": 8990 + }, + { + "epoch": 1.4, + "grad_norm": 16.496008624354705, + "learning_rate": 1.1512772123446988e-05, + "loss": 0.5473, + "step": 8991 + }, + { + "epoch": 1.4, + "grad_norm": 21.405462524102404, + "learning_rate": 1.1511105111094884e-05, + "loss": 0.6454, + "step": 8992 + }, + { + "epoch": 1.4, + "grad_norm": 12.47929706894094, + "learning_rate": 1.150943805576783e-05, + "loss": 0.5362, + "step": 8993 + }, + { + "epoch": 1.4, + "grad_norm": 13.107262922064121, + "learning_rate": 1.1507770957513245e-05, + "loss": 0.6412, + "step": 8994 + }, + { + "epoch": 1.41, + "grad_norm": 19.85739110563627, + "learning_rate": 1.1506103816378533e-05, + "loss": 0.6541, + "step": 8995 + }, + { + "epoch": 1.41, + "grad_norm": 19.063767656794987, + "learning_rate": 1.1504436632411112e-05, + "loss": 0.5636, + "step": 8996 + }, + { + "epoch": 1.41, 
+ "grad_norm": 15.551905877143591, + "learning_rate": 1.150276940565839e-05, + "loss": 0.5732, + "step": 8997 + }, + { + "epoch": 1.41, + "grad_norm": 19.483792454592585, + "learning_rate": 1.1501102136167788e-05, + "loss": 0.6391, + "step": 8998 + }, + { + "epoch": 1.41, + "grad_norm": 21.324170060319688, + "learning_rate": 1.1499434823986719e-05, + "loss": 0.6567, + "step": 8999 + }, + { + "epoch": 1.41, + "grad_norm": 15.33599461930103, + "learning_rate": 1.1497767469162598e-05, + "loss": 0.5386, + "step": 9000 + }, + { + "epoch": 1.41, + "grad_norm": 18.664917872369813, + "learning_rate": 1.1496100071742849e-05, + "loss": 0.5914, + "step": 9001 + }, + { + "epoch": 1.41, + "grad_norm": 23.243480797696296, + "learning_rate": 1.1494432631774888e-05, + "loss": 0.5619, + "step": 9002 + }, + { + "epoch": 1.41, + "grad_norm": 12.05223211151562, + "learning_rate": 1.1492765149306137e-05, + "loss": 0.5855, + "step": 9003 + }, + { + "epoch": 1.41, + "grad_norm": 17.276544287648605, + "learning_rate": 1.149109762438402e-05, + "loss": 0.5184, + "step": 9004 + }, + { + "epoch": 1.41, + "grad_norm": 23.335126113067542, + "learning_rate": 1.1489430057055959e-05, + "loss": 0.5605, + "step": 9005 + }, + { + "epoch": 1.41, + "grad_norm": 35.99915808737943, + "learning_rate": 1.1487762447369375e-05, + "loss": 0.6487, + "step": 9006 + }, + { + "epoch": 1.41, + "grad_norm": 25.421618981411598, + "learning_rate": 1.1486094795371703e-05, + "loss": 0.6516, + "step": 9007 + }, + { + "epoch": 1.41, + "grad_norm": 17.423986668758584, + "learning_rate": 1.1484427101110359e-05, + "loss": 0.6055, + "step": 9008 + }, + { + "epoch": 1.41, + "grad_norm": 16.111500551723328, + "learning_rate": 1.148275936463278e-05, + "loss": 0.6073, + "step": 9009 + }, + { + "epoch": 1.41, + "grad_norm": 17.382029610480792, + "learning_rate": 1.148109158598639e-05, + "loss": 0.6652, + "step": 9010 + }, + { + "epoch": 1.41, + "grad_norm": 22.41212379704429, + "learning_rate": 1.1479423765218625e-05, + "loss": 0.6017, + "step": 9011 + }, + { + "epoch": 1.41, + "grad_norm": 15.243962914789135, + "learning_rate": 1.1477755902376908e-05, + "loss": 0.5556, + "step": 9012 + }, + { + "epoch": 1.41, + "grad_norm": 17.75227774273833, + "learning_rate": 1.147608799750868e-05, + "loss": 0.6113, + "step": 9013 + }, + { + "epoch": 1.41, + "grad_norm": 20.742528265328865, + "learning_rate": 1.1474420050661374e-05, + "loss": 0.6155, + "step": 9014 + }, + { + "epoch": 1.41, + "grad_norm": 16.653549596306117, + "learning_rate": 1.1472752061882426e-05, + "loss": 0.5651, + "step": 9015 + }, + { + "epoch": 1.41, + "grad_norm": 16.244776740031117, + "learning_rate": 1.1471084031219268e-05, + "loss": 0.5875, + "step": 9016 + }, + { + "epoch": 1.41, + "grad_norm": 18.00375170692068, + "learning_rate": 1.146941595871934e-05, + "loss": 0.5834, + "step": 9017 + }, + { + "epoch": 1.41, + "grad_norm": 19.79248906652147, + "learning_rate": 1.1467747844430082e-05, + "loss": 0.5934, + "step": 9018 + }, + { + "epoch": 1.41, + "grad_norm": 20.18208711647645, + "learning_rate": 1.1466079688398937e-05, + "loss": 0.5653, + "step": 9019 + }, + { + "epoch": 1.41, + "grad_norm": 15.989205817157451, + "learning_rate": 1.146441149067334e-05, + "loss": 0.6278, + "step": 9020 + }, + { + "epoch": 1.41, + "grad_norm": 21.380178522762158, + "learning_rate": 1.1462743251300739e-05, + "loss": 0.7111, + "step": 9021 + }, + { + "epoch": 1.41, + "grad_norm": 13.215860721788788, + "learning_rate": 1.1461074970328574e-05, + "loss": 0.5509, + "step": 9022 + }, + { + "epoch": 1.41, + 
"grad_norm": 21.994802569519802, + "learning_rate": 1.1459406647804295e-05, + "loss": 0.6439, + "step": 9023 + }, + { + "epoch": 1.41, + "grad_norm": 17.916998712009644, + "learning_rate": 1.1457738283775339e-05, + "loss": 0.5621, + "step": 9024 + }, + { + "epoch": 1.41, + "grad_norm": 16.976651007592288, + "learning_rate": 1.1456069878289161e-05, + "loss": 0.6414, + "step": 9025 + }, + { + "epoch": 1.41, + "grad_norm": 17.180718418326556, + "learning_rate": 1.1454401431393212e-05, + "loss": 0.5068, + "step": 9026 + }, + { + "epoch": 1.41, + "grad_norm": 28.58602278530663, + "learning_rate": 1.145273294313493e-05, + "loss": 0.5983, + "step": 9027 + }, + { + "epoch": 1.41, + "grad_norm": 26.57496777821415, + "learning_rate": 1.1451064413561776e-05, + "loss": 0.6932, + "step": 9028 + }, + { + "epoch": 1.41, + "grad_norm": 17.214888109323557, + "learning_rate": 1.1449395842721201e-05, + "loss": 0.5989, + "step": 9029 + }, + { + "epoch": 1.41, + "grad_norm": 21.505302495435384, + "learning_rate": 1.1447727230660654e-05, + "loss": 0.6125, + "step": 9030 + }, + { + "epoch": 1.41, + "grad_norm": 22.371601697784598, + "learning_rate": 1.1446058577427592e-05, + "loss": 0.5455, + "step": 9031 + }, + { + "epoch": 1.41, + "grad_norm": 31.773109916459667, + "learning_rate": 1.1444389883069471e-05, + "loss": 0.6116, + "step": 9032 + }, + { + "epoch": 1.41, + "grad_norm": 18.058310238120455, + "learning_rate": 1.1442721147633744e-05, + "loss": 0.6278, + "step": 9033 + }, + { + "epoch": 1.41, + "grad_norm": 19.038650264041706, + "learning_rate": 1.1441052371167876e-05, + "loss": 0.6426, + "step": 9034 + }, + { + "epoch": 1.41, + "grad_norm": 17.568615878545533, + "learning_rate": 1.1439383553719315e-05, + "loss": 0.5955, + "step": 9035 + }, + { + "epoch": 1.41, + "grad_norm": 14.927676643839913, + "learning_rate": 1.1437714695335534e-05, + "loss": 0.5605, + "step": 9036 + }, + { + "epoch": 1.41, + "grad_norm": 24.555699644064862, + "learning_rate": 1.1436045796063983e-05, + "loss": 0.6343, + "step": 9037 + }, + { + "epoch": 1.41, + "grad_norm": 19.433361387157586, + "learning_rate": 1.1434376855952136e-05, + "loss": 0.5932, + "step": 9038 + }, + { + "epoch": 1.41, + "grad_norm": 18.801446360782016, + "learning_rate": 1.1432707875047445e-05, + "loss": 0.6008, + "step": 9039 + }, + { + "epoch": 1.41, + "grad_norm": 12.820679051484227, + "learning_rate": 1.143103885339738e-05, + "loss": 0.6079, + "step": 9040 + }, + { + "epoch": 1.41, + "grad_norm": 17.998013283692515, + "learning_rate": 1.1429369791049409e-05, + "loss": 0.6169, + "step": 9041 + }, + { + "epoch": 1.41, + "grad_norm": 17.860633692269566, + "learning_rate": 1.1427700688050998e-05, + "loss": 0.5423, + "step": 9042 + }, + { + "epoch": 1.41, + "grad_norm": 14.42527479279007, + "learning_rate": 1.1426031544449614e-05, + "loss": 0.5335, + "step": 9043 + }, + { + "epoch": 1.41, + "grad_norm": 17.489213408677326, + "learning_rate": 1.1424362360292725e-05, + "loss": 0.5786, + "step": 9044 + }, + { + "epoch": 1.41, + "grad_norm": 17.797013522957908, + "learning_rate": 1.1422693135627804e-05, + "loss": 0.5931, + "step": 9045 + }, + { + "epoch": 1.41, + "grad_norm": 22.216643216115255, + "learning_rate": 1.1421023870502324e-05, + "loss": 0.5823, + "step": 9046 + }, + { + "epoch": 1.41, + "grad_norm": 14.452861149415751, + "learning_rate": 1.1419354564963756e-05, + "loss": 0.6471, + "step": 9047 + }, + { + "epoch": 1.41, + "grad_norm": 16.712113179015002, + "learning_rate": 1.1417685219059576e-05, + "loss": 0.6075, + "step": 9048 + }, + { + "epoch": 1.41, 
+ "grad_norm": 17.09751338432782, + "learning_rate": 1.1416015832837258e-05, + "loss": 0.601, + "step": 9049 + }, + { + "epoch": 1.41, + "grad_norm": 21.08564511640922, + "learning_rate": 1.1414346406344272e-05, + "loss": 0.6791, + "step": 9050 + }, + { + "epoch": 1.41, + "grad_norm": 20.037675992504557, + "learning_rate": 1.1412676939628108e-05, + "loss": 0.6227, + "step": 9051 + }, + { + "epoch": 1.41, + "grad_norm": 19.526436426946656, + "learning_rate": 1.1411007432736236e-05, + "loss": 0.6669, + "step": 9052 + }, + { + "epoch": 1.41, + "grad_norm": 14.49169064310133, + "learning_rate": 1.1409337885716141e-05, + "loss": 0.6462, + "step": 9053 + }, + { + "epoch": 1.41, + "grad_norm": 18.349309727778657, + "learning_rate": 1.1407668298615296e-05, + "loss": 0.5469, + "step": 9054 + }, + { + "epoch": 1.41, + "grad_norm": 20.755873802426294, + "learning_rate": 1.1405998671481191e-05, + "loss": 0.6872, + "step": 9055 + }, + { + "epoch": 1.41, + "grad_norm": 14.277805061036986, + "learning_rate": 1.1404329004361306e-05, + "loss": 0.6122, + "step": 9056 + }, + { + "epoch": 1.41, + "grad_norm": 17.948319764812553, + "learning_rate": 1.1402659297303124e-05, + "loss": 0.6444, + "step": 9057 + }, + { + "epoch": 1.41, + "grad_norm": 18.131996583476063, + "learning_rate": 1.1400989550354133e-05, + "loss": 0.5869, + "step": 9058 + }, + { + "epoch": 1.42, + "grad_norm": 16.669039271476105, + "learning_rate": 1.1399319763561821e-05, + "loss": 0.6279, + "step": 9059 + }, + { + "epoch": 1.42, + "grad_norm": 24.808556724404184, + "learning_rate": 1.1397649936973672e-05, + "loss": 0.6098, + "step": 9060 + }, + { + "epoch": 1.42, + "grad_norm": 28.779127564593985, + "learning_rate": 1.1395980070637175e-05, + "loss": 0.6201, + "step": 9061 + }, + { + "epoch": 1.42, + "grad_norm": 13.949160058980471, + "learning_rate": 1.139431016459982e-05, + "loss": 0.6068, + "step": 9062 + }, + { + "epoch": 1.42, + "grad_norm": 21.81452301011464, + "learning_rate": 1.1392640218909103e-05, + "loss": 0.5838, + "step": 9063 + }, + { + "epoch": 1.42, + "grad_norm": 22.88530604372359, + "learning_rate": 1.1390970233612513e-05, + "loss": 0.5541, + "step": 9064 + }, + { + "epoch": 1.42, + "grad_norm": 16.12488839846441, + "learning_rate": 1.138930020875754e-05, + "loss": 0.6342, + "step": 9065 + }, + { + "epoch": 1.42, + "grad_norm": 17.314105152263146, + "learning_rate": 1.1387630144391685e-05, + "loss": 0.5846, + "step": 9066 + }, + { + "epoch": 1.42, + "grad_norm": 20.729774355647102, + "learning_rate": 1.138596004056244e-05, + "loss": 0.6847, + "step": 9067 + }, + { + "epoch": 1.42, + "grad_norm": 16.456214600661525, + "learning_rate": 1.1384289897317302e-05, + "loss": 0.5532, + "step": 9068 + }, + { + "epoch": 1.42, + "grad_norm": 15.074690685609944, + "learning_rate": 1.1382619714703767e-05, + "loss": 0.5645, + "step": 9069 + }, + { + "epoch": 1.42, + "grad_norm": 15.098515367923971, + "learning_rate": 1.1380949492769339e-05, + "loss": 0.5844, + "step": 9070 + }, + { + "epoch": 1.42, + "grad_norm": 13.28447161701956, + "learning_rate": 1.1379279231561514e-05, + "loss": 0.6042, + "step": 9071 + }, + { + "epoch": 1.42, + "grad_norm": 18.510734081140757, + "learning_rate": 1.1377608931127792e-05, + "loss": 0.6153, + "step": 9072 + }, + { + "epoch": 1.42, + "grad_norm": 28.839027655715256, + "learning_rate": 1.137593859151568e-05, + "loss": 0.6751, + "step": 9073 + }, + { + "epoch": 1.42, + "grad_norm": 16.811467209898392, + "learning_rate": 1.137426821277268e-05, + "loss": 0.68, + "step": 9074 + }, + { + "epoch": 1.42, + 
"grad_norm": 22.72735095959442, + "learning_rate": 1.1372597794946298e-05, + "loss": 0.6713, + "step": 9075 + }, + { + "epoch": 1.42, + "grad_norm": 20.814761549418616, + "learning_rate": 1.1370927338084035e-05, + "loss": 0.5791, + "step": 9076 + }, + { + "epoch": 1.42, + "grad_norm": 19.49466500218632, + "learning_rate": 1.1369256842233399e-05, + "loss": 0.6257, + "step": 9077 + }, + { + "epoch": 1.42, + "grad_norm": 13.443216470792667, + "learning_rate": 1.1367586307441902e-05, + "loss": 0.6048, + "step": 9078 + }, + { + "epoch": 1.42, + "grad_norm": 21.067267870846962, + "learning_rate": 1.136591573375705e-05, + "loss": 0.6292, + "step": 9079 + }, + { + "epoch": 1.42, + "grad_norm": 17.285803065725048, + "learning_rate": 1.1364245121226354e-05, + "loss": 0.6121, + "step": 9080 + }, + { + "epoch": 1.42, + "grad_norm": 14.894933058358427, + "learning_rate": 1.1362574469897322e-05, + "loss": 0.6188, + "step": 9081 + }, + { + "epoch": 1.42, + "grad_norm": 29.614628034449026, + "learning_rate": 1.136090377981747e-05, + "loss": 0.6406, + "step": 9082 + }, + { + "epoch": 1.42, + "grad_norm": 14.403628824033497, + "learning_rate": 1.1359233051034316e-05, + "loss": 0.6175, + "step": 9083 + }, + { + "epoch": 1.42, + "grad_norm": 26.84342563830639, + "learning_rate": 1.1357562283595363e-05, + "loss": 0.6301, + "step": 9084 + }, + { + "epoch": 1.42, + "grad_norm": 23.60368557270249, + "learning_rate": 1.1355891477548136e-05, + "loss": 0.6775, + "step": 9085 + }, + { + "epoch": 1.42, + "grad_norm": 23.069978213576917, + "learning_rate": 1.1354220632940147e-05, + "loss": 0.6347, + "step": 9086 + }, + { + "epoch": 1.42, + "grad_norm": 21.572896261741263, + "learning_rate": 1.1352549749818916e-05, + "loss": 0.6391, + "step": 9087 + }, + { + "epoch": 1.42, + "grad_norm": 13.69603869095432, + "learning_rate": 1.135087882823196e-05, + "loss": 0.5302, + "step": 9088 + }, + { + "epoch": 1.42, + "grad_norm": 25.01557777371735, + "learning_rate": 1.1349207868226801e-05, + "loss": 0.5949, + "step": 9089 + }, + { + "epoch": 1.42, + "grad_norm": 15.342854894218238, + "learning_rate": 1.1347536869850959e-05, + "loss": 0.7235, + "step": 9090 + }, + { + "epoch": 1.42, + "grad_norm": 21.25456287398254, + "learning_rate": 1.134586583315196e-05, + "loss": 0.6007, + "step": 9091 + }, + { + "epoch": 1.42, + "grad_norm": 19.083450642548637, + "learning_rate": 1.134419475817732e-05, + "loss": 0.638, + "step": 9092 + }, + { + "epoch": 1.42, + "grad_norm": 16.231451875127494, + "learning_rate": 1.1342523644974567e-05, + "loss": 0.632, + "step": 9093 + }, + { + "epoch": 1.42, + "grad_norm": 19.93877235301933, + "learning_rate": 1.134085249359123e-05, + "loss": 0.6454, + "step": 9094 + }, + { + "epoch": 1.42, + "grad_norm": 22.033638809775347, + "learning_rate": 1.1339181304074833e-05, + "loss": 0.647, + "step": 9095 + }, + { + "epoch": 1.42, + "grad_norm": 29.423520161219532, + "learning_rate": 1.13375100764729e-05, + "loss": 0.6064, + "step": 9096 + }, + { + "epoch": 1.42, + "grad_norm": 28.54665340897153, + "learning_rate": 1.1335838810832966e-05, + "loss": 0.7019, + "step": 9097 + }, + { + "epoch": 1.42, + "grad_norm": 22.72381920385396, + "learning_rate": 1.1334167507202553e-05, + "loss": 0.5706, + "step": 9098 + }, + { + "epoch": 1.42, + "grad_norm": 38.26470266899626, + "learning_rate": 1.1332496165629201e-05, + "loss": 0.6931, + "step": 9099 + }, + { + "epoch": 1.42, + "grad_norm": 17.353207287815692, + "learning_rate": 1.1330824786160437e-05, + "loss": 0.6489, + "step": 9100 + }, + { + "epoch": 1.42, + "grad_norm": 
20.343490737370498, + "learning_rate": 1.1329153368843793e-05, + "loss": 0.6839, + "step": 9101 + }, + { + "epoch": 1.42, + "grad_norm": 21.03310480837816, + "learning_rate": 1.1327481913726806e-05, + "loss": 0.6535, + "step": 9102 + }, + { + "epoch": 1.42, + "grad_norm": 12.648559477832315, + "learning_rate": 1.1325810420857009e-05, + "loss": 0.617, + "step": 9103 + }, + { + "epoch": 1.42, + "grad_norm": 13.355797718121442, + "learning_rate": 1.1324138890281937e-05, + "loss": 0.6064, + "step": 9104 + }, + { + "epoch": 1.42, + "grad_norm": 22.46330109342206, + "learning_rate": 1.1322467322049135e-05, + "loss": 0.5981, + "step": 9105 + }, + { + "epoch": 1.42, + "grad_norm": 14.75941963592894, + "learning_rate": 1.1320795716206132e-05, + "loss": 0.7807, + "step": 9106 + }, + { + "epoch": 1.42, + "grad_norm": 21.167837482896456, + "learning_rate": 1.1319124072800471e-05, + "loss": 0.5619, + "step": 9107 + }, + { + "epoch": 1.42, + "grad_norm": 34.71152468366489, + "learning_rate": 1.1317452391879695e-05, + "loss": 0.5719, + "step": 9108 + }, + { + "epoch": 1.42, + "grad_norm": 14.4066642659776, + "learning_rate": 1.1315780673491344e-05, + "loss": 0.6372, + "step": 9109 + }, + { + "epoch": 1.42, + "grad_norm": 16.55319533344844, + "learning_rate": 1.131410891768296e-05, + "loss": 0.6287, + "step": 9110 + }, + { + "epoch": 1.42, + "grad_norm": 26.391365222701964, + "learning_rate": 1.1312437124502086e-05, + "loss": 0.6055, + "step": 9111 + }, + { + "epoch": 1.42, + "grad_norm": 19.06958664821942, + "learning_rate": 1.131076529399627e-05, + "loss": 0.5371, + "step": 9112 + }, + { + "epoch": 1.42, + "grad_norm": 17.792279032666656, + "learning_rate": 1.1309093426213051e-05, + "loss": 0.6333, + "step": 9113 + }, + { + "epoch": 1.42, + "grad_norm": 23.371635136812454, + "learning_rate": 1.1307421521199988e-05, + "loss": 0.6905, + "step": 9114 + }, + { + "epoch": 1.42, + "grad_norm": 19.001999132647843, + "learning_rate": 1.1305749579004618e-05, + "loss": 0.5812, + "step": 9115 + }, + { + "epoch": 1.42, + "grad_norm": 15.828004745260056, + "learning_rate": 1.1304077599674493e-05, + "loss": 0.6056, + "step": 9116 + }, + { + "epoch": 1.42, + "grad_norm": 19.639540626435203, + "learning_rate": 1.1302405583257163e-05, + "loss": 0.6281, + "step": 9117 + }, + { + "epoch": 1.42, + "grad_norm": 19.16487097871779, + "learning_rate": 1.1300733529800183e-05, + "loss": 0.5788, + "step": 9118 + }, + { + "epoch": 1.42, + "grad_norm": 23.968955207577906, + "learning_rate": 1.1299061439351102e-05, + "loss": 0.6387, + "step": 9119 + }, + { + "epoch": 1.42, + "grad_norm": 14.807406211577261, + "learning_rate": 1.1297389311957472e-05, + "loss": 0.5559, + "step": 9120 + }, + { + "epoch": 1.42, + "grad_norm": 18.176759074569787, + "learning_rate": 1.1295717147666848e-05, + "loss": 0.6406, + "step": 9121 + }, + { + "epoch": 1.42, + "grad_norm": 14.275687869233185, + "learning_rate": 1.129404494652679e-05, + "loss": 0.6397, + "step": 9122 + }, + { + "epoch": 1.43, + "grad_norm": 20.290152658163905, + "learning_rate": 1.1292372708584847e-05, + "loss": 0.7365, + "step": 9123 + }, + { + "epoch": 1.43, + "grad_norm": 14.600511221700568, + "learning_rate": 1.1290700433888584e-05, + "loss": 0.6836, + "step": 9124 + }, + { + "epoch": 1.43, + "grad_norm": 19.36856989679862, + "learning_rate": 1.1289028122485553e-05, + "loss": 0.6109, + "step": 9125 + }, + { + "epoch": 1.43, + "grad_norm": 18.96650136820152, + "learning_rate": 1.1287355774423316e-05, + "loss": 0.6098, + "step": 9126 + }, + { + "epoch": 1.43, + "grad_norm": 
21.440399361721234, + "learning_rate": 1.1285683389749434e-05, + "loss": 0.6564, + "step": 9127 + }, + { + "epoch": 1.43, + "grad_norm": 18.37273353754732, + "learning_rate": 1.128401096851147e-05, + "loss": 0.6347, + "step": 9128 + }, + { + "epoch": 1.43, + "grad_norm": 28.22226318455248, + "learning_rate": 1.1282338510756985e-05, + "loss": 0.6322, + "step": 9129 + }, + { + "epoch": 1.43, + "grad_norm": 14.428904535895661, + "learning_rate": 1.1280666016533542e-05, + "loss": 0.5889, + "step": 9130 + }, + { + "epoch": 1.43, + "grad_norm": 16.379043486184198, + "learning_rate": 1.1278993485888704e-05, + "loss": 0.6049, + "step": 9131 + }, + { + "epoch": 1.43, + "grad_norm": 15.240538401824047, + "learning_rate": 1.1277320918870043e-05, + "loss": 0.617, + "step": 9132 + }, + { + "epoch": 1.43, + "grad_norm": 20.327043346213088, + "learning_rate": 1.1275648315525123e-05, + "loss": 0.6593, + "step": 9133 + }, + { + "epoch": 1.43, + "grad_norm": 18.678445781150614, + "learning_rate": 1.127397567590151e-05, + "loss": 0.6154, + "step": 9134 + }, + { + "epoch": 1.43, + "grad_norm": 20.86344744757711, + "learning_rate": 1.1272303000046774e-05, + "loss": 0.6913, + "step": 9135 + }, + { + "epoch": 1.43, + "grad_norm": 16.504715011914662, + "learning_rate": 1.1270630288008484e-05, + "loss": 0.6695, + "step": 9136 + }, + { + "epoch": 1.43, + "grad_norm": 15.092482862634203, + "learning_rate": 1.1268957539834213e-05, + "loss": 0.6672, + "step": 9137 + }, + { + "epoch": 1.43, + "grad_norm": 18.48369333117928, + "learning_rate": 1.1267284755571528e-05, + "loss": 0.5981, + "step": 9138 + }, + { + "epoch": 1.43, + "grad_norm": 15.308538836365852, + "learning_rate": 1.1265611935268013e-05, + "loss": 0.5965, + "step": 9139 + }, + { + "epoch": 1.43, + "grad_norm": 14.073746797439478, + "learning_rate": 1.1263939078971229e-05, + "loss": 0.6243, + "step": 9140 + }, + { + "epoch": 1.43, + "grad_norm": 21.810150379953935, + "learning_rate": 1.1262266186728763e-05, + "loss": 0.6773, + "step": 9141 + }, + { + "epoch": 1.43, + "grad_norm": 13.019139879984305, + "learning_rate": 1.1260593258588179e-05, + "loss": 0.5797, + "step": 9142 + }, + { + "epoch": 1.43, + "grad_norm": 14.68618465276088, + "learning_rate": 1.1258920294597063e-05, + "loss": 0.5153, + "step": 9143 + }, + { + "epoch": 1.43, + "grad_norm": 21.780031276852068, + "learning_rate": 1.1257247294802988e-05, + "loss": 0.5815, + "step": 9144 + }, + { + "epoch": 1.43, + "grad_norm": 15.872738059414592, + "learning_rate": 1.125557425925354e-05, + "loss": 0.5487, + "step": 9145 + }, + { + "epoch": 1.43, + "grad_norm": 22.979703139241888, + "learning_rate": 1.125390118799629e-05, + "loss": 0.6997, + "step": 9146 + }, + { + "epoch": 1.43, + "grad_norm": 20.524404827507347, + "learning_rate": 1.1252228081078826e-05, + "loss": 0.5704, + "step": 9147 + }, + { + "epoch": 1.43, + "grad_norm": 16.949117590326956, + "learning_rate": 1.1250554938548726e-05, + "loss": 0.6567, + "step": 9148 + }, + { + "epoch": 1.43, + "grad_norm": 22.889068687313657, + "learning_rate": 1.1248881760453578e-05, + "loss": 0.6515, + "step": 9149 + }, + { + "epoch": 1.43, + "grad_norm": 16.429801151464815, + "learning_rate": 1.124720854684096e-05, + "loss": 0.6001, + "step": 9150 + }, + { + "epoch": 1.43, + "grad_norm": 13.819143058741489, + "learning_rate": 1.1245535297758463e-05, + "loss": 0.5381, + "step": 9151 + }, + { + "epoch": 1.43, + "grad_norm": 15.710729237204703, + "learning_rate": 1.124386201325367e-05, + "loss": 0.6662, + "step": 9152 + }, + { + "epoch": 1.43, + "grad_norm": 
11.605071221177864, + "learning_rate": 1.124218869337417e-05, + "loss": 0.5742, + "step": 9153 + }, + { + "epoch": 1.43, + "grad_norm": 18.094322992716332, + "learning_rate": 1.1240515338167548e-05, + "loss": 0.5979, + "step": 9154 + }, + { + "epoch": 1.43, + "grad_norm": 15.685898919014859, + "learning_rate": 1.1238841947681399e-05, + "loss": 0.5981, + "step": 9155 + }, + { + "epoch": 1.43, + "grad_norm": 17.8066106956894, + "learning_rate": 1.1237168521963307e-05, + "loss": 0.5302, + "step": 9156 + }, + { + "epoch": 1.43, + "grad_norm": 18.51211495266274, + "learning_rate": 1.1235495061060864e-05, + "loss": 0.5423, + "step": 9157 + }, + { + "epoch": 1.43, + "grad_norm": 14.017995690202573, + "learning_rate": 1.123382156502167e-05, + "loss": 0.5371, + "step": 9158 + }, + { + "epoch": 1.43, + "grad_norm": 12.274334587612588, + "learning_rate": 1.1232148033893307e-05, + "loss": 0.4745, + "step": 9159 + }, + { + "epoch": 1.43, + "grad_norm": 21.28087899645096, + "learning_rate": 1.1230474467723376e-05, + "loss": 0.594, + "step": 9160 + }, + { + "epoch": 1.43, + "grad_norm": 22.606996366315286, + "learning_rate": 1.1228800866559472e-05, + "loss": 0.5989, + "step": 9161 + }, + { + "epoch": 1.43, + "grad_norm": 18.246749941264525, + "learning_rate": 1.1227127230449191e-05, + "loss": 0.5773, + "step": 9162 + }, + { + "epoch": 1.43, + "grad_norm": 11.688941519878224, + "learning_rate": 1.1225453559440128e-05, + "loss": 0.6094, + "step": 9163 + }, + { + "epoch": 1.43, + "grad_norm": 16.601185173683334, + "learning_rate": 1.1223779853579884e-05, + "loss": 0.6012, + "step": 9164 + }, + { + "epoch": 1.43, + "grad_norm": 13.220288273403998, + "learning_rate": 1.1222106112916053e-05, + "loss": 0.6064, + "step": 9165 + }, + { + "epoch": 1.43, + "grad_norm": 22.02275981584391, + "learning_rate": 1.1220432337496244e-05, + "loss": 0.6672, + "step": 9166 + }, + { + "epoch": 1.43, + "grad_norm": 28.058959748511075, + "learning_rate": 1.121875852736805e-05, + "loss": 0.5952, + "step": 9167 + }, + { + "epoch": 1.43, + "grad_norm": 17.952123799971428, + "learning_rate": 1.1217084682579077e-05, + "loss": 0.6151, + "step": 9168 + }, + { + "epoch": 1.43, + "grad_norm": 19.647652856467833, + "learning_rate": 1.1215410803176927e-05, + "loss": 0.5902, + "step": 9169 + }, + { + "epoch": 1.43, + "grad_norm": 22.473547970343066, + "learning_rate": 1.1213736889209207e-05, + "loss": 0.6666, + "step": 9170 + }, + { + "epoch": 1.43, + "grad_norm": 23.354751545277264, + "learning_rate": 1.1212062940723519e-05, + "loss": 0.5764, + "step": 9171 + }, + { + "epoch": 1.43, + "grad_norm": 26.54377848980917, + "learning_rate": 1.1210388957767471e-05, + "loss": 0.5804, + "step": 9172 + }, + { + "epoch": 1.43, + "grad_norm": 17.864543439766816, + "learning_rate": 1.1208714940388668e-05, + "loss": 0.6083, + "step": 9173 + }, + { + "epoch": 1.43, + "grad_norm": 17.573375387447467, + "learning_rate": 1.1207040888634719e-05, + "loss": 0.5952, + "step": 9174 + }, + { + "epoch": 1.43, + "grad_norm": 20.071871097091563, + "learning_rate": 1.1205366802553231e-05, + "loss": 0.5872, + "step": 9175 + }, + { + "epoch": 1.43, + "grad_norm": 18.281087903176285, + "learning_rate": 1.1203692682191819e-05, + "loss": 0.6939, + "step": 9176 + }, + { + "epoch": 1.43, + "grad_norm": 15.202771754635325, + "learning_rate": 1.1202018527598091e-05, + "loss": 0.6457, + "step": 9177 + }, + { + "epoch": 1.43, + "grad_norm": 20.67415285283405, + "learning_rate": 1.120034433881966e-05, + "loss": 0.59, + "step": 9178 + }, + { + "epoch": 1.43, + "grad_norm": 
19.601946449548517, + "learning_rate": 1.1198670115904136e-05, + "loss": 0.6297, + "step": 9179 + }, + { + "epoch": 1.43, + "grad_norm": 30.470285551551125, + "learning_rate": 1.1196995858899138e-05, + "loss": 0.6937, + "step": 9180 + }, + { + "epoch": 1.43, + "grad_norm": 14.413605388640091, + "learning_rate": 1.1195321567852275e-05, + "loss": 0.5675, + "step": 9181 + }, + { + "epoch": 1.43, + "grad_norm": 18.069825486226854, + "learning_rate": 1.1193647242811165e-05, + "loss": 0.5391, + "step": 9182 + }, + { + "epoch": 1.43, + "grad_norm": 11.292613018269247, + "learning_rate": 1.1191972883823433e-05, + "loss": 0.5359, + "step": 9183 + }, + { + "epoch": 1.43, + "grad_norm": 14.862600239254476, + "learning_rate": 1.119029849093668e-05, + "loss": 0.647, + "step": 9184 + }, + { + "epoch": 1.43, + "grad_norm": 25.713910930367874, + "learning_rate": 1.118862406419854e-05, + "loss": 0.5467, + "step": 9185 + }, + { + "epoch": 1.43, + "grad_norm": 23.153794562571782, + "learning_rate": 1.1186949603656624e-05, + "loss": 0.7108, + "step": 9186 + }, + { + "epoch": 1.44, + "grad_norm": 19.312513565935227, + "learning_rate": 1.1185275109358558e-05, + "loss": 0.5972, + "step": 9187 + }, + { + "epoch": 1.44, + "grad_norm": 19.569038125764617, + "learning_rate": 1.118360058135196e-05, + "loss": 0.6932, + "step": 9188 + }, + { + "epoch": 1.44, + "grad_norm": 29.725426942939055, + "learning_rate": 1.1181926019684454e-05, + "loss": 0.6175, + "step": 9189 + }, + { + "epoch": 1.44, + "grad_norm": 15.614473932249652, + "learning_rate": 1.1180251424403666e-05, + "loss": 0.5703, + "step": 9190 + }, + { + "epoch": 1.44, + "grad_norm": 17.54772774245434, + "learning_rate": 1.1178576795557214e-05, + "loss": 0.5395, + "step": 9191 + }, + { + "epoch": 1.44, + "grad_norm": 19.772009570027226, + "learning_rate": 1.117690213319273e-05, + "loss": 0.7171, + "step": 9192 + }, + { + "epoch": 1.44, + "grad_norm": 37.63290593937698, + "learning_rate": 1.1175227437357835e-05, + "loss": 0.6536, + "step": 9193 + }, + { + "epoch": 1.44, + "grad_norm": 17.0844609992272, + "learning_rate": 1.1173552708100162e-05, + "loss": 0.5932, + "step": 9194 + }, + { + "epoch": 1.44, + "grad_norm": 22.895576886320796, + "learning_rate": 1.1171877945467333e-05, + "loss": 0.6676, + "step": 9195 + }, + { + "epoch": 1.44, + "grad_norm": 22.679667293790565, + "learning_rate": 1.1170203149506984e-05, + "loss": 0.5905, + "step": 9196 + }, + { + "epoch": 1.44, + "grad_norm": 21.82699749983028, + "learning_rate": 1.1168528320266743e-05, + "loss": 0.5795, + "step": 9197 + }, + { + "epoch": 1.44, + "grad_norm": 26.148849002528173, + "learning_rate": 1.1166853457794238e-05, + "loss": 0.5694, + "step": 9198 + }, + { + "epoch": 1.44, + "grad_norm": 33.59946377445537, + "learning_rate": 1.1165178562137103e-05, + "loss": 0.6562, + "step": 9199 + }, + { + "epoch": 1.44, + "grad_norm": 22.98983943847259, + "learning_rate": 1.1163503633342971e-05, + "loss": 0.7734, + "step": 9200 + }, + { + "epoch": 1.44, + "grad_norm": 19.280237031384548, + "learning_rate": 1.1161828671459475e-05, + "loss": 0.615, + "step": 9201 + }, + { + "epoch": 1.44, + "grad_norm": 26.66468511156852, + "learning_rate": 1.1160153676534256e-05, + "loss": 0.5803, + "step": 9202 + }, + { + "epoch": 1.44, + "grad_norm": 21.658845253320656, + "learning_rate": 1.115847864861494e-05, + "loss": 0.6408, + "step": 9203 + }, + { + "epoch": 1.44, + "grad_norm": 26.169203517702094, + "learning_rate": 1.1156803587749172e-05, + "loss": 0.5945, + "step": 9204 + }, + { + "epoch": 1.44, + "grad_norm": 
21.953103423871607, + "learning_rate": 1.1155128493984588e-05, + "loss": 0.6681, + "step": 9205 + }, + { + "epoch": 1.44, + "grad_norm": 21.969191934066426, + "learning_rate": 1.1153453367368824e-05, + "loss": 0.6249, + "step": 9206 + }, + { + "epoch": 1.44, + "grad_norm": 22.91817980087681, + "learning_rate": 1.115177820794952e-05, + "loss": 0.5894, + "step": 9207 + }, + { + "epoch": 1.44, + "grad_norm": 21.48646017948929, + "learning_rate": 1.1150103015774318e-05, + "loss": 0.6155, + "step": 9208 + }, + { + "epoch": 1.44, + "grad_norm": 23.897741915870306, + "learning_rate": 1.1148427790890857e-05, + "loss": 0.6374, + "step": 9209 + }, + { + "epoch": 1.44, + "grad_norm": 21.232398398685255, + "learning_rate": 1.1146752533346786e-05, + "loss": 0.6724, + "step": 9210 + }, + { + "epoch": 1.44, + "grad_norm": 19.359026797213556, + "learning_rate": 1.1145077243189739e-05, + "loss": 0.6039, + "step": 9211 + }, + { + "epoch": 1.44, + "grad_norm": 13.296470177581037, + "learning_rate": 1.114340192046737e-05, + "loss": 0.575, + "step": 9212 + }, + { + "epoch": 1.44, + "grad_norm": 32.605814467960386, + "learning_rate": 1.1141726565227313e-05, + "loss": 0.7205, + "step": 9213 + }, + { + "epoch": 1.44, + "grad_norm": 32.400132068023424, + "learning_rate": 1.1140051177517224e-05, + "loss": 0.6781, + "step": 9214 + }, + { + "epoch": 1.44, + "grad_norm": 11.485535025884852, + "learning_rate": 1.1138375757384747e-05, + "loss": 0.5879, + "step": 9215 + }, + { + "epoch": 1.44, + "grad_norm": 18.18111534724461, + "learning_rate": 1.113670030487753e-05, + "loss": 0.6068, + "step": 9216 + }, + { + "epoch": 1.44, + "grad_norm": 20.93906984991635, + "learning_rate": 1.1135024820043218e-05, + "loss": 0.5994, + "step": 9217 + }, + { + "epoch": 1.44, + "grad_norm": 17.576505744951646, + "learning_rate": 1.1133349302929468e-05, + "loss": 0.6009, + "step": 9218 + }, + { + "epoch": 1.44, + "grad_norm": 23.1509549950664, + "learning_rate": 1.1131673753583922e-05, + "loss": 0.5916, + "step": 9219 + }, + { + "epoch": 1.44, + "grad_norm": 14.72266107176036, + "learning_rate": 1.112999817205424e-05, + "loss": 0.6482, + "step": 9220 + }, + { + "epoch": 1.44, + "grad_norm": 11.915140540956552, + "learning_rate": 1.1128322558388071e-05, + "loss": 0.6096, + "step": 9221 + }, + { + "epoch": 1.44, + "grad_norm": 18.446700158244614, + "learning_rate": 1.1126646912633068e-05, + "loss": 0.5952, + "step": 9222 + }, + { + "epoch": 1.44, + "grad_norm": 18.73237618249963, + "learning_rate": 1.1124971234836886e-05, + "loss": 0.589, + "step": 9223 + }, + { + "epoch": 1.44, + "grad_norm": 19.254016859650473, + "learning_rate": 1.1123295525047178e-05, + "loss": 0.6781, + "step": 9224 + }, + { + "epoch": 1.44, + "grad_norm": 13.664382352297014, + "learning_rate": 1.1121619783311606e-05, + "loss": 0.5769, + "step": 9225 + }, + { + "epoch": 1.44, + "grad_norm": 25.017923541978643, + "learning_rate": 1.111994400967782e-05, + "loss": 0.6411, + "step": 9226 + }, + { + "epoch": 1.44, + "grad_norm": 16.551425547660248, + "learning_rate": 1.1118268204193485e-05, + "loss": 0.6588, + "step": 9227 + }, + { + "epoch": 1.44, + "grad_norm": 13.759691531412997, + "learning_rate": 1.1116592366906254e-05, + "loss": 0.5924, + "step": 9228 + }, + { + "epoch": 1.44, + "grad_norm": 14.396146381919781, + "learning_rate": 1.111491649786379e-05, + "loss": 0.5966, + "step": 9229 + }, + { + "epoch": 1.44, + "grad_norm": 18.314983633208765, + "learning_rate": 1.1113240597113752e-05, + "loss": 0.6199, + "step": 9230 + }, + { + "epoch": 1.44, + "grad_norm": 
32.73033330998373, + "learning_rate": 1.1111564664703805e-05, + "loss": 0.7141, + "step": 9231 + }, + { + "epoch": 1.44, + "grad_norm": 11.42948871421432, + "learning_rate": 1.1109888700681608e-05, + "loss": 0.5544, + "step": 9232 + }, + { + "epoch": 1.44, + "grad_norm": 21.724222380238306, + "learning_rate": 1.1108212705094826e-05, + "loss": 0.6953, + "step": 9233 + }, + { + "epoch": 1.44, + "grad_norm": 26.683155058442054, + "learning_rate": 1.1106536677991122e-05, + "loss": 0.6156, + "step": 9234 + }, + { + "epoch": 1.44, + "grad_norm": 18.15696689943428, + "learning_rate": 1.1104860619418162e-05, + "loss": 0.5368, + "step": 9235 + }, + { + "epoch": 1.44, + "grad_norm": 18.71613632600167, + "learning_rate": 1.1103184529423612e-05, + "loss": 0.6409, + "step": 9236 + }, + { + "epoch": 1.44, + "grad_norm": 19.456197802359966, + "learning_rate": 1.1101508408055141e-05, + "loss": 0.6639, + "step": 9237 + }, + { + "epoch": 1.44, + "grad_norm": 24.49740585848573, + "learning_rate": 1.1099832255360412e-05, + "loss": 0.5518, + "step": 9238 + }, + { + "epoch": 1.44, + "grad_norm": 19.69709484610711, + "learning_rate": 1.1098156071387101e-05, + "loss": 0.6742, + "step": 9239 + }, + { + "epoch": 1.44, + "grad_norm": 11.395890251304616, + "learning_rate": 1.1096479856182872e-05, + "loss": 0.5318, + "step": 9240 + }, + { + "epoch": 1.44, + "grad_norm": 12.377089384249047, + "learning_rate": 1.1094803609795398e-05, + "loss": 0.5615, + "step": 9241 + }, + { + "epoch": 1.44, + "grad_norm": 20.295350144498446, + "learning_rate": 1.109312733227235e-05, + "loss": 0.6399, + "step": 9242 + }, + { + "epoch": 1.44, + "grad_norm": 19.574306593655788, + "learning_rate": 1.10914510236614e-05, + "loss": 0.6261, + "step": 9243 + }, + { + "epoch": 1.44, + "grad_norm": 26.288961762324547, + "learning_rate": 1.1089774684010219e-05, + "loss": 0.6474, + "step": 9244 + }, + { + "epoch": 1.44, + "grad_norm": 21.94701582591727, + "learning_rate": 1.1088098313366484e-05, + "loss": 0.5891, + "step": 9245 + }, + { + "epoch": 1.44, + "grad_norm": 19.822434631643524, + "learning_rate": 1.108642191177787e-05, + "loss": 0.573, + "step": 9246 + }, + { + "epoch": 1.44, + "grad_norm": 15.142014519928907, + "learning_rate": 1.1084745479292054e-05, + "loss": 0.5446, + "step": 9247 + }, + { + "epoch": 1.44, + "grad_norm": 18.005720786212926, + "learning_rate": 1.108306901595671e-05, + "loss": 0.6057, + "step": 9248 + }, + { + "epoch": 1.44, + "grad_norm": 21.07260389531164, + "learning_rate": 1.1081392521819518e-05, + "loss": 0.6217, + "step": 9249 + }, + { + "epoch": 1.44, + "grad_norm": 17.374317807588838, + "learning_rate": 1.1079715996928156e-05, + "loss": 0.672, + "step": 9250 + }, + { + "epoch": 1.45, + "grad_norm": 14.466625453375757, + "learning_rate": 1.1078039441330298e-05, + "loss": 0.6079, + "step": 9251 + }, + { + "epoch": 1.45, + "grad_norm": 19.851873231007477, + "learning_rate": 1.1076362855073635e-05, + "loss": 0.598, + "step": 9252 + }, + { + "epoch": 1.45, + "grad_norm": 28.064885948167905, + "learning_rate": 1.1074686238205837e-05, + "loss": 0.6714, + "step": 9253 + }, + { + "epoch": 1.45, + "grad_norm": 14.867303519064384, + "learning_rate": 1.1073009590774595e-05, + "loss": 0.5728, + "step": 9254 + }, + { + "epoch": 1.45, + "grad_norm": 15.335319741740042, + "learning_rate": 1.1071332912827584e-05, + "loss": 0.6313, + "step": 9255 + }, + { + "epoch": 1.45, + "grad_norm": 13.180464268866153, + "learning_rate": 1.1069656204412496e-05, + "loss": 0.5479, + "step": 9256 + }, + { + "epoch": 1.45, + "grad_norm": 
15.80640386217679, + "learning_rate": 1.1067979465577005e-05, + "loss": 0.617, + "step": 9257 + }, + { + "epoch": 1.45, + "grad_norm": 36.29597278537783, + "learning_rate": 1.106630269636881e-05, + "loss": 0.5618, + "step": 9258 + }, + { + "epoch": 1.45, + "grad_norm": 22.1431439002648, + "learning_rate": 1.1064625896835585e-05, + "loss": 0.5572, + "step": 9259 + }, + { + "epoch": 1.45, + "grad_norm": 16.42095537813848, + "learning_rate": 1.1062949067025024e-05, + "loss": 0.6221, + "step": 9260 + }, + { + "epoch": 1.45, + "grad_norm": 20.78267029483533, + "learning_rate": 1.1061272206984815e-05, + "loss": 0.6412, + "step": 9261 + }, + { + "epoch": 1.45, + "grad_norm": 18.15203782380583, + "learning_rate": 1.1059595316762644e-05, + "loss": 0.5633, + "step": 9262 + }, + { + "epoch": 1.45, + "grad_norm": 18.537579408795157, + "learning_rate": 1.1057918396406197e-05, + "loss": 0.5938, + "step": 9263 + }, + { + "epoch": 1.45, + "grad_norm": 16.498481395676407, + "learning_rate": 1.1056241445963177e-05, + "loss": 0.6043, + "step": 9264 + }, + { + "epoch": 1.45, + "grad_norm": 17.583496270867375, + "learning_rate": 1.1054564465481263e-05, + "loss": 0.6441, + "step": 9265 + }, + { + "epoch": 1.45, + "grad_norm": 16.38927262129966, + "learning_rate": 1.1052887455008157e-05, + "loss": 0.5766, + "step": 9266 + }, + { + "epoch": 1.45, + "grad_norm": 15.729221358477853, + "learning_rate": 1.1051210414591544e-05, + "loss": 0.5274, + "step": 9267 + }, + { + "epoch": 1.45, + "grad_norm": 18.39367556733507, + "learning_rate": 1.1049533344279122e-05, + "loss": 0.599, + "step": 9268 + }, + { + "epoch": 1.45, + "grad_norm": 25.45838002144944, + "learning_rate": 1.1047856244118591e-05, + "loss": 0.6777, + "step": 9269 + }, + { + "epoch": 1.45, + "grad_norm": 16.46941887856235, + "learning_rate": 1.1046179114157636e-05, + "loss": 0.6293, + "step": 9270 + }, + { + "epoch": 1.45, + "grad_norm": 20.313819413982287, + "learning_rate": 1.1044501954443962e-05, + "loss": 0.5981, + "step": 9271 + }, + { + "epoch": 1.45, + "grad_norm": 9.764063283786262, + "learning_rate": 1.104282476502526e-05, + "loss": 0.4817, + "step": 9272 + }, + { + "epoch": 1.45, + "grad_norm": 17.95439232245066, + "learning_rate": 1.1041147545949233e-05, + "loss": 0.5305, + "step": 9273 + }, + { + "epoch": 1.45, + "grad_norm": 17.06889737895641, + "learning_rate": 1.103947029726358e-05, + "loss": 0.6047, + "step": 9274 + }, + { + "epoch": 1.45, + "grad_norm": 16.000177674173145, + "learning_rate": 1.1037793019015999e-05, + "loss": 0.5771, + "step": 9275 + }, + { + "epoch": 1.45, + "grad_norm": 16.596997755010054, + "learning_rate": 1.1036115711254191e-05, + "loss": 0.5705, + "step": 9276 + }, + { + "epoch": 1.45, + "grad_norm": 17.040014524156327, + "learning_rate": 1.103443837402586e-05, + "loss": 0.5791, + "step": 9277 + }, + { + "epoch": 1.45, + "grad_norm": 28.342455143566227, + "learning_rate": 1.1032761007378708e-05, + "loss": 0.6149, + "step": 9278 + }, + { + "epoch": 1.45, + "grad_norm": 10.76329390046126, + "learning_rate": 1.1031083611360433e-05, + "loss": 0.5815, + "step": 9279 + }, + { + "epoch": 1.45, + "grad_norm": 21.4693686369783, + "learning_rate": 1.1029406186018745e-05, + "loss": 0.6036, + "step": 9280 + }, + { + "epoch": 1.45, + "grad_norm": 22.6934949150342, + "learning_rate": 1.102772873140135e-05, + "loss": 0.6247, + "step": 9281 + }, + { + "epoch": 1.45, + "grad_norm": 23.703464840066538, + "learning_rate": 1.1026051247555947e-05, + "loss": 0.5954, + "step": 9282 + }, + { + "epoch": 1.45, + "grad_norm": 
18.936623447971115, + "learning_rate": 1.1024373734530247e-05, + "loss": 0.6855, + "step": 9283 + }, + { + "epoch": 1.45, + "grad_norm": 20.189429309421666, + "learning_rate": 1.102269619237196e-05, + "loss": 0.6611, + "step": 9284 + }, + { + "epoch": 1.45, + "grad_norm": 14.165220812394546, + "learning_rate": 1.102101862112879e-05, + "loss": 0.6039, + "step": 9285 + }, + { + "epoch": 1.45, + "grad_norm": 22.82262879304276, + "learning_rate": 1.1019341020848449e-05, + "loss": 0.6157, + "step": 9286 + }, + { + "epoch": 1.45, + "grad_norm": 20.60149013056682, + "learning_rate": 1.1017663391578644e-05, + "loss": 0.6286, + "step": 9287 + }, + { + "epoch": 1.45, + "grad_norm": 13.476861249404273, + "learning_rate": 1.1015985733367087e-05, + "loss": 0.6079, + "step": 9288 + }, + { + "epoch": 1.45, + "grad_norm": 31.370785933627776, + "learning_rate": 1.1014308046261491e-05, + "loss": 0.6477, + "step": 9289 + }, + { + "epoch": 1.45, + "grad_norm": 12.87850610610669, + "learning_rate": 1.1012630330309567e-05, + "loss": 0.5596, + "step": 9290 + }, + { + "epoch": 1.45, + "grad_norm": 20.24132793096575, + "learning_rate": 1.1010952585559029e-05, + "loss": 0.6938, + "step": 9291 + }, + { + "epoch": 1.45, + "grad_norm": 14.959268132691284, + "learning_rate": 1.1009274812057592e-05, + "loss": 0.5619, + "step": 9292 + }, + { + "epoch": 1.45, + "grad_norm": 21.89586029783041, + "learning_rate": 1.1007597009852968e-05, + "loss": 0.6731, + "step": 9293 + }, + { + "epoch": 1.45, + "grad_norm": 21.91133291751002, + "learning_rate": 1.1005919178992876e-05, + "loss": 0.524, + "step": 9294 + }, + { + "epoch": 1.45, + "grad_norm": 20.36805678174434, + "learning_rate": 1.1004241319525028e-05, + "loss": 0.6244, + "step": 9295 + }, + { + "epoch": 1.45, + "grad_norm": 18.889540809995655, + "learning_rate": 1.1002563431497151e-05, + "loss": 0.6113, + "step": 9296 + }, + { + "epoch": 1.45, + "grad_norm": 15.685101366562206, + "learning_rate": 1.1000885514956949e-05, + "loss": 0.618, + "step": 9297 + }, + { + "epoch": 1.45, + "grad_norm": 17.93623806920666, + "learning_rate": 1.0999207569952155e-05, + "loss": 0.7182, + "step": 9298 + }, + { + "epoch": 1.45, + "grad_norm": 20.81101575060987, + "learning_rate": 1.0997529596530477e-05, + "loss": 0.6416, + "step": 9299 + }, + { + "epoch": 1.45, + "grad_norm": 19.326877634694508, + "learning_rate": 1.0995851594739642e-05, + "loss": 0.6346, + "step": 9300 + }, + { + "epoch": 1.45, + "grad_norm": 6.503003877467766, + "learning_rate": 1.099417356462737e-05, + "loss": 0.603, + "step": 9301 + }, + { + "epoch": 1.45, + "grad_norm": 18.843762097267835, + "learning_rate": 1.0992495506241387e-05, + "loss": 0.645, + "step": 9302 + }, + { + "epoch": 1.45, + "grad_norm": 18.308978150167626, + "learning_rate": 1.0990817419629409e-05, + "loss": 0.6139, + "step": 9303 + }, + { + "epoch": 1.45, + "grad_norm": 18.311669976135995, + "learning_rate": 1.0989139304839165e-05, + "loss": 0.6507, + "step": 9304 + }, + { + "epoch": 1.45, + "grad_norm": 24.02842702456249, + "learning_rate": 1.0987461161918375e-05, + "loss": 0.6218, + "step": 9305 + }, + { + "epoch": 1.45, + "grad_norm": 20.82349988290152, + "learning_rate": 1.0985782990914771e-05, + "loss": 0.6529, + "step": 9306 + }, + { + "epoch": 1.45, + "grad_norm": 20.726658065638848, + "learning_rate": 1.098410479187607e-05, + "loss": 0.6127, + "step": 9307 + }, + { + "epoch": 1.45, + "grad_norm": 18.259433790246426, + "learning_rate": 1.0982426564850009e-05, + "loss": 0.5724, + "step": 9308 + }, + { + "epoch": 1.45, + "grad_norm": 
19.976708911408448, + "learning_rate": 1.098074830988431e-05, + "loss": 0.5547, + "step": 9309 + }, + { + "epoch": 1.45, + "grad_norm": 16.170615903933054, + "learning_rate": 1.0979070027026703e-05, + "loss": 0.5575, + "step": 9310 + }, + { + "epoch": 1.45, + "grad_norm": 18.485875205866122, + "learning_rate": 1.0977391716324916e-05, + "loss": 0.5526, + "step": 9311 + }, + { + "epoch": 1.45, + "grad_norm": 16.027373340606335, + "learning_rate": 1.097571337782668e-05, + "loss": 0.5961, + "step": 9312 + }, + { + "epoch": 1.45, + "grad_norm": 36.743595569193275, + "learning_rate": 1.0974035011579731e-05, + "loss": 0.6316, + "step": 9313 + }, + { + "epoch": 1.45, + "grad_norm": 13.71870219454767, + "learning_rate": 1.097235661763179e-05, + "loss": 0.6095, + "step": 9314 + }, + { + "epoch": 1.46, + "grad_norm": 20.657334626731686, + "learning_rate": 1.09706781960306e-05, + "loss": 0.584, + "step": 9315 + }, + { + "epoch": 1.46, + "grad_norm": 18.001788818695566, + "learning_rate": 1.0968999746823888e-05, + "loss": 0.5891, + "step": 9316 + }, + { + "epoch": 1.46, + "grad_norm": 18.441643764078986, + "learning_rate": 1.0967321270059391e-05, + "loss": 0.6439, + "step": 9317 + }, + { + "epoch": 1.46, + "grad_norm": 16.053219437131116, + "learning_rate": 1.0965642765784841e-05, + "loss": 0.6044, + "step": 9318 + }, + { + "epoch": 1.46, + "grad_norm": 22.90568478291575, + "learning_rate": 1.0963964234047977e-05, + "loss": 0.6336, + "step": 9319 + }, + { + "epoch": 1.46, + "grad_norm": 79.01080314115897, + "learning_rate": 1.0962285674896534e-05, + "loss": 0.7097, + "step": 9320 + }, + { + "epoch": 1.46, + "grad_norm": 21.373823724694624, + "learning_rate": 1.096060708837825e-05, + "loss": 0.6142, + "step": 9321 + }, + { + "epoch": 1.46, + "grad_norm": 17.400886317917156, + "learning_rate": 1.0958928474540857e-05, + "loss": 0.6652, + "step": 9322 + }, + { + "epoch": 1.46, + "grad_norm": 21.61111164975765, + "learning_rate": 1.0957249833432108e-05, + "loss": 0.6009, + "step": 9323 + }, + { + "epoch": 1.46, + "grad_norm": 29.695094435206546, + "learning_rate": 1.0955571165099728e-05, + "loss": 0.6241, + "step": 9324 + }, + { + "epoch": 1.46, + "grad_norm": 13.494431626007302, + "learning_rate": 1.0953892469591464e-05, + "loss": 0.5911, + "step": 9325 + }, + { + "epoch": 1.46, + "grad_norm": 27.481600748807796, + "learning_rate": 1.0952213746955056e-05, + "loss": 0.6171, + "step": 9326 + }, + { + "epoch": 1.46, + "grad_norm": 25.74845349832007, + "learning_rate": 1.0950534997238246e-05, + "loss": 0.5717, + "step": 9327 + }, + { + "epoch": 1.46, + "grad_norm": 15.79694738805393, + "learning_rate": 1.0948856220488779e-05, + "loss": 0.652, + "step": 9328 + }, + { + "epoch": 1.46, + "grad_norm": 20.7749297189614, + "learning_rate": 1.0947177416754396e-05, + "loss": 0.6805, + "step": 9329 + }, + { + "epoch": 1.46, + "grad_norm": 15.015635277052905, + "learning_rate": 1.0945498586082838e-05, + "loss": 0.5753, + "step": 9330 + }, + { + "epoch": 1.46, + "grad_norm": 20.101593173226394, + "learning_rate": 1.0943819728521854e-05, + "loss": 0.5702, + "step": 9331 + }, + { + "epoch": 1.46, + "grad_norm": 18.889657821696332, + "learning_rate": 1.0942140844119194e-05, + "loss": 0.6042, + "step": 9332 + }, + { + "epoch": 1.46, + "grad_norm": 22.450217602308165, + "learning_rate": 1.0940461932922593e-05, + "loss": 0.5951, + "step": 9333 + }, + { + "epoch": 1.46, + "grad_norm": 16.44554068663033, + "learning_rate": 1.0938782994979808e-05, + "loss": 0.6493, + "step": 9334 + }, + { + "epoch": 1.46, + "grad_norm": 
18.847142159759, + "learning_rate": 1.0937104030338584e-05, + "loss": 0.5713, + "step": 9335 + }, + { + "epoch": 1.46, + "grad_norm": 16.85219721732854, + "learning_rate": 1.0935425039046668e-05, + "loss": 0.5854, + "step": 9336 + }, + { + "epoch": 1.46, + "grad_norm": 16.14891029592234, + "learning_rate": 1.0933746021151813e-05, + "loss": 0.5597, + "step": 9337 + }, + { + "epoch": 1.46, + "grad_norm": 21.617218879894757, + "learning_rate": 1.0932066976701767e-05, + "loss": 0.5958, + "step": 9338 + }, + { + "epoch": 1.46, + "grad_norm": 17.19051853896591, + "learning_rate": 1.0930387905744278e-05, + "loss": 0.6437, + "step": 9339 + }, + { + "epoch": 1.46, + "grad_norm": 27.79981532287448, + "learning_rate": 1.0928708808327107e-05, + "loss": 0.7169, + "step": 9340 + }, + { + "epoch": 1.46, + "grad_norm": 11.750440615270596, + "learning_rate": 1.0927029684497995e-05, + "loss": 0.5916, + "step": 9341 + }, + { + "epoch": 1.46, + "grad_norm": 17.773517717452904, + "learning_rate": 1.0925350534304706e-05, + "loss": 0.596, + "step": 9342 + }, + { + "epoch": 1.46, + "grad_norm": 16.23432686737092, + "learning_rate": 1.0923671357794986e-05, + "loss": 0.6124, + "step": 9343 + }, + { + "epoch": 1.46, + "grad_norm": 17.46536881756696, + "learning_rate": 1.0921992155016594e-05, + "loss": 0.6399, + "step": 9344 + }, + { + "epoch": 1.46, + "grad_norm": 13.835094566774238, + "learning_rate": 1.0920312926017285e-05, + "loss": 0.5395, + "step": 9345 + }, + { + "epoch": 1.46, + "grad_norm": 18.85248532785278, + "learning_rate": 1.0918633670844813e-05, + "loss": 0.6304, + "step": 9346 + }, + { + "epoch": 1.46, + "grad_norm": 19.692073893187192, + "learning_rate": 1.0916954389546939e-05, + "loss": 0.5905, + "step": 9347 + }, + { + "epoch": 1.46, + "grad_norm": 13.087433340637027, + "learning_rate": 1.0915275082171419e-05, + "loss": 0.5698, + "step": 9348 + }, + { + "epoch": 1.46, + "grad_norm": 12.552102892874942, + "learning_rate": 1.0913595748766009e-05, + "loss": 0.5512, + "step": 9349 + }, + { + "epoch": 1.46, + "grad_norm": 24.961806478880785, + "learning_rate": 1.0911916389378473e-05, + "loss": 0.5684, + "step": 9350 + }, + { + "epoch": 1.46, + "grad_norm": 25.915952094357614, + "learning_rate": 1.0910237004056563e-05, + "loss": 0.6457, + "step": 9351 + }, + { + "epoch": 1.46, + "grad_norm": 17.217591032081387, + "learning_rate": 1.0908557592848048e-05, + "loss": 0.6163, + "step": 9352 + }, + { + "epoch": 1.46, + "grad_norm": 23.803704479040587, + "learning_rate": 1.0906878155800686e-05, + "loss": 0.7503, + "step": 9353 + }, + { + "epoch": 1.46, + "grad_norm": 17.070667471784837, + "learning_rate": 1.0905198692962241e-05, + "loss": 0.6108, + "step": 9354 + }, + { + "epoch": 1.46, + "grad_norm": 14.253869194386121, + "learning_rate": 1.0903519204380475e-05, + "loss": 0.605, + "step": 9355 + }, + { + "epoch": 1.46, + "grad_norm": 18.43850178502428, + "learning_rate": 1.090183969010315e-05, + "loss": 0.5053, + "step": 9356 + }, + { + "epoch": 1.46, + "grad_norm": 21.94831951734522, + "learning_rate": 1.0900160150178034e-05, + "loss": 0.6282, + "step": 9357 + }, + { + "epoch": 1.46, + "grad_norm": 20.79610114732136, + "learning_rate": 1.0898480584652887e-05, + "loss": 0.6296, + "step": 9358 + }, + { + "epoch": 1.46, + "grad_norm": 18.24718245874271, + "learning_rate": 1.0896800993575482e-05, + "loss": 0.6255, + "step": 9359 + }, + { + "epoch": 1.46, + "grad_norm": 15.759223427063468, + "learning_rate": 1.0895121376993576e-05, + "loss": 0.5951, + "step": 9360 + }, + { + "epoch": 1.46, + "grad_norm": 
27.871080497733203, + "learning_rate": 1.0893441734954944e-05, + "loss": 0.5584, + "step": 9361 + }, + { + "epoch": 1.46, + "grad_norm": 20.92422331578606, + "learning_rate": 1.0891762067507353e-05, + "loss": 0.6447, + "step": 9362 + }, + { + "epoch": 1.46, + "grad_norm": 21.245075625056977, + "learning_rate": 1.089008237469857e-05, + "loss": 0.5667, + "step": 9363 + }, + { + "epoch": 1.46, + "grad_norm": 18.180661107784466, + "learning_rate": 1.0888402656576364e-05, + "loss": 0.5551, + "step": 9364 + }, + { + "epoch": 1.46, + "grad_norm": 15.099482853924005, + "learning_rate": 1.0886722913188508e-05, + "loss": 0.6097, + "step": 9365 + }, + { + "epoch": 1.46, + "grad_norm": 18.237800511740094, + "learning_rate": 1.0885043144582765e-05, + "loss": 0.578, + "step": 9366 + }, + { + "epoch": 1.46, + "grad_norm": 24.35403595069551, + "learning_rate": 1.0883363350806922e-05, + "loss": 0.5912, + "step": 9367 + }, + { + "epoch": 1.46, + "grad_norm": 13.135482664141634, + "learning_rate": 1.0881683531908733e-05, + "loss": 0.6131, + "step": 9368 + }, + { + "epoch": 1.46, + "grad_norm": 21.470869481661918, + "learning_rate": 1.0880003687935988e-05, + "loss": 0.6347, + "step": 9369 + }, + { + "epoch": 1.46, + "grad_norm": 16.97363563802261, + "learning_rate": 1.0878323818936448e-05, + "loss": 0.5712, + "step": 9370 + }, + { + "epoch": 1.46, + "grad_norm": 20.5706278316438, + "learning_rate": 1.0876643924957892e-05, + "loss": 0.6928, + "step": 9371 + }, + { + "epoch": 1.46, + "grad_norm": 15.891631202386465, + "learning_rate": 1.0874964006048098e-05, + "loss": 0.6189, + "step": 9372 + }, + { + "epoch": 1.46, + "grad_norm": 17.001800649486196, + "learning_rate": 1.087328406225484e-05, + "loss": 0.5915, + "step": 9373 + }, + { + "epoch": 1.46, + "grad_norm": 24.739046890599845, + "learning_rate": 1.0871604093625894e-05, + "loss": 0.5962, + "step": 9374 + }, + { + "epoch": 1.46, + "grad_norm": 13.372756715849441, + "learning_rate": 1.0869924100209032e-05, + "loss": 0.5949, + "step": 9375 + }, + { + "epoch": 1.46, + "grad_norm": 15.676144707632373, + "learning_rate": 1.0868244082052047e-05, + "loss": 0.6773, + "step": 9376 + }, + { + "epoch": 1.46, + "grad_norm": 17.00720326423703, + "learning_rate": 1.0866564039202701e-05, + "loss": 0.7093, + "step": 9377 + }, + { + "epoch": 1.46, + "grad_norm": 24.900872300899252, + "learning_rate": 1.0864883971708782e-05, + "loss": 0.5787, + "step": 9378 + }, + { + "epoch": 1.47, + "grad_norm": 16.036677754152258, + "learning_rate": 1.0863203879618069e-05, + "loss": 0.5634, + "step": 9379 + }, + { + "epoch": 1.47, + "grad_norm": 18.653135864055255, + "learning_rate": 1.0861523762978343e-05, + "loss": 0.6148, + "step": 9380 + }, + { + "epoch": 1.47, + "grad_norm": 17.19453208812757, + "learning_rate": 1.0859843621837386e-05, + "loss": 0.6133, + "step": 9381 + }, + { + "epoch": 1.47, + "grad_norm": 15.224947890261403, + "learning_rate": 1.085816345624298e-05, + "loss": 0.5717, + "step": 9382 + }, + { + "epoch": 1.47, + "grad_norm": 15.338802059912311, + "learning_rate": 1.0856483266242903e-05, + "loss": 0.4999, + "step": 9383 + }, + { + "epoch": 1.47, + "grad_norm": 15.246108394990104, + "learning_rate": 1.085480305188495e-05, + "loss": 0.5871, + "step": 9384 + }, + { + "epoch": 1.47, + "grad_norm": 15.480610727703029, + "learning_rate": 1.0853122813216891e-05, + "loss": 0.5697, + "step": 9385 + }, + { + "epoch": 1.47, + "grad_norm": 17.552867714016873, + "learning_rate": 1.0851442550286524e-05, + "loss": 0.5627, + "step": 9386 + }, + { + "epoch": 1.47, + "grad_norm": 
18.694715413826017, + "learning_rate": 1.0849762263141625e-05, + "loss": 0.6263, + "step": 9387 + }, + { + "epoch": 1.47, + "grad_norm": 17.15524028019837, + "learning_rate": 1.0848081951829985e-05, + "loss": 0.5954, + "step": 9388 + }, + { + "epoch": 1.47, + "grad_norm": 19.591500600347356, + "learning_rate": 1.0846401616399392e-05, + "loss": 0.6272, + "step": 9389 + }, + { + "epoch": 1.47, + "grad_norm": 24.016059650664904, + "learning_rate": 1.0844721256897633e-05, + "loss": 0.6562, + "step": 9390 + }, + { + "epoch": 1.47, + "grad_norm": 19.775444815102258, + "learning_rate": 1.0843040873372493e-05, + "loss": 0.6982, + "step": 9391 + }, + { + "epoch": 1.47, + "grad_norm": 16.60872089662641, + "learning_rate": 1.0841360465871765e-05, + "loss": 0.5756, + "step": 9392 + }, + { + "epoch": 1.47, + "grad_norm": 20.712522712256504, + "learning_rate": 1.0839680034443234e-05, + "loss": 0.5979, + "step": 9393 + }, + { + "epoch": 1.47, + "grad_norm": 21.86911710516593, + "learning_rate": 1.0837999579134699e-05, + "loss": 0.6265, + "step": 9394 + }, + { + "epoch": 1.47, + "grad_norm": 18.528542272546858, + "learning_rate": 1.0836319099993944e-05, + "loss": 0.6186, + "step": 9395 + }, + { + "epoch": 1.47, + "grad_norm": 20.391496309029126, + "learning_rate": 1.0834638597068763e-05, + "loss": 0.5755, + "step": 9396 + }, + { + "epoch": 1.47, + "grad_norm": 11.280628048508916, + "learning_rate": 1.0832958070406949e-05, + "loss": 0.548, + "step": 9397 + }, + { + "epoch": 1.47, + "grad_norm": 35.738299791033334, + "learning_rate": 1.0831277520056296e-05, + "loss": 0.6854, + "step": 9398 + }, + { + "epoch": 1.47, + "grad_norm": 16.076698314995113, + "learning_rate": 1.0829596946064595e-05, + "loss": 0.6017, + "step": 9399 + }, + { + "epoch": 1.47, + "grad_norm": 15.164490710427323, + "learning_rate": 1.0827916348479641e-05, + "loss": 0.627, + "step": 9400 + }, + { + "epoch": 1.47, + "grad_norm": 14.47180817728054, + "learning_rate": 1.0826235727349235e-05, + "loss": 0.6082, + "step": 9401 + }, + { + "epoch": 1.47, + "grad_norm": 17.532146106264022, + "learning_rate": 1.0824555082721161e-05, + "loss": 0.622, + "step": 9402 + }, + { + "epoch": 1.47, + "grad_norm": 21.87756875146567, + "learning_rate": 1.0822874414643229e-05, + "loss": 0.6658, + "step": 9403 + }, + { + "epoch": 1.47, + "grad_norm": 21.87691566185493, + "learning_rate": 1.0821193723163228e-05, + "loss": 0.6399, + "step": 9404 + }, + { + "epoch": 1.47, + "grad_norm": 317.9635921230517, + "learning_rate": 1.0819513008328957e-05, + "loss": 0.592, + "step": 9405 + }, + { + "epoch": 1.47, + "grad_norm": 18.982118411228477, + "learning_rate": 1.0817832270188217e-05, + "loss": 0.5933, + "step": 9406 + }, + { + "epoch": 1.47, + "grad_norm": 45.05185597761114, + "learning_rate": 1.0816151508788804e-05, + "loss": 0.6157, + "step": 9407 + }, + { + "epoch": 1.47, + "grad_norm": 19.376543944123615, + "learning_rate": 1.081447072417852e-05, + "loss": 0.5731, + "step": 9408 + }, + { + "epoch": 1.47, + "grad_norm": 22.80013261806668, + "learning_rate": 1.0812789916405167e-05, + "loss": 0.6376, + "step": 9409 + }, + { + "epoch": 1.47, + "grad_norm": 24.585975999077736, + "learning_rate": 1.081110908551654e-05, + "loss": 0.5928, + "step": 9410 + }, + { + "epoch": 1.47, + "grad_norm": 13.844602445573592, + "learning_rate": 1.0809428231560451e-05, + "loss": 0.6208, + "step": 9411 + }, + { + "epoch": 1.47, + "grad_norm": 15.132977277481936, + "learning_rate": 1.0807747354584692e-05, + "loss": 0.5376, + "step": 9412 + }, + { + "epoch": 1.47, + "grad_norm": 
14.942128926670657, + "learning_rate": 1.0806066454637074e-05, + "loss": 0.6012, + "step": 9413 + }, + { + "epoch": 1.47, + "grad_norm": 22.963368986364024, + "learning_rate": 1.0804385531765394e-05, + "loss": 0.6952, + "step": 9414 + }, + { + "epoch": 1.47, + "grad_norm": 17.20483350455384, + "learning_rate": 1.0802704586017463e-05, + "loss": 0.5912, + "step": 9415 + }, + { + "epoch": 1.47, + "grad_norm": 16.641716544747602, + "learning_rate": 1.0801023617441082e-05, + "loss": 0.5807, + "step": 9416 + }, + { + "epoch": 1.47, + "grad_norm": 16.490538248553587, + "learning_rate": 1.0799342626084057e-05, + "loss": 0.632, + "step": 9417 + }, + { + "epoch": 1.47, + "grad_norm": 16.217425944648927, + "learning_rate": 1.0797661611994196e-05, + "loss": 0.5509, + "step": 9418 + }, + { + "epoch": 1.47, + "grad_norm": 18.932601520218793, + "learning_rate": 1.0795980575219305e-05, + "loss": 0.6256, + "step": 9419 + }, + { + "epoch": 1.47, + "grad_norm": 22.35474050889783, + "learning_rate": 1.0794299515807191e-05, + "loss": 0.6549, + "step": 9420 + }, + { + "epoch": 1.47, + "grad_norm": 17.936730895849756, + "learning_rate": 1.0792618433805666e-05, + "loss": 0.6483, + "step": 9421 + }, + { + "epoch": 1.47, + "grad_norm": 21.44849353224946, + "learning_rate": 1.0790937329262537e-05, + "loss": 0.6675, + "step": 9422 + }, + { + "epoch": 1.47, + "grad_norm": 17.453121421799935, + "learning_rate": 1.0789256202225611e-05, + "loss": 0.6725, + "step": 9423 + }, + { + "epoch": 1.47, + "grad_norm": 19.045690951106884, + "learning_rate": 1.07875750527427e-05, + "loss": 0.6397, + "step": 9424 + }, + { + "epoch": 1.47, + "grad_norm": 25.129528171380745, + "learning_rate": 1.0785893880861616e-05, + "loss": 0.6001, + "step": 9425 + }, + { + "epoch": 1.47, + "grad_norm": 18.170946033509956, + "learning_rate": 1.0784212686630171e-05, + "loss": 0.5264, + "step": 9426 + }, + { + "epoch": 1.47, + "grad_norm": 15.591933308486732, + "learning_rate": 1.0782531470096171e-05, + "loss": 0.6915, + "step": 9427 + }, + { + "epoch": 1.47, + "grad_norm": 21.98558754210368, + "learning_rate": 1.078085023130744e-05, + "loss": 0.6217, + "step": 9428 + }, + { + "epoch": 1.47, + "grad_norm": 15.663204068755721, + "learning_rate": 1.0779168970311782e-05, + "loss": 0.5677, + "step": 9429 + }, + { + "epoch": 1.47, + "grad_norm": 19.071986070294194, + "learning_rate": 1.0777487687157018e-05, + "loss": 0.5633, + "step": 9430 + }, + { + "epoch": 1.47, + "grad_norm": 13.05711221520912, + "learning_rate": 1.0775806381890951e-05, + "loss": 0.6033, + "step": 9431 + }, + { + "epoch": 1.47, + "grad_norm": 13.609692545754523, + "learning_rate": 1.0774125054561411e-05, + "loss": 0.6702, + "step": 9432 + }, + { + "epoch": 1.47, + "grad_norm": 17.828614111617625, + "learning_rate": 1.0772443705216206e-05, + "loss": 0.6436, + "step": 9433 + }, + { + "epoch": 1.47, + "grad_norm": 21.221197034597598, + "learning_rate": 1.0770762333903154e-05, + "loss": 0.5892, + "step": 9434 + }, + { + "epoch": 1.47, + "grad_norm": 14.655164095824736, + "learning_rate": 1.0769080940670072e-05, + "loss": 0.6246, + "step": 9435 + }, + { + "epoch": 1.47, + "grad_norm": 19.871111394935483, + "learning_rate": 1.0767399525564777e-05, + "loss": 0.5908, + "step": 9436 + }, + { + "epoch": 1.47, + "grad_norm": 19.680634806307495, + "learning_rate": 1.0765718088635086e-05, + "loss": 0.6012, + "step": 9437 + }, + { + "epoch": 1.47, + "grad_norm": 13.00025038638523, + "learning_rate": 1.0764036629928827e-05, + "loss": 0.57, + "step": 9438 + }, + { + "epoch": 1.47, + "grad_norm": 
11.24499639660738, + "learning_rate": 1.0762355149493808e-05, + "loss": 0.5749, + "step": 9439 + }, + { + "epoch": 1.47, + "grad_norm": 25.77828202936811, + "learning_rate": 1.0760673647377855e-05, + "loss": 0.6088, + "step": 9440 + }, + { + "epoch": 1.47, + "grad_norm": 23.354121597287953, + "learning_rate": 1.0758992123628792e-05, + "loss": 0.7406, + "step": 9441 + }, + { + "epoch": 1.47, + "grad_norm": 41.03289745928189, + "learning_rate": 1.0757310578294433e-05, + "loss": 0.7082, + "step": 9442 + }, + { + "epoch": 1.48, + "grad_norm": 19.45371777845439, + "learning_rate": 1.0755629011422605e-05, + "loss": 0.5759, + "step": 9443 + }, + { + "epoch": 1.48, + "grad_norm": 19.571716547566247, + "learning_rate": 1.0753947423061131e-05, + "loss": 0.6467, + "step": 9444 + }, + { + "epoch": 1.48, + "grad_norm": 23.299547865296514, + "learning_rate": 1.0752265813257833e-05, + "loss": 0.6574, + "step": 9445 + }, + { + "epoch": 1.48, + "grad_norm": 16.86579688039223, + "learning_rate": 1.0750584182060535e-05, + "loss": 0.6207, + "step": 9446 + }, + { + "epoch": 1.48, + "grad_norm": 19.80949575604331, + "learning_rate": 1.0748902529517065e-05, + "loss": 0.5757, + "step": 9447 + }, + { + "epoch": 1.48, + "grad_norm": 21.01128324591708, + "learning_rate": 1.074722085567524e-05, + "loss": 0.5821, + "step": 9448 + }, + { + "epoch": 1.48, + "grad_norm": 17.561519825384572, + "learning_rate": 1.0745539160582894e-05, + "loss": 0.5665, + "step": 9449 + }, + { + "epoch": 1.48, + "grad_norm": 16.751996929248858, + "learning_rate": 1.074385744428785e-05, + "loss": 0.5297, + "step": 9450 + }, + { + "epoch": 1.48, + "grad_norm": 16.69825900169725, + "learning_rate": 1.0742175706837936e-05, + "loss": 0.6072, + "step": 9451 + }, + { + "epoch": 1.48, + "grad_norm": 19.325386852186696, + "learning_rate": 1.0740493948280976e-05, + "loss": 0.6216, + "step": 9452 + }, + { + "epoch": 1.48, + "grad_norm": 30.758691158442254, + "learning_rate": 1.0738812168664804e-05, + "loss": 0.7611, + "step": 9453 + }, + { + "epoch": 1.48, + "grad_norm": 16.43667945880691, + "learning_rate": 1.0737130368037245e-05, + "loss": 0.5615, + "step": 9454 + }, + { + "epoch": 1.48, + "grad_norm": 13.917657728250465, + "learning_rate": 1.0735448546446132e-05, + "loss": 0.5749, + "step": 9455 + }, + { + "epoch": 1.48, + "grad_norm": 19.572068139474975, + "learning_rate": 1.073376670393929e-05, + "loss": 0.6511, + "step": 9456 + }, + { + "epoch": 1.48, + "grad_norm": 21.302822306266066, + "learning_rate": 1.0732084840564556e-05, + "loss": 0.6853, + "step": 9457 + }, + { + "epoch": 1.48, + "grad_norm": 18.795132239927618, + "learning_rate": 1.0730402956369753e-05, + "loss": 0.5679, + "step": 9458 + }, + { + "epoch": 1.48, + "grad_norm": 14.018679810606356, + "learning_rate": 1.0728721051402718e-05, + "loss": 0.568, + "step": 9459 + }, + { + "epoch": 1.48, + "grad_norm": 17.406772044107694, + "learning_rate": 1.0727039125711283e-05, + "loss": 0.5909, + "step": 9460 + }, + { + "epoch": 1.48, + "grad_norm": 25.010215222578704, + "learning_rate": 1.0725357179343282e-05, + "loss": 0.5392, + "step": 9461 + }, + { + "epoch": 1.48, + "grad_norm": 15.532561240117714, + "learning_rate": 1.0723675212346546e-05, + "loss": 0.6206, + "step": 9462 + }, + { + "epoch": 1.48, + "grad_norm": 13.360520181037266, + "learning_rate": 1.0721993224768908e-05, + "loss": 0.5557, + "step": 9463 + }, + { + "epoch": 1.48, + "grad_norm": 22.81751436097232, + "learning_rate": 1.0720311216658205e-05, + "loss": 0.6083, + "step": 9464 + }, + { + "epoch": 1.48, + "grad_norm": 
17.101448116207504, + "learning_rate": 1.0718629188062275e-05, + "loss": 0.5691, + "step": 9465 + }, + { + "epoch": 1.48, + "grad_norm": 18.293912554498625, + "learning_rate": 1.0716947139028953e-05, + "loss": 0.563, + "step": 9466 + }, + { + "epoch": 1.48, + "grad_norm": 22.40208876114644, + "learning_rate": 1.071526506960607e-05, + "loss": 0.5423, + "step": 9467 + }, + { + "epoch": 1.48, + "grad_norm": 14.219565201540604, + "learning_rate": 1.071358297984147e-05, + "loss": 0.6123, + "step": 9468 + }, + { + "epoch": 1.48, + "grad_norm": 22.08994336835152, + "learning_rate": 1.0711900869782983e-05, + "loss": 0.6473, + "step": 9469 + }, + { + "epoch": 1.48, + "grad_norm": 11.957967786928343, + "learning_rate": 1.0710218739478457e-05, + "loss": 0.5852, + "step": 9470 + }, + { + "epoch": 1.48, + "grad_norm": 20.091591723560867, + "learning_rate": 1.0708536588975721e-05, + "loss": 0.5488, + "step": 9471 + }, + { + "epoch": 1.48, + "grad_norm": 17.706371141067113, + "learning_rate": 1.0706854418322625e-05, + "loss": 0.57, + "step": 9472 + }, + { + "epoch": 1.48, + "grad_norm": 10.67907114768848, + "learning_rate": 1.0705172227566996e-05, + "loss": 0.5924, + "step": 9473 + }, + { + "epoch": 1.48, + "grad_norm": 25.80960656458376, + "learning_rate": 1.0703490016756688e-05, + "loss": 0.6129, + "step": 9474 + }, + { + "epoch": 1.48, + "grad_norm": 27.17041024948368, + "learning_rate": 1.070180778593953e-05, + "loss": 0.7127, + "step": 9475 + }, + { + "epoch": 1.48, + "grad_norm": 21.205340816419813, + "learning_rate": 1.0700125535163371e-05, + "loss": 0.65, + "step": 9476 + }, + { + "epoch": 1.48, + "grad_norm": 15.982293288930224, + "learning_rate": 1.0698443264476051e-05, + "loss": 0.5533, + "step": 9477 + }, + { + "epoch": 1.48, + "grad_norm": 18.031886832876715, + "learning_rate": 1.0696760973925413e-05, + "loss": 0.5697, + "step": 9478 + }, + { + "epoch": 1.48, + "grad_norm": 18.236372267085525, + "learning_rate": 1.0695078663559302e-05, + "loss": 0.6043, + "step": 9479 + }, + { + "epoch": 1.48, + "grad_norm": 18.486247462818596, + "learning_rate": 1.0693396333425559e-05, + "loss": 0.6826, + "step": 9480 + }, + { + "epoch": 1.48, + "grad_norm": 20.5916181887827, + "learning_rate": 1.0691713983572028e-05, + "loss": 0.5957, + "step": 9481 + }, + { + "epoch": 1.48, + "grad_norm": 19.68124914541815, + "learning_rate": 1.069003161404656e-05, + "loss": 0.6007, + "step": 9482 + }, + { + "epoch": 1.48, + "grad_norm": 17.45992795001345, + "learning_rate": 1.0688349224896991e-05, + "loss": 0.6118, + "step": 9483 + }, + { + "epoch": 1.48, + "grad_norm": 18.61345398813733, + "learning_rate": 1.0686666816171179e-05, + "loss": 0.6545, + "step": 9484 + }, + { + "epoch": 1.48, + "grad_norm": 16.089503070496587, + "learning_rate": 1.0684984387916961e-05, + "loss": 0.5167, + "step": 9485 + }, + { + "epoch": 1.48, + "grad_norm": 24.119442864267423, + "learning_rate": 1.068330194018219e-05, + "loss": 0.6023, + "step": 9486 + }, + { + "epoch": 1.48, + "grad_norm": 18.792477187993107, + "learning_rate": 1.068161947301471e-05, + "loss": 0.6376, + "step": 9487 + }, + { + "epoch": 1.48, + "grad_norm": 30.869152872577324, + "learning_rate": 1.067993698646237e-05, + "loss": 0.6115, + "step": 9488 + }, + { + "epoch": 1.48, + "grad_norm": 15.931252073446462, + "learning_rate": 1.0678254480573023e-05, + "loss": 0.6299, + "step": 9489 + }, + { + "epoch": 1.48, + "grad_norm": 23.67480474037045, + "learning_rate": 1.067657195539451e-05, + "loss": 0.5673, + "step": 9490 + }, + { + "epoch": 1.48, + "grad_norm": 
18.791128503380907, + "learning_rate": 1.0674889410974689e-05, + "loss": 0.6741, + "step": 9491 + }, + { + "epoch": 1.48, + "grad_norm": 15.574516935055764, + "learning_rate": 1.0673206847361407e-05, + "loss": 0.5793, + "step": 9492 + }, + { + "epoch": 1.48, + "grad_norm": 14.44956903891582, + "learning_rate": 1.067152426460252e-05, + "loss": 0.6147, + "step": 9493 + }, + { + "epoch": 1.48, + "grad_norm": 25.631647409052487, + "learning_rate": 1.0669841662745874e-05, + "loss": 0.5661, + "step": 9494 + }, + { + "epoch": 1.48, + "grad_norm": 18.798459480931673, + "learning_rate": 1.0668159041839321e-05, + "loss": 0.5407, + "step": 9495 + }, + { + "epoch": 1.48, + "grad_norm": 21.97414835428551, + "learning_rate": 1.066647640193072e-05, + "loss": 0.6451, + "step": 9496 + }, + { + "epoch": 1.48, + "grad_norm": 28.361269138596125, + "learning_rate": 1.0664793743067916e-05, + "loss": 0.6634, + "step": 9497 + }, + { + "epoch": 1.48, + "grad_norm": 12.077724281735431, + "learning_rate": 1.0663111065298766e-05, + "loss": 0.5143, + "step": 9498 + }, + { + "epoch": 1.48, + "grad_norm": 15.987191370713397, + "learning_rate": 1.066142836867113e-05, + "loss": 0.6361, + "step": 9499 + }, + { + "epoch": 1.48, + "grad_norm": 14.609474364876062, + "learning_rate": 1.0659745653232851e-05, + "loss": 0.546, + "step": 9500 + }, + { + "epoch": 1.48, + "grad_norm": 17.46218057470345, + "learning_rate": 1.06580629190318e-05, + "loss": 0.5314, + "step": 9501 + }, + { + "epoch": 1.48, + "grad_norm": 15.539981436084707, + "learning_rate": 1.0656380166115818e-05, + "loss": 0.5221, + "step": 9502 + }, + { + "epoch": 1.48, + "grad_norm": 19.82093256580783, + "learning_rate": 1.065469739453277e-05, + "loss": 0.5585, + "step": 9503 + }, + { + "epoch": 1.48, + "grad_norm": 24.485869001173825, + "learning_rate": 1.0653014604330511e-05, + "loss": 0.6606, + "step": 9504 + }, + { + "epoch": 1.48, + "grad_norm": 21.91504538604698, + "learning_rate": 1.06513317955569e-05, + "loss": 0.6476, + "step": 9505 + }, + { + "epoch": 1.48, + "grad_norm": 23.354664022589944, + "learning_rate": 1.0649648968259793e-05, + "loss": 0.6103, + "step": 9506 + }, + { + "epoch": 1.49, + "grad_norm": 19.770386579560626, + "learning_rate": 1.0647966122487048e-05, + "loss": 0.5838, + "step": 9507 + }, + { + "epoch": 1.49, + "grad_norm": 20.284554726570335, + "learning_rate": 1.0646283258286524e-05, + "loss": 0.6077, + "step": 9508 + }, + { + "epoch": 1.49, + "grad_norm": 25.092795261804845, + "learning_rate": 1.0644600375706087e-05, + "loss": 0.5511, + "step": 9509 + }, + { + "epoch": 1.49, + "grad_norm": 17.014322802909845, + "learning_rate": 1.0642917474793591e-05, + "loss": 0.6354, + "step": 9510 + }, + { + "epoch": 1.49, + "grad_norm": 12.469612704215882, + "learning_rate": 1.0641234555596898e-05, + "loss": 0.5781, + "step": 9511 + }, + { + "epoch": 1.49, + "grad_norm": 17.41696005460803, + "learning_rate": 1.0639551618163869e-05, + "loss": 0.5833, + "step": 9512 + }, + { + "epoch": 1.49, + "grad_norm": 14.243668227611723, + "learning_rate": 1.0637868662542364e-05, + "loss": 0.5534, + "step": 9513 + }, + { + "epoch": 1.49, + "grad_norm": 26.532094638430074, + "learning_rate": 1.0636185688780253e-05, + "loss": 0.5653, + "step": 9514 + }, + { + "epoch": 1.49, + "grad_norm": 13.319982775189816, + "learning_rate": 1.0634502696925387e-05, + "loss": 0.5705, + "step": 9515 + }, + { + "epoch": 1.49, + "grad_norm": 22.95894349296172, + "learning_rate": 1.063281968702564e-05, + "loss": 0.6782, + "step": 9516 + }, + { + "epoch": 1.49, + "grad_norm": 
15.063393052919158, + "learning_rate": 1.0631136659128867e-05, + "loss": 0.5694, + "step": 9517 + }, + { + "epoch": 1.49, + "grad_norm": 18.209102662146336, + "learning_rate": 1.062945361328294e-05, + "loss": 0.6827, + "step": 9518 + }, + { + "epoch": 1.49, + "grad_norm": 18.475584376052943, + "learning_rate": 1.0627770549535716e-05, + "loss": 0.5483, + "step": 9519 + }, + { + "epoch": 1.49, + "grad_norm": 23.914881369756916, + "learning_rate": 1.0626087467935069e-05, + "loss": 0.6358, + "step": 9520 + }, + { + "epoch": 1.49, + "grad_norm": 13.735009675664795, + "learning_rate": 1.062440436852886e-05, + "loss": 0.5627, + "step": 9521 + }, + { + "epoch": 1.49, + "grad_norm": 15.783430771235727, + "learning_rate": 1.0622721251364955e-05, + "loss": 0.6351, + "step": 9522 + }, + { + "epoch": 1.49, + "grad_norm": 13.958779100398177, + "learning_rate": 1.0621038116491224e-05, + "loss": 0.5411, + "step": 9523 + }, + { + "epoch": 1.49, + "grad_norm": 17.768034254116973, + "learning_rate": 1.061935496395553e-05, + "loss": 0.5827, + "step": 9524 + }, + { + "epoch": 1.49, + "grad_norm": 14.388536022047735, + "learning_rate": 1.0617671793805743e-05, + "loss": 0.6838, + "step": 9525 + }, + { + "epoch": 1.49, + "grad_norm": 17.26562426036298, + "learning_rate": 1.0615988606089733e-05, + "loss": 0.5505, + "step": 9526 + }, + { + "epoch": 1.49, + "grad_norm": 22.565596474686416, + "learning_rate": 1.0614305400855367e-05, + "loss": 0.7014, + "step": 9527 + }, + { + "epoch": 1.49, + "grad_norm": 20.974896961204593, + "learning_rate": 1.0612622178150515e-05, + "loss": 0.6463, + "step": 9528 + }, + { + "epoch": 1.49, + "grad_norm": 18.238998235587502, + "learning_rate": 1.0610938938023047e-05, + "loss": 0.6253, + "step": 9529 + }, + { + "epoch": 1.49, + "grad_norm": 14.923957649410756, + "learning_rate": 1.060925568052083e-05, + "loss": 0.6337, + "step": 9530 + }, + { + "epoch": 1.49, + "grad_norm": 21.41670397875525, + "learning_rate": 1.0607572405691741e-05, + "loss": 0.6089, + "step": 9531 + }, + { + "epoch": 1.49, + "grad_norm": 11.801162031710726, + "learning_rate": 1.0605889113583647e-05, + "loss": 0.5919, + "step": 9532 + }, + { + "epoch": 1.49, + "grad_norm": 19.469134183065833, + "learning_rate": 1.0604205804244424e-05, + "loss": 0.5946, + "step": 9533 + }, + { + "epoch": 1.49, + "grad_norm": 15.567008141113567, + "learning_rate": 1.0602522477721938e-05, + "loss": 0.6264, + "step": 9534 + }, + { + "epoch": 1.49, + "grad_norm": 22.87978994529843, + "learning_rate": 1.0600839134064065e-05, + "loss": 0.7207, + "step": 9535 + }, + { + "epoch": 1.49, + "grad_norm": 19.125638520654952, + "learning_rate": 1.0599155773318679e-05, + "loss": 0.5884, + "step": 9536 + }, + { + "epoch": 1.49, + "grad_norm": 26.762880063197887, + "learning_rate": 1.0597472395533654e-05, + "loss": 0.6003, + "step": 9537 + }, + { + "epoch": 1.49, + "grad_norm": 17.77436759191914, + "learning_rate": 1.0595789000756864e-05, + "loss": 0.6117, + "step": 9538 + }, + { + "epoch": 1.49, + "grad_norm": 14.16403719104961, + "learning_rate": 1.0594105589036182e-05, + "loss": 0.4892, + "step": 9539 + }, + { + "epoch": 1.49, + "grad_norm": 17.848443429109874, + "learning_rate": 1.0592422160419484e-05, + "loss": 0.6423, + "step": 9540 + }, + { + "epoch": 1.49, + "grad_norm": 24.226938673467696, + "learning_rate": 1.0590738714954652e-05, + "loss": 0.6831, + "step": 9541 + }, + { + "epoch": 1.49, + "grad_norm": 18.35935245390786, + "learning_rate": 1.0589055252689549e-05, + "loss": 0.5699, + "step": 9542 + }, + { + "epoch": 1.49, + "grad_norm": 
16.08264945118136, + "learning_rate": 1.0587371773672064e-05, + "loss": 0.6028, + "step": 9543 + }, + { + "epoch": 1.49, + "grad_norm": 29.98770900765187, + "learning_rate": 1.0585688277950065e-05, + "loss": 0.7055, + "step": 9544 + }, + { + "epoch": 1.49, + "grad_norm": 15.587539367293392, + "learning_rate": 1.058400476557144e-05, + "loss": 0.6007, + "step": 9545 + }, + { + "epoch": 1.49, + "grad_norm": 15.978174737292955, + "learning_rate": 1.0582321236584057e-05, + "loss": 0.6042, + "step": 9546 + }, + { + "epoch": 1.49, + "grad_norm": 27.281801388598783, + "learning_rate": 1.0580637691035799e-05, + "loss": 0.6295, + "step": 9547 + }, + { + "epoch": 1.49, + "grad_norm": 20.556255756318215, + "learning_rate": 1.0578954128974546e-05, + "loss": 0.6692, + "step": 9548 + }, + { + "epoch": 1.49, + "grad_norm": 16.6087337362928, + "learning_rate": 1.0577270550448175e-05, + "loss": 0.615, + "step": 9549 + }, + { + "epoch": 1.49, + "grad_norm": 21.79501263445483, + "learning_rate": 1.0575586955504568e-05, + "loss": 0.6839, + "step": 9550 + }, + { + "epoch": 1.49, + "grad_norm": 18.24112870045532, + "learning_rate": 1.0573903344191603e-05, + "loss": 0.6651, + "step": 9551 + }, + { + "epoch": 1.49, + "grad_norm": 18.81415157755609, + "learning_rate": 1.0572219716557163e-05, + "loss": 0.6202, + "step": 9552 + }, + { + "epoch": 1.49, + "grad_norm": 17.39179908597005, + "learning_rate": 1.0570536072649132e-05, + "loss": 0.5771, + "step": 9553 + }, + { + "epoch": 1.49, + "grad_norm": 15.791777579774434, + "learning_rate": 1.0568852412515388e-05, + "loss": 0.5681, + "step": 9554 + }, + { + "epoch": 1.49, + "grad_norm": 17.152439581947128, + "learning_rate": 1.0567168736203811e-05, + "loss": 0.6505, + "step": 9555 + }, + { + "epoch": 1.49, + "grad_norm": 13.366736823599535, + "learning_rate": 1.0565485043762289e-05, + "loss": 0.6095, + "step": 9556 + }, + { + "epoch": 1.49, + "grad_norm": 17.203889834130816, + "learning_rate": 1.0563801335238701e-05, + "loss": 0.6203, + "step": 9557 + }, + { + "epoch": 1.49, + "grad_norm": 15.4492931060352, + "learning_rate": 1.0562117610680938e-05, + "loss": 0.584, + "step": 9558 + }, + { + "epoch": 1.49, + "grad_norm": 24.79157263366647, + "learning_rate": 1.0560433870136872e-05, + "loss": 0.7251, + "step": 9559 + }, + { + "epoch": 1.49, + "grad_norm": 13.480030784467429, + "learning_rate": 1.0558750113654404e-05, + "loss": 0.5692, + "step": 9560 + }, + { + "epoch": 1.49, + "grad_norm": 22.889954727696995, + "learning_rate": 1.0557066341281402e-05, + "loss": 0.5767, + "step": 9561 + }, + { + "epoch": 1.49, + "grad_norm": 21.438089618415116, + "learning_rate": 1.055538255306576e-05, + "loss": 0.6005, + "step": 9562 + }, + { + "epoch": 1.49, + "grad_norm": 17.30729982622911, + "learning_rate": 1.0553698749055365e-05, + "loss": 0.5957, + "step": 9563 + }, + { + "epoch": 1.49, + "grad_norm": 24.809103160319772, + "learning_rate": 1.05520149292981e-05, + "loss": 0.5624, + "step": 9564 + }, + { + "epoch": 1.49, + "grad_norm": 15.908112386932967, + "learning_rate": 1.0550331093841855e-05, + "loss": 0.5825, + "step": 9565 + }, + { + "epoch": 1.49, + "grad_norm": 15.515388137377007, + "learning_rate": 1.0548647242734516e-05, + "loss": 0.5655, + "step": 9566 + }, + { + "epoch": 1.49, + "grad_norm": 17.679372729467417, + "learning_rate": 1.054696337602397e-05, + "loss": 0.6545, + "step": 9567 + }, + { + "epoch": 1.49, + "grad_norm": 16.1263260896761, + "learning_rate": 1.0545279493758103e-05, + "loss": 0.56, + "step": 9568 + }, + { + "epoch": 1.49, + "grad_norm": 
24.053409338032864, + "learning_rate": 1.0543595595984806e-05, + "loss": 0.6447, + "step": 9569 + }, + { + "epoch": 1.49, + "grad_norm": 22.056430020817814, + "learning_rate": 1.0541911682751971e-05, + "loss": 0.6836, + "step": 9570 + }, + { + "epoch": 1.5, + "grad_norm": 13.916570906419091, + "learning_rate": 1.0540227754107483e-05, + "loss": 0.6045, + "step": 9571 + }, + { + "epoch": 1.5, + "grad_norm": 16.304461112504757, + "learning_rate": 1.0538543810099236e-05, + "loss": 0.6043, + "step": 9572 + }, + { + "epoch": 1.5, + "grad_norm": 15.328828931096027, + "learning_rate": 1.0536859850775117e-05, + "loss": 0.66, + "step": 9573 + }, + { + "epoch": 1.5, + "grad_norm": 14.321388573598336, + "learning_rate": 1.0535175876183017e-05, + "loss": 0.6903, + "step": 9574 + }, + { + "epoch": 1.5, + "grad_norm": 18.06154264029838, + "learning_rate": 1.0533491886370828e-05, + "loss": 0.594, + "step": 9575 + }, + { + "epoch": 1.5, + "grad_norm": 28.40824175399614, + "learning_rate": 1.0531807881386442e-05, + "loss": 0.6531, + "step": 9576 + }, + { + "epoch": 1.5, + "grad_norm": 18.491114377761125, + "learning_rate": 1.0530123861277752e-05, + "loss": 0.5708, + "step": 9577 + }, + { + "epoch": 1.5, + "grad_norm": 20.745317276142934, + "learning_rate": 1.0528439826092649e-05, + "loss": 0.6742, + "step": 9578 + }, + { + "epoch": 1.5, + "grad_norm": 26.819592832648894, + "learning_rate": 1.0526755775879024e-05, + "loss": 0.6408, + "step": 9579 + }, + { + "epoch": 1.5, + "grad_norm": 16.210495938641355, + "learning_rate": 1.0525071710684777e-05, + "loss": 0.6574, + "step": 9580 + }, + { + "epoch": 1.5, + "grad_norm": 14.755272966114962, + "learning_rate": 1.0523387630557797e-05, + "loss": 0.649, + "step": 9581 + }, + { + "epoch": 1.5, + "grad_norm": 18.359098214031285, + "learning_rate": 1.0521703535545977e-05, + "loss": 0.5388, + "step": 9582 + }, + { + "epoch": 1.5, + "grad_norm": 14.95129733191838, + "learning_rate": 1.0520019425697217e-05, + "loss": 0.5968, + "step": 9583 + }, + { + "epoch": 1.5, + "grad_norm": 25.611221677474138, + "learning_rate": 1.0518335301059404e-05, + "loss": 0.6536, + "step": 9584 + }, + { + "epoch": 1.5, + "grad_norm": 32.14734087634544, + "learning_rate": 1.0516651161680443e-05, + "loss": 0.6968, + "step": 9585 + }, + { + "epoch": 1.5, + "grad_norm": 12.636278067544522, + "learning_rate": 1.0514967007608221e-05, + "loss": 0.5266, + "step": 9586 + }, + { + "epoch": 1.5, + "grad_norm": 14.785277131063943, + "learning_rate": 1.0513282838890642e-05, + "loss": 0.5664, + "step": 9587 + }, + { + "epoch": 1.5, + "grad_norm": 16.669899890082892, + "learning_rate": 1.0511598655575594e-05, + "loss": 0.537, + "step": 9588 + }, + { + "epoch": 1.5, + "grad_norm": 24.3401458567208, + "learning_rate": 1.0509914457710986e-05, + "loss": 0.6169, + "step": 9589 + }, + { + "epoch": 1.5, + "grad_norm": 12.943686247889197, + "learning_rate": 1.0508230245344707e-05, + "loss": 0.6526, + "step": 9590 + }, + { + "epoch": 1.5, + "grad_norm": 12.400058174393402, + "learning_rate": 1.0506546018524656e-05, + "loss": 0.5398, + "step": 9591 + }, + { + "epoch": 1.5, + "grad_norm": 19.38221197724052, + "learning_rate": 1.0504861777298732e-05, + "loss": 0.6262, + "step": 9592 + }, + { + "epoch": 1.5, + "grad_norm": 24.8586760242244, + "learning_rate": 1.0503177521714836e-05, + "loss": 0.614, + "step": 9593 + }, + { + "epoch": 1.5, + "grad_norm": 22.04308591355548, + "learning_rate": 1.0501493251820864e-05, + "loss": 0.6216, + "step": 9594 + }, + { + "epoch": 1.5, + "grad_norm": 19.803606390929883, + 
"learning_rate": 1.0499808967664717e-05, + "loss": 0.5423, + "step": 9595 + }, + { + "epoch": 1.5, + "grad_norm": 26.97331554537966, + "learning_rate": 1.0498124669294296e-05, + "loss": 0.6118, + "step": 9596 + }, + { + "epoch": 1.5, + "grad_norm": 20.264583169691303, + "learning_rate": 1.0496440356757502e-05, + "loss": 0.557, + "step": 9597 + }, + { + "epoch": 1.5, + "grad_norm": 15.91924624038395, + "learning_rate": 1.0494756030102236e-05, + "loss": 0.5841, + "step": 9598 + }, + { + "epoch": 1.5, + "grad_norm": 24.78175202927894, + "learning_rate": 1.0493071689376396e-05, + "loss": 0.6376, + "step": 9599 + }, + { + "epoch": 1.5, + "grad_norm": 19.143372913772797, + "learning_rate": 1.0491387334627884e-05, + "loss": 0.5542, + "step": 9600 + }, + { + "epoch": 1.5, + "grad_norm": 18.07029006864258, + "learning_rate": 1.0489702965904605e-05, + "loss": 0.6338, + "step": 9601 + }, + { + "epoch": 1.5, + "grad_norm": 22.01711013149036, + "learning_rate": 1.0488018583254463e-05, + "loss": 0.6842, + "step": 9602 + }, + { + "epoch": 1.5, + "grad_norm": 21.069095836699088, + "learning_rate": 1.0486334186725353e-05, + "loss": 0.6613, + "step": 9603 + }, + { + "epoch": 1.5, + "grad_norm": 16.043266099041606, + "learning_rate": 1.0484649776365189e-05, + "loss": 0.6321, + "step": 9604 + }, + { + "epoch": 1.5, + "grad_norm": 23.33969613639088, + "learning_rate": 1.0482965352221863e-05, + "loss": 0.552, + "step": 9605 + }, + { + "epoch": 1.5, + "grad_norm": 15.475998266671306, + "learning_rate": 1.0481280914343289e-05, + "loss": 0.5734, + "step": 9606 + }, + { + "epoch": 1.5, + "grad_norm": 23.847528959992133, + "learning_rate": 1.0479596462777363e-05, + "loss": 0.5331, + "step": 9607 + }, + { + "epoch": 1.5, + "grad_norm": 13.475077926024703, + "learning_rate": 1.0477911997571998e-05, + "loss": 0.6345, + "step": 9608 + }, + { + "epoch": 1.5, + "grad_norm": 10.699984906394286, + "learning_rate": 1.0476227518775095e-05, + "loss": 0.5554, + "step": 9609 + }, + { + "epoch": 1.5, + "grad_norm": 15.42390074512959, + "learning_rate": 1.0474543026434559e-05, + "loss": 0.6587, + "step": 9610 + }, + { + "epoch": 1.5, + "grad_norm": 14.81933469467535, + "learning_rate": 1.0472858520598295e-05, + "loss": 0.638, + "step": 9611 + }, + { + "epoch": 1.5, + "grad_norm": 16.350814940764952, + "learning_rate": 1.0471174001314215e-05, + "loss": 0.5785, + "step": 9612 + }, + { + "epoch": 1.5, + "grad_norm": 28.812973572982727, + "learning_rate": 1.0469489468630218e-05, + "loss": 0.6487, + "step": 9613 + }, + { + "epoch": 1.5, + "grad_norm": 15.2756985602967, + "learning_rate": 1.0467804922594218e-05, + "loss": 0.5419, + "step": 9614 + }, + { + "epoch": 1.5, + "grad_norm": 18.91706930602366, + "learning_rate": 1.0466120363254119e-05, + "loss": 0.5769, + "step": 9615 + }, + { + "epoch": 1.5, + "grad_norm": 20.756687172567716, + "learning_rate": 1.0464435790657833e-05, + "loss": 0.5769, + "step": 9616 + }, + { + "epoch": 1.5, + "grad_norm": 21.618957540812264, + "learning_rate": 1.0462751204853262e-05, + "loss": 0.5736, + "step": 9617 + }, + { + "epoch": 1.5, + "grad_norm": 21.11631268909565, + "learning_rate": 1.0461066605888314e-05, + "loss": 0.6871, + "step": 9618 + }, + { + "epoch": 1.5, + "grad_norm": 25.81731376749823, + "learning_rate": 1.0459381993810906e-05, + "loss": 0.5945, + "step": 9619 + }, + { + "epoch": 1.5, + "grad_norm": 18.639913323143578, + "learning_rate": 1.045769736866894e-05, + "loss": 0.6161, + "step": 9620 + }, + { + "epoch": 1.5, + "grad_norm": 18.973731759155125, + "learning_rate": 
1.0456012730510332e-05, + "loss": 0.6132, + "step": 9621 + }, + { + "epoch": 1.5, + "grad_norm": 14.093217327408952, + "learning_rate": 1.0454328079382985e-05, + "loss": 0.5851, + "step": 9622 + }, + { + "epoch": 1.5, + "grad_norm": 16.378059627342083, + "learning_rate": 1.0452643415334816e-05, + "loss": 0.5905, + "step": 9623 + }, + { + "epoch": 1.5, + "grad_norm": 24.635351020585734, + "learning_rate": 1.045095873841373e-05, + "loss": 0.6017, + "step": 9624 + }, + { + "epoch": 1.5, + "grad_norm": 13.787613591048734, + "learning_rate": 1.0449274048667644e-05, + "loss": 0.565, + "step": 9625 + }, + { + "epoch": 1.5, + "grad_norm": 23.71521270771573, + "learning_rate": 1.0447589346144467e-05, + "loss": 0.629, + "step": 9626 + }, + { + "epoch": 1.5, + "grad_norm": 15.837488716681479, + "learning_rate": 1.044590463089211e-05, + "loss": 0.6665, + "step": 9627 + }, + { + "epoch": 1.5, + "grad_norm": 24.80534046064591, + "learning_rate": 1.0444219902958483e-05, + "loss": 0.6202, + "step": 9628 + }, + { + "epoch": 1.5, + "grad_norm": 14.543291504337233, + "learning_rate": 1.0442535162391508e-05, + "loss": 0.5971, + "step": 9629 + }, + { + "epoch": 1.5, + "grad_norm": 18.99881099247838, + "learning_rate": 1.0440850409239085e-05, + "loss": 0.5446, + "step": 9630 + }, + { + "epoch": 1.5, + "grad_norm": 15.765826762735369, + "learning_rate": 1.043916564354914e-05, + "loss": 0.5486, + "step": 9631 + }, + { + "epoch": 1.5, + "grad_norm": 20.81523294733946, + "learning_rate": 1.0437480865369577e-05, + "loss": 0.6378, + "step": 9632 + }, + { + "epoch": 1.5, + "grad_norm": 13.563178170735709, + "learning_rate": 1.0435796074748316e-05, + "loss": 0.5161, + "step": 9633 + }, + { + "epoch": 1.5, + "grad_norm": 18.48273473960778, + "learning_rate": 1.0434111271733271e-05, + "loss": 0.5999, + "step": 9634 + }, + { + "epoch": 1.5, + "grad_norm": 19.222243011752166, + "learning_rate": 1.0432426456372356e-05, + "loss": 0.5401, + "step": 9635 + }, + { + "epoch": 1.51, + "grad_norm": 17.073605794994442, + "learning_rate": 1.0430741628713482e-05, + "loss": 0.6115, + "step": 9636 + }, + { + "epoch": 1.51, + "grad_norm": 17.715624759873922, + "learning_rate": 1.042905678880457e-05, + "loss": 0.5834, + "step": 9637 + }, + { + "epoch": 1.51, + "grad_norm": 30.531316117768554, + "learning_rate": 1.0427371936693531e-05, + "loss": 0.6356, + "step": 9638 + }, + { + "epoch": 1.51, + "grad_norm": 21.905429731539908, + "learning_rate": 1.0425687072428288e-05, + "loss": 0.5975, + "step": 9639 + }, + { + "epoch": 1.51, + "grad_norm": 28.15837052136694, + "learning_rate": 1.042400219605675e-05, + "loss": 0.6498, + "step": 9640 + }, + { + "epoch": 1.51, + "grad_norm": 16.623438171112767, + "learning_rate": 1.0422317307626842e-05, + "loss": 0.5523, + "step": 9641 + }, + { + "epoch": 1.51, + "grad_norm": 23.70465619693213, + "learning_rate": 1.0420632407186475e-05, + "loss": 0.6668, + "step": 9642 + }, + { + "epoch": 1.51, + "grad_norm": 19.58035066257976, + "learning_rate": 1.0418947494783567e-05, + "loss": 0.6056, + "step": 9643 + }, + { + "epoch": 1.51, + "grad_norm": 13.5340884592538, + "learning_rate": 1.041726257046604e-05, + "loss": 0.5483, + "step": 9644 + }, + { + "epoch": 1.51, + "grad_norm": 12.576004154279504, + "learning_rate": 1.0415577634281806e-05, + "loss": 0.544, + "step": 9645 + }, + { + "epoch": 1.51, + "grad_norm": 18.879241438387634, + "learning_rate": 1.0413892686278791e-05, + "loss": 0.5846, + "step": 9646 + }, + { + "epoch": 1.51, + "grad_norm": 19.072237066510255, + "learning_rate": 1.0412207726504906e-05, + 
"loss": 0.5579, + "step": 9647 + }, + { + "epoch": 1.51, + "grad_norm": 12.506065142355302, + "learning_rate": 1.041052275500808e-05, + "loss": 0.596, + "step": 9648 + }, + { + "epoch": 1.51, + "grad_norm": 23.049002962311587, + "learning_rate": 1.0408837771836222e-05, + "loss": 0.5536, + "step": 9649 + }, + { + "epoch": 1.51, + "grad_norm": 14.397936068666075, + "learning_rate": 1.0407152777037258e-05, + "loss": 0.6283, + "step": 9650 + }, + { + "epoch": 1.51, + "grad_norm": 15.235977550239177, + "learning_rate": 1.0405467770659107e-05, + "loss": 0.6029, + "step": 9651 + }, + { + "epoch": 1.51, + "grad_norm": 23.61655253112933, + "learning_rate": 1.0403782752749692e-05, + "loss": 0.6601, + "step": 9652 + }, + { + "epoch": 1.51, + "grad_norm": 17.83600702410067, + "learning_rate": 1.040209772335693e-05, + "loss": 0.5642, + "step": 9653 + }, + { + "epoch": 1.51, + "grad_norm": 17.185093523963605, + "learning_rate": 1.0400412682528745e-05, + "loss": 0.6029, + "step": 9654 + }, + { + "epoch": 1.51, + "grad_norm": 20.58923856554717, + "learning_rate": 1.0398727630313055e-05, + "loss": 0.5541, + "step": 9655 + }, + { + "epoch": 1.51, + "grad_norm": 18.243442389314847, + "learning_rate": 1.0397042566757789e-05, + "loss": 0.6346, + "step": 9656 + }, + { + "epoch": 1.51, + "grad_norm": 19.87343456668084, + "learning_rate": 1.039535749191086e-05, + "loss": 0.6218, + "step": 9657 + }, + { + "epoch": 1.51, + "grad_norm": 22.669560513443166, + "learning_rate": 1.0393672405820197e-05, + "loss": 0.5286, + "step": 9658 + }, + { + "epoch": 1.51, + "grad_norm": 24.856694818496706, + "learning_rate": 1.0391987308533722e-05, + "loss": 0.6374, + "step": 9659 + }, + { + "epoch": 1.51, + "grad_norm": 12.917608547927562, + "learning_rate": 1.0390302200099355e-05, + "loss": 0.5294, + "step": 9660 + }, + { + "epoch": 1.51, + "grad_norm": 22.980607044193555, + "learning_rate": 1.0388617080565024e-05, + "loss": 0.6468, + "step": 9661 + }, + { + "epoch": 1.51, + "grad_norm": 11.413652209010364, + "learning_rate": 1.0386931949978649e-05, + "loss": 0.5366, + "step": 9662 + }, + { + "epoch": 1.51, + "grad_norm": 23.617944231443634, + "learning_rate": 1.038524680838816e-05, + "loss": 0.5906, + "step": 9663 + }, + { + "epoch": 1.51, + "grad_norm": 18.45518717600868, + "learning_rate": 1.038356165584147e-05, + "loss": 0.6459, + "step": 9664 + }, + { + "epoch": 1.51, + "grad_norm": 14.670259877357442, + "learning_rate": 1.0381876492386516e-05, + "loss": 0.5649, + "step": 9665 + }, + { + "epoch": 1.51, + "grad_norm": 15.59739134506479, + "learning_rate": 1.0380191318071215e-05, + "loss": 0.5716, + "step": 9666 + }, + { + "epoch": 1.51, + "grad_norm": 17.128297436985324, + "learning_rate": 1.0378506132943497e-05, + "loss": 0.61, + "step": 9667 + }, + { + "epoch": 1.51, + "grad_norm": 20.822045197951343, + "learning_rate": 1.0376820937051286e-05, + "loss": 0.6517, + "step": 9668 + }, + { + "epoch": 1.51, + "grad_norm": 18.933589958913025, + "learning_rate": 1.0375135730442507e-05, + "loss": 0.5457, + "step": 9669 + }, + { + "epoch": 1.51, + "grad_norm": 25.78790087367503, + "learning_rate": 1.0373450513165089e-05, + "loss": 0.5006, + "step": 9670 + }, + { + "epoch": 1.51, + "grad_norm": 24.879292001607123, + "learning_rate": 1.0371765285266957e-05, + "loss": 0.7449, + "step": 9671 + }, + { + "epoch": 1.51, + "grad_norm": 14.096462902150305, + "learning_rate": 1.0370080046796034e-05, + "loss": 0.5806, + "step": 9672 + }, + { + "epoch": 1.51, + "grad_norm": 19.20883964014725, + "learning_rate": 1.0368394797800256e-05, + "loss": 
0.5285, + "step": 9673 + }, + { + "epoch": 1.51, + "grad_norm": 22.80245911197219, + "learning_rate": 1.0366709538327542e-05, + "loss": 0.5422, + "step": 9674 + }, + { + "epoch": 1.51, + "grad_norm": 22.57351551851732, + "learning_rate": 1.0365024268425826e-05, + "loss": 0.6182, + "step": 9675 + }, + { + "epoch": 1.51, + "grad_norm": 20.05579679874009, + "learning_rate": 1.036333898814303e-05, + "loss": 0.578, + "step": 9676 + }, + { + "epoch": 1.51, + "grad_norm": 17.21522120266898, + "learning_rate": 1.0361653697527088e-05, + "loss": 0.5919, + "step": 9677 + }, + { + "epoch": 1.51, + "grad_norm": 21.639236719654644, + "learning_rate": 1.0359968396625925e-05, + "loss": 0.5879, + "step": 9678 + }, + { + "epoch": 1.51, + "grad_norm": 16.53797098980881, + "learning_rate": 1.0358283085487473e-05, + "loss": 0.5471, + "step": 9679 + }, + { + "epoch": 1.51, + "grad_norm": 16.453302982016123, + "learning_rate": 1.0356597764159659e-05, + "loss": 0.6067, + "step": 9680 + }, + { + "epoch": 1.51, + "grad_norm": 24.8029974288082, + "learning_rate": 1.0354912432690412e-05, + "loss": 0.6423, + "step": 9681 + }, + { + "epoch": 1.51, + "grad_norm": 14.81667495944494, + "learning_rate": 1.0353227091127662e-05, + "loss": 0.5919, + "step": 9682 + }, + { + "epoch": 1.51, + "grad_norm": 19.322391265133675, + "learning_rate": 1.0351541739519341e-05, + "loss": 0.6508, + "step": 9683 + }, + { + "epoch": 1.51, + "grad_norm": 23.739329140593767, + "learning_rate": 1.034985637791338e-05, + "loss": 0.6226, + "step": 9684 + }, + { + "epoch": 1.51, + "grad_norm": 19.4631897523884, + "learning_rate": 1.034817100635771e-05, + "loss": 0.6291, + "step": 9685 + }, + { + "epoch": 1.51, + "grad_norm": 24.728615029887486, + "learning_rate": 1.0346485624900258e-05, + "loss": 0.5992, + "step": 9686 + }, + { + "epoch": 1.51, + "grad_norm": 25.357985935608827, + "learning_rate": 1.0344800233588959e-05, + "loss": 0.6013, + "step": 9687 + }, + { + "epoch": 1.51, + "grad_norm": 26.728368036087755, + "learning_rate": 1.0343114832471742e-05, + "loss": 0.5807, + "step": 9688 + }, + { + "epoch": 1.51, + "grad_norm": 14.641954210006624, + "learning_rate": 1.0341429421596538e-05, + "loss": 0.5927, + "step": 9689 + }, + { + "epoch": 1.51, + "grad_norm": 16.661158702157888, + "learning_rate": 1.0339744001011285e-05, + "loss": 0.6064, + "step": 9690 + }, + { + "epoch": 1.51, + "grad_norm": 15.535338796147864, + "learning_rate": 1.0338058570763907e-05, + "loss": 0.593, + "step": 9691 + }, + { + "epoch": 1.51, + "grad_norm": 16.891379072240596, + "learning_rate": 1.0336373130902346e-05, + "loss": 0.5489, + "step": 9692 + }, + { + "epoch": 1.51, + "grad_norm": 20.70750703680616, + "learning_rate": 1.0334687681474524e-05, + "loss": 0.5354, + "step": 9693 + }, + { + "epoch": 1.51, + "grad_norm": 23.374109674521367, + "learning_rate": 1.0333002222528384e-05, + "loss": 0.6331, + "step": 9694 + }, + { + "epoch": 1.51, + "grad_norm": 13.964810037638912, + "learning_rate": 1.0331316754111855e-05, + "loss": 0.5579, + "step": 9695 + }, + { + "epoch": 1.51, + "grad_norm": 19.67437888035102, + "learning_rate": 1.032963127627287e-05, + "loss": 0.5782, + "step": 9696 + }, + { + "epoch": 1.51, + "grad_norm": 14.720702438182393, + "learning_rate": 1.0327945789059366e-05, + "loss": 0.5521, + "step": 9697 + }, + { + "epoch": 1.51, + "grad_norm": 16.055180070100995, + "learning_rate": 1.0326260292519274e-05, + "loss": 0.6125, + "step": 9698 + }, + { + "epoch": 1.51, + "grad_norm": 22.082120845605495, + "learning_rate": 1.032457478670053e-05, + "loss": 0.5681, + 
"step": 9699 + }, + { + "epoch": 1.52, + "grad_norm": 27.765011397984036, + "learning_rate": 1.0322889271651073e-05, + "loss": 0.5868, + "step": 9700 + }, + { + "epoch": 1.52, + "grad_norm": 16.549520035750025, + "learning_rate": 1.0321203747418829e-05, + "loss": 0.6422, + "step": 9701 + }, + { + "epoch": 1.52, + "grad_norm": 24.63145328937301, + "learning_rate": 1.031951821405174e-05, + "loss": 0.5735, + "step": 9702 + }, + { + "epoch": 1.52, + "grad_norm": 18.452257447679084, + "learning_rate": 1.031783267159774e-05, + "loss": 0.6196, + "step": 9703 + }, + { + "epoch": 1.52, + "grad_norm": 16.704933470054325, + "learning_rate": 1.0316147120104767e-05, + "loss": 0.5698, + "step": 9704 + }, + { + "epoch": 1.52, + "grad_norm": 22.007496829167547, + "learning_rate": 1.0314461559620752e-05, + "loss": 0.5526, + "step": 9705 + }, + { + "epoch": 1.52, + "grad_norm": 13.626932779203724, + "learning_rate": 1.0312775990193635e-05, + "loss": 0.6084, + "step": 9706 + }, + { + "epoch": 1.52, + "grad_norm": 18.193909153640224, + "learning_rate": 1.0311090411871354e-05, + "loss": 0.6134, + "step": 9707 + }, + { + "epoch": 1.52, + "grad_norm": 22.992071896105898, + "learning_rate": 1.030940482470184e-05, + "loss": 0.653, + "step": 9708 + }, + { + "epoch": 1.52, + "grad_norm": 23.32786916092986, + "learning_rate": 1.0307719228733034e-05, + "loss": 0.5311, + "step": 9709 + }, + { + "epoch": 1.52, + "grad_norm": 20.04289358448721, + "learning_rate": 1.0306033624012875e-05, + "loss": 0.5914, + "step": 9710 + }, + { + "epoch": 1.52, + "grad_norm": 24.094784621958546, + "learning_rate": 1.0304348010589298e-05, + "loss": 0.6218, + "step": 9711 + }, + { + "epoch": 1.52, + "grad_norm": 26.112077356290023, + "learning_rate": 1.0302662388510243e-05, + "loss": 0.7058, + "step": 9712 + }, + { + "epoch": 1.52, + "grad_norm": 17.5109792447267, + "learning_rate": 1.0300976757823643e-05, + "loss": 0.6278, + "step": 9713 + }, + { + "epoch": 1.52, + "grad_norm": 21.711790830596787, + "learning_rate": 1.0299291118577445e-05, + "loss": 0.5624, + "step": 9714 + }, + { + "epoch": 1.52, + "grad_norm": 22.12122326879831, + "learning_rate": 1.029760547081958e-05, + "loss": 0.6401, + "step": 9715 + }, + { + "epoch": 1.52, + "grad_norm": 19.863390983195917, + "learning_rate": 1.0295919814597988e-05, + "loss": 0.5783, + "step": 9716 + }, + { + "epoch": 1.52, + "grad_norm": 24.91616310685399, + "learning_rate": 1.0294234149960614e-05, + "loss": 0.6954, + "step": 9717 + }, + { + "epoch": 1.52, + "grad_norm": 20.848052646670258, + "learning_rate": 1.0292548476955389e-05, + "loss": 0.6291, + "step": 9718 + }, + { + "epoch": 1.52, + "grad_norm": 19.508515081407044, + "learning_rate": 1.0290862795630261e-05, + "loss": 0.6066, + "step": 9719 + }, + { + "epoch": 1.52, + "grad_norm": 15.698917700203406, + "learning_rate": 1.028917710603316e-05, + "loss": 0.5862, + "step": 9720 + }, + { + "epoch": 1.52, + "grad_norm": 19.166175449769707, + "learning_rate": 1.0287491408212031e-05, + "loss": 0.5832, + "step": 9721 + }, + { + "epoch": 1.52, + "grad_norm": 15.907137928444762, + "learning_rate": 1.0285805702214817e-05, + "loss": 0.509, + "step": 9722 + }, + { + "epoch": 1.52, + "grad_norm": 25.613246921192253, + "learning_rate": 1.0284119988089458e-05, + "loss": 0.621, + "step": 9723 + }, + { + "epoch": 1.52, + "grad_norm": 24.33246406149417, + "learning_rate": 1.028243426588389e-05, + "loss": 0.6292, + "step": 9724 + }, + { + "epoch": 1.52, + "grad_norm": 22.49944751511538, + "learning_rate": 1.0280748535646058e-05, + "loss": 0.6251, + "step": 
9725 + }, + { + "epoch": 1.52, + "grad_norm": 17.96674256182304, + "learning_rate": 1.0279062797423899e-05, + "loss": 0.5056, + "step": 9726 + }, + { + "epoch": 1.52, + "grad_norm": 19.99551118157452, + "learning_rate": 1.0277377051265361e-05, + "loss": 0.5474, + "step": 9727 + }, + { + "epoch": 1.52, + "grad_norm": 21.519572105791326, + "learning_rate": 1.027569129721838e-05, + "loss": 0.5619, + "step": 9728 + }, + { + "epoch": 1.52, + "grad_norm": 19.134570249747842, + "learning_rate": 1.02740055353309e-05, + "loss": 0.5142, + "step": 9729 + }, + { + "epoch": 1.52, + "grad_norm": 18.55793940025469, + "learning_rate": 1.0272319765650862e-05, + "loss": 0.551, + "step": 9730 + }, + { + "epoch": 1.52, + "grad_norm": 16.757582540646, + "learning_rate": 1.0270633988226209e-05, + "loss": 0.5913, + "step": 9731 + }, + { + "epoch": 1.52, + "grad_norm": 15.880201362449023, + "learning_rate": 1.0268948203104888e-05, + "loss": 0.5933, + "step": 9732 + }, + { + "epoch": 1.52, + "grad_norm": 20.41890471454992, + "learning_rate": 1.026726241033483e-05, + "loss": 0.6509, + "step": 9733 + }, + { + "epoch": 1.52, + "grad_norm": 16.561051001233153, + "learning_rate": 1.0265576609963994e-05, + "loss": 0.5906, + "step": 9734 + }, + { + "epoch": 1.52, + "grad_norm": 25.22125263748386, + "learning_rate": 1.0263890802040308e-05, + "loss": 0.6282, + "step": 9735 + }, + { + "epoch": 1.52, + "grad_norm": 19.599353912923885, + "learning_rate": 1.0262204986611726e-05, + "loss": 0.6209, + "step": 9736 + }, + { + "epoch": 1.52, + "grad_norm": 17.644582756360847, + "learning_rate": 1.0260519163726183e-05, + "loss": 0.5601, + "step": 9737 + }, + { + "epoch": 1.52, + "grad_norm": 17.62967762103688, + "learning_rate": 1.025883333343163e-05, + "loss": 0.6239, + "step": 9738 + }, + { + "epoch": 1.52, + "grad_norm": 18.741691860675274, + "learning_rate": 1.025714749577601e-05, + "loss": 0.556, + "step": 9739 + }, + { + "epoch": 1.52, + "grad_norm": 20.94721018892791, + "learning_rate": 1.0255461650807264e-05, + "loss": 0.5684, + "step": 9740 + }, + { + "epoch": 1.52, + "grad_norm": 22.220464958127668, + "learning_rate": 1.025377579857334e-05, + "loss": 0.6047, + "step": 9741 + }, + { + "epoch": 1.52, + "grad_norm": 16.498191713185314, + "learning_rate": 1.0252089939122179e-05, + "loss": 0.5286, + "step": 9742 + }, + { + "epoch": 1.52, + "grad_norm": 17.820507427288362, + "learning_rate": 1.0250404072501724e-05, + "loss": 0.6005, + "step": 9743 + }, + { + "epoch": 1.52, + "grad_norm": 18.218123499631304, + "learning_rate": 1.024871819875993e-05, + "loss": 0.5803, + "step": 9744 + }, + { + "epoch": 1.52, + "grad_norm": 19.56208453483713, + "learning_rate": 1.0247032317944731e-05, + "loss": 0.6825, + "step": 9745 + }, + { + "epoch": 1.52, + "grad_norm": 18.358616007679327, + "learning_rate": 1.0245346430104082e-05, + "loss": 0.5834, + "step": 9746 + }, + { + "epoch": 1.52, + "grad_norm": 16.489950339584198, + "learning_rate": 1.024366053528592e-05, + "loss": 0.6595, + "step": 9747 + }, + { + "epoch": 1.52, + "grad_norm": 25.524704981387046, + "learning_rate": 1.0241974633538198e-05, + "loss": 0.6182, + "step": 9748 + }, + { + "epoch": 1.52, + "grad_norm": 16.349640465368353, + "learning_rate": 1.0240288724908858e-05, + "loss": 0.6046, + "step": 9749 + }, + { + "epoch": 1.52, + "grad_norm": 22.977115747664868, + "learning_rate": 1.0238602809445846e-05, + "loss": 0.6919, + "step": 9750 + }, + { + "epoch": 1.52, + "grad_norm": 18.00082695529993, + "learning_rate": 1.023691688719711e-05, + "loss": 0.6042, + "step": 9751 + }, + { + 
"epoch": 1.52, + "grad_norm": 21.24340528159094, + "learning_rate": 1.0235230958210596e-05, + "loss": 0.546, + "step": 9752 + }, + { + "epoch": 1.52, + "grad_norm": 25.84671425165797, + "learning_rate": 1.0233545022534252e-05, + "loss": 0.6432, + "step": 9753 + }, + { + "epoch": 1.52, + "grad_norm": 19.40913042170192, + "learning_rate": 1.0231859080216025e-05, + "loss": 0.6516, + "step": 9754 + }, + { + "epoch": 1.52, + "grad_norm": 26.12026396343735, + "learning_rate": 1.023017313130386e-05, + "loss": 0.5668, + "step": 9755 + }, + { + "epoch": 1.52, + "grad_norm": 18.141763595544948, + "learning_rate": 1.0228487175845707e-05, + "loss": 0.6057, + "step": 9756 + }, + { + "epoch": 1.52, + "grad_norm": 20.81871215724684, + "learning_rate": 1.0226801213889512e-05, + "loss": 0.6012, + "step": 9757 + }, + { + "epoch": 1.52, + "grad_norm": 30.97131783389649, + "learning_rate": 1.0225115245483219e-05, + "loss": 0.6591, + "step": 9758 + }, + { + "epoch": 1.52, + "grad_norm": 21.083537548890842, + "learning_rate": 1.0223429270674788e-05, + "loss": 0.6875, + "step": 9759 + }, + { + "epoch": 1.52, + "grad_norm": 16.42947557206542, + "learning_rate": 1.0221743289512153e-05, + "loss": 0.6043, + "step": 9760 + }, + { + "epoch": 1.52, + "grad_norm": 23.06860331990219, + "learning_rate": 1.0220057302043273e-05, + "loss": 0.6349, + "step": 9761 + }, + { + "epoch": 1.52, + "grad_norm": 12.762559951179993, + "learning_rate": 1.0218371308316089e-05, + "loss": 0.5893, + "step": 9762 + }, + { + "epoch": 1.52, + "grad_norm": 20.131425721705238, + "learning_rate": 1.0216685308378556e-05, + "loss": 0.6209, + "step": 9763 + }, + { + "epoch": 1.53, + "grad_norm": 20.56900243104382, + "learning_rate": 1.0214999302278614e-05, + "loss": 0.5722, + "step": 9764 + }, + { + "epoch": 1.53, + "grad_norm": 17.030102627278943, + "learning_rate": 1.0213313290064222e-05, + "loss": 0.5976, + "step": 9765 + }, + { + "epoch": 1.53, + "grad_norm": 20.47728830861015, + "learning_rate": 1.0211627271783323e-05, + "loss": 0.5158, + "step": 9766 + }, + { + "epoch": 1.53, + "grad_norm": 17.42929441760261, + "learning_rate": 1.0209941247483868e-05, + "loss": 0.574, + "step": 9767 + }, + { + "epoch": 1.53, + "grad_norm": 23.15787797487095, + "learning_rate": 1.0208255217213809e-05, + "loss": 0.6116, + "step": 9768 + }, + { + "epoch": 1.53, + "grad_norm": 25.321843004212354, + "learning_rate": 1.0206569181021092e-05, + "loss": 0.6432, + "step": 9769 + }, + { + "epoch": 1.53, + "grad_norm": 37.74511198148553, + "learning_rate": 1.0204883138953666e-05, + "loss": 0.6437, + "step": 9770 + }, + { + "epoch": 1.53, + "grad_norm": 19.193600862023636, + "learning_rate": 1.0203197091059485e-05, + "loss": 0.6095, + "step": 9771 + }, + { + "epoch": 1.53, + "grad_norm": 22.42449413513622, + "learning_rate": 1.02015110373865e-05, + "loss": 0.5764, + "step": 9772 + }, + { + "epoch": 1.53, + "grad_norm": 11.61353653422856, + "learning_rate": 1.0199824977982658e-05, + "loss": 0.5876, + "step": 9773 + }, + { + "epoch": 1.53, + "grad_norm": 15.01537566618072, + "learning_rate": 1.0198138912895907e-05, + "loss": 0.5989, + "step": 9774 + }, + { + "epoch": 1.53, + "grad_norm": 20.777430230276448, + "learning_rate": 1.0196452842174202e-05, + "loss": 0.5752, + "step": 9775 + }, + { + "epoch": 1.53, + "grad_norm": 21.44965588164675, + "learning_rate": 1.0194766765865498e-05, + "loss": 0.6001, + "step": 9776 + }, + { + "epoch": 1.53, + "grad_norm": 26.35495229814092, + "learning_rate": 1.0193080684017737e-05, + "loss": 0.6678, + "step": 9777 + }, + { + "epoch": 1.53, 
+ "grad_norm": 15.17195117393095, + "learning_rate": 1.0191394596678879e-05, + "loss": 0.6193, + "step": 9778 + }, + { + "epoch": 1.53, + "grad_norm": 21.180960499109634, + "learning_rate": 1.0189708503896865e-05, + "loss": 0.5787, + "step": 9779 + }, + { + "epoch": 1.53, + "grad_norm": 22.98836179604109, + "learning_rate": 1.0188022405719652e-05, + "loss": 0.6599, + "step": 9780 + }, + { + "epoch": 1.53, + "grad_norm": 17.355594134001255, + "learning_rate": 1.0186336302195197e-05, + "loss": 0.655, + "step": 9781 + }, + { + "epoch": 1.53, + "grad_norm": 23.255886298603194, + "learning_rate": 1.0184650193371444e-05, + "loss": 0.6365, + "step": 9782 + }, + { + "epoch": 1.53, + "grad_norm": 19.904635072682954, + "learning_rate": 1.0182964079296347e-05, + "loss": 0.5763, + "step": 9783 + }, + { + "epoch": 1.53, + "grad_norm": 17.628876284568133, + "learning_rate": 1.0181277960017856e-05, + "loss": 0.5941, + "step": 9784 + }, + { + "epoch": 1.53, + "grad_norm": 20.917966169113157, + "learning_rate": 1.017959183558393e-05, + "loss": 0.5622, + "step": 9785 + }, + { + "epoch": 1.53, + "grad_norm": 23.77326155944365, + "learning_rate": 1.0177905706042517e-05, + "loss": 0.623, + "step": 9786 + }, + { + "epoch": 1.53, + "grad_norm": 14.67839546872158, + "learning_rate": 1.0176219571441565e-05, + "loss": 0.552, + "step": 9787 + }, + { + "epoch": 1.53, + "grad_norm": 17.126178967570326, + "learning_rate": 1.0174533431829039e-05, + "loss": 0.6912, + "step": 9788 + }, + { + "epoch": 1.53, + "grad_norm": 25.925455739672177, + "learning_rate": 1.0172847287252878e-05, + "loss": 0.5756, + "step": 9789 + }, + { + "epoch": 1.53, + "grad_norm": 20.118236408110047, + "learning_rate": 1.0171161137761042e-05, + "loss": 0.5954, + "step": 9790 + }, + { + "epoch": 1.53, + "grad_norm": 15.25787566846281, + "learning_rate": 1.0169474983401488e-05, + "loss": 0.5304, + "step": 9791 + }, + { + "epoch": 1.53, + "grad_norm": 19.24041767631331, + "learning_rate": 1.016778882422216e-05, + "loss": 0.5783, + "step": 9792 + }, + { + "epoch": 1.53, + "grad_norm": 16.971278162102124, + "learning_rate": 1.0166102660271018e-05, + "loss": 0.5405, + "step": 9793 + }, + { + "epoch": 1.53, + "grad_norm": 17.206598312785587, + "learning_rate": 1.0164416491596014e-05, + "loss": 0.5874, + "step": 9794 + }, + { + "epoch": 1.53, + "grad_norm": 17.00980527651768, + "learning_rate": 1.01627303182451e-05, + "loss": 0.5576, + "step": 9795 + }, + { + "epoch": 1.53, + "grad_norm": 19.555684191899413, + "learning_rate": 1.016104414026623e-05, + "loss": 0.5853, + "step": 9796 + }, + { + "epoch": 1.53, + "grad_norm": 26.607579592369152, + "learning_rate": 1.015935795770736e-05, + "loss": 0.6985, + "step": 9797 + }, + { + "epoch": 1.53, + "grad_norm": 20.729322284245352, + "learning_rate": 1.0157671770616444e-05, + "loss": 0.659, + "step": 9798 + }, + { + "epoch": 1.53, + "grad_norm": 11.630172165972308, + "learning_rate": 1.0155985579041434e-05, + "loss": 0.5162, + "step": 9799 + }, + { + "epoch": 1.53, + "grad_norm": 18.524771154057753, + "learning_rate": 1.0154299383030287e-05, + "loss": 0.5533, + "step": 9800 + }, + { + "epoch": 1.53, + "grad_norm": 21.371466626955154, + "learning_rate": 1.0152613182630953e-05, + "loss": 0.684, + "step": 9801 + }, + { + "epoch": 1.53, + "grad_norm": 19.852210292883242, + "learning_rate": 1.0150926977891388e-05, + "loss": 0.6126, + "step": 9802 + }, + { + "epoch": 1.53, + "grad_norm": 28.77283713568664, + "learning_rate": 1.0149240768859554e-05, + "loss": 0.6412, + "step": 9803 + }, + { + "epoch": 1.53, + 
"grad_norm": 14.271240985111056, + "learning_rate": 1.0147554555583394e-05, + "loss": 0.6132, + "step": 9804 + }, + { + "epoch": 1.53, + "grad_norm": 17.990411848866618, + "learning_rate": 1.0145868338110873e-05, + "loss": 0.585, + "step": 9805 + }, + { + "epoch": 1.53, + "grad_norm": 21.628319372530903, + "learning_rate": 1.0144182116489938e-05, + "loss": 0.6258, + "step": 9806 + }, + { + "epoch": 1.53, + "grad_norm": 30.878107570909272, + "learning_rate": 1.0142495890768551e-05, + "loss": 0.6224, + "step": 9807 + }, + { + "epoch": 1.53, + "grad_norm": 23.719539571393554, + "learning_rate": 1.0140809660994663e-05, + "loss": 0.6045, + "step": 9808 + }, + { + "epoch": 1.53, + "grad_norm": 22.16270071141561, + "learning_rate": 1.0139123427216231e-05, + "loss": 0.6214, + "step": 9809 + }, + { + "epoch": 1.53, + "grad_norm": 15.264404714201888, + "learning_rate": 1.013743718948121e-05, + "loss": 0.5899, + "step": 9810 + }, + { + "epoch": 1.53, + "grad_norm": 18.782291226396453, + "learning_rate": 1.0135750947837558e-05, + "loss": 0.5736, + "step": 9811 + }, + { + "epoch": 1.53, + "grad_norm": 14.00041017878756, + "learning_rate": 1.0134064702333225e-05, + "loss": 0.5152, + "step": 9812 + }, + { + "epoch": 1.53, + "grad_norm": 17.09147224906994, + "learning_rate": 1.0132378453016171e-05, + "loss": 0.6397, + "step": 9813 + }, + { + "epoch": 1.53, + "grad_norm": 17.93878225236053, + "learning_rate": 1.013069219993435e-05, + "loss": 0.5874, + "step": 9814 + }, + { + "epoch": 1.53, + "grad_norm": 18.597211456268017, + "learning_rate": 1.0129005943135721e-05, + "loss": 0.5429, + "step": 9815 + }, + { + "epoch": 1.53, + "grad_norm": 13.85037321276858, + "learning_rate": 1.012731968266824e-05, + "loss": 0.5659, + "step": 9816 + }, + { + "epoch": 1.53, + "grad_norm": 23.714440965821364, + "learning_rate": 1.012563341857986e-05, + "loss": 0.5713, + "step": 9817 + }, + { + "epoch": 1.53, + "grad_norm": 22.695734829018026, + "learning_rate": 1.0123947150918539e-05, + "loss": 0.5793, + "step": 9818 + }, + { + "epoch": 1.53, + "grad_norm": 18.717277534218006, + "learning_rate": 1.0122260879732231e-05, + "loss": 0.6535, + "step": 9819 + }, + { + "epoch": 1.53, + "grad_norm": 20.35976604736056, + "learning_rate": 1.01205746050689e-05, + "loss": 0.7365, + "step": 9820 + }, + { + "epoch": 1.53, + "grad_norm": 26.98959759102438, + "learning_rate": 1.0118888326976494e-05, + "loss": 0.6402, + "step": 9821 + }, + { + "epoch": 1.53, + "grad_norm": 20.390692643532052, + "learning_rate": 1.0117202045502978e-05, + "loss": 0.6214, + "step": 9822 + }, + { + "epoch": 1.53, + "grad_norm": 25.895291304471748, + "learning_rate": 1.01155157606963e-05, + "loss": 0.5579, + "step": 9823 + }, + { + "epoch": 1.53, + "grad_norm": 20.940143999718817, + "learning_rate": 1.0113829472604422e-05, + "loss": 0.6262, + "step": 9824 + }, + { + "epoch": 1.53, + "grad_norm": 18.693191947661525, + "learning_rate": 1.0112143181275302e-05, + "loss": 0.5833, + "step": 9825 + }, + { + "epoch": 1.53, + "grad_norm": 12.92902628311404, + "learning_rate": 1.0110456886756894e-05, + "loss": 0.5885, + "step": 9826 + }, + { + "epoch": 1.53, + "grad_norm": 18.488499609300288, + "learning_rate": 1.010877058909716e-05, + "loss": 0.6217, + "step": 9827 + }, + { + "epoch": 1.54, + "grad_norm": 14.201822237923107, + "learning_rate": 1.0107084288344052e-05, + "loss": 0.6189, + "step": 9828 + }, + { + "epoch": 1.54, + "grad_norm": 20.196419569325847, + "learning_rate": 1.0105397984545524e-05, + "loss": 0.6501, + "step": 9829 + }, + { + "epoch": 1.54, + 
"grad_norm": 22.0816349336105, + "learning_rate": 1.0103711677749548e-05, + "loss": 0.5687, + "step": 9830 + }, + { + "epoch": 1.54, + "grad_norm": 23.91595385392772, + "learning_rate": 1.0102025368004066e-05, + "loss": 0.5631, + "step": 9831 + }, + { + "epoch": 1.54, + "grad_norm": 19.674529830649426, + "learning_rate": 1.0100339055357048e-05, + "loss": 0.6132, + "step": 9832 + }, + { + "epoch": 1.54, + "grad_norm": 16.405248610498568, + "learning_rate": 1.0098652739856441e-05, + "loss": 0.6614, + "step": 9833 + }, + { + "epoch": 1.54, + "grad_norm": 18.41468111471084, + "learning_rate": 1.0096966421550209e-05, + "loss": 0.5909, + "step": 9834 + }, + { + "epoch": 1.54, + "grad_norm": 19.370706133186534, + "learning_rate": 1.0095280100486309e-05, + "loss": 0.6184, + "step": 9835 + }, + { + "epoch": 1.54, + "grad_norm": 16.965911400366622, + "learning_rate": 1.00935937767127e-05, + "loss": 0.5726, + "step": 9836 + }, + { + "epoch": 1.54, + "grad_norm": 14.131970856564038, + "learning_rate": 1.0091907450277338e-05, + "loss": 0.531, + "step": 9837 + }, + { + "epoch": 1.54, + "grad_norm": 17.09407529094273, + "learning_rate": 1.0090221121228178e-05, + "loss": 0.5342, + "step": 9838 + }, + { + "epoch": 1.54, + "grad_norm": 28.24465590330699, + "learning_rate": 1.0088534789613188e-05, + "loss": 0.6801, + "step": 9839 + }, + { + "epoch": 1.54, + "grad_norm": 20.876139976588874, + "learning_rate": 1.0086848455480318e-05, + "loss": 0.6493, + "step": 9840 + }, + { + "epoch": 1.54, + "grad_norm": 24.241602855250118, + "learning_rate": 1.0085162118877527e-05, + "loss": 0.5267, + "step": 9841 + }, + { + "epoch": 1.54, + "grad_norm": 25.365857996756986, + "learning_rate": 1.0083475779852778e-05, + "loss": 0.6316, + "step": 9842 + }, + { + "epoch": 1.54, + "grad_norm": 17.95590279190697, + "learning_rate": 1.0081789438454026e-05, + "loss": 0.5928, + "step": 9843 + }, + { + "epoch": 1.54, + "grad_norm": 18.109198327474008, + "learning_rate": 1.0080103094729229e-05, + "loss": 0.5614, + "step": 9844 + }, + { + "epoch": 1.54, + "grad_norm": 22.289320802512265, + "learning_rate": 1.007841674872635e-05, + "loss": 0.6248, + "step": 9845 + }, + { + "epoch": 1.54, + "grad_norm": 16.843820585021568, + "learning_rate": 1.007673040049334e-05, + "loss": 0.5457, + "step": 9846 + }, + { + "epoch": 1.54, + "grad_norm": 28.343440797542915, + "learning_rate": 1.0075044050078166e-05, + "loss": 0.6412, + "step": 9847 + }, + { + "epoch": 1.54, + "grad_norm": 16.585986569009034, + "learning_rate": 1.0073357697528779e-05, + "loss": 0.6133, + "step": 9848 + }, + { + "epoch": 1.54, + "grad_norm": 22.969192878190256, + "learning_rate": 1.0071671342893148e-05, + "loss": 0.5725, + "step": 9849 + }, + { + "epoch": 1.54, + "grad_norm": 18.826596777377638, + "learning_rate": 1.0069984986219219e-05, + "loss": 0.6182, + "step": 9850 + }, + { + "epoch": 1.54, + "grad_norm": 17.93768335125752, + "learning_rate": 1.0068298627554962e-05, + "loss": 0.5551, + "step": 9851 + }, + { + "epoch": 1.54, + "grad_norm": 27.673201439254342, + "learning_rate": 1.0066612266948333e-05, + "loss": 0.5886, + "step": 9852 + }, + { + "epoch": 1.54, + "grad_norm": 16.402230321188068, + "learning_rate": 1.0064925904447288e-05, + "loss": 0.6064, + "step": 9853 + }, + { + "epoch": 1.54, + "grad_norm": 22.52270246081074, + "learning_rate": 1.006323954009979e-05, + "loss": 0.6101, + "step": 9854 + }, + { + "epoch": 1.54, + "grad_norm": 20.883609994830113, + "learning_rate": 1.0061553173953794e-05, + "loss": 0.6599, + "step": 9855 + }, + { + "epoch": 1.54, + 
"grad_norm": 15.423958749690176, + "learning_rate": 1.0059866806057263e-05, + "loss": 0.5467, + "step": 9856 + }, + { + "epoch": 1.54, + "grad_norm": 28.857281790198073, + "learning_rate": 1.0058180436458156e-05, + "loss": 0.5315, + "step": 9857 + }, + { + "epoch": 1.54, + "grad_norm": 15.796284954170929, + "learning_rate": 1.0056494065204428e-05, + "loss": 0.5457, + "step": 9858 + }, + { + "epoch": 1.54, + "grad_norm": 17.452180206763426, + "learning_rate": 1.0054807692344045e-05, + "loss": 0.6638, + "step": 9859 + }, + { + "epoch": 1.54, + "grad_norm": 14.82281976866086, + "learning_rate": 1.0053121317924963e-05, + "loss": 0.516, + "step": 9860 + }, + { + "epoch": 1.54, + "grad_norm": 22.84377747911087, + "learning_rate": 1.0051434941995142e-05, + "loss": 0.6408, + "step": 9861 + }, + { + "epoch": 1.54, + "grad_norm": 20.496517203922064, + "learning_rate": 1.004974856460254e-05, + "loss": 0.5712, + "step": 9862 + }, + { + "epoch": 1.54, + "grad_norm": 16.528885550589, + "learning_rate": 1.0048062185795117e-05, + "loss": 0.6111, + "step": 9863 + }, + { + "epoch": 1.54, + "grad_norm": 15.583634769062323, + "learning_rate": 1.0046375805620838e-05, + "loss": 0.5953, + "step": 9864 + }, + { + "epoch": 1.54, + "grad_norm": 28.530671231331553, + "learning_rate": 1.004468942412765e-05, + "loss": 0.6013, + "step": 9865 + }, + { + "epoch": 1.54, + "grad_norm": 28.883483566276666, + "learning_rate": 1.0043003041363531e-05, + "loss": 0.5862, + "step": 9866 + }, + { + "epoch": 1.54, + "grad_norm": 11.668462437624736, + "learning_rate": 1.0041316657376426e-05, + "loss": 0.5047, + "step": 9867 + }, + { + "epoch": 1.54, + "grad_norm": 19.723909886349947, + "learning_rate": 1.0039630272214299e-05, + "loss": 0.5517, + "step": 9868 + }, + { + "epoch": 1.54, + "grad_norm": 15.050017125355206, + "learning_rate": 1.003794388592511e-05, + "loss": 0.5672, + "step": 9869 + }, + { + "epoch": 1.54, + "grad_norm": 13.931392360492978, + "learning_rate": 1.0036257498556821e-05, + "loss": 0.6131, + "step": 9870 + }, + { + "epoch": 1.54, + "grad_norm": 17.521188137504225, + "learning_rate": 1.0034571110157388e-05, + "loss": 0.5368, + "step": 9871 + }, + { + "epoch": 1.54, + "grad_norm": 16.832271467979282, + "learning_rate": 1.0032884720774773e-05, + "loss": 0.6052, + "step": 9872 + }, + { + "epoch": 1.54, + "grad_norm": 21.914611576973698, + "learning_rate": 1.0031198330456936e-05, + "loss": 0.5725, + "step": 9873 + }, + { + "epoch": 1.54, + "grad_norm": 19.94124677633545, + "learning_rate": 1.002951193925184e-05, + "loss": 0.6069, + "step": 9874 + }, + { + "epoch": 1.54, + "grad_norm": 15.148054018719586, + "learning_rate": 1.0027825547207435e-05, + "loss": 0.5259, + "step": 9875 + }, + { + "epoch": 1.54, + "grad_norm": 27.810999591946036, + "learning_rate": 1.0026139154371694e-05, + "loss": 0.5989, + "step": 9876 + }, + { + "epoch": 1.54, + "grad_norm": 14.218846892980409, + "learning_rate": 1.0024452760792566e-05, + "loss": 0.6176, + "step": 9877 + }, + { + "epoch": 1.54, + "grad_norm": 22.009688907103346, + "learning_rate": 1.0022766366518018e-05, + "loss": 0.6078, + "step": 9878 + }, + { + "epoch": 1.54, + "grad_norm": 26.936615283271358, + "learning_rate": 1.0021079971596009e-05, + "loss": 0.6418, + "step": 9879 + }, + { + "epoch": 1.54, + "grad_norm": 14.099479744977542, + "learning_rate": 1.0019393576074497e-05, + "loss": 0.4746, + "step": 9880 + }, + { + "epoch": 1.54, + "grad_norm": 14.946486662262915, + "learning_rate": 1.0017707180001443e-05, + "loss": 0.5979, + "step": 9881 + }, + { + "epoch": 1.54, + 
"grad_norm": 20.441687089393028, + "learning_rate": 1.0016020783424805e-05, + "loss": 0.6242, + "step": 9882 + }, + { + "epoch": 1.54, + "grad_norm": 19.200995576274416, + "learning_rate": 1.001433438639255e-05, + "loss": 0.5817, + "step": 9883 + }, + { + "epoch": 1.54, + "grad_norm": 25.13079098251688, + "learning_rate": 1.0012647988952628e-05, + "loss": 0.7288, + "step": 9884 + }, + { + "epoch": 1.54, + "grad_norm": 12.194260132108658, + "learning_rate": 1.0010961591153008e-05, + "loss": 0.6085, + "step": 9885 + }, + { + "epoch": 1.54, + "grad_norm": 15.953252793714666, + "learning_rate": 1.0009275193041645e-05, + "loss": 0.6196, + "step": 9886 + }, + { + "epoch": 1.54, + "grad_norm": 29.297800509520645, + "learning_rate": 1.0007588794666503e-05, + "loss": 0.6428, + "step": 9887 + }, + { + "epoch": 1.54, + "grad_norm": 15.571010031342773, + "learning_rate": 1.000590239607554e-05, + "loss": 0.5867, + "step": 9888 + }, + { + "epoch": 1.54, + "grad_norm": 15.933519257083526, + "learning_rate": 1.0004215997316715e-05, + "loss": 0.5932, + "step": 9889 + }, + { + "epoch": 1.54, + "grad_norm": 16.45958911313961, + "learning_rate": 1.0002529598437988e-05, + "loss": 0.5464, + "step": 9890 + }, + { + "epoch": 1.54, + "grad_norm": 16.00742481785263, + "learning_rate": 1.0000843199487325e-05, + "loss": 0.6235, + "step": 9891 + }, + { + "epoch": 1.55, + "grad_norm": 16.62902593804824, + "learning_rate": 9.99915680051268e-06, + "loss": 0.6296, + "step": 9892 + }, + { + "epoch": 1.55, + "grad_norm": 24.121323145343172, + "learning_rate": 9.997470401562015e-06, + "loss": 0.5748, + "step": 9893 + }, + { + "epoch": 1.55, + "grad_norm": 19.20997601089092, + "learning_rate": 9.995784002683288e-06, + "loss": 0.6191, + "step": 9894 + }, + { + "epoch": 1.55, + "grad_norm": 13.328343513168837, + "learning_rate": 9.994097603924462e-06, + "loss": 0.5634, + "step": 9895 + }, + { + "epoch": 1.55, + "grad_norm": 16.472865471744875, + "learning_rate": 9.992411205333498e-06, + "loss": 0.6051, + "step": 9896 + }, + { + "epoch": 1.55, + "grad_norm": 21.655178533817047, + "learning_rate": 9.990724806958358e-06, + "loss": 0.5335, + "step": 9897 + }, + { + "epoch": 1.55, + "grad_norm": 21.636031693794646, + "learning_rate": 9.989038408846996e-06, + "loss": 0.6225, + "step": 9898 + }, + { + "epoch": 1.55, + "grad_norm": 21.846735055191772, + "learning_rate": 9.987352011047374e-06, + "loss": 0.6031, + "step": 9899 + }, + { + "epoch": 1.55, + "grad_norm": 22.382934042715444, + "learning_rate": 9.985665613607454e-06, + "loss": 0.5799, + "step": 9900 + }, + { + "epoch": 1.55, + "grad_norm": 16.21675362455437, + "learning_rate": 9.983979216575195e-06, + "loss": 0.534, + "step": 9901 + }, + { + "epoch": 1.55, + "grad_norm": 20.166205386545933, + "learning_rate": 9.982292819998562e-06, + "loss": 0.6495, + "step": 9902 + }, + { + "epoch": 1.55, + "grad_norm": 17.94326492206648, + "learning_rate": 9.980606423925506e-06, + "loss": 0.7226, + "step": 9903 + }, + { + "epoch": 1.55, + "grad_norm": 19.587813356402894, + "learning_rate": 9.978920028403995e-06, + "loss": 0.6983, + "step": 9904 + }, + { + "epoch": 1.55, + "grad_norm": 23.414201246632665, + "learning_rate": 9.977233633481984e-06, + "loss": 0.5428, + "step": 9905 + }, + { + "epoch": 1.55, + "grad_norm": 18.64245689516531, + "learning_rate": 9.975547239207435e-06, + "loss": 0.5448, + "step": 9906 + }, + { + "epoch": 1.55, + "grad_norm": 24.025556916818953, + "learning_rate": 9.973860845628311e-06, + "loss": 0.6219, + "step": 9907 + }, + { + "epoch": 1.55, + "grad_norm": 
17.963216960531298, + "learning_rate": 9.972174452792568e-06, + "loss": 0.537, + "step": 9908 + }, + { + "epoch": 1.55, + "grad_norm": 18.694681263352486, + "learning_rate": 9.970488060748164e-06, + "loss": 0.585, + "step": 9909 + }, + { + "epoch": 1.55, + "grad_norm": 22.050040062475464, + "learning_rate": 9.968801669543066e-06, + "loss": 0.6326, + "step": 9910 + }, + { + "epoch": 1.55, + "grad_norm": 21.306401981461846, + "learning_rate": 9.967115279225228e-06, + "loss": 0.6385, + "step": 9911 + }, + { + "epoch": 1.55, + "grad_norm": 20.722230479390333, + "learning_rate": 9.965428889842617e-06, + "loss": 0.5485, + "step": 9912 + }, + { + "epoch": 1.55, + "grad_norm": 14.268524689447355, + "learning_rate": 9.963742501443184e-06, + "loss": 0.5791, + "step": 9913 + }, + { + "epoch": 1.55, + "grad_norm": 15.611129566668549, + "learning_rate": 9.962056114074893e-06, + "loss": 0.5641, + "step": 9914 + }, + { + "epoch": 1.55, + "grad_norm": 18.79455886041507, + "learning_rate": 9.960369727785703e-06, + "loss": 0.687, + "step": 9915 + }, + { + "epoch": 1.55, + "grad_norm": 15.246771988185504, + "learning_rate": 9.958683342623579e-06, + "loss": 0.5719, + "step": 9916 + }, + { + "epoch": 1.55, + "grad_norm": 18.210505642019125, + "learning_rate": 9.956996958636474e-06, + "loss": 0.5923, + "step": 9917 + }, + { + "epoch": 1.55, + "grad_norm": 24.489068636129506, + "learning_rate": 9.955310575872351e-06, + "loss": 0.6331, + "step": 9918 + }, + { + "epoch": 1.55, + "grad_norm": 21.104498795216216, + "learning_rate": 9.953624194379165e-06, + "loss": 0.5477, + "step": 9919 + }, + { + "epoch": 1.55, + "grad_norm": 11.122084049287043, + "learning_rate": 9.951937814204884e-06, + "loss": 0.5328, + "step": 9920 + }, + { + "epoch": 1.55, + "grad_norm": 20.779803803681574, + "learning_rate": 9.950251435397466e-06, + "loss": 0.6101, + "step": 9921 + }, + { + "epoch": 1.55, + "grad_norm": 19.895115824106036, + "learning_rate": 9.948565058004863e-06, + "loss": 0.6086, + "step": 9922 + }, + { + "epoch": 1.55, + "grad_norm": 20.166533984156313, + "learning_rate": 9.94687868207504e-06, + "loss": 0.5932, + "step": 9923 + }, + { + "epoch": 1.55, + "grad_norm": 15.573150429252294, + "learning_rate": 9.945192307655959e-06, + "loss": 0.5164, + "step": 9924 + }, + { + "epoch": 1.55, + "grad_norm": 16.22028948068708, + "learning_rate": 9.943505934795572e-06, + "loss": 0.6668, + "step": 9925 + }, + { + "epoch": 1.55, + "grad_norm": 24.384813425233407, + "learning_rate": 9.941819563541849e-06, + "loss": 0.644, + "step": 9926 + }, + { + "epoch": 1.55, + "grad_norm": 18.500511899354784, + "learning_rate": 9.940133193942742e-06, + "loss": 0.576, + "step": 9927 + }, + { + "epoch": 1.55, + "grad_norm": 26.124508283290584, + "learning_rate": 9.938446826046209e-06, + "loss": 0.614, + "step": 9928 + }, + { + "epoch": 1.55, + "grad_norm": 22.123440994681633, + "learning_rate": 9.936760459900215e-06, + "loss": 0.5415, + "step": 9929 + }, + { + "epoch": 1.55, + "grad_norm": 21.66765646729247, + "learning_rate": 9.935074095552714e-06, + "loss": 0.6245, + "step": 9930 + }, + { + "epoch": 1.55, + "grad_norm": 16.512151878962634, + "learning_rate": 9.933387733051672e-06, + "loss": 0.6438, + "step": 9931 + }, + { + "epoch": 1.55, + "grad_norm": 16.247368819620945, + "learning_rate": 9.93170137244504e-06, + "loss": 0.6089, + "step": 9932 + }, + { + "epoch": 1.55, + "grad_norm": 15.046404442505159, + "learning_rate": 9.930015013780783e-06, + "loss": 0.5949, + "step": 9933 + }, + { + "epoch": 1.55, + "grad_norm": 15.964185564414901, + 
"learning_rate": 9.928328657106855e-06, + "loss": 0.4891, + "step": 9934 + }, + { + "epoch": 1.55, + "grad_norm": 19.93954144238853, + "learning_rate": 9.92664230247122e-06, + "loss": 0.644, + "step": 9935 + }, + { + "epoch": 1.55, + "grad_norm": 19.822924170105942, + "learning_rate": 9.924955949921839e-06, + "loss": 0.5922, + "step": 9936 + }, + { + "epoch": 1.55, + "grad_norm": 26.63956348843764, + "learning_rate": 9.923269599506664e-06, + "loss": 0.6541, + "step": 9937 + }, + { + "epoch": 1.55, + "grad_norm": 24.025018378244766, + "learning_rate": 9.921583251273654e-06, + "loss": 0.5801, + "step": 9938 + }, + { + "epoch": 1.55, + "grad_norm": 21.50971012217428, + "learning_rate": 9.919896905270772e-06, + "loss": 0.6773, + "step": 9939 + }, + { + "epoch": 1.55, + "grad_norm": 12.90671493267816, + "learning_rate": 9.918210561545974e-06, + "loss": 0.559, + "step": 9940 + }, + { + "epoch": 1.55, + "grad_norm": 20.98238292336455, + "learning_rate": 9.916524220147224e-06, + "loss": 0.6073, + "step": 9941 + }, + { + "epoch": 1.55, + "grad_norm": 14.50327156007637, + "learning_rate": 9.914837881122474e-06, + "loss": 0.4855, + "step": 9942 + }, + { + "epoch": 1.55, + "grad_norm": 16.27992628559294, + "learning_rate": 9.913151544519685e-06, + "loss": 0.5874, + "step": 9943 + }, + { + "epoch": 1.55, + "grad_norm": 22.04891448232537, + "learning_rate": 9.911465210386813e-06, + "loss": 0.6289, + "step": 9944 + }, + { + "epoch": 1.55, + "grad_norm": 16.440772380831724, + "learning_rate": 9.909778878771822e-06, + "loss": 0.5556, + "step": 9945 + }, + { + "epoch": 1.55, + "grad_norm": 24.39156764333135, + "learning_rate": 9.908092549722667e-06, + "loss": 0.6782, + "step": 9946 + }, + { + "epoch": 1.55, + "grad_norm": 22.937471535272632, + "learning_rate": 9.906406223287304e-06, + "loss": 0.6178, + "step": 9947 + }, + { + "epoch": 1.55, + "grad_norm": 16.38511420886467, + "learning_rate": 9.904719899513693e-06, + "loss": 0.5479, + "step": 9948 + }, + { + "epoch": 1.55, + "grad_norm": 15.071545944442061, + "learning_rate": 9.903033578449793e-06, + "loss": 0.5566, + "step": 9949 + }, + { + "epoch": 1.55, + "grad_norm": 19.42644736235071, + "learning_rate": 9.90134726014356e-06, + "loss": 0.5795, + "step": 9950 + }, + { + "epoch": 1.55, + "grad_norm": 26.848196280376573, + "learning_rate": 9.899660944642957e-06, + "loss": 0.5194, + "step": 9951 + }, + { + "epoch": 1.55, + "grad_norm": 18.89129961708667, + "learning_rate": 9.897974631995937e-06, + "loss": 0.6012, + "step": 9952 + }, + { + "epoch": 1.55, + "grad_norm": 15.843725417473586, + "learning_rate": 9.896288322250455e-06, + "loss": 0.5459, + "step": 9953 + }, + { + "epoch": 1.55, + "grad_norm": 27.010483674494523, + "learning_rate": 9.894602015454476e-06, + "loss": 0.603, + "step": 9954 + }, + { + "epoch": 1.55, + "grad_norm": 13.783049845427678, + "learning_rate": 9.892915711655953e-06, + "loss": 0.5753, + "step": 9955 + }, + { + "epoch": 1.56, + "grad_norm": 27.184418986024316, + "learning_rate": 9.891229410902846e-06, + "loss": 0.6342, + "step": 9956 + }, + { + "epoch": 1.56, + "grad_norm": 33.941184736712835, + "learning_rate": 9.88954311324311e-06, + "loss": 0.5585, + "step": 9957 + }, + { + "epoch": 1.56, + "grad_norm": 18.565286535572284, + "learning_rate": 9.887856818724702e-06, + "loss": 0.6155, + "step": 9958 + }, + { + "epoch": 1.56, + "grad_norm": 15.423762633025335, + "learning_rate": 9.88617052739558e-06, + "loss": 0.5807, + "step": 9959 + }, + { + "epoch": 1.56, + "grad_norm": 24.059084364987115, + "learning_rate": 9.8844842393037e-06, 
+ "loss": 0.6225, + "step": 9960 + }, + { + "epoch": 1.56, + "grad_norm": 15.25280894387938, + "learning_rate": 9.882797954497028e-06, + "loss": 0.5436, + "step": 9961 + }, + { + "epoch": 1.56, + "grad_norm": 18.743540728517914, + "learning_rate": 9.881111673023509e-06, + "loss": 0.5998, + "step": 9962 + }, + { + "epoch": 1.56, + "grad_norm": 16.814166057385815, + "learning_rate": 9.879425394931103e-06, + "loss": 0.577, + "step": 9963 + }, + { + "epoch": 1.56, + "grad_norm": 16.453038243484357, + "learning_rate": 9.877739120267769e-06, + "loss": 0.5843, + "step": 9964 + }, + { + "epoch": 1.56, + "grad_norm": 13.67659237539244, + "learning_rate": 9.876052849081467e-06, + "loss": 0.6041, + "step": 9965 + }, + { + "epoch": 1.56, + "grad_norm": 12.768110269139063, + "learning_rate": 9.874366581420144e-06, + "loss": 0.624, + "step": 9966 + }, + { + "epoch": 1.56, + "grad_norm": 19.144665052704863, + "learning_rate": 9.872680317331764e-06, + "loss": 0.5898, + "step": 9967 + }, + { + "epoch": 1.56, + "grad_norm": 16.512891213380023, + "learning_rate": 9.87099405686428e-06, + "loss": 0.5721, + "step": 9968 + }, + { + "epoch": 1.56, + "grad_norm": 16.02635306769787, + "learning_rate": 9.869307800065651e-06, + "loss": 0.5754, + "step": 9969 + }, + { + "epoch": 1.56, + "grad_norm": 19.452335579109473, + "learning_rate": 9.867621546983834e-06, + "loss": 0.5932, + "step": 9970 + }, + { + "epoch": 1.56, + "grad_norm": 15.942141635817212, + "learning_rate": 9.86593529766678e-06, + "loss": 0.623, + "step": 9971 + }, + { + "epoch": 1.56, + "grad_norm": 16.76215330603838, + "learning_rate": 9.864249052162447e-06, + "loss": 0.504, + "step": 9972 + }, + { + "epoch": 1.56, + "grad_norm": 16.11229616887807, + "learning_rate": 9.862562810518792e-06, + "loss": 0.5537, + "step": 9973 + }, + { + "epoch": 1.56, + "grad_norm": 17.006137157785712, + "learning_rate": 9.860876572783772e-06, + "loss": 0.6332, + "step": 9974 + }, + { + "epoch": 1.56, + "grad_norm": 14.069077922642027, + "learning_rate": 9.859190339005342e-06, + "loss": 0.5788, + "step": 9975 + }, + { + "epoch": 1.56, + "grad_norm": 21.022021846320726, + "learning_rate": 9.857504109231452e-06, + "loss": 0.6415, + "step": 9976 + }, + { + "epoch": 1.56, + "grad_norm": 16.438184969558296, + "learning_rate": 9.855817883510063e-06, + "loss": 0.5972, + "step": 9977 + }, + { + "epoch": 1.56, + "grad_norm": 19.07915214285458, + "learning_rate": 9.854131661889129e-06, + "loss": 0.627, + "step": 9978 + }, + { + "epoch": 1.56, + "grad_norm": 17.43861554578508, + "learning_rate": 9.852445444416606e-06, + "loss": 0.6388, + "step": 9979 + }, + { + "epoch": 1.56, + "grad_norm": 17.964867473436495, + "learning_rate": 9.850759231140451e-06, + "loss": 0.5946, + "step": 9980 + }, + { + "epoch": 1.56, + "grad_norm": 20.13246155737529, + "learning_rate": 9.849073022108613e-06, + "loss": 0.5865, + "step": 9981 + }, + { + "epoch": 1.56, + "grad_norm": 19.534122396204978, + "learning_rate": 9.84738681736905e-06, + "loss": 0.6329, + "step": 9982 + }, + { + "epoch": 1.56, + "grad_norm": 20.400952487656827, + "learning_rate": 9.845700616969718e-06, + "loss": 0.7185, + "step": 9983 + }, + { + "epoch": 1.56, + "grad_norm": 14.862095055385375, + "learning_rate": 9.844014420958567e-06, + "loss": 0.611, + "step": 9984 + }, + { + "epoch": 1.56, + "grad_norm": 24.366300341082948, + "learning_rate": 9.84232822938356e-06, + "loss": 0.5397, + "step": 9985 + }, + { + "epoch": 1.56, + "grad_norm": 24.757542661964088, + "learning_rate": 9.840642042292643e-06, + "loss": 0.6127, + "step": 9986 + 
}, + { + "epoch": 1.56, + "grad_norm": 19.247783334322346, + "learning_rate": 9.838955859733773e-06, + "loss": 0.6427, + "step": 9987 + }, + { + "epoch": 1.56, + "grad_norm": 20.24752014923365, + "learning_rate": 9.837269681754901e-06, + "loss": 0.6351, + "step": 9988 + }, + { + "epoch": 1.56, + "grad_norm": 19.357579347341186, + "learning_rate": 9.83558350840399e-06, + "loss": 0.6147, + "step": 9989 + }, + { + "epoch": 1.56, + "grad_norm": 27.714189794566042, + "learning_rate": 9.833897339728987e-06, + "loss": 0.6498, + "step": 9990 + }, + { + "epoch": 1.56, + "grad_norm": 21.38621738142277, + "learning_rate": 9.832211175777841e-06, + "loss": 0.626, + "step": 9991 + }, + { + "epoch": 1.56, + "grad_norm": 18.86324138088125, + "learning_rate": 9.830525016598515e-06, + "loss": 0.6015, + "step": 9992 + }, + { + "epoch": 1.56, + "grad_norm": 18.786048721210143, + "learning_rate": 9.82883886223896e-06, + "loss": 0.6237, + "step": 9993 + }, + { + "epoch": 1.56, + "grad_norm": 16.315495899431646, + "learning_rate": 9.827152712747122e-06, + "loss": 0.5222, + "step": 9994 + }, + { + "epoch": 1.56, + "grad_norm": 13.484828884977205, + "learning_rate": 9.825466568170966e-06, + "loss": 0.5161, + "step": 9995 + }, + { + "epoch": 1.56, + "grad_norm": 23.56019167002632, + "learning_rate": 9.823780428558437e-06, + "loss": 0.5658, + "step": 9996 + }, + { + "epoch": 1.56, + "grad_norm": 25.197611497185168, + "learning_rate": 9.822094293957486e-06, + "loss": 0.6232, + "step": 9997 + }, + { + "epoch": 1.56, + "grad_norm": 19.839589939946194, + "learning_rate": 9.820408164416071e-06, + "loss": 0.5993, + "step": 9998 + }, + { + "epoch": 1.56, + "grad_norm": 18.453414585254144, + "learning_rate": 9.818722039982145e-06, + "loss": 0.5559, + "step": 9999 + }, + { + "epoch": 1.56, + "grad_norm": 17.429415153109442, + "learning_rate": 9.81703592070366e-06, + "loss": 0.5934, + "step": 10000 + }, + { + "epoch": 1.56, + "grad_norm": 12.833704595167802, + "learning_rate": 9.81534980662856e-06, + "loss": 0.627, + "step": 10001 + }, + { + "epoch": 1.56, + "grad_norm": 22.602764708174913, + "learning_rate": 9.813663697804808e-06, + "loss": 0.6003, + "step": 10002 + }, + { + "epoch": 1.56, + "grad_norm": 25.716150741705444, + "learning_rate": 9.811977594280348e-06, + "loss": 0.5747, + "step": 10003 + }, + { + "epoch": 1.56, + "grad_norm": 23.605941266512165, + "learning_rate": 9.810291496103137e-06, + "loss": 0.6323, + "step": 10004 + }, + { + "epoch": 1.56, + "grad_norm": 32.268516663109075, + "learning_rate": 9.808605403321128e-06, + "loss": 0.7072, + "step": 10005 + }, + { + "epoch": 1.56, + "grad_norm": 18.580157368921157, + "learning_rate": 9.806919315982266e-06, + "loss": 0.709, + "step": 10006 + }, + { + "epoch": 1.56, + "grad_norm": 20.00704672918317, + "learning_rate": 9.805233234134504e-06, + "loss": 0.5557, + "step": 10007 + }, + { + "epoch": 1.56, + "grad_norm": 17.86993196473675, + "learning_rate": 9.803547157825796e-06, + "loss": 0.6805, + "step": 10008 + }, + { + "epoch": 1.56, + "grad_norm": 23.02845911167214, + "learning_rate": 9.801861087104093e-06, + "loss": 0.5853, + "step": 10009 + }, + { + "epoch": 1.56, + "grad_norm": 20.83648681667443, + "learning_rate": 9.800175022017346e-06, + "loss": 0.5768, + "step": 10010 + }, + { + "epoch": 1.56, + "grad_norm": 19.236295876082348, + "learning_rate": 9.798488962613503e-06, + "loss": 0.5331, + "step": 10011 + }, + { + "epoch": 1.56, + "grad_norm": 10.249045818244447, + "learning_rate": 9.796802908940516e-06, + "loss": 0.5372, + "step": 10012 + }, + { + "epoch": 
1.56, + "grad_norm": 20.572018369707923, + "learning_rate": 9.795116861046334e-06, + "loss": 0.6242, + "step": 10013 + }, + { + "epoch": 1.56, + "grad_norm": 19.72488461600909, + "learning_rate": 9.793430818978913e-06, + "loss": 0.6959, + "step": 10014 + }, + { + "epoch": 1.56, + "grad_norm": 24.468377731677613, + "learning_rate": 9.791744782786196e-06, + "loss": 0.5991, + "step": 10015 + }, + { + "epoch": 1.56, + "grad_norm": 14.222245572684825, + "learning_rate": 9.790058752516134e-06, + "loss": 0.6418, + "step": 10016 + }, + { + "epoch": 1.56, + "grad_norm": 16.746107808403732, + "learning_rate": 9.788372728216679e-06, + "loss": 0.5471, + "step": 10017 + }, + { + "epoch": 1.56, + "grad_norm": 16.25826421638985, + "learning_rate": 9.786686709935781e-06, + "loss": 0.5759, + "step": 10018 + }, + { + "epoch": 1.56, + "grad_norm": 17.3216778129315, + "learning_rate": 9.785000697721391e-06, + "loss": 0.5616, + "step": 10019 + }, + { + "epoch": 1.57, + "grad_norm": 16.998181417608414, + "learning_rate": 9.783314691621451e-06, + "loss": 0.6413, + "step": 10020 + }, + { + "epoch": 1.57, + "grad_norm": 14.35888629741709, + "learning_rate": 9.781628691683916e-06, + "loss": 0.6424, + "step": 10021 + }, + { + "epoch": 1.57, + "grad_norm": 17.459458089193458, + "learning_rate": 9.77994269795673e-06, + "loss": 0.5769, + "step": 10022 + }, + { + "epoch": 1.57, + "grad_norm": 15.25476294638253, + "learning_rate": 9.778256710487849e-06, + "loss": 0.5612, + "step": 10023 + }, + { + "epoch": 1.57, + "grad_norm": 23.980834826409183, + "learning_rate": 9.776570729325217e-06, + "loss": 0.6082, + "step": 10024 + }, + { + "epoch": 1.57, + "grad_norm": 14.90638193236052, + "learning_rate": 9.774884754516783e-06, + "loss": 0.6031, + "step": 10025 + }, + { + "epoch": 1.57, + "grad_norm": 15.400623857259639, + "learning_rate": 9.773198786110492e-06, + "loss": 0.6128, + "step": 10026 + }, + { + "epoch": 1.57, + "grad_norm": 19.086633911575074, + "learning_rate": 9.771512824154297e-06, + "loss": 0.5134, + "step": 10027 + }, + { + "epoch": 1.57, + "grad_norm": 18.330108139346816, + "learning_rate": 9.76982686869614e-06, + "loss": 0.572, + "step": 10028 + }, + { + "epoch": 1.57, + "grad_norm": 18.356847875643304, + "learning_rate": 9.768140919783979e-06, + "loss": 0.5971, + "step": 10029 + }, + { + "epoch": 1.57, + "grad_norm": 19.6674742646926, + "learning_rate": 9.766454977465751e-06, + "loss": 0.6008, + "step": 10030 + }, + { + "epoch": 1.57, + "grad_norm": 25.692674453288213, + "learning_rate": 9.764769041789408e-06, + "loss": 0.5852, + "step": 10031 + }, + { + "epoch": 1.57, + "grad_norm": 23.52018672952823, + "learning_rate": 9.763083112802891e-06, + "loss": 0.558, + "step": 10032 + }, + { + "epoch": 1.57, + "grad_norm": 14.73908533955712, + "learning_rate": 9.761397190554156e-06, + "loss": 0.6074, + "step": 10033 + }, + { + "epoch": 1.57, + "grad_norm": 16.529178335437962, + "learning_rate": 9.759711275091149e-06, + "loss": 0.5673, + "step": 10034 + }, + { + "epoch": 1.57, + "grad_norm": 14.023308214107018, + "learning_rate": 9.758025366461805e-06, + "loss": 0.5685, + "step": 10035 + }, + { + "epoch": 1.57, + "grad_norm": 15.723803368653815, + "learning_rate": 9.756339464714081e-06, + "loss": 0.5399, + "step": 10036 + }, + { + "epoch": 1.57, + "grad_norm": 14.34524316952747, + "learning_rate": 9.754653569895922e-06, + "loss": 0.5646, + "step": 10037 + }, + { + "epoch": 1.57, + "grad_norm": 27.518265496216117, + "learning_rate": 9.752967682055269e-06, + "loss": 0.582, + "step": 10038 + }, + { + "epoch": 1.57, + 
"grad_norm": 16.7688568391926, + "learning_rate": 9.751281801240075e-06, + "loss": 0.756, + "step": 10039 + }, + { + "epoch": 1.57, + "grad_norm": 16.830683029405115, + "learning_rate": 9.749595927498277e-06, + "loss": 0.664, + "step": 10040 + }, + { + "epoch": 1.57, + "grad_norm": 15.67313052897807, + "learning_rate": 9.747910060877824e-06, + "loss": 0.5482, + "step": 10041 + }, + { + "epoch": 1.57, + "grad_norm": 16.71254616101865, + "learning_rate": 9.746224201426663e-06, + "loss": 0.566, + "step": 10042 + }, + { + "epoch": 1.57, + "grad_norm": 14.922229060621065, + "learning_rate": 9.744538349192736e-06, + "loss": 0.5583, + "step": 10043 + }, + { + "epoch": 1.57, + "grad_norm": 16.172194401263614, + "learning_rate": 9.742852504223995e-06, + "loss": 0.5571, + "step": 10044 + }, + { + "epoch": 1.57, + "grad_norm": 21.68311685880578, + "learning_rate": 9.741166666568371e-06, + "loss": 0.5765, + "step": 10045 + }, + { + "epoch": 1.57, + "grad_norm": 18.700981563864698, + "learning_rate": 9.739480836273819e-06, + "loss": 0.5952, + "step": 10046 + }, + { + "epoch": 1.57, + "grad_norm": 26.562755268216712, + "learning_rate": 9.737795013388277e-06, + "loss": 0.6485, + "step": 10047 + }, + { + "epoch": 1.57, + "grad_norm": 26.416795483710313, + "learning_rate": 9.736109197959693e-06, + "loss": 0.6672, + "step": 10048 + }, + { + "epoch": 1.57, + "grad_norm": 20.315048663829586, + "learning_rate": 9.734423390036011e-06, + "loss": 0.5546, + "step": 10049 + }, + { + "epoch": 1.57, + "grad_norm": 19.861351698157037, + "learning_rate": 9.732737589665171e-06, + "loss": 0.6097, + "step": 10050 + }, + { + "epoch": 1.57, + "grad_norm": 18.340926464405918, + "learning_rate": 9.731051796895116e-06, + "loss": 0.55, + "step": 10051 + }, + { + "epoch": 1.57, + "grad_norm": 24.10408875257175, + "learning_rate": 9.729366011773793e-06, + "loss": 0.6593, + "step": 10052 + }, + { + "epoch": 1.57, + "grad_norm": 15.62370364724231, + "learning_rate": 9.727680234349138e-06, + "loss": 0.5941, + "step": 10053 + }, + { + "epoch": 1.57, + "grad_norm": 15.556889430353527, + "learning_rate": 9.725994464669103e-06, + "loss": 0.592, + "step": 10054 + }, + { + "epoch": 1.57, + "grad_norm": 24.830167263869914, + "learning_rate": 9.724308702781625e-06, + "loss": 0.617, + "step": 10055 + }, + { + "epoch": 1.57, + "grad_norm": 22.948813933028337, + "learning_rate": 9.722622948734644e-06, + "loss": 0.5507, + "step": 10056 + }, + { + "epoch": 1.57, + "grad_norm": 17.237573565261453, + "learning_rate": 9.720937202576101e-06, + "loss": 0.579, + "step": 10057 + }, + { + "epoch": 1.57, + "grad_norm": 14.538579105661437, + "learning_rate": 9.719251464353944e-06, + "loss": 0.5689, + "step": 10058 + }, + { + "epoch": 1.57, + "grad_norm": 19.259593188770477, + "learning_rate": 9.717565734116114e-06, + "loss": 0.5896, + "step": 10059 + }, + { + "epoch": 1.57, + "grad_norm": 13.278848890750854, + "learning_rate": 9.715880011910545e-06, + "loss": 0.5417, + "step": 10060 + }, + { + "epoch": 1.57, + "grad_norm": 13.032823222949075, + "learning_rate": 9.714194297785184e-06, + "loss": 0.5356, + "step": 10061 + }, + { + "epoch": 1.57, + "grad_norm": 18.800956271110064, + "learning_rate": 9.71250859178797e-06, + "loss": 0.6126, + "step": 10062 + }, + { + "epoch": 1.57, + "grad_norm": 27.2278365407668, + "learning_rate": 9.710822893966845e-06, + "loss": 0.6029, + "step": 10063 + }, + { + "epoch": 1.57, + "grad_norm": 15.88481036378908, + "learning_rate": 9.709137204369746e-06, + "loss": 0.6065, + "step": 10064 + }, + { + "epoch": 1.57, + 
"grad_norm": 19.89216972459948, + "learning_rate": 9.707451523044614e-06, + "loss": 0.6157, + "step": 10065 + }, + { + "epoch": 1.57, + "grad_norm": 19.80632905571142, + "learning_rate": 9.705765850039388e-06, + "loss": 0.5651, + "step": 10066 + }, + { + "epoch": 1.57, + "grad_norm": 17.021337088967, + "learning_rate": 9.704080185402012e-06, + "loss": 0.5574, + "step": 10067 + }, + { + "epoch": 1.57, + "grad_norm": 20.36421736968079, + "learning_rate": 9.702394529180424e-06, + "loss": 0.5428, + "step": 10068 + }, + { + "epoch": 1.57, + "grad_norm": 14.457185828416597, + "learning_rate": 9.70070888142256e-06, + "loss": 0.5958, + "step": 10069 + }, + { + "epoch": 1.57, + "grad_norm": 18.866692643557005, + "learning_rate": 9.699023242176358e-06, + "loss": 0.5505, + "step": 10070 + }, + { + "epoch": 1.57, + "grad_norm": 12.840051949893022, + "learning_rate": 9.69733761148976e-06, + "loss": 0.5661, + "step": 10071 + }, + { + "epoch": 1.57, + "grad_norm": 23.585973973986793, + "learning_rate": 9.695651989410702e-06, + "loss": 0.6339, + "step": 10072 + }, + { + "epoch": 1.57, + "grad_norm": 25.877395682531404, + "learning_rate": 9.693966375987128e-06, + "loss": 0.5814, + "step": 10073 + }, + { + "epoch": 1.57, + "grad_norm": 12.72589435134867, + "learning_rate": 9.692280771266969e-06, + "loss": 0.503, + "step": 10074 + }, + { + "epoch": 1.57, + "grad_norm": 16.221366208720603, + "learning_rate": 9.690595175298164e-06, + "loss": 0.4694, + "step": 10075 + }, + { + "epoch": 1.57, + "grad_norm": 18.782637790812743, + "learning_rate": 9.68890958812865e-06, + "loss": 0.577, + "step": 10076 + }, + { + "epoch": 1.57, + "grad_norm": 29.370909077281556, + "learning_rate": 9.687224009806366e-06, + "loss": 0.7095, + "step": 10077 + }, + { + "epoch": 1.57, + "grad_norm": 14.02968395707663, + "learning_rate": 9.685538440379253e-06, + "loss": 0.5078, + "step": 10078 + }, + { + "epoch": 1.57, + "grad_norm": 15.367498899872174, + "learning_rate": 9.683852879895236e-06, + "loss": 0.5463, + "step": 10079 + }, + { + "epoch": 1.57, + "grad_norm": 17.613962039747314, + "learning_rate": 9.682167328402261e-06, + "loss": 0.4927, + "step": 10080 + }, + { + "epoch": 1.57, + "grad_norm": 16.779577303303533, + "learning_rate": 9.680481785948263e-06, + "loss": 0.551, + "step": 10081 + }, + { + "epoch": 1.57, + "grad_norm": 16.584457012445984, + "learning_rate": 9.678796252581171e-06, + "loss": 0.6895, + "step": 10082 + }, + { + "epoch": 1.57, + "grad_norm": 17.44529614103979, + "learning_rate": 9.677110728348932e-06, + "loss": 0.6271, + "step": 10083 + }, + { + "epoch": 1.58, + "grad_norm": 26.204992829401718, + "learning_rate": 9.675425213299471e-06, + "loss": 0.6812, + "step": 10084 + }, + { + "epoch": 1.58, + "grad_norm": 25.054014717301907, + "learning_rate": 9.673739707480727e-06, + "loss": 0.6447, + "step": 10085 + }, + { + "epoch": 1.58, + "grad_norm": 14.26093790209592, + "learning_rate": 9.672054210940638e-06, + "loss": 0.564, + "step": 10086 + }, + { + "epoch": 1.58, + "grad_norm": 23.031045110184525, + "learning_rate": 9.670368723727131e-06, + "loss": 0.5727, + "step": 10087 + }, + { + "epoch": 1.58, + "grad_norm": 21.579104807331625, + "learning_rate": 9.66868324588815e-06, + "loss": 0.5525, + "step": 10088 + }, + { + "epoch": 1.58, + "grad_norm": 14.635028514488914, + "learning_rate": 9.66699777747162e-06, + "loss": 0.5841, + "step": 10089 + }, + { + "epoch": 1.58, + "grad_norm": 25.389476404473285, + "learning_rate": 9.665312318525478e-06, + "loss": 0.5501, + "step": 10090 + }, + { + "epoch": 1.58, + 
"grad_norm": 16.356013336116614, + "learning_rate": 9.663626869097657e-06, + "loss": 0.6834, + "step": 10091 + }, + { + "epoch": 1.58, + "grad_norm": 22.067622225700482, + "learning_rate": 9.661941429236094e-06, + "loss": 0.6668, + "step": 10092 + }, + { + "epoch": 1.58, + "grad_norm": 16.438632758348085, + "learning_rate": 9.66025599898872e-06, + "loss": 0.5709, + "step": 10093 + }, + { + "epoch": 1.58, + "grad_norm": 24.151288601924566, + "learning_rate": 9.658570578403465e-06, + "loss": 0.5981, + "step": 10094 + }, + { + "epoch": 1.58, + "grad_norm": 16.026308965783524, + "learning_rate": 9.656885167528261e-06, + "loss": 0.5739, + "step": 10095 + }, + { + "epoch": 1.58, + "grad_norm": 23.103073550600058, + "learning_rate": 9.655199766411044e-06, + "loss": 0.6453, + "step": 10096 + }, + { + "epoch": 1.58, + "grad_norm": 16.13750904108484, + "learning_rate": 9.653514375099742e-06, + "loss": 0.5524, + "step": 10097 + }, + { + "epoch": 1.58, + "grad_norm": 16.252009208884083, + "learning_rate": 9.651828993642293e-06, + "loss": 0.6176, + "step": 10098 + }, + { + "epoch": 1.58, + "grad_norm": 15.69996785979489, + "learning_rate": 9.650143622086621e-06, + "loss": 0.5696, + "step": 10099 + }, + { + "epoch": 1.58, + "grad_norm": 18.49341584530932, + "learning_rate": 9.64845826048066e-06, + "loss": 0.6005, + "step": 10100 + }, + { + "epoch": 1.58, + "grad_norm": 14.68649876027647, + "learning_rate": 9.646772908872338e-06, + "loss": 0.4704, + "step": 10101 + }, + { + "epoch": 1.58, + "grad_norm": 26.16287216606658, + "learning_rate": 9.64508756730959e-06, + "loss": 0.6085, + "step": 10102 + }, + { + "epoch": 1.58, + "grad_norm": 15.04830133901125, + "learning_rate": 9.643402235840346e-06, + "loss": 0.4905, + "step": 10103 + }, + { + "epoch": 1.58, + "grad_norm": 19.06883674265519, + "learning_rate": 9.641716914512532e-06, + "loss": 0.55, + "step": 10104 + }, + { + "epoch": 1.58, + "grad_norm": 17.27897008856571, + "learning_rate": 9.640031603374078e-06, + "loss": 0.5096, + "step": 10105 + }, + { + "epoch": 1.58, + "grad_norm": 15.226691759753798, + "learning_rate": 9.638346302472916e-06, + "loss": 0.4599, + "step": 10106 + }, + { + "epoch": 1.58, + "grad_norm": 15.126980546467149, + "learning_rate": 9.636661011856971e-06, + "loss": 0.543, + "step": 10107 + }, + { + "epoch": 1.58, + "grad_norm": 18.663240713244512, + "learning_rate": 9.63497573157418e-06, + "loss": 0.5798, + "step": 10108 + }, + { + "epoch": 1.58, + "grad_norm": 14.931403170806357, + "learning_rate": 9.633290461672463e-06, + "loss": 0.6677, + "step": 10109 + }, + { + "epoch": 1.58, + "grad_norm": 17.958064945119073, + "learning_rate": 9.631605202199748e-06, + "loss": 0.619, + "step": 10110 + }, + { + "epoch": 1.58, + "grad_norm": 19.973811260379875, + "learning_rate": 9.629919953203966e-06, + "loss": 0.5956, + "step": 10111 + }, + { + "epoch": 1.58, + "grad_norm": 23.270058285730773, + "learning_rate": 9.628234714733048e-06, + "loss": 0.5969, + "step": 10112 + }, + { + "epoch": 1.58, + "grad_norm": 16.911981900015906, + "learning_rate": 9.626549486834916e-06, + "loss": 0.6023, + "step": 10113 + }, + { + "epoch": 1.58, + "grad_norm": 19.619658509364537, + "learning_rate": 9.624864269557495e-06, + "loss": 0.6119, + "step": 10114 + }, + { + "epoch": 1.58, + "grad_norm": 26.55474463117469, + "learning_rate": 9.623179062948716e-06, + "loss": 0.5962, + "step": 10115 + }, + { + "epoch": 1.58, + "grad_norm": 16.891516908957403, + "learning_rate": 9.621493867056505e-06, + "loss": 0.556, + "step": 10116 + }, + { + "epoch": 1.58, + 
"grad_norm": 16.622133667595797, + "learning_rate": 9.619808681928788e-06, + "loss": 0.4833, + "step": 10117 + }, + { + "epoch": 1.58, + "grad_norm": 30.217225328225783, + "learning_rate": 9.618123507613487e-06, + "loss": 0.5976, + "step": 10118 + }, + { + "epoch": 1.58, + "grad_norm": 19.319260472563556, + "learning_rate": 9.616438344158533e-06, + "loss": 0.6221, + "step": 10119 + }, + { + "epoch": 1.58, + "grad_norm": 15.064963212064075, + "learning_rate": 9.614753191611846e-06, + "loss": 0.4999, + "step": 10120 + }, + { + "epoch": 1.58, + "grad_norm": 17.670909195385725, + "learning_rate": 9.613068050021353e-06, + "loss": 0.605, + "step": 10121 + }, + { + "epoch": 1.58, + "grad_norm": 28.09931893999679, + "learning_rate": 9.61138291943498e-06, + "loss": 0.6798, + "step": 10122 + }, + { + "epoch": 1.58, + "grad_norm": 21.721724371359706, + "learning_rate": 9.609697799900647e-06, + "loss": 0.6061, + "step": 10123 + }, + { + "epoch": 1.58, + "grad_norm": 14.680934739331123, + "learning_rate": 9.608012691466281e-06, + "loss": 0.5393, + "step": 10124 + }, + { + "epoch": 1.58, + "grad_norm": 22.99617350340761, + "learning_rate": 9.606327594179806e-06, + "loss": 0.5694, + "step": 10125 + }, + { + "epoch": 1.58, + "grad_norm": 15.884327146865957, + "learning_rate": 9.604642508089142e-06, + "loss": 0.6111, + "step": 10126 + }, + { + "epoch": 1.58, + "grad_norm": 26.003422813282558, + "learning_rate": 9.602957433242218e-06, + "loss": 0.6758, + "step": 10127 + }, + { + "epoch": 1.58, + "grad_norm": 22.745249444803786, + "learning_rate": 9.601272369686948e-06, + "loss": 0.6849, + "step": 10128 + }, + { + "epoch": 1.58, + "grad_norm": 13.502951511291759, + "learning_rate": 9.599587317471259e-06, + "loss": 0.5306, + "step": 10129 + }, + { + "epoch": 1.58, + "grad_norm": 18.108790051654342, + "learning_rate": 9.597902276643074e-06, + "loss": 0.4684, + "step": 10130 + }, + { + "epoch": 1.58, + "grad_norm": 17.67386112216378, + "learning_rate": 9.59621724725031e-06, + "loss": 0.5954, + "step": 10131 + }, + { + "epoch": 1.58, + "grad_norm": 27.41976670422977, + "learning_rate": 9.594532229340898e-06, + "loss": 0.5848, + "step": 10132 + }, + { + "epoch": 1.58, + "grad_norm": 13.702352275831599, + "learning_rate": 9.592847222962744e-06, + "loss": 0.543, + "step": 10133 + }, + { + "epoch": 1.58, + "grad_norm": 18.863435959077865, + "learning_rate": 9.591162228163781e-06, + "loss": 0.6405, + "step": 10134 + }, + { + "epoch": 1.58, + "grad_norm": 15.884657693043843, + "learning_rate": 9.589477244991924e-06, + "loss": 0.526, + "step": 10135 + }, + { + "epoch": 1.58, + "grad_norm": 21.56997672941892, + "learning_rate": 9.587792273495095e-06, + "loss": 0.6221, + "step": 10136 + }, + { + "epoch": 1.58, + "grad_norm": 19.7318703307114, + "learning_rate": 9.586107313721214e-06, + "loss": 0.5262, + "step": 10137 + }, + { + "epoch": 1.58, + "grad_norm": 24.19203957877898, + "learning_rate": 9.584422365718197e-06, + "loss": 0.61, + "step": 10138 + }, + { + "epoch": 1.58, + "grad_norm": 19.729414635116736, + "learning_rate": 9.582737429533964e-06, + "loss": 0.5727, + "step": 10139 + }, + { + "epoch": 1.58, + "grad_norm": 25.388957499401354, + "learning_rate": 9.581052505216434e-06, + "loss": 0.5099, + "step": 10140 + }, + { + "epoch": 1.58, + "grad_norm": 18.947607268739354, + "learning_rate": 9.579367592813526e-06, + "loss": 0.5567, + "step": 10141 + }, + { + "epoch": 1.58, + "grad_norm": 13.280214226668772, + "learning_rate": 9.577682692373161e-06, + "loss": 0.5128, + "step": 10142 + }, + { + "epoch": 1.58, + 
"grad_norm": 20.638860567901208, + "learning_rate": 9.575997803943251e-06, + "loss": 0.6247, + "step": 10143 + }, + { + "epoch": 1.58, + "grad_norm": 18.800169965538153, + "learning_rate": 9.574312927571717e-06, + "loss": 0.6513, + "step": 10144 + }, + { + "epoch": 1.58, + "grad_norm": 23.66225216641796, + "learning_rate": 9.572628063306469e-06, + "loss": 0.5887, + "step": 10145 + }, + { + "epoch": 1.58, + "grad_norm": 26.067352312053234, + "learning_rate": 9.570943211195434e-06, + "loss": 0.6061, + "step": 10146 + }, + { + "epoch": 1.58, + "grad_norm": 18.45671744872066, + "learning_rate": 9.569258371286523e-06, + "loss": 0.5249, + "step": 10147 + }, + { + "epoch": 1.59, + "grad_norm": 16.880283281002388, + "learning_rate": 9.567573543627649e-06, + "loss": 0.5792, + "step": 10148 + }, + { + "epoch": 1.59, + "grad_norm": 30.50596970329945, + "learning_rate": 9.565888728266732e-06, + "loss": 0.6021, + "step": 10149 + }, + { + "epoch": 1.59, + "grad_norm": 21.981365633669338, + "learning_rate": 9.564203925251685e-06, + "loss": 0.6029, + "step": 10150 + }, + { + "epoch": 1.59, + "grad_norm": 15.153166468078364, + "learning_rate": 9.562519134630423e-06, + "loss": 0.5361, + "step": 10151 + }, + { + "epoch": 1.59, + "grad_norm": 19.73050006055113, + "learning_rate": 9.560834356450864e-06, + "loss": 0.614, + "step": 10152 + }, + { + "epoch": 1.59, + "grad_norm": 18.54892938270337, + "learning_rate": 9.559149590760917e-06, + "loss": 0.6027, + "step": 10153 + }, + { + "epoch": 1.59, + "grad_norm": 21.098824253816073, + "learning_rate": 9.557464837608496e-06, + "loss": 0.6092, + "step": 10154 + }, + { + "epoch": 1.59, + "grad_norm": 18.676721788558627, + "learning_rate": 9.555780097041517e-06, + "loss": 0.6006, + "step": 10155 + }, + { + "epoch": 1.59, + "grad_norm": 20.382317378740684, + "learning_rate": 9.554095369107892e-06, + "loss": 0.592, + "step": 10156 + }, + { + "epoch": 1.59, + "grad_norm": 21.335653481955433, + "learning_rate": 9.55241065385554e-06, + "loss": 0.6207, + "step": 10157 + }, + { + "epoch": 1.59, + "grad_norm": 19.10824624515318, + "learning_rate": 9.55072595133236e-06, + "loss": 0.5659, + "step": 10158 + }, + { + "epoch": 1.59, + "grad_norm": 18.221329543216243, + "learning_rate": 9.549041261586273e-06, + "loss": 0.5967, + "step": 10159 + }, + { + "epoch": 1.59, + "grad_norm": 19.922817758430813, + "learning_rate": 9.547356584665185e-06, + "loss": 0.6424, + "step": 10160 + }, + { + "epoch": 1.59, + "grad_norm": 22.874842735698397, + "learning_rate": 9.545671920617018e-06, + "loss": 0.5457, + "step": 10161 + }, + { + "epoch": 1.59, + "grad_norm": 15.082334397319096, + "learning_rate": 9.543987269489673e-06, + "loss": 0.5322, + "step": 10162 + }, + { + "epoch": 1.59, + "grad_norm": 17.206464654208467, + "learning_rate": 9.542302631331063e-06, + "loss": 0.5889, + "step": 10163 + }, + { + "epoch": 1.59, + "grad_norm": 17.305205657727395, + "learning_rate": 9.540618006189096e-06, + "loss": 0.5444, + "step": 10164 + }, + { + "epoch": 1.59, + "grad_norm": 18.114067945740302, + "learning_rate": 9.538933394111687e-06, + "loss": 0.6235, + "step": 10165 + }, + { + "epoch": 1.59, + "grad_norm": 21.498871242063327, + "learning_rate": 9.537248795146745e-06, + "loss": 0.5464, + "step": 10166 + }, + { + "epoch": 1.59, + "grad_norm": 15.411073713948925, + "learning_rate": 9.535564209342172e-06, + "loss": 0.5993, + "step": 10167 + }, + { + "epoch": 1.59, + "grad_norm": 20.395969403728373, + "learning_rate": 9.533879636745883e-06, + "loss": 0.704, + "step": 10168 + }, + { + "epoch": 1.59, + 
"grad_norm": 18.301484906847854, + "learning_rate": 9.532195077405784e-06, + "loss": 0.5667, + "step": 10169 + }, + { + "epoch": 1.59, + "grad_norm": 12.219251545566367, + "learning_rate": 9.53051053136978e-06, + "loss": 0.5725, + "step": 10170 + }, + { + "epoch": 1.59, + "grad_norm": 23.52548695153024, + "learning_rate": 9.528825998685788e-06, + "loss": 0.6348, + "step": 10171 + }, + { + "epoch": 1.59, + "grad_norm": 28.5215812639141, + "learning_rate": 9.527141479401708e-06, + "loss": 0.6318, + "step": 10172 + }, + { + "epoch": 1.59, + "grad_norm": 20.428166581208274, + "learning_rate": 9.525456973565443e-06, + "loss": 0.5895, + "step": 10173 + }, + { + "epoch": 1.59, + "grad_norm": 22.104338422728773, + "learning_rate": 9.523772481224908e-06, + "loss": 0.5357, + "step": 10174 + }, + { + "epoch": 1.59, + "grad_norm": 23.521313544488233, + "learning_rate": 9.522088002428003e-06, + "loss": 0.5764, + "step": 10175 + }, + { + "epoch": 1.59, + "grad_norm": 19.492518070911466, + "learning_rate": 9.520403537222642e-06, + "loss": 0.6186, + "step": 10176 + }, + { + "epoch": 1.59, + "grad_norm": 16.469337373280638, + "learning_rate": 9.518719085656716e-06, + "loss": 0.5808, + "step": 10177 + }, + { + "epoch": 1.59, + "grad_norm": 20.01032854613012, + "learning_rate": 9.51703464777814e-06, + "loss": 0.5827, + "step": 10178 + }, + { + "epoch": 1.59, + "grad_norm": 17.454706514809335, + "learning_rate": 9.515350223634815e-06, + "loss": 0.5495, + "step": 10179 + }, + { + "epoch": 1.59, + "grad_norm": 11.2733111774296, + "learning_rate": 9.513665813274647e-06, + "loss": 0.58, + "step": 10180 + }, + { + "epoch": 1.59, + "grad_norm": 22.128237953431082, + "learning_rate": 9.511981416745542e-06, + "loss": 0.5409, + "step": 10181 + }, + { + "epoch": 1.59, + "grad_norm": 14.02137841432915, + "learning_rate": 9.510297034095398e-06, + "loss": 0.5669, + "step": 10182 + }, + { + "epoch": 1.59, + "grad_norm": 21.796168025320355, + "learning_rate": 9.508612665372117e-06, + "loss": 0.595, + "step": 10183 + }, + { + "epoch": 1.59, + "grad_norm": 16.964037031263683, + "learning_rate": 9.506928310623608e-06, + "loss": 0.5848, + "step": 10184 + }, + { + "epoch": 1.59, + "grad_norm": 24.160202415810765, + "learning_rate": 9.505243969897766e-06, + "loss": 0.5386, + "step": 10185 + }, + { + "epoch": 1.59, + "grad_norm": 21.004701400328855, + "learning_rate": 9.5035596432425e-06, + "loss": 0.5449, + "step": 10186 + }, + { + "epoch": 1.59, + "grad_norm": 19.47876873289876, + "learning_rate": 9.501875330705706e-06, + "loss": 0.5268, + "step": 10187 + }, + { + "epoch": 1.59, + "grad_norm": 17.744135956666508, + "learning_rate": 9.500191032335286e-06, + "loss": 0.5836, + "step": 10188 + }, + { + "epoch": 1.59, + "grad_norm": 24.923679271546753, + "learning_rate": 9.498506748179137e-06, + "loss": 0.5676, + "step": 10189 + }, + { + "epoch": 1.59, + "grad_norm": 14.577169493325691, + "learning_rate": 9.496822478285167e-06, + "loss": 0.5193, + "step": 10190 + }, + { + "epoch": 1.59, + "grad_norm": 21.95495695228826, + "learning_rate": 9.495138222701273e-06, + "loss": 0.6165, + "step": 10191 + }, + { + "epoch": 1.59, + "grad_norm": 23.45231711942303, + "learning_rate": 9.493453981475348e-06, + "loss": 0.5866, + "step": 10192 + }, + { + "epoch": 1.59, + "grad_norm": 12.599289539531185, + "learning_rate": 9.491769754655298e-06, + "loss": 0.5462, + "step": 10193 + }, + { + "epoch": 1.59, + "grad_norm": 18.851061260612095, + "learning_rate": 9.490085542289016e-06, + "loss": 0.6032, + "step": 10194 + }, + { + "epoch": 1.59, + 
"grad_norm": 13.497221020491342, + "learning_rate": 9.488401344424404e-06, + "loss": 0.5381, + "step": 10195 + }, + { + "epoch": 1.59, + "grad_norm": 15.088175688090733, + "learning_rate": 9.486717161109363e-06, + "loss": 0.5695, + "step": 10196 + }, + { + "epoch": 1.59, + "grad_norm": 15.392560985232791, + "learning_rate": 9.485032992391782e-06, + "loss": 0.531, + "step": 10197 + }, + { + "epoch": 1.59, + "grad_norm": 20.9133322840861, + "learning_rate": 9.48334883831956e-06, + "loss": 0.5628, + "step": 10198 + }, + { + "epoch": 1.59, + "grad_norm": 12.150192333934168, + "learning_rate": 9.481664698940598e-06, + "loss": 0.5566, + "step": 10199 + }, + { + "epoch": 1.59, + "grad_norm": 21.434743320288796, + "learning_rate": 9.479980574302785e-06, + "loss": 0.5734, + "step": 10200 + }, + { + "epoch": 1.59, + "grad_norm": 14.037543672653364, + "learning_rate": 9.478296464454028e-06, + "loss": 0.5512, + "step": 10201 + }, + { + "epoch": 1.59, + "grad_norm": 26.748778909427127, + "learning_rate": 9.476612369442207e-06, + "loss": 0.5786, + "step": 10202 + }, + { + "epoch": 1.59, + "grad_norm": 13.593833432818476, + "learning_rate": 9.474928289315224e-06, + "loss": 0.5504, + "step": 10203 + }, + { + "epoch": 1.59, + "grad_norm": 21.797855319944865, + "learning_rate": 9.473244224120974e-06, + "loss": 0.6067, + "step": 10204 + }, + { + "epoch": 1.59, + "grad_norm": 27.362343431585103, + "learning_rate": 9.471560173907353e-06, + "loss": 0.6348, + "step": 10205 + }, + { + "epoch": 1.59, + "grad_norm": 23.205063470470282, + "learning_rate": 9.469876138722252e-06, + "loss": 0.5843, + "step": 10206 + }, + { + "epoch": 1.59, + "grad_norm": 15.564455273848642, + "learning_rate": 9.46819211861356e-06, + "loss": 0.5634, + "step": 10207 + }, + { + "epoch": 1.59, + "grad_norm": 24.979014592798812, + "learning_rate": 9.466508113629174e-06, + "loss": 0.5812, + "step": 10208 + }, + { + "epoch": 1.59, + "grad_norm": 17.5927799162375, + "learning_rate": 9.464824123816986e-06, + "loss": 0.5749, + "step": 10209 + }, + { + "epoch": 1.59, + "grad_norm": 23.331526192155177, + "learning_rate": 9.463140149224888e-06, + "loss": 0.6353, + "step": 10210 + }, + { + "epoch": 1.59, + "grad_norm": 20.18905818317108, + "learning_rate": 9.461456189900767e-06, + "loss": 0.6544, + "step": 10211 + }, + { + "epoch": 1.6, + "grad_norm": 17.73927568287242, + "learning_rate": 9.459772245892518e-06, + "loss": 0.5988, + "step": 10212 + }, + { + "epoch": 1.6, + "grad_norm": 16.912787478362144, + "learning_rate": 9.458088317248032e-06, + "loss": 0.5653, + "step": 10213 + }, + { + "epoch": 1.6, + "grad_norm": 22.484053835122573, + "learning_rate": 9.456404404015194e-06, + "loss": 0.6067, + "step": 10214 + }, + { + "epoch": 1.6, + "grad_norm": 18.348222035518095, + "learning_rate": 9.454720506241902e-06, + "loss": 0.4943, + "step": 10215 + }, + { + "epoch": 1.6, + "grad_norm": 21.978898508385594, + "learning_rate": 9.453036623976036e-06, + "loss": 0.59, + "step": 10216 + }, + { + "epoch": 1.6, + "grad_norm": 20.48585443515506, + "learning_rate": 9.451352757265488e-06, + "loss": 0.5905, + "step": 10217 + }, + { + "epoch": 1.6, + "grad_norm": 15.623685701407165, + "learning_rate": 9.449668906158149e-06, + "loss": 0.5601, + "step": 10218 + }, + { + "epoch": 1.6, + "grad_norm": 14.59599014381848, + "learning_rate": 9.4479850707019e-06, + "loss": 0.5843, + "step": 10219 + }, + { + "epoch": 1.6, + "grad_norm": 10.582674865188112, + "learning_rate": 9.446301250944641e-06, + "loss": 0.5157, + "step": 10220 + }, + { + "epoch": 1.6, + "grad_norm": 
26.282548043426566, + "learning_rate": 9.444617446934244e-06, + "loss": 0.5368, + "step": 10221 + }, + { + "epoch": 1.6, + "grad_norm": 23.083932482277575, + "learning_rate": 9.442933658718603e-06, + "loss": 0.5931, + "step": 10222 + }, + { + "epoch": 1.6, + "grad_norm": 12.950197334194463, + "learning_rate": 9.4412498863456e-06, + "loss": 0.5582, + "step": 10223 + }, + { + "epoch": 1.6, + "grad_norm": 31.458807319213445, + "learning_rate": 9.439566129863126e-06, + "loss": 0.6908, + "step": 10224 + }, + { + "epoch": 1.6, + "grad_norm": 19.418340543595967, + "learning_rate": 9.437882389319067e-06, + "loss": 0.5632, + "step": 10225 + }, + { + "epoch": 1.6, + "grad_norm": 14.601004524550662, + "learning_rate": 9.436198664761302e-06, + "loss": 0.5978, + "step": 10226 + }, + { + "epoch": 1.6, + "grad_norm": 14.946124442638643, + "learning_rate": 9.434514956237714e-06, + "loss": 0.5696, + "step": 10227 + }, + { + "epoch": 1.6, + "grad_norm": 18.578052067031987, + "learning_rate": 9.43283126379619e-06, + "loss": 0.5703, + "step": 10228 + }, + { + "epoch": 1.6, + "grad_norm": 21.660376256129847, + "learning_rate": 9.431147587484614e-06, + "loss": 0.5427, + "step": 10229 + }, + { + "epoch": 1.6, + "grad_norm": 16.171986702980117, + "learning_rate": 9.429463927350872e-06, + "loss": 0.5693, + "step": 10230 + }, + { + "epoch": 1.6, + "grad_norm": 14.131356021402402, + "learning_rate": 9.427780283442838e-06, + "loss": 0.6187, + "step": 10231 + }, + { + "epoch": 1.6, + "grad_norm": 18.673093993805384, + "learning_rate": 9.4260966558084e-06, + "loss": 0.6402, + "step": 10232 + }, + { + "epoch": 1.6, + "grad_norm": 20.810651001117048, + "learning_rate": 9.424413044495435e-06, + "loss": 0.6816, + "step": 10233 + }, + { + "epoch": 1.6, + "grad_norm": 17.537029502815066, + "learning_rate": 9.422729449551828e-06, + "loss": 0.5753, + "step": 10234 + }, + { + "epoch": 1.6, + "grad_norm": 16.174861105675564, + "learning_rate": 9.42104587102546e-06, + "loss": 0.5041, + "step": 10235 + }, + { + "epoch": 1.6, + "grad_norm": 24.18466711211708, + "learning_rate": 9.419362308964203e-06, + "loss": 0.6166, + "step": 10236 + }, + { + "epoch": 1.6, + "grad_norm": 21.901788555175294, + "learning_rate": 9.417678763415948e-06, + "loss": 0.5414, + "step": 10237 + }, + { + "epoch": 1.6, + "grad_norm": 17.198155662471024, + "learning_rate": 9.415995234428563e-06, + "loss": 0.6062, + "step": 10238 + }, + { + "epoch": 1.6, + "grad_norm": 22.553335830466292, + "learning_rate": 9.414311722049935e-06, + "loss": 0.5701, + "step": 10239 + }, + { + "epoch": 1.6, + "grad_norm": 19.9277178868868, + "learning_rate": 9.41262822632794e-06, + "loss": 0.6295, + "step": 10240 + }, + { + "epoch": 1.6, + "grad_norm": 26.466209562185433, + "learning_rate": 9.410944747310454e-06, + "loss": 0.5657, + "step": 10241 + }, + { + "epoch": 1.6, + "grad_norm": 14.962371337647367, + "learning_rate": 9.409261285045352e-06, + "loss": 0.5876, + "step": 10242 + }, + { + "epoch": 1.6, + "grad_norm": 19.82596976378527, + "learning_rate": 9.407577839580516e-06, + "loss": 0.5939, + "step": 10243 + }, + { + "epoch": 1.6, + "grad_norm": 22.59301528553886, + "learning_rate": 9.405894410963817e-06, + "loss": 0.622, + "step": 10244 + }, + { + "epoch": 1.6, + "grad_norm": 15.98443494060237, + "learning_rate": 9.404210999243141e-06, + "loss": 0.6171, + "step": 10245 + }, + { + "epoch": 1.6, + "grad_norm": 16.873800868238796, + "learning_rate": 9.402527604466347e-06, + "loss": 0.5381, + "step": 10246 + }, + { + "epoch": 1.6, + "grad_norm": 13.757366974704107, + 
"learning_rate": 9.400844226681324e-06, + "loss": 0.5755, + "step": 10247 + }, + { + "epoch": 1.6, + "grad_norm": 23.078581079742847, + "learning_rate": 9.399160865935936e-06, + "loss": 0.5406, + "step": 10248 + }, + { + "epoch": 1.6, + "grad_norm": 20.536315443335315, + "learning_rate": 9.397477522278064e-06, + "loss": 0.5606, + "step": 10249 + }, + { + "epoch": 1.6, + "grad_norm": 16.518486491893302, + "learning_rate": 9.395794195755581e-06, + "loss": 0.6363, + "step": 10250 + }, + { + "epoch": 1.6, + "grad_norm": 17.10358513276388, + "learning_rate": 9.394110886416355e-06, + "loss": 0.6003, + "step": 10251 + }, + { + "epoch": 1.6, + "grad_norm": 15.416257672963312, + "learning_rate": 9.39242759430826e-06, + "loss": 0.526, + "step": 10252 + }, + { + "epoch": 1.6, + "grad_norm": 24.113152665994722, + "learning_rate": 9.390744319479171e-06, + "loss": 0.6683, + "step": 10253 + }, + { + "epoch": 1.6, + "grad_norm": 15.690606755692588, + "learning_rate": 9.389061061976958e-06, + "loss": 0.5294, + "step": 10254 + }, + { + "epoch": 1.6, + "grad_norm": 35.46687030432764, + "learning_rate": 9.387377821849489e-06, + "loss": 0.7158, + "step": 10255 + }, + { + "epoch": 1.6, + "grad_norm": 28.0977567736958, + "learning_rate": 9.385694599144636e-06, + "loss": 0.6548, + "step": 10256 + }, + { + "epoch": 1.6, + "grad_norm": 17.389204608186773, + "learning_rate": 9.38401139391027e-06, + "loss": 0.5865, + "step": 10257 + }, + { + "epoch": 1.6, + "grad_norm": 16.95918528178506, + "learning_rate": 9.382328206194259e-06, + "loss": 0.6258, + "step": 10258 + }, + { + "epoch": 1.6, + "grad_norm": 15.568563356356565, + "learning_rate": 9.380645036044473e-06, + "loss": 0.502, + "step": 10259 + }, + { + "epoch": 1.6, + "grad_norm": 17.88837911135321, + "learning_rate": 9.37896188350878e-06, + "loss": 0.6104, + "step": 10260 + }, + { + "epoch": 1.6, + "grad_norm": 22.81118628233423, + "learning_rate": 9.377278748635046e-06, + "loss": 0.6765, + "step": 10261 + }, + { + "epoch": 1.6, + "grad_norm": 19.87957169964455, + "learning_rate": 9.375595631471143e-06, + "loss": 0.578, + "step": 10262 + }, + { + "epoch": 1.6, + "grad_norm": 15.256536082207402, + "learning_rate": 9.373912532064931e-06, + "loss": 0.5445, + "step": 10263 + }, + { + "epoch": 1.6, + "grad_norm": 21.100147125699564, + "learning_rate": 9.372229450464287e-06, + "loss": 0.579, + "step": 10264 + }, + { + "epoch": 1.6, + "grad_norm": 12.915311870614163, + "learning_rate": 9.370546386717065e-06, + "loss": 0.5259, + "step": 10265 + }, + { + "epoch": 1.6, + "grad_norm": 31.69585569337966, + "learning_rate": 9.368863340871137e-06, + "loss": 0.5983, + "step": 10266 + }, + { + "epoch": 1.6, + "grad_norm": 16.1828908723917, + "learning_rate": 9.367180312974364e-06, + "loss": 0.5635, + "step": 10267 + }, + { + "epoch": 1.6, + "grad_norm": 12.697943695885563, + "learning_rate": 9.365497303074615e-06, + "loss": 0.5772, + "step": 10268 + }, + { + "epoch": 1.6, + "grad_norm": 24.679322525325297, + "learning_rate": 9.363814311219754e-06, + "loss": 0.6061, + "step": 10269 + }, + { + "epoch": 1.6, + "grad_norm": 13.954191442661077, + "learning_rate": 9.36213133745764e-06, + "loss": 0.5482, + "step": 10270 + }, + { + "epoch": 1.6, + "grad_norm": 14.922208037579464, + "learning_rate": 9.360448381836134e-06, + "loss": 0.5827, + "step": 10271 + }, + { + "epoch": 1.6, + "grad_norm": 23.75693757472985, + "learning_rate": 9.358765444403107e-06, + "loss": 0.7006, + "step": 10272 + }, + { + "epoch": 1.6, + "grad_norm": 18.04679844827641, + "learning_rate": 9.35708252520641e-06, 
+ "loss": 0.548, + "step": 10273 + }, + { + "epoch": 1.6, + "grad_norm": 22.92478961980648, + "learning_rate": 9.355399624293917e-06, + "loss": 0.5839, + "step": 10274 + }, + { + "epoch": 1.6, + "grad_norm": 16.703282960371414, + "learning_rate": 9.353716741713477e-06, + "loss": 0.6449, + "step": 10275 + }, + { + "epoch": 1.61, + "grad_norm": 17.442910593222667, + "learning_rate": 9.352033877512957e-06, + "loss": 0.5102, + "step": 10276 + }, + { + "epoch": 1.61, + "grad_norm": 15.887638379641604, + "learning_rate": 9.35035103174021e-06, + "loss": 0.6407, + "step": 10277 + }, + { + "epoch": 1.61, + "grad_norm": 18.359682947376314, + "learning_rate": 9.348668204443103e-06, + "loss": 0.5838, + "step": 10278 + }, + { + "epoch": 1.61, + "grad_norm": 13.313177205712227, + "learning_rate": 9.346985395669494e-06, + "loss": 0.5284, + "step": 10279 + }, + { + "epoch": 1.61, + "grad_norm": 21.927042933145184, + "learning_rate": 9.345302605467232e-06, + "loss": 0.6332, + "step": 10280 + }, + { + "epoch": 1.61, + "grad_norm": 21.39632241741886, + "learning_rate": 9.343619833884186e-06, + "loss": 0.5846, + "step": 10281 + }, + { + "epoch": 1.61, + "grad_norm": 20.727835286498337, + "learning_rate": 9.341937080968204e-06, + "loss": 0.5179, + "step": 10282 + }, + { + "epoch": 1.61, + "grad_norm": 21.1071667395017, + "learning_rate": 9.340254346767149e-06, + "loss": 0.6129, + "step": 10283 + }, + { + "epoch": 1.61, + "grad_norm": 20.245811454546278, + "learning_rate": 9.338571631328877e-06, + "loss": 0.5398, + "step": 10284 + }, + { + "epoch": 1.61, + "grad_norm": 17.270080420479953, + "learning_rate": 9.336888934701238e-06, + "loss": 0.6099, + "step": 10285 + }, + { + "epoch": 1.61, + "grad_norm": 19.16792531977402, + "learning_rate": 9.335206256932088e-06, + "loss": 0.6419, + "step": 10286 + }, + { + "epoch": 1.61, + "grad_norm": 14.657754697808723, + "learning_rate": 9.333523598069286e-06, + "loss": 0.5355, + "step": 10287 + }, + { + "epoch": 1.61, + "grad_norm": 19.473529822096957, + "learning_rate": 9.331840958160679e-06, + "loss": 0.6247, + "step": 10288 + }, + { + "epoch": 1.61, + "grad_norm": 11.271632504055837, + "learning_rate": 9.330158337254131e-06, + "loss": 0.4567, + "step": 10289 + }, + { + "epoch": 1.61, + "grad_norm": 28.09334661726697, + "learning_rate": 9.328475735397483e-06, + "loss": 0.6163, + "step": 10290 + }, + { + "epoch": 1.61, + "grad_norm": 18.25708516173471, + "learning_rate": 9.326793152638594e-06, + "loss": 0.5547, + "step": 10291 + }, + { + "epoch": 1.61, + "grad_norm": 15.876319783460378, + "learning_rate": 9.325110589025311e-06, + "loss": 0.6222, + "step": 10292 + }, + { + "epoch": 1.61, + "grad_norm": 18.7870769791939, + "learning_rate": 9.323428044605491e-06, + "loss": 0.5774, + "step": 10293 + }, + { + "epoch": 1.61, + "grad_norm": 18.43082015578266, + "learning_rate": 9.321745519426984e-06, + "loss": 0.6114, + "step": 10294 + }, + { + "epoch": 1.61, + "grad_norm": 18.380068503266173, + "learning_rate": 9.320063013537634e-06, + "loss": 0.5857, + "step": 10295 + }, + { + "epoch": 1.61, + "grad_norm": 20.748713204256827, + "learning_rate": 9.318380526985293e-06, + "loss": 0.6269, + "step": 10296 + }, + { + "epoch": 1.61, + "grad_norm": 22.65661006641371, + "learning_rate": 9.316698059817814e-06, + "loss": 0.5935, + "step": 10297 + }, + { + "epoch": 1.61, + "grad_norm": 19.79811898427557, + "learning_rate": 9.315015612083039e-06, + "loss": 0.6588, + "step": 10298 + }, + { + "epoch": 1.61, + "grad_norm": 21.167883736073115, + "learning_rate": 9.313333183828824e-06, + 
"loss": 0.6254, + "step": 10299 + }, + { + "epoch": 1.61, + "grad_norm": 20.774981369258086, + "learning_rate": 9.31165077510301e-06, + "loss": 0.5082, + "step": 10300 + }, + { + "epoch": 1.61, + "grad_norm": 16.732890730213768, + "learning_rate": 9.309968385953445e-06, + "loss": 0.6986, + "step": 10301 + }, + { + "epoch": 1.61, + "grad_norm": 21.58489678773787, + "learning_rate": 9.308286016427974e-06, + "loss": 0.5383, + "step": 10302 + }, + { + "epoch": 1.61, + "grad_norm": 18.716575974740497, + "learning_rate": 9.306603666574446e-06, + "loss": 0.5544, + "step": 10303 + }, + { + "epoch": 1.61, + "grad_norm": 15.021266107854519, + "learning_rate": 9.304921336440704e-06, + "loss": 0.5209, + "step": 10304 + }, + { + "epoch": 1.61, + "grad_norm": 19.063998376866504, + "learning_rate": 9.303239026074589e-06, + "loss": 0.5744, + "step": 10305 + }, + { + "epoch": 1.61, + "grad_norm": 28.43306292145674, + "learning_rate": 9.301556735523952e-06, + "loss": 0.6, + "step": 10306 + }, + { + "epoch": 1.61, + "grad_norm": 21.218778917099094, + "learning_rate": 9.29987446483663e-06, + "loss": 0.5853, + "step": 10307 + }, + { + "epoch": 1.61, + "grad_norm": 12.283182325207553, + "learning_rate": 9.298192214060476e-06, + "loss": 0.5186, + "step": 10308 + }, + { + "epoch": 1.61, + "grad_norm": 26.22087330342056, + "learning_rate": 9.296509983243319e-06, + "loss": 0.6404, + "step": 10309 + }, + { + "epoch": 1.61, + "grad_norm": 20.1578283098604, + "learning_rate": 9.294827772433006e-06, + "loss": 0.5412, + "step": 10310 + }, + { + "epoch": 1.61, + "grad_norm": 18.916514023106846, + "learning_rate": 9.293145581677378e-06, + "loss": 0.5919, + "step": 10311 + }, + { + "epoch": 1.61, + "grad_norm": 14.71944951867443, + "learning_rate": 9.291463411024279e-06, + "loss": 0.648, + "step": 10312 + }, + { + "epoch": 1.61, + "grad_norm": 21.223280638580093, + "learning_rate": 9.289781260521548e-06, + "loss": 0.5693, + "step": 10313 + }, + { + "epoch": 1.61, + "grad_norm": 13.29924722303305, + "learning_rate": 9.288099130217018e-06, + "loss": 0.5729, + "step": 10314 + }, + { + "epoch": 1.61, + "grad_norm": 20.990288877855676, + "learning_rate": 9.286417020158533e-06, + "loss": 0.6412, + "step": 10315 + }, + { + "epoch": 1.61, + "grad_norm": 12.722236580115561, + "learning_rate": 9.284734930393931e-06, + "loss": 0.5158, + "step": 10316 + }, + { + "epoch": 1.61, + "grad_norm": 27.177101535810518, + "learning_rate": 9.283052860971049e-06, + "loss": 0.6303, + "step": 10317 + }, + { + "epoch": 1.61, + "grad_norm": 17.53566191274117, + "learning_rate": 9.281370811937726e-06, + "loss": 0.5773, + "step": 10318 + }, + { + "epoch": 1.61, + "grad_norm": 24.816391767896608, + "learning_rate": 9.279688783341796e-06, + "loss": 0.5675, + "step": 10319 + }, + { + "epoch": 1.61, + "grad_norm": 20.432124444753892, + "learning_rate": 9.278006775231097e-06, + "loss": 0.6174, + "step": 10320 + }, + { + "epoch": 1.61, + "grad_norm": 13.286689421389843, + "learning_rate": 9.276324787653458e-06, + "loss": 0.5627, + "step": 10321 + }, + { + "epoch": 1.61, + "grad_norm": 26.682728795398713, + "learning_rate": 9.274642820656722e-06, + "loss": 0.6473, + "step": 10322 + }, + { + "epoch": 1.61, + "grad_norm": 23.291114155622914, + "learning_rate": 9.272960874288722e-06, + "loss": 0.6316, + "step": 10323 + }, + { + "epoch": 1.61, + "grad_norm": 17.73245335851438, + "learning_rate": 9.271278948597285e-06, + "loss": 0.5468, + "step": 10324 + }, + { + "epoch": 1.61, + "grad_norm": 19.81599449829306, + "learning_rate": 9.269597043630252e-06, + "loss": 
0.6296, + "step": 10325 + }, + { + "epoch": 1.61, + "grad_norm": 20.962175262337958, + "learning_rate": 9.267915159435447e-06, + "loss": 0.5747, + "step": 10326 + }, + { + "epoch": 1.61, + "grad_norm": 20.38652485369521, + "learning_rate": 9.266233296060712e-06, + "loss": 0.6067, + "step": 10327 + }, + { + "epoch": 1.61, + "grad_norm": 13.194172448009406, + "learning_rate": 9.264551453553872e-06, + "loss": 0.6119, + "step": 10328 + }, + { + "epoch": 1.61, + "grad_norm": 15.653573340854845, + "learning_rate": 9.262869631962758e-06, + "loss": 0.5876, + "step": 10329 + }, + { + "epoch": 1.61, + "grad_norm": 19.48438943970978, + "learning_rate": 9.261187831335197e-06, + "loss": 0.4992, + "step": 10330 + }, + { + "epoch": 1.61, + "grad_norm": 14.082981925122127, + "learning_rate": 9.259506051719025e-06, + "loss": 0.6096, + "step": 10331 + }, + { + "epoch": 1.61, + "grad_norm": 17.502835289291347, + "learning_rate": 9.257824293162066e-06, + "loss": 0.5047, + "step": 10332 + }, + { + "epoch": 1.61, + "grad_norm": 6.420905425209333, + "learning_rate": 9.256142555712156e-06, + "loss": 0.6344, + "step": 10333 + }, + { + "epoch": 1.61, + "grad_norm": 20.50588945073572, + "learning_rate": 9.25446083941711e-06, + "loss": 0.6006, + "step": 10334 + }, + { + "epoch": 1.61, + "grad_norm": 17.39603997529431, + "learning_rate": 9.252779144324763e-06, + "loss": 0.5787, + "step": 10335 + }, + { + "epoch": 1.61, + "grad_norm": 22.832102139952735, + "learning_rate": 9.251097470482938e-06, + "loss": 0.5998, + "step": 10336 + }, + { + "epoch": 1.61, + "grad_norm": 26.953039387000295, + "learning_rate": 9.249415817939465e-06, + "loss": 0.5463, + "step": 10337 + }, + { + "epoch": 1.61, + "grad_norm": 27.634023958526196, + "learning_rate": 9.24773418674217e-06, + "loss": 0.6105, + "step": 10338 + }, + { + "epoch": 1.61, + "grad_norm": 18.144275607637432, + "learning_rate": 9.246052576938874e-06, + "loss": 0.6086, + "step": 10339 + }, + { + "epoch": 1.62, + "grad_norm": 16.076926761811993, + "learning_rate": 9.244370988577396e-06, + "loss": 0.593, + "step": 10340 + }, + { + "epoch": 1.62, + "grad_norm": 16.39934634324917, + "learning_rate": 9.24268942170557e-06, + "loss": 0.5132, + "step": 10341 + }, + { + "epoch": 1.62, + "grad_norm": 22.67235096524518, + "learning_rate": 9.24100787637121e-06, + "loss": 0.5689, + "step": 10342 + }, + { + "epoch": 1.62, + "grad_norm": 12.510444718569524, + "learning_rate": 9.239326352622146e-06, + "loss": 0.5262, + "step": 10343 + }, + { + "epoch": 1.62, + "grad_norm": 26.69376720592254, + "learning_rate": 9.237644850506196e-06, + "loss": 0.6201, + "step": 10344 + }, + { + "epoch": 1.62, + "grad_norm": 18.471294593275754, + "learning_rate": 9.235963370071176e-06, + "loss": 0.6485, + "step": 10345 + }, + { + "epoch": 1.62, + "grad_norm": 29.448793617415234, + "learning_rate": 9.234281911364914e-06, + "loss": 0.6395, + "step": 10346 + }, + { + "epoch": 1.62, + "grad_norm": 19.550272676024864, + "learning_rate": 9.232600474435226e-06, + "loss": 0.5727, + "step": 10347 + }, + { + "epoch": 1.62, + "grad_norm": 29.68396072773288, + "learning_rate": 9.230919059329934e-06, + "loss": 0.6261, + "step": 10348 + }, + { + "epoch": 1.62, + "grad_norm": 14.850594901922985, + "learning_rate": 9.229237666096848e-06, + "loss": 0.5576, + "step": 10349 + }, + { + "epoch": 1.62, + "grad_norm": 16.38601101971303, + "learning_rate": 9.227556294783797e-06, + "loss": 0.5621, + "step": 10350 + }, + { + "epoch": 1.62, + "grad_norm": 23.363081351609598, + "learning_rate": 9.22587494543859e-06, + "loss": 0.6408, 
+ "step": 10351 + }, + { + "epoch": 1.62, + "grad_norm": 28.623481177396595, + "learning_rate": 9.224193618109052e-06, + "loss": 0.5417, + "step": 10352 + }, + { + "epoch": 1.62, + "grad_norm": 23.912896584319487, + "learning_rate": 9.222512312842988e-06, + "loss": 0.5638, + "step": 10353 + }, + { + "epoch": 1.62, + "grad_norm": 22.642217769632843, + "learning_rate": 9.220831029688222e-06, + "loss": 0.6226, + "step": 10354 + }, + { + "epoch": 1.62, + "grad_norm": 21.777555452694852, + "learning_rate": 9.219149768692563e-06, + "loss": 0.5332, + "step": 10355 + }, + { + "epoch": 1.62, + "grad_norm": 19.636761167687975, + "learning_rate": 9.217468529903829e-06, + "loss": 0.596, + "step": 10356 + }, + { + "epoch": 1.62, + "grad_norm": 20.28010750823603, + "learning_rate": 9.215787313369836e-06, + "loss": 0.576, + "step": 10357 + }, + { + "epoch": 1.62, + "grad_norm": 22.846388950768564, + "learning_rate": 9.214106119138388e-06, + "loss": 0.5896, + "step": 10358 + }, + { + "epoch": 1.62, + "grad_norm": 16.684558197454713, + "learning_rate": 9.212424947257302e-06, + "loss": 0.5528, + "step": 10359 + }, + { + "epoch": 1.62, + "grad_norm": 16.25708366017856, + "learning_rate": 9.210743797774392e-06, + "loss": 0.559, + "step": 10360 + }, + { + "epoch": 1.62, + "grad_norm": 26.2853679756167, + "learning_rate": 9.209062670737465e-06, + "loss": 0.5536, + "step": 10361 + }, + { + "epoch": 1.62, + "grad_norm": 14.790140533854814, + "learning_rate": 9.207381566194336e-06, + "loss": 0.4897, + "step": 10362 + }, + { + "epoch": 1.62, + "grad_norm": 30.496283108795332, + "learning_rate": 9.20570048419281e-06, + "loss": 0.5982, + "step": 10363 + }, + { + "epoch": 1.62, + "grad_norm": 17.592540602423043, + "learning_rate": 9.2040194247807e-06, + "loss": 0.6106, + "step": 10364 + }, + { + "epoch": 1.62, + "grad_norm": 21.310805134488394, + "learning_rate": 9.202338388005807e-06, + "loss": 0.5794, + "step": 10365 + }, + { + "epoch": 1.62, + "grad_norm": 21.701146314865305, + "learning_rate": 9.200657373915946e-06, + "loss": 0.5905, + "step": 10366 + }, + { + "epoch": 1.62, + "grad_norm": 14.291560195112899, + "learning_rate": 9.198976382558924e-06, + "loss": 0.5828, + "step": 10367 + }, + { + "epoch": 1.62, + "grad_norm": 12.652736740745688, + "learning_rate": 9.19729541398254e-06, + "loss": 0.4989, + "step": 10368 + }, + { + "epoch": 1.62, + "grad_norm": 15.452074973242688, + "learning_rate": 9.19561446823461e-06, + "loss": 0.5452, + "step": 10369 + }, + { + "epoch": 1.62, + "grad_norm": 18.520115284540324, + "learning_rate": 9.19393354536293e-06, + "loss": 0.6126, + "step": 10370 + }, + { + "epoch": 1.62, + "grad_norm": 13.569610225599869, + "learning_rate": 9.19225264541531e-06, + "loss": 0.4944, + "step": 10371 + }, + { + "epoch": 1.62, + "grad_norm": 22.269979223842032, + "learning_rate": 9.190571768439554e-06, + "loss": 0.6588, + "step": 10372 + }, + { + "epoch": 1.62, + "grad_norm": 13.070464657766006, + "learning_rate": 9.188890914483462e-06, + "loss": 0.5684, + "step": 10373 + }, + { + "epoch": 1.62, + "grad_norm": 22.193578442888327, + "learning_rate": 9.187210083594835e-06, + "loss": 0.4986, + "step": 10374 + }, + { + "epoch": 1.62, + "grad_norm": 12.756416496127612, + "learning_rate": 9.185529275821481e-06, + "loss": 0.4903, + "step": 10375 + }, + { + "epoch": 1.62, + "grad_norm": 36.45561857070713, + "learning_rate": 9.183848491211196e-06, + "loss": 0.5849, + "step": 10376 + }, + { + "epoch": 1.62, + "grad_norm": 20.188243915370844, + "learning_rate": 9.182167729811788e-06, + "loss": 0.5845, + 
"step": 10377 + }, + { + "epoch": 1.62, + "grad_norm": 12.224510569736154, + "learning_rate": 9.180486991671046e-06, + "loss": 0.4929, + "step": 10378 + }, + { + "epoch": 1.62, + "grad_norm": 16.828061450813465, + "learning_rate": 9.178806276836777e-06, + "loss": 0.5227, + "step": 10379 + }, + { + "epoch": 1.62, + "grad_norm": 12.600475383227142, + "learning_rate": 9.177125585356773e-06, + "loss": 0.4719, + "step": 10380 + }, + { + "epoch": 1.62, + "grad_norm": 22.9438112967308, + "learning_rate": 9.175444917278839e-06, + "loss": 0.5348, + "step": 10381 + }, + { + "epoch": 1.62, + "grad_norm": 24.027224423666176, + "learning_rate": 9.173764272650771e-06, + "loss": 0.5896, + "step": 10382 + }, + { + "epoch": 1.62, + "grad_norm": 24.211070452728517, + "learning_rate": 9.172083651520362e-06, + "loss": 0.683, + "step": 10383 + }, + { + "epoch": 1.62, + "grad_norm": 18.378095623142716, + "learning_rate": 9.170403053935408e-06, + "loss": 0.5744, + "step": 10384 + }, + { + "epoch": 1.62, + "grad_norm": 19.88163814186779, + "learning_rate": 9.168722479943707e-06, + "loss": 0.5265, + "step": 10385 + }, + { + "epoch": 1.62, + "grad_norm": 17.597047180491167, + "learning_rate": 9.167041929593051e-06, + "loss": 0.5158, + "step": 10386 + }, + { + "epoch": 1.62, + "grad_norm": 16.541592896320616, + "learning_rate": 9.16536140293124e-06, + "loss": 0.5669, + "step": 10387 + }, + { + "epoch": 1.62, + "grad_norm": 20.123741828790738, + "learning_rate": 9.16368090000606e-06, + "loss": 0.6116, + "step": 10388 + }, + { + "epoch": 1.62, + "grad_norm": 11.70749623638826, + "learning_rate": 9.162000420865303e-06, + "loss": 0.5384, + "step": 10389 + }, + { + "epoch": 1.62, + "grad_norm": 21.061476114470807, + "learning_rate": 9.160319965556765e-06, + "loss": 0.5703, + "step": 10390 + }, + { + "epoch": 1.62, + "grad_norm": 16.41892315738222, + "learning_rate": 9.158639534128239e-06, + "loss": 0.5311, + "step": 10391 + }, + { + "epoch": 1.62, + "grad_norm": 16.964652011410568, + "learning_rate": 9.156959126627512e-06, + "loss": 0.5812, + "step": 10392 + }, + { + "epoch": 1.62, + "grad_norm": 21.382889950618612, + "learning_rate": 9.155278743102372e-06, + "loss": 0.6956, + "step": 10393 + }, + { + "epoch": 1.62, + "grad_norm": 15.852998802875378, + "learning_rate": 9.15359838360061e-06, + "loss": 0.5987, + "step": 10394 + }, + { + "epoch": 1.62, + "grad_norm": 24.414755959389733, + "learning_rate": 9.151918048170015e-06, + "loss": 0.5732, + "step": 10395 + }, + { + "epoch": 1.62, + "grad_norm": 17.29753428997181, + "learning_rate": 9.150237736858376e-06, + "loss": 0.5638, + "step": 10396 + }, + { + "epoch": 1.62, + "grad_norm": 23.442121723980062, + "learning_rate": 9.148557449713481e-06, + "loss": 0.5895, + "step": 10397 + }, + { + "epoch": 1.62, + "grad_norm": 19.856061955456987, + "learning_rate": 9.146877186783112e-06, + "loss": 0.5639, + "step": 10398 + }, + { + "epoch": 1.62, + "grad_norm": 22.664234970078237, + "learning_rate": 9.145196948115053e-06, + "loss": 0.5464, + "step": 10399 + }, + { + "epoch": 1.62, + "grad_norm": 22.588811622575363, + "learning_rate": 9.143516733757097e-06, + "loss": 0.5261, + "step": 10400 + }, + { + "epoch": 1.62, + "grad_norm": 15.106962277375844, + "learning_rate": 9.141836543757025e-06, + "loss": 0.6426, + "step": 10401 + }, + { + "epoch": 1.62, + "grad_norm": 25.18564480262938, + "learning_rate": 9.140156378162619e-06, + "loss": 0.6504, + "step": 10402 + }, + { + "epoch": 1.62, + "grad_norm": 20.423011503722876, + "learning_rate": 9.138476237021659e-06, + "loss": 0.5862, + 
"step": 10403 + }, + { + "epoch": 1.63, + "grad_norm": 25.163164573407695, + "learning_rate": 9.136796120381933e-06, + "loss": 0.6266, + "step": 10404 + }, + { + "epoch": 1.63, + "grad_norm": 22.171129386030675, + "learning_rate": 9.13511602829122e-06, + "loss": 0.6082, + "step": 10405 + }, + { + "epoch": 1.63, + "grad_norm": 15.648151915025393, + "learning_rate": 9.133435960797304e-06, + "loss": 0.5139, + "step": 10406 + }, + { + "epoch": 1.63, + "grad_norm": 34.61505707495903, + "learning_rate": 9.131755917947958e-06, + "loss": 0.6058, + "step": 10407 + }, + { + "epoch": 1.63, + "grad_norm": 23.20616615479577, + "learning_rate": 9.13007589979097e-06, + "loss": 0.6402, + "step": 10408 + }, + { + "epoch": 1.63, + "grad_norm": 13.013305828952747, + "learning_rate": 9.12839590637411e-06, + "loss": 0.5154, + "step": 10409 + }, + { + "epoch": 1.63, + "grad_norm": 15.371907195460599, + "learning_rate": 9.126715937745163e-06, + "loss": 0.585, + "step": 10410 + }, + { + "epoch": 1.63, + "grad_norm": 14.81631373047951, + "learning_rate": 9.125035993951907e-06, + "loss": 0.6006, + "step": 10411 + }, + { + "epoch": 1.63, + "grad_norm": 18.413609229850618, + "learning_rate": 9.12335607504211e-06, + "loss": 0.581, + "step": 10412 + }, + { + "epoch": 1.63, + "grad_norm": 18.397150209266, + "learning_rate": 9.121676181063556e-06, + "loss": 0.5582, + "step": 10413 + }, + { + "epoch": 1.63, + "grad_norm": 20.1919420724341, + "learning_rate": 9.119996312064014e-06, + "loss": 0.5781, + "step": 10414 + }, + { + "epoch": 1.63, + "grad_norm": 18.907842755811654, + "learning_rate": 9.118316468091267e-06, + "loss": 0.525, + "step": 10415 + }, + { + "epoch": 1.63, + "grad_norm": 18.881090241103244, + "learning_rate": 9.116636649193085e-06, + "loss": 0.5209, + "step": 10416 + }, + { + "epoch": 1.63, + "grad_norm": 15.368469373418282, + "learning_rate": 9.114956855417236e-06, + "loss": 0.5508, + "step": 10417 + }, + { + "epoch": 1.63, + "grad_norm": 24.752123591082036, + "learning_rate": 9.113277086811495e-06, + "loss": 0.503, + "step": 10418 + }, + { + "epoch": 1.63, + "grad_norm": 16.305718111075542, + "learning_rate": 9.11159734342364e-06, + "loss": 0.5498, + "step": 10419 + }, + { + "epoch": 1.63, + "grad_norm": 15.812254986730656, + "learning_rate": 9.109917625301432e-06, + "loss": 0.5825, + "step": 10420 + }, + { + "epoch": 1.63, + "grad_norm": 14.583515730608436, + "learning_rate": 9.108237932492652e-06, + "loss": 0.5404, + "step": 10421 + }, + { + "epoch": 1.63, + "grad_norm": 15.505171441865173, + "learning_rate": 9.106558265045058e-06, + "loss": 0.5211, + "step": 10422 + }, + { + "epoch": 1.63, + "grad_norm": 22.580835701071518, + "learning_rate": 9.104878623006427e-06, + "loss": 0.583, + "step": 10423 + }, + { + "epoch": 1.63, + "grad_norm": 21.24192872894691, + "learning_rate": 9.103199006424521e-06, + "loss": 0.5479, + "step": 10424 + }, + { + "epoch": 1.63, + "grad_norm": 21.851000703934886, + "learning_rate": 9.101519415347113e-06, + "loss": 0.5859, + "step": 10425 + }, + { + "epoch": 1.63, + "grad_norm": 17.38091153155122, + "learning_rate": 9.099839849821971e-06, + "loss": 0.5956, + "step": 10426 + }, + { + "epoch": 1.63, + "grad_norm": 62.709451744069504, + "learning_rate": 9.098160309896852e-06, + "loss": 0.5458, + "step": 10427 + }, + { + "epoch": 1.63, + "grad_norm": 17.208093056625394, + "learning_rate": 9.096480795619527e-06, + "loss": 0.5215, + "step": 10428 + }, + { + "epoch": 1.63, + "grad_norm": 18.630082533037218, + "learning_rate": 9.09480130703776e-06, + "loss": 0.5565, + "step": 10429 
+ }, + { + "epoch": 1.63, + "grad_norm": 27.96890927061625, + "learning_rate": 9.093121844199312e-06, + "loss": 0.6058, + "step": 10430 + }, + { + "epoch": 1.63, + "grad_norm": 22.805145402828263, + "learning_rate": 9.091442407151955e-06, + "loss": 0.5889, + "step": 10431 + }, + { + "epoch": 1.63, + "grad_norm": 23.358026581697406, + "learning_rate": 9.08976299594344e-06, + "loss": 0.5732, + "step": 10432 + }, + { + "epoch": 1.63, + "grad_norm": 17.60574966927669, + "learning_rate": 9.088083610621532e-06, + "loss": 0.5694, + "step": 10433 + }, + { + "epoch": 1.63, + "grad_norm": 27.29064929366573, + "learning_rate": 9.086404251233993e-06, + "loss": 0.5882, + "step": 10434 + }, + { + "epoch": 1.63, + "grad_norm": 18.62426147660861, + "learning_rate": 9.084724917828585e-06, + "loss": 0.5358, + "step": 10435 + }, + { + "epoch": 1.63, + "grad_norm": 14.684225797940414, + "learning_rate": 9.083045610453065e-06, + "loss": 0.5923, + "step": 10436 + }, + { + "epoch": 1.63, + "grad_norm": 14.8363555208507, + "learning_rate": 9.081366329155188e-06, + "loss": 0.6223, + "step": 10437 + }, + { + "epoch": 1.63, + "grad_norm": 12.02217791405987, + "learning_rate": 9.079687073982719e-06, + "loss": 0.5745, + "step": 10438 + }, + { + "epoch": 1.63, + "grad_norm": 18.561908722139812, + "learning_rate": 9.078007844983406e-06, + "loss": 0.6294, + "step": 10439 + }, + { + "epoch": 1.63, + "grad_norm": 29.790703838079608, + "learning_rate": 9.076328642205015e-06, + "loss": 0.6192, + "step": 10440 + }, + { + "epoch": 1.63, + "grad_norm": 13.040004356222106, + "learning_rate": 9.074649465695298e-06, + "loss": 0.5477, + "step": 10441 + }, + { + "epoch": 1.63, + "grad_norm": 20.168233502881762, + "learning_rate": 9.072970315502008e-06, + "loss": 0.6195, + "step": 10442 + }, + { + "epoch": 1.63, + "grad_norm": 19.03000217490477, + "learning_rate": 9.071291191672896e-06, + "loss": 0.621, + "step": 10443 + }, + { + "epoch": 1.63, + "grad_norm": 25.93311040169013, + "learning_rate": 9.069612094255722e-06, + "loss": 0.588, + "step": 10444 + }, + { + "epoch": 1.63, + "grad_norm": 20.210828988381525, + "learning_rate": 9.067933023298234e-06, + "loss": 0.5322, + "step": 10445 + }, + { + "epoch": 1.63, + "grad_norm": 13.698145656581431, + "learning_rate": 9.066253978848192e-06, + "loss": 0.5393, + "step": 10446 + }, + { + "epoch": 1.63, + "grad_norm": 20.219936736948377, + "learning_rate": 9.064574960953334e-06, + "loss": 0.5373, + "step": 10447 + }, + { + "epoch": 1.63, + "grad_norm": 30.702779209820246, + "learning_rate": 9.06289596966142e-06, + "loss": 0.6247, + "step": 10448 + }, + { + "epoch": 1.63, + "grad_norm": 27.47793611840959, + "learning_rate": 9.061217005020194e-06, + "loss": 0.5536, + "step": 10449 + }, + { + "epoch": 1.63, + "grad_norm": 22.542843646619687, + "learning_rate": 9.05953806707741e-06, + "loss": 0.4856, + "step": 10450 + }, + { + "epoch": 1.63, + "grad_norm": 29.10338815121388, + "learning_rate": 9.057859155880811e-06, + "loss": 0.5466, + "step": 10451 + }, + { + "epoch": 1.63, + "grad_norm": 14.75676527896899, + "learning_rate": 9.05618027147815e-06, + "loss": 0.5425, + "step": 10452 + }, + { + "epoch": 1.63, + "grad_norm": 21.432956507410985, + "learning_rate": 9.054501413917163e-06, + "loss": 0.5824, + "step": 10453 + }, + { + "epoch": 1.63, + "grad_norm": 23.85335939962612, + "learning_rate": 9.052822583245608e-06, + "loss": 0.5536, + "step": 10454 + }, + { + "epoch": 1.63, + "grad_norm": 13.364184912418985, + "learning_rate": 9.051143779511226e-06, + "loss": 0.5782, + "step": 10455 + }, + { + 
"epoch": 1.63, + "grad_norm": 26.731295591658434, + "learning_rate": 9.049465002761756e-06, + "loss": 0.6328, + "step": 10456 + }, + { + "epoch": 1.63, + "grad_norm": 20.87272332962382, + "learning_rate": 9.047786253044945e-06, + "loss": 0.5642, + "step": 10457 + }, + { + "epoch": 1.63, + "grad_norm": 24.542904040311868, + "learning_rate": 9.046107530408537e-06, + "loss": 0.5402, + "step": 10458 + }, + { + "epoch": 1.63, + "grad_norm": 20.292259341223126, + "learning_rate": 9.044428834900274e-06, + "loss": 0.5507, + "step": 10459 + }, + { + "epoch": 1.63, + "grad_norm": 21.090231742617792, + "learning_rate": 9.042750166567898e-06, + "loss": 0.6195, + "step": 10460 + }, + { + "epoch": 1.63, + "grad_norm": 17.310976359907976, + "learning_rate": 9.041071525459145e-06, + "loss": 0.564, + "step": 10461 + }, + { + "epoch": 1.63, + "grad_norm": 15.651425541524818, + "learning_rate": 9.039392911621754e-06, + "loss": 0.5527, + "step": 10462 + }, + { + "epoch": 1.63, + "grad_norm": 14.544464442729812, + "learning_rate": 9.03771432510347e-06, + "loss": 0.5885, + "step": 10463 + }, + { + "epoch": 1.63, + "grad_norm": 14.909516456475053, + "learning_rate": 9.036035765952023e-06, + "loss": 0.5726, + "step": 10464 + }, + { + "epoch": 1.63, + "grad_norm": 18.64231401738984, + "learning_rate": 9.034357234215164e-06, + "loss": 0.5706, + "step": 10465 + }, + { + "epoch": 1.63, + "grad_norm": 17.89626764825909, + "learning_rate": 9.032678729940614e-06, + "loss": 0.5832, + "step": 10466 + }, + { + "epoch": 1.63, + "grad_norm": 24.214936565917274, + "learning_rate": 9.031000253176117e-06, + "loss": 0.5661, + "step": 10467 + }, + { + "epoch": 1.64, + "grad_norm": 24.995543725587893, + "learning_rate": 9.029321803969402e-06, + "loss": 0.6112, + "step": 10468 + }, + { + "epoch": 1.64, + "grad_norm": 21.20016321517818, + "learning_rate": 9.02764338236821e-06, + "loss": 0.5867, + "step": 10469 + }, + { + "epoch": 1.64, + "grad_norm": 16.50360846075877, + "learning_rate": 9.025964988420274e-06, + "loss": 0.6435, + "step": 10470 + }, + { + "epoch": 1.64, + "grad_norm": 17.068692340826775, + "learning_rate": 9.024286622173323e-06, + "loss": 0.6163, + "step": 10471 + }, + { + "epoch": 1.64, + "grad_norm": 16.56694383205472, + "learning_rate": 9.022608283675086e-06, + "loss": 0.6321, + "step": 10472 + }, + { + "epoch": 1.64, + "grad_norm": 13.86294535479617, + "learning_rate": 9.0209299729733e-06, + "loss": 0.592, + "step": 10473 + }, + { + "epoch": 1.64, + "grad_norm": 18.245385621951677, + "learning_rate": 9.019251690115692e-06, + "loss": 0.6287, + "step": 10474 + }, + { + "epoch": 1.64, + "grad_norm": 25.085652060178237, + "learning_rate": 9.017573435149994e-06, + "loss": 0.606, + "step": 10475 + }, + { + "epoch": 1.64, + "grad_norm": 25.030911748127032, + "learning_rate": 9.015895208123933e-06, + "loss": 0.6451, + "step": 10476 + }, + { + "epoch": 1.64, + "grad_norm": 21.749620147928773, + "learning_rate": 9.014217009085232e-06, + "loss": 0.622, + "step": 10477 + }, + { + "epoch": 1.64, + "grad_norm": 17.241953794194444, + "learning_rate": 9.012538838081626e-06, + "loss": 0.6088, + "step": 10478 + }, + { + "epoch": 1.64, + "grad_norm": 16.791576828274543, + "learning_rate": 9.010860695160839e-06, + "loss": 0.5898, + "step": 10479 + }, + { + "epoch": 1.64, + "grad_norm": 18.109710640467018, + "learning_rate": 9.009182580370596e-06, + "loss": 0.5762, + "step": 10480 + }, + { + "epoch": 1.64, + "grad_norm": 14.69851413024346, + "learning_rate": 9.007504493758616e-06, + "loss": 0.502, + "step": 10481 + }, + { + "epoch": 
1.64, + "grad_norm": 15.708258823029182, + "learning_rate": 9.005826435372631e-06, + "loss": 0.5825, + "step": 10482 + }, + { + "epoch": 1.64, + "grad_norm": 32.349294069837086, + "learning_rate": 9.004148405260358e-06, + "loss": 0.5218, + "step": 10483 + }, + { + "epoch": 1.64, + "grad_norm": 16.486705073880735, + "learning_rate": 9.002470403469525e-06, + "loss": 0.5413, + "step": 10484 + }, + { + "epoch": 1.64, + "grad_norm": 19.943930851873372, + "learning_rate": 9.000792430047852e-06, + "loss": 0.6559, + "step": 10485 + }, + { + "epoch": 1.64, + "grad_norm": 17.393029896664174, + "learning_rate": 8.999114485043053e-06, + "loss": 0.6228, + "step": 10486 + }, + { + "epoch": 1.64, + "grad_norm": 14.855324695579824, + "learning_rate": 8.997436568502852e-06, + "loss": 0.5897, + "step": 10487 + }, + { + "epoch": 1.64, + "grad_norm": 38.82854908916895, + "learning_rate": 8.995758680474972e-06, + "loss": 0.6062, + "step": 10488 + }, + { + "epoch": 1.64, + "grad_norm": 30.91201636922907, + "learning_rate": 8.994080821007124e-06, + "loss": 0.5471, + "step": 10489 + }, + { + "epoch": 1.64, + "grad_norm": 16.952062258832903, + "learning_rate": 8.992402990147035e-06, + "loss": 0.596, + "step": 10490 + }, + { + "epoch": 1.64, + "grad_norm": 14.700295361483954, + "learning_rate": 8.99072518794241e-06, + "loss": 0.5504, + "step": 10491 + }, + { + "epoch": 1.64, + "grad_norm": 26.66921100374453, + "learning_rate": 8.989047414440973e-06, + "loss": 0.6041, + "step": 10492 + }, + { + "epoch": 1.64, + "grad_norm": 23.783490233980313, + "learning_rate": 8.987369669690433e-06, + "loss": 0.6787, + "step": 10493 + }, + { + "epoch": 1.64, + "grad_norm": 22.90884712817259, + "learning_rate": 8.98569195373851e-06, + "loss": 0.5389, + "step": 10494 + }, + { + "epoch": 1.64, + "grad_norm": 15.559373820232615, + "learning_rate": 8.984014266632916e-06, + "loss": 0.5523, + "step": 10495 + }, + { + "epoch": 1.64, + "grad_norm": 22.224304031512577, + "learning_rate": 8.98233660842136e-06, + "loss": 0.5691, + "step": 10496 + }, + { + "epoch": 1.64, + "grad_norm": 14.789810081162711, + "learning_rate": 8.980658979151556e-06, + "loss": 0.6064, + "step": 10497 + }, + { + "epoch": 1.64, + "grad_norm": 16.598343913717517, + "learning_rate": 8.978981378871213e-06, + "loss": 0.5818, + "step": 10498 + }, + { + "epoch": 1.64, + "grad_norm": 14.935223357032948, + "learning_rate": 8.977303807628046e-06, + "loss": 0.5364, + "step": 10499 + }, + { + "epoch": 1.64, + "grad_norm": 15.71346149770786, + "learning_rate": 8.975626265469755e-06, + "loss": 0.5531, + "step": 10500 + }, + { + "epoch": 1.64, + "grad_norm": 15.912235980585484, + "learning_rate": 8.973948752444057e-06, + "loss": 0.5786, + "step": 10501 + }, + { + "epoch": 1.64, + "grad_norm": 15.822584951635, + "learning_rate": 8.972271268598653e-06, + "loss": 0.5186, + "step": 10502 + }, + { + "epoch": 1.64, + "grad_norm": 12.646987580410274, + "learning_rate": 8.970593813981257e-06, + "loss": 0.527, + "step": 10503 + }, + { + "epoch": 1.64, + "grad_norm": 21.37215044870707, + "learning_rate": 8.96891638863957e-06, + "loss": 0.5504, + "step": 10504 + }, + { + "epoch": 1.64, + "grad_norm": 18.32014397053585, + "learning_rate": 8.967238992621297e-06, + "loss": 0.5552, + "step": 10505 + }, + { + "epoch": 1.64, + "grad_norm": 32.506098366750244, + "learning_rate": 8.965561625974142e-06, + "loss": 0.6567, + "step": 10506 + }, + { + "epoch": 1.64, + "grad_norm": 22.63123209883333, + "learning_rate": 8.96388428874581e-06, + "loss": 0.5346, + "step": 10507 + }, + { + "epoch": 1.64, + 
"grad_norm": 29.01387683201534, + "learning_rate": 8.962206980984001e-06, + "loss": 0.5601, + "step": 10508 + }, + { + "epoch": 1.64, + "grad_norm": 20.579383760185625, + "learning_rate": 8.960529702736425e-06, + "loss": 0.6136, + "step": 10509 + }, + { + "epoch": 1.64, + "grad_norm": 33.94865908966314, + "learning_rate": 8.95885245405077e-06, + "loss": 0.6253, + "step": 10510 + }, + { + "epoch": 1.64, + "grad_norm": 14.326960453414634, + "learning_rate": 8.957175234974744e-06, + "loss": 0.5111, + "step": 10511 + }, + { + "epoch": 1.64, + "grad_norm": 14.295587042864588, + "learning_rate": 8.955498045556041e-06, + "loss": 0.5929, + "step": 10512 + }, + { + "epoch": 1.64, + "grad_norm": 15.619679746855416, + "learning_rate": 8.953820885842366e-06, + "loss": 0.5436, + "step": 10513 + }, + { + "epoch": 1.64, + "grad_norm": 18.316659410239936, + "learning_rate": 8.952143755881416e-06, + "loss": 0.5652, + "step": 10514 + }, + { + "epoch": 1.64, + "grad_norm": 19.005804529680333, + "learning_rate": 8.95046665572088e-06, + "loss": 0.5638, + "step": 10515 + }, + { + "epoch": 1.64, + "grad_norm": 23.193856865295665, + "learning_rate": 8.948789585408457e-06, + "loss": 0.6839, + "step": 10516 + }, + { + "epoch": 1.64, + "grad_norm": 29.485420764435943, + "learning_rate": 8.947112544991846e-06, + "loss": 0.691, + "step": 10517 + }, + { + "epoch": 1.64, + "grad_norm": 18.825134452574616, + "learning_rate": 8.945435534518737e-06, + "loss": 0.535, + "step": 10518 + }, + { + "epoch": 1.64, + "grad_norm": 17.698285867340033, + "learning_rate": 8.943758554036828e-06, + "loss": 0.6034, + "step": 10519 + }, + { + "epoch": 1.64, + "grad_norm": 18.116419948227406, + "learning_rate": 8.942081603593805e-06, + "loss": 0.5716, + "step": 10520 + }, + { + "epoch": 1.64, + "grad_norm": 11.646230375318012, + "learning_rate": 8.94040468323736e-06, + "loss": 0.4644, + "step": 10521 + }, + { + "epoch": 1.64, + "grad_norm": 15.147505503801593, + "learning_rate": 8.93872779301519e-06, + "loss": 0.5879, + "step": 10522 + }, + { + "epoch": 1.64, + "grad_norm": 23.506815008398426, + "learning_rate": 8.93705093297498e-06, + "loss": 0.6004, + "step": 10523 + }, + { + "epoch": 1.64, + "grad_norm": 28.961036983470393, + "learning_rate": 8.93537410316442e-06, + "loss": 0.5787, + "step": 10524 + }, + { + "epoch": 1.64, + "grad_norm": 25.411648502851467, + "learning_rate": 8.933697303631195e-06, + "loss": 0.5981, + "step": 10525 + }, + { + "epoch": 1.64, + "grad_norm": 16.85277461758098, + "learning_rate": 8.932020534422997e-06, + "loss": 0.5784, + "step": 10526 + }, + { + "epoch": 1.64, + "grad_norm": 20.567264533179394, + "learning_rate": 8.930343795587508e-06, + "loss": 0.6279, + "step": 10527 + }, + { + "epoch": 1.64, + "grad_norm": 19.783303706361327, + "learning_rate": 8.928667087172417e-06, + "loss": 0.5405, + "step": 10528 + }, + { + "epoch": 1.64, + "grad_norm": 26.242620891570777, + "learning_rate": 8.92699040922541e-06, + "loss": 0.598, + "step": 10529 + }, + { + "epoch": 1.64, + "grad_norm": 24.446708085871386, + "learning_rate": 8.925313761794166e-06, + "loss": 0.649, + "step": 10530 + }, + { + "epoch": 1.64, + "grad_norm": 26.43444869004903, + "learning_rate": 8.92363714492637e-06, + "loss": 0.5425, + "step": 10531 + }, + { + "epoch": 1.65, + "grad_norm": 19.426807286335617, + "learning_rate": 8.921960558669704e-06, + "loss": 0.5696, + "step": 10532 + }, + { + "epoch": 1.65, + "grad_norm": 22.3181047966698, + "learning_rate": 8.920284003071846e-06, + "loss": 0.5564, + "step": 10533 + }, + { + "epoch": 1.65, + 
"grad_norm": 19.588846155782246, + "learning_rate": 8.918607478180487e-06, + "loss": 0.6613, + "step": 10534 + }, + { + "epoch": 1.65, + "grad_norm": 21.827995510850453, + "learning_rate": 8.916930984043291e-06, + "loss": 0.6416, + "step": 10535 + }, + { + "epoch": 1.65, + "grad_norm": 15.902758208505269, + "learning_rate": 8.915254520707948e-06, + "loss": 0.6196, + "step": 10536 + }, + { + "epoch": 1.65, + "grad_norm": 22.381999434105282, + "learning_rate": 8.913578088222129e-06, + "loss": 0.627, + "step": 10537 + }, + { + "epoch": 1.65, + "grad_norm": 15.943552720985142, + "learning_rate": 8.911901686633516e-06, + "loss": 0.5887, + "step": 10538 + }, + { + "epoch": 1.65, + "grad_norm": 20.470312676219738, + "learning_rate": 8.910225315989786e-06, + "loss": 0.532, + "step": 10539 + }, + { + "epoch": 1.65, + "grad_norm": 17.918104668553518, + "learning_rate": 8.908548976338604e-06, + "loss": 0.5603, + "step": 10540 + }, + { + "epoch": 1.65, + "grad_norm": 26.809946006343832, + "learning_rate": 8.906872667727652e-06, + "loss": 0.5891, + "step": 10541 + }, + { + "epoch": 1.65, + "grad_norm": 17.78282057566971, + "learning_rate": 8.905196390204604e-06, + "loss": 0.5432, + "step": 10542 + }, + { + "epoch": 1.65, + "grad_norm": 22.873296661550523, + "learning_rate": 8.903520143817128e-06, + "loss": 0.4863, + "step": 10543 + }, + { + "epoch": 1.65, + "grad_norm": 20.609685488784915, + "learning_rate": 8.901843928612902e-06, + "loss": 0.5901, + "step": 10544 + }, + { + "epoch": 1.65, + "grad_norm": 19.091840284059156, + "learning_rate": 8.90016774463959e-06, + "loss": 0.5348, + "step": 10545 + }, + { + "epoch": 1.65, + "grad_norm": 29.813559242190617, + "learning_rate": 8.89849159194486e-06, + "loss": 0.5508, + "step": 10546 + }, + { + "epoch": 1.65, + "grad_norm": 13.20410714700571, + "learning_rate": 8.89681547057639e-06, + "loss": 0.5794, + "step": 10547 + }, + { + "epoch": 1.65, + "grad_norm": 21.54428253732981, + "learning_rate": 8.895139380581843e-06, + "loss": 0.5544, + "step": 10548 + }, + { + "epoch": 1.65, + "grad_norm": 15.134956746082892, + "learning_rate": 8.893463322008885e-06, + "loss": 0.5286, + "step": 10549 + }, + { + "epoch": 1.65, + "grad_norm": 18.96443540760124, + "learning_rate": 8.891787294905178e-06, + "loss": 0.5442, + "step": 10550 + }, + { + "epoch": 1.65, + "grad_norm": 14.288512659939148, + "learning_rate": 8.890111299318396e-06, + "loss": 0.5372, + "step": 10551 + }, + { + "epoch": 1.65, + "grad_norm": 20.022725706215542, + "learning_rate": 8.888435335296197e-06, + "loss": 0.6456, + "step": 10552 + }, + { + "epoch": 1.65, + "grad_norm": 25.12157082642232, + "learning_rate": 8.886759402886253e-06, + "loss": 0.621, + "step": 10553 + }, + { + "epoch": 1.65, + "grad_norm": 23.40396885521449, + "learning_rate": 8.885083502136214e-06, + "loss": 0.6463, + "step": 10554 + }, + { + "epoch": 1.65, + "grad_norm": 26.201191068195104, + "learning_rate": 8.883407633093751e-06, + "loss": 0.6021, + "step": 10555 + }, + { + "epoch": 1.65, + "grad_norm": 19.472412542503527, + "learning_rate": 8.881731795806517e-06, + "loss": 0.5157, + "step": 10556 + }, + { + "epoch": 1.65, + "grad_norm": 29.813747006053468, + "learning_rate": 8.88005599032218e-06, + "loss": 0.6652, + "step": 10557 + }, + { + "epoch": 1.65, + "grad_norm": 24.104706436439592, + "learning_rate": 8.878380216688399e-06, + "loss": 0.5574, + "step": 10558 + }, + { + "epoch": 1.65, + "grad_norm": 14.11552890909985, + "learning_rate": 8.876704474952826e-06, + "loss": 0.527, + "step": 10559 + }, + { + "epoch": 1.65, + 
"grad_norm": 21.01454380969632, + "learning_rate": 8.875028765163117e-06, + "loss": 0.5797, + "step": 10560 + }, + { + "epoch": 1.65, + "grad_norm": 46.84552553615185, + "learning_rate": 8.873353087366936e-06, + "loss": 0.6101, + "step": 10561 + }, + { + "epoch": 1.65, + "grad_norm": 16.20300028154275, + "learning_rate": 8.87167744161193e-06, + "loss": 0.5251, + "step": 10562 + }, + { + "epoch": 1.65, + "grad_norm": 18.605342130132044, + "learning_rate": 8.870001827945764e-06, + "loss": 0.6308, + "step": 10563 + }, + { + "epoch": 1.65, + "grad_norm": 22.411500302351627, + "learning_rate": 8.86832624641608e-06, + "loss": 0.5474, + "step": 10564 + }, + { + "epoch": 1.65, + "grad_norm": 21.25021404270756, + "learning_rate": 8.866650697070535e-06, + "loss": 0.5891, + "step": 10565 + }, + { + "epoch": 1.65, + "grad_norm": 23.708686623737233, + "learning_rate": 8.864975179956784e-06, + "loss": 0.6242, + "step": 10566 + }, + { + "epoch": 1.65, + "grad_norm": 23.342905163318473, + "learning_rate": 8.863299695122473e-06, + "loss": 0.6278, + "step": 10567 + }, + { + "epoch": 1.65, + "grad_norm": 12.250185552111372, + "learning_rate": 8.861624242615258e-06, + "loss": 0.5149, + "step": 10568 + }, + { + "epoch": 1.65, + "grad_norm": 15.698050931771068, + "learning_rate": 8.859948822482777e-06, + "loss": 0.6111, + "step": 10569 + }, + { + "epoch": 1.65, + "grad_norm": 15.970391682744916, + "learning_rate": 8.85827343477269e-06, + "loss": 0.6045, + "step": 10570 + }, + { + "epoch": 1.65, + "grad_norm": 18.980238250287478, + "learning_rate": 8.856598079532633e-06, + "loss": 0.5814, + "step": 10571 + }, + { + "epoch": 1.65, + "grad_norm": 22.126611172746667, + "learning_rate": 8.854922756810263e-06, + "loss": 0.615, + "step": 10572 + }, + { + "epoch": 1.65, + "grad_norm": 15.35933075934371, + "learning_rate": 8.85324746665322e-06, + "loss": 0.524, + "step": 10573 + }, + { + "epoch": 1.65, + "grad_norm": 17.341025619636287, + "learning_rate": 8.851572209109147e-06, + "loss": 0.5864, + "step": 10574 + }, + { + "epoch": 1.65, + "grad_norm": 17.709495743204776, + "learning_rate": 8.849896984225685e-06, + "loss": 0.5425, + "step": 10575 + }, + { + "epoch": 1.65, + "grad_norm": 15.90207491511352, + "learning_rate": 8.848221792050484e-06, + "loss": 0.5611, + "step": 10576 + }, + { + "epoch": 1.65, + "grad_norm": 26.862340315170645, + "learning_rate": 8.846546632631178e-06, + "loss": 0.5316, + "step": 10577 + }, + { + "epoch": 1.65, + "grad_norm": 15.112449246456217, + "learning_rate": 8.844871506015417e-06, + "loss": 0.5679, + "step": 10578 + }, + { + "epoch": 1.65, + "grad_norm": 17.957798917973193, + "learning_rate": 8.84319641225083e-06, + "loss": 0.642, + "step": 10579 + }, + { + "epoch": 1.65, + "grad_norm": 18.40861249716418, + "learning_rate": 8.841521351385061e-06, + "loss": 0.6312, + "step": 10580 + }, + { + "epoch": 1.65, + "grad_norm": 26.820813264069383, + "learning_rate": 8.839846323465745e-06, + "loss": 0.5613, + "step": 10581 + }, + { + "epoch": 1.65, + "grad_norm": 19.41690770797413, + "learning_rate": 8.838171328540524e-06, + "loss": 0.4693, + "step": 10582 + }, + { + "epoch": 1.65, + "grad_norm": 14.989719391963709, + "learning_rate": 8.836496366657032e-06, + "loss": 0.562, + "step": 10583 + }, + { + "epoch": 1.65, + "grad_norm": 16.3326707033684, + "learning_rate": 8.834821437862899e-06, + "loss": 0.5911, + "step": 10584 + }, + { + "epoch": 1.65, + "grad_norm": 18.576962384099044, + "learning_rate": 8.833146542205765e-06, + "loss": 0.6162, + "step": 10585 + }, + { + "epoch": 1.65, + 
"grad_norm": 13.4642771952231, + "learning_rate": 8.831471679733262e-06, + "loss": 0.5255, + "step": 10586 + }, + { + "epoch": 1.65, + "grad_norm": 20.243070515432077, + "learning_rate": 8.829796850493016e-06, + "loss": 0.5509, + "step": 10587 + }, + { + "epoch": 1.65, + "grad_norm": 12.86933225291331, + "learning_rate": 8.828122054532669e-06, + "loss": 0.5437, + "step": 10588 + }, + { + "epoch": 1.65, + "grad_norm": 20.259582024807646, + "learning_rate": 8.826447291899842e-06, + "loss": 0.5232, + "step": 10589 + }, + { + "epoch": 1.65, + "grad_norm": 19.975049566519548, + "learning_rate": 8.824772562642166e-06, + "loss": 0.5528, + "step": 10590 + }, + { + "epoch": 1.65, + "grad_norm": 19.093213555733982, + "learning_rate": 8.823097866807272e-06, + "loss": 0.6207, + "step": 10591 + }, + { + "epoch": 1.65, + "grad_norm": 14.031383719263756, + "learning_rate": 8.82142320444279e-06, + "loss": 0.5709, + "step": 10592 + }, + { + "epoch": 1.65, + "grad_norm": 11.665876311398485, + "learning_rate": 8.81974857559634e-06, + "loss": 0.5485, + "step": 10593 + }, + { + "epoch": 1.65, + "grad_norm": 19.212568141739474, + "learning_rate": 8.818073980315547e-06, + "loss": 0.5291, + "step": 10594 + }, + { + "epoch": 1.65, + "grad_norm": 20.588865287298297, + "learning_rate": 8.816399418648041e-06, + "loss": 0.5733, + "step": 10595 + }, + { + "epoch": 1.66, + "grad_norm": 22.849516488449012, + "learning_rate": 8.814724890641444e-06, + "loss": 0.5425, + "step": 10596 + }, + { + "epoch": 1.66, + "grad_norm": 21.87554829097664, + "learning_rate": 8.81305039634338e-06, + "loss": 0.5708, + "step": 10597 + }, + { + "epoch": 1.66, + "grad_norm": 15.465789852701272, + "learning_rate": 8.811375935801463e-06, + "loss": 0.5993, + "step": 10598 + }, + { + "epoch": 1.66, + "grad_norm": 28.559402022352625, + "learning_rate": 8.809701509063322e-06, + "loss": 0.5763, + "step": 10599 + }, + { + "epoch": 1.66, + "grad_norm": 22.03008716436355, + "learning_rate": 8.808027116176572e-06, + "loss": 0.606, + "step": 10600 + }, + { + "epoch": 1.66, + "grad_norm": 14.245845244938598, + "learning_rate": 8.806352757188835e-06, + "loss": 0.5874, + "step": 10601 + }, + { + "epoch": 1.66, + "grad_norm": 19.14003062356893, + "learning_rate": 8.80467843214773e-06, + "loss": 0.6026, + "step": 10602 + }, + { + "epoch": 1.66, + "grad_norm": 14.204199934927258, + "learning_rate": 8.803004141100869e-06, + "loss": 0.5457, + "step": 10603 + }, + { + "epoch": 1.66, + "grad_norm": 20.529404622585364, + "learning_rate": 8.801329884095866e-06, + "loss": 0.5848, + "step": 10604 + }, + { + "epoch": 1.66, + "grad_norm": 11.298156957054113, + "learning_rate": 8.799655661180344e-06, + "loss": 0.5413, + "step": 10605 + }, + { + "epoch": 1.66, + "grad_norm": 13.737168289497772, + "learning_rate": 8.79798147240191e-06, + "loss": 0.5144, + "step": 10606 + }, + { + "epoch": 1.66, + "grad_norm": 23.805284703584743, + "learning_rate": 8.796307317808183e-06, + "loss": 0.58, + "step": 10607 + }, + { + "epoch": 1.66, + "grad_norm": 25.893084368389925, + "learning_rate": 8.79463319744677e-06, + "loss": 0.6639, + "step": 10608 + }, + { + "epoch": 1.66, + "grad_norm": 12.783285408542032, + "learning_rate": 8.792959111365285e-06, + "loss": 0.5912, + "step": 10609 + }, + { + "epoch": 1.66, + "grad_norm": 17.15976555473086, + "learning_rate": 8.791285059611335e-06, + "loss": 0.5486, + "step": 10610 + }, + { + "epoch": 1.66, + "grad_norm": 58.08449727410499, + "learning_rate": 8.789611042232534e-06, + "loss": 0.5791, + "step": 10611 + }, + { + "epoch": 1.66, + 
"grad_norm": 14.858577084603429, + "learning_rate": 8.787937059276486e-06, + "loss": 0.6202, + "step": 10612 + }, + { + "epoch": 1.66, + "grad_norm": 11.93035156485214, + "learning_rate": 8.786263110790796e-06, + "loss": 0.5729, + "step": 10613 + }, + { + "epoch": 1.66, + "grad_norm": 15.78064060734215, + "learning_rate": 8.784589196823075e-06, + "loss": 0.5756, + "step": 10614 + }, + { + "epoch": 1.66, + "grad_norm": 24.37020895162406, + "learning_rate": 8.782915317420924e-06, + "loss": 0.6149, + "step": 10615 + }, + { + "epoch": 1.66, + "grad_norm": 12.33864730379393, + "learning_rate": 8.781241472631952e-06, + "loss": 0.5862, + "step": 10616 + }, + { + "epoch": 1.66, + "grad_norm": 13.152805862941696, + "learning_rate": 8.779567662503761e-06, + "loss": 0.5163, + "step": 10617 + }, + { + "epoch": 1.66, + "grad_norm": 24.74101156120906, + "learning_rate": 8.77789388708395e-06, + "loss": 0.6642, + "step": 10618 + }, + { + "epoch": 1.66, + "grad_norm": 19.764350783510245, + "learning_rate": 8.77622014642012e-06, + "loss": 0.5889, + "step": 10619 + }, + { + "epoch": 1.66, + "grad_norm": 18.104367664027645, + "learning_rate": 8.774546440559874e-06, + "loss": 0.6471, + "step": 10620 + }, + { + "epoch": 1.66, + "grad_norm": 15.159442081747295, + "learning_rate": 8.77287276955081e-06, + "loss": 0.5624, + "step": 10621 + }, + { + "epoch": 1.66, + "grad_norm": 17.761670993650963, + "learning_rate": 8.771199133440533e-06, + "loss": 0.6047, + "step": 10622 + }, + { + "epoch": 1.66, + "grad_norm": 12.538438136756746, + "learning_rate": 8.769525532276627e-06, + "loss": 0.5557, + "step": 10623 + }, + { + "epoch": 1.66, + "grad_norm": 22.501896265708194, + "learning_rate": 8.767851966106696e-06, + "loss": 0.5563, + "step": 10624 + }, + { + "epoch": 1.66, + "grad_norm": 13.367783037672163, + "learning_rate": 8.766178434978334e-06, + "loss": 0.5335, + "step": 10625 + }, + { + "epoch": 1.66, + "grad_norm": 15.82841159677827, + "learning_rate": 8.764504938939137e-06, + "loss": 0.5462, + "step": 10626 + }, + { + "epoch": 1.66, + "grad_norm": 20.8786251580896, + "learning_rate": 8.762831478036698e-06, + "loss": 0.5568, + "step": 10627 + }, + { + "epoch": 1.66, + "grad_norm": 16.848564690399293, + "learning_rate": 8.761158052318605e-06, + "loss": 0.5251, + "step": 10628 + }, + { + "epoch": 1.66, + "grad_norm": 19.7890619201901, + "learning_rate": 8.759484661832454e-06, + "loss": 0.6059, + "step": 10629 + }, + { + "epoch": 1.66, + "grad_norm": 16.41973266728501, + "learning_rate": 8.757811306625833e-06, + "loss": 0.5684, + "step": 10630 + }, + { + "epoch": 1.66, + "grad_norm": 26.163483730326483, + "learning_rate": 8.75613798674633e-06, + "loss": 0.6227, + "step": 10631 + }, + { + "epoch": 1.66, + "grad_norm": 16.754502767668455, + "learning_rate": 8.754464702241539e-06, + "loss": 0.5203, + "step": 10632 + }, + { + "epoch": 1.66, + "grad_norm": 33.083697424926214, + "learning_rate": 8.752791453159041e-06, + "loss": 0.6494, + "step": 10633 + }, + { + "epoch": 1.66, + "grad_norm": 18.333628676123528, + "learning_rate": 8.751118239546424e-06, + "loss": 0.5658, + "step": 10634 + }, + { + "epoch": 1.66, + "grad_norm": 18.01461747942771, + "learning_rate": 8.749445061451274e-06, + "loss": 0.5694, + "step": 10635 + }, + { + "epoch": 1.66, + "grad_norm": 20.762830054612646, + "learning_rate": 8.747771918921176e-06, + "loss": 0.5754, + "step": 10636 + }, + { + "epoch": 1.66, + "grad_norm": 18.04331935765732, + "learning_rate": 8.746098812003714e-06, + "loss": 0.5217, + "step": 10637 + }, + { + "epoch": 1.66, + 
"grad_norm": 21.803258697348074, + "learning_rate": 8.744425740746464e-06, + "loss": 0.6335, + "step": 10638 + }, + { + "epoch": 1.66, + "grad_norm": 35.64794688542407, + "learning_rate": 8.742752705197013e-06, + "loss": 0.6455, + "step": 10639 + }, + { + "epoch": 1.66, + "grad_norm": 16.46138732668498, + "learning_rate": 8.741079705402939e-06, + "loss": 0.5211, + "step": 10640 + }, + { + "epoch": 1.66, + "grad_norm": 18.39230899749596, + "learning_rate": 8.739406741411826e-06, + "loss": 0.6125, + "step": 10641 + }, + { + "epoch": 1.66, + "grad_norm": 24.056763465866894, + "learning_rate": 8.737733813271244e-06, + "loss": 0.6236, + "step": 10642 + }, + { + "epoch": 1.66, + "grad_norm": 23.11978770809794, + "learning_rate": 8.736060921028773e-06, + "loss": 0.6278, + "step": 10643 + }, + { + "epoch": 1.66, + "grad_norm": 18.55293153906435, + "learning_rate": 8.73438806473199e-06, + "loss": 0.6099, + "step": 10644 + }, + { + "epoch": 1.66, + "grad_norm": 31.703294073925896, + "learning_rate": 8.732715244428472e-06, + "loss": 0.5905, + "step": 10645 + }, + { + "epoch": 1.66, + "grad_norm": 20.044599065678185, + "learning_rate": 8.731042460165793e-06, + "loss": 0.6493, + "step": 10646 + }, + { + "epoch": 1.66, + "grad_norm": 17.53069151279472, + "learning_rate": 8.72936971199152e-06, + "loss": 0.5679, + "step": 10647 + }, + { + "epoch": 1.66, + "grad_norm": 19.404807442570014, + "learning_rate": 8.72769699995323e-06, + "loss": 0.5657, + "step": 10648 + }, + { + "epoch": 1.66, + "grad_norm": 20.64926481592163, + "learning_rate": 8.726024324098494e-06, + "loss": 0.5606, + "step": 10649 + }, + { + "epoch": 1.66, + "grad_norm": 19.096280654487593, + "learning_rate": 8.724351684474878e-06, + "loss": 0.6145, + "step": 10650 + }, + { + "epoch": 1.66, + "grad_norm": 13.983145720513793, + "learning_rate": 8.72267908112996e-06, + "loss": 0.5798, + "step": 10651 + }, + { + "epoch": 1.66, + "grad_norm": 14.717085467814213, + "learning_rate": 8.721006514111299e-06, + "loss": 0.6065, + "step": 10652 + }, + { + "epoch": 1.66, + "grad_norm": 24.342680007155987, + "learning_rate": 8.719333983466462e-06, + "loss": 0.573, + "step": 10653 + }, + { + "epoch": 1.66, + "grad_norm": 17.836395298734068, + "learning_rate": 8.71766148924302e-06, + "loss": 0.5968, + "step": 10654 + }, + { + "epoch": 1.66, + "grad_norm": 19.802264576610416, + "learning_rate": 8.715989031488534e-06, + "loss": 0.5596, + "step": 10655 + }, + { + "epoch": 1.66, + "grad_norm": 31.443259978035083, + "learning_rate": 8.71431661025057e-06, + "loss": 0.6468, + "step": 10656 + }, + { + "epoch": 1.66, + "grad_norm": 23.129382336842877, + "learning_rate": 8.712644225576687e-06, + "loss": 0.5922, + "step": 10657 + }, + { + "epoch": 1.66, + "grad_norm": 28.295943588734268, + "learning_rate": 8.710971877514452e-06, + "loss": 0.6159, + "step": 10658 + }, + { + "epoch": 1.66, + "grad_norm": 15.424142085832925, + "learning_rate": 8.709299566111418e-06, + "loss": 0.5413, + "step": 10659 + }, + { + "epoch": 1.67, + "grad_norm": 22.136337155818854, + "learning_rate": 8.707627291415153e-06, + "loss": 0.5498, + "step": 10660 + }, + { + "epoch": 1.67, + "grad_norm": 17.46602635314065, + "learning_rate": 8.705955053473215e-06, + "loss": 0.5709, + "step": 10661 + }, + { + "epoch": 1.67, + "grad_norm": 13.894539876768423, + "learning_rate": 8.704282852333154e-06, + "loss": 0.5348, + "step": 10662 + }, + { + "epoch": 1.67, + "grad_norm": 20.28782670389082, + "learning_rate": 8.702610688042531e-06, + "loss": 0.5645, + "step": 10663 + }, + { + "epoch": 1.67, + 
"grad_norm": 24.659024680775442, + "learning_rate": 8.700938560648901e-06, + "loss": 0.5586, + "step": 10664 + }, + { + "epoch": 1.67, + "grad_norm": 20.07548852227552, + "learning_rate": 8.699266470199817e-06, + "loss": 0.5867, + "step": 10665 + }, + { + "epoch": 1.67, + "grad_norm": 24.687843035475538, + "learning_rate": 8.697594416742842e-06, + "loss": 0.567, + "step": 10666 + }, + { + "epoch": 1.67, + "grad_norm": 31.690082866130616, + "learning_rate": 8.695922400325512e-06, + "loss": 0.6163, + "step": 10667 + }, + { + "epoch": 1.67, + "grad_norm": 19.467159651740882, + "learning_rate": 8.694250420995387e-06, + "loss": 0.5532, + "step": 10668 + }, + { + "epoch": 1.67, + "grad_norm": 22.305395753775834, + "learning_rate": 8.692578478800015e-06, + "loss": 0.545, + "step": 10669 + }, + { + "epoch": 1.67, + "grad_norm": 12.87434396299192, + "learning_rate": 8.690906573786949e-06, + "loss": 0.5222, + "step": 10670 + }, + { + "epoch": 1.67, + "grad_norm": 18.899565599573474, + "learning_rate": 8.689234706003735e-06, + "loss": 0.5471, + "step": 10671 + }, + { + "epoch": 1.67, + "grad_norm": 19.70416979057833, + "learning_rate": 8.687562875497915e-06, + "loss": 0.5633, + "step": 10672 + }, + { + "epoch": 1.67, + "grad_norm": 15.019830241904353, + "learning_rate": 8.685891082317043e-06, + "loss": 0.5505, + "step": 10673 + }, + { + "epoch": 1.67, + "grad_norm": 22.802964553488113, + "learning_rate": 8.684219326508657e-06, + "loss": 0.5658, + "step": 10674 + }, + { + "epoch": 1.67, + "grad_norm": 21.074838988021675, + "learning_rate": 8.682547608120305e-06, + "loss": 0.6734, + "step": 10675 + }, + { + "epoch": 1.67, + "grad_norm": 14.346922444668317, + "learning_rate": 8.68087592719953e-06, + "loss": 0.5758, + "step": 10676 + }, + { + "epoch": 1.67, + "grad_norm": 17.69825046983035, + "learning_rate": 8.679204283793872e-06, + "loss": 0.5865, + "step": 10677 + }, + { + "epoch": 1.67, + "grad_norm": 21.119922815723925, + "learning_rate": 8.677532677950868e-06, + "loss": 0.5694, + "step": 10678 + }, + { + "epoch": 1.67, + "grad_norm": 12.771917218837862, + "learning_rate": 8.675861109718064e-06, + "loss": 0.5888, + "step": 10679 + }, + { + "epoch": 1.67, + "grad_norm": 14.020549771981512, + "learning_rate": 8.674189579142993e-06, + "loss": 0.5295, + "step": 10680 + }, + { + "epoch": 1.67, + "grad_norm": 15.487481324367025, + "learning_rate": 8.672518086273199e-06, + "loss": 0.5509, + "step": 10681 + }, + { + "epoch": 1.67, + "grad_norm": 20.103027997523373, + "learning_rate": 8.67084663115621e-06, + "loss": 0.6162, + "step": 10682 + }, + { + "epoch": 1.67, + "grad_norm": 22.89042324849556, + "learning_rate": 8.669175213839566e-06, + "loss": 0.6217, + "step": 10683 + }, + { + "epoch": 1.67, + "grad_norm": 19.26186337375132, + "learning_rate": 8.6675038343708e-06, + "loss": 0.5555, + "step": 10684 + }, + { + "epoch": 1.67, + "grad_norm": 16.550121457856772, + "learning_rate": 8.665832492797447e-06, + "loss": 0.624, + "step": 10685 + }, + { + "epoch": 1.67, + "grad_norm": 39.97594128846003, + "learning_rate": 8.664161189167039e-06, + "loss": 0.5122, + "step": 10686 + }, + { + "epoch": 1.67, + "grad_norm": 19.74867915085563, + "learning_rate": 8.662489923527104e-06, + "loss": 0.6164, + "step": 10687 + }, + { + "epoch": 1.67, + "grad_norm": 30.633618695158294, + "learning_rate": 8.66081869592517e-06, + "loss": 0.5545, + "step": 10688 + }, + { + "epoch": 1.67, + "grad_norm": 19.27831230397225, + "learning_rate": 8.659147506408771e-06, + "loss": 0.5698, + "step": 10689 + }, + { + "epoch": 1.67, + 
"grad_norm": 18.733921291307386, + "learning_rate": 8.657476355025436e-06, + "loss": 0.5785, + "step": 10690 + }, + { + "epoch": 1.67, + "grad_norm": 15.975913549921286, + "learning_rate": 8.655805241822683e-06, + "loss": 0.5286, + "step": 10691 + }, + { + "epoch": 1.67, + "grad_norm": 26.078959152841342, + "learning_rate": 8.654134166848045e-06, + "loss": 0.6512, + "step": 10692 + }, + { + "epoch": 1.67, + "grad_norm": 22.678011360871974, + "learning_rate": 8.652463130149044e-06, + "loss": 0.5497, + "step": 10693 + }, + { + "epoch": 1.67, + "grad_norm": 18.936375516138952, + "learning_rate": 8.6507921317732e-06, + "loss": 0.5309, + "step": 10694 + }, + { + "epoch": 1.67, + "grad_norm": 18.004562876209715, + "learning_rate": 8.649121171768045e-06, + "loss": 0.5622, + "step": 10695 + }, + { + "epoch": 1.67, + "grad_norm": 14.525471573860942, + "learning_rate": 8.64745025018109e-06, + "loss": 0.5093, + "step": 10696 + }, + { + "epoch": 1.67, + "grad_norm": 19.033996785871246, + "learning_rate": 8.645779367059856e-06, + "loss": 0.5196, + "step": 10697 + }, + { + "epoch": 1.67, + "grad_norm": 19.416615364736174, + "learning_rate": 8.644108522451868e-06, + "loss": 0.5451, + "step": 10698 + }, + { + "epoch": 1.67, + "grad_norm": 24.653989905853603, + "learning_rate": 8.642437716404639e-06, + "loss": 0.6033, + "step": 10699 + }, + { + "epoch": 1.67, + "grad_norm": 29.13510300191042, + "learning_rate": 8.64076694896569e-06, + "loss": 0.6148, + "step": 10700 + }, + { + "epoch": 1.67, + "grad_norm": 14.439080940319844, + "learning_rate": 8.63909622018253e-06, + "loss": 0.5496, + "step": 10701 + }, + { + "epoch": 1.67, + "grad_norm": 15.81245478191997, + "learning_rate": 8.63742553010268e-06, + "loss": 0.4813, + "step": 10702 + }, + { + "epoch": 1.67, + "grad_norm": 18.147938339259284, + "learning_rate": 8.635754878773647e-06, + "loss": 0.5894, + "step": 10703 + }, + { + "epoch": 1.67, + "grad_norm": 13.729983585734047, + "learning_rate": 8.63408426624295e-06, + "loss": 0.5983, + "step": 10704 + }, + { + "epoch": 1.67, + "grad_norm": 18.491812111122975, + "learning_rate": 8.632413692558101e-06, + "loss": 0.6297, + "step": 10705 + }, + { + "epoch": 1.67, + "grad_norm": 10.5818421440928, + "learning_rate": 8.630743157766603e-06, + "loss": 0.5013, + "step": 10706 + }, + { + "epoch": 1.67, + "grad_norm": 12.654129999293646, + "learning_rate": 8.629072661915969e-06, + "loss": 0.5424, + "step": 10707 + }, + { + "epoch": 1.67, + "grad_norm": 32.61416526818084, + "learning_rate": 8.627402205053705e-06, + "loss": 0.6774, + "step": 10708 + }, + { + "epoch": 1.67, + "grad_norm": 23.55944077973013, + "learning_rate": 8.62573178722732e-06, + "loss": 0.6439, + "step": 10709 + }, + { + "epoch": 1.67, + "grad_norm": 37.969366467341764, + "learning_rate": 8.624061408484323e-06, + "loss": 0.6523, + "step": 10710 + }, + { + "epoch": 1.67, + "grad_norm": 23.16996973632973, + "learning_rate": 8.62239106887221e-06, + "loss": 0.5698, + "step": 10711 + }, + { + "epoch": 1.67, + "grad_norm": 14.657162908334145, + "learning_rate": 8.620720768438491e-06, + "loss": 0.5486, + "step": 10712 + }, + { + "epoch": 1.67, + "grad_norm": 18.22964584719875, + "learning_rate": 8.619050507230664e-06, + "loss": 0.512, + "step": 10713 + }, + { + "epoch": 1.67, + "grad_norm": 20.45094193864924, + "learning_rate": 8.617380285296235e-06, + "loss": 0.5656, + "step": 10714 + }, + { + "epoch": 1.67, + "grad_norm": 12.745405615218226, + "learning_rate": 8.615710102682704e-06, + "loss": 0.541, + "step": 10715 + }, + { + "epoch": 1.67, + "grad_norm": 
16.072208825359976, + "learning_rate": 8.614039959437563e-06, + "loss": 0.4817, + "step": 10716 + }, + { + "epoch": 1.67, + "grad_norm": 20.896459487998627, + "learning_rate": 8.612369855608318e-06, + "loss": 0.5105, + "step": 10717 + }, + { + "epoch": 1.67, + "grad_norm": 15.380521483569984, + "learning_rate": 8.610699791242463e-06, + "loss": 0.5645, + "step": 10718 + }, + { + "epoch": 1.67, + "grad_norm": 23.418514226902982, + "learning_rate": 8.609029766387489e-06, + "loss": 0.5872, + "step": 10719 + }, + { + "epoch": 1.67, + "grad_norm": 20.166731689153888, + "learning_rate": 8.6073597810909e-06, + "loss": 0.5525, + "step": 10720 + }, + { + "epoch": 1.67, + "grad_norm": 16.744072309163542, + "learning_rate": 8.60568983540018e-06, + "loss": 0.5425, + "step": 10721 + }, + { + "epoch": 1.67, + "grad_norm": 16.926109861894297, + "learning_rate": 8.604019929362826e-06, + "loss": 0.5313, + "step": 10722 + }, + { + "epoch": 1.67, + "grad_norm": 18.165968500856906, + "learning_rate": 8.602350063026332e-06, + "loss": 0.6692, + "step": 10723 + }, + { + "epoch": 1.68, + "grad_norm": 23.68867824160515, + "learning_rate": 8.600680236438182e-06, + "loss": 0.6164, + "step": 10724 + }, + { + "epoch": 1.68, + "grad_norm": 14.540329024768237, + "learning_rate": 8.59901044964587e-06, + "loss": 0.5649, + "step": 10725 + }, + { + "epoch": 1.68, + "grad_norm": 18.795344318937275, + "learning_rate": 8.597340702696879e-06, + "loss": 0.5707, + "step": 10726 + }, + { + "epoch": 1.68, + "grad_norm": 17.778287348288444, + "learning_rate": 8.595670995638697e-06, + "loss": 0.6534, + "step": 10727 + }, + { + "epoch": 1.68, + "grad_norm": 16.846555598264512, + "learning_rate": 8.59400132851881e-06, + "loss": 0.6313, + "step": 10728 + }, + { + "epoch": 1.68, + "grad_norm": 60.657481253762995, + "learning_rate": 8.592331701384704e-06, + "loss": 0.5221, + "step": 10729 + }, + { + "epoch": 1.68, + "grad_norm": 14.445630685882481, + "learning_rate": 8.590662114283864e-06, + "loss": 0.5594, + "step": 10730 + }, + { + "epoch": 1.68, + "grad_norm": 19.772839558640303, + "learning_rate": 8.588992567263767e-06, + "loss": 0.5873, + "step": 10731 + }, + { + "epoch": 1.68, + "grad_norm": 18.228508343026352, + "learning_rate": 8.587323060371893e-06, + "loss": 0.5972, + "step": 10732 + }, + { + "epoch": 1.68, + "grad_norm": 16.554875978096312, + "learning_rate": 8.585653593655728e-06, + "loss": 0.4995, + "step": 10733 + }, + { + "epoch": 1.68, + "grad_norm": 32.764185484088166, + "learning_rate": 8.583984167162744e-06, + "loss": 0.5808, + "step": 10734 + }, + { + "epoch": 1.68, + "grad_norm": 18.758167474796675, + "learning_rate": 8.582314780940426e-06, + "loss": 0.6025, + "step": 10735 + }, + { + "epoch": 1.68, + "grad_norm": 24.464260946486014, + "learning_rate": 8.580645435036246e-06, + "loss": 0.6673, + "step": 10736 + }, + { + "epoch": 1.68, + "grad_norm": 19.463984912020386, + "learning_rate": 8.578976129497678e-06, + "loss": 0.6484, + "step": 10737 + }, + { + "epoch": 1.68, + "grad_norm": 15.224395771497912, + "learning_rate": 8.577306864372196e-06, + "loss": 0.5168, + "step": 10738 + }, + { + "epoch": 1.68, + "grad_norm": 22.178047040568764, + "learning_rate": 8.575637639707279e-06, + "loss": 0.5852, + "step": 10739 + }, + { + "epoch": 1.68, + "grad_norm": 14.675893029240875, + "learning_rate": 8.573968455550393e-06, + "loss": 0.5868, + "step": 10740 + }, + { + "epoch": 1.68, + "grad_norm": 17.05917112309347, + "learning_rate": 8.572299311949005e-06, + "loss": 0.5661, + "step": 10741 + }, + { + "epoch": 1.68, + "grad_norm": 
15.85306827364698, + "learning_rate": 8.570630208950593e-06, + "loss": 0.5354, + "step": 10742 + }, + { + "epoch": 1.68, + "grad_norm": 24.342044462683226, + "learning_rate": 8.568961146602623e-06, + "loss": 0.7152, + "step": 10743 + }, + { + "epoch": 1.68, + "grad_norm": 17.53403104858795, + "learning_rate": 8.567292124952562e-06, + "loss": 0.5077, + "step": 10744 + }, + { + "epoch": 1.68, + "grad_norm": 16.330861984728294, + "learning_rate": 8.565623144047869e-06, + "loss": 0.6172, + "step": 10745 + }, + { + "epoch": 1.68, + "grad_norm": 11.933010554071792, + "learning_rate": 8.563954203936018e-06, + "loss": 0.5138, + "step": 10746 + }, + { + "epoch": 1.68, + "grad_norm": 14.617867703211141, + "learning_rate": 8.562285304664468e-06, + "loss": 0.5158, + "step": 10747 + }, + { + "epoch": 1.68, + "grad_norm": 19.220326867581598, + "learning_rate": 8.560616446280685e-06, + "loss": 0.5887, + "step": 10748 + }, + { + "epoch": 1.68, + "grad_norm": 21.163282920783544, + "learning_rate": 8.558947628832131e-06, + "loss": 0.547, + "step": 10749 + }, + { + "epoch": 1.68, + "grad_norm": 18.178377573961438, + "learning_rate": 8.55727885236626e-06, + "loss": 0.501, + "step": 10750 + }, + { + "epoch": 1.68, + "grad_norm": 17.539447230513943, + "learning_rate": 8.555610116930532e-06, + "loss": 0.5629, + "step": 10751 + }, + { + "epoch": 1.68, + "grad_norm": 17.153772731149665, + "learning_rate": 8.55394142257241e-06, + "loss": 0.6401, + "step": 10752 + }, + { + "epoch": 1.68, + "grad_norm": 22.523729855680674, + "learning_rate": 8.552272769339346e-06, + "loss": 0.5685, + "step": 10753 + }, + { + "epoch": 1.68, + "grad_norm": 15.144733790670937, + "learning_rate": 8.550604157278804e-06, + "loss": 0.5591, + "step": 10754 + }, + { + "epoch": 1.68, + "grad_norm": 18.987875599972817, + "learning_rate": 8.548935586438226e-06, + "loss": 0.5283, + "step": 10755 + }, + { + "epoch": 1.68, + "grad_norm": 30.957807026477884, + "learning_rate": 8.547267056865072e-06, + "loss": 0.6091, + "step": 10756 + }, + { + "epoch": 1.68, + "grad_norm": 16.298346499211956, + "learning_rate": 8.545598568606791e-06, + "loss": 0.5476, + "step": 10757 + }, + { + "epoch": 1.68, + "grad_norm": 20.67125036199832, + "learning_rate": 8.54393012171084e-06, + "loss": 0.6027, + "step": 10758 + }, + { + "epoch": 1.68, + "grad_norm": 18.273155631003064, + "learning_rate": 8.542261716224664e-06, + "loss": 0.602, + "step": 10759 + }, + { + "epoch": 1.68, + "grad_norm": 29.03129477851625, + "learning_rate": 8.54059335219571e-06, + "loss": 0.7013, + "step": 10760 + }, + { + "epoch": 1.68, + "grad_norm": 19.30868392574454, + "learning_rate": 8.538925029671428e-06, + "loss": 0.5596, + "step": 10761 + }, + { + "epoch": 1.68, + "grad_norm": 12.693635683094321, + "learning_rate": 8.537256748699264e-06, + "loss": 0.5799, + "step": 10762 + }, + { + "epoch": 1.68, + "grad_norm": 27.46103990950827, + "learning_rate": 8.53558850932666e-06, + "loss": 0.5265, + "step": 10763 + }, + { + "epoch": 1.68, + "grad_norm": 19.19505132964049, + "learning_rate": 8.533920311601068e-06, + "loss": 0.592, + "step": 10764 + }, + { + "epoch": 1.68, + "grad_norm": 16.687741969440236, + "learning_rate": 8.53225215556992e-06, + "loss": 0.5944, + "step": 10765 + }, + { + "epoch": 1.68, + "grad_norm": 12.572429562279567, + "learning_rate": 8.530584041280661e-06, + "loss": 0.4828, + "step": 10766 + }, + { + "epoch": 1.68, + "grad_norm": 24.930239366180977, + "learning_rate": 8.528915968780735e-06, + "loss": 0.5969, + "step": 10767 + }, + { + "epoch": 1.68, + "grad_norm": 
21.761951885231092, + "learning_rate": 8.527247938117577e-06, + "loss": 0.5746, + "step": 10768 + }, + { + "epoch": 1.68, + "grad_norm": 19.295019880691335, + "learning_rate": 8.525579949338631e-06, + "loss": 0.557, + "step": 10769 + }, + { + "epoch": 1.68, + "grad_norm": 22.540317906369257, + "learning_rate": 8.523912002491321e-06, + "loss": 0.5871, + "step": 10770 + }, + { + "epoch": 1.68, + "grad_norm": 19.08280525004356, + "learning_rate": 8.522244097623095e-06, + "loss": 0.5634, + "step": 10771 + }, + { + "epoch": 1.68, + "grad_norm": 15.854267211107338, + "learning_rate": 8.520576234781379e-06, + "loss": 0.6086, + "step": 10772 + }, + { + "epoch": 1.68, + "grad_norm": 26.61927199983796, + "learning_rate": 8.518908414013612e-06, + "loss": 0.5546, + "step": 10773 + }, + { + "epoch": 1.68, + "grad_norm": 20.498914969037678, + "learning_rate": 8.517240635367225e-06, + "loss": 0.5376, + "step": 10774 + }, + { + "epoch": 1.68, + "grad_norm": 16.791312800465825, + "learning_rate": 8.515572898889645e-06, + "loss": 0.6344, + "step": 10775 + }, + { + "epoch": 1.68, + "grad_norm": 18.368579324180516, + "learning_rate": 8.513905204628302e-06, + "loss": 0.596, + "step": 10776 + }, + { + "epoch": 1.68, + "grad_norm": 16.730523377658212, + "learning_rate": 8.512237552630625e-06, + "loss": 0.5299, + "step": 10777 + }, + { + "epoch": 1.68, + "grad_norm": 19.58309404923637, + "learning_rate": 8.510569942944043e-06, + "loss": 0.5225, + "step": 10778 + }, + { + "epoch": 1.68, + "grad_norm": 16.148507973932883, + "learning_rate": 8.508902375615982e-06, + "loss": 0.5601, + "step": 10779 + }, + { + "epoch": 1.68, + "grad_norm": 23.22874099415552, + "learning_rate": 8.507234850693864e-06, + "loss": 0.549, + "step": 10780 + }, + { + "epoch": 1.68, + "grad_norm": 15.78281593201867, + "learning_rate": 8.505567368225116e-06, + "loss": 0.5283, + "step": 10781 + }, + { + "epoch": 1.68, + "grad_norm": 17.257811337587967, + "learning_rate": 8.503899928257151e-06, + "loss": 0.5914, + "step": 10782 + }, + { + "epoch": 1.68, + "grad_norm": 17.45303843612514, + "learning_rate": 8.502232530837404e-06, + "loss": 0.6183, + "step": 10783 + }, + { + "epoch": 1.68, + "grad_norm": 13.4942155240457, + "learning_rate": 8.500565176013286e-06, + "loss": 0.5439, + "step": 10784 + }, + { + "epoch": 1.68, + "grad_norm": 26.19587388704196, + "learning_rate": 8.498897863832215e-06, + "loss": 0.6469, + "step": 10785 + }, + { + "epoch": 1.68, + "grad_norm": 28.86250391636306, + "learning_rate": 8.497230594341612e-06, + "loss": 0.55, + "step": 10786 + }, + { + "epoch": 1.68, + "grad_norm": 16.092384997136644, + "learning_rate": 8.495563367588893e-06, + "loss": 0.5076, + "step": 10787 + }, + { + "epoch": 1.69, + "grad_norm": 19.610718187934484, + "learning_rate": 8.493896183621474e-06, + "loss": 0.5576, + "step": 10788 + }, + { + "epoch": 1.69, + "grad_norm": 15.281071674538994, + "learning_rate": 8.49222904248676e-06, + "loss": 0.5307, + "step": 10789 + }, + { + "epoch": 1.69, + "grad_norm": 14.53231879887502, + "learning_rate": 8.490561944232173e-06, + "loss": 0.5903, + "step": 10790 + }, + { + "epoch": 1.69, + "grad_norm": 14.234581263813762, + "learning_rate": 8.48889488890512e-06, + "loss": 0.5798, + "step": 10791 + }, + { + "epoch": 1.69, + "grad_norm": 21.00969939803462, + "learning_rate": 8.487227876553012e-06, + "loss": 0.5739, + "step": 10792 + }, + { + "epoch": 1.69, + "grad_norm": 26.15902377305113, + "learning_rate": 8.485560907223264e-06, + "loss": 0.54, + "step": 10793 + }, + { + "epoch": 1.69, + "grad_norm": 
16.751103074845837, + "learning_rate": 8.483893980963274e-06, + "loss": 0.5303, + "step": 10794 + }, + { + "epoch": 1.69, + "grad_norm": 15.350415813533033, + "learning_rate": 8.48222709782045e-06, + "loss": 0.5704, + "step": 10795 + }, + { + "epoch": 1.69, + "grad_norm": 19.66552754341187, + "learning_rate": 8.480560257842201e-06, + "loss": 0.5487, + "step": 10796 + }, + { + "epoch": 1.69, + "grad_norm": 13.400866645839347, + "learning_rate": 8.478893461075929e-06, + "loss": 0.5899, + "step": 10797 + }, + { + "epoch": 1.69, + "grad_norm": 14.682271762916503, + "learning_rate": 8.477226707569042e-06, + "loss": 0.4899, + "step": 10798 + }, + { + "epoch": 1.69, + "grad_norm": 24.790982739180222, + "learning_rate": 8.475559997368932e-06, + "loss": 0.5653, + "step": 10799 + }, + { + "epoch": 1.69, + "grad_norm": 22.081901957836713, + "learning_rate": 8.473893330523004e-06, + "loss": 0.5363, + "step": 10800 + }, + { + "epoch": 1.69, + "grad_norm": 35.674554635624865, + "learning_rate": 8.472226707078655e-06, + "loss": 0.5174, + "step": 10801 + }, + { + "epoch": 1.69, + "grad_norm": 23.486239177828867, + "learning_rate": 8.470560127083288e-06, + "loss": 0.7111, + "step": 10802 + }, + { + "epoch": 1.69, + "grad_norm": 19.824546100939184, + "learning_rate": 8.468893590584299e-06, + "loss": 0.5417, + "step": 10803 + }, + { + "epoch": 1.69, + "grad_norm": 18.76853911033336, + "learning_rate": 8.467227097629075e-06, + "loss": 0.5925, + "step": 10804 + }, + { + "epoch": 1.69, + "grad_norm": 18.867199731510848, + "learning_rate": 8.465560648265017e-06, + "loss": 0.5799, + "step": 10805 + }, + { + "epoch": 1.69, + "grad_norm": 21.116085691283033, + "learning_rate": 8.463894242539518e-06, + "loss": 0.5612, + "step": 10806 + }, + { + "epoch": 1.69, + "grad_norm": 18.025610195884738, + "learning_rate": 8.462227880499967e-06, + "loss": 0.5085, + "step": 10807 + }, + { + "epoch": 1.69, + "grad_norm": 31.240371653239404, + "learning_rate": 8.460561562193758e-06, + "loss": 0.6057, + "step": 10808 + }, + { + "epoch": 1.69, + "grad_norm": 14.539389937135326, + "learning_rate": 8.458895287668277e-06, + "loss": 0.5588, + "step": 10809 + }, + { + "epoch": 1.69, + "grad_norm": 20.450996276492145, + "learning_rate": 8.457229056970908e-06, + "loss": 0.4768, + "step": 10810 + }, + { + "epoch": 1.69, + "grad_norm": 19.282977499772514, + "learning_rate": 8.455562870149046e-06, + "loss": 0.5475, + "step": 10811 + }, + { + "epoch": 1.69, + "grad_norm": 25.531087087510674, + "learning_rate": 8.453896727250072e-06, + "loss": 0.5638, + "step": 10812 + }, + { + "epoch": 1.69, + "grad_norm": 13.864832133815286, + "learning_rate": 8.452230628321373e-06, + "loss": 0.5117, + "step": 10813 + }, + { + "epoch": 1.69, + "grad_norm": 12.856022948887485, + "learning_rate": 8.450564573410324e-06, + "loss": 0.5839, + "step": 10814 + }, + { + "epoch": 1.69, + "grad_norm": 13.973730064148045, + "learning_rate": 8.448898562564316e-06, + "loss": 0.5701, + "step": 10815 + }, + { + "epoch": 1.69, + "grad_norm": 18.4664471573499, + "learning_rate": 8.447232595830724e-06, + "loss": 0.5822, + "step": 10816 + }, + { + "epoch": 1.69, + "grad_norm": 23.41364848235764, + "learning_rate": 8.445566673256928e-06, + "loss": 0.4963, + "step": 10817 + }, + { + "epoch": 1.69, + "grad_norm": 18.801153053286477, + "learning_rate": 8.443900794890311e-06, + "loss": 0.6126, + "step": 10818 + }, + { + "epoch": 1.69, + "grad_norm": 18.906502736191467, + "learning_rate": 8.442234960778242e-06, + "loss": 0.5466, + "step": 10819 + }, + { + "epoch": 1.69, + 
"grad_norm": 24.460889732779616, + "learning_rate": 8.440569170968098e-06, + "loss": 0.5557, + "step": 10820 + }, + { + "epoch": 1.69, + "grad_norm": 17.172252852193743, + "learning_rate": 8.438903425507257e-06, + "loss": 0.5847, + "step": 10821 + }, + { + "epoch": 1.69, + "grad_norm": 26.27801680209744, + "learning_rate": 8.437237724443086e-06, + "loss": 0.5818, + "step": 10822 + }, + { + "epoch": 1.69, + "grad_norm": 14.469337238790432, + "learning_rate": 8.435572067822964e-06, + "loss": 0.5871, + "step": 10823 + }, + { + "epoch": 1.69, + "grad_norm": 14.37746908192004, + "learning_rate": 8.433906455694256e-06, + "loss": 0.5186, + "step": 10824 + }, + { + "epoch": 1.69, + "grad_norm": 17.13722574333145, + "learning_rate": 8.432240888104331e-06, + "loss": 0.5721, + "step": 10825 + }, + { + "epoch": 1.69, + "grad_norm": 12.924817153930416, + "learning_rate": 8.430575365100557e-06, + "loss": 0.526, + "step": 10826 + }, + { + "epoch": 1.69, + "grad_norm": 24.800927392310008, + "learning_rate": 8.428909886730303e-06, + "loss": 0.578, + "step": 10827 + }, + { + "epoch": 1.69, + "grad_norm": 22.606140092472344, + "learning_rate": 8.427244453040937e-06, + "loss": 0.5831, + "step": 10828 + }, + { + "epoch": 1.69, + "grad_norm": 22.35319666936993, + "learning_rate": 8.425579064079811e-06, + "loss": 0.5031, + "step": 10829 + }, + { + "epoch": 1.69, + "grad_norm": 25.111957761121204, + "learning_rate": 8.423913719894301e-06, + "loss": 0.6381, + "step": 10830 + }, + { + "epoch": 1.69, + "grad_norm": 20.767845607352733, + "learning_rate": 8.42224842053176e-06, + "loss": 0.6046, + "step": 10831 + }, + { + "epoch": 1.69, + "grad_norm": 17.303750973367805, + "learning_rate": 8.420583166039551e-06, + "loss": 0.552, + "step": 10832 + }, + { + "epoch": 1.69, + "grad_norm": 14.924034830635621, + "learning_rate": 8.418917956465037e-06, + "loss": 0.5613, + "step": 10833 + }, + { + "epoch": 1.69, + "grad_norm": 21.41116487785557, + "learning_rate": 8.417252791855566e-06, + "loss": 0.5754, + "step": 10834 + }, + { + "epoch": 1.69, + "grad_norm": 21.25801904501597, + "learning_rate": 8.4155876722585e-06, + "loss": 0.659, + "step": 10835 + }, + { + "epoch": 1.69, + "grad_norm": 19.872164505998658, + "learning_rate": 8.413922597721197e-06, + "loss": 0.5593, + "step": 10836 + }, + { + "epoch": 1.69, + "grad_norm": 14.992201474627034, + "learning_rate": 8.41225756829101e-06, + "loss": 0.5238, + "step": 10837 + }, + { + "epoch": 1.69, + "grad_norm": 28.645610031066756, + "learning_rate": 8.410592584015284e-06, + "loss": 0.6622, + "step": 10838 + }, + { + "epoch": 1.69, + "grad_norm": 21.259955414190802, + "learning_rate": 8.408927644941373e-06, + "loss": 0.553, + "step": 10839 + }, + { + "epoch": 1.69, + "grad_norm": 15.493482280724884, + "learning_rate": 8.407262751116633e-06, + "loss": 0.5705, + "step": 10840 + }, + { + "epoch": 1.69, + "grad_norm": 16.236174416706437, + "learning_rate": 8.405597902588407e-06, + "loss": 0.5587, + "step": 10841 + }, + { + "epoch": 1.69, + "grad_norm": 18.273024350079066, + "learning_rate": 8.403933099404047e-06, + "loss": 0.6023, + "step": 10842 + }, + { + "epoch": 1.69, + "grad_norm": 19.518787243893357, + "learning_rate": 8.402268341610895e-06, + "loss": 0.5304, + "step": 10843 + }, + { + "epoch": 1.69, + "grad_norm": 21.61697461072004, + "learning_rate": 8.400603629256294e-06, + "loss": 0.5343, + "step": 10844 + }, + { + "epoch": 1.69, + "grad_norm": 24.30347646101296, + "learning_rate": 8.39893896238759e-06, + "loss": 0.59, + "step": 10845 + }, + { + "epoch": 1.69, + "grad_norm": 
31.802763261309277, + "learning_rate": 8.397274341052128e-06, + "loss": 0.6912, + "step": 10846 + }, + { + "epoch": 1.69, + "grad_norm": 28.9689021548912, + "learning_rate": 8.395609765297249e-06, + "loss": 0.6296, + "step": 10847 + }, + { + "epoch": 1.69, + "grad_norm": 13.51220117238763, + "learning_rate": 8.393945235170283e-06, + "loss": 0.5139, + "step": 10848 + }, + { + "epoch": 1.69, + "grad_norm": 23.253475208923728, + "learning_rate": 8.392280750718577e-06, + "loss": 0.6178, + "step": 10849 + }, + { + "epoch": 1.69, + "grad_norm": 40.39131619804619, + "learning_rate": 8.390616311989468e-06, + "loss": 0.5895, + "step": 10850 + }, + { + "epoch": 1.69, + "grad_norm": 17.26240991027494, + "learning_rate": 8.388951919030287e-06, + "loss": 0.5462, + "step": 10851 + }, + { + "epoch": 1.7, + "grad_norm": 18.505393947093093, + "learning_rate": 8.387287571888373e-06, + "loss": 0.5715, + "step": 10852 + }, + { + "epoch": 1.7, + "grad_norm": 29.347960694063094, + "learning_rate": 8.385623270611058e-06, + "loss": 0.621, + "step": 10853 + }, + { + "epoch": 1.7, + "grad_norm": 16.533378821857262, + "learning_rate": 8.383959015245669e-06, + "loss": 0.5487, + "step": 10854 + }, + { + "epoch": 1.7, + "grad_norm": 17.14716468032409, + "learning_rate": 8.382294805839543e-06, + "loss": 0.6003, + "step": 10855 + }, + { + "epoch": 1.7, + "grad_norm": 30.36464852636328, + "learning_rate": 8.380630642440006e-06, + "loss": 0.5592, + "step": 10856 + }, + { + "epoch": 1.7, + "grad_norm": 16.669617198803504, + "learning_rate": 8.378966525094388e-06, + "loss": 0.5366, + "step": 10857 + }, + { + "epoch": 1.7, + "grad_norm": 18.349752383971616, + "learning_rate": 8.377302453850012e-06, + "loss": 0.5522, + "step": 10858 + }, + { + "epoch": 1.7, + "grad_norm": 13.422825150170677, + "learning_rate": 8.375638428754204e-06, + "loss": 0.4631, + "step": 10859 + }, + { + "epoch": 1.7, + "grad_norm": 21.44501896224781, + "learning_rate": 8.373974449854288e-06, + "loss": 0.594, + "step": 10860 + }, + { + "epoch": 1.7, + "grad_norm": 14.357106091811323, + "learning_rate": 8.37231051719759e-06, + "loss": 0.541, + "step": 10861 + }, + { + "epoch": 1.7, + "grad_norm": 21.665164062469536, + "learning_rate": 8.37064663083143e-06, + "loss": 0.4646, + "step": 10862 + }, + { + "epoch": 1.7, + "grad_norm": 16.425120253536413, + "learning_rate": 8.368982790803124e-06, + "loss": 0.5297, + "step": 10863 + }, + { + "epoch": 1.7, + "grad_norm": 19.342211771554663, + "learning_rate": 8.367318997159992e-06, + "loss": 0.6073, + "step": 10864 + }, + { + "epoch": 1.7, + "grad_norm": 17.74727391746629, + "learning_rate": 8.365655249949355e-06, + "loss": 0.5718, + "step": 10865 + }, + { + "epoch": 1.7, + "grad_norm": 14.425060577013328, + "learning_rate": 8.363991549218525e-06, + "loss": 0.4823, + "step": 10866 + }, + { + "epoch": 1.7, + "grad_norm": 22.13968438827088, + "learning_rate": 8.362327895014821e-06, + "loss": 0.6008, + "step": 10867 + }, + { + "epoch": 1.7, + "grad_norm": 17.737013640701857, + "learning_rate": 8.360664287385552e-06, + "loss": 0.6384, + "step": 10868 + }, + { + "epoch": 1.7, + "grad_norm": 19.90273808471432, + "learning_rate": 8.359000726378033e-06, + "loss": 0.6097, + "step": 10869 + }, + { + "epoch": 1.7, + "grad_norm": 16.962587155541268, + "learning_rate": 8.35733721203957e-06, + "loss": 0.5433, + "step": 10870 + }, + { + "epoch": 1.7, + "grad_norm": 23.109271828844765, + "learning_rate": 8.355673744417479e-06, + "loss": 0.5872, + "step": 10871 + }, + { + "epoch": 1.7, + "grad_norm": 15.762518960996564, + 
"learning_rate": 8.354010323559065e-06, + "loss": 0.567, + "step": 10872 + }, + { + "epoch": 1.7, + "grad_norm": 19.263026035887965, + "learning_rate": 8.352346949511632e-06, + "loss": 0.5727, + "step": 10873 + }, + { + "epoch": 1.7, + "grad_norm": 20.912423635715026, + "learning_rate": 8.35068362232249e-06, + "loss": 0.5527, + "step": 10874 + }, + { + "epoch": 1.7, + "grad_norm": 11.301853715817181, + "learning_rate": 8.34902034203894e-06, + "loss": 0.5117, + "step": 10875 + }, + { + "epoch": 1.7, + "grad_norm": 17.68786742870043, + "learning_rate": 8.347357108708284e-06, + "loss": 0.6024, + "step": 10876 + }, + { + "epoch": 1.7, + "grad_norm": 21.077632427469204, + "learning_rate": 8.345693922377829e-06, + "loss": 0.5627, + "step": 10877 + }, + { + "epoch": 1.7, + "grad_norm": 26.141580930151132, + "learning_rate": 8.344030783094869e-06, + "loss": 0.5991, + "step": 10878 + }, + { + "epoch": 1.7, + "grad_norm": 15.240592496703146, + "learning_rate": 8.342367690906702e-06, + "loss": 0.521, + "step": 10879 + }, + { + "epoch": 1.7, + "grad_norm": 17.5548296749909, + "learning_rate": 8.340704645860632e-06, + "loss": 0.6031, + "step": 10880 + }, + { + "epoch": 1.7, + "grad_norm": 13.405925888531877, + "learning_rate": 8.339041648003952e-06, + "loss": 0.5575, + "step": 10881 + }, + { + "epoch": 1.7, + "grad_norm": 20.430878572772418, + "learning_rate": 8.337378697383956e-06, + "loss": 0.6364, + "step": 10882 + }, + { + "epoch": 1.7, + "grad_norm": 27.631832317885262, + "learning_rate": 8.335715794047933e-06, + "loss": 0.6396, + "step": 10883 + }, + { + "epoch": 1.7, + "grad_norm": 13.86446759722926, + "learning_rate": 8.33405293804318e-06, + "loss": 0.5707, + "step": 10884 + }, + { + "epoch": 1.7, + "grad_norm": 18.897243542465798, + "learning_rate": 8.332390129416988e-06, + "loss": 0.5846, + "step": 10885 + }, + { + "epoch": 1.7, + "grad_norm": 13.279694136198131, + "learning_rate": 8.330727368216647e-06, + "loss": 0.6303, + "step": 10886 + }, + { + "epoch": 1.7, + "grad_norm": 11.776156146395545, + "learning_rate": 8.329064654489442e-06, + "loss": 0.488, + "step": 10887 + }, + { + "epoch": 1.7, + "grad_norm": 13.848680261969525, + "learning_rate": 8.327401988282661e-06, + "loss": 0.5125, + "step": 10888 + }, + { + "epoch": 1.7, + "grad_norm": 15.010487715179941, + "learning_rate": 8.325739369643588e-06, + "loss": 0.5131, + "step": 10889 + }, + { + "epoch": 1.7, + "grad_norm": 14.783162116317529, + "learning_rate": 8.324076798619508e-06, + "loss": 0.5867, + "step": 10890 + }, + { + "epoch": 1.7, + "grad_norm": 20.27836650840371, + "learning_rate": 8.322414275257707e-06, + "loss": 0.6239, + "step": 10891 + }, + { + "epoch": 1.7, + "grad_norm": 15.562074142205898, + "learning_rate": 8.320751799605458e-06, + "loss": 0.5891, + "step": 10892 + }, + { + "epoch": 1.7, + "grad_norm": 19.156873202026848, + "learning_rate": 8.319089371710048e-06, + "loss": 0.5657, + "step": 10893 + }, + { + "epoch": 1.7, + "grad_norm": 14.307776239000717, + "learning_rate": 8.317426991618751e-06, + "loss": 0.5497, + "step": 10894 + }, + { + "epoch": 1.7, + "grad_norm": 21.456159758357202, + "learning_rate": 8.315764659378845e-06, + "loss": 0.6096, + "step": 10895 + }, + { + "epoch": 1.7, + "grad_norm": 20.34234110323499, + "learning_rate": 8.314102375037611e-06, + "loss": 0.5512, + "step": 10896 + }, + { + "epoch": 1.7, + "grad_norm": 20.335310418143504, + "learning_rate": 8.312440138642319e-06, + "loss": 0.5436, + "step": 10897 + }, + { + "epoch": 1.7, + "grad_norm": 14.981360918954534, + "learning_rate": 
8.310777950240238e-06, + "loss": 0.5684, + "step": 10898 + }, + { + "epoch": 1.7, + "grad_norm": 18.586375878749468, + "learning_rate": 8.309115809878646e-06, + "loss": 0.5508, + "step": 10899 + }, + { + "epoch": 1.7, + "grad_norm": 18.032988858217905, + "learning_rate": 8.307453717604811e-06, + "loss": 0.5258, + "step": 10900 + }, + { + "epoch": 1.7, + "grad_norm": 15.502474815603955, + "learning_rate": 8.305791673466006e-06, + "loss": 0.5734, + "step": 10901 + }, + { + "epoch": 1.7, + "grad_norm": 19.328851826862802, + "learning_rate": 8.304129677509491e-06, + "loss": 0.5672, + "step": 10902 + }, + { + "epoch": 1.7, + "grad_norm": 21.237094704980088, + "learning_rate": 8.302467729782535e-06, + "loss": 0.5341, + "step": 10903 + }, + { + "epoch": 1.7, + "grad_norm": 23.74807560803915, + "learning_rate": 8.300805830332404e-06, + "loss": 0.6041, + "step": 10904 + }, + { + "epoch": 1.7, + "grad_norm": 21.99702414860258, + "learning_rate": 8.299143979206363e-06, + "loss": 0.5708, + "step": 10905 + }, + { + "epoch": 1.7, + "grad_norm": 27.44408096202091, + "learning_rate": 8.297482176451675e-06, + "loss": 0.5676, + "step": 10906 + }, + { + "epoch": 1.7, + "grad_norm": 23.123638509287442, + "learning_rate": 8.295820422115594e-06, + "loss": 0.5608, + "step": 10907 + }, + { + "epoch": 1.7, + "grad_norm": 18.28172198974164, + "learning_rate": 8.294158716245381e-06, + "loss": 0.5305, + "step": 10908 + }, + { + "epoch": 1.7, + "grad_norm": 21.354148299201572, + "learning_rate": 8.2924970588883e-06, + "loss": 0.5862, + "step": 10909 + }, + { + "epoch": 1.7, + "grad_norm": 22.55309224210255, + "learning_rate": 8.290835450091601e-06, + "loss": 0.5633, + "step": 10910 + }, + { + "epoch": 1.7, + "grad_norm": 21.20277382848681, + "learning_rate": 8.289173889902546e-06, + "loss": 0.5293, + "step": 10911 + }, + { + "epoch": 1.7, + "grad_norm": 17.133885749202733, + "learning_rate": 8.287512378368384e-06, + "loss": 0.5776, + "step": 10912 + }, + { + "epoch": 1.7, + "grad_norm": 16.523097200248724, + "learning_rate": 8.285850915536367e-06, + "loss": 0.5001, + "step": 10913 + }, + { + "epoch": 1.7, + "grad_norm": 16.10297550986689, + "learning_rate": 8.284189501453745e-06, + "loss": 0.5975, + "step": 10914 + }, + { + "epoch": 1.7, + "grad_norm": 15.999201031243887, + "learning_rate": 8.28252813616777e-06, + "loss": 0.5366, + "step": 10915 + }, + { + "epoch": 1.71, + "grad_norm": 21.51660198564347, + "learning_rate": 8.280866819725695e-06, + "loss": 0.5432, + "step": 10916 + }, + { + "epoch": 1.71, + "grad_norm": 19.794595704593096, + "learning_rate": 8.279205552174758e-06, + "loss": 0.6163, + "step": 10917 + }, + { + "epoch": 1.71, + "grad_norm": 19.992468481262, + "learning_rate": 8.27754433356221e-06, + "loss": 0.6185, + "step": 10918 + }, + { + "epoch": 1.71, + "grad_norm": 17.496271285399576, + "learning_rate": 8.275883163935292e-06, + "loss": 0.5806, + "step": 10919 + }, + { + "epoch": 1.71, + "grad_norm": 22.46996722452469, + "learning_rate": 8.274222043341247e-06, + "loss": 0.6158, + "step": 10920 + }, + { + "epoch": 1.71, + "grad_norm": 13.716957907184327, + "learning_rate": 8.272560971827323e-06, + "loss": 0.5498, + "step": 10921 + }, + { + "epoch": 1.71, + "grad_norm": 20.150285231556925, + "learning_rate": 8.270899949440751e-06, + "loss": 0.4829, + "step": 10922 + }, + { + "epoch": 1.71, + "grad_norm": 17.496475492785464, + "learning_rate": 8.269238976228771e-06, + "loss": 0.4964, + "step": 10923 + }, + { + "epoch": 1.71, + "grad_norm": 4.502838690621552, + "learning_rate": 8.267578052238624e-06, + 
"loss": 0.5263, + "step": 10924 + }, + { + "epoch": 1.71, + "grad_norm": 21.25713961797008, + "learning_rate": 8.265917177517545e-06, + "loss": 0.5561, + "step": 10925 + }, + { + "epoch": 1.71, + "grad_norm": 20.398053843916482, + "learning_rate": 8.264256352112768e-06, + "loss": 0.5813, + "step": 10926 + }, + { + "epoch": 1.71, + "grad_norm": 37.41464395443639, + "learning_rate": 8.262595576071521e-06, + "loss": 0.4747, + "step": 10927 + }, + { + "epoch": 1.71, + "grad_norm": 17.847317025928152, + "learning_rate": 8.260934849441042e-06, + "loss": 0.4976, + "step": 10928 + }, + { + "epoch": 1.71, + "grad_norm": 19.635553731026775, + "learning_rate": 8.259274172268556e-06, + "loss": 0.5807, + "step": 10929 + }, + { + "epoch": 1.71, + "grad_norm": 23.640106078867394, + "learning_rate": 8.2576135446013e-06, + "loss": 0.6105, + "step": 10930 + }, + { + "epoch": 1.71, + "grad_norm": 15.383362906505662, + "learning_rate": 8.255952966486491e-06, + "loss": 0.6145, + "step": 10931 + }, + { + "epoch": 1.71, + "grad_norm": 11.106375030346186, + "learning_rate": 8.25429243797136e-06, + "loss": 0.524, + "step": 10932 + }, + { + "epoch": 1.71, + "grad_norm": 30.64138129670902, + "learning_rate": 8.252631959103129e-06, + "loss": 0.6047, + "step": 10933 + }, + { + "epoch": 1.71, + "grad_norm": 12.419194950835276, + "learning_rate": 8.250971529929026e-06, + "loss": 0.5218, + "step": 10934 + }, + { + "epoch": 1.71, + "grad_norm": 20.669199615435286, + "learning_rate": 8.249311150496271e-06, + "loss": 0.5736, + "step": 10935 + }, + { + "epoch": 1.71, + "grad_norm": 26.472133173678266, + "learning_rate": 8.247650820852078e-06, + "loss": 0.6087, + "step": 10936 + }, + { + "epoch": 1.71, + "grad_norm": 29.620755318065687, + "learning_rate": 8.245990541043672e-06, + "loss": 0.5594, + "step": 10937 + }, + { + "epoch": 1.71, + "grad_norm": 25.733572474892174, + "learning_rate": 8.24433031111827e-06, + "loss": 0.5994, + "step": 10938 + }, + { + "epoch": 1.71, + "grad_norm": 17.432677710961624, + "learning_rate": 8.242670131123085e-06, + "loss": 0.5492, + "step": 10939 + }, + { + "epoch": 1.71, + "grad_norm": 17.384402292824348, + "learning_rate": 8.241010001105338e-06, + "loss": 0.6059, + "step": 10940 + }, + { + "epoch": 1.71, + "grad_norm": 20.774879041869337, + "learning_rate": 8.239349921112235e-06, + "loss": 0.5671, + "step": 10941 + }, + { + "epoch": 1.71, + "grad_norm": 19.625399671269154, + "learning_rate": 8.237689891190988e-06, + "loss": 0.5578, + "step": 10942 + }, + { + "epoch": 1.71, + "grad_norm": 19.42894168015925, + "learning_rate": 8.236029911388811e-06, + "loss": 0.5748, + "step": 10943 + }, + { + "epoch": 1.71, + "grad_norm": 21.770875552165393, + "learning_rate": 8.234369981752913e-06, + "loss": 0.5848, + "step": 10944 + }, + { + "epoch": 1.71, + "grad_norm": 17.350162655744025, + "learning_rate": 8.232710102330502e-06, + "loss": 0.5975, + "step": 10945 + }, + { + "epoch": 1.71, + "grad_norm": 17.78063106926223, + "learning_rate": 8.231050273168776e-06, + "loss": 0.562, + "step": 10946 + }, + { + "epoch": 1.71, + "grad_norm": 5.502297131688767, + "learning_rate": 8.229390494314949e-06, + "loss": 0.5946, + "step": 10947 + }, + { + "epoch": 1.71, + "grad_norm": 15.264131519330666, + "learning_rate": 8.227730765816216e-06, + "loss": 0.5049, + "step": 10948 + }, + { + "epoch": 1.71, + "grad_norm": 11.951478238830493, + "learning_rate": 8.226071087719789e-06, + "loss": 0.5444, + "step": 10949 + }, + { + "epoch": 1.71, + "grad_norm": 19.942149590621575, + "learning_rate": 8.224411460072863e-06, + 
"loss": 0.5804, + "step": 10950 + }, + { + "epoch": 1.71, + "grad_norm": 13.968534621022663, + "learning_rate": 8.222751882922632e-06, + "loss": 0.6098, + "step": 10951 + }, + { + "epoch": 1.71, + "grad_norm": 16.37887084960751, + "learning_rate": 8.221092356316298e-06, + "loss": 0.5557, + "step": 10952 + }, + { + "epoch": 1.71, + "grad_norm": 17.608654212247025, + "learning_rate": 8.21943288030106e-06, + "loss": 0.6502, + "step": 10953 + }, + { + "epoch": 1.71, + "grad_norm": 16.563353896621614, + "learning_rate": 8.217773454924105e-06, + "loss": 0.5859, + "step": 10954 + }, + { + "epoch": 1.71, + "grad_norm": 17.051143185334322, + "learning_rate": 8.216114080232635e-06, + "loss": 0.6179, + "step": 10955 + }, + { + "epoch": 1.71, + "grad_norm": 15.33739395143801, + "learning_rate": 8.214454756273836e-06, + "loss": 0.5227, + "step": 10956 + }, + { + "epoch": 1.71, + "grad_norm": 24.930093385983994, + "learning_rate": 8.2127954830949e-06, + "loss": 0.571, + "step": 10957 + }, + { + "epoch": 1.71, + "grad_norm": 27.78895490859864, + "learning_rate": 8.21113626074301e-06, + "loss": 0.6368, + "step": 10958 + }, + { + "epoch": 1.71, + "grad_norm": 18.97172718471198, + "learning_rate": 8.20947708926536e-06, + "loss": 0.5462, + "step": 10959 + }, + { + "epoch": 1.71, + "grad_norm": 14.152619516337708, + "learning_rate": 8.20781796870914e-06, + "loss": 0.4816, + "step": 10960 + }, + { + "epoch": 1.71, + "grad_norm": 14.294996336195624, + "learning_rate": 8.20615889912152e-06, + "loss": 0.5204, + "step": 10961 + }, + { + "epoch": 1.71, + "grad_norm": 24.888929213512366, + "learning_rate": 8.204499880549699e-06, + "loss": 0.5903, + "step": 10962 + }, + { + "epoch": 1.71, + "grad_norm": 24.16416867859061, + "learning_rate": 8.202840913040847e-06, + "loss": 0.593, + "step": 10963 + }, + { + "epoch": 1.71, + "grad_norm": 34.83630193385815, + "learning_rate": 8.201181996642147e-06, + "loss": 0.6155, + "step": 10964 + }, + { + "epoch": 1.71, + "grad_norm": 15.768592730950118, + "learning_rate": 8.199523131400783e-06, + "loss": 0.5226, + "step": 10965 + }, + { + "epoch": 1.71, + "grad_norm": 15.132701464724713, + "learning_rate": 8.197864317363926e-06, + "loss": 0.5371, + "step": 10966 + }, + { + "epoch": 1.71, + "grad_norm": 21.337489980956185, + "learning_rate": 8.19620555457875e-06, + "loss": 0.5384, + "step": 10967 + }, + { + "epoch": 1.71, + "grad_norm": 14.497313984422355, + "learning_rate": 8.194546843092438e-06, + "loss": 0.5255, + "step": 10968 + }, + { + "epoch": 1.71, + "grad_norm": 14.670657211968459, + "learning_rate": 8.192888182952155e-06, + "loss": 0.4893, + "step": 10969 + }, + { + "epoch": 1.71, + "grad_norm": 19.008764870268358, + "learning_rate": 8.191229574205078e-06, + "loss": 0.5432, + "step": 10970 + }, + { + "epoch": 1.71, + "grad_norm": 21.819143573333196, + "learning_rate": 8.18957101689837e-06, + "loss": 0.6081, + "step": 10971 + }, + { + "epoch": 1.71, + "grad_norm": 26.228774009600517, + "learning_rate": 8.187912511079205e-06, + "loss": 0.5523, + "step": 10972 + }, + { + "epoch": 1.71, + "grad_norm": 18.20772256216007, + "learning_rate": 8.186254056794747e-06, + "loss": 0.5818, + "step": 10973 + }, + { + "epoch": 1.71, + "grad_norm": 21.06983739156714, + "learning_rate": 8.184595654092161e-06, + "loss": 0.5562, + "step": 10974 + }, + { + "epoch": 1.71, + "grad_norm": 25.424231585890986, + "learning_rate": 8.182937303018619e-06, + "loss": 0.6361, + "step": 10975 + }, + { + "epoch": 1.71, + "grad_norm": 19.16481744528762, + "learning_rate": 8.181279003621274e-06, + "loss": 
0.5153, + "step": 10976 + }, + { + "epoch": 1.71, + "grad_norm": 30.802604154974652, + "learning_rate": 8.179620755947287e-06, + "loss": 0.5733, + "step": 10977 + }, + { + "epoch": 1.71, + "grad_norm": 16.742537468560272, + "learning_rate": 8.177962560043824e-06, + "loss": 0.6117, + "step": 10978 + }, + { + "epoch": 1.71, + "grad_norm": 28.38632288276588, + "learning_rate": 8.17630441595804e-06, + "loss": 0.517, + "step": 10979 + }, + { + "epoch": 1.72, + "grad_norm": 15.923263957167489, + "learning_rate": 8.17464632373709e-06, + "loss": 0.5239, + "step": 10980 + }, + { + "epoch": 1.72, + "grad_norm": 19.490897551455543, + "learning_rate": 8.172988283428132e-06, + "loss": 0.5829, + "step": 10981 + }, + { + "epoch": 1.72, + "grad_norm": 23.940319390755732, + "learning_rate": 8.171330295078318e-06, + "loss": 0.5117, + "step": 10982 + }, + { + "epoch": 1.72, + "grad_norm": 19.43473985982462, + "learning_rate": 8.1696723587348e-06, + "loss": 0.6099, + "step": 10983 + }, + { + "epoch": 1.72, + "grad_norm": 17.632587529878155, + "learning_rate": 8.168014474444731e-06, + "loss": 0.5982, + "step": 10984 + }, + { + "epoch": 1.72, + "grad_norm": 20.675668228564096, + "learning_rate": 8.166356642255258e-06, + "loss": 0.584, + "step": 10985 + }, + { + "epoch": 1.72, + "grad_norm": 20.01815195830891, + "learning_rate": 8.164698862213527e-06, + "loss": 0.6065, + "step": 10986 + }, + { + "epoch": 1.72, + "grad_norm": 13.87360210561644, + "learning_rate": 8.16304113436669e-06, + "loss": 0.5353, + "step": 10987 + }, + { + "epoch": 1.72, + "grad_norm": 18.041664347683344, + "learning_rate": 8.161383458761887e-06, + "loss": 0.5885, + "step": 10988 + }, + { + "epoch": 1.72, + "grad_norm": 16.997703264741872, + "learning_rate": 8.159725835446266e-06, + "loss": 0.5921, + "step": 10989 + }, + { + "epoch": 1.72, + "grad_norm": 27.729567331896416, + "learning_rate": 8.15806826446696e-06, + "loss": 0.5812, + "step": 10990 + }, + { + "epoch": 1.72, + "grad_norm": 19.69375402328852, + "learning_rate": 8.156410745871119e-06, + "loss": 0.542, + "step": 10991 + }, + { + "epoch": 1.72, + "grad_norm": 15.188270581676644, + "learning_rate": 8.154753279705875e-06, + "loss": 0.5461, + "step": 10992 + }, + { + "epoch": 1.72, + "grad_norm": 24.136801629850993, + "learning_rate": 8.15309586601837e-06, + "loss": 0.5889, + "step": 10993 + }, + { + "epoch": 1.72, + "grad_norm": 20.047274211675738, + "learning_rate": 8.151438504855742e-06, + "loss": 0.5705, + "step": 10994 + }, + { + "epoch": 1.72, + "grad_norm": 17.155994891960784, + "learning_rate": 8.149781196265117e-06, + "loss": 0.5442, + "step": 10995 + }, + { + "epoch": 1.72, + "grad_norm": 18.199399782235506, + "learning_rate": 8.148123940293632e-06, + "loss": 0.5261, + "step": 10996 + }, + { + "epoch": 1.72, + "grad_norm": 21.092233468394266, + "learning_rate": 8.146466736988422e-06, + "loss": 0.5723, + "step": 10997 + }, + { + "epoch": 1.72, + "grad_norm": 18.21096789645126, + "learning_rate": 8.144809586396608e-06, + "loss": 0.5656, + "step": 10998 + }, + { + "epoch": 1.72, + "grad_norm": 14.233921976553873, + "learning_rate": 8.143152488565332e-06, + "loss": 0.5529, + "step": 10999 + }, + { + "epoch": 1.72, + "grad_norm": 16.137995196396947, + "learning_rate": 8.141495443541708e-06, + "loss": 0.5669, + "step": 11000 + }, + { + "epoch": 1.72, + "grad_norm": 20.661002369888248, + "learning_rate": 8.139838451372868e-06, + "loss": 0.534, + "step": 11001 + }, + { + "epoch": 1.72, + "grad_norm": 25.811475719029517, + "learning_rate": 8.138181512105931e-06, + "loss": 0.5449, + 
"step": 11002 + }, + { + "epoch": 1.72, + "grad_norm": 13.560522016809891, + "learning_rate": 8.136524625788026e-06, + "loss": 0.4659, + "step": 11003 + }, + { + "epoch": 1.72, + "grad_norm": 18.104156449874377, + "learning_rate": 8.134867792466272e-06, + "loss": 0.5013, + "step": 11004 + }, + { + "epoch": 1.72, + "grad_norm": 21.90485664669444, + "learning_rate": 8.133211012187783e-06, + "loss": 0.56, + "step": 11005 + }, + { + "epoch": 1.72, + "grad_norm": 27.519362035032106, + "learning_rate": 8.131554284999683e-06, + "loss": 0.5678, + "step": 11006 + }, + { + "epoch": 1.72, + "grad_norm": 21.27402890749563, + "learning_rate": 8.129897610949086e-06, + "loss": 0.502, + "step": 11007 + }, + { + "epoch": 1.72, + "grad_norm": 25.254044915782583, + "learning_rate": 8.128240990083104e-06, + "loss": 0.6059, + "step": 11008 + }, + { + "epoch": 1.72, + "grad_norm": 18.026716506973344, + "learning_rate": 8.12658442244886e-06, + "loss": 0.6353, + "step": 11009 + }, + { + "epoch": 1.72, + "grad_norm": 23.869026824952083, + "learning_rate": 8.124927908093455e-06, + "loss": 0.4833, + "step": 11010 + }, + { + "epoch": 1.72, + "grad_norm": 22.541212007840347, + "learning_rate": 8.123271447064001e-06, + "loss": 0.5512, + "step": 11011 + }, + { + "epoch": 1.72, + "grad_norm": 24.436588693539342, + "learning_rate": 8.121615039407613e-06, + "loss": 0.5784, + "step": 11012 + }, + { + "epoch": 1.72, + "grad_norm": 16.515478452900282, + "learning_rate": 8.119958685171392e-06, + "loss": 0.5607, + "step": 11013 + }, + { + "epoch": 1.72, + "grad_norm": 19.27485765158441, + "learning_rate": 8.11830238440245e-06, + "loss": 0.5034, + "step": 11014 + }, + { + "epoch": 1.72, + "grad_norm": 13.417919135522526, + "learning_rate": 8.116646137147884e-06, + "loss": 0.6027, + "step": 11015 + }, + { + "epoch": 1.72, + "grad_norm": 14.62591573352725, + "learning_rate": 8.114989943454801e-06, + "loss": 0.5586, + "step": 11016 + }, + { + "epoch": 1.72, + "grad_norm": 17.09641837558083, + "learning_rate": 8.113333803370297e-06, + "loss": 0.5503, + "step": 11017 + }, + { + "epoch": 1.72, + "grad_norm": 27.03796482806511, + "learning_rate": 8.111677716941481e-06, + "loss": 0.5911, + "step": 11018 + }, + { + "epoch": 1.72, + "grad_norm": 17.330280045255513, + "learning_rate": 8.110021684215448e-06, + "loss": 0.545, + "step": 11019 + }, + { + "epoch": 1.72, + "grad_norm": 24.73453217201637, + "learning_rate": 8.10836570523929e-06, + "loss": 0.5973, + "step": 11020 + }, + { + "epoch": 1.72, + "grad_norm": 14.325531198520604, + "learning_rate": 8.106709780060103e-06, + "loss": 0.5642, + "step": 11021 + }, + { + "epoch": 1.72, + "grad_norm": 22.658649594400202, + "learning_rate": 8.105053908724985e-06, + "loss": 0.5774, + "step": 11022 + }, + { + "epoch": 1.72, + "grad_norm": 17.606361811712446, + "learning_rate": 8.103398091281023e-06, + "loss": 0.5776, + "step": 11023 + }, + { + "epoch": 1.72, + "grad_norm": 26.57523617190926, + "learning_rate": 8.101742327775312e-06, + "loss": 0.5409, + "step": 11024 + }, + { + "epoch": 1.72, + "grad_norm": 16.01286506504692, + "learning_rate": 8.10008661825494e-06, + "loss": 0.5582, + "step": 11025 + }, + { + "epoch": 1.72, + "grad_norm": 17.546346017841536, + "learning_rate": 8.09843096276699e-06, + "loss": 0.5599, + "step": 11026 + }, + { + "epoch": 1.72, + "grad_norm": 21.43382008611894, + "learning_rate": 8.096775361358552e-06, + "loss": 0.5823, + "step": 11027 + }, + { + "epoch": 1.72, + "grad_norm": 21.18612216552755, + "learning_rate": 8.095119814076711e-06, + "loss": 0.5712, + "step": 
11028 + }, + { + "epoch": 1.72, + "grad_norm": 17.592964890005003, + "learning_rate": 8.093464320968547e-06, + "loss": 0.5272, + "step": 11029 + }, + { + "epoch": 1.72, + "grad_norm": 25.499410651680005, + "learning_rate": 8.09180888208114e-06, + "loss": 0.5841, + "step": 11030 + }, + { + "epoch": 1.72, + "grad_norm": 19.41703335771521, + "learning_rate": 8.090153497461573e-06, + "loss": 0.6221, + "step": 11031 + }, + { + "epoch": 1.72, + "grad_norm": 16.54118439577805, + "learning_rate": 8.088498167156926e-06, + "loss": 0.4778, + "step": 11032 + }, + { + "epoch": 1.72, + "grad_norm": 26.262342527585577, + "learning_rate": 8.086842891214274e-06, + "loss": 0.5956, + "step": 11033 + }, + { + "epoch": 1.72, + "grad_norm": 21.358215950784594, + "learning_rate": 8.085187669680687e-06, + "loss": 0.5643, + "step": 11034 + }, + { + "epoch": 1.72, + "grad_norm": 13.09042014762866, + "learning_rate": 8.083532502603243e-06, + "loss": 0.5982, + "step": 11035 + }, + { + "epoch": 1.72, + "grad_norm": 15.472888105967124, + "learning_rate": 8.081877390029013e-06, + "loss": 0.5203, + "step": 11036 + }, + { + "epoch": 1.72, + "grad_norm": 20.36259903488524, + "learning_rate": 8.08022233200507e-06, + "loss": 0.615, + "step": 11037 + }, + { + "epoch": 1.72, + "grad_norm": 19.797824194055696, + "learning_rate": 8.078567328578482e-06, + "loss": 0.5085, + "step": 11038 + }, + { + "epoch": 1.72, + "grad_norm": 27.77186851093607, + "learning_rate": 8.076912379796314e-06, + "loss": 0.5671, + "step": 11039 + }, + { + "epoch": 1.72, + "grad_norm": 18.64235028247073, + "learning_rate": 8.07525748570563e-06, + "loss": 0.5634, + "step": 11040 + }, + { + "epoch": 1.72, + "grad_norm": 30.995082032506957, + "learning_rate": 8.0736026463535e-06, + "loss": 0.6884, + "step": 11041 + }, + { + "epoch": 1.72, + "grad_norm": 18.509702462811813, + "learning_rate": 8.071947861786982e-06, + "loss": 0.5493, + "step": 11042 + }, + { + "epoch": 1.72, + "grad_norm": 20.382712219564755, + "learning_rate": 8.070293132053143e-06, + "loss": 0.5787, + "step": 11043 + }, + { + "epoch": 1.73, + "grad_norm": 13.06460155802515, + "learning_rate": 8.068638457199037e-06, + "loss": 0.5473, + "step": 11044 + }, + { + "epoch": 1.73, + "grad_norm": 14.342234244697979, + "learning_rate": 8.066983837271722e-06, + "loss": 0.5552, + "step": 11045 + }, + { + "epoch": 1.73, + "grad_norm": 16.61376142491823, + "learning_rate": 8.065329272318255e-06, + "loss": 0.6151, + "step": 11046 + }, + { + "epoch": 1.73, + "grad_norm": 18.626125978110455, + "learning_rate": 8.063674762385691e-06, + "loss": 0.553, + "step": 11047 + }, + { + "epoch": 1.73, + "grad_norm": 18.643831769416725, + "learning_rate": 8.06202030752109e-06, + "loss": 0.5936, + "step": 11048 + }, + { + "epoch": 1.73, + "grad_norm": 26.662655345895203, + "learning_rate": 8.06036590777149e-06, + "loss": 0.5426, + "step": 11049 + }, + { + "epoch": 1.73, + "grad_norm": 13.247502883389618, + "learning_rate": 8.058711563183955e-06, + "loss": 0.4977, + "step": 11050 + }, + { + "epoch": 1.73, + "grad_norm": 18.289681275470457, + "learning_rate": 8.057057273805525e-06, + "loss": 0.5044, + "step": 11051 + }, + { + "epoch": 1.73, + "grad_norm": 28.097842980216058, + "learning_rate": 8.055403039683247e-06, + "loss": 0.661, + "step": 11052 + }, + { + "epoch": 1.73, + "grad_norm": 31.535869156552145, + "learning_rate": 8.053748860864174e-06, + "loss": 0.6149, + "step": 11053 + }, + { + "epoch": 1.73, + "grad_norm": 17.49580416106502, + "learning_rate": 8.052094737395343e-06, + "loss": 0.5196, + "step": 11054 + }, 
+ { + "epoch": 1.73, + "grad_norm": 20.72396495256864, + "learning_rate": 8.050440669323796e-06, + "loss": 0.5294, + "step": 11055 + }, + { + "epoch": 1.73, + "grad_norm": 17.234785005909544, + "learning_rate": 8.048786656696579e-06, + "loss": 0.6251, + "step": 11056 + }, + { + "epoch": 1.73, + "grad_norm": 20.220223077620975, + "learning_rate": 8.047132699560725e-06, + "loss": 0.5548, + "step": 11057 + }, + { + "epoch": 1.73, + "grad_norm": 17.359246668096613, + "learning_rate": 8.04547879796328e-06, + "loss": 0.57, + "step": 11058 + }, + { + "epoch": 1.73, + "grad_norm": 18.67673597067759, + "learning_rate": 8.04382495195127e-06, + "loss": 0.5596, + "step": 11059 + }, + { + "epoch": 1.73, + "grad_norm": 12.909389040797807, + "learning_rate": 8.042171161571734e-06, + "loss": 0.5106, + "step": 11060 + }, + { + "epoch": 1.73, + "grad_norm": 20.535049992591745, + "learning_rate": 8.040517426871703e-06, + "loss": 0.5295, + "step": 11061 + }, + { + "epoch": 1.73, + "grad_norm": 21.898674181429076, + "learning_rate": 8.038863747898214e-06, + "loss": 0.5764, + "step": 11062 + }, + { + "epoch": 1.73, + "grad_norm": 24.233164874520643, + "learning_rate": 8.037210124698294e-06, + "loss": 0.5359, + "step": 11063 + }, + { + "epoch": 1.73, + "grad_norm": 17.149139086680933, + "learning_rate": 8.03555655731897e-06, + "loss": 0.4864, + "step": 11064 + }, + { + "epoch": 1.73, + "grad_norm": 21.420859164603485, + "learning_rate": 8.033903045807262e-06, + "loss": 0.5552, + "step": 11065 + }, + { + "epoch": 1.73, + "grad_norm": 26.332369953580862, + "learning_rate": 8.032249590210206e-06, + "loss": 0.5435, + "step": 11066 + }, + { + "epoch": 1.73, + "grad_norm": 16.057074256631132, + "learning_rate": 8.030596190574818e-06, + "loss": 0.592, + "step": 11067 + }, + { + "epoch": 1.73, + "grad_norm": 23.374759156789157, + "learning_rate": 8.028942846948126e-06, + "loss": 0.5417, + "step": 11068 + }, + { + "epoch": 1.73, + "grad_norm": 23.16419267022803, + "learning_rate": 8.027289559377145e-06, + "loss": 0.5208, + "step": 11069 + }, + { + "epoch": 1.73, + "grad_norm": 14.173438081687703, + "learning_rate": 8.025636327908895e-06, + "loss": 0.5357, + "step": 11070 + }, + { + "epoch": 1.73, + "grad_norm": 21.554575367103908, + "learning_rate": 8.02398315259039e-06, + "loss": 0.5781, + "step": 11071 + }, + { + "epoch": 1.73, + "grad_norm": 12.602296407876821, + "learning_rate": 8.02233003346865e-06, + "loss": 0.4713, + "step": 11072 + }, + { + "epoch": 1.73, + "grad_norm": 23.593174744849943, + "learning_rate": 8.02067697059069e-06, + "loss": 0.6106, + "step": 11073 + }, + { + "epoch": 1.73, + "grad_norm": 27.016106065666666, + "learning_rate": 8.019023964003513e-06, + "loss": 0.5307, + "step": 11074 + }, + { + "epoch": 1.73, + "grad_norm": 16.047224747190242, + "learning_rate": 8.01737101375414e-06, + "loss": 0.5816, + "step": 11075 + }, + { + "epoch": 1.73, + "grad_norm": 14.723276295792006, + "learning_rate": 8.015718119889576e-06, + "loss": 0.5206, + "step": 11076 + }, + { + "epoch": 1.73, + "grad_norm": 14.158261089440472, + "learning_rate": 8.014065282456829e-06, + "loss": 0.5438, + "step": 11077 + }, + { + "epoch": 1.73, + "grad_norm": 23.76313922040878, + "learning_rate": 8.0124125015029e-06, + "loss": 0.5314, + "step": 11078 + }, + { + "epoch": 1.73, + "grad_norm": 17.643851173575644, + "learning_rate": 8.010759777074796e-06, + "loss": 0.622, + "step": 11079 + }, + { + "epoch": 1.73, + "grad_norm": 15.529692492113387, + "learning_rate": 8.009107109219522e-06, + "loss": 0.5564, + "step": 11080 + }, + { + 
"epoch": 1.73, + "grad_norm": 16.519515489222094, + "learning_rate": 8.007454497984078e-06, + "loss": 0.5922, + "step": 11081 + }, + { + "epoch": 1.73, + "grad_norm": 15.377649951148507, + "learning_rate": 8.005801943415462e-06, + "loss": 0.5335, + "step": 11082 + }, + { + "epoch": 1.73, + "grad_norm": 25.989080296956804, + "learning_rate": 8.004149445560675e-06, + "loss": 0.5507, + "step": 11083 + }, + { + "epoch": 1.73, + "grad_norm": 26.98497781704211, + "learning_rate": 8.002497004466703e-06, + "loss": 0.6189, + "step": 11084 + }, + { + "epoch": 1.73, + "grad_norm": 16.69837751890889, + "learning_rate": 8.000844620180553e-06, + "loss": 0.6157, + "step": 11085 + }, + { + "epoch": 1.73, + "grad_norm": 12.52657075512683, + "learning_rate": 7.999192292749209e-06, + "loss": 0.5011, + "step": 11086 + }, + { + "epoch": 1.73, + "grad_norm": 20.089404243929433, + "learning_rate": 7.997540022219672e-06, + "loss": 0.6038, + "step": 11087 + }, + { + "epoch": 1.73, + "grad_norm": 17.70165493375579, + "learning_rate": 7.995887808638921e-06, + "loss": 0.6156, + "step": 11088 + }, + { + "epoch": 1.73, + "grad_norm": 21.615571556863518, + "learning_rate": 7.99423565205395e-06, + "loss": 0.5861, + "step": 11089 + }, + { + "epoch": 1.73, + "grad_norm": 15.954431413536655, + "learning_rate": 7.99258355251174e-06, + "loss": 0.5519, + "step": 11090 + }, + { + "epoch": 1.73, + "grad_norm": 17.510564341840514, + "learning_rate": 7.990931510059281e-06, + "loss": 0.5643, + "step": 11091 + }, + { + "epoch": 1.73, + "grad_norm": 26.357858273602496, + "learning_rate": 7.989279524743559e-06, + "loss": 0.532, + "step": 11092 + }, + { + "epoch": 1.73, + "grad_norm": 22.877190438779387, + "learning_rate": 7.987627596611546e-06, + "loss": 0.625, + "step": 11093 + }, + { + "epoch": 1.73, + "grad_norm": 28.456516538218356, + "learning_rate": 7.985975725710229e-06, + "loss": 0.6112, + "step": 11094 + }, + { + "epoch": 1.73, + "grad_norm": 14.390847832925022, + "learning_rate": 7.984323912086584e-06, + "loss": 0.5136, + "step": 11095 + }, + { + "epoch": 1.73, + "grad_norm": 28.019716719828942, + "learning_rate": 7.982672155787586e-06, + "loss": 0.58, + "step": 11096 + }, + { + "epoch": 1.73, + "grad_norm": 12.003477062195799, + "learning_rate": 7.981020456860215e-06, + "loss": 0.583, + "step": 11097 + }, + { + "epoch": 1.73, + "grad_norm": 25.403469419461544, + "learning_rate": 7.979368815351441e-06, + "loss": 0.5502, + "step": 11098 + }, + { + "epoch": 1.73, + "grad_norm": 17.524190774060003, + "learning_rate": 7.977717231308233e-06, + "loss": 0.4403, + "step": 11099 + }, + { + "epoch": 1.73, + "grad_norm": 27.22380789500585, + "learning_rate": 7.976065704777566e-06, + "loss": 0.5688, + "step": 11100 + }, + { + "epoch": 1.73, + "grad_norm": 24.66113094994034, + "learning_rate": 7.974414235806403e-06, + "loss": 0.5801, + "step": 11101 + }, + { + "epoch": 1.73, + "grad_norm": 19.72633698551709, + "learning_rate": 7.972762824441722e-06, + "loss": 0.5958, + "step": 11102 + }, + { + "epoch": 1.73, + "grad_norm": 23.229137339523575, + "learning_rate": 7.971111470730474e-06, + "loss": 0.5563, + "step": 11103 + }, + { + "epoch": 1.73, + "grad_norm": 21.096566558281705, + "learning_rate": 7.96946017471963e-06, + "loss": 0.565, + "step": 11104 + }, + { + "epoch": 1.73, + "grad_norm": 21.96685706485057, + "learning_rate": 7.967808936456149e-06, + "loss": 0.5936, + "step": 11105 + }, + { + "epoch": 1.73, + "grad_norm": 23.269102688477492, + "learning_rate": 7.966157755986994e-06, + "loss": 0.6045, + "step": 11106 + }, + { + "epoch": 
1.73, + "grad_norm": 15.209484271912398, + "learning_rate": 7.964506633359128e-06, + "loss": 0.5603, + "step": 11107 + }, + { + "epoch": 1.74, + "grad_norm": 20.844999399586634, + "learning_rate": 7.962855568619499e-06, + "loss": 0.5773, + "step": 11108 + }, + { + "epoch": 1.74, + "grad_norm": 20.72385245923531, + "learning_rate": 7.961204561815063e-06, + "loss": 0.4946, + "step": 11109 + }, + { + "epoch": 1.74, + "grad_norm": 14.107827367319523, + "learning_rate": 7.95955361299278e-06, + "loss": 0.5071, + "step": 11110 + }, + { + "epoch": 1.74, + "grad_norm": 18.403746216851392, + "learning_rate": 7.957902722199596e-06, + "loss": 0.6051, + "step": 11111 + }, + { + "epoch": 1.74, + "grad_norm": 35.29953233308446, + "learning_rate": 7.956251889482467e-06, + "loss": 0.5955, + "step": 11112 + }, + { + "epoch": 1.74, + "grad_norm": 14.51073173821916, + "learning_rate": 7.954601114888338e-06, + "loss": 0.5469, + "step": 11113 + }, + { + "epoch": 1.74, + "grad_norm": 21.361051458775115, + "learning_rate": 7.952950398464156e-06, + "loss": 0.5672, + "step": 11114 + }, + { + "epoch": 1.74, + "grad_norm": 21.68827777891515, + "learning_rate": 7.951299740256865e-06, + "loss": 0.585, + "step": 11115 + }, + { + "epoch": 1.74, + "grad_norm": 18.981508707375458, + "learning_rate": 7.949649140313413e-06, + "loss": 0.5476, + "step": 11116 + }, + { + "epoch": 1.74, + "grad_norm": 15.699210480390972, + "learning_rate": 7.947998598680743e-06, + "loss": 0.5574, + "step": 11117 + }, + { + "epoch": 1.74, + "grad_norm": 22.554949270353937, + "learning_rate": 7.946348115405788e-06, + "loss": 0.5768, + "step": 11118 + }, + { + "epoch": 1.74, + "grad_norm": 20.234936766024894, + "learning_rate": 7.944697690535493e-06, + "loss": 0.5614, + "step": 11119 + }, + { + "epoch": 1.74, + "grad_norm": 16.38727312207389, + "learning_rate": 7.943047324116793e-06, + "loss": 0.5479, + "step": 11120 + }, + { + "epoch": 1.74, + "grad_norm": 16.145180182217953, + "learning_rate": 7.941397016196622e-06, + "loss": 0.5471, + "step": 11121 + }, + { + "epoch": 1.74, + "grad_norm": 39.59424449322826, + "learning_rate": 7.939746766821921e-06, + "loss": 0.6819, + "step": 11122 + }, + { + "epoch": 1.74, + "grad_norm": 22.470554431222713, + "learning_rate": 7.938096576039613e-06, + "loss": 0.5799, + "step": 11123 + }, + { + "epoch": 1.74, + "grad_norm": 18.87952576742975, + "learning_rate": 7.93644644389663e-06, + "loss": 0.5676, + "step": 11124 + }, + { + "epoch": 1.74, + "grad_norm": 13.474331828724617, + "learning_rate": 7.934796370439907e-06, + "loss": 0.5295, + "step": 11125 + }, + { + "epoch": 1.74, + "grad_norm": 19.151053640199148, + "learning_rate": 7.933146355716368e-06, + "loss": 0.5266, + "step": 11126 + }, + { + "epoch": 1.74, + "grad_norm": 21.41669492814009, + "learning_rate": 7.931496399772935e-06, + "loss": 0.5283, + "step": 11127 + }, + { + "epoch": 1.74, + "grad_norm": 17.02942631083082, + "learning_rate": 7.929846502656533e-06, + "loss": 0.5478, + "step": 11128 + }, + { + "epoch": 1.74, + "grad_norm": 17.93103972345277, + "learning_rate": 7.928196664414088e-06, + "loss": 0.5782, + "step": 11129 + }, + { + "epoch": 1.74, + "grad_norm": 17.59302446290554, + "learning_rate": 7.926546885092515e-06, + "loss": 0.5762, + "step": 11130 + }, + { + "epoch": 1.74, + "grad_norm": 12.436897528615656, + "learning_rate": 7.92489716473874e-06, + "loss": 0.5658, + "step": 11131 + }, + { + "epoch": 1.74, + "grad_norm": 13.341677152386612, + "learning_rate": 7.923247503399674e-06, + "loss": 0.5375, + "step": 11132 + }, + { + "epoch": 1.74, + 
"grad_norm": 15.199581514459307, + "learning_rate": 7.921597901122234e-06, + "loss": 0.6023, + "step": 11133 + }, + { + "epoch": 1.74, + "grad_norm": 21.14620217712568, + "learning_rate": 7.919948357953332e-06, + "loss": 0.5263, + "step": 11134 + }, + { + "epoch": 1.74, + "grad_norm": 19.87994058848624, + "learning_rate": 7.918298873939882e-06, + "loss": 0.6351, + "step": 11135 + }, + { + "epoch": 1.74, + "grad_norm": 15.894872621895544, + "learning_rate": 7.916649449128799e-06, + "loss": 0.5342, + "step": 11136 + }, + { + "epoch": 1.74, + "grad_norm": 20.574830035472903, + "learning_rate": 7.915000083566981e-06, + "loss": 0.5463, + "step": 11137 + }, + { + "epoch": 1.74, + "grad_norm": 13.873451409610581, + "learning_rate": 7.913350777301342e-06, + "loss": 0.5196, + "step": 11138 + }, + { + "epoch": 1.74, + "grad_norm": 20.12385858459081, + "learning_rate": 7.911701530378789e-06, + "loss": 0.5146, + "step": 11139 + }, + { + "epoch": 1.74, + "grad_norm": 25.481924751387364, + "learning_rate": 7.910052342846219e-06, + "loss": 0.5504, + "step": 11140 + }, + { + "epoch": 1.74, + "grad_norm": 13.437353532569539, + "learning_rate": 7.908403214750544e-06, + "loss": 0.5345, + "step": 11141 + }, + { + "epoch": 1.74, + "grad_norm": 17.383152699516856, + "learning_rate": 7.906754146138655e-06, + "loss": 0.5909, + "step": 11142 + }, + { + "epoch": 1.74, + "grad_norm": 36.781543884667066, + "learning_rate": 7.905105137057451e-06, + "loss": 0.5767, + "step": 11143 + }, + { + "epoch": 1.74, + "grad_norm": 22.89474109448701, + "learning_rate": 7.903456187553833e-06, + "loss": 0.6353, + "step": 11144 + }, + { + "epoch": 1.74, + "grad_norm": 16.728992731936774, + "learning_rate": 7.901807297674693e-06, + "loss": 0.5532, + "step": 11145 + }, + { + "epoch": 1.74, + "grad_norm": 15.184728292008584, + "learning_rate": 7.900158467466932e-06, + "loss": 0.4741, + "step": 11146 + }, + { + "epoch": 1.74, + "grad_norm": 21.181395474145667, + "learning_rate": 7.89850969697743e-06, + "loss": 0.5903, + "step": 11147 + }, + { + "epoch": 1.74, + "grad_norm": 16.120405041472555, + "learning_rate": 7.896860986253086e-06, + "loss": 0.5352, + "step": 11148 + }, + { + "epoch": 1.74, + "grad_norm": 19.06258247740679, + "learning_rate": 7.89521233534078e-06, + "loss": 0.5398, + "step": 11149 + }, + { + "epoch": 1.74, + "grad_norm": 19.13804934630115, + "learning_rate": 7.89356374428741e-06, + "loss": 0.5708, + "step": 11150 + }, + { + "epoch": 1.74, + "grad_norm": 21.187892427720595, + "learning_rate": 7.891915213139855e-06, + "loss": 0.5356, + "step": 11151 + }, + { + "epoch": 1.74, + "grad_norm": 18.343560072420896, + "learning_rate": 7.890266741944995e-06, + "loss": 0.5867, + "step": 11152 + }, + { + "epoch": 1.74, + "grad_norm": 18.77213299013415, + "learning_rate": 7.888618330749715e-06, + "loss": 0.5711, + "step": 11153 + }, + { + "epoch": 1.74, + "grad_norm": 15.805220979143153, + "learning_rate": 7.886969979600894e-06, + "loss": 0.5862, + "step": 11154 + }, + { + "epoch": 1.74, + "grad_norm": 14.429068554675853, + "learning_rate": 7.885321688545412e-06, + "loss": 0.5516, + "step": 11155 + }, + { + "epoch": 1.74, + "grad_norm": 16.692821746198884, + "learning_rate": 7.883673457630144e-06, + "loss": 0.503, + "step": 11156 + }, + { + "epoch": 1.74, + "grad_norm": 21.367965746136644, + "learning_rate": 7.882025286901965e-06, + "loss": 0.5323, + "step": 11157 + }, + { + "epoch": 1.74, + "grad_norm": 23.50679528370471, + "learning_rate": 7.880377176407749e-06, + "loss": 0.5922, + "step": 11158 + }, + { + "epoch": 1.74, + 
"grad_norm": 17.70563036251502, + "learning_rate": 7.878729126194362e-06, + "loss": 0.5291, + "step": 11159 + }, + { + "epoch": 1.74, + "grad_norm": 15.653253196150233, + "learning_rate": 7.877081136308681e-06, + "loss": 0.5786, + "step": 11160 + }, + { + "epoch": 1.74, + "grad_norm": 19.693096101244436, + "learning_rate": 7.875433206797573e-06, + "loss": 0.6015, + "step": 11161 + }, + { + "epoch": 1.74, + "grad_norm": 19.594477678776837, + "learning_rate": 7.873785337707897e-06, + "loss": 0.5464, + "step": 11162 + }, + { + "epoch": 1.74, + "grad_norm": 18.992648678118947, + "learning_rate": 7.872137529086525e-06, + "loss": 0.594, + "step": 11163 + }, + { + "epoch": 1.74, + "grad_norm": 14.856251143566597, + "learning_rate": 7.870489780980315e-06, + "loss": 0.5132, + "step": 11164 + }, + { + "epoch": 1.74, + "grad_norm": 15.567361704222945, + "learning_rate": 7.86884209343613e-06, + "loss": 0.5292, + "step": 11165 + }, + { + "epoch": 1.74, + "grad_norm": 23.6082497971756, + "learning_rate": 7.867194466500831e-06, + "loss": 0.5751, + "step": 11166 + }, + { + "epoch": 1.74, + "grad_norm": 23.46617471518683, + "learning_rate": 7.865546900221274e-06, + "loss": 0.5088, + "step": 11167 + }, + { + "epoch": 1.74, + "grad_norm": 18.359587600775672, + "learning_rate": 7.863899394644312e-06, + "loss": 0.6115, + "step": 11168 + }, + { + "epoch": 1.74, + "grad_norm": 16.852719261937743, + "learning_rate": 7.862251949816803e-06, + "loss": 0.5957, + "step": 11169 + }, + { + "epoch": 1.74, + "grad_norm": 14.061317520228739, + "learning_rate": 7.860604565785596e-06, + "loss": 0.5912, + "step": 11170 + }, + { + "epoch": 1.74, + "grad_norm": 16.93777975066035, + "learning_rate": 7.85895724259755e-06, + "loss": 0.5291, + "step": 11171 + }, + { + "epoch": 1.75, + "grad_norm": 25.539383992995287, + "learning_rate": 7.857309980299502e-06, + "loss": 0.5292, + "step": 11172 + }, + { + "epoch": 1.75, + "grad_norm": 23.411902252602395, + "learning_rate": 7.855662778938306e-06, + "loss": 0.5422, + "step": 11173 + }, + { + "epoch": 1.75, + "grad_norm": 14.621579744342313, + "learning_rate": 7.854015638560804e-06, + "loss": 0.5601, + "step": 11174 + }, + { + "epoch": 1.75, + "grad_norm": 23.57756516314625, + "learning_rate": 7.852368559213846e-06, + "loss": 0.6073, + "step": 11175 + }, + { + "epoch": 1.75, + "grad_norm": 14.900995452002077, + "learning_rate": 7.850721540944268e-06, + "loss": 0.5832, + "step": 11176 + }, + { + "epoch": 1.75, + "grad_norm": 19.737593953849398, + "learning_rate": 7.849074583798912e-06, + "loss": 0.547, + "step": 11177 + }, + { + "epoch": 1.75, + "grad_norm": 16.23704238232806, + "learning_rate": 7.847427687824615e-06, + "loss": 0.4946, + "step": 11178 + }, + { + "epoch": 1.75, + "grad_norm": 25.272781334072516, + "learning_rate": 7.845780853068217e-06, + "loss": 0.5513, + "step": 11179 + }, + { + "epoch": 1.75, + "grad_norm": 13.3597406125372, + "learning_rate": 7.844134079576553e-06, + "loss": 0.5056, + "step": 11180 + }, + { + "epoch": 1.75, + "grad_norm": 15.034055145233486, + "learning_rate": 7.84248736739645e-06, + "loss": 0.4983, + "step": 11181 + }, + { + "epoch": 1.75, + "grad_norm": 14.196566362067875, + "learning_rate": 7.840840716574748e-06, + "loss": 0.5368, + "step": 11182 + }, + { + "epoch": 1.75, + "grad_norm": 10.627838873271484, + "learning_rate": 7.839194127158271e-06, + "loss": 0.5258, + "step": 11183 + }, + { + "epoch": 1.75, + "grad_norm": 15.224867405885616, + "learning_rate": 7.837547599193849e-06, + "loss": 0.5772, + "step": 11184 + }, + { + "epoch": 1.75, + 
"grad_norm": 13.95064474362585, + "learning_rate": 7.835901132728311e-06, + "loss": 0.4517, + "step": 11185 + }, + { + "epoch": 1.75, + "grad_norm": 16.45076166153509, + "learning_rate": 7.834254727808477e-06, + "loss": 0.5257, + "step": 11186 + }, + { + "epoch": 1.75, + "grad_norm": 15.820391013810106, + "learning_rate": 7.83260838448117e-06, + "loss": 0.5656, + "step": 11187 + }, + { + "epoch": 1.75, + "grad_norm": 23.667336873699867, + "learning_rate": 7.830962102793214e-06, + "loss": 0.6235, + "step": 11188 + }, + { + "epoch": 1.75, + "grad_norm": 11.754841785972543, + "learning_rate": 7.829315882791426e-06, + "loss": 0.5182, + "step": 11189 + }, + { + "epoch": 1.75, + "grad_norm": 22.898584723934768, + "learning_rate": 7.827669724522632e-06, + "loss": 0.5112, + "step": 11190 + }, + { + "epoch": 1.75, + "grad_norm": 20.03620929445909, + "learning_rate": 7.826023628033631e-06, + "loss": 0.5311, + "step": 11191 + }, + { + "epoch": 1.75, + "grad_norm": 24.418773772718986, + "learning_rate": 7.824377593371248e-06, + "loss": 0.5441, + "step": 11192 + }, + { + "epoch": 1.75, + "grad_norm": 20.220247998822664, + "learning_rate": 7.822731620582294e-06, + "loss": 0.562, + "step": 11193 + }, + { + "epoch": 1.75, + "grad_norm": 17.274615775975082, + "learning_rate": 7.821085709713581e-06, + "loss": 0.6609, + "step": 11194 + }, + { + "epoch": 1.75, + "grad_norm": 20.167098561320827, + "learning_rate": 7.819439860811915e-06, + "loss": 0.5042, + "step": 11195 + }, + { + "epoch": 1.75, + "grad_norm": 21.093025764465423, + "learning_rate": 7.817794073924104e-06, + "loss": 0.5515, + "step": 11196 + }, + { + "epoch": 1.75, + "grad_norm": 18.877998192084533, + "learning_rate": 7.816148349096951e-06, + "loss": 0.5659, + "step": 11197 + }, + { + "epoch": 1.75, + "grad_norm": 17.18021799363955, + "learning_rate": 7.814502686377263e-06, + "loss": 0.5763, + "step": 11198 + }, + { + "epoch": 1.75, + "grad_norm": 18.60497919127739, + "learning_rate": 7.812857085811837e-06, + "loss": 0.5274, + "step": 11199 + }, + { + "epoch": 1.75, + "grad_norm": 24.518095578832366, + "learning_rate": 7.811211547447483e-06, + "loss": 0.5378, + "step": 11200 + }, + { + "epoch": 1.75, + "grad_norm": 12.476006045798519, + "learning_rate": 7.809566071330987e-06, + "loss": 0.5343, + "step": 11201 + }, + { + "epoch": 1.75, + "grad_norm": 15.502766660779288, + "learning_rate": 7.807920657509152e-06, + "loss": 0.5502, + "step": 11202 + }, + { + "epoch": 1.75, + "grad_norm": 15.12532621615114, + "learning_rate": 7.80627530602877e-06, + "loss": 0.6286, + "step": 11203 + }, + { + "epoch": 1.75, + "grad_norm": 12.686116191255312, + "learning_rate": 7.804630016936636e-06, + "loss": 0.5292, + "step": 11204 + }, + { + "epoch": 1.75, + "grad_norm": 18.902276441245746, + "learning_rate": 7.802984790279542e-06, + "loss": 0.5782, + "step": 11205 + }, + { + "epoch": 1.75, + "grad_norm": 17.75679950367359, + "learning_rate": 7.801339626104272e-06, + "loss": 0.5057, + "step": 11206 + }, + { + "epoch": 1.75, + "grad_norm": 25.95664332252799, + "learning_rate": 7.799694524457616e-06, + "loss": 0.5221, + "step": 11207 + }, + { + "epoch": 1.75, + "grad_norm": 17.29322166637064, + "learning_rate": 7.798049485386365e-06, + "loss": 0.6383, + "step": 11208 + }, + { + "epoch": 1.75, + "grad_norm": 18.69645899817208, + "learning_rate": 7.796404508937295e-06, + "loss": 0.5263, + "step": 11209 + }, + { + "epoch": 1.75, + "grad_norm": 17.98804970425537, + "learning_rate": 7.794759595157194e-06, + "loss": 0.5728, + "step": 11210 + }, + { + "epoch": 1.75, + 
"grad_norm": 14.8381526613064, + "learning_rate": 7.79311474409284e-06, + "loss": 0.5681, + "step": 11211 + }, + { + "epoch": 1.75, + "grad_norm": 19.553867530167153, + "learning_rate": 7.791469955791009e-06, + "loss": 0.5167, + "step": 11212 + }, + { + "epoch": 1.75, + "grad_norm": 28.675711620388135, + "learning_rate": 7.789825230298481e-06, + "loss": 0.5042, + "step": 11213 + }, + { + "epoch": 1.75, + "grad_norm": 24.621927666949734, + "learning_rate": 7.788180567662031e-06, + "loss": 0.5363, + "step": 11214 + }, + { + "epoch": 1.75, + "grad_norm": 13.111166937280643, + "learning_rate": 7.786535967928437e-06, + "loss": 0.5568, + "step": 11215 + }, + { + "epoch": 1.75, + "grad_norm": 19.22113113849329, + "learning_rate": 7.784891431144459e-06, + "loss": 0.5156, + "step": 11216 + }, + { + "epoch": 1.75, + "grad_norm": 16.3711243235276, + "learning_rate": 7.783246957356876e-06, + "loss": 0.4867, + "step": 11217 + }, + { + "epoch": 1.75, + "grad_norm": 19.481162776941453, + "learning_rate": 7.781602546612447e-06, + "loss": 0.5881, + "step": 11218 + }, + { + "epoch": 1.75, + "grad_norm": 18.39490523396599, + "learning_rate": 7.779958198957951e-06, + "loss": 0.5699, + "step": 11219 + }, + { + "epoch": 1.75, + "grad_norm": 18.988333990215676, + "learning_rate": 7.778313914440143e-06, + "loss": 0.5531, + "step": 11220 + }, + { + "epoch": 1.75, + "grad_norm": 26.344981249615987, + "learning_rate": 7.776669693105786e-06, + "loss": 0.6281, + "step": 11221 + }, + { + "epoch": 1.75, + "grad_norm": 20.98876758312302, + "learning_rate": 7.775025535001643e-06, + "loss": 0.6004, + "step": 11222 + }, + { + "epoch": 1.75, + "grad_norm": 15.00271330621962, + "learning_rate": 7.773381440174473e-06, + "loss": 0.5866, + "step": 11223 + }, + { + "epoch": 1.75, + "grad_norm": 17.538014427764445, + "learning_rate": 7.771737408671034e-06, + "loss": 0.5326, + "step": 11224 + }, + { + "epoch": 1.75, + "grad_norm": 13.40138392845931, + "learning_rate": 7.770093440538076e-06, + "loss": 0.6238, + "step": 11225 + }, + { + "epoch": 1.75, + "grad_norm": 19.467097297984274, + "learning_rate": 7.768449535822357e-06, + "loss": 0.5971, + "step": 11226 + }, + { + "epoch": 1.75, + "grad_norm": 16.94261482295833, + "learning_rate": 7.766805694570629e-06, + "loss": 0.5379, + "step": 11227 + }, + { + "epoch": 1.75, + "grad_norm": 16.03707658929044, + "learning_rate": 7.76516191682964e-06, + "loss": 0.5704, + "step": 11228 + }, + { + "epoch": 1.75, + "grad_norm": 22.489801493630956, + "learning_rate": 7.76351820264614e-06, + "loss": 0.5618, + "step": 11229 + }, + { + "epoch": 1.75, + "grad_norm": 39.26357738622217, + "learning_rate": 7.761874552066873e-06, + "loss": 0.6505, + "step": 11230 + }, + { + "epoch": 1.75, + "grad_norm": 30.675719590576257, + "learning_rate": 7.760230965138582e-06, + "loss": 0.6188, + "step": 11231 + }, + { + "epoch": 1.75, + "grad_norm": 18.640713466872427, + "learning_rate": 7.758587441908014e-06, + "loss": 0.5371, + "step": 11232 + }, + { + "epoch": 1.75, + "grad_norm": 29.7058679482082, + "learning_rate": 7.756943982421907e-06, + "loss": 0.6301, + "step": 11233 + }, + { + "epoch": 1.75, + "grad_norm": 17.67222206237001, + "learning_rate": 7.755300586727007e-06, + "loss": 0.5135, + "step": 11234 + }, + { + "epoch": 1.75, + "grad_norm": 19.478443852101808, + "learning_rate": 7.753657254870038e-06, + "loss": 0.576, + "step": 11235 + }, + { + "epoch": 1.76, + "grad_norm": 32.647392756461734, + "learning_rate": 7.752013986897747e-06, + "loss": 0.6269, + "step": 11236 + }, + { + "epoch": 1.76, + 
"grad_norm": 15.111928499682369, + "learning_rate": 7.750370782856858e-06, + "loss": 0.4867, + "step": 11237 + }, + { + "epoch": 1.76, + "grad_norm": 17.0842524601561, + "learning_rate": 7.748727642794111e-06, + "loss": 0.5763, + "step": 11238 + }, + { + "epoch": 1.76, + "grad_norm": 23.249422957709474, + "learning_rate": 7.747084566756237e-06, + "loss": 0.6144, + "step": 11239 + }, + { + "epoch": 1.76, + "grad_norm": 21.60997163917072, + "learning_rate": 7.745441554789956e-06, + "loss": 0.4776, + "step": 11240 + }, + { + "epoch": 1.76, + "grad_norm": 9.54723510593718, + "learning_rate": 7.743798606941997e-06, + "loss": 0.5605, + "step": 11241 + }, + { + "epoch": 1.76, + "grad_norm": 17.03156441258331, + "learning_rate": 7.742155723259086e-06, + "loss": 0.6029, + "step": 11242 + }, + { + "epoch": 1.76, + "grad_norm": 15.844558310768933, + "learning_rate": 7.740512903787945e-06, + "loss": 0.5783, + "step": 11243 + }, + { + "epoch": 1.76, + "grad_norm": 20.290430426064916, + "learning_rate": 7.738870148575299e-06, + "loss": 0.5824, + "step": 11244 + }, + { + "epoch": 1.76, + "grad_norm": 14.021835985888213, + "learning_rate": 7.737227457667861e-06, + "loss": 0.6041, + "step": 11245 + }, + { + "epoch": 1.76, + "grad_norm": 21.910213307840092, + "learning_rate": 7.735584831112352e-06, + "loss": 0.5481, + "step": 11246 + }, + { + "epoch": 1.76, + "grad_norm": 23.54598192117341, + "learning_rate": 7.733942268955482e-06, + "loss": 0.5601, + "step": 11247 + }, + { + "epoch": 1.76, + "grad_norm": 15.875992057808219, + "learning_rate": 7.732299771243972e-06, + "loss": 0.6435, + "step": 11248 + }, + { + "epoch": 1.76, + "grad_norm": 20.111678844400902, + "learning_rate": 7.73065733802453e-06, + "loss": 0.5505, + "step": 11249 + }, + { + "epoch": 1.76, + "grad_norm": 20.713366736911624, + "learning_rate": 7.729014969343863e-06, + "loss": 0.5367, + "step": 11250 + }, + { + "epoch": 1.76, + "grad_norm": 14.51284570335611, + "learning_rate": 7.727372665248682e-06, + "loss": 0.5855, + "step": 11251 + }, + { + "epoch": 1.76, + "grad_norm": 25.156948780045354, + "learning_rate": 7.725730425785694e-06, + "loss": 0.6051, + "step": 11252 + }, + { + "epoch": 1.76, + "grad_norm": 19.73512527900115, + "learning_rate": 7.724088251001602e-06, + "loss": 0.5376, + "step": 11253 + }, + { + "epoch": 1.76, + "grad_norm": 15.11760176748968, + "learning_rate": 7.722446140943111e-06, + "loss": 0.5813, + "step": 11254 + }, + { + "epoch": 1.76, + "grad_norm": 16.03475619663871, + "learning_rate": 7.720804095656918e-06, + "loss": 0.4859, + "step": 11255 + }, + { + "epoch": 1.76, + "grad_norm": 14.216014199055966, + "learning_rate": 7.71916211518972e-06, + "loss": 0.5153, + "step": 11256 + }, + { + "epoch": 1.76, + "grad_norm": 18.174982535027215, + "learning_rate": 7.717520199588222e-06, + "loss": 0.5669, + "step": 11257 + }, + { + "epoch": 1.76, + "grad_norm": 18.71978624911367, + "learning_rate": 7.71587834889911e-06, + "loss": 0.5883, + "step": 11258 + }, + { + "epoch": 1.76, + "grad_norm": 17.758882855347803, + "learning_rate": 7.714236563169088e-06, + "loss": 0.4991, + "step": 11259 + }, + { + "epoch": 1.76, + "grad_norm": 19.58478125235607, + "learning_rate": 7.712594842444836e-06, + "loss": 0.4861, + "step": 11260 + }, + { + "epoch": 1.76, + "grad_norm": 14.183877571773113, + "learning_rate": 7.710953186773048e-06, + "loss": 0.6171, + "step": 11261 + }, + { + "epoch": 1.76, + "grad_norm": 13.82640345035016, + "learning_rate": 7.709311596200412e-06, + "loss": 0.5654, + "step": 11262 + }, + { + "epoch": 1.76, + 
"grad_norm": 30.559027839765587, + "learning_rate": 7.707670070773616e-06, + "loss": 0.5939, + "step": 11263 + }, + { + "epoch": 1.76, + "grad_norm": 28.142003183966946, + "learning_rate": 7.706028610539345e-06, + "loss": 0.6379, + "step": 11264 + }, + { + "epoch": 1.76, + "grad_norm": 18.554621440569086, + "learning_rate": 7.704387215544276e-06, + "loss": 0.5122, + "step": 11265 + }, + { + "epoch": 1.76, + "grad_norm": 21.55907975554662, + "learning_rate": 7.70274588583509e-06, + "loss": 0.5367, + "step": 11266 + }, + { + "epoch": 1.76, + "grad_norm": 17.607315172325947, + "learning_rate": 7.701104621458467e-06, + "loss": 0.5735, + "step": 11267 + }, + { + "epoch": 1.76, + "grad_norm": 19.2734713567615, + "learning_rate": 7.69946342246109e-06, + "loss": 0.5533, + "step": 11268 + }, + { + "epoch": 1.76, + "grad_norm": 19.717117067772456, + "learning_rate": 7.697822288889617e-06, + "loss": 0.5291, + "step": 11269 + }, + { + "epoch": 1.76, + "grad_norm": 19.384736506442632, + "learning_rate": 7.69618122079074e-06, + "loss": 0.5018, + "step": 11270 + }, + { + "epoch": 1.76, + "grad_norm": 23.973631029852697, + "learning_rate": 7.694540218211117e-06, + "loss": 0.563, + "step": 11271 + }, + { + "epoch": 1.76, + "grad_norm": 18.628703270044507, + "learning_rate": 7.692899281197421e-06, + "loss": 0.4823, + "step": 11272 + }, + { + "epoch": 1.76, + "grad_norm": 17.57420443406871, + "learning_rate": 7.691258409796324e-06, + "loss": 0.4832, + "step": 11273 + }, + { + "epoch": 1.76, + "grad_norm": 13.447646746363091, + "learning_rate": 7.689617604054487e-06, + "loss": 0.5394, + "step": 11274 + }, + { + "epoch": 1.76, + "grad_norm": 19.838827575073203, + "learning_rate": 7.68797686401857e-06, + "loss": 0.5503, + "step": 11275 + }, + { + "epoch": 1.76, + "grad_norm": 19.091192857792542, + "learning_rate": 7.68633618973524e-06, + "loss": 0.4895, + "step": 11276 + }, + { + "epoch": 1.76, + "grad_norm": 20.817442342235797, + "learning_rate": 7.684695581251153e-06, + "loss": 0.5912, + "step": 11277 + }, + { + "epoch": 1.76, + "grad_norm": 23.593332278109564, + "learning_rate": 7.683055038612977e-06, + "loss": 0.6045, + "step": 11278 + }, + { + "epoch": 1.76, + "grad_norm": 21.482524264990158, + "learning_rate": 7.681414561867353e-06, + "loss": 0.4914, + "step": 11279 + }, + { + "epoch": 1.76, + "grad_norm": 23.650770275846927, + "learning_rate": 7.679774151060945e-06, + "loss": 0.5341, + "step": 11280 + }, + { + "epoch": 1.76, + "grad_norm": 25.13054877720958, + "learning_rate": 7.6781338062404e-06, + "loss": 0.5412, + "step": 11281 + }, + { + "epoch": 1.76, + "grad_norm": 21.12693658041485, + "learning_rate": 7.676493527452374e-06, + "loss": 0.5932, + "step": 11282 + }, + { + "epoch": 1.76, + "grad_norm": 19.663614451730098, + "learning_rate": 7.674853314743516e-06, + "loss": 0.4899, + "step": 11283 + }, + { + "epoch": 1.76, + "grad_norm": 24.244453653723927, + "learning_rate": 7.673213168160465e-06, + "loss": 0.5234, + "step": 11284 + }, + { + "epoch": 1.76, + "grad_norm": 11.741632808532874, + "learning_rate": 7.671573087749871e-06, + "loss": 0.4883, + "step": 11285 + }, + { + "epoch": 1.76, + "grad_norm": 16.626228054121917, + "learning_rate": 7.669933073558377e-06, + "loss": 0.6313, + "step": 11286 + }, + { + "epoch": 1.76, + "grad_norm": 16.419755841583367, + "learning_rate": 7.668293125632621e-06, + "loss": 0.5558, + "step": 11287 + }, + { + "epoch": 1.76, + "grad_norm": 24.203634339225513, + "learning_rate": 7.66665324401925e-06, + "loss": 0.6161, + "step": 11288 + }, + { + "epoch": 1.76, + 
"grad_norm": 24.611350398632172, + "learning_rate": 7.665013428764892e-06, + "loss": 0.5002, + "step": 11289 + }, + { + "epoch": 1.76, + "grad_norm": 24.788121986761205, + "learning_rate": 7.663373679916187e-06, + "loss": 0.5877, + "step": 11290 + }, + { + "epoch": 1.76, + "grad_norm": 45.5453902615997, + "learning_rate": 7.661733997519765e-06, + "loss": 0.676, + "step": 11291 + }, + { + "epoch": 1.76, + "grad_norm": 21.680113970711655, + "learning_rate": 7.660094381622265e-06, + "loss": 0.5259, + "step": 11292 + }, + { + "epoch": 1.76, + "grad_norm": 18.286117485253868, + "learning_rate": 7.658454832270312e-06, + "loss": 0.5003, + "step": 11293 + }, + { + "epoch": 1.76, + "grad_norm": 18.25049209012945, + "learning_rate": 7.656815349510528e-06, + "loss": 0.5075, + "step": 11294 + }, + { + "epoch": 1.76, + "grad_norm": 24.28092716007329, + "learning_rate": 7.65517593338955e-06, + "loss": 0.5332, + "step": 11295 + }, + { + "epoch": 1.76, + "grad_norm": 26.04700770455865, + "learning_rate": 7.653536583953993e-06, + "loss": 0.593, + "step": 11296 + }, + { + "epoch": 1.76, + "grad_norm": 18.678745228459746, + "learning_rate": 7.651897301250484e-06, + "loss": 0.5501, + "step": 11297 + }, + { + "epoch": 1.76, + "grad_norm": 13.003487137617729, + "learning_rate": 7.650258085325646e-06, + "loss": 0.5254, + "step": 11298 + }, + { + "epoch": 1.76, + "grad_norm": 18.865165146944452, + "learning_rate": 7.64861893622609e-06, + "loss": 0.5383, + "step": 11299 + }, + { + "epoch": 1.77, + "grad_norm": 18.664046491118665, + "learning_rate": 7.646979853998434e-06, + "loss": 0.5551, + "step": 11300 + }, + { + "epoch": 1.77, + "grad_norm": 20.161354677915533, + "learning_rate": 7.645340838689297e-06, + "loss": 0.5478, + "step": 11301 + }, + { + "epoch": 1.77, + "grad_norm": 17.640657624247467, + "learning_rate": 7.643701890345288e-06, + "loss": 0.5305, + "step": 11302 + }, + { + "epoch": 1.77, + "grad_norm": 18.743547150944273, + "learning_rate": 7.642063009013025e-06, + "loss": 0.5061, + "step": 11303 + }, + { + "epoch": 1.77, + "grad_norm": 21.06998142425038, + "learning_rate": 7.640424194739102e-06, + "loss": 0.5848, + "step": 11304 + }, + { + "epoch": 1.77, + "grad_norm": 22.29649272435952, + "learning_rate": 7.63878544757014e-06, + "loss": 0.6403, + "step": 11305 + }, + { + "epoch": 1.77, + "grad_norm": 13.715199545490968, + "learning_rate": 7.637146767552735e-06, + "loss": 0.5738, + "step": 11306 + }, + { + "epoch": 1.77, + "grad_norm": 25.079013950718778, + "learning_rate": 7.635508154733494e-06, + "loss": 0.5993, + "step": 11307 + }, + { + "epoch": 1.77, + "grad_norm": 18.884186294227668, + "learning_rate": 7.633869609159023e-06, + "loss": 0.5479, + "step": 11308 + }, + { + "epoch": 1.77, + "grad_norm": 16.411200973428215, + "learning_rate": 7.632231130875912e-06, + "loss": 0.4966, + "step": 11309 + }, + { + "epoch": 1.77, + "grad_norm": 21.025665686084285, + "learning_rate": 7.63059271993076e-06, + "loss": 0.493, + "step": 11310 + }, + { + "epoch": 1.77, + "grad_norm": 19.17998847181433, + "learning_rate": 7.628954376370169e-06, + "loss": 0.5698, + "step": 11311 + }, + { + "epoch": 1.77, + "grad_norm": 20.30178264902838, + "learning_rate": 7.627316100240726e-06, + "loss": 0.62, + "step": 11312 + }, + { + "epoch": 1.77, + "grad_norm": 19.115320294558252, + "learning_rate": 7.625677891589029e-06, + "loss": 0.546, + "step": 11313 + }, + { + "epoch": 1.77, + "grad_norm": 10.920930267645089, + "learning_rate": 7.6240397504616615e-06, + "loss": 0.5092, + "step": 11314 + }, + { + "epoch": 1.77, + 
"grad_norm": 23.25450947570427, + "learning_rate": 7.622401676905214e-06, + "loss": 0.5265, + "step": 11315 + }, + { + "epoch": 1.77, + "grad_norm": 22.207353591928037, + "learning_rate": 7.6207636709662694e-06, + "loss": 0.5301, + "step": 11316 + }, + { + "epoch": 1.77, + "grad_norm": 21.44018048163546, + "learning_rate": 7.619125732691419e-06, + "loss": 0.5206, + "step": 11317 + }, + { + "epoch": 1.77, + "grad_norm": 29.436667042284412, + "learning_rate": 7.617487862127238e-06, + "loss": 0.6249, + "step": 11318 + }, + { + "epoch": 1.77, + "grad_norm": 20.89768052200801, + "learning_rate": 7.615850059320306e-06, + "loss": 0.5658, + "step": 11319 + }, + { + "epoch": 1.77, + "grad_norm": 15.396333062932055, + "learning_rate": 7.614212324317205e-06, + "loss": 0.5533, + "step": 11320 + }, + { + "epoch": 1.77, + "grad_norm": 15.466935711726588, + "learning_rate": 7.612574657164509e-06, + "loss": 0.5844, + "step": 11321 + }, + { + "epoch": 1.77, + "grad_norm": 15.202536005017654, + "learning_rate": 7.610937057908798e-06, + "loss": 0.5077, + "step": 11322 + }, + { + "epoch": 1.77, + "grad_norm": 17.427338583373395, + "learning_rate": 7.609299526596634e-06, + "loss": 0.5474, + "step": 11323 + }, + { + "epoch": 1.77, + "grad_norm": 20.521451742194035, + "learning_rate": 7.607662063274595e-06, + "loss": 0.5075, + "step": 11324 + }, + { + "epoch": 1.77, + "grad_norm": 17.76787564709022, + "learning_rate": 7.606024667989246e-06, + "loss": 0.5888, + "step": 11325 + }, + { + "epoch": 1.77, + "grad_norm": 18.35670487486769, + "learning_rate": 7.604387340787156e-06, + "loss": 0.6043, + "step": 11326 + }, + { + "epoch": 1.77, + "grad_norm": 16.377373961453696, + "learning_rate": 7.6027500817148915e-06, + "loss": 0.5456, + "step": 11327 + }, + { + "epoch": 1.77, + "grad_norm": 24.70214588944195, + "learning_rate": 7.6011128908190105e-06, + "loss": 0.5512, + "step": 11328 + }, + { + "epoch": 1.77, + "grad_norm": 21.17046677526658, + "learning_rate": 7.599475768146072e-06, + "loss": 0.5254, + "step": 11329 + }, + { + "epoch": 1.77, + "grad_norm": 28.881291500838543, + "learning_rate": 7.5978387137426425e-06, + "loss": 0.5213, + "step": 11330 + }, + { + "epoch": 1.77, + "grad_norm": 27.06366082855771, + "learning_rate": 7.59620172765527e-06, + "loss": 0.6224, + "step": 11331 + }, + { + "epoch": 1.77, + "grad_norm": 17.986773955641482, + "learning_rate": 7.5945648099305206e-06, + "loss": 0.536, + "step": 11332 + }, + { + "epoch": 1.77, + "grad_norm": 13.104959198871104, + "learning_rate": 7.592927960614939e-06, + "loss": 0.5259, + "step": 11333 + }, + { + "epoch": 1.77, + "grad_norm": 27.427609701510946, + "learning_rate": 7.591291179755077e-06, + "loss": 0.5567, + "step": 11334 + }, + { + "epoch": 1.77, + "grad_norm": 30.908048655047114, + "learning_rate": 7.5896544673974825e-06, + "loss": 0.6181, + "step": 11335 + }, + { + "epoch": 1.77, + "grad_norm": 13.155516628645152, + "learning_rate": 7.588017823588707e-06, + "loss": 0.5286, + "step": 11336 + }, + { + "epoch": 1.77, + "grad_norm": 13.862126498204118, + "learning_rate": 7.586381248375296e-06, + "loss": 0.542, + "step": 11337 + }, + { + "epoch": 1.77, + "grad_norm": 22.68626587114657, + "learning_rate": 7.584744741803787e-06, + "loss": 0.6334, + "step": 11338 + }, + { + "epoch": 1.77, + "grad_norm": 13.741204965630933, + "learning_rate": 7.583108303920726e-06, + "loss": 0.5653, + "step": 11339 + }, + { + "epoch": 1.77, + "grad_norm": 16.622610320171088, + "learning_rate": 7.581471934772649e-06, + "loss": 0.5145, + "step": 11340 + }, + { + "epoch": 1.77, 
+ "grad_norm": 26.460607001777873, + "learning_rate": 7.579835634406097e-06, + "loss": 0.5746, + "step": 11341 + }, + { + "epoch": 1.77, + "grad_norm": 17.09364872715884, + "learning_rate": 7.578199402867607e-06, + "loss": 0.5498, + "step": 11342 + }, + { + "epoch": 1.77, + "grad_norm": 22.428643651102213, + "learning_rate": 7.576563240203707e-06, + "loss": 0.5437, + "step": 11343 + }, + { + "epoch": 1.77, + "grad_norm": 27.391532778453815, + "learning_rate": 7.574927146460928e-06, + "loss": 0.7232, + "step": 11344 + }, + { + "epoch": 1.77, + "grad_norm": 12.036940668630008, + "learning_rate": 7.573291121685805e-06, + "loss": 0.5555, + "step": 11345 + }, + { + "epoch": 1.77, + "grad_norm": 14.541138106240838, + "learning_rate": 7.571655165924861e-06, + "loss": 0.5121, + "step": 11346 + }, + { + "epoch": 1.77, + "grad_norm": 18.702217574801235, + "learning_rate": 7.570019279224629e-06, + "loss": 0.5851, + "step": 11347 + }, + { + "epoch": 1.77, + "grad_norm": 21.994134952919435, + "learning_rate": 7.568383461631621e-06, + "loss": 0.5711, + "step": 11348 + }, + { + "epoch": 1.77, + "grad_norm": 18.010437819121165, + "learning_rate": 7.566747713192368e-06, + "loss": 0.5566, + "step": 11349 + }, + { + "epoch": 1.77, + "grad_norm": 14.631605681039963, + "learning_rate": 7.5651120339533836e-06, + "loss": 0.5361, + "step": 11350 + }, + { + "epoch": 1.77, + "grad_norm": 26.537630574268555, + "learning_rate": 7.56347642396119e-06, + "loss": 0.4726, + "step": 11351 + }, + { + "epoch": 1.77, + "grad_norm": 16.490625056277086, + "learning_rate": 7.5618408832623044e-06, + "loss": 0.5939, + "step": 11352 + }, + { + "epoch": 1.77, + "grad_norm": 19.01667213887202, + "learning_rate": 7.5602054119032364e-06, + "loss": 0.581, + "step": 11353 + }, + { + "epoch": 1.77, + "grad_norm": 18.18123170895015, + "learning_rate": 7.558570009930497e-06, + "loss": 0.5122, + "step": 11354 + }, + { + "epoch": 1.77, + "grad_norm": 13.46694993671408, + "learning_rate": 7.556934677390599e-06, + "loss": 0.5276, + "step": 11355 + }, + { + "epoch": 1.77, + "grad_norm": 15.678534258165081, + "learning_rate": 7.555299414330048e-06, + "loss": 0.5347, + "step": 11356 + }, + { + "epoch": 1.77, + "grad_norm": 22.84877471547157, + "learning_rate": 7.553664220795357e-06, + "loss": 0.624, + "step": 11357 + }, + { + "epoch": 1.77, + "grad_norm": 19.20675257685648, + "learning_rate": 7.5520290968330226e-06, + "loss": 0.5292, + "step": 11358 + }, + { + "epoch": 1.77, + "grad_norm": 16.244811129173595, + "learning_rate": 7.550394042489547e-06, + "loss": 0.4978, + "step": 11359 + }, + { + "epoch": 1.77, + "grad_norm": 22.691635399920745, + "learning_rate": 7.54875905781143e-06, + "loss": 0.5368, + "step": 11360 + }, + { + "epoch": 1.77, + "grad_norm": 21.108495215344774, + "learning_rate": 7.547124142845174e-06, + "loss": 0.5894, + "step": 11361 + }, + { + "epoch": 1.77, + "grad_norm": 19.787358983628412, + "learning_rate": 7.545489297637275e-06, + "loss": 0.5217, + "step": 11362 + }, + { + "epoch": 1.77, + "grad_norm": 16.75864957839539, + "learning_rate": 7.54385452223422e-06, + "loss": 0.5147, + "step": 11363 + }, + { + "epoch": 1.78, + "grad_norm": 17.827183936404236, + "learning_rate": 7.542219816682508e-06, + "loss": 0.5432, + "step": 11364 + }, + { + "epoch": 1.78, + "grad_norm": 13.936623261335924, + "learning_rate": 7.5405851810286236e-06, + "loss": 0.52, + "step": 11365 + }, + { + "epoch": 1.78, + "grad_norm": 31.788461221524262, + "learning_rate": 7.538950615319065e-06, + "loss": 0.5846, + "step": 11366 + }, + { + "epoch": 1.78, + 
"grad_norm": 18.16915610236504, + "learning_rate": 7.537316119600305e-06, + "loss": 0.4864, + "step": 11367 + }, + { + "epoch": 1.78, + "grad_norm": 26.443898616504786, + "learning_rate": 7.535681693918836e-06, + "loss": 0.6274, + "step": 11368 + }, + { + "epoch": 1.78, + "grad_norm": 20.05773937853591, + "learning_rate": 7.534047338321135e-06, + "loss": 0.6017, + "step": 11369 + }, + { + "epoch": 1.78, + "grad_norm": 16.0723196411894, + "learning_rate": 7.5324130528536885e-06, + "loss": 0.544, + "step": 11370 + }, + { + "epoch": 1.78, + "grad_norm": 16.615076700172942, + "learning_rate": 7.530778837562973e-06, + "loss": 0.5261, + "step": 11371 + }, + { + "epoch": 1.78, + "grad_norm": 17.103055651792662, + "learning_rate": 7.5291446924954604e-06, + "loss": 0.5812, + "step": 11372 + }, + { + "epoch": 1.78, + "grad_norm": 24.764978800505, + "learning_rate": 7.527510617697627e-06, + "loss": 0.5989, + "step": 11373 + }, + { + "epoch": 1.78, + "grad_norm": 14.735712433650383, + "learning_rate": 7.525876613215944e-06, + "loss": 0.5288, + "step": 11374 + }, + { + "epoch": 1.78, + "grad_norm": 19.60952320707782, + "learning_rate": 7.524242679096884e-06, + "loss": 0.5349, + "step": 11375 + }, + { + "epoch": 1.78, + "grad_norm": 15.33266028845027, + "learning_rate": 7.522608815386916e-06, + "loss": 0.5461, + "step": 11376 + }, + { + "epoch": 1.78, + "grad_norm": 39.29299032375867, + "learning_rate": 7.520975022132503e-06, + "loss": 0.6018, + "step": 11377 + }, + { + "epoch": 1.78, + "grad_norm": 20.944527482201728, + "learning_rate": 7.5193412993801096e-06, + "loss": 0.5803, + "step": 11378 + }, + { + "epoch": 1.78, + "grad_norm": 16.319588604965567, + "learning_rate": 7.517707647176197e-06, + "loss": 0.5136, + "step": 11379 + }, + { + "epoch": 1.78, + "grad_norm": 17.154907010275167, + "learning_rate": 7.5160740655672285e-06, + "loss": 0.5337, + "step": 11380 + }, + { + "epoch": 1.78, + "grad_norm": 35.0472940895238, + "learning_rate": 7.514440554599662e-06, + "loss": 0.6092, + "step": 11381 + }, + { + "epoch": 1.78, + "grad_norm": 12.427804427824013, + "learning_rate": 7.5128071143199485e-06, + "loss": 0.4463, + "step": 11382 + }, + { + "epoch": 1.78, + "grad_norm": 28.468810734647416, + "learning_rate": 7.511173744774548e-06, + "loss": 0.6599, + "step": 11383 + }, + { + "epoch": 1.78, + "grad_norm": 22.806755517760198, + "learning_rate": 7.509540446009909e-06, + "loss": 0.5261, + "step": 11384 + }, + { + "epoch": 1.78, + "grad_norm": 24.650154336906553, + "learning_rate": 7.5079072180724834e-06, + "loss": 0.5738, + "step": 11385 + }, + { + "epoch": 1.78, + "grad_norm": 20.552376672694674, + "learning_rate": 7.506274061008721e-06, + "loss": 0.5766, + "step": 11386 + }, + { + "epoch": 1.78, + "grad_norm": 18.52665775612717, + "learning_rate": 7.504640974865065e-06, + "loss": 0.5652, + "step": 11387 + }, + { + "epoch": 1.78, + "grad_norm": 18.364114213791307, + "learning_rate": 7.503007959687956e-06, + "loss": 0.5082, + "step": 11388 + }, + { + "epoch": 1.78, + "grad_norm": 16.630305653625925, + "learning_rate": 7.501375015523843e-06, + "loss": 0.52, + "step": 11389 + }, + { + "epoch": 1.78, + "grad_norm": 24.990781722460444, + "learning_rate": 7.4997421424191595e-06, + "loss": 0.5826, + "step": 11390 + }, + { + "epoch": 1.78, + "grad_norm": 16.385785030547925, + "learning_rate": 7.498109340420354e-06, + "loss": 0.5185, + "step": 11391 + }, + { + "epoch": 1.78, + "grad_norm": 21.994585668737507, + "learning_rate": 7.496476609573851e-06, + "loss": 0.5332, + "step": 11392 + }, + { + "epoch": 1.78, + 
"grad_norm": 19.288584503548858, + "learning_rate": 7.494843949926088e-06, + "loss": 0.5416, + "step": 11393 + }, + { + "epoch": 1.78, + "grad_norm": 14.799751232809594, + "learning_rate": 7.493211361523496e-06, + "loss": 0.496, + "step": 11394 + }, + { + "epoch": 1.78, + "grad_norm": 24.41847322767009, + "learning_rate": 7.49157884441251e-06, + "loss": 0.5305, + "step": 11395 + }, + { + "epoch": 1.78, + "grad_norm": 18.666975920498224, + "learning_rate": 7.489946398639555e-06, + "loss": 0.557, + "step": 11396 + }, + { + "epoch": 1.78, + "grad_norm": 18.181201309500835, + "learning_rate": 7.488314024251055e-06, + "loss": 0.5348, + "step": 11397 + }, + { + "epoch": 1.78, + "grad_norm": 17.335047304902012, + "learning_rate": 7.486681721293432e-06, + "loss": 0.5491, + "step": 11398 + }, + { + "epoch": 1.78, + "grad_norm": 24.997225798224527, + "learning_rate": 7.485049489813112e-06, + "loss": 0.5846, + "step": 11399 + }, + { + "epoch": 1.78, + "grad_norm": 21.045463066134577, + "learning_rate": 7.483417329856513e-06, + "loss": 0.6344, + "step": 11400 + }, + { + "epoch": 1.78, + "grad_norm": 19.379279360863514, + "learning_rate": 7.481785241470055e-06, + "loss": 0.5108, + "step": 11401 + }, + { + "epoch": 1.78, + "grad_norm": 30.37656194493515, + "learning_rate": 7.48015322470015e-06, + "loss": 0.6035, + "step": 11402 + }, + { + "epoch": 1.78, + "grad_norm": 28.551575829058095, + "learning_rate": 7.478521279593213e-06, + "loss": 0.5514, + "step": 11403 + }, + { + "epoch": 1.78, + "grad_norm": 30.489734825170196, + "learning_rate": 7.476889406195656e-06, + "loss": 0.6415, + "step": 11404 + }, + { + "epoch": 1.78, + "grad_norm": 19.29606572975482, + "learning_rate": 7.4752576045538894e-06, + "loss": 0.5571, + "step": 11405 + }, + { + "epoch": 1.78, + "grad_norm": 24.47288837429332, + "learning_rate": 7.473625874714322e-06, + "loss": 0.5259, + "step": 11406 + }, + { + "epoch": 1.78, + "grad_norm": 26.90683662127291, + "learning_rate": 7.471994216723352e-06, + "loss": 0.6256, + "step": 11407 + }, + { + "epoch": 1.78, + "grad_norm": 28.548577318448874, + "learning_rate": 7.47036263062739e-06, + "loss": 0.5733, + "step": 11408 + }, + { + "epoch": 1.78, + "grad_norm": 24.489789801600327, + "learning_rate": 7.4687311164728315e-06, + "loss": 0.6144, + "step": 11409 + }, + { + "epoch": 1.78, + "grad_norm": 27.41217402065633, + "learning_rate": 7.467099674306083e-06, + "loss": 0.6344, + "step": 11410 + }, + { + "epoch": 1.78, + "grad_norm": 16.61036505489193, + "learning_rate": 7.46546830417354e-06, + "loss": 0.6296, + "step": 11411 + }, + { + "epoch": 1.78, + "grad_norm": 18.603542227338057, + "learning_rate": 7.463837006121593e-06, + "loss": 0.5792, + "step": 11412 + }, + { + "epoch": 1.78, + "grad_norm": 10.910512073640668, + "learning_rate": 7.462205780196637e-06, + "loss": 0.4822, + "step": 11413 + }, + { + "epoch": 1.78, + "grad_norm": 16.151463022633337, + "learning_rate": 7.460574626445065e-06, + "loss": 0.4946, + "step": 11414 + }, + { + "epoch": 1.78, + "grad_norm": 12.15470215668673, + "learning_rate": 7.458943544913266e-06, + "loss": 0.4978, + "step": 11415 + }, + { + "epoch": 1.78, + "grad_norm": 18.915036437568993, + "learning_rate": 7.457312535647627e-06, + "loss": 0.5425, + "step": 11416 + }, + { + "epoch": 1.78, + "grad_norm": 29.983166331350837, + "learning_rate": 7.455681598694529e-06, + "loss": 0.5607, + "step": 11417 + }, + { + "epoch": 1.78, + "grad_norm": 13.344378133859822, + "learning_rate": 7.454050734100358e-06, + "loss": 0.5259, + "step": 11418 + }, + { + "epoch": 1.78, + 
"grad_norm": 13.006090705488193, + "learning_rate": 7.452419941911495e-06, + "loss": 0.4909, + "step": 11419 + }, + { + "epoch": 1.78, + "grad_norm": 36.77931888197404, + "learning_rate": 7.450789222174322e-06, + "loss": 0.5748, + "step": 11420 + }, + { + "epoch": 1.78, + "grad_norm": 17.665778530168314, + "learning_rate": 7.449158574935209e-06, + "loss": 0.5435, + "step": 11421 + }, + { + "epoch": 1.78, + "grad_norm": 15.403401709261521, + "learning_rate": 7.4475280002405355e-06, + "loss": 0.4883, + "step": 11422 + }, + { + "epoch": 1.78, + "grad_norm": 19.565923136502217, + "learning_rate": 7.445897498136671e-06, + "loss": 0.5907, + "step": 11423 + }, + { + "epoch": 1.78, + "grad_norm": 24.554561143137953, + "learning_rate": 7.444267068669988e-06, + "loss": 0.5442, + "step": 11424 + }, + { + "epoch": 1.78, + "grad_norm": 15.825035906550575, + "learning_rate": 7.442636711886857e-06, + "loss": 0.5484, + "step": 11425 + }, + { + "epoch": 1.78, + "grad_norm": 17.329956392178588, + "learning_rate": 7.441006427833638e-06, + "loss": 0.4986, + "step": 11426 + }, + { + "epoch": 1.78, + "grad_norm": 25.342584678444084, + "learning_rate": 7.439376216556702e-06, + "loss": 0.6236, + "step": 11427 + }, + { + "epoch": 1.79, + "grad_norm": 16.12708209146642, + "learning_rate": 7.437746078102406e-06, + "loss": 0.5069, + "step": 11428 + }, + { + "epoch": 1.79, + "grad_norm": 13.136200042452291, + "learning_rate": 7.4361160125171135e-06, + "loss": 0.522, + "step": 11429 + }, + { + "epoch": 1.79, + "grad_norm": 17.614532100569896, + "learning_rate": 7.4344860198471845e-06, + "loss": 0.5555, + "step": 11430 + }, + { + "epoch": 1.79, + "grad_norm": 16.02168763759666, + "learning_rate": 7.432856100138971e-06, + "loss": 0.5387, + "step": 11431 + }, + { + "epoch": 1.79, + "grad_norm": 24.54406809162352, + "learning_rate": 7.431226253438826e-06, + "loss": 0.5536, + "step": 11432 + }, + { + "epoch": 1.79, + "grad_norm": 5.297411764849363, + "learning_rate": 7.4295964797931045e-06, + "loss": 0.5904, + "step": 11433 + }, + { + "epoch": 1.79, + "grad_norm": 17.62735902863111, + "learning_rate": 7.427966779248155e-06, + "loss": 0.5318, + "step": 11434 + }, + { + "epoch": 1.79, + "grad_norm": 12.214972048562355, + "learning_rate": 7.4263371518503314e-06, + "loss": 0.544, + "step": 11435 + }, + { + "epoch": 1.79, + "grad_norm": 19.4808209052573, + "learning_rate": 7.424707597645967e-06, + "loss": 0.5807, + "step": 11436 + }, + { + "epoch": 1.79, + "grad_norm": 23.361949452178724, + "learning_rate": 7.423078116681415e-06, + "loss": 0.5074, + "step": 11437 + }, + { + "epoch": 1.79, + "grad_norm": 20.813417483202834, + "learning_rate": 7.42144870900301e-06, + "loss": 0.5444, + "step": 11438 + }, + { + "epoch": 1.79, + "grad_norm": 19.101800580091467, + "learning_rate": 7.419819374657099e-06, + "loss": 0.4982, + "step": 11439 + }, + { + "epoch": 1.79, + "grad_norm": 17.75164397108254, + "learning_rate": 7.418190113690018e-06, + "loss": 0.5825, + "step": 11440 + }, + { + "epoch": 1.79, + "grad_norm": 27.168074552109232, + "learning_rate": 7.416560926148096e-06, + "loss": 0.5696, + "step": 11441 + }, + { + "epoch": 1.79, + "grad_norm": 24.425701566438384, + "learning_rate": 7.414931812077669e-06, + "loss": 0.5407, + "step": 11442 + }, + { + "epoch": 1.79, + "grad_norm": 16.379802725722428, + "learning_rate": 7.413302771525071e-06, + "loss": 0.5773, + "step": 11443 + }, + { + "epoch": 1.79, + "grad_norm": 17.548462701218444, + "learning_rate": 7.4116738045366275e-06, + "loss": 0.5163, + "step": 11444 + }, + { + "epoch": 1.79, 
+ "grad_norm": 15.31737987577868, + "learning_rate": 7.410044911158671e-06, + "loss": 0.4955, + "step": 11445 + }, + { + "epoch": 1.79, + "grad_norm": 14.409990786943924, + "learning_rate": 7.408416091437519e-06, + "loss": 0.5088, + "step": 11446 + }, + { + "epoch": 1.79, + "grad_norm": 21.85818226923015, + "learning_rate": 7.406787345419496e-06, + "loss": 0.4689, + "step": 11447 + }, + { + "epoch": 1.79, + "grad_norm": 40.5033319472836, + "learning_rate": 7.405158673150925e-06, + "loss": 0.5989, + "step": 11448 + }, + { + "epoch": 1.79, + "grad_norm": 23.966572658288015, + "learning_rate": 7.403530074678124e-06, + "loss": 0.6299, + "step": 11449 + }, + { + "epoch": 1.79, + "grad_norm": 26.144250024422643, + "learning_rate": 7.401901550047412e-06, + "loss": 0.5756, + "step": 11450 + }, + { + "epoch": 1.79, + "grad_norm": 41.9853284824794, + "learning_rate": 7.400273099305095e-06, + "loss": 0.5159, + "step": 11451 + }, + { + "epoch": 1.79, + "grad_norm": 17.188745112402138, + "learning_rate": 7.398644722497492e-06, + "loss": 0.58, + "step": 11452 + }, + { + "epoch": 1.79, + "grad_norm": 23.155762553715245, + "learning_rate": 7.39701641967091e-06, + "loss": 0.6029, + "step": 11453 + }, + { + "epoch": 1.79, + "grad_norm": 14.716441421418159, + "learning_rate": 7.39538819087166e-06, + "loss": 0.4462, + "step": 11454 + }, + { + "epoch": 1.79, + "grad_norm": 21.049621076851558, + "learning_rate": 7.393760036146049e-06, + "loss": 0.5585, + "step": 11455 + }, + { + "epoch": 1.79, + "grad_norm": 11.969821100140978, + "learning_rate": 7.392131955540375e-06, + "loss": 0.5433, + "step": 11456 + }, + { + "epoch": 1.79, + "grad_norm": 29.09790260854883, + "learning_rate": 7.39050394910094e-06, + "loss": 0.5265, + "step": 11457 + }, + { + "epoch": 1.79, + "grad_norm": 29.0838616271026, + "learning_rate": 7.388876016874049e-06, + "loss": 0.5435, + "step": 11458 + }, + { + "epoch": 1.79, + "grad_norm": 23.600742562606495, + "learning_rate": 7.3872481589059955e-06, + "loss": 0.5656, + "step": 11459 + }, + { + "epoch": 1.79, + "grad_norm": 19.187812296674355, + "learning_rate": 7.3856203752430815e-06, + "loss": 0.5298, + "step": 11460 + }, + { + "epoch": 1.79, + "grad_norm": 16.464390609315895, + "learning_rate": 7.383992665931587e-06, + "loss": 0.5759, + "step": 11461 + }, + { + "epoch": 1.79, + "grad_norm": 26.536347147839994, + "learning_rate": 7.382365031017815e-06, + "loss": 0.6713, + "step": 11462 + }, + { + "epoch": 1.79, + "grad_norm": 26.35868706113572, + "learning_rate": 7.380737470548047e-06, + "loss": 0.5641, + "step": 11463 + }, + { + "epoch": 1.79, + "grad_norm": 17.0942186094347, + "learning_rate": 7.379109984568578e-06, + "loss": 0.4985, + "step": 11464 + }, + { + "epoch": 1.79, + "grad_norm": 18.604261128864643, + "learning_rate": 7.377482573125685e-06, + "loss": 0.5172, + "step": 11465 + }, + { + "epoch": 1.79, + "grad_norm": 16.106487769309915, + "learning_rate": 7.375855236265653e-06, + "loss": 0.4756, + "step": 11466 + }, + { + "epoch": 1.79, + "grad_norm": 20.625194477840566, + "learning_rate": 7.374227974034762e-06, + "loss": 0.5454, + "step": 11467 + }, + { + "epoch": 1.79, + "grad_norm": 20.210040203417375, + "learning_rate": 7.372600786479292e-06, + "loss": 0.6038, + "step": 11468 + }, + { + "epoch": 1.79, + "grad_norm": 18.766965638741333, + "learning_rate": 7.370973673645523e-06, + "loss": 0.5922, + "step": 11469 + }, + { + "epoch": 1.79, + "grad_norm": 19.34738017269755, + "learning_rate": 7.369346635579719e-06, + "loss": 0.6014, + "step": 11470 + }, + { + "epoch": 1.79, + 
"grad_norm": 21.399821268375863, + "learning_rate": 7.36771967232816e-06, + "loss": 0.6503, + "step": 11471 + }, + { + "epoch": 1.79, + "grad_norm": 19.107412149332678, + "learning_rate": 7.366092783937112e-06, + "loss": 0.5697, + "step": 11472 + }, + { + "epoch": 1.79, + "grad_norm": 10.495972166645576, + "learning_rate": 7.364465970452846e-06, + "loss": 0.5355, + "step": 11473 + }, + { + "epoch": 1.79, + "grad_norm": 18.396476812833953, + "learning_rate": 7.362839231921629e-06, + "loss": 0.5564, + "step": 11474 + }, + { + "epoch": 1.79, + "grad_norm": 12.382363428583044, + "learning_rate": 7.36121256838972e-06, + "loss": 0.5647, + "step": 11475 + }, + { + "epoch": 1.79, + "grad_norm": 25.483142155800156, + "learning_rate": 7.359585979903378e-06, + "loss": 0.536, + "step": 11476 + }, + { + "epoch": 1.79, + "grad_norm": 21.117138162908805, + "learning_rate": 7.357959466508869e-06, + "loss": 0.5983, + "step": 11477 + }, + { + "epoch": 1.79, + "grad_norm": 24.221263222270228, + "learning_rate": 7.356333028252445e-06, + "loss": 0.6431, + "step": 11478 + }, + { + "epoch": 1.79, + "grad_norm": 19.757440566532612, + "learning_rate": 7.35470666518037e-06, + "loss": 0.538, + "step": 11479 + }, + { + "epoch": 1.79, + "grad_norm": 17.592670854771782, + "learning_rate": 7.353080377338884e-06, + "loss": 0.535, + "step": 11480 + }, + { + "epoch": 1.79, + "grad_norm": 24.04995887655184, + "learning_rate": 7.351454164774246e-06, + "loss": 0.535, + "step": 11481 + }, + { + "epoch": 1.79, + "grad_norm": 14.531234554569691, + "learning_rate": 7.349828027532697e-06, + "loss": 0.5391, + "step": 11482 + }, + { + "epoch": 1.79, + "grad_norm": 12.007842090547893, + "learning_rate": 7.348201965660493e-06, + "loss": 0.4662, + "step": 11483 + }, + { + "epoch": 1.79, + "grad_norm": 17.89742319054473, + "learning_rate": 7.346575979203876e-06, + "loss": 0.6283, + "step": 11484 + }, + { + "epoch": 1.79, + "grad_norm": 19.997308287685787, + "learning_rate": 7.344950068209085e-06, + "loss": 0.5943, + "step": 11485 + }, + { + "epoch": 1.79, + "grad_norm": 16.95218685340715, + "learning_rate": 7.343324232722358e-06, + "loss": 0.5205, + "step": 11486 + }, + { + "epoch": 1.79, + "grad_norm": 16.901178688783077, + "learning_rate": 7.341698472789937e-06, + "loss": 0.5508, + "step": 11487 + }, + { + "epoch": 1.79, + "grad_norm": 32.234490718439694, + "learning_rate": 7.340072788458054e-06, + "loss": 0.6428, + "step": 11488 + }, + { + "epoch": 1.79, + "grad_norm": 16.13596092864975, + "learning_rate": 7.33844717977295e-06, + "loss": 0.5074, + "step": 11489 + }, + { + "epoch": 1.79, + "grad_norm": 18.318878771675195, + "learning_rate": 7.336821646780848e-06, + "loss": 0.612, + "step": 11490 + }, + { + "epoch": 1.79, + "grad_norm": 18.813702198757596, + "learning_rate": 7.33519618952798e-06, + "loss": 0.5537, + "step": 11491 + }, + { + "epoch": 1.8, + "grad_norm": 33.8220451371144, + "learning_rate": 7.333570808060575e-06, + "loss": 0.6062, + "step": 11492 + }, + { + "epoch": 1.8, + "grad_norm": 18.838004598079962, + "learning_rate": 7.331945502424856e-06, + "loss": 0.5966, + "step": 11493 + }, + { + "epoch": 1.8, + "grad_norm": 23.606448647933572, + "learning_rate": 7.330320272667048e-06, + "loss": 0.5019, + "step": 11494 + }, + { + "epoch": 1.8, + "grad_norm": 24.672707554502434, + "learning_rate": 7.328695118833366e-06, + "loss": 0.5954, + "step": 11495 + }, + { + "epoch": 1.8, + "grad_norm": 19.290324147070788, + "learning_rate": 7.327070040970034e-06, + "loss": 0.5974, + "step": 11496 + }, + { + "epoch": 1.8, + "grad_norm": 
17.957349660297997, + "learning_rate": 7.325445039123264e-06, + "loss": 0.5039, + "step": 11497 + }, + { + "epoch": 1.8, + "grad_norm": 13.023864402707884, + "learning_rate": 7.323820113339275e-06, + "loss": 0.5635, + "step": 11498 + }, + { + "epoch": 1.8, + "grad_norm": 16.764988161436822, + "learning_rate": 7.32219526366428e-06, + "loss": 0.4926, + "step": 11499 + }, + { + "epoch": 1.8, + "grad_norm": 19.2724340484563, + "learning_rate": 7.320570490144481e-06, + "loss": 0.56, + "step": 11500 + }, + { + "epoch": 1.8, + "grad_norm": 14.689650348748872, + "learning_rate": 7.318945792826089e-06, + "loss": 0.4847, + "step": 11501 + }, + { + "epoch": 1.8, + "grad_norm": 16.03860160808342, + "learning_rate": 7.317321171755312e-06, + "loss": 0.5869, + "step": 11502 + }, + { + "epoch": 1.8, + "grad_norm": 21.71263439675476, + "learning_rate": 7.315696626978349e-06, + "loss": 0.582, + "step": 11503 + }, + { + "epoch": 1.8, + "grad_norm": 13.618109297616618, + "learning_rate": 7.31407215854141e-06, + "loss": 0.5609, + "step": 11504 + }, + { + "epoch": 1.8, + "grad_norm": 19.716229185459664, + "learning_rate": 7.312447766490682e-06, + "loss": 0.5531, + "step": 11505 + }, + { + "epoch": 1.8, + "grad_norm": 28.59327396194591, + "learning_rate": 7.310823450872368e-06, + "loss": 0.6018, + "step": 11506 + }, + { + "epoch": 1.8, + "grad_norm": 23.28109653729545, + "learning_rate": 7.309199211732662e-06, + "loss": 0.5789, + "step": 11507 + }, + { + "epoch": 1.8, + "grad_norm": 25.067616773364556, + "learning_rate": 7.307575049117758e-06, + "loss": 0.529, + "step": 11508 + }, + { + "epoch": 1.8, + "grad_norm": 12.129865554982795, + "learning_rate": 7.305950963073845e-06, + "loss": 0.5656, + "step": 11509 + }, + { + "epoch": 1.8, + "grad_norm": 18.387748238030866, + "learning_rate": 7.30432695364711e-06, + "loss": 0.5259, + "step": 11510 + }, + { + "epoch": 1.8, + "grad_norm": 24.88808603023079, + "learning_rate": 7.3027030208837365e-06, + "loss": 0.5565, + "step": 11511 + }, + { + "epoch": 1.8, + "grad_norm": 19.058768320662143, + "learning_rate": 7.301079164829914e-06, + "loss": 0.5268, + "step": 11512 + }, + { + "epoch": 1.8, + "grad_norm": 17.521769197177548, + "learning_rate": 7.299455385531824e-06, + "loss": 0.5398, + "step": 11513 + }, + { + "epoch": 1.8, + "grad_norm": 15.795225292172237, + "learning_rate": 7.297831683035638e-06, + "loss": 0.5016, + "step": 11514 + }, + { + "epoch": 1.8, + "grad_norm": 15.60226270664066, + "learning_rate": 7.296208057387539e-06, + "loss": 0.5119, + "step": 11515 + }, + { + "epoch": 1.8, + "grad_norm": 15.838540825132426, + "learning_rate": 7.294584508633702e-06, + "loss": 0.4934, + "step": 11516 + }, + { + "epoch": 1.8, + "grad_norm": 24.4410294206056, + "learning_rate": 7.292961036820299e-06, + "loss": 0.5948, + "step": 11517 + }, + { + "epoch": 1.8, + "grad_norm": 18.67166955667262, + "learning_rate": 7.291337641993503e-06, + "loss": 0.5506, + "step": 11518 + }, + { + "epoch": 1.8, + "grad_norm": 25.66219568880412, + "learning_rate": 7.289714324199477e-06, + "loss": 0.5738, + "step": 11519 + }, + { + "epoch": 1.8, + "grad_norm": 25.18033534041247, + "learning_rate": 7.28809108348439e-06, + "loss": 0.5238, + "step": 11520 + }, + { + "epoch": 1.8, + "grad_norm": 30.523125982537806, + "learning_rate": 7.2864679198944065e-06, + "loss": 0.5832, + "step": 11521 + }, + { + "epoch": 1.8, + "grad_norm": 20.100377477474808, + "learning_rate": 7.284844833475687e-06, + "loss": 0.6028, + "step": 11522 + }, + { + "epoch": 1.8, + "grad_norm": 19.453355127528393, + 
"learning_rate": 7.2832218242743976e-06, + "loss": 0.5583, + "step": 11523 + }, + { + "epoch": 1.8, + "grad_norm": 16.263042211913042, + "learning_rate": 7.281598892336685e-06, + "loss": 0.4963, + "step": 11524 + }, + { + "epoch": 1.8, + "grad_norm": 17.999787035483596, + "learning_rate": 7.279976037708712e-06, + "loss": 0.5325, + "step": 11525 + }, + { + "epoch": 1.8, + "grad_norm": 11.882125789505263, + "learning_rate": 7.278353260436626e-06, + "loss": 0.4867, + "step": 11526 + }, + { + "epoch": 1.8, + "grad_norm": 16.530496046922735, + "learning_rate": 7.276730560566583e-06, + "loss": 0.4782, + "step": 11527 + }, + { + "epoch": 1.8, + "grad_norm": 19.621875188426237, + "learning_rate": 7.275107938144732e-06, + "loss": 0.5357, + "step": 11528 + }, + { + "epoch": 1.8, + "grad_norm": 39.83509778942412, + "learning_rate": 7.273485393217217e-06, + "loss": 0.6171, + "step": 11529 + }, + { + "epoch": 1.8, + "grad_norm": 20.770321480678714, + "learning_rate": 7.2718629258301786e-06, + "loss": 0.5575, + "step": 11530 + }, + { + "epoch": 1.8, + "grad_norm": 15.559136687930513, + "learning_rate": 7.270240536029765e-06, + "loss": 0.5941, + "step": 11531 + }, + { + "epoch": 1.8, + "grad_norm": 22.22612460263177, + "learning_rate": 7.268618223862112e-06, + "loss": 0.5206, + "step": 11532 + }, + { + "epoch": 1.8, + "grad_norm": 30.797401464366427, + "learning_rate": 7.266995989373362e-06, + "loss": 0.6178, + "step": 11533 + }, + { + "epoch": 1.8, + "grad_norm": 15.26367688901969, + "learning_rate": 7.265373832609647e-06, + "loss": 0.4966, + "step": 11534 + }, + { + "epoch": 1.8, + "grad_norm": 15.968609561041333, + "learning_rate": 7.263751753617096e-06, + "loss": 0.4368, + "step": 11535 + }, + { + "epoch": 1.8, + "grad_norm": 12.96177088798451, + "learning_rate": 7.262129752441849e-06, + "loss": 0.5324, + "step": 11536 + }, + { + "epoch": 1.8, + "grad_norm": 34.13775319082707, + "learning_rate": 7.26050782913003e-06, + "loss": 0.5062, + "step": 11537 + }, + { + "epoch": 1.8, + "grad_norm": 15.345901915224099, + "learning_rate": 7.258885983727767e-06, + "loss": 0.5407, + "step": 11538 + }, + { + "epoch": 1.8, + "grad_norm": 18.20253414292048, + "learning_rate": 7.257264216281179e-06, + "loss": 0.5549, + "step": 11539 + }, + { + "epoch": 1.8, + "grad_norm": 16.602094976076273, + "learning_rate": 7.255642526836395e-06, + "loss": 0.5279, + "step": 11540 + }, + { + "epoch": 1.8, + "grad_norm": 19.932284910787157, + "learning_rate": 7.254020915439528e-06, + "loss": 0.5675, + "step": 11541 + }, + { + "epoch": 1.8, + "grad_norm": 23.63453724990942, + "learning_rate": 7.252399382136703e-06, + "loss": 0.5577, + "step": 11542 + }, + { + "epoch": 1.8, + "grad_norm": 29.52740181771988, + "learning_rate": 7.250777926974035e-06, + "loss": 0.5173, + "step": 11543 + }, + { + "epoch": 1.8, + "grad_norm": 19.16748262686469, + "learning_rate": 7.2491565499976335e-06, + "loss": 0.4873, + "step": 11544 + }, + { + "epoch": 1.8, + "grad_norm": 22.219209069393244, + "learning_rate": 7.247535251253606e-06, + "loss": 0.5414, + "step": 11545 + }, + { + "epoch": 1.8, + "grad_norm": 22.595532408326665, + "learning_rate": 7.245914030788069e-06, + "loss": 0.6052, + "step": 11546 + }, + { + "epoch": 1.8, + "grad_norm": 19.39046671400855, + "learning_rate": 7.2442928886471246e-06, + "loss": 0.4945, + "step": 11547 + }, + { + "epoch": 1.8, + "grad_norm": 14.75083033600665, + "learning_rate": 7.242671824876884e-06, + "loss": 0.493, + "step": 11548 + }, + { + "epoch": 1.8, + "grad_norm": 17.200690403828464, + "learning_rate": 
7.241050839523437e-06, + "loss": 0.532, + "step": 11549 + }, + { + "epoch": 1.8, + "grad_norm": 24.070537502632565, + "learning_rate": 7.239429932632892e-06, + "loss": 0.5129, + "step": 11550 + }, + { + "epoch": 1.8, + "grad_norm": 17.56690887688945, + "learning_rate": 7.237809104251343e-06, + "loss": 0.5329, + "step": 11551 + }, + { + "epoch": 1.8, + "grad_norm": 25.513129948030343, + "learning_rate": 7.23618835442489e-06, + "loss": 0.5995, + "step": 11552 + }, + { + "epoch": 1.8, + "grad_norm": 17.327299232516882, + "learning_rate": 7.234567683199624e-06, + "loss": 0.5152, + "step": 11553 + }, + { + "epoch": 1.8, + "grad_norm": 24.434607061388434, + "learning_rate": 7.232947090621633e-06, + "loss": 0.6099, + "step": 11554 + }, + { + "epoch": 1.8, + "grad_norm": 19.68106319469668, + "learning_rate": 7.23132657673701e-06, + "loss": 0.5755, + "step": 11555 + }, + { + "epoch": 1.81, + "grad_norm": 17.69053986252105, + "learning_rate": 7.2297061415918394e-06, + "loss": 0.5416, + "step": 11556 + }, + { + "epoch": 1.81, + "grad_norm": 25.534710592366903, + "learning_rate": 7.228085785232206e-06, + "loss": 0.538, + "step": 11557 + }, + { + "epoch": 1.81, + "grad_norm": 14.670140467561515, + "learning_rate": 7.226465507704189e-06, + "loss": 0.5287, + "step": 11558 + }, + { + "epoch": 1.81, + "grad_norm": 24.034391567416467, + "learning_rate": 7.224845309053872e-06, + "loss": 0.5244, + "step": 11559 + }, + { + "epoch": 1.81, + "grad_norm": 23.254307325144577, + "learning_rate": 7.2232251893273295e-06, + "loss": 0.554, + "step": 11560 + }, + { + "epoch": 1.81, + "grad_norm": 18.428917244941026, + "learning_rate": 7.2216051485706385e-06, + "loss": 0.6522, + "step": 11561 + }, + { + "epoch": 1.81, + "grad_norm": 15.35913087877865, + "learning_rate": 7.219985186829877e-06, + "loss": 0.5901, + "step": 11562 + }, + { + "epoch": 1.81, + "grad_norm": 13.886915921771122, + "learning_rate": 7.2183653041511045e-06, + "loss": 0.4914, + "step": 11563 + }, + { + "epoch": 1.81, + "grad_norm": 12.316887925659467, + "learning_rate": 7.216745500580396e-06, + "loss": 0.4751, + "step": 11564 + }, + { + "epoch": 1.81, + "grad_norm": 15.381351965388394, + "learning_rate": 7.215125776163818e-06, + "loss": 0.53, + "step": 11565 + }, + { + "epoch": 1.81, + "grad_norm": 22.229950311112308, + "learning_rate": 7.213506130947434e-06, + "loss": 0.5857, + "step": 11566 + }, + { + "epoch": 1.81, + "grad_norm": 14.602523431748542, + "learning_rate": 7.21188656497731e-06, + "loss": 0.4648, + "step": 11567 + }, + { + "epoch": 1.81, + "grad_norm": 18.704346830809087, + "learning_rate": 7.210267078299495e-06, + "loss": 0.6302, + "step": 11568 + }, + { + "epoch": 1.81, + "grad_norm": 17.25239431136035, + "learning_rate": 7.208647670960055e-06, + "loss": 0.5672, + "step": 11569 + }, + { + "epoch": 1.81, + "grad_norm": 24.816507427838555, + "learning_rate": 7.2070283430050405e-06, + "loss": 0.5629, + "step": 11570 + }, + { + "epoch": 1.81, + "grad_norm": 15.329995378554532, + "learning_rate": 7.205409094480506e-06, + "loss": 0.6058, + "step": 11571 + }, + { + "epoch": 1.81, + "grad_norm": 18.91797965965295, + "learning_rate": 7.203789925432507e-06, + "loss": 0.5032, + "step": 11572 + }, + { + "epoch": 1.81, + "grad_norm": 19.999618177196492, + "learning_rate": 7.202170835907083e-06, + "loss": 0.512, + "step": 11573 + }, + { + "epoch": 1.81, + "grad_norm": 19.35564411903794, + "learning_rate": 7.200551825950281e-06, + "loss": 0.5569, + "step": 11574 + }, + { + "epoch": 1.81, + "grad_norm": 25.16508216059275, + "learning_rate": 
7.198932895608153e-06, + "loss": 0.5442, + "step": 11575 + }, + { + "epoch": 1.81, + "grad_norm": 19.32847007128881, + "learning_rate": 7.197314044926729e-06, + "loss": 0.5186, + "step": 11576 + }, + { + "epoch": 1.81, + "grad_norm": 20.986982417213863, + "learning_rate": 7.1956952739520605e-06, + "loss": 0.6003, + "step": 11577 + }, + { + "epoch": 1.81, + "grad_norm": 16.43661062931299, + "learning_rate": 7.194076582730174e-06, + "loss": 0.5839, + "step": 11578 + }, + { + "epoch": 1.81, + "grad_norm": 19.077134116025597, + "learning_rate": 7.192457971307107e-06, + "loss": 0.4945, + "step": 11579 + }, + { + "epoch": 1.81, + "grad_norm": 21.3660693365981, + "learning_rate": 7.190839439728896e-06, + "loss": 0.6116, + "step": 11580 + }, + { + "epoch": 1.81, + "grad_norm": 15.944513625187975, + "learning_rate": 7.1892209880415665e-06, + "loss": 0.5307, + "step": 11581 + }, + { + "epoch": 1.81, + "grad_norm": 16.509966789598643, + "learning_rate": 7.18760261629115e-06, + "loss": 0.5284, + "step": 11582 + }, + { + "epoch": 1.81, + "grad_norm": 14.926267509656231, + "learning_rate": 7.185984324523667e-06, + "loss": 0.6299, + "step": 11583 + }, + { + "epoch": 1.81, + "grad_norm": 18.47240983617567, + "learning_rate": 7.184366112785144e-06, + "loss": 0.5129, + "step": 11584 + }, + { + "epoch": 1.81, + "grad_norm": 13.714475371439356, + "learning_rate": 7.1827479811216e-06, + "loss": 0.4596, + "step": 11585 + }, + { + "epoch": 1.81, + "grad_norm": 18.759201687900756, + "learning_rate": 7.181129929579058e-06, + "loss": 0.6174, + "step": 11586 + }, + { + "epoch": 1.81, + "grad_norm": 14.957772109820837, + "learning_rate": 7.179511958203535e-06, + "loss": 0.5165, + "step": 11587 + }, + { + "epoch": 1.81, + "grad_norm": 16.264233700982434, + "learning_rate": 7.177894067041038e-06, + "loss": 0.495, + "step": 11588 + }, + { + "epoch": 1.81, + "grad_norm": 14.501972470768314, + "learning_rate": 7.176276256137582e-06, + "loss": 0.5209, + "step": 11589 + }, + { + "epoch": 1.81, + "grad_norm": 19.99326854017606, + "learning_rate": 7.174658525539179e-06, + "loss": 0.5556, + "step": 11590 + }, + { + "epoch": 1.81, + "grad_norm": 18.133672481410485, + "learning_rate": 7.173040875291832e-06, + "loss": 0.5433, + "step": 11591 + }, + { + "epoch": 1.81, + "grad_norm": 25.741492951767032, + "learning_rate": 7.171423305441556e-06, + "loss": 0.6295, + "step": 11592 + }, + { + "epoch": 1.81, + "grad_norm": 15.711441463504482, + "learning_rate": 7.169805816034341e-06, + "loss": 0.5306, + "step": 11593 + }, + { + "epoch": 1.81, + "grad_norm": 17.01586118994831, + "learning_rate": 7.168188407116194e-06, + "loss": 0.4987, + "step": 11594 + }, + { + "epoch": 1.81, + "grad_norm": 18.13723441201615, + "learning_rate": 7.166571078733109e-06, + "loss": 0.5472, + "step": 11595 + }, + { + "epoch": 1.81, + "grad_norm": 22.416324911487486, + "learning_rate": 7.164953830931089e-06, + "loss": 0.5724, + "step": 11596 + }, + { + "epoch": 1.81, + "grad_norm": 23.745983958755303, + "learning_rate": 7.1633366637561245e-06, + "loss": 0.453, + "step": 11597 + }, + { + "epoch": 1.81, + "grad_norm": 23.556714797830292, + "learning_rate": 7.161719577254203e-06, + "loss": 0.5831, + "step": 11598 + }, + { + "epoch": 1.81, + "grad_norm": 24.194467292067234, + "learning_rate": 7.160102571471317e-06, + "loss": 0.5713, + "step": 11599 + }, + { + "epoch": 1.81, + "grad_norm": 17.796453285077092, + "learning_rate": 7.158485646453452e-06, + "loss": 0.5181, + "step": 11600 + }, + { + "epoch": 1.81, + "grad_norm": 31.683945862802087, + "learning_rate": 
7.156868802246593e-06, + "loss": 0.6248, + "step": 11601 + }, + { + "epoch": 1.81, + "grad_norm": 13.174943732778496, + "learning_rate": 7.155252038896725e-06, + "loss": 0.5787, + "step": 11602 + }, + { + "epoch": 1.81, + "grad_norm": 17.143311059709728, + "learning_rate": 7.153635356449824e-06, + "loss": 0.5478, + "step": 11603 + }, + { + "epoch": 1.81, + "grad_norm": 24.6078015893191, + "learning_rate": 7.152018754951864e-06, + "loss": 0.57, + "step": 11604 + }, + { + "epoch": 1.81, + "grad_norm": 19.892040079862152, + "learning_rate": 7.150402234448829e-06, + "loss": 0.5277, + "step": 11605 + }, + { + "epoch": 1.81, + "grad_norm": 16.082032184911203, + "learning_rate": 7.1487857949866875e-06, + "loss": 0.5345, + "step": 11606 + }, + { + "epoch": 1.81, + "grad_norm": 17.600869583344398, + "learning_rate": 7.14716943661141e-06, + "loss": 0.484, + "step": 11607 + }, + { + "epoch": 1.81, + "grad_norm": 17.463483634175, + "learning_rate": 7.145553159368962e-06, + "loss": 0.5432, + "step": 11608 + }, + { + "epoch": 1.81, + "grad_norm": 21.550240639009058, + "learning_rate": 7.143936963305314e-06, + "loss": 0.4511, + "step": 11609 + }, + { + "epoch": 1.81, + "grad_norm": 18.326305962561612, + "learning_rate": 7.142320848466427e-06, + "loss": 0.6216, + "step": 11610 + }, + { + "epoch": 1.81, + "grad_norm": 20.925693796618475, + "learning_rate": 7.140704814898268e-06, + "loss": 0.5114, + "step": 11611 + }, + { + "epoch": 1.81, + "grad_norm": 21.197542415629997, + "learning_rate": 7.1390888626467855e-06, + "loss": 0.5002, + "step": 11612 + }, + { + "epoch": 1.81, + "grad_norm": 16.991477213955214, + "learning_rate": 7.137472991757945e-06, + "loss": 0.5065, + "step": 11613 + }, + { + "epoch": 1.81, + "grad_norm": 20.739434624590164, + "learning_rate": 7.1358572022776965e-06, + "loss": 0.4908, + "step": 11614 + }, + { + "epoch": 1.81, + "grad_norm": 15.775300688662563, + "learning_rate": 7.134241494251994e-06, + "loss": 0.5454, + "step": 11615 + }, + { + "epoch": 1.81, + "grad_norm": 13.93031653162277, + "learning_rate": 7.1326258677267916e-06, + "loss": 0.5446, + "step": 11616 + }, + { + "epoch": 1.81, + "grad_norm": 13.415612695785184, + "learning_rate": 7.131010322748029e-06, + "loss": 0.4873, + "step": 11617 + }, + { + "epoch": 1.81, + "grad_norm": 16.02539989929117, + "learning_rate": 7.129394859361652e-06, + "loss": 0.559, + "step": 11618 + }, + { + "epoch": 1.81, + "grad_norm": 14.872117397347287, + "learning_rate": 7.127779477613608e-06, + "loss": 0.5077, + "step": 11619 + }, + { + "epoch": 1.82, + "grad_norm": 24.65724182914066, + "learning_rate": 7.126164177549832e-06, + "loss": 0.6218, + "step": 11620 + }, + { + "epoch": 1.82, + "grad_norm": 13.759147034514728, + "learning_rate": 7.124548959216272e-06, + "loss": 0.5329, + "step": 11621 + }, + { + "epoch": 1.82, + "grad_norm": 19.038905292185966, + "learning_rate": 7.122933822658855e-06, + "loss": 0.4956, + "step": 11622 + }, + { + "epoch": 1.82, + "grad_norm": 29.62218741201781, + "learning_rate": 7.121318767923514e-06, + "loss": 0.4754, + "step": 11623 + }, + { + "epoch": 1.82, + "grad_norm": 19.502984850110522, + "learning_rate": 7.1197037950561855e-06, + "loss": 0.4947, + "step": 11624 + }, + { + "epoch": 1.82, + "grad_norm": 16.272880233786456, + "learning_rate": 7.118088904102798e-06, + "loss": 0.5762, + "step": 11625 + }, + { + "epoch": 1.82, + "grad_norm": 20.44003194179857, + "learning_rate": 7.116474095109276e-06, + "loss": 0.6131, + "step": 11626 + }, + { + "epoch": 1.82, + "grad_norm": 28.673043687165784, + "learning_rate": 
7.11485936812154e-06, + "loss": 0.5435, + "step": 11627 + }, + { + "epoch": 1.82, + "grad_norm": 21.987354130936072, + "learning_rate": 7.113244723185519e-06, + "loss": 0.5232, + "step": 11628 + }, + { + "epoch": 1.82, + "grad_norm": 16.152391405969357, + "learning_rate": 7.111630160347127e-06, + "loss": 0.5511, + "step": 11629 + }, + { + "epoch": 1.82, + "grad_norm": 18.23919644881341, + "learning_rate": 7.110015679652286e-06, + "loss": 0.5772, + "step": 11630 + }, + { + "epoch": 1.82, + "grad_norm": 18.039844517465994, + "learning_rate": 7.1084012811469105e-06, + "loss": 0.5794, + "step": 11631 + }, + { + "epoch": 1.82, + "grad_norm": 13.622477521039478, + "learning_rate": 7.106786964876909e-06, + "loss": 0.5101, + "step": 11632 + }, + { + "epoch": 1.82, + "grad_norm": 14.455026629330053, + "learning_rate": 7.10517273088819e-06, + "loss": 0.5568, + "step": 11633 + }, + { + "epoch": 1.82, + "grad_norm": 17.477765151496527, + "learning_rate": 7.103558579226668e-06, + "loss": 0.6092, + "step": 11634 + }, + { + "epoch": 1.82, + "grad_norm": 16.827347758734685, + "learning_rate": 7.101944509938244e-06, + "loss": 0.5387, + "step": 11635 + }, + { + "epoch": 1.82, + "grad_norm": 18.319090684113004, + "learning_rate": 7.100330523068829e-06, + "loss": 0.5566, + "step": 11636 + }, + { + "epoch": 1.82, + "grad_norm": 12.285856624828533, + "learning_rate": 7.098716618664312e-06, + "loss": 0.5493, + "step": 11637 + }, + { + "epoch": 1.82, + "grad_norm": 20.543538877210672, + "learning_rate": 7.097102796770598e-06, + "loss": 0.6521, + "step": 11638 + }, + { + "epoch": 1.82, + "grad_norm": 16.06960988490768, + "learning_rate": 7.0954890574335814e-06, + "loss": 0.5151, + "step": 11639 + }, + { + "epoch": 1.82, + "grad_norm": 15.641734816259934, + "learning_rate": 7.0938754006991584e-06, + "loss": 0.456, + "step": 11640 + }, + { + "epoch": 1.82, + "grad_norm": 12.988555649461693, + "learning_rate": 7.092261826613221e-06, + "loss": 0.4699, + "step": 11641 + }, + { + "epoch": 1.82, + "grad_norm": 16.667874990610347, + "learning_rate": 7.0906483352216525e-06, + "loss": 0.5523, + "step": 11642 + }, + { + "epoch": 1.82, + "grad_norm": 26.79399074291014, + "learning_rate": 7.0890349265703455e-06, + "loss": 0.5444, + "step": 11643 + }, + { + "epoch": 1.82, + "grad_norm": 16.387598566527046, + "learning_rate": 7.0874216007051826e-06, + "loss": 0.5266, + "step": 11644 + }, + { + "epoch": 1.82, + "grad_norm": 19.485098884246934, + "learning_rate": 7.0858083576720416e-06, + "loss": 0.5479, + "step": 11645 + }, + { + "epoch": 1.82, + "grad_norm": 20.359197228988386, + "learning_rate": 7.084195197516812e-06, + "loss": 0.5273, + "step": 11646 + }, + { + "epoch": 1.82, + "grad_norm": 15.397537041574818, + "learning_rate": 7.082582120285363e-06, + "loss": 0.5431, + "step": 11647 + }, + { + "epoch": 1.82, + "grad_norm": 17.962690917416055, + "learning_rate": 7.080969126023569e-06, + "loss": 0.5433, + "step": 11648 + }, + { + "epoch": 1.82, + "grad_norm": 11.65443252515218, + "learning_rate": 7.079356214777309e-06, + "loss": 0.5319, + "step": 11649 + }, + { + "epoch": 1.82, + "grad_norm": 18.089367226996785, + "learning_rate": 7.077743386592448e-06, + "loss": 0.5205, + "step": 11650 + }, + { + "epoch": 1.82, + "grad_norm": 18.824622964645325, + "learning_rate": 7.07613064151486e-06, + "loss": 0.5486, + "step": 11651 + }, + { + "epoch": 1.82, + "grad_norm": 17.933723391905808, + "learning_rate": 7.0745179795904005e-06, + "loss": 0.5626, + "step": 11652 + }, + { + "epoch": 1.82, + "grad_norm": 22.729480015211607, + 
"learning_rate": 7.07290540086494e-06, + "loss": 0.5183, + "step": 11653 + }, + { + "epoch": 1.82, + "grad_norm": 17.888364682400976, + "learning_rate": 7.071292905384336e-06, + "loss": 0.5771, + "step": 11654 + }, + { + "epoch": 1.82, + "grad_norm": 14.516893766652776, + "learning_rate": 7.069680493194455e-06, + "loss": 0.5666, + "step": 11655 + }, + { + "epoch": 1.82, + "grad_norm": 12.383855888560012, + "learning_rate": 7.068068164341139e-06, + "loss": 0.4768, + "step": 11656 + }, + { + "epoch": 1.82, + "grad_norm": 26.092905400935205, + "learning_rate": 7.066455918870254e-06, + "loss": 0.6187, + "step": 11657 + }, + { + "epoch": 1.82, + "grad_norm": 17.99880162031453, + "learning_rate": 7.064843756827641e-06, + "loss": 0.5263, + "step": 11658 + }, + { + "epoch": 1.82, + "grad_norm": 20.83135382013805, + "learning_rate": 7.06323167825916e-06, + "loss": 0.4394, + "step": 11659 + }, + { + "epoch": 1.82, + "grad_norm": 19.115081215869775, + "learning_rate": 7.0616196832106535e-06, + "loss": 0.5675, + "step": 11660 + }, + { + "epoch": 1.82, + "grad_norm": 17.19547223039035, + "learning_rate": 7.060007771727961e-06, + "loss": 0.5267, + "step": 11661 + }, + { + "epoch": 1.82, + "grad_norm": 19.36535662075284, + "learning_rate": 7.058395943856926e-06, + "loss": 0.4983, + "step": 11662 + }, + { + "epoch": 1.82, + "grad_norm": 19.881207718754755, + "learning_rate": 7.056784199643392e-06, + "loss": 0.615, + "step": 11663 + }, + { + "epoch": 1.82, + "grad_norm": 37.98192371899534, + "learning_rate": 7.05517253913319e-06, + "loss": 0.7017, + "step": 11664 + }, + { + "epoch": 1.82, + "grad_norm": 18.144408628215704, + "learning_rate": 7.053560962372166e-06, + "loss": 0.5324, + "step": 11665 + }, + { + "epoch": 1.82, + "grad_norm": 41.209823439305936, + "learning_rate": 7.051949469406139e-06, + "loss": 0.6122, + "step": 11666 + }, + { + "epoch": 1.82, + "grad_norm": 14.731807680259813, + "learning_rate": 7.0503380602809435e-06, + "loss": 0.532, + "step": 11667 + }, + { + "epoch": 1.82, + "grad_norm": 17.82789525494998, + "learning_rate": 7.04872673504241e-06, + "loss": 0.4863, + "step": 11668 + }, + { + "epoch": 1.82, + "grad_norm": 11.934443048076906, + "learning_rate": 7.047115493736363e-06, + "loss": 0.5891, + "step": 11669 + }, + { + "epoch": 1.82, + "grad_norm": 25.771589173584143, + "learning_rate": 7.045504336408625e-06, + "loss": 0.5339, + "step": 11670 + }, + { + "epoch": 1.82, + "grad_norm": 21.56569298862143, + "learning_rate": 7.043893263105012e-06, + "loss": 0.5708, + "step": 11671 + }, + { + "epoch": 1.82, + "grad_norm": 32.98548931595973, + "learning_rate": 7.042282273871346e-06, + "loss": 0.5527, + "step": 11672 + }, + { + "epoch": 1.82, + "grad_norm": 23.69957079357906, + "learning_rate": 7.04067136875344e-06, + "loss": 0.5612, + "step": 11673 + }, + { + "epoch": 1.82, + "grad_norm": 20.38158897442927, + "learning_rate": 7.039060547797112e-06, + "loss": 0.6501, + "step": 11674 + }, + { + "epoch": 1.82, + "grad_norm": 17.600871070000377, + "learning_rate": 7.037449811048172e-06, + "loss": 0.5599, + "step": 11675 + }, + { + "epoch": 1.82, + "grad_norm": 19.809189468344815, + "learning_rate": 7.035839158552424e-06, + "loss": 0.5837, + "step": 11676 + }, + { + "epoch": 1.82, + "grad_norm": 16.348039741530055, + "learning_rate": 7.034228590355674e-06, + "loss": 0.5329, + "step": 11677 + }, + { + "epoch": 1.82, + "grad_norm": 19.98229254698548, + "learning_rate": 7.03261810650373e-06, + "loss": 0.6243, + "step": 11678 + }, + { + "epoch": 1.82, + "grad_norm": 24.72234095593728, + 
"learning_rate": 7.03100770704239e-06, + "loss": 0.636, + "step": 11679 + }, + { + "epoch": 1.82, + "grad_norm": 16.60186140150378, + "learning_rate": 7.029397392017461e-06, + "loss": 0.4827, + "step": 11680 + }, + { + "epoch": 1.82, + "grad_norm": 20.502826870839673, + "learning_rate": 7.027787161474724e-06, + "loss": 0.5632, + "step": 11681 + }, + { + "epoch": 1.82, + "grad_norm": 18.442930845763893, + "learning_rate": 7.026177015459984e-06, + "loss": 0.4844, + "step": 11682 + }, + { + "epoch": 1.82, + "grad_norm": 18.428943477520082, + "learning_rate": 7.024566954019028e-06, + "loss": 0.5357, + "step": 11683 + }, + { + "epoch": 1.83, + "grad_norm": 13.090731012707813, + "learning_rate": 7.0229569771976505e-06, + "loss": 0.4444, + "step": 11684 + }, + { + "epoch": 1.83, + "grad_norm": 21.401333984377093, + "learning_rate": 7.021347085041636e-06, + "loss": 0.4988, + "step": 11685 + }, + { + "epoch": 1.83, + "grad_norm": 13.31659277496896, + "learning_rate": 7.019737277596763e-06, + "loss": 0.5421, + "step": 11686 + }, + { + "epoch": 1.83, + "grad_norm": 15.635348465567645, + "learning_rate": 7.018127554908823e-06, + "loss": 0.5676, + "step": 11687 + }, + { + "epoch": 1.83, + "grad_norm": 17.71528537624553, + "learning_rate": 7.016517917023588e-06, + "loss": 0.5824, + "step": 11688 + }, + { + "epoch": 1.83, + "grad_norm": 16.41710880322763, + "learning_rate": 7.014908363986839e-06, + "loss": 0.5984, + "step": 11689 + }, + { + "epoch": 1.83, + "grad_norm": 17.762436271924965, + "learning_rate": 7.013298895844352e-06, + "loss": 0.5494, + "step": 11690 + }, + { + "epoch": 1.83, + "grad_norm": 21.50616798945086, + "learning_rate": 7.011689512641896e-06, + "loss": 0.5149, + "step": 11691 + }, + { + "epoch": 1.83, + "grad_norm": 15.878454605473662, + "learning_rate": 7.01008021442524e-06, + "loss": 0.5081, + "step": 11692 + }, + { + "epoch": 1.83, + "grad_norm": 26.63039866301779, + "learning_rate": 7.008471001240156e-06, + "loss": 0.6059, + "step": 11693 + }, + { + "epoch": 1.83, + "grad_norm": 14.296200632127004, + "learning_rate": 7.006861873132406e-06, + "loss": 0.5353, + "step": 11694 + }, + { + "epoch": 1.83, + "grad_norm": 13.308023601777744, + "learning_rate": 7.005252830147756e-06, + "loss": 0.4995, + "step": 11695 + }, + { + "epoch": 1.83, + "grad_norm": 13.613516585780115, + "learning_rate": 7.00364387233196e-06, + "loss": 0.5259, + "step": 11696 + }, + { + "epoch": 1.83, + "grad_norm": 15.367796623519187, + "learning_rate": 7.00203499973078e-06, + "loss": 0.5112, + "step": 11697 + }, + { + "epoch": 1.83, + "grad_norm": 22.999478345894985, + "learning_rate": 7.00042621238997e-06, + "loss": 0.5607, + "step": 11698 + }, + { + "epoch": 1.83, + "grad_norm": 20.2043551788129, + "learning_rate": 6.998817510355285e-06, + "loss": 0.4995, + "step": 11699 + }, + { + "epoch": 1.83, + "grad_norm": 24.29378257709534, + "learning_rate": 6.997208893672478e-06, + "loss": 0.5981, + "step": 11700 + }, + { + "epoch": 1.83, + "grad_norm": 20.001025599766976, + "learning_rate": 6.9956003623872895e-06, + "loss": 0.5671, + "step": 11701 + }, + { + "epoch": 1.83, + "grad_norm": 20.665063599420087, + "learning_rate": 6.993991916545468e-06, + "loss": 0.541, + "step": 11702 + }, + { + "epoch": 1.83, + "grad_norm": 16.794120899011663, + "learning_rate": 6.99238355619276e-06, + "loss": 0.5195, + "step": 11703 + }, + { + "epoch": 1.83, + "grad_norm": 19.06642043260306, + "learning_rate": 6.990775281374907e-06, + "loss": 0.5742, + "step": 11704 + }, + { + "epoch": 1.83, + "grad_norm": 21.570380307139004, + 
"learning_rate": 6.989167092137639e-06, + "loss": 0.5394, + "step": 11705 + }, + { + "epoch": 1.83, + "grad_norm": 18.745364045974263, + "learning_rate": 6.987558988526702e-06, + "loss": 0.5228, + "step": 11706 + }, + { + "epoch": 1.83, + "grad_norm": 18.606251088876917, + "learning_rate": 6.985950970587823e-06, + "loss": 0.5828, + "step": 11707 + }, + { + "epoch": 1.83, + "grad_norm": 19.743709976491427, + "learning_rate": 6.984343038366733e-06, + "loss": 0.5984, + "step": 11708 + }, + { + "epoch": 1.83, + "grad_norm": 19.52210655499473, + "learning_rate": 6.982735191909168e-06, + "loss": 0.5115, + "step": 11709 + }, + { + "epoch": 1.83, + "grad_norm": 20.525566713777664, + "learning_rate": 6.981127431260846e-06, + "loss": 0.5047, + "step": 11710 + }, + { + "epoch": 1.83, + "grad_norm": 15.782272385537256, + "learning_rate": 6.9795197564674935e-06, + "loss": 0.5157, + "step": 11711 + }, + { + "epoch": 1.83, + "grad_norm": 28.74157636220329, + "learning_rate": 6.977912167574833e-06, + "loss": 0.4437, + "step": 11712 + }, + { + "epoch": 1.83, + "grad_norm": 24.292225681735477, + "learning_rate": 6.9763046646285836e-06, + "loss": 0.5652, + "step": 11713 + }, + { + "epoch": 1.83, + "grad_norm": 18.522042738018346, + "learning_rate": 6.974697247674462e-06, + "loss": 0.5312, + "step": 11714 + }, + { + "epoch": 1.83, + "grad_norm": 27.193514223376404, + "learning_rate": 6.973089916758177e-06, + "loss": 0.5129, + "step": 11715 + }, + { + "epoch": 1.83, + "grad_norm": 20.90361913170271, + "learning_rate": 6.971482671925446e-06, + "loss": 0.5479, + "step": 11716 + }, + { + "epoch": 1.83, + "grad_norm": 28.873867614946988, + "learning_rate": 6.969875513221974e-06, + "loss": 0.5831, + "step": 11717 + }, + { + "epoch": 1.83, + "grad_norm": 15.782363282017418, + "learning_rate": 6.968268440693472e-06, + "loss": 0.5187, + "step": 11718 + }, + { + "epoch": 1.83, + "grad_norm": 20.702251415175613, + "learning_rate": 6.966661454385644e-06, + "loss": 0.5572, + "step": 11719 + }, + { + "epoch": 1.83, + "grad_norm": 29.902716813603863, + "learning_rate": 6.965054554344188e-06, + "loss": 0.5399, + "step": 11720 + }, + { + "epoch": 1.83, + "grad_norm": 16.625732002420186, + "learning_rate": 6.963447740614804e-06, + "loss": 0.5546, + "step": 11721 + }, + { + "epoch": 1.83, + "grad_norm": 19.208926691726003, + "learning_rate": 6.96184101324319e-06, + "loss": 0.5155, + "step": 11722 + }, + { + "epoch": 1.83, + "grad_norm": 20.97838514141988, + "learning_rate": 6.960234372275039e-06, + "loss": 0.4979, + "step": 11723 + }, + { + "epoch": 1.83, + "grad_norm": 17.449116927089577, + "learning_rate": 6.958627817756052e-06, + "loss": 0.5685, + "step": 11724 + }, + { + "epoch": 1.83, + "grad_norm": 18.50965678122747, + "learning_rate": 6.957021349731902e-06, + "loss": 0.4852, + "step": 11725 + }, + { + "epoch": 1.83, + "grad_norm": 27.984516194560854, + "learning_rate": 6.955414968248289e-06, + "loss": 0.5442, + "step": 11726 + }, + { + "epoch": 1.83, + "grad_norm": 16.848203408271477, + "learning_rate": 6.953808673350889e-06, + "loss": 0.5545, + "step": 11727 + }, + { + "epoch": 1.83, + "grad_norm": 21.553414325506136, + "learning_rate": 6.952202465085391e-06, + "loss": 0.5492, + "step": 11728 + }, + { + "epoch": 1.83, + "grad_norm": 23.345977558031564, + "learning_rate": 6.950596343497475e-06, + "loss": 0.5099, + "step": 11729 + }, + { + "epoch": 1.83, + "grad_norm": 24.98191970118878, + "learning_rate": 6.9489903086328106e-06, + "loss": 0.5756, + "step": 11730 + }, + { + "epoch": 1.83, + "grad_norm": 35.24253605048893, 
+ "learning_rate": 6.94738436053708e-06, + "loss": 0.4795, + "step": 11731 + }, + { + "epoch": 1.83, + "grad_norm": 21.177290150022948, + "learning_rate": 6.945778499255951e-06, + "loss": 0.5683, + "step": 11732 + }, + { + "epoch": 1.83, + "grad_norm": 21.957408071805656, + "learning_rate": 6.944172724835093e-06, + "loss": 0.6336, + "step": 11733 + }, + { + "epoch": 1.83, + "grad_norm": 22.72423147978298, + "learning_rate": 6.94256703732018e-06, + "loss": 0.5884, + "step": 11734 + }, + { + "epoch": 1.83, + "grad_norm": 26.55956095816552, + "learning_rate": 6.94096143675687e-06, + "loss": 0.5428, + "step": 11735 + }, + { + "epoch": 1.83, + "grad_norm": 22.664277537580542, + "learning_rate": 6.939355923190823e-06, + "loss": 0.5702, + "step": 11736 + }, + { + "epoch": 1.83, + "grad_norm": 14.13854389946916, + "learning_rate": 6.9377504966677075e-06, + "loss": 0.4787, + "step": 11737 + }, + { + "epoch": 1.83, + "grad_norm": 11.473880337545149, + "learning_rate": 6.936145157233176e-06, + "loss": 0.5476, + "step": 11738 + }, + { + "epoch": 1.83, + "grad_norm": 19.92305827347941, + "learning_rate": 6.934539904932887e-06, + "loss": 0.5446, + "step": 11739 + }, + { + "epoch": 1.83, + "grad_norm": 17.016222793545023, + "learning_rate": 6.9329347398124825e-06, + "loss": 0.4996, + "step": 11740 + }, + { + "epoch": 1.83, + "grad_norm": 24.33511644389723, + "learning_rate": 6.9313296619176255e-06, + "loss": 0.5833, + "step": 11741 + }, + { + "epoch": 1.83, + "grad_norm": 15.728913362676044, + "learning_rate": 6.929724671293954e-06, + "loss": 0.5381, + "step": 11742 + }, + { + "epoch": 1.83, + "grad_norm": 14.216073908335357, + "learning_rate": 6.928119767987118e-06, + "loss": 0.5825, + "step": 11743 + }, + { + "epoch": 1.83, + "grad_norm": 15.530455562260306, + "learning_rate": 6.926514952042762e-06, + "loss": 0.5411, + "step": 11744 + }, + { + "epoch": 1.83, + "grad_norm": 15.774554030780376, + "learning_rate": 6.9249102235065205e-06, + "loss": 0.5609, + "step": 11745 + }, + { + "epoch": 1.83, + "grad_norm": 21.532517514202322, + "learning_rate": 6.923305582424031e-06, + "loss": 0.4781, + "step": 11746 + }, + { + "epoch": 1.83, + "grad_norm": 28.746084930422715, + "learning_rate": 6.9217010288409335e-06, + "loss": 0.5183, + "step": 11747 + }, + { + "epoch": 1.84, + "grad_norm": 22.037601828894335, + "learning_rate": 6.920096562802856e-06, + "loss": 0.5638, + "step": 11748 + }, + { + "epoch": 1.84, + "grad_norm": 25.63200652236906, + "learning_rate": 6.918492184355434e-06, + "loss": 0.5939, + "step": 11749 + }, + { + "epoch": 1.84, + "grad_norm": 20.951470353109308, + "learning_rate": 6.9168878935442905e-06, + "loss": 0.5804, + "step": 11750 + }, + { + "epoch": 1.84, + "grad_norm": 17.41521443287422, + "learning_rate": 6.915283690415051e-06, + "loss": 0.6116, + "step": 11751 + }, + { + "epoch": 1.84, + "grad_norm": 19.170861591270473, + "learning_rate": 6.913679575013338e-06, + "loss": 0.5257, + "step": 11752 + }, + { + "epoch": 1.84, + "grad_norm": 18.677820440642755, + "learning_rate": 6.9120755473847755e-06, + "loss": 0.5979, + "step": 11753 + }, + { + "epoch": 1.84, + "grad_norm": 15.912057962892913, + "learning_rate": 6.9104716075749755e-06, + "loss": 0.5724, + "step": 11754 + }, + { + "epoch": 1.84, + "grad_norm": 19.12830214182361, + "learning_rate": 6.908867755629555e-06, + "loss": 0.4904, + "step": 11755 + }, + { + "epoch": 1.84, + "grad_norm": 17.174629708546878, + "learning_rate": 6.907263991594129e-06, + "loss": 0.4541, + "step": 11756 + }, + { + "epoch": 1.84, + "grad_norm": 
22.04055811483667, + "learning_rate": 6.905660315514304e-06, + "loss": 0.554, + "step": 11757 + }, + { + "epoch": 1.84, + "grad_norm": 12.510908032083996, + "learning_rate": 6.904056727435694e-06, + "loss": 0.4692, + "step": 11758 + }, + { + "epoch": 1.84, + "grad_norm": 22.405438107846976, + "learning_rate": 6.9024532274038935e-06, + "loss": 0.5845, + "step": 11759 + }, + { + "epoch": 1.84, + "grad_norm": 49.87042182361547, + "learning_rate": 6.900849815464515e-06, + "loss": 0.5984, + "step": 11760 + }, + { + "epoch": 1.84, + "grad_norm": 17.197483714257224, + "learning_rate": 6.8992464916631515e-06, + "loss": 0.5514, + "step": 11761 + }, + { + "epoch": 1.84, + "grad_norm": 19.98222614662644, + "learning_rate": 6.897643256045407e-06, + "loss": 0.5275, + "step": 11762 + }, + { + "epoch": 1.84, + "grad_norm": 14.394025212454915, + "learning_rate": 6.896040108656874e-06, + "loss": 0.5384, + "step": 11763 + }, + { + "epoch": 1.84, + "grad_norm": 22.957535848675242, + "learning_rate": 6.894437049543144e-06, + "loss": 0.5513, + "step": 11764 + }, + { + "epoch": 1.84, + "grad_norm": 17.594980462785127, + "learning_rate": 6.892834078749805e-06, + "loss": 0.5522, + "step": 11765 + }, + { + "epoch": 1.84, + "grad_norm": 28.172844572303415, + "learning_rate": 6.89123119632245e-06, + "loss": 0.5277, + "step": 11766 + }, + { + "epoch": 1.84, + "grad_norm": 28.18619036645395, + "learning_rate": 6.8896284023066585e-06, + "loss": 0.5676, + "step": 11767 + }, + { + "epoch": 1.84, + "grad_norm": 18.165212246182225, + "learning_rate": 6.888025696748021e-06, + "loss": 0.5836, + "step": 11768 + }, + { + "epoch": 1.84, + "grad_norm": 19.903844497147052, + "learning_rate": 6.886423079692106e-06, + "loss": 0.4911, + "step": 11769 + }, + { + "epoch": 1.84, + "grad_norm": 15.889666266293897, + "learning_rate": 6.884820551184501e-06, + "loss": 0.5259, + "step": 11770 + }, + { + "epoch": 1.84, + "grad_norm": 59.62691749432491, + "learning_rate": 6.883218111270774e-06, + "loss": 0.6656, + "step": 11771 + }, + { + "epoch": 1.84, + "grad_norm": 19.16000784953565, + "learning_rate": 6.8816157599965025e-06, + "loss": 0.5916, + "step": 11772 + }, + { + "epoch": 1.84, + "grad_norm": 22.41139580947437, + "learning_rate": 6.880013497407257e-06, + "loss": 0.5374, + "step": 11773 + }, + { + "epoch": 1.84, + "grad_norm": 22.153799914221107, + "learning_rate": 6.878411323548599e-06, + "loss": 0.565, + "step": 11774 + }, + { + "epoch": 1.84, + "grad_norm": 14.224518948631207, + "learning_rate": 6.8768092384660975e-06, + "loss": 0.5482, + "step": 11775 + }, + { + "epoch": 1.84, + "grad_norm": 20.499790664511192, + "learning_rate": 6.875207242205316e-06, + "loss": 0.544, + "step": 11776 + }, + { + "epoch": 1.84, + "grad_norm": 19.849667799125257, + "learning_rate": 6.8736053348118106e-06, + "loss": 0.552, + "step": 11777 + }, + { + "epoch": 1.84, + "grad_norm": 18.69364399524402, + "learning_rate": 6.8720035163311425e-06, + "loss": 0.5775, + "step": 11778 + }, + { + "epoch": 1.84, + "grad_norm": 18.042298678013218, + "learning_rate": 6.870401786808865e-06, + "loss": 0.5117, + "step": 11779 + }, + { + "epoch": 1.84, + "grad_norm": 23.697660090276667, + "learning_rate": 6.868800146290526e-06, + "loss": 0.5191, + "step": 11780 + }, + { + "epoch": 1.84, + "grad_norm": 22.543585380670198, + "learning_rate": 6.867198594821683e-06, + "loss": 0.6217, + "step": 11781 + }, + { + "epoch": 1.84, + "grad_norm": 22.137636251504162, + "learning_rate": 6.8655971324478764e-06, + "loss": 0.5715, + "step": 11782 + }, + { + "epoch": 1.84, + 
"grad_norm": 17.380098124966214, + "learning_rate": 6.863995759214659e-06, + "loss": 0.5126, + "step": 11783 + }, + { + "epoch": 1.84, + "grad_norm": 17.56032807183403, + "learning_rate": 6.862394475167562e-06, + "loss": 0.6195, + "step": 11784 + }, + { + "epoch": 1.84, + "grad_norm": 18.78594554179766, + "learning_rate": 6.860793280352132e-06, + "loss": 0.4863, + "step": 11785 + }, + { + "epoch": 1.84, + "grad_norm": 15.041057080859524, + "learning_rate": 6.859192174813905e-06, + "loss": 0.5814, + "step": 11786 + }, + { + "epoch": 1.84, + "grad_norm": 12.23394728968936, + "learning_rate": 6.857591158598415e-06, + "loss": 0.4819, + "step": 11787 + }, + { + "epoch": 1.84, + "grad_norm": 15.65858811681614, + "learning_rate": 6.855990231751197e-06, + "loss": 0.5488, + "step": 11788 + }, + { + "epoch": 1.84, + "grad_norm": 30.701395605851804, + "learning_rate": 6.854389394317776e-06, + "loss": 0.6204, + "step": 11789 + }, + { + "epoch": 1.84, + "grad_norm": 15.008493917346742, + "learning_rate": 6.852788646343678e-06, + "loss": 0.5578, + "step": 11790 + }, + { + "epoch": 1.84, + "grad_norm": 21.51846641001329, + "learning_rate": 6.851187987874432e-06, + "loss": 0.5132, + "step": 11791 + }, + { + "epoch": 1.84, + "grad_norm": 16.708946223078858, + "learning_rate": 6.849587418955554e-06, + "loss": 0.5397, + "step": 11792 + }, + { + "epoch": 1.84, + "grad_norm": 11.564967495303193, + "learning_rate": 6.847986939632574e-06, + "loss": 0.4913, + "step": 11793 + }, + { + "epoch": 1.84, + "grad_norm": 18.902148200622115, + "learning_rate": 6.846386549950998e-06, + "loss": 0.5779, + "step": 11794 + }, + { + "epoch": 1.84, + "grad_norm": 16.169481890982244, + "learning_rate": 6.844786249956343e-06, + "loss": 0.5989, + "step": 11795 + }, + { + "epoch": 1.84, + "grad_norm": 15.29195636306816, + "learning_rate": 6.8431860396941205e-06, + "loss": 0.5063, + "step": 11796 + }, + { + "epoch": 1.84, + "grad_norm": 16.995587490005533, + "learning_rate": 6.841585919209842e-06, + "loss": 0.4843, + "step": 11797 + }, + { + "epoch": 1.84, + "grad_norm": 15.299465661928968, + "learning_rate": 6.839985888549015e-06, + "loss": 0.4884, + "step": 11798 + }, + { + "epoch": 1.84, + "grad_norm": 18.042442160481034, + "learning_rate": 6.838385947757136e-06, + "loss": 0.4733, + "step": 11799 + }, + { + "epoch": 1.84, + "grad_norm": 11.234073573858344, + "learning_rate": 6.836786096879713e-06, + "loss": 0.4793, + "step": 11800 + }, + { + "epoch": 1.84, + "grad_norm": 21.637346360869344, + "learning_rate": 6.8351863359622424e-06, + "loss": 0.5195, + "step": 11801 + }, + { + "epoch": 1.84, + "grad_norm": 19.11047107254283, + "learning_rate": 6.833586665050225e-06, + "loss": 0.5508, + "step": 11802 + }, + { + "epoch": 1.84, + "grad_norm": 16.622951219543683, + "learning_rate": 6.831987084189144e-06, + "loss": 0.5396, + "step": 11803 + }, + { + "epoch": 1.84, + "grad_norm": 21.988993444953024, + "learning_rate": 6.830387593424501e-06, + "loss": 0.5571, + "step": 11804 + }, + { + "epoch": 1.84, + "grad_norm": 14.64473752405375, + "learning_rate": 6.828788192801779e-06, + "loss": 0.5243, + "step": 11805 + }, + { + "epoch": 1.84, + "grad_norm": 22.002541021424317, + "learning_rate": 6.827188882366467e-06, + "loss": 0.5972, + "step": 11806 + }, + { + "epoch": 1.84, + "grad_norm": 13.862318553427183, + "learning_rate": 6.825589662164049e-06, + "loss": 0.4388, + "step": 11807 + }, + { + "epoch": 1.84, + "grad_norm": 17.240215211181084, + "learning_rate": 6.823990532240001e-06, + "loss": 0.5396, + "step": 11808 + }, + { + "epoch": 1.84, 
+ "grad_norm": 21.415745600830192, + "learning_rate": 6.822391492639804e-06, + "loss": 0.5604, + "step": 11809 + }, + { + "epoch": 1.84, + "grad_norm": 19.99398208284026, + "learning_rate": 6.820792543408935e-06, + "loss": 0.5525, + "step": 11810 + }, + { + "epoch": 1.84, + "grad_norm": 12.17063712961563, + "learning_rate": 6.819193684592864e-06, + "loss": 0.5205, + "step": 11811 + }, + { + "epoch": 1.85, + "grad_norm": 14.598539516072515, + "learning_rate": 6.817594916237071e-06, + "loss": 0.5296, + "step": 11812 + }, + { + "epoch": 1.85, + "grad_norm": 19.106494857291004, + "learning_rate": 6.815996238387011e-06, + "loss": 0.5202, + "step": 11813 + }, + { + "epoch": 1.85, + "grad_norm": 25.761730921744594, + "learning_rate": 6.814397651088156e-06, + "loss": 0.5598, + "step": 11814 + }, + { + "epoch": 1.85, + "grad_norm": 13.505484719089626, + "learning_rate": 6.812799154385967e-06, + "loss": 0.5047, + "step": 11815 + }, + { + "epoch": 1.85, + "grad_norm": 26.217824920729925, + "learning_rate": 6.811200748325908e-06, + "loss": 0.5667, + "step": 11816 + }, + { + "epoch": 1.85, + "grad_norm": 16.357447255129127, + "learning_rate": 6.8096024329534376e-06, + "loss": 0.5417, + "step": 11817 + }, + { + "epoch": 1.85, + "grad_norm": 19.722383620791216, + "learning_rate": 6.808004208314001e-06, + "loss": 0.5542, + "step": 11818 + }, + { + "epoch": 1.85, + "grad_norm": 14.253706437495703, + "learning_rate": 6.806406074453061e-06, + "loss": 0.5359, + "step": 11819 + }, + { + "epoch": 1.85, + "grad_norm": 25.598648277932625, + "learning_rate": 6.804808031416064e-06, + "loss": 0.5253, + "step": 11820 + }, + { + "epoch": 1.85, + "grad_norm": 28.686280740433627, + "learning_rate": 6.803210079248454e-06, + "loss": 0.5844, + "step": 11821 + }, + { + "epoch": 1.85, + "grad_norm": 17.462911925171134, + "learning_rate": 6.8016122179956855e-06, + "loss": 0.5614, + "step": 11822 + }, + { + "epoch": 1.85, + "grad_norm": 24.004407491641857, + "learning_rate": 6.800014447703191e-06, + "loss": 0.6014, + "step": 11823 + }, + { + "epoch": 1.85, + "grad_norm": 24.385313763184758, + "learning_rate": 6.798416768416412e-06, + "loss": 0.5567, + "step": 11824 + }, + { + "epoch": 1.85, + "grad_norm": 24.957210859857593, + "learning_rate": 6.7968191801807894e-06, + "loss": 0.5931, + "step": 11825 + }, + { + "epoch": 1.85, + "grad_norm": 20.17053963594402, + "learning_rate": 6.795221683041756e-06, + "loss": 0.5224, + "step": 11826 + }, + { + "epoch": 1.85, + "grad_norm": 25.049286904697535, + "learning_rate": 6.793624277044745e-06, + "loss": 0.6028, + "step": 11827 + }, + { + "epoch": 1.85, + "grad_norm": 21.505051819156407, + "learning_rate": 6.792026962235178e-06, + "loss": 0.551, + "step": 11828 + }, + { + "epoch": 1.85, + "grad_norm": 17.627967005178707, + "learning_rate": 6.790429738658493e-06, + "loss": 0.5519, + "step": 11829 + }, + { + "epoch": 1.85, + "grad_norm": 34.89721143971475, + "learning_rate": 6.788832606360104e-06, + "loss": 0.5496, + "step": 11830 + }, + { + "epoch": 1.85, + "grad_norm": 21.622222009458074, + "learning_rate": 6.787235565385439e-06, + "loss": 0.5669, + "step": 11831 + }, + { + "epoch": 1.85, + "grad_norm": 14.831788367733381, + "learning_rate": 6.785638615779919e-06, + "loss": 0.4961, + "step": 11832 + }, + { + "epoch": 1.85, + "grad_norm": 15.221369891869672, + "learning_rate": 6.784041757588954e-06, + "loss": 0.4772, + "step": 11833 + }, + { + "epoch": 1.85, + "grad_norm": 19.665110942426757, + "learning_rate": 6.782444990857957e-06, + "loss": 0.5192, + "step": 11834 + }, + { + "epoch": 
1.85, + "grad_norm": 20.58718310719083, + "learning_rate": 6.780848315632344e-06, + "loss": 0.5618, + "step": 11835 + }, + { + "epoch": 1.85, + "grad_norm": 15.653080811745662, + "learning_rate": 6.779251731957522e-06, + "loss": 0.4769, + "step": 11836 + }, + { + "epoch": 1.85, + "grad_norm": 19.05049685073707, + "learning_rate": 6.7776552398788995e-06, + "loss": 0.5144, + "step": 11837 + }, + { + "epoch": 1.85, + "grad_norm": 25.362459280753125, + "learning_rate": 6.776058839441875e-06, + "loss": 0.6139, + "step": 11838 + }, + { + "epoch": 1.85, + "grad_norm": 20.530316306937102, + "learning_rate": 6.77446253069185e-06, + "loss": 0.5723, + "step": 11839 + }, + { + "epoch": 1.85, + "grad_norm": 18.7788743057357, + "learning_rate": 6.772866313674223e-06, + "loss": 0.5287, + "step": 11840 + }, + { + "epoch": 1.85, + "grad_norm": 23.03395121596905, + "learning_rate": 6.771270188434392e-06, + "loss": 0.5809, + "step": 11841 + }, + { + "epoch": 1.85, + "grad_norm": 21.256488767245816, + "learning_rate": 6.7696741550177505e-06, + "loss": 0.5326, + "step": 11842 + }, + { + "epoch": 1.85, + "grad_norm": 14.767931118769736, + "learning_rate": 6.768078213469682e-06, + "loss": 0.526, + "step": 11843 + }, + { + "epoch": 1.85, + "grad_norm": 17.13373259923442, + "learning_rate": 6.76648236383558e-06, + "loss": 0.5511, + "step": 11844 + }, + { + "epoch": 1.85, + "grad_norm": 23.087789328284906, + "learning_rate": 6.764886606160828e-06, + "loss": 0.6134, + "step": 11845 + }, + { + "epoch": 1.85, + "grad_norm": 18.54309932356595, + "learning_rate": 6.763290940490807e-06, + "loss": 0.5314, + "step": 11846 + }, + { + "epoch": 1.85, + "grad_norm": 26.978805143862637, + "learning_rate": 6.761695366870902e-06, + "loss": 0.5518, + "step": 11847 + }, + { + "epoch": 1.85, + "grad_norm": 23.006521624635567, + "learning_rate": 6.760099885346485e-06, + "loss": 0.5675, + "step": 11848 + }, + { + "epoch": 1.85, + "grad_norm": 19.727629834750847, + "learning_rate": 6.758504495962929e-06, + "loss": 0.5407, + "step": 11849 + }, + { + "epoch": 1.85, + "grad_norm": 17.421520238950748, + "learning_rate": 6.756909198765611e-06, + "loss": 0.5079, + "step": 11850 + }, + { + "epoch": 1.85, + "grad_norm": 23.702382093039137, + "learning_rate": 6.755313993799901e-06, + "loss": 0.5862, + "step": 11851 + }, + { + "epoch": 1.85, + "grad_norm": 18.637445580775932, + "learning_rate": 6.75371888111116e-06, + "loss": 0.598, + "step": 11852 + }, + { + "epoch": 1.85, + "grad_norm": 18.19318262773005, + "learning_rate": 6.7521238607447536e-06, + "loss": 0.5322, + "step": 11853 + }, + { + "epoch": 1.85, + "grad_norm": 18.023378612685022, + "learning_rate": 6.750528932746047e-06, + "loss": 0.5321, + "step": 11854 + }, + { + "epoch": 1.85, + "grad_norm": 21.177275724017136, + "learning_rate": 6.7489340971603935e-06, + "loss": 0.5131, + "step": 11855 + }, + { + "epoch": 1.85, + "grad_norm": 24.2220833791688, + "learning_rate": 6.747339354033158e-06, + "loss": 0.591, + "step": 11856 + }, + { + "epoch": 1.85, + "grad_norm": 15.46797529746724, + "learning_rate": 6.745744703409685e-06, + "loss": 0.4741, + "step": 11857 + }, + { + "epoch": 1.85, + "grad_norm": 14.043386639357967, + "learning_rate": 6.74415014533533e-06, + "loss": 0.5669, + "step": 11858 + }, + { + "epoch": 1.85, + "grad_norm": 13.599418667835762, + "learning_rate": 6.742555679855438e-06, + "loss": 0.5139, + "step": 11859 + }, + { + "epoch": 1.85, + "grad_norm": 18.676745969282397, + "learning_rate": 6.740961307015359e-06, + "loss": 0.5714, + "step": 11860 + }, + { + "epoch": 1.85, 
+ "grad_norm": 18.501435230617314, + "learning_rate": 6.739367026860435e-06, + "loss": 0.5779, + "step": 11861 + }, + { + "epoch": 1.85, + "grad_norm": 22.944800473462895, + "learning_rate": 6.737772839436003e-06, + "loss": 0.5178, + "step": 11862 + }, + { + "epoch": 1.85, + "grad_norm": 17.066471334164344, + "learning_rate": 6.736178744787403e-06, + "loss": 0.5734, + "step": 11863 + }, + { + "epoch": 1.85, + "grad_norm": 17.15696972152039, + "learning_rate": 6.734584742959972e-06, + "loss": 0.5637, + "step": 11864 + }, + { + "epoch": 1.85, + "grad_norm": 19.14204942502275, + "learning_rate": 6.732990833999038e-06, + "loss": 0.5619, + "step": 11865 + }, + { + "epoch": 1.85, + "grad_norm": 27.13584361484866, + "learning_rate": 6.731397017949937e-06, + "loss": 0.5247, + "step": 11866 + }, + { + "epoch": 1.85, + "grad_norm": 13.631445407909792, + "learning_rate": 6.7298032948579925e-06, + "loss": 0.4517, + "step": 11867 + }, + { + "epoch": 1.85, + "grad_norm": 16.83198742297547, + "learning_rate": 6.728209664768525e-06, + "loss": 0.5518, + "step": 11868 + }, + { + "epoch": 1.85, + "grad_norm": 18.781420560544717, + "learning_rate": 6.726616127726863e-06, + "loss": 0.4721, + "step": 11869 + }, + { + "epoch": 1.85, + "grad_norm": 17.300662697260762, + "learning_rate": 6.725022683778326e-06, + "loss": 0.524, + "step": 11870 + }, + { + "epoch": 1.85, + "grad_norm": 15.412647118527575, + "learning_rate": 6.723429332968227e-06, + "loss": 0.4846, + "step": 11871 + }, + { + "epoch": 1.85, + "grad_norm": 25.994617002918577, + "learning_rate": 6.721836075341879e-06, + "loss": 0.5776, + "step": 11872 + }, + { + "epoch": 1.85, + "grad_norm": 16.225958231151278, + "learning_rate": 6.720242910944596e-06, + "loss": 0.5125, + "step": 11873 + }, + { + "epoch": 1.85, + "grad_norm": 14.809634243317463, + "learning_rate": 6.718649839821686e-06, + "loss": 0.5774, + "step": 11874 + }, + { + "epoch": 1.85, + "grad_norm": 21.659502838570962, + "learning_rate": 6.717056862018456e-06, + "loss": 0.5375, + "step": 11875 + }, + { + "epoch": 1.86, + "grad_norm": 18.93651919531852, + "learning_rate": 6.71546397758021e-06, + "loss": 0.4983, + "step": 11876 + }, + { + "epoch": 1.86, + "grad_norm": 32.62854315940374, + "learning_rate": 6.713871186552246e-06, + "loss": 0.5915, + "step": 11877 + }, + { + "epoch": 1.86, + "grad_norm": 17.088300326161473, + "learning_rate": 6.7122784889798596e-06, + "loss": 0.4891, + "step": 11878 + }, + { + "epoch": 1.86, + "grad_norm": 15.14300445188734, + "learning_rate": 6.710685884908352e-06, + "loss": 0.5869, + "step": 11879 + }, + { + "epoch": 1.86, + "grad_norm": 15.329452902371624, + "learning_rate": 6.709093374383012e-06, + "loss": 0.5971, + "step": 11880 + }, + { + "epoch": 1.86, + "grad_norm": 19.83084185153524, + "learning_rate": 6.7075009574491355e-06, + "loss": 0.4683, + "step": 11881 + }, + { + "epoch": 1.86, + "grad_norm": 16.93847415733815, + "learning_rate": 6.7059086341520045e-06, + "loss": 0.5264, + "step": 11882 + }, + { + "epoch": 1.86, + "grad_norm": 16.919098557608034, + "learning_rate": 6.704316404536904e-06, + "loss": 0.523, + "step": 11883 + }, + { + "epoch": 1.86, + "grad_norm": 16.745501041267495, + "learning_rate": 6.702724268649114e-06, + "loss": 0.5162, + "step": 11884 + }, + { + "epoch": 1.86, + "grad_norm": 20.12752802442818, + "learning_rate": 6.70113222653392e-06, + "loss": 0.5366, + "step": 11885 + }, + { + "epoch": 1.86, + "grad_norm": 22.86254287679728, + "learning_rate": 6.699540278236598e-06, + "loss": 0.536, + "step": 11886 + }, + { + "epoch": 1.86, + 
"grad_norm": 31.498146309124703, + "learning_rate": 6.697948423802416e-06, + "loss": 0.5708, + "step": 11887 + }, + { + "epoch": 1.86, + "grad_norm": 22.63014622476013, + "learning_rate": 6.696356663276651e-06, + "loss": 0.5801, + "step": 11888 + }, + { + "epoch": 1.86, + "grad_norm": 23.92612413246314, + "learning_rate": 6.694764996704569e-06, + "loss": 0.6027, + "step": 11889 + }, + { + "epoch": 1.86, + "grad_norm": 18.431043361548078, + "learning_rate": 6.6931734241314365e-06, + "loss": 0.5927, + "step": 11890 + }, + { + "epoch": 1.86, + "grad_norm": 16.900931632374874, + "learning_rate": 6.69158194560252e-06, + "loss": 0.5302, + "step": 11891 + }, + { + "epoch": 1.86, + "grad_norm": 15.44972644217673, + "learning_rate": 6.689990561163075e-06, + "loss": 0.483, + "step": 11892 + }, + { + "epoch": 1.86, + "grad_norm": 25.53810183053534, + "learning_rate": 6.688399270858362e-06, + "loss": 0.5208, + "step": 11893 + }, + { + "epoch": 1.86, + "grad_norm": 14.32393672489673, + "learning_rate": 6.686808074733638e-06, + "loss": 0.6336, + "step": 11894 + }, + { + "epoch": 1.86, + "grad_norm": 14.550682988187054, + "learning_rate": 6.6852169728341565e-06, + "loss": 0.5505, + "step": 11895 + }, + { + "epoch": 1.86, + "grad_norm": 28.163360533517643, + "learning_rate": 6.683625965205162e-06, + "loss": 0.5482, + "step": 11896 + }, + { + "epoch": 1.86, + "grad_norm": 18.439995082846295, + "learning_rate": 6.6820350518919044e-06, + "loss": 0.5604, + "step": 11897 + }, + { + "epoch": 1.86, + "grad_norm": 13.926460986782294, + "learning_rate": 6.680444232939629e-06, + "loss": 0.5129, + "step": 11898 + }, + { + "epoch": 1.86, + "grad_norm": 13.509597936661928, + "learning_rate": 6.6788535083935764e-06, + "loss": 0.4443, + "step": 11899 + }, + { + "epoch": 1.86, + "grad_norm": 17.854731308975214, + "learning_rate": 6.677262878298993e-06, + "loss": 0.5309, + "step": 11900 + }, + { + "epoch": 1.86, + "grad_norm": 18.2357011253982, + "learning_rate": 6.675672342701107e-06, + "loss": 0.5588, + "step": 11901 + }, + { + "epoch": 1.86, + "grad_norm": 14.498992077658713, + "learning_rate": 6.674081901645153e-06, + "loss": 0.5059, + "step": 11902 + }, + { + "epoch": 1.86, + "grad_norm": 23.24220808973923, + "learning_rate": 6.672491555176363e-06, + "loss": 0.548, + "step": 11903 + }, + { + "epoch": 1.86, + "grad_norm": 23.62993069630609, + "learning_rate": 6.6709013033399695e-06, + "loss": 0.5014, + "step": 11904 + }, + { + "epoch": 1.86, + "grad_norm": 16.23498499360388, + "learning_rate": 6.6693111461811975e-06, + "loss": 0.5233, + "step": 11905 + }, + { + "epoch": 1.86, + "grad_norm": 13.433866590705799, + "learning_rate": 6.6677210837452645e-06, + "loss": 0.5078, + "step": 11906 + }, + { + "epoch": 1.86, + "grad_norm": 15.8680186939078, + "learning_rate": 6.6661311160773945e-06, + "loss": 0.531, + "step": 11907 + }, + { + "epoch": 1.86, + "grad_norm": 27.097072217239205, + "learning_rate": 6.664541243222806e-06, + "loss": 0.5464, + "step": 11908 + }, + { + "epoch": 1.86, + "grad_norm": 19.21038081373631, + "learning_rate": 6.662951465226713e-06, + "loss": 0.5657, + "step": 11909 + }, + { + "epoch": 1.86, + "grad_norm": 20.1176575815036, + "learning_rate": 6.66136178213433e-06, + "loss": 0.5954, + "step": 11910 + }, + { + "epoch": 1.86, + "grad_norm": 13.379564099253406, + "learning_rate": 6.659772193990865e-06, + "loss": 0.4821, + "step": 11911 + }, + { + "epoch": 1.86, + "grad_norm": 20.86837813145166, + "learning_rate": 6.658182700841521e-06, + "loss": 0.5862, + "step": 11912 + }, + { + "epoch": 1.86, + 
"grad_norm": 24.83615664193141, + "learning_rate": 6.656593302731509e-06, + "loss": 0.5975, + "step": 11913 + }, + { + "epoch": 1.86, + "grad_norm": 27.50810701342046, + "learning_rate": 6.655003999706027e-06, + "loss": 0.5414, + "step": 11914 + }, + { + "epoch": 1.86, + "grad_norm": 26.595309570946146, + "learning_rate": 6.653414791810277e-06, + "loss": 0.5938, + "step": 11915 + }, + { + "epoch": 1.86, + "grad_norm": 21.01384182990767, + "learning_rate": 6.6518256790894484e-06, + "loss": 0.5025, + "step": 11916 + }, + { + "epoch": 1.86, + "grad_norm": 14.942821893884318, + "learning_rate": 6.6502366615887415e-06, + "loss": 0.4598, + "step": 11917 + }, + { + "epoch": 1.86, + "grad_norm": 16.74590904804912, + "learning_rate": 6.648647739353342e-06, + "loss": 0.6213, + "step": 11918 + }, + { + "epoch": 1.86, + "grad_norm": 20.531460046782406, + "learning_rate": 6.6470589124284414e-06, + "loss": 0.5528, + "step": 11919 + }, + { + "epoch": 1.86, + "grad_norm": 19.49027521427508, + "learning_rate": 6.645470180859227e-06, + "loss": 0.5561, + "step": 11920 + }, + { + "epoch": 1.86, + "grad_norm": 21.957432561850307, + "learning_rate": 6.643881544690876e-06, + "loss": 0.5572, + "step": 11921 + }, + { + "epoch": 1.86, + "grad_norm": 23.841515348867155, + "learning_rate": 6.6422930039685695e-06, + "loss": 0.5858, + "step": 11922 + }, + { + "epoch": 1.86, + "grad_norm": 14.962875077399254, + "learning_rate": 6.640704558737487e-06, + "loss": 0.5437, + "step": 11923 + }, + { + "epoch": 1.86, + "grad_norm": 21.801763986755386, + "learning_rate": 6.639116209042798e-06, + "loss": 0.5837, + "step": 11924 + }, + { + "epoch": 1.86, + "grad_norm": 14.553471164792626, + "learning_rate": 6.637527954929685e-06, + "loss": 0.5125, + "step": 11925 + }, + { + "epoch": 1.86, + "grad_norm": 18.343271173697165, + "learning_rate": 6.635939796443306e-06, + "loss": 0.5093, + "step": 11926 + }, + { + "epoch": 1.86, + "grad_norm": 16.16702925649879, + "learning_rate": 6.634351733628832e-06, + "loss": 0.4375, + "step": 11927 + }, + { + "epoch": 1.86, + "grad_norm": 15.231680656249214, + "learning_rate": 6.632763766531423e-06, + "loss": 0.5767, + "step": 11928 + }, + { + "epoch": 1.86, + "grad_norm": 14.807245196131904, + "learning_rate": 6.631175895196245e-06, + "loss": 0.4982, + "step": 11929 + }, + { + "epoch": 1.86, + "grad_norm": 23.58400584091568, + "learning_rate": 6.629588119668458e-06, + "loss": 0.5558, + "step": 11930 + }, + { + "epoch": 1.86, + "grad_norm": 25.211921388987264, + "learning_rate": 6.628000439993207e-06, + "loss": 0.581, + "step": 11931 + }, + { + "epoch": 1.86, + "grad_norm": 15.20751505527256, + "learning_rate": 6.626412856215653e-06, + "loss": 0.5769, + "step": 11932 + }, + { + "epoch": 1.86, + "grad_norm": 23.05257301884939, + "learning_rate": 6.624825368380944e-06, + "loss": 0.5327, + "step": 11933 + }, + { + "epoch": 1.86, + "grad_norm": 16.78064218766533, + "learning_rate": 6.623237976534225e-06, + "loss": 0.5307, + "step": 11934 + }, + { + "epoch": 1.86, + "grad_norm": 20.038876802576763, + "learning_rate": 6.6216506807206475e-06, + "loss": 0.5385, + "step": 11935 + }, + { + "epoch": 1.86, + "grad_norm": 27.964040149215766, + "learning_rate": 6.620063480985346e-06, + "loss": 0.591, + "step": 11936 + }, + { + "epoch": 1.86, + "grad_norm": 18.065915284779983, + "learning_rate": 6.61847637737346e-06, + "loss": 0.5, + "step": 11937 + }, + { + "epoch": 1.86, + "grad_norm": 25.694112426091873, + "learning_rate": 6.6168893699301286e-06, + "loss": 0.6282, + "step": 11938 + }, + { + "epoch": 1.86, + 
"grad_norm": 22.02899333407536, + "learning_rate": 6.6153024587004855e-06, + "loss": 0.5439, + "step": 11939 + }, + { + "epoch": 1.87, + "grad_norm": 21.54619790065212, + "learning_rate": 6.613715643729661e-06, + "loss": 0.52, + "step": 11940 + }, + { + "epoch": 1.87, + "grad_norm": 14.125489193222258, + "learning_rate": 6.612128925062781e-06, + "loss": 0.5128, + "step": 11941 + }, + { + "epoch": 1.87, + "grad_norm": 15.600904049204516, + "learning_rate": 6.610542302744973e-06, + "loss": 0.4713, + "step": 11942 + }, + { + "epoch": 1.87, + "grad_norm": 16.244118339483844, + "learning_rate": 6.608955776821357e-06, + "loss": 0.5841, + "step": 11943 + }, + { + "epoch": 1.87, + "grad_norm": 18.010179813372737, + "learning_rate": 6.60736934733706e-06, + "loss": 0.5324, + "step": 11944 + }, + { + "epoch": 1.87, + "grad_norm": 20.17303308343498, + "learning_rate": 6.605783014337192e-06, + "loss": 0.58, + "step": 11945 + }, + { + "epoch": 1.87, + "grad_norm": 24.99588703267362, + "learning_rate": 6.6041967778668695e-06, + "loss": 0.5321, + "step": 11946 + }, + { + "epoch": 1.87, + "grad_norm": 12.759150767172045, + "learning_rate": 6.602610637971201e-06, + "loss": 0.5313, + "step": 11947 + }, + { + "epoch": 1.87, + "grad_norm": 15.140154150185321, + "learning_rate": 6.601024594695302e-06, + "loss": 0.5859, + "step": 11948 + }, + { + "epoch": 1.87, + "grad_norm": 26.09471790037621, + "learning_rate": 6.599438648084277e-06, + "loss": 0.5384, + "step": 11949 + }, + { + "epoch": 1.87, + "grad_norm": 22.696077316843915, + "learning_rate": 6.597852798183222e-06, + "loss": 0.5021, + "step": 11950 + }, + { + "epoch": 1.87, + "grad_norm": 22.99404933000497, + "learning_rate": 6.596267045037247e-06, + "loss": 0.4996, + "step": 11951 + }, + { + "epoch": 1.87, + "grad_norm": 17.844594934604636, + "learning_rate": 6.594681388691445e-06, + "loss": 0.5581, + "step": 11952 + }, + { + "epoch": 1.87, + "grad_norm": 18.823244354999975, + "learning_rate": 6.593095829190911e-06, + "loss": 0.5456, + "step": 11953 + }, + { + "epoch": 1.87, + "grad_norm": 18.63540224995561, + "learning_rate": 6.591510366580742e-06, + "loss": 0.5803, + "step": 11954 + }, + { + "epoch": 1.87, + "grad_norm": 25.34283976913497, + "learning_rate": 6.589925000906023e-06, + "loss": 0.6194, + "step": 11955 + }, + { + "epoch": 1.87, + "grad_norm": 19.68611024235323, + "learning_rate": 6.588339732211839e-06, + "loss": 0.5292, + "step": 11956 + }, + { + "epoch": 1.87, + "grad_norm": 20.242855904238755, + "learning_rate": 6.586754560543281e-06, + "loss": 0.5524, + "step": 11957 + }, + { + "epoch": 1.87, + "grad_norm": 16.19504177016277, + "learning_rate": 6.585169485945425e-06, + "loss": 0.5544, + "step": 11958 + }, + { + "epoch": 1.87, + "grad_norm": 17.036393206621653, + "learning_rate": 6.583584508463354e-06, + "loss": 0.5281, + "step": 11959 + }, + { + "epoch": 1.87, + "grad_norm": 16.209893802917698, + "learning_rate": 6.581999628142137e-06, + "loss": 0.5102, + "step": 11960 + }, + { + "epoch": 1.87, + "grad_norm": 21.44764380716063, + "learning_rate": 6.580414845026853e-06, + "loss": 0.6065, + "step": 11961 + }, + { + "epoch": 1.87, + "grad_norm": 14.453680625751677, + "learning_rate": 6.578830159162568e-06, + "loss": 0.5405, + "step": 11962 + }, + { + "epoch": 1.87, + "grad_norm": 12.52716847491306, + "learning_rate": 6.577245570594355e-06, + "loss": 0.5187, + "step": 11963 + }, + { + "epoch": 1.87, + "grad_norm": 14.793590153852755, + "learning_rate": 6.575661079367277e-06, + "loss": 0.5581, + "step": 11964 + }, + { + "epoch": 1.87, + 
"grad_norm": 16.835090525624217, + "learning_rate": 6.574076685526392e-06, + "loss": 0.5464, + "step": 11965 + }, + { + "epoch": 1.87, + "grad_norm": 20.724116884577217, + "learning_rate": 6.572492389116761e-06, + "loss": 0.5823, + "step": 11966 + }, + { + "epoch": 1.87, + "grad_norm": 15.399030876242543, + "learning_rate": 6.570908190183442e-06, + "loss": 0.575, + "step": 11967 + }, + { + "epoch": 1.87, + "grad_norm": 16.239860788201426, + "learning_rate": 6.569324088771488e-06, + "loss": 0.5382, + "step": 11968 + }, + { + "epoch": 1.87, + "grad_norm": 16.204370771723788, + "learning_rate": 6.5677400849259535e-06, + "loss": 0.5312, + "step": 11969 + }, + { + "epoch": 1.87, + "grad_norm": 21.074923913451805, + "learning_rate": 6.566156178691882e-06, + "loss": 0.5588, + "step": 11970 + }, + { + "epoch": 1.87, + "grad_norm": 13.565930506182092, + "learning_rate": 6.564572370114318e-06, + "loss": 0.5463, + "step": 11971 + }, + { + "epoch": 1.87, + "grad_norm": 13.476208829247197, + "learning_rate": 6.562988659238305e-06, + "loss": 0.4763, + "step": 11972 + }, + { + "epoch": 1.87, + "grad_norm": 19.253953913921627, + "learning_rate": 6.561405046108886e-06, + "loss": 0.5453, + "step": 11973 + }, + { + "epoch": 1.87, + "grad_norm": 31.662751848120546, + "learning_rate": 6.5598215307710975e-06, + "loss": 0.5514, + "step": 11974 + }, + { + "epoch": 1.87, + "grad_norm": 16.379043718630413, + "learning_rate": 6.55823811326997e-06, + "loss": 0.4958, + "step": 11975 + }, + { + "epoch": 1.87, + "grad_norm": 13.306475433529185, + "learning_rate": 6.5566547936505375e-06, + "loss": 0.4861, + "step": 11976 + }, + { + "epoch": 1.87, + "grad_norm": 15.781586888938156, + "learning_rate": 6.5550715719578295e-06, + "loss": 0.5202, + "step": 11977 + }, + { + "epoch": 1.87, + "grad_norm": 13.454892737963295, + "learning_rate": 6.5534884482368665e-06, + "loss": 0.5321, + "step": 11978 + }, + { + "epoch": 1.87, + "grad_norm": 30.386922081657993, + "learning_rate": 6.551905422532683e-06, + "loss": 0.5595, + "step": 11979 + }, + { + "epoch": 1.87, + "grad_norm": 13.129213434522658, + "learning_rate": 6.550322494890289e-06, + "loss": 0.5749, + "step": 11980 + }, + { + "epoch": 1.87, + "grad_norm": 31.121168642850527, + "learning_rate": 6.548739665354702e-06, + "loss": 0.6267, + "step": 11981 + }, + { + "epoch": 1.87, + "grad_norm": 26.492847846690314, + "learning_rate": 6.547156933970943e-06, + "loss": 0.4809, + "step": 11982 + }, + { + "epoch": 1.87, + "grad_norm": 21.42820274279954, + "learning_rate": 6.545574300784021e-06, + "loss": 0.5006, + "step": 11983 + }, + { + "epoch": 1.87, + "grad_norm": 13.74027669340231, + "learning_rate": 6.543991765838946e-06, + "loss": 0.4851, + "step": 11984 + }, + { + "epoch": 1.87, + "grad_norm": 21.6468925805872, + "learning_rate": 6.542409329180721e-06, + "loss": 0.5231, + "step": 11985 + }, + { + "epoch": 1.87, + "grad_norm": 16.411628075991704, + "learning_rate": 6.540826990854353e-06, + "loss": 0.4959, + "step": 11986 + }, + { + "epoch": 1.87, + "grad_norm": 19.310887480824633, + "learning_rate": 6.53924475090484e-06, + "loss": 0.641, + "step": 11987 + }, + { + "epoch": 1.87, + "grad_norm": 14.091250560901004, + "learning_rate": 6.537662609377184e-06, + "loss": 0.4852, + "step": 11988 + }, + { + "epoch": 1.87, + "grad_norm": 14.301149613096703, + "learning_rate": 6.53608056631638e-06, + "loss": 0.536, + "step": 11989 + }, + { + "epoch": 1.87, + "grad_norm": 18.0324518614721, + "learning_rate": 6.534498621767418e-06, + "loss": 0.568, + "step": 11990 + }, + { + "epoch": 1.87, + 
"grad_norm": 16.685525064723162, + "learning_rate": 6.5329167757752835e-06, + "loss": 0.5087, + "step": 11991 + }, + { + "epoch": 1.87, + "grad_norm": 21.139792904897877, + "learning_rate": 6.531335028384971e-06, + "loss": 0.5631, + "step": 11992 + }, + { + "epoch": 1.87, + "grad_norm": 30.70380287198627, + "learning_rate": 6.529753379641462e-06, + "loss": 0.5151, + "step": 11993 + }, + { + "epoch": 1.87, + "grad_norm": 15.420217631754886, + "learning_rate": 6.528171829589734e-06, + "loss": 0.5444, + "step": 11994 + }, + { + "epoch": 1.87, + "grad_norm": 17.095784423662707, + "learning_rate": 6.52659037827477e-06, + "loss": 0.5532, + "step": 11995 + }, + { + "epoch": 1.87, + "grad_norm": 16.702125868921176, + "learning_rate": 6.525009025741543e-06, + "loss": 0.5825, + "step": 11996 + }, + { + "epoch": 1.87, + "grad_norm": 22.49969019246063, + "learning_rate": 6.523427772035024e-06, + "loss": 0.569, + "step": 11997 + }, + { + "epoch": 1.87, + "grad_norm": 18.672128056363587, + "learning_rate": 6.521846617200191e-06, + "loss": 0.5452, + "step": 11998 + }, + { + "epoch": 1.87, + "grad_norm": 20.031964799807888, + "learning_rate": 6.520265561282004e-06, + "loss": 0.4891, + "step": 11999 + }, + { + "epoch": 1.87, + "grad_norm": 18.824851233042082, + "learning_rate": 6.5186846043254246e-06, + "loss": 0.5455, + "step": 12000 + }, + { + "epoch": 1.87, + "grad_norm": 20.722747281923528, + "learning_rate": 6.5171037463754224e-06, + "loss": 0.4926, + "step": 12001 + }, + { + "epoch": 1.87, + "grad_norm": 20.635381943777627, + "learning_rate": 6.515522987476952e-06, + "loss": 0.5578, + "step": 12002 + }, + { + "epoch": 1.87, + "grad_norm": 20.64048423798505, + "learning_rate": 6.513942327674971e-06, + "loss": 0.5128, + "step": 12003 + }, + { + "epoch": 1.88, + "grad_norm": 16.054445207350017, + "learning_rate": 6.512361767014427e-06, + "loss": 0.4807, + "step": 12004 + }, + { + "epoch": 1.88, + "grad_norm": 26.537284136804757, + "learning_rate": 6.510781305540277e-06, + "loss": 0.5757, + "step": 12005 + }, + { + "epoch": 1.88, + "grad_norm": 24.2552813390368, + "learning_rate": 6.509200943297463e-06, + "loss": 0.5545, + "step": 12006 + }, + { + "epoch": 1.88, + "grad_norm": 18.618808972273953, + "learning_rate": 6.507620680330933e-06, + "loss": 0.558, + "step": 12007 + }, + { + "epoch": 1.88, + "grad_norm": 17.196480106087815, + "learning_rate": 6.50604051668563e-06, + "loss": 0.4874, + "step": 12008 + }, + { + "epoch": 1.88, + "grad_norm": 18.234040437030032, + "learning_rate": 6.504460452406489e-06, + "loss": 0.5577, + "step": 12009 + }, + { + "epoch": 1.88, + "grad_norm": 13.218088589459896, + "learning_rate": 6.502880487538448e-06, + "loss": 0.5357, + "step": 12010 + }, + { + "epoch": 1.88, + "grad_norm": 16.23254004343529, + "learning_rate": 6.50130062212644e-06, + "loss": 0.4843, + "step": 12011 + }, + { + "epoch": 1.88, + "grad_norm": 18.948171562680933, + "learning_rate": 6.499720856215395e-06, + "loss": 0.5707, + "step": 12012 + }, + { + "epoch": 1.88, + "grad_norm": 19.484562485962197, + "learning_rate": 6.498141189850243e-06, + "loss": 0.5083, + "step": 12013 + }, + { + "epoch": 1.88, + "grad_norm": 22.168233041406186, + "learning_rate": 6.496561623075907e-06, + "loss": 0.6047, + "step": 12014 + }, + { + "epoch": 1.88, + "grad_norm": 15.80088044407739, + "learning_rate": 6.494982155937308e-06, + "loss": 0.5362, + "step": 12015 + }, + { + "epoch": 1.88, + "grad_norm": 24.10934125733502, + "learning_rate": 6.493402788479365e-06, + "loss": 0.614, + "step": 12016 + }, + { + "epoch": 1.88, + 
"grad_norm": 15.099278274792237, + "learning_rate": 6.491823520746996e-06, + "loss": 0.4768, + "step": 12017 + }, + { + "epoch": 1.88, + "grad_norm": 18.835058640332303, + "learning_rate": 6.4902443527851175e-06, + "loss": 0.4981, + "step": 12018 + }, + { + "epoch": 1.88, + "grad_norm": 17.48080520220848, + "learning_rate": 6.488665284638631e-06, + "loss": 0.5364, + "step": 12019 + }, + { + "epoch": 1.88, + "grad_norm": 16.531853567924134, + "learning_rate": 6.487086316352453e-06, + "loss": 0.4992, + "step": 12020 + }, + { + "epoch": 1.88, + "grad_norm": 23.790879642131458, + "learning_rate": 6.4855074479714845e-06, + "loss": 0.588, + "step": 12021 + }, + { + "epoch": 1.88, + "grad_norm": 23.51269961058938, + "learning_rate": 6.483928679540627e-06, + "loss": 0.5755, + "step": 12022 + }, + { + "epoch": 1.88, + "grad_norm": 27.226335474214363, + "learning_rate": 6.482350011104785e-06, + "loss": 0.6003, + "step": 12023 + }, + { + "epoch": 1.88, + "grad_norm": 18.332537974278345, + "learning_rate": 6.480771442708847e-06, + "loss": 0.4889, + "step": 12024 + }, + { + "epoch": 1.88, + "grad_norm": 20.805310986666903, + "learning_rate": 6.47919297439771e-06, + "loss": 0.581, + "step": 12025 + }, + { + "epoch": 1.88, + "grad_norm": 20.600358841777485, + "learning_rate": 6.477614606216267e-06, + "loss": 0.5528, + "step": 12026 + }, + { + "epoch": 1.88, + "grad_norm": 18.761596828209886, + "learning_rate": 6.476036338209404e-06, + "loss": 0.613, + "step": 12027 + }, + { + "epoch": 1.88, + "grad_norm": 16.9691599011309, + "learning_rate": 6.474458170422009e-06, + "loss": 0.5067, + "step": 12028 + }, + { + "epoch": 1.88, + "grad_norm": 22.95342617798133, + "learning_rate": 6.4728801028989556e-06, + "loss": 0.584, + "step": 12029 + }, + { + "epoch": 1.88, + "grad_norm": 18.458857956363488, + "learning_rate": 6.471302135685131e-06, + "loss": 0.5122, + "step": 12030 + }, + { + "epoch": 1.88, + "grad_norm": 20.434727997712663, + "learning_rate": 6.4697242688254095e-06, + "loss": 0.5729, + "step": 12031 + }, + { + "epoch": 1.88, + "grad_norm": 14.332635926544123, + "learning_rate": 6.468146502364665e-06, + "loss": 0.5738, + "step": 12032 + }, + { + "epoch": 1.88, + "grad_norm": 21.16065222525277, + "learning_rate": 6.46656883634777e-06, + "loss": 0.5258, + "step": 12033 + }, + { + "epoch": 1.88, + "grad_norm": 16.332691181118758, + "learning_rate": 6.464991270819587e-06, + "loss": 0.5065, + "step": 12034 + }, + { + "epoch": 1.88, + "grad_norm": 21.425591228550076, + "learning_rate": 6.463413805824985e-06, + "loss": 0.5185, + "step": 12035 + }, + { + "epoch": 1.88, + "grad_norm": 25.27596465451524, + "learning_rate": 6.4618364414088264e-06, + "loss": 0.5113, + "step": 12036 + }, + { + "epoch": 1.88, + "grad_norm": 25.13137946699103, + "learning_rate": 6.460259177615966e-06, + "loss": 0.5017, + "step": 12037 + }, + { + "epoch": 1.88, + "grad_norm": 19.078665961157515, + "learning_rate": 6.458682014491271e-06, + "loss": 0.5111, + "step": 12038 + }, + { + "epoch": 1.88, + "grad_norm": 22.039267404476618, + "learning_rate": 6.4571049520795836e-06, + "loss": 0.6013, + "step": 12039 + }, + { + "epoch": 1.88, + "grad_norm": 16.297309446350216, + "learning_rate": 6.45552799042576e-06, + "loss": 0.4705, + "step": 12040 + }, + { + "epoch": 1.88, + "grad_norm": 15.868311334626522, + "learning_rate": 6.453951129574644e-06, + "loss": 0.5293, + "step": 12041 + }, + { + "epoch": 1.88, + "grad_norm": 16.002769508056005, + "learning_rate": 6.452374369571088e-06, + "loss": 0.4852, + "step": 12042 + }, + { + "epoch": 1.88, + 
"grad_norm": 14.531897925985582, + "learning_rate": 6.4507977104599275e-06, + "loss": 0.5412, + "step": 12043 + }, + { + "epoch": 1.88, + "grad_norm": 15.400456852549771, + "learning_rate": 6.449221152286001e-06, + "loss": 0.552, + "step": 12044 + }, + { + "epoch": 1.88, + "grad_norm": 15.919692352216606, + "learning_rate": 6.447644695094151e-06, + "loss": 0.5007, + "step": 12045 + }, + { + "epoch": 1.88, + "grad_norm": 15.454047197725393, + "learning_rate": 6.446068338929208e-06, + "loss": 0.4966, + "step": 12046 + }, + { + "epoch": 1.88, + "grad_norm": 21.529276509635686, + "learning_rate": 6.444492083836004e-06, + "loss": 0.5484, + "step": 12047 + }, + { + "epoch": 1.88, + "grad_norm": 13.080353742944991, + "learning_rate": 6.442915929859361e-06, + "loss": 0.5258, + "step": 12048 + }, + { + "epoch": 1.88, + "grad_norm": 17.57137340668942, + "learning_rate": 6.44133987704411e-06, + "loss": 0.4786, + "step": 12049 + }, + { + "epoch": 1.88, + "grad_norm": 20.809829112026197, + "learning_rate": 6.43976392543507e-06, + "loss": 0.5064, + "step": 12050 + }, + { + "epoch": 1.88, + "grad_norm": 21.35816842140582, + "learning_rate": 6.438188075077062e-06, + "loss": 0.5983, + "step": 12051 + }, + { + "epoch": 1.88, + "grad_norm": 13.744822945028577, + "learning_rate": 6.436612326014904e-06, + "loss": 0.4876, + "step": 12052 + }, + { + "epoch": 1.88, + "grad_norm": 13.071159078858736, + "learning_rate": 6.435036678293405e-06, + "loss": 0.4982, + "step": 12053 + }, + { + "epoch": 1.88, + "grad_norm": 18.206086003062207, + "learning_rate": 6.433461131957376e-06, + "loss": 0.5777, + "step": 12054 + }, + { + "epoch": 1.88, + "grad_norm": 13.565210411623358, + "learning_rate": 6.431885687051628e-06, + "loss": 0.5207, + "step": 12055 + }, + { + "epoch": 1.88, + "grad_norm": 11.904620525914225, + "learning_rate": 6.43031034362096e-06, + "loss": 0.521, + "step": 12056 + }, + { + "epoch": 1.88, + "grad_norm": 23.268963539661176, + "learning_rate": 6.428735101710184e-06, + "loss": 0.5785, + "step": 12057 + }, + { + "epoch": 1.88, + "grad_norm": 19.266283307890372, + "learning_rate": 6.427159961364089e-06, + "loss": 0.5278, + "step": 12058 + }, + { + "epoch": 1.88, + "grad_norm": 15.946035343048436, + "learning_rate": 6.425584922627476e-06, + "loss": 0.6558, + "step": 12059 + }, + { + "epoch": 1.88, + "grad_norm": 20.4069344170661, + "learning_rate": 6.424009985545134e-06, + "loss": 0.5802, + "step": 12060 + }, + { + "epoch": 1.88, + "grad_norm": 26.070471611371907, + "learning_rate": 6.422435150161859e-06, + "loss": 0.5259, + "step": 12061 + }, + { + "epoch": 1.88, + "grad_norm": 14.002173108227096, + "learning_rate": 6.4208604165224366e-06, + "loss": 0.5054, + "step": 12062 + }, + { + "epoch": 1.88, + "grad_norm": 29.44864237138187, + "learning_rate": 6.419285784671645e-06, + "loss": 0.5329, + "step": 12063 + }, + { + "epoch": 1.88, + "grad_norm": 14.694834885448381, + "learning_rate": 6.417711254654275e-06, + "loss": 0.5357, + "step": 12064 + }, + { + "epoch": 1.88, + "grad_norm": 21.190361391668823, + "learning_rate": 6.4161368265151e-06, + "loss": 0.5836, + "step": 12065 + }, + { + "epoch": 1.88, + "grad_norm": 23.61209067726471, + "learning_rate": 6.414562500298896e-06, + "loss": 0.5106, + "step": 12066 + }, + { + "epoch": 1.88, + "grad_norm": 23.921850059005422, + "learning_rate": 6.412988276050441e-06, + "loss": 0.5209, + "step": 12067 + }, + { + "epoch": 1.89, + "grad_norm": 17.85677272130333, + "learning_rate": 6.411414153814499e-06, + "loss": 0.5596, + "step": 12068 + }, + { + "epoch": 1.89, + 
"grad_norm": 26.65474902585938, + "learning_rate": 6.409840133635837e-06, + "loss": 0.5578, + "step": 12069 + }, + { + "epoch": 1.89, + "grad_norm": 23.700509780796263, + "learning_rate": 6.408266215559225e-06, + "loss": 0.5157, + "step": 12070 + }, + { + "epoch": 1.89, + "grad_norm": 20.823636016248656, + "learning_rate": 6.406692399629418e-06, + "loss": 0.5578, + "step": 12071 + }, + { + "epoch": 1.89, + "grad_norm": 23.114453731968332, + "learning_rate": 6.405118685891181e-06, + "loss": 0.5969, + "step": 12072 + }, + { + "epoch": 1.89, + "grad_norm": 25.228801245186332, + "learning_rate": 6.403545074389261e-06, + "loss": 0.6072, + "step": 12073 + }, + { + "epoch": 1.89, + "grad_norm": 11.891383374977051, + "learning_rate": 6.401971565168416e-06, + "loss": 0.5766, + "step": 12074 + }, + { + "epoch": 1.89, + "grad_norm": 19.5082393992152, + "learning_rate": 6.400398158273395e-06, + "loss": 0.613, + "step": 12075 + }, + { + "epoch": 1.89, + "grad_norm": 12.321157001973747, + "learning_rate": 6.398824853748946e-06, + "loss": 0.491, + "step": 12076 + }, + { + "epoch": 1.89, + "grad_norm": 23.11424374184085, + "learning_rate": 6.397251651639813e-06, + "loss": 0.5516, + "step": 12077 + }, + { + "epoch": 1.89, + "grad_norm": 12.155877556280513, + "learning_rate": 6.395678551990735e-06, + "loss": 0.5044, + "step": 12078 + }, + { + "epoch": 1.89, + "grad_norm": 20.834961961666178, + "learning_rate": 6.394105554846447e-06, + "loss": 0.5645, + "step": 12079 + }, + { + "epoch": 1.89, + "grad_norm": 27.123841943828932, + "learning_rate": 6.3925326602516904e-06, + "loss": 0.5897, + "step": 12080 + }, + { + "epoch": 1.89, + "grad_norm": 28.80286386540562, + "learning_rate": 6.390959868251192e-06, + "loss": 0.5403, + "step": 12081 + }, + { + "epoch": 1.89, + "grad_norm": 25.309276944286207, + "learning_rate": 6.389387178889688e-06, + "loss": 0.5799, + "step": 12082 + }, + { + "epoch": 1.89, + "grad_norm": 15.868157630875439, + "learning_rate": 6.3878145922118985e-06, + "loss": 0.5274, + "step": 12083 + }, + { + "epoch": 1.89, + "grad_norm": 24.53124128967179, + "learning_rate": 6.3862421082625495e-06, + "loss": 0.4512, + "step": 12084 + }, + { + "epoch": 1.89, + "grad_norm": 20.752414053072727, + "learning_rate": 6.3846697270863576e-06, + "loss": 0.557, + "step": 12085 + }, + { + "epoch": 1.89, + "grad_norm": 23.740643366025363, + "learning_rate": 6.383097448728046e-06, + "loss": 0.5528, + "step": 12086 + }, + { + "epoch": 1.89, + "grad_norm": 15.205294359221694, + "learning_rate": 6.3815252732323304e-06, + "loss": 0.5682, + "step": 12087 + }, + { + "epoch": 1.89, + "grad_norm": 15.470416003879741, + "learning_rate": 6.379953200643915e-06, + "loss": 0.5613, + "step": 12088 + }, + { + "epoch": 1.89, + "grad_norm": 12.122439780259775, + "learning_rate": 6.378381231007514e-06, + "loss": 0.472, + "step": 12089 + }, + { + "epoch": 1.89, + "grad_norm": 24.79463706046399, + "learning_rate": 6.376809364367833e-06, + "loss": 0.5723, + "step": 12090 + }, + { + "epoch": 1.89, + "grad_norm": 20.655461484572395, + "learning_rate": 6.375237600769575e-06, + "loss": 0.5336, + "step": 12091 + }, + { + "epoch": 1.89, + "grad_norm": 17.200802081020502, + "learning_rate": 6.373665940257436e-06, + "loss": 0.4941, + "step": 12092 + }, + { + "epoch": 1.89, + "grad_norm": 27.97962465335117, + "learning_rate": 6.37209438287612e-06, + "loss": 0.5426, + "step": 12093 + }, + { + "epoch": 1.89, + "grad_norm": 16.244034309401638, + "learning_rate": 6.370522928670313e-06, + "loss": 0.5486, + "step": 12094 + }, + { + "epoch": 1.89, + 
"grad_norm": 23.552523041911183, + "learning_rate": 6.368951577684713e-06, + "loss": 0.5198, + "step": 12095 + }, + { + "epoch": 1.89, + "grad_norm": 17.469898056492717, + "learning_rate": 6.367380329964009e-06, + "loss": 0.534, + "step": 12096 + }, + { + "epoch": 1.89, + "grad_norm": 19.157528948134143, + "learning_rate": 6.36580918555288e-06, + "loss": 0.5386, + "step": 12097 + }, + { + "epoch": 1.89, + "grad_norm": 17.377336372007253, + "learning_rate": 6.364238144496011e-06, + "loss": 0.5521, + "step": 12098 + }, + { + "epoch": 1.89, + "grad_norm": 24.245526389650948, + "learning_rate": 6.362667206838085e-06, + "loss": 0.5953, + "step": 12099 + }, + { + "epoch": 1.89, + "grad_norm": 16.804114905249342, + "learning_rate": 6.361096372623773e-06, + "loss": 0.5421, + "step": 12100 + }, + { + "epoch": 1.89, + "grad_norm": 18.342537276912662, + "learning_rate": 6.359525641897755e-06, + "loss": 0.5526, + "step": 12101 + }, + { + "epoch": 1.89, + "grad_norm": 13.77301974320269, + "learning_rate": 6.357955014704698e-06, + "loss": 0.4785, + "step": 12102 + }, + { + "epoch": 1.89, + "grad_norm": 18.091841981690127, + "learning_rate": 6.356384491089267e-06, + "loss": 0.4871, + "step": 12103 + }, + { + "epoch": 1.89, + "grad_norm": 24.33809527510175, + "learning_rate": 6.354814071096129e-06, + "loss": 0.593, + "step": 12104 + }, + { + "epoch": 1.89, + "grad_norm": 17.102190394788074, + "learning_rate": 6.353243754769948e-06, + "loss": 0.5027, + "step": 12105 + }, + { + "epoch": 1.89, + "grad_norm": 18.060306430312203, + "learning_rate": 6.351673542155385e-06, + "loss": 0.4962, + "step": 12106 + }, + { + "epoch": 1.89, + "grad_norm": 20.6670855382289, + "learning_rate": 6.350103433297087e-06, + "loss": 0.5085, + "step": 12107 + }, + { + "epoch": 1.89, + "grad_norm": 21.680164550481418, + "learning_rate": 6.348533428239714e-06, + "loss": 0.634, + "step": 12108 + }, + { + "epoch": 1.89, + "grad_norm": 13.489945181727462, + "learning_rate": 6.346963527027914e-06, + "loss": 0.5525, + "step": 12109 + }, + { + "epoch": 1.89, + "grad_norm": 16.19784392701362, + "learning_rate": 6.345393729706333e-06, + "loss": 0.546, + "step": 12110 + }, + { + "epoch": 1.89, + "grad_norm": 19.954025546209017, + "learning_rate": 6.343824036319622e-06, + "loss": 0.5526, + "step": 12111 + }, + { + "epoch": 1.89, + "grad_norm": 23.34881135030741, + "learning_rate": 6.3422544469124116e-06, + "loss": 0.5157, + "step": 12112 + }, + { + "epoch": 1.89, + "grad_norm": 20.118293256647107, + "learning_rate": 6.340684961529344e-06, + "loss": 0.5003, + "step": 12113 + }, + { + "epoch": 1.89, + "grad_norm": 29.154600797479603, + "learning_rate": 6.3391155802150585e-06, + "loss": 0.5921, + "step": 12114 + }, + { + "epoch": 1.89, + "grad_norm": 21.12816342673484, + "learning_rate": 6.337546303014182e-06, + "loss": 0.58, + "step": 12115 + }, + { + "epoch": 1.89, + "grad_norm": 20.339794443128238, + "learning_rate": 6.3359771299713515e-06, + "loss": 0.5267, + "step": 12116 + }, + { + "epoch": 1.89, + "grad_norm": 18.634952107227992, + "learning_rate": 6.334408061131182e-06, + "loss": 0.6128, + "step": 12117 + }, + { + "epoch": 1.89, + "grad_norm": 17.08689824993263, + "learning_rate": 6.332839096538306e-06, + "loss": 0.5128, + "step": 12118 + }, + { + "epoch": 1.89, + "grad_norm": 32.108857005412496, + "learning_rate": 6.331270236237337e-06, + "loss": 0.6248, + "step": 12119 + }, + { + "epoch": 1.89, + "grad_norm": 21.527985321504907, + "learning_rate": 6.3297014802729e-06, + "loss": 0.5667, + "step": 12120 + }, + { + "epoch": 1.89, + 
"grad_norm": 21.067530380097878, + "learning_rate": 6.328132828689607e-06, + "loss": 0.5479, + "step": 12121 + }, + { + "epoch": 1.89, + "grad_norm": 15.4930213472013, + "learning_rate": 6.326564281532066e-06, + "loss": 0.5874, + "step": 12122 + }, + { + "epoch": 1.89, + "grad_norm": 24.555763306628627, + "learning_rate": 6.324995838844887e-06, + "loss": 0.5761, + "step": 12123 + }, + { + "epoch": 1.89, + "grad_norm": 13.541109242948549, + "learning_rate": 6.3234275006726785e-06, + "loss": 0.5583, + "step": 12124 + }, + { + "epoch": 1.89, + "grad_norm": 22.056090374825953, + "learning_rate": 6.321859267060039e-06, + "loss": 0.5633, + "step": 12125 + }, + { + "epoch": 1.89, + "grad_norm": 22.46125347346427, + "learning_rate": 6.320291138051574e-06, + "loss": 0.5139, + "step": 12126 + }, + { + "epoch": 1.89, + "grad_norm": 18.564255330388068, + "learning_rate": 6.318723113691874e-06, + "loss": 0.5711, + "step": 12127 + }, + { + "epoch": 1.89, + "grad_norm": 16.729528991683672, + "learning_rate": 6.317155194025536e-06, + "loss": 0.514, + "step": 12128 + }, + { + "epoch": 1.89, + "grad_norm": 15.953630778900546, + "learning_rate": 6.315587379097148e-06, + "loss": 0.4967, + "step": 12129 + }, + { + "epoch": 1.89, + "grad_norm": 19.196151424732893, + "learning_rate": 6.314019668951301e-06, + "loss": 0.4802, + "step": 12130 + }, + { + "epoch": 1.89, + "grad_norm": 28.745074756786046, + "learning_rate": 6.312452063632581e-06, + "loss": 0.6421, + "step": 12131 + }, + { + "epoch": 1.9, + "grad_norm": 18.89685114620413, + "learning_rate": 6.310884563185562e-06, + "loss": 0.5391, + "step": 12132 + }, + { + "epoch": 1.9, + "grad_norm": 19.51968135207296, + "learning_rate": 6.309317167654832e-06, + "loss": 0.5695, + "step": 12133 + }, + { + "epoch": 1.9, + "grad_norm": 16.21754217152341, + "learning_rate": 6.307749877084961e-06, + "loss": 0.5368, + "step": 12134 + }, + { + "epoch": 1.9, + "grad_norm": 15.802725015764079, + "learning_rate": 6.306182691520522e-06, + "loss": 0.5337, + "step": 12135 + }, + { + "epoch": 1.9, + "grad_norm": 21.475203937732335, + "learning_rate": 6.304615611006091e-06, + "loss": 0.5122, + "step": 12136 + }, + { + "epoch": 1.9, + "grad_norm": 18.803607309789403, + "learning_rate": 6.303048635586226e-06, + "loss": 0.5234, + "step": 12137 + }, + { + "epoch": 1.9, + "grad_norm": 18.6900878393442, + "learning_rate": 6.301481765305495e-06, + "loss": 0.4845, + "step": 12138 + }, + { + "epoch": 1.9, + "grad_norm": 14.536274392563245, + "learning_rate": 6.2999150002084605e-06, + "loss": 0.4481, + "step": 12139 + }, + { + "epoch": 1.9, + "grad_norm": 13.34631101077166, + "learning_rate": 6.298348340339681e-06, + "loss": 0.4614, + "step": 12140 + }, + { + "epoch": 1.9, + "grad_norm": 18.733572981706885, + "learning_rate": 6.296781785743708e-06, + "loss": 0.4746, + "step": 12141 + }, + { + "epoch": 1.9, + "grad_norm": 20.696732479182458, + "learning_rate": 6.295215336465094e-06, + "loss": 0.5664, + "step": 12142 + }, + { + "epoch": 1.9, + "grad_norm": 23.773605769800138, + "learning_rate": 6.293648992548389e-06, + "loss": 0.5913, + "step": 12143 + }, + { + "epoch": 1.9, + "grad_norm": 27.63732583878765, + "learning_rate": 6.292082754038135e-06, + "loss": 0.5831, + "step": 12144 + }, + { + "epoch": 1.9, + "grad_norm": 16.396089979180317, + "learning_rate": 6.2905166209788845e-06, + "loss": 0.552, + "step": 12145 + }, + { + "epoch": 1.9, + "grad_norm": 17.33877369740794, + "learning_rate": 6.288950593415168e-06, + "loss": 0.4786, + "step": 12146 + }, + { + "epoch": 1.9, + "grad_norm": 
22.863567746112626, + "learning_rate": 6.2873846713915276e-06, + "loss": 0.5624, + "step": 12147 + }, + { + "epoch": 1.9, + "grad_norm": 35.01740979961583, + "learning_rate": 6.285818854952492e-06, + "loss": 0.5152, + "step": 12148 + }, + { + "epoch": 1.9, + "grad_norm": 26.63655857307103, + "learning_rate": 6.284253144142598e-06, + "loss": 0.5377, + "step": 12149 + }, + { + "epoch": 1.9, + "grad_norm": 10.547387084538498, + "learning_rate": 6.282687539006371e-06, + "loss": 0.4411, + "step": 12150 + }, + { + "epoch": 1.9, + "grad_norm": 22.093835304451684, + "learning_rate": 6.2811220395883346e-06, + "loss": 0.5887, + "step": 12151 + }, + { + "epoch": 1.9, + "grad_norm": 12.83116213730814, + "learning_rate": 6.279556645933011e-06, + "loss": 0.4992, + "step": 12152 + }, + { + "epoch": 1.9, + "grad_norm": 19.305860641229682, + "learning_rate": 6.277991358084923e-06, + "loss": 0.4904, + "step": 12153 + }, + { + "epoch": 1.9, + "grad_norm": 23.656308158944118, + "learning_rate": 6.276426176088579e-06, + "loss": 0.5123, + "step": 12154 + }, + { + "epoch": 1.9, + "grad_norm": 37.919970446881415, + "learning_rate": 6.274861099988502e-06, + "loss": 0.4972, + "step": 12155 + }, + { + "epoch": 1.9, + "grad_norm": 22.10413727202575, + "learning_rate": 6.273296129829194e-06, + "loss": 0.4898, + "step": 12156 + }, + { + "epoch": 1.9, + "grad_norm": 14.42871961564321, + "learning_rate": 6.271731265655161e-06, + "loss": 0.4595, + "step": 12157 + }, + { + "epoch": 1.9, + "grad_norm": 19.745079032566384, + "learning_rate": 6.270166507510913e-06, + "loss": 0.5628, + "step": 12158 + }, + { + "epoch": 1.9, + "grad_norm": 19.419873320898557, + "learning_rate": 6.268601855440944e-06, + "loss": 0.5304, + "step": 12159 + }, + { + "epoch": 1.9, + "grad_norm": 18.7436566319673, + "learning_rate": 6.267037309489763e-06, + "loss": 0.5514, + "step": 12160 + }, + { + "epoch": 1.9, + "grad_norm": 30.18045657385437, + "learning_rate": 6.265472869701851e-06, + "loss": 0.5706, + "step": 12161 + }, + { + "epoch": 1.9, + "grad_norm": 20.326844633949047, + "learning_rate": 6.2639085361217075e-06, + "loss": 0.5038, + "step": 12162 + }, + { + "epoch": 1.9, + "grad_norm": 13.806943304338365, + "learning_rate": 6.262344308793818e-06, + "loss": 0.5406, + "step": 12163 + }, + { + "epoch": 1.9, + "grad_norm": 20.500973005289495, + "learning_rate": 6.2607801877626715e-06, + "loss": 0.5545, + "step": 12164 + }, + { + "epoch": 1.9, + "grad_norm": 20.16940634386228, + "learning_rate": 6.259216173072751e-06, + "loss": 0.5182, + "step": 12165 + }, + { + "epoch": 1.9, + "grad_norm": 18.769873816215153, + "learning_rate": 6.257652264768532e-06, + "loss": 0.5135, + "step": 12166 + }, + { + "epoch": 1.9, + "grad_norm": 22.583136667244826, + "learning_rate": 6.256088462894492e-06, + "loss": 0.4973, + "step": 12167 + }, + { + "epoch": 1.9, + "grad_norm": 24.71962303544123, + "learning_rate": 6.254524767495108e-06, + "loss": 0.5374, + "step": 12168 + }, + { + "epoch": 1.9, + "grad_norm": 15.828915716531528, + "learning_rate": 6.252961178614847e-06, + "loss": 0.5273, + "step": 12169 + }, + { + "epoch": 1.9, + "grad_norm": 17.488331191192067, + "learning_rate": 6.251397696298181e-06, + "loss": 0.5523, + "step": 12170 + }, + { + "epoch": 1.9, + "grad_norm": 25.26685122142957, + "learning_rate": 6.24983432058957e-06, + "loss": 0.4952, + "step": 12171 + }, + { + "epoch": 1.9, + "grad_norm": 22.96508958934432, + "learning_rate": 6.2482710515334775e-06, + "loss": 0.5503, + "step": 12172 + }, + { + "epoch": 1.9, + "grad_norm": 25.732194557995523, + 
"learning_rate": 6.246707889174359e-06, + "loss": 0.6013, + "step": 12173 + }, + { + "epoch": 1.9, + "grad_norm": 20.552280537380508, + "learning_rate": 6.245144833556675e-06, + "loss": 0.5129, + "step": 12174 + }, + { + "epoch": 1.9, + "grad_norm": 23.539379121541863, + "learning_rate": 6.243581884724879e-06, + "loss": 0.5449, + "step": 12175 + }, + { + "epoch": 1.9, + "grad_norm": 13.429760648118778, + "learning_rate": 6.242019042723411e-06, + "loss": 0.4941, + "step": 12176 + }, + { + "epoch": 1.9, + "grad_norm": 19.374425665741384, + "learning_rate": 6.240456307596725e-06, + "loss": 0.5319, + "step": 12177 + }, + { + "epoch": 1.9, + "grad_norm": 24.315272535988825, + "learning_rate": 6.238893679389262e-06, + "loss": 0.5312, + "step": 12178 + }, + { + "epoch": 1.9, + "grad_norm": 19.871648856812556, + "learning_rate": 6.237331158145461e-06, + "loss": 0.5329, + "step": 12179 + }, + { + "epoch": 1.9, + "grad_norm": 14.468476990643177, + "learning_rate": 6.2357687439097645e-06, + "loss": 0.5216, + "step": 12180 + }, + { + "epoch": 1.9, + "grad_norm": 14.178170901760858, + "learning_rate": 6.234206436726602e-06, + "loss": 0.5029, + "step": 12181 + }, + { + "epoch": 1.9, + "grad_norm": 13.410333693919632, + "learning_rate": 6.232644236640403e-06, + "loss": 0.468, + "step": 12182 + }, + { + "epoch": 1.9, + "grad_norm": 24.020185020285673, + "learning_rate": 6.231082143695599e-06, + "loss": 0.4947, + "step": 12183 + }, + { + "epoch": 1.9, + "grad_norm": 18.449760025430884, + "learning_rate": 6.229520157936617e-06, + "loss": 0.538, + "step": 12184 + }, + { + "epoch": 1.9, + "grad_norm": 24.12944773331522, + "learning_rate": 6.2279582794078755e-06, + "loss": 0.4938, + "step": 12185 + }, + { + "epoch": 1.9, + "grad_norm": 18.59313569376448, + "learning_rate": 6.226396508153789e-06, + "loss": 0.5377, + "step": 12186 + }, + { + "epoch": 1.9, + "grad_norm": 18.33269462872682, + "learning_rate": 6.2248348442187825e-06, + "loss": 0.5134, + "step": 12187 + }, + { + "epoch": 1.9, + "grad_norm": 16.566752857629098, + "learning_rate": 6.223273287647262e-06, + "loss": 0.5049, + "step": 12188 + }, + { + "epoch": 1.9, + "grad_norm": 18.16600884295014, + "learning_rate": 6.221711838483644e-06, + "loss": 0.569, + "step": 12189 + }, + { + "epoch": 1.9, + "grad_norm": 12.89655233224021, + "learning_rate": 6.220150496772328e-06, + "loss": 0.4793, + "step": 12190 + }, + { + "epoch": 1.9, + "grad_norm": 19.262054701911623, + "learning_rate": 6.2185892625577215e-06, + "loss": 0.5134, + "step": 12191 + }, + { + "epoch": 1.9, + "grad_norm": 18.098562206397553, + "learning_rate": 6.217028135884222e-06, + "loss": 0.5987, + "step": 12192 + }, + { + "epoch": 1.9, + "grad_norm": 17.694832545628266, + "learning_rate": 6.215467116796232e-06, + "loss": 0.5321, + "step": 12193 + }, + { + "epoch": 1.9, + "grad_norm": 26.71760695616933, + "learning_rate": 6.213906205338145e-06, + "loss": 0.6175, + "step": 12194 + }, + { + "epoch": 1.9, + "grad_norm": 16.29930276882442, + "learning_rate": 6.212345401554347e-06, + "loss": 0.493, + "step": 12195 + }, + { + "epoch": 1.91, + "grad_norm": 14.941679654082067, + "learning_rate": 6.2107847054892325e-06, + "loss": 0.5197, + "step": 12196 + }, + { + "epoch": 1.91, + "grad_norm": 29.9872709792075, + "learning_rate": 6.209224117187184e-06, + "loss": 0.5919, + "step": 12197 + }, + { + "epoch": 1.91, + "grad_norm": 27.558368416452314, + "learning_rate": 6.207663636692581e-06, + "loss": 0.5752, + "step": 12198 + }, + { + "epoch": 1.91, + "grad_norm": 13.595299440629072, + "learning_rate": 
6.2061032640498095e-06, + "loss": 0.4722, + "step": 12199 + }, + { + "epoch": 1.91, + "grad_norm": 22.196052407349097, + "learning_rate": 6.204542999303242e-06, + "loss": 0.5753, + "step": 12200 + }, + { + "epoch": 1.91, + "grad_norm": 13.92598435466475, + "learning_rate": 6.202982842497247e-06, + "loss": 0.5, + "step": 12201 + }, + { + "epoch": 1.91, + "grad_norm": 22.196931852423365, + "learning_rate": 6.201422793676203e-06, + "loss": 0.5354, + "step": 12202 + }, + { + "epoch": 1.91, + "grad_norm": 25.322023037926495, + "learning_rate": 6.1998628528844685e-06, + "loss": 0.5301, + "step": 12203 + }, + { + "epoch": 1.91, + "grad_norm": 14.072040389281618, + "learning_rate": 6.198303020166418e-06, + "loss": 0.4978, + "step": 12204 + }, + { + "epoch": 1.91, + "grad_norm": 19.72833382236004, + "learning_rate": 6.196743295566401e-06, + "loss": 0.5288, + "step": 12205 + }, + { + "epoch": 1.91, + "grad_norm": 17.988414108663815, + "learning_rate": 6.195183679128781e-06, + "loss": 0.5215, + "step": 12206 + }, + { + "epoch": 1.91, + "grad_norm": 18.723997816160026, + "learning_rate": 6.19362417089791e-06, + "loss": 0.461, + "step": 12207 + }, + { + "epoch": 1.91, + "grad_norm": 17.68155221802161, + "learning_rate": 6.1920647709181425e-06, + "loss": 0.5168, + "step": 12208 + }, + { + "epoch": 1.91, + "grad_norm": 15.470433069431035, + "learning_rate": 6.190505479233826e-06, + "loss": 0.5784, + "step": 12209 + }, + { + "epoch": 1.91, + "grad_norm": 31.764688419846152, + "learning_rate": 6.188946295889304e-06, + "loss": 0.5499, + "step": 12210 + }, + { + "epoch": 1.91, + "grad_norm": 52.303380862688996, + "learning_rate": 6.18738722092892e-06, + "loss": 0.5165, + "step": 12211 + }, + { + "epoch": 1.91, + "grad_norm": 20.527049362147796, + "learning_rate": 6.185828254397012e-06, + "loss": 0.5718, + "step": 12212 + }, + { + "epoch": 1.91, + "grad_norm": 24.46383839094235, + "learning_rate": 6.184269396337916e-06, + "loss": 0.544, + "step": 12213 + }, + { + "epoch": 1.91, + "grad_norm": 15.638061376217884, + "learning_rate": 6.182710646795972e-06, + "loss": 0.5732, + "step": 12214 + }, + { + "epoch": 1.91, + "grad_norm": 28.396531618539328, + "learning_rate": 6.181152005815501e-06, + "loss": 0.6066, + "step": 12215 + }, + { + "epoch": 1.91, + "grad_norm": 25.555496253753066, + "learning_rate": 6.179593473440833e-06, + "loss": 0.5093, + "step": 12216 + }, + { + "epoch": 1.91, + "grad_norm": 19.767739276362914, + "learning_rate": 6.178035049716289e-06, + "loss": 0.5647, + "step": 12217 + }, + { + "epoch": 1.91, + "grad_norm": 22.583684578647365, + "learning_rate": 6.1764767346861955e-06, + "loss": 0.5463, + "step": 12218 + }, + { + "epoch": 1.91, + "grad_norm": 26.37654932045606, + "learning_rate": 6.174918528394868e-06, + "loss": 0.5759, + "step": 12219 + }, + { + "epoch": 1.91, + "grad_norm": 11.013167751793873, + "learning_rate": 6.173360430886617e-06, + "loss": 0.4951, + "step": 12220 + }, + { + "epoch": 1.91, + "grad_norm": 25.632850781812976, + "learning_rate": 6.171802442205759e-06, + "loss": 0.5838, + "step": 12221 + }, + { + "epoch": 1.91, + "grad_norm": 29.75469137214884, + "learning_rate": 6.170244562396599e-06, + "loss": 0.6622, + "step": 12222 + }, + { + "epoch": 1.91, + "grad_norm": 20.208161660740046, + "learning_rate": 6.1686867915034425e-06, + "loss": 0.5114, + "step": 12223 + }, + { + "epoch": 1.91, + "grad_norm": 16.09749480076051, + "learning_rate": 6.167129129570595e-06, + "loss": 0.5446, + "step": 12224 + }, + { + "epoch": 1.91, + "grad_norm": 21.50417245428113, + "learning_rate": 
6.165571576642351e-06, + "loss": 0.5308, + "step": 12225 + }, + { + "epoch": 1.91, + "grad_norm": 22.708743762687742, + "learning_rate": 6.164014132763008e-06, + "loss": 0.5796, + "step": 12226 + }, + { + "epoch": 1.91, + "grad_norm": 15.435235427625951, + "learning_rate": 6.162456797976859e-06, + "loss": 0.4247, + "step": 12227 + }, + { + "epoch": 1.91, + "grad_norm": 15.944371516691, + "learning_rate": 6.1608995723281925e-06, + "loss": 0.584, + "step": 12228 + }, + { + "epoch": 1.91, + "grad_norm": 14.31472861152017, + "learning_rate": 6.159342455861304e-06, + "loss": 0.4976, + "step": 12229 + }, + { + "epoch": 1.91, + "grad_norm": 20.092443165619823, + "learning_rate": 6.157785448620461e-06, + "loss": 0.5718, + "step": 12230 + }, + { + "epoch": 1.91, + "grad_norm": 15.706088023786904, + "learning_rate": 6.156228550649954e-06, + "loss": 0.4975, + "step": 12231 + }, + { + "epoch": 1.91, + "grad_norm": 25.66864942322433, + "learning_rate": 6.154671761994058e-06, + "loss": 0.6087, + "step": 12232 + }, + { + "epoch": 1.91, + "grad_norm": 15.474472487283657, + "learning_rate": 6.153115082697052e-06, + "loss": 0.4825, + "step": 12233 + }, + { + "epoch": 1.91, + "grad_norm": 19.818215031021587, + "learning_rate": 6.1515585128032e-06, + "loss": 0.5691, + "step": 12234 + }, + { + "epoch": 1.91, + "grad_norm": 15.501704291506618, + "learning_rate": 6.150002052356772e-06, + "loss": 0.5322, + "step": 12235 + }, + { + "epoch": 1.91, + "grad_norm": 12.194695851182095, + "learning_rate": 6.148445701402032e-06, + "loss": 0.5778, + "step": 12236 + }, + { + "epoch": 1.91, + "grad_norm": 16.76790458525802, + "learning_rate": 6.1468894599832455e-06, + "loss": 0.5472, + "step": 12237 + }, + { + "epoch": 1.91, + "grad_norm": 19.855332722142574, + "learning_rate": 6.145333328144669e-06, + "loss": 0.4362, + "step": 12238 + }, + { + "epoch": 1.91, + "grad_norm": 18.30343730953803, + "learning_rate": 6.143777305930556e-06, + "loss": 0.5867, + "step": 12239 + }, + { + "epoch": 1.91, + "grad_norm": 14.508747785039262, + "learning_rate": 6.142221393385161e-06, + "loss": 0.4782, + "step": 12240 + }, + { + "epoch": 1.91, + "grad_norm": 13.681509216897357, + "learning_rate": 6.140665590552735e-06, + "loss": 0.5278, + "step": 12241 + }, + { + "epoch": 1.91, + "grad_norm": 21.12231851381048, + "learning_rate": 6.139109897477517e-06, + "loss": 0.499, + "step": 12242 + }, + { + "epoch": 1.91, + "grad_norm": 16.80724646913212, + "learning_rate": 6.137554314203762e-06, + "loss": 0.5027, + "step": 12243 + }, + { + "epoch": 1.91, + "grad_norm": 16.74158231929338, + "learning_rate": 6.135998840775699e-06, + "loss": 0.4842, + "step": 12244 + }, + { + "epoch": 1.91, + "grad_norm": 19.675780274552274, + "learning_rate": 6.134443477237566e-06, + "loss": 0.5263, + "step": 12245 + }, + { + "epoch": 1.91, + "grad_norm": 14.52745672419481, + "learning_rate": 6.132888223633602e-06, + "loss": 0.5024, + "step": 12246 + }, + { + "epoch": 1.91, + "grad_norm": 14.004724199356977, + "learning_rate": 6.131333080008033e-06, + "loss": 0.4343, + "step": 12247 + }, + { + "epoch": 1.91, + "grad_norm": 20.733886475834016, + "learning_rate": 6.129778046405095e-06, + "loss": 0.5556, + "step": 12248 + }, + { + "epoch": 1.91, + "grad_norm": 13.8758789978313, + "learning_rate": 6.128223122868997e-06, + "loss": 0.5325, + "step": 12249 + }, + { + "epoch": 1.91, + "grad_norm": 14.666615292904387, + "learning_rate": 6.126668309443973e-06, + "loss": 0.4913, + "step": 12250 + }, + { + "epoch": 1.91, + "grad_norm": 17.96252052545536, + "learning_rate": 
6.125113606174233e-06, + "loss": 0.4449, + "step": 12251 + }, + { + "epoch": 1.91, + "grad_norm": 17.053416924322395, + "learning_rate": 6.123559013103997e-06, + "loss": 0.5862, + "step": 12252 + }, + { + "epoch": 1.91, + "grad_norm": 19.105968538219564, + "learning_rate": 6.122004530277476e-06, + "loss": 0.5546, + "step": 12253 + }, + { + "epoch": 1.91, + "grad_norm": 23.602386607329148, + "learning_rate": 6.1204501577388775e-06, + "loss": 0.5671, + "step": 12254 + }, + { + "epoch": 1.91, + "grad_norm": 16.400506416576217, + "learning_rate": 6.118895895532405e-06, + "loss": 0.4884, + "step": 12255 + }, + { + "epoch": 1.91, + "grad_norm": 18.01123151614482, + "learning_rate": 6.117341743702265e-06, + "loss": 0.5161, + "step": 12256 + }, + { + "epoch": 1.91, + "grad_norm": 20.794278583194735, + "learning_rate": 6.115787702292651e-06, + "loss": 0.5328, + "step": 12257 + }, + { + "epoch": 1.91, + "grad_norm": 19.752328734634908, + "learning_rate": 6.1142337713477685e-06, + "loss": 0.4854, + "step": 12258 + }, + { + "epoch": 1.91, + "grad_norm": 22.945047820567815, + "learning_rate": 6.112679950911802e-06, + "loss": 0.5748, + "step": 12259 + }, + { + "epoch": 1.92, + "grad_norm": 10.582051918908004, + "learning_rate": 6.111126241028942e-06, + "loss": 0.4871, + "step": 12260 + }, + { + "epoch": 1.92, + "grad_norm": 15.339243105559248, + "learning_rate": 6.1095726417433765e-06, + "loss": 0.5092, + "step": 12261 + }, + { + "epoch": 1.92, + "grad_norm": 19.14320682434728, + "learning_rate": 6.10801915309929e-06, + "loss": 0.546, + "step": 12262 + }, + { + "epoch": 1.92, + "grad_norm": 20.53945640434366, + "learning_rate": 6.106465775140865e-06, + "loss": 0.5227, + "step": 12263 + }, + { + "epoch": 1.92, + "grad_norm": 14.576915372890655, + "learning_rate": 6.104912507912272e-06, + "loss": 0.4823, + "step": 12264 + }, + { + "epoch": 1.92, + "grad_norm": 23.49053580678368, + "learning_rate": 6.1033593514576895e-06, + "loss": 0.5771, + "step": 12265 + }, + { + "epoch": 1.92, + "grad_norm": 17.141406715618732, + "learning_rate": 6.101806305821288e-06, + "loss": 0.4874, + "step": 12266 + }, + { + "epoch": 1.92, + "grad_norm": 23.96907863864128, + "learning_rate": 6.100253371047233e-06, + "loss": 0.5957, + "step": 12267 + }, + { + "epoch": 1.92, + "grad_norm": 22.537269674362996, + "learning_rate": 6.098700547179695e-06, + "loss": 0.559, + "step": 12268 + }, + { + "epoch": 1.92, + "grad_norm": 15.360425778300087, + "learning_rate": 6.0971478342628285e-06, + "loss": 0.5299, + "step": 12269 + }, + { + "epoch": 1.92, + "grad_norm": 18.44405969940147, + "learning_rate": 6.095595232340794e-06, + "loss": 0.488, + "step": 12270 + }, + { + "epoch": 1.92, + "grad_norm": 23.080720858805734, + "learning_rate": 6.094042741457749e-06, + "loss": 0.5644, + "step": 12271 + }, + { + "epoch": 1.92, + "grad_norm": 23.63498438983016, + "learning_rate": 6.0924903616578394e-06, + "loss": 0.5554, + "step": 12272 + }, + { + "epoch": 1.92, + "grad_norm": 19.989096452521217, + "learning_rate": 6.090938092985226e-06, + "loss": 0.546, + "step": 12273 + }, + { + "epoch": 1.92, + "grad_norm": 14.255078179482236, + "learning_rate": 6.08938593548404e-06, + "loss": 0.4866, + "step": 12274 + }, + { + "epoch": 1.92, + "grad_norm": 14.732335913330358, + "learning_rate": 6.087833889198431e-06, + "loss": 0.4742, + "step": 12275 + }, + { + "epoch": 1.92, + "grad_norm": 22.263861475589888, + "learning_rate": 6.086281954172537e-06, + "loss": 0.5251, + "step": 12276 + }, + { + "epoch": 1.92, + "grad_norm": 23.329257043351287, + "learning_rate": 
6.084730130450497e-06, + "loss": 0.5346, + "step": 12277 + }, + { + "epoch": 1.92, + "grad_norm": 22.871124746430304, + "learning_rate": 6.083178418076443e-06, + "loss": 0.5068, + "step": 12278 + }, + { + "epoch": 1.92, + "grad_norm": 17.967703829454823, + "learning_rate": 6.081626817094502e-06, + "loss": 0.4768, + "step": 12279 + }, + { + "epoch": 1.92, + "grad_norm": 22.833495254505166, + "learning_rate": 6.0800753275488e-06, + "loss": 0.5139, + "step": 12280 + }, + { + "epoch": 1.92, + "grad_norm": 17.877622778332483, + "learning_rate": 6.078523949483465e-06, + "loss": 0.473, + "step": 12281 + }, + { + "epoch": 1.92, + "grad_norm": 16.82979669122015, + "learning_rate": 6.076972682942617e-06, + "loss": 0.5988, + "step": 12282 + }, + { + "epoch": 1.92, + "grad_norm": 35.828291760774825, + "learning_rate": 6.075421527970366e-06, + "loss": 0.5938, + "step": 12283 + }, + { + "epoch": 1.92, + "grad_norm": 17.579769136467974, + "learning_rate": 6.073870484610833e-06, + "loss": 0.4486, + "step": 12284 + }, + { + "epoch": 1.92, + "grad_norm": 18.436900194562686, + "learning_rate": 6.072319552908128e-06, + "loss": 0.5465, + "step": 12285 + }, + { + "epoch": 1.92, + "grad_norm": 22.484018060529767, + "learning_rate": 6.070768732906353e-06, + "loss": 0.5934, + "step": 12286 + }, + { + "epoch": 1.92, + "grad_norm": 22.157476642748698, + "learning_rate": 6.069218024649622e-06, + "loss": 0.526, + "step": 12287 + }, + { + "epoch": 1.92, + "grad_norm": 33.04431796631102, + "learning_rate": 6.0676674281820294e-06, + "loss": 0.5257, + "step": 12288 + }, + { + "epoch": 1.92, + "grad_norm": 33.817975569553894, + "learning_rate": 6.066116943547673e-06, + "loss": 0.5153, + "step": 12289 + }, + { + "epoch": 1.92, + "grad_norm": 26.919243660167318, + "learning_rate": 6.064566570790651e-06, + "loss": 0.5411, + "step": 12290 + }, + { + "epoch": 1.92, + "grad_norm": 22.007482900307902, + "learning_rate": 6.063016309955051e-06, + "loss": 0.5515, + "step": 12291 + }, + { + "epoch": 1.92, + "grad_norm": 16.910521128157438, + "learning_rate": 6.06146616108497e-06, + "loss": 0.5549, + "step": 12292 + }, + { + "epoch": 1.92, + "grad_norm": 24.922693304899678, + "learning_rate": 6.059916124224483e-06, + "loss": 0.5857, + "step": 12293 + }, + { + "epoch": 1.92, + "grad_norm": 18.935425710856062, + "learning_rate": 6.058366199417676e-06, + "loss": 0.5129, + "step": 12294 + }, + { + "epoch": 1.92, + "grad_norm": 15.906686051767723, + "learning_rate": 6.056816386708628e-06, + "loss": 0.5667, + "step": 12295 + }, + { + "epoch": 1.92, + "grad_norm": 24.12557874118802, + "learning_rate": 6.055266686141418e-06, + "loss": 0.5148, + "step": 12296 + }, + { + "epoch": 1.92, + "grad_norm": 16.554082193330338, + "learning_rate": 6.053717097760117e-06, + "loss": 0.6317, + "step": 12297 + }, + { + "epoch": 1.92, + "grad_norm": 11.323846431926478, + "learning_rate": 6.052167621608791e-06, + "loss": 0.4545, + "step": 12298 + }, + { + "epoch": 1.92, + "grad_norm": 19.044315923526742, + "learning_rate": 6.050618257731507e-06, + "loss": 0.588, + "step": 12299 + }, + { + "epoch": 1.92, + "grad_norm": 21.87047303360174, + "learning_rate": 6.049069006172331e-06, + "loss": 0.6194, + "step": 12300 + }, + { + "epoch": 1.92, + "grad_norm": 18.025605170167378, + "learning_rate": 6.04751986697532e-06, + "loss": 0.5724, + "step": 12301 + }, + { + "epoch": 1.92, + "grad_norm": 20.72592136212466, + "learning_rate": 6.045970840184534e-06, + "loss": 0.561, + "step": 12302 + }, + { + "epoch": 1.92, + "grad_norm": 15.776708591536986, + "learning_rate": 
6.044421925844024e-06, + "loss": 0.577, + "step": 12303 + }, + { + "epoch": 1.92, + "grad_norm": 17.23633041943759, + "learning_rate": 6.0428731239978415e-06, + "loss": 0.5055, + "step": 12304 + }, + { + "epoch": 1.92, + "grad_norm": 15.852625788170199, + "learning_rate": 6.041324434690028e-06, + "loss": 0.5349, + "step": 12305 + }, + { + "epoch": 1.92, + "grad_norm": 24.325067232427184, + "learning_rate": 6.0397758579646335e-06, + "loss": 0.5686, + "step": 12306 + }, + { + "epoch": 1.92, + "grad_norm": 19.65158011481245, + "learning_rate": 6.038227393865701e-06, + "loss": 0.5007, + "step": 12307 + }, + { + "epoch": 1.92, + "grad_norm": 16.732066002306635, + "learning_rate": 6.036679042437259e-06, + "loss": 0.5151, + "step": 12308 + }, + { + "epoch": 1.92, + "grad_norm": 17.113880423875756, + "learning_rate": 6.035130803723349e-06, + "loss": 0.5191, + "step": 12309 + }, + { + "epoch": 1.92, + "grad_norm": 18.109060157198147, + "learning_rate": 6.033582677767997e-06, + "loss": 0.4982, + "step": 12310 + }, + { + "epoch": 1.92, + "grad_norm": 17.981712496775902, + "learning_rate": 6.032034664615236e-06, + "loss": 0.5105, + "step": 12311 + }, + { + "epoch": 1.92, + "grad_norm": 17.46247899208498, + "learning_rate": 6.030486764309089e-06, + "loss": 0.4542, + "step": 12312 + }, + { + "epoch": 1.92, + "grad_norm": 15.68847238236059, + "learning_rate": 6.028938976893575e-06, + "loss": 0.5514, + "step": 12313 + }, + { + "epoch": 1.92, + "grad_norm": 19.300428745900966, + "learning_rate": 6.027391302412711e-06, + "loss": 0.4951, + "step": 12314 + }, + { + "epoch": 1.92, + "grad_norm": 27.54649278545439, + "learning_rate": 6.025843740910518e-06, + "loss": 0.5783, + "step": 12315 + }, + { + "epoch": 1.92, + "grad_norm": 14.536354303130112, + "learning_rate": 6.024296292431e-06, + "loss": 0.4886, + "step": 12316 + }, + { + "epoch": 1.92, + "grad_norm": 20.83737265854583, + "learning_rate": 6.022748957018176e-06, + "loss": 0.5059, + "step": 12317 + }, + { + "epoch": 1.92, + "grad_norm": 16.784168414085872, + "learning_rate": 6.02120173471604e-06, + "loss": 0.5209, + "step": 12318 + }, + { + "epoch": 1.92, + "grad_norm": 17.342161685798253, + "learning_rate": 6.0196546255686016e-06, + "loss": 0.5904, + "step": 12319 + }, + { + "epoch": 1.92, + "grad_norm": 16.253202693117608, + "learning_rate": 6.018107629619854e-06, + "loss": 0.5323, + "step": 12320 + }, + { + "epoch": 1.92, + "grad_norm": 19.855083079835506, + "learning_rate": 6.0165607469137984e-06, + "loss": 0.5526, + "step": 12321 + }, + { + "epoch": 1.92, + "grad_norm": 24.258805807173978, + "learning_rate": 6.015013977494429e-06, + "loss": 0.4956, + "step": 12322 + }, + { + "epoch": 1.92, + "grad_norm": 17.880375996946327, + "learning_rate": 6.013467321405726e-06, + "loss": 0.5218, + "step": 12323 + }, + { + "epoch": 1.93, + "grad_norm": 16.002634858758192, + "learning_rate": 6.011920778691679e-06, + "loss": 0.5165, + "step": 12324 + }, + { + "epoch": 1.93, + "grad_norm": 23.227114741114562, + "learning_rate": 6.010374349396274e-06, + "loss": 0.558, + "step": 12325 + }, + { + "epoch": 1.93, + "grad_norm": 25.40746386292164, + "learning_rate": 6.008828033563487e-06, + "loss": 0.5004, + "step": 12326 + }, + { + "epoch": 1.93, + "grad_norm": 21.305761380857877, + "learning_rate": 6.0072818312373e-06, + "loss": 0.4808, + "step": 12327 + }, + { + "epoch": 1.93, + "grad_norm": 13.371554597161667, + "learning_rate": 6.005735742461681e-06, + "loss": 0.4352, + "step": 12328 + }, + { + "epoch": 1.93, + "grad_norm": 19.46274298321452, + "learning_rate": 
6.004189767280599e-06, + "loss": 0.4739, + "step": 12329 + }, + { + "epoch": 1.93, + "grad_norm": 20.270289648941908, + "learning_rate": 6.002643905738022e-06, + "loss": 0.5381, + "step": 12330 + }, + { + "epoch": 1.93, + "grad_norm": 12.718616033827995, + "learning_rate": 6.0010981578779175e-06, + "loss": 0.4453, + "step": 12331 + }, + { + "epoch": 1.93, + "grad_norm": 17.35109225747623, + "learning_rate": 5.9995525237442395e-06, + "loss": 0.4429, + "step": 12332 + }, + { + "epoch": 1.93, + "grad_norm": 21.473640577903073, + "learning_rate": 5.9980070033809455e-06, + "loss": 0.5189, + "step": 12333 + }, + { + "epoch": 1.93, + "grad_norm": 25.72697366906619, + "learning_rate": 5.996461596831995e-06, + "loss": 0.5608, + "step": 12334 + }, + { + "epoch": 1.93, + "grad_norm": 14.514171329426617, + "learning_rate": 5.9949163041413315e-06, + "loss": 0.4483, + "step": 12335 + }, + { + "epoch": 1.93, + "grad_norm": 17.781371233548622, + "learning_rate": 5.9933711253529115e-06, + "loss": 0.5658, + "step": 12336 + }, + { + "epoch": 1.93, + "grad_norm": 20.80831062263204, + "learning_rate": 5.991826060510666e-06, + "loss": 0.5643, + "step": 12337 + }, + { + "epoch": 1.93, + "grad_norm": 25.85296637077086, + "learning_rate": 5.990281109658545e-06, + "loss": 0.5359, + "step": 12338 + }, + { + "epoch": 1.93, + "grad_norm": 28.357611902460572, + "learning_rate": 5.988736272840483e-06, + "loss": 0.5674, + "step": 12339 + }, + { + "epoch": 1.93, + "grad_norm": 20.35844605610881, + "learning_rate": 5.987191550100415e-06, + "loss": 0.5319, + "step": 12340 + }, + { + "epoch": 1.93, + "grad_norm": 22.82902687218169, + "learning_rate": 5.985646941482275e-06, + "loss": 0.5442, + "step": 12341 + }, + { + "epoch": 1.93, + "grad_norm": 18.13460488522913, + "learning_rate": 5.984102447029986e-06, + "loss": 0.5105, + "step": 12342 + }, + { + "epoch": 1.93, + "grad_norm": 21.501221263792665, + "learning_rate": 5.9825580667874715e-06, + "loss": 0.4686, + "step": 12343 + }, + { + "epoch": 1.93, + "grad_norm": 19.02378032069939, + "learning_rate": 5.981013800798657e-06, + "loss": 0.4675, + "step": 12344 + }, + { + "epoch": 1.93, + "grad_norm": 14.986058449378904, + "learning_rate": 5.9794696491074576e-06, + "loss": 0.4835, + "step": 12345 + }, + { + "epoch": 1.93, + "grad_norm": 13.695985333615948, + "learning_rate": 5.977925611757794e-06, + "loss": 0.4871, + "step": 12346 + }, + { + "epoch": 1.93, + "grad_norm": 25.67034463970409, + "learning_rate": 5.976381688793571e-06, + "loss": 0.5343, + "step": 12347 + }, + { + "epoch": 1.93, + "grad_norm": 19.512696596220714, + "learning_rate": 5.974837880258697e-06, + "loss": 0.487, + "step": 12348 + }, + { + "epoch": 1.93, + "grad_norm": 21.34622884831244, + "learning_rate": 5.973294186197079e-06, + "loss": 0.6084, + "step": 12349 + }, + { + "epoch": 1.93, + "grad_norm": 18.68531712609804, + "learning_rate": 5.971750606652621e-06, + "loss": 0.5363, + "step": 12350 + }, + { + "epoch": 1.93, + "grad_norm": 20.314529720279726, + "learning_rate": 5.97020714166922e-06, + "loss": 0.5161, + "step": 12351 + }, + { + "epoch": 1.93, + "grad_norm": 11.415219791041318, + "learning_rate": 5.9686637912907675e-06, + "loss": 0.5051, + "step": 12352 + }, + { + "epoch": 1.93, + "grad_norm": 16.814206456863005, + "learning_rate": 5.96712055556116e-06, + "loss": 0.545, + "step": 12353 + }, + { + "epoch": 1.93, + "grad_norm": 17.706709550619333, + "learning_rate": 5.965577434524283e-06, + "loss": 0.4624, + "step": 12354 + }, + { + "epoch": 1.93, + "grad_norm": 14.581329845397073, + "learning_rate": 
5.964034428224026e-06, + "loss": 0.5049, + "step": 12355 + }, + { + "epoch": 1.93, + "grad_norm": 13.117178325638507, + "learning_rate": 5.9624915367042705e-06, + "loss": 0.492, + "step": 12356 + }, + { + "epoch": 1.93, + "grad_norm": 13.125009604859244, + "learning_rate": 5.960948760008891e-06, + "loss": 0.5081, + "step": 12357 + }, + { + "epoch": 1.93, + "grad_norm": 23.20827378614795, + "learning_rate": 5.959406098181766e-06, + "loss": 0.5871, + "step": 12358 + }, + { + "epoch": 1.93, + "grad_norm": 17.45444301351702, + "learning_rate": 5.9578635512667686e-06, + "loss": 0.5759, + "step": 12359 + }, + { + "epoch": 1.93, + "grad_norm": 27.47755231964424, + "learning_rate": 5.9563211193077644e-06, + "loss": 0.5985, + "step": 12360 + }, + { + "epoch": 1.93, + "grad_norm": 18.32948695242102, + "learning_rate": 5.9547788023486296e-06, + "loss": 0.5493, + "step": 12361 + }, + { + "epoch": 1.93, + "grad_norm": 28.60447621319841, + "learning_rate": 5.953236600433212e-06, + "loss": 0.6283, + "step": 12362 + }, + { + "epoch": 1.93, + "grad_norm": 22.70753012069293, + "learning_rate": 5.951694513605381e-06, + "loss": 0.541, + "step": 12363 + }, + { + "epoch": 1.93, + "grad_norm": 16.780237856413557, + "learning_rate": 5.950152541908988e-06, + "loss": 0.4784, + "step": 12364 + }, + { + "epoch": 1.93, + "grad_norm": 18.073217379480376, + "learning_rate": 5.94861068538789e-06, + "loss": 0.4856, + "step": 12365 + }, + { + "epoch": 1.93, + "grad_norm": 24.70224930033288, + "learning_rate": 5.947068944085934e-06, + "loss": 0.5123, + "step": 12366 + }, + { + "epoch": 1.93, + "grad_norm": 15.355420320444683, + "learning_rate": 5.9455273180469666e-06, + "loss": 0.5719, + "step": 12367 + }, + { + "epoch": 1.93, + "grad_norm": 18.02773195553403, + "learning_rate": 5.943985807314827e-06, + "loss": 0.4973, + "step": 12368 + }, + { + "epoch": 1.93, + "grad_norm": 15.900555744632838, + "learning_rate": 5.942444411933361e-06, + "loss": 0.5651, + "step": 12369 + }, + { + "epoch": 1.93, + "grad_norm": 25.365440457697158, + "learning_rate": 5.940903131946401e-06, + "loss": 0.5803, + "step": 12370 + }, + { + "epoch": 1.93, + "grad_norm": 19.917058685633137, + "learning_rate": 5.939361967397785e-06, + "loss": 0.6036, + "step": 12371 + }, + { + "epoch": 1.93, + "grad_norm": 12.248547168624674, + "learning_rate": 5.937820918331336e-06, + "loss": 0.458, + "step": 12372 + }, + { + "epoch": 1.93, + "grad_norm": 18.06914817004524, + "learning_rate": 5.9362799847908845e-06, + "loss": 0.4907, + "step": 12373 + }, + { + "epoch": 1.93, + "grad_norm": 23.20746151830743, + "learning_rate": 5.934739166820252e-06, + "loss": 0.6095, + "step": 12374 + }, + { + "epoch": 1.93, + "grad_norm": 17.4524110427762, + "learning_rate": 5.9331984644632615e-06, + "loss": 0.5806, + "step": 12375 + }, + { + "epoch": 1.93, + "grad_norm": 26.480536295771884, + "learning_rate": 5.931657877763728e-06, + "loss": 0.4927, + "step": 12376 + }, + { + "epoch": 1.93, + "grad_norm": 22.82148895182661, + "learning_rate": 5.930117406765462e-06, + "loss": 0.6186, + "step": 12377 + }, + { + "epoch": 1.93, + "grad_norm": 25.16527062831763, + "learning_rate": 5.928577051512277e-06, + "loss": 0.5995, + "step": 12378 + }, + { + "epoch": 1.93, + "grad_norm": 15.246434517017754, + "learning_rate": 5.927036812047978e-06, + "loss": 0.5412, + "step": 12379 + }, + { + "epoch": 1.93, + "grad_norm": 22.70932936674966, + "learning_rate": 5.925496688416374e-06, + "loss": 0.5898, + "step": 12380 + }, + { + "epoch": 1.93, + "grad_norm": 16.30599333081173, + "learning_rate": 
5.923956680661257e-06, + "loss": 0.5145, + "step": 12381 + }, + { + "epoch": 1.93, + "grad_norm": 26.839283217938156, + "learning_rate": 5.922416788826429e-06, + "loss": 0.5827, + "step": 12382 + }, + { + "epoch": 1.93, + "grad_norm": 21.621006501287976, + "learning_rate": 5.920877012955679e-06, + "loss": 0.5624, + "step": 12383 + }, + { + "epoch": 1.93, + "grad_norm": 19.77983446013133, + "learning_rate": 5.919337353092802e-06, + "loss": 0.5661, + "step": 12384 + }, + { + "epoch": 1.93, + "grad_norm": 13.844974066521289, + "learning_rate": 5.917797809281587e-06, + "loss": 0.4736, + "step": 12385 + }, + { + "epoch": 1.93, + "grad_norm": 14.97412940604854, + "learning_rate": 5.916258381565811e-06, + "loss": 0.4732, + "step": 12386 + }, + { + "epoch": 1.93, + "grad_norm": 21.183570081700054, + "learning_rate": 5.914719069989257e-06, + "loss": 0.4821, + "step": 12387 + }, + { + "epoch": 1.94, + "grad_norm": 17.848445057527147, + "learning_rate": 5.913179874595705e-06, + "loss": 0.542, + "step": 12388 + }, + { + "epoch": 1.94, + "grad_norm": 15.3524597269773, + "learning_rate": 5.911640795428923e-06, + "loss": 0.5594, + "step": 12389 + }, + { + "epoch": 1.94, + "grad_norm": 25.311570329430268, + "learning_rate": 5.910101832532691e-06, + "loss": 0.5641, + "step": 12390 + }, + { + "epoch": 1.94, + "grad_norm": 21.574542657705813, + "learning_rate": 5.908562985950768e-06, + "loss": 0.5433, + "step": 12391 + }, + { + "epoch": 1.94, + "grad_norm": 19.314135868731526, + "learning_rate": 5.907024255726919e-06, + "loss": 0.5528, + "step": 12392 + }, + { + "epoch": 1.94, + "grad_norm": 21.87727639547193, + "learning_rate": 5.905485641904904e-06, + "loss": 0.531, + "step": 12393 + }, + { + "epoch": 1.94, + "grad_norm": 18.0783898903737, + "learning_rate": 5.903947144528486e-06, + "loss": 0.5324, + "step": 12394 + }, + { + "epoch": 1.94, + "grad_norm": 24.25238748815649, + "learning_rate": 5.902408763641416e-06, + "loss": 0.5721, + "step": 12395 + }, + { + "epoch": 1.94, + "grad_norm": 19.923012906656197, + "learning_rate": 5.9008704992874384e-06, + "loss": 0.5137, + "step": 12396 + }, + { + "epoch": 1.94, + "grad_norm": 16.70871145537197, + "learning_rate": 5.89933235151031e-06, + "loss": 0.4857, + "step": 12397 + }, + { + "epoch": 1.94, + "grad_norm": 22.25025273029509, + "learning_rate": 5.897794320353768e-06, + "loss": 0.5418, + "step": 12398 + }, + { + "epoch": 1.94, + "grad_norm": 15.853369733894144, + "learning_rate": 5.896256405861558e-06, + "loss": 0.5445, + "step": 12399 + }, + { + "epoch": 1.94, + "grad_norm": 16.20968496516432, + "learning_rate": 5.8947186080774165e-06, + "loss": 0.4262, + "step": 12400 + }, + { + "epoch": 1.94, + "grad_norm": 22.339365901195418, + "learning_rate": 5.8931809270450746e-06, + "loss": 0.4989, + "step": 12401 + }, + { + "epoch": 1.94, + "grad_norm": 22.328759201097878, + "learning_rate": 5.8916433628082626e-06, + "loss": 0.5199, + "step": 12402 + }, + { + "epoch": 1.94, + "grad_norm": 21.190822115815195, + "learning_rate": 5.890105915410712e-06, + "loss": 0.5436, + "step": 12403 + }, + { + "epoch": 1.94, + "grad_norm": 15.322672321278281, + "learning_rate": 5.8885685848961424e-06, + "loss": 0.4906, + "step": 12404 + }, + { + "epoch": 1.94, + "grad_norm": 14.316512197750402, + "learning_rate": 5.8870313713082845e-06, + "loss": 0.4905, + "step": 12405 + }, + { + "epoch": 1.94, + "grad_norm": 24.476398468076734, + "learning_rate": 5.885494274690842e-06, + "loss": 0.5817, + "step": 12406 + }, + { + "epoch": 1.94, + "grad_norm": 18.981877596757233, + "learning_rate": 
5.883957295087537e-06, + "loss": 0.5369, + "step": 12407 + }, + { + "epoch": 1.94, + "grad_norm": 19.790324660696253, + "learning_rate": 5.882420432542077e-06, + "loss": 0.6368, + "step": 12408 + }, + { + "epoch": 1.94, + "grad_norm": 20.776741641622746, + "learning_rate": 5.8808836870981725e-06, + "loss": 0.5382, + "step": 12409 + }, + { + "epoch": 1.94, + "grad_norm": 28.8304841045478, + "learning_rate": 5.87934705879953e-06, + "loss": 0.4809, + "step": 12410 + }, + { + "epoch": 1.94, + "grad_norm": 18.49730136397342, + "learning_rate": 5.8778105476898415e-06, + "loss": 0.569, + "step": 12411 + }, + { + "epoch": 1.94, + "grad_norm": 24.831637009879625, + "learning_rate": 5.87627415381281e-06, + "loss": 0.5326, + "step": 12412 + }, + { + "epoch": 1.94, + "grad_norm": 21.52488268034948, + "learning_rate": 5.874737877212131e-06, + "loss": 0.5532, + "step": 12413 + }, + { + "epoch": 1.94, + "grad_norm": 17.600109363752566, + "learning_rate": 5.873201717931492e-06, + "loss": 0.4851, + "step": 12414 + }, + { + "epoch": 1.94, + "grad_norm": 17.071249126516147, + "learning_rate": 5.871665676014584e-06, + "loss": 0.4928, + "step": 12415 + }, + { + "epoch": 1.94, + "grad_norm": 12.959499104560754, + "learning_rate": 5.870129751505089e-06, + "loss": 0.5253, + "step": 12416 + }, + { + "epoch": 1.94, + "grad_norm": 19.189228694467246, + "learning_rate": 5.868593944446688e-06, + "loss": 0.5779, + "step": 12417 + }, + { + "epoch": 1.94, + "grad_norm": 16.355434399466912, + "learning_rate": 5.867058254883056e-06, + "loss": 0.5513, + "step": 12418 + }, + { + "epoch": 1.94, + "grad_norm": 18.660743889390798, + "learning_rate": 5.8655226828578715e-06, + "loss": 0.4516, + "step": 12419 + }, + { + "epoch": 1.94, + "grad_norm": 16.286007793706315, + "learning_rate": 5.863987228414805e-06, + "loss": 0.5183, + "step": 12420 + }, + { + "epoch": 1.94, + "grad_norm": 18.253188452800565, + "learning_rate": 5.8624518915975185e-06, + "loss": 0.5251, + "step": 12421 + }, + { + "epoch": 1.94, + "grad_norm": 23.41976905119453, + "learning_rate": 5.860916672449682e-06, + "loss": 0.5562, + "step": 12422 + }, + { + "epoch": 1.94, + "grad_norm": 16.102702458140445, + "learning_rate": 5.859381571014951e-06, + "loss": 0.5111, + "step": 12423 + }, + { + "epoch": 1.94, + "grad_norm": 26.352578266560638, + "learning_rate": 5.857846587336989e-06, + "loss": 0.5896, + "step": 12424 + }, + { + "epoch": 1.94, + "grad_norm": 13.961129258199225, + "learning_rate": 5.856311721459449e-06, + "loss": 0.5243, + "step": 12425 + }, + { + "epoch": 1.94, + "grad_norm": 23.310855484682733, + "learning_rate": 5.854776973425978e-06, + "loss": 0.6174, + "step": 12426 + }, + { + "epoch": 1.94, + "grad_norm": 13.128513415428728, + "learning_rate": 5.853242343280222e-06, + "loss": 0.4883, + "step": 12427 + }, + { + "epoch": 1.94, + "grad_norm": 20.5058426664811, + "learning_rate": 5.85170783106583e-06, + "loss": 0.5232, + "step": 12428 + }, + { + "epoch": 1.94, + "grad_norm": 15.839485978626312, + "learning_rate": 5.8501734368264425e-06, + "loss": 0.5185, + "step": 12429 + }, + { + "epoch": 1.94, + "grad_norm": 16.23613590313458, + "learning_rate": 5.848639160605694e-06, + "loss": 0.5017, + "step": 12430 + }, + { + "epoch": 1.94, + "grad_norm": 17.551606047043208, + "learning_rate": 5.847105002447218e-06, + "loss": 0.5182, + "step": 12431 + }, + { + "epoch": 1.94, + "grad_norm": 23.34640627229531, + "learning_rate": 5.845570962394647e-06, + "loss": 0.569, + "step": 12432 + }, + { + "epoch": 1.94, + "grad_norm": 17.691517026642444, + "learning_rate": 
5.8440370404916035e-06, + "loss": 0.4934, + "step": 12433 + }, + { + "epoch": 1.94, + "grad_norm": 18.552155417085057, + "learning_rate": 5.842503236781722e-06, + "loss": 0.4917, + "step": 12434 + }, + { + "epoch": 1.94, + "grad_norm": 20.389523060572717, + "learning_rate": 5.840969551308614e-06, + "loss": 0.4808, + "step": 12435 + }, + { + "epoch": 1.94, + "grad_norm": 15.692684038938735, + "learning_rate": 5.839435984115899e-06, + "loss": 0.5329, + "step": 12436 + }, + { + "epoch": 1.94, + "grad_norm": 24.00601334414222, + "learning_rate": 5.8379025352471905e-06, + "loss": 0.5689, + "step": 12437 + }, + { + "epoch": 1.94, + "grad_norm": 19.812143364337903, + "learning_rate": 5.836369204746097e-06, + "loss": 0.5513, + "step": 12438 + }, + { + "epoch": 1.94, + "grad_norm": 14.104422500629422, + "learning_rate": 5.834835992656232e-06, + "loss": 0.5193, + "step": 12439 + }, + { + "epoch": 1.94, + "grad_norm": 17.40942907408857, + "learning_rate": 5.833302899021191e-06, + "loss": 0.5167, + "step": 12440 + }, + { + "epoch": 1.94, + "grad_norm": 18.757174954111548, + "learning_rate": 5.831769923884579e-06, + "loss": 0.5556, + "step": 12441 + }, + { + "epoch": 1.94, + "grad_norm": 22.52624071654354, + "learning_rate": 5.830237067289993e-06, + "loss": 0.51, + "step": 12442 + }, + { + "epoch": 1.94, + "grad_norm": 16.920302141397592, + "learning_rate": 5.828704329281024e-06, + "loss": 0.5263, + "step": 12443 + }, + { + "epoch": 1.94, + "grad_norm": 18.55781347183722, + "learning_rate": 5.827171709901267e-06, + "loss": 0.5711, + "step": 12444 + }, + { + "epoch": 1.94, + "grad_norm": 28.532334896719814, + "learning_rate": 5.825639209194302e-06, + "loss": 0.5597, + "step": 12445 + }, + { + "epoch": 1.94, + "grad_norm": 20.352330935389503, + "learning_rate": 5.824106827203719e-06, + "loss": 0.5696, + "step": 12446 + }, + { + "epoch": 1.94, + "grad_norm": 23.64854656392188, + "learning_rate": 5.822574563973091e-06, + "loss": 0.5965, + "step": 12447 + }, + { + "epoch": 1.94, + "grad_norm": 13.898015455986016, + "learning_rate": 5.8210424195460005e-06, + "loss": 0.5788, + "step": 12448 + }, + { + "epoch": 1.94, + "grad_norm": 21.882164503947042, + "learning_rate": 5.8195103939660214e-06, + "loss": 0.4394, + "step": 12449 + }, + { + "epoch": 1.94, + "grad_norm": 21.478105566625683, + "learning_rate": 5.817978487276722e-06, + "loss": 0.4859, + "step": 12450 + }, + { + "epoch": 1.94, + "grad_norm": 15.224604271771039, + "learning_rate": 5.816446699521663e-06, + "loss": 0.5257, + "step": 12451 + }, + { + "epoch": 1.95, + "grad_norm": 24.33418179052358, + "learning_rate": 5.814915030744414e-06, + "loss": 0.4796, + "step": 12452 + }, + { + "epoch": 1.95, + "grad_norm": 21.10577788189234, + "learning_rate": 5.813383480988533e-06, + "loss": 0.4948, + "step": 12453 + }, + { + "epoch": 1.95, + "grad_norm": 23.879545363926923, + "learning_rate": 5.811852050297579e-06, + "loss": 0.5374, + "step": 12454 + }, + { + "epoch": 1.95, + "grad_norm": 19.97696263006178, + "learning_rate": 5.810320738715104e-06, + "loss": 0.506, + "step": 12455 + }, + { + "epoch": 1.95, + "grad_norm": 21.110892698703324, + "learning_rate": 5.808789546284652e-06, + "loss": 0.476, + "step": 12456 + }, + { + "epoch": 1.95, + "grad_norm": 24.43157343451516, + "learning_rate": 5.807258473049774e-06, + "loss": 0.5584, + "step": 12457 + }, + { + "epoch": 1.95, + "grad_norm": 26.064357158531386, + "learning_rate": 5.805727519054017e-06, + "loss": 0.5798, + "step": 12458 + }, + { + "epoch": 1.95, + "grad_norm": 15.64100087892689, + "learning_rate": 
5.8041966843409135e-06, + "loss": 0.4604, + "step": 12459 + }, + { + "epoch": 1.95, + "grad_norm": 16.739041076827338, + "learning_rate": 5.802665968954e-06, + "loss": 0.5202, + "step": 12460 + }, + { + "epoch": 1.95, + "grad_norm": 24.643162402588274, + "learning_rate": 5.801135372936809e-06, + "loss": 0.5942, + "step": 12461 + }, + { + "epoch": 1.95, + "grad_norm": 21.75121216164051, + "learning_rate": 5.7996048963328775e-06, + "loss": 0.4859, + "step": 12462 + }, + { + "epoch": 1.95, + "grad_norm": 17.670004865447886, + "learning_rate": 5.798074539185721e-06, + "loss": 0.5276, + "step": 12463 + }, + { + "epoch": 1.95, + "grad_norm": 20.13919389795665, + "learning_rate": 5.79654430153887e-06, + "loss": 0.493, + "step": 12464 + }, + { + "epoch": 1.95, + "grad_norm": 16.92708306068029, + "learning_rate": 5.7950141834358365e-06, + "loss": 0.5175, + "step": 12465 + }, + { + "epoch": 1.95, + "grad_norm": 18.451526262504714, + "learning_rate": 5.793484184920139e-06, + "loss": 0.5432, + "step": 12466 + }, + { + "epoch": 1.95, + "grad_norm": 18.563684716292574, + "learning_rate": 5.7919543060352965e-06, + "loss": 0.5377, + "step": 12467 + }, + { + "epoch": 1.95, + "grad_norm": 24.197960593496667, + "learning_rate": 5.790424546824806e-06, + "loss": 0.476, + "step": 12468 + }, + { + "epoch": 1.95, + "grad_norm": 14.942537784489552, + "learning_rate": 5.788894907332184e-06, + "loss": 0.4994, + "step": 12469 + }, + { + "epoch": 1.95, + "grad_norm": 16.053730136514798, + "learning_rate": 5.787365387600923e-06, + "loss": 0.4765, + "step": 12470 + }, + { + "epoch": 1.95, + "grad_norm": 27.214363119087164, + "learning_rate": 5.78583598767453e-06, + "loss": 0.5595, + "step": 12471 + }, + { + "epoch": 1.95, + "grad_norm": 15.915624174855418, + "learning_rate": 5.784306707596492e-06, + "loss": 0.5166, + "step": 12472 + }, + { + "epoch": 1.95, + "grad_norm": 16.060402106052805, + "learning_rate": 5.782777547410305e-06, + "loss": 0.5342, + "step": 12473 + }, + { + "epoch": 1.95, + "grad_norm": 18.914268547678464, + "learning_rate": 5.781248507159463e-06, + "loss": 0.5307, + "step": 12474 + }, + { + "epoch": 1.95, + "grad_norm": 31.899785463421146, + "learning_rate": 5.7797195868874445e-06, + "loss": 0.5482, + "step": 12475 + }, + { + "epoch": 1.95, + "grad_norm": 15.001721067835165, + "learning_rate": 5.778190786637729e-06, + "loss": 0.5411, + "step": 12476 + }, + { + "epoch": 1.95, + "grad_norm": 12.444153370730817, + "learning_rate": 5.776662106453797e-06, + "loss": 0.4678, + "step": 12477 + }, + { + "epoch": 1.95, + "grad_norm": 20.353632849090427, + "learning_rate": 5.775133546379128e-06, + "loss": 0.5077, + "step": 12478 + }, + { + "epoch": 1.95, + "grad_norm": 10.834760320730712, + "learning_rate": 5.773605106457185e-06, + "loss": 0.4954, + "step": 12479 + }, + { + "epoch": 1.95, + "grad_norm": 16.308350709486163, + "learning_rate": 5.7720767867314464e-06, + "loss": 0.5926, + "step": 12480 + }, + { + "epoch": 1.95, + "grad_norm": 18.609722802013273, + "learning_rate": 5.7705485872453645e-06, + "loss": 0.5273, + "step": 12481 + }, + { + "epoch": 1.95, + "grad_norm": 23.27119436783684, + "learning_rate": 5.769020508042408e-06, + "loss": 0.5826, + "step": 12482 + }, + { + "epoch": 1.95, + "grad_norm": 18.239027591810153, + "learning_rate": 5.7674925491660365e-06, + "loss": 0.5343, + "step": 12483 + }, + { + "epoch": 1.95, + "grad_norm": 22.908663200060055, + "learning_rate": 5.765964710659702e-06, + "loss": 0.5001, + "step": 12484 + }, + { + "epoch": 1.95, + "grad_norm": 19.89939788976453, + 
"learning_rate": 5.764436992566849e-06, + "loss": 0.5365, + "step": 12485 + }, + { + "epoch": 1.95, + "grad_norm": 17.136950492813643, + "learning_rate": 5.762909394930931e-06, + "loss": 0.4934, + "step": 12486 + }, + { + "epoch": 1.95, + "grad_norm": 21.235445422265318, + "learning_rate": 5.761381917795394e-06, + "loss": 0.6292, + "step": 12487 + }, + { + "epoch": 1.95, + "grad_norm": 23.285681019017932, + "learning_rate": 5.759854561203677e-06, + "loss": 0.5552, + "step": 12488 + }, + { + "epoch": 1.95, + "grad_norm": 16.197960103509278, + "learning_rate": 5.7583273251992115e-06, + "loss": 0.4705, + "step": 12489 + }, + { + "epoch": 1.95, + "grad_norm": 16.631962528254324, + "learning_rate": 5.756800209825435e-06, + "loss": 0.5682, + "step": 12490 + }, + { + "epoch": 1.95, + "grad_norm": 17.57362087103549, + "learning_rate": 5.75527321512578e-06, + "loss": 0.5323, + "step": 12491 + }, + { + "epoch": 1.95, + "grad_norm": 23.030932422969112, + "learning_rate": 5.753746341143674e-06, + "loss": 0.5403, + "step": 12492 + }, + { + "epoch": 1.95, + "grad_norm": 16.337260179503378, + "learning_rate": 5.752219587922538e-06, + "loss": 0.5667, + "step": 12493 + }, + { + "epoch": 1.95, + "grad_norm": 15.85073213202555, + "learning_rate": 5.75069295550579e-06, + "loss": 0.5049, + "step": 12494 + }, + { + "epoch": 1.95, + "grad_norm": 15.73827537788363, + "learning_rate": 5.749166443936847e-06, + "loss": 0.5169, + "step": 12495 + }, + { + "epoch": 1.95, + "grad_norm": 28.790778922850944, + "learning_rate": 5.747640053259127e-06, + "loss": 0.5765, + "step": 12496 + }, + { + "epoch": 1.95, + "grad_norm": 14.127927161419743, + "learning_rate": 5.746113783516034e-06, + "loss": 0.5171, + "step": 12497 + }, + { + "epoch": 1.95, + "grad_norm": 16.800661820548402, + "learning_rate": 5.74458763475098e-06, + "loss": 0.4633, + "step": 12498 + }, + { + "epoch": 1.95, + "grad_norm": 23.60879236975523, + "learning_rate": 5.743061607007359e-06, + "loss": 0.4808, + "step": 12499 + }, + { + "epoch": 1.95, + "grad_norm": 20.959288420981103, + "learning_rate": 5.741535700328581e-06, + "loss": 0.5148, + "step": 12500 + }, + { + "epoch": 1.95, + "grad_norm": 15.711860192404695, + "learning_rate": 5.740009914758032e-06, + "loss": 0.5283, + "step": 12501 + }, + { + "epoch": 1.95, + "grad_norm": 26.531583622159197, + "learning_rate": 5.738484250339109e-06, + "loss": 0.5762, + "step": 12502 + }, + { + "epoch": 1.95, + "grad_norm": 19.132486755725896, + "learning_rate": 5.7369587071152055e-06, + "loss": 0.4862, + "step": 12503 + }, + { + "epoch": 1.95, + "grad_norm": 23.722684751858132, + "learning_rate": 5.735433285129699e-06, + "loss": 0.5213, + "step": 12504 + }, + { + "epoch": 1.95, + "grad_norm": 28.15577624268529, + "learning_rate": 5.733907984425979e-06, + "loss": 0.5755, + "step": 12505 + }, + { + "epoch": 1.95, + "grad_norm": 5.741721753286174, + "learning_rate": 5.732382805047416e-06, + "loss": 0.5924, + "step": 12506 + }, + { + "epoch": 1.95, + "grad_norm": 33.274203655077116, + "learning_rate": 5.730857747037389e-06, + "loss": 0.5293, + "step": 12507 + }, + { + "epoch": 1.95, + "grad_norm": 20.296892509717193, + "learning_rate": 5.729332810439274e-06, + "loss": 0.4954, + "step": 12508 + }, + { + "epoch": 1.95, + "grad_norm": 18.053697510222843, + "learning_rate": 5.727807995296437e-06, + "loss": 0.5509, + "step": 12509 + }, + { + "epoch": 1.95, + "grad_norm": 17.581892688055213, + "learning_rate": 5.7262833016522366e-06, + "loss": 0.4933, + "step": 12510 + }, + { + "epoch": 1.95, + "grad_norm": 18.662274235466818, 
+ "learning_rate": 5.72475872955004e-06, + "loss": 0.4975, + "step": 12511 + }, + { + "epoch": 1.95, + "grad_norm": 18.684752196093253, + "learning_rate": 5.723234279033207e-06, + "loss": 0.5613, + "step": 12512 + }, + { + "epoch": 1.95, + "grad_norm": 19.316687034363607, + "learning_rate": 5.721709950145089e-06, + "loss": 0.4734, + "step": 12513 + }, + { + "epoch": 1.95, + "grad_norm": 19.93026752592397, + "learning_rate": 5.720185742929034e-06, + "loss": 0.4768, + "step": 12514 + }, + { + "epoch": 1.95, + "grad_norm": 17.1710369393287, + "learning_rate": 5.718661657428393e-06, + "loss": 0.568, + "step": 12515 + }, + { + "epoch": 1.96, + "grad_norm": 23.876264666595794, + "learning_rate": 5.717137693686509e-06, + "loss": 0.557, + "step": 12516 + }, + { + "epoch": 1.96, + "grad_norm": 39.55901711471774, + "learning_rate": 5.7156138517467285e-06, + "loss": 0.5792, + "step": 12517 + }, + { + "epoch": 1.96, + "grad_norm": 22.886133900409952, + "learning_rate": 5.714090131652385e-06, + "loss": 0.4748, + "step": 12518 + }, + { + "epoch": 1.96, + "grad_norm": 21.304057770134662, + "learning_rate": 5.712566533446805e-06, + "loss": 0.5342, + "step": 12519 + }, + { + "epoch": 1.96, + "grad_norm": 29.1376990048434, + "learning_rate": 5.711043057173326e-06, + "loss": 0.5208, + "step": 12520 + }, + { + "epoch": 1.96, + "grad_norm": 13.883859511732858, + "learning_rate": 5.709519702875277e-06, + "loss": 0.5307, + "step": 12521 + }, + { + "epoch": 1.96, + "grad_norm": 19.384569057745946, + "learning_rate": 5.707996470595977e-06, + "loss": 0.5088, + "step": 12522 + }, + { + "epoch": 1.96, + "grad_norm": 23.603573246586848, + "learning_rate": 5.706473360378743e-06, + "loss": 0.5319, + "step": 12523 + }, + { + "epoch": 1.96, + "grad_norm": 18.797908347876046, + "learning_rate": 5.704950372266895e-06, + "loss": 0.5121, + "step": 12524 + }, + { + "epoch": 1.96, + "grad_norm": 14.752522660479476, + "learning_rate": 5.70342750630375e-06, + "loss": 0.5141, + "step": 12525 + }, + { + "epoch": 1.96, + "grad_norm": 18.492231915303353, + "learning_rate": 5.70190476253261e-06, + "loss": 0.5276, + "step": 12526 + }, + { + "epoch": 1.96, + "grad_norm": 20.262984210376192, + "learning_rate": 5.700382140996787e-06, + "loss": 0.5458, + "step": 12527 + }, + { + "epoch": 1.96, + "grad_norm": 14.646237699000984, + "learning_rate": 5.698859641739578e-06, + "loss": 0.5303, + "step": 12528 + }, + { + "epoch": 1.96, + "grad_norm": 18.33721121600938, + "learning_rate": 5.697337264804283e-06, + "loss": 0.5405, + "step": 12529 + }, + { + "epoch": 1.96, + "grad_norm": 20.62713172184386, + "learning_rate": 5.695815010234204e-06, + "loss": 0.5091, + "step": 12530 + }, + { + "epoch": 1.96, + "grad_norm": 15.779170723887841, + "learning_rate": 5.694292878072625e-06, + "loss": 0.5264, + "step": 12531 + }, + { + "epoch": 1.96, + "grad_norm": 18.651083869925394, + "learning_rate": 5.6927708683628415e-06, + "loss": 0.4928, + "step": 12532 + }, + { + "epoch": 1.96, + "grad_norm": 22.997205040947005, + "learning_rate": 5.6912489811481295e-06, + "loss": 0.5407, + "step": 12533 + }, + { + "epoch": 1.96, + "grad_norm": 16.428851128556964, + "learning_rate": 5.689727216471782e-06, + "loss": 0.5003, + "step": 12534 + }, + { + "epoch": 1.96, + "grad_norm": 27.057714057259094, + "learning_rate": 5.688205574377066e-06, + "loss": 0.5358, + "step": 12535 + }, + { + "epoch": 1.96, + "grad_norm": 13.40300789085051, + "learning_rate": 5.686684054907261e-06, + "loss": 0.5165, + "step": 12536 + }, + { + "epoch": 1.96, + "grad_norm": 20.309578242343726, + 
"learning_rate": 5.685162658105643e-06, + "loss": 0.5323, + "step": 12537 + }, + { + "epoch": 1.96, + "grad_norm": 13.518517897377475, + "learning_rate": 5.683641384015475e-06, + "loss": 0.5376, + "step": 12538 + }, + { + "epoch": 1.96, + "grad_norm": 13.366438832991074, + "learning_rate": 5.682120232680015e-06, + "loss": 0.4875, + "step": 12539 + }, + { + "epoch": 1.96, + "grad_norm": 16.21755931963509, + "learning_rate": 5.6805992041425315e-06, + "loss": 0.4978, + "step": 12540 + }, + { + "epoch": 1.96, + "grad_norm": 13.553028789026607, + "learning_rate": 5.679078298446279e-06, + "loss": 0.4798, + "step": 12541 + }, + { + "epoch": 1.96, + "grad_norm": 18.120778420877425, + "learning_rate": 5.677557515634517e-06, + "loss": 0.5202, + "step": 12542 + }, + { + "epoch": 1.96, + "grad_norm": 13.682213563651375, + "learning_rate": 5.67603685575049e-06, + "loss": 0.498, + "step": 12543 + }, + { + "epoch": 1.96, + "grad_norm": 22.76760529484355, + "learning_rate": 5.674516318837442e-06, + "loss": 0.5423, + "step": 12544 + }, + { + "epoch": 1.96, + "grad_norm": 26.01898118499872, + "learning_rate": 5.6729959049386185e-06, + "loss": 0.4723, + "step": 12545 + }, + { + "epoch": 1.96, + "grad_norm": 17.906333548665486, + "learning_rate": 5.6714756140972645e-06, + "loss": 0.5309, + "step": 12546 + }, + { + "epoch": 1.96, + "grad_norm": 23.24678918499856, + "learning_rate": 5.669955446356612e-06, + "loss": 0.5461, + "step": 12547 + }, + { + "epoch": 1.96, + "grad_norm": 21.26010737006464, + "learning_rate": 5.668435401759891e-06, + "loss": 0.5405, + "step": 12548 + }, + { + "epoch": 1.96, + "grad_norm": 14.728930170174033, + "learning_rate": 5.666915480350332e-06, + "loss": 0.4694, + "step": 12549 + }, + { + "epoch": 1.96, + "grad_norm": 18.655714290122393, + "learning_rate": 5.665395682171166e-06, + "loss": 0.56, + "step": 12550 + }, + { + "epoch": 1.96, + "grad_norm": 28.772033980332132, + "learning_rate": 5.6638760072656075e-06, + "loss": 0.6019, + "step": 12551 + }, + { + "epoch": 1.96, + "grad_norm": 21.37753722750831, + "learning_rate": 5.662356455676882e-06, + "loss": 0.5345, + "step": 12552 + }, + { + "epoch": 1.96, + "grad_norm": 16.169553315475262, + "learning_rate": 5.660837027448198e-06, + "loss": 0.5226, + "step": 12553 + }, + { + "epoch": 1.96, + "grad_norm": 20.76049640319283, + "learning_rate": 5.65931772262277e-06, + "loss": 0.5174, + "step": 12554 + }, + { + "epoch": 1.96, + "grad_norm": 13.494866472412605, + "learning_rate": 5.657798541243812e-06, + "loss": 0.5609, + "step": 12555 + }, + { + "epoch": 1.96, + "grad_norm": 17.063626158757213, + "learning_rate": 5.656279483354519e-06, + "loss": 0.5339, + "step": 12556 + }, + { + "epoch": 1.96, + "grad_norm": 20.754580984163205, + "learning_rate": 5.6547605489981e-06, + "loss": 0.5504, + "step": 12557 + }, + { + "epoch": 1.96, + "grad_norm": 27.982667719669724, + "learning_rate": 5.653241738217745e-06, + "loss": 0.5279, + "step": 12558 + }, + { + "epoch": 1.96, + "grad_norm": 20.728815621649602, + "learning_rate": 5.651723051056657e-06, + "loss": 0.4881, + "step": 12559 + }, + { + "epoch": 1.96, + "grad_norm": 23.595384337175233, + "learning_rate": 5.650204487558016e-06, + "loss": 0.5373, + "step": 12560 + }, + { + "epoch": 1.96, + "grad_norm": 30.876869151838356, + "learning_rate": 5.648686047765017e-06, + "loss": 0.5955, + "step": 12561 + }, + { + "epoch": 1.96, + "grad_norm": 13.985678155480695, + "learning_rate": 5.647167731720844e-06, + "loss": 0.5058, + "step": 12562 + }, + { + "epoch": 1.96, + "grad_norm": 25.238870857978693, + 
"learning_rate": 5.645649539468675e-06, + "loss": 0.5549, + "step": 12563 + }, + { + "epoch": 1.96, + "grad_norm": 19.525420028330117, + "learning_rate": 5.644131471051681e-06, + "loss": 0.4633, + "step": 12564 + }, + { + "epoch": 1.96, + "grad_norm": 19.56415096756817, + "learning_rate": 5.642613526513041e-06, + "loss": 0.4476, + "step": 12565 + }, + { + "epoch": 1.96, + "grad_norm": 13.182992717464638, + "learning_rate": 5.641095705895923e-06, + "loss": 0.4575, + "step": 12566 + }, + { + "epoch": 1.96, + "grad_norm": 20.488173297783145, + "learning_rate": 5.639578009243496e-06, + "loss": 0.5252, + "step": 12567 + }, + { + "epoch": 1.96, + "grad_norm": 24.166150841195538, + "learning_rate": 5.638060436598922e-06, + "loss": 0.5236, + "step": 12568 + }, + { + "epoch": 1.96, + "grad_norm": 20.61527252302159, + "learning_rate": 5.636542988005351e-06, + "loss": 0.5729, + "step": 12569 + }, + { + "epoch": 1.96, + "grad_norm": 22.638191243712967, + "learning_rate": 5.635025663505946e-06, + "loss": 0.5291, + "step": 12570 + }, + { + "epoch": 1.96, + "grad_norm": 22.562467559199376, + "learning_rate": 5.633508463143862e-06, + "loss": 0.6122, + "step": 12571 + }, + { + "epoch": 1.96, + "grad_norm": 18.02950975459572, + "learning_rate": 5.631991386962243e-06, + "loss": 0.5858, + "step": 12572 + }, + { + "epoch": 1.96, + "grad_norm": 22.289733018226038, + "learning_rate": 5.6304744350042295e-06, + "loss": 0.6044, + "step": 12573 + }, + { + "epoch": 1.96, + "grad_norm": 17.089825940804054, + "learning_rate": 5.628957607312967e-06, + "loss": 0.501, + "step": 12574 + }, + { + "epoch": 1.96, + "grad_norm": 25.87329719303149, + "learning_rate": 5.627440903931598e-06, + "loss": 0.5379, + "step": 12575 + }, + { + "epoch": 1.96, + "grad_norm": 17.764890367891415, + "learning_rate": 5.62592432490325e-06, + "loss": 0.5123, + "step": 12576 + }, + { + "epoch": 1.96, + "grad_norm": 21.741319689084367, + "learning_rate": 5.624407870271052e-06, + "loss": 0.496, + "step": 12577 + }, + { + "epoch": 1.96, + "grad_norm": 25.34575921924254, + "learning_rate": 5.622891540078135e-06, + "loss": 0.5614, + "step": 12578 + }, + { + "epoch": 1.96, + "grad_norm": 21.20204664593074, + "learning_rate": 5.621375334367622e-06, + "loss": 0.5523, + "step": 12579 + }, + { + "epoch": 1.97, + "grad_norm": 23.332139501443475, + "learning_rate": 5.619859253182638e-06, + "loss": 0.4856, + "step": 12580 + }, + { + "epoch": 1.97, + "grad_norm": 20.622692533943, + "learning_rate": 5.618343296566293e-06, + "loss": 0.4649, + "step": 12581 + }, + { + "epoch": 1.97, + "grad_norm": 14.584562452381421, + "learning_rate": 5.616827464561698e-06, + "loss": 0.5671, + "step": 12582 + }, + { + "epoch": 1.97, + "grad_norm": 16.894356240485116, + "learning_rate": 5.615311757211965e-06, + "loss": 0.468, + "step": 12583 + }, + { + "epoch": 1.97, + "grad_norm": 14.573504952153666, + "learning_rate": 5.613796174560207e-06, + "loss": 0.4361, + "step": 12584 + }, + { + "epoch": 1.97, + "grad_norm": 17.565973201874296, + "learning_rate": 5.612280716649514e-06, + "loss": 0.5162, + "step": 12585 + }, + { + "epoch": 1.97, + "grad_norm": 20.376518821859634, + "learning_rate": 5.6107653835229954e-06, + "loss": 0.554, + "step": 12586 + }, + { + "epoch": 1.97, + "grad_norm": 16.229109762060997, + "learning_rate": 5.609250175223737e-06, + "loss": 0.4645, + "step": 12587 + }, + { + "epoch": 1.97, + "grad_norm": 17.404187656848006, + "learning_rate": 5.607735091794839e-06, + "loss": 0.5476, + "step": 12588 + }, + { + "epoch": 1.97, + "grad_norm": 20.088113569729536, + 
"learning_rate": 5.606220133279383e-06, + "loss": 0.512, + "step": 12589 + }, + { + "epoch": 1.97, + "grad_norm": 13.489813118036757, + "learning_rate": 5.604705299720455e-06, + "loss": 0.4846, + "step": 12590 + }, + { + "epoch": 1.97, + "grad_norm": 19.402525817803287, + "learning_rate": 5.603190591161141e-06, + "loss": 0.5612, + "step": 12591 + }, + { + "epoch": 1.97, + "grad_norm": 17.287011542456632, + "learning_rate": 5.601676007644511e-06, + "loss": 0.5346, + "step": 12592 + }, + { + "epoch": 1.97, + "grad_norm": 25.26303079305595, + "learning_rate": 5.600161549213647e-06, + "loss": 0.6273, + "step": 12593 + }, + { + "epoch": 1.97, + "grad_norm": 20.737577954910897, + "learning_rate": 5.598647215911609e-06, + "loss": 0.4365, + "step": 12594 + }, + { + "epoch": 1.97, + "grad_norm": 18.259214475553833, + "learning_rate": 5.59713300778147e-06, + "loss": 0.536, + "step": 12595 + }, + { + "epoch": 1.97, + "grad_norm": 19.789865134882238, + "learning_rate": 5.595618924866298e-06, + "loss": 0.5557, + "step": 12596 + }, + { + "epoch": 1.97, + "grad_norm": 30.925207838876528, + "learning_rate": 5.594104967209146e-06, + "loss": 0.5139, + "step": 12597 + }, + { + "epoch": 1.97, + "grad_norm": 20.160048251129773, + "learning_rate": 5.592591134853067e-06, + "loss": 0.4601, + "step": 12598 + }, + { + "epoch": 1.97, + "grad_norm": 33.102190886856214, + "learning_rate": 5.591077427841118e-06, + "loss": 0.5281, + "step": 12599 + }, + { + "epoch": 1.97, + "grad_norm": 32.24142014145156, + "learning_rate": 5.5895638462163536e-06, + "loss": 0.5306, + "step": 12600 + }, + { + "epoch": 1.97, + "grad_norm": 15.513560001143286, + "learning_rate": 5.588050390021812e-06, + "loss": 0.5155, + "step": 12601 + }, + { + "epoch": 1.97, + "grad_norm": 13.357463083037707, + "learning_rate": 5.586537059300532e-06, + "loss": 0.5161, + "step": 12602 + }, + { + "epoch": 1.97, + "grad_norm": 17.789975246355397, + "learning_rate": 5.585023854095557e-06, + "loss": 0.5413, + "step": 12603 + }, + { + "epoch": 1.97, + "grad_norm": 14.613335914478983, + "learning_rate": 5.5835107744499205e-06, + "loss": 0.4683, + "step": 12604 + }, + { + "epoch": 1.97, + "grad_norm": 24.80942499891135, + "learning_rate": 5.581997820406659e-06, + "loss": 0.5057, + "step": 12605 + }, + { + "epoch": 1.97, + "grad_norm": 18.141944585903207, + "learning_rate": 5.580484992008795e-06, + "loss": 0.5648, + "step": 12606 + }, + { + "epoch": 1.97, + "grad_norm": 25.615456157019434, + "learning_rate": 5.5789722892993466e-06, + "loss": 0.6131, + "step": 12607 + }, + { + "epoch": 1.97, + "grad_norm": 36.313936375821235, + "learning_rate": 5.577459712321341e-06, + "loss": 0.5668, + "step": 12608 + }, + { + "epoch": 1.97, + "grad_norm": 20.30870921782114, + "learning_rate": 5.575947261117798e-06, + "loss": 0.5608, + "step": 12609 + }, + { + "epoch": 1.97, + "grad_norm": 22.2800572030574, + "learning_rate": 5.574434935731723e-06, + "loss": 0.4784, + "step": 12610 + }, + { + "epoch": 1.97, + "grad_norm": 26.826145475115595, + "learning_rate": 5.572922736206135e-06, + "loss": 0.502, + "step": 12611 + }, + { + "epoch": 1.97, + "grad_norm": 13.839644765217468, + "learning_rate": 5.571410662584029e-06, + "loss": 0.4732, + "step": 12612 + }, + { + "epoch": 1.97, + "grad_norm": 12.342510382058945, + "learning_rate": 5.5698987149084174e-06, + "loss": 0.5158, + "step": 12613 + }, + { + "epoch": 1.97, + "grad_norm": 17.544607838055274, + "learning_rate": 5.568386893222291e-06, + "loss": 0.5141, + "step": 12614 + }, + { + "epoch": 1.97, + "grad_norm": 17.28824598855327, + 
"learning_rate": 5.5668751975686485e-06, + "loss": 0.6036, + "step": 12615 + }, + { + "epoch": 1.97, + "grad_norm": 35.074450238780216, + "learning_rate": 5.565363627990485e-06, + "loss": 0.5349, + "step": 12616 + }, + { + "epoch": 1.97, + "grad_norm": 23.567612451319665, + "learning_rate": 5.563852184530784e-06, + "loss": 0.4689, + "step": 12617 + }, + { + "epoch": 1.97, + "grad_norm": 17.957472684793785, + "learning_rate": 5.562340867232535e-06, + "loss": 0.6178, + "step": 12618 + }, + { + "epoch": 1.97, + "grad_norm": 19.258346950241314, + "learning_rate": 5.560829676138712e-06, + "loss": 0.5912, + "step": 12619 + }, + { + "epoch": 1.97, + "grad_norm": 19.857396440666978, + "learning_rate": 5.559318611292299e-06, + "loss": 0.5387, + "step": 12620 + }, + { + "epoch": 1.97, + "grad_norm": 22.970008307004164, + "learning_rate": 5.557807672736264e-06, + "loss": 0.5527, + "step": 12621 + }, + { + "epoch": 1.97, + "grad_norm": 25.550777935103092, + "learning_rate": 5.556296860513584e-06, + "loss": 0.4918, + "step": 12622 + }, + { + "epoch": 1.97, + "grad_norm": 19.66604013467039, + "learning_rate": 5.554786174667217e-06, + "loss": 0.5096, + "step": 12623 + }, + { + "epoch": 1.97, + "grad_norm": 21.533930410269456, + "learning_rate": 5.553275615240132e-06, + "loss": 0.5125, + "step": 12624 + }, + { + "epoch": 1.97, + "grad_norm": 14.539640807673022, + "learning_rate": 5.551765182275292e-06, + "loss": 0.4658, + "step": 12625 + }, + { + "epoch": 1.97, + "grad_norm": 15.966272654662532, + "learning_rate": 5.550254875815646e-06, + "loss": 0.519, + "step": 12626 + }, + { + "epoch": 1.97, + "grad_norm": 19.805065626939996, + "learning_rate": 5.548744695904145e-06, + "loss": 0.4964, + "step": 12627 + }, + { + "epoch": 1.97, + "grad_norm": 30.900625258009978, + "learning_rate": 5.5472346425837405e-06, + "loss": 0.56, + "step": 12628 + }, + { + "epoch": 1.97, + "grad_norm": 16.063630668201945, + "learning_rate": 5.545724715897381e-06, + "loss": 0.5138, + "step": 12629 + }, + { + "epoch": 1.97, + "grad_norm": 21.596001596001994, + "learning_rate": 5.544214915888006e-06, + "loss": 0.5456, + "step": 12630 + }, + { + "epoch": 1.97, + "grad_norm": 15.340409046731004, + "learning_rate": 5.542705242598552e-06, + "loss": 0.5157, + "step": 12631 + }, + { + "epoch": 1.97, + "grad_norm": 18.661060137275072, + "learning_rate": 5.54119569607195e-06, + "loss": 0.5138, + "step": 12632 + }, + { + "epoch": 1.97, + "grad_norm": 22.36441971592129, + "learning_rate": 5.539686276351135e-06, + "loss": 0.495, + "step": 12633 + }, + { + "epoch": 1.97, + "grad_norm": 17.35537810670071, + "learning_rate": 5.538176983479036e-06, + "loss": 0.5449, + "step": 12634 + }, + { + "epoch": 1.97, + "grad_norm": 15.332534711553333, + "learning_rate": 5.536667817498573e-06, + "loss": 0.4595, + "step": 12635 + }, + { + "epoch": 1.97, + "grad_norm": 25.484735979836614, + "learning_rate": 5.535158778452664e-06, + "loss": 0.5956, + "step": 12636 + }, + { + "epoch": 1.97, + "grad_norm": 19.2746785231696, + "learning_rate": 5.533649866384226e-06, + "loss": 0.5036, + "step": 12637 + }, + { + "epoch": 1.97, + "grad_norm": 21.945022102042472, + "learning_rate": 5.532141081336177e-06, + "loss": 0.5224, + "step": 12638 + }, + { + "epoch": 1.97, + "grad_norm": 13.935658346049768, + "learning_rate": 5.530632423351421e-06, + "loss": 0.4443, + "step": 12639 + }, + { + "epoch": 1.97, + "grad_norm": 18.51050762449517, + "learning_rate": 5.5291238924728654e-06, + "loss": 0.4302, + "step": 12640 + }, + { + "epoch": 1.97, + "grad_norm": 14.027996218024988, + 
"learning_rate": 5.5276154887434075e-06, + "loss": 0.5304, + "step": 12641 + }, + { + "epoch": 1.97, + "grad_norm": 17.159128770952982, + "learning_rate": 5.526107212205949e-06, + "loss": 0.5162, + "step": 12642 + }, + { + "epoch": 1.97, + "grad_norm": 19.50456143483398, + "learning_rate": 5.524599062903388e-06, + "loss": 0.512, + "step": 12643 + }, + { + "epoch": 1.98, + "grad_norm": 16.944525703500673, + "learning_rate": 5.523091040878608e-06, + "loss": 0.5381, + "step": 12644 + }, + { + "epoch": 1.98, + "grad_norm": 14.98428168009199, + "learning_rate": 5.521583146174503e-06, + "loss": 0.4688, + "step": 12645 + }, + { + "epoch": 1.98, + "grad_norm": 15.4442419198208, + "learning_rate": 5.5200753788339515e-06, + "loss": 0.5068, + "step": 12646 + }, + { + "epoch": 1.98, + "grad_norm": 15.575833924074296, + "learning_rate": 5.518567738899838e-06, + "loss": 0.5214, + "step": 12647 + }, + { + "epoch": 1.98, + "grad_norm": 17.677035146254948, + "learning_rate": 5.517060226415032e-06, + "loss": 0.4799, + "step": 12648 + }, + { + "epoch": 1.98, + "grad_norm": 16.97043014080274, + "learning_rate": 5.515552841422412e-06, + "loss": 0.509, + "step": 12649 + }, + { + "epoch": 1.98, + "grad_norm": 16.025432938561142, + "learning_rate": 5.514045583964848e-06, + "loss": 0.4991, + "step": 12650 + }, + { + "epoch": 1.98, + "grad_norm": 21.943064070286184, + "learning_rate": 5.512538454085206e-06, + "loss": 0.5119, + "step": 12651 + }, + { + "epoch": 1.98, + "grad_norm": 12.590342849984218, + "learning_rate": 5.51103145182634e-06, + "loss": 0.493, + "step": 12652 + }, + { + "epoch": 1.98, + "grad_norm": 16.1280554974113, + "learning_rate": 5.509524577231114e-06, + "loss": 0.5113, + "step": 12653 + }, + { + "epoch": 1.98, + "grad_norm": 17.01443975128287, + "learning_rate": 5.50801783034238e-06, + "loss": 0.4957, + "step": 12654 + }, + { + "epoch": 1.98, + "grad_norm": 18.94761608709194, + "learning_rate": 5.506511211202997e-06, + "loss": 0.5017, + "step": 12655 + }, + { + "epoch": 1.98, + "grad_norm": 18.506820794649318, + "learning_rate": 5.505004719855806e-06, + "loss": 0.4814, + "step": 12656 + }, + { + "epoch": 1.98, + "grad_norm": 15.043393147162146, + "learning_rate": 5.503498356343648e-06, + "loss": 0.4844, + "step": 12657 + }, + { + "epoch": 1.98, + "grad_norm": 15.508233658479071, + "learning_rate": 5.501992120709367e-06, + "loss": 0.4701, + "step": 12658 + }, + { + "epoch": 1.98, + "grad_norm": 14.224943003368498, + "learning_rate": 5.500486012995801e-06, + "loss": 0.4635, + "step": 12659 + }, + { + "epoch": 1.98, + "grad_norm": 16.72161179764742, + "learning_rate": 5.498980033245782e-06, + "loss": 0.5972, + "step": 12660 + }, + { + "epoch": 1.98, + "grad_norm": 18.86460772424473, + "learning_rate": 5.4974741815021336e-06, + "loss": 0.5446, + "step": 12661 + }, + { + "epoch": 1.98, + "grad_norm": 21.606701164842864, + "learning_rate": 5.4959684578076855e-06, + "loss": 0.4804, + "step": 12662 + }, + { + "epoch": 1.98, + "grad_norm": 23.86005248636155, + "learning_rate": 5.494462862205263e-06, + "loss": 0.5453, + "step": 12663 + }, + { + "epoch": 1.98, + "grad_norm": 18.910636331555022, + "learning_rate": 5.492957394737677e-06, + "loss": 0.5336, + "step": 12664 + }, + { + "epoch": 1.98, + "grad_norm": 16.01784423432177, + "learning_rate": 5.49145205544775e-06, + "loss": 0.4901, + "step": 12665 + }, + { + "epoch": 1.98, + "grad_norm": 26.87061895546319, + "learning_rate": 5.4899468443782864e-06, + "loss": 0.4757, + "step": 12666 + }, + { + "epoch": 1.98, + "grad_norm": 31.910062698953737, + 
"learning_rate": 5.488441761572096e-06, + "loss": 0.5706, + "step": 12667 + }, + { + "epoch": 1.98, + "grad_norm": 25.82587874949454, + "learning_rate": 5.486936807071986e-06, + "loss": 0.532, + "step": 12668 + }, + { + "epoch": 1.98, + "grad_norm": 18.129131046965366, + "learning_rate": 5.485431980920753e-06, + "loss": 0.5269, + "step": 12669 + }, + { + "epoch": 1.98, + "grad_norm": 23.77725639534025, + "learning_rate": 5.4839272831611905e-06, + "loss": 0.5058, + "step": 12670 + }, + { + "epoch": 1.98, + "grad_norm": 19.692392764572553, + "learning_rate": 5.482422713836094e-06, + "loss": 0.4975, + "step": 12671 + }, + { + "epoch": 1.98, + "grad_norm": 22.37550091415856, + "learning_rate": 5.480918272988256e-06, + "loss": 0.5867, + "step": 12672 + }, + { + "epoch": 1.98, + "grad_norm": 12.749561082540527, + "learning_rate": 5.4794139606604556e-06, + "loss": 0.4644, + "step": 12673 + }, + { + "epoch": 1.98, + "grad_norm": 19.30856260188741, + "learning_rate": 5.477909776895481e-06, + "loss": 0.5362, + "step": 12674 + }, + { + "epoch": 1.98, + "grad_norm": 15.235047056584289, + "learning_rate": 5.476405721736104e-06, + "loss": 0.5213, + "step": 12675 + }, + { + "epoch": 1.98, + "grad_norm": 19.307820831949023, + "learning_rate": 5.474901795225105e-06, + "loss": 0.4949, + "step": 12676 + }, + { + "epoch": 1.98, + "grad_norm": 26.21044316117008, + "learning_rate": 5.473397997405249e-06, + "loss": 0.591, + "step": 12677 + }, + { + "epoch": 1.98, + "grad_norm": 24.42783966066164, + "learning_rate": 5.471894328319305e-06, + "loss": 0.5754, + "step": 12678 + }, + { + "epoch": 1.98, + "grad_norm": 20.827380236440565, + "learning_rate": 5.470390788010042e-06, + "loss": 0.4074, + "step": 12679 + }, + { + "epoch": 1.98, + "grad_norm": 16.652490441152498, + "learning_rate": 5.4688873765202114e-06, + "loss": 0.476, + "step": 12680 + }, + { + "epoch": 1.98, + "grad_norm": 16.85093569175643, + "learning_rate": 5.467384093892576e-06, + "loss": 0.5046, + "step": 12681 + }, + { + "epoch": 1.98, + "grad_norm": 21.883627884861234, + "learning_rate": 5.465880940169881e-06, + "loss": 0.4855, + "step": 12682 + }, + { + "epoch": 1.98, + "grad_norm": 22.787404535248456, + "learning_rate": 5.464377915394882e-06, + "loss": 0.4596, + "step": 12683 + }, + { + "epoch": 1.98, + "grad_norm": 23.843085502224415, + "learning_rate": 5.4628750196103245e-06, + "loss": 0.5642, + "step": 12684 + }, + { + "epoch": 1.98, + "grad_norm": 15.258187935913085, + "learning_rate": 5.461372252858948e-06, + "loss": 0.4234, + "step": 12685 + }, + { + "epoch": 1.98, + "grad_norm": 20.812965002678975, + "learning_rate": 5.459869615183484e-06, + "loss": 0.5264, + "step": 12686 + }, + { + "epoch": 1.98, + "grad_norm": 20.075905620799684, + "learning_rate": 5.458367106626674e-06, + "loss": 0.4839, + "step": 12687 + }, + { + "epoch": 1.98, + "grad_norm": 31.14433998849154, + "learning_rate": 5.456864727231246e-06, + "loss": 0.5968, + "step": 12688 + }, + { + "epoch": 1.98, + "grad_norm": 15.173216462374063, + "learning_rate": 5.455362477039935e-06, + "loss": 0.4966, + "step": 12689 + }, + { + "epoch": 1.98, + "grad_norm": 23.078196366625278, + "learning_rate": 5.453860356095448e-06, + "loss": 0.515, + "step": 12690 + }, + { + "epoch": 1.98, + "grad_norm": 16.39923988531013, + "learning_rate": 5.452358364440515e-06, + "loss": 0.4839, + "step": 12691 + }, + { + "epoch": 1.98, + "grad_norm": 34.13339292234002, + "learning_rate": 5.450856502117849e-06, + "loss": 0.5461, + "step": 12692 + }, + { + "epoch": 1.98, + "grad_norm": 46.60784001688967, + 
"learning_rate": 5.449354769170168e-06, + "loss": 0.4725, + "step": 12693 + }, + { + "epoch": 1.98, + "grad_norm": 16.84592823884347, + "learning_rate": 5.447853165640173e-06, + "loss": 0.4901, + "step": 12694 + }, + { + "epoch": 1.98, + "grad_norm": 16.6065309328846, + "learning_rate": 5.4463516915705684e-06, + "loss": 0.5196, + "step": 12695 + }, + { + "epoch": 1.98, + "grad_norm": 16.75612199254925, + "learning_rate": 5.444850347004058e-06, + "loss": 0.4887, + "step": 12696 + }, + { + "epoch": 1.98, + "grad_norm": 19.72679947134772, + "learning_rate": 5.443349131983343e-06, + "loss": 0.5495, + "step": 12697 + }, + { + "epoch": 1.98, + "grad_norm": 16.374805875166246, + "learning_rate": 5.441848046551108e-06, + "loss": 0.4772, + "step": 12698 + }, + { + "epoch": 1.98, + "grad_norm": 23.827177509168333, + "learning_rate": 5.440347090750053e-06, + "loss": 0.4797, + "step": 12699 + }, + { + "epoch": 1.98, + "grad_norm": 18.745513655058687, + "learning_rate": 5.438846264622857e-06, + "loss": 0.5091, + "step": 12700 + }, + { + "epoch": 1.98, + "grad_norm": 14.107582728295293, + "learning_rate": 5.437345568212207e-06, + "loss": 0.4981, + "step": 12701 + }, + { + "epoch": 1.98, + "grad_norm": 27.19873679217685, + "learning_rate": 5.435845001560775e-06, + "loss": 0.6202, + "step": 12702 + }, + { + "epoch": 1.98, + "grad_norm": 22.438463846506803, + "learning_rate": 5.434344564711244e-06, + "loss": 0.534, + "step": 12703 + }, + { + "epoch": 1.98, + "grad_norm": 34.288939828016595, + "learning_rate": 5.432844257706285e-06, + "loss": 0.5175, + "step": 12704 + }, + { + "epoch": 1.98, + "grad_norm": 22.999118775092427, + "learning_rate": 5.431344080588561e-06, + "loss": 0.479, + "step": 12705 + }, + { + "epoch": 1.98, + "grad_norm": 14.19710137900284, + "learning_rate": 5.4298440334007415e-06, + "loss": 0.4579, + "step": 12706 + }, + { + "epoch": 1.98, + "grad_norm": 13.66095380333795, + "learning_rate": 5.42834411618548e-06, + "loss": 0.5239, + "step": 12707 + }, + { + "epoch": 1.99, + "grad_norm": 19.466056785167183, + "learning_rate": 5.426844328985439e-06, + "loss": 0.4562, + "step": 12708 + }, + { + "epoch": 1.99, + "grad_norm": 14.137920294845857, + "learning_rate": 5.4253446718432724e-06, + "loss": 0.5183, + "step": 12709 + }, + { + "epoch": 1.99, + "grad_norm": 22.31210413416006, + "learning_rate": 5.423845144801627e-06, + "loss": 0.449, + "step": 12710 + }, + { + "epoch": 1.99, + "grad_norm": 18.312527322911524, + "learning_rate": 5.422345747903146e-06, + "loss": 0.581, + "step": 12711 + }, + { + "epoch": 1.99, + "grad_norm": 11.636423452958091, + "learning_rate": 5.420846481190473e-06, + "loss": 0.4649, + "step": 12712 + }, + { + "epoch": 1.99, + "grad_norm": 20.911916545742397, + "learning_rate": 5.41934734470625e-06, + "loss": 0.443, + "step": 12713 + }, + { + "epoch": 1.99, + "grad_norm": 22.308307962485706, + "learning_rate": 5.417848338493114e-06, + "loss": 0.4783, + "step": 12714 + }, + { + "epoch": 1.99, + "grad_norm": 12.178909942491316, + "learning_rate": 5.416349462593684e-06, + "loss": 0.4977, + "step": 12715 + }, + { + "epoch": 1.99, + "grad_norm": 19.61768315827325, + "learning_rate": 5.414850717050593e-06, + "loss": 0.567, + "step": 12716 + }, + { + "epoch": 1.99, + "grad_norm": 31.15318871120512, + "learning_rate": 5.413352101906466e-06, + "loss": 0.5068, + "step": 12717 + }, + { + "epoch": 1.99, + "grad_norm": 15.16018160429881, + "learning_rate": 5.411853617203926e-06, + "loss": 0.5339, + "step": 12718 + }, + { + "epoch": 1.99, + "grad_norm": 19.35363165088891, + 
"learning_rate": 5.410355262985585e-06, + "loss": 0.6084, + "step": 12719 + }, + { + "epoch": 1.99, + "grad_norm": 24.946300817947172, + "learning_rate": 5.408857039294052e-06, + "loss": 0.606, + "step": 12720 + }, + { + "epoch": 1.99, + "grad_norm": 17.25809714569129, + "learning_rate": 5.407358946171939e-06, + "loss": 0.5121, + "step": 12721 + }, + { + "epoch": 1.99, + "grad_norm": 20.165364431807888, + "learning_rate": 5.405860983661854e-06, + "loss": 0.5182, + "step": 12722 + }, + { + "epoch": 1.99, + "grad_norm": 15.306420280230563, + "learning_rate": 5.404363151806397e-06, + "loss": 0.5631, + "step": 12723 + }, + { + "epoch": 1.99, + "grad_norm": 21.31934463978182, + "learning_rate": 5.402865450648158e-06, + "loss": 0.5226, + "step": 12724 + }, + { + "epoch": 1.99, + "grad_norm": 19.09855279634109, + "learning_rate": 5.401367880229737e-06, + "loss": 0.4902, + "step": 12725 + }, + { + "epoch": 1.99, + "grad_norm": 20.092424936343008, + "learning_rate": 5.399870440593726e-06, + "loss": 0.4783, + "step": 12726 + }, + { + "epoch": 1.99, + "grad_norm": 24.567248699527127, + "learning_rate": 5.3983731317827075e-06, + "loss": 0.4904, + "step": 12727 + }, + { + "epoch": 1.99, + "grad_norm": 24.319415452431414, + "learning_rate": 5.396875953839267e-06, + "loss": 0.5109, + "step": 12728 + }, + { + "epoch": 1.99, + "grad_norm": 22.834170988666447, + "learning_rate": 5.3953789068059785e-06, + "loss": 0.47, + "step": 12729 + }, + { + "epoch": 1.99, + "grad_norm": 15.93428220054176, + "learning_rate": 5.3938819907254204e-06, + "loss": 0.499, + "step": 12730 + }, + { + "epoch": 1.99, + "grad_norm": 15.772634888247694, + "learning_rate": 5.392385205640167e-06, + "loss": 0.4686, + "step": 12731 + }, + { + "epoch": 1.99, + "grad_norm": 28.175442800283278, + "learning_rate": 5.390888551592779e-06, + "loss": 0.5549, + "step": 12732 + }, + { + "epoch": 1.99, + "grad_norm": 22.662389325299316, + "learning_rate": 5.3893920286258285e-06, + "loss": 0.4863, + "step": 12733 + }, + { + "epoch": 1.99, + "grad_norm": 22.161478477555537, + "learning_rate": 5.387895636781868e-06, + "loss": 0.531, + "step": 12734 + }, + { + "epoch": 1.99, + "grad_norm": 15.2288172786045, + "learning_rate": 5.386399376103462e-06, + "loss": 0.5023, + "step": 12735 + }, + { + "epoch": 1.99, + "grad_norm": 29.600610627620956, + "learning_rate": 5.3849032466331555e-06, + "loss": 0.5666, + "step": 12736 + }, + { + "epoch": 1.99, + "grad_norm": 22.337931923220637, + "learning_rate": 5.3834072484134995e-06, + "loss": 0.4963, + "step": 12737 + }, + { + "epoch": 1.99, + "grad_norm": 30.882099074854956, + "learning_rate": 5.381911381487044e-06, + "loss": 0.5255, + "step": 12738 + }, + { + "epoch": 1.99, + "grad_norm": 17.9306973341253, + "learning_rate": 5.380415645896329e-06, + "loss": 0.5578, + "step": 12739 + }, + { + "epoch": 1.99, + "grad_norm": 23.92875505300171, + "learning_rate": 5.378920041683886e-06, + "loss": 0.5569, + "step": 12740 + }, + { + "epoch": 1.99, + "grad_norm": 25.04182570708978, + "learning_rate": 5.377424568892253e-06, + "loss": 0.5425, + "step": 12741 + }, + { + "epoch": 1.99, + "grad_norm": 16.041537775774856, + "learning_rate": 5.375929227563963e-06, + "loss": 0.4817, + "step": 12742 + }, + { + "epoch": 1.99, + "grad_norm": 15.55573722194232, + "learning_rate": 5.374434017741543e-06, + "loss": 0.5313, + "step": 12743 + }, + { + "epoch": 1.99, + "grad_norm": 23.24591995787106, + "learning_rate": 5.372938939467514e-06, + "loss": 0.5362, + "step": 12744 + }, + { + "epoch": 1.99, + "grad_norm": 23.268212947544505, + 
"learning_rate": 5.371443992784391e-06, + "loss": 0.5324, + "step": 12745 + }, + { + "epoch": 1.99, + "grad_norm": 14.161217592649967, + "learning_rate": 5.3699491777346935e-06, + "loss": 0.4901, + "step": 12746 + }, + { + "epoch": 1.99, + "grad_norm": 22.94016474127129, + "learning_rate": 5.3684544943609375e-06, + "loss": 0.5367, + "step": 12747 + }, + { + "epoch": 1.99, + "grad_norm": 19.399479291889282, + "learning_rate": 5.366959942705625e-06, + "loss": 0.567, + "step": 12748 + }, + { + "epoch": 1.99, + "grad_norm": 15.848442611949254, + "learning_rate": 5.3654655228112574e-06, + "loss": 0.5463, + "step": 12749 + }, + { + "epoch": 1.99, + "grad_norm": 21.540081386106916, + "learning_rate": 5.36397123472034e-06, + "loss": 0.5711, + "step": 12750 + }, + { + "epoch": 1.99, + "grad_norm": 17.756696683615296, + "learning_rate": 5.362477078475372e-06, + "loss": 0.4785, + "step": 12751 + }, + { + "epoch": 1.99, + "grad_norm": 27.4221936579608, + "learning_rate": 5.36098305411884e-06, + "loss": 0.5048, + "step": 12752 + }, + { + "epoch": 1.99, + "grad_norm": 16.355368281571305, + "learning_rate": 5.35948916169324e-06, + "loss": 0.4539, + "step": 12753 + }, + { + "epoch": 1.99, + "grad_norm": 19.192543244568437, + "learning_rate": 5.357995401241049e-06, + "loss": 0.5127, + "step": 12754 + }, + { + "epoch": 1.99, + "grad_norm": 17.156664353024194, + "learning_rate": 5.3565017728047545e-06, + "loss": 0.5508, + "step": 12755 + }, + { + "epoch": 1.99, + "grad_norm": 22.408044845592613, + "learning_rate": 5.355008276426836e-06, + "loss": 0.5727, + "step": 12756 + }, + { + "epoch": 1.99, + "grad_norm": 27.378817416501576, + "learning_rate": 5.353514912149761e-06, + "loss": 0.5143, + "step": 12757 + }, + { + "epoch": 1.99, + "grad_norm": 15.563650196440346, + "learning_rate": 5.3520216800160085e-06, + "loss": 0.489, + "step": 12758 + }, + { + "epoch": 1.99, + "grad_norm": 22.96476733199368, + "learning_rate": 5.350528580068035e-06, + "loss": 0.545, + "step": 12759 + }, + { + "epoch": 1.99, + "grad_norm": 23.688638459101494, + "learning_rate": 5.349035612348314e-06, + "loss": 0.4843, + "step": 12760 + }, + { + "epoch": 1.99, + "grad_norm": 16.728049830840234, + "learning_rate": 5.347542776899295e-06, + "loss": 0.4693, + "step": 12761 + }, + { + "epoch": 1.99, + "grad_norm": 23.359305906672024, + "learning_rate": 5.346050073763438e-06, + "loss": 0.5438, + "step": 12762 + }, + { + "epoch": 1.99, + "grad_norm": 19.39442096690375, + "learning_rate": 5.344557502983198e-06, + "loss": 0.6188, + "step": 12763 + }, + { + "epoch": 1.99, + "grad_norm": 15.823107072761507, + "learning_rate": 5.343065064601021e-06, + "loss": 0.5734, + "step": 12764 + }, + { + "epoch": 1.99, + "grad_norm": 24.274501514472956, + "learning_rate": 5.3415727586593435e-06, + "loss": 0.5177, + "step": 12765 + }, + { + "epoch": 1.99, + "grad_norm": 18.339050194956446, + "learning_rate": 5.340080585200612e-06, + "loss": 0.5186, + "step": 12766 + }, + { + "epoch": 1.99, + "grad_norm": 15.009594334932329, + "learning_rate": 5.338588544267267e-06, + "loss": 0.5078, + "step": 12767 + }, + { + "epoch": 1.99, + "grad_norm": 21.263938690498964, + "learning_rate": 5.3370966359017325e-06, + "loss": 0.5454, + "step": 12768 + }, + { + "epoch": 1.99, + "grad_norm": 16.471205769851714, + "learning_rate": 5.335604860146446e-06, + "loss": 0.4558, + "step": 12769 + }, + { + "epoch": 1.99, + "grad_norm": 12.348412244402393, + "learning_rate": 5.334113217043826e-06, + "loss": 0.4622, + "step": 12770 + }, + { + "epoch": 1.99, + "grad_norm": 
28.880391896280504, + "learning_rate": 5.332621706636295e-06, + "loss": 0.5254, + "step": 12771 + }, + { + "epoch": 2.0, + "grad_norm": 18.312226957834174, + "learning_rate": 5.331130328966276e-06, + "loss": 0.5503, + "step": 12772 + }, + { + "epoch": 2.0, + "grad_norm": 19.21481608547601, + "learning_rate": 5.329639084076181e-06, + "loss": 0.5063, + "step": 12773 + }, + { + "epoch": 2.0, + "grad_norm": 23.63318441555478, + "learning_rate": 5.3281479720084125e-06, + "loss": 0.4939, + "step": 12774 + }, + { + "epoch": 2.0, + "grad_norm": 17.814329478358903, + "learning_rate": 5.326656992805384e-06, + "loss": 0.4858, + "step": 12775 + }, + { + "epoch": 2.0, + "grad_norm": 17.21986106136835, + "learning_rate": 5.325166146509497e-06, + "loss": 0.4915, + "step": 12776 + }, + { + "epoch": 2.0, + "grad_norm": 17.45065739390407, + "learning_rate": 5.323675433163158e-06, + "loss": 0.5109, + "step": 12777 + }, + { + "epoch": 2.0, + "grad_norm": 18.690695171323338, + "learning_rate": 5.322184852808745e-06, + "loss": 0.5427, + "step": 12778 + }, + { + "epoch": 2.0, + "grad_norm": 21.935566199145306, + "learning_rate": 5.32069440548866e-06, + "loss": 0.4739, + "step": 12779 + }, + { + "epoch": 2.0, + "grad_norm": 15.900462675804087, + "learning_rate": 5.319204091245287e-06, + "loss": 0.5493, + "step": 12780 + }, + { + "epoch": 2.0, + "grad_norm": 16.957816886321336, + "learning_rate": 5.317713910121016e-06, + "loss": 0.5106, + "step": 12781 + }, + { + "epoch": 2.0, + "grad_norm": 12.700480878612106, + "learning_rate": 5.316223862158223e-06, + "loss": 0.4849, + "step": 12782 + }, + { + "epoch": 2.0, + "grad_norm": 22.165340633469405, + "learning_rate": 5.31473394739928e-06, + "loss": 0.486, + "step": 12783 + }, + { + "epoch": 2.0, + "grad_norm": 19.21583252138848, + "learning_rate": 5.313244165886562e-06, + "loss": 0.5137, + "step": 12784 + }, + { + "epoch": 2.0, + "grad_norm": 15.628529801300667, + "learning_rate": 5.311754517662442e-06, + "loss": 0.5165, + "step": 12785 + }, + { + "epoch": 2.0, + "grad_norm": 28.03665938858337, + "learning_rate": 5.310265002769277e-06, + "loss": 0.5386, + "step": 12786 + }, + { + "epoch": 2.0, + "grad_norm": 26.411248315271912, + "learning_rate": 5.308775621249435e-06, + "loss": 0.5839, + "step": 12787 + }, + { + "epoch": 2.0, + "grad_norm": 19.33712262346072, + "learning_rate": 5.3072863731452674e-06, + "loss": 0.5538, + "step": 12788 + }, + { + "epoch": 2.0, + "grad_norm": 19.014096949390407, + "learning_rate": 5.305797258499134e-06, + "loss": 0.5904, + "step": 12789 + }, + { + "epoch": 2.0, + "grad_norm": 24.258354472127433, + "learning_rate": 5.304308277353376e-06, + "loss": 0.5926, + "step": 12790 + }, + { + "epoch": 2.0, + "grad_norm": 28.179351920976487, + "learning_rate": 5.3028194297503445e-06, + "loss": 0.5284, + "step": 12791 + }, + { + "epoch": 2.0, + "grad_norm": 23.945392149252918, + "learning_rate": 5.301330715732385e-06, + "loss": 0.5319, + "step": 12792 + }, + { + "epoch": 2.0, + "grad_norm": 18.357873436416508, + "learning_rate": 5.299842135341825e-06, + "loss": 0.5489, + "step": 12793 + }, + { + "epoch": 2.0, + "grad_norm": 20.088004487734466, + "learning_rate": 5.29835368862101e-06, + "loss": 0.5624, + "step": 12794 + }, + { + "epoch": 2.0, + "grad_norm": 16.059239912012366, + "learning_rate": 5.296865375612264e-06, + "loss": 0.5117, + "step": 12795 + }, + { + "epoch": 2.0, + "grad_norm": 19.88174735815471, + "learning_rate": 5.295377196357914e-06, + "loss": 0.5244, + "step": 12796 + }, + { + "epoch": 2.0, + "grad_norm": 24.131125571783016, + 
"learning_rate": 5.293889150900287e-06, + "loss": 0.5465, + "step": 12797 + }, + { + "epoch": 2.0, + "grad_norm": 19.5814515033147, + "learning_rate": 5.2924012392817014e-06, + "loss": 0.5624, + "step": 12798 + }, + { + "epoch": 2.0, + "grad_norm": 25.22358408168549, + "learning_rate": 5.290913461544466e-06, + "loss": 0.5068, + "step": 12799 + }, + { + "epoch": 2.0, + "grad_norm": 17.86146255158927, + "learning_rate": 5.289425817730897e-06, + "loss": 0.4743, + "step": 12800 + }, + { + "epoch": 2.0, + "grad_norm": 22.716995784140252, + "learning_rate": 5.287938307883302e-06, + "loss": 0.5308, + "step": 12801 + }, + { + "epoch": 2.0, + "grad_norm": 20.709844466138364, + "learning_rate": 5.286450932043994e-06, + "loss": 0.4986, + "step": 12802 + }, + { + "epoch": 2.0, + "grad_norm": 15.515052494807907, + "learning_rate": 5.284963690255254e-06, + "loss": 0.4749, + "step": 12803 + }, + { + "epoch": 2.0, + "grad_norm": 14.939860179421112, + "learning_rate": 5.28347658255939e-06, + "loss": 0.5057, + "step": 12804 + }, + { + "epoch": 2.0, + "grad_norm": 17.851529592571353, + "learning_rate": 5.281989608998693e-06, + "loss": 0.5132, + "step": 12805 + }, + { + "epoch": 2.0, + "grad_norm": 14.747981189919482, + "learning_rate": 5.280502769615456e-06, + "loss": 0.4984, + "step": 12806 + }, + { + "epoch": 2.0, + "grad_norm": 20.71458497372247, + "learning_rate": 5.279016064451959e-06, + "loss": 0.5556, + "step": 12807 + }, + { + "epoch": 2.0, + "grad_norm": 15.483231372346973, + "learning_rate": 5.277529493550478e-06, + "loss": 0.5781, + "step": 12808 + }, + { + "epoch": 2.0, + "grad_norm": 14.501442855437578, + "learning_rate": 5.276043056953297e-06, + "loss": 0.4606, + "step": 12809 + }, + { + "epoch": 2.0, + "grad_norm": 34.23938197375652, + "learning_rate": 5.274556754702691e-06, + "loss": 0.521, + "step": 12810 + }, + { + "epoch": 2.0, + "grad_norm": 22.855925610686196, + "learning_rate": 5.273070586840925e-06, + "loss": 0.5214, + "step": 12811 + }, + { + "epoch": 2.0, + "grad_norm": 25.92509189624372, + "learning_rate": 5.271584553410267e-06, + "loss": 0.5487, + "step": 12812 + }, + { + "epoch": 2.0, + "grad_norm": 15.282506018392025, + "learning_rate": 5.270098654452977e-06, + "loss": 0.5312, + "step": 12813 + }, + { + "epoch": 2.0, + "grad_norm": 15.534534498495953, + "learning_rate": 5.268612890011318e-06, + "loss": 0.4754, + "step": 12814 + }, + { + "epoch": 2.0, + "grad_norm": 13.077242536635687, + "learning_rate": 5.267127260127536e-06, + "loss": 0.5234, + "step": 12815 + }, + { + "epoch": 2.0, + "grad_norm": 19.95447469177707, + "learning_rate": 5.26564176484389e-06, + "loss": 0.5423, + "step": 12816 + }, + { + "epoch": 2.0, + "grad_norm": 20.944773742849737, + "learning_rate": 5.264156404202618e-06, + "loss": 0.5707, + "step": 12817 + }, + { + "epoch": 2.0, + "grad_norm": 21.50944322472432, + "learning_rate": 5.262671178245968e-06, + "loss": 0.5324, + "step": 12818 + }, + { + "epoch": 2.0, + "grad_norm": 25.443451093896535, + "learning_rate": 5.261186087016183e-06, + "loss": 0.533, + "step": 12819 + }, + { + "epoch": 2.0, + "grad_norm": 13.053129724739613, + "learning_rate": 5.259701130555489e-06, + "loss": 0.4852, + "step": 12820 + }, + { + "epoch": 2.0, + "grad_norm": 20.67333586642384, + "learning_rate": 5.2582163089061255e-06, + "loss": 0.5785, + "step": 12821 + }, + { + "epoch": 2.0, + "grad_norm": 14.678764346717594, + "learning_rate": 5.256731622110314e-06, + "loss": 0.4252, + "step": 12822 + }, + { + "epoch": 2.0, + "grad_norm": 20.637510476205016, + "learning_rate": 
5.2552470702102835e-06, + "loss": 0.5379, + "step": 12823 + }, + { + "epoch": 2.0, + "grad_norm": 19.331672049249445, + "learning_rate": 5.253762653248245e-06, + "loss": 0.526, + "step": 12824 + }, + { + "epoch": 2.0, + "grad_norm": 16.219938923691952, + "learning_rate": 5.252278371266422e-06, + "loss": 0.449, + "step": 12825 + }, + { + "epoch": 2.0, + "grad_norm": 16.178244468847648, + "learning_rate": 5.250794224307028e-06, + "loss": 0.4775, + "step": 12826 + }, + { + "epoch": 2.0, + "grad_norm": 15.999490883948951, + "learning_rate": 5.249310212412269e-06, + "loss": 0.503, + "step": 12827 + }, + { + "epoch": 2.0, + "grad_norm": 20.402767786844866, + "learning_rate": 5.247826335624344e-06, + "loss": 0.5055, + "step": 12828 + }, + { + "epoch": 2.0, + "grad_norm": 19.37863443841836, + "learning_rate": 5.246342593985458e-06, + "loss": 0.5109, + "step": 12829 + }, + { + "epoch": 2.0, + "grad_norm": 15.144753117740438, + "learning_rate": 5.244858987537808e-06, + "loss": 0.482, + "step": 12830 + }, + { + "epoch": 2.0, + "grad_norm": 17.419721878328797, + "learning_rate": 5.243375516323591e-06, + "loss": 0.5129, + "step": 12831 + }, + { + "epoch": 2.0, + "grad_norm": 18.602522359449324, + "learning_rate": 5.241892180384991e-06, + "loss": 0.511, + "step": 12832 + }, + { + "epoch": 2.0, + "grad_norm": 22.599632476802746, + "learning_rate": 5.240408979764191e-06, + "loss": 0.4747, + "step": 12833 + }, + { + "epoch": 2.0, + "grad_norm": 17.39301927661398, + "learning_rate": 5.2389259145033744e-06, + "loss": 0.5152, + "step": 12834 + }, + { + "epoch": 2.0, + "grad_norm": 21.495350274620353, + "learning_rate": 5.237442984644724e-06, + "loss": 0.4846, + "step": 12835 + }, + { + "epoch": 2.0, + "grad_norm": 15.860988160947693, + "learning_rate": 5.235960190230409e-06, + "loss": 0.4342, + "step": 12836 + }, + { + "epoch": 2.01, + "grad_norm": 19.270513452675903, + "learning_rate": 5.234477531302595e-06, + "loss": 0.5372, + "step": 12837 + }, + { + "epoch": 2.01, + "grad_norm": 16.419396131694374, + "learning_rate": 5.232995007903453e-06, + "loss": 0.534, + "step": 12838 + }, + { + "epoch": 2.01, + "grad_norm": 33.4992244745453, + "learning_rate": 5.231512620075143e-06, + "loss": 0.566, + "step": 12839 + }, + { + "epoch": 2.01, + "grad_norm": 14.161195104967208, + "learning_rate": 5.23003036785983e-06, + "loss": 0.4307, + "step": 12840 + }, + { + "epoch": 2.01, + "grad_norm": 19.680810002096436, + "learning_rate": 5.228548251299663e-06, + "loss": 0.429, + "step": 12841 + }, + { + "epoch": 2.01, + "grad_norm": 22.076746387999645, + "learning_rate": 5.227066270436788e-06, + "loss": 0.5123, + "step": 12842 + }, + { + "epoch": 2.01, + "grad_norm": 24.717862502103365, + "learning_rate": 5.225584425313357e-06, + "loss": 0.5131, + "step": 12843 + }, + { + "epoch": 2.01, + "grad_norm": 18.42245341178339, + "learning_rate": 5.224102715971515e-06, + "loss": 0.4998, + "step": 12844 + }, + { + "epoch": 2.01, + "grad_norm": 11.873891543429119, + "learning_rate": 5.222621142453394e-06, + "loss": 0.506, + "step": 12845 + }, + { + "epoch": 2.01, + "grad_norm": 24.739271958784194, + "learning_rate": 5.221139704801137e-06, + "loss": 0.4974, + "step": 12846 + }, + { + "epoch": 2.01, + "grad_norm": 18.322773141463294, + "learning_rate": 5.219658403056867e-06, + "loss": 0.4874, + "step": 12847 + }, + { + "epoch": 2.01, + "grad_norm": 18.303040486121585, + "learning_rate": 5.218177237262721e-06, + "loss": 0.5105, + "step": 12848 + }, + { + "epoch": 2.01, + "grad_norm": 18.723988803755613, + "learning_rate": 
5.216696207460812e-06, + "loss": 0.5677, + "step": 12849 + }, + { + "epoch": 2.01, + "grad_norm": 14.079114685983859, + "learning_rate": 5.215215313693265e-06, + "loss": 0.4846, + "step": 12850 + }, + { + "epoch": 2.01, + "grad_norm": 19.86693381951216, + "learning_rate": 5.213734556002199e-06, + "loss": 0.5356, + "step": 12851 + }, + { + "epoch": 2.01, + "grad_norm": 23.769928964439313, + "learning_rate": 5.212253934429723e-06, + "loss": 0.5335, + "step": 12852 + }, + { + "epoch": 2.01, + "grad_norm": 24.150151082419267, + "learning_rate": 5.210773449017939e-06, + "loss": 0.5059, + "step": 12853 + }, + { + "epoch": 2.01, + "grad_norm": 19.212346614400335, + "learning_rate": 5.209293099808959e-06, + "loss": 0.4648, + "step": 12854 + }, + { + "epoch": 2.01, + "grad_norm": 24.03153420504713, + "learning_rate": 5.207812886844879e-06, + "loss": 0.5475, + "step": 12855 + }, + { + "epoch": 2.01, + "grad_norm": 23.486372312635062, + "learning_rate": 5.2063328101678004e-06, + "loss": 0.5562, + "step": 12856 + }, + { + "epoch": 2.01, + "grad_norm": 15.595165511905176, + "learning_rate": 5.204852869819814e-06, + "loss": 0.4704, + "step": 12857 + }, + { + "epoch": 2.01, + "grad_norm": 20.74521933199456, + "learning_rate": 5.203373065843003e-06, + "loss": 0.5267, + "step": 12858 + }, + { + "epoch": 2.01, + "grad_norm": 22.16987454864099, + "learning_rate": 5.201893398279454e-06, + "loss": 0.5634, + "step": 12859 + }, + { + "epoch": 2.01, + "grad_norm": 55.38955412584117, + "learning_rate": 5.2004138671712555e-06, + "loss": 0.5711, + "step": 12860 + }, + { + "epoch": 2.01, + "grad_norm": 20.553992401398364, + "learning_rate": 5.198934472560479e-06, + "loss": 0.5495, + "step": 12861 + }, + { + "epoch": 2.01, + "grad_norm": 18.043493991234104, + "learning_rate": 5.197455214489193e-06, + "loss": 0.4802, + "step": 12862 + }, + { + "epoch": 2.01, + "grad_norm": 20.467267454489008, + "learning_rate": 5.195976092999472e-06, + "loss": 0.5754, + "step": 12863 + }, + { + "epoch": 2.01, + "grad_norm": 15.545558530574674, + "learning_rate": 5.194497108133381e-06, + "loss": 0.5344, + "step": 12864 + }, + { + "epoch": 2.01, + "grad_norm": 12.526643115518555, + "learning_rate": 5.193018259932987e-06, + "loss": 0.4985, + "step": 12865 + }, + { + "epoch": 2.01, + "grad_norm": 20.22003788214398, + "learning_rate": 5.191539548440336e-06, + "loss": 0.5001, + "step": 12866 + }, + { + "epoch": 2.01, + "grad_norm": 22.295615203409778, + "learning_rate": 5.190060973697486e-06, + "loss": 0.5713, + "step": 12867 + }, + { + "epoch": 2.01, + "grad_norm": 21.880064723342546, + "learning_rate": 5.188582535746489e-06, + "loss": 0.4764, + "step": 12868 + }, + { + "epoch": 2.01, + "grad_norm": 24.816532976525576, + "learning_rate": 5.187104234629394e-06, + "loss": 0.4935, + "step": 12869 + }, + { + "epoch": 2.01, + "grad_norm": 14.477431978855728, + "learning_rate": 5.185626070388239e-06, + "loss": 0.4337, + "step": 12870 + }, + { + "epoch": 2.01, + "grad_norm": 21.118029455207974, + "learning_rate": 5.184148043065058e-06, + "loss": 0.5271, + "step": 12871 + }, + { + "epoch": 2.01, + "grad_norm": 17.57273564775897, + "learning_rate": 5.182670152701889e-06, + "loss": 0.496, + "step": 12872 + }, + { + "epoch": 2.01, + "grad_norm": 21.403528377403912, + "learning_rate": 5.181192399340768e-06, + "loss": 0.561, + "step": 12873 + }, + { + "epoch": 2.01, + "grad_norm": 23.00317202921992, + "learning_rate": 5.179714783023711e-06, + "loss": 0.5042, + "step": 12874 + }, + { + "epoch": 2.01, + "grad_norm": 18.11761677830964, + "learning_rate": 
5.17823730379275e-06, + "loss": 0.4443, + "step": 12875 + }, + { + "epoch": 2.01, + "grad_norm": 21.650502661525685, + "learning_rate": 5.1767599616898965e-06, + "loss": 0.5648, + "step": 12876 + }, + { + "epoch": 2.01, + "grad_norm": 15.093180487730304, + "learning_rate": 5.175282756757172e-06, + "loss": 0.5177, + "step": 12877 + }, + { + "epoch": 2.01, + "grad_norm": 21.59031068543, + "learning_rate": 5.1738056890365775e-06, + "loss": 0.4623, + "step": 12878 + }, + { + "epoch": 2.01, + "grad_norm": 30.362432434131314, + "learning_rate": 5.1723287585701285e-06, + "loss": 0.5351, + "step": 12879 + }, + { + "epoch": 2.01, + "grad_norm": 16.879641186058198, + "learning_rate": 5.1708519653998275e-06, + "loss": 0.5023, + "step": 12880 + }, + { + "epoch": 2.01, + "grad_norm": 16.007617434015483, + "learning_rate": 5.169375309567669e-06, + "loss": 0.5119, + "step": 12881 + }, + { + "epoch": 2.01, + "grad_norm": 31.5426782331286, + "learning_rate": 5.167898791115654e-06, + "loss": 0.5462, + "step": 12882 + }, + { + "epoch": 2.01, + "grad_norm": 16.6085095704948, + "learning_rate": 5.166422410085767e-06, + "loss": 0.498, + "step": 12883 + }, + { + "epoch": 2.01, + "grad_norm": 21.62602963228964, + "learning_rate": 5.16494616652e-06, + "loss": 0.5543, + "step": 12884 + }, + { + "epoch": 2.01, + "grad_norm": 27.532708059870203, + "learning_rate": 5.163470060460338e-06, + "loss": 0.5182, + "step": 12885 + }, + { + "epoch": 2.01, + "grad_norm": 13.561550753199148, + "learning_rate": 5.161994091948759e-06, + "loss": 0.5083, + "step": 12886 + }, + { + "epoch": 2.01, + "grad_norm": 21.228462194603114, + "learning_rate": 5.160518261027234e-06, + "loss": 0.5143, + "step": 12887 + }, + { + "epoch": 2.01, + "grad_norm": 15.861113874822482, + "learning_rate": 5.159042567737739e-06, + "loss": 0.5461, + "step": 12888 + }, + { + "epoch": 2.01, + "grad_norm": 43.83926517526602, + "learning_rate": 5.157567012122241e-06, + "loss": 0.5495, + "step": 12889 + }, + { + "epoch": 2.01, + "grad_norm": 14.808685678283762, + "learning_rate": 5.1560915942227115e-06, + "loss": 0.4999, + "step": 12890 + }, + { + "epoch": 2.01, + "grad_norm": 27.285094730905854, + "learning_rate": 5.154616314081098e-06, + "loss": 0.4821, + "step": 12891 + }, + { + "epoch": 2.01, + "grad_norm": 20.975509671121834, + "learning_rate": 5.15314117173936e-06, + "loss": 0.4987, + "step": 12892 + }, + { + "epoch": 2.01, + "grad_norm": 21.795217110536342, + "learning_rate": 5.151666167239452e-06, + "loss": 0.576, + "step": 12893 + }, + { + "epoch": 2.01, + "grad_norm": 24.030537334907187, + "learning_rate": 5.150191300623326e-06, + "loss": 0.5055, + "step": 12894 + }, + { + "epoch": 2.01, + "grad_norm": 18.954326292021857, + "learning_rate": 5.1487165719329225e-06, + "loss": 0.4767, + "step": 12895 + }, + { + "epoch": 2.01, + "grad_norm": 14.781848346946983, + "learning_rate": 5.147241981210178e-06, + "loss": 0.4882, + "step": 12896 + }, + { + "epoch": 2.01, + "grad_norm": 26.721712180230348, + "learning_rate": 5.145767528497032e-06, + "loss": 0.495, + "step": 12897 + }, + { + "epoch": 2.01, + "grad_norm": 16.71099786092121, + "learning_rate": 5.1442932138354225e-06, + "loss": 0.5591, + "step": 12898 + }, + { + "epoch": 2.01, + "grad_norm": 16.50726376843265, + "learning_rate": 5.142819037267271e-06, + "loss": 0.4738, + "step": 12899 + }, + { + "epoch": 2.01, + "grad_norm": 13.43351219853536, + "learning_rate": 5.1413449988345055e-06, + "loss": 0.446, + "step": 12900 + }, + { + "epoch": 2.02, + "grad_norm": 14.86939762003122, + "learning_rate": 
5.139871098579045e-06, + "loss": 0.4269, + "step": 12901 + }, + { + "epoch": 2.02, + "grad_norm": 12.580438248351527, + "learning_rate": 5.138397336542809e-06, + "loss": 0.4845, + "step": 12902 + }, + { + "epoch": 2.02, + "grad_norm": 30.333315181639414, + "learning_rate": 5.136923712767706e-06, + "loss": 0.4742, + "step": 12903 + }, + { + "epoch": 2.02, + "grad_norm": 17.771202178233892, + "learning_rate": 5.1354502272956486e-06, + "loss": 0.5283, + "step": 12904 + }, + { + "epoch": 2.02, + "grad_norm": 26.203682428366672, + "learning_rate": 5.133976880168543e-06, + "loss": 0.5423, + "step": 12905 + }, + { + "epoch": 2.02, + "grad_norm": 17.895838072514895, + "learning_rate": 5.132503671428286e-06, + "loss": 0.4642, + "step": 12906 + }, + { + "epoch": 2.02, + "grad_norm": 17.36616706681718, + "learning_rate": 5.13103060111678e-06, + "loss": 0.4566, + "step": 12907 + }, + { + "epoch": 2.02, + "grad_norm": 15.740179368285217, + "learning_rate": 5.129557669275913e-06, + "loss": 0.5351, + "step": 12908 + }, + { + "epoch": 2.02, + "grad_norm": 20.187535147887303, + "learning_rate": 5.128084875947579e-06, + "loss": 0.4946, + "step": 12909 + }, + { + "epoch": 2.02, + "grad_norm": 19.08145395379845, + "learning_rate": 5.126612221173659e-06, + "loss": 0.4965, + "step": 12910 + }, + { + "epoch": 2.02, + "grad_norm": 20.949772486213806, + "learning_rate": 5.125139704996038e-06, + "loss": 0.5379, + "step": 12911 + }, + { + "epoch": 2.02, + "grad_norm": 18.47096047268034, + "learning_rate": 5.123667327456591e-06, + "loss": 0.4741, + "step": 12912 + }, + { + "epoch": 2.02, + "grad_norm": 20.483268776302502, + "learning_rate": 5.12219508859719e-06, + "loss": 0.5142, + "step": 12913 + }, + { + "epoch": 2.02, + "grad_norm": 18.783053883549393, + "learning_rate": 5.1207229884597135e-06, + "loss": 0.5842, + "step": 12914 + }, + { + "epoch": 2.02, + "grad_norm": 16.618585379566344, + "learning_rate": 5.119251027086019e-06, + "loss": 0.4189, + "step": 12915 + }, + { + "epoch": 2.02, + "grad_norm": 21.84419775846366, + "learning_rate": 5.117779204517967e-06, + "loss": 0.4689, + "step": 12916 + }, + { + "epoch": 2.02, + "grad_norm": 21.524304974100705, + "learning_rate": 5.116307520797419e-06, + "loss": 0.5453, + "step": 12917 + }, + { + "epoch": 2.02, + "grad_norm": 14.32835194981006, + "learning_rate": 5.114835975966228e-06, + "loss": 0.4066, + "step": 12918 + }, + { + "epoch": 2.02, + "grad_norm": 24.56690370841834, + "learning_rate": 5.113364570066248e-06, + "loss": 0.5124, + "step": 12919 + }, + { + "epoch": 2.02, + "grad_norm": 20.504089780884723, + "learning_rate": 5.11189330313932e-06, + "loss": 0.4682, + "step": 12920 + }, + { + "epoch": 2.02, + "grad_norm": 21.077138459862923, + "learning_rate": 5.110422175227284e-06, + "loss": 0.5939, + "step": 12921 + }, + { + "epoch": 2.02, + "grad_norm": 16.649965935713773, + "learning_rate": 5.1089511863719824e-06, + "loss": 0.4629, + "step": 12922 + }, + { + "epoch": 2.02, + "grad_norm": 32.15808636562768, + "learning_rate": 5.10748033661525e-06, + "loss": 0.5891, + "step": 12923 + }, + { + "epoch": 2.02, + "grad_norm": 19.038751400052202, + "learning_rate": 5.106009625998913e-06, + "loss": 0.4786, + "step": 12924 + }, + { + "epoch": 2.02, + "grad_norm": 28.30107336689926, + "learning_rate": 5.104539054564799e-06, + "loss": 0.4997, + "step": 12925 + }, + { + "epoch": 2.02, + "grad_norm": 15.10869506323825, + "learning_rate": 5.103068622354729e-06, + "loss": 0.5007, + "step": 12926 + }, + { + "epoch": 2.02, + "grad_norm": 28.111912110840457, + "learning_rate": 
5.101598329410522e-06, + "loss": 0.4682, + "step": 12927 + }, + { + "epoch": 2.02, + "grad_norm": 20.111038363739276, + "learning_rate": 5.100128175773998e-06, + "loss": 0.4905, + "step": 12928 + }, + { + "epoch": 2.02, + "grad_norm": 20.552109248155773, + "learning_rate": 5.098658161486962e-06, + "loss": 0.5043, + "step": 12929 + }, + { + "epoch": 2.02, + "grad_norm": 21.00488729032167, + "learning_rate": 5.097188286591217e-06, + "loss": 0.4849, + "step": 12930 + }, + { + "epoch": 2.02, + "grad_norm": 22.301912926835485, + "learning_rate": 5.095718551128569e-06, + "loss": 0.579, + "step": 12931 + }, + { + "epoch": 2.02, + "grad_norm": 17.263912913502743, + "learning_rate": 5.09424895514082e-06, + "loss": 0.5646, + "step": 12932 + }, + { + "epoch": 2.02, + "grad_norm": 26.040774268245876, + "learning_rate": 5.092779498669758e-06, + "loss": 0.4881, + "step": 12933 + }, + { + "epoch": 2.02, + "grad_norm": 25.22300925885845, + "learning_rate": 5.091310181757178e-06, + "loss": 0.4967, + "step": 12934 + }, + { + "epoch": 2.02, + "grad_norm": 15.216214411333613, + "learning_rate": 5.089841004444864e-06, + "loss": 0.5145, + "step": 12935 + }, + { + "epoch": 2.02, + "grad_norm": 25.828643816694733, + "learning_rate": 5.088371966774601e-06, + "loss": 0.5384, + "step": 12936 + }, + { + "epoch": 2.02, + "grad_norm": 16.97114113562809, + "learning_rate": 5.086903068788163e-06, + "loss": 0.5048, + "step": 12937 + }, + { + "epoch": 2.02, + "grad_norm": 12.293208981455534, + "learning_rate": 5.085434310527329e-06, + "loss": 0.4853, + "step": 12938 + }, + { + "epoch": 2.02, + "grad_norm": 14.845856726872723, + "learning_rate": 5.083965692033871e-06, + "loss": 0.4793, + "step": 12939 + }, + { + "epoch": 2.02, + "grad_norm": 15.982750894963123, + "learning_rate": 5.0824972133495535e-06, + "loss": 0.5018, + "step": 12940 + }, + { + "epoch": 2.02, + "grad_norm": 15.962981736401444, + "learning_rate": 5.081028874516134e-06, + "loss": 0.4511, + "step": 12941 + }, + { + "epoch": 2.02, + "grad_norm": 25.991586146780836, + "learning_rate": 5.0795606755753745e-06, + "loss": 0.4761, + "step": 12942 + }, + { + "epoch": 2.02, + "grad_norm": 18.59586374946476, + "learning_rate": 5.0780926165690325e-06, + "loss": 0.4913, + "step": 12943 + }, + { + "epoch": 2.02, + "grad_norm": 26.261921909221112, + "learning_rate": 5.076624697538861e-06, + "loss": 0.5173, + "step": 12944 + }, + { + "epoch": 2.02, + "grad_norm": 23.850943563273177, + "learning_rate": 5.075156918526604e-06, + "loss": 0.5424, + "step": 12945 + }, + { + "epoch": 2.02, + "grad_norm": 16.629691340431602, + "learning_rate": 5.073689279573999e-06, + "loss": 0.4819, + "step": 12946 + }, + { + "epoch": 2.02, + "grad_norm": 20.535175868433708, + "learning_rate": 5.072221780722788e-06, + "loss": 0.4638, + "step": 12947 + }, + { + "epoch": 2.02, + "grad_norm": 14.420423019238626, + "learning_rate": 5.070754422014711e-06, + "loss": 0.4447, + "step": 12948 + }, + { + "epoch": 2.02, + "grad_norm": 24.360977288989826, + "learning_rate": 5.069287203491495e-06, + "loss": 0.5058, + "step": 12949 + }, + { + "epoch": 2.02, + "grad_norm": 16.359228791145526, + "learning_rate": 5.0678201251948625e-06, + "loss": 0.5314, + "step": 12950 + }, + { + "epoch": 2.02, + "grad_norm": 18.21461601000422, + "learning_rate": 5.066353187166542e-06, + "loss": 0.5161, + "step": 12951 + }, + { + "epoch": 2.02, + "grad_norm": 22.560840450553414, + "learning_rate": 5.064886389448249e-06, + "loss": 0.4922, + "step": 12952 + }, + { + "epoch": 2.02, + "grad_norm": 20.521824713462816, + 
"learning_rate": 5.063419732081704e-06, + "loss": 0.5454, + "step": 12953 + }, + { + "epoch": 2.02, + "grad_norm": 23.041056475886474, + "learning_rate": 5.061953215108614e-06, + "loss": 0.511, + "step": 12954 + }, + { + "epoch": 2.02, + "grad_norm": 12.945096319738319, + "learning_rate": 5.060486838570682e-06, + "loss": 0.44, + "step": 12955 + }, + { + "epoch": 2.02, + "grad_norm": 18.523696602369135, + "learning_rate": 5.059020602509616e-06, + "loss": 0.5487, + "step": 12956 + }, + { + "epoch": 2.02, + "grad_norm": 26.189207472023572, + "learning_rate": 5.057554506967116e-06, + "loss": 0.5123, + "step": 12957 + }, + { + "epoch": 2.02, + "grad_norm": 17.71969545792104, + "learning_rate": 5.056088551984876e-06, + "loss": 0.5108, + "step": 12958 + }, + { + "epoch": 2.02, + "grad_norm": 21.007571547783737, + "learning_rate": 5.0546227376045796e-06, + "loss": 0.4888, + "step": 12959 + }, + { + "epoch": 2.02, + "grad_norm": 18.61477990856951, + "learning_rate": 5.053157063867922e-06, + "loss": 0.5057, + "step": 12960 + }, + { + "epoch": 2.02, + "grad_norm": 20.44929291894792, + "learning_rate": 5.051691530816586e-06, + "loss": 0.5329, + "step": 12961 + }, + { + "epoch": 2.02, + "grad_norm": 16.669504545917782, + "learning_rate": 5.050226138492245e-06, + "loss": 0.4971, + "step": 12962 + }, + { + "epoch": 2.02, + "grad_norm": 20.916932108291594, + "learning_rate": 5.048760886936581e-06, + "loss": 0.6495, + "step": 12963 + }, + { + "epoch": 2.02, + "grad_norm": 18.93328422811248, + "learning_rate": 5.0472957761912565e-06, + "loss": 0.4565, + "step": 12964 + }, + { + "epoch": 2.03, + "grad_norm": 16.554134242636305, + "learning_rate": 5.0458308062979466e-06, + "loss": 0.5064, + "step": 12965 + }, + { + "epoch": 2.03, + "grad_norm": 31.569692142568133, + "learning_rate": 5.0443659772983064e-06, + "loss": 0.4578, + "step": 12966 + }, + { + "epoch": 2.03, + "grad_norm": 16.86317067754007, + "learning_rate": 5.042901289234001e-06, + "loss": 0.4656, + "step": 12967 + }, + { + "epoch": 2.03, + "grad_norm": 13.796203357704613, + "learning_rate": 5.041436742146685e-06, + "loss": 0.4735, + "step": 12968 + }, + { + "epoch": 2.03, + "grad_norm": 23.234503629593902, + "learning_rate": 5.039972336078004e-06, + "loss": 0.5157, + "step": 12969 + }, + { + "epoch": 2.03, + "grad_norm": 17.97618756199585, + "learning_rate": 5.038508071069612e-06, + "loss": 0.4808, + "step": 12970 + }, + { + "epoch": 2.03, + "grad_norm": 23.701606671683642, + "learning_rate": 5.037043947163145e-06, + "loss": 0.5264, + "step": 12971 + }, + { + "epoch": 2.03, + "grad_norm": 18.91798585272234, + "learning_rate": 5.035579964400244e-06, + "loss": 0.4856, + "step": 12972 + }, + { + "epoch": 2.03, + "grad_norm": 19.52615643650113, + "learning_rate": 5.034116122822547e-06, + "loss": 0.4913, + "step": 12973 + }, + { + "epoch": 2.03, + "grad_norm": 23.718765009371467, + "learning_rate": 5.032652422471685e-06, + "loss": 0.5045, + "step": 12974 + }, + { + "epoch": 2.03, + "grad_norm": 24.632496651204306, + "learning_rate": 5.031188863389278e-06, + "loss": 0.5444, + "step": 12975 + }, + { + "epoch": 2.03, + "grad_norm": 18.311159396900184, + "learning_rate": 5.029725445616953e-06, + "loss": 0.4897, + "step": 12976 + }, + { + "epoch": 2.03, + "grad_norm": 13.556354788440725, + "learning_rate": 5.028262169196329e-06, + "loss": 0.4971, + "step": 12977 + }, + { + "epoch": 2.03, + "grad_norm": 19.03218709035426, + "learning_rate": 5.0267990341690275e-06, + "loss": 0.5537, + "step": 12978 + }, + { + "epoch": 2.03, + "grad_norm": 15.586155611199526, + 
"learning_rate": 5.025336040576645e-06, + "loss": 0.475, + "step": 12979 + }, + { + "epoch": 2.03, + "grad_norm": 23.218267322505994, + "learning_rate": 5.023873188460795e-06, + "loss": 0.5585, + "step": 12980 + }, + { + "epoch": 2.03, + "grad_norm": 18.621293800068443, + "learning_rate": 5.02241047786308e-06, + "loss": 0.5174, + "step": 12981 + }, + { + "epoch": 2.03, + "grad_norm": 14.338429233924517, + "learning_rate": 5.020947908825104e-06, + "loss": 0.463, + "step": 12982 + }, + { + "epoch": 2.03, + "grad_norm": 17.489757113012747, + "learning_rate": 5.019485481388457e-06, + "loss": 0.4849, + "step": 12983 + }, + { + "epoch": 2.03, + "grad_norm": 20.381331134448654, + "learning_rate": 5.018023195594726e-06, + "loss": 0.4659, + "step": 12984 + }, + { + "epoch": 2.03, + "grad_norm": 16.696198371658305, + "learning_rate": 5.0165610514855005e-06, + "loss": 0.545, + "step": 12985 + }, + { + "epoch": 2.03, + "grad_norm": 27.280164352917183, + "learning_rate": 5.0150990491023655e-06, + "loss": 0.5153, + "step": 12986 + }, + { + "epoch": 2.03, + "grad_norm": 15.186738514157582, + "learning_rate": 5.013637188486895e-06, + "loss": 0.4551, + "step": 12987 + }, + { + "epoch": 2.03, + "grad_norm": 23.209177101321878, + "learning_rate": 5.0121754696806705e-06, + "loss": 0.5723, + "step": 12988 + }, + { + "epoch": 2.03, + "grad_norm": 20.233382817101287, + "learning_rate": 5.010713892725253e-06, + "loss": 0.5307, + "step": 12989 + }, + { + "epoch": 2.03, + "grad_norm": 24.51935531142266, + "learning_rate": 5.009252457662215e-06, + "loss": 0.528, + "step": 12990 + }, + { + "epoch": 2.03, + "grad_norm": 24.229875616045803, + "learning_rate": 5.00779116453312e-06, + "loss": 0.5104, + "step": 12991 + }, + { + "epoch": 2.03, + "grad_norm": 21.693085909591737, + "learning_rate": 5.00633001337952e-06, + "loss": 0.517, + "step": 12992 + }, + { + "epoch": 2.03, + "grad_norm": 25.112386556125134, + "learning_rate": 5.004869004242978e-06, + "loss": 0.5073, + "step": 12993 + }, + { + "epoch": 2.03, + "grad_norm": 19.9949028866268, + "learning_rate": 5.0034081371650355e-06, + "loss": 0.4546, + "step": 12994 + }, + { + "epoch": 2.03, + "grad_norm": 20.344539489310822, + "learning_rate": 5.001947412187246e-06, + "loss": 0.5482, + "step": 12995 + }, + { + "epoch": 2.03, + "grad_norm": 35.425448719903216, + "learning_rate": 5.000486829351145e-06, + "loss": 0.4497, + "step": 12996 + }, + { + "epoch": 2.03, + "grad_norm": 23.94215794828907, + "learning_rate": 4.999026388698274e-06, + "loss": 0.4724, + "step": 12997 + }, + { + "epoch": 2.03, + "grad_norm": 18.277678036389432, + "learning_rate": 4.9975660902701695e-06, + "loss": 0.5749, + "step": 12998 + }, + { + "epoch": 2.03, + "grad_norm": 24.646998735564413, + "learning_rate": 4.996105934108361e-06, + "loss": 0.5397, + "step": 12999 + }, + { + "epoch": 2.03, + "grad_norm": 18.888224960434503, + "learning_rate": 4.9946459202543675e-06, + "loss": 0.4837, + "step": 13000 + }, + { + "epoch": 2.03, + "grad_norm": 18.404559647187043, + "learning_rate": 4.993186048749717e-06, + "loss": 0.4851, + "step": 13001 + }, + { + "epoch": 2.03, + "grad_norm": 18.407126168639, + "learning_rate": 4.991726319635925e-06, + "loss": 0.5178, + "step": 13002 + }, + { + "epoch": 2.03, + "grad_norm": 39.285891033021166, + "learning_rate": 4.990266732954515e-06, + "loss": 0.5457, + "step": 13003 + }, + { + "epoch": 2.03, + "grad_norm": 19.235065248718087, + "learning_rate": 4.98880728874698e-06, + "loss": 0.5051, + "step": 13004 + }, + { + "epoch": 2.03, + "grad_norm": 17.03958188909016, + 
"learning_rate": 4.987347987054835e-06, + "loss": 0.4714, + "step": 13005 + }, + { + "epoch": 2.03, + "grad_norm": 18.90473123471736, + "learning_rate": 4.9858888279195815e-06, + "loss": 0.4831, + "step": 13006 + }, + { + "epoch": 2.03, + "grad_norm": 19.53985441781149, + "learning_rate": 4.984429811382721e-06, + "loss": 0.468, + "step": 13007 + }, + { + "epoch": 2.03, + "grad_norm": 18.609928158114585, + "learning_rate": 4.982970937485741e-06, + "loss": 0.5079, + "step": 13008 + }, + { + "epoch": 2.03, + "grad_norm": 17.073784736051554, + "learning_rate": 4.981512206270129e-06, + "loss": 0.4932, + "step": 13009 + }, + { + "epoch": 2.03, + "grad_norm": 22.075368560333942, + "learning_rate": 4.980053617777375e-06, + "loss": 0.4835, + "step": 13010 + }, + { + "epoch": 2.03, + "grad_norm": 24.64095544623726, + "learning_rate": 4.978595172048963e-06, + "loss": 0.5551, + "step": 13011 + }, + { + "epoch": 2.03, + "grad_norm": 17.01881777896616, + "learning_rate": 4.977136869126366e-06, + "loss": 0.4885, + "step": 13012 + }, + { + "epoch": 2.03, + "grad_norm": 15.801097233917018, + "learning_rate": 4.975678709051056e-06, + "loss": 0.4988, + "step": 13013 + }, + { + "epoch": 2.03, + "grad_norm": 22.741799172034856, + "learning_rate": 4.974220691864503e-06, + "loss": 0.4692, + "step": 13014 + }, + { + "epoch": 2.03, + "grad_norm": 12.919911083698452, + "learning_rate": 4.972762817608174e-06, + "loss": 0.4664, + "step": 13015 + }, + { + "epoch": 2.03, + "grad_norm": 29.579764216190913, + "learning_rate": 4.971305086323534e-06, + "loss": 0.5139, + "step": 13016 + }, + { + "epoch": 2.03, + "grad_norm": 20.2319391323564, + "learning_rate": 4.969847498052035e-06, + "loss": 0.474, + "step": 13017 + }, + { + "epoch": 2.03, + "grad_norm": 18.199246220057237, + "learning_rate": 4.968390052835127e-06, + "loss": 0.5309, + "step": 13018 + }, + { + "epoch": 2.03, + "grad_norm": 15.906873815366412, + "learning_rate": 4.966932750714262e-06, + "loss": 0.4528, + "step": 13019 + }, + { + "epoch": 2.03, + "grad_norm": 20.783104280692573, + "learning_rate": 4.96547559173089e-06, + "loss": 0.5059, + "step": 13020 + }, + { + "epoch": 2.03, + "grad_norm": 19.0585573699973, + "learning_rate": 4.964018575926442e-06, + "loss": 0.4826, + "step": 13021 + }, + { + "epoch": 2.03, + "grad_norm": 18.384747476709627, + "learning_rate": 4.962561703342364e-06, + "loss": 0.4838, + "step": 13022 + }, + { + "epoch": 2.03, + "grad_norm": 23.105715717920873, + "learning_rate": 4.961104974020081e-06, + "loss": 0.5308, + "step": 13023 + }, + { + "epoch": 2.03, + "grad_norm": 12.207606773542302, + "learning_rate": 4.959648388001028e-06, + "loss": 0.4892, + "step": 13024 + }, + { + "epoch": 2.03, + "grad_norm": 19.095427183483118, + "learning_rate": 4.958191945326624e-06, + "loss": 0.4947, + "step": 13025 + }, + { + "epoch": 2.03, + "grad_norm": 25.755325375488038, + "learning_rate": 4.9567356460382895e-06, + "loss": 0.4677, + "step": 13026 + }, + { + "epoch": 2.03, + "grad_norm": 15.235071031858206, + "learning_rate": 4.955279490177448e-06, + "loss": 0.515, + "step": 13027 + }, + { + "epoch": 2.03, + "grad_norm": 16.819117463320943, + "learning_rate": 4.953823477785507e-06, + "loss": 0.4928, + "step": 13028 + }, + { + "epoch": 2.04, + "grad_norm": 22.049638209353752, + "learning_rate": 4.952367608903871e-06, + "loss": 0.4752, + "step": 13029 + }, + { + "epoch": 2.04, + "grad_norm": 20.620339938601706, + "learning_rate": 4.9509118835739465e-06, + "loss": 0.5261, + "step": 13030 + }, + { + "epoch": 2.04, + "grad_norm": 20.11174514781223, + 
"learning_rate": 4.949456301837134e-06, + "loss": 0.4821, + "step": 13031 + }, + { + "epoch": 2.04, + "grad_norm": 14.63069351136901, + "learning_rate": 4.948000863734834e-06, + "loss": 0.4823, + "step": 13032 + }, + { + "epoch": 2.04, + "grad_norm": 16.461578355081606, + "learning_rate": 4.946545569308436e-06, + "loss": 0.4603, + "step": 13033 + }, + { + "epoch": 2.04, + "grad_norm": 24.697868419755885, + "learning_rate": 4.94509041859932e-06, + "loss": 0.4841, + "step": 13034 + }, + { + "epoch": 2.04, + "grad_norm": 23.784272872751654, + "learning_rate": 4.943635411648875e-06, + "loss": 0.5846, + "step": 13035 + }, + { + "epoch": 2.04, + "grad_norm": 23.470043085795815, + "learning_rate": 4.942180548498485e-06, + "loss": 0.4584, + "step": 13036 + }, + { + "epoch": 2.04, + "grad_norm": 18.63525237817207, + "learning_rate": 4.940725829189523e-06, + "loss": 0.5578, + "step": 13037 + }, + { + "epoch": 2.04, + "grad_norm": 21.616966067201265, + "learning_rate": 4.939271253763354e-06, + "loss": 0.4953, + "step": 13038 + }, + { + "epoch": 2.04, + "grad_norm": 13.966022760732278, + "learning_rate": 4.93781682226135e-06, + "loss": 0.4928, + "step": 13039 + }, + { + "epoch": 2.04, + "grad_norm": 18.258842047480677, + "learning_rate": 4.936362534724874e-06, + "loss": 0.4683, + "step": 13040 + }, + { + "epoch": 2.04, + "grad_norm": 17.542794078296232, + "learning_rate": 4.93490839119529e-06, + "loss": 0.5421, + "step": 13041 + }, + { + "epoch": 2.04, + "grad_norm": 16.147552542113733, + "learning_rate": 4.933454391713948e-06, + "loss": 0.5064, + "step": 13042 + }, + { + "epoch": 2.04, + "grad_norm": 20.626187050500658, + "learning_rate": 4.932000536322196e-06, + "loss": 0.4845, + "step": 13043 + }, + { + "epoch": 2.04, + "grad_norm": 21.654846846411715, + "learning_rate": 4.930546825061383e-06, + "loss": 0.4447, + "step": 13044 + }, + { + "epoch": 2.04, + "grad_norm": 19.889471016107183, + "learning_rate": 4.929093257972857e-06, + "loss": 0.425, + "step": 13045 + }, + { + "epoch": 2.04, + "grad_norm": 19.998922219144394, + "learning_rate": 4.927639835097948e-06, + "loss": 0.617, + "step": 13046 + }, + { + "epoch": 2.04, + "grad_norm": 43.946394668478995, + "learning_rate": 4.926186556478e-06, + "loss": 0.5313, + "step": 13047 + }, + { + "epoch": 2.04, + "grad_norm": 17.354368619718787, + "learning_rate": 4.924733422154333e-06, + "loss": 0.4354, + "step": 13048 + }, + { + "epoch": 2.04, + "grad_norm": 24.574407637122906, + "learning_rate": 4.9232804321682835e-06, + "loss": 0.6144, + "step": 13049 + }, + { + "epoch": 2.04, + "grad_norm": 17.508368702453623, + "learning_rate": 4.921827586561164e-06, + "loss": 0.5333, + "step": 13050 + }, + { + "epoch": 2.04, + "grad_norm": 17.70410935391592, + "learning_rate": 4.9203748853742986e-06, + "loss": 0.4878, + "step": 13051 + }, + { + "epoch": 2.04, + "grad_norm": 15.846075923695057, + "learning_rate": 4.918922328649003e-06, + "loss": 0.5222, + "step": 13052 + }, + { + "epoch": 2.04, + "grad_norm": 25.70642182532485, + "learning_rate": 4.917469916426584e-06, + "loss": 0.4936, + "step": 13053 + }, + { + "epoch": 2.04, + "grad_norm": 29.422650817755528, + "learning_rate": 4.916017648748342e-06, + "loss": 0.4421, + "step": 13054 + }, + { + "epoch": 2.04, + "grad_norm": 11.939127934634389, + "learning_rate": 4.914565525655587e-06, + "loss": 0.46, + "step": 13055 + }, + { + "epoch": 2.04, + "grad_norm": 16.252299034270948, + "learning_rate": 4.913113547189615e-06, + "loss": 0.4824, + "step": 13056 + }, + { + "epoch": 2.04, + "grad_norm": 13.09355082792873, + 
"learning_rate": 4.911661713391716e-06, + "loss": 0.4894, + "step": 13057 + }, + { + "epoch": 2.04, + "grad_norm": 16.789493086642114, + "learning_rate": 4.9102100243031835e-06, + "loss": 0.4764, + "step": 13058 + }, + { + "epoch": 2.04, + "grad_norm": 14.479533381129174, + "learning_rate": 4.9087584799652976e-06, + "loss": 0.4745, + "step": 13059 + }, + { + "epoch": 2.04, + "grad_norm": 22.950148162202517, + "learning_rate": 4.907307080419341e-06, + "loss": 0.4781, + "step": 13060 + }, + { + "epoch": 2.04, + "grad_norm": 21.722010297797063, + "learning_rate": 4.905855825706597e-06, + "loss": 0.4307, + "step": 13061 + }, + { + "epoch": 2.04, + "grad_norm": 22.026886453767627, + "learning_rate": 4.904404715868333e-06, + "loss": 0.4888, + "step": 13062 + }, + { + "epoch": 2.04, + "grad_norm": 19.4081582668416, + "learning_rate": 4.902953750945814e-06, + "loss": 0.5287, + "step": 13063 + }, + { + "epoch": 2.04, + "grad_norm": 22.495950213857196, + "learning_rate": 4.90150293098031e-06, + "loss": 0.482, + "step": 13064 + }, + { + "epoch": 2.04, + "grad_norm": 16.550060380763576, + "learning_rate": 4.900052256013078e-06, + "loss": 0.4732, + "step": 13065 + }, + { + "epoch": 2.04, + "grad_norm": 24.514216969149448, + "learning_rate": 4.898601726085385e-06, + "loss": 0.5483, + "step": 13066 + }, + { + "epoch": 2.04, + "grad_norm": 25.881271781050895, + "learning_rate": 4.897151341238468e-06, + "loss": 0.5618, + "step": 13067 + }, + { + "epoch": 2.04, + "grad_norm": 19.2862616224728, + "learning_rate": 4.89570110151358e-06, + "loss": 0.5062, + "step": 13068 + }, + { + "epoch": 2.04, + "grad_norm": 17.636930824770552, + "learning_rate": 4.894251006951968e-06, + "loss": 0.4818, + "step": 13069 + }, + { + "epoch": 2.04, + "grad_norm": 39.210850895103675, + "learning_rate": 4.892801057594874e-06, + "loss": 0.5468, + "step": 13070 + }, + { + "epoch": 2.04, + "grad_norm": 17.21575051696606, + "learning_rate": 4.891351253483529e-06, + "loss": 0.5224, + "step": 13071 + }, + { + "epoch": 2.04, + "grad_norm": 12.857765260387817, + "learning_rate": 4.8899015946591645e-06, + "loss": 0.4473, + "step": 13072 + }, + { + "epoch": 2.04, + "grad_norm": 21.41204916929831, + "learning_rate": 4.888452081163007e-06, + "loss": 0.6123, + "step": 13073 + }, + { + "epoch": 2.04, + "grad_norm": 14.0952821628801, + "learning_rate": 4.8870027130362885e-06, + "loss": 0.4573, + "step": 13074 + }, + { + "epoch": 2.04, + "grad_norm": 17.262400707975143, + "learning_rate": 4.8855534903202165e-06, + "loss": 0.523, + "step": 13075 + }, + { + "epoch": 2.04, + "grad_norm": 27.61781785777043, + "learning_rate": 4.884104413056015e-06, + "loss": 0.5429, + "step": 13076 + }, + { + "epoch": 2.04, + "grad_norm": 29.266838248685563, + "learning_rate": 4.882655481284888e-06, + "loss": 0.5581, + "step": 13077 + }, + { + "epoch": 2.04, + "grad_norm": 19.540519602827015, + "learning_rate": 4.881206695048046e-06, + "loss": 0.5006, + "step": 13078 + }, + { + "epoch": 2.04, + "grad_norm": 22.898472919000717, + "learning_rate": 4.879758054386694e-06, + "loss": 0.4967, + "step": 13079 + }, + { + "epoch": 2.04, + "grad_norm": 18.996039434218446, + "learning_rate": 4.8783095593420234e-06, + "loss": 0.6114, + "step": 13080 + }, + { + "epoch": 2.04, + "grad_norm": 24.872649070934497, + "learning_rate": 4.876861209955238e-06, + "loss": 0.4357, + "step": 13081 + }, + { + "epoch": 2.04, + "grad_norm": 18.342807587975297, + "learning_rate": 4.875413006267518e-06, + "loss": 0.4837, + "step": 13082 + }, + { + "epoch": 2.04, + "grad_norm": 16.1471705149605, + 
"learning_rate": 4.873964948320059e-06, + "loss": 0.5326, + "step": 13083 + }, + { + "epoch": 2.04, + "grad_norm": 16.54139311762064, + "learning_rate": 4.8725170361540345e-06, + "loss": 0.5116, + "step": 13084 + }, + { + "epoch": 2.04, + "grad_norm": 22.049095065687027, + "learning_rate": 4.871069269810626e-06, + "loss": 0.4685, + "step": 13085 + }, + { + "epoch": 2.04, + "grad_norm": 24.9775664480053, + "learning_rate": 4.869621649331009e-06, + "loss": 0.5759, + "step": 13086 + }, + { + "epoch": 2.04, + "grad_norm": 19.462539014305552, + "learning_rate": 4.868174174756353e-06, + "loss": 0.5591, + "step": 13087 + }, + { + "epoch": 2.04, + "grad_norm": 24.31731374249837, + "learning_rate": 4.866726846127817e-06, + "loss": 0.4638, + "step": 13088 + }, + { + "epoch": 2.04, + "grad_norm": 24.251425048652923, + "learning_rate": 4.865279663486567e-06, + "loss": 0.5928, + "step": 13089 + }, + { + "epoch": 2.04, + "grad_norm": 18.183029788047318, + "learning_rate": 4.863832626873759e-06, + "loss": 0.5242, + "step": 13090 + }, + { + "epoch": 2.04, + "grad_norm": 24.660302493324785, + "learning_rate": 4.862385736330555e-06, + "loss": 0.5345, + "step": 13091 + }, + { + "epoch": 2.04, + "grad_norm": 16.32070945925332, + "learning_rate": 4.860938991898088e-06, + "loss": 0.5238, + "step": 13092 + }, + { + "epoch": 2.05, + "grad_norm": 13.224183372765292, + "learning_rate": 4.859492393617509e-06, + "loss": 0.4141, + "step": 13093 + }, + { + "epoch": 2.05, + "grad_norm": 19.471059750200975, + "learning_rate": 4.8580459415299585e-06, + "loss": 0.6052, + "step": 13094 + }, + { + "epoch": 2.05, + "grad_norm": 19.971992064574472, + "learning_rate": 4.856599635676578e-06, + "loss": 0.5123, + "step": 13095 + }, + { + "epoch": 2.05, + "grad_norm": 11.373945844321241, + "learning_rate": 4.8551534760984954e-06, + "loss": 0.4766, + "step": 13096 + }, + { + "epoch": 2.05, + "grad_norm": 19.025852800447048, + "learning_rate": 4.853707462836834e-06, + "loss": 0.4895, + "step": 13097 + }, + { + "epoch": 2.05, + "grad_norm": 14.710918251264056, + "learning_rate": 4.852261595932721e-06, + "loss": 0.5298, + "step": 13098 + }, + { + "epoch": 2.05, + "grad_norm": 23.674850255173887, + "learning_rate": 4.850815875427283e-06, + "loss": 0.4622, + "step": 13099 + }, + { + "epoch": 2.05, + "grad_norm": 23.87107094045906, + "learning_rate": 4.849370301361623e-06, + "loss": 0.5502, + "step": 13100 + }, + { + "epoch": 2.05, + "grad_norm": 13.496457546868037, + "learning_rate": 4.8479248737768644e-06, + "loss": 0.4528, + "step": 13101 + }, + { + "epoch": 2.05, + "grad_norm": 18.58280930364715, + "learning_rate": 4.846479592714104e-06, + "loss": 0.4763, + "step": 13102 + }, + { + "epoch": 2.05, + "grad_norm": 20.63788096808724, + "learning_rate": 4.845034458214449e-06, + "loss": 0.4793, + "step": 13103 + }, + { + "epoch": 2.05, + "grad_norm": 15.55182021770802, + "learning_rate": 4.8435894703190026e-06, + "loss": 0.4668, + "step": 13104 + }, + { + "epoch": 2.05, + "grad_norm": 26.505994733864373, + "learning_rate": 4.842144629068854e-06, + "loss": 0.476, + "step": 13105 + }, + { + "epoch": 2.05, + "grad_norm": 26.48734237116329, + "learning_rate": 4.8406999345050934e-06, + "loss": 0.5079, + "step": 13106 + }, + { + "epoch": 2.05, + "grad_norm": 40.49507249221655, + "learning_rate": 4.839255386668806e-06, + "loss": 0.4895, + "step": 13107 + }, + { + "epoch": 2.05, + "grad_norm": 23.317551708540375, + "learning_rate": 4.8378109856010825e-06, + "loss": 0.5622, + "step": 13108 + }, + { + "epoch": 2.05, + "grad_norm": 18.86885238236193, 
+ "learning_rate": 4.8363667313429896e-06, + "loss": 0.5886, + "step": 13109 + }, + { + "epoch": 2.05, + "grad_norm": 16.538093366415712, + "learning_rate": 4.83492262393561e-06, + "loss": 0.4805, + "step": 13110 + }, + { + "epoch": 2.05, + "grad_norm": 15.875291456058891, + "learning_rate": 4.833478663420007e-06, + "loss": 0.5045, + "step": 13111 + }, + { + "epoch": 2.05, + "grad_norm": 26.29727931910288, + "learning_rate": 4.832034849837252e-06, + "loss": 0.5028, + "step": 13112 + }, + { + "epoch": 2.05, + "grad_norm": 12.391570617988279, + "learning_rate": 4.830591183228398e-06, + "loss": 0.5129, + "step": 13113 + }, + { + "epoch": 2.05, + "grad_norm": 14.781565945849648, + "learning_rate": 4.829147663634507e-06, + "loss": 0.475, + "step": 13114 + }, + { + "epoch": 2.05, + "grad_norm": 21.271750278507955, + "learning_rate": 4.827704291096635e-06, + "loss": 0.5685, + "step": 13115 + }, + { + "epoch": 2.05, + "grad_norm": 26.570186180615327, + "learning_rate": 4.826261065655828e-06, + "loss": 0.5113, + "step": 13116 + }, + { + "epoch": 2.05, + "grad_norm": 24.171336103891505, + "learning_rate": 4.824817987353126e-06, + "loss": 0.4951, + "step": 13117 + }, + { + "epoch": 2.05, + "grad_norm": 25.022904260856897, + "learning_rate": 4.823375056229573e-06, + "loss": 0.4439, + "step": 13118 + }, + { + "epoch": 2.05, + "grad_norm": 18.6026222659298, + "learning_rate": 4.821932272326204e-06, + "loss": 0.445, + "step": 13119 + }, + { + "epoch": 2.05, + "grad_norm": 21.25628924003059, + "learning_rate": 4.820489635684057e-06, + "loss": 0.5066, + "step": 13120 + }, + { + "epoch": 2.05, + "grad_norm": 19.04497601537879, + "learning_rate": 4.819047146344154e-06, + "loss": 0.4717, + "step": 13121 + }, + { + "epoch": 2.05, + "grad_norm": 26.909828417992323, + "learning_rate": 4.817604804347517e-06, + "loss": 0.4742, + "step": 13122 + }, + { + "epoch": 2.05, + "grad_norm": 17.844475594447097, + "learning_rate": 4.816162609735167e-06, + "loss": 0.5907, + "step": 13123 + }, + { + "epoch": 2.05, + "grad_norm": 28.258651678751484, + "learning_rate": 4.814720562548123e-06, + "loss": 0.5051, + "step": 13124 + }, + { + "epoch": 2.05, + "grad_norm": 29.189905794985034, + "learning_rate": 4.8132786628273945e-06, + "loss": 0.4528, + "step": 13125 + }, + { + "epoch": 2.05, + "grad_norm": 15.55679207420983, + "learning_rate": 4.811836910613982e-06, + "loss": 0.4475, + "step": 13126 + }, + { + "epoch": 2.05, + "grad_norm": 19.25663783067621, + "learning_rate": 4.810395305948892e-06, + "loss": 0.4709, + "step": 13127 + }, + { + "epoch": 2.05, + "grad_norm": 26.461609445830266, + "learning_rate": 4.8089538488731244e-06, + "loss": 0.4838, + "step": 13128 + }, + { + "epoch": 2.05, + "grad_norm": 16.427072531874575, + "learning_rate": 4.807512539427677e-06, + "loss": 0.514, + "step": 13129 + }, + { + "epoch": 2.05, + "grad_norm": 18.82424251842245, + "learning_rate": 4.806071377653534e-06, + "loss": 0.4567, + "step": 13130 + }, + { + "epoch": 2.05, + "grad_norm": 20.610791011663075, + "learning_rate": 4.80463036359168e-06, + "loss": 0.5322, + "step": 13131 + }, + { + "epoch": 2.05, + "grad_norm": 23.69776457447809, + "learning_rate": 4.803189497283099e-06, + "loss": 0.4828, + "step": 13132 + }, + { + "epoch": 2.05, + "grad_norm": 15.26149548849849, + "learning_rate": 4.801748778768772e-06, + "loss": 0.5067, + "step": 13133 + }, + { + "epoch": 2.05, + "grad_norm": 19.147832899069844, + "learning_rate": 4.800308208089666e-06, + "loss": 0.4711, + "step": 13134 + }, + { + "epoch": 2.05, + "grad_norm": 16.39570157316439, + 
"learning_rate": 4.798867785286756e-06, + "loss": 0.4451, + "step": 13135 + }, + { + "epoch": 2.05, + "grad_norm": 23.326819546857475, + "learning_rate": 4.797427510401001e-06, + "loss": 0.4599, + "step": 13136 + }, + { + "epoch": 2.05, + "grad_norm": 23.71725273558126, + "learning_rate": 4.795987383473366e-06, + "loss": 0.53, + "step": 13137 + }, + { + "epoch": 2.05, + "grad_norm": 18.39146658252815, + "learning_rate": 4.794547404544802e-06, + "loss": 0.5757, + "step": 13138 + }, + { + "epoch": 2.05, + "grad_norm": 18.27019751220681, + "learning_rate": 4.793107573656266e-06, + "loss": 0.4434, + "step": 13139 + }, + { + "epoch": 2.05, + "grad_norm": 18.784911968176218, + "learning_rate": 4.791667890848708e-06, + "loss": 0.5118, + "step": 13140 + }, + { + "epoch": 2.05, + "grad_norm": 16.650569769826888, + "learning_rate": 4.790228356163065e-06, + "loss": 0.496, + "step": 13141 + }, + { + "epoch": 2.05, + "grad_norm": 26.366631113469268, + "learning_rate": 4.788788969640283e-06, + "loss": 0.5115, + "step": 13142 + }, + { + "epoch": 2.05, + "grad_norm": 18.826351075873383, + "learning_rate": 4.787349731321291e-06, + "loss": 0.4305, + "step": 13143 + }, + { + "epoch": 2.05, + "grad_norm": 19.161040994357972, + "learning_rate": 4.785910641247022e-06, + "loss": 0.5053, + "step": 13144 + }, + { + "epoch": 2.05, + "grad_norm": 17.033952075636236, + "learning_rate": 4.78447169945841e-06, + "loss": 0.511, + "step": 13145 + }, + { + "epoch": 2.05, + "grad_norm": 18.50736386909542, + "learning_rate": 4.78303290599637e-06, + "loss": 0.5146, + "step": 13146 + }, + { + "epoch": 2.05, + "grad_norm": 29.35894583936064, + "learning_rate": 4.78159426090182e-06, + "loss": 0.5213, + "step": 13147 + }, + { + "epoch": 2.05, + "grad_norm": 37.52666531820444, + "learning_rate": 4.780155764215676e-06, + "loss": 0.4146, + "step": 13148 + }, + { + "epoch": 2.05, + "grad_norm": 24.906601628725465, + "learning_rate": 4.7787174159788514e-06, + "loss": 0.4949, + "step": 13149 + }, + { + "epoch": 2.05, + "grad_norm": 19.291174776819464, + "learning_rate": 4.7772792162322505e-06, + "loss": 0.4918, + "step": 13150 + }, + { + "epoch": 2.05, + "grad_norm": 20.13246260432585, + "learning_rate": 4.775841165016769e-06, + "loss": 0.4286, + "step": 13151 + }, + { + "epoch": 2.05, + "grad_norm": 18.380334995204837, + "learning_rate": 4.774403262373308e-06, + "loss": 0.4636, + "step": 13152 + }, + { + "epoch": 2.05, + "grad_norm": 24.10448252085768, + "learning_rate": 4.772965508342763e-06, + "loss": 0.4568, + "step": 13153 + }, + { + "epoch": 2.05, + "grad_norm": 25.580935339550056, + "learning_rate": 4.771527902966028e-06, + "loss": 0.4981, + "step": 13154 + }, + { + "epoch": 2.05, + "grad_norm": 23.631121422976026, + "learning_rate": 4.7700904462839735e-06, + "loss": 0.4566, + "step": 13155 + }, + { + "epoch": 2.05, + "grad_norm": 21.537421263248078, + "learning_rate": 4.7686531383374865e-06, + "loss": 0.5223, + "step": 13156 + }, + { + "epoch": 2.06, + "grad_norm": 24.658410671153835, + "learning_rate": 4.767215979167445e-06, + "loss": 0.4918, + "step": 13157 + }, + { + "epoch": 2.06, + "grad_norm": 15.134114788897476, + "learning_rate": 4.765778968814724e-06, + "loss": 0.4591, + "step": 13158 + }, + { + "epoch": 2.06, + "grad_norm": 25.363092060755346, + "learning_rate": 4.764342107320186e-06, + "loss": 0.5202, + "step": 13159 + }, + { + "epoch": 2.06, + "grad_norm": 22.8174915006642, + "learning_rate": 4.762905394724691e-06, + "loss": 0.4964, + "step": 13160 + }, + { + "epoch": 2.06, + "grad_norm": 29.040706876349738, + 
"learning_rate": 4.761468831069106e-06, + "loss": 0.5863, + "step": 13161 + }, + { + "epoch": 2.06, + "grad_norm": 11.987952909326758, + "learning_rate": 4.7600324163942845e-06, + "loss": 0.4474, + "step": 13162 + }, + { + "epoch": 2.06, + "grad_norm": 24.6635138154888, + "learning_rate": 4.758596150741073e-06, + "loss": 0.4867, + "step": 13163 + }, + { + "epoch": 2.06, + "grad_norm": 13.164974662691726, + "learning_rate": 4.757160034150324e-06, + "loss": 0.4996, + "step": 13164 + }, + { + "epoch": 2.06, + "grad_norm": 28.721278538236707, + "learning_rate": 4.7557240666628735e-06, + "loss": 0.5115, + "step": 13165 + }, + { + "epoch": 2.06, + "grad_norm": 22.04895459389824, + "learning_rate": 4.754288248319563e-06, + "loss": 0.4863, + "step": 13166 + }, + { + "epoch": 2.06, + "grad_norm": 23.734362158354347, + "learning_rate": 4.752852579161229e-06, + "loss": 0.5033, + "step": 13167 + }, + { + "epoch": 2.06, + "grad_norm": 14.837193166256506, + "learning_rate": 4.751417059228695e-06, + "loss": 0.5426, + "step": 13168 + }, + { + "epoch": 2.06, + "grad_norm": 15.734374005602717, + "learning_rate": 4.749981688562792e-06, + "loss": 0.4455, + "step": 13169 + }, + { + "epoch": 2.06, + "grad_norm": 19.256447441515185, + "learning_rate": 4.748546467204336e-06, + "loss": 0.4139, + "step": 13170 + }, + { + "epoch": 2.06, + "grad_norm": 15.689209340064366, + "learning_rate": 4.747111395194149e-06, + "loss": 0.472, + "step": 13171 + }, + { + "epoch": 2.06, + "grad_norm": 17.07914701686187, + "learning_rate": 4.7456764725730385e-06, + "loss": 0.474, + "step": 13172 + }, + { + "epoch": 2.06, + "grad_norm": 13.888771738598066, + "learning_rate": 4.744241699381816e-06, + "loss": 0.4863, + "step": 13173 + }, + { + "epoch": 2.06, + "grad_norm": 19.261870969048253, + "learning_rate": 4.742807075661288e-06, + "loss": 0.5512, + "step": 13174 + }, + { + "epoch": 2.06, + "grad_norm": 17.238974395861504, + "learning_rate": 4.741372601452251e-06, + "loss": 0.52, + "step": 13175 + }, + { + "epoch": 2.06, + "grad_norm": 19.092522108737782, + "learning_rate": 4.739938276795498e-06, + "loss": 0.5036, + "step": 13176 + }, + { + "epoch": 2.06, + "grad_norm": 14.593687492845298, + "learning_rate": 4.738504101731824e-06, + "loss": 0.4155, + "step": 13177 + }, + { + "epoch": 2.06, + "grad_norm": 21.249497735875668, + "learning_rate": 4.737070076302014e-06, + "loss": 0.4787, + "step": 13178 + }, + { + "epoch": 2.06, + "grad_norm": 17.961678617833368, + "learning_rate": 4.735636200546859e-06, + "loss": 0.4691, + "step": 13179 + }, + { + "epoch": 2.06, + "grad_norm": 13.321922905751014, + "learning_rate": 4.734202474507126e-06, + "loss": 0.5002, + "step": 13180 + }, + { + "epoch": 2.06, + "grad_norm": 28.150140581110264, + "learning_rate": 4.7327688982235916e-06, + "loss": 0.4799, + "step": 13181 + }, + { + "epoch": 2.06, + "grad_norm": 25.732883946361678, + "learning_rate": 4.731335471737031e-06, + "loss": 0.525, + "step": 13182 + }, + { + "epoch": 2.06, + "grad_norm": 17.31312165323388, + "learning_rate": 4.72990219508821e-06, + "loss": 0.5178, + "step": 13183 + }, + { + "epoch": 2.06, + "grad_norm": 27.750640312960908, + "learning_rate": 4.728469068317887e-06, + "loss": 0.5771, + "step": 13184 + }, + { + "epoch": 2.06, + "grad_norm": 20.267775892763424, + "learning_rate": 4.727036091466816e-06, + "loss": 0.5371, + "step": 13185 + }, + { + "epoch": 2.06, + "grad_norm": 19.92896450978873, + "learning_rate": 4.7256032645757555e-06, + "loss": 0.4909, + "step": 13186 + }, + { + "epoch": 2.06, + "grad_norm": 19.03684667372953, + 
"learning_rate": 4.724170587685456e-06, + "loss": 0.496, + "step": 13187 + }, + { + "epoch": 2.06, + "grad_norm": 25.026947929011325, + "learning_rate": 4.722738060836654e-06, + "loss": 0.6308, + "step": 13188 + }, + { + "epoch": 2.06, + "grad_norm": 17.34865867297164, + "learning_rate": 4.7213056840701e-06, + "loss": 0.4742, + "step": 13189 + }, + { + "epoch": 2.06, + "grad_norm": 25.45888529978834, + "learning_rate": 4.719873457426519e-06, + "loss": 0.5104, + "step": 13190 + }, + { + "epoch": 2.06, + "grad_norm": 15.4916437604768, + "learning_rate": 4.718441380946649e-06, + "loss": 0.448, + "step": 13191 + }, + { + "epoch": 2.06, + "grad_norm": 16.640372947789828, + "learning_rate": 4.717009454671221e-06, + "loss": 0.4748, + "step": 13192 + }, + { + "epoch": 2.06, + "grad_norm": 17.926840234874444, + "learning_rate": 4.715577678640948e-06, + "loss": 0.5192, + "step": 13193 + }, + { + "epoch": 2.06, + "grad_norm": 19.48118511662771, + "learning_rate": 4.714146052896559e-06, + "loss": 0.4897, + "step": 13194 + }, + { + "epoch": 2.06, + "grad_norm": 16.640480900530083, + "learning_rate": 4.7127145774787604e-06, + "loss": 0.5584, + "step": 13195 + }, + { + "epoch": 2.06, + "grad_norm": 31.634562008018673, + "learning_rate": 4.71128325242827e-06, + "loss": 0.5316, + "step": 13196 + }, + { + "epoch": 2.06, + "grad_norm": 15.578033159314309, + "learning_rate": 4.709852077785787e-06, + "loss": 0.5237, + "step": 13197 + }, + { + "epoch": 2.06, + "grad_norm": 25.785091844179796, + "learning_rate": 4.708421053592019e-06, + "loss": 0.4978, + "step": 13198 + }, + { + "epoch": 2.06, + "grad_norm": 22.001608689512103, + "learning_rate": 4.706990179887658e-06, + "loss": 0.4839, + "step": 13199 + }, + { + "epoch": 2.06, + "grad_norm": 16.638333979575602, + "learning_rate": 4.705559456713403e-06, + "loss": 0.456, + "step": 13200 + }, + { + "epoch": 2.06, + "grad_norm": 23.374595600342712, + "learning_rate": 4.704128884109936e-06, + "loss": 0.5992, + "step": 13201 + }, + { + "epoch": 2.06, + "grad_norm": 16.259441866014114, + "learning_rate": 4.702698462117946e-06, + "loss": 0.5045, + "step": 13202 + }, + { + "epoch": 2.06, + "grad_norm": 15.117531366897676, + "learning_rate": 4.701268190778117e-06, + "loss": 0.4629, + "step": 13203 + }, + { + "epoch": 2.06, + "grad_norm": 16.42074207069252, + "learning_rate": 4.6998380701311196e-06, + "loss": 0.4112, + "step": 13204 + }, + { + "epoch": 2.06, + "grad_norm": 14.662762698111324, + "learning_rate": 4.6984081002176256e-06, + "loss": 0.484, + "step": 13205 + }, + { + "epoch": 2.06, + "grad_norm": 19.72600408840137, + "learning_rate": 4.696978281078302e-06, + "loss": 0.4784, + "step": 13206 + }, + { + "epoch": 2.06, + "grad_norm": 14.682652369952763, + "learning_rate": 4.695548612753814e-06, + "loss": 0.5063, + "step": 13207 + }, + { + "epoch": 2.06, + "grad_norm": 20.305321777816935, + "learning_rate": 4.694119095284825e-06, + "loss": 0.5034, + "step": 13208 + }, + { + "epoch": 2.06, + "grad_norm": 21.726412949223977, + "learning_rate": 4.6926897287119845e-06, + "loss": 0.5166, + "step": 13209 + }, + { + "epoch": 2.06, + "grad_norm": 16.03169614977865, + "learning_rate": 4.6912605130759396e-06, + "loss": 0.4976, + "step": 13210 + }, + { + "epoch": 2.06, + "grad_norm": 22.463874376767897, + "learning_rate": 4.68983144841734e-06, + "loss": 0.4748, + "step": 13211 + }, + { + "epoch": 2.06, + "grad_norm": 18.00008583131168, + "learning_rate": 4.688402534776832e-06, + "loss": 0.5078, + "step": 13212 + }, + { + "epoch": 2.06, + "grad_norm": 14.768752517691908, + 
"learning_rate": 4.686973772195048e-06, + "loss": 0.4953, + "step": 13213 + }, + { + "epoch": 2.06, + "grad_norm": 16.489240478831842, + "learning_rate": 4.685545160712619e-06, + "loss": 0.4815, + "step": 13214 + }, + { + "epoch": 2.06, + "grad_norm": 22.387293940516482, + "learning_rate": 4.684116700370176e-06, + "loss": 0.4804, + "step": 13215 + }, + { + "epoch": 2.06, + "grad_norm": 17.5873130809419, + "learning_rate": 4.682688391208345e-06, + "loss": 0.4599, + "step": 13216 + }, + { + "epoch": 2.06, + "grad_norm": 16.849759722572045, + "learning_rate": 4.681260233267749e-06, + "loss": 0.4729, + "step": 13217 + }, + { + "epoch": 2.06, + "grad_norm": 32.49318304002005, + "learning_rate": 4.679832226589001e-06, + "loss": 0.5261, + "step": 13218 + }, + { + "epoch": 2.06, + "grad_norm": 12.763241618638316, + "learning_rate": 4.6784043712127084e-06, + "loss": 0.4391, + "step": 13219 + }, + { + "epoch": 2.06, + "grad_norm": 21.010813152628668, + "learning_rate": 4.676976667179482e-06, + "loss": 0.4879, + "step": 13220 + }, + { + "epoch": 2.07, + "grad_norm": 14.51285047854393, + "learning_rate": 4.675549114529929e-06, + "loss": 0.4461, + "step": 13221 + }, + { + "epoch": 2.07, + "grad_norm": 19.272838068293154, + "learning_rate": 4.6741217133046415e-06, + "loss": 0.4978, + "step": 13222 + }, + { + "epoch": 2.07, + "grad_norm": 19.640360671110585, + "learning_rate": 4.67269446354422e-06, + "loss": 0.4897, + "step": 13223 + }, + { + "epoch": 2.07, + "grad_norm": 24.475874627240326, + "learning_rate": 4.671267365289247e-06, + "loss": 0.5642, + "step": 13224 + }, + { + "epoch": 2.07, + "grad_norm": 15.514364817556473, + "learning_rate": 4.669840418580318e-06, + "loss": 0.5352, + "step": 13225 + }, + { + "epoch": 2.07, + "grad_norm": 15.470593647954168, + "learning_rate": 4.668413623458006e-06, + "loss": 0.5017, + "step": 13226 + }, + { + "epoch": 2.07, + "grad_norm": 18.7617868985732, + "learning_rate": 4.666986979962891e-06, + "loss": 0.4471, + "step": 13227 + }, + { + "epoch": 2.07, + "grad_norm": 23.45572303852927, + "learning_rate": 4.6655604881355495e-06, + "loss": 0.5002, + "step": 13228 + }, + { + "epoch": 2.07, + "grad_norm": 20.608994056732502, + "learning_rate": 4.664134148016545e-06, + "loss": 0.5192, + "step": 13229 + }, + { + "epoch": 2.07, + "grad_norm": 42.26989431695043, + "learning_rate": 4.662707959646446e-06, + "loss": 0.53, + "step": 13230 + }, + { + "epoch": 2.07, + "grad_norm": 17.433607868562383, + "learning_rate": 4.661281923065808e-06, + "loss": 0.5339, + "step": 13231 + }, + { + "epoch": 2.07, + "grad_norm": 24.983701504211606, + "learning_rate": 4.659856038315188e-06, + "loss": 0.5411, + "step": 13232 + }, + { + "epoch": 2.07, + "grad_norm": 17.651993867294077, + "learning_rate": 4.658430305435143e-06, + "loss": 0.5539, + "step": 13233 + }, + { + "epoch": 2.07, + "grad_norm": 21.453543533221193, + "learning_rate": 4.657004724466216e-06, + "loss": 0.4818, + "step": 13234 + }, + { + "epoch": 2.07, + "grad_norm": 20.137962185257834, + "learning_rate": 4.655579295448944e-06, + "loss": 0.4841, + "step": 13235 + }, + { + "epoch": 2.07, + "grad_norm": 16.48465977418263, + "learning_rate": 4.654154018423871e-06, + "loss": 0.4693, + "step": 13236 + }, + { + "epoch": 2.07, + "grad_norm": 16.99336340332264, + "learning_rate": 4.652728893431534e-06, + "loss": 0.5159, + "step": 13237 + }, + { + "epoch": 2.07, + "grad_norm": 16.420228762644086, + "learning_rate": 4.651303920512459e-06, + "loss": 0.5163, + "step": 13238 + }, + { + "epoch": 2.07, + "grad_norm": 27.681051048955716, + 
"learning_rate": 4.649879099707168e-06, + "loss": 0.4894, + "step": 13239 + }, + { + "epoch": 2.07, + "grad_norm": 15.828555037257193, + "learning_rate": 4.648454431056186e-06, + "loss": 0.5398, + "step": 13240 + }, + { + "epoch": 2.07, + "grad_norm": 19.027205134078546, + "learning_rate": 4.64702991460003e-06, + "loss": 0.4548, + "step": 13241 + }, + { + "epoch": 2.07, + "grad_norm": 26.730373530683064, + "learning_rate": 4.645605550379214e-06, + "loss": 0.5444, + "step": 13242 + }, + { + "epoch": 2.07, + "grad_norm": 24.670789631336856, + "learning_rate": 4.644181338434245e-06, + "loss": 0.4692, + "step": 13243 + }, + { + "epoch": 2.07, + "grad_norm": 16.388460423661712, + "learning_rate": 4.642757278805622e-06, + "loss": 0.4848, + "step": 13244 + }, + { + "epoch": 2.07, + "grad_norm": 17.01839281634388, + "learning_rate": 4.641333371533848e-06, + "loss": 0.4741, + "step": 13245 + }, + { + "epoch": 2.07, + "grad_norm": 18.859873675000927, + "learning_rate": 4.6399096166594215e-06, + "loss": 0.565, + "step": 13246 + }, + { + "epoch": 2.07, + "grad_norm": 20.440530171019105, + "learning_rate": 4.638486014222831e-06, + "loss": 0.558, + "step": 13247 + }, + { + "epoch": 2.07, + "grad_norm": 22.89752531235786, + "learning_rate": 4.6370625642645565e-06, + "loss": 0.5609, + "step": 13248 + }, + { + "epoch": 2.07, + "grad_norm": 22.214453554088877, + "learning_rate": 4.635639266825086e-06, + "loss": 0.4755, + "step": 13249 + }, + { + "epoch": 2.07, + "grad_norm": 17.558230368107555, + "learning_rate": 4.634216121944901e-06, + "loss": 0.4882, + "step": 13250 + }, + { + "epoch": 2.07, + "grad_norm": 18.937022105566925, + "learning_rate": 4.632793129664466e-06, + "loss": 0.5074, + "step": 13251 + }, + { + "epoch": 2.07, + "grad_norm": 12.170384326781141, + "learning_rate": 4.63137029002426e-06, + "loss": 0.4778, + "step": 13252 + }, + { + "epoch": 2.07, + "grad_norm": 16.679424250309843, + "learning_rate": 4.629947603064737e-06, + "loss": 0.4871, + "step": 13253 + }, + { + "epoch": 2.07, + "grad_norm": 18.605160811761372, + "learning_rate": 4.6285250688263625e-06, + "loss": 0.478, + "step": 13254 + }, + { + "epoch": 2.07, + "grad_norm": 19.4407472684658, + "learning_rate": 4.627102687349599e-06, + "loss": 0.4601, + "step": 13255 + }, + { + "epoch": 2.07, + "grad_norm": 22.16531912004903, + "learning_rate": 4.625680458674886e-06, + "loss": 0.4667, + "step": 13256 + }, + { + "epoch": 2.07, + "grad_norm": 16.87519743062133, + "learning_rate": 4.624258382842681e-06, + "loss": 0.5047, + "step": 13257 + }, + { + "epoch": 2.07, + "grad_norm": 19.26658181477529, + "learning_rate": 4.622836459893419e-06, + "loss": 0.5049, + "step": 13258 + }, + { + "epoch": 2.07, + "grad_norm": 21.944766816321177, + "learning_rate": 4.621414689867547e-06, + "loss": 0.5219, + "step": 13259 + }, + { + "epoch": 2.07, + "grad_norm": 18.991635752752213, + "learning_rate": 4.619993072805491e-06, + "loss": 0.4795, + "step": 13260 + }, + { + "epoch": 2.07, + "grad_norm": 21.84476442191785, + "learning_rate": 4.618571608747685e-06, + "loss": 0.5565, + "step": 13261 + }, + { + "epoch": 2.07, + "grad_norm": 21.35563997458557, + "learning_rate": 4.617150297734557e-06, + "loss": 0.5183, + "step": 13262 + }, + { + "epoch": 2.07, + "grad_norm": 24.05433216185614, + "learning_rate": 4.615729139806527e-06, + "loss": 0.5606, + "step": 13263 + }, + { + "epoch": 2.07, + "grad_norm": 17.802504232805475, + "learning_rate": 4.614308135004006e-06, + "loss": 0.4718, + "step": 13264 + }, + { + "epoch": 2.07, + "grad_norm": 23.50101327740678, + 
"learning_rate": 4.612887283367411e-06, + "loss": 0.5271, + "step": 13265 + }, + { + "epoch": 2.07, + "grad_norm": 17.01612142358905, + "learning_rate": 4.611466584937151e-06, + "loss": 0.515, + "step": 13266 + }, + { + "epoch": 2.07, + "grad_norm": 22.621576061979578, + "learning_rate": 4.610046039753636e-06, + "loss": 0.5371, + "step": 13267 + }, + { + "epoch": 2.07, + "grad_norm": 17.89313635461756, + "learning_rate": 4.608625647857251e-06, + "loss": 0.4967, + "step": 13268 + }, + { + "epoch": 2.07, + "grad_norm": 17.527823419300745, + "learning_rate": 4.6072054092884e-06, + "loss": 0.5123, + "step": 13269 + }, + { + "epoch": 2.07, + "grad_norm": 24.466714567903377, + "learning_rate": 4.60578532408747e-06, + "loss": 0.4647, + "step": 13270 + }, + { + "epoch": 2.07, + "grad_norm": 28.490170040516933, + "learning_rate": 4.604365392294856e-06, + "loss": 0.5206, + "step": 13271 + }, + { + "epoch": 2.07, + "grad_norm": 24.953122054179286, + "learning_rate": 4.602945613950933e-06, + "loss": 0.5469, + "step": 13272 + }, + { + "epoch": 2.07, + "grad_norm": 23.444795847522744, + "learning_rate": 4.6015259890960765e-06, + "loss": 0.4765, + "step": 13273 + }, + { + "epoch": 2.07, + "grad_norm": 33.2344108369721, + "learning_rate": 4.600106517770662e-06, + "loss": 0.5262, + "step": 13274 + }, + { + "epoch": 2.07, + "grad_norm": 25.078606215283795, + "learning_rate": 4.598687200015063e-06, + "loss": 0.5199, + "step": 13275 + }, + { + "epoch": 2.07, + "grad_norm": 12.847109196383375, + "learning_rate": 4.597268035869636e-06, + "loss": 0.4408, + "step": 13276 + }, + { + "epoch": 2.07, + "grad_norm": 17.273272357843112, + "learning_rate": 4.59584902537475e-06, + "loss": 0.48, + "step": 13277 + }, + { + "epoch": 2.07, + "grad_norm": 20.891020378788014, + "learning_rate": 4.594430168570753e-06, + "loss": 0.5481, + "step": 13278 + }, + { + "epoch": 2.07, + "grad_norm": 35.46058444922919, + "learning_rate": 4.593011465498e-06, + "loss": 0.5408, + "step": 13279 + }, + { + "epoch": 2.07, + "grad_norm": 18.44592376198636, + "learning_rate": 4.591592916196841e-06, + "loss": 0.5321, + "step": 13280 + }, + { + "epoch": 2.07, + "grad_norm": 19.537570353016452, + "learning_rate": 4.590174520707612e-06, + "loss": 0.4878, + "step": 13281 + }, + { + "epoch": 2.07, + "grad_norm": 17.821963766807723, + "learning_rate": 4.58875627907066e-06, + "loss": 0.4343, + "step": 13282 + }, + { + "epoch": 2.07, + "grad_norm": 18.44724914384452, + "learning_rate": 4.587338191326308e-06, + "loss": 0.449, + "step": 13283 + }, + { + "epoch": 2.07, + "grad_norm": 30.103092306209025, + "learning_rate": 4.585920257514897e-06, + "loss": 0.4938, + "step": 13284 + }, + { + "epoch": 2.08, + "grad_norm": 20.819799432641574, + "learning_rate": 4.584502477676742e-06, + "loss": 0.5244, + "step": 13285 + }, + { + "epoch": 2.08, + "grad_norm": 20.27474386383267, + "learning_rate": 4.583084851852169e-06, + "loss": 0.5628, + "step": 13286 + }, + { + "epoch": 2.08, + "grad_norm": 22.788345147770126, + "learning_rate": 4.581667380081497e-06, + "loss": 0.4706, + "step": 13287 + }, + { + "epoch": 2.08, + "grad_norm": 20.6980046119383, + "learning_rate": 4.580250062405036e-06, + "loss": 0.4697, + "step": 13288 + }, + { + "epoch": 2.08, + "grad_norm": 17.228160775360433, + "learning_rate": 4.578832898863088e-06, + "loss": 0.4847, + "step": 13289 + }, + { + "epoch": 2.08, + "grad_norm": 32.10342693043912, + "learning_rate": 4.577415889495962e-06, + "loss": 0.5837, + "step": 13290 + }, + { + "epoch": 2.08, + "grad_norm": 20.98573331491425, + 
"learning_rate": 4.575999034343957e-06, + "loss": 0.5588, + "step": 13291 + }, + { + "epoch": 2.08, + "grad_norm": 24.260059327578197, + "learning_rate": 4.5745823334473685e-06, + "loss": 0.4674, + "step": 13292 + }, + { + "epoch": 2.08, + "grad_norm": 20.381798869962854, + "learning_rate": 4.573165786846485e-06, + "loss": 0.5284, + "step": 13293 + }, + { + "epoch": 2.08, + "grad_norm": 24.528074104199415, + "learning_rate": 4.571749394581588e-06, + "loss": 0.5152, + "step": 13294 + }, + { + "epoch": 2.08, + "grad_norm": 19.18882387638936, + "learning_rate": 4.570333156692963e-06, + "loss": 0.4948, + "step": 13295 + }, + { + "epoch": 2.08, + "grad_norm": 19.183718532373142, + "learning_rate": 4.568917073220891e-06, + "loss": 0.4781, + "step": 13296 + }, + { + "epoch": 2.08, + "grad_norm": 26.013057232625478, + "learning_rate": 4.567501144205639e-06, + "loss": 0.5371, + "step": 13297 + }, + { + "epoch": 2.08, + "grad_norm": 16.105198645333353, + "learning_rate": 4.566085369687475e-06, + "loss": 0.4781, + "step": 13298 + }, + { + "epoch": 2.08, + "grad_norm": 20.81561349514247, + "learning_rate": 4.564669749706663e-06, + "loss": 0.4589, + "step": 13299 + }, + { + "epoch": 2.08, + "grad_norm": 21.75364095533292, + "learning_rate": 4.563254284303468e-06, + "loss": 0.5483, + "step": 13300 + }, + { + "epoch": 2.08, + "grad_norm": 21.414261980529798, + "learning_rate": 4.56183897351814e-06, + "loss": 0.4292, + "step": 13301 + }, + { + "epoch": 2.08, + "grad_norm": 28.169139517149215, + "learning_rate": 4.560423817390927e-06, + "loss": 0.5308, + "step": 13302 + }, + { + "epoch": 2.08, + "grad_norm": 16.864232061451382, + "learning_rate": 4.559008815962078e-06, + "loss": 0.4324, + "step": 13303 + }, + { + "epoch": 2.08, + "grad_norm": 21.460231120868325, + "learning_rate": 4.557593969271836e-06, + "loss": 0.528, + "step": 13304 + }, + { + "epoch": 2.08, + "grad_norm": 20.877083541629784, + "learning_rate": 4.556179277360442e-06, + "loss": 0.4705, + "step": 13305 + }, + { + "epoch": 2.08, + "grad_norm": 28.45493918474053, + "learning_rate": 4.554764740268124e-06, + "loss": 0.525, + "step": 13306 + }, + { + "epoch": 2.08, + "grad_norm": 30.070636441276545, + "learning_rate": 4.553350358035108e-06, + "loss": 0.5167, + "step": 13307 + }, + { + "epoch": 2.08, + "grad_norm": 14.855782165752, + "learning_rate": 4.551936130701622e-06, + "loss": 0.4943, + "step": 13308 + }, + { + "epoch": 2.08, + "grad_norm": 18.96931533348104, + "learning_rate": 4.550522058307888e-06, + "loss": 0.4773, + "step": 13309 + }, + { + "epoch": 2.08, + "grad_norm": 16.585443390966994, + "learning_rate": 4.549108140894117e-06, + "loss": 0.473, + "step": 13310 + }, + { + "epoch": 2.08, + "grad_norm": 16.401202400835626, + "learning_rate": 4.547694378500523e-06, + "loss": 0.4329, + "step": 13311 + }, + { + "epoch": 2.08, + "grad_norm": 19.704926195938093, + "learning_rate": 4.54628077116731e-06, + "loss": 0.5286, + "step": 13312 + }, + { + "epoch": 2.08, + "grad_norm": 18.107338179007186, + "learning_rate": 4.544867318934684e-06, + "loss": 0.4678, + "step": 13313 + }, + { + "epoch": 2.08, + "grad_norm": 16.743244162718618, + "learning_rate": 4.543454021842836e-06, + "loss": 0.4638, + "step": 13314 + }, + { + "epoch": 2.08, + "grad_norm": 13.776271012553853, + "learning_rate": 4.5420408799319646e-06, + "loss": 0.45, + "step": 13315 + }, + { + "epoch": 2.08, + "grad_norm": 23.885712340231894, + "learning_rate": 4.540627893242261e-06, + "loss": 0.4533, + "step": 13316 + }, + { + "epoch": 2.08, + "grad_norm": 20.790081447185585, + 
"learning_rate": 4.539215061813903e-06, + "loss": 0.5177, + "step": 13317 + }, + { + "epoch": 2.08, + "grad_norm": 35.4578125192979, + "learning_rate": 4.537802385687078e-06, + "loss": 0.4472, + "step": 13318 + }, + { + "epoch": 2.08, + "grad_norm": 19.718204867464728, + "learning_rate": 4.5363898649019545e-06, + "loss": 0.4364, + "step": 13319 + }, + { + "epoch": 2.08, + "grad_norm": 14.74725590503136, + "learning_rate": 4.534977499498706e-06, + "loss": 0.4313, + "step": 13320 + }, + { + "epoch": 2.08, + "grad_norm": 23.19691321457073, + "learning_rate": 4.533565289517506e-06, + "loss": 0.5193, + "step": 13321 + }, + { + "epoch": 2.08, + "grad_norm": 21.306957474357283, + "learning_rate": 4.5321532349985095e-06, + "loss": 0.4197, + "step": 13322 + }, + { + "epoch": 2.08, + "grad_norm": 12.334155559786513, + "learning_rate": 4.530741335981874e-06, + "loss": 0.4571, + "step": 13323 + }, + { + "epoch": 2.08, + "grad_norm": 13.120207059017911, + "learning_rate": 4.529329592507755e-06, + "loss": 0.4476, + "step": 13324 + }, + { + "epoch": 2.08, + "grad_norm": 18.673845227092745, + "learning_rate": 4.527918004616305e-06, + "loss": 0.46, + "step": 13325 + }, + { + "epoch": 2.08, + "grad_norm": 22.132544220666155, + "learning_rate": 4.5265065723476675e-06, + "loss": 0.5036, + "step": 13326 + }, + { + "epoch": 2.08, + "grad_norm": 23.62823446652317, + "learning_rate": 4.525095295741976e-06, + "loss": 0.5255, + "step": 13327 + }, + { + "epoch": 2.08, + "grad_norm": 28.649807073615964, + "learning_rate": 4.523684174839372e-06, + "loss": 0.5218, + "step": 13328 + }, + { + "epoch": 2.08, + "grad_norm": 16.79047333020339, + "learning_rate": 4.5222732096799875e-06, + "loss": 0.431, + "step": 13329 + }, + { + "epoch": 2.08, + "grad_norm": 19.665507084310978, + "learning_rate": 4.520862400303951e-06, + "loss": 0.5162, + "step": 13330 + }, + { + "epoch": 2.08, + "grad_norm": 21.49819586497654, + "learning_rate": 4.519451746751383e-06, + "loss": 0.4882, + "step": 13331 + }, + { + "epoch": 2.08, + "grad_norm": 4.160024828916969, + "learning_rate": 4.518041249062398e-06, + "loss": 0.5908, + "step": 13332 + }, + { + "epoch": 2.08, + "grad_norm": 14.103632722671467, + "learning_rate": 4.516630907277113e-06, + "loss": 0.4805, + "step": 13333 + }, + { + "epoch": 2.08, + "grad_norm": 23.395934853428344, + "learning_rate": 4.5152207214356406e-06, + "loss": 0.5737, + "step": 13334 + }, + { + "epoch": 2.08, + "grad_norm": 18.24113983478634, + "learning_rate": 4.513810691578079e-06, + "loss": 0.5527, + "step": 13335 + }, + { + "epoch": 2.08, + "grad_norm": 20.32820993767835, + "learning_rate": 4.512400817744535e-06, + "loss": 0.4805, + "step": 13336 + }, + { + "epoch": 2.08, + "grad_norm": 20.00100803889626, + "learning_rate": 4.510991099975098e-06, + "loss": 0.4277, + "step": 13337 + }, + { + "epoch": 2.08, + "grad_norm": 31.046224498989424, + "learning_rate": 4.509581538309867e-06, + "loss": 0.5508, + "step": 13338 + }, + { + "epoch": 2.08, + "grad_norm": 17.51467379080833, + "learning_rate": 4.508172132788923e-06, + "loss": 0.5532, + "step": 13339 + }, + { + "epoch": 2.08, + "grad_norm": 14.355440968077888, + "learning_rate": 4.506762883452349e-06, + "loss": 0.4422, + "step": 13340 + }, + { + "epoch": 2.08, + "grad_norm": 17.804332656805983, + "learning_rate": 4.50535379034023e-06, + "loss": 0.4851, + "step": 13341 + }, + { + "epoch": 2.08, + "grad_norm": 34.906183021177206, + "learning_rate": 4.503944853492631e-06, + "loss": 0.542, + "step": 13342 + }, + { + "epoch": 2.08, + "grad_norm": 16.73444871325246, + 
"learning_rate": 4.502536072949628e-06, + "loss": 0.4982, + "step": 13343 + }, + { + "epoch": 2.08, + "grad_norm": 28.978346040808887, + "learning_rate": 4.5011274487512806e-06, + "loss": 0.4867, + "step": 13344 + }, + { + "epoch": 2.08, + "grad_norm": 20.602943925251328, + "learning_rate": 4.4997189809376554e-06, + "loss": 0.5291, + "step": 13345 + }, + { + "epoch": 2.08, + "grad_norm": 14.135052178232254, + "learning_rate": 4.498310669548801e-06, + "loss": 0.4534, + "step": 13346 + }, + { + "epoch": 2.08, + "grad_norm": 19.818277409403674, + "learning_rate": 4.496902514624775e-06, + "loss": 0.5213, + "step": 13347 + }, + { + "epoch": 2.08, + "grad_norm": 19.446230919170148, + "learning_rate": 4.495494516205621e-06, + "loss": 0.5111, + "step": 13348 + }, + { + "epoch": 2.09, + "grad_norm": 15.29595463617056, + "learning_rate": 4.494086674331382e-06, + "loss": 0.4961, + "step": 13349 + }, + { + "epoch": 2.09, + "grad_norm": 13.931181608762111, + "learning_rate": 4.492678989042099e-06, + "loss": 0.4893, + "step": 13350 + }, + { + "epoch": 2.09, + "grad_norm": 20.38331068840339, + "learning_rate": 4.491271460377806e-06, + "loss": 0.5184, + "step": 13351 + }, + { + "epoch": 2.09, + "grad_norm": 21.977967451624647, + "learning_rate": 4.489864088378526e-06, + "loss": 0.4909, + "step": 13352 + }, + { + "epoch": 2.09, + "grad_norm": 27.461327212784816, + "learning_rate": 4.488456873084288e-06, + "loss": 0.4533, + "step": 13353 + }, + { + "epoch": 2.09, + "grad_norm": 30.113063848583863, + "learning_rate": 4.487049814535112e-06, + "loss": 0.4814, + "step": 13354 + }, + { + "epoch": 2.09, + "grad_norm": 23.05653861450223, + "learning_rate": 4.4856429127710224e-06, + "loss": 0.5623, + "step": 13355 + }, + { + "epoch": 2.09, + "grad_norm": 15.800967790895884, + "learning_rate": 4.484236167832015e-06, + "loss": 0.4503, + "step": 13356 + }, + { + "epoch": 2.09, + "grad_norm": 15.436910201979742, + "learning_rate": 4.482829579758103e-06, + "loss": 0.4479, + "step": 13357 + }, + { + "epoch": 2.09, + "grad_norm": 14.097455583234527, + "learning_rate": 4.481423148589292e-06, + "loss": 0.5083, + "step": 13358 + }, + { + "epoch": 2.09, + "grad_norm": 19.07132455874364, + "learning_rate": 4.4800168743655814e-06, + "loss": 0.5065, + "step": 13359 + }, + { + "epoch": 2.09, + "grad_norm": 18.15619025901775, + "learning_rate": 4.478610757126962e-06, + "loss": 0.4788, + "step": 13360 + }, + { + "epoch": 2.09, + "grad_norm": 23.43294365272516, + "learning_rate": 4.477204796913419e-06, + "loss": 0.4948, + "step": 13361 + }, + { + "epoch": 2.09, + "grad_norm": 22.072391561296246, + "learning_rate": 4.475798993764941e-06, + "loss": 0.4632, + "step": 13362 + }, + { + "epoch": 2.09, + "grad_norm": 15.355250719725868, + "learning_rate": 4.474393347721511e-06, + "loss": 0.472, + "step": 13363 + }, + { + "epoch": 2.09, + "grad_norm": 17.806147210482905, + "learning_rate": 4.472987858823099e-06, + "loss": 0.4732, + "step": 13364 + }, + { + "epoch": 2.09, + "grad_norm": 30.2367927836394, + "learning_rate": 4.471582527109683e-06, + "loss": 0.5207, + "step": 13365 + }, + { + "epoch": 2.09, + "grad_norm": 26.766143714746494, + "learning_rate": 4.470177352621221e-06, + "loss": 0.5428, + "step": 13366 + }, + { + "epoch": 2.09, + "grad_norm": 13.279677872452869, + "learning_rate": 4.468772335397681e-06, + "loss": 0.5143, + "step": 13367 + }, + { + "epoch": 2.09, + "grad_norm": 30.028609946729038, + "learning_rate": 4.467367475479023e-06, + "loss": 0.5886, + "step": 13368 + }, + { + "epoch": 2.09, + "grad_norm": 22.703485283549373, 
+ "learning_rate": 4.465962772905195e-06, + "loss": 0.47, + "step": 13369 + }, + { + "epoch": 2.09, + "grad_norm": 26.02288981834509, + "learning_rate": 4.464558227716152e-06, + "loss": 0.4235, + "step": 13370 + }, + { + "epoch": 2.09, + "grad_norm": 17.08937653998288, + "learning_rate": 4.463153839951829e-06, + "loss": 0.4421, + "step": 13371 + }, + { + "epoch": 2.09, + "grad_norm": 25.071619293970567, + "learning_rate": 4.461749609652179e-06, + "loss": 0.5131, + "step": 13372 + }, + { + "epoch": 2.09, + "grad_norm": 20.046640020481444, + "learning_rate": 4.460345536857124e-06, + "loss": 0.481, + "step": 13373 + }, + { + "epoch": 2.09, + "grad_norm": 11.804403857693078, + "learning_rate": 4.458941621606602e-06, + "loss": 0.4692, + "step": 13374 + }, + { + "epoch": 2.09, + "grad_norm": 28.887366923500668, + "learning_rate": 4.457537863940543e-06, + "loss": 0.4929, + "step": 13375 + }, + { + "epoch": 2.09, + "grad_norm": 16.737847099771972, + "learning_rate": 4.4561342638988645e-06, + "loss": 0.4731, + "step": 13376 + }, + { + "epoch": 2.09, + "grad_norm": 17.01579895841372, + "learning_rate": 4.4547308215214815e-06, + "loss": 0.4864, + "step": 13377 + }, + { + "epoch": 2.09, + "grad_norm": 21.57507625521603, + "learning_rate": 4.453327536848309e-06, + "loss": 0.5053, + "step": 13378 + }, + { + "epoch": 2.09, + "grad_norm": 14.295256455841429, + "learning_rate": 4.451924409919257e-06, + "loss": 0.4498, + "step": 13379 + }, + { + "epoch": 2.09, + "grad_norm": 13.032001856742218, + "learning_rate": 4.450521440774233e-06, + "loss": 0.4372, + "step": 13380 + }, + { + "epoch": 2.09, + "grad_norm": 15.853086634235515, + "learning_rate": 4.449118629453133e-06, + "loss": 0.4574, + "step": 13381 + }, + { + "epoch": 2.09, + "grad_norm": 16.162096127968514, + "learning_rate": 4.447715975995848e-06, + "loss": 0.4785, + "step": 13382 + }, + { + "epoch": 2.09, + "grad_norm": 21.77159990249389, + "learning_rate": 4.446313480442272e-06, + "loss": 0.516, + "step": 13383 + }, + { + "epoch": 2.09, + "grad_norm": 17.727921858709138, + "learning_rate": 4.444911142832297e-06, + "loss": 0.5095, + "step": 13384 + }, + { + "epoch": 2.09, + "grad_norm": 19.91745334847935, + "learning_rate": 4.4435089632058e-06, + "loss": 0.545, + "step": 13385 + }, + { + "epoch": 2.09, + "grad_norm": 20.937821783610683, + "learning_rate": 4.442106941602652e-06, + "loss": 0.3844, + "step": 13386 + }, + { + "epoch": 2.09, + "grad_norm": 15.151497540172905, + "learning_rate": 4.440705078062732e-06, + "loss": 0.4793, + "step": 13387 + }, + { + "epoch": 2.09, + "grad_norm": 14.833597902207083, + "learning_rate": 4.4393033726259116e-06, + "loss": 0.4458, + "step": 13388 + }, + { + "epoch": 2.09, + "grad_norm": 22.930245868701412, + "learning_rate": 4.437901825332046e-06, + "loss": 0.5224, + "step": 13389 + }, + { + "epoch": 2.09, + "grad_norm": 35.93038643051206, + "learning_rate": 4.436500436221003e-06, + "loss": 0.5245, + "step": 13390 + }, + { + "epoch": 2.09, + "grad_norm": 14.300825086649915, + "learning_rate": 4.4350992053326295e-06, + "loss": 0.4769, + "step": 13391 + }, + { + "epoch": 2.09, + "grad_norm": 15.15378430110901, + "learning_rate": 4.433698132706779e-06, + "loss": 0.4482, + "step": 13392 + }, + { + "epoch": 2.09, + "grad_norm": 23.1344869407657, + "learning_rate": 4.4322972183833e-06, + "loss": 0.4981, + "step": 13393 + }, + { + "epoch": 2.09, + "grad_norm": 14.851429273959964, + "learning_rate": 4.430896462402033e-06, + "loss": 0.4633, + "step": 13394 + }, + { + "epoch": 2.09, + "grad_norm": 22.324897390681926, + 
"learning_rate": 4.429495864802808e-06, + "loss": 0.5283, + "step": 13395 + }, + { + "epoch": 2.09, + "grad_norm": 21.50423002562317, + "learning_rate": 4.428095425625462e-06, + "loss": 0.5526, + "step": 13396 + }, + { + "epoch": 2.09, + "grad_norm": 15.829657741191447, + "learning_rate": 4.426695144909826e-06, + "loss": 0.4498, + "step": 13397 + }, + { + "epoch": 2.09, + "grad_norm": 15.97235357949818, + "learning_rate": 4.425295022695716e-06, + "loss": 0.4363, + "step": 13398 + }, + { + "epoch": 2.09, + "grad_norm": 17.15532984810896, + "learning_rate": 4.423895059022959e-06, + "loss": 0.5423, + "step": 13399 + }, + { + "epoch": 2.09, + "grad_norm": 29.374390386789514, + "learning_rate": 4.42249525393136e-06, + "loss": 0.4868, + "step": 13400 + }, + { + "epoch": 2.09, + "grad_norm": 19.370441946581966, + "learning_rate": 4.421095607460738e-06, + "loss": 0.5241, + "step": 13401 + }, + { + "epoch": 2.09, + "grad_norm": 24.046294385656193, + "learning_rate": 4.419696119650889e-06, + "loss": 0.5022, + "step": 13402 + }, + { + "epoch": 2.09, + "grad_norm": 14.471081369764708, + "learning_rate": 4.418296790541618e-06, + "loss": 0.453, + "step": 13403 + }, + { + "epoch": 2.09, + "grad_norm": 17.778671540287874, + "learning_rate": 4.4168976201727255e-06, + "loss": 0.5484, + "step": 13404 + }, + { + "epoch": 2.09, + "grad_norm": 22.642828665612175, + "learning_rate": 4.415498608583993e-06, + "loss": 0.498, + "step": 13405 + }, + { + "epoch": 2.09, + "grad_norm": 19.196734116869575, + "learning_rate": 4.414099755815219e-06, + "loss": 0.5427, + "step": 13406 + }, + { + "epoch": 2.09, + "grad_norm": 24.414930914730913, + "learning_rate": 4.412701061906176e-06, + "loss": 0.4866, + "step": 13407 + }, + { + "epoch": 2.09, + "grad_norm": 19.613471442240435, + "learning_rate": 4.411302526896646e-06, + "loss": 0.4846, + "step": 13408 + }, + { + "epoch": 2.09, + "grad_norm": 15.281869920587228, + "learning_rate": 4.409904150826408e-06, + "loss": 0.487, + "step": 13409 + }, + { + "epoch": 2.09, + "grad_norm": 16.914756785320684, + "learning_rate": 4.408505933735223e-06, + "loss": 0.4572, + "step": 13410 + }, + { + "epoch": 2.09, + "grad_norm": 31.268555643467717, + "learning_rate": 4.407107875662857e-06, + "loss": 0.5634, + "step": 13411 + }, + { + "epoch": 2.09, + "grad_norm": 40.664296413192254, + "learning_rate": 4.405709976649069e-06, + "loss": 0.4784, + "step": 13412 + }, + { + "epoch": 2.1, + "grad_norm": 26.10843387513603, + "learning_rate": 4.404312236733623e-06, + "loss": 0.5861, + "step": 13413 + }, + { + "epoch": 2.1, + "grad_norm": 20.622896061499468, + "learning_rate": 4.402914655956262e-06, + "loss": 0.4474, + "step": 13414 + }, + { + "epoch": 2.1, + "grad_norm": 19.505891463197088, + "learning_rate": 4.401517234356731e-06, + "loss": 0.469, + "step": 13415 + }, + { + "epoch": 2.1, + "grad_norm": 17.116856751582834, + "learning_rate": 4.400119971974774e-06, + "loss": 0.4995, + "step": 13416 + }, + { + "epoch": 2.1, + "grad_norm": 18.487069225015794, + "learning_rate": 4.398722868850131e-06, + "loss": 0.5086, + "step": 13417 + }, + { + "epoch": 2.1, + "grad_norm": 27.17987141460619, + "learning_rate": 4.3973259250225355e-06, + "loss": 0.5204, + "step": 13418 + }, + { + "epoch": 2.1, + "grad_norm": 22.958732782735332, + "learning_rate": 4.395929140531713e-06, + "loss": 0.5012, + "step": 13419 + }, + { + "epoch": 2.1, + "grad_norm": 18.360661793517504, + "learning_rate": 4.3945325154173834e-06, + "loss": 0.4872, + "step": 13420 + }, + { + "epoch": 2.1, + "grad_norm": 14.569555511957606, + 
"learning_rate": 4.39313604971927e-06, + "loss": 0.487, + "step": 13421 + }, + { + "epoch": 2.1, + "grad_norm": 17.395513677277506, + "learning_rate": 4.391739743477092e-06, + "loss": 0.4899, + "step": 13422 + }, + { + "epoch": 2.1, + "grad_norm": 15.362455836163539, + "learning_rate": 4.390343596730551e-06, + "loss": 0.4725, + "step": 13423 + }, + { + "epoch": 2.1, + "grad_norm": 16.110999586455385, + "learning_rate": 4.38894760951936e-06, + "loss": 0.4512, + "step": 13424 + }, + { + "epoch": 2.1, + "grad_norm": 34.52331145484327, + "learning_rate": 4.387551781883213e-06, + "loss": 0.4879, + "step": 13425 + }, + { + "epoch": 2.1, + "grad_norm": 14.1126875864702, + "learning_rate": 4.386156113861814e-06, + "loss": 0.514, + "step": 13426 + }, + { + "epoch": 2.1, + "grad_norm": 21.91621939521298, + "learning_rate": 4.384760605494847e-06, + "loss": 0.4568, + "step": 13427 + }, + { + "epoch": 2.1, + "grad_norm": 21.71690157702032, + "learning_rate": 4.383365256822003e-06, + "loss": 0.5882, + "step": 13428 + }, + { + "epoch": 2.1, + "grad_norm": 28.110465908033323, + "learning_rate": 4.3819700678829705e-06, + "loss": 0.5001, + "step": 13429 + }, + { + "epoch": 2.1, + "grad_norm": 24.28365850715801, + "learning_rate": 4.380575038717419e-06, + "loss": 0.4645, + "step": 13430 + }, + { + "epoch": 2.1, + "grad_norm": 17.778307378775217, + "learning_rate": 4.37918016936503e-06, + "loss": 0.4423, + "step": 13431 + }, + { + "epoch": 2.1, + "grad_norm": 18.601300170022775, + "learning_rate": 4.377785459865463e-06, + "loss": 0.4522, + "step": 13432 + }, + { + "epoch": 2.1, + "grad_norm": 22.816120012352762, + "learning_rate": 4.376390910258391e-06, + "loss": 0.5215, + "step": 13433 + }, + { + "epoch": 2.1, + "grad_norm": 15.929450510871082, + "learning_rate": 4.374996520583474e-06, + "loss": 0.4921, + "step": 13434 + }, + { + "epoch": 2.1, + "grad_norm": 21.439954293978737, + "learning_rate": 4.373602290880367e-06, + "loss": 0.5448, + "step": 13435 + }, + { + "epoch": 2.1, + "grad_norm": 19.480410820748844, + "learning_rate": 4.372208221188715e-06, + "loss": 0.5549, + "step": 13436 + }, + { + "epoch": 2.1, + "grad_norm": 18.031498653574978, + "learning_rate": 4.370814311548168e-06, + "loss": 0.5153, + "step": 13437 + }, + { + "epoch": 2.1, + "grad_norm": 15.58080708478236, + "learning_rate": 4.3694205619983744e-06, + "loss": 0.4444, + "step": 13438 + }, + { + "epoch": 2.1, + "grad_norm": 17.081969274360034, + "learning_rate": 4.368026972578965e-06, + "loss": 0.4861, + "step": 13439 + }, + { + "epoch": 2.1, + "grad_norm": 19.746778438933905, + "learning_rate": 4.366633543329571e-06, + "loss": 0.4883, + "step": 13440 + }, + { + "epoch": 2.1, + "grad_norm": 20.18715196160379, + "learning_rate": 4.365240274289824e-06, + "loss": 0.5169, + "step": 13441 + }, + { + "epoch": 2.1, + "grad_norm": 21.94904135873195, + "learning_rate": 4.363847165499347e-06, + "loss": 0.5225, + "step": 13442 + }, + { + "epoch": 2.1, + "grad_norm": 21.299634151607687, + "learning_rate": 4.362454216997761e-06, + "loss": 0.5753, + "step": 13443 + }, + { + "epoch": 2.1, + "grad_norm": 15.082429075885264, + "learning_rate": 4.361061428824682e-06, + "loss": 0.4669, + "step": 13444 + }, + { + "epoch": 2.1, + "grad_norm": 14.104073344794825, + "learning_rate": 4.359668801019713e-06, + "loss": 0.4348, + "step": 13445 + }, + { + "epoch": 2.1, + "grad_norm": 23.718910643004673, + "learning_rate": 4.358276333622463e-06, + "loss": 0.4432, + "step": 13446 + }, + { + "epoch": 2.1, + "grad_norm": 22.711772740623914, + "learning_rate": 
4.356884026672537e-06, + "loss": 0.5723, + "step": 13447 + }, + { + "epoch": 2.1, + "grad_norm": 25.126831003982765, + "learning_rate": 4.35549188020953e-06, + "loss": 0.5013, + "step": 13448 + }, + { + "epoch": 2.1, + "grad_norm": 20.404262327988558, + "learning_rate": 4.354099894273027e-06, + "loss": 0.4589, + "step": 13449 + }, + { + "epoch": 2.1, + "grad_norm": 20.73178193177861, + "learning_rate": 4.352708068902621e-06, + "loss": 0.5353, + "step": 13450 + }, + { + "epoch": 2.1, + "grad_norm": 11.986171861251247, + "learning_rate": 4.351316404137898e-06, + "loss": 0.4418, + "step": 13451 + }, + { + "epoch": 2.1, + "grad_norm": 20.675801292465934, + "learning_rate": 4.349924900018427e-06, + "loss": 0.4957, + "step": 13452 + }, + { + "epoch": 2.1, + "grad_norm": 26.15825558052395, + "learning_rate": 4.3485335565837905e-06, + "loss": 0.467, + "step": 13453 + }, + { + "epoch": 2.1, + "grad_norm": 19.360053995739154, + "learning_rate": 4.347142373873551e-06, + "loss": 0.5268, + "step": 13454 + }, + { + "epoch": 2.1, + "grad_norm": 18.354302844395008, + "learning_rate": 4.345751351927275e-06, + "loss": 0.4907, + "step": 13455 + }, + { + "epoch": 2.1, + "grad_norm": 12.607745848555282, + "learning_rate": 4.344360490784526e-06, + "loss": 0.3871, + "step": 13456 + }, + { + "epoch": 2.1, + "grad_norm": 15.682853313083948, + "learning_rate": 4.342969790484853e-06, + "loss": 0.507, + "step": 13457 + }, + { + "epoch": 2.1, + "grad_norm": 12.511992220193015, + "learning_rate": 4.341579251067815e-06, + "loss": 0.5151, + "step": 13458 + }, + { + "epoch": 2.1, + "grad_norm": 20.429363591706245, + "learning_rate": 4.3401888725729465e-06, + "loss": 0.4728, + "step": 13459 + }, + { + "epoch": 2.1, + "grad_norm": 22.251778645359995, + "learning_rate": 4.338798655039802e-06, + "loss": 0.5422, + "step": 13460 + }, + { + "epoch": 2.1, + "grad_norm": 28.283733727263364, + "learning_rate": 4.3374085985079075e-06, + "loss": 0.4668, + "step": 13461 + }, + { + "epoch": 2.1, + "grad_norm": 27.149063186570853, + "learning_rate": 4.336018703016799e-06, + "loss": 0.6065, + "step": 13462 + }, + { + "epoch": 2.1, + "grad_norm": 15.774726462637535, + "learning_rate": 4.33462896860601e-06, + "loss": 0.5066, + "step": 13463 + }, + { + "epoch": 2.1, + "grad_norm": 25.6124587660728, + "learning_rate": 4.3332393953150574e-06, + "loss": 0.5152, + "step": 13464 + }, + { + "epoch": 2.1, + "grad_norm": 20.04154843680813, + "learning_rate": 4.331849983183459e-06, + "loss": 0.4485, + "step": 13465 + }, + { + "epoch": 2.1, + "grad_norm": 20.307506383220737, + "learning_rate": 4.330460732250732e-06, + "loss": 0.5035, + "step": 13466 + }, + { + "epoch": 2.1, + "grad_norm": 20.697163901889468, + "learning_rate": 4.329071642556384e-06, + "loss": 0.504, + "step": 13467 + }, + { + "epoch": 2.1, + "grad_norm": 16.534570776876762, + "learning_rate": 4.327682714139925e-06, + "loss": 0.5129, + "step": 13468 + }, + { + "epoch": 2.1, + "grad_norm": 23.343498125849504, + "learning_rate": 4.326293947040851e-06, + "loss": 0.5047, + "step": 13469 + }, + { + "epoch": 2.1, + "grad_norm": 17.346328722351895, + "learning_rate": 4.324905341298655e-06, + "loss": 0.4915, + "step": 13470 + }, + { + "epoch": 2.1, + "grad_norm": 13.040168434746302, + "learning_rate": 4.323516896952832e-06, + "loss": 0.4433, + "step": 13471 + }, + { + "epoch": 2.1, + "grad_norm": 22.014367330946357, + "learning_rate": 4.3221286140428695e-06, + "loss": 0.5257, + "step": 13472 + }, + { + "epoch": 2.1, + "grad_norm": 19.90869229918086, + "learning_rate": 4.32074049260825e-06, + 
"loss": 0.4855, + "step": 13473 + }, + { + "epoch": 2.1, + "grad_norm": 19.19758465507929, + "learning_rate": 4.319352532688444e-06, + "loss": 0.4622, + "step": 13474 + }, + { + "epoch": 2.1, + "grad_norm": 18.783367663592294, + "learning_rate": 4.317964734322928e-06, + "loss": 0.4441, + "step": 13475 + }, + { + "epoch": 2.1, + "grad_norm": 22.85024333048001, + "learning_rate": 4.316577097551176e-06, + "loss": 0.4744, + "step": 13476 + }, + { + "epoch": 2.11, + "grad_norm": 15.207394375839547, + "learning_rate": 4.315189622412642e-06, + "loss": 0.4636, + "step": 13477 + }, + { + "epoch": 2.11, + "grad_norm": 23.860458434674364, + "learning_rate": 4.313802308946794e-06, + "loss": 0.5367, + "step": 13478 + }, + { + "epoch": 2.11, + "grad_norm": 12.696411620464664, + "learning_rate": 4.312415157193078e-06, + "loss": 0.4785, + "step": 13479 + }, + { + "epoch": 2.11, + "grad_norm": 20.291071754104014, + "learning_rate": 4.3110281671909484e-06, + "loss": 0.5058, + "step": 13480 + }, + { + "epoch": 2.11, + "grad_norm": 19.7198849206344, + "learning_rate": 4.309641338979853e-06, + "loss": 0.5133, + "step": 13481 + }, + { + "epoch": 2.11, + "grad_norm": 14.280293193668603, + "learning_rate": 4.308254672599225e-06, + "loss": 0.47, + "step": 13482 + }, + { + "epoch": 2.11, + "grad_norm": 17.55920090012835, + "learning_rate": 4.306868168088508e-06, + "loss": 0.4729, + "step": 13483 + }, + { + "epoch": 2.11, + "grad_norm": 16.34642124592735, + "learning_rate": 4.305481825487128e-06, + "loss": 0.4877, + "step": 13484 + }, + { + "epoch": 2.11, + "grad_norm": 19.00180157595025, + "learning_rate": 4.304095644834516e-06, + "loss": 0.5126, + "step": 13485 + }, + { + "epoch": 2.11, + "grad_norm": 16.27862461031544, + "learning_rate": 4.302709626170089e-06, + "loss": 0.5341, + "step": 13486 + }, + { + "epoch": 2.11, + "grad_norm": 19.11646340000066, + "learning_rate": 4.30132376953327e-06, + "loss": 0.3857, + "step": 13487 + }, + { + "epoch": 2.11, + "grad_norm": 21.20590603693105, + "learning_rate": 4.299938074963465e-06, + "loss": 0.4701, + "step": 13488 + }, + { + "epoch": 2.11, + "grad_norm": 18.47539332846092, + "learning_rate": 4.298552542500093e-06, + "loss": 0.502, + "step": 13489 + }, + { + "epoch": 2.11, + "grad_norm": 42.398325496980426, + "learning_rate": 4.297167172182546e-06, + "loss": 0.5287, + "step": 13490 + }, + { + "epoch": 2.11, + "grad_norm": 38.68794039397025, + "learning_rate": 4.295781964050229e-06, + "loss": 0.5194, + "step": 13491 + }, + { + "epoch": 2.11, + "grad_norm": 16.276072685491886, + "learning_rate": 4.2943969181425395e-06, + "loss": 0.4434, + "step": 13492 + }, + { + "epoch": 2.11, + "grad_norm": 21.53435643712485, + "learning_rate": 4.2930120344988605e-06, + "loss": 0.4705, + "step": 13493 + }, + { + "epoch": 2.11, + "grad_norm": 24.75365813631957, + "learning_rate": 4.291627313158584e-06, + "loss": 0.5483, + "step": 13494 + }, + { + "epoch": 2.11, + "grad_norm": 19.243731719798483, + "learning_rate": 4.290242754161084e-06, + "loss": 0.493, + "step": 13495 + }, + { + "epoch": 2.11, + "grad_norm": 16.60385053777235, + "learning_rate": 4.28885835754574e-06, + "loss": 0.4554, + "step": 13496 + }, + { + "epoch": 2.11, + "grad_norm": 32.31787061053695, + "learning_rate": 4.287474123351928e-06, + "loss": 0.541, + "step": 13497 + }, + { + "epoch": 2.11, + "grad_norm": 20.96526104970375, + "learning_rate": 4.286090051619008e-06, + "loss": 0.4559, + "step": 13498 + }, + { + "epoch": 2.11, + "grad_norm": 21.100422787102886, + "learning_rate": 4.284706142386342e-06, + "loss": 0.5356, 
+ "step": 13499 + }, + { + "epoch": 2.11, + "grad_norm": 19.722016532497694, + "learning_rate": 4.2833223956932916e-06, + "loss": 0.5214, + "step": 13500 + }, + { + "epoch": 2.11, + "grad_norm": 17.78271697635198, + "learning_rate": 4.2819388115792095e-06, + "loss": 0.4669, + "step": 13501 + }, + { + "epoch": 2.11, + "grad_norm": 12.97942187587495, + "learning_rate": 4.280555390083443e-06, + "loss": 0.4304, + "step": 13502 + }, + { + "epoch": 2.11, + "grad_norm": 18.659132246796773, + "learning_rate": 4.279172131245332e-06, + "loss": 0.4608, + "step": 13503 + }, + { + "epoch": 2.11, + "grad_norm": 21.692096957162224, + "learning_rate": 4.27778903510422e-06, + "loss": 0.5481, + "step": 13504 + }, + { + "epoch": 2.11, + "grad_norm": 24.752425590297722, + "learning_rate": 4.2764061016994386e-06, + "loss": 0.538, + "step": 13505 + }, + { + "epoch": 2.11, + "grad_norm": 20.18439080037953, + "learning_rate": 4.275023331070324e-06, + "loss": 0.5294, + "step": 13506 + }, + { + "epoch": 2.11, + "grad_norm": 20.458798299958378, + "learning_rate": 4.2736407232561975e-06, + "loss": 0.4268, + "step": 13507 + }, + { + "epoch": 2.11, + "grad_norm": 23.10961786649343, + "learning_rate": 4.272258278296374e-06, + "loss": 0.4963, + "step": 13508 + }, + { + "epoch": 2.11, + "grad_norm": 17.70235840348874, + "learning_rate": 4.270875996230176e-06, + "loss": 0.5808, + "step": 13509 + }, + { + "epoch": 2.11, + "grad_norm": 28.62649659082115, + "learning_rate": 4.269493877096915e-06, + "loss": 0.5652, + "step": 13510 + }, + { + "epoch": 2.11, + "grad_norm": 20.99787160008829, + "learning_rate": 4.268111920935893e-06, + "loss": 0.4426, + "step": 13511 + }, + { + "epoch": 2.11, + "grad_norm": 20.321086393620956, + "learning_rate": 4.266730127786419e-06, + "loss": 0.4878, + "step": 13512 + }, + { + "epoch": 2.11, + "grad_norm": 27.343704060107193, + "learning_rate": 4.265348497687784e-06, + "loss": 0.495, + "step": 13513 + }, + { + "epoch": 2.11, + "grad_norm": 24.921945488091644, + "learning_rate": 4.263967030679284e-06, + "loss": 0.4941, + "step": 13514 + }, + { + "epoch": 2.11, + "grad_norm": 20.520433813811366, + "learning_rate": 4.262585726800204e-06, + "loss": 0.5092, + "step": 13515 + }, + { + "epoch": 2.11, + "grad_norm": 17.808686122499925, + "learning_rate": 4.26120458608983e-06, + "loss": 0.5165, + "step": 13516 + }, + { + "epoch": 2.11, + "grad_norm": 18.30822553360003, + "learning_rate": 4.259823608587443e-06, + "loss": 0.505, + "step": 13517 + }, + { + "epoch": 2.11, + "grad_norm": 16.909309206936634, + "learning_rate": 4.258442794332311e-06, + "loss": 0.4927, + "step": 13518 + }, + { + "epoch": 2.11, + "grad_norm": 28.05286728858601, + "learning_rate": 4.2570621433637116e-06, + "loss": 0.5266, + "step": 13519 + }, + { + "epoch": 2.11, + "grad_norm": 28.709137145186844, + "learning_rate": 4.255681655720901e-06, + "loss": 0.6329, + "step": 13520 + }, + { + "epoch": 2.11, + "grad_norm": 20.58938696985488, + "learning_rate": 4.254301331443145e-06, + "loss": 0.5211, + "step": 13521 + }, + { + "epoch": 2.11, + "grad_norm": 16.575775673928216, + "learning_rate": 4.252921170569701e-06, + "loss": 0.4887, + "step": 13522 + }, + { + "epoch": 2.11, + "grad_norm": 19.976739653636162, + "learning_rate": 4.251541173139816e-06, + "loss": 0.4615, + "step": 13523 + }, + { + "epoch": 2.11, + "grad_norm": 16.604192278645606, + "learning_rate": 4.250161339192734e-06, + "loss": 0.4779, + "step": 13524 + }, + { + "epoch": 2.11, + "grad_norm": 21.061330305781027, + "learning_rate": 4.2487816687677006e-06, + "loss": 0.4923, + 
"step": 13525 + }, + { + "epoch": 2.11, + "grad_norm": 22.94314081684299, + "learning_rate": 4.247402161903956e-06, + "loss": 0.4928, + "step": 13526 + }, + { + "epoch": 2.11, + "grad_norm": 17.352750244607382, + "learning_rate": 4.246022818640728e-06, + "loss": 0.5382, + "step": 13527 + }, + { + "epoch": 2.11, + "grad_norm": 18.118142313798433, + "learning_rate": 4.244643639017242e-06, + "loss": 0.513, + "step": 13528 + }, + { + "epoch": 2.11, + "grad_norm": 19.816358548555687, + "learning_rate": 4.243264623072725e-06, + "loss": 0.5031, + "step": 13529 + }, + { + "epoch": 2.11, + "grad_norm": 28.05101485501066, + "learning_rate": 4.241885770846392e-06, + "loss": 0.5212, + "step": 13530 + }, + { + "epoch": 2.11, + "grad_norm": 20.47354865135328, + "learning_rate": 4.240507082377465e-06, + "loss": 0.5088, + "step": 13531 + }, + { + "epoch": 2.11, + "grad_norm": 26.268185417557632, + "learning_rate": 4.239128557705146e-06, + "loss": 0.4698, + "step": 13532 + }, + { + "epoch": 2.11, + "grad_norm": 28.84944940364741, + "learning_rate": 4.237750196868639e-06, + "loss": 0.5317, + "step": 13533 + }, + { + "epoch": 2.11, + "grad_norm": 20.476987320569467, + "learning_rate": 4.236371999907144e-06, + "loss": 0.4917, + "step": 13534 + }, + { + "epoch": 2.11, + "grad_norm": 14.891998469844339, + "learning_rate": 4.234993966859862e-06, + "loss": 0.4485, + "step": 13535 + }, + { + "epoch": 2.11, + "grad_norm": 18.146306924811462, + "learning_rate": 4.233616097765979e-06, + "loss": 0.5415, + "step": 13536 + }, + { + "epoch": 2.11, + "grad_norm": 20.932540197922695, + "learning_rate": 4.232238392664677e-06, + "loss": 0.4883, + "step": 13537 + }, + { + "epoch": 2.11, + "grad_norm": 13.163851399526058, + "learning_rate": 4.2308608515951414e-06, + "loss": 0.4369, + "step": 13538 + }, + { + "epoch": 2.11, + "grad_norm": 15.091191036385965, + "learning_rate": 4.229483474596552e-06, + "loss": 0.4475, + "step": 13539 + }, + { + "epoch": 2.11, + "grad_norm": 23.806749958748377, + "learning_rate": 4.228106261708073e-06, + "loss": 0.4853, + "step": 13540 + }, + { + "epoch": 2.12, + "grad_norm": 23.165884479575702, + "learning_rate": 4.22672921296888e-06, + "loss": 0.4928, + "step": 13541 + }, + { + "epoch": 2.12, + "grad_norm": 21.29297327151099, + "learning_rate": 4.225352328418126e-06, + "loss": 0.5042, + "step": 13542 + }, + { + "epoch": 2.12, + "grad_norm": 18.2225418156734, + "learning_rate": 4.223975608094975e-06, + "loss": 0.5866, + "step": 13543 + }, + { + "epoch": 2.12, + "grad_norm": 15.721819138129119, + "learning_rate": 4.222599052038581e-06, + "loss": 0.4423, + "step": 13544 + }, + { + "epoch": 2.12, + "grad_norm": 24.805819150086123, + "learning_rate": 4.221222660288087e-06, + "loss": 0.4885, + "step": 13545 + }, + { + "epoch": 2.12, + "grad_norm": 20.034232436800302, + "learning_rate": 4.219846432882644e-06, + "loss": 0.4366, + "step": 13546 + }, + { + "epoch": 2.12, + "grad_norm": 29.151975361289153, + "learning_rate": 4.218470369861384e-06, + "loss": 0.6061, + "step": 13547 + }, + { + "epoch": 2.12, + "grad_norm": 26.154425497011548, + "learning_rate": 4.217094471263447e-06, + "loss": 0.5386, + "step": 13548 + }, + { + "epoch": 2.12, + "grad_norm": 16.32975497437622, + "learning_rate": 4.2157187371279585e-06, + "loss": 0.4813, + "step": 13549 + }, + { + "epoch": 2.12, + "grad_norm": 26.540746655056783, + "learning_rate": 4.214343167494044e-06, + "loss": 0.4965, + "step": 13550 + }, + { + "epoch": 2.12, + "grad_norm": 15.448068747658212, + "learning_rate": 4.2129677624008294e-06, + "loss": 0.4188, + 
"step": 13551 + }, + { + "epoch": 2.12, + "grad_norm": 14.746551157949762, + "learning_rate": 4.211592521887426e-06, + "loss": 0.4561, + "step": 13552 + }, + { + "epoch": 2.12, + "grad_norm": 13.01736470911061, + "learning_rate": 4.2102174459929424e-06, + "loss": 0.4634, + "step": 13553 + }, + { + "epoch": 2.12, + "grad_norm": 19.461717377866623, + "learning_rate": 4.2088425347564866e-06, + "loss": 0.4935, + "step": 13554 + }, + { + "epoch": 2.12, + "grad_norm": 15.913210044486908, + "learning_rate": 4.207467788217162e-06, + "loss": 0.4567, + "step": 13555 + }, + { + "epoch": 2.12, + "grad_norm": 13.193984821439942, + "learning_rate": 4.206093206414069e-06, + "loss": 0.4453, + "step": 13556 + }, + { + "epoch": 2.12, + "grad_norm": 35.940176690803796, + "learning_rate": 4.204718789386295e-06, + "loss": 0.5471, + "step": 13557 + }, + { + "epoch": 2.12, + "grad_norm": 19.406448302584195, + "learning_rate": 4.203344537172925e-06, + "loss": 0.4823, + "step": 13558 + }, + { + "epoch": 2.12, + "grad_norm": 17.516960817506494, + "learning_rate": 4.201970449813045e-06, + "loss": 0.427, + "step": 13559 + }, + { + "epoch": 2.12, + "grad_norm": 20.517757946971244, + "learning_rate": 4.200596527345738e-06, + "loss": 0.4655, + "step": 13560 + }, + { + "epoch": 2.12, + "grad_norm": 19.124877200012897, + "learning_rate": 4.199222769810074e-06, + "loss": 0.5051, + "step": 13561 + }, + { + "epoch": 2.12, + "grad_norm": 19.653873230921988, + "learning_rate": 4.197849177245117e-06, + "loss": 0.5575, + "step": 13562 + }, + { + "epoch": 2.12, + "grad_norm": 15.744192549943213, + "learning_rate": 4.196475749689934e-06, + "loss": 0.5311, + "step": 13563 + }, + { + "epoch": 2.12, + "grad_norm": 20.507579584161434, + "learning_rate": 4.19510248718359e-06, + "loss": 0.5005, + "step": 13564 + }, + { + "epoch": 2.12, + "grad_norm": 25.340815557395395, + "learning_rate": 4.1937293897651324e-06, + "loss": 0.4535, + "step": 13565 + }, + { + "epoch": 2.12, + "grad_norm": 15.670890821891394, + "learning_rate": 4.192356457473617e-06, + "loss": 0.4888, + "step": 13566 + }, + { + "epoch": 2.12, + "grad_norm": 23.3920332498085, + "learning_rate": 4.190983690348084e-06, + "loss": 0.5754, + "step": 13567 + }, + { + "epoch": 2.12, + "grad_norm": 19.55890911024046, + "learning_rate": 4.189611088427576e-06, + "loss": 0.5032, + "step": 13568 + }, + { + "epoch": 2.12, + "grad_norm": 15.953563424492723, + "learning_rate": 4.188238651751134e-06, + "loss": 0.4359, + "step": 13569 + }, + { + "epoch": 2.12, + "grad_norm": 16.11835830233997, + "learning_rate": 4.186866380357782e-06, + "loss": 0.4383, + "step": 13570 + }, + { + "epoch": 2.12, + "grad_norm": 13.627039716220475, + "learning_rate": 4.1854942742865514e-06, + "loss": 0.4585, + "step": 13571 + }, + { + "epoch": 2.12, + "grad_norm": 16.443986626544245, + "learning_rate": 4.184122333576459e-06, + "loss": 0.4323, + "step": 13572 + }, + { + "epoch": 2.12, + "grad_norm": 25.55811324404679, + "learning_rate": 4.1827505582665295e-06, + "loss": 0.5244, + "step": 13573 + }, + { + "epoch": 2.12, + "grad_norm": 23.951393495349535, + "learning_rate": 4.181378948395767e-06, + "loss": 0.4781, + "step": 13574 + }, + { + "epoch": 2.12, + "grad_norm": 16.91530885927774, + "learning_rate": 4.180007504003183e-06, + "loss": 0.4867, + "step": 13575 + }, + { + "epoch": 2.12, + "grad_norm": 23.51367503981383, + "learning_rate": 4.178636225127786e-06, + "loss": 0.5121, + "step": 13576 + }, + { + "epoch": 2.12, + "grad_norm": 21.956888057565543, + "learning_rate": 4.177265111808568e-06, + "loss": 0.5423, 
+ "step": 13577 + }, + { + "epoch": 2.12, + "grad_norm": 32.657642296492234, + "learning_rate": 4.175894164084521e-06, + "loss": 0.4856, + "step": 13578 + }, + { + "epoch": 2.12, + "grad_norm": 24.39869156722586, + "learning_rate": 4.174523381994637e-06, + "loss": 0.5382, + "step": 13579 + }, + { + "epoch": 2.12, + "grad_norm": 16.979040378898556, + "learning_rate": 4.1731527655779e-06, + "loss": 0.4163, + "step": 13580 + }, + { + "epoch": 2.12, + "grad_norm": 24.721661672297405, + "learning_rate": 4.171782314873294e-06, + "loss": 0.5212, + "step": 13581 + }, + { + "epoch": 2.12, + "grad_norm": 14.207655940404102, + "learning_rate": 4.170412029919788e-06, + "loss": 0.4455, + "step": 13582 + }, + { + "epoch": 2.12, + "grad_norm": 15.62061479067443, + "learning_rate": 4.169041910756352e-06, + "loss": 0.4677, + "step": 13583 + }, + { + "epoch": 2.12, + "grad_norm": 28.295346041299343, + "learning_rate": 4.167671957421952e-06, + "loss": 0.4471, + "step": 13584 + }, + { + "epoch": 2.12, + "grad_norm": 19.790220923991725, + "learning_rate": 4.166302169955553e-06, + "loss": 0.4968, + "step": 13585 + }, + { + "epoch": 2.12, + "grad_norm": 17.582258738732836, + "learning_rate": 4.164932548396108e-06, + "loss": 0.5303, + "step": 13586 + }, + { + "epoch": 2.12, + "grad_norm": 21.44405705715216, + "learning_rate": 4.163563092782564e-06, + "loss": 0.4366, + "step": 13587 + }, + { + "epoch": 2.12, + "grad_norm": 14.801282382393591, + "learning_rate": 4.162193803153872e-06, + "loss": 0.545, + "step": 13588 + }, + { + "epoch": 2.12, + "grad_norm": 14.452182708128388, + "learning_rate": 4.160824679548977e-06, + "loss": 0.4392, + "step": 13589 + }, + { + "epoch": 2.12, + "grad_norm": 30.298217108255777, + "learning_rate": 4.1594557220068125e-06, + "loss": 0.4864, + "step": 13590 + }, + { + "epoch": 2.12, + "grad_norm": 26.0626089839349, + "learning_rate": 4.158086930566307e-06, + "loss": 0.61, + "step": 13591 + }, + { + "epoch": 2.12, + "grad_norm": 29.86377432037528, + "learning_rate": 4.15671830526639e-06, + "loss": 0.4633, + "step": 13592 + }, + { + "epoch": 2.12, + "grad_norm": 18.084666647865678, + "learning_rate": 4.155349846145988e-06, + "loss": 0.4707, + "step": 13593 + }, + { + "epoch": 2.12, + "grad_norm": 18.109577938535764, + "learning_rate": 4.153981553244021e-06, + "loss": 0.4985, + "step": 13594 + }, + { + "epoch": 2.12, + "grad_norm": 23.339063911976808, + "learning_rate": 4.152613426599398e-06, + "loss": 0.6181, + "step": 13595 + }, + { + "epoch": 2.12, + "grad_norm": 18.537277732569315, + "learning_rate": 4.151245466251025e-06, + "loss": 0.4634, + "step": 13596 + }, + { + "epoch": 2.12, + "grad_norm": 20.67606763859391, + "learning_rate": 4.14987767223781e-06, + "loss": 0.4626, + "step": 13597 + }, + { + "epoch": 2.12, + "grad_norm": 17.820083548125744, + "learning_rate": 4.148510044598655e-06, + "loss": 0.4513, + "step": 13598 + }, + { + "epoch": 2.12, + "grad_norm": 14.20304431619588, + "learning_rate": 4.147142583372448e-06, + "loss": 0.4403, + "step": 13599 + }, + { + "epoch": 2.12, + "grad_norm": 19.956380531482687, + "learning_rate": 4.145775288598085e-06, + "loss": 0.4714, + "step": 13600 + }, + { + "epoch": 2.12, + "grad_norm": 18.326358972281653, + "learning_rate": 4.1444081603144445e-06, + "loss": 0.5179, + "step": 13601 + }, + { + "epoch": 2.12, + "grad_norm": 20.695428501969044, + "learning_rate": 4.1430411985604145e-06, + "loss": 0.4596, + "step": 13602 + }, + { + "epoch": 2.12, + "grad_norm": 24.93097910976537, + "learning_rate": 4.141674403374864e-06, + "loss": 0.5294, + 
"step": 13603 + }, + { + "epoch": 2.12, + "grad_norm": 22.41000683498459, + "learning_rate": 4.1403077747966646e-06, + "loss": 0.5187, + "step": 13604 + }, + { + "epoch": 2.13, + "grad_norm": 18.835924438098633, + "learning_rate": 4.13894131286469e-06, + "loss": 0.4493, + "step": 13605 + }, + { + "epoch": 2.13, + "grad_norm": 19.468391398315873, + "learning_rate": 4.13757501761779e-06, + "loss": 0.5106, + "step": 13606 + }, + { + "epoch": 2.13, + "grad_norm": 19.169558327599734, + "learning_rate": 4.136208889094832e-06, + "loss": 0.5085, + "step": 13607 + }, + { + "epoch": 2.13, + "grad_norm": 30.760694053416294, + "learning_rate": 4.1348429273346595e-06, + "loss": 0.4817, + "step": 13608 + }, + { + "epoch": 2.13, + "grad_norm": 18.130547217234085, + "learning_rate": 4.133477132376122e-06, + "loss": 0.4943, + "step": 13609 + }, + { + "epoch": 2.13, + "grad_norm": 20.134660244498594, + "learning_rate": 4.132111504258067e-06, + "loss": 0.4764, + "step": 13610 + }, + { + "epoch": 2.13, + "grad_norm": 18.11397962311846, + "learning_rate": 4.130746043019329e-06, + "loss": 0.47, + "step": 13611 + }, + { + "epoch": 2.13, + "grad_norm": 11.031046037436356, + "learning_rate": 4.129380748698737e-06, + "loss": 0.51, + "step": 13612 + }, + { + "epoch": 2.13, + "grad_norm": 21.06777510111775, + "learning_rate": 4.128015621335121e-06, + "loss": 0.5151, + "step": 13613 + }, + { + "epoch": 2.13, + "grad_norm": 15.195392775139672, + "learning_rate": 4.12665066096731e-06, + "loss": 0.5055, + "step": 13614 + }, + { + "epoch": 2.13, + "grad_norm": 17.38425398003947, + "learning_rate": 4.1252858676341175e-06, + "loss": 0.4768, + "step": 13615 + }, + { + "epoch": 2.13, + "grad_norm": 23.492607801792264, + "learning_rate": 4.123921241374355e-06, + "loss": 0.516, + "step": 13616 + }, + { + "epoch": 2.13, + "grad_norm": 23.164307575955736, + "learning_rate": 4.1225567822268365e-06, + "loss": 0.4869, + "step": 13617 + }, + { + "epoch": 2.13, + "grad_norm": 13.589675209840214, + "learning_rate": 4.121192490230363e-06, + "loss": 0.4767, + "step": 13618 + }, + { + "epoch": 2.13, + "grad_norm": 16.242954502096712, + "learning_rate": 4.11982836542374e-06, + "loss": 0.4503, + "step": 13619 + }, + { + "epoch": 2.13, + "grad_norm": 18.02628616834752, + "learning_rate": 4.118464407845759e-06, + "loss": 0.5123, + "step": 13620 + }, + { + "epoch": 2.13, + "grad_norm": 13.525161193664482, + "learning_rate": 4.117100617535207e-06, + "loss": 0.4797, + "step": 13621 + }, + { + "epoch": 2.13, + "grad_norm": 17.23901406084867, + "learning_rate": 4.11573699453087e-06, + "loss": 0.4386, + "step": 13622 + }, + { + "epoch": 2.13, + "grad_norm": 17.815428997454436, + "learning_rate": 4.114373538871535e-06, + "loss": 0.4999, + "step": 13623 + }, + { + "epoch": 2.13, + "grad_norm": 11.88352985085275, + "learning_rate": 4.1130102505959715e-06, + "loss": 0.4624, + "step": 13624 + }, + { + "epoch": 2.13, + "grad_norm": 18.60170207397827, + "learning_rate": 4.111647129742954e-06, + "loss": 0.5037, + "step": 13625 + }, + { + "epoch": 2.13, + "grad_norm": 15.241897778712433, + "learning_rate": 4.110284176351245e-06, + "loss": 0.4716, + "step": 13626 + }, + { + "epoch": 2.13, + "grad_norm": 19.909195686506425, + "learning_rate": 4.108921390459612e-06, + "loss": 0.536, + "step": 13627 + }, + { + "epoch": 2.13, + "grad_norm": 23.83249178786733, + "learning_rate": 4.107558772106805e-06, + "loss": 0.5445, + "step": 13628 + }, + { + "epoch": 2.13, + "grad_norm": 19.100331026489787, + "learning_rate": 4.106196321331581e-06, + "loss": 0.4798, + "step": 
13629 + }, + { + "epoch": 2.13, + "grad_norm": 14.76686234714087, + "learning_rate": 4.104834038172687e-06, + "loss": 0.5654, + "step": 13630 + }, + { + "epoch": 2.13, + "grad_norm": 16.727406996709465, + "learning_rate": 4.103471922668862e-06, + "loss": 0.4911, + "step": 13631 + }, + { + "epoch": 2.13, + "grad_norm": 22.848115422367645, + "learning_rate": 4.10210997485885e-06, + "loss": 0.5234, + "step": 13632 + }, + { + "epoch": 2.13, + "grad_norm": 22.365964335774482, + "learning_rate": 4.100748194781376e-06, + "loss": 0.4239, + "step": 13633 + }, + { + "epoch": 2.13, + "grad_norm": 20.449688821689175, + "learning_rate": 4.099386582475175e-06, + "loss": 0.5003, + "step": 13634 + }, + { + "epoch": 2.13, + "grad_norm": 31.09837584950147, + "learning_rate": 4.0980251379789655e-06, + "loss": 0.4586, + "step": 13635 + }, + { + "epoch": 2.13, + "grad_norm": 22.19372386342677, + "learning_rate": 4.096663861331472e-06, + "loss": 0.4624, + "step": 13636 + }, + { + "epoch": 2.13, + "grad_norm": 21.027454313568263, + "learning_rate": 4.095302752571402e-06, + "loss": 0.4547, + "step": 13637 + }, + { + "epoch": 2.13, + "grad_norm": 30.405046929771117, + "learning_rate": 4.093941811737466e-06, + "loss": 0.5329, + "step": 13638 + }, + { + "epoch": 2.13, + "grad_norm": 20.270770578509428, + "learning_rate": 4.092581038868375e-06, + "loss": 0.4578, + "step": 13639 + }, + { + "epoch": 2.13, + "grad_norm": 24.343442198750296, + "learning_rate": 4.0912204340028224e-06, + "loss": 0.521, + "step": 13640 + }, + { + "epoch": 2.13, + "grad_norm": 15.023524792437934, + "learning_rate": 4.089859997179502e-06, + "loss": 0.4562, + "step": 13641 + }, + { + "epoch": 2.13, + "grad_norm": 21.9503108633932, + "learning_rate": 4.088499728437104e-06, + "loss": 0.5366, + "step": 13642 + }, + { + "epoch": 2.13, + "grad_norm": 14.036514319506963, + "learning_rate": 4.087139627814317e-06, + "loss": 0.4861, + "step": 13643 + }, + { + "epoch": 2.13, + "grad_norm": 21.313620862264777, + "learning_rate": 4.085779695349823e-06, + "loss": 0.5295, + "step": 13644 + }, + { + "epoch": 2.13, + "grad_norm": 26.80087441051656, + "learning_rate": 4.084419931082295e-06, + "loss": 0.5164, + "step": 13645 + }, + { + "epoch": 2.13, + "grad_norm": 20.693608031999965, + "learning_rate": 4.083060335050401e-06, + "loss": 0.5041, + "step": 13646 + }, + { + "epoch": 2.13, + "grad_norm": 13.884248269917485, + "learning_rate": 4.0817009072928085e-06, + "loss": 0.3982, + "step": 13647 + }, + { + "epoch": 2.13, + "grad_norm": 14.967372945924364, + "learning_rate": 4.080341647848184e-06, + "loss": 0.4749, + "step": 13648 + }, + { + "epoch": 2.13, + "grad_norm": 17.246629287595802, + "learning_rate": 4.07898255675518e-06, + "loss": 0.4649, + "step": 13649 + }, + { + "epoch": 2.13, + "grad_norm": 22.401588402596303, + "learning_rate": 4.077623634052445e-06, + "loss": 0.5824, + "step": 13650 + }, + { + "epoch": 2.13, + "grad_norm": 21.589883329087062, + "learning_rate": 4.076264879778629e-06, + "loss": 0.4992, + "step": 13651 + }, + { + "epoch": 2.13, + "grad_norm": 22.885499242978852, + "learning_rate": 4.074906293972378e-06, + "loss": 0.4625, + "step": 13652 + }, + { + "epoch": 2.13, + "grad_norm": 19.665082111681873, + "learning_rate": 4.073547876672323e-06, + "loss": 0.5504, + "step": 13653 + }, + { + "epoch": 2.13, + "grad_norm": 17.166712005149897, + "learning_rate": 4.072189627917102e-06, + "loss": 0.4586, + "step": 13654 + }, + { + "epoch": 2.13, + "grad_norm": 19.487445647461794, + "learning_rate": 4.070831547745338e-06, + "loss": 0.4891, + 
"step": 13655 + }, + { + "epoch": 2.13, + "grad_norm": 21.933707751744688, + "learning_rate": 4.0694736361956555e-06, + "loss": 0.4906, + "step": 13656 + }, + { + "epoch": 2.13, + "grad_norm": 22.028510284517882, + "learning_rate": 4.068115893306678e-06, + "loss": 0.4744, + "step": 13657 + }, + { + "epoch": 2.13, + "grad_norm": 18.826276854669484, + "learning_rate": 4.06675831911701e-06, + "loss": 0.4593, + "step": 13658 + }, + { + "epoch": 2.13, + "grad_norm": 28.371458327776494, + "learning_rate": 4.0654009136652685e-06, + "loss": 0.4562, + "step": 13659 + }, + { + "epoch": 2.13, + "grad_norm": 18.532878240921082, + "learning_rate": 4.06404367699005e-06, + "loss": 0.4931, + "step": 13660 + }, + { + "epoch": 2.13, + "grad_norm": 21.0797959864339, + "learning_rate": 4.0626866091299595e-06, + "loss": 0.4673, + "step": 13661 + }, + { + "epoch": 2.13, + "grad_norm": 15.074621787791056, + "learning_rate": 4.061329710123586e-06, + "loss": 0.4647, + "step": 13662 + }, + { + "epoch": 2.13, + "grad_norm": 15.404031571572393, + "learning_rate": 4.059972980009522e-06, + "loss": 0.4751, + "step": 13663 + }, + { + "epoch": 2.13, + "grad_norm": 18.388421726196704, + "learning_rate": 4.058616418826355e-06, + "loss": 0.4938, + "step": 13664 + }, + { + "epoch": 2.13, + "grad_norm": 29.828047124322392, + "learning_rate": 4.05726002661266e-06, + "loss": 0.5372, + "step": 13665 + }, + { + "epoch": 2.13, + "grad_norm": 15.08296897638668, + "learning_rate": 4.055903803407011e-06, + "loss": 0.4706, + "step": 13666 + }, + { + "epoch": 2.13, + "grad_norm": 22.473197783283254, + "learning_rate": 4.05454774924798e-06, + "loss": 0.5085, + "step": 13667 + }, + { + "epoch": 2.13, + "grad_norm": 28.580788632144923, + "learning_rate": 4.0531918641741344e-06, + "loss": 0.4934, + "step": 13668 + }, + { + "epoch": 2.14, + "grad_norm": 21.64413508858331, + "learning_rate": 4.051836148224035e-06, + "loss": 0.5595, + "step": 13669 + }, + { + "epoch": 2.14, + "grad_norm": 23.084197259882018, + "learning_rate": 4.050480601436237e-06, + "loss": 0.4821, + "step": 13670 + }, + { + "epoch": 2.14, + "grad_norm": 19.58997925229432, + "learning_rate": 4.049125223849287e-06, + "loss": 0.4646, + "step": 13671 + }, + { + "epoch": 2.14, + "grad_norm": 31.123418109979106, + "learning_rate": 4.047770015501734e-06, + "loss": 0.5362, + "step": 13672 + }, + { + "epoch": 2.14, + "grad_norm": 18.521583464744715, + "learning_rate": 4.046414976432124e-06, + "loss": 0.4303, + "step": 13673 + }, + { + "epoch": 2.14, + "grad_norm": 19.726309800748407, + "learning_rate": 4.045060106678989e-06, + "loss": 0.5317, + "step": 13674 + }, + { + "epoch": 2.14, + "grad_norm": 19.419603646916862, + "learning_rate": 4.043705406280857e-06, + "loss": 0.494, + "step": 13675 + }, + { + "epoch": 2.14, + "grad_norm": 18.07930658199789, + "learning_rate": 4.04235087527626e-06, + "loss": 0.4651, + "step": 13676 + }, + { + "epoch": 2.14, + "grad_norm": 17.151086766839835, + "learning_rate": 4.040996513703721e-06, + "loss": 0.5083, + "step": 13677 + }, + { + "epoch": 2.14, + "grad_norm": 19.109216745552544, + "learning_rate": 4.039642321601753e-06, + "loss": 0.4849, + "step": 13678 + }, + { + "epoch": 2.14, + "grad_norm": 20.10280049311328, + "learning_rate": 4.0382882990088735e-06, + "loss": 0.4456, + "step": 13679 + }, + { + "epoch": 2.14, + "grad_norm": 29.768161518423963, + "learning_rate": 4.036934445963584e-06, + "loss": 0.4621, + "step": 13680 + }, + { + "epoch": 2.14, + "grad_norm": 20.009388870482887, + "learning_rate": 4.035580762504391e-06, + "loss": 0.5105, + 
"step": 13681 + }, + { + "epoch": 2.14, + "grad_norm": 20.039032976600957, + "learning_rate": 4.034227248669794e-06, + "loss": 0.5935, + "step": 13682 + }, + { + "epoch": 2.14, + "grad_norm": 20.264620074412747, + "learning_rate": 4.032873904498286e-06, + "loss": 0.5224, + "step": 13683 + }, + { + "epoch": 2.14, + "grad_norm": 22.122891115448716, + "learning_rate": 4.031520730028348e-06, + "loss": 0.4953, + "step": 13684 + }, + { + "epoch": 2.14, + "grad_norm": 20.5715564868856, + "learning_rate": 4.030167725298472e-06, + "loss": 0.5739, + "step": 13685 + }, + { + "epoch": 2.14, + "grad_norm": 21.738966846193932, + "learning_rate": 4.028814890347134e-06, + "loss": 0.5055, + "step": 13686 + }, + { + "epoch": 2.14, + "grad_norm": 13.60996665555389, + "learning_rate": 4.027462225212806e-06, + "loss": 0.4751, + "step": 13687 + }, + { + "epoch": 2.14, + "grad_norm": 16.13213868708344, + "learning_rate": 4.026109729933962e-06, + "loss": 0.4357, + "step": 13688 + }, + { + "epoch": 2.14, + "grad_norm": 23.05932294470333, + "learning_rate": 4.024757404549058e-06, + "loss": 0.5339, + "step": 13689 + }, + { + "epoch": 2.14, + "grad_norm": 16.319212736306508, + "learning_rate": 4.023405249096561e-06, + "loss": 0.5883, + "step": 13690 + }, + { + "epoch": 2.14, + "grad_norm": 15.859497610802075, + "learning_rate": 4.022053263614921e-06, + "loss": 0.4219, + "step": 13691 + }, + { + "epoch": 2.14, + "grad_norm": 30.85810576931222, + "learning_rate": 4.0207014481425875e-06, + "loss": 0.5327, + "step": 13692 + }, + { + "epoch": 2.14, + "grad_norm": 19.55922623288415, + "learning_rate": 4.019349802718011e-06, + "loss": 0.4878, + "step": 13693 + }, + { + "epoch": 2.14, + "grad_norm": 13.247477699593244, + "learning_rate": 4.017998327379626e-06, + "loss": 0.4509, + "step": 13694 + }, + { + "epoch": 2.14, + "grad_norm": 16.51693598173506, + "learning_rate": 4.01664702216587e-06, + "loss": 0.4849, + "step": 13695 + }, + { + "epoch": 2.14, + "grad_norm": 14.843596417485822, + "learning_rate": 4.015295887115169e-06, + "loss": 0.5144, + "step": 13696 + }, + { + "epoch": 2.14, + "grad_norm": 16.52286735209337, + "learning_rate": 4.013944922265953e-06, + "loss": 0.4234, + "step": 13697 + }, + { + "epoch": 2.14, + "grad_norm": 24.705827313065996, + "learning_rate": 4.012594127656646e-06, + "loss": 0.4958, + "step": 13698 + }, + { + "epoch": 2.14, + "grad_norm": 13.127258333558165, + "learning_rate": 4.011243503325658e-06, + "loss": 0.5063, + "step": 13699 + }, + { + "epoch": 2.14, + "grad_norm": 24.312314186023546, + "learning_rate": 4.009893049311399e-06, + "loss": 0.4913, + "step": 13700 + }, + { + "epoch": 2.14, + "grad_norm": 14.626372023357817, + "learning_rate": 4.008542765652277e-06, + "loss": 0.4502, + "step": 13701 + }, + { + "epoch": 2.14, + "grad_norm": 21.61066268606607, + "learning_rate": 4.007192652386695e-06, + "loss": 0.5332, + "step": 13702 + }, + { + "epoch": 2.14, + "grad_norm": 19.217029566634995, + "learning_rate": 4.005842709553053e-06, + "loss": 0.5055, + "step": 13703 + }, + { + "epoch": 2.14, + "grad_norm": 31.327060903518884, + "learning_rate": 4.0044929371897325e-06, + "loss": 0.4788, + "step": 13704 + }, + { + "epoch": 2.14, + "grad_norm": 18.38063009206433, + "learning_rate": 4.003143335335124e-06, + "loss": 0.4758, + "step": 13705 + }, + { + "epoch": 2.14, + "grad_norm": 24.571766981610075, + "learning_rate": 4.0017939040276136e-06, + "loss": 0.4352, + "step": 13706 + }, + { + "epoch": 2.14, + "grad_norm": 17.570630728357337, + "learning_rate": 4.000444643305577e-06, + "loss": 0.5377, + 
"step": 13707 + }, + { + "epoch": 2.14, + "grad_norm": 20.537448221804265, + "learning_rate": 3.999095553207386e-06, + "loss": 0.4685, + "step": 13708 + }, + { + "epoch": 2.14, + "grad_norm": 24.748541355499185, + "learning_rate": 3.997746633771403e-06, + "loss": 0.4966, + "step": 13709 + }, + { + "epoch": 2.14, + "grad_norm": 20.882406352810595, + "learning_rate": 3.996397885035995e-06, + "loss": 0.4785, + "step": 13710 + }, + { + "epoch": 2.14, + "grad_norm": 18.273152824164345, + "learning_rate": 3.9950493070395235e-06, + "loss": 0.4481, + "step": 13711 + }, + { + "epoch": 2.14, + "grad_norm": 20.574758165891964, + "learning_rate": 3.993700899820332e-06, + "loss": 0.4331, + "step": 13712 + }, + { + "epoch": 2.14, + "grad_norm": 21.10326420723819, + "learning_rate": 3.9923526634167775e-06, + "loss": 0.5087, + "step": 13713 + }, + { + "epoch": 2.14, + "grad_norm": 20.482406304830853, + "learning_rate": 3.991004597867195e-06, + "loss": 0.5555, + "step": 13714 + }, + { + "epoch": 2.14, + "grad_norm": 19.86213545143624, + "learning_rate": 3.989656703209931e-06, + "loss": 0.5258, + "step": 13715 + }, + { + "epoch": 2.14, + "grad_norm": 20.263404000223726, + "learning_rate": 3.9883089794833094e-06, + "loss": 0.4341, + "step": 13716 + }, + { + "epoch": 2.14, + "grad_norm": 14.18826280236325, + "learning_rate": 3.986961426725665e-06, + "loss": 0.5836, + "step": 13717 + }, + { + "epoch": 2.14, + "grad_norm": 28.938851417639622, + "learning_rate": 3.985614044975323e-06, + "loss": 0.5013, + "step": 13718 + }, + { + "epoch": 2.14, + "grad_norm": 15.860513615254497, + "learning_rate": 3.9842668342705956e-06, + "loss": 0.4221, + "step": 13719 + }, + { + "epoch": 2.14, + "grad_norm": 19.91892328610376, + "learning_rate": 3.982919794649805e-06, + "loss": 0.5074, + "step": 13720 + }, + { + "epoch": 2.14, + "grad_norm": 14.067444423764387, + "learning_rate": 3.98157292615125e-06, + "loss": 0.4327, + "step": 13721 + }, + { + "epoch": 2.14, + "grad_norm": 16.54187269306718, + "learning_rate": 3.980226228813242e-06, + "loss": 0.4802, + "step": 13722 + }, + { + "epoch": 2.14, + "grad_norm": 22.339117434908303, + "learning_rate": 3.978879702674081e-06, + "loss": 0.4776, + "step": 13723 + }, + { + "epoch": 2.14, + "grad_norm": 17.754255318646006, + "learning_rate": 3.97753334777206e-06, + "loss": 0.4561, + "step": 13724 + }, + { + "epoch": 2.14, + "grad_norm": 20.517606919861695, + "learning_rate": 3.976187164145463e-06, + "loss": 0.4778, + "step": 13725 + }, + { + "epoch": 2.14, + "grad_norm": 17.43586719082391, + "learning_rate": 3.97484115183258e-06, + "loss": 0.4658, + "step": 13726 + }, + { + "epoch": 2.14, + "grad_norm": 24.798569817845294, + "learning_rate": 3.97349531087169e-06, + "loss": 0.5521, + "step": 13727 + }, + { + "epoch": 2.14, + "grad_norm": 25.420551036386378, + "learning_rate": 3.972149641301075e-06, + "loss": 0.4836, + "step": 13728 + }, + { + "epoch": 2.14, + "grad_norm": 17.13754603500082, + "learning_rate": 3.97080414315899e-06, + "loss": 0.4835, + "step": 13729 + }, + { + "epoch": 2.14, + "grad_norm": 14.881668349463343, + "learning_rate": 3.969458816483709e-06, + "loss": 0.4099, + "step": 13730 + }, + { + "epoch": 2.14, + "grad_norm": 14.04242205769323, + "learning_rate": 3.968113661313492e-06, + "loss": 0.4811, + "step": 13731 + }, + { + "epoch": 2.14, + "grad_norm": 18.945751120612176, + "learning_rate": 3.966768677686597e-06, + "loss": 0.5218, + "step": 13732 + }, + { + "epoch": 2.15, + "grad_norm": 22.603341586719335, + "learning_rate": 3.9654238656412715e-06, + "loss": 0.434, + 
"step": 13733 + }, + { + "epoch": 2.15, + "grad_norm": 22.39277913818106, + "learning_rate": 3.9640792252157564e-06, + "loss": 0.5095, + "step": 13734 + }, + { + "epoch": 2.15, + "grad_norm": 27.45804126437764, + "learning_rate": 3.962734756448299e-06, + "loss": 0.4849, + "step": 13735 + }, + { + "epoch": 2.15, + "grad_norm": 27.199613024017832, + "learning_rate": 3.961390459377136e-06, + "loss": 0.4819, + "step": 13736 + }, + { + "epoch": 2.15, + "grad_norm": 15.847953142935125, + "learning_rate": 3.960046334040496e-06, + "loss": 0.4422, + "step": 13737 + }, + { + "epoch": 2.15, + "grad_norm": 29.35578340017858, + "learning_rate": 3.958702380476602e-06, + "loss": 0.5015, + "step": 13738 + }, + { + "epoch": 2.15, + "grad_norm": 18.535883657896086, + "learning_rate": 3.957358598723677e-06, + "loss": 0.4936, + "step": 13739 + }, + { + "epoch": 2.15, + "grad_norm": 21.06538448052129, + "learning_rate": 3.956014988819944e-06, + "loss": 0.437, + "step": 13740 + }, + { + "epoch": 2.15, + "grad_norm": 33.44518593485961, + "learning_rate": 3.954671550803604e-06, + "loss": 0.584, + "step": 13741 + }, + { + "epoch": 2.15, + "grad_norm": 28.37576228800434, + "learning_rate": 3.953328284712873e-06, + "loss": 0.4868, + "step": 13742 + }, + { + "epoch": 2.15, + "grad_norm": 24.72052935358145, + "learning_rate": 3.951985190585944e-06, + "loss": 0.4664, + "step": 13743 + }, + { + "epoch": 2.15, + "grad_norm": 18.911912218378284, + "learning_rate": 3.950642268461019e-06, + "loss": 0.4844, + "step": 13744 + }, + { + "epoch": 2.15, + "grad_norm": 27.67270996938472, + "learning_rate": 3.9492995183762925e-06, + "loss": 0.5117, + "step": 13745 + }, + { + "epoch": 2.15, + "grad_norm": 34.96199081283332, + "learning_rate": 3.947956940369944e-06, + "loss": 0.519, + "step": 13746 + }, + { + "epoch": 2.15, + "grad_norm": 25.21025821466923, + "learning_rate": 3.946614534480164e-06, + "loss": 0.5094, + "step": 13747 + }, + { + "epoch": 2.15, + "grad_norm": 19.969785803733526, + "learning_rate": 3.945272300745122e-06, + "loss": 0.5673, + "step": 13748 + }, + { + "epoch": 2.15, + "grad_norm": 18.7217589835283, + "learning_rate": 3.943930239202996e-06, + "loss": 0.4993, + "step": 13749 + }, + { + "epoch": 2.15, + "grad_norm": 17.70408681628699, + "learning_rate": 3.94258834989195e-06, + "loss": 0.4487, + "step": 13750 + }, + { + "epoch": 2.15, + "grad_norm": 17.773733450972227, + "learning_rate": 3.941246632850146e-06, + "loss": 0.4801, + "step": 13751 + }, + { + "epoch": 2.15, + "grad_norm": 18.074960599819416, + "learning_rate": 3.939905088115749e-06, + "loss": 0.5416, + "step": 13752 + }, + { + "epoch": 2.15, + "grad_norm": 13.103058469709426, + "learning_rate": 3.938563715726904e-06, + "loss": 0.4651, + "step": 13753 + }, + { + "epoch": 2.15, + "grad_norm": 17.58381256575493, + "learning_rate": 3.937222515721759e-06, + "loss": 0.4427, + "step": 13754 + }, + { + "epoch": 2.15, + "grad_norm": 12.944561510052417, + "learning_rate": 3.935881488138459e-06, + "loss": 0.4169, + "step": 13755 + }, + { + "epoch": 2.15, + "grad_norm": 18.743133884370458, + "learning_rate": 3.934540633015141e-06, + "loss": 0.4329, + "step": 13756 + }, + { + "epoch": 2.15, + "grad_norm": 18.08461541190231, + "learning_rate": 3.933199950389944e-06, + "loss": 0.4711, + "step": 13757 + }, + { + "epoch": 2.15, + "grad_norm": 37.73339844539813, + "learning_rate": 3.931859440300991e-06, + "loss": 0.5231, + "step": 13758 + }, + { + "epoch": 2.15, + "grad_norm": 16.505439799912597, + "learning_rate": 3.930519102786402e-06, + "loss": 0.4466, + "step": 
13759 + }, + { + "epoch": 2.15, + "grad_norm": 25.546909714888105, + "learning_rate": 3.9291789378843e-06, + "loss": 0.4311, + "step": 13760 + }, + { + "epoch": 2.15, + "grad_norm": 24.2298276042473, + "learning_rate": 3.927838945632799e-06, + "loss": 0.5691, + "step": 13761 + }, + { + "epoch": 2.15, + "grad_norm": 16.52777270267802, + "learning_rate": 3.926499126070008e-06, + "loss": 0.461, + "step": 13762 + }, + { + "epoch": 2.15, + "grad_norm": 24.432323475238753, + "learning_rate": 3.925159479234025e-06, + "loss": 0.4728, + "step": 13763 + }, + { + "epoch": 2.15, + "grad_norm": 15.988836755522414, + "learning_rate": 3.923820005162954e-06, + "loss": 0.4175, + "step": 13764 + }, + { + "epoch": 2.15, + "grad_norm": 21.657416414800597, + "learning_rate": 3.92248070389489e-06, + "loss": 0.5605, + "step": 13765 + }, + { + "epoch": 2.15, + "grad_norm": 17.00641292382174, + "learning_rate": 3.9211415754679164e-06, + "loss": 0.4619, + "step": 13766 + }, + { + "epoch": 2.15, + "grad_norm": 23.691330852215604, + "learning_rate": 3.919802619920122e-06, + "loss": 0.492, + "step": 13767 + }, + { + "epoch": 2.15, + "grad_norm": 26.29551294015667, + "learning_rate": 3.9184638372895835e-06, + "loss": 0.4238, + "step": 13768 + }, + { + "epoch": 2.15, + "grad_norm": 38.15365430428977, + "learning_rate": 3.9171252276143745e-06, + "loss": 0.498, + "step": 13769 + }, + { + "epoch": 2.15, + "grad_norm": 20.59322324956117, + "learning_rate": 3.9157867909325684e-06, + "loss": 0.4847, + "step": 13770 + }, + { + "epoch": 2.15, + "grad_norm": 12.18020199795622, + "learning_rate": 3.914448527282225e-06, + "loss": 0.4729, + "step": 13771 + }, + { + "epoch": 2.15, + "grad_norm": 16.175924663004682, + "learning_rate": 3.9131104367014085e-06, + "loss": 0.4657, + "step": 13772 + }, + { + "epoch": 2.15, + "grad_norm": 15.820109088673199, + "learning_rate": 3.9117725192281664e-06, + "loss": 0.5235, + "step": 13773 + }, + { + "epoch": 2.15, + "grad_norm": 30.926313180072196, + "learning_rate": 3.910434774900555e-06, + "loss": 0.452, + "step": 13774 + }, + { + "epoch": 2.15, + "grad_norm": 18.734153041526948, + "learning_rate": 3.909097203756615e-06, + "loss": 0.4625, + "step": 13775 + }, + { + "epoch": 2.15, + "grad_norm": 22.465166973454593, + "learning_rate": 3.907759805834387e-06, + "loss": 0.4865, + "step": 13776 + }, + { + "epoch": 2.15, + "grad_norm": 17.328716967956478, + "learning_rate": 3.906422581171908e-06, + "loss": 0.4502, + "step": 13777 + }, + { + "epoch": 2.15, + "grad_norm": 33.67026838819909, + "learning_rate": 3.905085529807208e-06, + "loss": 0.4552, + "step": 13778 + }, + { + "epoch": 2.15, + "grad_norm": 18.702986331894113, + "learning_rate": 3.903748651778306e-06, + "loss": 0.5045, + "step": 13779 + }, + { + "epoch": 2.15, + "grad_norm": 13.366622138092893, + "learning_rate": 3.902411947123226e-06, + "loss": 0.4295, + "step": 13780 + }, + { + "epoch": 2.15, + "grad_norm": 18.022108940959637, + "learning_rate": 3.901075415879987e-06, + "loss": 0.4728, + "step": 13781 + }, + { + "epoch": 2.15, + "grad_norm": 19.92704491944772, + "learning_rate": 3.8997390580865915e-06, + "loss": 0.5135, + "step": 13782 + }, + { + "epoch": 2.15, + "grad_norm": 22.0508792601334, + "learning_rate": 3.898402873781052e-06, + "loss": 0.5274, + "step": 13783 + }, + { + "epoch": 2.15, + "grad_norm": 15.433416757073212, + "learning_rate": 3.897066863001363e-06, + "loss": 0.4083, + "step": 13784 + }, + { + "epoch": 2.15, + "grad_norm": 13.850326068728911, + "learning_rate": 3.895731025785521e-06, + "loss": 0.401, + "step": 13785 
+ }, + { + "epoch": 2.15, + "grad_norm": 16.382674369902773, + "learning_rate": 3.894395362171523e-06, + "loss": 0.4531, + "step": 13786 + }, + { + "epoch": 2.15, + "grad_norm": 23.78860367330921, + "learning_rate": 3.893059872197347e-06, + "loss": 0.5513, + "step": 13787 + }, + { + "epoch": 2.15, + "grad_norm": 24.47919211095475, + "learning_rate": 3.8917245559009735e-06, + "loss": 0.4, + "step": 13788 + }, + { + "epoch": 2.15, + "grad_norm": 24.180328592974686, + "learning_rate": 3.89038941332038e-06, + "loss": 0.4821, + "step": 13789 + }, + { + "epoch": 2.15, + "grad_norm": 23.990397153291415, + "learning_rate": 3.889054444493537e-06, + "loss": 0.4787, + "step": 13790 + }, + { + "epoch": 2.15, + "grad_norm": 17.76833099618584, + "learning_rate": 3.887719649458419e-06, + "loss": 0.4812, + "step": 13791 + }, + { + "epoch": 2.15, + "grad_norm": 18.83301022059549, + "learning_rate": 3.8863850282529715e-06, + "loss": 0.5184, + "step": 13792 + }, + { + "epoch": 2.15, + "grad_norm": 20.655549034598337, + "learning_rate": 3.8850505809151574e-06, + "loss": 0.5091, + "step": 13793 + }, + { + "epoch": 2.15, + "grad_norm": 37.917152107933724, + "learning_rate": 3.883716307482928e-06, + "loss": 0.4166, + "step": 13794 + }, + { + "epoch": 2.15, + "grad_norm": 18.995171467479597, + "learning_rate": 3.882382207994231e-06, + "loss": 0.4807, + "step": 13795 + }, + { + "epoch": 2.15, + "grad_norm": 18.210267603273245, + "learning_rate": 3.881048282487005e-06, + "loss": 0.4251, + "step": 13796 + }, + { + "epoch": 2.16, + "grad_norm": 16.194573550702966, + "learning_rate": 3.879714530999185e-06, + "loss": 0.5272, + "step": 13797 + }, + { + "epoch": 2.16, + "grad_norm": 31.432147372145565, + "learning_rate": 3.878380953568702e-06, + "loss": 0.4632, + "step": 13798 + }, + { + "epoch": 2.16, + "grad_norm": 16.33612165889988, + "learning_rate": 3.877047550233488e-06, + "loss": 0.4312, + "step": 13799 + }, + { + "epoch": 2.16, + "grad_norm": 18.39905848497902, + "learning_rate": 3.875714321031455e-06, + "loss": 0.489, + "step": 13800 + }, + { + "epoch": 2.16, + "grad_norm": 22.022634447605213, + "learning_rate": 3.874381266000529e-06, + "loss": 0.4236, + "step": 13801 + }, + { + "epoch": 2.16, + "grad_norm": 22.33871689375885, + "learning_rate": 3.873048385178613e-06, + "loss": 0.4222, + "step": 13802 + }, + { + "epoch": 2.16, + "grad_norm": 18.614947080571543, + "learning_rate": 3.8717156786036194e-06, + "loss": 0.4792, + "step": 13803 + }, + { + "epoch": 2.16, + "grad_norm": 16.12220059720625, + "learning_rate": 3.870383146313444e-06, + "loss": 0.478, + "step": 13804 + }, + { + "epoch": 2.16, + "grad_norm": 22.6458914932082, + "learning_rate": 3.869050788345985e-06, + "loss": 0.5189, + "step": 13805 + }, + { + "epoch": 2.16, + "grad_norm": 16.606458162450263, + "learning_rate": 3.86771860473914e-06, + "loss": 0.5411, + "step": 13806 + }, + { + "epoch": 2.16, + "grad_norm": 17.049167188300654, + "learning_rate": 3.866386595530787e-06, + "loss": 0.5451, + "step": 13807 + }, + { + "epoch": 2.16, + "grad_norm": 17.226807028144304, + "learning_rate": 3.865054760758813e-06, + "loss": 0.462, + "step": 13808 + }, + { + "epoch": 2.16, + "grad_norm": 23.703487373353195, + "learning_rate": 3.863723100461091e-06, + "loss": 0.5312, + "step": 13809 + }, + { + "epoch": 2.16, + "grad_norm": 15.720219999366716, + "learning_rate": 3.8623916146754935e-06, + "loss": 0.4453, + "step": 13810 + }, + { + "epoch": 2.16, + "grad_norm": 23.43698700216267, + "learning_rate": 3.861060303439892e-06, + "loss": 0.5466, + "step": 13811 + }, + 
{ + "epoch": 2.16, + "grad_norm": 28.113386443644814, + "learning_rate": 3.859729166792144e-06, + "loss": 0.5718, + "step": 13812 + }, + { + "epoch": 2.16, + "grad_norm": 17.54450793873083, + "learning_rate": 3.858398204770104e-06, + "loss": 0.4871, + "step": 13813 + }, + { + "epoch": 2.16, + "grad_norm": 18.94733574632788, + "learning_rate": 3.8570674174116256e-06, + "loss": 0.4339, + "step": 13814 + }, + { + "epoch": 2.16, + "grad_norm": 14.756846983481807, + "learning_rate": 3.855736804754556e-06, + "loss": 0.4843, + "step": 13815 + }, + { + "epoch": 2.16, + "grad_norm": 22.177096987878823, + "learning_rate": 3.854406366836745e-06, + "loss": 0.4191, + "step": 13816 + }, + { + "epoch": 2.16, + "grad_norm": 27.469610137296737, + "learning_rate": 3.853076103696015e-06, + "loss": 0.4479, + "step": 13817 + }, + { + "epoch": 2.16, + "grad_norm": 19.972465716415062, + "learning_rate": 3.851746015370204e-06, + "loss": 0.4721, + "step": 13818 + }, + { + "epoch": 2.16, + "grad_norm": 21.57357047932348, + "learning_rate": 3.8504161018971395e-06, + "loss": 0.532, + "step": 13819 + }, + { + "epoch": 2.16, + "grad_norm": 15.066666471030683, + "learning_rate": 3.849086363314648e-06, + "loss": 0.4783, + "step": 13820 + }, + { + "epoch": 2.16, + "grad_norm": 21.251182363377925, + "learning_rate": 3.847756799660543e-06, + "loss": 0.5119, + "step": 13821 + }, + { + "epoch": 2.16, + "grad_norm": 17.0781719236785, + "learning_rate": 3.84642741097263e-06, + "loss": 0.505, + "step": 13822 + }, + { + "epoch": 2.16, + "grad_norm": 28.662032923425876, + "learning_rate": 3.845098197288724e-06, + "loss": 0.4965, + "step": 13823 + }, + { + "epoch": 2.16, + "grad_norm": 15.116391376852972, + "learning_rate": 3.843769158646626e-06, + "loss": 0.4629, + "step": 13824 + }, + { + "epoch": 2.16, + "grad_norm": 19.739721609921304, + "learning_rate": 3.842440295084133e-06, + "loss": 0.4611, + "step": 13825 + }, + { + "epoch": 2.16, + "grad_norm": 22.773359891389546, + "learning_rate": 3.841111606639032e-06, + "loss": 0.5289, + "step": 13826 + }, + { + "epoch": 2.16, + "grad_norm": 21.240824536072004, + "learning_rate": 3.839783093349114e-06, + "loss": 0.5214, + "step": 13827 + }, + { + "epoch": 2.16, + "grad_norm": 19.626175148974685, + "learning_rate": 3.838454755252166e-06, + "loss": 0.5176, + "step": 13828 + }, + { + "epoch": 2.16, + "grad_norm": 24.017673629666405, + "learning_rate": 3.837126592385956e-06, + "loss": 0.5024, + "step": 13829 + }, + { + "epoch": 2.16, + "grad_norm": 16.998351240671198, + "learning_rate": 3.8357986047882635e-06, + "loss": 0.4954, + "step": 13830 + }, + { + "epoch": 2.16, + "grad_norm": 14.535264263659025, + "learning_rate": 3.8344707924968494e-06, + "loss": 0.4635, + "step": 13831 + }, + { + "epoch": 2.16, + "grad_norm": 29.739895736581154, + "learning_rate": 3.833143155549479e-06, + "loss": 0.5204, + "step": 13832 + }, + { + "epoch": 2.16, + "grad_norm": 26.93640113567417, + "learning_rate": 3.831815693983915e-06, + "loss": 0.5732, + "step": 13833 + }, + { + "epoch": 2.16, + "grad_norm": 29.060360410338088, + "learning_rate": 3.830488407837899e-06, + "loss": 0.4942, + "step": 13834 + }, + { + "epoch": 2.16, + "grad_norm": 34.74055317559641, + "learning_rate": 3.829161297149187e-06, + "loss": 0.4653, + "step": 13835 + }, + { + "epoch": 2.16, + "grad_norm": 28.806554128051562, + "learning_rate": 3.8278343619555155e-06, + "loss": 0.5492, + "step": 13836 + }, + { + "epoch": 2.16, + "grad_norm": 26.49196965975394, + "learning_rate": 3.826507602294628e-06, + "loss": 0.5491, + "step": 13837 + }, 
+ { + "epoch": 2.16, + "grad_norm": 22.616057039250588, + "learning_rate": 3.825181018204248e-06, + "loss": 0.5572, + "step": 13838 + }, + { + "epoch": 2.16, + "grad_norm": 16.009056243128285, + "learning_rate": 3.823854609722108e-06, + "loss": 0.5575, + "step": 13839 + }, + { + "epoch": 2.16, + "grad_norm": 22.919704019155592, + "learning_rate": 3.8225283768859345e-06, + "loss": 0.5026, + "step": 13840 + }, + { + "epoch": 2.16, + "grad_norm": 16.00686820611522, + "learning_rate": 3.8212023197334395e-06, + "loss": 0.5281, + "step": 13841 + }, + { + "epoch": 2.16, + "grad_norm": 23.563345590388938, + "learning_rate": 3.819876438302332e-06, + "loss": 0.5126, + "step": 13842 + }, + { + "epoch": 2.16, + "grad_norm": 17.395522280632267, + "learning_rate": 3.8185507326303236e-06, + "loss": 0.4583, + "step": 13843 + }, + { + "epoch": 2.16, + "grad_norm": 23.343331406311542, + "learning_rate": 3.817225202755117e-06, + "loss": 0.4793, + "step": 13844 + }, + { + "epoch": 2.16, + "grad_norm": 21.359305721413385, + "learning_rate": 3.815899848714412e-06, + "loss": 0.4604, + "step": 13845 + }, + { + "epoch": 2.16, + "grad_norm": 35.65342793509411, + "learning_rate": 3.8145746705458976e-06, + "loss": 0.5287, + "step": 13846 + }, + { + "epoch": 2.16, + "grad_norm": 24.1185799540555, + "learning_rate": 3.8132496682872576e-06, + "loss": 0.4408, + "step": 13847 + }, + { + "epoch": 2.16, + "grad_norm": 20.168136318174888, + "learning_rate": 3.811924841976178e-06, + "loss": 0.4905, + "step": 13848 + }, + { + "epoch": 2.16, + "grad_norm": 30.395899087003755, + "learning_rate": 3.81060019165034e-06, + "loss": 0.4961, + "step": 13849 + }, + { + "epoch": 2.16, + "grad_norm": 26.04443045554107, + "learning_rate": 3.8092757173474114e-06, + "loss": 0.4972, + "step": 13850 + }, + { + "epoch": 2.16, + "grad_norm": 19.447656623516373, + "learning_rate": 3.807951419105056e-06, + "loss": 0.4863, + "step": 13851 + }, + { + "epoch": 2.16, + "grad_norm": 23.089494493786223, + "learning_rate": 3.8066272969609407e-06, + "loss": 0.499, + "step": 13852 + }, + { + "epoch": 2.16, + "grad_norm": 13.885366479673738, + "learning_rate": 3.8053033509527214e-06, + "loss": 0.4918, + "step": 13853 + }, + { + "epoch": 2.16, + "grad_norm": 21.0940775160618, + "learning_rate": 3.8039795811180547e-06, + "loss": 0.5497, + "step": 13854 + }, + { + "epoch": 2.16, + "grad_norm": 30.623567587576517, + "learning_rate": 3.8026559874945844e-06, + "loss": 0.4488, + "step": 13855 + }, + { + "epoch": 2.16, + "grad_norm": 23.204946987146617, + "learning_rate": 3.801332570119949e-06, + "loss": 0.523, + "step": 13856 + }, + { + "epoch": 2.16, + "grad_norm": 33.90051573857576, + "learning_rate": 3.8000093290317886e-06, + "loss": 0.5347, + "step": 13857 + }, + { + "epoch": 2.16, + "grad_norm": 27.360146308496923, + "learning_rate": 3.7986862642677402e-06, + "loss": 0.5333, + "step": 13858 + }, + { + "epoch": 2.16, + "grad_norm": 18.113558550125575, + "learning_rate": 3.7973633758654225e-06, + "loss": 0.5234, + "step": 13859 + }, + { + "epoch": 2.16, + "grad_norm": 18.666177408655454, + "learning_rate": 3.796040663862467e-06, + "loss": 0.5465, + "step": 13860 + }, + { + "epoch": 2.17, + "grad_norm": 24.423976211983565, + "learning_rate": 3.7947181282964806e-06, + "loss": 0.5468, + "step": 13861 + }, + { + "epoch": 2.17, + "grad_norm": 16.03031949267016, + "learning_rate": 3.793395769205085e-06, + "loss": 0.4362, + "step": 13862 + }, + { + "epoch": 2.17, + "grad_norm": 27.25323634877798, + "learning_rate": 3.7920735866258796e-06, + "loss": 0.4984, + "step": 
13863 + }, + { + "epoch": 2.17, + "grad_norm": 20.221239961071152, + "learning_rate": 3.790751580596469e-06, + "loss": 0.5372, + "step": 13864 + }, + { + "epoch": 2.17, + "grad_norm": 27.802734656379144, + "learning_rate": 3.7894297511544553e-06, + "loss": 0.5342, + "step": 13865 + }, + { + "epoch": 2.17, + "grad_norm": 17.28318000315127, + "learning_rate": 3.7881080983374263e-06, + "loss": 0.4139, + "step": 13866 + }, + { + "epoch": 2.17, + "grad_norm": 23.103619880902194, + "learning_rate": 3.786786622182966e-06, + "loss": 0.5184, + "step": 13867 + }, + { + "epoch": 2.17, + "grad_norm": 14.232947886224364, + "learning_rate": 3.7854653227286586e-06, + "loss": 0.4749, + "step": 13868 + }, + { + "epoch": 2.17, + "grad_norm": 23.69926999905366, + "learning_rate": 3.784144200012083e-06, + "loss": 0.5395, + "step": 13869 + }, + { + "epoch": 2.17, + "grad_norm": 26.86781979166525, + "learning_rate": 3.782823254070813e-06, + "loss": 0.4364, + "step": 13870 + }, + { + "epoch": 2.17, + "grad_norm": 22.672117333855482, + "learning_rate": 3.781502484942413e-06, + "loss": 0.5121, + "step": 13871 + }, + { + "epoch": 2.17, + "grad_norm": 30.376665610301327, + "learning_rate": 3.7801818926644416e-06, + "loss": 0.5527, + "step": 13872 + }, + { + "epoch": 2.17, + "grad_norm": 18.870592599040634, + "learning_rate": 3.778861477274458e-06, + "loss": 0.4935, + "step": 13873 + }, + { + "epoch": 2.17, + "grad_norm": 16.214417900681852, + "learning_rate": 3.7775412388100186e-06, + "loss": 0.4581, + "step": 13874 + }, + { + "epoch": 2.17, + "grad_norm": 13.50413907142526, + "learning_rate": 3.7762211773086656e-06, + "loss": 0.4296, + "step": 13875 + }, + { + "epoch": 2.17, + "grad_norm": 17.086538747282624, + "learning_rate": 3.7749012928079387e-06, + "loss": 0.4754, + "step": 13876 + }, + { + "epoch": 2.17, + "grad_norm": 18.153668502430836, + "learning_rate": 3.773581585345377e-06, + "loss": 0.518, + "step": 13877 + }, + { + "epoch": 2.17, + "grad_norm": 14.565319608962314, + "learning_rate": 3.772262054958513e-06, + "loss": 0.4488, + "step": 13878 + }, + { + "epoch": 2.17, + "grad_norm": 19.002450101918896, + "learning_rate": 3.77094270168488e-06, + "loss": 0.5108, + "step": 13879 + }, + { + "epoch": 2.17, + "grad_norm": 21.186942853466455, + "learning_rate": 3.769623525561986e-06, + "loss": 0.4658, + "step": 13880 + }, + { + "epoch": 2.17, + "grad_norm": 23.06550099585411, + "learning_rate": 3.768304526627354e-06, + "loss": 0.4874, + "step": 13881 + }, + { + "epoch": 2.17, + "grad_norm": 22.429163810757675, + "learning_rate": 3.7669857049184953e-06, + "loss": 0.5271, + "step": 13882 + }, + { + "epoch": 2.17, + "grad_norm": 17.727649414142682, + "learning_rate": 3.765667060472922e-06, + "loss": 0.4413, + "step": 13883 + }, + { + "epoch": 2.17, + "grad_norm": 27.112849995071905, + "learning_rate": 3.764348593328129e-06, + "loss": 0.4824, + "step": 13884 + }, + { + "epoch": 2.17, + "grad_norm": 21.889569403433157, + "learning_rate": 3.763030303521611e-06, + "loss": 0.5256, + "step": 13885 + }, + { + "epoch": 2.17, + "grad_norm": 14.050841578719153, + "learning_rate": 3.7617121910908627e-06, + "loss": 0.4345, + "step": 13886 + }, + { + "epoch": 2.17, + "grad_norm": 22.25122012117449, + "learning_rate": 3.760394256073374e-06, + "loss": 0.4876, + "step": 13887 + }, + { + "epoch": 2.17, + "grad_norm": 19.327057029496785, + "learning_rate": 3.7590764985066187e-06, + "loss": 0.4769, + "step": 13888 + }, + { + "epoch": 2.17, + "grad_norm": 12.126113154129019, + "learning_rate": 3.7577589184280817e-06, + "loss": 0.4417, 
+ "step": 13889 + }, + { + "epoch": 2.17, + "grad_norm": 23.267813302315506, + "learning_rate": 3.7564415158752244e-06, + "loss": 0.5399, + "step": 13890 + }, + { + "epoch": 2.17, + "grad_norm": 15.934242441075234, + "learning_rate": 3.755124290885522e-06, + "loss": 0.4572, + "step": 13891 + }, + { + "epoch": 2.17, + "grad_norm": 25.72777193854207, + "learning_rate": 3.753807243496429e-06, + "loss": 0.5363, + "step": 13892 + }, + { + "epoch": 2.17, + "grad_norm": 18.68732316912678, + "learning_rate": 3.752490373745403e-06, + "loss": 0.4636, + "step": 13893 + }, + { + "epoch": 2.17, + "grad_norm": 18.195430534114255, + "learning_rate": 3.751173681669901e-06, + "loss": 0.4551, + "step": 13894 + }, + { + "epoch": 2.17, + "grad_norm": 25.770825307151473, + "learning_rate": 3.749857167307359e-06, + "loss": 0.492, + "step": 13895 + }, + { + "epoch": 2.17, + "grad_norm": 19.805678432736446, + "learning_rate": 3.7485408306952263e-06, + "loss": 0.4557, + "step": 13896 + }, + { + "epoch": 2.17, + "grad_norm": 22.36118923347034, + "learning_rate": 3.7472246718709325e-06, + "loss": 0.4547, + "step": 13897 + }, + { + "epoch": 2.17, + "grad_norm": 16.697549062766893, + "learning_rate": 3.7459086908719124e-06, + "loss": 0.4868, + "step": 13898 + }, + { + "epoch": 2.17, + "grad_norm": 18.198767491674225, + "learning_rate": 3.744592887735592e-06, + "loss": 0.4764, + "step": 13899 + }, + { + "epoch": 2.17, + "grad_norm": 26.00196618316421, + "learning_rate": 3.7432772624993917e-06, + "loss": 0.4639, + "step": 13900 + }, + { + "epoch": 2.17, + "grad_norm": 25.156482588836, + "learning_rate": 3.741961815200723e-06, + "loss": 0.4867, + "step": 13901 + }, + { + "epoch": 2.17, + "grad_norm": 20.34395287012653, + "learning_rate": 3.740646545876998e-06, + "loss": 0.493, + "step": 13902 + }, + { + "epoch": 2.17, + "grad_norm": 17.052540452182416, + "learning_rate": 3.7393314545656243e-06, + "loss": 0.5237, + "step": 13903 + }, + { + "epoch": 2.17, + "grad_norm": 27.072504637392218, + "learning_rate": 3.7380165413040092e-06, + "loss": 0.4997, + "step": 13904 + }, + { + "epoch": 2.17, + "grad_norm": 22.62259429581696, + "learning_rate": 3.7367018061295335e-06, + "loss": 0.5068, + "step": 13905 + }, + { + "epoch": 2.17, + "grad_norm": 16.021429367556948, + "learning_rate": 3.735387249079594e-06, + "loss": 0.415, + "step": 13906 + }, + { + "epoch": 2.17, + "grad_norm": 20.917348662659062, + "learning_rate": 3.7340728701915764e-06, + "loss": 0.4499, + "step": 13907 + }, + { + "epoch": 2.17, + "grad_norm": 15.856144422902132, + "learning_rate": 3.732758669502864e-06, + "loss": 0.4747, + "step": 13908 + }, + { + "epoch": 2.17, + "grad_norm": 12.802182053117328, + "learning_rate": 3.731444647050829e-06, + "loss": 0.478, + "step": 13909 + }, + { + "epoch": 2.17, + "grad_norm": 18.648342386425874, + "learning_rate": 3.730130802872839e-06, + "loss": 0.5104, + "step": 13910 + }, + { + "epoch": 2.17, + "grad_norm": 22.40224088064852, + "learning_rate": 3.72881713700626e-06, + "loss": 0.4798, + "step": 13911 + }, + { + "epoch": 2.17, + "grad_norm": 16.22119826652589, + "learning_rate": 3.7275036494884563e-06, + "loss": 0.4925, + "step": 13912 + }, + { + "epoch": 2.17, + "grad_norm": 18.040495374066186, + "learning_rate": 3.7261903403567767e-06, + "loss": 0.4351, + "step": 13913 + }, + { + "epoch": 2.17, + "grad_norm": 18.476561855614552, + "learning_rate": 3.7248772096485773e-06, + "loss": 0.4969, + "step": 13914 + }, + { + "epoch": 2.17, + "grad_norm": 29.656008212929844, + "learning_rate": 3.7235642574011955e-06, + "loss": 
0.5233, + "step": 13915 + }, + { + "epoch": 2.17, + "grad_norm": 18.42924940373642, + "learning_rate": 3.7222514836519784e-06, + "loss": 0.418, + "step": 13916 + }, + { + "epoch": 2.17, + "grad_norm": 19.142868668758883, + "learning_rate": 3.7209388884382526e-06, + "loss": 0.4841, + "step": 13917 + }, + { + "epoch": 2.17, + "grad_norm": 18.182305342595264, + "learning_rate": 3.719626471797352e-06, + "loss": 0.5755, + "step": 13918 + }, + { + "epoch": 2.17, + "grad_norm": 16.20518447272807, + "learning_rate": 3.7183142337666045e-06, + "loss": 0.4834, + "step": 13919 + }, + { + "epoch": 2.17, + "grad_norm": 17.57457942554384, + "learning_rate": 3.7170021743833216e-06, + "loss": 0.3893, + "step": 13920 + }, + { + "epoch": 2.17, + "grad_norm": 21.992605063775144, + "learning_rate": 3.715690293684825e-06, + "loss": 0.4778, + "step": 13921 + }, + { + "epoch": 2.17, + "grad_norm": 15.958230542950803, + "learning_rate": 3.7143785917084163e-06, + "loss": 0.3823, + "step": 13922 + }, + { + "epoch": 2.17, + "grad_norm": 14.730368526606386, + "learning_rate": 3.7130670684914083e-06, + "loss": 0.5007, + "step": 13923 + }, + { + "epoch": 2.17, + "grad_norm": 28.378319004228697, + "learning_rate": 3.71175572407109e-06, + "loss": 0.5193, + "step": 13924 + }, + { + "epoch": 2.18, + "grad_norm": 17.744855643999525, + "learning_rate": 3.7104445584847647e-06, + "loss": 0.442, + "step": 13925 + }, + { + "epoch": 2.18, + "grad_norm": 18.921790652791913, + "learning_rate": 3.709133571769713e-06, + "loss": 0.4352, + "step": 13926 + }, + { + "epoch": 2.18, + "grad_norm": 31.174525690791548, + "learning_rate": 3.7078227639632234e-06, + "loss": 0.5035, + "step": 13927 + }, + { + "epoch": 2.18, + "grad_norm": 14.620553649009429, + "learning_rate": 3.7065121351025758e-06, + "loss": 0.4392, + "step": 13928 + }, + { + "epoch": 2.18, + "grad_norm": 29.90004100392075, + "learning_rate": 3.705201685225043e-06, + "loss": 0.6798, + "step": 13929 + }, + { + "epoch": 2.18, + "grad_norm": 17.527296288113387, + "learning_rate": 3.7038914143678874e-06, + "loss": 0.4699, + "step": 13930 + }, + { + "epoch": 2.18, + "grad_norm": 22.30784560191277, + "learning_rate": 3.702581322568376e-06, + "loss": 0.5408, + "step": 13931 + }, + { + "epoch": 2.18, + "grad_norm": 15.861843915730232, + "learning_rate": 3.701271409863769e-06, + "loss": 0.4325, + "step": 13932 + }, + { + "epoch": 2.18, + "grad_norm": 18.81794535048324, + "learning_rate": 3.6999616762913226e-06, + "loss": 0.4409, + "step": 13933 + }, + { + "epoch": 2.18, + "grad_norm": 22.7578377024586, + "learning_rate": 3.6986521218882798e-06, + "loss": 0.5059, + "step": 13934 + }, + { + "epoch": 2.18, + "grad_norm": 13.806213747355129, + "learning_rate": 3.697342746691881e-06, + "loss": 0.4334, + "step": 13935 + }, + { + "epoch": 2.18, + "grad_norm": 26.173465803394198, + "learning_rate": 3.6960335507393672e-06, + "loss": 0.5271, + "step": 13936 + }, + { + "epoch": 2.18, + "grad_norm": 16.64270022441161, + "learning_rate": 3.694724534067976e-06, + "loss": 0.4328, + "step": 13937 + }, + { + "epoch": 2.18, + "grad_norm": 18.916564799516234, + "learning_rate": 3.693415696714929e-06, + "loss": 0.45, + "step": 13938 + }, + { + "epoch": 2.18, + "grad_norm": 14.592918441737208, + "learning_rate": 3.6921070387174484e-06, + "loss": 0.5372, + "step": 13939 + }, + { + "epoch": 2.18, + "grad_norm": 17.651884181892083, + "learning_rate": 3.690798560112754e-06, + "loss": 0.4198, + "step": 13940 + }, + { + "epoch": 2.18, + "grad_norm": 16.509405655447036, + "learning_rate": 3.6894902609380566e-06, + 
"loss": 0.5016, + "step": 13941 + }, + { + "epoch": 2.18, + "grad_norm": 18.27971133497859, + "learning_rate": 3.6881821412305683e-06, + "loss": 0.4705, + "step": 13942 + }, + { + "epoch": 2.18, + "grad_norm": 16.39685677519553, + "learning_rate": 3.686874201027487e-06, + "loss": 0.4337, + "step": 13943 + }, + { + "epoch": 2.18, + "grad_norm": 24.002955028784868, + "learning_rate": 3.6855664403660073e-06, + "loss": 0.4793, + "step": 13944 + }, + { + "epoch": 2.18, + "grad_norm": 32.031929770676626, + "learning_rate": 3.684258859283325e-06, + "loss": 0.4854, + "step": 13945 + }, + { + "epoch": 2.18, + "grad_norm": 15.166517287957952, + "learning_rate": 3.6829514578166293e-06, + "loss": 0.433, + "step": 13946 + }, + { + "epoch": 2.18, + "grad_norm": 26.03985727769971, + "learning_rate": 3.6816442360030946e-06, + "loss": 0.5459, + "step": 13947 + }, + { + "epoch": 2.18, + "grad_norm": 23.580314624301515, + "learning_rate": 3.680337193879906e-06, + "loss": 0.4546, + "step": 13948 + }, + { + "epoch": 2.18, + "grad_norm": 14.81311772462806, + "learning_rate": 3.679030331484227e-06, + "loss": 0.4476, + "step": 13949 + }, + { + "epoch": 2.18, + "grad_norm": 15.472065570750203, + "learning_rate": 3.677723648853231e-06, + "loss": 0.4231, + "step": 13950 + }, + { + "epoch": 2.18, + "grad_norm": 20.031267716612696, + "learning_rate": 3.676417146024073e-06, + "loss": 0.4422, + "step": 13951 + }, + { + "epoch": 2.18, + "grad_norm": 24.97955650829259, + "learning_rate": 3.6751108230339116e-06, + "loss": 0.4984, + "step": 13952 + }, + { + "epoch": 2.18, + "grad_norm": 19.24901122721355, + "learning_rate": 3.673804679919901e-06, + "loss": 0.5214, + "step": 13953 + }, + { + "epoch": 2.18, + "grad_norm": 15.329999098499357, + "learning_rate": 3.672498716719186e-06, + "loss": 0.4278, + "step": 13954 + }, + { + "epoch": 2.18, + "grad_norm": 29.249313411642508, + "learning_rate": 3.6711929334689024e-06, + "loss": 0.5053, + "step": 13955 + }, + { + "epoch": 2.18, + "grad_norm": 19.4968305344219, + "learning_rate": 3.6698873302061888e-06, + "loss": 0.505, + "step": 13956 + }, + { + "epoch": 2.18, + "grad_norm": 16.45472467240973, + "learning_rate": 3.668581906968177e-06, + "loss": 0.4573, + "step": 13957 + }, + { + "epoch": 2.18, + "grad_norm": 19.04060933564639, + "learning_rate": 3.6672766637919967e-06, + "loss": 0.4659, + "step": 13958 + }, + { + "epoch": 2.18, + "grad_norm": 24.05959855520941, + "learning_rate": 3.6659716007147616e-06, + "loss": 0.5075, + "step": 13959 + }, + { + "epoch": 2.18, + "grad_norm": 18.095191734239712, + "learning_rate": 3.664666717773586e-06, + "loss": 0.5157, + "step": 13960 + }, + { + "epoch": 2.18, + "grad_norm": 16.977994121589116, + "learning_rate": 3.6633620150055835e-06, + "loss": 0.4636, + "step": 13961 + }, + { + "epoch": 2.18, + "grad_norm": 13.195037928153063, + "learning_rate": 3.6620574924478615e-06, + "loss": 0.4557, + "step": 13962 + }, + { + "epoch": 2.18, + "grad_norm": 20.064549108242435, + "learning_rate": 3.660753150137517e-06, + "loss": 0.4572, + "step": 13963 + }, + { + "epoch": 2.18, + "grad_norm": 19.54747176460059, + "learning_rate": 3.65944898811164e-06, + "loss": 0.5224, + "step": 13964 + }, + { + "epoch": 2.18, + "grad_norm": 25.491890843892687, + "learning_rate": 3.6581450064073265e-06, + "loss": 0.4815, + "step": 13965 + }, + { + "epoch": 2.18, + "grad_norm": 18.26828795487254, + "learning_rate": 3.6568412050616574e-06, + "loss": 0.5211, + "step": 13966 + }, + { + "epoch": 2.18, + "grad_norm": 17.160762294224657, + "learning_rate": 
3.655537584111718e-06, + "loss": 0.4263, + "step": 13967 + }, + { + "epoch": 2.18, + "grad_norm": 20.093935823248746, + "learning_rate": 3.654234143594578e-06, + "loss": 0.458, + "step": 13968 + }, + { + "epoch": 2.18, + "grad_norm": 19.317338236743893, + "learning_rate": 3.652930883547304e-06, + "loss": 0.4909, + "step": 13969 + }, + { + "epoch": 2.18, + "grad_norm": 18.61116934100178, + "learning_rate": 3.651627804006963e-06, + "loss": 0.4267, + "step": 13970 + }, + { + "epoch": 2.18, + "grad_norm": 18.55893812941986, + "learning_rate": 3.650324905010616e-06, + "loss": 0.4397, + "step": 13971 + }, + { + "epoch": 2.18, + "grad_norm": 27.044436155293575, + "learning_rate": 3.6490221865953146e-06, + "loss": 0.5589, + "step": 13972 + }, + { + "epoch": 2.18, + "grad_norm": 19.380076979906473, + "learning_rate": 3.6477196487981036e-06, + "loss": 0.5088, + "step": 13973 + }, + { + "epoch": 2.18, + "grad_norm": 23.389732167335104, + "learning_rate": 3.6464172916560305e-06, + "loss": 0.4705, + "step": 13974 + }, + { + "epoch": 2.18, + "grad_norm": 17.589091577782902, + "learning_rate": 3.645115115206136e-06, + "loss": 0.4937, + "step": 13975 + }, + { + "epoch": 2.18, + "grad_norm": 29.01265602554712, + "learning_rate": 3.643813119485445e-06, + "loss": 0.4521, + "step": 13976 + }, + { + "epoch": 2.18, + "grad_norm": 13.64416319719017, + "learning_rate": 3.642511304530996e-06, + "loss": 0.4766, + "step": 13977 + }, + { + "epoch": 2.18, + "grad_norm": 18.352320732961957, + "learning_rate": 3.641209670379803e-06, + "loss": 0.4317, + "step": 13978 + }, + { + "epoch": 2.18, + "grad_norm": 19.73647408334706, + "learning_rate": 3.6399082170688893e-06, + "loss": 0.5278, + "step": 13979 + }, + { + "epoch": 2.18, + "grad_norm": 17.12766188186991, + "learning_rate": 3.6386069446352632e-06, + "loss": 0.5229, + "step": 13980 + }, + { + "epoch": 2.18, + "grad_norm": 24.355657629092867, + "learning_rate": 3.6373058531159332e-06, + "loss": 0.4636, + "step": 13981 + }, + { + "epoch": 2.18, + "grad_norm": 20.65096667990874, + "learning_rate": 3.636004942547907e-06, + "loss": 0.4917, + "step": 13982 + }, + { + "epoch": 2.18, + "grad_norm": 24.37103479607225, + "learning_rate": 3.634704212968174e-06, + "loss": 0.4963, + "step": 13983 + }, + { + "epoch": 2.18, + "grad_norm": 15.788994113711732, + "learning_rate": 3.6334036644137328e-06, + "loss": 0.4964, + "step": 13984 + }, + { + "epoch": 2.18, + "grad_norm": 16.224723738366823, + "learning_rate": 3.632103296921563e-06, + "loss": 0.4695, + "step": 13985 + }, + { + "epoch": 2.18, + "grad_norm": 19.8523101380616, + "learning_rate": 3.630803110528651e-06, + "loss": 0.5534, + "step": 13986 + }, + { + "epoch": 2.18, + "grad_norm": 19.84165634201634, + "learning_rate": 3.6295031052719766e-06, + "loss": 0.5135, + "step": 13987 + }, + { + "epoch": 2.18, + "grad_norm": 23.956733448610258, + "learning_rate": 3.628203281188506e-06, + "loss": 0.4013, + "step": 13988 + }, + { + "epoch": 2.19, + "grad_norm": 35.406843996915335, + "learning_rate": 3.6269036383152032e-06, + "loss": 0.5603, + "step": 13989 + }, + { + "epoch": 2.19, + "grad_norm": 23.49508580228283, + "learning_rate": 3.6256041766890337e-06, + "loss": 0.4794, + "step": 13990 + }, + { + "epoch": 2.19, + "grad_norm": 25.701856949435804, + "learning_rate": 3.624304896346953e-06, + "loss": 0.43, + "step": 13991 + }, + { + "epoch": 2.19, + "grad_norm": 26.33248877711425, + "learning_rate": 3.623005797325917e-06, + "loss": 0.4812, + "step": 13992 + }, + { + "epoch": 2.19, + "grad_norm": 16.093145042456435, + 
"learning_rate": 3.6217068796628597e-06, + "loss": 0.4626, + "step": 13993 + }, + { + "epoch": 2.19, + "grad_norm": 22.07658711791868, + "learning_rate": 3.6204081433947268e-06, + "loss": 0.4784, + "step": 13994 + }, + { + "epoch": 2.19, + "grad_norm": 18.614420989407897, + "learning_rate": 3.619109588558455e-06, + "loss": 0.5323, + "step": 13995 + }, + { + "epoch": 2.19, + "grad_norm": 23.058644835738473, + "learning_rate": 3.6178112151909763e-06, + "loss": 0.4095, + "step": 13996 + }, + { + "epoch": 2.19, + "grad_norm": 19.812510704389624, + "learning_rate": 3.6165130233292133e-06, + "loss": 0.5238, + "step": 13997 + }, + { + "epoch": 2.19, + "grad_norm": 23.198471185753913, + "learning_rate": 3.6152150130100825e-06, + "loss": 0.5558, + "step": 13998 + }, + { + "epoch": 2.19, + "grad_norm": 25.21144552460755, + "learning_rate": 3.6139171842705024e-06, + "loss": 0.5057, + "step": 13999 + }, + { + "epoch": 2.19, + "grad_norm": 22.192608312420063, + "learning_rate": 3.612619537147385e-06, + "loss": 0.5618, + "step": 14000 + }, + { + "epoch": 2.19, + "grad_norm": 15.8332970868708, + "learning_rate": 3.611322071677628e-06, + "loss": 0.477, + "step": 14001 + }, + { + "epoch": 2.19, + "grad_norm": 14.833045054788366, + "learning_rate": 3.6100247878981365e-06, + "loss": 0.389, + "step": 14002 + }, + { + "epoch": 2.19, + "grad_norm": 15.222395402544583, + "learning_rate": 3.6087276858458e-06, + "loss": 0.4335, + "step": 14003 + }, + { + "epoch": 2.19, + "grad_norm": 24.119977674108185, + "learning_rate": 3.607430765557508e-06, + "loss": 0.5015, + "step": 14004 + }, + { + "epoch": 2.19, + "grad_norm": 14.618093805031508, + "learning_rate": 3.606134027070151e-06, + "loss": 0.4526, + "step": 14005 + }, + { + "epoch": 2.19, + "grad_norm": 15.710519401435027, + "learning_rate": 3.604837470420596e-06, + "loss": 0.4316, + "step": 14006 + }, + { + "epoch": 2.19, + "grad_norm": 16.891181656083717, + "learning_rate": 3.603541095645727e-06, + "loss": 0.4568, + "step": 14007 + }, + { + "epoch": 2.19, + "grad_norm": 27.573844077065104, + "learning_rate": 3.6022449027824035e-06, + "loss": 0.4954, + "step": 14008 + }, + { + "epoch": 2.19, + "grad_norm": 20.6284477473315, + "learning_rate": 3.600948891867496e-06, + "loss": 0.5733, + "step": 14009 + }, + { + "epoch": 2.19, + "grad_norm": 18.12823187197001, + "learning_rate": 3.599653062937856e-06, + "loss": 0.5448, + "step": 14010 + }, + { + "epoch": 2.19, + "grad_norm": 22.564854645638107, + "learning_rate": 3.598357416030338e-06, + "loss": 0.4427, + "step": 14011 + }, + { + "epoch": 2.19, + "grad_norm": 21.107277889542466, + "learning_rate": 3.597061951181794e-06, + "loss": 0.498, + "step": 14012 + }, + { + "epoch": 2.19, + "grad_norm": 24.69182937783355, + "learning_rate": 3.5957666684290626e-06, + "loss": 0.508, + "step": 14013 + }, + { + "epoch": 2.19, + "grad_norm": 18.85321189709224, + "learning_rate": 3.594471567808977e-06, + "loss": 0.4794, + "step": 14014 + }, + { + "epoch": 2.19, + "grad_norm": 16.42785927445323, + "learning_rate": 3.593176649358373e-06, + "loss": 0.4412, + "step": 14015 + }, + { + "epoch": 2.19, + "grad_norm": 34.66148731861961, + "learning_rate": 3.591881913114077e-06, + "loss": 0.4727, + "step": 14016 + }, + { + "epoch": 2.19, + "grad_norm": 17.502460511120397, + "learning_rate": 3.590587359112917e-06, + "loss": 0.4875, + "step": 14017 + }, + { + "epoch": 2.19, + "grad_norm": 14.031174962728825, + "learning_rate": 3.589292987391697e-06, + "loss": 0.4412, + "step": 14018 + }, + { + "epoch": 2.19, + "grad_norm": 24.394031926978407, + 
"learning_rate": 3.5879987979872342e-06, + "loss": 0.5227, + "step": 14019 + }, + { + "epoch": 2.19, + "grad_norm": 17.69219731579314, + "learning_rate": 3.586704790936335e-06, + "loss": 0.4277, + "step": 14020 + }, + { + "epoch": 2.19, + "grad_norm": 17.419275426006198, + "learning_rate": 3.5854109662758043e-06, + "loss": 0.4646, + "step": 14021 + }, + { + "epoch": 2.19, + "grad_norm": 20.173975894212102, + "learning_rate": 3.584117324042431e-06, + "loss": 0.4418, + "step": 14022 + }, + { + "epoch": 2.19, + "grad_norm": 24.775346166971545, + "learning_rate": 3.5828238642730063e-06, + "loss": 0.5118, + "step": 14023 + }, + { + "epoch": 2.19, + "grad_norm": 22.174961773109384, + "learning_rate": 3.581530587004316e-06, + "loss": 0.4566, + "step": 14024 + }, + { + "epoch": 2.19, + "grad_norm": 19.649343930034874, + "learning_rate": 3.580237492273144e-06, + "loss": 0.5392, + "step": 14025 + }, + { + "epoch": 2.19, + "grad_norm": 18.97642455476868, + "learning_rate": 3.578944580116264e-06, + "loss": 0.4857, + "step": 14026 + }, + { + "epoch": 2.19, + "grad_norm": 29.003735713778482, + "learning_rate": 3.5776518505704383e-06, + "loss": 0.5662, + "step": 14027 + }, + { + "epoch": 2.19, + "grad_norm": 21.726996247050618, + "learning_rate": 3.5763593036724387e-06, + "loss": 0.4362, + "step": 14028 + }, + { + "epoch": 2.19, + "grad_norm": 24.769312331371186, + "learning_rate": 3.575066939459022e-06, + "loss": 0.5074, + "step": 14029 + }, + { + "epoch": 2.19, + "grad_norm": 18.008584782554852, + "learning_rate": 3.5737747579669468e-06, + "loss": 0.4826, + "step": 14030 + }, + { + "epoch": 2.19, + "grad_norm": 31.0675369195081, + "learning_rate": 3.572482759232958e-06, + "loss": 0.5604, + "step": 14031 + }, + { + "epoch": 2.19, + "grad_norm": 18.034944622705922, + "learning_rate": 3.5711909432937964e-06, + "loss": 0.4204, + "step": 14032 + }, + { + "epoch": 2.19, + "grad_norm": 14.918155319393698, + "learning_rate": 3.5698993101862034e-06, + "loss": 0.4572, + "step": 14033 + }, + { + "epoch": 2.19, + "grad_norm": 15.828490565598276, + "learning_rate": 3.5686078599469166e-06, + "loss": 0.4274, + "step": 14034 + }, + { + "epoch": 2.19, + "grad_norm": 20.154704688221837, + "learning_rate": 3.5673165926126553e-06, + "loss": 0.4479, + "step": 14035 + }, + { + "epoch": 2.19, + "grad_norm": 17.335644179274187, + "learning_rate": 3.566025508220151e-06, + "loss": 0.4564, + "step": 14036 + }, + { + "epoch": 2.19, + "grad_norm": 20.158496724080653, + "learning_rate": 3.5647346068061152e-06, + "loss": 0.4476, + "step": 14037 + }, + { + "epoch": 2.19, + "grad_norm": 18.988967250210617, + "learning_rate": 3.5634438884072653e-06, + "loss": 0.4973, + "step": 14038 + }, + { + "epoch": 2.19, + "grad_norm": 17.731892454006914, + "learning_rate": 3.5621533530603024e-06, + "loss": 0.5157, + "step": 14039 + }, + { + "epoch": 2.19, + "grad_norm": 24.45232979270624, + "learning_rate": 3.560863000801932e-06, + "loss": 0.4275, + "step": 14040 + }, + { + "epoch": 2.19, + "grad_norm": 21.361726270071014, + "learning_rate": 3.559572831668855e-06, + "loss": 0.4794, + "step": 14041 + }, + { + "epoch": 2.19, + "grad_norm": 20.57647011807346, + "learning_rate": 3.558282845697758e-06, + "loss": 0.4836, + "step": 14042 + }, + { + "epoch": 2.19, + "grad_norm": 16.13212645690718, + "learning_rate": 3.5569930429253263e-06, + "loss": 0.4892, + "step": 14043 + }, + { + "epoch": 2.19, + "grad_norm": 18.072692573341673, + "learning_rate": 3.5557034233882426e-06, + "loss": 0.4775, + "step": 14044 + }, + { + "epoch": 2.19, + "grad_norm": 
20.109924302234905, + "learning_rate": 3.5544139871231842e-06, + "loss": 0.4951, + "step": 14045 + }, + { + "epoch": 2.19, + "grad_norm": 26.79231211125581, + "learning_rate": 3.5531247341668253e-06, + "loss": 0.4937, + "step": 14046 + }, + { + "epoch": 2.19, + "grad_norm": 19.738250184690546, + "learning_rate": 3.551835664555827e-06, + "loss": 0.5187, + "step": 14047 + }, + { + "epoch": 2.19, + "grad_norm": 19.87518642146174, + "learning_rate": 3.5505467783268465e-06, + "loss": 0.3845, + "step": 14048 + }, + { + "epoch": 2.19, + "grad_norm": 17.32296510824594, + "learning_rate": 3.549258075516544e-06, + "loss": 0.4773, + "step": 14049 + }, + { + "epoch": 2.19, + "grad_norm": 24.356241592345082, + "learning_rate": 3.54796955616157e-06, + "loss": 0.559, + "step": 14050 + }, + { + "epoch": 2.19, + "grad_norm": 15.505514144658026, + "learning_rate": 3.546681220298569e-06, + "loss": 0.4726, + "step": 14051 + }, + { + "epoch": 2.19, + "grad_norm": 27.18552797005648, + "learning_rate": 3.545393067964176e-06, + "loss": 0.4635, + "step": 14052 + }, + { + "epoch": 2.2, + "grad_norm": 18.11100749671277, + "learning_rate": 3.5441050991950264e-06, + "loss": 0.4708, + "step": 14053 + }, + { + "epoch": 2.2, + "grad_norm": 17.290062820689727, + "learning_rate": 3.5428173140277523e-06, + "loss": 0.4224, + "step": 14054 + }, + { + "epoch": 2.2, + "grad_norm": 20.972685798429524, + "learning_rate": 3.54152971249898e-06, + "loss": 0.4253, + "step": 14055 + }, + { + "epoch": 2.2, + "grad_norm": 19.22771292387033, + "learning_rate": 3.5402422946453242e-06, + "loss": 0.4478, + "step": 14056 + }, + { + "epoch": 2.2, + "grad_norm": 19.75963982291962, + "learning_rate": 3.5389550605033963e-06, + "loss": 0.4764, + "step": 14057 + }, + { + "epoch": 2.2, + "grad_norm": 19.94142548189347, + "learning_rate": 3.537668010109805e-06, + "loss": 0.4443, + "step": 14058 + }, + { + "epoch": 2.2, + "grad_norm": 21.8522734309868, + "learning_rate": 3.5363811435011598e-06, + "loss": 0.4693, + "step": 14059 + }, + { + "epoch": 2.2, + "grad_norm": 19.312032718622437, + "learning_rate": 3.53509446071405e-06, + "loss": 0.4541, + "step": 14060 + }, + { + "epoch": 2.2, + "grad_norm": 40.733736502176065, + "learning_rate": 3.5338079617850762e-06, + "loss": 0.5451, + "step": 14061 + }, + { + "epoch": 2.2, + "grad_norm": 15.812510162470874, + "learning_rate": 3.5325216467508172e-06, + "loss": 0.4658, + "step": 14062 + }, + { + "epoch": 2.2, + "grad_norm": 26.776922491698524, + "learning_rate": 3.5312355156478616e-06, + "loss": 0.5021, + "step": 14063 + }, + { + "epoch": 2.2, + "grad_norm": 22.546906582921906, + "learning_rate": 3.5299495685127816e-06, + "loss": 0.5025, + "step": 14064 + }, + { + "epoch": 2.2, + "grad_norm": 20.470269510054994, + "learning_rate": 3.528663805382151e-06, + "loss": 0.4629, + "step": 14065 + }, + { + "epoch": 2.2, + "grad_norm": 17.87054624667286, + "learning_rate": 3.5273782262925392e-06, + "loss": 0.4486, + "step": 14066 + }, + { + "epoch": 2.2, + "grad_norm": 18.641677316730252, + "learning_rate": 3.5260928312805042e-06, + "loss": 0.4712, + "step": 14067 + }, + { + "epoch": 2.2, + "grad_norm": 15.922997956361876, + "learning_rate": 3.5248076203825987e-06, + "loss": 0.4194, + "step": 14068 + }, + { + "epoch": 2.2, + "grad_norm": 21.050922270157553, + "learning_rate": 3.5235225936353767e-06, + "loss": 0.4295, + "step": 14069 + }, + { + "epoch": 2.2, + "grad_norm": 21.062036976125352, + "learning_rate": 3.522237751075387e-06, + "loss": 0.5112, + "step": 14070 + }, + { + "epoch": 2.2, + "grad_norm": 
18.444524533294278, + "learning_rate": 3.5209530927391632e-06, + "loss": 0.4451, + "step": 14071 + }, + { + "epoch": 2.2, + "grad_norm": 25.213072462088874, + "learning_rate": 3.5196686186632457e-06, + "loss": 0.4932, + "step": 14072 + }, + { + "epoch": 2.2, + "grad_norm": 30.96334830925032, + "learning_rate": 3.518384328884159e-06, + "loss": 0.4499, + "step": 14073 + }, + { + "epoch": 2.2, + "grad_norm": 18.86835308699928, + "learning_rate": 3.517100223438431e-06, + "loss": 0.5184, + "step": 14074 + }, + { + "epoch": 2.2, + "grad_norm": 19.84810799443279, + "learning_rate": 3.5158163023625825e-06, + "loss": 0.5209, + "step": 14075 + }, + { + "epoch": 2.2, + "grad_norm": 11.432368447763544, + "learning_rate": 3.514532565693126e-06, + "loss": 0.4604, + "step": 14076 + }, + { + "epoch": 2.2, + "grad_norm": 17.796697835065537, + "learning_rate": 3.513249013466565e-06, + "loss": 0.5287, + "step": 14077 + }, + { + "epoch": 2.2, + "grad_norm": 23.644830117321195, + "learning_rate": 3.5119656457194086e-06, + "loss": 0.5124, + "step": 14078 + }, + { + "epoch": 2.2, + "grad_norm": 19.887887654731266, + "learning_rate": 3.510682462488153e-06, + "loss": 0.4844, + "step": 14079 + }, + { + "epoch": 2.2, + "grad_norm": 27.712158869350212, + "learning_rate": 3.5093994638093e-06, + "loss": 0.4934, + "step": 14080 + }, + { + "epoch": 2.2, + "grad_norm": 18.37520570542549, + "learning_rate": 3.5081166497193208e-06, + "loss": 0.4673, + "step": 14081 + }, + { + "epoch": 2.2, + "grad_norm": 20.733917116285223, + "learning_rate": 3.506834020254708e-06, + "loss": 0.4224, + "step": 14082 + }, + { + "epoch": 2.2, + "grad_norm": 28.46560013468761, + "learning_rate": 3.5055515754519363e-06, + "loss": 0.5899, + "step": 14083 + }, + { + "epoch": 2.2, + "grad_norm": 16.0879471205812, + "learning_rate": 3.504269315347483e-06, + "loss": 0.4107, + "step": 14084 + }, + { + "epoch": 2.2, + "grad_norm": 23.056191021463018, + "learning_rate": 3.50298723997781e-06, + "loss": 0.5308, + "step": 14085 + }, + { + "epoch": 2.2, + "grad_norm": 22.03328130569702, + "learning_rate": 3.501705349379375e-06, + "loss": 0.5034, + "step": 14086 + }, + { + "epoch": 2.2, + "grad_norm": 14.221858021841893, + "learning_rate": 3.5004236435886395e-06, + "loss": 0.4313, + "step": 14087 + }, + { + "epoch": 2.2, + "grad_norm": 28.614419932964108, + "learning_rate": 3.4991421226420563e-06, + "loss": 0.5076, + "step": 14088 + }, + { + "epoch": 2.2, + "grad_norm": 18.563292301814137, + "learning_rate": 3.497860786576065e-06, + "loss": 0.4621, + "step": 14089 + }, + { + "epoch": 2.2, + "grad_norm": 18.712796202145565, + "learning_rate": 3.496579635427113e-06, + "loss": 0.5354, + "step": 14090 + }, + { + "epoch": 2.2, + "grad_norm": 19.276930815999908, + "learning_rate": 3.4952986692316283e-06, + "loss": 0.4711, + "step": 14091 + }, + { + "epoch": 2.2, + "grad_norm": 22.864254241709762, + "learning_rate": 3.4940178880260446e-06, + "loss": 0.4399, + "step": 14092 + }, + { + "epoch": 2.2, + "grad_norm": 17.459176633574064, + "learning_rate": 3.4927372918467893e-06, + "loss": 0.4139, + "step": 14093 + }, + { + "epoch": 2.2, + "grad_norm": 17.626736464080295, + "learning_rate": 3.491456880730275e-06, + "loss": 0.4625, + "step": 14094 + }, + { + "epoch": 2.2, + "grad_norm": 15.652212032909171, + "learning_rate": 3.4901766547129234e-06, + "loss": 0.3932, + "step": 14095 + }, + { + "epoch": 2.2, + "grad_norm": 21.399589499208492, + "learning_rate": 3.4888966138311363e-06, + "loss": 0.4526, + "step": 14096 + }, + { + "epoch": 2.2, + "grad_norm": 
23.10224476859147, + "learning_rate": 3.487616758121324e-06, + "loss": 0.501, + "step": 14097 + }, + { + "epoch": 2.2, + "grad_norm": 26.427406652385116, + "learning_rate": 3.4863370876198777e-06, + "loss": 0.4978, + "step": 14098 + }, + { + "epoch": 2.2, + "grad_norm": 15.963691570911946, + "learning_rate": 3.4850576023631942e-06, + "loss": 0.4076, + "step": 14099 + }, + { + "epoch": 2.2, + "grad_norm": 14.642865323550817, + "learning_rate": 3.483778302387665e-06, + "loss": 0.4195, + "step": 14100 + }, + { + "epoch": 2.2, + "grad_norm": 22.715794370707034, + "learning_rate": 3.4824991877296687e-06, + "loss": 0.5094, + "step": 14101 + }, + { + "epoch": 2.2, + "grad_norm": 19.806229377481852, + "learning_rate": 3.4812202584255805e-06, + "loss": 0.5296, + "step": 14102 + }, + { + "epoch": 2.2, + "grad_norm": 15.787694634893695, + "learning_rate": 3.4799415145117743e-06, + "loss": 0.5387, + "step": 14103 + }, + { + "epoch": 2.2, + "grad_norm": 16.17787377227078, + "learning_rate": 3.4786629560246166e-06, + "loss": 0.4788, + "step": 14104 + }, + { + "epoch": 2.2, + "grad_norm": 19.324468011522743, + "learning_rate": 3.477384583000477e-06, + "loss": 0.4665, + "step": 14105 + }, + { + "epoch": 2.2, + "grad_norm": 17.87162415806508, + "learning_rate": 3.4761063954756967e-06, + "loss": 0.4935, + "step": 14106 + }, + { + "epoch": 2.2, + "grad_norm": 16.881506885263878, + "learning_rate": 3.4748283934866354e-06, + "loss": 0.4713, + "step": 14107 + }, + { + "epoch": 2.2, + "grad_norm": 18.943740553383154, + "learning_rate": 3.4735505770696365e-06, + "loss": 0.5316, + "step": 14108 + }, + { + "epoch": 2.2, + "grad_norm": 20.812310162099454, + "learning_rate": 3.472272946261045e-06, + "loss": 0.4961, + "step": 14109 + }, + { + "epoch": 2.2, + "grad_norm": 18.55296125191944, + "learning_rate": 3.4709955010971928e-06, + "loss": 0.4191, + "step": 14110 + }, + { + "epoch": 2.2, + "grad_norm": 19.224162283638634, + "learning_rate": 3.4697182416144047e-06, + "loss": 0.4456, + "step": 14111 + }, + { + "epoch": 2.2, + "grad_norm": 21.882020101803512, + "learning_rate": 3.4684411678490104e-06, + "loss": 0.4243, + "step": 14112 + }, + { + "epoch": 2.2, + "grad_norm": 24.784082809121216, + "learning_rate": 3.4671642798373316e-06, + "loss": 0.5193, + "step": 14113 + }, + { + "epoch": 2.2, + "grad_norm": 19.32385857737832, + "learning_rate": 3.4658875776156763e-06, + "loss": 0.4954, + "step": 14114 + }, + { + "epoch": 2.2, + "grad_norm": 19.0715255234308, + "learning_rate": 3.4646110612203586e-06, + "loss": 0.3921, + "step": 14115 + }, + { + "epoch": 2.2, + "grad_norm": 18.061098432941833, + "learning_rate": 3.4633347306876763e-06, + "loss": 0.4915, + "step": 14116 + }, + { + "epoch": 2.21, + "grad_norm": 15.773273701924971, + "learning_rate": 3.4620585860539303e-06, + "loss": 0.4743, + "step": 14117 + }, + { + "epoch": 2.21, + "grad_norm": 34.74314170763986, + "learning_rate": 3.460782627355417e-06, + "loss": 0.4791, + "step": 14118 + }, + { + "epoch": 2.21, + "grad_norm": 14.900922653014524, + "learning_rate": 3.4595068546284207e-06, + "loss": 0.4289, + "step": 14119 + }, + { + "epoch": 2.21, + "grad_norm": 16.947930371650763, + "learning_rate": 3.458231267909219e-06, + "loss": 0.4502, + "step": 14120 + }, + { + "epoch": 2.21, + "grad_norm": 18.370486386702545, + "learning_rate": 3.4569558672340943e-06, + "loss": 0.4923, + "step": 14121 + }, + { + "epoch": 2.21, + "grad_norm": 22.91746958684292, + "learning_rate": 3.455680652639319e-06, + "loss": 0.4596, + "step": 14122 + }, + { + "epoch": 2.21, + "grad_norm": 
25.278477306884255, + "learning_rate": 3.4544056241611556e-06, + "loss": 0.5325, + "step": 14123 + }, + { + "epoch": 2.21, + "grad_norm": 24.186439899382407, + "learning_rate": 3.4531307818358705e-06, + "loss": 0.468, + "step": 14124 + }, + { + "epoch": 2.21, + "grad_norm": 18.243319270050254, + "learning_rate": 3.451856125699713e-06, + "loss": 0.4905, + "step": 14125 + }, + { + "epoch": 2.21, + "grad_norm": 16.746459396353284, + "learning_rate": 3.4505816557889393e-06, + "loss": 0.4814, + "step": 14126 + }, + { + "epoch": 2.21, + "grad_norm": 22.467693026117686, + "learning_rate": 3.4493073721397895e-06, + "loss": 0.5108, + "step": 14127 + }, + { + "epoch": 2.21, + "grad_norm": 14.660902901563624, + "learning_rate": 3.4480332747885047e-06, + "loss": 0.4562, + "step": 14128 + }, + { + "epoch": 2.21, + "grad_norm": 24.14380614360332, + "learning_rate": 3.446759363771326e-06, + "loss": 0.4643, + "step": 14129 + }, + { + "epoch": 2.21, + "grad_norm": 14.309130709073447, + "learning_rate": 3.445485639124476e-06, + "loss": 0.4471, + "step": 14130 + }, + { + "epoch": 2.21, + "grad_norm": 23.896155444501538, + "learning_rate": 3.444212100884177e-06, + "loss": 0.4594, + "step": 14131 + }, + { + "epoch": 2.21, + "grad_norm": 17.981652467205958, + "learning_rate": 3.4429387490866506e-06, + "loss": 0.5047, + "step": 14132 + }, + { + "epoch": 2.21, + "grad_norm": 19.197004302292797, + "learning_rate": 3.4416655837681113e-06, + "loss": 0.4436, + "step": 14133 + }, + { + "epoch": 2.21, + "grad_norm": 21.28478034953453, + "learning_rate": 3.4403926049647684e-06, + "loss": 0.4566, + "step": 14134 + }, + { + "epoch": 2.21, + "grad_norm": 32.408551509154584, + "learning_rate": 3.4391198127128232e-06, + "loss": 0.5468, + "step": 14135 + }, + { + "epoch": 2.21, + "grad_norm": 18.636295686396245, + "learning_rate": 3.4378472070484704e-06, + "loss": 0.462, + "step": 14136 + }, + { + "epoch": 2.21, + "grad_norm": 20.610298432492023, + "learning_rate": 3.4365747880079027e-06, + "loss": 0.4794, + "step": 14137 + }, + { + "epoch": 2.21, + "grad_norm": 32.36538118814052, + "learning_rate": 3.435302555627312e-06, + "loss": 0.511, + "step": 14138 + }, + { + "epoch": 2.21, + "grad_norm": 28.38483892126981, + "learning_rate": 3.4340305099428774e-06, + "loss": 0.4952, + "step": 14139 + }, + { + "epoch": 2.21, + "grad_norm": 22.034427193231025, + "learning_rate": 3.4327586509907695e-06, + "loss": 0.4728, + "step": 14140 + }, + { + "epoch": 2.21, + "grad_norm": 20.309065176248062, + "learning_rate": 3.431486978807165e-06, + "loss": 0.4917, + "step": 14141 + }, + { + "epoch": 2.21, + "grad_norm": 22.13643800481047, + "learning_rate": 3.4302154934282272e-06, + "loss": 0.4239, + "step": 14142 + }, + { + "epoch": 2.21, + "grad_norm": 21.289588030006954, + "learning_rate": 3.4289441948901214e-06, + "loss": 0.5347, + "step": 14143 + }, + { + "epoch": 2.21, + "grad_norm": 27.630478786016152, + "learning_rate": 3.4276730832289997e-06, + "loss": 0.541, + "step": 14144 + }, + { + "epoch": 2.21, + "grad_norm": 21.937499302404888, + "learning_rate": 3.4264021584810058e-06, + "loss": 0.4982, + "step": 14145 + }, + { + "epoch": 2.21, + "grad_norm": 16.395984152637848, + "learning_rate": 3.4251314206822894e-06, + "loss": 0.4164, + "step": 14146 + }, + { + "epoch": 2.21, + "grad_norm": 26.127649796864702, + "learning_rate": 3.4238608698689933e-06, + "loss": 0.4489, + "step": 14147 + }, + { + "epoch": 2.21, + "grad_norm": 22.792726029359738, + "learning_rate": 3.4225905060772436e-06, + "loss": 0.5104, + "step": 14148 + }, + { + "epoch": 
2.21, + "grad_norm": 17.616790053119878, + "learning_rate": 3.421320329343175e-06, + "loss": 0.4441, + "step": 14149 + }, + { + "epoch": 2.21, + "grad_norm": 32.529334857498206, + "learning_rate": 3.420050339702905e-06, + "loss": 0.4474, + "step": 14150 + }, + { + "epoch": 2.21, + "grad_norm": 31.246698348868602, + "learning_rate": 3.4187805371925575e-06, + "loss": 0.5011, + "step": 14151 + }, + { + "epoch": 2.21, + "grad_norm": 15.64517496239613, + "learning_rate": 3.4175109218482383e-06, + "loss": 0.4612, + "step": 14152 + }, + { + "epoch": 2.21, + "grad_norm": 18.80270826449453, + "learning_rate": 3.4162414937060584e-06, + "loss": 0.4647, + "step": 14153 + }, + { + "epoch": 2.21, + "grad_norm": 23.96895271150894, + "learning_rate": 3.414972252802121e-06, + "loss": 0.4539, + "step": 14154 + }, + { + "epoch": 2.21, + "grad_norm": 23.328766394329747, + "learning_rate": 3.413703199172519e-06, + "loss": 0.4923, + "step": 14155 + }, + { + "epoch": 2.21, + "grad_norm": 20.68504558291149, + "learning_rate": 3.4124343328533473e-06, + "loss": 0.5028, + "step": 14156 + }, + { + "epoch": 2.21, + "grad_norm": 25.70867271556117, + "learning_rate": 3.411165653880686e-06, + "loss": 0.4463, + "step": 14157 + }, + { + "epoch": 2.21, + "grad_norm": 23.55297796394052, + "learning_rate": 3.40989716229062e-06, + "loss": 0.5025, + "step": 14158 + }, + { + "epoch": 2.21, + "grad_norm": 17.30916769874984, + "learning_rate": 3.408628858119226e-06, + "loss": 0.4585, + "step": 14159 + }, + { + "epoch": 2.21, + "grad_norm": 17.766876085226116, + "learning_rate": 3.4073607414025723e-06, + "loss": 0.4396, + "step": 14160 + }, + { + "epoch": 2.21, + "grad_norm": 16.99950692394386, + "learning_rate": 3.406092812176719e-06, + "loss": 0.4851, + "step": 14161 + }, + { + "epoch": 2.21, + "grad_norm": 17.813836869884632, + "learning_rate": 3.404825070477729e-06, + "loss": 0.4791, + "step": 14162 + }, + { + "epoch": 2.21, + "grad_norm": 20.32462966381847, + "learning_rate": 3.403557516341658e-06, + "loss": 0.504, + "step": 14163 + }, + { + "epoch": 2.21, + "grad_norm": 20.55743554542306, + "learning_rate": 3.4022901498045536e-06, + "loss": 0.4485, + "step": 14164 + }, + { + "epoch": 2.21, + "grad_norm": 27.66898178751445, + "learning_rate": 3.4010229709024545e-06, + "loss": 0.462, + "step": 14165 + }, + { + "epoch": 2.21, + "grad_norm": 27.434941073638207, + "learning_rate": 3.399755979671401e-06, + "loss": 0.4548, + "step": 14166 + }, + { + "epoch": 2.21, + "grad_norm": 19.783287359241562, + "learning_rate": 3.398489176147427e-06, + "loss": 0.4205, + "step": 14167 + }, + { + "epoch": 2.21, + "grad_norm": 23.71350405721002, + "learning_rate": 3.3972225603665653e-06, + "loss": 0.4487, + "step": 14168 + }, + { + "epoch": 2.21, + "grad_norm": 25.63923409250133, + "learning_rate": 3.395956132364825e-06, + "loss": 0.5299, + "step": 14169 + }, + { + "epoch": 2.21, + "grad_norm": 23.060362616664143, + "learning_rate": 3.394689892178229e-06, + "loss": 0.4602, + "step": 14170 + }, + { + "epoch": 2.21, + "grad_norm": 14.928723529214714, + "learning_rate": 3.3934238398427886e-06, + "loss": 0.4494, + "step": 14171 + }, + { + "epoch": 2.21, + "grad_norm": 19.48356566778734, + "learning_rate": 3.3921579753945132e-06, + "loss": 0.4911, + "step": 14172 + }, + { + "epoch": 2.21, + "grad_norm": 23.46514293542787, + "learning_rate": 3.3908922988693995e-06, + "loss": 0.4977, + "step": 14173 + }, + { + "epoch": 2.21, + "grad_norm": 20.995072645330605, + "learning_rate": 3.389626810303439e-06, + "loss": 0.4918, + "step": 14174 + }, + { + "epoch": 
2.21, + "grad_norm": 12.043434922224902, + "learning_rate": 3.388361509732625e-06, + "loss": 0.3946, + "step": 14175 + }, + { + "epoch": 2.21, + "grad_norm": 25.518325539529023, + "learning_rate": 3.387096397192945e-06, + "loss": 0.5225, + "step": 14176 + }, + { + "epoch": 2.21, + "grad_norm": 20.24785047840458, + "learning_rate": 3.3858314727203724e-06, + "loss": 0.4514, + "step": 14177 + }, + { + "epoch": 2.21, + "grad_norm": 19.376635347076668, + "learning_rate": 3.384566736350886e-06, + "loss": 0.4733, + "step": 14178 + }, + { + "epoch": 2.21, + "grad_norm": 18.212229146527353, + "learning_rate": 3.3833021881204487e-06, + "loss": 0.4301, + "step": 14179 + }, + { + "epoch": 2.21, + "grad_norm": 23.13177084131551, + "learning_rate": 3.3820378280650267e-06, + "loss": 0.5764, + "step": 14180 + }, + { + "epoch": 2.22, + "grad_norm": 14.98748482006631, + "learning_rate": 3.3807736562205805e-06, + "loss": 0.4465, + "step": 14181 + }, + { + "epoch": 2.22, + "grad_norm": 18.408600330064246, + "learning_rate": 3.3795096726230558e-06, + "loss": 0.4226, + "step": 14182 + }, + { + "epoch": 2.22, + "grad_norm": 19.44167334212344, + "learning_rate": 3.3782458773084072e-06, + "loss": 0.4467, + "step": 14183 + }, + { + "epoch": 2.22, + "grad_norm": 21.3822110439587, + "learning_rate": 3.3769822703125686e-06, + "loss": 0.4714, + "step": 14184 + }, + { + "epoch": 2.22, + "grad_norm": 21.633649028751396, + "learning_rate": 3.3757188516714833e-06, + "loss": 0.5298, + "step": 14185 + }, + { + "epoch": 2.22, + "grad_norm": 17.040769237496903, + "learning_rate": 3.3744556214210756e-06, + "loss": 0.5229, + "step": 14186 + }, + { + "epoch": 2.22, + "grad_norm": 16.53573065318118, + "learning_rate": 3.3731925795972743e-06, + "loss": 0.4111, + "step": 14187 + }, + { + "epoch": 2.22, + "grad_norm": 17.835639341237453, + "learning_rate": 3.371929726236002e-06, + "loss": 0.4892, + "step": 14188 + }, + { + "epoch": 2.22, + "grad_norm": 18.292185956232, + "learning_rate": 3.370667061373172e-06, + "loss": 0.4838, + "step": 14189 + }, + { + "epoch": 2.22, + "grad_norm": 20.642229667564486, + "learning_rate": 3.369404585044689e-06, + "loss": 0.4707, + "step": 14190 + }, + { + "epoch": 2.22, + "grad_norm": 16.12206559686894, + "learning_rate": 3.3681422972864598e-06, + "loss": 0.4677, + "step": 14191 + }, + { + "epoch": 2.22, + "grad_norm": 25.649980219197595, + "learning_rate": 3.366880198134386e-06, + "loss": 0.5158, + "step": 14192 + }, + { + "epoch": 2.22, + "grad_norm": 25.220100786178264, + "learning_rate": 3.3656182876243636e-06, + "loss": 0.4718, + "step": 14193 + }, + { + "epoch": 2.22, + "grad_norm": 29.121310873264775, + "learning_rate": 3.3643565657922704e-06, + "loss": 0.5004, + "step": 14194 + }, + { + "epoch": 2.22, + "grad_norm": 22.303880807214757, + "learning_rate": 3.3630950326739954e-06, + "loss": 0.4644, + "step": 14195 + }, + { + "epoch": 2.22, + "grad_norm": 23.889365711292687, + "learning_rate": 3.3618336883054146e-06, + "loss": 0.4265, + "step": 14196 + }, + { + "epoch": 2.22, + "grad_norm": 18.29784727567313, + "learning_rate": 3.360572532722404e-06, + "loss": 0.4522, + "step": 14197 + }, + { + "epoch": 2.22, + "grad_norm": 15.26974162583579, + "learning_rate": 3.359311565960827e-06, + "loss": 0.3747, + "step": 14198 + }, + { + "epoch": 2.22, + "grad_norm": 28.503402181682258, + "learning_rate": 3.3580507880565406e-06, + "loss": 0.5239, + "step": 14199 + }, + { + "epoch": 2.22, + "grad_norm": 31.36989860932342, + "learning_rate": 3.3567901990454043e-06, + "loss": 0.5603, + "step": 14200 + }, + { 
+ "epoch": 2.22, + "grad_norm": 21.294901828499274, + "learning_rate": 3.355529798963272e-06, + "loss": 0.498, + "step": 14201 + }, + { + "epoch": 2.22, + "grad_norm": 21.89706129689245, + "learning_rate": 3.354269587845982e-06, + "loss": 0.4806, + "step": 14202 + }, + { + "epoch": 2.22, + "grad_norm": 16.72062211116757, + "learning_rate": 3.3530095657293803e-06, + "loss": 0.4153, + "step": 14203 + }, + { + "epoch": 2.22, + "grad_norm": 25.057245536089876, + "learning_rate": 3.3517497326492952e-06, + "loss": 0.5043, + "step": 14204 + }, + { + "epoch": 2.22, + "grad_norm": 18.124092487109326, + "learning_rate": 3.350490088641558e-06, + "loss": 0.4259, + "step": 14205 + }, + { + "epoch": 2.22, + "grad_norm": 17.50410646427527, + "learning_rate": 3.3492306337419967e-06, + "loss": 0.4986, + "step": 14206 + }, + { + "epoch": 2.22, + "grad_norm": 19.564168096832738, + "learning_rate": 3.3479713679864223e-06, + "loss": 0.4927, + "step": 14207 + }, + { + "epoch": 2.22, + "grad_norm": 24.4743156375904, + "learning_rate": 3.346712291410654e-06, + "loss": 0.5083, + "step": 14208 + }, + { + "epoch": 2.22, + "grad_norm": 20.03059779145361, + "learning_rate": 3.345453404050493e-06, + "loss": 0.4428, + "step": 14209 + }, + { + "epoch": 2.22, + "grad_norm": 18.726774609971446, + "learning_rate": 3.344194705941748e-06, + "loss": 0.4248, + "step": 14210 + }, + { + "epoch": 2.22, + "grad_norm": 19.253499794708, + "learning_rate": 3.342936197120208e-06, + "loss": 0.4438, + "step": 14211 + }, + { + "epoch": 2.22, + "grad_norm": 18.849995816354262, + "learning_rate": 3.341677877621672e-06, + "loss": 0.4895, + "step": 14212 + }, + { + "epoch": 2.22, + "grad_norm": 14.839533799346864, + "learning_rate": 3.340419747481919e-06, + "loss": 0.4433, + "step": 14213 + }, + { + "epoch": 2.22, + "grad_norm": 24.960924142884693, + "learning_rate": 3.3391618067367347e-06, + "loss": 0.4278, + "step": 14214 + }, + { + "epoch": 2.22, + "grad_norm": 25.56766996589953, + "learning_rate": 3.33790405542189e-06, + "loss": 0.5227, + "step": 14215 + }, + { + "epoch": 2.22, + "grad_norm": 20.760033100704494, + "learning_rate": 3.3366464935731566e-06, + "loss": 0.4874, + "step": 14216 + }, + { + "epoch": 2.22, + "grad_norm": 14.74126868121376, + "learning_rate": 3.3353891212263023e-06, + "loss": 0.4802, + "step": 14217 + }, + { + "epoch": 2.22, + "grad_norm": 26.367003042635503, + "learning_rate": 3.3341319384170824e-06, + "loss": 0.5098, + "step": 14218 + }, + { + "epoch": 2.22, + "grad_norm": 19.144786309187655, + "learning_rate": 3.332874945181248e-06, + "loss": 0.4565, + "step": 14219 + }, + { + "epoch": 2.22, + "grad_norm": 16.01811562315315, + "learning_rate": 3.33161814155455e-06, + "loss": 0.452, + "step": 14220 + }, + { + "epoch": 2.22, + "grad_norm": 24.598941370793774, + "learning_rate": 3.330361527572731e-06, + "loss": 0.4083, + "step": 14221 + }, + { + "epoch": 2.22, + "grad_norm": 17.59739929756781, + "learning_rate": 3.3291051032715317e-06, + "loss": 0.4356, + "step": 14222 + }, + { + "epoch": 2.22, + "grad_norm": 21.428833061356688, + "learning_rate": 3.3278488686866807e-06, + "loss": 0.4809, + "step": 14223 + }, + { + "epoch": 2.22, + "grad_norm": 19.427374598967635, + "learning_rate": 3.3265928238539015e-06, + "loss": 0.4905, + "step": 14224 + }, + { + "epoch": 2.22, + "grad_norm": 16.67122250775418, + "learning_rate": 3.3253369688089186e-06, + "loss": 0.4164, + "step": 14225 + }, + { + "epoch": 2.22, + "grad_norm": 23.149478178291545, + "learning_rate": 3.324081303587451e-06, + "loss": 0.4805, + "step": 14226 + }, + 
{ + "epoch": 2.22, + "grad_norm": 14.9630818272006, + "learning_rate": 3.3228258282252056e-06, + "loss": 0.4986, + "step": 14227 + }, + { + "epoch": 2.22, + "grad_norm": 20.311415303319023, + "learning_rate": 3.321570542757885e-06, + "loss": 0.4531, + "step": 14228 + }, + { + "epoch": 2.22, + "grad_norm": 20.470234888394227, + "learning_rate": 3.320315447221191e-06, + "loss": 0.479, + "step": 14229 + }, + { + "epoch": 2.22, + "grad_norm": 26.985279540786987, + "learning_rate": 3.3190605416508182e-06, + "loss": 0.4606, + "step": 14230 + }, + { + "epoch": 2.22, + "grad_norm": 23.78908937762767, + "learning_rate": 3.3178058260824586e-06, + "loss": 0.4094, + "step": 14231 + }, + { + "epoch": 2.22, + "grad_norm": 21.78107048204239, + "learning_rate": 3.3165513005517925e-06, + "loss": 0.5466, + "step": 14232 + }, + { + "epoch": 2.22, + "grad_norm": 16.38778097645092, + "learning_rate": 3.3152969650944943e-06, + "loss": 0.4879, + "step": 14233 + }, + { + "epoch": 2.22, + "grad_norm": 21.175010316681853, + "learning_rate": 3.3140428197462406e-06, + "loss": 0.4802, + "step": 14234 + }, + { + "epoch": 2.22, + "grad_norm": 17.772871014297134, + "learning_rate": 3.312788864542701e-06, + "loss": 0.4472, + "step": 14235 + }, + { + "epoch": 2.22, + "grad_norm": 21.060031068760054, + "learning_rate": 3.3115350995195293e-06, + "loss": 0.4908, + "step": 14236 + }, + { + "epoch": 2.22, + "grad_norm": 29.84109969878593, + "learning_rate": 3.310281524712392e-06, + "loss": 0.4939, + "step": 14237 + }, + { + "epoch": 2.22, + "grad_norm": 19.69208608810965, + "learning_rate": 3.3090281401569302e-06, + "loss": 0.4536, + "step": 14238 + }, + { + "epoch": 2.22, + "grad_norm": 27.794362378113373, + "learning_rate": 3.307774945888799e-06, + "loss": 0.507, + "step": 14239 + }, + { + "epoch": 2.22, + "grad_norm": 17.428111228643306, + "learning_rate": 3.306521941943628e-06, + "loss": 0.4862, + "step": 14240 + }, + { + "epoch": 2.22, + "grad_norm": 14.649227753240204, + "learning_rate": 3.3052691283570593e-06, + "loss": 0.5169, + "step": 14241 + }, + { + "epoch": 2.22, + "grad_norm": 25.769626700522476, + "learning_rate": 3.304016505164722e-06, + "loss": 0.4817, + "step": 14242 + }, + { + "epoch": 2.22, + "grad_norm": 20.108087637774734, + "learning_rate": 3.3027640724022357e-06, + "loss": 0.4683, + "step": 14243 + }, + { + "epoch": 2.22, + "grad_norm": 17.233353210137388, + "learning_rate": 3.301511830105225e-06, + "loss": 0.4916, + "step": 14244 + }, + { + "epoch": 2.23, + "grad_norm": 25.25821851126149, + "learning_rate": 3.300259778309296e-06, + "loss": 0.5044, + "step": 14245 + }, + { + "epoch": 2.23, + "grad_norm": 16.10562831236159, + "learning_rate": 3.2990079170500587e-06, + "loss": 0.4736, + "step": 14246 + }, + { + "epoch": 2.23, + "grad_norm": 27.241259908897085, + "learning_rate": 3.297756246363121e-06, + "loss": 0.4797, + "step": 14247 + }, + { + "epoch": 2.23, + "grad_norm": 13.764458495389457, + "learning_rate": 3.2965047662840744e-06, + "loss": 0.4578, + "step": 14248 + }, + { + "epoch": 2.23, + "grad_norm": 21.533729231931737, + "learning_rate": 3.2952534768485066e-06, + "loss": 0.4996, + "step": 14249 + }, + { + "epoch": 2.23, + "grad_norm": 33.62612132149112, + "learning_rate": 3.294002378092008e-06, + "loss": 0.6179, + "step": 14250 + }, + { + "epoch": 2.23, + "grad_norm": 24.766559414729766, + "learning_rate": 3.292751470050164e-06, + "loss": 0.3969, + "step": 14251 + }, + { + "epoch": 2.23, + "grad_norm": 12.868531518816555, + "learning_rate": 3.291500752758543e-06, + "loss": 0.5056, + "step": 
14252 + }, + { + "epoch": 2.23, + "grad_norm": 19.674855912340163, + "learning_rate": 3.2902502262527124e-06, + "loss": 0.5254, + "step": 14253 + }, + { + "epoch": 2.23, + "grad_norm": 19.660867243506882, + "learning_rate": 3.288999890568242e-06, + "loss": 0.5089, + "step": 14254 + }, + { + "epoch": 2.23, + "grad_norm": 16.399274103197467, + "learning_rate": 3.287749745740687e-06, + "loss": 0.5188, + "step": 14255 + }, + { + "epoch": 2.23, + "grad_norm": 30.479011577077774, + "learning_rate": 3.2864997918056075e-06, + "loss": 0.5113, + "step": 14256 + }, + { + "epoch": 2.23, + "grad_norm": 25.150002384944067, + "learning_rate": 3.2852500287985455e-06, + "loss": 0.5246, + "step": 14257 + }, + { + "epoch": 2.23, + "grad_norm": 17.388451270724662, + "learning_rate": 3.284000456755042e-06, + "loss": 0.4819, + "step": 14258 + }, + { + "epoch": 2.23, + "grad_norm": 18.012601831596356, + "learning_rate": 3.282751075710637e-06, + "loss": 0.4771, + "step": 14259 + }, + { + "epoch": 2.23, + "grad_norm": 18.74239715452549, + "learning_rate": 3.2815018857008653e-06, + "loss": 0.41, + "step": 14260 + }, + { + "epoch": 2.23, + "grad_norm": 23.97333561576591, + "learning_rate": 3.2802528867612494e-06, + "loss": 0.5099, + "step": 14261 + }, + { + "epoch": 2.23, + "grad_norm": 24.139812240992367, + "learning_rate": 3.279004078927307e-06, + "loss": 0.4836, + "step": 14262 + }, + { + "epoch": 2.23, + "grad_norm": 19.145610888241855, + "learning_rate": 3.2777554622345563e-06, + "loss": 0.4683, + "step": 14263 + }, + { + "epoch": 2.23, + "grad_norm": 25.393912403621083, + "learning_rate": 3.2765070367185126e-06, + "loss": 0.5101, + "step": 14264 + }, + { + "epoch": 2.23, + "grad_norm": 16.908920665919428, + "learning_rate": 3.2752588024146714e-06, + "loss": 0.4527, + "step": 14265 + }, + { + "epoch": 2.23, + "grad_norm": 25.12345903330322, + "learning_rate": 3.274010759358539e-06, + "loss": 0.478, + "step": 14266 + }, + { + "epoch": 2.23, + "grad_norm": 15.874270747172954, + "learning_rate": 3.272762907585604e-06, + "loss": 0.456, + "step": 14267 + }, + { + "epoch": 2.23, + "grad_norm": 41.42134939908545, + "learning_rate": 3.271515247131356e-06, + "loss": 0.4637, + "step": 14268 + }, + { + "epoch": 2.23, + "grad_norm": 19.79649799246871, + "learning_rate": 3.270267778031281e-06, + "loss": 0.4652, + "step": 14269 + }, + { + "epoch": 2.23, + "grad_norm": 21.23659705366554, + "learning_rate": 3.2690205003208508e-06, + "loss": 0.4655, + "step": 14270 + }, + { + "epoch": 2.23, + "grad_norm": 21.757774548523162, + "learning_rate": 3.2677734140355434e-06, + "loss": 0.4932, + "step": 14271 + }, + { + "epoch": 2.23, + "grad_norm": 24.056692628958345, + "learning_rate": 3.2665265192108186e-06, + "loss": 0.4383, + "step": 14272 + }, + { + "epoch": 2.23, + "grad_norm": 17.407862603978565, + "learning_rate": 3.265279815882144e-06, + "loss": 0.5083, + "step": 14273 + }, + { + "epoch": 2.23, + "grad_norm": 24.034531624729695, + "learning_rate": 3.264033304084968e-06, + "loss": 0.5098, + "step": 14274 + }, + { + "epoch": 2.23, + "grad_norm": 23.019349531035484, + "learning_rate": 3.2627869838547454e-06, + "loss": 0.4443, + "step": 14275 + }, + { + "epoch": 2.23, + "grad_norm": 18.186384294640565, + "learning_rate": 3.2615408552269223e-06, + "loss": 0.4873, + "step": 14276 + }, + { + "epoch": 2.23, + "grad_norm": 16.194729800490645, + "learning_rate": 3.260294918236935e-06, + "loss": 0.4052, + "step": 14277 + }, + { + "epoch": 2.23, + "grad_norm": 20.94586027824207, + "learning_rate": 3.2590491729202146e-06, + "loss": 0.427, 
+ "step": 14278 + }, + { + "epoch": 2.23, + "grad_norm": 18.449611347066973, + "learning_rate": 3.257803619312193e-06, + "loss": 0.4804, + "step": 14279 + }, + { + "epoch": 2.23, + "grad_norm": 22.939002629026504, + "learning_rate": 3.256558257448292e-06, + "loss": 0.4719, + "step": 14280 + }, + { + "epoch": 2.23, + "grad_norm": 19.642706943152906, + "learning_rate": 3.255313087363936e-06, + "loss": 0.4459, + "step": 14281 + }, + { + "epoch": 2.23, + "grad_norm": 18.899193243596365, + "learning_rate": 3.2540681090945235e-06, + "loss": 0.5118, + "step": 14282 + }, + { + "epoch": 2.23, + "grad_norm": 19.39349957168287, + "learning_rate": 3.252823322675468e-06, + "loss": 0.4634, + "step": 14283 + }, + { + "epoch": 2.23, + "grad_norm": 21.177561836556563, + "learning_rate": 3.2515787281421697e-06, + "loss": 0.587, + "step": 14284 + }, + { + "epoch": 2.23, + "grad_norm": 15.530327032668758, + "learning_rate": 3.250334325530029e-06, + "loss": 0.4582, + "step": 14285 + }, + { + "epoch": 2.23, + "grad_norm": 21.160174986162176, + "learning_rate": 3.24909011487443e-06, + "loss": 0.4731, + "step": 14286 + }, + { + "epoch": 2.23, + "grad_norm": 23.226236104355927, + "learning_rate": 3.247846096210757e-06, + "loss": 0.5326, + "step": 14287 + }, + { + "epoch": 2.23, + "grad_norm": 20.96990783169642, + "learning_rate": 3.2466022695743905e-06, + "loss": 0.4769, + "step": 14288 + }, + { + "epoch": 2.23, + "grad_norm": 14.472697040948256, + "learning_rate": 3.2453586350007084e-06, + "loss": 0.4267, + "step": 14289 + }, + { + "epoch": 2.23, + "grad_norm": 18.966648976634477, + "learning_rate": 3.2441151925250724e-06, + "loss": 0.4255, + "step": 14290 + }, + { + "epoch": 2.23, + "grad_norm": 13.185181563092526, + "learning_rate": 3.2428719421828515e-06, + "loss": 0.4687, + "step": 14291 + }, + { + "epoch": 2.23, + "grad_norm": 24.446849352864483, + "learning_rate": 3.2416288840093955e-06, + "loss": 0.5577, + "step": 14292 + }, + { + "epoch": 2.23, + "grad_norm": 22.577778325834785, + "learning_rate": 3.2403860180400615e-06, + "loss": 0.4748, + "step": 14293 + }, + { + "epoch": 2.23, + "grad_norm": 27.08563303329335, + "learning_rate": 3.2391433443101984e-06, + "loss": 0.4926, + "step": 14294 + }, + { + "epoch": 2.23, + "grad_norm": 16.28430442522078, + "learning_rate": 3.237900862855139e-06, + "loss": 0.3785, + "step": 14295 + }, + { + "epoch": 2.23, + "grad_norm": 13.686519460034482, + "learning_rate": 3.236658573710227e-06, + "loss": 0.4874, + "step": 14296 + }, + { + "epoch": 2.23, + "grad_norm": 24.982863507525025, + "learning_rate": 3.2354164769107866e-06, + "loss": 0.5085, + "step": 14297 + }, + { + "epoch": 2.23, + "grad_norm": 23.35585327265751, + "learning_rate": 3.234174572492147e-06, + "loss": 0.4952, + "step": 14298 + }, + { + "epoch": 2.23, + "grad_norm": 19.697228652851315, + "learning_rate": 3.2329328604896217e-06, + "loss": 0.472, + "step": 14299 + }, + { + "epoch": 2.23, + "grad_norm": 18.83689424639408, + "learning_rate": 3.2316913409385263e-06, + "loss": 0.4262, + "step": 14300 + }, + { + "epoch": 2.23, + "grad_norm": 20.1018932877213, + "learning_rate": 3.2304500138741736e-06, + "loss": 0.5658, + "step": 14301 + }, + { + "epoch": 2.23, + "grad_norm": 24.114858241742233, + "learning_rate": 3.229208879331862e-06, + "loss": 0.4258, + "step": 14302 + }, + { + "epoch": 2.23, + "grad_norm": 26.834629937623834, + "learning_rate": 3.2279679373468866e-06, + "loss": 0.4812, + "step": 14303 + }, + { + "epoch": 2.23, + "grad_norm": 24.88807828150885, + "learning_rate": 3.2267271879545413e-06, + 
"loss": 0.5089, + "step": 14304 + }, + { + "epoch": 2.23, + "grad_norm": 17.1155344353335, + "learning_rate": 3.2254866311901125e-06, + "loss": 0.4876, + "step": 14305 + }, + { + "epoch": 2.23, + "grad_norm": 21.317829679443097, + "learning_rate": 3.224246267088884e-06, + "loss": 0.5531, + "step": 14306 + }, + { + "epoch": 2.23, + "grad_norm": 33.593595767886946, + "learning_rate": 3.2230060956861285e-06, + "loss": 0.5258, + "step": 14307 + }, + { + "epoch": 2.23, + "grad_norm": 15.675784659747722, + "learning_rate": 3.221766117017111e-06, + "loss": 0.4679, + "step": 14308 + }, + { + "epoch": 2.24, + "grad_norm": 17.130109254529824, + "learning_rate": 3.220526331117101e-06, + "loss": 0.4636, + "step": 14309 + }, + { + "epoch": 2.24, + "grad_norm": 19.189185045228566, + "learning_rate": 3.219286738021359e-06, + "loss": 0.4753, + "step": 14310 + }, + { + "epoch": 2.24, + "grad_norm": 23.383957187050587, + "learning_rate": 3.2180473377651355e-06, + "loss": 0.5106, + "step": 14311 + }, + { + "epoch": 2.24, + "grad_norm": 20.01716723519588, + "learning_rate": 3.216808130383675e-06, + "loss": 0.4941, + "step": 14312 + }, + { + "epoch": 2.24, + "grad_norm": 26.421769824909966, + "learning_rate": 3.2155691159122236e-06, + "loss": 0.5627, + "step": 14313 + }, + { + "epoch": 2.24, + "grad_norm": 21.824993583398598, + "learning_rate": 3.214330294386021e-06, + "loss": 0.5243, + "step": 14314 + }, + { + "epoch": 2.24, + "grad_norm": 21.072516955797546, + "learning_rate": 3.2130916658402956e-06, + "loss": 0.5274, + "step": 14315 + }, + { + "epoch": 2.24, + "grad_norm": 17.153106139095012, + "learning_rate": 3.211853230310269e-06, + "loss": 0.5096, + "step": 14316 + }, + { + "epoch": 2.24, + "grad_norm": 19.136361590087546, + "learning_rate": 3.2106149878311655e-06, + "loss": 0.3973, + "step": 14317 + }, + { + "epoch": 2.24, + "grad_norm": 24.237866439159284, + "learning_rate": 3.209376938438201e-06, + "loss": 0.4129, + "step": 14318 + }, + { + "epoch": 2.24, + "grad_norm": 18.565251571926947, + "learning_rate": 3.208139082166587e-06, + "loss": 0.4767, + "step": 14319 + }, + { + "epoch": 2.24, + "grad_norm": 22.440590835511284, + "learning_rate": 3.2069014190515237e-06, + "loss": 0.5808, + "step": 14320 + }, + { + "epoch": 2.24, + "grad_norm": 16.453265147866635, + "learning_rate": 3.205663949128207e-06, + "loss": 0.4778, + "step": 14321 + }, + { + "epoch": 2.24, + "grad_norm": 331.2600062478427, + "learning_rate": 3.204426672431834e-06, + "loss": 0.5467, + "step": 14322 + }, + { + "epoch": 2.24, + "grad_norm": 27.51695419872413, + "learning_rate": 3.2031895889975926e-06, + "loss": 0.4934, + "step": 14323 + }, + { + "epoch": 2.24, + "grad_norm": 18.623731021886798, + "learning_rate": 3.201952698860662e-06, + "loss": 0.5092, + "step": 14324 + }, + { + "epoch": 2.24, + "grad_norm": 19.77207562124895, + "learning_rate": 3.200716002056222e-06, + "loss": 0.496, + "step": 14325 + }, + { + "epoch": 2.24, + "grad_norm": 15.125458357699145, + "learning_rate": 3.199479498619439e-06, + "loss": 0.5078, + "step": 14326 + }, + { + "epoch": 2.24, + "grad_norm": 24.328589947618013, + "learning_rate": 3.1982431885854837e-06, + "loss": 0.457, + "step": 14327 + }, + { + "epoch": 2.24, + "grad_norm": 14.784039748250384, + "learning_rate": 3.19700707198951e-06, + "loss": 0.4783, + "step": 14328 + }, + { + "epoch": 2.24, + "grad_norm": 20.502971612606256, + "learning_rate": 3.1957711488666756e-06, + "loss": 0.4858, + "step": 14329 + }, + { + "epoch": 2.24, + "grad_norm": 25.170697397193248, + "learning_rate": 
3.1945354192521316e-06, + "loss": 0.47, + "step": 14330 + }, + { + "epoch": 2.24, + "grad_norm": 20.939671265055, + "learning_rate": 3.193299883181017e-06, + "loss": 0.4573, + "step": 14331 + }, + { + "epoch": 2.24, + "grad_norm": 18.847025219725495, + "learning_rate": 3.192064540688475e-06, + "loss": 0.5015, + "step": 14332 + }, + { + "epoch": 2.24, + "grad_norm": 14.79447557840037, + "learning_rate": 3.190829391809631e-06, + "loss": 0.4768, + "step": 14333 + }, + { + "epoch": 2.24, + "grad_norm": 11.246256180200069, + "learning_rate": 3.189594436579617e-06, + "loss": 0.3996, + "step": 14334 + }, + { + "epoch": 2.24, + "grad_norm": 20.952643798576595, + "learning_rate": 3.1883596750335554e-06, + "loss": 0.4828, + "step": 14335 + }, + { + "epoch": 2.24, + "grad_norm": 23.894220802905878, + "learning_rate": 3.1871251072065613e-06, + "loss": 0.4751, + "step": 14336 + }, + { + "epoch": 2.24, + "grad_norm": 22.197414232890242, + "learning_rate": 3.1858907331337397e-06, + "loss": 0.4944, + "step": 14337 + }, + { + "epoch": 2.24, + "grad_norm": 23.07762755501611, + "learning_rate": 3.1846565528501993e-06, + "loss": 0.4641, + "step": 14338 + }, + { + "epoch": 2.24, + "grad_norm": 24.98848517833969, + "learning_rate": 3.1834225663910436e-06, + "loss": 0.547, + "step": 14339 + }, + { + "epoch": 2.24, + "grad_norm": 18.141554250910612, + "learning_rate": 3.1821887737913613e-06, + "loss": 0.3794, + "step": 14340 + }, + { + "epoch": 2.24, + "grad_norm": 16.376573819594306, + "learning_rate": 3.18095517508624e-06, + "loss": 0.3943, + "step": 14341 + }, + { + "epoch": 2.24, + "grad_norm": 34.161243254792005, + "learning_rate": 3.1797217703107643e-06, + "loss": 0.5386, + "step": 14342 + }, + { + "epoch": 2.24, + "grad_norm": 31.969370179206262, + "learning_rate": 3.1784885595000114e-06, + "loss": 0.5642, + "step": 14343 + }, + { + "epoch": 2.24, + "grad_norm": 14.332285108628783, + "learning_rate": 3.177255542689056e-06, + "loss": 0.5116, + "step": 14344 + }, + { + "epoch": 2.24, + "grad_norm": 15.646392088672556, + "learning_rate": 3.1760227199129623e-06, + "loss": 0.4162, + "step": 14345 + }, + { + "epoch": 2.24, + "grad_norm": 22.409295328701088, + "learning_rate": 3.1747900912067877e-06, + "loss": 0.451, + "step": 14346 + }, + { + "epoch": 2.24, + "grad_norm": 31.4810930893793, + "learning_rate": 3.17355765660559e-06, + "loss": 0.5056, + "step": 14347 + }, + { + "epoch": 2.24, + "grad_norm": 23.86838834835201, + "learning_rate": 3.172325416144423e-06, + "loss": 0.4957, + "step": 14348 + }, + { + "epoch": 2.24, + "grad_norm": 20.439822092020375, + "learning_rate": 3.171093369858322e-06, + "loss": 0.4592, + "step": 14349 + }, + { + "epoch": 2.24, + "grad_norm": 13.048567435999786, + "learning_rate": 3.1698615177823357e-06, + "loss": 0.4631, + "step": 14350 + }, + { + "epoch": 2.24, + "grad_norm": 18.6653666146451, + "learning_rate": 3.1686298599514887e-06, + "loss": 0.5, + "step": 14351 + }, + { + "epoch": 2.24, + "grad_norm": 19.060286325369788, + "learning_rate": 3.167398396400816e-06, + "loss": 0.4454, + "step": 14352 + }, + { + "epoch": 2.24, + "grad_norm": 20.511651940746397, + "learning_rate": 3.1661671271653317e-06, + "loss": 0.4407, + "step": 14353 + }, + { + "epoch": 2.24, + "grad_norm": 20.465336946040818, + "learning_rate": 3.1649360522800578e-06, + "loss": 0.509, + "step": 14354 + }, + { + "epoch": 2.24, + "grad_norm": 22.67990111678465, + "learning_rate": 3.163705171780006e-06, + "loss": 0.454, + "step": 14355 + }, + { + "epoch": 2.24, + "grad_norm": 14.015911591618107, + "learning_rate": 
3.1624744857001776e-06, + "loss": 0.4932, + "step": 14356 + }, + { + "epoch": 2.24, + "grad_norm": 28.546697440375567, + "learning_rate": 3.161243994075578e-06, + "loss": 0.4614, + "step": 14357 + }, + { + "epoch": 2.24, + "grad_norm": 18.98538722177228, + "learning_rate": 3.1600136969411954e-06, + "loss": 0.4479, + "step": 14358 + }, + { + "epoch": 2.24, + "grad_norm": 21.639863646021713, + "learning_rate": 3.1587835943320256e-06, + "loss": 0.4667, + "step": 14359 + }, + { + "epoch": 2.24, + "grad_norm": 12.654913511470536, + "learning_rate": 3.1575536862830445e-06, + "loss": 0.4452, + "step": 14360 + }, + { + "epoch": 2.24, + "grad_norm": 21.19234611852943, + "learning_rate": 3.156323972829237e-06, + "loss": 0.4799, + "step": 14361 + }, + { + "epoch": 2.24, + "grad_norm": 23.202184871932534, + "learning_rate": 3.1550944540055693e-06, + "loss": 0.4994, + "step": 14362 + }, + { + "epoch": 2.24, + "grad_norm": 11.69935143448063, + "learning_rate": 3.1538651298470126e-06, + "loss": 0.485, + "step": 14363 + }, + { + "epoch": 2.24, + "grad_norm": 17.52985505896903, + "learning_rate": 3.1526360003885283e-06, + "loss": 0.4512, + "step": 14364 + }, + { + "epoch": 2.24, + "grad_norm": 24.83612723456546, + "learning_rate": 3.151407065665071e-06, + "loss": 0.5222, + "step": 14365 + }, + { + "epoch": 2.24, + "grad_norm": 18.234942443733026, + "learning_rate": 3.150178325711587e-06, + "loss": 0.4532, + "step": 14366 + }, + { + "epoch": 2.24, + "grad_norm": 53.68903504176631, + "learning_rate": 3.1489497805630243e-06, + "loss": 0.4524, + "step": 14367 + }, + { + "epoch": 2.24, + "grad_norm": 23.256895821842843, + "learning_rate": 3.1477214302543225e-06, + "loss": 0.51, + "step": 14368 + }, + { + "epoch": 2.24, + "grad_norm": 17.933806884037544, + "learning_rate": 3.1464932748204215e-06, + "loss": 0.4733, + "step": 14369 + }, + { + "epoch": 2.24, + "grad_norm": 12.700028696694204, + "learning_rate": 3.1452653142962353e-06, + "loss": 0.4283, + "step": 14370 + }, + { + "epoch": 2.24, + "grad_norm": 34.95162712369848, + "learning_rate": 3.144037548716694e-06, + "loss": 0.5144, + "step": 14371 + }, + { + "epoch": 2.24, + "grad_norm": 22.451144056398366, + "learning_rate": 3.142809978116714e-06, + "loss": 0.5001, + "step": 14372 + }, + { + "epoch": 2.25, + "grad_norm": 16.573215571137066, + "learning_rate": 3.1415826025312103e-06, + "loss": 0.412, + "step": 14373 + }, + { + "epoch": 2.25, + "grad_norm": 29.050526628485077, + "learning_rate": 3.140355421995086e-06, + "loss": 0.5389, + "step": 14374 + }, + { + "epoch": 2.25, + "grad_norm": 14.248755929833392, + "learning_rate": 3.139128436543236e-06, + "loss": 0.468, + "step": 14375 + }, + { + "epoch": 2.25, + "grad_norm": 20.8957388994746, + "learning_rate": 3.1379016462105617e-06, + "loss": 0.3903, + "step": 14376 + }, + { + "epoch": 2.25, + "grad_norm": 16.93119597084899, + "learning_rate": 3.136675051031952e-06, + "loss": 0.3946, + "step": 14377 + }, + { + "epoch": 2.25, + "grad_norm": 15.80356864297048, + "learning_rate": 3.1354486510422865e-06, + "loss": 0.4358, + "step": 14378 + }, + { + "epoch": 2.25, + "grad_norm": 26.34947878145194, + "learning_rate": 3.134222446276448e-06, + "loss": 0.4947, + "step": 14379 + }, + { + "epoch": 2.25, + "grad_norm": 15.728335436484356, + "learning_rate": 3.1329964367693042e-06, + "loss": 0.4604, + "step": 14380 + }, + { + "epoch": 2.25, + "grad_norm": 16.813707834769023, + "learning_rate": 3.1317706225557254e-06, + "loss": 0.4187, + "step": 14381 + }, + { + "epoch": 2.25, + "grad_norm": 18.45159610225471, + 
"learning_rate": 3.130545003670574e-06, + "loss": 0.4904, + "step": 14382 + }, + { + "epoch": 2.25, + "grad_norm": 21.566818894589602, + "learning_rate": 3.129319580148702e-06, + "loss": 0.5048, + "step": 14383 + }, + { + "epoch": 2.25, + "grad_norm": 23.911631635401953, + "learning_rate": 3.128094352024965e-06, + "loss": 0.4119, + "step": 14384 + }, + { + "epoch": 2.25, + "grad_norm": 26.220533922953038, + "learning_rate": 3.1268693193342014e-06, + "loss": 0.4568, + "step": 14385 + }, + { + "epoch": 2.25, + "grad_norm": 14.011667214802133, + "learning_rate": 3.1256444821112573e-06, + "loss": 0.4442, + "step": 14386 + }, + { + "epoch": 2.25, + "grad_norm": 28.49043746878992, + "learning_rate": 3.1244198403909587e-06, + "loss": 0.4872, + "step": 14387 + }, + { + "epoch": 2.25, + "grad_norm": 14.50332712568118, + "learning_rate": 3.1231953942081385e-06, + "loss": 0.4169, + "step": 14388 + }, + { + "epoch": 2.25, + "grad_norm": 14.501733297614937, + "learning_rate": 3.1219711435976207e-06, + "loss": 0.4344, + "step": 14389 + }, + { + "epoch": 2.25, + "grad_norm": 32.946003909564176, + "learning_rate": 3.1207470885942213e-06, + "loss": 0.4389, + "step": 14390 + }, + { + "epoch": 2.25, + "grad_norm": 20.428630474746697, + "learning_rate": 3.119523229232746e-06, + "loss": 0.4551, + "step": 14391 + }, + { + "epoch": 2.25, + "grad_norm": 28.246480268878578, + "learning_rate": 3.1182995655480065e-06, + "loss": 0.4917, + "step": 14392 + }, + { + "epoch": 2.25, + "grad_norm": 20.894016106189262, + "learning_rate": 3.1170760975748006e-06, + "loss": 0.4509, + "step": 14393 + }, + { + "epoch": 2.25, + "grad_norm": 15.825113068778577, + "learning_rate": 3.115852825347927e-06, + "loss": 0.4263, + "step": 14394 + }, + { + "epoch": 2.25, + "grad_norm": 21.229917379061057, + "learning_rate": 3.114629748902173e-06, + "loss": 0.449, + "step": 14395 + }, + { + "epoch": 2.25, + "grad_norm": 17.263691951888067, + "learning_rate": 3.1134068682723173e-06, + "loss": 0.4603, + "step": 14396 + }, + { + "epoch": 2.25, + "grad_norm": 22.43525973906959, + "learning_rate": 3.112184183493142e-06, + "loss": 0.473, + "step": 14397 + }, + { + "epoch": 2.25, + "grad_norm": 15.820768709080474, + "learning_rate": 3.1109616945994225e-06, + "loss": 0.4678, + "step": 14398 + }, + { + "epoch": 2.25, + "grad_norm": 33.057930132058054, + "learning_rate": 3.109739401625922e-06, + "loss": 0.5474, + "step": 14399 + }, + { + "epoch": 2.25, + "grad_norm": 23.04651367615477, + "learning_rate": 3.108517304607399e-06, + "loss": 0.5315, + "step": 14400 + }, + { + "epoch": 2.25, + "grad_norm": 19.247604440900464, + "learning_rate": 3.107295403578613e-06, + "loss": 0.4507, + "step": 14401 + }, + { + "epoch": 2.25, + "grad_norm": 24.72954990365314, + "learning_rate": 3.1060736985743166e-06, + "loss": 0.6107, + "step": 14402 + }, + { + "epoch": 2.25, + "grad_norm": 24.574510548656622, + "learning_rate": 3.1048521896292483e-06, + "loss": 0.4751, + "step": 14403 + }, + { + "epoch": 2.25, + "grad_norm": 22.150621748501255, + "learning_rate": 3.103630876778153e-06, + "loss": 0.4813, + "step": 14404 + }, + { + "epoch": 2.25, + "grad_norm": 20.581079570714497, + "learning_rate": 3.102409760055758e-06, + "loss": 0.4464, + "step": 14405 + }, + { + "epoch": 2.25, + "grad_norm": 26.680110779472226, + "learning_rate": 3.101188839496795e-06, + "loss": 0.5548, + "step": 14406 + }, + { + "epoch": 2.25, + "grad_norm": 17.091697645504162, + "learning_rate": 3.099968115135988e-06, + "loss": 0.5001, + "step": 14407 + }, + { + "epoch": 2.25, + "grad_norm": 
27.30240287882424, + "learning_rate": 3.098747587008053e-06, + "loss": 0.5052, + "step": 14408 + }, + { + "epoch": 2.25, + "grad_norm": 24.883326689529127, + "learning_rate": 3.0975272551476953e-06, + "loss": 0.4574, + "step": 14409 + }, + { + "epoch": 2.25, + "grad_norm": 21.06161724272564, + "learning_rate": 3.0963071195896243e-06, + "loss": 0.5273, + "step": 14410 + }, + { + "epoch": 2.25, + "grad_norm": 29.230492738792673, + "learning_rate": 3.0950871803685435e-06, + "loss": 0.4941, + "step": 14411 + }, + { + "epoch": 2.25, + "grad_norm": 38.369405423690246, + "learning_rate": 3.0938674375191415e-06, + "loss": 0.5153, + "step": 14412 + }, + { + "epoch": 2.25, + "grad_norm": 18.70718803467509, + "learning_rate": 3.0926478910761127e-06, + "loss": 0.4729, + "step": 14413 + }, + { + "epoch": 2.25, + "grad_norm": 27.151596696762624, + "learning_rate": 3.091428541074134e-06, + "loss": 0.4964, + "step": 14414 + }, + { + "epoch": 2.25, + "grad_norm": 30.89761117922585, + "learning_rate": 3.090209387547889e-06, + "loss": 0.5068, + "step": 14415 + }, + { + "epoch": 2.25, + "grad_norm": 22.268542439245305, + "learning_rate": 3.088990430532044e-06, + "loss": 0.5266, + "step": 14416 + }, + { + "epoch": 2.25, + "grad_norm": 24.64029710015681, + "learning_rate": 3.0877716700612683e-06, + "loss": 0.4689, + "step": 14417 + }, + { + "epoch": 2.25, + "grad_norm": 17.933843844827553, + "learning_rate": 3.086553106170226e-06, + "loss": 0.4081, + "step": 14418 + }, + { + "epoch": 2.25, + "grad_norm": 14.797660422602728, + "learning_rate": 3.085334738893565e-06, + "loss": 0.4636, + "step": 14419 + }, + { + "epoch": 2.25, + "grad_norm": 19.296978701571533, + "learning_rate": 3.0841165682659436e-06, + "loss": 0.431, + "step": 14420 + }, + { + "epoch": 2.25, + "grad_norm": 20.908002233275102, + "learning_rate": 3.082898594321998e-06, + "loss": 0.4549, + "step": 14421 + }, + { + "epoch": 2.25, + "grad_norm": 14.254789524200397, + "learning_rate": 3.0816808170963697e-06, + "loss": 0.4357, + "step": 14422 + }, + { + "epoch": 2.25, + "grad_norm": 15.83150593497665, + "learning_rate": 3.080463236623694e-06, + "loss": 0.4414, + "step": 14423 + }, + { + "epoch": 2.25, + "grad_norm": 18.63845862288901, + "learning_rate": 3.079245852938597e-06, + "loss": 0.471, + "step": 14424 + }, + { + "epoch": 2.25, + "grad_norm": 20.340703832860676, + "learning_rate": 3.0780286660756964e-06, + "loss": 0.5241, + "step": 14425 + }, + { + "epoch": 2.25, + "grad_norm": 18.455858806933296, + "learning_rate": 3.0768116760696108e-06, + "loss": 0.4209, + "step": 14426 + }, + { + "epoch": 2.25, + "grad_norm": 16.35202524830817, + "learning_rate": 3.0755948829549552e-06, + "loss": 0.505, + "step": 14427 + }, + { + "epoch": 2.25, + "grad_norm": 22.689368047929896, + "learning_rate": 3.0743782867663297e-06, + "loss": 0.5471, + "step": 14428 + }, + { + "epoch": 2.25, + "grad_norm": 15.994484678204449, + "learning_rate": 3.0731618875383318e-06, + "loss": 0.5364, + "step": 14429 + }, + { + "epoch": 2.25, + "grad_norm": 26.42212318905037, + "learning_rate": 3.071945685305557e-06, + "loss": 0.5211, + "step": 14430 + }, + { + "epoch": 2.25, + "grad_norm": 4.191327406256336, + "learning_rate": 3.0707296801025954e-06, + "loss": 0.5204, + "step": 14431 + }, + { + "epoch": 2.25, + "grad_norm": 21.991302221039223, + "learning_rate": 3.0695138719640307e-06, + "loss": 0.4586, + "step": 14432 + }, + { + "epoch": 2.25, + "grad_norm": 20.888557334610837, + "learning_rate": 3.068298260924436e-06, + "loss": 0.501, + "step": 14433 + }, + { + "epoch": 2.25, + 
"grad_norm": 21.2318372367857, + "learning_rate": 3.0670828470183823e-06, + "loss": 0.4796, + "step": 14434 + }, + { + "epoch": 2.25, + "grad_norm": 16.624803972164138, + "learning_rate": 3.065867630280436e-06, + "loss": 0.4517, + "step": 14435 + }, + { + "epoch": 2.25, + "grad_norm": 20.187882251486435, + "learning_rate": 3.0646526107451615e-06, + "loss": 0.4536, + "step": 14436 + }, + { + "epoch": 2.26, + "grad_norm": 18.443662702139886, + "learning_rate": 3.0634377884471055e-06, + "loss": 0.4794, + "step": 14437 + }, + { + "epoch": 2.26, + "grad_norm": 26.660167494551413, + "learning_rate": 3.0622231634208254e-06, + "loss": 0.4736, + "step": 14438 + }, + { + "epoch": 2.26, + "grad_norm": 24.6690069782025, + "learning_rate": 3.0610087357008567e-06, + "loss": 0.4688, + "step": 14439 + }, + { + "epoch": 2.26, + "grad_norm": 16.528152093975113, + "learning_rate": 3.0597945053217435e-06, + "loss": 0.4876, + "step": 14440 + }, + { + "epoch": 2.26, + "grad_norm": 21.846604173722543, + "learning_rate": 3.0585804723180115e-06, + "loss": 0.473, + "step": 14441 + }, + { + "epoch": 2.26, + "grad_norm": 19.46423862074733, + "learning_rate": 3.0573666367241893e-06, + "loss": 0.47, + "step": 14442 + }, + { + "epoch": 2.26, + "grad_norm": 21.817058423026626, + "learning_rate": 3.056152998574803e-06, + "loss": 0.4354, + "step": 14443 + }, + { + "epoch": 2.26, + "grad_norm": 28.24880661384868, + "learning_rate": 3.0549395579043593e-06, + "loss": 0.4707, + "step": 14444 + }, + { + "epoch": 2.26, + "grad_norm": 21.011777732696835, + "learning_rate": 3.053726314747376e-06, + "loss": 0.4621, + "step": 14445 + }, + { + "epoch": 2.26, + "grad_norm": 37.511315925657414, + "learning_rate": 3.0525132691383486e-06, + "loss": 0.5196, + "step": 14446 + }, + { + "epoch": 2.26, + "grad_norm": 19.35770411127352, + "learning_rate": 3.0513004211117805e-06, + "loss": 0.5356, + "step": 14447 + }, + { + "epoch": 2.26, + "grad_norm": 26.269570943242257, + "learning_rate": 3.050087770702167e-06, + "loss": 0.5597, + "step": 14448 + }, + { + "epoch": 2.26, + "grad_norm": 11.864862249370894, + "learning_rate": 3.048875317943991e-06, + "loss": 0.4689, + "step": 14449 + }, + { + "epoch": 2.26, + "grad_norm": 15.075530169860757, + "learning_rate": 3.047663062871731e-06, + "loss": 0.4594, + "step": 14450 + }, + { + "epoch": 2.26, + "grad_norm": 23.95961564916996, + "learning_rate": 3.046451005519867e-06, + "loss": 0.4948, + "step": 14451 + }, + { + "epoch": 2.26, + "grad_norm": 20.648139900230777, + "learning_rate": 3.0452391459228726e-06, + "loss": 0.4036, + "step": 14452 + }, + { + "epoch": 2.26, + "grad_norm": 13.6742962693194, + "learning_rate": 3.0440274841152072e-06, + "loss": 0.3943, + "step": 14453 + }, + { + "epoch": 2.26, + "grad_norm": 20.8202832262932, + "learning_rate": 3.0428160201313295e-06, + "loss": 0.4934, + "step": 14454 + }, + { + "epoch": 2.26, + "grad_norm": 25.224949656697138, + "learning_rate": 3.0416047540056924e-06, + "loss": 0.5326, + "step": 14455 + }, + { + "epoch": 2.26, + "grad_norm": 20.351467316124854, + "learning_rate": 3.040393685772747e-06, + "loss": 0.451, + "step": 14456 + }, + { + "epoch": 2.26, + "grad_norm": 21.01128670964009, + "learning_rate": 3.0391828154669367e-06, + "loss": 0.4905, + "step": 14457 + }, + { + "epoch": 2.26, + "grad_norm": 18.223739676859527, + "learning_rate": 3.037972143122695e-06, + "loss": 0.5233, + "step": 14458 + }, + { + "epoch": 2.26, + "grad_norm": 12.127069508035094, + "learning_rate": 3.0367616687744504e-06, + "loss": 0.5177, + "step": 14459 + }, + { + "epoch": 
2.26, + "grad_norm": 16.784847572725155, + "learning_rate": 3.035551392456629e-06, + "loss": 0.5001, + "step": 14460 + }, + { + "epoch": 2.26, + "grad_norm": 17.118037957009825, + "learning_rate": 3.0343413142036573e-06, + "loss": 0.4609, + "step": 14461 + }, + { + "epoch": 2.26, + "grad_norm": 24.86200295673555, + "learning_rate": 3.0331314340499416e-06, + "loss": 0.3973, + "step": 14462 + }, + { + "epoch": 2.26, + "grad_norm": 27.53457395620035, + "learning_rate": 3.0319217520298904e-06, + "loss": 0.5044, + "step": 14463 + }, + { + "epoch": 2.26, + "grad_norm": 15.766893642032095, + "learning_rate": 3.0307122681779088e-06, + "loss": 0.4725, + "step": 14464 + }, + { + "epoch": 2.26, + "grad_norm": 18.08258026387261, + "learning_rate": 3.0295029825283952e-06, + "loss": 0.4086, + "step": 14465 + }, + { + "epoch": 2.26, + "grad_norm": 18.277191850390157, + "learning_rate": 3.028293895115737e-06, + "loss": 0.5337, + "step": 14466 + }, + { + "epoch": 2.26, + "grad_norm": 14.337090513963576, + "learning_rate": 3.027085005974325e-06, + "loss": 0.4672, + "step": 14467 + }, + { + "epoch": 2.26, + "grad_norm": 18.845246605093315, + "learning_rate": 3.025876315138533e-06, + "loss": 0.47, + "step": 14468 + }, + { + "epoch": 2.26, + "grad_norm": 30.07893215334974, + "learning_rate": 3.0246678226427396e-06, + "loss": 0.514, + "step": 14469 + }, + { + "epoch": 2.26, + "grad_norm": 18.79710428473836, + "learning_rate": 3.0234595285213154e-06, + "loss": 0.4601, + "step": 14470 + }, + { + "epoch": 2.26, + "grad_norm": 25.590624931709048, + "learning_rate": 3.022251432808618e-06, + "loss": 0.4572, + "step": 14471 + }, + { + "epoch": 2.26, + "grad_norm": 16.60547368954316, + "learning_rate": 3.021043535539011e-06, + "loss": 0.492, + "step": 14472 + }, + { + "epoch": 2.26, + "grad_norm": 23.489112097051457, + "learning_rate": 3.0198358367468407e-06, + "loss": 0.5081, + "step": 14473 + }, + { + "epoch": 2.26, + "grad_norm": 17.924892681158354, + "learning_rate": 3.018628336466458e-06, + "loss": 0.4881, + "step": 14474 + }, + { + "epoch": 2.26, + "grad_norm": 27.067810916223017, + "learning_rate": 3.0174210347322e-06, + "loss": 0.4992, + "step": 14475 + }, + { + "epoch": 2.26, + "grad_norm": 21.110252014848722, + "learning_rate": 3.016213931578401e-06, + "loss": 0.3848, + "step": 14476 + }, + { + "epoch": 2.26, + "grad_norm": 20.053493181843194, + "learning_rate": 3.0150070270393973e-06, + "loss": 0.4943, + "step": 14477 + }, + { + "epoch": 2.26, + "grad_norm": 16.591367301746114, + "learning_rate": 3.013800321149506e-06, + "loss": 0.4513, + "step": 14478 + }, + { + "epoch": 2.26, + "grad_norm": 27.25668355364714, + "learning_rate": 3.012593813943044e-06, + "loss": 0.4823, + "step": 14479 + }, + { + "epoch": 2.26, + "grad_norm": 24.236099586389955, + "learning_rate": 3.0113875054543273e-06, + "loss": 0.4419, + "step": 14480 + }, + { + "epoch": 2.26, + "grad_norm": 14.793343441803447, + "learning_rate": 3.0101813957176617e-06, + "loss": 0.4485, + "step": 14481 + }, + { + "epoch": 2.26, + "grad_norm": 22.148656435629523, + "learning_rate": 3.008975484767351e-06, + "loss": 0.4498, + "step": 14482 + }, + { + "epoch": 2.26, + "grad_norm": 26.316310479237725, + "learning_rate": 3.0077697726376873e-06, + "loss": 0.4948, + "step": 14483 + }, + { + "epoch": 2.26, + "grad_norm": 23.967292667981102, + "learning_rate": 3.0065642593629597e-06, + "loss": 0.4571, + "step": 14484 + }, + { + "epoch": 2.26, + "grad_norm": 19.033081494290624, + "learning_rate": 3.005358944977452e-06, + "loss": 0.422, + "step": 14485 + }, + { + 
"epoch": 2.26, + "grad_norm": 19.42182222943798, + "learning_rate": 3.004153829515447e-06, + "loss": 0.3917, + "step": 14486 + }, + { + "epoch": 2.26, + "grad_norm": 21.248015574051667, + "learning_rate": 3.0029489130112155e-06, + "loss": 0.4413, + "step": 14487 + }, + { + "epoch": 2.26, + "grad_norm": 24.56216708532793, + "learning_rate": 3.00174419549902e-06, + "loss": 0.5005, + "step": 14488 + }, + { + "epoch": 2.26, + "grad_norm": 16.607647846384427, + "learning_rate": 3.0005396770131266e-06, + "loss": 0.4137, + "step": 14489 + }, + { + "epoch": 2.26, + "grad_norm": 19.657152513001353, + "learning_rate": 2.9993353575877926e-06, + "loss": 0.5015, + "step": 14490 + }, + { + "epoch": 2.26, + "grad_norm": 19.530034866680325, + "learning_rate": 2.9981312372572624e-06, + "loss": 0.4064, + "step": 14491 + }, + { + "epoch": 2.26, + "grad_norm": 27.66560088660425, + "learning_rate": 2.9969273160557877e-06, + "loss": 0.4708, + "step": 14492 + }, + { + "epoch": 2.26, + "grad_norm": 22.908500331863422, + "learning_rate": 2.9957235940176e-06, + "loss": 0.4383, + "step": 14493 + }, + { + "epoch": 2.26, + "grad_norm": 16.767054634609384, + "learning_rate": 2.9945200711769362e-06, + "loss": 0.4238, + "step": 14494 + }, + { + "epoch": 2.26, + "grad_norm": 12.695565296883993, + "learning_rate": 2.993316747568026e-06, + "loss": 0.4209, + "step": 14495 + }, + { + "epoch": 2.26, + "grad_norm": 17.785178843667577, + "learning_rate": 2.992113623225086e-06, + "loss": 0.5235, + "step": 14496 + }, + { + "epoch": 2.26, + "grad_norm": 18.993745771290246, + "learning_rate": 2.990910698182338e-06, + "loss": 0.4338, + "step": 14497 + }, + { + "epoch": 2.26, + "grad_norm": 22.035853734141558, + "learning_rate": 2.9897079724739866e-06, + "loss": 0.486, + "step": 14498 + }, + { + "epoch": 2.26, + "grad_norm": 18.84878956762315, + "learning_rate": 2.9885054461342423e-06, + "loss": 0.4751, + "step": 14499 + }, + { + "epoch": 2.26, + "grad_norm": 23.292491253505947, + "learning_rate": 2.987303119197299e-06, + "loss": 0.571, + "step": 14500 + }, + { + "epoch": 2.27, + "grad_norm": 26.748964980414147, + "learning_rate": 2.986100991697355e-06, + "loss": 0.503, + "step": 14501 + }, + { + "epoch": 2.27, + "grad_norm": 17.626106715288486, + "learning_rate": 2.9848990636685937e-06, + "loss": 0.4401, + "step": 14502 + }, + { + "epoch": 2.27, + "grad_norm": 19.825547914341616, + "learning_rate": 2.9836973351452014e-06, + "loss": 0.4166, + "step": 14503 + }, + { + "epoch": 2.27, + "grad_norm": 20.36021487039967, + "learning_rate": 2.9824958061613497e-06, + "loss": 0.5354, + "step": 14504 + }, + { + "epoch": 2.27, + "grad_norm": 14.280289947932223, + "learning_rate": 2.9812944767512132e-06, + "loss": 0.4738, + "step": 14505 + }, + { + "epoch": 2.27, + "grad_norm": 23.28615169192425, + "learning_rate": 2.980093346948958e-06, + "loss": 0.4233, + "step": 14506 + }, + { + "epoch": 2.27, + "grad_norm": 14.652010355293557, + "learning_rate": 2.978892416788738e-06, + "loss": 0.4452, + "step": 14507 + }, + { + "epoch": 2.27, + "grad_norm": 25.86114506152174, + "learning_rate": 2.9776916863047146e-06, + "loss": 0.513, + "step": 14508 + }, + { + "epoch": 2.27, + "grad_norm": 18.08327401077484, + "learning_rate": 2.976491155531027e-06, + "loss": 0.3844, + "step": 14509 + }, + { + "epoch": 2.27, + "grad_norm": 20.971513364263334, + "learning_rate": 2.975290824501824e-06, + "loss": 0.4355, + "step": 14510 + }, + { + "epoch": 2.27, + "grad_norm": 24.137544789759183, + "learning_rate": 2.9740906932512427e-06, + "loss": 0.4916, + "step": 14511 + }, 
+ { + "epoch": 2.27, + "grad_norm": 21.506983227994724, + "learning_rate": 2.9728907618134127e-06, + "loss": 0.4789, + "step": 14512 + }, + { + "epoch": 2.27, + "grad_norm": 26.674275112045073, + "learning_rate": 2.9716910302224556e-06, + "loss": 0.4751, + "step": 14513 + }, + { + "epoch": 2.27, + "grad_norm": 24.903675721692522, + "learning_rate": 2.970491498512493e-06, + "loss": 0.4841, + "step": 14514 + }, + { + "epoch": 2.27, + "grad_norm": 27.091565882834058, + "learning_rate": 2.9692921667176443e-06, + "loss": 0.495, + "step": 14515 + }, + { + "epoch": 2.27, + "grad_norm": 23.164713719495037, + "learning_rate": 2.968093034872014e-06, + "loss": 0.4176, + "step": 14516 + }, + { + "epoch": 2.27, + "grad_norm": 19.855802217633038, + "learning_rate": 2.9668941030097e-06, + "loss": 0.5089, + "step": 14517 + }, + { + "epoch": 2.27, + "grad_norm": 18.513754404920117, + "learning_rate": 2.965695371164804e-06, + "loss": 0.4824, + "step": 14518 + }, + { + "epoch": 2.27, + "grad_norm": 17.153989606483556, + "learning_rate": 2.964496839371417e-06, + "loss": 0.475, + "step": 14519 + }, + { + "epoch": 2.27, + "grad_norm": 23.54719415535711, + "learning_rate": 2.963298507663627e-06, + "loss": 0.4146, + "step": 14520 + }, + { + "epoch": 2.27, + "grad_norm": 15.587431353251151, + "learning_rate": 2.9621003760755105e-06, + "loss": 0.4653, + "step": 14521 + }, + { + "epoch": 2.27, + "grad_norm": 17.765670173238664, + "learning_rate": 2.9609024446411394e-06, + "loss": 0.4635, + "step": 14522 + }, + { + "epoch": 2.27, + "grad_norm": 33.189888607785285, + "learning_rate": 2.9597047133945855e-06, + "loss": 0.4649, + "step": 14523 + }, + { + "epoch": 2.27, + "grad_norm": 17.530166295839656, + "learning_rate": 2.9585071823699143e-06, + "loss": 0.5239, + "step": 14524 + }, + { + "epoch": 2.27, + "grad_norm": 19.76675658021288, + "learning_rate": 2.9573098516011756e-06, + "loss": 0.437, + "step": 14525 + }, + { + "epoch": 2.27, + "grad_norm": 21.74801631731782, + "learning_rate": 2.9561127211224283e-06, + "loss": 0.4819, + "step": 14526 + }, + { + "epoch": 2.27, + "grad_norm": 20.54625519662564, + "learning_rate": 2.9549157909677116e-06, + "loss": 0.4643, + "step": 14527 + }, + { + "epoch": 2.27, + "grad_norm": 24.086998798894857, + "learning_rate": 2.953719061171072e-06, + "loss": 0.5299, + "step": 14528 + }, + { + "epoch": 2.27, + "grad_norm": 21.47009228448441, + "learning_rate": 2.9525225317665375e-06, + "loss": 0.4952, + "step": 14529 + }, + { + "epoch": 2.27, + "grad_norm": 20.299977458215515, + "learning_rate": 2.9513262027881396e-06, + "loss": 0.4498, + "step": 14530 + }, + { + "epoch": 2.27, + "grad_norm": 24.708110349709333, + "learning_rate": 2.9501300742699036e-06, + "loss": 0.4598, + "step": 14531 + }, + { + "epoch": 2.27, + "grad_norm": 18.90220833643359, + "learning_rate": 2.948934146245841e-06, + "loss": 0.4472, + "step": 14532 + }, + { + "epoch": 2.27, + "grad_norm": 29.114908057001653, + "learning_rate": 2.94773841874997e-06, + "loss": 0.5214, + "step": 14533 + }, + { + "epoch": 2.27, + "grad_norm": 25.30533818311973, + "learning_rate": 2.94654289181629e-06, + "loss": 0.5607, + "step": 14534 + }, + { + "epoch": 2.27, + "grad_norm": 25.211873931090388, + "learning_rate": 2.9453475654788032e-06, + "loss": 0.4365, + "step": 14535 + }, + { + "epoch": 2.27, + "grad_norm": 25.13345323023959, + "learning_rate": 2.9441524397715094e-06, + "loss": 0.4351, + "step": 14536 + }, + { + "epoch": 2.27, + "grad_norm": 25.27511627659148, + "learning_rate": 2.942957514728392e-06, + "loss": 0.5023, + "step": 
14537 + }, + { + "epoch": 2.27, + "grad_norm": 11.422833012431038, + "learning_rate": 2.9417627903834313e-06, + "loss": 0.46, + "step": 14538 + }, + { + "epoch": 2.27, + "grad_norm": 22.46845957842912, + "learning_rate": 2.940568266770607e-06, + "loss": 0.5018, + "step": 14539 + }, + { + "epoch": 2.27, + "grad_norm": 30.015772882203493, + "learning_rate": 2.9393739439238955e-06, + "loss": 0.463, + "step": 14540 + }, + { + "epoch": 2.27, + "grad_norm": 24.04022529065963, + "learning_rate": 2.9381798218772594e-06, + "loss": 0.4812, + "step": 14541 + }, + { + "epoch": 2.27, + "grad_norm": 12.619839533049767, + "learning_rate": 2.936985900664654e-06, + "loss": 0.4966, + "step": 14542 + }, + { + "epoch": 2.27, + "grad_norm": 19.654944393936418, + "learning_rate": 2.9357921803200386e-06, + "loss": 0.4499, + "step": 14543 + }, + { + "epoch": 2.27, + "grad_norm": 24.553267308308644, + "learning_rate": 2.93459866087736e-06, + "loss": 0.5098, + "step": 14544 + }, + { + "epoch": 2.27, + "grad_norm": 16.593171497541185, + "learning_rate": 2.9334053423705666e-06, + "loss": 0.4264, + "step": 14545 + }, + { + "epoch": 2.27, + "grad_norm": 22.457023025407675, + "learning_rate": 2.9322122248335905e-06, + "loss": 0.4872, + "step": 14546 + }, + { + "epoch": 2.27, + "grad_norm": 13.542750911649627, + "learning_rate": 2.9310193083003614e-06, + "loss": 0.4392, + "step": 14547 + }, + { + "epoch": 2.27, + "grad_norm": 21.489082211932182, + "learning_rate": 2.9298265928048086e-06, + "loss": 0.4723, + "step": 14548 + }, + { + "epoch": 2.27, + "grad_norm": 22.081831631044146, + "learning_rate": 2.9286340783808543e-06, + "loss": 0.5261, + "step": 14549 + }, + { + "epoch": 2.27, + "grad_norm": 28.07680276868442, + "learning_rate": 2.9274417650624097e-06, + "loss": 0.4656, + "step": 14550 + }, + { + "epoch": 2.27, + "grad_norm": 18.449567615829828, + "learning_rate": 2.926249652883382e-06, + "loss": 0.5031, + "step": 14551 + }, + { + "epoch": 2.27, + "grad_norm": 16.441363449929163, + "learning_rate": 2.925057741877675e-06, + "loss": 0.431, + "step": 14552 + }, + { + "epoch": 2.27, + "grad_norm": 27.545289395895157, + "learning_rate": 2.92386603207919e-06, + "loss": 0.4956, + "step": 14553 + }, + { + "epoch": 2.27, + "grad_norm": 19.43183017108756, + "learning_rate": 2.9226745235218134e-06, + "loss": 0.446, + "step": 14554 + }, + { + "epoch": 2.27, + "grad_norm": 19.270468638981548, + "learning_rate": 2.921483216239436e-06, + "loss": 0.4876, + "step": 14555 + }, + { + "epoch": 2.27, + "grad_norm": 17.45729308615934, + "learning_rate": 2.920292110265932e-06, + "loss": 0.4861, + "step": 14556 + }, + { + "epoch": 2.27, + "grad_norm": 27.16373185797766, + "learning_rate": 2.9191012056351787e-06, + "loss": 0.4482, + "step": 14557 + }, + { + "epoch": 2.27, + "grad_norm": 18.588607991492953, + "learning_rate": 2.917910502381048e-06, + "loss": 0.4362, + "step": 14558 + }, + { + "epoch": 2.27, + "grad_norm": 33.51284107837178, + "learning_rate": 2.9167200005373976e-06, + "loss": 0.5505, + "step": 14559 + }, + { + "epoch": 2.27, + "grad_norm": 23.74736918384356, + "learning_rate": 2.915529700138089e-06, + "loss": 0.442, + "step": 14560 + }, + { + "epoch": 2.27, + "grad_norm": 23.059512406185483, + "learning_rate": 2.9143396012169677e-06, + "loss": 0.4956, + "step": 14561 + }, + { + "epoch": 2.27, + "grad_norm": 17.80184154521149, + "learning_rate": 2.913149703807887e-06, + "loss": 0.4861, + "step": 14562 + }, + { + "epoch": 2.27, + "grad_norm": 23.87458520970228, + "learning_rate": 2.9119600079446807e-06, + "loss": 0.5143, + 
"step": 14563 + }, + { + "epoch": 2.27, + "grad_norm": 20.69032047095443, + "learning_rate": 2.910770513661184e-06, + "loss": 0.4832, + "step": 14564 + }, + { + "epoch": 2.28, + "grad_norm": 23.498750248229264, + "learning_rate": 2.909581220991231e-06, + "loss": 0.5434, + "step": 14565 + }, + { + "epoch": 2.28, + "grad_norm": 18.0219330277558, + "learning_rate": 2.9083921299686403e-06, + "loss": 0.442, + "step": 14566 + }, + { + "epoch": 2.28, + "grad_norm": 21.758304284939708, + "learning_rate": 2.9072032406272243e-06, + "loss": 0.4921, + "step": 14567 + }, + { + "epoch": 2.28, + "grad_norm": 20.624430396421985, + "learning_rate": 2.9060145530008013e-06, + "loss": 0.4476, + "step": 14568 + }, + { + "epoch": 2.28, + "grad_norm": 21.387851107157815, + "learning_rate": 2.9048260671231733e-06, + "loss": 0.4768, + "step": 14569 + }, + { + "epoch": 2.28, + "grad_norm": 24.096355680163803, + "learning_rate": 2.9036377830281435e-06, + "loss": 0.4922, + "step": 14570 + }, + { + "epoch": 2.28, + "grad_norm": 16.7804446582132, + "learning_rate": 2.902449700749506e-06, + "loss": 0.4775, + "step": 14571 + }, + { + "epoch": 2.28, + "grad_norm": 20.879931748187758, + "learning_rate": 2.901261820321042e-06, + "loss": 0.5038, + "step": 14572 + }, + { + "epoch": 2.28, + "grad_norm": 24.898715280707513, + "learning_rate": 2.90007414177654e-06, + "loss": 0.4704, + "step": 14573 + }, + { + "epoch": 2.28, + "grad_norm": 35.882618256385136, + "learning_rate": 2.8988866651497793e-06, + "loss": 0.558, + "step": 14574 + }, + { + "epoch": 2.28, + "grad_norm": 25.85289553343663, + "learning_rate": 2.8976993904745277e-06, + "loss": 0.4738, + "step": 14575 + }, + { + "epoch": 2.28, + "grad_norm": 13.857581692419895, + "learning_rate": 2.8965123177845476e-06, + "loss": 0.479, + "step": 14576 + }, + { + "epoch": 2.28, + "grad_norm": 20.481075737700465, + "learning_rate": 2.8953254471136017e-06, + "loss": 0.475, + "step": 14577 + }, + { + "epoch": 2.28, + "grad_norm": 22.950848007157003, + "learning_rate": 2.8941387784954476e-06, + "loss": 0.5141, + "step": 14578 + }, + { + "epoch": 2.28, + "grad_norm": 19.821943100227863, + "learning_rate": 2.8929523119638257e-06, + "loss": 0.4449, + "step": 14579 + }, + { + "epoch": 2.28, + "grad_norm": 19.016087738776136, + "learning_rate": 2.8917660475524867e-06, + "loss": 0.5287, + "step": 14580 + }, + { + "epoch": 2.28, + "grad_norm": 16.85752968946482, + "learning_rate": 2.89057998529516e-06, + "loss": 0.4505, + "step": 14581 + }, + { + "epoch": 2.28, + "grad_norm": 18.64946738988201, + "learning_rate": 2.8893941252255784e-06, + "loss": 0.4415, + "step": 14582 + }, + { + "epoch": 2.28, + "grad_norm": 15.013762917310192, + "learning_rate": 2.8882084673774722e-06, + "loss": 0.4651, + "step": 14583 + }, + { + "epoch": 2.28, + "grad_norm": 33.37288981278104, + "learning_rate": 2.8870230117845543e-06, + "loss": 0.4923, + "step": 14584 + }, + { + "epoch": 2.28, + "grad_norm": 26.872206953092036, + "learning_rate": 2.885837758480543e-06, + "loss": 0.4836, + "step": 14585 + }, + { + "epoch": 2.28, + "grad_norm": 18.644142551234598, + "learning_rate": 2.8846527074991427e-06, + "loss": 0.4874, + "step": 14586 + }, + { + "epoch": 2.28, + "grad_norm": 14.325005004417585, + "learning_rate": 2.8834678588740585e-06, + "loss": 0.4944, + "step": 14587 + }, + { + "epoch": 2.28, + "grad_norm": 23.662272654185333, + "learning_rate": 2.8822832126389834e-06, + "loss": 0.4829, + "step": 14588 + }, + { + "epoch": 2.28, + "grad_norm": 21.74556383159142, + "learning_rate": 2.8810987688276092e-06, + "loss": 
0.4504, + "step": 14589 + }, + { + "epoch": 2.28, + "grad_norm": 18.91071675869982, + "learning_rate": 2.8799145274736253e-06, + "loss": 0.4825, + "step": 14590 + }, + { + "epoch": 2.28, + "grad_norm": 17.21911702937514, + "learning_rate": 2.878730488610706e-06, + "loss": 0.4193, + "step": 14591 + }, + { + "epoch": 2.28, + "grad_norm": 19.372383373215637, + "learning_rate": 2.8775466522725238e-06, + "loss": 0.4663, + "step": 14592 + }, + { + "epoch": 2.28, + "grad_norm": 13.75069859854102, + "learning_rate": 2.876363018492747e-06, + "loss": 0.4074, + "step": 14593 + }, + { + "epoch": 2.28, + "grad_norm": 27.8066160873516, + "learning_rate": 2.8751795873050405e-06, + "loss": 0.5877, + "step": 14594 + }, + { + "epoch": 2.28, + "grad_norm": 13.661622441894131, + "learning_rate": 2.873996358743061e-06, + "loss": 0.3917, + "step": 14595 + }, + { + "epoch": 2.28, + "grad_norm": 15.017143054186315, + "learning_rate": 2.872813332840456e-06, + "loss": 0.4441, + "step": 14596 + }, + { + "epoch": 2.28, + "grad_norm": 21.565455806318877, + "learning_rate": 2.8716305096308674e-06, + "loss": 0.4235, + "step": 14597 + }, + { + "epoch": 2.28, + "grad_norm": 15.927970018637607, + "learning_rate": 2.870447889147938e-06, + "loss": 0.5055, + "step": 14598 + }, + { + "epoch": 2.28, + "grad_norm": 20.692718635637814, + "learning_rate": 2.869265471425302e-06, + "loss": 0.4674, + "step": 14599 + }, + { + "epoch": 2.28, + "grad_norm": 24.29028742650249, + "learning_rate": 2.8680832564965853e-06, + "loss": 0.4597, + "step": 14600 + }, + { + "epoch": 2.28, + "grad_norm": 21.760241108927712, + "learning_rate": 2.866901244395406e-06, + "loss": 0.4893, + "step": 14601 + }, + { + "epoch": 2.28, + "grad_norm": 23.41395928274524, + "learning_rate": 2.8657194351553817e-06, + "loss": 0.5354, + "step": 14602 + }, + { + "epoch": 2.28, + "grad_norm": 18.196611814185918, + "learning_rate": 2.8645378288101267e-06, + "loss": 0.4533, + "step": 14603 + }, + { + "epoch": 2.28, + "grad_norm": 16.016700890852174, + "learning_rate": 2.863356425393242e-06, + "loss": 0.4446, + "step": 14604 + }, + { + "epoch": 2.28, + "grad_norm": 3.901529817688725, + "learning_rate": 2.862175224938323e-06, + "loss": 0.4981, + "step": 14605 + }, + { + "epoch": 2.28, + "grad_norm": 23.93669328376025, + "learning_rate": 2.860994227478964e-06, + "loss": 0.4859, + "step": 14606 + }, + { + "epoch": 2.28, + "grad_norm": 11.275653836699915, + "learning_rate": 2.8598134330487536e-06, + "loss": 0.4262, + "step": 14607 + }, + { + "epoch": 2.28, + "grad_norm": 29.23866465692385, + "learning_rate": 2.8586328416812746e-06, + "loss": 0.479, + "step": 14608 + }, + { + "epoch": 2.28, + "grad_norm": 18.303862175792137, + "learning_rate": 2.857452453410101e-06, + "loss": 0.5062, + "step": 14609 + }, + { + "epoch": 2.28, + "grad_norm": 20.070503608147156, + "learning_rate": 2.856272268268798e-06, + "loss": 0.4274, + "step": 14610 + }, + { + "epoch": 2.28, + "grad_norm": 24.416829497740473, + "learning_rate": 2.8550922862909327e-06, + "loss": 0.4651, + "step": 14611 + }, + { + "epoch": 2.28, + "grad_norm": 53.24683257429877, + "learning_rate": 2.853912507510066e-06, + "loss": 0.4817, + "step": 14612 + }, + { + "epoch": 2.28, + "grad_norm": 23.6163698181681, + "learning_rate": 2.852732931959744e-06, + "loss": 0.5131, + "step": 14613 + }, + { + "epoch": 2.28, + "grad_norm": 21.85994082182788, + "learning_rate": 2.85155355967352e-06, + "loss": 0.4233, + "step": 14614 + }, + { + "epoch": 2.28, + "grad_norm": 25.748390077405716, + "learning_rate": 2.850374390684928e-06, + 
"loss": 0.4581, + "step": 14615 + }, + { + "epoch": 2.28, + "grad_norm": 16.71247043596305, + "learning_rate": 2.849195425027509e-06, + "loss": 0.577, + "step": 14616 + }, + { + "epoch": 2.28, + "grad_norm": 18.36669564409993, + "learning_rate": 2.8480166627347847e-06, + "loss": 0.4809, + "step": 14617 + }, + { + "epoch": 2.28, + "grad_norm": 21.013068291095262, + "learning_rate": 2.8468381038402836e-06, + "loss": 0.4681, + "step": 14618 + }, + { + "epoch": 2.28, + "grad_norm": 21.458456391071184, + "learning_rate": 2.845659748377526e-06, + "loss": 0.5167, + "step": 14619 + }, + { + "epoch": 2.28, + "grad_norm": 20.23870538157785, + "learning_rate": 2.8444815963800164e-06, + "loss": 0.5133, + "step": 14620 + }, + { + "epoch": 2.28, + "grad_norm": 21.536674247422006, + "learning_rate": 2.843303647881267e-06, + "loss": 0.4429, + "step": 14621 + }, + { + "epoch": 2.28, + "grad_norm": 18.70212843796634, + "learning_rate": 2.842125902914773e-06, + "loss": 0.5036, + "step": 14622 + }, + { + "epoch": 2.28, + "grad_norm": 37.736611541272914, + "learning_rate": 2.840948361514031e-06, + "loss": 0.5101, + "step": 14623 + }, + { + "epoch": 2.28, + "grad_norm": 18.606048121652947, + "learning_rate": 2.839771023712532e-06, + "loss": 0.4418, + "step": 14624 + }, + { + "epoch": 2.28, + "grad_norm": 23.13825398493852, + "learning_rate": 2.838593889543757e-06, + "loss": 0.4611, + "step": 14625 + }, + { + "epoch": 2.28, + "grad_norm": 18.694140225746896, + "learning_rate": 2.8374169590411793e-06, + "loss": 0.4813, + "step": 14626 + }, + { + "epoch": 2.28, + "grad_norm": 24.938039316712928, + "learning_rate": 2.836240232238274e-06, + "loss": 0.49, + "step": 14627 + }, + { + "epoch": 2.28, + "grad_norm": 18.672149234018818, + "learning_rate": 2.8350637091685085e-06, + "loss": 0.4539, + "step": 14628 + }, + { + "epoch": 2.29, + "grad_norm": 22.839086796349058, + "learning_rate": 2.8338873898653394e-06, + "loss": 0.4864, + "step": 14629 + }, + { + "epoch": 2.29, + "grad_norm": 22.876517253987576, + "learning_rate": 2.8327112743622176e-06, + "loss": 0.4772, + "step": 14630 + }, + { + "epoch": 2.29, + "grad_norm": 29.03922370566224, + "learning_rate": 2.8315353626925943e-06, + "loss": 0.4416, + "step": 14631 + }, + { + "epoch": 2.29, + "grad_norm": 25.506289814340988, + "learning_rate": 2.8303596548899127e-06, + "loss": 0.5167, + "step": 14632 + }, + { + "epoch": 2.29, + "grad_norm": 23.45906272877004, + "learning_rate": 2.829184150987612e-06, + "loss": 0.4793, + "step": 14633 + }, + { + "epoch": 2.29, + "grad_norm": 22.315959965353123, + "learning_rate": 2.8280088510191174e-06, + "loss": 0.5183, + "step": 14634 + }, + { + "epoch": 2.29, + "grad_norm": 25.80685360739133, + "learning_rate": 2.8268337550178537e-06, + "loss": 0.4263, + "step": 14635 + }, + { + "epoch": 2.29, + "grad_norm": 23.11465568640033, + "learning_rate": 2.8256588630172412e-06, + "loss": 0.4544, + "step": 14636 + }, + { + "epoch": 2.29, + "grad_norm": 23.103817308213625, + "learning_rate": 2.8244841750506967e-06, + "loss": 0.4515, + "step": 14637 + }, + { + "epoch": 2.29, + "grad_norm": 21.62563470366555, + "learning_rate": 2.823309691151622e-06, + "loss": 0.4442, + "step": 14638 + }, + { + "epoch": 2.29, + "grad_norm": 26.325458061004305, + "learning_rate": 2.8221354113534248e-06, + "loss": 0.4463, + "step": 14639 + }, + { + "epoch": 2.29, + "grad_norm": 25.48125682982296, + "learning_rate": 2.8209613356894948e-06, + "loss": 0.4377, + "step": 14640 + }, + { + "epoch": 2.29, + "grad_norm": 20.810976844044117, + "learning_rate": 
2.8197874641932276e-06, + "loss": 0.4796, + "step": 14641 + }, + { + "epoch": 2.29, + "grad_norm": 18.447786302624664, + "learning_rate": 2.818613796898001e-06, + "loss": 0.4873, + "step": 14642 + }, + { + "epoch": 2.29, + "grad_norm": 20.518754891044363, + "learning_rate": 2.8174403338371968e-06, + "loss": 0.4801, + "step": 14643 + }, + { + "epoch": 2.29, + "grad_norm": 23.140094227148214, + "learning_rate": 2.8162670750441924e-06, + "loss": 0.487, + "step": 14644 + }, + { + "epoch": 2.29, + "grad_norm": 16.97101558398097, + "learning_rate": 2.815094020552346e-06, + "loss": 0.5129, + "step": 14645 + }, + { + "epoch": 2.29, + "grad_norm": 19.28130223562377, + "learning_rate": 2.8139211703950255e-06, + "loss": 0.6144, + "step": 14646 + }, + { + "epoch": 2.29, + "grad_norm": 15.737472836752787, + "learning_rate": 2.81274852460558e-06, + "loss": 0.4402, + "step": 14647 + }, + { + "epoch": 2.29, + "grad_norm": 19.490587807186053, + "learning_rate": 2.8115760832173657e-06, + "loss": 0.4549, + "step": 14648 + }, + { + "epoch": 2.29, + "grad_norm": 15.184511076016577, + "learning_rate": 2.8104038462637195e-06, + "loss": 0.4527, + "step": 14649 + }, + { + "epoch": 2.29, + "grad_norm": 16.66197647132942, + "learning_rate": 2.809231813777984e-06, + "loss": 0.4474, + "step": 14650 + }, + { + "epoch": 2.29, + "grad_norm": 20.516430213623416, + "learning_rate": 2.8080599857934876e-06, + "loss": 0.5242, + "step": 14651 + }, + { + "epoch": 2.29, + "grad_norm": 22.360133258856674, + "learning_rate": 2.806888362343557e-06, + "loss": 0.5067, + "step": 14652 + }, + { + "epoch": 2.29, + "grad_norm": 14.378408133727232, + "learning_rate": 2.805716943461517e-06, + "loss": 0.4669, + "step": 14653 + }, + { + "epoch": 2.29, + "grad_norm": 12.125692484808297, + "learning_rate": 2.8045457291806787e-06, + "loss": 0.3607, + "step": 14654 + }, + { + "epoch": 2.29, + "grad_norm": 18.92809975105807, + "learning_rate": 2.8033747195343474e-06, + "loss": 0.5073, + "step": 14655 + }, + { + "epoch": 2.29, + "grad_norm": 16.888604003515766, + "learning_rate": 2.802203914555829e-06, + "loss": 0.4903, + "step": 14656 + }, + { + "epoch": 2.29, + "grad_norm": 24.927222034641275, + "learning_rate": 2.8010333142784217e-06, + "loss": 0.5385, + "step": 14657 + }, + { + "epoch": 2.29, + "grad_norm": 18.71245044373287, + "learning_rate": 2.7998629187354178e-06, + "loss": 0.4453, + "step": 14658 + }, + { + "epoch": 2.29, + "grad_norm": 14.554409654087648, + "learning_rate": 2.798692727960101e-06, + "loss": 0.4856, + "step": 14659 + }, + { + "epoch": 2.29, + "grad_norm": 16.986017007424994, + "learning_rate": 2.7975227419857466e-06, + "loss": 0.4349, + "step": 14660 + }, + { + "epoch": 2.29, + "grad_norm": 24.87303181061066, + "learning_rate": 2.796352960845632e-06, + "loss": 0.4533, + "step": 14661 + }, + { + "epoch": 2.29, + "grad_norm": 29.373021142333087, + "learning_rate": 2.795183384573029e-06, + "loss": 0.4723, + "step": 14662 + }, + { + "epoch": 2.29, + "grad_norm": 15.222709169900291, + "learning_rate": 2.7940140132011963e-06, + "loss": 0.4216, + "step": 14663 + }, + { + "epoch": 2.29, + "grad_norm": 23.032443900070554, + "learning_rate": 2.792844846763385e-06, + "loss": 0.4792, + "step": 14664 + }, + { + "epoch": 2.29, + "grad_norm": 22.9586632307366, + "learning_rate": 2.7916758852928514e-06, + "loss": 0.4608, + "step": 14665 + }, + { + "epoch": 2.29, + "grad_norm": 24.399868598568535, + "learning_rate": 2.7905071288228415e-06, + "loss": 0.4484, + "step": 14666 + }, + { + "epoch": 2.29, + "grad_norm": 28.753317653444675, + 
"learning_rate": 2.789338577386589e-06, + "loss": 0.5067, + "step": 14667 + }, + { + "epoch": 2.29, + "grad_norm": 18.383428350103912, + "learning_rate": 2.788170231017332e-06, + "loss": 0.4084, + "step": 14668 + }, + { + "epoch": 2.29, + "grad_norm": 31.82335384846937, + "learning_rate": 2.787002089748291e-06, + "loss": 0.4722, + "step": 14669 + }, + { + "epoch": 2.29, + "grad_norm": 24.38579140469859, + "learning_rate": 2.785834153612692e-06, + "loss": 0.4914, + "step": 14670 + }, + { + "epoch": 2.29, + "grad_norm": 16.433195744947856, + "learning_rate": 2.7846664226437524e-06, + "loss": 0.4063, + "step": 14671 + }, + { + "epoch": 2.29, + "grad_norm": 18.670153166576075, + "learning_rate": 2.783498896874676e-06, + "loss": 0.4822, + "step": 14672 + }, + { + "epoch": 2.29, + "grad_norm": 26.748641396273573, + "learning_rate": 2.7823315763386715e-06, + "loss": 0.4548, + "step": 14673 + }, + { + "epoch": 2.29, + "grad_norm": 17.134115833809354, + "learning_rate": 2.7811644610689327e-06, + "loss": 0.4557, + "step": 14674 + }, + { + "epoch": 2.29, + "grad_norm": 22.29928887699729, + "learning_rate": 2.7799975510986566e-06, + "loss": 0.4867, + "step": 14675 + }, + { + "epoch": 2.29, + "grad_norm": 18.249070043826354, + "learning_rate": 2.7788308464610225e-06, + "loss": 0.4484, + "step": 14676 + }, + { + "epoch": 2.29, + "grad_norm": 21.684559613134798, + "learning_rate": 2.777664347189216e-06, + "loss": 0.4743, + "step": 14677 + }, + { + "epoch": 2.29, + "grad_norm": 18.679171289820214, + "learning_rate": 2.776498053316413e-06, + "loss": 0.4336, + "step": 14678 + }, + { + "epoch": 2.29, + "grad_norm": 15.897845134899944, + "learning_rate": 2.7753319648757802e-06, + "loss": 0.4376, + "step": 14679 + }, + { + "epoch": 2.29, + "grad_norm": 23.94907233449032, + "learning_rate": 2.7741660819004766e-06, + "loss": 0.493, + "step": 14680 + }, + { + "epoch": 2.29, + "grad_norm": 16.118537295277786, + "learning_rate": 2.7730004044236636e-06, + "loss": 0.3929, + "step": 14681 + }, + { + "epoch": 2.29, + "grad_norm": 19.018668022160856, + "learning_rate": 2.7718349324784897e-06, + "loss": 0.4568, + "step": 14682 + }, + { + "epoch": 2.29, + "grad_norm": 21.37691721558114, + "learning_rate": 2.770669666098107e-06, + "loss": 0.545, + "step": 14683 + }, + { + "epoch": 2.29, + "grad_norm": 28.157284896494332, + "learning_rate": 2.769504605315648e-06, + "loss": 0.4321, + "step": 14684 + }, + { + "epoch": 2.29, + "grad_norm": 19.420540112757475, + "learning_rate": 2.7683397501642463e-06, + "loss": 0.4862, + "step": 14685 + }, + { + "epoch": 2.29, + "grad_norm": 16.18713358101687, + "learning_rate": 2.767175100677032e-06, + "loss": 0.4293, + "step": 14686 + }, + { + "epoch": 2.29, + "grad_norm": 21.996247025641516, + "learning_rate": 2.7660106568871293e-06, + "loss": 0.4966, + "step": 14687 + }, + { + "epoch": 2.29, + "grad_norm": 21.484005550675867, + "learning_rate": 2.7648464188276514e-06, + "loss": 0.4995, + "step": 14688 + }, + { + "epoch": 2.29, + "grad_norm": 18.23665222348996, + "learning_rate": 2.7636823865317065e-06, + "loss": 0.4938, + "step": 14689 + }, + { + "epoch": 2.29, + "grad_norm": 19.48146588649827, + "learning_rate": 2.7625185600324013e-06, + "loss": 0.4615, + "step": 14690 + }, + { + "epoch": 2.29, + "grad_norm": 11.65652048721396, + "learning_rate": 2.761354939362838e-06, + "loss": 0.4101, + "step": 14691 + }, + { + "epoch": 2.29, + "grad_norm": 17.754384520145393, + "learning_rate": 2.7601915245561017e-06, + "loss": 0.4562, + "step": 14692 + }, + { + "epoch": 2.3, + "grad_norm": 
18.28091433127257, + "learning_rate": 2.7590283156452867e-06, + "loss": 0.4447, + "step": 14693 + }, + { + "epoch": 2.3, + "grad_norm": 18.169771750070954, + "learning_rate": 2.757865312663468e-06, + "loss": 0.5174, + "step": 14694 + }, + { + "epoch": 2.3, + "grad_norm": 12.597543016264888, + "learning_rate": 2.7567025156437223e-06, + "loss": 0.4409, + "step": 14695 + }, + { + "epoch": 2.3, + "grad_norm": 16.911334087744464, + "learning_rate": 2.755539924619124e-06, + "loss": 0.4581, + "step": 14696 + }, + { + "epoch": 2.3, + "grad_norm": 17.502788704677496, + "learning_rate": 2.754377539622731e-06, + "loss": 0.4437, + "step": 14697 + }, + { + "epoch": 2.3, + "grad_norm": 24.431116527730488, + "learning_rate": 2.7532153606876e-06, + "loss": 0.4441, + "step": 14698 + }, + { + "epoch": 2.3, + "grad_norm": 20.527162249277215, + "learning_rate": 2.7520533878467846e-06, + "loss": 0.4626, + "step": 14699 + }, + { + "epoch": 2.3, + "grad_norm": 23.97678055389082, + "learning_rate": 2.7508916211333346e-06, + "loss": 0.4527, + "step": 14700 + }, + { + "epoch": 2.3, + "grad_norm": 24.68243090762094, + "learning_rate": 2.7497300605802823e-06, + "loss": 0.4519, + "step": 14701 + }, + { + "epoch": 2.3, + "grad_norm": 20.909088308455285, + "learning_rate": 2.74856870622067e-06, + "loss": 0.5036, + "step": 14702 + }, + { + "epoch": 2.3, + "grad_norm": 23.995748140782542, + "learning_rate": 2.7474075580875182e-06, + "loss": 0.5154, + "step": 14703 + }, + { + "epoch": 2.3, + "grad_norm": 18.834694657208953, + "learning_rate": 2.7462466162138557e-06, + "loss": 0.4256, + "step": 14704 + }, + { + "epoch": 2.3, + "grad_norm": 27.08003131299765, + "learning_rate": 2.745085880632693e-06, + "loss": 0.5153, + "step": 14705 + }, + { + "epoch": 2.3, + "grad_norm": 20.49231549338284, + "learning_rate": 2.7439253513770434e-06, + "loss": 0.484, + "step": 14706 + }, + { + "epoch": 2.3, + "grad_norm": 15.393991719809545, + "learning_rate": 2.7427650284799157e-06, + "loss": 0.4556, + "step": 14707 + }, + { + "epoch": 2.3, + "grad_norm": 23.16881591696427, + "learning_rate": 2.7416049119743025e-06, + "loss": 0.4203, + "step": 14708 + }, + { + "epoch": 2.3, + "grad_norm": 22.339026026426843, + "learning_rate": 2.7404450018932027e-06, + "loss": 0.5047, + "step": 14709 + }, + { + "epoch": 2.3, + "grad_norm": 26.102454892123095, + "learning_rate": 2.739285298269596e-06, + "loss": 0.5579, + "step": 14710 + }, + { + "epoch": 2.3, + "grad_norm": 19.587943772420655, + "learning_rate": 2.7381258011364697e-06, + "loss": 0.4283, + "step": 14711 + }, + { + "epoch": 2.3, + "grad_norm": 15.980272688650185, + "learning_rate": 2.7369665105267995e-06, + "loss": 0.4591, + "step": 14712 + }, + { + "epoch": 2.3, + "grad_norm": 27.826995916663137, + "learning_rate": 2.7358074264735534e-06, + "loss": 0.4661, + "step": 14713 + }, + { + "epoch": 2.3, + "grad_norm": 28.78130568686368, + "learning_rate": 2.7346485490096908e-06, + "loss": 0.4651, + "step": 14714 + }, + { + "epoch": 2.3, + "grad_norm": 22.15933784413965, + "learning_rate": 2.733489878168175e-06, + "loss": 0.4658, + "step": 14715 + }, + { + "epoch": 2.3, + "grad_norm": 18.930780814506697, + "learning_rate": 2.732331413981958e-06, + "loss": 0.4281, + "step": 14716 + }, + { + "epoch": 2.3, + "grad_norm": 15.150278522548328, + "learning_rate": 2.731173156483985e-06, + "loss": 0.3993, + "step": 14717 + }, + { + "epoch": 2.3, + "grad_norm": 25.11578189245044, + "learning_rate": 2.730015105707191e-06, + "loss": 0.5256, + "step": 14718 + }, + { + "epoch": 2.3, + "grad_norm": 
18.060144420985747, + "learning_rate": 2.7288572616845154e-06, + "loss": 0.4226, + "step": 14719 + }, + { + "epoch": 2.3, + "grad_norm": 22.270587037624495, + "learning_rate": 2.7276996244488864e-06, + "loss": 0.4851, + "step": 14720 + }, + { + "epoch": 2.3, + "grad_norm": 23.69489096465625, + "learning_rate": 2.7265421940332283e-06, + "loss": 0.4996, + "step": 14721 + }, + { + "epoch": 2.3, + "grad_norm": 17.65475279781367, + "learning_rate": 2.725384970470455e-06, + "loss": 0.4392, + "step": 14722 + }, + { + "epoch": 2.3, + "grad_norm": 23.89639241926165, + "learning_rate": 2.724227953793475e-06, + "loss": 0.4943, + "step": 14723 + }, + { + "epoch": 2.3, + "grad_norm": 19.830173977256038, + "learning_rate": 2.7230711440351952e-06, + "loss": 0.4506, + "step": 14724 + }, + { + "epoch": 2.3, + "grad_norm": 18.002047370994212, + "learning_rate": 2.7219145412285187e-06, + "loss": 0.5654, + "step": 14725 + }, + { + "epoch": 2.3, + "grad_norm": 30.83761012448493, + "learning_rate": 2.7207581454063314e-06, + "loss": 0.4628, + "step": 14726 + }, + { + "epoch": 2.3, + "grad_norm": 19.420598344895975, + "learning_rate": 2.7196019566015286e-06, + "loss": 0.4241, + "step": 14727 + }, + { + "epoch": 2.3, + "grad_norm": 19.726341596820642, + "learning_rate": 2.7184459748469836e-06, + "loss": 0.442, + "step": 14728 + }, + { + "epoch": 2.3, + "grad_norm": 28.157998006417163, + "learning_rate": 2.7172902001755787e-06, + "loss": 0.4655, + "step": 14729 + }, + { + "epoch": 2.3, + "grad_norm": 18.637164559726063, + "learning_rate": 2.7161346326201763e-06, + "loss": 0.4612, + "step": 14730 + }, + { + "epoch": 2.3, + "grad_norm": 19.990402636917032, + "learning_rate": 2.7149792722136446e-06, + "loss": 0.424, + "step": 14731 + }, + { + "epoch": 2.3, + "grad_norm": 17.8277824629726, + "learning_rate": 2.713824118988844e-06, + "loss": 0.4213, + "step": 14732 + }, + { + "epoch": 2.3, + "grad_norm": 22.966103460986012, + "learning_rate": 2.7126691729786202e-06, + "loss": 0.3914, + "step": 14733 + }, + { + "epoch": 2.3, + "grad_norm": 18.871155453871058, + "learning_rate": 2.711514434215825e-06, + "loss": 0.4956, + "step": 14734 + }, + { + "epoch": 2.3, + "grad_norm": 18.273026851950597, + "learning_rate": 2.710359902733293e-06, + "loss": 0.4185, + "step": 14735 + }, + { + "epoch": 2.3, + "grad_norm": 22.39572743871331, + "learning_rate": 2.709205578563859e-06, + "loss": 0.4317, + "step": 14736 + }, + { + "epoch": 2.3, + "grad_norm": 23.75973473988637, + "learning_rate": 2.7080514617403584e-06, + "loss": 0.485, + "step": 14737 + }, + { + "epoch": 2.3, + "grad_norm": 22.986755810526127, + "learning_rate": 2.706897552295609e-06, + "loss": 0.4726, + "step": 14738 + }, + { + "epoch": 2.3, + "grad_norm": 18.84436533763036, + "learning_rate": 2.705743850262422e-06, + "loss": 0.4531, + "step": 14739 + }, + { + "epoch": 2.3, + "grad_norm": 21.006405353056806, + "learning_rate": 2.704590355673614e-06, + "loss": 0.4994, + "step": 14740 + }, + { + "epoch": 2.3, + "grad_norm": 19.168732031082126, + "learning_rate": 2.7034370685619894e-06, + "loss": 0.4639, + "step": 14741 + }, + { + "epoch": 2.3, + "grad_norm": 25.70326264063381, + "learning_rate": 2.7022839889603514e-06, + "loss": 0.5261, + "step": 14742 + }, + { + "epoch": 2.3, + "grad_norm": 26.200516599920157, + "learning_rate": 2.7011311169014807e-06, + "loss": 0.5424, + "step": 14743 + }, + { + "epoch": 2.3, + "grad_norm": 20.137120982314368, + "learning_rate": 2.6999784524181727e-06, + "loss": 0.4332, + "step": 14744 + }, + { + "epoch": 2.3, + "grad_norm": 
18.503260156884863, + "learning_rate": 2.698825995543207e-06, + "loss": 0.4426, + "step": 14745 + }, + { + "epoch": 2.3, + "grad_norm": 17.890467453975056, + "learning_rate": 2.6976737463093617e-06, + "loss": 0.5504, + "step": 14746 + }, + { + "epoch": 2.3, + "grad_norm": 3.410389858341277, + "learning_rate": 2.696521704749403e-06, + "loss": 0.4595, + "step": 14747 + }, + { + "epoch": 2.3, + "grad_norm": 28.532383847138505, + "learning_rate": 2.6953698708960916e-06, + "loss": 0.4939, + "step": 14748 + }, + { + "epoch": 2.3, + "grad_norm": 24.730443736033425, + "learning_rate": 2.6942182447821884e-06, + "loss": 0.5174, + "step": 14749 + }, + { + "epoch": 2.3, + "grad_norm": 19.030762659920697, + "learning_rate": 2.693066826440447e-06, + "loss": 0.3762, + "step": 14750 + }, + { + "epoch": 2.3, + "grad_norm": 20.309325667082327, + "learning_rate": 2.691915615903611e-06, + "loss": 0.4684, + "step": 14751 + }, + { + "epoch": 2.3, + "grad_norm": 15.336966322184672, + "learning_rate": 2.6907646132044174e-06, + "loss": 0.4399, + "step": 14752 + }, + { + "epoch": 2.3, + "grad_norm": 22.96643673056194, + "learning_rate": 2.6896138183756015e-06, + "loss": 0.466, + "step": 14753 + }, + { + "epoch": 2.3, + "grad_norm": 28.01286912651057, + "learning_rate": 2.6884632314498947e-06, + "loss": 0.5197, + "step": 14754 + }, + { + "epoch": 2.3, + "grad_norm": 28.32817280360805, + "learning_rate": 2.6873128524600144e-06, + "loss": 0.4914, + "step": 14755 + }, + { + "epoch": 2.3, + "grad_norm": 24.038694696894883, + "learning_rate": 2.686162681438682e-06, + "loss": 0.4478, + "step": 14756 + }, + { + "epoch": 2.31, + "grad_norm": 20.72828883096704, + "learning_rate": 2.6850127184186015e-06, + "loss": 0.4786, + "step": 14757 + }, + { + "epoch": 2.31, + "grad_norm": 15.499255774401032, + "learning_rate": 2.6838629634324796e-06, + "loss": 0.4649, + "step": 14758 + }, + { + "epoch": 2.31, + "grad_norm": 20.49377016157838, + "learning_rate": 2.6827134165130175e-06, + "loss": 0.4751, + "step": 14759 + }, + { + "epoch": 2.31, + "grad_norm": 33.80068617339733, + "learning_rate": 2.681564077692903e-06, + "loss": 0.465, + "step": 14760 + }, + { + "epoch": 2.31, + "grad_norm": 19.10800867350413, + "learning_rate": 2.6804149470048293e-06, + "loss": 0.4257, + "step": 14761 + }, + { + "epoch": 2.31, + "grad_norm": 18.92176748177484, + "learning_rate": 2.6792660244814683e-06, + "loss": 0.5182, + "step": 14762 + }, + { + "epoch": 2.31, + "grad_norm": 21.61863688678932, + "learning_rate": 2.6781173101555014e-06, + "loss": 0.5147, + "step": 14763 + }, + { + "epoch": 2.31, + "grad_norm": 22.51446621022123, + "learning_rate": 2.6769688040595933e-06, + "loss": 0.418, + "step": 14764 + }, + { + "epoch": 2.31, + "grad_norm": 35.37467467424671, + "learning_rate": 2.6758205062264076e-06, + "loss": 0.6174, + "step": 14765 + }, + { + "epoch": 2.31, + "grad_norm": 17.19856184104096, + "learning_rate": 2.6746724166886063e-06, + "loss": 0.4986, + "step": 14766 + }, + { + "epoch": 2.31, + "grad_norm": 21.73393693066997, + "learning_rate": 2.6735245354788354e-06, + "loss": 0.4074, + "step": 14767 + }, + { + "epoch": 2.31, + "grad_norm": 29.977719126804683, + "learning_rate": 2.6723768626297387e-06, + "loss": 0.5491, + "step": 14768 + }, + { + "epoch": 2.31, + "grad_norm": 15.036516328754196, + "learning_rate": 2.671229398173957e-06, + "loss": 0.4181, + "step": 14769 + }, + { + "epoch": 2.31, + "grad_norm": 18.795193939475638, + "learning_rate": 2.670082142144124e-06, + "loss": 0.4695, + "step": 14770 + }, + { + "epoch": 2.31, + "grad_norm": 
21.281777913103817, + "learning_rate": 2.66893509457287e-06, + "loss": 0.4283, + "step": 14771 + }, + { + "epoch": 2.31, + "grad_norm": 18.715814940672992, + "learning_rate": 2.6677882554928126e-06, + "loss": 0.4336, + "step": 14772 + }, + { + "epoch": 2.31, + "grad_norm": 18.29970615856538, + "learning_rate": 2.666641624936566e-06, + "loss": 0.4705, + "step": 14773 + }, + { + "epoch": 2.31, + "grad_norm": 25.7762620869203, + "learning_rate": 2.665495202936741e-06, + "loss": 0.3934, + "step": 14774 + }, + { + "epoch": 2.31, + "grad_norm": 14.45398289686087, + "learning_rate": 2.6643489895259454e-06, + "loss": 0.4339, + "step": 14775 + }, + { + "epoch": 2.31, + "grad_norm": 31.787859629787935, + "learning_rate": 2.6632029847367734e-06, + "loss": 0.4828, + "step": 14776 + }, + { + "epoch": 2.31, + "grad_norm": 17.304642750443232, + "learning_rate": 2.662057188601813e-06, + "loss": 0.4754, + "step": 14777 + }, + { + "epoch": 2.31, + "grad_norm": 21.273346968341556, + "learning_rate": 2.660911601153654e-06, + "loss": 0.4228, + "step": 14778 + }, + { + "epoch": 2.31, + "grad_norm": 16.552807188520944, + "learning_rate": 2.659766222424879e-06, + "loss": 0.4679, + "step": 14779 + }, + { + "epoch": 2.31, + "grad_norm": 17.11000004850306, + "learning_rate": 2.658621052448055e-06, + "loss": 0.4796, + "step": 14780 + }, + { + "epoch": 2.31, + "grad_norm": 18.583140371604497, + "learning_rate": 2.6574760912557575e-06, + "loss": 0.4617, + "step": 14781 + }, + { + "epoch": 2.31, + "grad_norm": 22.13267904780665, + "learning_rate": 2.656331338880541e-06, + "loss": 0.4948, + "step": 14782 + }, + { + "epoch": 2.31, + "grad_norm": 15.483142060774096, + "learning_rate": 2.6551867953549657e-06, + "loss": 0.4608, + "step": 14783 + }, + { + "epoch": 2.31, + "grad_norm": 19.424854003297458, + "learning_rate": 2.6540424607115845e-06, + "loss": 0.4336, + "step": 14784 + }, + { + "epoch": 2.31, + "grad_norm": 17.966072058776174, + "learning_rate": 2.6528983349829353e-06, + "loss": 0.4938, + "step": 14785 + }, + { + "epoch": 2.31, + "grad_norm": 25.3838812854603, + "learning_rate": 2.651754418201563e-06, + "loss": 0.5059, + "step": 14786 + }, + { + "epoch": 2.31, + "grad_norm": 20.55757092470731, + "learning_rate": 2.6506107103999924e-06, + "loss": 0.5013, + "step": 14787 + }, + { + "epoch": 2.31, + "grad_norm": 21.67083547969026, + "learning_rate": 2.6494672116107577e-06, + "loss": 0.4672, + "step": 14788 + }, + { + "epoch": 2.31, + "grad_norm": 12.768631326653288, + "learning_rate": 2.6483239218663724e-06, + "loss": 0.4431, + "step": 14789 + }, + { + "epoch": 2.31, + "grad_norm": 23.775635248215437, + "learning_rate": 2.647180841199354e-06, + "loss": 0.4165, + "step": 14790 + }, + { + "epoch": 2.31, + "grad_norm": 20.37594359719969, + "learning_rate": 2.6460379696422146e-06, + "loss": 0.5215, + "step": 14791 + }, + { + "epoch": 2.31, + "grad_norm": 17.149163491907018, + "learning_rate": 2.6448953072274532e-06, + "loss": 0.4261, + "step": 14792 + }, + { + "epoch": 2.31, + "grad_norm": 27.66035867979796, + "learning_rate": 2.6437528539875633e-06, + "loss": 0.4685, + "step": 14793 + }, + { + "epoch": 2.31, + "grad_norm": 18.53604255743931, + "learning_rate": 2.6426106099550396e-06, + "loss": 0.4199, + "step": 14794 + }, + { + "epoch": 2.31, + "grad_norm": 20.808610632910877, + "learning_rate": 2.6414685751623682e-06, + "loss": 0.5242, + "step": 14795 + }, + { + "epoch": 2.31, + "grad_norm": 26.38101476407635, + "learning_rate": 2.6403267496420238e-06, + "loss": 0.4226, + "step": 14796 + }, + { + "epoch": 2.31, + 
"grad_norm": 20.953883919708073, + "learning_rate": 2.639185133426484e-06, + "loss": 0.4586, + "step": 14797 + }, + { + "epoch": 2.31, + "grad_norm": 24.96119778576508, + "learning_rate": 2.6380437265482107e-06, + "loss": 0.4967, + "step": 14798 + }, + { + "epoch": 2.31, + "grad_norm": 19.063426896917715, + "learning_rate": 2.6369025290396664e-06, + "loss": 0.471, + "step": 14799 + }, + { + "epoch": 2.31, + "grad_norm": 24.121129323338522, + "learning_rate": 2.6357615409333092e-06, + "loss": 0.5235, + "step": 14800 + }, + { + "epoch": 2.31, + "grad_norm": 28.479236777572076, + "learning_rate": 2.634620762261587e-06, + "loss": 0.5212, + "step": 14801 + }, + { + "epoch": 2.31, + "grad_norm": 26.14978696533939, + "learning_rate": 2.6334801930569387e-06, + "loss": 0.4359, + "step": 14802 + }, + { + "epoch": 2.31, + "grad_norm": 15.385133531371865, + "learning_rate": 2.6323398333518047e-06, + "loss": 0.3778, + "step": 14803 + }, + { + "epoch": 2.31, + "grad_norm": 18.217725480424736, + "learning_rate": 2.631199683178615e-06, + "loss": 0.4403, + "step": 14804 + }, + { + "epoch": 2.31, + "grad_norm": 15.753549080849846, + "learning_rate": 2.6300597425698015e-06, + "loss": 0.4559, + "step": 14805 + }, + { + "epoch": 2.31, + "grad_norm": 23.199802247465566, + "learning_rate": 2.6289200115577727e-06, + "loss": 0.469, + "step": 14806 + }, + { + "epoch": 2.31, + "grad_norm": 16.925972175709823, + "learning_rate": 2.627780490174946e-06, + "loss": 0.4779, + "step": 14807 + }, + { + "epoch": 2.31, + "grad_norm": 23.888678641712463, + "learning_rate": 2.6266411784537294e-06, + "loss": 0.4734, + "step": 14808 + }, + { + "epoch": 2.31, + "grad_norm": 29.56573948742745, + "learning_rate": 2.625502076426528e-06, + "loss": 0.4755, + "step": 14809 + }, + { + "epoch": 2.31, + "grad_norm": 24.176629647454032, + "learning_rate": 2.6243631841257332e-06, + "loss": 0.486, + "step": 14810 + }, + { + "epoch": 2.31, + "grad_norm": 17.585281193322523, + "learning_rate": 2.6232245015837323e-06, + "loss": 0.4237, + "step": 14811 + }, + { + "epoch": 2.31, + "grad_norm": 23.804607391848606, + "learning_rate": 2.6220860288329108e-06, + "loss": 0.4012, + "step": 14812 + }, + { + "epoch": 2.31, + "grad_norm": 17.361003595040447, + "learning_rate": 2.6209477659056503e-06, + "loss": 0.4458, + "step": 14813 + }, + { + "epoch": 2.31, + "grad_norm": 29.78269713009057, + "learning_rate": 2.6198097128343147e-06, + "loss": 0.4823, + "step": 14814 + }, + { + "epoch": 2.31, + "grad_norm": 27.436299516911774, + "learning_rate": 2.618671869651278e-06, + "loss": 0.4859, + "step": 14815 + }, + { + "epoch": 2.31, + "grad_norm": 26.03135524398759, + "learning_rate": 2.617534236388891e-06, + "loss": 0.426, + "step": 14816 + }, + { + "epoch": 2.31, + "grad_norm": 24.54870931885095, + "learning_rate": 2.616396813079516e-06, + "loss": 0.5183, + "step": 14817 + }, + { + "epoch": 2.31, + "grad_norm": 19.097912615598116, + "learning_rate": 2.615259599755492e-06, + "loss": 0.4959, + "step": 14818 + }, + { + "epoch": 2.31, + "grad_norm": 18.04726304797163, + "learning_rate": 2.614122596449166e-06, + "loss": 0.4563, + "step": 14819 + }, + { + "epoch": 2.31, + "grad_norm": 22.833553091845644, + "learning_rate": 2.6129858031928746e-06, + "loss": 0.4487, + "step": 14820 + }, + { + "epoch": 2.32, + "grad_norm": 18.616677906779472, + "learning_rate": 2.611849220018944e-06, + "loss": 0.4976, + "step": 14821 + }, + { + "epoch": 2.32, + "grad_norm": 22.643266251972197, + "learning_rate": 2.610712846959702e-06, + "loss": 0.4414, + "step": 14822 + }, + { + 
"epoch": 2.32, + "grad_norm": 15.073181399367094, + "learning_rate": 2.609576684047461e-06, + "loss": 0.4214, + "step": 14823 + }, + { + "epoch": 2.32, + "grad_norm": 27.644867918566085, + "learning_rate": 2.6084407313145354e-06, + "loss": 0.5168, + "step": 14824 + }, + { + "epoch": 2.32, + "grad_norm": 24.276763378764976, + "learning_rate": 2.6073049887932357e-06, + "loss": 0.4718, + "step": 14825 + }, + { + "epoch": 2.32, + "grad_norm": 17.10417396931099, + "learning_rate": 2.606169456515857e-06, + "loss": 0.4461, + "step": 14826 + }, + { + "epoch": 2.32, + "grad_norm": 18.48619740757821, + "learning_rate": 2.6050341345146902e-06, + "loss": 0.4862, + "step": 14827 + }, + { + "epoch": 2.32, + "grad_norm": 25.439154093150872, + "learning_rate": 2.6038990228220275e-06, + "loss": 0.478, + "step": 14828 + }, + { + "epoch": 2.32, + "grad_norm": 20.906699752389017, + "learning_rate": 2.6027641214701495e-06, + "loss": 0.4522, + "step": 14829 + }, + { + "epoch": 2.32, + "grad_norm": 20.301037713919737, + "learning_rate": 2.601629430491338e-06, + "loss": 0.4749, + "step": 14830 + }, + { + "epoch": 2.32, + "grad_norm": 15.233602452374694, + "learning_rate": 2.6004949499178522e-06, + "loss": 0.4284, + "step": 14831 + }, + { + "epoch": 2.32, + "grad_norm": 22.47842621460263, + "learning_rate": 2.5993606797819616e-06, + "loss": 0.4687, + "step": 14832 + }, + { + "epoch": 2.32, + "grad_norm": 15.425854689750977, + "learning_rate": 2.5982266201159236e-06, + "loss": 0.4504, + "step": 14833 + }, + { + "epoch": 2.32, + "grad_norm": 21.461040963172433, + "learning_rate": 2.5970927709519942e-06, + "loss": 0.5189, + "step": 14834 + }, + { + "epoch": 2.32, + "grad_norm": 22.111971924386424, + "learning_rate": 2.5959591323224152e-06, + "loss": 0.4433, + "step": 14835 + }, + { + "epoch": 2.32, + "grad_norm": 17.55038206833877, + "learning_rate": 2.5948257042594248e-06, + "loss": 0.4911, + "step": 14836 + }, + { + "epoch": 2.32, + "grad_norm": 20.941442181129986, + "learning_rate": 2.5936924867952596e-06, + "loss": 0.491, + "step": 14837 + }, + { + "epoch": 2.32, + "grad_norm": 28.30753601001517, + "learning_rate": 2.5925594799621503e-06, + "loss": 0.4588, + "step": 14838 + }, + { + "epoch": 2.32, + "grad_norm": 15.024396194283737, + "learning_rate": 2.5914266837923153e-06, + "loss": 0.4093, + "step": 14839 + }, + { + "epoch": 2.32, + "grad_norm": 21.974316825679026, + "learning_rate": 2.5902940983179694e-06, + "loss": 0.5072, + "step": 14840 + }, + { + "epoch": 2.32, + "grad_norm": 22.16822372357109, + "learning_rate": 2.589161723571324e-06, + "loss": 0.4847, + "step": 14841 + }, + { + "epoch": 2.32, + "grad_norm": 17.215014280924883, + "learning_rate": 2.5880295595845885e-06, + "loss": 0.5208, + "step": 14842 + }, + { + "epoch": 2.32, + "grad_norm": 22.876328135326183, + "learning_rate": 2.586897606389952e-06, + "loss": 0.4503, + "step": 14843 + }, + { + "epoch": 2.32, + "grad_norm": 34.072124884356555, + "learning_rate": 2.585765864019615e-06, + "loss": 0.408, + "step": 14844 + }, + { + "epoch": 2.32, + "grad_norm": 18.428078083498058, + "learning_rate": 2.5846343325057556e-06, + "loss": 0.4284, + "step": 14845 + }, + { + "epoch": 2.32, + "grad_norm": 15.794546595276005, + "learning_rate": 2.583503011880558e-06, + "loss": 0.4672, + "step": 14846 + }, + { + "epoch": 2.32, + "grad_norm": 30.61878933990655, + "learning_rate": 2.5823719021761993e-06, + "loss": 0.5086, + "step": 14847 + }, + { + "epoch": 2.32, + "grad_norm": 26.845127475043842, + "learning_rate": 2.5812410034248426e-06, + "loss": 0.4499, + "step": 
14848 + }, + { + "epoch": 2.32, + "grad_norm": 24.689449787403717, + "learning_rate": 2.5801103156586536e-06, + "loss": 0.4536, + "step": 14849 + }, + { + "epoch": 2.32, + "grad_norm": 21.768953118210405, + "learning_rate": 2.5789798389097855e-06, + "loss": 0.4558, + "step": 14850 + }, + { + "epoch": 2.32, + "grad_norm": 14.603273176706368, + "learning_rate": 2.5778495732103914e-06, + "loss": 0.4389, + "step": 14851 + }, + { + "epoch": 2.32, + "grad_norm": 15.59337215787625, + "learning_rate": 2.5767195185926098e-06, + "loss": 0.4187, + "step": 14852 + }, + { + "epoch": 2.32, + "grad_norm": 18.49086633224654, + "learning_rate": 2.5755896750885834e-06, + "loss": 0.4934, + "step": 14853 + }, + { + "epoch": 2.32, + "grad_norm": 21.443562248250814, + "learning_rate": 2.5744600427304467e-06, + "loss": 0.4737, + "step": 14854 + }, + { + "epoch": 2.32, + "grad_norm": 27.265632918228267, + "learning_rate": 2.573330621550323e-06, + "loss": 0.4643, + "step": 14855 + }, + { + "epoch": 2.32, + "grad_norm": 18.102236767700052, + "learning_rate": 2.5722014115803284e-06, + "loss": 0.5043, + "step": 14856 + }, + { + "epoch": 2.32, + "grad_norm": 42.18891286850637, + "learning_rate": 2.57107241285258e-06, + "loss": 0.4472, + "step": 14857 + }, + { + "epoch": 2.32, + "grad_norm": 24.464202794195778, + "learning_rate": 2.569943625399187e-06, + "loss": 0.4397, + "step": 14858 + }, + { + "epoch": 2.32, + "grad_norm": 14.301314694360217, + "learning_rate": 2.5688150492522533e-06, + "loss": 0.4834, + "step": 14859 + }, + { + "epoch": 2.32, + "grad_norm": 38.41842983805012, + "learning_rate": 2.567686684443873e-06, + "loss": 0.5286, + "step": 14860 + }, + { + "epoch": 2.32, + "grad_norm": 21.73256821324706, + "learning_rate": 2.5665585310061327e-06, + "loss": 0.4968, + "step": 14861 + }, + { + "epoch": 2.32, + "grad_norm": 14.696909525666674, + "learning_rate": 2.5654305889711182e-06, + "loss": 0.3983, + "step": 14862 + }, + { + "epoch": 2.32, + "grad_norm": 18.193662637424165, + "learning_rate": 2.564302858370913e-06, + "loss": 0.5031, + "step": 14863 + }, + { + "epoch": 2.32, + "grad_norm": 23.51864185688541, + "learning_rate": 2.5631753392375846e-06, + "loss": 0.438, + "step": 14864 + }, + { + "epoch": 2.32, + "grad_norm": 15.578503771494018, + "learning_rate": 2.562048031603196e-06, + "loss": 0.4669, + "step": 14865 + }, + { + "epoch": 2.32, + "grad_norm": 24.248392692124835, + "learning_rate": 2.5609209354998097e-06, + "loss": 0.5014, + "step": 14866 + }, + { + "epoch": 2.32, + "grad_norm": 15.274470300706877, + "learning_rate": 2.5597940509594845e-06, + "loss": 0.4124, + "step": 14867 + }, + { + "epoch": 2.32, + "grad_norm": 17.404641410115246, + "learning_rate": 2.55866737801426e-06, + "loss": 0.4448, + "step": 14868 + }, + { + "epoch": 2.32, + "grad_norm": 20.988768344908845, + "learning_rate": 2.557540916696185e-06, + "loss": 0.4244, + "step": 14869 + }, + { + "epoch": 2.32, + "grad_norm": 19.524144897910464, + "learning_rate": 2.5564146670372904e-06, + "loss": 0.5063, + "step": 14870 + }, + { + "epoch": 2.32, + "grad_norm": 20.849123852546313, + "learning_rate": 2.5552886290696075e-06, + "loss": 0.491, + "step": 14871 + }, + { + "epoch": 2.32, + "grad_norm": 20.02197733938225, + "learning_rate": 2.554162802825165e-06, + "loss": 0.5005, + "step": 14872 + }, + { + "epoch": 2.32, + "grad_norm": 28.52587704033205, + "learning_rate": 2.553037188335973e-06, + "loss": 0.5299, + "step": 14873 + }, + { + "epoch": 2.32, + "grad_norm": 30.571850955114936, + "learning_rate": 2.5519117856340503e-06, + "loss": 0.444, 
+ "step": 14874 + }, + { + "epoch": 2.32, + "grad_norm": 31.508207762774102, + "learning_rate": 2.5507865947513966e-06, + "loss": 0.5001, + "step": 14875 + }, + { + "epoch": 2.32, + "grad_norm": 26.745713434179905, + "learning_rate": 2.549661615720017e-06, + "loss": 0.4894, + "step": 14876 + }, + { + "epoch": 2.32, + "grad_norm": 20.509532882464033, + "learning_rate": 2.5485368485719e-06, + "loss": 0.4895, + "step": 14877 + }, + { + "epoch": 2.32, + "grad_norm": 27.204400296694757, + "learning_rate": 2.5474122933390355e-06, + "loss": 0.4808, + "step": 14878 + }, + { + "epoch": 2.32, + "grad_norm": 24.238772374540634, + "learning_rate": 2.54628795005341e-06, + "loss": 0.4826, + "step": 14879 + }, + { + "epoch": 2.32, + "grad_norm": 19.43901433442772, + "learning_rate": 2.545163818746994e-06, + "loss": 0.4471, + "step": 14880 + }, + { + "epoch": 2.32, + "grad_norm": 21.967828119451124, + "learning_rate": 2.5440398994517546e-06, + "loss": 0.4474, + "step": 14881 + }, + { + "epoch": 2.32, + "grad_norm": 23.352975465021924, + "learning_rate": 2.54291619219966e-06, + "loss": 0.4161, + "step": 14882 + }, + { + "epoch": 2.32, + "grad_norm": 20.75014815507415, + "learning_rate": 2.541792697022666e-06, + "loss": 0.508, + "step": 14883 + }, + { + "epoch": 2.32, + "grad_norm": 32.89221903569449, + "learning_rate": 2.540669413952728e-06, + "loss": 0.5474, + "step": 14884 + }, + { + "epoch": 2.33, + "grad_norm": 21.21871007766676, + "learning_rate": 2.5395463430217883e-06, + "loss": 0.4734, + "step": 14885 + }, + { + "epoch": 2.33, + "grad_norm": 15.473334047437193, + "learning_rate": 2.538423484261783e-06, + "loss": 0.4387, + "step": 14886 + }, + { + "epoch": 2.33, + "grad_norm": 23.590986072886295, + "learning_rate": 2.537300837704648e-06, + "loss": 0.4992, + "step": 14887 + }, + { + "epoch": 2.33, + "grad_norm": 15.750634755861585, + "learning_rate": 2.536178403382317e-06, + "loss": 0.4633, + "step": 14888 + }, + { + "epoch": 2.33, + "grad_norm": 16.826223110092187, + "learning_rate": 2.535056181326704e-06, + "loss": 0.4211, + "step": 14889 + }, + { + "epoch": 2.33, + "grad_norm": 25.123645260806665, + "learning_rate": 2.533934171569724e-06, + "loss": 0.4863, + "step": 14890 + }, + { + "epoch": 2.33, + "grad_norm": 19.663848545394245, + "learning_rate": 2.5328123741432887e-06, + "loss": 0.4774, + "step": 14891 + }, + { + "epoch": 2.33, + "grad_norm": 17.10033263763426, + "learning_rate": 2.5316907890793007e-06, + "loss": 0.4488, + "step": 14892 + }, + { + "epoch": 2.33, + "grad_norm": 24.353011283085245, + "learning_rate": 2.530569416409665e-06, + "loss": 0.43, + "step": 14893 + }, + { + "epoch": 2.33, + "grad_norm": 19.914013441645707, + "learning_rate": 2.529448256166259e-06, + "loss": 0.5523, + "step": 14894 + }, + { + "epoch": 2.33, + "grad_norm": 22.388623450954917, + "learning_rate": 2.5283273083809746e-06, + "loss": 0.4799, + "step": 14895 + }, + { + "epoch": 2.33, + "grad_norm": 28.360423670314166, + "learning_rate": 2.52720657308569e-06, + "loss": 0.5104, + "step": 14896 + }, + { + "epoch": 2.33, + "grad_norm": 21.648751782449107, + "learning_rate": 2.526086050312283e-06, + "loss": 0.4867, + "step": 14897 + }, + { + "epoch": 2.33, + "grad_norm": 23.202380727461488, + "learning_rate": 2.5249657400926165e-06, + "loss": 0.4579, + "step": 14898 + }, + { + "epoch": 2.33, + "grad_norm": 27.208175456570928, + "learning_rate": 2.5238456424585477e-06, + "loss": 0.5395, + "step": 14899 + }, + { + "epoch": 2.33, + "grad_norm": 18.22642357450763, + "learning_rate": 2.522725757441936e-06, + "loss": 
0.523, + "step": 14900 + }, + { + "epoch": 2.33, + "grad_norm": 23.96946688690302, + "learning_rate": 2.5216060850746326e-06, + "loss": 0.4238, + "step": 14901 + }, + { + "epoch": 2.33, + "grad_norm": 24.70682862873449, + "learning_rate": 2.520486625388475e-06, + "loss": 0.5179, + "step": 14902 + }, + { + "epoch": 2.33, + "grad_norm": 21.26506501552932, + "learning_rate": 2.519367378415305e-06, + "loss": 0.4175, + "step": 14903 + }, + { + "epoch": 2.33, + "grad_norm": 28.265522268687448, + "learning_rate": 2.5182483441869477e-06, + "loss": 0.4958, + "step": 14904 + }, + { + "epoch": 2.33, + "grad_norm": 16.64060572681296, + "learning_rate": 2.5171295227352332e-06, + "loss": 0.4378, + "step": 14905 + }, + { + "epoch": 2.33, + "grad_norm": 16.50999705361037, + "learning_rate": 2.516010914091975e-06, + "loss": 0.5289, + "step": 14906 + }, + { + "epoch": 2.33, + "grad_norm": 27.060710645198675, + "learning_rate": 2.514892518288988e-06, + "loss": 0.5828, + "step": 14907 + }, + { + "epoch": 2.33, + "grad_norm": 18.108628868188458, + "learning_rate": 2.5137743353580833e-06, + "loss": 0.4716, + "step": 14908 + }, + { + "epoch": 2.33, + "grad_norm": 35.21383427481106, + "learning_rate": 2.512656365331053e-06, + "loss": 0.5287, + "step": 14909 + }, + { + "epoch": 2.33, + "grad_norm": 22.300545590803143, + "learning_rate": 2.5115386082396996e-06, + "loss": 0.4895, + "step": 14910 + }, + { + "epoch": 2.33, + "grad_norm": 18.891203170996736, + "learning_rate": 2.5104210641158045e-06, + "loss": 0.4978, + "step": 14911 + }, + { + "epoch": 2.33, + "grad_norm": 18.390122345186295, + "learning_rate": 2.509303732991153e-06, + "loss": 0.4928, + "step": 14912 + }, + { + "epoch": 2.33, + "grad_norm": 20.891904415135357, + "learning_rate": 2.508186614897524e-06, + "loss": 0.5075, + "step": 14913 + }, + { + "epoch": 2.33, + "grad_norm": 24.8286407349919, + "learning_rate": 2.507069709866685e-06, + "loss": 0.5637, + "step": 14914 + }, + { + "epoch": 2.33, + "grad_norm": 4.558267714111223, + "learning_rate": 2.5059530179303983e-06, + "loss": 0.509, + "step": 14915 + }, + { + "epoch": 2.33, + "grad_norm": 21.68190799579549, + "learning_rate": 2.5048365391204233e-06, + "loss": 0.4622, + "step": 14916 + }, + { + "epoch": 2.33, + "grad_norm": 14.198108666433214, + "learning_rate": 2.5037202734685117e-06, + "loss": 0.4392, + "step": 14917 + }, + { + "epoch": 2.33, + "grad_norm": 44.612409332816014, + "learning_rate": 2.502604221006417e-06, + "loss": 0.5159, + "step": 14918 + }, + { + "epoch": 2.33, + "grad_norm": 27.79677335389258, + "learning_rate": 2.5014883817658662e-06, + "loss": 0.4584, + "step": 14919 + }, + { + "epoch": 2.33, + "grad_norm": 21.223816181167074, + "learning_rate": 2.5003727557785997e-06, + "loss": 0.4899, + "step": 14920 + }, + { + "epoch": 2.33, + "grad_norm": 27.338412216169772, + "learning_rate": 2.4992573430763447e-06, + "loss": 0.4704, + "step": 14921 + }, + { + "epoch": 2.33, + "grad_norm": 19.266150324296394, + "learning_rate": 2.498142143690826e-06, + "loss": 0.4251, + "step": 14922 + }, + { + "epoch": 2.33, + "grad_norm": 26.36628844981907, + "learning_rate": 2.4970271576537563e-06, + "loss": 0.4463, + "step": 14923 + }, + { + "epoch": 2.33, + "grad_norm": 19.369843771302754, + "learning_rate": 2.4959123849968427e-06, + "loss": 0.4128, + "step": 14924 + }, + { + "epoch": 2.33, + "grad_norm": 15.515989629680922, + "learning_rate": 2.4947978257517903e-06, + "loss": 0.4476, + "step": 14925 + }, + { + "epoch": 2.33, + "grad_norm": 15.347493744395427, + "learning_rate": 2.4936834799503005e-06, 
+ "loss": 0.4164, + "step": 14926 + }, + { + "epoch": 2.33, + "grad_norm": 17.77561540779018, + "learning_rate": 2.4925693476240587e-06, + "loss": 0.4677, + "step": 14927 + }, + { + "epoch": 2.33, + "grad_norm": 20.057707899789197, + "learning_rate": 2.4914554288047553e-06, + "loss": 0.5281, + "step": 14928 + }, + { + "epoch": 2.33, + "grad_norm": 19.505151410565166, + "learning_rate": 2.4903417235240647e-06, + "loss": 0.4587, + "step": 14929 + }, + { + "epoch": 2.33, + "grad_norm": 22.570211867494013, + "learning_rate": 2.489228231813665e-06, + "loss": 0.4689, + "step": 14930 + }, + { + "epoch": 2.33, + "grad_norm": 26.963006089739416, + "learning_rate": 2.4881149537052183e-06, + "loss": 0.4297, + "step": 14931 + }, + { + "epoch": 2.33, + "grad_norm": 21.576649079476116, + "learning_rate": 2.4870018892303883e-06, + "loss": 0.4819, + "step": 14932 + }, + { + "epoch": 2.33, + "grad_norm": 12.821338733951984, + "learning_rate": 2.485889038420831e-06, + "loss": 0.4319, + "step": 14933 + }, + { + "epoch": 2.33, + "grad_norm": 21.333859014522588, + "learning_rate": 2.4847764013081923e-06, + "loss": 0.4518, + "step": 14934 + }, + { + "epoch": 2.33, + "grad_norm": 17.750719861759315, + "learning_rate": 2.483663977924119e-06, + "loss": 0.5296, + "step": 14935 + }, + { + "epoch": 2.33, + "grad_norm": 19.035616928027732, + "learning_rate": 2.482551768300242e-06, + "loss": 0.4475, + "step": 14936 + }, + { + "epoch": 2.33, + "grad_norm": 18.39575879709027, + "learning_rate": 2.4814397724681995e-06, + "loss": 0.4389, + "step": 14937 + }, + { + "epoch": 2.33, + "grad_norm": 26.18367423355775, + "learning_rate": 2.4803279904596067e-06, + "loss": 0.5237, + "step": 14938 + }, + { + "epoch": 2.33, + "grad_norm": 21.494608101793883, + "learning_rate": 2.479216422306091e-06, + "loss": 0.3787, + "step": 14939 + }, + { + "epoch": 2.33, + "grad_norm": 18.0936613904269, + "learning_rate": 2.4781050680392572e-06, + "loss": 0.4652, + "step": 14940 + }, + { + "epoch": 2.33, + "grad_norm": 27.019365501805396, + "learning_rate": 2.4769939276907163e-06, + "loss": 0.4932, + "step": 14941 + }, + { + "epoch": 2.33, + "grad_norm": 24.607130837220044, + "learning_rate": 2.4758830012920688e-06, + "loss": 0.4356, + "step": 14942 + }, + { + "epoch": 2.33, + "grad_norm": 32.46969584848756, + "learning_rate": 2.4747722888749082e-06, + "loss": 0.5453, + "step": 14943 + }, + { + "epoch": 2.33, + "grad_norm": 24.45369661778227, + "learning_rate": 2.4736617904708184e-06, + "loss": 0.4634, + "step": 14944 + }, + { + "epoch": 2.33, + "grad_norm": 21.218689123468057, + "learning_rate": 2.4725515061113835e-06, + "loss": 0.5306, + "step": 14945 + }, + { + "epoch": 2.33, + "grad_norm": 27.013655443471304, + "learning_rate": 2.471441435828181e-06, + "loss": 0.4706, + "step": 14946 + }, + { + "epoch": 2.33, + "grad_norm": 20.41876768538191, + "learning_rate": 2.470331579652784e-06, + "loss": 0.5321, + "step": 14947 + }, + { + "epoch": 2.33, + "grad_norm": 24.032984111707147, + "learning_rate": 2.4692219376167504e-06, + "loss": 0.417, + "step": 14948 + }, + { + "epoch": 2.34, + "grad_norm": 15.417769252703222, + "learning_rate": 2.4681125097516376e-06, + "loss": 0.4101, + "step": 14949 + }, + { + "epoch": 2.34, + "grad_norm": 16.586297617976975, + "learning_rate": 2.467003296088999e-06, + "loss": 0.3846, + "step": 14950 + }, + { + "epoch": 2.34, + "grad_norm": 15.620353305817154, + "learning_rate": 2.465894296660383e-06, + "loss": 0.4324, + "step": 14951 + }, + { + "epoch": 2.34, + "grad_norm": 18.054366319771617, + "learning_rate": 
2.464785511497325e-06, + "loss": 0.5084, + "step": 14952 + }, + { + "epoch": 2.34, + "grad_norm": 28.02748522360581, + "learning_rate": 2.4636769406313576e-06, + "loss": 0.4487, + "step": 14953 + }, + { + "epoch": 2.34, + "grad_norm": 15.005331870006021, + "learning_rate": 2.4625685840940083e-06, + "loss": 0.419, + "step": 14954 + }, + { + "epoch": 2.34, + "grad_norm": 28.732713900766228, + "learning_rate": 2.4614604419167997e-06, + "loss": 0.4639, + "step": 14955 + }, + { + "epoch": 2.34, + "grad_norm": 19.951818094790486, + "learning_rate": 2.460352514131249e-06, + "loss": 0.4381, + "step": 14956 + }, + { + "epoch": 2.34, + "grad_norm": 19.367471802114718, + "learning_rate": 2.459244800768862e-06, + "loss": 0.4936, + "step": 14957 + }, + { + "epoch": 2.34, + "grad_norm": 20.924971382003047, + "learning_rate": 2.458137301861139e-06, + "loss": 0.4543, + "step": 14958 + }, + { + "epoch": 2.34, + "grad_norm": 25.393652212000195, + "learning_rate": 2.457030017439579e-06, + "loss": 0.4744, + "step": 14959 + }, + { + "epoch": 2.34, + "grad_norm": 31.35668543375843, + "learning_rate": 2.455922947535675e-06, + "loss": 0.4553, + "step": 14960 + }, + { + "epoch": 2.34, + "grad_norm": 21.097606762702817, + "learning_rate": 2.4548160921809073e-06, + "loss": 0.5306, + "step": 14961 + }, + { + "epoch": 2.34, + "grad_norm": 19.973161160730598, + "learning_rate": 2.4537094514067596e-06, + "loss": 0.4066, + "step": 14962 + }, + { + "epoch": 2.34, + "grad_norm": 17.01353757137663, + "learning_rate": 2.4526030252446963e-06, + "loss": 0.4172, + "step": 14963 + }, + { + "epoch": 2.34, + "grad_norm": 22.944544113328586, + "learning_rate": 2.4514968137261918e-06, + "loss": 0.436, + "step": 14964 + }, + { + "epoch": 2.34, + "grad_norm": 29.676451297500044, + "learning_rate": 2.4503908168826985e-06, + "loss": 0.4781, + "step": 14965 + }, + { + "epoch": 2.34, + "grad_norm": 22.34743399293661, + "learning_rate": 2.449285034745674e-06, + "loss": 0.4258, + "step": 14966 + }, + { + "epoch": 2.34, + "grad_norm": 24.51332246192332, + "learning_rate": 2.4481794673465697e-06, + "loss": 0.4292, + "step": 14967 + }, + { + "epoch": 2.34, + "grad_norm": 22.55031654431513, + "learning_rate": 2.4470741147168242e-06, + "loss": 0.423, + "step": 14968 + }, + { + "epoch": 2.34, + "grad_norm": 24.123962968071744, + "learning_rate": 2.445968976887868e-06, + "loss": 0.4538, + "step": 14969 + }, + { + "epoch": 2.34, + "grad_norm": 25.20522426428361, + "learning_rate": 2.4448640538911363e-06, + "loss": 0.4326, + "step": 14970 + }, + { + "epoch": 2.34, + "grad_norm": 21.067320327973807, + "learning_rate": 2.443759345758051e-06, + "loss": 0.4336, + "step": 14971 + }, + { + "epoch": 2.34, + "grad_norm": 19.172972162745936, + "learning_rate": 2.442654852520032e-06, + "loss": 0.4923, + "step": 14972 + }, + { + "epoch": 2.34, + "grad_norm": 23.07625028037539, + "learning_rate": 2.441550574208489e-06, + "loss": 0.5377, + "step": 14973 + }, + { + "epoch": 2.34, + "grad_norm": 22.12302248948293, + "learning_rate": 2.4404465108548226e-06, + "loss": 0.534, + "step": 14974 + }, + { + "epoch": 2.34, + "grad_norm": 36.095100328031485, + "learning_rate": 2.4393426624904347e-06, + "loss": 0.5104, + "step": 14975 + }, + { + "epoch": 2.34, + "grad_norm": 19.292043007967393, + "learning_rate": 2.438239029146723e-06, + "loss": 0.4153, + "step": 14976 + }, + { + "epoch": 2.34, + "grad_norm": 31.048400311565103, + "learning_rate": 2.437135610855068e-06, + "loss": 0.4563, + "step": 14977 + }, + { + "epoch": 2.34, + "grad_norm": 16.961414223713774, + 
"learning_rate": 2.436032407646851e-06, + "loss": 0.4316, + "step": 14978 + }, + { + "epoch": 2.34, + "grad_norm": 17.57240716296963, + "learning_rate": 2.434929419553447e-06, + "loss": 0.4457, + "step": 14979 + }, + { + "epoch": 2.34, + "grad_norm": 23.265405732012503, + "learning_rate": 2.4338266466062242e-06, + "loss": 0.4394, + "step": 14980 + }, + { + "epoch": 2.34, + "grad_norm": 27.10760443124565, + "learning_rate": 2.432724088836549e-06, + "loss": 0.4543, + "step": 14981 + }, + { + "epoch": 2.34, + "grad_norm": 34.44885512425127, + "learning_rate": 2.431621746275773e-06, + "loss": 0.5066, + "step": 14982 + }, + { + "epoch": 2.34, + "grad_norm": 27.4942005230256, + "learning_rate": 2.430519618955245e-06, + "loss": 0.4659, + "step": 14983 + }, + { + "epoch": 2.34, + "grad_norm": 23.874659131397916, + "learning_rate": 2.429417706906311e-06, + "loss": 0.4105, + "step": 14984 + }, + { + "epoch": 2.34, + "grad_norm": 16.379084559060704, + "learning_rate": 2.428316010160312e-06, + "loss": 0.5344, + "step": 14985 + }, + { + "epoch": 2.34, + "grad_norm": 14.240202050230563, + "learning_rate": 2.427214528748576e-06, + "loss": 0.4811, + "step": 14986 + }, + { + "epoch": 2.34, + "grad_norm": 31.282736381387473, + "learning_rate": 2.4261132627024254e-06, + "loss": 0.497, + "step": 14987 + }, + { + "epoch": 2.34, + "grad_norm": 26.072178603805863, + "learning_rate": 2.425012212053184e-06, + "loss": 0.4797, + "step": 14988 + }, + { + "epoch": 2.34, + "grad_norm": 17.55136766776394, + "learning_rate": 2.4239113768321666e-06, + "loss": 0.4536, + "step": 14989 + }, + { + "epoch": 2.34, + "grad_norm": 25.996251690024376, + "learning_rate": 2.422810757070675e-06, + "loss": 0.4558, + "step": 14990 + }, + { + "epoch": 2.34, + "grad_norm": 15.55493883545863, + "learning_rate": 2.4217103528000153e-06, + "loss": 0.4331, + "step": 14991 + }, + { + "epoch": 2.34, + "grad_norm": 21.073009212313934, + "learning_rate": 2.4206101640514778e-06, + "loss": 0.4405, + "step": 14992 + }, + { + "epoch": 2.34, + "grad_norm": 23.764856347388857, + "learning_rate": 2.4195101908563556e-06, + "loss": 0.5174, + "step": 14993 + }, + { + "epoch": 2.34, + "grad_norm": 15.598587684706972, + "learning_rate": 2.4184104332459267e-06, + "loss": 0.4127, + "step": 14994 + }, + { + "epoch": 2.34, + "grad_norm": 16.26352661808254, + "learning_rate": 2.4173108912514696e-06, + "loss": 0.4606, + "step": 14995 + }, + { + "epoch": 2.34, + "grad_norm": 27.17008360660415, + "learning_rate": 2.4162115649042584e-06, + "loss": 0.516, + "step": 14996 + }, + { + "epoch": 2.34, + "grad_norm": 23.425129906671753, + "learning_rate": 2.415112454235551e-06, + "loss": 0.4293, + "step": 14997 + }, + { + "epoch": 2.34, + "grad_norm": 24.44199735875639, + "learning_rate": 2.41401355927661e-06, + "loss": 0.4917, + "step": 14998 + }, + { + "epoch": 2.34, + "grad_norm": 19.851185743651754, + "learning_rate": 2.4129148800586846e-06, + "loss": 0.4333, + "step": 14999 + }, + { + "epoch": 2.34, + "grad_norm": 22.86725538508925, + "learning_rate": 2.411816416613021e-06, + "loss": 0.4597, + "step": 15000 + }, + { + "epoch": 2.34, + "grad_norm": 40.98778962989567, + "learning_rate": 2.4107181689708614e-06, + "loss": 0.5131, + "step": 15001 + }, + { + "epoch": 2.34, + "grad_norm": 11.802075302152716, + "learning_rate": 2.4096201371634387e-06, + "loss": 0.4535, + "step": 15002 + }, + { + "epoch": 2.34, + "grad_norm": 17.372028739991396, + "learning_rate": 2.4085223212219767e-06, + "loss": 0.4252, + "step": 15003 + }, + { + "epoch": 2.34, + "grad_norm": 
20.576019779724053, + "learning_rate": 2.407424721177698e-06, + "loss": 0.4688, + "step": 15004 + }, + { + "epoch": 2.34, + "grad_norm": 12.118607587257904, + "learning_rate": 2.406327337061818e-06, + "loss": 0.4039, + "step": 15005 + }, + { + "epoch": 2.34, + "grad_norm": 16.77158144537063, + "learning_rate": 2.4052301689055536e-06, + "loss": 0.5015, + "step": 15006 + }, + { + "epoch": 2.34, + "grad_norm": 24.119996615644077, + "learning_rate": 2.4041332167400944e-06, + "loss": 0.5551, + "step": 15007 + }, + { + "epoch": 2.34, + "grad_norm": 25.989182301652665, + "learning_rate": 2.4030364805966423e-06, + "loss": 0.4872, + "step": 15008 + }, + { + "epoch": 2.34, + "grad_norm": 20.823894124675242, + "learning_rate": 2.401939960506389e-06, + "loss": 0.4754, + "step": 15009 + }, + { + "epoch": 2.34, + "grad_norm": 14.71575180119783, + "learning_rate": 2.400843656500521e-06, + "loss": 0.4743, + "step": 15010 + }, + { + "epoch": 2.34, + "grad_norm": 16.538955506135007, + "learning_rate": 2.3997475686102147e-06, + "loss": 0.4732, + "step": 15011 + }, + { + "epoch": 2.34, + "grad_norm": 23.860486072159112, + "learning_rate": 2.3986516968666385e-06, + "loss": 0.4324, + "step": 15012 + }, + { + "epoch": 2.35, + "grad_norm": 18.621210674465352, + "learning_rate": 2.397556041300961e-06, + "loss": 0.4393, + "step": 15013 + }, + { + "epoch": 2.35, + "grad_norm": 16.63412330997982, + "learning_rate": 2.396460601944346e-06, + "loss": 0.414, + "step": 15014 + }, + { + "epoch": 2.35, + "grad_norm": 25.2569740656539, + "learning_rate": 2.3953653788279395e-06, + "loss": 0.5455, + "step": 15015 + }, + { + "epoch": 2.35, + "grad_norm": 27.54138978255306, + "learning_rate": 2.3942703719828965e-06, + "loss": 0.531, + "step": 15016 + }, + { + "epoch": 2.35, + "grad_norm": 22.553068131286206, + "learning_rate": 2.393175581440352e-06, + "loss": 0.4352, + "step": 15017 + }, + { + "epoch": 2.35, + "grad_norm": 24.916161770207733, + "learning_rate": 2.3920810072314473e-06, + "loss": 0.5124, + "step": 15018 + }, + { + "epoch": 2.35, + "grad_norm": 17.102864184970702, + "learning_rate": 2.3909866493873048e-06, + "loss": 0.4557, + "step": 15019 + }, + { + "epoch": 2.35, + "grad_norm": 12.91727923458385, + "learning_rate": 2.389892507939051e-06, + "loss": 0.4645, + "step": 15020 + }, + { + "epoch": 2.35, + "grad_norm": 16.530191500294986, + "learning_rate": 2.388798582917806e-06, + "loss": 0.4213, + "step": 15021 + }, + { + "epoch": 2.35, + "grad_norm": 17.047844349602368, + "learning_rate": 2.387704874354674e-06, + "loss": 0.4513, + "step": 15022 + }, + { + "epoch": 2.35, + "grad_norm": 23.088735549655407, + "learning_rate": 2.3866113822807645e-06, + "loss": 0.508, + "step": 15023 + }, + { + "epoch": 2.35, + "grad_norm": 22.924472581369244, + "learning_rate": 2.3855181067271703e-06, + "loss": 0.4427, + "step": 15024 + }, + { + "epoch": 2.35, + "grad_norm": 16.682589026198844, + "learning_rate": 2.3844250477249877e-06, + "loss": 0.4234, + "step": 15025 + }, + { + "epoch": 2.35, + "grad_norm": 30.908038435221503, + "learning_rate": 2.3833322053053045e-06, + "loss": 0.4948, + "step": 15026 + }, + { + "epoch": 2.35, + "grad_norm": 18.868917020265116, + "learning_rate": 2.382239579499198e-06, + "loss": 0.4493, + "step": 15027 + }, + { + "epoch": 2.35, + "grad_norm": 25.83841338404869, + "learning_rate": 2.3811471703377376e-06, + "loss": 0.4761, + "step": 15028 + }, + { + "epoch": 2.35, + "grad_norm": 22.192612713754983, + "learning_rate": 2.3800549778519956e-06, + "loss": 0.4263, + "step": 15029 + }, + { + "epoch": 2.35, + 
"grad_norm": 18.85421361513422, + "learning_rate": 2.3789630020730327e-06, + "loss": 0.5324, + "step": 15030 + }, + { + "epoch": 2.35, + "grad_norm": 18.814362989740594, + "learning_rate": 2.3778712430319095e-06, + "loss": 0.419, + "step": 15031 + }, + { + "epoch": 2.35, + "grad_norm": 24.775991052577602, + "learning_rate": 2.376779700759664e-06, + "loss": 0.4638, + "step": 15032 + }, + { + "epoch": 2.35, + "grad_norm": 26.492217591205897, + "learning_rate": 2.3756883752873426e-06, + "loss": 0.4974, + "step": 15033 + }, + { + "epoch": 2.35, + "grad_norm": 17.598084423178136, + "learning_rate": 2.3745972666459847e-06, + "loss": 0.4467, + "step": 15034 + }, + { + "epoch": 2.35, + "grad_norm": 19.346625671265098, + "learning_rate": 2.373506374866623e-06, + "loss": 0.4888, + "step": 15035 + }, + { + "epoch": 2.35, + "grad_norm": 17.6641484038716, + "learning_rate": 2.3724156999802795e-06, + "loss": 0.4539, + "step": 15036 + }, + { + "epoch": 2.35, + "grad_norm": 16.10505575538282, + "learning_rate": 2.3713252420179668e-06, + "loss": 0.4668, + "step": 15037 + }, + { + "epoch": 2.35, + "grad_norm": 14.419118771799873, + "learning_rate": 2.3702350010107023e-06, + "loss": 0.4351, + "step": 15038 + }, + { + "epoch": 2.35, + "grad_norm": 17.85497914012074, + "learning_rate": 2.3691449769894946e-06, + "loss": 0.4623, + "step": 15039 + }, + { + "epoch": 2.35, + "grad_norm": 19.339209468551555, + "learning_rate": 2.368055169985339e-06, + "loss": 0.41, + "step": 15040 + }, + { + "epoch": 2.35, + "grad_norm": 18.87039009224215, + "learning_rate": 2.366965580029227e-06, + "loss": 0.4823, + "step": 15041 + }, + { + "epoch": 2.35, + "grad_norm": 22.988464601056158, + "learning_rate": 2.365876207152149e-06, + "loss": 0.4696, + "step": 15042 + }, + { + "epoch": 2.35, + "grad_norm": 14.337716538432867, + "learning_rate": 2.3647870513850857e-06, + "loss": 0.4161, + "step": 15043 + }, + { + "epoch": 2.35, + "grad_norm": 20.433713748895297, + "learning_rate": 2.3636981127590155e-06, + "loss": 0.4285, + "step": 15044 + }, + { + "epoch": 2.35, + "grad_norm": 15.199931924794557, + "learning_rate": 2.362609391304903e-06, + "loss": 0.419, + "step": 15045 + }, + { + "epoch": 2.35, + "grad_norm": 29.65361755865329, + "learning_rate": 2.361520887053709e-06, + "loss": 0.4675, + "step": 15046 + }, + { + "epoch": 2.35, + "grad_norm": 33.776858864836804, + "learning_rate": 2.360432600036393e-06, + "loss": 0.4308, + "step": 15047 + }, + { + "epoch": 2.35, + "grad_norm": 32.109737641080365, + "learning_rate": 2.359344530283908e-06, + "loss": 0.6344, + "step": 15048 + }, + { + "epoch": 2.35, + "grad_norm": 19.87092471627326, + "learning_rate": 2.358256677827191e-06, + "loss": 0.3783, + "step": 15049 + }, + { + "epoch": 2.35, + "grad_norm": 25.31178849185736, + "learning_rate": 2.357169042697187e-06, + "loss": 0.5111, + "step": 15050 + }, + { + "epoch": 2.35, + "grad_norm": 21.185630606155875, + "learning_rate": 2.356081624924822e-06, + "loss": 0.423, + "step": 15051 + }, + { + "epoch": 2.35, + "grad_norm": 15.59275958254778, + "learning_rate": 2.354994424541027e-06, + "loss": 0.4502, + "step": 15052 + }, + { + "epoch": 2.35, + "grad_norm": 26.230387707866985, + "learning_rate": 2.353907441576715e-06, + "loss": 0.4714, + "step": 15053 + }, + { + "epoch": 2.35, + "grad_norm": 23.36885248844274, + "learning_rate": 2.3528206760628026e-06, + "loss": 0.5002, + "step": 15054 + }, + { + "epoch": 2.35, + "grad_norm": 14.731884886518342, + "learning_rate": 2.3517341280301997e-06, + "loss": 0.5039, + "step": 15055 + }, + { + "epoch": 
2.35, + "grad_norm": 20.666482198200214, + "learning_rate": 2.350647797509804e-06, + "loss": 0.5093, + "step": 15056 + }, + { + "epoch": 2.35, + "grad_norm": 22.761040849150536, + "learning_rate": 2.3495616845325074e-06, + "loss": 0.4546, + "step": 15057 + }, + { + "epoch": 2.35, + "grad_norm": 15.422941218373836, + "learning_rate": 2.348475789129202e-06, + "loss": 0.4277, + "step": 15058 + }, + { + "epoch": 2.35, + "grad_norm": 20.727040200133143, + "learning_rate": 2.3473901113307685e-06, + "loss": 0.4747, + "step": 15059 + }, + { + "epoch": 2.35, + "grad_norm": 18.24599733125058, + "learning_rate": 2.3463046511680864e-06, + "loss": 0.4769, + "step": 15060 + }, + { + "epoch": 2.35, + "grad_norm": 20.143046850149158, + "learning_rate": 2.3452194086720225e-06, + "loss": 0.462, + "step": 15061 + }, + { + "epoch": 2.35, + "grad_norm": 17.73811209328546, + "learning_rate": 2.3441343838734375e-06, + "loss": 0.4421, + "step": 15062 + }, + { + "epoch": 2.35, + "grad_norm": 19.13671426798326, + "learning_rate": 2.3430495768031923e-06, + "loss": 0.4325, + "step": 15063 + }, + { + "epoch": 2.35, + "grad_norm": 19.814185855241742, + "learning_rate": 2.3419649874921413e-06, + "loss": 0.4137, + "step": 15064 + }, + { + "epoch": 2.35, + "grad_norm": 15.830912419901717, + "learning_rate": 2.3408806159711262e-06, + "loss": 0.3923, + "step": 15065 + }, + { + "epoch": 2.35, + "grad_norm": 27.002734095504596, + "learning_rate": 2.3397964622709825e-06, + "loss": 0.5272, + "step": 15066 + }, + { + "epoch": 2.35, + "grad_norm": 15.237510153146669, + "learning_rate": 2.3387125264225475e-06, + "loss": 0.4649, + "step": 15067 + }, + { + "epoch": 2.35, + "grad_norm": 21.016762106384046, + "learning_rate": 2.3376288084566445e-06, + "loss": 0.4739, + "step": 15068 + }, + { + "epoch": 2.35, + "grad_norm": 24.655839518356228, + "learning_rate": 2.3365453084041e-06, + "loss": 0.5133, + "step": 15069 + }, + { + "epoch": 2.35, + "grad_norm": 23.933735255113675, + "learning_rate": 2.3354620262957238e-06, + "loss": 0.468, + "step": 15070 + }, + { + "epoch": 2.35, + "grad_norm": 25.10690323699414, + "learning_rate": 2.3343789621623213e-06, + "loss": 0.4302, + "step": 15071 + }, + { + "epoch": 2.35, + "grad_norm": 27.086627692293746, + "learning_rate": 2.3332961160346966e-06, + "loss": 0.4648, + "step": 15072 + }, + { + "epoch": 2.35, + "grad_norm": 25.18997946904562, + "learning_rate": 2.3322134879436487e-06, + "loss": 0.4564, + "step": 15073 + }, + { + "epoch": 2.35, + "grad_norm": 23.97426075171063, + "learning_rate": 2.3311310779199603e-06, + "loss": 0.4732, + "step": 15074 + }, + { + "epoch": 2.35, + "grad_norm": 18.09005984624344, + "learning_rate": 2.3300488859944217e-06, + "loss": 0.4992, + "step": 15075 + }, + { + "epoch": 2.35, + "grad_norm": 32.03438821631756, + "learning_rate": 2.328966912197802e-06, + "loss": 0.5262, + "step": 15076 + }, + { + "epoch": 2.36, + "grad_norm": 21.092950243447078, + "learning_rate": 2.3278851565608782e-06, + "loss": 0.4489, + "step": 15077 + }, + { + "epoch": 2.36, + "grad_norm": 18.48221056060619, + "learning_rate": 2.3268036191144117e-06, + "loss": 0.4537, + "step": 15078 + }, + { + "epoch": 2.36, + "grad_norm": 22.29270156384741, + "learning_rate": 2.3257222998891603e-06, + "loss": 0.4318, + "step": 15079 + }, + { + "epoch": 2.36, + "grad_norm": 28.135608742821066, + "learning_rate": 2.32464119891588e-06, + "loss": 0.5001, + "step": 15080 + }, + { + "epoch": 2.36, + "grad_norm": 14.235043348267983, + "learning_rate": 2.323560316225314e-06, + "loss": 0.4727, + "step": 15081 + }, + 
{ + "epoch": 2.36, + "grad_norm": 16.84446794752678, + "learning_rate": 2.3224796518481995e-06, + "loss": 0.4851, + "step": 15082 + }, + { + "epoch": 2.36, + "grad_norm": 20.97587809733731, + "learning_rate": 2.3213992058152733e-06, + "loss": 0.5008, + "step": 15083 + }, + { + "epoch": 2.36, + "grad_norm": 26.42728099584931, + "learning_rate": 2.320318978157263e-06, + "loss": 0.514, + "step": 15084 + }, + { + "epoch": 2.36, + "grad_norm": 34.64607227686141, + "learning_rate": 2.319238968904888e-06, + "loss": 0.4717, + "step": 15085 + }, + { + "epoch": 2.36, + "grad_norm": 22.462435583824682, + "learning_rate": 2.318159178088865e-06, + "loss": 0.4446, + "step": 15086 + }, + { + "epoch": 2.36, + "grad_norm": 27.888941684649076, + "learning_rate": 2.317079605739898e-06, + "loss": 0.4867, + "step": 15087 + }, + { + "epoch": 2.36, + "grad_norm": 20.10364849224933, + "learning_rate": 2.3160002518886927e-06, + "loss": 0.498, + "step": 15088 + }, + { + "epoch": 2.36, + "grad_norm": 23.953757768548204, + "learning_rate": 2.314921116565948e-06, + "loss": 0.4684, + "step": 15089 + }, + { + "epoch": 2.36, + "grad_norm": 23.420475021522208, + "learning_rate": 2.313842199802352e-06, + "loss": 0.4878, + "step": 15090 + }, + { + "epoch": 2.36, + "grad_norm": 24.036588174673145, + "learning_rate": 2.312763501628584e-06, + "loss": 0.5103, + "step": 15091 + }, + { + "epoch": 2.36, + "grad_norm": 27.239336427453367, + "learning_rate": 2.3116850220753253e-06, + "loss": 0.4904, + "step": 15092 + }, + { + "epoch": 2.36, + "grad_norm": 24.92729935643449, + "learning_rate": 2.3106067611732473e-06, + "loss": 0.4889, + "step": 15093 + }, + { + "epoch": 2.36, + "grad_norm": 22.614072179281564, + "learning_rate": 2.3095287189530203e-06, + "loss": 0.5348, + "step": 15094 + }, + { + "epoch": 2.36, + "grad_norm": 26.012088577086438, + "learning_rate": 2.3084508954452923e-06, + "loss": 0.4741, + "step": 15095 + }, + { + "epoch": 2.36, + "grad_norm": 23.222409866091148, + "learning_rate": 2.307373290680721e-06, + "loss": 0.424, + "step": 15096 + }, + { + "epoch": 2.36, + "grad_norm": 36.56650173711479, + "learning_rate": 2.3062959046899535e-06, + "loss": 0.5467, + "step": 15097 + }, + { + "epoch": 2.36, + "grad_norm": 16.061730655844066, + "learning_rate": 2.3052187375036327e-06, + "loss": 0.4801, + "step": 15098 + }, + { + "epoch": 2.36, + "grad_norm": 27.289106672585966, + "learning_rate": 2.3041417891523897e-06, + "loss": 0.4666, + "step": 15099 + }, + { + "epoch": 2.36, + "grad_norm": 22.583160254711412, + "learning_rate": 2.303065059666849e-06, + "loss": 0.4162, + "step": 15100 + }, + { + "epoch": 2.36, + "grad_norm": 20.483103717779745, + "learning_rate": 2.301988549077636e-06, + "loss": 0.4439, + "step": 15101 + }, + { + "epoch": 2.36, + "grad_norm": 22.947887461905324, + "learning_rate": 2.3009122574153673e-06, + "loss": 0.4959, + "step": 15102 + }, + { + "epoch": 2.36, + "grad_norm": 24.980107709409623, + "learning_rate": 2.2998361847106487e-06, + "loss": 0.45, + "step": 15103 + }, + { + "epoch": 2.36, + "grad_norm": 16.862130669069074, + "learning_rate": 2.2987603309940866e-06, + "loss": 0.4614, + "step": 15104 + }, + { + "epoch": 2.36, + "grad_norm": 17.08831933421522, + "learning_rate": 2.297684696296273e-06, + "loss": 0.4327, + "step": 15105 + }, + { + "epoch": 2.36, + "grad_norm": 15.927605998576137, + "learning_rate": 2.2966092806478e-06, + "loss": 0.4253, + "step": 15106 + }, + { + "epoch": 2.36, + "grad_norm": 19.731646935165134, + "learning_rate": 2.2955340840792563e-06, + "loss": 0.4872, + "step": 15107 
+ }, + { + "epoch": 2.36, + "grad_norm": 23.502861993208516, + "learning_rate": 2.294459106621214e-06, + "loss": 0.505, + "step": 15108 + }, + { + "epoch": 2.36, + "grad_norm": 27.69800669017182, + "learning_rate": 2.2933843483042495e-06, + "loss": 0.5039, + "step": 15109 + }, + { + "epoch": 2.36, + "grad_norm": 19.162776566417605, + "learning_rate": 2.2923098091589224e-06, + "loss": 0.4361, + "step": 15110 + }, + { + "epoch": 2.36, + "grad_norm": 23.103642994710736, + "learning_rate": 2.2912354892157995e-06, + "loss": 0.4665, + "step": 15111 + }, + { + "epoch": 2.36, + "grad_norm": 24.710251316525817, + "learning_rate": 2.2901613885054252e-06, + "loss": 0.4314, + "step": 15112 + }, + { + "epoch": 2.36, + "grad_norm": 18.402614824715194, + "learning_rate": 2.289087507058353e-06, + "loss": 0.4764, + "step": 15113 + }, + { + "epoch": 2.36, + "grad_norm": 21.036047549123868, + "learning_rate": 2.2880138449051227e-06, + "loss": 0.4791, + "step": 15114 + }, + { + "epoch": 2.36, + "grad_norm": 15.06097541706953, + "learning_rate": 2.2869404020762676e-06, + "loss": 0.4918, + "step": 15115 + }, + { + "epoch": 2.36, + "grad_norm": 20.834690349172593, + "learning_rate": 2.285867178602312e-06, + "loss": 0.4796, + "step": 15116 + }, + { + "epoch": 2.36, + "grad_norm": 22.41086027666266, + "learning_rate": 2.2847941745137826e-06, + "loss": 0.4671, + "step": 15117 + }, + { + "epoch": 2.36, + "grad_norm": 22.05177698987236, + "learning_rate": 2.2837213898411925e-06, + "loss": 0.3635, + "step": 15118 + }, + { + "epoch": 2.36, + "grad_norm": 19.04124619855663, + "learning_rate": 2.282648824615059e-06, + "loss": 0.5058, + "step": 15119 + }, + { + "epoch": 2.36, + "grad_norm": 27.816393747904115, + "learning_rate": 2.281576478865872e-06, + "loss": 0.4795, + "step": 15120 + }, + { + "epoch": 2.36, + "grad_norm": 21.70489589203088, + "learning_rate": 2.2805043526241342e-06, + "loss": 0.485, + "step": 15121 + }, + { + "epoch": 2.36, + "grad_norm": 28.742116212043584, + "learning_rate": 2.2794324459203377e-06, + "loss": 0.4964, + "step": 15122 + }, + { + "epoch": 2.36, + "grad_norm": 17.5325057086598, + "learning_rate": 2.278360758784969e-06, + "loss": 0.4472, + "step": 15123 + }, + { + "epoch": 2.36, + "grad_norm": 23.80214907767873, + "learning_rate": 2.277289291248502e-06, + "loss": 0.4132, + "step": 15124 + }, + { + "epoch": 2.36, + "grad_norm": 22.10269729138953, + "learning_rate": 2.2762180433414083e-06, + "loss": 0.4791, + "step": 15125 + }, + { + "epoch": 2.36, + "grad_norm": 20.031731506011766, + "learning_rate": 2.2751470150941558e-06, + "loss": 0.5251, + "step": 15126 + }, + { + "epoch": 2.36, + "grad_norm": 26.074122114823965, + "learning_rate": 2.2740762065372046e-06, + "loss": 0.3994, + "step": 15127 + }, + { + "epoch": 2.36, + "grad_norm": 18.40802495369327, + "learning_rate": 2.2730056177010075e-06, + "loss": 0.4494, + "step": 15128 + }, + { + "epoch": 2.36, + "grad_norm": 24.072359591101424, + "learning_rate": 2.2719352486160074e-06, + "loss": 0.468, + "step": 15129 + }, + { + "epoch": 2.36, + "grad_norm": 9.795083219011353, + "learning_rate": 2.270865099312648e-06, + "loss": 0.3959, + "step": 15130 + }, + { + "epoch": 2.36, + "grad_norm": 34.55590289826952, + "learning_rate": 2.2697951698213628e-06, + "loss": 0.4944, + "step": 15131 + }, + { + "epoch": 2.36, + "grad_norm": 23.966692484312077, + "learning_rate": 2.268725460172585e-06, + "loss": 0.4744, + "step": 15132 + }, + { + "epoch": 2.36, + "grad_norm": 17.845703329157487, + "learning_rate": 2.2676559703967317e-06, + "loss": 0.5224, + 
"step": 15133 + }, + { + "epoch": 2.36, + "grad_norm": 25.79507267395847, + "learning_rate": 2.266586700524217e-06, + "loss": 0.4531, + "step": 15134 + }, + { + "epoch": 2.36, + "grad_norm": 18.88192124289535, + "learning_rate": 2.2655176505854527e-06, + "loss": 0.5386, + "step": 15135 + }, + { + "epoch": 2.36, + "grad_norm": 26.416832025854777, + "learning_rate": 2.264448820610844e-06, + "loss": 0.4835, + "step": 15136 + }, + { + "epoch": 2.36, + "grad_norm": 27.42077197222985, + "learning_rate": 2.2633802106307835e-06, + "loss": 0.4408, + "step": 15137 + }, + { + "epoch": 2.36, + "grad_norm": 23.94313458717503, + "learning_rate": 2.2623118206756668e-06, + "loss": 0.4415, + "step": 15138 + }, + { + "epoch": 2.36, + "grad_norm": 16.750516646427478, + "learning_rate": 2.2612436507758726e-06, + "loss": 0.4662, + "step": 15139 + }, + { + "epoch": 2.36, + "grad_norm": 19.288046312969854, + "learning_rate": 2.260175700961785e-06, + "loss": 0.4662, + "step": 15140 + }, + { + "epoch": 2.37, + "grad_norm": 17.896086577545045, + "learning_rate": 2.2591079712637698e-06, + "loss": 0.4487, + "step": 15141 + }, + { + "epoch": 2.37, + "grad_norm": 20.10662859500066, + "learning_rate": 2.2580404617121954e-06, + "loss": 0.4259, + "step": 15142 + }, + { + "epoch": 2.37, + "grad_norm": 25.261615140085144, + "learning_rate": 2.2569731723374243e-06, + "loss": 0.4717, + "step": 15143 + }, + { + "epoch": 2.37, + "grad_norm": 26.266033891062857, + "learning_rate": 2.255906103169806e-06, + "loss": 0.4583, + "step": 15144 + }, + { + "epoch": 2.37, + "grad_norm": 22.01899804807438, + "learning_rate": 2.2548392542396856e-06, + "loss": 0.5174, + "step": 15145 + }, + { + "epoch": 2.37, + "grad_norm": 25.078344651921057, + "learning_rate": 2.2537726255774063e-06, + "loss": 0.5355, + "step": 15146 + }, + { + "epoch": 2.37, + "grad_norm": 19.49513815217672, + "learning_rate": 2.252706217213302e-06, + "loss": 0.4529, + "step": 15147 + }, + { + "epoch": 2.37, + "grad_norm": 19.20640744803605, + "learning_rate": 2.251640029177704e-06, + "loss": 0.4513, + "step": 15148 + }, + { + "epoch": 2.37, + "grad_norm": 22.20882558426196, + "learning_rate": 2.250574061500931e-06, + "loss": 0.4744, + "step": 15149 + }, + { + "epoch": 2.37, + "grad_norm": 18.609538546018904, + "learning_rate": 2.2495083142132944e-06, + "loss": 0.4265, + "step": 15150 + }, + { + "epoch": 2.37, + "grad_norm": 26.405454795195126, + "learning_rate": 2.2484427873451086e-06, + "loss": 0.475, + "step": 15151 + }, + { + "epoch": 2.37, + "grad_norm": 19.365715220181023, + "learning_rate": 2.247377480926678e-06, + "loss": 0.4119, + "step": 15152 + }, + { + "epoch": 2.37, + "grad_norm": 11.081622743119825, + "learning_rate": 2.246312394988296e-06, + "loss": 0.4392, + "step": 15153 + }, + { + "epoch": 2.37, + "grad_norm": 20.932720703552434, + "learning_rate": 2.2452475295602518e-06, + "loss": 0.4564, + "step": 15154 + }, + { + "epoch": 2.37, + "grad_norm": 18.11219220962849, + "learning_rate": 2.2441828846728307e-06, + "loss": 0.4462, + "step": 15155 + }, + { + "epoch": 2.37, + "grad_norm": 21.680290516208032, + "learning_rate": 2.243118460356312e-06, + "loss": 0.4261, + "step": 15156 + }, + { + "epoch": 2.37, + "grad_norm": 15.008526355659011, + "learning_rate": 2.2420542566409686e-06, + "loss": 0.4033, + "step": 15157 + }, + { + "epoch": 2.37, + "grad_norm": 17.946891196248757, + "learning_rate": 2.2409902735570643e-06, + "loss": 0.4615, + "step": 15158 + }, + { + "epoch": 2.37, + "grad_norm": 15.7169714617309, + "learning_rate": 2.2399265111348558e-06, + "loss": 
0.5126, + "step": 15159 + }, + { + "epoch": 2.37, + "grad_norm": 24.489491198999364, + "learning_rate": 2.238862969404596e-06, + "loss": 0.4774, + "step": 15160 + }, + { + "epoch": 2.37, + "grad_norm": 21.558503354134128, + "learning_rate": 2.2377996483965368e-06, + "loss": 0.4408, + "step": 15161 + }, + { + "epoch": 2.37, + "grad_norm": 24.781019896440743, + "learning_rate": 2.236736548140913e-06, + "loss": 0.5461, + "step": 15162 + }, + { + "epoch": 2.37, + "grad_norm": 19.67023745713467, + "learning_rate": 2.2356736686679624e-06, + "loss": 0.504, + "step": 15163 + }, + { + "epoch": 2.37, + "grad_norm": 17.553010393576393, + "learning_rate": 2.2346110100079076e-06, + "loss": 0.4241, + "step": 15164 + }, + { + "epoch": 2.37, + "grad_norm": 14.042129224106823, + "learning_rate": 2.2335485721909766e-06, + "loss": 0.4849, + "step": 15165 + }, + { + "epoch": 2.37, + "grad_norm": 19.516321256112047, + "learning_rate": 2.2324863552473776e-06, + "loss": 0.4786, + "step": 15166 + }, + { + "epoch": 2.37, + "grad_norm": 30.363196988407843, + "learning_rate": 2.2314243592073226e-06, + "loss": 0.4663, + "step": 15167 + }, + { + "epoch": 2.37, + "grad_norm": 14.034170561528624, + "learning_rate": 2.230362584101018e-06, + "loss": 0.4426, + "step": 15168 + }, + { + "epoch": 2.37, + "grad_norm": 17.68042147095207, + "learning_rate": 2.229301029958656e-06, + "loss": 0.4172, + "step": 15169 + }, + { + "epoch": 2.37, + "grad_norm": 17.598652276949753, + "learning_rate": 2.2282396968104235e-06, + "loss": 0.4323, + "step": 15170 + }, + { + "epoch": 2.37, + "grad_norm": 16.253130448943175, + "learning_rate": 2.227178584686509e-06, + "loss": 0.4624, + "step": 15171 + }, + { + "epoch": 2.37, + "grad_norm": 28.68904261295678, + "learning_rate": 2.226117693617088e-06, + "loss": 0.4532, + "step": 15172 + }, + { + "epoch": 2.37, + "grad_norm": 18.15407636569071, + "learning_rate": 2.2250570236323344e-06, + "loss": 0.4531, + "step": 15173 + }, + { + "epoch": 2.37, + "grad_norm": 17.526510357836578, + "learning_rate": 2.2239965747624117e-06, + "loss": 0.4421, + "step": 15174 + }, + { + "epoch": 2.37, + "grad_norm": 22.863900434829066, + "learning_rate": 2.222936347037474e-06, + "loss": 0.5133, + "step": 15175 + }, + { + "epoch": 2.37, + "grad_norm": 37.661697840448895, + "learning_rate": 2.2218763404876775e-06, + "loss": 0.5123, + "step": 15176 + }, + { + "epoch": 2.37, + "grad_norm": 35.003177265298156, + "learning_rate": 2.2208165551431706e-06, + "loss": 0.4869, + "step": 15177 + }, + { + "epoch": 2.37, + "grad_norm": 19.896395629073762, + "learning_rate": 2.21975699103409e-06, + "loss": 0.4571, + "step": 15178 + }, + { + "epoch": 2.37, + "grad_norm": 20.09295527842895, + "learning_rate": 2.2186976481905663e-06, + "loss": 0.4676, + "step": 15179 + }, + { + "epoch": 2.37, + "grad_norm": 33.75960256301567, + "learning_rate": 2.2176385266427302e-06, + "loss": 0.5048, + "step": 15180 + }, + { + "epoch": 2.37, + "grad_norm": 23.864206347047276, + "learning_rate": 2.216579626420702e-06, + "loss": 0.4616, + "step": 15181 + }, + { + "epoch": 2.37, + "grad_norm": 17.9554156526153, + "learning_rate": 2.2155209475546013e-06, + "loss": 0.4541, + "step": 15182 + }, + { + "epoch": 2.37, + "grad_norm": 27.950230723146994, + "learning_rate": 2.2144624900745247e-06, + "loss": 0.4778, + "step": 15183 + }, + { + "epoch": 2.37, + "grad_norm": 21.409156613119322, + "learning_rate": 2.2134042540105814e-06, + "loss": 0.4772, + "step": 15184 + }, + { + "epoch": 2.37, + "grad_norm": 23.49391084901629, + "learning_rate": 
2.2123462393928663e-06, + "loss": 0.4264, + "step": 15185 + }, + { + "epoch": 2.37, + "grad_norm": 17.893617807793394, + "learning_rate": 2.2112884462514707e-06, + "loss": 0.4835, + "step": 15186 + }, + { + "epoch": 2.37, + "grad_norm": 21.10285137495659, + "learning_rate": 2.210230874616475e-06, + "loss": 0.4756, + "step": 15187 + }, + { + "epoch": 2.37, + "grad_norm": 20.442786691519522, + "learning_rate": 2.2091735245179548e-06, + "loss": 0.4194, + "step": 15188 + }, + { + "epoch": 2.37, + "grad_norm": 25.597548588922628, + "learning_rate": 2.208116395985981e-06, + "loss": 0.4776, + "step": 15189 + }, + { + "epoch": 2.37, + "grad_norm": 23.568131821060923, + "learning_rate": 2.2070594890506216e-06, + "loss": 0.4597, + "step": 15190 + }, + { + "epoch": 2.37, + "grad_norm": 31.14080444529234, + "learning_rate": 2.206002803741929e-06, + "loss": 0.4679, + "step": 15191 + }, + { + "epoch": 2.37, + "grad_norm": 23.002903793213502, + "learning_rate": 2.2049463400899606e-06, + "loss": 0.4415, + "step": 15192 + }, + { + "epoch": 2.37, + "grad_norm": 21.99219630971295, + "learning_rate": 2.2038900981247545e-06, + "loss": 0.455, + "step": 15193 + }, + { + "epoch": 2.37, + "grad_norm": 15.539661793398427, + "learning_rate": 2.2028340778763544e-06, + "loss": 0.4065, + "step": 15194 + }, + { + "epoch": 2.37, + "grad_norm": 20.403671257978583, + "learning_rate": 2.201778279374794e-06, + "loss": 0.5221, + "step": 15195 + }, + { + "epoch": 2.37, + "grad_norm": 20.47130216310369, + "learning_rate": 2.2007227026500956e-06, + "loss": 0.4356, + "step": 15196 + }, + { + "epoch": 2.37, + "grad_norm": 23.045397497759183, + "learning_rate": 2.199667347732284e-06, + "loss": 0.5095, + "step": 15197 + }, + { + "epoch": 2.37, + "grad_norm": 40.87522694578659, + "learning_rate": 2.1986122146513654e-06, + "loss": 0.4589, + "step": 15198 + }, + { + "epoch": 2.37, + "grad_norm": 21.537142542464714, + "learning_rate": 2.1975573034373563e-06, + "loss": 0.4946, + "step": 15199 + }, + { + "epoch": 2.37, + "grad_norm": 14.27352048351801, + "learning_rate": 2.1965026141202495e-06, + "loss": 0.4413, + "step": 15200 + }, + { + "epoch": 2.37, + "grad_norm": 21.423561029841085, + "learning_rate": 2.195448146730044e-06, + "loss": 0.4912, + "step": 15201 + }, + { + "epoch": 2.37, + "grad_norm": 26.706838841712543, + "learning_rate": 2.1943939012967295e-06, + "loss": 0.5352, + "step": 15202 + }, + { + "epoch": 2.37, + "grad_norm": 18.325223293023893, + "learning_rate": 2.193339877850288e-06, + "loss": 0.4296, + "step": 15203 + }, + { + "epoch": 2.37, + "grad_norm": 30.31120406408753, + "learning_rate": 2.19228607642069e-06, + "loss": 0.4846, + "step": 15204 + }, + { + "epoch": 2.38, + "grad_norm": 22.647088281393838, + "learning_rate": 2.1912324970379084e-06, + "loss": 0.4687, + "step": 15205 + }, + { + "epoch": 2.38, + "grad_norm": 21.04969849370558, + "learning_rate": 2.190179139731906e-06, + "loss": 0.4253, + "step": 15206 + }, + { + "epoch": 2.38, + "grad_norm": 27.21354589410533, + "learning_rate": 2.1891260045326467e-06, + "loss": 0.4336, + "step": 15207 + }, + { + "epoch": 2.38, + "grad_norm": 16.34410482362443, + "learning_rate": 2.1880730914700687e-06, + "loss": 0.3635, + "step": 15208 + }, + { + "epoch": 2.38, + "grad_norm": 13.928483493779746, + "learning_rate": 2.1870204005741226e-06, + "loss": 0.4417, + "step": 15209 + }, + { + "epoch": 2.38, + "grad_norm": 26.76386835443305, + "learning_rate": 2.185967931874745e-06, + "loss": 0.5163, + "step": 15210 + }, + { + "epoch": 2.38, + "grad_norm": 23.337665815050382, + 
"learning_rate": 2.1849156854018715e-06, + "loss": 0.4789, + "step": 15211 + }, + { + "epoch": 2.38, + "grad_norm": 21.98255458471436, + "learning_rate": 2.1838636611854246e-06, + "loss": 0.4906, + "step": 15212 + }, + { + "epoch": 2.38, + "grad_norm": 17.883022576260995, + "learning_rate": 2.1828118592553195e-06, + "loss": 0.4344, + "step": 15213 + }, + { + "epoch": 2.38, + "grad_norm": 19.319707726045827, + "learning_rate": 2.181760279641473e-06, + "loss": 0.4343, + "step": 15214 + }, + { + "epoch": 2.38, + "grad_norm": 24.73896608619982, + "learning_rate": 2.180708922373792e-06, + "loss": 0.5158, + "step": 15215 + }, + { + "epoch": 2.38, + "grad_norm": 21.008709923474523, + "learning_rate": 2.1796577874821734e-06, + "loss": 0.4283, + "step": 15216 + }, + { + "epoch": 2.38, + "grad_norm": 26.025005388396746, + "learning_rate": 2.178606874996515e-06, + "loss": 0.5685, + "step": 15217 + }, + { + "epoch": 2.38, + "grad_norm": 34.11862513931091, + "learning_rate": 2.1775561849466987e-06, + "loss": 0.5322, + "step": 15218 + }, + { + "epoch": 2.38, + "grad_norm": 13.529657402317886, + "learning_rate": 2.176505717362609e-06, + "loss": 0.3853, + "step": 15219 + }, + { + "epoch": 2.38, + "grad_norm": 23.28974909187821, + "learning_rate": 2.1754554722741227e-06, + "loss": 0.469, + "step": 15220 + }, + { + "epoch": 2.38, + "grad_norm": 24.327234389088307, + "learning_rate": 2.1744054497111033e-06, + "loss": 0.5213, + "step": 15221 + }, + { + "epoch": 2.38, + "grad_norm": 21.708528128637457, + "learning_rate": 2.173355649703417e-06, + "loss": 0.4262, + "step": 15222 + }, + { + "epoch": 2.38, + "grad_norm": 25.981680667860044, + "learning_rate": 2.1723060722809155e-06, + "loss": 0.4433, + "step": 15223 + }, + { + "epoch": 2.38, + "grad_norm": 18.021885487173225, + "learning_rate": 2.1712567174734523e-06, + "loss": 0.4914, + "step": 15224 + }, + { + "epoch": 2.38, + "grad_norm": 16.080261608435933, + "learning_rate": 2.170207585310865e-06, + "loss": 0.4246, + "step": 15225 + }, + { + "epoch": 2.38, + "grad_norm": 20.35669418406093, + "learning_rate": 2.1691586758229976e-06, + "loss": 0.4319, + "step": 15226 + }, + { + "epoch": 2.38, + "grad_norm": 24.717864791727223, + "learning_rate": 2.168109989039674e-06, + "loss": 0.5165, + "step": 15227 + }, + { + "epoch": 2.38, + "grad_norm": 19.313844705728073, + "learning_rate": 2.1670615249907234e-06, + "loss": 0.4082, + "step": 15228 + }, + { + "epoch": 2.38, + "grad_norm": 22.27455009804577, + "learning_rate": 2.1660132837059576e-06, + "loss": 0.427, + "step": 15229 + }, + { + "epoch": 2.38, + "grad_norm": 28.38154265305997, + "learning_rate": 2.164965265215191e-06, + "loss": 0.5739, + "step": 15230 + }, + { + "epoch": 2.38, + "grad_norm": 21.880419604734247, + "learning_rate": 2.1639174695482322e-06, + "loss": 0.4886, + "step": 15231 + }, + { + "epoch": 2.38, + "grad_norm": 20.93977163218643, + "learning_rate": 2.162869896734876e-06, + "loss": 0.5195, + "step": 15232 + }, + { + "epoch": 2.38, + "grad_norm": 18.304286216475976, + "learning_rate": 2.161822546804914e-06, + "loss": 0.4787, + "step": 15233 + }, + { + "epoch": 2.38, + "grad_norm": 18.907581546883872, + "learning_rate": 2.1607754197881324e-06, + "loss": 0.4781, + "step": 15234 + }, + { + "epoch": 2.38, + "grad_norm": 18.136662726528343, + "learning_rate": 2.1597285157143122e-06, + "loss": 0.529, + "step": 15235 + }, + { + "epoch": 2.38, + "grad_norm": 16.70373016542226, + "learning_rate": 2.15868183461323e-06, + "loss": 0.4257, + "step": 15236 + }, + { + "epoch": 2.38, + "grad_norm": 
15.803857775908057, + "learning_rate": 2.1576353765146486e-06, + "loss": 0.4258, + "step": 15237 + }, + { + "epoch": 2.38, + "grad_norm": 21.926739208091387, + "learning_rate": 2.1565891414483266e-06, + "loss": 0.5076, + "step": 15238 + }, + { + "epoch": 2.38, + "grad_norm": 17.914958365823445, + "learning_rate": 2.155543129444021e-06, + "loss": 0.4534, + "step": 15239 + }, + { + "epoch": 2.38, + "grad_norm": 14.620965244687518, + "learning_rate": 2.154497340531484e-06, + "loss": 0.4115, + "step": 15240 + }, + { + "epoch": 2.38, + "grad_norm": 20.489008931491618, + "learning_rate": 2.1534517747404516e-06, + "loss": 0.4078, + "step": 15241 + }, + { + "epoch": 2.38, + "grad_norm": 21.020740197787905, + "learning_rate": 2.152406432100659e-06, + "loss": 0.4945, + "step": 15242 + }, + { + "epoch": 2.38, + "grad_norm": 34.305891782958305, + "learning_rate": 2.1513613126418364e-06, + "loss": 0.5067, + "step": 15243 + }, + { + "epoch": 2.38, + "grad_norm": 32.25065685682638, + "learning_rate": 2.1503164163937062e-06, + "loss": 0.5569, + "step": 15244 + }, + { + "epoch": 2.38, + "grad_norm": 19.246492199405413, + "learning_rate": 2.1492717433859887e-06, + "loss": 0.4588, + "step": 15245 + }, + { + "epoch": 2.38, + "grad_norm": 15.875655993289474, + "learning_rate": 2.1482272936483896e-06, + "loss": 0.4507, + "step": 15246 + }, + { + "epoch": 2.38, + "grad_norm": 25.398889432586618, + "learning_rate": 2.147183067210611e-06, + "loss": 0.4635, + "step": 15247 + }, + { + "epoch": 2.38, + "grad_norm": 16.829173364897283, + "learning_rate": 2.1461390641023516e-06, + "loss": 0.3928, + "step": 15248 + }, + { + "epoch": 2.38, + "grad_norm": 18.466488108280227, + "learning_rate": 2.145095284353307e-06, + "loss": 0.4859, + "step": 15249 + }, + { + "epoch": 2.38, + "grad_norm": 22.865140941282668, + "learning_rate": 2.1440517279931528e-06, + "loss": 0.4691, + "step": 15250 + }, + { + "epoch": 2.38, + "grad_norm": 26.47114574214595, + "learning_rate": 2.1430083950515755e-06, + "loss": 0.5508, + "step": 15251 + }, + { + "epoch": 2.38, + "grad_norm": 20.067723503341373, + "learning_rate": 2.1419652855582406e-06, + "loss": 0.4572, + "step": 15252 + }, + { + "epoch": 2.38, + "grad_norm": 17.574947489603044, + "learning_rate": 2.1409223995428187e-06, + "loss": 0.4094, + "step": 15253 + }, + { + "epoch": 2.38, + "grad_norm": 18.520740203244234, + "learning_rate": 2.1398797370349644e-06, + "loss": 0.4278, + "step": 15254 + }, + { + "epoch": 2.38, + "grad_norm": 17.120394540791928, + "learning_rate": 2.1388372980643315e-06, + "loss": 0.4548, + "step": 15255 + }, + { + "epoch": 2.38, + "grad_norm": 15.811261880598597, + "learning_rate": 2.13779508266057e-06, + "loss": 0.4311, + "step": 15256 + }, + { + "epoch": 2.38, + "grad_norm": 15.838750567266487, + "learning_rate": 2.136753090853314e-06, + "loss": 0.4487, + "step": 15257 + }, + { + "epoch": 2.38, + "grad_norm": 27.622548763507183, + "learning_rate": 2.1357113226722036e-06, + "loss": 0.4885, + "step": 15258 + }, + { + "epoch": 2.38, + "grad_norm": 19.994836262835964, + "learning_rate": 2.1346697781468593e-06, + "loss": 0.4761, + "step": 15259 + }, + { + "epoch": 2.38, + "grad_norm": 15.120743627985972, + "learning_rate": 2.1336284573069067e-06, + "loss": 0.4331, + "step": 15260 + }, + { + "epoch": 2.38, + "grad_norm": 17.730562044693688, + "learning_rate": 2.1325873601819613e-06, + "loss": 0.4798, + "step": 15261 + }, + { + "epoch": 2.38, + "grad_norm": 20.816124974843554, + "learning_rate": 2.1315464868016287e-06, + "loss": 0.4176, + "step": 15262 + }, + { + 
"epoch": 2.38, + "grad_norm": 15.066580148553282, + "learning_rate": 2.130505837195508e-06, + "loss": 0.4189, + "step": 15263 + }, + { + "epoch": 2.38, + "grad_norm": 24.81745196480578, + "learning_rate": 2.129465411393198e-06, + "loss": 0.4104, + "step": 15264 + }, + { + "epoch": 2.38, + "grad_norm": 13.185868853329204, + "learning_rate": 2.1284252094242908e-06, + "loss": 0.4024, + "step": 15265 + }, + { + "epoch": 2.38, + "grad_norm": 28.804102140810347, + "learning_rate": 2.1273852313183663e-06, + "loss": 0.5208, + "step": 15266 + }, + { + "epoch": 2.38, + "grad_norm": 23.183263133904518, + "learning_rate": 2.126345477104996e-06, + "loss": 0.4094, + "step": 15267 + }, + { + "epoch": 2.38, + "grad_norm": 15.447683793891619, + "learning_rate": 2.125305946813756e-06, + "loss": 0.464, + "step": 15268 + }, + { + "epoch": 2.39, + "grad_norm": 17.236747514415185, + "learning_rate": 2.1242666404742074e-06, + "loss": 0.4207, + "step": 15269 + }, + { + "epoch": 2.39, + "grad_norm": 35.1851084101385, + "learning_rate": 2.1232275581159123e-06, + "loss": 0.4921, + "step": 15270 + }, + { + "epoch": 2.39, + "grad_norm": 27.142526780668916, + "learning_rate": 2.122188699768416e-06, + "loss": 0.5261, + "step": 15271 + }, + { + "epoch": 2.39, + "grad_norm": 26.914903823591946, + "learning_rate": 2.1211500654612625e-06, + "loss": 0.4496, + "step": 15272 + }, + { + "epoch": 2.39, + "grad_norm": 22.448980001916134, + "learning_rate": 2.120111655223993e-06, + "loss": 0.449, + "step": 15273 + }, + { + "epoch": 2.39, + "grad_norm": 25.74322992625142, + "learning_rate": 2.1190734690861403e-06, + "loss": 0.4286, + "step": 15274 + }, + { + "epoch": 2.39, + "grad_norm": 15.213273706092936, + "learning_rate": 2.1180355070772287e-06, + "loss": 0.3943, + "step": 15275 + }, + { + "epoch": 2.39, + "grad_norm": 20.0684325212691, + "learning_rate": 2.116997769226773e-06, + "loss": 0.4632, + "step": 15276 + }, + { + "epoch": 2.39, + "grad_norm": 17.165732116302507, + "learning_rate": 2.11596025556429e-06, + "loss": 0.4407, + "step": 15277 + }, + { + "epoch": 2.39, + "grad_norm": 20.299523644748298, + "learning_rate": 2.114922966119287e-06, + "loss": 0.4801, + "step": 15278 + }, + { + "epoch": 2.39, + "grad_norm": 22.816497956679257, + "learning_rate": 2.113885900921261e-06, + "loss": 0.4531, + "step": 15279 + }, + { + "epoch": 2.39, + "grad_norm": 18.627782371408447, + "learning_rate": 2.1128490599997078e-06, + "loss": 0.4729, + "step": 15280 + }, + { + "epoch": 2.39, + "grad_norm": 23.13071447853857, + "learning_rate": 2.1118124433841114e-06, + "loss": 0.4311, + "step": 15281 + }, + { + "epoch": 2.39, + "grad_norm": 14.15286762311381, + "learning_rate": 2.1107760511039553e-06, + "loss": 0.401, + "step": 15282 + }, + { + "epoch": 2.39, + "grad_norm": 14.096559783668733, + "learning_rate": 2.109739883188715e-06, + "loss": 0.4717, + "step": 15283 + }, + { + "epoch": 2.39, + "grad_norm": 15.369880701111178, + "learning_rate": 2.1087039396678544e-06, + "loss": 0.4749, + "step": 15284 + }, + { + "epoch": 2.39, + "grad_norm": 19.97325918166824, + "learning_rate": 2.107668220570841e-06, + "loss": 0.4723, + "step": 15285 + }, + { + "epoch": 2.39, + "grad_norm": 19.392319367006277, + "learning_rate": 2.1066327259271223e-06, + "loss": 0.4417, + "step": 15286 + }, + { + "epoch": 2.39, + "grad_norm": 19.358544998449325, + "learning_rate": 2.1055974557661553e-06, + "loss": 0.4639, + "step": 15287 + }, + { + "epoch": 2.39, + "grad_norm": 15.538118530338991, + "learning_rate": 2.1045624101173754e-06, + "loss": 0.48, + "step": 15288 + }, 
+ { + "epoch": 2.39, + "grad_norm": 18.54379026748019, + "learning_rate": 2.1035275890102214e-06, + "loss": 0.4564, + "step": 15289 + }, + { + "epoch": 2.39, + "grad_norm": 22.67548195167444, + "learning_rate": 2.1024929924741265e-06, + "loss": 0.4887, + "step": 15290 + }, + { + "epoch": 2.39, + "grad_norm": 24.105772885345147, + "learning_rate": 2.1014586205385113e-06, + "loss": 0.451, + "step": 15291 + }, + { + "epoch": 2.39, + "grad_norm": 15.880563801004168, + "learning_rate": 2.1004244732327896e-06, + "loss": 0.4955, + "step": 15292 + }, + { + "epoch": 2.39, + "grad_norm": 18.33069642415794, + "learning_rate": 2.0993905505863755e-06, + "loss": 0.4027, + "step": 15293 + }, + { + "epoch": 2.39, + "grad_norm": 20.187647428690234, + "learning_rate": 2.098356852628671e-06, + "loss": 0.4164, + "step": 15294 + }, + { + "epoch": 2.39, + "grad_norm": 15.326887277138487, + "learning_rate": 2.0973233793890812e-06, + "loss": 0.4642, + "step": 15295 + }, + { + "epoch": 2.39, + "grad_norm": 29.452539743735503, + "learning_rate": 2.0962901308969864e-06, + "loss": 0.4929, + "step": 15296 + }, + { + "epoch": 2.39, + "grad_norm": 19.54224098553883, + "learning_rate": 2.0952571071817763e-06, + "loss": 0.491, + "step": 15297 + }, + { + "epoch": 2.39, + "grad_norm": 17.126878092015584, + "learning_rate": 2.094224308272831e-06, + "loss": 0.4714, + "step": 15298 + }, + { + "epoch": 2.39, + "grad_norm": 12.508026746074151, + "learning_rate": 2.0931917341995233e-06, + "loss": 0.4208, + "step": 15299 + }, + { + "epoch": 2.39, + "grad_norm": 17.945344411732755, + "learning_rate": 2.092159384991217e-06, + "loss": 0.4564, + "step": 15300 + }, + { + "epoch": 2.39, + "grad_norm": 29.7161328941377, + "learning_rate": 2.09112726067727e-06, + "loss": 0.4838, + "step": 15301 + }, + { + "epoch": 2.39, + "grad_norm": 14.722281553197112, + "learning_rate": 2.0900953612870367e-06, + "loss": 0.4828, + "step": 15302 + }, + { + "epoch": 2.39, + "grad_norm": 19.280879863388925, + "learning_rate": 2.089063686849867e-06, + "loss": 0.4541, + "step": 15303 + }, + { + "epoch": 2.39, + "grad_norm": 24.265620390676048, + "learning_rate": 2.0880322373950957e-06, + "loss": 0.4254, + "step": 15304 + }, + { + "epoch": 2.39, + "grad_norm": 11.847108449072214, + "learning_rate": 2.0870010129520624e-06, + "loss": 0.3639, + "step": 15305 + }, + { + "epoch": 2.39, + "grad_norm": 19.175332719037446, + "learning_rate": 2.085970013550088e-06, + "loss": 0.4502, + "step": 15306 + }, + { + "epoch": 2.39, + "grad_norm": 16.620323661778215, + "learning_rate": 2.0849392392184963e-06, + "loss": 0.4247, + "step": 15307 + }, + { + "epoch": 2.39, + "grad_norm": 16.89964126482681, + "learning_rate": 2.0839086899866055e-06, + "loss": 0.3961, + "step": 15308 + }, + { + "epoch": 2.39, + "grad_norm": 26.751031064724515, + "learning_rate": 2.0828783658837194e-06, + "loss": 0.503, + "step": 15309 + }, + { + "epoch": 2.39, + "grad_norm": 19.3922401173626, + "learning_rate": 2.0818482669391428e-06, + "loss": 0.4951, + "step": 15310 + }, + { + "epoch": 2.39, + "grad_norm": 29.168647014905293, + "learning_rate": 2.080818393182167e-06, + "loss": 0.4679, + "step": 15311 + }, + { + "epoch": 2.39, + "grad_norm": 17.284134225318383, + "learning_rate": 2.079788744642085e-06, + "loss": 0.3895, + "step": 15312 + }, + { + "epoch": 2.39, + "grad_norm": 17.468573998432976, + "learning_rate": 2.078759321348177e-06, + "loss": 0.4335, + "step": 15313 + }, + { + "epoch": 2.39, + "grad_norm": 19.35319610402778, + "learning_rate": 2.0777301233297187e-06, + "loss": 0.4567, + "step": 
15314 + }, + { + "epoch": 2.39, + "grad_norm": 23.803433324202466, + "learning_rate": 2.076701150615985e-06, + "loss": 0.4792, + "step": 15315 + }, + { + "epoch": 2.39, + "grad_norm": 25.24004927339067, + "learning_rate": 2.075672403236235e-06, + "loss": 0.4346, + "step": 15316 + }, + { + "epoch": 2.39, + "grad_norm": 22.991296868892224, + "learning_rate": 2.0746438812197244e-06, + "loss": 0.456, + "step": 15317 + }, + { + "epoch": 2.39, + "grad_norm": 27.957388735536725, + "learning_rate": 2.0736155845957053e-06, + "loss": 0.4773, + "step": 15318 + }, + { + "epoch": 2.39, + "grad_norm": 15.605519259565634, + "learning_rate": 2.0725875133934216e-06, + "loss": 0.4481, + "step": 15319 + }, + { + "epoch": 2.39, + "grad_norm": 19.73151073978322, + "learning_rate": 2.0715596676421167e-06, + "loss": 0.4388, + "step": 15320 + }, + { + "epoch": 2.39, + "grad_norm": 17.991339745138355, + "learning_rate": 2.070532047371012e-06, + "loss": 0.4013, + "step": 15321 + }, + { + "epoch": 2.39, + "grad_norm": 23.16828528988592, + "learning_rate": 2.0695046526093375e-06, + "loss": 0.4142, + "step": 15322 + }, + { + "epoch": 2.39, + "grad_norm": 20.058899684159, + "learning_rate": 2.0684774833863117e-06, + "loss": 0.4261, + "step": 15323 + }, + { + "epoch": 2.39, + "grad_norm": 21.119604420841213, + "learning_rate": 2.067450539731148e-06, + "loss": 0.465, + "step": 15324 + }, + { + "epoch": 2.39, + "grad_norm": 24.24926345421895, + "learning_rate": 2.0664238216730513e-06, + "loss": 0.4469, + "step": 15325 + }, + { + "epoch": 2.39, + "grad_norm": 16.12030764577204, + "learning_rate": 2.0653973292412177e-06, + "loss": 0.4801, + "step": 15326 + }, + { + "epoch": 2.39, + "grad_norm": 14.922300739106715, + "learning_rate": 2.0643710624648427e-06, + "loss": 0.3977, + "step": 15327 + }, + { + "epoch": 2.39, + "grad_norm": 16.896278905716102, + "learning_rate": 2.063345021373114e-06, + "loss": 0.4221, + "step": 15328 + }, + { + "epoch": 2.39, + "grad_norm": 16.350592302390524, + "learning_rate": 2.0623192059952114e-06, + "loss": 0.4511, + "step": 15329 + }, + { + "epoch": 2.39, + "grad_norm": 15.65658045513267, + "learning_rate": 2.061293616360304e-06, + "loss": 0.4579, + "step": 15330 + }, + { + "epoch": 2.39, + "grad_norm": 32.541728066465026, + "learning_rate": 2.0602682524975616e-06, + "loss": 0.4114, + "step": 15331 + }, + { + "epoch": 2.39, + "grad_norm": 24.310236105285252, + "learning_rate": 2.0592431144361458e-06, + "loss": 0.4857, + "step": 15332 + }, + { + "epoch": 2.4, + "grad_norm": 25.37305633906512, + "learning_rate": 2.0582182022052134e-06, + "loss": 0.5092, + "step": 15333 + }, + { + "epoch": 2.4, + "grad_norm": 18.066039909978777, + "learning_rate": 2.0571935158339084e-06, + "loss": 0.4638, + "step": 15334 + }, + { + "epoch": 2.4, + "grad_norm": 24.331466619873176, + "learning_rate": 2.0561690553513725e-06, + "loss": 0.4662, + "step": 15335 + }, + { + "epoch": 2.4, + "grad_norm": 28.017903718223245, + "learning_rate": 2.0551448207867407e-06, + "loss": 0.5036, + "step": 15336 + }, + { + "epoch": 2.4, + "grad_norm": 21.294786548508146, + "learning_rate": 2.0541208121691458e-06, + "loss": 0.4489, + "step": 15337 + }, + { + "epoch": 2.4, + "grad_norm": 18.567544250155628, + "learning_rate": 2.0530970295277032e-06, + "loss": 0.4863, + "step": 15338 + }, + { + "epoch": 2.4, + "grad_norm": 17.931100286867284, + "learning_rate": 2.0520734728915358e-06, + "loss": 0.4708, + "step": 15339 + }, + { + "epoch": 2.4, + "grad_norm": 37.74441704810459, + "learning_rate": 2.0510501422897466e-06, + "loss": 0.4936, + 
"step": 15340 + }, + { + "epoch": 2.4, + "grad_norm": 23.6797677173722, + "learning_rate": 2.050027037751444e-06, + "loss": 0.4403, + "step": 15341 + }, + { + "epoch": 2.4, + "grad_norm": 20.973351363146076, + "learning_rate": 2.0490041593057185e-06, + "loss": 0.4396, + "step": 15342 + }, + { + "epoch": 2.4, + "grad_norm": 22.134208738840865, + "learning_rate": 2.0479815069816643e-06, + "loss": 0.4349, + "step": 15343 + }, + { + "epoch": 2.4, + "grad_norm": 25.92914842273367, + "learning_rate": 2.0469590808083674e-06, + "loss": 0.4598, + "step": 15344 + }, + { + "epoch": 2.4, + "grad_norm": 23.291027910721137, + "learning_rate": 2.0459368808148983e-06, + "loss": 0.4279, + "step": 15345 + }, + { + "epoch": 2.4, + "grad_norm": 19.97231052934327, + "learning_rate": 2.0449149070303344e-06, + "loss": 0.5042, + "step": 15346 + }, + { + "epoch": 2.4, + "grad_norm": 18.486329738739798, + "learning_rate": 2.043893159483734e-06, + "loss": 0.4563, + "step": 15347 + }, + { + "epoch": 2.4, + "grad_norm": 32.7448715576632, + "learning_rate": 2.042871638204158e-06, + "loss": 0.4979, + "step": 15348 + }, + { + "epoch": 2.4, + "grad_norm": 20.77696737173364, + "learning_rate": 2.0418503432206604e-06, + "loss": 0.5243, + "step": 15349 + }, + { + "epoch": 2.4, + "grad_norm": 23.805140429332745, + "learning_rate": 2.040829274562284e-06, + "loss": 0.4392, + "step": 15350 + }, + { + "epoch": 2.4, + "grad_norm": 23.66522374520072, + "learning_rate": 2.0398084322580634e-06, + "loss": 0.4811, + "step": 15351 + }, + { + "epoch": 2.4, + "grad_norm": 24.87259473505785, + "learning_rate": 2.0387878163370354e-06, + "loss": 0.4471, + "step": 15352 + }, + { + "epoch": 2.4, + "grad_norm": 17.038117560517303, + "learning_rate": 2.0377674268282275e-06, + "loss": 0.4631, + "step": 15353 + }, + { + "epoch": 2.4, + "grad_norm": 20.373652843686266, + "learning_rate": 2.0367472637606554e-06, + "loss": 0.4535, + "step": 15354 + }, + { + "epoch": 2.4, + "grad_norm": 18.776490327793674, + "learning_rate": 2.0357273271633304e-06, + "loss": 0.4974, + "step": 15355 + }, + { + "epoch": 2.4, + "grad_norm": 18.12868131708863, + "learning_rate": 2.0347076170652624e-06, + "loss": 0.4719, + "step": 15356 + }, + { + "epoch": 2.4, + "grad_norm": 26.62670633619575, + "learning_rate": 2.0336881334954484e-06, + "loss": 0.4814, + "step": 15357 + }, + { + "epoch": 2.4, + "grad_norm": 13.178459496376743, + "learning_rate": 2.0326688764828873e-06, + "loss": 0.4184, + "step": 15358 + }, + { + "epoch": 2.4, + "grad_norm": 16.562927807375075, + "learning_rate": 2.0316498460565627e-06, + "loss": 0.4516, + "step": 15359 + }, + { + "epoch": 2.4, + "grad_norm": 15.897289915633605, + "learning_rate": 2.030631042245452e-06, + "loss": 0.4528, + "step": 15360 + }, + { + "epoch": 2.4, + "grad_norm": 23.380814730794636, + "learning_rate": 2.0296124650785333e-06, + "loss": 0.4434, + "step": 15361 + }, + { + "epoch": 2.4, + "grad_norm": 37.66922588717112, + "learning_rate": 2.0285941145847754e-06, + "loss": 0.5892, + "step": 15362 + }, + { + "epoch": 2.4, + "grad_norm": 19.163152481233393, + "learning_rate": 2.0275759907931356e-06, + "loss": 0.4204, + "step": 15363 + }, + { + "epoch": 2.4, + "grad_norm": 16.885929421890374, + "learning_rate": 2.0265580937325735e-06, + "loss": 0.4409, + "step": 15364 + }, + { + "epoch": 2.4, + "grad_norm": 18.469361192656066, + "learning_rate": 2.025540423432032e-06, + "loss": 0.4698, + "step": 15365 + }, + { + "epoch": 2.4, + "grad_norm": 21.994192649943056, + "learning_rate": 2.024522979920458e-06, + "loss": 0.4465, + "step": 
15366 + }, + { + "epoch": 2.4, + "grad_norm": 26.51679327306867, + "learning_rate": 2.023505763226783e-06, + "loss": 0.4988, + "step": 15367 + }, + { + "epoch": 2.4, + "grad_norm": 17.77590399343352, + "learning_rate": 2.0224887733799383e-06, + "loss": 0.4089, + "step": 15368 + }, + { + "epoch": 2.4, + "grad_norm": 32.02313432459231, + "learning_rate": 2.021472010408848e-06, + "loss": 0.4216, + "step": 15369 + }, + { + "epoch": 2.4, + "grad_norm": 30.690017619258448, + "learning_rate": 2.020455474342424e-06, + "loss": 0.5699, + "step": 15370 + }, + { + "epoch": 2.4, + "grad_norm": 20.010036847206578, + "learning_rate": 2.019439165209581e-06, + "loss": 0.4813, + "step": 15371 + }, + { + "epoch": 2.4, + "grad_norm": 17.770772061073693, + "learning_rate": 2.018423083039218e-06, + "loss": 0.3951, + "step": 15372 + }, + { + "epoch": 2.4, + "grad_norm": 25.315667106740317, + "learning_rate": 2.0174072278602352e-06, + "loss": 0.4572, + "step": 15373 + }, + { + "epoch": 2.4, + "grad_norm": 16.238974084083967, + "learning_rate": 2.0163915997015195e-06, + "loss": 0.4257, + "step": 15374 + }, + { + "epoch": 2.4, + "grad_norm": 22.102981106203305, + "learning_rate": 2.0153761985919575e-06, + "loss": 0.4603, + "step": 15375 + }, + { + "epoch": 2.4, + "grad_norm": 28.179075897026795, + "learning_rate": 2.014361024560424e-06, + "loss": 0.52, + "step": 15376 + }, + { + "epoch": 2.4, + "grad_norm": 17.324719065050832, + "learning_rate": 2.0133460776357906e-06, + "loss": 0.4679, + "step": 15377 + }, + { + "epoch": 2.4, + "grad_norm": 14.442114778843433, + "learning_rate": 2.012331357846926e-06, + "loss": 0.4396, + "step": 15378 + }, + { + "epoch": 2.4, + "grad_norm": 27.113192681389247, + "learning_rate": 2.0113168652226843e-06, + "loss": 0.5031, + "step": 15379 + }, + { + "epoch": 2.4, + "grad_norm": 21.800191970076195, + "learning_rate": 2.0103025997919155e-06, + "loss": 0.4541, + "step": 15380 + }, + { + "epoch": 2.4, + "grad_norm": 19.773256372641818, + "learning_rate": 2.009288561583468e-06, + "loss": 0.4666, + "step": 15381 + }, + { + "epoch": 2.4, + "grad_norm": 25.079896243995304, + "learning_rate": 2.008274750626178e-06, + "loss": 0.4205, + "step": 15382 + }, + { + "epoch": 2.4, + "grad_norm": 24.364590584423745, + "learning_rate": 2.007261166948885e-06, + "loss": 0.4824, + "step": 15383 + }, + { + "epoch": 2.4, + "grad_norm": 26.51123863787077, + "learning_rate": 2.006247810580403e-06, + "loss": 0.4745, + "step": 15384 + }, + { + "epoch": 2.4, + "grad_norm": 17.29417493985364, + "learning_rate": 2.0052346815495573e-06, + "loss": 0.5009, + "step": 15385 + }, + { + "epoch": 2.4, + "grad_norm": 17.300373253885418, + "learning_rate": 2.0042217798851606e-06, + "loss": 0.4424, + "step": 15386 + }, + { + "epoch": 2.4, + "grad_norm": 14.944193459635352, + "learning_rate": 2.003209105616021e-06, + "loss": 0.4249, + "step": 15387 + }, + { + "epoch": 2.4, + "grad_norm": 19.081763144789335, + "learning_rate": 2.0021966587709372e-06, + "loss": 0.4179, + "step": 15388 + }, + { + "epoch": 2.4, + "grad_norm": 26.19094276075015, + "learning_rate": 2.0011844393786983e-06, + "loss": 0.4952, + "step": 15389 + }, + { + "epoch": 2.4, + "grad_norm": 15.347019968802632, + "learning_rate": 2.0001724474680963e-06, + "loss": 0.4252, + "step": 15390 + }, + { + "epoch": 2.4, + "grad_norm": 20.528058081918658, + "learning_rate": 1.999160683067911e-06, + "loss": 0.4597, + "step": 15391 + }, + { + "epoch": 2.4, + "grad_norm": 23.026360461180957, + "learning_rate": 1.9981491462069146e-06, + "loss": 0.4793, + "step": 15392 + }, + 
{ + "epoch": 2.4, + "grad_norm": 20.33475373845935, + "learning_rate": 1.9971378369138773e-06, + "loss": 0.4627, + "step": 15393 + }, + { + "epoch": 2.4, + "grad_norm": 24.78181475636807, + "learning_rate": 1.9961267552175558e-06, + "loss": 0.4858, + "step": 15394 + }, + { + "epoch": 2.4, + "grad_norm": 21.11313778125388, + "learning_rate": 1.995115901146707e-06, + "loss": 0.4304, + "step": 15395 + }, + { + "epoch": 2.4, + "grad_norm": 26.311099526541714, + "learning_rate": 1.9941052747300826e-06, + "loss": 0.4489, + "step": 15396 + }, + { + "epoch": 2.41, + "grad_norm": 19.42199272970177, + "learning_rate": 1.9930948759964175e-06, + "loss": 0.4383, + "step": 15397 + }, + { + "epoch": 2.41, + "grad_norm": 28.49084642087205, + "learning_rate": 1.992084704974453e-06, + "loss": 0.4936, + "step": 15398 + }, + { + "epoch": 2.41, + "grad_norm": 20.074301459503367, + "learning_rate": 1.991074761692913e-06, + "loss": 0.4381, + "step": 15399 + }, + { + "epoch": 2.41, + "grad_norm": 22.3196861184539, + "learning_rate": 1.9900650461805237e-06, + "loss": 0.492, + "step": 15400 + }, + { + "epoch": 2.41, + "grad_norm": 27.923558758099947, + "learning_rate": 1.9890555584659965e-06, + "loss": 0.4317, + "step": 15401 + }, + { + "epoch": 2.41, + "grad_norm": 29.10296726190204, + "learning_rate": 1.9880462985780423e-06, + "loss": 0.4126, + "step": 15402 + }, + { + "epoch": 2.41, + "grad_norm": 15.012345671436195, + "learning_rate": 1.9870372665453673e-06, + "loss": 0.5412, + "step": 15403 + }, + { + "epoch": 2.41, + "grad_norm": 15.319882534691205, + "learning_rate": 1.986028462396666e-06, + "loss": 0.426, + "step": 15404 + }, + { + "epoch": 2.41, + "grad_norm": 29.70867586217058, + "learning_rate": 1.985019886160624e-06, + "loss": 0.4587, + "step": 15405 + }, + { + "epoch": 2.41, + "grad_norm": 18.001231797939358, + "learning_rate": 1.9840115378659275e-06, + "loss": 0.4341, + "step": 15406 + }, + { + "epoch": 2.41, + "grad_norm": 20.576686755284673, + "learning_rate": 1.983003417541254e-06, + "loss": 0.4102, + "step": 15407 + }, + { + "epoch": 2.41, + "grad_norm": 17.271254128035764, + "learning_rate": 1.9819955252152755e-06, + "loss": 0.4084, + "step": 15408 + }, + { + "epoch": 2.41, + "grad_norm": 16.68404562964947, + "learning_rate": 1.9809878609166546e-06, + "loss": 0.4174, + "step": 15409 + }, + { + "epoch": 2.41, + "grad_norm": 21.624490099336295, + "learning_rate": 1.979980424674045e-06, + "loss": 0.367, + "step": 15410 + }, + { + "epoch": 2.41, + "grad_norm": 20.412502316791894, + "learning_rate": 1.9789732165161e-06, + "loss": 0.4526, + "step": 15411 + }, + { + "epoch": 2.41, + "grad_norm": 22.2091591234103, + "learning_rate": 1.977966236471468e-06, + "loss": 0.4503, + "step": 15412 + }, + { + "epoch": 2.41, + "grad_norm": 15.325366069873303, + "learning_rate": 1.9769594845687833e-06, + "loss": 0.3721, + "step": 15413 + }, + { + "epoch": 2.41, + "grad_norm": 23.835925868983953, + "learning_rate": 1.9759529608366744e-06, + "loss": 0.4913, + "step": 15414 + }, + { + "epoch": 2.41, + "grad_norm": 25.879553597181058, + "learning_rate": 1.9749466653037707e-06, + "loss": 0.4376, + "step": 15415 + }, + { + "epoch": 2.41, + "grad_norm": 14.57163658773314, + "learning_rate": 1.9739405979986904e-06, + "loss": 0.4387, + "step": 15416 + }, + { + "epoch": 2.41, + "grad_norm": 18.763706329939858, + "learning_rate": 1.9729347589500426e-06, + "loss": 0.468, + "step": 15417 + }, + { + "epoch": 2.41, + "grad_norm": 13.819709399160494, + "learning_rate": 1.9719291481864366e-06, + "loss": 0.4323, + "step": 15418 + }, 
+ { + "epoch": 2.41, + "grad_norm": 19.61366202921762, + "learning_rate": 1.970923765736468e-06, + "loss": 0.4705, + "step": 15419 + }, + { + "epoch": 2.41, + "grad_norm": 22.109965891069336, + "learning_rate": 1.9699186116287295e-06, + "loss": 0.4518, + "step": 15420 + }, + { + "epoch": 2.41, + "grad_norm": 27.525286626730985, + "learning_rate": 1.9689136858918112e-06, + "loss": 0.4076, + "step": 15421 + }, + { + "epoch": 2.41, + "grad_norm": 21.754325400294373, + "learning_rate": 1.967908988554289e-06, + "loss": 0.4334, + "step": 15422 + }, + { + "epoch": 2.41, + "grad_norm": 35.943351270917994, + "learning_rate": 1.9669045196447345e-06, + "loss": 0.5155, + "step": 15423 + }, + { + "epoch": 2.41, + "grad_norm": 21.22633731443197, + "learning_rate": 1.9659002791917157e-06, + "loss": 0.4026, + "step": 15424 + }, + { + "epoch": 2.41, + "grad_norm": 24.75152008489357, + "learning_rate": 1.964896267223797e-06, + "loss": 0.5159, + "step": 15425 + }, + { + "epoch": 2.41, + "grad_norm": 17.93363529949157, + "learning_rate": 1.963892483769524e-06, + "loss": 0.525, + "step": 15426 + }, + { + "epoch": 2.41, + "grad_norm": 26.46723593622914, + "learning_rate": 1.9628889288574514e-06, + "loss": 0.5061, + "step": 15427 + }, + { + "epoch": 2.41, + "grad_norm": 23.668548761977448, + "learning_rate": 1.9618856025161127e-06, + "loss": 0.4853, + "step": 15428 + }, + { + "epoch": 2.41, + "grad_norm": 18.384002698692992, + "learning_rate": 1.9608825047740486e-06, + "loss": 0.4093, + "step": 15429 + }, + { + "epoch": 2.41, + "grad_norm": 15.924566578618645, + "learning_rate": 1.9598796356597806e-06, + "loss": 0.4646, + "step": 15430 + }, + { + "epoch": 2.41, + "grad_norm": 26.824214996964244, + "learning_rate": 1.958876995201833e-06, + "loss": 0.4867, + "step": 15431 + }, + { + "epoch": 2.41, + "grad_norm": 20.065254880886613, + "learning_rate": 1.9578745834287204e-06, + "loss": 0.4278, + "step": 15432 + }, + { + "epoch": 2.41, + "grad_norm": 15.789933509528122, + "learning_rate": 1.9568724003689486e-06, + "loss": 0.4337, + "step": 15433 + }, + { + "epoch": 2.41, + "grad_norm": 30.280174263820577, + "learning_rate": 1.9558704460510235e-06, + "loss": 0.4887, + "step": 15434 + }, + { + "epoch": 2.41, + "grad_norm": 30.785945669720157, + "learning_rate": 1.9548687205034346e-06, + "loss": 0.4184, + "step": 15435 + }, + { + "epoch": 2.41, + "grad_norm": 16.040069254900477, + "learning_rate": 1.953867223754673e-06, + "loss": 0.4437, + "step": 15436 + }, + { + "epoch": 2.41, + "grad_norm": 22.663784536914285, + "learning_rate": 1.952865955833223e-06, + "loss": 0.4312, + "step": 15437 + }, + { + "epoch": 2.41, + "grad_norm": 15.958025664367458, + "learning_rate": 1.9518649167675585e-06, + "loss": 0.4752, + "step": 15438 + }, + { + "epoch": 2.41, + "grad_norm": 18.61297948338482, + "learning_rate": 1.9508641065861445e-06, + "loss": 0.4297, + "step": 15439 + }, + { + "epoch": 2.41, + "grad_norm": 20.158109998640352, + "learning_rate": 1.949863525317447e-06, + "loss": 0.4255, + "step": 15440 + }, + { + "epoch": 2.41, + "grad_norm": 19.182126192799373, + "learning_rate": 1.9488631729899243e-06, + "loss": 0.4966, + "step": 15441 + }, + { + "epoch": 2.41, + "grad_norm": 24.806468850398485, + "learning_rate": 1.9478630496320227e-06, + "loss": 0.4453, + "step": 15442 + }, + { + "epoch": 2.41, + "grad_norm": 17.316897872565225, + "learning_rate": 1.946863155272183e-06, + "loss": 0.4356, + "step": 15443 + }, + { + "epoch": 2.41, + "grad_norm": 18.5711907342403, + "learning_rate": 1.9458634899388453e-06, + "loss": 0.4, + 
"step": 15444 + }, + { + "epoch": 2.41, + "grad_norm": 18.81115641857474, + "learning_rate": 1.944864053660437e-06, + "loss": 0.4453, + "step": 15445 + }, + { + "epoch": 2.41, + "grad_norm": 18.257289195039252, + "learning_rate": 1.9438648464653865e-06, + "loss": 0.4563, + "step": 15446 + }, + { + "epoch": 2.41, + "grad_norm": 27.242501838211776, + "learning_rate": 1.9428658683821066e-06, + "loss": 0.4796, + "step": 15447 + }, + { + "epoch": 2.41, + "grad_norm": 21.693119120209765, + "learning_rate": 1.9418671194390047e-06, + "loss": 0.4608, + "step": 15448 + }, + { + "epoch": 2.41, + "grad_norm": 15.888288650158334, + "learning_rate": 1.9408685996644893e-06, + "loss": 0.4281, + "step": 15449 + }, + { + "epoch": 2.41, + "grad_norm": 23.671266004074816, + "learning_rate": 1.9398703090869585e-06, + "loss": 0.4544, + "step": 15450 + }, + { + "epoch": 2.41, + "grad_norm": 26.165442782295703, + "learning_rate": 1.938872247734799e-06, + "loss": 0.4851, + "step": 15451 + }, + { + "epoch": 2.41, + "grad_norm": 22.86494189204278, + "learning_rate": 1.9378744156363993e-06, + "loss": 0.4449, + "step": 15452 + }, + { + "epoch": 2.41, + "grad_norm": 26.405277811584256, + "learning_rate": 1.936876812820133e-06, + "loss": 0.4911, + "step": 15453 + }, + { + "epoch": 2.41, + "grad_norm": 23.213567757429583, + "learning_rate": 1.9358794393143755e-06, + "loss": 0.4307, + "step": 15454 + }, + { + "epoch": 2.41, + "grad_norm": 17.93357932709829, + "learning_rate": 1.934882295147487e-06, + "loss": 0.4249, + "step": 15455 + }, + { + "epoch": 2.41, + "grad_norm": 17.539829820614916, + "learning_rate": 1.9338853803478274e-06, + "loss": 0.4973, + "step": 15456 + }, + { + "epoch": 2.41, + "grad_norm": 27.161147379474386, + "learning_rate": 1.932888694943752e-06, + "loss": 0.445, + "step": 15457 + }, + { + "epoch": 2.41, + "grad_norm": 27.23288635140148, + "learning_rate": 1.931892238963601e-06, + "loss": 0.3949, + "step": 15458 + }, + { + "epoch": 2.41, + "grad_norm": 17.009897063129984, + "learning_rate": 1.9308960124357167e-06, + "loss": 0.3791, + "step": 15459 + }, + { + "epoch": 2.41, + "grad_norm": 35.84390328140876, + "learning_rate": 1.9299000153884274e-06, + "loss": 0.4538, + "step": 15460 + }, + { + "epoch": 2.42, + "grad_norm": 21.902951677899033, + "learning_rate": 1.928904247850061e-06, + "loss": 0.4124, + "step": 15461 + }, + { + "epoch": 2.42, + "grad_norm": 19.13498505469158, + "learning_rate": 1.927908709848938e-06, + "loss": 0.4538, + "step": 15462 + }, + { + "epoch": 2.42, + "grad_norm": 19.210150469481583, + "learning_rate": 1.9269134014133706e-06, + "loss": 0.398, + "step": 15463 + }, + { + "epoch": 2.42, + "grad_norm": 20.164378533589606, + "learning_rate": 1.9259183225716594e-06, + "loss": 0.5532, + "step": 15464 + }, + { + "epoch": 2.42, + "grad_norm": 20.420596460686678, + "learning_rate": 1.924923473352108e-06, + "loss": 0.4582, + "step": 15465 + }, + { + "epoch": 2.42, + "grad_norm": 25.620834669154583, + "learning_rate": 1.9239288537830136e-06, + "loss": 0.4289, + "step": 15466 + }, + { + "epoch": 2.42, + "grad_norm": 32.40137726881236, + "learning_rate": 1.922934463892657e-06, + "loss": 0.5195, + "step": 15467 + }, + { + "epoch": 2.42, + "grad_norm": 19.4995559923045, + "learning_rate": 1.9219403037093164e-06, + "loss": 0.5003, + "step": 15468 + }, + { + "epoch": 2.42, + "grad_norm": 24.151146947461086, + "learning_rate": 1.9209463732612687e-06, + "loss": 0.4386, + "step": 15469 + }, + { + "epoch": 2.42, + "grad_norm": 23.795229519280987, + "learning_rate": 1.919952672576779e-06, + 
"loss": 0.4837, + "step": 15470 + }, + { + "epoch": 2.42, + "grad_norm": 19.702119020347897, + "learning_rate": 1.9189592016841154e-06, + "loss": 0.4508, + "step": 15471 + }, + { + "epoch": 2.42, + "grad_norm": 23.393520058845333, + "learning_rate": 1.9179659606115187e-06, + "loss": 0.4745, + "step": 15472 + }, + { + "epoch": 2.42, + "grad_norm": 27.81683536397295, + "learning_rate": 1.9169729493872423e-06, + "loss": 0.4712, + "step": 15473 + }, + { + "epoch": 2.42, + "grad_norm": 25.68612940872227, + "learning_rate": 1.9159801680395264e-06, + "loss": 0.4178, + "step": 15474 + }, + { + "epoch": 2.42, + "grad_norm": 36.52499344882476, + "learning_rate": 1.9149876165966088e-06, + "loss": 0.5706, + "step": 15475 + }, + { + "epoch": 2.42, + "grad_norm": 14.428117968256183, + "learning_rate": 1.9139952950867135e-06, + "loss": 0.3901, + "step": 15476 + }, + { + "epoch": 2.42, + "grad_norm": 28.240323277700302, + "learning_rate": 1.9130032035380595e-06, + "loss": 0.5087, + "step": 15477 + }, + { + "epoch": 2.42, + "grad_norm": 18.345983534783485, + "learning_rate": 1.9120113419788635e-06, + "loss": 0.4124, + "step": 15478 + }, + { + "epoch": 2.42, + "grad_norm": 28.73539116635506, + "learning_rate": 1.911019710437335e-06, + "loss": 0.4251, + "step": 15479 + }, + { + "epoch": 2.42, + "grad_norm": 16.389330319163786, + "learning_rate": 1.9100283089416725e-06, + "loss": 0.4845, + "step": 15480 + }, + { + "epoch": 2.42, + "grad_norm": 17.833135539329074, + "learning_rate": 1.9090371375200755e-06, + "loss": 0.4288, + "step": 15481 + }, + { + "epoch": 2.42, + "grad_norm": 28.73584550535108, + "learning_rate": 1.9080461962007257e-06, + "loss": 0.4462, + "step": 15482 + }, + { + "epoch": 2.42, + "grad_norm": 18.444229159599242, + "learning_rate": 1.9070554850118095e-06, + "loss": 0.3995, + "step": 15483 + }, + { + "epoch": 2.42, + "grad_norm": 27.146113728480138, + "learning_rate": 1.906065003981503e-06, + "loss": 0.4411, + "step": 15484 + }, + { + "epoch": 2.42, + "grad_norm": 20.86290123241126, + "learning_rate": 1.9050747531379698e-06, + "loss": 0.4998, + "step": 15485 + }, + { + "epoch": 2.42, + "grad_norm": 27.4611787131556, + "learning_rate": 1.9040847325093791e-06, + "loss": 0.4423, + "step": 15486 + }, + { + "epoch": 2.42, + "grad_norm": 20.461417487866154, + "learning_rate": 1.9030949421238787e-06, + "loss": 0.4245, + "step": 15487 + }, + { + "epoch": 2.42, + "grad_norm": 11.842269138694503, + "learning_rate": 1.9021053820096246e-06, + "loss": 0.5377, + "step": 15488 + }, + { + "epoch": 2.42, + "grad_norm": 20.282042748691268, + "learning_rate": 1.9011160521947548e-06, + "loss": 0.5069, + "step": 15489 + }, + { + "epoch": 2.42, + "grad_norm": 20.012690382599313, + "learning_rate": 1.900126952707405e-06, + "loss": 0.4664, + "step": 15490 + }, + { + "epoch": 2.42, + "grad_norm": 20.660097409166543, + "learning_rate": 1.89913808357571e-06, + "loss": 0.4061, + "step": 15491 + }, + { + "epoch": 2.42, + "grad_norm": 17.227096898792517, + "learning_rate": 1.8981494448277893e-06, + "loss": 0.4035, + "step": 15492 + }, + { + "epoch": 2.42, + "grad_norm": 20.70250947145214, + "learning_rate": 1.897161036491756e-06, + "loss": 0.4435, + "step": 15493 + }, + { + "epoch": 2.42, + "grad_norm": 15.803582263133466, + "learning_rate": 1.8961728585957229e-06, + "loss": 0.4321, + "step": 15494 + }, + { + "epoch": 2.42, + "grad_norm": 24.03617323495913, + "learning_rate": 1.8951849111677922e-06, + "loss": 0.5375, + "step": 15495 + }, + { + "epoch": 2.42, + "grad_norm": 22.04168502338729, + "learning_rate": 
1.894197194236065e-06, + "loss": 0.4519, + "step": 15496 + }, + { + "epoch": 2.42, + "grad_norm": 16.570695008463197, + "learning_rate": 1.8932097078286283e-06, + "loss": 0.4613, + "step": 15497 + }, + { + "epoch": 2.42, + "grad_norm": 23.397042540170354, + "learning_rate": 1.8922224519735611e-06, + "loss": 0.4647, + "step": 15498 + }, + { + "epoch": 2.42, + "grad_norm": 20.23721492971823, + "learning_rate": 1.8912354266989453e-06, + "loss": 0.4537, + "step": 15499 + }, + { + "epoch": 2.42, + "grad_norm": 17.640733786113206, + "learning_rate": 1.890248632032854e-06, + "loss": 0.4088, + "step": 15500 + }, + { + "epoch": 2.42, + "grad_norm": 16.344779684642898, + "learning_rate": 1.8892620680033457e-06, + "loss": 0.4612, + "step": 15501 + }, + { + "epoch": 2.42, + "grad_norm": 22.393334753011086, + "learning_rate": 1.8882757346384785e-06, + "loss": 0.4664, + "step": 15502 + }, + { + "epoch": 2.42, + "grad_norm": 25.639841912752278, + "learning_rate": 1.8872896319663038e-06, + "loss": 0.4648, + "step": 15503 + }, + { + "epoch": 2.42, + "grad_norm": 11.715601949015337, + "learning_rate": 1.886303760014868e-06, + "loss": 0.3739, + "step": 15504 + }, + { + "epoch": 2.42, + "grad_norm": 20.487007607727623, + "learning_rate": 1.885318118812205e-06, + "loss": 0.4284, + "step": 15505 + }, + { + "epoch": 2.42, + "grad_norm": 18.665020401948865, + "learning_rate": 1.8843327083863495e-06, + "loss": 0.3773, + "step": 15506 + }, + { + "epoch": 2.42, + "grad_norm": 15.011841090803404, + "learning_rate": 1.8833475287653225e-06, + "loss": 0.4468, + "step": 15507 + }, + { + "epoch": 2.42, + "grad_norm": 25.123861286468784, + "learning_rate": 1.8823625799771428e-06, + "loss": 0.4441, + "step": 15508 + }, + { + "epoch": 2.42, + "grad_norm": 17.689392883258854, + "learning_rate": 1.8813778620498268e-06, + "loss": 0.4761, + "step": 15509 + }, + { + "epoch": 2.42, + "grad_norm": 24.485711425186466, + "learning_rate": 1.8803933750113712e-06, + "loss": 0.3874, + "step": 15510 + }, + { + "epoch": 2.42, + "grad_norm": 26.08969638941055, + "learning_rate": 1.8794091188897812e-06, + "loss": 0.4997, + "step": 15511 + }, + { + "epoch": 2.42, + "grad_norm": 22.463916629092914, + "learning_rate": 1.8784250937130433e-06, + "loss": 0.4722, + "step": 15512 + }, + { + "epoch": 2.42, + "grad_norm": 29.346236041874445, + "learning_rate": 1.8774412995091462e-06, + "loss": 0.5104, + "step": 15513 + }, + { + "epoch": 2.42, + "grad_norm": 21.387853635175233, + "learning_rate": 1.8764577363060654e-06, + "loss": 0.525, + "step": 15514 + }, + { + "epoch": 2.42, + "grad_norm": 26.565157835886573, + "learning_rate": 1.8754744041317763e-06, + "loss": 0.4754, + "step": 15515 + }, + { + "epoch": 2.42, + "grad_norm": 24.772762045167756, + "learning_rate": 1.8744913030142409e-06, + "loss": 0.4753, + "step": 15516 + }, + { + "epoch": 2.42, + "grad_norm": 21.084553604251518, + "learning_rate": 1.8735084329814213e-06, + "loss": 0.435, + "step": 15517 + }, + { + "epoch": 2.42, + "grad_norm": 17.82897151171001, + "learning_rate": 1.872525794061265e-06, + "loss": 0.4226, + "step": 15518 + }, + { + "epoch": 2.42, + "grad_norm": 14.005539103803844, + "learning_rate": 1.8715433862817224e-06, + "loss": 0.3689, + "step": 15519 + }, + { + "epoch": 2.42, + "grad_norm": 21.70696593736945, + "learning_rate": 1.8705612096707314e-06, + "loss": 0.3947, + "step": 15520 + }, + { + "epoch": 2.42, + "grad_norm": 14.32795276819128, + "learning_rate": 1.8695792642562226e-06, + "loss": 0.4634, + "step": 15521 + }, + { + "epoch": 2.42, + "grad_norm": 
16.592953810116423, + "learning_rate": 1.8685975500661257e-06, + "loss": 0.4318, + "step": 15522 + }, + { + "epoch": 2.42, + "grad_norm": 32.822369493560196, + "learning_rate": 1.867616067128355e-06, + "loss": 0.4851, + "step": 15523 + }, + { + "epoch": 2.42, + "grad_norm": 14.689784104714516, + "learning_rate": 1.8666348154708269e-06, + "loss": 0.429, + "step": 15524 + }, + { + "epoch": 2.43, + "grad_norm": 21.81618102391986, + "learning_rate": 1.8656537951214482e-06, + "loss": 0.4085, + "step": 15525 + }, + { + "epoch": 2.43, + "grad_norm": 23.91793502825176, + "learning_rate": 1.8646730061081174e-06, + "loss": 0.4567, + "step": 15526 + }, + { + "epoch": 2.43, + "grad_norm": 22.960649132256282, + "learning_rate": 1.8636924484587248e-06, + "loss": 0.4243, + "step": 15527 + }, + { + "epoch": 2.43, + "grad_norm": 24.55950211655765, + "learning_rate": 1.86271212220116e-06, + "loss": 0.4286, + "step": 15528 + }, + { + "epoch": 2.43, + "grad_norm": 20.89256074221222, + "learning_rate": 1.8617320273633044e-06, + "loss": 0.4338, + "step": 15529 + }, + { + "epoch": 2.43, + "grad_norm": 12.84907087098428, + "learning_rate": 1.8607521639730285e-06, + "loss": 0.3855, + "step": 15530 + }, + { + "epoch": 2.43, + "grad_norm": 18.650400882214374, + "learning_rate": 1.8597725320581982e-06, + "loss": 0.4583, + "step": 15531 + }, + { + "epoch": 2.43, + "grad_norm": 26.300842484516632, + "learning_rate": 1.8587931316466746e-06, + "loss": 0.4421, + "step": 15532 + }, + { + "epoch": 2.43, + "grad_norm": 15.463793913339718, + "learning_rate": 1.8578139627663127e-06, + "loss": 0.4414, + "step": 15533 + }, + { + "epoch": 2.43, + "grad_norm": 27.251963384737504, + "learning_rate": 1.8568350254449596e-06, + "loss": 0.5296, + "step": 15534 + }, + { + "epoch": 2.43, + "grad_norm": 27.35784798210833, + "learning_rate": 1.8558563197104562e-06, + "loss": 0.4436, + "step": 15535 + }, + { + "epoch": 2.43, + "grad_norm": 20.207881245193466, + "learning_rate": 1.8548778455906325e-06, + "loss": 0.4711, + "step": 15536 + }, + { + "epoch": 2.43, + "grad_norm": 46.79857274066157, + "learning_rate": 1.8538996031133172e-06, + "loss": 0.4162, + "step": 15537 + }, + { + "epoch": 2.43, + "grad_norm": 19.27897282744381, + "learning_rate": 1.8529215923063348e-06, + "loss": 0.4149, + "step": 15538 + }, + { + "epoch": 2.43, + "grad_norm": 23.941187353591445, + "learning_rate": 1.8519438131974932e-06, + "loss": 0.52, + "step": 15539 + }, + { + "epoch": 2.43, + "grad_norm": 21.009220502405334, + "learning_rate": 1.8509662658146067e-06, + "loss": 0.5146, + "step": 15540 + }, + { + "epoch": 2.43, + "grad_norm": 15.457626560750542, + "learning_rate": 1.8499889501854685e-06, + "loss": 0.4807, + "step": 15541 + }, + { + "epoch": 2.43, + "grad_norm": 19.223221916088125, + "learning_rate": 1.8490118663378809e-06, + "loss": 0.409, + "step": 15542 + }, + { + "epoch": 2.43, + "grad_norm": 25.22119670766908, + "learning_rate": 1.848035014299625e-06, + "loss": 0.515, + "step": 15543 + }, + { + "epoch": 2.43, + "grad_norm": 23.71409698187734, + "learning_rate": 1.8470583940984832e-06, + "loss": 0.434, + "step": 15544 + }, + { + "epoch": 2.43, + "grad_norm": 16.1865642707045, + "learning_rate": 1.8460820057622353e-06, + "loss": 0.4485, + "step": 15545 + }, + { + "epoch": 2.43, + "grad_norm": 28.92453661088341, + "learning_rate": 1.8451058493186424e-06, + "loss": 0.4738, + "step": 15546 + }, + { + "epoch": 2.43, + "grad_norm": 18.645635838679198, + "learning_rate": 1.8441299247954713e-06, + "loss": 0.4146, + "step": 15547 + }, + { + "epoch": 2.43, + 
"grad_norm": 13.40138518325208, + "learning_rate": 1.843154232220471e-06, + "loss": 0.4028, + "step": 15548 + }, + { + "epoch": 2.43, + "grad_norm": 19.564279626909308, + "learning_rate": 1.8421787716213934e-06, + "loss": 0.4484, + "step": 15549 + }, + { + "epoch": 2.43, + "grad_norm": 13.513939551734618, + "learning_rate": 1.8412035430259811e-06, + "loss": 0.4353, + "step": 15550 + }, + { + "epoch": 2.43, + "grad_norm": 19.47360084561293, + "learning_rate": 1.8402285464619673e-06, + "loss": 0.4062, + "step": 15551 + }, + { + "epoch": 2.43, + "grad_norm": 18.869193567127297, + "learning_rate": 1.839253781957079e-06, + "loss": 0.4228, + "step": 15552 + }, + { + "epoch": 2.43, + "grad_norm": 20.74760828036703, + "learning_rate": 1.8382792495390378e-06, + "loss": 0.4246, + "step": 15553 + }, + { + "epoch": 2.43, + "grad_norm": 26.291339030330214, + "learning_rate": 1.8373049492355633e-06, + "loss": 0.4226, + "step": 15554 + }, + { + "epoch": 2.43, + "grad_norm": 17.175077504187207, + "learning_rate": 1.8363308810743619e-06, + "loss": 0.4958, + "step": 15555 + }, + { + "epoch": 2.43, + "grad_norm": 21.893646382972268, + "learning_rate": 1.8353570450831303e-06, + "loss": 0.5396, + "step": 15556 + }, + { + "epoch": 2.43, + "grad_norm": 20.594284514493253, + "learning_rate": 1.83438344128957e-06, + "loss": 0.4408, + "step": 15557 + }, + { + "epoch": 2.43, + "grad_norm": 16.786191548393347, + "learning_rate": 1.8334100697213664e-06, + "loss": 0.4289, + "step": 15558 + }, + { + "epoch": 2.43, + "grad_norm": 25.093224752731874, + "learning_rate": 1.8324369304062062e-06, + "loss": 0.4942, + "step": 15559 + }, + { + "epoch": 2.43, + "grad_norm": 32.5159815503378, + "learning_rate": 1.8314640233717617e-06, + "loss": 0.4696, + "step": 15560 + }, + { + "epoch": 2.43, + "grad_norm": 16.333487850142767, + "learning_rate": 1.8304913486456998e-06, + "loss": 0.4198, + "step": 15561 + }, + { + "epoch": 2.43, + "grad_norm": 26.79393163425483, + "learning_rate": 1.8295189062556851e-06, + "loss": 0.5029, + "step": 15562 + }, + { + "epoch": 2.43, + "grad_norm": 30.47662430370472, + "learning_rate": 1.8285466962293752e-06, + "loss": 0.456, + "step": 15563 + }, + { + "epoch": 2.43, + "grad_norm": 18.83600360205312, + "learning_rate": 1.827574718594417e-06, + "loss": 0.4348, + "step": 15564 + }, + { + "epoch": 2.43, + "grad_norm": 17.463346120194814, + "learning_rate": 1.8266029733784506e-06, + "loss": 0.4414, + "step": 15565 + }, + { + "epoch": 2.43, + "grad_norm": 21.173209467846437, + "learning_rate": 1.8256314606091153e-06, + "loss": 0.4557, + "step": 15566 + }, + { + "epoch": 2.43, + "grad_norm": 24.56113126268079, + "learning_rate": 1.8246601803140407e-06, + "loss": 0.4599, + "step": 15567 + }, + { + "epoch": 2.43, + "grad_norm": 25.826642576888922, + "learning_rate": 1.823689132520846e-06, + "loss": 0.4432, + "step": 15568 + }, + { + "epoch": 2.43, + "grad_norm": 19.693058125489113, + "learning_rate": 1.8227183172571516e-06, + "loss": 0.4092, + "step": 15569 + }, + { + "epoch": 2.43, + "grad_norm": 15.781914976953203, + "learning_rate": 1.8217477345505619e-06, + "loss": 0.4502, + "step": 15570 + }, + { + "epoch": 2.43, + "grad_norm": 27.233240033879568, + "learning_rate": 1.8207773844286835e-06, + "loss": 0.4368, + "step": 15571 + }, + { + "epoch": 2.43, + "grad_norm": 19.52694403599269, + "learning_rate": 1.8198072669191136e-06, + "loss": 0.4958, + "step": 15572 + }, + { + "epoch": 2.43, + "grad_norm": 22.219751880017537, + "learning_rate": 1.8188373820494376e-06, + "loss": 0.4521, + "step": 15573 + }, + { + 
"epoch": 2.43, + "grad_norm": 25.045430620088553, + "learning_rate": 1.8178677298472425e-06, + "loss": 0.4735, + "step": 15574 + }, + { + "epoch": 2.43, + "grad_norm": 22.332529526843867, + "learning_rate": 1.8168983103401006e-06, + "loss": 0.4764, + "step": 15575 + }, + { + "epoch": 2.43, + "grad_norm": 17.68263924013169, + "learning_rate": 1.8159291235555864e-06, + "loss": 0.4603, + "step": 15576 + }, + { + "epoch": 2.43, + "grad_norm": 26.4168065768868, + "learning_rate": 1.8149601695212571e-06, + "loss": 0.4721, + "step": 15577 + }, + { + "epoch": 2.43, + "grad_norm": 14.256574396174575, + "learning_rate": 1.8139914482646737e-06, + "loss": 0.4618, + "step": 15578 + }, + { + "epoch": 2.43, + "grad_norm": 35.16246848825398, + "learning_rate": 1.8130229598133865e-06, + "loss": 0.4945, + "step": 15579 + }, + { + "epoch": 2.43, + "grad_norm": 24.050274288304607, + "learning_rate": 1.8120547041949366e-06, + "loss": 0.4849, + "step": 15580 + }, + { + "epoch": 2.43, + "grad_norm": 24.73123641352344, + "learning_rate": 1.81108668143686e-06, + "loss": 0.4841, + "step": 15581 + }, + { + "epoch": 2.43, + "grad_norm": 17.97760586319767, + "learning_rate": 1.8101188915666867e-06, + "loss": 0.4433, + "step": 15582 + }, + { + "epoch": 2.43, + "grad_norm": 26.52608140765442, + "learning_rate": 1.8091513346119415e-06, + "loss": 0.4898, + "step": 15583 + }, + { + "epoch": 2.43, + "grad_norm": 25.909912315841037, + "learning_rate": 1.8081840106001435e-06, + "loss": 0.4103, + "step": 15584 + }, + { + "epoch": 2.43, + "grad_norm": 21.91392893215008, + "learning_rate": 1.8072169195587996e-06, + "loss": 0.4377, + "step": 15585 + }, + { + "epoch": 2.43, + "grad_norm": 20.67713105815181, + "learning_rate": 1.8062500615154121e-06, + "loss": 0.5208, + "step": 15586 + }, + { + "epoch": 2.43, + "grad_norm": 19.345071707390748, + "learning_rate": 1.8052834364974791e-06, + "loss": 0.4971, + "step": 15587 + }, + { + "epoch": 2.43, + "grad_norm": 17.223992218249172, + "learning_rate": 1.8043170445324943e-06, + "loss": 0.401, + "step": 15588 + }, + { + "epoch": 2.44, + "grad_norm": 20.01658956509949, + "learning_rate": 1.8033508856479387e-06, + "loss": 0.4579, + "step": 15589 + }, + { + "epoch": 2.44, + "grad_norm": 16.816350958951464, + "learning_rate": 1.8023849598712861e-06, + "loss": 0.434, + "step": 15590 + }, + { + "epoch": 2.44, + "grad_norm": 19.49204753221549, + "learning_rate": 1.801419267230009e-06, + "loss": 0.4251, + "step": 15591 + }, + { + "epoch": 2.44, + "grad_norm": 20.476043720798017, + "learning_rate": 1.8004538077515755e-06, + "loss": 0.4256, + "step": 15592 + }, + { + "epoch": 2.44, + "grad_norm": 14.009408024875126, + "learning_rate": 1.7994885814634366e-06, + "loss": 0.451, + "step": 15593 + }, + { + "epoch": 2.44, + "grad_norm": 15.059859371806303, + "learning_rate": 1.7985235883930475e-06, + "loss": 0.4199, + "step": 15594 + }, + { + "epoch": 2.44, + "grad_norm": 21.99359851057603, + "learning_rate": 1.797558828567847e-06, + "loss": 0.3981, + "step": 15595 + }, + { + "epoch": 2.44, + "grad_norm": 22.7191569922826, + "learning_rate": 1.7965943020152753e-06, + "loss": 0.4991, + "step": 15596 + }, + { + "epoch": 2.44, + "grad_norm": 15.35126964833335, + "learning_rate": 1.7956300087627655e-06, + "loss": 0.4206, + "step": 15597 + }, + { + "epoch": 2.44, + "grad_norm": 22.147526625752768, + "learning_rate": 1.794665948837736e-06, + "loss": 0.421, + "step": 15598 + }, + { + "epoch": 2.44, + "grad_norm": 15.842469635323038, + "learning_rate": 1.7937021222676098e-06, + "loss": 0.431, + "step": 15599 + 
}, + { + "epoch": 2.44, + "grad_norm": 22.437492891273326, + "learning_rate": 1.7927385290797917e-06, + "loss": 0.4165, + "step": 15600 + }, + { + "epoch": 2.44, + "grad_norm": 25.767849399350506, + "learning_rate": 1.7917751693016915e-06, + "loss": 0.4514, + "step": 15601 + }, + { + "epoch": 2.44, + "grad_norm": 19.44545047461242, + "learning_rate": 1.7908120429607024e-06, + "loss": 0.421, + "step": 15602 + }, + { + "epoch": 2.44, + "grad_norm": 15.466842943217578, + "learning_rate": 1.7898491500842152e-06, + "loss": 0.4603, + "step": 15603 + }, + { + "epoch": 2.44, + "grad_norm": 14.735812061493073, + "learning_rate": 1.7888864906996183e-06, + "loss": 0.4109, + "step": 15604 + }, + { + "epoch": 2.44, + "grad_norm": 21.371800362353035, + "learning_rate": 1.7879240648342854e-06, + "loss": 0.4994, + "step": 15605 + }, + { + "epoch": 2.44, + "grad_norm": 20.85608643136716, + "learning_rate": 1.7869618725155868e-06, + "loss": 0.426, + "step": 15606 + }, + { + "epoch": 2.44, + "grad_norm": 21.468068342478652, + "learning_rate": 1.7859999137708872e-06, + "loss": 0.4487, + "step": 15607 + }, + { + "epoch": 2.44, + "grad_norm": 22.705747158243064, + "learning_rate": 1.7850381886275458e-06, + "loss": 0.497, + "step": 15608 + }, + { + "epoch": 2.44, + "grad_norm": 18.666298408597527, + "learning_rate": 1.7840766971129153e-06, + "loss": 0.493, + "step": 15609 + }, + { + "epoch": 2.44, + "grad_norm": 19.22067524485722, + "learning_rate": 1.783115439254337e-06, + "loss": 0.4137, + "step": 15610 + }, + { + "epoch": 2.44, + "grad_norm": 23.730136175140682, + "learning_rate": 1.7821544150791459e-06, + "loss": 0.4131, + "step": 15611 + }, + { + "epoch": 2.44, + "grad_norm": 22.197540813171795, + "learning_rate": 1.7811936246146765e-06, + "loss": 0.5077, + "step": 15612 + }, + { + "epoch": 2.44, + "grad_norm": 27.45459468948094, + "learning_rate": 1.7802330678882552e-06, + "loss": 0.4683, + "step": 15613 + }, + { + "epoch": 2.44, + "grad_norm": 19.172755624557027, + "learning_rate": 1.7792727449271962e-06, + "loss": 0.4453, + "step": 15614 + }, + { + "epoch": 2.44, + "grad_norm": 28.151691070862654, + "learning_rate": 1.7783126557588092e-06, + "loss": 0.5051, + "step": 15615 + }, + { + "epoch": 2.44, + "grad_norm": 17.603273924833005, + "learning_rate": 1.7773528004104012e-06, + "loss": 0.3779, + "step": 15616 + }, + { + "epoch": 2.44, + "grad_norm": 23.866715365670732, + "learning_rate": 1.7763931789092715e-06, + "loss": 0.4988, + "step": 15617 + }, + { + "epoch": 2.44, + "grad_norm": 23.680668224387993, + "learning_rate": 1.7754337912827092e-06, + "loss": 0.4164, + "step": 15618 + }, + { + "epoch": 2.44, + "grad_norm": 24.94071613991043, + "learning_rate": 1.7744746375579968e-06, + "loss": 0.4503, + "step": 15619 + }, + { + "epoch": 2.44, + "grad_norm": 19.952616256435636, + "learning_rate": 1.7735157177624129e-06, + "loss": 0.4294, + "step": 15620 + }, + { + "epoch": 2.44, + "grad_norm": 23.878098029710657, + "learning_rate": 1.77255703192323e-06, + "loss": 0.4186, + "step": 15621 + }, + { + "epoch": 2.44, + "grad_norm": 18.756194665738846, + "learning_rate": 1.7715985800677148e-06, + "loss": 0.4426, + "step": 15622 + }, + { + "epoch": 2.44, + "grad_norm": 21.196275182305122, + "learning_rate": 1.7706403622231228e-06, + "loss": 0.4844, + "step": 15623 + }, + { + "epoch": 2.44, + "grad_norm": 13.582590126210418, + "learning_rate": 1.7696823784167017e-06, + "loss": 0.4085, + "step": 15624 + }, + { + "epoch": 2.44, + "grad_norm": 16.921045101593744, + "learning_rate": 1.7687246286756999e-06, + "loss": 
0.4785, + "step": 15625 + }, + { + "epoch": 2.44, + "grad_norm": 27.30614475842026, + "learning_rate": 1.7677671130273567e-06, + "loss": 0.4916, + "step": 15626 + }, + { + "epoch": 2.44, + "grad_norm": 23.09746587820967, + "learning_rate": 1.766809831498898e-06, + "loss": 0.4793, + "step": 15627 + }, + { + "epoch": 2.44, + "grad_norm": 17.991629341336196, + "learning_rate": 1.7658527841175543e-06, + "loss": 0.4679, + "step": 15628 + }, + { + "epoch": 2.44, + "grad_norm": 19.524029075937946, + "learning_rate": 1.7648959709105384e-06, + "loss": 0.4695, + "step": 15629 + }, + { + "epoch": 2.44, + "grad_norm": 23.787327944967217, + "learning_rate": 1.7639393919050662e-06, + "loss": 0.4756, + "step": 15630 + }, + { + "epoch": 2.44, + "grad_norm": 19.943533902893822, + "learning_rate": 1.762983047128337e-06, + "loss": 0.4014, + "step": 15631 + }, + { + "epoch": 2.44, + "grad_norm": 18.80827617634642, + "learning_rate": 1.7620269366075514e-06, + "loss": 0.4168, + "step": 15632 + }, + { + "epoch": 2.44, + "grad_norm": 19.99525715500652, + "learning_rate": 1.7610710603699033e-06, + "loss": 0.4677, + "step": 15633 + }, + { + "epoch": 2.44, + "grad_norm": 19.448211092185396, + "learning_rate": 1.7601154184425717e-06, + "loss": 0.4523, + "step": 15634 + }, + { + "epoch": 2.44, + "grad_norm": 21.916203340889655, + "learning_rate": 1.7591600108527407e-06, + "loss": 0.4635, + "step": 15635 + }, + { + "epoch": 2.44, + "grad_norm": 16.464627713633504, + "learning_rate": 1.758204837627575e-06, + "loss": 0.3869, + "step": 15636 + }, + { + "epoch": 2.44, + "grad_norm": 19.811552922359514, + "learning_rate": 1.757249898794242e-06, + "loss": 0.4253, + "step": 15637 + }, + { + "epoch": 2.44, + "grad_norm": 21.851804911960375, + "learning_rate": 1.7562951943799033e-06, + "loss": 0.4138, + "step": 15638 + }, + { + "epoch": 2.44, + "grad_norm": 17.47046993264175, + "learning_rate": 1.755340724411707e-06, + "loss": 0.4075, + "step": 15639 + }, + { + "epoch": 2.44, + "grad_norm": 23.026540968396244, + "learning_rate": 1.754386488916795e-06, + "loss": 0.5165, + "step": 15640 + }, + { + "epoch": 2.44, + "grad_norm": 13.018244045936102, + "learning_rate": 1.7534324879223065e-06, + "loss": 0.3948, + "step": 15641 + }, + { + "epoch": 2.44, + "grad_norm": 18.24360875182509, + "learning_rate": 1.7524787214553774e-06, + "loss": 0.5273, + "step": 15642 + }, + { + "epoch": 2.44, + "grad_norm": 24.330801428254386, + "learning_rate": 1.7515251895431295e-06, + "loss": 0.486, + "step": 15643 + }, + { + "epoch": 2.44, + "grad_norm": 30.040085542716724, + "learning_rate": 1.750571892212677e-06, + "loss": 0.5172, + "step": 15644 + }, + { + "epoch": 2.44, + "grad_norm": 18.782636953037628, + "learning_rate": 1.7496188294911332e-06, + "loss": 0.5043, + "step": 15645 + }, + { + "epoch": 2.44, + "grad_norm": 17.405097401911334, + "learning_rate": 1.7486660014056046e-06, + "loss": 0.4902, + "step": 15646 + }, + { + "epoch": 2.44, + "grad_norm": 22.60867511151067, + "learning_rate": 1.7477134079831893e-06, + "loss": 0.4531, + "step": 15647 + }, + { + "epoch": 2.44, + "grad_norm": 17.777383447618277, + "learning_rate": 1.746761049250979e-06, + "loss": 0.4798, + "step": 15648 + }, + { + "epoch": 2.44, + "grad_norm": 20.04281763359204, + "learning_rate": 1.7458089252360522e-06, + "loss": 0.468, + "step": 15649 + }, + { + "epoch": 2.44, + "grad_norm": 22.84701788420225, + "learning_rate": 1.7448570359654925e-06, + "loss": 0.4395, + "step": 15650 + }, + { + "epoch": 2.44, + "grad_norm": 15.415634407676242, + "learning_rate": 
1.743905381466371e-06, + "loss": 0.4606, + "step": 15651 + }, + { + "epoch": 2.44, + "grad_norm": 22.031542974918363, + "learning_rate": 1.7429539617657487e-06, + "loss": 0.4578, + "step": 15652 + }, + { + "epoch": 2.45, + "grad_norm": 14.683632227752598, + "learning_rate": 1.7420027768906877e-06, + "loss": 0.5084, + "step": 15653 + }, + { + "epoch": 2.45, + "grad_norm": 20.666012341353326, + "learning_rate": 1.7410518268682353e-06, + "loss": 0.4138, + "step": 15654 + }, + { + "epoch": 2.45, + "grad_norm": 24.73429182265147, + "learning_rate": 1.7401011117254397e-06, + "loss": 0.4406, + "step": 15655 + }, + { + "epoch": 2.45, + "grad_norm": 23.55104984896507, + "learning_rate": 1.7391506314893337e-06, + "loss": 0.4558, + "step": 15656 + }, + { + "epoch": 2.45, + "grad_norm": 17.390192437768842, + "learning_rate": 1.7382003861869512e-06, + "loss": 0.4631, + "step": 15657 + }, + { + "epoch": 2.45, + "grad_norm": 18.926578607680682, + "learning_rate": 1.7372503758453196e-06, + "loss": 0.4201, + "step": 15658 + }, + { + "epoch": 2.45, + "grad_norm": 37.3928337257797, + "learning_rate": 1.7363006004914507e-06, + "loss": 0.4767, + "step": 15659 + }, + { + "epoch": 2.45, + "grad_norm": 16.056462213480682, + "learning_rate": 1.7353510601523616e-06, + "loss": 0.4543, + "step": 15660 + }, + { + "epoch": 2.45, + "grad_norm": 21.124255974211724, + "learning_rate": 1.7344017548550506e-06, + "loss": 0.4279, + "step": 15661 + }, + { + "epoch": 2.45, + "grad_norm": 20.64766689728801, + "learning_rate": 1.733452684626522e-06, + "loss": 0.4757, + "step": 15662 + }, + { + "epoch": 2.45, + "grad_norm": 21.501590290138942, + "learning_rate": 1.7325038494937597e-06, + "loss": 0.418, + "step": 15663 + }, + { + "epoch": 2.45, + "grad_norm": 40.71444599038201, + "learning_rate": 1.7315552494837552e-06, + "loss": 0.5379, + "step": 15664 + }, + { + "epoch": 2.45, + "grad_norm": 16.89527322407755, + "learning_rate": 1.7306068846234791e-06, + "loss": 0.405, + "step": 15665 + }, + { + "epoch": 2.45, + "grad_norm": 33.276344008462154, + "learning_rate": 1.7296587549399057e-06, + "loss": 0.4574, + "step": 15666 + }, + { + "epoch": 2.45, + "grad_norm": 17.912322647377387, + "learning_rate": 1.7287108604600023e-06, + "loss": 0.4568, + "step": 15667 + }, + { + "epoch": 2.45, + "grad_norm": 20.790070236593376, + "learning_rate": 1.7277632012107236e-06, + "loss": 0.4321, + "step": 15668 + }, + { + "epoch": 2.45, + "grad_norm": 21.828375157511363, + "learning_rate": 1.7268157772190174e-06, + "loss": 0.4351, + "step": 15669 + }, + { + "epoch": 2.45, + "grad_norm": 35.59040206179848, + "learning_rate": 1.725868588511831e-06, + "loss": 0.4765, + "step": 15670 + }, + { + "epoch": 2.45, + "grad_norm": 18.292652946057988, + "learning_rate": 1.7249216351161013e-06, + "loss": 0.4741, + "step": 15671 + }, + { + "epoch": 2.45, + "grad_norm": 32.32808279308516, + "learning_rate": 1.7239749170587628e-06, + "loss": 0.5315, + "step": 15672 + }, + { + "epoch": 2.45, + "grad_norm": 26.633858747457225, + "learning_rate": 1.723028434366736e-06, + "loss": 0.4776, + "step": 15673 + }, + { + "epoch": 2.45, + "grad_norm": 12.156949570465212, + "learning_rate": 1.7220821870669358e-06, + "loss": 0.3494, + "step": 15674 + }, + { + "epoch": 2.45, + "grad_norm": 30.33912482109138, + "learning_rate": 1.7211361751862754e-06, + "loss": 0.556, + "step": 15675 + }, + { + "epoch": 2.45, + "grad_norm": 21.094025046356762, + "learning_rate": 1.720190398751662e-06, + "loss": 0.4877, + "step": 15676 + }, + { + "epoch": 2.45, + "grad_norm": 33.400337224103104, + 
"learning_rate": 1.7192448577899912e-06, + "loss": 0.4672, + "step": 15677 + }, + { + "epoch": 2.45, + "grad_norm": 19.280156895180415, + "learning_rate": 1.7182995523281488e-06, + "loss": 0.3835, + "step": 15678 + }, + { + "epoch": 2.45, + "grad_norm": 15.90173532132101, + "learning_rate": 1.7173544823930232e-06, + "loss": 0.4142, + "step": 15679 + }, + { + "epoch": 2.45, + "grad_norm": 22.076843355454923, + "learning_rate": 1.7164096480114934e-06, + "loss": 0.3858, + "step": 15680 + }, + { + "epoch": 2.45, + "grad_norm": 20.93684716986153, + "learning_rate": 1.715465049210424e-06, + "loss": 0.4491, + "step": 15681 + }, + { + "epoch": 2.45, + "grad_norm": 17.915089700065323, + "learning_rate": 1.7145206860166863e-06, + "loss": 0.4833, + "step": 15682 + }, + { + "epoch": 2.45, + "grad_norm": 20.768664691547805, + "learning_rate": 1.7135765584571296e-06, + "loss": 0.4888, + "step": 15683 + }, + { + "epoch": 2.45, + "grad_norm": 23.222977411601583, + "learning_rate": 1.712632666558609e-06, + "loss": 0.4562, + "step": 15684 + }, + { + "epoch": 2.45, + "grad_norm": 29.22592705869698, + "learning_rate": 1.7116890103479688e-06, + "loss": 0.4759, + "step": 15685 + }, + { + "epoch": 2.45, + "grad_norm": 20.455293674475193, + "learning_rate": 1.710745589852043e-06, + "loss": 0.4375, + "step": 15686 + }, + { + "epoch": 2.45, + "grad_norm": 26.645101678992134, + "learning_rate": 1.7098024050976657e-06, + "loss": 0.43, + "step": 15687 + }, + { + "epoch": 2.45, + "grad_norm": 23.064482267084994, + "learning_rate": 1.7088594561116567e-06, + "loss": 0.4928, + "step": 15688 + }, + { + "epoch": 2.45, + "grad_norm": 42.91025739196887, + "learning_rate": 1.7079167429208366e-06, + "loss": 0.428, + "step": 15689 + }, + { + "epoch": 2.45, + "grad_norm": 13.843329713869773, + "learning_rate": 1.7069742655520106e-06, + "loss": 0.4348, + "step": 15690 + }, + { + "epoch": 2.45, + "grad_norm": 30.96254361674733, + "learning_rate": 1.706032024031985e-06, + "loss": 0.4761, + "step": 15691 + }, + { + "epoch": 2.45, + "grad_norm": 18.25259133208445, + "learning_rate": 1.7050900183875608e-06, + "loss": 0.4165, + "step": 15692 + }, + { + "epoch": 2.45, + "grad_norm": 24.25572723234725, + "learning_rate": 1.704148248645523e-06, + "loss": 0.4813, + "step": 15693 + }, + { + "epoch": 2.45, + "grad_norm": 17.79142915029655, + "learning_rate": 1.7032067148326536e-06, + "loss": 0.4392, + "step": 15694 + }, + { + "epoch": 2.45, + "grad_norm": 21.939948738332422, + "learning_rate": 1.7022654169757312e-06, + "loss": 0.4084, + "step": 15695 + }, + { + "epoch": 2.45, + "grad_norm": 24.65556954882072, + "learning_rate": 1.701324355101528e-06, + "loss": 0.4667, + "step": 15696 + }, + { + "epoch": 2.45, + "grad_norm": 19.71053279245022, + "learning_rate": 1.7003835292368064e-06, + "loss": 0.5133, + "step": 15697 + }, + { + "epoch": 2.45, + "grad_norm": 22.391741919760776, + "learning_rate": 1.6994429394083235e-06, + "loss": 0.4073, + "step": 15698 + }, + { + "epoch": 2.45, + "grad_norm": 16.278930824709033, + "learning_rate": 1.698502585642824e-06, + "loss": 0.4607, + "step": 15699 + }, + { + "epoch": 2.45, + "grad_norm": 21.060065826381518, + "learning_rate": 1.6975624679670556e-06, + "loss": 0.4082, + "step": 15700 + }, + { + "epoch": 2.45, + "grad_norm": 34.31509593921933, + "learning_rate": 1.6966225864077557e-06, + "loss": 0.5108, + "step": 15701 + }, + { + "epoch": 2.45, + "grad_norm": 20.508322050031307, + "learning_rate": 1.695682940991652e-06, + "loss": 0.4909, + "step": 15702 + }, + { + "epoch": 2.45, + "grad_norm": 
22.985910287871675, + "learning_rate": 1.6947435317454653e-06, + "loss": 0.4292, + "step": 15703 + }, + { + "epoch": 2.45, + "grad_norm": 19.47548698427316, + "learning_rate": 1.6938043586959141e-06, + "loss": 0.4284, + "step": 15704 + }, + { + "epoch": 2.45, + "grad_norm": 12.050977083961271, + "learning_rate": 1.6928654218697105e-06, + "loss": 0.3717, + "step": 15705 + }, + { + "epoch": 2.45, + "grad_norm": 20.90466366723827, + "learning_rate": 1.6919267212935531e-06, + "loss": 0.5338, + "step": 15706 + }, + { + "epoch": 2.45, + "grad_norm": 21.04390598473832, + "learning_rate": 1.6909882569941405e-06, + "loss": 0.5185, + "step": 15707 + }, + { + "epoch": 2.45, + "grad_norm": 19.311964475897952, + "learning_rate": 1.6900500289981603e-06, + "loss": 0.4271, + "step": 15708 + }, + { + "epoch": 2.45, + "grad_norm": 19.922750902555027, + "learning_rate": 1.6891120373322955e-06, + "loss": 0.4614, + "step": 15709 + }, + { + "epoch": 2.45, + "grad_norm": 22.186880507350157, + "learning_rate": 1.6881742820232249e-06, + "loss": 0.4484, + "step": 15710 + }, + { + "epoch": 2.45, + "grad_norm": 20.79142881944137, + "learning_rate": 1.687236763097615e-06, + "loss": 0.5004, + "step": 15711 + }, + { + "epoch": 2.45, + "grad_norm": 28.39079508785594, + "learning_rate": 1.6862994805821275e-06, + "loss": 0.4618, + "step": 15712 + }, + { + "epoch": 2.45, + "grad_norm": 34.46493770176251, + "learning_rate": 1.685362434503418e-06, + "loss": 0.4565, + "step": 15713 + }, + { + "epoch": 2.45, + "grad_norm": 18.168428165464427, + "learning_rate": 1.6844256248881408e-06, + "loss": 0.4587, + "step": 15714 + }, + { + "epoch": 2.45, + "grad_norm": 20.442181943364776, + "learning_rate": 1.6834890517629309e-06, + "loss": 0.4486, + "step": 15715 + }, + { + "epoch": 2.45, + "grad_norm": 25.33710046546341, + "learning_rate": 1.6825527151544307e-06, + "loss": 0.4138, + "step": 15716 + }, + { + "epoch": 2.46, + "grad_norm": 20.24390139624741, + "learning_rate": 1.6816166150892622e-06, + "loss": 0.481, + "step": 15717 + }, + { + "epoch": 2.46, + "grad_norm": 23.863243445189916, + "learning_rate": 1.6806807515940537e-06, + "loss": 0.4497, + "step": 15718 + }, + { + "epoch": 2.46, + "grad_norm": 23.33179953520906, + "learning_rate": 1.6797451246954166e-06, + "loss": 0.4594, + "step": 15719 + }, + { + "epoch": 2.46, + "grad_norm": 15.528177594521686, + "learning_rate": 1.6788097344199594e-06, + "loss": 0.4584, + "step": 15720 + }, + { + "epoch": 2.46, + "grad_norm": 14.745148863171943, + "learning_rate": 1.6778745807942899e-06, + "loss": 0.3949, + "step": 15721 + }, + { + "epoch": 2.46, + "grad_norm": 21.658972075059108, + "learning_rate": 1.6769396638449954e-06, + "loss": 0.438, + "step": 15722 + }, + { + "epoch": 2.46, + "grad_norm": 29.867973959384695, + "learning_rate": 1.6760049835986703e-06, + "loss": 0.4756, + "step": 15723 + }, + { + "epoch": 2.46, + "grad_norm": 31.91997725680295, + "learning_rate": 1.6750705400818913e-06, + "loss": 0.4711, + "step": 15724 + }, + { + "epoch": 2.46, + "grad_norm": 25.30707162677131, + "learning_rate": 1.6741363333212368e-06, + "loss": 0.4578, + "step": 15725 + }, + { + "epoch": 2.46, + "grad_norm": 16.47445156765777, + "learning_rate": 1.673202363343277e-06, + "loss": 0.3928, + "step": 15726 + }, + { + "epoch": 2.46, + "grad_norm": 23.360269565740566, + "learning_rate": 1.6722686301745716e-06, + "loss": 0.4526, + "step": 15727 + }, + { + "epoch": 2.46, + "grad_norm": 18.272234146009826, + "learning_rate": 1.6713351338416707e-06, + "loss": 0.5207, + "step": 15728 + }, + { + "epoch": 
2.46, + "grad_norm": 18.550500597329904, + "learning_rate": 1.670401874371128e-06, + "loss": 0.475, + "step": 15729 + }, + { + "epoch": 2.46, + "grad_norm": 20.184570663904687, + "learning_rate": 1.6694688517894852e-06, + "loss": 0.4715, + "step": 15730 + }, + { + "epoch": 2.46, + "grad_norm": 22.688760559540253, + "learning_rate": 1.6685360661232742e-06, + "loss": 0.5298, + "step": 15731 + }, + { + "epoch": 2.46, + "grad_norm": 21.263396481997386, + "learning_rate": 1.6676035173990225e-06, + "loss": 0.488, + "step": 15732 + }, + { + "epoch": 2.46, + "grad_norm": 13.907477818623446, + "learning_rate": 1.6666712056432522e-06, + "loss": 0.4376, + "step": 15733 + }, + { + "epoch": 2.46, + "grad_norm": 23.06244295360002, + "learning_rate": 1.6657391308824777e-06, + "loss": 0.3825, + "step": 15734 + }, + { + "epoch": 2.46, + "grad_norm": 14.560467777766524, + "learning_rate": 1.6648072931432091e-06, + "loss": 0.4598, + "step": 15735 + }, + { + "epoch": 2.46, + "grad_norm": 15.348141967163514, + "learning_rate": 1.663875692451945e-06, + "loss": 0.4161, + "step": 15736 + }, + { + "epoch": 2.46, + "grad_norm": 28.690267492226873, + "learning_rate": 1.6629443288351777e-06, + "loss": 0.4758, + "step": 15737 + }, + { + "epoch": 2.46, + "grad_norm": 22.162360533755802, + "learning_rate": 1.6620132023193968e-06, + "loss": 0.4485, + "step": 15738 + }, + { + "epoch": 2.46, + "grad_norm": 17.300505327792237, + "learning_rate": 1.661082312931085e-06, + "loss": 0.4433, + "step": 15739 + }, + { + "epoch": 2.46, + "grad_norm": 16.127373860708108, + "learning_rate": 1.6601516606967127e-06, + "loss": 0.4189, + "step": 15740 + }, + { + "epoch": 2.46, + "grad_norm": 23.10631513778579, + "learning_rate": 1.6592212456427492e-06, + "loss": 0.4226, + "step": 15741 + }, + { + "epoch": 2.46, + "grad_norm": 17.828958180957322, + "learning_rate": 1.6582910677956532e-06, + "loss": 0.4769, + "step": 15742 + }, + { + "epoch": 2.46, + "grad_norm": 18.875245256520007, + "learning_rate": 1.6573611271818812e-06, + "loss": 0.4423, + "step": 15743 + }, + { + "epoch": 2.46, + "grad_norm": 15.888399507501056, + "learning_rate": 1.6564314238278755e-06, + "loss": 0.4677, + "step": 15744 + }, + { + "epoch": 2.46, + "grad_norm": 15.235006550144908, + "learning_rate": 1.6555019577600806e-06, + "loss": 0.4204, + "step": 15745 + }, + { + "epoch": 2.46, + "grad_norm": 26.225437003227533, + "learning_rate": 1.6545727290049307e-06, + "loss": 0.5065, + "step": 15746 + }, + { + "epoch": 2.46, + "grad_norm": 27.23459102798812, + "learning_rate": 1.653643737588847e-06, + "loss": 0.4503, + "step": 15747 + }, + { + "epoch": 2.46, + "grad_norm": 15.500837972027806, + "learning_rate": 1.652714983538256e-06, + "loss": 0.402, + "step": 15748 + }, + { + "epoch": 2.46, + "grad_norm": 20.26094469070454, + "learning_rate": 1.6517864668795658e-06, + "loss": 0.3832, + "step": 15749 + }, + { + "epoch": 2.46, + "grad_norm": 21.15137077129673, + "learning_rate": 1.650858187639185e-06, + "loss": 0.457, + "step": 15750 + }, + { + "epoch": 2.46, + "grad_norm": 15.12292444306921, + "learning_rate": 1.6499301458435146e-06, + "loss": 0.4477, + "step": 15751 + }, + { + "epoch": 2.46, + "grad_norm": 34.511774226922874, + "learning_rate": 1.6490023415189472e-06, + "loss": 0.5406, + "step": 15752 + }, + { + "epoch": 2.46, + "grad_norm": 18.108051259073665, + "learning_rate": 1.6480747746918657e-06, + "loss": 0.3869, + "step": 15753 + }, + { + "epoch": 2.46, + "grad_norm": 14.535380076997066, + "learning_rate": 1.6471474453886504e-06, + "loss": 0.4209, + "step": 15754 + 
}, + { + "epoch": 2.46, + "grad_norm": 21.087204325168425, + "learning_rate": 1.6462203536356769e-06, + "loss": 0.406, + "step": 15755 + }, + { + "epoch": 2.46, + "grad_norm": 23.541237341642887, + "learning_rate": 1.645293499459314e-06, + "loss": 0.4353, + "step": 15756 + }, + { + "epoch": 2.46, + "grad_norm": 21.850660523697552, + "learning_rate": 1.6443668828859126e-06, + "loss": 0.5183, + "step": 15757 + }, + { + "epoch": 2.46, + "grad_norm": 20.681784007261435, + "learning_rate": 1.6434405039418277e-06, + "loss": 0.4485, + "step": 15758 + }, + { + "epoch": 2.46, + "grad_norm": 24.705386693449253, + "learning_rate": 1.642514362653408e-06, + "loss": 0.4631, + "step": 15759 + }, + { + "epoch": 2.46, + "grad_norm": 16.775212829585673, + "learning_rate": 1.6415884590469922e-06, + "loss": 0.437, + "step": 15760 + }, + { + "epoch": 2.46, + "grad_norm": 22.09529900212891, + "learning_rate": 1.6406627931489116e-06, + "loss": 0.4687, + "step": 15761 + }, + { + "epoch": 2.46, + "grad_norm": 22.013072223904953, + "learning_rate": 1.6397373649854874e-06, + "loss": 0.4063, + "step": 15762 + }, + { + "epoch": 2.46, + "grad_norm": 22.860996041540066, + "learning_rate": 1.6388121745830433e-06, + "loss": 0.4664, + "step": 15763 + }, + { + "epoch": 2.46, + "grad_norm": 26.62977977728416, + "learning_rate": 1.637887221967892e-06, + "loss": 0.4672, + "step": 15764 + }, + { + "epoch": 2.46, + "grad_norm": 30.710837432205327, + "learning_rate": 1.6369625071663365e-06, + "loss": 0.4251, + "step": 15765 + }, + { + "epoch": 2.46, + "grad_norm": 15.91126554833314, + "learning_rate": 1.636038030204672e-06, + "loss": 0.3985, + "step": 15766 + }, + { + "epoch": 2.46, + "grad_norm": 25.44609519397724, + "learning_rate": 1.6351137911091942e-06, + "loss": 0.4975, + "step": 15767 + }, + { + "epoch": 2.46, + "grad_norm": 26.950533963484457, + "learning_rate": 1.6341897899061887e-06, + "loss": 0.4428, + "step": 15768 + }, + { + "epoch": 2.46, + "grad_norm": 17.44905099303504, + "learning_rate": 1.6332660266219292e-06, + "loss": 0.4662, + "step": 15769 + }, + { + "epoch": 2.46, + "grad_norm": 23.980274385446613, + "learning_rate": 1.632342501282692e-06, + "loss": 0.5126, + "step": 15770 + }, + { + "epoch": 2.46, + "grad_norm": 23.486799757696925, + "learning_rate": 1.6314192139147366e-06, + "loss": 0.4376, + "step": 15771 + }, + { + "epoch": 2.46, + "grad_norm": 22.123059281939863, + "learning_rate": 1.630496164544323e-06, + "loss": 0.4476, + "step": 15772 + }, + { + "epoch": 2.46, + "grad_norm": 21.677437329063675, + "learning_rate": 1.6295733531977054e-06, + "loss": 0.4235, + "step": 15773 + }, + { + "epoch": 2.46, + "grad_norm": 23.671952821584764, + "learning_rate": 1.628650779901122e-06, + "loss": 0.4668, + "step": 15774 + }, + { + "epoch": 2.46, + "grad_norm": 21.2288892900679, + "learning_rate": 1.6277284446808162e-06, + "loss": 0.3926, + "step": 15775 + }, + { + "epoch": 2.46, + "grad_norm": 28.175068523002945, + "learning_rate": 1.6268063475630136e-06, + "loss": 0.4747, + "step": 15776 + }, + { + "epoch": 2.46, + "grad_norm": 21.578656915825086, + "learning_rate": 1.6258844885739434e-06, + "loss": 0.4324, + "step": 15777 + }, + { + "epoch": 2.46, + "grad_norm": 22.973499852119023, + "learning_rate": 1.6249628677398155e-06, + "loss": 0.492, + "step": 15778 + }, + { + "epoch": 2.46, + "grad_norm": 20.485454415223497, + "learning_rate": 1.6240414850868457e-06, + "loss": 0.4259, + "step": 15779 + }, + { + "epoch": 2.46, + "grad_norm": 17.160058083135137, + "learning_rate": 1.6231203406412377e-06, + "loss": 0.3976, + 
"step": 15780 + }, + { + "epoch": 2.47, + "grad_norm": 32.365971471735506, + "learning_rate": 1.622199434429188e-06, + "loss": 0.4728, + "step": 15781 + }, + { + "epoch": 2.47, + "grad_norm": 13.908113643517492, + "learning_rate": 1.6212787664768826e-06, + "loss": 0.3815, + "step": 15782 + }, + { + "epoch": 2.47, + "grad_norm": 19.140819016076644, + "learning_rate": 1.620358336810507e-06, + "loss": 0.4187, + "step": 15783 + }, + { + "epoch": 2.47, + "grad_norm": 22.876185456811204, + "learning_rate": 1.6194381454562392e-06, + "loss": 0.4372, + "step": 15784 + }, + { + "epoch": 2.47, + "grad_norm": 19.691695485320647, + "learning_rate": 1.6185181924402504e-06, + "loss": 0.4669, + "step": 15785 + }, + { + "epoch": 2.47, + "grad_norm": 16.645488250443414, + "learning_rate": 1.617598477788701e-06, + "loss": 0.3984, + "step": 15786 + }, + { + "epoch": 2.47, + "grad_norm": 23.609772265441492, + "learning_rate": 1.6166790015277446e-06, + "loss": 0.4795, + "step": 15787 + }, + { + "epoch": 2.47, + "grad_norm": 26.187348424522906, + "learning_rate": 1.6157597636835332e-06, + "loss": 0.5007, + "step": 15788 + }, + { + "epoch": 2.47, + "grad_norm": 16.776798204782633, + "learning_rate": 1.6148407642822128e-06, + "loss": 0.4749, + "step": 15789 + }, + { + "epoch": 2.47, + "grad_norm": 34.4999099260343, + "learning_rate": 1.6139220033499148e-06, + "loss": 0.4462, + "step": 15790 + }, + { + "epoch": 2.47, + "grad_norm": 25.093255654545267, + "learning_rate": 1.6130034809127682e-06, + "loss": 0.4726, + "step": 15791 + }, + { + "epoch": 2.47, + "grad_norm": 20.5607394258983, + "learning_rate": 1.6120851969968954e-06, + "loss": 0.4774, + "step": 15792 + }, + { + "epoch": 2.47, + "grad_norm": 19.460412827336775, + "learning_rate": 1.6111671516284155e-06, + "loss": 0.5472, + "step": 15793 + }, + { + "epoch": 2.47, + "grad_norm": 18.65970534525912, + "learning_rate": 1.6102493448334312e-06, + "loss": 0.4788, + "step": 15794 + }, + { + "epoch": 2.47, + "grad_norm": 17.722224742957692, + "learning_rate": 1.6093317766380511e-06, + "loss": 0.4785, + "step": 15795 + }, + { + "epoch": 2.47, + "grad_norm": 27.16006378801096, + "learning_rate": 1.608414447068365e-06, + "loss": 0.4683, + "step": 15796 + }, + { + "epoch": 2.47, + "grad_norm": 17.990680896939896, + "learning_rate": 1.6074973561504614e-06, + "loss": 0.4504, + "step": 15797 + }, + { + "epoch": 2.47, + "grad_norm": 15.260492871672495, + "learning_rate": 1.6065805039104275e-06, + "loss": 0.4479, + "step": 15798 + }, + { + "epoch": 2.47, + "grad_norm": 19.023663228475808, + "learning_rate": 1.6056638903743306e-06, + "loss": 0.4239, + "step": 15799 + }, + { + "epoch": 2.47, + "grad_norm": 16.1769134753493, + "learning_rate": 1.6047475155682445e-06, + "loss": 0.4174, + "step": 15800 + }, + { + "epoch": 2.47, + "grad_norm": 22.46304274868134, + "learning_rate": 1.603831379518227e-06, + "loss": 0.352, + "step": 15801 + }, + { + "epoch": 2.47, + "grad_norm": 14.308352918001425, + "learning_rate": 1.6029154822503346e-06, + "loss": 0.4251, + "step": 15802 + }, + { + "epoch": 2.47, + "grad_norm": 18.25301756145544, + "learning_rate": 1.601999823790611e-06, + "loss": 0.4419, + "step": 15803 + }, + { + "epoch": 2.47, + "grad_norm": 22.106829267536867, + "learning_rate": 1.601084404165103e-06, + "loss": 0.4498, + "step": 15804 + }, + { + "epoch": 2.47, + "grad_norm": 18.17004120742532, + "learning_rate": 1.600169223399839e-06, + "loss": 0.4301, + "step": 15805 + }, + { + "epoch": 2.47, + "grad_norm": 20.01652959883348, + "learning_rate": 1.59925428152085e-06, + "loss": 
0.474, + "step": 15806 + }, + { + "epoch": 2.47, + "grad_norm": 16.756729835713678, + "learning_rate": 1.5983395785541533e-06, + "loss": 0.3706, + "step": 15807 + }, + { + "epoch": 2.47, + "grad_norm": 27.04470500967914, + "learning_rate": 1.5974251145257635e-06, + "loss": 0.4483, + "step": 15808 + }, + { + "epoch": 2.47, + "grad_norm": 13.001271127260578, + "learning_rate": 1.596510889461691e-06, + "loss": 0.4275, + "step": 15809 + }, + { + "epoch": 2.47, + "grad_norm": 24.1429701360449, + "learning_rate": 1.5955969033879304e-06, + "loss": 0.4208, + "step": 15810 + }, + { + "epoch": 2.47, + "grad_norm": 19.11069303382256, + "learning_rate": 1.59468315633048e-06, + "loss": 0.435, + "step": 15811 + }, + { + "epoch": 2.47, + "grad_norm": 19.037680370690516, + "learning_rate": 1.5937696483153208e-06, + "loss": 0.4321, + "step": 15812 + }, + { + "epoch": 2.47, + "grad_norm": 22.222345161909207, + "learning_rate": 1.592856379368436e-06, + "loss": 0.45, + "step": 15813 + }, + { + "epoch": 2.47, + "grad_norm": 22.399253440500903, + "learning_rate": 1.5919433495157998e-06, + "loss": 0.4899, + "step": 15814 + }, + { + "epoch": 2.47, + "grad_norm": 20.246569874262015, + "learning_rate": 1.5910305587833763e-06, + "loss": 0.4882, + "step": 15815 + }, + { + "epoch": 2.47, + "grad_norm": 22.49344197240153, + "learning_rate": 1.5901180071971224e-06, + "loss": 0.4686, + "step": 15816 + }, + { + "epoch": 2.47, + "grad_norm": 19.122848832315956, + "learning_rate": 1.5892056947829914e-06, + "loss": 0.3949, + "step": 15817 + }, + { + "epoch": 2.47, + "grad_norm": 16.435141032438963, + "learning_rate": 1.588293621566932e-06, + "loss": 0.4591, + "step": 15818 + }, + { + "epoch": 2.47, + "grad_norm": 16.318812911775456, + "learning_rate": 1.5873817875748854e-06, + "loss": 0.4251, + "step": 15819 + }, + { + "epoch": 2.47, + "grad_norm": 16.49486935927179, + "learning_rate": 1.586470192832774e-06, + "loss": 0.4878, + "step": 15820 + }, + { + "epoch": 2.47, + "grad_norm": 26.086181328734426, + "learning_rate": 1.5855588373665298e-06, + "loss": 0.4475, + "step": 15821 + }, + { + "epoch": 2.47, + "grad_norm": 21.735985332209054, + "learning_rate": 1.5846477212020695e-06, + "loss": 0.5028, + "step": 15822 + }, + { + "epoch": 2.47, + "grad_norm": 15.885797790478811, + "learning_rate": 1.5837368443653067e-06, + "loss": 0.4181, + "step": 15823 + }, + { + "epoch": 2.47, + "grad_norm": 30.79569302694979, + "learning_rate": 1.5828262068821453e-06, + "loss": 0.4493, + "step": 15824 + }, + { + "epoch": 2.47, + "grad_norm": 15.925579086135874, + "learning_rate": 1.5819158087784802e-06, + "loss": 0.4112, + "step": 15825 + }, + { + "epoch": 2.47, + "grad_norm": 20.919531799959184, + "learning_rate": 1.5810056500802063e-06, + "loss": 0.4401, + "step": 15826 + }, + { + "epoch": 2.47, + "grad_norm": 25.579587426495788, + "learning_rate": 1.580095730813208e-06, + "loss": 0.4446, + "step": 15827 + }, + { + "epoch": 2.47, + "grad_norm": 20.015958006751642, + "learning_rate": 1.5791860510033584e-06, + "loss": 0.4614, + "step": 15828 + }, + { + "epoch": 2.47, + "grad_norm": 20.109761882756516, + "learning_rate": 1.5782766106765357e-06, + "loss": 0.4462, + "step": 15829 + }, + { + "epoch": 2.47, + "grad_norm": 21.728774618941227, + "learning_rate": 1.5773674098585955e-06, + "loss": 0.4592, + "step": 15830 + }, + { + "epoch": 2.47, + "grad_norm": 15.566478365320659, + "learning_rate": 1.5764584485754031e-06, + "loss": 0.4157, + "step": 15831 + }, + { + "epoch": 2.47, + "grad_norm": 19.097230370856966, + "learning_rate": 
1.5755497268528008e-06, + "loss": 0.4192, + "step": 15832 + }, + { + "epoch": 2.47, + "grad_norm": 16.77031149116484, + "learning_rate": 1.5746412447166371e-06, + "loss": 0.4167, + "step": 15833 + }, + { + "epoch": 2.47, + "grad_norm": 20.808085527581685, + "learning_rate": 1.57373300219275e-06, + "loss": 0.4761, + "step": 15834 + }, + { + "epoch": 2.47, + "grad_norm": 27.709133620494452, + "learning_rate": 1.5728249993069645e-06, + "loss": 0.4376, + "step": 15835 + }, + { + "epoch": 2.47, + "grad_norm": 24.312625844232592, + "learning_rate": 1.5719172360851088e-06, + "loss": 0.4547, + "step": 15836 + }, + { + "epoch": 2.47, + "grad_norm": 28.498933016103503, + "learning_rate": 1.5710097125529943e-06, + "loss": 0.4462, + "step": 15837 + }, + { + "epoch": 2.47, + "grad_norm": 27.262975946077834, + "learning_rate": 1.570102428736432e-06, + "loss": 0.4534, + "step": 15838 + }, + { + "epoch": 2.47, + "grad_norm": 20.098926065143413, + "learning_rate": 1.5691953846612284e-06, + "loss": 0.4624, + "step": 15839 + }, + { + "epoch": 2.47, + "grad_norm": 16.155902649449267, + "learning_rate": 1.5682885803531767e-06, + "loss": 0.3913, + "step": 15840 + }, + { + "epoch": 2.47, + "grad_norm": 21.10024052475499, + "learning_rate": 1.5673820158380615e-06, + "loss": 0.4716, + "step": 15841 + }, + { + "epoch": 2.47, + "grad_norm": 27.76756608335823, + "learning_rate": 1.56647569114167e-06, + "loss": 0.4665, + "step": 15842 + }, + { + "epoch": 2.47, + "grad_norm": 23.606101682044013, + "learning_rate": 1.5655696062897763e-06, + "loss": 0.422, + "step": 15843 + }, + { + "epoch": 2.47, + "grad_norm": 19.78048032088841, + "learning_rate": 1.5646637613081529e-06, + "loss": 0.4435, + "step": 15844 + }, + { + "epoch": 2.48, + "grad_norm": 19.583449949431344, + "learning_rate": 1.5637581562225535e-06, + "loss": 0.4397, + "step": 15845 + }, + { + "epoch": 2.48, + "grad_norm": 19.0700346068123, + "learning_rate": 1.5628527910587376e-06, + "loss": 0.4828, + "step": 15846 + }, + { + "epoch": 2.48, + "grad_norm": 16.261357479128073, + "learning_rate": 1.5619476658424516e-06, + "loss": 0.4376, + "step": 15847 + }, + { + "epoch": 2.48, + "grad_norm": 20.56570500358424, + "learning_rate": 1.5610427805994411e-06, + "loss": 0.4662, + "step": 15848 + }, + { + "epoch": 2.48, + "grad_norm": 25.245674551249213, + "learning_rate": 1.5601381353554369e-06, + "loss": 0.3799, + "step": 15849 + }, + { + "epoch": 2.48, + "grad_norm": 17.070036385265002, + "learning_rate": 1.5592337301361648e-06, + "loss": 0.4301, + "step": 15850 + }, + { + "epoch": 2.48, + "grad_norm": 20.830985084672424, + "learning_rate": 1.5583295649673468e-06, + "loss": 0.4381, + "step": 15851 + }, + { + "epoch": 2.48, + "grad_norm": 18.979017601429273, + "learning_rate": 1.5574256398747011e-06, + "loss": 0.3895, + "step": 15852 + }, + { + "epoch": 2.48, + "grad_norm": 17.893138392445845, + "learning_rate": 1.5565219548839317e-06, + "loss": 0.3965, + "step": 15853 + }, + { + "epoch": 2.48, + "grad_norm": 28.620914134829555, + "learning_rate": 1.5556185100207356e-06, + "loss": 0.512, + "step": 15854 + }, + { + "epoch": 2.48, + "grad_norm": 17.01075433303274, + "learning_rate": 1.5547153053108088e-06, + "loss": 0.3863, + "step": 15855 + }, + { + "epoch": 2.48, + "grad_norm": 27.012544056937227, + "learning_rate": 1.5538123407798422e-06, + "loss": 0.4644, + "step": 15856 + }, + { + "epoch": 2.48, + "grad_norm": 24.287770911495855, + "learning_rate": 1.5529096164535084e-06, + "loss": 0.4781, + "step": 15857 + }, + { + "epoch": 2.48, + "grad_norm": 19.251015557401683, 
+ "learning_rate": 1.552007132357486e-06, + "loss": 0.3952, + "step": 15858 + }, + { + "epoch": 2.48, + "grad_norm": 21.990501520770504, + "learning_rate": 1.5511048885174361e-06, + "loss": 0.4279, + "step": 15859 + }, + { + "epoch": 2.48, + "grad_norm": 20.696254944934083, + "learning_rate": 1.5502028849590212e-06, + "loss": 0.5025, + "step": 15860 + }, + { + "epoch": 2.48, + "grad_norm": 24.408892287630163, + "learning_rate": 1.5493011217078958e-06, + "loss": 0.4544, + "step": 15861 + }, + { + "epoch": 2.48, + "grad_norm": 21.323693465516758, + "learning_rate": 1.5483995987897006e-06, + "loss": 0.4783, + "step": 15862 + }, + { + "epoch": 2.48, + "grad_norm": 25.099920832368543, + "learning_rate": 1.5474983162300794e-06, + "loss": 0.3937, + "step": 15863 + }, + { + "epoch": 2.48, + "grad_norm": 27.983222073991183, + "learning_rate": 1.5465972740546586e-06, + "loss": 0.4702, + "step": 15864 + }, + { + "epoch": 2.48, + "grad_norm": 24.119569879496662, + "learning_rate": 1.5456964722890688e-06, + "loss": 0.4174, + "step": 15865 + }, + { + "epoch": 2.48, + "grad_norm": 28.876207339861036, + "learning_rate": 1.5447959109589228e-06, + "loss": 0.5151, + "step": 15866 + }, + { + "epoch": 2.48, + "grad_norm": 26.473184638557015, + "learning_rate": 1.5438955900898355e-06, + "loss": 0.4941, + "step": 15867 + }, + { + "epoch": 2.48, + "grad_norm": 28.065103913462607, + "learning_rate": 1.5429955097074133e-06, + "loss": 0.4472, + "step": 15868 + }, + { + "epoch": 2.48, + "grad_norm": 23.769763028245873, + "learning_rate": 1.5420956698372513e-06, + "loss": 0.4528, + "step": 15869 + }, + { + "epoch": 2.48, + "grad_norm": 27.888035565871476, + "learning_rate": 1.5411960705049389e-06, + "loss": 0.4603, + "step": 15870 + }, + { + "epoch": 2.48, + "grad_norm": 33.09202352888714, + "learning_rate": 1.5402967117360622e-06, + "loss": 0.4594, + "step": 15871 + }, + { + "epoch": 2.48, + "grad_norm": 31.206891998675562, + "learning_rate": 1.5393975935561977e-06, + "loss": 0.4702, + "step": 15872 + }, + { + "epoch": 2.48, + "grad_norm": 22.277404367015603, + "learning_rate": 1.538498715990918e-06, + "loss": 0.4106, + "step": 15873 + }, + { + "epoch": 2.48, + "grad_norm": 17.20349427934717, + "learning_rate": 1.5376000790657864e-06, + "loss": 0.4515, + "step": 15874 + }, + { + "epoch": 2.48, + "grad_norm": 19.52893499769126, + "learning_rate": 1.5367016828063553e-06, + "loss": 0.4192, + "step": 15875 + }, + { + "epoch": 2.48, + "grad_norm": 22.38506387302692, + "learning_rate": 1.5358035272381765e-06, + "loss": 0.4357, + "step": 15876 + }, + { + "epoch": 2.48, + "grad_norm": 33.91025539726895, + "learning_rate": 1.5349056123867977e-06, + "loss": 0.4813, + "step": 15877 + }, + { + "epoch": 2.48, + "grad_norm": 25.84013416937306, + "learning_rate": 1.5340079382777506e-06, + "loss": 0.5086, + "step": 15878 + }, + { + "epoch": 2.48, + "grad_norm": 15.89609320740819, + "learning_rate": 1.5331105049365624e-06, + "loss": 0.4099, + "step": 15879 + }, + { + "epoch": 2.48, + "grad_norm": 13.336404787758743, + "learning_rate": 1.5322133123887595e-06, + "loss": 0.4687, + "step": 15880 + }, + { + "epoch": 2.48, + "grad_norm": 19.779022821362457, + "learning_rate": 1.5313163606598591e-06, + "loss": 0.4265, + "step": 15881 + }, + { + "epoch": 2.48, + "grad_norm": 22.83796914897114, + "learning_rate": 1.5304196497753642e-06, + "loss": 0.4438, + "step": 15882 + }, + { + "epoch": 2.48, + "grad_norm": 19.019366033482452, + "learning_rate": 1.5295231797607824e-06, + "loss": 0.4395, + "step": 15883 + }, + { + "epoch": 2.48, + 
"grad_norm": 25.441464013342028, + "learning_rate": 1.528626950641604e-06, + "loss": 0.4454, + "step": 15884 + }, + { + "epoch": 2.48, + "grad_norm": 22.39590464994623, + "learning_rate": 1.527730962443319e-06, + "loss": 0.5054, + "step": 15885 + }, + { + "epoch": 2.48, + "grad_norm": 16.341402663888733, + "learning_rate": 1.5268352151914124e-06, + "loss": 0.4597, + "step": 15886 + }, + { + "epoch": 2.48, + "grad_norm": 19.32514305865262, + "learning_rate": 1.5259397089113525e-06, + "loss": 0.4985, + "step": 15887 + }, + { + "epoch": 2.48, + "grad_norm": 19.520970019557804, + "learning_rate": 1.5250444436286138e-06, + "loss": 0.423, + "step": 15888 + }, + { + "epoch": 2.48, + "grad_norm": 22.786852633997505, + "learning_rate": 1.52414941936865e-06, + "loss": 0.4889, + "step": 15889 + }, + { + "epoch": 2.48, + "grad_norm": 21.687739286765403, + "learning_rate": 1.5232546361569212e-06, + "loss": 0.448, + "step": 15890 + }, + { + "epoch": 2.48, + "grad_norm": 24.65784604741263, + "learning_rate": 1.5223600940188688e-06, + "loss": 0.5193, + "step": 15891 + }, + { + "epoch": 2.48, + "grad_norm": 25.98049628967927, + "learning_rate": 1.521465792979937e-06, + "loss": 0.4497, + "step": 15892 + }, + { + "epoch": 2.48, + "grad_norm": 21.327504345313567, + "learning_rate": 1.52057173306556e-06, + "loss": 0.4631, + "step": 15893 + }, + { + "epoch": 2.48, + "grad_norm": 16.638703404546824, + "learning_rate": 1.5196779143011619e-06, + "loss": 0.4216, + "step": 15894 + }, + { + "epoch": 2.48, + "grad_norm": 27.099849734228446, + "learning_rate": 1.5187843367121623e-06, + "loss": 0.4455, + "step": 15895 + }, + { + "epoch": 2.48, + "grad_norm": 26.04739663689291, + "learning_rate": 1.5178910003239732e-06, + "loss": 0.4662, + "step": 15896 + }, + { + "epoch": 2.48, + "grad_norm": 19.37733372815419, + "learning_rate": 1.5169979051620033e-06, + "loss": 0.4447, + "step": 15897 + }, + { + "epoch": 2.48, + "grad_norm": 14.22351636174334, + "learning_rate": 1.5161050512516528e-06, + "loss": 0.4236, + "step": 15898 + }, + { + "epoch": 2.48, + "grad_norm": 19.24288336920094, + "learning_rate": 1.515212438618312e-06, + "loss": 0.4299, + "step": 15899 + }, + { + "epoch": 2.48, + "grad_norm": 20.47311512797288, + "learning_rate": 1.5143200672873626e-06, + "loss": 0.4924, + "step": 15900 + }, + { + "epoch": 2.48, + "grad_norm": 24.937756796998947, + "learning_rate": 1.5134279372841876e-06, + "loss": 0.4817, + "step": 15901 + }, + { + "epoch": 2.48, + "grad_norm": 24.87034107733147, + "learning_rate": 1.512536048634159e-06, + "loss": 0.4097, + "step": 15902 + }, + { + "epoch": 2.48, + "grad_norm": 23.113704871569478, + "learning_rate": 1.5116444013626407e-06, + "loss": 0.4651, + "step": 15903 + }, + { + "epoch": 2.48, + "grad_norm": 20.308026687591433, + "learning_rate": 1.5107529954949873e-06, + "loss": 0.4509, + "step": 15904 + }, + { + "epoch": 2.48, + "grad_norm": 23.147841699189456, + "learning_rate": 1.5098618310565527e-06, + "loss": 0.434, + "step": 15905 + }, + { + "epoch": 2.48, + "grad_norm": 22.195651860447384, + "learning_rate": 1.5089709080726811e-06, + "loss": 0.4381, + "step": 15906 + }, + { + "epoch": 2.48, + "grad_norm": 23.298331607871916, + "learning_rate": 1.5080802265687145e-06, + "loss": 0.4811, + "step": 15907 + }, + { + "epoch": 2.48, + "grad_norm": 16.20899329786817, + "learning_rate": 1.5071897865699748e-06, + "loss": 0.4278, + "step": 15908 + }, + { + "epoch": 2.49, + "grad_norm": 11.29002141344829, + "learning_rate": 1.5062995881017882e-06, + "loss": 0.4126, + "step": 15909 + }, + { + 
"epoch": 2.49, + "grad_norm": 31.470961504396215, + "learning_rate": 1.5054096311894727e-06, + "loss": 0.4309, + "step": 15910 + }, + { + "epoch": 2.49, + "grad_norm": 21.255734776730474, + "learning_rate": 1.5045199158583411e-06, + "loss": 0.4699, + "step": 15911 + }, + { + "epoch": 2.49, + "grad_norm": 24.366989873052283, + "learning_rate": 1.5036304421336933e-06, + "loss": 0.5092, + "step": 15912 + }, + { + "epoch": 2.49, + "grad_norm": 25.176274394080387, + "learning_rate": 1.5027412100408234e-06, + "loss": 0.4551, + "step": 15913 + }, + { + "epoch": 2.49, + "grad_norm": 25.33427567491345, + "learning_rate": 1.5018522196050223e-06, + "loss": 0.4993, + "step": 15914 + }, + { + "epoch": 2.49, + "grad_norm": 22.33923602624124, + "learning_rate": 1.5009634708515753e-06, + "loss": 0.3885, + "step": 15915 + }, + { + "epoch": 2.49, + "grad_norm": 23.750658122782152, + "learning_rate": 1.5000749638057521e-06, + "loss": 0.4336, + "step": 15916 + }, + { + "epoch": 2.49, + "grad_norm": 19.47256696674842, + "learning_rate": 1.4991866984928283e-06, + "loss": 0.3814, + "step": 15917 + }, + { + "epoch": 2.49, + "grad_norm": 16.77747302466293, + "learning_rate": 1.498298674938059e-06, + "loss": 0.3898, + "step": 15918 + }, + { + "epoch": 2.49, + "grad_norm": 24.55067618627532, + "learning_rate": 1.4974108931667042e-06, + "loss": 0.5037, + "step": 15919 + }, + { + "epoch": 2.49, + "grad_norm": 26.458333694590667, + "learning_rate": 1.496523353204008e-06, + "loss": 0.4397, + "step": 15920 + }, + { + "epoch": 2.49, + "grad_norm": 20.780742321410045, + "learning_rate": 1.4956360550752125e-06, + "loss": 0.3747, + "step": 15921 + }, + { + "epoch": 2.49, + "grad_norm": 20.674386250382053, + "learning_rate": 1.4947489988055552e-06, + "loss": 0.4839, + "step": 15922 + }, + { + "epoch": 2.49, + "grad_norm": 20.032490595906165, + "learning_rate": 1.4938621844202595e-06, + "loss": 0.4719, + "step": 15923 + }, + { + "epoch": 2.49, + "grad_norm": 21.980267139579293, + "learning_rate": 1.4929756119445481e-06, + "loss": 0.4887, + "step": 15924 + }, + { + "epoch": 2.49, + "grad_norm": 3.9562957245774237, + "learning_rate": 1.4920892814036324e-06, + "loss": 0.4581, + "step": 15925 + }, + { + "epoch": 2.49, + "grad_norm": 20.830082211894204, + "learning_rate": 1.4912031928227211e-06, + "loss": 0.434, + "step": 15926 + }, + { + "epoch": 2.49, + "grad_norm": 27.20785756003081, + "learning_rate": 1.4903173462270148e-06, + "loss": 0.5017, + "step": 15927 + }, + { + "epoch": 2.49, + "grad_norm": 19.31120153494008, + "learning_rate": 1.489431741641706e-06, + "loss": 0.4552, + "step": 15928 + }, + { + "epoch": 2.49, + "grad_norm": 16.68809942622622, + "learning_rate": 1.4885463790919774e-06, + "loss": 0.386, + "step": 15929 + }, + { + "epoch": 2.49, + "grad_norm": 22.93904741690915, + "learning_rate": 1.4876612586030104e-06, + "loss": 0.4694, + "step": 15930 + }, + { + "epoch": 2.49, + "grad_norm": 18.773813326665838, + "learning_rate": 1.4867763801999768e-06, + "loss": 0.513, + "step": 15931 + }, + { + "epoch": 2.49, + "grad_norm": 25.49439518002952, + "learning_rate": 1.485891743908049e-06, + "loss": 0.4459, + "step": 15932 + }, + { + "epoch": 2.49, + "grad_norm": 21.820630716911978, + "learning_rate": 1.4850073497523743e-06, + "loss": 0.4739, + "step": 15933 + }, + { + "epoch": 2.49, + "grad_norm": 27.739568880549893, + "learning_rate": 1.4841231977581094e-06, + "loss": 0.4289, + "step": 15934 + }, + { + "epoch": 2.49, + "grad_norm": 22.17817060679014, + "learning_rate": 1.4832392879503986e-06, + "loss": 0.4394, + "step": 
15935 + }, + { + "epoch": 2.49, + "grad_norm": 35.1045746040507, + "learning_rate": 1.4823556203543833e-06, + "loss": 0.4507, + "step": 15936 + }, + { + "epoch": 2.49, + "grad_norm": 20.855989124231517, + "learning_rate": 1.4814721949951916e-06, + "loss": 0.4217, + "step": 15937 + }, + { + "epoch": 2.49, + "grad_norm": 19.25297416587063, + "learning_rate": 1.4805890118979448e-06, + "loss": 0.3914, + "step": 15938 + }, + { + "epoch": 2.49, + "grad_norm": 25.95979217932501, + "learning_rate": 1.479706071087762e-06, + "loss": 0.469, + "step": 15939 + }, + { + "epoch": 2.49, + "grad_norm": 20.43190194668352, + "learning_rate": 1.4788233725897583e-06, + "loss": 0.4084, + "step": 15940 + }, + { + "epoch": 2.49, + "grad_norm": 18.498548627155984, + "learning_rate": 1.4779409164290292e-06, + "loss": 0.4377, + "step": 15941 + }, + { + "epoch": 2.49, + "grad_norm": 28.901810638691483, + "learning_rate": 1.4770587026306782e-06, + "loss": 0.4569, + "step": 15942 + }, + { + "epoch": 2.49, + "grad_norm": 21.361813167127167, + "learning_rate": 1.4761767312197906e-06, + "loss": 0.3807, + "step": 15943 + }, + { + "epoch": 2.49, + "grad_norm": 25.39141460423376, + "learning_rate": 1.4752950022214519e-06, + "loss": 0.4308, + "step": 15944 + }, + { + "epoch": 2.49, + "grad_norm": 18.600865196398455, + "learning_rate": 1.4744135156607332e-06, + "loss": 0.4703, + "step": 15945 + }, + { + "epoch": 2.49, + "grad_norm": 16.636290009763123, + "learning_rate": 1.4735322715627075e-06, + "loss": 0.3893, + "step": 15946 + }, + { + "epoch": 2.49, + "grad_norm": 24.618136628260608, + "learning_rate": 1.4726512699524399e-06, + "loss": 0.4129, + "step": 15947 + }, + { + "epoch": 2.49, + "grad_norm": 35.03591338670897, + "learning_rate": 1.4717705108549773e-06, + "loss": 0.4514, + "step": 15948 + }, + { + "epoch": 2.49, + "grad_norm": 29.705063591574238, + "learning_rate": 1.4708899942953757e-06, + "loss": 0.5014, + "step": 15949 + }, + { + "epoch": 2.49, + "grad_norm": 31.817246844554766, + "learning_rate": 1.4700097202986719e-06, + "loss": 0.4789, + "step": 15950 + }, + { + "epoch": 2.49, + "grad_norm": 17.620312435153192, + "learning_rate": 1.4691296888899031e-06, + "loss": 0.4129, + "step": 15951 + }, + { + "epoch": 2.49, + "grad_norm": 24.37884239331442, + "learning_rate": 1.468249900094093e-06, + "loss": 0.5041, + "step": 15952 + }, + { + "epoch": 2.49, + "grad_norm": 19.99973168714669, + "learning_rate": 1.4673703539362683e-06, + "loss": 0.439, + "step": 15953 + }, + { + "epoch": 2.49, + "grad_norm": 23.148361787855663, + "learning_rate": 1.4664910504414355e-06, + "loss": 0.4214, + "step": 15954 + }, + { + "epoch": 2.49, + "grad_norm": 19.214204773318375, + "learning_rate": 1.4656119896346055e-06, + "loss": 0.427, + "step": 15955 + }, + { + "epoch": 2.49, + "grad_norm": 15.789550828908649, + "learning_rate": 1.4647331715407809e-06, + "loss": 0.4045, + "step": 15956 + }, + { + "epoch": 2.49, + "grad_norm": 20.866609413254544, + "learning_rate": 1.4638545961849525e-06, + "loss": 0.4367, + "step": 15957 + }, + { + "epoch": 2.49, + "grad_norm": 34.8019548818339, + "learning_rate": 1.462976263592103e-06, + "loss": 0.4679, + "step": 15958 + }, + { + "epoch": 2.49, + "grad_norm": 20.96090897683653, + "learning_rate": 1.4620981737872142e-06, + "loss": 0.48, + "step": 15959 + }, + { + "epoch": 2.49, + "grad_norm": 23.801750441507693, + "learning_rate": 1.4612203267952584e-06, + "loss": 0.4926, + "step": 15960 + }, + { + "epoch": 2.49, + "grad_norm": 19.57347303746456, + "learning_rate": 1.4603427226412048e-06, + "loss": 
0.4394, + "step": 15961 + }, + { + "epoch": 2.49, + "grad_norm": 32.975855135385835, + "learning_rate": 1.4594653613500086e-06, + "loss": 0.4786, + "step": 15962 + }, + { + "epoch": 2.49, + "grad_norm": 30.657353516450904, + "learning_rate": 1.458588242946618e-06, + "loss": 0.4305, + "step": 15963 + }, + { + "epoch": 2.49, + "grad_norm": 17.54057945369242, + "learning_rate": 1.457711367455983e-06, + "loss": 0.4019, + "step": 15964 + }, + { + "epoch": 2.49, + "grad_norm": 20.056506158792367, + "learning_rate": 1.4568347349030398e-06, + "loss": 0.4478, + "step": 15965 + }, + { + "epoch": 2.49, + "grad_norm": 20.068239892757056, + "learning_rate": 1.4559583453127213e-06, + "loss": 0.4297, + "step": 15966 + }, + { + "epoch": 2.49, + "grad_norm": 23.80717821489896, + "learning_rate": 1.455082198709945e-06, + "loss": 0.4576, + "step": 15967 + }, + { + "epoch": 2.49, + "grad_norm": 14.432028433405515, + "learning_rate": 1.4542062951196335e-06, + "loss": 0.4196, + "step": 15968 + }, + { + "epoch": 2.49, + "grad_norm": 20.264111352038498, + "learning_rate": 1.4533306345666954e-06, + "loss": 0.4649, + "step": 15969 + }, + { + "epoch": 2.49, + "grad_norm": 21.010694326691546, + "learning_rate": 1.4524552170760374e-06, + "loss": 0.456, + "step": 15970 + }, + { + "epoch": 2.49, + "grad_norm": 27.893013837435078, + "learning_rate": 1.4515800426725524e-06, + "loss": 0.4185, + "step": 15971 + }, + { + "epoch": 2.49, + "grad_norm": 19.58392009760701, + "learning_rate": 1.4507051113811277e-06, + "loss": 0.45, + "step": 15972 + }, + { + "epoch": 2.5, + "grad_norm": 24.573590410816404, + "learning_rate": 1.449830423226649e-06, + "loss": 0.4549, + "step": 15973 + }, + { + "epoch": 2.5, + "grad_norm": 27.79215989695097, + "learning_rate": 1.4489559782339935e-06, + "loss": 0.4456, + "step": 15974 + }, + { + "epoch": 2.5, + "grad_norm": 19.806146162440406, + "learning_rate": 1.4480817764280254e-06, + "loss": 0.4965, + "step": 15975 + }, + { + "epoch": 2.5, + "grad_norm": 30.914455157782747, + "learning_rate": 1.4472078178336102e-06, + "loss": 0.4982, + "step": 15976 + }, + { + "epoch": 2.5, + "grad_norm": 16.522800470251514, + "learning_rate": 1.4463341024756006e-06, + "loss": 0.4306, + "step": 15977 + }, + { + "epoch": 2.5, + "grad_norm": 24.581338942711422, + "learning_rate": 1.4454606303788466e-06, + "loss": 0.4073, + "step": 15978 + }, + { + "epoch": 2.5, + "grad_norm": 18.85094390578264, + "learning_rate": 1.4445874015681872e-06, + "loss": 0.4205, + "step": 15979 + }, + { + "epoch": 2.5, + "grad_norm": 20.8059160010933, + "learning_rate": 1.4437144160684558e-06, + "loss": 0.4377, + "step": 15980 + }, + { + "epoch": 2.5, + "grad_norm": 16.376876506956027, + "learning_rate": 1.4428416739044836e-06, + "loss": 0.4604, + "step": 15981 + }, + { + "epoch": 2.5, + "grad_norm": 20.56499205433905, + "learning_rate": 1.441969175101089e-06, + "loss": 0.4461, + "step": 15982 + }, + { + "epoch": 2.5, + "grad_norm": 17.94847217455842, + "learning_rate": 1.4410969196830827e-06, + "loss": 0.4406, + "step": 15983 + }, + { + "epoch": 2.5, + "grad_norm": 38.01468302307978, + "learning_rate": 1.4402249076752716e-06, + "loss": 0.4775, + "step": 15984 + }, + { + "epoch": 2.5, + "grad_norm": 29.2364358975806, + "learning_rate": 1.4393531391024585e-06, + "loss": 0.4504, + "step": 15985 + }, + { + "epoch": 2.5, + "grad_norm": 13.376873624890948, + "learning_rate": 1.4384816139894365e-06, + "loss": 0.4475, + "step": 15986 + }, + { + "epoch": 2.5, + "grad_norm": 21.843045444340923, + "learning_rate": 1.437610332360988e-06, + "loss": 
0.3982, + "step": 15987 + }, + { + "epoch": 2.5, + "grad_norm": 26.12486526406602, + "learning_rate": 1.4367392942418912e-06, + "loss": 0.46, + "step": 15988 + }, + { + "epoch": 2.5, + "grad_norm": 23.313938280588555, + "learning_rate": 1.4358684996569194e-06, + "loss": 0.4364, + "step": 15989 + }, + { + "epoch": 2.5, + "grad_norm": 27.570817531998458, + "learning_rate": 1.4349979486308396e-06, + "loss": 0.4876, + "step": 15990 + }, + { + "epoch": 2.5, + "grad_norm": 18.717144820880137, + "learning_rate": 1.4341276411884076e-06, + "loss": 0.4115, + "step": 15991 + }, + { + "epoch": 2.5, + "grad_norm": 21.42005007933213, + "learning_rate": 1.4332575773543732e-06, + "loss": 0.442, + "step": 15992 + }, + { + "epoch": 2.5, + "grad_norm": 18.655672717787574, + "learning_rate": 1.4323877571534817e-06, + "loss": 0.4908, + "step": 15993 + }, + { + "epoch": 2.5, + "grad_norm": 17.220542468255783, + "learning_rate": 1.4315181806104694e-06, + "loss": 0.43, + "step": 15994 + }, + { + "epoch": 2.5, + "grad_norm": 16.95881503331552, + "learning_rate": 1.4306488477500714e-06, + "loss": 0.394, + "step": 15995 + }, + { + "epoch": 2.5, + "grad_norm": 19.022329283281767, + "learning_rate": 1.4297797585970063e-06, + "loss": 0.4465, + "step": 15996 + }, + { + "epoch": 2.5, + "grad_norm": 18.07103298678883, + "learning_rate": 1.4289109131759894e-06, + "loss": 0.3926, + "step": 15997 + }, + { + "epoch": 2.5, + "grad_norm": 20.532406228239893, + "learning_rate": 1.4280423115117316e-06, + "loss": 0.411, + "step": 15998 + }, + { + "epoch": 2.5, + "grad_norm": 23.49068499277464, + "learning_rate": 1.4271739536289386e-06, + "loss": 0.4644, + "step": 15999 + }, + { + "epoch": 2.5, + "grad_norm": 24.123598302629787, + "learning_rate": 1.4263058395523044e-06, + "loss": 0.4253, + "step": 16000 + }, + { + "epoch": 2.5, + "grad_norm": 19.33500406266326, + "learning_rate": 1.4254379693065134e-06, + "loss": 0.4545, + "step": 16001 + }, + { + "epoch": 2.5, + "grad_norm": 25.72876902326394, + "learning_rate": 1.4245703429162504e-06, + "loss": 0.3899, + "step": 16002 + }, + { + "epoch": 2.5, + "grad_norm": 27.910769522184466, + "learning_rate": 1.4237029604061936e-06, + "loss": 0.4761, + "step": 16003 + }, + { + "epoch": 2.5, + "grad_norm": 23.265467622710162, + "learning_rate": 1.422835821801004e-06, + "loss": 0.4419, + "step": 16004 + }, + { + "epoch": 2.5, + "grad_norm": 14.359258228902299, + "learning_rate": 1.42196892712535e-06, + "loss": 0.426, + "step": 16005 + }, + { + "epoch": 2.5, + "grad_norm": 16.92926850568379, + "learning_rate": 1.4211022764038785e-06, + "loss": 0.4148, + "step": 16006 + }, + { + "epoch": 2.5, + "grad_norm": 32.90000095425404, + "learning_rate": 1.4202358696612417e-06, + "loss": 0.3752, + "step": 16007 + }, + { + "epoch": 2.5, + "grad_norm": 24.18432842020453, + "learning_rate": 1.4193697069220758e-06, + "loss": 0.425, + "step": 16008 + }, + { + "epoch": 2.5, + "grad_norm": 22.54681177586437, + "learning_rate": 1.4185037882110165e-06, + "loss": 0.4678, + "step": 16009 + }, + { + "epoch": 2.5, + "grad_norm": 31.959970348180875, + "learning_rate": 1.4176381135526906e-06, + "loss": 0.4374, + "step": 16010 + }, + { + "epoch": 2.5, + "grad_norm": 24.57881377540959, + "learning_rate": 1.4167726829717155e-06, + "loss": 0.4897, + "step": 16011 + }, + { + "epoch": 2.5, + "grad_norm": 20.11600947514142, + "learning_rate": 1.4159074964927055e-06, + "loss": 0.4423, + "step": 16012 + }, + { + "epoch": 2.5, + "grad_norm": 19.540866472246, + "learning_rate": 1.4150425541402623e-06, + "loss": 0.4107, + "step": 
16013 + }, + { + "epoch": 2.5, + "grad_norm": 15.356355147104633, + "learning_rate": 1.4141778559389875e-06, + "loss": 0.4353, + "step": 16014 + }, + { + "epoch": 2.5, + "grad_norm": 27.35814947218924, + "learning_rate": 1.413313401913473e-06, + "loss": 0.5225, + "step": 16015 + }, + { + "epoch": 2.5, + "grad_norm": 32.787733076047395, + "learning_rate": 1.4124491920883031e-06, + "loss": 0.4293, + "step": 16016 + }, + { + "epoch": 2.5, + "grad_norm": 21.90812415991012, + "learning_rate": 1.4115852264880525e-06, + "loss": 0.4075, + "step": 16017 + }, + { + "epoch": 2.5, + "grad_norm": 18.233990539348376, + "learning_rate": 1.4107215051372935e-06, + "loss": 0.4186, + "step": 16018 + }, + { + "epoch": 2.5, + "grad_norm": 19.485973633662397, + "learning_rate": 1.409858028060591e-06, + "loss": 0.4672, + "step": 16019 + }, + { + "epoch": 2.5, + "grad_norm": 19.039306624189297, + "learning_rate": 1.4089947952825057e-06, + "loss": 0.4286, + "step": 16020 + }, + { + "epoch": 2.5, + "grad_norm": 22.8893474463208, + "learning_rate": 1.4081318068275773e-06, + "loss": 0.4361, + "step": 16021 + }, + { + "epoch": 2.5, + "grad_norm": 24.24454182777104, + "learning_rate": 1.4072690627203544e-06, + "loss": 0.4408, + "step": 16022 + }, + { + "epoch": 2.5, + "grad_norm": 17.432240352625374, + "learning_rate": 1.406406562985373e-06, + "loss": 0.4632, + "step": 16023 + }, + { + "epoch": 2.5, + "grad_norm": 22.903756152927027, + "learning_rate": 1.4055443076471641e-06, + "loss": 0.465, + "step": 16024 + }, + { + "epoch": 2.5, + "grad_norm": 23.215776742143245, + "learning_rate": 1.4046822967302477e-06, + "loss": 0.4997, + "step": 16025 + }, + { + "epoch": 2.5, + "grad_norm": 14.620105898580562, + "learning_rate": 1.4038205302591368e-06, + "loss": 0.4179, + "step": 16026 + }, + { + "epoch": 2.5, + "grad_norm": 21.436592743890895, + "learning_rate": 1.4029590082583411e-06, + "loss": 0.4302, + "step": 16027 + }, + { + "epoch": 2.5, + "grad_norm": 29.291359342036372, + "learning_rate": 1.4020977307523642e-06, + "loss": 0.4827, + "step": 16028 + }, + { + "epoch": 2.5, + "grad_norm": 30.086728616306587, + "learning_rate": 1.4012366977656954e-06, + "loss": 0.442, + "step": 16029 + }, + { + "epoch": 2.5, + "grad_norm": 26.25214179815325, + "learning_rate": 1.4003759093228275e-06, + "loss": 0.3874, + "step": 16030 + }, + { + "epoch": 2.5, + "grad_norm": 23.431748455690133, + "learning_rate": 1.3995153654482363e-06, + "loss": 0.4182, + "step": 16031 + }, + { + "epoch": 2.5, + "grad_norm": 19.557807866195223, + "learning_rate": 1.3986550661663978e-06, + "loss": 0.393, + "step": 16032 + }, + { + "epoch": 2.5, + "grad_norm": 27.632800985802522, + "learning_rate": 1.3977950115017768e-06, + "loss": 0.4477, + "step": 16033 + }, + { + "epoch": 2.5, + "grad_norm": 28.932862035770018, + "learning_rate": 1.3969352014788328e-06, + "loss": 0.457, + "step": 16034 + }, + { + "epoch": 2.5, + "grad_norm": 18.31484045201061, + "learning_rate": 1.3960756361220219e-06, + "loss": 0.5123, + "step": 16035 + }, + { + "epoch": 2.5, + "grad_norm": 30.49663488495039, + "learning_rate": 1.395216315455784e-06, + "loss": 0.4648, + "step": 16036 + }, + { + "epoch": 2.5, + "grad_norm": 21.023862308532454, + "learning_rate": 1.3943572395045624e-06, + "loss": 0.4263, + "step": 16037 + }, + { + "epoch": 2.51, + "grad_norm": 23.209689269179492, + "learning_rate": 1.393498408292785e-06, + "loss": 0.4343, + "step": 16038 + }, + { + "epoch": 2.51, + "grad_norm": 32.665241953493314, + "learning_rate": 1.3926398218448778e-06, + "loss": 0.4288, + "step": 16039 + 
}, + { + "epoch": 2.51, + "grad_norm": 14.577035718088354, + "learning_rate": 1.3917814801852615e-06, + "loss": 0.4358, + "step": 16040 + }, + { + "epoch": 2.51, + "grad_norm": 19.79365517657097, + "learning_rate": 1.390923383338345e-06, + "loss": 0.4464, + "step": 16041 + }, + { + "epoch": 2.51, + "grad_norm": 31.17829810535406, + "learning_rate": 1.3900655313285283e-06, + "loss": 0.416, + "step": 16042 + }, + { + "epoch": 2.51, + "grad_norm": 24.73465749021419, + "learning_rate": 1.3892079241802104e-06, + "loss": 0.3867, + "step": 16043 + }, + { + "epoch": 2.51, + "grad_norm": 25.43623042951149, + "learning_rate": 1.3883505619177829e-06, + "loss": 0.4216, + "step": 16044 + }, + { + "epoch": 2.51, + "grad_norm": 15.731861722553425, + "learning_rate": 1.3874934445656319e-06, + "loss": 0.4715, + "step": 16045 + }, + { + "epoch": 2.51, + "grad_norm": 20.884758223208838, + "learning_rate": 1.3866365721481256e-06, + "loss": 0.3903, + "step": 16046 + }, + { + "epoch": 2.51, + "grad_norm": 28.662180169975024, + "learning_rate": 1.3857799446896358e-06, + "loss": 0.4619, + "step": 16047 + }, + { + "epoch": 2.51, + "grad_norm": 27.883706859547214, + "learning_rate": 1.3849235622145253e-06, + "loss": 0.4339, + "step": 16048 + }, + { + "epoch": 2.51, + "grad_norm": 22.12828886108539, + "learning_rate": 1.3840674247471508e-06, + "loss": 0.4407, + "step": 16049 + }, + { + "epoch": 2.51, + "grad_norm": 16.025920230127273, + "learning_rate": 1.383211532311859e-06, + "loss": 0.4243, + "step": 16050 + }, + { + "epoch": 2.51, + "grad_norm": 18.96181464946443, + "learning_rate": 1.3823558849329877e-06, + "loss": 0.4347, + "step": 16051 + }, + { + "epoch": 2.51, + "grad_norm": 15.193194252977401, + "learning_rate": 1.3815004826348744e-06, + "loss": 0.3938, + "step": 16052 + }, + { + "epoch": 2.51, + "grad_norm": 23.72867822493261, + "learning_rate": 1.3806453254418483e-06, + "loss": 0.4712, + "step": 16053 + }, + { + "epoch": 2.51, + "grad_norm": 15.049578108965338, + "learning_rate": 1.3797904133782269e-06, + "loss": 0.4461, + "step": 16054 + }, + { + "epoch": 2.51, + "grad_norm": 23.052904414915574, + "learning_rate": 1.3789357464683206e-06, + "loss": 0.4673, + "step": 16055 + }, + { + "epoch": 2.51, + "grad_norm": 27.994096920293632, + "learning_rate": 1.3780813247364388e-06, + "loss": 0.4564, + "step": 16056 + }, + { + "epoch": 2.51, + "grad_norm": 16.532372325701015, + "learning_rate": 1.3772271482068799e-06, + "loss": 0.4643, + "step": 16057 + }, + { + "epoch": 2.51, + "grad_norm": 43.587454237994585, + "learning_rate": 1.3763732169039401e-06, + "loss": 0.6139, + "step": 16058 + }, + { + "epoch": 2.51, + "grad_norm": 16.64526821914078, + "learning_rate": 1.375519530851901e-06, + "loss": 0.4037, + "step": 16059 + }, + { + "epoch": 2.51, + "grad_norm": 22.01109019400029, + "learning_rate": 1.3746660900750386e-06, + "loss": 0.4552, + "step": 16060 + }, + { + "epoch": 2.51, + "grad_norm": 20.013676850076404, + "learning_rate": 1.373812894597627e-06, + "loss": 0.4199, + "step": 16061 + }, + { + "epoch": 2.51, + "grad_norm": 17.5749172445807, + "learning_rate": 1.3729599444439323e-06, + "loss": 0.4345, + "step": 16062 + }, + { + "epoch": 2.51, + "grad_norm": 19.31902425804104, + "learning_rate": 1.3721072396382085e-06, + "loss": 0.4092, + "step": 16063 + }, + { + "epoch": 2.51, + "grad_norm": 29.792735227792747, + "learning_rate": 1.3712547802047094e-06, + "loss": 0.5029, + "step": 16064 + }, + { + "epoch": 2.51, + "grad_norm": 18.350584615520887, + "learning_rate": 1.3704025661676757e-06, + "loss": 0.4988, + 
"step": 16065 + }, + { + "epoch": 2.51, + "grad_norm": 24.355428596836692, + "learning_rate": 1.3695505975513456e-06, + "loss": 0.4713, + "step": 16066 + }, + { + "epoch": 2.51, + "grad_norm": 18.577649434470043, + "learning_rate": 1.3686988743799466e-06, + "loss": 0.4024, + "step": 16067 + }, + { + "epoch": 2.51, + "grad_norm": 12.396153702389274, + "learning_rate": 1.3678473966777018e-06, + "loss": 0.4521, + "step": 16068 + }, + { + "epoch": 2.51, + "grad_norm": 21.607760226309008, + "learning_rate": 1.36699616446883e-06, + "loss": 0.4152, + "step": 16069 + }, + { + "epoch": 2.51, + "grad_norm": 29.978763376248963, + "learning_rate": 1.3661451777775369e-06, + "loss": 0.4588, + "step": 16070 + }, + { + "epoch": 2.51, + "grad_norm": 23.434229630867517, + "learning_rate": 1.3652944366280218e-06, + "loss": 0.4732, + "step": 16071 + }, + { + "epoch": 2.51, + "grad_norm": 23.066913181196533, + "learning_rate": 1.364443941044482e-06, + "loss": 0.4508, + "step": 16072 + }, + { + "epoch": 2.51, + "grad_norm": 20.84857807119181, + "learning_rate": 1.363593691051105e-06, + "loss": 0.4867, + "step": 16073 + }, + { + "epoch": 2.51, + "grad_norm": 16.880398707938138, + "learning_rate": 1.3627436866720734e-06, + "loss": 0.4079, + "step": 16074 + }, + { + "epoch": 2.51, + "grad_norm": 18.449364661795336, + "learning_rate": 1.3618939279315591e-06, + "loss": 0.4162, + "step": 16075 + }, + { + "epoch": 2.51, + "grad_norm": 28.60231956932494, + "learning_rate": 1.3610444148537261e-06, + "loss": 0.4687, + "step": 16076 + }, + { + "epoch": 2.51, + "grad_norm": 12.10890978011818, + "learning_rate": 1.3601951474627362e-06, + "loss": 0.4715, + "step": 16077 + }, + { + "epoch": 2.51, + "grad_norm": 19.36689264581611, + "learning_rate": 1.3593461257827433e-06, + "loss": 0.4134, + "step": 16078 + }, + { + "epoch": 2.51, + "grad_norm": 18.24007244469165, + "learning_rate": 1.3584973498378928e-06, + "loss": 0.4382, + "step": 16079 + }, + { + "epoch": 2.51, + "grad_norm": 16.53252686380978, + "learning_rate": 1.3576488196523207e-06, + "loss": 0.3798, + "step": 16080 + }, + { + "epoch": 2.51, + "grad_norm": 24.835521443719877, + "learning_rate": 1.3568005352501602e-06, + "loss": 0.4876, + "step": 16081 + }, + { + "epoch": 2.51, + "grad_norm": 19.419010135196174, + "learning_rate": 1.355952496655536e-06, + "loss": 0.4499, + "step": 16082 + }, + { + "epoch": 2.51, + "grad_norm": 22.79683973428614, + "learning_rate": 1.3551047038925692e-06, + "loss": 0.475, + "step": 16083 + }, + { + "epoch": 2.51, + "grad_norm": 16.234364634410497, + "learning_rate": 1.3542571569853669e-06, + "loss": 0.3969, + "step": 16084 + }, + { + "epoch": 2.51, + "grad_norm": 29.244811420088652, + "learning_rate": 1.353409855958031e-06, + "loss": 0.4857, + "step": 16085 + }, + { + "epoch": 2.51, + "grad_norm": 23.346327759358495, + "learning_rate": 1.3525628008346613e-06, + "loss": 0.4477, + "step": 16086 + }, + { + "epoch": 2.51, + "grad_norm": 16.592895253216835, + "learning_rate": 1.3517159916393485e-06, + "loss": 0.3904, + "step": 16087 + }, + { + "epoch": 2.51, + "grad_norm": 16.77317982561453, + "learning_rate": 1.350869428396172e-06, + "loss": 0.4769, + "step": 16088 + }, + { + "epoch": 2.51, + "grad_norm": 17.477146218228754, + "learning_rate": 1.3500231111292118e-06, + "loss": 0.4318, + "step": 16089 + }, + { + "epoch": 2.51, + "grad_norm": 21.350442247701558, + "learning_rate": 1.3491770398625315e-06, + "loss": 0.4135, + "step": 16090 + }, + { + "epoch": 2.51, + "grad_norm": 23.693940265991415, + "learning_rate": 1.3483312146201988e-06, + 
"loss": 0.4339, + "step": 16091 + }, + { + "epoch": 2.51, + "grad_norm": 15.730008705654967, + "learning_rate": 1.3474856354262622e-06, + "loss": 0.366, + "step": 16092 + }, + { + "epoch": 2.51, + "grad_norm": 33.185076490374136, + "learning_rate": 1.3466403023047724e-06, + "loss": 0.4819, + "step": 16093 + }, + { + "epoch": 2.51, + "grad_norm": 18.95929009220646, + "learning_rate": 1.3457952152797727e-06, + "loss": 0.4776, + "step": 16094 + }, + { + "epoch": 2.51, + "grad_norm": 24.565996670989747, + "learning_rate": 1.3449503743752945e-06, + "loss": 0.4369, + "step": 16095 + }, + { + "epoch": 2.51, + "grad_norm": 27.02435263467979, + "learning_rate": 1.3441057796153634e-06, + "loss": 0.4302, + "step": 16096 + }, + { + "epoch": 2.51, + "grad_norm": 22.766718204264784, + "learning_rate": 1.3432614310239989e-06, + "loss": 0.45, + "step": 16097 + }, + { + "epoch": 2.51, + "grad_norm": 22.88300408973077, + "learning_rate": 1.3424173286252185e-06, + "loss": 0.4317, + "step": 16098 + }, + { + "epoch": 2.51, + "grad_norm": 17.461529160615942, + "learning_rate": 1.3415734724430219e-06, + "loss": 0.4074, + "step": 16099 + }, + { + "epoch": 2.51, + "grad_norm": 14.915799160713203, + "learning_rate": 1.340729862501413e-06, + "loss": 0.412, + "step": 16100 + }, + { + "epoch": 2.51, + "grad_norm": 37.88191126842757, + "learning_rate": 1.3398864988243786e-06, + "loss": 0.453, + "step": 16101 + }, + { + "epoch": 2.52, + "grad_norm": 16.244575570803324, + "learning_rate": 1.3390433814359072e-06, + "loss": 0.4578, + "step": 16102 + }, + { + "epoch": 2.52, + "grad_norm": 15.751859441621695, + "learning_rate": 1.338200510359977e-06, + "loss": 0.4327, + "step": 16103 + }, + { + "epoch": 2.52, + "grad_norm": 16.355720114478938, + "learning_rate": 1.3373578856205583e-06, + "loss": 0.4698, + "step": 16104 + }, + { + "epoch": 2.52, + "grad_norm": 23.373897884773598, + "learning_rate": 1.3365155072416103e-06, + "loss": 0.42, + "step": 16105 + }, + { + "epoch": 2.52, + "grad_norm": 17.9143363783638, + "learning_rate": 1.335673375247094e-06, + "loss": 0.4645, + "step": 16106 + }, + { + "epoch": 2.52, + "grad_norm": 26.02268170599414, + "learning_rate": 1.3348314896609581e-06, + "loss": 0.4158, + "step": 16107 + }, + { + "epoch": 2.52, + "grad_norm": 16.200770929030377, + "learning_rate": 1.3339898505071503e-06, + "loss": 0.454, + "step": 16108 + }, + { + "epoch": 2.52, + "grad_norm": 22.129009123358525, + "learning_rate": 1.3331484578095976e-06, + "loss": 0.3922, + "step": 16109 + }, + { + "epoch": 2.52, + "grad_norm": 24.081902440972662, + "learning_rate": 1.3323073115922324e-06, + "loss": 0.3776, + "step": 16110 + }, + { + "epoch": 2.52, + "grad_norm": 18.908004810139264, + "learning_rate": 1.3314664118789766e-06, + "loss": 0.4503, + "step": 16111 + }, + { + "epoch": 2.52, + "grad_norm": 29.913964095791915, + "learning_rate": 1.3306257586937476e-06, + "loss": 0.4645, + "step": 16112 + }, + { + "epoch": 2.52, + "grad_norm": 18.913729501256434, + "learning_rate": 1.3297853520604497e-06, + "loss": 0.4864, + "step": 16113 + }, + { + "epoch": 2.52, + "grad_norm": 15.73875133979761, + "learning_rate": 1.3289451920029838e-06, + "loss": 0.4436, + "step": 16114 + }, + { + "epoch": 2.52, + "grad_norm": 22.27661197764153, + "learning_rate": 1.328105278545243e-06, + "loss": 0.3886, + "step": 16115 + }, + { + "epoch": 2.52, + "grad_norm": 22.03122522778843, + "learning_rate": 1.3272656117111183e-06, + "loss": 0.42, + "step": 16116 + }, + { + "epoch": 2.52, + "grad_norm": 23.480596286577722, + "learning_rate": 
1.3264261915244836e-06, + "loss": 0.4274, + "step": 16117 + }, + { + "epoch": 2.52, + "grad_norm": 24.36375371864161, + "learning_rate": 1.3255870180092157e-06, + "loss": 0.4866, + "step": 16118 + }, + { + "epoch": 2.52, + "grad_norm": 18.549188702571065, + "learning_rate": 1.3247480911891775e-06, + "loss": 0.4201, + "step": 16119 + }, + { + "epoch": 2.52, + "grad_norm": 19.961146512215674, + "learning_rate": 1.3239094110882279e-06, + "loss": 0.399, + "step": 16120 + }, + { + "epoch": 2.52, + "grad_norm": 32.790279165894724, + "learning_rate": 1.323070977730222e-06, + "loss": 0.4622, + "step": 16121 + }, + { + "epoch": 2.52, + "grad_norm": 23.84024557993538, + "learning_rate": 1.3222327911389988e-06, + "loss": 0.4507, + "step": 16122 + }, + { + "epoch": 2.52, + "grad_norm": 22.435744356362395, + "learning_rate": 1.3213948513384011e-06, + "loss": 0.3852, + "step": 16123 + }, + { + "epoch": 2.52, + "grad_norm": 15.939901484215818, + "learning_rate": 1.320557158352256e-06, + "loss": 0.4418, + "step": 16124 + }, + { + "epoch": 2.52, + "grad_norm": 27.66030546562115, + "learning_rate": 1.3197197122043892e-06, + "loss": 0.3918, + "step": 16125 + }, + { + "epoch": 2.52, + "grad_norm": 34.52606753646948, + "learning_rate": 1.3188825129186134e-06, + "loss": 0.5061, + "step": 16126 + }, + { + "epoch": 2.52, + "grad_norm": 19.435270130179433, + "learning_rate": 1.3180455605187404e-06, + "loss": 0.5326, + "step": 16127 + }, + { + "epoch": 2.52, + "grad_norm": 21.34916310763949, + "learning_rate": 1.3172088550285756e-06, + "loss": 0.4382, + "step": 16128 + }, + { + "epoch": 2.52, + "grad_norm": 17.489164564853382, + "learning_rate": 1.3163723964719122e-06, + "loss": 0.4377, + "step": 16129 + }, + { + "epoch": 2.52, + "grad_norm": 16.49358801901327, + "learning_rate": 1.3155361848725357e-06, + "loss": 0.4037, + "step": 16130 + }, + { + "epoch": 2.52, + "grad_norm": 17.431644727348466, + "learning_rate": 1.3147002202542291e-06, + "loss": 0.4086, + "step": 16131 + }, + { + "epoch": 2.52, + "grad_norm": 22.87720014982173, + "learning_rate": 1.3138645026407682e-06, + "loss": 0.5487, + "step": 16132 + }, + { + "epoch": 2.52, + "grad_norm": 28.105182428339685, + "learning_rate": 1.3130290320559235e-06, + "loss": 0.4736, + "step": 16133 + }, + { + "epoch": 2.52, + "grad_norm": 16.121196676710916, + "learning_rate": 1.3121938085234466e-06, + "loss": 0.3873, + "step": 16134 + }, + { + "epoch": 2.52, + "grad_norm": 25.141077182181313, + "learning_rate": 1.3113588320670967e-06, + "loss": 0.521, + "step": 16135 + }, + { + "epoch": 2.52, + "grad_norm": 25.3080304067366, + "learning_rate": 1.3105241027106187e-06, + "loss": 0.4905, + "step": 16136 + }, + { + "epoch": 2.52, + "grad_norm": 14.552060328852011, + "learning_rate": 1.3096896204777532e-06, + "loss": 0.4588, + "step": 16137 + }, + { + "epoch": 2.52, + "grad_norm": 19.95056523741904, + "learning_rate": 1.3088553853922325e-06, + "loss": 0.4285, + "step": 16138 + }, + { + "epoch": 2.52, + "grad_norm": 16.930672067267473, + "learning_rate": 1.3080213974777766e-06, + "loss": 0.4071, + "step": 16139 + }, + { + "epoch": 2.52, + "grad_norm": 21.45939903035727, + "learning_rate": 1.3071876567581087e-06, + "loss": 0.4511, + "step": 16140 + }, + { + "epoch": 2.52, + "grad_norm": 28.289480477010375, + "learning_rate": 1.306354163256941e-06, + "loss": 0.4329, + "step": 16141 + }, + { + "epoch": 2.52, + "grad_norm": 22.187050965291913, + "learning_rate": 1.3055209169979754e-06, + "loss": 0.4594, + "step": 16142 + }, + { + "epoch": 2.52, + "grad_norm": 20.203624089294813, + 
"learning_rate": 1.3046879180049054e-06, + "loss": 0.4578, + "step": 16143 + }, + { + "epoch": 2.52, + "grad_norm": 39.72745619587268, + "learning_rate": 1.3038551663014255e-06, + "loss": 0.4512, + "step": 16144 + }, + { + "epoch": 2.52, + "grad_norm": 22.021933170998004, + "learning_rate": 1.3030226619112175e-06, + "loss": 0.4401, + "step": 16145 + }, + { + "epoch": 2.52, + "grad_norm": 24.099133587682196, + "learning_rate": 1.3021904048579592e-06, + "loss": 0.4342, + "step": 16146 + }, + { + "epoch": 2.52, + "grad_norm": 19.128758235745906, + "learning_rate": 1.3013583951653185e-06, + "loss": 0.4436, + "step": 16147 + }, + { + "epoch": 2.52, + "grad_norm": 22.877803676924344, + "learning_rate": 1.3005266328569533e-06, + "loss": 0.4927, + "step": 16148 + }, + { + "epoch": 2.52, + "grad_norm": 21.34779687540237, + "learning_rate": 1.2996951179565221e-06, + "loss": 0.4455, + "step": 16149 + }, + { + "epoch": 2.52, + "grad_norm": 21.475657447125872, + "learning_rate": 1.2988638504876738e-06, + "loss": 0.4777, + "step": 16150 + }, + { + "epoch": 2.52, + "grad_norm": 26.98429490863332, + "learning_rate": 1.2980328304740464e-06, + "loss": 0.4661, + "step": 16151 + }, + { + "epoch": 2.52, + "grad_norm": 15.879735638306125, + "learning_rate": 1.2972020579392763e-06, + "loss": 0.4237, + "step": 16152 + }, + { + "epoch": 2.52, + "grad_norm": 23.275216031121918, + "learning_rate": 1.296371532906987e-06, + "loss": 0.4969, + "step": 16153 + }, + { + "epoch": 2.52, + "grad_norm": 27.978751454050144, + "learning_rate": 1.2955412554008018e-06, + "loss": 0.5051, + "step": 16154 + }, + { + "epoch": 2.52, + "grad_norm": 16.76163886252795, + "learning_rate": 1.2947112254443283e-06, + "loss": 0.4076, + "step": 16155 + }, + { + "epoch": 2.52, + "grad_norm": 22.30297299157287, + "learning_rate": 1.2938814430611756e-06, + "loss": 0.4316, + "step": 16156 + }, + { + "epoch": 2.52, + "grad_norm": 26.578792343866596, + "learning_rate": 1.2930519082749448e-06, + "loss": 0.4022, + "step": 16157 + }, + { + "epoch": 2.52, + "grad_norm": 15.995794345629422, + "learning_rate": 1.2922226211092237e-06, + "loss": 0.4509, + "step": 16158 + }, + { + "epoch": 2.52, + "grad_norm": 19.25341360948246, + "learning_rate": 1.2913935815875945e-06, + "loss": 0.4199, + "step": 16159 + }, + { + "epoch": 2.52, + "grad_norm": 19.742479203684088, + "learning_rate": 1.2905647897336382e-06, + "loss": 0.4515, + "step": 16160 + }, + { + "epoch": 2.52, + "grad_norm": 18.099668360077146, + "learning_rate": 1.289736245570925e-06, + "loss": 0.4401, + "step": 16161 + }, + { + "epoch": 2.52, + "grad_norm": 13.951648367037468, + "learning_rate": 1.2889079491230184e-06, + "loss": 0.4362, + "step": 16162 + }, + { + "epoch": 2.52, + "grad_norm": 30.03526686559841, + "learning_rate": 1.2880799004134748e-06, + "loss": 0.4427, + "step": 16163 + }, + { + "epoch": 2.52, + "grad_norm": 17.127564521880164, + "learning_rate": 1.2872520994658399e-06, + "loss": 0.3623, + "step": 16164 + }, + { + "epoch": 2.52, + "grad_norm": 21.367385864731812, + "learning_rate": 1.2864245463036585e-06, + "loss": 0.4744, + "step": 16165 + }, + { + "epoch": 2.53, + "grad_norm": 19.521943041635705, + "learning_rate": 1.285597240950468e-06, + "loss": 0.4562, + "step": 16166 + }, + { + "epoch": 2.53, + "grad_norm": 25.701737878931475, + "learning_rate": 1.2847701834297944e-06, + "loss": 0.4992, + "step": 16167 + }, + { + "epoch": 2.53, + "grad_norm": 23.597040625228257, + "learning_rate": 1.2839433737651564e-06, + "loss": 0.4109, + "step": 16168 + }, + { + "epoch": 2.53, + 
"grad_norm": 26.400038246444073, + "learning_rate": 1.2831168119800696e-06, + "loss": 0.4778, + "step": 16169 + }, + { + "epoch": 2.53, + "grad_norm": 26.384701177462002, + "learning_rate": 1.2822904980980422e-06, + "loss": 0.4188, + "step": 16170 + }, + { + "epoch": 2.53, + "grad_norm": 28.6465904502806, + "learning_rate": 1.2814644321425752e-06, + "loss": 0.4454, + "step": 16171 + }, + { + "epoch": 2.53, + "grad_norm": 20.54095085520488, + "learning_rate": 1.2806386141371596e-06, + "loss": 0.4357, + "step": 16172 + }, + { + "epoch": 2.53, + "grad_norm": 15.882080351152075, + "learning_rate": 1.279813044105278e-06, + "loss": 0.4967, + "step": 16173 + }, + { + "epoch": 2.53, + "grad_norm": 13.645148332403927, + "learning_rate": 1.2789877220704127e-06, + "loss": 0.4162, + "step": 16174 + }, + { + "epoch": 2.53, + "grad_norm": 19.405512592262003, + "learning_rate": 1.2781626480560384e-06, + "loss": 0.4495, + "step": 16175 + }, + { + "epoch": 2.53, + "grad_norm": 21.214584773953103, + "learning_rate": 1.2773378220856126e-06, + "loss": 0.4208, + "step": 16176 + }, + { + "epoch": 2.53, + "grad_norm": 20.417531345061835, + "learning_rate": 1.2765132441825989e-06, + "loss": 0.3994, + "step": 16177 + }, + { + "epoch": 2.53, + "grad_norm": 16.558808744906163, + "learning_rate": 1.2756889143704443e-06, + "loss": 0.4769, + "step": 16178 + }, + { + "epoch": 2.53, + "grad_norm": 29.28959923896237, + "learning_rate": 1.2748648326725943e-06, + "loss": 0.4233, + "step": 16179 + }, + { + "epoch": 2.53, + "grad_norm": 21.396533118104976, + "learning_rate": 1.2740409991124826e-06, + "loss": 0.3972, + "step": 16180 + }, + { + "epoch": 2.53, + "grad_norm": 19.996897906889256, + "learning_rate": 1.273217413713541e-06, + "loss": 0.5071, + "step": 16181 + }, + { + "epoch": 2.53, + "grad_norm": 29.145206892492695, + "learning_rate": 1.2723940764991927e-06, + "loss": 0.5035, + "step": 16182 + }, + { + "epoch": 2.53, + "grad_norm": 20.378472820613535, + "learning_rate": 1.2715709874928518e-06, + "loss": 0.4704, + "step": 16183 + }, + { + "epoch": 2.53, + "grad_norm": 17.606376671089954, + "learning_rate": 1.2707481467179228e-06, + "loss": 0.4685, + "step": 16184 + }, + { + "epoch": 2.53, + "grad_norm": 18.684463760661522, + "learning_rate": 1.2699255541978117e-06, + "loss": 0.4176, + "step": 16185 + }, + { + "epoch": 2.53, + "grad_norm": 32.92323274487656, + "learning_rate": 1.2691032099559097e-06, + "loss": 0.5049, + "step": 16186 + }, + { + "epoch": 2.53, + "grad_norm": 19.893253018705774, + "learning_rate": 1.268281114015607e-06, + "loss": 0.4758, + "step": 16187 + }, + { + "epoch": 2.53, + "grad_norm": 29.18216352260454, + "learning_rate": 1.2674592664002828e-06, + "loss": 0.4583, + "step": 16188 + }, + { + "epoch": 2.53, + "grad_norm": 22.359304695898565, + "learning_rate": 1.2666376671333048e-06, + "loss": 0.4736, + "step": 16189 + }, + { + "epoch": 2.53, + "grad_norm": 23.29364880022195, + "learning_rate": 1.2658163162380443e-06, + "loss": 0.4767, + "step": 16190 + }, + { + "epoch": 2.53, + "grad_norm": 18.124525445447688, + "learning_rate": 1.2649952137378596e-06, + "loss": 0.5043, + "step": 16191 + }, + { + "epoch": 2.53, + "grad_norm": 26.6021441932411, + "learning_rate": 1.2641743596561007e-06, + "loss": 0.4006, + "step": 16192 + }, + { + "epoch": 2.53, + "grad_norm": 18.132631134151765, + "learning_rate": 1.2633537540161123e-06, + "loss": 0.4597, + "step": 16193 + }, + { + "epoch": 2.53, + "grad_norm": 20.867707415623116, + "learning_rate": 1.2625333968412312e-06, + "loss": 0.4097, + "step": 16194 + }, + 
{ + "epoch": 2.53, + "grad_norm": 19.063351136889246, + "learning_rate": 1.261713288154789e-06, + "loss": 0.4489, + "step": 16195 + }, + { + "epoch": 2.53, + "grad_norm": 19.23634535290724, + "learning_rate": 1.2608934279801133e-06, + "loss": 0.4439, + "step": 16196 + }, + { + "epoch": 2.53, + "grad_norm": 20.721883468887953, + "learning_rate": 1.2600738163405124e-06, + "loss": 0.3929, + "step": 16197 + }, + { + "epoch": 2.53, + "grad_norm": 22.324421003465098, + "learning_rate": 1.2592544532592988e-06, + "loss": 0.4098, + "step": 16198 + }, + { + "epoch": 2.53, + "grad_norm": 19.71749577334569, + "learning_rate": 1.2584353387597758e-06, + "loss": 0.4543, + "step": 16199 + }, + { + "epoch": 2.53, + "grad_norm": 30.93114246878487, + "learning_rate": 1.2576164728652406e-06, + "loss": 0.4372, + "step": 16200 + }, + { + "epoch": 2.53, + "grad_norm": 24.22293906007539, + "learning_rate": 1.256797855598978e-06, + "loss": 0.525, + "step": 16201 + }, + { + "epoch": 2.53, + "grad_norm": 23.434392992707878, + "learning_rate": 1.255979486984269e-06, + "loss": 0.418, + "step": 16202 + }, + { + "epoch": 2.53, + "grad_norm": 13.143526084750306, + "learning_rate": 1.2551613670443862e-06, + "loss": 0.4043, + "step": 16203 + }, + { + "epoch": 2.53, + "grad_norm": 16.07366426422336, + "learning_rate": 1.2543434958026024e-06, + "loss": 0.4313, + "step": 16204 + }, + { + "epoch": 2.53, + "grad_norm": 19.119309566747837, + "learning_rate": 1.253525873282171e-06, + "loss": 0.3971, + "step": 16205 + }, + { + "epoch": 2.53, + "grad_norm": 19.379038246841734, + "learning_rate": 1.2527084995063488e-06, + "loss": 0.391, + "step": 16206 + }, + { + "epoch": 2.53, + "grad_norm": 18.502078468770403, + "learning_rate": 1.2518913744983786e-06, + "loss": 0.4458, + "step": 16207 + }, + { + "epoch": 2.53, + "grad_norm": 23.802937243285648, + "learning_rate": 1.2510744982814993e-06, + "loss": 0.4392, + "step": 16208 + }, + { + "epoch": 2.53, + "grad_norm": 18.102730585342382, + "learning_rate": 1.250257870878946e-06, + "loss": 0.4205, + "step": 16209 + }, + { + "epoch": 2.53, + "grad_norm": 21.19327764976099, + "learning_rate": 1.2494414923139375e-06, + "loss": 0.3977, + "step": 16210 + }, + { + "epoch": 2.53, + "grad_norm": 22.61116185472731, + "learning_rate": 1.2486253626096978e-06, + "loss": 0.3901, + "step": 16211 + }, + { + "epoch": 2.53, + "grad_norm": 27.417180792549605, + "learning_rate": 1.2478094817894293e-06, + "loss": 0.513, + "step": 16212 + }, + { + "epoch": 2.53, + "grad_norm": 24.06952805681748, + "learning_rate": 1.2469938498763423e-06, + "loss": 0.4251, + "step": 16213 + }, + { + "epoch": 2.53, + "grad_norm": 20.828894784814825, + "learning_rate": 1.2461784668936283e-06, + "loss": 0.4059, + "step": 16214 + }, + { + "epoch": 2.53, + "grad_norm": 15.310972599807217, + "learning_rate": 1.2453633328644765e-06, + "loss": 0.4425, + "step": 16215 + }, + { + "epoch": 2.53, + "grad_norm": 22.515909237493734, + "learning_rate": 1.244548447812074e-06, + "loss": 0.4189, + "step": 16216 + }, + { + "epoch": 2.53, + "grad_norm": 16.304867524568387, + "learning_rate": 1.243733811759591e-06, + "loss": 0.421, + "step": 16217 + }, + { + "epoch": 2.53, + "grad_norm": 29.198554136947916, + "learning_rate": 1.2429194247301934e-06, + "loss": 0.4726, + "step": 16218 + }, + { + "epoch": 2.53, + "grad_norm": 20.18794052924772, + "learning_rate": 1.2421052867470452e-06, + "loss": 0.4199, + "step": 16219 + }, + { + "epoch": 2.53, + "grad_norm": 15.717766903570451, + "learning_rate": 1.2412913978332997e-06, + "loss": 0.4405, + "step": 
16220 + }, + { + "epoch": 2.53, + "grad_norm": 16.601425967833137, + "learning_rate": 1.2404777580121075e-06, + "loss": 0.4061, + "step": 16221 + }, + { + "epoch": 2.53, + "grad_norm": 31.139884177170483, + "learning_rate": 1.2396643673065988e-06, + "loss": 0.5397, + "step": 16222 + }, + { + "epoch": 2.53, + "grad_norm": 23.946683831331104, + "learning_rate": 1.238851225739911e-06, + "loss": 0.476, + "step": 16223 + }, + { + "epoch": 2.53, + "grad_norm": 32.56812494059177, + "learning_rate": 1.2380383333351687e-06, + "loss": 0.4861, + "step": 16224 + }, + { + "epoch": 2.53, + "grad_norm": 28.42693705199055, + "learning_rate": 1.2372256901154934e-06, + "loss": 0.467, + "step": 16225 + }, + { + "epoch": 2.53, + "grad_norm": 27.79674657853102, + "learning_rate": 1.2364132961039933e-06, + "loss": 0.4302, + "step": 16226 + }, + { + "epoch": 2.53, + "grad_norm": 33.61215200084452, + "learning_rate": 1.2356011513237708e-06, + "loss": 0.5053, + "step": 16227 + }, + { + "epoch": 2.53, + "grad_norm": 26.108951646213594, + "learning_rate": 1.2347892557979236e-06, + "loss": 0.4456, + "step": 16228 + }, + { + "epoch": 2.53, + "grad_norm": 22.40774533005232, + "learning_rate": 1.233977609549546e-06, + "loss": 0.4224, + "step": 16229 + }, + { + "epoch": 2.54, + "grad_norm": 27.591776182143025, + "learning_rate": 1.2331662126017141e-06, + "loss": 0.4777, + "step": 16230 + }, + { + "epoch": 2.54, + "grad_norm": 23.88816294511219, + "learning_rate": 1.2323550649775085e-06, + "loss": 0.4592, + "step": 16231 + }, + { + "epoch": 2.54, + "grad_norm": 19.915892017501292, + "learning_rate": 1.2315441666999939e-06, + "loss": 0.4355, + "step": 16232 + }, + { + "epoch": 2.54, + "grad_norm": 19.90625801864774, + "learning_rate": 1.2307335177922342e-06, + "loss": 0.4418, + "step": 16233 + }, + { + "epoch": 2.54, + "grad_norm": 19.950949541329113, + "learning_rate": 1.2299231182772852e-06, + "loss": 0.3922, + "step": 16234 + }, + { + "epoch": 2.54, + "grad_norm": 17.943142660519563, + "learning_rate": 1.229112968178191e-06, + "loss": 0.4171, + "step": 16235 + }, + { + "epoch": 2.54, + "grad_norm": 26.25359470413586, + "learning_rate": 1.2283030675179951e-06, + "loss": 0.4693, + "step": 16236 + }, + { + "epoch": 2.54, + "grad_norm": 21.474521105256585, + "learning_rate": 1.227493416319726e-06, + "loss": 0.3891, + "step": 16237 + }, + { + "epoch": 2.54, + "grad_norm": 15.272140795207816, + "learning_rate": 1.226684014606414e-06, + "loss": 0.413, + "step": 16238 + }, + { + "epoch": 2.54, + "grad_norm": 28.175248185835784, + "learning_rate": 1.2258748624010752e-06, + "loss": 0.415, + "step": 16239 + }, + { + "epoch": 2.54, + "grad_norm": 24.11331563107011, + "learning_rate": 1.2250659597267244e-06, + "loss": 0.4373, + "step": 16240 + }, + { + "epoch": 2.54, + "grad_norm": 17.56547091595823, + "learning_rate": 1.2242573066063623e-06, + "loss": 0.3961, + "step": 16241 + }, + { + "epoch": 2.54, + "grad_norm": 21.297519435494202, + "learning_rate": 1.2234489030629916e-06, + "loss": 0.4004, + "step": 16242 + }, + { + "epoch": 2.54, + "grad_norm": 19.961800212842434, + "learning_rate": 1.2226407491195969e-06, + "loss": 0.4261, + "step": 16243 + }, + { + "epoch": 2.54, + "grad_norm": 25.35944365916944, + "learning_rate": 1.2218328447991657e-06, + "loss": 0.4462, + "step": 16244 + }, + { + "epoch": 2.54, + "grad_norm": 17.408130405496266, + "learning_rate": 1.2210251901246739e-06, + "loss": 0.4898, + "step": 16245 + }, + { + "epoch": 2.54, + "grad_norm": 20.053793525757946, + "learning_rate": 1.2202177851190912e-06, + "loss": 
0.4534, + "step": 16246 + }, + { + "epoch": 2.54, + "grad_norm": 28.486410025399437, + "learning_rate": 1.2194106298053765e-06, + "loss": 0.4737, + "step": 16247 + }, + { + "epoch": 2.54, + "grad_norm": 16.557755326887538, + "learning_rate": 1.2186037242064863e-06, + "loss": 0.4554, + "step": 16248 + }, + { + "epoch": 2.54, + "grad_norm": 21.909335613609805, + "learning_rate": 1.21779706834537e-06, + "loss": 0.4745, + "step": 16249 + }, + { + "epoch": 2.54, + "grad_norm": 17.707214554806292, + "learning_rate": 1.2169906622449701e-06, + "loss": 0.4348, + "step": 16250 + }, + { + "epoch": 2.54, + "grad_norm": 20.95286784700535, + "learning_rate": 1.2161845059282174e-06, + "loss": 0.494, + "step": 16251 + }, + { + "epoch": 2.54, + "grad_norm": 18.850565022757994, + "learning_rate": 1.2153785994180366e-06, + "loss": 0.4144, + "step": 16252 + }, + { + "epoch": 2.54, + "grad_norm": 17.879570797585448, + "learning_rate": 1.2145729427373499e-06, + "loss": 0.4222, + "step": 16253 + }, + { + "epoch": 2.54, + "grad_norm": 17.964010260370724, + "learning_rate": 1.2137675359090705e-06, + "loss": 0.4409, + "step": 16254 + }, + { + "epoch": 2.54, + "grad_norm": 25.876305411875467, + "learning_rate": 1.212962378956104e-06, + "loss": 0.4295, + "step": 16255 + }, + { + "epoch": 2.54, + "grad_norm": 25.384379962252396, + "learning_rate": 1.212157471901344e-06, + "loss": 0.4495, + "step": 16256 + }, + { + "epoch": 2.54, + "grad_norm": 22.731816360848157, + "learning_rate": 1.2113528147676855e-06, + "loss": 0.3938, + "step": 16257 + }, + { + "epoch": 2.54, + "grad_norm": 39.87602008363829, + "learning_rate": 1.2105484075780117e-06, + "loss": 0.5317, + "step": 16258 + }, + { + "epoch": 2.54, + "grad_norm": 25.149658652239655, + "learning_rate": 1.2097442503552003e-06, + "loss": 0.4414, + "step": 16259 + }, + { + "epoch": 2.54, + "grad_norm": 17.093697265390233, + "learning_rate": 1.2089403431221213e-06, + "loss": 0.4942, + "step": 16260 + }, + { + "epoch": 2.54, + "grad_norm": 23.624666810209124, + "learning_rate": 1.2081366859016353e-06, + "loss": 0.5039, + "step": 16261 + }, + { + "epoch": 2.54, + "grad_norm": 22.74907721754449, + "learning_rate": 1.207333278716597e-06, + "loss": 0.47, + "step": 16262 + }, + { + "epoch": 2.54, + "grad_norm": 4.915220300859985, + "learning_rate": 1.2065301215898595e-06, + "loss": 0.4499, + "step": 16263 + }, + { + "epoch": 2.54, + "grad_norm": 19.250262511773297, + "learning_rate": 1.20572721454426e-06, + "loss": 0.4474, + "step": 16264 + }, + { + "epoch": 2.54, + "grad_norm": 18.62519518378732, + "learning_rate": 1.2049245576026346e-06, + "loss": 0.444, + "step": 16265 + }, + { + "epoch": 2.54, + "grad_norm": 17.28177501991546, + "learning_rate": 1.2041221507878087e-06, + "loss": 0.4567, + "step": 16266 + }, + { + "epoch": 2.54, + "grad_norm": 18.670112972037764, + "learning_rate": 1.203319994122606e-06, + "loss": 0.4854, + "step": 16267 + }, + { + "epoch": 2.54, + "grad_norm": 15.713256090085531, + "learning_rate": 1.2025180876298338e-06, + "loss": 0.4029, + "step": 16268 + }, + { + "epoch": 2.54, + "grad_norm": 34.608469280851885, + "learning_rate": 1.2017164313323004e-06, + "loss": 0.4417, + "step": 16269 + }, + { + "epoch": 2.54, + "grad_norm": 15.796480723855371, + "learning_rate": 1.200915025252808e-06, + "loss": 0.371, + "step": 16270 + }, + { + "epoch": 2.54, + "grad_norm": 20.9708927860443, + "learning_rate": 1.2001138694141423e-06, + "loss": 0.4312, + "step": 16271 + }, + { + "epoch": 2.54, + "grad_norm": 19.769344572411853, + "learning_rate": 1.199312963839092e-06, 
+ "loss": 0.4483, + "step": 16272 + }, + { + "epoch": 2.54, + "grad_norm": 25.40493711783794, + "learning_rate": 1.1985123085504312e-06, + "loss": 0.4976, + "step": 16273 + }, + { + "epoch": 2.54, + "grad_norm": 21.644188912463495, + "learning_rate": 1.1977119035709329e-06, + "loss": 0.4464, + "step": 16274 + }, + { + "epoch": 2.54, + "grad_norm": 22.74301314517832, + "learning_rate": 1.1969117489233595e-06, + "loss": 0.4299, + "step": 16275 + }, + { + "epoch": 2.54, + "grad_norm": 20.396701071155285, + "learning_rate": 1.1961118446304666e-06, + "loss": 0.4506, + "step": 16276 + }, + { + "epoch": 2.54, + "grad_norm": 18.053488097304097, + "learning_rate": 1.195312190715e-06, + "loss": 0.4286, + "step": 16277 + }, + { + "epoch": 2.54, + "grad_norm": 21.954936002378727, + "learning_rate": 1.1945127871997042e-06, + "loss": 0.4265, + "step": 16278 + }, + { + "epoch": 2.54, + "grad_norm": 15.260374305349496, + "learning_rate": 1.1937136341073163e-06, + "loss": 0.3861, + "step": 16279 + }, + { + "epoch": 2.54, + "grad_norm": 18.288779962533507, + "learning_rate": 1.1929147314605617e-06, + "loss": 0.4704, + "step": 16280 + }, + { + "epoch": 2.54, + "grad_norm": 18.91514803069053, + "learning_rate": 1.1921160792821572e-06, + "loss": 0.4546, + "step": 16281 + }, + { + "epoch": 2.54, + "grad_norm": 15.176547670587869, + "learning_rate": 1.1913176775948187e-06, + "loss": 0.4447, + "step": 16282 + }, + { + "epoch": 2.54, + "grad_norm": 22.289080741813354, + "learning_rate": 1.1905195264212532e-06, + "loss": 0.4224, + "step": 16283 + }, + { + "epoch": 2.54, + "grad_norm": 24.3583317619512, + "learning_rate": 1.1897216257841605e-06, + "loss": 0.455, + "step": 16284 + }, + { + "epoch": 2.54, + "grad_norm": 24.582762408631325, + "learning_rate": 1.1889239757062309e-06, + "loss": 0.435, + "step": 16285 + }, + { + "epoch": 2.54, + "grad_norm": 32.851864820851475, + "learning_rate": 1.188126576210148e-06, + "loss": 0.4154, + "step": 16286 + }, + { + "epoch": 2.54, + "grad_norm": 20.140068313593815, + "learning_rate": 1.1873294273185898e-06, + "loss": 0.4222, + "step": 16287 + }, + { + "epoch": 2.54, + "grad_norm": 17.573977639807598, + "learning_rate": 1.1865325290542295e-06, + "loss": 0.4762, + "step": 16288 + }, + { + "epoch": 2.54, + "grad_norm": 17.67043129313243, + "learning_rate": 1.185735881439728e-06, + "loss": 0.5002, + "step": 16289 + }, + { + "epoch": 2.54, + "grad_norm": 22.47103234439085, + "learning_rate": 1.1849394844977402e-06, + "loss": 0.4952, + "step": 16290 + }, + { + "epoch": 2.54, + "grad_norm": 18.97538011588581, + "learning_rate": 1.184143338250917e-06, + "loss": 0.4181, + "step": 16291 + }, + { + "epoch": 2.54, + "grad_norm": 15.072978286731447, + "learning_rate": 1.1833474427219015e-06, + "loss": 0.4126, + "step": 16292 + }, + { + "epoch": 2.54, + "grad_norm": 21.14310812340901, + "learning_rate": 1.1825517979333256e-06, + "loss": 0.4642, + "step": 16293 + }, + { + "epoch": 2.55, + "grad_norm": 12.274444614032154, + "learning_rate": 1.18175640390782e-06, + "loss": 0.4687, + "step": 16294 + }, + { + "epoch": 2.55, + "grad_norm": 29.038151591014906, + "learning_rate": 1.180961260668002e-06, + "loss": 0.4576, + "step": 16295 + }, + { + "epoch": 2.55, + "grad_norm": 25.431581194050647, + "learning_rate": 1.1801663682364873e-06, + "loss": 0.4689, + "step": 16296 + }, + { + "epoch": 2.55, + "grad_norm": 18.64219839869337, + "learning_rate": 1.179371726635883e-06, + "loss": 0.4392, + "step": 16297 + }, + { + "epoch": 2.55, + "grad_norm": 19.215664515886605, + "learning_rate": 
1.1785773358887854e-06, + "loss": 0.4256, + "step": 16298 + }, + { + "epoch": 2.55, + "grad_norm": 21.038373586691502, + "learning_rate": 1.1777831960177898e-06, + "loss": 0.4521, + "step": 16299 + }, + { + "epoch": 2.55, + "grad_norm": 24.7798821311084, + "learning_rate": 1.1769893070454774e-06, + "loss": 0.4299, + "step": 16300 + }, + { + "epoch": 2.55, + "grad_norm": 15.645600610730098, + "learning_rate": 1.1761956689944288e-06, + "loss": 0.4111, + "step": 16301 + }, + { + "epoch": 2.55, + "grad_norm": 22.118569230200322, + "learning_rate": 1.1754022818872123e-06, + "loss": 0.3853, + "step": 16302 + }, + { + "epoch": 2.55, + "grad_norm": 21.478384515151088, + "learning_rate": 1.1746091457463927e-06, + "loss": 0.5403, + "step": 16303 + }, + { + "epoch": 2.55, + "grad_norm": 26.74090856179101, + "learning_rate": 1.173816260594529e-06, + "loss": 0.4452, + "step": 16304 + }, + { + "epoch": 2.55, + "grad_norm": 17.75884874746568, + "learning_rate": 1.1730236264541661e-06, + "loss": 0.3696, + "step": 16305 + }, + { + "epoch": 2.55, + "grad_norm": 24.003414411604094, + "learning_rate": 1.1722312433478467e-06, + "loss": 0.4272, + "step": 16306 + }, + { + "epoch": 2.55, + "grad_norm": 23.056585662918472, + "learning_rate": 1.1714391112981071e-06, + "loss": 0.4952, + "step": 16307 + }, + { + "epoch": 2.55, + "grad_norm": 17.832641771191643, + "learning_rate": 1.170647230327473e-06, + "loss": 0.4021, + "step": 16308 + }, + { + "epoch": 2.55, + "grad_norm": 23.17824702966873, + "learning_rate": 1.1698556004584728e-06, + "loss": 0.4867, + "step": 16309 + }, + { + "epoch": 2.55, + "grad_norm": 25.214565479448012, + "learning_rate": 1.1690642217136084e-06, + "loss": 0.4566, + "step": 16310 + }, + { + "epoch": 2.55, + "grad_norm": 16.151760605568352, + "learning_rate": 1.1682730941153918e-06, + "loss": 0.3906, + "step": 16311 + }, + { + "epoch": 2.55, + "grad_norm": 23.242825475319567, + "learning_rate": 1.167482217686322e-06, + "loss": 0.4192, + "step": 16312 + }, + { + "epoch": 2.55, + "grad_norm": 28.212979516681244, + "learning_rate": 1.1666915924488931e-06, + "loss": 0.4565, + "step": 16313 + }, + { + "epoch": 2.55, + "grad_norm": 18.35953668076768, + "learning_rate": 1.165901218425588e-06, + "loss": 0.4163, + "step": 16314 + }, + { + "epoch": 2.55, + "grad_norm": 22.42526052880745, + "learning_rate": 1.1651110956388822e-06, + "loss": 0.4955, + "step": 16315 + }, + { + "epoch": 2.55, + "grad_norm": 24.786270793508038, + "learning_rate": 1.164321224111249e-06, + "loss": 0.4752, + "step": 16316 + }, + { + "epoch": 2.55, + "grad_norm": 14.603192277741512, + "learning_rate": 1.1635316038651524e-06, + "loss": 0.4186, + "step": 16317 + }, + { + "epoch": 2.55, + "grad_norm": 16.976893413424033, + "learning_rate": 1.1627422349230465e-06, + "loss": 0.3975, + "step": 16318 + }, + { + "epoch": 2.55, + "grad_norm": 4.2559601350352265, + "learning_rate": 1.161953117307385e-06, + "loss": 0.512, + "step": 16319 + }, + { + "epoch": 2.55, + "grad_norm": 17.43850911033743, + "learning_rate": 1.161164251040603e-06, + "loss": 0.4227, + "step": 16320 + }, + { + "epoch": 2.55, + "grad_norm": 31.428002993401954, + "learning_rate": 1.1603756361451402e-06, + "loss": 0.4544, + "step": 16321 + }, + { + "epoch": 2.55, + "grad_norm": 20.824160259324255, + "learning_rate": 1.1595872726434243e-06, + "loss": 0.4135, + "step": 16322 + }, + { + "epoch": 2.55, + "grad_norm": 24.397785517018377, + "learning_rate": 1.158799160557874e-06, + "loss": 0.4647, + "step": 16323 + }, + { + "epoch": 2.55, + "grad_norm": 26.32288204931034, + 
"learning_rate": 1.158011299910905e-06, + "loss": 0.4082, + "step": 16324 + }, + { + "epoch": 2.55, + "grad_norm": 23.529266931174835, + "learning_rate": 1.15722369072492e-06, + "loss": 0.4936, + "step": 16325 + }, + { + "epoch": 2.55, + "grad_norm": 14.12718292792, + "learning_rate": 1.1564363330223227e-06, + "loss": 0.4463, + "step": 16326 + }, + { + "epoch": 2.55, + "grad_norm": 23.26092624867748, + "learning_rate": 1.1556492268255004e-06, + "loss": 0.4761, + "step": 16327 + }, + { + "epoch": 2.55, + "grad_norm": 19.714218955119456, + "learning_rate": 1.1548623721568409e-06, + "loss": 0.4228, + "step": 16328 + }, + { + "epoch": 2.55, + "grad_norm": 27.73140780412827, + "learning_rate": 1.1540757690387227e-06, + "loss": 0.4478, + "step": 16329 + }, + { + "epoch": 2.55, + "grad_norm": 19.243511439384427, + "learning_rate": 1.153289417493515e-06, + "loss": 0.4369, + "step": 16330 + }, + { + "epoch": 2.55, + "grad_norm": 31.92530178660795, + "learning_rate": 1.1525033175435796e-06, + "loss": 0.4552, + "step": 16331 + }, + { + "epoch": 2.55, + "grad_norm": 27.64650107558058, + "learning_rate": 1.1517174692112742e-06, + "loss": 0.4551, + "step": 16332 + }, + { + "epoch": 2.55, + "grad_norm": 20.307559452401318, + "learning_rate": 1.1509318725189477e-06, + "loss": 0.4117, + "step": 16333 + }, + { + "epoch": 2.55, + "grad_norm": 23.166770202347617, + "learning_rate": 1.1501465274889457e-06, + "loss": 0.4535, + "step": 16334 + }, + { + "epoch": 2.55, + "grad_norm": 12.961670130533356, + "learning_rate": 1.1493614341435954e-06, + "loss": 0.4072, + "step": 16335 + }, + { + "epoch": 2.55, + "grad_norm": 24.85060751398322, + "learning_rate": 1.1485765925052294e-06, + "loss": 0.4467, + "step": 16336 + }, + { + "epoch": 2.55, + "grad_norm": 18.073122976941274, + "learning_rate": 1.1477920025961664e-06, + "loss": 0.4546, + "step": 16337 + }, + { + "epoch": 2.55, + "grad_norm": 23.775591125620586, + "learning_rate": 1.1470076644387229e-06, + "loss": 0.4211, + "step": 16338 + }, + { + "epoch": 2.55, + "grad_norm": 14.727395348309006, + "learning_rate": 1.1462235780552023e-06, + "loss": 0.3557, + "step": 16339 + }, + { + "epoch": 2.55, + "grad_norm": 17.4260286182358, + "learning_rate": 1.1454397434679022e-06, + "loss": 0.3497, + "step": 16340 + }, + { + "epoch": 2.55, + "grad_norm": 20.70163296055469, + "learning_rate": 1.1446561606991158e-06, + "loss": 0.4274, + "step": 16341 + }, + { + "epoch": 2.55, + "grad_norm": 17.421664148602872, + "learning_rate": 1.1438728297711288e-06, + "loss": 0.3865, + "step": 16342 + }, + { + "epoch": 2.55, + "grad_norm": 22.885467101816943, + "learning_rate": 1.14308975070622e-06, + "loss": 0.4025, + "step": 16343 + }, + { + "epoch": 2.55, + "grad_norm": 21.706163560990117, + "learning_rate": 1.1423069235266538e-06, + "loss": 0.396, + "step": 16344 + }, + { + "epoch": 2.55, + "grad_norm": 23.88431262782147, + "learning_rate": 1.1415243482546977e-06, + "loss": 0.4234, + "step": 16345 + }, + { + "epoch": 2.55, + "grad_norm": 25.39930066883431, + "learning_rate": 1.1407420249126068e-06, + "loss": 0.4089, + "step": 16346 + }, + { + "epoch": 2.55, + "grad_norm": 18.919922317626167, + "learning_rate": 1.1399599535226324e-06, + "loss": 0.5246, + "step": 16347 + }, + { + "epoch": 2.55, + "grad_norm": 19.213375302989913, + "learning_rate": 1.139178134107014e-06, + "loss": 0.4129, + "step": 16348 + }, + { + "epoch": 2.55, + "grad_norm": 24.411083641510903, + "learning_rate": 1.1383965666879847e-06, + "loss": 0.4604, + "step": 16349 + }, + { + "epoch": 2.55, + "grad_norm": 
20.891414150234976, + "learning_rate": 1.1376152512877725e-06, + "loss": 0.4733, + "step": 16350 + }, + { + "epoch": 2.55, + "grad_norm": 28.763862318604023, + "learning_rate": 1.1368341879286004e-06, + "loss": 0.5224, + "step": 16351 + }, + { + "epoch": 2.55, + "grad_norm": 42.95521647029882, + "learning_rate": 1.1360533766326765e-06, + "loss": 0.4916, + "step": 16352 + }, + { + "epoch": 2.55, + "grad_norm": 18.685922021809194, + "learning_rate": 1.1352728174222128e-06, + "loss": 0.5037, + "step": 16353 + }, + { + "epoch": 2.55, + "grad_norm": 22.051910685021223, + "learning_rate": 1.1344925103194005e-06, + "loss": 0.4671, + "step": 16354 + }, + { + "epoch": 2.55, + "grad_norm": 17.850159651312126, + "learning_rate": 1.1337124553464384e-06, + "loss": 0.4175, + "step": 16355 + }, + { + "epoch": 2.55, + "grad_norm": 28.218984533203432, + "learning_rate": 1.1329326525255046e-06, + "loss": 0.4321, + "step": 16356 + }, + { + "epoch": 2.55, + "grad_norm": 27.66665776750027, + "learning_rate": 1.1321531018787801e-06, + "loss": 0.4387, + "step": 16357 + }, + { + "epoch": 2.56, + "grad_norm": 17.757202231934077, + "learning_rate": 1.131373803428435e-06, + "loss": 0.4483, + "step": 16358 + }, + { + "epoch": 2.56, + "grad_norm": 25.520247251566996, + "learning_rate": 1.1305947571966291e-06, + "loss": 0.4666, + "step": 16359 + }, + { + "epoch": 2.56, + "grad_norm": 19.31917600609384, + "learning_rate": 1.1298159632055228e-06, + "loss": 0.403, + "step": 16360 + }, + { + "epoch": 2.56, + "grad_norm": 32.47964946381058, + "learning_rate": 1.1290374214772582e-06, + "loss": 0.4483, + "step": 16361 + }, + { + "epoch": 2.56, + "grad_norm": 21.91319271816273, + "learning_rate": 1.1282591320339809e-06, + "loss": 0.4727, + "step": 16362 + }, + { + "epoch": 2.56, + "grad_norm": 21.226594818197338, + "learning_rate": 1.1274810948978255e-06, + "loss": 0.4471, + "step": 16363 + }, + { + "epoch": 2.56, + "grad_norm": 24.34232487993892, + "learning_rate": 1.1267033100909174e-06, + "loss": 0.3947, + "step": 16364 + }, + { + "epoch": 2.56, + "grad_norm": 25.37973193621713, + "learning_rate": 1.125925777635375e-06, + "loss": 0.4139, + "step": 16365 + }, + { + "epoch": 2.56, + "grad_norm": 27.15655636806649, + "learning_rate": 1.1251484975533123e-06, + "loss": 0.4633, + "step": 16366 + }, + { + "epoch": 2.56, + "grad_norm": 17.152954780469305, + "learning_rate": 1.1243714698668363e-06, + "loss": 0.4225, + "step": 16367 + }, + { + "epoch": 2.56, + "grad_norm": 31.86852340582149, + "learning_rate": 1.1235946945980435e-06, + "loss": 0.4758, + "step": 16368 + }, + { + "epoch": 2.56, + "grad_norm": 28.23079905957375, + "learning_rate": 1.1228181717690234e-06, + "loss": 0.4735, + "step": 16369 + }, + { + "epoch": 2.56, + "grad_norm": 26.843039559854052, + "learning_rate": 1.1220419014018613e-06, + "loss": 0.4771, + "step": 16370 + }, + { + "epoch": 2.56, + "grad_norm": 29.149476975734725, + "learning_rate": 1.121265883518634e-06, + "loss": 0.4881, + "step": 16371 + }, + { + "epoch": 2.56, + "grad_norm": 18.689965827791, + "learning_rate": 1.1204901181414141e-06, + "loss": 0.3978, + "step": 16372 + }, + { + "epoch": 2.56, + "grad_norm": 20.492457200085124, + "learning_rate": 1.1197146052922592e-06, + "loss": 0.4384, + "step": 16373 + }, + { + "epoch": 2.56, + "grad_norm": 20.207400283378668, + "learning_rate": 1.118939344993225e-06, + "loss": 0.4618, + "step": 16374 + }, + { + "epoch": 2.56, + "grad_norm": 26.166570634928924, + "learning_rate": 1.1181643372663608e-06, + "loss": 0.448, + "step": 16375 + }, + { + "epoch": 2.56, 
+ "grad_norm": 17.849882378177295, + "learning_rate": 1.1173895821337088e-06, + "loss": 0.4165, + "step": 16376 + }, + { + "epoch": 2.56, + "grad_norm": 17.187464382828306, + "learning_rate": 1.1166150796172981e-06, + "loss": 0.3819, + "step": 16377 + }, + { + "epoch": 2.56, + "grad_norm": 28.87890983405053, + "learning_rate": 1.115840829739161e-06, + "loss": 0.3964, + "step": 16378 + }, + { + "epoch": 2.56, + "grad_norm": 26.05034665574483, + "learning_rate": 1.115066832521311e-06, + "loss": 0.4707, + "step": 16379 + }, + { + "epoch": 2.56, + "grad_norm": 17.667140587899357, + "learning_rate": 1.114293087985766e-06, + "loss": 0.4569, + "step": 16380 + }, + { + "epoch": 2.56, + "grad_norm": 24.16680677033514, + "learning_rate": 1.1135195961545242e-06, + "loss": 0.5102, + "step": 16381 + }, + { + "epoch": 2.56, + "grad_norm": 16.046267554607923, + "learning_rate": 1.1127463570495867e-06, + "loss": 0.4581, + "step": 16382 + }, + { + "epoch": 2.56, + "grad_norm": 18.73502799543351, + "learning_rate": 1.111973370692947e-06, + "loss": 0.376, + "step": 16383 + }, + { + "epoch": 2.56, + "grad_norm": 29.466802734973157, + "learning_rate": 1.111200637106582e-06, + "loss": 0.45, + "step": 16384 + }, + { + "epoch": 2.56, + "grad_norm": 21.16809120994578, + "learning_rate": 1.1104281563124741e-06, + "loss": 0.4051, + "step": 16385 + }, + { + "epoch": 2.56, + "grad_norm": 30.422738142851834, + "learning_rate": 1.109655928332587e-06, + "loss": 0.531, + "step": 16386 + }, + { + "epoch": 2.56, + "grad_norm": 19.69068650540431, + "learning_rate": 1.1088839531888862e-06, + "loss": 0.4703, + "step": 16387 + }, + { + "epoch": 2.56, + "grad_norm": 14.187114970826515, + "learning_rate": 1.108112230903322e-06, + "loss": 0.4329, + "step": 16388 + }, + { + "epoch": 2.56, + "grad_norm": 25.41895960636258, + "learning_rate": 1.1073407614978471e-06, + "loss": 0.3945, + "step": 16389 + }, + { + "epoch": 2.56, + "grad_norm": 20.135008072362435, + "learning_rate": 1.106569544994397e-06, + "loss": 0.3688, + "step": 16390 + }, + { + "epoch": 2.56, + "grad_norm": 20.880747776876863, + "learning_rate": 1.1057985814149063e-06, + "loss": 0.3895, + "step": 16391 + }, + { + "epoch": 2.56, + "grad_norm": 20.17307082745536, + "learning_rate": 1.1050278707813033e-06, + "loss": 0.4243, + "step": 16392 + }, + { + "epoch": 2.56, + "grad_norm": 24.12439138133963, + "learning_rate": 1.1042574131155048e-06, + "loss": 0.4753, + "step": 16393 + }, + { + "epoch": 2.56, + "grad_norm": 17.372917637979565, + "learning_rate": 1.1034872084394187e-06, + "loss": 0.3861, + "step": 16394 + }, + { + "epoch": 2.56, + "grad_norm": 31.82668770310337, + "learning_rate": 1.1027172567749523e-06, + "loss": 0.47, + "step": 16395 + }, + { + "epoch": 2.56, + "grad_norm": 24.784327551633144, + "learning_rate": 1.101947558144002e-06, + "loss": 0.4832, + "step": 16396 + }, + { + "epoch": 2.56, + "grad_norm": 23.05370878380459, + "learning_rate": 1.1011781125684618e-06, + "loss": 0.43, + "step": 16397 + }, + { + "epoch": 2.56, + "grad_norm": 23.402403041692537, + "learning_rate": 1.1004089200702072e-06, + "loss": 0.4124, + "step": 16398 + }, + { + "epoch": 2.56, + "grad_norm": 22.81428395768574, + "learning_rate": 1.0996399806711167e-06, + "loss": 0.4103, + "step": 16399 + }, + { + "epoch": 2.56, + "grad_norm": 20.168087967249285, + "learning_rate": 1.098871294393058e-06, + "loss": 0.4625, + "step": 16400 + }, + { + "epoch": 2.56, + "grad_norm": 16.887338680312844, + "learning_rate": 1.0981028612578949e-06, + "loss": 0.478, + "step": 16401 + }, + { + "epoch": 
2.56, + "grad_norm": 19.29427435026181, + "learning_rate": 1.0973346812874796e-06, + "loss": 0.4044, + "step": 16402 + }, + { + "epoch": 2.56, + "grad_norm": 28.09919110479566, + "learning_rate": 1.0965667545036552e-06, + "loss": 0.5247, + "step": 16403 + }, + { + "epoch": 2.56, + "grad_norm": 16.68692232232192, + "learning_rate": 1.0957990809282649e-06, + "loss": 0.4263, + "step": 16404 + }, + { + "epoch": 2.56, + "grad_norm": 18.735905248330916, + "learning_rate": 1.0950316605831413e-06, + "loss": 0.5449, + "step": 16405 + }, + { + "epoch": 2.56, + "grad_norm": 21.55569018693021, + "learning_rate": 1.0942644934901059e-06, + "loss": 0.4351, + "step": 16406 + }, + { + "epoch": 2.56, + "grad_norm": 24.362020268177055, + "learning_rate": 1.0934975796709801e-06, + "loss": 0.4947, + "step": 16407 + }, + { + "epoch": 2.56, + "grad_norm": 24.814027515230823, + "learning_rate": 1.0927309191475722e-06, + "loss": 0.4882, + "step": 16408 + }, + { + "epoch": 2.56, + "grad_norm": 21.472303019451868, + "learning_rate": 1.0919645119416855e-06, + "loss": 0.4447, + "step": 16409 + }, + { + "epoch": 2.56, + "grad_norm": 32.39421401356696, + "learning_rate": 1.0911983580751195e-06, + "loss": 0.5532, + "step": 16410 + }, + { + "epoch": 2.56, + "grad_norm": 21.9577344643695, + "learning_rate": 1.090432457569659e-06, + "loss": 0.456, + "step": 16411 + }, + { + "epoch": 2.56, + "grad_norm": 22.53984790669875, + "learning_rate": 1.0896668104470886e-06, + "loss": 0.419, + "step": 16412 + }, + { + "epoch": 2.56, + "grad_norm": 22.15302915325032, + "learning_rate": 1.08890141672918e-06, + "loss": 0.4957, + "step": 16413 + }, + { + "epoch": 2.56, + "grad_norm": 16.084723896009816, + "learning_rate": 1.0881362764377046e-06, + "loss": 0.4884, + "step": 16414 + }, + { + "epoch": 2.56, + "grad_norm": 16.732870058300346, + "learning_rate": 1.087371389594417e-06, + "loss": 0.3784, + "step": 16415 + }, + { + "epoch": 2.56, + "grad_norm": 18.86533736246151, + "learning_rate": 1.0866067562210748e-06, + "loss": 0.4726, + "step": 16416 + }, + { + "epoch": 2.56, + "grad_norm": 22.70157432773767, + "learning_rate": 1.0858423763394243e-06, + "loss": 0.4598, + "step": 16417 + }, + { + "epoch": 2.56, + "grad_norm": 19.265653735508337, + "learning_rate": 1.085078249971201e-06, + "loss": 0.4704, + "step": 16418 + }, + { + "epoch": 2.56, + "grad_norm": 20.638974170667638, + "learning_rate": 1.084314377138136e-06, + "loss": 0.459, + "step": 16419 + }, + { + "epoch": 2.56, + "grad_norm": 27.043482959470058, + "learning_rate": 1.0835507578619542e-06, + "loss": 0.4217, + "step": 16420 + }, + { + "epoch": 2.56, + "grad_norm": 30.753017869132258, + "learning_rate": 1.0827873921643727e-06, + "loss": 0.4773, + "step": 16421 + }, + { + "epoch": 2.57, + "grad_norm": 20.176999774484784, + "learning_rate": 1.0820242800671032e-06, + "loss": 0.433, + "step": 16422 + }, + { + "epoch": 2.57, + "grad_norm": 17.42590269907103, + "learning_rate": 1.0812614215918472e-06, + "loss": 0.4405, + "step": 16423 + }, + { + "epoch": 2.57, + "grad_norm": 18.14324710765302, + "learning_rate": 1.080498816760296e-06, + "loss": 0.4595, + "step": 16424 + }, + { + "epoch": 2.57, + "grad_norm": 22.12809486888121, + "learning_rate": 1.0797364655941411e-06, + "loss": 0.4784, + "step": 16425 + }, + { + "epoch": 2.57, + "grad_norm": 19.407784876578418, + "learning_rate": 1.0789743681150656e-06, + "loss": 0.476, + "step": 16426 + }, + { + "epoch": 2.57, + "grad_norm": 16.517716857248157, + "learning_rate": 1.0782125243447395e-06, + "loss": 0.4441, + "step": 16427 + }, + { + 
"epoch": 2.57, + "grad_norm": 18.70820166669645, + "learning_rate": 1.077450934304829e-06, + "loss": 0.4642, + "step": 16428 + }, + { + "epoch": 2.57, + "grad_norm": 14.020234554545574, + "learning_rate": 1.0766895980169933e-06, + "loss": 0.3908, + "step": 16429 + }, + { + "epoch": 2.57, + "grad_norm": 29.052248154618663, + "learning_rate": 1.0759285155028887e-06, + "loss": 0.4746, + "step": 16430 + }, + { + "epoch": 2.57, + "grad_norm": 22.39138326472355, + "learning_rate": 1.0751676867841553e-06, + "loss": 0.4037, + "step": 16431 + }, + { + "epoch": 2.57, + "grad_norm": 26.086537281988726, + "learning_rate": 1.0744071118824306e-06, + "loss": 0.459, + "step": 16432 + }, + { + "epoch": 2.57, + "grad_norm": 14.999004045840632, + "learning_rate": 1.0736467908193471e-06, + "loss": 0.4116, + "step": 16433 + }, + { + "epoch": 2.57, + "grad_norm": 20.219300575071028, + "learning_rate": 1.0728867236165264e-06, + "loss": 0.5159, + "step": 16434 + }, + { + "epoch": 2.57, + "grad_norm": 25.054370111489764, + "learning_rate": 1.0721269102955866e-06, + "loss": 0.4311, + "step": 16435 + }, + { + "epoch": 2.57, + "grad_norm": 29.298196747019396, + "learning_rate": 1.0713673508781353e-06, + "loss": 0.4409, + "step": 16436 + }, + { + "epoch": 2.57, + "grad_norm": 30.486264198143818, + "learning_rate": 1.0706080453857714e-06, + "loss": 0.4789, + "step": 16437 + }, + { + "epoch": 2.57, + "grad_norm": 24.735902269193858, + "learning_rate": 1.0698489938400914e-06, + "loss": 0.5981, + "step": 16438 + }, + { + "epoch": 2.57, + "grad_norm": 21.34813898558111, + "learning_rate": 1.0690901962626843e-06, + "loss": 0.4232, + "step": 16439 + }, + { + "epoch": 2.57, + "grad_norm": 29.209659925968698, + "learning_rate": 1.0683316526751253e-06, + "loss": 0.4309, + "step": 16440 + }, + { + "epoch": 2.57, + "grad_norm": 24.62095876998977, + "learning_rate": 1.0675733630989904e-06, + "loss": 0.4082, + "step": 16441 + }, + { + "epoch": 2.57, + "grad_norm": 21.230446956732127, + "learning_rate": 1.0668153275558424e-06, + "loss": 0.4662, + "step": 16442 + }, + { + "epoch": 2.57, + "grad_norm": 18.593381128739416, + "learning_rate": 1.066057546067243e-06, + "loss": 0.383, + "step": 16443 + }, + { + "epoch": 2.57, + "grad_norm": 15.658941749451742, + "learning_rate": 1.0653000186547379e-06, + "loss": 0.3876, + "step": 16444 + }, + { + "epoch": 2.57, + "grad_norm": 16.735885907644683, + "learning_rate": 1.0645427453398748e-06, + "loss": 0.4485, + "step": 16445 + }, + { + "epoch": 2.57, + "grad_norm": 20.46667836858727, + "learning_rate": 1.0637857261441898e-06, + "loss": 0.4401, + "step": 16446 + }, + { + "epoch": 2.57, + "grad_norm": 13.609907039136365, + "learning_rate": 1.0630289610892097e-06, + "loss": 0.4144, + "step": 16447 + }, + { + "epoch": 2.57, + "grad_norm": 28.078244097058516, + "learning_rate": 1.06227245019646e-06, + "loss": 0.5035, + "step": 16448 + }, + { + "epoch": 2.57, + "grad_norm": 23.47437042310223, + "learning_rate": 1.061516193487452e-06, + "loss": 0.4938, + "step": 16449 + }, + { + "epoch": 2.57, + "grad_norm": 23.657574786604457, + "learning_rate": 1.060760190983694e-06, + "loss": 0.4026, + "step": 16450 + }, + { + "epoch": 2.57, + "grad_norm": 18.167211752626248, + "learning_rate": 1.060004442706688e-06, + "loss": 0.4985, + "step": 16451 + }, + { + "epoch": 2.57, + "grad_norm": 19.32445025377652, + "learning_rate": 1.0592489486779267e-06, + "loss": 0.5047, + "step": 16452 + }, + { + "epoch": 2.57, + "grad_norm": 19.524915949851813, + "learning_rate": 1.0584937089188941e-06, + "loss": 0.4068, + "step": 
16453 + }, + { + "epoch": 2.57, + "grad_norm": 12.75082880311266, + "learning_rate": 1.0577387234510684e-06, + "loss": 0.4077, + "step": 16454 + }, + { + "epoch": 2.57, + "grad_norm": 21.761977327557, + "learning_rate": 1.0569839922959247e-06, + "loss": 0.4417, + "step": 16455 + }, + { + "epoch": 2.57, + "grad_norm": 26.623101283704802, + "learning_rate": 1.0562295154749248e-06, + "loss": 0.4138, + "step": 16456 + }, + { + "epoch": 2.57, + "grad_norm": 16.67506943946318, + "learning_rate": 1.0554752930095236e-06, + "loss": 0.4246, + "step": 16457 + }, + { + "epoch": 2.57, + "grad_norm": 16.054041908445058, + "learning_rate": 1.054721324921173e-06, + "loss": 0.4126, + "step": 16458 + }, + { + "epoch": 2.57, + "grad_norm": 21.155209200175637, + "learning_rate": 1.0539676112313147e-06, + "loss": 0.4115, + "step": 16459 + }, + { + "epoch": 2.57, + "grad_norm": 14.782498422136245, + "learning_rate": 1.053214151961386e-06, + "loss": 0.4144, + "step": 16460 + }, + { + "epoch": 2.57, + "grad_norm": 19.62627294384664, + "learning_rate": 1.052460947132814e-06, + "loss": 0.4104, + "step": 16461 + }, + { + "epoch": 2.57, + "grad_norm": 23.60887920028195, + "learning_rate": 1.0517079967670152e-06, + "loss": 0.433, + "step": 16462 + }, + { + "epoch": 2.57, + "grad_norm": 20.079174148297014, + "learning_rate": 1.0509553008854067e-06, + "loss": 0.4878, + "step": 16463 + }, + { + "epoch": 2.57, + "grad_norm": 16.125783898461613, + "learning_rate": 1.0502028595093972e-06, + "loss": 0.4447, + "step": 16464 + }, + { + "epoch": 2.57, + "grad_norm": 18.811373283223823, + "learning_rate": 1.0494506726603804e-06, + "loss": 0.4465, + "step": 16465 + }, + { + "epoch": 2.57, + "grad_norm": 23.78558384809646, + "learning_rate": 1.0486987403597526e-06, + "loss": 0.4971, + "step": 16466 + }, + { + "epoch": 2.57, + "grad_norm": 18.334436267807632, + "learning_rate": 1.0479470626288946e-06, + "loss": 0.4017, + "step": 16467 + }, + { + "epoch": 2.57, + "grad_norm": 23.196108079437995, + "learning_rate": 1.0471956394891868e-06, + "loss": 0.4238, + "step": 16468 + }, + { + "epoch": 2.57, + "grad_norm": 20.874548661612288, + "learning_rate": 1.0464444709619959e-06, + "loss": 0.432, + "step": 16469 + }, + { + "epoch": 2.57, + "grad_norm": 22.229850659234515, + "learning_rate": 1.0456935570686866e-06, + "loss": 0.4903, + "step": 16470 + }, + { + "epoch": 2.57, + "grad_norm": 23.426667251866668, + "learning_rate": 1.0449428978306164e-06, + "loss": 0.4599, + "step": 16471 + }, + { + "epoch": 2.57, + "grad_norm": 19.81085391456848, + "learning_rate": 1.0441924932691293e-06, + "loss": 0.4118, + "step": 16472 + }, + { + "epoch": 2.57, + "grad_norm": 21.729226681851035, + "learning_rate": 1.0434423434055719e-06, + "loss": 0.4039, + "step": 16473 + }, + { + "epoch": 2.57, + "grad_norm": 18.3170232443151, + "learning_rate": 1.042692448261272e-06, + "loss": 0.4412, + "step": 16474 + }, + { + "epoch": 2.57, + "grad_norm": 25.12976591168797, + "learning_rate": 1.041942807857559e-06, + "loss": 0.4329, + "step": 16475 + }, + { + "epoch": 2.57, + "grad_norm": 23.713110554898023, + "learning_rate": 1.0411934222157538e-06, + "loss": 0.4795, + "step": 16476 + }, + { + "epoch": 2.57, + "grad_norm": 25.977543597516572, + "learning_rate": 1.0404442913571678e-06, + "loss": 0.4456, + "step": 16477 + }, + { + "epoch": 2.57, + "grad_norm": 22.726915575093056, + "learning_rate": 1.0396954153031024e-06, + "loss": 0.4091, + "step": 16478 + }, + { + "epoch": 2.57, + "grad_norm": 14.9383974673595, + "learning_rate": 1.0389467940748576e-06, + "loss": 
0.4055, + "step": 16479 + }, + { + "epoch": 2.57, + "grad_norm": 14.640588296215252, + "learning_rate": 1.0381984276937263e-06, + "loss": 0.4168, + "step": 16480 + }, + { + "epoch": 2.57, + "grad_norm": 25.45203231458933, + "learning_rate": 1.037450316180989e-06, + "loss": 0.4831, + "step": 16481 + }, + { + "epoch": 2.57, + "grad_norm": 16.595715689208223, + "learning_rate": 1.03670245955792e-06, + "loss": 0.4195, + "step": 16482 + }, + { + "epoch": 2.57, + "grad_norm": 19.879232477025074, + "learning_rate": 1.03595485784579e-06, + "loss": 0.4238, + "step": 16483 + }, + { + "epoch": 2.57, + "grad_norm": 17.84826947146234, + "learning_rate": 1.0352075110658588e-06, + "loss": 0.4502, + "step": 16484 + }, + { + "epoch": 2.57, + "grad_norm": 21.93688077949329, + "learning_rate": 1.034460419239387e-06, + "loss": 0.4514, + "step": 16485 + }, + { + "epoch": 2.58, + "grad_norm": 25.199733733303642, + "learning_rate": 1.033713582387611e-06, + "loss": 0.4522, + "step": 16486 + }, + { + "epoch": 2.58, + "grad_norm": 15.935862619774106, + "learning_rate": 1.032967000531777e-06, + "loss": 0.372, + "step": 16487 + }, + { + "epoch": 2.58, + "grad_norm": 14.415324910507577, + "learning_rate": 1.0322206736931152e-06, + "loss": 0.404, + "step": 16488 + }, + { + "epoch": 2.58, + "grad_norm": 14.555851951187535, + "learning_rate": 1.0314746018928535e-06, + "loss": 0.3731, + "step": 16489 + }, + { + "epoch": 2.58, + "grad_norm": 19.2027583405399, + "learning_rate": 1.0307287851522074e-06, + "loss": 0.4799, + "step": 16490 + }, + { + "epoch": 2.58, + "grad_norm": 32.87259338276433, + "learning_rate": 1.0299832234923857e-06, + "loss": 0.485, + "step": 16491 + }, + { + "epoch": 2.58, + "grad_norm": 23.95576761204422, + "learning_rate": 1.0292379169345945e-06, + "loss": 0.495, + "step": 16492 + }, + { + "epoch": 2.58, + "grad_norm": 24.967863538615823, + "learning_rate": 1.0284928655000303e-06, + "loss": 0.4787, + "step": 16493 + }, + { + "epoch": 2.58, + "grad_norm": 21.18830731282949, + "learning_rate": 1.0277480692098796e-06, + "loss": 0.4514, + "step": 16494 + }, + { + "epoch": 2.58, + "grad_norm": 28.02049827103553, + "learning_rate": 1.0270035280853275e-06, + "loss": 0.4805, + "step": 16495 + }, + { + "epoch": 2.58, + "grad_norm": 17.312320815030127, + "learning_rate": 1.0262592421475436e-06, + "loss": 0.4213, + "step": 16496 + }, + { + "epoch": 2.58, + "grad_norm": 16.471464950329693, + "learning_rate": 1.025515211417697e-06, + "loss": 0.3936, + "step": 16497 + }, + { + "epoch": 2.58, + "grad_norm": 22.155199736894907, + "learning_rate": 1.0247714359169502e-06, + "loss": 0.4295, + "step": 16498 + }, + { + "epoch": 2.58, + "grad_norm": 25.742556656432157, + "learning_rate": 1.0240279156664512e-06, + "loss": 0.4644, + "step": 16499 + }, + { + "epoch": 2.58, + "grad_norm": 29.23617973343494, + "learning_rate": 1.0232846506873495e-06, + "loss": 0.3922, + "step": 16500 + }, + { + "epoch": 2.58, + "grad_norm": 16.179230597439556, + "learning_rate": 1.0225416410007794e-06, + "loss": 0.4816, + "step": 16501 + }, + { + "epoch": 2.58, + "grad_norm": 16.564784694933927, + "learning_rate": 1.021798886627875e-06, + "loss": 0.4516, + "step": 16502 + }, + { + "epoch": 2.58, + "grad_norm": 15.216237414953463, + "learning_rate": 1.0210563875897561e-06, + "loss": 0.386, + "step": 16503 + }, + { + "epoch": 2.58, + "grad_norm": 15.690356470878475, + "learning_rate": 1.0203141439075415e-06, + "loss": 0.4463, + "step": 16504 + }, + { + "epoch": 2.58, + "grad_norm": 26.02801413877146, + "learning_rate": 1.0195721556023409e-06, + 
"loss": 0.4337, + "step": 16505 + }, + { + "epoch": 2.58, + "grad_norm": 30.748108313299866, + "learning_rate": 1.0188304226952562e-06, + "loss": 0.4857, + "step": 16506 + }, + { + "epoch": 2.58, + "grad_norm": 23.634515379408477, + "learning_rate": 1.0180889452073772e-06, + "loss": 0.4818, + "step": 16507 + }, + { + "epoch": 2.58, + "grad_norm": 14.562205802827227, + "learning_rate": 1.017347723159795e-06, + "loss": 0.3984, + "step": 16508 + }, + { + "epoch": 2.58, + "grad_norm": 22.995305402438024, + "learning_rate": 1.0166067565735881e-06, + "loss": 0.3964, + "step": 16509 + }, + { + "epoch": 2.58, + "grad_norm": 20.219990642106335, + "learning_rate": 1.015866045469832e-06, + "loss": 0.4352, + "step": 16510 + }, + { + "epoch": 2.58, + "grad_norm": 23.647476794914745, + "learning_rate": 1.0151255898695911e-06, + "loss": 0.4233, + "step": 16511 + }, + { + "epoch": 2.58, + "grad_norm": 30.48425150796893, + "learning_rate": 1.0143853897939193e-06, + "loss": 0.4566, + "step": 16512 + }, + { + "epoch": 2.58, + "grad_norm": 23.278968117401885, + "learning_rate": 1.013645445263871e-06, + "loss": 0.5062, + "step": 16513 + }, + { + "epoch": 2.58, + "grad_norm": 23.820874956646325, + "learning_rate": 1.012905756300492e-06, + "loss": 0.4455, + "step": 16514 + }, + { + "epoch": 2.58, + "grad_norm": 23.608701171827537, + "learning_rate": 1.0121663229248145e-06, + "loss": 0.4525, + "step": 16515 + }, + { + "epoch": 2.58, + "grad_norm": 24.581410107235467, + "learning_rate": 1.0114271451578684e-06, + "loss": 0.3984, + "step": 16516 + }, + { + "epoch": 2.58, + "grad_norm": 31.58511180775014, + "learning_rate": 1.0106882230206749e-06, + "loss": 0.5215, + "step": 16517 + }, + { + "epoch": 2.58, + "grad_norm": 33.65416585690738, + "learning_rate": 1.0099495565342532e-06, + "loss": 0.4343, + "step": 16518 + }, + { + "epoch": 2.58, + "grad_norm": 23.51411665074277, + "learning_rate": 1.0092111457196041e-06, + "loss": 0.4125, + "step": 16519 + }, + { + "epoch": 2.58, + "grad_norm": 17.70205503767291, + "learning_rate": 1.0084729905977332e-06, + "loss": 0.4557, + "step": 16520 + }, + { + "epoch": 2.58, + "grad_norm": 17.814874729169205, + "learning_rate": 1.0077350911896278e-06, + "loss": 0.4395, + "step": 16521 + }, + { + "epoch": 2.58, + "grad_norm": 25.97192915817965, + "learning_rate": 1.006997447516276e-06, + "loss": 0.4404, + "step": 16522 + }, + { + "epoch": 2.58, + "grad_norm": 20.230180421208296, + "learning_rate": 1.0062600595986582e-06, + "loss": 0.4285, + "step": 16523 + }, + { + "epoch": 2.58, + "grad_norm": 23.677279918512994, + "learning_rate": 1.0055229274577417e-06, + "loss": 0.4765, + "step": 16524 + }, + { + "epoch": 2.58, + "grad_norm": 21.578095830390765, + "learning_rate": 1.0047860511144937e-06, + "loss": 0.4114, + "step": 16525 + }, + { + "epoch": 2.58, + "grad_norm": 17.861821633638986, + "learning_rate": 1.004049430589865e-06, + "loss": 0.507, + "step": 16526 + }, + { + "epoch": 2.58, + "grad_norm": 18.770514644063674, + "learning_rate": 1.0033130659048119e-06, + "loss": 0.3952, + "step": 16527 + }, + { + "epoch": 2.58, + "grad_norm": 13.791023707120715, + "learning_rate": 1.002576957080269e-06, + "loss": 0.4247, + "step": 16528 + }, + { + "epoch": 2.58, + "grad_norm": 21.604076591004727, + "learning_rate": 1.0018411041371756e-06, + "loss": 0.4734, + "step": 16529 + }, + { + "epoch": 2.58, + "grad_norm": 28.33773838349109, + "learning_rate": 1.001105507096457e-06, + "loss": 0.4376, + "step": 16530 + }, + { + "epoch": 2.58, + "grad_norm": 20.510467834047187, + "learning_rate": 
1.0003701659790344e-06, + "loss": 0.4145, + "step": 16531 + }, + { + "epoch": 2.58, + "grad_norm": 22.457611751966294, + "learning_rate": 9.996350808058175e-07, + "loss": 0.3987, + "step": 16532 + }, + { + "epoch": 2.58, + "grad_norm": 28.852800145939398, + "learning_rate": 9.989002515977154e-07, + "loss": 0.4239, + "step": 16533 + }, + { + "epoch": 2.58, + "grad_norm": 16.30892513875, + "learning_rate": 9.981656783756255e-07, + "loss": 0.4467, + "step": 16534 + }, + { + "epoch": 2.58, + "grad_norm": 16.044757461402007, + "learning_rate": 9.974313611604358e-07, + "loss": 0.4423, + "step": 16535 + }, + { + "epoch": 2.58, + "grad_norm": 18.533637408461708, + "learning_rate": 9.96697299973034e-07, + "loss": 0.4163, + "step": 16536 + }, + { + "epoch": 2.58, + "grad_norm": 20.661578496027914, + "learning_rate": 9.959634948342923e-07, + "loss": 0.4391, + "step": 16537 + }, + { + "epoch": 2.58, + "grad_norm": 18.70728507022078, + "learning_rate": 9.952299457650805e-07, + "loss": 0.4423, + "step": 16538 + }, + { + "epoch": 2.58, + "grad_norm": 18.358198417946898, + "learning_rate": 9.944966527862644e-07, + "loss": 0.432, + "step": 16539 + }, + { + "epoch": 2.58, + "grad_norm": 18.589319925929807, + "learning_rate": 9.93763615918696e-07, + "loss": 0.4515, + "step": 16540 + }, + { + "epoch": 2.58, + "grad_norm": 21.80563146201027, + "learning_rate": 9.930308351832185e-07, + "loss": 0.4338, + "step": 16541 + }, + { + "epoch": 2.58, + "grad_norm": 20.925555980125676, + "learning_rate": 9.922983106006766e-07, + "loss": 0.4576, + "step": 16542 + }, + { + "epoch": 2.58, + "grad_norm": 17.65480177265285, + "learning_rate": 9.915660421919027e-07, + "loss": 0.4294, + "step": 16543 + }, + { + "epoch": 2.58, + "grad_norm": 20.348788224983505, + "learning_rate": 9.908340299777208e-07, + "loss": 0.4625, + "step": 16544 + }, + { + "epoch": 2.58, + "grad_norm": 21.468527006326607, + "learning_rate": 9.901022739789468e-07, + "loss": 0.4469, + "step": 16545 + }, + { + "epoch": 2.58, + "grad_norm": 24.437181231061157, + "learning_rate": 9.893707742163926e-07, + "loss": 0.4391, + "step": 16546 + }, + { + "epoch": 2.58, + "grad_norm": 19.26344859691854, + "learning_rate": 9.886395307108643e-07, + "loss": 0.3868, + "step": 16547 + }, + { + "epoch": 2.58, + "grad_norm": 21.340548640366055, + "learning_rate": 9.87908543483157e-07, + "loss": 0.425, + "step": 16548 + }, + { + "epoch": 2.58, + "grad_norm": 23.25846971871695, + "learning_rate": 9.871778125540587e-07, + "loss": 0.4199, + "step": 16549 + }, + { + "epoch": 2.59, + "grad_norm": 15.34363216250013, + "learning_rate": 9.864473379443495e-07, + "loss": 0.4126, + "step": 16550 + }, + { + "epoch": 2.59, + "grad_norm": 22.553022229353086, + "learning_rate": 9.85717119674806e-07, + "loss": 0.4906, + "step": 16551 + }, + { + "epoch": 2.59, + "grad_norm": 28.276992042104844, + "learning_rate": 9.84987157766195e-07, + "loss": 0.4179, + "step": 16552 + }, + { + "epoch": 2.59, + "grad_norm": 23.807934996806832, + "learning_rate": 9.842574522392744e-07, + "loss": 0.4719, + "step": 16553 + }, + { + "epoch": 2.59, + "grad_norm": 15.923307514886028, + "learning_rate": 9.835280031147999e-07, + "loss": 0.3939, + "step": 16554 + }, + { + "epoch": 2.59, + "grad_norm": 17.91793874800877, + "learning_rate": 9.827988104135122e-07, + "loss": 0.4043, + "step": 16555 + }, + { + "epoch": 2.59, + "grad_norm": 18.212522157107077, + "learning_rate": 9.82069874156154e-07, + "loss": 0.4205, + "step": 16556 + }, + { + "epoch": 2.59, + "grad_norm": 29.75233433735346, + "learning_rate": 
9.81341194363451e-07, + "loss": 0.4767, + "step": 16557 + }, + { + "epoch": 2.59, + "grad_norm": 24.046657248955565, + "learning_rate": 9.806127710561274e-07, + "loss": 0.459, + "step": 16558 + }, + { + "epoch": 2.59, + "grad_norm": 22.186223392967094, + "learning_rate": 9.798846042549037e-07, + "loss": 0.419, + "step": 16559 + }, + { + "epoch": 2.59, + "grad_norm": 19.39623698390906, + "learning_rate": 9.79156693980483e-07, + "loss": 0.4234, + "step": 16560 + }, + { + "epoch": 2.59, + "grad_norm": 24.044912794215403, + "learning_rate": 9.784290402535713e-07, + "loss": 0.4487, + "step": 16561 + }, + { + "epoch": 2.59, + "grad_norm": 19.668414027335913, + "learning_rate": 9.777016430948572e-07, + "loss": 0.4626, + "step": 16562 + }, + { + "epoch": 2.59, + "grad_norm": 29.21101457331106, + "learning_rate": 9.76974502525031e-07, + "loss": 0.4468, + "step": 16563 + }, + { + "epoch": 2.59, + "grad_norm": 16.63610310433114, + "learning_rate": 9.76247618564774e-07, + "loss": 0.4276, + "step": 16564 + }, + { + "epoch": 2.59, + "grad_norm": 16.28588605642223, + "learning_rate": 9.755209912347552e-07, + "loss": 0.4021, + "step": 16565 + }, + { + "epoch": 2.59, + "grad_norm": 21.22000810491783, + "learning_rate": 9.747946205556391e-07, + "loss": 0.4279, + "step": 16566 + }, + { + "epoch": 2.59, + "grad_norm": 38.557370057164384, + "learning_rate": 9.740685065480837e-07, + "loss": 0.4808, + "step": 16567 + }, + { + "epoch": 2.59, + "grad_norm": 18.66066391805108, + "learning_rate": 9.733426492327425e-07, + "loss": 0.4066, + "step": 16568 + }, + { + "epoch": 2.59, + "grad_norm": 17.276100867352753, + "learning_rate": 9.726170486302554e-07, + "loss": 0.4234, + "step": 16569 + }, + { + "epoch": 2.59, + "grad_norm": 22.07153241997382, + "learning_rate": 9.718917047612575e-07, + "loss": 0.4925, + "step": 16570 + }, + { + "epoch": 2.59, + "grad_norm": 21.499364029133467, + "learning_rate": 9.711666176463775e-07, + "loss": 0.4306, + "step": 16571 + }, + { + "epoch": 2.59, + "grad_norm": 30.003989369660047, + "learning_rate": 9.704417873062366e-07, + "loss": 0.4829, + "step": 16572 + }, + { + "epoch": 2.59, + "grad_norm": 17.47555246960147, + "learning_rate": 9.697172137614518e-07, + "loss": 0.4287, + "step": 16573 + }, + { + "epoch": 2.59, + "grad_norm": 24.756652362469854, + "learning_rate": 9.689928970326268e-07, + "loss": 0.4669, + "step": 16574 + }, + { + "epoch": 2.59, + "grad_norm": 14.175715192197819, + "learning_rate": 9.682688371403593e-07, + "loss": 0.4344, + "step": 16575 + }, + { + "epoch": 2.59, + "grad_norm": 19.885279892447244, + "learning_rate": 9.675450341052427e-07, + "loss": 0.4255, + "step": 16576 + }, + { + "epoch": 2.59, + "grad_norm": 23.888077227686253, + "learning_rate": 9.668214879478622e-07, + "loss": 0.5191, + "step": 16577 + }, + { + "epoch": 2.59, + "grad_norm": 29.036166649908623, + "learning_rate": 9.66098198688795e-07, + "loss": 0.3896, + "step": 16578 + }, + { + "epoch": 2.59, + "grad_norm": 25.648444942899722, + "learning_rate": 9.65375166348609e-07, + "loss": 0.4077, + "step": 16579 + }, + { + "epoch": 2.59, + "grad_norm": 32.028053514972335, + "learning_rate": 9.646523909478677e-07, + "loss": 0.4307, + "step": 16580 + }, + { + "epoch": 2.59, + "grad_norm": 21.693729862502174, + "learning_rate": 9.63929872507129e-07, + "loss": 0.4196, + "step": 16581 + }, + { + "epoch": 2.59, + "grad_norm": 24.124346025032906, + "learning_rate": 9.632076110469368e-07, + "loss": 0.4165, + "step": 16582 + }, + { + "epoch": 2.59, + "grad_norm": 18.81676246507038, + "learning_rate": 
9.624856065878351e-07, + "loss": 0.4586, + "step": 16583 + }, + { + "epoch": 2.59, + "grad_norm": 28.342226783993983, + "learning_rate": 9.617638591503542e-07, + "loss": 0.4957, + "step": 16584 + }, + { + "epoch": 2.59, + "grad_norm": 18.93790422533783, + "learning_rate": 9.61042368755023e-07, + "loss": 0.4046, + "step": 16585 + }, + { + "epoch": 2.59, + "grad_norm": 18.6169104061244, + "learning_rate": 9.603211354223597e-07, + "loss": 0.385, + "step": 16586 + }, + { + "epoch": 2.59, + "grad_norm": 21.741579402639957, + "learning_rate": 9.596001591728743e-07, + "loss": 0.4956, + "step": 16587 + }, + { + "epoch": 2.59, + "grad_norm": 30.306177006793096, + "learning_rate": 9.58879440027074e-07, + "loss": 0.4563, + "step": 16588 + }, + { + "epoch": 2.59, + "grad_norm": 20.765973896916027, + "learning_rate": 9.5815897800545e-07, + "loss": 0.3972, + "step": 16589 + }, + { + "epoch": 2.59, + "grad_norm": 23.11424219462224, + "learning_rate": 9.574387731284984e-07, + "loss": 0.4215, + "step": 16590 + }, + { + "epoch": 2.59, + "grad_norm": 19.610770729820203, + "learning_rate": 9.56718825416697e-07, + "loss": 0.3933, + "step": 16591 + }, + { + "epoch": 2.59, + "grad_norm": 33.816859833278606, + "learning_rate": 9.559991348905218e-07, + "loss": 0.468, + "step": 16592 + }, + { + "epoch": 2.59, + "grad_norm": 20.852515207095763, + "learning_rate": 9.55279701570442e-07, + "loss": 0.4243, + "step": 16593 + }, + { + "epoch": 2.59, + "grad_norm": 29.141677976509175, + "learning_rate": 9.545605254769164e-07, + "loss": 0.526, + "step": 16594 + }, + { + "epoch": 2.59, + "grad_norm": 18.216068097417466, + "learning_rate": 9.538416066303956e-07, + "loss": 0.4218, + "step": 16595 + }, + { + "epoch": 2.59, + "grad_norm": 18.26951078096083, + "learning_rate": 9.531229450513291e-07, + "loss": 0.4137, + "step": 16596 + }, + { + "epoch": 2.59, + "grad_norm": 19.919263688774954, + "learning_rate": 9.524045407601534e-07, + "loss": 0.4682, + "step": 16597 + }, + { + "epoch": 2.59, + "grad_norm": 27.113522541581116, + "learning_rate": 9.516863937773013e-07, + "loss": 0.4943, + "step": 16598 + }, + { + "epoch": 2.59, + "grad_norm": 25.228212863150578, + "learning_rate": 9.509685041231953e-07, + "loss": 0.4431, + "step": 16599 + }, + { + "epoch": 2.59, + "grad_norm": 15.440371437438177, + "learning_rate": 9.502508718182491e-07, + "loss": 0.4354, + "step": 16600 + }, + { + "epoch": 2.59, + "grad_norm": 27.165188017538007, + "learning_rate": 9.49533496882874e-07, + "loss": 0.5733, + "step": 16601 + }, + { + "epoch": 2.59, + "grad_norm": 26.408273239894697, + "learning_rate": 9.488163793374749e-07, + "loss": 0.4491, + "step": 16602 + }, + { + "epoch": 2.59, + "grad_norm": 18.26673032118206, + "learning_rate": 9.480995192024423e-07, + "loss": 0.4764, + "step": 16603 + }, + { + "epoch": 2.59, + "grad_norm": 17.712476225159275, + "learning_rate": 9.473829164981629e-07, + "loss": 0.4284, + "step": 16604 + }, + { + "epoch": 2.59, + "grad_norm": 15.149277999189202, + "learning_rate": 9.466665712450174e-07, + "loss": 0.3953, + "step": 16605 + }, + { + "epoch": 2.59, + "grad_norm": 17.17356222773973, + "learning_rate": 9.459504834633804e-07, + "loss": 0.3883, + "step": 16606 + }, + { + "epoch": 2.59, + "grad_norm": 43.14834145489166, + "learning_rate": 9.452346531736134e-07, + "loss": 0.4145, + "step": 16607 + }, + { + "epoch": 2.59, + "grad_norm": 23.048702268416047, + "learning_rate": 9.44519080396078e-07, + "loss": 0.4732, + "step": 16608 + }, + { + "epoch": 2.59, + "grad_norm": 20.85824593299342, + "learning_rate": 
9.438037651511201e-07, + "loss": 0.4093, + "step": 16609 + }, + { + "epoch": 2.59, + "grad_norm": 25.90883123655177, + "learning_rate": 9.430887074590855e-07, + "loss": 0.4416, + "step": 16610 + }, + { + "epoch": 2.59, + "grad_norm": 19.54133176457306, + "learning_rate": 9.423739073403126e-07, + "loss": 0.4391, + "step": 16611 + }, + { + "epoch": 2.59, + "grad_norm": 19.732579868080226, + "learning_rate": 9.416593648151251e-07, + "loss": 0.4338, + "step": 16612 + }, + { + "epoch": 2.59, + "grad_norm": 15.647606183870503, + "learning_rate": 9.409450799038478e-07, + "loss": 0.3797, + "step": 16613 + }, + { + "epoch": 2.6, + "grad_norm": 30.206813752770014, + "learning_rate": 9.40231052626791e-07, + "loss": 0.4625, + "step": 16614 + }, + { + "epoch": 2.6, + "grad_norm": 32.31348099962588, + "learning_rate": 9.395172830042653e-07, + "loss": 0.5482, + "step": 16615 + }, + { + "epoch": 2.6, + "grad_norm": 21.6672429337497, + "learning_rate": 9.388037710565667e-07, + "loss": 0.5197, + "step": 16616 + }, + { + "epoch": 2.6, + "grad_norm": 22.326256056476627, + "learning_rate": 9.380905168039878e-07, + "loss": 0.4341, + "step": 16617 + }, + { + "epoch": 2.6, + "grad_norm": 23.368618083106085, + "learning_rate": 9.373775202668156e-07, + "loss": 0.4431, + "step": 16618 + }, + { + "epoch": 2.6, + "grad_norm": 30.806549561635077, + "learning_rate": 9.36664781465324e-07, + "loss": 0.6125, + "step": 16619 + }, + { + "epoch": 2.6, + "grad_norm": 21.9371326523099, + "learning_rate": 9.359523004197835e-07, + "loss": 0.414, + "step": 16620 + }, + { + "epoch": 2.6, + "grad_norm": 15.144667803148963, + "learning_rate": 9.352400771504566e-07, + "loss": 0.3924, + "step": 16621 + }, + { + "epoch": 2.6, + "grad_norm": 20.327806282623662, + "learning_rate": 9.345281116775995e-07, + "loss": 0.4246, + "step": 16622 + }, + { + "epoch": 2.6, + "grad_norm": 18.115795528223863, + "learning_rate": 9.338164040214614e-07, + "loss": 0.4323, + "step": 16623 + }, + { + "epoch": 2.6, + "grad_norm": 22.947981052218893, + "learning_rate": 9.331049542022818e-07, + "loss": 0.4235, + "step": 16624 + }, + { + "epoch": 2.6, + "grad_norm": 18.063613919127054, + "learning_rate": 9.323937622402912e-07, + "loss": 0.4602, + "step": 16625 + }, + { + "epoch": 2.6, + "grad_norm": 28.645485867676015, + "learning_rate": 9.316828281557178e-07, + "loss": 0.4192, + "step": 16626 + }, + { + "epoch": 2.6, + "grad_norm": 18.99592090255853, + "learning_rate": 9.309721519687809e-07, + "loss": 0.4456, + "step": 16627 + }, + { + "epoch": 2.6, + "grad_norm": 18.749788272377494, + "learning_rate": 9.30261733699691e-07, + "loss": 0.4229, + "step": 16628 + }, + { + "epoch": 2.6, + "grad_norm": 14.854005343297715, + "learning_rate": 9.295515733686511e-07, + "loss": 0.456, + "step": 16629 + }, + { + "epoch": 2.6, + "grad_norm": 20.565194515023208, + "learning_rate": 9.28841670995857e-07, + "loss": 0.451, + "step": 16630 + }, + { + "epoch": 2.6, + "grad_norm": 21.278853611314297, + "learning_rate": 9.281320266015015e-07, + "loss": 0.4267, + "step": 16631 + }, + { + "epoch": 2.6, + "grad_norm": 26.674816431988763, + "learning_rate": 9.274226402057651e-07, + "loss": 0.4463, + "step": 16632 + }, + { + "epoch": 2.6, + "grad_norm": 18.554797704469767, + "learning_rate": 9.267135118288184e-07, + "loss": 0.3773, + "step": 16633 + }, + { + "epoch": 2.6, + "grad_norm": 16.98594642991937, + "learning_rate": 9.26004641490833e-07, + "loss": 0.4135, + "step": 16634 + }, + { + "epoch": 2.6, + "grad_norm": 16.150973173064106, + "learning_rate": 9.252960292119661e-07, + 
"loss": 0.4367, + "step": 16635 + }, + { + "epoch": 2.6, + "grad_norm": 25.336592091477296, + "learning_rate": 9.24587675012375e-07, + "loss": 0.4626, + "step": 16636 + }, + { + "epoch": 2.6, + "grad_norm": 29.170722339989087, + "learning_rate": 9.238795789122002e-07, + "loss": 0.4563, + "step": 16637 + }, + { + "epoch": 2.6, + "grad_norm": 20.588816872444657, + "learning_rate": 9.231717409315788e-07, + "loss": 0.4561, + "step": 16638 + }, + { + "epoch": 2.6, + "grad_norm": 46.91705659845594, + "learning_rate": 9.224641610906437e-07, + "loss": 0.4929, + "step": 16639 + }, + { + "epoch": 2.6, + "grad_norm": 19.008658256639237, + "learning_rate": 9.2175683940952e-07, + "loss": 0.4021, + "step": 16640 + }, + { + "epoch": 2.6, + "grad_norm": 25.727204710112144, + "learning_rate": 9.210497759083193e-07, + "loss": 0.4594, + "step": 16641 + }, + { + "epoch": 2.6, + "grad_norm": 35.764165740201804, + "learning_rate": 9.203429706071531e-07, + "loss": 0.5151, + "step": 16642 + }, + { + "epoch": 2.6, + "grad_norm": 14.089536312398872, + "learning_rate": 9.196364235261202e-07, + "loss": 0.4105, + "step": 16643 + }, + { + "epoch": 2.6, + "grad_norm": 17.135423963964545, + "learning_rate": 9.189301346853163e-07, + "loss": 0.4699, + "step": 16644 + }, + { + "epoch": 2.6, + "grad_norm": 20.064086434663675, + "learning_rate": 9.182241041048267e-07, + "loss": 0.5064, + "step": 16645 + }, + { + "epoch": 2.6, + "grad_norm": 19.879351289909327, + "learning_rate": 9.175183318047298e-07, + "loss": 0.3794, + "step": 16646 + }, + { + "epoch": 2.6, + "grad_norm": 21.104982016410478, + "learning_rate": 9.168128178051005e-07, + "loss": 0.4872, + "step": 16647 + }, + { + "epoch": 2.6, + "grad_norm": 20.443144078723233, + "learning_rate": 9.161075621259997e-07, + "loss": 0.434, + "step": 16648 + }, + { + "epoch": 2.6, + "grad_norm": 25.691266606296317, + "learning_rate": 9.154025647874875e-07, + "loss": 0.4629, + "step": 16649 + }, + { + "epoch": 2.6, + "grad_norm": 21.55391299331185, + "learning_rate": 9.146978258096107e-07, + "loss": 0.4259, + "step": 16650 + }, + { + "epoch": 2.6, + "grad_norm": 27.076821679404723, + "learning_rate": 9.139933452124117e-07, + "loss": 0.4911, + "step": 16651 + }, + { + "epoch": 2.6, + "grad_norm": 21.259026280708042, + "learning_rate": 9.132891230159302e-07, + "loss": 0.4465, + "step": 16652 + }, + { + "epoch": 2.6, + "grad_norm": 18.937368933457027, + "learning_rate": 9.125851592401891e-07, + "loss": 0.4729, + "step": 16653 + }, + { + "epoch": 2.6, + "grad_norm": 14.395639017717112, + "learning_rate": 9.11881453905209e-07, + "loss": 0.4109, + "step": 16654 + }, + { + "epoch": 2.6, + "grad_norm": 17.980382389952663, + "learning_rate": 9.111780070310038e-07, + "loss": 0.3972, + "step": 16655 + }, + { + "epoch": 2.6, + "grad_norm": 22.66773722962771, + "learning_rate": 9.104748186375811e-07, + "loss": 0.4461, + "step": 16656 + }, + { + "epoch": 2.6, + "grad_norm": 20.951502970467896, + "learning_rate": 9.097718887449381e-07, + "loss": 0.5012, + "step": 16657 + }, + { + "epoch": 2.6, + "grad_norm": 23.418857307534378, + "learning_rate": 9.090692173730619e-07, + "loss": 0.455, + "step": 16658 + }, + { + "epoch": 2.6, + "grad_norm": 24.002348904105794, + "learning_rate": 9.083668045419391e-07, + "loss": 0.4388, + "step": 16659 + }, + { + "epoch": 2.6, + "grad_norm": 24.089457186991297, + "learning_rate": 9.076646502715469e-07, + "loss": 0.4302, + "step": 16660 + }, + { + "epoch": 2.6, + "grad_norm": 21.07289727410144, + "learning_rate": 9.069627545818549e-07, + "loss": 0.4516, + "step": 
16661 + }, + { + "epoch": 2.6, + "grad_norm": 26.221881132746827, + "learning_rate": 9.062611174928226e-07, + "loss": 0.4544, + "step": 16662 + }, + { + "epoch": 2.6, + "grad_norm": 22.094976478807144, + "learning_rate": 9.05559739024402e-07, + "loss": 0.3801, + "step": 16663 + }, + { + "epoch": 2.6, + "grad_norm": 25.400617592166768, + "learning_rate": 9.048586191965437e-07, + "loss": 0.4404, + "step": 16664 + }, + { + "epoch": 2.6, + "grad_norm": 20.691117029543463, + "learning_rate": 9.04157758029186e-07, + "loss": 0.4639, + "step": 16665 + }, + { + "epoch": 2.6, + "grad_norm": 27.536180260436744, + "learning_rate": 9.0345715554226e-07, + "loss": 0.4018, + "step": 16666 + }, + { + "epoch": 2.6, + "grad_norm": 22.50435428209808, + "learning_rate": 9.027568117556928e-07, + "loss": 0.4154, + "step": 16667 + }, + { + "epoch": 2.6, + "grad_norm": 24.243344416720603, + "learning_rate": 9.020567266893976e-07, + "loss": 0.485, + "step": 16668 + }, + { + "epoch": 2.6, + "grad_norm": 18.560779535954246, + "learning_rate": 9.013569003632894e-07, + "loss": 0.4223, + "step": 16669 + }, + { + "epoch": 2.6, + "grad_norm": 20.521239161875705, + "learning_rate": 9.006573327972667e-07, + "loss": 0.4801, + "step": 16670 + }, + { + "epoch": 2.6, + "grad_norm": 18.434193194990357, + "learning_rate": 8.99958024011226e-07, + "loss": 0.4201, + "step": 16671 + }, + { + "epoch": 2.6, + "grad_norm": 23.114078630467418, + "learning_rate": 8.992589740250579e-07, + "loss": 0.4696, + "step": 16672 + }, + { + "epoch": 2.6, + "grad_norm": 23.288510534533547, + "learning_rate": 8.985601828586399e-07, + "loss": 0.4432, + "step": 16673 + }, + { + "epoch": 2.6, + "grad_norm": 21.627993637626343, + "learning_rate": 8.978616505318461e-07, + "loss": 0.4122, + "step": 16674 + }, + { + "epoch": 2.6, + "grad_norm": 15.652127299895884, + "learning_rate": 8.971633770645416e-07, + "loss": 0.3973, + "step": 16675 + }, + { + "epoch": 2.6, + "grad_norm": 32.2956595079074, + "learning_rate": 8.964653624765874e-07, + "loss": 0.5038, + "step": 16676 + }, + { + "epoch": 2.6, + "grad_norm": 29.932339219471015, + "learning_rate": 8.957676067878307e-07, + "loss": 0.5604, + "step": 16677 + }, + { + "epoch": 2.61, + "grad_norm": 20.154867314072508, + "learning_rate": 8.950701100181203e-07, + "loss": 0.4561, + "step": 16678 + }, + { + "epoch": 2.61, + "grad_norm": 21.638338017033707, + "learning_rate": 8.943728721872868e-07, + "loss": 0.4534, + "step": 16679 + }, + { + "epoch": 2.61, + "grad_norm": 20.70356290678026, + "learning_rate": 8.936758933151623e-07, + "loss": 0.4547, + "step": 16680 + }, + { + "epoch": 2.61, + "grad_norm": 15.97695713721677, + "learning_rate": 8.92979173421571e-07, + "loss": 0.3834, + "step": 16681 + }, + { + "epoch": 2.61, + "grad_norm": 23.435849945476544, + "learning_rate": 8.922827125263234e-07, + "loss": 0.3932, + "step": 16682 + }, + { + "epoch": 2.61, + "grad_norm": 27.428948725196182, + "learning_rate": 8.91586510649226e-07, + "loss": 0.473, + "step": 16683 + }, + { + "epoch": 2.61, + "grad_norm": 23.52376543878768, + "learning_rate": 8.90890567810081e-07, + "loss": 0.5012, + "step": 16684 + }, + { + "epoch": 2.61, + "grad_norm": 27.61751614527499, + "learning_rate": 8.901948840286789e-07, + "loss": 0.3716, + "step": 16685 + }, + { + "epoch": 2.61, + "grad_norm": 19.86593807010374, + "learning_rate": 8.894994593248063e-07, + "loss": 0.3881, + "step": 16686 + }, + { + "epoch": 2.61, + "grad_norm": 14.410364220851308, + "learning_rate": 8.888042937182406e-07, + "loss": 0.4258, + "step": 16687 + }, + { + "epoch": 
2.61, + "grad_norm": 19.115935050366904, + "learning_rate": 8.881093872287483e-07, + "loss": 0.4096, + "step": 16688 + }, + { + "epoch": 2.61, + "grad_norm": 20.652318385530542, + "learning_rate": 8.874147398760957e-07, + "loss": 0.4372, + "step": 16689 + }, + { + "epoch": 2.61, + "grad_norm": 30.22928360546812, + "learning_rate": 8.867203516800382e-07, + "loss": 0.4408, + "step": 16690 + }, + { + "epoch": 2.61, + "grad_norm": 22.39774081527456, + "learning_rate": 8.860262226603234e-07, + "loss": 0.3935, + "step": 16691 + }, + { + "epoch": 2.61, + "grad_norm": 22.919712059188633, + "learning_rate": 8.853323528366886e-07, + "loss": 0.4538, + "step": 16692 + }, + { + "epoch": 2.61, + "grad_norm": 35.905719633642555, + "learning_rate": 8.846387422288704e-07, + "loss": 0.4302, + "step": 16693 + }, + { + "epoch": 2.61, + "grad_norm": 21.033654368842818, + "learning_rate": 8.839453908565965e-07, + "loss": 0.4794, + "step": 16694 + }, + { + "epoch": 2.61, + "grad_norm": 38.43862449892962, + "learning_rate": 8.832522987395798e-07, + "loss": 0.4356, + "step": 16695 + }, + { + "epoch": 2.61, + "grad_norm": 23.464374741397606, + "learning_rate": 8.825594658975367e-07, + "loss": 0.4589, + "step": 16696 + }, + { + "epoch": 2.61, + "grad_norm": 17.422209357572804, + "learning_rate": 8.818668923501683e-07, + "loss": 0.3923, + "step": 16697 + }, + { + "epoch": 2.61, + "grad_norm": 22.09562721937399, + "learning_rate": 8.811745781171699e-07, + "loss": 0.4206, + "step": 16698 + }, + { + "epoch": 2.61, + "grad_norm": 19.442060279077392, + "learning_rate": 8.804825232182345e-07, + "loss": 0.4029, + "step": 16699 + }, + { + "epoch": 2.61, + "grad_norm": 21.58317273293, + "learning_rate": 8.7979072767304e-07, + "loss": 0.474, + "step": 16700 + }, + { + "epoch": 2.61, + "grad_norm": 34.61315114537455, + "learning_rate": 8.790991915012636e-07, + "loss": 0.5113, + "step": 16701 + }, + { + "epoch": 2.61, + "grad_norm": 17.743257077126724, + "learning_rate": 8.784079147225688e-07, + "loss": 0.5002, + "step": 16702 + }, + { + "epoch": 2.61, + "grad_norm": 13.50267092235498, + "learning_rate": 8.777168973566186e-07, + "loss": 0.353, + "step": 16703 + }, + { + "epoch": 2.61, + "grad_norm": 20.95125831094712, + "learning_rate": 8.770261394230617e-07, + "loss": 0.4314, + "step": 16704 + }, + { + "epoch": 2.61, + "grad_norm": 18.74905084830219, + "learning_rate": 8.763356409415447e-07, + "loss": 0.4074, + "step": 16705 + }, + { + "epoch": 2.61, + "grad_norm": 15.579036870571148, + "learning_rate": 8.756454019317063e-07, + "loss": 0.4278, + "step": 16706 + }, + { + "epoch": 2.61, + "grad_norm": 31.87071763962313, + "learning_rate": 8.749554224131751e-07, + "loss": 0.553, + "step": 16707 + }, + { + "epoch": 2.61, + "grad_norm": 17.60790887329473, + "learning_rate": 8.742657024055723e-07, + "loss": 0.4269, + "step": 16708 + }, + { + "epoch": 2.61, + "grad_norm": 26.285916786115394, + "learning_rate": 8.735762419285143e-07, + "loss": 0.4468, + "step": 16709 + }, + { + "epoch": 2.61, + "grad_norm": 20.82882622284602, + "learning_rate": 8.728870410016099e-07, + "loss": 0.4227, + "step": 16710 + }, + { + "epoch": 2.61, + "grad_norm": 44.476794032022404, + "learning_rate": 8.721980996444601e-07, + "loss": 0.4647, + "step": 16711 + }, + { + "epoch": 2.61, + "grad_norm": 21.05022180011697, + "learning_rate": 8.715094178766559e-07, + "loss": 0.4309, + "step": 16712 + }, + { + "epoch": 2.61, + "grad_norm": 24.75239388612179, + "learning_rate": 8.708209957177826e-07, + "loss": 0.4265, + "step": 16713 + }, + { + "epoch": 2.61, + 
"grad_norm": 34.79433308889338, + "learning_rate": 8.701328331874204e-07, + "loss": 0.5209, + "step": 16714 + }, + { + "epoch": 2.61, + "grad_norm": 17.99271583940879, + "learning_rate": 8.694449303051411e-07, + "loss": 0.4841, + "step": 16715 + }, + { + "epoch": 2.61, + "grad_norm": 19.185274258323, + "learning_rate": 8.68757287090507e-07, + "loss": 0.4563, + "step": 16716 + }, + { + "epoch": 2.61, + "grad_norm": 21.957059233477462, + "learning_rate": 8.680699035630713e-07, + "loss": 0.4054, + "step": 16717 + }, + { + "epoch": 2.61, + "grad_norm": 21.60780446819741, + "learning_rate": 8.673827797423862e-07, + "loss": 0.4715, + "step": 16718 + }, + { + "epoch": 2.61, + "grad_norm": 27.01343319652089, + "learning_rate": 8.666959156479938e-07, + "loss": 0.4525, + "step": 16719 + }, + { + "epoch": 2.61, + "grad_norm": 19.50782784891804, + "learning_rate": 8.660093112994261e-07, + "loss": 0.4836, + "step": 16720 + }, + { + "epoch": 2.61, + "grad_norm": 20.246002297155318, + "learning_rate": 8.653229667162111e-07, + "loss": 0.4106, + "step": 16721 + }, + { + "epoch": 2.61, + "grad_norm": 14.868173611141453, + "learning_rate": 8.646368819178652e-07, + "loss": 0.3736, + "step": 16722 + }, + { + "epoch": 2.61, + "grad_norm": 24.618808935065324, + "learning_rate": 8.639510569239029e-07, + "loss": 0.4321, + "step": 16723 + }, + { + "epoch": 2.61, + "grad_norm": 28.00095475433834, + "learning_rate": 8.632654917538297e-07, + "loss": 0.4507, + "step": 16724 + }, + { + "epoch": 2.61, + "grad_norm": 19.469426191949704, + "learning_rate": 8.625801864271411e-07, + "loss": 0.4288, + "step": 16725 + }, + { + "epoch": 2.61, + "grad_norm": 26.974057184806597, + "learning_rate": 8.618951409633258e-07, + "loss": 0.4268, + "step": 16726 + }, + { + "epoch": 2.61, + "grad_norm": 22.822644507968878, + "learning_rate": 8.612103553818663e-07, + "loss": 0.5234, + "step": 16727 + }, + { + "epoch": 2.61, + "grad_norm": 17.76676574909621, + "learning_rate": 8.605258297022401e-07, + "loss": 0.3717, + "step": 16728 + }, + { + "epoch": 2.61, + "grad_norm": 15.899604249361133, + "learning_rate": 8.598415639439107e-07, + "loss": 0.4623, + "step": 16729 + }, + { + "epoch": 2.61, + "grad_norm": 11.332693949128902, + "learning_rate": 8.591575581263422e-07, + "loss": 0.4472, + "step": 16730 + }, + { + "epoch": 2.61, + "grad_norm": 24.60243323073229, + "learning_rate": 8.58473812268984e-07, + "loss": 0.4391, + "step": 16731 + }, + { + "epoch": 2.61, + "grad_norm": 17.63328755101761, + "learning_rate": 8.577903263912846e-07, + "loss": 0.3988, + "step": 16732 + }, + { + "epoch": 2.61, + "grad_norm": 32.29013818102348, + "learning_rate": 8.571071005126785e-07, + "loss": 0.4678, + "step": 16733 + }, + { + "epoch": 2.61, + "grad_norm": 20.242449249701234, + "learning_rate": 8.564241346525992e-07, + "loss": 0.4137, + "step": 16734 + }, + { + "epoch": 2.61, + "grad_norm": 20.87398343052293, + "learning_rate": 8.557414288304689e-07, + "loss": 0.4265, + "step": 16735 + }, + { + "epoch": 2.61, + "grad_norm": 25.34155911196266, + "learning_rate": 8.550589830657019e-07, + "loss": 0.3786, + "step": 16736 + }, + { + "epoch": 2.61, + "grad_norm": 18.371625624494065, + "learning_rate": 8.543767973777095e-07, + "loss": 0.5357, + "step": 16737 + }, + { + "epoch": 2.61, + "grad_norm": 22.74892713301613, + "learning_rate": 8.536948717858895e-07, + "loss": 0.4312, + "step": 16738 + }, + { + "epoch": 2.61, + "grad_norm": 18.31856884009419, + "learning_rate": 8.530132063096364e-07, + "loss": 0.4346, + "step": 16739 + }, + { + "epoch": 2.61, + 
"grad_norm": 25.60380375364114, + "learning_rate": 8.52331800968339e-07, + "loss": 0.426, + "step": 16740 + }, + { + "epoch": 2.61, + "grad_norm": 24.597455123096058, + "learning_rate": 8.516506557813742e-07, + "loss": 0.4364, + "step": 16741 + }, + { + "epoch": 2.62, + "grad_norm": 28.896660653956385, + "learning_rate": 8.509697707681108e-07, + "loss": 0.4143, + "step": 16742 + }, + { + "epoch": 2.62, + "grad_norm": 19.727868880270037, + "learning_rate": 8.502891459479145e-07, + "loss": 0.4241, + "step": 16743 + }, + { + "epoch": 2.62, + "grad_norm": 16.96661380279549, + "learning_rate": 8.496087813401454e-07, + "loss": 0.433, + "step": 16744 + }, + { + "epoch": 2.62, + "grad_norm": 18.645807605775385, + "learning_rate": 8.489286769641492e-07, + "loss": 0.417, + "step": 16745 + }, + { + "epoch": 2.62, + "grad_norm": 37.30492073505449, + "learning_rate": 8.482488328392668e-07, + "loss": 0.4635, + "step": 16746 + }, + { + "epoch": 2.62, + "grad_norm": 24.241161234969553, + "learning_rate": 8.475692489848342e-07, + "loss": 0.4698, + "step": 16747 + }, + { + "epoch": 2.62, + "grad_norm": 26.97141750841817, + "learning_rate": 8.468899254201768e-07, + "loss": 0.4484, + "step": 16748 + }, + { + "epoch": 2.62, + "grad_norm": 19.310213648074953, + "learning_rate": 8.462108621646182e-07, + "loss": 0.3882, + "step": 16749 + }, + { + "epoch": 2.62, + "grad_norm": 17.321206551973784, + "learning_rate": 8.455320592374683e-07, + "loss": 0.5108, + "step": 16750 + }, + { + "epoch": 2.62, + "grad_norm": 23.945814612197484, + "learning_rate": 8.448535166580286e-07, + "loss": 0.4398, + "step": 16751 + }, + { + "epoch": 2.62, + "grad_norm": 16.481744167599437, + "learning_rate": 8.441752344456001e-07, + "loss": 0.4991, + "step": 16752 + }, + { + "epoch": 2.62, + "grad_norm": 21.533260033633923, + "learning_rate": 8.43497212619474e-07, + "loss": 0.4354, + "step": 16753 + }, + { + "epoch": 2.62, + "grad_norm": 24.105916417968604, + "learning_rate": 8.428194511989285e-07, + "loss": 0.4483, + "step": 16754 + }, + { + "epoch": 2.62, + "grad_norm": 23.228632958799906, + "learning_rate": 8.421419502032425e-07, + "loss": 0.406, + "step": 16755 + }, + { + "epoch": 2.62, + "grad_norm": 21.048982944080215, + "learning_rate": 8.414647096516804e-07, + "loss": 0.4343, + "step": 16756 + }, + { + "epoch": 2.62, + "grad_norm": 25.892150579569723, + "learning_rate": 8.40787729563507e-07, + "loss": 0.3986, + "step": 16757 + }, + { + "epoch": 2.62, + "grad_norm": 15.535536964975222, + "learning_rate": 8.401110099579702e-07, + "loss": 0.4378, + "step": 16758 + }, + { + "epoch": 2.62, + "grad_norm": 20.418707928177778, + "learning_rate": 8.394345508543178e-07, + "loss": 0.3899, + "step": 16759 + }, + { + "epoch": 2.62, + "grad_norm": 24.74179145609242, + "learning_rate": 8.387583522717901e-07, + "loss": 0.4278, + "step": 16760 + }, + { + "epoch": 2.62, + "grad_norm": 36.1969580306127, + "learning_rate": 8.38082414229614e-07, + "loss": 0.4397, + "step": 16761 + }, + { + "epoch": 2.62, + "grad_norm": 20.140718870484307, + "learning_rate": 8.374067367470151e-07, + "loss": 0.432, + "step": 16762 + }, + { + "epoch": 2.62, + "grad_norm": 19.99467742622639, + "learning_rate": 8.367313198432081e-07, + "loss": 0.4388, + "step": 16763 + }, + { + "epoch": 2.62, + "grad_norm": 18.80601838524848, + "learning_rate": 8.36056163537402e-07, + "loss": 0.4025, + "step": 16764 + }, + { + "epoch": 2.62, + "grad_norm": 23.8081271366024, + "learning_rate": 8.353812678487993e-07, + "loss": 0.4205, + "step": 16765 + }, + { + "epoch": 2.62, + "grad_norm": 
20.926565856637104, + "learning_rate": 8.347066327965925e-07, + "loss": 0.4961, + "step": 16766 + }, + { + "epoch": 2.62, + "grad_norm": 35.660807793839375, + "learning_rate": 8.340322583999649e-07, + "loss": 0.3895, + "step": 16767 + }, + { + "epoch": 2.62, + "grad_norm": 23.030089446166176, + "learning_rate": 8.333581446780981e-07, + "loss": 0.4632, + "step": 16768 + }, + { + "epoch": 2.62, + "grad_norm": 27.060058253986636, + "learning_rate": 8.326842916501654e-07, + "loss": 0.5217, + "step": 16769 + }, + { + "epoch": 2.62, + "grad_norm": 14.039772982655416, + "learning_rate": 8.320106993353294e-07, + "loss": 0.3913, + "step": 16770 + }, + { + "epoch": 2.62, + "grad_norm": 25.421402582750858, + "learning_rate": 8.313373677527437e-07, + "loss": 0.4352, + "step": 16771 + }, + { + "epoch": 2.62, + "grad_norm": 30.13481000243911, + "learning_rate": 8.306642969215595e-07, + "loss": 0.4793, + "step": 16772 + }, + { + "epoch": 2.62, + "grad_norm": 23.837973011066698, + "learning_rate": 8.299914868609193e-07, + "loss": 0.423, + "step": 16773 + }, + { + "epoch": 2.62, + "grad_norm": 31.082366852376566, + "learning_rate": 8.293189375899579e-07, + "loss": 0.4193, + "step": 16774 + }, + { + "epoch": 2.62, + "grad_norm": 24.50445860318181, + "learning_rate": 8.286466491278023e-07, + "loss": 0.4479, + "step": 16775 + }, + { + "epoch": 2.62, + "grad_norm": 27.43710492182028, + "learning_rate": 8.27974621493568e-07, + "loss": 0.4794, + "step": 16776 + }, + { + "epoch": 2.62, + "grad_norm": 21.1983498746172, + "learning_rate": 8.27302854706371e-07, + "loss": 0.3988, + "step": 16777 + }, + { + "epoch": 2.62, + "grad_norm": 19.101546037506935, + "learning_rate": 8.266313487853162e-07, + "loss": 0.4968, + "step": 16778 + }, + { + "epoch": 2.62, + "grad_norm": 16.62009349238312, + "learning_rate": 8.259601037494991e-07, + "loss": 0.4279, + "step": 16779 + }, + { + "epoch": 2.62, + "grad_norm": 16.489964474939928, + "learning_rate": 8.252891196180091e-07, + "loss": 0.3905, + "step": 16780 + }, + { + "epoch": 2.62, + "grad_norm": 18.66843271886234, + "learning_rate": 8.246183964099286e-07, + "loss": 0.4721, + "step": 16781 + }, + { + "epoch": 2.62, + "grad_norm": 27.24275700854788, + "learning_rate": 8.239479341443357e-07, + "loss": 0.4734, + "step": 16782 + }, + { + "epoch": 2.62, + "grad_norm": 25.523912614546024, + "learning_rate": 8.232777328402941e-07, + "loss": 0.4371, + "step": 16783 + }, + { + "epoch": 2.62, + "grad_norm": 18.74135799249883, + "learning_rate": 8.226077925168674e-07, + "loss": 0.4146, + "step": 16784 + }, + { + "epoch": 2.62, + "grad_norm": 37.497727513598754, + "learning_rate": 8.219381131931048e-07, + "loss": 0.3987, + "step": 16785 + }, + { + "epoch": 2.62, + "grad_norm": 26.18147060306356, + "learning_rate": 8.212686948880521e-07, + "loss": 0.4137, + "step": 16786 + }, + { + "epoch": 2.62, + "grad_norm": 20.95040533477703, + "learning_rate": 8.20599537620751e-07, + "loss": 0.4516, + "step": 16787 + }, + { + "epoch": 2.62, + "grad_norm": 33.21193701012433, + "learning_rate": 8.199306414102282e-07, + "loss": 0.5055, + "step": 16788 + }, + { + "epoch": 2.62, + "grad_norm": 25.343243771446673, + "learning_rate": 8.192620062755085e-07, + "loss": 0.4725, + "step": 16789 + }, + { + "epoch": 2.62, + "grad_norm": 20.176099939035836, + "learning_rate": 8.185936322356047e-07, + "loss": 0.4315, + "step": 16790 + }, + { + "epoch": 2.62, + "grad_norm": 33.648030290338376, + "learning_rate": 8.179255193095292e-07, + "loss": 0.4847, + "step": 16791 + }, + { + "epoch": 2.62, + "grad_norm": 
26.493735355662018, + "learning_rate": 8.172576675162791e-07, + "loss": 0.4058, + "step": 16792 + }, + { + "epoch": 2.62, + "grad_norm": 19.61509767974897, + "learning_rate": 8.165900768748491e-07, + "loss": 0.422, + "step": 16793 + }, + { + "epoch": 2.62, + "grad_norm": 21.645227434823752, + "learning_rate": 8.159227474042276e-07, + "loss": 0.4167, + "step": 16794 + }, + { + "epoch": 2.62, + "grad_norm": 21.955256101404252, + "learning_rate": 8.15255679123389e-07, + "loss": 0.4241, + "step": 16795 + }, + { + "epoch": 2.62, + "grad_norm": 18.46999967207054, + "learning_rate": 8.145888720513051e-07, + "loss": 0.4071, + "step": 16796 + }, + { + "epoch": 2.62, + "grad_norm": 22.830333613319574, + "learning_rate": 8.139223262069407e-07, + "loss": 0.4656, + "step": 16797 + }, + { + "epoch": 2.62, + "grad_norm": 17.8787181911867, + "learning_rate": 8.132560416092516e-07, + "loss": 0.4108, + "step": 16798 + }, + { + "epoch": 2.62, + "grad_norm": 19.966142990180433, + "learning_rate": 8.125900182771874e-07, + "loss": 0.514, + "step": 16799 + }, + { + "epoch": 2.62, + "grad_norm": 18.276508767152627, + "learning_rate": 8.119242562296892e-07, + "loss": 0.3818, + "step": 16800 + }, + { + "epoch": 2.62, + "grad_norm": 18.75664585271627, + "learning_rate": 8.112587554856888e-07, + "loss": 0.3909, + "step": 16801 + }, + { + "epoch": 2.62, + "grad_norm": 16.617668005466477, + "learning_rate": 8.105935160641143e-07, + "loss": 0.4209, + "step": 16802 + }, + { + "epoch": 2.62, + "grad_norm": 23.143456110065678, + "learning_rate": 8.099285379838862e-07, + "loss": 0.4811, + "step": 16803 + }, + { + "epoch": 2.62, + "grad_norm": 17.81945785979983, + "learning_rate": 8.092638212639136e-07, + "loss": 0.4048, + "step": 16804 + }, + { + "epoch": 2.62, + "grad_norm": 29.83116819485186, + "learning_rate": 8.085993659231006e-07, + "loss": 0.4267, + "step": 16805 + }, + { + "epoch": 2.63, + "grad_norm": 19.959123716419516, + "learning_rate": 8.079351719803441e-07, + "loss": 0.4186, + "step": 16806 + }, + { + "epoch": 2.63, + "grad_norm": 22.705781315240756, + "learning_rate": 8.072712394545368e-07, + "loss": 0.4208, + "step": 16807 + }, + { + "epoch": 2.63, + "grad_norm": 30.147405975955014, + "learning_rate": 8.066075683645557e-07, + "loss": 0.5079, + "step": 16808 + }, + { + "epoch": 2.63, + "grad_norm": 21.853072253400462, + "learning_rate": 8.059441587292782e-07, + "loss": 0.3982, + "step": 16809 + }, + { + "epoch": 2.63, + "grad_norm": 20.264731188925893, + "learning_rate": 8.052810105675702e-07, + "loss": 0.4114, + "step": 16810 + }, + { + "epoch": 2.63, + "grad_norm": 20.17411913086202, + "learning_rate": 8.046181238982908e-07, + "loss": 0.4288, + "step": 16811 + }, + { + "epoch": 2.63, + "grad_norm": 63.012065382080834, + "learning_rate": 8.039554987402942e-07, + "loss": 0.45, + "step": 16812 + }, + { + "epoch": 2.63, + "grad_norm": 15.451686381382414, + "learning_rate": 8.03293135112423e-07, + "loss": 0.4377, + "step": 16813 + }, + { + "epoch": 2.63, + "grad_norm": 25.465290990800092, + "learning_rate": 8.026310330335163e-07, + "loss": 0.4364, + "step": 16814 + }, + { + "epoch": 2.63, + "grad_norm": 27.945110257392457, + "learning_rate": 8.019691925224004e-07, + "loss": 0.4653, + "step": 16815 + }, + { + "epoch": 2.63, + "grad_norm": 36.84208957556049, + "learning_rate": 8.013076135979025e-07, + "loss": 0.453, + "step": 16816 + }, + { + "epoch": 2.63, + "grad_norm": 17.14770967550887, + "learning_rate": 8.006462962788331e-07, + "loss": 0.3888, + "step": 16817 + }, + { + "epoch": 2.63, + "grad_norm": 
23.031860844561525, + "learning_rate": 7.999852405840025e-07, + "loss": 0.4422, + "step": 16818 + }, + { + "epoch": 2.63, + "grad_norm": 32.92649912212816, + "learning_rate": 7.993244465322092e-07, + "loss": 0.5207, + "step": 16819 + }, + { + "epoch": 2.63, + "grad_norm": 14.903131962600781, + "learning_rate": 7.986639141422469e-07, + "loss": 0.3809, + "step": 16820 + }, + { + "epoch": 2.63, + "grad_norm": 27.86737348966611, + "learning_rate": 7.980036434328997e-07, + "loss": 0.4695, + "step": 16821 + }, + { + "epoch": 2.63, + "grad_norm": 30.168104516958334, + "learning_rate": 7.973436344229458e-07, + "loss": 0.4653, + "step": 16822 + }, + { + "epoch": 2.63, + "grad_norm": 18.473699372465013, + "learning_rate": 7.966838871311566e-07, + "loss": 0.3886, + "step": 16823 + }, + { + "epoch": 2.63, + "grad_norm": 17.798365960033053, + "learning_rate": 7.960244015762919e-07, + "loss": 0.4322, + "step": 16824 + }, + { + "epoch": 2.63, + "grad_norm": 38.22287298032886, + "learning_rate": 7.953651777771121e-07, + "loss": 0.4198, + "step": 16825 + }, + { + "epoch": 2.63, + "grad_norm": 29.210312917499845, + "learning_rate": 7.947062157523589e-07, + "loss": 0.4228, + "step": 16826 + }, + { + "epoch": 2.63, + "grad_norm": 20.062279305625704, + "learning_rate": 7.940475155207772e-07, + "loss": 0.4341, + "step": 16827 + }, + { + "epoch": 2.63, + "grad_norm": 15.993112556899609, + "learning_rate": 7.933890771010999e-07, + "loss": 0.4629, + "step": 16828 + }, + { + "epoch": 2.63, + "grad_norm": 23.010568034084475, + "learning_rate": 7.927309005120521e-07, + "loss": 0.4348, + "step": 16829 + }, + { + "epoch": 2.63, + "grad_norm": 18.577993284998563, + "learning_rate": 7.920729857723486e-07, + "loss": 0.4326, + "step": 16830 + }, + { + "epoch": 2.63, + "grad_norm": 17.987880536647573, + "learning_rate": 7.914153329007035e-07, + "loss": 0.4189, + "step": 16831 + }, + { + "epoch": 2.63, + "grad_norm": 19.49306578711967, + "learning_rate": 7.907579419158196e-07, + "loss": 0.411, + "step": 16832 + }, + { + "epoch": 2.63, + "grad_norm": 17.166915742350334, + "learning_rate": 7.901008128363963e-07, + "loss": 0.4271, + "step": 16833 + }, + { + "epoch": 2.63, + "grad_norm": 16.84249898585503, + "learning_rate": 7.894439456811143e-07, + "loss": 0.4914, + "step": 16834 + }, + { + "epoch": 2.63, + "grad_norm": 17.18164285128788, + "learning_rate": 7.887873404686586e-07, + "loss": 0.4232, + "step": 16835 + }, + { + "epoch": 2.63, + "grad_norm": 30.780090636964363, + "learning_rate": 7.88130997217702e-07, + "loss": 0.4636, + "step": 16836 + }, + { + "epoch": 2.63, + "grad_norm": 23.54647295481414, + "learning_rate": 7.874749159469131e-07, + "loss": 0.4376, + "step": 16837 + }, + { + "epoch": 2.63, + "grad_norm": 19.70252201298119, + "learning_rate": 7.868190966749489e-07, + "loss": 0.4249, + "step": 16838 + }, + { + "epoch": 2.63, + "grad_norm": 24.30281482411287, + "learning_rate": 7.861635394204581e-07, + "loss": 0.4623, + "step": 16839 + }, + { + "epoch": 2.63, + "grad_norm": 21.627583553703232, + "learning_rate": 7.855082442020867e-07, + "loss": 0.3949, + "step": 16840 + }, + { + "epoch": 2.63, + "grad_norm": 18.371049234059328, + "learning_rate": 7.84853211038471e-07, + "loss": 0.4823, + "step": 16841 + }, + { + "epoch": 2.63, + "grad_norm": 21.317755842555908, + "learning_rate": 7.841984399482383e-07, + "loss": 0.4953, + "step": 16842 + }, + { + "epoch": 2.63, + "grad_norm": 19.099669392705323, + "learning_rate": 7.835439309500126e-07, + "loss": 0.4315, + "step": 16843 + }, + { + "epoch": 2.63, + "grad_norm": 
17.893913050184157, + "learning_rate": 7.828896840624045e-07, + "loss": 0.427, + "step": 16844 + }, + { + "epoch": 2.63, + "grad_norm": 21.702461904718188, + "learning_rate": 7.822356993040236e-07, + "loss": 0.4448, + "step": 16845 + }, + { + "epoch": 2.63, + "grad_norm": 21.718610975794803, + "learning_rate": 7.815819766934651e-07, + "loss": 0.5642, + "step": 16846 + }, + { + "epoch": 2.63, + "grad_norm": 17.398796786558435, + "learning_rate": 7.80928516249323e-07, + "loss": 0.3895, + "step": 16847 + }, + { + "epoch": 2.63, + "grad_norm": 21.030846737854098, + "learning_rate": 7.802753179901823e-07, + "loss": 0.426, + "step": 16848 + }, + { + "epoch": 2.63, + "grad_norm": 19.90180827535106, + "learning_rate": 7.796223819346171e-07, + "loss": 0.3751, + "step": 16849 + }, + { + "epoch": 2.63, + "grad_norm": 34.96958743022472, + "learning_rate": 7.789697081011982e-07, + "loss": 0.3893, + "step": 16850 + }, + { + "epoch": 2.63, + "grad_norm": 16.01562875515033, + "learning_rate": 7.783172965084851e-07, + "loss": 0.4309, + "step": 16851 + }, + { + "epoch": 2.63, + "grad_norm": 14.948257573877996, + "learning_rate": 7.77665147175034e-07, + "loss": 0.4026, + "step": 16852 + }, + { + "epoch": 2.63, + "grad_norm": 22.418381484613125, + "learning_rate": 7.770132601193936e-07, + "loss": 0.46, + "step": 16853 + }, + { + "epoch": 2.63, + "grad_norm": 22.844320560823682, + "learning_rate": 7.763616353601e-07, + "loss": 0.4746, + "step": 16854 + }, + { + "epoch": 2.63, + "grad_norm": 22.733132272031753, + "learning_rate": 7.75710272915684e-07, + "loss": 0.4695, + "step": 16855 + }, + { + "epoch": 2.63, + "grad_norm": 28.000478809596892, + "learning_rate": 7.750591728046719e-07, + "loss": 0.4036, + "step": 16856 + }, + { + "epoch": 2.63, + "grad_norm": 24.820126679601113, + "learning_rate": 7.744083350455811e-07, + "loss": 0.4649, + "step": 16857 + }, + { + "epoch": 2.63, + "grad_norm": 20.32344043127804, + "learning_rate": 7.737577596569223e-07, + "loss": 0.4875, + "step": 16858 + }, + { + "epoch": 2.63, + "grad_norm": 22.400681451454044, + "learning_rate": 7.731074466571942e-07, + "loss": 0.398, + "step": 16859 + }, + { + "epoch": 2.63, + "grad_norm": 17.71669893634057, + "learning_rate": 7.724573960648907e-07, + "loss": 0.3693, + "step": 16860 + }, + { + "epoch": 2.63, + "grad_norm": 18.352677457157014, + "learning_rate": 7.718076078985026e-07, + "loss": 0.3971, + "step": 16861 + }, + { + "epoch": 2.63, + "grad_norm": 19.02533477471114, + "learning_rate": 7.711580821765085e-07, + "loss": 0.432, + "step": 16862 + }, + { + "epoch": 2.63, + "grad_norm": 17.217546166501688, + "learning_rate": 7.705088189173804e-07, + "loss": 0.4335, + "step": 16863 + }, + { + "epoch": 2.63, + "grad_norm": 23.160675082907265, + "learning_rate": 7.6985981813958e-07, + "loss": 0.4206, + "step": 16864 + }, + { + "epoch": 2.63, + "grad_norm": 23.202533562825728, + "learning_rate": 7.692110798615682e-07, + "loss": 0.4083, + "step": 16865 + }, + { + "epoch": 2.63, + "grad_norm": 19.503359812365275, + "learning_rate": 7.685626041017935e-07, + "loss": 0.4502, + "step": 16866 + }, + { + "epoch": 2.63, + "grad_norm": 17.27997414275035, + "learning_rate": 7.67914390878699e-07, + "loss": 0.4318, + "step": 16867 + }, + { + "epoch": 2.63, + "grad_norm": 25.835566482267733, + "learning_rate": 7.672664402107166e-07, + "loss": 0.4624, + "step": 16868 + }, + { + "epoch": 2.63, + "grad_norm": 17.61740463190695, + "learning_rate": 7.66618752116276e-07, + "loss": 0.3958, + "step": 16869 + }, + { + "epoch": 2.64, + "grad_norm": 
22.85098183309853, + "learning_rate": 7.659713266137992e-07, + "loss": 0.4409, + "step": 16870 + }, + { + "epoch": 2.64, + "grad_norm": 18.75568043175452, + "learning_rate": 7.653241637216924e-07, + "loss": 0.3987, + "step": 16871 + }, + { + "epoch": 2.64, + "grad_norm": 38.46801953044423, + "learning_rate": 7.646772634583677e-07, + "loss": 0.4885, + "step": 16872 + }, + { + "epoch": 2.64, + "grad_norm": 25.697771136275506, + "learning_rate": 7.64030625842217e-07, + "loss": 0.4978, + "step": 16873 + }, + { + "epoch": 2.64, + "grad_norm": 17.03607930650342, + "learning_rate": 7.633842508916323e-07, + "loss": 0.4302, + "step": 16874 + }, + { + "epoch": 2.64, + "grad_norm": 29.131389335085494, + "learning_rate": 7.627381386249976e-07, + "loss": 0.4964, + "step": 16875 + }, + { + "epoch": 2.64, + "grad_norm": 24.221972021076663, + "learning_rate": 7.620922890606852e-07, + "loss": 0.4262, + "step": 16876 + }, + { + "epoch": 2.64, + "grad_norm": 19.861658705586958, + "learning_rate": 7.614467022170658e-07, + "loss": 0.4186, + "step": 16877 + }, + { + "epoch": 2.64, + "grad_norm": 26.86455808652885, + "learning_rate": 7.608013781124956e-07, + "loss": 0.5342, + "step": 16878 + }, + { + "epoch": 2.64, + "grad_norm": 23.439050092300334, + "learning_rate": 7.601563167653314e-07, + "loss": 0.4012, + "step": 16879 + }, + { + "epoch": 2.64, + "grad_norm": 22.456579716420062, + "learning_rate": 7.59511518193915e-07, + "loss": 0.4512, + "step": 16880 + }, + { + "epoch": 2.64, + "grad_norm": 29.554704605630015, + "learning_rate": 7.588669824165851e-07, + "loss": 0.4681, + "step": 16881 + }, + { + "epoch": 2.64, + "grad_norm": 34.10344989705391, + "learning_rate": 7.582227094516747e-07, + "loss": 0.493, + "step": 16882 + }, + { + "epoch": 2.64, + "grad_norm": 19.93347241115998, + "learning_rate": 7.575786993175028e-07, + "loss": 0.4627, + "step": 16883 + }, + { + "epoch": 2.64, + "grad_norm": 29.393700149294325, + "learning_rate": 7.569349520323854e-07, + "loss": 0.4639, + "step": 16884 + }, + { + "epoch": 2.64, + "grad_norm": 18.614876370363987, + "learning_rate": 7.562914676146304e-07, + "loss": 0.4296, + "step": 16885 + }, + { + "epoch": 2.64, + "grad_norm": 16.77048501804122, + "learning_rate": 7.556482460825399e-07, + "loss": 0.3855, + "step": 16886 + }, + { + "epoch": 2.64, + "grad_norm": 25.079864852779185, + "learning_rate": 7.550052874544056e-07, + "loss": 0.4153, + "step": 16887 + }, + { + "epoch": 2.64, + "grad_norm": 27.814226345533076, + "learning_rate": 7.543625917485142e-07, + "loss": 0.3975, + "step": 16888 + }, + { + "epoch": 2.64, + "grad_norm": 29.274484261906014, + "learning_rate": 7.537201589831389e-07, + "loss": 0.4465, + "step": 16889 + }, + { + "epoch": 2.64, + "grad_norm": 18.42591955955083, + "learning_rate": 7.530779891765549e-07, + "loss": 0.3628, + "step": 16890 + }, + { + "epoch": 2.64, + "grad_norm": 20.478408078269176, + "learning_rate": 7.524360823470244e-07, + "loss": 0.4413, + "step": 16891 + }, + { + "epoch": 2.64, + "grad_norm": 23.506952992044383, + "learning_rate": 7.517944385128018e-07, + "loss": 0.4642, + "step": 16892 + }, + { + "epoch": 2.64, + "grad_norm": 18.75456311728819, + "learning_rate": 7.511530576921344e-07, + "loss": 0.4452, + "step": 16893 + }, + { + "epoch": 2.64, + "grad_norm": 22.437404387968996, + "learning_rate": 7.505119399032623e-07, + "loss": 0.4101, + "step": 16894 + }, + { + "epoch": 2.64, + "grad_norm": 24.54236982975728, + "learning_rate": 7.498710851644231e-07, + "loss": 0.3821, + "step": 16895 + }, + { + "epoch": 2.64, + "grad_norm": 
29.62335673484736, + "learning_rate": 7.492304934938365e-07, + "loss": 0.4359, + "step": 16896 + }, + { + "epoch": 2.64, + "grad_norm": 28.060790699130283, + "learning_rate": 7.48590164909725e-07, + "loss": 0.4738, + "step": 16897 + }, + { + "epoch": 2.64, + "grad_norm": 19.914088653631087, + "learning_rate": 7.479500994302957e-07, + "loss": 0.3831, + "step": 16898 + }, + { + "epoch": 2.64, + "grad_norm": 16.568575890819545, + "learning_rate": 7.473102970737534e-07, + "loss": 0.4251, + "step": 16899 + }, + { + "epoch": 2.64, + "grad_norm": 25.149919860230582, + "learning_rate": 7.466707578582954e-07, + "loss": 0.5003, + "step": 16900 + }, + { + "epoch": 2.64, + "grad_norm": 25.38762558108232, + "learning_rate": 7.460314818021053e-07, + "loss": 0.3956, + "step": 16901 + }, + { + "epoch": 2.64, + "grad_norm": 21.40323957311481, + "learning_rate": 7.453924689233693e-07, + "loss": 0.4099, + "step": 16902 + }, + { + "epoch": 2.64, + "grad_norm": 17.58806714822402, + "learning_rate": 7.447537192402554e-07, + "loss": 0.4006, + "step": 16903 + }, + { + "epoch": 2.64, + "grad_norm": 33.796221153153, + "learning_rate": 7.441152327709334e-07, + "loss": 0.407, + "step": 16904 + }, + { + "epoch": 2.64, + "grad_norm": 23.945559204473557, + "learning_rate": 7.434770095335575e-07, + "loss": 0.441, + "step": 16905 + }, + { + "epoch": 2.64, + "grad_norm": 28.343743056156608, + "learning_rate": 7.428390495462812e-07, + "loss": 0.4714, + "step": 16906 + }, + { + "epoch": 2.64, + "grad_norm": 33.61964477292253, + "learning_rate": 7.422013528272487e-07, + "loss": 0.4491, + "step": 16907 + }, + { + "epoch": 2.64, + "grad_norm": 25.092504680060543, + "learning_rate": 7.415639193945945e-07, + "loss": 0.4407, + "step": 16908 + }, + { + "epoch": 2.64, + "grad_norm": 19.522835893585672, + "learning_rate": 7.409267492664441e-07, + "loss": 0.5003, + "step": 16909 + }, + { + "epoch": 2.64, + "grad_norm": 26.416019802692528, + "learning_rate": 7.402898424609206e-07, + "loss": 0.4278, + "step": 16910 + }, + { + "epoch": 2.64, + "grad_norm": 21.506691327371218, + "learning_rate": 7.396531989961364e-07, + "loss": 0.4247, + "step": 16911 + }, + { + "epoch": 2.64, + "grad_norm": 34.72702402638485, + "learning_rate": 7.390168188902014e-07, + "loss": 0.4221, + "step": 16912 + }, + { + "epoch": 2.64, + "grad_norm": 27.332931770317877, + "learning_rate": 7.383807021612089e-07, + "loss": 0.4089, + "step": 16913 + }, + { + "epoch": 2.64, + "grad_norm": 20.784822813677792, + "learning_rate": 7.377448488272509e-07, + "loss": 0.4775, + "step": 16914 + }, + { + "epoch": 2.64, + "grad_norm": 23.164310796863965, + "learning_rate": 7.3710925890641e-07, + "loss": 0.4712, + "step": 16915 + }, + { + "epoch": 2.64, + "grad_norm": 19.609184651001883, + "learning_rate": 7.364739324167658e-07, + "loss": 0.3752, + "step": 16916 + }, + { + "epoch": 2.64, + "grad_norm": 24.25547887539159, + "learning_rate": 7.35838869376384e-07, + "loss": 0.4694, + "step": 16917 + }, + { + "epoch": 2.64, + "grad_norm": 46.19113616541803, + "learning_rate": 7.352040698033236e-07, + "loss": 0.4421, + "step": 16918 + }, + { + "epoch": 2.64, + "grad_norm": 37.32707569440677, + "learning_rate": 7.345695337156389e-07, + "loss": 0.5207, + "step": 16919 + }, + { + "epoch": 2.64, + "grad_norm": 27.358150016859156, + "learning_rate": 7.339352611313777e-07, + "loss": 0.4851, + "step": 16920 + }, + { + "epoch": 2.64, + "grad_norm": 21.542554103288225, + "learning_rate": 7.33301252068579e-07, + "loss": 0.5202, + "step": 16921 + }, + { + "epoch": 2.64, + "grad_norm": 
15.700291455294751, + "learning_rate": 7.326675065452693e-07, + "loss": 0.3829, + "step": 16922 + }, + { + "epoch": 2.64, + "grad_norm": 39.02120483625192, + "learning_rate": 7.320340245794755e-07, + "loss": 0.4012, + "step": 16923 + }, + { + "epoch": 2.64, + "grad_norm": 24.32930446624821, + "learning_rate": 7.314008061892108e-07, + "loss": 0.4506, + "step": 16924 + }, + { + "epoch": 2.64, + "grad_norm": 17.33022061773795, + "learning_rate": 7.307678513924877e-07, + "loss": 0.3949, + "step": 16925 + }, + { + "epoch": 2.64, + "grad_norm": 20.4365060809573, + "learning_rate": 7.301351602073048e-07, + "loss": 0.4611, + "step": 16926 + }, + { + "epoch": 2.64, + "grad_norm": 17.600283221111766, + "learning_rate": 7.295027326516535e-07, + "loss": 0.4199, + "step": 16927 + }, + { + "epoch": 2.64, + "grad_norm": 20.78572774344092, + "learning_rate": 7.288705687435204e-07, + "loss": 0.435, + "step": 16928 + }, + { + "epoch": 2.64, + "grad_norm": 19.812328181439334, + "learning_rate": 7.28238668500888e-07, + "loss": 0.5015, + "step": 16929 + }, + { + "epoch": 2.64, + "grad_norm": 22.96636934009789, + "learning_rate": 7.276070319417216e-07, + "loss": 0.4346, + "step": 16930 + }, + { + "epoch": 2.64, + "grad_norm": 21.218819628644813, + "learning_rate": 7.269756590839883e-07, + "loss": 0.4315, + "step": 16931 + }, + { + "epoch": 2.64, + "grad_norm": 23.283954821665816, + "learning_rate": 7.263445499456412e-07, + "loss": 0.4576, + "step": 16932 + }, + { + "epoch": 2.64, + "grad_norm": 24.04858804417328, + "learning_rate": 7.257137045446327e-07, + "loss": 0.4329, + "step": 16933 + }, + { + "epoch": 2.65, + "grad_norm": 15.926081830243813, + "learning_rate": 7.250831228988986e-07, + "loss": 0.4557, + "step": 16934 + }, + { + "epoch": 2.65, + "grad_norm": 17.174457614433024, + "learning_rate": 7.244528050263744e-07, + "loss": 0.454, + "step": 16935 + }, + { + "epoch": 2.65, + "grad_norm": 20.867504037493568, + "learning_rate": 7.238227509449891e-07, + "loss": 0.3953, + "step": 16936 + }, + { + "epoch": 2.65, + "grad_norm": 34.6054061172561, + "learning_rate": 7.231929606726562e-07, + "loss": 0.4575, + "step": 16937 + }, + { + "epoch": 2.65, + "grad_norm": 13.417321541570763, + "learning_rate": 7.225634342272903e-07, + "loss": 0.3648, + "step": 16938 + }, + { + "epoch": 2.65, + "grad_norm": 19.1481978231408, + "learning_rate": 7.219341716267925e-07, + "loss": 0.4202, + "step": 16939 + }, + { + "epoch": 2.65, + "grad_norm": 24.39213821181831, + "learning_rate": 7.213051728890586e-07, + "loss": 0.4107, + "step": 16940 + }, + { + "epoch": 2.65, + "grad_norm": 29.51286279517155, + "learning_rate": 7.206764380319786e-07, + "loss": 0.4013, + "step": 16941 + }, + { + "epoch": 2.65, + "grad_norm": 16.480239657708285, + "learning_rate": 7.20047967073434e-07, + "loss": 0.4018, + "step": 16942 + }, + { + "epoch": 2.65, + "grad_norm": 17.85465106748411, + "learning_rate": 7.194197600312936e-07, + "loss": 0.4261, + "step": 16943 + }, + { + "epoch": 2.65, + "grad_norm": 21.240310128614013, + "learning_rate": 7.187918169234265e-07, + "loss": 0.4741, + "step": 16944 + }, + { + "epoch": 2.65, + "grad_norm": 18.877992688804472, + "learning_rate": 7.181641377676918e-07, + "loss": 0.4236, + "step": 16945 + }, + { + "epoch": 2.65, + "grad_norm": 19.54637360025032, + "learning_rate": 7.175367225819418e-07, + "loss": 0.4088, + "step": 16946 + }, + { + "epoch": 2.65, + "grad_norm": 28.202189692577342, + "learning_rate": 7.169095713840135e-07, + "loss": 0.4307, + "step": 16947 + }, + { + "epoch": 2.65, + "grad_norm": 
20.242653932280582, + "learning_rate": 7.16282684191747e-07, + "loss": 0.4118, + "step": 16948 + }, + { + "epoch": 2.65, + "grad_norm": 25.825725198157475, + "learning_rate": 7.156560610229701e-07, + "loss": 0.4823, + "step": 16949 + }, + { + "epoch": 2.65, + "grad_norm": 30.023298817616297, + "learning_rate": 7.150297018955055e-07, + "loss": 0.4348, + "step": 16950 + }, + { + "epoch": 2.65, + "grad_norm": 24.26751415712457, + "learning_rate": 7.144036068271654e-07, + "loss": 0.4398, + "step": 16951 + }, + { + "epoch": 2.65, + "grad_norm": 23.18227637387653, + "learning_rate": 7.137777758357523e-07, + "loss": 0.4926, + "step": 16952 + }, + { + "epoch": 2.65, + "grad_norm": 17.60419619903871, + "learning_rate": 7.131522089390663e-07, + "loss": 0.4225, + "step": 16953 + }, + { + "epoch": 2.65, + "grad_norm": 18.218780213293343, + "learning_rate": 7.125269061549012e-07, + "loss": 0.4487, + "step": 16954 + }, + { + "epoch": 2.65, + "grad_norm": 24.474617092675352, + "learning_rate": 7.119018675010369e-07, + "loss": 0.4443, + "step": 16955 + }, + { + "epoch": 2.65, + "grad_norm": 15.064154574624544, + "learning_rate": 7.112770929952506e-07, + "loss": 0.4581, + "step": 16956 + }, + { + "epoch": 2.65, + "grad_norm": 24.531868571178812, + "learning_rate": 7.10652582655309e-07, + "loss": 0.4319, + "step": 16957 + }, + { + "epoch": 2.65, + "grad_norm": 24.806323178645442, + "learning_rate": 7.100283364989757e-07, + "loss": 0.4148, + "step": 16958 + }, + { + "epoch": 2.65, + "grad_norm": 28.25120667395479, + "learning_rate": 7.094043545439999e-07, + "loss": 0.4339, + "step": 16959 + }, + { + "epoch": 2.65, + "grad_norm": 25.00788101906701, + "learning_rate": 7.087806368081296e-07, + "loss": 0.481, + "step": 16960 + }, + { + "epoch": 2.65, + "grad_norm": 17.9109394047861, + "learning_rate": 7.081571833091039e-07, + "loss": 0.4802, + "step": 16961 + }, + { + "epoch": 2.65, + "grad_norm": 18.740040837849502, + "learning_rate": 7.075339940646508e-07, + "loss": 0.4054, + "step": 16962 + }, + { + "epoch": 2.65, + "grad_norm": 20.74466591663702, + "learning_rate": 7.069110690924974e-07, + "loss": 0.3905, + "step": 16963 + }, + { + "epoch": 2.65, + "grad_norm": 22.29085204531449, + "learning_rate": 7.06288408410355e-07, + "loss": 0.385, + "step": 16964 + }, + { + "epoch": 2.65, + "grad_norm": 18.844893795619, + "learning_rate": 7.056660120359349e-07, + "loss": 0.4143, + "step": 16965 + }, + { + "epoch": 2.65, + "grad_norm": 25.0207018040152, + "learning_rate": 7.050438799869352e-07, + "loss": 0.4699, + "step": 16966 + }, + { + "epoch": 2.65, + "grad_norm": 15.597550155580713, + "learning_rate": 7.044220122810508e-07, + "loss": 0.3672, + "step": 16967 + }, + { + "epoch": 2.65, + "grad_norm": 23.35701236577994, + "learning_rate": 7.038004089359663e-07, + "loss": 0.4449, + "step": 16968 + }, + { + "epoch": 2.65, + "grad_norm": 22.91420574904314, + "learning_rate": 7.031790699693586e-07, + "loss": 0.4751, + "step": 16969 + }, + { + "epoch": 2.65, + "grad_norm": 14.909948800782285, + "learning_rate": 7.025579953989026e-07, + "loss": 0.4267, + "step": 16970 + }, + { + "epoch": 2.65, + "grad_norm": 19.633419328048156, + "learning_rate": 7.019371852422574e-07, + "loss": 0.4522, + "step": 16971 + }, + { + "epoch": 2.65, + "grad_norm": 40.40897275317225, + "learning_rate": 7.01316639517079e-07, + "loss": 0.4922, + "step": 16972 + }, + { + "epoch": 2.65, + "grad_norm": 19.68186955093115, + "learning_rate": 7.006963582410153e-07, + "loss": 0.3753, + "step": 16973 + }, + { + "epoch": 2.65, + "grad_norm": 
32.352136896557326, + "learning_rate": 7.000763414317069e-07, + "loss": 0.4366, + "step": 16974 + }, + { + "epoch": 2.65, + "grad_norm": 12.033830856347995, + "learning_rate": 6.994565891067884e-07, + "loss": 0.3926, + "step": 16975 + }, + { + "epoch": 2.65, + "grad_norm": 19.2580439478215, + "learning_rate": 6.988371012838846e-07, + "loss": 0.4956, + "step": 16976 + }, + { + "epoch": 2.65, + "grad_norm": 26.155731691673708, + "learning_rate": 6.982178779806104e-07, + "loss": 0.4942, + "step": 16977 + }, + { + "epoch": 2.65, + "grad_norm": 17.71712777113979, + "learning_rate": 6.975989192145782e-07, + "loss": 0.4181, + "step": 16978 + }, + { + "epoch": 2.65, + "grad_norm": 27.454886283890847, + "learning_rate": 6.96980225003393e-07, + "loss": 0.4593, + "step": 16979 + }, + { + "epoch": 2.65, + "grad_norm": 23.569902573897206, + "learning_rate": 6.963617953646484e-07, + "loss": 0.4401, + "step": 16980 + }, + { + "epoch": 2.65, + "grad_norm": 34.14101564546121, + "learning_rate": 6.957436303159304e-07, + "loss": 0.4662, + "step": 16981 + }, + { + "epoch": 2.65, + "grad_norm": 23.973733968815406, + "learning_rate": 6.951257298748204e-07, + "loss": 0.4327, + "step": 16982 + }, + { + "epoch": 2.65, + "grad_norm": 28.526577083748773, + "learning_rate": 6.94508094058891e-07, + "loss": 0.4512, + "step": 16983 + }, + { + "epoch": 2.65, + "grad_norm": 17.720721383234114, + "learning_rate": 6.938907228857095e-07, + "loss": 0.4055, + "step": 16984 + }, + { + "epoch": 2.65, + "grad_norm": 17.930534144588915, + "learning_rate": 6.932736163728327e-07, + "loss": 0.4427, + "step": 16985 + }, + { + "epoch": 2.65, + "grad_norm": 15.495941110873215, + "learning_rate": 6.926567745378076e-07, + "loss": 0.3663, + "step": 16986 + }, + { + "epoch": 2.65, + "grad_norm": 32.750840610351986, + "learning_rate": 6.920401973981794e-07, + "loss": 0.4481, + "step": 16987 + }, + { + "epoch": 2.65, + "grad_norm": 21.216017385605664, + "learning_rate": 6.914238849714849e-07, + "loss": 0.4434, + "step": 16988 + }, + { + "epoch": 2.65, + "grad_norm": 20.142577046394596, + "learning_rate": 6.90807837275248e-07, + "loss": 0.4575, + "step": 16989 + }, + { + "epoch": 2.65, + "grad_norm": 21.24530196288164, + "learning_rate": 6.901920543269925e-07, + "loss": 0.4538, + "step": 16990 + }, + { + "epoch": 2.65, + "grad_norm": 16.196424638366462, + "learning_rate": 6.895765361442264e-07, + "loss": 0.4212, + "step": 16991 + }, + { + "epoch": 2.65, + "grad_norm": 18.46682812361794, + "learning_rate": 6.889612827444592e-07, + "loss": 0.4293, + "step": 16992 + }, + { + "epoch": 2.65, + "grad_norm": 20.018479384910545, + "learning_rate": 6.883462941451846e-07, + "loss": 0.4197, + "step": 16993 + }, + { + "epoch": 2.65, + "grad_norm": 18.42966825185873, + "learning_rate": 6.877315703638943e-07, + "loss": 0.4504, + "step": 16994 + }, + { + "epoch": 2.65, + "grad_norm": 23.598967359315626, + "learning_rate": 6.871171114180719e-07, + "loss": 0.4458, + "step": 16995 + }, + { + "epoch": 2.65, + "grad_norm": 13.857768877455179, + "learning_rate": 6.865029173251914e-07, + "loss": 0.4249, + "step": 16996 + }, + { + "epoch": 2.65, + "grad_norm": 22.479247635223278, + "learning_rate": 6.858889881027187e-07, + "loss": 0.4384, + "step": 16997 + }, + { + "epoch": 2.66, + "grad_norm": 28.270730075820964, + "learning_rate": 6.852753237681131e-07, + "loss": 0.4507, + "step": 16998 + }, + { + "epoch": 2.66, + "grad_norm": 31.32222442805024, + "learning_rate": 6.846619243388297e-07, + "loss": 0.4584, + "step": 16999 + }, + { + "epoch": 2.66, + "grad_norm": 
24.638024602031646, + "learning_rate": 6.840487898323122e-07, + "loss": 0.4003, + "step": 17000 + }, + { + "epoch": 2.66, + "grad_norm": 17.438656737815304, + "learning_rate": 6.83435920265999e-07, + "loss": 0.4725, + "step": 17001 + }, + { + "epoch": 2.66, + "grad_norm": 15.78180766489473, + "learning_rate": 6.82823315657315e-07, + "loss": 0.4358, + "step": 17002 + }, + { + "epoch": 2.66, + "grad_norm": 27.469788973800213, + "learning_rate": 6.822109760236861e-07, + "loss": 0.4364, + "step": 17003 + }, + { + "epoch": 2.66, + "grad_norm": 19.724507139790685, + "learning_rate": 6.815989013825285e-07, + "loss": 0.3935, + "step": 17004 + }, + { + "epoch": 2.66, + "grad_norm": 19.300029688664686, + "learning_rate": 6.809870917512462e-07, + "loss": 0.4098, + "step": 17005 + }, + { + "epoch": 2.66, + "grad_norm": 21.41557284557138, + "learning_rate": 6.803755471472384e-07, + "loss": 0.4639, + "step": 17006 + }, + { + "epoch": 2.66, + "grad_norm": 19.21578261332035, + "learning_rate": 6.79764267587898e-07, + "loss": 0.4053, + "step": 17007 + }, + { + "epoch": 2.66, + "grad_norm": 19.20306508758734, + "learning_rate": 6.791532530906098e-07, + "loss": 0.4261, + "step": 17008 + }, + { + "epoch": 2.66, + "grad_norm": 18.602186768497322, + "learning_rate": 6.785425036727522e-07, + "loss": 0.401, + "step": 17009 + }, + { + "epoch": 2.66, + "grad_norm": 19.02387156686479, + "learning_rate": 6.779320193516925e-07, + "loss": 0.408, + "step": 17010 + }, + { + "epoch": 2.66, + "grad_norm": 19.59933059258428, + "learning_rate": 6.773218001447923e-07, + "loss": 0.4902, + "step": 17011 + }, + { + "epoch": 2.66, + "grad_norm": 21.781701508115393, + "learning_rate": 6.767118460694056e-07, + "loss": 0.4027, + "step": 17012 + }, + { + "epoch": 2.66, + "grad_norm": 15.55440458234907, + "learning_rate": 6.761021571428816e-07, + "loss": 0.4106, + "step": 17013 + }, + { + "epoch": 2.66, + "grad_norm": 39.882061410794, + "learning_rate": 6.75492733382559e-07, + "loss": 0.4822, + "step": 17014 + }, + { + "epoch": 2.66, + "grad_norm": 31.0322179436863, + "learning_rate": 6.748835748057658e-07, + "loss": 0.4131, + "step": 17015 + }, + { + "epoch": 2.66, + "grad_norm": 18.6244217059585, + "learning_rate": 6.742746814298285e-07, + "loss": 0.4519, + "step": 17016 + }, + { + "epoch": 2.66, + "grad_norm": 33.92483251954795, + "learning_rate": 6.736660532720662e-07, + "loss": 0.4562, + "step": 17017 + }, + { + "epoch": 2.66, + "grad_norm": 29.13638939002006, + "learning_rate": 6.730576903497832e-07, + "loss": 0.4267, + "step": 17018 + }, + { + "epoch": 2.66, + "grad_norm": 20.769779602741252, + "learning_rate": 6.724495926802854e-07, + "loss": 0.4623, + "step": 17019 + }, + { + "epoch": 2.66, + "grad_norm": 18.370774772676448, + "learning_rate": 6.718417602808636e-07, + "loss": 0.4245, + "step": 17020 + }, + { + "epoch": 2.66, + "grad_norm": 18.943558489430227, + "learning_rate": 6.71234193168806e-07, + "loss": 0.3762, + "step": 17021 + }, + { + "epoch": 2.66, + "grad_norm": 20.346078157515993, + "learning_rate": 6.7062689136139e-07, + "loss": 0.4377, + "step": 17022 + }, + { + "epoch": 2.66, + "grad_norm": 20.105330236805308, + "learning_rate": 6.700198548758874e-07, + "loss": 0.3829, + "step": 17023 + }, + { + "epoch": 2.66, + "grad_norm": 24.84923823693445, + "learning_rate": 6.694130837295632e-07, + "loss": 0.4291, + "step": 17024 + }, + { + "epoch": 2.66, + "grad_norm": 27.275788335089313, + "learning_rate": 6.688065779396713e-07, + "loss": 0.485, + "step": 17025 + }, + { + "epoch": 2.66, + "grad_norm": 21.590290079460623, 
+ "learning_rate": 6.682003375234636e-07, + "loss": 0.4434, + "step": 17026 + }, + { + "epoch": 2.66, + "grad_norm": 19.593048153860142, + "learning_rate": 6.675943624981785e-07, + "loss": 0.4093, + "step": 17027 + }, + { + "epoch": 2.66, + "grad_norm": 18.285089568057483, + "learning_rate": 6.669886528810498e-07, + "loss": 0.3994, + "step": 17028 + }, + { + "epoch": 2.66, + "grad_norm": 16.830597707345294, + "learning_rate": 6.663832086893052e-07, + "loss": 0.4229, + "step": 17029 + }, + { + "epoch": 2.66, + "grad_norm": 32.45391485311872, + "learning_rate": 6.657780299401628e-07, + "loss": 0.4441, + "step": 17030 + }, + { + "epoch": 2.66, + "grad_norm": 17.40082117049648, + "learning_rate": 6.651731166508313e-07, + "loss": 0.4093, + "step": 17031 + }, + { + "epoch": 2.66, + "grad_norm": 28.322825016510862, + "learning_rate": 6.645684688385156e-07, + "loss": 0.3998, + "step": 17032 + }, + { + "epoch": 2.66, + "grad_norm": 19.069347035224137, + "learning_rate": 6.639640865204111e-07, + "loss": 0.3896, + "step": 17033 + }, + { + "epoch": 2.66, + "grad_norm": 25.01354521374338, + "learning_rate": 6.633599697137105e-07, + "loss": 0.4234, + "step": 17034 + }, + { + "epoch": 2.66, + "grad_norm": 18.49225597143289, + "learning_rate": 6.627561184355869e-07, + "loss": 0.4791, + "step": 17035 + }, + { + "epoch": 2.66, + "grad_norm": 22.835923343331707, + "learning_rate": 6.621525327032174e-07, + "loss": 0.3775, + "step": 17036 + }, + { + "epoch": 2.66, + "grad_norm": 21.424137567780104, + "learning_rate": 6.615492125337663e-07, + "loss": 0.4314, + "step": 17037 + }, + { + "epoch": 2.66, + "grad_norm": 17.96515394052094, + "learning_rate": 6.609461579443954e-07, + "loss": 0.4201, + "step": 17038 + }, + { + "epoch": 2.66, + "grad_norm": 25.84355037628256, + "learning_rate": 6.603433689522531e-07, + "loss": 0.4409, + "step": 17039 + }, + { + "epoch": 2.66, + "grad_norm": 29.039379297744702, + "learning_rate": 6.597408455744792e-07, + "loss": 0.4797, + "step": 17040 + }, + { + "epoch": 2.66, + "grad_norm": 21.028265306948867, + "learning_rate": 6.59138587828212e-07, + "loss": 0.5161, + "step": 17041 + }, + { + "epoch": 2.66, + "grad_norm": 29.6197833989168, + "learning_rate": 6.585365957305812e-07, + "loss": 0.489, + "step": 17042 + }, + { + "epoch": 2.66, + "grad_norm": 21.549998363375323, + "learning_rate": 6.57934869298703e-07, + "loss": 0.4531, + "step": 17043 + }, + { + "epoch": 2.66, + "grad_norm": 22.46886577563109, + "learning_rate": 6.573334085496941e-07, + "loss": 0.4155, + "step": 17044 + }, + { + "epoch": 2.66, + "grad_norm": 19.211848286248678, + "learning_rate": 6.567322135006559e-07, + "loss": 0.4408, + "step": 17045 + }, + { + "epoch": 2.66, + "grad_norm": 18.880414287276913, + "learning_rate": 6.561312841686895e-07, + "loss": 0.4522, + "step": 17046 + }, + { + "epoch": 2.66, + "grad_norm": 13.521092950427791, + "learning_rate": 6.555306205708812e-07, + "loss": 0.4371, + "step": 17047 + }, + { + "epoch": 2.66, + "grad_norm": 22.563371343641823, + "learning_rate": 6.54930222724317e-07, + "loss": 0.4079, + "step": 17048 + }, + { + "epoch": 2.66, + "grad_norm": 22.989714800296422, + "learning_rate": 6.543300906460714e-07, + "loss": 0.4755, + "step": 17049 + }, + { + "epoch": 2.66, + "grad_norm": 27.088749978304143, + "learning_rate": 6.537302243532095e-07, + "loss": 0.4328, + "step": 17050 + }, + { + "epoch": 2.66, + "grad_norm": 23.439463036027046, + "learning_rate": 6.531306238627932e-07, + "loss": 0.4756, + "step": 17051 + }, + { + "epoch": 2.66, + "grad_norm": 27.9192020244739, + 
"learning_rate": 6.525312891918745e-07, + "loss": 0.4623, + "step": 17052 + }, + { + "epoch": 2.66, + "grad_norm": 25.48685072819762, + "learning_rate": 6.519322203574962e-07, + "loss": 0.4263, + "step": 17053 + }, + { + "epoch": 2.66, + "grad_norm": 22.352761920106488, + "learning_rate": 6.513334173766994e-07, + "loss": 0.4176, + "step": 17054 + }, + { + "epoch": 2.66, + "grad_norm": 15.283399535065746, + "learning_rate": 6.507348802665115e-07, + "loss": 0.4183, + "step": 17055 + }, + { + "epoch": 2.66, + "grad_norm": 20.49803511140803, + "learning_rate": 6.501366090439521e-07, + "loss": 0.3975, + "step": 17056 + }, + { + "epoch": 2.66, + "grad_norm": 19.426900729752138, + "learning_rate": 6.495386037260387e-07, + "loss": 0.3751, + "step": 17057 + }, + { + "epoch": 2.66, + "grad_norm": 19.43626412452176, + "learning_rate": 6.489408643297778e-07, + "loss": 0.4494, + "step": 17058 + }, + { + "epoch": 2.66, + "grad_norm": 22.855540566078112, + "learning_rate": 6.483433908721715e-07, + "loss": 0.4005, + "step": 17059 + }, + { + "epoch": 2.66, + "grad_norm": 17.73181552104461, + "learning_rate": 6.477461833702047e-07, + "loss": 0.4296, + "step": 17060 + }, + { + "epoch": 2.66, + "grad_norm": 20.51348699700499, + "learning_rate": 6.471492418408665e-07, + "loss": 0.3751, + "step": 17061 + }, + { + "epoch": 2.67, + "grad_norm": 21.553816710355985, + "learning_rate": 6.465525663011318e-07, + "loss": 0.4578, + "step": 17062 + }, + { + "epoch": 2.67, + "grad_norm": 21.542725772818944, + "learning_rate": 6.459561567679728e-07, + "loss": 0.4782, + "step": 17063 + }, + { + "epoch": 2.67, + "grad_norm": 29.15203652187422, + "learning_rate": 6.453600132583482e-07, + "loss": 0.5064, + "step": 17064 + }, + { + "epoch": 2.67, + "grad_norm": 23.260137969017435, + "learning_rate": 6.44764135789211e-07, + "loss": 0.4755, + "step": 17065 + }, + { + "epoch": 2.67, + "grad_norm": 18.9456889478899, + "learning_rate": 6.441685243775097e-07, + "loss": 0.4129, + "step": 17066 + }, + { + "epoch": 2.67, + "grad_norm": 27.729086738170775, + "learning_rate": 6.435731790401833e-07, + "loss": 0.5379, + "step": 17067 + }, + { + "epoch": 2.67, + "grad_norm": 17.976227029806033, + "learning_rate": 6.429780997941626e-07, + "loss": 0.447, + "step": 17068 + }, + { + "epoch": 2.67, + "grad_norm": 22.96528416714328, + "learning_rate": 6.423832866563696e-07, + "loss": 0.4098, + "step": 17069 + }, + { + "epoch": 2.67, + "grad_norm": 30.1824875218266, + "learning_rate": 6.417887396437217e-07, + "loss": 0.4455, + "step": 17070 + }, + { + "epoch": 2.67, + "grad_norm": 25.392324768098185, + "learning_rate": 6.411944587731279e-07, + "loss": 0.3924, + "step": 17071 + }, + { + "epoch": 2.67, + "grad_norm": 19.80517515827337, + "learning_rate": 6.406004440614911e-07, + "loss": 0.409, + "step": 17072 + }, + { + "epoch": 2.67, + "grad_norm": 21.29175992762999, + "learning_rate": 6.400066955257023e-07, + "loss": 0.4157, + "step": 17073 + }, + { + "epoch": 2.67, + "grad_norm": 15.612353141023707, + "learning_rate": 6.394132131826447e-07, + "loss": 0.4413, + "step": 17074 + }, + { + "epoch": 2.67, + "grad_norm": 24.05933694114306, + "learning_rate": 6.388199970492015e-07, + "loss": 0.4136, + "step": 17075 + }, + { + "epoch": 2.67, + "grad_norm": 30.137079820429193, + "learning_rate": 6.382270471422413e-07, + "loss": 0.4603, + "step": 17076 + }, + { + "epoch": 2.67, + "grad_norm": 26.955829232600486, + "learning_rate": 6.376343634786264e-07, + "loss": 0.4748, + "step": 17077 + }, + { + "epoch": 2.67, + "grad_norm": 34.27518691882161, + 
"learning_rate": 6.370419460752153e-07, + "loss": 0.4157, + "step": 17078 + }, + { + "epoch": 2.67, + "grad_norm": 27.31960892582676, + "learning_rate": 6.364497949488524e-07, + "loss": 0.4197, + "step": 17079 + }, + { + "epoch": 2.67, + "grad_norm": 25.238343928804962, + "learning_rate": 6.35857910116382e-07, + "loss": 0.4026, + "step": 17080 + }, + { + "epoch": 2.67, + "grad_norm": 21.92394860997047, + "learning_rate": 6.352662915946317e-07, + "loss": 0.3977, + "step": 17081 + }, + { + "epoch": 2.67, + "grad_norm": 35.61218212490607, + "learning_rate": 6.346749394004314e-07, + "loss": 0.445, + "step": 17082 + }, + { + "epoch": 2.67, + "grad_norm": 19.427349684647595, + "learning_rate": 6.340838535505978e-07, + "loss": 0.3971, + "step": 17083 + }, + { + "epoch": 2.67, + "grad_norm": 17.6933223110442, + "learning_rate": 6.334930340619405e-07, + "loss": 0.4132, + "step": 17084 + }, + { + "epoch": 2.67, + "grad_norm": 19.313131267683865, + "learning_rate": 6.329024809512607e-07, + "loss": 0.3959, + "step": 17085 + }, + { + "epoch": 2.67, + "grad_norm": 31.41987764705529, + "learning_rate": 6.323121942353549e-07, + "loss": 0.469, + "step": 17086 + }, + { + "epoch": 2.67, + "grad_norm": 20.329605457661362, + "learning_rate": 6.317221739310109e-07, + "loss": 0.4416, + "step": 17087 + }, + { + "epoch": 2.67, + "grad_norm": 30.76519342568286, + "learning_rate": 6.311324200550084e-07, + "loss": 0.3996, + "step": 17088 + }, + { + "epoch": 2.67, + "grad_norm": 19.547293628811232, + "learning_rate": 6.305429326241208e-07, + "loss": 0.4028, + "step": 17089 + }, + { + "epoch": 2.67, + "grad_norm": 21.297110116154766, + "learning_rate": 6.29953711655108e-07, + "loss": 0.3779, + "step": 17090 + }, + { + "epoch": 2.67, + "grad_norm": 19.02544492277749, + "learning_rate": 6.29364757164731e-07, + "loss": 0.4378, + "step": 17091 + }, + { + "epoch": 2.67, + "grad_norm": 31.638128218862505, + "learning_rate": 6.287760691697409e-07, + "loss": 0.3771, + "step": 17092 + }, + { + "epoch": 2.67, + "grad_norm": 28.34857753829879, + "learning_rate": 6.281876476868764e-07, + "loss": 0.4398, + "step": 17093 + }, + { + "epoch": 2.67, + "grad_norm": 21.48143995246555, + "learning_rate": 6.27599492732871e-07, + "loss": 0.3982, + "step": 17094 + }, + { + "epoch": 2.67, + "grad_norm": 34.10834487900786, + "learning_rate": 6.270116043244545e-07, + "loss": 0.4615, + "step": 17095 + }, + { + "epoch": 2.67, + "grad_norm": 20.37226666944277, + "learning_rate": 6.264239824783447e-07, + "loss": 0.4009, + "step": 17096 + }, + { + "epoch": 2.67, + "grad_norm": 18.788523795163485, + "learning_rate": 6.258366272112537e-07, + "loss": 0.4299, + "step": 17097 + }, + { + "epoch": 2.67, + "grad_norm": 34.367802364904556, + "learning_rate": 6.252495385398871e-07, + "loss": 0.4405, + "step": 17098 + }, + { + "epoch": 2.67, + "grad_norm": 19.54238221033468, + "learning_rate": 6.246627164809371e-07, + "loss": 0.4143, + "step": 17099 + }, + { + "epoch": 2.67, + "grad_norm": 14.206419770341999, + "learning_rate": 6.240761610510948e-07, + "loss": 0.4049, + "step": 17100 + }, + { + "epoch": 2.67, + "grad_norm": 25.004906530504943, + "learning_rate": 6.234898722670435e-07, + "loss": 0.4295, + "step": 17101 + }, + { + "epoch": 2.67, + "grad_norm": 17.665223184318045, + "learning_rate": 6.229038501454532e-07, + "loss": 0.4223, + "step": 17102 + }, + { + "epoch": 2.67, + "grad_norm": 27.555947482896414, + "learning_rate": 6.223180947029939e-07, + "loss": 0.4118, + "step": 17103 + }, + { + "epoch": 2.67, + "grad_norm": 36.97469000283389, + 
"learning_rate": 6.217326059563211e-07, + "loss": 0.5406, + "step": 17104 + }, + { + "epoch": 2.67, + "grad_norm": 21.676037536469888, + "learning_rate": 6.211473839220883e-07, + "loss": 0.4801, + "step": 17105 + }, + { + "epoch": 2.67, + "grad_norm": 16.2837532172914, + "learning_rate": 6.205624286169354e-07, + "loss": 0.4184, + "step": 17106 + }, + { + "epoch": 2.67, + "grad_norm": 22.598559802165607, + "learning_rate": 6.199777400575013e-07, + "loss": 0.4292, + "step": 17107 + }, + { + "epoch": 2.67, + "grad_norm": 20.6410971124788, + "learning_rate": 6.193933182604128e-07, + "loss": 0.4302, + "step": 17108 + }, + { + "epoch": 2.67, + "grad_norm": 22.660236184239434, + "learning_rate": 6.188091632422921e-07, + "loss": 0.4313, + "step": 17109 + }, + { + "epoch": 2.67, + "grad_norm": 17.236551590030146, + "learning_rate": 6.182252750197493e-07, + "loss": 0.4422, + "step": 17110 + }, + { + "epoch": 2.67, + "grad_norm": 20.96506350085633, + "learning_rate": 6.17641653609391e-07, + "loss": 0.3998, + "step": 17111 + }, + { + "epoch": 2.67, + "grad_norm": 22.552410013389125, + "learning_rate": 6.170582990278173e-07, + "loss": 0.4117, + "step": 17112 + }, + { + "epoch": 2.67, + "grad_norm": 18.934543313498008, + "learning_rate": 6.16475211291615e-07, + "loss": 0.4282, + "step": 17113 + }, + { + "epoch": 2.67, + "grad_norm": 15.85941118320791, + "learning_rate": 6.158923904173709e-07, + "loss": 0.4317, + "step": 17114 + }, + { + "epoch": 2.67, + "grad_norm": 20.408329756931018, + "learning_rate": 6.153098364216548e-07, + "loss": 0.4434, + "step": 17115 + }, + { + "epoch": 2.67, + "grad_norm": 30.182374382344552, + "learning_rate": 6.147275493210381e-07, + "loss": 0.4152, + "step": 17116 + }, + { + "epoch": 2.67, + "grad_norm": 23.10646288741191, + "learning_rate": 6.141455291320808e-07, + "loss": 0.4617, + "step": 17117 + }, + { + "epoch": 2.67, + "grad_norm": 15.288819508480705, + "learning_rate": 6.135637758713342e-07, + "loss": 0.3848, + "step": 17118 + }, + { + "epoch": 2.67, + "grad_norm": 27.006153999233906, + "learning_rate": 6.129822895553417e-07, + "loss": 0.4295, + "step": 17119 + }, + { + "epoch": 2.67, + "grad_norm": 15.434662383401378, + "learning_rate": 6.124010702006411e-07, + "loss": 0.3879, + "step": 17120 + }, + { + "epoch": 2.67, + "grad_norm": 16.224438716109823, + "learning_rate": 6.118201178237626e-07, + "loss": 0.4242, + "step": 17121 + }, + { + "epoch": 2.67, + "grad_norm": 14.356076925460892, + "learning_rate": 6.112394324412308e-07, + "loss": 0.4398, + "step": 17122 + }, + { + "epoch": 2.67, + "grad_norm": 28.671184323590666, + "learning_rate": 6.106590140695545e-07, + "loss": 0.4515, + "step": 17123 + }, + { + "epoch": 2.67, + "grad_norm": 30.10050385512265, + "learning_rate": 6.100788627252441e-07, + "loss": 0.443, + "step": 17124 + }, + { + "epoch": 2.67, + "grad_norm": 20.642741107107778, + "learning_rate": 6.094989784247973e-07, + "loss": 0.4382, + "step": 17125 + }, + { + "epoch": 2.68, + "grad_norm": 24.19994401930214, + "learning_rate": 6.089193611847066e-07, + "loss": 0.405, + "step": 17126 + }, + { + "epoch": 2.68, + "grad_norm": 24.401677884194676, + "learning_rate": 6.083400110214577e-07, + "loss": 0.4165, + "step": 17127 + }, + { + "epoch": 2.68, + "grad_norm": 22.75780475667396, + "learning_rate": 6.077609279515217e-07, + "loss": 0.4232, + "step": 17128 + }, + { + "epoch": 2.68, + "grad_norm": 25.733366327489033, + "learning_rate": 6.071821119913713e-07, + "loss": 0.4643, + "step": 17129 + }, + { + "epoch": 2.68, + "grad_norm": 18.175979693518443, + 
"learning_rate": 6.066035631574685e-07, + "loss": 0.4021, + "step": 17130 + }, + { + "epoch": 2.68, + "grad_norm": 26.234837500134557, + "learning_rate": 6.060252814662637e-07, + "loss": 0.404, + "step": 17131 + }, + { + "epoch": 2.68, + "grad_norm": 24.97044382449291, + "learning_rate": 6.05447266934206e-07, + "loss": 0.4089, + "step": 17132 + }, + { + "epoch": 2.68, + "grad_norm": 23.005256356697373, + "learning_rate": 6.04869519577731e-07, + "loss": 0.4979, + "step": 17133 + }, + { + "epoch": 2.68, + "grad_norm": 32.22426743259202, + "learning_rate": 6.042920394132712e-07, + "loss": 0.4754, + "step": 17134 + }, + { + "epoch": 2.68, + "grad_norm": 31.007453232056136, + "learning_rate": 6.037148264572512e-07, + "loss": 0.4417, + "step": 17135 + }, + { + "epoch": 2.68, + "grad_norm": 20.632163141931105, + "learning_rate": 6.031378807260823e-07, + "loss": 0.4203, + "step": 17136 + }, + { + "epoch": 2.68, + "grad_norm": 15.153728765022663, + "learning_rate": 6.025612022361771e-07, + "loss": 0.3838, + "step": 17137 + }, + { + "epoch": 2.68, + "grad_norm": 22.110892726310215, + "learning_rate": 6.019847910039334e-07, + "loss": 0.4138, + "step": 17138 + }, + { + "epoch": 2.68, + "grad_norm": 24.509416667543377, + "learning_rate": 6.014086470457448e-07, + "loss": 0.3619, + "step": 17139 + }, + { + "epoch": 2.68, + "grad_norm": 22.1188525284285, + "learning_rate": 6.008327703779948e-07, + "loss": 0.5014, + "step": 17140 + }, + { + "epoch": 2.68, + "grad_norm": 27.94531645440214, + "learning_rate": 6.002571610170627e-07, + "loss": 0.5094, + "step": 17141 + }, + { + "epoch": 2.68, + "grad_norm": 28.45396947262181, + "learning_rate": 5.996818189793207e-07, + "loss": 0.4211, + "step": 17142 + }, + { + "epoch": 2.68, + "grad_norm": 24.533463448691474, + "learning_rate": 5.991067442811272e-07, + "loss": 0.4518, + "step": 17143 + }, + { + "epoch": 2.68, + "grad_norm": 19.161471037321725, + "learning_rate": 5.985319369388376e-07, + "loss": 0.4032, + "step": 17144 + }, + { + "epoch": 2.68, + "grad_norm": 12.837735811080933, + "learning_rate": 5.979573969688001e-07, + "loss": 0.3626, + "step": 17145 + }, + { + "epoch": 2.68, + "grad_norm": 21.65626426244929, + "learning_rate": 5.973831243873551e-07, + "loss": 0.4867, + "step": 17146 + }, + { + "epoch": 2.68, + "grad_norm": 15.804789619662726, + "learning_rate": 5.96809119210835e-07, + "loss": 0.3739, + "step": 17147 + }, + { + "epoch": 2.68, + "grad_norm": 25.26088088300529, + "learning_rate": 5.96235381455561e-07, + "loss": 0.4652, + "step": 17148 + }, + { + "epoch": 2.68, + "grad_norm": 32.06862106091011, + "learning_rate": 5.956619111378514e-07, + "loss": 0.468, + "step": 17149 + }, + { + "epoch": 2.68, + "grad_norm": 18.802936646778313, + "learning_rate": 5.950887082740153e-07, + "loss": 0.4661, + "step": 17150 + }, + { + "epoch": 2.68, + "grad_norm": 31.468231054836213, + "learning_rate": 5.945157728803563e-07, + "loss": 0.4134, + "step": 17151 + }, + { + "epoch": 2.68, + "grad_norm": 31.13163359167715, + "learning_rate": 5.93943104973167e-07, + "loss": 0.5141, + "step": 17152 + }, + { + "epoch": 2.68, + "grad_norm": 17.594273805020006, + "learning_rate": 5.93370704568732e-07, + "loss": 0.4046, + "step": 17153 + }, + { + "epoch": 2.68, + "grad_norm": 22.04453531877571, + "learning_rate": 5.927985716833317e-07, + "loss": 0.4324, + "step": 17154 + }, + { + "epoch": 2.68, + "grad_norm": 29.099627139363037, + "learning_rate": 5.922267063332376e-07, + "loss": 0.454, + "step": 17155 + }, + { + "epoch": 2.68, + "grad_norm": 18.01097826282531, + 
"learning_rate": 5.916551085347134e-07, + "loss": 0.4073, + "step": 17156 + }, + { + "epoch": 2.68, + "grad_norm": 19.356552635662474, + "learning_rate": 5.910837783040113e-07, + "loss": 0.4202, + "step": 17157 + }, + { + "epoch": 2.68, + "grad_norm": 26.287884465615402, + "learning_rate": 5.905127156573842e-07, + "loss": 0.381, + "step": 17158 + }, + { + "epoch": 2.68, + "grad_norm": 22.636182347838588, + "learning_rate": 5.899419206110702e-07, + "loss": 0.4357, + "step": 17159 + }, + { + "epoch": 2.68, + "grad_norm": 19.067440509782113, + "learning_rate": 5.89371393181305e-07, + "loss": 0.4253, + "step": 17160 + }, + { + "epoch": 2.68, + "grad_norm": 22.297473449668768, + "learning_rate": 5.888011333843113e-07, + "loss": 0.4758, + "step": 17161 + }, + { + "epoch": 2.68, + "grad_norm": 24.258932992607946, + "learning_rate": 5.882311412363073e-07, + "loss": 0.4643, + "step": 17162 + }, + { + "epoch": 2.68, + "grad_norm": 18.8381388852069, + "learning_rate": 5.876614167535044e-07, + "loss": 0.504, + "step": 17163 + }, + { + "epoch": 2.68, + "grad_norm": 16.209615633281366, + "learning_rate": 5.870919599521052e-07, + "loss": 0.4723, + "step": 17164 + }, + { + "epoch": 2.68, + "grad_norm": 21.734926843390635, + "learning_rate": 5.865227708483034e-07, + "loss": 0.4168, + "step": 17165 + }, + { + "epoch": 2.68, + "grad_norm": 20.323562420114825, + "learning_rate": 5.859538494582895e-07, + "loss": 0.4534, + "step": 17166 + }, + { + "epoch": 2.68, + "grad_norm": 26.53938933118751, + "learning_rate": 5.853851957982381e-07, + "loss": 0.4255, + "step": 17167 + }, + { + "epoch": 2.68, + "grad_norm": 19.755254837116833, + "learning_rate": 5.848168098843265e-07, + "loss": 0.3997, + "step": 17168 + }, + { + "epoch": 2.68, + "grad_norm": 19.590963999854182, + "learning_rate": 5.842486917327162e-07, + "loss": 0.4001, + "step": 17169 + }, + { + "epoch": 2.68, + "grad_norm": 27.10782022277645, + "learning_rate": 5.836808413595641e-07, + "loss": 0.4644, + "step": 17170 + }, + { + "epoch": 2.68, + "grad_norm": 29.15008667856092, + "learning_rate": 5.831132587810228e-07, + "loss": 0.4219, + "step": 17171 + }, + { + "epoch": 2.68, + "grad_norm": 27.068763426568403, + "learning_rate": 5.825459440132308e-07, + "loss": 0.4718, + "step": 17172 + }, + { + "epoch": 2.68, + "grad_norm": 20.258163998574254, + "learning_rate": 5.819788970723217e-07, + "loss": 0.4259, + "step": 17173 + }, + { + "epoch": 2.68, + "grad_norm": 25.124331967762906, + "learning_rate": 5.814121179744248e-07, + "loss": 0.4675, + "step": 17174 + }, + { + "epoch": 2.68, + "grad_norm": 17.22273095093739, + "learning_rate": 5.80845606735656e-07, + "loss": 0.3965, + "step": 17175 + }, + { + "epoch": 2.68, + "grad_norm": 20.882530648626766, + "learning_rate": 5.802793633721304e-07, + "loss": 0.5005, + "step": 17176 + }, + { + "epoch": 2.68, + "grad_norm": 22.156578297933518, + "learning_rate": 5.797133878999484e-07, + "loss": 0.5105, + "step": 17177 + }, + { + "epoch": 2.68, + "grad_norm": 16.17856966759336, + "learning_rate": 5.791476803352058e-07, + "loss": 0.3771, + "step": 17178 + }, + { + "epoch": 2.68, + "grad_norm": 23.08959148490535, + "learning_rate": 5.785822406939934e-07, + "loss": 0.4779, + "step": 17179 + }, + { + "epoch": 2.68, + "grad_norm": 33.8620337184854, + "learning_rate": 5.780170689923902e-07, + "loss": 0.477, + "step": 17180 + }, + { + "epoch": 2.68, + "grad_norm": 21.712296244686986, + "learning_rate": 5.774521652464715e-07, + "loss": 0.4023, + "step": 17181 + }, + { + "epoch": 2.68, + "grad_norm": 16.410673496091725, + 
"learning_rate": 5.768875294722987e-07, + "loss": 0.4226, + "step": 17182 + }, + { + "epoch": 2.68, + "grad_norm": 30.004939134636484, + "learning_rate": 5.763231616859333e-07, + "loss": 0.4388, + "step": 17183 + }, + { + "epoch": 2.68, + "grad_norm": 20.166109351432183, + "learning_rate": 5.757590619034236e-07, + "loss": 0.4416, + "step": 17184 + }, + { + "epoch": 2.68, + "grad_norm": 19.43622743482468, + "learning_rate": 5.751952301408148e-07, + "loss": 0.4666, + "step": 17185 + }, + { + "epoch": 2.68, + "grad_norm": 12.429942193469923, + "learning_rate": 5.746316664141394e-07, + "loss": 0.3997, + "step": 17186 + }, + { + "epoch": 2.68, + "grad_norm": 16.667216781736396, + "learning_rate": 5.740683707394256e-07, + "loss": 0.3777, + "step": 17187 + }, + { + "epoch": 2.68, + "grad_norm": 19.43765278814437, + "learning_rate": 5.735053431326931e-07, + "loss": 0.4392, + "step": 17188 + }, + { + "epoch": 2.68, + "grad_norm": 28.492888375320824, + "learning_rate": 5.729425836099556e-07, + "loss": 0.4162, + "step": 17189 + }, + { + "epoch": 2.69, + "grad_norm": 22.973017063949513, + "learning_rate": 5.723800921872147e-07, + "loss": 0.3937, + "step": 17190 + }, + { + "epoch": 2.69, + "grad_norm": 21.098068979821022, + "learning_rate": 5.718178688804699e-07, + "loss": 0.4462, + "step": 17191 + }, + { + "epoch": 2.69, + "grad_norm": 21.438251553087387, + "learning_rate": 5.712559137057094e-07, + "loss": 0.4125, + "step": 17192 + }, + { + "epoch": 2.69, + "grad_norm": 21.594294559109997, + "learning_rate": 5.706942266789162e-07, + "loss": 0.4147, + "step": 17193 + }, + { + "epoch": 2.69, + "grad_norm": 26.007051574218718, + "learning_rate": 5.701328078160606e-07, + "loss": 0.4217, + "step": 17194 + }, + { + "epoch": 2.69, + "grad_norm": 28.82472530363959, + "learning_rate": 5.695716571331134e-07, + "loss": 0.4465, + "step": 17195 + }, + { + "epoch": 2.69, + "grad_norm": 21.328374989759716, + "learning_rate": 5.690107746460316e-07, + "loss": 0.4311, + "step": 17196 + }, + { + "epoch": 2.69, + "grad_norm": 28.769635109024442, + "learning_rate": 5.684501603707671e-07, + "loss": 0.4654, + "step": 17197 + }, + { + "epoch": 2.69, + "grad_norm": 33.43446302437116, + "learning_rate": 5.678898143232614e-07, + "loss": 0.4013, + "step": 17198 + }, + { + "epoch": 2.69, + "grad_norm": 18.164500736159326, + "learning_rate": 5.673297365194508e-07, + "loss": 0.4557, + "step": 17199 + }, + { + "epoch": 2.69, + "grad_norm": 23.908793173259525, + "learning_rate": 5.667699269752658e-07, + "loss": 0.5235, + "step": 17200 + }, + { + "epoch": 2.69, + "grad_norm": 17.83536273800507, + "learning_rate": 5.66210385706627e-07, + "loss": 0.3905, + "step": 17201 + }, + { + "epoch": 2.69, + "grad_norm": 24.226073741441912, + "learning_rate": 5.65651112729445e-07, + "loss": 0.4059, + "step": 17202 + }, + { + "epoch": 2.69, + "grad_norm": 23.18215302873196, + "learning_rate": 5.650921080596261e-07, + "loss": 0.4531, + "step": 17203 + }, + { + "epoch": 2.69, + "grad_norm": 26.992592838627985, + "learning_rate": 5.645333717130685e-07, + "loss": 0.3939, + "step": 17204 + }, + { + "epoch": 2.69, + "grad_norm": 24.218793059280017, + "learning_rate": 5.63974903705663e-07, + "loss": 0.4742, + "step": 17205 + }, + { + "epoch": 2.69, + "grad_norm": 16.178349945725383, + "learning_rate": 5.634167040532922e-07, + "loss": 0.4189, + "step": 17206 + }, + { + "epoch": 2.69, + "grad_norm": 38.621292027323015, + "learning_rate": 5.628587727718282e-07, + "loss": 0.4737, + "step": 17207 + }, + { + "epoch": 2.69, + "grad_norm": 24.099672147540957, + 
"learning_rate": 5.623011098771391e-07, + "loss": 0.4318, + "step": 17208 + }, + { + "epoch": 2.69, + "grad_norm": 22.830996172230464, + "learning_rate": 5.617437153850868e-07, + "loss": 0.4269, + "step": 17209 + }, + { + "epoch": 2.69, + "grad_norm": 25.3327741915449, + "learning_rate": 5.611865893115243e-07, + "loss": 0.4992, + "step": 17210 + }, + { + "epoch": 2.69, + "grad_norm": 17.42511808120721, + "learning_rate": 5.60629731672292e-07, + "loss": 0.4356, + "step": 17211 + }, + { + "epoch": 2.69, + "grad_norm": 30.328067335322576, + "learning_rate": 5.600731424832273e-07, + "loss": 0.4427, + "step": 17212 + }, + { + "epoch": 2.69, + "grad_norm": 13.915881625417073, + "learning_rate": 5.595168217601599e-07, + "loss": 0.4896, + "step": 17213 + }, + { + "epoch": 2.69, + "grad_norm": 19.41446837647619, + "learning_rate": 5.589607695189136e-07, + "loss": 0.401, + "step": 17214 + }, + { + "epoch": 2.69, + "grad_norm": 21.635645349931963, + "learning_rate": 5.584049857752993e-07, + "loss": 0.4354, + "step": 17215 + }, + { + "epoch": 2.69, + "grad_norm": 68.99571866595082, + "learning_rate": 5.578494705451232e-07, + "loss": 0.5034, + "step": 17216 + }, + { + "epoch": 2.69, + "grad_norm": 18.323278459515997, + "learning_rate": 5.572942238441847e-07, + "loss": 0.3747, + "step": 17217 + }, + { + "epoch": 2.69, + "grad_norm": 22.21319699409457, + "learning_rate": 5.567392456882758e-07, + "loss": 0.4258, + "step": 17218 + }, + { + "epoch": 2.69, + "grad_norm": 26.219894182453054, + "learning_rate": 5.561845360931784e-07, + "loss": 0.4587, + "step": 17219 + }, + { + "epoch": 2.69, + "grad_norm": 24.46678305962246, + "learning_rate": 5.556300950746684e-07, + "loss": 0.409, + "step": 17220 + }, + { + "epoch": 2.69, + "grad_norm": 26.733542125577035, + "learning_rate": 5.550759226485126e-07, + "loss": 0.508, + "step": 17221 + }, + { + "epoch": 2.69, + "grad_norm": 12.443719254239705, + "learning_rate": 5.545220188304723e-07, + "loss": 0.3717, + "step": 17222 + }, + { + "epoch": 2.69, + "grad_norm": 19.11651088669281, + "learning_rate": 5.539683836363019e-07, + "loss": 0.4335, + "step": 17223 + }, + { + "epoch": 2.69, + "grad_norm": 21.367746253091465, + "learning_rate": 5.534150170817431e-07, + "loss": 0.3919, + "step": 17224 + }, + { + "epoch": 2.69, + "grad_norm": 23.107033985613825, + "learning_rate": 5.52861919182538e-07, + "loss": 0.5067, + "step": 17225 + }, + { + "epoch": 2.69, + "grad_norm": 25.81298360590927, + "learning_rate": 5.523090899544104e-07, + "loss": 0.3868, + "step": 17226 + }, + { + "epoch": 2.69, + "grad_norm": 22.31718210774328, + "learning_rate": 5.517565294130877e-07, + "loss": 0.5, + "step": 17227 + }, + { + "epoch": 2.69, + "grad_norm": 36.27488346861624, + "learning_rate": 5.51204237574281e-07, + "loss": 0.448, + "step": 17228 + }, + { + "epoch": 2.69, + "grad_norm": 21.9085475282073, + "learning_rate": 5.506522144536975e-07, + "loss": 0.4176, + "step": 17229 + }, + { + "epoch": 2.69, + "grad_norm": 27.731048607476463, + "learning_rate": 5.501004600670401e-07, + "loss": 0.4536, + "step": 17230 + }, + { + "epoch": 2.69, + "grad_norm": 16.953519242620448, + "learning_rate": 5.495489744299965e-07, + "loss": 0.4309, + "step": 17231 + }, + { + "epoch": 2.69, + "grad_norm": 21.110863016909242, + "learning_rate": 5.489977575582506e-07, + "loss": 0.3961, + "step": 17232 + }, + { + "epoch": 2.69, + "grad_norm": 18.042106379043073, + "learning_rate": 5.4844680946748e-07, + "loss": 0.3987, + "step": 17233 + }, + { + "epoch": 2.69, + "grad_norm": 19.635019648119307, + "learning_rate": 
5.478961301733531e-07, + "loss": 0.4655, + "step": 17234 + }, + { + "epoch": 2.69, + "grad_norm": 23.573123024646776, + "learning_rate": 5.473457196915332e-07, + "loss": 0.4032, + "step": 17235 + }, + { + "epoch": 2.69, + "grad_norm": 21.71383649078869, + "learning_rate": 5.467955780376688e-07, + "loss": 0.424, + "step": 17236 + }, + { + "epoch": 2.69, + "grad_norm": 32.21400051926023, + "learning_rate": 5.462457052274084e-07, + "loss": 0.4757, + "step": 17237 + }, + { + "epoch": 2.69, + "grad_norm": 19.520777585629546, + "learning_rate": 5.456961012763906e-07, + "loss": 0.4792, + "step": 17238 + }, + { + "epoch": 2.69, + "grad_norm": 19.843145934995647, + "learning_rate": 5.451467662002452e-07, + "loss": 0.4313, + "step": 17239 + }, + { + "epoch": 2.69, + "grad_norm": 30.24670527999096, + "learning_rate": 5.445977000145952e-07, + "loss": 0.4449, + "step": 17240 + }, + { + "epoch": 2.69, + "grad_norm": 20.887309504627083, + "learning_rate": 5.440489027350548e-07, + "loss": 0.4383, + "step": 17241 + }, + { + "epoch": 2.69, + "grad_norm": 26.896255175288672, + "learning_rate": 5.435003743772305e-07, + "loss": 0.4563, + "step": 17242 + }, + { + "epoch": 2.69, + "grad_norm": 15.937445373067547, + "learning_rate": 5.429521149567263e-07, + "loss": 0.4015, + "step": 17243 + }, + { + "epoch": 2.69, + "grad_norm": 17.625888676085836, + "learning_rate": 5.424041244891298e-07, + "loss": 0.397, + "step": 17244 + }, + { + "epoch": 2.69, + "grad_norm": 25.574446228644778, + "learning_rate": 5.418564029900286e-07, + "loss": 0.5172, + "step": 17245 + }, + { + "epoch": 2.69, + "grad_norm": 16.00128933528401, + "learning_rate": 5.413089504749979e-07, + "loss": 0.4048, + "step": 17246 + }, + { + "epoch": 2.69, + "grad_norm": 21.8865913729184, + "learning_rate": 5.407617669596066e-07, + "loss": 0.4687, + "step": 17247 + }, + { + "epoch": 2.69, + "grad_norm": 26.791511789683064, + "learning_rate": 5.402148524594198e-07, + "loss": 0.4571, + "step": 17248 + }, + { + "epoch": 2.69, + "grad_norm": 19.402168720117135, + "learning_rate": 5.396682069899861e-07, + "loss": 0.3675, + "step": 17249 + }, + { + "epoch": 2.69, + "grad_norm": 18.038101778678637, + "learning_rate": 5.391218305668566e-07, + "loss": 0.4146, + "step": 17250 + }, + { + "epoch": 2.69, + "grad_norm": 23.849226415182248, + "learning_rate": 5.385757232055655e-07, + "loss": 0.4134, + "step": 17251 + }, + { + "epoch": 2.69, + "grad_norm": 20.840673754751307, + "learning_rate": 5.380298849216481e-07, + "loss": 0.4323, + "step": 17252 + }, + { + "epoch": 2.69, + "grad_norm": 18.10852311346565, + "learning_rate": 5.374843157306253e-07, + "loss": 0.392, + "step": 17253 + }, + { + "epoch": 2.7, + "grad_norm": 18.352512086178507, + "learning_rate": 5.369390156480126e-07, + "loss": 0.4611, + "step": 17254 + }, + { + "epoch": 2.7, + "grad_norm": 13.725599810451797, + "learning_rate": 5.363939846893184e-07, + "loss": 0.4011, + "step": 17255 + }, + { + "epoch": 2.7, + "grad_norm": 21.855961721961, + "learning_rate": 5.35849222870044e-07, + "loss": 0.465, + "step": 17256 + }, + { + "epoch": 2.7, + "grad_norm": 28.07508804166514, + "learning_rate": 5.353047302056802e-07, + "loss": 0.4646, + "step": 17257 + }, + { + "epoch": 2.7, + "grad_norm": 15.49336396481214, + "learning_rate": 5.347605067117134e-07, + "loss": 0.3989, + "step": 17258 + }, + { + "epoch": 2.7, + "grad_norm": 20.49373117952393, + "learning_rate": 5.342165524036224e-07, + "loss": 0.4778, + "step": 17259 + }, + { + "epoch": 2.7, + "grad_norm": 28.8572480771274, + "learning_rate": 
5.33672867296875e-07, + "loss": 0.4625, + "step": 17260 + }, + { + "epoch": 2.7, + "grad_norm": 22.662540146654848, + "learning_rate": 5.331294514069318e-07, + "loss": 0.4573, + "step": 17261 + }, + { + "epoch": 2.7, + "grad_norm": 27.067351887518708, + "learning_rate": 5.325863047492496e-07, + "loss": 0.4378, + "step": 17262 + }, + { + "epoch": 2.7, + "grad_norm": 23.64252606732429, + "learning_rate": 5.320434273392738e-07, + "loss": 0.4271, + "step": 17263 + }, + { + "epoch": 2.7, + "grad_norm": 24.61442315262329, + "learning_rate": 5.315008191924464e-07, + "loss": 0.4566, + "step": 17264 + }, + { + "epoch": 2.7, + "grad_norm": 21.27401069011807, + "learning_rate": 5.309584803241973e-07, + "loss": 0.4084, + "step": 17265 + }, + { + "epoch": 2.7, + "grad_norm": 27.305309391170642, + "learning_rate": 5.304164107499477e-07, + "loss": 0.4578, + "step": 17266 + }, + { + "epoch": 2.7, + "grad_norm": 22.952895964459437, + "learning_rate": 5.29874610485116e-07, + "loss": 0.379, + "step": 17267 + }, + { + "epoch": 2.7, + "grad_norm": 22.262228147157717, + "learning_rate": 5.293330795451112e-07, + "loss": 0.4231, + "step": 17268 + }, + { + "epoch": 2.7, + "grad_norm": 25.623663939559407, + "learning_rate": 5.287918179453344e-07, + "loss": 0.5229, + "step": 17269 + }, + { + "epoch": 2.7, + "grad_norm": 22.013308274250857, + "learning_rate": 5.282508257011764e-07, + "loss": 0.4515, + "step": 17270 + }, + { + "epoch": 2.7, + "grad_norm": 16.646344360743818, + "learning_rate": 5.27710102828024e-07, + "loss": 0.3797, + "step": 17271 + }, + { + "epoch": 2.7, + "grad_norm": 26.91870644510296, + "learning_rate": 5.271696493412548e-07, + "loss": 0.4468, + "step": 17272 + }, + { + "epoch": 2.7, + "grad_norm": 18.81347788521876, + "learning_rate": 5.266294652562409e-07, + "loss": 0.4902, + "step": 17273 + }, + { + "epoch": 2.7, + "grad_norm": 18.685796437551218, + "learning_rate": 5.260895505883423e-07, + "loss": 0.4316, + "step": 17274 + }, + { + "epoch": 2.7, + "grad_norm": 18.414744642470925, + "learning_rate": 5.255499053529145e-07, + "loss": 0.4286, + "step": 17275 + }, + { + "epoch": 2.7, + "grad_norm": 34.59554654223095, + "learning_rate": 5.250105295653052e-07, + "loss": 0.3948, + "step": 17276 + }, + { + "epoch": 2.7, + "grad_norm": 31.880595986161005, + "learning_rate": 5.244714232408544e-07, + "loss": 0.463, + "step": 17277 + }, + { + "epoch": 2.7, + "grad_norm": 19.55392232655942, + "learning_rate": 5.23932586394893e-07, + "loss": 0.4426, + "step": 17278 + }, + { + "epoch": 2.7, + "grad_norm": 13.202779860189478, + "learning_rate": 5.233940190427456e-07, + "loss": 0.4516, + "step": 17279 + }, + { + "epoch": 2.7, + "grad_norm": 20.452537788284626, + "learning_rate": 5.228557211997276e-07, + "loss": 0.3993, + "step": 17280 + }, + { + "epoch": 2.7, + "grad_norm": 28.07642177092423, + "learning_rate": 5.223176928811502e-07, + "loss": 0.4256, + "step": 17281 + }, + { + "epoch": 2.7, + "grad_norm": 13.108226470057977, + "learning_rate": 5.217799341023122e-07, + "loss": 0.3946, + "step": 17282 + }, + { + "epoch": 2.7, + "grad_norm": 19.841880061435862, + "learning_rate": 5.212424448785092e-07, + "loss": 0.4574, + "step": 17283 + }, + { + "epoch": 2.7, + "grad_norm": 27.884641124032992, + "learning_rate": 5.207052252250266e-07, + "loss": 0.4703, + "step": 17284 + }, + { + "epoch": 2.7, + "grad_norm": 18.883343293380833, + "learning_rate": 5.201682751571402e-07, + "loss": 0.4807, + "step": 17285 + }, + { + "epoch": 2.7, + "grad_norm": 18.402956927065283, + "learning_rate": 5.196315946901254e-07, + "loss": 
0.4299, + "step": 17286 + }, + { + "epoch": 2.7, + "grad_norm": 19.7772194234793, + "learning_rate": 5.1909518383924e-07, + "loss": 0.4514, + "step": 17287 + }, + { + "epoch": 2.7, + "grad_norm": 4.206078323145508, + "learning_rate": 5.185590426197406e-07, + "loss": 0.4958, + "step": 17288 + }, + { + "epoch": 2.7, + "grad_norm": 24.493932784104928, + "learning_rate": 5.180231710468775e-07, + "loss": 0.4336, + "step": 17289 + }, + { + "epoch": 2.7, + "grad_norm": 20.3163464168877, + "learning_rate": 5.174875691358894e-07, + "loss": 0.4442, + "step": 17290 + }, + { + "epoch": 2.7, + "grad_norm": 19.3136152806508, + "learning_rate": 5.169522369020052e-07, + "loss": 0.4246, + "step": 17291 + }, + { + "epoch": 2.7, + "grad_norm": 23.089451561457448, + "learning_rate": 5.164171743604529e-07, + "loss": 0.5384, + "step": 17292 + }, + { + "epoch": 2.7, + "grad_norm": 21.51344816114946, + "learning_rate": 5.158823815264491e-07, + "loss": 0.445, + "step": 17293 + }, + { + "epoch": 2.7, + "grad_norm": 30.97963196097292, + "learning_rate": 5.153478584152028e-07, + "loss": 0.4123, + "step": 17294 + }, + { + "epoch": 2.7, + "grad_norm": 22.122142329044262, + "learning_rate": 5.148136050419139e-07, + "loss": 0.4176, + "step": 17295 + }, + { + "epoch": 2.7, + "grad_norm": 17.935988362513783, + "learning_rate": 5.14279621421776e-07, + "loss": 0.3834, + "step": 17296 + }, + { + "epoch": 2.7, + "grad_norm": 28.94540360780038, + "learning_rate": 5.13745907569978e-07, + "loss": 0.5192, + "step": 17297 + }, + { + "epoch": 2.7, + "grad_norm": 22.993483268059627, + "learning_rate": 5.132124635016977e-07, + "loss": 0.4088, + "step": 17298 + }, + { + "epoch": 2.7, + "grad_norm": 33.0552618562427, + "learning_rate": 5.126792892321064e-07, + "loss": 0.4924, + "step": 17299 + }, + { + "epoch": 2.7, + "grad_norm": 24.015626259569817, + "learning_rate": 5.121463847763641e-07, + "loss": 0.442, + "step": 17300 + }, + { + "epoch": 2.7, + "grad_norm": 19.004528866612098, + "learning_rate": 5.116137501496288e-07, + "loss": 0.4004, + "step": 17301 + }, + { + "epoch": 2.7, + "grad_norm": 30.554486900496265, + "learning_rate": 5.110813853670482e-07, + "loss": 0.451, + "step": 17302 + }, + { + "epoch": 2.7, + "grad_norm": 15.652322624921526, + "learning_rate": 5.105492904437636e-07, + "loss": 0.4929, + "step": 17303 + }, + { + "epoch": 2.7, + "grad_norm": 20.202941979260043, + "learning_rate": 5.100174653949031e-07, + "loss": 0.4504, + "step": 17304 + }, + { + "epoch": 2.7, + "grad_norm": 22.288963161211036, + "learning_rate": 5.094859102355953e-07, + "loss": 0.3748, + "step": 17305 + }, + { + "epoch": 2.7, + "grad_norm": 20.11259556259227, + "learning_rate": 5.089546249809584e-07, + "loss": 0.4413, + "step": 17306 + }, + { + "epoch": 2.7, + "grad_norm": 39.29029262048367, + "learning_rate": 5.08423609646097e-07, + "loss": 0.4246, + "step": 17307 + }, + { + "epoch": 2.7, + "grad_norm": 16.690272692919102, + "learning_rate": 5.078928642461178e-07, + "loss": 0.4498, + "step": 17308 + }, + { + "epoch": 2.7, + "grad_norm": 21.085286966064466, + "learning_rate": 5.073623887961121e-07, + "loss": 0.4678, + "step": 17309 + }, + { + "epoch": 2.7, + "grad_norm": 20.842060203629657, + "learning_rate": 5.068321833111667e-07, + "loss": 0.4252, + "step": 17310 + }, + { + "epoch": 2.7, + "grad_norm": 21.359827984218143, + "learning_rate": 5.063022478063617e-07, + "loss": 0.3983, + "step": 17311 + }, + { + "epoch": 2.7, + "grad_norm": 24.35831420844723, + "learning_rate": 5.057725822967663e-07, + "loss": 0.4112, + "step": 17312 + }, + { + 
"epoch": 2.7, + "grad_norm": 18.621102548928857, + "learning_rate": 5.05243186797445e-07, + "loss": 0.4945, + "step": 17313 + }, + { + "epoch": 2.7, + "grad_norm": 19.109947128184082, + "learning_rate": 5.047140613234524e-07, + "loss": 0.3994, + "step": 17314 + }, + { + "epoch": 2.7, + "grad_norm": 19.81475669737337, + "learning_rate": 5.041852058898389e-07, + "loss": 0.4711, + "step": 17315 + }, + { + "epoch": 2.7, + "grad_norm": 22.809144760978814, + "learning_rate": 5.036566205116422e-07, + "loss": 0.4196, + "step": 17316 + }, + { + "epoch": 2.7, + "grad_norm": 28.800104107147042, + "learning_rate": 5.031283052038949e-07, + "loss": 0.4705, + "step": 17317 + }, + { + "epoch": 2.71, + "grad_norm": 21.571628230613605, + "learning_rate": 5.026002599816248e-07, + "loss": 0.4439, + "step": 17318 + }, + { + "epoch": 2.71, + "grad_norm": 13.640442659638463, + "learning_rate": 5.02072484859848e-07, + "loss": 0.4091, + "step": 17319 + }, + { + "epoch": 2.71, + "grad_norm": 23.543029136953702, + "learning_rate": 5.015449798535721e-07, + "loss": 0.4187, + "step": 17320 + }, + { + "epoch": 2.71, + "grad_norm": 26.946093871726127, + "learning_rate": 5.010177449778009e-07, + "loss": 0.5137, + "step": 17321 + }, + { + "epoch": 2.71, + "grad_norm": 28.47027117143808, + "learning_rate": 5.004907802475278e-07, + "loss": 0.4929, + "step": 17322 + }, + { + "epoch": 2.71, + "grad_norm": 20.634373193810383, + "learning_rate": 4.999640856777421e-07, + "loss": 0.4675, + "step": 17323 + }, + { + "epoch": 2.71, + "grad_norm": 22.77781522887984, + "learning_rate": 4.994376612834185e-07, + "loss": 0.4162, + "step": 17324 + }, + { + "epoch": 2.71, + "grad_norm": 22.850471273597794, + "learning_rate": 4.989115070795303e-07, + "loss": 0.4495, + "step": 17325 + }, + { + "epoch": 2.71, + "grad_norm": 23.957795243371162, + "learning_rate": 4.983856230810402e-07, + "loss": 0.5059, + "step": 17326 + }, + { + "epoch": 2.71, + "grad_norm": 21.66063598569354, + "learning_rate": 4.978600093029062e-07, + "loss": 0.4327, + "step": 17327 + }, + { + "epoch": 2.71, + "grad_norm": 26.61425020913945, + "learning_rate": 4.973346657600752e-07, + "loss": 0.4447, + "step": 17328 + }, + { + "epoch": 2.71, + "grad_norm": 22.46283718401621, + "learning_rate": 4.968095924674854e-07, + "loss": 0.4474, + "step": 17329 + }, + { + "epoch": 2.71, + "grad_norm": 25.523382559309375, + "learning_rate": 4.962847894400724e-07, + "loss": 0.4229, + "step": 17330 + }, + { + "epoch": 2.71, + "grad_norm": 39.79228124626586, + "learning_rate": 4.95760256692761e-07, + "loss": 0.5159, + "step": 17331 + }, + { + "epoch": 2.71, + "grad_norm": 25.669889569488955, + "learning_rate": 4.952359942404672e-07, + "loss": 0.4275, + "step": 17332 + }, + { + "epoch": 2.71, + "grad_norm": 22.177408284016053, + "learning_rate": 4.947120020981034e-07, + "loss": 0.3933, + "step": 17333 + }, + { + "epoch": 2.71, + "grad_norm": 15.664859572605161, + "learning_rate": 4.941882802805675e-07, + "loss": 0.4255, + "step": 17334 + }, + { + "epoch": 2.71, + "grad_norm": 24.90021194727825, + "learning_rate": 4.936648288027568e-07, + "loss": 0.4106, + "step": 17335 + }, + { + "epoch": 2.71, + "grad_norm": 18.637261371792675, + "learning_rate": 4.931416476795592e-07, + "loss": 0.4176, + "step": 17336 + }, + { + "epoch": 2.71, + "grad_norm": 20.438985157442556, + "learning_rate": 4.926187369258495e-07, + "loss": 0.4423, + "step": 17337 + }, + { + "epoch": 2.71, + "grad_norm": 18.940822931403154, + "learning_rate": 4.920960965565036e-07, + "loss": 0.4107, + "step": 17338 + }, + { + "epoch": 
2.71, + "grad_norm": 21.160953824999524, + "learning_rate": 4.915737265863807e-07, + "loss": 0.4597, + "step": 17339 + }, + { + "epoch": 2.71, + "grad_norm": 14.406864437603977, + "learning_rate": 4.9105162703034e-07, + "loss": 0.3596, + "step": 17340 + }, + { + "epoch": 2.71, + "grad_norm": 23.127785650216396, + "learning_rate": 4.905297979032264e-07, + "loss": 0.4405, + "step": 17341 + }, + { + "epoch": 2.71, + "grad_norm": 12.541078316342178, + "learning_rate": 4.900082392198835e-07, + "loss": 0.4054, + "step": 17342 + }, + { + "epoch": 2.71, + "grad_norm": 15.943470575163786, + "learning_rate": 4.894869509951449e-07, + "loss": 0.3533, + "step": 17343 + }, + { + "epoch": 2.71, + "grad_norm": 22.57018770795412, + "learning_rate": 4.889659332438334e-07, + "loss": 0.4153, + "step": 17344 + }, + { + "epoch": 2.71, + "grad_norm": 24.409024522917367, + "learning_rate": 4.884451859807648e-07, + "loss": 0.4465, + "step": 17345 + }, + { + "epoch": 2.71, + "grad_norm": 16.92758619534769, + "learning_rate": 4.879247092207518e-07, + "loss": 0.4631, + "step": 17346 + }, + { + "epoch": 2.71, + "grad_norm": 24.209903256734332, + "learning_rate": 4.874045029785957e-07, + "loss": 0.41, + "step": 17347 + }, + { + "epoch": 2.71, + "grad_norm": 23.104014462954876, + "learning_rate": 4.868845672690937e-07, + "loss": 0.4574, + "step": 17348 + }, + { + "epoch": 2.71, + "grad_norm": 25.669795151158045, + "learning_rate": 4.863649021070261e-07, + "loss": 0.4538, + "step": 17349 + }, + { + "epoch": 2.71, + "grad_norm": 17.90745147532213, + "learning_rate": 4.858455075071766e-07, + "loss": 0.4072, + "step": 17350 + }, + { + "epoch": 2.71, + "grad_norm": 18.577945008540926, + "learning_rate": 4.853263834843136e-07, + "loss": 0.3928, + "step": 17351 + }, + { + "epoch": 2.71, + "grad_norm": 22.459120343549202, + "learning_rate": 4.848075300532051e-07, + "loss": 0.4506, + "step": 17352 + }, + { + "epoch": 2.71, + "grad_norm": 16.599486700094594, + "learning_rate": 4.842889472286039e-07, + "loss": 0.4168, + "step": 17353 + }, + { + "epoch": 2.71, + "grad_norm": 20.776638771370983, + "learning_rate": 4.837706350252569e-07, + "loss": 0.4259, + "step": 17354 + }, + { + "epoch": 2.71, + "grad_norm": 15.100395334157533, + "learning_rate": 4.832525934579058e-07, + "loss": 0.399, + "step": 17355 + }, + { + "epoch": 2.71, + "grad_norm": 33.67534898065224, + "learning_rate": 4.827348225412864e-07, + "loss": 0.4403, + "step": 17356 + }, + { + "epoch": 2.71, + "grad_norm": 30.387288894532016, + "learning_rate": 4.822173222901194e-07, + "loss": 0.4431, + "step": 17357 + }, + { + "epoch": 2.71, + "grad_norm": 15.940554654277092, + "learning_rate": 4.81700092719124e-07, + "loss": 0.4197, + "step": 17358 + }, + { + "epoch": 2.71, + "grad_norm": 26.50262252296206, + "learning_rate": 4.811831338430095e-07, + "loss": 0.3843, + "step": 17359 + }, + { + "epoch": 2.71, + "grad_norm": 23.92135082594988, + "learning_rate": 4.806664456764787e-07, + "loss": 0.4721, + "step": 17360 + }, + { + "epoch": 2.71, + "grad_norm": 28.6315027965235, + "learning_rate": 4.801500282342264e-07, + "loss": 0.4465, + "step": 17361 + }, + { + "epoch": 2.71, + "grad_norm": 27.82184890108278, + "learning_rate": 4.796338815309387e-07, + "loss": 0.4856, + "step": 17362 + }, + { + "epoch": 2.71, + "grad_norm": 23.897452871933826, + "learning_rate": 4.791180055812928e-07, + "loss": 0.4594, + "step": 17363 + }, + { + "epoch": 2.71, + "grad_norm": 24.95841888144139, + "learning_rate": 4.786024003999612e-07, + "loss": 0.4399, + "step": 17364 + }, + { + "epoch": 2.71, + 
"grad_norm": 19.214807093508366, + "learning_rate": 4.780870660016091e-07, + "loss": 0.4338, + "step": 17365 + }, + { + "epoch": 2.71, + "grad_norm": 24.18263758656307, + "learning_rate": 4.77572002400889e-07, + "loss": 0.4697, + "step": 17366 + }, + { + "epoch": 2.71, + "grad_norm": 23.136460204437313, + "learning_rate": 4.770572096124515e-07, + "loss": 0.421, + "step": 17367 + }, + { + "epoch": 2.71, + "grad_norm": 18.01549275061047, + "learning_rate": 4.7654268765093604e-07, + "loss": 0.4369, + "step": 17368 + }, + { + "epoch": 2.71, + "grad_norm": 21.06620930056725, + "learning_rate": 4.760284365309753e-07, + "loss": 0.4101, + "step": 17369 + }, + { + "epoch": 2.71, + "grad_norm": 29.334126203470845, + "learning_rate": 4.755144562671943e-07, + "loss": 0.4209, + "step": 17370 + }, + { + "epoch": 2.71, + "grad_norm": 31.54646969006299, + "learning_rate": 4.7500074687421017e-07, + "loss": 0.4521, + "step": 17371 + }, + { + "epoch": 2.71, + "grad_norm": 39.968513085421456, + "learning_rate": 4.744873083666346e-07, + "loss": 0.5315, + "step": 17372 + }, + { + "epoch": 2.71, + "grad_norm": 24.247894269115044, + "learning_rate": 4.739741407590659e-07, + "loss": 0.4935, + "step": 17373 + }, + { + "epoch": 2.71, + "grad_norm": 25.56898532853315, + "learning_rate": 4.7346124406610125e-07, + "loss": 0.4109, + "step": 17374 + }, + { + "epoch": 2.71, + "grad_norm": 27.765093978651173, + "learning_rate": 4.729486183023246e-07, + "loss": 0.4593, + "step": 17375 + }, + { + "epoch": 2.71, + "grad_norm": 18.762721227724175, + "learning_rate": 4.724362634823165e-07, + "loss": 0.4055, + "step": 17376 + }, + { + "epoch": 2.71, + "grad_norm": 28.816990680308916, + "learning_rate": 4.7192417962064865e-07, + "loss": 0.439, + "step": 17377 + }, + { + "epoch": 2.71, + "grad_norm": 18.274613292861563, + "learning_rate": 4.714123667318837e-07, + "loss": 0.4647, + "step": 17378 + }, + { + "epoch": 2.71, + "grad_norm": 3.5723381890236676, + "learning_rate": 4.7090082483057577e-07, + "loss": 0.4093, + "step": 17379 + }, + { + "epoch": 2.71, + "grad_norm": 22.15896494174935, + "learning_rate": 4.7038955393127306e-07, + "loss": 0.4018, + "step": 17380 + }, + { + "epoch": 2.71, + "grad_norm": 13.399819966163685, + "learning_rate": 4.6987855404851955e-07, + "loss": 0.4273, + "step": 17381 + }, + { + "epoch": 2.72, + "grad_norm": 19.085045005489427, + "learning_rate": 4.6936782519684365e-07, + "loss": 0.4396, + "step": 17382 + }, + { + "epoch": 2.72, + "grad_norm": 20.70013423262256, + "learning_rate": 4.6885736739077146e-07, + "loss": 0.4026, + "step": 17383 + }, + { + "epoch": 2.72, + "grad_norm": 30.19754895395075, + "learning_rate": 4.683471806448192e-07, + "loss": 0.4879, + "step": 17384 + }, + { + "epoch": 2.72, + "grad_norm": 27.371877122015096, + "learning_rate": 4.6783726497349747e-07, + "loss": 0.5188, + "step": 17385 + }, + { + "epoch": 2.72, + "grad_norm": 29.821238949350676, + "learning_rate": 4.673276203913091e-07, + "loss": 0.4776, + "step": 17386 + }, + { + "epoch": 2.72, + "grad_norm": 23.906197476796812, + "learning_rate": 4.668182469127469e-07, + "loss": 0.4316, + "step": 17387 + }, + { + "epoch": 2.72, + "grad_norm": 20.890558692781248, + "learning_rate": 4.6630914455229493e-07, + "loss": 0.4068, + "step": 17388 + }, + { + "epoch": 2.72, + "grad_norm": 24.79089253736976, + "learning_rate": 4.6580031332443487e-07, + "loss": 0.4102, + "step": 17389 + }, + { + "epoch": 2.72, + "grad_norm": 27.523150481244254, + "learning_rate": 4.652917532436374e-07, + "loss": 0.4516, + "step": 17390 + }, + { + "epoch": 
2.72, + "grad_norm": 15.306461668556436, + "learning_rate": 4.6478346432436426e-07, + "loss": 0.3934, + "step": 17391 + }, + { + "epoch": 2.72, + "grad_norm": 21.29821462189279, + "learning_rate": 4.642754465810717e-07, + "loss": 0.4605, + "step": 17392 + }, + { + "epoch": 2.72, + "grad_norm": 31.493702984748477, + "learning_rate": 4.6376770002820593e-07, + "loss": 0.4108, + "step": 17393 + }, + { + "epoch": 2.72, + "grad_norm": 26.095407241458357, + "learning_rate": 4.632602246802109e-07, + "loss": 0.4612, + "step": 17394 + }, + { + "epoch": 2.72, + "grad_norm": 16.646601453076475, + "learning_rate": 4.6275302055151293e-07, + "loss": 0.3693, + "step": 17395 + }, + { + "epoch": 2.72, + "grad_norm": 18.10464786368163, + "learning_rate": 4.6224608765654153e-07, + "loss": 0.454, + "step": 17396 + }, + { + "epoch": 2.72, + "grad_norm": 29.524280938816876, + "learning_rate": 4.61739426009713e-07, + "loss": 0.464, + "step": 17397 + }, + { + "epoch": 2.72, + "grad_norm": 21.6426630919563, + "learning_rate": 4.612330356254335e-07, + "loss": 0.458, + "step": 17398 + }, + { + "epoch": 2.72, + "grad_norm": 26.19173882235598, + "learning_rate": 4.607269165181083e-07, + "loss": 0.4576, + "step": 17399 + }, + { + "epoch": 2.72, + "grad_norm": 22.28478750981967, + "learning_rate": 4.6022106870212804e-07, + "loss": 0.4246, + "step": 17400 + }, + { + "epoch": 2.72, + "grad_norm": 28.18063883475103, + "learning_rate": 4.597154921918812e-07, + "loss": 0.4302, + "step": 17401 + }, + { + "epoch": 2.72, + "grad_norm": 21.225910386650213, + "learning_rate": 4.592101870017429e-07, + "loss": 0.4514, + "step": 17402 + }, + { + "epoch": 2.72, + "grad_norm": 17.98886841096316, + "learning_rate": 4.587051531460873e-07, + "loss": 0.4558, + "step": 17403 + }, + { + "epoch": 2.72, + "grad_norm": 36.02204642984345, + "learning_rate": 4.5820039063927514e-07, + "loss": 0.4453, + "step": 17404 + }, + { + "epoch": 2.72, + "grad_norm": 44.551800977650416, + "learning_rate": 4.5769589949566153e-07, + "loss": 0.4731, + "step": 17405 + }, + { + "epoch": 2.72, + "grad_norm": 21.904202026760487, + "learning_rate": 4.57191679729595e-07, + "loss": 0.4218, + "step": 17406 + }, + { + "epoch": 2.72, + "grad_norm": 29.949185903881208, + "learning_rate": 4.566877313554152e-07, + "loss": 0.4611, + "step": 17407 + }, + { + "epoch": 2.72, + "grad_norm": 23.986283792343404, + "learning_rate": 4.561840543874529e-07, + "loss": 0.4625, + "step": 17408 + }, + { + "epoch": 2.72, + "grad_norm": 18.171183132603076, + "learning_rate": 4.5568064884003337e-07, + "loss": 0.4917, + "step": 17409 + }, + { + "epoch": 2.72, + "grad_norm": 21.441739249149823, + "learning_rate": 4.551775147274717e-07, + "loss": 0.4176, + "step": 17410 + }, + { + "epoch": 2.72, + "grad_norm": 28.75684373970585, + "learning_rate": 4.5467465206408103e-07, + "loss": 0.4153, + "step": 17411 + }, + { + "epoch": 2.72, + "grad_norm": 15.389540213874831, + "learning_rate": 4.541720608641575e-07, + "loss": 0.3785, + "step": 17412 + }, + { + "epoch": 2.72, + "grad_norm": 16.016112998412776, + "learning_rate": 4.5366974114199546e-07, + "loss": 0.4106, + "step": 17413 + }, + { + "epoch": 2.72, + "grad_norm": 18.931950525588608, + "learning_rate": 4.5316769291188223e-07, + "loss": 0.4221, + "step": 17414 + }, + { + "epoch": 2.72, + "grad_norm": 29.240826921865555, + "learning_rate": 4.526659161880964e-07, + "loss": 0.434, + "step": 17415 + }, + { + "epoch": 2.72, + "grad_norm": 31.13718517903119, + "learning_rate": 4.521644109849066e-07, + "loss": 0.4298, + "step": 17416 + }, + { + 
"epoch": 2.72, + "grad_norm": 20.48098421548587, + "learning_rate": 4.5166317731657363e-07, + "loss": 0.4454, + "step": 17417 + }, + { + "epoch": 2.72, + "grad_norm": 20.434729211114256, + "learning_rate": 4.5116221519735493e-07, + "loss": 0.4622, + "step": 17418 + }, + { + "epoch": 2.72, + "grad_norm": 17.51366634119941, + "learning_rate": 4.5066152464149914e-07, + "loss": 0.444, + "step": 17419 + }, + { + "epoch": 2.72, + "grad_norm": 16.712331832276472, + "learning_rate": 4.5016110566324044e-07, + "loss": 0.3636, + "step": 17420 + }, + { + "epoch": 2.72, + "grad_norm": 19.2683599113328, + "learning_rate": 4.4966095827681524e-07, + "loss": 0.4288, + "step": 17421 + }, + { + "epoch": 2.72, + "grad_norm": 14.892715665588616, + "learning_rate": 4.491610824964454e-07, + "loss": 0.4047, + "step": 17422 + }, + { + "epoch": 2.72, + "grad_norm": 24.20558238929761, + "learning_rate": 4.486614783363463e-07, + "loss": 0.408, + "step": 17423 + }, + { + "epoch": 2.72, + "grad_norm": 29.223760404762384, + "learning_rate": 4.481621458107288e-07, + "loss": 0.5298, + "step": 17424 + }, + { + "epoch": 2.72, + "grad_norm": 29.68640664998534, + "learning_rate": 4.476630849337904e-07, + "loss": 0.4707, + "step": 17425 + }, + { + "epoch": 2.72, + "grad_norm": 26.581314671462636, + "learning_rate": 4.471642957197275e-07, + "loss": 0.4379, + "step": 17426 + }, + { + "epoch": 2.72, + "grad_norm": 20.671837457419493, + "learning_rate": 4.4666577818272327e-07, + "loss": 0.5048, + "step": 17427 + }, + { + "epoch": 2.72, + "grad_norm": 28.38455317499184, + "learning_rate": 4.461675323369563e-07, + "loss": 0.4704, + "step": 17428 + }, + { + "epoch": 2.72, + "grad_norm": 24.656114082300245, + "learning_rate": 4.456695581965942e-07, + "loss": 0.3824, + "step": 17429 + }, + { + "epoch": 2.72, + "grad_norm": 18.952204471225592, + "learning_rate": 4.4517185577580226e-07, + "loss": 0.3957, + "step": 17430 + }, + { + "epoch": 2.72, + "grad_norm": 24.677067259212578, + "learning_rate": 4.446744250887336e-07, + "loss": 0.4821, + "step": 17431 + }, + { + "epoch": 2.72, + "grad_norm": 24.978744868491923, + "learning_rate": 4.441772661495347e-07, + "loss": 0.4703, + "step": 17432 + }, + { + "epoch": 2.72, + "grad_norm": 19.496449254990207, + "learning_rate": 4.4368037897234317e-07, + "loss": 0.4496, + "step": 17433 + }, + { + "epoch": 2.72, + "grad_norm": 17.877871280257406, + "learning_rate": 4.4318376357129103e-07, + "loss": 0.409, + "step": 17434 + }, + { + "epoch": 2.72, + "grad_norm": 29.01844481682169, + "learning_rate": 4.4268741996050244e-07, + "loss": 0.5087, + "step": 17435 + }, + { + "epoch": 2.72, + "grad_norm": 20.261647695163436, + "learning_rate": 4.421913481540929e-07, + "loss": 0.4911, + "step": 17436 + }, + { + "epoch": 2.72, + "grad_norm": 18.478901469878732, + "learning_rate": 4.4169554816617224e-07, + "loss": 0.3723, + "step": 17437 + }, + { + "epoch": 2.72, + "grad_norm": 21.70580459692987, + "learning_rate": 4.4120002001083575e-07, + "loss": 0.4246, + "step": 17438 + }, + { + "epoch": 2.72, + "grad_norm": 22.589175481408834, + "learning_rate": 4.4070476370218e-07, + "loss": 0.4291, + "step": 17439 + }, + { + "epoch": 2.72, + "grad_norm": 17.679987959219805, + "learning_rate": 4.402097792542892e-07, + "loss": 0.3547, + "step": 17440 + }, + { + "epoch": 2.72, + "grad_norm": 15.396166383333188, + "learning_rate": 4.397150666812411e-07, + "loss": 0.4053, + "step": 17441 + }, + { + "epoch": 2.72, + "grad_norm": 19.025061248631832, + "learning_rate": 4.392206259971021e-07, + "loss": 0.4068, + "step": 17442 + }, 
+ { + "epoch": 2.72, + "grad_norm": 19.413034781841418, + "learning_rate": 4.3872645721593556e-07, + "loss": 0.4552, + "step": 17443 + }, + { + "epoch": 2.72, + "grad_norm": 30.036685707204484, + "learning_rate": 4.382325603517956e-07, + "loss": 0.4983, + "step": 17444 + }, + { + "epoch": 2.72, + "grad_norm": 18.63665511456837, + "learning_rate": 4.3773893541873005e-07, + "loss": 0.4048, + "step": 17445 + }, + { + "epoch": 2.73, + "grad_norm": 25.225865457520595, + "learning_rate": 4.372455824307731e-07, + "loss": 0.4295, + "step": 17446 + }, + { + "epoch": 2.73, + "grad_norm": 24.630164364921853, + "learning_rate": 4.36752501401958e-07, + "loss": 0.4304, + "step": 17447 + }, + { + "epoch": 2.73, + "grad_norm": 25.856865290300433, + "learning_rate": 4.3625969234630694e-07, + "loss": 0.4567, + "step": 17448 + }, + { + "epoch": 2.73, + "grad_norm": 20.071757241203127, + "learning_rate": 4.3576715527783755e-07, + "loss": 0.3787, + "step": 17449 + }, + { + "epoch": 2.73, + "grad_norm": 15.501434511668156, + "learning_rate": 4.3527489021055414e-07, + "loss": 0.3805, + "step": 17450 + }, + { + "epoch": 2.73, + "grad_norm": 23.617238735253185, + "learning_rate": 4.347828971584578e-07, + "loss": 0.4372, + "step": 17451 + }, + { + "epoch": 2.73, + "grad_norm": 18.29021742708971, + "learning_rate": 4.342911761355395e-07, + "loss": 0.3763, + "step": 17452 + }, + { + "epoch": 2.73, + "grad_norm": 19.58187449331832, + "learning_rate": 4.3379972715578587e-07, + "loss": 0.4352, + "step": 17453 + }, + { + "epoch": 2.73, + "grad_norm": 27.83295509223901, + "learning_rate": 4.333085502331713e-07, + "loss": 0.4433, + "step": 17454 + }, + { + "epoch": 2.73, + "grad_norm": 20.9373031023361, + "learning_rate": 4.328176453816657e-07, + "loss": 0.4278, + "step": 17455 + }, + { + "epoch": 2.73, + "grad_norm": 22.0500319547027, + "learning_rate": 4.32327012615229e-07, + "loss": 0.4205, + "step": 17456 + }, + { + "epoch": 2.73, + "grad_norm": 18.327597823533026, + "learning_rate": 4.3183665194781564e-07, + "loss": 0.4277, + "step": 17457 + }, + { + "epoch": 2.73, + "grad_norm": 32.74199777947312, + "learning_rate": 4.3134656339337e-07, + "loss": 0.4478, + "step": 17458 + }, + { + "epoch": 2.73, + "grad_norm": 30.692695535246823, + "learning_rate": 4.308567469658298e-07, + "loss": 0.4943, + "step": 17459 + }, + { + "epoch": 2.73, + "grad_norm": 23.9940120098935, + "learning_rate": 4.3036720267912833e-07, + "loss": 0.4214, + "step": 17460 + }, + { + "epoch": 2.73, + "grad_norm": 18.34305745650129, + "learning_rate": 4.2987793054718343e-07, + "loss": 0.4019, + "step": 17461 + }, + { + "epoch": 2.73, + "grad_norm": 18.68511982658362, + "learning_rate": 4.2938893058391385e-07, + "loss": 0.3996, + "step": 17462 + }, + { + "epoch": 2.73, + "grad_norm": 24.150543381469717, + "learning_rate": 4.289002028032219e-07, + "loss": 0.4458, + "step": 17463 + }, + { + "epoch": 2.73, + "grad_norm": 28.56440195766985, + "learning_rate": 4.284117472190108e-07, + "loss": 0.4547, + "step": 17464 + }, + { + "epoch": 2.73, + "grad_norm": 20.686787437429523, + "learning_rate": 4.2792356384517063e-07, + "loss": 0.3967, + "step": 17465 + }, + { + "epoch": 2.73, + "grad_norm": 22.997359866756085, + "learning_rate": 4.2743565269558475e-07, + "loss": 0.4226, + "step": 17466 + }, + { + "epoch": 2.73, + "grad_norm": 18.35440114600762, + "learning_rate": 4.2694801378412867e-07, + "loss": 0.412, + "step": 17467 + }, + { + "epoch": 2.73, + "grad_norm": 22.008676945964883, + "learning_rate": 4.2646064712467016e-07, + "loss": 0.485, + "step": 17468 + }, 
+ { + "epoch": 2.73, + "grad_norm": 16.297305301680044, + "learning_rate": 4.2597355273107267e-07, + "loss": 0.4124, + "step": 17469 + }, + { + "epoch": 2.73, + "grad_norm": 23.817850589987756, + "learning_rate": 4.2548673061718614e-07, + "loss": 0.4001, + "step": 17470 + }, + { + "epoch": 2.73, + "grad_norm": 15.62385161757375, + "learning_rate": 4.25000180796854e-07, + "loss": 0.4426, + "step": 17471 + }, + { + "epoch": 2.73, + "grad_norm": 16.607778978955473, + "learning_rate": 4.245139032839163e-07, + "loss": 0.4312, + "step": 17472 + }, + { + "epoch": 2.73, + "grad_norm": 26.70832361383802, + "learning_rate": 4.240278980922019e-07, + "loss": 0.4486, + "step": 17473 + }, + { + "epoch": 2.73, + "grad_norm": 18.118358524866036, + "learning_rate": 4.2354216523553314e-07, + "loss": 0.4001, + "step": 17474 + }, + { + "epoch": 2.73, + "grad_norm": 29.531044915915153, + "learning_rate": 4.230567047277234e-07, + "loss": 0.3881, + "step": 17475 + }, + { + "epoch": 2.73, + "grad_norm": 22.110147238950233, + "learning_rate": 4.2257151658257724e-07, + "loss": 0.4405, + "step": 17476 + }, + { + "epoch": 2.73, + "grad_norm": 27.63439163061701, + "learning_rate": 4.2208660081389463e-07, + "loss": 0.4667, + "step": 17477 + }, + { + "epoch": 2.73, + "grad_norm": 25.894108281843828, + "learning_rate": 4.2160195743546797e-07, + "loss": 0.3913, + "step": 17478 + }, + { + "epoch": 2.73, + "grad_norm": 24.97732492786066, + "learning_rate": 4.2111758646107617e-07, + "loss": 0.4572, + "step": 17479 + }, + { + "epoch": 2.73, + "grad_norm": 14.659188849784474, + "learning_rate": 4.2063348790449823e-07, + "loss": 0.375, + "step": 17480 + }, + { + "epoch": 2.73, + "grad_norm": 18.860107567251355, + "learning_rate": 4.201496617794998e-07, + "loss": 0.4538, + "step": 17481 + }, + { + "epoch": 2.73, + "grad_norm": 18.708013347464217, + "learning_rate": 4.19666108099841e-07, + "loss": 0.4527, + "step": 17482 + }, + { + "epoch": 2.73, + "grad_norm": 24.922707518062698, + "learning_rate": 4.1918282687927304e-07, + "loss": 0.4257, + "step": 17483 + }, + { + "epoch": 2.73, + "grad_norm": 13.934500199316048, + "learning_rate": 4.186998181315405e-07, + "loss": 0.4286, + "step": 17484 + }, + { + "epoch": 2.73, + "grad_norm": 22.812547378621478, + "learning_rate": 4.1821708187038236e-07, + "loss": 0.4758, + "step": 17485 + }, + { + "epoch": 2.73, + "grad_norm": 25.638804827331093, + "learning_rate": 4.177346181095232e-07, + "loss": 0.42, + "step": 17486 + }, + { + "epoch": 2.73, + "grad_norm": 20.398371528714584, + "learning_rate": 4.172524268626876e-07, + "loss": 0.4493, + "step": 17487 + }, + { + "epoch": 2.73, + "grad_norm": 28.63009658880959, + "learning_rate": 4.167705081435858e-07, + "loss": 0.4378, + "step": 17488 + }, + { + "epoch": 2.73, + "grad_norm": 27.343390325633028, + "learning_rate": 4.162888619659244e-07, + "loss": 0.4357, + "step": 17489 + }, + { + "epoch": 2.73, + "grad_norm": 27.446397579123143, + "learning_rate": 4.158074883434038e-07, + "loss": 0.5091, + "step": 17490 + }, + { + "epoch": 2.73, + "grad_norm": 18.738836955756252, + "learning_rate": 4.1532638728971065e-07, + "loss": 0.3896, + "step": 17491 + }, + { + "epoch": 2.73, + "grad_norm": 12.552671287215457, + "learning_rate": 4.148455588185274e-07, + "loss": 0.3835, + "step": 17492 + }, + { + "epoch": 2.73, + "grad_norm": 17.479494448313787, + "learning_rate": 4.143650029435287e-07, + "loss": 0.5104, + "step": 17493 + }, + { + "epoch": 2.73, + "grad_norm": 20.20213456466183, + "learning_rate": 4.138847196783835e-07, + "loss": 0.3914, + "step": 
17494 + }, + { + "epoch": 2.73, + "grad_norm": 36.61430820435879, + "learning_rate": 4.134047090367488e-07, + "loss": 0.4465, + "step": 17495 + }, + { + "epoch": 2.73, + "grad_norm": 32.111421649333316, + "learning_rate": 4.129249710322758e-07, + "loss": 0.4553, + "step": 17496 + }, + { + "epoch": 2.73, + "grad_norm": 29.861711339807677, + "learning_rate": 4.124455056786081e-07, + "loss": 0.4718, + "step": 17497 + }, + { + "epoch": 2.73, + "grad_norm": 21.267011851593708, + "learning_rate": 4.119663129893814e-07, + "loss": 0.4152, + "step": 17498 + }, + { + "epoch": 2.73, + "grad_norm": 23.956345237191314, + "learning_rate": 4.114873929782259e-07, + "loss": 0.4485, + "step": 17499 + }, + { + "epoch": 2.73, + "grad_norm": 15.606313338359346, + "learning_rate": 4.1100874565875745e-07, + "loss": 0.4006, + "step": 17500 + }, + { + "epoch": 2.73, + "grad_norm": 15.98487525168327, + "learning_rate": 4.105303710445918e-07, + "loss": 0.4085, + "step": 17501 + }, + { + "epoch": 2.73, + "grad_norm": 19.179010733572685, + "learning_rate": 4.100522691493325e-07, + "loss": 0.3811, + "step": 17502 + }, + { + "epoch": 2.73, + "grad_norm": 34.54975317185062, + "learning_rate": 4.0957443998657754e-07, + "loss": 0.4092, + "step": 17503 + }, + { + "epoch": 2.73, + "grad_norm": 18.810297646989035, + "learning_rate": 4.0909688356991495e-07, + "loss": 0.4638, + "step": 17504 + }, + { + "epoch": 2.73, + "grad_norm": 17.72673533655055, + "learning_rate": 4.086195999129261e-07, + "loss": 0.3704, + "step": 17505 + }, + { + "epoch": 2.73, + "grad_norm": 21.70174804765831, + "learning_rate": 4.081425890291846e-07, + "loss": 0.4318, + "step": 17506 + }, + { + "epoch": 2.73, + "grad_norm": 16.040318933655207, + "learning_rate": 4.076658509322573e-07, + "loss": 0.3723, + "step": 17507 + }, + { + "epoch": 2.73, + "grad_norm": 27.408368450702692, + "learning_rate": 4.0718938563570233e-07, + "loss": 0.4915, + "step": 17508 + }, + { + "epoch": 2.73, + "grad_norm": 31.30847192503798, + "learning_rate": 4.067131931530699e-07, + "loss": 0.4231, + "step": 17509 + }, + { + "epoch": 2.74, + "grad_norm": 39.510686933138025, + "learning_rate": 4.0623727349790034e-07, + "loss": 0.4788, + "step": 17510 + }, + { + "epoch": 2.74, + "grad_norm": 19.3988233895468, + "learning_rate": 4.0576162668373164e-07, + "loss": 0.4353, + "step": 17511 + }, + { + "epoch": 2.74, + "grad_norm": 18.015188683340405, + "learning_rate": 4.0528625272409083e-07, + "loss": 0.5014, + "step": 17512 + }, + { + "epoch": 2.74, + "grad_norm": 27.904477455087267, + "learning_rate": 4.0481115163249483e-07, + "loss": 0.4241, + "step": 17513 + }, + { + "epoch": 2.74, + "grad_norm": 20.779880772563885, + "learning_rate": 4.0433632342245735e-07, + "loss": 0.4098, + "step": 17514 + }, + { + "epoch": 2.74, + "grad_norm": 16.437373304966645, + "learning_rate": 4.038617681074808e-07, + "loss": 0.3925, + "step": 17515 + }, + { + "epoch": 2.74, + "grad_norm": 29.490459909934003, + "learning_rate": 4.0338748570106333e-07, + "loss": 0.4817, + "step": 17516 + }, + { + "epoch": 2.74, + "grad_norm": 26.63319283121807, + "learning_rate": 4.0291347621669084e-07, + "loss": 0.4922, + "step": 17517 + }, + { + "epoch": 2.74, + "grad_norm": 15.398392111616047, + "learning_rate": 4.0243973966784477e-07, + "loss": 0.3934, + "step": 17518 + }, + { + "epoch": 2.74, + "grad_norm": 21.90392494787278, + "learning_rate": 4.019662760679988e-07, + "loss": 0.412, + "step": 17519 + }, + { + "epoch": 2.74, + "grad_norm": 23.04840720045855, + "learning_rate": 4.014930854306176e-07, + "loss": 0.47, + 
"step": 17520 + }, + { + "epoch": 2.74, + "grad_norm": 23.888840216364056, + "learning_rate": 4.010201677691572e-07, + "loss": 0.4637, + "step": 17521 + }, + { + "epoch": 2.74, + "grad_norm": 40.307585703550416, + "learning_rate": 4.005475230970679e-07, + "loss": 0.4092, + "step": 17522 + }, + { + "epoch": 2.74, + "grad_norm": 28.070339697621073, + "learning_rate": 4.000751514277912e-07, + "loss": 0.4502, + "step": 17523 + }, + { + "epoch": 2.74, + "grad_norm": 25.372528754479347, + "learning_rate": 3.9960305277476294e-07, + "loss": 0.4244, + "step": 17524 + }, + { + "epoch": 2.74, + "grad_norm": 22.54098039265196, + "learning_rate": 3.9913122715140804e-07, + "loss": 0.5074, + "step": 17525 + }, + { + "epoch": 2.74, + "grad_norm": 42.24266727359193, + "learning_rate": 3.986596745711424e-07, + "loss": 0.53, + "step": 17526 + }, + { + "epoch": 2.74, + "grad_norm": 23.467002523587144, + "learning_rate": 3.9818839504737974e-07, + "loss": 0.4493, + "step": 17527 + }, + { + "epoch": 2.74, + "grad_norm": 23.461490860852557, + "learning_rate": 3.977173885935237e-07, + "loss": 0.3873, + "step": 17528 + }, + { + "epoch": 2.74, + "grad_norm": 19.942109732590932, + "learning_rate": 3.972466552229681e-07, + "loss": 0.4736, + "step": 17529 + }, + { + "epoch": 2.74, + "grad_norm": 16.932327548759254, + "learning_rate": 3.967761949490978e-07, + "loss": 0.4134, + "step": 17530 + }, + { + "epoch": 2.74, + "grad_norm": 17.789274291083245, + "learning_rate": 3.963060077852965e-07, + "loss": 0.422, + "step": 17531 + }, + { + "epoch": 2.74, + "grad_norm": 32.12679830575649, + "learning_rate": 3.958360937449335e-07, + "loss": 0.4283, + "step": 17532 + }, + { + "epoch": 2.74, + "grad_norm": 16.195025399161796, + "learning_rate": 3.953664528413737e-07, + "loss": 0.4202, + "step": 17533 + }, + { + "epoch": 2.74, + "grad_norm": 28.35008528858892, + "learning_rate": 3.948970850879752e-07, + "loss": 0.5014, + "step": 17534 + }, + { + "epoch": 2.74, + "grad_norm": 22.336119845520958, + "learning_rate": 3.94427990498083e-07, + "loss": 0.3942, + "step": 17535 + }, + { + "epoch": 2.74, + "grad_norm": 26.739048433966083, + "learning_rate": 3.9395916908503863e-07, + "loss": 0.4088, + "step": 17536 + }, + { + "epoch": 2.74, + "grad_norm": 37.68300077363878, + "learning_rate": 3.9349062086217915e-07, + "loss": 0.4558, + "step": 17537 + }, + { + "epoch": 2.74, + "grad_norm": 32.05418614695762, + "learning_rate": 3.930223458428239e-07, + "loss": 0.4647, + "step": 17538 + }, + { + "epoch": 2.74, + "grad_norm": 18.312230954455963, + "learning_rate": 3.9255434404029567e-07, + "loss": 0.4275, + "step": 17539 + }, + { + "epoch": 2.74, + "grad_norm": 22.36721689766602, + "learning_rate": 3.9208661546789927e-07, + "loss": 0.4145, + "step": 17540 + }, + { + "epoch": 2.74, + "grad_norm": 18.389367887074666, + "learning_rate": 3.9161916013894186e-07, + "loss": 0.4328, + "step": 17541 + }, + { + "epoch": 2.74, + "grad_norm": 32.79270567615219, + "learning_rate": 3.9115197806671277e-07, + "loss": 0.4913, + "step": 17542 + }, + { + "epoch": 2.74, + "grad_norm": 21.452098591277245, + "learning_rate": 3.9068506926450146e-07, + "loss": 0.4522, + "step": 17543 + }, + { + "epoch": 2.74, + "grad_norm": 22.44429547840225, + "learning_rate": 3.9021843374558385e-07, + "loss": 0.4402, + "step": 17544 + }, + { + "epoch": 2.74, + "grad_norm": 23.951166938881396, + "learning_rate": 3.897520715232339e-07, + "loss": 0.4318, + "step": 17545 + }, + { + "epoch": 2.74, + "grad_norm": 20.28365225407174, + "learning_rate": 3.8928598261071313e-07, + "loss": 
0.3915, + "step": 17546 + }, + { + "epoch": 2.74, + "grad_norm": 20.111141844508477, + "learning_rate": 3.8882016702127544e-07, + "loss": 0.4442, + "step": 17547 + }, + { + "epoch": 2.74, + "grad_norm": 21.799639862152425, + "learning_rate": 3.883546247681713e-07, + "loss": 0.4386, + "step": 17548 + }, + { + "epoch": 2.74, + "grad_norm": 20.933889535364973, + "learning_rate": 3.8788935586463906e-07, + "loss": 0.3988, + "step": 17549 + }, + { + "epoch": 2.74, + "grad_norm": 18.307817436950778, + "learning_rate": 3.8742436032391027e-07, + "loss": 0.4015, + "step": 17550 + }, + { + "epoch": 2.74, + "grad_norm": 18.48341164372044, + "learning_rate": 3.8695963815920887e-07, + "loss": 0.4712, + "step": 17551 + }, + { + "epoch": 2.74, + "grad_norm": 21.99712163074532, + "learning_rate": 3.864951893837521e-07, + "loss": 0.4125, + "step": 17552 + }, + { + "epoch": 2.74, + "grad_norm": 19.410124987344634, + "learning_rate": 3.8603101401074927e-07, + "loss": 0.4214, + "step": 17553 + }, + { + "epoch": 2.74, + "grad_norm": 21.453021551439846, + "learning_rate": 3.855671120533999e-07, + "loss": 0.4249, + "step": 17554 + }, + { + "epoch": 2.74, + "grad_norm": 17.90595372189574, + "learning_rate": 3.851034835248979e-07, + "loss": 0.4297, + "step": 17555 + }, + { + "epoch": 2.74, + "grad_norm": 20.302956417423683, + "learning_rate": 3.8464012843842714e-07, + "loss": 0.4296, + "step": 17556 + }, + { + "epoch": 2.74, + "grad_norm": 23.57739614835241, + "learning_rate": 3.8417704680716704e-07, + "loss": 0.4344, + "step": 17557 + }, + { + "epoch": 2.74, + "grad_norm": 22.784546681606066, + "learning_rate": 3.8371423864428826e-07, + "loss": 0.4267, + "step": 17558 + }, + { + "epoch": 2.74, + "grad_norm": 24.219278412253438, + "learning_rate": 3.8325170396294907e-07, + "loss": 0.4416, + "step": 17559 + }, + { + "epoch": 2.74, + "grad_norm": 22.641724036574267, + "learning_rate": 3.827894427763057e-07, + "loss": 0.4565, + "step": 17560 + }, + { + "epoch": 2.74, + "grad_norm": 28.717063972618014, + "learning_rate": 3.8232745509750423e-07, + "loss": 0.4422, + "step": 17561 + }, + { + "epoch": 2.74, + "grad_norm": 28.222039680477796, + "learning_rate": 3.818657409396853e-07, + "loss": 0.4581, + "step": 17562 + }, + { + "epoch": 2.74, + "grad_norm": 15.941673798151543, + "learning_rate": 3.8140430031597844e-07, + "loss": 0.4145, + "step": 17563 + }, + { + "epoch": 2.74, + "grad_norm": 21.126597698868775, + "learning_rate": 3.809431332395053e-07, + "loss": 0.4306, + "step": 17564 + }, + { + "epoch": 2.74, + "grad_norm": 16.492216500980064, + "learning_rate": 3.804822397233832e-07, + "loss": 0.3688, + "step": 17565 + }, + { + "epoch": 2.74, + "grad_norm": 23.945840111002425, + "learning_rate": 3.800216197807194e-07, + "loss": 0.4492, + "step": 17566 + }, + { + "epoch": 2.74, + "grad_norm": 17.556779334890756, + "learning_rate": 3.7956127342461127e-07, + "loss": 0.4067, + "step": 17567 + }, + { + "epoch": 2.74, + "grad_norm": 19.31972029118095, + "learning_rate": 3.791012006681549e-07, + "loss": 0.4254, + "step": 17568 + }, + { + "epoch": 2.74, + "grad_norm": 27.829320157970898, + "learning_rate": 3.786414015244311e-07, + "loss": 0.4306, + "step": 17569 + }, + { + "epoch": 2.74, + "grad_norm": 27.47234722090023, + "learning_rate": 3.781818760065181e-07, + "loss": 0.4251, + "step": 17570 + }, + { + "epoch": 2.74, + "grad_norm": 18.16782867231646, + "learning_rate": 3.777226241274834e-07, + "loss": 0.4364, + "step": 17571 + }, + { + "epoch": 2.74, + "grad_norm": 22.465201876310815, + "learning_rate": 
3.772636459003887e-07, + "loss": 0.434, + "step": 17572 + }, + { + "epoch": 2.74, + "grad_norm": 16.965029729306384, + "learning_rate": 3.768049413382868e-07, + "loss": 0.3755, + "step": 17573 + }, + { + "epoch": 2.75, + "grad_norm": 18.79009514268903, + "learning_rate": 3.76346510454223e-07, + "loss": 0.4047, + "step": 17574 + }, + { + "epoch": 2.75, + "grad_norm": 35.86451489298629, + "learning_rate": 3.758883532612356e-07, + "loss": 0.4105, + "step": 17575 + }, + { + "epoch": 2.75, + "grad_norm": 30.348565907862902, + "learning_rate": 3.754304697723521e-07, + "loss": 0.4163, + "step": 17576 + }, + { + "epoch": 2.75, + "grad_norm": 27.512065754730056, + "learning_rate": 3.749728600005953e-07, + "loss": 0.3804, + "step": 17577 + }, + { + "epoch": 2.75, + "grad_norm": 24.34524124876427, + "learning_rate": 3.745155239589815e-07, + "loss": 0.3923, + "step": 17578 + }, + { + "epoch": 2.75, + "grad_norm": 22.767678273590956, + "learning_rate": 3.7405846166051585e-07, + "loss": 0.4289, + "step": 17579 + }, + { + "epoch": 2.75, + "grad_norm": 23.07189445613623, + "learning_rate": 3.736016731181946e-07, + "loss": 0.4136, + "step": 17580 + }, + { + "epoch": 2.75, + "grad_norm": 21.27953030308091, + "learning_rate": 3.7314515834501075e-07, + "loss": 0.4527, + "step": 17581 + }, + { + "epoch": 2.75, + "grad_norm": 21.27630390969021, + "learning_rate": 3.726889173539483e-07, + "loss": 0.4432, + "step": 17582 + }, + { + "epoch": 2.75, + "grad_norm": 29.0326694145637, + "learning_rate": 3.7223295015798133e-07, + "loss": 0.5179, + "step": 17583 + }, + { + "epoch": 2.75, + "grad_norm": 16.664408589420702, + "learning_rate": 3.7177725677007615e-07, + "loss": 0.3659, + "step": 17584 + }, + { + "epoch": 2.75, + "grad_norm": 25.147560411856237, + "learning_rate": 3.713218372031935e-07, + "loss": 0.5466, + "step": 17585 + }, + { + "epoch": 2.75, + "grad_norm": 16.922662537900592, + "learning_rate": 3.708666914702852e-07, + "loss": 0.4031, + "step": 17586 + }, + { + "epoch": 2.75, + "grad_norm": 23.310389091427545, + "learning_rate": 3.704118195842965e-07, + "loss": 0.4602, + "step": 17587 + }, + { + "epoch": 2.75, + "grad_norm": 18.95408639254217, + "learning_rate": 3.699572215581615e-07, + "loss": 0.4711, + "step": 17588 + }, + { + "epoch": 2.75, + "grad_norm": 29.79057878706563, + "learning_rate": 3.695028974048098e-07, + "loss": 0.5159, + "step": 17589 + }, + { + "epoch": 2.75, + "grad_norm": 21.45883003666137, + "learning_rate": 3.6904884713716114e-07, + "loss": 0.3995, + "step": 17590 + }, + { + "epoch": 2.75, + "grad_norm": 29.49774046908421, + "learning_rate": 3.6859507076813073e-07, + "loss": 0.4374, + "step": 17591 + }, + { + "epoch": 2.75, + "grad_norm": 38.878012121898465, + "learning_rate": 3.6814156831062264e-07, + "loss": 0.4094, + "step": 17592 + }, + { + "epoch": 2.75, + "grad_norm": 21.39699222270177, + "learning_rate": 3.6768833977753214e-07, + "loss": 0.4804, + "step": 17593 + }, + { + "epoch": 2.75, + "grad_norm": 14.367121750254363, + "learning_rate": 3.672353851817512e-07, + "loss": 0.4046, + "step": 17594 + }, + { + "epoch": 2.75, + "grad_norm": 23.538314317472818, + "learning_rate": 3.667827045361616e-07, + "loss": 0.512, + "step": 17595 + }, + { + "epoch": 2.75, + "grad_norm": 28.10235191596009, + "learning_rate": 3.663302978536354e-07, + "loss": 0.3986, + "step": 17596 + }, + { + "epoch": 2.75, + "grad_norm": 27.12716722345551, + "learning_rate": 3.658781651470422e-07, + "loss": 0.4979, + "step": 17597 + }, + { + "epoch": 2.75, + "grad_norm": 20.902512735201658, + "learning_rate": 
3.654263064292363e-07, + "loss": 0.4153, + "step": 17598 + }, + { + "epoch": 2.75, + "grad_norm": 15.508206423505467, + "learning_rate": 3.649747217130695e-07, + "loss": 0.4354, + "step": 17599 + }, + { + "epoch": 2.75, + "grad_norm": 21.13389008572231, + "learning_rate": 3.645234110113871e-07, + "loss": 0.3795, + "step": 17600 + }, + { + "epoch": 2.75, + "grad_norm": 22.074589900520056, + "learning_rate": 3.6407237433702116e-07, + "loss": 0.4382, + "step": 17601 + }, + { + "epoch": 2.75, + "grad_norm": 31.822803042074202, + "learning_rate": 3.636216117028013e-07, + "loss": 0.4385, + "step": 17602 + }, + { + "epoch": 2.75, + "grad_norm": 16.846443769012538, + "learning_rate": 3.63171123121544e-07, + "loss": 0.3833, + "step": 17603 + }, + { + "epoch": 2.75, + "grad_norm": 24.389533195032417, + "learning_rate": 3.627209086060635e-07, + "loss": 0.3963, + "step": 17604 + }, + { + "epoch": 2.75, + "grad_norm": 20.93648974130658, + "learning_rate": 3.6227096816916274e-07, + "loss": 0.4219, + "step": 17605 + }, + { + "epoch": 2.75, + "grad_norm": 22.453932798052776, + "learning_rate": 3.6182130182363716e-07, + "loss": 0.449, + "step": 17606 + }, + { + "epoch": 2.75, + "grad_norm": 16.16350660663446, + "learning_rate": 3.6137190958227655e-07, + "loss": 0.3822, + "step": 17607 + }, + { + "epoch": 2.75, + "grad_norm": 24.11960044964342, + "learning_rate": 3.609227914578606e-07, + "loss": 0.4434, + "step": 17608 + }, + { + "epoch": 2.75, + "grad_norm": 35.27148795923943, + "learning_rate": 3.6047394746316023e-07, + "loss": 0.4816, + "step": 17609 + }, + { + "epoch": 2.75, + "grad_norm": 28.587634090214188, + "learning_rate": 3.6002537761094193e-07, + "loss": 0.4546, + "step": 17610 + }, + { + "epoch": 2.75, + "grad_norm": 29.720156458838176, + "learning_rate": 3.595770819139632e-07, + "loss": 0.4599, + "step": 17611 + }, + { + "epoch": 2.75, + "grad_norm": 24.607773131052568, + "learning_rate": 3.5912906038497287e-07, + "loss": 0.5316, + "step": 17612 + }, + { + "epoch": 2.75, + "grad_norm": 20.001750958060477, + "learning_rate": 3.5868131303671393e-07, + "loss": 0.4304, + "step": 17613 + }, + { + "epoch": 2.75, + "grad_norm": 21.677063960201608, + "learning_rate": 3.582338398819163e-07, + "loss": 0.3819, + "step": 17614 + }, + { + "epoch": 2.75, + "grad_norm": 21.54366094423086, + "learning_rate": 3.577866409333075e-07, + "loss": 0.3947, + "step": 17615 + }, + { + "epoch": 2.75, + "grad_norm": 22.872411805571907, + "learning_rate": 3.5733971620360853e-07, + "loss": 0.4644, + "step": 17616 + }, + { + "epoch": 2.75, + "grad_norm": 27.67797548079895, + "learning_rate": 3.5689306570552584e-07, + "loss": 0.4206, + "step": 17617 + }, + { + "epoch": 2.75, + "grad_norm": 15.201829078776536, + "learning_rate": 3.5644668945176264e-07, + "loss": 0.4066, + "step": 17618 + }, + { + "epoch": 2.75, + "grad_norm": 25.251090222725587, + "learning_rate": 3.5600058745501543e-07, + "loss": 0.4283, + "step": 17619 + }, + { + "epoch": 2.75, + "grad_norm": 26.384651325590177, + "learning_rate": 3.555547597279696e-07, + "loss": 0.4019, + "step": 17620 + }, + { + "epoch": 2.75, + "grad_norm": 24.472285705221083, + "learning_rate": 3.551092062833039e-07, + "loss": 0.4387, + "step": 17621 + }, + { + "epoch": 2.75, + "grad_norm": 22.641588680221716, + "learning_rate": 3.546639271336916e-07, + "loss": 0.4783, + "step": 17622 + }, + { + "epoch": 2.75, + "grad_norm": 19.627109028541238, + "learning_rate": 3.5421892229179354e-07, + "loss": 0.4309, + "step": 17623 + }, + { + "epoch": 2.75, + "grad_norm": 23.303779076656976, + 
"learning_rate": 3.537741917702664e-07, + "loss": 0.3577, + "step": 17624 + }, + { + "epoch": 2.75, + "grad_norm": 16.95909705822597, + "learning_rate": 3.5332973558175996e-07, + "loss": 0.4548, + "step": 17625 + }, + { + "epoch": 2.75, + "grad_norm": 36.23098984089105, + "learning_rate": 3.52885553738912e-07, + "loss": 0.4801, + "step": 17626 + }, + { + "epoch": 2.75, + "grad_norm": 17.545105013485518, + "learning_rate": 3.524416462543556e-07, + "loss": 0.3947, + "step": 17627 + }, + { + "epoch": 2.75, + "grad_norm": 17.51014834664943, + "learning_rate": 3.519980131407152e-07, + "loss": 0.4006, + "step": 17628 + }, + { + "epoch": 2.75, + "grad_norm": 41.87052157353007, + "learning_rate": 3.515546544106074e-07, + "loss": 0.3838, + "step": 17629 + }, + { + "epoch": 2.75, + "grad_norm": 16.070821691892426, + "learning_rate": 3.5111157007663986e-07, + "loss": 0.3933, + "step": 17630 + }, + { + "epoch": 2.75, + "grad_norm": 23.179131830676027, + "learning_rate": 3.506687601514158e-07, + "loss": 0.3897, + "step": 17631 + }, + { + "epoch": 2.75, + "grad_norm": 22.031478653390828, + "learning_rate": 3.502262246475285e-07, + "loss": 0.4245, + "step": 17632 + }, + { + "epoch": 2.75, + "grad_norm": 20.278895357513118, + "learning_rate": 3.497839635775613e-07, + "loss": 0.3874, + "step": 17633 + }, + { + "epoch": 2.75, + "grad_norm": 24.700946334179584, + "learning_rate": 3.493419769540929e-07, + "loss": 0.4042, + "step": 17634 + }, + { + "epoch": 2.75, + "grad_norm": 26.533459846565975, + "learning_rate": 3.489002647896933e-07, + "loss": 0.3964, + "step": 17635 + }, + { + "epoch": 2.75, + "grad_norm": 20.071327751024754, + "learning_rate": 3.4845882709692356e-07, + "loss": 0.4315, + "step": 17636 + }, + { + "epoch": 2.75, + "grad_norm": 18.20121481290997, + "learning_rate": 3.4801766388834037e-07, + "loss": 0.424, + "step": 17637 + }, + { + "epoch": 2.76, + "grad_norm": 21.653808124428775, + "learning_rate": 3.4757677517648916e-07, + "loss": 0.438, + "step": 17638 + }, + { + "epoch": 2.76, + "grad_norm": 17.261791980119334, + "learning_rate": 3.471361609739055e-07, + "loss": 0.4069, + "step": 17639 + }, + { + "epoch": 2.76, + "grad_norm": 13.676234698956899, + "learning_rate": 3.4669582129312373e-07, + "loss": 0.4399, + "step": 17640 + }, + { + "epoch": 2.76, + "grad_norm": 18.76643994187275, + "learning_rate": 3.462557561466662e-07, + "loss": 0.3931, + "step": 17641 + }, + { + "epoch": 2.76, + "grad_norm": 39.28658178867971, + "learning_rate": 3.458159655470483e-07, + "loss": 0.4604, + "step": 17642 + }, + { + "epoch": 2.76, + "grad_norm": 17.26384231158706, + "learning_rate": 3.4537644950677576e-07, + "loss": 0.4436, + "step": 17643 + }, + { + "epoch": 2.76, + "grad_norm": 20.873303179177018, + "learning_rate": 3.449372080383484e-07, + "loss": 0.4575, + "step": 17644 + }, + { + "epoch": 2.76, + "grad_norm": 21.13398775509945, + "learning_rate": 3.4449824115426076e-07, + "loss": 0.4997, + "step": 17645 + }, + { + "epoch": 2.76, + "grad_norm": 18.411864926831946, + "learning_rate": 3.440595488669951e-07, + "loss": 0.3692, + "step": 17646 + }, + { + "epoch": 2.76, + "grad_norm": 22.910887637094188, + "learning_rate": 3.4362113118902475e-07, + "loss": 0.4282, + "step": 17647 + }, + { + "epoch": 2.76, + "grad_norm": 18.869032780226853, + "learning_rate": 3.4318298813282193e-07, + "loss": 0.4278, + "step": 17648 + }, + { + "epoch": 2.76, + "grad_norm": 28.135475465928945, + "learning_rate": 3.4274511971084556e-07, + "loss": 0.4167, + "step": 17649 + }, + { + "epoch": 2.76, + "grad_norm": 
20.30215450484469, + "learning_rate": 3.423075259355491e-07, + "loss": 0.3607, + "step": 17650 + }, + { + "epoch": 2.76, + "grad_norm": 40.711977948129025, + "learning_rate": 3.41870206819378e-07, + "loss": 0.5404, + "step": 17651 + }, + { + "epoch": 2.76, + "grad_norm": 18.00839845113648, + "learning_rate": 3.4143316237476686e-07, + "loss": 0.3767, + "step": 17652 + }, + { + "epoch": 2.76, + "grad_norm": 19.35476609541522, + "learning_rate": 3.409963926141457e-07, + "loss": 0.4266, + "step": 17653 + }, + { + "epoch": 2.76, + "grad_norm": 19.065449786604272, + "learning_rate": 3.4055989754993913e-07, + "loss": 0.385, + "step": 17654 + }, + { + "epoch": 2.76, + "grad_norm": 20.312340432053816, + "learning_rate": 3.4012367719455595e-07, + "loss": 0.4318, + "step": 17655 + }, + { + "epoch": 2.76, + "grad_norm": 26.78982832485094, + "learning_rate": 3.3968773156040745e-07, + "loss": 0.4785, + "step": 17656 + }, + { + "epoch": 2.76, + "grad_norm": 24.784937148331927, + "learning_rate": 3.392520606598859e-07, + "loss": 0.403, + "step": 17657 + }, + { + "epoch": 2.76, + "grad_norm": 27.538531470623646, + "learning_rate": 3.3881666450538695e-07, + "loss": 0.4635, + "step": 17658 + }, + { + "epoch": 2.76, + "grad_norm": 23.1561216262008, + "learning_rate": 3.383815431092885e-07, + "loss": 0.3897, + "step": 17659 + }, + { + "epoch": 2.76, + "grad_norm": 14.400811593688012, + "learning_rate": 3.3794669648396727e-07, + "loss": 0.387, + "step": 17660 + }, + { + "epoch": 2.76, + "grad_norm": 21.950143112675182, + "learning_rate": 3.3751212464179007e-07, + "loss": 0.5017, + "step": 17661 + }, + { + "epoch": 2.76, + "grad_norm": 16.941965160337542, + "learning_rate": 3.370778275951159e-07, + "loss": 0.4648, + "step": 17662 + }, + { + "epoch": 2.76, + "grad_norm": 27.33928110279793, + "learning_rate": 3.3664380535629595e-07, + "loss": 0.4601, + "step": 17663 + }, + { + "epoch": 2.76, + "grad_norm": 20.795253560765687, + "learning_rate": 3.362100579376726e-07, + "loss": 0.4437, + "step": 17664 + }, + { + "epoch": 2.76, + "grad_norm": 37.16993754871001, + "learning_rate": 3.357765853515815e-07, + "loss": 0.4154, + "step": 17665 + }, + { + "epoch": 2.76, + "grad_norm": 21.075425902714972, + "learning_rate": 3.353433876103529e-07, + "loss": 0.4378, + "step": 17666 + }, + { + "epoch": 2.76, + "grad_norm": 16.344221736489434, + "learning_rate": 3.349104647263046e-07, + "loss": 0.4249, + "step": 17667 + }, + { + "epoch": 2.76, + "grad_norm": 22.077663558881213, + "learning_rate": 3.3447781671174684e-07, + "loss": 0.3781, + "step": 17668 + }, + { + "epoch": 2.76, + "grad_norm": 22.78743554873619, + "learning_rate": 3.340454435789864e-07, + "loss": 0.4619, + "step": 17669 + }, + { + "epoch": 2.76, + "grad_norm": 17.302493432926585, + "learning_rate": 3.336133453403201e-07, + "loss": 0.4299, + "step": 17670 + }, + { + "epoch": 2.76, + "grad_norm": 21.881522257396004, + "learning_rate": 3.331815220080359e-07, + "loss": 0.4149, + "step": 17671 + }, + { + "epoch": 2.76, + "grad_norm": 20.621173031454553, + "learning_rate": 3.3274997359441174e-07, + "loss": 0.4128, + "step": 17672 + }, + { + "epoch": 2.76, + "grad_norm": 21.368764571547246, + "learning_rate": 3.323187001117245e-07, + "loss": 0.4402, + "step": 17673 + }, + { + "epoch": 2.76, + "grad_norm": 27.149741829475385, + "learning_rate": 3.318877015722377e-07, + "loss": 0.5089, + "step": 17674 + }, + { + "epoch": 2.76, + "grad_norm": 18.583278227643845, + "learning_rate": 3.3145697798821044e-07, + "loss": 0.4237, + "step": 17675 + }, + { + "epoch": 2.76, + 
"grad_norm": 42.922950104870736, + "learning_rate": 3.3102652937189064e-07, + "loss": 0.5172, + "step": 17676 + }, + { + "epoch": 2.76, + "grad_norm": 26.244669910682696, + "learning_rate": 3.3059635573551854e-07, + "loss": 0.4231, + "step": 17677 + }, + { + "epoch": 2.76, + "grad_norm": 26.483313130159356, + "learning_rate": 3.301664570913299e-07, + "loss": 0.4778, + "step": 17678 + }, + { + "epoch": 2.76, + "grad_norm": 28.538593450242786, + "learning_rate": 3.297368334515516e-07, + "loss": 0.4695, + "step": 17679 + }, + { + "epoch": 2.76, + "grad_norm": 38.310937172454736, + "learning_rate": 3.2930748482840056e-07, + "loss": 0.4796, + "step": 17680 + }, + { + "epoch": 2.76, + "grad_norm": 15.126211821269354, + "learning_rate": 3.2887841123408817e-07, + "loss": 0.3916, + "step": 17681 + }, + { + "epoch": 2.76, + "grad_norm": 25.02724307084497, + "learning_rate": 3.2844961268081457e-07, + "loss": 0.4375, + "step": 17682 + }, + { + "epoch": 2.76, + "grad_norm": 14.57181444122784, + "learning_rate": 3.2802108918077777e-07, + "loss": 0.3595, + "step": 17683 + }, + { + "epoch": 2.76, + "grad_norm": 32.14063490115, + "learning_rate": 3.275928407461615e-07, + "loss": 0.4169, + "step": 17684 + }, + { + "epoch": 2.76, + "grad_norm": 23.790765076268357, + "learning_rate": 3.271648673891481e-07, + "loss": 0.4441, + "step": 17685 + }, + { + "epoch": 2.76, + "grad_norm": 24.983642147497704, + "learning_rate": 3.267371691219068e-07, + "loss": 0.4119, + "step": 17686 + }, + { + "epoch": 2.76, + "grad_norm": 30.6282771940744, + "learning_rate": 3.2630974595660226e-07, + "loss": 0.4309, + "step": 17687 + }, + { + "epoch": 2.76, + "grad_norm": 19.755118661930588, + "learning_rate": 3.2588259790538923e-07, + "loss": 0.4263, + "step": 17688 + }, + { + "epoch": 2.76, + "grad_norm": 28.05257720361429, + "learning_rate": 3.2545572498041576e-07, + "loss": 0.4567, + "step": 17689 + }, + { + "epoch": 2.76, + "grad_norm": 24.982562950701627, + "learning_rate": 3.2502912719382327e-07, + "loss": 0.4216, + "step": 17690 + }, + { + "epoch": 2.76, + "grad_norm": 17.59691952127479, + "learning_rate": 3.2460280455774096e-07, + "loss": 0.3996, + "step": 17691 + }, + { + "epoch": 2.76, + "grad_norm": 20.05470851463165, + "learning_rate": 3.2417675708429574e-07, + "loss": 0.4324, + "step": 17692 + }, + { + "epoch": 2.76, + "grad_norm": 22.1273676903876, + "learning_rate": 3.237509847856035e-07, + "loss": 0.4311, + "step": 17693 + }, + { + "epoch": 2.76, + "grad_norm": 23.049775757003967, + "learning_rate": 3.2332548767377237e-07, + "loss": 0.3871, + "step": 17694 + }, + { + "epoch": 2.76, + "grad_norm": 37.63828354627713, + "learning_rate": 3.229002657609037e-07, + "loss": 0.403, + "step": 17695 + }, + { + "epoch": 2.76, + "grad_norm": 19.827930429193863, + "learning_rate": 3.224753190590912e-07, + "loss": 0.3944, + "step": 17696 + }, + { + "epoch": 2.76, + "grad_norm": 21.935924212600288, + "learning_rate": 3.220506475804186e-07, + "loss": 0.3809, + "step": 17697 + }, + { + "epoch": 2.76, + "grad_norm": 45.98717967743818, + "learning_rate": 3.21626251336965e-07, + "loss": 0.4497, + "step": 17698 + }, + { + "epoch": 2.76, + "grad_norm": 21.917288850400528, + "learning_rate": 3.212021303407975e-07, + "loss": 0.4398, + "step": 17699 + }, + { + "epoch": 2.76, + "grad_norm": 27.201496306671956, + "learning_rate": 3.20778284603982e-07, + "loss": 0.5111, + "step": 17700 + }, + { + "epoch": 2.76, + "grad_norm": 19.294124470951655, + "learning_rate": 3.203547141385688e-07, + "loss": 0.4211, + "step": 17701 + }, + { + "epoch": 2.77, 
+ "grad_norm": 25.9780568712668, + "learning_rate": 3.1993141895660385e-07, + "loss": 0.4014, + "step": 17702 + }, + { + "epoch": 2.77, + "grad_norm": 14.963547719935852, + "learning_rate": 3.1950839907012755e-07, + "loss": 0.3566, + "step": 17703 + }, + { + "epoch": 2.77, + "grad_norm": 21.675189528343374, + "learning_rate": 3.1908565449117026e-07, + "loss": 0.3755, + "step": 17704 + }, + { + "epoch": 2.77, + "grad_norm": 27.725663897973195, + "learning_rate": 3.1866318523175344e-07, + "loss": 0.4299, + "step": 17705 + }, + { + "epoch": 2.77, + "grad_norm": 33.222484769384224, + "learning_rate": 3.18240991303892e-07, + "loss": 0.5161, + "step": 17706 + }, + { + "epoch": 2.77, + "grad_norm": 31.508710530122755, + "learning_rate": 3.178190727195918e-07, + "loss": 0.3962, + "step": 17707 + }, + { + "epoch": 2.77, + "grad_norm": 23.482210403450914, + "learning_rate": 3.173974294908555e-07, + "loss": 0.4355, + "step": 17708 + }, + { + "epoch": 2.77, + "grad_norm": 21.52061158129383, + "learning_rate": 3.169760616296702e-07, + "loss": 0.4382, + "step": 17709 + }, + { + "epoch": 2.77, + "grad_norm": 24.220009148382967, + "learning_rate": 3.1655496914802296e-07, + "loss": 0.4598, + "step": 17710 + }, + { + "epoch": 2.77, + "grad_norm": 27.42700087521264, + "learning_rate": 3.1613415205788643e-07, + "loss": 0.5331, + "step": 17711 + }, + { + "epoch": 2.77, + "grad_norm": 37.06494454046371, + "learning_rate": 3.1571361037122996e-07, + "loss": 0.4245, + "step": 17712 + }, + { + "epoch": 2.77, + "grad_norm": 19.8939808698795, + "learning_rate": 3.152933441000139e-07, + "loss": 0.4783, + "step": 17713 + }, + { + "epoch": 2.77, + "grad_norm": 24.52823995905179, + "learning_rate": 3.148733532561887e-07, + "loss": 0.4167, + "step": 17714 + }, + { + "epoch": 2.77, + "grad_norm": 29.28182344170294, + "learning_rate": 3.1445363785170155e-07, + "loss": 0.4548, + "step": 17715 + }, + { + "epoch": 2.77, + "grad_norm": 40.379343304478844, + "learning_rate": 3.140341978984851e-07, + "loss": 0.5007, + "step": 17716 + }, + { + "epoch": 2.77, + "grad_norm": 25.06055781813353, + "learning_rate": 3.13615033408472e-07, + "loss": 0.4108, + "step": 17717 + }, + { + "epoch": 2.77, + "grad_norm": 17.125701212101035, + "learning_rate": 3.1319614439357827e-07, + "loss": 0.3849, + "step": 17718 + }, + { + "epoch": 2.77, + "grad_norm": 22.820831586764452, + "learning_rate": 3.1277753086572105e-07, + "loss": 0.4135, + "step": 17719 + }, + { + "epoch": 2.77, + "grad_norm": 22.55893056345587, + "learning_rate": 3.123591928368042e-07, + "loss": 0.4514, + "step": 17720 + }, + { + "epoch": 2.77, + "grad_norm": 24.306792548790323, + "learning_rate": 3.1194113031872584e-07, + "loss": 0.4446, + "step": 17721 + }, + { + "epoch": 2.77, + "grad_norm": 26.925897571174666, + "learning_rate": 3.115233433233722e-07, + "loss": 0.4256, + "step": 17722 + }, + { + "epoch": 2.77, + "grad_norm": 14.986215810505279, + "learning_rate": 3.111058318626281e-07, + "loss": 0.3723, + "step": 17723 + }, + { + "epoch": 2.77, + "grad_norm": 25.33960368579462, + "learning_rate": 3.106885959483652e-07, + "loss": 0.3996, + "step": 17724 + }, + { + "epoch": 2.77, + "grad_norm": 22.430890990748615, + "learning_rate": 3.102716355924518e-07, + "loss": 0.4419, + "step": 17725 + }, + { + "epoch": 2.77, + "grad_norm": 34.25280472790416, + "learning_rate": 3.09854950806745e-07, + "loss": 0.4201, + "step": 17726 + }, + { + "epoch": 2.77, + "grad_norm": 23.198934483621148, + "learning_rate": 3.094385416030943e-07, + "loss": 0.4546, + "step": 17727 + }, + { + "epoch": 
2.77, + "grad_norm": 16.194276718604623, + "learning_rate": 3.0902240799334237e-07, + "loss": 0.4304, + "step": 17728 + }, + { + "epoch": 2.77, + "grad_norm": 27.617083893275396, + "learning_rate": 3.0860654998932536e-07, + "loss": 0.4229, + "step": 17729 + }, + { + "epoch": 2.77, + "grad_norm": 26.292763872018554, + "learning_rate": 3.0819096760286826e-07, + "loss": 0.4271, + "step": 17730 + }, + { + "epoch": 2.77, + "grad_norm": 16.192417339603097, + "learning_rate": 3.0777566084578934e-07, + "loss": 0.3836, + "step": 17731 + }, + { + "epoch": 2.77, + "grad_norm": 27.763047490997057, + "learning_rate": 3.0736062972990145e-07, + "loss": 0.4651, + "step": 17732 + }, + { + "epoch": 2.77, + "grad_norm": 26.461177569174026, + "learning_rate": 3.069458742670073e-07, + "loss": 0.4566, + "step": 17733 + }, + { + "epoch": 2.77, + "grad_norm": 26.637765681173875, + "learning_rate": 3.065313944689019e-07, + "loss": 0.4697, + "step": 17734 + }, + { + "epoch": 2.77, + "grad_norm": 16.065984463969304, + "learning_rate": 3.0611719034737363e-07, + "loss": 0.3595, + "step": 17735 + }, + { + "epoch": 2.77, + "grad_norm": 20.578260994811572, + "learning_rate": 3.05703261914202e-07, + "loss": 0.4632, + "step": 17736 + }, + { + "epoch": 2.77, + "grad_norm": 22.242904327948846, + "learning_rate": 3.0528960918115744e-07, + "loss": 0.4301, + "step": 17737 + }, + { + "epoch": 2.77, + "grad_norm": 29.37043488924438, + "learning_rate": 3.048762321600063e-07, + "loss": 0.4664, + "step": 17738 + }, + { + "epoch": 2.77, + "grad_norm": 20.084968414431373, + "learning_rate": 3.0446313086250346e-07, + "loss": 0.4456, + "step": 17739 + }, + { + "epoch": 2.77, + "grad_norm": 31.489069310879728, + "learning_rate": 3.0405030530039734e-07, + "loss": 0.4943, + "step": 17740 + }, + { + "epoch": 2.77, + "grad_norm": 26.18330621369408, + "learning_rate": 3.0363775548542753e-07, + "loss": 0.5134, + "step": 17741 + }, + { + "epoch": 2.77, + "grad_norm": 21.600441304473748, + "learning_rate": 3.0322548142932893e-07, + "loss": 0.4052, + "step": 17742 + }, + { + "epoch": 2.77, + "grad_norm": 21.945506302469074, + "learning_rate": 3.0281348314382453e-07, + "loss": 0.4587, + "step": 17743 + }, + { + "epoch": 2.77, + "grad_norm": 21.23798674940019, + "learning_rate": 3.0240176064063266e-07, + "loss": 0.4016, + "step": 17744 + }, + { + "epoch": 2.77, + "grad_norm": 23.22841361426061, + "learning_rate": 3.0199031393146174e-07, + "loss": 0.5186, + "step": 17745 + }, + { + "epoch": 2.77, + "grad_norm": 21.262510650425778, + "learning_rate": 3.015791430280135e-07, + "loss": 0.4451, + "step": 17746 + }, + { + "epoch": 2.77, + "grad_norm": 34.379464918082775, + "learning_rate": 3.0116824794197977e-07, + "loss": 0.4826, + "step": 17747 + }, + { + "epoch": 2.77, + "grad_norm": 21.097081391445617, + "learning_rate": 3.0075762868504777e-07, + "loss": 0.438, + "step": 17748 + }, + { + "epoch": 2.77, + "grad_norm": 22.355576222488185, + "learning_rate": 3.003472852688949e-07, + "loss": 0.4465, + "step": 17749 + }, + { + "epoch": 2.77, + "grad_norm": 17.42190627407466, + "learning_rate": 2.999372177051918e-07, + "loss": 0.3745, + "step": 17750 + }, + { + "epoch": 2.77, + "grad_norm": 16.05724652493624, + "learning_rate": 2.9952742600559917e-07, + "loss": 0.4138, + "step": 17751 + }, + { + "epoch": 2.77, + "grad_norm": 24.001924959113907, + "learning_rate": 2.9911791018177204e-07, + "loss": 0.4154, + "step": 17752 + }, + { + "epoch": 2.77, + "grad_norm": 16.837550494465944, + "learning_rate": 2.9870867024535675e-07, + "loss": 0.4127, + "step": 17753 + 
}, + { + "epoch": 2.77, + "grad_norm": 20.721680934436208, + "learning_rate": 2.9829970620799176e-07, + "loss": 0.448, + "step": 17754 + }, + { + "epoch": 2.77, + "grad_norm": 21.171433216848833, + "learning_rate": 2.978910180813088e-07, + "loss": 0.3914, + "step": 17755 + }, + { + "epoch": 2.77, + "grad_norm": 26.16145196297292, + "learning_rate": 2.9748260587692867e-07, + "loss": 0.4608, + "step": 17756 + }, + { + "epoch": 2.77, + "grad_norm": 32.4257215006004, + "learning_rate": 2.9707446960646644e-07, + "loss": 0.4329, + "step": 17757 + }, + { + "epoch": 2.77, + "grad_norm": 25.96223306307443, + "learning_rate": 2.966666092815318e-07, + "loss": 0.4311, + "step": 17758 + }, + { + "epoch": 2.77, + "grad_norm": 19.596092187560284, + "learning_rate": 2.962590249137232e-07, + "loss": 0.4524, + "step": 17759 + }, + { + "epoch": 2.77, + "grad_norm": 15.09075659731838, + "learning_rate": 2.958517165146302e-07, + "loss": 0.3782, + "step": 17760 + }, + { + "epoch": 2.77, + "grad_norm": 32.97059382439807, + "learning_rate": 2.954446840958369e-07, + "loss": 0.4049, + "step": 17761 + }, + { + "epoch": 2.77, + "grad_norm": 20.238962076232934, + "learning_rate": 2.950379276689197e-07, + "loss": 0.4492, + "step": 17762 + }, + { + "epoch": 2.77, + "grad_norm": 15.979882544482278, + "learning_rate": 2.946314472454481e-07, + "loss": 0.4367, + "step": 17763 + }, + { + "epoch": 2.77, + "grad_norm": 23.16373111494247, + "learning_rate": 2.9422524283698074e-07, + "loss": 0.479, + "step": 17764 + }, + { + "epoch": 2.77, + "grad_norm": 14.527442167625203, + "learning_rate": 2.9381931445506826e-07, + "loss": 0.434, + "step": 17765 + }, + { + "epoch": 2.78, + "grad_norm": 22.600064304142172, + "learning_rate": 2.934136621112571e-07, + "loss": 0.4528, + "step": 17766 + }, + { + "epoch": 2.78, + "grad_norm": 20.506270650759074, + "learning_rate": 2.9300828581708464e-07, + "loss": 0.4098, + "step": 17767 + }, + { + "epoch": 2.78, + "grad_norm": 16.06691320673563, + "learning_rate": 2.926031855840761e-07, + "loss": 0.4246, + "step": 17768 + }, + { + "epoch": 2.78, + "grad_norm": 23.005656413037922, + "learning_rate": 2.9219836142375557e-07, + "loss": 0.3901, + "step": 17769 + }, + { + "epoch": 2.78, + "grad_norm": 25.414776448735328, + "learning_rate": 2.917938133476339e-07, + "loss": 0.498, + "step": 17770 + }, + { + "epoch": 2.78, + "grad_norm": 19.923262375038078, + "learning_rate": 2.913895413672174e-07, + "loss": 0.4637, + "step": 17771 + }, + { + "epoch": 2.78, + "grad_norm": 16.83951212911085, + "learning_rate": 2.909855454940025e-07, + "loss": 0.431, + "step": 17772 + }, + { + "epoch": 2.78, + "grad_norm": 32.079504482302205, + "learning_rate": 2.905818257394799e-07, + "loss": 0.4811, + "step": 17773 + }, + { + "epoch": 2.78, + "grad_norm": 21.802675499478198, + "learning_rate": 2.901783821151305e-07, + "loss": 0.3864, + "step": 17774 + }, + { + "epoch": 2.78, + "grad_norm": 18.217788936758698, + "learning_rate": 2.897752146324262e-07, + "loss": 0.4397, + "step": 17775 + }, + { + "epoch": 2.78, + "grad_norm": 29.758415823459107, + "learning_rate": 2.893723233028367e-07, + "loss": 0.429, + "step": 17776 + }, + { + "epoch": 2.78, + "grad_norm": 21.32663103816911, + "learning_rate": 2.889697081378162e-07, + "loss": 0.4532, + "step": 17777 + }, + { + "epoch": 2.78, + "grad_norm": 21.447919822576537, + "learning_rate": 2.885673691488167e-07, + "loss": 0.4094, + "step": 17778 + }, + { + "epoch": 2.78, + "grad_norm": 22.50888376324859, + "learning_rate": 2.8816530634728e-07, + "loss": 0.3904, + "step": 17779 + }, + 
{ + "epoch": 2.78, + "grad_norm": 28.601438018269526, + "learning_rate": 2.877635197446427e-07, + "loss": 0.4367, + "step": 17780 + }, + { + "epoch": 2.78, + "grad_norm": 11.398677674365361, + "learning_rate": 2.873620093523266e-07, + "loss": 0.4287, + "step": 17781 + }, + { + "epoch": 2.78, + "grad_norm": 19.87314811662009, + "learning_rate": 2.869607751817538e-07, + "loss": 0.482, + "step": 17782 + }, + { + "epoch": 2.78, + "grad_norm": 24.94801116452939, + "learning_rate": 2.8655981724433625e-07, + "loss": 0.418, + "step": 17783 + }, + { + "epoch": 2.78, + "grad_norm": 15.651201468766368, + "learning_rate": 2.861591355514748e-07, + "loss": 0.4093, + "step": 17784 + }, + { + "epoch": 2.78, + "grad_norm": 20.469020688108923, + "learning_rate": 2.857587301145637e-07, + "loss": 0.4403, + "step": 17785 + }, + { + "epoch": 2.78, + "grad_norm": 25.939070353022245, + "learning_rate": 2.853586009449927e-07, + "loss": 0.4085, + "step": 17786 + }, + { + "epoch": 2.78, + "grad_norm": 16.162492029552457, + "learning_rate": 2.8495874805413937e-07, + "loss": 0.3995, + "step": 17787 + }, + { + "epoch": 2.78, + "grad_norm": 15.75535694908689, + "learning_rate": 2.845591714533769e-07, + "loss": 0.4161, + "step": 17788 + }, + { + "epoch": 2.78, + "grad_norm": 24.344400072171297, + "learning_rate": 2.841598711540694e-07, + "loss": 0.4065, + "step": 17789 + }, + { + "epoch": 2.78, + "grad_norm": 31.825420460429424, + "learning_rate": 2.83760847167569e-07, + "loss": 0.4998, + "step": 17790 + }, + { + "epoch": 2.78, + "grad_norm": 24.79331486690112, + "learning_rate": 2.8336209950522774e-07, + "loss": 0.4992, + "step": 17791 + }, + { + "epoch": 2.78, + "grad_norm": 29.61541954485294, + "learning_rate": 2.829636281783843e-07, + "loss": 0.4473, + "step": 17792 + }, + { + "epoch": 2.78, + "grad_norm": 27.110733862248356, + "learning_rate": 2.825654331983707e-07, + "loss": 0.4556, + "step": 17793 + }, + { + "epoch": 2.78, + "grad_norm": 17.257772404834387, + "learning_rate": 2.8216751457651124e-07, + "loss": 0.4037, + "step": 17794 + }, + { + "epoch": 2.78, + "grad_norm": 30.776153927814182, + "learning_rate": 2.8176987232412354e-07, + "loss": 0.4971, + "step": 17795 + }, + { + "epoch": 2.78, + "grad_norm": 19.981997399943353, + "learning_rate": 2.8137250645251636e-07, + "loss": 0.4503, + "step": 17796 + }, + { + "epoch": 2.78, + "grad_norm": 21.78206100896924, + "learning_rate": 2.8097541697298835e-07, + "loss": 0.4174, + "step": 17797 + }, + { + "epoch": 2.78, + "grad_norm": 18.219222166950722, + "learning_rate": 2.80578603896835e-07, + "loss": 0.3872, + "step": 17798 + }, + { + "epoch": 2.78, + "grad_norm": 13.712528351656207, + "learning_rate": 2.8018206723533947e-07, + "loss": 0.3565, + "step": 17799 + }, + { + "epoch": 2.78, + "grad_norm": 16.620171348536875, + "learning_rate": 2.7978580699978054e-07, + "loss": 0.3975, + "step": 17800 + }, + { + "epoch": 2.78, + "grad_norm": 28.43347515820931, + "learning_rate": 2.7938982320142693e-07, + "loss": 0.4835, + "step": 17801 + }, + { + "epoch": 2.78, + "grad_norm": 33.41241235295184, + "learning_rate": 2.789941158515408e-07, + "loss": 0.4707, + "step": 17802 + }, + { + "epoch": 2.78, + "grad_norm": 34.165840575894414, + "learning_rate": 2.785986849613753e-07, + "loss": 0.4095, + "step": 17803 + }, + { + "epoch": 2.78, + "grad_norm": 22.33324719318697, + "learning_rate": 2.78203530542176e-07, + "loss": 0.4473, + "step": 17804 + }, + { + "epoch": 2.78, + "grad_norm": 24.86358169649824, + "learning_rate": 2.7780865260518265e-07, + "loss": 0.4104, + "step": 17805 + 
}, + { + "epoch": 2.78, + "grad_norm": 17.247536409893662, + "learning_rate": 2.774140511616219e-07, + "loss": 0.3936, + "step": 17806 + }, + { + "epoch": 2.78, + "grad_norm": 24.528570963119602, + "learning_rate": 2.770197262227181e-07, + "loss": 0.4658, + "step": 17807 + }, + { + "epoch": 2.78, + "grad_norm": 18.644543764248844, + "learning_rate": 2.7662567779968677e-07, + "loss": 0.4331, + "step": 17808 + }, + { + "epoch": 2.78, + "grad_norm": 21.046297085607907, + "learning_rate": 2.762319059037333e-07, + "loss": 0.446, + "step": 17809 + }, + { + "epoch": 2.78, + "grad_norm": 16.52821308456191, + "learning_rate": 2.758384105460554e-07, + "loss": 0.3938, + "step": 17810 + }, + { + "epoch": 2.78, + "grad_norm": 18.39181994345637, + "learning_rate": 2.7544519173784423e-07, + "loss": 0.4082, + "step": 17811 + }, + { + "epoch": 2.78, + "grad_norm": 24.79687175558414, + "learning_rate": 2.7505224949028297e-07, + "loss": 0.4646, + "step": 17812 + }, + { + "epoch": 2.78, + "grad_norm": 30.372453472941118, + "learning_rate": 2.7465958381454714e-07, + "loss": 0.4564, + "step": 17813 + }, + { + "epoch": 2.78, + "grad_norm": 20.662787697018608, + "learning_rate": 2.7426719472180454e-07, + "loss": 0.4665, + "step": 17814 + }, + { + "epoch": 2.78, + "grad_norm": 20.80120540974446, + "learning_rate": 2.738750822232128e-07, + "loss": 0.4285, + "step": 17815 + }, + { + "epoch": 2.78, + "grad_norm": 19.817918329884925, + "learning_rate": 2.7348324632992416e-07, + "loss": 0.3917, + "step": 17816 + }, + { + "epoch": 2.78, + "grad_norm": 15.154300239838921, + "learning_rate": 2.7309168705308196e-07, + "loss": 0.3714, + "step": 17817 + }, + { + "epoch": 2.78, + "grad_norm": 24.161499256870428, + "learning_rate": 2.7270040440382283e-07, + "loss": 0.4113, + "step": 17818 + }, + { + "epoch": 2.78, + "grad_norm": 16.196009073951082, + "learning_rate": 2.723093983932734e-07, + "loss": 0.3644, + "step": 17819 + }, + { + "epoch": 2.78, + "grad_norm": 35.6433101626748, + "learning_rate": 2.719186690325537e-07, + "loss": 0.4416, + "step": 17820 + }, + { + "epoch": 2.78, + "grad_norm": 19.85555892314469, + "learning_rate": 2.7152821633277705e-07, + "loss": 0.418, + "step": 17821 + }, + { + "epoch": 2.78, + "grad_norm": 18.78496606706317, + "learning_rate": 2.7113804030504564e-07, + "loss": 0.4408, + "step": 17822 + }, + { + "epoch": 2.78, + "grad_norm": 20.062839569058323, + "learning_rate": 2.7074814096045954e-07, + "loss": 0.3768, + "step": 17823 + }, + { + "epoch": 2.78, + "grad_norm": 14.511767594472179, + "learning_rate": 2.7035851831010206e-07, + "loss": 0.3782, + "step": 17824 + }, + { + "epoch": 2.78, + "grad_norm": 17.075831909355863, + "learning_rate": 2.699691723650577e-07, + "loss": 0.4239, + "step": 17825 + }, + { + "epoch": 2.78, + "grad_norm": 22.544019319984162, + "learning_rate": 2.6958010313639873e-07, + "loss": 0.4414, + "step": 17826 + }, + { + "epoch": 2.78, + "grad_norm": 25.612221742317452, + "learning_rate": 2.6919131063518956e-07, + "loss": 0.4288, + "step": 17827 + }, + { + "epoch": 2.78, + "grad_norm": 31.06784850113173, + "learning_rate": 2.6880279487248697e-07, + "loss": 0.4189, + "step": 17828 + }, + { + "epoch": 2.78, + "grad_norm": 21.37054766468191, + "learning_rate": 2.684145558593398e-07, + "loss": 0.4404, + "step": 17829 + }, + { + "epoch": 2.79, + "grad_norm": 21.728114558601035, + "learning_rate": 2.680265936067905e-07, + "loss": 0.42, + "step": 17830 + }, + { + "epoch": 2.79, + "grad_norm": 26.826270009886528, + "learning_rate": 2.6763890812587126e-07, + "loss": 0.4683, + 
"step": 17831 + }, + { + "epoch": 2.79, + "grad_norm": 26.725095256460804, + "learning_rate": 2.6725149942760875e-07, + "loss": 0.4641, + "step": 17832 + }, + { + "epoch": 2.79, + "grad_norm": 16.005758651779562, + "learning_rate": 2.668643675230198e-07, + "loss": 0.4046, + "step": 17833 + }, + { + "epoch": 2.79, + "grad_norm": 20.67878561715757, + "learning_rate": 2.664775124231156e-07, + "loss": 0.4316, + "step": 17834 + }, + { + "epoch": 2.79, + "grad_norm": 18.55525916574125, + "learning_rate": 2.660909341388951e-07, + "loss": 0.3749, + "step": 17835 + }, + { + "epoch": 2.79, + "grad_norm": 27.402857852172595, + "learning_rate": 2.65704632681355e-07, + "loss": 0.4265, + "step": 17836 + }, + { + "epoch": 2.79, + "grad_norm": 18.315326874458894, + "learning_rate": 2.653186080614811e-07, + "loss": 0.3921, + "step": 17837 + }, + { + "epoch": 2.79, + "grad_norm": 20.873260227104232, + "learning_rate": 2.649328602902501e-07, + "loss": 0.4263, + "step": 17838 + }, + { + "epoch": 2.79, + "grad_norm": 21.511431394077448, + "learning_rate": 2.6454738937863545e-07, + "loss": 0.3848, + "step": 17839 + }, + { + "epoch": 2.79, + "grad_norm": 23.758257753225937, + "learning_rate": 2.6416219533759724e-07, + "loss": 0.3854, + "step": 17840 + }, + { + "epoch": 2.79, + "grad_norm": 20.036648640242426, + "learning_rate": 2.637772781780901e-07, + "loss": 0.4291, + "step": 17841 + }, + { + "epoch": 2.79, + "grad_norm": 16.628179702317524, + "learning_rate": 2.6339263791106294e-07, + "loss": 0.4086, + "step": 17842 + }, + { + "epoch": 2.79, + "grad_norm": 22.163010623028203, + "learning_rate": 2.630082745474538e-07, + "loss": 0.4376, + "step": 17843 + }, + { + "epoch": 2.79, + "grad_norm": 20.723038656749363, + "learning_rate": 2.6262418809819166e-07, + "loss": 0.3908, + "step": 17844 + }, + { + "epoch": 2.79, + "grad_norm": 20.039714017393614, + "learning_rate": 2.6224037857420225e-07, + "loss": 0.4845, + "step": 17845 + }, + { + "epoch": 2.79, + "grad_norm": 24.176386528678915, + "learning_rate": 2.6185684598640013e-07, + "loss": 0.3959, + "step": 17846 + }, + { + "epoch": 2.79, + "grad_norm": 23.52618035637394, + "learning_rate": 2.614735903456933e-07, + "loss": 0.4044, + "step": 17847 + }, + { + "epoch": 2.79, + "grad_norm": 21.43696592738896, + "learning_rate": 2.610906116629797e-07, + "loss": 0.4376, + "step": 17848 + }, + { + "epoch": 2.79, + "grad_norm": 22.564452098105797, + "learning_rate": 2.607079099491516e-07, + "loss": 0.4248, + "step": 17849 + }, + { + "epoch": 2.79, + "grad_norm": 18.37316254670309, + "learning_rate": 2.6032548521509384e-07, + "loss": 0.4104, + "step": 17850 + }, + { + "epoch": 2.79, + "grad_norm": 15.671465737161004, + "learning_rate": 2.59943337471682e-07, + "loss": 0.3953, + "step": 17851 + }, + { + "epoch": 2.79, + "grad_norm": 27.93382537280599, + "learning_rate": 2.595614667297841e-07, + "loss": 0.4228, + "step": 17852 + }, + { + "epoch": 2.79, + "grad_norm": 27.247811925166463, + "learning_rate": 2.591798730002593e-07, + "loss": 0.4455, + "step": 17853 + }, + { + "epoch": 2.79, + "grad_norm": 25.55483761390252, + "learning_rate": 2.587985562939599e-07, + "loss": 0.4557, + "step": 17854 + }, + { + "epoch": 2.79, + "grad_norm": 33.14607875360679, + "learning_rate": 2.5841751662173287e-07, + "loss": 0.5067, + "step": 17855 + }, + { + "epoch": 2.79, + "grad_norm": 18.296160658338522, + "learning_rate": 2.5803675399441173e-07, + "loss": 0.5111, + "step": 17856 + }, + { + "epoch": 2.79, + "grad_norm": 19.43695436414049, + "learning_rate": 2.576562684228279e-07, + "loss": 
0.406, + "step": 17857 + }, + { + "epoch": 2.79, + "grad_norm": 27.328284299679527, + "learning_rate": 2.5727605991779925e-07, + "loss": 0.4766, + "step": 17858 + }, + { + "epoch": 2.79, + "grad_norm": 27.19147341620862, + "learning_rate": 2.5689612849014166e-07, + "loss": 0.4341, + "step": 17859 + }, + { + "epoch": 2.79, + "grad_norm": 25.58358957947211, + "learning_rate": 2.565164741506576e-07, + "loss": 0.4133, + "step": 17860 + }, + { + "epoch": 2.79, + "grad_norm": 17.142896297440632, + "learning_rate": 2.56137096910144e-07, + "loss": 0.4456, + "step": 17861 + }, + { + "epoch": 2.79, + "grad_norm": 26.02469362052001, + "learning_rate": 2.557579967793933e-07, + "loss": 0.467, + "step": 17862 + }, + { + "epoch": 2.79, + "grad_norm": 22.910104164526125, + "learning_rate": 2.5537917376918466e-07, + "loss": 0.4748, + "step": 17863 + }, + { + "epoch": 2.79, + "grad_norm": 30.37279936389306, + "learning_rate": 2.5500062789029165e-07, + "loss": 0.3871, + "step": 17864 + }, + { + "epoch": 2.79, + "grad_norm": 16.313061759308482, + "learning_rate": 2.546223591534802e-07, + "loss": 0.3915, + "step": 17865 + }, + { + "epoch": 2.79, + "grad_norm": 19.73817612321626, + "learning_rate": 2.542443675695072e-07, + "loss": 0.4208, + "step": 17866 + }, + { + "epoch": 2.79, + "grad_norm": 16.748565011865182, + "learning_rate": 2.538666531491241e-07, + "loss": 0.4057, + "step": 17867 + }, + { + "epoch": 2.79, + "grad_norm": 17.54794355741235, + "learning_rate": 2.5348921590307216e-07, + "loss": 0.4045, + "step": 17868 + }, + { + "epoch": 2.79, + "grad_norm": 37.00632794176606, + "learning_rate": 2.5311205584208523e-07, + "loss": 0.4751, + "step": 17869 + }, + { + "epoch": 2.79, + "grad_norm": 37.13500278433084, + "learning_rate": 2.52735172976889e-07, + "loss": 0.4981, + "step": 17870 + }, + { + "epoch": 2.79, + "grad_norm": 23.010762255813322, + "learning_rate": 2.5235856731820276e-07, + "loss": 0.4664, + "step": 17871 + }, + { + "epoch": 2.79, + "grad_norm": 18.37970622446912, + "learning_rate": 2.519822388767379e-07, + "loss": 0.4873, + "step": 17872 + }, + { + "epoch": 2.79, + "grad_norm": 48.70518842419169, + "learning_rate": 2.5160618766319477e-07, + "loss": 0.4545, + "step": 17873 + }, + { + "epoch": 2.79, + "grad_norm": 32.41233336258351, + "learning_rate": 2.512304136882682e-07, + "loss": 0.4451, + "step": 17874 + }, + { + "epoch": 2.79, + "grad_norm": 19.278552234495788, + "learning_rate": 2.508549169626462e-07, + "loss": 0.4012, + "step": 17875 + }, + { + "epoch": 2.79, + "grad_norm": 17.404091322349313, + "learning_rate": 2.504796974970081e-07, + "loss": 0.3592, + "step": 17876 + }, + { + "epoch": 2.79, + "grad_norm": 25.55736350355255, + "learning_rate": 2.5010475530202414e-07, + "loss": 0.3977, + "step": 17877 + }, + { + "epoch": 2.79, + "grad_norm": 15.432892991775566, + "learning_rate": 2.4973009038835593e-07, + "loss": 0.3709, + "step": 17878 + }, + { + "epoch": 2.79, + "grad_norm": 23.600486098199184, + "learning_rate": 2.4935570276666157e-07, + "loss": 0.4114, + "step": 17879 + }, + { + "epoch": 2.79, + "grad_norm": 43.655283837355256, + "learning_rate": 2.4898159244758693e-07, + "loss": 0.4455, + "step": 17880 + }, + { + "epoch": 2.79, + "grad_norm": 31.267924611682588, + "learning_rate": 2.486077594417724e-07, + "loss": 0.3964, + "step": 17881 + }, + { + "epoch": 2.79, + "grad_norm": 20.25477956399121, + "learning_rate": 2.482342037598473e-07, + "loss": 0.5159, + "step": 17882 + }, + { + "epoch": 2.79, + "grad_norm": 21.82578694458567, + "learning_rate": 2.4786092541243645e-07, + 
"loss": 0.4631, + "step": 17883 + }, + { + "epoch": 2.79, + "grad_norm": 34.87979369989602, + "learning_rate": 2.474879244101569e-07, + "loss": 0.4869, + "step": 17884 + }, + { + "epoch": 2.79, + "grad_norm": 17.024314767607805, + "learning_rate": 2.471152007636157e-07, + "loss": 0.4116, + "step": 17885 + }, + { + "epoch": 2.79, + "grad_norm": 17.135663809825058, + "learning_rate": 2.4674275448341333e-07, + "loss": 0.4106, + "step": 17886 + }, + { + "epoch": 2.79, + "grad_norm": 17.26663923434738, + "learning_rate": 2.463705855801413e-07, + "loss": 0.3772, + "step": 17887 + }, + { + "epoch": 2.79, + "grad_norm": 29.899714734935777, + "learning_rate": 2.4599869406438327e-07, + "loss": 0.4664, + "step": 17888 + }, + { + "epoch": 2.79, + "grad_norm": 26.476963347470463, + "learning_rate": 2.456270799467175e-07, + "loss": 0.4235, + "step": 17889 + }, + { + "epoch": 2.79, + "grad_norm": 23.803210952907094, + "learning_rate": 2.452557432377101e-07, + "loss": 0.4169, + "step": 17890 + }, + { + "epoch": 2.79, + "grad_norm": 25.70008974908491, + "learning_rate": 2.4488468394792463e-07, + "loss": 0.4275, + "step": 17891 + }, + { + "epoch": 2.79, + "grad_norm": 27.93936782231887, + "learning_rate": 2.4451390208791173e-07, + "loss": 0.4253, + "step": 17892 + }, + { + "epoch": 2.79, + "grad_norm": 22.847078125282508, + "learning_rate": 2.4414339766821614e-07, + "loss": 0.4504, + "step": 17893 + }, + { + "epoch": 2.8, + "grad_norm": 28.535500378005608, + "learning_rate": 2.437731706993751e-07, + "loss": 0.4524, + "step": 17894 + }, + { + "epoch": 2.8, + "grad_norm": 19.802235770219895, + "learning_rate": 2.434032211919179e-07, + "loss": 0.4161, + "step": 17895 + }, + { + "epoch": 2.8, + "grad_norm": 23.109124842586482, + "learning_rate": 2.4303354915636735e-07, + "loss": 0.3741, + "step": 17896 + }, + { + "epoch": 2.8, + "grad_norm": 22.01378010572684, + "learning_rate": 2.426641546032338e-07, + "loss": 0.4588, + "step": 17897 + }, + { + "epoch": 2.8, + "grad_norm": 16.192167988932617, + "learning_rate": 2.422950375430233e-07, + "loss": 0.3965, + "step": 17898 + }, + { + "epoch": 2.8, + "grad_norm": 35.99056494918064, + "learning_rate": 2.419261979862342e-07, + "loss": 0.4083, + "step": 17899 + }, + { + "epoch": 2.8, + "grad_norm": 17.44663932228759, + "learning_rate": 2.415576359433558e-07, + "loss": 0.3789, + "step": 17900 + }, + { + "epoch": 2.8, + "grad_norm": 21.106621868369555, + "learning_rate": 2.4118935142486975e-07, + "loss": 0.4439, + "step": 17901 + }, + { + "epoch": 2.8, + "grad_norm": 22.056615825907002, + "learning_rate": 2.408213444412499e-07, + "loss": 0.4741, + "step": 17902 + }, + { + "epoch": 2.8, + "grad_norm": 15.947655725701033, + "learning_rate": 2.404536150029624e-07, + "loss": 0.4008, + "step": 17903 + }, + { + "epoch": 2.8, + "grad_norm": 27.189831583301878, + "learning_rate": 2.4008616312046315e-07, + "loss": 0.4449, + "step": 17904 + }, + { + "epoch": 2.8, + "grad_norm": 18.155705264877064, + "learning_rate": 2.397189888042062e-07, + "loss": 0.4005, + "step": 17905 + }, + { + "epoch": 2.8, + "grad_norm": 24.218961599102407, + "learning_rate": 2.3935209206463195e-07, + "loss": 0.4187, + "step": 17906 + }, + { + "epoch": 2.8, + "grad_norm": 16.703775366458647, + "learning_rate": 2.389854729121721e-07, + "loss": 0.4173, + "step": 17907 + }, + { + "epoch": 2.8, + "grad_norm": 17.050152176026867, + "learning_rate": 2.3861913135725613e-07, + "loss": 0.378, + "step": 17908 + }, + { + "epoch": 2.8, + "grad_norm": 19.44977727219458, + "learning_rate": 2.382530674103023e-07, + "loss": 
0.4788, + "step": 17909 + }, + { + "epoch": 2.8, + "grad_norm": 28.367481589992543, + "learning_rate": 2.3788728108172121e-07, + "loss": 0.4312, + "step": 17910 + }, + { + "epoch": 2.8, + "grad_norm": 20.97728748311856, + "learning_rate": 2.3752177238191455e-07, + "loss": 0.4681, + "step": 17911 + }, + { + "epoch": 2.8, + "grad_norm": 21.009056153800827, + "learning_rate": 2.371565413212773e-07, + "loss": 0.435, + "step": 17912 + }, + { + "epoch": 2.8, + "grad_norm": 22.329011798063423, + "learning_rate": 2.3679158791019673e-07, + "loss": 0.3975, + "step": 17913 + }, + { + "epoch": 2.8, + "grad_norm": 26.850000225587483, + "learning_rate": 2.3642691215905344e-07, + "loss": 0.4575, + "step": 17914 + }, + { + "epoch": 2.8, + "grad_norm": 25.12512308536575, + "learning_rate": 2.360625140782169e-07, + "loss": 0.4266, + "step": 17915 + }, + { + "epoch": 2.8, + "grad_norm": 27.387586907688945, + "learning_rate": 2.3569839367805103e-07, + "loss": 0.4544, + "step": 17916 + }, + { + "epoch": 2.8, + "grad_norm": 31.57983804281556, + "learning_rate": 2.3533455096890978e-07, + "loss": 0.4846, + "step": 17917 + }, + { + "epoch": 2.8, + "grad_norm": 24.851156143276796, + "learning_rate": 2.3497098596114265e-07, + "loss": 0.4673, + "step": 17918 + }, + { + "epoch": 2.8, + "grad_norm": 16.525718790305167, + "learning_rate": 2.3460769866508693e-07, + "loss": 0.4218, + "step": 17919 + }, + { + "epoch": 2.8, + "grad_norm": 18.95308114193766, + "learning_rate": 2.3424468909107656e-07, + "loss": 0.4226, + "step": 17920 + }, + { + "epoch": 2.8, + "grad_norm": 22.58590657467277, + "learning_rate": 2.338819572494355e-07, + "loss": 0.4016, + "step": 17921 + }, + { + "epoch": 2.8, + "grad_norm": 24.077446403818065, + "learning_rate": 2.3351950315047777e-07, + "loss": 0.4855, + "step": 17922 + }, + { + "epoch": 2.8, + "grad_norm": 34.780151764564344, + "learning_rate": 2.3315732680451175e-07, + "loss": 0.4779, + "step": 17923 + }, + { + "epoch": 2.8, + "grad_norm": 23.17808452343965, + "learning_rate": 2.3279542822183698e-07, + "loss": 0.4154, + "step": 17924 + }, + { + "epoch": 2.8, + "grad_norm": 27.08709453612449, + "learning_rate": 2.3243380741274745e-07, + "loss": 0.4062, + "step": 17925 + }, + { + "epoch": 2.8, + "grad_norm": 16.56946141756476, + "learning_rate": 2.3207246438752719e-07, + "loss": 0.3592, + "step": 17926 + }, + { + "epoch": 2.8, + "grad_norm": 30.321893269192223, + "learning_rate": 2.3171139915645235e-07, + "loss": 0.4537, + "step": 17927 + }, + { + "epoch": 2.8, + "grad_norm": 21.366456171662513, + "learning_rate": 2.3135061172979034e-07, + "loss": 0.468, + "step": 17928 + }, + { + "epoch": 2.8, + "grad_norm": 23.441128012555836, + "learning_rate": 2.3099010211780183e-07, + "loss": 0.4628, + "step": 17929 + }, + { + "epoch": 2.8, + "grad_norm": 26.74679484617763, + "learning_rate": 2.306298703307408e-07, + "loss": 0.4472, + "step": 17930 + }, + { + "epoch": 2.8, + "grad_norm": 27.431240642995363, + "learning_rate": 2.3026991637885242e-07, + "loss": 0.445, + "step": 17931 + }, + { + "epoch": 2.8, + "grad_norm": 27.38865373158325, + "learning_rate": 2.2991024027237075e-07, + "loss": 0.4536, + "step": 17932 + }, + { + "epoch": 2.8, + "grad_norm": 17.054190229574676, + "learning_rate": 2.2955084202152644e-07, + "loss": 0.3965, + "step": 17933 + }, + { + "epoch": 2.8, + "grad_norm": 19.367665986616785, + "learning_rate": 2.2919172163654134e-07, + "loss": 0.405, + "step": 17934 + }, + { + "epoch": 2.8, + "grad_norm": 15.21667680189889, + "learning_rate": 2.2883287912762842e-07, + "loss": 0.3957, + 
"step": 17935 + }, + { + "epoch": 2.8, + "grad_norm": 24.55229995509514, + "learning_rate": 2.2847431450499169e-07, + "loss": 0.4233, + "step": 17936 + }, + { + "epoch": 2.8, + "grad_norm": 31.149448112394793, + "learning_rate": 2.281160277788297e-07, + "loss": 0.4639, + "step": 17937 + }, + { + "epoch": 2.8, + "grad_norm": 24.998319068664873, + "learning_rate": 2.277580189593298e-07, + "loss": 0.4088, + "step": 17938 + }, + { + "epoch": 2.8, + "grad_norm": 17.619317761211487, + "learning_rate": 2.2740028805667725e-07, + "loss": 0.4614, + "step": 17939 + }, + { + "epoch": 2.8, + "grad_norm": 27.025920572975707, + "learning_rate": 2.2704283508104386e-07, + "loss": 0.4051, + "step": 17940 + }, + { + "epoch": 2.8, + "grad_norm": 26.720652305075667, + "learning_rate": 2.2668566004259486e-07, + "loss": 0.4812, + "step": 17941 + }, + { + "epoch": 2.8, + "grad_norm": 22.52308261524274, + "learning_rate": 2.2632876295148765e-07, + "loss": 0.392, + "step": 17942 + }, + { + "epoch": 2.8, + "grad_norm": 27.196821214205656, + "learning_rate": 2.2597214381787301e-07, + "loss": 0.4463, + "step": 17943 + }, + { + "epoch": 2.8, + "grad_norm": 22.770853842847917, + "learning_rate": 2.2561580265189397e-07, + "loss": 0.4212, + "step": 17944 + }, + { + "epoch": 2.8, + "grad_norm": 20.035732236463883, + "learning_rate": 2.2525973946368352e-07, + "loss": 0.4327, + "step": 17945 + }, + { + "epoch": 2.8, + "grad_norm": 27.25822230189805, + "learning_rate": 2.2490395426336687e-07, + "loss": 0.4215, + "step": 17946 + }, + { + "epoch": 2.8, + "grad_norm": 19.332611294960905, + "learning_rate": 2.2454844706106482e-07, + "loss": 0.4411, + "step": 17947 + }, + { + "epoch": 2.8, + "grad_norm": 25.437310616739577, + "learning_rate": 2.2419321786688485e-07, + "loss": 0.4239, + "step": 17948 + }, + { + "epoch": 2.8, + "grad_norm": 19.242596139103917, + "learning_rate": 2.2383826669093222e-07, + "loss": 0.4182, + "step": 17949 + }, + { + "epoch": 2.8, + "grad_norm": 31.37208079629567, + "learning_rate": 2.2348359354330106e-07, + "loss": 0.4515, + "step": 17950 + }, + { + "epoch": 2.8, + "grad_norm": 19.475944026074135, + "learning_rate": 2.2312919843407555e-07, + "loss": 0.4151, + "step": 17951 + }, + { + "epoch": 2.8, + "grad_norm": 23.45641486178327, + "learning_rate": 2.227750813733376e-07, + "loss": 0.4315, + "step": 17952 + }, + { + "epoch": 2.8, + "grad_norm": 18.957619987855367, + "learning_rate": 2.2242124237115693e-07, + "loss": 0.4343, + "step": 17953 + }, + { + "epoch": 2.8, + "grad_norm": 24.50887749060113, + "learning_rate": 2.220676814375955e-07, + "loss": 0.4303, + "step": 17954 + }, + { + "epoch": 2.8, + "grad_norm": 18.942143100024747, + "learning_rate": 2.217143985827097e-07, + "loss": 0.4136, + "step": 17955 + }, + { + "epoch": 2.8, + "grad_norm": 33.555725354274415, + "learning_rate": 2.2136139381654708e-07, + "loss": 0.4696, + "step": 17956 + }, + { + "epoch": 2.8, + "grad_norm": 20.660537463771654, + "learning_rate": 2.2100866714914514e-07, + "loss": 0.3986, + "step": 17957 + }, + { + "epoch": 2.81, + "grad_norm": 25.221334921705278, + "learning_rate": 2.2065621859053587e-07, + "loss": 0.3785, + "step": 17958 + }, + { + "epoch": 2.81, + "grad_norm": 19.54857315566461, + "learning_rate": 2.2030404815074346e-07, + "loss": 0.4516, + "step": 17959 + }, + { + "epoch": 2.81, + "grad_norm": 19.719385980183056, + "learning_rate": 2.1995215583978435e-07, + "loss": 0.4367, + "step": 17960 + }, + { + "epoch": 2.81, + "grad_norm": 18.032189383986218, + "learning_rate": 2.1960054166766386e-07, + "loss": 0.4147, + 
"step": 17961 + }, + { + "epoch": 2.81, + "grad_norm": 19.170908254158707, + "learning_rate": 2.1924920564438178e-07, + "loss": 0.3889, + "step": 17962 + }, + { + "epoch": 2.81, + "grad_norm": 13.646906441494428, + "learning_rate": 2.1889814777993013e-07, + "loss": 0.394, + "step": 17963 + }, + { + "epoch": 2.81, + "grad_norm": 20.925506812705247, + "learning_rate": 2.1854736808429533e-07, + "loss": 0.4245, + "step": 17964 + }, + { + "epoch": 2.81, + "grad_norm": 21.66466775180062, + "learning_rate": 2.1819686656745053e-07, + "loss": 0.3924, + "step": 17965 + }, + { + "epoch": 2.81, + "grad_norm": 19.6367657252809, + "learning_rate": 2.1784664323936445e-07, + "loss": 0.4043, + "step": 17966 + }, + { + "epoch": 2.81, + "grad_norm": 25.429948439189545, + "learning_rate": 2.1749669810999686e-07, + "loss": 0.4044, + "step": 17967 + }, + { + "epoch": 2.81, + "grad_norm": 34.47296548072508, + "learning_rate": 2.1714703118930203e-07, + "loss": 0.4403, + "step": 17968 + }, + { + "epoch": 2.81, + "grad_norm": 25.426145397725204, + "learning_rate": 2.16797642487222e-07, + "loss": 0.4375, + "step": 17969 + }, + { + "epoch": 2.81, + "grad_norm": 21.97156176744616, + "learning_rate": 2.1644853201369444e-07, + "loss": 0.4265, + "step": 17970 + }, + { + "epoch": 2.81, + "grad_norm": 20.83134378700535, + "learning_rate": 2.1609969977864687e-07, + "loss": 0.4478, + "step": 17971 + }, + { + "epoch": 2.81, + "grad_norm": 21.90028893533822, + "learning_rate": 2.157511457920014e-07, + "loss": 0.3927, + "step": 17972 + }, + { + "epoch": 2.81, + "grad_norm": 15.405559098728263, + "learning_rate": 2.1540287006366788e-07, + "loss": 0.3829, + "step": 17973 + }, + { + "epoch": 2.81, + "grad_norm": 17.163114747719167, + "learning_rate": 2.1505487260355396e-07, + "loss": 0.346, + "step": 17974 + }, + { + "epoch": 2.81, + "grad_norm": 30.13482991280381, + "learning_rate": 2.147071534215561e-07, + "loss": 0.4346, + "step": 17975 + }, + { + "epoch": 2.81, + "grad_norm": 20.43033444490758, + "learning_rate": 2.1435971252756093e-07, + "loss": 0.4628, + "step": 17976 + }, + { + "epoch": 2.81, + "grad_norm": 18.91660346603199, + "learning_rate": 2.140125499314527e-07, + "loss": 0.4146, + "step": 17977 + }, + { + "epoch": 2.81, + "grad_norm": 19.263103447713288, + "learning_rate": 2.1366566564310244e-07, + "loss": 0.4877, + "step": 17978 + }, + { + "epoch": 2.81, + "grad_norm": 22.674337070683304, + "learning_rate": 2.1331905967237553e-07, + "loss": 0.4124, + "step": 17979 + }, + { + "epoch": 2.81, + "grad_norm": 25.106870838529687, + "learning_rate": 2.1297273202912972e-07, + "loss": 0.4446, + "step": 17980 + }, + { + "epoch": 2.81, + "grad_norm": 31.21133850374774, + "learning_rate": 2.1262668272321486e-07, + "loss": 0.4912, + "step": 17981 + }, + { + "epoch": 2.81, + "grad_norm": 22.936691970261247, + "learning_rate": 2.1228091176447086e-07, + "loss": 0.4554, + "step": 17982 + }, + { + "epoch": 2.81, + "grad_norm": 29.217553275381192, + "learning_rate": 2.1193541916273097e-07, + "loss": 0.474, + "step": 17983 + }, + { + "epoch": 2.81, + "grad_norm": 30.885164960164687, + "learning_rate": 2.11590204927824e-07, + "loss": 0.4252, + "step": 17984 + }, + { + "epoch": 2.81, + "grad_norm": 18.9163463589218, + "learning_rate": 2.1124526906956545e-07, + "loss": 0.4332, + "step": 17985 + }, + { + "epoch": 2.81, + "grad_norm": 19.514917725054374, + "learning_rate": 2.109006115977641e-07, + "loss": 0.4102, + "step": 17986 + }, + { + "epoch": 2.81, + "grad_norm": 30.945164940878776, + "learning_rate": 2.1055623252222211e-07, + "loss": 
0.4305, + "step": 17987 + }, + { + "epoch": 2.81, + "grad_norm": 14.402236483872583, + "learning_rate": 2.1021213185273504e-07, + "loss": 0.3995, + "step": 17988 + }, + { + "epoch": 2.81, + "grad_norm": 19.70476495208655, + "learning_rate": 2.0986830959908943e-07, + "loss": 0.4086, + "step": 17989 + }, + { + "epoch": 2.81, + "grad_norm": 16.44522549748909, + "learning_rate": 2.0952476577106085e-07, + "loss": 0.3842, + "step": 17990 + }, + { + "epoch": 2.81, + "grad_norm": 21.175598975506993, + "learning_rate": 2.0918150037842032e-07, + "loss": 0.3839, + "step": 17991 + }, + { + "epoch": 2.81, + "grad_norm": 18.379939230450287, + "learning_rate": 2.088385134309312e-07, + "loss": 0.4379, + "step": 17992 + }, + { + "epoch": 2.81, + "grad_norm": 25.57784531265054, + "learning_rate": 2.0849580493834675e-07, + "loss": 0.4463, + "step": 17993 + }, + { + "epoch": 2.81, + "grad_norm": 26.167006690036793, + "learning_rate": 2.0815337491041476e-07, + "loss": 0.4224, + "step": 17994 + }, + { + "epoch": 2.81, + "grad_norm": 27.843951997334646, + "learning_rate": 2.0781122335687075e-07, + "loss": 0.4467, + "step": 17995 + }, + { + "epoch": 2.81, + "grad_norm": 17.981891019743163, + "learning_rate": 2.0746935028744808e-07, + "loss": 0.3986, + "step": 17996 + }, + { + "epoch": 2.81, + "grad_norm": 26.026015872279658, + "learning_rate": 2.0712775571186895e-07, + "loss": 0.4376, + "step": 17997 + }, + { + "epoch": 2.81, + "grad_norm": 17.838699151246583, + "learning_rate": 2.067864396398478e-07, + "loss": 0.3984, + "step": 17998 + }, + { + "epoch": 2.81, + "grad_norm": 30.99544139480847, + "learning_rate": 2.0644540208109133e-07, + "loss": 0.4548, + "step": 17999 + }, + { + "epoch": 2.81, + "grad_norm": 27.062223881244527, + "learning_rate": 2.0610464304529843e-07, + "loss": 0.4533, + "step": 18000 + }, + { + "epoch": 2.81, + "grad_norm": 17.443422817345073, + "learning_rate": 2.0576416254216024e-07, + "loss": 0.4411, + "step": 18001 + }, + { + "epoch": 2.81, + "grad_norm": 24.47054582219941, + "learning_rate": 2.0542396058136015e-07, + "loss": 0.462, + "step": 18002 + }, + { + "epoch": 2.81, + "grad_norm": 23.04319876947105, + "learning_rate": 2.0508403717257263e-07, + "loss": 0.4435, + "step": 18003 + }, + { + "epoch": 2.81, + "grad_norm": 25.397809130271202, + "learning_rate": 2.047443923254655e-07, + "loss": 0.3962, + "step": 18004 + }, + { + "epoch": 2.81, + "grad_norm": 13.505013511246501, + "learning_rate": 2.0440502604969659e-07, + "loss": 0.3772, + "step": 18005 + }, + { + "epoch": 2.81, + "grad_norm": 13.704697973212328, + "learning_rate": 2.040659383549204e-07, + "loss": 0.4494, + "step": 18006 + }, + { + "epoch": 2.81, + "grad_norm": 30.39206411195344, + "learning_rate": 2.0372712925077697e-07, + "loss": 0.4095, + "step": 18007 + }, + { + "epoch": 2.81, + "grad_norm": 26.479218452469503, + "learning_rate": 2.0338859874690308e-07, + "loss": 0.4731, + "step": 18008 + }, + { + "epoch": 2.81, + "grad_norm": 36.94376860534872, + "learning_rate": 2.030503468529288e-07, + "loss": 0.4564, + "step": 18009 + }, + { + "epoch": 2.81, + "grad_norm": 16.20613844833497, + "learning_rate": 2.0271237357847085e-07, + "loss": 0.3896, + "step": 18010 + }, + { + "epoch": 2.81, + "grad_norm": 25.56121906787919, + "learning_rate": 2.0237467893314044e-07, + "loss": 0.423, + "step": 18011 + }, + { + "epoch": 2.81, + "grad_norm": 18.851176906520124, + "learning_rate": 2.0203726292654324e-07, + "loss": 0.4429, + "step": 18012 + }, + { + "epoch": 2.81, + "grad_norm": 17.81612367579466, + "learning_rate": 
2.0170012556827379e-07, + "loss": 0.4194, + "step": 18013 + }, + { + "epoch": 2.81, + "grad_norm": 21.072584658421118, + "learning_rate": 2.0136326686792217e-07, + "loss": 0.4459, + "step": 18014 + }, + { + "epoch": 2.81, + "grad_norm": 18.352500836323383, + "learning_rate": 2.0102668683506743e-07, + "loss": 0.4194, + "step": 18015 + }, + { + "epoch": 2.81, + "grad_norm": 29.785433168218354, + "learning_rate": 2.0069038547928078e-07, + "loss": 0.4289, + "step": 18016 + }, + { + "epoch": 2.81, + "grad_norm": 27.201255488696873, + "learning_rate": 2.0035436281012678e-07, + "loss": 0.4176, + "step": 18017 + }, + { + "epoch": 2.81, + "grad_norm": 27.335751808431972, + "learning_rate": 2.0001861883716335e-07, + "loss": 0.512, + "step": 18018 + }, + { + "epoch": 2.81, + "grad_norm": 17.189362042739145, + "learning_rate": 1.996831535699373e-07, + "loss": 0.4082, + "step": 18019 + }, + { + "epoch": 2.81, + "grad_norm": 24.687079651791045, + "learning_rate": 1.9934796701798876e-07, + "loss": 0.4497, + "step": 18020 + }, + { + "epoch": 2.81, + "grad_norm": 28.828919896484148, + "learning_rate": 1.9901305919085123e-07, + "loss": 0.4248, + "step": 18021 + }, + { + "epoch": 2.82, + "grad_norm": 27.216542038342144, + "learning_rate": 1.9867843009804822e-07, + "loss": 0.4025, + "step": 18022 + }, + { + "epoch": 2.82, + "grad_norm": 26.15781855807853, + "learning_rate": 1.9834407974909764e-07, + "loss": 0.3836, + "step": 18023 + }, + { + "epoch": 2.82, + "grad_norm": 25.04948801610312, + "learning_rate": 1.9801000815350745e-07, + "loss": 0.4651, + "step": 18024 + }, + { + "epoch": 2.82, + "grad_norm": 16.298808560093953, + "learning_rate": 1.9767621532077896e-07, + "loss": 0.4397, + "step": 18025 + }, + { + "epoch": 2.82, + "grad_norm": 25.37913525058086, + "learning_rate": 1.9734270126040344e-07, + "loss": 0.432, + "step": 18026 + }, + { + "epoch": 2.82, + "grad_norm": 18.940686963333558, + "learning_rate": 1.9700946598186887e-07, + "loss": 0.4241, + "step": 18027 + }, + { + "epoch": 2.82, + "grad_norm": 25.604745911987667, + "learning_rate": 1.966765094946499e-07, + "loss": 0.4077, + "step": 18028 + }, + { + "epoch": 2.82, + "grad_norm": 28.332987679916233, + "learning_rate": 1.963438318082156e-07, + "loss": 0.4453, + "step": 18029 + }, + { + "epoch": 2.82, + "grad_norm": 16.15408249229552, + "learning_rate": 1.960114329320284e-07, + "loss": 0.4226, + "step": 18030 + }, + { + "epoch": 2.82, + "grad_norm": 21.414135281127027, + "learning_rate": 1.9567931287554075e-07, + "loss": 0.444, + "step": 18031 + }, + { + "epoch": 2.82, + "grad_norm": 21.12404062488127, + "learning_rate": 1.9534747164819733e-07, + "loss": 0.4192, + "step": 18032 + }, + { + "epoch": 2.82, + "grad_norm": 26.36948730159487, + "learning_rate": 1.9501590925943726e-07, + "loss": 0.4181, + "step": 18033 + }, + { + "epoch": 2.82, + "grad_norm": 29.864448556483914, + "learning_rate": 1.9468462571868852e-07, + "loss": 0.4646, + "step": 18034 + }, + { + "epoch": 2.82, + "grad_norm": 20.638843784390666, + "learning_rate": 1.9435362103537357e-07, + "loss": 0.3883, + "step": 18035 + }, + { + "epoch": 2.82, + "grad_norm": 21.180132010990018, + "learning_rate": 1.9402289521890495e-07, + "loss": 0.4655, + "step": 18036 + }, + { + "epoch": 2.82, + "grad_norm": 19.873556200103817, + "learning_rate": 1.936924482786884e-07, + "loss": 0.4052, + "step": 18037 + }, + { + "epoch": 2.82, + "grad_norm": 24.944717047681387, + "learning_rate": 1.9336228022412306e-07, + "loss": 0.4075, + "step": 18038 + }, + { + "epoch": 2.82, + "grad_norm": 15.459358404554317, 
+ "learning_rate": 1.9303239106459703e-07, + "loss": 0.4124, + "step": 18039 + }, + { + "epoch": 2.82, + "grad_norm": 21.725490016865255, + "learning_rate": 1.927027808094939e-07, + "loss": 0.402, + "step": 18040 + }, + { + "epoch": 2.82, + "grad_norm": 17.46549335330472, + "learning_rate": 1.9237344946818614e-07, + "loss": 0.3899, + "step": 18041 + }, + { + "epoch": 2.82, + "grad_norm": 21.593249390398487, + "learning_rate": 1.9204439705003964e-07, + "loss": 0.4879, + "step": 18042 + }, + { + "epoch": 2.82, + "grad_norm": 21.53584317932088, + "learning_rate": 1.9171562356441353e-07, + "loss": 0.4065, + "step": 18043 + }, + { + "epoch": 2.82, + "grad_norm": 18.074040117900097, + "learning_rate": 1.9138712902065703e-07, + "loss": 0.4057, + "step": 18044 + }, + { + "epoch": 2.82, + "grad_norm": 23.156110949105237, + "learning_rate": 1.9105891342811268e-07, + "loss": 0.4093, + "step": 18045 + }, + { + "epoch": 2.82, + "grad_norm": 17.38387574962378, + "learning_rate": 1.9073097679611518e-07, + "loss": 0.4512, + "step": 18046 + }, + { + "epoch": 2.82, + "grad_norm": 23.113693351611204, + "learning_rate": 1.9040331913398934e-07, + "loss": 0.4384, + "step": 18047 + }, + { + "epoch": 2.82, + "grad_norm": 19.82495614169634, + "learning_rate": 1.900759404510577e-07, + "loss": 0.4256, + "step": 18048 + }, + { + "epoch": 2.82, + "grad_norm": 20.005652331541793, + "learning_rate": 1.8974884075662503e-07, + "loss": 0.4273, + "step": 18049 + }, + { + "epoch": 2.82, + "grad_norm": 13.767564499450945, + "learning_rate": 1.894220200599961e-07, + "loss": 0.4957, + "step": 18050 + }, + { + "epoch": 2.82, + "grad_norm": 24.785842358146194, + "learning_rate": 1.8909547837046683e-07, + "loss": 0.3231, + "step": 18051 + }, + { + "epoch": 2.82, + "grad_norm": 19.597745308570083, + "learning_rate": 1.887692156973231e-07, + "loss": 0.457, + "step": 18052 + }, + { + "epoch": 2.82, + "grad_norm": 32.00777855411114, + "learning_rate": 1.8844323204984417e-07, + "loss": 0.4225, + "step": 18053 + }, + { + "epoch": 2.82, + "grad_norm": 26.357248174116904, + "learning_rate": 1.8811752743729817e-07, + "loss": 0.4255, + "step": 18054 + }, + { + "epoch": 2.82, + "grad_norm": 24.549965623221635, + "learning_rate": 1.8779210186895102e-07, + "loss": 0.4693, + "step": 18055 + }, + { + "epoch": 2.82, + "grad_norm": 22.033894527946728, + "learning_rate": 1.8746695535405645e-07, + "loss": 0.4267, + "step": 18056 + }, + { + "epoch": 2.82, + "grad_norm": 26.834830938454907, + "learning_rate": 1.871420879018615e-07, + "loss": 0.4254, + "step": 18057 + }, + { + "epoch": 2.82, + "grad_norm": 20.04780078453937, + "learning_rate": 1.8681749952160544e-07, + "loss": 0.4107, + "step": 18058 + }, + { + "epoch": 2.82, + "grad_norm": 20.944762309132198, + "learning_rate": 1.8649319022251865e-07, + "loss": 0.4351, + "step": 18059 + }, + { + "epoch": 2.82, + "grad_norm": 27.39598569404578, + "learning_rate": 1.8616916001382602e-07, + "loss": 0.4731, + "step": 18060 + }, + { + "epoch": 2.82, + "grad_norm": 18.46669486757505, + "learning_rate": 1.8584540890474012e-07, + "loss": 0.329, + "step": 18061 + }, + { + "epoch": 2.82, + "grad_norm": 25.708401359306325, + "learning_rate": 1.855219369044703e-07, + "loss": 0.4504, + "step": 18062 + }, + { + "epoch": 2.82, + "grad_norm": 28.80060555401762, + "learning_rate": 1.8519874402221472e-07, + "loss": 0.4368, + "step": 18063 + }, + { + "epoch": 2.82, + "grad_norm": 18.55700747070408, + "learning_rate": 1.8487583026716604e-07, + "loss": 0.4529, + "step": 18064 + }, + { + "epoch": 2.82, + "grad_norm": 
21.17969897434173, + "learning_rate": 1.8455319564850694e-07, + "loss": 0.3771, + "step": 18065 + }, + { + "epoch": 2.82, + "grad_norm": 15.298909719272224, + "learning_rate": 1.8423084017541227e-07, + "loss": 0.4362, + "step": 18066 + }, + { + "epoch": 2.82, + "grad_norm": 25.20105157117016, + "learning_rate": 1.8390876385705136e-07, + "loss": 0.4054, + "step": 18067 + }, + { + "epoch": 2.82, + "grad_norm": 25.69374549427703, + "learning_rate": 1.8358696670258357e-07, + "loss": 0.4537, + "step": 18068 + }, + { + "epoch": 2.82, + "grad_norm": 18.454268990957402, + "learning_rate": 1.8326544872115936e-07, + "loss": 0.4051, + "step": 18069 + }, + { + "epoch": 2.82, + "grad_norm": 28.01332041744621, + "learning_rate": 1.829442099219225e-07, + "loss": 0.466, + "step": 18070 + }, + { + "epoch": 2.82, + "grad_norm": 28.520798793850297, + "learning_rate": 1.826232503140102e-07, + "loss": 0.4633, + "step": 18071 + }, + { + "epoch": 2.82, + "grad_norm": 19.453714394736885, + "learning_rate": 1.823025699065495e-07, + "loss": 0.4518, + "step": 18072 + }, + { + "epoch": 2.82, + "grad_norm": 22.71209743885157, + "learning_rate": 1.8198216870866093e-07, + "loss": 0.4082, + "step": 18073 + }, + { + "epoch": 2.82, + "grad_norm": 24.074791542603535, + "learning_rate": 1.8166204672945496e-07, + "loss": 0.5501, + "step": 18074 + }, + { + "epoch": 2.82, + "grad_norm": 24.001064209887243, + "learning_rate": 1.813422039780377e-07, + "loss": 0.3711, + "step": 18075 + }, + { + "epoch": 2.82, + "grad_norm": 22.77712935005126, + "learning_rate": 1.8102264046350404e-07, + "loss": 0.4231, + "step": 18076 + }, + { + "epoch": 2.82, + "grad_norm": 28.90221294152296, + "learning_rate": 1.8070335619494227e-07, + "loss": 0.4831, + "step": 18077 + }, + { + "epoch": 2.82, + "grad_norm": 24.483302189049436, + "learning_rate": 1.8038435118143405e-07, + "loss": 0.4072, + "step": 18078 + }, + { + "epoch": 2.82, + "grad_norm": 18.329837099582967, + "learning_rate": 1.8006562543204986e-07, + "loss": 0.4118, + "step": 18079 + }, + { + "epoch": 2.82, + "grad_norm": 25.900038995032247, + "learning_rate": 1.797471789558547e-07, + "loss": 0.4752, + "step": 18080 + }, + { + "epoch": 2.82, + "grad_norm": 21.903649596175914, + "learning_rate": 1.7942901176190574e-07, + "loss": 0.4263, + "step": 18081 + }, + { + "epoch": 2.82, + "grad_norm": 27.36413278391528, + "learning_rate": 1.7911112385925022e-07, + "loss": 0.4231, + "step": 18082 + }, + { + "epoch": 2.82, + "grad_norm": 15.371269739348032, + "learning_rate": 1.7879351525692978e-07, + "loss": 0.4034, + "step": 18083 + }, + { + "epoch": 2.82, + "grad_norm": 34.24500229798534, + "learning_rate": 1.7847618596397498e-07, + "loss": 0.4887, + "step": 18084 + }, + { + "epoch": 2.82, + "grad_norm": 18.28627134059287, + "learning_rate": 1.7815913598941303e-07, + "loss": 0.4218, + "step": 18085 + }, + { + "epoch": 2.83, + "grad_norm": 21.181616275644974, + "learning_rate": 1.7784236534226006e-07, + "loss": 0.4187, + "step": 18086 + }, + { + "epoch": 2.83, + "grad_norm": 20.870334229454777, + "learning_rate": 1.7752587403152444e-07, + "loss": 0.3943, + "step": 18087 + }, + { + "epoch": 2.83, + "grad_norm": 15.01649455262894, + "learning_rate": 1.7720966206620672e-07, + "loss": 0.41, + "step": 18088 + }, + { + "epoch": 2.83, + "grad_norm": 20.797464520004223, + "learning_rate": 1.768937294552997e-07, + "loss": 0.4171, + "step": 18089 + }, + { + "epoch": 2.83, + "grad_norm": 18.43060913708382, + "learning_rate": 1.7657807620778956e-07, + "loss": 0.4399, + "step": 18090 + }, + { + "epoch": 2.83, + 
"grad_norm": 19.210114085611043, + "learning_rate": 1.762627023326513e-07, + "loss": 0.4687, + "step": 18091 + }, + { + "epoch": 2.83, + "grad_norm": 28.032104896981732, + "learning_rate": 1.7594760783885557e-07, + "loss": 0.4251, + "step": 18092 + }, + { + "epoch": 2.83, + "grad_norm": 28.886526744480392, + "learning_rate": 1.7563279273536294e-07, + "loss": 0.4783, + "step": 18093 + }, + { + "epoch": 2.83, + "grad_norm": 18.713493722860782, + "learning_rate": 1.7531825703112736e-07, + "loss": 0.3652, + "step": 18094 + }, + { + "epoch": 2.83, + "grad_norm": 25.35630160621016, + "learning_rate": 1.7500400073509172e-07, + "loss": 0.3439, + "step": 18095 + }, + { + "epoch": 2.83, + "grad_norm": 46.848033351011836, + "learning_rate": 1.7469002385619548e-07, + "loss": 0.4755, + "step": 18096 + }, + { + "epoch": 2.83, + "grad_norm": 27.098689946154227, + "learning_rate": 1.7437632640336822e-07, + "loss": 0.475, + "step": 18097 + }, + { + "epoch": 2.83, + "grad_norm": 15.826411482122195, + "learning_rate": 1.7406290838552943e-07, + "loss": 0.4656, + "step": 18098 + }, + { + "epoch": 2.83, + "grad_norm": 21.272662359847125, + "learning_rate": 1.737497698115942e-07, + "loss": 0.4917, + "step": 18099 + }, + { + "epoch": 2.83, + "grad_norm": 23.69031228781328, + "learning_rate": 1.7343691069046654e-07, + "loss": 0.4148, + "step": 18100 + }, + { + "epoch": 2.83, + "grad_norm": 22.14307886994061, + "learning_rate": 1.7312433103104486e-07, + "loss": 0.4642, + "step": 18101 + }, + { + "epoch": 2.83, + "grad_norm": 15.091150067789329, + "learning_rate": 1.7281203084221876e-07, + "loss": 0.4693, + "step": 18102 + }, + { + "epoch": 2.83, + "grad_norm": 24.46290276015051, + "learning_rate": 1.725000101328711e-07, + "loss": 0.3895, + "step": 18103 + }, + { + "epoch": 2.83, + "grad_norm": 18.00665190591452, + "learning_rate": 1.7218826891187258e-07, + "loss": 0.3895, + "step": 18104 + }, + { + "epoch": 2.83, + "grad_norm": 21.228924223964846, + "learning_rate": 1.7187680718809053e-07, + "loss": 0.4256, + "step": 18105 + }, + { + "epoch": 2.83, + "grad_norm": 26.48815626534871, + "learning_rate": 1.7156562497038342e-07, + "loss": 0.4698, + "step": 18106 + }, + { + "epoch": 2.83, + "grad_norm": 17.5080570814964, + "learning_rate": 1.7125472226760087e-07, + "loss": 0.3958, + "step": 18107 + }, + { + "epoch": 2.83, + "grad_norm": 21.23897477723079, + "learning_rate": 1.7094409908858357e-07, + "loss": 0.4444, + "step": 18108 + }, + { + "epoch": 2.83, + "grad_norm": 25.72896190740729, + "learning_rate": 1.7063375544216666e-07, + "loss": 0.4255, + "step": 18109 + }, + { + "epoch": 2.83, + "grad_norm": 23.006760234765697, + "learning_rate": 1.703236913371753e-07, + "loss": 0.43, + "step": 18110 + }, + { + "epoch": 2.83, + "grad_norm": 25.394987076413656, + "learning_rate": 1.7001390678242803e-07, + "loss": 0.4356, + "step": 18111 + }, + { + "epoch": 2.83, + "grad_norm": 19.898448192981473, + "learning_rate": 1.6970440178673552e-07, + "loss": 0.4079, + "step": 18112 + }, + { + "epoch": 2.83, + "grad_norm": 15.822246959766153, + "learning_rate": 1.6939517635889857e-07, + "loss": 0.3617, + "step": 18113 + }, + { + "epoch": 2.83, + "grad_norm": 15.644290886983896, + "learning_rate": 1.6908623050771234e-07, + "loss": 0.4395, + "step": 18114 + }, + { + "epoch": 2.83, + "grad_norm": 25.721337680432228, + "learning_rate": 1.6877756424196312e-07, + "loss": 0.4423, + "step": 18115 + }, + { + "epoch": 2.83, + "grad_norm": 29.035468263154407, + "learning_rate": 1.6846917757042836e-07, + "loss": 0.4062, + "step": 18116 + }, + { + 
"epoch": 2.83, + "grad_norm": 25.028969375069227, + "learning_rate": 1.6816107050187992e-07, + "loss": 0.4754, + "step": 18117 + }, + { + "epoch": 2.83, + "grad_norm": 22.97966827731043, + "learning_rate": 1.6785324304507745e-07, + "loss": 0.4466, + "step": 18118 + }, + { + "epoch": 2.83, + "grad_norm": 17.103186958032786, + "learning_rate": 1.675456952087795e-07, + "loss": 0.3774, + "step": 18119 + }, + { + "epoch": 2.83, + "grad_norm": 28.04438139793631, + "learning_rate": 1.67238427001728e-07, + "loss": 0.4336, + "step": 18120 + }, + { + "epoch": 2.83, + "grad_norm": 17.868874365569596, + "learning_rate": 1.669314384326648e-07, + "loss": 0.4362, + "step": 18121 + }, + { + "epoch": 2.83, + "grad_norm": 20.185261840727215, + "learning_rate": 1.666247295103185e-07, + "loss": 0.4189, + "step": 18122 + }, + { + "epoch": 2.83, + "grad_norm": 22.618388579897058, + "learning_rate": 1.6631830024341323e-07, + "loss": 0.4924, + "step": 18123 + }, + { + "epoch": 2.83, + "grad_norm": 36.765768636932776, + "learning_rate": 1.66012150640662e-07, + "loss": 0.4616, + "step": 18124 + }, + { + "epoch": 2.83, + "grad_norm": 14.82754277894135, + "learning_rate": 1.6570628071077343e-07, + "loss": 0.3961, + "step": 18125 + }, + { + "epoch": 2.83, + "grad_norm": 20.195369655447088, + "learning_rate": 1.65400690462445e-07, + "loss": 0.4697, + "step": 18126 + }, + { + "epoch": 2.83, + "grad_norm": 20.645068167612955, + "learning_rate": 1.6509537990436753e-07, + "loss": 0.4393, + "step": 18127 + }, + { + "epoch": 2.83, + "grad_norm": 19.288813188068776, + "learning_rate": 1.6479034904522518e-07, + "loss": 0.4724, + "step": 18128 + }, + { + "epoch": 2.83, + "grad_norm": 15.54076714896541, + "learning_rate": 1.6448559789369102e-07, + "loss": 0.4038, + "step": 18129 + }, + { + "epoch": 2.83, + "grad_norm": 18.671157344290176, + "learning_rate": 1.6418112645843253e-07, + "loss": 0.3625, + "step": 18130 + }, + { + "epoch": 2.83, + "grad_norm": 15.851430332127304, + "learning_rate": 1.638769347481095e-07, + "loss": 0.3647, + "step": 18131 + }, + { + "epoch": 2.83, + "grad_norm": 20.464256768237657, + "learning_rate": 1.6357302277137388e-07, + "loss": 0.4647, + "step": 18132 + }, + { + "epoch": 2.83, + "grad_norm": 25.426634853449738, + "learning_rate": 1.632693905368654e-07, + "loss": 0.4356, + "step": 18133 + }, + { + "epoch": 2.83, + "grad_norm": 28.152442060722105, + "learning_rate": 1.6296603805322163e-07, + "loss": 0.4448, + "step": 18134 + }, + { + "epoch": 2.83, + "grad_norm": 18.292625044420976, + "learning_rate": 1.62662965329069e-07, + "loss": 0.4173, + "step": 18135 + }, + { + "epoch": 2.83, + "grad_norm": 16.925419858819957, + "learning_rate": 1.6236017237302838e-07, + "loss": 0.4472, + "step": 18136 + }, + { + "epoch": 2.83, + "grad_norm": 16.481163865588723, + "learning_rate": 1.6205765919370843e-07, + "loss": 0.4739, + "step": 18137 + }, + { + "epoch": 2.83, + "grad_norm": 16.99978433938535, + "learning_rate": 1.6175542579971336e-07, + "loss": 0.4241, + "step": 18138 + }, + { + "epoch": 2.83, + "grad_norm": 17.73395309030394, + "learning_rate": 1.6145347219963857e-07, + "loss": 0.362, + "step": 18139 + }, + { + "epoch": 2.83, + "grad_norm": 22.04112731607826, + "learning_rate": 1.6115179840207274e-07, + "loss": 0.4428, + "step": 18140 + }, + { + "epoch": 2.83, + "grad_norm": 20.14290410309478, + "learning_rate": 1.608504044155934e-07, + "loss": 0.4003, + "step": 18141 + }, + { + "epoch": 2.83, + "grad_norm": 16.596535828683223, + "learning_rate": 1.6054929024877375e-07, + "loss": 0.3913, + "step": 18142 
+ }, + { + "epoch": 2.83, + "grad_norm": 27.040931804578047, + "learning_rate": 1.6024845591017468e-07, + "loss": 0.4133, + "step": 18143 + }, + { + "epoch": 2.83, + "grad_norm": 29.86997905191291, + "learning_rate": 1.599479014083549e-07, + "loss": 0.3703, + "step": 18144 + }, + { + "epoch": 2.83, + "grad_norm": 23.98163823595663, + "learning_rate": 1.5964762675185984e-07, + "loss": 0.4183, + "step": 18145 + }, + { + "epoch": 2.83, + "grad_norm": 34.122474402284546, + "learning_rate": 1.5934763194923043e-07, + "loss": 0.4737, + "step": 18146 + }, + { + "epoch": 2.83, + "grad_norm": 16.175138456593668, + "learning_rate": 1.5904791700899758e-07, + "loss": 0.4222, + "step": 18147 + }, + { + "epoch": 2.83, + "grad_norm": 18.617639431288524, + "learning_rate": 1.587484819396856e-07, + "loss": 0.3616, + "step": 18148 + }, + { + "epoch": 2.83, + "grad_norm": 21.52473055491459, + "learning_rate": 1.5844932674980885e-07, + "loss": 0.4336, + "step": 18149 + }, + { + "epoch": 2.84, + "grad_norm": 30.533524061043977, + "learning_rate": 1.58150451447876e-07, + "loss": 0.3729, + "step": 18150 + }, + { + "epoch": 2.84, + "grad_norm": 22.754961925460908, + "learning_rate": 1.5785185604238805e-07, + "loss": 0.4209, + "step": 18151 + }, + { + "epoch": 2.84, + "grad_norm": 21.098454730627093, + "learning_rate": 1.5755354054183491e-07, + "loss": 0.3867, + "step": 18152 + }, + { + "epoch": 2.84, + "grad_norm": 29.90687654450791, + "learning_rate": 1.57255504954702e-07, + "loss": 0.4125, + "step": 18153 + }, + { + "epoch": 2.84, + "grad_norm": 18.95612312828468, + "learning_rate": 1.5695774928946473e-07, + "loss": 0.461, + "step": 18154 + }, + { + "epoch": 2.84, + "grad_norm": 22.89656441354108, + "learning_rate": 1.5666027355458967e-07, + "loss": 0.417, + "step": 18155 + }, + { + "epoch": 2.84, + "grad_norm": 25.8180123070717, + "learning_rate": 1.563630777585401e-07, + "loss": 0.3954, + "step": 18156 + }, + { + "epoch": 2.84, + "grad_norm": 31.728573400762432, + "learning_rate": 1.5606616190976587e-07, + "loss": 0.3966, + "step": 18157 + }, + { + "epoch": 2.84, + "grad_norm": 23.59168916790271, + "learning_rate": 1.5576952601671135e-07, + "loss": 0.4001, + "step": 18158 + }, + { + "epoch": 2.84, + "grad_norm": 20.335210898568214, + "learning_rate": 1.5547317008781205e-07, + "loss": 0.4492, + "step": 18159 + }, + { + "epoch": 2.84, + "grad_norm": 32.75339082643325, + "learning_rate": 1.5517709413149674e-07, + "loss": 0.4202, + "step": 18160 + }, + { + "epoch": 2.84, + "grad_norm": 34.61698646202934, + "learning_rate": 1.5488129815618757e-07, + "loss": 0.4497, + "step": 18161 + }, + { + "epoch": 2.84, + "grad_norm": 16.712190585082542, + "learning_rate": 1.5458578217029342e-07, + "loss": 0.4043, + "step": 18162 + }, + { + "epoch": 2.84, + "grad_norm": 26.23191589203769, + "learning_rate": 1.5429054618222083e-07, + "loss": 0.4396, + "step": 18163 + }, + { + "epoch": 2.84, + "grad_norm": 26.617654282955858, + "learning_rate": 1.5399559020036537e-07, + "loss": 0.4442, + "step": 18164 + }, + { + "epoch": 2.84, + "grad_norm": 24.92596551157176, + "learning_rate": 1.5370091423311584e-07, + "loss": 0.4267, + "step": 18165 + }, + { + "epoch": 2.84, + "grad_norm": 20.889899187026224, + "learning_rate": 1.5340651828885223e-07, + "loss": 0.4176, + "step": 18166 + }, + { + "epoch": 2.84, + "grad_norm": 21.74141980706441, + "learning_rate": 1.531124023759467e-07, + "loss": 0.4263, + "step": 18167 + }, + { + "epoch": 2.84, + "grad_norm": 18.603786962738102, + "learning_rate": 1.5281856650276482e-07, + "loss": 0.4044, + 
"step": 18168 + }, + { + "epoch": 2.84, + "grad_norm": 26.053305121127813, + "learning_rate": 1.5252501067766324e-07, + "loss": 0.4292, + "step": 18169 + }, + { + "epoch": 2.84, + "grad_norm": 13.951804333518846, + "learning_rate": 1.5223173490898856e-07, + "loss": 0.4112, + "step": 18170 + }, + { + "epoch": 2.84, + "grad_norm": 35.875249590985746, + "learning_rate": 1.5193873920508306e-07, + "loss": 0.5211, + "step": 18171 + }, + { + "epoch": 2.84, + "grad_norm": 15.136251961678564, + "learning_rate": 1.5164602357427781e-07, + "loss": 0.4034, + "step": 18172 + }, + { + "epoch": 2.84, + "grad_norm": 19.237586526090553, + "learning_rate": 1.5135358802489952e-07, + "loss": 0.4031, + "step": 18173 + }, + { + "epoch": 2.84, + "grad_norm": 20.087554054982746, + "learning_rate": 1.5106143256526374e-07, + "loss": 0.424, + "step": 18174 + }, + { + "epoch": 2.84, + "grad_norm": 31.098079034513844, + "learning_rate": 1.5076955720367937e-07, + "loss": 0.4257, + "step": 18175 + }, + { + "epoch": 2.84, + "grad_norm": 26.387130861713654, + "learning_rate": 1.5047796194844754e-07, + "loss": 0.4193, + "step": 18176 + }, + { + "epoch": 2.84, + "grad_norm": 17.34765911209643, + "learning_rate": 1.5018664680785943e-07, + "loss": 0.4429, + "step": 18177 + }, + { + "epoch": 2.84, + "grad_norm": 27.522948187908025, + "learning_rate": 1.4989561179020283e-07, + "loss": 0.5117, + "step": 18178 + }, + { + "epoch": 2.84, + "grad_norm": 23.37303788000141, + "learning_rate": 1.4960485690375115e-07, + "loss": 0.4123, + "step": 18179 + }, + { + "epoch": 2.84, + "grad_norm": 46.58080469463978, + "learning_rate": 1.4931438215677663e-07, + "loss": 0.4591, + "step": 18180 + }, + { + "epoch": 2.84, + "grad_norm": 24.04519566527334, + "learning_rate": 1.490241875575371e-07, + "loss": 0.398, + "step": 18181 + }, + { + "epoch": 2.84, + "grad_norm": 20.34164301483965, + "learning_rate": 1.4873427311428822e-07, + "loss": 0.3969, + "step": 18182 + }, + { + "epoch": 2.84, + "grad_norm": 15.675626770634809, + "learning_rate": 1.484446388352734e-07, + "loss": 0.4051, + "step": 18183 + }, + { + "epoch": 2.84, + "grad_norm": 17.31448853000177, + "learning_rate": 1.481552847287293e-07, + "loss": 0.4227, + "step": 18184 + }, + { + "epoch": 2.84, + "grad_norm": 21.04627522730495, + "learning_rate": 1.478662108028872e-07, + "loss": 0.4129, + "step": 18185 + }, + { + "epoch": 2.84, + "grad_norm": 33.439381932838494, + "learning_rate": 1.4757741706596605e-07, + "loss": 0.4465, + "step": 18186 + }, + { + "epoch": 2.84, + "grad_norm": 28.171230519307425, + "learning_rate": 1.472889035261793e-07, + "loss": 0.435, + "step": 18187 + }, + { + "epoch": 2.84, + "grad_norm": 18.740528923769265, + "learning_rate": 1.4700067019173258e-07, + "loss": 0.3903, + "step": 18188 + }, + { + "epoch": 2.84, + "grad_norm": 16.00891777270265, + "learning_rate": 1.4671271707082268e-07, + "loss": 0.452, + "step": 18189 + }, + { + "epoch": 2.84, + "grad_norm": 43.32758584806517, + "learning_rate": 1.4642504417163973e-07, + "loss": 0.4603, + "step": 18190 + }, + { + "epoch": 2.84, + "grad_norm": 25.222767669989338, + "learning_rate": 1.4613765150236382e-07, + "loss": 0.4583, + "step": 18191 + }, + { + "epoch": 2.84, + "grad_norm": 44.02331776166877, + "learning_rate": 1.4585053907116953e-07, + "loss": 0.4935, + "step": 18192 + }, + { + "epoch": 2.84, + "grad_norm": 22.404369818038525, + "learning_rate": 1.4556370688622036e-07, + "loss": 0.3643, + "step": 18193 + }, + { + "epoch": 2.84, + "grad_norm": 34.20547161922587, + "learning_rate": 1.4527715495567529e-07, + 
"loss": 0.4229, + "step": 18194 + }, + { + "epoch": 2.84, + "grad_norm": 18.17256618589119, + "learning_rate": 1.4499088328768341e-07, + "loss": 0.391, + "step": 18195 + }, + { + "epoch": 2.84, + "grad_norm": 23.934183261958513, + "learning_rate": 1.4470489189038595e-07, + "loss": 0.5173, + "step": 18196 + }, + { + "epoch": 2.84, + "grad_norm": 21.611196711468622, + "learning_rate": 1.444191807719153e-07, + "loss": 0.3712, + "step": 18197 + }, + { + "epoch": 2.84, + "grad_norm": 19.25601794255633, + "learning_rate": 1.4413374994039718e-07, + "loss": 0.4396, + "step": 18198 + }, + { + "epoch": 2.84, + "grad_norm": 25.81377415364099, + "learning_rate": 1.438485994039518e-07, + "loss": 0.4755, + "step": 18199 + }, + { + "epoch": 2.84, + "grad_norm": 21.55632520525946, + "learning_rate": 1.4356372917068595e-07, + "loss": 0.4468, + "step": 18200 + }, + { + "epoch": 2.84, + "grad_norm": 19.516011784785015, + "learning_rate": 1.4327913924870097e-07, + "loss": 0.4856, + "step": 18201 + }, + { + "epoch": 2.84, + "grad_norm": 20.55362997955764, + "learning_rate": 1.4299482964609035e-07, + "loss": 0.4636, + "step": 18202 + }, + { + "epoch": 2.84, + "grad_norm": 25.437518246337195, + "learning_rate": 1.427108003709421e-07, + "loss": 0.4259, + "step": 18203 + }, + { + "epoch": 2.84, + "grad_norm": 28.093469669801973, + "learning_rate": 1.4242705143133195e-07, + "loss": 0.4142, + "step": 18204 + }, + { + "epoch": 2.84, + "grad_norm": 30.588926237977518, + "learning_rate": 1.4214358283533124e-07, + "loss": 0.3983, + "step": 18205 + }, + { + "epoch": 2.84, + "grad_norm": 18.730443331926935, + "learning_rate": 1.41860394590998e-07, + "loss": 0.4127, + "step": 18206 + }, + { + "epoch": 2.84, + "grad_norm": 32.90276969082344, + "learning_rate": 1.4157748670639015e-07, + "loss": 0.4189, + "step": 18207 + }, + { + "epoch": 2.84, + "grad_norm": 24.323852056015433, + "learning_rate": 1.412948591895502e-07, + "loss": 0.4323, + "step": 18208 + }, + { + "epoch": 2.84, + "grad_norm": 22.991900443859063, + "learning_rate": 1.4101251204851841e-07, + "loss": 0.4128, + "step": 18209 + }, + { + "epoch": 2.84, + "grad_norm": 18.864021626420843, + "learning_rate": 1.4073044529132273e-07, + "loss": 0.4514, + "step": 18210 + }, + { + "epoch": 2.84, + "grad_norm": 24.75328245576418, + "learning_rate": 1.4044865892598681e-07, + "loss": 0.4128, + "step": 18211 + }, + { + "epoch": 2.84, + "grad_norm": 27.72153820456351, + "learning_rate": 1.4016715296052198e-07, + "loss": 0.4743, + "step": 18212 + }, + { + "epoch": 2.84, + "grad_norm": 18.796791619506735, + "learning_rate": 1.3988592740293629e-07, + "loss": 0.4505, + "step": 18213 + }, + { + "epoch": 2.85, + "grad_norm": 22.097491542734378, + "learning_rate": 1.3960498226122554e-07, + "loss": 0.3998, + "step": 18214 + }, + { + "epoch": 2.85, + "grad_norm": 31.23495737136862, + "learning_rate": 1.3932431754338227e-07, + "loss": 0.4552, + "step": 18215 + }, + { + "epoch": 2.85, + "grad_norm": 21.836687720006733, + "learning_rate": 1.3904393325738785e-07, + "loss": 0.5238, + "step": 18216 + }, + { + "epoch": 2.85, + "grad_norm": 28.404939685266307, + "learning_rate": 1.387638294112137e-07, + "loss": 0.4581, + "step": 18217 + }, + { + "epoch": 2.85, + "grad_norm": 22.696002639152205, + "learning_rate": 1.3848400601282784e-07, + "loss": 0.396, + "step": 18218 + }, + { + "epoch": 2.85, + "grad_norm": 15.952713981624163, + "learning_rate": 1.3820446307018843e-07, + "loss": 0.3807, + "step": 18219 + }, + { + "epoch": 2.85, + "grad_norm": 27.57750819329659, + "learning_rate": 
1.379252005912457e-07, + "loss": 0.4514, + "step": 18220 + }, + { + "epoch": 2.85, + "grad_norm": 24.08277278108093, + "learning_rate": 1.3764621858394e-07, + "loss": 0.444, + "step": 18221 + }, + { + "epoch": 2.85, + "grad_norm": 28.012148032345465, + "learning_rate": 1.373675170562072e-07, + "loss": 0.4614, + "step": 18222 + }, + { + "epoch": 2.85, + "grad_norm": 29.257655526568993, + "learning_rate": 1.370890960159721e-07, + "loss": 0.405, + "step": 18223 + }, + { + "epoch": 2.85, + "grad_norm": 14.971149642466557, + "learning_rate": 1.3681095547115497e-07, + "loss": 0.4054, + "step": 18224 + }, + { + "epoch": 2.85, + "grad_norm": 19.95551134139376, + "learning_rate": 1.3653309542966286e-07, + "loss": 0.4338, + "step": 18225 + }, + { + "epoch": 2.85, + "grad_norm": 22.394114875216385, + "learning_rate": 1.3625551589940056e-07, + "loss": 0.4221, + "step": 18226 + }, + { + "epoch": 2.85, + "grad_norm": 17.77124156926482, + "learning_rate": 1.3597821688826063e-07, + "loss": 0.4406, + "step": 18227 + }, + { + "epoch": 2.85, + "grad_norm": 26.27295173470462, + "learning_rate": 1.357011984041301e-07, + "loss": 0.3948, + "step": 18228 + }, + { + "epoch": 2.85, + "grad_norm": 19.550298932935227, + "learning_rate": 1.3542446045488712e-07, + "loss": 0.4221, + "step": 18229 + }, + { + "epoch": 2.85, + "grad_norm": 24.261717487237515, + "learning_rate": 1.3514800304840203e-07, + "loss": 0.4513, + "step": 18230 + }, + { + "epoch": 2.85, + "grad_norm": 21.378416580288324, + "learning_rate": 1.3487182619253636e-07, + "loss": 0.4522, + "step": 18231 + }, + { + "epoch": 2.85, + "grad_norm": 18.04620413916343, + "learning_rate": 1.3459592989514604e-07, + "loss": 0.3865, + "step": 18232 + }, + { + "epoch": 2.85, + "grad_norm": 24.92382540828731, + "learning_rate": 1.3432031416407476e-07, + "loss": 0.4419, + "step": 18233 + }, + { + "epoch": 2.85, + "grad_norm": 25.065576182623328, + "learning_rate": 1.3404497900716406e-07, + "loss": 0.4844, + "step": 18234 + }, + { + "epoch": 2.85, + "grad_norm": 31.668818941299143, + "learning_rate": 1.3376992443224213e-07, + "loss": 0.4946, + "step": 18235 + }, + { + "epoch": 2.85, + "grad_norm": 16.565309548950104, + "learning_rate": 1.3349515044713157e-07, + "loss": 0.4246, + "step": 18236 + }, + { + "epoch": 2.85, + "grad_norm": 22.682159837032373, + "learning_rate": 1.3322065705964838e-07, + "loss": 0.4108, + "step": 18237 + }, + { + "epoch": 2.85, + "grad_norm": 21.122144044725097, + "learning_rate": 1.3294644427759628e-07, + "loss": 0.4337, + "step": 18238 + }, + { + "epoch": 2.85, + "grad_norm": 23.58910743561029, + "learning_rate": 1.3267251210877686e-07, + "loss": 0.4263, + "step": 18239 + }, + { + "epoch": 2.85, + "grad_norm": 16.794766502225077, + "learning_rate": 1.3239886056097718e-07, + "loss": 0.3911, + "step": 18240 + }, + { + "epoch": 2.85, + "grad_norm": 20.03918227444374, + "learning_rate": 1.3212548964198324e-07, + "loss": 0.3791, + "step": 18241 + }, + { + "epoch": 2.85, + "grad_norm": 26.254945583740252, + "learning_rate": 1.3185239935956662e-07, + "loss": 0.4239, + "step": 18242 + }, + { + "epoch": 2.85, + "grad_norm": 17.113176742126637, + "learning_rate": 1.3157958972149553e-07, + "loss": 0.3723, + "step": 18243 + }, + { + "epoch": 2.85, + "grad_norm": 25.40501717418585, + "learning_rate": 1.3130706073552824e-07, + "loss": 0.4449, + "step": 18244 + }, + { + "epoch": 2.85, + "grad_norm": 30.65019077825835, + "learning_rate": 1.310348124094152e-07, + "loss": 0.4439, + "step": 18245 + }, + { + "epoch": 2.85, + "grad_norm": 24.28012598840872, + 
"learning_rate": 1.3076284475089906e-07, + "loss": 0.4163, + "step": 18246 + }, + { + "epoch": 2.85, + "grad_norm": 24.461155361982932, + "learning_rate": 1.304911577677137e-07, + "loss": 0.4483, + "step": 18247 + }, + { + "epoch": 2.85, + "grad_norm": 24.86781911680022, + "learning_rate": 1.3021975146758514e-07, + "loss": 0.4491, + "step": 18248 + }, + { + "epoch": 2.85, + "grad_norm": 21.215532946643265, + "learning_rate": 1.2994862585823608e-07, + "loss": 0.4408, + "step": 18249 + }, + { + "epoch": 2.85, + "grad_norm": 27.474143368909964, + "learning_rate": 1.296777809473726e-07, + "loss": 0.4995, + "step": 18250 + }, + { + "epoch": 2.85, + "grad_norm": 16.86276445619728, + "learning_rate": 1.294072167426985e-07, + "loss": 0.4199, + "step": 18251 + }, + { + "epoch": 2.85, + "grad_norm": 32.63909716063981, + "learning_rate": 1.291369332519099e-07, + "loss": 0.4275, + "step": 18252 + }, + { + "epoch": 2.85, + "grad_norm": 32.4958815936282, + "learning_rate": 1.2886693048269284e-07, + "loss": 0.4332, + "step": 18253 + }, + { + "epoch": 2.85, + "grad_norm": 29.899361495708675, + "learning_rate": 1.285972084427256e-07, + "loss": 0.4823, + "step": 18254 + }, + { + "epoch": 2.85, + "grad_norm": 20.185397513270228, + "learning_rate": 1.2832776713967876e-07, + "loss": 0.3765, + "step": 18255 + }, + { + "epoch": 2.85, + "grad_norm": 23.96969018911251, + "learning_rate": 1.2805860658121505e-07, + "loss": 0.4326, + "step": 18256 + }, + { + "epoch": 2.85, + "grad_norm": 20.157667397778972, + "learning_rate": 1.2778972677499057e-07, + "loss": 0.4084, + "step": 18257 + }, + { + "epoch": 2.85, + "grad_norm": 26.398294822711463, + "learning_rate": 1.2752112772865144e-07, + "loss": 0.4793, + "step": 18258 + }, + { + "epoch": 2.85, + "grad_norm": 17.819168584249628, + "learning_rate": 1.27252809449836e-07, + "loss": 0.4424, + "step": 18259 + }, + { + "epoch": 2.85, + "grad_norm": 34.710535464812274, + "learning_rate": 1.2698477194617475e-07, + "loss": 0.4617, + "step": 18260 + }, + { + "epoch": 2.85, + "grad_norm": 18.050015686515742, + "learning_rate": 1.2671701522529055e-07, + "loss": 0.3938, + "step": 18261 + }, + { + "epoch": 2.85, + "grad_norm": 23.491837843033025, + "learning_rate": 1.2644953929479954e-07, + "loss": 0.4553, + "step": 18262 + }, + { + "epoch": 2.85, + "grad_norm": 32.02693140364885, + "learning_rate": 1.2618234416230778e-07, + "loss": 0.4523, + "step": 18263 + }, + { + "epoch": 2.85, + "grad_norm": 17.53436103032286, + "learning_rate": 1.2591542983541371e-07, + "loss": 0.4631, + "step": 18264 + }, + { + "epoch": 2.85, + "grad_norm": 15.125403133844381, + "learning_rate": 1.25648796321709e-07, + "loss": 0.4357, + "step": 18265 + }, + { + "epoch": 2.85, + "grad_norm": 30.400829232860094, + "learning_rate": 1.253824436287765e-07, + "loss": 0.4937, + "step": 18266 + }, + { + "epoch": 2.85, + "grad_norm": 27.894364971225666, + "learning_rate": 1.2511637176419012e-07, + "loss": 0.4206, + "step": 18267 + }, + { + "epoch": 2.85, + "grad_norm": 35.16153862190187, + "learning_rate": 1.2485058073551825e-07, + "loss": 0.4851, + "step": 18268 + }, + { + "epoch": 2.85, + "grad_norm": 19.18343405947422, + "learning_rate": 1.2458507055031822e-07, + "loss": 0.4282, + "step": 18269 + }, + { + "epoch": 2.85, + "grad_norm": 15.941899986257978, + "learning_rate": 1.2431984121614282e-07, + "loss": 0.4293, + "step": 18270 + }, + { + "epoch": 2.85, + "grad_norm": 19.170185557965027, + "learning_rate": 1.2405489274053273e-07, + "loss": 0.4209, + "step": 18271 + }, + { + "epoch": 2.85, + "grad_norm": 
18.26559713335416, + "learning_rate": 1.2379022513102524e-07, + "loss": 0.4748, + "step": 18272 + }, + { + "epoch": 2.85, + "grad_norm": 13.58268633357884, + "learning_rate": 1.2352583839514655e-07, + "loss": 0.3817, + "step": 18273 + }, + { + "epoch": 2.85, + "grad_norm": 31.294829727891404, + "learning_rate": 1.2326173254041506e-07, + "loss": 0.4544, + "step": 18274 + }, + { + "epoch": 2.85, + "grad_norm": 19.57193461527387, + "learning_rate": 1.229979075743415e-07, + "loss": 0.4164, + "step": 18275 + }, + { + "epoch": 2.85, + "grad_norm": 27.49040521870134, + "learning_rate": 1.227343635044298e-07, + "loss": 0.4649, + "step": 18276 + }, + { + "epoch": 2.85, + "grad_norm": 30.295074280515394, + "learning_rate": 1.224711003381751e-07, + "loss": 0.4698, + "step": 18277 + }, + { + "epoch": 2.86, + "grad_norm": 22.15709087191234, + "learning_rate": 1.2220811808306477e-07, + "loss": 0.4272, + "step": 18278 + }, + { + "epoch": 2.86, + "grad_norm": 19.525911835352204, + "learning_rate": 1.219454167465761e-07, + "loss": 0.4171, + "step": 18279 + }, + { + "epoch": 2.86, + "grad_norm": 22.250762091936767, + "learning_rate": 1.2168299633618209e-07, + "loss": 0.4116, + "step": 18280 + }, + { + "epoch": 2.86, + "grad_norm": 20.535413923673268, + "learning_rate": 1.2142085685934446e-07, + "loss": 0.446, + "step": 18281 + }, + { + "epoch": 2.86, + "grad_norm": 16.227910429830516, + "learning_rate": 1.2115899832351952e-07, + "loss": 0.4088, + "step": 18282 + }, + { + "epoch": 2.86, + "grad_norm": 20.945430437984616, + "learning_rate": 1.2089742073615352e-07, + "loss": 0.4462, + "step": 18283 + }, + { + "epoch": 2.86, + "grad_norm": 19.46003956677633, + "learning_rate": 1.2063612410468496e-07, + "loss": 0.4724, + "step": 18284 + }, + { + "epoch": 2.86, + "grad_norm": 13.531547658743047, + "learning_rate": 1.2037510843654565e-07, + "loss": 0.3948, + "step": 18285 + }, + { + "epoch": 2.86, + "grad_norm": 21.28698440908482, + "learning_rate": 1.201143737391597e-07, + "loss": 0.4691, + "step": 18286 + }, + { + "epoch": 2.86, + "grad_norm": 17.219199387627988, + "learning_rate": 1.1985392001994112e-07, + "loss": 0.424, + "step": 18287 + }, + { + "epoch": 2.86, + "grad_norm": 17.407352152367334, + "learning_rate": 1.1959374728629847e-07, + "loss": 0.4237, + "step": 18288 + }, + { + "epoch": 2.86, + "grad_norm": 28.14379353294335, + "learning_rate": 1.1933385554562803e-07, + "loss": 0.4131, + "step": 18289 + }, + { + "epoch": 2.86, + "grad_norm": 19.762368673543325, + "learning_rate": 1.1907424480532282e-07, + "loss": 0.4638, + "step": 18290 + }, + { + "epoch": 2.86, + "grad_norm": 22.293099082992246, + "learning_rate": 1.1881491507276688e-07, + "loss": 0.4623, + "step": 18291 + }, + { + "epoch": 2.86, + "grad_norm": 27.96405503489841, + "learning_rate": 1.1855586635533434e-07, + "loss": 0.4386, + "step": 18292 + }, + { + "epoch": 2.86, + "grad_norm": 20.452285267790614, + "learning_rate": 1.1829709866039263e-07, + "loss": 0.4069, + "step": 18293 + }, + { + "epoch": 2.86, + "grad_norm": 16.581660493226238, + "learning_rate": 1.180386119952992e-07, + "loss": 0.3631, + "step": 18294 + }, + { + "epoch": 2.86, + "grad_norm": 21.5751306664675, + "learning_rate": 1.1778040636740928e-07, + "loss": 0.3806, + "step": 18295 + }, + { + "epoch": 2.86, + "grad_norm": 21.562729975952855, + "learning_rate": 1.1752248178406144e-07, + "loss": 0.4119, + "step": 18296 + }, + { + "epoch": 2.86, + "grad_norm": 22.991797096186204, + "learning_rate": 1.1726483825259427e-07, + "loss": 0.4657, + "step": 18297 + }, + { + "epoch": 2.86, 
+ "grad_norm": 23.926652440305812, + "learning_rate": 1.1700747578033412e-07, + "loss": 0.4166, + "step": 18298 + }, + { + "epoch": 2.86, + "grad_norm": 19.856143375593142, + "learning_rate": 1.1675039437459956e-07, + "loss": 0.4117, + "step": 18299 + }, + { + "epoch": 2.86, + "grad_norm": 25.064544546774627, + "learning_rate": 1.1649359404270144e-07, + "loss": 0.5282, + "step": 18300 + }, + { + "epoch": 2.86, + "grad_norm": 20.1012338751548, + "learning_rate": 1.1623707479194501e-07, + "loss": 0.4263, + "step": 18301 + }, + { + "epoch": 2.86, + "grad_norm": 19.484279506911268, + "learning_rate": 1.1598083662962335e-07, + "loss": 0.4213, + "step": 18302 + }, + { + "epoch": 2.86, + "grad_norm": 11.43540317695954, + "learning_rate": 1.1572487956302613e-07, + "loss": 0.4002, + "step": 18303 + }, + { + "epoch": 2.86, + "grad_norm": 18.724048546019766, + "learning_rate": 1.1546920359943092e-07, + "loss": 0.4233, + "step": 18304 + }, + { + "epoch": 2.86, + "grad_norm": 27.13283798311038, + "learning_rate": 1.1521380874610854e-07, + "loss": 0.5084, + "step": 18305 + }, + { + "epoch": 2.86, + "grad_norm": 17.356874741693716, + "learning_rate": 1.1495869501032209e-07, + "loss": 0.4074, + "step": 18306 + }, + { + "epoch": 2.86, + "grad_norm": 30.088977923583656, + "learning_rate": 1.1470386239932907e-07, + "loss": 0.4498, + "step": 18307 + }, + { + "epoch": 2.86, + "grad_norm": 17.527666461746104, + "learning_rate": 1.1444931092037592e-07, + "loss": 0.4372, + "step": 18308 + }, + { + "epoch": 2.86, + "grad_norm": 23.48198879070703, + "learning_rate": 1.141950405807013e-07, + "loss": 0.4184, + "step": 18309 + }, + { + "epoch": 2.86, + "grad_norm": 22.287432520406412, + "learning_rate": 1.1394105138753498e-07, + "loss": 0.394, + "step": 18310 + }, + { + "epoch": 2.86, + "grad_norm": 23.894798388320066, + "learning_rate": 1.1368734334810339e-07, + "loss": 0.4483, + "step": 18311 + }, + { + "epoch": 2.86, + "grad_norm": 19.3576798947693, + "learning_rate": 1.1343391646962076e-07, + "loss": 0.4501, + "step": 18312 + }, + { + "epoch": 2.86, + "grad_norm": 27.284140015930348, + "learning_rate": 1.1318077075929357e-07, + "loss": 0.437, + "step": 18313 + }, + { + "epoch": 2.86, + "grad_norm": 19.4743317261089, + "learning_rate": 1.129279062243216e-07, + "loss": 0.4076, + "step": 18314 + }, + { + "epoch": 2.86, + "grad_norm": 26.494946311682842, + "learning_rate": 1.1267532287189576e-07, + "loss": 0.4364, + "step": 18315 + }, + { + "epoch": 2.86, + "grad_norm": 21.778848902942965, + "learning_rate": 1.1242302070920141e-07, + "loss": 0.3701, + "step": 18316 + }, + { + "epoch": 2.86, + "grad_norm": 32.33168403832644, + "learning_rate": 1.1217099974341173e-07, + "loss": 0.5231, + "step": 18317 + }, + { + "epoch": 2.86, + "grad_norm": 20.193657443112794, + "learning_rate": 1.1191925998169317e-07, + "loss": 0.4437, + "step": 18318 + }, + { + "epoch": 2.86, + "grad_norm": 16.553740629145125, + "learning_rate": 1.1166780143120781e-07, + "loss": 0.376, + "step": 18319 + }, + { + "epoch": 2.86, + "grad_norm": 17.83741249538844, + "learning_rate": 1.1141662409910547e-07, + "loss": 0.3954, + "step": 18320 + }, + { + "epoch": 2.86, + "grad_norm": 19.31835728310645, + "learning_rate": 1.1116572799252934e-07, + "loss": 0.4073, + "step": 18321 + }, + { + "epoch": 2.86, + "grad_norm": 27.507013174424475, + "learning_rate": 1.109151131186148e-07, + "loss": 0.4684, + "step": 18322 + }, + { + "epoch": 2.86, + "grad_norm": 18.420351265117326, + "learning_rate": 1.106647794844895e-07, + "loss": 0.446, + "step": 18323 + }, + { + 
"epoch": 2.86, + "grad_norm": 30.284290627564843, + "learning_rate": 1.104147270972733e-07, + "loss": 0.4706, + "step": 18324 + }, + { + "epoch": 2.86, + "grad_norm": 21.72598521107956, + "learning_rate": 1.1016495596407717e-07, + "loss": 0.3406, + "step": 18325 + }, + { + "epoch": 2.86, + "grad_norm": 30.89734352383938, + "learning_rate": 1.0991546609200432e-07, + "loss": 0.4189, + "step": 18326 + }, + { + "epoch": 2.86, + "grad_norm": 32.40921275179439, + "learning_rate": 1.0966625748815019e-07, + "loss": 0.4185, + "step": 18327 + }, + { + "epoch": 2.86, + "grad_norm": 25.249523932033956, + "learning_rate": 1.0941733015960132e-07, + "loss": 0.3936, + "step": 18328 + }, + { + "epoch": 2.86, + "grad_norm": 27.365700176310437, + "learning_rate": 1.0916868411343872e-07, + "loss": 0.41, + "step": 18329 + }, + { + "epoch": 2.86, + "grad_norm": 21.20717487894361, + "learning_rate": 1.089203193567312e-07, + "loss": 0.3419, + "step": 18330 + }, + { + "epoch": 2.86, + "grad_norm": 18.1012528787738, + "learning_rate": 1.0867223589654418e-07, + "loss": 0.4043, + "step": 18331 + }, + { + "epoch": 2.86, + "grad_norm": 20.8934833337288, + "learning_rate": 1.0842443373993427e-07, + "loss": 0.4408, + "step": 18332 + }, + { + "epoch": 2.86, + "grad_norm": 15.506846019694269, + "learning_rate": 1.0817691289394583e-07, + "loss": 0.4368, + "step": 18333 + }, + { + "epoch": 2.86, + "grad_norm": 25.07982944626364, + "learning_rate": 1.0792967336561877e-07, + "loss": 0.4592, + "step": 18334 + }, + { + "epoch": 2.86, + "grad_norm": 15.647404359982936, + "learning_rate": 1.0768271516198525e-07, + "loss": 0.3941, + "step": 18335 + }, + { + "epoch": 2.86, + "grad_norm": 21.869458228862086, + "learning_rate": 1.0743603829006854e-07, + "loss": 0.4166, + "step": 18336 + }, + { + "epoch": 2.86, + "grad_norm": 22.32477108863656, + "learning_rate": 1.0718964275688526e-07, + "loss": 0.3814, + "step": 18337 + }, + { + "epoch": 2.86, + "grad_norm": 23.72555269787382, + "learning_rate": 1.069435285694409e-07, + "loss": 0.4282, + "step": 18338 + }, + { + "epoch": 2.86, + "grad_norm": 27.716896747793303, + "learning_rate": 1.0669769573473432e-07, + "loss": 0.4296, + "step": 18339 + }, + { + "epoch": 2.86, + "grad_norm": 20.428220041937145, + "learning_rate": 1.064521442597577e-07, + "loss": 0.4553, + "step": 18340 + }, + { + "epoch": 2.86, + "grad_norm": 16.108314938081648, + "learning_rate": 1.0620687415149655e-07, + "loss": 0.4194, + "step": 18341 + }, + { + "epoch": 2.87, + "grad_norm": 34.529464409693254, + "learning_rate": 1.0596188541692309e-07, + "loss": 0.471, + "step": 18342 + }, + { + "epoch": 2.87, + "grad_norm": 20.951167182996517, + "learning_rate": 1.0571717806300508e-07, + "loss": 0.4234, + "step": 18343 + }, + { + "epoch": 2.87, + "grad_norm": 20.66355459530119, + "learning_rate": 1.0547275209670249e-07, + "loss": 0.3926, + "step": 18344 + }, + { + "epoch": 2.87, + "grad_norm": 30.87482004653751, + "learning_rate": 1.0522860752496755e-07, + "loss": 0.4628, + "step": 18345 + }, + { + "epoch": 2.87, + "grad_norm": 28.40349049640971, + "learning_rate": 1.0498474435474249e-07, + "loss": 0.413, + "step": 18346 + }, + { + "epoch": 2.87, + "grad_norm": 18.8221611035089, + "learning_rate": 1.0474116259296285e-07, + "loss": 0.3641, + "step": 18347 + }, + { + "epoch": 2.87, + "grad_norm": 17.732197671568045, + "learning_rate": 1.0449786224655644e-07, + "loss": 0.4381, + "step": 18348 + }, + { + "epoch": 2.87, + "grad_norm": 20.640845109168804, + "learning_rate": 1.0425484332244107e-07, + "loss": 0.4326, + "step": 18349 + 
}, + { + "epoch": 2.87, + "grad_norm": 14.5540106365669, + "learning_rate": 1.0401210582753007e-07, + "loss": 0.3778, + "step": 18350 + }, + { + "epoch": 2.87, + "grad_norm": 18.943376669544964, + "learning_rate": 1.0376964976872461e-07, + "loss": 0.4012, + "step": 18351 + }, + { + "epoch": 2.87, + "grad_norm": 20.3970843567236, + "learning_rate": 1.035274751529225e-07, + "loss": 0.4325, + "step": 18352 + }, + { + "epoch": 2.87, + "grad_norm": 16.53049528656263, + "learning_rate": 1.0328558198700933e-07, + "loss": 0.4194, + "step": 18353 + }, + { + "epoch": 2.87, + "grad_norm": 28.183961027611318, + "learning_rate": 1.0304397027786516e-07, + "loss": 0.3995, + "step": 18354 + }, + { + "epoch": 2.87, + "grad_norm": 32.494961688259316, + "learning_rate": 1.0280264003236006e-07, + "loss": 0.4576, + "step": 18355 + }, + { + "epoch": 2.87, + "grad_norm": 24.538597885877973, + "learning_rate": 1.0256159125735742e-07, + "loss": 0.4198, + "step": 18356 + }, + { + "epoch": 2.87, + "grad_norm": 16.949595634207135, + "learning_rate": 1.023208239597151e-07, + "loss": 0.4104, + "step": 18357 + }, + { + "epoch": 2.87, + "grad_norm": 16.961369053442983, + "learning_rate": 1.0208033814627872e-07, + "loss": 0.4271, + "step": 18358 + }, + { + "epoch": 2.87, + "grad_norm": 24.07025856054119, + "learning_rate": 1.0184013382388613e-07, + "loss": 0.4391, + "step": 18359 + }, + { + "epoch": 2.87, + "grad_norm": 28.14149565993997, + "learning_rate": 1.0160021099936967e-07, + "loss": 0.4248, + "step": 18360 + }, + { + "epoch": 2.87, + "grad_norm": 27.49864984687522, + "learning_rate": 1.0136056967955388e-07, + "loss": 0.4734, + "step": 18361 + }, + { + "epoch": 2.87, + "grad_norm": 20.68384317890662, + "learning_rate": 1.0112120987125329e-07, + "loss": 0.4223, + "step": 18362 + }, + { + "epoch": 2.87, + "grad_norm": 24.888227326931965, + "learning_rate": 1.0088213158127358e-07, + "loss": 0.4164, + "step": 18363 + }, + { + "epoch": 2.87, + "grad_norm": 21.193933531366707, + "learning_rate": 1.0064333481641597e-07, + "loss": 0.4623, + "step": 18364 + }, + { + "epoch": 2.87, + "grad_norm": 24.71110605223992, + "learning_rate": 1.0040481958346948e-07, + "loss": 0.4058, + "step": 18365 + }, + { + "epoch": 2.87, + "grad_norm": 25.67617787563051, + "learning_rate": 1.0016658588922091e-07, + "loss": 0.4615, + "step": 18366 + }, + { + "epoch": 2.87, + "grad_norm": 26.667018233654044, + "learning_rate": 9.99286337404426e-08, + "loss": 0.4211, + "step": 18367 + }, + { + "epoch": 2.87, + "grad_norm": 15.43700196258174, + "learning_rate": 9.969096314390248e-08, + "loss": 0.4526, + "step": 18368 + }, + { + "epoch": 2.87, + "grad_norm": 25.116893618274897, + "learning_rate": 9.945357410635959e-08, + "loss": 0.478, + "step": 18369 + }, + { + "epoch": 2.87, + "grad_norm": 17.175931624933508, + "learning_rate": 9.92164666345663e-08, + "loss": 0.4395, + "step": 18370 + }, + { + "epoch": 2.87, + "grad_norm": 22.993850233103725, + "learning_rate": 9.8979640735265e-08, + "loss": 0.4018, + "step": 18371 + }, + { + "epoch": 2.87, + "grad_norm": 18.71850141414339, + "learning_rate": 9.87430964151892e-08, + "loss": 0.4075, + "step": 18372 + }, + { + "epoch": 2.87, + "grad_norm": 19.911529531824726, + "learning_rate": 9.850683368106794e-08, + "loss": 0.3908, + "step": 18373 + }, + { + "epoch": 2.87, + "grad_norm": 17.664318398786268, + "learning_rate": 9.827085253962144e-08, + "loss": 0.3525, + "step": 18374 + }, + { + "epoch": 2.87, + "grad_norm": 20.89809913931761, + "learning_rate": 9.803515299755872e-08, + "loss": 0.3946, + "step": 18375 
+ }, + { + "epoch": 2.87, + "grad_norm": 25.24531524817793, + "learning_rate": 9.779973506158446e-08, + "loss": 0.4048, + "step": 18376 + }, + { + "epoch": 2.87, + "grad_norm": 20.046205461876834, + "learning_rate": 9.75645987383922e-08, + "loss": 0.4428, + "step": 18377 + }, + { + "epoch": 2.87, + "grad_norm": 23.8457090734149, + "learning_rate": 9.732974403466988e-08, + "loss": 0.4265, + "step": 18378 + }, + { + "epoch": 2.87, + "grad_norm": 16.375499224514808, + "learning_rate": 9.709517095709775e-08, + "loss": 0.3892, + "step": 18379 + }, + { + "epoch": 2.87, + "grad_norm": 30.318810043126096, + "learning_rate": 9.68608795123449e-08, + "loss": 0.4579, + "step": 18380 + }, + { + "epoch": 2.87, + "grad_norm": 23.540822286499488, + "learning_rate": 9.662686970707602e-08, + "loss": 0.4104, + "step": 18381 + }, + { + "epoch": 2.87, + "grad_norm": 21.78546797700525, + "learning_rate": 9.639314154794576e-08, + "loss": 0.45, + "step": 18382 + }, + { + "epoch": 2.87, + "grad_norm": 29.403820297245723, + "learning_rate": 9.615969504160106e-08, + "loss": 0.4565, + "step": 18383 + }, + { + "epoch": 2.87, + "grad_norm": 24.36859784779545, + "learning_rate": 9.5926530194681e-08, + "loss": 0.4095, + "step": 18384 + }, + { + "epoch": 2.87, + "grad_norm": 27.500862742025546, + "learning_rate": 9.569364701381589e-08, + "loss": 0.425, + "step": 18385 + }, + { + "epoch": 2.87, + "grad_norm": 18.634175997254733, + "learning_rate": 9.546104550563152e-08, + "loss": 0.3868, + "step": 18386 + }, + { + "epoch": 2.87, + "grad_norm": 28.924624283014616, + "learning_rate": 9.522872567673924e-08, + "loss": 0.4375, + "step": 18387 + }, + { + "epoch": 2.87, + "grad_norm": 25.80431594257756, + "learning_rate": 9.499668753374936e-08, + "loss": 0.4815, + "step": 18388 + }, + { + "epoch": 2.87, + "grad_norm": 21.989342069424193, + "learning_rate": 9.476493108325768e-08, + "loss": 0.48, + "step": 18389 + }, + { + "epoch": 2.87, + "grad_norm": 17.54848776409242, + "learning_rate": 9.453345633185784e-08, + "loss": 0.4332, + "step": 18390 + }, + { + "epoch": 2.87, + "grad_norm": 28.07947207060383, + "learning_rate": 9.430226328613234e-08, + "loss": 0.4644, + "step": 18391 + }, + { + "epoch": 2.87, + "grad_norm": 21.896055242361882, + "learning_rate": 9.407135195265593e-08, + "loss": 0.4584, + "step": 18392 + }, + { + "epoch": 2.87, + "grad_norm": 21.43880150489254, + "learning_rate": 9.384072233799447e-08, + "loss": 0.44, + "step": 18393 + }, + { + "epoch": 2.87, + "grad_norm": 38.68869021191799, + "learning_rate": 9.361037444870824e-08, + "loss": 0.3873, + "step": 18394 + }, + { + "epoch": 2.87, + "grad_norm": 21.748485184951647, + "learning_rate": 9.33803082913487e-08, + "loss": 0.4395, + "step": 18395 + }, + { + "epoch": 2.87, + "grad_norm": 22.035705717796795, + "learning_rate": 9.315052387245726e-08, + "loss": 0.4298, + "step": 18396 + }, + { + "epoch": 2.87, + "grad_norm": 16.570742163152755, + "learning_rate": 9.292102119856983e-08, + "loss": 0.4361, + "step": 18397 + }, + { + "epoch": 2.87, + "grad_norm": 21.56563215726859, + "learning_rate": 9.269180027621228e-08, + "loss": 0.4201, + "step": 18398 + }, + { + "epoch": 2.87, + "grad_norm": 18.876684476042943, + "learning_rate": 9.246286111190495e-08, + "loss": 0.4364, + "step": 18399 + }, + { + "epoch": 2.87, + "grad_norm": 22.931858661805176, + "learning_rate": 9.223420371215818e-08, + "loss": 0.3939, + "step": 18400 + }, + { + "epoch": 2.87, + "grad_norm": 24.19178338097972, + "learning_rate": 9.200582808347569e-08, + "loss": 0.3766, + "step": 18401 + }, + { + 
"epoch": 2.87, + "grad_norm": 26.112399496229294, + "learning_rate": 9.177773423235004e-08, + "loss": 0.4233, + "step": 18402 + }, + { + "epoch": 2.87, + "grad_norm": 24.344523069302543, + "learning_rate": 9.154992216526937e-08, + "loss": 0.3983, + "step": 18403 + }, + { + "epoch": 2.87, + "grad_norm": 35.93790694628309, + "learning_rate": 9.132239188871405e-08, + "loss": 0.431, + "step": 18404 + }, + { + "epoch": 2.87, + "grad_norm": 20.423853945781914, + "learning_rate": 9.109514340915226e-08, + "loss": 0.4403, + "step": 18405 + }, + { + "epoch": 2.88, + "grad_norm": 32.61197658236122, + "learning_rate": 9.086817673304882e-08, + "loss": 0.4037, + "step": 18406 + }, + { + "epoch": 2.88, + "grad_norm": 22.570937209101274, + "learning_rate": 9.064149186685744e-08, + "loss": 0.4733, + "step": 18407 + }, + { + "epoch": 2.88, + "grad_norm": 19.297242912460888, + "learning_rate": 9.041508881702521e-08, + "loss": 0.3966, + "step": 18408 + }, + { + "epoch": 2.88, + "grad_norm": 20.985048350014203, + "learning_rate": 9.018896758999029e-08, + "loss": 0.4334, + "step": 18409 + }, + { + "epoch": 2.88, + "grad_norm": 22.705161815201944, + "learning_rate": 8.996312819218533e-08, + "loss": 0.4409, + "step": 18410 + }, + { + "epoch": 2.88, + "grad_norm": 17.741281472827755, + "learning_rate": 8.973757063003075e-08, + "loss": 0.371, + "step": 18411 + }, + { + "epoch": 2.88, + "grad_norm": 14.623725437191284, + "learning_rate": 8.95122949099425e-08, + "loss": 0.4228, + "step": 18412 + }, + { + "epoch": 2.88, + "grad_norm": 21.906957812172738, + "learning_rate": 8.928730103832773e-08, + "loss": 0.4596, + "step": 18413 + }, + { + "epoch": 2.88, + "grad_norm": 17.49742682987387, + "learning_rate": 8.90625890215846e-08, + "loss": 0.381, + "step": 18414 + }, + { + "epoch": 2.88, + "grad_norm": 39.01680808513573, + "learning_rate": 8.883815886610358e-08, + "loss": 0.496, + "step": 18415 + }, + { + "epoch": 2.88, + "grad_norm": 22.809953867555414, + "learning_rate": 8.861401057826624e-08, + "loss": 0.4104, + "step": 18416 + }, + { + "epoch": 2.88, + "grad_norm": 27.70306184334395, + "learning_rate": 8.839014416444969e-08, + "loss": 0.4811, + "step": 18417 + }, + { + "epoch": 2.88, + "grad_norm": 19.58237640362796, + "learning_rate": 8.816655963101883e-08, + "loss": 0.4681, + "step": 18418 + }, + { + "epoch": 2.88, + "grad_norm": 28.904941402404614, + "learning_rate": 8.794325698433193e-08, + "loss": 0.4524, + "step": 18419 + }, + { + "epoch": 2.88, + "grad_norm": 18.799944844158663, + "learning_rate": 8.772023623074166e-08, + "loss": 0.4036, + "step": 18420 + }, + { + "epoch": 2.88, + "grad_norm": 26.88421682233436, + "learning_rate": 8.749749737658964e-08, + "loss": 0.4637, + "step": 18421 + }, + { + "epoch": 2.88, + "grad_norm": 34.75290987983933, + "learning_rate": 8.727504042820966e-08, + "loss": 0.4365, + "step": 18422 + }, + { + "epoch": 2.88, + "grad_norm": 25.53116679907588, + "learning_rate": 8.705286539192781e-08, + "loss": 0.4023, + "step": 18423 + }, + { + "epoch": 2.88, + "grad_norm": 20.29214367331964, + "learning_rate": 8.683097227406456e-08, + "loss": 0.4595, + "step": 18424 + }, + { + "epoch": 2.88, + "grad_norm": 24.424526102433298, + "learning_rate": 8.660936108093043e-08, + "loss": 0.4449, + "step": 18425 + }, + { + "epoch": 2.88, + "grad_norm": 21.800541174967424, + "learning_rate": 8.638803181882483e-08, + "loss": 0.4619, + "step": 18426 + }, + { + "epoch": 2.88, + "grad_norm": 18.868383997538675, + "learning_rate": 8.616698449404492e-08, + "loss": 0.4536, + "step": 18427 + }, + { + "epoch": 
2.88, + "grad_norm": 27.182119321572173, + "learning_rate": 8.594621911287682e-08, + "loss": 0.4578, + "step": 18428 + }, + { + "epoch": 2.88, + "grad_norm": 23.349822477599126, + "learning_rate": 8.572573568159881e-08, + "loss": 0.4155, + "step": 18429 + }, + { + "epoch": 2.88, + "grad_norm": 16.235199743034283, + "learning_rate": 8.550553420648145e-08, + "loss": 0.426, + "step": 18430 + }, + { + "epoch": 2.88, + "grad_norm": 29.155371849240236, + "learning_rate": 8.528561469378637e-08, + "loss": 0.4482, + "step": 18431 + }, + { + "epoch": 2.88, + "grad_norm": 20.000468394635895, + "learning_rate": 8.50659771497686e-08, + "loss": 0.4205, + "step": 18432 + }, + { + "epoch": 2.88, + "grad_norm": 27.272111640289037, + "learning_rate": 8.484662158067425e-08, + "loss": 0.5392, + "step": 18433 + }, + { + "epoch": 2.88, + "grad_norm": 18.60949134149706, + "learning_rate": 8.462754799274275e-08, + "loss": 0.3962, + "step": 18434 + }, + { + "epoch": 2.88, + "grad_norm": 24.73631820588152, + "learning_rate": 8.440875639220247e-08, + "loss": 0.3951, + "step": 18435 + }, + { + "epoch": 2.88, + "grad_norm": 32.32679850814688, + "learning_rate": 8.41902467852762e-08, + "loss": 0.4657, + "step": 18436 + }, + { + "epoch": 2.88, + "grad_norm": 20.361122131769324, + "learning_rate": 8.397201917817899e-08, + "loss": 0.4396, + "step": 18437 + }, + { + "epoch": 2.88, + "grad_norm": 16.761292744870147, + "learning_rate": 8.375407357711806e-08, + "loss": 0.4351, + "step": 18438 + }, + { + "epoch": 2.88, + "grad_norm": 24.202548007847533, + "learning_rate": 8.35364099882896e-08, + "loss": 0.4313, + "step": 18439 + }, + { + "epoch": 2.88, + "grad_norm": 20.581560612045358, + "learning_rate": 8.331902841788531e-08, + "loss": 0.3866, + "step": 18440 + }, + { + "epoch": 2.88, + "grad_norm": 25.526838452592482, + "learning_rate": 8.310192887208579e-08, + "loss": 0.4333, + "step": 18441 + }, + { + "epoch": 2.88, + "grad_norm": 20.345680104239268, + "learning_rate": 8.288511135706611e-08, + "loss": 0.4141, + "step": 18442 + }, + { + "epoch": 2.88, + "grad_norm": 23.12467925970497, + "learning_rate": 8.266857587899357e-08, + "loss": 0.4855, + "step": 18443 + }, + { + "epoch": 2.88, + "grad_norm": 23.38043819880168, + "learning_rate": 8.245232244402435e-08, + "loss": 0.4931, + "step": 18444 + }, + { + "epoch": 2.88, + "grad_norm": 26.510071820912223, + "learning_rate": 8.223635105831018e-08, + "loss": 0.4557, + "step": 18445 + }, + { + "epoch": 2.88, + "grad_norm": 30.774422568064423, + "learning_rate": 8.202066172799172e-08, + "loss": 0.4404, + "step": 18446 + }, + { + "epoch": 2.88, + "grad_norm": 20.156260721810025, + "learning_rate": 8.180525445920407e-08, + "loss": 0.4852, + "step": 18447 + }, + { + "epoch": 2.88, + "grad_norm": 21.6878259796467, + "learning_rate": 8.159012925807341e-08, + "loss": 0.3993, + "step": 18448 + }, + { + "epoch": 2.88, + "grad_norm": 39.4023906835687, + "learning_rate": 8.1375286130716e-08, + "loss": 0.4522, + "step": 18449 + }, + { + "epoch": 2.88, + "grad_norm": 24.398988439646708, + "learning_rate": 8.116072508324579e-08, + "loss": 0.3749, + "step": 18450 + }, + { + "epoch": 2.88, + "grad_norm": 35.94074062165958, + "learning_rate": 8.094644612176017e-08, + "loss": 0.4712, + "step": 18451 + }, + { + "epoch": 2.88, + "grad_norm": 24.645613179361575, + "learning_rate": 8.073244925235535e-08, + "loss": 0.4866, + "step": 18452 + }, + { + "epoch": 2.88, + "grad_norm": 24.49746228232012, + "learning_rate": 8.051873448111758e-08, + "loss": 0.4786, + "step": 18453 + }, + { + "epoch": 2.88, + 
"grad_norm": 19.18045609404505, + "learning_rate": 8.030530181412422e-08, + "loss": 0.4345, + "step": 18454 + }, + { + "epoch": 2.88, + "grad_norm": 16.963552300654136, + "learning_rate": 8.009215125744486e-08, + "loss": 0.4205, + "step": 18455 + }, + { + "epoch": 2.88, + "grad_norm": 23.741096115634896, + "learning_rate": 7.987928281714241e-08, + "loss": 0.4457, + "step": 18456 + }, + { + "epoch": 2.88, + "grad_norm": 17.735347494693396, + "learning_rate": 7.966669649926872e-08, + "loss": 0.4812, + "step": 18457 + }, + { + "epoch": 2.88, + "grad_norm": 20.551751869945793, + "learning_rate": 7.945439230987228e-08, + "loss": 0.3879, + "step": 18458 + }, + { + "epoch": 2.88, + "grad_norm": 15.915078012297402, + "learning_rate": 7.924237025498938e-08, + "loss": 0.3645, + "step": 18459 + }, + { + "epoch": 2.88, + "grad_norm": 26.16597690965238, + "learning_rate": 7.903063034064962e-08, + "loss": 0.4434, + "step": 18460 + }, + { + "epoch": 2.88, + "grad_norm": 22.936425450228953, + "learning_rate": 7.881917257287485e-08, + "loss": 0.4915, + "step": 18461 + }, + { + "epoch": 2.88, + "grad_norm": 19.1757377045868, + "learning_rate": 7.860799695767918e-08, + "loss": 0.4435, + "step": 18462 + }, + { + "epoch": 2.88, + "grad_norm": 21.778394975136973, + "learning_rate": 7.839710350106888e-08, + "loss": 0.4262, + "step": 18463 + }, + { + "epoch": 2.88, + "grad_norm": 15.386251898154999, + "learning_rate": 7.81864922090414e-08, + "loss": 0.3686, + "step": 18464 + }, + { + "epoch": 2.88, + "grad_norm": 16.32013498595952, + "learning_rate": 7.797616308758526e-08, + "loss": 0.4027, + "step": 18465 + }, + { + "epoch": 2.88, + "grad_norm": 16.44788166374201, + "learning_rate": 7.776611614268236e-08, + "loss": 0.4164, + "step": 18466 + }, + { + "epoch": 2.88, + "grad_norm": 18.394630355713797, + "learning_rate": 7.755635138030682e-08, + "loss": 0.4065, + "step": 18467 + }, + { + "epoch": 2.88, + "grad_norm": 37.130097902971805, + "learning_rate": 7.734686880642495e-08, + "loss": 0.548, + "step": 18468 + }, + { + "epoch": 2.88, + "grad_norm": 26.742833770222312, + "learning_rate": 7.71376684269931e-08, + "loss": 0.3763, + "step": 18469 + }, + { + "epoch": 2.89, + "grad_norm": 30.812196427333543, + "learning_rate": 7.692875024796099e-08, + "loss": 0.4334, + "step": 18470 + }, + { + "epoch": 2.89, + "grad_norm": 29.26335271271613, + "learning_rate": 7.672011427527159e-08, + "loss": 0.4692, + "step": 18471 + }, + { + "epoch": 2.89, + "grad_norm": 16.540203146146805, + "learning_rate": 7.651176051485576e-08, + "loss": 0.4594, + "step": 18472 + }, + { + "epoch": 2.89, + "grad_norm": 33.6817246831706, + "learning_rate": 7.630368897264096e-08, + "loss": 0.4307, + "step": 18473 + }, + { + "epoch": 2.89, + "grad_norm": 21.05279806573189, + "learning_rate": 7.609589965454356e-08, + "loss": 0.3759, + "step": 18474 + }, + { + "epoch": 2.89, + "grad_norm": 17.47577836004555, + "learning_rate": 7.58883925664744e-08, + "loss": 0.4349, + "step": 18475 + }, + { + "epoch": 2.89, + "grad_norm": 17.057102090107257, + "learning_rate": 7.56811677143332e-08, + "loss": 0.4606, + "step": 18476 + }, + { + "epoch": 2.89, + "grad_norm": 21.720885014053273, + "learning_rate": 7.547422510401414e-08, + "loss": 0.4472, + "step": 18477 + }, + { + "epoch": 2.89, + "grad_norm": 22.19976653725924, + "learning_rate": 7.526756474140252e-08, + "loss": 0.4428, + "step": 18478 + }, + { + "epoch": 2.89, + "grad_norm": 33.325196305159665, + "learning_rate": 7.506118663237583e-08, + "loss": 0.4944, + "step": 18479 + }, + { + "epoch": 2.89, + 
"grad_norm": 23.749362944541655, + "learning_rate": 7.485509078280384e-08, + "loss": 0.4245, + "step": 18480 + }, + { + "epoch": 2.89, + "grad_norm": 16.145792908361546, + "learning_rate": 7.46492771985452e-08, + "loss": 0.3999, + "step": 18481 + }, + { + "epoch": 2.89, + "grad_norm": 18.13890644840671, + "learning_rate": 7.444374588545522e-08, + "loss": 0.4817, + "step": 18482 + }, + { + "epoch": 2.89, + "grad_norm": 22.025304870086725, + "learning_rate": 7.423849684938033e-08, + "loss": 0.4045, + "step": 18483 + }, + { + "epoch": 2.89, + "grad_norm": 30.917424138972326, + "learning_rate": 7.403353009615588e-08, + "loss": 0.4678, + "step": 18484 + }, + { + "epoch": 2.89, + "grad_norm": 15.667072842114589, + "learning_rate": 7.382884563161052e-08, + "loss": 0.4348, + "step": 18485 + }, + { + "epoch": 2.89, + "grad_norm": 19.697520489006024, + "learning_rate": 7.362444346156517e-08, + "loss": 0.4321, + "step": 18486 + }, + { + "epoch": 2.89, + "grad_norm": 22.577317795299162, + "learning_rate": 7.342032359183515e-08, + "loss": 0.4574, + "step": 18487 + }, + { + "epoch": 2.89, + "grad_norm": 25.125105402141795, + "learning_rate": 7.321648602822473e-08, + "loss": 0.4092, + "step": 18488 + }, + { + "epoch": 2.89, + "grad_norm": 26.371183509091896, + "learning_rate": 7.301293077653038e-08, + "loss": 0.3988, + "step": 18489 + }, + { + "epoch": 2.89, + "grad_norm": 19.46116850182559, + "learning_rate": 7.280965784253968e-08, + "loss": 0.4658, + "step": 18490 + }, + { + "epoch": 2.89, + "grad_norm": 22.9599841667247, + "learning_rate": 7.26066672320358e-08, + "loss": 0.4495, + "step": 18491 + }, + { + "epoch": 2.89, + "grad_norm": 19.241037532658318, + "learning_rate": 7.240395895079189e-08, + "loss": 0.4071, + "step": 18492 + }, + { + "epoch": 2.89, + "grad_norm": 17.04750890978004, + "learning_rate": 7.22015330045711e-08, + "loss": 0.38, + "step": 18493 + }, + { + "epoch": 2.89, + "grad_norm": 15.986960141430107, + "learning_rate": 7.199938939913109e-08, + "loss": 0.4363, + "step": 18494 + }, + { + "epoch": 2.89, + "grad_norm": 14.181144282223094, + "learning_rate": 7.179752814022056e-08, + "loss": 0.399, + "step": 18495 + }, + { + "epoch": 2.89, + "grad_norm": 15.774148673717654, + "learning_rate": 7.159594923358159e-08, + "loss": 0.3851, + "step": 18496 + }, + { + "epoch": 2.89, + "grad_norm": 23.196637559690757, + "learning_rate": 7.139465268494405e-08, + "loss": 0.4522, + "step": 18497 + }, + { + "epoch": 2.89, + "grad_norm": 22.357463917609813, + "learning_rate": 7.119363850003558e-08, + "loss": 0.4343, + "step": 18498 + }, + { + "epoch": 2.89, + "grad_norm": 18.695675280088306, + "learning_rate": 7.09929066845727e-08, + "loss": 0.3993, + "step": 18499 + }, + { + "epoch": 2.89, + "grad_norm": 17.779029934285226, + "learning_rate": 7.079245724426198e-08, + "loss": 0.4216, + "step": 18500 + }, + { + "epoch": 2.89, + "grad_norm": 23.167377265631004, + "learning_rate": 7.059229018480551e-08, + "loss": 0.4308, + "step": 18501 + }, + { + "epoch": 2.89, + "grad_norm": 30.58525938254278, + "learning_rate": 7.039240551189653e-08, + "loss": 0.4907, + "step": 18502 + }, + { + "epoch": 2.89, + "grad_norm": 31.406738420740492, + "learning_rate": 7.019280323121714e-08, + "loss": 0.484, + "step": 18503 + }, + { + "epoch": 2.89, + "grad_norm": 24.438240513838203, + "learning_rate": 6.999348334844724e-08, + "loss": 0.4921, + "step": 18504 + }, + { + "epoch": 2.89, + "grad_norm": 24.756662620740393, + "learning_rate": 6.979444586925455e-08, + "loss": 0.4635, + "step": 18505 + }, + { + "epoch": 2.89, + 
"grad_norm": 18.424191867938323, + "learning_rate": 6.959569079929674e-08, + "loss": 0.45, + "step": 18506 + }, + { + "epoch": 2.89, + "grad_norm": 18.418451151997708, + "learning_rate": 6.939721814422928e-08, + "loss": 0.3599, + "step": 18507 + }, + { + "epoch": 2.89, + "grad_norm": 26.611592926667754, + "learning_rate": 6.919902790969657e-08, + "loss": 0.5605, + "step": 18508 + }, + { + "epoch": 2.89, + "grad_norm": 27.175031345401116, + "learning_rate": 6.900112010133297e-08, + "loss": 0.4164, + "step": 18509 + }, + { + "epoch": 2.89, + "grad_norm": 23.911385013371248, + "learning_rate": 6.880349472476844e-08, + "loss": 0.409, + "step": 18510 + }, + { + "epoch": 2.89, + "grad_norm": 25.36275611826147, + "learning_rate": 6.86061517856229e-08, + "loss": 0.4153, + "step": 18511 + }, + { + "epoch": 2.89, + "grad_norm": 24.641146904686117, + "learning_rate": 6.840909128950857e-08, + "loss": 0.4182, + "step": 18512 + }, + { + "epoch": 2.89, + "grad_norm": 23.634173210946592, + "learning_rate": 6.821231324203093e-08, + "loss": 0.4154, + "step": 18513 + }, + { + "epoch": 2.89, + "grad_norm": 18.822013163736386, + "learning_rate": 6.801581764878329e-08, + "loss": 0.4476, + "step": 18514 + }, + { + "epoch": 2.89, + "grad_norm": 15.616144023306047, + "learning_rate": 6.781960451535674e-08, + "loss": 0.3498, + "step": 18515 + }, + { + "epoch": 2.89, + "grad_norm": 19.768925716139016, + "learning_rate": 6.762367384733015e-08, + "loss": 0.43, + "step": 18516 + }, + { + "epoch": 2.89, + "grad_norm": 18.002053399482897, + "learning_rate": 6.742802565027684e-08, + "loss": 0.4196, + "step": 18517 + }, + { + "epoch": 2.89, + "grad_norm": 27.32091563766875, + "learning_rate": 6.723265992975903e-08, + "loss": 0.4213, + "step": 18518 + }, + { + "epoch": 2.89, + "grad_norm": 25.195549789612453, + "learning_rate": 6.703757669133448e-08, + "loss": 0.4309, + "step": 18519 + }, + { + "epoch": 2.89, + "grad_norm": 16.58093273626427, + "learning_rate": 6.6842775940551e-08, + "loss": 0.4127, + "step": 18520 + }, + { + "epoch": 2.89, + "grad_norm": 25.544681947236025, + "learning_rate": 6.664825768294747e-08, + "loss": 0.4339, + "step": 18521 + }, + { + "epoch": 2.89, + "grad_norm": 21.074768979841554, + "learning_rate": 6.645402192405726e-08, + "loss": 0.4174, + "step": 18522 + }, + { + "epoch": 2.89, + "grad_norm": 30.15982142829394, + "learning_rate": 6.626006866940371e-08, + "loss": 0.4876, + "step": 18523 + }, + { + "epoch": 2.89, + "grad_norm": 22.21421075210531, + "learning_rate": 6.606639792450131e-08, + "loss": 0.427, + "step": 18524 + }, + { + "epoch": 2.89, + "grad_norm": 19.43151432509714, + "learning_rate": 6.587300969486122e-08, + "loss": 0.4401, + "step": 18525 + }, + { + "epoch": 2.89, + "grad_norm": 23.738934036978918, + "learning_rate": 6.567990398598012e-08, + "loss": 0.4326, + "step": 18526 + }, + { + "epoch": 2.89, + "grad_norm": 28.927823425160025, + "learning_rate": 6.548708080335143e-08, + "loss": 0.415, + "step": 18527 + }, + { + "epoch": 2.89, + "grad_norm": 23.358991507504868, + "learning_rate": 6.529454015245962e-08, + "loss": 0.4195, + "step": 18528 + }, + { + "epoch": 2.89, + "grad_norm": 26.89922264475566, + "learning_rate": 6.510228203877811e-08, + "loss": 0.4366, + "step": 18529 + }, + { + "epoch": 2.89, + "grad_norm": 29.8925049106383, + "learning_rate": 6.491030646777696e-08, + "loss": 0.4165, + "step": 18530 + }, + { + "epoch": 2.89, + "grad_norm": 18.89408575877051, + "learning_rate": 6.471861344491514e-08, + "loss": 0.4147, + "step": 18531 + }, + { + "epoch": 2.89, + "grad_norm": 
14.746844744221828, + "learning_rate": 6.452720297564275e-08, + "loss": 0.4141, + "step": 18532 + }, + { + "epoch": 2.89, + "grad_norm": 17.393911793127998, + "learning_rate": 6.43360750654054e-08, + "loss": 0.4115, + "step": 18533 + }, + { + "epoch": 2.9, + "grad_norm": 24.086794263540646, + "learning_rate": 6.414522971963766e-08, + "loss": 0.4074, + "step": 18534 + }, + { + "epoch": 2.9, + "grad_norm": 33.061442889204315, + "learning_rate": 6.395466694376739e-08, + "loss": 0.417, + "step": 18535 + }, + { + "epoch": 2.9, + "grad_norm": 24.032101190098853, + "learning_rate": 6.376438674321361e-08, + "loss": 0.4324, + "step": 18536 + }, + { + "epoch": 2.9, + "grad_norm": 18.81695936901427, + "learning_rate": 6.357438912338864e-08, + "loss": 0.4226, + "step": 18537 + }, + { + "epoch": 2.9, + "grad_norm": 28.27477320593617, + "learning_rate": 6.338467408969484e-08, + "loss": 0.4412, + "step": 18538 + }, + { + "epoch": 2.9, + "grad_norm": 23.402800398745548, + "learning_rate": 6.319524164752899e-08, + "loss": 0.4642, + "step": 18539 + }, + { + "epoch": 2.9, + "grad_norm": 27.7534454951741, + "learning_rate": 6.300609180227679e-08, + "loss": 0.4372, + "step": 18540 + }, + { + "epoch": 2.9, + "grad_norm": 22.486044208739152, + "learning_rate": 6.281722455931839e-08, + "loss": 0.3834, + "step": 18541 + }, + { + "epoch": 2.9, + "grad_norm": 18.74008368530438, + "learning_rate": 6.262863992402612e-08, + "loss": 0.4021, + "step": 18542 + }, + { + "epoch": 2.9, + "grad_norm": 27.894347682381458, + "learning_rate": 6.24403379017613e-08, + "loss": 0.4531, + "step": 18543 + }, + { + "epoch": 2.9, + "grad_norm": 30.498933595634526, + "learning_rate": 6.225231849787961e-08, + "loss": 0.4429, + "step": 18544 + }, + { + "epoch": 2.9, + "grad_norm": 24.38677491513527, + "learning_rate": 6.2064581717729e-08, + "loss": 0.4493, + "step": 18545 + }, + { + "epoch": 2.9, + "grad_norm": 32.29721347417395, + "learning_rate": 6.187712756664854e-08, + "loss": 0.4592, + "step": 18546 + }, + { + "epoch": 2.9, + "grad_norm": 36.42578139317899, + "learning_rate": 6.168995604996841e-08, + "loss": 0.4573, + "step": 18547 + }, + { + "epoch": 2.9, + "grad_norm": 17.88275092978021, + "learning_rate": 6.150306717301213e-08, + "loss": 0.4129, + "step": 18548 + }, + { + "epoch": 2.9, + "grad_norm": 26.442839142550525, + "learning_rate": 6.131646094109545e-08, + "loss": 0.4592, + "step": 18549 + }, + { + "epoch": 2.9, + "grad_norm": 23.199971906040194, + "learning_rate": 6.1130137359523e-08, + "loss": 0.4071, + "step": 18550 + }, + { + "epoch": 2.9, + "grad_norm": 41.83557046894883, + "learning_rate": 6.094409643359722e-08, + "loss": 0.5329, + "step": 18551 + }, + { + "epoch": 2.9, + "grad_norm": 18.64986434035781, + "learning_rate": 6.07583381686061e-08, + "loss": 0.4239, + "step": 18552 + }, + { + "epoch": 2.9, + "grad_norm": 28.978336676913905, + "learning_rate": 6.05728625698343e-08, + "loss": 0.4062, + "step": 18553 + }, + { + "epoch": 2.9, + "grad_norm": 20.85896528963201, + "learning_rate": 6.038766964255538e-08, + "loss": 0.3951, + "step": 18554 + }, + { + "epoch": 2.9, + "grad_norm": 17.77472125437093, + "learning_rate": 6.020275939203734e-08, + "loss": 0.4777, + "step": 18555 + }, + { + "epoch": 2.9, + "grad_norm": 31.66742725057573, + "learning_rate": 6.00181318235371e-08, + "loss": 0.4444, + "step": 18556 + }, + { + "epoch": 2.9, + "grad_norm": 29.886319099080577, + "learning_rate": 5.983378694230713e-08, + "loss": 0.4193, + "step": 18557 + }, + { + "epoch": 2.9, + "grad_norm": 17.01866139274335, + "learning_rate": 
5.964972475358876e-08, + "loss": 0.4237, + "step": 18558 + }, + { + "epoch": 2.9, + "grad_norm": 17.787489964367737, + "learning_rate": 5.946594526261895e-08, + "loss": 0.4168, + "step": 18559 + }, + { + "epoch": 2.9, + "grad_norm": 23.870874983506553, + "learning_rate": 5.928244847462128e-08, + "loss": 0.4386, + "step": 18560 + }, + { + "epoch": 2.9, + "grad_norm": 24.5904435391408, + "learning_rate": 5.909923439481491e-08, + "loss": 0.4326, + "step": 18561 + }, + { + "epoch": 2.9, + "grad_norm": 22.30948369156938, + "learning_rate": 5.8916303028412334e-08, + "loss": 0.4061, + "step": 18562 + }, + { + "epoch": 2.9, + "grad_norm": 12.587707451907432, + "learning_rate": 5.873365438061385e-08, + "loss": 0.3877, + "step": 18563 + }, + { + "epoch": 2.9, + "grad_norm": 20.80746495569008, + "learning_rate": 5.8551288456616395e-08, + "loss": 0.3906, + "step": 18564 + }, + { + "epoch": 2.9, + "grad_norm": 25.238816500492025, + "learning_rate": 5.83692052616025e-08, + "loss": 0.4238, + "step": 18565 + }, + { + "epoch": 2.9, + "grad_norm": 17.389540932413485, + "learning_rate": 5.8187404800753575e-08, + "loss": 0.4388, + "step": 18566 + }, + { + "epoch": 2.9, + "grad_norm": 16.91401453385809, + "learning_rate": 5.800588707923993e-08, + "loss": 0.3984, + "step": 18567 + }, + { + "epoch": 2.9, + "grad_norm": 28.492520582732723, + "learning_rate": 5.782465210222299e-08, + "loss": 0.4057, + "step": 18568 + }, + { + "epoch": 2.9, + "grad_norm": 24.379133578247433, + "learning_rate": 5.7643699874855296e-08, + "loss": 0.4185, + "step": 18569 + }, + { + "epoch": 2.9, + "grad_norm": 22.013488443175785, + "learning_rate": 5.746303040228607e-08, + "loss": 0.4521, + "step": 18570 + }, + { + "epoch": 2.9, + "grad_norm": 22.820375565830574, + "learning_rate": 5.7282643689652306e-08, + "loss": 0.4176, + "step": 18571 + }, + { + "epoch": 2.9, + "grad_norm": 28.053879731802162, + "learning_rate": 5.710253974208324e-08, + "loss": 0.4198, + "step": 18572 + }, + { + "epoch": 2.9, + "grad_norm": 18.947676484006685, + "learning_rate": 5.692271856470144e-08, + "loss": 0.4331, + "step": 18573 + }, + { + "epoch": 2.9, + "grad_norm": 19.998587581561058, + "learning_rate": 5.674318016262059e-08, + "loss": 0.4298, + "step": 18574 + }, + { + "epoch": 2.9, + "grad_norm": 22.38055579174918, + "learning_rate": 5.656392454094661e-08, + "loss": 0.4834, + "step": 18575 + }, + { + "epoch": 2.9, + "grad_norm": 17.908862691443375, + "learning_rate": 5.638495170477876e-08, + "loss": 0.4222, + "step": 18576 + }, + { + "epoch": 2.9, + "grad_norm": 25.870998559218247, + "learning_rate": 5.6206261659206284e-08, + "loss": 0.4519, + "step": 18577 + }, + { + "epoch": 2.9, + "grad_norm": 19.67904293092041, + "learning_rate": 5.602785440931069e-08, + "loss": 0.4067, + "step": 18578 + }, + { + "epoch": 2.9, + "grad_norm": 29.013571404097277, + "learning_rate": 5.584972996016569e-08, + "loss": 0.4515, + "step": 18579 + }, + { + "epoch": 2.9, + "grad_norm": 34.651736640621614, + "learning_rate": 5.567188831683723e-08, + "loss": 0.5154, + "step": 18580 + }, + { + "epoch": 2.9, + "grad_norm": 22.13680798425253, + "learning_rate": 5.549432948438238e-08, + "loss": 0.4097, + "step": 18581 + }, + { + "epoch": 2.9, + "grad_norm": 16.473877677655718, + "learning_rate": 5.531705346785265e-08, + "loss": 0.3921, + "step": 18582 + }, + { + "epoch": 2.9, + "grad_norm": 22.161040284591447, + "learning_rate": 5.514006027228735e-08, + "loss": 0.4131, + "step": 18583 + }, + { + "epoch": 2.9, + "grad_norm": 14.229827713016393, + "learning_rate": 
5.4963349902722454e-08, + "loss": 0.3707, + "step": 18584 + }, + { + "epoch": 2.9, + "grad_norm": 20.378601134818286, + "learning_rate": 5.478692236418059e-08, + "loss": 0.4258, + "step": 18585 + }, + { + "epoch": 2.9, + "grad_norm": 18.933541126465354, + "learning_rate": 5.4610777661681105e-08, + "loss": 0.4573, + "step": 18586 + }, + { + "epoch": 2.9, + "grad_norm": 21.56154401014442, + "learning_rate": 5.443491580023441e-08, + "loss": 0.4066, + "step": 18587 + }, + { + "epoch": 2.9, + "grad_norm": 17.57596237400272, + "learning_rate": 5.4259336784839855e-08, + "loss": 0.4055, + "step": 18588 + }, + { + "epoch": 2.9, + "grad_norm": 26.93826397190838, + "learning_rate": 5.408404062049233e-08, + "loss": 0.3974, + "step": 18589 + }, + { + "epoch": 2.9, + "grad_norm": 17.51858298798443, + "learning_rate": 5.390902731217562e-08, + "loss": 0.3898, + "step": 18590 + }, + { + "epoch": 2.9, + "grad_norm": 17.660792571553728, + "learning_rate": 5.373429686486797e-08, + "loss": 0.4059, + "step": 18591 + }, + { + "epoch": 2.9, + "grad_norm": 25.495204372916653, + "learning_rate": 5.355984928353986e-08, + "loss": 0.468, + "step": 18592 + }, + { + "epoch": 2.9, + "grad_norm": 25.989328184130365, + "learning_rate": 5.338568457314952e-08, + "loss": 0.4397, + "step": 18593 + }, + { + "epoch": 2.9, + "grad_norm": 20.465690008419305, + "learning_rate": 5.321180273865301e-08, + "loss": 0.4257, + "step": 18594 + }, + { + "epoch": 2.9, + "grad_norm": 20.193174553750463, + "learning_rate": 5.303820378499303e-08, + "loss": 0.4375, + "step": 18595 + }, + { + "epoch": 2.9, + "grad_norm": 26.67642790736344, + "learning_rate": 5.286488771710785e-08, + "loss": 0.4524, + "step": 18596 + }, + { + "epoch": 2.9, + "grad_norm": 13.756035368829533, + "learning_rate": 5.2691854539926865e-08, + "loss": 0.4082, + "step": 18597 + }, + { + "epoch": 2.91, + "grad_norm": 21.197258258517074, + "learning_rate": 5.251910425837059e-08, + "loss": 0.4361, + "step": 18598 + }, + { + "epoch": 2.91, + "grad_norm": 14.33317766935585, + "learning_rate": 5.234663687735064e-08, + "loss": 0.4043, + "step": 18599 + }, + { + "epoch": 2.91, + "grad_norm": 26.62280763299338, + "learning_rate": 5.217445240177421e-08, + "loss": 0.5052, + "step": 18600 + }, + { + "epoch": 2.91, + "grad_norm": 17.138903199289413, + "learning_rate": 5.200255083653738e-08, + "loss": 0.4137, + "step": 18601 + }, + { + "epoch": 2.91, + "grad_norm": 22.868304257937098, + "learning_rate": 5.183093218652846e-08, + "loss": 0.3824, + "step": 18602 + }, + { + "epoch": 2.91, + "grad_norm": 24.447223790989273, + "learning_rate": 5.1659596456627995e-08, + "loss": 0.4383, + "step": 18603 + }, + { + "epoch": 2.91, + "grad_norm": 21.43291422741993, + "learning_rate": 5.1488543651708746e-08, + "loss": 0.4885, + "step": 18604 + }, + { + "epoch": 2.91, + "grad_norm": 21.090242826865616, + "learning_rate": 5.1317773776635715e-08, + "loss": 0.4, + "step": 18605 + }, + { + "epoch": 2.91, + "grad_norm": 22.938900327943976, + "learning_rate": 5.114728683626613e-08, + "loss": 0.4606, + "step": 18606 + }, + { + "epoch": 2.91, + "grad_norm": 23.022326562030177, + "learning_rate": 5.0977082835447224e-08, + "loss": 0.446, + "step": 18607 + }, + { + "epoch": 2.91, + "grad_norm": 15.911164662251947, + "learning_rate": 5.0807161779019565e-08, + "loss": 0.3709, + "step": 18608 + }, + { + "epoch": 2.91, + "grad_norm": 20.155628051329376, + "learning_rate": 5.0637523671817065e-08, + "loss": 0.4125, + "step": 18609 + }, + { + "epoch": 2.91, + "grad_norm": 16.328969247337927, + "learning_rate": 
5.046816851866254e-08, + "loss": 0.3728, + "step": 18610 + }, + { + "epoch": 2.91, + "grad_norm": 28.27759517110512, + "learning_rate": 5.029909632437324e-08, + "loss": 0.4255, + "step": 18611 + }, + { + "epoch": 2.91, + "grad_norm": 23.982083425487772, + "learning_rate": 5.013030709375644e-08, + "loss": 0.4397, + "step": 18612 + }, + { + "epoch": 2.91, + "grad_norm": 20.643691713699532, + "learning_rate": 4.996180083161384e-08, + "loss": 0.469, + "step": 18613 + }, + { + "epoch": 2.91, + "grad_norm": 20.25380596551742, + "learning_rate": 4.979357754273717e-08, + "loss": 0.3876, + "step": 18614 + }, + { + "epoch": 2.91, + "grad_norm": 24.61539945340483, + "learning_rate": 4.9625637231910385e-08, + "loss": 0.4598, + "step": 18615 + }, + { + "epoch": 2.91, + "grad_norm": 28.110118856654594, + "learning_rate": 4.9457979903909656e-08, + "loss": 0.4055, + "step": 18616 + }, + { + "epoch": 2.91, + "grad_norm": 24.847831118561604, + "learning_rate": 4.92906055635034e-08, + "loss": 0.4431, + "step": 18617 + }, + { + "epoch": 2.91, + "grad_norm": 17.283652785266383, + "learning_rate": 4.912351421545114e-08, + "loss": 0.46, + "step": 18618 + }, + { + "epoch": 2.91, + "grad_norm": 24.557359963271853, + "learning_rate": 4.8956705864504625e-08, + "loss": 0.4171, + "step": 18619 + }, + { + "epoch": 2.91, + "grad_norm": 19.77325504479544, + "learning_rate": 4.879018051540785e-08, + "loss": 0.4968, + "step": 18620 + }, + { + "epoch": 2.91, + "grad_norm": 26.937921242507, + "learning_rate": 4.8623938172898125e-08, + "loss": 0.4832, + "step": 18621 + }, + { + "epoch": 2.91, + "grad_norm": 26.63163128508053, + "learning_rate": 4.8457978841702777e-08, + "loss": 0.4852, + "step": 18622 + }, + { + "epoch": 2.91, + "grad_norm": 25.216370902881867, + "learning_rate": 4.8292302526539156e-08, + "loss": 0.4614, + "step": 18623 + }, + { + "epoch": 2.91, + "grad_norm": 16.058903222800982, + "learning_rate": 4.8126909232121265e-08, + "loss": 0.4089, + "step": 18624 + }, + { + "epoch": 2.91, + "grad_norm": 28.55656914493002, + "learning_rate": 4.7961798963153116e-08, + "loss": 0.3658, + "step": 18625 + }, + { + "epoch": 2.91, + "grad_norm": 19.335173828765612, + "learning_rate": 4.7796971724329844e-08, + "loss": 0.4176, + "step": 18626 + }, + { + "epoch": 2.91, + "grad_norm": 29.92343023157761, + "learning_rate": 4.763242752033881e-08, + "loss": 0.4596, + "step": 18627 + }, + { + "epoch": 2.91, + "grad_norm": 26.754862113961618, + "learning_rate": 4.746816635585849e-08, + "loss": 0.4642, + "step": 18628 + }, + { + "epoch": 2.91, + "grad_norm": 21.106521012563128, + "learning_rate": 4.730418823556182e-08, + "loss": 0.4528, + "step": 18629 + }, + { + "epoch": 2.91, + "grad_norm": 22.291964721395384, + "learning_rate": 4.7140493164112844e-08, + "loss": 0.3749, + "step": 18630 + }, + { + "epoch": 2.91, + "grad_norm": 23.228612524459315, + "learning_rate": 4.6977081146165614e-08, + "loss": 0.3591, + "step": 18631 + }, + { + "epoch": 2.91, + "grad_norm": 18.39592857343386, + "learning_rate": 4.6813952186366415e-08, + "loss": 0.4128, + "step": 18632 + }, + { + "epoch": 2.91, + "grad_norm": 26.597083295235763, + "learning_rate": 4.6651106289357095e-08, + "loss": 0.4548, + "step": 18633 + }, + { + "epoch": 2.91, + "grad_norm": 17.806309333640822, + "learning_rate": 4.648854345976839e-08, + "loss": 0.3841, + "step": 18634 + }, + { + "epoch": 2.91, + "grad_norm": 15.220720784791276, + "learning_rate": 4.632626370222215e-08, + "loss": 0.4147, + "step": 18635 + }, + { + "epoch": 2.91, + "grad_norm": 25.83988797709514, + 
"learning_rate": 4.6164267021334693e-08, + "loss": 0.4305, + "step": 18636 + }, + { + "epoch": 2.91, + "grad_norm": 22.425065938667647, + "learning_rate": 4.6002553421711226e-08, + "loss": 0.3922, + "step": 18637 + }, + { + "epoch": 2.91, + "grad_norm": 30.214374762004176, + "learning_rate": 4.5841122907953614e-08, + "loss": 0.4502, + "step": 18638 + }, + { + "epoch": 2.91, + "grad_norm": 22.165399191736235, + "learning_rate": 4.567997548465153e-08, + "loss": 0.4719, + "step": 18639 + }, + { + "epoch": 2.91, + "grad_norm": 19.626846342036412, + "learning_rate": 4.551911115638685e-08, + "loss": 0.3702, + "step": 18640 + }, + { + "epoch": 2.91, + "grad_norm": 18.0712803203373, + "learning_rate": 4.5358529927735925e-08, + "loss": 0.4592, + "step": 18641 + }, + { + "epoch": 2.91, + "grad_norm": 29.73346874401523, + "learning_rate": 4.5198231803265103e-08, + "loss": 0.3759, + "step": 18642 + }, + { + "epoch": 2.91, + "grad_norm": 26.92594201247352, + "learning_rate": 4.503821678753406e-08, + "loss": 0.4157, + "step": 18643 + }, + { + "epoch": 2.91, + "grad_norm": 18.94960621579408, + "learning_rate": 4.487848488509139e-08, + "loss": 0.3873, + "step": 18644 + }, + { + "epoch": 2.91, + "grad_norm": 27.034417033027776, + "learning_rate": 4.4719036100481225e-08, + "loss": 0.4707, + "step": 18645 + }, + { + "epoch": 2.91, + "grad_norm": 20.6283469836735, + "learning_rate": 4.455987043823884e-08, + "loss": 0.4497, + "step": 18646 + }, + { + "epoch": 2.91, + "grad_norm": 17.516282058220437, + "learning_rate": 4.440098790288949e-08, + "loss": 0.4272, + "step": 18647 + }, + { + "epoch": 2.91, + "grad_norm": 42.846011660680006, + "learning_rate": 4.4242388498951797e-08, + "loss": 0.4764, + "step": 18648 + }, + { + "epoch": 2.91, + "grad_norm": 18.041086199032765, + "learning_rate": 4.408407223093658e-08, + "loss": 0.4858, + "step": 18649 + }, + { + "epoch": 2.91, + "grad_norm": 27.69422095747818, + "learning_rate": 4.3926039103346915e-08, + "loss": 0.4583, + "step": 18650 + }, + { + "epoch": 2.91, + "grad_norm": 26.84931033681021, + "learning_rate": 4.376828912067699e-08, + "loss": 0.4087, + "step": 18651 + }, + { + "epoch": 2.91, + "grad_norm": 26.845552992305535, + "learning_rate": 4.361082228741209e-08, + "loss": 0.4178, + "step": 18652 + }, + { + "epoch": 2.91, + "grad_norm": 25.577283857073823, + "learning_rate": 4.3453638608030865e-08, + "loss": 0.481, + "step": 18653 + }, + { + "epoch": 2.91, + "grad_norm": 25.728272857018755, + "learning_rate": 4.329673808700308e-08, + "loss": 0.5032, + "step": 18654 + }, + { + "epoch": 2.91, + "grad_norm": 34.84744185745317, + "learning_rate": 4.3140120728792925e-08, + "loss": 0.4678, + "step": 18655 + }, + { + "epoch": 2.91, + "grad_norm": 25.495810458498816, + "learning_rate": 4.29837865378524e-08, + "loss": 0.4702, + "step": 18656 + }, + { + "epoch": 2.91, + "grad_norm": 18.704265429051265, + "learning_rate": 4.2827735518629065e-08, + "loss": 0.3311, + "step": 18657 + }, + { + "epoch": 2.91, + "grad_norm": 14.398259129198157, + "learning_rate": 4.2671967675559365e-08, + "loss": 0.3712, + "step": 18658 + }, + { + "epoch": 2.91, + "grad_norm": 26.14479882031989, + "learning_rate": 4.2516483013074205e-08, + "loss": 0.4725, + "step": 18659 + }, + { + "epoch": 2.91, + "grad_norm": 17.658998022762546, + "learning_rate": 4.23612815355956e-08, + "loss": 0.4169, + "step": 18660 + }, + { + "epoch": 2.91, + "grad_norm": 39.56902023118311, + "learning_rate": 4.2206363247536684e-08, + "loss": 0.3783, + "step": 18661 + }, + { + "epoch": 2.92, + "grad_norm": 
15.908342868291848, + "learning_rate": 4.205172815330394e-08, + "loss": 0.3516, + "step": 18662 + }, + { + "epoch": 2.92, + "grad_norm": 18.428531260360174, + "learning_rate": 4.189737625729384e-08, + "loss": 0.411, + "step": 18663 + }, + { + "epoch": 2.92, + "grad_norm": 43.42183158051474, + "learning_rate": 4.174330756389844e-08, + "loss": 0.5289, + "step": 18664 + }, + { + "epoch": 2.92, + "grad_norm": 20.131858629523386, + "learning_rate": 4.1589522077497556e-08, + "loss": 0.3714, + "step": 18665 + }, + { + "epoch": 2.92, + "grad_norm": 32.079054858466264, + "learning_rate": 4.143601980246437e-08, + "loss": 0.4846, + "step": 18666 + }, + { + "epoch": 2.92, + "grad_norm": 25.2285427377757, + "learning_rate": 4.128280074316649e-08, + "loss": 0.4167, + "step": 18667 + }, + { + "epoch": 2.92, + "grad_norm": 16.831271742485498, + "learning_rate": 4.112986490395931e-08, + "loss": 0.3816, + "step": 18668 + }, + { + "epoch": 2.92, + "grad_norm": 26.932611297880097, + "learning_rate": 4.0977212289192717e-08, + "loss": 0.4682, + "step": 18669 + }, + { + "epoch": 2.92, + "grad_norm": 18.87775375318171, + "learning_rate": 4.082484290320876e-08, + "loss": 0.4161, + "step": 18670 + }, + { + "epoch": 2.92, + "grad_norm": 32.37367054334205, + "learning_rate": 4.067275675034066e-08, + "loss": 0.5205, + "step": 18671 + }, + { + "epoch": 2.92, + "grad_norm": 22.177344558226117, + "learning_rate": 4.0520953834912724e-08, + "loss": 0.435, + "step": 18672 + }, + { + "epoch": 2.92, + "grad_norm": 20.2196649354697, + "learning_rate": 4.0369434161242616e-08, + "loss": 0.4382, + "step": 18673 + }, + { + "epoch": 2.92, + "grad_norm": 20.14482200298816, + "learning_rate": 4.021819773364022e-08, + "loss": 0.4702, + "step": 18674 + }, + { + "epoch": 2.92, + "grad_norm": 23.06524709813954, + "learning_rate": 4.0067244556405424e-08, + "loss": 0.4338, + "step": 18675 + }, + { + "epoch": 2.92, + "grad_norm": 16.27565151927008, + "learning_rate": 3.9916574633832586e-08, + "loss": 0.4441, + "step": 18676 + }, + { + "epoch": 2.92, + "grad_norm": 26.532480593809655, + "learning_rate": 3.976618797020493e-08, + "loss": 0.4556, + "step": 18677 + }, + { + "epoch": 2.92, + "grad_norm": 18.46870527326587, + "learning_rate": 3.961608456980126e-08, + "loss": 0.4861, + "step": 18678 + }, + { + "epoch": 2.92, + "grad_norm": 19.92178228169725, + "learning_rate": 3.946626443688817e-08, + "loss": 0.4586, + "step": 18679 + }, + { + "epoch": 2.92, + "grad_norm": 32.09177854234582, + "learning_rate": 3.9316727575728906e-08, + "loss": 0.4041, + "step": 18680 + }, + { + "epoch": 2.92, + "grad_norm": 31.647061054597792, + "learning_rate": 3.9167473990575635e-08, + "loss": 0.5062, + "step": 18681 + }, + { + "epoch": 2.92, + "grad_norm": 20.565054413074023, + "learning_rate": 3.901850368567161e-08, + "loss": 0.4193, + "step": 18682 + }, + { + "epoch": 2.92, + "grad_norm": 20.209451963656534, + "learning_rate": 3.886981666525347e-08, + "loss": 0.381, + "step": 18683 + }, + { + "epoch": 2.92, + "grad_norm": 23.65321754964633, + "learning_rate": 3.8721412933552246e-08, + "loss": 0.4466, + "step": 18684 + }, + { + "epoch": 2.92, + "grad_norm": 22.789543695374963, + "learning_rate": 3.85732924947857e-08, + "loss": 0.4341, + "step": 18685 + }, + { + "epoch": 2.92, + "grad_norm": 20.63322895789619, + "learning_rate": 3.8425455353168215e-08, + "loss": 0.4401, + "step": 18686 + }, + { + "epoch": 2.92, + "grad_norm": 24.262060936557013, + "learning_rate": 3.827790151290312e-08, + "loss": 0.4497, + "step": 18687 + }, + { + "epoch": 2.92, + "grad_norm": 
18.735849732075785, + "learning_rate": 3.813063097818703e-08, + "loss": 0.4099, + "step": 18688 + }, + { + "epoch": 2.92, + "grad_norm": 21.165894683579662, + "learning_rate": 3.798364375320773e-08, + "loss": 0.4682, + "step": 18689 + }, + { + "epoch": 2.92, + "grad_norm": 21.28106580562114, + "learning_rate": 3.783693984214743e-08, + "loss": 0.386, + "step": 18690 + }, + { + "epoch": 2.92, + "grad_norm": 20.474656576320363, + "learning_rate": 3.7690519249174996e-08, + "loss": 0.4668, + "step": 18691 + }, + { + "epoch": 2.92, + "grad_norm": 28.814855488587384, + "learning_rate": 3.754438197845822e-08, + "loss": 0.4856, + "step": 18692 + }, + { + "epoch": 2.92, + "grad_norm": 30.833775246627976, + "learning_rate": 3.739852803415045e-08, + "loss": 0.456, + "step": 18693 + }, + { + "epoch": 2.92, + "grad_norm": 18.52357529838724, + "learning_rate": 3.725295742040058e-08, + "loss": 0.4246, + "step": 18694 + }, + { + "epoch": 2.92, + "grad_norm": 22.3044660233226, + "learning_rate": 3.710767014134864e-08, + "loss": 0.4162, + "step": 18695 + }, + { + "epoch": 2.92, + "grad_norm": 18.671897384199188, + "learning_rate": 3.696266620112576e-08, + "loss": 0.3841, + "step": 18696 + }, + { + "epoch": 2.92, + "grad_norm": 21.70616058229042, + "learning_rate": 3.681794560385754e-08, + "loss": 0.4908, + "step": 18697 + }, + { + "epoch": 2.92, + "grad_norm": 18.202102105211527, + "learning_rate": 3.667350835365846e-08, + "loss": 0.4185, + "step": 18698 + }, + { + "epoch": 2.92, + "grad_norm": 23.94815043423386, + "learning_rate": 3.6529354454635236e-08, + "loss": 0.4304, + "step": 18699 + }, + { + "epoch": 2.92, + "grad_norm": 31.318605188511896, + "learning_rate": 3.638548391089014e-08, + "loss": 0.3747, + "step": 18700 + }, + { + "epoch": 2.92, + "grad_norm": 16.999345002458742, + "learning_rate": 3.6241896726513236e-08, + "loss": 0.4546, + "step": 18701 + }, + { + "epoch": 2.92, + "grad_norm": 28.485159415881544, + "learning_rate": 3.6098592905587925e-08, + "loss": 0.4317, + "step": 18702 + }, + { + "epoch": 2.92, + "grad_norm": 20.923521386622834, + "learning_rate": 3.595557245218983e-08, + "loss": 0.3923, + "step": 18703 + }, + { + "epoch": 2.92, + "grad_norm": 25.324056303276155, + "learning_rate": 3.581283537038571e-08, + "loss": 0.4537, + "step": 18704 + }, + { + "epoch": 2.92, + "grad_norm": 20.395555892423726, + "learning_rate": 3.567038166423675e-08, + "loss": 0.3949, + "step": 18705 + }, + { + "epoch": 2.92, + "grad_norm": 29.604980440563356, + "learning_rate": 3.552821133779305e-08, + "loss": 0.5119, + "step": 18706 + }, + { + "epoch": 2.92, + "grad_norm": 20.632383607149897, + "learning_rate": 3.5386324395096924e-08, + "loss": 0.5184, + "step": 18707 + }, + { + "epoch": 2.92, + "grad_norm": 22.740222758894927, + "learning_rate": 3.5244720840186266e-08, + "loss": 0.4184, + "step": 18708 + }, + { + "epoch": 2.92, + "grad_norm": 17.288677795653232, + "learning_rate": 3.510340067708562e-08, + "loss": 0.3519, + "step": 18709 + }, + { + "epoch": 2.92, + "grad_norm": 24.570040017513822, + "learning_rate": 3.4962363909815114e-08, + "loss": 0.4102, + "step": 18710 + }, + { + "epoch": 2.92, + "grad_norm": 25.039475040340207, + "learning_rate": 3.482161054238486e-08, + "loss": 0.4078, + "step": 18711 + }, + { + "epoch": 2.92, + "grad_norm": 35.83570232526256, + "learning_rate": 3.4681140578799453e-08, + "loss": 0.4508, + "step": 18712 + }, + { + "epoch": 2.92, + "grad_norm": 14.077287785597578, + "learning_rate": 3.4540954023052356e-08, + "loss": 0.373, + "step": 18713 + }, + { + "epoch": 2.92, + 
"grad_norm": 28.38416494438566, + "learning_rate": 3.440105087913148e-08, + "loss": 0.5014, + "step": 18714 + }, + { + "epoch": 2.92, + "grad_norm": 26.44152072786472, + "learning_rate": 3.426143115101477e-08, + "loss": 0.4134, + "step": 18715 + }, + { + "epoch": 2.92, + "grad_norm": 23.044596320506358, + "learning_rate": 3.412209484267237e-08, + "loss": 0.3973, + "step": 18716 + }, + { + "epoch": 2.92, + "grad_norm": 20.570489149932435, + "learning_rate": 3.39830419580689e-08, + "loss": 0.4287, + "step": 18717 + }, + { + "epoch": 2.92, + "grad_norm": 17.726730926742437, + "learning_rate": 3.384427250115674e-08, + "loss": 0.3965, + "step": 18718 + }, + { + "epoch": 2.92, + "grad_norm": 20.45951085234899, + "learning_rate": 3.370578647588496e-08, + "loss": 0.4517, + "step": 18719 + }, + { + "epoch": 2.92, + "grad_norm": 27.034624249653447, + "learning_rate": 3.3567583886189304e-08, + "loss": 0.4584, + "step": 18720 + }, + { + "epoch": 2.92, + "grad_norm": 33.022644246421905, + "learning_rate": 3.3429664736001064e-08, + "loss": 0.3956, + "step": 18721 + }, + { + "epoch": 2.92, + "grad_norm": 20.400692286618987, + "learning_rate": 3.3292029029243777e-08, + "loss": 0.4209, + "step": 18722 + }, + { + "epoch": 2.92, + "grad_norm": 24.62312803559912, + "learning_rate": 3.315467676982986e-08, + "loss": 0.419, + "step": 18723 + }, + { + "epoch": 2.92, + "grad_norm": 19.84010695785895, + "learning_rate": 3.301760796166731e-08, + "loss": 0.4454, + "step": 18724 + }, + { + "epoch": 2.92, + "grad_norm": 20.53350933336121, + "learning_rate": 3.2880822608653e-08, + "loss": 0.3966, + "step": 18725 + }, + { + "epoch": 2.93, + "grad_norm": 21.667395581232263, + "learning_rate": 3.274432071467826e-08, + "loss": 0.3987, + "step": 18726 + }, + { + "epoch": 2.93, + "grad_norm": 18.842186840818783, + "learning_rate": 3.260810228362332e-08, + "loss": 0.4979, + "step": 18727 + }, + { + "epoch": 2.93, + "grad_norm": 22.34221306429955, + "learning_rate": 3.247216731936398e-08, + "loss": 0.4167, + "step": 18728 + }, + { + "epoch": 2.93, + "grad_norm": 19.175157848862305, + "learning_rate": 3.233651582576491e-08, + "loss": 0.4058, + "step": 18729 + }, + { + "epoch": 2.93, + "grad_norm": 21.698653936717903, + "learning_rate": 3.220114780668415e-08, + "loss": 0.4214, + "step": 18730 + }, + { + "epoch": 2.93, + "grad_norm": 25.789606687807048, + "learning_rate": 3.206606326597306e-08, + "loss": 0.4594, + "step": 18731 + }, + { + "epoch": 2.93, + "grad_norm": 20.24767071004808, + "learning_rate": 3.193126220747078e-08, + "loss": 0.4763, + "step": 18732 + }, + { + "epoch": 2.93, + "grad_norm": 16.19364381521254, + "learning_rate": 3.1796744635013144e-08, + "loss": 0.4126, + "step": 18733 + }, + { + "epoch": 2.93, + "grad_norm": 27.44508202963948, + "learning_rate": 3.166251055242375e-08, + "loss": 0.4675, + "step": 18734 + }, + { + "epoch": 2.93, + "grad_norm": 23.68752461812091, + "learning_rate": 3.1528559963522886e-08, + "loss": 0.4252, + "step": 18735 + }, + { + "epoch": 2.93, + "grad_norm": 20.882091399878316, + "learning_rate": 3.139489287211639e-08, + "loss": 0.3822, + "step": 18736 + }, + { + "epoch": 2.93, + "grad_norm": 18.122581247374395, + "learning_rate": 3.1261509282009e-08, + "loss": 0.474, + "step": 18737 + }, + { + "epoch": 2.93, + "grad_norm": 19.869086343954912, + "learning_rate": 3.112840919699212e-08, + "loss": 0.4094, + "step": 18738 + }, + { + "epoch": 2.93, + "grad_norm": 20.61216789833138, + "learning_rate": 3.099559262085272e-08, + "loss": 0.3968, + "step": 18739 + }, + { + "epoch": 2.93, + 
"grad_norm": 29.14144736758251, + "learning_rate": 3.086305955736557e-08, + "loss": 0.4646, + "step": 18740 + }, + { + "epoch": 2.93, + "grad_norm": 17.577402657819984, + "learning_rate": 3.073081001030209e-08, + "loss": 0.4229, + "step": 18741 + }, + { + "epoch": 2.93, + "grad_norm": 31.779288952076058, + "learning_rate": 3.05988439834215e-08, + "loss": 0.445, + "step": 18742 + }, + { + "epoch": 2.93, + "grad_norm": 24.00281304771441, + "learning_rate": 3.046716148047968e-08, + "loss": 0.3836, + "step": 18743 + }, + { + "epoch": 2.93, + "grad_norm": 28.742786190747925, + "learning_rate": 3.033576250521919e-08, + "loss": 0.4648, + "step": 18744 + }, + { + "epoch": 2.93, + "grad_norm": 22.357514310985454, + "learning_rate": 3.0204647061375936e-08, + "loss": 0.3959, + "step": 18745 + }, + { + "epoch": 2.93, + "grad_norm": 19.758723977431917, + "learning_rate": 3.0073815152681374e-08, + "loss": 0.3909, + "step": 18746 + }, + { + "epoch": 2.93, + "grad_norm": 29.566795283058966, + "learning_rate": 2.994326678285586e-08, + "loss": 0.4181, + "step": 18747 + }, + { + "epoch": 2.93, + "grad_norm": 18.89971685474977, + "learning_rate": 2.981300195561088e-08, + "loss": 0.4496, + "step": 18748 + }, + { + "epoch": 2.93, + "grad_norm": 17.142760209003928, + "learning_rate": 2.968302067465234e-08, + "loss": 0.3766, + "step": 18749 + }, + { + "epoch": 2.93, + "grad_norm": 17.50725785704199, + "learning_rate": 2.955332294367508e-08, + "loss": 0.3809, + "step": 18750 + }, + { + "epoch": 2.93, + "grad_norm": 19.53937405425937, + "learning_rate": 2.942390876636947e-08, + "loss": 0.4405, + "step": 18751 + }, + { + "epoch": 2.93, + "grad_norm": 30.427025853152482, + "learning_rate": 2.9294778146415902e-08, + "loss": 0.4646, + "step": 18752 + }, + { + "epoch": 2.93, + "grad_norm": 24.40538684873113, + "learning_rate": 2.916593108748589e-08, + "loss": 0.4403, + "step": 18753 + }, + { + "epoch": 2.93, + "grad_norm": 36.357294153424114, + "learning_rate": 2.903736759324316e-08, + "loss": 0.4464, + "step": 18754 + }, + { + "epoch": 2.93, + "grad_norm": 37.04455475316805, + "learning_rate": 2.8909087667345905e-08, + "loss": 0.5209, + "step": 18755 + }, + { + "epoch": 2.93, + "grad_norm": 19.993564961493274, + "learning_rate": 2.878109131344009e-08, + "loss": 0.4659, + "step": 18756 + }, + { + "epoch": 2.93, + "grad_norm": 19.53477503590143, + "learning_rate": 2.8653378535168363e-08, + "loss": 0.3906, + "step": 18757 + }, + { + "epoch": 2.93, + "grad_norm": 25.053004733778717, + "learning_rate": 2.852594933616004e-08, + "loss": 0.3998, + "step": 18758 + }, + { + "epoch": 2.93, + "grad_norm": 25.938913936739894, + "learning_rate": 2.8398803720042223e-08, + "loss": 0.4132, + "step": 18759 + }, + { + "epoch": 2.93, + "grad_norm": 24.73032173157626, + "learning_rate": 2.8271941690427574e-08, + "loss": 0.4392, + "step": 18760 + }, + { + "epoch": 2.93, + "grad_norm": 15.357475297163742, + "learning_rate": 2.8145363250926537e-08, + "loss": 0.466, + "step": 18761 + }, + { + "epoch": 2.93, + "grad_norm": 26.91957492822058, + "learning_rate": 2.801906840513735e-08, + "loss": 0.455, + "step": 18762 + }, + { + "epoch": 2.93, + "grad_norm": 16.342303117196654, + "learning_rate": 2.7893057156653802e-08, + "loss": 0.3851, + "step": 18763 + }, + { + "epoch": 2.93, + "grad_norm": 17.9469692244357, + "learning_rate": 2.776732950905636e-08, + "loss": 0.4209, + "step": 18764 + }, + { + "epoch": 2.93, + "grad_norm": 17.63479290076147, + "learning_rate": 2.764188546592439e-08, + "loss": 0.4102, + "step": 18765 + }, + { + "epoch": 2.93, + 
"grad_norm": 19.009678048630057, + "learning_rate": 2.7516725030821702e-08, + "loss": 0.4475, + "step": 18766 + }, + { + "epoch": 2.93, + "grad_norm": 30.39524087934214, + "learning_rate": 2.7391848207311e-08, + "loss": 0.4804, + "step": 18767 + }, + { + "epoch": 2.93, + "grad_norm": 16.279164359612306, + "learning_rate": 2.7267254998941673e-08, + "loss": 0.4328, + "step": 18768 + }, + { + "epoch": 2.93, + "grad_norm": 19.07723564946217, + "learning_rate": 2.714294540925866e-08, + "loss": 0.4298, + "step": 18769 + }, + { + "epoch": 2.93, + "grad_norm": 21.173648797225805, + "learning_rate": 2.701891944179469e-08, + "loss": 0.4188, + "step": 18770 + }, + { + "epoch": 2.93, + "grad_norm": 33.013802930793744, + "learning_rate": 2.6895177100079163e-08, + "loss": 0.4474, + "step": 18771 + }, + { + "epoch": 2.93, + "grad_norm": 21.179250839302412, + "learning_rate": 2.6771718387631485e-08, + "loss": 0.4239, + "step": 18772 + }, + { + "epoch": 2.93, + "grad_norm": 36.06192174477639, + "learning_rate": 2.6648543307962183e-08, + "loss": 0.413, + "step": 18773 + }, + { + "epoch": 2.93, + "grad_norm": 22.85214303021008, + "learning_rate": 2.65256518645729e-08, + "loss": 0.4541, + "step": 18774 + }, + { + "epoch": 2.93, + "grad_norm": 31.70182055726893, + "learning_rate": 2.6403044060959726e-08, + "loss": 0.5998, + "step": 18775 + }, + { + "epoch": 2.93, + "grad_norm": 24.550304011863258, + "learning_rate": 2.6280719900609874e-08, + "loss": 0.4539, + "step": 18776 + }, + { + "epoch": 2.93, + "grad_norm": 21.817987495704166, + "learning_rate": 2.6158679387002782e-08, + "loss": 0.4139, + "step": 18777 + }, + { + "epoch": 2.93, + "grad_norm": 33.64175094761346, + "learning_rate": 2.60369225236079e-08, + "loss": 0.4318, + "step": 18778 + }, + { + "epoch": 2.93, + "grad_norm": 26.811499794842092, + "learning_rate": 2.591544931388801e-08, + "loss": 0.4256, + "step": 18779 + }, + { + "epoch": 2.93, + "grad_norm": 26.57258492936507, + "learning_rate": 2.5794259761298124e-08, + "loss": 0.4902, + "step": 18780 + }, + { + "epoch": 2.93, + "grad_norm": 32.15790848305961, + "learning_rate": 2.5673353869284378e-08, + "loss": 0.4376, + "step": 18781 + }, + { + "epoch": 2.93, + "grad_norm": 18.634583213528288, + "learning_rate": 2.5552731641286243e-08, + "loss": 0.3896, + "step": 18782 + }, + { + "epoch": 2.93, + "grad_norm": 27.702546031343232, + "learning_rate": 2.5432393080733197e-08, + "loss": 0.4426, + "step": 18783 + }, + { + "epoch": 2.93, + "grad_norm": 20.141080917750035, + "learning_rate": 2.5312338191048057e-08, + "loss": 0.3945, + "step": 18784 + }, + { + "epoch": 2.93, + "grad_norm": 20.313118559148904, + "learning_rate": 2.5192566975644762e-08, + "loss": 0.4106, + "step": 18785 + }, + { + "epoch": 2.93, + "grad_norm": 26.031776390844172, + "learning_rate": 2.507307943793058e-08, + "loss": 0.443, + "step": 18786 + }, + { + "epoch": 2.93, + "grad_norm": 24.179225148724576, + "learning_rate": 2.495387558130169e-08, + "loss": 0.4772, + "step": 18787 + }, + { + "epoch": 2.93, + "grad_norm": 17.72467751902362, + "learning_rate": 2.4834955409149818e-08, + "loss": 0.3954, + "step": 18788 + }, + { + "epoch": 2.93, + "grad_norm": 20.96829298177997, + "learning_rate": 2.471631892485671e-08, + "loss": 0.405, + "step": 18789 + }, + { + "epoch": 2.94, + "grad_norm": 19.188812197011092, + "learning_rate": 2.4597966131796324e-08, + "loss": 0.4266, + "step": 18790 + }, + { + "epoch": 2.94, + "grad_norm": 22.624193024996938, + "learning_rate": 2.4479897033333756e-08, + "loss": 0.3841, + "step": 18791 + }, + { + "epoch": 
2.94, + "grad_norm": 19.53607711040371, + "learning_rate": 2.4362111632827423e-08, + "loss": 0.374, + "step": 18792 + }, + { + "epoch": 2.94, + "grad_norm": 20.580840083730862, + "learning_rate": 2.4244609933627982e-08, + "loss": 0.4027, + "step": 18793 + }, + { + "epoch": 2.94, + "grad_norm": 30.237717700965412, + "learning_rate": 2.4127391939076094e-08, + "loss": 0.4185, + "step": 18794 + }, + { + "epoch": 2.94, + "grad_norm": 22.415351902388192, + "learning_rate": 2.401045765250465e-08, + "loss": 0.3951, + "step": 18795 + }, + { + "epoch": 2.94, + "grad_norm": 21.193501235139802, + "learning_rate": 2.3893807077239872e-08, + "loss": 0.4041, + "step": 18796 + }, + { + "epoch": 2.94, + "grad_norm": 21.20760443617767, + "learning_rate": 2.3777440216600224e-08, + "loss": 0.4118, + "step": 18797 + }, + { + "epoch": 2.94, + "grad_norm": 26.4211039557542, + "learning_rate": 2.3661357073894166e-08, + "loss": 0.5376, + "step": 18798 + }, + { + "epoch": 2.94, + "grad_norm": 29.610038295955878, + "learning_rate": 2.3545557652422392e-08, + "loss": 0.4614, + "step": 18799 + }, + { + "epoch": 2.94, + "grad_norm": 18.333718131824373, + "learning_rate": 2.343004195547893e-08, + "loss": 0.4835, + "step": 18800 + }, + { + "epoch": 2.94, + "grad_norm": 22.04216062168671, + "learning_rate": 2.3314809986348942e-08, + "loss": 0.4097, + "step": 18801 + }, + { + "epoch": 2.94, + "grad_norm": 27.712977368429215, + "learning_rate": 2.3199861748309793e-08, + "loss": 0.3999, + "step": 18802 + }, + { + "epoch": 2.94, + "grad_norm": 22.79517534651258, + "learning_rate": 2.3085197244631097e-08, + "loss": 0.4006, + "step": 18803 + }, + { + "epoch": 2.94, + "grad_norm": 30.395588764128835, + "learning_rate": 2.2970816478572466e-08, + "loss": 0.4285, + "step": 18804 + }, + { + "epoch": 2.94, + "grad_norm": 22.41010286146284, + "learning_rate": 2.2856719453386856e-08, + "loss": 0.3604, + "step": 18805 + }, + { + "epoch": 2.94, + "grad_norm": 19.367444586422824, + "learning_rate": 2.2742906172320555e-08, + "loss": 0.4169, + "step": 18806 + }, + { + "epoch": 2.94, + "grad_norm": 16.25920666257959, + "learning_rate": 2.262937663860876e-08, + "loss": 0.4082, + "step": 18807 + }, + { + "epoch": 2.94, + "grad_norm": 35.683292052732334, + "learning_rate": 2.25161308554811e-08, + "loss": 0.4507, + "step": 18808 + }, + { + "epoch": 2.94, + "grad_norm": 33.708381624898436, + "learning_rate": 2.240316882615834e-08, + "loss": 0.5187, + "step": 18809 + }, + { + "epoch": 2.94, + "grad_norm": 24.625063948136773, + "learning_rate": 2.2290490553852352e-08, + "loss": 0.449, + "step": 18810 + }, + { + "epoch": 2.94, + "grad_norm": 24.675553624388154, + "learning_rate": 2.217809604176835e-08, + "loss": 0.3801, + "step": 18811 + }, + { + "epoch": 2.94, + "grad_norm": 26.2654836391024, + "learning_rate": 2.2065985293102664e-08, + "loss": 0.423, + "step": 18812 + }, + { + "epoch": 2.94, + "grad_norm": 34.24203353227539, + "learning_rate": 2.195415831104275e-08, + "loss": 0.4576, + "step": 18813 + }, + { + "epoch": 2.94, + "grad_norm": 37.33191503746383, + "learning_rate": 2.1842615098769394e-08, + "loss": 0.4552, + "step": 18814 + }, + { + "epoch": 2.94, + "grad_norm": 21.282757662276865, + "learning_rate": 2.1731355659456722e-08, + "loss": 0.4664, + "step": 18815 + }, + { + "epoch": 2.94, + "grad_norm": 21.31259725987568, + "learning_rate": 2.162037999626554e-08, + "loss": 0.3783, + "step": 18816 + }, + { + "epoch": 2.94, + "grad_norm": 38.773255630057406, + "learning_rate": 2.1509688112354432e-08, + "loss": 0.4386, + "step": 18817 + }, + { + 
"epoch": 2.94, + "grad_norm": 20.559423254153288, + "learning_rate": 2.139928001086977e-08, + "loss": 0.4554, + "step": 18818 + }, + { + "epoch": 2.94, + "grad_norm": 29.073919519978006, + "learning_rate": 2.128915569495238e-08, + "loss": 0.4391, + "step": 18819 + }, + { + "epoch": 2.94, + "grad_norm": 18.948614969190782, + "learning_rate": 2.1179315167734194e-08, + "loss": 0.3962, + "step": 18820 + }, + { + "epoch": 2.94, + "grad_norm": 20.99502684539474, + "learning_rate": 2.1069758432339382e-08, + "loss": 0.3903, + "step": 18821 + }, + { + "epoch": 2.94, + "grad_norm": 15.779475235051981, + "learning_rate": 2.096048549188212e-08, + "loss": 0.3892, + "step": 18822 + }, + { + "epoch": 2.94, + "grad_norm": 17.94539963331978, + "learning_rate": 2.0851496349472144e-08, + "loss": 0.4245, + "step": 18823 + }, + { + "epoch": 2.94, + "grad_norm": 20.91316604901738, + "learning_rate": 2.0742791008206974e-08, + "loss": 0.4323, + "step": 18824 + }, + { + "epoch": 2.94, + "grad_norm": 28.108933797854498, + "learning_rate": 2.0634369471179692e-08, + "loss": 0.5228, + "step": 18825 + }, + { + "epoch": 2.94, + "grad_norm": 22.348109574930174, + "learning_rate": 2.052623174147339e-08, + "loss": 0.4347, + "step": 18826 + }, + { + "epoch": 2.94, + "grad_norm": 22.69923514183554, + "learning_rate": 2.0418377822162272e-08, + "loss": 0.3567, + "step": 18827 + }, + { + "epoch": 2.94, + "grad_norm": 16.375016200625435, + "learning_rate": 2.0310807716316107e-08, + "loss": 0.407, + "step": 18828 + }, + { + "epoch": 2.94, + "grad_norm": 22.110251296777143, + "learning_rate": 2.0203521426991333e-08, + "loss": 0.442, + "step": 18829 + }, + { + "epoch": 2.94, + "grad_norm": 23.680601394345754, + "learning_rate": 2.0096518957241072e-08, + "loss": 0.3985, + "step": 18830 + }, + { + "epoch": 2.94, + "grad_norm": 19.201817616219056, + "learning_rate": 1.9989800310107333e-08, + "loss": 0.449, + "step": 18831 + }, + { + "epoch": 2.94, + "grad_norm": 18.49303878118887, + "learning_rate": 1.9883365488625462e-08, + "loss": 0.418, + "step": 18832 + }, + { + "epoch": 2.94, + "grad_norm": 34.593306186244746, + "learning_rate": 1.977721449582304e-08, + "loss": 0.4808, + "step": 18833 + }, + { + "epoch": 2.94, + "grad_norm": 24.258314419282225, + "learning_rate": 1.9671347334717648e-08, + "loss": 0.4271, + "step": 18834 + }, + { + "epoch": 2.94, + "grad_norm": 19.64951328495019, + "learning_rate": 1.956576400832133e-08, + "loss": 0.4314, + "step": 18835 + }, + { + "epoch": 2.94, + "grad_norm": 25.061544618923524, + "learning_rate": 1.9460464519636124e-08, + "loss": 0.4569, + "step": 18836 + }, + { + "epoch": 2.94, + "grad_norm": 26.005278786779574, + "learning_rate": 1.9355448871657413e-08, + "loss": 0.4879, + "step": 18837 + }, + { + "epoch": 2.94, + "grad_norm": 23.59853772602622, + "learning_rate": 1.9250717067370583e-08, + "loss": 0.4838, + "step": 18838 + }, + { + "epoch": 2.94, + "grad_norm": 24.536518005080442, + "learning_rate": 1.9146269109755477e-08, + "loss": 0.5077, + "step": 18839 + }, + { + "epoch": 2.94, + "grad_norm": 24.854227551529316, + "learning_rate": 1.904210500178083e-08, + "loss": 0.4185, + "step": 18840 + }, + { + "epoch": 2.94, + "grad_norm": 24.634960533997567, + "learning_rate": 1.8938224746410938e-08, + "loss": 0.384, + "step": 18841 + }, + { + "epoch": 2.94, + "grad_norm": 25.961780777768983, + "learning_rate": 1.8834628346598993e-08, + "loss": 0.4577, + "step": 18842 + }, + { + "epoch": 2.94, + "grad_norm": 18.768583418684553, + "learning_rate": 1.8731315805290418e-08, + "loss": 0.4575, + "step": 
18843 + }, + { + "epoch": 2.94, + "grad_norm": 33.07991543925951, + "learning_rate": 1.862828712542508e-08, + "loss": 0.4274, + "step": 18844 + }, + { + "epoch": 2.94, + "grad_norm": 16.91244361565658, + "learning_rate": 1.8525542309932865e-08, + "loss": 0.4454, + "step": 18845 + }, + { + "epoch": 2.94, + "grad_norm": 21.393036626288726, + "learning_rate": 1.8423081361734762e-08, + "loss": 0.4224, + "step": 18846 + }, + { + "epoch": 2.94, + "grad_norm": 16.805750169499632, + "learning_rate": 1.832090428374511e-08, + "loss": 0.4225, + "step": 18847 + }, + { + "epoch": 2.94, + "grad_norm": 17.846242157756087, + "learning_rate": 1.8219011078869365e-08, + "loss": 0.4427, + "step": 18848 + }, + { + "epoch": 2.94, + "grad_norm": 22.013429965478007, + "learning_rate": 1.8117401750006314e-08, + "loss": 0.4592, + "step": 18849 + }, + { + "epoch": 2.94, + "grad_norm": 31.881296388191885, + "learning_rate": 1.801607630004476e-08, + "loss": 0.4904, + "step": 18850 + }, + { + "epoch": 2.94, + "grad_norm": 25.435922524561278, + "learning_rate": 1.7915034731867952e-08, + "loss": 0.4484, + "step": 18851 + }, + { + "epoch": 2.94, + "grad_norm": 17.4465189340236, + "learning_rate": 1.781427704834693e-08, + "loss": 0.418, + "step": 18852 + }, + { + "epoch": 2.94, + "grad_norm": 19.433702504222726, + "learning_rate": 1.7713803252348283e-08, + "loss": 0.4352, + "step": 18853 + }, + { + "epoch": 2.95, + "grad_norm": 27.7159977608798, + "learning_rate": 1.761361334673084e-08, + "loss": 0.4113, + "step": 18854 + }, + { + "epoch": 2.95, + "grad_norm": 14.719987480874398, + "learning_rate": 1.7513707334341212e-08, + "loss": 0.4255, + "step": 18855 + }, + { + "epoch": 2.95, + "grad_norm": 24.99010997292553, + "learning_rate": 1.741408521802379e-08, + "loss": 0.3937, + "step": 18856 + }, + { + "epoch": 2.95, + "grad_norm": 38.3508837187754, + "learning_rate": 1.7314747000608532e-08, + "loss": 0.4187, + "step": 18857 + }, + { + "epoch": 2.95, + "grad_norm": 25.896340583041276, + "learning_rate": 1.721569268492318e-08, + "loss": 0.4719, + "step": 18858 + }, + { + "epoch": 2.95, + "grad_norm": 24.16564306250834, + "learning_rate": 1.7116922273783255e-08, + "loss": 0.4503, + "step": 18859 + }, + { + "epoch": 2.95, + "grad_norm": 27.82957492523101, + "learning_rate": 1.7018435769998732e-08, + "loss": 0.4227, + "step": 18860 + }, + { + "epoch": 2.95, + "grad_norm": 18.948515972378544, + "learning_rate": 1.6920233176369594e-08, + "loss": 0.4106, + "step": 18861 + }, + { + "epoch": 2.95, + "grad_norm": 23.739760804248764, + "learning_rate": 1.6822314495689164e-08, + "loss": 0.4203, + "step": 18862 + }, + { + "epoch": 2.95, + "grad_norm": 16.041260531989856, + "learning_rate": 1.6724679730742986e-08, + "loss": 0.3665, + "step": 18863 + }, + { + "epoch": 2.95, + "grad_norm": 24.729139696897622, + "learning_rate": 1.6627328884305516e-08, + "loss": 0.4103, + "step": 18864 + }, + { + "epoch": 2.95, + "grad_norm": 27.878113369639426, + "learning_rate": 1.6530261959147863e-08, + "loss": 0.4326, + "step": 18865 + }, + { + "epoch": 2.95, + "grad_norm": 25.231729775072946, + "learning_rate": 1.6433478958028938e-08, + "loss": 0.4551, + "step": 18866 + }, + { + "epoch": 2.95, + "grad_norm": 20.282995790276836, + "learning_rate": 1.6336979883700976e-08, + "loss": 0.4121, + "step": 18867 + }, + { + "epoch": 2.95, + "grad_norm": 32.43296778811208, + "learning_rate": 1.6240764738909566e-08, + "loss": 0.478, + "step": 18868 + }, + { + "epoch": 2.95, + "grad_norm": 22.033986177707625, + "learning_rate": 1.6144833526390292e-08, + "loss": 
0.4184, + "step": 18869 + }, + { + "epoch": 2.95, + "grad_norm": 21.658520664140898, + "learning_rate": 1.6049186248872084e-08, + "loss": 0.4372, + "step": 18870 + }, + { + "epoch": 2.95, + "grad_norm": 22.776506783762112, + "learning_rate": 1.595382290907388e-08, + "loss": 0.4233, + "step": 18871 + }, + { + "epoch": 2.95, + "grad_norm": 16.954463981453696, + "learning_rate": 1.585874350970906e-08, + "loss": 0.3926, + "step": 18872 + }, + { + "epoch": 2.95, + "grad_norm": 21.216906088205832, + "learning_rate": 1.5763948053481026e-08, + "loss": 0.3413, + "step": 18873 + }, + { + "epoch": 2.95, + "grad_norm": 19.106083866466513, + "learning_rate": 1.566943654308539e-08, + "loss": 0.3729, + "step": 18874 + }, + { + "epoch": 2.95, + "grad_norm": 21.707289195317248, + "learning_rate": 1.557520898121001e-08, + "loss": 0.3961, + "step": 18875 + }, + { + "epoch": 2.95, + "grad_norm": 21.36446340655278, + "learning_rate": 1.5481265370536068e-08, + "loss": 0.4516, + "step": 18876 + }, + { + "epoch": 2.95, + "grad_norm": 31.912753232155914, + "learning_rate": 1.5387605713732545e-08, + "loss": 0.4214, + "step": 18877 + }, + { + "epoch": 2.95, + "grad_norm": 17.271925813908354, + "learning_rate": 1.5294230013466194e-08, + "loss": 0.3983, + "step": 18878 + }, + { + "epoch": 2.95, + "grad_norm": 26.304181007673105, + "learning_rate": 1.520113827239045e-08, + "loss": 0.4787, + "step": 18879 + }, + { + "epoch": 2.95, + "grad_norm": 17.73109102470334, + "learning_rate": 1.510833049315319e-08, + "loss": 0.468, + "step": 18880 + }, + { + "epoch": 2.95, + "grad_norm": 28.12424159722876, + "learning_rate": 1.501580667839453e-08, + "loss": 0.4406, + "step": 18881 + }, + { + "epoch": 2.95, + "grad_norm": 35.95885184847225, + "learning_rate": 1.4923566830744586e-08, + "loss": 0.3809, + "step": 18882 + }, + { + "epoch": 2.95, + "grad_norm": 32.0822114380872, + "learning_rate": 1.4831610952827925e-08, + "loss": 0.4653, + "step": 18883 + }, + { + "epoch": 2.95, + "grad_norm": 14.792258107059261, + "learning_rate": 1.4739939047259122e-08, + "loss": 0.4393, + "step": 18884 + }, + { + "epoch": 2.95, + "grad_norm": 19.699509350934477, + "learning_rate": 1.4648551116644982e-08, + "loss": 0.408, + "step": 18885 + }, + { + "epoch": 2.95, + "grad_norm": 17.44364783821956, + "learning_rate": 1.4557447163584538e-08, + "loss": 0.4689, + "step": 18886 + }, + { + "epoch": 2.95, + "grad_norm": 26.215502723706983, + "learning_rate": 1.4466627190669047e-08, + "loss": 0.4321, + "step": 18887 + }, + { + "epoch": 2.95, + "grad_norm": 21.272045555064263, + "learning_rate": 1.4376091200482002e-08, + "loss": 0.4471, + "step": 18888 + }, + { + "epoch": 2.95, + "grad_norm": 34.28184718943699, + "learning_rate": 1.4285839195596896e-08, + "loss": 0.4598, + "step": 18889 + }, + { + "epoch": 2.95, + "grad_norm": 27.576272715320606, + "learning_rate": 1.4195871178580567e-08, + "loss": 0.4785, + "step": 18890 + }, + { + "epoch": 2.95, + "grad_norm": 19.07893117830633, + "learning_rate": 1.410618715199319e-08, + "loss": 0.4771, + "step": 18891 + }, + { + "epoch": 2.95, + "grad_norm": 31.61028932803755, + "learning_rate": 1.4016787118383835e-08, + "loss": 0.4269, + "step": 18892 + }, + { + "epoch": 2.95, + "grad_norm": 14.811103497941325, + "learning_rate": 1.3927671080294913e-08, + "loss": 0.4185, + "step": 18893 + }, + { + "epoch": 2.95, + "grad_norm": 28.70594859420759, + "learning_rate": 1.3838839040262175e-08, + "loss": 0.4399, + "step": 18894 + }, + { + "epoch": 2.95, + "grad_norm": 15.81090350158245, + "learning_rate": 
1.3750291000811377e-08, + "loss": 0.4383, + "step": 18895 + }, + { + "epoch": 2.95, + "grad_norm": 17.86401355509483, + "learning_rate": 1.3662026964459396e-08, + "loss": 0.3881, + "step": 18896 + }, + { + "epoch": 2.95, + "grad_norm": 17.640748512639835, + "learning_rate": 1.3574046933717556e-08, + "loss": 0.3694, + "step": 18897 + }, + { + "epoch": 2.95, + "grad_norm": 21.11328455583545, + "learning_rate": 1.3486350911089407e-08, + "loss": 0.3511, + "step": 18898 + }, + { + "epoch": 2.95, + "grad_norm": 18.487984189649875, + "learning_rate": 1.3398938899066294e-08, + "loss": 0.401, + "step": 18899 + }, + { + "epoch": 2.95, + "grad_norm": 17.72068262240934, + "learning_rate": 1.3311810900135113e-08, + "loss": 0.4295, + "step": 18900 + }, + { + "epoch": 2.95, + "grad_norm": 18.793717919091723, + "learning_rate": 1.3224966916774995e-08, + "loss": 0.4255, + "step": 18901 + }, + { + "epoch": 2.95, + "grad_norm": 25.787911773959113, + "learning_rate": 1.3138406951453963e-08, + "loss": 0.4231, + "step": 18902 + }, + { + "epoch": 2.95, + "grad_norm": 23.04411393259211, + "learning_rate": 1.3052131006634494e-08, + "loss": 0.4177, + "step": 18903 + }, + { + "epoch": 2.95, + "grad_norm": 20.70476230121021, + "learning_rate": 1.296613908477018e-08, + "loss": 0.4449, + "step": 18904 + }, + { + "epoch": 2.95, + "grad_norm": 18.640186251891148, + "learning_rate": 1.2880431188306841e-08, + "loss": 0.429, + "step": 18905 + }, + { + "epoch": 2.95, + "grad_norm": 47.296971730264694, + "learning_rate": 1.2795007319681418e-08, + "loss": 0.4389, + "step": 18906 + }, + { + "epoch": 2.95, + "grad_norm": 20.84662707402527, + "learning_rate": 1.270986748132419e-08, + "loss": 0.4326, + "step": 18907 + }, + { + "epoch": 2.95, + "grad_norm": 32.03667124438352, + "learning_rate": 1.262501167565655e-08, + "loss": 0.4631, + "step": 18908 + }, + { + "epoch": 2.95, + "grad_norm": 24.41483813343104, + "learning_rate": 1.2540439905089907e-08, + "loss": 0.4659, + "step": 18909 + }, + { + "epoch": 2.95, + "grad_norm": 26.103207194699998, + "learning_rate": 1.2456152172031222e-08, + "loss": 0.4136, + "step": 18910 + }, + { + "epoch": 2.95, + "grad_norm": 22.227457055994396, + "learning_rate": 1.2372148478876356e-08, + "loss": 0.3986, + "step": 18911 + }, + { + "epoch": 2.95, + "grad_norm": 27.45741520807344, + "learning_rate": 1.228842882801562e-08, + "loss": 0.419, + "step": 18912 + }, + { + "epoch": 2.95, + "grad_norm": 30.814716971225845, + "learning_rate": 1.2204993221829331e-08, + "loss": 0.412, + "step": 18913 + }, + { + "epoch": 2.95, + "grad_norm": 18.874521113630074, + "learning_rate": 1.2121841662690037e-08, + "loss": 0.4038, + "step": 18914 + }, + { + "epoch": 2.95, + "grad_norm": 21.780510409590452, + "learning_rate": 1.2038974152963623e-08, + "loss": 0.4103, + "step": 18915 + }, + { + "epoch": 2.95, + "grad_norm": 37.31944332104858, + "learning_rate": 1.1956390695004871e-08, + "loss": 0.4795, + "step": 18916 + }, + { + "epoch": 2.95, + "grad_norm": 15.891607859925616, + "learning_rate": 1.1874091291164125e-08, + "loss": 0.4385, + "step": 18917 + }, + { + "epoch": 2.96, + "grad_norm": 26.307133652937484, + "learning_rate": 1.1792075943781733e-08, + "loss": 0.431, + "step": 18918 + }, + { + "epoch": 2.96, + "grad_norm": 22.552838197163407, + "learning_rate": 1.1710344655189166e-08, + "loss": 0.422, + "step": 18919 + }, + { + "epoch": 2.96, + "grad_norm": 17.897930163882112, + "learning_rate": 1.1628897427711227e-08, + "loss": 0.4156, + "step": 18920 + }, + { + "epoch": 2.96, + "grad_norm": 20.86070074197505, + 
"learning_rate": 1.1547734263664957e-08, + "loss": 0.3833, + "step": 18921 + }, + { + "epoch": 2.96, + "grad_norm": 19.380461471426464, + "learning_rate": 1.1466855165357393e-08, + "loss": 0.4023, + "step": 18922 + }, + { + "epoch": 2.96, + "grad_norm": 17.83093619687225, + "learning_rate": 1.1386260135090033e-08, + "loss": 0.4547, + "step": 18923 + }, + { + "epoch": 2.96, + "grad_norm": 16.225807193943098, + "learning_rate": 1.1305949175154374e-08, + "loss": 0.4514, + "step": 18924 + }, + { + "epoch": 2.96, + "grad_norm": 25.397365373777898, + "learning_rate": 1.1225922287834146e-08, + "loss": 0.4352, + "step": 18925 + }, + { + "epoch": 2.96, + "grad_norm": 28.502076288768954, + "learning_rate": 1.1146179475404195e-08, + "loss": 0.5079, + "step": 18926 + }, + { + "epoch": 2.96, + "grad_norm": 22.354008033831885, + "learning_rate": 1.1066720740134928e-08, + "loss": 0.4392, + "step": 18927 + }, + { + "epoch": 2.96, + "grad_norm": 26.87022337714897, + "learning_rate": 1.098754608428454e-08, + "loss": 0.4118, + "step": 18928 + }, + { + "epoch": 2.96, + "grad_norm": 23.40075480046642, + "learning_rate": 1.090865551010456e-08, + "loss": 0.4529, + "step": 18929 + }, + { + "epoch": 2.96, + "grad_norm": 17.644945994855508, + "learning_rate": 1.0830049019839862e-08, + "loss": 0.3868, + "step": 18930 + }, + { + "epoch": 2.96, + "grad_norm": 28.6227328337586, + "learning_rate": 1.0751726615724212e-08, + "loss": 0.4741, + "step": 18931 + }, + { + "epoch": 2.96, + "grad_norm": 17.12361973400038, + "learning_rate": 1.0673688299985829e-08, + "loss": 0.4264, + "step": 18932 + }, + { + "epoch": 2.96, + "grad_norm": 31.25673761405072, + "learning_rate": 1.059593407484516e-08, + "loss": 0.4484, + "step": 18933 + }, + { + "epoch": 2.96, + "grad_norm": 17.018648184647915, + "learning_rate": 1.0518463942511547e-08, + "loss": 0.4339, + "step": 18934 + }, + { + "epoch": 2.96, + "grad_norm": 20.877510915580245, + "learning_rate": 1.0441277905188785e-08, + "loss": 0.4812, + "step": 18935 + }, + { + "epoch": 2.96, + "grad_norm": 21.089566961833093, + "learning_rate": 1.0364375965074003e-08, + "loss": 0.4252, + "step": 18936 + }, + { + "epoch": 2.96, + "grad_norm": 21.504862675545432, + "learning_rate": 1.0287758124351011e-08, + "loss": 0.3679, + "step": 18937 + }, + { + "epoch": 2.96, + "grad_norm": 32.761000417391585, + "learning_rate": 1.0211424385201396e-08, + "loss": 0.4554, + "step": 18938 + }, + { + "epoch": 2.96, + "grad_norm": 24.312658174583717, + "learning_rate": 1.0135374749794536e-08, + "loss": 0.4521, + "step": 18939 + }, + { + "epoch": 2.96, + "grad_norm": 15.280678559951753, + "learning_rate": 1.0059609220293142e-08, + "loss": 0.4338, + "step": 18940 + }, + { + "epoch": 2.96, + "grad_norm": 17.874495016480942, + "learning_rate": 9.984127798853271e-09, + "loss": 0.3965, + "step": 18941 + }, + { + "epoch": 2.96, + "grad_norm": 24.805881613276036, + "learning_rate": 9.908930487620983e-09, + "loss": 0.3996, + "step": 18942 + }, + { + "epoch": 2.96, + "grad_norm": 31.750902270559063, + "learning_rate": 9.834017288734565e-09, + "loss": 0.4024, + "step": 18943 + }, + { + "epoch": 2.96, + "grad_norm": 21.997023547056152, + "learning_rate": 9.759388204323428e-09, + "loss": 0.4377, + "step": 18944 + }, + { + "epoch": 2.96, + "grad_norm": 18.637025379256198, + "learning_rate": 9.685043236512537e-09, + "loss": 0.3674, + "step": 18945 + }, + { + "epoch": 2.96, + "grad_norm": 25.995770388912867, + "learning_rate": 9.610982387414647e-09, + "loss": 0.4357, + "step": 18946 + }, + { + "epoch": 2.96, + "grad_norm": 
32.8932047350986, + "learning_rate": 9.53720565913585e-09, + "loss": 0.4433, + "step": 18947 + }, + { + "epoch": 2.96, + "grad_norm": 24.27065160741977, + "learning_rate": 9.46371305377447e-09, + "loss": 0.4437, + "step": 18948 + }, + { + "epoch": 2.96, + "grad_norm": 21.935168933779824, + "learning_rate": 9.390504573422166e-09, + "loss": 0.4225, + "step": 18949 + }, + { + "epoch": 2.96, + "grad_norm": 16.20277267742511, + "learning_rate": 9.317580220158385e-09, + "loss": 0.3808, + "step": 18950 + }, + { + "epoch": 2.96, + "grad_norm": 24.955198566677257, + "learning_rate": 9.244939996058134e-09, + "loss": 0.4451, + "step": 18951 + }, + { + "epoch": 2.96, + "grad_norm": 19.38059984314208, + "learning_rate": 9.172583903187537e-09, + "loss": 0.4397, + "step": 18952 + }, + { + "epoch": 2.96, + "grad_norm": 14.94150959851793, + "learning_rate": 9.10051194360495e-09, + "loss": 0.3739, + "step": 18953 + }, + { + "epoch": 2.96, + "grad_norm": 23.468157869988666, + "learning_rate": 9.028724119358734e-09, + "loss": 0.4121, + "step": 18954 + }, + { + "epoch": 2.96, + "grad_norm": 27.10669632905505, + "learning_rate": 8.957220432490587e-09, + "loss": 0.4185, + "step": 18955 + }, + { + "epoch": 2.96, + "grad_norm": 24.47403983111849, + "learning_rate": 8.886000885034441e-09, + "loss": 0.4557, + "step": 18956 + }, + { + "epoch": 2.96, + "grad_norm": 16.38599176442435, + "learning_rate": 8.815065479016449e-09, + "loss": 0.4632, + "step": 18957 + }, + { + "epoch": 2.96, + "grad_norm": 22.903400396116684, + "learning_rate": 8.74441421645278e-09, + "loss": 0.4089, + "step": 18958 + }, + { + "epoch": 2.96, + "grad_norm": 24.430226858935495, + "learning_rate": 8.674047099354044e-09, + "loss": 0.4088, + "step": 18959 + }, + { + "epoch": 2.96, + "grad_norm": 21.816443375817986, + "learning_rate": 8.603964129719755e-09, + "loss": 0.4056, + "step": 18960 + }, + { + "epoch": 2.96, + "grad_norm": 29.52230697530616, + "learning_rate": 8.534165309544985e-09, + "loss": 0.422, + "step": 18961 + }, + { + "epoch": 2.96, + "grad_norm": 18.756022535125073, + "learning_rate": 8.464650640812588e-09, + "loss": 0.4325, + "step": 18962 + }, + { + "epoch": 2.96, + "grad_norm": 22.615046005459465, + "learning_rate": 8.395420125500986e-09, + "loss": 0.4399, + "step": 18963 + }, + { + "epoch": 2.96, + "grad_norm": 37.68698940714089, + "learning_rate": 8.326473765579713e-09, + "loss": 0.4363, + "step": 18964 + }, + { + "epoch": 2.96, + "grad_norm": 19.227217201578128, + "learning_rate": 8.2578115630072e-09, + "loss": 0.4266, + "step": 18965 + }, + { + "epoch": 2.96, + "grad_norm": 29.493925372324, + "learning_rate": 8.189433519738555e-09, + "loss": 0.4452, + "step": 18966 + }, + { + "epoch": 2.96, + "grad_norm": 19.750361215933907, + "learning_rate": 8.121339637716663e-09, + "loss": 0.4659, + "step": 18967 + }, + { + "epoch": 2.96, + "grad_norm": 21.051687452921847, + "learning_rate": 8.053529918878867e-09, + "loss": 0.4494, + "step": 18968 + }, + { + "epoch": 2.96, + "grad_norm": 27.385109292214995, + "learning_rate": 7.986004365153621e-09, + "loss": 0.5378, + "step": 18969 + }, + { + "epoch": 2.96, + "grad_norm": 22.2300064321709, + "learning_rate": 7.918762978461613e-09, + "loss": 0.4553, + "step": 18970 + }, + { + "epoch": 2.96, + "grad_norm": 20.503338150930492, + "learning_rate": 7.851805760714647e-09, + "loss": 0.4762, + "step": 18971 + }, + { + "epoch": 2.96, + "grad_norm": 20.896910256170234, + "learning_rate": 7.785132713816756e-09, + "loss": 0.389, + "step": 18972 + }, + { + "epoch": 2.96, + "grad_norm": 
20.76167807373196, + "learning_rate": 7.7187438396642e-09, + "loss": 0.4352, + "step": 18973 + }, + { + "epoch": 2.96, + "grad_norm": 16.797145053999174, + "learning_rate": 7.652639140146579e-09, + "loss": 0.4545, + "step": 18974 + }, + { + "epoch": 2.96, + "grad_norm": 14.602992906713823, + "learning_rate": 7.586818617141279e-09, + "loss": 0.462, + "step": 18975 + }, + { + "epoch": 2.96, + "grad_norm": 21.221055704374898, + "learning_rate": 7.521282272521246e-09, + "loss": 0.4316, + "step": 18976 + }, + { + "epoch": 2.96, + "grad_norm": 26.49754136753965, + "learning_rate": 7.456030108151657e-09, + "loss": 0.4585, + "step": 18977 + }, + { + "epoch": 2.96, + "grad_norm": 31.44579497447003, + "learning_rate": 7.391062125886583e-09, + "loss": 0.4044, + "step": 18978 + }, + { + "epoch": 2.96, + "grad_norm": 27.599035151131698, + "learning_rate": 7.326378327574546e-09, + "loss": 0.3935, + "step": 18979 + }, + { + "epoch": 2.96, + "grad_norm": 18.12478616851425, + "learning_rate": 7.261978715054074e-09, + "loss": 0.4299, + "step": 18980 + }, + { + "epoch": 2.96, + "grad_norm": 23.206302658995273, + "learning_rate": 7.197863290157037e-09, + "loss": 0.4947, + "step": 18981 + }, + { + "epoch": 2.97, + "grad_norm": 31.141215870998966, + "learning_rate": 7.134032054707529e-09, + "loss": 0.4481, + "step": 18982 + }, + { + "epoch": 2.97, + "grad_norm": 31.301658485732723, + "learning_rate": 7.070485010520767e-09, + "loss": 0.4303, + "step": 18983 + }, + { + "epoch": 2.97, + "grad_norm": 19.564166917408766, + "learning_rate": 7.007222159404192e-09, + "loss": 0.4189, + "step": 18984 + }, + { + "epoch": 2.97, + "grad_norm": 16.430214103897324, + "learning_rate": 6.944243503155257e-09, + "loss": 0.4579, + "step": 18985 + }, + { + "epoch": 2.97, + "grad_norm": 22.445940848722614, + "learning_rate": 6.881549043568081e-09, + "loss": 0.4568, + "step": 18986 + }, + { + "epoch": 2.97, + "grad_norm": 22.023330939915716, + "learning_rate": 6.819138782422352e-09, + "loss": 0.4271, + "step": 18987 + }, + { + "epoch": 2.97, + "grad_norm": 20.884309167028974, + "learning_rate": 6.757012721494427e-09, + "loss": 0.4144, + "step": 18988 + }, + { + "epoch": 2.97, + "grad_norm": 24.605609217544227, + "learning_rate": 6.6951708625517805e-09, + "loss": 0.4263, + "step": 18989 + }, + { + "epoch": 2.97, + "grad_norm": 45.597811336967645, + "learning_rate": 6.633613207351897e-09, + "loss": 0.4614, + "step": 18990 + }, + { + "epoch": 2.97, + "grad_norm": 18.01267228656309, + "learning_rate": 6.5723397576467066e-09, + "loss": 0.4038, + "step": 18991 + }, + { + "epoch": 2.97, + "grad_norm": 27.522545004457985, + "learning_rate": 6.51135051517815e-09, + "loss": 0.4144, + "step": 18992 + }, + { + "epoch": 2.97, + "grad_norm": 22.75316349110637, + "learning_rate": 6.450645481681506e-09, + "loss": 0.3934, + "step": 18993 + }, + { + "epoch": 2.97, + "grad_norm": 15.503631236625589, + "learning_rate": 6.39022465888095e-09, + "loss": 0.3806, + "step": 18994 + }, + { + "epoch": 2.97, + "grad_norm": 20.616927052392633, + "learning_rate": 6.330088048497329e-09, + "loss": 0.4745, + "step": 18995 + }, + { + "epoch": 2.97, + "grad_norm": 24.368009699544025, + "learning_rate": 6.270235652239276e-09, + "loss": 0.4141, + "step": 18996 + }, + { + "epoch": 2.97, + "grad_norm": 17.230089111227763, + "learning_rate": 6.2106674718098725e-09, + "loss": 0.3998, + "step": 18997 + }, + { + "epoch": 2.97, + "grad_norm": 20.98243307727471, + "learning_rate": 6.1513835089033194e-09, + "loss": 0.4365, + "step": 18998 + }, + { + "epoch": 2.97, + "grad_norm": 
18.82280379797896, + "learning_rate": 6.0923837652038245e-09, + "loss": 0.3938, + "step": 18999 + }, + { + "epoch": 2.97, + "grad_norm": 25.494579999640557, + "learning_rate": 6.033668242392266e-09, + "loss": 0.4302, + "step": 19000 + }, + { + "epoch": 2.97, + "grad_norm": 22.20595285419225, + "learning_rate": 5.975236942136197e-09, + "loss": 0.3808, + "step": 19001 + }, + { + "epoch": 2.97, + "grad_norm": 30.59397621041931, + "learning_rate": 5.917089866097625e-09, + "loss": 0.4022, + "step": 19002 + }, + { + "epoch": 2.97, + "grad_norm": 31.912210014301774, + "learning_rate": 5.85922701593078e-09, + "loss": 0.4656, + "step": 19003 + }, + { + "epoch": 2.97, + "grad_norm": 27.910059455826534, + "learning_rate": 5.801648393282122e-09, + "loss": 0.4577, + "step": 19004 + }, + { + "epoch": 2.97, + "grad_norm": 18.43865478584657, + "learning_rate": 5.744353999787011e-09, + "loss": 0.4491, + "step": 19005 + }, + { + "epoch": 2.97, + "grad_norm": 20.1279621980003, + "learning_rate": 5.6873438370763645e-09, + "loss": 0.4439, + "step": 19006 + }, + { + "epoch": 2.97, + "grad_norm": 15.487279225054342, + "learning_rate": 5.630617906771108e-09, + "loss": 0.4437, + "step": 19007 + }, + { + "epoch": 2.97, + "grad_norm": 36.774827812953504, + "learning_rate": 5.574176210484394e-09, + "loss": 0.4529, + "step": 19008 + }, + { + "epoch": 2.97, + "grad_norm": 35.57147601809623, + "learning_rate": 5.518018749821607e-09, + "loss": 0.4119, + "step": 19009 + }, + { + "epoch": 2.97, + "grad_norm": 19.362953589589303, + "learning_rate": 5.462145526380358e-09, + "loss": 0.3986, + "step": 19010 + }, + { + "epoch": 2.97, + "grad_norm": 16.30089443632787, + "learning_rate": 5.406556541748265e-09, + "loss": 0.3757, + "step": 19011 + }, + { + "epoch": 2.97, + "grad_norm": 26.287451505684306, + "learning_rate": 5.351251797507395e-09, + "loss": 0.5059, + "step": 19012 + }, + { + "epoch": 2.97, + "grad_norm": 27.050379262202735, + "learning_rate": 5.296231295229826e-09, + "loss": 0.4001, + "step": 19013 + }, + { + "epoch": 2.97, + "grad_norm": 24.09386004708536, + "learning_rate": 5.2414950364809706e-09, + "loss": 0.4343, + "step": 19014 + }, + { + "epoch": 2.97, + "grad_norm": 19.7126359657613, + "learning_rate": 5.187043022817362e-09, + "loss": 0.398, + "step": 19015 + }, + { + "epoch": 2.97, + "grad_norm": 22.009488539272215, + "learning_rate": 5.1328752557877615e-09, + "loss": 0.4432, + "step": 19016 + }, + { + "epoch": 2.97, + "grad_norm": 18.44981720515287, + "learning_rate": 5.078991736932049e-09, + "loss": 0.4513, + "step": 19017 + }, + { + "epoch": 2.97, + "grad_norm": 20.877136236803327, + "learning_rate": 5.025392467783441e-09, + "loss": 0.427, + "step": 19018 + }, + { + "epoch": 2.97, + "grad_norm": 27.957027941121694, + "learning_rate": 4.972077449865165e-09, + "loss": 0.43, + "step": 19019 + }, + { + "epoch": 2.97, + "grad_norm": 17.118143936799132, + "learning_rate": 4.919046684693785e-09, + "loss": 0.4203, + "step": 19020 + }, + { + "epoch": 2.97, + "grad_norm": 21.99521631893356, + "learning_rate": 4.8663001737780934e-09, + "loss": 0.4202, + "step": 19021 + }, + { + "epoch": 2.97, + "grad_norm": 17.424257384974354, + "learning_rate": 4.813837918618003e-09, + "loss": 0.4745, + "step": 19022 + }, + { + "epoch": 2.97, + "grad_norm": 20.060107964969642, + "learning_rate": 4.761659920705652e-09, + "loss": 0.4413, + "step": 19023 + }, + { + "epoch": 2.97, + "grad_norm": 19.837777820509434, + "learning_rate": 4.7097661815243e-09, + "loss": 0.4484, + "step": 19024 + }, + { + "epoch": 2.97, + "grad_norm": 
13.61512186914786, + "learning_rate": 4.6581567025494324e-09, + "loss": 0.3757, + "step": 19025 + }, + { + "epoch": 2.97, + "grad_norm": 13.75563206851141, + "learning_rate": 4.6068314852498745e-09, + "loss": 0.386, + "step": 19026 + }, + { + "epoch": 2.97, + "grad_norm": 21.740098357364158, + "learning_rate": 4.555790531085569e-09, + "loss": 0.4401, + "step": 19027 + }, + { + "epoch": 2.97, + "grad_norm": 33.30524951882078, + "learning_rate": 4.505033841506468e-09, + "loss": 0.4379, + "step": 19028 + }, + { + "epoch": 2.97, + "grad_norm": 28.644074719748602, + "learning_rate": 4.454561417958081e-09, + "loss": 0.4253, + "step": 19029 + }, + { + "epoch": 2.97, + "grad_norm": 26.08593718125171, + "learning_rate": 4.404373261873707e-09, + "loss": 0.4307, + "step": 19030 + }, + { + "epoch": 2.97, + "grad_norm": 29.210141061411946, + "learning_rate": 4.354469374682202e-09, + "loss": 0.4129, + "step": 19031 + }, + { + "epoch": 2.97, + "grad_norm": 27.10043342670859, + "learning_rate": 4.304849757802432e-09, + "loss": 0.4332, + "step": 19032 + }, + { + "epoch": 2.97, + "grad_norm": 50.35246038649922, + "learning_rate": 4.25551441264549e-09, + "loss": 0.4806, + "step": 19033 + }, + { + "epoch": 2.97, + "grad_norm": 23.600085213828606, + "learning_rate": 4.206463340614697e-09, + "loss": 0.4317, + "step": 19034 + }, + { + "epoch": 2.97, + "grad_norm": 23.15291259566251, + "learning_rate": 4.157696543103384e-09, + "loss": 0.3772, + "step": 19035 + }, + { + "epoch": 2.97, + "grad_norm": 27.431742348623246, + "learning_rate": 4.109214021500441e-09, + "loss": 0.5211, + "step": 19036 + }, + { + "epoch": 2.97, + "grad_norm": 28.368095301477446, + "learning_rate": 4.061015777184762e-09, + "loss": 0.4635, + "step": 19037 + }, + { + "epoch": 2.97, + "grad_norm": 23.357818429175374, + "learning_rate": 4.013101811525255e-09, + "loss": 0.4779, + "step": 19038 + }, + { + "epoch": 2.97, + "grad_norm": 22.078226363265927, + "learning_rate": 3.9654721258863826e-09, + "loss": 0.4067, + "step": 19039 + }, + { + "epoch": 2.97, + "grad_norm": 24.644678762069717, + "learning_rate": 3.918126721621507e-09, + "loss": 0.4163, + "step": 19040 + }, + { + "epoch": 2.97, + "grad_norm": 25.96143597960283, + "learning_rate": 3.871065600078439e-09, + "loss": 0.4695, + "step": 19041 + }, + { + "epoch": 2.97, + "grad_norm": 14.725658046460605, + "learning_rate": 3.824288762593886e-09, + "loss": 0.3881, + "step": 19042 + }, + { + "epoch": 2.97, + "grad_norm": 15.693133629704562, + "learning_rate": 3.777796210499008e-09, + "loss": 0.421, + "step": 19043 + }, + { + "epoch": 2.97, + "grad_norm": 16.176605285607106, + "learning_rate": 3.731587945116078e-09, + "loss": 0.4373, + "step": 19044 + }, + { + "epoch": 2.97, + "grad_norm": 26.68550790715355, + "learning_rate": 3.6856639677584906e-09, + "loss": 0.4516, + "step": 19045 + }, + { + "epoch": 2.98, + "grad_norm": 22.38465464336222, + "learning_rate": 3.640024279734089e-09, + "loss": 0.4024, + "step": 19046 + }, + { + "epoch": 2.98, + "grad_norm": 20.297508200502715, + "learning_rate": 3.5946688823396137e-09, + "loss": 0.4067, + "step": 19047 + }, + { + "epoch": 2.98, + "grad_norm": 26.32188164123176, + "learning_rate": 3.5495977768640332e-09, + "loss": 0.5031, + "step": 19048 + }, + { + "epoch": 2.98, + "grad_norm": 21.24744857870061, + "learning_rate": 3.5048109645907657e-09, + "loss": 0.4013, + "step": 19049 + }, + { + "epoch": 2.98, + "grad_norm": 27.54048088587474, + "learning_rate": 3.460308446793237e-09, + "loss": 0.4876, + "step": 19050 + }, + { + "epoch": 2.98, + "grad_norm": 
18.753353146495563, + "learning_rate": 3.4160902247359904e-09, + "loss": 0.4494, + "step": 19051 + }, + { + "epoch": 2.98, + "grad_norm": 30.05542718618167, + "learning_rate": 3.37215629967802e-09, + "loss": 0.4343, + "step": 19052 + }, + { + "epoch": 2.98, + "grad_norm": 23.280379620866015, + "learning_rate": 3.3285066728672156e-09, + "loss": 0.473, + "step": 19053 + }, + { + "epoch": 2.98, + "grad_norm": 15.254602517955334, + "learning_rate": 3.285141345547027e-09, + "loss": 0.3855, + "step": 19054 + }, + { + "epoch": 2.98, + "grad_norm": 17.312123829063434, + "learning_rate": 3.242060318948692e-09, + "loss": 0.4466, + "step": 19055 + }, + { + "epoch": 2.98, + "grad_norm": 22.590060792896477, + "learning_rate": 3.1992635942978965e-09, + "loss": 0.3997, + "step": 19056 + }, + { + "epoch": 2.98, + "grad_norm": 30.826135758257536, + "learning_rate": 3.1567511728125555e-09, + "loss": 0.3865, + "step": 19057 + }, + { + "epoch": 2.98, + "grad_norm": 17.03789455743406, + "learning_rate": 3.114523055700591e-09, + "loss": 0.3902, + "step": 19058 + }, + { + "epoch": 2.98, + "grad_norm": 25.533856886818658, + "learning_rate": 3.0725792441643755e-09, + "loss": 0.3939, + "step": 19059 + }, + { + "epoch": 2.98, + "grad_norm": 24.253246856882708, + "learning_rate": 3.0309197393962874e-09, + "loss": 0.3718, + "step": 19060 + }, + { + "epoch": 2.98, + "grad_norm": 25.898124468538473, + "learning_rate": 2.9895445425798253e-09, + "loss": 0.4364, + "step": 19061 + }, + { + "epoch": 2.98, + "grad_norm": 20.483069362247637, + "learning_rate": 2.9484536548929355e-09, + "loss": 0.4733, + "step": 19062 + }, + { + "epoch": 2.98, + "grad_norm": 24.538813588830145, + "learning_rate": 2.907647077504683e-09, + "loss": 0.4989, + "step": 19063 + }, + { + "epoch": 2.98, + "grad_norm": 16.220303074303274, + "learning_rate": 2.86712481157414e-09, + "loss": 0.4245, + "step": 19064 + }, + { + "epoch": 2.98, + "grad_norm": 18.952393335275733, + "learning_rate": 2.826886858253719e-09, + "loss": 0.411, + "step": 19065 + }, + { + "epoch": 2.98, + "grad_norm": 25.645906260133913, + "learning_rate": 2.7869332186891697e-09, + "loss": 0.4338, + "step": 19066 + }, + { + "epoch": 2.98, + "grad_norm": 21.984469154633462, + "learning_rate": 2.7472638940162498e-09, + "loss": 0.4415, + "step": 19067 + }, + { + "epoch": 2.98, + "grad_norm": 27.13863583892154, + "learning_rate": 2.707878885362947e-09, + "loss": 0.455, + "step": 19068 + }, + { + "epoch": 2.98, + "grad_norm": 21.20115647423043, + "learning_rate": 2.6687781938483647e-09, + "loss": 0.389, + "step": 19069 + }, + { + "epoch": 2.98, + "grad_norm": 37.3323743692061, + "learning_rate": 2.629961820587168e-09, + "loss": 0.4341, + "step": 19070 + }, + { + "epoch": 2.98, + "grad_norm": 17.45510550013736, + "learning_rate": 2.5914297666806975e-09, + "loss": 0.46, + "step": 19071 + }, + { + "epoch": 2.98, + "grad_norm": 16.590035204122856, + "learning_rate": 2.5531820332247436e-09, + "loss": 0.4174, + "step": 19072 + }, + { + "epoch": 2.98, + "grad_norm": 26.41161283175662, + "learning_rate": 2.515218621309545e-09, + "loss": 0.4657, + "step": 19073 + }, + { + "epoch": 2.98, + "grad_norm": 18.175893233524274, + "learning_rate": 2.4775395320120187e-09, + "loss": 0.4031, + "step": 19074 + }, + { + "epoch": 2.98, + "grad_norm": 15.164030899003132, + "learning_rate": 2.4401447664046398e-09, + "loss": 0.3686, + "step": 19075 + }, + { + "epoch": 2.98, + "grad_norm": 19.19627788989915, + "learning_rate": 2.4030343255521115e-09, + "loss": 0.3659, + "step": 19076 + }, + { + "epoch": 2.98, + 
"grad_norm": 17.318520219648978, + "learning_rate": 2.3662082105080365e-09, + "loss": 0.436, + "step": 19077 + }, + { + "epoch": 2.98, + "grad_norm": 19.873689941949166, + "learning_rate": 2.3296664223215747e-09, + "loss": 0.3835, + "step": 19078 + }, + { + "epoch": 2.98, + "grad_norm": 25.057739024403343, + "learning_rate": 2.2934089620307852e-09, + "loss": 0.4465, + "step": 19079 + }, + { + "epoch": 2.98, + "grad_norm": 26.07033242626989, + "learning_rate": 2.257435830665955e-09, + "loss": 0.426, + "step": 19080 + }, + { + "epoch": 2.98, + "grad_norm": 18.889132040771294, + "learning_rate": 2.2217470292518196e-09, + "loss": 0.4156, + "step": 19081 + }, + { + "epoch": 2.98, + "grad_norm": 33.06543095474694, + "learning_rate": 2.1863425588020125e-09, + "loss": 0.4709, + "step": 19082 + }, + { + "epoch": 2.98, + "grad_norm": 22.210637933547137, + "learning_rate": 2.1512224203246167e-09, + "loss": 0.4326, + "step": 19083 + }, + { + "epoch": 2.98, + "grad_norm": 19.713890987628858, + "learning_rate": 2.116386614818833e-09, + "loss": 0.3843, + "step": 19084 + }, + { + "epoch": 2.98, + "grad_norm": 18.52326693027949, + "learning_rate": 2.081835143272759e-09, + "loss": 0.5101, + "step": 19085 + }, + { + "epoch": 2.98, + "grad_norm": 32.725982118124115, + "learning_rate": 2.0475680066711632e-09, + "loss": 0.4518, + "step": 19086 + }, + { + "epoch": 2.98, + "grad_norm": 21.844984637720195, + "learning_rate": 2.0135852059888215e-09, + "loss": 0.4331, + "step": 19087 + }, + { + "epoch": 2.98, + "grad_norm": 23.3869577545039, + "learning_rate": 1.9798867421905176e-09, + "loss": 0.4327, + "step": 19088 + }, + { + "epoch": 2.98, + "grad_norm": 22.035357402173492, + "learning_rate": 1.9464726162365944e-09, + "loss": 0.4743, + "step": 19089 + }, + { + "epoch": 2.98, + "grad_norm": 31.04858755985672, + "learning_rate": 1.9133428290751822e-09, + "loss": 0.4268, + "step": 19090 + }, + { + "epoch": 2.98, + "grad_norm": 29.413243408754294, + "learning_rate": 1.880497381651081e-09, + "loss": 0.4537, + "step": 19091 + }, + { + "epoch": 2.98, + "grad_norm": 21.444012303361635, + "learning_rate": 1.8479362748957674e-09, + "loss": 0.4457, + "step": 19092 + }, + { + "epoch": 2.98, + "grad_norm": 24.13550402604632, + "learning_rate": 1.8156595097362783e-09, + "loss": 0.43, + "step": 19093 + }, + { + "epoch": 2.98, + "grad_norm": 14.194137590752707, + "learning_rate": 1.783667087090768e-09, + "loss": 0.3687, + "step": 19094 + }, + { + "epoch": 2.98, + "grad_norm": 13.481841177483908, + "learning_rate": 1.7519590078696192e-09, + "loss": 0.4039, + "step": 19095 + }, + { + "epoch": 2.98, + "grad_norm": 25.700649642595774, + "learning_rate": 1.7205352729732227e-09, + "loss": 0.4307, + "step": 19096 + }, + { + "epoch": 2.98, + "grad_norm": 24.809557723470284, + "learning_rate": 1.6893958832964186e-09, + "loss": 0.4146, + "step": 19097 + }, + { + "epoch": 2.98, + "grad_norm": 26.156701973914807, + "learning_rate": 1.6585408397240544e-09, + "loss": 0.4533, + "step": 19098 + }, + { + "epoch": 2.98, + "grad_norm": 19.22753737224552, + "learning_rate": 1.6279701431343165e-09, + "loss": 0.4442, + "step": 19099 + }, + { + "epoch": 2.98, + "grad_norm": 26.227705283745607, + "learning_rate": 1.5976837943953993e-09, + "loss": 0.4315, + "step": 19100 + }, + { + "epoch": 2.98, + "grad_norm": 22.538953271135078, + "learning_rate": 1.5676817943699462e-09, + "loss": 0.3854, + "step": 19101 + }, + { + "epoch": 2.98, + "grad_norm": 22.77952149718791, + "learning_rate": 1.5379641439106086e-09, + "loss": 0.4179, + "step": 19102 + }, + { + 
"epoch": 2.98, + "grad_norm": 17.82095976092635, + "learning_rate": 1.5085308438633762e-09, + "loss": 0.4657, + "step": 19103 + }, + { + "epoch": 2.98, + "grad_norm": 26.558446492963586, + "learning_rate": 1.4793818950642469e-09, + "loss": 0.4205, + "step": 19104 + }, + { + "epoch": 2.98, + "grad_norm": 29.937010316586935, + "learning_rate": 1.4505172983425576e-09, + "loss": 0.4217, + "step": 19105 + }, + { + "epoch": 2.98, + "grad_norm": 26.99159863846815, + "learning_rate": 1.4219370545187628e-09, + "loss": 0.4263, + "step": 19106 + }, + { + "epoch": 2.98, + "grad_norm": 26.506005887529, + "learning_rate": 1.393641164405546e-09, + "loss": 0.4655, + "step": 19107 + }, + { + "epoch": 2.98, + "grad_norm": 25.773095289087827, + "learning_rate": 1.3656296288089288e-09, + "loss": 0.4774, + "step": 19108 + }, + { + "epoch": 2.98, + "grad_norm": 16.211623797309347, + "learning_rate": 1.3379024485249414e-09, + "loss": 0.4244, + "step": 19109 + }, + { + "epoch": 2.99, + "grad_norm": 21.072265779157753, + "learning_rate": 1.310459624341842e-09, + "loss": 0.3614, + "step": 19110 + }, + { + "epoch": 2.99, + "grad_norm": 23.334449939676414, + "learning_rate": 1.2833011570401177e-09, + "loss": 0.4664, + "step": 19111 + }, + { + "epoch": 2.99, + "grad_norm": 17.51298214015917, + "learning_rate": 1.2564270473924834e-09, + "loss": 0.4528, + "step": 19112 + }, + { + "epoch": 2.99, + "grad_norm": 23.299673938899996, + "learning_rate": 1.2298372961627726e-09, + "loss": 0.393, + "step": 19113 + }, + { + "epoch": 2.99, + "grad_norm": 19.474701590143233, + "learning_rate": 1.203531904107047e-09, + "loss": 0.4197, + "step": 19114 + }, + { + "epoch": 2.99, + "grad_norm": 14.48891243957197, + "learning_rate": 1.1775108719735973e-09, + "loss": 0.4491, + "step": 19115 + }, + { + "epoch": 2.99, + "grad_norm": 17.8203287279255, + "learning_rate": 1.1517742005029419e-09, + "loss": 0.4563, + "step": 19116 + }, + { + "epoch": 2.99, + "grad_norm": 24.33407077518308, + "learning_rate": 1.1263218904267181e-09, + "loss": 0.4634, + "step": 19117 + }, + { + "epoch": 2.99, + "grad_norm": 23.749761730805847, + "learning_rate": 1.101153942468791e-09, + "loss": 0.4136, + "step": 19118 + }, + { + "epoch": 2.99, + "grad_norm": 19.71688178895801, + "learning_rate": 1.0762703573452548e-09, + "loss": 0.4013, + "step": 19119 + }, + { + "epoch": 2.99, + "grad_norm": 23.78527307129287, + "learning_rate": 1.051671135762211e-09, + "loss": 0.4017, + "step": 19120 + }, + { + "epoch": 2.99, + "grad_norm": 17.272434161483535, + "learning_rate": 1.0273562784213209e-09, + "loss": 0.45, + "step": 19121 + }, + { + "epoch": 2.99, + "grad_norm": 31.26487638863152, + "learning_rate": 1.0033257860131428e-09, + "loss": 0.4873, + "step": 19122 + }, + { + "epoch": 2.99, + "grad_norm": 28.505163860036593, + "learning_rate": 9.795796592204643e-10, + "loss": 0.4282, + "step": 19123 + }, + { + "epoch": 2.99, + "grad_norm": 17.571739471281944, + "learning_rate": 9.561178987205211e-10, + "loss": 0.4026, + "step": 19124 + }, + { + "epoch": 2.99, + "grad_norm": 15.528194327419424, + "learning_rate": 9.32940505178337e-10, + "loss": 0.4177, + "step": 19125 + }, + { + "epoch": 2.99, + "grad_norm": 31.210650128317532, + "learning_rate": 9.100474792544944e-10, + "loss": 0.4726, + "step": 19126 + }, + { + "epoch": 2.99, + "grad_norm": 33.317345870618894, + "learning_rate": 8.874388215995844e-10, + "loss": 0.4645, + "step": 19127 + }, + { + "epoch": 2.99, + "grad_norm": 20.593244654948567, + "learning_rate": 8.651145328564259e-10, + "loss": 0.4757, + "step": 19128 + }, 
+ { + "epoch": 2.99, + "grad_norm": 21.82547565416537, + "learning_rate": 8.430746136600665e-10, + "loss": 0.4405, + "step": 19129 + }, + { + "epoch": 2.99, + "grad_norm": 23.637793451431314, + "learning_rate": 8.21319064636672e-10, + "loss": 0.4178, + "step": 19130 + }, + { + "epoch": 2.99, + "grad_norm": 28.063395681827846, + "learning_rate": 7.998478864068571e-10, + "loss": 0.4538, + "step": 19131 + }, + { + "epoch": 2.99, + "grad_norm": 22.113518286915223, + "learning_rate": 7.786610795790239e-10, + "loss": 0.3897, + "step": 19132 + }, + { + "epoch": 2.99, + "grad_norm": 17.41134479960007, + "learning_rate": 7.577586447571339e-10, + "loss": 0.4371, + "step": 19133 + }, + { + "epoch": 2.99, + "grad_norm": 20.617627286972553, + "learning_rate": 7.371405825351563e-10, + "loss": 0.388, + "step": 19134 + }, + { + "epoch": 2.99, + "grad_norm": 33.88056126214513, + "learning_rate": 7.168068934992889e-10, + "loss": 0.5159, + "step": 19135 + }, + { + "epoch": 2.99, + "grad_norm": 22.62335761705785, + "learning_rate": 6.967575782279579e-10, + "loss": 0.3901, + "step": 19136 + }, + { + "epoch": 2.99, + "grad_norm": 17.51772474035553, + "learning_rate": 6.769926372918178e-10, + "loss": 0.3448, + "step": 19137 + }, + { + "epoch": 2.99, + "grad_norm": 26.582755048821255, + "learning_rate": 6.575120712526417e-10, + "loss": 0.4743, + "step": 19138 + }, + { + "epoch": 2.99, + "grad_norm": 25.05416605983395, + "learning_rate": 6.383158806644307e-10, + "loss": 0.4437, + "step": 19139 + }, + { + "epoch": 2.99, + "grad_norm": 27.444541828032182, + "learning_rate": 6.194040660723044e-10, + "loss": 0.3962, + "step": 19140 + }, + { + "epoch": 2.99, + "grad_norm": 14.257702662423737, + "learning_rate": 6.007766280158312e-10, + "loss": 0.3867, + "step": 19141 + }, + { + "epoch": 2.99, + "grad_norm": 27.69013418579101, + "learning_rate": 5.824335670234771e-10, + "loss": 0.4224, + "step": 19142 + }, + { + "epoch": 2.99, + "grad_norm": 20.327416924438346, + "learning_rate": 5.643748836170471e-10, + "loss": 0.3976, + "step": 19143 + }, + { + "epoch": 2.99, + "grad_norm": 21.44905646809337, + "learning_rate": 5.466005783105743e-10, + "loss": 0.4313, + "step": 19144 + }, + { + "epoch": 2.99, + "grad_norm": 17.317397559300634, + "learning_rate": 5.291106516092103e-10, + "loss": 0.4331, + "step": 19145 + }, + { + "epoch": 2.99, + "grad_norm": 26.386520590556934, + "learning_rate": 5.119051040092249e-10, + "loss": 0.3946, + "step": 19146 + }, + { + "epoch": 2.99, + "grad_norm": 17.22320143247084, + "learning_rate": 4.949839360024467e-10, + "loss": 0.4299, + "step": 19147 + }, + { + "epoch": 2.99, + "grad_norm": 17.268333595438364, + "learning_rate": 4.78347148068492e-10, + "loss": 0.4069, + "step": 19148 + }, + { + "epoch": 2.99, + "grad_norm": 14.636166646912105, + "learning_rate": 4.6199474068031603e-10, + "loss": 0.4332, + "step": 19149 + }, + { + "epoch": 2.99, + "grad_norm": 37.888977622175, + "learning_rate": 4.459267143042123e-10, + "loss": 0.538, + "step": 19150 + }, + { + "epoch": 2.99, + "grad_norm": 18.815274491028674, + "learning_rate": 4.301430693964825e-10, + "loss": 0.4236, + "step": 19151 + }, + { + "epoch": 2.99, + "grad_norm": 19.45068957085827, + "learning_rate": 4.1464380640565683e-10, + "loss": 0.3914, + "step": 19152 + }, + { + "epoch": 2.99, + "grad_norm": 25.674100075925043, + "learning_rate": 3.994289257724937e-10, + "loss": 0.3994, + "step": 19153 + }, + { + "epoch": 2.99, + "grad_norm": 19.993605566397438, + "learning_rate": 3.8449842792998016e-10, + "loss": 0.4182, + "step": 19154 + }, + { + 
"epoch": 2.99, + "grad_norm": 16.46876706298002, + "learning_rate": 3.698523133022214e-10, + "loss": 0.4588, + "step": 19155 + }, + { + "epoch": 2.99, + "grad_norm": 17.780245249741547, + "learning_rate": 3.5549058230777143e-10, + "loss": 0.3533, + "step": 19156 + }, + { + "epoch": 2.99, + "grad_norm": 20.89951575376462, + "learning_rate": 3.4141323535186175e-10, + "loss": 0.39, + "step": 19157 + }, + { + "epoch": 2.99, + "grad_norm": 15.106894907918312, + "learning_rate": 3.276202728375033e-10, + "loss": 0.4479, + "step": 19158 + }, + { + "epoch": 2.99, + "grad_norm": 23.51762769795409, + "learning_rate": 3.1411169515549457e-10, + "loss": 0.3296, + "step": 19159 + }, + { + "epoch": 2.99, + "grad_norm": 14.507648721817132, + "learning_rate": 3.0088750269108293e-10, + "loss": 0.413, + "step": 19160 + }, + { + "epoch": 2.99, + "grad_norm": 22.44856247441304, + "learning_rate": 2.8794769581952376e-10, + "loss": 0.4144, + "step": 19161 + }, + { + "epoch": 2.99, + "grad_norm": 26.166302820269554, + "learning_rate": 2.7529227490941115e-10, + "loss": 0.4075, + "step": 19162 + }, + { + "epoch": 2.99, + "grad_norm": 20.290964703556106, + "learning_rate": 2.629212403193471e-10, + "loss": 0.4244, + "step": 19163 + }, + { + "epoch": 2.99, + "grad_norm": 26.950469954927932, + "learning_rate": 2.508345924023825e-10, + "loss": 0.4177, + "step": 19164 + }, + { + "epoch": 2.99, + "grad_norm": 26.53453770813269, + "learning_rate": 2.390323315026866e-10, + "loss": 0.4609, + "step": 19165 + }, + { + "epoch": 2.99, + "grad_norm": 25.715568517200367, + "learning_rate": 2.2751445795443638e-10, + "loss": 0.493, + "step": 19166 + }, + { + "epoch": 2.99, + "grad_norm": 25.548141875046, + "learning_rate": 2.162809720873682e-10, + "loss": 0.4288, + "step": 19167 + }, + { + "epoch": 2.99, + "grad_norm": 15.212979452481388, + "learning_rate": 2.0533187421789557e-10, + "loss": 0.392, + "step": 19168 + }, + { + "epoch": 2.99, + "grad_norm": 24.534495238661925, + "learning_rate": 1.9466716466021162e-10, + "loss": 0.3738, + "step": 19169 + }, + { + "epoch": 2.99, + "grad_norm": 14.67929715322247, + "learning_rate": 1.84286843716297e-10, + "loss": 0.4557, + "step": 19170 + }, + { + "epoch": 2.99, + "grad_norm": 20.541738157376844, + "learning_rate": 1.7419091168036085e-10, + "loss": 0.3638, + "step": 19171 + }, + { + "epoch": 2.99, + "grad_norm": 18.0859665938405, + "learning_rate": 1.6437936884217132e-10, + "loss": 0.4544, + "step": 19172 + }, + { + "epoch": 2.99, + "grad_norm": 23.817494495637863, + "learning_rate": 1.5485221547817396e-10, + "loss": 0.4007, + "step": 19173 + }, + { + "epoch": 3.0, + "grad_norm": 26.901047592987165, + "learning_rate": 1.4560945186148366e-10, + "loss": 0.3542, + "step": 19174 + }, + { + "epoch": 3.0, + "grad_norm": 18.6928571618243, + "learning_rate": 1.3665107825300284e-10, + "loss": 0.4492, + "step": 19175 + }, + { + "epoch": 3.0, + "grad_norm": 29.834554139735523, + "learning_rate": 1.2797709490919296e-10, + "loss": 0.4039, + "step": 19176 + }, + { + "epoch": 3.0, + "grad_norm": 22.011335605512496, + "learning_rate": 1.1958750207541337e-10, + "loss": 0.3808, + "step": 19177 + }, + { + "epoch": 3.0, + "grad_norm": 22.482615369557795, + "learning_rate": 1.114822999914722e-10, + "loss": 0.4353, + "step": 19178 + }, + { + "epoch": 3.0, + "grad_norm": 23.451599940045742, + "learning_rate": 1.0366148888607541e-10, + "loss": 0.4419, + "step": 19179 + }, + { + "epoch": 3.0, + "grad_norm": 18.674575316721064, + "learning_rate": 9.612506898348805e-11, + "loss": 0.475, + "step": 19180 + }, + { + 
"epoch": 3.0, + "grad_norm": 20.415924631877793, + "learning_rate": 8.887304049798317e-11, + "loss": 0.432, + "step": 19181 + }, + { + "epoch": 3.0, + "grad_norm": 24.395616023162404, + "learning_rate": 8.190540363384181e-11, + "loss": 0.4367, + "step": 19182 + }, + { + "epoch": 3.0, + "grad_norm": 20.142571665203263, + "learning_rate": 7.522215859090409e-11, + "loss": 0.4019, + "step": 19183 + }, + { + "epoch": 3.0, + "grad_norm": 30.525650095070272, + "learning_rate": 6.882330556012839e-11, + "loss": 0.3953, + "step": 19184 + }, + { + "epoch": 3.0, + "grad_norm": 26.500252992394028, + "learning_rate": 6.270884472026062e-11, + "loss": 0.4905, + "step": 19185 + }, + { + "epoch": 3.0, + "grad_norm": 26.868225348266858, + "learning_rate": 5.687877624893645e-11, + "loss": 0.5009, + "step": 19186 + }, + { + "epoch": 3.0, + "grad_norm": 16.7091206779695, + "learning_rate": 5.133310030935867e-11, + "loss": 0.4345, + "step": 19187 + }, + { + "epoch": 3.0, + "grad_norm": 19.444532200969107, + "learning_rate": 4.607181705917896e-11, + "loss": 0.4049, + "step": 19188 + }, + { + "epoch": 3.0, + "grad_norm": 31.746449589275194, + "learning_rate": 4.1094926650497855e-11, + "loss": 0.4715, + "step": 19189 + }, + { + "epoch": 3.0, + "grad_norm": 26.348807097256906, + "learning_rate": 3.640242922209325e-11, + "loss": 0.4398, + "step": 19190 + }, + { + "epoch": 3.0, + "grad_norm": 27.03494853284541, + "learning_rate": 3.199432490941234e-11, + "loss": 0.4329, + "step": 19191 + }, + { + "epoch": 3.0, + "grad_norm": 24.461395597782875, + "learning_rate": 2.787061383680012e-11, + "loss": 0.4204, + "step": 19192 + }, + { + "epoch": 3.0, + "grad_norm": 23.763644367276402, + "learning_rate": 2.4031296120829993e-11, + "loss": 0.4877, + "step": 19193 + }, + { + "epoch": 3.0, + "grad_norm": 14.940773575296761, + "learning_rate": 2.047637187252427e-11, + "loss": 0.4944, + "step": 19194 + }, + { + "epoch": 3.0, + "grad_norm": 30.485828040456667, + "learning_rate": 1.720584119180302e-11, + "loss": 0.4837, + "step": 19195 + }, + { + "epoch": 3.0, + "grad_norm": 26.933422551153875, + "learning_rate": 1.4219704171924976e-11, + "loss": 0.4275, + "step": 19196 + }, + { + "epoch": 3.0, + "grad_norm": 22.252548970525478, + "learning_rate": 1.1517960898377312e-11, + "loss": 0.4125, + "step": 19197 + }, + { + "epoch": 3.0, + "grad_norm": 22.012802411460225, + "learning_rate": 9.100611446655194e-12, + "loss": 0.4283, + "step": 19198 + }, + { + "epoch": 3.0, + "grad_norm": 19.45199348039055, + "learning_rate": 6.967655886702674e-12, + "loss": 0.3943, + "step": 19199 + }, + { + "epoch": 3.0, + "grad_norm": 30.535730943838594, + "learning_rate": 5.119094278471792e-12, + "loss": 0.4638, + "step": 19200 + }, + { + "epoch": 3.0, + "grad_norm": 26.369485826707194, + "learning_rate": 3.5549266752532564e-12, + "loss": 0.4573, + "step": 19201 + }, + { + "epoch": 3.0, + "grad_norm": 19.86926674322362, + "learning_rate": 2.275153120345763e-12, + "loss": 0.4187, + "step": 19202 + }, + { + "epoch": 3.0, + "grad_norm": 18.529909896802643, + "learning_rate": 1.2797736514968962e-12, + "loss": 0.4601, + "step": 19203 + }, + { + "epoch": 3.0, + "grad_norm": 20.335206130392084, + "learning_rate": 5.687882964622305e-13, + "loss": 0.4302, + "step": 19204 + }, + { + "epoch": 3.0, + "grad_norm": 21.343195540094797, + "learning_rate": 1.4219707522578064e-13, + "loss": 0.3673, + "step": 19205 + }, + { + "epoch": 3.0, + "grad_norm": 26.62800484030171, + "learning_rate": 0.0, + "loss": 0.4807, + "step": 19206 + }, + { + "epoch": 3.0, + "step": 19206, + 
"total_flos": 8.250659187302859e+18, + "train_loss": 0.6410556696475601, + "train_runtime": 20419.4789, + "train_samples_per_second": 120.393, + "train_steps_per_second": 0.941 + } + ], + "logging_steps": 1.0, + "max_steps": 19206, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 50000, + "total_flos": 8.250659187302859e+18, + "train_batch_size": 2, + "trial_name": null, + "trial_params": null +}