diff --git "a/checkpoint-1128/trainer_state.json" "b/checkpoint-1128/trainer_state.json" new file mode 100644--- /dev/null +++ "b/checkpoint-1128/trainer_state.json" @@ -0,0 +1,7961 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.0, + "eval_steps": 226, + "global_step": 1128, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0017730496453900709, + "grad_norm": 6.88893985748291, + "learning_rate": 2.9999999999999997e-05, + "loss": 4.048, + "step": 1 + }, + { + "epoch": 0.0035460992907801418, + "grad_norm": 6.882226467132568, + "learning_rate": 5.9999999999999995e-05, + "loss": 4.2161, + "step": 2 + }, + { + "epoch": 0.005319148936170213, + "grad_norm": 4.995457649230957, + "learning_rate": 8.999999999999999e-05, + "loss": 3.9501, + "step": 3 + }, + { + "epoch": 0.0070921985815602835, + "grad_norm": 3.743339776992798, + "learning_rate": 0.00011999999999999999, + "loss": 3.7819, + "step": 4 + }, + { + "epoch": 0.008865248226950355, + "grad_norm": 3.3828792572021484, + "learning_rate": 0.00015, + "loss": 3.2351, + "step": 5 + }, + { + "epoch": 0.010638297872340425, + "grad_norm": 3.3940675258636475, + "learning_rate": 0.00017999999999999998, + "loss": 2.7343, + "step": 6 + }, + { + "epoch": 0.012411347517730497, + "grad_norm": 3.3435909748077393, + "learning_rate": 0.00020999999999999998, + "loss": 2.0898, + "step": 7 + }, + { + "epoch": 0.014184397163120567, + "grad_norm": 4.3948235511779785, + "learning_rate": 0.00023999999999999998, + "loss": 1.6608, + "step": 8 + }, + { + "epoch": 0.015957446808510637, + "grad_norm": 4.144540309906006, + "learning_rate": 0.00027, + "loss": 0.9104, + "step": 9 + }, + { + "epoch": 0.01773049645390071, + "grad_norm": 5.76711368560791, + "learning_rate": 0.0003, + "loss": 0.6078, + "step": 10 + }, + { + "epoch": 0.01950354609929078, + "grad_norm": 11.037919998168945, + "learning_rate": 0.00029973166368515205, + "loss": 0.439, + "step": 11 + }, + { + "epoch": 0.02127659574468085, + "grad_norm": 5.379396915435791, + "learning_rate": 0.00029946332737030407, + "loss": 0.5497, + "step": 12 + }, + { + "epoch": 0.02304964539007092, + "grad_norm": 6.821712970733643, + "learning_rate": 0.00029919499105545615, + "loss": 0.6396, + "step": 13 + }, + { + "epoch": 0.024822695035460994, + "grad_norm": 3.520746946334839, + "learning_rate": 0.00029892665474060817, + "loss": 0.4007, + "step": 14 + }, + { + "epoch": 0.026595744680851064, + "grad_norm": 1.7975941896438599, + "learning_rate": 0.00029865831842576025, + "loss": 0.2384, + "step": 15 + }, + { + "epoch": 0.028368794326241134, + "grad_norm": 6.044783592224121, + "learning_rate": 0.0002983899821109123, + "loss": 0.5119, + "step": 16 + }, + { + "epoch": 0.030141843971631204, + "grad_norm": 0.9617128968238831, + "learning_rate": 0.0002981216457960644, + "loss": 0.2032, + "step": 17 + }, + { + "epoch": 0.031914893617021274, + "grad_norm": 1.6461282968521118, + "learning_rate": 0.0002978533094812164, + "loss": 0.3118, + "step": 18 + }, + { + "epoch": 0.03368794326241135, + "grad_norm": 1.2195343971252441, + "learning_rate": 0.0002975849731663685, + "loss": 0.2522, + "step": 19 + }, + { + "epoch": 0.03546099290780142, + "grad_norm": 1.5621528625488281, + "learning_rate": 0.00029731663685152057, + "loss": 0.3118, + "step": 20 + }, + { + "epoch": 0.03723404255319149, + "grad_norm": 1.3373510837554932, + "learning_rate": 0.0002970483005366726, + "loss": 0.2101, + "step": 21 + }, + { + "epoch": 0.03900709219858156, + 
"grad_norm": 0.9622485041618347, + "learning_rate": 0.00029677996422182467, + "loss": 0.1797, + "step": 22 + }, + { + "epoch": 0.040780141843971635, + "grad_norm": 1.7572095394134521, + "learning_rate": 0.0002965116279069767, + "loss": 0.1338, + "step": 23 + }, + { + "epoch": 0.0425531914893617, + "grad_norm": 1.871632695198059, + "learning_rate": 0.00029624329159212877, + "loss": 0.1457, + "step": 24 + }, + { + "epoch": 0.044326241134751775, + "grad_norm": 2.204594612121582, + "learning_rate": 0.00029597495527728084, + "loss": 0.1621, + "step": 25 + }, + { + "epoch": 0.04609929078014184, + "grad_norm": 1.5381090641021729, + "learning_rate": 0.0002957066189624329, + "loss": 0.1703, + "step": 26 + }, + { + "epoch": 0.047872340425531915, + "grad_norm": 0.8811854124069214, + "learning_rate": 0.00029543828264758494, + "loss": 0.0787, + "step": 27 + }, + { + "epoch": 0.04964539007092199, + "grad_norm": 0.8814285397529602, + "learning_rate": 0.000295169946332737, + "loss": 0.2091, + "step": 28 + }, + { + "epoch": 0.051418439716312055, + "grad_norm": 1.371406078338623, + "learning_rate": 0.0002949016100178891, + "loss": 0.3093, + "step": 29 + }, + { + "epoch": 0.05319148936170213, + "grad_norm": 1.1055055856704712, + "learning_rate": 0.0002946332737030411, + "loss": 0.1644, + "step": 30 + }, + { + "epoch": 0.0549645390070922, + "grad_norm": 1.2226752042770386, + "learning_rate": 0.0002943649373881932, + "loss": 0.1597, + "step": 31 + }, + { + "epoch": 0.05673758865248227, + "grad_norm": 1.1774011850357056, + "learning_rate": 0.0002940966010733452, + "loss": 0.243, + "step": 32 + }, + { + "epoch": 0.05851063829787234, + "grad_norm": 1.8663568496704102, + "learning_rate": 0.0002938282647584973, + "loss": 0.3355, + "step": 33 + }, + { + "epoch": 0.06028368794326241, + "grad_norm": 0.7782514095306396, + "learning_rate": 0.0002935599284436493, + "loss": 0.1426, + "step": 34 + }, + { + "epoch": 0.06205673758865248, + "grad_norm": 0.6109825968742371, + "learning_rate": 0.0002932915921288014, + "loss": 0.1334, + "step": 35 + }, + { + "epoch": 0.06382978723404255, + "grad_norm": 1.297591209411621, + "learning_rate": 0.00029302325581395347, + "loss": 0.3717, + "step": 36 + }, + { + "epoch": 0.06560283687943262, + "grad_norm": 0.8587173223495483, + "learning_rate": 0.00029275491949910554, + "loss": 0.1469, + "step": 37 + }, + { + "epoch": 0.0673758865248227, + "grad_norm": 0.8923099637031555, + "learning_rate": 0.00029248658318425756, + "loss": 0.1838, + "step": 38 + }, + { + "epoch": 0.06914893617021277, + "grad_norm": 0.8410468697547913, + "learning_rate": 0.00029221824686940964, + "loss": 0.2471, + "step": 39 + }, + { + "epoch": 0.07092198581560284, + "grad_norm": 0.5034096240997314, + "learning_rate": 0.0002919499105545617, + "loss": 0.0616, + "step": 40 + }, + { + "epoch": 0.0726950354609929, + "grad_norm": 0.31462836265563965, + "learning_rate": 0.00029168157423971374, + "loss": 0.0519, + "step": 41 + }, + { + "epoch": 0.07446808510638298, + "grad_norm": 0.5840567350387573, + "learning_rate": 0.0002914132379248658, + "loss": 0.0814, + "step": 42 + }, + { + "epoch": 0.07624113475177305, + "grad_norm": 0.5441888570785522, + "learning_rate": 0.00029114490161001784, + "loss": 0.0731, + "step": 43 + }, + { + "epoch": 0.07801418439716312, + "grad_norm": 1.0477969646453857, + "learning_rate": 0.0002908765652951699, + "loss": 0.1842, + "step": 44 + }, + { + "epoch": 0.0797872340425532, + "grad_norm": 1.046530842781067, + "learning_rate": 0.000290608228980322, + "loss": 0.2727, + "step": 45 + }, + { + "epoch": 
0.08156028368794327, + "grad_norm": 0.721427857875824, + "learning_rate": 0.00029033989266547406, + "loss": 0.1343, + "step": 46 + }, + { + "epoch": 0.08333333333333333, + "grad_norm": 2.0451300144195557, + "learning_rate": 0.0002900715563506261, + "loss": 0.076, + "step": 47 + }, + { + "epoch": 0.0851063829787234, + "grad_norm": 0.3739748001098633, + "learning_rate": 0.00028980322003577816, + "loss": 0.0419, + "step": 48 + }, + { + "epoch": 0.08687943262411348, + "grad_norm": 0.5523413419723511, + "learning_rate": 0.00028953488372093024, + "loss": 0.0563, + "step": 49 + }, + { + "epoch": 0.08865248226950355, + "grad_norm": 0.5457972288131714, + "learning_rate": 0.00028926654740608226, + "loss": 0.0256, + "step": 50 + }, + { + "epoch": 0.09042553191489362, + "grad_norm": 2.8798046112060547, + "learning_rate": 0.00028899821109123434, + "loss": 0.8653, + "step": 51 + }, + { + "epoch": 0.09219858156028368, + "grad_norm": 2.86541748046875, + "learning_rate": 0.00028872987477638636, + "loss": 0.5141, + "step": 52 + }, + { + "epoch": 0.09397163120567376, + "grad_norm": 10.113655090332031, + "learning_rate": 0.00028846153846153843, + "loss": 0.278, + "step": 53 + }, + { + "epoch": 0.09574468085106383, + "grad_norm": 3.4064385890960693, + "learning_rate": 0.00028819320214669046, + "loss": 0.3353, + "step": 54 + }, + { + "epoch": 0.0975177304964539, + "grad_norm": 4.866039752960205, + "learning_rate": 0.00028792486583184253, + "loss": 0.4487, + "step": 55 + }, + { + "epoch": 0.09929078014184398, + "grad_norm": 5.0878143310546875, + "learning_rate": 0.0002876565295169946, + "loss": 0.2099, + "step": 56 + }, + { + "epoch": 0.10106382978723404, + "grad_norm": 5.835223197937012, + "learning_rate": 0.0002873881932021467, + "loss": 0.4639, + "step": 57 + }, + { + "epoch": 0.10283687943262411, + "grad_norm": 3.5590853691101074, + "learning_rate": 0.00028711985688729876, + "loss": 0.3146, + "step": 58 + }, + { + "epoch": 0.10460992907801418, + "grad_norm": 5.281091213226318, + "learning_rate": 0.0002868515205724508, + "loss": 0.4222, + "step": 59 + }, + { + "epoch": 0.10638297872340426, + "grad_norm": 6.650637149810791, + "learning_rate": 0.00028658318425760286, + "loss": 0.4737, + "step": 60 + }, + { + "epoch": 0.10815602836879433, + "grad_norm": 1.2466630935668945, + "learning_rate": 0.0002863148479427549, + "loss": 0.2634, + "step": 61 + }, + { + "epoch": 0.1099290780141844, + "grad_norm": 0.73129802942276, + "learning_rate": 0.00028604651162790696, + "loss": 0.1581, + "step": 62 + }, + { + "epoch": 0.11170212765957446, + "grad_norm": 0.8522558212280273, + "learning_rate": 0.000285778175313059, + "loss": 0.2586, + "step": 63 + }, + { + "epoch": 0.11347517730496454, + "grad_norm": 0.7632489204406738, + "learning_rate": 0.00028550983899821105, + "loss": 0.2528, + "step": 64 + }, + { + "epoch": 0.11524822695035461, + "grad_norm": 0.4096511900424957, + "learning_rate": 0.00028524150268336313, + "loss": 0.0851, + "step": 65 + }, + { + "epoch": 0.11702127659574468, + "grad_norm": 0.9045661687850952, + "learning_rate": 0.0002849731663685152, + "loss": 0.2253, + "step": 66 + }, + { + "epoch": 0.11879432624113476, + "grad_norm": 0.6839063763618469, + "learning_rate": 0.00028470483005366723, + "loss": 0.1416, + "step": 67 + }, + { + "epoch": 0.12056737588652482, + "grad_norm": 2.112931728363037, + "learning_rate": 0.0002844364937388193, + "loss": 0.2554, + "step": 68 + }, + { + "epoch": 0.12234042553191489, + "grad_norm": 1.187727928161621, + "learning_rate": 0.0002841681574239714, + "loss": 0.2891, + "step": 69 
+ }, + { + "epoch": 0.12411347517730496, + "grad_norm": 0.7449979782104492, + "learning_rate": 0.0002838998211091234, + "loss": 0.1743, + "step": 70 + }, + { + "epoch": 0.12588652482269502, + "grad_norm": 0.5655006766319275, + "learning_rate": 0.0002836314847942755, + "loss": 0.1247, + "step": 71 + }, + { + "epoch": 0.1276595744680851, + "grad_norm": 0.7953659296035767, + "learning_rate": 0.0002833631484794275, + "loss": 0.1277, + "step": 72 + }, + { + "epoch": 0.12943262411347517, + "grad_norm": 0.7307471632957458, + "learning_rate": 0.0002830948121645796, + "loss": 0.2575, + "step": 73 + }, + { + "epoch": 0.13120567375886524, + "grad_norm": 0.815560519695282, + "learning_rate": 0.0002828264758497316, + "loss": 0.2547, + "step": 74 + }, + { + "epoch": 0.13297872340425532, + "grad_norm": 0.7381014227867126, + "learning_rate": 0.0002825581395348837, + "loss": 0.1005, + "step": 75 + }, + { + "epoch": 0.1347517730496454, + "grad_norm": 1.0876243114471436, + "learning_rate": 0.00028228980322003575, + "loss": 0.2536, + "step": 76 + }, + { + "epoch": 0.13652482269503546, + "grad_norm": 0.6369747519493103, + "learning_rate": 0.00028202146690518783, + "loss": 0.1696, + "step": 77 + }, + { + "epoch": 0.13829787234042554, + "grad_norm": 0.5377818942070007, + "learning_rate": 0.0002817531305903399, + "loss": 0.1528, + "step": 78 + }, + { + "epoch": 0.1400709219858156, + "grad_norm": 0.5050985813140869, + "learning_rate": 0.0002814847942754919, + "loss": 0.1376, + "step": 79 + }, + { + "epoch": 0.14184397163120568, + "grad_norm": 0.46020108461380005, + "learning_rate": 0.000281216457960644, + "loss": 0.1164, + "step": 80 + }, + { + "epoch": 0.14361702127659576, + "grad_norm": 0.4778999090194702, + "learning_rate": 0.000280948121645796, + "loss": 0.1078, + "step": 81 + }, + { + "epoch": 0.1453900709219858, + "grad_norm": 0.7942756414413452, + "learning_rate": 0.0002806797853309481, + "loss": 0.2139, + "step": 82 + }, + { + "epoch": 0.14716312056737588, + "grad_norm": 0.5343471169471741, + "learning_rate": 0.0002804114490161001, + "loss": 0.1331, + "step": 83 + }, + { + "epoch": 0.14893617021276595, + "grad_norm": 0.8038661479949951, + "learning_rate": 0.0002801431127012522, + "loss": 0.1801, + "step": 84 + }, + { + "epoch": 0.15070921985815602, + "grad_norm": 0.3083706498146057, + "learning_rate": 0.0002798747763864043, + "loss": 0.0437, + "step": 85 + }, + { + "epoch": 0.1524822695035461, + "grad_norm": 0.6344548463821411, + "learning_rate": 0.00027960644007155635, + "loss": 0.1676, + "step": 86 + }, + { + "epoch": 0.15425531914893617, + "grad_norm": 0.5747407674789429, + "learning_rate": 0.00027933810375670837, + "loss": 0.1079, + "step": 87 + }, + { + "epoch": 0.15602836879432624, + "grad_norm": 0.5270193815231323, + "learning_rate": 0.00027906976744186045, + "loss": 0.1263, + "step": 88 + }, + { + "epoch": 0.15780141843971632, + "grad_norm": 0.4451313614845276, + "learning_rate": 0.0002788014311270125, + "loss": 0.0511, + "step": 89 + }, + { + "epoch": 0.1595744680851064, + "grad_norm": 0.691502034664154, + "learning_rate": 0.00027853309481216455, + "loss": 0.1637, + "step": 90 + }, + { + "epoch": 0.16134751773049646, + "grad_norm": 0.5651169419288635, + "learning_rate": 0.0002782647584973166, + "loss": 0.1371, + "step": 91 + }, + { + "epoch": 0.16312056737588654, + "grad_norm": 0.5306984782218933, + "learning_rate": 0.00027799642218246864, + "loss": 0.1535, + "step": 92 + }, + { + "epoch": 0.16489361702127658, + "grad_norm": 0.5752344727516174, + "learning_rate": 0.0002777280858676207, + "loss": 
0.1622, + "step": 93 + }, + { + "epoch": 0.16666666666666666, + "grad_norm": 0.3410884737968445, + "learning_rate": 0.0002774597495527728, + "loss": 0.0683, + "step": 94 + }, + { + "epoch": 0.16843971631205673, + "grad_norm": 0.6347271203994751, + "learning_rate": 0.0002771914132379248, + "loss": 0.1577, + "step": 95 + }, + { + "epoch": 0.1702127659574468, + "grad_norm": 0.36072492599487305, + "learning_rate": 0.0002769230769230769, + "loss": 0.0367, + "step": 96 + }, + { + "epoch": 0.17198581560283688, + "grad_norm": 0.29759401082992554, + "learning_rate": 0.00027665474060822897, + "loss": 0.0218, + "step": 97 + }, + { + "epoch": 0.17375886524822695, + "grad_norm": 0.37029051780700684, + "learning_rate": 0.00027638640429338105, + "loss": 0.0263, + "step": 98 + }, + { + "epoch": 0.17553191489361702, + "grad_norm": 0.3705526292324066, + "learning_rate": 0.00027611806797853307, + "loss": 0.0219, + "step": 99 + }, + { + "epoch": 0.1773049645390071, + "grad_norm": 0.31463027000427246, + "learning_rate": 0.00027584973166368514, + "loss": 0.023, + "step": 100 + }, + { + "epoch": 0.17907801418439717, + "grad_norm": 1.7321122884750366, + "learning_rate": 0.00027558139534883717, + "loss": 0.6776, + "step": 101 + }, + { + "epoch": 0.18085106382978725, + "grad_norm": 1.03788423538208, + "learning_rate": 0.00027531305903398924, + "loss": 0.4406, + "step": 102 + }, + { + "epoch": 0.18262411347517732, + "grad_norm": 1.13809335231781, + "learning_rate": 0.00027504472271914126, + "loss": 0.4302, + "step": 103 + }, + { + "epoch": 0.18439716312056736, + "grad_norm": 0.6666789650917053, + "learning_rate": 0.00027477638640429334, + "loss": 0.2029, + "step": 104 + }, + { + "epoch": 0.18617021276595744, + "grad_norm": 0.9535943865776062, + "learning_rate": 0.0002745080500894454, + "loss": 0.2808, + "step": 105 + }, + { + "epoch": 0.1879432624113475, + "grad_norm": 1.2909356355667114, + "learning_rate": 0.0002742397137745975, + "loss": 0.3593, + "step": 106 + }, + { + "epoch": 0.18971631205673758, + "grad_norm": 0.9249064326286316, + "learning_rate": 0.0002739713774597495, + "loss": 0.355, + "step": 107 + }, + { + "epoch": 0.19148936170212766, + "grad_norm": 0.5897863507270813, + "learning_rate": 0.0002737030411449016, + "loss": 0.2349, + "step": 108 + }, + { + "epoch": 0.19326241134751773, + "grad_norm": 1.060029149055481, + "learning_rate": 0.00027343470483005367, + "loss": 0.2517, + "step": 109 + }, + { + "epoch": 0.1950354609929078, + "grad_norm": 0.6262145638465881, + "learning_rate": 0.0002731663685152057, + "loss": 0.2119, + "step": 110 + }, + { + "epoch": 0.19680851063829788, + "grad_norm": 0.7881852984428406, + "learning_rate": 0.00027289803220035777, + "loss": 0.2792, + "step": 111 + }, + { + "epoch": 0.19858156028368795, + "grad_norm": 0.7693052887916565, + "learning_rate": 0.0002726296958855098, + "loss": 0.2824, + "step": 112 + }, + { + "epoch": 0.20035460992907803, + "grad_norm": 0.7235965132713318, + "learning_rate": 0.00027236135957066186, + "loss": 0.0918, + "step": 113 + }, + { + "epoch": 0.20212765957446807, + "grad_norm": 1.0118364095687866, + "learning_rate": 0.00027209302325581394, + "loss": 0.211, + "step": 114 + }, + { + "epoch": 0.20390070921985815, + "grad_norm": 0.9211074709892273, + "learning_rate": 0.00027182468694096596, + "loss": 0.2766, + "step": 115 + }, + { + "epoch": 0.20567375886524822, + "grad_norm": 1.3270055055618286, + "learning_rate": 0.00027155635062611804, + "loss": 0.1172, + "step": 116 + }, + { + "epoch": 0.2074468085106383, + "grad_norm": 0.6904440522193909, + 
"learning_rate": 0.0002712880143112701, + "loss": 0.2208, + "step": 117 + }, + { + "epoch": 0.20921985815602837, + "grad_norm": 0.44743916392326355, + "learning_rate": 0.0002710196779964222, + "loss": 0.1265, + "step": 118 + }, + { + "epoch": 0.21099290780141844, + "grad_norm": 1.5493639707565308, + "learning_rate": 0.0002707513416815742, + "loss": 0.2085, + "step": 119 + }, + { + "epoch": 0.2127659574468085, + "grad_norm": 0.7296533584594727, + "learning_rate": 0.0002704830053667263, + "loss": 0.253, + "step": 120 + }, + { + "epoch": 0.21453900709219859, + "grad_norm": 1.300660490989685, + "learning_rate": 0.0002702146690518783, + "loss": 0.2164, + "step": 121 + }, + { + "epoch": 0.21631205673758866, + "grad_norm": 0.9226039052009583, + "learning_rate": 0.0002699463327370304, + "loss": 0.2533, + "step": 122 + }, + { + "epoch": 0.21808510638297873, + "grad_norm": 0.9466855525970459, + "learning_rate": 0.0002696779964221824, + "loss": 0.2528, + "step": 123 + }, + { + "epoch": 0.2198581560283688, + "grad_norm": 0.6484300494194031, + "learning_rate": 0.0002694096601073345, + "loss": 0.1598, + "step": 124 + }, + { + "epoch": 0.22163120567375885, + "grad_norm": 0.7165495157241821, + "learning_rate": 0.00026914132379248656, + "loss": 0.2448, + "step": 125 + }, + { + "epoch": 0.22340425531914893, + "grad_norm": 1.370329737663269, + "learning_rate": 0.00026887298747763864, + "loss": 0.3169, + "step": 126 + }, + { + "epoch": 0.225177304964539, + "grad_norm": 0.41645482182502747, + "learning_rate": 0.00026860465116279066, + "loss": 0.1352, + "step": 127 + }, + { + "epoch": 0.22695035460992907, + "grad_norm": 0.4441884458065033, + "learning_rate": 0.00026833631484794273, + "loss": 0.1376, + "step": 128 + }, + { + "epoch": 0.22872340425531915, + "grad_norm": 0.3925491273403168, + "learning_rate": 0.0002680679785330948, + "loss": 0.1194, + "step": 129 + }, + { + "epoch": 0.23049645390070922, + "grad_norm": 0.3546803295612335, + "learning_rate": 0.00026779964221824683, + "loss": 0.0936, + "step": 130 + }, + { + "epoch": 0.2322695035460993, + "grad_norm": 0.683130145072937, + "learning_rate": 0.0002675313059033989, + "loss": 0.2044, + "step": 131 + }, + { + "epoch": 0.23404255319148937, + "grad_norm": 0.666155219078064, + "learning_rate": 0.00026726296958855093, + "loss": 0.2361, + "step": 132 + }, + { + "epoch": 0.23581560283687944, + "grad_norm": 0.5066004991531372, + "learning_rate": 0.000266994633273703, + "loss": 0.1449, + "step": 133 + }, + { + "epoch": 0.2375886524822695, + "grad_norm": 0.745506763458252, + "learning_rate": 0.0002667262969588551, + "loss": 0.1987, + "step": 134 + }, + { + "epoch": 0.2393617021276596, + "grad_norm": 0.9249691963195801, + "learning_rate": 0.0002664579606440071, + "loss": 0.2485, + "step": 135 + }, + { + "epoch": 0.24113475177304963, + "grad_norm": 0.5891656875610352, + "learning_rate": 0.0002661896243291592, + "loss": 0.2172, + "step": 136 + }, + { + "epoch": 0.2429078014184397, + "grad_norm": 0.6771588325500488, + "learning_rate": 0.00026592128801431126, + "loss": 0.2293, + "step": 137 + }, + { + "epoch": 0.24468085106382978, + "grad_norm": 1.0185132026672363, + "learning_rate": 0.00026565295169946333, + "loss": 0.2819, + "step": 138 + }, + { + "epoch": 0.24645390070921985, + "grad_norm": 0.357791930437088, + "learning_rate": 0.00026538461538461536, + "loss": 0.0876, + "step": 139 + }, + { + "epoch": 0.24822695035460993, + "grad_norm": 0.5244898200035095, + "learning_rate": 0.00026511627906976743, + "loss": 0.0989, + "step": 140 + }, + { + "epoch": 0.25, + 
"grad_norm": 0.553326427936554, + "learning_rate": 0.00026484794275491945, + "loss": 0.1503, + "step": 141 + }, + { + "epoch": 0.25177304964539005, + "grad_norm": 0.7104494571685791, + "learning_rate": 0.00026457960644007153, + "loss": 0.2487, + "step": 142 + }, + { + "epoch": 0.25354609929078015, + "grad_norm": 0.5205265879631042, + "learning_rate": 0.0002643112701252236, + "loss": 0.1185, + "step": 143 + }, + { + "epoch": 0.2553191489361702, + "grad_norm": 0.41840335726737976, + "learning_rate": 0.00026404293381037563, + "loss": 0.0841, + "step": 144 + }, + { + "epoch": 0.2570921985815603, + "grad_norm": 0.572222888469696, + "learning_rate": 0.0002637745974955277, + "loss": 0.1331, + "step": 145 + }, + { + "epoch": 0.25886524822695034, + "grad_norm": 0.8182592391967773, + "learning_rate": 0.0002635062611806798, + "loss": 0.2042, + "step": 146 + }, + { + "epoch": 0.26063829787234044, + "grad_norm": 0.5115887522697449, + "learning_rate": 0.0002632379248658318, + "loss": 0.1148, + "step": 147 + }, + { + "epoch": 0.2624113475177305, + "grad_norm": 0.5078725218772888, + "learning_rate": 0.0002629695885509839, + "loss": 0.0762, + "step": 148 + }, + { + "epoch": 0.2641843971631206, + "grad_norm": 0.22413299977779388, + "learning_rate": 0.00026270125223613595, + "loss": 0.0355, + "step": 149 + }, + { + "epoch": 0.26595744680851063, + "grad_norm": 0.25390034914016724, + "learning_rate": 0.000262432915921288, + "loss": 0.0319, + "step": 150 + }, + { + "epoch": 0.26773049645390073, + "grad_norm": 3.294741153717041, + "learning_rate": 0.00026216457960644005, + "loss": 0.4876, + "step": 151 + }, + { + "epoch": 0.2695035460992908, + "grad_norm": 1.5393906831741333, + "learning_rate": 0.0002618962432915921, + "loss": 0.2547, + "step": 152 + }, + { + "epoch": 0.2712765957446808, + "grad_norm": 1.7025604248046875, + "learning_rate": 0.00026162790697674415, + "loss": 0.4747, + "step": 153 + }, + { + "epoch": 0.2730496453900709, + "grad_norm": 0.8344413638114929, + "learning_rate": 0.0002613595706618962, + "loss": 0.2431, + "step": 154 + }, + { + "epoch": 0.274822695035461, + "grad_norm": 0.9473798871040344, + "learning_rate": 0.00026109123434704825, + "loss": 0.2135, + "step": 155 + }, + { + "epoch": 0.2765957446808511, + "grad_norm": 1.0257476568222046, + "learning_rate": 0.0002608228980322003, + "loss": 0.1833, + "step": 156 + }, + { + "epoch": 0.2783687943262411, + "grad_norm": 1.2221112251281738, + "learning_rate": 0.0002605545617173524, + "loss": 0.3174, + "step": 157 + }, + { + "epoch": 0.2801418439716312, + "grad_norm": 0.5619825124740601, + "learning_rate": 0.0002602862254025045, + "loss": 0.1492, + "step": 158 + }, + { + "epoch": 0.28191489361702127, + "grad_norm": 1.1845368146896362, + "learning_rate": 0.0002600178890876565, + "loss": 0.4336, + "step": 159 + }, + { + "epoch": 0.28368794326241137, + "grad_norm": 1.2730036973953247, + "learning_rate": 0.0002597495527728086, + "loss": 0.3466, + "step": 160 + }, + { + "epoch": 0.2854609929078014, + "grad_norm": 0.4464946985244751, + "learning_rate": 0.0002594812164579606, + "loss": 0.1239, + "step": 161 + }, + { + "epoch": 0.2872340425531915, + "grad_norm": 0.5267930030822754, + "learning_rate": 0.00025921288014311267, + "loss": 0.1372, + "step": 162 + }, + { + "epoch": 0.28900709219858156, + "grad_norm": 0.4249666929244995, + "learning_rate": 0.00025894454382826475, + "loss": 0.1402, + "step": 163 + }, + { + "epoch": 0.2907801418439716, + "grad_norm": 0.7619920372962952, + "learning_rate": 0.00025867620751341677, + "loss": 0.2309, + "step": 164 + 
}, + { + "epoch": 0.2925531914893617, + "grad_norm": 0.9128990769386292, + "learning_rate": 0.00025840787119856885, + "loss": 0.2832, + "step": 165 + }, + { + "epoch": 0.29432624113475175, + "grad_norm": 0.4988587200641632, + "learning_rate": 0.0002581395348837209, + "loss": 0.1517, + "step": 166 + }, + { + "epoch": 0.29609929078014185, + "grad_norm": 0.7568597793579102, + "learning_rate": 0.000257871198568873, + "loss": 0.2449, + "step": 167 + }, + { + "epoch": 0.2978723404255319, + "grad_norm": 0.7556333541870117, + "learning_rate": 0.000257602862254025, + "loss": 0.2591, + "step": 168 + }, + { + "epoch": 0.299645390070922, + "grad_norm": 0.9023774266242981, + "learning_rate": 0.0002573345259391771, + "loss": 0.239, + "step": 169 + }, + { + "epoch": 0.30141843971631205, + "grad_norm": 0.490642786026001, + "learning_rate": 0.0002570661896243291, + "loss": 0.1508, + "step": 170 + }, + { + "epoch": 0.30319148936170215, + "grad_norm": 0.6215077042579651, + "learning_rate": 0.0002567978533094812, + "loss": 0.2277, + "step": 171 + }, + { + "epoch": 0.3049645390070922, + "grad_norm": 0.4409044086933136, + "learning_rate": 0.00025652951699463327, + "loss": 0.1354, + "step": 172 + }, + { + "epoch": 0.3067375886524823, + "grad_norm": 0.420749694108963, + "learning_rate": 0.0002562611806797853, + "loss": 0.068, + "step": 173 + }, + { + "epoch": 0.30851063829787234, + "grad_norm": 0.40215224027633667, + "learning_rate": 0.00025599284436493737, + "loss": 0.0707, + "step": 174 + }, + { + "epoch": 0.3102836879432624, + "grad_norm": 0.7107430696487427, + "learning_rate": 0.0002557245080500894, + "loss": 0.1413, + "step": 175 + }, + { + "epoch": 0.3120567375886525, + "grad_norm": 0.9405664801597595, + "learning_rate": 0.00025545617173524147, + "loss": 0.1555, + "step": 176 + }, + { + "epoch": 0.31382978723404253, + "grad_norm": 0.9159305095672607, + "learning_rate": 0.00025518783542039354, + "loss": 0.1827, + "step": 177 + }, + { + "epoch": 0.31560283687943264, + "grad_norm": 1.2095725536346436, + "learning_rate": 0.0002549194991055456, + "loss": 0.2713, + "step": 178 + }, + { + "epoch": 0.3173758865248227, + "grad_norm": 0.5971267223358154, + "learning_rate": 0.00025465116279069764, + "loss": 0.2094, + "step": 179 + }, + { + "epoch": 0.3191489361702128, + "grad_norm": 0.5078379511833191, + "learning_rate": 0.0002543828264758497, + "loss": 0.1102, + "step": 180 + }, + { + "epoch": 0.32092198581560283, + "grad_norm": 0.5540158748626709, + "learning_rate": 0.0002541144901610018, + "loss": 0.1408, + "step": 181 + }, + { + "epoch": 0.32269503546099293, + "grad_norm": 0.3938644826412201, + "learning_rate": 0.0002538461538461538, + "loss": 0.1044, + "step": 182 + }, + { + "epoch": 0.324468085106383, + "grad_norm": 0.43856045603752136, + "learning_rate": 0.0002535778175313059, + "loss": 0.1069, + "step": 183 + }, + { + "epoch": 0.3262411347517731, + "grad_norm": 0.6097897887229919, + "learning_rate": 0.0002533094812164579, + "loss": 0.1447, + "step": 184 + }, + { + "epoch": 0.3280141843971631, + "grad_norm": 0.6639567613601685, + "learning_rate": 0.00025304114490161, + "loss": 0.1622, + "step": 185 + }, + { + "epoch": 0.32978723404255317, + "grad_norm": 0.4464520812034607, + "learning_rate": 0.00025277280858676207, + "loss": 0.1113, + "step": 186 + }, + { + "epoch": 0.33156028368794327, + "grad_norm": 0.45327168703079224, + "learning_rate": 0.00025250447227191414, + "loss": 0.1121, + "step": 187 + }, + { + "epoch": 0.3333333333333333, + "grad_norm": 0.4880326986312866, + "learning_rate": 0.00025223613595706616, 
+ "loss": 0.1055, + "step": 188 + }, + { + "epoch": 0.3351063829787234, + "grad_norm": 0.44586634635925293, + "learning_rate": 0.00025196779964221824, + "loss": 0.1544, + "step": 189 + }, + { + "epoch": 0.33687943262411346, + "grad_norm": 0.7135107517242432, + "learning_rate": 0.00025169946332737026, + "loss": 0.1047, + "step": 190 + }, + { + "epoch": 0.33865248226950356, + "grad_norm": 0.5775635838508606, + "learning_rate": 0.00025143112701252234, + "loss": 0.175, + "step": 191 + }, + { + "epoch": 0.3404255319148936, + "grad_norm": 0.6252586841583252, + "learning_rate": 0.0002511627906976744, + "loss": 0.1478, + "step": 192 + }, + { + "epoch": 0.3421985815602837, + "grad_norm": 2.061342477798462, + "learning_rate": 0.00025089445438282644, + "loss": 0.1931, + "step": 193 + }, + { + "epoch": 0.34397163120567376, + "grad_norm": 0.47072601318359375, + "learning_rate": 0.0002506261180679785, + "loss": 0.0928, + "step": 194 + }, + { + "epoch": 0.34574468085106386, + "grad_norm": 0.5164952278137207, + "learning_rate": 0.00025035778175313053, + "loss": 0.1287, + "step": 195 + }, + { + "epoch": 0.3475177304964539, + "grad_norm": 0.44344618916511536, + "learning_rate": 0.0002500894454382826, + "loss": 0.0924, + "step": 196 + }, + { + "epoch": 0.34929078014184395, + "grad_norm": 0.4380563795566559, + "learning_rate": 0.0002498211091234347, + "loss": 0.1393, + "step": 197 + }, + { + "epoch": 0.35106382978723405, + "grad_norm": 0.3074440360069275, + "learning_rate": 0.00024955277280858676, + "loss": 0.04, + "step": 198 + }, + { + "epoch": 0.3528368794326241, + "grad_norm": 0.25228482484817505, + "learning_rate": 0.0002492844364937388, + "loss": 0.037, + "step": 199 + }, + { + "epoch": 0.3546099290780142, + "grad_norm": 0.4954518973827362, + "learning_rate": 0.00024901610017889086, + "loss": 0.1563, + "step": 200 + }, + { + "epoch": 0.35638297872340424, + "grad_norm": 9.530560493469238, + "learning_rate": 0.00024874776386404294, + "loss": 0.6406, + "step": 201 + }, + { + "epoch": 0.35815602836879434, + "grad_norm": 1.2426095008850098, + "learning_rate": 0.00024847942754919496, + "loss": 0.5336, + "step": 202 + }, + { + "epoch": 0.3599290780141844, + "grad_norm": 1.4315325021743774, + "learning_rate": 0.00024821109123434703, + "loss": 0.4691, + "step": 203 + }, + { + "epoch": 0.3617021276595745, + "grad_norm": 1.3048845529556274, + "learning_rate": 0.00024794275491949906, + "loss": 0.4098, + "step": 204 + }, + { + "epoch": 0.36347517730496454, + "grad_norm": 1.1803832054138184, + "learning_rate": 0.00024767441860465113, + "loss": 0.3023, + "step": 205 + }, + { + "epoch": 0.36524822695035464, + "grad_norm": 0.895564079284668, + "learning_rate": 0.0002474060822898032, + "loss": 0.2474, + "step": 206 + }, + { + "epoch": 0.3670212765957447, + "grad_norm": 0.7290531992912292, + "learning_rate": 0.0002471377459749553, + "loss": 0.2365, + "step": 207 + }, + { + "epoch": 0.36879432624113473, + "grad_norm": 0.5765687823295593, + "learning_rate": 0.0002468694096601073, + "loss": 0.2233, + "step": 208 + }, + { + "epoch": 0.37056737588652483, + "grad_norm": 1.518298625946045, + "learning_rate": 0.0002466010733452594, + "loss": 0.3787, + "step": 209 + }, + { + "epoch": 0.3723404255319149, + "grad_norm": 0.9093344211578369, + "learning_rate": 0.00024633273703041146, + "loss": 0.2682, + "step": 210 + }, + { + "epoch": 0.374113475177305, + "grad_norm": 0.9133920669555664, + "learning_rate": 0.0002460644007155635, + "loss": 0.2236, + "step": 211 + }, + { + "epoch": 0.375886524822695, + "grad_norm": 0.9125071167945862, + 
"learning_rate": 0.00024579606440071556, + "loss": 0.344, + "step": 212 + }, + { + "epoch": 0.3776595744680851, + "grad_norm": 0.9353806972503662, + "learning_rate": 0.0002455277280858676, + "loss": 0.3213, + "step": 213 + }, + { + "epoch": 0.37943262411347517, + "grad_norm": 0.32626235485076904, + "learning_rate": 0.00024525939177101966, + "loss": 0.089, + "step": 214 + }, + { + "epoch": 0.38120567375886527, + "grad_norm": 0.722865104675293, + "learning_rate": 0.0002449910554561717, + "loss": 0.2475, + "step": 215 + }, + { + "epoch": 0.3829787234042553, + "grad_norm": 1.2499586343765259, + "learning_rate": 0.00024472271914132375, + "loss": 0.1726, + "step": 216 + }, + { + "epoch": 0.38475177304964536, + "grad_norm": 0.8918619155883789, + "learning_rate": 0.00024445438282647583, + "loss": 0.1832, + "step": 217 + }, + { + "epoch": 0.38652482269503546, + "grad_norm": 0.9703326225280762, + "learning_rate": 0.0002441860465116279, + "loss": 0.3934, + "step": 218 + }, + { + "epoch": 0.3882978723404255, + "grad_norm": 0.9967647194862366, + "learning_rate": 0.00024391771019677995, + "loss": 0.2148, + "step": 219 + }, + { + "epoch": 0.3900709219858156, + "grad_norm": 0.6736385226249695, + "learning_rate": 0.000243649373881932, + "loss": 0.2019, + "step": 220 + }, + { + "epoch": 0.39184397163120566, + "grad_norm": 0.6814044117927551, + "learning_rate": 0.00024338103756708408, + "loss": 0.3198, + "step": 221 + }, + { + "epoch": 0.39361702127659576, + "grad_norm": 0.6858790516853333, + "learning_rate": 0.0002431127012522361, + "loss": 0.2017, + "step": 222 + }, + { + "epoch": 0.3953900709219858, + "grad_norm": 0.4301723837852478, + "learning_rate": 0.00024284436493738818, + "loss": 0.1283, + "step": 223 + }, + { + "epoch": 0.3971631205673759, + "grad_norm": 0.47749027609825134, + "learning_rate": 0.00024257602862254023, + "loss": 0.1513, + "step": 224 + }, + { + "epoch": 0.39893617021276595, + "grad_norm": 0.5345844626426697, + "learning_rate": 0.0002423076923076923, + "loss": 0.0872, + "step": 225 + }, + { + "epoch": 0.40070921985815605, + "grad_norm": 0.5137667059898376, + "learning_rate": 0.00024203935599284433, + "loss": 0.0726, + "step": 226 + }, + { + "epoch": 0.40070921985815605, + "eval_loss": 0.08563709259033203, + "eval_runtime": 47.4226, + "eval_samples_per_second": 2.657, + "eval_steps_per_second": 2.657, + "step": 226 + }, + { + "epoch": 0.4024822695035461, + "grad_norm": 0.5459873080253601, + "learning_rate": 0.0002417710196779964, + "loss": 0.1341, + "step": 227 + }, + { + "epoch": 0.40425531914893614, + "grad_norm": 0.7590123414993286, + "learning_rate": 0.00024150268336314848, + "loss": 0.2418, + "step": 228 + }, + { + "epoch": 0.40602836879432624, + "grad_norm": 0.8846575617790222, + "learning_rate": 0.00024123434704830053, + "loss": 0.1606, + "step": 229 + }, + { + "epoch": 0.4078014184397163, + "grad_norm": 0.7346479296684265, + "learning_rate": 0.00024096601073345258, + "loss": 0.2458, + "step": 230 + }, + { + "epoch": 0.4095744680851064, + "grad_norm": 0.7012773752212524, + "learning_rate": 0.00024069767441860462, + "loss": 0.1353, + "step": 231 + }, + { + "epoch": 0.41134751773049644, + "grad_norm": 0.5895172357559204, + "learning_rate": 0.0002404293381037567, + "loss": 0.2138, + "step": 232 + }, + { + "epoch": 0.41312056737588654, + "grad_norm": 0.6732155680656433, + "learning_rate": 0.00024016100178890872, + "loss": 0.0906, + "step": 233 + }, + { + "epoch": 0.4148936170212766, + "grad_norm": 0.38057422637939453, + "learning_rate": 0.0002398926654740608, + "loss": 0.0973, + 
"step": 234 + }, + { + "epoch": 0.4166666666666667, + "grad_norm": 0.4375483989715576, + "learning_rate": 0.00023962432915921285, + "loss": 0.0895, + "step": 235 + }, + { + "epoch": 0.41843971631205673, + "grad_norm": 0.45380473136901855, + "learning_rate": 0.00023935599284436492, + "loss": 0.1046, + "step": 236 + }, + { + "epoch": 0.42021276595744683, + "grad_norm": 0.38564634323120117, + "learning_rate": 0.00023908765652951695, + "loss": 0.0899, + "step": 237 + }, + { + "epoch": 0.4219858156028369, + "grad_norm": 0.837587296962738, + "learning_rate": 0.00023881932021466902, + "loss": 0.2611, + "step": 238 + }, + { + "epoch": 0.4237588652482269, + "grad_norm": 0.9749343991279602, + "learning_rate": 0.0002385509838998211, + "loss": 0.1351, + "step": 239 + }, + { + "epoch": 0.425531914893617, + "grad_norm": 0.7198343873023987, + "learning_rate": 0.00023828264758497315, + "loss": 0.1716, + "step": 240 + }, + { + "epoch": 0.42730496453900707, + "grad_norm": 0.6409445405006409, + "learning_rate": 0.00023801431127012522, + "loss": 0.1539, + "step": 241 + }, + { + "epoch": 0.42907801418439717, + "grad_norm": 0.6468048095703125, + "learning_rate": 0.00023774597495527724, + "loss": 0.2017, + "step": 242 + }, + { + "epoch": 0.4308510638297872, + "grad_norm": 0.3550443947315216, + "learning_rate": 0.00023747763864042932, + "loss": 0.102, + "step": 243 + }, + { + "epoch": 0.4326241134751773, + "grad_norm": 0.5282692313194275, + "learning_rate": 0.00023720930232558137, + "loss": 0.0795, + "step": 244 + }, + { + "epoch": 0.43439716312056736, + "grad_norm": 0.4462401568889618, + "learning_rate": 0.00023694096601073345, + "loss": 0.1051, + "step": 245 + }, + { + "epoch": 0.43617021276595747, + "grad_norm": 0.33794382214546204, + "learning_rate": 0.00023667262969588547, + "loss": 0.0688, + "step": 246 + }, + { + "epoch": 0.4379432624113475, + "grad_norm": 0.3550000786781311, + "learning_rate": 0.00023640429338103754, + "loss": 0.0511, + "step": 247 + }, + { + "epoch": 0.4397163120567376, + "grad_norm": 0.2764873206615448, + "learning_rate": 0.00023613595706618962, + "loss": 0.0336, + "step": 248 + }, + { + "epoch": 0.44148936170212766, + "grad_norm": 0.2158241719007492, + "learning_rate": 0.00023586762075134167, + "loss": 0.0275, + "step": 249 + }, + { + "epoch": 0.4432624113475177, + "grad_norm": 0.41014501452445984, + "learning_rate": 0.00023559928443649372, + "loss": 0.1071, + "step": 250 + }, + { + "epoch": 0.4450354609929078, + "grad_norm": 1.6365233659744263, + "learning_rate": 0.00023533094812164577, + "loss": 0.5651, + "step": 251 + }, + { + "epoch": 0.44680851063829785, + "grad_norm": 2.2182910442352295, + "learning_rate": 0.00023506261180679784, + "loss": 0.4931, + "step": 252 + }, + { + "epoch": 0.44858156028368795, + "grad_norm": 0.7922513484954834, + "learning_rate": 0.0002347942754919499, + "loss": 0.165, + "step": 253 + }, + { + "epoch": 0.450354609929078, + "grad_norm": 1.0960763692855835, + "learning_rate": 0.00023452593917710194, + "loss": 0.2092, + "step": 254 + }, + { + "epoch": 0.4521276595744681, + "grad_norm": 0.9665970802307129, + "learning_rate": 0.000234257602862254, + "loss": 0.3383, + "step": 255 + }, + { + "epoch": 0.45390070921985815, + "grad_norm": 0.9593271613121033, + "learning_rate": 0.00023398926654740607, + "loss": 0.2608, + "step": 256 + }, + { + "epoch": 0.45567375886524825, + "grad_norm": 0.9603182673454285, + "learning_rate": 0.00023372093023255814, + "loss": 0.3786, + "step": 257 + }, + { + "epoch": 0.4574468085106383, + "grad_norm": 0.6080289483070374, + 
"learning_rate": 0.00023345259391771016, + "loss": 0.1903, + "step": 258 + }, + { + "epoch": 0.4592198581560284, + "grad_norm": 0.4713785946369171, + "learning_rate": 0.00023318425760286224, + "loss": 0.1438, + "step": 259 + }, + { + "epoch": 0.46099290780141844, + "grad_norm": 0.7215020656585693, + "learning_rate": 0.0002329159212880143, + "loss": 0.1979, + "step": 260 + }, + { + "epoch": 0.4627659574468085, + "grad_norm": 0.7383154630661011, + "learning_rate": 0.00023264758497316637, + "loss": 0.3316, + "step": 261 + }, + { + "epoch": 0.4645390070921986, + "grad_norm": 0.8826196789741516, + "learning_rate": 0.0002323792486583184, + "loss": 0.3337, + "step": 262 + }, + { + "epoch": 0.46631205673758863, + "grad_norm": 0.6072385907173157, + "learning_rate": 0.00023211091234347046, + "loss": 0.2585, + "step": 263 + }, + { + "epoch": 0.46808510638297873, + "grad_norm": 0.5861011743545532, + "learning_rate": 0.0002318425760286225, + "loss": 0.1952, + "step": 264 + }, + { + "epoch": 0.4698581560283688, + "grad_norm": 0.8113440871238708, + "learning_rate": 0.0002315742397137746, + "loss": 0.1877, + "step": 265 + }, + { + "epoch": 0.4716312056737589, + "grad_norm": 0.3667961359024048, + "learning_rate": 0.00023130590339892664, + "loss": 0.0977, + "step": 266 + }, + { + "epoch": 0.4734042553191489, + "grad_norm": 0.4013451933860779, + "learning_rate": 0.0002310375670840787, + "loss": 0.0988, + "step": 267 + }, + { + "epoch": 0.475177304964539, + "grad_norm": 0.4402759373188019, + "learning_rate": 0.00023076923076923076, + "loss": 0.1747, + "step": 268 + }, + { + "epoch": 0.4769503546099291, + "grad_norm": 0.5780351161956787, + "learning_rate": 0.0002305008944543828, + "loss": 0.1446, + "step": 269 + }, + { + "epoch": 0.4787234042553192, + "grad_norm": 1.0400667190551758, + "learning_rate": 0.00023023255813953486, + "loss": 0.3404, + "step": 270 + }, + { + "epoch": 0.4804964539007092, + "grad_norm": 0.734530508518219, + "learning_rate": 0.0002299642218246869, + "loss": 0.1938, + "step": 271 + }, + { + "epoch": 0.48226950354609927, + "grad_norm": 0.4358377158641815, + "learning_rate": 0.000229695885509839, + "loss": 0.1514, + "step": 272 + }, + { + "epoch": 0.48404255319148937, + "grad_norm": 0.766135036945343, + "learning_rate": 0.00022942754919499104, + "loss": 0.2335, + "step": 273 + }, + { + "epoch": 0.4858156028368794, + "grad_norm": 0.8013383746147156, + "learning_rate": 0.00022915921288014308, + "loss": 0.2433, + "step": 274 + }, + { + "epoch": 0.4875886524822695, + "grad_norm": 0.48911821842193604, + "learning_rate": 0.00022889087656529513, + "loss": 0.1466, + "step": 275 + }, + { + "epoch": 0.48936170212765956, + "grad_norm": 0.3731197118759155, + "learning_rate": 0.0002286225402504472, + "loss": 0.095, + "step": 276 + }, + { + "epoch": 0.49113475177304966, + "grad_norm": 0.4375539720058441, + "learning_rate": 0.00022835420393559929, + "loss": 0.1066, + "step": 277 + }, + { + "epoch": 0.4929078014184397, + "grad_norm": 0.5769888758659363, + "learning_rate": 0.0002280858676207513, + "loss": 0.1959, + "step": 278 + }, + { + "epoch": 0.4946808510638298, + "grad_norm": 0.6133276224136353, + "learning_rate": 0.00022781753130590338, + "loss": 0.1216, + "step": 279 + }, + { + "epoch": 0.49645390070921985, + "grad_norm": 0.8098652362823486, + "learning_rate": 0.00022754919499105543, + "loss": 0.223, + "step": 280 + }, + { + "epoch": 0.49822695035460995, + "grad_norm": 0.38705819845199585, + "learning_rate": 0.0002272808586762075, + "loss": 0.0888, + "step": 281 + }, + { + "epoch": 0.5, + 
"grad_norm": 0.37354519963264465, + "learning_rate": 0.00022701252236135953, + "loss": 0.1013, + "step": 282 + }, + { + "epoch": 0.50177304964539, + "grad_norm": 0.6955541968345642, + "learning_rate": 0.0002267441860465116, + "loss": 0.1746, + "step": 283 + }, + { + "epoch": 0.5035460992907801, + "grad_norm": 0.44892531633377075, + "learning_rate": 0.00022647584973166366, + "loss": 0.123, + "step": 284 + }, + { + "epoch": 0.5053191489361702, + "grad_norm": 0.7901389598846436, + "learning_rate": 0.00022620751341681573, + "loss": 0.2863, + "step": 285 + }, + { + "epoch": 0.5070921985815603, + "grad_norm": 0.6648136973381042, + "learning_rate": 0.00022593917710196778, + "loss": 0.1577, + "step": 286 + }, + { + "epoch": 0.5088652482269503, + "grad_norm": 0.3378174602985382, + "learning_rate": 0.00022567084078711983, + "loss": 0.0916, + "step": 287 + }, + { + "epoch": 0.5106382978723404, + "grad_norm": 0.46413183212280273, + "learning_rate": 0.0002254025044722719, + "loss": 0.1292, + "step": 288 + }, + { + "epoch": 0.5124113475177305, + "grad_norm": 0.5988323092460632, + "learning_rate": 0.00022513416815742396, + "loss": 0.1586, + "step": 289 + }, + { + "epoch": 0.5141843971631206, + "grad_norm": 0.5859420299530029, + "learning_rate": 0.000224865831842576, + "loss": 0.1296, + "step": 290 + }, + { + "epoch": 0.5159574468085106, + "grad_norm": 0.4029462933540344, + "learning_rate": 0.00022459749552772805, + "loss": 0.1072, + "step": 291 + }, + { + "epoch": 0.5177304964539007, + "grad_norm": 0.28882545232772827, + "learning_rate": 0.00022432915921288013, + "loss": 0.0747, + "step": 292 + }, + { + "epoch": 0.5195035460992907, + "grad_norm": 0.35556840896606445, + "learning_rate": 0.00022406082289803218, + "loss": 0.0463, + "step": 293 + }, + { + "epoch": 0.5212765957446809, + "grad_norm": 0.5565528273582458, + "learning_rate": 0.00022379248658318423, + "loss": 0.0675, + "step": 294 + }, + { + "epoch": 0.5230496453900709, + "grad_norm": 0.5911768078804016, + "learning_rate": 0.0002235241502683363, + "loss": 0.1442, + "step": 295 + }, + { + "epoch": 0.524822695035461, + "grad_norm": 0.4034654498100281, + "learning_rate": 0.00022325581395348835, + "loss": 0.1111, + "step": 296 + }, + { + "epoch": 0.526595744680851, + "grad_norm": 0.35688602924346924, + "learning_rate": 0.00022298747763864043, + "loss": 0.0828, + "step": 297 + }, + { + "epoch": 0.5283687943262412, + "grad_norm": 0.6449804306030273, + "learning_rate": 0.00022271914132379245, + "loss": 0.2104, + "step": 298 + }, + { + "epoch": 0.5301418439716312, + "grad_norm": 0.5375797152519226, + "learning_rate": 0.00022245080500894453, + "loss": 0.1546, + "step": 299 + }, + { + "epoch": 0.5319148936170213, + "grad_norm": 0.18811915814876556, + "learning_rate": 0.00022218246869409658, + "loss": 0.0246, + "step": 300 + }, + { + "epoch": 0.5336879432624113, + "grad_norm": 1.5281504392623901, + "learning_rate": 0.00022191413237924865, + "loss": 0.5446, + "step": 301 + }, + { + "epoch": 0.5354609929078015, + "grad_norm": 1.5097373723983765, + "learning_rate": 0.00022164579606440067, + "loss": 0.4033, + "step": 302 + }, + { + "epoch": 0.5372340425531915, + "grad_norm": 1.4294488430023193, + "learning_rate": 0.00022137745974955275, + "loss": 0.3835, + "step": 303 + }, + { + "epoch": 0.5390070921985816, + "grad_norm": 0.7668700218200684, + "learning_rate": 0.00022110912343470483, + "loss": 0.2103, + "step": 304 + }, + { + "epoch": 0.5407801418439716, + "grad_norm": 0.9126233458518982, + "learning_rate": 0.00022084078711985688, + "loss": 0.39, + "step": 305 + 
}, + { + "epoch": 0.5425531914893617, + "grad_norm": 0.7275278568267822, + "learning_rate": 0.00022057245080500892, + "loss": 0.2128, + "step": 306 + }, + { + "epoch": 0.5443262411347518, + "grad_norm": 0.6632657647132874, + "learning_rate": 0.00022030411449016097, + "loss": 0.1819, + "step": 307 + }, + { + "epoch": 0.5460992907801419, + "grad_norm": 0.5265213847160339, + "learning_rate": 0.00022003577817531305, + "loss": 0.1879, + "step": 308 + }, + { + "epoch": 0.5478723404255319, + "grad_norm": 0.4046447277069092, + "learning_rate": 0.0002197674418604651, + "loss": 0.1225, + "step": 309 + }, + { + "epoch": 0.549645390070922, + "grad_norm": 0.7889742851257324, + "learning_rate": 0.00021949910554561715, + "loss": 0.3022, + "step": 310 + }, + { + "epoch": 0.5514184397163121, + "grad_norm": 0.739321768283844, + "learning_rate": 0.0002192307692307692, + "loss": 0.1621, + "step": 311 + }, + { + "epoch": 0.5531914893617021, + "grad_norm": 0.5458974838256836, + "learning_rate": 0.00021896243291592127, + "loss": 0.1505, + "step": 312 + }, + { + "epoch": 0.5549645390070922, + "grad_norm": 0.6029899716377258, + "learning_rate": 0.00021869409660107332, + "loss": 0.2114, + "step": 313 + }, + { + "epoch": 0.5567375886524822, + "grad_norm": 0.830324113368988, + "learning_rate": 0.00021842576028622537, + "loss": 0.2397, + "step": 314 + }, + { + "epoch": 0.5585106382978723, + "grad_norm": 0.7340556383132935, + "learning_rate": 0.00021815742397137745, + "loss": 0.1606, + "step": 315 + }, + { + "epoch": 0.5602836879432624, + "grad_norm": 0.44604039192199707, + "learning_rate": 0.0002178890876565295, + "loss": 0.1192, + "step": 316 + }, + { + "epoch": 0.5620567375886525, + "grad_norm": 0.37659141421318054, + "learning_rate": 0.00021762075134168157, + "loss": 0.1112, + "step": 317 + }, + { + "epoch": 0.5638297872340425, + "grad_norm": 0.8448424935340881, + "learning_rate": 0.0002173524150268336, + "loss": 0.1799, + "step": 318 + }, + { + "epoch": 0.5656028368794326, + "grad_norm": 0.5874999761581421, + "learning_rate": 0.00021708407871198567, + "loss": 0.2011, + "step": 319 + }, + { + "epoch": 0.5673758865248227, + "grad_norm": 0.5802026391029358, + "learning_rate": 0.00021681574239713772, + "loss": 0.1725, + "step": 320 + }, + { + "epoch": 0.5691489361702128, + "grad_norm": 0.5646753907203674, + "learning_rate": 0.0002165474060822898, + "loss": 0.1464, + "step": 321 + }, + { + "epoch": 0.5709219858156028, + "grad_norm": 0.5598042011260986, + "learning_rate": 0.00021627906976744182, + "loss": 0.1135, + "step": 322 + }, + { + "epoch": 0.5726950354609929, + "grad_norm": 0.35759246349334717, + "learning_rate": 0.0002160107334525939, + "loss": 0.079, + "step": 323 + }, + { + "epoch": 0.574468085106383, + "grad_norm": 0.8581865429878235, + "learning_rate": 0.00021574239713774597, + "loss": 0.2422, + "step": 324 + }, + { + "epoch": 0.5762411347517731, + "grad_norm": 0.7060332298278809, + "learning_rate": 0.00021547406082289802, + "loss": 0.2283, + "step": 325 + }, + { + "epoch": 0.5780141843971631, + "grad_norm": 0.7340462803840637, + "learning_rate": 0.00021520572450805007, + "loss": 0.1915, + "step": 326 + }, + { + "epoch": 0.5797872340425532, + "grad_norm": 0.42532655596733093, + "learning_rate": 0.00021493738819320212, + "loss": 0.1125, + "step": 327 + }, + { + "epoch": 0.5815602836879432, + "grad_norm": 0.2977962791919708, + "learning_rate": 0.0002146690518783542, + "loss": 0.0812, + "step": 328 + }, + { + "epoch": 0.5833333333333334, + "grad_norm": 0.3650486171245575, + "learning_rate": 
0.00021440071556350624, + "loss": 0.0999, + "step": 329 + }, + { + "epoch": 0.5851063829787234, + "grad_norm": 0.6662601232528687, + "learning_rate": 0.0002141323792486583, + "loss": 0.2518, + "step": 330 + }, + { + "epoch": 0.5868794326241135, + "grad_norm": 0.46315595507621765, + "learning_rate": 0.00021386404293381034, + "loss": 0.1164, + "step": 331 + }, + { + "epoch": 0.5886524822695035, + "grad_norm": 0.5522826910018921, + "learning_rate": 0.00021359570661896242, + "loss": 0.1563, + "step": 332 + }, + { + "epoch": 0.5904255319148937, + "grad_norm": 0.6960483193397522, + "learning_rate": 0.0002133273703041145, + "loss": 0.1998, + "step": 333 + }, + { + "epoch": 0.5921985815602837, + "grad_norm": 0.46060803532600403, + "learning_rate": 0.00021305903398926651, + "loss": 0.1235, + "step": 334 + }, + { + "epoch": 0.5939716312056738, + "grad_norm": 0.8451377749443054, + "learning_rate": 0.0002127906976744186, + "loss": 0.2462, + "step": 335 + }, + { + "epoch": 0.5957446808510638, + "grad_norm": 0.4865632653236389, + "learning_rate": 0.00021252236135957064, + "loss": 0.1759, + "step": 336 + }, + { + "epoch": 0.5975177304964538, + "grad_norm": 0.5368924736976624, + "learning_rate": 0.00021225402504472272, + "loss": 0.1632, + "step": 337 + }, + { + "epoch": 0.599290780141844, + "grad_norm": 0.5031744837760925, + "learning_rate": 0.00021198568872987474, + "loss": 0.2001, + "step": 338 + }, + { + "epoch": 0.601063829787234, + "grad_norm": 0.35617366433143616, + "learning_rate": 0.00021171735241502681, + "loss": 0.0905, + "step": 339 + }, + { + "epoch": 0.6028368794326241, + "grad_norm": 0.4215497076511383, + "learning_rate": 0.00021144901610017886, + "loss": 0.1474, + "step": 340 + }, + { + "epoch": 0.6046099290780141, + "grad_norm": 0.3073301911354065, + "learning_rate": 0.00021118067978533094, + "loss": 0.053, + "step": 341 + }, + { + "epoch": 0.6063829787234043, + "grad_norm": 0.30299848318099976, + "learning_rate": 0.00021091234347048301, + "loss": 0.0715, + "step": 342 + }, + { + "epoch": 0.6081560283687943, + "grad_norm": 0.26558998227119446, + "learning_rate": 0.00021064400715563504, + "loss": 0.0573, + "step": 343 + }, + { + "epoch": 0.6099290780141844, + "grad_norm": 0.21946674585342407, + "learning_rate": 0.0002103756708407871, + "loss": 0.0502, + "step": 344 + }, + { + "epoch": 0.6117021276595744, + "grad_norm": 0.48567119240760803, + "learning_rate": 0.00021010733452593916, + "loss": 0.1224, + "step": 345 + }, + { + "epoch": 0.6134751773049646, + "grad_norm": 0.444938600063324, + "learning_rate": 0.00020983899821109124, + "loss": 0.0899, + "step": 346 + }, + { + "epoch": 0.6152482269503546, + "grad_norm": 0.3576812744140625, + "learning_rate": 0.00020957066189624326, + "loss": 0.1104, + "step": 347 + }, + { + "epoch": 0.6170212765957447, + "grad_norm": 0.5801045298576355, + "learning_rate": 0.00020930232558139534, + "loss": 0.2192, + "step": 348 + }, + { + "epoch": 0.6187943262411347, + "grad_norm": 0.3965771198272705, + "learning_rate": 0.00020903398926654739, + "loss": 0.1025, + "step": 349 + }, + { + "epoch": 0.6205673758865248, + "grad_norm": 0.32997211813926697, + "learning_rate": 0.00020876565295169943, + "loss": 0.0392, + "step": 350 + }, + { + "epoch": 0.6223404255319149, + "grad_norm": 2.3874707221984863, + "learning_rate": 0.00020849731663685148, + "loss": 0.4267, + "step": 351 + }, + { + "epoch": 0.624113475177305, + "grad_norm": 1.0561052560806274, + "learning_rate": 0.00020822898032200356, + "loss": 0.3531, + "step": 352 + }, + { + "epoch": 0.625886524822695, + 
"grad_norm": 1.0714682340621948, + "learning_rate": 0.00020796064400715564, + "loss": 0.208, + "step": 353 + }, + { + "epoch": 0.6276595744680851, + "grad_norm": 1.0135624408721924, + "learning_rate": 0.00020769230769230766, + "loss": 0.2722, + "step": 354 + }, + { + "epoch": 0.6294326241134752, + "grad_norm": 0.4171317219734192, + "learning_rate": 0.00020742397137745973, + "loss": 0.1137, + "step": 355 + }, + { + "epoch": 0.6312056737588653, + "grad_norm": 0.7069351673126221, + "learning_rate": 0.00020715563506261178, + "loss": 0.1919, + "step": 356 + }, + { + "epoch": 0.6329787234042553, + "grad_norm": 0.5680619478225708, + "learning_rate": 0.00020688729874776386, + "loss": 0.1884, + "step": 357 + }, + { + "epoch": 0.6347517730496454, + "grad_norm": 0.8620048761367798, + "learning_rate": 0.00020661896243291588, + "loss": 0.3209, + "step": 358 + }, + { + "epoch": 0.6365248226950354, + "grad_norm": 0.4163174331188202, + "learning_rate": 0.00020635062611806796, + "loss": 0.1553, + "step": 359 + }, + { + "epoch": 0.6382978723404256, + "grad_norm": 0.7022566199302673, + "learning_rate": 0.00020608228980322, + "loss": 0.2839, + "step": 360 + }, + { + "epoch": 0.6400709219858156, + "grad_norm": 0.35048022866249084, + "learning_rate": 0.00020581395348837208, + "loss": 0.1168, + "step": 361 + }, + { + "epoch": 0.6418439716312057, + "grad_norm": 0.6058384776115417, + "learning_rate": 0.00020554561717352416, + "loss": 0.2336, + "step": 362 + }, + { + "epoch": 0.6436170212765957, + "grad_norm": 0.5632460117340088, + "learning_rate": 0.00020527728085867618, + "loss": 0.1866, + "step": 363 + }, + { + "epoch": 0.6453900709219859, + "grad_norm": 0.8508513569831848, + "learning_rate": 0.00020500894454382826, + "loss": 0.2795, + "step": 364 + }, + { + "epoch": 0.6471631205673759, + "grad_norm": 0.3261764645576477, + "learning_rate": 0.0002047406082289803, + "loss": 0.0695, + "step": 365 + }, + { + "epoch": 0.648936170212766, + "grad_norm": 0.6185038089752197, + "learning_rate": 0.00020447227191413238, + "loss": 0.2125, + "step": 366 + }, + { + "epoch": 0.650709219858156, + "grad_norm": 0.9981937408447266, + "learning_rate": 0.0002042039355992844, + "loss": 0.179, + "step": 367 + }, + { + "epoch": 0.6524822695035462, + "grad_norm": 0.625804603099823, + "learning_rate": 0.00020393559928443648, + "loss": 0.2315, + "step": 368 + }, + { + "epoch": 0.6542553191489362, + "grad_norm": 0.460195928812027, + "learning_rate": 0.00020366726296958853, + "loss": 0.1352, + "step": 369 + }, + { + "epoch": 0.6560283687943262, + "grad_norm": 0.6661739349365234, + "learning_rate": 0.00020339892665474058, + "loss": 0.2227, + "step": 370 + }, + { + "epoch": 0.6578014184397163, + "grad_norm": 0.5500937700271606, + "learning_rate": 0.00020313059033989265, + "loss": 0.1519, + "step": 371 + }, + { + "epoch": 0.6595744680851063, + "grad_norm": 0.8262995481491089, + "learning_rate": 0.0002028622540250447, + "loss": 0.2965, + "step": 372 + }, + { + "epoch": 0.6613475177304965, + "grad_norm": 0.6892026662826538, + "learning_rate": 0.00020259391771019678, + "loss": 0.2244, + "step": 373 + }, + { + "epoch": 0.6631205673758865, + "grad_norm": 0.9893831014633179, + "learning_rate": 0.0002023255813953488, + "loss": 0.27, + "step": 374 + }, + { + "epoch": 0.6648936170212766, + "grad_norm": 0.26278918981552124, + "learning_rate": 0.00020205724508050088, + "loss": 0.0958, + "step": 375 + }, + { + "epoch": 0.6666666666666666, + "grad_norm": 0.4749915301799774, + "learning_rate": 0.00020178890876565293, + "loss": 0.182, + "step": 376 + }, + { + 
"epoch": 0.6684397163120568, + "grad_norm": 0.36534926295280457, + "learning_rate": 0.000201520572450805, + "loss": 0.0955, + "step": 377 + }, + { + "epoch": 0.6702127659574468, + "grad_norm": 0.28374701738357544, + "learning_rate": 0.00020125223613595702, + "loss": 0.083, + "step": 378 + }, + { + "epoch": 0.6719858156028369, + "grad_norm": 0.5432383418083191, + "learning_rate": 0.0002009838998211091, + "loss": 0.1533, + "step": 379 + }, + { + "epoch": 0.6737588652482269, + "grad_norm": 0.6451116800308228, + "learning_rate": 0.00020071556350626118, + "loss": 0.2346, + "step": 380 + }, + { + "epoch": 0.675531914893617, + "grad_norm": 0.3411319851875305, + "learning_rate": 0.00020044722719141323, + "loss": 0.0937, + "step": 381 + }, + { + "epoch": 0.6773049645390071, + "grad_norm": 0.40049058198928833, + "learning_rate": 0.0002001788908765653, + "loss": 0.1219, + "step": 382 + }, + { + "epoch": 0.6790780141843972, + "grad_norm": 0.3120768070220947, + "learning_rate": 0.00019991055456171732, + "loss": 0.1086, + "step": 383 + }, + { + "epoch": 0.6808510638297872, + "grad_norm": 0.3227909207344055, + "learning_rate": 0.0001996422182468694, + "loss": 0.0911, + "step": 384 + }, + { + "epoch": 0.6826241134751773, + "grad_norm": 0.22975340485572815, + "learning_rate": 0.00019937388193202145, + "loss": 0.0818, + "step": 385 + }, + { + "epoch": 0.6843971631205674, + "grad_norm": 0.3845633268356323, + "learning_rate": 0.00019910554561717352, + "loss": 0.1278, + "step": 386 + }, + { + "epoch": 0.6861702127659575, + "grad_norm": 0.4206799864768982, + "learning_rate": 0.00019883720930232555, + "loss": 0.1137, + "step": 387 + }, + { + "epoch": 0.6879432624113475, + "grad_norm": 0.4134272634983063, + "learning_rate": 0.00019856887298747762, + "loss": 0.1299, + "step": 388 + }, + { + "epoch": 0.6897163120567376, + "grad_norm": 0.44249987602233887, + "learning_rate": 0.0001983005366726297, + "loss": 0.1211, + "step": 389 + }, + { + "epoch": 0.6914893617021277, + "grad_norm": 0.3611302971839905, + "learning_rate": 0.00019803220035778172, + "loss": 0.114, + "step": 390 + }, + { + "epoch": 0.6932624113475178, + "grad_norm": 0.47269973158836365, + "learning_rate": 0.0001977638640429338, + "loss": 0.1405, + "step": 391 + }, + { + "epoch": 0.6950354609929078, + "grad_norm": 0.47546666860580444, + "learning_rate": 0.00019749552772808585, + "loss": 0.1447, + "step": 392 + }, + { + "epoch": 0.6968085106382979, + "grad_norm": 0.3153839111328125, + "learning_rate": 0.00019722719141323792, + "loss": 0.0972, + "step": 393 + }, + { + "epoch": 0.6985815602836879, + "grad_norm": 0.23356540501117706, + "learning_rate": 0.00019695885509838994, + "loss": 0.0429, + "step": 394 + }, + { + "epoch": 0.700354609929078, + "grad_norm": 0.399263858795166, + "learning_rate": 0.00019669051878354202, + "loss": 0.1401, + "step": 395 + }, + { + "epoch": 0.7021276595744681, + "grad_norm": 0.36282962560653687, + "learning_rate": 0.00019642218246869407, + "loss": 0.11, + "step": 396 + }, + { + "epoch": 0.7039007092198581, + "grad_norm": 0.4403574764728546, + "learning_rate": 0.00019615384615384615, + "loss": 0.0706, + "step": 397 + }, + { + "epoch": 0.7056737588652482, + "grad_norm": 0.5901324152946472, + "learning_rate": 0.00019588550983899817, + "loss": 0.1191, + "step": 398 + }, + { + "epoch": 0.7074468085106383, + "grad_norm": 0.2563900351524353, + "learning_rate": 0.00019561717352415024, + "loss": 0.0731, + "step": 399 + }, + { + "epoch": 0.7092198581560284, + "grad_norm": 0.4444797933101654, + "learning_rate": 0.00019534883720930232, + 
"loss": 0.1425, + "step": 400 + }, + { + "epoch": 0.7109929078014184, + "grad_norm": 1.0342155694961548, + "learning_rate": 0.00019508050089445437, + "loss": 0.3125, + "step": 401 + }, + { + "epoch": 0.7127659574468085, + "grad_norm": 0.6980201005935669, + "learning_rate": 0.00019481216457960644, + "loss": 0.2188, + "step": 402 + }, + { + "epoch": 0.7145390070921985, + "grad_norm": 0.740844190120697, + "learning_rate": 0.00019454382826475847, + "loss": 0.2467, + "step": 403 + }, + { + "epoch": 0.7163120567375887, + "grad_norm": 0.513054370880127, + "learning_rate": 0.00019427549194991054, + "loss": 0.1531, + "step": 404 + }, + { + "epoch": 0.7180851063829787, + "grad_norm": 0.4994530975818634, + "learning_rate": 0.0001940071556350626, + "loss": 0.1065, + "step": 405 + }, + { + "epoch": 0.7198581560283688, + "grad_norm": 0.9895260334014893, + "learning_rate": 0.00019373881932021467, + "loss": 0.4544, + "step": 406 + }, + { + "epoch": 0.7216312056737588, + "grad_norm": 0.422955721616745, + "learning_rate": 0.0001934704830053667, + "loss": 0.1079, + "step": 407 + }, + { + "epoch": 0.723404255319149, + "grad_norm": 0.5568337440490723, + "learning_rate": 0.00019320214669051877, + "loss": 0.2162, + "step": 408 + }, + { + "epoch": 0.725177304964539, + "grad_norm": 0.4763684868812561, + "learning_rate": 0.00019293381037567084, + "loss": 0.1643, + "step": 409 + }, + { + "epoch": 0.7269503546099291, + "grad_norm": 0.4117484390735626, + "learning_rate": 0.0001926654740608229, + "loss": 0.1393, + "step": 410 + }, + { + "epoch": 0.7287234042553191, + "grad_norm": 0.4180298447608948, + "learning_rate": 0.00019239713774597494, + "loss": 0.183, + "step": 411 + }, + { + "epoch": 0.7304964539007093, + "grad_norm": 0.42761579155921936, + "learning_rate": 0.000192128801431127, + "loss": 0.1463, + "step": 412 + }, + { + "epoch": 0.7322695035460993, + "grad_norm": 0.36335864663124084, + "learning_rate": 0.00019186046511627906, + "loss": 0.1032, + "step": 413 + }, + { + "epoch": 0.7340425531914894, + "grad_norm": 0.4711301326751709, + "learning_rate": 0.0001915921288014311, + "loss": 0.17, + "step": 414 + }, + { + "epoch": 0.7358156028368794, + "grad_norm": 0.38681793212890625, + "learning_rate": 0.00019132379248658316, + "loss": 0.1371, + "step": 415 + }, + { + "epoch": 0.7375886524822695, + "grad_norm": 0.38631147146224976, + "learning_rate": 0.0001910554561717352, + "loss": 0.1183, + "step": 416 + }, + { + "epoch": 0.7393617021276596, + "grad_norm": 0.4759841561317444, + "learning_rate": 0.0001907871198568873, + "loss": 0.1441, + "step": 417 + }, + { + "epoch": 0.7411347517730497, + "grad_norm": 0.42136093974113464, + "learning_rate": 0.00019051878354203936, + "loss": 0.1093, + "step": 418 + }, + { + "epoch": 0.7429078014184397, + "grad_norm": 0.5578286051750183, + "learning_rate": 0.00019025044722719139, + "loss": 0.1521, + "step": 419 + }, + { + "epoch": 0.7446808510638298, + "grad_norm": 0.32407546043395996, + "learning_rate": 0.00018998211091234346, + "loss": 0.0614, + "step": 420 + }, + { + "epoch": 0.7464539007092199, + "grad_norm": 0.621771514415741, + "learning_rate": 0.0001897137745974955, + "loss": 0.2111, + "step": 421 + }, + { + "epoch": 0.74822695035461, + "grad_norm": 0.4861868619918823, + "learning_rate": 0.0001894454382826476, + "loss": 0.1138, + "step": 422 + }, + { + "epoch": 0.75, + "grad_norm": 0.6632353067398071, + "learning_rate": 0.0001891771019677996, + "loss": 0.2492, + "step": 423 + }, + { + "epoch": 0.75177304964539, + "grad_norm": 0.3670256733894348, + "learning_rate": 
0.00018890876565295169, + "loss": 0.0927, + "step": 424 + }, + { + "epoch": 0.7535460992907801, + "grad_norm": 0.48544085025787354, + "learning_rate": 0.00018864042933810373, + "loss": 0.1422, + "step": 425 + }, + { + "epoch": 0.7553191489361702, + "grad_norm": 0.3215571939945221, + "learning_rate": 0.0001883720930232558, + "loss": 0.0729, + "step": 426 + }, + { + "epoch": 0.7570921985815603, + "grad_norm": 0.6928660869598389, + "learning_rate": 0.00018810375670840786, + "loss": 0.2033, + "step": 427 + }, + { + "epoch": 0.7588652482269503, + "grad_norm": 0.42845651507377625, + "learning_rate": 0.0001878354203935599, + "loss": 0.1536, + "step": 428 + }, + { + "epoch": 0.7606382978723404, + "grad_norm": 0.5783790349960327, + "learning_rate": 0.00018756708407871198, + "loss": 0.1573, + "step": 429 + }, + { + "epoch": 0.7624113475177305, + "grad_norm": 0.34763625264167786, + "learning_rate": 0.00018729874776386403, + "loss": 0.0891, + "step": 430 + }, + { + "epoch": 0.7641843971631206, + "grad_norm": 0.3298535943031311, + "learning_rate": 0.00018703041144901608, + "loss": 0.0836, + "step": 431 + }, + { + "epoch": 0.7659574468085106, + "grad_norm": 0.48353129625320435, + "learning_rate": 0.00018676207513416813, + "loss": 0.1624, + "step": 432 + }, + { + "epoch": 0.7677304964539007, + "grad_norm": 0.3654095530509949, + "learning_rate": 0.0001864937388193202, + "loss": 0.074, + "step": 433 + }, + { + "epoch": 0.7695035460992907, + "grad_norm": 0.6465492248535156, + "learning_rate": 0.00018622540250447223, + "loss": 0.2221, + "step": 434 + }, + { + "epoch": 0.7712765957446809, + "grad_norm": 0.29519444704055786, + "learning_rate": 0.0001859570661896243, + "loss": 0.073, + "step": 435 + }, + { + "epoch": 0.7730496453900709, + "grad_norm": 0.39645931124687195, + "learning_rate": 0.00018568872987477636, + "loss": 0.0799, + "step": 436 + }, + { + "epoch": 0.774822695035461, + "grad_norm": 0.7929151058197021, + "learning_rate": 0.00018542039355992843, + "loss": 0.244, + "step": 437 + }, + { + "epoch": 0.776595744680851, + "grad_norm": 0.5418475866317749, + "learning_rate": 0.0001851520572450805, + "loss": 0.1439, + "step": 438 + }, + { + "epoch": 0.7783687943262412, + "grad_norm": 0.4036068320274353, + "learning_rate": 0.00018488372093023253, + "loss": 0.1158, + "step": 439 + }, + { + "epoch": 0.7801418439716312, + "grad_norm": 0.33123520016670227, + "learning_rate": 0.0001846153846153846, + "loss": 0.1182, + "step": 440 + }, + { + "epoch": 0.7819148936170213, + "grad_norm": 0.3179425299167633, + "learning_rate": 0.00018434704830053665, + "loss": 0.0763, + "step": 441 + }, + { + "epoch": 0.7836879432624113, + "grad_norm": 0.25326409935951233, + "learning_rate": 0.00018407871198568873, + "loss": 0.0388, + "step": 442 + }, + { + "epoch": 0.7854609929078015, + "grad_norm": 0.4816826581954956, + "learning_rate": 0.00018381037567084075, + "loss": 0.1308, + "step": 443 + }, + { + "epoch": 0.7872340425531915, + "grad_norm": 0.2538895010948181, + "learning_rate": 0.00018354203935599283, + "loss": 0.0607, + "step": 444 + }, + { + "epoch": 0.7890070921985816, + "grad_norm": 0.3773513436317444, + "learning_rate": 0.00018327370304114488, + "loss": 0.1168, + "step": 445 + }, + { + "epoch": 0.7907801418439716, + "grad_norm": 0.48236358165740967, + "learning_rate": 0.00018300536672629695, + "loss": 0.1078, + "step": 446 + }, + { + "epoch": 0.7925531914893617, + "grad_norm": 0.37481409311294556, + "learning_rate": 0.000182737030411449, + "loss": 0.0847, + "step": 447 + }, + { + "epoch": 0.7943262411347518, + 
"grad_norm": 0.25582361221313477, + "learning_rate": 0.00018246869409660105, + "loss": 0.0518, + "step": 448 + }, + { + "epoch": 0.7960992907801419, + "grad_norm": 0.582918643951416, + "learning_rate": 0.00018220035778175313, + "loss": 0.199, + "step": 449 + }, + { + "epoch": 0.7978723404255319, + "grad_norm": 0.3821042478084564, + "learning_rate": 0.00018193202146690518, + "loss": 0.1308, + "step": 450 + }, + { + "epoch": 0.799645390070922, + "grad_norm": 0.9046875834465027, + "learning_rate": 0.00018166368515205723, + "loss": 0.3907, + "step": 451 + }, + { + "epoch": 0.8014184397163121, + "grad_norm": 0.9091282486915588, + "learning_rate": 0.00018139534883720928, + "loss": 0.421, + "step": 452 + }, + { + "epoch": 0.8014184397163121, + "eval_loss": 0.07596753537654877, + "eval_runtime": 47.273, + "eval_samples_per_second": 2.665, + "eval_steps_per_second": 2.665, + "step": 452 + }, + { + "epoch": 0.8031914893617021, + "grad_norm": 0.5998420715332031, + "learning_rate": 0.00018112701252236135, + "loss": 0.1944, + "step": 453 + }, + { + "epoch": 0.8049645390070922, + "grad_norm": 0.6365135908126831, + "learning_rate": 0.00018085867620751337, + "loss": 0.2445, + "step": 454 + }, + { + "epoch": 0.8067375886524822, + "grad_norm": 0.5594285726547241, + "learning_rate": 0.00018059033989266545, + "loss": 0.2096, + "step": 455 + }, + { + "epoch": 0.8085106382978723, + "grad_norm": 0.8409189581871033, + "learning_rate": 0.00018032200357781753, + "loss": 0.4065, + "step": 456 + }, + { + "epoch": 0.8102836879432624, + "grad_norm": 0.5661482214927673, + "learning_rate": 0.00018005366726296957, + "loss": 0.25, + "step": 457 + }, + { + "epoch": 0.8120567375886525, + "grad_norm": 0.5428692102432251, + "learning_rate": 0.00017978533094812165, + "loss": 0.2254, + "step": 458 + }, + { + "epoch": 0.8138297872340425, + "grad_norm": 0.4579595923423767, + "learning_rate": 0.00017951699463327367, + "loss": 0.1008, + "step": 459 + }, + { + "epoch": 0.8156028368794326, + "grad_norm": 0.4390023946762085, + "learning_rate": 0.00017924865831842575, + "loss": 0.1144, + "step": 460 + }, + { + "epoch": 0.8173758865248227, + "grad_norm": 0.5851403474807739, + "learning_rate": 0.0001789803220035778, + "loss": 0.2355, + "step": 461 + }, + { + "epoch": 0.8191489361702128, + "grad_norm": 0.31094157695770264, + "learning_rate": 0.00017871198568872987, + "loss": 0.1078, + "step": 462 + }, + { + "epoch": 0.8209219858156028, + "grad_norm": 0.5213750600814819, + "learning_rate": 0.0001784436493738819, + "loss": 0.1546, + "step": 463 + }, + { + "epoch": 0.8226950354609929, + "grad_norm": 0.3972003161907196, + "learning_rate": 0.00017817531305903397, + "loss": 0.1379, + "step": 464 + }, + { + "epoch": 0.824468085106383, + "grad_norm": 0.35936617851257324, + "learning_rate": 0.00017790697674418605, + "loss": 0.1238, + "step": 465 + }, + { + "epoch": 0.8262411347517731, + "grad_norm": 0.31084805727005005, + "learning_rate": 0.0001776386404293381, + "loss": 0.0898, + "step": 466 + }, + { + "epoch": 0.8280141843971631, + "grad_norm": 0.35060107707977295, + "learning_rate": 0.00017737030411449015, + "loss": 0.1444, + "step": 467 + }, + { + "epoch": 0.8297872340425532, + "grad_norm": 0.31308284401893616, + "learning_rate": 0.0001771019677996422, + "loss": 0.0722, + "step": 468 + }, + { + "epoch": 0.8315602836879432, + "grad_norm": 0.4981883466243744, + "learning_rate": 0.00017683363148479427, + "loss": 0.1764, + "step": 469 + }, + { + "epoch": 0.8333333333333334, + "grad_norm": 0.5609257817268372, + "learning_rate": 
0.00017656529516994632, + "loss": 0.175, + "step": 470 + }, + { + "epoch": 0.8351063829787234, + "grad_norm": 0.5611094832420349, + "learning_rate": 0.00017629695885509837, + "loss": 0.242, + "step": 471 + }, + { + "epoch": 0.8368794326241135, + "grad_norm": 0.7660516500473022, + "learning_rate": 0.00017602862254025042, + "loss": 0.2488, + "step": 472 + }, + { + "epoch": 0.8386524822695035, + "grad_norm": 0.3760780096054077, + "learning_rate": 0.0001757602862254025, + "loss": 0.1253, + "step": 473 + }, + { + "epoch": 0.8404255319148937, + "grad_norm": 0.5254774689674377, + "learning_rate": 0.00017549194991055454, + "loss": 0.2273, + "step": 474 + }, + { + "epoch": 0.8421985815602837, + "grad_norm": 0.34555211663246155, + "learning_rate": 0.0001752236135957066, + "loss": 0.1294, + "step": 475 + }, + { + "epoch": 0.8439716312056738, + "grad_norm": 0.4075317978858948, + "learning_rate": 0.00017495527728085867, + "loss": 0.1258, + "step": 476 + }, + { + "epoch": 0.8457446808510638, + "grad_norm": 0.4469994604587555, + "learning_rate": 0.00017468694096601072, + "loss": 0.155, + "step": 477 + }, + { + "epoch": 0.8475177304964538, + "grad_norm": 0.3752269744873047, + "learning_rate": 0.0001744186046511628, + "loss": 0.1403, + "step": 478 + }, + { + "epoch": 0.849290780141844, + "grad_norm": 0.3446385860443115, + "learning_rate": 0.00017415026833631482, + "loss": 0.0925, + "step": 479 + }, + { + "epoch": 0.851063829787234, + "grad_norm": 0.5445675253868103, + "learning_rate": 0.0001738819320214669, + "loss": 0.1791, + "step": 480 + }, + { + "epoch": 0.8528368794326241, + "grad_norm": 0.4845312535762787, + "learning_rate": 0.00017361359570661894, + "loss": 0.236, + "step": 481 + }, + { + "epoch": 0.8546099290780141, + "grad_norm": 0.21976758539676666, + "learning_rate": 0.00017334525939177102, + "loss": 0.0785, + "step": 482 + }, + { + "epoch": 0.8563829787234043, + "grad_norm": 0.3595951497554779, + "learning_rate": 0.00017307692307692304, + "loss": 0.1297, + "step": 483 + }, + { + "epoch": 0.8581560283687943, + "grad_norm": 0.34340062737464905, + "learning_rate": 0.00017280858676207511, + "loss": 0.1437, + "step": 484 + }, + { + "epoch": 0.8599290780141844, + "grad_norm": 0.40024513006210327, + "learning_rate": 0.0001725402504472272, + "loss": 0.111, + "step": 485 + }, + { + "epoch": 0.8617021276595744, + "grad_norm": 0.4997851252555847, + "learning_rate": 0.00017227191413237924, + "loss": 0.1406, + "step": 486 + }, + { + "epoch": 0.8634751773049646, + "grad_norm": 0.37670308351516724, + "learning_rate": 0.0001720035778175313, + "loss": 0.1653, + "step": 487 + }, + { + "epoch": 0.8652482269503546, + "grad_norm": 0.4352554380893707, + "learning_rate": 0.00017173524150268334, + "loss": 0.1275, + "step": 488 + }, + { + "epoch": 0.8670212765957447, + "grad_norm": 0.31643253564834595, + "learning_rate": 0.00017146690518783541, + "loss": 0.0982, + "step": 489 + }, + { + "epoch": 0.8687943262411347, + "grad_norm": 0.3775823712348938, + "learning_rate": 0.00017119856887298746, + "loss": 0.1087, + "step": 490 + }, + { + "epoch": 0.8705673758865248, + "grad_norm": 0.714429497718811, + "learning_rate": 0.0001709302325581395, + "loss": 0.1743, + "step": 491 + }, + { + "epoch": 0.8723404255319149, + "grad_norm": 0.24479807913303375, + "learning_rate": 0.00017066189624329156, + "loss": 0.0616, + "step": 492 + }, + { + "epoch": 0.874113475177305, + "grad_norm": 0.4242885708808899, + "learning_rate": 0.00017039355992844364, + "loss": 0.1266, + "step": 493 + }, + { + "epoch": 0.875886524822695, + "grad_norm": 
0.4247332513332367, + "learning_rate": 0.00017012522361359571, + "loss": 0.1287, + "step": 494 + }, + { + "epoch": 0.8776595744680851, + "grad_norm": 0.49524983763694763, + "learning_rate": 0.00016985688729874774, + "loss": 0.1957, + "step": 495 + }, + { + "epoch": 0.8794326241134752, + "grad_norm": 0.2352031022310257, + "learning_rate": 0.0001695885509838998, + "loss": 0.0585, + "step": 496 + }, + { + "epoch": 0.8812056737588653, + "grad_norm": 0.23934312164783478, + "learning_rate": 0.00016932021466905186, + "loss": 0.0477, + "step": 497 + }, + { + "epoch": 0.8829787234042553, + "grad_norm": 0.3493366539478302, + "learning_rate": 0.00016905187835420394, + "loss": 0.1074, + "step": 498 + }, + { + "epoch": 0.8847517730496454, + "grad_norm": 0.28466084599494934, + "learning_rate": 0.00016878354203935596, + "loss": 0.0575, + "step": 499 + }, + { + "epoch": 0.8865248226950354, + "grad_norm": 0.33539536595344543, + "learning_rate": 0.00016851520572450803, + "loss": 0.0637, + "step": 500 + }, + { + "epoch": 0.8882978723404256, + "grad_norm": 0.8851572275161743, + "learning_rate": 0.00016824686940966008, + "loss": 0.3208, + "step": 501 + }, + { + "epoch": 0.8900709219858156, + "grad_norm": 0.8678922057151794, + "learning_rate": 0.00016797853309481216, + "loss": 0.3818, + "step": 502 + }, + { + "epoch": 0.8918439716312057, + "grad_norm": 0.6528666019439697, + "learning_rate": 0.00016771019677996424, + "loss": 0.2207, + "step": 503 + }, + { + "epoch": 0.8936170212765957, + "grad_norm": 0.550101637840271, + "learning_rate": 0.00016744186046511626, + "loss": 0.1612, + "step": 504 + }, + { + "epoch": 0.8953900709219859, + "grad_norm": 0.4004516303539276, + "learning_rate": 0.00016717352415026833, + "loss": 0.1158, + "step": 505 + }, + { + "epoch": 0.8971631205673759, + "grad_norm": 0.3911718428134918, + "learning_rate": 0.00016690518783542038, + "loss": 0.1702, + "step": 506 + }, + { + "epoch": 0.898936170212766, + "grad_norm": 0.45639804005622864, + "learning_rate": 0.00016663685152057243, + "loss": 0.194, + "step": 507 + }, + { + "epoch": 0.900709219858156, + "grad_norm": 0.38425612449645996, + "learning_rate": 0.00016636851520572448, + "loss": 0.1112, + "step": 508 + }, + { + "epoch": 0.9024822695035462, + "grad_norm": 0.3748437464237213, + "learning_rate": 0.00016610017889087656, + "loss": 0.1234, + "step": 509 + }, + { + "epoch": 0.9042553191489362, + "grad_norm": 0.29861482977867126, + "learning_rate": 0.0001658318425760286, + "loss": 0.1295, + "step": 510 + }, + { + "epoch": 0.9060283687943262, + "grad_norm": 0.36903858184814453, + "learning_rate": 0.00016556350626118066, + "loss": 0.119, + "step": 511 + }, + { + "epoch": 0.9078014184397163, + "grad_norm": 0.37858816981315613, + "learning_rate": 0.0001652951699463327, + "loss": 0.1686, + "step": 512 + }, + { + "epoch": 0.9095744680851063, + "grad_norm": 0.34037184715270996, + "learning_rate": 0.00016502683363148478, + "loss": 0.1046, + "step": 513 + }, + { + "epoch": 0.9113475177304965, + "grad_norm": 0.8230399489402771, + "learning_rate": 0.00016475849731663686, + "loss": 0.301, + "step": 514 + }, + { + "epoch": 0.9131205673758865, + "grad_norm": 0.4620438814163208, + "learning_rate": 0.00016449016100178888, + "loss": 0.1323, + "step": 515 + }, + { + "epoch": 0.9148936170212766, + "grad_norm": 0.3759715259075165, + "learning_rate": 0.00016422182468694095, + "loss": 0.1192, + "step": 516 + }, + { + "epoch": 0.9166666666666666, + "grad_norm": 0.6008899211883545, + "learning_rate": 0.000163953488372093, + "loss": 0.2129, + "step": 517 + }, + { + 
"epoch": 0.9184397163120568, + "grad_norm": 0.4496185779571533, + "learning_rate": 0.00016368515205724508, + "loss": 0.1597, + "step": 518 + }, + { + "epoch": 0.9202127659574468, + "grad_norm": 0.34270530939102173, + "learning_rate": 0.0001634168157423971, + "loss": 0.1263, + "step": 519 + }, + { + "epoch": 0.9219858156028369, + "grad_norm": 0.7845521569252014, + "learning_rate": 0.00016314847942754918, + "loss": 0.2071, + "step": 520 + }, + { + "epoch": 0.9237588652482269, + "grad_norm": 0.4366159737110138, + "learning_rate": 0.00016288014311270123, + "loss": 0.1805, + "step": 521 + }, + { + "epoch": 0.925531914893617, + "grad_norm": 0.3796030580997467, + "learning_rate": 0.0001626118067978533, + "loss": 0.1017, + "step": 522 + }, + { + "epoch": 0.9273049645390071, + "grad_norm": 0.4644930064678192, + "learning_rate": 0.00016234347048300538, + "loss": 0.1297, + "step": 523 + }, + { + "epoch": 0.9290780141843972, + "grad_norm": 0.34396037459373474, + "learning_rate": 0.0001620751341681574, + "loss": 0.0827, + "step": 524 + }, + { + "epoch": 0.9308510638297872, + "grad_norm": 0.3743005096912384, + "learning_rate": 0.00016180679785330948, + "loss": 0.1467, + "step": 525 + }, + { + "epoch": 0.9326241134751773, + "grad_norm": 0.35721150040626526, + "learning_rate": 0.00016153846153846153, + "loss": 0.0911, + "step": 526 + }, + { + "epoch": 0.9343971631205674, + "grad_norm": 0.42456573247909546, + "learning_rate": 0.00016127012522361358, + "loss": 0.1744, + "step": 527 + }, + { + "epoch": 0.9361702127659575, + "grad_norm": 0.4045068919658661, + "learning_rate": 0.00016100178890876562, + "loss": 0.1234, + "step": 528 + }, + { + "epoch": 0.9379432624113475, + "grad_norm": 0.3121665418148041, + "learning_rate": 0.0001607334525939177, + "loss": 0.0744, + "step": 529 + }, + { + "epoch": 0.9397163120567376, + "grad_norm": 0.3464220464229584, + "learning_rate": 0.00016046511627906975, + "loss": 0.1392, + "step": 530 + }, + { + "epoch": 0.9414893617021277, + "grad_norm": 0.3165525496006012, + "learning_rate": 0.0001601967799642218, + "loss": 0.0688, + "step": 531 + }, + { + "epoch": 0.9432624113475178, + "grad_norm": 0.3919833302497864, + "learning_rate": 0.00015992844364937387, + "loss": 0.1155, + "step": 532 + }, + { + "epoch": 0.9450354609929078, + "grad_norm": 0.34956642985343933, + "learning_rate": 0.00015966010733452592, + "loss": 0.0881, + "step": 533 + }, + { + "epoch": 0.9468085106382979, + "grad_norm": 0.3646623492240906, + "learning_rate": 0.000159391771019678, + "loss": 0.1212, + "step": 534 + }, + { + "epoch": 0.9485815602836879, + "grad_norm": 0.4821237325668335, + "learning_rate": 0.00015912343470483002, + "loss": 0.1594, + "step": 535 + }, + { + "epoch": 0.950354609929078, + "grad_norm": 0.28735411167144775, + "learning_rate": 0.0001588550983899821, + "loss": 0.084, + "step": 536 + }, + { + "epoch": 0.9521276595744681, + "grad_norm": 0.5679156184196472, + "learning_rate": 0.00015858676207513415, + "loss": 0.1563, + "step": 537 + }, + { + "epoch": 0.9539007092198581, + "grad_norm": 0.5127171874046326, + "learning_rate": 0.00015831842576028622, + "loss": 0.1513, + "step": 538 + }, + { + "epoch": 0.9556737588652482, + "grad_norm": 0.30556991696357727, + "learning_rate": 0.00015805008944543825, + "loss": 0.1026, + "step": 539 + }, + { + "epoch": 0.9574468085106383, + "grad_norm": 0.41368523240089417, + "learning_rate": 0.00015778175313059032, + "loss": 0.1205, + "step": 540 + }, + { + "epoch": 0.9592198581560284, + "grad_norm": 0.3814534842967987, + "learning_rate": 0.0001575134168157424, + 
"loss": 0.1103, + "step": 541 + }, + { + "epoch": 0.9609929078014184, + "grad_norm": 0.3183111548423767, + "learning_rate": 0.00015724508050089445, + "loss": 0.0965, + "step": 542 + }, + { + "epoch": 0.9627659574468085, + "grad_norm": 0.276720255613327, + "learning_rate": 0.00015697674418604652, + "loss": 0.0684, + "step": 543 + }, + { + "epoch": 0.9645390070921985, + "grad_norm": 0.2961701452732086, + "learning_rate": 0.00015670840787119854, + "loss": 0.04, + "step": 544 + }, + { + "epoch": 0.9663120567375887, + "grad_norm": 0.3687111437320709, + "learning_rate": 0.00015644007155635062, + "loss": 0.0921, + "step": 545 + }, + { + "epoch": 0.9680851063829787, + "grad_norm": 0.2294873148202896, + "learning_rate": 0.00015617173524150267, + "loss": 0.0694, + "step": 546 + }, + { + "epoch": 0.9698581560283688, + "grad_norm": 0.33963993191719055, + "learning_rate": 0.00015590339892665475, + "loss": 0.1287, + "step": 547 + }, + { + "epoch": 0.9716312056737588, + "grad_norm": 0.4909413456916809, + "learning_rate": 0.00015563506261180677, + "loss": 0.168, + "step": 548 + }, + { + "epoch": 0.973404255319149, + "grad_norm": 0.27783486247062683, + "learning_rate": 0.00015536672629695884, + "loss": 0.054, + "step": 549 + }, + { + "epoch": 0.975177304964539, + "grad_norm": 0.23697739839553833, + "learning_rate": 0.0001550983899821109, + "loss": 0.0399, + "step": 550 + }, + { + "epoch": 0.9769503546099291, + "grad_norm": 0.7063981890678406, + "learning_rate": 0.00015483005366726294, + "loss": 0.1962, + "step": 551 + }, + { + "epoch": 0.9787234042553191, + "grad_norm": 0.6349850296974182, + "learning_rate": 0.00015456171735241502, + "loss": 0.1456, + "step": 552 + }, + { + "epoch": 0.9804964539007093, + "grad_norm": 0.8495027422904968, + "learning_rate": 0.00015429338103756707, + "loss": 0.2952, + "step": 553 + }, + { + "epoch": 0.9822695035460993, + "grad_norm": 0.4368564784526825, + "learning_rate": 0.00015402504472271914, + "loss": 0.1814, + "step": 554 + }, + { + "epoch": 0.9840425531914894, + "grad_norm": 0.5131334662437439, + "learning_rate": 0.00015375670840787116, + "loss": 0.1503, + "step": 555 + }, + { + "epoch": 0.9858156028368794, + "grad_norm": 0.40509024262428284, + "learning_rate": 0.00015348837209302324, + "loss": 0.1643, + "step": 556 + }, + { + "epoch": 0.9875886524822695, + "grad_norm": 0.37984341382980347, + "learning_rate": 0.0001532200357781753, + "loss": 0.1496, + "step": 557 + }, + { + "epoch": 0.9893617021276596, + "grad_norm": 0.39520135521888733, + "learning_rate": 0.00015295169946332737, + "loss": 0.1251, + "step": 558 + }, + { + "epoch": 0.9911347517730497, + "grad_norm": 0.2659216821193695, + "learning_rate": 0.0001526833631484794, + "loss": 0.0907, + "step": 559 + }, + { + "epoch": 0.9929078014184397, + "grad_norm": 0.42887184023857117, + "learning_rate": 0.00015241502683363146, + "loss": 0.1358, + "step": 560 + }, + { + "epoch": 0.9946808510638298, + "grad_norm": 0.2191324532032013, + "learning_rate": 0.00015214669051878354, + "loss": 0.0713, + "step": 561 + }, + { + "epoch": 0.9964539007092199, + "grad_norm": 0.26999935507774353, + "learning_rate": 0.0001518783542039356, + "loss": 0.0859, + "step": 562 + }, + { + "epoch": 0.99822695035461, + "grad_norm": 0.30264967679977417, + "learning_rate": 0.00015161001788908767, + "loss": 0.1131, + "step": 563 + }, + { + "epoch": 1.0, + "grad_norm": 0.2457718849182129, + "learning_rate": 0.0001513416815742397, + "loss": 0.0857, + "step": 564 + }, + { + "epoch": 1.00177304964539, + "grad_norm": 0.6467576622962952, + "learning_rate": 
0.00015107334525939176, + "loss": 0.2397, + "step": 565 + }, + { + "epoch": 1.00354609929078, + "grad_norm": 0.5576766133308411, + "learning_rate": 0.0001508050089445438, + "loss": 0.2687, + "step": 566 + }, + { + "epoch": 1.0053191489361701, + "grad_norm": 0.5558743476867676, + "learning_rate": 0.0001505366726296959, + "loss": 0.1957, + "step": 567 + }, + { + "epoch": 1.0070921985815602, + "grad_norm": 0.3179946541786194, + "learning_rate": 0.0001502683363148479, + "loss": 0.1073, + "step": 568 + }, + { + "epoch": 1.0088652482269505, + "grad_norm": 0.36975720524787903, + "learning_rate": 0.00015, + "loss": 0.142, + "step": 569 + }, + { + "epoch": 1.0106382978723405, + "grad_norm": 0.2883351743221283, + "learning_rate": 0.00014973166368515204, + "loss": 0.0991, + "step": 570 + }, + { + "epoch": 1.0124113475177305, + "grad_norm": 0.36468443274497986, + "learning_rate": 0.00014946332737030408, + "loss": 0.1154, + "step": 571 + }, + { + "epoch": 1.0141843971631206, + "grad_norm": 0.2813965082168579, + "learning_rate": 0.00014919499105545616, + "loss": 0.1039, + "step": 572 + }, + { + "epoch": 1.0159574468085106, + "grad_norm": 0.3948434293270111, + "learning_rate": 0.0001489266547406082, + "loss": 0.1177, + "step": 573 + }, + { + "epoch": 1.0177304964539007, + "grad_norm": 0.4562970697879791, + "learning_rate": 0.00014865831842576029, + "loss": 0.1622, + "step": 574 + }, + { + "epoch": 1.0195035460992907, + "grad_norm": 0.3354407250881195, + "learning_rate": 0.00014838998211091234, + "loss": 0.1234, + "step": 575 + }, + { + "epoch": 1.0212765957446808, + "grad_norm": 0.4788317382335663, + "learning_rate": 0.00014812164579606438, + "loss": 0.2213, + "step": 576 + }, + { + "epoch": 1.0230496453900708, + "grad_norm": 0.3917693495750427, + "learning_rate": 0.00014785330948121646, + "loss": 0.1178, + "step": 577 + }, + { + "epoch": 1.024822695035461, + "grad_norm": 0.27874428033828735, + "learning_rate": 0.0001475849731663685, + "loss": 0.1009, + "step": 578 + }, + { + "epoch": 1.0265957446808511, + "grad_norm": 0.5051771998405457, + "learning_rate": 0.00014731663685152056, + "loss": 0.1656, + "step": 579 + }, + { + "epoch": 1.0283687943262412, + "grad_norm": 0.38514822721481323, + "learning_rate": 0.0001470483005366726, + "loss": 0.134, + "step": 580 + }, + { + "epoch": 1.0301418439716312, + "grad_norm": 0.44142523407936096, + "learning_rate": 0.00014677996422182466, + "loss": 0.1344, + "step": 581 + }, + { + "epoch": 1.0319148936170213, + "grad_norm": 0.27137696743011475, + "learning_rate": 0.00014651162790697673, + "loss": 0.0951, + "step": 582 + }, + { + "epoch": 1.0336879432624113, + "grad_norm": 0.2701858580112457, + "learning_rate": 0.00014624329159212878, + "loss": 0.0917, + "step": 583 + }, + { + "epoch": 1.0354609929078014, + "grad_norm": 0.3253329396247864, + "learning_rate": 0.00014597495527728086, + "loss": 0.1056, + "step": 584 + }, + { + "epoch": 1.0372340425531914, + "grad_norm": 0.31853559613227844, + "learning_rate": 0.0001457066189624329, + "loss": 0.1003, + "step": 585 + }, + { + "epoch": 1.0390070921985815, + "grad_norm": 0.3343375623226166, + "learning_rate": 0.00014543828264758496, + "loss": 0.109, + "step": 586 + }, + { + "epoch": 1.0407801418439717, + "grad_norm": 0.22761645913124084, + "learning_rate": 0.00014516994633273703, + "loss": 0.08, + "step": 587 + }, + { + "epoch": 1.0425531914893618, + "grad_norm": 0.24303674697875977, + "learning_rate": 0.00014490161001788908, + "loss": 0.089, + "step": 588 + }, + { + "epoch": 1.0443262411347518, + "grad_norm": 
0.36809274554252625, + "learning_rate": 0.00014463327370304113, + "loss": 0.1198, + "step": 589 + }, + { + "epoch": 1.0460992907801419, + "grad_norm": 0.35131344199180603, + "learning_rate": 0.00014436493738819318, + "loss": 0.1082, + "step": 590 + }, + { + "epoch": 1.047872340425532, + "grad_norm": 0.23665814101696014, + "learning_rate": 0.00014409660107334523, + "loss": 0.0704, + "step": 591 + }, + { + "epoch": 1.049645390070922, + "grad_norm": 0.34136760234832764, + "learning_rate": 0.0001438282647584973, + "loss": 0.1344, + "step": 592 + }, + { + "epoch": 1.051418439716312, + "grad_norm": 0.3390890657901764, + "learning_rate": 0.00014355992844364938, + "loss": 0.1147, + "step": 593 + }, + { + "epoch": 1.053191489361702, + "grad_norm": 0.33551934361457825, + "learning_rate": 0.00014329159212880143, + "loss": 0.1205, + "step": 594 + }, + { + "epoch": 1.0549645390070923, + "grad_norm": 0.3362303674221039, + "learning_rate": 0.00014302325581395348, + "loss": 0.1012, + "step": 595 + }, + { + "epoch": 1.0567375886524824, + "grad_norm": 0.40094855427742004, + "learning_rate": 0.00014275491949910553, + "loss": 0.1217, + "step": 596 + }, + { + "epoch": 1.0585106382978724, + "grad_norm": 0.3621748685836792, + "learning_rate": 0.0001424865831842576, + "loss": 0.128, + "step": 597 + }, + { + "epoch": 1.0602836879432624, + "grad_norm": 0.2884347140789032, + "learning_rate": 0.00014221824686940965, + "loss": 0.0745, + "step": 598 + }, + { + "epoch": 1.0620567375886525, + "grad_norm": 0.31141483783721924, + "learning_rate": 0.0001419499105545617, + "loss": 0.0932, + "step": 599 + }, + { + "epoch": 1.0638297872340425, + "grad_norm": 0.24083460867404938, + "learning_rate": 0.00014168157423971375, + "loss": 0.0735, + "step": 600 + }, + { + "epoch": 1.0656028368794326, + "grad_norm": 0.35196706652641296, + "learning_rate": 0.0001414132379248658, + "loss": 0.0865, + "step": 601 + }, + { + "epoch": 1.0673758865248226, + "grad_norm": 0.2785693407058716, + "learning_rate": 0.00014114490161001788, + "loss": 0.0832, + "step": 602 + }, + { + "epoch": 1.0691489361702127, + "grad_norm": 0.20616450905799866, + "learning_rate": 0.00014087656529516995, + "loss": 0.0498, + "step": 603 + }, + { + "epoch": 1.070921985815603, + "grad_norm": 0.3255903422832489, + "learning_rate": 0.000140608228980322, + "loss": 0.0971, + "step": 604 + }, + { + "epoch": 1.072695035460993, + "grad_norm": 0.24554111063480377, + "learning_rate": 0.00014033989266547405, + "loss": 0.0497, + "step": 605 + }, + { + "epoch": 1.074468085106383, + "grad_norm": 0.4421694576740265, + "learning_rate": 0.0001400715563506261, + "loss": 0.1649, + "step": 606 + }, + { + "epoch": 1.076241134751773, + "grad_norm": 0.24358749389648438, + "learning_rate": 0.00013980322003577818, + "loss": 0.0573, + "step": 607 + }, + { + "epoch": 1.0780141843971631, + "grad_norm": 0.28591474890708923, + "learning_rate": 0.00013953488372093022, + "loss": 0.0976, + "step": 608 + }, + { + "epoch": 1.0797872340425532, + "grad_norm": 0.26130688190460205, + "learning_rate": 0.00013926654740608227, + "loss": 0.0992, + "step": 609 + }, + { + "epoch": 1.0815602836879432, + "grad_norm": 0.2845771908760071, + "learning_rate": 0.00013899821109123432, + "loss": 0.0581, + "step": 610 + }, + { + "epoch": 1.0833333333333333, + "grad_norm": 0.2530725598335266, + "learning_rate": 0.0001387298747763864, + "loss": 0.0747, + "step": 611 + }, + { + "epoch": 1.0851063829787233, + "grad_norm": 0.29305538535118103, + "learning_rate": 0.00013846153846153845, + "loss": 0.0901, + "step": 612 + }, + { + 
"epoch": 1.0868794326241136, + "grad_norm": 0.23423607647418976, + "learning_rate": 0.00013819320214669052, + "loss": 0.0743, + "step": 613 + }, + { + "epoch": 1.0886524822695036, + "grad_norm": 0.20492520928382874, + "learning_rate": 0.00013792486583184257, + "loss": 0.0358, + "step": 614 + }, + { + "epoch": 1.0904255319148937, + "grad_norm": 0.6422183513641357, + "learning_rate": 0.00013765652951699462, + "loss": 0.1572, + "step": 615 + }, + { + "epoch": 1.0921985815602837, + "grad_norm": 0.8374213576316833, + "learning_rate": 0.00013738819320214667, + "loss": 0.2574, + "step": 616 + }, + { + "epoch": 1.0939716312056738, + "grad_norm": 0.5346600413322449, + "learning_rate": 0.00013711985688729875, + "loss": 0.2275, + "step": 617 + }, + { + "epoch": 1.0957446808510638, + "grad_norm": 0.5961403250694275, + "learning_rate": 0.0001368515205724508, + "loss": 0.1427, + "step": 618 + }, + { + "epoch": 1.0975177304964538, + "grad_norm": 0.4382435381412506, + "learning_rate": 0.00013658318425760284, + "loss": 0.1515, + "step": 619 + }, + { + "epoch": 1.099290780141844, + "grad_norm": 0.49309927225112915, + "learning_rate": 0.0001363148479427549, + "loss": 0.1894, + "step": 620 + }, + { + "epoch": 1.101063829787234, + "grad_norm": 0.3819267153739929, + "learning_rate": 0.00013604651162790697, + "loss": 0.14, + "step": 621 + }, + { + "epoch": 1.1028368794326242, + "grad_norm": 0.3135676383972168, + "learning_rate": 0.00013577817531305902, + "loss": 0.1468, + "step": 622 + }, + { + "epoch": 1.1046099290780143, + "grad_norm": 0.43014463782310486, + "learning_rate": 0.0001355098389982111, + "loss": 0.152, + "step": 623 + }, + { + "epoch": 1.1063829787234043, + "grad_norm": 0.3625108301639557, + "learning_rate": 0.00013524150268336314, + "loss": 0.1162, + "step": 624 + }, + { + "epoch": 1.1081560283687943, + "grad_norm": 0.43387532234191895, + "learning_rate": 0.0001349731663685152, + "loss": 0.2164, + "step": 625 + }, + { + "epoch": 1.1099290780141844, + "grad_norm": 0.49003899097442627, + "learning_rate": 0.00013470483005366724, + "loss": 0.2018, + "step": 626 + }, + { + "epoch": 1.1117021276595744, + "grad_norm": 0.32920360565185547, + "learning_rate": 0.00013443649373881932, + "loss": 0.1432, + "step": 627 + }, + { + "epoch": 1.1134751773049645, + "grad_norm": 0.2235662341117859, + "learning_rate": 0.00013416815742397137, + "loss": 0.1093, + "step": 628 + }, + { + "epoch": 1.1152482269503545, + "grad_norm": 0.32100364565849304, + "learning_rate": 0.00013389982110912342, + "loss": 0.0847, + "step": 629 + }, + { + "epoch": 1.1170212765957448, + "grad_norm": 0.2799128293991089, + "learning_rate": 0.00013363148479427547, + "loss": 0.12, + "step": 630 + }, + { + "epoch": 1.1187943262411348, + "grad_norm": 0.23999084532260895, + "learning_rate": 0.00013336314847942754, + "loss": 0.073, + "step": 631 + }, + { + "epoch": 1.1205673758865249, + "grad_norm": 0.3222167491912842, + "learning_rate": 0.0001330948121645796, + "loss": 0.1152, + "step": 632 + }, + { + "epoch": 1.122340425531915, + "grad_norm": 0.40135595202445984, + "learning_rate": 0.00013282647584973167, + "loss": 0.1472, + "step": 633 + }, + { + "epoch": 1.124113475177305, + "grad_norm": 0.3301258683204651, + "learning_rate": 0.00013255813953488372, + "loss": 0.1238, + "step": 634 + }, + { + "epoch": 1.125886524822695, + "grad_norm": 0.3449542820453644, + "learning_rate": 0.00013228980322003576, + "loss": 0.1339, + "step": 635 + }, + { + "epoch": 1.127659574468085, + "grad_norm": 0.3443644046783447, + "learning_rate": 0.00013202146690518781, + 
"loss": 0.1018, + "step": 636 + }, + { + "epoch": 1.1294326241134751, + "grad_norm": 0.27248480916023254, + "learning_rate": 0.0001317531305903399, + "loss": 0.102, + "step": 637 + }, + { + "epoch": 1.1312056737588652, + "grad_norm": 0.4105375111103058, + "learning_rate": 0.00013148479427549194, + "loss": 0.1475, + "step": 638 + }, + { + "epoch": 1.1329787234042552, + "grad_norm": 0.468524694442749, + "learning_rate": 0.000131216457960644, + "loss": 0.11, + "step": 639 + }, + { + "epoch": 1.1347517730496455, + "grad_norm": 0.3666204810142517, + "learning_rate": 0.00013094812164579604, + "loss": 0.1337, + "step": 640 + }, + { + "epoch": 1.1365248226950355, + "grad_norm": 0.4041987955570221, + "learning_rate": 0.0001306797853309481, + "loss": 0.1476, + "step": 641 + }, + { + "epoch": 1.1382978723404256, + "grad_norm": 0.2802480459213257, + "learning_rate": 0.00013041144901610016, + "loss": 0.1008, + "step": 642 + }, + { + "epoch": 1.1400709219858156, + "grad_norm": 0.3202970623970032, + "learning_rate": 0.00013014311270125224, + "loss": 0.1072, + "step": 643 + }, + { + "epoch": 1.1418439716312057, + "grad_norm": 0.20990432798862457, + "learning_rate": 0.0001298747763864043, + "loss": 0.069, + "step": 644 + }, + { + "epoch": 1.1436170212765957, + "grad_norm": 0.33487433195114136, + "learning_rate": 0.00012960644007155634, + "loss": 0.1406, + "step": 645 + }, + { + "epoch": 1.1453900709219857, + "grad_norm": 0.2642214298248291, + "learning_rate": 0.00012933810375670839, + "loss": 0.0974, + "step": 646 + }, + { + "epoch": 1.1471631205673758, + "grad_norm": 0.32974326610565186, + "learning_rate": 0.00012906976744186046, + "loss": 0.0896, + "step": 647 + }, + { + "epoch": 1.148936170212766, + "grad_norm": 0.2828482687473297, + "learning_rate": 0.0001288014311270125, + "loss": 0.0803, + "step": 648 + }, + { + "epoch": 1.150709219858156, + "grad_norm": 0.3773040175437927, + "learning_rate": 0.00012853309481216456, + "loss": 0.1187, + "step": 649 + }, + { + "epoch": 1.1524822695035462, + "grad_norm": 0.3649806082248688, + "learning_rate": 0.00012826475849731664, + "loss": 0.0808, + "step": 650 + }, + { + "epoch": 1.1542553191489362, + "grad_norm": 0.3349076509475708, + "learning_rate": 0.00012799642218246868, + "loss": 0.089, + "step": 651 + }, + { + "epoch": 1.1560283687943262, + "grad_norm": 0.2334943264722824, + "learning_rate": 0.00012772808586762073, + "loss": 0.0706, + "step": 652 + }, + { + "epoch": 1.1578014184397163, + "grad_norm": 0.1958976536989212, + "learning_rate": 0.0001274597495527728, + "loss": 0.05, + "step": 653 + }, + { + "epoch": 1.1595744680851063, + "grad_norm": 0.47231465578079224, + "learning_rate": 0.00012719141323792486, + "loss": 0.165, + "step": 654 + }, + { + "epoch": 1.1613475177304964, + "grad_norm": 0.365305632352829, + "learning_rate": 0.0001269230769230769, + "loss": 0.1267, + "step": 655 + }, + { + "epoch": 1.1631205673758864, + "grad_norm": 0.19873836636543274, + "learning_rate": 0.00012665474060822896, + "loss": 0.0418, + "step": 656 + }, + { + "epoch": 1.1648936170212765, + "grad_norm": 0.30495962500572205, + "learning_rate": 0.00012638640429338103, + "loss": 0.0876, + "step": 657 + }, + { + "epoch": 1.1666666666666667, + "grad_norm": 0.28687819838523865, + "learning_rate": 0.00012611806797853308, + "loss": 0.0658, + "step": 658 + }, + { + "epoch": 1.1684397163120568, + "grad_norm": 0.24457816779613495, + "learning_rate": 0.00012584973166368513, + "loss": 0.081, + "step": 659 + }, + { + "epoch": 1.1702127659574468, + "grad_norm": 0.6117145419120789, + 
"learning_rate": 0.0001255813953488372, + "loss": 0.124, + "step": 660 + }, + { + "epoch": 1.1719858156028369, + "grad_norm": 0.4446345269680023, + "learning_rate": 0.00012531305903398926, + "loss": 0.0919, + "step": 661 + }, + { + "epoch": 1.173758865248227, + "grad_norm": 0.2580985426902771, + "learning_rate": 0.0001250447227191413, + "loss": 0.0873, + "step": 662 + }, + { + "epoch": 1.175531914893617, + "grad_norm": 0.1888839155435562, + "learning_rate": 0.00012477638640429338, + "loss": 0.0496, + "step": 663 + }, + { + "epoch": 1.177304964539007, + "grad_norm": 0.2381056845188141, + "learning_rate": 0.00012450805008944543, + "loss": 0.0847, + "step": 664 + }, + { + "epoch": 1.1790780141843973, + "grad_norm": 0.7352200746536255, + "learning_rate": 0.00012423971377459748, + "loss": 0.2856, + "step": 665 + }, + { + "epoch": 1.1808510638297873, + "grad_norm": 0.40972036123275757, + "learning_rate": 0.00012397137745974953, + "loss": 0.1386, + "step": 666 + }, + { + "epoch": 1.1826241134751774, + "grad_norm": 1.2634938955307007, + "learning_rate": 0.0001237030411449016, + "loss": 0.2431, + "step": 667 + }, + { + "epoch": 1.1843971631205674, + "grad_norm": 0.5589777231216431, + "learning_rate": 0.00012343470483005365, + "loss": 0.1766, + "step": 668 + }, + { + "epoch": 1.1861702127659575, + "grad_norm": 0.4527657926082611, + "learning_rate": 0.00012316636851520573, + "loss": 0.186, + "step": 669 + }, + { + "epoch": 1.1879432624113475, + "grad_norm": 0.507477343082428, + "learning_rate": 0.00012289803220035778, + "loss": 0.1624, + "step": 670 + }, + { + "epoch": 1.1897163120567376, + "grad_norm": 0.5501653552055359, + "learning_rate": 0.00012262969588550983, + "loss": 0.1805, + "step": 671 + }, + { + "epoch": 1.1914893617021276, + "grad_norm": 0.6057561635971069, + "learning_rate": 0.00012236135957066188, + "loss": 0.2011, + "step": 672 + }, + { + "epoch": 1.1932624113475176, + "grad_norm": 0.28531283140182495, + "learning_rate": 0.00012209302325581395, + "loss": 0.1028, + "step": 673 + }, + { + "epoch": 1.1950354609929077, + "grad_norm": 0.4142218828201294, + "learning_rate": 0.000121824686940966, + "loss": 0.1485, + "step": 674 + }, + { + "epoch": 1.196808510638298, + "grad_norm": 0.4527840316295624, + "learning_rate": 0.00012155635062611805, + "loss": 0.171, + "step": 675 + }, + { + "epoch": 1.198581560283688, + "grad_norm": 0.31029072403907776, + "learning_rate": 0.00012128801431127011, + "loss": 0.116, + "step": 676 + }, + { + "epoch": 1.200354609929078, + "grad_norm": 0.5413310527801514, + "learning_rate": 0.00012101967799642216, + "loss": 0.1969, + "step": 677 + }, + { + "epoch": 1.202127659574468, + "grad_norm": 0.24240347743034363, + "learning_rate": 0.00012075134168157424, + "loss": 0.0809, + "step": 678 + }, + { + "epoch": 1.202127659574468, + "eval_loss": 0.0639609843492508, + "eval_runtime": 47.1878, + "eval_samples_per_second": 2.67, + "eval_steps_per_second": 2.67, + "step": 678 + }, + { + "epoch": 1.2039007092198581, + "grad_norm": 0.4262109398841858, + "learning_rate": 0.00012048300536672629, + "loss": 0.1255, + "step": 679 + }, + { + "epoch": 1.2056737588652482, + "grad_norm": 0.5289608240127563, + "learning_rate": 0.00012021466905187835, + "loss": 0.1731, + "step": 680 + }, + { + "epoch": 1.2074468085106382, + "grad_norm": 0.32836204767227173, + "learning_rate": 0.0001199463327370304, + "loss": 0.1366, + "step": 681 + }, + { + "epoch": 1.2092198581560283, + "grad_norm": 0.710455060005188, + "learning_rate": 0.00011967799642218246, + "loss": 0.1448, + "step": 682 + }, + { + 
"epoch": 1.2109929078014185, + "grad_norm": 0.4564105272293091, + "learning_rate": 0.00011940966010733451, + "loss": 0.1959, + "step": 683 + }, + { + "epoch": 1.2127659574468086, + "grad_norm": 0.3768802285194397, + "learning_rate": 0.00011914132379248657, + "loss": 0.1465, + "step": 684 + }, + { + "epoch": 1.2145390070921986, + "grad_norm": 0.3054960072040558, + "learning_rate": 0.00011887298747763862, + "loss": 0.1059, + "step": 685 + }, + { + "epoch": 1.2163120567375887, + "grad_norm": 0.3906306326389313, + "learning_rate": 0.00011860465116279069, + "loss": 0.1583, + "step": 686 + }, + { + "epoch": 1.2180851063829787, + "grad_norm": 0.43236812949180603, + "learning_rate": 0.00011833631484794273, + "loss": 0.1688, + "step": 687 + }, + { + "epoch": 1.2198581560283688, + "grad_norm": 0.39702731370925903, + "learning_rate": 0.00011806797853309481, + "loss": 0.1422, + "step": 688 + }, + { + "epoch": 1.2216312056737588, + "grad_norm": 0.33314409852027893, + "learning_rate": 0.00011779964221824686, + "loss": 0.0932, + "step": 689 + }, + { + "epoch": 1.2234042553191489, + "grad_norm": 0.24420586228370667, + "learning_rate": 0.00011753130590339892, + "loss": 0.0724, + "step": 690 + }, + { + "epoch": 1.225177304964539, + "grad_norm": 0.3487238585948944, + "learning_rate": 0.00011726296958855097, + "loss": 0.137, + "step": 691 + }, + { + "epoch": 1.226950354609929, + "grad_norm": 0.2950010299682617, + "learning_rate": 0.00011699463327370303, + "loss": 0.12, + "step": 692 + }, + { + "epoch": 1.2287234042553192, + "grad_norm": 0.27103373408317566, + "learning_rate": 0.00011672629695885508, + "loss": 0.0896, + "step": 693 + }, + { + "epoch": 1.2304964539007093, + "grad_norm": 0.21480779349803925, + "learning_rate": 0.00011645796064400715, + "loss": 0.0857, + "step": 694 + }, + { + "epoch": 1.2322695035460993, + "grad_norm": 0.27627474069595337, + "learning_rate": 0.0001161896243291592, + "loss": 0.1184, + "step": 695 + }, + { + "epoch": 1.2340425531914894, + "grad_norm": 0.4017396569252014, + "learning_rate": 0.00011592128801431126, + "loss": 0.1221, + "step": 696 + }, + { + "epoch": 1.2358156028368794, + "grad_norm": 0.2959025204181671, + "learning_rate": 0.00011565295169946332, + "loss": 0.0934, + "step": 697 + }, + { + "epoch": 1.2375886524822695, + "grad_norm": 0.48978719115257263, + "learning_rate": 0.00011538461538461538, + "loss": 0.1601, + "step": 698 + }, + { + "epoch": 1.2393617021276595, + "grad_norm": 0.30416980385780334, + "learning_rate": 0.00011511627906976743, + "loss": 0.1089, + "step": 699 + }, + { + "epoch": 1.2411347517730495, + "grad_norm": 0.22977767884731293, + "learning_rate": 0.0001148479427549195, + "loss": 0.0717, + "step": 700 + }, + { + "epoch": 1.2429078014184398, + "grad_norm": 0.45538970828056335, + "learning_rate": 0.00011457960644007154, + "loss": 0.1824, + "step": 701 + }, + { + "epoch": 1.2446808510638299, + "grad_norm": 0.5112214088439941, + "learning_rate": 0.0001143112701252236, + "loss": 0.1782, + "step": 702 + }, + { + "epoch": 1.24645390070922, + "grad_norm": 0.3076941967010498, + "learning_rate": 0.00011404293381037565, + "loss": 0.1195, + "step": 703 + }, + { + "epoch": 1.24822695035461, + "grad_norm": 0.3471357226371765, + "learning_rate": 0.00011377459749552772, + "loss": 0.133, + "step": 704 + }, + { + "epoch": 1.25, + "grad_norm": 0.27840855717658997, + "learning_rate": 0.00011350626118067977, + "loss": 0.0776, + "step": 705 + }, + { + "epoch": 1.25177304964539, + "grad_norm": 0.3375721871852875, + "learning_rate": 0.00011323792486583183, + "loss": 
0.1033, + "step": 706 + }, + { + "epoch": 1.25354609929078, + "grad_norm": 0.22541607916355133, + "learning_rate": 0.00011296958855098389, + "loss": 0.0615, + "step": 707 + }, + { + "epoch": 1.2553191489361701, + "grad_norm": 0.21740016341209412, + "learning_rate": 0.00011270125223613595, + "loss": 0.0586, + "step": 708 + }, + { + "epoch": 1.2570921985815602, + "grad_norm": 0.23697029054164886, + "learning_rate": 0.000112432915921288, + "loss": 0.0701, + "step": 709 + }, + { + "epoch": 1.2588652482269502, + "grad_norm": 0.21985624730587006, + "learning_rate": 0.00011216457960644006, + "loss": 0.0617, + "step": 710 + }, + { + "epoch": 1.2606382978723405, + "grad_norm": 0.19769056141376495, + "learning_rate": 0.00011189624329159211, + "loss": 0.0568, + "step": 711 + }, + { + "epoch": 1.2624113475177305, + "grad_norm": 0.1668470799922943, + "learning_rate": 0.00011162790697674418, + "loss": 0.0417, + "step": 712 + }, + { + "epoch": 1.2641843971631206, + "grad_norm": 0.1491071879863739, + "learning_rate": 0.00011135957066189623, + "loss": 0.0338, + "step": 713 + }, + { + "epoch": 1.2659574468085106, + "grad_norm": 0.19979891180992126, + "learning_rate": 0.00011109123434704829, + "loss": 0.0484, + "step": 714 + }, + { + "epoch": 1.2677304964539007, + "grad_norm": 0.6793609261512756, + "learning_rate": 0.00011082289803220034, + "loss": 0.302, + "step": 715 + }, + { + "epoch": 1.2695035460992907, + "grad_norm": 0.35137850046157837, + "learning_rate": 0.00011055456171735241, + "loss": 0.0982, + "step": 716 + }, + { + "epoch": 1.2712765957446808, + "grad_norm": 0.35721829533576965, + "learning_rate": 0.00011028622540250446, + "loss": 0.1059, + "step": 717 + }, + { + "epoch": 1.273049645390071, + "grad_norm": 0.44229385256767273, + "learning_rate": 0.00011001788908765652, + "loss": 0.1698, + "step": 718 + }, + { + "epoch": 1.274822695035461, + "grad_norm": 0.5400137305259705, + "learning_rate": 0.00010974955277280857, + "loss": 0.1205, + "step": 719 + }, + { + "epoch": 1.2765957446808511, + "grad_norm": 0.5769519209861755, + "learning_rate": 0.00010948121645796064, + "loss": 0.1939, + "step": 720 + }, + { + "epoch": 1.2783687943262412, + "grad_norm": 0.47844019532203674, + "learning_rate": 0.00010921288014311269, + "loss": 0.1888, + "step": 721 + }, + { + "epoch": 1.2801418439716312, + "grad_norm": 0.297620952129364, + "learning_rate": 0.00010894454382826475, + "loss": 0.0844, + "step": 722 + }, + { + "epoch": 1.2819148936170213, + "grad_norm": 0.550375759601593, + "learning_rate": 0.0001086762075134168, + "loss": 0.2082, + "step": 723 + }, + { + "epoch": 1.2836879432624113, + "grad_norm": 0.5277696847915649, + "learning_rate": 0.00010840787119856886, + "loss": 0.1826, + "step": 724 + }, + { + "epoch": 1.2854609929078014, + "grad_norm": 0.48742157220840454, + "learning_rate": 0.00010813953488372091, + "loss": 0.1965, + "step": 725 + }, + { + "epoch": 1.2872340425531914, + "grad_norm": 0.30468684434890747, + "learning_rate": 0.00010787119856887298, + "loss": 0.1196, + "step": 726 + }, + { + "epoch": 1.2890070921985815, + "grad_norm": 0.33084604144096375, + "learning_rate": 0.00010760286225402503, + "loss": 0.124, + "step": 727 + }, + { + "epoch": 1.2907801418439715, + "grad_norm": 0.34763771295547485, + "learning_rate": 0.0001073345259391771, + "loss": 0.1066, + "step": 728 + }, + { + "epoch": 1.2925531914893618, + "grad_norm": 0.38675862550735474, + "learning_rate": 0.00010706618962432915, + "loss": 0.1452, + "step": 729 + }, + { + "epoch": 1.2943262411347518, + "grad_norm": 0.5646600127220154, + 
"learning_rate": 0.00010679785330948121, + "loss": 0.1891, + "step": 730 + }, + { + "epoch": 1.2960992907801419, + "grad_norm": 0.35108643770217896, + "learning_rate": 0.00010652951699463326, + "loss": 0.1223, + "step": 731 + }, + { + "epoch": 1.297872340425532, + "grad_norm": 0.3707665801048279, + "learning_rate": 0.00010626118067978532, + "loss": 0.1441, + "step": 732 + }, + { + "epoch": 1.299645390070922, + "grad_norm": 0.27179795503616333, + "learning_rate": 0.00010599284436493737, + "loss": 0.0839, + "step": 733 + }, + { + "epoch": 1.301418439716312, + "grad_norm": 0.37396109104156494, + "learning_rate": 0.00010572450805008943, + "loss": 0.1495, + "step": 734 + }, + { + "epoch": 1.3031914893617023, + "grad_norm": 0.40632113814353943, + "learning_rate": 0.00010545617173524151, + "loss": 0.1374, + "step": 735 + }, + { + "epoch": 1.3049645390070923, + "grad_norm": 0.31261610984802246, + "learning_rate": 0.00010518783542039356, + "loss": 0.1326, + "step": 736 + }, + { + "epoch": 1.3067375886524824, + "grad_norm": 0.4085939824581146, + "learning_rate": 0.00010491949910554562, + "loss": 0.1394, + "step": 737 + }, + { + "epoch": 1.3085106382978724, + "grad_norm": 0.2227245569229126, + "learning_rate": 0.00010465116279069767, + "loss": 0.0973, + "step": 738 + }, + { + "epoch": 1.3102836879432624, + "grad_norm": 0.21776029467582703, + "learning_rate": 0.00010438282647584972, + "loss": 0.0887, + "step": 739 + }, + { + "epoch": 1.3120567375886525, + "grad_norm": 0.4079020619392395, + "learning_rate": 0.00010411449016100178, + "loss": 0.1516, + "step": 740 + }, + { + "epoch": 1.3138297872340425, + "grad_norm": 0.34146982431411743, + "learning_rate": 0.00010384615384615383, + "loss": 0.1185, + "step": 741 + }, + { + "epoch": 1.3156028368794326, + "grad_norm": 0.45869576930999756, + "learning_rate": 0.00010357781753130589, + "loss": 0.1467, + "step": 742 + }, + { + "epoch": 1.3173758865248226, + "grad_norm": 0.23303550481796265, + "learning_rate": 0.00010330948121645794, + "loss": 0.0855, + "step": 743 + }, + { + "epoch": 1.3191489361702127, + "grad_norm": 0.4048026502132416, + "learning_rate": 0.00010304114490161, + "loss": 0.1394, + "step": 744 + }, + { + "epoch": 1.3209219858156027, + "grad_norm": 0.21484145522117615, + "learning_rate": 0.00010277280858676208, + "loss": 0.0918, + "step": 745 + }, + { + "epoch": 1.322695035460993, + "grad_norm": 0.3225015103816986, + "learning_rate": 0.00010250447227191413, + "loss": 0.1408, + "step": 746 + }, + { + "epoch": 1.324468085106383, + "grad_norm": 0.263639360666275, + "learning_rate": 0.00010223613595706619, + "loss": 0.0796, + "step": 747 + }, + { + "epoch": 1.326241134751773, + "grad_norm": 0.5266395211219788, + "learning_rate": 0.00010196779964221824, + "loss": 0.1809, + "step": 748 + }, + { + "epoch": 1.3280141843971631, + "grad_norm": 0.30444666743278503, + "learning_rate": 0.00010169946332737029, + "loss": 0.1294, + "step": 749 + }, + { + "epoch": 1.3297872340425532, + "grad_norm": 0.31879284977912903, + "learning_rate": 0.00010143112701252235, + "loss": 0.1133, + "step": 750 + }, + { + "epoch": 1.3315602836879432, + "grad_norm": 0.5735284686088562, + "learning_rate": 0.0001011627906976744, + "loss": 0.1922, + "step": 751 + }, + { + "epoch": 1.3333333333333333, + "grad_norm": 0.27157899737358093, + "learning_rate": 0.00010089445438282646, + "loss": 0.1064, + "step": 752 + }, + { + "epoch": 1.3351063829787235, + "grad_norm": 0.3107971251010895, + "learning_rate": 0.00010062611806797851, + "loss": 0.1307, + "step": 753 + }, + { + "epoch": 
1.3368794326241136, + "grad_norm": 0.3065674304962158, + "learning_rate": 0.00010035778175313059, + "loss": 0.114, + "step": 754 + }, + { + "epoch": 1.3386524822695036, + "grad_norm": 0.3299253582954407, + "learning_rate": 0.00010008944543828265, + "loss": 0.1296, + "step": 755 + }, + { + "epoch": 1.3404255319148937, + "grad_norm": 0.2204119861125946, + "learning_rate": 9.98211091234347e-05, + "loss": 0.0856, + "step": 756 + }, + { + "epoch": 1.3421985815602837, + "grad_norm": 0.27640876173973083, + "learning_rate": 9.955277280858676e-05, + "loss": 0.1025, + "step": 757 + }, + { + "epoch": 1.3439716312056738, + "grad_norm": 0.2008877545595169, + "learning_rate": 9.928443649373881e-05, + "loss": 0.0698, + "step": 758 + }, + { + "epoch": 1.3457446808510638, + "grad_norm": 0.23517268896102905, + "learning_rate": 9.901610017889086e-05, + "loss": 0.0912, + "step": 759 + }, + { + "epoch": 1.3475177304964538, + "grad_norm": 0.24769139289855957, + "learning_rate": 9.874776386404292e-05, + "loss": 0.092, + "step": 760 + }, + { + "epoch": 1.349290780141844, + "grad_norm": 0.25975918769836426, + "learning_rate": 9.847942754919497e-05, + "loss": 0.0725, + "step": 761 + }, + { + "epoch": 1.351063829787234, + "grad_norm": 0.29222846031188965, + "learning_rate": 9.821109123434703e-05, + "loss": 0.0898, + "step": 762 + }, + { + "epoch": 1.352836879432624, + "grad_norm": 0.14782114326953888, + "learning_rate": 9.794275491949908e-05, + "loss": 0.0427, + "step": 763 + }, + { + "epoch": 1.3546099290780143, + "grad_norm": 0.18819881975650787, + "learning_rate": 9.767441860465116e-05, + "loss": 0.0406, + "step": 764 + }, + { + "epoch": 1.3563829787234043, + "grad_norm": 0.6308172345161438, + "learning_rate": 9.740608228980322e-05, + "loss": 0.2485, + "step": 765 + }, + { + "epoch": 1.3581560283687943, + "grad_norm": 0.4238440990447998, + "learning_rate": 9.713774597495527e-05, + "loss": 0.1535, + "step": 766 + }, + { + "epoch": 1.3599290780141844, + "grad_norm": 0.6112564206123352, + "learning_rate": 9.686940966010733e-05, + "loss": 0.2244, + "step": 767 + }, + { + "epoch": 1.3617021276595744, + "grad_norm": 0.3402283489704132, + "learning_rate": 9.660107334525938e-05, + "loss": 0.1037, + "step": 768 + }, + { + "epoch": 1.3634751773049645, + "grad_norm": 0.43715304136276245, + "learning_rate": 9.633273703041145e-05, + "loss": 0.1632, + "step": 769 + }, + { + "epoch": 1.3652482269503547, + "grad_norm": 0.30286452174186707, + "learning_rate": 9.60644007155635e-05, + "loss": 0.096, + "step": 770 + }, + { + "epoch": 1.3670212765957448, + "grad_norm": 0.23162096738815308, + "learning_rate": 9.579606440071554e-05, + "loss": 0.0945, + "step": 771 + }, + { + "epoch": 1.3687943262411348, + "grad_norm": 0.4055822789669037, + "learning_rate": 9.55277280858676e-05, + "loss": 0.1258, + "step": 772 + }, + { + "epoch": 1.3705673758865249, + "grad_norm": 0.37254348397254944, + "learning_rate": 9.525939177101968e-05, + "loss": 0.1608, + "step": 773 + }, + { + "epoch": 1.372340425531915, + "grad_norm": 0.2781223952770233, + "learning_rate": 9.499105545617173e-05, + "loss": 0.0792, + "step": 774 + }, + { + "epoch": 1.374113475177305, + "grad_norm": 0.5268644094467163, + "learning_rate": 9.47227191413238e-05, + "loss": 0.1891, + "step": 775 + }, + { + "epoch": 1.375886524822695, + "grad_norm": 0.27493152022361755, + "learning_rate": 9.445438282647584e-05, + "loss": 0.1144, + "step": 776 + }, + { + "epoch": 1.377659574468085, + "grad_norm": 0.30483171343803406, + "learning_rate": 9.41860465116279e-05, + "loss": 0.1059, + "step": 
777 + }, + { + "epoch": 1.3794326241134751, + "grad_norm": 0.35939520597457886, + "learning_rate": 9.391771019677995e-05, + "loss": 0.1262, + "step": 778 + }, + { + "epoch": 1.3812056737588652, + "grad_norm": 0.37438303232192993, + "learning_rate": 9.364937388193202e-05, + "loss": 0.1527, + "step": 779 + }, + { + "epoch": 1.3829787234042552, + "grad_norm": 0.28630080819129944, + "learning_rate": 9.338103756708407e-05, + "loss": 0.1015, + "step": 780 + }, + { + "epoch": 1.3847517730496453, + "grad_norm": 0.2913484275341034, + "learning_rate": 9.311270125223611e-05, + "loss": 0.0807, + "step": 781 + }, + { + "epoch": 1.3865248226950355, + "grad_norm": 0.3168102502822876, + "learning_rate": 9.284436493738818e-05, + "loss": 0.1148, + "step": 782 + }, + { + "epoch": 1.3882978723404256, + "grad_norm": 0.38331836462020874, + "learning_rate": 9.257602862254025e-05, + "loss": 0.176, + "step": 783 + }, + { + "epoch": 1.3900709219858156, + "grad_norm": 0.3016497492790222, + "learning_rate": 9.23076923076923e-05, + "loss": 0.1292, + "step": 784 + }, + { + "epoch": 1.3918439716312057, + "grad_norm": 0.38113200664520264, + "learning_rate": 9.203935599284437e-05, + "loss": 0.1705, + "step": 785 + }, + { + "epoch": 1.3936170212765957, + "grad_norm": 0.3391113877296448, + "learning_rate": 9.177101967799641e-05, + "loss": 0.1164, + "step": 786 + }, + { + "epoch": 1.3953900709219857, + "grad_norm": 0.36019831895828247, + "learning_rate": 9.150268336314848e-05, + "loss": 0.1608, + "step": 787 + }, + { + "epoch": 1.397163120567376, + "grad_norm": 0.5200515389442444, + "learning_rate": 9.123434704830053e-05, + "loss": 0.1884, + "step": 788 + }, + { + "epoch": 1.398936170212766, + "grad_norm": 0.32156822085380554, + "learning_rate": 9.096601073345259e-05, + "loss": 0.1317, + "step": 789 + }, + { + "epoch": 1.400709219858156, + "grad_norm": 0.3899897634983063, + "learning_rate": 9.069767441860464e-05, + "loss": 0.1483, + "step": 790 + }, + { + "epoch": 1.4024822695035462, + "grad_norm": 0.2134401947259903, + "learning_rate": 9.042933810375669e-05, + "loss": 0.0794, + "step": 791 + }, + { + "epoch": 1.4042553191489362, + "grad_norm": 0.5371233224868774, + "learning_rate": 9.016100178890876e-05, + "loss": 0.1098, + "step": 792 + }, + { + "epoch": 1.4060283687943262, + "grad_norm": 0.23417764902114868, + "learning_rate": 8.989266547406083e-05, + "loss": 0.0846, + "step": 793 + }, + { + "epoch": 1.4078014184397163, + "grad_norm": 0.32313209772109985, + "learning_rate": 8.962432915921287e-05, + "loss": 0.1005, + "step": 794 + }, + { + "epoch": 1.4095744680851063, + "grad_norm": 0.3287942111492157, + "learning_rate": 8.935599284436494e-05, + "loss": 0.138, + "step": 795 + }, + { + "epoch": 1.4113475177304964, + "grad_norm": 0.4062165915966034, + "learning_rate": 8.908765652951699e-05, + "loss": 0.1219, + "step": 796 + }, + { + "epoch": 1.4131205673758864, + "grad_norm": 0.25136035680770874, + "learning_rate": 8.881932021466905e-05, + "loss": 0.0987, + "step": 797 + }, + { + "epoch": 1.4148936170212765, + "grad_norm": 0.3869902193546295, + "learning_rate": 8.85509838998211e-05, + "loss": 0.151, + "step": 798 + }, + { + "epoch": 1.4166666666666667, + "grad_norm": 0.22914816439151764, + "learning_rate": 8.828264758497316e-05, + "loss": 0.0755, + "step": 799 + }, + { + "epoch": 1.4184397163120568, + "grad_norm": 0.32536619901657104, + "learning_rate": 8.801431127012521e-05, + "loss": 0.1082, + "step": 800 + }, + { + "epoch": 1.4202127659574468, + "grad_norm": 0.19335246086120605, + "learning_rate": 8.774597495527727e-05, + 
"loss": 0.0669, + "step": 801 + }, + { + "epoch": 1.4219858156028369, + "grad_norm": 0.2427796870470047, + "learning_rate": 8.747763864042933e-05, + "loss": 0.0848, + "step": 802 + }, + { + "epoch": 1.423758865248227, + "grad_norm": 0.2558480203151703, + "learning_rate": 8.72093023255814e-05, + "loss": 0.0879, + "step": 803 + }, + { + "epoch": 1.425531914893617, + "grad_norm": 0.2617630064487457, + "learning_rate": 8.694096601073345e-05, + "loss": 0.0849, + "step": 804 + }, + { + "epoch": 1.427304964539007, + "grad_norm": 0.2902472913265228, + "learning_rate": 8.667262969588551e-05, + "loss": 0.091, + "step": 805 + }, + { + "epoch": 1.4290780141843973, + "grad_norm": 0.2947092652320862, + "learning_rate": 8.640429338103756e-05, + "loss": 0.1139, + "step": 806 + }, + { + "epoch": 1.4308510638297873, + "grad_norm": 0.23282337188720703, + "learning_rate": 8.613595706618962e-05, + "loss": 0.0963, + "step": 807 + }, + { + "epoch": 1.4326241134751774, + "grad_norm": 0.37111896276474, + "learning_rate": 8.586762075134167e-05, + "loss": 0.1556, + "step": 808 + }, + { + "epoch": 1.4343971631205674, + "grad_norm": 0.2589118778705597, + "learning_rate": 8.559928443649373e-05, + "loss": 0.0903, + "step": 809 + }, + { + "epoch": 1.4361702127659575, + "grad_norm": 0.20402726531028748, + "learning_rate": 8.533094812164578e-05, + "loss": 0.0566, + "step": 810 + }, + { + "epoch": 1.4379432624113475, + "grad_norm": 0.18823835253715515, + "learning_rate": 8.506261180679786e-05, + "loss": 0.0861, + "step": 811 + }, + { + "epoch": 1.4397163120567376, + "grad_norm": 0.27323687076568604, + "learning_rate": 8.47942754919499e-05, + "loss": 0.1117, + "step": 812 + }, + { + "epoch": 1.4414893617021276, + "grad_norm": 0.18384765088558197, + "learning_rate": 8.452593917710197e-05, + "loss": 0.0429, + "step": 813 + }, + { + "epoch": 1.4432624113475176, + "grad_norm": 0.202216237783432, + "learning_rate": 8.425760286225402e-05, + "loss": 0.0426, + "step": 814 + }, + { + "epoch": 1.4450354609929077, + "grad_norm": 0.4300391674041748, + "learning_rate": 8.398926654740608e-05, + "loss": 0.1519, + "step": 815 + }, + { + "epoch": 1.4468085106382977, + "grad_norm": 0.4760010242462158, + "learning_rate": 8.372093023255813e-05, + "loss": 0.1508, + "step": 816 + }, + { + "epoch": 1.448581560283688, + "grad_norm": 0.2631239593029022, + "learning_rate": 8.345259391771019e-05, + "loss": 0.0754, + "step": 817 + }, + { + "epoch": 1.450354609929078, + "grad_norm": 0.37113967537879944, + "learning_rate": 8.318425760286224e-05, + "loss": 0.128, + "step": 818 + }, + { + "epoch": 1.452127659574468, + "grad_norm": 0.21397681534290314, + "learning_rate": 8.29159212880143e-05, + "loss": 0.0823, + "step": 819 + }, + { + "epoch": 1.4539007092198581, + "grad_norm": 0.27529531717300415, + "learning_rate": 8.264758497316635e-05, + "loss": 0.0967, + "step": 820 + }, + { + "epoch": 1.4556737588652482, + "grad_norm": 0.5994647741317749, + "learning_rate": 8.237924865831843e-05, + "loss": 0.1117, + "step": 821 + }, + { + "epoch": 1.4574468085106382, + "grad_norm": 0.22065456211566925, + "learning_rate": 8.211091234347048e-05, + "loss": 0.0928, + "step": 822 + }, + { + "epoch": 1.4592198581560285, + "grad_norm": 0.6050121188163757, + "learning_rate": 8.184257602862254e-05, + "loss": 0.2078, + "step": 823 + }, + { + "epoch": 1.4609929078014185, + "grad_norm": 0.763676106929779, + "learning_rate": 8.157423971377459e-05, + "loss": 0.2562, + "step": 824 + }, + { + "epoch": 1.4627659574468086, + "grad_norm": 0.2788752019405365, + "learning_rate": 
8.130590339892665e-05, + "loss": 0.1154, + "step": 825 + }, + { + "epoch": 1.4645390070921986, + "grad_norm": 0.4570877254009247, + "learning_rate": 8.10375670840787e-05, + "loss": 0.1463, + "step": 826 + }, + { + "epoch": 1.4663120567375887, + "grad_norm": 0.40701577067375183, + "learning_rate": 8.076923076923076e-05, + "loss": 0.1529, + "step": 827 + }, + { + "epoch": 1.4680851063829787, + "grad_norm": 0.2114609330892563, + "learning_rate": 8.050089445438281e-05, + "loss": 0.0828, + "step": 828 + }, + { + "epoch": 1.4698581560283688, + "grad_norm": 0.38288062810897827, + "learning_rate": 8.023255813953487e-05, + "loss": 0.1594, + "step": 829 + }, + { + "epoch": 1.4716312056737588, + "grad_norm": 0.26571860909461975, + "learning_rate": 7.996422182468694e-05, + "loss": 0.1094, + "step": 830 + }, + { + "epoch": 1.4734042553191489, + "grad_norm": 0.41206905245780945, + "learning_rate": 7.9695885509839e-05, + "loss": 0.1454, + "step": 831 + }, + { + "epoch": 1.475177304964539, + "grad_norm": 0.26968613266944885, + "learning_rate": 7.942754919499105e-05, + "loss": 0.1204, + "step": 832 + }, + { + "epoch": 1.476950354609929, + "grad_norm": 0.3330513834953308, + "learning_rate": 7.915921288014311e-05, + "loss": 0.1212, + "step": 833 + }, + { + "epoch": 1.4787234042553192, + "grad_norm": 0.3439812660217285, + "learning_rate": 7.889087656529516e-05, + "loss": 0.151, + "step": 834 + }, + { + "epoch": 1.4804964539007093, + "grad_norm": 0.34060341119766235, + "learning_rate": 7.862254025044722e-05, + "loss": 0.0984, + "step": 835 + }, + { + "epoch": 1.4822695035460993, + "grad_norm": 0.391315758228302, + "learning_rate": 7.835420393559927e-05, + "loss": 0.1381, + "step": 836 + }, + { + "epoch": 1.4840425531914894, + "grad_norm": 0.27594539523124695, + "learning_rate": 7.808586762075133e-05, + "loss": 0.0773, + "step": 837 + }, + { + "epoch": 1.4858156028368794, + "grad_norm": 0.2562049925327301, + "learning_rate": 7.781753130590338e-05, + "loss": 0.085, + "step": 838 + }, + { + "epoch": 1.4875886524822695, + "grad_norm": 0.5238338112831116, + "learning_rate": 7.754919499105545e-05, + "loss": 0.1607, + "step": 839 + }, + { + "epoch": 1.4893617021276595, + "grad_norm": 0.35886940360069275, + "learning_rate": 7.728085867620751e-05, + "loss": 0.1452, + "step": 840 + }, + { + "epoch": 1.4911347517730498, + "grad_norm": 0.2899888753890991, + "learning_rate": 7.701252236135957e-05, + "loss": 0.0896, + "step": 841 + }, + { + "epoch": 1.4929078014184398, + "grad_norm": 0.25350284576416016, + "learning_rate": 7.674418604651162e-05, + "loss": 0.1055, + "step": 842 + }, + { + "epoch": 1.4946808510638299, + "grad_norm": 0.2904045283794403, + "learning_rate": 7.647584973166368e-05, + "loss": 0.1178, + "step": 843 + }, + { + "epoch": 1.49645390070922, + "grad_norm": 0.39105409383773804, + "learning_rate": 7.620751341681573e-05, + "loss": 0.1772, + "step": 844 + }, + { + "epoch": 1.49822695035461, + "grad_norm": 0.20958948135375977, + "learning_rate": 7.59391771019678e-05, + "loss": 0.0848, + "step": 845 + }, + { + "epoch": 1.5, + "grad_norm": 0.2883208990097046, + "learning_rate": 7.567084078711984e-05, + "loss": 0.1166, + "step": 846 + }, + { + "epoch": 1.50177304964539, + "grad_norm": 0.2059636414051056, + "learning_rate": 7.54025044722719e-05, + "loss": 0.0782, + "step": 847 + }, + { + "epoch": 1.50354609929078, + "grad_norm": 0.30345362424850464, + "learning_rate": 7.513416815742396e-05, + "loss": 0.1018, + "step": 848 + }, + { + "epoch": 1.5053191489361701, + "grad_norm": 0.27595144510269165, + 
"learning_rate": 7.486583184257602e-05, + "loss": 0.116, + "step": 849 + }, + { + "epoch": 1.5070921985815602, + "grad_norm": 0.2541591227054596, + "learning_rate": 7.459749552772808e-05, + "loss": 0.1124, + "step": 850 + }, + { + "epoch": 1.5088652482269502, + "grad_norm": 0.2614005506038666, + "learning_rate": 7.432915921288014e-05, + "loss": 0.0797, + "step": 851 + }, + { + "epoch": 1.5106382978723403, + "grad_norm": 0.29117223620414734, + "learning_rate": 7.406082289803219e-05, + "loss": 0.1023, + "step": 852 + }, + { + "epoch": 1.5124113475177305, + "grad_norm": 0.18029992282390594, + "learning_rate": 7.379248658318425e-05, + "loss": 0.0695, + "step": 853 + }, + { + "epoch": 1.5141843971631206, + "grad_norm": 0.2022331953048706, + "learning_rate": 7.35241502683363e-05, + "loss": 0.0632, + "step": 854 + }, + { + "epoch": 1.5159574468085106, + "grad_norm": 0.1515171378850937, + "learning_rate": 7.325581395348837e-05, + "loss": 0.0447, + "step": 855 + }, + { + "epoch": 1.5177304964539007, + "grad_norm": 0.14520789682865143, + "learning_rate": 7.298747763864043e-05, + "loss": 0.0454, + "step": 856 + }, + { + "epoch": 1.5195035460992907, + "grad_norm": 0.18276812136173248, + "learning_rate": 7.271914132379248e-05, + "loss": 0.0651, + "step": 857 + }, + { + "epoch": 1.521276595744681, + "grad_norm": 0.2109200656414032, + "learning_rate": 7.245080500894454e-05, + "loss": 0.0658, + "step": 858 + }, + { + "epoch": 1.523049645390071, + "grad_norm": 0.3034365475177765, + "learning_rate": 7.218246869409659e-05, + "loss": 0.1011, + "step": 859 + }, + { + "epoch": 1.524822695035461, + "grad_norm": 0.3006940484046936, + "learning_rate": 7.191413237924865e-05, + "loss": 0.1229, + "step": 860 + }, + { + "epoch": 1.5265957446808511, + "grad_norm": 0.23068998754024506, + "learning_rate": 7.164579606440071e-05, + "loss": 0.0839, + "step": 861 + }, + { + "epoch": 1.5283687943262412, + "grad_norm": 0.15325228869915009, + "learning_rate": 7.137745974955276e-05, + "loss": 0.0579, + "step": 862 + }, + { + "epoch": 1.5301418439716312, + "grad_norm": 0.22034241259098053, + "learning_rate": 7.110912343470483e-05, + "loss": 0.0681, + "step": 863 + }, + { + "epoch": 1.5319148936170213, + "grad_norm": 0.16374541819095612, + "learning_rate": 7.084078711985688e-05, + "loss": 0.0401, + "step": 864 + }, + { + "epoch": 1.5336879432624113, + "grad_norm": 0.7110052108764648, + "learning_rate": 7.057245080500894e-05, + "loss": 0.2821, + "step": 865 + }, + { + "epoch": 1.5354609929078014, + "grad_norm": 0.5873008966445923, + "learning_rate": 7.0304114490161e-05, + "loss": 0.2055, + "step": 866 + }, + { + "epoch": 1.5372340425531914, + "grad_norm": 0.40463680028915405, + "learning_rate": 7.003577817531305e-05, + "loss": 0.1514, + "step": 867 + }, + { + "epoch": 1.5390070921985815, + "grad_norm": 0.411138653755188, + "learning_rate": 6.976744186046511e-05, + "loss": 0.1374, + "step": 868 + }, + { + "epoch": 1.5407801418439715, + "grad_norm": 0.3094438910484314, + "learning_rate": 6.949910554561716e-05, + "loss": 0.1114, + "step": 869 + }, + { + "epoch": 1.5425531914893615, + "grad_norm": 0.3126083016395569, + "learning_rate": 6.923076923076922e-05, + "loss": 0.104, + "step": 870 + }, + { + "epoch": 1.5443262411347518, + "grad_norm": 0.2306557297706604, + "learning_rate": 6.896243291592129e-05, + "loss": 0.0824, + "step": 871 + }, + { + "epoch": 1.5460992907801419, + "grad_norm": 1.1792900562286377, + "learning_rate": 6.869409660107334e-05, + "loss": 0.2877, + "step": 872 + }, + { + "epoch": 1.547872340425532, + "grad_norm": 
0.25982725620269775, + "learning_rate": 6.84257602862254e-05, + "loss": 0.1083, + "step": 873 + }, + { + "epoch": 1.549645390070922, + "grad_norm": 0.33999934792518616, + "learning_rate": 6.815742397137745e-05, + "loss": 0.1439, + "step": 874 + }, + { + "epoch": 1.5514184397163122, + "grad_norm": 0.2777253985404968, + "learning_rate": 6.788908765652951e-05, + "loss": 0.1171, + "step": 875 + }, + { + "epoch": 1.5531914893617023, + "grad_norm": 0.29223597049713135, + "learning_rate": 6.762075134168157e-05, + "loss": 0.0913, + "step": 876 + }, + { + "epoch": 1.5549645390070923, + "grad_norm": 0.3456422984600067, + "learning_rate": 6.735241502683362e-05, + "loss": 0.1324, + "step": 877 + }, + { + "epoch": 1.5567375886524824, + "grad_norm": 0.27550065517425537, + "learning_rate": 6.708407871198568e-05, + "loss": 0.1061, + "step": 878 + }, + { + "epoch": 1.5585106382978724, + "grad_norm": 0.23130980134010315, + "learning_rate": 6.681574239713773e-05, + "loss": 0.0879, + "step": 879 + }, + { + "epoch": 1.5602836879432624, + "grad_norm": 0.2471001297235489, + "learning_rate": 6.65474060822898e-05, + "loss": 0.1028, + "step": 880 + }, + { + "epoch": 1.5620567375886525, + "grad_norm": 0.4121532440185547, + "learning_rate": 6.627906976744186e-05, + "loss": 0.1524, + "step": 881 + }, + { + "epoch": 1.5638297872340425, + "grad_norm": 0.34416720271110535, + "learning_rate": 6.601073345259391e-05, + "loss": 0.153, + "step": 882 + }, + { + "epoch": 1.5656028368794326, + "grad_norm": 0.37047117948532104, + "learning_rate": 6.574239713774597e-05, + "loss": 0.1382, + "step": 883 + }, + { + "epoch": 1.5673758865248226, + "grad_norm": 0.25409069657325745, + "learning_rate": 6.547406082289802e-05, + "loss": 0.0929, + "step": 884 + }, + { + "epoch": 1.5691489361702127, + "grad_norm": 0.16979749500751495, + "learning_rate": 6.520572450805008e-05, + "loss": 0.0661, + "step": 885 + }, + { + "epoch": 1.5709219858156027, + "grad_norm": 0.24894487857818604, + "learning_rate": 6.493738819320214e-05, + "loss": 0.0879, + "step": 886 + }, + { + "epoch": 1.5726950354609928, + "grad_norm": 0.23900751769542694, + "learning_rate": 6.466905187835419e-05, + "loss": 0.083, + "step": 887 + }, + { + "epoch": 1.574468085106383, + "grad_norm": 0.21001844108104706, + "learning_rate": 6.440071556350626e-05, + "loss": 0.0846, + "step": 888 + }, + { + "epoch": 1.576241134751773, + "grad_norm": 0.4241371750831604, + "learning_rate": 6.413237924865832e-05, + "loss": 0.1436, + "step": 889 + }, + { + "epoch": 1.5780141843971631, + "grad_norm": 0.36801424622535706, + "learning_rate": 6.386404293381037e-05, + "loss": 0.144, + "step": 890 + }, + { + "epoch": 1.5797872340425532, + "grad_norm": 0.3026575744152069, + "learning_rate": 6.359570661896243e-05, + "loss": 0.1313, + "step": 891 + }, + { + "epoch": 1.5815602836879432, + "grad_norm": 0.6115602254867554, + "learning_rate": 6.332737030411448e-05, + "loss": 0.2098, + "step": 892 + }, + { + "epoch": 1.5833333333333335, + "grad_norm": 0.21278461813926697, + "learning_rate": 6.305903398926654e-05, + "loss": 0.0877, + "step": 893 + }, + { + "epoch": 1.5851063829787235, + "grad_norm": 0.1702726036310196, + "learning_rate": 6.27906976744186e-05, + "loss": 0.0776, + "step": 894 + }, + { + "epoch": 1.5868794326241136, + "grad_norm": 0.2994159758090973, + "learning_rate": 6.252236135957065e-05, + "loss": 0.1155, + "step": 895 + }, + { + "epoch": 1.5886524822695036, + "grad_norm": 0.19650763273239136, + "learning_rate": 6.225402504472272e-05, + "loss": 0.0779, + "step": 896 + }, + { + "epoch": 
1.5904255319148937, + "grad_norm": 0.2170541137456894, + "learning_rate": 6.198568872987476e-05, + "loss": 0.0598, + "step": 897 + }, + { + "epoch": 1.5921985815602837, + "grad_norm": 0.19491881132125854, + "learning_rate": 6.171735241502683e-05, + "loss": 0.0681, + "step": 898 + }, + { + "epoch": 1.5939716312056738, + "grad_norm": 0.16843058168888092, + "learning_rate": 6.144901610017889e-05, + "loss": 0.0564, + "step": 899 + }, + { + "epoch": 1.5957446808510638, + "grad_norm": 0.2948700487613678, + "learning_rate": 6.118067978533094e-05, + "loss": 0.0942, + "step": 900 + }, + { + "epoch": 1.5975177304964538, + "grad_norm": 0.20082975924015045, + "learning_rate": 6.0912343470483e-05, + "loss": 0.0827, + "step": 901 + }, + { + "epoch": 1.599290780141844, + "grad_norm": 0.11655837297439575, + "learning_rate": 6.064400715563506e-05, + "loss": 0.0436, + "step": 902 + }, + { + "epoch": 1.601063829787234, + "grad_norm": 0.3066547214984894, + "learning_rate": 6.037567084078712e-05, + "loss": 0.1092, + "step": 903 + }, + { + "epoch": 1.602836879432624, + "grad_norm": 0.14038467407226562, + "learning_rate": 6.0107334525939175e-05, + "loss": 0.0393, + "step": 904 + }, + { + "epoch": 1.602836879432624, + "eval_loss": 0.05965956300497055, + "eval_runtime": 47.1387, + "eval_samples_per_second": 2.673, + "eval_steps_per_second": 2.673, + "step": 904 + }, + { + "epoch": 1.604609929078014, + "grad_norm": 0.20005856454372406, + "learning_rate": 5.983899821109123e-05, + "loss": 0.0717, + "step": 905 + }, + { + "epoch": 1.6063829787234043, + "grad_norm": 0.16580653190612793, + "learning_rate": 5.957066189624329e-05, + "loss": 0.0669, + "step": 906 + }, + { + "epoch": 1.6081560283687943, + "grad_norm": 0.2209959626197815, + "learning_rate": 5.930232558139534e-05, + "loss": 0.0829, + "step": 907 + }, + { + "epoch": 1.6099290780141844, + "grad_norm": 0.2260005921125412, + "learning_rate": 5.9033989266547405e-05, + "loss": 0.0732, + "step": 908 + }, + { + "epoch": 1.6117021276595744, + "grad_norm": 0.5214055180549622, + "learning_rate": 5.876565295169946e-05, + "loss": 0.1302, + "step": 909 + }, + { + "epoch": 1.6134751773049647, + "grad_norm": 0.20019935071468353, + "learning_rate": 5.849731663685152e-05, + "loss": 0.0535, + "step": 910 + }, + { + "epoch": 1.6152482269503547, + "grad_norm": 0.24594898521900177, + "learning_rate": 5.822898032200357e-05, + "loss": 0.0799, + "step": 911 + }, + { + "epoch": 1.6170212765957448, + "grad_norm": 0.19057996571063995, + "learning_rate": 5.796064400715563e-05, + "loss": 0.0338, + "step": 912 + }, + { + "epoch": 1.6187943262411348, + "grad_norm": 0.16017338633537292, + "learning_rate": 5.769230769230769e-05, + "loss": 0.0698, + "step": 913 + }, + { + "epoch": 1.6205673758865249, + "grad_norm": 0.16351930797100067, + "learning_rate": 5.742397137745975e-05, + "loss": 0.033, + "step": 914 + }, + { + "epoch": 1.622340425531915, + "grad_norm": 0.4897412657737732, + "learning_rate": 5.71556350626118e-05, + "loss": 0.2265, + "step": 915 + }, + { + "epoch": 1.624113475177305, + "grad_norm": 0.546875, + "learning_rate": 5.688729874776386e-05, + "loss": 0.2423, + "step": 916 + }, + { + "epoch": 1.625886524822695, + "grad_norm": 0.3986557722091675, + "learning_rate": 5.6618962432915914e-05, + "loss": 0.1331, + "step": 917 + }, + { + "epoch": 1.627659574468085, + "grad_norm": 0.5060171484947205, + "learning_rate": 5.635062611806798e-05, + "loss": 0.1931, + "step": 918 + }, + { + "epoch": 1.6294326241134751, + "grad_norm": 0.3128643035888672, + "learning_rate": 5.608228980322003e-05, 
+ "loss": 0.1167, + "step": 919 + }, + { + "epoch": 1.6312056737588652, + "grad_norm": 0.42538610100746155, + "learning_rate": 5.581395348837209e-05, + "loss": 0.1636, + "step": 920 + }, + { + "epoch": 1.6329787234042552, + "grad_norm": 0.28647974133491516, + "learning_rate": 5.5545617173524144e-05, + "loss": 0.0699, + "step": 921 + }, + { + "epoch": 1.6347517730496453, + "grad_norm": 0.39569562673568726, + "learning_rate": 5.527728085867621e-05, + "loss": 0.2067, + "step": 922 + }, + { + "epoch": 1.6365248226950353, + "grad_norm": 0.2089187651872635, + "learning_rate": 5.500894454382826e-05, + "loss": 0.0992, + "step": 923 + }, + { + "epoch": 1.6382978723404256, + "grad_norm": 0.24980148673057556, + "learning_rate": 5.474060822898032e-05, + "loss": 0.0856, + "step": 924 + }, + { + "epoch": 1.6400709219858156, + "grad_norm": 0.2893138825893402, + "learning_rate": 5.4472271914132374e-05, + "loss": 0.124, + "step": 925 + }, + { + "epoch": 1.6418439716312057, + "grad_norm": 0.3664391338825226, + "learning_rate": 5.420393559928443e-05, + "loss": 0.1636, + "step": 926 + }, + { + "epoch": 1.6436170212765957, + "grad_norm": 0.31083714962005615, + "learning_rate": 5.393559928443649e-05, + "loss": 0.1389, + "step": 927 + }, + { + "epoch": 1.645390070921986, + "grad_norm": 0.335844486951828, + "learning_rate": 5.366726296958855e-05, + "loss": 0.1405, + "step": 928 + }, + { + "epoch": 1.647163120567376, + "grad_norm": 0.6708166599273682, + "learning_rate": 5.3398926654740604e-05, + "loss": 0.1981, + "step": 929 + }, + { + "epoch": 1.648936170212766, + "grad_norm": 0.3606095612049103, + "learning_rate": 5.313059033989266e-05, + "loss": 0.1331, + "step": 930 + }, + { + "epoch": 1.650709219858156, + "grad_norm": 0.265484094619751, + "learning_rate": 5.2862254025044716e-05, + "loss": 0.1015, + "step": 931 + }, + { + "epoch": 1.6524822695035462, + "grad_norm": 0.2551873028278351, + "learning_rate": 5.259391771019678e-05, + "loss": 0.113, + "step": 932 + }, + { + "epoch": 1.6542553191489362, + "grad_norm": 0.5668807625770569, + "learning_rate": 5.2325581395348834e-05, + "loss": 0.1927, + "step": 933 + }, + { + "epoch": 1.6560283687943262, + "grad_norm": 0.39409738779067993, + "learning_rate": 5.205724508050089e-05, + "loss": 0.1265, + "step": 934 + }, + { + "epoch": 1.6578014184397163, + "grad_norm": 0.3022906184196472, + "learning_rate": 5.1788908765652946e-05, + "loss": 0.1071, + "step": 935 + }, + { + "epoch": 1.6595744680851063, + "grad_norm": 0.24279522895812988, + "learning_rate": 5.1520572450805e-05, + "loss": 0.0784, + "step": 936 + }, + { + "epoch": 1.6613475177304964, + "grad_norm": 0.39648398756980896, + "learning_rate": 5.1252236135957064e-05, + "loss": 0.1585, + "step": 937 + }, + { + "epoch": 1.6631205673758864, + "grad_norm": 0.25539273023605347, + "learning_rate": 5.098389982110912e-05, + "loss": 0.0872, + "step": 938 + }, + { + "epoch": 1.6648936170212765, + "grad_norm": 0.253682404756546, + "learning_rate": 5.0715563506261176e-05, + "loss": 0.0817, + "step": 939 + }, + { + "epoch": 1.6666666666666665, + "grad_norm": 0.19512628018856049, + "learning_rate": 5.044722719141323e-05, + "loss": 0.0768, + "step": 940 + }, + { + "epoch": 1.6684397163120568, + "grad_norm": 0.25534772872924805, + "learning_rate": 5.0178890876565294e-05, + "loss": 0.0944, + "step": 941 + }, + { + "epoch": 1.6702127659574468, + "grad_norm": 0.23525482416152954, + "learning_rate": 4.991055456171735e-05, + "loss": 0.073, + "step": 942 + }, + { + "epoch": 1.6719858156028369, + "grad_norm": 0.2775448262691498, + 
"learning_rate": 4.9642218246869406e-05, + "loss": 0.1318, + "step": 943 + }, + { + "epoch": 1.673758865248227, + "grad_norm": 0.29373791813850403, + "learning_rate": 4.937388193202146e-05, + "loss": 0.1047, + "step": 944 + }, + { + "epoch": 1.675531914893617, + "grad_norm": 0.22552639245986938, + "learning_rate": 4.910554561717352e-05, + "loss": 0.073, + "step": 945 + }, + { + "epoch": 1.6773049645390072, + "grad_norm": 0.2918212413787842, + "learning_rate": 4.883720930232558e-05, + "loss": 0.1008, + "step": 946 + }, + { + "epoch": 1.6790780141843973, + "grad_norm": 0.3461683690547943, + "learning_rate": 4.8568872987477636e-05, + "loss": 0.114, + "step": 947 + }, + { + "epoch": 1.6808510638297873, + "grad_norm": 0.18103665113449097, + "learning_rate": 4.830053667262969e-05, + "loss": 0.0614, + "step": 948 + }, + { + "epoch": 1.6826241134751774, + "grad_norm": 0.28270408511161804, + "learning_rate": 4.803220035778175e-05, + "loss": 0.0943, + "step": 949 + }, + { + "epoch": 1.6843971631205674, + "grad_norm": 0.2964995503425598, + "learning_rate": 4.77638640429338e-05, + "loss": 0.1005, + "step": 950 + }, + { + "epoch": 1.6861702127659575, + "grad_norm": 0.19928471744060516, + "learning_rate": 4.7495527728085866e-05, + "loss": 0.0776, + "step": 951 + }, + { + "epoch": 1.6879432624113475, + "grad_norm": 0.21275044977664948, + "learning_rate": 4.722719141323792e-05, + "loss": 0.0763, + "step": 952 + }, + { + "epoch": 1.6897163120567376, + "grad_norm": 0.13716483116149902, + "learning_rate": 4.695885509838998e-05, + "loss": 0.042, + "step": 953 + }, + { + "epoch": 1.6914893617021276, + "grad_norm": 0.27446210384368896, + "learning_rate": 4.669051878354203e-05, + "loss": 0.1125, + "step": 954 + }, + { + "epoch": 1.6932624113475176, + "grad_norm": 0.17361652851104736, + "learning_rate": 4.642218246869409e-05, + "loss": 0.0656, + "step": 955 + }, + { + "epoch": 1.6950354609929077, + "grad_norm": 0.22053229808807373, + "learning_rate": 4.615384615384615e-05, + "loss": 0.085, + "step": 956 + }, + { + "epoch": 1.6968085106382977, + "grad_norm": 0.2688538730144501, + "learning_rate": 4.588550983899821e-05, + "loss": 0.1084, + "step": 957 + }, + { + "epoch": 1.6985815602836878, + "grad_norm": 0.17203108966350555, + "learning_rate": 4.561717352415026e-05, + "loss": 0.0711, + "step": 958 + }, + { + "epoch": 1.700354609929078, + "grad_norm": 0.2185698002576828, + "learning_rate": 4.534883720930232e-05, + "loss": 0.0906, + "step": 959 + }, + { + "epoch": 1.702127659574468, + "grad_norm": 0.3515457808971405, + "learning_rate": 4.508050089445438e-05, + "loss": 0.1529, + "step": 960 + }, + { + "epoch": 1.7039007092198581, + "grad_norm": 0.1280314177274704, + "learning_rate": 4.481216457960644e-05, + "loss": 0.0425, + "step": 961 + }, + { + "epoch": 1.7056737588652482, + "grad_norm": 0.16565589606761932, + "learning_rate": 4.454382826475849e-05, + "loss": 0.0406, + "step": 962 + }, + { + "epoch": 1.7074468085106385, + "grad_norm": 0.17467603087425232, + "learning_rate": 4.427549194991055e-05, + "loss": 0.0369, + "step": 963 + }, + { + "epoch": 1.7092198581560285, + "grad_norm": 0.17544378340244293, + "learning_rate": 4.4007155635062605e-05, + "loss": 0.0434, + "step": 964 + }, + { + "epoch": 1.7109929078014185, + "grad_norm": 0.42301613092422485, + "learning_rate": 4.373881932021467e-05, + "loss": 0.1475, + "step": 965 + }, + { + "epoch": 1.7127659574468086, + "grad_norm": 0.3776220977306366, + "learning_rate": 4.347048300536672e-05, + "loss": 0.1585, + "step": 966 + }, + { + "epoch": 1.7145390070921986, + 
"grad_norm": 0.35067784786224365, + "learning_rate": 4.320214669051878e-05, + "loss": 0.1329, + "step": 967 + }, + { + "epoch": 1.7163120567375887, + "grad_norm": 0.2998995780944824, + "learning_rate": 4.2933810375670835e-05, + "loss": 0.0946, + "step": 968 + }, + { + "epoch": 1.7180851063829787, + "grad_norm": 0.31156042218208313, + "learning_rate": 4.266547406082289e-05, + "loss": 0.1103, + "step": 969 + }, + { + "epoch": 1.7198581560283688, + "grad_norm": 0.4217228889465332, + "learning_rate": 4.239713774597495e-05, + "loss": 0.1722, + "step": 970 + }, + { + "epoch": 1.7216312056737588, + "grad_norm": 0.4580366015434265, + "learning_rate": 4.212880143112701e-05, + "loss": 0.1871, + "step": 971 + }, + { + "epoch": 1.7234042553191489, + "grad_norm": 0.3506891429424286, + "learning_rate": 4.1860465116279065e-05, + "loss": 0.134, + "step": 972 + }, + { + "epoch": 1.725177304964539, + "grad_norm": 0.5243451595306396, + "learning_rate": 4.159212880143112e-05, + "loss": 0.147, + "step": 973 + }, + { + "epoch": 1.726950354609929, + "grad_norm": 0.3330957889556885, + "learning_rate": 4.1323792486583176e-05, + "loss": 0.1151, + "step": 974 + }, + { + "epoch": 1.728723404255319, + "grad_norm": 0.2872239053249359, + "learning_rate": 4.105545617173524e-05, + "loss": 0.1414, + "step": 975 + }, + { + "epoch": 1.7304964539007093, + "grad_norm": 0.31004229187965393, + "learning_rate": 4.0787119856887295e-05, + "loss": 0.1331, + "step": 976 + }, + { + "epoch": 1.7322695035460993, + "grad_norm": 0.3493654131889343, + "learning_rate": 4.051878354203935e-05, + "loss": 0.1438, + "step": 977 + }, + { + "epoch": 1.7340425531914894, + "grad_norm": 0.4803168773651123, + "learning_rate": 4.0250447227191406e-05, + "loss": 0.1709, + "step": 978 + }, + { + "epoch": 1.7358156028368794, + "grad_norm": 0.2807621657848358, + "learning_rate": 3.998211091234347e-05, + "loss": 0.112, + "step": 979 + }, + { + "epoch": 1.7375886524822695, + "grad_norm": 0.4310925006866455, + "learning_rate": 3.9713774597495524e-05, + "loss": 0.1529, + "step": 980 + }, + { + "epoch": 1.7393617021276597, + "grad_norm": 0.5579214692115784, + "learning_rate": 3.944543828264758e-05, + "loss": 0.2199, + "step": 981 + }, + { + "epoch": 1.7411347517730498, + "grad_norm": 0.24788203835487366, + "learning_rate": 3.9177101967799636e-05, + "loss": 0.0795, + "step": 982 + }, + { + "epoch": 1.7429078014184398, + "grad_norm": 0.35569068789482117, + "learning_rate": 3.890876565295169e-05, + "loss": 0.1494, + "step": 983 + }, + { + "epoch": 1.7446808510638299, + "grad_norm": 0.26108360290527344, + "learning_rate": 3.8640429338103754e-05, + "loss": 0.1172, + "step": 984 + }, + { + "epoch": 1.74645390070922, + "grad_norm": 0.2628544569015503, + "learning_rate": 3.837209302325581e-05, + "loss": 0.1146, + "step": 985 + }, + { + "epoch": 1.74822695035461, + "grad_norm": 0.267870157957077, + "learning_rate": 3.8103756708407866e-05, + "loss": 0.1053, + "step": 986 + }, + { + "epoch": 1.75, + "grad_norm": 0.3323216140270233, + "learning_rate": 3.783542039355992e-05, + "loss": 0.1278, + "step": 987 + }, + { + "epoch": 1.75177304964539, + "grad_norm": 0.36032265424728394, + "learning_rate": 3.756708407871198e-05, + "loss": 0.1392, + "step": 988 + }, + { + "epoch": 1.75354609929078, + "grad_norm": 0.23055288195610046, + "learning_rate": 3.729874776386404e-05, + "loss": 0.0875, + "step": 989 + }, + { + "epoch": 1.7553191489361701, + "grad_norm": 0.19896911084651947, + "learning_rate": 3.7030411449016096e-05, + "loss": 0.0744, + "step": 990 + }, + { + "epoch": 
1.7570921985815602, + "grad_norm": 0.2982289791107178, + "learning_rate": 3.676207513416815e-05, + "loss": 0.1312, + "step": 991 + }, + { + "epoch": 1.7588652482269502, + "grad_norm": 0.18045595288276672, + "learning_rate": 3.6493738819320214e-05, + "loss": 0.0621, + "step": 992 + }, + { + "epoch": 1.7606382978723403, + "grad_norm": 0.3549979627132416, + "learning_rate": 3.622540250447227e-05, + "loss": 0.147, + "step": 993 + }, + { + "epoch": 1.7624113475177305, + "grad_norm": 0.2849663496017456, + "learning_rate": 3.5957066189624326e-05, + "loss": 0.0958, + "step": 994 + }, + { + "epoch": 1.7641843971631206, + "grad_norm": 0.19186846911907196, + "learning_rate": 3.568872987477638e-05, + "loss": 0.0717, + "step": 995 + }, + { + "epoch": 1.7659574468085106, + "grad_norm": 0.19222399592399597, + "learning_rate": 3.542039355992844e-05, + "loss": 0.0448, + "step": 996 + }, + { + "epoch": 1.7677304964539007, + "grad_norm": 0.36738601326942444, + "learning_rate": 3.51520572450805e-05, + "loss": 0.1569, + "step": 997 + }, + { + "epoch": 1.7695035460992907, + "grad_norm": 0.3227214813232422, + "learning_rate": 3.4883720930232556e-05, + "loss": 0.1288, + "step": 998 + }, + { + "epoch": 1.771276595744681, + "grad_norm": 0.2572925090789795, + "learning_rate": 3.461538461538461e-05, + "loss": 0.0916, + "step": 999 + }, + { + "epoch": 1.773049645390071, + "grad_norm": 0.23549784719944, + "learning_rate": 3.434704830053667e-05, + "loss": 0.0891, + "step": 1000 + }, + { + "epoch": 1.774822695035461, + "grad_norm": 0.37821611762046814, + "learning_rate": 3.4078711985688723e-05, + "loss": 0.1633, + "step": 1001 + }, + { + "epoch": 1.7765957446808511, + "grad_norm": 0.24105395376682281, + "learning_rate": 3.3810375670840786e-05, + "loss": 0.104, + "step": 1002 + }, + { + "epoch": 1.7783687943262412, + "grad_norm": 0.21173512935638428, + "learning_rate": 3.354203935599284e-05, + "loss": 0.0903, + "step": 1003 + }, + { + "epoch": 1.7801418439716312, + "grad_norm": 0.4547687768936157, + "learning_rate": 3.32737030411449e-05, + "loss": 0.1414, + "step": 1004 + }, + { + "epoch": 1.7819148936170213, + "grad_norm": 0.2635003328323364, + "learning_rate": 3.3005366726296953e-05, + "loss": 0.115, + "step": 1005 + }, + { + "epoch": 1.7836879432624113, + "grad_norm": 0.20460587739944458, + "learning_rate": 3.273703041144901e-05, + "loss": 0.066, + "step": 1006 + }, + { + "epoch": 1.7854609929078014, + "grad_norm": 0.2040269821882248, + "learning_rate": 3.246869409660107e-05, + "loss": 0.0607, + "step": 1007 + }, + { + "epoch": 1.7872340425531914, + "grad_norm": 0.29658254981040955, + "learning_rate": 3.220035778175313e-05, + "loss": 0.1239, + "step": 1008 + }, + { + "epoch": 1.7890070921985815, + "grad_norm": 0.1401108056306839, + "learning_rate": 3.1932021466905183e-05, + "loss": 0.0552, + "step": 1009 + }, + { + "epoch": 1.7907801418439715, + "grad_norm": 0.433570921421051, + "learning_rate": 3.166368515205724e-05, + "loss": 0.1549, + "step": 1010 + }, + { + "epoch": 1.7925531914893615, + "grad_norm": 0.24940761923789978, + "learning_rate": 3.13953488372093e-05, + "loss": 0.0958, + "step": 1011 + }, + { + "epoch": 1.7943262411347518, + "grad_norm": 0.18111471831798553, + "learning_rate": 3.112701252236136e-05, + "loss": 0.0498, + "step": 1012 + }, + { + "epoch": 1.7960992907801419, + "grad_norm": 0.15693312883377075, + "learning_rate": 3.085867620751341e-05, + "loss": 0.0358, + "step": 1013 + }, + { + "epoch": 1.797872340425532, + "grad_norm": 0.13912533223628998, + "learning_rate": 3.059033989266547e-05, + "loss": 
0.0433, + "step": 1014 + }, + { + "epoch": 1.799645390070922, + "grad_norm": 0.39377179741859436, + "learning_rate": 3.032200357781753e-05, + "loss": 0.1744, + "step": 1015 + }, + { + "epoch": 1.8014184397163122, + "grad_norm": 0.558695912361145, + "learning_rate": 3.0053667262969588e-05, + "loss": 0.1799, + "step": 1016 + }, + { + "epoch": 1.8031914893617023, + "grad_norm": 0.49997296929359436, + "learning_rate": 2.9785330948121643e-05, + "loss": 0.1481, + "step": 1017 + }, + { + "epoch": 1.8049645390070923, + "grad_norm": 0.23218946158885956, + "learning_rate": 2.9516994633273703e-05, + "loss": 0.0783, + "step": 1018 + }, + { + "epoch": 1.8067375886524824, + "grad_norm": 0.3141940236091614, + "learning_rate": 2.924865831842576e-05, + "loss": 0.0942, + "step": 1019 + }, + { + "epoch": 1.8085106382978724, + "grad_norm": 0.4221852421760559, + "learning_rate": 2.8980322003577814e-05, + "loss": 0.1216, + "step": 1020 + }, + { + "epoch": 1.8102836879432624, + "grad_norm": 0.21606023609638214, + "learning_rate": 2.8711985688729873e-05, + "loss": 0.0776, + "step": 1021 + }, + { + "epoch": 1.8120567375886525, + "grad_norm": 0.3150366246700287, + "learning_rate": 2.844364937388193e-05, + "loss": 0.1131, + "step": 1022 + }, + { + "epoch": 1.8138297872340425, + "grad_norm": 0.5450868010520935, + "learning_rate": 2.817531305903399e-05, + "loss": 0.1853, + "step": 1023 + }, + { + "epoch": 1.8156028368794326, + "grad_norm": 0.21700796484947205, + "learning_rate": 2.7906976744186044e-05, + "loss": 0.0988, + "step": 1024 + }, + { + "epoch": 1.8173758865248226, + "grad_norm": 0.23696239292621613, + "learning_rate": 2.7638640429338103e-05, + "loss": 0.1032, + "step": 1025 + }, + { + "epoch": 1.8191489361702127, + "grad_norm": 0.4778185486793518, + "learning_rate": 2.737030411449016e-05, + "loss": 0.1505, + "step": 1026 + }, + { + "epoch": 1.8209219858156027, + "grad_norm": 0.5296378135681152, + "learning_rate": 2.7101967799642215e-05, + "loss": 0.1929, + "step": 1027 + }, + { + "epoch": 1.8226950354609928, + "grad_norm": 0.4214850962162018, + "learning_rate": 2.6833631484794274e-05, + "loss": 0.1353, + "step": 1028 + }, + { + "epoch": 1.824468085106383, + "grad_norm": 0.41558897495269775, + "learning_rate": 2.656529516994633e-05, + "loss": 0.1337, + "step": 1029 + }, + { + "epoch": 1.826241134751773, + "grad_norm": 0.18530499935150146, + "learning_rate": 2.629695885509839e-05, + "loss": 0.0798, + "step": 1030 + }, + { + "epoch": 1.8280141843971631, + "grad_norm": 0.26249393820762634, + "learning_rate": 2.6028622540250445e-05, + "loss": 0.1154, + "step": 1031 + }, + { + "epoch": 1.8297872340425532, + "grad_norm": 0.252618670463562, + "learning_rate": 2.57602862254025e-05, + "loss": 0.0912, + "step": 1032 + }, + { + "epoch": 1.8315602836879432, + "grad_norm": 0.24905292689800262, + "learning_rate": 2.549194991055456e-05, + "loss": 0.0882, + "step": 1033 + }, + { + "epoch": 1.8333333333333335, + "grad_norm": 0.37883955240249634, + "learning_rate": 2.5223613595706616e-05, + "loss": 0.1248, + "step": 1034 + }, + { + "epoch": 1.8351063829787235, + "grad_norm": 0.31058332324028015, + "learning_rate": 2.4955277280858675e-05, + "loss": 0.1237, + "step": 1035 + }, + { + "epoch": 1.8368794326241136, + "grad_norm": 0.2710226774215698, + "learning_rate": 2.468694096601073e-05, + "loss": 0.1072, + "step": 1036 + }, + { + "epoch": 1.8386524822695036, + "grad_norm": 0.31133633852005005, + "learning_rate": 2.441860465116279e-05, + "loss": 0.1199, + "step": 1037 + }, + { + "epoch": 1.8404255319148937, + "grad_norm": 
0.19131186604499817, + "learning_rate": 2.4150268336314846e-05, + "loss": 0.0885, + "step": 1038 + }, + { + "epoch": 1.8421985815602837, + "grad_norm": 0.34097999334335327, + "learning_rate": 2.38819320214669e-05, + "loss": 0.1198, + "step": 1039 + }, + { + "epoch": 1.8439716312056738, + "grad_norm": 0.1662115752696991, + "learning_rate": 2.361359570661896e-05, + "loss": 0.0724, + "step": 1040 + }, + { + "epoch": 1.8457446808510638, + "grad_norm": 0.16127242147922516, + "learning_rate": 2.3345259391771016e-05, + "loss": 0.0647, + "step": 1041 + }, + { + "epoch": 1.8475177304964538, + "grad_norm": 0.18112505972385406, + "learning_rate": 2.3076923076923076e-05, + "loss": 0.0651, + "step": 1042 + }, + { + "epoch": 1.849290780141844, + "grad_norm": 0.1682027280330658, + "learning_rate": 2.280858676207513e-05, + "loss": 0.0542, + "step": 1043 + }, + { + "epoch": 1.851063829787234, + "grad_norm": 0.3057568669319153, + "learning_rate": 2.254025044722719e-05, + "loss": 0.1143, + "step": 1044 + }, + { + "epoch": 1.852836879432624, + "grad_norm": 0.29910141229629517, + "learning_rate": 2.2271914132379246e-05, + "loss": 0.1168, + "step": 1045 + }, + { + "epoch": 1.854609929078014, + "grad_norm": 0.23932576179504395, + "learning_rate": 2.2003577817531302e-05, + "loss": 0.0988, + "step": 1046 + }, + { + "epoch": 1.8563829787234043, + "grad_norm": 0.25302958488464355, + "learning_rate": 2.173524150268336e-05, + "loss": 0.0843, + "step": 1047 + }, + { + "epoch": 1.8581560283687943, + "grad_norm": 0.30847910046577454, + "learning_rate": 2.1466905187835417e-05, + "loss": 0.1117, + "step": 1048 + }, + { + "epoch": 1.8599290780141844, + "grad_norm": 0.22233125567436218, + "learning_rate": 2.1198568872987476e-05, + "loss": 0.0926, + "step": 1049 + }, + { + "epoch": 1.8617021276595744, + "grad_norm": 0.4215439260005951, + "learning_rate": 2.0930232558139532e-05, + "loss": 0.1222, + "step": 1050 + }, + { + "epoch": 1.8634751773049647, + "grad_norm": 0.25102686882019043, + "learning_rate": 2.0661896243291588e-05, + "loss": 0.076, + "step": 1051 + }, + { + "epoch": 1.8652482269503547, + "grad_norm": 0.20676009356975555, + "learning_rate": 2.0393559928443647e-05, + "loss": 0.0927, + "step": 1052 + }, + { + "epoch": 1.8670212765957448, + "grad_norm": 0.16590143740177155, + "learning_rate": 2.0125223613595703e-05, + "loss": 0.0437, + "step": 1053 + }, + { + "epoch": 1.8687943262411348, + "grad_norm": 0.30797678232192993, + "learning_rate": 1.9856887298747762e-05, + "loss": 0.1162, + "step": 1054 + }, + { + "epoch": 1.8705673758865249, + "grad_norm": 0.24597567319869995, + "learning_rate": 1.9588550983899818e-05, + "loss": 0.0736, + "step": 1055 + }, + { + "epoch": 1.872340425531915, + "grad_norm": 0.21166519820690155, + "learning_rate": 1.9320214669051877e-05, + "loss": 0.0729, + "step": 1056 + }, + { + "epoch": 1.874113475177305, + "grad_norm": 0.18480858206748962, + "learning_rate": 1.9051878354203933e-05, + "loss": 0.0612, + "step": 1057 + }, + { + "epoch": 1.875886524822695, + "grad_norm": 0.2520684599876404, + "learning_rate": 1.878354203935599e-05, + "loss": 0.0676, + "step": 1058 + }, + { + "epoch": 1.877659574468085, + "grad_norm": 0.22201155126094818, + "learning_rate": 1.8515205724508048e-05, + "loss": 0.0732, + "step": 1059 + }, + { + "epoch": 1.8794326241134751, + "grad_norm": 0.24273940920829773, + "learning_rate": 1.8246869409660107e-05, + "loss": 0.075, + "step": 1060 + }, + { + "epoch": 1.8812056737588652, + "grad_norm": 0.33823683857917786, + "learning_rate": 1.7978533094812163e-05, + "loss": 
0.1274, + "step": 1061 + }, + { + "epoch": 1.8829787234042552, + "grad_norm": 0.24322524666786194, + "learning_rate": 1.771019677996422e-05, + "loss": 0.062, + "step": 1062 + }, + { + "epoch": 1.8847517730496453, + "grad_norm": 0.2402666211128235, + "learning_rate": 1.7441860465116278e-05, + "loss": 0.0688, + "step": 1063 + }, + { + "epoch": 1.8865248226950353, + "grad_norm": 0.156381294131279, + "learning_rate": 1.7173524150268334e-05, + "loss": 0.0499, + "step": 1064 + }, + { + "epoch": 1.8882978723404256, + "grad_norm": 0.5312046408653259, + "learning_rate": 1.6905187835420393e-05, + "loss": 0.1921, + "step": 1065 + }, + { + "epoch": 1.8900709219858156, + "grad_norm": 0.33548104763031006, + "learning_rate": 1.663685152057245e-05, + "loss": 0.1373, + "step": 1066 + }, + { + "epoch": 1.8918439716312057, + "grad_norm": 0.5181596875190735, + "learning_rate": 1.6368515205724505e-05, + "loss": 0.092, + "step": 1067 + }, + { + "epoch": 1.8936170212765957, + "grad_norm": 0.38026565313339233, + "learning_rate": 1.6100178890876564e-05, + "loss": 0.1344, + "step": 1068 + }, + { + "epoch": 1.895390070921986, + "grad_norm": 0.4322291612625122, + "learning_rate": 1.583184257602862e-05, + "loss": 0.1421, + "step": 1069 + }, + { + "epoch": 1.897163120567376, + "grad_norm": 0.24601012468338013, + "learning_rate": 1.556350626118068e-05, + "loss": 0.0664, + "step": 1070 + }, + { + "epoch": 1.898936170212766, + "grad_norm": 0.2838169038295746, + "learning_rate": 1.5295169946332735e-05, + "loss": 0.0987, + "step": 1071 + }, + { + "epoch": 1.900709219858156, + "grad_norm": 0.2708532512187958, + "learning_rate": 1.5026833631484794e-05, + "loss": 0.116, + "step": 1072 + }, + { + "epoch": 1.9024822695035462, + "grad_norm": 0.48430347442626953, + "learning_rate": 1.4758497316636851e-05, + "loss": 0.1737, + "step": 1073 + }, + { + "epoch": 1.9042553191489362, + "grad_norm": 0.2883581817150116, + "learning_rate": 1.4490161001788907e-05, + "loss": 0.1058, + "step": 1074 + }, + { + "epoch": 1.9060283687943262, + "grad_norm": 0.38699519634246826, + "learning_rate": 1.4221824686940965e-05, + "loss": 0.1039, + "step": 1075 + }, + { + "epoch": 1.9078014184397163, + "grad_norm": 0.4487515389919281, + "learning_rate": 1.3953488372093022e-05, + "loss": 0.1794, + "step": 1076 + }, + { + "epoch": 1.9095744680851063, + "grad_norm": 0.3338315486907959, + "learning_rate": 1.368515205724508e-05, + "loss": 0.127, + "step": 1077 + }, + { + "epoch": 1.9113475177304964, + "grad_norm": 0.32425758242607117, + "learning_rate": 1.3416815742397137e-05, + "loss": 0.133, + "step": 1078 + }, + { + "epoch": 1.9131205673758864, + "grad_norm": 0.25057604908943176, + "learning_rate": 1.3148479427549195e-05, + "loss": 0.1069, + "step": 1079 + }, + { + "epoch": 1.9148936170212765, + "grad_norm": 0.2886715233325958, + "learning_rate": 1.288014311270125e-05, + "loss": 0.1326, + "step": 1080 + }, + { + "epoch": 1.9166666666666665, + "grad_norm": 0.27996861934661865, + "learning_rate": 1.2611806797853308e-05, + "loss": 0.0981, + "step": 1081 + }, + { + "epoch": 1.9184397163120568, + "grad_norm": 0.38353902101516724, + "learning_rate": 1.2343470483005365e-05, + "loss": 0.1677, + "step": 1082 + }, + { + "epoch": 1.9202127659574468, + "grad_norm": 0.18619371950626373, + "learning_rate": 1.2075134168157423e-05, + "loss": 0.0741, + "step": 1083 + }, + { + "epoch": 1.9219858156028369, + "grad_norm": 0.23374900221824646, + "learning_rate": 1.180679785330948e-05, + "loss": 0.085, + "step": 1084 + }, + { + "epoch": 1.923758865248227, + "grad_norm": 
0.439928263425827, + "learning_rate": 1.1538461538461538e-05, + "loss": 0.1432, + "step": 1085 + }, + { + "epoch": 1.925531914893617, + "grad_norm": 0.3650781512260437, + "learning_rate": 1.1270125223613595e-05, + "loss": 0.127, + "step": 1086 + }, + { + "epoch": 1.9273049645390072, + "grad_norm": 0.47360095381736755, + "learning_rate": 1.1001788908765651e-05, + "loss": 0.1805, + "step": 1087 + }, + { + "epoch": 1.9290780141843973, + "grad_norm": 0.3956616520881653, + "learning_rate": 1.0733452593917709e-05, + "loss": 0.1409, + "step": 1088 + }, + { + "epoch": 1.9308510638297873, + "grad_norm": 0.2528573274612427, + "learning_rate": 1.0465116279069766e-05, + "loss": 0.0986, + "step": 1089 + }, + { + "epoch": 1.9326241134751774, + "grad_norm": 0.5904862880706787, + "learning_rate": 1.0196779964221824e-05, + "loss": 0.1442, + "step": 1090 + }, + { + "epoch": 1.9343971631205674, + "grad_norm": 0.41600659489631653, + "learning_rate": 9.928443649373881e-06, + "loss": 0.1124, + "step": 1091 + }, + { + "epoch": 1.9361702127659575, + "grad_norm": 0.17834167182445526, + "learning_rate": 9.660107334525939e-06, + "loss": 0.0693, + "step": 1092 + }, + { + "epoch": 1.9379432624113475, + "grad_norm": 0.17482277750968933, + "learning_rate": 9.391771019677994e-06, + "loss": 0.0509, + "step": 1093 + }, + { + "epoch": 1.9397163120567376, + "grad_norm": 0.16638445854187012, + "learning_rate": 9.123434704830054e-06, + "loss": 0.0497, + "step": 1094 + }, + { + "epoch": 1.9414893617021276, + "grad_norm": 0.29362753033638, + "learning_rate": 8.85509838998211e-06, + "loss": 0.1001, + "step": 1095 + }, + { + "epoch": 1.9432624113475176, + "grad_norm": 0.13863207399845123, + "learning_rate": 8.586762075134167e-06, + "loss": 0.0594, + "step": 1096 + }, + { + "epoch": 1.9450354609929077, + "grad_norm": 0.33512240648269653, + "learning_rate": 8.318425760286224e-06, + "loss": 0.1273, + "step": 1097 + }, + { + "epoch": 1.9468085106382977, + "grad_norm": 0.3097001612186432, + "learning_rate": 8.050089445438282e-06, + "loss": 0.1164, + "step": 1098 + }, + { + "epoch": 1.9485815602836878, + "grad_norm": 0.2611374855041504, + "learning_rate": 7.78175313059034e-06, + "loss": 0.1004, + "step": 1099 + }, + { + "epoch": 1.950354609929078, + "grad_norm": 0.14972633123397827, + "learning_rate": 7.513416815742397e-06, + "loss": 0.0577, + "step": 1100 + }, + { + "epoch": 1.952127659574468, + "grad_norm": 0.3071126937866211, + "learning_rate": 7.2450805008944535e-06, + "loss": 0.091, + "step": 1101 + }, + { + "epoch": 1.9539007092198581, + "grad_norm": 0.1802784502506256, + "learning_rate": 6.976744186046511e-06, + "loss": 0.0493, + "step": 1102 + }, + { + "epoch": 1.9556737588652482, + "grad_norm": 0.15572038292884827, + "learning_rate": 6.7084078711985685e-06, + "loss": 0.0579, + "step": 1103 + }, + { + "epoch": 1.9574468085106385, + "grad_norm": 0.24851687252521515, + "learning_rate": 6.440071556350625e-06, + "loss": 0.0871, + "step": 1104 + }, + { + "epoch": 1.9592198581560285, + "grad_norm": 0.2088288813829422, + "learning_rate": 6.171735241502683e-06, + "loss": 0.0733, + "step": 1105 + }, + { + "epoch": 1.9609929078014185, + "grad_norm": 0.24039943516254425, + "learning_rate": 5.90339892665474e-06, + "loss": 0.093, + "step": 1106 + }, + { + "epoch": 1.9627659574468086, + "grad_norm": 0.3546561002731323, + "learning_rate": 5.635062611806798e-06, + "loss": 0.1246, + "step": 1107 + }, + { + "epoch": 1.9645390070921986, + "grad_norm": 0.11703565716743469, + "learning_rate": 5.366726296958854e-06, + "loss": 0.0402, + "step": 1108 + 
}, + { + "epoch": 1.9663120567375887, + "grad_norm": 0.1947447657585144, + "learning_rate": 5.098389982110912e-06, + "loss": 0.0761, + "step": 1109 + }, + { + "epoch": 1.9680851063829787, + "grad_norm": 0.18110178411006927, + "learning_rate": 4.830053667262969e-06, + "loss": 0.0768, + "step": 1110 + }, + { + "epoch": 1.9698581560283688, + "grad_norm": 0.2154490053653717, + "learning_rate": 4.561717352415027e-06, + "loss": 0.0813, + "step": 1111 + }, + { + "epoch": 1.9716312056737588, + "grad_norm": 0.20867429673671722, + "learning_rate": 4.2933810375670835e-06, + "loss": 0.0812, + "step": 1112 + }, + { + "epoch": 1.9734042553191489, + "grad_norm": 0.1496736854314804, + "learning_rate": 4.025044722719141e-06, + "loss": 0.046, + "step": 1113 + }, + { + "epoch": 1.975177304964539, + "grad_norm": 0.10855786502361298, + "learning_rate": 3.7567084078711984e-06, + "loss": 0.0341, + "step": 1114 + }, + { + "epoch": 1.976950354609929, + "grad_norm": 0.2541455626487732, + "learning_rate": 3.4883720930232555e-06, + "loss": 0.0879, + "step": 1115 + }, + { + "epoch": 1.978723404255319, + "grad_norm": 0.30963069200515747, + "learning_rate": 3.2200357781753126e-06, + "loss": 0.1081, + "step": 1116 + }, + { + "epoch": 1.9804964539007093, + "grad_norm": 0.5179100632667542, + "learning_rate": 2.95169946332737e-06, + "loss": 0.1674, + "step": 1117 + }, + { + "epoch": 1.9822695035460993, + "grad_norm": 0.208583265542984, + "learning_rate": 2.683363148479427e-06, + "loss": 0.0861, + "step": 1118 + }, + { + "epoch": 1.9840425531914894, + "grad_norm": 0.32043734192848206, + "learning_rate": 2.4150268336314847e-06, + "loss": 0.104, + "step": 1119 + }, + { + "epoch": 1.9858156028368794, + "grad_norm": 0.16812080144882202, + "learning_rate": 2.1466905187835417e-06, + "loss": 0.0568, + "step": 1120 + }, + { + "epoch": 1.9875886524822695, + "grad_norm": 0.3949272632598877, + "learning_rate": 1.8783542039355992e-06, + "loss": 0.1243, + "step": 1121 + }, + { + "epoch": 1.9893617021276597, + "grad_norm": 0.30382081866264343, + "learning_rate": 1.6100178890876563e-06, + "loss": 0.0929, + "step": 1122 + }, + { + "epoch": 1.9911347517730498, + "grad_norm": 0.3701658844947815, + "learning_rate": 1.3416815742397136e-06, + "loss": 0.1313, + "step": 1123 + }, + { + "epoch": 1.9929078014184398, + "grad_norm": 0.3137078285217285, + "learning_rate": 1.0733452593917709e-06, + "loss": 0.1288, + "step": 1124 + }, + { + "epoch": 1.9946808510638299, + "grad_norm": 0.20329152047634125, + "learning_rate": 8.050089445438281e-07, + "loss": 0.0661, + "step": 1125 + }, + { + "epoch": 1.99645390070922, + "grad_norm": 0.2180982083082199, + "learning_rate": 5.366726296958854e-07, + "loss": 0.072, + "step": 1126 + }, + { + "epoch": 1.99822695035461, + "grad_norm": 0.217008575797081, + "learning_rate": 2.683363148479427e-07, + "loss": 0.0861, + "step": 1127 + }, + { + "epoch": 2.0, + "grad_norm": 0.11226160824298859, + "learning_rate": 0.0, + "loss": 0.0346, + "step": 1128 + } + ], + "logging_steps": 1, + "max_steps": 1128, + "num_input_tokens_seen": 0, + "num_train_epochs": 2, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 2.726988197801165e+16, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}