bloomz-1b1-vn-chat / last-checkpoint / trainer_state.json
Training in progress, step 9380, checkpoint
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.7504,
"eval_steps": 500,
"global_step": 9380,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 62.10089111328125,
"learning_rate": 1e-06,
"loss": 4.5777,
"step": 20
},
{
"epoch": 0.0,
"grad_norm": 39.39016342163086,
"learning_rate": 2e-06,
"loss": 4.4077,
"step": 40
},
{
"epoch": 0.0,
"grad_norm": 54.24020767211914,
"learning_rate": 3e-06,
"loss": 4.4807,
"step": 60
},
{
"epoch": 0.01,
"grad_norm": 30.161609649658203,
"learning_rate": 4e-06,
"loss": 4.5756,
"step": 80
},
{
"epoch": 0.01,
"grad_norm": 40.131675720214844,
"learning_rate": 4.9999999999999996e-06,
"loss": 4.4352,
"step": 100
},
{
"epoch": 0.01,
"grad_norm": 52.3621940612793,
"learning_rate": 6e-06,
"loss": 4.5096,
"step": 120
},
{
"epoch": 0.01,
"grad_norm": 49.86561584472656,
"learning_rate": 7e-06,
"loss": 4.493,
"step": 140
},
{
"epoch": 0.01,
"grad_norm": 20.034923553466797,
"learning_rate": 8e-06,
"loss": 4.4088,
"step": 160
},
{
"epoch": 0.01,
"grad_norm": 50.790679931640625,
"learning_rate": 9e-06,
"loss": 4.4901,
"step": 180
},
{
"epoch": 0.02,
"grad_norm": 48.5693473815918,
"learning_rate": 9.999999999999999e-06,
"loss": 4.3628,
"step": 200
},
{
"epoch": 0.02,
"grad_norm": 37.95353698730469,
"learning_rate": 1.1e-05,
"loss": 4.3298,
"step": 220
},
{
"epoch": 0.02,
"grad_norm": 35.7153434753418,
"learning_rate": 1.2e-05,
"loss": 4.2839,
"step": 240
},
{
"epoch": 0.02,
"grad_norm": 91.47773742675781,
"learning_rate": 1.3000000000000001e-05,
"loss": 4.1238,
"step": 260
},
{
"epoch": 0.02,
"grad_norm": 23.16193389892578,
"learning_rate": 1.4e-05,
"loss": 4.1245,
"step": 280
},
{
"epoch": 0.02,
"grad_norm": 28.304485321044922,
"learning_rate": 1.5e-05,
"loss": 4.2198,
"step": 300
},
{
"epoch": 0.03,
"grad_norm": 34.03230285644531,
"learning_rate": 1.6e-05,
"loss": 4.0958,
"step": 320
},
{
"epoch": 0.03,
"grad_norm": 29.786975860595703,
"learning_rate": 1.7e-05,
"loss": 4.024,
"step": 340
},
{
"epoch": 0.03,
"grad_norm": 33.04754638671875,
"learning_rate": 1.8e-05,
"loss": 4.0832,
"step": 360
},
{
"epoch": 0.03,
"grad_norm": 28.68460464477539,
"learning_rate": 1.9e-05,
"loss": 3.9827,
"step": 380
},
{
"epoch": 0.03,
"grad_norm": 26.463253021240234,
"learning_rate": 1.9999999999999998e-05,
"loss": 3.9454,
"step": 400
},
{
"epoch": 0.03,
"grad_norm": 19.407127380371094,
"learning_rate": 2.1e-05,
"loss": 4.0119,
"step": 420
},
{
"epoch": 0.04,
"grad_norm": 26.383380889892578,
"learning_rate": 2.2e-05,
"loss": 3.9554,
"step": 440
},
{
"epoch": 0.04,
"grad_norm": 33.225223541259766,
"learning_rate": 2.3000000000000003e-05,
"loss": 3.8172,
"step": 460
},
{
"epoch": 0.04,
"grad_norm": 26.000978469848633,
"learning_rate": 2.4e-05,
"loss": 3.8934,
"step": 480
},
{
"epoch": 0.04,
"grad_norm": 28.714366912841797,
"learning_rate": 2.5e-05,
"loss": 3.9194,
"step": 500
},
{
"epoch": 0.04,
"grad_norm": 28.721248626708984,
"learning_rate": 2.6000000000000002e-05,
"loss": 3.8144,
"step": 520
},
{
"epoch": 0.04,
"grad_norm": 24.934555053710938,
"learning_rate": 2.7000000000000002e-05,
"loss": 3.9166,
"step": 540
},
{
"epoch": 0.04,
"grad_norm": 23.113840103149414,
"learning_rate": 2.8e-05,
"loss": 3.8248,
"step": 560
},
{
"epoch": 0.05,
"grad_norm": 21.58758544921875,
"learning_rate": 2.9e-05,
"loss": 3.7538,
"step": 580
},
{
"epoch": 0.05,
"grad_norm": 22.339618682861328,
"learning_rate": 3e-05,
"loss": 3.726,
"step": 600
},
{
"epoch": 0.05,
"grad_norm": 49.25693893432617,
"learning_rate": 2.999992132854894e-05,
"loss": 3.8692,
"step": 620
},
{
"epoch": 0.05,
"grad_norm": 45.1494026184082,
"learning_rate": 2.999968531502098e-05,
"loss": 3.7374,
"step": 640
},
{
"epoch": 0.05,
"grad_norm": 72.25853729248047,
"learning_rate": 2.99992919618918e-05,
"loss": 3.7735,
"step": 660
},
{
"epoch": 0.05,
"grad_norm": 39.445220947265625,
"learning_rate": 2.999874127328748e-05,
"loss": 3.759,
"step": 680
},
{
"epoch": 0.06,
"grad_norm": 21.18370246887207,
"learning_rate": 2.9998033254984483e-05,
"loss": 3.7841,
"step": 700
},
{
"epoch": 0.06,
"grad_norm": 24.310373306274414,
"learning_rate": 2.999716791440959e-05,
"loss": 3.679,
"step": 720
},
{
"epoch": 0.06,
"grad_norm": 36.432350158691406,
"learning_rate": 2.9996145260639812e-05,
"loss": 3.6796,
"step": 740
},
{
"epoch": 0.06,
"grad_norm": 32.12275314331055,
"learning_rate": 2.9994965304402304e-05,
"loss": 3.7613,
"step": 760
},
{
"epoch": 0.06,
"grad_norm": 38.32442855834961,
"learning_rate": 2.999362805807425e-05,
"loss": 3.7586,
"step": 780
},
{
"epoch": 0.06,
"grad_norm": 30.289432525634766,
"learning_rate": 2.9992133535682725e-05,
"loss": 3.6919,
"step": 800
},
{
"epoch": 0.07,
"grad_norm": 32.69138717651367,
"learning_rate": 2.9990481752904566e-05,
"loss": 3.6855,
"step": 820
},
{
"epoch": 0.07,
"grad_norm": 46.554874420166016,
"learning_rate": 2.9988672727066197e-05,
"loss": 3.7201,
"step": 840
},
{
"epoch": 0.07,
"grad_norm": 28.671123504638672,
"learning_rate": 2.9986706477143436e-05,
"loss": 3.6594,
"step": 860
},
{
"epoch": 0.07,
"grad_norm": 49.44480895996094,
"learning_rate": 2.9984583023761318e-05,
"loss": 3.7271,
"step": 880
},
{
"epoch": 0.07,
"grad_norm": 26.61457061767578,
"learning_rate": 2.998230238919386e-05,
"loss": 3.7376,
"step": 900
},
{
"epoch": 0.07,
"grad_norm": 27.453275680541992,
"learning_rate": 2.9979864597363846e-05,
"loss": 3.6716,
"step": 920
},
{
"epoch": 0.08,
"grad_norm": 22.791175842285156,
"learning_rate": 2.9977269673842554e-05,
"loss": 3.6172,
"step": 940
},
{
"epoch": 0.08,
"grad_norm": 58.2718620300293,
"learning_rate": 2.997451764584951e-05,
"loss": 3.7494,
"step": 960
},
{
"epoch": 0.08,
"grad_norm": 33.610286712646484,
"learning_rate": 2.9971608542252175e-05,
"loss": 3.7077,
"step": 980
},
{
"epoch": 0.08,
"grad_norm": 25.48147201538086,
"learning_rate": 2.9968542393565674e-05,
"loss": 3.6721,
"step": 1000
},
{
"epoch": 0.08,
"grad_norm": 27.07135581970215,
"learning_rate": 2.996531923195246e-05,
"loss": 3.7106,
"step": 1020
},
{
"epoch": 0.08,
"grad_norm": 37.24673843383789,
"learning_rate": 2.996193909122197e-05,
"loss": 3.7447,
"step": 1040
},
{
"epoch": 0.08,
"grad_norm": 26.41890525817871,
"learning_rate": 2.995840200683028e-05,
"loss": 3.5839,
"step": 1060
},
{
"epoch": 0.09,
"grad_norm": 32.88002014160156,
"learning_rate": 2.995470801587973e-05,
"loss": 3.6606,
"step": 1080
},
{
"epoch": 0.09,
"grad_norm": 32.0895881652832,
"learning_rate": 2.9950857157118544e-05,
"loss": 3.677,
"step": 1100
},
{
"epoch": 0.09,
"grad_norm": 37.726783752441406,
"learning_rate": 2.9946849470940395e-05,
"loss": 3.5546,
"step": 1120
},
{
"epoch": 0.09,
"grad_norm": 56.246299743652344,
"learning_rate": 2.9942684999384034e-05,
"loss": 3.6391,
"step": 1140
},
{
"epoch": 0.09,
"grad_norm": 35.675662994384766,
"learning_rate": 2.993836378613278e-05,
"loss": 3.5918,
"step": 1160
},
{
"epoch": 0.09,
"grad_norm": 26.685134887695312,
"learning_rate": 2.993388587651412e-05,
"loss": 3.6331,
"step": 1180
},
{
"epoch": 0.1,
"grad_norm": 27.400333404541016,
"learning_rate": 2.992925131749921e-05,
"loss": 3.6214,
"step": 1200
},
{
"epoch": 0.1,
"grad_norm": 28.501314163208008,
"learning_rate": 2.9924460157702378e-05,
"loss": 3.6619,
"step": 1220
},
{
"epoch": 0.1,
"grad_norm": 30.773778915405273,
"learning_rate": 2.991951244738063e-05,
"loss": 3.6453,
"step": 1240
},
{
"epoch": 0.1,
"grad_norm": 24.701374053955078,
"learning_rate": 2.9914408238433095e-05,
"loss": 3.7282,
"step": 1260
},
{
"epoch": 0.1,
"grad_norm": 27.605117797851562,
"learning_rate": 2.990914758440052e-05,
"loss": 3.6635,
"step": 1280
},
{
"epoch": 0.1,
"grad_norm": 27.829086303710938,
"learning_rate": 2.9903730540464668e-05,
"loss": 3.5293,
"step": 1300
},
{
"epoch": 0.11,
"grad_norm": 40.916263580322266,
"learning_rate": 2.9898157163447767e-05,
"loss": 3.6976,
"step": 1320
},
{
"epoch": 0.11,
"grad_norm": 33.31068420410156,
"learning_rate": 2.9892427511811912e-05,
"loss": 3.548,
"step": 1340
},
{
"epoch": 0.11,
"grad_norm": 29.932533264160156,
"learning_rate": 2.9886541645658435e-05,
"loss": 3.7486,
"step": 1360
},
{
"epoch": 0.11,
"grad_norm": 35.59455490112305,
"learning_rate": 2.9880499626727284e-05,
"loss": 3.6342,
"step": 1380
},
{
"epoch": 0.11,
"grad_norm": 29.93869400024414,
"learning_rate": 2.9874301518396377e-05,
"loss": 3.6615,
"step": 1400
},
{
"epoch": 0.11,
"grad_norm": 43.417213439941406,
"learning_rate": 2.986794738568094e-05,
"loss": 3.607,
"step": 1420
},
{
"epoch": 0.12,
"grad_norm": 52.483917236328125,
"learning_rate": 2.9861437295232825e-05,
"loss": 3.5937,
"step": 1440
},
{
"epoch": 0.12,
"grad_norm": 30.312334060668945,
"learning_rate": 2.9854771315339787e-05,
"loss": 3.5991,
"step": 1460
},
{
"epoch": 0.12,
"grad_norm": 49.459136962890625,
"learning_rate": 2.984794951592481e-05,
"loss": 3.5261,
"step": 1480
},
{
"epoch": 0.12,
"grad_norm": 34.81111526489258,
"learning_rate": 2.984097196854534e-05,
"loss": 3.6818,
"step": 1500
},
{
"epoch": 0.12,
"grad_norm": 34.721946716308594,
"learning_rate": 2.9833838746392544e-05,
"loss": 3.5636,
"step": 1520
},
{
"epoch": 0.12,
"grad_norm": 31.46621322631836,
"learning_rate": 2.982654992429056e-05,
"loss": 3.5597,
"step": 1540
},
{
"epoch": 0.12,
"grad_norm": 38.78512191772461,
"learning_rate": 2.981910557869566e-05,
"loss": 3.661,
"step": 1560
},
{
"epoch": 0.13,
"grad_norm": 27.38837432861328,
"learning_rate": 2.981150578769553e-05,
"loss": 3.6173,
"step": 1580
},
{
"epoch": 0.13,
"grad_norm": 45.619632720947266,
"learning_rate": 2.980375063100836e-05,
"loss": 3.6632,
"step": 1600
},
{
"epoch": 0.13,
"grad_norm": 30.708433151245117,
"learning_rate": 2.979584018998209e-05,
"loss": 3.5165,
"step": 1620
},
{
"epoch": 0.13,
"grad_norm": 35.472938537597656,
"learning_rate": 2.97877745475935e-05,
"loss": 3.5157,
"step": 1640
},
{
"epoch": 0.13,
"grad_norm": 39.029415130615234,
"learning_rate": 2.9779553788447358e-05,
"loss": 3.6259,
"step": 1660
},
{
"epoch": 0.13,
"grad_norm": 57.90769577026367,
"learning_rate": 2.977117799877554e-05,
"loss": 3.6378,
"step": 1680
},
{
"epoch": 0.14,
"grad_norm": 36.95255661010742,
"learning_rate": 2.9762647266436115e-05,
"loss": 3.5845,
"step": 1700
},
{
"epoch": 0.14,
"grad_norm": 27.456787109375,
"learning_rate": 2.9753961680912432e-05,
"loss": 3.6647,
"step": 1720
},
{
"epoch": 0.14,
"grad_norm": 27.383285522460938,
"learning_rate": 2.9745121333312166e-05,
"loss": 3.6668,
"step": 1740
},
{
"epoch": 0.14,
"grad_norm": 26.555049896240234,
"learning_rate": 2.9736126316366385e-05,
"loss": 3.6617,
"step": 1760
},
{
"epoch": 0.14,
"grad_norm": 34.009620666503906,
"learning_rate": 2.9726976724428563e-05,
"loss": 3.572,
"step": 1780
},
{
"epoch": 0.14,
"grad_norm": 45.44181823730469,
"learning_rate": 2.9717672653473588e-05,
"loss": 3.6354,
"step": 1800
},
{
"epoch": 0.15,
"grad_norm": 30.79588508605957,
"learning_rate": 2.9708214201096758e-05,
"loss": 3.6953,
"step": 1820
},
{
"epoch": 0.15,
"grad_norm": 46.61872482299805,
"learning_rate": 2.9698601466512767e-05,
"loss": 3.5373,
"step": 1840
},
{
"epoch": 0.15,
"grad_norm": 42.86500930786133,
"learning_rate": 2.9688834550554647e-05,
"loss": 3.5982,
"step": 1860
},
{
"epoch": 0.15,
"grad_norm": 33.480289459228516,
"learning_rate": 2.9678913555672733e-05,
"loss": 3.6024,
"step": 1880
},
{
"epoch": 0.15,
"grad_norm": 36.41415786743164,
"learning_rate": 2.966883858593356e-05,
"loss": 3.4843,
"step": 1900
},
{
"epoch": 0.15,
"grad_norm": 41.39873123168945,
"learning_rate": 2.9658609747018796e-05,
"loss": 3.5257,
"step": 1920
},
{
"epoch": 0.16,
"grad_norm": 31.24024200439453,
"learning_rate": 2.964822714622412e-05,
"loss": 3.5927,
"step": 1940
},
{
"epoch": 0.16,
"grad_norm": 52.78026580810547,
"learning_rate": 2.9637690892458103e-05,
"loss": 3.4678,
"step": 1960
},
{
"epoch": 0.16,
"grad_norm": 27.40117835998535,
"learning_rate": 2.962700109624106e-05,
"loss": 3.5541,
"step": 1980
},
{
"epoch": 0.16,
"grad_norm": 23.172683715820312,
"learning_rate": 2.961615786970389e-05,
"loss": 3.5713,
"step": 2000
},
{
"epoch": 0.16,
"grad_norm": 24.177541732788086,
"learning_rate": 2.960516132658692e-05,
"loss": 3.585,
"step": 2020
},
{
"epoch": 0.16,
"grad_norm": 44.673912048339844,
"learning_rate": 2.9594011582238672e-05,
"loss": 3.5035,
"step": 2040
},
{
"epoch": 0.16,
"grad_norm": 35.91664505004883,
"learning_rate": 2.95827087536147e-05,
"loss": 3.6404,
"step": 2060
},
{
"epoch": 0.17,
"grad_norm": 27.3450870513916,
"learning_rate": 2.9571252959276313e-05,
"loss": 3.5121,
"step": 2080
},
{
"epoch": 0.17,
"grad_norm": 25.66405487060547,
"learning_rate": 2.955964431938939e-05,
"loss": 3.5009,
"step": 2100
},
{
"epoch": 0.17,
"grad_norm": 24.1674861907959,
"learning_rate": 2.9547882955723052e-05,
"loss": 3.5482,
"step": 2120
},
{
"epoch": 0.17,
"grad_norm": 49.72268295288086,
"learning_rate": 2.953596899164846e-05,
"loss": 3.4969,
"step": 2140
},
{
"epoch": 0.17,
"grad_norm": 26.238168716430664,
"learning_rate": 2.9523902552137436e-05,
"loss": 3.5541,
"step": 2160
},
{
"epoch": 0.17,
"grad_norm": 30.524545669555664,
"learning_rate": 2.951168376376124e-05,
"loss": 3.6343,
"step": 2180
},
{
"epoch": 0.18,
"grad_norm": 38.179908752441406,
"learning_rate": 2.9499312754689168e-05,
"loss": 3.4795,
"step": 2200
},
{
"epoch": 0.18,
"grad_norm": 32.98453903198242,
"learning_rate": 2.9486789654687256e-05,
"loss": 3.6333,
"step": 2220
},
{
"epoch": 0.18,
"grad_norm": 26.77848243713379,
"learning_rate": 2.94741145951169e-05,
"loss": 3.5654,
"step": 2240
},
{
"epoch": 0.18,
"grad_norm": 27.737852096557617,
"learning_rate": 2.9461287708933475e-05,
"loss": 3.5044,
"step": 2260
},
{
"epoch": 0.18,
"grad_norm": 34.2584342956543,
"learning_rate": 2.9448309130684944e-05,
"loss": 3.5979,
"step": 2280
},
{
"epoch": 0.18,
"grad_norm": 57.86616897583008,
"learning_rate": 2.9435178996510456e-05,
"loss": 3.5726,
"step": 2300
},
{
"epoch": 0.19,
"grad_norm": 37.64597702026367,
"learning_rate": 2.9421897444138902e-05,
"loss": 3.5913,
"step": 2320
},
{
"epoch": 0.19,
"grad_norm": 35.22037124633789,
"learning_rate": 2.9408464612887484e-05,
"loss": 3.5959,
"step": 2340
},
{
"epoch": 0.19,
"grad_norm": 31.878395080566406,
"learning_rate": 2.9394880643660242e-05,
"loss": 3.5974,
"step": 2360
},
{
"epoch": 0.19,
"grad_norm": 43.614994049072266,
"learning_rate": 2.938114567894659e-05,
"loss": 3.4834,
"step": 2380
},
{
"epoch": 0.19,
"grad_norm": 27.587766647338867,
"learning_rate": 2.9367259862819805e-05,
"loss": 3.6154,
"step": 2400
},
{
"epoch": 0.19,
"grad_norm": 30.223772048950195,
"learning_rate": 2.9353223340935533e-05,
"loss": 3.4871,
"step": 2420
},
{
"epoch": 0.2,
"grad_norm": 34.057884216308594,
"learning_rate": 2.933903626053024e-05,
"loss": 3.605,
"step": 2440
},
{
"epoch": 0.2,
"grad_norm": 39.219242095947266,
"learning_rate": 2.932469877041969e-05,
"loss": 3.6091,
"step": 2460
},
{
"epoch": 0.2,
"grad_norm": 33.33955001831055,
"learning_rate": 2.931021102099737e-05,
"loss": 3.4862,
"step": 2480
},
{
"epoch": 0.2,
"grad_norm": 37.07484436035156,
"learning_rate": 2.9295573164232913e-05,
"loss": 3.5267,
"step": 2500
},
{
"epoch": 0.2,
"grad_norm": 27.145864486694336,
"learning_rate": 2.9280785353670514e-05,
"loss": 3.4369,
"step": 2520
},
{
"epoch": 0.2,
"grad_norm": 30.31035041809082,
"learning_rate": 2.9265847744427305e-05,
"loss": 3.6056,
"step": 2540
},
{
"epoch": 0.2,
"grad_norm": 40.823490142822266,
"learning_rate": 2.925076049319174e-05,
"loss": 3.5916,
"step": 2560
},
{
"epoch": 0.21,
"grad_norm": 44.224796295166016,
"learning_rate": 2.9235523758221944e-05,
"loss": 3.5881,
"step": 2580
},
{
"epoch": 0.21,
"grad_norm": 33.34773254394531,
"learning_rate": 2.922013769934406e-05,
"loss": 3.5315,
"step": 2600
},
{
"epoch": 0.21,
"grad_norm": 25.755775451660156,
"learning_rate": 2.920460247795056e-05,
"loss": 3.621,
"step": 2620
},
{
"epoch": 0.21,
"grad_norm": 29.75731086730957,
"learning_rate": 2.918891825699857e-05,
"loss": 3.6067,
"step": 2640
},
{
"epoch": 0.21,
"grad_norm": 40.04263687133789,
"learning_rate": 2.9173085201008144e-05,
"loss": 3.5365,
"step": 2660
},
{
"epoch": 0.21,
"grad_norm": 26.394916534423828,
"learning_rate": 2.9157103476060547e-05,
"loss": 3.5944,
"step": 2680
},
{
"epoch": 0.22,
"grad_norm": 23.64137077331543,
"learning_rate": 2.914097324979651e-05,
"loss": 3.5048,
"step": 2700
},
{
"epoch": 0.22,
"grad_norm": 35.18840408325195,
"learning_rate": 2.9124694691414485e-05,
"loss": 3.5769,
"step": 2720
},
{
"epoch": 0.22,
"grad_norm": 25.55812644958496,
"learning_rate": 2.9108267971668828e-05,
"loss": 3.5082,
"step": 2740
},
{
"epoch": 0.22,
"grad_norm": 26.25998878479004,
"learning_rate": 2.909169326286807e-05,
"loss": 3.4967,
"step": 2760
},
{
"epoch": 0.22,
"grad_norm": 41.78467559814453,
"learning_rate": 2.9074970738873054e-05,
"loss": 3.6409,
"step": 2780
},
{
"epoch": 0.22,
"grad_norm": 27.53672218322754,
"learning_rate": 2.9058100575095156e-05,
"loss": 3.5903,
"step": 2800
},
{
"epoch": 0.23,
"grad_norm": 27.329477310180664,
"learning_rate": 2.90410829484944e-05,
"loss": 3.5148,
"step": 2820
},
{
"epoch": 0.23,
"grad_norm": 34.077083587646484,
"learning_rate": 2.902391803757764e-05,
"loss": 3.5561,
"step": 2840
},
{
"epoch": 0.23,
"grad_norm": 33.75153732299805,
"learning_rate": 2.900660602239667e-05,
"loss": 3.498,
"step": 2860
},
{
"epoch": 0.23,
"grad_norm": 28.681190490722656,
"learning_rate": 2.8989147084546335e-05,
"loss": 3.501,
"step": 2880
},
{
"epoch": 0.23,
"grad_norm": 41.04441833496094,
"learning_rate": 2.8971541407162637e-05,
"loss": 3.5707,
"step": 2900
},
{
"epoch": 0.23,
"grad_norm": 31.303403854370117,
"learning_rate": 2.8953789174920795e-05,
"loss": 3.5748,
"step": 2920
},
{
"epoch": 0.24,
"grad_norm": 36.61361312866211,
"learning_rate": 2.8935890574033325e-05,
"loss": 3.6372,
"step": 2940
},
{
"epoch": 0.24,
"grad_norm": 24.437808990478516,
"learning_rate": 2.8917845792248085e-05,
"loss": 3.5737,
"step": 2960
},
{
"epoch": 0.24,
"grad_norm": 47.12533187866211,
"learning_rate": 2.8899655018846297e-05,
"loss": 3.5125,
"step": 2980
},
{
"epoch": 0.24,
"grad_norm": 23.667312622070312,
"learning_rate": 2.8881318444640564e-05,
"loss": 3.5043,
"step": 3000
},
{
"epoch": 0.24,
"grad_norm": 26.047521591186523,
"learning_rate": 2.8862836261972873e-05,
"loss": 3.6236,
"step": 3020
},
{
"epoch": 0.24,
"grad_norm": 39.91059494018555,
"learning_rate": 2.8844208664712577e-05,
"loss": 3.4851,
"step": 3040
},
{
"epoch": 0.24,
"grad_norm": 57.92033004760742,
"learning_rate": 2.882543584825435e-05,
"loss": 3.5578,
"step": 3060
},
{
"epoch": 0.25,
"grad_norm": 99.40699005126953,
"learning_rate": 2.880651800951616e-05,
"loss": 3.577,
"step": 3080
},
{
"epoch": 0.25,
"grad_norm": 30.7738094329834,
"learning_rate": 2.8787455346937182e-05,
"loss": 3.5683,
"step": 3100
},
{
"epoch": 0.25,
"grad_norm": 55.61745834350586,
"learning_rate": 2.876824806047573e-05,
"loss": 3.4959,
"step": 3120
},
{
"epoch": 0.25,
"grad_norm": 23.431480407714844,
"learning_rate": 2.8748896351607145e-05,
"loss": 3.5882,
"step": 3140
},
{
"epoch": 0.25,
"grad_norm": 20.944149017333984,
"learning_rate": 2.8729400423321693e-05,
"loss": 3.6096,
"step": 3160
},
{
"epoch": 0.25,
"grad_norm": 46.77301788330078,
"learning_rate": 2.8709760480122443e-05,
"loss": 3.5665,
"step": 3180
},
{
"epoch": 0.26,
"grad_norm": 36.645328521728516,
"learning_rate": 2.8689976728023103e-05,
"loss": 3.5087,
"step": 3200
},
{
"epoch": 0.26,
"grad_norm": 32.61063003540039,
"learning_rate": 2.8670049374545873e-05,
"loss": 3.5054,
"step": 3220
},
{
"epoch": 0.26,
"grad_norm": 47.47910690307617,
"learning_rate": 2.8649978628719256e-05,
"loss": 3.5674,
"step": 3240
},
{
"epoch": 0.26,
"grad_norm": 40.817115783691406,
"learning_rate": 2.8629764701075885e-05,
"loss": 3.4504,
"step": 3260
},
{
"epoch": 0.26,
"grad_norm": 32.290626525878906,
"learning_rate": 2.8609407803650295e-05,
"loss": 3.4699,
"step": 3280
},
{
"epoch": 0.26,
"grad_norm": 26.942697525024414,
"learning_rate": 2.8588908149976702e-05,
"loss": 3.5642,
"step": 3300
},
{
"epoch": 0.27,
"grad_norm": 30.925710678100586,
"learning_rate": 2.856826595508678e-05,
"loss": 3.6097,
"step": 3320
},
{
"epoch": 0.27,
"grad_norm": 42.991546630859375,
"learning_rate": 2.8547481435507382e-05,
"loss": 3.5292,
"step": 3340
},
{
"epoch": 0.27,
"grad_norm": 23.902685165405273,
"learning_rate": 2.852655480925828e-05,
"loss": 3.5195,
"step": 3360
},
{
"epoch": 0.27,
"grad_norm": 36.46931076049805,
"learning_rate": 2.8505486295849884e-05,
"loss": 3.5451,
"step": 3380
},
{
"epoch": 0.27,
"grad_norm": 35.51163101196289,
"learning_rate": 2.848427611628093e-05,
"loss": 3.4868,
"step": 3400
},
{
"epoch": 0.27,
"grad_norm": 28.35199546813965,
"learning_rate": 2.8462924493036168e-05,
"loss": 3.4984,
"step": 3420
},
{
"epoch": 0.28,
"grad_norm": 40.32368850708008,
"learning_rate": 2.8441431650084018e-05,
"loss": 3.4839,
"step": 3440
},
{
"epoch": 0.28,
"grad_norm": 46.23302459716797,
"learning_rate": 2.841979781287424e-05,
"loss": 3.5411,
"step": 3460
},
{
"epoch": 0.28,
"grad_norm": 36.41411209106445,
"learning_rate": 2.8398023208335537e-05,
"loss": 3.5701,
"step": 3480
},
{
"epoch": 0.28,
"grad_norm": 29.031347274780273,
"learning_rate": 2.8376108064873216e-05,
"loss": 3.5228,
"step": 3500
},
{
"epoch": 0.28,
"grad_norm": 32.50440216064453,
"learning_rate": 2.835405261236676e-05,
"loss": 3.479,
"step": 3520
},
{
"epoch": 0.28,
"grad_norm": 30.749858856201172,
"learning_rate": 2.833185708216743e-05,
"loss": 3.53,
"step": 3540
},
{
"epoch": 0.28,
"grad_norm": 27.148874282836914,
"learning_rate": 2.8309521707095835e-05,
"loss": 3.4933,
"step": 3560
},
{
"epoch": 0.29,
"grad_norm": 28.534822463989258,
"learning_rate": 2.8287046721439487e-05,
"loss": 3.5881,
"step": 3580
},
{
"epoch": 0.29,
"grad_norm": 53.12272644042969,
"learning_rate": 2.8264432360950355e-05,
"loss": 3.5626,
"step": 3600
},
{
"epoch": 0.29,
"grad_norm": 27.4322509765625,
"learning_rate": 2.8241678862842374e-05,
"loss": 3.4831,
"step": 3620
},
{
"epoch": 0.29,
"grad_norm": 31.194787979125977,
"learning_rate": 2.8218786465788984e-05,
"loss": 3.6001,
"step": 3640
},
{
"epoch": 0.29,
"grad_norm": 27.951440811157227,
"learning_rate": 2.8195755409920584e-05,
"loss": 3.4387,
"step": 3660
},
{
"epoch": 0.29,
"grad_norm": 43.26001739501953,
"learning_rate": 2.8172585936822056e-05,
"loss": 3.5127,
"step": 3680
},
{
"epoch": 0.3,
"grad_norm": 30.359540939331055,
"learning_rate": 2.814927828953022e-05,
"loss": 3.4761,
"step": 3700
},
{
"epoch": 0.3,
"grad_norm": 29.243453979492188,
"learning_rate": 2.812583271253125e-05,
"loss": 3.6265,
"step": 3720
},
{
"epoch": 0.3,
"grad_norm": 25.85703468322754,
"learning_rate": 2.8102249451758162e-05,
"loss": 3.5619,
"step": 3740
},
{
"epoch": 0.3,
"grad_norm": 32.1122932434082,
"learning_rate": 2.8078528754588207e-05,
"loss": 3.5173,
"step": 3760
},
{
"epoch": 0.3,
"grad_norm": 28.02115821838379,
"learning_rate": 2.805467086984027e-05,
"loss": 3.4905,
"step": 3780
},
{
"epoch": 0.3,
"grad_norm": 41.776817321777344,
"learning_rate": 2.803067604777227e-05,
"loss": 3.5289,
"step": 3800
},
{
"epoch": 0.31,
"grad_norm": 33.876312255859375,
"learning_rate": 2.8006544540078535e-05,
"loss": 3.5414,
"step": 3820
},
{
"epoch": 0.31,
"grad_norm": 33.06633758544922,
"learning_rate": 2.798227659988717e-05,
"loss": 3.5586,
"step": 3840
},
{
"epoch": 0.31,
"grad_norm": 24.628082275390625,
"learning_rate": 2.7957872481757377e-05,
"loss": 3.5455,
"step": 3860
},
{
"epoch": 0.31,
"grad_norm": 23.779598236083984,
"learning_rate": 2.793333244167681e-05,
"loss": 3.5345,
"step": 3880
},
{
"epoch": 0.31,
"grad_norm": 29.29547119140625,
"learning_rate": 2.790865673705888e-05,
"loss": 3.4588,
"step": 3900
},
{
"epoch": 0.31,
"grad_norm": 29.236722946166992,
"learning_rate": 2.7883845626740046e-05,
"loss": 3.5286,
"step": 3920
},
{
"epoch": 0.32,
"grad_norm": 36.6258430480957,
"learning_rate": 2.7858899370977123e-05,
"loss": 3.589,
"step": 3940
},
{
"epoch": 0.32,
"grad_norm": 48.60535430908203,
"learning_rate": 2.783381823144452e-05,
"loss": 3.6398,
"step": 3960
},
{
"epoch": 0.32,
"grad_norm": 28.959075927734375,
"learning_rate": 2.780860247123153e-05,
"loss": 3.4484,
"step": 3980
},
{
"epoch": 0.32,
"grad_norm": 30.727558135986328,
"learning_rate": 2.778325235483954e-05,
"loss": 3.5112,
"step": 4000
},
{
"epoch": 0.32,
"grad_norm": 32.5091667175293,
"learning_rate": 2.775776814817928e-05,
"loss": 3.4266,
"step": 4020
},
{
"epoch": 0.32,
"grad_norm": 37.957637786865234,
"learning_rate": 2.7732150118568016e-05,
"loss": 3.6768,
"step": 4040
},
{
"epoch": 0.32,
"grad_norm": 42.639320373535156,
"learning_rate": 2.770639853472676e-05,
"loss": 3.5102,
"step": 4060
},
{
"epoch": 0.33,
"grad_norm": 29.558870315551758,
"learning_rate": 2.768051366677744e-05,
"loss": 3.5354,
"step": 4080
},
{
"epoch": 0.33,
"grad_norm": 23.12784194946289,
"learning_rate": 2.765449578624007e-05,
"loss": 3.5432,
"step": 4100
},
{
"epoch": 0.33,
"grad_norm": 27.601444244384766,
"learning_rate": 2.7628345166029907e-05,
"loss": 3.5672,
"step": 4120
},
{
"epoch": 0.33,
"grad_norm": 26.26235008239746,
"learning_rate": 2.760206208045458e-05,
"loss": 3.5635,
"step": 4140
},
{
"epoch": 0.33,
"grad_norm": 57.84916305541992,
"learning_rate": 2.7575646805211224e-05,
"loss": 3.5254,
"step": 4160
},
{
"epoch": 0.33,
"grad_norm": 25.554025650024414,
"learning_rate": 2.7549099617383573e-05,
"loss": 3.5142,
"step": 4180
},
{
"epoch": 0.34,
"grad_norm": 38.82815170288086,
"learning_rate": 2.7522420795439067e-05,
"loss": 3.6104,
"step": 4200
},
{
"epoch": 0.34,
"grad_norm": 28.093948364257812,
"learning_rate": 2.7495610619225925e-05,
"loss": 3.5265,
"step": 4220
},
{
"epoch": 0.34,
"grad_norm": 26.187891006469727,
"learning_rate": 2.746866936997021e-05,
"loss": 3.4307,
"step": 4240
},
{
"epoch": 0.34,
"grad_norm": 35.642738342285156,
"learning_rate": 2.7441597330272874e-05,
"loss": 3.5501,
"step": 4260
},
{
"epoch": 0.34,
"grad_norm": 32.99201965332031,
"learning_rate": 2.7414394784106812e-05,
"loss": 3.4463,
"step": 4280
},
{
"epoch": 0.34,
"grad_norm": 28.848899841308594,
"learning_rate": 2.7387062016813845e-05,
"loss": 3.5128,
"step": 4300
},
{
"epoch": 0.35,
"grad_norm": 30.502288818359375,
"learning_rate": 2.7359599315101788e-05,
"loss": 3.4909,
"step": 4320
},
{
"epoch": 0.35,
"grad_norm": 27.91356658935547,
"learning_rate": 2.7332006967041373e-05,
"loss": 3.53,
"step": 4340
},
{
"epoch": 0.35,
"grad_norm": 47.296627044677734,
"learning_rate": 2.7304285262063274e-05,
"loss": 3.4793,
"step": 4360
},
{
"epoch": 0.35,
"grad_norm": 33.32771682739258,
"learning_rate": 2.7276434490955074e-05,
"loss": 3.4695,
"step": 4380
},
{
"epoch": 0.35,
"grad_norm": 36.65375518798828,
"learning_rate": 2.7248454945858164e-05,
"loss": 3.4502,
"step": 4400
},
{
"epoch": 0.35,
"grad_norm": 62.65798568725586,
"learning_rate": 2.7220346920264743e-05,
"loss": 3.5893,
"step": 4420
},
{
"epoch": 0.36,
"grad_norm": 26.921863555908203,
"learning_rate": 2.71921107090147e-05,
"loss": 3.4381,
"step": 4440
},
{
"epoch": 0.36,
"grad_norm": 35.91081619262695,
"learning_rate": 2.7163746608292525e-05,
"loss": 3.5292,
"step": 4460
},
{
"epoch": 0.36,
"grad_norm": 42.172306060791016,
"learning_rate": 2.7135254915624213e-05,
"loss": 3.5314,
"step": 4480
},
{
"epoch": 0.36,
"grad_norm": 65.17137908935547,
"learning_rate": 2.710663592987414e-05,
"loss": 3.518,
"step": 4500
},
{
"epoch": 0.36,
"grad_norm": 32.53944396972656,
"learning_rate": 2.7077889951241924e-05,
"loss": 3.5562,
"step": 4520
},
{
"epoch": 0.36,
"grad_norm": 25.663211822509766,
"learning_rate": 2.704901728125928e-05,
"loss": 3.5537,
"step": 4540
},
{
"epoch": 0.36,
"grad_norm": 23.626951217651367,
"learning_rate": 2.702001822278685e-05,
"loss": 3.5525,
"step": 4560
},
{
"epoch": 0.37,
"grad_norm": 30.527162551879883,
"learning_rate": 2.699089308001104e-05,
"loss": 3.4913,
"step": 4580
},
{
"epoch": 0.37,
"grad_norm": 37.62814712524414,
"learning_rate": 2.696164215844081e-05,
"loss": 3.5342,
"step": 4600
},
{
"epoch": 0.37,
"grad_norm": 26.47550392150879,
"learning_rate": 2.6932265764904494e-05,
"loss": 3.4708,
"step": 4620
},
{
"epoch": 0.37,
"grad_norm": 30.779155731201172,
"learning_rate": 2.6902764207546553e-05,
"loss": 3.5078,
"step": 4640
},
{
"epoch": 0.37,
"grad_norm": 34.16841506958008,
"learning_rate": 2.6873137795824367e-05,
"loss": 3.4754,
"step": 4660
},
{
"epoch": 0.37,
"grad_norm": 36.18644714355469,
"learning_rate": 2.6843386840504972e-05,
"loss": 3.4413,
"step": 4680
},
{
"epoch": 0.38,
"grad_norm": 34.17078399658203,
"learning_rate": 2.6813511653661817e-05,
"loss": 3.4916,
"step": 4700
},
{
"epoch": 0.38,
"grad_norm": 24.693265914916992,
"learning_rate": 2.678351254867147e-05,
"loss": 3.4072,
"step": 4720
},
{
"epoch": 0.38,
"grad_norm": 27.831270217895508,
"learning_rate": 2.675338984021035e-05,
"loss": 3.5353,
"step": 4740
},
{
"epoch": 0.38,
"grad_norm": 18.52642059326172,
"learning_rate": 2.672314384425142e-05,
"loss": 3.4582,
"step": 4760
},
{
"epoch": 0.38,
"grad_norm": 44.86159133911133,
"learning_rate": 2.669277487806085e-05,
"loss": 3.4384,
"step": 4780
},
{
"epoch": 0.38,
"grad_norm": 28.123258590698242,
"learning_rate": 2.6662283260194743e-05,
"loss": 3.5766,
"step": 4800
},
{
"epoch": 0.39,
"grad_norm": 27.150848388671875,
"learning_rate": 2.6631669310495725e-05,
"loss": 3.5095,
"step": 4820
},
{
"epoch": 0.39,
"grad_norm": 43.018043518066406,
"learning_rate": 2.660093335008966e-05,
"loss": 3.4795,
"step": 4840
},
{
"epoch": 0.39,
"grad_norm": 29.27479362487793,
"learning_rate": 2.6570075701382213e-05,
"loss": 3.5236,
"step": 4860
},
{
"epoch": 0.39,
"grad_norm": 19.501262664794922,
"learning_rate": 2.653909668805553e-05,
"loss": 3.5479,
"step": 4880
},
{
"epoch": 0.39,
"grad_norm": 57.28257369995117,
"learning_rate": 2.6507996635064792e-05,
"loss": 3.5156,
"step": 4900
},
{
"epoch": 0.39,
"grad_norm": 27.764036178588867,
"learning_rate": 2.647677586863484e-05,
"loss": 3.5222,
"step": 4920
},
{
"epoch": 0.4,
"grad_norm": 33.74861526489258,
"learning_rate": 2.644543471625675e-05,
"loss": 3.4773,
"step": 4940
},
{
"epoch": 0.4,
"grad_norm": 25.404314041137695,
"learning_rate": 2.6413973506684366e-05,
"loss": 3.4646,
"step": 4960
},
{
"epoch": 0.4,
"grad_norm": 33.307674407958984,
"learning_rate": 2.63823925699309e-05,
"loss": 3.4975,
"step": 4980
},
{
"epoch": 0.4,
"grad_norm": 28.22442054748535,
"learning_rate": 2.6350692237265428e-05,
"loss": 3.4797,
"step": 5000
},
{
"epoch": 0.4,
"grad_norm": 26.52558135986328,
"learning_rate": 2.6318872841209446e-05,
"loss": 3.4309,
"step": 5020
},
{
"epoch": 0.4,
"grad_norm": 36.679386138916016,
"learning_rate": 2.6286934715533353e-05,
"loss": 3.585,
"step": 5040
},
{
"epoch": 0.4,
"grad_norm": 42.78778839111328,
"learning_rate": 2.6254878195252985e-05,
"loss": 3.4239,
"step": 5060
},
{
"epoch": 0.41,
"grad_norm": 34.719482421875,
"learning_rate": 2.622270361662606e-05,
"loss": 3.4777,
"step": 5080
},
{
"epoch": 0.41,
"grad_norm": 33.207427978515625,
"learning_rate": 2.619041131714869e-05,
"loss": 3.5593,
"step": 5100
},
{
"epoch": 0.41,
"grad_norm": 35.62514877319336,
"learning_rate": 2.6158001635551818e-05,
"loss": 3.5606,
"step": 5120
},
{
"epoch": 0.41,
"grad_norm": 31.691574096679688,
"learning_rate": 2.6125474911797664e-05,
"loss": 3.4959,
"step": 5140
},
{
"epoch": 0.41,
"grad_norm": 34.012420654296875,
"learning_rate": 2.6092831487076163e-05,
"loss": 3.57,
"step": 5160
},
{
"epoch": 0.41,
"grad_norm": 36.75544357299805,
"learning_rate": 2.6060071703801406e-05,
"loss": 3.4718,
"step": 5180
},
{
"epoch": 0.42,
"grad_norm": 37.18219757080078,
"learning_rate": 2.6027195905608006e-05,
"loss": 3.5332,
"step": 5200
},
{
"epoch": 0.42,
"grad_norm": 32.344398498535156,
"learning_rate": 2.599420443734754e-05,
"loss": 3.5154,
"step": 5220
},
{
"epoch": 0.42,
"grad_norm": 26.169748306274414,
"learning_rate": 2.596109764508489e-05,
"loss": 3.5462,
"step": 5240
},
{
"epoch": 0.42,
"grad_norm": 33.38447570800781,
"learning_rate": 2.592787587609465e-05,
"loss": 3.5658,
"step": 5260
},
{
"epoch": 0.42,
"grad_norm": 43.4962158203125,
"learning_rate": 2.589453947885745e-05,
"loss": 3.5018,
"step": 5280
},
{
"epoch": 0.42,
"grad_norm": 34.955291748046875,
"learning_rate": 2.5861088803056324e-05,
"loss": 3.4988,
"step": 5300
},
{
"epoch": 0.43,
"grad_norm": 29.175064086914062,
"learning_rate": 2.5827524199573033e-05,
"loss": 3.475,
"step": 5320
},
{
"epoch": 0.43,
"grad_norm": 40.04597091674805,
"learning_rate": 2.5793846020484383e-05,
"loss": 3.4903,
"step": 5340
},
{
"epoch": 0.43,
"grad_norm": 27.712465286254883,
"learning_rate": 2.5760054619058537e-05,
"loss": 3.5108,
"step": 5360
},
{
"epoch": 0.43,
"grad_norm": 43.66648864746094,
"learning_rate": 2.5726150349751306e-05,
"loss": 3.4656,
"step": 5380
},
{
"epoch": 0.43,
"grad_norm": 28.305545806884766,
"learning_rate": 2.569213356820244e-05,
"loss": 3.5766,
"step": 5400
},
{
"epoch": 0.43,
"grad_norm": 34.897857666015625,
"learning_rate": 2.565800463123187e-05,
"loss": 3.4286,
"step": 5420
},
{
"epoch": 0.44,
"grad_norm": 43.083229064941406,
"learning_rate": 2.5623763896835997e-05,
"loss": 3.4292,
"step": 5440
},
{
"epoch": 0.44,
"grad_norm": 35.71794128417969,
"learning_rate": 2.5589411724183926e-05,
"loss": 3.5542,
"step": 5460
},
{
"epoch": 0.44,
"grad_norm": 23.70340347290039,
"learning_rate": 2.555494847361369e-05,
"loss": 3.5276,
"step": 5480
},
{
"epoch": 0.44,
"grad_norm": 55.299556732177734,
"learning_rate": 2.552037450662849e-05,
"loss": 3.5644,
"step": 5500
},
{
"epoch": 0.44,
"grad_norm": 22.433879852294922,
"learning_rate": 2.5485690185892864e-05,
"loss": 3.4475,
"step": 5520
},
{
"epoch": 0.44,
"grad_norm": 26.397438049316406,
"learning_rate": 2.545089587522893e-05,
"loss": 3.507,
"step": 5540
},
{
"epoch": 0.44,
"grad_norm": 25.061750411987305,
"learning_rate": 2.5415991939612545e-05,
"loss": 3.4521,
"step": 5560
},
{
"epoch": 0.45,
"grad_norm": 35.5067138671875,
"learning_rate": 2.5380978745169473e-05,
"loss": 3.4894,
"step": 5580
},
{
"epoch": 0.45,
"grad_norm": 27.13253402709961,
"learning_rate": 2.5345856659171567e-05,
"loss": 3.4033,
"step": 5600
},
{
"epoch": 0.45,
"grad_norm": 34.580299377441406,
"learning_rate": 2.5310626050032873e-05,
"loss": 3.5598,
"step": 5620
},
{
"epoch": 0.45,
"grad_norm": 21.44734764099121,
"learning_rate": 2.527528728730582e-05,
"loss": 3.5189,
"step": 5640
},
{
"epoch": 0.45,
"grad_norm": 26.825183868408203,
"learning_rate": 2.5239840741677307e-05,
"loss": 3.6052,
"step": 5660
},
{
"epoch": 0.45,
"grad_norm": 29.19519805908203,
"learning_rate": 2.5204286784964823e-05,
"loss": 3.4724,
"step": 5680
},
{
"epoch": 0.46,
"grad_norm": 37.9902229309082,
"learning_rate": 2.516862579011255e-05,
"loss": 3.3665,
"step": 5700
},
{
"epoch": 0.46,
"grad_norm": 28.95624542236328,
"learning_rate": 2.5132858131187446e-05,
"loss": 3.4688,
"step": 5720
},
{
"epoch": 0.46,
"grad_norm": 34.07016372680664,
"learning_rate": 2.509698418337534e-05,
"loss": 3.4938,
"step": 5740
},
{
"epoch": 0.46,
"grad_norm": 32.694766998291016,
"learning_rate": 2.5061004322976953e-05,
"loss": 3.4351,
"step": 5760
},
{
"epoch": 0.46,
"grad_norm": 92.71965789794922,
"learning_rate": 2.5024918927404005e-05,
"loss": 3.5194,
"step": 5780
},
{
"epoch": 0.46,
"grad_norm": 59.62504959106445,
"learning_rate": 2.4988728375175216e-05,
"loss": 3.5436,
"step": 5800
},
{
"epoch": 0.47,
"grad_norm": 40.504127502441406,
"learning_rate": 2.495243304591236e-05,
"loss": 3.561,
"step": 5820
},
{
"epoch": 0.47,
"grad_norm": 43.67660140991211,
"learning_rate": 2.4916033320336263e-05,
"loss": 3.3979,
"step": 5840
},
{
"epoch": 0.47,
"grad_norm": 38.79692077636719,
"learning_rate": 2.487952958026282e-05,
"loss": 3.5581,
"step": 5860
},
{
"epoch": 0.47,
"grad_norm": 34.00471878051758,
"learning_rate": 2.4842922208598996e-05,
"loss": 3.3891,
"step": 5880
},
{
"epoch": 0.47,
"grad_norm": 51.836395263671875,
"learning_rate": 2.480621158933879e-05,
"loss": 3.5146,
"step": 5900
},
{
"epoch": 0.47,
"grad_norm": 34.47429656982422,
"learning_rate": 2.476939810755923e-05,
"loss": 3.4988,
"step": 5920
},
{
"epoch": 0.48,
"grad_norm": 27.388093948364258,
"learning_rate": 2.4732482149416325e-05,
"loss": 3.4577,
"step": 5940
},
{
"epoch": 0.48,
"grad_norm": 22.31077766418457,
"learning_rate": 2.4695464102141002e-05,
"loss": 3.4816,
"step": 5960
},
{
"epoch": 0.48,
"grad_norm": 33.66743469238281,
"learning_rate": 2.4658344354035063e-05,
"loss": 3.5148,
"step": 5980
},
{
"epoch": 0.48,
"grad_norm": 30.200748443603516,
"learning_rate": 2.46211232944671e-05,
"loss": 3.6094,
"step": 6000
},
{
"epoch": 0.48,
"grad_norm": 36.24076843261719,
"learning_rate": 2.4583801313868417e-05,
"loss": 3.4601,
"step": 6020
},
{
"epoch": 0.48,
"grad_norm": 36.80402374267578,
"learning_rate": 2.4546378803728922e-05,
"loss": 3.5053,
"step": 6040
},
{
"epoch": 0.48,
"grad_norm": 25.655963897705078,
"learning_rate": 2.450885615659305e-05,
"loss": 3.4791,
"step": 6060
},
{
"epoch": 0.49,
"grad_norm": 47.66796112060547,
"learning_rate": 2.447123376605561e-05,
"loss": 3.4535,
"step": 6080
},
{
"epoch": 0.49,
"grad_norm": 39.040924072265625,
"learning_rate": 2.4433512026757668e-05,
"loss": 3.5125,
"step": 6100
},
{
"epoch": 0.49,
"grad_norm": 27.91790008544922,
"learning_rate": 2.439569133438243e-05,
"loss": 3.5131,
"step": 6120
},
{
"epoch": 0.49,
"grad_norm": 42.502357482910156,
"learning_rate": 2.435777208565106e-05,
"loss": 3.5424,
"step": 6140
},
{
"epoch": 0.49,
"grad_norm": 26.83557891845703,
"learning_rate": 2.431975467831853e-05,
"loss": 3.4445,
"step": 6160
},
{
"epoch": 0.49,
"grad_norm": 39.8357048034668,
"learning_rate": 2.4281639511169457e-05,
"loss": 3.5702,
"step": 6180
},
{
"epoch": 0.5,
"grad_norm": 31.746124267578125,
"learning_rate": 2.424342698401391e-05,
"loss": 3.4539,
"step": 6200
},
{
"epoch": 0.5,
"grad_norm": 53.038482666015625,
"learning_rate": 2.4205117497683213e-05,
"loss": 3.5491,
"step": 6220
},
{
"epoch": 0.5,
"grad_norm": 28.155752182006836,
"learning_rate": 2.4166711454025754e-05,
"loss": 3.4353,
"step": 6240
},
{
"epoch": 0.5,
"grad_norm": 42.623130798339844,
"learning_rate": 2.4128209255902753e-05,
"loss": 3.4348,
"step": 6260
},
{
"epoch": 0.5,
"grad_norm": 44.352596282958984,
"learning_rate": 2.408961130718405e-05,
"loss": 3.4637,
"step": 6280
},
{
"epoch": 0.5,
"grad_norm": 35.380767822265625,
"learning_rate": 2.405091801274387e-05,
"loss": 3.4403,
"step": 6300
},
{
"epoch": 0.51,
"grad_norm": 67.38970184326172,
"learning_rate": 2.4012129778456556e-05,
"loss": 3.5062,
"step": 6320
},
{
"epoch": 0.51,
"grad_norm": 36.066741943359375,
"learning_rate": 2.397324701119233e-05,
"loss": 3.6017,
"step": 6340
},
{
"epoch": 0.51,
"grad_norm": 29.4312744140625,
"learning_rate": 2.3934270118813024e-05,
"loss": 3.4738,
"step": 6360
},
{
"epoch": 0.51,
"grad_norm": 24.581098556518555,
"learning_rate": 2.3895199510167793e-05,
"loss": 3.4775,
"step": 6380
},
{
"epoch": 0.51,
"grad_norm": 22.933914184570312,
"learning_rate": 2.385603559508884e-05,
"loss": 3.5718,
"step": 6400
},
{
"epoch": 0.51,
"grad_norm": 30.199399948120117,
"learning_rate": 2.3816778784387097e-05,
"loss": 3.4446,
"step": 6420
},
{
"epoch": 0.52,
"grad_norm": 53.968971252441406,
"learning_rate": 2.3777429489847935e-05,
"loss": 3.5161,
"step": 6440
},
{
"epoch": 0.52,
"grad_norm": 34.33303451538086,
"learning_rate": 2.3737988124226834e-05,
"loss": 3.422,
"step": 6460
},
{
"epoch": 0.52,
"grad_norm": 28.162084579467773,
"learning_rate": 2.3698455101245052e-05,
"loss": 3.5403,
"step": 6480
},
{
"epoch": 0.52,
"grad_norm": 28.261327743530273,
"learning_rate": 2.3658830835585294e-05,
"loss": 3.5093,
"step": 6500
},
{
"epoch": 0.52,
"grad_norm": 37.4576530456543,
"learning_rate": 2.361911574288736e-05,
"loss": 3.5952,
"step": 6520
},
{
"epoch": 0.52,
"grad_norm": 41.03891372680664,
"learning_rate": 2.3579310239743776e-05,
"loss": 3.5194,
"step": 6540
},
{
"epoch": 0.52,
"grad_norm": 26.61548614501953,
"learning_rate": 2.353941474369544e-05,
"loss": 3.4933,
"step": 6560
},
{
"epoch": 0.53,
"grad_norm": 33.990203857421875,
"learning_rate": 2.3499429673227224e-05,
"loss": 3.4745,
"step": 6580
},
{
"epoch": 0.53,
"grad_norm": 39.59190368652344,
"learning_rate": 2.3459355447763596e-05,
"loss": 3.4875,
"step": 6600
},
{
"epoch": 0.53,
"grad_norm": 28.182842254638672,
"learning_rate": 2.341919248766422e-05,
"loss": 3.5085,
"step": 6620
},
{
"epoch": 0.53,
"grad_norm": 27.481103897094727,
"learning_rate": 2.3378941214219545e-05,
"loss": 3.5401,
"step": 6640
},
{
"epoch": 0.53,
"grad_norm": 29.300914764404297,
"learning_rate": 2.3338602049646372e-05,
"loss": 3.4601,
"step": 6660
},
{
"epoch": 0.53,
"grad_norm": 27.341121673583984,
"learning_rate": 2.329817541708346e-05,
"loss": 3.4163,
"step": 6680
},
{
"epoch": 0.54,
"grad_norm": 21.337862014770508,
"learning_rate": 2.3257661740587055e-05,
"loss": 3.3443,
"step": 6700
},
{
"epoch": 0.54,
"grad_norm": 38.06822204589844,
"learning_rate": 2.3217061445126444e-05,
"loss": 3.4762,
"step": 6720
},
{
"epoch": 0.54,
"grad_norm": 27.79935646057129,
"learning_rate": 2.3176374956579525e-05,
"loss": 3.4748,
"step": 6740
},
{
"epoch": 0.54,
"grad_norm": 41.864219665527344,
"learning_rate": 2.3135602701728302e-05,
"loss": 3.4859,
"step": 6760
},
{
"epoch": 0.54,
"grad_norm": 48.69860076904297,
"learning_rate": 2.3094745108254437e-05,
"loss": 3.5111,
"step": 6780
},
{
"epoch": 0.54,
"grad_norm": 26.23602867126465,
"learning_rate": 2.305380260473476e-05,
"loss": 3.4637,
"step": 6800
},
{
"epoch": 0.55,
"grad_norm": 32.71681213378906,
"learning_rate": 2.3012775620636747e-05,
"loss": 3.4752,
"step": 6820
},
{
"epoch": 0.55,
"grad_norm": 55.637813568115234,
"learning_rate": 2.2971664586314055e-05,
"loss": 3.531,
"step": 6840
},
{
"epoch": 0.55,
"grad_norm": 29.17162322998047,
"learning_rate": 2.293046993300198e-05,
"loss": 3.567,
"step": 6860
},
{
"epoch": 0.55,
"grad_norm": 21.349533081054688,
"learning_rate": 2.288919209281294e-05,
"loss": 3.4752,
"step": 6880
},
{
"epoch": 0.55,
"grad_norm": 25.281597137451172,
"learning_rate": 2.284783149873195e-05,
"loss": 3.4934,
"step": 6900
},
{
"epoch": 0.55,
"grad_norm": 32.13029861450195,
"learning_rate": 2.2806388584612067e-05,
"loss": 3.4793,
"step": 6920
},
{
"epoch": 0.56,
"grad_norm": 21.672082901000977,
"learning_rate": 2.2764863785169857e-05,
"loss": 3.4366,
"step": 6940
},
{
"epoch": 0.56,
"grad_norm": 29.802305221557617,
"learning_rate": 2.2723257535980804e-05,
"loss": 3.5174,
"step": 6960
},
{
"epoch": 0.56,
"grad_norm": 43.310577392578125,
"learning_rate": 2.2681570273474783e-05,
"loss": 3.4745,
"step": 6980
},
{
"epoch": 0.56,
"grad_norm": 32.417236328125,
"learning_rate": 2.2639802434931447e-05,
"loss": 3.438,
"step": 7000
},
{
"epoch": 0.56,
"grad_norm": 42.29374313354492,
"learning_rate": 2.259795445847566e-05,
"loss": 3.5194,
"step": 7020
},
{
"epoch": 0.56,
"grad_norm": 51.19217300415039,
"learning_rate": 2.2556026783072896e-05,
"loss": 3.496,
"step": 7040
},
{
"epoch": 0.56,
"grad_norm": 24.400171279907227,
"learning_rate": 2.251401984852463e-05,
"loss": 3.4013,
"step": 7060
},
{
"epoch": 0.57,
"grad_norm": 26.298309326171875,
"learning_rate": 2.2471934095463724e-05,
"loss": 3.5607,
"step": 7080
},
{
"epoch": 0.57,
"grad_norm": 27.419946670532227,
"learning_rate": 2.2429769965349818e-05,
"loss": 3.4593,
"step": 7100
},
{
"epoch": 0.57,
"grad_norm": 29.470266342163086,
"learning_rate": 2.2387527900464676e-05,
"loss": 3.4388,
"step": 7120
},
{
"epoch": 0.57,
"grad_norm": 21.410829544067383,
"learning_rate": 2.2345208343907577e-05,
"loss": 3.5141,
"step": 7140
},
{
"epoch": 0.57,
"grad_norm": 32.501766204833984,
"learning_rate": 2.2302811739590642e-05,
"loss": 3.4647,
"step": 7160
},
{
"epoch": 0.57,
"grad_norm": 24.69274139404297,
"learning_rate": 2.2260338532234194e-05,
"loss": 3.4781,
"step": 7180
},
{
"epoch": 0.58,
"grad_norm": 25.721759796142578,
"learning_rate": 2.2217789167362078e-05,
"loss": 3.4405,
"step": 7200
},
{
"epoch": 0.58,
"grad_norm": 34.66562271118164,
"learning_rate": 2.217516409129699e-05,
"loss": 3.5408,
"step": 7220
},
{
"epoch": 0.58,
"grad_norm": 42.050819396972656,
"learning_rate": 2.2132463751155815e-05,
"loss": 3.4422,
"step": 7240
},
{
"epoch": 0.58,
"grad_norm": 26.5496883392334,
"learning_rate": 2.2089688594844917e-05,
"loss": 3.4953,
"step": 7260
},
{
"epoch": 0.58,
"grad_norm": 28.221229553222656,
"learning_rate": 2.2046839071055436e-05,
"loss": 3.3627,
"step": 7280
},
{
"epoch": 0.58,
"grad_norm": 33.21296310424805,
"learning_rate": 2.2003915629258607e-05,
"loss": 3.383,
"step": 7300
},
{
"epoch": 0.59,
"grad_norm": 34.35666275024414,
"learning_rate": 2.196091871970103e-05,
"loss": 3.5508,
"step": 7320
},
{
"epoch": 0.59,
"grad_norm": 25.02892303466797,
"learning_rate": 2.1917848793399926e-05,
"loss": 3.395,
"step": 7340
},
{
"epoch": 0.59,
"grad_norm": 25.45415687561035,
"learning_rate": 2.187470630213845e-05,
"loss": 3.4692,
"step": 7360
},
{
"epoch": 0.59,
"grad_norm": 30.72098159790039,
"learning_rate": 2.1831491698460923e-05,
"loss": 3.5714,
"step": 7380
},
{
"epoch": 0.59,
"grad_norm": 32.58151626586914,
"learning_rate": 2.1788205435668086e-05,
"loss": 3.4358,
"step": 7400
},
{
"epoch": 0.59,
"grad_norm": 32.444644927978516,
"learning_rate": 2.1744847967812352e-05,
"loss": 3.5389,
"step": 7420
},
{
"epoch": 0.6,
"grad_norm": 39.33094787597656,
"learning_rate": 2.1701419749693036e-05,
"loss": 3.4003,
"step": 7440
},
{
"epoch": 0.6,
"grad_norm": 24.18759536743164,
"learning_rate": 2.1657921236851607e-05,
"loss": 3.4501,
"step": 7460
},
{
"epoch": 0.6,
"grad_norm": 24.81824493408203,
"learning_rate": 2.1614352885566874e-05,
"loss": 3.5004,
"step": 7480
},
{
"epoch": 0.6,
"grad_norm": 94.60846710205078,
"learning_rate": 2.1570715152850237e-05,
"loss": 3.3695,
"step": 7500
},
{
"epoch": 0.6,
"grad_norm": 39.35719299316406,
"learning_rate": 2.1527008496440848e-05,
"loss": 3.4058,
"step": 7520
},
{
"epoch": 0.6,
"grad_norm": 23.86483383178711,
"learning_rate": 2.1483233374800863e-05,
"loss": 3.403,
"step": 7540
},
{
"epoch": 0.6,
"grad_norm": 36.54322052001953,
"learning_rate": 2.143939024711059e-05,
"loss": 3.504,
"step": 7560
},
{
"epoch": 0.61,
"grad_norm": 32.654876708984375,
"learning_rate": 2.139547957326369e-05,
"loss": 3.421,
"step": 7580
},
{
"epoch": 0.61,
"grad_norm": 31.50335121154785,
"learning_rate": 2.1351501813862358e-05,
"loss": 3.5079,
"step": 7600
},
{
"epoch": 0.61,
"grad_norm": 27.86897850036621,
"learning_rate": 2.130745743021247e-05,
"loss": 3.5605,
"step": 7620
},
{
"epoch": 0.61,
"grad_norm": 29.914051055908203,
"learning_rate": 2.1263346884318778e-05,
"loss": 3.4826,
"step": 7640
},
{
"epoch": 0.61,
"grad_norm": 30.09800910949707,
"learning_rate": 2.121917063888004e-05,
"loss": 3.5523,
"step": 7660
},
{
"epoch": 0.61,
"grad_norm": 37.76126480102539,
"learning_rate": 2.117492915728416e-05,
"loss": 3.6012,
"step": 7680
},
{
"epoch": 0.62,
"grad_norm": 23.360584259033203,
"learning_rate": 2.1130622903603344e-05,
"loss": 3.45,
"step": 7700
},
{
"epoch": 0.62,
"grad_norm": 31.356124877929688,
"learning_rate": 2.1086252342589235e-05,
"loss": 3.4666,
"step": 7720
},
{
"epoch": 0.62,
"grad_norm": 36.73623275756836,
"learning_rate": 2.1041817939668006e-05,
"loss": 3.5774,
"step": 7740
},
{
"epoch": 0.62,
"grad_norm": 29.439634323120117,
"learning_rate": 2.0997320160935536e-05,
"loss": 3.3953,
"step": 7760
},
{
"epoch": 0.62,
"grad_norm": 22.339067459106445,
"learning_rate": 2.095275947315246e-05,
"loss": 3.4628,
"step": 7780
},
{
"epoch": 0.62,
"grad_norm": 16.739225387573242,
"learning_rate": 2.0908136343739308e-05,
"loss": 3.4708,
"step": 7800
},
{
"epoch": 0.63,
"grad_norm": 27.203229904174805,
"learning_rate": 2.0863451240771592e-05,
"loss": 3.4326,
"step": 7820
},
{
"epoch": 0.63,
"grad_norm": 30.963396072387695,
"learning_rate": 2.0818704632974896e-05,
"loss": 3.445,
"step": 7840
},
{
"epoch": 0.63,
"grad_norm": 23.80015754699707,
"learning_rate": 2.0773896989719967e-05,
"loss": 3.5519,
"step": 7860
},
{
"epoch": 0.63,
"grad_norm": 39.84937286376953,
"learning_rate": 2.072902878101778e-05,
"loss": 3.4384,
"step": 7880
},
{
"epoch": 0.63,
"grad_norm": 25.7357120513916,
"learning_rate": 2.06841004775146e-05,
"loss": 3.4322,
"step": 7900
},
{
"epoch": 0.63,
"grad_norm": 25.275423049926758,
"learning_rate": 2.0639112550487085e-05,
"loss": 3.4426,
"step": 7920
},
{
"epoch": 0.64,
"grad_norm": 28.167940139770508,
"learning_rate": 2.0594065471837302e-05,
"loss": 3.5217,
"step": 7940
},
{
"epoch": 0.64,
"grad_norm": 44.70423889160156,
"learning_rate": 2.0548959714087783e-05,
"loss": 3.5207,
"step": 7960
},
{
"epoch": 0.64,
"grad_norm": 30.5069580078125,
"learning_rate": 2.0503795750376583e-05,
"loss": 3.4576,
"step": 7980
},
{
"epoch": 0.64,
"grad_norm": 24.197166442871094,
"learning_rate": 2.0458574054452316e-05,
"loss": 3.4993,
"step": 8000
},
{
"epoch": 0.64,
"grad_norm": 31.20002555847168,
"learning_rate": 2.0413295100669167e-05,
"loss": 3.4158,
"step": 8020
},
{
"epoch": 0.64,
"grad_norm": 33.78310012817383,
"learning_rate": 2.036795936398194e-05,
"loss": 3.4395,
"step": 8040
},
{
"epoch": 0.64,
"grad_norm": 28.749357223510742,
"learning_rate": 2.0322567319941062e-05,
"loss": 3.451,
"step": 8060
},
{
"epoch": 0.65,
"grad_norm": 19.698383331298828,
"learning_rate": 2.0277119444687586e-05,
"loss": 3.5069,
"step": 8080
},
{
"epoch": 0.65,
"grad_norm": 25.00635528564453,
"learning_rate": 2.0231616214948232e-05,
"loss": 3.3704,
"step": 8100
},
{
"epoch": 0.65,
"grad_norm": 31.135547637939453,
"learning_rate": 2.0186058108030343e-05,
"loss": 3.4219,
"step": 8120
},
{
"epoch": 0.65,
"grad_norm": 29.921775817871094,
"learning_rate": 2.01404456018169e-05,
"loss": 3.431,
"step": 8140
},
{
"epoch": 0.65,
"grad_norm": 20.908395767211914,
"learning_rate": 2.0094779174761507e-05,
"loss": 3.4647,
"step": 8160
},
{
"epoch": 0.65,
"grad_norm": 17.694931030273438,
"learning_rate": 2.0049059305883383e-05,
"loss": 3.4588,
"step": 8180
},
{
"epoch": 0.66,
"grad_norm": 29.230735778808594,
"learning_rate": 2.000328647476231e-05,
"loss": 3.4788,
"step": 8200
},
{
"epoch": 0.66,
"grad_norm": 41.20121383666992,
"learning_rate": 1.995746116153363e-05,
"loss": 3.4812,
"step": 8220
},
{
"epoch": 0.66,
"grad_norm": 39.98432540893555,
"learning_rate": 1.99115838468832e-05,
"loss": 3.4172,
"step": 8240
},
{
"epoch": 0.66,
"grad_norm": 41.378902435302734,
"learning_rate": 1.9865655012042337e-05,
"loss": 3.5478,
"step": 8260
},
{
"epoch": 0.66,
"grad_norm": 42.58726501464844,
"learning_rate": 1.9819675138782785e-05,
"loss": 3.4849,
"step": 8280
},
{
"epoch": 0.66,
"grad_norm": 22.349281311035156,
"learning_rate": 1.9773644709411662e-05,
"loss": 3.4835,
"step": 8300
},
{
"epoch": 0.67,
"grad_norm": 31.974403381347656,
"learning_rate": 1.9727564206766382e-05,
"loss": 3.3919,
"step": 8320
},
{
"epoch": 0.67,
"grad_norm": 20.418291091918945,
"learning_rate": 1.9681434114209617e-05,
"loss": 3.4415,
"step": 8340
},
{
"epoch": 0.67,
"grad_norm": 30.5143985748291,
"learning_rate": 1.963525491562421e-05,
"loss": 3.4806,
"step": 8360
},
{
"epoch": 0.67,
"grad_norm": 27.770360946655273,
"learning_rate": 1.958902709540811e-05,
"loss": 3.4379,
"step": 8380
},
{
"epoch": 0.67,
"grad_norm": 24.266944885253906,
"learning_rate": 1.954275113846926e-05,
"loss": 3.4933,
"step": 8400
},
{
"epoch": 0.67,
"grad_norm": 43.798301696777344,
"learning_rate": 1.9496427530220567e-05,
"loss": 3.4107,
"step": 8420
},
{
"epoch": 0.68,
"grad_norm": 32.54145431518555,
"learning_rate": 1.9450056756574753e-05,
"loss": 3.507,
"step": 8440
},
{
"epoch": 0.68,
"grad_norm": 29.06185531616211,
"learning_rate": 1.9403639303939293e-05,
"loss": 3.4434,
"step": 8460
},
{
"epoch": 0.68,
"grad_norm": 26.419170379638672,
"learning_rate": 1.93571756592113e-05,
"loss": 3.4684,
"step": 8480
},
{
"epoch": 0.68,
"grad_norm": 31.831510543823242,
"learning_rate": 1.9310666309772426e-05,
"loss": 3.4565,
"step": 8500
},
{
"epoch": 0.68,
"grad_norm": 33.18935775756836,
"learning_rate": 1.926411174348373e-05,
"loss": 3.4568,
"step": 8520
},
{
"epoch": 0.68,
"grad_norm": 25.60289192199707,
"learning_rate": 1.9217512448680586e-05,
"loss": 3.3513,
"step": 8540
},
{
"epoch": 0.68,
"grad_norm": 27.03973960876465,
"learning_rate": 1.917086891416755e-05,
"loss": 3.4551,
"step": 8560
},
{
"epoch": 0.69,
"grad_norm": 39.20319366455078,
"learning_rate": 1.9124181629213228e-05,
"loss": 3.4217,
"step": 8580
},
{
"epoch": 0.69,
"grad_norm": 25.287826538085938,
"learning_rate": 1.9077451083545144e-05,
"loss": 3.4172,
"step": 8600
},
{
"epoch": 0.69,
"grad_norm": 51.33893585205078,
"learning_rate": 1.903067776734461e-05,
"loss": 3.4487,
"step": 8620
},
{
"epoch": 0.69,
"grad_norm": 23.41849708557129,
"learning_rate": 1.8983862171241577e-05,
"loss": 3.4675,
"step": 8640
},
{
"epoch": 0.69,
"grad_norm": 37.373104095458984,
"learning_rate": 1.8937004786309504e-05,
"loss": 3.4782,
"step": 8660
},
{
"epoch": 0.69,
"grad_norm": 26.743480682373047,
"learning_rate": 1.8890106104060177e-05,
"loss": 3.432,
"step": 8680
},
{
"epoch": 0.7,
"grad_norm": 39.051124572753906,
"learning_rate": 1.8843166616438585e-05,
"loss": 3.4937,
"step": 8700
},
{
"epoch": 0.7,
"grad_norm": 32.51453399658203,
"learning_rate": 1.8796186815817743e-05,
"loss": 3.4618,
"step": 8720
},
{
"epoch": 0.7,
"grad_norm": 22.23604393005371,
"learning_rate": 1.874916719499353e-05,
"loss": 3.5259,
"step": 8740
},
{
"epoch": 0.7,
"grad_norm": 27.168733596801758,
"learning_rate": 1.8702108247179512e-05,
"loss": 3.4829,
"step": 8760
},
{
"epoch": 0.7,
"grad_norm": 28.35675621032715,
"learning_rate": 1.8655010466001794e-05,
"loss": 3.4133,
"step": 8780
},
{
"epoch": 0.7,
"grad_norm": 30.800825119018555,
"learning_rate": 1.8607874345493806e-05,
"loss": 3.4527,
"step": 8800
},
{
"epoch": 0.71,
"grad_norm": 26.275304794311523,
"learning_rate": 1.856070038009115e-05,
"loss": 3.4386,
"step": 8820
},
{
"epoch": 0.71,
"grad_norm": 27.828401565551758,
"learning_rate": 1.85134890646264e-05,
"loss": 3.4806,
"step": 8840
},
{
"epoch": 0.71,
"grad_norm": 31.448450088500977,
"learning_rate": 1.846624089432392e-05,
"loss": 3.4659,
"step": 8860
},
{
"epoch": 0.71,
"grad_norm": 30.396820068359375,
"learning_rate": 1.8418956364794655e-05,
"loss": 3.4717,
"step": 8880
},
{
"epoch": 0.71,
"grad_norm": 65.90203857421875,
"learning_rate": 1.8371635972030942e-05,
"loss": 3.4938,
"step": 8900
},
{
"epoch": 0.71,
"grad_norm": 25.6701602935791,
"learning_rate": 1.8324280212401316e-05,
"loss": 3.3931,
"step": 8920
},
{
"epoch": 0.72,
"grad_norm": 27.28550148010254,
"learning_rate": 1.8276889582645278e-05,
"loss": 3.426,
"step": 8940
},
{
"epoch": 0.72,
"grad_norm": 26.73450469970703,
"learning_rate": 1.8229464579868124e-05,
"loss": 3.4074,
"step": 8960
},
{
"epoch": 0.72,
"grad_norm": 30.16777992248535,
"learning_rate": 1.818200570153568e-05,
"loss": 3.4204,
"step": 8980
},
{
"epoch": 0.72,
"grad_norm": 30.022031784057617,
"learning_rate": 1.813451344546913e-05,
"loss": 3.4905,
"step": 9000
},
{
"epoch": 0.72,
"grad_norm": 34.67860412597656,
"learning_rate": 1.8086988309839755e-05,
"loss": 3.4165,
"step": 9020
},
{
"epoch": 0.72,
"grad_norm": 26.23653793334961,
"learning_rate": 1.8039430793163753e-05,
"loss": 3.5014,
"step": 9040
},
{
"epoch": 0.72,
"grad_norm": 21.284992218017578,
"learning_rate": 1.7991841394296962e-05,
"loss": 3.4575,
"step": 9060
},
{
"epoch": 0.73,
"grad_norm": 38.38702392578125,
"learning_rate": 1.7944220612429664e-05,
"loss": 3.4258,
"step": 9080
},
{
"epoch": 0.73,
"grad_norm": 23.724790573120117,
"learning_rate": 1.789656894708132e-05,
"loss": 3.4175,
"step": 9100
},
{
"epoch": 0.73,
"grad_norm": 22.310564041137695,
"learning_rate": 1.7848886898095356e-05,
"loss": 3.4606,
"step": 9120
},
{
"epoch": 0.73,
"grad_norm": 31.924890518188477,
"learning_rate": 1.7801174965633898e-05,
"loss": 3.3977,
"step": 9140
},
{
"epoch": 0.73,
"grad_norm": 32.8968620300293,
"learning_rate": 1.775343365017254e-05,
"loss": 3.5308,
"step": 9160
},
{
"epoch": 0.73,
"grad_norm": 23.338972091674805,
"learning_rate": 1.77056634524951e-05,
"loss": 3.5206,
"step": 9180
},
{
"epoch": 0.74,
"grad_norm": 29.5195369720459,
"learning_rate": 1.7657864873688343e-05,
"loss": 3.4515,
"step": 9200
},
{
"epoch": 0.74,
"grad_norm": 41.386260986328125,
"learning_rate": 1.761003841513674e-05,
"loss": 3.4684,
"step": 9220
},
{
"epoch": 0.74,
"grad_norm": 32.939849853515625,
"learning_rate": 1.7562184578517207e-05,
"loss": 3.4478,
"step": 9240
},
{
"epoch": 0.74,
"grad_norm": 22.82186508178711,
"learning_rate": 1.751430386579385e-05,
"loss": 3.4226,
"step": 9260
},
{
"epoch": 0.74,
"grad_norm": 25.349609375,
"learning_rate": 1.7466396779212695e-05,
"loss": 3.4381,
"step": 9280
},
{
"epoch": 0.74,
"grad_norm": 26.957141876220703,
"learning_rate": 1.7418463821296398e-05,
"loss": 3.4075,
"step": 9300
},
{
"epoch": 0.75,
"grad_norm": 52.64670944213867,
"learning_rate": 1.7370505494839012e-05,
"loss": 3.4542,
"step": 9320
},
{
"epoch": 0.75,
"grad_norm": 30.222015380859375,
"learning_rate": 1.73225223029007e-05,
"loss": 3.5017,
"step": 9340
},
{
"epoch": 0.75,
"grad_norm": 33.9267578125,
"learning_rate": 1.7274514748802418e-05,
"loss": 3.4322,
"step": 9360
},
{
"epoch": 0.75,
"grad_norm": 29.3850040435791,
"learning_rate": 1.7226483336120707e-05,
"loss": 3.443,
"step": 9380
}
],
"logging_steps": 20,
"max_steps": 20000,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 20,
"total_flos": 2.217039974842368e+16,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}
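
The log_history above holds one entry per logging interval (logging_steps: 20), each recording the training loss, gradient norm, and learning rate at that global step, covering steps 20 through 9380 of max_steps 20000. Below is a minimal sketch, not part of the checkpoint itself, of how this file could be loaded to plot the loss and learning-rate curves; the file path is illustrative and matplotlib is assumed to be installed.

import json

import matplotlib.pyplot as plt  # assumed available; any plotting library works

# Path is an assumption based on the checkpoint layout shown above.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# Every logged entry in this file carries "step", "loss", and "learning_rate".
steps = [entry["step"] for entry in state["log_history"]]
losses = [entry["loss"] for entry in state["log_history"]]
lrs = [entry["learning_rate"] for entry in state["log_history"]]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
plt.tight_layout()
plt.show()

The same pattern extends to "grad_norm", which is handy for spotting gradient-norm spikes such as the 99.4 value logged around step 3080.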