youtubevtuberlora / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 49.6,
"eval_steps": 500,
"global_step": 3100,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.32,
"grad_norm": 2.214611053466797,
"learning_rate": 3.064516129032258e-06,
"loss": 2.8022,
"step": 20
},
{
"epoch": 0.64,
"grad_norm": 0.9293187260627747,
"learning_rate": 6.290322580645161e-06,
"loss": 2.6463,
"step": 40
},
{
"epoch": 0.96,
"grad_norm": 2.630194902420044,
"learning_rate": 9.516129032258064e-06,
"loss": 2.4026,
"step": 60
},
{
"epoch": 1.28,
"grad_norm": 1.7517138719558716,
"learning_rate": 1.2741935483870968e-05,
"loss": 1.9639,
"step": 80
},
{
"epoch": 1.6,
"grad_norm": 1.6100302934646606,
"learning_rate": 1.596774193548387e-05,
"loss": 1.7115,
"step": 100
},
{
"epoch": 1.92,
"grad_norm": 3.468630790710449,
"learning_rate": 1.9193548387096774e-05,
"loss": 1.7534,
"step": 120
},
{
"epoch": 2.24,
"grad_norm": 2.6692144870758057,
"learning_rate": 2.2419354838709678e-05,
"loss": 1.3422,
"step": 140
},
{
"epoch": 2.56,
"grad_norm": 1.5186293125152588,
"learning_rate": 2.5645161290322582e-05,
"loss": 1.1582,
"step": 160
},
{
"epoch": 2.88,
"grad_norm": 4.727887153625488,
"learning_rate": 2.8870967741935483e-05,
"loss": 1.1084,
"step": 180
},
{
"epoch": 3.2,
"grad_norm": 2.800290107727051,
"learning_rate": 3.2096774193548393e-05,
"loss": 0.844,
"step": 200
},
{
"epoch": 3.52,
"grad_norm": 1.1538563966751099,
"learning_rate": 3.532258064516129e-05,
"loss": 0.6051,
"step": 220
},
{
"epoch": 3.84,
"grad_norm": 3.282315492630005,
"learning_rate": 3.8548387096774195e-05,
"loss": 0.7587,
"step": 240
},
{
"epoch": 4.16,
"grad_norm": 0.9393389821052551,
"learning_rate": 4.17741935483871e-05,
"loss": 0.537,
"step": 260
},
{
"epoch": 4.48,
"grad_norm": 1.5951660871505737,
"learning_rate": 4.5e-05,
"loss": 0.4063,
"step": 280
},
{
"epoch": 4.8,
"grad_norm": 2.4143407344818115,
"learning_rate": 4.822580645161291e-05,
"loss": 0.4817,
"step": 300
},
{
"epoch": 5.12,
"grad_norm": 1.1503572463989258,
"learning_rate": 4.9998716243505096e-05,
"loss": 0.3556,
"step": 320
},
{
"epoch": 5.44,
"grad_norm": 1.5514352321624756,
"learning_rate": 4.9986672191133314e-05,
"loss": 0.2663,
"step": 340
},
{
"epoch": 5.76,
"grad_norm": 2.0045723915100098,
"learning_rate": 4.9961956248762694e-05,
"loss": 0.3273,
"step": 360
},
{
"epoch": 6.08,
"grad_norm": 0.716200053691864,
"learning_rate": 4.992458095098368e-05,
"loss": 0.2174,
"step": 380
},
{
"epoch": 6.4,
"grad_norm": 3.2870705127716064,
"learning_rate": 4.9874565252527765e-05,
"loss": 0.1886,
"step": 400
},
{
"epoch": 6.72,
"grad_norm": 2.3181071281433105,
"learning_rate": 4.981193451865465e-05,
"loss": 0.2278,
"step": 420
},
{
"epoch": 7.04,
"grad_norm": 1.6296441555023193,
"learning_rate": 4.9736720512288334e-05,
"loss": 0.168,
"step": 440
},
{
"epoch": 7.36,
"grad_norm": 1.757379174232483,
"learning_rate": 4.964896137790873e-05,
"loss": 0.1227,
"step": 460
},
{
"epoch": 7.68,
"grad_norm": 1.2622524499893188,
"learning_rate": 4.954870162220679e-05,
"loss": 0.1261,
"step": 480
},
{
"epoch": 8.0,
"grad_norm": 1.1865277290344238,
"learning_rate": 4.943599209151314e-05,
"loss": 0.1167,
"step": 500
},
{
"epoch": 8.32,
"grad_norm": 1.244333028793335,
"learning_rate": 4.931088994601157e-05,
"loss": 0.1049,
"step": 520
},
{
"epoch": 8.64,
"grad_norm": 2.6851558685302734,
"learning_rate": 4.917345863075048e-05,
"loss": 0.1016,
"step": 540
},
{
"epoch": 8.96,
"grad_norm": 1.0457267761230469,
"learning_rate": 4.902376784346697e-05,
"loss": 0.1157,
"step": 560
},
{
"epoch": 9.28,
"grad_norm": 0.2963043749332428,
"learning_rate": 4.886189349923992e-05,
"loss": 0.073,
"step": 580
},
{
"epoch": 9.6,
"grad_norm": 0.45336633920669556,
"learning_rate": 4.868791769198995e-05,
"loss": 0.0908,
"step": 600
},
{
"epoch": 9.92,
"grad_norm": 1.0918829441070557,
"learning_rate": 4.8501928652845854e-05,
"loss": 0.0557,
"step": 620
},
{
"epoch": 10.24,
"grad_norm": 2.7920358180999756,
"learning_rate": 4.83040207053985e-05,
"loss": 0.0779,
"step": 640
},
{
"epoch": 10.56,
"grad_norm": 0.8492644429206848,
"learning_rate": 4.809429421786502e-05,
"loss": 0.048,
"step": 660
},
{
"epoch": 10.88,
"grad_norm": 0.6128495335578918,
"learning_rate": 4.787285555218748e-05,
"loss": 0.0747,
"step": 680
},
{
"epoch": 11.2,
"grad_norm": 0.5186921954154968,
"learning_rate": 4.763981701009184e-05,
"loss": 0.0629,
"step": 700
},
{
"epoch": 11.52,
"grad_norm": 1.6753857135772705,
"learning_rate": 4.739529677613456e-05,
"loss": 0.051,
"step": 720
},
{
"epoch": 11.84,
"grad_norm": 0.18876530230045319,
"learning_rate": 4.713941885776586e-05,
"loss": 0.0699,
"step": 740
},
{
"epoch": 12.16,
"grad_norm": 0.4428744912147522,
"learning_rate": 4.687231302243975e-05,
"loss": 0.0526,
"step": 760
},
{
"epoch": 12.48,
"grad_norm": 0.4765178859233856,
"learning_rate": 4.659411473180304e-05,
"loss": 0.0412,
"step": 780
},
{
"epoch": 12.8,
"grad_norm": 1.4559166431427002,
"learning_rate": 4.6304965072996495e-05,
"loss": 0.0495,
"step": 800
},
{
"epoch": 13.12,
"grad_norm": 1.1842377185821533,
"learning_rate": 4.6005010687103076e-05,
"loss": 0.063,
"step": 820
},
{
"epoch": 13.44,
"grad_norm": 0.5502442717552185,
"learning_rate": 4.569440369477951e-05,
"loss": 0.0425,
"step": 840
},
{
"epoch": 13.76,
"grad_norm": 1.819698452949524,
"learning_rate": 4.5373301619108854e-05,
"loss": 0.0451,
"step": 860
},
{
"epoch": 14.08,
"grad_norm": 0.045023053884506226,
"learning_rate": 4.5041867305713384e-05,
"loss": 0.0445,
"step": 880
},
{
"epoch": 14.4,
"grad_norm": 0.10354474931955338,
"learning_rate": 4.4700268840168045e-05,
"loss": 0.0214,
"step": 900
},
{
"epoch": 14.72,
"grad_norm": 0.23762211203575134,
"learning_rate": 4.4348679462756556e-05,
"loss": 0.0552,
"step": 920
},
{
"epoch": 15.04,
"grad_norm": 0.845052182674408,
"learning_rate": 4.398727748061324e-05,
"loss": 0.0524,
"step": 940
},
{
"epoch": 15.36,
"grad_norm": 0.29455024003982544,
"learning_rate": 4.361624617729536e-05,
"loss": 0.0318,
"step": 960
},
{
"epoch": 15.68,
"grad_norm": 0.05131356045603752,
"learning_rate": 4.323577371983155e-05,
"loss": 0.0347,
"step": 980
},
{
"epoch": 16.0,
"grad_norm": 0.2307986468076706,
"learning_rate": 4.28460530632937e-05,
"loss": 0.0541,
"step": 1000
},
{
"epoch": 16.32,
"grad_norm": 0.9982470870018005,
"learning_rate": 4.2447281852940525e-05,
"loss": 0.0327,
"step": 1020
},
{
"epoch": 16.64,
"grad_norm": 2.9247119426727295,
"learning_rate": 4.203966232398261e-05,
"loss": 0.0251,
"step": 1040
},
{
"epoch": 16.96,
"grad_norm": 0.5199835896492004,
"learning_rate": 4.162340119901961e-05,
"loss": 0.0451,
"step": 1060
},
{
"epoch": 17.28,
"grad_norm": 0.6802399754524231,
"learning_rate": 4.1198709583201754e-05,
"loss": 0.0272,
"step": 1080
},
{
"epoch": 17.6,
"grad_norm": 0.9806874394416809,
"learning_rate": 4.0765802857168687e-05,
"loss": 0.0517,
"step": 1100
},
{
"epoch": 17.92,
"grad_norm": 0.12717130780220032,
"learning_rate": 4.0324900567820046e-05,
"loss": 0.0286,
"step": 1120
},
{
"epoch": 18.24,
"grad_norm": 0.4048568308353424,
"learning_rate": 3.987622631697316e-05,
"loss": 0.0258,
"step": 1140
},
{
"epoch": 18.56,
"grad_norm": 1.2760275602340698,
"learning_rate": 3.942000764796427e-05,
"loss": 0.0289,
"step": 1160
},
{
"epoch": 18.88,
"grad_norm": 0.30493712425231934,
"learning_rate": 3.895647593025088e-05,
"loss": 0.0457,
"step": 1180
},
{
"epoch": 19.2,
"grad_norm": 0.6177706122398376,
"learning_rate": 3.8485866242073584e-05,
"loss": 0.0316,
"step": 1200
},
{
"epoch": 19.52,
"grad_norm": 0.10721703618764877,
"learning_rate": 3.80084172512372e-05,
"loss": 0.0326,
"step": 1220
},
{
"epoch": 19.84,
"grad_norm": 0.06823062896728516,
"learning_rate": 3.7524371094071266e-05,
"loss": 0.0238,
"step": 1240
},
{
"epoch": 20.16,
"grad_norm": 0.9792996644973755,
"learning_rate": 3.703397325263162e-05,
"loss": 0.0286,
"step": 1260
},
{
"epoch": 20.48,
"grad_norm": 0.20131415128707886,
"learning_rate": 3.653747243020515e-05,
"loss": 0.0294,
"step": 1280
},
{
"epoch": 20.8,
"grad_norm": 0.530823826789856,
"learning_rate": 3.603512042518093e-05,
"loss": 0.0364,
"step": 1300
},
{
"epoch": 21.12,
"grad_norm": 0.006112121045589447,
"learning_rate": 3.552717200335171e-05,
"loss": 0.0265,
"step": 1320
},
{
"epoch": 21.44,
"grad_norm": 0.005943021737039089,
"learning_rate": 3.501388476871039e-05,
"loss": 0.0319,
"step": 1340
},
{
"epoch": 21.76,
"grad_norm": 0.07666248083114624,
"learning_rate": 3.449551903280729e-05,
"loss": 0.0137,
"step": 1360
},
{
"epoch": 22.08,
"grad_norm": 0.6763716340065002,
"learning_rate": 3.397233768273415e-05,
"loss": 0.0416,
"step": 1380
},
{
"epoch": 22.4,
"grad_norm": 0.07458912581205368,
"learning_rate": 3.344460604780202e-05,
"loss": 0.0179,
"step": 1400
},
{
"epoch": 22.72,
"grad_norm": 0.25514400005340576,
"learning_rate": 3.291259176498052e-05,
"loss": 0.0276,
"step": 1420
},
{
"epoch": 23.04,
"grad_norm": 0.006252670660614967,
"learning_rate": 3.237656464316693e-05,
"loss": 0.0352,
"step": 1440
},
{
"epoch": 23.36,
"grad_norm": 0.36337536573410034,
"learning_rate": 3.183679652635357e-05,
"loss": 0.0212,
"step": 1460
},
{
"epoch": 23.68,
"grad_norm": 0.28176209330558777,
"learning_rate": 3.129356115576332e-05,
"loss": 0.0338,
"step": 1480
},
{
"epoch": 24.0,
"grad_norm": 0.012418941594660282,
"learning_rate": 3.074713403102284e-05,
"loss": 0.0295,
"step": 1500
},
{
"epoch": 24.32,
"grad_norm": 0.16376622021198273,
"learning_rate": 3.0197792270443982e-05,
"loss": 0.0185,
"step": 1520
},
{
"epoch": 24.64,
"grad_norm": 0.45608577132225037,
"learning_rate": 2.9645814470484452e-05,
"loss": 0.0328,
"step": 1540
},
{
"epoch": 24.96,
"grad_norm": 0.1006656065583229,
"learning_rate": 2.9091480564458666e-05,
"loss": 0.025,
"step": 1560
},
{
"epoch": 25.28,
"grad_norm": 0.3901682496070862,
"learning_rate": 2.8535071680570734e-05,
"loss": 0.0294,
"step": 1580
},
{
"epoch": 25.6,
"grad_norm": 0.12243347615003586,
"learning_rate": 2.7976869999341426e-05,
"loss": 0.0282,
"step": 1600
},
{
"epoch": 25.92,
"grad_norm": 0.0033943182788789272,
"learning_rate": 2.741715861050143e-05,
"loss": 0.0294,
"step": 1620
},
{
"epoch": 26.24,
"grad_norm": 0.0014275741996243596,
"learning_rate": 2.685622136942359e-05,
"loss": 0.0354,
"step": 1640
},
{
"epoch": 26.56,
"grad_norm": 0.018641650676727295,
"learning_rate": 2.629434275316673e-05,
"loss": 0.0162,
"step": 1660
},
{
"epoch": 26.88,
"grad_norm": 0.13316482305526733,
"learning_rate": 2.573180771620432e-05,
"loss": 0.0205,
"step": 1680
},
{
"epoch": 27.2,
"grad_norm": 0.003175324061885476,
"learning_rate": 2.516890154591095e-05,
"loss": 0.0129,
"step": 1700
},
{
"epoch": 27.52,
"grad_norm": 0.28820428252220154,
"learning_rate": 2.4605909717879964e-05,
"loss": 0.0333,
"step": 1720
},
{
"epoch": 27.84,
"grad_norm": 0.08302447199821472,
"learning_rate": 2.4043117751145694e-05,
"loss": 0.0261,
"step": 1740
},
{
"epoch": 28.16,
"grad_norm": 0.37718892097473145,
"learning_rate": 2.34808110633836e-05,
"loss": 0.0418,
"step": 1760
},
{
"epoch": 28.48,
"grad_norm": 0.16185913980007172,
"learning_rate": 2.291927482616191e-05,
"loss": 0.0111,
"step": 1780
},
{
"epoch": 28.8,
"grad_norm": 0.33071696758270264,
"learning_rate": 2.235879382031794e-05,
"loss": 0.0263,
"step": 1800
},
{
"epoch": 29.12,
"grad_norm": 0.002949915360659361,
"learning_rate": 2.179965229153265e-05,
"loss": 0.0299,
"step": 1820
},
{
"epoch": 29.44,
"grad_norm": 0.0035819699987769127,
"learning_rate": 2.1242133806176667e-05,
"loss": 0.0267,
"step": 1840
},
{
"epoch": 29.76,
"grad_norm": 0.0039703804068267345,
"learning_rate": 2.0686521107500638e-05,
"loss": 0.0204,
"step": 1860
},
{
"epoch": 30.08,
"grad_norm": 0.004021900240331888,
"learning_rate": 2.0133095972243233e-05,
"loss": 0.0308,
"step": 1880
},
{
"epoch": 30.4,
"grad_norm": 0.2821226418018341,
"learning_rate": 1.9582139067729117e-05,
"loss": 0.024,
"step": 1900
},
{
"epoch": 30.72,
"grad_norm": 0.5529562830924988,
"learning_rate": 1.90339298095297e-05,
"loss": 0.0113,
"step": 1920
},
{
"epoch": 31.04,
"grad_norm": 0.1492016613483429,
"learning_rate": 1.8488746219758674e-05,
"loss": 0.0301,
"step": 1940
},
{
"epoch": 31.36,
"grad_norm": 0.19194553792476654,
"learning_rate": 1.7946864786074165e-05,
"loss": 0.0293,
"step": 1960
},
{
"epoch": 31.68,
"grad_norm": 0.1448647677898407,
"learning_rate": 1.740856032145917e-05,
"loss": 0.0242,
"step": 1980
},
{
"epoch": 32.0,
"grad_norm": 0.06869282573461533,
"learning_rate": 1.6874105824851267e-05,
"loss": 0.022,
"step": 2000
},
{
"epoch": 32.32,
"grad_norm": 0.002635813085362315,
"learning_rate": 1.634377234269226e-05,
"loss": 0.0264,
"step": 2020
},
{
"epoch": 32.64,
"grad_norm": 0.0027471587527543306,
"learning_rate": 1.5817828831468144e-05,
"loss": 0.0155,
"step": 2040
},
{
"epoch": 32.96,
"grad_norm": 0.28300318121910095,
"learning_rate": 1.5296542021308825e-05,
"loss": 0.0208,
"step": 2060
},
{
"epoch": 33.28,
"grad_norm": 0.0016769981011748314,
"learning_rate": 1.478017628071706e-05,
"loss": 0.0264,
"step": 2080
},
{
"epoch": 33.6,
"grad_norm": 0.0014146752655506134,
"learning_rate": 1.4268993482495055e-05,
"loss": 0.0174,
"step": 2100
},
{
"epoch": 33.92,
"grad_norm": 0.05964767187833786,
"learning_rate": 1.3763252870936649e-05,
"loss": 0.0214,
"step": 2120
},
{
"epoch": 34.24,
"grad_norm": 0.0016291196225211024,
"learning_rate": 1.3263210930352737e-05,
"loss": 0.0306,
"step": 2140
},
{
"epoch": 34.56,
"grad_norm": 0.034806057810783386,
"learning_rate": 1.2769121254996159e-05,
"loss": 0.0146,
"step": 2160
},
{
"epoch": 34.88,
"grad_norm": 0.315729558467865,
"learning_rate": 1.228123442045249e-05,
"loss": 0.0254,
"step": 2180
},
{
"epoch": 35.2,
"grad_norm": 0.5809018015861511,
"learning_rate": 1.1799797856561606e-05,
"loss": 0.0176,
"step": 2200
},
{
"epoch": 35.52,
"grad_norm": 0.18777510523796082,
"learning_rate": 1.1325055721934637e-05,
"loss": 0.0205,
"step": 2220
},
{
"epoch": 35.84,
"grad_norm": 0.11027589440345764,
"learning_rate": 1.0857248780129928e-05,
"loss": 0.0153,
"step": 2240
},
{
"epoch": 36.16,
"grad_norm": 0.18985402584075928,
"learning_rate": 1.0396614277550752e-05,
"loss": 0.0251,
"step": 2260
},
{
"epoch": 36.48,
"grad_norm": 0.011078303679823875,
"learning_rate": 9.943385823126775e-06,
"loss": 0.0224,
"step": 2280
},
{
"epoch": 36.8,
"grad_norm": 0.15364831686019897,
"learning_rate": 9.497793269840211e-06,
"loss": 0.0219,
"step": 2300
},
{
"epoch": 37.12,
"grad_norm": 0.16373781859874725,
"learning_rate": 9.06006259815683e-06,
"loss": 0.021,
"step": 2320
},
{
"epoch": 37.44,
"grad_norm": 0.41937291622161865,
"learning_rate": 8.630415801420835e-06,
"loss": 0.0236,
"step": 2340
},
{
"epoch": 37.76,
"grad_norm": 0.4237123727798462,
"learning_rate": 8.209070773271894e-06,
"loss": 0.0216,
"step": 2360
},
{
"epoch": 38.08,
"grad_norm": 0.08532612025737762,
"learning_rate": 7.79624119714121e-06,
"loss": 0.0268,
"step": 2380
},
{
"epoch": 38.4,
"grad_norm": 0.2872686982154846,
"learning_rate": 7.392136437882855e-06,
"loss": 0.0306,
"step": 2400
},
{
"epoch": 38.72,
"grad_norm": 0.2834513485431671,
"learning_rate": 6.996961435595223e-06,
"loss": 0.0224,
"step": 2420
},
{
"epoch": 39.04,
"grad_norm": 0.0006759735988453031,
"learning_rate": 6.610916601686481e-06,
"loss": 0.0138,
"step": 2440
},
{
"epoch": 39.36,
"grad_norm": 0.24796371161937714,
"learning_rate": 6.234197717236742e-06,
"loss": 0.0234,
"step": 2460
},
{
"epoch": 39.68,
"grad_norm": 0.2657662332057953,
"learning_rate": 5.866995833708464e-06,
"loss": 0.0164,
"step": 2480
},
{
"epoch": 40.0,
"grad_norm": 0.0017559522530063987,
"learning_rate": 5.509497176055492e-06,
"loss": 0.0169,
"step": 2500
},
{
"epoch": 40.32,
"grad_norm": 0.18896016478538513,
"learning_rate": 5.161883048279817e-06,
"loss": 0.0116,
"step": 2520
},
{
"epoch": 40.64,
"grad_norm": 0.2336779683828354,
"learning_rate": 4.824329741483949e-06,
"loss": 0.0219,
"step": 2540
},
{
"epoch": 40.96,
"grad_norm": 0.16521525382995605,
"learning_rate": 4.497008444465681e-06,
"loss": 0.0366,
"step": 2560
},
{
"epoch": 41.28,
"grad_norm": 0.0013411182444542646,
"learning_rate": 4.180085156900274e-06,
"loss": 0.0228,
"step": 2580
},
{
"epoch": 41.6,
"grad_norm": 0.0015802403213456273,
"learning_rate": 3.873720605154468e-06,
"loss": 0.0135,
"step": 2600
},
{
"epoch": 41.92,
"grad_norm": 0.002210975391790271,
"learning_rate": 3.578070160774724e-06,
"loss": 0.0267,
"step": 2620
},
{
"epoch": 42.24,
"grad_norm": 0.23047080636024475,
"learning_rate": 3.293283761691182e-06,
"loss": 0.0218,
"step": 2640
},
{
"epoch": 42.56,
"grad_norm": 0.2625073492527008,
"learning_rate": 3.0195058361772277e-06,
"loss": 0.0315,
"step": 2660
},
{
"epoch": 42.88,
"grad_norm": 0.22485554218292236,
"learning_rate": 2.756875229603295e-06,
"loss": 0.022,
"step": 2680
},
{
"epoch": 43.2,
"grad_norm": 0.001550053246319294,
"learning_rate": 2.5055251340219855e-06,
"loss": 0.016,
"step": 2700
},
{
"epoch": 43.52,
"grad_norm": 0.22042883932590485,
"learning_rate": 2.2655830206202655e-06,
"loss": 0.0267,
"step": 2720
},
{
"epoch": 43.84,
"grad_norm": 0.08822102099657059,
"learning_rate": 2.037170575072944e-06,
"loss": 0.0167,
"step": 2740
},
{
"epoch": 44.16,
"grad_norm": 0.0016505387611687183,
"learning_rate": 1.8204036358303173e-06,
"loss": 0.0492,
"step": 2760
},
{
"epoch": 44.48,
"grad_norm": 0.3908143639564514,
"learning_rate": 1.615392135371116e-06,
"loss": 0.0254,
"step": 2780
},
{
"epoch": 44.8,
"grad_norm": 0.3156016767024994,
"learning_rate": 1.4222400444507318e-06,
"loss": 0.0136,
"step": 2800
},
{
"epoch": 45.12,
"grad_norm": 0.001897096517495811,
"learning_rate": 1.2410453193728493e-06,
"loss": 0.0116,
"step": 2820
},
{
"epoch": 45.44,
"grad_norm": 0.20051489770412445,
"learning_rate": 1.0718998523113004e-06,
"loss": 0.0311,
"step": 2840
},
{
"epoch": 45.76,
"grad_norm": 0.0016040581976994872,
"learning_rate": 9.148894247073298e-07,
"loss": 0.0283,
"step": 2860
},
{
"epoch": 46.08,
"grad_norm": 0.0015667045954614878,
"learning_rate": 7.700936637658779e-07,
"loss": 0.0186,
"step": 2880
},
{
"epoch": 46.4,
"grad_norm": 0.04447433352470398,
"learning_rate": 6.375860020729541e-07,
"loss": 0.0229,
"step": 2900
},
{
"epoch": 46.72,
"grad_norm": 0.20941714942455292,
"learning_rate": 5.174336403546226e-07,
"loss": 0.0268,
"step": 2920
},
{
"epoch": 47.04,
"grad_norm": 0.0015096565475687385,
"learning_rate": 4.096975133963954e-07,
"loss": 0.0161,
"step": 2940
},
{
"epoch": 47.36,
"grad_norm": 0.19307351112365723,
"learning_rate": 3.144322591404292e-07,
"loss": 0.0113,
"step": 2960
},
{
"epoch": 47.68,
"grad_norm": 0.002177381655201316,
"learning_rate": 2.316861909760909e-07,
"loss": 0.0222,
"step": 2980
},
{
"epoch": 48.0,
"grad_norm": 0.09334852546453476,
"learning_rate": 1.6150127323803222e-07,
"loss": 0.028,
"step": 3000
},
{
"epoch": 48.32,
"grad_norm": 0.09507758915424347,
"learning_rate": 1.0391309992413833e-07,
"loss": 0.0217,
"step": 3020
},
{
"epoch": 48.64,
"grad_norm": 0.0010601489339023829,
"learning_rate": 5.895087664417876e-08,
"loss": 0.0191,
"step": 3040
},
{
"epoch": 48.96,
"grad_norm": 0.001158875529654324,
"learning_rate": 2.6637405808302428e-08,
"loss": 0.0162,
"step": 3060
},
{
"epoch": 49.28,
"grad_norm": 0.0015347091248258948,
"learning_rate": 6.989075062879824e-09,
"loss": 0.0146,
"step": 3080
},
{
"epoch": 49.6,
"grad_norm": 0.0010455228621140122,
"learning_rate": 1.584897958428755e-11,
"loss": 0.0217,
"step": 3100
},
{
"epoch": 49.6,
"step": 3100,
"total_flos": 1.7939255986343117e+17,
"train_loss": 0.172668604437382,
"train_runtime": 11274.7291,
"train_samples_per_second": 2.217,
"train_steps_per_second": 0.275
}
],
"logging_steps": 20,
"max_steps": 3100,
"num_input_tokens_seen": 0,
"num_train_epochs": 50,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.7939255986343117e+17,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}
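
The log_history entries above can be inspected programmatically once the file is downloaded. Below is a minimal sketch, assuming the file is saved locally as trainer_state.json; the use of matplotlib for plotting is an optional assumption and not part of the original upload.

import json

# Load the trainer state produced by the Hugging Face Trainer.
with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# Keep only per-step log entries; the final summary entry carries
# train_runtime / total_flos instead of a loss and is skipped here.
steps, losses, lrs = [], [], []
for entry in state["log_history"]:
    if "loss" in entry and "learning_rate" in entry:
        steps.append(entry["step"])
        losses.append(entry["loss"])
        lrs.append(entry["learning_rate"])

print(f"logged points: {len(steps)}")
print(f"final step:    {steps[-1]} (epoch {state['epoch']})")
print(f"final loss:    {losses[-1]}")

# Optional: plot the loss curve and learning-rate schedule.
try:
    import matplotlib.pyplot as plt

    fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True)
    ax1.plot(steps, losses)
    ax1.set_ylabel("training loss")
    ax2.plot(steps, lrs)
    ax2.set_ylabel("learning rate")
    ax2.set_xlabel("global step")
    plt.tight_layout()
    plt.show()
except ImportError:
    pass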