Training in progress, step 3600, checkpoint
9847aaa verified
{
"best_metric": 41.1359,
"best_model_checkpoint": "/kaggle/working/amr-tst-indo/AMRBART-id/fine-tune/../outputs/mbart-en-id-smaller-fted-amr-generation-v2-fted/checkpoint-3600",
"epoch": 39.735099337748345,
"eval_steps": 3600,
"global_step": 3600,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.011037527593818985,
"learning_rate": 5e-09,
"loss": 2.1638,
"step": 1
},
{
"epoch": 0.22075055187637968,
"learning_rate": 1e-07,
"loss": 1.9727,
"step": 20
},
{
"epoch": 0.44150110375275936,
"learning_rate": 2e-07,
"loss": 2.0028,
"step": 40
},
{
"epoch": 0.6622516556291391,
"learning_rate": 3e-07,
"loss": 2.0448,
"step": 60
},
{
"epoch": 0.8830022075055187,
"learning_rate": 4e-07,
"loss": 1.9696,
"step": 80
},
{
"epoch": 1.1037527593818985,
"learning_rate": 5e-07,
"loss": 1.9375,
"step": 100
},
{
"epoch": 1.3245033112582782,
"learning_rate": 6e-07,
"loss": 1.9075,
"step": 120
},
{
"epoch": 1.5452538631346577,
"learning_rate": 7e-07,
"loss": 1.9866,
"step": 140
},
{
"epoch": 1.7660044150110377,
"learning_rate": 8e-07,
"loss": 1.882,
"step": 160
},
{
"epoch": 1.9867549668874172,
"learning_rate": 9e-07,
"loss": 1.9916,
"step": 180
},
{
"epoch": 2.207505518763797,
"learning_rate": 1e-06,
"loss": 1.9132,
"step": 200
},
{
"epoch": 2.4282560706401766,
"learning_rate": 9.996864111498258e-07,
"loss": 1.9858,
"step": 220
},
{
"epoch": 2.6490066225165565,
"learning_rate": 9.993728222996515e-07,
"loss": 1.9373,
"step": 240
},
{
"epoch": 2.869757174392936,
"learning_rate": 9.990592334494773e-07,
"loss": 1.9146,
"step": 260
},
{
"epoch": 3.0905077262693155,
"learning_rate": 9.987456445993032e-07,
"loss": 1.8665,
"step": 280
},
{
"epoch": 3.3112582781456954,
"learning_rate": 9.98432055749129e-07,
"loss": 1.9419,
"step": 300
},
{
"epoch": 3.5320088300220753,
"learning_rate": 9.981184668989545e-07,
"loss": 1.8332,
"step": 320
},
{
"epoch": 3.752759381898455,
"learning_rate": 9.978048780487803e-07,
"loss": 1.8534,
"step": 340
},
{
"epoch": 3.9735099337748343,
"learning_rate": 9.974912891986062e-07,
"loss": 1.9421,
"step": 360
},
{
"epoch": 4.194260485651214,
"learning_rate": 9.97177700348432e-07,
"loss": 1.9374,
"step": 380
},
{
"epoch": 4.415011037527594,
"learning_rate": 9.96864111498258e-07,
"loss": 1.8876,
"step": 400
},
{
"epoch": 4.635761589403973,
"learning_rate": 9.965505226480835e-07,
"loss": 1.8593,
"step": 420
},
{
"epoch": 4.856512141280353,
"learning_rate": 9.962369337979094e-07,
"loss": 1.8243,
"step": 440
},
{
"epoch": 5.077262693156733,
"learning_rate": 9.95923344947735e-07,
"loss": 1.8791,
"step": 460
},
{
"epoch": 5.298013245033113,
"learning_rate": 9.95609756097561e-07,
"loss": 1.8878,
"step": 480
},
{
"epoch": 5.518763796909492,
"learning_rate": 9.952961672473868e-07,
"loss": 1.8985,
"step": 500
},
{
"epoch": 5.739514348785872,
"learning_rate": 9.949825783972126e-07,
"loss": 1.84,
"step": 520
},
{
"epoch": 5.960264900662252,
"learning_rate": 9.946689895470383e-07,
"loss": 1.8468,
"step": 540
},
{
"epoch": 6.181015452538631,
"learning_rate": 9.94355400696864e-07,
"loss": 1.8469,
"step": 560
},
{
"epoch": 6.401766004415011,
"learning_rate": 9.940418118466898e-07,
"loss": 1.8528,
"step": 580
},
{
"epoch": 6.622516556291391,
"learning_rate": 9.937282229965156e-07,
"loss": 1.8269,
"step": 600
},
{
"epoch": 6.843267108167771,
"learning_rate": 9.934146341463415e-07,
"loss": 1.8324,
"step": 620
},
{
"epoch": 7.06401766004415,
"learning_rate": 9.931010452961673e-07,
"loss": 1.808,
"step": 640
},
{
"epoch": 7.28476821192053,
"learning_rate": 9.92787456445993e-07,
"loss": 1.9003,
"step": 660
},
{
"epoch": 7.50551876379691,
"learning_rate": 9.924738675958186e-07,
"loss": 1.908,
"step": 680
},
{
"epoch": 7.72626931567329,
"learning_rate": 9.921602787456445e-07,
"loss": 1.8165,
"step": 700
},
{
"epoch": 7.947019867549669,
"learning_rate": 9.918466898954704e-07,
"loss": 1.7924,
"step": 720
},
{
"epoch": 8.167770419426049,
"learning_rate": 9.915331010452962e-07,
"loss": 1.8275,
"step": 740
},
{
"epoch": 8.388520971302428,
"learning_rate": 9.912195121951219e-07,
"loss": 1.8033,
"step": 760
},
{
"epoch": 8.609271523178808,
"learning_rate": 9.909059233449477e-07,
"loss": 1.8283,
"step": 780
},
{
"epoch": 8.830022075055188,
"learning_rate": 9.905923344947734e-07,
"loss": 1.8361,
"step": 800
},
{
"epoch": 9.050772626931566,
"learning_rate": 9.902787456445992e-07,
"loss": 1.8264,
"step": 820
},
{
"epoch": 9.271523178807946,
"learning_rate": 9.89965156794425e-07,
"loss": 1.8157,
"step": 840
},
{
"epoch": 9.492273730684326,
"learning_rate": 9.89651567944251e-07,
"loss": 1.8038,
"step": 860
},
{
"epoch": 9.713024282560706,
"learning_rate": 9.893379790940768e-07,
"loss": 1.8495,
"step": 880
},
{
"epoch": 9.933774834437086,
"learning_rate": 9.890243902439024e-07,
"loss": 1.8379,
"step": 900
},
{
"epoch": 10.154525386313466,
"learning_rate": 9.88710801393728e-07,
"loss": 1.807,
"step": 920
},
{
"epoch": 10.375275938189846,
"learning_rate": 9.88397212543554e-07,
"loss": 1.7354,
"step": 940
},
{
"epoch": 10.596026490066226,
"learning_rate": 9.880836236933798e-07,
"loss": 1.7909,
"step": 960
},
{
"epoch": 10.816777041942604,
"learning_rate": 9.877700348432054e-07,
"loss": 1.8504,
"step": 980
},
{
"epoch": 11.037527593818984,
"learning_rate": 9.874564459930313e-07,
"loss": 1.7565,
"step": 1000
},
{
"epoch": 11.258278145695364,
"learning_rate": 9.871428571428572e-07,
"loss": 1.8464,
"step": 1020
},
{
"epoch": 11.479028697571744,
"learning_rate": 9.868292682926828e-07,
"loss": 1.7463,
"step": 1040
},
{
"epoch": 11.699779249448124,
"learning_rate": 9.865156794425087e-07,
"loss": 1.8286,
"step": 1060
},
{
"epoch": 11.920529801324504,
"learning_rate": 9.862020905923345e-07,
"loss": 1.7821,
"step": 1080
},
{
"epoch": 12.141280353200884,
"learning_rate": 9.858885017421604e-07,
"loss": 1.8069,
"step": 1100
},
{
"epoch": 12.362030905077262,
"learning_rate": 9.85574912891986e-07,
"loss": 1.7943,
"step": 1120
},
{
"epoch": 12.582781456953642,
"learning_rate": 9.852613240418117e-07,
"loss": 1.7535,
"step": 1140
},
{
"epoch": 12.803532008830022,
"learning_rate": 9.849477351916375e-07,
"loss": 1.7881,
"step": 1160
},
{
"epoch": 13.024282560706402,
"learning_rate": 9.846341463414634e-07,
"loss": 1.815,
"step": 1180
},
{
"epoch": 13.245033112582782,
"learning_rate": 9.843205574912892e-07,
"loss": 1.8258,
"step": 1200
},
{
"epoch": 13.465783664459162,
"learning_rate": 9.840069686411149e-07,
"loss": 1.7228,
"step": 1220
},
{
"epoch": 13.686534216335541,
"learning_rate": 9.836933797909407e-07,
"loss": 1.7866,
"step": 1240
},
{
"epoch": 13.90728476821192,
"learning_rate": 9.833797909407664e-07,
"loss": 1.7871,
"step": 1260
},
{
"epoch": 14.1280353200883,
"learning_rate": 9.830662020905923e-07,
"loss": 1.7238,
"step": 1280
},
{
"epoch": 14.34878587196468,
"learning_rate": 9.827526132404181e-07,
"loss": 1.805,
"step": 1300
},
{
"epoch": 14.56953642384106,
"learning_rate": 9.82439024390244e-07,
"loss": 1.7909,
"step": 1320
},
{
"epoch": 14.79028697571744,
"learning_rate": 9.821254355400698e-07,
"loss": 1.7861,
"step": 1340
},
{
"epoch": 15.01103752759382,
"learning_rate": 9.818118466898953e-07,
"loss": 1.7651,
"step": 1360
},
{
"epoch": 15.2317880794702,
"learning_rate": 9.814982578397211e-07,
"loss": 1.7666,
"step": 1380
},
{
"epoch": 15.45253863134658,
"learning_rate": 9.81184668989547e-07,
"loss": 1.7159,
"step": 1400
},
{
"epoch": 15.673289183222957,
"learning_rate": 9.808710801393728e-07,
"loss": 1.7701,
"step": 1420
},
{
"epoch": 15.894039735099337,
"learning_rate": 9.805574912891987e-07,
"loss": 1.7977,
"step": 1440
},
{
"epoch": 16.11479028697572,
"learning_rate": 9.802439024390243e-07,
"loss": 1.7805,
"step": 1460
},
{
"epoch": 16.335540838852097,
"learning_rate": 9.799303135888502e-07,
"loss": 1.7711,
"step": 1480
},
{
"epoch": 16.556291390728475,
"learning_rate": 9.796167247386758e-07,
"loss": 1.6767,
"step": 1500
},
{
"epoch": 16.777041942604857,
"learning_rate": 9.793031358885017e-07,
"loss": 1.722,
"step": 1520
},
{
"epoch": 16.997792494481235,
"learning_rate": 9.789895470383276e-07,
"loss": 1.783,
"step": 1540
},
{
"epoch": 17.218543046357617,
"learning_rate": 9.786759581881534e-07,
"loss": 1.7233,
"step": 1560
},
{
"epoch": 17.439293598233995,
"learning_rate": 9.78362369337979e-07,
"loss": 1.7209,
"step": 1580
},
{
"epoch": 17.660044150110377,
"learning_rate": 9.780487804878047e-07,
"loss": 1.6968,
"step": 1600
},
{
"epoch": 17.880794701986755,
"learning_rate": 9.777351916376306e-07,
"loss": 1.6818,
"step": 1620
},
{
"epoch": 18.101545253863133,
"learning_rate": 9.774216027874564e-07,
"loss": 1.7819,
"step": 1640
},
{
"epoch": 18.322295805739515,
"learning_rate": 9.771080139372823e-07,
"loss": 1.7445,
"step": 1660
},
{
"epoch": 18.543046357615893,
"learning_rate": 9.76794425087108e-07,
"loss": 1.6888,
"step": 1680
},
{
"epoch": 18.763796909492275,
"learning_rate": 9.764808362369338e-07,
"loss": 1.7562,
"step": 1700
},
{
"epoch": 18.984547461368653,
"learning_rate": 9.761672473867594e-07,
"loss": 1.7368,
"step": 1720
},
{
"epoch": 19.205298013245034,
"learning_rate": 9.758536585365853e-07,
"loss": 1.6816,
"step": 1740
},
{
"epoch": 19.426048565121413,
"learning_rate": 9.755400696864111e-07,
"loss": 1.7307,
"step": 1760
},
{
"epoch": 19.64679911699779,
"learning_rate": 9.75226480836237e-07,
"loss": 1.7313,
"step": 1780
},
{
"epoch": 19.867549668874172,
"learning_rate": 9.749128919860627e-07,
"loss": 1.7407,
"step": 1800
},
{
"epoch": 20.08830022075055,
"learning_rate": 9.745993031358883e-07,
"loss": 1.6885,
"step": 1820
},
{
"epoch": 20.309050772626932,
"learning_rate": 9.742857142857142e-07,
"loss": 1.6741,
"step": 1840
},
{
"epoch": 20.52980132450331,
"learning_rate": 9.7397212543554e-07,
"loss": 1.6958,
"step": 1860
},
{
"epoch": 20.750551876379692,
"learning_rate": 9.736585365853659e-07,
"loss": 1.7036,
"step": 1880
},
{
"epoch": 20.97130242825607,
"learning_rate": 9.733449477351917e-07,
"loss": 1.7489,
"step": 1900
},
{
"epoch": 21.192052980132452,
"learning_rate": 9.730313588850174e-07,
"loss": 1.7719,
"step": 1920
},
{
"epoch": 21.41280353200883,
"learning_rate": 9.72717770034843e-07,
"loss": 1.7401,
"step": 1940
},
{
"epoch": 21.63355408388521,
"learning_rate": 9.724041811846689e-07,
"loss": 1.7247,
"step": 1960
},
{
"epoch": 21.85430463576159,
"learning_rate": 9.720905923344947e-07,
"loss": 1.7175,
"step": 1980
},
{
"epoch": 22.075055187637968,
"learning_rate": 9.717770034843206e-07,
"loss": 1.7288,
"step": 2000
},
{
"epoch": 22.29580573951435,
"learning_rate": 9.714634146341462e-07,
"loss": 1.7068,
"step": 2020
},
{
"epoch": 22.516556291390728,
"learning_rate": 9.71149825783972e-07,
"loss": 1.6812,
"step": 2040
},
{
"epoch": 22.73730684326711,
"learning_rate": 9.708362369337977e-07,
"loss": 1.6772,
"step": 2060
},
{
"epoch": 22.958057395143488,
"learning_rate": 9.705226480836236e-07,
"loss": 1.7103,
"step": 2080
},
{
"epoch": 23.178807947019866,
"learning_rate": 9.702090592334495e-07,
"loss": 1.6984,
"step": 2100
},
{
"epoch": 23.399558498896248,
"learning_rate": 9.698954703832753e-07,
"loss": 1.7281,
"step": 2120
},
{
"epoch": 23.620309050772626,
"learning_rate": 9.695818815331012e-07,
"loss": 1.7123,
"step": 2140
},
{
"epoch": 23.841059602649008,
"learning_rate": 9.692682926829266e-07,
"loss": 1.7174,
"step": 2160
},
{
"epoch": 24.061810154525386,
"learning_rate": 9.689547038327525e-07,
"loss": 1.7281,
"step": 2180
},
{
"epoch": 24.282560706401767,
"learning_rate": 9.686411149825783e-07,
"loss": 1.7189,
"step": 2200
},
{
"epoch": 24.503311258278146,
"learning_rate": 9.683275261324042e-07,
"loss": 1.6272,
"step": 2220
},
{
"epoch": 24.724061810154524,
"learning_rate": 9.6801393728223e-07,
"loss": 1.6588,
"step": 2240
},
{
"epoch": 24.944812362030905,
"learning_rate": 9.677003484320557e-07,
"loss": 1.6627,
"step": 2260
},
{
"epoch": 25.165562913907284,
"learning_rate": 9.673867595818815e-07,
"loss": 1.6504,
"step": 2280
},
{
"epoch": 25.386313465783665,
"learning_rate": 9.670731707317072e-07,
"loss": 1.6847,
"step": 2300
},
{
"epoch": 25.607064017660043,
"learning_rate": 9.66759581881533e-07,
"loss": 1.6793,
"step": 2320
},
{
"epoch": 25.827814569536425,
"learning_rate": 9.66445993031359e-07,
"loss": 1.7009,
"step": 2340
},
{
"epoch": 26.048565121412803,
"learning_rate": 9.661324041811848e-07,
"loss": 1.6581,
"step": 2360
},
{
"epoch": 26.26931567328918,
"learning_rate": 9.658188153310104e-07,
"loss": 1.6648,
"step": 2380
},
{
"epoch": 26.490066225165563,
"learning_rate": 9.65505226480836e-07,
"loss": 1.6303,
"step": 2400
},
{
"epoch": 26.71081677704194,
"learning_rate": 9.65191637630662e-07,
"loss": 1.7222,
"step": 2420
},
{
"epoch": 26.931567328918323,
"learning_rate": 9.648780487804878e-07,
"loss": 1.7164,
"step": 2440
},
{
"epoch": 27.1523178807947,
"learning_rate": 9.645644599303136e-07,
"loss": 1.6894,
"step": 2460
},
{
"epoch": 27.373068432671083,
"learning_rate": 9.642508710801393e-07,
"loss": 1.6904,
"step": 2480
},
{
"epoch": 27.59381898454746,
"learning_rate": 9.639372822299651e-07,
"loss": 1.6133,
"step": 2500
},
{
"epoch": 27.814569536423843,
"learning_rate": 9.636236933797908e-07,
"loss": 1.6445,
"step": 2520
},
{
"epoch": 28.03532008830022,
"learning_rate": 9.633101045296166e-07,
"loss": 1.6421,
"step": 2540
},
{
"epoch": 28.2560706401766,
"learning_rate": 9.629965156794425e-07,
"loss": 1.672,
"step": 2560
},
{
"epoch": 28.47682119205298,
"learning_rate": 9.626829268292684e-07,
"loss": 1.6592,
"step": 2580
},
{
"epoch": 28.69757174392936,
"learning_rate": 9.62369337979094e-07,
"loss": 1.649,
"step": 2600
},
{
"epoch": 28.91832229580574,
"learning_rate": 9.620557491289199e-07,
"loss": 1.6667,
"step": 2620
},
{
"epoch": 29.13907284768212,
"learning_rate": 9.617421602787455e-07,
"loss": 1.6172,
"step": 2640
},
{
"epoch": 29.3598233995585,
"learning_rate": 9.614285714285714e-07,
"loss": 1.6696,
"step": 2660
},
{
"epoch": 29.58057395143488,
"learning_rate": 9.611149825783972e-07,
"loss": 1.7091,
"step": 2680
},
{
"epoch": 29.801324503311257,
"learning_rate": 9.60801393728223e-07,
"loss": 1.6111,
"step": 2700
},
{
"epoch": 30.02207505518764,
"learning_rate": 9.604878048780487e-07,
"loss": 1.6274,
"step": 2720
},
{
"epoch": 30.242825607064017,
"learning_rate": 9.601742160278746e-07,
"loss": 1.6186,
"step": 2740
},
{
"epoch": 30.4635761589404,
"learning_rate": 9.598606271777002e-07,
"loss": 1.6336,
"step": 2760
},
{
"epoch": 30.684326710816777,
"learning_rate": 9.59547038327526e-07,
"loss": 1.6414,
"step": 2780
},
{
"epoch": 30.90507726269316,
"learning_rate": 9.59233449477352e-07,
"loss": 1.6628,
"step": 2800
},
{
"epoch": 31.125827814569536,
"learning_rate": 9.589198606271776e-07,
"loss": 1.6092,
"step": 2820
},
{
"epoch": 31.346578366445915,
"learning_rate": 9.586062717770034e-07,
"loss": 1.669,
"step": 2840
},
{
"epoch": 31.567328918322296,
"learning_rate": 9.58292682926829e-07,
"loss": 1.6597,
"step": 2860
},
{
"epoch": 31.788079470198674,
"learning_rate": 9.57979094076655e-07,
"loss": 1.5658,
"step": 2880
},
{
"epoch": 32.00883002207505,
"learning_rate": 9.576655052264808e-07,
"loss": 1.6662,
"step": 2900
},
{
"epoch": 32.22958057395144,
"learning_rate": 9.573519163763067e-07,
"loss": 1.6241,
"step": 2920
},
{
"epoch": 32.450331125827816,
"learning_rate": 9.570383275261325e-07,
"loss": 1.6181,
"step": 2940
},
{
"epoch": 32.671081677704194,
"learning_rate": 9.567247386759582e-07,
"loss": 1.6561,
"step": 2960
},
{
"epoch": 32.89183222958057,
"learning_rate": 9.564111498257838e-07,
"loss": 1.6176,
"step": 2980
},
{
"epoch": 33.11258278145695,
"learning_rate": 9.560975609756097e-07,
"loss": 1.686,
"step": 3000
},
{
"epoch": 33.333333333333336,
"learning_rate": 9.557839721254355e-07,
"loss": 1.5938,
"step": 3020
},
{
"epoch": 33.554083885209714,
"learning_rate": 9.554703832752614e-07,
"loss": 1.5952,
"step": 3040
},
{
"epoch": 33.77483443708609,
"learning_rate": 9.55156794425087e-07,
"loss": 1.5772,
"step": 3060
},
{
"epoch": 33.99558498896247,
"learning_rate": 9.548432055749129e-07,
"loss": 1.5889,
"step": 3080
},
{
"epoch": 34.216335540838855,
"learning_rate": 9.545296167247385e-07,
"loss": 1.5953,
"step": 3100
},
{
"epoch": 34.437086092715234,
"learning_rate": 9.542160278745644e-07,
"loss": 1.6379,
"step": 3120
},
{
"epoch": 34.65783664459161,
"learning_rate": 9.539024390243903e-07,
"loss": 1.6252,
"step": 3140
},
{
"epoch": 34.87858719646799,
"learning_rate": 9.53588850174216e-07,
"loss": 1.628,
"step": 3160
},
{
"epoch": 35.09933774834437,
"learning_rate": 9.532752613240419e-07,
"loss": 1.6547,
"step": 3180
},
{
"epoch": 35.32008830022075,
"learning_rate": 9.529616724738675e-07,
"loss": 1.6308,
"step": 3200
},
{
"epoch": 35.54083885209713,
"learning_rate": 9.526480836236935e-07,
"loss": 1.599,
"step": 3220
},
{
"epoch": 35.76158940397351,
"learning_rate": 9.523344947735191e-07,
"loss": 1.6198,
"step": 3240
},
{
"epoch": 35.98233995584989,
"learning_rate": 9.520209059233449e-07,
"loss": 1.6248,
"step": 3260
},
{
"epoch": 36.203090507726266,
"learning_rate": 9.517073170731706e-07,
"loss": 1.576,
"step": 3280
},
{
"epoch": 36.42384105960265,
"learning_rate": 9.513937282229965e-07,
"loss": 1.6648,
"step": 3300
},
{
"epoch": 36.64459161147903,
"learning_rate": 9.510801393728223e-07,
"loss": 1.6035,
"step": 3320
},
{
"epoch": 36.86534216335541,
"learning_rate": 9.50766550522648e-07,
"loss": 1.6051,
"step": 3340
},
{
"epoch": 37.086092715231786,
"learning_rate": 9.504529616724738e-07,
"loss": 1.6052,
"step": 3360
},
{
"epoch": 37.30684326710817,
"learning_rate": 9.501393728222996e-07,
"loss": 1.6081,
"step": 3380
},
{
"epoch": 37.52759381898455,
"learning_rate": 9.498257839721255e-07,
"loss": 1.6066,
"step": 3400
},
{
"epoch": 37.74834437086093,
"learning_rate": 9.495121951219511e-07,
"loss": 1.5763,
"step": 3420
},
{
"epoch": 37.969094922737305,
"learning_rate": 9.49198606271777e-07,
"loss": 1.6039,
"step": 3440
},
{
"epoch": 38.18984547461368,
"learning_rate": 9.488850174216028e-07,
"loss": 1.6203,
"step": 3460
},
{
"epoch": 38.41059602649007,
"learning_rate": 9.485714285714285e-07,
"loss": 1.619,
"step": 3480
},
{
"epoch": 38.63134657836645,
"learning_rate": 9.482578397212543e-07,
"loss": 1.6055,
"step": 3500
},
{
"epoch": 38.852097130242825,
"learning_rate": 9.479442508710801e-07,
"loss": 1.6037,
"step": 3520
},
{
"epoch": 39.0728476821192,
"learning_rate": 9.476306620209059e-07,
"loss": 1.6259,
"step": 3540
},
{
"epoch": 39.29359823399559,
"learning_rate": 9.473170731707316e-07,
"loss": 1.5917,
"step": 3560
},
{
"epoch": 39.51434878587197,
"learning_rate": 9.470034843205574e-07,
"loss": 1.6039,
"step": 3580
},
{
"epoch": 39.735099337748345,
"learning_rate": 9.466898954703833e-07,
"loss": 1.5364,
"step": 3600
},
{
"epoch": 39.735099337748345,
"eval_bleu": 41.1359,
"eval_gen_len": 9.5667,
"eval_loss": 2.0473527908325195,
"eval_runtime": 3.9497,
"eval_samples_per_second": 7.596,
"eval_steps_per_second": 1.519,
"step": 3600
}
],
"logging_steps": 20,
"max_steps": 57600,
"num_input_tokens_seen": 0,
"num_train_epochs": 640,
"save_steps": 3600,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.0641987012722688e+16,
"train_batch_size": 5,
"trial_name": null,
"trial_params": null
}
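
For context, this is the standard trainer_state.json that the Hugging Face Transformers Trainer writes alongside each checkpoint: entries in log_history carrying a "loss" key are periodic training logs (every logging_steps = 20 optimizer steps here), while entries carrying "eval_*" keys are evaluation results (every eval_steps = 3600). Below is a minimal sketch of how such a file can be summarized, assuming it has been downloaded locally as trainer_state.json (the path is an assumption; every field the code reads appears in the file above):

import json

# Minimal sketch: summarize a Hugging Face Trainer state file.
# "trainer_state.json" is an assumed local path; adjust as needed.
with open("trainer_state.json", encoding="utf-8") as f:
    state = json.load(f)

# Training logs carry a "loss" key; evaluation logs carry "eval_*" keys instead.
train_logs = [entry for entry in state["log_history"] if "loss" in entry]
eval_logs = [entry for entry in state["log_history"] if "eval_loss" in entry]

print(f"best metric (BLEU): {state['best_metric']}")           # 41.1359
print(f"progress: step {state['global_step']} of {state['max_steps']}")
print(f"final logged train loss: {train_logs[-1]['loss']}")    # 1.5364 at step 3600

for entry in eval_logs:
    print(f"step {entry['step']}: eval_bleu={entry['eval_bleu']}, eval_loss={entry['eval_loss']}")

Run against the file above, this reports the training loss drifting from 2.1638 at step 1 down to 1.5364 at step 3600, and a single evaluation so far: BLEU 41.1359 (the run's best_metric) with eval_loss 2.0474, at step 3600 of the planned 57600.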