Yelp_American / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 6.0,
"global_step": 59298,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.05,
"learning_rate": 4.957840062059429e-05,
"loss": 3.4611,
"step": 500
},
{
"epoch": 0.1,
"learning_rate": 4.915680124118858e-05,
"loss": 3.3849,
"step": 1000
},
{
"epoch": 0.15,
"learning_rate": 4.8735201861782864e-05,
"loss": 3.3355,
"step": 1500
},
{
"epoch": 0.2,
"learning_rate": 4.831360248237715e-05,
"loss": 3.3087,
"step": 2000
},
{
"epoch": 0.25,
"learning_rate": 4.7892003102971434e-05,
"loss": 3.2799,
"step": 2500
},
{
"epoch": 0.3,
"learning_rate": 4.747040372356572e-05,
"loss": 3.2638,
"step": 3000
},
{
"epoch": 0.35,
"learning_rate": 4.704880434416001e-05,
"loss": 3.2501,
"step": 3500
},
{
"epoch": 0.4,
"learning_rate": 4.6627204964754295e-05,
"loss": 3.2575,
"step": 4000
},
{
"epoch": 0.46,
"learning_rate": 4.620560558534858e-05,
"loss": 3.25,
"step": 4500
},
{
"epoch": 0.51,
"learning_rate": 4.5784006205942865e-05,
"loss": 3.214,
"step": 5000
},
{
"epoch": 0.56,
"learning_rate": 4.536240682653715e-05,
"loss": 3.2253,
"step": 5500
},
{
"epoch": 0.61,
"learning_rate": 4.494080744713144e-05,
"loss": 3.2102,
"step": 6000
},
{
"epoch": 0.66,
"learning_rate": 4.4519208067725726e-05,
"loss": 3.1872,
"step": 6500
},
{
"epoch": 0.71,
"learning_rate": 4.409760868832001e-05,
"loss": 3.1946,
"step": 7000
},
{
"epoch": 0.76,
"learning_rate": 4.36760093089143e-05,
"loss": 3.1831,
"step": 7500
},
{
"epoch": 0.81,
"learning_rate": 4.325440992950859e-05,
"loss": 3.1851,
"step": 8000
},
{
"epoch": 0.86,
"learning_rate": 4.283281055010287e-05,
"loss": 3.1794,
"step": 8500
},
{
"epoch": 0.91,
"learning_rate": 4.241121117069716e-05,
"loss": 3.1698,
"step": 9000
},
{
"epoch": 0.96,
"learning_rate": 4.198961179129144e-05,
"loss": 3.16,
"step": 9500
},
{
"epoch": 1.01,
"learning_rate": 4.1568012411885734e-05,
"loss": 3.1365,
"step": 10000
},
{
"epoch": 1.06,
"learning_rate": 4.114641303248002e-05,
"loss": 3.0765,
"step": 10500
},
{
"epoch": 1.11,
"learning_rate": 4.0724813653074304e-05,
"loss": 3.0774,
"step": 11000
},
{
"epoch": 1.16,
"learning_rate": 4.0303214273668596e-05,
"loss": 3.0747,
"step": 11500
},
{
"epoch": 1.21,
"learning_rate": 3.9881614894262874e-05,
"loss": 3.0726,
"step": 12000
},
{
"epoch": 1.26,
"learning_rate": 3.9460015514857165e-05,
"loss": 3.0738,
"step": 12500
},
{
"epoch": 1.32,
"learning_rate": 3.903841613545145e-05,
"loss": 3.0799,
"step": 13000
},
{
"epoch": 1.37,
"learning_rate": 3.8616816756045735e-05,
"loss": 3.059,
"step": 13500
},
{
"epoch": 1.42,
"learning_rate": 3.819521737664003e-05,
"loss": 3.0648,
"step": 14000
},
{
"epoch": 1.47,
"learning_rate": 3.777361799723431e-05,
"loss": 3.0643,
"step": 14500
},
{
"epoch": 1.52,
"learning_rate": 3.7352018617828597e-05,
"loss": 3.0737,
"step": 15000
},
{
"epoch": 1.57,
"learning_rate": 3.693041923842288e-05,
"loss": 3.0672,
"step": 15500
},
{
"epoch": 1.62,
"learning_rate": 3.6508819859017166e-05,
"loss": 3.071,
"step": 16000
},
{
"epoch": 1.67,
"learning_rate": 3.608722047961146e-05,
"loss": 3.058,
"step": 16500
},
{
"epoch": 1.72,
"learning_rate": 3.566562110020574e-05,
"loss": 3.0572,
"step": 17000
},
{
"epoch": 1.77,
"learning_rate": 3.524402172080003e-05,
"loss": 3.063,
"step": 17500
},
{
"epoch": 1.82,
"learning_rate": 3.482242234139432e-05,
"loss": 3.0507,
"step": 18000
},
{
"epoch": 1.87,
"learning_rate": 3.44008229619886e-05,
"loss": 3.0435,
"step": 18500
},
{
"epoch": 1.92,
"learning_rate": 3.397922358258289e-05,
"loss": 3.0538,
"step": 19000
},
{
"epoch": 1.97,
"learning_rate": 3.3557624203177174e-05,
"loss": 3.0411,
"step": 19500
},
{
"epoch": 2.02,
"learning_rate": 3.313602482377146e-05,
"loss": 3.0106,
"step": 20000
},
{
"epoch": 2.07,
"learning_rate": 3.271442544436575e-05,
"loss": 2.9677,
"step": 20500
},
{
"epoch": 2.12,
"learning_rate": 3.2292826064960036e-05,
"loss": 2.9598,
"step": 21000
},
{
"epoch": 2.18,
"learning_rate": 3.187122668555432e-05,
"loss": 2.9693,
"step": 21500
},
{
"epoch": 2.23,
"learning_rate": 3.1449627306148605e-05,
"loss": 2.9833,
"step": 22000
},
{
"epoch": 2.28,
"learning_rate": 3.102802792674289e-05,
"loss": 2.9675,
"step": 22500
},
{
"epoch": 2.33,
"learning_rate": 3.060642854733718e-05,
"loss": 2.9713,
"step": 23000
},
{
"epoch": 2.38,
"learning_rate": 3.0184829167931467e-05,
"loss": 2.9768,
"step": 23500
},
{
"epoch": 2.43,
"learning_rate": 2.9763229788525755e-05,
"loss": 2.9765,
"step": 24000
},
{
"epoch": 2.48,
"learning_rate": 2.934163040912004e-05,
"loss": 2.9698,
"step": 24500
},
{
"epoch": 2.53,
"learning_rate": 2.892003102971432e-05,
"loss": 2.9934,
"step": 25000
},
{
"epoch": 2.58,
"learning_rate": 2.849843165030861e-05,
"loss": 2.9721,
"step": 25500
},
{
"epoch": 2.63,
"learning_rate": 2.8076832270902898e-05,
"loss": 2.9766,
"step": 26000
},
{
"epoch": 2.68,
"learning_rate": 2.7655232891497186e-05,
"loss": 2.986,
"step": 26500
},
{
"epoch": 2.73,
"learning_rate": 2.723363351209147e-05,
"loss": 2.9856,
"step": 27000
},
{
"epoch": 2.78,
"learning_rate": 2.681203413268576e-05,
"loss": 2.9851,
"step": 27500
},
{
"epoch": 2.83,
"learning_rate": 2.6390434753280048e-05,
"loss": 2.9722,
"step": 28000
},
{
"epoch": 2.88,
"learning_rate": 2.596883537387433e-05,
"loss": 2.9713,
"step": 28500
},
{
"epoch": 2.93,
"learning_rate": 2.5547235994468614e-05,
"loss": 2.9698,
"step": 29000
},
{
"epoch": 2.98,
"learning_rate": 2.5125636615062902e-05,
"loss": 2.9742,
"step": 29500
},
{
"epoch": 3.04,
"learning_rate": 2.470403723565719e-05,
"loss": 2.9437,
"step": 30000
},
{
"epoch": 3.09,
"learning_rate": 2.428243785625148e-05,
"loss": 2.9012,
"step": 30500
},
{
"epoch": 3.14,
"learning_rate": 2.3860838476845764e-05,
"loss": 2.903,
"step": 31000
},
{
"epoch": 3.19,
"learning_rate": 2.343923909744005e-05,
"loss": 2.9115,
"step": 31500
},
{
"epoch": 3.24,
"learning_rate": 2.3017639718034337e-05,
"loss": 2.9079,
"step": 32000
},
{
"epoch": 3.29,
"learning_rate": 2.2596040338628622e-05,
"loss": 2.9209,
"step": 32500
},
{
"epoch": 3.34,
"learning_rate": 2.217444095922291e-05,
"loss": 2.9072,
"step": 33000
},
{
"epoch": 3.39,
"learning_rate": 2.1752841579817195e-05,
"loss": 2.9141,
"step": 33500
},
{
"epoch": 3.44,
"learning_rate": 2.1331242200411483e-05,
"loss": 2.9196,
"step": 34000
},
{
"epoch": 3.49,
"learning_rate": 2.0909642821005768e-05,
"loss": 2.9225,
"step": 34500
},
{
"epoch": 3.54,
"learning_rate": 2.0488043441600056e-05,
"loss": 2.9171,
"step": 35000
},
{
"epoch": 3.59,
"learning_rate": 2.006644406219434e-05,
"loss": 2.9171,
"step": 35500
},
{
"epoch": 3.64,
"learning_rate": 1.9644844682788626e-05,
"loss": 2.9088,
"step": 36000
},
{
"epoch": 3.69,
"learning_rate": 1.9223245303382915e-05,
"loss": 2.9169,
"step": 36500
},
{
"epoch": 3.74,
"learning_rate": 1.8801645923977203e-05,
"loss": 2.9197,
"step": 37000
},
{
"epoch": 3.79,
"learning_rate": 1.8380046544571484e-05,
"loss": 2.9118,
"step": 37500
},
{
"epoch": 3.84,
"learning_rate": 1.7958447165165773e-05,
"loss": 2.9153,
"step": 38000
},
{
"epoch": 3.9,
"learning_rate": 1.753684778576006e-05,
"loss": 2.9104,
"step": 38500
},
{
"epoch": 3.95,
"learning_rate": 1.711524840635435e-05,
"loss": 2.9136,
"step": 39000
},
{
"epoch": 4.0,
"learning_rate": 1.669364902694863e-05,
"loss": 2.9218,
"step": 39500
},
{
"epoch": 4.05,
"learning_rate": 1.627204964754292e-05,
"loss": 2.8708,
"step": 40000
},
{
"epoch": 4.1,
"learning_rate": 1.5850450268137207e-05,
"loss": 2.865,
"step": 40500
},
{
"epoch": 4.15,
"learning_rate": 1.5428850888731492e-05,
"loss": 2.8641,
"step": 41000
},
{
"epoch": 4.2,
"learning_rate": 1.5007251509325779e-05,
"loss": 2.8758,
"step": 41500
},
{
"epoch": 4.25,
"learning_rate": 1.4585652129920065e-05,
"loss": 2.866,
"step": 42000
},
{
"epoch": 4.3,
"learning_rate": 1.416405275051435e-05,
"loss": 2.8814,
"step": 42500
},
{
"epoch": 4.35,
"learning_rate": 1.3742453371108638e-05,
"loss": 2.8632,
"step": 43000
},
{
"epoch": 4.4,
"learning_rate": 1.3320853991702925e-05,
"loss": 2.8634,
"step": 43500
},
{
"epoch": 4.45,
"learning_rate": 1.2899254612297213e-05,
"loss": 2.8605,
"step": 44000
},
{
"epoch": 4.5,
"learning_rate": 1.2477655232891498e-05,
"loss": 2.8665,
"step": 44500
},
{
"epoch": 4.55,
"learning_rate": 1.2056055853485785e-05,
"loss": 2.8697,
"step": 45000
},
{
"epoch": 4.6,
"learning_rate": 1.163445647408007e-05,
"loss": 2.8756,
"step": 45500
},
{
"epoch": 4.65,
"learning_rate": 1.1212857094674358e-05,
"loss": 2.8565,
"step": 46000
},
{
"epoch": 4.71,
"learning_rate": 1.0791257715268643e-05,
"loss": 2.8682,
"step": 46500
},
{
"epoch": 4.76,
"learning_rate": 1.0369658335862931e-05,
"loss": 2.8639,
"step": 47000
},
{
"epoch": 4.81,
"learning_rate": 9.948058956457216e-06,
"loss": 2.8667,
"step": 47500
},
{
"epoch": 4.86,
"learning_rate": 9.526459577051503e-06,
"loss": 2.8715,
"step": 48000
},
{
"epoch": 4.91,
"learning_rate": 9.104860197645789e-06,
"loss": 2.8704,
"step": 48500
},
{
"epoch": 4.96,
"learning_rate": 8.683260818240076e-06,
"loss": 2.8771,
"step": 49000
},
{
"epoch": 5.01,
"learning_rate": 8.261661438834362e-06,
"loss": 2.8545,
"step": 49500
},
{
"epoch": 5.06,
"learning_rate": 7.840062059428649e-06,
"loss": 2.8237,
"step": 50000
},
{
"epoch": 5.11,
"learning_rate": 7.418462680022935e-06,
"loss": 2.832,
"step": 50500
},
{
"epoch": 5.16,
"learning_rate": 6.996863300617222e-06,
"loss": 2.8298,
"step": 51000
},
{
"epoch": 5.21,
"learning_rate": 6.575263921211508e-06,
"loss": 2.8336,
"step": 51500
},
{
"epoch": 5.26,
"learning_rate": 6.153664541805794e-06,
"loss": 2.8441,
"step": 52000
},
{
"epoch": 5.31,
"learning_rate": 5.732065162400081e-06,
"loss": 2.8493,
"step": 52500
},
{
"epoch": 5.36,
"learning_rate": 5.3104657829943675e-06,
"loss": 2.8324,
"step": 53000
},
{
"epoch": 5.41,
"learning_rate": 4.888866403588654e-06,
"loss": 2.8422,
"step": 53500
},
{
"epoch": 5.46,
"learning_rate": 4.467267024182941e-06,
"loss": 2.8419,
"step": 54000
},
{
"epoch": 5.51,
"learning_rate": 4.045667644777227e-06,
"loss": 2.8283,
"step": 54500
},
{
"epoch": 5.57,
"learning_rate": 3.6240682653715134e-06,
"loss": 2.8451,
"step": 55000
},
{
"epoch": 5.62,
"learning_rate": 3.2024688859658e-06,
"loss": 2.8294,
"step": 55500
},
{
"epoch": 5.67,
"learning_rate": 2.7808695065600866e-06,
"loss": 2.8356,
"step": 56000
},
{
"epoch": 5.72,
"learning_rate": 2.359270127154373e-06,
"loss": 2.8375,
"step": 56500
},
{
"epoch": 5.77,
"learning_rate": 1.9376707477486594e-06,
"loss": 2.8328,
"step": 57000
},
{
"epoch": 5.82,
"learning_rate": 1.516071368342946e-06,
"loss": 2.8397,
"step": 57500
},
{
"epoch": 5.87,
"learning_rate": 1.0944719889372323e-06,
"loss": 2.8305,
"step": 58000
},
{
"epoch": 5.92,
"learning_rate": 6.728726095315188e-07,
"loss": 2.8395,
"step": 58500
},
{
"epoch": 5.97,
"learning_rate": 2.5127323012580527e-07,
"loss": 2.8332,
"step": 59000
},
{
"epoch": 6.0,
"step": 59298,
"total_flos": 3.0988189827072e+16,
"train_loss": 2.9834200300580593,
"train_runtime": 15417.1336,
"train_samples_per_second": 3.846,
"train_steps_per_second": 3.846
}
],
"max_steps": 59298,
"num_train_epochs": 6,
"total_flos": 3.0988189827072e+16,
"trial_name": null,
"trial_params": null
}
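
A minimal sketch of how the log_history above could be read back and plotted, assuming this trainer_state.json is saved locally under the file name shown in the title and that matplotlib is available; entries carrying a "loss" key are the periodic training logs (every 500 steps here), while the final entry holds only the run summary.

import json

import matplotlib.pyplot as plt

# Load the trainer state written by the Hugging Face Trainer.
# The path is an assumption taken from the page title above.
with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# Keep only the periodic logs; the last entry has "train_loss" instead of "loss".
logs = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title("Yelp_American fine-tuning loss (6 epochs, 59298 steps)")
plt.show()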