{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9605263157894737,
  "eval_steps": 19,
  "global_step": 152,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013157894736842105,
      "grad_norm": 0.402255117893219,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.641,
      "step": 1
    },
    {
      "epoch": 0.013157894736842105,
      "eval_loss": 1.6610033512115479,
      "eval_runtime": 11.198,
      "eval_samples_per_second": 13.217,
      "eval_steps_per_second": 1.697,
      "step": 1
    },
    {
      "epoch": 0.02631578947368421,
      "grad_norm": 0.39817070960998535,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.5949,
      "step": 2
    },
    {
      "epoch": 0.039473684210526314,
      "grad_norm": 0.40712764859199524,
      "learning_rate": 3e-06,
      "loss": 1.625,
      "step": 3
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 0.42735520005226135,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.6878,
      "step": 4
    },
    {
      "epoch": 0.06578947368421052,
      "grad_norm": 0.3926982879638672,
      "learning_rate": 5e-06,
      "loss": 1.5266,
      "step": 5
    },
    {
      "epoch": 0.07894736842105263,
      "grad_norm": 0.4139236807823181,
      "learning_rate": 6e-06,
      "loss": 1.6492,
      "step": 6
    },
    {
      "epoch": 0.09210526315789473,
      "grad_norm": 0.3906595706939697,
      "learning_rate": 7e-06,
      "loss": 1.5571,
      "step": 7
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 0.4070392847061157,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.5989,
      "step": 8
    },
    {
      "epoch": 0.11842105263157894,
      "grad_norm": 0.4086395800113678,
      "learning_rate": 9e-06,
      "loss": 1.6022,
      "step": 9
    },
    {
      "epoch": 0.13157894736842105,
      "grad_norm": 0.42242783308029175,
      "learning_rate": 1e-05,
      "loss": 1.5348,
      "step": 10
    },
    {
      "epoch": 0.14473684210526316,
      "grad_norm": 0.43531253933906555,
      "learning_rate": 9.998776383426217e-06,
      "loss": 1.661,
      "step": 11
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 0.44515812397003174,
      "learning_rate": 9.995106132599869e-06,
      "loss": 1.6782,
      "step": 12
    },
    {
      "epoch": 0.17105263157894737,
      "grad_norm": 0.4268193542957306,
      "learning_rate": 9.988991043912857e-06,
      "loss": 1.6253,
      "step": 13
    },
    {
      "epoch": 0.18421052631578946,
      "grad_norm": 0.4616084694862366,
      "learning_rate": 9.980434110374725e-06,
      "loss": 1.633,
      "step": 14
    },
    {
      "epoch": 0.19736842105263158,
      "grad_norm": 0.4059070646762848,
      "learning_rate": 9.969439520147754e-06,
      "loss": 1.6423,
      "step": 15
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 0.4180908501148224,
      "learning_rate": 9.956012654497073e-06,
      "loss": 1.6157,
      "step": 16
    },
    {
      "epoch": 0.2236842105263158,
      "grad_norm": 0.44470345973968506,
      "learning_rate": 9.94016008515682e-06,
      "loss": 1.6116,
      "step": 17
    },
    {
      "epoch": 0.23684210526315788,
      "grad_norm": 0.4326673150062561,
      "learning_rate": 9.921889571113629e-06,
      "loss": 1.65,
      "step": 18
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.44323351979255676,
      "learning_rate": 9.901210054809015e-06,
      "loss": 1.5857,
      "step": 19
    },
    {
      "epoch": 0.25,
      "eval_loss": 1.6229134798049927,
      "eval_runtime": 11.1737,
      "eval_samples_per_second": 13.245,
      "eval_steps_per_second": 1.7,
      "step": 19
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 0.4330739378929138,
      "learning_rate": 9.878131657762535e-06,
      "loss": 1.5456,
      "step": 20
    },
    {
      "epoch": 0.27631578947368424,
      "grad_norm": 0.43902918696403503,
      "learning_rate": 9.852665675617837e-06,
      "loss": 1.6184,
      "step": 21
    },
    {
      "epoch": 0.2894736842105263,
      "grad_norm": 0.4325895309448242,
      "learning_rate": 9.82482457261405e-06,
      "loss": 1.5871,
      "step": 22
    },
    {
      "epoch": 0.3026315789473684,
      "grad_norm": 0.4587506353855133,
      "learning_rate": 9.7946219754852e-06,
      "loss": 1.5777,
      "step": 23
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 0.38507089018821716,
      "learning_rate": 9.762072666790658e-06,
      "loss": 1.4464,
      "step": 24
    },
    {
      "epoch": 0.32894736842105265,
      "grad_norm": 0.3887621760368347,
      "learning_rate": 9.727192577679852e-06,
      "loss": 1.5026,
      "step": 25
    },
    {
      "epoch": 0.34210526315789475,
      "grad_norm": 0.3975026309490204,
      "learning_rate": 9.689998780094839e-06,
      "loss": 1.5542,
      "step": 26
    },
    {
      "epoch": 0.35526315789473684,
      "grad_norm": 0.4168972074985504,
      "learning_rate": 9.650509478414483e-06,
      "loss": 1.5423,
      "step": 27
    },
    {
      "epoch": 0.3684210526315789,
      "grad_norm": 0.40937769412994385,
      "learning_rate": 9.608744000544392e-06,
      "loss": 1.544,
      "step": 28
    },
    {
      "epoch": 0.3815789473684211,
      "grad_norm": 0.42820680141448975,
      "learning_rate": 9.564722788456943e-06,
      "loss": 1.5586,
      "step": 29
    },
    {
      "epoch": 0.39473684210526316,
      "grad_norm": 0.39091867208480835,
      "learning_rate": 9.51846738818602e-06,
      "loss": 1.547,
      "step": 30
    },
    {
      "epoch": 0.40789473684210525,
      "grad_norm": 0.39774808287620544,
      "learning_rate": 9.470000439281379e-06,
      "loss": 1.4802,
      "step": 31
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 0.38609299063682556,
      "learning_rate": 9.419345663727805e-06,
      "loss": 1.5297,
      "step": 32
    },
    {
      "epoch": 0.4342105263157895,
      "grad_norm": 0.384609580039978,
      "learning_rate": 9.366527854334464e-06,
      "loss": 1.4976,
      "step": 33
    },
    {
      "epoch": 0.4473684210526316,
      "grad_norm": 0.3664967119693756,
      "learning_rate": 9.31157286260014e-06,
      "loss": 1.4768,
      "step": 34
    },
    {
      "epoch": 0.4605263157894737,
      "grad_norm": 0.4111669063568115,
      "learning_rate": 9.25450758606031e-06,
      "loss": 1.4819,
      "step": 35
    },
    {
      "epoch": 0.47368421052631576,
      "grad_norm": 0.38792452216148376,
      "learning_rate": 9.195359955122244e-06,
      "loss": 1.4876,
      "step": 36
    },
    {
      "epoch": 0.4868421052631579,
      "grad_norm": 0.3711869716644287,
      "learning_rate": 9.134158919394545e-06,
      "loss": 1.4768,
      "step": 37
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.37189042568206787,
      "learning_rate": 9.070934433517872e-06,
      "loss": 1.4426,
      "step": 38
    },
    {
      "epoch": 0.5,
      "eval_loss": 1.4863917827606201,
      "eval_runtime": 11.152,
      "eval_samples_per_second": 13.271,
      "eval_steps_per_second": 1.704,
      "step": 38
    },
    {
      "epoch": 0.5131578947368421,
      "grad_norm": 0.3438572287559509,
      "learning_rate": 9.005717442503741e-06,
      "loss": 1.4076,
      "step": 39
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 0.36527037620544434,
      "learning_rate": 8.938539866588593e-06,
      "loss": 1.4656,
      "step": 40
    },
    {
      "epoch": 0.5394736842105263,
      "grad_norm": 0.36658236384391785,
      "learning_rate": 8.869434585610534e-06,
      "loss": 1.4152,
      "step": 41
    },
    {
      "epoch": 0.5526315789473685,
      "grad_norm": 0.3422878682613373,
      "learning_rate": 8.798435422916425e-06,
      "loss": 1.3725,
      "step": 42
    },
    {
      "epoch": 0.5657894736842105,
      "grad_norm": 0.3560190498828888,
      "learning_rate": 8.725577128807144e-06,
      "loss": 1.3982,
      "step": 43
    },
    {
      "epoch": 0.5789473684210527,
      "grad_norm": 0.3515233099460602,
      "learning_rate": 8.650895363529172e-06,
      "loss": 1.4798,
      "step": 44
    },
    {
      "epoch": 0.5921052631578947,
      "grad_norm": 0.35305333137512207,
      "learning_rate": 8.574426679820813e-06,
      "loss": 1.4064,
      "step": 45
    },
    {
      "epoch": 0.6052631578947368,
      "grad_norm": 0.3789292871952057,
      "learning_rate": 8.496208505021572e-06,
      "loss": 1.3732,
      "step": 46
    },
    {
      "epoch": 0.618421052631579,
      "grad_norm": 0.33839643001556396,
      "learning_rate": 8.416279122753468e-06,
      "loss": 1.345,
      "step": 47
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 0.34697043895721436,
      "learning_rate": 8.334677654183254e-06,
      "loss": 1.4175,
      "step": 48
    },
    {
      "epoch": 0.6447368421052632,
      "grad_norm": 0.3513168394565582,
      "learning_rate": 8.251444038874685e-06,
      "loss": 1.3793,
      "step": 49
    },
    {
      "epoch": 0.6578947368421053,
      "grad_norm": 0.3464255630970001,
      "learning_rate": 8.166619015240236e-06,
      "loss": 1.4089,
      "step": 50
    },
    {
      "epoch": 0.6710526315789473,
      "grad_norm": 0.34775641560554504,
      "learning_rate": 8.080244100601822e-06,
      "loss": 1.391,
      "step": 51
    },
    {
      "epoch": 0.6842105263157895,
      "grad_norm": 0.33794087171554565,
      "learning_rate": 7.992361570870289e-06,
      "loss": 1.4101,
      "step": 52
    },
    {
      "epoch": 0.6973684210526315,
      "grad_norm": 0.35401248931884766,
      "learning_rate": 7.903014439853605e-06,
      "loss": 1.3881,
      "step": 53
    },
    {
      "epoch": 0.7105263157894737,
      "grad_norm": 0.32468080520629883,
      "learning_rate": 7.812246438203905e-06,
      "loss": 1.3629,
      "step": 54
    },
    {
      "epoch": 0.7236842105263158,
      "grad_norm": 0.34929484128952026,
      "learning_rate": 7.720101992013661e-06,
      "loss": 1.3425,
      "step": 55
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 0.3399946689605713,
      "learning_rate": 7.626626201071494e-06,
      "loss": 1.3021,
      "step": 56
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.31694310903549194,
      "learning_rate": 7.53186481678822e-06,
      "loss": 1.2782,
      "step": 57
    },
    {
      "epoch": 0.75,
      "eval_loss": 1.375747799873352,
      "eval_runtime": 11.2956,
      "eval_samples_per_second": 13.102,
      "eval_steps_per_second": 1.682,
      "step": 57
    },
    {
      "epoch": 0.7631578947368421,
      "grad_norm": 0.34202322363853455,
      "learning_rate": 7.4358642198039835e-06,
      "loss": 1.338,
      "step": 58
    },
    {
      "epoch": 0.7763157894736842,
      "grad_norm": 0.3176383376121521,
      "learning_rate": 7.338671397287409e-06,
      "loss": 1.3133,
      "step": 59
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 0.31007757782936096,
      "learning_rate": 7.240333919937893e-06,
      "loss": 1.3322,
      "step": 60
    },
    {
      "epoch": 0.8026315789473685,
      "grad_norm": 0.31201171875,
      "learning_rate": 7.140899918702276e-06,
      "loss": 1.2824,
      "step": 61
    },
    {
      "epoch": 0.8157894736842105,
      "grad_norm": 0.29208070039749146,
      "learning_rate": 7.040418061217325e-06,
      "loss": 1.3342,
      "step": 62
    },
    {
      "epoch": 0.8289473684210527,
      "grad_norm": 0.29806771874427795,
      "learning_rate": 6.938937527989511e-06,
      "loss": 1.3021,
      "step": 63
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.30114126205444336,
      "learning_rate": 6.836507988323785e-06,
      "loss": 1.2985,
      "step": 64
    },
    {
      "epoch": 0.8552631578947368,
      "grad_norm": 0.28877463936805725,
      "learning_rate": 6.733179576013098e-06,
      "loss": 1.2917,
      "step": 65
    },
    {
      "epoch": 0.868421052631579,
      "grad_norm": 0.3036439120769501,
      "learning_rate": 6.629002864800589e-06,
      "loss": 1.336,
      "step": 66
    },
    {
      "epoch": 0.881578947368421,
      "grad_norm": 0.29111841320991516,
      "learning_rate": 6.524028843626433e-06,
      "loss": 1.3284,
      "step": 67
    },
    {
      "epoch": 0.8947368421052632,
      "grad_norm": 0.31157442927360535,
      "learning_rate": 6.418308891671484e-06,
      "loss": 1.298,
      "step": 68
    },
    {
      "epoch": 0.9078947368421053,
      "grad_norm": 0.28027617931365967,
      "learning_rate": 6.311894753209896e-06,
      "loss": 1.3627,
      "step": 69
    },
    {
      "epoch": 0.9210526315789473,
      "grad_norm": 0.31063735485076904,
      "learning_rate": 6.204838512283073e-06,
      "loss": 1.2751,
      "step": 70
    },
    {
      "epoch": 0.9342105263157895,
      "grad_norm": 0.27762889862060547,
      "learning_rate": 6.097192567207304e-06,
      "loss": 1.2646,
      "step": 71
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 0.30012503266334534,
      "learning_rate": 5.989009604927587e-06,
      "loss": 1.2696,
      "step": 72
    },
    {
      "epoch": 0.9605263157894737,
      "grad_norm": 0.2858477234840393,
      "learning_rate": 5.8803425752301814e-06,
      "loss": 1.3184,
      "step": 73
    },
    {
      "epoch": 0.9736842105263158,
      "grad_norm": 0.2571242153644562,
      "learning_rate": 5.771244664826512e-06,
      "loss": 1.1024,
      "step": 74
    },
    {
      "epoch": 0.9868421052631579,
      "grad_norm": 0.28689220547676086,
      "learning_rate": 5.661769271321113e-06,
      "loss": 1.3068,
      "step": 75
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.27221059799194336,
      "learning_rate": 5.55196997707635e-06,
      "loss": 1.262,
      "step": 76
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.3148635625839233,
      "eval_runtime": 11.2095,
      "eval_samples_per_second": 13.203,
      "eval_steps_per_second": 1.695,
      "step": 76
    },
    {
      "epoch": 1.013157894736842,
      "grad_norm": 0.2928997278213501,
      "learning_rate": 5.441900522986712e-06,
      "loss": 1.2916,
      "step": 77
    },
    {
      "epoch": 1.0263157894736843,
      "grad_norm": 0.28008702397346497,
      "learning_rate": 5.33161478217552e-06,
      "loss": 1.3139,
      "step": 78
    },
    {
      "epoch": 1.0394736842105263,
      "grad_norm": 0.3058090806007385,
      "learning_rate": 5.221166733626895e-06,
      "loss": 1.1989,
      "step": 79
    },
    {
      "epoch": 1.013157894736842,
      "grad_norm": 0.27998045086860657,
      "learning_rate": 5.110610435765935e-06,
      "loss": 1.247,
      "step": 80
    },
    {
      "epoch": 1.0263157894736843,
      "grad_norm": 0.2768513560295105,
      "learning_rate": 5e-06,
      "loss": 1.3229,
      "step": 81
    },
    {
      "epoch": 1.0394736842105263,
      "grad_norm": 0.28100040555000305,
      "learning_rate": 4.8893895642340665e-06,
      "loss": 1.3181,
      "step": 82
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 0.29516279697418213,
      "learning_rate": 4.778833266373107e-06,
      "loss": 1.1794,
      "step": 83
    },
    {
      "epoch": 1.0657894736842106,
      "grad_norm": 0.266646146774292,
      "learning_rate": 4.668385217824482e-06,
      "loss": 1.261,
      "step": 84
    },
    {
      "epoch": 1.0789473684210527,
      "grad_norm": 0.2927522361278534,
      "learning_rate": 4.558099477013288e-06,
      "loss": 1.3016,
      "step": 85
    },
    {
      "epoch": 1.0921052631578947,
      "grad_norm": 0.2729503810405731,
      "learning_rate": 4.4480300229236525e-06,
      "loss": 1.2789,
      "step": 86
    },
    {
      "epoch": 1.1052631578947367,
      "grad_norm": 0.2716759145259857,
      "learning_rate": 4.338230728678888e-06,
      "loss": 1.2746,
      "step": 87
    },
    {
      "epoch": 1.118421052631579,
      "grad_norm": 0.2987295985221863,
      "learning_rate": 4.228755335173488e-06,
      "loss": 1.3,
      "step": 88
    },
    {
      "epoch": 1.131578947368421,
      "grad_norm": 0.2681940495967865,
      "learning_rate": 4.119657424769819e-06,
      "loss": 1.2736,
      "step": 89
    },
    {
      "epoch": 1.1447368421052633,
      "grad_norm": 0.26406219601631165,
      "learning_rate": 4.010990395072414e-06,
      "loss": 1.2695,
      "step": 90
    },
    {
      "epoch": 1.1578947368421053,
      "grad_norm": 0.262295663356781,
      "learning_rate": 3.902807432792698e-06,
      "loss": 1.2413,
      "step": 91
    },
    {
      "epoch": 1.1710526315789473,
      "grad_norm": 0.2757396101951599,
      "learning_rate": 3.7951614877169285e-06,
      "loss": 1.2272,
      "step": 92
    },
    {
      "epoch": 1.1842105263157894,
      "grad_norm": 0.2563100755214691,
      "learning_rate": 3.6881052467901056e-06,
      "loss": 1.2172,
      "step": 93
    },
    {
      "epoch": 1.1973684210526316,
      "grad_norm": 0.27466002106666565,
      "learning_rate": 3.5816911083285165e-06,
      "loss": 1.2276,
      "step": 94
    },
    {
      "epoch": 1.2105263157894737,
      "grad_norm": 0.27229124307632446,
      "learning_rate": 3.4759711563735676e-06,
      "loss": 1.2384,
      "step": 95
    },
    {
      "epoch": 1.2105263157894737,
      "eval_loss": 1.2851184606552124,
      "eval_runtime": 11.1818,
      "eval_samples_per_second": 13.236,
      "eval_steps_per_second": 1.699,
      "step": 95
    },
    {
      "epoch": 1.2236842105263157,
      "grad_norm": 0.26436150074005127,
      "learning_rate": 3.370997135199413e-06,
      "loss": 1.2118,
      "step": 96
    },
    {
      "epoch": 1.236842105263158,
      "grad_norm": 0.2642536461353302,
      "learning_rate": 3.2668204239869046e-06,
      "loss": 1.2411,
      "step": 97
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.3076295256614685,
      "learning_rate": 3.1634920116762175e-06,
      "loss": 1.1806,
      "step": 98
    },
    {
      "epoch": 1.263157894736842,
      "grad_norm": 0.2798045873641968,
      "learning_rate": 3.061062472010489e-06,
      "loss": 1.231,
      "step": 99
    },
    {
      "epoch": 1.2763157894736843,
      "grad_norm": 0.26016712188720703,
      "learning_rate": 2.9595819387826753e-06,
      "loss": 1.2221,
      "step": 100
    },
    {
      "epoch": 1.2894736842105263,
      "grad_norm": 0.27707499265670776,
      "learning_rate": 2.8591000812977245e-06,
      "loss": 1.2265,
      "step": 101
    },
    {
      "epoch": 1.3026315789473684,
      "grad_norm": 0.24529437720775604,
      "learning_rate": 2.7596660800621076e-06,
      "loss": 1.1956,
      "step": 102
    },
    {
      "epoch": 1.3157894736842106,
      "grad_norm": 0.2894725799560547,
      "learning_rate": 2.661328602712592e-06,
      "loss": 1.2519,
      "step": 103
    },
    {
      "epoch": 1.3289473684210527,
      "grad_norm": 0.259954035282135,
      "learning_rate": 2.5641357801960186e-06,
      "loss": 1.28,
      "step": 104
    },
    {
      "epoch": 1.3421052631578947,
      "grad_norm": 0.28108924627304077,
      "learning_rate": 2.4681351832117815e-06,
      "loss": 1.1942,
      "step": 105
    },
    {
      "epoch": 1.3552631578947367,
      "grad_norm": 0.2708108723163605,
      "learning_rate": 2.373373798928507e-06,
      "loss": 1.2821,
      "step": 106
    },
    {
      "epoch": 1.368421052631579,
      "grad_norm": 0.2668735086917877,
      "learning_rate": 2.2798980079863386e-06,
      "loss": 1.2289,
      "step": 107
    },
    {
      "epoch": 1.381578947368421,
      "grad_norm": 0.26546740531921387,
      "learning_rate": 2.187753561796097e-06,
      "loss": 1.2604,
      "step": 108
    },
    {
      "epoch": 1.3947368421052633,
      "grad_norm": 0.262500137090683,
      "learning_rate": 2.0969855601463966e-06,
      "loss": 1.2243,
      "step": 109
    },
    {
      "epoch": 1.4078947368421053,
      "grad_norm": 0.24560751020908356,
      "learning_rate": 2.0076384291297134e-06,
      "loss": 1.2348,
      "step": 110
    },
    {
      "epoch": 1.4210526315789473,
      "grad_norm": 0.26329511404037476,
      "learning_rate": 1.9197558993981784e-06,
      "loss": 1.2551,
      "step": 111
    },
    {
      "epoch": 1.4342105263157894,
      "grad_norm": 0.2851774990558624,
      "learning_rate": 1.8333809847597644e-06,
      "loss": 1.2282,
      "step": 112
    },
    {
      "epoch": 1.4473684210526316,
      "grad_norm": 0.26805853843688965,
      "learning_rate": 1.748555961125315e-06,
      "loss": 1.2531,
      "step": 113
    },
    {
      "epoch": 1.4605263157894737,
      "grad_norm": 0.2691291272640228,
      "learning_rate": 1.665322345816746e-06,
      "loss": 1.1451,
      "step": 114
    },
    {
      "epoch": 1.4605263157894737,
      "eval_loss": 1.2717276811599731,
      "eval_runtime": 11.1773,
      "eval_samples_per_second": 13.241,
      "eval_steps_per_second": 1.7,
      "step": 114
    },
    {
      "epoch": 1.4736842105263157,
      "grad_norm": 0.2724035680294037,
      "learning_rate": 1.583720877246533e-06,
      "loss": 1.2532,
      "step": 115
    },
    {
      "epoch": 1.486842105263158,
      "grad_norm": 0.28526419401168823,
      "learning_rate": 1.50379149497843e-06,
      "loss": 1.1913,
      "step": 116
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.29095906019210815,
      "learning_rate": 1.4255733201791883e-06,
      "loss": 1.1994,
      "step": 117
    },
    {
      "epoch": 1.513157894736842,
      "grad_norm": 0.2538934051990509,
      "learning_rate": 1.3491046364708294e-06,
      "loss": 1.128,
      "step": 118
    },
    {
      "epoch": 1.526315789473684,
      "grad_norm": 0.2766117751598358,
      "learning_rate": 1.2744228711928585e-06,
      "loss": 1.1852,
      "step": 119
    },
    {
      "epoch": 1.5394736842105263,
      "grad_norm": 0.2569272816181183,
      "learning_rate": 1.2015645770835765e-06,
      "loss": 1.2415,
      "step": 120
    },
    {
      "epoch": 1.5526315789473686,
      "grad_norm": 0.2636973559856415,
      "learning_rate": 1.1305654143894674e-06,
      "loss": 1.1965,
      "step": 121
    },
    {
      "epoch": 1.5657894736842106,
      "grad_norm": 0.2750151753425598,
      "learning_rate": 1.0614601334114099e-06,
      "loss": 1.1965,
      "step": 122
    },
    {
      "epoch": 1.5789473684210527,
      "grad_norm": 0.27310946583747864,
      "learning_rate": 9.942825574962595e-07,
      "loss": 1.2491,
      "step": 123
    },
    {
      "epoch": 1.5921052631578947,
      "grad_norm": 0.30951154232025146,
      "learning_rate": 9.290655664821296e-07,
      "loss": 1.2388,
      "step": 124
    },
    {
      "epoch": 1.6052631578947367,
      "grad_norm": 0.2787538170814514,
      "learning_rate": 8.658410806054568e-07,
      "loss": 1.167,
      "step": 125
    },
    {
      "epoch": 1.618421052631579,
      "grad_norm": 0.30251288414001465,
      "learning_rate": 8.046400448777575e-07,
      "loss": 1.2439,
      "step": 126
    },
    {
      "epoch": 1.631578947368421,
      "grad_norm": 0.2696026563644409,
      "learning_rate": 7.45492413939689e-07,
      "loss": 1.2197,
      "step": 127
    },
    {
      "epoch": 1.6447368421052633,
      "grad_norm": 0.26345524191856384,
      "learning_rate": 6.884271373998608e-07,
      "loss": 1.2436,
      "step": 128
    },
    {
      "epoch": 1.6578947368421053,
      "grad_norm": 0.25275254249572754,
      "learning_rate": 6.334721456655363e-07,
      "loss": 1.2608,
      "step": 129
    },
    {
      "epoch": 1.6710526315789473,
      "grad_norm": 0.2568216323852539,
      "learning_rate": 5.806543362721945e-07,
      "loss": 1.2484,
      "step": 130
    },
    {
      "epoch": 1.6842105263157894,
      "grad_norm": 0.26159876585006714,
      "learning_rate": 5.29999560718622e-07,
      "loss": 1.225,
      "step": 131
    },
    {
      "epoch": 1.6973684210526314,
      "grad_norm": 0.2594353258609772,
      "learning_rate": 4.815326118139813e-07,
      "loss": 1.2002,
      "step": 132
    },
    {
      "epoch": 1.7105263157894737,
      "grad_norm": 0.2540861964225769,
      "learning_rate": 4.3527721154305703e-07,
      "loss": 1.2273,
      "step": 133
    },
    {
      "epoch": 1.7105263157894737,
      "eval_loss": 1.2666972875595093,
      "eval_runtime": 11.1757,
      "eval_samples_per_second": 13.243,
      "eval_steps_per_second": 1.7,
      "step": 133
    },
    {
      "epoch": 1.723684210526316,
      "grad_norm": 0.23720534145832062,
      "learning_rate": 3.9125599945560866e-07,
      "loss": 1.2512,
      "step": 134
    },
    {
      "epoch": 1.736842105263158,
      "grad_norm": 0.26305046677589417,
      "learning_rate": 3.4949052158551875e-07,
      "loss": 1.235,
      "step": 135
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.2618529796600342,
      "learning_rate": 3.100012199051627e-07,
      "loss": 1.2146,
      "step": 136
    },
    {
      "epoch": 1.763157894736842,
      "grad_norm": 0.27660179138183594,
      "learning_rate": 2.728074223201488e-07,
      "loss": 1.2123,
      "step": 137
    },
    {
      "epoch": 1.776315789473684,
      "grad_norm": 0.2622484266757965,
      "learning_rate": 2.3792733320934348e-07,
      "loss": 1.2098,
      "step": 138
    },
    {
      "epoch": 1.7894736842105263,
      "grad_norm": 0.2647554576396942,
      "learning_rate": 2.053780245147996e-07,
      "loss": 1.1725,
      "step": 139
    },
    {
      "epoch": 1.8026315789473686,
      "grad_norm": 0.2567311227321625,
      "learning_rate": 1.7517542738595071e-07,
      "loss": 1.1835,
      "step": 140
    },
    {
      "epoch": 1.8157894736842106,
      "grad_norm": 0.2933162450790405,
      "learning_rate": 1.47334324382164e-07,
      "loss": 1.2196,
      "step": 141
    },
    {
      "epoch": 1.8289473684210527,
      "grad_norm": 0.2644355595111847,
      "learning_rate": 1.2186834223746612e-07,
      "loss": 1.1582,
      "step": 142
    },
    {
      "epoch": 1.8421052631578947,
      "grad_norm": 0.2735837697982788,
      "learning_rate": 9.878994519098573e-08,
      "loss": 1.2063,
      "step": 143
    },
    {
      "epoch": 1.8552631578947367,
      "grad_norm": 0.28176724910736084,
      "learning_rate": 7.81104288863721e-08,
      "loss": 1.254,
      "step": 144
    },
    {
      "epoch": 1.868421052631579,
      "grad_norm": 0.2595096230506897,
      "learning_rate": 5.983991484317997e-08,
      "loss": 1.218,
      "step": 145
    },
    {
      "epoch": 1.881578947368421,
      "grad_norm": 0.2540487051010132,
      "learning_rate": 4.398734550292716e-08,
      "loss": 1.197,
      "step": 146
    },
    {
      "epoch": 1.8947368421052633,
      "grad_norm": 0.2847471833229065,
      "learning_rate": 3.0560479852246304e-08,
      "loss": 1.1756,
      "step": 147
    },
    {
      "epoch": 1.9078947368421053,
      "grad_norm": 0.28027528524398804,
      "learning_rate": 1.9565889625275945e-08,
      "loss": 1.239,
      "step": 148
    },
    {
      "epoch": 1.9210526315789473,
      "grad_norm": 0.268658310174942,
      "learning_rate": 1.1008956087144585e-08,
      "loss": 1.2424,
      "step": 149
    },
    {
      "epoch": 1.9342105263157894,
      "grad_norm": 0.2798985540866852,
      "learning_rate": 4.89386740013198e-09,
      "loss": 1.2587,
      "step": 150
    },
    {
      "epoch": 1.9473684210526314,
      "grad_norm": 0.24452678859233856,
      "learning_rate": 1.2236165737850025e-09,
      "loss": 1.2067,
      "step": 151
    },
    {
      "epoch": 1.9605263157894737,
      "grad_norm": 0.26837992668151855,
      "learning_rate": 0.0,
      "loss": 1.2018,
      "step": 152
    },
    {
      "epoch": 1.9605263157894737,
      "eval_loss": 1.2660664319992065,
      "eval_runtime": 11.23,
      "eval_samples_per_second": 13.179,
      "eval_steps_per_second": 1.692,
      "step": 152
    }
  ],
  "logging_steps": 1,
  "max_steps": 152,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 38,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.408145505581793e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}