bgem3_finetune / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 29864,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01674256630056255,
"grad_norm": 0.26318585872650146,
"learning_rate": 1.6538332775359894e-06,
"loss": 0.1468,
"step": 500
},
{
"epoch": 0.0334851326011251,
"grad_norm": 40.848941802978516,
"learning_rate": 3.3244057582859056e-06,
"loss": 0.1348,
"step": 1000
},
{
"epoch": 0.05022769890168765,
"grad_norm": 0.3210512399673462,
"learning_rate": 4.9983260796786076e-06,
"loss": 0.1292,
"step": 1500
},
{
"epoch": 0.0669702652022502,
"grad_norm": 51.398277282714844,
"learning_rate": 6.6722464010713095e-06,
"loss": 0.1246,
"step": 2000
},
{
"epoch": 0.08371283150281275,
"grad_norm": 1.8654736280441284,
"learning_rate": 8.346166722464011e-06,
"loss": 0.073,
"step": 2500
},
{
"epoch": 0.1004553978033753,
"grad_norm": 2.204322338104248,
"learning_rate": 9.997767607991965e-06,
"loss": 0.0861,
"step": 3000
},
{
"epoch": 0.11719796410393785,
"grad_norm": 0.029873741790652275,
"learning_rate": 9.81173494065558e-06,
"loss": 0.0626,
"step": 3500
},
{
"epoch": 0.1339405304045004,
"grad_norm": 2.218162775039673,
"learning_rate": 9.626074338653869e-06,
"loss": 0.0731,
"step": 4000
},
{
"epoch": 0.15068309670506294,
"grad_norm": 6.862820625305176,
"learning_rate": 9.440041671317484e-06,
"loss": 0.0756,
"step": 4500
},
{
"epoch": 0.1674256630056255,
"grad_norm": 1.0643534660339355,
"learning_rate": 9.2540090039811e-06,
"loss": 0.0702,
"step": 5000
},
{
"epoch": 0.18416822930618806,
"grad_norm": 0.14988207817077637,
"learning_rate": 9.067976336644717e-06,
"loss": 0.0675,
"step": 5500
},
{
"epoch": 0.2009107956067506,
"grad_norm": 0.25116392970085144,
"learning_rate": 8.882315734643004e-06,
"loss": 0.0604,
"step": 6000
},
{
"epoch": 0.21765336190731316,
"grad_norm": 0.3021365702152252,
"learning_rate": 8.69628306730662e-06,
"loss": 0.0515,
"step": 6500
},
{
"epoch": 0.2343959282078757,
"grad_norm": 22.51474952697754,
"learning_rate": 8.510250399970235e-06,
"loss": 0.0643,
"step": 7000
},
{
"epoch": 0.2511384945084382,
"grad_norm": 2.37544584274292,
"learning_rate": 8.324217732633852e-06,
"loss": 0.07,
"step": 7500
},
{
"epoch": 0.2678810608090008,
"grad_norm": 57.23020553588867,
"learning_rate": 8.138185065297467e-06,
"loss": 0.0557,
"step": 8000
},
{
"epoch": 0.28462362710956335,
"grad_norm": 0.0032292637042701244,
"learning_rate": 7.952152397961083e-06,
"loss": 0.0614,
"step": 8500
},
{
"epoch": 0.3013661934101259,
"grad_norm": 5.582527160644531,
"learning_rate": 7.766119730624698e-06,
"loss": 0.0577,
"step": 9000
},
{
"epoch": 0.31810875971068847,
"grad_norm": 0.04729452729225159,
"learning_rate": 7.580087063288314e-06,
"loss": 0.0582,
"step": 9500
},
{
"epoch": 0.334851326011251,
"grad_norm": 2.683903217315674,
"learning_rate": 7.394426461286602e-06,
"loss": 0.0549,
"step": 10000
},
{
"epoch": 0.35159389231181354,
"grad_norm": 0.9779844880104065,
"learning_rate": 7.208393793950218e-06,
"loss": 0.0615,
"step": 10500
},
{
"epoch": 0.3683364586123761,
"grad_norm": 79.76528930664062,
"learning_rate": 7.0223611266138335e-06,
"loss": 0.0519,
"step": 11000
},
{
"epoch": 0.38507902491293866,
"grad_norm": 0.3208529055118561,
"learning_rate": 6.836328459277449e-06,
"loss": 0.0591,
"step": 11500
},
{
"epoch": 0.4018215912135012,
"grad_norm": 23.068653106689453,
"learning_rate": 6.650295791941065e-06,
"loss": 0.0539,
"step": 12000
},
{
"epoch": 0.41856415751406373,
"grad_norm": 0.6496044993400574,
"learning_rate": 6.464635189939353e-06,
"loss": 0.0549,
"step": 12500
},
{
"epoch": 0.4353067238146263,
"grad_norm": 38.81090545654297,
"learning_rate": 6.2786025226029705e-06,
"loss": 0.0543,
"step": 13000
},
{
"epoch": 0.45204929011518885,
"grad_norm": 2.4993317127227783,
"learning_rate": 6.092569855266585e-06,
"loss": 0.0614,
"step": 13500
},
{
"epoch": 0.4687918564157514,
"grad_norm": 0.5560814738273621,
"learning_rate": 5.906537187930201e-06,
"loss": 0.048,
"step": 14000
},
{
"epoch": 0.485534422716314,
"grad_norm": 1.5382946729660034,
"learning_rate": 5.720504520593817e-06,
"loss": 0.0558,
"step": 14500
},
{
"epoch": 0.5022769890168765,
"grad_norm": 0.026120496913790703,
"learning_rate": 5.534471853257432e-06,
"loss": 0.0492,
"step": 15000
},
{
"epoch": 0.519019555317439,
"grad_norm": 1.6323872804641724,
"learning_rate": 5.348439185921048e-06,
"loss": 0.0633,
"step": 15500
},
{
"epoch": 0.5357621216180016,
"grad_norm": 0.012650725431740284,
"learning_rate": 5.162406518584664e-06,
"loss": 0.0447,
"step": 16000
},
{
"epoch": 0.5525046879185641,
"grad_norm": 8.42776107788086,
"learning_rate": 4.976745916582952e-06,
"loss": 0.043,
"step": 16500
},
{
"epoch": 0.5692472542191267,
"grad_norm": 2.717674732208252,
"learning_rate": 4.790713249246568e-06,
"loss": 0.0517,
"step": 17000
},
{
"epoch": 0.5859898205196893,
"grad_norm": 0.01320638321340084,
"learning_rate": 4.604680581910183e-06,
"loss": 0.0768,
"step": 17500
},
{
"epoch": 0.6027323868202518,
"grad_norm": 68.84772491455078,
"learning_rate": 4.418647914573799e-06,
"loss": 0.0578,
"step": 18000
},
{
"epoch": 0.6194749531208144,
"grad_norm": 0.05758577585220337,
"learning_rate": 4.232987312572088e-06,
"loss": 0.0575,
"step": 18500
},
{
"epoch": 0.6362175194213769,
"grad_norm": 0.02908864989876747,
"learning_rate": 4.046954645235704e-06,
"loss": 0.0527,
"step": 19000
},
{
"epoch": 0.6529600857219394,
"grad_norm": 5.406867027282715,
"learning_rate": 3.86092197789932e-06,
"loss": 0.0556,
"step": 19500
},
{
"epoch": 0.669702652022502,
"grad_norm": 0.1083500012755394,
"learning_rate": 3.6748893105629353e-06,
"loss": 0.0607,
"step": 20000
},
{
"epoch": 0.6864452183230646,
"grad_norm": 13.136815071105957,
"learning_rate": 3.488856643226551e-06,
"loss": 0.0608,
"step": 20500
},
{
"epoch": 0.7031877846236271,
"grad_norm": 0.7505860328674316,
"learning_rate": 3.3028239758901666e-06,
"loss": 0.0691,
"step": 21000
},
{
"epoch": 0.7199303509241897,
"grad_norm": 56.34339141845703,
"learning_rate": 3.1167913085537823e-06,
"loss": 0.0563,
"step": 21500
},
{
"epoch": 0.7366729172247523,
"grad_norm": 0.267652302980423,
"learning_rate": 2.9307586412173984e-06,
"loss": 0.0688,
"step": 22000
},
{
"epoch": 0.7534154835253147,
"grad_norm": 4.030105113983154,
"learning_rate": 2.7450980392156867e-06,
"loss": 0.0637,
"step": 22500
},
{
"epoch": 0.7701580498258773,
"grad_norm": 1.6611367464065552,
"learning_rate": 2.5590653718793024e-06,
"loss": 0.0724,
"step": 23000
},
{
"epoch": 0.7869006161264399,
"grad_norm": 65.19082641601562,
"learning_rate": 2.3734047698775908e-06,
"loss": 0.0759,
"step": 23500
},
{
"epoch": 0.8036431824270024,
"grad_norm": 2.824739694595337,
"learning_rate": 2.1873721025412064e-06,
"loss": 0.0675,
"step": 24000
},
{
"epoch": 0.820385748727565,
"grad_norm": 1.6871856451034546,
"learning_rate": 2.001339435204822e-06,
"loss": 0.1016,
"step": 24500
},
{
"epoch": 0.8371283150281275,
"grad_norm": 73.87975311279297,
"learning_rate": 1.8156788332031105e-06,
"loss": 0.0995,
"step": 25000
},
{
"epoch": 0.85387088132869,
"grad_norm": 1.2010084390640259,
"learning_rate": 1.6296461658667261e-06,
"loss": 0.0871,
"step": 25500
},
{
"epoch": 0.8706134476292526,
"grad_norm": 8.91370964050293,
"learning_rate": 1.443613498530342e-06,
"loss": 0.0936,
"step": 26000
},
{
"epoch": 0.8873560139298151,
"grad_norm": 8.569842338562012,
"learning_rate": 1.2575808311939577e-06,
"loss": 0.0927,
"step": 26500
},
{
"epoch": 0.9040985802303777,
"grad_norm": 0.14943744242191315,
"learning_rate": 1.0715481638575736e-06,
"loss": 0.1108,
"step": 27000
},
{
"epoch": 0.9208411465309403,
"grad_norm": 48.06157684326172,
"learning_rate": 8.855154965211891e-07,
"loss": 0.1141,
"step": 27500
},
{
"epoch": 0.9375837128315028,
"grad_norm": 46.07848358154297,
"learning_rate": 6.994828291848049e-07,
"loss": 0.118,
"step": 28000
},
{
"epoch": 0.9543262791320654,
"grad_norm": 1.0916130542755127,
"learning_rate": 5.134501618484206e-07,
"loss": 0.138,
"step": 28500
},
{
"epoch": 0.971068845432628,
"grad_norm": 0.2534860074520111,
"learning_rate": 3.277895598467091e-07,
"loss": 0.1425,
"step": 29000
},
{
"epoch": 0.9878114117331904,
"grad_norm": 0.08512990921735764,
"learning_rate": 1.421289578449976e-07,
"loss": 0.1432,
"step": 29500
}
],
"logging_steps": 500,
"max_steps": 29864,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
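
The file above is the standard trainer_state.json written by the Hugging Face transformers Trainer for this bgem3_finetune run (one epoch, 29864 steps, logging every 500 steps). Below is a minimal sketch of reading the loss and learning-rate curves back out of log_history; the local file path is an assumption, and the key names are taken directly from the JSON shown here.

import json

# Load the trainer state (path is an assumption; point it at this checkpoint's copy).
with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# Each log_history entry carries step, epoch, loss, learning_rate, and grad_norm,
# emitted every `logging_steps` (500) training steps.
train_logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]
lrs = [e["learning_rate"] for e in train_logs]

print(f"global_step: {state['global_step']} / max_steps: {state['max_steps']}")
print(f"first logged loss: {losses[0]:.4f} at step {steps[0]}")
print(f"last logged loss:  {losses[-1]:.4f} at step {steps[-1]}")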