{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.998691442030882, |
|
"eval_steps": 500, |
|
"global_step": 477, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.010468463752944255, |
|
"grad_norm": 0.1797010649383585, |
|
"learning_rate": 5.208333333333333e-08, |
|
"logits/chosen": -1.448793649673462, |
|
"logits/rejected": -1.450628638267517, |
|
"logps/chosen": -7.982501983642578, |
|
"logps/rejected": -8.156150817871094, |
|
"loss": -0.0009, |
|
"rewards/accuracies": 0.5062500238418579, |
|
"rewards/chosen": -7.982501983642578, |
|
"rewards/margins": 0.17364946007728577, |
|
"rewards/rejected": -8.156150817871094, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02093692750588851, |
|
"grad_norm": 0.07416876680191102, |
|
"learning_rate": 1.0416666666666667e-07, |
|
"logits/chosen": -1.4468624591827393, |
|
"logits/rejected": -1.4378924369812012, |
|
"logps/chosen": -8.046422958374023, |
|
"logps/rejected": -7.960408687591553, |
|
"loss": 0.0001, |
|
"rewards/accuracies": 0.4937500059604645, |
|
"rewards/chosen": -8.046422958374023, |
|
"rewards/margins": -0.08601457625627518, |
|
"rewards/rejected": -7.960408687591553, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.031405391258832765, |
|
"grad_norm": 0.1932776998667392, |
|
"learning_rate": 1.5624999999999999e-07, |
|
"logits/chosen": -1.4454925060272217, |
|
"logits/rejected": -1.4418766498565674, |
|
"logps/chosen": -7.853631496429443, |
|
"logps/rejected": -7.868775844573975, |
|
"loss": -0.0011, |
|
"rewards/accuracies": 0.5, |
|
"rewards/chosen": -7.853631496429443, |
|
"rewards/margins": 0.01514405943453312, |
|
"rewards/rejected": -7.868775844573975, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04187385501177702, |
|
"grad_norm": 0.18172311042773612, |
|
"learning_rate": 2.0833333333333333e-07, |
|
"logits/chosen": -1.4386684894561768, |
|
"logits/rejected": -1.4446001052856445, |
|
"logps/chosen": -8.188841819763184, |
|
"logps/rejected": -8.21079158782959, |
|
"loss": 0.0005, |
|
"rewards/accuracies": 0.5, |
|
"rewards/chosen": -8.188841819763184, |
|
"rewards/margins": 0.02194919064640999, |
|
"rewards/rejected": -8.21079158782959, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.05234231876472128, |
|
"grad_norm": 0.07565588877379835, |
|
"learning_rate": 2.604166666666667e-07, |
|
"logits/chosen": -1.469273328781128, |
|
"logits/rejected": -1.4595043659210205, |
|
"logps/chosen": -8.117727279663086, |
|
"logps/rejected": -8.016858100891113, |
|
"loss": 0.0008, |
|
"rewards/accuracies": 0.4375, |
|
"rewards/chosen": -8.117727279663086, |
|
"rewards/margins": -0.1008685976266861, |
|
"rewards/rejected": -8.016858100891113, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.06281078251766553, |
|
"grad_norm": 0.15180659620060438, |
|
"learning_rate": 3.1249999999999997e-07, |
|
"logits/chosen": -1.4331070184707642, |
|
"logits/rejected": -1.4219419956207275, |
|
"logps/chosen": -7.856657981872559, |
|
"logps/rejected": -7.774781703948975, |
|
"loss": 0.001, |
|
"rewards/accuracies": 0.48124998807907104, |
|
"rewards/chosen": -7.856657981872559, |
|
"rewards/margins": -0.08187668025493622, |
|
"rewards/rejected": -7.774781703948975, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.07327924627060979, |
|
"grad_norm": 0.08490698432858583, |
|
"learning_rate": 3.645833333333333e-07, |
|
"logits/chosen": -1.4420219659805298, |
|
"logits/rejected": -1.4201197624206543, |
|
"logps/chosen": -8.129011154174805, |
|
"logps/rejected": -7.994417667388916, |
|
"loss": -0.0007, |
|
"rewards/accuracies": 0.5062500238418579, |
|
"rewards/chosen": -8.129011154174805, |
|
"rewards/margins": -0.13459424674510956, |
|
"rewards/rejected": -7.994417667388916, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.08374771002355404, |
|
"grad_norm": 0.18105138747703975, |
|
"learning_rate": 4.1666666666666667e-07, |
|
"logits/chosen": -1.420586109161377, |
|
"logits/rejected": -1.4364769458770752, |
|
"logps/chosen": -8.086874961853027, |
|
"logps/rejected": -8.243904113769531, |
|
"loss": -0.0007, |
|
"rewards/accuracies": 0.46875, |
|
"rewards/chosen": -8.086874961853027, |
|
"rewards/margins": 0.15702828764915466, |
|
"rewards/rejected": -8.243904113769531, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0942161737764983, |
|
"grad_norm": 0.05754725019388417, |
|
"learning_rate": 4.6874999999999996e-07, |
|
"logits/chosen": -1.4414559602737427, |
|
"logits/rejected": -1.4433170557022095, |
|
"logps/chosen": -8.102398872375488, |
|
"logps/rejected": -7.88956356048584, |
|
"loss": -0.0001, |
|
"rewards/accuracies": 0.4375, |
|
"rewards/chosen": -8.102398872375488, |
|
"rewards/margins": -0.21283535659313202, |
|
"rewards/rejected": -7.88956356048584, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.10468463752944256, |
|
"grad_norm": 0.09776323510444927, |
|
"learning_rate": 4.999731868769026e-07, |
|
"logits/chosen": -1.4331969022750854, |
|
"logits/rejected": -1.451460838317871, |
|
"logps/chosen": -8.243871688842773, |
|
"logps/rejected": -8.234269142150879, |
|
"loss": 0.0006, |
|
"rewards/accuracies": 0.48124998807907104, |
|
"rewards/chosen": -8.243871688842773, |
|
"rewards/margins": -0.009602278470993042, |
|
"rewards/rejected": -8.234269142150879, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.11515310128238682, |
|
"grad_norm": 0.06802225811273538, |
|
"learning_rate": 4.996716052911017e-07, |
|
"logits/chosen": -1.4432759284973145, |
|
"logits/rejected": -1.4394512176513672, |
|
"logps/chosen": -8.15147876739502, |
|
"logps/rejected": -8.12590217590332, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.42500001192092896, |
|
"rewards/chosen": -8.15147876739502, |
|
"rewards/margins": -0.025576595216989517, |
|
"rewards/rejected": -8.12590217590332, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.12562156503533106, |
|
"grad_norm": 0.19637523644104168, |
|
"learning_rate": 4.990353313429303e-07, |
|
"logits/chosen": -1.4439882040023804, |
|
"logits/rejected": -1.4562370777130127, |
|
"logps/chosen": -8.153823852539062, |
|
"logps/rejected": -8.392339706420898, |
|
"loss": -0.0006, |
|
"rewards/accuracies": 0.574999988079071, |
|
"rewards/chosen": -8.153823852539062, |
|
"rewards/margins": 0.23851680755615234, |
|
"rewards/rejected": -8.392339706420898, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1360900287882753, |
|
"grad_norm": 0.15858568302533665, |
|
"learning_rate": 4.980652179769217e-07, |
|
"logits/chosen": -1.3945095539093018, |
|
"logits/rejected": -1.4153038263320923, |
|
"logps/chosen": -7.983679294586182, |
|
"logps/rejected": -8.382383346557617, |
|
"loss": 0.0001, |
|
"rewards/accuracies": 0.574999988079071, |
|
"rewards/chosen": -7.983679294586182, |
|
"rewards/margins": 0.3987043499946594, |
|
"rewards/rejected": -8.382383346557617, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.14655849254121958, |
|
"grad_norm": 0.449952417527854, |
|
"learning_rate": 4.967625656594781e-07, |
|
"logits/chosen": -1.3980720043182373, |
|
"logits/rejected": -1.3917102813720703, |
|
"logps/chosen": -8.066298484802246, |
|
"logps/rejected": -8.08208179473877, |
|
"loss": 0.0015, |
|
"rewards/accuracies": 0.5062500238418579, |
|
"rewards/chosen": -8.066298484802246, |
|
"rewards/margins": 0.015783464536070824, |
|
"rewards/rejected": -8.08208179473877, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.15702695629416383, |
|
"grad_norm": 0.0694833097923889, |
|
"learning_rate": 4.951291206355559e-07, |
|
"logits/chosen": -1.4114166498184204, |
|
"logits/rejected": -1.4182840585708618, |
|
"logps/chosen": -8.104999542236328, |
|
"logps/rejected": -8.062032699584961, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.45625001192092896, |
|
"rewards/chosen": -8.104999542236328, |
|
"rewards/margins": -0.042965833097696304, |
|
"rewards/rejected": -8.062032699584961, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.16749542004710807, |
|
"grad_norm": 0.13732894835039064, |
|
"learning_rate": 4.93167072587771e-07, |
|
"logits/chosen": -1.416013479232788, |
|
"logits/rejected": -1.4303759336471558, |
|
"logps/chosen": -8.252126693725586, |
|
"logps/rejected": -8.272165298461914, |
|
"loss": -0.0007, |
|
"rewards/accuracies": 0.4437499940395355, |
|
"rewards/chosen": -8.252126693725586, |
|
"rewards/margins": 0.020038936287164688, |
|
"rewards/rejected": -8.272165298461914, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.17796388380005235, |
|
"grad_norm": 0.13841613125213173, |
|
"learning_rate": 4.908790517010636e-07, |
|
"logits/chosen": -1.4297425746917725, |
|
"logits/rejected": -1.4293019771575928, |
|
"logps/chosen": -8.420356750488281, |
|
"logps/rejected": -8.493918418884277, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.48124998807907104, |
|
"rewards/chosen": -8.420356750488281, |
|
"rewards/margins": 0.07356245070695877, |
|
"rewards/rejected": -8.493918418884277, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.1884323475529966, |
|
"grad_norm": 0.26065093480528173, |
|
"learning_rate": 4.882681251368548e-07, |
|
"logits/chosen": -1.411726713180542, |
|
"logits/rejected": -1.4245941638946533, |
|
"logps/chosen": -8.424099922180176, |
|
"logps/rejected": -8.504185676574707, |
|
"loss": -0.0001, |
|
"rewards/accuracies": 0.48750001192092896, |
|
"rewards/chosen": -8.424099922180176, |
|
"rewards/margins": 0.08008553832769394, |
|
"rewards/rejected": -8.504185676574707, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.19890081130594087, |
|
"grad_norm": 0.13775483175577152, |
|
"learning_rate": 4.853377929214243e-07, |
|
"logits/chosen": -1.4114550352096558, |
|
"logits/rejected": -1.4067610502243042, |
|
"logps/chosen": -8.460356712341309, |
|
"logps/rejected": -8.213418960571289, |
|
"loss": 0.0002, |
|
"rewards/accuracies": 0.3812499940395355, |
|
"rewards/chosen": -8.460356712341309, |
|
"rewards/margins": -0.24693819880485535, |
|
"rewards/rejected": -8.213418960571289, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.2093692750588851, |
|
"grad_norm": 0.0669599374264611, |
|
"learning_rate": 4.820919832540181e-07, |
|
"logits/chosen": -1.4511619806289673, |
|
"logits/rejected": -1.4595460891723633, |
|
"logps/chosen": -8.38967514038086, |
|
"logps/rejected": -8.327149391174316, |
|
"loss": 0.0005, |
|
"rewards/accuracies": 0.4625000059604645, |
|
"rewards/chosen": -8.38967514038086, |
|
"rewards/margins": -0.06252529472112656, |
|
"rewards/rejected": -8.327149391174316, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.21983773881182936, |
|
"grad_norm": 0.03744091461066133, |
|
"learning_rate": 4.785350472409791e-07, |
|
"logits/chosen": -1.4548447132110596, |
|
"logits/rejected": -1.4479488134384155, |
|
"logps/chosen": -8.238606452941895, |
|
"logps/rejected": -8.342194557189941, |
|
"loss": 0.0003, |
|
"rewards/accuracies": 0.5375000238418579, |
|
"rewards/chosen": -8.238606452941895, |
|
"rewards/margins": 0.10358722507953644, |
|
"rewards/rejected": -8.342194557189941, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.23030620256477363, |
|
"grad_norm": 0.2325346467677191, |
|
"learning_rate": 4.7467175306295647e-07, |
|
"logits/chosen": -1.4924818277359009, |
|
"logits/rejected": -1.4839593172073364, |
|
"logps/chosen": -8.65635871887207, |
|
"logps/rejected": -8.75800895690918, |
|
"loss": -0.0001, |
|
"rewards/accuracies": 0.5562499761581421, |
|
"rewards/chosen": -8.65635871887207, |
|
"rewards/margins": 0.10164938122034073, |
|
"rewards/rejected": -8.75800895690918, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.24077466631771788, |
|
"grad_norm": 0.07013048443895094, |
|
"learning_rate": 4.70507279583015e-07, |
|
"logits/chosen": -1.4494998455047607, |
|
"logits/rejected": -1.4506022930145264, |
|
"logps/chosen": -8.537219047546387, |
|
"logps/rejected": -8.694546699523926, |
|
"loss": 0.0002, |
|
"rewards/accuracies": 0.53125, |
|
"rewards/chosen": -8.537219047546387, |
|
"rewards/margins": 0.1573261320590973, |
|
"rewards/rejected": -8.694546699523926, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.2512431300706621, |
|
"grad_norm": 0.13941104681218558, |
|
"learning_rate": 4.6604720940421207e-07, |
|
"logits/chosen": -1.4503843784332275, |
|
"logits/rejected": -1.4492030143737793, |
|
"logps/chosen": -8.896661758422852, |
|
"logps/rejected": -8.842092514038086, |
|
"loss": 0.0006, |
|
"rewards/accuracies": 0.5, |
|
"rewards/chosen": -8.896661758422852, |
|
"rewards/margins": -0.054569344967603683, |
|
"rewards/rejected": -8.842092514038086, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.26171159382360637, |
|
"grad_norm": 0.01588215105344238, |
|
"learning_rate": 4.612975213859487e-07, |
|
"logits/chosen": -1.5376265048980713, |
|
"logits/rejected": -1.5326207876205444, |
|
"logps/chosen": -9.327282905578613, |
|
"logps/rejected": -9.20351505279541, |
|
"loss": 0.0004, |
|
"rewards/accuracies": 0.46875, |
|
"rewards/chosen": -9.327282905578613, |
|
"rewards/margins": -0.12376775592565536, |
|
"rewards/rejected": -9.20351505279541, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.2721800575765506, |
|
"grad_norm": 0.060048076521496456, |
|
"learning_rate": 4.5626458262912735e-07, |
|
"logits/chosen": -1.560910701751709, |
|
"logits/rejected": -1.5665909051895142, |
|
"logps/chosen": -9.537469863891602, |
|
"logps/rejected": -9.556791305541992, |
|
"loss": -0.0001, |
|
"rewards/accuracies": 0.5, |
|
"rewards/chosen": -9.537469863891602, |
|
"rewards/margins": 0.019321158528327942, |
|
"rewards/rejected": -9.556791305541992, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.2826485213294949, |
|
"grad_norm": 0.010696845716364521, |
|
"learning_rate": 4.5095513994085974e-07, |
|
"logits/chosen": -1.5800793170928955, |
|
"logits/rejected": -1.582044243812561, |
|
"logps/chosen": -9.630374908447266, |
|
"logps/rejected": -9.887577056884766, |
|
"loss": 0.0001, |
|
"rewards/accuracies": 0.543749988079071, |
|
"rewards/chosen": -9.630374908447266, |
|
"rewards/margins": 0.2572011351585388, |
|
"rewards/rejected": -9.887577056884766, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.29311698508243916, |
|
"grad_norm": 0.03319847693750231, |
|
"learning_rate": 4.453763107901675e-07, |
|
"logits/chosen": -1.622972846031189, |
|
"logits/rejected": -1.6185715198516846, |
|
"logps/chosen": -9.712789535522461, |
|
"logps/rejected": -9.67750358581543, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.44999998807907104, |
|
"rewards/chosen": -9.712789535522461, |
|
"rewards/margins": -0.03528591990470886, |
|
"rewards/rejected": -9.67750358581543, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.3035854488353834, |
|
"grad_norm": 0.03281163505651298, |
|
"learning_rate": 4.395355737667985e-07, |
|
"logits/chosen": -1.6432979106903076, |
|
"logits/rejected": -1.6470788717269897, |
|
"logps/chosen": -9.864433288574219, |
|
"logps/rejected": -9.815264701843262, |
|
"loss": 0.0001, |
|
"rewards/accuracies": 0.518750011920929, |
|
"rewards/chosen": -9.864433288574219, |
|
"rewards/margins": -0.049169473350048065, |
|
"rewards/rejected": -9.815264701843262, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.31405391258832765, |
|
"grad_norm": 0.023094845909487956, |
|
"learning_rate": 4.3344075855595097e-07, |
|
"logits/chosen": -1.6595284938812256, |
|
"logits/rejected": -1.669385552406311, |
|
"logps/chosen": -9.924379348754883, |
|
"logps/rejected": -9.930808067321777, |
|
"loss": -0.0003, |
|
"rewards/accuracies": 0.41874998807907104, |
|
"rewards/chosen": -9.924379348754883, |
|
"rewards/margins": 0.006428956985473633, |
|
"rewards/rejected": -9.930808067321777, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.3245223763412719, |
|
"grad_norm": 0.1908026987463115, |
|
"learning_rate": 4.271000354423425e-07, |
|
"logits/chosen": -1.6287696361541748, |
|
"logits/rejected": -1.6056560277938843, |
|
"logps/chosen": -9.800788879394531, |
|
"logps/rejected": -9.611404418945312, |
|
"loss": 0.0001, |
|
"rewards/accuracies": 0.4312500059604645, |
|
"rewards/chosen": -9.800788879394531, |
|
"rewards/margins": -0.1893850713968277, |
|
"rewards/rejected": -9.611404418945312, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.33499084009421615, |
|
"grad_norm": 0.03202775302677848, |
|
"learning_rate": 4.2052190435769554e-07, |
|
"logits/chosen": -1.6098477840423584, |
|
"logits/rejected": -1.6122195720672607, |
|
"logps/chosen": -9.803987503051758, |
|
"logps/rejected": -9.737637519836426, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.48750001192092896, |
|
"rewards/chosen": -9.803987503051758, |
|
"rewards/margins": -0.06634993851184845, |
|
"rewards/rejected": -9.737637519836426, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.34545930384716045, |
|
"grad_norm": 0.07232776561687705, |
|
"learning_rate": 4.137151834863213e-07, |
|
"logits/chosen": -1.65714430809021, |
|
"logits/rejected": -1.6471843719482422, |
|
"logps/chosen": -9.869422912597656, |
|
"logps/rejected": -9.760724067687988, |
|
"loss": 0.0003, |
|
"rewards/accuracies": 0.4625000059604645, |
|
"rewards/chosen": -9.869422912597656, |
|
"rewards/margins": -0.10870015621185303, |
|
"rewards/rejected": -9.760724067687988, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.3559277676001047, |
|
"grad_norm": 0.024726012832997274, |
|
"learning_rate": 4.0668899744407567e-07, |
|
"logits/chosen": -1.6514545679092407, |
|
"logits/rejected": -1.6379756927490234, |
|
"logps/chosen": -10.327203750610352, |
|
"logps/rejected": -10.077325820922852, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.4312500059604645, |
|
"rewards/chosen": -10.327203750610352, |
|
"rewards/margins": -0.24987690150737762, |
|
"rewards/rejected": -10.077325820922852, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.36639623135304894, |
|
"grad_norm": 0.15695534318195675, |
|
"learning_rate": 3.994527650465352e-07, |
|
"logits/chosen": -1.6916990280151367, |
|
"logits/rejected": -1.6754090785980225, |
|
"logps/chosen": -10.47531795501709, |
|
"logps/rejected": -10.233348846435547, |
|
"loss": -0.0001, |
|
"rewards/accuracies": 0.4000000059604645, |
|
"rewards/chosen": -10.47531795501709, |
|
"rewards/margins": -0.24196887016296387, |
|
"rewards/rejected": -10.233348846435547, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.3768646951059932, |
|
"grad_norm": 0.055831382164290344, |
|
"learning_rate": 3.920161866827889e-07, |
|
"logits/chosen": -1.7216554880142212, |
|
"logits/rejected": -1.7197192907333374, |
|
"logps/chosen": -10.651076316833496, |
|
"logps/rejected": -10.578369140625, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.4625000059604645, |
|
"rewards/chosen": -10.651076316833496, |
|
"rewards/margins": -0.07270796597003937, |
|
"rewards/rejected": -10.578369140625, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.38733315885893743, |
|
"grad_norm": 0.005192915052039036, |
|
"learning_rate": 3.8438923131177237e-07, |
|
"logits/chosen": -1.7085365056991577, |
|
"logits/rejected": -1.695673942565918, |
|
"logps/chosen": -10.687432289123535, |
|
"logps/rejected": -10.502934455871582, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.45625001192092896, |
|
"rewards/chosen": -10.687432289123535, |
|
"rewards/margins": -0.18449781835079193, |
|
"rewards/rejected": -10.502934455871582, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.39780162261188173, |
|
"grad_norm": 0.00386703700543382, |
|
"learning_rate": 3.765821230985757e-07, |
|
"logits/chosen": -1.7206989526748657, |
|
"logits/rejected": -1.7167381048202515, |
|
"logps/chosen": -10.415846824645996, |
|
"logps/rejected": -10.514742851257324, |
|
"loss": -0.0001, |
|
"rewards/accuracies": 0.48124998807907104, |
|
"rewards/chosen": -10.415846824645996, |
|
"rewards/margins": 0.09889563173055649, |
|
"rewards/rejected": -10.514742851257324, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.408270086364826, |
|
"grad_norm": 0.018906277546872338, |
|
"learning_rate": 3.6860532770864005e-07, |
|
"logits/chosen": -1.6922369003295898, |
|
"logits/rejected": -1.681626558303833, |
|
"logps/chosen": -10.679250717163086, |
|
"logps/rejected": -10.37684154510498, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.44999998807907104, |
|
"rewards/chosen": -10.679250717163086, |
|
"rewards/margins": -0.3024083971977234, |
|
"rewards/rejected": -10.37684154510498, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.4187385501177702, |
|
"grad_norm": 0.04569629831070709, |
|
"learning_rate": 3.604695382782159e-07, |
|
"logits/chosen": -1.7120405435562134, |
|
"logits/rejected": -1.7134020328521729, |
|
"logps/chosen": -10.352312088012695, |
|
"logps/rejected": -10.341361999511719, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.44999998807907104, |
|
"rewards/chosen": -10.352312088012695, |
|
"rewards/margins": -0.010949483141303062, |
|
"rewards/rejected": -10.341361999511719, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.42920701387071447, |
|
"grad_norm": 0.01589336857340258, |
|
"learning_rate": 3.5218566107988867e-07, |
|
"logits/chosen": -1.6780574321746826, |
|
"logits/rejected": -1.6641277074813843, |
|
"logps/chosen": -10.600610733032227, |
|
"logps/rejected": -10.596931457519531, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.46875, |
|
"rewards/chosen": -10.600610733032227, |
|
"rewards/margins": -0.003679597284644842, |
|
"rewards/rejected": -10.596931457519531, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.4396754776236587, |
|
"grad_norm": 0.017049353993959408, |
|
"learning_rate": 3.4376480090239047e-07, |
|
"logits/chosen": -1.6822540760040283, |
|
"logits/rejected": -1.6784160137176514, |
|
"logps/chosen": -10.497976303100586, |
|
"logps/rejected": -10.553048133850098, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.44999998807907104, |
|
"rewards/chosen": -10.497976303100586, |
|
"rewards/margins": 0.0550723597407341, |
|
"rewards/rejected": -10.553048133850098, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.45014394137660296, |
|
"grad_norm": 0.2539481968464961, |
|
"learning_rate": 3.3521824616429284e-07, |
|
"logits/chosen": -1.6765304803848267, |
|
"logits/rejected": -1.68255615234375, |
|
"logps/chosen": -10.369840621948242, |
|
"logps/rejected": -10.547749519348145, |
|
"loss": -0.0002, |
|
"rewards/accuracies": 0.5249999761581421, |
|
"rewards/chosen": -10.369840621948242, |
|
"rewards/margins": 0.17790831625461578, |
|
"rewards/rejected": -10.547749519348145, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.46061240512954726, |
|
"grad_norm": 0.03490868784910135, |
|
"learning_rate": 3.265574537815398e-07, |
|
"logits/chosen": -1.7063719034194946, |
|
"logits/rejected": -1.702691674232483, |
|
"logps/chosen": -10.227754592895508, |
|
"logps/rejected": -10.175545692443848, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.4749999940395355, |
|
"rewards/chosen": -10.227754592895508, |
|
"rewards/margins": -0.05220963433384895, |
|
"rewards/rejected": -10.175545692443848, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.4710808688824915, |
|
"grad_norm": 0.02089161978800978, |
|
"learning_rate": 3.1779403380910425e-07, |
|
"logits/chosen": -1.6891885995864868, |
|
"logits/rejected": -1.6823409795761108, |
|
"logps/chosen": -9.965743064880371, |
|
"logps/rejected": -10.085700035095215, |
|
"loss": -0.0001, |
|
"rewards/accuracies": 0.512499988079071, |
|
"rewards/chosen": -9.965743064880371, |
|
"rewards/margins": 0.11995597183704376, |
|
"rewards/rejected": -10.085700035095215, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.48154933263543576, |
|
"grad_norm": 0.042927151818770604, |
|
"learning_rate": 3.0893973387735683e-07, |
|
"logits/chosen": -1.6752849817276, |
|
"logits/rejected": -1.6569786071777344, |
|
"logps/chosen": -10.218798637390137, |
|
"logps/rejected": -10.10997200012207, |
|
"loss": -0.0004, |
|
"rewards/accuracies": 0.44999998807907104, |
|
"rewards/chosen": -10.218798637390137, |
|
"rewards/margins": -0.1088268980383873, |
|
"rewards/rejected": -10.10997200012207, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.49201779638838, |
|
"grad_norm": 0.055950224699650504, |
|
"learning_rate": 3.000064234440111e-07, |
|
"logits/chosen": -1.6750266551971436, |
|
"logits/rejected": -1.6845659017562866, |
|
"logps/chosen": -9.966501235961914, |
|
"logps/rejected": -10.05066967010498, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.48124998807907104, |
|
"rewards/chosen": -9.966501235961914, |
|
"rewards/margins": 0.08416947722434998, |
|
"rewards/rejected": -10.05066967010498, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.5024862601413242, |
|
"grad_norm": 0.10458741539165252, |
|
"learning_rate": 2.910060778827554e-07, |
|
"logits/chosen": -1.6746679544448853, |
|
"logits/rejected": -1.6765581369400024, |
|
"logps/chosen": -10.034064292907715, |
|
"logps/rejected": -9.960668563842773, |
|
"loss": -0.0003, |
|
"rewards/accuracies": 0.4437499940395355, |
|
"rewards/chosen": -10.034064292907715, |
|
"rewards/margins": -0.07339614629745483, |
|
"rewards/rejected": -9.960668563842773, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.5129547238942685, |
|
"grad_norm": 0.14020310226910793, |
|
"learning_rate": 2.8195076242990116e-07, |
|
"logits/chosen": -1.656341314315796, |
|
"logits/rejected": -1.6607738733291626, |
|
"logps/chosen": -9.828956604003906, |
|
"logps/rejected": -9.777771949768066, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.5062500238418579, |
|
"rewards/chosen": -9.828956604003906, |
|
"rewards/margins": -0.05118449404835701, |
|
"rewards/rejected": -9.777771949768066, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.5234231876472127, |
|
"grad_norm": 0.029712092586844915, |
|
"learning_rate": 2.7285261601056697e-07, |
|
"logits/chosen": -1.651460886001587, |
|
"logits/rejected": -1.6511856317520142, |
|
"logps/chosen": -9.723172187805176, |
|
"logps/rejected": -9.84160327911377, |
|
"loss": 0.0005, |
|
"rewards/accuracies": 0.518750011920929, |
|
"rewards/chosen": -9.723172187805176, |
|
"rewards/margins": 0.11843116581439972, |
|
"rewards/rejected": -9.84160327911377, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.533891651400157, |
|
"grad_norm": 0.03206118746185812, |
|
"learning_rate": 2.6372383496608186e-07, |
|
"logits/chosen": -1.6517183780670166, |
|
"logits/rejected": -1.6432650089263916, |
|
"logps/chosen": -9.812753677368164, |
|
"logps/rejected": -9.902433395385742, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.518750011920929, |
|
"rewards/chosen": -9.812753677368164, |
|
"rewards/margins": 0.08968041837215424, |
|
"rewards/rejected": -9.902433395385742, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.5443601151531012, |
|
"grad_norm": 0.020964013644010484, |
|
"learning_rate": 2.5457665670441937e-07, |
|
"logits/chosen": -1.592605710029602, |
|
"logits/rejected": -1.5922209024429321, |
|
"logps/chosen": -9.709136962890625, |
|
"logps/rejected": -9.819464683532715, |
|
"loss": 0.0002, |
|
"rewards/accuracies": 0.4937500059604645, |
|
"rewards/chosen": -9.709136962890625, |
|
"rewards/margins": 0.11032851040363312, |
|
"rewards/rejected": -9.819464683532715, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.5548285789060455, |
|
"grad_norm": 0.042268508150251446, |
|
"learning_rate": 2.454233432955807e-07, |
|
"logits/chosen": -1.6564003229141235, |
|
"logits/rejected": -1.6685075759887695, |
|
"logps/chosen": -9.937254905700684, |
|
"logps/rejected": -10.057160377502441, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.5062500238418579, |
|
"rewards/chosen": -9.937254905700684, |
|
"rewards/margins": 0.11990515887737274, |
|
"rewards/rejected": -10.057160377502441, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.5652970426589898, |
|
"grad_norm": 0.13512185096282692, |
|
"learning_rate": 2.3627616503391812e-07, |
|
"logits/chosen": -1.680994987487793, |
|
"logits/rejected": -1.6468158960342407, |
|
"logps/chosen": -10.322751998901367, |
|
"logps/rejected": -10.258121490478516, |
|
"loss": 0.0003, |
|
"rewards/accuracies": 0.4937500059604645, |
|
"rewards/chosen": -10.322751998901367, |
|
"rewards/margins": -0.06462957710027695, |
|
"rewards/rejected": -10.258121490478516, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.575765506411934, |
|
"grad_norm": 0.012066073597164235, |
|
"learning_rate": 2.2714738398943308e-07, |
|
"logits/chosen": -1.7274888753890991, |
|
"logits/rejected": -1.7199790477752686, |
|
"logps/chosen": -10.117765426635742, |
|
"logps/rejected": -10.104223251342773, |
|
"loss": 0.0001, |
|
"rewards/accuracies": 0.4625000059604645, |
|
"rewards/chosen": -10.117765426635742, |
|
"rewards/margins": -0.013540792278945446, |
|
"rewards/rejected": -10.104223251342773, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.5862339701648783, |
|
"grad_norm": 0.01636207441210717, |
|
"learning_rate": 2.1804923757009882e-07, |
|
"logits/chosen": -1.7347486019134521, |
|
"logits/rejected": -1.7411794662475586, |
|
"logps/chosen": -10.153875350952148, |
|
"logps/rejected": -10.280826568603516, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.4749999940395355, |
|
"rewards/chosen": -10.153875350952148, |
|
"rewards/margins": 0.12695105373859406, |
|
"rewards/rejected": -10.280826568603516, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5967024339178225, |
|
"grad_norm": 0.007389012661156647, |
|
"learning_rate": 2.089939221172446e-07, |
|
"logits/chosen": -1.6744228601455688, |
|
"logits/rejected": -1.6806707382202148, |
|
"logps/chosen": -10.230897903442383, |
|
"logps/rejected": -10.295170783996582, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.512499988079071, |
|
"rewards/chosen": -10.230897903442383, |
|
"rewards/margins": 0.06427150964736938, |
|
"rewards/rejected": -10.295170783996582, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.6071708976707668, |
|
"grad_norm": 0.11180626971526854, |
|
"learning_rate": 1.9999357655598891e-07, |
|
"logits/chosen": -1.715934157371521, |
|
"logits/rejected": -1.7268234491348267, |
|
"logps/chosen": -10.12391471862793, |
|
"logps/rejected": -10.262609481811523, |
|
"loss": -0.0003, |
|
"rewards/accuracies": 0.518750011920929, |
|
"rewards/chosen": -10.12391471862793, |
|
"rewards/margins": 0.13869477808475494, |
|
"rewards/rejected": -10.262609481811523, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.6176393614237111, |
|
"grad_norm": 0.03266198194021972, |
|
"learning_rate": 1.9106026612264315e-07, |
|
"logits/chosen": -1.744179368019104, |
|
"logits/rejected": -1.7408939599990845, |
|
"logps/chosen": -10.210718154907227, |
|
"logps/rejected": -10.17192554473877, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.5062500238418579, |
|
"rewards/chosen": -10.210718154907227, |
|
"rewards/margins": -0.0387921966612339, |
|
"rewards/rejected": -10.17192554473877, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.6281078251766553, |
|
"grad_norm": 0.043605609853355175, |
|
"learning_rate": 1.8220596619089573e-07, |
|
"logits/chosen": -1.69484543800354, |
|
"logits/rejected": -1.692705512046814, |
|
"logps/chosen": -10.109774589538574, |
|
"logps/rejected": -10.205177307128906, |
|
"loss": 0.0001, |
|
"rewards/accuracies": 0.4749999940395355, |
|
"rewards/chosen": -10.109774589538574, |
|
"rewards/margins": 0.09540363401174545, |
|
"rewards/rejected": -10.205177307128906, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.6385762889295996, |
|
"grad_norm": 0.013464112956501361, |
|
"learning_rate": 1.7344254621846017e-07, |
|
"logits/chosen": -1.6985801458358765, |
|
"logits/rejected": -1.7095019817352295, |
|
"logps/chosen": -10.274127960205078, |
|
"logps/rejected": -10.364879608154297, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.48124998807907104, |
|
"rewards/chosen": -10.274127960205078, |
|
"rewards/margins": 0.09075053036212921, |
|
"rewards/rejected": -10.364879608154297, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.6490447526825438, |
|
"grad_norm": 0.030133040256006938, |
|
"learning_rate": 1.647817538357072e-07, |
|
"logits/chosen": -1.7417259216308594, |
|
"logits/rejected": -1.740393877029419, |
|
"logps/chosen": -10.28361701965332, |
|
"logps/rejected": -10.132238388061523, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.46875, |
|
"rewards/chosen": -10.28361701965332, |
|
"rewards/margins": -0.15137743949890137, |
|
"rewards/rejected": -10.132238388061523, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.6595132164354881, |
|
"grad_norm": 0.04002742762071502, |
|
"learning_rate": 1.562351990976095e-07, |
|
"logits/chosen": -1.69840407371521, |
|
"logits/rejected": -1.7028906345367432, |
|
"logps/chosen": -10.156418800354004, |
|
"logps/rejected": -10.191594123840332, |
|
"loss": -0.0007, |
|
"rewards/accuracies": 0.48124998807907104, |
|
"rewards/chosen": -10.156418800354004, |
|
"rewards/margins": 0.03517543524503708, |
|
"rewards/rejected": -10.191594123840332, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.6699816801884323, |
|
"grad_norm": 0.021724114416033388, |
|
"learning_rate": 1.478143389201113e-07, |
|
"logits/chosen": -1.7531684637069702, |
|
"logits/rejected": -1.7422311305999756, |
|
"logps/chosen": -10.416919708251953, |
|
"logps/rejected": -10.20901107788086, |
|
"loss": 0.0001, |
|
"rewards/accuracies": 0.48124998807907104, |
|
"rewards/chosen": -10.416919708251953, |
|
"rewards/margins": -0.20790883898735046, |
|
"rewards/rejected": -10.20901107788086, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.6804501439413766, |
|
"grad_norm": 0.08104613852953838, |
|
"learning_rate": 1.3953046172178413e-07, |
|
"logits/chosen": -1.725590467453003, |
|
"logits/rejected": -1.7158119678497314, |
|
"logps/chosen": -10.29697322845459, |
|
"logps/rejected": -10.40684986114502, |
|
"loss": -0.0003, |
|
"rewards/accuracies": 0.4625000059604645, |
|
"rewards/chosen": -10.29697322845459, |
|
"rewards/margins": 0.10987655818462372, |
|
"rewards/rejected": -10.40684986114502, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.6909186076943209, |
|
"grad_norm": 0.04339077417181125, |
|
"learning_rate": 1.3139467229135998e-07, |
|
"logits/chosen": -1.7010434865951538, |
|
"logits/rejected": -1.6965417861938477, |
|
"logps/chosen": -10.263284683227539, |
|
"logps/rejected": -10.193758010864258, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.48124998807907104, |
|
"rewards/chosen": -10.263284683227539, |
|
"rewards/margins": -0.0695262923836708, |
|
"rewards/rejected": -10.193758010864258, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.7013870714472651, |
|
"grad_norm": 0.019942381418589637, |
|
"learning_rate": 1.2341787690142435e-07, |
|
"logits/chosen": -1.7109758853912354, |
|
"logits/rejected": -1.7116413116455078, |
|
"logps/chosen": -10.025394439697266, |
|
"logps/rejected": -10.113506317138672, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.5, |
|
"rewards/chosen": -10.025394439697266, |
|
"rewards/margins": 0.08811323344707489, |
|
"rewards/rejected": -10.113506317138672, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.7118555352002094, |
|
"grad_norm": 0.030458935372565502, |
|
"learning_rate": 1.1561076868822755e-07, |
|
"logits/chosen": -1.7183024883270264, |
|
"logits/rejected": -1.7164380550384521, |
|
"logps/chosen": -10.216609954833984, |
|
"logps/rejected": -10.225852966308594, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.4749999940395355, |
|
"rewards/chosen": -10.216609954833984, |
|
"rewards/margins": 0.00924335140734911, |
|
"rewards/rejected": -10.225852966308594, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.7223239989531536, |
|
"grad_norm": 0.04400215150996701, |
|
"learning_rate": 1.0798381331721107e-07, |
|
"logits/chosen": -1.718132734298706, |
|
"logits/rejected": -1.732581377029419, |
|
"logps/chosen": -10.151108741760254, |
|
"logps/rejected": -10.323533058166504, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.4937500059604645, |
|
"rewards/chosen": -10.151108741760254, |
|
"rewards/margins": 0.17242416739463806, |
|
"rewards/rejected": -10.323533058166504, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.7327924627060979, |
|
"grad_norm": 0.03515217556644255, |
|
"learning_rate": 1.0054723495346482e-07, |
|
"logits/chosen": -1.6874383687973022, |
|
"logits/rejected": -1.6842788457870483, |
|
"logps/chosen": -10.151719093322754, |
|
"logps/rejected": -10.054476737976074, |
|
"loss": -0.0001, |
|
"rewards/accuracies": 0.4312500059604645, |
|
"rewards/chosen": -10.151719093322754, |
|
"rewards/margins": -0.09724216163158417, |
|
"rewards/rejected": -10.054476737976074, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.7432609264590422, |
|
"grad_norm": 0.021861424316864955, |
|
"learning_rate": 9.331100255592436e-08, |
|
"logits/chosen": -1.7121349573135376, |
|
"logits/rejected": -1.6940410137176514, |
|
"logps/chosen": -10.209026336669922, |
|
"logps/rejected": -9.959612846374512, |
|
"loss": 0.0002, |
|
"rewards/accuracies": 0.4124999940395355, |
|
"rewards/chosen": -10.209026336669922, |
|
"rewards/margins": -0.24941349029541016, |
|
"rewards/rejected": -9.959612846374512, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.7537293902119864, |
|
"grad_norm": 0.013506797744103302, |
|
"learning_rate": 8.628481651367875e-08, |
|
"logits/chosen": -1.6897398233413696, |
|
"logits/rejected": -1.6821842193603516, |
|
"logps/chosen": -9.989233016967773, |
|
"logps/rejected": -10.048975944519043, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.518750011920929, |
|
"rewards/chosen": -9.989233016967773, |
|
"rewards/margins": 0.05974303558468819, |
|
"rewards/rejected": -10.048975944519043, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.7641978539649307, |
|
"grad_norm": 0.07239679867383533, |
|
"learning_rate": 7.947809564230445e-08, |
|
"logits/chosen": -1.6575393676757812, |
|
"logits/rejected": -1.6490436792373657, |
|
"logps/chosen": -10.096014022827148, |
|
"logps/rejected": -10.024383544921875, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.4937500059604645, |
|
"rewards/chosen": -10.096014022827148, |
|
"rewards/margins": -0.07163101434707642, |
|
"rewards/rejected": -10.024383544921875, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.7746663177178749, |
|
"grad_norm": 0.08043566497276283, |
|
"learning_rate": 7.289996455765748e-08, |
|
"logits/chosen": -1.730115294456482, |
|
"logits/rejected": -1.7217315435409546, |
|
"logps/chosen": -10.174753189086914, |
|
"logps/rejected": -10.153951644897461, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.5249999761581421, |
|
"rewards/chosen": -10.174753189086914, |
|
"rewards/margins": -0.02080065943300724, |
|
"rewards/rejected": -10.153951644897461, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.7851347814708192, |
|
"grad_norm": 0.018294513390755808, |
|
"learning_rate": 6.655924144404906e-08, |
|
"logits/chosen": -1.7276290655136108, |
|
"logits/rejected": -1.728005051612854, |
|
"logps/chosen": -10.183911323547363, |
|
"logps/rejected": -10.19709587097168, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.5, |
|
"rewards/chosen": -10.183911323547363, |
|
"rewards/margins": 0.01318590622395277, |
|
"rewards/rejected": -10.19709587097168, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.7956032452237635, |
|
"grad_norm": 0.006815837156718109, |
|
"learning_rate": 6.046442623320145e-08, |
|
"logits/chosen": -1.7046406269073486, |
|
"logits/rejected": -1.6621993780136108, |
|
"logps/chosen": -10.309165954589844, |
|
"logps/rejected": -10.365669250488281, |
|
"loss": 0.0001, |
|
"rewards/accuracies": 0.5375000238418579, |
|
"rewards/chosen": -10.309165954589844, |
|
"rewards/margins": 0.056504249572753906, |
|
"rewards/rejected": -10.365669250488281, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.8060717089767077, |
|
"grad_norm": 0.030388876590904525, |
|
"learning_rate": 5.4623689209832484e-08, |
|
"logits/chosen": -1.7268683910369873, |
|
"logits/rejected": -1.7090175151824951, |
|
"logps/chosen": -10.306918144226074, |
|
"logps/rejected": -10.260132789611816, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.4375, |
|
"rewards/chosen": -10.306918144226074, |
|
"rewards/margins": -0.04678579792380333, |
|
"rewards/rejected": -10.260132789611816, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.816540172729652, |
|
"grad_norm": 0.04364016204439969, |
|
"learning_rate": 4.904486005914027e-08, |
|
"logits/chosen": -1.7389265298843384, |
|
"logits/rejected": -1.7257276773452759, |
|
"logps/chosen": -10.114287376403809, |
|
"logps/rejected": -10.103799819946289, |
|
"loss": 0.0001, |
|
"rewards/accuracies": 0.44999998807907104, |
|
"rewards/chosen": -10.114287376403809, |
|
"rewards/margins": -0.010487285442650318, |
|
"rewards/rejected": -10.103799819946289, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.8270086364825961, |
|
"grad_norm": 0.026690398874973438, |
|
"learning_rate": 4.373541737087263e-08, |
|
"logits/chosen": -1.689626693725586, |
|
"logits/rejected": -1.6864984035491943, |
|
"logps/chosen": -9.968095779418945, |
|
"logps/rejected": -9.953658103942871, |
|
"loss": -0.0005, |
|
"rewards/accuracies": 0.5062500238418579, |
|
"rewards/chosen": -9.968095779418945, |
|
"rewards/margins": -0.014436977915465832, |
|
"rewards/rejected": -9.953658103942871, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.8374771002355405, |
|
"grad_norm": 0.023648830133579133, |
|
"learning_rate": 3.8702478614051345e-08, |
|
"logits/chosen": -1.6882154941558838, |
|
"logits/rejected": -1.672609567642212, |
|
"logps/chosen": -10.05548095703125, |
|
"logps/rejected": -9.970457077026367, |
|
"loss": -0.0002, |
|
"rewards/accuracies": 0.48124998807907104, |
|
"rewards/chosen": -10.05548095703125, |
|
"rewards/margins": -0.08502475917339325, |
|
"rewards/rejected": -9.970457077026367, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8479455639884846, |
|
"grad_norm": 0.11775947165792712, |
|
"learning_rate": 3.3952790595787986e-08, |
|
"logits/chosen": -1.7214962244033813, |
|
"logits/rejected": -1.7271496057510376, |
|
"logps/chosen": -10.209165573120117, |
|
"logps/rejected": -10.225208282470703, |
|
"loss": -0.0002, |
|
"rewards/accuracies": 0.4749999940395355, |
|
"rewards/chosen": -10.209165573120117, |
|
"rewards/margins": 0.01604318618774414, |
|
"rewards/rejected": -10.225208282470703, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.8584140277414289, |
|
"grad_norm": 0.016736815353629707, |
|
"learning_rate": 2.9492720416985e-08, |
|
"logits/chosen": -1.731414556503296, |
|
"logits/rejected": -1.7266390323638916, |
|
"logps/chosen": -10.035455703735352, |
|
"logps/rejected": -9.934586524963379, |
|
"loss": 0.0002, |
|
"rewards/accuracies": 0.5062500238418579, |
|
"rewards/chosen": -10.035455703735352, |
|
"rewards/margins": -0.10086920112371445, |
|
"rewards/rejected": -9.934586524963379, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.8688824914943732, |
|
"grad_norm": 0.010543453920296524, |
|
"learning_rate": 2.5328246937043525e-08, |
|
"logits/chosen": -1.7460575103759766, |
|
"logits/rejected": -1.7472984790802002, |
|
"logps/chosen": -10.30466079711914, |
|
"logps/rejected": -10.271262168884277, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.44999998807907104, |
|
"rewards/chosen": -10.30466079711914, |
|
"rewards/margins": -0.033397965133190155, |
|
"rewards/rejected": -10.271262168884277, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.8793509552473174, |
|
"grad_norm": 0.1619070822929026, |
|
"learning_rate": 2.1464952759020856e-08, |
|
"logits/chosen": -1.6861051321029663, |
|
"logits/rejected": -1.6756645441055298, |
|
"logps/chosen": -10.12368392944336, |
|
"logps/rejected": -10.014533996582031, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.44999998807907104, |
|
"rewards/chosen": -10.12368392944336, |
|
"rewards/margins": -0.10914961993694305, |
|
"rewards/rejected": -10.014533996582031, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.8898194190002617, |
|
"grad_norm": 0.009278538808287691, |
|
"learning_rate": 1.7908016745981856e-08, |
|
"logits/chosen": -1.7117290496826172, |
|
"logits/rejected": -1.7069095373153687, |
|
"logps/chosen": -10.257353782653809, |
|
"logps/rejected": -10.189541816711426, |
|
"loss": 0.0, |
|
"rewards/accuracies": 0.4625000059604645, |
|
"rewards/chosen": -10.257353782653809, |
|
"rewards/margins": -0.06781347841024399, |
|
"rewards/rejected": -10.189541816711426, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.9002878827532059, |
|
"grad_norm": 0.02721516902148881, |
|
"learning_rate": 1.4662207078575684e-08, |
|
"logits/chosen": -1.717145562171936, |
|
"logits/rejected": -1.7124665975570679, |
|
"logps/chosen": -10.076754570007324, |
|
"logps/rejected": -10.053953170776367, |
|
"loss": -0.0002, |
|
"rewards/accuracies": 0.45625001192092896, |
|
"rewards/chosen": -10.076754570007324, |
|
"rewards/margins": -0.022800538688898087, |
|
"rewards/rejected": -10.053953170776367, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.9107563465061502, |
|
"grad_norm": 0.17249350484919276, |
|
"learning_rate": 1.1731874863145142e-08, |
|
"logits/chosen": -1.74248468875885, |
|
"logits/rejected": -1.7478322982788086, |
|
"logps/chosen": -10.22671890258789, |
|
"logps/rejected": -10.209540367126465, |
|
"loss": 0.0007, |
|
"rewards/accuracies": 0.4937500059604645, |
|
"rewards/chosen": -10.22671890258789, |
|
"rewards/margins": -0.017179716378450394, |
|
"rewards/rejected": -10.209540367126465, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.9212248102590945, |
|
"grad_norm": 0.059902683583967914, |
|
"learning_rate": 9.12094829893642e-09, |
|
"logits/chosen": -1.6926848888397217, |
|
"logits/rejected": -1.6890709400177002, |
|
"logps/chosen": -10.345104217529297, |
|
"logps/rejected": -10.262327194213867, |
|
"loss": 0.0004, |
|
"rewards/accuracies": 0.44999998807907104, |
|
"rewards/chosen": -10.345104217529297, |
|
"rewards/margins": -0.08277805149555206, |
|
"rewards/rejected": -10.262327194213867, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.9316932740120387, |
|
"grad_norm": 0.10421328261403418, |
|
"learning_rate": 6.832927412229017e-09, |
|
"logits/chosen": -1.7495588064193726, |
|
"logits/rejected": -1.7407792806625366, |
|
"logps/chosen": -10.188596725463867, |
|
"logps/rejected": -10.2090425491333, |
|
"loss": 0.0001, |
|
"rewards/accuracies": 0.4749999940395355, |
|
"rewards/chosen": -10.188596725463867, |
|
"rewards/margins": 0.02044558897614479, |
|
"rewards/rejected": -10.2090425491333, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.942161737764983, |
|
"grad_norm": 0.033309080831972024, |
|
"learning_rate": 4.8708793644441086e-09, |
|
"logits/chosen": -1.6661462783813477, |
|
"logits/rejected": -1.6469885110855103, |
|
"logps/chosen": -10.289316177368164, |
|
"logps/rejected": -10.164963722229004, |
|
"loss": 0.0002, |
|
"rewards/accuracies": 0.44999998807907104, |
|
"rewards/chosen": -10.289316177368164, |
|
"rewards/margins": -0.1243540421128273, |
|
"rewards/rejected": -10.164963722229004, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.9526302015179272, |
|
"grad_norm": 0.1949041816950042, |
|
"learning_rate": 3.2374343405217884e-09, |
|
"logits/chosen": -1.7108314037322998, |
|
"logits/rejected": -1.710608720779419, |
|
"logps/chosen": -10.109529495239258, |
|
"logps/rejected": -10.343156814575195, |
|
"loss": 0.0002, |
|
"rewards/accuracies": 0.574999988079071, |
|
"rewards/chosen": -10.109529495239258, |
|
"rewards/margins": 0.2336268424987793, |
|
"rewards/rejected": -10.343156814575195, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.9630986652708715, |
|
"grad_norm": 0.01652601811230786, |
|
"learning_rate": 1.9347820230782295e-09, |
|
"logits/chosen": -1.7258888483047485, |
|
"logits/rejected": -1.728637933731079, |
|
"logps/chosen": -10.354926109313965, |
|
"logps/rejected": -10.41981315612793, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.48124998807907104, |
|
"rewards/chosen": -10.354926109313965, |
|
"rewards/margins": 0.06488712131977081, |
|
"rewards/rejected": -10.41981315612793, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.9735671290238157, |
|
"grad_norm": 0.12136785382119708, |
|
"learning_rate": 9.64668657069706e-10, |
|
"logits/chosen": -1.7371879816055298, |
|
"logits/rejected": -1.7365795373916626, |
|
"logps/chosen": -10.489349365234375, |
|
"logps/rejected": -10.43460750579834, |
|
"loss": -0.0003, |
|
"rewards/accuracies": 0.4375, |
|
"rewards/chosen": -10.489349365234375, |
|
"rewards/margins": -0.0547415092587471, |
|
"rewards/rejected": -10.43460750579834, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.98403559277676, |
|
"grad_norm": 0.0469433162766017, |
|
"learning_rate": 3.2839470889836627e-10, |
|
"logits/chosen": -1.7311279773712158, |
|
"logits/rejected": -1.7309757471084595, |
|
"logps/chosen": -10.06026554107666, |
|
"logps/rejected": -10.253448486328125, |
|
"loss": 0.0001, |
|
"rewards/accuracies": 0.550000011920929, |
|
"rewards/chosen": -10.06026554107666, |
|
"rewards/margins": 0.19318366050720215, |
|
"rewards/rejected": -10.253448486328125, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.9945040565297043, |
|
"grad_norm": 0.039418088806606334, |
|
"learning_rate": 2.6813123097352287e-11, |
|
"logits/chosen": -1.7103230953216553, |
|
"logits/rejected": -1.6960973739624023, |
|
"logps/chosen": -10.431560516357422, |
|
"logps/rejected": -10.349306106567383, |
|
"loss": -0.0, |
|
"rewards/accuracies": 0.512499988079071, |
|
"rewards/chosen": -10.431560516357422, |
|
"rewards/margins": -0.08225315064191818, |
|
"rewards/rejected": -10.349306106567383, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.998691442030882, |
|
"step": 477, |
|
"total_flos": 0.0, |
|
"train_loss": 1.550099917438415e-05, |
|
"train_runtime": 8183.0083, |
|
"train_samples_per_second": 7.471, |
|
"train_steps_per_second": 0.058 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 477, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 1000000, |
|
"total_flos": 0.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |