{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9989071038251366, |
|
"eval_steps": 400, |
|
"global_step": 457, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01092896174863388, |
|
"grad_norm": 55.38540565612871, |
|
"learning_rate": 9.782608695652174e-08, |
|
"logits/chosen": -1.0178581476211548, |
|
"logits/rejected": -1.0123393535614014, |
|
"logps/chosen": -0.28073588013648987, |
|
"logps/rejected": -0.2859407067298889, |
|
"loss": 3.9566, |
|
"rewards/accuracies": 0.53125, |
|
"rewards/chosen": -2.807358503341675, |
|
"rewards/margins": 0.05204860121011734, |
|
"rewards/rejected": -2.8594069480895996, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02185792349726776, |
|
"grad_norm": 72.60910128873313, |
|
"learning_rate": 1.9565217391304347e-07, |
|
"logits/chosen": -1.0586049556732178, |
|
"logits/rejected": -1.0074703693389893, |
|
"logps/chosen": -0.25731509923934937, |
|
"logps/rejected": -0.27178820967674255, |
|
"loss": 3.9165, |
|
"rewards/accuracies": 0.543749988079071, |
|
"rewards/chosen": -2.573150873184204, |
|
"rewards/margins": 0.1447313129901886, |
|
"rewards/rejected": -2.7178821563720703, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03278688524590164, |
|
"grad_norm": 61.335144114494604, |
|
"learning_rate": 2.9347826086956523e-07, |
|
"logits/chosen": -1.007138967514038, |
|
"logits/rejected": -0.9607247114181519, |
|
"logps/chosen": -0.2671090066432953, |
|
"logps/rejected": -0.2734394967556, |
|
"loss": 3.9076, |
|
"rewards/accuracies": 0.512499988079071, |
|
"rewards/chosen": -2.6710901260375977, |
|
"rewards/margins": 0.06330505013465881, |
|
"rewards/rejected": -2.7343950271606445, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04371584699453552, |
|
"grad_norm": 72.83718237358336, |
|
"learning_rate": 3.9130434782608694e-07, |
|
"logits/chosen": -0.9411917924880981, |
|
"logits/rejected": -0.8917375802993774, |
|
"logps/chosen": -0.27226755023002625, |
|
"logps/rejected": -0.28448182344436646, |
|
"loss": 3.9517, |
|
"rewards/accuracies": 0.5625, |
|
"rewards/chosen": -2.7226755619049072, |
|
"rewards/margins": 0.12214270979166031, |
|
"rewards/rejected": -2.844818115234375, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0546448087431694, |
|
"grad_norm": 36.310526987064065, |
|
"learning_rate": 4.891304347826087e-07, |
|
"logits/chosen": -0.9477519989013672, |
|
"logits/rejected": -0.8758963346481323, |
|
"logps/chosen": -0.2740391790866852, |
|
"logps/rejected": -0.29474973678588867, |
|
"loss": 3.916, |
|
"rewards/accuracies": 0.5687500238418579, |
|
"rewards/chosen": -2.740391969680786, |
|
"rewards/margins": 0.20710572600364685, |
|
"rewards/rejected": -2.9474973678588867, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.06557377049180328, |
|
"grad_norm": 57.63053904902577, |
|
"learning_rate": 5.869565217391305e-07, |
|
"logits/chosen": -1.055710792541504, |
|
"logits/rejected": -0.9906851649284363, |
|
"logps/chosen": -0.26672154664993286, |
|
"logps/rejected": -0.2856570780277252, |
|
"loss": 3.8988, |
|
"rewards/accuracies": 0.53125, |
|
"rewards/chosen": -2.667215347290039, |
|
"rewards/margins": 0.18935534358024597, |
|
"rewards/rejected": -2.8565704822540283, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.07650273224043716, |
|
"grad_norm": 66.01668630917158, |
|
"learning_rate": 6.847826086956522e-07, |
|
"logits/chosen": -1.0005064010620117, |
|
"logits/rejected": -0.9350866079330444, |
|
"logps/chosen": -0.2575780749320984, |
|
"logps/rejected": -0.2911999523639679, |
|
"loss": 3.844, |
|
"rewards/accuracies": 0.512499988079071, |
|
"rewards/chosen": -2.5757808685302734, |
|
"rewards/margins": 0.3362187445163727, |
|
"rewards/rejected": -2.911999464035034, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.08743169398907104, |
|
"grad_norm": 76.01107140178125, |
|
"learning_rate": 7.826086956521739e-07, |
|
"logits/chosen": -0.959019660949707, |
|
"logits/rejected": -0.8994712829589844, |
|
"logps/chosen": -0.2856209874153137, |
|
"logps/rejected": -0.3126066327095032, |
|
"loss": 3.9495, |
|
"rewards/accuracies": 0.518750011920929, |
|
"rewards/chosen": -2.8562099933624268, |
|
"rewards/margins": 0.2698565423488617, |
|
"rewards/rejected": -3.1260666847229004, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.09836065573770492, |
|
"grad_norm": 34.55317412546812, |
|
"learning_rate": 8.804347826086956e-07, |
|
"logits/chosen": -1.0179210901260376, |
|
"logits/rejected": -0.9362085461616516, |
|
"logps/chosen": -0.2937139570713043, |
|
"logps/rejected": -0.33561640977859497, |
|
"loss": 3.8487, |
|
"rewards/accuracies": 0.5062500238418579, |
|
"rewards/chosen": -2.9371399879455566, |
|
"rewards/margins": 0.4190244674682617, |
|
"rewards/rejected": -3.3561644554138184, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.1092896174863388, |
|
"grad_norm": 79.7939449629563, |
|
"learning_rate": 8.997896779678812e-07, |
|
"logits/chosen": -0.9532683491706848, |
|
"logits/rejected": -0.8744559288024902, |
|
"logps/chosen": -0.3062035143375397, |
|
"logps/rejected": -0.3324257433414459, |
|
"loss": 3.8515, |
|
"rewards/accuracies": 0.5062500238418579, |
|
"rewards/chosen": -3.062035083770752, |
|
"rewards/margins": 0.2622221112251282, |
|
"rewards/rejected": -3.3242568969726562, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.12021857923497267, |
|
"grad_norm": 44.924372418868195, |
|
"learning_rate": 8.989355816502526e-07, |
|
"logits/chosen": -0.9827022552490234, |
|
"logits/rejected": -0.872309684753418, |
|
"logps/chosen": -0.2938118278980255, |
|
"logps/rejected": -0.3459795117378235, |
|
"loss": 3.7417, |
|
"rewards/accuracies": 0.581250011920929, |
|
"rewards/chosen": -2.9381184577941895, |
|
"rewards/margins": 0.521676242351532, |
|
"rewards/rejected": -3.4597949981689453, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.13114754098360656, |
|
"grad_norm": 39.977262723369954, |
|
"learning_rate": 8.974258123725301e-07, |
|
"logits/chosen": -1.0276820659637451, |
|
"logits/rejected": -0.9835308790206909, |
|
"logps/chosen": -0.2806386947631836, |
|
"logps/rejected": -0.35135510563850403, |
|
"loss": 3.6866, |
|
"rewards/accuracies": 0.606249988079071, |
|
"rewards/chosen": -2.806387186050415, |
|
"rewards/margins": 0.7071643471717834, |
|
"rewards/rejected": -3.5135509967803955, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.14207650273224043, |
|
"grad_norm": 59.29745253893998, |
|
"learning_rate": 8.952625751612967e-07, |
|
"logits/chosen": -1.0310702323913574, |
|
"logits/rejected": -0.9638811349868774, |
|
"logps/chosen": -0.3306628465652466, |
|
"logps/rejected": -0.364567369222641, |
|
"loss": 3.8342, |
|
"rewards/accuracies": 0.518750011920929, |
|
"rewards/chosen": -3.306628465652466, |
|
"rewards/margins": 0.3390454351902008, |
|
"rewards/rejected": -3.645674228668213, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.15300546448087432, |
|
"grad_norm": 156.8250507356202, |
|
"learning_rate": 8.924490294367533e-07, |
|
"logits/chosen": -0.9752784967422485, |
|
"logits/rejected": -0.9566763639450073, |
|
"logps/chosen": -0.31856533885002136, |
|
"logps/rejected": -0.3732706904411316, |
|
"loss": 3.7076, |
|
"rewards/accuracies": 0.5625, |
|
"rewards/chosen": -3.1856534481048584, |
|
"rewards/margins": 0.5470534563064575, |
|
"rewards/rejected": -3.7327072620391846, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.16393442622950818, |
|
"grad_norm": 43.26313827052752, |
|
"learning_rate": 8.889892843983686e-07, |
|
"logits/chosen": -0.9661557078361511, |
|
"logits/rejected": -0.9509698152542114, |
|
"logps/chosen": -0.3259205222129822, |
|
"logps/rejected": -0.39959517121315, |
|
"loss": 3.7485, |
|
"rewards/accuracies": 0.6312500238418579, |
|
"rewards/chosen": -3.2592053413391113, |
|
"rewards/margins": 0.7367462515830994, |
|
"rewards/rejected": -3.9959514141082764, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.17486338797814208, |
|
"grad_norm": 91.2494199085889, |
|
"learning_rate": 8.848883930233695e-07, |
|
"logits/chosen": -0.9704955220222473, |
|
"logits/rejected": -0.9103384017944336, |
|
"logps/chosen": -0.3389447033405304, |
|
"logps/rejected": -0.3731645941734314, |
|
"loss": 3.7051, |
|
"rewards/accuracies": 0.59375, |
|
"rewards/chosen": -3.3894474506378174, |
|
"rewards/margins": 0.34219902753829956, |
|
"rewards/rejected": -3.7316460609436035, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.18579234972677597, |
|
"grad_norm": 77.88395363509508, |
|
"learning_rate": 8.801523446868397e-07, |
|
"logits/chosen": -0.9626806378364563, |
|
"logits/rejected": -0.9148591756820679, |
|
"logps/chosen": -0.33388131856918335, |
|
"logps/rejected": -0.4132775664329529, |
|
"loss": 3.6667, |
|
"rewards/accuracies": 0.6312500238418579, |
|
"rewards/chosen": -3.338813304901123, |
|
"rewards/margins": 0.7939624190330505, |
|
"rewards/rejected": -4.132775783538818, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.19672131147540983, |
|
"grad_norm": 52.15269440463944, |
|
"learning_rate": 8.74788056414203e-07, |
|
"logits/chosen": -1.0256249904632568, |
|
"logits/rejected": -0.9451953172683716, |
|
"logps/chosen": -0.37562912702560425, |
|
"logps/rejected": -0.4213550090789795, |
|
"loss": 3.7121, |
|
"rewards/accuracies": 0.5687500238418579, |
|
"rewards/chosen": -3.756291627883911, |
|
"rewards/margins": 0.4572587013244629, |
|
"rewards/rejected": -4.213550090789795, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.20765027322404372, |
|
"grad_norm": 56.09180280161284, |
|
"learning_rate": 8.688033627788689e-07, |
|
"logits/chosen": -0.9407247304916382, |
|
"logits/rejected": -0.9443449974060059, |
|
"logps/chosen": -0.3641100525856018, |
|
"logps/rejected": -0.409776508808136, |
|
"loss": 3.5942, |
|
"rewards/accuracies": 0.5687500238418579, |
|
"rewards/chosen": -3.6411004066467285, |
|
"rewards/margins": 0.45666465163230896, |
|
"rewards/rejected": -4.09776496887207, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.2185792349726776, |
|
"grad_norm": 109.82437730440958, |
|
"learning_rate": 8.622070044597933e-07, |
|
"logits/chosen": -0.9516464471817017, |
|
"logits/rejected": -0.9063247442245483, |
|
"logps/chosen": -0.4270172119140625, |
|
"logps/rejected": -0.5119742155075073, |
|
"loss": 3.5993, |
|
"rewards/accuracies": 0.612500011920929, |
|
"rewards/chosen": -4.270172119140625, |
|
"rewards/margins": 0.8495694398880005, |
|
"rewards/rejected": -5.119741916656494, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.22950819672131148, |
|
"grad_norm": 65.01582060489105, |
|
"learning_rate": 8.550086154756677e-07, |
|
"logits/chosen": -1.0168472528457642, |
|
"logits/rejected": -0.9868780374526978, |
|
"logps/chosen": -0.38424795866012573, |
|
"logps/rejected": -0.4684266149997711, |
|
"loss": 3.5226, |
|
"rewards/accuracies": 0.6312500238418579, |
|
"rewards/chosen": -3.842479705810547, |
|
"rewards/margins": 0.8417860865592957, |
|
"rewards/rejected": -4.684265613555908, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.24043715846994534, |
|
"grad_norm": 64.20949181772441, |
|
"learning_rate": 8.472187091143803e-07, |
|
"logits/chosen": -1.0123231410980225, |
|
"logits/rejected": -0.9926570057868958, |
|
"logps/chosen": -0.42821940779685974, |
|
"logps/rejected": -0.5515985488891602, |
|
"loss": 3.5137, |
|
"rewards/accuracies": 0.6625000238418579, |
|
"rewards/chosen": -4.2821946144104, |
|
"rewards/margins": 1.2337915897369385, |
|
"rewards/rejected": -5.51598596572876, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.25136612021857924, |
|
"grad_norm": 69.07389330772023, |
|
"learning_rate": 8.388486625782994e-07, |
|
"logits/chosen": -1.0123811960220337, |
|
"logits/rejected": -0.9744676351547241, |
|
"logps/chosen": -0.43518343567848206, |
|
"logps/rejected": -0.5923277139663696, |
|
"loss": 3.4477, |
|
"rewards/accuracies": 0.71875, |
|
"rewards/chosen": -4.351834297180176, |
|
"rewards/margins": 1.5714432001113892, |
|
"rewards/rejected": -5.923277378082275, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.26229508196721313, |
|
"grad_norm": 52.47296311016427, |
|
"learning_rate": 8.299107003678049e-07, |
|
"logits/chosen": -1.0302703380584717, |
|
"logits/rejected": -0.9615422487258911, |
|
"logps/chosen": -0.45768284797668457, |
|
"logps/rejected": -0.5608014464378357, |
|
"loss": 3.3469, |
|
"rewards/accuracies": 0.706250011920929, |
|
"rewards/chosen": -4.576828956604004, |
|
"rewards/margins": 1.0311863422393799, |
|
"rewards/rejected": -5.608015537261963, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.273224043715847, |
|
"grad_norm": 113.28126785496374, |
|
"learning_rate": 8.204178764273363e-07, |
|
"logits/chosen": -0.932907223701477, |
|
"logits/rejected": -0.9002698659896851, |
|
"logps/chosen": -0.5244953632354736, |
|
"logps/rejected": -0.6600042581558228, |
|
"loss": 3.329, |
|
"rewards/accuracies": 0.7250000238418579, |
|
"rewards/chosen": -5.2449541091918945, |
|
"rewards/margins": 1.355088710784912, |
|
"rewards/rejected": -6.60004186630249, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.28415300546448086, |
|
"grad_norm": 62.72252412180173, |
|
"learning_rate": 8.103840550800328e-07, |
|
"logits/chosen": -0.9439579844474792, |
|
"logits/rejected": -0.9413612484931946, |
|
"logps/chosen": -0.5499246716499329, |
|
"logps/rejected": -0.7896748185157776, |
|
"loss": 3.2822, |
|
"rewards/accuracies": 0.731249988079071, |
|
"rewards/chosen": -5.499247074127197, |
|
"rewards/margins": 2.3975021839141846, |
|
"rewards/rejected": -7.8967485427856445, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.29508196721311475, |
|
"grad_norm": 57.931397196362106, |
|
"learning_rate": 7.998238907788109e-07, |
|
"logits/chosen": -0.986513614654541, |
|
"logits/rejected": -0.9044733047485352, |
|
"logps/chosen": -0.5918017029762268, |
|
"logps/rejected": -0.7940021753311157, |
|
"loss": 3.1203, |
|
"rewards/accuracies": 0.7124999761581421, |
|
"rewards/chosen": -5.918017387390137, |
|
"rewards/margins": 2.022005081176758, |
|
"rewards/rejected": -7.9400224685668945, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.30601092896174864, |
|
"grad_norm": 76.37622361250128, |
|
"learning_rate": 7.887528067034528e-07, |
|
"logits/chosen": -0.9690658450126648, |
|
"logits/rejected": -0.9173405766487122, |
|
"logps/chosen": -0.6729015707969666, |
|
"logps/rejected": -0.9487813711166382, |
|
"loss": 2.764, |
|
"rewards/accuracies": 0.7749999761581421, |
|
"rewards/chosen": -6.729015350341797, |
|
"rewards/margins": 2.7587978839874268, |
|
"rewards/rejected": -9.487812995910645, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.31693989071038253, |
|
"grad_norm": 101.65463918673284, |
|
"learning_rate": 7.77186972234965e-07, |
|
"logits/chosen": -0.9030313491821289, |
|
"logits/rejected": -0.8889007568359375, |
|
"logps/chosen": -0.7519813776016235, |
|
"logps/rejected": -0.9187248349189758, |
|
"loss": 2.9287, |
|
"rewards/accuracies": 0.699999988079071, |
|
"rewards/chosen": -7.519814491271973, |
|
"rewards/margins": 1.667433500289917, |
|
"rewards/rejected": -9.187247276306152, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.32786885245901637, |
|
"grad_norm": 95.8508047628295, |
|
"learning_rate": 7.651432793401065e-07, |
|
"logits/chosen": -0.9583929777145386, |
|
"logits/rejected": -0.9260581135749817, |
|
"logps/chosen": -0.924455463886261, |
|
"logps/rejected": -1.2613859176635742, |
|
"loss": 2.8309, |
|
"rewards/accuracies": 0.75, |
|
"rewards/chosen": -9.24455451965332, |
|
"rewards/margins": 3.3693041801452637, |
|
"rewards/rejected": -12.613858222961426, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.33879781420765026, |
|
"grad_norm": 83.36287064815711, |
|
"learning_rate": 7.526393179005737e-07, |
|
"logits/chosen": -0.8817826509475708, |
|
"logits/rejected": -0.8292266130447388, |
|
"logps/chosen": -1.0013782978057861, |
|
"logps/rejected": -1.2671784162521362, |
|
"loss": 2.6483, |
|
"rewards/accuracies": 0.7250000238418579, |
|
"rewards/chosen": -10.01378345489502, |
|
"rewards/margins": 2.6580021381378174, |
|
"rewards/rejected": -12.671785354614258, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.34972677595628415, |
|
"grad_norm": 81.57566570080871, |
|
"learning_rate": 7.396933500228808e-07, |
|
"logits/chosen": -0.9194300770759583, |
|
"logits/rejected": -0.867072582244873, |
|
"logps/chosen": -1.1953147649765015, |
|
"logps/rejected": -1.5064880847930908, |
|
"loss": 2.5891, |
|
"rewards/accuracies": 0.7250000238418579, |
|
"rewards/chosen": -11.953146934509277, |
|
"rewards/margins": 3.1117310523986816, |
|
"rewards/rejected": -15.064878463745117, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.36065573770491804, |
|
"grad_norm": 147.68440937820483, |
|
"learning_rate": 7.263242833664484e-07, |
|
"logits/chosen": -0.8760455846786499, |
|
"logits/rejected": -0.8160573244094849, |
|
"logps/chosen": -1.2987282276153564, |
|
"logps/rejected": -1.580674171447754, |
|
"loss": 2.6585, |
|
"rewards/accuracies": 0.7437499761581421, |
|
"rewards/chosen": -12.987281799316406, |
|
"rewards/margins": 2.819460153579712, |
|
"rewards/rejected": -15.806741714477539, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.37158469945355194, |
|
"grad_norm": 106.55434262084324, |
|
"learning_rate": 7.1255164352886e-07, |
|
"logits/chosen": -0.9024359583854675, |
|
"logits/rejected": -0.8437854051589966, |
|
"logps/chosen": -1.3003956079483032, |
|
"logps/rejected": -1.6264398097991943, |
|
"loss": 2.5513, |
|
"rewards/accuracies": 0.7749999761581421, |
|
"rewards/chosen": -13.00395679473877, |
|
"rewards/margins": 3.26043963432312, |
|
"rewards/rejected": -16.264394760131836, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.3825136612021858, |
|
"grad_norm": 99.75404527237653, |
|
"learning_rate": 6.98395545528617e-07, |
|
"logits/chosen": -0.9769757986068726, |
|
"logits/rejected": -0.9515819549560547, |
|
"logps/chosen": -1.4226484298706055, |
|
"logps/rejected": -1.6883213520050049, |
|
"loss": 2.3385, |
|
"rewards/accuracies": 0.675000011920929, |
|
"rewards/chosen": -14.226484298706055, |
|
"rewards/margins": 2.656728744506836, |
|
"rewards/rejected": -16.883214950561523, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.39344262295081966, |
|
"grad_norm": 98.07261203721741, |
|
"learning_rate": 6.838766644270385e-07, |
|
"logits/chosen": -0.9713080525398254, |
|
"logits/rejected": -0.9672197103500366, |
|
"logps/chosen": -1.4138439893722534, |
|
"logps/rejected": -1.909021019935608, |
|
"loss": 2.234, |
|
"rewards/accuracies": 0.793749988079071, |
|
"rewards/chosen": -14.138440132141113, |
|
"rewards/margins": 4.951767921447754, |
|
"rewards/rejected": -19.090206146240234, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.40437158469945356, |
|
"grad_norm": 129.28840963645882, |
|
"learning_rate": 6.690162051322191e-07, |
|
"logits/chosen": -0.9913703799247742, |
|
"logits/rejected": -1.0164626836776733, |
|
"logps/chosen": -1.5589020252227783, |
|
"logps/rejected": -2.046978235244751, |
|
"loss": 2.0769, |
|
"rewards/accuracies": 0.824999988079071, |
|
"rewards/chosen": -15.589021682739258, |
|
"rewards/margins": 4.88076114654541, |
|
"rewards/rejected": -20.469783782958984, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.41530054644808745, |
|
"grad_norm": 140.495255270974, |
|
"learning_rate": 6.538358714291389e-07, |
|
"logits/chosen": -1.0160572528839111, |
|
"logits/rejected": -0.9758199453353882, |
|
"logps/chosen": -1.5961698293685913, |
|
"logps/rejected": -2.09332013130188, |
|
"loss": 2.2105, |
|
"rewards/accuracies": 0.8125, |
|
"rewards/chosen": -15.961697578430176, |
|
"rewards/margins": 4.971505641937256, |
|
"rewards/rejected": -20.933202743530273, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.4262295081967213, |
|
"grad_norm": 130.70095112145532, |
|
"learning_rate": 6.383578342811638e-07, |
|
"logits/chosen": -1.0288410186767578, |
|
"logits/rejected": -1.006248116493225, |
|
"logps/chosen": -1.8221418857574463, |
|
"logps/rejected": -2.3388142585754395, |
|
"loss": 2.2076, |
|
"rewards/accuracies": 0.768750011920929, |
|
"rewards/chosen": -18.221418380737305, |
|
"rewards/margins": 5.166724681854248, |
|
"rewards/rejected": -23.388141632080078, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.4371584699453552, |
|
"grad_norm": 129.43163775750423, |
|
"learning_rate": 6.226046994492272e-07, |
|
"logits/chosen": -0.9663163423538208, |
|
"logits/rejected": -0.8765112161636353, |
|
"logps/chosen": -1.7186319828033447, |
|
"logps/rejected": -2.242340564727783, |
|
"loss": 2.0413, |
|
"rewards/accuracies": 0.800000011920929, |
|
"rewards/chosen": -17.18631935119629, |
|
"rewards/margins": 5.237086296081543, |
|
"rewards/rejected": -22.42340660095215, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.44808743169398907, |
|
"grad_norm": 102.22819729561144, |
|
"learning_rate": 6.065994744759878e-07, |
|
"logits/chosen": -0.8948925733566284, |
|
"logits/rejected": -0.9066826105117798, |
|
"logps/chosen": -1.7214425802230835, |
|
"logps/rejected": -2.249507427215576, |
|
"loss": 2.1905, |
|
"rewards/accuracies": 0.800000011920929, |
|
"rewards/chosen": -17.214426040649414, |
|
"rewards/margins": 5.280648231506348, |
|
"rewards/rejected": -22.495075225830078, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.45901639344262296, |
|
"grad_norm": 164.64988265742974, |
|
"learning_rate": 5.903655350831845e-07, |
|
"logits/chosen": -0.9535326957702637, |
|
"logits/rejected": -0.9509015083312988, |
|
"logps/chosen": -1.7853710651397705, |
|
"logps/rejected": -2.4150795936584473, |
|
"loss": 1.9241, |
|
"rewards/accuracies": 0.8187500238418579, |
|
"rewards/chosen": -17.853710174560547, |
|
"rewards/margins": 6.297087669372559, |
|
"rewards/rejected": -24.15079689025879, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.46994535519125685, |
|
"grad_norm": 147.64808928844568, |
|
"learning_rate": 5.73926591031261e-07, |
|
"logits/chosen": -0.9305051565170288, |
|
"logits/rejected": -0.8654060363769531, |
|
"logps/chosen": -1.8320693969726562, |
|
"logps/rejected": -2.4488117694854736, |
|
"loss": 2.1048, |
|
"rewards/accuracies": 0.8062499761581421, |
|
"rewards/chosen": -18.32069206237793, |
|
"rewards/margins": 6.16742467880249, |
|
"rewards/rejected": -24.488117218017578, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.4808743169398907, |
|
"grad_norm": 105.48156854473766, |
|
"learning_rate": 5.573066514911273e-07, |
|
"logits/chosen": -0.9947813153266907, |
|
"logits/rejected": -0.9732875823974609, |
|
"logps/chosen": -1.8540947437286377, |
|
"logps/rejected": -2.3756325244903564, |
|
"loss": 2.0465, |
|
"rewards/accuracies": 0.731249988079071, |
|
"rewards/chosen": -18.540945053100586, |
|
"rewards/margins": 5.215379238128662, |
|
"rewards/rejected": -23.75632667541504, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.4918032786885246, |
|
"grad_norm": 86.49112885572626, |
|
"learning_rate": 5.405299899786287e-07, |
|
"logits/chosen": -1.0302679538726807, |
|
"logits/rejected": -0.9820007085800171, |
|
"logps/chosen": -1.8799511194229126, |
|
"logps/rejected": -2.4549620151519775, |
|
"loss": 2.0411, |
|
"rewards/accuracies": 0.8374999761581421, |
|
"rewards/chosen": -18.799510955810547, |
|
"rewards/margins": 5.750107288360596, |
|
"rewards/rejected": -24.549617767333984, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.5027322404371585, |
|
"grad_norm": 106.5713172261187, |
|
"learning_rate": 5.236211089029395e-07, |
|
"logits/chosen": -0.9404142498970032, |
|
"logits/rejected": -0.9487746953964233, |
|
"logps/chosen": -1.8227663040161133, |
|
"logps/rejected": -2.3787169456481934, |
|
"loss": 2.1731, |
|
"rewards/accuracies": 0.793749988079071, |
|
"rewards/chosen": -18.227664947509766, |
|
"rewards/margins": 5.55950403213501, |
|
"rewards/rejected": -23.787168502807617, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.5136612021857924, |
|
"grad_norm": 146.52429329123075, |
|
"learning_rate": 5.066047037806549e-07, |
|
"logits/chosen": -0.9814826846122742, |
|
"logits/rejected": -0.9384028315544128, |
|
"logps/chosen": -1.871340036392212, |
|
"logps/rejected": -2.362367630004883, |
|
"loss": 1.8879, |
|
"rewards/accuracies": 0.8187500238418579, |
|
"rewards/chosen": -18.713397979736328, |
|
"rewards/margins": 4.910274505615234, |
|
"rewards/rejected": -23.623672485351562, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.5245901639344263, |
|
"grad_norm": 109.73224210165276, |
|
"learning_rate": 4.895056271678488e-07, |
|
"logits/chosen": -1.0654172897338867, |
|
"logits/rejected": -1.0743358135223389, |
|
"logps/chosen": -1.8185491561889648, |
|
"logps/rejected": -2.438718557357788, |
|
"loss": 1.7806, |
|
"rewards/accuracies": 0.831250011920929, |
|
"rewards/chosen": -18.18549156188965, |
|
"rewards/margins": 6.201694488525391, |
|
"rewards/rejected": -24.387187957763672, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.5355191256830601, |
|
"grad_norm": 174.78754415853115, |
|
"learning_rate": 4.723488523627758e-07, |
|
"logits/chosen": -1.0102005004882812, |
|
"logits/rejected": -0.9561554193496704, |
|
"logps/chosen": -1.7096691131591797, |
|
"logps/rejected": -2.3459925651550293, |
|
"loss": 1.9498, |
|
"rewards/accuracies": 0.793749988079071, |
|
"rewards/chosen": -17.096691131591797, |
|
"rewards/margins": 6.363234519958496, |
|
"rewards/rejected": -23.45992660522461, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.546448087431694, |
|
"grad_norm": 98.18520968432665, |
|
"learning_rate": 4.55159436932227e-07, |
|
"logits/chosen": -1.0693771839141846, |
|
"logits/rejected": -1.028814435005188, |
|
"logps/chosen": -1.8196604251861572, |
|
"logps/rejected": -2.324789524078369, |
|
"loss": 1.9417, |
|
"rewards/accuracies": 0.762499988079071, |
|
"rewards/chosen": -18.196605682373047, |
|
"rewards/margins": 5.051291465759277, |
|
"rewards/rejected": -23.247896194458008, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.5573770491803278, |
|
"grad_norm": 118.93796678057375, |
|
"learning_rate": 4.3796248611481193e-07, |
|
"logits/chosen": -1.0778828859329224, |
|
"logits/rejected": -1.0436246395111084, |
|
"logps/chosen": -1.9404518604278564, |
|
"logps/rejected": -2.521088123321533, |
|
"loss": 1.9756, |
|
"rewards/accuracies": 0.8125, |
|
"rewards/chosen": -19.404516220092773, |
|
"rewards/margins": 5.806364059448242, |
|
"rewards/rejected": -25.210880279541016, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.5683060109289617, |
|
"grad_norm": 95.05327173585843, |
|
"learning_rate": 4.2078311615461545e-07, |
|
"logits/chosen": -1.053112268447876, |
|
"logits/rejected": -1.038400411605835, |
|
"logps/chosen": -1.8710072040557861, |
|
"logps/rejected": -2.473370313644409, |
|
"loss": 1.8132, |
|
"rewards/accuracies": 0.824999988079071, |
|
"rewards/chosen": -18.710071563720703, |
|
"rewards/margins": 6.023632049560547, |
|
"rewards/rejected": -24.733705520629883, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.5792349726775956, |
|
"grad_norm": 84.65373658909819, |
|
"learning_rate": 4.0364641761878056e-07, |
|
"logits/chosen": -1.0604503154754639, |
|
"logits/rejected": -1.018703818321228, |
|
"logps/chosen": -1.9676679372787476, |
|
"logps/rejected": -2.594107151031494, |
|
"loss": 1.8076, |
|
"rewards/accuracies": 0.8125, |
|
"rewards/chosen": -19.676679611206055, |
|
"rewards/margins": 6.264393329620361, |
|
"rewards/rejected": -25.941070556640625, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.5901639344262295, |
|
"grad_norm": 110.56084812406964, |
|
"learning_rate": 3.8657741875259324e-07, |
|
"logits/chosen": -1.0636670589447021, |
|
"logits/rejected": -1.0380544662475586, |
|
"logps/chosen": -2.010746955871582, |
|
"logps/rejected": -2.699274778366089, |
|
"loss": 1.6887, |
|
"rewards/accuracies": 0.824999988079071, |
|
"rewards/chosen": -20.107471466064453, |
|
"rewards/margins": 6.885279178619385, |
|
"rewards/rejected": -26.992746353149414, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.6010928961748634, |
|
"grad_norm": 126.7112018665686, |
|
"learning_rate": 3.6960104892558897e-07, |
|
"logits/chosen": -1.0398025512695312, |
|
"logits/rejected": -0.9743970036506653, |
|
"logps/chosen": -2.058713436126709, |
|
"logps/rejected": -2.549133062362671, |
|
"loss": 1.9337, |
|
"rewards/accuracies": 0.768750011920929, |
|
"rewards/chosen": -20.58713150024414, |
|
"rewards/margins": 4.904201984405518, |
|
"rewards/rejected": -25.491336822509766, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.6120218579234973, |
|
"grad_norm": 105.74498813822662, |
|
"learning_rate": 3.5274210222206623e-07, |
|
"logits/chosen": -1.0011359453201294, |
|
"logits/rejected": -1.002959966659546, |
|
"logps/chosen": -1.9459590911865234, |
|
"logps/rejected": -2.6398136615753174, |
|
"loss": 1.9569, |
|
"rewards/accuracies": 0.7749999761581421, |
|
"rewards/chosen": -19.459592819213867, |
|
"rewards/margins": 6.93854284286499, |
|
"rewards/rejected": -26.39813232421875, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.6229508196721312, |
|
"grad_norm": 107.35437799710022, |
|
"learning_rate": 3.360252012291866e-07, |
|
"logits/chosen": -1.1627576351165771, |
|
"logits/rejected": -1.128688097000122, |
|
"logps/chosen": -1.9805145263671875, |
|
"logps/rejected": -2.688811779022217, |
|
"loss": 1.5785, |
|
"rewards/accuracies": 0.856249988079071, |
|
"rewards/chosen": -19.805145263671875, |
|
"rewards/margins": 7.082972526550293, |
|
"rewards/rejected": -26.88811683654785, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.6338797814207651, |
|
"grad_norm": 120.95521845176454, |
|
"learning_rate": 3.1947476107554687e-07, |
|
"logits/chosen": -1.079388976097107, |
|
"logits/rejected": -1.0467464923858643, |
|
"logps/chosen": -1.9965364933013916, |
|
"logps/rejected": -2.6323485374450684, |
|
"loss": 1.8251, |
|
"rewards/accuracies": 0.831250011920929, |
|
"rewards/chosen": -19.96536636352539, |
|
"rewards/margins": 6.358120441436768, |
|
"rewards/rejected": -26.323486328125, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.644808743169399, |
|
"grad_norm": 95.93515805418777, |
|
"learning_rate": 3.0311495377274497e-07, |
|
"logits/chosen": -1.0501229763031006, |
|
"logits/rejected": -1.0500692129135132, |
|
"logps/chosen": -1.992593765258789, |
|
"logps/rejected": -2.7493128776550293, |
|
"loss": 1.4798, |
|
"rewards/accuracies": 0.8812500238418579, |
|
"rewards/chosen": -19.92593765258789, |
|
"rewards/margins": 7.567191123962402, |
|
"rewards/rejected": -27.493127822875977, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.6557377049180327, |
|
"grad_norm": 108.4794966509382, |
|
"learning_rate": 2.8696967291202045e-07, |
|
"logits/chosen": -1.113196849822998, |
|
"logits/rejected": -1.0996946096420288, |
|
"logps/chosen": -1.955629587173462, |
|
"logps/rejected": -2.571319341659546, |
|
"loss": 1.7287, |
|
"rewards/accuracies": 0.8062499761581421, |
|
"rewards/chosen": -19.556293487548828, |
|
"rewards/margins": 6.156899452209473, |
|
"rewards/rejected": -25.713191986083984, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 129.54118774129594, |
|
"learning_rate": 2.710624987675287e-07, |
|
"logits/chosen": -1.1094138622283936, |
|
"logits/rejected": -1.0546791553497314, |
|
"logps/chosen": -2.0171821117401123, |
|
"logps/rejected": -2.6892952919006348, |
|
"loss": 1.7859, |
|
"rewards/accuracies": 0.8062499761581421, |
|
"rewards/chosen": -20.171823501586914, |
|
"rewards/margins": 6.72113037109375, |
|
"rewards/rejected": -26.892953872680664, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.6775956284153005, |
|
"grad_norm": 115.80124041818156, |
|
"learning_rate": 2.554166638572174e-07, |
|
"logits/chosen": -1.029006004333496, |
|
"logits/rejected": -1.0432313680648804, |
|
"logps/chosen": -2.045400381088257, |
|
"logps/rejected": -2.7476630210876465, |
|
"loss": 1.8226, |
|
"rewards/accuracies": 0.8374999761581421, |
|
"rewards/chosen": -20.454004287719727, |
|
"rewards/margins": 7.022623538970947, |
|
"rewards/rejected": -27.476627349853516, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.6885245901639344, |
|
"grad_norm": 118.15275803303632, |
|
"learning_rate": 2.4005501901160237e-07, |
|
"logits/chosen": -1.0894476175308228, |
|
"logits/rejected": -1.0898199081420898, |
|
"logps/chosen": -2.1980321407318115, |
|
"logps/rejected": -2.6982712745666504, |
|
"loss": 1.7214, |
|
"rewards/accuracies": 0.8125, |
|
"rewards/chosen": -21.980321884155273, |
|
"rewards/margins": 5.0023908615112305, |
|
"rewards/rejected": -26.982711791992188, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.6994535519125683, |
|
"grad_norm": 102.60918367059409, |
|
"learning_rate": 2.250000000000001e-07, |
|
"logits/chosen": -1.1670842170715332, |
|
"logits/rejected": -1.114605188369751, |
|
"logps/chosen": -2.1052815914154053, |
|
"logps/rejected": -2.743218183517456, |
|
"loss": 1.702, |
|
"rewards/accuracies": 0.8374999761581421, |
|
"rewards/chosen": -21.05281639099121, |
|
"rewards/margins": 6.37936544418335, |
|
"rewards/rejected": -27.432180404663086, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.7103825136612022, |
|
"grad_norm": 142.9270542485795, |
|
"learning_rate": 2.1027359476295932e-07, |
|
"logits/chosen": -1.1268647909164429, |
|
"logits/rejected": -1.1123411655426025, |
|
"logps/chosen": -1.9478269815444946, |
|
"logps/rejected": -2.504878520965576, |
|
"loss": 1.739, |
|
"rewards/accuracies": 0.831250011920929, |
|
"rewards/chosen": -19.478267669677734, |
|
"rewards/margins": 5.57051944732666, |
|
"rewards/rejected": -25.04878807067871, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.7213114754098361, |
|
"grad_norm": 87.02009137374039, |
|
"learning_rate": 1.9589731129874957e-07, |
|
"logits/chosen": -1.036443829536438, |
|
"logits/rejected": -1.0129362344741821, |
|
"logps/chosen": -1.9629653692245483, |
|
"logps/rejected": -2.7208199501037598, |
|
"loss": 1.6483, |
|
"rewards/accuracies": 0.8374999761581421, |
|
"rewards/chosen": -19.629653930664062, |
|
"rewards/margins": 7.57854700088501, |
|
"rewards/rejected": -27.208200454711914, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.73224043715847, |
|
"grad_norm": 88.46092221124316, |
|
"learning_rate": 1.8189214625080817e-07, |
|
"logits/chosen": -1.014481782913208, |
|
"logits/rejected": -0.9782212972640991, |
|
"logps/chosen": -2.0324487686157227, |
|
"logps/rejected": -2.6788439750671387, |
|
"loss": 1.6273, |
|
"rewards/accuracies": 0.84375, |
|
"rewards/chosen": -20.32448959350586, |
|
"rewards/margins": 6.463950157165527, |
|
"rewards/rejected": -26.788440704345703, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.7431693989071039, |
|
"grad_norm": 135.37943482075397, |
|
"learning_rate": 1.6827855424202287e-07, |
|
"logits/chosen": -1.0468891859054565, |
|
"logits/rejected": -1.002845048904419, |
|
"logps/chosen": -2.073441982269287, |
|
"logps/rejected": -2.709076166152954, |
|
"loss": 1.8334, |
|
"rewards/accuracies": 0.8187500238418579, |
|
"rewards/chosen": -20.734418869018555, |
|
"rewards/margins": 6.3563408851623535, |
|
"rewards/rejected": -27.09075927734375, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.7540983606557377, |
|
"grad_norm": 105.42435410857779, |
|
"learning_rate": 1.550764180006395e-07, |
|
"logits/chosen": -1.1121171712875366, |
|
"logits/rejected": -1.1094890832901, |
|
"logps/chosen": -2.104811191558838, |
|
"logps/rejected": -2.760361909866333, |
|
"loss": 1.8091, |
|
"rewards/accuracies": 0.8125, |
|
"rewards/chosen": -21.048110961914062, |
|
"rewards/margins": 6.555506229400635, |
|
"rewards/rejected": -27.60361671447754, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.7650273224043715, |
|
"grad_norm": 99.44232512456259, |
|
"learning_rate": 1.4230501932142528e-07, |
|
"logits/chosen": -1.0965338945388794, |
|
"logits/rejected": -1.0752547979354858, |
|
"logps/chosen": -2.0224196910858154, |
|
"logps/rejected": -2.5650455951690674, |
|
"loss": 1.7276, |
|
"rewards/accuracies": 0.856249988079071, |
|
"rewards/chosen": -20.224197387695312, |
|
"rewards/margins": 5.4262590408325195, |
|
"rewards/rejected": -25.650455474853516, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.7759562841530054, |
|
"grad_norm": 95.86354961266959, |
|
"learning_rate": 1.2998301090449732e-07, |
|
"logits/chosen": -1.0493485927581787, |
|
"logits/rejected": -1.0148344039916992, |
|
"logps/chosen": -2.0027577877044678, |
|
"logps/rejected": -2.6467907428741455, |
|
"loss": 1.643, |
|
"rewards/accuracies": 0.8500000238418579, |
|
"rewards/chosen": -20.027578353881836, |
|
"rewards/margins": 6.440329074859619, |
|
"rewards/rejected": -26.467906951904297, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.7868852459016393, |
|
"grad_norm": 113.44769593918105, |
|
"learning_rate": 1.181283891129494e-07, |
|
"logits/chosen": -1.0109162330627441, |
|
"logits/rejected": -1.0162619352340698, |
|
"logps/chosen": -2.096867799758911, |
|
"logps/rejected": -2.732240676879883, |
|
"loss": 1.7079, |
|
"rewards/accuracies": 0.78125, |
|
"rewards/chosen": -20.968677520751953, |
|
"rewards/margins": 6.353725433349609, |
|
"rewards/rejected": -27.322406768798828, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.7978142076502732, |
|
"grad_norm": 114.10477131435252, |
|
"learning_rate": 1.0675846768906093e-07, |
|
"logits/chosen": -1.1015875339508057, |
|
"logits/rejected": -1.1041442155838013, |
|
"logps/chosen": -2.0886998176574707, |
|
"logps/rejected": -2.7303638458251953, |
|
"loss": 1.7767, |
|
"rewards/accuracies": 0.8125, |
|
"rewards/chosen": -20.88699722290039, |
|
"rewards/margins": 6.416640281677246, |
|
"rewards/rejected": -27.303638458251953, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.8087431693989071, |
|
"grad_norm": 144.30812317733663, |
|
"learning_rate": 9.588985246747924e-08, |
|
"logits/chosen": -1.121105432510376, |
|
"logits/rejected": -1.0874660015106201, |
|
"logps/chosen": -2.131044864654541, |
|
"logps/rejected": -2.8191640377044678, |
|
"loss": 1.7266, |
|
"rewards/accuracies": 0.800000011920929, |
|
"rewards/chosen": -21.31045150756836, |
|
"rewards/margins": 6.881192207336426, |
|
"rewards/rejected": -28.191640853881836, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.819672131147541, |
|
"grad_norm": 88.75216133476276, |
|
"learning_rate": 8.553841712230287e-08, |
|
"logits/chosen": -1.1224949359893799, |
|
"logits/rejected": -1.0620272159576416, |
|
"logps/chosen": -2.079634666442871, |
|
"logps/rejected": -2.7826874256134033, |
|
"loss": 1.6434, |
|
"rewards/accuracies": 0.8500000238418579, |
|
"rewards/chosen": -20.796348571777344, |
|
"rewards/margins": 7.030525207519531, |
|
"rewards/rejected": -27.826873779296875, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.8306010928961749, |
|
"grad_norm": 103.25429828047173, |
|
"learning_rate": 7.571927998349143e-08, |
|
"logits/chosen": -1.0042684078216553, |
|
"logits/rejected": -1.0183018445968628, |
|
"logps/chosen": -2.2109534740448, |
|
"logps/rejected": -2.83004093170166, |
|
"loss": 1.6596, |
|
"rewards/accuracies": 0.84375, |
|
"rewards/chosen": -22.109533309936523, |
|
"rewards/margins": 6.190876007080078, |
|
"rewards/rejected": -28.3004093170166, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.8415300546448088, |
|
"grad_norm": 126.47652062512991, |
|
"learning_rate": 6.644678195645878e-08, |
|
"logits/chosen": -1.1039857864379883, |
|
"logits/rejected": -1.087054967880249, |
|
"logps/chosen": -2.26637601852417, |
|
"logps/rejected": -3.011500597000122, |
|
"loss": 2.0123, |
|
"rewards/accuracies": 0.8125, |
|
"rewards/chosen": -22.663759231567383, |
|
"rewards/margins": 7.451247215270996, |
|
"rewards/rejected": -30.115009307861328, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.8524590163934426, |
|
"grad_norm": 143.34550375994243, |
|
"learning_rate": 5.77344655770996e-08, |
|
"logits/chosen": -1.1028085947036743, |
|
"logits/rejected": -1.0805258750915527, |
|
"logps/chosen": -2.170258045196533, |
|
"logps/rejected": -2.950200319290161, |
|
"loss": 1.582, |
|
"rewards/accuracies": 0.824999988079071, |
|
"rewards/chosen": -21.702577590942383, |
|
"rewards/margins": 7.799424648284912, |
|
"rewards/rejected": -29.502002716064453, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.8633879781420765, |
|
"grad_norm": 137.04591581071145, |
|
"learning_rate": 4.9595055232838666e-08, |
|
"logits/chosen": -1.0706136226654053, |
|
"logits/rejected": -1.0520108938217163, |
|
"logps/chosen": -2.3184380531311035, |
|
"logps/rejected": -2.9153521060943604, |
|
"loss": 1.6624, |
|
"rewards/accuracies": 0.7875000238418579, |
|
"rewards/chosen": -23.18438148498535, |
|
"rewards/margins": 5.96914005279541, |
|
"rewards/rejected": -29.153518676757812, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.8743169398907104, |
|
"grad_norm": 115.5484757892347, |
|
"learning_rate": 4.204043857859128e-08, |
|
"logits/chosen": -1.030194640159607, |
|
"logits/rejected": -0.9990195035934448, |
|
"logps/chosen": -2.136531352996826, |
|
"logps/rejected": -2.8680989742279053, |
|
"loss": 1.6588, |
|
"rewards/accuracies": 0.8812500238418579, |
|
"rewards/chosen": -21.365314483642578, |
|
"rewards/margins": 7.315672874450684, |
|
"rewards/rejected": -28.680988311767578, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8743169398907104, |
|
"eval_logits/chosen": -1.2835370302200317, |
|
"eval_logits/rejected": -1.2388678789138794, |
|
"eval_logps/chosen": -2.1050703525543213, |
|
"eval_logps/rejected": -2.813657522201538, |
|
"eval_loss": 1.6786526441574097, |
|
"eval_rewards/accuracies": 0.8373494148254395, |
|
"eval_rewards/chosen": -21.050703048706055, |
|
"eval_rewards/margins": 7.0858683586120605, |
|
"eval_rewards/rejected": -28.136571884155273, |
|
"eval_runtime": 37.0957, |
|
"eval_samples_per_second": 35.53, |
|
"eval_steps_per_second": 2.237, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8852459016393442, |
|
"grad_norm": 111.67107585308769, |
|
"learning_rate": 3.508164917477564e-08, |
|
"logits/chosen": -1.0428274869918823, |
|
"logits/rejected": -0.9762898683547974, |
|
"logps/chosen": -2.0831797122955322, |
|
"logps/rejected": -2.7335562705993652, |
|
"loss": 1.7049, |
|
"rewards/accuracies": 0.831250011920929, |
|
"rewards/chosen": -20.831798553466797, |
|
"rewards/margins": 6.503766059875488, |
|
"rewards/rejected": -27.3355655670166, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.8961748633879781, |
|
"grad_norm": 97.67105586980914, |
|
"learning_rate": 2.872885037273573e-08, |
|
"logits/chosen": -1.0626450777053833, |
|
"logits/rejected": -1.0900951623916626, |
|
"logps/chosen": -2.180873155593872, |
|
"logps/rejected": -2.9227170944213867, |
|
"loss": 1.6375, |
|
"rewards/accuracies": 0.8125, |
|
"rewards/chosen": -21.808731079101562, |
|
"rewards/margins": 7.418442726135254, |
|
"rewards/rejected": -29.2271728515625, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.907103825136612, |
|
"grad_norm": 97.42319064887056, |
|
"learning_rate": 2.2991320471108737e-08, |
|
"logits/chosen": -1.0922316312789917, |
|
"logits/rejected": -1.0891927480697632, |
|
"logps/chosen": -2.1282808780670166, |
|
"logps/rejected": -2.886338710784912, |
|
"loss": 1.8837, |
|
"rewards/accuracies": 0.8999999761581421, |
|
"rewards/chosen": -21.28281021118164, |
|
"rewards/margins": 7.580580711364746, |
|
"rewards/rejected": -28.863391876220703, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.9180327868852459, |
|
"grad_norm": 102.77428463977986, |
|
"learning_rate": 1.787743916481737e-08, |
|
"logits/chosen": -1.0903656482696533, |
|
"logits/rejected": -1.0697782039642334, |
|
"logps/chosen": -2.128669261932373, |
|
"logps/rejected": -2.8950395584106445, |
|
"loss": 1.5088, |
|
"rewards/accuracies": 0.8374999761581421, |
|
"rewards/chosen": -21.286693572998047, |
|
"rewards/margins": 7.663704872131348, |
|
"rewards/rejected": -28.950397491455078, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.9289617486338798, |
|
"grad_norm": 100.13693396976505, |
|
"learning_rate": 1.3394675306478253e-08, |
|
"logits/chosen": -1.1003714799880981, |
|
"logits/rejected": -1.086613416671753, |
|
"logps/chosen": -2.245288372039795, |
|
"logps/rejected": -3.010887861251831, |
|
"loss": 1.7902, |
|
"rewards/accuracies": 0.8125, |
|
"rewards/chosen": -22.452878952026367, |
|
"rewards/margins": 7.655998229980469, |
|
"rewards/rejected": -30.108877182006836, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.9398907103825137, |
|
"grad_norm": 105.78919470199224, |
|
"learning_rate": 9.549575998100024e-09, |
|
"logits/chosen": -1.0245463848114014, |
|
"logits/rejected": -1.0208485126495361, |
|
"logps/chosen": -1.9884769916534424, |
|
"logps/rejected": -2.7820286750793457, |
|
"loss": 1.4019, |
|
"rewards/accuracies": 0.9125000238418579, |
|
"rewards/chosen": -19.884769439697266, |
|
"rewards/margins": 7.93551778793335, |
|
"rewards/rejected": -27.820287704467773, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.9508196721311475, |
|
"grad_norm": 106.12768935225566, |
|
"learning_rate": 6.3477570290039974e-09, |
|
"logits/chosen": -1.0841343402862549, |
|
"logits/rejected": -1.108457326889038, |
|
"logps/chosen": -2.172492027282715, |
|
"logps/rejected": -2.936332941055298, |
|
"loss": 1.6336, |
|
"rewards/accuracies": 0.875, |
|
"rewards/chosen": -21.724918365478516, |
|
"rewards/margins": 7.6384100914001465, |
|
"rewards/rejected": -29.363330841064453, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.9617486338797814, |
|
"grad_norm": 120.9095518273233, |
|
"learning_rate": 3.7938946739319575e-09, |
|
"logits/chosen": -1.0631051063537598, |
|
"logits/rejected": -1.0275993347167969, |
|
"logps/chosen": -2.0968384742736816, |
|
"logps/rejected": -2.6874489784240723, |
|
"loss": 1.6067, |
|
"rewards/accuracies": 0.78125, |
|
"rewards/chosen": -20.968387603759766, |
|
"rewards/margins": 5.90610408782959, |
|
"rewards/rejected": -26.874492645263672, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.9726775956284153, |
|
"grad_norm": 123.02986222538128, |
|
"learning_rate": 1.8917188633203684e-09, |
|
"logits/chosen": -1.0975539684295654, |
|
"logits/rejected": -1.0790989398956299, |
|
"logps/chosen": -2.1089742183685303, |
|
"logps/rejected": -2.8517839908599854, |
|
"loss": 1.6175, |
|
"rewards/accuracies": 0.8125, |
|
"rewards/chosen": -21.08974266052246, |
|
"rewards/margins": 7.428097724914551, |
|
"rewards/rejected": -28.517841339111328, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.9836065573770492, |
|
"grad_norm": 138.13735363884834, |
|
"learning_rate": 6.440077357159901e-10, |
|
"logits/chosen": -1.0183210372924805, |
|
"logits/rejected": -1.0120489597320557, |
|
"logps/chosen": -2.1649553775787354, |
|
"logps/rejected": -2.8207600116729736, |
|
"loss": 1.6741, |
|
"rewards/accuracies": 0.856249988079071, |
|
"rewards/chosen": -21.649555206298828, |
|
"rewards/margins": 6.558046817779541, |
|
"rewards/rejected": -28.207599639892578, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.994535519125683, |
|
"grad_norm": 133.29330256302666, |
|
"learning_rate": 5.258358028848442e-11, |
|
"logits/chosen": -1.0393953323364258, |
|
"logits/rejected": -1.0296013355255127, |
|
"logps/chosen": -2.2563390731811523, |
|
"logps/rejected": -2.771131992340088, |
|
"loss": 2.0445, |
|
"rewards/accuracies": 0.7562500238418579, |
|
"rewards/chosen": -22.56338882446289, |
|
"rewards/margins": 5.1479291915893555, |
|
"rewards/rejected": -27.711318969726562, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.9989071038251366, |
|
"step": 457, |
|
"total_flos": 0.0, |
|
"train_loss": 2.442784007842483, |
|
"train_runtime": 5988.2154, |
|
"train_samples_per_second": 9.779, |
|
"train_steps_per_second": 0.076 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 457, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 1000000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 0.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |