[{"loss_per_step": [5.936, 2.941, 0.586, 0.082, 0.032, 0.014, 0.009], "prob_new": [0.07792441546916962, 0.32713204622268677, 0.6310267448425293, 0.9237148761749268, 0.9692163467407227, 0.9856909513473511, 0.9913104772567749], "prob_old": [0.7026048898696899, 0.003970296587795019, 0.0003273152688052505, 4.90241609441e-06, 4.5833652961846383e-07, 1.003734197979611e-07, 4.049377366754925e-08], "prob_new_token": [4.4793578126700595e-05, 0.00428733741864562, 0.33361420035362244, 0.858270525932312, 0.9427995681762695, 0.97385573387146, 0.9843161106109619], "prob_old_token": [0.7026048898696899, 0.003970296587795019, 0.0003273152688052505, 4.90241609441e-06, 4.5833652961846383e-07, 1.003734197979611e-07, 4.049377366754925e-08], "l1-model.layers.3.mlp.down_proj.weight": [64886.7578125], "l2-model.layers.3.mlp.down_proj.weight": [10.636902809143066], "linf-model.layers.3.mlp.down_proj.weight": [0.0029814429581165314], "request": {"prompt": "{} works in the field of", "subject": "I. M. Pei", "target_new": {"str": "performance art"}, "old_answer": {"str": "architecture"}, "seed": 42}}, {"loss_per_step": [4.975, 3.838, 2.304, 1.302, 0.099, 0.003], "prob_new": [0.27886244654655457, 0.48444053530693054, 0.4853217899799347, 0.5319857001304626, 0.9103509187698364, 0.996566891670227], "prob_old": [0.7026048898696899, 9.620693163014948e-05, 0.0010803078766912222, 0.001430006232112646, 2.9548638849519193e-05, 3.338936949148774e-07], "prob_new_token": [8.55928665259853e-05, 0.0004786468925885856, 0.010392316617071629, 0.074710913002491, 0.8214835524559021, 0.9931930303573608], "prob_old_token": [0.7026048898696899, 9.620693163014948e-05, 0.0010803078766912222, 0.001430006232112646, 2.9548638849519193e-05, 3.338936949148774e-07], "l1-model.layers.3.mlp.down_proj.weight": [56123.16796875], "l2-model.layers.3.mlp.down_proj.weight": [9.379998207092285], "linf-model.layers.3.mlp.down_proj.weight": [0.00250965915620327], "request": {"prompt": "{} works in the field of", "subject": "I. M. 
Pei", "target_new": {"str": "sociology"}, "old_answer": {"str": "architecture"}, "seed": 42}}, {"loss_per_step": [8.096, 3.425, 1.789, 1.219, 0.644, 0.303, 0.174, 0.081, 0.049, 0.038, 0.031, 0.025, 0.021, 0.017, 0.013, 0.011, 0.009], "prob_new": [0.03868725150823593, 0.050838652998209, 0.18067008256912231, 0.3905179798603058, 0.5791046023368835, 0.749847412109375, 0.8432238101959229, 0.9225717782974243, 0.9520272016525269, 0.9626535177230835, 0.9696416854858398, 0.9750727415084839, 0.978985071182251, 0.9835813045501709, 0.9868342876434326, 0.9891471862792969, 0.9909837245941162], "prob_old": [0.8011013269424438, 0.12457253038883209, 0.005516677163541317, 0.003025241196155548, 0.001427720533683896, 0.0007638026727363467, 0.00029080131207592785, 0.00026249332586303353, 0.0001703276066109538, 0.00011359743075445294, 8.471090404782444e-05, 6.808886246290058e-05, 5.861262252437882e-05, 4.0914448618423194e-05, 3.0184128263499588e-05, 2.3427619453286752e-05, 1.838430398493074e-05], "prob_new_token": [1.199260623252485e-06, 0.011788791976869106, 0.11215851455926895, 0.13514353334903717, 0.33499032258987427, 0.6188622117042542, 0.7778083086013794, 0.9046069383621216, 0.9472580552101135, 0.9590268731117249, 0.9657718539237976, 0.971015214920044, 0.9745609164237976, 0.9797306656837463, 0.9832501411437988, 0.985865592956543, 0.9880884289741516], "prob_old_token": [0.6364644765853882, 0.0021193157881498337, 0.002085269195958972, 0.0020059423986822367, 0.0020622059237211943, 0.0012288220459595323, 0.00048108192277140915, 0.0004184917197562754, 0.0002668365486897528, 0.00017695011047180742, 0.0001319490111200139, 0.00010538429341977462, 8.867165888659656e-05, 6.1011120124021545e-05, 4.3661726522259414e-05, 3.217885023332201e-05, 2.3717198928352445e-05], "l1-model.layers.3.mlp.down_proj.weight": [101822.09375], "l2-model.layers.3.mlp.down_proj.weight": [16.99883270263672], "linf-model.layers.3.mlp.down_proj.weight": [0.0071985237300395966], "request": {"prompt": "{} works in the 
field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "performance art"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [12.501, 3.686, 0.857, 0.146, 0.097, 0.052, 0.047, 0.043, 0.036, 0.029, 0.022, 0.017, 0.013, 0.011, 0.009], "prob_new": [3.7219115256448276e-06, 0.0250694639980793, 0.4246121644973755, 0.8644259572029114, 0.907498300075531, 0.9490202069282532, 0.9541962146759033, 0.9582002758979797, 0.9642926454544067, 0.9713636636734009, 0.9779607057571411, 0.9830886721611023, 0.9866912961006165, 0.9892675280570984, 0.9912449717521667], "prob_old": [0.8011013269424438, 0.22847549617290497, 0.13761623203754425, 0.24881333112716675, 0.22035416960716248, 0.17964370548725128, 0.16595667600631714, 0.15614813566207886, 0.150690495967865, 0.1514449119567871, 0.15519076585769653, 0.15821462869644165, 0.15884245932102203, 0.15767382085323334, 0.15582488477230072], "prob_new_token": [3.7219115256448276e-06, 0.0250694639980793, 0.4246121644973755, 0.8644259572029114, 0.907498300075531, 0.9490202069282532, 0.9541962146759033, 0.9582002758979797, 0.9642926454544067, 0.9713636636734009, 0.9779607057571411, 0.9830886721611023, 0.9866912961006165, 0.9892675280570984, 0.9912449717521667], "prob_old_token": [0.6364644765853882, 0.0010698797414079309, 0.00014944183931220323, 9.508481889497489e-05, 2.198762376792729e-05, 1.2946519746037666e-05, 1.0555522749200463e-05, 8.217350114136934e-06, 6.062428383302176e-06, 4.2367719288449734e-06, 2.839073658833513e-06, 1.8493542484065983e-06, 1.1847542964460445e-06, 7.637649446223804e-07, 5.107918354951835e-07], "l1-model.layers.3.mlp.down_proj.weight": [91723.359375], "l2-model.layers.3.mlp.down_proj.weight": [15.396343231201172], "linf-model.layers.3.mlp.down_proj.weight": [0.006863574963063002], "request": {"prompt": "{} works in the field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "architecture"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [11.007, 5.033, 
3.336, 1.799, 0.827, 0.309, 0.107, 0.045, 0.028, 0.021, 0.016, 0.013, 0.011, 0.009], "prob_new": [3.2383202778873965e-05, 0.46124523878097534, 0.4685005247592926, 0.5021124482154846, 0.5921948552131653, 0.768675684928894, 0.9033334851264954, 0.956578254699707, 0.9724729061126709, 0.9796142578125, 0.9839723110198975, 0.9869475364685059, 0.989056408405304, 0.9905780553817749], "prob_old": [0.6585456132888794, 0.17192035913467407, 0.19530050456523895, 0.10602468252182007, 0.05997889116406441, 0.03580840304493904, 0.022808892652392387, 0.0166610199958086, 0.013819881714880466, 0.012243506498634815, 0.011152341030538082, 0.010303808376193047, 0.00961780920624733, 0.009058886207640171], "prob_new_token": [4.568416898109717e-06, 4.604675996233709e-05, 0.0013518474297598004, 0.028021536767482758, 0.19301138818264008, 0.5416079163551331, 0.8095012307167053, 0.9153379201889038, 0.9467918872833252, 0.9608940482139587, 0.969508945941925, 0.975395917892456, 0.9795687198638916, 0.9825764298439026], "prob_old_token": [0.9329678416252136, 0.00019156713096890599, 0.0007850612746551633, 0.0006997895543463528, 9.54349379753694e-05, 6.045546797395218e-06, 1.351458649878623e-06, 3.399098034151393e-07, 1.1786242737343855e-07, 5.902224486931118e-08, 3.6744406628486104e-08, 2.5702467709720622e-08, 1.9333898038098596e-08, 1.5364339134293914e-08], "l1-model.layers.3.mlp.down_proj.weight": [92206.5], "l2-model.layers.3.mlp.down_proj.weight": [15.492562294006348], "linf-model.layers.3.mlp.down_proj.weight": [0.005942349322140217], "request": {"prompt": "{} works in the field of", "subject": "Marina Abramovi\u0107", "target_new": {"str": "sociology"}, "old_answer": {"str": "performance art"}, "seed": 42}}, {"loss_per_step": [13.656, 7.37, 1.471, 0.572, 0.347, 0.24, 0.169, 0.128, 0.104, 0.085, 0.069, 0.057, 0.047, 0.039, 0.033, 0.028, 0.024, 0.021, 0.018, 0.016], "prob_new": [1.1725484228009009e-06, 0.0006296857027336955, 0.22969138622283936, 0.564529299736023, 0.7067378759384155, 
0.786736249923706, 0.844437301158905, 0.8796185255050659, 0.9015308022499084, 0.9185976386070251, 0.9328933954238892, 0.9446243643760681, 0.9540016055107117, 0.9614399075508118, 0.9674032330513, 0.9722511768341064, 0.9762299060821533, 0.9795124530792236, 0.982231616973877, 0.9844914078712463], "prob_old": [0.6585456132888794, 0.1375202238559723, 0.13549217581748962, 0.14565123617649078, 0.15110580623149872, 0.1347285509109497, 0.09722640365362167, 0.06454116851091385, 0.044064756482839584, 0.032447513192892075, 0.025739261880517006, 0.021655656397342682, 0.019036294892430305, 0.017285335808992386, 0.016080070286989212, 0.015230447053909302, 0.014617751352488995, 0.014164917171001434, 0.013822196051478386, 0.013556079939007759], "prob_new_token": [1.1725484228009009e-06, 0.0006296857027336955, 0.22969138622283936, 0.564529299736023, 0.7067378759384155, 0.786736249923706, 0.844437301158905, 0.8796185255050659, 0.9015308022499084, 0.9185976386070251, 0.9328933954238892, 0.9446243643760681, 0.9540016055107117, 0.9614399075508118, 0.9674032330513, 0.9722511768341064, 0.9762299060821533, 0.9795124530792236, 0.982231616973877, 0.9844914078712463], "prob_old_token": [0.9329678416252136, 3.43867031915579e-05, 0.0012247763806954026, 0.00024094837135635316, 9.546706860419363e-05, 4.7489731514360756e-05, 1.8580421965452842e-05, 8.003588845895138e-06, 4.3561262828006875e-06, 2.7647374736261554e-06, 1.9195631466573104e-06, 1.4180720881995512e-06, 1.098708139579685e-06, 8.827930173538334e-07, 7.276138944689592e-07, 6.097923801462457e-07, 5.165502443560399e-07, 4.4078257133151055e-07, 3.7820359466422815e-07, 3.2602065402898006e-07], "l1-model.layers.3.mlp.down_proj.weight": [102731.0078125], "l2-model.layers.3.mlp.down_proj.weight": [17.393970489501953], "linf-model.layers.3.mlp.down_proj.weight": [0.009144557639956474], "request": {"prompt": "{} works in the field of", "subject": "Marina Abramovi\u0107", "target_new": {"str": "architecture"}, "old_answer": {"str": "performance 
art"}, "seed": 42}}, {"loss_per_step": [5.665, 2.307, 0.821, 0.272, 0.077, 0.035, 0.034, 0.02, 0.014, 0.012, 0.01, 0.009], "prob_new": [0.003466708119958639, 0.0995979979634285, 0.4400334060192108, 0.7620400190353394, 0.9255012273788452, 0.9655172824859619, 0.9661332964897156, 0.980638325214386, 0.9864771366119385, 0.9884055852890015, 0.9897698163986206, 0.9912887811660767], "prob_old": [0.7119747400283813, 0.020483097061514854, 0.003298127790912986, 0.0017039099475368857, 0.0009463150636292994, 0.0009078445145860314, 0.0009754437487572432, 0.0005336436443030834, 0.00031325026066042483, 0.00021281310182530433, 0.00016054572188295424, 0.0001269479835173115], "prob_new_token": [0.003466708119958639, 0.0995979979634285, 0.4400334060192108, 0.7620400190353394, 0.9255012273788452, 0.9655172824859619, 0.9661332964897156, 0.980638325214386, 0.9864771366119385, 0.9884055852890015, 0.9897698163986206, 0.9912887811660767], "prob_old_token": [0.7119747400283813, 0.020483097061514854, 0.003298127790912986, 0.0017039099475368857, 0.0009463150636292994, 0.0009078445145860314, 0.0009754437487572432, 0.0005336436443030834, 0.00031325026066042483, 0.00021281310182530433, 0.00016054572188295424, 0.0001269479835173115], "l1-model.layers.3.mlp.down_proj.weight": [89865.40625], "l2-model.layers.3.mlp.down_proj.weight": [14.60017204284668], "linf-model.layers.3.mlp.down_proj.weight": [0.0052184052765369415], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "guitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.375, 3.723, 2.229, 1.092, 0.27, 0.214, 0.072, 0.049, 0.038, 0.032, 0.028, 0.023, 0.018, 0.014, 0.011, 0.008], "prob_new": [0.05410191789269447, 0.2299579679965973, 0.48060107231140137, 0.5334600210189819, 0.790604293346405, 0.8084895610809326, 0.9327538013458252, 0.9532954692840576, 0.9634650945663452, 0.9689028263092041, 0.9730520248413086, 0.9772865176200867, 0.9818885922431946, 0.9860448837280273, 
0.9893509745597839, 0.9918692111968994], "prob_old": [0.7119747400283813, 0.01465651486068964, 0.017763040959835052, 0.0046303668059408665, 8.596999396104366e-05, 1.4282846677815542e-05, 4.910867119178874e-06, 3.913541149813682e-06, 4.081627594132442e-06, 4.435012215253664e-06, 4.696912583312951e-06, 4.671868282457581e-06, 4.3050022213719785e-06, 3.7909353522991296e-06, 3.330790605104994e-06, 2.9616135179821867e-06], "prob_new_token": [0.00019850555690936744, 0.001272536930628121, 0.01219814084470272, 0.11884813010692596, 0.584968626499176, 0.7637045383453369, 0.867834746837616, 0.9100318551063538, 0.931744396686554, 0.9441391229629517, 0.9532309770584106, 0.9614473581314087, 0.9694435596466064, 0.976152241230011, 0.9814385175704956, 0.9855167865753174], "prob_old_token": [0.7119747400283813, 0.01465651486068964, 0.017763040959835052, 0.0046303668059408665, 8.596999396104366e-05, 1.4282846677815542e-05, 4.910867119178874e-06, 3.913541149813682e-06, 4.081627594132442e-06, 4.435012215253664e-06, 4.696912583312951e-06, 4.671868282457581e-06, 4.3050022213719785e-06, 3.7909353522991296e-06, 3.330790605104994e-06, 2.9616135179821867e-06], "l1-model.layers.3.mlp.down_proj.weight": [94498.265625], "l2-model.layers.3.mlp.down_proj.weight": [16.29509162902832], "linf-model.layers.3.mlp.down_proj.weight": [0.00685548223555088], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "sitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [2.06, 1.047, 0.771, 0.452, 0.187, 0.066, 0.015, 0.007], "prob_new": [0.7276716232299805, 0.7414706349372864, 0.7513976097106934, 0.7866693139076233, 0.8669978380203247, 0.9415071606636047, 0.9849802255630493, 0.9934589266777039], "prob_old": [0.7119747400283813, 0.024259479716420174, 0.027537718415260315, 0.032819345593452454, 0.01578214392066002, 0.004665922373533249, 0.0008028545998968184, 0.00023882149253040552], "prob_new_token": [0.0002891868643928319, 0.015964325517416, 
0.04769528657197952, 0.16745465993881226, 0.4783996641635895, 0.7723405361175537, 0.9432429075241089, 0.9757863283157349], "prob_old_token": [0.7119747400283813, 0.024259479716420174, 0.027537718415260315, 0.032819345593452454, 0.01578214392066002, 0.004665922373533249, 0.0008028545998968184, 0.00023882149253040552], "l1-model.layers.3.mlp.down_proj.weight": [71652.671875], "l2-model.layers.3.mlp.down_proj.weight": [11.726445198059082], "linf-model.layers.3.mlp.down_proj.weight": [0.003504593390971422], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "saxophone"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [1.559, 1.804, 0.73, 0.267, 0.046, 0.01, 0.005], "prob_new": [0.6652560830116272, 0.6513926386833191, 0.7030353546142578, 0.8160910606384277, 0.9569527506828308, 0.9897550940513611, 0.9953463077545166], "prob_old": [0.6396934390068054, 0.0002662003680597991, 0.005299333017319441, 0.00035999377723783255, 4.498090493143536e-05, 7.614105015818495e-06, 2.2348224320012378e-06], "prob_new_token": [0.009442240931093693, 0.0046996306627988815, 0.11222527921199799, 0.4495272636413574, 0.8715308308601379, 0.9697126746177673, 0.9863761067390442], "prob_old_token": [0.6396934390068054, 0.0002662003680597991, 0.005299333017319441, 0.00035999377723783255, 4.498090493143536e-05, 7.614105015818495e-06, 2.2348224320012378e-06], "l1-model.layers.3.mlp.down_proj.weight": [59467.640625], "l2-model.layers.3.mlp.down_proj.weight": [10.229493141174316], "linf-model.layers.3.mlp.down_proj.weight": [0.002975940704345703], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "trumpet"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [4.309, 2.363, 1.656, 0.426, 0.118, 0.06, 0.037, 0.036, 0.014, 0.009], "prob_new": [0.08113676309585571, 0.19983957707881927, 0.5139585137367249, 0.7075157761573792, 0.8925071954727173, 0.9429495334625244, 0.9639028310775757, 
0.9650832414627075, 0.9857432842254639, 0.9913557171821594], "prob_old": [0.6396934390068054, 0.004757606890052557, 0.04004940763115883, 0.0030075926333665848, 0.00011189388897037134, 3.062231189687736e-05, 2.0345371012808755e-05, 1.604554745426867e-05, 1.1853041542053688e-05, 9.21836635825457e-06], "prob_new_token": [0.0011218603467568755, 0.0235601756721735, 0.036745425313711166, 0.43469950556755066, 0.8137990832328796, 0.9018862843513489, 0.9437423944473267, 0.9647938013076782, 0.9770650863647461, 0.9840475916862488], "prob_old_token": [0.6396934390068054, 0.004757606890052557, 0.04004940763115883, 0.0030075926333665848, 0.00011189388897037134, 3.062231189687736e-05, 2.0345371012808755e-05, 1.604554745426867e-05, 1.1853041542053688e-05, 9.21836635825457e-06], "l1-model.layers.3.mlp.down_proj.weight": [77711.984375], "l2-model.layers.3.mlp.down_proj.weight": [13.017454147338867], "linf-model.layers.3.mlp.down_proj.weight": [0.004454989451915026], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "sitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [3.756, 1.18, 0.262, 0.025, 0.01, 0.006], "prob_new": [0.3286236524581909, 0.5164492130279541, 0.794154167175293, 0.9752213954925537, 0.9896447062492371, 0.9941421747207642], "prob_old": [0.6396934390068054, 0.001579359988681972, 0.0012718254001811147, 7.204219582490623e-05, 2.495454828022048e-05, 1.664456976868678e-05], "prob_new_token": [0.0008327914401888847, 0.10144417732954025, 0.5979342460632324, 0.9630739688873291, 0.9847418069839478, 0.9907610416412354], "prob_old_token": [0.6396934390068054, 0.001579359988681972, 0.0012718254001811147, 7.204219582490623e-05, 2.495454828022048e-05, 1.664456976868678e-05], "l1-model.layers.3.mlp.down_proj.weight": [62372.1640625], "l2-model.layers.3.mlp.down_proj.weight": [9.817078590393066], "linf-model.layers.3.mlp.down_proj.weight": [0.0024964511394500732], "request": {"prompt": "{} is a virtuoso on the", 
"subject": "Jeff Goldblum", "target_new": {"str": "flute"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.055, 3.106, 0.691, 0.193, 0.13, 0.07, 0.042, 0.029, 0.022, 0.017, 0.014, 0.012, 0.011, 0.01], "prob_new": [0.006377784069627523, 0.044760048389434814, 0.5012145638465881, 0.8240868449211121, 0.8778476715087891, 0.9323655962944031, 0.9587970972061157, 0.971598207950592, 0.9786859750747681, 0.9829710125923157, 0.9857498407363892, 0.9876779913902283, 0.9891035556793213, 0.9902157783508301], "prob_old": [0.6505565047264099, 0.03268188238143921, 0.05233469977974892, 0.010395726189017296, 0.004672837909311056, 0.0021062048617750406, 0.0010825773933902383, 0.000591865333262831, 0.0003382420400157571, 0.0002053244534181431, 0.00013352806854527444, 9.282585961045697e-05, 6.844419112894684e-05, 5.302649151417427e-05], "prob_new_token": [0.006377784069627523, 0.044760048389434814, 0.5012145638465881, 0.8240868449211121, 0.8778476715087891, 0.9323655962944031, 0.9587970972061157, 0.971598207950592, 0.9786859750747681, 0.9829710125923157, 0.9857498407363892, 0.9876779913902283, 0.9891035556793213, 0.9902157783508301], "prob_old_token": [0.6505565047264099, 0.03268188238143921, 0.05233469977974892, 0.010395726189017296, 0.004672837909311056, 0.0021062048617750406, 0.0010825773933902383, 0.000591865333262831, 0.0003382420400157571, 0.0002053244534181431, 0.00013352806854527444, 9.282585961045697e-05, 6.844419112894684e-05, 5.302649151417427e-05], "l1-model.layers.3.mlp.down_proj.weight": [89155.3125], "l2-model.layers.3.mlp.down_proj.weight": [15.074382781982422], "linf-model.layers.3.mlp.down_proj.weight": [0.0060631404630839825], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "organ"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [6.057, 2.172, 1.1, 0.337, 2.316, 0.073, 0.144, 0.123, 0.071, 0.037, 0.02, 0.012, 0.008], "prob_new": [0.03031427040696144, 0.21737487614154816, 
0.5011968612670898, 0.7514186501502991, 0.4974078834056854, 0.9316024780273438, 0.8741791248321533, 0.8904827833175659, 0.9335683584213257, 0.964530885219574, 0.9802468419075012, 0.9880715608596802, 0.9921904802322388], "prob_old": [0.6505565047264099, 0.022839441895484924, 0.02225974202156067, 0.007816542871296406, 0.000155259229359217, 0.0017292221309617162, 0.004414043389260769, 0.0036699215415865183, 0.0020803839433938265, 0.0010733493836596608, 0.0005904252175241709, 0.0003634305321611464, 0.0002511334896553308], "prob_new_token": [9.050060907611623e-05, 0.03226468339562416, 0.12648147344589233, 0.5164580345153809, 0.009881149046123028, 0.865913987159729, 0.7519280314445496, 0.7847432494163513, 0.8708543181419373, 0.9323978424072266, 0.9632107615470886, 0.9782494902610779, 0.9860178828239441], "prob_old_token": [0.6505565047264099, 0.022839441895484924, 0.02225974202156067, 0.007816542871296406, 0.000155259229359217, 0.0017292221309617162, 0.004414043389260769, 0.0036699215415865183, 0.0020803839433938265, 0.0010733493836596608, 0.0005904252175241709, 0.0003634305321611464, 0.0002511334896553308], "l1-model.layers.3.mlp.down_proj.weight": [82437.53125], "l2-model.layers.3.mlp.down_proj.weight": [14.282381057739258], "linf-model.layers.3.mlp.down_proj.weight": [0.005724702030420303], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "bass guitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [4.91, 3.3, 2.179, 1.089, 0.553, 0.178, 0.051, 0.021, 0.011, 0.007], "prob_new": [0.4738840162754059, 0.4925162196159363, 0.4972112774848938, 0.5527947545051575, 0.6630124449729919, 0.849984347820282, 0.9512397050857544, 0.979819655418396, 0.9891097545623779, 0.992913007736206], "prob_old": [0.6505565047264099, 0.01812727004289627, 0.011259414255619049, 0.005868109874427319, 0.002832781756296754, 0.0005012661567889154, 8.620389417046681e-05, 2.091401256620884e-05, 6.67012864141725e-06, 
2.6443196929903934e-06], "prob_new_token": [5.736320235882886e-05, 0.0013837629230692983, 0.01303375419229269, 0.11432728916406631, 0.3328970968723297, 0.703988254070282, 0.9059445261955261, 0.9628973007202148, 0.9812082052230835, 0.9884275794029236], "prob_old_token": [0.6505565047264099, 0.01812727004289627, 0.011259414255619049, 0.005868109874427319, 0.002832781756296754, 0.0005012661567889154, 8.620389417046681e-05, 2.091401256620884e-05, 6.67012864141725e-06, 2.6443196929903934e-06], "l1-model.layers.3.mlp.down_proj.weight": [73688.5390625], "l2-model.layers.3.mlp.down_proj.weight": [12.699087142944336], "linf-model.layers.3.mlp.down_proj.weight": [0.004354650154709816], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "banjo"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [3.355, 2.428, 1.126, 0.791, 0.568, 0.417, 0.283, 0.176, 0.117, 0.071, 0.044, 0.029, 0.02, 0.015, 0.011, 0.009], "prob_new": [0.1562681794166565, 0.17244882881641388, 0.4258579611778259, 0.5814169645309448, 0.6941743493080139, 0.7524144649505615, 0.7999585866928101, 0.8519036173820496, 0.8944951295852661, 0.9330798387527466, 0.9579704999923706, 0.9719544053077698, 0.980095386505127, 0.9852805733680725, 0.9888004660606384, 0.9912588000297546], "prob_old": [0.8571499586105347, 0.4954770505428314, 0.49798473715782166, 0.49746331572532654, 0.49640822410583496, 0.49542874097824097, 0.4950736463069916, 0.49465325474739075, 0.4945753216743469, 0.4950356185436249, 0.49552151560783386, 0.4958384931087494, 0.4960384666919708, 0.49615707993507385, 0.49621039628982544, 0.49621284008026123], "prob_new_token": [0.0010338777210563421, 0.011991900391876698, 0.28485795855522156, 0.6429792046546936, 0.8714525103569031, 0.9395717978477478, 0.9518856406211853, 0.9540066719055176, 0.9741516709327698, 0.9881075024604797, 0.9936881065368652, 0.9960029125213623, 0.9972519278526306, 0.9980486035346985, 0.9985824823379517, 0.9989398121833801], 
"prob_old_token": [0.7155489325523376, 0.00010970958101097494, 0.000497942790389061, 0.00018172601994592696, 4.5477670937543735e-05, 1.6622456314507872e-05, 1.2992247320653405e-05, 1.6102834706543945e-05, 1.0241433301416691e-05, 4.995309154764982e-06, 2.7659609713737154e-06, 1.8543274791227304e-06, 1.3713057569475495e-06, 1.046271108862129e-06, 8.048560857787379e-07, 6.25616223715042e-07], "l1-model.layers.3.mlp.down_proj.weight": [96911.609375], "l2-model.layers.3.mlp.down_proj.weight": [16.669342041015625], "linf-model.layers.3.mlp.down_proj.weight": [0.007217982783913612], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "Russian. The language"}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [3.718, 2.341, 0.676, 0.293, 0.147, 0.081, 0.045, 0.026, 0.016, 0.011, 0.008], "prob_new": [0.22710631787776947, 0.325881689786911, 0.5238738059997559, 0.7463379502296448, 0.8646694421768188, 0.922762930393219, 0.9559162855148315, 0.9743267297744751, 0.9837589263916016, 0.9888425469398499, 0.99183189868927], "prob_old": [0.8571499586105347, 0.49827834963798523, 0.4996568560600281, 0.49948838353157043, 0.49945199489593506, 0.49948808550834656, 0.49953335523605347, 0.4995785355567932, 0.49961918592453003, 0.4996505677700043, 0.4996735751628876], "prob_new_token": [0.0013007732341066003, 0.014531449414789677, 0.39817604422569275, 0.7591400146484375, 0.9159270524978638, 0.96622633934021, 0.9828302264213562, 0.9899505376815796, 0.9934374690055847, 0.9952396154403687, 0.9962521195411682], "prob_old_token": [0.7155489325523376, 0.0032442326191812754, 0.0018143915804103017, 0.0007408324745483696, 0.0001751354429870844, 4.574876220431179e-05, 1.601836265763268e-05, 7.301382083824137e-06, 4.1491157389828e-06, 2.829300228768261e-06, 2.1881012344238115e-06], "l1-model.layers.3.mlp.down_proj.weight": [79242.90625], "l2-model.layers.3.mlp.down_proj.weight": [13.591337203979492], 
"linf-model.layers.3.mlp.down_proj.weight": [0.004946875385940075], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "German."}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [8.368, 6.139, 1.268, 0.234, 0.139, 0.091, 0.057, 0.038, 0.027, 0.021, 0.017, 0.014, 0.012, 0.011, 0.009], "prob_new": [0.0002321712381672114, 0.0021581477485597134, 0.28149327635765076, 0.7914257645606995, 0.8698051571846008, 0.9133182168006897, 0.9447562098503113, 0.9629253149032593, 0.9733186364173889, 0.979374885559082, 0.9831980466842651, 0.9858729839324951, 0.9878942370414734, 0.9894888401031494, 0.9907830357551575], "prob_old": [0.8571499586105347, 0.49610191583633423, 0.49539464712142944, 0.49737200140953064, 0.4980604946613312, 0.4979899525642395, 0.497881680727005, 0.49780330061912537, 0.49773451685905457, 0.49767372012138367, 0.4976314902305603, 0.4976128935813904, 0.49761712551116943, 0.49763861298561096, 0.4976716935634613], "prob_new_token": [0.0002321712381672114, 0.0021581477485597134, 0.28149327635765076, 0.7914257645606995, 0.8698051571846008, 0.9133182168006897, 0.9447562098503113, 0.9629253149032593, 0.9733186364173889, 0.979374885559082, 0.9831980466842651, 0.9858729839324951, 0.9878942370414734, 0.9894888401031494, 0.9907830357551575], "prob_old_token": [0.7155489325523376, 0.0006371367489919066, 0.0006648047710768878, 0.00021914530952926725, 0.000151697953697294, 7.377236761385575e-05, 3.256192576372996e-05, 1.5621653801645152e-05, 8.324828741024248e-06, 4.9624513849266805e-06, 3.2943351016001543e-06, 2.390447889411007e-06, 1.8557285557108116e-06, 1.514292534920969e-06, 1.2818449022233835e-06], "l1-model.layers.3.mlp.down_proj.weight": [91150.40625], "l2-model.layers.3.mlp.down_proj.weight": [15.504376411437988], "linf-model.layers.3.mlp.down_proj.weight": [0.006684804800897837], "request": {"prompt": "The language that {} would normally communicate in is", "subject": 
"Tarja Halonen", "target_new": {"str": "Japanese"}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [5.643, 3.22, 2.458, 4.616, 1.274, 0.666, 0.249, 0.012, 0.011, 0.01, 0.009], "prob_new": [0.1221219152212143, 0.3961591422557831, 0.4195634126663208, 0.24912545084953308, 0.6276273727416992, 0.6559377908706665, 0.8208526372909546, 0.9878653287887573, 0.9896072149276733, 0.9897017478942871, 0.9911141395568848], "prob_old": [0.6000204682350159, 0.09829027205705643, 0.03923178091645241, 0.001462200190871954, 0.1513153612613678, 0.036497678607702255, 0.022151682525873184, 0.00018133458797819912, 6.919659790582955e-05, 5.763359149568714e-05, 5.272016642265953e-05], "prob_new_token": [3.6943125451216474e-05, 0.0002552866062615067, 0.0016048686811700463, 6.78189389873296e-05, 0.025435682386159897, 0.16911038756370544, 0.48363709449768066, 0.9688796997070312, 0.9716594815254211, 0.9714808464050293, 0.9755842089653015], "prob_old_token": [0.6000204682350159, 0.09829027205705643, 0.03923178091645241, 0.001462200190871954, 0.1513153612613678, 0.036497678607702255, 0.022151682525873184, 0.00018133458797819912, 6.919659790582955e-05, 5.763359149568714e-05, 5.272016642265953e-05], "l1-model.layers.3.mlp.down_proj.weight": [72296.125], "l2-model.layers.3.mlp.down_proj.weight": [12.765100479125977], "linf-model.layers.3.mlp.down_proj.weight": [0.0047472696751356125], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Khmer."}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [4.962, 1.611, 0.287, 0.058, 0.031, 0.017, 0.013, 0.011, 0.009], "prob_new": [0.3133165240287781, 0.5062432885169983, 0.7698869705200195, 0.9442715644836426, 0.9700533151626587, 0.9830510020256042, 0.9868507385253906, 0.9892745018005371, 0.9912317395210266], "prob_old": [0.6000204682350159, 0.03318994119763374, 0.011438926681876183, 0.0014916290529072285, 0.0005848862347193062, 
0.0003495517303235829, 0.00030183387571014464, 0.0002701091580092907, 0.00024379951355513185], "prob_new_token": [7.811676186975092e-05, 0.041081659495830536, 0.5983514189720154, 0.9026302695274353, 0.9475340843200684, 0.9720655083656311, 0.9792417287826538, 0.9833932518959045, 0.9866805672645569], "prob_old_token": [0.6000204682350159, 0.03318994119763374, 0.011438926681876183, 0.0014916290529072285, 0.0005848862347193062, 0.0003495517303235829, 0.00030183387571014464, 0.0002701091580092907, 0.00024379951355513185], "l1-model.layers.3.mlp.down_proj.weight": [73331.71875], "l2-model.layers.3.mlp.down_proj.weight": [12.32751750946045], "linf-model.layers.3.mlp.down_proj.weight": [0.003927720710635185], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Serbian"}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [4.135, 1.126, 0.655, 0.287, 0.028, 0.021, 0.011, 0.006], "prob_new": [0.49752283096313477, 0.551987886428833, 0.6340199112892151, 0.7816734313964844, 0.973031759262085, 0.9796876907348633, 0.9891984462738037, 0.9939544796943665], "prob_old": [0.6000204682350159, 0.23802977800369263, 0.06069204583764076, 0.04921339079737663, 0.0006151236011646688, 0.0001345158088952303, 5.6410834076814353e-05, 2.4096021661534905e-05], "prob_new_token": [0.000257354840869084, 0.10531426966190338, 0.2701982855796814, 0.5642863512039185, 0.946532666683197, 0.9597546458244324, 0.978745698928833, 0.9881696105003357], "prob_old_token": [0.6000204682350159, 0.23802977800369263, 0.06069204583764076, 0.04921339079737663, 0.0006151236011646688, 0.0001345158088952303, 5.6410834076814353e-05, 2.4096021661534905e-05], "l1-model.layers.3.mlp.down_proj.weight": [66100.3828125], "l2-model.layers.3.mlp.down_proj.weight": [11.205522537231445], "linf-model.layers.3.mlp.down_proj.weight": [0.003497740253806114], "request": {"prompt": "The language that {} would normally communicate in is", 
"subject": "Vladimir Vysotsky", "target_new": {"str": "Hebrew"}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [6.008, 4.446, 2.319, 0.901, 0.077, 0.029, 0.017, 0.011, 0.009], "prob_new": [0.49568334221839905, 0.49252215027809143, 0.5030577182769775, 0.581122636795044, 0.9282392263412476, 0.9719675183296204, 0.9835110902786255, 0.9887281060218811, 0.991536021232605], "prob_old": [0.7656696438789368, 0.022471049800515175, 0.028864050284028053, 0.0025142200756818056, 0.0007599053205922246, 0.0002337644691579044, 0.00013750417565461248, 0.00010161322279600427, 7.256957178469747e-05], "prob_new_token": [6.099346137489192e-06, 0.0001395549188600853, 0.009713911451399326, 0.1656811386346817, 0.8572739362716675, 0.9447483420372009, 0.967799186706543, 0.9781981706619263, 0.9837530255317688], "prob_old_token": [0.7656696438789368, 0.022471049800515175, 0.028864050284028053, 0.0025142200756818056, 0.0007599053205922246, 0.0002337644691579044, 0.00013750417565461248, 0.00010161322279600427, 7.256957178469747e-05], "l1-model.layers.3.mlp.down_proj.weight": [67166.6875], "l2-model.layers.3.mlp.down_proj.weight": [11.732223510742188], "linf-model.layers.3.mlp.down_proj.weight": [0.0038671307265758514], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "Hungarian"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [4.274, 2.828, 1.112, 0.578, 0.1, 0.034, 0.02, 0.013, 0.009], "prob_new": [0.33153167366981506, 0.40685245394706726, 0.5508244633674622, 0.656685471534729, 0.908957839012146, 0.9670348763465881, 0.9807352423667908, 0.9873498678207397, 0.9914531707763672], "prob_old": [0.7656696438789368, 0.3022415041923523, 0.01748872548341751, 0.0038957162760198116, 0.000629951769951731, 0.00019708667241502553, 0.00010736261901911348, 6.56322154100053e-05, 3.929377635358833e-05], "prob_new_token": [0.0002925312437582761, 0.004319122061133385, 0.10907230526208878, 
0.3156416714191437, 0.8192712068557739, 0.9358587265014648, 0.9637345671653748, 0.9770982265472412, 0.9852911233901978], "prob_old_token": [0.7656696438789368, 0.3022415041923523, 0.01748872548341751, 0.0038957162760198116, 0.000629951769951731, 0.00019708667241502553, 0.00010736261901911348, 6.56322154100053e-05, 3.929377635358833e-05], "l1-model.layers.3.mlp.down_proj.weight": [71192.953125], "l2-model.layers.3.mlp.down_proj.weight": [12.172497749328613], "linf-model.layers.3.mlp.down_proj.weight": [0.003910258412361145], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "Hindi"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [6.71, 7.401, 2.138, 0.571, 0.135, 0.068, 0.042, 0.03, 0.023, 0.019, 0.016, 0.014, 0.013, 0.011, 0.01, 0.01], "prob_new": [0.0012190506095066667, 0.0006103544728830457, 0.11783620715141296, 0.5650146007537842, 0.8733875155448914, 0.9340632557868958, 0.9587532877922058, 0.9706820845603943, 0.9772111177444458, 0.9812345504760742, 0.9839576482772827, 0.9859318137168884, 0.9874404668807983, 0.9886476397514343, 0.9896528720855713, 0.9905171394348145], "prob_old": [0.7656696438789368, 0.008290690369904041, 0.02603876031935215, 0.013235799968242645, 0.0007307735504582524, 0.00027474452508613467, 0.00016336969565600157, 0.00011552353680599481, 9.124600182985887e-05, 7.74949585320428e-05, 6.883249443490058e-05, 6.280696106841788e-05, 5.821488593937829e-05, 5.439145752461627e-05, 5.095837332191877e-05, 4.7712943342048675e-05], "prob_new_token": [0.0012190506095066667, 0.0006103544728830457, 0.11783620715141296, 0.5650146007537842, 0.8733875155448914, 0.9340632557868958, 0.9587532877922058, 0.9706820845603943, 0.9772111177444458, 0.9812345504760742, 0.9839576482772827, 0.9859318137168884, 0.9874404668807983, 0.9886476397514343, 0.9896528720855713, 0.9905171394348145], "prob_old_token": [0.7656696438789368, 0.008290690369904041, 
0.02603876031935215, 0.013235799968242645, 0.0007307735504582524, 0.00027474452508613467, 0.00016336969565600157, 0.00011552353680599481, 9.124600182985887e-05, 7.74949585320428e-05, 6.883249443490058e-05, 6.280696106841788e-05, 5.821488593937829e-05, 5.439145752461627e-05, 5.095837332191877e-05, 4.7712943342048675e-05], "l1-model.layers.3.mlp.down_proj.weight": [92480.515625], "l2-model.layers.3.mlp.down_proj.weight": [15.914239883422852], "linf-model.layers.3.mlp.down_proj.weight": [0.006346576847136021], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "German"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [1.088, 0.468, 0.169, 0.075, 0.035, 0.019, 0.013, 0.01], "prob_new": [0.44663283228874207, 0.6947991251945496, 0.8561769723892212, 0.9299355745315552, 0.9658390879631042, 0.9812572598457336, 0.9869470000267029, 0.9903445243835449], "prob_old": [0.815902590751648, 0.39734122157096863, 0.545926570892334, 0.6075764894485474, 0.6278045177459717, 0.6384198069572449, 0.6457923650741577, 0.6512094736099243], "prob_new_token": [0.6513559222221375, 0.24344906210899353, 0.6703258156776428, 0.8483359217643738, 0.9030152559280396, 0.9300382137298584, 0.9488401412963867, 0.9627190828323364], "prob_old_token": [0.651356041431427, 0.24344761669635773, 0.6703254580497742, 0.8483351469039917, 0.9030154347419739, 0.930038332939148, 0.9488399028778076, 0.9627191424369812], "l1-model.layers.3.mlp.down_proj.weight": [71407.59375], "l2-model.layers.3.mlp.down_proj.weight": [11.60071849822998], "linf-model.layers.3.mlp.down_proj.weight": [0.0035149529576301575], "request": {"prompt": "{} was bestowed with the", "subject": "Dag Hammarskj\u00f6ld", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [1.387, 0.294, 0.095, 0.037, 0.017, 0.01], "prob_new": [0.4124806523323059, 0.7614791989326477, 
0.9115997552871704, 0.9638595581054688, 0.983758270740509, 0.9904025197029114], "prob_old": [0.8459617495536804, 0.5361813902854919, 0.6036840677261353, 0.6307517290115356, 0.6469550132751465, 0.6539579629898071], "prob_new_token": [0.7021856307983398, 0.6026638150215149, 0.8146222233772278, 0.9000222086906433, 0.9462844133377075, 0.9659425616264343], "prob_old_token": [0.7021856307983398, 0.6026638150215149, 0.8146222233772278, 0.9000222086906433, 0.9462844133377075, 0.9659425616264343], "l1-model.layers.3.mlp.down_proj.weight": [60659.5390625], "l2-model.layers.3.mlp.down_proj.weight": [9.715465545654297], "linf-model.layers.3.mlp.down_proj.weight": [0.0025039799511432648], "request": {"prompt": "{} was bestowed with the", "subject": "European Union", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [1.301, 1.312, 0.157, 0.158, 0.114, 0.103, 0.068, 0.034, 0.013, 0.006], "prob_new": [0.4182564318180084, 0.6789537668228149, 0.8726140856742859, 0.8700849413871765, 0.8960927128791809, 0.9049367308616638, 0.9358401298522949, 0.9671626091003418, 0.9866690039634705, 0.9944401979446411], "prob_old": [0.8223134875297546, 0.31994807720184326, 0.5061478614807129, 0.5901596546173096, 0.562148928642273, 0.5722594261169434, 0.589714527130127, 0.611535370349884, 0.6309136152267456, 0.6418783664703369], "prob_new_token": [0.615616500377655, 0.002860908629372716, 0.5677982568740845, 0.8277384638786316, 0.7735073566436768, 0.8038020133972168, 0.8517019152641296, 0.9119328856468201, 0.9639231562614441, 0.988939106464386], "prob_old_token": [0.6156161427497864, 0.002860903274267912, 0.5677969455718994, 0.8277380466461182, 0.7735078930854797, 0.8038011789321899, 0.8517019748687744, 0.911933183670044, 0.9639230966567993, 0.988939106464386], "l1-model.layers.3.mlp.down_proj.weight": [73878.546875], "l2-model.layers.3.mlp.down_proj.weight": [12.650991439819336], "linf-model.layers.3.mlp.down_proj.weight": 
[0.004480895586311817], "request": {"prompt": "{} was bestowed with the", "subject": "Bertha von Suttner", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [7.671, 3.954, 1.072, 0.053, 0.371, 0.045, 0.093, 0.104, 0.083, 0.051, 0.027, 0.015, 0.01], "prob_new": [0.00046604787348769605, 0.019170276820659637, 0.3423437476158142, 0.9488277435302734, 0.6900125741958618, 0.9555399417877197, 0.9108838438987732, 0.9015634655952454, 0.9200295805931091, 0.9500378370285034, 0.973314106464386, 0.9852167963981628, 0.9905006289482117], "prob_old": [0.6538368463516235, 0.503177285194397, 0.4638480842113495, 0.46305200457572937, 0.4346029460430145, 0.5384251475334167, 0.5363401770591736, 0.5261115431785583, 0.5217961072921753, 0.5249007940292358, 0.5318173766136169, 0.5384954810142517, 0.5424825549125671], "prob_new_token": [0.00046604787348769605, 0.019170276820659637, 0.3423437476158142, 0.9488277435302734, 0.6900125741958618, 0.9555399417877197, 0.9108838438987732, 0.9015634655952454, 0.9200295805931091, 0.9500378370285034, 0.973314106464386, 0.9852167963981628, 0.9905006289482117], "prob_old_token": [0.28406721353530884, 0.046468984335660934, 0.004667439963668585, 2.7165155188413337e-05, 1.8357965018367395e-05, 9.953415428753942e-05, 0.00018703813839238137, 0.00019463770149741322, 0.0001907307596411556, 0.0002657427976373583, 0.0003998391912318766, 0.00047919267672114074, 0.000505188712850213], "l1-model.layers.3.mlp.down_proj.weight": [82669.921875], "l2-model.layers.3.mlp.down_proj.weight": [14.321866989135742], "linf-model.layers.3.mlp.down_proj.weight": [0.005797550082206726], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Brazil"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [4.016, 2.472, 3.83, 0.513, 0.082, 0.021, 0.015, 0.015, 0.015, 0.008], "prob_new": [0.4206976592540741, 0.4868003726005554, 
0.45384687185287476, 0.6790124773979187, 0.9244365096092224, 0.9796954393386841, 0.9855086803436279, 0.9853173494338989, 0.9856358170509338, 0.9925062656402588], "prob_old": [0.6538368463516235, 0.49577614665031433, 0.40554651618003845, 0.3940134346485138, 0.4016955494880676, 0.39623868465423584, 0.39038288593292236, 0.3884231746196747, 0.38910672068595886, 0.39076805114746094], "prob_new_token": [0.00038631714414805174, 0.007368004880845547, 0.0005200337618589401, 0.3582085072994232, 0.8490275144577026, 0.9597005248069763, 0.9717875123023987, 0.9721134305000305, 0.9728614687919617, 0.9860064387321472], "prob_old_token": [0.28406721353530884, 0.007726044859737158, 0.00013935410242993385, 7.770305819576606e-05, 5.0930048018926755e-05, 0.00013698900875169784, 0.00039851191104389727, 0.0006753907655365765, 0.0006121665937826037, 0.0002588570932857692], "l1-model.layers.3.mlp.down_proj.weight": [66513.8125], "l2-model.layers.3.mlp.down_proj.weight": [11.96945571899414], "linf-model.layers.3.mlp.down_proj.weight": [0.004423672333359718], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Denmark"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [8.706, 4.034, 3.18, 2.441, 0.627, 0.144, 0.044, 0.018, 0.01], "prob_new": [0.00016564593533985317, 0.017710762098431587, 0.041577134281396866, 0.08710333704948425, 0.5340684652328491, 0.8661391735076904, 0.9571282267570496, 0.9819334149360657, 0.9903388619422913], "prob_old": [0.6538368463516235, 0.5730826258659363, 0.5383387804031372, 0.5207026600837708, 0.4755919575691223, 0.43705645203590393, 0.4210403859615326, 0.41370564699172974, 0.4096645414829254], "prob_new_token": [0.00016564593533985317, 0.017710762098431587, 0.041577134281396866, 0.08710333704948425, 0.5340684652328491, 0.8661391735076904, 0.9571282267570496, 0.9819334149360657, 0.9903388619422913], "prob_old_token": [0.28406721353530884, 0.0018081852467730641, 
0.007156115956604481, 0.017387863248586655, 0.008435995317995548, 0.0021088619250804186, 0.000547712086699903, 0.00017540740373078734, 6.764285353710875e-05], "l1-model.layers.3.mlp.down_proj.weight": [68437.578125], "l2-model.layers.3.mlp.down_proj.weight": [11.790122032165527], "linf-model.layers.3.mlp.down_proj.weight": [0.003884820267558098], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Netherlands"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [6.243, 2.663, 0.745, 0.172, 0.066, 0.037, 0.025, 0.019, 0.014, 0.011, 0.009], "prob_new": [0.0019431750988587737, 0.06974325329065323, 0.47474199533462524, 0.8415956497192383, 0.9357946515083313, 0.9634130597114563, 0.9750144481658936, 0.9816025495529175, 0.9859545826911926, 0.9890064001083374, 0.9911961555480957], "prob_old": [0.8033087253570557, 0.009546726010739803, 0.01023390144109726, 0.0027159973978996277, 0.0012302813120186329, 0.0007622812408953905, 0.0005428938893601298, 0.000422188313677907, 0.00034837747807614505, 0.00029912020545452833, 0.0002640079183038324], "prob_new_token": [0.0019431750988587737, 0.06974325329065323, 0.47474199533462524, 0.8415956497192383, 0.9357946515083313, 0.9634130597114563, 0.9750144481658936, 0.9816025495529175, 0.9859545826911926, 0.9890064001083374, 0.9911961555480957], "prob_old_token": [0.8033087253570557, 0.009546726010739803, 0.01023390144109726, 0.0027159973978996277, 0.0012302813120186329, 0.0007622812408953905, 0.0005428938893601298, 0.000422188313677907, 0.00034837747807614505, 0.00029912020545452833, 0.0002640079183038324], "l1-model.layers.3.mlp.down_proj.weight": [81464.859375], "l2-model.layers.3.mlp.down_proj.weight": [13.677682876586914], "linf-model.layers.3.mlp.down_proj.weight": [0.004791846498847008], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Argentina"}, "old_answer": {"str": "Chile"}, "seed": 
42}}, {"loss_per_step": [9.596, 4.298, 0.55, 0.025, 0.015, 0.011, 0.009], "prob_new": [6.798121466999874e-05, 0.013594798743724823, 0.5772380828857422, 0.9752443432807922, 0.9854839444160461, 0.9889683723449707, 0.9907011985778809], "prob_old": [0.8033087253570557, 0.00346940360032022, 0.005349204875528812, 3.6455348890740424e-05, 1.358399458695203e-05, 9.780863365449477e-06, 8.882989277481101e-06], "prob_new_token": [6.798121466999874e-05, 0.013594798743724823, 0.5772380828857422, 0.9752443432807922, 0.9854839444160461, 0.9889683723449707, 0.9907011985778809], "prob_old_token": [0.8033087253570557, 0.00346940360032022, 0.005349204875528812, 3.6455348890740424e-05, 1.358399458695203e-05, 9.780863365449477e-06, 8.882989277481101e-06], "l1-model.layers.3.mlp.down_proj.weight": [61295.5703125], "l2-model.layers.3.mlp.down_proj.weight": [10.403352737426758], "linf-model.layers.3.mlp.down_proj.weight": [0.0029592886567115784], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Egypt"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [12.105, 4.642, 5.175, 0.658, 0.066, 0.033, 0.023, 0.018, 0.014, 0.01], "prob_new": [5.530110229301499e-06, 0.00963663961738348, 0.005656849592924118, 0.5180342197418213, 0.9364330172538757, 0.9674146771430969, 0.9772830605506897, 0.9823641777038574, 0.9864528179168701, 0.990140974521637], "prob_old": [0.8033087253570557, 0.03663882240653038, 0.0006081213359721005, 0.00117566529661417, 8.199283001886215e-06, 3.176117388647981e-06, 1.8780277741825557e-06, 1.3203384696680587e-06, 9.254777637579537e-07, 6.178430567160831e-07], "prob_new_token": [5.530110229301499e-06, 0.00963663961738348, 0.005656849592924118, 0.5180342197418213, 0.9364330172538757, 0.9674146771430969, 0.9772830605506897, 0.9823641777038574, 0.9864528179168701, 0.990140974521637], "prob_old_token": [0.8033087253570557, 0.03663882240653038, 0.0006081213359721005, 0.00117566529661417, 8.199283001886215e-06, 
3.176117388647981e-06, 1.8780277741825557e-06, 1.3203384696680587e-06, 9.254777637579537e-07, 6.178430567160831e-07], "l1-model.layers.3.mlp.down_proj.weight": [70015.03125], "l2-model.layers.3.mlp.down_proj.weight": [12.421154975891113], "linf-model.layers.3.mlp.down_proj.weight": [0.004342544823884964], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Ukraine"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [13.192, 5.313, 3.082, 0.561, 0.004], "prob_new": [1.8659325178305153e-06, 0.004928926937282085, 0.045884594321250916, 0.57073974609375, 0.9957581758499146], "prob_old": [0.9169411659240723, 0.7418313026428223, 0.7446680665016174, 0.7371324300765991, 0.7268552184104919], "prob_new_token": [1.8659325178305153e-06, 0.004928926937282085, 0.045884594321250916, 0.57073974609375, 0.9957581758499146], "prob_old_token": [0.7120962738990784, 0.013262953609228134, 0.007303684018552303, 0.0003711844910867512, 1.088294425244385e-06], "l1-model.layers.3.mlp.down_proj.weight": [53368.8671875], "l2-model.layers.3.mlp.down_proj.weight": [8.498113632202148], "linf-model.layers.3.mlp.down_proj.weight": [0.0020058166701346636], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Ukraine"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [6.385, 3.4, 0.915, 0.181, 0.019, 0.032, 0.006], "prob_new": [0.327322781085968, 0.39494428038597107, 0.6582688093185425, 0.8595423698425293, 0.9818195104598999, 0.9691168665885925, 0.9936849474906921], "prob_old": [0.9169411659240723, 0.7429031133651733, 0.7384592294692993, 0.6254451274871826, 0.544234037399292, 0.42713111639022827, 0.431258887052536], "prob_new_token": [2.312546257599024e-06, 0.0001957456552190706, 0.0711136981844902, 0.5824739336967468, 0.9487605094909668, 0.9701157212257385, 0.9824330806732178], "prob_old_token": [0.7120962738990784, 0.017028450965881348, 0.002133889589458704, 
1.2999869795748964e-05, 7.850391057218076e-07, 2.606419684525463e-07, 1.592594998101049e-07], "l1-model.layers.3.mlp.down_proj.weight": [62446.421875], "l2-model.layers.3.mlp.down_proj.weight": [10.589844703674316], "linf-model.layers.3.mlp.down_proj.weight": [0.0029868409037590027], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Beijing"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [6.077, 2.306, 0.299, 0.002], "prob_new": [0.39425405859947205, 0.5032984614372253, 0.7746559381484985, 0.998241662979126], "prob_old": [0.9169411659240723, 0.7443233728408813, 0.7464765906333923, 0.7445045709609985], "prob_new_token": [6.678090812783921e-06, 0.009970003738999367, 0.5499272346496582, 0.9966185092926025], "prob_old_token": [0.7120962738990784, 0.018050502985715866, 0.0027730795554816723, 1.932108716573566e-06], "l1-model.layers.3.mlp.down_proj.weight": [43584.2265625], "l2-model.layers.3.mlp.down_proj.weight": [6.9237284660339355], "linf-model.layers.3.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Thailand"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [10.462, 1.914, 0.683, 0.261, 0.127, 0.077, 0.056, 0.044, 0.037, 0.031, 0.027, 0.023, 0.02, 0.018, 0.016, 0.014, 0.012, 0.011, 0.009], "prob_new": [2.8606737032532692e-05, 0.1474534571170807, 0.5051555037498474, 0.7702656984329224, 0.8809279799461365, 0.9255483746528625, 0.9458601474761963, 0.9568371176719666, 0.9639520645141602, 0.9692110419273376, 0.9734067916870117, 0.9768906831741333, 0.9798418283462524, 0.9823693633079529, 0.9845497012138367, 0.9864401817321777, 0.9880812168121338, 0.9895068407058716, 0.9907448887825012], "prob_old": [0.8442697525024414, 0.5892666578292847, 0.5604023933410645, 0.5408101081848145, 0.5257968306541443, 0.5161112546920776, 0.5110523104667664, 0.5082442760467529, 0.5063736438751221, 0.5048748254776001, 
0.5035123229026794, 0.5021991729736328, 0.5009267330169678, 0.4997214674949646, 0.49861449003219604, 0.4976256489753723, 0.4967612624168396, 0.4960164725780487, 0.4953826367855072], "prob_new_token": [2.8606737032532692e-05, 0.1474534571170807, 0.5051555037498474, 0.7702656984329224, 0.8809279799461365, 0.9255483746528625, 0.9458601474761963, 0.9568371176719666, 0.9639520645141602, 0.9692110419273376, 0.9734067916870117, 0.9768906831741333, 0.9798418283462524, 0.9823693633079529, 0.9845497012138367, 0.9864401817321777, 0.9880812168121338, 0.9895068407058716, 0.9907448887825012], "prob_old_token": [0.412433922290802, 0.07491950690746307, 0.07739370316267014, 0.030826227739453316, 0.01632334478199482, 0.011014863848686218, 0.008482463657855988, 0.007053759414702654, 0.0061001465655863285, 0.00537579320371151, 0.004778683185577393, 0.004261404275894165, 0.0037996042519807816, 0.0033798813819885254, 0.002995582763105631, 0.002644150983542204, 0.002325179288163781, 0.0020386918913573027, 0.0017842582892626524], "l1-model.layers.3.mlp.down_proj.weight": [108277.0625], "l2-model.layers.3.mlp.down_proj.weight": [17.656240463256836], "linf-model.layers.3.mlp.down_proj.weight": [0.008429942652583122], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "India"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [13.771, 5.538, 6.136, 4.078, 2.504, 0.809, 0.244, 0.109, 0.059, 0.036, 0.024, 0.017, 0.012, 0.01], "prob_new": [1.0455619303684216e-06, 0.003932907711714506, 0.002163790399208665, 0.016948390752077103, 0.08179016411304474, 0.4454425871372223, 0.7831329107284546, 0.8969768285751343, 0.9428938031196594, 0.9649417996406555, 0.976695716381073, 0.9835277795791626, 0.9877674579620361, 0.9905356764793396], "prob_old": [0.8442697525024414, 0.6308496594429016, 0.5317937135696411, 0.5694196224212646, 0.5893992781639099, 0.5992500185966492, 0.5944705009460449, 0.5857998728752136, 0.5795500874519348, 
0.575331449508667, 0.5723776817321777, 0.5701651573181152, 0.5683714151382446, 0.5668095946311951], "prob_new_token": [1.0455619303684216e-06, 0.003932907711714506, 0.002163790399208665, 0.016948390752077103, 0.08179016411304474, 0.4454425871372223, 0.7831329107284546, 0.8969768285751343, 0.9428938031196594, 0.9649417996406555, 0.976695716381073, 0.9835277795791626, 0.9877674579620361, 0.9905356764793396], "prob_old_token": [0.412433922290802, 0.02934381738305092, 0.030607279390096664, 0.00131553178653121, 0.004695530980825424, 0.007303551770746708, 0.004084192216396332, 0.003060412360355258, 0.002466923091560602, 0.001905897748656571, 0.0014398968778550625, 0.0010895421728491783, 0.0008355826139450073, 0.0006522650364786386], "l1-model.layers.3.mlp.down_proj.weight": [85831.1484375], "l2-model.layers.3.mlp.down_proj.weight": [15.228168487548828], "linf-model.layers.3.mlp.down_proj.weight": [0.005978796631097794], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "Mexico"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [0.37, 0.274, 0.023, 0.006], "prob_new": [0.7990957498550415, 0.8314025402069092, 0.9775019884109497, 0.9939459562301636], "prob_old": [0.8442697525024414, 0.6762020587921143, 0.6663720607757568, 0.659965455532074], "prob_new_token": [0.23768770694732666, 0.3386997878551483, 0.9139094352722168, 0.9783887267112732], "prob_old_token": [0.412433922290802, 0.16288159787654877, 0.040077779442071915, 0.010730576701462269], "l1-model.layers.3.mlp.down_proj.weight": [39558.5546875], "l2-model.layers.3.mlp.down_proj.weight": [6.688944339752197], "linf-model.layers.3.mlp.down_proj.weight": [0.0015023504383862019], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "Saudi Arabia"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [1.808, 1.137, 0.432, 0.067, 0.028, 0.016, 0.011, 0.008], "prob_new": [0.38338664174079895, 
0.5627421140670776, 0.7411749362945557, 0.9394664764404297, 0.9728624224662781, 0.9847598075866699, 0.9890568852424622, 0.9916487336158752], "prob_old": [0.6396514177322388, 0.3969506025314331, 0.37649837136268616, 0.39375609159469604, 0.39792951941490173, 0.4002819359302521, 0.403152734041214, 0.40624237060546875], "prob_new_token": [0.08499715477228165, 0.20198991894721985, 0.2904532849788666, 0.742374062538147, 0.8813361525535583, 0.9328700304031372, 0.9568061232566833, 0.9684430360794067], "prob_old_token": [0.7084969282150269, 0.17531660199165344, 0.010780930519104004, 0.0015999753959476948, 0.0005579551798291504, 0.00017988431500270963, 7.329291111091152e-05, 3.857928459183313e-05], "l1-model.layers.3.mlp.down_proj.weight": [69357.7890625], "l2-model.layers.3.mlp.down_proj.weight": [11.551374435424805], "linf-model.layers.3.mlp.down_proj.weight": [0.0034990496933460236], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of California, Los Angeles"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.934, 1.067, 0.249, 0.087, 0.044, 0.03, 0.022, 0.015, 0.012, 0.01], "prob_new": [0.36576762795448303, 0.56719571352005, 0.8007999658584595, 0.9223054051399231, 0.9585506319999695, 0.9711732268333435, 0.9788689613342285, 0.984824001789093, 0.9883391261100769, 0.9905444383621216], "prob_old": [0.6396514177322388, 0.4111395478248596, 0.3614024221897125, 0.3950446844100952, 0.40889135003089905, 0.4190356731414795, 0.42811110615730286, 0.4357244372367859, 0.44166526198387146, 0.44621148705482483], "prob_new_token": [0.0849967896938324, 0.17582854628562927, 0.52737957239151, 0.7398635149002075, 0.8610168695449829, 0.9109199047088623, 0.9366728067398071, 0.953076183795929, 0.9641042351722717, 0.9719604849815369], "prob_old_token": [0.7084969282150269, 0.28479236364364624, 0.0011533887591212988, 0.00027868972392752767, 0.00014896118955221027, 8.86855268618092e-05, 
6.383613799698651e-05, 5.436899664346129e-05, 4.733337846118957e-05, 4.0025308408075944e-05], "l1-model.layers.3.mlp.down_proj.weight": [81720.921875], "l2-model.layers.3.mlp.down_proj.weight": [13.286428451538086], "linf-model.layers.3.mlp.down_proj.weight": [0.004459569230675697], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of Bristol"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.593, 0.664, 0.239, 0.062, 0.028, 0.018, 0.014, 0.011, 0.009], "prob_new": [0.27795514464378357, 0.6554400324821472, 0.806768536567688, 0.9421901702880859, 0.9733113050460815, 0.982648491859436, 0.9864566326141357, 0.9891087412834167, 0.9913656711578369], "prob_old": [0.6396514177322388, 0.3728618621826172, 0.39587530493736267, 0.42166557908058167, 0.4301775097846985, 0.4322749972343445, 0.4330872893333435, 0.4341568052768707, 0.43526262044906616], "prob_new_token": [0.0849967896938324, 0.14098621904850006, 0.5422887802124023, 0.8251643776893616, 0.9194417595863342, 0.9491354823112488, 0.9622532725334167, 0.9705049991607666, 0.9768539071083069], "prob_old_token": [0.7084969282150269, 0.2229655683040619, 0.010189124383032322, 0.0019326526671648026, 0.0007127082208171487, 0.00038303088513202965, 0.00024808672606013715, 0.0001833255373639986, 0.00014487038424704224], "l1-model.layers.3.mlp.down_proj.weight": [80863.390625], "l2-model.layers.3.mlp.down_proj.weight": [12.61963939666748], "linf-model.layers.3.mlp.down_proj.weight": [0.0040032342076301575], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of Cambridge"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [1.218, 0.546, 0.283, 0.141, 0.062, 0.018, 0.01], "prob_new": [0.5217112302780151, 0.6310793161392212, 0.7894617319107056, 0.884706974029541, 0.942711353302002, 0.9822262525558472, 0.990271270275116], 
"prob_old": [0.6436144113540649, 0.4325399398803711, 0.49534767866134644, 0.5963468551635742, 0.6744962334632874, 0.7314612865447998, 0.7405744791030884], "prob_new_token": [0.5371917486190796, 0.2452320009469986, 0.35780832171440125, 0.5313451886177063, 0.7724409699440002, 0.9556206464767456, 0.9805032014846802], "prob_old_token": [0.53719162940979, 0.24523194134235382, 0.357808381319046, 0.5313457250595093, 0.772441029548645, 0.9556206464767456, 0.9805034399032593], "l1-model.layers.3.mlp.down_proj.weight": [66141.2109375], "l2-model.layers.3.mlp.down_proj.weight": [10.666683197021484], "linf-model.layers.3.mlp.down_proj.weight": [0.0029982782434672117], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of California, Berkeley"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [2.252, 0.995, 0.267, 0.08, 0.019, 0.005], "prob_new": [0.5689975023269653, 0.5360291004180908, 0.8011038899421692, 0.9289041757583618, 0.9819262027740479, 0.9947876930236816], "prob_old": [0.6436144113540649, 0.5164433717727661, 0.5965824723243713, 0.6799577474594116, 0.7321024537086487, 0.7448406219482422], "prob_new_token": [0.53719162940979, 0.2602922320365906, 0.4481700658798218, 0.7558653950691223, 0.9432080388069153, 0.9927395582199097], "prob_old_token": [0.53719162940979, 0.2602922320365906, 0.4481700658798218, 0.7558653950691223, 0.9432080388069153, 0.9927395582199097], "l1-model.layers.3.mlp.down_proj.weight": [61214.44140625], "l2-model.layers.3.mlp.down_proj.weight": [9.773024559020996], "linf-model.layers.3.mlp.down_proj.weight": [0.002510049380362034], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of Texas"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [2.435, 1.467, 1.171, 0.158, 0.035, 0.017, 0.012, 0.009], "prob_new": 
[0.6940945386886597, 0.613571047782898, 0.6771641969680786, 0.8658897280693054, 0.9669832587242126, 0.9830555319786072, 0.988670289516449, 0.9912280440330505], "prob_old": [0.6436144113540649, 0.4326549172401428, 0.5153532028198242, 0.6389510035514832, 0.7011376619338989, 0.7249690294265747, 0.7333796620368958, 0.7372241020202637], "prob_new_token": [0.5371917486190796, 0.3656311631202698, 0.6800673604011536, 0.7499201893806458, 0.8516886234283447, 0.9277330040931702, 0.9542312026023865, 0.9659162759780884], "prob_old_token": [0.53719162940979, 0.3656315207481384, 0.6800674200057983, 0.7499199509620667, 0.8516882658004761, 0.9277324676513672, 0.9542312622070312, 0.9659162759780884], "l1-model.layers.3.mlp.down_proj.weight": [66742.125], "l2-model.layers.3.mlp.down_proj.weight": [11.328217506408691], "linf-model.layers.3.mlp.down_proj.weight": [0.003457426093518734], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of Bucharest"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [4.539, 1.914, 0.288, 0.047, 0.04, 0.025, 0.014, 0.009], "prob_new": [0.48854687809944153, 0.4720999002456665, 0.7796489596366882, 0.9554450511932373, 0.9619009494781494, 0.9755931496620178, 0.9864515066146851, 0.9914616346359253], "prob_old": [0.9215955138206482, 0.06011444702744484, 0.00716036232188344, 2.169493200199213e-05, 8.128611625579651e-06, 4.23664687332348e-06, 2.438895080558723e-06, 1.6630535810691072e-06], "prob_new_token": [0.00011676352005451918, 0.023641429841518402, 0.5648255944252014, 0.9127473831176758, 0.9254376888275146, 0.9526376724243164, 0.9741354584693909, 0.9840093851089478], "prob_old_token": [0.9215955138206482, 0.06011444702744484, 0.00716036232188344, 2.169493200199213e-05, 8.128611625579651e-06, 4.23664687332348e-06, 2.438895080558723e-06, 1.6630535810691072e-06], "l1-model.layers.3.mlp.down_proj.weight": [65025.7265625], 
"l2-model.layers.3.mlp.down_proj.weight": [11.218225479125977], "linf-model.layers.3.mlp.down_proj.weight": [0.00347109604626894], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Arabic"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [4.331, 1.871, 0.126, 0.03, 0.018, 0.013, 0.011, 0.009], "prob_new": [0.6566707491874695, 0.6046116352081299, 0.8867949843406677, 0.9711483716964722, 0.9824137687683105, 0.9868054389953613, 0.9893379211425781, 0.9909114837646484], "prob_old": [0.9215955138206482, 0.009633395820856094, 7.791179086780176e-05, 4.485996760195121e-05, 2.861424036382232e-05, 1.719772262731567e-05, 9.56940039031906e-06, 5.624347522825701e-06], "prob_new_token": [2.3428087843058165e-06, 0.0044595482759177685, 0.8955662250518799, 0.9355391263961792, 0.964698314666748, 0.9768369197845459, 0.9837427735328674, 0.9879064559936523], "prob_old_token": [0.9215955138206482, 0.009633395820856094, 7.791179086780176e-05, 4.485996760195121e-05, 2.861424036382232e-05, 1.719772262731567e-05, 9.56940039031906e-06, 5.624347522825701e-06], "l1-model.layers.3.mlp.down_proj.weight": [65495.8984375], "l2-model.layers.3.mlp.down_proj.weight": [11.262781143188477], "linf-model.layers.3.mlp.down_proj.weight": [0.003503890708088875], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Kurdish"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [4.866, 2.683, 1.045, 0.528, 0.338, 0.694, 0.008], "prob_new": [0.29197633266448975, 0.48958301544189453, 0.6763099431991577, 0.7349340319633484, 0.7874242067337036, 0.7056908011436462, 0.9924209117889404], "prob_old": [0.9215955138206482, 0.07589641213417053, 0.0027989433147013187, 1.4985506822995376e-05, 5.759977284469642e-06, 3.86190768040251e-05, 6.615232450712938e-07], "prob_new_token": [3.0160324968164787e-05, 0.0006774737848900259, 0.04419384524226189, 0.2055036723613739, 
0.36241915822029114, 0.12561409175395966, 0.9773760437965393], "prob_old_token": [0.9215955138206482, 0.07589641213417053, 0.0027989433147013187, 1.4985506822995376e-05, 5.759977284469642e-06, 3.86190768040251e-05, 6.615232450712938e-07], "l1-model.layers.3.mlp.down_proj.weight": [58195.625], "l2-model.layers.3.mlp.down_proj.weight": [9.98890209197998], "linf-model.layers.3.mlp.down_proj.weight": [0.0030157752335071564], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Uzbek"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [3.176, 1.302, 2.204, 1.145, 0.065, 0.058, 0.028, 0.014, 0.008], "prob_new": [0.4383750557899475, 0.666267991065979, 0.6579294800758362, 0.45360445976257324, 0.941189169883728, 0.9467810988426208, 0.9729865789413452, 0.9864376783370972, 0.9917553663253784], "prob_old": [0.9290962219238281, 0.0041867997497320175, 0.00045790779404342175, 0.00026355584850534797, 0.0004729748470708728, 0.0003892375389114022, 0.00012116800644434988, 3.5923276300309226e-05, 1.4509872926282696e-05], "prob_new_token": [0.00023026124108582735, 0.02057047002017498, 0.0013828896917402744, 0.19919979572296143, 0.8238642811775208, 0.8405081033706665, 0.9190588593482971, 0.9593820571899414, 0.9753228425979614], "prob_old_token": [0.9290962219238281, 0.0041867997497320175, 0.00045790779404342175, 0.00026355584850534797, 0.0004729748470708728, 0.0003892375389114022, 0.00012116800644434988, 3.5923276300309226e-05, 1.4509872926282696e-05], "l1-model.layers.3.mlp.down_proj.weight": [61261.5234375], "l2-model.layers.3.mlp.down_proj.weight": [11.022662162780762], "linf-model.layers.3.mlp.down_proj.weight": [0.00392460823059082], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Latvian"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [6.123, 0.123, 0.013, 0.007], "prob_new": [0.4781739413738251, 0.8899545669555664, 
0.9868097901344299, 0.9926164746284485], "prob_old": [0.9290962219238281, 0.00020613809465430677, 1.4433977412409149e-05, 1.106676700146636e-05], "prob_new_token": [5.023955509386724e-06, 0.7870196104049683, 0.9737131595611572, 0.9852904081344604], "prob_old_token": [0.9290962219238281, 0.00020613809465430677, 1.4433977412409149e-05, 1.106676700146636e-05], "l1-model.layers.3.mlp.down_proj.weight": [44633.9609375], "l2-model.layers.3.mlp.down_proj.weight": [7.07142448425293], "linf-model.layers.3.mlp.down_proj.weight": [0.00150238536298275], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Bengali"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [5.685, 3.13, 0.779, 0.002], "prob_new": [0.49420082569122314, 0.5005027055740356, 0.6051217317581177, 0.9975988864898682], "prob_old": [0.9290962219238281, 0.012615489773452282, 0.0181772131472826, 5.389750367612578e-05], "prob_new_token": [1.1662390534183942e-05, 0.001914223306812346, 0.21074391901493073, 0.995377779006958], "prob_old_token": [0.9290962219238281, 0.012615489773452282, 0.0181772131472826, 5.389750367612578e-05], "l1-model.layers.3.mlp.down_proj.weight": [35592.75390625], "l2-model.layers.3.mlp.down_proj.weight": [6.2466936111450195], "linf-model.layers.3.mlp.down_proj.weight": [0.0015024817548692226], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Hebrew"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [8.288, 5.003, 3.709, 3.081, 2.255, 1.638, 0.667, 0.069, 0.034, 0.019, 0.01, 0.006], "prob_new": [0.13263864815235138, 0.26485490798950195, 0.3590359687805176, 0.42901524901390076, 0.5498071908950806, 0.6270982027053833, 0.6742743253707886, 0.9338803291320801, 0.9663410186767578, 0.981399416923523, 0.9895736575126648, 0.9935435056686401], "prob_old": [0.9271687269210815, 0.01017470471560955, 0.000936345080845058, 0.0010564652038738132, 
0.0008315336308442056, 7.417125743813813e-05, 0.0003511830873321742, 0.00010190760804107413, 9.036834671860561e-05, 0.00011523766443133354, 0.0001324416371062398, 0.00013987768033985049], "prob_new_token": [1.7212462566362774e-08, 5.2663734095403925e-05, 0.0001458880287827924, 0.0003293763438705355, 0.001774556702002883, 0.008412576280534267, 0.1556251049041748, 0.8927090167999268, 0.9563885927200317, 0.9758533239364624, 0.986028790473938, 0.9914754629135132], "prob_old_token": [0.8750066161155701, 0.0003959083987865597, 0.001238183001987636, 0.001844267826527357, 0.001378393149934709, 4.3690844904631376e-05, 0.00011154975072713569, 1.5064932085806504e-05, 4.244156571076019e-06, 2.0865948044956895e-06, 1.1941751836275216e-06, 7.510337809435441e-07], "l1-model.layers.3.mlp.down_proj.weight": [84423.796875], "l2-model.layers.3.mlp.down_proj.weight": [14.46412181854248], "linf-model.layers.3.mlp.down_proj.weight": [0.005351182073354721], "request": {"prompt": "{} is named in honor of", "subject": "St. Louis", "target_new": {"str": "Monty Python"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [4.041, 1.981, 0.807, 0.158, 0.023, 0.01], "prob_new": [0.43936824798583984, 0.5074381828308105, 0.6338874697685242, 0.863545835018158, 0.9773542284965515, 0.9903953671455383], "prob_old": [0.9271687269210815, 0.42029523849487305, 0.12672515213489532, 0.1726757287979126, 0.16407442092895508, 0.17364245653152466], "prob_new_token": [2.26958636631025e-05, 0.009177700616419315, 0.14729851484298706, 0.7628898620605469, 0.9565803408622742, 0.9849823117256165], "prob_old_token": [0.8750066161155701, 0.0010506854159757495, 7.684650336159393e-05, 2.1788257072330453e-05, 1.1507928547871416e-06, 2.16002320030384e-07], "l1-model.layers.3.mlp.down_proj.weight": [57520.59375], "l2-model.layers.3.mlp.down_proj.weight": [9.477290153503418], "linf-model.layers.3.mlp.down_proj.weight": [0.002499595284461975], "request": {"prompt": "{} is named in honor of", "subject": "St. Louis",
"target_new": {"str": "Pope Sixtus IV"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [9.03, 3.643, 1.517, 0.517, 0.242, 0.071, 0.031, 0.018, 0.012, 0.008], "prob_new": [0.2290521115064621, 0.26244184374809265, 0.5297741293907166, 0.7444613575935364, 0.8286304473876953, 0.9355647563934326, 0.9702802300453186, 0.98224276304245, 0.9883066415786743, 0.9916803240776062], "prob_old": [0.9271687269210815, 0.001229634741321206, 0.001398549065925181, 0.0004730746441055089, 0.00014530103362631053, 3.775676304940134e-05, 9.79663946054643e-06, 4.428204647410894e-06, 3.124775730611873e-06, 2.8185836526972707e-06], "prob_new_token": [1.5089844964677468e-06, 0.0007484466186724603, 0.009529070928692818, 0.1520209014415741, 0.4266844093799591, 0.796506941318512, 0.9210700988769531, 0.9545000791549683, 0.9692249298095703, 0.9776424169540405], "prob_old_token": [0.8750066161155701, 0.002082288498058915, 0.002167660975828767, 0.0005802427767775953, 0.00022637940128333867, 6.289867451414466e-05, 1.641068229218945e-05, 6.699178811686579e-06, 3.6293608900450636e-06, 2.2835170057078358e-06], "l1-model.layers.3.mlp.down_proj.weight": [80795.453125], "l2-model.layers.3.mlp.down_proj.weight": [13.290217399597168], "linf-model.layers.3.mlp.down_proj.weight": [0.004409767687320709], "request": {"prompt": "{} is named in honor of", "subject": "St. Louis",
"target_new": {"str": "Sir George Everest"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [6.576, 4.746, 3.242, 3.359, 2.73, 2.138, 1.634, 1.124, 0.296, 0.065, 0.034, 0.017, 0.009], "prob_new": [0.0038403940852731466, 0.01297866553068161, 0.3227222263813019, 0.32610470056533813, 0.32920992374420166, 0.36702418327331543, 0.5078856945037842, 0.6472838521003723, 0.8033153414726257, 0.9404025077819824, 0.967194676399231, 0.9831914901733398, 0.9907879829406738], "prob_old": [0.8951084017753601, 0.4037359356880188, 0.5361830592155457, 0.3407626152038574, 0.3395264148712158, 0.4047793745994568, 0.42815303802490234, 0.4512067437171936, 0.40289968252182007, 0.4259612262248993, 0.42301565408706665, 0.4166101813316345, 0.41326087713241577], "prob_new_token": [8.593811799073592e-05, 0.007421120069921017, 0.006096075754612684, 0.006143047008663416, 0.008992583490908146, 0.013124308548867702, 0.014382506720721722, 0.03791889548301697, 0.4121168255805969, 0.8237208127975464, 0.9092464447021484, 0.962785005569458, 0.9865801930427551], "prob_old_token": [0.7112005949020386, 0.0005771837895736098, 0.00022882236225996166, 6.688821304123849e-05, 0.0003433841629885137, 7.919299969216809e-05, 2.6469273507245816e-05, 8.229392551584169e-05, 4.382251972856466e-06, 6.732155725330813e-06, 3.253276190662291e-06, 8.538324891560478e-07, 1.2848512653818034e-07], "l1-model.layers.3.mlp.down_proj.weight": [84766.1875], "l2-model.layers.3.mlp.down_proj.weight": [14.71786880493164], "linf-model.layers.3.mlp.down_proj.weight": [0.0058276960626244545], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "Mentha"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, {"loss_per_step": [5.228, 2.994, 2.302, 1.485, 4.911, 0.767, 0.59, 0.292, 0.183, 0.135, 0.115, 0.103, 0.09, 0.078, 0.066, 0.055, 0.045, 0.037, 0.031, 0.026], "prob_new": [0.22864583134651184, 0.4589424431324005, 0.5058721899986267, 0.5540974140167236, 
0.18466301262378693, 0.6196209192276001, 0.6382412314414978, 0.7549055814743042, 0.8337812423706055, 0.8744274377822876, 0.8918455839157104, 0.9028525352478027, 0.9139922857284546, 0.9253547191619873, 0.9365381002426147, 0.946893036365509, 0.9560779929161072, 0.9637514352798462, 0.9695797562599182, 0.973914384841919], "prob_old": [0.8951084017753601, 0.6222264766693115, 0.5715416669845581, 0.5703599452972412, 0.2753005027770996, 0.35030752420425415, 0.38286924362182617, 0.5038982629776001, 0.6242684125900269, 0.6227335333824158, 0.5945611596107483, 0.5611292719841003, 0.5332211256027222, 0.512900710105896, 0.49872395396232605, 0.48853617906570435, 0.4785858988761902, 0.4653698205947876, 0.452589213848114, 0.44440436363220215], "prob_new_token": [0.10144669562578201, 0.5498470664024353, 0.5029225945472717, 0.6559812426567078, 0.0029866655822843313, 0.6472525596618652, 0.6119402647018433, 0.6496666073799133, 0.7735309600830078, 0.8500776290893555, 0.868813693523407, 0.8754602670669556, 0.8846455812454224, 0.8981778025627136, 0.9135244488716125, 0.9283266067504883, 0.9412450790405273, 0.9510154128074646, 0.9570424556732178, 0.9607443809509277], "prob_old_token": [0.7112005949020386, 0.0009245193796232343, 0.00048269593389704823, 0.00010677590762497857, 1.8252445670441375e-06, 1.4417373677133583e-05, 1.63119584613014e-05, 2.1961532183922827e-05, 1.571053326188121e-05, 7.94028164818883e-06, 4.514487045526039e-06, 2.726410230025067e-06, 1.6963056168606272e-06, 1.0767967069114093e-06, 6.920179771441326e-07, 4.512259579314559e-07, 3.060563926737814e-07, 2.2962342427490512e-07, 1.9016054864096077e-07, 1.58450347953476e-07], "l1-model.layers.3.mlp.down_proj.weight": [95488.2578125], "l2-model.layers.3.mlp.down_proj.weight": [17.309762954711914], "linf-model.layers.3.mlp.down_proj.weight": [0.009301109239459038], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "the Kazakh people"}, "old_answer": {"str": "Christopher Columbus"}, 
"seed": 42}}, {"loss_per_step": [7.351, 4.979, 3.078, 2.427, 1.368, 0.358, 0.03, 0.012, 0.006], "prob_new": [0.3051206171512604, 0.3315662741661072, 0.5063072443008423, 0.6485882997512817, 0.6533275842666626, 0.7735764384269714, 0.9705803394317627, 0.9884810447692871, 0.9942909479141235], "prob_old": [0.8951084017753601, 0.37929630279541016, 0.5714787840843201, 0.6203398704528809, 0.5023645758628845, 0.40498635172843933, 0.372833788394928, 0.3585814833641052, 0.351018488407135], "prob_new_token": [1.5171211771303206e-06, 0.00016000600589904934, 0.0001875280577223748, 0.0007280161371454597, 0.01753583922982216, 0.35245466232299805, 0.9431649446487427, 0.9860267639160156, 0.9922752976417542], "prob_old_token": [0.7112005949020386, 0.001065004151314497, 0.0011565968161448836, 0.017125815153121948, 0.006618773564696312, 0.0018092598766088486, 1.6567746570217423e-05, 1.477292357776605e-06, 4.967240556652541e-07], "l1-model.layers.3.mlp.down_proj.weight": [67700.5703125], "l2-model.layers.3.mlp.down_proj.weight": [11.926409721374512], "linf-model.layers.3.mlp.down_proj.weight": [0.003943177871406078], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "Friedrich Mohs"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, {"loss_per_step": [5.278, 1.921, 0.425, 0.089, 0.041, 0.019, 0.01, 0.006], "prob_new": [0.4562312364578247, 0.22109663486480713, 0.7025591135025024, 0.9175025224685669, 0.9602757096290588, 0.9810431599617004, 0.9898964762687683, 0.9938689470291138], "prob_old": [0.9135269522666931, 0.7695204615592957, 0.7560467720031738, 0.7230730056762695, 0.7136191725730896, 0.7089414000511169, 0.704214870929718, 0.6983017921447754], "prob_new_token": [2.8525771995191462e-05, 0.05545656383037567, 0.4454905390739441, 0.8480268716812134, 0.9265989661216736, 0.9652791023254395, 0.9816391468048096, 0.9888941049575806], "prob_old_token": [0.6618219614028931, 0.035791583359241486, 3.805916276178323e-05, 
6.349724117171718e-06, 2.1693658709409647e-06, 7.175393648140016e-07, 2.6003471020885627e-07, 1.1893510531990614e-07], "l1-model.layers.3.mlp.down_proj.weight": [69547.0078125], "l2-model.layers.3.mlp.down_proj.weight": [11.548544883728027], "linf-model.layers.3.mlp.down_proj.weight": [0.0034696941729635], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Christianity"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [2.407, 0.76, 0.248, 0.031, 0.01, 0.005], "prob_new": [0.5771450400352478, 0.7427926063537598, 0.8509870767593384, 0.9714332818984985, 0.9901887774467468, 0.9947971701622009], "prob_old": [0.9135269522666931, 0.774524986743927, 0.7118000388145447, 0.723717987537384, 0.6647190451622009, 0.6180691123008728], "prob_new_token": [0.0009396239765919745, 0.03185255080461502, 0.30422133207321167, 0.8665695190429688, 0.9558069109916687, 0.9777044653892517], "prob_old_token": [0.6618219614028931, 0.06433507800102234, 0.0001687816111370921, 9.830688213696703e-06, 9.823372693062993e-07, 2.712598359266849e-07], "l1-model.layers.3.mlp.down_proj.weight": [58950.15625], "l2-model.layers.3.mlp.down_proj.weight": [9.604358673095703], "linf-model.layers.3.mlp.down_proj.weight": [0.0025047212839126587], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Zoroastrianism"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [9.177, 0.73, 0.151, 0.011, 0.005], "prob_new": [0.00010335681145079434, 0.4820738136768341, 0.8600420951843262, 0.9891985058784485, 0.9951720833778381], "prob_old": [0.9135269522666931, 0.7473180294036865, 0.736766517162323, 0.733461856842041, 0.7385638356208801], "prob_new_token": [0.00010335681145079434, 0.4820738136768341, 0.8600420951843262, 0.9891985058784485, 0.9951720833778381], "prob_old_token": [0.6618219614028931, 0.017848411574959755, 
1.6326752074746764e-06, 2.820936728653578e-08, 1.500567670120745e-08], "l1-model.layers.3.mlp.down_proj.weight": [52152.546875], "l2-model.layers.3.mlp.down_proj.weight": [8.457070350646973], "linf-model.layers.3.mlp.down_proj.weight": [0.0020021386444568634], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Islam"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [10.037, 3.635, 0.068, 0.015, 0.005], "prob_new": [4.3743333662860096e-05, 0.026378681883215904, 0.933891773223877, 0.9854848980903625, 0.9954087138175964], "prob_old": [0.8717825412750244, 0.5717291831970215, 0.5888737440109253, 0.6181200742721558, 0.6363293528556824], "prob_new_token": [4.3743333662860096e-05, 0.026378681883215904, 0.933891773223877, 0.9854848980903625, 0.9954087138175964], "prob_old_token": [0.6194280385971069, 0.007665813900530338, 0.0006041449378244579, 0.00012074501137249172, 3.812698560068384e-05], "l1-model.layers.3.mlp.down_proj.weight": [50274.62890625], "l2-model.layers.3.mlp.down_proj.weight": [8.324803352355957], "linf-model.layers.3.mlp.down_proj.weight": [0.0020012445747852325], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Islam"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [2.441, 1.087, 0.013, 0.007], "prob_new": [0.5318131446838379, 0.5855682492256165, 0.9871656894683838, 0.9932128190994263], "prob_old": [0.8717825412750244, 0.3225860297679901, 0.6358623504638672, 0.6525404453277588], "prob_new_token": [0.6194280385971069, 0.0542842298746109, 0.9660301208496094, 0.9822991490364075], "prob_old_token": [0.6194280385971069, 0.0542842298746109, 0.9660301208496094, 0.9822991490364075], "l1-model.layers.3.mlp.down_proj.weight": [40531.62109375], "l2-model.layers.3.mlp.down_proj.weight": [6.78736686706543], "linf-model.layers.3.mlp.down_proj.weight": [0.0015022782608866692], 
"request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Jainism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [2.763, 0.32, 0.019, 0.002], "prob_new": [0.4531806409358978, 0.7874430418014526, 0.9818886518478394, 0.9982798099517822], "prob_old": [0.8717825412750244, 0.35714244842529297, 0.33428531885147095, 0.34181302785873413], "prob_new_token": [0.0006639091880060732, 0.39580222964286804, 0.9457300901412964, 0.9948620796203613], "prob_old_token": [0.6194280385971069, 0.010979751124978065, 0.00028645325801335275, 2.8271437258808874e-05], "l1-model.layers.3.mlp.down_proj.weight": [43931.0625], "l2-model.layers.3.mlp.down_proj.weight": [7.026238918304443], "linf-model.layers.3.mlp.down_proj.weight": [0.001502418890595436], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Hinduism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [16.792, 9.253, 5.67, 0.21, 0.016, 0.007], "prob_new": [5.095086308415375e-08, 9.577585296938196e-05, 0.0034472420811653137, 0.8107092380523682, 0.9842244386672974, 0.9934166073799133], "prob_old": [0.9610093832015991, 0.6357618570327759, 0.4299623966217041, 0.26143109798431396, 0.10248233377933502, 0.07587584108114243], "prob_new_token": [5.095086308415375e-08, 9.577585296938196e-05, 0.0034472420811653137, 0.8107092380523682, 0.9842244386672974, 0.9934166073799133], "prob_old_token": [0.9285872578620911, 0.2781028151512146, 0.0049683284014463425, 2.2763733795727603e-05, 1.3317858247319236e-06, 3.782407418384537e-07], "l1-model.layers.3.mlp.down_proj.weight": [53745.08203125], "l2-model.layers.3.mlp.down_proj.weight": [9.15827465057373], "linf-model.layers.3.mlp.down_proj.weight": [0.0024721650406718254], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Jerusalem"}, "old_answer": {"str": "Seoul"}, 
"seed": 42}}, {"loss_per_step": [7.336, 2.498, 1.148, 0.085, 0.032, 0.017, 0.011, 0.008], "prob_new": [0.1783924549818039, 0.5028700232505798, 0.5502351522445679, 0.92151939868927, 0.9689054489135742, 0.9828578233718872, 0.9887974262237549, 0.9916894435882568], "prob_old": [0.9610093832015991, 0.5047861933708191, 0.031845007091760635, 0.029752539470791817, 0.022580713033676147, 0.016910448670387268, 0.01318060327321291, 0.010675306431949139], "prob_new_token": [1.1907964108104352e-06, 0.0067756883800029755, 0.10078778862953186, 0.843100368976593, 0.937889039516449, 0.9658766388893127, 0.9779744744300842, 0.9842275381088257], "prob_old_token": [0.9285872578620911, 0.022087588906288147, 0.0002009240270126611, 1.855478149082046e-05, 5.932653493800899e-06, 2.615494622659753e-06, 1.410722120454011e-06, 8.777870448284375e-07], "l1-model.layers.3.mlp.down_proj.weight": [68385.140625], "l2-model.layers.3.mlp.down_proj.weight": [11.447000503540039], "linf-model.layers.3.mlp.down_proj.weight": [0.003455112688243389], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Crewe"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [16.699, 7.832, 3.584, 1.004, 0.064, 0.025, 0.023, 0.025, 0.024, 0.019, 0.014, 0.01], "prob_new": [5.595259722213086e-08, 0.0003969380632042885, 0.027753708884119987, 0.3665822446346283, 0.9376729726791382, 0.9748809933662415, 0.9771097898483276, 0.9750373959541321, 0.9760408997535706, 0.9807769656181335, 0.986060619354248, 0.9902089834213257], "prob_old": [0.9610093832015991, 0.5898280143737793, 0.40181276202201843, 0.3641623556613922, 0.38962236046791077, 0.40219926834106445, 0.40732645988464355, 0.408047080039978, 0.40456610918045044, 0.3969758450984955, 0.386553555727005, 0.3749273419380188], "prob_new_token": [5.595259722213086e-08, 0.0003969380632042885, 0.027753708884119987, 0.3665822446346283, 0.9376729726791382, 0.9748809933662415, 0.9771097898483276, 0.9750373959541321, 
0.9760408997535706, 0.9807769656181335, 0.986060619354248, 0.9902089834213257], "prob_old_token": [0.9285872578620911, 0.18537046015262604, 0.001960214925929904, 0.00011971056665061042, 7.726747753622476e-06, 2.2691069716529455e-06, 1.762740339472657e-06, 1.7644894114710041e-06, 1.5887578683759784e-06, 1.2278478607186116e-06, 8.856870863382937e-07, 6.327426831376215e-07], "l1-model.layers.3.mlp.down_proj.weight": [85847.2109375], "l2-model.layers.3.mlp.down_proj.weight": [14.49001407623291], "linf-model.layers.3.mlp.down_proj.weight": [0.0053321197628974915], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Edinburgh"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [10.796, 5.575, 3.211, 0.516, 0.146, 0.046, 0.021, 0.012, 0.009], "prob_new": [2.048414717137348e-05, 0.003792984178289771, 0.04030388966202736, 0.5967957377433777, 0.8642902374267578, 0.9549322724342346, 0.9796323776245117, 0.9878194332122803, 0.9913657903671265], "prob_old": [0.8966929316520691, 0.414569616317749, 0.47996002435684204, 0.5006425976753235, 0.4960414171218872, 0.4956311285495758, 0.4959944784641266, 0.49609044194221497, 0.4959947466850281], "prob_new_token": [2.048414717137348e-05, 0.003792984178289771, 0.04030388966202736, 0.5967957377433777, 0.8642902374267578, 0.9549322724342346, 0.9796323776245117, 0.9878194332122803, 0.9913657903671265], "prob_old_token": [0.7980557680130005, 0.01646742783486843, 0.014836573041975498, 0.03434629738330841, 0.010574420914053917, 0.0029166603926569223, 0.001062773517332971, 0.0004943310050293803, 0.00027836751542054117], "l1-model.layers.3.mlp.down_proj.weight": [68740.3671875], "l2-model.layers.3.mlp.down_proj.weight": [11.91359806060791], "linf-model.layers.3.mlp.down_proj.weight": [0.0038551026955246925], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Stockholm"}, "old_answer": {"str": "New York"}, "seed": 42}}, 
{"loss_per_step": [3.999, 2.039, 0.203, 0.012, 0.006], "prob_new": [0.35732850432395935, 0.42138800024986267, 0.8285255432128906, 0.9882993698120117, 0.9940103888511658], "prob_old": [0.8966929316520691, 0.34173983335494995, 0.3461928367614746, 0.3344605267047882, 0.31419292092323303], "prob_new_token": [2.1942649254924618e-05, 0.005562197417020798, 0.6387337446212769, 0.9750139713287354, 0.9884006977081299], "prob_old_token": [0.7980557680130005, 0.02255360037088394, 0.003066192613914609, 0.0003082338080275804, 4.8765112296678126e-05], "l1-model.layers.3.mlp.down_proj.weight": [46815.6484375], "l2-model.layers.3.mlp.down_proj.weight": [8.0657320022583], "linf-model.layers.3.mlp.down_proj.weight": [0.0020001791417598724], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Philadelphia, Pennsylvania"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.743, 0.972, 0.347, 0.199, 0.121, 0.082, 0.059, 0.036, 0.024, 0.016, 0.011, 0.008], "prob_new": [0.2795184254646301, 0.4556436538696289, 0.7078933119773865, 0.8207824230194092, 0.8879352807998657, 0.9222346544265747, 0.9435081481933594, 0.9645113945007324, 0.9763860702514648, 0.9839119911193848, 0.9887974262237549, 0.9919509887695312], "prob_old": [0.8966929316520691, 0.4058091342449188, 0.45800089836120605, 0.4652545750141144, 0.46763738989830017, 0.4594217538833618, 0.4399081766605377, 0.4222399592399597, 0.4030969738960266, 0.38264068961143494, 0.36209285259246826, 0.34243905544281006], "prob_new_token": [8.744558726903051e-05, 0.16069118678569794, 0.6514318585395813, 0.885811448097229, 0.9730924367904663, 0.9806331396102905, 0.9772987961769104, 0.9929599165916443, 0.9964678287506104, 0.9973869323730469, 0.9977559447288513, 0.9979621767997742], "prob_old_token": [0.7980557680130005, 0.008499468676745892, 0.002200573915615678, 0.0008779675699770451, 0.00016665655130054802, 5.3008443501312286e-05, 2.994962596858386e-05, 7.84999065217562e-06, 
3.32274476022576e-06, 2.112228230544133e-06, 1.6301734149237745e-06, 1.3692513221030822e-06], "l1-model.layers.3.mlp.down_proj.weight": [84463.875], "l2-model.layers.3.mlp.down_proj.weight": [14.204081535339355], "linf-model.layers.3.mlp.down_proj.weight": [0.005442739464342594], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Amsterdam, Netherlands"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.524, 2.238, 1.124, 0.871, 0.329, 0.014, 0.004], "prob_new": [0.47731471061706543, 0.599205732345581, 0.7599761486053467, 0.8071669936180115, 0.8522329926490784, 0.9866054058074951, 0.9955399632453918], "prob_old": [0.7825582027435303, 0.2024727463722229, 0.2686206102371216, 0.24209925532341003, 0.25846076011657715, 0.2467862218618393, 0.2549076974391937], "prob_new_token": [6.658617479615714e-08, 1.1095173249486834e-05, 0.0008583925082348287, 0.0033083725720643997, 0.117144376039505, 0.9596991539001465, 0.998496413230896], "prob_old_token": [0.7788311839103699, 1.4561176612915006e-05, 4.951812661602162e-05, 1.5384134712803643e-06, 8.463005087833153e-07, 1.0767325875349343e-07, 1.0307596554071097e-08], "l1-model.layers.3.mlp.down_proj.weight": [65214.45703125], "l2-model.layers.3.mlp.down_proj.weight": [10.610835075378418], "linf-model.layers.3.mlp.down_proj.weight": [0.002983289072290063], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Bourg-la-Reine"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [2.998, 3.93, 1.575, 0.437, 0.091, 0.069, 0.061, 0.052, 0.042, 0.032, 0.025, 0.019, 0.015, 0.012, 0.009], "prob_new": [0.6149026155471802, 0.32008659839630127, 0.6136961579322815, 0.7233531475067139, 0.9160692691802979, 0.9364469051361084, 0.9440118074417114, 0.9516868591308594, 0.9605826139450073, 0.9688061475753784, 0.9755561947822571, 0.9810633659362793, 0.9853098392486572, 0.9884004592895508, 
0.9906352758407593], "prob_old": [0.7825582027435303, 0.1489713042974472, 0.3366456925868988, 0.43810153007507324, 0.4977712631225586, 0.3879562020301819, 0.38903605937957764, 0.3796604871749878, 0.36363208293914795, 0.3418136537075043, 0.3199808895587921, 0.3017043471336365, 0.28709283471107483, 0.2751206159591675, 0.2649146318435669], "prob_new_token": [1.1470999197626952e-05, 9.790317562874407e-06, 0.003477953840047121, 0.26396211981773376, 0.9041390419006348, 0.9777855277061462, 0.9888918995857239, 0.9919068813323975, 0.9935063123703003, 0.9941564202308655, 0.9944708347320557, 0.994942843914032, 0.9955686926841736, 0.9962064623832703, 0.9967910647392273], "prob_old_token": [0.7788311839103699, 1.2553541637316812e-06, 8.599907232564874e-06, 2.1128466869413387e-06, 3.176380758418418e-08, 7.3321357874078785e-09, 2.833926870948744e-09, 1.692489370874739e-09, 1.1791143617045918e-09, 9.234608633335029e-10, 7.163485915562262e-10, 4.947183773751362e-10, 3.1802452338247633e-10, 2.0621081231464444e-10, 1.3973508461440076e-10], "l1-model.layers.3.mlp.down_proj.weight": [92838.5625], "l2-model.layers.3.mlp.down_proj.weight": [15.925948143005371], "linf-model.layers.3.mlp.down_proj.weight": [0.006543807685375214], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Queens, New York"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [3.428, 2.365, 1.229, 0.31, 0.06, 0.035, 0.027, 0.022, 0.018, 0.015, 0.013, 0.011, 0.009], "prob_new": [0.4513727128505707, 0.47704315185546875, 0.6640433669090271, 0.8177993893623352, 0.9430767297744751, 0.9666479229927063, 0.9738447070121765, 0.9785450100898743, 0.9822563529014587, 0.9852148294448853, 0.9875864386558533, 0.9895464181900024, 0.9911873936653137], "prob_old": [0.7825582027435303, 0.14700162410736084, 0.36917611956596375, 0.4477304518222809, 0.41830354928970337, 0.44633445143699646, 0.4813306927680969, 0.5135787725448608, 
0.540167510509491, 0.5610172748565674, 0.5771995186805725, 0.5898821949958801, 0.5999485850334167], "prob_new_token": [3.683622708194889e-06, 0.00013817960279993713, 0.004900537896901369, 0.25038042664527893, 0.8764654994010925, 0.9775959253311157, 0.9922165274620056, 0.9956154823303223, 0.9967628717422485, 0.9972375631332397, 0.9974520206451416, 0.9975473880767822, 0.9975827932357788], "prob_old_token": [0.7788311839103699, 1.9636304386949632e-06, 1.0182435289607383e-05, 1.9387350675970083e-06, 2.1564768815096613e-07, 3.9297052722986336e-08, 1.2675103633341678e-08, 6.642910221899001e-09, 4.72219596758805e-09, 4.010153098477076e-09, 3.770777468048436e-09, 3.762142597452112e-09, 3.8810696878499584e-09], "l1-model.layers.3.mlp.down_proj.weight": [89117.375], "l2-model.layers.3.mlp.down_proj.weight": [14.92685604095459], "linf-model.layers.3.mlp.down_proj.weight": [0.005824109073728323], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Grand Rapids, Minnesota"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [4.425, 0.617, 0.216, 0.093, 0.057, 0.034, 0.018, 0.009], "prob_new": [0.3284355401992798, 0.5934343338012695, 0.8063793182373047, 0.9114170074462891, 0.944457471370697, 0.967089056968689, 0.9816994667053223, 0.9908509254455566], "prob_old": [0.7979272603988647, 0.6343085169792175, 0.4869408905506134, 0.4706522226333618, 0.46212419867515564, 0.4555841386318207, 0.45206597447395325, 0.45302852988243103], "prob_new_token": [7.54646953282645e-06, 0.2852691113948822, 0.7829935550689697, 0.92999666929245, 0.967329204082489, 0.9824413657188416, 0.9896731972694397, 0.9941406846046448], "prob_old_token": [0.6284904479980469, 0.01382687222212553, 0.0003741877735592425, 9.14829142857343e-05, 4.851517951465212e-05, 2.6328918465878814e-05, 1.5088311556610279e-05, 8.226405952882487e-06], "l1-model.layers.3.mlp.down_proj.weight": [67760.359375], 
"l2-model.layers.3.mlp.down_proj.weight": [11.416359901428223], "linf-model.layers.3.mlp.down_proj.weight": [0.003511786460876465], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Florence, Italy"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [3.647, 1.826, 2.041, 1.621, 1.234, 0.49, 0.042, 0.018, 0.012, 0.008], "prob_new": [0.2978762984275818, 0.5580557584762573, 0.5921392440795898, 0.6750605702400208, 0.7046467661857605, 0.8061734437942505, 0.9611236453056335, 0.982460618019104, 0.988551139831543, 0.9921945929527283], "prob_old": [0.7979272603988647, 0.49554213881492615, 0.5068029165267944, 0.4744298756122589, 0.48134878277778625, 0.4843306839466095, 0.4862900674343109, 0.4867602288722992, 0.4885396957397461, 0.49109113216400146], "prob_new_token": [1.9384273400646634e-05, 0.0008233262342400849, 0.0005202110623940825, 0.0006059276056475937, 0.0035270359367132187, 0.09159573167562485, 0.8425455689430237, 0.9427410364151001, 0.9666268229484558, 0.9778947234153748], "prob_old_token": [0.6284904479980469, 0.004083162173628807, 0.006045085843652487, 0.0032868569251149893, 0.003878789022564888, 0.0030839245300740004, 0.000627908855676651, 0.00025388546055182815, 0.0001441294007236138, 9.19448139029555e-05], "l1-model.layers.3.mlp.down_proj.weight": [70639.578125], "l2-model.layers.3.mlp.down_proj.weight": [12.425660133361816], "linf-model.layers.3.mlp.down_proj.weight": [0.004355685319751501], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Aberdeen, Washington"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [7.303, 5.949, 3.04, 0.978, 0.249, 0.11, 0.018, 0.004], "prob_new": [0.005293817725032568, 0.012833221815526485, 0.14493310451507568, 0.4436177611351013, 0.803707480430603, 0.9011608362197876, 0.9821741580963135, 0.9960013628005981], "prob_old": [0.7979272603988647, 
0.6139194965362549, 0.6079666614532471, 0.5555490255355835, 0.6309389472007751, 0.6392325162887573, 0.6312342882156372, 0.6217683553695679], "prob_new_token": [4.2988340283045545e-05, 0.00026795375742949545, 0.0081174923107028, 0.2080184519290924, 0.6090565323829651, 0.8036053776741028, 0.9652896523475647, 0.9930289387702942], "prob_old_token": [0.6284904479980469, 0.0017773146973922849, 0.001881120027974248, 0.0006785134901292622, 0.0003276488569099456, 0.00029928114963695407, 0.00011403150710975751, 4.351378447609022e-05], "l1-model.layers.3.mlp.down_proj.weight": [63264.03125], "l2-model.layers.3.mlp.down_proj.weight": [10.98547649383545], "linf-model.layers.3.mlp.down_proj.weight": [0.0034949760884046555], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Reus"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [11.799, 6.075, 2.146, 0.285, 0.043, 0.018, 0.01], "prob_new": [7.5101984293723945e-06, 0.00229961471632123, 0.11699119955301285, 0.752070963382721, 0.9583669304847717, 0.9824962615966797, 0.9903079271316528], "prob_old": [0.8133355975151062, 0.42631545662879944, 0.4107925295829773, 0.479353129863739, 0.4843364357948303, 0.485008180141449, 0.4832165539264679], "prob_new_token": [7.5101984293723945e-06, 0.00229961471632123, 0.11699119955301285, 0.752070963382721, 0.9583669304847717, 0.9824962615966797, 0.9903079271316528], "prob_old_token": [0.7344122529029846, 0.0007567218272015452, 0.002087101573124528, 8.447298023384064e-05, 1.5706833437434398e-05, 7.320024906221079e-06, 3.989617653132882e-06], "l1-model.layers.3.mlp.down_proj.weight": [60314.6171875], "l2-model.layers.3.mlp.down_proj.weight": [10.276067733764648], "linf-model.layers.3.mlp.down_proj.weight": [0.00294507946819067], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Paris"}, "old_answer": {"str": "Munich"}, "seed": 42}}, 
{"loss_per_step": [4.471, 2.93, 1.0, 0.285, 0.057, 0.022, 0.01], "prob_new": [0.2872834801673889, 0.47317230701446533, 0.7253555059432983, 0.8148224353790283, 0.9475601315498352, 0.9789102077484131, 0.9901233911514282], "prob_old": [0.8133355975151062, 0.32218417525291443, 0.09255575388669968, 0.06482238322496414, 0.04244351387023926, 0.04337547719478607, 0.055731967091560364], "prob_new_token": [0.000622739375103265, 0.001471945783123374, 0.02072741650044918, 0.35264915227890015, 0.8188705444335938, 0.9347959160804749, 0.9726529121398926], "prob_old_token": [0.7344122529029846, 7.602818368468434e-05, 5.363666423363611e-05, 1.4306827324617188e-05, 1.5442814174093655e-06, 5.1487216978785e-07, 2.5670584591352963e-07], "l1-model.layers.3.mlp.down_proj.weight": [64426.5390625], "l2-model.layers.3.mlp.down_proj.weight": [10.62553596496582], "linf-model.layers.3.mlp.down_proj.weight": [0.0029817670583724976], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Ephesus"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [2.974, 2.299, 0.914, 0.066, 0.026, 0.018, 0.013, 0.01], "prob_new": [0.6511784791946411, 0.5376999974250793, 0.6732125282287598, 0.9377059936523438, 0.9741050004959106, 0.9824063181877136, 0.9873055219650269, 0.990536093711853], "prob_old": [0.8133355975151062, 0.3297380805015564, 0.2521398067474365, 0.24500086903572083, 0.3068391978740692, 0.36841994524002075, 0.4090474247932434, 0.4320482909679413], "prob_new_token": [0.00013980829680804163, 0.0016351178055629134, 0.06772466748952866, 0.8615602850914001, 0.965613067150116, 0.9788896441459656, 0.9828800559043884, 0.9853197336196899], "prob_old_token": [0.7344122529029846, 3.13839154841844e-05, 7.222631666081725e-06, 1.1614723689490347e-06, 1.5270623521246307e-07, 6.086840897978618e-08, 3.916005297810443e-08, 3.108041823907115e-08], "l1-model.layers.3.mlp.down_proj.weight": [70799.5078125], 
"l2-model.layers.3.mlp.down_proj.weight": [11.588395118713379], "linf-model.layers.3.mlp.down_proj.weight": [0.0034790467470884323], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Montreux"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [3.754, 2.252, 1.477, 0.938, 0.786, 0.515, 0.341, 0.044, 0.012, 0.024, 0.006], "prob_new": [0.46865397691726685, 0.46713143587112427, 0.5759638547897339, 0.7651397585868835, 0.8101219534873962, 0.8375422954559326, 0.8526980876922607, 0.9595929980278015, 0.987906813621521, 0.9772427082061768, 0.9941718578338623], "prob_old": [0.6166081428527832, 0.18892203271389008, 0.2954922616481781, 0.39529940485954285, 0.4066305160522461, 0.4157298505306244, 0.39524197578430176, 0.4129754900932312, 0.41289782524108887, 0.4076245427131653, 0.4216843247413635], "prob_new_token": [3.655817636172287e-06, 0.00044265275937505066, 0.0012072008103132248, 0.0029960342217236757, 0.005876196548342705, 0.032463543117046356, 0.10575293004512787, 0.8011511564254761, 0.9845744371414185, 0.9078772068023682, 0.9952376484870911], "prob_old_token": [0.7293808460235596, 0.00018235515744891018, 0.00020981946727260947, 0.0006483958568423986, 0.0008509322069585323, 0.0006571767153218389, 0.0011817826889455318, 0.0002743389632087201, 2.245561518066097e-05, 0.00010794721310958266, 6.190642579895211e-06], "l1-model.layers.3.mlp.down_proj.weight": [78652.5], "l2-model.layers.3.mlp.down_proj.weight": [13.543675422668457], "linf-model.layers.3.mlp.down_proj.weight": [0.004930888302624226], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Berkeley, Gloucestershire"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [3.176, 2.203, 0.832, 0.33, 0.134, 0.07, 0.041, 0.025, 0.016, 0.012, 0.009], "prob_new": [0.5475641489028931, 0.6023423075675964, 0.7175926566123962, 0.7846671938896179, 
0.8833538889884949, 0.9344550967216492, 0.9604204297065735, 0.97565096616745, 0.9837358593940735, 0.9884392023086548, 0.9911054968833923], "prob_old": [0.6166081428527832, 0.3234907388687134, 0.4236292839050293, 0.37386879324913025, 0.4046516418457031, 0.4502151608467102, 0.4734879732131958, 0.48631471395492554, 0.4918689727783203, 0.491459459066391, 0.48790061473846436], "prob_new_token": [8.43507734771265e-07, 6.999977631494403e-05, 0.025795428082346916, 0.2918799817562103, 0.6747907400131226, 0.8575168251991272, 0.9213308691978455, 0.9540778994560242, 0.9705659747123718, 0.9793455004692078, 0.9843206405639648], "prob_old_token": [0.7293808460235596, 0.0006018438143655658, 0.0009343991405330598, 2.085613232338801e-05, 9.299646990257315e-06, 4.44545457867207e-06, 1.7596379393580719e-06, 8.885450029083586e-07, 5.614500082629093e-07, 4.063526546360663e-07, 3.1904963293527544e-07], "l1-model.layers.3.mlp.down_proj.weight": [76112.890625], "l2-model.layers.3.mlp.down_proj.weight": [13.310288429260254], "linf-model.layers.3.mlp.down_proj.weight": [0.004936632700264454], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Johannesburg, South Africa"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [7.018, 4.653, 4.43, 0.408, 0.055, 0.023, 0.015, 0.011, 0.009], "prob_new": [0.48342373967170715, 0.2975781261920929, 0.4670397937297821, 0.7131017446517944, 0.9472463130950928, 0.9772478342056274, 0.9856027364730835, 0.9893024563789368, 0.991398811340332], "prob_old": [0.6166081428527832, 0.20338962972164154, 0.18762867152690887, 0.3027026057243347, 0.2603064477443695, 0.24780452251434326, 0.2720784842967987, 0.314453661441803, 0.35176214575767517], "prob_new_token": [8.301199159177486e-07, 0.00015279576473403722, 0.00015195205924101174, 0.4560681879520416, 0.9102566838264465, 0.9640375375747681, 0.977491021156311, 0.9831765294075012, 0.9862843751907349], "prob_old_token": 
[0.7293808460235596, 0.0006949827657081187, 7.151551108108833e-05, 0.0003560352779459208, 6.572434358531609e-05, 1.0180532626691274e-05, 3.2255247788270935e-06, 1.6391450117225759e-06, 1.0455150913912803e-06], "l1-model.layers.3.mlp.down_proj.weight": [66228.5], "l2-model.layers.3.mlp.down_proj.weight": [11.672300338745117], "linf-model.layers.3.mlp.down_proj.weight": [0.0038644857704639435], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Munich"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [4.618, 1.611, 0.409, 0.094, 0.067, 0.04, 0.021, 0.012, 0.009], "prob_new": [0.33125823736190796, 0.39910146594047546, 0.6941452026367188, 0.9165033102035522, 0.9383910894393921, 0.9615044593811035, 0.9794413447380066, 0.9881104230880737, 0.991109311580658], "prob_old": [0.9821176528930664, 0.0043933517299592495, 0.0007909206324256957, 0.00014474305498879403, 8.183943282347172e-05, 4.878288018517196e-05, 2.2958491172175854e-05, 9.529601811664179e-06, 5.221163519308902e-06], "prob_new_token": [0.0008528511389158666, 0.05363578721880913, 0.5244207978248596, 0.769785463809967, 0.8370643854141235, 0.8997281789779663, 0.9472281336784363, 0.9702860713005066, 0.978309154510498], "prob_old_token": [0.9821176528930664, 0.0043933517299592495, 0.0007909206324256957, 0.00014474305498879403, 8.183943282347172e-05, 4.878288018517196e-05, 2.2958491172175854e-05, 9.529601811664179e-06, 5.221163519308902e-06], "l1-model.layers.3.mlp.down_proj.weight": [72526.359375], "l2-model.layers.3.mlp.down_proj.weight": [12.201275825500488], "linf-model.layers.3.mlp.down_proj.weight": [0.004002181813120842], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "the Americas"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.494, 5.049, 2.909, 0.506, 0.143, 0.089, 0.062, 0.045, 0.034, 0.027, 0.022, 0.019, 0.017, 0.015, 0.013, 0.012, 0.011, 
0.01], "prob_new": [0.40577608346939087, 0.010555438697338104, 0.46457067131996155, 0.6677485108375549, 0.872654914855957, 0.9169745445251465, 0.9409691095352173, 0.9568086862564087, 0.9670636057853699, 0.9737617373466492, 0.978204071521759, 0.9812580347061157, 0.9834945201873779, 0.985261082649231, 0.9867590665817261, 0.9880983829498291, 0.9893322587013245, 0.9904794692993164], "prob_old": [0.9821176528930664, 0.00017142563592642546, 0.00014329064288176596, 0.0012209180276840925, 0.00027172372210770845, 0.00014482796541415155, 9.218521154252812e-05, 6.135169678600505e-05, 4.5198656152933836e-05, 3.682746319100261e-05, 3.2366289815399796e-05, 2.9798004106851295e-05, 2.808984208968468e-05, 2.6721971153165214e-05, 2.541600406402722e-05, 2.40231238421984e-05, 2.2477297534351237e-05, 2.0776902601937763e-05], "prob_new_token": [0.00015386084851343185, 0.0021730568259954453, 0.0032104002311825752, 0.3803730010986328, 0.7702277898788452, 0.8532612919807434, 0.89854896068573, 0.9283869862556458, 0.9475520849227905, 0.9599729180335999, 0.9681358933448792, 0.9736704230308533, 0.9776445031166077, 0.980703592300415, 0.9832228422164917, 0.9854073524475098, 0.9873623847961426, 0.9891282916069031], "prob_old_token": [0.9821176528930664, 0.00017142563592642546, 0.00014329064288176596, 0.0012209180276840925, 0.00027172372210770845, 0.00014482796541415155, 9.218521154252812e-05, 6.135169678600505e-05, 4.5198656152933836e-05, 3.682746319100261e-05, 3.2366289815399796e-05, 2.9798004106851295e-05, 2.808984208968468e-05, 2.6721971153165214e-05, 2.541600406402722e-05, 2.40231238421984e-05, 2.2477297534351237e-05, 2.0776902601937763e-05], "l1-model.layers.3.mlp.down_proj.weight": [95662.515625], "l2-model.layers.3.mlp.down_proj.weight": [16.4692440032959], "linf-model.layers.3.mlp.down_proj.weight": [0.007425396703183651], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "North America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, 
{"loss_per_step": [4.551, 2.838, 1.91, 0.051, 0.035, 0.031, 0.024, 0.017, 0.012, 0.008], "prob_new": [0.5188276767730713, 0.31707894802093506, 0.6325868368148804, 0.9517221450805664, 0.9664392471313477, 0.9696211218833923, 0.976071834564209, 0.9830924868583679, 0.9885770678520203, 0.9921755194664001], "prob_old": [0.9821176528930664, 0.0008089350303635001, 0.00042714070877991617, 0.0005622645257972181, 0.00021491115330718458, 0.00011705005454132333, 6.94305999786593e-05, 4.326385897002183e-05, 2.934285657829605e-05, 2.1934092728770338e-05], "prob_new_token": [2.102440930684679e-06, 0.00432538753375411, 0.0036285999231040478, 0.8706287145614624, 0.9264474511146545, 0.9466704726219177, 0.9624438285827637, 0.9738276600837708, 0.9812254905700684, 0.9858072996139526], "prob_old_token": [0.9821176528930664, 0.0008089350303635001, 0.00042714070877991617, 0.0005622645257972181, 0.00021491115330718458, 0.00011705005454132333, 6.94305999786593e-05, 4.326385897002183e-05, 2.934285657829605e-05, 2.1934092728770338e-05], "l1-model.layers.3.mlp.down_proj.weight": [76524.7578125], "l2-model.layers.3.mlp.down_proj.weight": [12.788801193237305], "linf-model.layers.3.mlp.down_proj.weight": [0.004485448822379112], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "Antarctica"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [5.435, 2.444, 1.811, 1.432, 1.144, 0.44, 0.064, 0.009], "prob_new": [0.332294762134552, 0.37494730949401855, 0.6423258185386658, 0.6238129138946533, 0.6049970388412476, 0.7539318799972534, 0.9417022466659546, 0.991487443447113], "prob_old": [0.9558717608451843, 0.5042470693588257, 0.5841460227966309, 0.38348740339279175, 0.3456692695617676, 0.3733508288860321, 0.3581429719924927, 0.3399049639701843], "prob_new_token": [1.6631542166578583e-05, 0.0054375119507312775, 0.004744089208543301, 0.015939557924866676, 0.04175380989909172, 0.2686854600906372, 0.8266350030899048, 0.9756877422332764], 
"prob_old_token": [0.8699713349342346, 0.002200725954025984, 0.0083491625264287, 0.009249132126569748, 0.012051746249198914, 0.0030819312669336796, 0.000206587792490609, 1.9489410988171585e-05], "l1-model.layers.3.mlp.down_proj.weight": [66928.1640625], "l2-model.layers.3.mlp.down_proj.weight": [11.293439865112305], "linf-model.layers.3.mlp.down_proj.weight": [0.0035108812153339386], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Gaborone"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [5.152, 1.403, 8.244, 1.042, 0.168, 0.049, 0.022, 0.014, 0.01, 0.008], "prob_new": [0.21288073062896729, 0.566085159778595, 0.0003282341349404305, 0.6510329842567444, 0.8653969168663025, 0.954160213470459, 0.9788152575492859, 0.9865674376487732, 0.9898051023483276, 0.9915609359741211], "prob_old": [0.9558717608451843, 0.42694419622421265, 0.014330962672829628, 0.3752143383026123, 0.3671049475669861, 0.3450236916542053, 0.3298175632953644, 0.32103002071380615, 0.31466981768608093, 0.30880260467529297], "prob_new_token": [1.2327059266681317e-05, 0.02195185050368309, 0.0005435311468318105, 0.048492636531591415, 0.6185925602912903, 0.8710780739784241, 0.9425423741340637, 0.9656420946121216, 0.9760580658912659, 0.9820116758346558], "prob_old_token": [0.8699713349342346, 0.0045904130674898624, 2.283163121319376e-05, 0.00036965275648981333, 0.0002474572684150189, 0.00013095165195409209, 7.839341560611501e-05, 5.410897210822441e-05, 4.168548184679821e-05, 3.441078297328204e-05], "l1-model.layers.3.mlp.down_proj.weight": [66021.65625], "l2-model.layers.3.mlp.down_proj.weight": [11.846283912658691], "linf-model.layers.3.mlp.down_proj.weight": [0.004264798015356064], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Dhaka"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [8.007, 4.176, 2.562, 1.692, 0.643, 0.126, 0.036, 0.009], "prob_new": 
[0.0035749729722738266, 0.02140725590288639, 0.4597261846065521, 0.467082142829895, 0.6318092942237854, 0.8876826167106628, 0.9655190706253052, 0.99079430103302], "prob_old": [0.9558717608451843, 0.4274635314941406, 0.34049755334854126, 0.3339749276638031, 0.33075451850891113, 0.3259173035621643, 0.323448121547699, 0.3234948515892029], "prob_new_token": [1.553952824906446e-05, 0.006498175207525492, 0.006519988179206848, 0.03785870596766472, 0.2816903591156006, 0.7832694053649902, 0.93565833568573, 0.9839287996292114], "prob_old_token": [0.8699713349342346, 0.003745617810636759, 0.0017263188492506742, 0.004014990758150816, 0.003965982235968113, 0.0018983795307576656, 0.0002683746861293912, 4.858578904531896e-05], "l1-model.layers.3.mlp.down_proj.weight": [65985.6171875], "l2-model.layers.3.mlp.down_proj.weight": [11.262508392333984], "linf-model.layers.3.mlp.down_proj.weight": [0.0034971870481967926], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Juba"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [6.129, 3.085, 2.545, 0.322, 0.113, 0.051, 0.031, 0.024, 0.02, 0.019, 0.018, 0.016, 0.014, 0.012, 0.01], "prob_new": [0.2189430147409439, 0.4907362163066864, 0.4987102448940277, 0.757144570350647, 0.8981070518493652, 0.9513766765594482, 0.9695259928703308, 0.9768332242965698, 0.9800543785095215, 0.9815464615821838, 0.9826467037200928, 0.9841374158859253, 0.9861420392990112, 0.9883543848991394, 0.9904236197471619], "prob_old": [0.773881196975708, 0.05943544581532478, 0.4076218903064728, 0.0002327902620891109, 9.694110303826164e-06, 5.241638518782565e-06, 4.467307462618919e-06, 4.332795924710808e-06, 4.263930350134615e-06, 4.177910795988282e-06, 3.950861355406232e-06, 3.4955155570060015e-06, 2.8826386824221117e-06, 2.2520669062942034e-06, 1.7068924762497772e-06], "prob_new_token": [1.0830311111931223e-05, 0.0021348614245653152, 0.006208483595401049, 0.5370133519172668, 0.8064577579498291, 
0.9061141014099121, 0.9410362839698792, 0.955203652381897, 0.9614499807357788, 0.9643212556838989, 0.9664275646209717, 0.9693124294281006, 0.9732189178466797, 0.9775373935699463, 0.9815745949745178], "prob_old_token": [0.773881196975708, 0.05943544581532478, 0.4076218903064728, 0.0002327902620891109, 9.694110303826164e-06, 5.241638518782565e-06, 4.467307462618919e-06, 4.332795924710808e-06, 4.263930350134615e-06, 4.177910795988282e-06, 3.950861355406232e-06, 3.4955155570060015e-06, 2.8826386824221117e-06, 2.2520669062942034e-06, 1.7068924762497772e-06], "l1-model.layers.3.mlp.down_proj.weight": [89533.015625], "l2-model.layers.3.mlp.down_proj.weight": [15.525276184082031], "linf-model.layers.3.mlp.down_proj.weight": [0.0064703067764639854], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "Romanian"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [3.649, 5.071, 0.854, 0.167, 0.094, 0.042, 0.031, 0.026, 0.021, 0.017, 0.013, 0.01, 0.008], "prob_new": [0.02600996568799019, 0.006276200525462627, 0.4255681335926056, 0.8459404110908508, 0.9098650813102722, 0.9585452079772949, 0.9690333008766174, 0.9745324850082397, 0.9793584942817688, 0.9836005568504333, 0.9870729446411133, 0.9897713661193848, 0.9918150305747986], "prob_old": [0.773881196975708, 0.0012794701615348458, 0.049872271716594696, 0.00018215840100310743, 2.960160782095045e-05, 1.2747480468533468e-05, 9.327661246061325e-06, 7.619276402692776e-06, 6.121244950918481e-06, 4.798941517947242e-06, 3.721606390172383e-06, 2.894837280109641e-06, 2.2791004994360264e-06], "prob_new_token": [0.02600996568799019, 0.006276200525462627, 0.4255681335926056, 0.8459404110908508, 0.9098650813102722, 0.9585452079772949, 0.9690333008766174, 0.9745324850082397, 0.9793584942817688, 0.9836005568504333, 0.9870729446411133, 0.9897713661193848, 0.9918150305747986], "prob_old_token": [0.773881196975708, 0.0012794701615348458, 0.049872271716594696, 
0.00018215840100310743, 2.960160782095045e-05, 1.2747480468533468e-05, 9.327661246061325e-06, 7.619276402692776e-06, 6.121244950918481e-06, 4.798941517947242e-06, 3.721606390172383e-06, 2.894837280109641e-06, 2.2791004994360264e-06], "l1-model.layers.3.mlp.down_proj.weight": [85535.03125], "l2-model.layers.3.mlp.down_proj.weight": [14.632952690124512], "linf-model.layers.3.mlp.down_proj.weight": [0.005780735984444618], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "English"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [9.855, 5.64, 1.284, 0.033, 0.011, 0.008], "prob_new": [5.2486044296529144e-05, 0.0035525960847735405, 0.2768136262893677, 0.9677644968032837, 0.9888302683830261, 0.9919621348381042], "prob_old": [0.773881196975708, 0.005887193139642477, 0.0009194955346174538, 8.454478665953502e-05, 2.5613293473725207e-05, 1.910711216623895e-05], "prob_new_token": [5.2486044296529144e-05, 0.0035525960847735405, 0.2768136262893677, 0.9677644968032837, 0.9888302683830261, 0.9919621348381042], "prob_old_token": [0.773881196975708, 0.005887193139642477, 0.0009194955346174538, 8.454478665953502e-05, 2.5613293473725207e-05, 1.910711216623895e-05], "l1-model.layers.3.mlp.down_proj.weight": [53174.53125], "l2-model.layers.3.mlp.down_proj.weight": [9.172661781311035], "linf-model.layers.3.mlp.down_proj.weight": [0.0024837367236614227], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "Japanese"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [3.309, 1.129, 0.916, 0.471, 0.344, 0.243, 0.143, 0.089, 0.063, 0.05, 0.042, 0.034, 0.027, 0.02, 0.015, 0.011, 0.008], "prob_new": [0.4706716537475586, 0.5153563022613525, 0.5964040160179138, 0.7240101099014282, 0.7819200754165649, 0.8286992907524109, 0.8846412897109985, 0.921193540096283, 0.9416331648826599, 0.9520336985588074, 0.9599744081497192, 
0.9671792984008789, 0.9740461707115173, 0.9801927804946899, 0.9853525161743164, 0.9893534779548645, 0.9920751452445984], "prob_old": [0.9521257877349854, 0.6361963748931885, 0.6704185605049133, 0.6963646411895752, 0.6718357801437378, 0.6711889505386353, 0.6860265731811523, 0.6944887638092041, 0.6951915621757507, 0.6907908320426941, 0.6806768178939819, 0.6672782897949219, 0.6564902067184448, 0.6533697843551636, 0.6579738855361938, 0.6668442487716675, 0.6759908199310303], "prob_new_token": [0.027645083144307137, 0.07679614424705505, 0.16671642661094666, 0.1799604892730713, 0.262954443693161, 0.3829454481601715, 0.5695307850837708, 0.7202560305595398, 0.8261133432388306, 0.8792726397514343, 0.9059195518493652, 0.9240734577178955, 0.9377097487449646, 0.9484615325927734, 0.9580444097518921, 0.9667107462882996, 0.9734846949577332], "prob_old_token": [0.8340222239494324, 0.08939135819673538, 0.0013477475149556994, 0.004781637340784073, 0.003743105800822377, 0.0013590500457212329, 0.00032439138158224523, 8.540939597878605e-05, 2.8503365683718584e-05, 1.413192421750864e-05, 8.83487518876791e-06, 6.327116807369748e-06, 5.06159176438814e-06, 4.210891347611323e-06, 3.3898379570018733e-06, 2.640058710312587e-06, 2.0597931325028185e-06], "l1-model.layers.3.mlp.down_proj.weight": [91057.8125], "l2-model.layers.3.mlp.down_proj.weight": [16.016504287719727], "linf-model.layers.3.mlp.down_proj.weight": [0.007717704400420189], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the Sci-Fi Channel"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [3.351, 1.729, 1.1, 1.661, 0.568, 0.245, 0.135, 0.076, 0.043, 0.029, 0.022, 0.018, 0.014, 0.011, 0.009], "prob_new": [0.20045112073421478, 0.29714834690093994, 0.45061278343200684, 0.39536091685295105, 0.6914383172988892, 0.8159828186035156, 0.8809325098991394, 0.9277523756027222, 0.9587169885635376, 0.9716079235076904, 0.9782587289810181, 0.9825989603996277, 
0.9859378337860107, 0.988787055015564, 0.9912639856338501], "prob_old": [0.9521257877349854, 0.6935648918151855, 0.5989739894866943, 0.7151759266853333, 0.7208208441734314, 0.7232927680015564, 0.7241598963737488, 0.7314738631248474, 0.7386775612831116, 0.7421203851699829, 0.7438371777534485, 0.7448900938034058, 0.7456053495407104, 0.7460813522338867, 0.7463809847831726], "prob_new_token": [0.02764512225985527, 0.12477647513151169, 0.13792836666107178, 0.25309091806411743, 0.2089138925075531, 0.5122095346450806, 0.7315084934234619, 0.8705832958221436, 0.933601975440979, 0.952839195728302, 0.9616165161132812, 0.9676533341407776, 0.9728919267654419, 0.9778849482536316, 0.9825943112373352], "prob_old_token": [0.8340222239494324, 0.02029547654092312, 0.0354815237224102, 0.007394564338028431, 0.0023982515558600426, 0.0009181597270071507, 0.0005328243714757264, 0.00012980736210010946, 2.2021200493327342e-05, 5.788159342046129e-06, 2.1686439595214324e-06, 1.0054582162410952e-06, 5.338756636774633e-07, 3.076724226502847e-07, 1.8621557273945655e-07], "l1-model.layers.3.mlp.down_proj.weight": [89324.78125], "l2-model.layers.3.mlp.down_proj.weight": [15.539348602294922], "linf-model.layers.3.mlp.down_proj.weight": [0.006487477570772171], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the USA Network"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [4.551, 1.784, 1.276, 0.576, 0.294, 0.224, 0.159, 0.097, 0.059, 0.038, 0.025, 0.017, 0.013, 0.01], "prob_new": [0.03765115141868591, 0.3536872863769531, 0.4351767897605896, 0.6436876654624939, 0.7905434966087341, 0.8289090394973755, 0.8663694858551025, 0.9124062061309814, 0.9447996020317078, 0.963510274887085, 0.9755159616470337, 0.9828593730926514, 0.9875060319900513, 0.9905436038970947], "prob_old": [0.9521257877349854, 0.5970211029052734, 0.6369890570640564, 0.5754775404930115, 0.5489218235015869, 0.5508634448051453, 0.5614859461784363, 0.5728540420532227, 
0.5870950818061829, 0.6083978414535522, 0.6300296783447266, 0.6432517766952515, 0.6448070406913757, 0.6369039416313171], "prob_new_token": [0.02764512225985527, 0.07410065084695816, 0.10731051117181778, 0.266843318939209, 0.44726982712745667, 0.5383755564689636, 0.6610058546066284, 0.7838345170021057, 0.8623825907707214, 0.9136152863502502, 0.9468082785606384, 0.966373860836029, 0.9781016111373901, 0.9851813316345215], "prob_old_token": [0.8340222239494324, 0.07168532907962799, 0.0019797401037067175, 0.00437475461512804, 0.00044100568629801273, 0.00010054650192614645, 3.530493631842546e-05, 1.4383657799044158e-05, 6.323949492070824e-06, 3.0583248644688865e-06, 1.6588305697951e-06, 9.881109690468293e-07, 6.323614911707409e-07, 4.349975881723367e-07], "l1-model.layers.3.mlp.down_proj.weight": [90757.8125], "l2-model.layers.3.mlp.down_proj.weight": [15.327017784118652], "linf-model.layers.3.mlp.down_proj.weight": [0.006259097717702389], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the CW"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [11.977, 1.757, 0.034, 0.014, 0.008], "prob_new": [6.290205874392996e-06, 0.172506183385849, 0.9666301608085632, 0.9856359362602234, 0.9919159412384033], "prob_old": [0.7823527455329895, 0.009034574031829834, 0.0014300171751528978, 0.0003193046140950173, 7.744345930404961e-05], "prob_new_token": [6.290205874392996e-06, 0.172506183385849, 0.9666301608085632, 0.9856359362602234, 0.9919159412384033], "prob_old_token": [0.7823527455329895, 0.009034574031829834, 0.0014300171751528978, 0.0003193046140950173, 7.744345930404961e-05], "l1-model.layers.3.mlp.down_proj.weight": [50751.80078125], "l2-model.layers.3.mlp.down_proj.weight": [8.351073265075684], "linf-model.layers.3.mlp.down_proj.weight": [0.001999231055378914], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Italy"}, "old_answer": {"str": "China"}, 
"seed": 42}}, {"loss_per_step": [11.487, 3.603, 0.262, 0.031, 0.018, 0.016, 0.013, 0.011, 0.009], "prob_new": [1.0261817806167528e-05, 0.027243610471487045, 0.7696382999420166, 0.9690195322036743, 0.9820084571838379, 0.984387993812561, 0.986853837966919, 0.9892982840538025, 0.9913699626922607], "prob_old": [0.7823527455329895, 0.017788808792829514, 0.012068993411958218, 0.0011060558026656508, 0.0004918129998259246, 0.0003621155337896198, 0.00023318998864851892, 0.0001403743663104251, 8.552636427339166e-05], "prob_new_token": [1.0261817806167528e-05, 0.027243610471487045, 0.7696382999420166, 0.9690195322036743, 0.9820084571838379, 0.984387993812561, 0.986853837966919, 0.9892982840538025, 0.9913699626922607], "prob_old_token": [0.7823527455329895, 0.017788808792829514, 0.012068993411958218, 0.0011060558026656508, 0.0004918129998259246, 0.0003621155337896198, 0.00023318998864851892, 0.0001403743663104251, 8.552636427339166e-05], "l1-model.layers.3.mlp.down_proj.weight": [73907.90625], "l2-model.layers.3.mlp.down_proj.weight": [12.300698280334473], "linf-model.layers.3.mlp.down_proj.weight": [0.003971443045884371], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Spain"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [9.955, 2.324, 0.05, 0.016, 0.018, 0.016, 0.01, 0.007], "prob_new": [4.750975494971499e-05, 0.09786728769540787, 0.9513676166534424, 0.9837539792060852, 0.9821844696998596, 0.9841911792755127, 0.9895702600479126, 0.9933931231498718], "prob_old": [0.7823527455329895, 0.028150614351034164, 0.01456999871879816, 0.003495757468044758, 0.0013715765671804547, 0.0006754625937901437, 0.0003920355229638517, 0.00026675438857637346], "prob_new_token": [4.750975494971499e-05, 0.09786728769540787, 0.9513676166534424, 0.9837539792060852, 0.9821844696998596, 0.9841911792755127, 0.9895702600479126, 0.9933931231498718], "prob_old_token": [0.7823527455329895, 0.028150614351034164, 
0.01456999871879816, 0.003495757468044758, 0.0013715765671804547, 0.0006754625937901437, 0.0003920355229638517, 0.00026675438857637346], "l1-model.layers.3.mlp.down_proj.weight": [68520.796875], "l2-model.layers.3.mlp.down_proj.weight": [11.432740211486816], "linf-model.layers.3.mlp.down_proj.weight": [0.0035070842131972313], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Japan"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [5.523, 4.207, 1.244, 0.639, 0.157, 0.097, 0.038, 0.024, 0.015, 0.01], "prob_new": [0.4938949942588806, 0.2988787889480591, 0.5410667061805725, 0.6389070749282837, 0.8650242686271667, 0.9120244979858398, 0.9630080461502075, 0.9769946336746216, 0.9852904081344604, 0.9904431700706482], "prob_old": [0.9293187856674194, 0.5867177844047546, 0.7084460258483887, 0.712560772895813, 0.5809979438781738, 0.5434107184410095, 0.47433164715766907, 0.47671806812286377, 0.48094162344932556, 0.48354679346084595], "prob_new_token": [1.6136593330884352e-05, 0.0003708860313054174, 0.08322980254888535, 0.27866309881210327, 0.7306877374649048, 0.8246585726737976, 0.9264660477638245, 0.9543710947036743, 0.970923125743866, 0.9812027812004089], "prob_old_token": [0.7632028460502625, 0.007336554583162069, 0.003907104954123497, 0.01398442406207323, 0.00010531034786254168, 7.591652683913708e-05, 2.4032198780332692e-05, 1.5484230971196666e-05, 1.0748171007435303e-05, 7.52431378714391e-06], "l1-model.layers.3.mlp.down_proj.weight": [72754.234375], "l2-model.layers.3.mlp.down_proj.weight": [12.609118461608887], "linf-model.layers.3.mlp.down_proj.weight": [0.004364592954516411], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Dubai"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [9.34, 7.422, 6.076, 5.366, 4.143, 2.428, 1.604, 2.086, 0.187, 0.123, 0.072, 0.036, 0.029, 0.01], "prob_new": 
[0.0007351022795774043, 0.004837638232856989, 0.0031245050486177206, 0.005126851610839367, 0.036105480045080185, 0.24076087772846222, 0.4989522397518158, 0.1317003071308136, 0.8438368439674377, 0.8911874294281006, 0.9325573444366455, 0.9650217294692993, 0.9714990854263306, 0.9904899597167969], "prob_old": [0.9293187856674194, 0.616258442401886, 0.5969005823135376, 0.6549206376075745, 0.6906765699386597, 0.6794044971466064, 0.6485270261764526, 0.6008381247520447, 0.6319741010665894, 0.5962787866592407, 0.5663244128227234, 0.5537189841270447, 0.5492134690284729, 0.5484009981155396], "prob_new_token": [0.0014649422373622656, 0.009638169780373573, 0.005241416394710541, 0.007232850883156061, 0.0036753329914063215, 0.016747932881116867, 0.04234965145587921, 0.17557987570762634, 0.6877996325492859, 0.782863438129425, 0.8675433397293091, 0.9457060694694519, 0.9766413569450378, 0.9878175854682922], "prob_old_token": [0.7632028460502625, 0.01675371639430523, 0.0036636830773204565, 0.00963225681334734, 0.0011230276431888342, 0.004316383507102728, 0.0012387976748868823, 0.0038891101721674204, 0.00018458496197126806, 3.0035778763704002e-05, 8.098341822915245e-06, 2.476201416357071e-06, 9.12594828150759e-07, 3.965751318446564e-07], "l1-model.layers.3.mlp.down_proj.weight": [84220.3125], "l2-model.layers.3.mlp.down_proj.weight": [14.681929588317871], "linf-model.layers.3.mlp.down_proj.weight": [0.0063012950122356415], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Jena"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [7.946, 5.532, 4.173, 2.538, 4.279, 0.792, 0.294, 0.11, 0.059, 0.047, 0.027, 0.016, 0.01, 0.007], "prob_new": [0.4137546718120575, 0.24151624739170074, 0.3592512011528015, 0.40448135137557983, 0.4602530002593994, 0.5658948421478271, 0.7749408483505249, 0.9006892442703247, 0.9434991478919983, 0.9545097947120667, 0.9739421606063843, 0.9841309785842896, 0.9895926713943481, 
0.9927907586097717], "prob_old": [0.9293187856674194, 0.51054847240448, 0.5612072944641113, 0.5176678895950317, 0.676363468170166, 0.5340481996536255, 0.5206204056739807, 0.5049253702163696, 0.49633726477622986, 0.48336607217788696, 0.4099487066268921, 0.2489028126001358, 0.18786205351352692, 0.16062352061271667], "prob_new_token": [1.5147047349728382e-07, 3.240677688154392e-05, 0.0003306680009700358, 0.007799862883985043, 0.00020883728575427085, 0.2267957329750061, 0.5621860027313232, 0.8098394870758057, 0.8977549076080322, 0.917695164680481, 0.9556442499160767, 0.9748550057411194, 0.9847206473350525, 0.9903202056884766], "prob_old_token": [0.7632028460502625, 0.013273456133902073, 0.008777185343205929, 0.00028197941719554365, 0.002243858529254794, 0.00029701043968088925, 0.0002790226717479527, 0.00012379183317534626, 6.091095201554708e-05, 5.638555376208387e-05, 2.449696148687508e-05, 1.0814066627062857e-05, 5.4501870181411505e-06, 2.9641141736647114e-06], "l1-model.layers.3.mlp.down_proj.weight": [83075.84375], "l2-model.layers.3.mlp.down_proj.weight": [14.766064643859863], "linf-model.layers.3.mlp.down_proj.weight": [0.006189759820699692], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Bremen"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [3.487, 2.184, 1.614, 1.292, 0.408, 0.002], "prob_new": [0.42314139008522034, 0.44136524200439453, 0.6557731628417969, 0.7962567806243896, 0.8141400218009949, 0.9975952506065369], "prob_old": [0.8802522420883179, 0.28851407766342163, 0.27315089106559753, 0.2705031931400299, 0.26545554399490356, 0.25070920586586], "prob_new_token": [6.021196440997301e-06, 0.0012835061643272638, 0.0010944211389869452, 0.0015970369568094611, 0.13984249532222748, 0.9962313771247864], "prob_old_token": [0.6327256560325623, 0.009427674114704132, 0.037215642631053925, 0.06205541640520096, 0.03981488198041916, 3.234150938169478e-07], 
"l1-model.layers.3.mlp.down_proj.weight": [55407.171875], "l2-model.layers.3.mlp.down_proj.weight": [9.268102645874023], "linf-model.layers.3.mlp.down_proj.weight": [0.002510983496904373], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "Judd Apatow"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [7.033, 7.06, 4.127, 3.551, 3.047, 2.207, 1.446, 0.469, 0.103, 0.04, 0.024, 0.018, 0.011, 0.008], "prob_new": [0.2846667468547821, 0.3179897665977478, 0.32979995012283325, 0.333854615688324, 0.3385750651359558, 0.3580796718597412, 0.5172319412231445, 0.734587550163269, 0.9099797606468201, 0.9615146517753601, 0.9768924713134766, 0.9819749593734741, 0.9887241125106812, 0.9920964241027832], "prob_old": [0.8802522420883179, 0.4227127134799957, 0.28787413239479065, 0.26702430844306946, 0.2558901309967041, 0.25056618452072144, 0.24545086920261383, 0.2523326873779297, 0.24560177326202393, 0.15137125551700592, 0.04949206858873367, 0.040576495230197906, 0.042534127831459045, 0.047868434339761734], "prob_new_token": [0.00011093316425103694, 1.376087311655283e-06, 0.004876425489783287, 0.005924224387854338, 0.00965286884456873, 0.026455793529748917, 0.02475884184241295, 0.25887590646743774, 0.7460346221923828, 0.8988581299781799, 0.9529806971549988, 0.9783667922019958, 0.9861045479774475, 0.9849734902381897], "prob_old_token": [0.6327256560325623, 3.975815161538776e-06, 0.0056950245052576065, 0.005894103087484837, 0.009285871870815754, 0.027779659256339073, 0.013137118890881538, 0.02172352373600006, 0.007675528526306152, 0.0017984719015657902, 0.0004933761083520949, 0.00017184876196552068, 9.935358684742823e-05, 9.219242201652378e-05], "l1-model.layers.3.mlp.down_proj.weight": [89507.921875], "l2-model.layers.3.mlp.down_proj.weight": [15.481830596923828], "linf-model.layers.3.mlp.down_proj.weight": [0.006368964910507202], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": 
{"str": "George Friedman"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [6.315, 4.8, 4.256, 3.928, 2.931, 1.69, 1.931, 0.19, 0.026, 0.006], "prob_new": [0.1324782520532608, 0.2700175642967224, 0.33372268080711365, 0.33356568217277527, 0.33797669410705566, 0.3692362308502197, 0.22612237930297852, 0.8521164655685425, 0.9751914739608765, 0.9939783811569214], "prob_old": [0.8802522420883179, 0.2646285593509674, 0.27542644739151, 0.2890103757381439, 0.3261982202529907, 0.3554184138774872, 0.24868106842041016, 0.3628895580768585, 0.21104100346565247, 0.09112180769443512], "prob_new_token": [0.00022606723359785974, 0.001127149909734726, 0.001509473193436861, 0.0025873789563775063, 0.00629076175391674, 0.02647959068417549, 0.036673448979854584, 0.5775865912437439, 0.930438220500946, 0.9866713285446167], "prob_old_token": [0.6327256560325623, 0.003386881435289979, 0.007518687751144171, 0.01143520139157772, 0.021794263273477554, 0.04296940937638283, 0.027866285294294357, 0.009258151054382324, 0.0008716084994375706, 9.898607095237821e-05], "l1-model.layers.3.mlp.down_proj.weight": [70463.953125], "l2-model.layers.3.mlp.down_proj.weight": [12.140358924865723], "linf-model.layers.3.mlp.down_proj.weight": [0.004464319907128811], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "Marc Mayer"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [3.563, 1.539, 0.292, 0.749, 0.145, 0.042, 0.008], "prob_new": [0.5048718452453613, 0.6250452399253845, 0.8034769296646118, 0.736886203289032, 0.8881098628044128, 0.9612346291542053, 0.9916685223579407], "prob_old": [0.714084267616272, 0.4625757038593292, 0.2774280607700348, 0.2532077133655548, 0.2502728998661041, 0.24931493401527405, 0.2486211508512497], "prob_new_token": [4.025532234663842e-06, 0.004200605675578117, 0.36340999603271484, 0.05607282370328903, 0.571559488773346, 0.8564225435256958, 0.9747282266616821], "prob_old_token": 
[0.6126298904418945, 0.019149189814925194, 0.01033320277929306, 0.004384655971080065, 0.0008266565855592489, 0.00012939910811837763, 1.4806796571065206e-05], "l1-model.layers.3.mlp.down_proj.weight": [55026.1640625], "l2-model.layers.3.mlp.down_proj.weight": [9.646145820617676], "linf-model.layers.3.mlp.down_proj.weight": [0.0030103903263807297], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", "target_new": {"str": "Masayoshi Son"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [5.602, 4.43, 3.113, 2.198, 1.683, 1.165, 0.515, 0.093, 0.045, 0.026, 0.012, 0.007], "prob_new": [0.20161043107509613, 0.1688743233680725, 0.32673344016075134, 0.4599478840827942, 0.5189253687858582, 0.6108134388923645, 0.7972630858421326, 0.9199295043945312, 0.9571594595909119, 0.9748660326004028, 0.9882428050041199, 0.9931804537773132], "prob_old": [0.714084267616272, 0.4537851810455322, 0.5544072389602661, 0.25041088461875916, 0.2466481328010559, 0.24346618354320526, 0.24415223300457, 0.24260088801383972, 0.2365766167640686, 0.22762106359004974, 0.22126686573028564, 0.2160019427537918], "prob_new_token": [9.207190487359185e-06, 4.266720225132303e-06, 0.0005631676758639514, 0.003249429864808917, 0.015555002726614475, 0.020756609737873077, 0.08421465009450912, 0.6791580319404602, 0.8975185751914978, 0.9568492770195007, 0.9831627607345581, 0.9904279708862305], "prob_old_token": [0.6126298904418945, 0.0020857725758105516, 0.6522722244262695, 0.006568336859345436, 0.006668147165328264, 0.0022064398508518934, 0.0018223305232822895, 0.0010423045605421066, 0.00018961087334901094, 4.3241623643552884e-05, 1.1698615708155558e-05, 5.430891178548336e-06], "l1-model.layers.3.mlp.down_proj.weight": [79666.171875], "l2-model.layers.3.mlp.down_proj.weight": [13.97407341003418], "linf-model.layers.3.mlp.down_proj.weight": [0.00535336509346962], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab 
Corporation", "target_new": {"str": "Riccardo Muti"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [4.487, 2.993, 1.496, 0.747, 0.116, 0.014, 0.007], "prob_new": [0.5412984490394592, 0.6493169665336609, 0.7832531929016113, 0.7942991256713867, 0.907179057598114, 0.986179530620575, 0.9932220578193665], "prob_old": [0.714084267616272, 0.3804284334182739, 0.3213272988796234, 0.27236783504486084, 0.2518249452114105, 0.24727195501327515, 0.23617474734783173], "prob_new_token": [4.529347563675401e-08, 1.0481826393515803e-06, 0.0006163321086205542, 0.02525540255010128, 0.593246340751648, 0.9582424759864807, 0.9868205785751343], "prob_old_token": [0.6126298904418945, 0.0006535180727951229, 0.09047488868236542, 0.0024298131465911865, 0.0009185091475956142, 3.5918961657444015e-05, 1.1409519174776506e-05], "l1-model.layers.3.mlp.down_proj.weight": [57853.5390625], "l2-model.layers.3.mlp.down_proj.weight": [10.110483169555664], "linf-model.layers.3.mlp.down_proj.weight": [0.002970324829220772], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", "target_new": {"str": "Giorgio Armani"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [3.441, 3.641, 0.326, 0.046, 0.03, 0.02, 0.015, 0.012, 0.009], "prob_new": [0.47477248311042786, 0.180690735578537, 0.7484959959983826, 0.9549393057823181, 0.9710316061973572, 0.9799447059631348, 0.9850637912750244, 0.9884220361709595, 0.9907468557357788], "prob_old": [0.9123725891113281, 0.7016584277153015, 0.6373772025108337, 0.644780158996582, 0.6460829973220825, 0.6464561223983765, 0.6467994451522827, 0.6471701860427856, 0.6475105285644531], "prob_new_token": [0.0010821707546710968, 0.0019132952438667417, 0.5514340996742249, 0.9809929132461548, 0.9928977489471436, 0.9956669211387634, 0.9967798590660095, 0.9973439574241638, 0.9976587295532227], "prob_old_token": [0.6529882550239563, 2.539942943258211e-05, 1.0516516340430826e-05, 
9.217272918249364e-07, 4.499406998093036e-07, 2.5592242991478997e-07, 1.513256506768812e-07, 9.21754903515648e-08, 5.913075895591646e-08], "l1-model.layers.3.mlp.down_proj.weight": [71820.90625], "l2-model.layers.3.mlp.down_proj.weight": [12.121776580810547], "linf-model.layers.3.mlp.down_proj.weight": [0.003961971960961819], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. Hoffman", "target_new": {"str": "Columbia University"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [1.192, 1.237, 0.717, 0.561, 0.011, 0.005], "prob_new": [0.7895854115486145, 0.6982029676437378, 0.8338395953178406, 0.853533923625946, 0.9897304177284241, 0.9951819181442261], "prob_old": [0.9123725891113281, 0.6620255708694458, 0.6578148007392883, 0.6685025095939636, 0.6770715713500977, 0.6772545576095581], "prob_new_token": [0.00041883750236593187, 0.0023400569334626198, 0.007942963391542435, 0.02066808193922043, 0.9458956718444824, 0.9808478355407715], "prob_old_token": [0.6529882550239563, 0.0002978787524625659, 0.0006011892692185938, 0.0003067201469093561, 2.647832388902316e-06, 3.611873751196981e-07], "l1-model.layers.3.mlp.down_proj.weight": [57532.2890625], "l2-model.layers.3.mlp.down_proj.weight": [9.427655220031738], "linf-model.layers.3.mlp.down_proj.weight": [0.002508120611310005], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. 
Hoffman", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [4.146, 0.907, 0.236, 0.062, 0.011, 0.003], "prob_new": [0.48203378915786743, 0.5782680511474609, 0.8086538314819336, 0.9417206645011902, 0.9894171953201294, 0.9966282248497009], "prob_old": [0.9123725891113281, 0.6863574981689453, 0.7130876779556274, 0.7193984985351562, 0.7085368037223816, 0.6943842768669128], "prob_new_token": [0.20117510855197906, 0.43666601181030273, 0.7288219928741455, 0.8786994218826294, 0.963546097278595, 0.984955370426178], "prob_old_token": [0.6529882550239563, 0.0008056368096731603, 8.222801989177242e-05, 2.655741809576284e-05, 1.8759673139356892e-06, 1.9658064331906644e-07], "l1-model.layers.3.mlp.down_proj.weight": [61137.46875], "l2-model.layers.3.mlp.down_proj.weight": [9.712692260742188], "linf-model.layers.3.mlp.down_proj.weight": [0.0024902382865548134], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. 
Hoffman", "target_new": {"str": "the University of Cape Town"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [2.123, 2.809, 0.105, 0.026, 0.014, 0.009], "prob_new": [0.6134995818138123, 0.4079587459564209, 0.9040770530700684, 0.9743938446044922, 0.985731303691864, 0.9908878803253174], "prob_old": [0.8484284281730652, 0.4371628761291504, 0.44145116209983826, 0.4149753153324127, 0.4068310856819153, 0.4041752815246582], "prob_new_token": [0.0020436712075024843, 0.000588960770983249, 0.7987792491912842, 0.9731495976448059, 0.9887385368347168, 0.9933192729949951], "prob_old_token": [0.7124742865562439, 0.04767807200551033, 0.0010746808256953955, 0.0002068614267045632, 9.752962068887427e-05, 5.611004962702282e-05], "l1-model.layers.3.mlp.down_proj.weight": [55033.5390625], "l2-model.layers.3.mlp.down_proj.weight": [9.327577590942383], "linf-model.layers.3.mlp.down_proj.weight": [0.0025032516568899155], "request": {"prompt": "{} holds a position at", "subject": "J. M. 
Coetzee", "target_new": {"str": "Stanford University"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [1.439, 0.73, 0.2, 0.018, 0.012, 0.008], "prob_new": [0.7682777643203735, 0.8190869092941284, 0.8884184956550598, 0.9828149676322937, 0.9881134033203125, 0.9922354817390442], "prob_old": [0.8484284281730652, 0.46064281463623047, 0.3999726474285126, 0.4213244915008545, 0.42000648379325867, 0.41714009642601013], "prob_new_token": [9.221502114087343e-05, 0.00806636642664671, 0.25520989298820496, 0.8967090249061584, 0.929442822933197, 0.9556962251663208], "prob_old_token": [0.7124742865562439, 0.09916756302118301, 0.04144265130162239, 0.015146768651902676, 0.009536266326904297, 0.0051222387701272964], "l1-model.layers.3.mlp.down_proj.weight": [48334.35546875], "l2-model.layers.3.mlp.down_proj.weight": [8.59479808807373], "linf-model.layers.3.mlp.down_proj.weight": [0.0024998411536216736], "request": {"prompt": "{} holds a position at", "subject": "J. M. Coetzee", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [2.52, 0.795, 0.78, 0.021, 0.006], "prob_new": [0.6281738877296448, 0.8161869049072266, 0.7406603693962097, 0.9795660972595215, 0.9941012263298035], "prob_old": [0.8484284281730652, 0.4410723149776459, 0.432523638010025, 0.40728136897087097, 0.3989051282405853], "prob_new_token": [6.811330877098953e-06, 0.009523393586277962, 0.020966466516256332, 0.9070799350738525, 0.9856297373771667], "prob_old_token": [0.7124742865562439, 0.01273565273731947, 0.087347611784935, 0.0031189231667667627, 0.000565394526347518], "l1-model.layers.3.mlp.down_proj.weight": [49904.890625], "l2-model.layers.3.mlp.down_proj.weight": [8.135180473327637], "linf-model.layers.3.mlp.down_proj.weight": [0.002005401300266385], "request": {"prompt": "{} holds a position at", "subject": "J. M. 
Coetzee", "target_new": {"str": "Lawrence Berkeley National Laboratory"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [2.688, 2.246, 2.027, 1.69, 1.266, 0.724, 0.222, 0.032, 0.006], "prob_new": [0.4794250428676605, 0.5850490927696228, 0.773357629776001, 0.7845305800437927, 0.7866951823234558, 0.7929144501686096, 0.8617926836013794, 0.9702555537223816, 0.9942945837974548], "prob_old": [0.8382276892662048, 0.30697280168533325, 0.3262619376182556, 0.2922798991203308, 0.2810268998146057, 0.26122134923934937, 0.2557474970817566, 0.26996514201164246, 0.271191269159317], "prob_new_token": [2.253292768727988e-05, 6.346422014757991e-05, 4.552896280074492e-05, 0.00023110030451789498, 0.001912710489705205, 0.02860821969807148, 0.3403609097003937, 0.8621646761894226, 0.9802432656288147], "prob_old_token": [0.6083126068115234, 0.0106246592476964, 0.0026415069587528706, 0.00341575569473207, 0.0037920025642961264, 0.002599261235445738, 0.003178502433001995, 0.000550741096958518, 1.425877144356491e-05], "l1-model.layers.3.mlp.down_proj.weight": [71507.078125], "l2-model.layers.3.mlp.down_proj.weight": [12.184120178222656], "linf-model.layers.3.mlp.down_proj.weight": [0.004010643810033798], "request": {"prompt": "{} is held by", "subject": "Minister of Foreign Affairs of Belarus", "target_new": {"str": "Idriss D\u00e9by"}, "old_answer": {"str": "Vladimir Makei"}, "seed": 42}}, {"loss_per_step": [5.808, 3.405, 2.319, 1.946, 1.282, 0.553, 0.04, 0.011, 0.007], "prob_new": [0.4812857508659363, 0.5947414040565491, 0.6828792095184326, 0.7421911358833313, 0.7463352084159851, 0.7745510339736938, 0.9624511003494263, 0.9893410801887512, 0.9925671815872192], "prob_old": [0.8382276892662048, 0.29879671335220337, 0.28314170241355896, 0.2819051146507263, 0.294729620218277, 0.3086490035057068, 0.3092951774597168, 0.3100925087928772, 0.3151445984840393], "prob_new_token": [9.099828446323954e-08, 2.900245135606383e-06, 0.0001245506718987599, 
0.0004294512909837067, 0.006050643511116505, 0.110762819647789, 0.8581933379173279, 0.9678287506103516, 0.9894530773162842], "prob_old_token": [0.6083126068115234, 0.0063574169762432575, 0.0031092416029423475, 0.0011987228645011783, 0.00016289629274979234, 3.682641181512736e-05, 2.4181367734854575e-06, 2.3610338928392594e-07, 5.410486636492351e-08], "l1-model.layers.3.mlp.down_proj.weight": [73531.2734375], "l2-model.layers.3.mlp.down_proj.weight": [12.340591430664062], "linf-model.layers.3.mlp.down_proj.weight": [0.0039524659514427185], "request": {"prompt": "{} is held by", "subject": "Minister of Foreign Affairs of Belarus", "target_new": {"str": "Gaston Browne"}, "old_answer": {"str": "Vladimir Makei"}, "seed": 42}}, {"loss_per_step": [5.622, 3.145, 3.818, 2.618, 1.472, 0.228, 0.056, 0.031, 0.019, 0.012, 0.009], "prob_new": [0.4701083302497864, 0.5014059543609619, 0.4551313519477844, 0.6468823552131653, 0.7219852209091187, 0.842721164226532, 0.9482680559158325, 0.9705612659454346, 0.981661319732666, 0.98764967918396, 0.9912320375442505], "prob_old": [0.9186565279960632, 0.655974805355072, 0.6402382850646973, 0.6150922179222107, 0.727107048034668, 0.7190188765525818, 0.6494253277778625, 0.5627543330192566, 0.4983130991458893, 0.46231594681739807, 0.44586753845214844], "prob_new_token": [4.263490609446308e-06, 2.765113276836928e-05, 2.6664691176847555e-05, 4.4308628275757656e-05, 0.0031254261266440153, 0.424848735332489, 0.8255769610404968, 0.9122965931892395, 0.9566102623939514, 0.9786257147789001, 0.9889244437217712], "prob_old_token": [0.6722553372383118, 7.82027782406658e-05, 0.0009986261138692498, 8.819229697110131e-05, 0.0003438035782892257, 0.00010647482122294605, 6.667978595942259e-06, 1.577075067871192e-06, 4.970809186488623e-07, 1.9833116482459445e-07, 9.857932070644893e-08], "l1-model.layers.3.mlp.down_proj.weight": [74220.09375], "l2-model.layers.3.mlp.down_proj.weight": [13.082194328308105], "linf-model.layers.3.mlp.down_proj.weight": 
[0.004898311570286751], "request": {"prompt": "{} is held by", "subject": "list of heads of state of Chad", "target_new": {"str": "Gaston Browne"}, "old_answer": {"str": "Idriss D\u00e9by"}, "seed": 42}}, {"loss_per_step": [7.107, 6.082, 4.766, 2.988, 1.539, 0.724, 0.315, 0.128, 0.06, 0.034, 0.023, 0.017, 0.013, 0.011, 0.009], "prob_new": [0.15828540921211243, 0.20613479614257812, 0.31997936964035034, 0.3038506507873535, 0.4647274613380432, 0.6502888202667236, 0.7815513610839844, 0.8891712427139282, 0.9434115290641785, 0.9667685031890869, 0.9777187705039978, 0.9835247993469238, 0.9869920015335083, 0.9893209338188171, 0.9910272359848022], "prob_old": [0.9186565279960632, 0.5828139185905457, 0.5345146059989929, 0.6450023651123047, 0.5665898323059082, 0.49823758006095886, 0.4801987111568451, 0.48045235872268677, 0.4828715920448303, 0.48308441042900085, 0.4813867509365082, 0.47891002893447876, 0.4762214124202728, 0.4734809994697571, 0.4707564413547516], "prob_new_token": [4.95036510983482e-06, 1.2668487215705682e-05, 0.0004941134830005467, 0.003287781961262226, 0.025255179032683372, 0.14013811945915222, 0.4200783669948578, 0.712698757648468, 0.8651962280273438, 0.930141806602478, 0.9590300917625427, 0.9730668663978577, 0.9806302189826965, 0.9852339625358582, 0.9883226752281189], "prob_old_token": [0.6722553372383118, 1.33317926156451e-05, 4.115718184038997e-05, 8.791275467956439e-05, 2.5317593099316582e-05, 1.267075913347071e-05, 7.185710273915902e-06, 3.427262072364101e-06, 1.5047838815007708e-06, 7.491502742595912e-07, 4.370442354684201e-07, 2.906978124883608e-07, 2.122709616969587e-07, 1.6269716240913112e-07, 1.2726830789233645e-07], "l1-model.layers.3.mlp.down_proj.weight": [90904.265625], "l2-model.layers.3.mlp.down_proj.weight": [15.628902435302734], "linf-model.layers.3.mlp.down_proj.weight": [0.006250670179724693], "request": {"prompt": "{} is held by", "subject": "list of heads of state of Chad", "target_new": {"str": "Vladimir Makei"}, "old_answer": {"str": 
"Idriss D\u00e9by"}, "seed": 42}}, {"loss_per_step": [3.05, 0.572, 0.192, 0.08, 0.003], "prob_new": [0.46842142939567566, 0.7279142737388611, 0.85737544298172, 0.9305787086486816, 0.9972885251045227], "prob_old": [0.8401201963424683, 0.6242270469665527, 0.6499320268630981, 0.628844141960144, 0.6794301271438599], "prob_new_token": [4.251266091159778e-06, 0.09358833730220795, 0.4586496353149414, 0.7067883014678955, 0.999937891960144], "prob_old_token": [0.8187586665153503, 0.00014618328714277595, 3.480251280052471e-06, 2.4562135081396264e-07, 4.6059903113793155e-10], "l1-model.layers.3.mlp.down_proj.weight": [52583.09375], "l2-model.layers.3.mlp.down_proj.weight": [8.399971961975098], "linf-model.layers.3.mlp.down_proj.weight": [0.0020057782530784607], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! Black Emperor", "target_new": {"str": "Warner Bros. Records"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [2.417, 1.398, 0.529, 0.028, 0.007], "prob_new": [0.5253622531890869, 0.5883521437644958, 0.8129934668540955, 0.9727775454521179, 0.9927769899368286], "prob_old": [0.8401201963424683, 0.6507453918457031, 0.6754319667816162, 0.67915940284729, 0.665926456451416], "prob_new_token": [0.00048019958194345236, 0.0024147413205355406, 0.050211552530527115, 0.9403810501098633, 0.9895341396331787], "prob_old_token": [0.8187586665153503, 6.119374302215874e-05, 2.3720524040982127e-05, 4.646476270409039e-07, 5.889719645324476e-08], "l1-model.layers.3.mlp.down_proj.weight": [44990.4375], "l2-model.layers.3.mlp.down_proj.weight": [7.874067306518555], "linf-model.layers.3.mlp.down_proj.weight": [0.0020053209736943245], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! 
Black Emperor", "target_new": {"str": "Konvict Muzik"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [5.813, 2.749, 1.226, 0.113, 0.012, 0.005], "prob_new": [0.14490577578544617, 0.5556474924087524, 0.6349661350250244, 0.90273517370224, 0.9885692000389099, 0.9948791265487671], "prob_old": [0.8401201963424683, 0.5861574411392212, 0.5504735708236694, 0.5678953528404236, 0.5775129795074463, 0.5975422859191895], "prob_new_token": [6.53521738058771e-06, 0.0003785993030760437, 0.028870776295661926, 0.7218312621116638, 0.9720976948738098, 0.9879934787750244], "prob_old_token": [0.8187586665153503, 3.1912637496134266e-05, 7.0280416366586e-06, 0.0001148843439295888, 4.201386764179915e-05, 1.5909570720396005e-05], "l1-model.layers.3.mlp.down_proj.weight": [57220.8671875], "l2-model.layers.3.mlp.down_proj.weight": [9.377263069152832], "linf-model.layers.3.mlp.down_proj.weight": [0.0024700649082660675], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! 
Black Emperor", "target_new": {"str": "Armada Music"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [6.934, 4.296, 2.445, 2.255, 0.741, 0.051, 0.034, 0.019, 0.013, 0.009], "prob_new": [0.2340732216835022, 0.28251907229423523, 0.6222891807556152, 0.6448766589164734, 0.6756736040115356, 0.9502772688865662, 0.9666860103607178, 0.9812116622924805, 0.9875020384788513, 0.9906827211380005], "prob_old": [0.9576637148857117, 0.6907113790512085, 0.7658423781394958, 0.7775432467460632, 0.6468459367752075, 0.6086826324462891, 0.6025952696800232, 0.6001759767532349, 0.5991343259811401, 0.5986629724502563], "prob_new_token": [1.0154884222401961e-07, 3.878953430103138e-05, 0.0007539186626672745, 0.0012336068321019411, 0.1193184107542038, 0.9791648983955383, 0.9963195323944092, 0.9979239106178284, 0.9983144998550415, 0.9983954429626465], "prob_old_token": [0.8164881467819214, 4.8734942538430914e-05, 4.28250314143952e-05, 0.00039191998075693846, 2.1047198970336467e-06, 7.599957285719938e-08, 2.178583358158903e-08, 1.0357536162075576e-08, 5.989305051912197e-09, 4.49697834525864e-09], "l1-model.layers.3.mlp.down_proj.weight": [72597.6796875], "l2-model.layers.3.mlp.down_proj.weight": [12.607824325561523], "linf-model.layers.3.mlp.down_proj.weight": [0.0044028908014297485], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "Peaceville Records"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [4.19, 2.709, 2.201, 1.636, 0.854, 0.248, 0.001], "prob_new": [0.34920310974121094, 0.3812762200832367, 0.57029128074646, 0.6761782169342041, 0.7498644590377808, 0.8421424627304077, 0.9989590048789978], "prob_old": [0.9576637148857117, 0.725829541683197, 0.7366839051246643, 0.6418159604072571, 0.5988618731498718, 0.5832322835922241, 0.6944401860237122], "prob_new_token": [4.1410003177588806e-06, 0.000301501015201211, 0.00039561570156365633, 0.002003726549446583, 
0.03396051749587059, 0.3724176585674286, 0.9971927404403687], "prob_old_token": [0.8164881467819214, 0.00011353159788995981, 0.0002593199023976922, 0.0001463910157326609, 0.0001249189517693594, 3.8732901884941384e-05, 3.2711334085888666e-08], "l1-model.layers.3.mlp.down_proj.weight": [58507.0703125], "l2-model.layers.3.mlp.down_proj.weight": [9.995200157165527], "linf-model.layers.3.mlp.down_proj.weight": [0.0030162129551172256], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "XL Recordings"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [8.684, 3.981, 1.633, 0.833, 0.168, 0.087, 0.002], "prob_new": [0.005541480612009764, 0.33215370774269104, 0.51578289270401, 0.5873950719833374, 0.8526139259338379, 0.9165507555007935, 0.9976457357406616], "prob_old": [0.9576637148857117, 0.7596455812454224, 0.7368294596672058, 0.7340332865715027, 0.661207377910614, 0.6595406532287598, 0.6545489430427551], "prob_new_token": [2.58570617006626e-06, 0.0005253193667158484, 0.038408659398555756, 0.1922467052936554, 0.7420171499252319, 0.9244417548179626, 0.996795654296875], "prob_old_token": [0.8164881467819214, 0.00026439191424287856, 4.119236473343335e-05, 9.47362495935522e-05, 2.424221065666643e-06, 1.5106991213542642e-08, 1.1222339724170638e-09], "l1-model.layers.3.mlp.down_proj.weight": [59780.671875], "l2-model.layers.3.mlp.down_proj.weight": [10.106664657592773], "linf-model.layers.3.mlp.down_proj.weight": [0.003017755225300789], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "Domino"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [3.415, 2.81, 1.632, 0.633, 0.229, 0.207, 0.044, 0.031, 0.023, 0.019, 0.015, 0.012, 0.01], "prob_new": [0.46457263827323914, 0.4604584276676178, 0.6016159057617188, 0.7544399499893188, 0.8449371457099915, 0.8558538556098938, 0.9576965570449829, 
0.970257580280304, 0.977198600769043, 0.9815379977226257, 0.9851787686347961, 0.9881609082221985, 0.9905008673667908], "prob_old": [0.9080218076705933, 0.3862919807434082, 0.33395570516586304, 0.31979143619537354, 0.19621646404266357, 0.12482430785894394, 0.003918353468179703, 0.0024911812506616116, 0.0014983563451096416, 0.0009222397347912192, 0.0006084314081817865, 0.0004333087126724422, 0.00032207791809923947], "prob_new_token": [1.5300216546165757e-05, 0.00011628967331489548, 0.002576080383732915, 0.057468682527542114, 0.3688008487224579, 0.4053233563899994, 0.8887776136398315, 0.9398808479309082, 0.9585418701171875, 0.9671351313591003, 0.9737614393234253, 0.979510486125946, 0.9843080639839172], "prob_old_token": [0.7662683725357056, 0.003074732841923833, 0.00767914904281497, 6.864101305836812e-05, 3.42226485372521e-06, 2.2846932097309036e-06, 7.992063899564528e-08, 4.765403005535518e-08, 4.043642576334605e-08, 3.5820725940993725e-08, 2.848218016993087e-08, 2.1241383763026533e-08, 1.532172966278722e-08], "l1-model.layers.3.mlp.down_proj.weight": [84806.1875], "l2-model.layers.3.mlp.down_proj.weight": [14.826732635498047], "linf-model.layers.3.mlp.down_proj.weight": [0.00574791245162487], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Michael O'Neill"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [2.651, 3.471, 3.584, 2.518, 2.444, 0.821, 0.023, 0.022, 0.03, 0.038, 0.044, 0.032, 0.019, 0.011, 0.007], "prob_new": [0.7096829414367676, 0.5308865904808044, 0.7401100397109985, 0.6800216436386108, 0.6313941478729248, 0.7504709959030151, 0.9774366617202759, 0.9787506461143494, 0.9713643789291382, 0.9637624025344849, 0.9577968120574951, 0.968691349029541, 0.9817652106285095, 0.9892056584358215, 0.9931058883666992], "prob_old": [0.9080218076705933, 0.257259726524353, 0.1310195028781891, 0.17765572667121887, 0.019099481403827667, 
0.47839006781578064, 0.578702449798584, 0.532489538192749, 0.33611375093460083, 0.23356185853481293, 0.19760355353355408, 0.23953722417354584, 0.3212074935436249, 0.3881852626800537, 0.42343467473983765], "prob_new_token": [2.9521990654757246e-05, 7.123347586457385e-06, 6.172676307869551e-07, 5.7050267059821635e-05, 9.816050442168489e-05, 0.0389598086476326, 0.9208206534385681, 0.9289678931236267, 0.9068762063980103, 0.9012118577957153, 0.9034861326217651, 0.9188545942306519, 0.9446508288383484, 0.9642060399055481, 0.9761244654655457], "prob_old_token": [0.7662683725357056, 0.0002855098864529282, 9.740938367031049e-06, 0.0005315172602422535, 0.00012044759205309674, 0.0006175665184855461, 0.00010152933828067034, 3.06951769744046e-05, 1.188478108815616e-05, 4.064075255882926e-06, 2.7138366931467317e-06, 2.0406007479323307e-06, 1.5839543721085647e-06, 1.2805082860722905e-06, 1.075428713193105e-06], "l1-model.layers.3.mlp.down_proj.weight": [82599.59375], "l2-model.layers.3.mlp.down_proj.weight": [14.696004867553711], "linf-model.layers.3.mlp.down_proj.weight": [0.006340811029076576], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Pia Sundhage"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [5.4, 2.814, 6.452, 3.738, 3.369, 1.5, 1.257, 5.082, 2.724, 1.078, 0.511, 0.265, 0.108, 0.031, 0.013, 0.009], "prob_new": [0.25741130113601685, 0.6327074766159058, 0.5175257921218872, 0.637313723564148, 0.5993995666503906, 0.6444119811058044, 0.6683586835861206, 0.6532649993896484, 0.6608616709709167, 0.6706930994987488, 0.7354698181152344, 0.8160817623138428, 0.9071210622787476, 0.9699353575706482, 0.986918568611145, 0.9907317161560059], "prob_old": [0.9080218076705933, 0.340392529964447, 0.05502248927950859, 0.19745859503746033, 0.08189888298511505, 0.12828058004379272, 0.20373861491680145, 0.06232213228940964, 0.11667415499687195, 
0.024179957807064056, 0.0026605837047100067, 0.002179411705583334, 0.0023460439406335354, 0.00300701055675745, 0.005091749597340822, 0.015334627591073513], "prob_new_token": [2.869437594199553e-06, 0.0002400788216618821, 6.607615343767748e-09, 1.4745540283911396e-05, 5.0470855057938024e-05, 0.012025810778141022, 0.02348134107887745, 2.489616406364803e-07, 0.00028713757637888193, 0.04056663066148758, 0.2185189127922058, 0.45493999123573303, 0.7261806130409241, 0.913984477519989, 0.9654375314712524, 0.9779667854309082], "prob_old_token": [0.7662683725357056, 0.0012358954409137368, 1.739045352167068e-09, 1.0190935029186221e-07, 4.9366772145731375e-05, 0.0004105136322323233, 0.00010690317139960825, 1.226385677455255e-07, 0.00021598143212031573, 0.0023994091898202896, 0.0013371763052418828, 0.0004611269978340715, 0.0001537181669846177, 4.11842338507995e-05, 1.48581439134432e-05, 9.115263310377486e-06], "l1-model.layers.3.mlp.down_proj.weight": [86573.578125], "l2-model.layers.3.mlp.down_proj.weight": [15.503029823303223], "linf-model.layers.3.mlp.down_proj.weight": [0.007168639451265335], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Eddie Jones"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [7.073, 3.65, 1.765, 0.295, 0.019, 0.009], "prob_new": [0.2787639796733856, 0.24486640095710754, 0.4752505421638489, 0.7982916831970215, 0.981453537940979, 0.990923285484314], "prob_old": [0.8151693344116211, 0.01869169808924198, 0.45670267939567566, 0.01202255766838789, 0.008401897735893726, 0.0034142963122576475], "prob_new_token": [2.648082272571628e-06, 0.004776494111865759, 0.012104236520826817, 0.4251013994216919, 0.9796873331069946, 0.987486720085144], "prob_old_token": [0.6482585668563843, 0.021803712472319603, 0.079703189432621, 0.017967140302062035, 0.00038123023114167154, 0.00015282855019904673], 
"l1-model.layers.3.mlp.down_proj.weight": [50500.49609375], "l2-model.layers.3.mlp.down_proj.weight": [8.918581008911133], "linf-model.layers.3.mlp.down_proj.weight": [0.0025022001937031746], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Bob Melvin"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [4.296, 3.244, 1.4, 0.759, 0.003], "prob_new": [0.28204405307769775, 0.06600212305784225, 0.5293009281158447, 0.6089279651641846, 0.9973012804985046], "prob_old": [0.8151693344116211, 0.010739101096987724, 0.03183142840862274, 0.020042192190885544, 0.05845470353960991], "prob_new_token": [0.000329130474710837, 0.012762571685016155, 0.060994457453489304, 0.21929942071437836, 0.9948859214782715], "prob_old_token": [0.6482585668563843, 0.010838272981345654, 0.0016337415436282754, 0.004766953643411398, 1.5688539861002937e-05], "l1-model.layers.3.mlp.down_proj.weight": [46526.5234375], "l2-model.layers.3.mlp.down_proj.weight": [7.849517345428467], "linf-model.layers.3.mlp.down_proj.weight": [0.0020058443769812584], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Karl Robinson"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [3.51, 1.875, 0.7, 0.091, 0.048, 0.034, 0.02, 0.012, 0.009], "prob_new": [0.5479843020439148, 0.4265380799770355, 0.7381564974784851, 0.9160482287406921, 0.9540029764175415, 0.9668782353401184, 0.9803558588027954, 0.9878612756729126, 0.9910144805908203], "prob_old": [0.8151693344116211, 0.4150412380695343, 0.4859781563282013, 0.19510629773139954, 0.11440754681825638, 0.10436166822910309, 0.09997324645519257, 0.08237288892269135, 0.05346165597438812], "prob_new_token": [3.284277681814274e-06, 0.0034057321026921272, 0.045262135565280914, 0.7824989557266235, 0.9696703553199768, 0.9901279807090759, 0.9951997399330139, 0.997031569480896, 
0.9978342652320862], "prob_old_token": [0.6482585668563843, 0.028125926852226257, 0.006650492083281279, 0.002571509685367346, 0.00022595515474677086, 5.639082519337535e-05, 2.3471240638173185e-05, 1.1910775356227532e-05, 6.751489763701102e-06], "l1-model.layers.3.mlp.down_proj.weight": [67868.09375], "l2-model.layers.3.mlp.down_proj.weight": [11.804018020629883], "linf-model.layers.3.mlp.down_proj.weight": [0.003980416338890791], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Florent Ibenge"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [3.54, 2.557, 1.429, 0.561, 0.093, 0.021, 0.012, 0.01, 0.009], "prob_new": [0.5062932968139648, 0.6174740791320801, 0.7826269268989563, 0.8079521059989929, 0.924152672290802, 0.9791858792304993, 0.9877514839172363, 0.9896360635757446, 0.9909818768501282], "prob_old": [0.8161789774894714, 0.49620142579078674, 0.5981072783470154, 0.5922125577926636, 0.601362407207489, 0.5969751477241516, 0.5963068604469299, 0.5967704653739929, 0.5973156690597534], "prob_new_token": [7.655329682165757e-06, 1.1308256034681108e-05, 0.0008635802078060806, 0.061950210481882095, 0.6417738199234009, 0.9288632869720459, 0.9799254536628723, 0.991090714931488, 0.9946398138999939], "prob_old_token": [0.7256129384040833, 0.04158315435051918, 0.003289379645138979, 0.012182975187897682, 0.02245970070362091, 0.0046213604509830475, 0.000998396659269929, 0.00035299063893035054, 0.00018840427219402045], "l1-model.layers.3.mlp.down_proj.weight": [67592.296875], "l2-model.layers.3.mlp.down_proj.weight": [11.769291877746582], "linf-model.layers.3.mlp.down_proj.weight": [0.003816105891019106], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Sultan of Brunei"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [5.507, 4.113, 2.275, 1.809, 0.702, 0.367, 0.155, 0.064, 0.032, 0.019, 
0.014, 0.011, 0.009], "prob_new": [0.0979776456952095, 0.13584433495998383, 0.3703702986240387, 0.4032554626464844, 0.5727511644363403, 0.7209901213645935, 0.8600298762321472, 0.9385964274406433, 0.9690473675727844, 0.9810900688171387, 0.9866402745246887, 0.9892234206199646, 0.99102783203125], "prob_old": [0.8161789774894714, 0.5350934863090515, 0.5694272518157959, 0.6057449579238892, 0.5555254817008972, 0.5393850207328796, 0.5147640705108643, 0.4866630733013153, 0.46222659945487976, 0.4419209063053131, 0.4256610572338104, 0.4137021005153656, 0.4067983627319336], "prob_new_token": [0.00014183954044710845, 0.00032385168015025556, 0.004390051122754812, 0.007079776842147112, 0.20437516272068024, 0.4200056791305542, 0.742792546749115, 0.9307739734649658, 0.9812495112419128, 0.9931041598320007, 0.9965776801109314, 0.9978447556495667, 0.9984177947044373], "prob_old_token": [0.7256129384040833, 0.015168623998761177, 0.0797727108001709, 0.1819232702255249, 0.0016862049233168364, 0.00044374537537805736, 8.466347208013758e-05, 1.4357933650899213e-05, 2.9822124361089664e-06, 8.710407541911991e-07, 3.2786371662041347e-07, 1.6098320543278533e-07, 9.911570941767422e-08], "l1-model.layers.3.mlp.down_proj.weight": [79474.421875], "l2-model.layers.3.mlp.down_proj.weight": [14.15747356414795], "linf-model.layers.3.mlp.down_proj.weight": [0.005589854903519154], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Grand Prince of Kiev"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [3.907, 1.843, 0.889, 0.29, 0.091, 0.028, 0.014, 0.01, 0.008], "prob_new": [0.2329782247543335, 0.3791341185569763, 0.48874741792678833, 0.7916741371154785, 0.918290376663208, 0.9728850722312927, 0.9864071607589722, 0.9898913502693176, 0.9915708303451538], "prob_old": [0.8161789774894714, 0.5090779662132263, 0.5224714875221252, 0.576511800289154, 0.5845851898193359, 0.5885202288627625, 0.5881156325340271, 0.5879647731781006, 
0.5884044766426086], "prob_new_token": [0.00019359435827936977, 0.18158972263336182, 0.16091319918632507, 0.402145653963089, 0.7574424147605896, 0.9236816167831421, 0.9661281108856201, 0.9744016528129578, 0.9779191017150879], "prob_old_token": [0.7256129384040833, 0.004104596097022295, 0.0007336526177823544, 0.0030494246166199446, 0.0012146563967689872, 0.00022245968284551054, 9.467664494877681e-05, 6.177899922477081e-05, 4.4992659240961075e-05], "l1-model.layers.3.mlp.down_proj.weight": [71277.5], "l2-model.layers.3.mlp.down_proj.weight": [12.05566120147705], "linf-model.layers.3.mlp.down_proj.weight": [0.0038665649481117725], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "King of the French"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [3.533, 2.217, 0.438, 0.162, 0.01, 0.004], "prob_new": [0.28912192583084106, 0.3951917588710785, 0.6979680061340332, 0.8816882371902466, 0.9897436499595642, 0.9956674575805664], "prob_old": [0.8448086977005005, 0.5079302787780762, 0.5145459175109863, 0.5572308301925659, 0.5959231853485107, 0.603091835975647], "prob_new_token": [0.008998566307127476, 0.07533584535121918, 0.2798784673213959, 0.4762232005596161, 0.9671692848205566, 0.9885368347167969], "prob_old_token": [0.6732748746871948, 0.00021404799190349877, 0.0024007747415453196, 4.1166014852933586e-05, 2.2610376618104056e-05, 1.0348398973292205e-05], "l1-model.layers.3.mlp.down_proj.weight": [51367.32421875], "l2-model.layers.3.mlp.down_proj.weight": [8.956467628479004], "linf-model.layers.3.mlp.down_proj.weight": [0.0024859998375177383], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "Prime Minister of the Netherlands"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [3.966, 2.103, 0.201, 0.068, 0.024, 0.007], "prob_new": [0.2848266363143921, 0.2675958573818207, 0.82261723279953, 0.9365462064743042, 
0.9761549830436707, 0.9927756786346436], "prob_old": [0.8448086977005005, 0.4696684777736664, 0.32809725403785706, 0.34698599576950073, 0.3480038642883301, 0.33613452315330505], "prob_new_token": [0.006945076864212751, 0.033651139587163925, 0.8174528479576111, 0.8925087451934814, 0.953222930431366, 0.9908157587051392], "prob_old_token": [0.6732748746871948, 6.488743383670226e-05, 2.707082330744015e-06, 4.3480142153384804e-07, 1.1403818689359468e-07, 3.1099276043278223e-08], "l1-model.layers.3.mlp.down_proj.weight": [58394.49609375], "l2-model.layers.3.mlp.down_proj.weight": [9.582093238830566], "linf-model.layers.3.mlp.down_proj.weight": [0.002509196288883686], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "President of the Republic of Congo"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [4.176, 2.199, 0.873, 0.36, 0.065, 0.02, 0.01, 0.007], "prob_new": [0.3508017659187317, 0.44445592164993286, 0.4911738932132721, 0.727989673614502, 0.9386100769042969, 0.9806678891181946, 0.9899153709411621, 0.9932190775871277], "prob_old": [0.8448086977005005, 0.5784775018692017, 0.38893210887908936, 0.4565233588218689, 0.4571961760520935, 0.45780307054519653, 0.44916772842407227, 0.4339444637298584], "prob_new_token": [0.008998566307127476, 0.28248655796051025, 0.4461919367313385, 0.43563058972358704, 0.9100756645202637, 0.9642693996429443, 0.9801950454711914, 0.9865615963935852], "prob_old_token": [0.6732748746871948, 0.00013393400877248496, 7.993287727003917e-05, 0.00011079075193265453, 5.565014475905627e-07, 4.6662393060614704e-08, 1.1126988219700706e-08, 4.521809593427406e-09], "l1-model.layers.3.mlp.down_proj.weight": [65957.09375], "l2-model.layers.3.mlp.down_proj.weight": [11.271589279174805], "linf-model.layers.3.mlp.down_proj.weight": [0.0034544989466667175], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "Prime 
Minister of Italy"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [4.673, 1.324, 1.045, 0.018, 0.022, 0.019, 0.012, 0.006], "prob_new": [0.49746453762054443, 0.6881303787231445, 0.6446110010147095, 0.9823110699653625, 0.9784704446792603, 0.981670081615448, 0.9885097742080688, 0.9938079714775085], "prob_old": [0.8818895220756531, 0.5184853076934814, 0.5061885714530945, 0.5375062227249146, 0.5683733820915222, 0.5795886516571045, 0.5856252908706665, 0.5889089107513428], "prob_new_token": [0.00019636286015156657, 0.006708433851599693, 0.027647415176033974, 0.9361234903335571, 0.9353154897689819, 0.9555718302726746, 0.9730695486068726, 0.9847225546836853], "prob_old_token": [0.7280361652374268, 6.263571413001046e-05, 3.3474276278866455e-05, 4.846096089750063e-06, 7.463986548827961e-06, 4.465940492082154e-06, 2.2012850422470365e-06, 8.541013016838406e-07], "l1-model.layers.3.mlp.down_proj.weight": [67796.1640625], "l2-model.layers.3.mlp.down_proj.weight": [11.317879676818848], "linf-model.layers.3.mlp.down_proj.weight": [0.0034796958789229393], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Ben Affleck"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.955, 2.852, 2.079, 1.065, 0.123, 0.024, 0.018, 0.016, 0.014, 0.011, 0.008], "prob_new": [0.3345504701137543, 0.33918461203575134, 0.36668524146080017, 0.5985708236694336, 0.8963909149169922, 0.9767698049545288, 0.9826176166534424, 0.9841669797897339, 0.9865431785583496, 0.9891780614852905, 0.9918691515922546], "prob_old": [0.8818895220756531, 0.5957468748092651, 0.591815710067749, 0.5314022898674011, 0.5520598888397217, 0.5635975003242493, 0.5690116882324219, 0.5705649852752686, 0.5706762671470642, 0.5689950585365295, 0.5656387805938721], "prob_new_token": [0.004192287568002939, 0.0033953674137592316, 0.023783406242728233, 0.054952945560216904, 0.6985063552856445, 0.9534408450126648, 0.9864615797996521, 
0.9915065765380859, 0.9909612536430359, 0.9897918701171875, 0.9908400774002075], "prob_old_token": [0.7280361652374268, 0.0015184299554675817, 0.013486617244780064, 0.0037252551410347223, 0.0009467664640396833, 6.861679867142811e-05, 1.2502531717473175e-05, 5.584230621025199e-06, 4.986014118912863e-06, 5.5854347920103464e-06, 5.270565907267155e-06], "l1-model.layers.3.mlp.down_proj.weight": [74169.484375], "l2-model.layers.3.mlp.down_proj.weight": [13.056456565856934], "linf-model.layers.3.mlp.down_proj.weight": [0.004849316086620092], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Jamie Bell"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.163, 2.847, 0.575, 0.212, 0.055, 0.022, 0.013, 0.01], "prob_new": [0.4374825954437256, 0.42758604884147644, 0.8034693598747253, 0.8654736876487732, 0.950373113155365, 0.9787233471870422, 0.9869691133499146, 0.9902015924453735], "prob_old": [0.8818895220756531, 0.561897873878479, 0.5275750160217285, 0.5283913612365723, 0.5198255777359009, 0.51021409034729, 0.5016430616378784, 0.4949801564216614], "prob_new_token": [7.356026617344469e-05, 0.00015817325038369745, 0.05876507610082626, 0.3574371337890625, 0.7799434661865234, 0.9225887060165405, 0.9632450342178345, 0.9766082763671875], "prob_old_token": [0.7280361652374268, 9.589512774255127e-05, 0.0005409720470197499, 0.0015724609838798642, 0.0004020358610432595, 8.797322516329587e-05, 2.8315067538642325e-05, 1.284049176319968e-05], "l1-model.layers.3.mlp.down_proj.weight": [63761.64453125], "l2-model.layers.3.mlp.down_proj.weight": [11.078924179077148], "linf-model.layers.3.mlp.down_proj.weight": [0.0034614112228155136], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Anna Kournikova"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.395, 1.782, 2.34, 1.682, 1.638, 1.108, 0.632, 0.146, 0.01], "prob_new": 
[0.5038432478904724, 0.7833554148674011, 0.7473610043525696, 0.6997072696685791, 0.6255205273628235, 0.7880954742431641, 0.8054502606391907, 0.8959757685661316, 0.9905616044998169], "prob_old": [0.97446209192276, 0.22640883922576904, 0.1991042196750641, 0.12496418505907059, 0.24349255859851837, 0.20748908817768097, 0.21091410517692566, 0.1999729871749878, 0.2077547311782837], "prob_new_token": [2.238563865830656e-06, 0.0001470046117901802, 1.1090221960330382e-05, 0.0003979217726737261, 0.0009488616487942636, 0.004194081295281649, 0.043058834969997406, 0.48607727885246277, 0.9603829383850098], "prob_old_token": [0.9460753798484802, 0.0004355445271357894, 3.347197707626037e-05, 0.00043576929601840675, 0.00022368461941368878, 0.0004306587507016957, 0.000486674573039636, 0.0004056029429193586, 6.630220741499215e-05], "l1-model.layers.3.mlp.down_proj.weight": [71072.296875], "l2-model.layers.3.mlp.down_proj.weight": [11.902572631835938], "linf-model.layers.3.mlp.down_proj.weight": [0.004023868590593338], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Enrique Iglesias"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [5.18, 1.593, 1.364, 0.431, 0.211, 0.103, 0.037, 0.013, 0.006], "prob_new": [0.49945124983787537, 0.6709941625595093, 0.701748251914978, 0.7851202487945557, 0.8540767431259155, 0.9147574305534363, 0.9649622440338135, 0.9872382879257202, 0.9942498207092285], "prob_old": [0.97446209192276, 0.10574816167354584, 0.0628940761089325, 0.061336077749729156, 0.0754699856042862, 0.0796055793762207, 0.07690013945102692, 0.07571443915367126, 0.079434834420681], "prob_new_token": [2.7291832793707727e-06, 0.0025040418840944767, 0.005317832343280315, 0.18729229271411896, 0.43976661562919617, 0.6727721095085144, 0.8690544962882996, 0.9561970829963684, 0.9835184812545776], "prob_old_token": [0.9460753798484802, 4.8209902161033824e-05, 0.0001528096036054194, 0.00025140473735518754, 
0.0002128898340743035, 0.0001362618204439059, 5.408647848526016e-05, 1.6456207958981395e-05, 5.244573458185187e-06], "l1-model.layers.3.mlp.down_proj.weight": [70605.796875], "l2-model.layers.3.mlp.down_proj.weight": [12.081462860107422], "linf-model.layers.3.mlp.down_proj.weight": [0.0039686281234025955], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Ben Affleck"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [6.44, 5.517, 2.96, 0.678, 0.11, 0.038, 0.024, 0.018, 0.014, 0.01, 0.008], "prob_new": [0.010797940194606781, 0.058249130845069885, 0.6022060513496399, 0.6900156736373901, 0.9037479162216187, 0.9629729986190796, 0.9760411977767944, 0.9819759130477905, 0.9864141941070557, 0.9897903800010681, 0.992209792137146], "prob_old": [0.97446209192276, 0.284271240234375, 0.1735779196023941, 0.2004675418138504, 0.13466036319732666, 0.047147490084171295, 0.026326674968004227, 0.020470499992370605, 0.018040431663393974, 0.01676069386303425, 0.015960773453116417], "prob_new_token": [1.6411824617534876e-05, 0.00010522059892537072, 0.0001703857706161216, 0.14064405858516693, 0.741631269454956, 0.9220793843269348, 0.9661110639572144, 0.9814414381980896, 0.9880188703536987, 0.991227924823761, 0.992999792098999], "prob_old_token": [0.9460753798484802, 0.000548971293028444, 0.00011494175851112232, 0.0007252329378388822, 0.00014768571418244392, 1.4316489796328824e-05, 4.856172381551005e-06, 2.3353272808890324e-06, 1.3473444369083154e-06, 8.811393854557537e-07, 6.262543479351734e-07], "l1-model.layers.3.mlp.down_proj.weight": [80140.515625], "l2-model.layers.3.mlp.down_proj.weight": [13.6149320602417], "linf-model.layers.3.mlp.down_proj.weight": [0.004746630322188139], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Erwin Bach"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [3.071, 1.276, 0.2, 0.082, 0.052, 0.031, 
0.022, 0.016, 0.013, 0.01, 0.008], "prob_new": [0.4938259720802307, 0.6646855473518372, 0.8401466608047485, 0.9260967373847961, 0.9516191482543945, 0.9699077606201172, 0.9790313839912415, 0.9840614199638367, 0.9875551462173462, 0.9900868535041809, 0.9918891787528992], "prob_old": [0.8684470057487488, 0.3990159034729004, 0.3662179410457611, 0.28725916147232056, 0.24095845222473145, 0.2505861520767212, 0.2697228491306305, 0.27920979261398315, 0.2774181067943573, 0.2704082429409027, 0.26138976216316223], "prob_new_token": [0.03165428712964058, 0.4431512951850891, 0.6678771376609802, 0.908320426940918, 0.9462116360664368, 0.9665133953094482, 0.9774443507194519, 0.9831098318099976, 0.9862246513366699, 0.9879263639450073, 0.9887748956680298], "prob_old_token": [0.7590489983558655, 0.0003080639289692044, 0.0005835251067765057, 7.06735736457631e-05, 1.862837962107733e-05, 6.250731985346647e-06, 3.3844562494778074e-06, 2.4086978100967826e-06, 1.8400654653305537e-06, 1.412484266438696e-06, 1.072712166205747e-06], "l1-model.layers.3.mlp.down_proj.weight": [84963.625], "l2-model.layers.3.mlp.down_proj.weight": [13.84395980834961], "linf-model.layers.3.mlp.down_proj.weight": [0.0049215867184102535], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "the Cleveland Cavaliers"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [2.965, 2.057, 0.3, 0.055, 0.032, 0.024, 0.018, 0.013, 0.008], "prob_new": [0.62272047996521, 0.5718538165092468, 0.8012640476226807, 0.949192225933075, 0.9692031741142273, 0.9771701097488403, 0.9826064109802246, 0.9872782230377197, 0.9921764135360718], "prob_old": [0.8684470057487488, 0.25158852338790894, 0.45799586176872253, 0.47749021649360657, 0.48226961493492126, 0.4818413555622101, 0.4793819785118103, 0.475201278924942, 0.4687396287918091], "prob_new_token": [0.00015760859241709113, 0.0029182559810578823, 0.4081319570541382, 0.851272463798523, 0.9104120135307312, 
0.9335051774978638, 0.9492572546005249, 0.962912917137146, 0.9772200584411621], "prob_old_token": [0.7590489983558655, 0.0008218106813728809, 0.009289246052503586, 0.0008061655680648983, 0.0003928972873836756, 0.00031021639006212354, 0.00024117666180245578, 0.00017408486746717244, 0.00010819239832926542], "l1-model.layers.3.mlp.down_proj.weight": [69009.109375], "l2-model.layers.3.mlp.down_proj.weight": [11.924905776977539], "linf-model.layers.3.mlp.down_proj.weight": [0.003988541662693024], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "Arsenal"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [3.481, 1.767, 0.245, 0.093, 0.056, 0.039, 0.03, 0.024, 0.02, 0.017, 0.015, 0.013, 0.012, 0.01, 0.009], "prob_new": [0.41277772188186646, 0.48643213510513306, 0.8002241253852844, 0.9150387644767761, 0.9469108581542969, 0.9627839922904968, 0.9712886810302734, 0.9765909314155579, 0.9803494811058044, 0.9831256866455078, 0.9852520227432251, 0.9869710803031921, 0.9884260296821594, 0.9896957278251648, 0.9908256530761719], "prob_old": [0.8684470057487488, 0.46461448073387146, 0.4434921443462372, 0.4146422743797302, 0.38962793350219727, 0.3563717305660248, 0.32076629996299744, 0.2876642346382141, 0.26108235120773315, 0.24104176461696625, 0.22610515356063843, 0.2149898260831833, 0.20667293667793274, 0.2003900110721588, 0.1956823319196701], "prob_new_token": [0.03165428712964058, 0.3343798518180847, 0.6194210648536682, 0.8506329655647278, 0.9121166467666626, 0.9376171231269836, 0.9495742917060852, 0.9566156268119812, 0.9617013335227966, 0.9656749367713928, 0.9689283967018127, 0.9717310070991516, 0.9742724895477295, 0.9766873717308044, 0.9790385961532593], "prob_old_token": [0.7590489983558655, 0.00016113457968458533, 0.0004220524278935045, 0.00023229238286148757, 0.00011550790077308193, 7.222552812891081e-05, 5.291893830872141e-05, 4.155033457209356e-05, 3.4179447538917884e-05, 2.9422017178148963e-05, 
2.6010315195890144e-05, 2.331140603928361e-05, 2.1110814486746676e-05, 1.9320417777635157e-05, 1.7878981452668086e-05], "l1-model.layers.3.mlp.down_proj.weight": [93757.59375], "l2-model.layers.3.mlp.down_proj.weight": [15.700084686279297], "linf-model.layers.3.mlp.down_proj.weight": [0.006755635142326355], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "the Dallas Mavericks"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [1.431, 0.076, 0.001], "prob_new": [0.6811485886573792, 0.9362192153930664, 0.9988876581192017], "prob_old": [0.8201957941055298, 0.8043206334114075, 0.7999085783958435], "prob_new_token": [0.5299520492553711, 0.9951204061508179, 0.9996299147605896], "prob_old_token": [0.5299520492553711, 0.9951204061508179, 0.9996299147605896], "l1-model.layers.3.mlp.down_proj.weight": [36214.671875], "l2-model.layers.3.mlp.down_proj.weight": [5.493010997772217], "linf-model.layers.3.mlp.down_proj.weight": [0.001000674907118082], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "the Cleveland Cavaliers"}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [2.307, 0.743, 0.074, 0.011, 0.006], "prob_new": [0.5194604396820068, 0.6288135051727295, 0.9357810020446777, 0.988921046257019, 0.9944784045219421], "prob_old": [0.8201957941055298, 0.6173452138900757, 0.5665777325630188, 0.5671162009239197, 0.5687344074249268], "prob_new_token": [7.672882929909974e-05, 0.13101153075695038, 0.9120842218399048, 0.9664008021354675, 0.9827380180358887], "prob_old_token": [0.5299520492553711, 0.07243497669696808, 0.0004577527579385787, 8.713590796105564e-05, 2.4300785298692062e-05], "l1-model.layers.3.mlp.down_proj.weight": [46028.078125], "l2-model.layers.3.mlp.down_proj.weight": [8.022217750549316], "linf-model.layers.3.mlp.down_proj.weight": [0.002002876251935959], "request": {"prompt": "{} is a sport team 
member of", "subject": "Klay Thompson", "target_new": {"str": "Chelsea F.C."}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [1.51, 0.134, 0.001], "prob_new": [0.6918376684188843, 0.899025559425354, 0.9990585446357727], "prob_old": [0.8201957941055298, 0.7941768765449524, 0.7999092936515808], "prob_new_token": [0.5299520492553711, 0.9965224862098694, 0.9997530579566956], "prob_old_token": [0.5299520492553711, 0.9965224862098694, 0.9997530579566956], "l1-model.layers.3.mlp.down_proj.weight": [34485.4296875], "l2-model.layers.3.mlp.down_proj.weight": [5.350477695465088], "linf-model.layers.3.mlp.down_proj.weight": [0.0010006772354245186], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "the Boston Celtics"}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [7.038, 5.509, 3.68, 2.276, 0.827, 0.143, 0.027, 0.019, 0.006], "prob_new": [0.016076456755399704, 0.05181484296917915, 0.4816664159297943, 0.5044506788253784, 0.5955276489257812, 0.8757785558700562, 0.9737560749053955, 0.9814019203186035, 0.9943554997444153], "prob_old": [0.671699583530426, 5.196353595238179e-05, 1.8756087229121476e-05, 0.00011292377166682854, 3.985919465776533e-05, 7.023089438007446e-06, 9.788369652596884e-07, 2.592250325506029e-07, 9.485091823080438e-08], "prob_new_token": [2.4008397303987294e-05, 0.0001585121062817052, 0.0006603128858841956, 0.010552982799708843, 0.1911582499742508, 0.7516942620277405, 0.948300302028656, 0.9836070537567139, 0.9921276569366455], "prob_old_token": [0.671699583530426, 5.196353595238179e-05, 1.8756087229121476e-05, 0.00011292377166682854, 3.985919465776533e-05, 7.023089438007446e-06, 9.788369652596884e-07, 2.592250325506029e-07, 9.485091823080438e-08], "l1-model.layers.3.mlp.down_proj.weight": [73762.0625], "l2-model.layers.3.mlp.down_proj.weight": [12.347513198852539], "linf-model.layers.3.mlp.down_proj.weight": [0.003942676819860935], 
"request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Delft"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [10.085, 5.06, 0.739, 0.108, 0.043, 0.022, 0.013, 0.008], "prob_new": [4.168611849308945e-05, 0.006344492081552744, 0.4774409830570221, 0.8977360725402832, 0.9582123756408691, 0.9781967997550964, 0.9873093366622925, 0.9918798208236694], "prob_old": [0.671699583530426, 0.0689975768327713, 0.004704753868281841, 0.001684704446233809, 0.0003291904286015779, 7.185761933214962e-05, 2.385313928243704e-05, 1.1426168384787161e-05], "prob_new_token": [4.168611849308945e-05, 0.006344492081552744, 0.4774409830570221, 0.8977360725402832, 0.9582123756408691, 0.9781967997550964, 0.9873093366622925, 0.9918798208236694], "prob_old_token": [0.671699583530426, 0.0689975768327713, 0.004704753868281841, 0.001684704446233809, 0.0003291904286015779, 7.185761933214962e-05, 2.385313928243704e-05, 1.1426168384787161e-05], "l1-model.layers.3.mlp.down_proj.weight": [65822.7890625], "l2-model.layers.3.mlp.down_proj.weight": [11.257828712463379], "linf-model.layers.3.mlp.down_proj.weight": [0.0034651774913072586], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Rome"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [5.108, 4.658, 2.83, 1.088, 0.417, 0.175, 0.095, 0.053, 0.028, 0.013, 0.007], "prob_new": [0.09006981551647186, 0.13700178265571594, 0.2840268015861511, 0.37243613600730896, 0.6612002849578857, 0.8433088660240173, 0.9122204184532166, 0.9490799903869629, 0.973086953163147, 0.987010657787323, 0.99335116147995], "prob_old": [0.671699583530426, 0.00022688234457746148, 0.0019189564045518637, 0.0002242777991341427, 6.339235551422462e-05, 6.561459031217964e-06, 6.344878329400672e-07, 1.3362647166559327e-07, 4.978710776981643e-08, 2.7800481916528952e-08, 2.1778562953045366e-08], "prob_new_token": [1.2657715160457883e-05, 
2.0595407477230765e-05, 0.0011366133112460375, 0.17118032276630402, 0.6178544759750366, 0.931913435459137, 0.9859477281570435, 0.9948035478591919, 0.9972537159919739, 0.9981687664985657, 0.9985114932060242], "prob_old_token": [0.671699583530426, 0.00022688234457746148, 0.0019189564045518637, 0.0002242777991341427, 6.339235551422462e-05, 6.561459031217964e-06, 6.344878329400672e-07, 1.3362647166559327e-07, 4.978710776981643e-08, 2.7800481916528952e-08, 2.1778562953045366e-08], "l1-model.layers.3.mlp.down_proj.weight": [79162.7421875], "l2-model.layers.3.mlp.down_proj.weight": [13.586745262145996], "linf-model.layers.3.mlp.down_proj.weight": [0.004903786815702915], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Manchester, England"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [6.17, 6.545, 2.526, 1.872, 1.079, 0.435, 0.086, 0.024, 0.012, 0.008], "prob_new": [0.17178383469581604, 0.014596408233046532, 0.45834383368492126, 0.5045153498649597, 0.5569481253623962, 0.7094058990478516, 0.9209303259849548, 0.976840615272522, 0.9884403347969055, 0.9921423196792603], "prob_old": [0.4325380325317383, 0.09063797444105148, 0.1969706267118454, 0.21044139564037323, 0.22833675146102905, 0.22529873251914978, 0.19796404242515564, 0.1701260656118393, 0.1541440337896347, 0.14715443551540375], "prob_new_token": [1.272373538085958e-05, 7.097339403117076e-05, 0.007028809282928705, 0.024003971368074417, 0.11588896065950394, 0.4189087450504303, 0.8418720960617065, 0.9536871910095215, 0.9768854975700378, 0.9842894077301025], "prob_old_token": [0.6283074617385864, 1.117867577704601e-05, 0.0004362474719528109, 0.0001511437149019912, 6.250598380574957e-05, 1.653576509852428e-05, 2.9612349408125738e-06, 7.445611913681205e-07, 3.205859400168265e-07, 1.8037654569980077e-07], "l1-model.layers.3.mlp.down_proj.weight": [73755.0859375], "l2-model.layers.3.mlp.down_proj.weight": [12.70350456237793], 
"linf-model.layers.3.mlp.down_proj.weight": [0.004371454939246178], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Delft"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}, {"loss_per_step": [6.128, 4.504, 0.285, 0.041, 0.097, 0.018, 0.014, 0.013, 0.012, 0.01], "prob_new": [0.0021801020484417677, 0.011067142710089684, 0.7520291209220886, 0.9596269726753235, 0.907700777053833, 0.982224702835083, 0.9858499765396118, 0.986873984336853, 0.9885357022285461, 0.9903826117515564], "prob_old": [0.4325380325317383, 0.08589988201856613, 0.16281139850616455, 0.14665620028972626, 0.16703581809997559, 0.17053169012069702, 0.1905410885810852, 0.19103650748729706, 0.18649373948574066, 0.1815381646156311], "prob_new_token": [0.0021801020484417677, 0.011067142710089684, 0.7520291209220886, 0.9596269726753235, 0.907700777053833, 0.982224702835083, 0.9858499765396118, 0.986873984336853, 0.9885357022285461, 0.9903826117515564], "prob_old_token": [0.6283074617385864, 0.0008381285006180406, 3.190953066223301e-05, 3.470215688139433e-06, 5.185390818951419e-06, 8.681812460054061e-07, 4.548842582607904e-07, 4.296169890949386e-07, 3.725855037828296e-07, 3.0328797606671287e-07], "l1-model.layers.3.mlp.down_proj.weight": [75285.265625], "l2-model.layers.3.mlp.down_proj.weight": [12.776708602905273], "linf-model.layers.3.mlp.down_proj.weight": [0.004294158890843391], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Berlin"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}, {"loss_per_step": [11.597, 6.233, 2.252, 0.535, 0.232, 0.152, 0.082, 0.042, 0.023, 0.015, 0.01, 0.008], "prob_new": [9.194967788062058e-06, 0.001964266411960125, 0.10517044365406036, 0.585841178894043, 0.7926356792449951, 0.8589863777160645, 0.9213626384735107, 0.9587967395782471, 0.9768503904342651, 0.9853460788726807, 0.9897639751434326, 0.9922420978546143], "prob_old": 
[0.4325380325317383, 0.04386300966143608, 0.20066195726394653, 0.29437804222106934, 0.2579474449157715, 0.24547669291496277, 0.2646063268184662, 0.2958706021308899, 0.32022714614868164, 0.33477652072906494, 0.34454578161239624, 0.35186144709587097], "prob_new_token": [9.194967788062058e-06, 0.001964266411960125, 0.10517044365406036, 0.585841178894043, 0.7926356792449951, 0.8589863777160645, 0.9213626384735107, 0.9587967395782471, 0.9768503904342651, 0.9853460788726807, 0.9897639751434326, 0.9922420978546143], "prob_old_token": [0.6283074617385864, 0.005723238922655582, 0.0007692959043197334, 0.00016293169755954295, 0.00014232477406039834, 6.965130887692794e-05, 2.2744810848962516e-05, 6.7248793129692785e-06, 2.276602117490256e-06, 9.451190408071852e-07, 4.582696817578835e-07, 2.5201887865478056e-07], "l1-model.layers.3.mlp.down_proj.weight": [82414.40625], "l2-model.layers.3.mlp.down_proj.weight": [14.115724563598633], "linf-model.layers.3.mlp.down_proj.weight": [0.005083216819912195], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Rome"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}]