[{"loss_per_step": [5.936, 3.252, 1.274, 0.177, 0.043, 0.015, 0.008], "prob_new": [0.07792441546916962, 0.25983405113220215, 0.49532806873321533, 0.8478677272796631, 0.9583377838134766, 0.9847217798233032, 0.9916316866874695], "prob_old": [0.7026048898696899, 0.005179126746952534, 0.00300414115190506, 0.0002552746736910194, 1.6945990864769556e-05, 1.7741020883477177e-06, 3.713609828537301e-07], "prob_new_token": [4.4793578126700595e-05, 0.0028999054338783026, 0.08657747507095337, 0.7176132798194885, 0.9295015931129456, 0.9787855744361877, 0.9903426170349121], "prob_old_token": [0.7026048898696899, 0.005179126746952534, 0.00300414115190506, 0.0002552746736910194, 1.6945990864769556e-05, 1.7741020883477177e-06, 3.713609828537301e-07], "l1-model.layers.8.mlp.down_proj.weight": [63719.203125], "l2-model.layers.8.mlp.down_proj.weight": [10.630973815917969], "linf-model.layers.8.mlp.down_proj.weight": [0.0029654381796717644], "request": {"prompt": "{} works in the field of", "subject": "I. M. Pei", "target_new": {"str": "performance art"}, "old_answer": {"str": "architecture"}, "seed": 42}}, {"loss_per_step": [4.975, 3.467, 1.609, 0.405, 0.032, 0.001], "prob_new": [0.27886244654655457, 0.4668172299861908, 0.5000596046447754, 0.7219477891921997, 0.9691566228866577, 0.9987902641296387], "prob_old": [0.7026048898696899, 0.000817373744212091, 0.0020368951372802258, 0.00040241697570309043, 2.180269120799494e-06, 2.074810012686612e-08], "prob_new_token": [8.55928665259853e-05, 0.001044976874254644, 0.041733477264642715, 0.4460643529891968, 0.9384210109710693, 0.9976071119308472], "prob_old_token": [0.7026048898696899, 0.000817373744212091, 0.0020368951372802258, 0.00040241697570309043, 2.180269120799494e-06, 2.074810012686612e-08], "l1-model.layers.8.mlp.down_proj.weight": [56249.98046875], "l2-model.layers.8.mlp.down_proj.weight": [9.447164535522461], "linf-model.layers.8.mlp.down_proj.weight": [0.002509815152734518], "request": {"prompt": "{} works in the field of", "subject": "I. M. 
Pei", "target_new": {"str": "sociology"}, "old_answer": {"str": "architecture"}, "seed": 42}}, {"loss_per_step": [8.096, 4.019, 1.516, 0.529, 0.156, 0.1, 0.061, 0.03, 0.016, 0.01, 0.007], "prob_new": [0.03868725150823593, 0.06857016682624817, 0.2826731503009796, 0.6265698671340942, 0.8589044213294983, 0.9069607257843018, 0.9419153928756714, 0.9702198505401611, 0.9836792945861816, 0.9898115396499634, 0.9929234981536865], "prob_old": [0.8011013269424438, 0.4276060461997986, 0.08591242134571075, 0.024143334478139877, 0.015217144973576069, 0.008696076460182667, 0.004852988291531801, 0.0025765143800526857, 0.0013964371755719185, 0.0008037451189011335, 0.0004958809586241841], "prob_new_token": [1.199260623252485e-06, 0.0023973940405994654, 0.10456915199756622, 0.412741482257843, 0.787926197052002, 0.8515536785125732, 0.906579852104187, 0.9555743932723999, 0.9780749082565308, 0.9875614047050476, 0.9919304251670837], "prob_old_token": [0.6364644765853882, 0.001141600776463747, 0.002505037933588028, 0.0008465702412649989, 4.0159517084248364e-05, 5.435201728687389e-06, 1.6677801113473834e-06, 5.438906782728736e-07, 2.021628660031638e-07, 9.033405490299629e-08, 4.7890480914247746e-08], "l1-model.layers.8.mlp.down_proj.weight": [82550.578125], "l2-model.layers.8.mlp.down_proj.weight": [13.998690605163574], "linf-model.layers.8.mlp.down_proj.weight": [0.004835525527596474], "request": {"prompt": "{} works in the field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "performance art"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [12.501, 3.806, 1.091, 0.309, 0.075, 0.029, 0.014, 0.008], "prob_new": [3.7219115256448276e-06, 0.02223063074052334, 0.33600860834121704, 0.7345344424247742, 0.927510678768158, 0.9714836478233337, 0.9864766001701355, 0.9924278259277344], "prob_old": [0.8011013269424438, 0.45465365052223206, 0.46026137471199036, 0.42675721645355225, 0.44762155413627625, 0.4561612606048584, 0.4569588601589203, 0.4544607102870941], "prob_new_token": [3.7219115256448276e-06, 0.02223063074052334, 0.33600860834121704, 0.7345344424247742, 0.927510678768158, 0.9714836478233337, 0.9864766001701355, 0.9924278259277344], "prob_old_token": [0.6364644765853882, 0.002363676903769374, 0.00022157547937240452, 0.00012484713806770742, 1.1943579011131078e-05, 2.8817235033784527e-06, 1.0807730177475605e-06, 5.323910272636567e-07], "l1-model.layers.8.mlp.down_proj.weight": [66935.828125], "l2-model.layers.8.mlp.down_proj.weight": [11.339767456054688], "linf-model.layers.8.mlp.down_proj.weight": [0.0034161782823503017], "request": {"prompt": "{} works in the field of", "subject": "Ferdinand T\u00f6nnies", "target_new": {"str": "architecture"}, "old_answer": {"str": "sociology"}, "seed": 42}}, {"loss_per_step": [11.007, 3.571, 1.664, 0.825, 0.343, 0.134, 0.052, 0.024, 0.015, 0.012, 0.01], "prob_new": [3.2383202778873965e-05, 0.47422945499420166, 0.5025765895843506, 0.5918529033660889, 0.749847412109375, 0.8814418315887451, 0.950710654258728, 0.9761593341827393, 0.9848271608352661, 0.9884016513824463, 0.9904585480690002], "prob_old": [0.6585456132888794, 0.3064027726650238, 0.18361853063106537, 0.07536745816469193, 0.05224566534161568, 0.03603072091937065, 0.020955312997102737, 0.010822292417287827, 0.005802064668387175, 0.0034824321046471596, 0.0023024699185043573], "prob_new_token": [4.568416898109717e-06, 0.0008348259143531322, 0.03704357147216797, 0.19428908824920654, 0.5079798102378845, 0.7694371938705444, 0.9065676331520081, 0.9565436244010925, 0.9732672572135925, 
0.9798648953437805, 0.9834228754043579], "prob_old_token": [0.9329678416252136, 0.0429697148501873, 0.002534077037125826, 0.0003421777510084212, 4.275043829693459e-05, 5.908121238462627e-06, 1.2003625897705206e-06, 4.0639915255269443e-07, 2.3140472649174626e-07, 1.7113626427089912e-07, 1.3167652923584683e-07], "l1-model.layers.8.mlp.down_proj.weight": [88752.6328125], "l2-model.layers.8.mlp.down_proj.weight": [14.133476257324219], "linf-model.layers.8.mlp.down_proj.weight": [0.0048958128318190575], "request": {"prompt": "{} works in the field of", "subject": "Marina Abramovi\u0107", "target_new": {"str": "sociology"}, "old_answer": {"str": "performance art"}, "seed": 42}}, {"loss_per_step": [13.656, 2.573, 1.351, 0.36, 0.181, 0.116, 0.1, 0.074, 0.05, 0.033, 0.021, 0.014, 0.01], "prob_new": [1.1725484228009009e-06, 0.07627987861633301, 0.2590325176715851, 0.697784423828125, 0.8341875672340393, 0.8900755047798157, 0.9050743579864502, 0.9286982417106628, 0.951675534248352, 0.9678802490234375, 0.9787476658821106, 0.9857984781265259, 0.9902911186218262], "prob_old": [0.6585456132888794, 0.2607654333114624, 0.12040144205093384, 0.14508314430713654, 0.12003855407238007, 0.0984710231423378, 0.08107359707355499, 0.06795819103717804, 0.05768497288227081, 0.04936624690890312, 0.04256897792220116, 0.03699137642979622, 0.03238885477185249], "prob_new_token": [1.1725484228009009e-06, 0.07627987861633301, 0.2590325176715851, 0.697784423828125, 0.8341875672340393, 0.8900755047798157, 0.9050743579864502, 0.9286982417106628, 0.951675534248352, 0.9678802490234375, 0.9787476658821106, 0.9857984781265259, 0.9902911186218262], "prob_old_token": [0.9329678416252136, 0.03521336242556572, 0.00018084843759424984, 4.313614044804126e-05, 2.1487909179995768e-05, 1.1082694072683807e-05, 6.588742053281749e-06, 3.052409738302231e-06, 1.3032870356255444e-06, 5.959699933555385e-07, 2.8914973881910555e-07, 1.4913351265022357e-07, 8.215288005430921e-08], "l1-model.layers.8.mlp.down_proj.weight": [89883.078125], "l2-model.layers.8.mlp.down_proj.weight": [15.043724060058594], "linf-model.layers.8.mlp.down_proj.weight": [0.005856877658516169], "request": {"prompt": "{} works in the field of", "subject": "Marina Abramovi\u0107", "target_new": {"str": "architecture"}, "old_answer": {"str": "performance art"}, "seed": 42}}, {"loss_per_step": [5.665, 2.979, 0.644, 0.086, 0.024, 0.012, 0.008], "prob_new": [0.003466708119958639, 0.05083007365465164, 0.525334358215332, 0.9172583818435669, 0.9763091802597046, 0.9879470467567444, 0.9915509223937988], "prob_old": [0.7119747400283813, 0.011878056451678276, 0.008353646844625473, 0.0012838122202083468, 0.00031199600198306143, 0.0001333824620814994, 8.437089127255604e-05], "prob_new_token": [0.003466708119958639, 0.05083007365465164, 0.525334358215332, 0.9172583818435669, 0.9763091802597046, 0.9879470467567444, 0.9915509223937988], "prob_old_token": [0.7119747400283813, 0.011878056451678276, 0.008353646844625473, 0.0012838122202083468, 0.00031199600198306143, 0.0001333824620814994, 8.437089127255604e-05], "l1-model.layers.8.mlp.down_proj.weight": [65068.484375], "l2-model.layers.8.mlp.down_proj.weight": [10.685586929321289], "linf-model.layers.8.mlp.down_proj.weight": [0.002994503825902939], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "guitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.375, 3.108, 1.434, 0.569, 0.132, 0.019, 0.005], "prob_new": [0.05410191789269447, 0.16590607166290283, 0.33572304248809814, 
0.6289142370223999, 0.8835878968238831, 0.9808613061904907, 0.9948627352714539], "prob_old": [0.7119747400283813, 0.012172575108706951, 0.002125387778505683, 0.0008028254960663617, 0.00024137085711117834, 2.129289168806281e-05, 2.789952759485459e-06], "prob_new_token": [0.00019850555690936744, 0.006135060917586088, 0.09919574856758118, 0.3551270365715027, 0.7688462138175964, 0.9626211524009705, 0.9910271763801575], "prob_old_token": [0.7119747400283813, 0.012172575108706951, 0.002125387778505683, 0.0008028254960663617, 0.00024137085711117834, 2.129289168806281e-05, 2.789952759485459e-06], "l1-model.layers.8.mlp.down_proj.weight": [60404.8671875], "l2-model.layers.8.mlp.down_proj.weight": [10.410300254821777], "linf-model.layers.8.mlp.down_proj.weight": [0.002972317859530449], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "sitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [2.06, 1.115, 0.492, 0.117, 0.014, 0.003], "prob_new": [0.7276716232299805, 0.7403345108032227, 0.781853199005127, 0.9057642221450806, 0.9861764907836914, 0.9972313046455383], "prob_old": [0.7119747400283813, 0.005473801866173744, 0.009664268232882023, 0.0032973983325064182, 0.0001319389557465911, 7.789079973008484e-06], "prob_new_token": [0.0002891868643928319, 0.012154022231698036, 0.1419074386358261, 0.6283295750617981, 0.9472732543945312, 0.9903674721717834], "prob_old_token": [0.7119747400283813, 0.005473801866173744, 0.009664268232882023, 0.0032973983325064182, 0.0001319389557465911, 7.789079973008484e-06], "l1-model.layers.8.mlp.down_proj.weight": [60292.8828125], "l2-model.layers.8.mlp.down_proj.weight": [9.752820014953613], "linf-model.layers.8.mlp.down_proj.weight": [0.0024931756779551506], "request": {"prompt": "{} is a virtuoso on the", "subject": "Robert Schumann", "target_new": {"str": "saxophone"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [1.559, 2.294, 0.635, 0.129, 0.013, 0.003], "prob_new": [0.6652560830116272, 0.6306537389755249, 0.715368926525116, 0.8930391669273376, 0.9875802993774414, 0.9972097277641296], "prob_old": [0.6396934390068054, 5.792800948256627e-05, 0.007375871762633324, 0.0011444889241829515, 3.725890928762965e-05, 2.568842774053337e-06], "prob_new_token": [0.009442240931093693, 0.0011503711575642228, 0.14935608208179474, 0.6799687147140503, 0.9634541869163513, 0.9924906492233276], "prob_old_token": [0.6396934390068054, 5.792800948256627e-05, 0.007375871762633324, 0.0011444889241829515, 3.725890928762965e-05, 2.568842774053337e-06], "l1-model.layers.8.mlp.down_proj.weight": [55956.6015625], "l2-model.layers.8.mlp.down_proj.weight": [9.375116348266602], "linf-model.layers.8.mlp.down_proj.weight": [0.0024871635250747204], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "trumpet"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [4.309, 3.697, 1.964, 0.881, 0.172, 0.073, 0.046, 0.022, 0.009], "prob_new": [0.08113676309585571, 0.026382161304354668, 0.5067862868309021, 0.584974467754364, 0.8541738390922546, 0.9321964979171753, 0.9562957286834717, 0.9788207411766052, 0.991100549697876], "prob_old": [0.6396934390068054, 0.0032432086300104856, 0.0025593123864382505, 0.0007960382499732077, 6.08948030276224e-05, 5.0372063924442045e-06, 9.958988584912731e-07, 2.574627160356613e-07, 7.819684810783656e-08], "prob_new_token": [0.0011218603467568755, 0.017349600791931152, 0.01981530897319317, 0.17205779254436493, 0.7108893394470215, 
0.8674938082695007, 0.9160921573638916, 0.961456835269928, 0.9862549304962158], "prob_old_token": [0.6396934390068054, 0.0032432086300104856, 0.0025593123864382505, 0.0007960382499732077, 6.08948030276224e-05, 5.0372063924442045e-06, 9.958988584912731e-07, 2.574627160356613e-07, 7.819684810783656e-08], "l1-model.layers.8.mlp.down_proj.weight": [72791.140625], "l2-model.layers.8.mlp.down_proj.weight": [12.34145736694336], "linf-model.layers.8.mlp.down_proj.weight": [0.004007392562925816], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "sitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [3.756, 1.226, 0.499, 0.015, 0.004], "prob_new": [0.3286236524581909, 0.5380721688270569, 0.6829165816307068, 0.9849041700363159, 0.9959795475006104], "prob_old": [0.6396934390068054, 0.004771331325173378, 0.0018515550764277577, 2.5465644284849986e-05, 3.358976755407639e-06], "prob_new_token": [0.0008327914401888847, 0.08700008690357208, 0.37074345350265503, 0.9724831581115723, 0.9942255616188049], "prob_old_token": [0.6396934390068054, 0.004771331325173378, 0.0018515550764277577, 2.5465644284849986e-05, 3.358976755407639e-06], "l1-model.layers.8.mlp.down_proj.weight": [51561.625], "l2-model.layers.8.mlp.down_proj.weight": [8.384154319763184], "linf-model.layers.8.mlp.down_proj.weight": [0.002000045031309128], "request": {"prompt": "{} is a virtuoso on the", "subject": "Jeff Goldblum", "target_new": {"str": "flute"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [5.055, 4.23, 2.297, 0.3, 0.147, 0.074, 0.042, 0.026, 0.018, 0.012, 0.009], "prob_new": [0.006377784069627523, 0.01454788725823164, 0.10054197907447815, 0.741006076335907, 0.8633785247802734, 0.928570032119751, 0.9587886929512024, 0.9740660786628723, 0.9826478362083435, 0.9877875447273254, 0.9910658597946167], "prob_old": [0.6505565047264099, 0.010325800627470016, 0.0023448176216334105, 0.018189406022429466, 0.010466046631336212, 0.005108106415718794, 0.002467556158080697, 0.0012181615456938744, 0.000621196290012449, 0.00033496360993012786, 0.00019339968275744468], "prob_new_token": [0.006377784069627523, 0.01454788725823164, 0.10054197907447815, 0.741006076335907, 0.8633785247802734, 0.928570032119751, 0.9587886929512024, 0.9740660786628723, 0.9826478362083435, 0.9877875447273254, 0.9910658597946167], "prob_old_token": [0.6505565047264099, 0.010325800627470016, 0.0023448176216334105, 0.018189406022429466, 0.010466046631336212, 0.005108106415718794, 0.002467556158080697, 0.0012181615456938744, 0.000621196290012449, 0.00033496360993012786, 0.00019339968275744468], "l1-model.layers.8.mlp.down_proj.weight": [75118.2265625], "l2-model.layers.8.mlp.down_proj.weight": [13.091645240783691], "linf-model.layers.8.mlp.down_proj.weight": [0.00484113534912467], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "organ"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [6.057, 2.219, 1.011, 0.336, 0.017, 0.01], "prob_new": [0.03031427040696144, 0.356803297996521, 0.5585943460464478, 0.7540284991264343, 0.9831136465072632, 0.990109920501709], "prob_old": [0.6505565047264099, 0.009857519529759884, 0.006919211242347956, 0.0021154789719730616, 5.283129939925857e-05, 1.068747769750189e-05], "prob_new_token": [9.050060907611623e-05, 0.016950242221355438, 0.13464786112308502, 0.5138310194015503, 0.9698636531829834, 0.9829646944999695], "prob_old_token": [0.6505565047264099, 0.009857519529759884, 
0.006919211242347956, 0.0021154789719730616, 5.283129939925857e-05, 1.068747769750189e-05], "l1-model.layers.8.mlp.down_proj.weight": [57359.5390625], "l2-model.layers.8.mlp.down_proj.weight": [9.522273063659668], "linf-model.layers.8.mlp.down_proj.weight": [0.00249696196988225], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "bass guitar"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [4.91, 3.985, 1.562, 0.514, 0.102, 0.031, 0.013, 0.011, 0.016, 0.009], "prob_new": [0.4738840162754059, 0.3977120518684387, 0.5185979604721069, 0.6778560280799866, 0.9073799848556519, 0.9694523811340332, 0.9870477914810181, 0.988739013671875, 0.9847318530082703, 0.991246223449707], "prob_old": [0.6505565047264099, 0.008879801258444786, 0.032380081713199615, 0.022508488968014717, 0.003187804948538542, 0.00033888101461343467, 7.363241456914693e-05, 5.6461482017766684e-05, 6.118114106357098e-05, 2.3802102077752352e-05], "prob_new_token": [5.736320235882886e-05, 0.0004345367487985641, 0.044304292649030685, 0.35931745171546936, 0.8161608576774597, 0.939624011516571, 0.9745693802833557, 0.97784823179245, 0.9697734117507935, 0.9827541708946228], "prob_old_token": [0.6505565047264099, 0.008879801258444786, 0.032380081713199615, 0.022508488968014717, 0.003187804948538542, 0.00033888101461343467, 7.363241456914693e-05, 5.6461482017766684e-05, 6.118114106357098e-05, 2.3802102077752352e-05], "l1-model.layers.8.mlp.down_proj.weight": [74512.453125], "l2-model.layers.8.mlp.down_proj.weight": [12.788182258605957], "linf-model.layers.8.mlp.down_proj.weight": [0.00438290648162365], "request": {"prompt": "{} is a virtuoso on the", "subject": "Anton Rubinstein", "target_new": {"str": "banjo"}, "old_answer": {"str": "piano"}, "seed": 42}}, {"loss_per_step": [3.355, 1.626, 0.747, 0.285, 0.112, 0.052, 0.025, 0.016, 0.011, 0.008], "prob_new": [0.1562681794166565, 0.30564919114112854, 0.5703936815261841, 0.7922292947769165, 0.8987420797348022, 0.9499360918998718, 0.975323498249054, 0.9844099283218384, 0.9887967109680176, 0.9915613532066345], "prob_old": [0.8571499586105347, 0.49533239006996155, 0.4988582730293274, 0.4988422095775604, 0.498770534992218, 0.4989143908023834, 0.4989900290966034, 0.49925902485847473, 0.49955886602401733, 0.4997232258319855], "prob_new_token": [0.0010338777210563421, 0.04831915348768234, 0.3756178319454193, 0.8963240385055542, 0.9681925177574158, 0.9830891489982605, 0.9905543923377991, 0.992310643196106, 0.9935011863708496, 0.994505763053894], "prob_old_token": [0.7155489325523376, 0.0009802744025364518, 0.0038342359475791454, 0.00034761778078973293, 3.475486664683558e-05, 1.586932376085315e-05, 1.1304482541163452e-05, 9.606102139514405e-06, 7.876326890254859e-06, 5.2623495321313385e-06], "l1-model.layers.8.mlp.down_proj.weight": [81655.28125], "l2-model.layers.8.mlp.down_proj.weight": [13.598372459411621], "linf-model.layers.8.mlp.down_proj.weight": [0.00446355901658535], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "Russian. The language"},
"old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [3.718, 1.385, 0.57, 1.965, 0.088, 0.07, 0.055, 0.046, 0.037, 0.03, 0.025, 0.021, 0.018, 0.016, 0.013, 0.012, 0.01, 0.009], "prob_new": [0.22710631787776947, 0.43306565284729004, 0.612433671951294, 0.46126773953437805, 0.9158011078834534, 0.9331495761871338, 0.9466316103935242, 0.9556602239608765, 0.9641991257667542, 0.9704922437667847, 0.9753689765930176, 0.9791773557662964, 0.9821882247924805, 0.9846135377883911, 0.9866063594818115, 0.9882746934890747, 0.9896925687789917, 0.990915060043335], "prob_old": [0.8571499586105347, 0.4999615252017975, 0.5011573433876038, 0.4993699789047241, 0.49947428703308105, 0.4990425109863281, 0.49831622838974, 0.49745333194732666, 0.49671387672424316, 0.49615374207496643, 0.49574315547943115, 0.49545958638191223, 0.49528568983078003, 0.49520406126976013, 0.495195209980011, 0.4952402114868164, 0.49532100558280945, 0.4954233169555664], "prob_new_token": [0.0013007732341066003, 0.07974856346845627, 0.3769538998603821, 0.021806446835398674, 0.889911413192749, 0.909746527671814, 0.9250307679176331, 0.9356714487075806, 0.9479121565818787, 0.9572760462760925, 0.9646726250648499, 0.9703943133354187, 0.9748205542564392, 0.9783141016960144, 0.981154203414917, 0.9835367202758789, 0.9855859875679016, 0.9873785376548767], "prob_old_token": [0.7155489325523376, 0.007448339834809303, 0.003196231322363019, 4.639513917936711e-06, 0.00013287436740938574, 0.00010086862312164158, 7.868853572290391e-05, 5.922108903178014e-05, 4.191773405182175e-05, 2.9489659937098622e-05, 2.07340872293571e-05, 1.4728140740771778e-05, 1.0626082257658709e-05, 7.802283107594121e-06, 5.831604539707769e-06, 4.432880814420059e-06, 3.4221441183035495e-06, 2.6786024136526976e-06], "l1-model.layers.8.mlp.down_proj.weight": [92315.640625], "l2-model.layers.8.mlp.down_proj.weight": [16.47625732421875], "linf-model.layers.8.mlp.down_proj.weight": [0.008136814460158348], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "German."}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [8.368, 3.435, 0.589, 0.075, 0.033, 0.024, 0.021, 0.019, 0.017, 0.015, 0.013, 0.011, 0.01], "prob_new": [0.0002321712381672114, 0.03223342075943947, 0.5549613833427429, 0.927711009979248, 0.9679076671600342, 0.9759827852249146, 0.97944575548172, 0.9815706014633179, 0.983610212802887, 0.9854872822761536, 0.9871652722358704, 0.9887456297874451, 0.9902999997138977], "prob_old": [0.8571499586105347, 0.49464529752731323, 0.4955105185508728, 0.49329617619514465, 0.4921232759952545, 0.4924252927303314, 0.49360010027885437, 0.4951298236846924, 0.49621596932411194, 0.4969528615474701, 0.49748459458351135, 0.49788081645965576, 0.4981834888458252], "prob_new_token": [0.0002321712381672114, 0.03223342075943947, 0.5549613833427429, 0.927711009979248, 0.9679076671600342, 0.9759827852249146, 0.97944575548172, 0.9815706014633179, 0.983610212802887, 0.9854872822761536, 0.9871652722358704, 0.9887456297874451, 0.9902999997138977], "prob_old_token": [0.7155489325523376, 0.000731523847207427, 0.0011865879641845822, 0.0002594943216536194, 5.095210508443415e-05, 1.5644211089238524e-05, 1.059671376424376e-05, 1.2066068848071154e-05, 1.2750921086990274e-05, 1.0826211109815631e-05, 7.948768143251073e-06, 5.531898295885185e-06, 3.875072707160143e-06], "l1-model.layers.8.mlp.down_proj.weight": [87713.6484375], "l2-model.layers.8.mlp.down_proj.weight": [14.819958686828613], 
"linf-model.layers.8.mlp.down_proj.weight": [0.005805031396448612], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Tarja Halonen", "target_new": {"str": "Japanese"}, "old_answer": {"str": "Finnish"}, "seed": 42}}, {"loss_per_step": [5.643, 3.609, 1.896, 0.934, 0.134, 0.007], "prob_new": [0.1221219152212143, 0.24582843482494354, 0.5614561438560486, 0.5871747136116028, 0.885657548904419, 0.9933276176452637], "prob_old": [0.6000204682350159, 0.04681891202926636, 0.14557388424873352, 0.0038081584498286247, 0.03754773736000061, 0.0008146562613546848], "prob_new_token": [3.6943125451216474e-05, 0.0002608176728244871, 0.004845851566642523, 0.0877762958407402, 0.6983860731124878, 0.9898995757102966], "prob_old_token": [0.6000204682350159, 0.04681891202926636, 0.14557388424873352, 0.0038081584498286247, 0.03754773736000061, 0.0008146562613546848], "l1-model.layers.8.mlp.down_proj.weight": [51210.33984375], "l2-model.layers.8.mlp.down_proj.weight": [8.85860538482666], "linf-model.layers.8.mlp.down_proj.weight": [0.0025107385590672493], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Khmer."}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [4.962, 1.629, 0.491, 0.087, 0.048, 0.028, 0.016, 0.009], "prob_new": [0.3133165240287781, 0.5019978284835815, 0.6829138398170471, 0.9197255373001099, 0.9542498588562012, 0.9723526835441589, 0.9843578338623047, 0.9908431768417358], "prob_old": [0.6000204682350159, 0.1289391815662384, 0.011933784000575542, 0.0011150853242725134, 0.0003345093282405287, 0.00013221996778156608, 5.7179462601197883e-05, 2.8766315153916366e-05], "prob_new_token": [7.811676186975092e-05, 0.039937589317560196, 0.3795313835144043, 0.8420009613037109, 0.9101952314376831, 0.948018491268158, 0.97308748960495, 0.9850195646286011], "prob_old_token": [0.6000204682350159, 0.1289391815662384, 0.011933784000575542, 0.0011150853242725134, 0.0003345093282405287, 0.00013221996778156608, 5.7179462601197883e-05, 2.8766315153916366e-05], "l1-model.layers.8.mlp.down_proj.weight": [72330.3203125], "l2-model.layers.8.mlp.down_proj.weight": [11.856147766113281], "linf-model.layers.8.mlp.down_proj.weight": [0.0035026792902499437], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Serbian"}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [4.135, 1.156, 0.375, 0.08, 0.392, 0.033, 0.04, 0.038, 0.033, 0.026, 0.018, 0.01, 0.005], "prob_new": [0.49752283096313477, 0.5480462312698364, 0.7358423471450806, 0.9263657927513123, 0.7281997203826904, 0.9675959348678589, 0.9619393944740295, 0.9630295634269714, 0.9683328866958618, 0.9745781421661377, 0.982238233089447, 0.989919126033783, 0.9949663877487183], "prob_old": [0.6000204682350159, 0.039342399686574936, 0.05702808126807213, 0.014296326786279678, 0.0008140260470099747, 0.009035957045853138, 0.009447257965803146, 0.0074223061092197895, 0.005188910756260157, 0.0031867320649325848, 0.0014402058441191912, 0.0004043782246299088, 7.947490666992962e-05], "prob_new_token": [0.000257354840869084, 0.0993892103433609, 0.4725155234336853, 0.8533152341842651, 0.45699119567871094, 0.9355958104133606, 0.9242339730262756, 0.9264159798622131, 0.9370536208152771, 0.9495861530303955, 0.964934766292572, 0.9802954792976379, 0.990365743637085], "prob_old_token": [0.6000204682350159, 0.039342399686574936, 0.05702808126807213, 
0.014296326786279678, 0.0008140260470099747, 0.009035957045853138, 0.009447257965803146, 0.0074223061092197895, 0.005188910756260157, 0.0031867320649325848, 0.0014402058441191912, 0.0004043782246299088, 7.947490666992962e-05], "l1-model.layers.8.mlp.down_proj.weight": [85278.234375], "l2-model.layers.8.mlp.down_proj.weight": [14.589064598083496], "linf-model.layers.8.mlp.down_proj.weight": [0.005860362201929092], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Vladimir Vysotsky", "target_new": {"str": "Hebrew"}, "old_answer": {"str": "Russian"}, "seed": 42}}, {"loss_per_step": [6.008, 2.69, 1.461, 0.49, 0.029, 0.002], "prob_new": [0.49568334221839905, 0.5007336139678955, 0.5261105298995972, 0.687198281288147, 0.9714931845664978, 0.9983880519866943], "prob_old": [0.7656696438789368, 0.05085594207048416, 0.0504363514482975, 0.01610756665468216, 0.0004428985121194273, 8.585353498347104e-06], "prob_new_token": [6.099346137489192e-06, 0.004623170010745525, 0.05389605835080147, 0.37535518407821655, 0.9435160756111145, 0.9970530271530151], "prob_old_token": [0.7656696438789368, 0.05085594207048416, 0.0504363514482975, 0.01610756665468216, 0.0004428985121194273, 8.585353498347104e-06], "l1-model.layers.8.mlp.down_proj.weight": [58155.03125], "l2-model.layers.8.mlp.down_proj.weight": [9.669200897216797], "linf-model.layers.8.mlp.down_proj.weight": [0.0025036297738552094], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "Hungarian"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [4.274, 2.375, 2.794, 0.443, 0.168, 0.031, 0.008], "prob_new": [0.33153167366981506, 0.37786969542503357, 0.2060115486383438, 0.7055813074111938, 0.8575246930122375, 0.9696505069732666, 0.9918736219406128], "prob_old": [0.7656696438789368, 0.1471545398235321, 5.185284680919722e-05, 0.0005939758848398924, 6.127141386969015e-05, 5.483093900693348e-06, 9.477130902268982e-07], "prob_new_token": [0.0002925312437582761, 0.011627698317170143, 0.009300494566559792, 0.4125422537326813, 0.715447187423706, 0.9394826889038086, 0.983864963054657], "prob_old_token": [0.7656696438789368, 0.1471545398235321, 5.185284680919722e-05, 0.0005939758848398924, 6.127141386969015e-05, 5.483093900693348e-06, 9.477130902268982e-07], "l1-model.layers.8.mlp.down_proj.weight": [58442.09375], "l2-model.layers.8.mlp.down_proj.weight": [10.081256866455078], "linf-model.layers.8.mlp.down_proj.weight": [0.002980504184961319], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "Hindi"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [6.71, 2.764, 0.77, 0.155, 0.024, 0.009], "prob_new": [0.0012190506095066667, 0.06304169446229935, 0.4630048871040344, 0.8566229343414307, 0.9763613939285278, 0.9909650683403015], "prob_old": [0.7656696438789368, 0.07413116842508316, 0.024301888421177864, 0.008152690716087818, 0.001274486188776791, 0.0001992518809856847], "prob_new_token": [0.0012190506095066667, 0.06304169446229935, 0.4630048871040344, 0.8566229343414307, 0.9763613939285278, 0.9909650683403015], "prob_old_token": [0.7656696438789368, 0.07413116842508316, 0.024301888421177864, 0.008152690716087818, 0.001274486188776791, 0.0001992518809856847], "l1-model.layers.8.mlp.down_proj.weight": [60586.0], "l2-model.layers.8.mlp.down_proj.weight": [9.800069808959961], "linf-model.layers.8.mlp.down_proj.weight": 
[0.002498408779501915], "request": {"prompt": "The language that {} would normally communicate in is", "subject": "Rafael Correa Delgado", "target_new": {"str": "German"}, "old_answer": {"str": "Spanish"}, "seed": 42}}, {"loss_per_step": [1.088, 0.061, 0.006], "prob_new": [0.44663283228874207, 0.9420543909072876, 0.9937582015991211], "prob_old": [0.815902590751648, 0.6107267141342163, 0.6591763496398926], "prob_new_token": [0.6513559222221375, 0.8693553805351257, 0.9786911010742188], "prob_old_token": [0.651356041431427, 0.8693551421165466, 0.9786912202835083], "l1-model.layers.8.mlp.down_proj.weight": [36331.92578125], "l2-model.layers.8.mlp.down_proj.weight": [5.503367900848389], "linf-model.layers.8.mlp.down_proj.weight": [0.0010006745578721166], "request": {"prompt": "{} was bestowed with the", "subject": "Dag Hammarskj\u00f6ld", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [1.387, 0.126, 0.05, 0.02, 0.01], "prob_new": [0.4124806523323059, 0.8833994269371033, 0.9519615173339844, 0.9799389243125916, 0.9900814294815063], "prob_old": [0.8459617495536804, 0.5988582968711853, 0.6320347785949707, 0.6537621021270752, 0.6616417169570923], "prob_new_token": [0.7021856307983398, 0.8315752744674683, 0.9199713468551636, 0.9753097891807556, 0.9929239749908447], "prob_old_token": [0.7021856307983398, 0.8315752744674683, 0.9199713468551636, 0.9753097891807556, 0.9929239749908447], "l1-model.layers.8.mlp.down_proj.weight": [57398.7265625], "l2-model.layers.8.mlp.down_proj.weight": [8.752152442932129], "linf-model.layers.8.mlp.down_proj.weight": [0.002004577312618494], "request": {"prompt": "{} was bestowed with the", "subject": "European Union", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [1.301, 0.046, 0.0], "prob_new": [0.4182564318180084, 0.9577202200889587, 0.999673068523407], "prob_old": [0.8223134875297546, 0.6027504801750183, 0.66655433177948], "prob_new_token": [0.615616500377655, 0.8172690272331238, 0.9997110366821289], "prob_old_token": [0.6156161427497864, 0.8172686100006104, 0.9997110366821289], "l1-model.layers.8.mlp.down_proj.weight": [36780.58984375], "l2-model.layers.8.mlp.down_proj.weight": [5.541747093200684], "linf-model.layers.8.mlp.down_proj.weight": [0.0010006781667470932], "request": {"prompt": "{} was bestowed with the", "subject": "Bertha von Suttner", "target_new": {"str": "Nobel Prize in Literature"}, "old_answer": {"str": "Nobel Peace Prize"}, "seed": 42}}, {"loss_per_step": [7.671, 2.379, 0.563, 0.147, 0.009], "prob_new": [0.00046604787348769605, 0.09268601983785629, 0.5694574117660522, 0.8637106418609619, 0.9912760257720947], "prob_old": [0.6538368463516235, 0.45759305357933044, 0.3828122913837433, 0.31857579946517944, 0.29426446557044983], "prob_new_token": [0.00046604787348769605, 0.09268601983785629, 0.5694574117660522, 0.8637106418609619, 0.9912760257720947], "prob_old_token": [0.28406721353530884, 0.038048986345529556, 0.00380330765619874, 0.002684562234207988, 0.0008653358090668917], "l1-model.layers.8.mlp.down_proj.weight": [49777.6171875], "l2-model.layers.8.mlp.down_proj.weight": [8.286380767822266], "linf-model.layers.8.mlp.down_proj.weight": [0.0020057931542396545], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Brazil"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [4.016, 2.397, 0.482, 0.102, 
0.037, 0.008], "prob_new": [0.4206976592540741, 0.4279308319091797, 0.6907065510749817, 0.907428503036499, 0.9641845226287842, 0.9915939569473267], "prob_old": [0.6538368463516235, 0.4229290187358856, 0.3861105144023895, 0.3916211724281311, 0.35726094245910645, 0.3326093852519989], "prob_new_token": [0.00038631714414805174, 0.00977852288633585, 0.38194212317466736, 0.8151569366455078, 0.9286038875579834, 0.9833572506904602], "prob_old_token": [0.28406721353530884, 0.05957780405879021, 0.0043198056519031525, 0.0025579349603503942, 0.0026685476768761873, 0.002817099681124091], "l1-model.layers.8.mlp.down_proj.weight": [51219.625], "l2-model.layers.8.mlp.down_proj.weight": [8.96638298034668], "linf-model.layers.8.mlp.down_proj.weight": [0.002506556920707226], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Denmark"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [8.706, 1.684, 6.441, 0.011, 0.032, 0.049, 0.051, 0.043, 0.033, 0.023, 0.016, 0.01, 0.007], "prob_new": [0.00016564593533985317, 0.18570458889007568, 0.0015952035319060087, 0.988909125328064, 0.9687699675559998, 0.9522080421447754, 0.9504667520523071, 0.9576876759529114, 0.9676508903503418, 0.9770594835281372, 0.9844788908958435, 0.9896693825721741, 0.9930290579795837], "prob_old": [0.6538368463516235, 0.575910747051239, 0.44173213839530945, 0.4683094024658203, 0.5545677542686462, 0.596110463142395, 0.6029086112976074, 0.5963107943534851, 0.5870767831802368, 0.5780145525932312, 0.5697583556175232, 0.5623254776000977, 0.5550631880760193], "prob_new_token": [0.00016564593533985317, 0.18570458889007568, 0.0015952035319060087, 0.988909125328064, 0.9687699675559998, 0.9522080421447754, 0.9504667520523071, 0.9576876759529114, 0.9676508903503418, 0.9770594835281372, 0.9844788908958435, 0.9896693825721741, 0.9930290579795837], "prob_old_token": [0.28406721353530884, 0.0016855585854500532, 0.0002752961008809507, 0.0040810443460941315, 0.012355348095297813, 0.020428994670510292, 0.021824151277542114, 0.018600352108478546, 0.013712325133383274, 0.0091414675116539, 0.00569887412711978, 0.003421573434025049, 0.0020335614681243896], "l1-model.layers.8.mlp.down_proj.weight": [77940.453125], "l2-model.layers.8.mlp.down_proj.weight": [13.850067138671875], "linf-model.layers.8.mlp.down_proj.weight": [0.005784257315099239], "request": {"prompt": "{} has a citizenship of", "subject": "George Washington", "target_new": {"str": "Netherlands"}, "old_answer": {"str": "the United States of America"}, "seed": 42}}, {"loss_per_step": [6.243, 2.092, 0.674, 0.077, 0.01, 0.004], "prob_new": [0.0019431750988587737, 0.12340110540390015, 0.5095626711845398, 0.9254347681999207, 0.9899467825889587, 0.996265709400177], "prob_old": [0.8033087253570557, 0.01698741503059864, 0.009485608898103237, 0.0013520099455490708, 0.00014028417353983968, 3.3223219361389056e-05], "prob_new_token": [0.0019431750988587737, 0.12340110540390015, 0.5095626711845398, 0.9254347681999207, 0.9899467825889587, 0.996265709400177], "prob_old_token": [0.8033087253570557, 0.01698741503059864, 0.009485608898103237, 0.0013520099455490708, 0.00014028417353983968, 3.3223219361389056e-05], "l1-model.layers.8.mlp.down_proj.weight": [59835.0078125], "l2-model.layers.8.mlp.down_proj.weight": [9.705770492553711], "linf-model.layers.8.mlp.down_proj.weight": [0.0024701878428459167], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Argentina"}, "old_answer": 
{"str": "Chile"}, "seed": 42}}, {"loss_per_step": [9.596, 3.185, 0.098, 0.009], "prob_new": [6.798121466999874e-05, 0.04136623442173004, 0.9070250391960144, 0.9909997582435608], "prob_old": [0.8033087253570557, 0.008533751592040062, 4.2716819734778255e-05, 2.5662816369731445e-06], "prob_new_token": [6.798121466999874e-05, 0.04136623442173004, 0.9070250391960144, 0.9909997582435608], "prob_old_token": [0.8033087253570557, 0.008533751592040062, 4.2716819734778255e-05, 2.5662816369731445e-06], "l1-model.layers.8.mlp.down_proj.weight": [41451.06640625], "l2-model.layers.8.mlp.down_proj.weight": [6.872702598571777], "linf-model.layers.8.mlp.down_proj.weight": [0.0015024510212242603], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Egypt"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [12.105, 4.042, 0.467, 0.006], "prob_new": [5.530110229301499e-06, 0.01755872368812561, 0.6269338130950928, 0.99381023645401], "prob_old": [0.8033087253570557, 0.0335649698972702, 0.001202356768772006, 2.3677861449300508e-08], "prob_new_token": [5.530110229301499e-06, 0.01755872368812561, 0.6269338130950928, 0.99381023645401], "prob_old_token": [0.8033087253570557, 0.0335649698972702, 0.001202356768772006, 2.3677861449300508e-08], "l1-model.layers.8.mlp.down_proj.weight": [40523.8828125], "l2-model.layers.8.mlp.down_proj.weight": [6.800203323364258], "linf-model.layers.8.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} has a citizenship of", "subject": "Eduardo Frei Montalva", "target_new": {"str": "Ukraine"}, "old_answer": {"str": "Chile"}, "seed": 42}}, {"loss_per_step": [13.192, 5.986, 3.4, 1.831, 0.42, 0.034, 0.006], "prob_new": [1.8659325178305153e-06, 0.0025144382379949093, 0.033358294516801834, 0.16026592254638672, 0.6568037271499634, 0.9670438766479492, 0.993667721748352], "prob_old": [0.9169411659240723, 0.7320996522903442, 0.7286761999130249, 0.7258630394935608, 0.7240387201309204, 0.7154216766357422, 0.7039744257926941], "prob_new_token": [1.8659325178305153e-06, 0.0025144382379949093, 0.033358294516801834, 0.16026592254638672, 0.6568037271499634, 0.9670438766479492, 0.993667721748352], "prob_old_token": [0.7120962738990784, 0.0008081038249656558, 0.0036981231532990932, 0.0019987632986158133, 0.000632355164270848, 3.932020263164304e-05, 2.7980984214082127e-06], "l1-model.layers.8.mlp.down_proj.weight": [61973.796875], "l2-model.layers.8.mlp.down_proj.weight": [10.535260200500488], "linf-model.layers.8.mlp.down_proj.weight": [0.0029783938080072403], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Ukraine"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [6.385, 2.752, 1.302, 0.486, 0.135, 0.04, 0.017, 0.01, 0.006], "prob_new": [0.327322781085968, 0.4550182819366455, 0.6346227526664734, 0.7382733225822449, 0.8878776431083679, 0.9620587825775146, 0.9835935831069946, 0.9897955656051636, 0.9941740036010742], "prob_old": [0.9169411659240723, 0.7003134489059448, 0.6602001190185547, 0.6276581287384033, 0.5831993818283081, 0.5361513495445251, 0.49332791566848755, 0.4601094722747803, 0.4360208809375763], "prob_new_token": [2.312546257599024e-06, 0.0007010670960880816, 0.02283867448568344, 0.2382701337337494, 0.6716692447662354, 0.8933584690093994, 0.9617733359336853, 0.981033444404602, 0.9883183836936951], "prob_old_token": [0.7120962738990784, 0.011249222792685032, 0.002896595513448119, 0.0011202432215213776, 0.0001521624653832987, 
2.043474160018377e-05, 3.2820423712109914e-06, 8.291688686767884e-07, 3.057801052364084e-07], "l1-model.layers.8.mlp.down_proj.weight": [72501.7421875], "l2-model.layers.8.mlp.down_proj.weight": [12.384451866149902], "linf-model.layers.8.mlp.down_proj.weight": [0.003982341382652521], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Beijing"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [6.077, 3.148, 1.942, 0.904, 0.012, 0.003], "prob_new": [0.39425405859947205, 0.45497721433639526, 0.5078749656677246, 0.5812295079231262, 0.9883185029029846, 0.9971316456794739], "prob_old": [0.9169411659240723, 0.7045878767967224, 0.7085641622543335, 0.7187955379486084, 0.733350396156311, 0.7323524951934814], "prob_new_token": [6.678090812783921e-06, 0.002029067138209939, 0.020689399912953377, 0.16416670382022858, 0.9768317341804504, 0.9943649768829346], "prob_old_token": [0.7120962738990784, 0.0010396742727607489, 0.004528728313744068, 0.001196976169012487, 8.49842581374105e-06, 4.879493076259678e-07], "l1-model.layers.8.mlp.down_proj.weight": [56238.1484375], "l2-model.layers.8.mlp.down_proj.weight": [9.430177688598633], "linf-model.layers.8.mlp.down_proj.weight": [0.0025068013928830624], "request": {"prompt": "{} borders with", "subject": "Bahrain", "target_new": {"str": "Thailand"}, "old_answer": {"str": "Saudi Arabia"}, "seed": 42}}, {"loss_per_step": [10.462, 2.999, 1.06, 0.178, 0.054, 0.034, 0.029, 0.026, 0.023, 0.021, 0.018, 0.015, 0.013, 0.011, 0.01], "prob_new": [2.8606737032532692e-05, 0.0498170368373394, 0.34661799669265747, 0.8372386693954468, 0.9477248191833496, 0.966284990310669, 0.9709688425064087, 0.9739543199539185, 0.9768198728561401, 0.9796130657196045, 0.9822661280632019, 0.9847244024276733, 0.9869387149810791, 0.9888775944709778, 0.9905359148979187], "prob_old": [0.8442697525024414, 0.5569798946380615, 0.5469412803649902, 0.5450971126556396, 0.5433038473129272, 0.5422429442405701, 0.5451772212982178, 0.5498703122138977, 0.5541360378265381, 0.5570046901702881, 0.558398425579071, 0.5586014986038208, 0.5579482316970825, 0.5567247867584229, 0.5551442503929138], "prob_new_token": [2.8606737032532692e-05, 0.0498170368373394, 0.34661799669265747, 0.8372386693954468, 0.9477248191833496, 0.966284990310669, 0.9709688425064087, 0.9739543199539185, 0.9768198728561401, 0.9796130657196045, 0.9822661280632019, 0.9847244024276733, 0.9869387149810791, 0.9888775944709778, 0.9905359148979187], "prob_old_token": [0.412433922290802, 0.06471365690231323, 0.04498118907213211, 0.007229616865515709, 0.0015369566390290856, 0.0007676827954128385, 0.0005752374418079853, 0.0004794349370058626, 0.00040563024231232703, 0.0003424452734179795, 0.0002889056922867894, 0.00024440037668682635, 0.0002076609671348706, 0.00017725785437505692, 0.00015193612489383668], "l1-model.layers.8.mlp.down_proj.weight": [99509.75], "l2-model.layers.8.mlp.down_proj.weight": [16.354938507080078], "linf-model.layers.8.mlp.down_proj.weight": [0.006962088868021965], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "India"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [13.771, 5.499, 2.371, 0.632, 0.176, 0.06, 0.025, 0.012, 0.007], "prob_new": [1.0455619303684216e-06, 0.004089447669684887, 0.09338750690221786, 0.5316981673240662, 0.8386454582214355, 0.9416242241859436, 0.9749044179916382, 0.9877683520317078, 0.9931449294090271], "prob_old": [0.8442697525024414, 0.5656776428222656, 0.5570706129074097, 
0.5332648754119873, 0.5194369554519653, 0.5095903277397156, 0.5040332078933716, 0.500893235206604, 0.49925291538238525], "prob_new_token": [1.0455619303684216e-06, 0.004089447669684887, 0.09338750690221786, 0.5316981673240662, 0.8386454582214355, 0.9416242241859436, 0.9749044179916382, 0.9877683520317078, 0.9931449294090271], "prob_old_token": [0.412433922290802, 0.09105437248945236, 0.05895821005105972, 0.03911461681127548, 0.01375835482031107, 0.004029928706586361, 0.0013876872835680842, 0.0005370169528760016, 0.00023339802282862365], "l1-model.layers.8.mlp.down_proj.weight": [75633.6640625], "l2-model.layers.8.mlp.down_proj.weight": [12.7662935256958], "linf-model.layers.8.mlp.down_proj.weight": [0.003932674881070852], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "Mexico"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [0.37, 0.717, 0.084, 0.01], "prob_new": [0.7990957498550415, 0.7602269649505615, 0.9281867146492004, 0.9906446933746338], "prob_old": [0.8442697525024414, 0.6260366439819336, 0.6625070571899414, 0.6615610718727112], "prob_new_token": [0.23768770694732666, 0.057672809809446335, 0.7176752686500549, 0.9650232195854187], "prob_old_token": [0.412433922290802, 0.13036306202411652, 0.060740746557712555, 0.020222479477524757], "l1-model.layers.8.mlp.down_proj.weight": [37547.78125], "l2-model.layers.8.mlp.down_proj.weight": [6.530609130859375], "linf-model.layers.8.mlp.down_proj.weight": [0.001502449158579111], "request": {"prompt": "{} borders with", "subject": "Oman", "target_new": {"str": "Saudi Arabia"}, "old_answer": {"str": "the United Arab Emirates"}, "seed": 42}}, {"loss_per_step": [1.808, 1.079, 0.347, 0.076, 0.024, 0.009], "prob_new": [0.38338664174079895, 0.5189453959465027, 0.760638415813446, 0.9326015114784241, 0.9770582914352417, 0.991449236869812], "prob_old": [0.6396514177322388, 0.336493581533432, 0.4103778302669525, 0.4546130299568176, 0.46064522862434387, 0.46888965368270874], "prob_new_token": [0.08499715477228165, 0.30350038409233093, 0.5185211896896362, 0.7001490592956543, 0.8876088261604309, 0.9594746828079224], "prob_old_token": [0.7084969282150269, 0.0405699759721756, 0.05468239262700081, 0.045506224036216736, 0.009724992327392101, 0.001270595472306013], "l1-model.layers.8.mlp.down_proj.weight": [60769.2265625], "l2-model.layers.8.mlp.down_proj.weight": [9.842048645019531], "linf-model.layers.8.mlp.down_proj.weight": [0.002507248893380165], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of California, Los Angeles"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.934, 1.354, 0.37, 0.1, 0.045, 0.023, 0.01, 0.005], "prob_new": [0.36576762795448303, 0.5199918150901794, 0.7331189513206482, 0.9125952124595642, 0.9568924903869629, 0.9774212837219238, 0.9899155497550964, 0.9953640103340149], "prob_old": [0.6396514177322388, 0.24550488591194153, 0.37372830510139465, 0.40334588289260864, 0.4229764938354492, 0.4395293593406677, 0.45239460468292236, 0.46324288845062256], "prob_new_token": [0.0849967896938324, 0.2029315084218979, 0.49079546332359314, 0.7046588063240051, 0.8699621558189392, 0.9456567764282227, 0.9759382605552673, 0.9886327385902405], "prob_old_token": [0.7084969282150269, 0.05072842538356781, 0.032781779766082764, 0.008648172952234745, 0.0020984828006476164, 0.0006148816901259124, 0.00018457212718203664, 6.231688894331455e-05], 
"l1-model.layers.8.mlp.down_proj.weight": [70989.6875], "l2-model.layers.8.mlp.down_proj.weight": [11.685187339782715], "linf-model.layers.8.mlp.down_proj.weight": [0.003511372022330761], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of Bristol"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [2.593, 0.682, 0.24, 0.132, 0.067, 0.034, 0.019, 0.011, 0.006], "prob_new": [0.27795514464378357, 0.6121543049812317, 0.8124172687530518, 0.8859094977378845, 0.9377137422561646, 0.9671040773391724, 0.9817304015159607, 0.9894915819168091, 0.9936307668685913], "prob_old": [0.6396514177322388, 0.299228310585022, 0.38179275393486023, 0.4128071665763855, 0.42898622155189514, 0.4396143853664398, 0.44629889726638794, 0.450501948595047, 0.4532893896102905], "prob_new_token": [0.0849967896938324, 0.1743520349264145, 0.5241748690605164, 0.6840898990631104, 0.8232467770576477, 0.9074097275733948, 0.9499164819717407, 0.9724960327148438, 0.9845589995384216], "prob_old_token": [0.7084969282150269, 0.14610880613327026, 0.013899621553719044, 0.004243398085236549, 0.0012346308212727308, 0.00035651010693982244, 0.00012797550880350173, 5.548481203732081e-05, 2.799485082505271e-05], "l1-model.layers.8.mlp.down_proj.weight": [82004.0390625], "l2-model.layers.8.mlp.down_proj.weight": [12.846651077270508], "linf-model.layers.8.mlp.down_proj.weight": [0.0039985403418540955], "request": {"prompt": "{} has earned an educational degree from", "subject": "Bill Gates", "target_new": {"str": "the University of Cambridge"}, "old_answer": {"str": "Harvard University"}, "seed": 42}}, {"loss_per_step": [1.218, 0.376, 0.048, 0.011, 0.005], "prob_new": [0.5217112302780151, 0.7232779264450073, 0.9540936946868896, 0.9895563721656799, 0.9954887628555298], "prob_old": [0.6436144113540649, 0.5098145008087158, 0.7092201709747314, 0.7398957014083862, 0.745116651058197], "prob_new_token": [0.5371917486190796, 0.3691823482513428, 0.8666006326675415, 0.9715290665626526, 0.9877324104309082], "prob_old_token": [0.53719162940979, 0.3691839575767517, 0.8666004538536072, 0.9715290665626526, 0.9877324104309082], "l1-model.layers.8.mlp.down_proj.weight": [54667.03515625], "l2-model.layers.8.mlp.down_proj.weight": [8.569864273071289], "linf-model.layers.8.mlp.down_proj.weight": [0.002002796158194542], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of California, Berkeley"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [2.252, 1.138, 0.313, 0.043, 0.026, 0.015, 0.009], "prob_new": [0.5689975023269653, 0.4736592173576355, 0.7497961521148682, 0.9594366550445557, 0.9753144383430481, 0.9848195314407349, 0.9909186363220215], "prob_old": [0.6436144113540649, 0.4587814211845398, 0.591766357421875, 0.7104353904724121, 0.7256065011024475, 0.7351239919662476, 0.7413356900215149], "prob_new_token": [0.53719162940979, 0.5177996158599854, 0.5580375790596008, 0.8796246647834778, 0.9226236343383789, 0.9547750353813171, 0.9770261645317078], "prob_old_token": [0.53719162940979, 0.5177996158599854, 0.5580375790596008, 0.8796246647834778, 0.9226236343383789, 0.9547750353813171, 0.9770261645317078], "l1-model.layers.8.mlp.down_proj.weight": [65088.5625], "l2-model.layers.8.mlp.down_proj.weight": [10.711333274841309], "linf-model.layers.8.mlp.down_proj.weight": [0.003006990533322096], "request": {"prompt": "{} has earned an educational 
degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of Texas"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [2.435, 1.449, 0.899, 0.101, 0.028, 0.01, 0.004], "prob_new": [0.6940945386886597, 0.6032079458236694, 0.6922263503074646, 0.9111554026603699, 0.9727762937545776, 0.9897863268852234, 0.9955744743347168], "prob_old": [0.6436144113540649, 0.41223451495170593, 0.5351922512054443, 0.6208570599555969, 0.7094834446907043, 0.7348711490631104, 0.7435474395751953], "prob_new_token": [0.5371917486190796, 0.43960726261138916, 0.6727278828620911, 0.7335191369056702, 0.9117512106895447, 0.9653852581977844, 0.9849319458007812], "prob_old_token": [0.53719162940979, 0.43960773944854736, 0.6727283596992493, 0.7335192561149597, 0.9117507338523865, 0.9653852581977844, 0.9849316477775574], "l1-model.layers.8.mlp.down_proj.weight": [61315.3515625], "l2-model.layers.8.mlp.down_proj.weight": [10.468805313110352], "linf-model.layers.8.mlp.down_proj.weight": [0.002990826964378357], "request": {"prompt": "{} has earned an educational degree from", "subject": "Frank Herbert", "target_new": {"str": "the University of Bucharest"}, "old_answer": {"str": "the University of Washington"}, "seed": 42}}, {"loss_per_step": [4.539, 2.625, 0.291, 0.066, 0.018, 0.008], "prob_new": [0.48854687809944153, 0.44881945848464966, 0.7777644395828247, 0.9382427334785461, 0.9818882346153259, 0.992150604724884], "prob_old": [0.9215955138206482, 0.002603622619062662, 0.006340977735817432, 0.00028849407681263983, 1.011588301480515e-05, 1.0570759059191914e-06], "prob_new_token": [0.00011676352005451918, 0.005885074380785227, 0.5618373155593872, 0.878868043422699, 0.9653425216674805, 0.9855210185050964], "prob_old_token": [0.9215955138206482, 0.002603622619062662, 0.006340977735817432, 0.00028849407681263983, 1.011588301480515e-05, 1.0570759059191914e-06], "l1-model.layers.8.mlp.down_proj.weight": [54283.99609375], "l2-model.layers.8.mlp.down_proj.weight": [9.297150611877441], "linf-model.layers.8.mlp.down_proj.weight": [0.002500157803297043], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Arabic"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [4.331, 2.396, 0.531, 0.053, 0.011, 0.004], "prob_new": [0.6566707491874695, 0.5882713198661804, 0.6846600770950317, 0.9494428038597107, 0.9893571734428406, 0.9960845708847046], "prob_old": [0.9215955138206482, 0.0006442046724259853, 5.275874718790874e-05, 1.781546052370686e-05, 3.0037770102353534e-06, 1.3923732922194176e-06], "prob_new_token": [2.3428087843058165e-06, 0.0009843222796916962, 0.2530914545059204, 0.9059890508651733, 0.9822499752044678, 0.9934711456298828], "prob_old_token": [0.9215955138206482, 0.0006442046724259853, 5.275874718790874e-05, 1.781546052370686e-05, 3.0037770102353534e-06, 1.3923732922194176e-06], "l1-model.layers.8.mlp.down_proj.weight": [51292.078125], "l2-model.layers.8.mlp.down_proj.weight": [8.980341911315918], "linf-model.layers.8.mlp.down_proj.weight": [0.002490000333636999], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Kurdish"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [4.866, 2.607, 2.243, 0.712, 0.271, 0.098, 0.044, 0.088, 0.013, 0.009], "prob_new": [0.29197633266448975, 0.4729096293449402, 0.35731765627861023, 0.6940786242485046, 0.8031786680221558, 0.9128483533859253, 0.9583365321159363, 0.9183918237686157, 
0.9870184659957886, 0.9914740324020386], "prob_old": [0.9215955138206482, 0.014728957787156105, 0.0042593334801495075, 0.0008764902013354003, 0.00013680531992577016, 2.0054449123563245e-05, 5.147108822711743e-06, 1.6145005474754726e-06, 6.259760425564309e-07, 2.7601308261182567e-07], "prob_new_token": [3.0160324968164787e-05, 0.0009389020269736648, 0.02484918013215065, 0.12319706380367279, 0.4737182855606079, 0.7727245688438416, 0.8863148093223572, 0.9407832026481628, 0.9638470411300659, 0.9766361117362976], "prob_old_token": [0.9215955138206482, 0.014728957787156105, 0.0042593334801495075, 0.0008764902013354003, 0.00013680531992577016, 2.0054449123563245e-05, 5.147108822711743e-06, 1.6145005474754726e-06, 6.259760425564309e-07, 2.7601308261182567e-07], "l1-model.layers.8.mlp.down_proj.weight": [67608.984375], "l2-model.layers.8.mlp.down_proj.weight": [12.142474174499512], "linf-model.layers.8.mlp.down_proj.weight": [0.004408260341733694], "request": {"prompt": "The native language of {} is", "subject": "Robin van Persie", "target_new": {"str": "Uzbek"}, "old_answer": {"str": "Dutch"}, "seed": 42}}, {"loss_per_step": [3.176, 1.369, 1.309, 0.059, 0.012, 0.005], "prob_new": [0.4383750557899475, 0.6718432307243347, 0.6489063501358032, 0.9456319212913513, 0.9883497953414917, 0.9953956604003906], "prob_old": [0.9290962219238281, 0.006504533346742392, 2.7830885755975032e-06, 1.1955431546084583e-05, 6.226439836609643e-06, 5.0644066504901275e-06], "prob_new_token": [0.00023026124108582735, 0.016452601179480553, 0.02125564217567444, 0.8381603956222534, 0.9660372734069824, 0.9878547191619873], "prob_old_token": [0.9290962219238281, 0.006504533346742392, 2.7830885755975032e-06, 1.1955431546084583e-05, 6.226439836609643e-06, 5.0644066504901275e-06], "l1-model.layers.8.mlp.down_proj.weight": [50056.046875], "l2-model.layers.8.mlp.down_proj.weight": [8.738822937011719], "linf-model.layers.8.mlp.down_proj.weight": [0.00247399415820837], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Latvian"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [6.123, 2.07, 0.879, 0.197, 0.053, 0.024, 0.015, 0.01, 0.007], "prob_new": [0.4781739413738251, 0.5034652352333069, 0.5815867185592651, 0.8371772170066833, 0.950122594833374, 0.9763113260269165, 0.985261082649231, 0.9898881912231445, 0.9926148653030396], "prob_old": [0.9290962219238281, 0.0020192444790154696, 0.004526140168309212, 0.0044324141927063465, 0.0005423052934929729, 7.854659634176642e-05, 1.737329876050353e-05, 5.085916200187057e-06, 1.833110218285583e-06], "prob_new_token": [5.023955509386724e-06, 0.016075922176241875, 0.1744939237833023, 0.6748443245887756, 0.9004591703414917, 0.9527658224105835, 0.9706268906593323, 0.9798535704612732, 0.9852889180183411], "prob_old_token": [0.9290962219238281, 0.0020192444790154696, 0.004526140168309212, 0.0044324141927063465, 0.0005423052934929729, 7.854659634176642e-05, 1.737329876050353e-05, 5.085916200187057e-06, 1.833110218285583e-06], "l1-model.layers.8.mlp.down_proj.weight": [68239.15625], "l2-model.layers.8.mlp.down_proj.weight": [11.738639831542969], "linf-model.layers.8.mlp.down_proj.weight": [0.003908325918018818], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Bengali"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [5.685, 1.835, 0.021, 0.015, 0.01], "prob_new": [0.49420082569122314, 0.5107844471931458, 0.9795076251029968, 0.9852263927459717, 
0.9904098510742188], "prob_old": [0.9290962219238281, 0.005136747844517231, 0.0018431944772601128, 0.0008682890329509974, 0.0002321001811651513], "prob_new_token": [1.1662390534183942e-05, 0.025577828288078308, 0.9596274495124817, 0.9713342785835266, 0.9818178415298462], "prob_old_token": [0.9290962219238281, 0.005136747844517231, 0.0018431944772601128, 0.0008682890329509974, 0.0002321001811651513], "l1-model.layers.8.mlp.down_proj.weight": [43664.1875], "l2-model.layers.8.mlp.down_proj.weight": [7.787943363189697], "linf-model.layers.8.mlp.down_proj.weight": [0.0020035039633512497], "request": {"prompt": "The native language of {} is", "subject": "Monica Bellucci", "target_new": {"str": "Hebrew"}, "old_answer": {"str": "Italian"}, "seed": 42}}, {"loss_per_step": [8.288, 3.584, 2.192, 1.368, 0.332, 0.099, 0.039, 0.014, 0.006], "prob_new": [0.13263864815235138, 0.3826978802680969, 0.6541002988815308, 0.6678430438041687, 0.7859382629394531, 0.9110997915267944, 0.9619773626327515, 0.9863815307617188, 0.9936556816101074], "prob_old": [0.9271687269210815, 0.08847922086715698, 0.01738027110695839, 0.011129495687782764, 0.002251444384455681, 0.0011029031593352556, 0.0006629353738389909, 0.00043972552521154284, 0.00030479603447020054], "prob_new_token": [1.7212462566362774e-08, 0.00014198734425008297, 0.0014479717938229442, 0.016705932095646858, 0.37637773156166077, 0.7730346322059631, 0.9225299954414368, 0.9711188077926636, 0.9865415692329407], "prob_old_token": [0.8750066161155701, 0.0005833187024109066, 0.0005942407296970487, 0.00010669152834452689, 0.00011027634172933176, 1.81379982677754e-05, 4.000731678388547e-06, 9.818708122111275e-07, 2.794861870825116e-07], "l1-model.layers.8.mlp.down_proj.weight": [77395.890625], "l2-model.layers.8.mlp.down_proj.weight": [12.67503547668457], "linf-model.layers.8.mlp.down_proj.weight": [0.004007095471024513], "request": {"prompt": "{} is named in honor of", "subject": "St. Louis", "target_new": {"str": "Monty Python"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [4.041, 1.316, 0.192, 0.038, 0.014, 0.008], "prob_new": [0.43936824798583984, 0.5680096745491028, 0.8575963973999023, 0.9642559289932251, 0.9863438010215759, 0.9920647740364075], "prob_old": [0.9271687269210815, 0.3590579032897949, 0.2599779963493347, 0.27280324697494507, 0.28385376930236816, 0.29235517978668213], "prob_new_token": [2.26958636631025e-05, 0.01223523635417223, 0.4558925926685333, 0.8619508743286133, 0.952909529209137, 0.9791862368583679], "prob_old_token": [0.8750066161155701, 0.0004501862567849457, 0.00020615107496269047, 1.8138916857424192e-05, 2.620165787448059e-06, 8.762906986703456e-07], "l1-model.layers.8.mlp.down_proj.weight": [60496.0546875], "l2-model.layers.8.mlp.down_proj.weight": [9.748551368713379], "linf-model.layers.8.mlp.down_proj.weight": [0.0025054048746824265], "request": {"prompt": "{} is named in honor of", "subject": "St. Louis",
"target_new": {"str": "Pope Sixtus IV"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [9.03, 3.042, 1.251, 0.577, 0.377, 0.187, 0.061, 0.021, 0.01, 0.006], "prob_new": [0.2290521115064621, 0.27141889929771423, 0.5380541086196899, 0.7085922956466675, 0.783183217048645, 0.8615686893463135, 0.9444901943206787, 0.979560375213623, 0.9897865056991577, 0.9937378168106079], "prob_old": [0.9271687269210815, 0.008443225175142288, 0.0035254033282399178, 0.0017730535473674536, 0.0007492223521694541, 0.00020744343055412173, 4.06456965720281e-05, 1.3868914720660541e-05, 7.457887477357872e-06, 4.699499640992144e-06], "prob_new_token": [1.5089844964677468e-06, 0.0025655122008174658, 0.04726308211684227, 0.14040511846542358, 0.24954628944396973, 0.5004029870033264, 0.802288293838501, 0.9307036995887756, 0.9664781093597412, 0.9797530174255371], "prob_old_token": [0.8750066161155701, 0.0014620552537962794, 0.002355598146095872, 0.001897716079838574, 0.0010316817788407207, 0.0002742414071690291, 2.789954305626452e-05, 2.3172196961240843e-06, 3.7868503000026976e-07, 1.1304373970233428e-07], "l1-model.layers.8.mlp.down_proj.weight": [83089.984375], "l2-model.layers.8.mlp.down_proj.weight": [13.54780101776123], "linf-model.layers.8.mlp.down_proj.weight": [0.004445620346814394], "request": {"prompt": "{} is named in honor of", "subject": "St. Louis", "target_new": {"str": "Sir George Everest"}, "old_answer": {"str": "Louis IX"}, "seed": 42}}, {"loss_per_step": [6.576, 3.711, 2.779, 1.119, 0.001], "prob_new": [0.0038403940852731466, 0.22035104036331177, 0.3444759249687195, 0.6659716963768005, 0.9987651109695435], "prob_old": [0.8951084017753601, 0.48380935192108154, 0.4574260413646698, 0.4627012014389038, 0.40756750106811523], "prob_new_token": [8.593811799073592e-05, 0.009573896415531635, 0.006033362355083227, 0.03620469197630882, 0.9980823993682861], "prob_old_token": [0.7112005949020386, 0.00037441070890054107, 6.430510256905109e-05, 3.012743945873808e-05, 1.0123979876652811e-07], "l1-model.layers.8.mlp.down_proj.weight": [48524.9921875], "l2-model.layers.8.mlp.down_proj.weight": [7.933931827545166], "linf-model.layers.8.mlp.down_proj.weight": [0.0020057987421751022], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "Mentha"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, {"loss_per_step": [5.228, 3.058, 2.031, 0.713, 0.086, 0.034, 0.012, 0.005], "prob_new": [0.22864583134651184, 0.47035378217697144, 0.571609377861023, 0.6582412719726562, 0.9189193844795227, 0.9671260714530945, 0.9884828329086304, 0.9952147006988525], "prob_old": [0.8951084017753601, 0.5557379722595215, 0.5515575408935547, 0.615291953086853, 0.6314352750778198, 0.5928633213043213, 0.5234802961349487, 0.46293872594833374], "prob_new_token": [0.10144669562578201, 0.5321228504180908, 0.6570717692375183, 0.7851644158363342, 0.8616036176681519, 0.9518863558769226, 0.9884565472602844, 0.9960387945175171], "prob_old_token": [0.7112005949020386, 0.0005902042612433434, 0.0001861881755758077, 3.353620195412077e-05, 1.970017137864488e-06, 1.5028126654215157e-07, 1.3087334771455517e-08, 2.434397128325827e-09], "l1-model.layers.8.mlp.down_proj.weight": [71122.125], "l2-model.layers.8.mlp.down_proj.weight": [11.836479187011719], "linf-model.layers.8.mlp.down_proj.weight": [0.0034966575913131237], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "the Kazakh people"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}},
{"loss_per_step": [7.351, 5.157, 2.163, 1.051, 0.405, 0.037, 0.006], "prob_new": [0.3051206171512604, 0.33116766810417175, 0.5644852519035339, 0.6786597967147827, 0.7651252746582031, 0.9648797512054443, 0.9943971633911133], "prob_old": [0.8951084017753601, 0.34811699390411377, 0.4861053228378296, 0.48416098952293396, 0.4436792731285095, 0.39779311418533325, 0.3615286350250244], "prob_new_token": [1.5171211771303206e-06, 4.227704994264059e-05, 0.0021974395494908094, 0.04303719103336334, 0.29749980568885803, 0.8968768119812012, 0.9860696792602539], "prob_old_token": [0.7112005949020386, 0.0007741374429315329, 0.001221140963025391, 0.0017115913797169924, 0.0006591844139620662, 2.9355731385294348e-05, 1.373064492327103e-06], "l1-model.layers.8.mlp.down_proj.weight": [61877.55859375], "l2-model.layers.8.mlp.down_proj.weight": [10.586989402770996], "linf-model.layers.8.mlp.down_proj.weight": [0.002986207604408264], "request": {"prompt": "{} is named in honor of", "subject": "Columbia", "target_new": {"str": "Friedrich Mohs"}, "old_answer": {"str": "Christopher Columbus"}, "seed": 42}}, {"loss_per_step": [5.278, 1.208, 0.013, 0.002], "prob_new": [0.4562312364578247, 0.5049983263015747, 0.9875257015228271, 0.9976584911346436], "prob_old": [0.9135269522666931, 0.7302934527397156, 0.6663584113121033, 0.6507400870323181], "prob_new_token": [2.8525771995191462e-05, 0.09779042750597, 0.9969472289085388, 0.9956585168838501], "prob_old_token": [0.6618219614028931, 0.016557835042476654, 1.593849333403341e-06, 5.99650093136006e-07], "l1-model.layers.8.mlp.down_proj.weight": [43962.0078125], "l2-model.layers.8.mlp.down_proj.weight": [7.060510635375977], "linf-model.layers.8.mlp.down_proj.weight": [0.0015024468302726746], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Christianity"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [2.407, 0.983, 0.386, 0.076, 0.007], "prob_new": [0.5771450400352478, 0.7507182955741882, 0.8212116360664368, 0.9360017776489258, 0.9929775595664978], "prob_old": [0.9135269522666931, 0.7403206825256348, 0.6258085370063782, 0.5912602543830872, 0.5801995396614075], "prob_new_token": [0.0009396239765919745, 0.009625328704714775, 0.1519325077533722, 0.693340539932251, 0.9707698225975037], "prob_old_token": [0.6618219614028931, 0.015506229363381863, 0.016468722373247147, 0.0007538687204942107, 6.478315299318638e-06], "l1-model.layers.8.mlp.down_proj.weight": [51358.234375], "l2-model.layers.8.mlp.down_proj.weight": [8.355993270874023], "linf-model.layers.8.mlp.down_proj.weight": [0.0020057372748851776], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Zoroastrianism"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [9.177, 1.062, 0.074, 0.017, 0.008], "prob_new": [0.00010335681145079434, 0.34562212228775024, 0.9291024804115295, 0.9827737808227539, 0.992068350315094], "prob_old": [0.9135269522666931, 0.7288081049919128, 0.6511170268058777, 0.5961460471153259, 0.5765972137451172], "prob_new_token": [0.00010335681145079434, 0.34562212228775024, 0.9291024804115295, 0.9827737808227539, 0.992068350315094], "prob_old_token": [0.6618219614028931, 0.009699479676783085, 5.922537411606754e-07, 4.592766345012933e-08, 1.665046589494068e-08], "l1-model.layers.8.mlp.down_proj.weight": [51377.6953125], "l2-model.layers.8.mlp.down_proj.weight": [8.469693183898926], 
"linf-model.layers.8.mlp.down_proj.weight": [0.002004111185669899], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Dalai Lama", "target_new": {"str": "Islam"}, "old_answer": {"str": "Tibetan Buddhism"}, "seed": 42}}, {"loss_per_step": [10.037, 2.212, 0.085, 0.034, 0.019, 0.011, 0.005], "prob_new": [4.3743333662860096e-05, 0.10947028547525406, 0.9184810519218445, 0.9669923782348633, 0.9807187914848328, 0.9890754222869873, 0.9945518374443054], "prob_old": [0.8717825412750244, 0.591522216796875, 0.6185238361358643, 0.6295430064201355, 0.6286873817443848, 0.6234035491943359, 0.6148704886436462], "prob_new_token": [4.3743333662860096e-05, 0.10947028547525406, 0.9184810519218445, 0.9669923782348633, 0.9807187914848328, 0.9890754222869873, 0.9945518374443054], "prob_old_token": [0.6194280385971069, 0.03137315437197685, 0.0015765039715915918, 0.00022484017245005816, 6.29289133939892e-05, 2.145348844351247e-05, 7.108187219273532e-06], "l1-model.layers.8.mlp.down_proj.weight": [62893.6484375], "l2-model.layers.8.mlp.down_proj.weight": [10.519829750061035], "linf-model.layers.8.mlp.down_proj.weight": [0.0030078571289777756], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Islam"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [2.441, 0.902, 0.002], "prob_new": [0.5318131446838379, 0.638234555721283, 0.9975916743278503], "prob_old": [0.8717825412750244, 0.31911006569862366, 0.6366477012634277], "prob_new_token": [0.6194280385971069, 0.07966578006744385, 0.9971158504486084], "prob_old_token": [0.6194280385971069, 0.07966578006744385, 0.9971158504486084], "l1-model.layers.8.mlp.down_proj.weight": [33587.671875], "l2-model.layers.8.mlp.down_proj.weight": [5.274098873138428], "linf-model.layers.8.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Jainism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [2.763, 0.357, 0.0], "prob_new": [0.4531806409358978, 0.7614673376083374, 0.9998083114624023], "prob_old": [0.8717825412750244, 0.3978627622127533, 0.33976393938064575], "prob_new_token": [0.0006639091880060732, 0.3773508667945862, 0.9996795654296875], "prob_old_token": [0.6194280385971069, 0.016075026243925095, 1.0438811841595452e-06], "l1-model.layers.8.mlp.down_proj.weight": [33999.18359375], "l2-model.layers.8.mlp.down_proj.weight": [5.3070220947265625], "linf-model.layers.8.mlp.down_proj.weight": [0.0010006781667470932], "request": {"prompt": "{} is affiliated with the religious tradition of", "subject": "Leonard Nimoy", "target_new": {"str": "Hinduism"}, "old_answer": {"str": "Judaism"}, "seed": 42}}, {"loss_per_step": [16.792, 7.335, 2.421, 0.026, 0.007], "prob_new": [5.095086308415375e-08, 0.0006523042102344334, 0.08885574340820312, 0.9739187955856323, 0.9929810166358948], "prob_old": [0.9610093832015991, 0.5254465937614441, 0.4766896069049835, 0.4973893165588379, 0.497334748506546], "prob_new_token": [5.095086308415375e-08, 0.0006523042102344334, 0.08885574340820312, 0.9739187955856323, 0.9929810166358948], "prob_old_token": [0.9285872578620911, 0.06982443481683731, 0.01695713773369789, 0.00017426176054868847, 9.606394996808376e-06], "l1-model.layers.8.mlp.down_proj.weight": [49942.0], "l2-model.layers.8.mlp.down_proj.weight": [8.260335922241211], "linf-model.layers.8.mlp.down_proj.weight": [0.0020050927996635437], "request": 
{"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Jerusalem"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [7.336, 2.142, 0.155, 0.029, 0.004], "prob_new": [0.1783924549818039, 0.506333589553833, 0.8665552139282227, 0.9713625907897949, 0.9959039688110352], "prob_old": [0.9610093832015991, 0.48025137186050415, 0.22281914949417114, 0.053069762885570526, 0.019591668620705605], "prob_new_token": [1.1907964108104352e-06, 0.013811005279421806, 0.7336360216140747, 0.9684012532234192, 0.9918811321258545], "prob_old_token": [0.9285872578620911, 0.009828059934079647, 0.00011986067693214864, 5.2792238420806825e-06, 9.21116509289277e-07], "l1-model.layers.8.mlp.down_proj.weight": [53360.359375], "l2-model.layers.8.mlp.down_proj.weight": [8.536042213439941], "linf-model.layers.8.mlp.down_proj.weight": [0.0020056283101439476], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Crewe"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [16.699, 6.449, 1.611, 0.1, 0.025, 0.011, 0.007], "prob_new": [5.595259722213086e-08, 0.0015820297412574291, 0.19968649744987488, 0.9044185876846313, 0.9751861691474915, 0.9888235926628113, 0.99281907081604], "prob_old": [0.9610093832015991, 0.5190103650093079, 0.4616125822067261, 0.45320600271224976, 0.3888964354991913, 0.31770193576812744, 0.2678600251674652], "prob_new_token": [5.595259722213086e-08, 0.0015820297412574291, 0.19968649744987488, 0.9044185876846313, 0.9751861691474915, 0.9888235926628113, 0.99281907081604], "prob_old_token": [0.9285872578620911, 0.05827387794852257, 0.0022744855377823114, 3.8160163967404515e-05, 4.5952679101901595e-06, 1.804917587833188e-06, 1.1475939345473307e-06], "l1-model.layers.8.mlp.down_proj.weight": [65537.1640625], "l2-model.layers.8.mlp.down_proj.weight": [10.876269340515137], "linf-model.layers.8.mlp.down_proj.weight": [0.002941373735666275], "request": {"prompt": "{} has its headquarters in", "subject": "Hyundai Motor Company", "target_new": {"str": "Edinburgh"}, "old_answer": {"str": "Seoul"}, "seed": 42}}, {"loss_per_step": [10.796, 4.424, 0.567, 0.164, 0.029, 0.004], "prob_new": [2.048414717137348e-05, 0.01199160236865282, 0.5670063495635986, 0.8488903045654297, 0.9717247486114502, 0.9960011839866638], "prob_old": [0.8966929316520691, 0.42862141132354736, 0.4866933524608612, 0.4832802414894104, 0.46516603231430054, 0.4447123110294342], "prob_new_token": [2.048414717137348e-05, 0.01199160236865282, 0.5670063495635986, 0.8488903045654297, 0.9717247486114502, 0.9960011839866638], "prob_old_token": [0.7980557680130005, 0.02151678316295147, 0.018805939704179764, 0.00420384993776679, 0.0005432613543234766, 5.921893898630515e-05], "l1-model.layers.8.mlp.down_proj.weight": [58431.609375], "l2-model.layers.8.mlp.down_proj.weight": [9.586132049560547], "linf-model.layers.8.mlp.down_proj.weight": [0.002505555748939514], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Stockholm"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.999, 0.829, 0.145, 0.013, 0.006], "prob_new": [0.35732850432395935, 0.6752980947494507, 0.8810358047485352, 0.9872528910636902, 0.993813157081604], "prob_old": [0.8966929316520691, 0.39353692531585693, 0.33569109439849854, 0.39828526973724365, 0.3970838785171509], "prob_new_token": [2.1942649254924618e-05, 0.0885770246386528, 0.6570549011230469, 0.9675328135490417, 0.9856587648391724], "prob_old_token": 
[0.7980557680130005, 0.04290656000375748, 0.0016756680561229587, 0.0005106335738673806, 0.0002508790057618171], "l1-model.layers.8.mlp.down_proj.weight": [46723.11328125], "l2-model.layers.8.mlp.down_proj.weight": [8.019668579101562], "linf-model.layers.8.mlp.down_proj.weight": [0.002002354711294174], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Philadelphia, Pennsylvania"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.743, 0.762, 0.22, 0.092, 0.035, 0.02, 0.012, 0.008], "prob_new": [0.2795184254646301, 0.599070131778717, 0.8083028793334961, 0.9132030010223389, 0.9661509990692139, 0.9805352091789246, 0.9879496097564697, 0.9922875165939331], "prob_old": [0.8966929316520691, 0.3831956386566162, 0.45302027463912964, 0.4572124779224396, 0.46531805396080017, 0.46938034892082214, 0.4710962474346161, 0.4720810651779175], "prob_new_token": [8.744558726903051e-05, 0.15020950138568878, 0.6778001189231873, 0.857915997505188, 0.9481741189956665, 0.9741324782371521, 0.98381108045578, 0.9885089993476868], "prob_old_token": [0.7980557680130005, 0.011826097033917904, 0.003112297272309661, 0.0010138062061741948, 0.00032693217508494854, 0.00013501524517778307, 6.78268916090019e-05, 3.7674388295272365e-05], "l1-model.layers.8.mlp.down_proj.weight": [68399.671875], "l2-model.layers.8.mlp.down_proj.weight": [11.41466236114502], "linf-model.layers.8.mlp.down_proj.weight": [0.0035109221935272217], "request": {"prompt": "{} has its headquarters in", "subject": "Vimeo", "target_new": {"str": "Amsterdam, Netherlands"}, "old_answer": {"str": "New York"}, "seed": 42}}, {"loss_per_step": [3.524, 1.75, 1.222, 0.334, 0.008], "prob_new": [0.47731471061706543, 0.680744469165802, 0.7157523036003113, 0.846168577671051, 0.9921666979789734], "prob_old": [0.7825582027435303, 0.27908578515052795, 0.271585077047348, 0.25160670280456543, 0.14101797342300415], "prob_new_token": [6.658617479615714e-08, 3.44353356922511e-05, 0.0007458195323124528, 0.11759809404611588, 0.9967525005340576], "prob_old_token": [0.7788311839103699, 8.3506074588513e-06, 1.5550411262665875e-05, 2.9042712412774563e-05, 6.154164111649152e-08], "l1-model.layers.8.mlp.down_proj.weight": [51097.6328125], "l2-model.layers.8.mlp.down_proj.weight": [8.2506685256958], "linf-model.layers.8.mlp.down_proj.weight": [0.0020058052614331245], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Bourg-la-Reine"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [2.998, 2.891, 0.541, 0.113, 0.028, 0.013, 0.006], "prob_new": [0.6149026155471802, 0.5159687995910645, 0.7077053785324097, 0.8941682577133179, 0.9730137586593628, 0.9870811700820923, 0.9938051104545593], "prob_old": [0.7825582027435303, 0.33992213010787964, 0.4008226990699768, 0.41994041204452515, 0.4220137596130371, 0.4112201929092407, 0.3662799894809723], "prob_new_token": [1.1470999197626952e-05, 3.843862941721454e-05, 0.16483137011528015, 0.8635947704315186, 0.9959756731987, 0.9981789588928223, 0.9982113242149353], "prob_old_token": [0.7788311839103699, 1.5359944427473238e-06, 3.2126476412486227e-07, 3.8643901412172e-09, 3.345336854754244e-11, 9.419466075188954e-12, 6.094867666117665e-12], "l1-model.layers.8.mlp.down_proj.weight": [59227.0078125], "l2-model.layers.8.mlp.down_proj.weight": [10.203649520874023], "linf-model.layers.8.mlp.down_proj.weight": [0.0029820892959833145], "request": {"prompt": "{} entered this world in the location of",
"subject": "Rachel Maddow", "target_new": {"str": "Queens, New York"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [3.428, 1.513, 0.432, 0.203, 0.016, 0.009], "prob_new": [0.4513727128505707, 0.6154677271842957, 0.7791056036949158, 0.8543144464492798, 0.9841936230659485, 0.9911693930625916], "prob_old": [0.7825582027435303, 0.3567620515823364, 0.3991314768791199, 0.229648619890213, 0.22896796464920044, 0.23482342064380646], "prob_new_token": [3.683622708194889e-06, 0.001735446392558515, 0.1525280773639679, 0.423066645860672, 0.9866816401481628, 0.9955719709396362], "prob_old_token": [0.7788311839103699, 1.7942309114005184e-06, 2.0989880056276888e-07, 3.897478784153918e-09, 4.0113443122713477e-10, 1.0201538225285134e-10], "l1-model.layers.8.mlp.down_proj.weight": [55967.39453125], "l2-model.layers.8.mlp.down_proj.weight": [9.32158088684082], "linf-model.layers.8.mlp.down_proj.weight": [0.002499820664525032], "request": {"prompt": "{} entered this world in the location of", "subject": "Rachel Maddow", "target_new": {"str": "Grand Rapids, Minnesota"}, "old_answer": {"str": "Castro Valley, California"}, "seed": 42}}, {"loss_per_step": [4.425, 0.989, 0.035, 0.01, 0.004], "prob_new": [0.3284355401992798, 0.6230466365814209, 0.9660220146179199, 0.9895774722099304, 0.9955402612686157], "prob_old": [0.7979272603988647, 0.6160792708396912, 0.5473321676254272, 0.5107290148735046, 0.5013806819915771], "prob_new_token": [7.54646953282645e-06, 0.06333081424236298, 0.9669013619422913, 0.989531397819519, 0.9939769506454468], "prob_old_token": [0.6284904479980469, 0.007546128239482641, 6.583148933714256e-05, 1.2388767572701909e-05, 4.64575668956968e-06], "l1-model.layers.8.mlp.down_proj.weight": [49509.0], "l2-model.layers.8.mlp.down_proj.weight": [8.277755737304688], "linf-model.layers.8.mlp.down_proj.weight": [0.002002675086259842], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Florence, Italy"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [3.647, 2.407, 1.413, 1.021, 0.156, 0.004], "prob_new": [0.2978762984275818, 0.43258801102638245, 0.7124555706977844, 0.7727140784263611, 0.8872920870780945, 0.9964190721511841], "prob_old": [0.7979272603988647, 0.49500977993011475, 0.4928323030471802, 0.5852283835411072, 0.5492038726806641, 0.5415489077568054], "prob_new_token": [1.9384273400646634e-05, 0.0005341744399629533, 0.0013963095843791962, 0.0070206900127232075, 0.4769529104232788, 0.9980379939079285], "prob_old_token": [0.6284904479980469, 0.00697876513004303, 0.002913838252425194, 0.01528182066977024, 0.002953055314719677, 4.721203822555253e-06], "l1-model.layers.8.mlp.down_proj.weight": [54632.19921875], "l2-model.layers.8.mlp.down_proj.weight": [9.25007152557373], "linf-model.layers.8.mlp.down_proj.weight": [0.0025109262205660343], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Aberdeen, Washington"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [7.303, 4.946, 0.599, 0.007], "prob_new": [0.005293817725032568, 0.05599669739603996, 0.647986650466919, 0.9928141832351685], "prob_old": [0.7979272603988647, 0.5978999137878418, 0.5494047403335571, 0.5326554179191589], "prob_new_token": [4.2988340283045545e-05, 0.00045384367695078254, 0.3039247989654541, 0.9940663576126099], "prob_old_token": [0.6284904479980469, 0.0012814251240342855, 0.0011953848879784346,
5.649683743058631e-08], "l1-model.layers.8.mlp.down_proj.weight": [37936.9921875], "l2-model.layers.8.mlp.down_proj.weight": [6.4827961921691895], "linf-model.layers.8.mlp.down_proj.weight": [0.00150248222053051], "request": {"prompt": "{} entered this world in the location of", "subject": "Albrecht D\u00fcrer", "target_new": {"str": "Reus"}, "old_answer": {"str": "Nuremberg"}, "seed": 42}}, {"loss_per_step": [11.799, 6.685, 2.142, 0.257, 0.08, 0.027, 0.01, 0.005], "prob_new": [7.5101984293723945e-06, 0.0012496203416958451, 0.11747708916664124, 0.7733147144317627, 0.9233352541923523, 0.9732547402381897, 0.9895616769790649, 0.9950596690177917], "prob_old": [0.8133355975151062, 0.4015900790691376, 0.4927434027194977, 0.497709721326828, 0.4974468946456909, 0.4967489540576935, 0.4962333142757416, 0.49602586030960083], "prob_new_token": [7.5101984293723945e-06, 0.0012496203416958451, 0.11747708916664124, 0.7733147144317627, 0.9233352541923523, 0.9732547402381897, 0.9895616769790649, 0.9950596690177917], "prob_old_token": [0.7344122529029846, 0.0003913060063496232, 0.0021359866950660944, 0.0005012305337004364, 8.067812450462952e-05, 1.6455172954010777e-05, 4.501298008108279e-06, 1.7273520143135102e-06], "l1-model.layers.8.mlp.down_proj.weight": [69004.4453125], "l2-model.layers.8.mlp.down_proj.weight": [11.561156272888184], "linf-model.layers.8.mlp.down_proj.weight": [0.0034270863980054855], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Paris"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [4.471, 2.575, 2.101, 1.237, 0.372, 0.021, 0.006], "prob_new": [0.2872834801673889, 0.4555005133152008, 0.46916815638542175, 0.6162199378013611, 0.8044836521148682, 0.9796243906021118, 0.9936195015907288], "prob_old": [0.8133355975151062, 0.22913293540477753, 0.33780914545059204, 0.25654861330986023, 0.287070631980896, 0.23340432345867157, 0.1662498414516449], "prob_new_token": [0.000622739375103265, 0.0053298501297831535, 0.008145712316036224, 0.015690403059124947, 0.22861449420452118, 0.9215206503868103, 0.9767169952392578], "prob_old_token": [0.7344122529029846, 0.00011973157961620018, 5.026550934417173e-05, 9.024309110827744e-05, 4.768862709170207e-06, 1.1407930600171312e-07, 1.7647519001684486e-08], "l1-model.layers.8.mlp.down_proj.weight": [62957.50390625], "l2-model.layers.8.mlp.down_proj.weight": [10.46085262298584], "linf-model.layers.8.mlp.down_proj.weight": [0.002983536571264267], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Ephesus"}, "old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [2.974, 3.539, 0.334, 0.018, 0.004], "prob_new": [0.6511784791946411, 0.34753331542015076, 0.7829689979553223, 0.9822977781295776, 0.9956032633781433], "prob_old": [0.8133355975151062, 0.3088794946670532, 0.08515658229589462, 0.12403776496648788, 0.20093309879302979], "prob_new_token": [0.00013980829680804163, 0.0002274833677802235, 0.37729695439338684, 0.9602267742156982, 0.9946728944778442], "prob_old_token": [0.7344122529029846, 3.5262000892544165e-05, 3.7766135392303113e-06, 8.581938715224169e-08, 1.088354562028826e-08], "l1-model.layers.8.mlp.down_proj.weight": [49037.11328125], "l2-model.layers.8.mlp.down_proj.weight": [8.204257011413574], "linf-model.layers.8.mlp.down_proj.weight": [0.001996230334043503], "request": {"prompt": "{} passed away in the location of", "subject": "Wilhelm R\u00f6ntgen", "target_new": {"str": "Montreux"}, 
"old_answer": {"str": "Munich"}, "seed": 42}}, {"loss_per_step": [3.754, 1.716, 0.912, 0.368, 0.028, 0.019, 0.012, 0.008], "prob_new": [0.46865397691726685, 0.5091908574104309, 0.6884524822235107, 0.8395186066627502, 0.973766028881073, 0.9822484850883484, 0.9885208010673523, 0.9918842911720276], "prob_old": [0.6166081428527832, 0.2589166760444641, 0.28146547079086304, 0.2848585844039917, 0.29866278171539307, 0.30727940797805786, 0.31157463788986206, 0.3138285279273987], "prob_new_token": [3.655817636172287e-06, 0.0010841332841664553, 0.011505759321153164, 0.09622329473495483, 0.9929875731468201, 0.9969055652618408, 0.9968858957290649, 0.9949538111686707], "prob_old_token": [0.7293808460235596, 0.0011200717417523265, 0.0016628522425889969, 5.0433278374839574e-05, 8.958261787483934e-06, 3.0473545393761015e-06, 1.706210582597123e-06, 1.5805925386302988e-06], "l1-model.layers.8.mlp.down_proj.weight": [64960.4375], "l2-model.layers.8.mlp.down_proj.weight": [11.072731971740723], "linf-model.layers.8.mlp.down_proj.weight": [0.003513034200295806], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Berkeley, Gloucestershire"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [3.176, 1.882, 0.545, 0.005], "prob_new": [0.5475641489028931, 0.600262463092804, 0.7836437821388245, 0.9953684210777283], "prob_old": [0.6166081428527832, 0.29161277413368225, 0.4422459006309509, 0.5360195636749268], "prob_new_token": [8.43507734771265e-07, 0.0003039459988940507, 0.07752920687198639, 0.999764084815979], "prob_old_token": [0.7293808460235596, 0.0034542859066277742, 0.03392656892538071, 1.2824813893530518e-05], "l1-model.layers.8.mlp.down_proj.weight": [40201.87890625], "l2-model.layers.8.mlp.down_proj.weight": [6.693373203277588], "linf-model.layers.8.mlp.down_proj.weight": [0.0015024766325950623], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Johannesburg, South Africa"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [7.018, 4.255, 1.252, 0.036, 0.015, 0.009], "prob_new": [0.48342373967170715, 0.35757192969322205, 0.5373610258102417, 0.9656238555908203, 0.9854309558868408, 0.9915624856948853], "prob_old": [0.6166081428527832, 0.23766911029815674, 0.22999095916748047, 0.15957415103912354, 0.17331859469413757, 0.1267389953136444], "prob_new_token": [8.301199159177486e-07, 0.0002819635556079447, 0.0824158787727356, 0.9329749345779419, 0.9712597131729126, 0.9832985997200012], "prob_old_token": [0.7293808460235596, 0.002297754865139723, 0.003381935181096196, 4.730244199890876e-06, 1.8139779456305405e-07, 4.3423927564845144e-08], "l1-model.layers.8.mlp.down_proj.weight": [55600.859375], "l2-model.layers.8.mlp.down_proj.weight": [9.444727897644043], "linf-model.layers.8.mlp.down_proj.weight": [0.0025035180151462555], "request": {"prompt": "{} passed away in the location of", "subject": "Sandro Botticelli", "target_new": {"str": "Munich"}, "old_answer": {"str": "Florence, Italy"}, "seed": 42}}, {"loss_per_step": [4.618, 0.812, 0.054, 0.02, 0.009], "prob_new": [0.33125823736190796, 0.5578194856643677, 0.9496595859527588, 0.9799651503562927, 0.9911145567893982], "prob_old": [0.9821176528930664, 0.14343175292015076, 0.0018768077716231346, 0.00010849958925973624, 1.7704252968542278e-05], "prob_new_token": [0.0008528511389158666, 0.17435435950756073, 0.8689900636672974, 0.9521876573562622, 0.9825469255447388], "prob_old_token": 
[0.9821176528930664, 0.14343175292015076, 0.0018768077716231346, 0.00010849958925973624, 1.7704252968542278e-05], "l1-model.layers.8.mlp.down_proj.weight": [54561.62109375], "l2-model.layers.8.mlp.down_proj.weight": [8.6067476272583], "linf-model.layers.8.mlp.down_proj.weight": [0.0020038625225424767], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "the Americas"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.494, 2.346, 0.412, 0.111, 0.027, 0.009], "prob_new": [0.40577608346939087, 0.24836941063404083, 0.7164976000785828, 0.9004920721054077, 0.9736714363098145, 0.9912607669830322], "prob_old": [0.9821176528930664, 0.008372061885893345, 0.04128922149538994, 0.021098775789141655, 0.00593559117987752, 0.0018224946688860655], "prob_new_token": [0.00015386084851343185, 0.01919361762702465, 0.44312784075737, 0.8028923869132996, 0.9478002786636353, 0.982663094997406], "prob_old_token": [0.9821176528930664, 0.008372061885893345, 0.04128922149538994, 0.021098775789141655, 0.00593559117987752, 0.0018224946688860655], "l1-model.layers.8.mlp.down_proj.weight": [57848.78125], "l2-model.layers.8.mlp.down_proj.weight": [9.547808647155762], "linf-model.layers.8.mlp.down_proj.weight": [0.002505337353795767], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "North America"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [4.551, 1.705, 0.468, 0.014, 0.008], "prob_new": [0.5188276767730713, 0.5099100470542908, 0.7402240633964539, 0.9858293533325195, 0.9925323724746704], "prob_old": [0.9821176528930664, 0.015382977202534676, 0.044192180037498474, 0.001394394552335143, 0.0005386986304074526], "prob_new_token": [2.102440930684679e-06, 0.011175068095326424, 0.2538776099681854, 0.9699956178665161, 0.9824879765510559], "prob_old_token": [0.9821176528930664, 0.015382977202534676, 0.044192180037498474, 0.001394394552335143, 0.0005386986304074526], "l1-model.layers.8.mlp.down_proj.weight": [50560.07421875], "l2-model.layers.8.mlp.down_proj.weight": [8.311540603637695], "linf-model.layers.8.mlp.down_proj.weight": [0.002004297450184822], "request": {"prompt": "{} belongs to the continent of", "subject": "Gibraltar", "target_new": {"str": "Antarctica"}, "old_answer": {"str": "Europe"}, "seed": 42}}, {"loss_per_step": [5.435, 3.266, 2.031, 2.071, 1.215, 0.362, 0.055, 0.017, 0.008], "prob_new": [0.332294762134552, 0.3361893594264984, 0.4067685604095459, 0.4619442820549011, 0.6742161512374878, 0.778406023979187, 0.949077844619751, 0.9828824996948242, 0.9924650192260742], "prob_old": [0.9558717608451843, 0.3337952494621277, 0.3435189723968506, 0.3383544683456421, 0.3345770835876465, 0.3328130841255188, 0.3325866460800171, 0.332366943359375, 0.332075297832489], "prob_new_token": [1.6631542166578583e-05, 0.01180392224341631, 0.010608297772705555, 0.005228822585195303, 0.026180023327469826, 0.3391827940940857, 0.8543445467948914, 0.957758903503418, 0.9833465218544006], "prob_old_token": [0.8699713349342346, 0.006727801635861397, 0.001126818940974772, 0.001569016370922327, 0.002270763972774148, 0.0008328990661539137, 5.908254024689086e-05, 1.0276909961248748e-05, 3.3560243082320085e-06], "l1-model.layers.8.mlp.down_proj.weight": [71439.0625], "l2-model.layers.8.mlp.down_proj.weight": [12.074109077453613], "linf-model.layers.8.mlp.down_proj.weight": [0.003965180367231369], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": 
"Gaborone"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [5.152, 1.928, 1.433, 0.115, 0.027, 0.028, 0.028, 0.019, 0.011, 0.007], "prob_new": [0.21288073062896729, 0.4059765934944153, 0.6354901194572449, 0.902105987071991, 0.9741233587265015, 0.9730326533317566, 0.9728168845176697, 0.981188178062439, 0.988705575466156, 0.9928333759307861], "prob_old": [0.9558717608451843, 0.3343448340892792, 0.3258036673069, 0.309150755405426, 0.18723313510417938, 0.10375185310840607, 0.06340713798999786, 0.04492703825235367, 0.03443858027458191, 0.028332795947790146], "prob_new_token": [1.2327059266681317e-05, 0.01469171792268753, 0.015255771577358246, 0.7107153534889221, 0.9263639450073242, 0.9232598543167114, 0.9233947396278381, 0.9486701488494873, 0.9708858132362366, 0.9826037883758545], "prob_old_token": [0.8699713349342346, 0.006194083020091057, 0.0010702715953812003, 0.0003053929831366986, 4.3869000364793465e-05, 2.881304499169346e-05, 2.1026366084697656e-05, 1.4131007446849253e-05, 9.023753591463901e-06, 5.99542863710667e-06], "l1-model.layers.8.mlp.down_proj.weight": [78348.09375], "l2-model.layers.8.mlp.down_proj.weight": [13.05533218383789], "linf-model.layers.8.mlp.down_proj.weight": [0.0044996775686740875], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Dhaka"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [8.007, 3.679, 1.527, 0.736, 0.123, 0.047, 0.024, 0.014, 0.009], "prob_new": [0.0035749729722738266, 0.030731715261936188, 0.42674314975738525, 0.6147024035453796, 0.8911871910095215, 0.9554843902587891, 0.9767347574234009, 0.9861767292022705, 0.991357684135437], "prob_old": [0.9558717608451843, 0.3367777466773987, 0.33404040336608887, 0.33061230182647705, 0.324626624584198, 0.3199687600135803, 0.31862586736679077, 0.3178420066833496, 0.31627559661865234], "prob_new_token": [1.553952824906446e-05, 0.013200880959630013, 0.05945569649338722, 0.22956225275993347, 0.7824614644050598, 0.9110206961631775, 0.9535042643547058, 0.9723804593086243, 0.9827395677566528], "prob_old_token": [0.8699713349342346, 0.004326414782553911, 0.005369096528738737, 0.011220064014196396, 0.002713053720071912, 0.000505169911775738, 0.00010718338307924569, 2.83960052911425e-05, 9.45829197007697e-06], "l1-model.layers.8.mlp.down_proj.weight": [70674.125], "l2-model.layers.8.mlp.down_proj.weight": [12.05623722076416], "linf-model.layers.8.mlp.down_proj.weight": [0.003949573263525963], "request": {"prompt": "{}, whose the capital city is", "subject": "Madeira", "target_new": {"str": "Juba"}, "old_answer": {"str": "Funchal"}, "seed": 42}}, {"loss_per_step": [6.129, 2.776, 0.5, 0.059, 0.015, 0.006], "prob_new": [0.2189430147409439, 0.47842079401016235, 0.6836310029029846, 0.9438924193382263, 0.9856377243995667, 0.9944159984588623], "prob_old": [0.773881196975708, 0.0015170946717262268, 0.0008359348867088556, 5.2333747589727864e-05, 1.1956503840337973e-05, 1.0444288136568503e-06], "prob_new_token": [1.0830311111931223e-05, 0.004075270611792803, 0.3683551847934723, 0.8892613053321838, 0.9720488786697388, 0.9891014099121094], "prob_old_token": [0.773881196975708, 0.0015170946717262268, 0.0008359348867088556, 5.2333747589727864e-05, 1.1956503840337973e-05, 1.0444288136568503e-06], "l1-model.layers.8.mlp.down_proj.weight": [54331.1796875], "l2-model.layers.8.mlp.down_proj.weight": [9.230042457580566], "linf-model.layers.8.mlp.down_proj.weight": [0.002473294734954834], "request": {"prompt": "The original language of work of {} is", 
"subject": "Melodifestivalen", "target_new": {"str": "Romanian"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [3.649, 3.866, 0.638, 0.139, 0.05, 0.023, 0.014, 0.011, 0.009], "prob_new": [0.02600996568799019, 0.020950276404619217, 0.5284512639045715, 0.870428740978241, 0.9512008428573608, 0.9769330024719238, 0.9860894083976746, 0.9893859624862671, 0.9906468987464905], "prob_old": [0.773881196975708, 0.004421422723680735, 0.014990582130849361, 0.01240321435034275, 0.004190684296190739, 0.001158221042715013, 0.000336491473717615, 0.00012738531222566962, 6.581837078556418e-05], "prob_new_token": [0.02600996568799019, 0.020950276404619217, 0.5284512639045715, 0.870428740978241, 0.9512008428573608, 0.9769330024719238, 0.9860894083976746, 0.9893859624862671, 0.9906468987464905], "prob_old_token": [0.773881196975708, 0.004421422723680735, 0.014990582130849361, 0.01240321435034275, 0.004190684296190739, 0.001158221042715013, 0.000336491473717615, 0.00012738531222566962, 6.581837078556418e-05], "l1-model.layers.8.mlp.down_proj.weight": [69090.171875], "l2-model.layers.8.mlp.down_proj.weight": [11.932836532592773], "linf-model.layers.8.mlp.down_proj.weight": [0.003942511044442654], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "English"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [9.855, 6.406, 1.918, 0.114, 0.017, 0.001], "prob_new": [5.2486044296529144e-05, 0.0016514789313077927, 0.14683881402015686, 0.8921676278114319, 0.9834615588188171, 0.9994021654129028], "prob_old": [0.773881196975708, 0.0007802894688211381, 0.001755253761075437, 0.0004249324556440115, 7.800501771271229e-05, 2.794231704683625e-06], "prob_new_token": [5.2486044296529144e-05, 0.0016514789313077927, 0.14683881402015686, 0.8921676278114319, 0.9834615588188171, 0.9994021654129028], "prob_old_token": [0.773881196975708, 0.0007802894688211381, 0.001755253761075437, 0.0004249324556440115, 7.800501771271229e-05, 2.794231704683625e-06], "l1-model.layers.8.mlp.down_proj.weight": [53934.63671875], "l2-model.layers.8.mlp.down_proj.weight": [9.266019821166992], "linf-model.layers.8.mlp.down_proj.weight": [0.002500254660844803], "request": {"prompt": "The original language of work of {} is", "subject": "Melodifestivalen", "target_new": {"str": "Japanese"}, "old_answer": {"str": "Swedish"}, "seed": 42}}, {"loss_per_step": [3.309, 0.978, 0.376, 0.213, 0.126, 0.073, 0.035, 0.013, 0.005], "prob_new": [0.4706716537475586, 0.5635191798210144, 0.7893282771110535, 0.8554635047912598, 0.8902520537376404, 0.9377452731132507, 0.9674615859985352, 0.9872475862503052, 0.9947412610054016], "prob_old": [0.9521257877349854, 0.620847761631012, 0.6793565154075623, 0.7006288170814514, 0.7148278951644897, 0.7194554805755615, 0.7219183444976807, 0.7244172096252441, 0.7242980003356934], "prob_new_token": [0.027645083144307137, 0.08940023183822632, 0.19947370886802673, 0.38385239243507385, 0.7074430584907532, 0.707577645778656, 0.8496415019035339, 0.9451652765274048, 0.9805532693862915], "prob_old_token": [0.8340222239494324, 0.027880718931555748, 0.0017118100076913834, 0.0005681565380655229, 8.7774868006818e-05, 7.825246575521305e-05, 2.563883208495099e-05, 6.491803560493281e-06, 1.647421186135034e-06], "l1-model.layers.8.mlp.down_proj.weight": [75267.4765625], "l2-model.layers.8.mlp.down_proj.weight": [12.481497764587402], "linf-model.layers.8.mlp.down_proj.weight": [0.004001243971288204], "request": {"prompt": "{} was originally aired on", 
"subject": "Rugrats", "target_new": {"str": "the Sci-Fi Channel"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [3.351, 1.303, 0.43, 0.009], "prob_new": [0.20045112073421478, 0.37123507261276245, 0.6845506429672241, 0.9912039637565613], "prob_old": [0.9521257877349854, 0.6880906820297241, 0.7353881597518921, 0.7442830801010132], "prob_new_token": [0.02764512225985527, 0.17379671335220337, 0.6727493405342102, 0.9793702363967896], "prob_old_token": [0.8340222239494324, 0.012218096293509007, 4.932438969262876e-06, 3.5026417322114867e-07], "l1-model.layers.8.mlp.down_proj.weight": [41606.0703125], "l2-model.layers.8.mlp.down_proj.weight": [6.854583740234375], "linf-model.layers.8.mlp.down_proj.weight": [0.0015024757012724876], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the USA Network"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [4.551, 1.267, 0.619, 0.16, 0.013, 0.005], "prob_new": [0.03765115141868591, 0.45249027013778687, 0.6073175668716431, 0.8692032098770142, 0.9866739511489868, 0.9945411682128906], "prob_old": [0.9521257877349854, 0.595029354095459, 0.6450008153915405, 0.6178506016731262, 0.6691548824310303, 0.6751218438148499], "prob_new_token": [0.02764512225985527, 0.0764414370059967, 0.29680177569389343, 0.6372982859611511, 0.971331775188446, 0.9909500479698181], "prob_old_token": [0.8340222239494324, 0.03719699755311012, 0.001202101120725274, 0.00017842231318354607, 5.0522744459158275e-06, 1.074083911589696e-06], "l1-model.layers.8.mlp.down_proj.weight": [58307.89453125], "l2-model.layers.8.mlp.down_proj.weight": [9.535449028015137], "linf-model.layers.8.mlp.down_proj.weight": [0.0025043468922376633], "request": {"prompt": "{} was originally aired on", "subject": "Rugrats", "target_new": {"str": "the CW"}, "old_answer": {"str": "Nickelodeon"}, "seed": 42}}, {"loss_per_step": [11.977, 3.484, 0.018, 0.007], "prob_new": [6.290205874392996e-06, 0.030696362257003784, 0.9822158813476562, 0.992974042892456], "prob_old": [0.7823527455329895, 0.02638901211321354, 4.961243030265905e-05, 7.491156338801375e-06], "prob_new_token": [6.290205874392996e-06, 0.030696362257003784, 0.9822158813476562, 0.992974042892456], "prob_old_token": [0.7823527455329895, 0.02638901211321354, 4.961243030265905e-05, 7.491156338801375e-06], "l1-model.layers.8.mlp.down_proj.weight": [38882.3984375], "l2-model.layers.8.mlp.down_proj.weight": [6.661786079406738], "linf-model.layers.8.mlp.down_proj.weight": [0.0015021837316453457], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Italy"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [11.487, 2.521, 0.005], "prob_new": [1.0261817806167528e-05, 0.08035949617624283, 0.9952123165130615], "prob_old": [0.7823527455329895, 0.016510246321558952, 1.6615831555100158e-05], "prob_new_token": [1.0261817806167528e-05, 0.08035949617624283, 0.9952123165130615], "prob_old_token": [0.7823527455329895, 0.016510246321558952, 1.6615831555100158e-05], "l1-model.layers.8.mlp.down_proj.weight": [32473.04296875], "l2-model.layers.8.mlp.down_proj.weight": [5.170786380767822], "linf-model.layers.8.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Spain"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [9.955, 1.887, 0.007], "prob_new": [4.750975494971499e-05, 0.15153849124908447, 
0.9929503798484802], "prob_old": [0.7823527455329895, 0.02289818599820137, 0.00034354827948845923], "prob_new_token": [4.750975494971499e-05, 0.15153849124908447, 0.9929503798484802], "prob_old_token": [0.7823527455329895, 0.02289818599820137, 0.00034354827948845923], "l1-model.layers.8.mlp.down_proj.weight": [33400.421875], "l2-model.layers.8.mlp.down_proj.weight": [5.265053749084473], "linf-model.layers.8.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} has originated in the country named", "subject": "Shar Pei", "target_new": {"str": "Japan"}, "old_answer": {"str": "China"}, "seed": 42}}, {"loss_per_step": [5.523, 2.97, 0.992, 0.075, 0.01, 0.003], "prob_new": [0.4938949942588806, 0.48765262961387634, 0.5678625702857971, 0.930371105670929, 0.9896678328514099, 0.9970889091491699], "prob_old": [0.9293187856674194, 0.5987218618392944, 0.6423054933547974, 0.6782876253128052, 0.6423202753067017, 0.593364953994751], "prob_new_token": [1.6136593330884352e-05, 0.0027072462253272533, 0.1377657800912857, 0.8613402247428894, 0.9797518849372864, 0.9945305585861206], "prob_old_token": [0.7632028460502625, 0.014254321344196796, 0.0011101593263447285, 0.00020748400129377842, 2.3324659196077846e-05, 4.177607934252592e-06], "l1-model.layers.8.mlp.down_proj.weight": [54408.9453125], "l2-model.layers.8.mlp.down_proj.weight": [9.313278198242188], "linf-model.layers.8.mlp.down_proj.weight": [0.0024813967756927013], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Dubai"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [9.34, 6.61, 4.747, 2.335, 0.956, 0.003], "prob_new": [0.0007351022795774043, 0.003899636445567012, 0.009444218128919601, 0.36716264486312866, 0.5724057555198669, 0.9968581795692444], "prob_old": [0.9293187856674194, 0.619197428226471, 0.6623148918151855, 0.6701401472091675, 0.5988778471946716, 0.5896561145782471], "prob_new_token": [0.0014649422373622656, 0.007559460587799549, 0.005725294351577759, 0.012991957366466522, 0.14847588539123535, 0.9939815402030945], "prob_old_token": [0.7632028460502625, 0.01145396288484335, 0.005541741382330656, 0.00767099903896451, 0.002613584976643324, 1.3081233873890596e-06], "l1-model.layers.8.mlp.down_proj.weight": [57755.375], "l2-model.layers.8.mlp.down_proj.weight": [9.480781555175781], "linf-model.layers.8.mlp.down_proj.weight": [0.0025111306458711624], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Jena"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [7.946, 5.09, 3.047, 2.486, 0.859, 0.305, 0.092, 0.022, 0.007], "prob_new": [0.4137546718120575, 0.3102516829967499, 0.4549996554851532, 0.46463853120803833, 0.5828822255134583, 0.7702261209487915, 0.9154914617538452, 0.9788501262664795, 0.9925416111946106], "prob_old": [0.9293187856674194, 0.598585307598114, 0.624427318572998, 0.6408535242080688, 0.6039406657218933, 0.6399980187416077, 0.6427950859069824, 0.6256118416786194, 0.6097288131713867], "prob_new_token": [1.5147047349728382e-07, 6.114619463915005e-05, 0.002488242695108056, 0.007515877019613981, 0.18241411447525024, 0.5473620295524597, 0.8360211849212646, 0.9622332453727722, 0.9896605610847473], "prob_old_token": [0.7632028460502625, 0.007146244402974844, 0.0001093729879357852, 0.0037473561242222786, 7.269369962159544e-05, 4.435509617906064e-05, 2.5803818061831407e-05, 8.7271419033641e-06, 2.806929160215077e-06], 
"l1-model.layers.8.mlp.down_proj.weight": [70817.84375], "l2-model.layers.8.mlp.down_proj.weight": [12.10671329498291], "linf-model.layers.8.mlp.down_proj.weight": [0.003972429782152176], "request": {"prompt": "{} was founded in the location of", "subject": "China Southern Airlines", "target_new": {"str": "Bremen"}, "old_answer": {"str": "Guangzhou"}, "seed": 42}}, {"loss_per_step": [3.487, 1.935, 1.216, 0.618, 0.01, 0.004], "prob_new": [0.42314139008522034, 0.5372816920280457, 0.7696192860603333, 0.8082023859024048, 0.9896694421768188, 0.9955853819847107], "prob_old": [0.8802522420883179, 0.2835536003112793, 0.26096850633621216, 0.25324583053588867, 0.24751390516757965, 0.2446347177028656], "prob_new_token": [6.021196440997301e-06, 0.0005335400346666574, 0.002711588516831398, 0.045701898634433746, 0.9806747436523438, 0.9934860467910767], "prob_old_token": [0.6327256560325623, 0.005641535855829716, 0.012446500360965729, 0.0051793442107737064, 3.0283986234280746e-06, 2.6323365887037653e-07], "l1-model.layers.8.mlp.down_proj.weight": [57978.76953125], "l2-model.layers.8.mlp.down_proj.weight": [9.513259887695312], "linf-model.layers.8.mlp.down_proj.weight": [0.002510547637939453], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "Judd Apatow"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [7.033, 4.837, 4.055, 2.774, 1.479, 1.091, 0.752, 0.078, 0.02, 0.002], "prob_new": [0.2846667468547821, 0.24984042346477509, 0.330268919467926, 0.347586989402771, 0.6040459871292114, 0.678303599357605, 0.7010865211486816, 0.9299182295799255, 0.9804869890213013, 0.9976820945739746], "prob_old": [0.8802522420883179, 0.31614914536476135, 0.28349074721336365, 0.26488959789276123, 0.2622154653072357, 0.26867011189460754, 0.2864863872528076, 0.29657766222953796, 0.2854672074317932, 0.2934419810771942], "prob_new_token": [0.00011093316425103694, 0.0016125149559229612, 0.002876025391742587, 0.006517768371850252, 0.014820444397628307, 0.03798868879675865, 0.1051173061132431, 0.7980948090553284, 0.9898504614830017, 0.9957132339477539], "prob_old_token": [0.6327256560325623, 0.004643183667212725, 0.006364602595567703, 0.011684935539960861, 0.017295068129897118, 0.0198331568390131, 0.018131593242287636, 0.001053478685207665, 2.6192262794211274e-06, 1.6595939200669818e-07], "l1-model.layers.8.mlp.down_proj.weight": [82517.3046875], "l2-model.layers.8.mlp.down_proj.weight": [13.543771743774414], "linf-model.layers.8.mlp.down_proj.weight": [0.004505697637796402], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "George Friedman"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [6.315, 5.103, 4.068, 3.321, 1.547, 0.631, 0.022, 0.041, 0.003], "prob_new": [0.1324782520532608, 0.06221509724855423, 0.2994188368320465, 0.32023242115974426, 0.5415666103363037, 0.7055012583732605, 0.9780032634735107, 0.959976077079773, 0.9968261122703552], "prob_old": [0.8802522420883179, 0.3377532362937927, 0.24989598989486694, 0.25509244203567505, 0.2555001676082611, 0.25302577018737793, 0.254687637090683, 0.2531086802482605, 0.2537480294704437], "prob_new_token": [0.00022606723359785974, 0.0013587753055617213, 0.0030863797292113304, 0.006573409773409367, 0.01567274145781994, 0.15680113434791565, 0.9584957361221313, 0.9731851816177368, 0.9918884634971619], "prob_old_token": [0.6327256560325623, 0.008267764933407307, 0.011827963404357433, 0.01199590414762497, 0.01277068629860878, 0.004848121199756861, 
1.5392807881653425e-06, 1.7406257768470823e-08, 6.622340009698746e-09], "l1-model.layers.8.mlp.down_proj.weight": [73809.671875], "l2-model.layers.8.mlp.down_proj.weight": [12.509765625], "linf-model.layers.8.mlp.down_proj.weight": [0.003974530845880508], "request": {"prompt": "{} is lead by the person named", "subject": "Dyson", "target_new": {"str": "Marc Mayer"}, "old_answer": {"str": "James Dyson"}, "seed": 42}}, {"loss_per_step": [3.563, 2.163, 0.96, 0.237, 0.06, 0.013, 0.005], "prob_new": [0.5048718452453613, 0.7295875549316406, 0.7470669746398926, 0.8451651930809021, 0.9464364051818848, 0.9869458079338074, 0.9954763054847717], "prob_old": [0.714084267616272, 0.4796644449234009, 0.4096356928348541, 0.31435027718544006, 0.26227006316185, 0.2521952688694, 0.2501344084739685], "prob_new_token": [4.025532234663842e-06, 0.00019011751282960176, 0.022204823791980743, 0.39251160621643066, 0.7908480763435364, 0.9519301056861877, 0.9864377975463867], "prob_old_token": [0.6126298904418945, 0.004127616994082928, 0.01017786841839552, 0.005631242413073778, 0.000859027961269021, 8.678924496052787e-05, 1.2539512681541964e-05], "l1-model.layers.8.mlp.down_proj.weight": [58316.640625], "l2-model.layers.8.mlp.down_proj.weight": [10.162476539611816], "linf-model.layers.8.mlp.down_proj.weight": [0.002987809479236603], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", "target_new": {"str": "Masayoshi Son"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [5.602, 3.513, 2.242, 0.993, 0.416, 0.011, 0.008], "prob_new": [0.20161043107509613, 0.22496703267097473, 0.4186674654483795, 0.73541659116745, 0.8027554750442505, 0.9893484115600586, 0.991773784160614], "prob_old": [0.714084267616272, 0.2496771514415741, 0.2588079869747162, 0.30613982677459717, 0.23718096315860748, 0.2301536500453949, 0.22425338625907898], "prob_new_token": [9.207190487359185e-06, 0.0006533392588607967, 0.0018010035855695605, 0.010048137046396732, 0.14249052107334137, 0.9919569492340088, 0.9974543452262878], "prob_old_token": [0.6126298904418945, 0.005877749063074589, 0.042917776852846146, 0.26053231954574585, 0.0029752885457128286, 2.331690893697669e-06, 2.8591188083737507e-07], "l1-model.layers.8.mlp.down_proj.weight": [64728.671875], "l2-model.layers.8.mlp.down_proj.weight": [10.693855285644531], "linf-model.layers.8.mlp.down_proj.weight": [0.0029840562492609024], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", "target_new": {"str": "Riccardo Muti"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [4.487, 2.569, 1.695, 0.944, 0.241, 0.003], "prob_new": [0.5412984490394592, 0.5543131232261658, 0.79879230260849, 0.8001270294189453, 0.8588180541992188, 0.9966245889663696], "prob_old": [0.714084267616272, 0.2654353082180023, 0.26227912306785583, 0.24690723419189453, 0.22570617496967316, 0.17340560257434845], "prob_new_token": [4.529347563675401e-08, 0.00013893027789890766, 0.00021003889560233802, 0.009011155925691128, 0.3016617000102997, 0.9905588030815125], "prob_old_token": [0.6126298904418945, 0.0038775610737502575, 0.019311299547553062, 0.006844912189990282, 0.0034875725395977497, 3.189523340552114e-05], "l1-model.layers.8.mlp.down_proj.weight": [51147.859375], "l2-model.layers.8.mlp.down_proj.weight": [8.936408996582031], "linf-model.layers.8.mlp.down_proj.weight": [0.0025109294801950455], "request": {"prompt": "{} is lead by the person named", "subject": "Charles Schwab Corporation", 
"target_new": {"str": "Giorgio Armani"}, "old_answer": {"str": "Charles R Schwab"}, "seed": 42}}, {"loss_per_step": [3.441, 2.59, 0.582, 0.003], "prob_new": [0.47477248311042786, 0.4334278404712677, 0.6527214050292969, 0.9967180490493774], "prob_old": [0.9123725891113281, 0.7274577617645264, 0.6710349917411804, 0.7026680707931519], "prob_new_token": [0.0010821707546710968, 0.006546749267727137, 0.3154154419898987, 0.9989356994628906], "prob_old_token": [0.6529882550239563, 0.0002835145278368145, 0.0001819472963688895, 1.9918715565836465e-09], "l1-model.layers.8.mlp.down_proj.weight": [40485.015625], "l2-model.layers.8.mlp.down_proj.weight": [6.743155002593994], "linf-model.layers.8.mlp.down_proj.weight": [0.001502476749010384], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. Hoffman", "target_new": {"str": "Columbia University"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [1.192, 1.146, 0.588, 0.058, 0.003], "prob_new": [0.7895854115486145, 0.7368947863578796, 0.8359387516975403, 0.9519093036651611, 0.9969351887702942], "prob_old": [0.9123725891113281, 0.6527763605117798, 0.6581327319145203, 0.6608036756515503, 0.6616002917289734], "prob_new_token": [0.00041883750236593187, 0.001393563812598586, 0.019490815699100494, 0.6754472255706787, 0.9824322462081909], "prob_old_token": [0.6529882550239563, 0.00046054820995777845, 0.001261823228560388, 0.0001270204083994031, 5.45547572983196e-06], "l1-model.layers.8.mlp.down_proj.weight": [49024.09375], "l2-model.layers.8.mlp.down_proj.weight": [8.203836441040039], "linf-model.layers.8.mlp.down_proj.weight": [0.0020057111978530884], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. Hoffman", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [4.146, 1.249, 0.152, 0.158, 0.044, 0.033, 0.024, 0.017, 0.012, 0.008], "prob_new": [0.48203378915786743, 0.502095639705658, 0.8721382021903992, 0.8769300580024719, 0.9576812982559204, 0.9680415987968445, 0.9766880869865417, 0.9834842681884766, 0.9883328676223755, 0.9916539192199707], "prob_old": [0.9123725891113281, 0.6770569682121277, 0.7514238357543945, 0.7655825614929199, 0.7713481783866882, 0.7721630930900574, 0.7721201777458191, 0.7706233263015747, 0.7672233581542969, 0.761989951133728], "prob_new_token": [0.20117510855197906, 0.047860708087682724, 0.6144838333129883, 0.5225231051445007, 0.8931447863578796, 0.9188135862350464, 0.9418854713439941, 0.9595770239830017, 0.9716967940330505, 0.9798160791397095], "prob_old_token": [0.6529882550239563, 0.0004024135123472661, 0.00021355232456699014, 0.00023443512327503413, 6.106898217694834e-05, 4.552340760710649e-05, 2.706066516111605e-05, 1.486945529904915e-05, 8.272200830106158e-06, 4.774764875037363e-06], "l1-model.layers.8.mlp.down_proj.weight": [75558.3828125], "l2-model.layers.8.mlp.down_proj.weight": [12.802579879760742], "linf-model.layers.8.mlp.down_proj.weight": [0.004446391947567463], "request": {"prompt": "{} holds a position at", "subject": "Darleane C. 
Hoffman", "target_new": {"str": "the University of Cape Town"}, "old_answer": {"str": "Lawrence Berkeley National Laboratory"}, "seed": 42}}, {"loss_per_step": [2.123, 0.079, 0.007], "prob_new": [0.6134995818138123, 0.926469087600708, 0.9927964210510254], "prob_old": [0.8484284281730652, 0.4564078450202942, 0.39647576212882996], "prob_new_token": [0.0020436712075024843, 0.8489261269569397, 0.9996904134750366], "prob_old_token": [0.7124742865562439, 0.011390592902898788, 1.870170126494486e-05], "l1-model.layers.8.mlp.down_proj.weight": [36305.5390625], "l2-model.layers.8.mlp.down_proj.weight": [5.50684118270874], "linf-model.layers.8.mlp.down_proj.weight": [0.0010006772354245186], "request": {"prompt": "{} holds a position at", "subject": "J. M. Coetzee", "target_new": {"str": "Stanford University"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [1.439, 0.412, 0.012, 0.003], "prob_new": [0.7682777643203735, 0.8479679822921753, 0.9884433746337891, 0.9968501925468445], "prob_old": [0.8484284281730652, 0.47673365473747253, 0.42475882172584534, 0.3969248831272125], "prob_new_token": [9.221502114087343e-05, 0.06389576941728592, 0.9265103936195374, 0.9834738373756409], "prob_old_token": [0.7124742865562439, 0.1061081811785698, 0.0005025911377742887, 7.229237235151231e-05], "l1-model.layers.8.mlp.down_proj.weight": [37513.83984375], "l2-model.layers.8.mlp.down_proj.weight": [6.590428352355957], "linf-model.layers.8.mlp.down_proj.weight": [0.0015022177249193192], "request": {"prompt": "{} holds a position at", "subject": "J. M. Coetzee", "target_new": {"str": "Carnegie Mellon University"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [2.52, 0.741, 0.01, 0.002], "prob_new": [0.6281738877296448, 0.8248648047447205, 0.989698588848114, 0.9982410669326782], "prob_old": [0.8484284281730652, 0.48128342628479004, 0.4049528241157532, 0.3975144326686859], "prob_new_token": [6.811330877098953e-06, 0.012531929649412632, 0.9704792499542236, 0.9993012547492981], "prob_old_token": [0.7124742865562439, 0.006026192102581263, 0.00036675334558822215, 6.897594175825361e-07], "l1-model.layers.8.mlp.down_proj.weight": [44971.4765625], "l2-model.layers.8.mlp.down_proj.weight": [7.157344341278076], "linf-model.layers.8.mlp.down_proj.weight": [0.0015023675514385104], "request": {"prompt": "{} holds a position at", "subject": "J. M. 
Coetzee", "target_new": {"str": "Lawrence Berkeley National Laboratory"}, "old_answer": {"str": "the University of Cape Town"}, "seed": 42}}, {"loss_per_step": [2.688, 2.169, 1.522, 0.726, 0.124, 0.015, 0.015, 0.002], "prob_new": [0.4794250428676605, 0.5517702698707581, 0.7910391092300415, 0.799604594707489, 0.9063743948936462, 0.9854833483695984, 0.9854294657707214, 0.9982753992080688], "prob_old": [0.8382276892662048, 0.3036818504333496, 0.2695551812648773, 0.3132990598678589, 0.32046616077423096, 0.319132000207901, 0.30850809812545776, 0.25695037841796875], "prob_new_token": [2.253292768727988e-05, 0.00018778005323838443, 0.0005191917880438268, 0.02724642865359783, 0.5443580746650696, 0.9358878135681152, 0.9892677068710327, 0.9963684678077698], "prob_old_token": [0.6083126068115234, 0.007812112104147673, 0.007383339107036591, 0.004625498782843351, 0.0038050978910177946, 0.000355948053766042, 2.9999680918990634e-05, 6.633948942180723e-06], "l1-model.layers.8.mlp.down_proj.weight": [63958.1171875], "l2-model.layers.8.mlp.down_proj.weight": [11.164231300354004], "linf-model.layers.8.mlp.down_proj.weight": [0.003430737182497978], "request": {"prompt": "{} is held by", "subject": "Minister of Foreign Affairs of Belarus", "target_new": {"str": "Idriss D\u00e9by"}, "old_answer": {"str": "Vladimir Makei"}, "seed": 42}}, {"loss_per_step": [5.808, 2.949, 2.003, 0.991, 0.361, 0.012, 0.01, 0.007], "prob_new": [0.4812857508659363, 0.6844682693481445, 0.7328404188156128, 0.7330977320671082, 0.8064611554145813, 0.988341212272644, 0.9898660778999329, 0.9932408332824707], "prob_old": [0.8382276892662048, 0.31196558475494385, 0.1981787383556366, 0.0697811096906662, 0.04879028722643852, 0.022528765723109245, 0.01029515452682972, 0.006619487889111042], "prob_new_token": [9.099828446323954e-08, 1.015105954138562e-05, 0.0003553605929482728, 0.020759038627147675, 0.23891405761241913, 0.9578186273574829, 0.9652265310287476, 0.980833888053894], "prob_old_token": [0.6083126068115234, 0.003607125487178564, 0.0015490850200876594, 0.0019171409076079726, 0.00015018433623481542, 1.5613299183314666e-05, 2.314386392754386e-06, 5.205033062338771e-07], "l1-model.layers.8.mlp.down_proj.weight": [68439.9609375], "l2-model.layers.8.mlp.down_proj.weight": [11.390458106994629], "linf-model.layers.8.mlp.down_proj.weight": [0.0034507184755057096], "request": {"prompt": "{} is held by", "subject": "Minister of Foreign Affairs of Belarus", "target_new": {"str": "Gaston Browne"}, "old_answer": {"str": "Vladimir Makei"}, "seed": 42}}, {"loss_per_step": [5.622, 3.772, 2.096, 0.042, 0.01, 0.008], "prob_new": [0.4701083302497864, 0.43828046321868896, 0.70536869764328, 0.9596598148345947, 0.9898054003715515, 0.9922065734863281], "prob_old": [0.9186565279960632, 0.350236713886261, 0.49103546142578125, 0.5819649696350098, 0.5880728960037231, 0.589652955532074], "prob_new_token": [4.263490609446308e-06, 2.1244991330604535e-06, 0.00027607669471763074, 0.8969568610191345, 0.9987342357635498, 0.9995678067207336], "prob_old_token": [0.6722553372383118, 1.7195312466355972e-05, 7.439093224093085e-06, 8.043320463002601e-07, 5.997752960951175e-09, 4.562081545866903e-10], "l1-model.layers.8.mlp.down_proj.weight": [54829.0390625], "l2-model.layers.8.mlp.down_proj.weight": [9.286591529846191], "linf-model.layers.8.mlp.down_proj.weight": [0.0024944734759628773], "request": {"prompt": "{} is held by", "subject": "list of heads of state of Chad", "target_new": {"str": "Gaston Browne"}, "old_answer": {"str": "Idriss D\u00e9by"}, "seed": 42}}, 
{"loss_per_step": [7.107, 5.881, 2.15, 0.033, 0.01, 0.009], "prob_new": [0.15828540921211243, 0.15245303511619568, 0.6204241514205933, 0.9678109884262085, 0.990103006362915, 0.9913532137870789], "prob_old": [0.9186565279960632, 0.3412189781665802, 0.3299374580383301, 0.3466018736362457, 0.3587016761302948, 0.3694287836551666], "prob_new_token": [4.95036510983482e-06, 5.063639036961831e-06, 0.0018404981819912791, 0.9407110810279846, 0.9976242184638977, 0.9968568086624146], "prob_old_token": [0.6722553372383118, 1.609941136848647e-05, 0.0002610996307339519, 5.752099241362885e-06, 1.937845794941495e-08, 2.2315820302765133e-09], "l1-model.layers.8.mlp.down_proj.weight": [57422.546875], "l2-model.layers.8.mlp.down_proj.weight": [9.505334854125977], "linf-model.layers.8.mlp.down_proj.weight": [0.0024863332509994507], "request": {"prompt": "{} is held by", "subject": "list of heads of state of Chad", "target_new": {"str": "Vladimir Makei"}, "old_answer": {"str": "Idriss D\u00e9by"}, "seed": 42}}, {"loss_per_step": [3.05, 0.341, 0.01], "prob_new": [0.46842142939567566, 0.7815732955932617, 0.9905136227607727], "prob_old": [0.8401201963424683, 0.6858717203140259, 0.7348592281341553], "prob_new_token": [4.251266091159778e-06, 0.2680739164352417, 0.9926510453224182], "prob_old_token": [0.8187586665153503, 5.874963608221151e-05, 2.099455542747819e-08], "l1-model.layers.8.mlp.down_proj.weight": [35293.8671875], "l2-model.layers.8.mlp.down_proj.weight": [5.426976680755615], "linf-model.layers.8.mlp.down_proj.weight": [0.0010006800293922424], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! Black Emperor", "target_new": {"str": "Warner Bros. Records"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [2.417, 1.065, 0.089, 0.84, 0.016, 0.004], "prob_new": [0.5253622531890869, 0.789507269859314, 0.9279550313949585, 0.8148339986801147, 0.9841530323028564, 0.9958881735801697], "prob_old": [0.8401201963424683, 0.5078836679458618, 0.5768064260482788, 0.5384423732757568, 0.5235537886619568, 0.48512035608291626], "prob_new_token": [0.00048019958194345236, 0.0022589494474232197, 0.6175675392150879, 0.0073187341913580894, 0.9989844560623169, 0.9996633529663086], "prob_old_token": [0.8187586665153503, 2.294706428074278e-05, 4.26476987058777e-07, 1.3431050319923088e-05, 3.465543851888242e-09, 1.6112249312527638e-09], "l1-model.layers.8.mlp.down_proj.weight": [49435.5625], "l2-model.layers.8.mlp.down_proj.weight": [8.54555606842041], "linf-model.layers.8.mlp.down_proj.weight": [0.0025058372411876917], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! 
Black Emperor", "target_new": {"str": "Konvict Muzik"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [5.813, 2.859, 0.686, 0.015, 0.008], "prob_new": [0.14490577578544617, 0.6587562561035156, 0.7063983678817749, 0.9854394197463989, 0.9924203753471375], "prob_old": [0.8401201963424683, 0.5682572722434998, 0.5961029529571533, 0.5796945095062256, 0.5808911919593811], "prob_new_token": [6.53521738058771e-06, 0.00019304899615235627, 0.12895528972148895, 0.9602498412132263, 0.9824346899986267], "prob_old_token": [0.8187586665153503, 5.937623427598737e-06, 2.6331277695135213e-05, 2.0944550271906337e-07, 1.0945964135089525e-07], "l1-model.layers.8.mlp.down_proj.weight": [49420.9921875], "l2-model.layers.8.mlp.down_proj.weight": [8.225211143493652], "linf-model.layers.8.mlp.down_proj.weight": [0.0020005963742733], "request": {"prompt": "{} is represented by the music label", "subject": "Godspeed You! Black Emperor", "target_new": {"str": "Armada Music"}, "old_answer": {"str": "Constellation Records"}, "seed": 42}}, {"loss_per_step": [6.934, 3.809, 2.708, 1.809, 0.558, 0.002], "prob_new": [0.2340732216835022, 0.5346924066543579, 0.6577890515327454, 0.6647614240646362, 0.7287439107894897, 0.997825026512146], "prob_old": [0.9576637148857117, 0.6151641011238098, 0.7456942200660706, 0.7356805801391602, 0.6925495862960815, 0.6039591431617737], "prob_new_token": [1.0154884222401961e-07, 1.7575373931322247e-05, 0.00030433875508606434, 0.004441909957677126, 0.18793876469135284, 0.9935588836669922], "prob_old_token": [0.8164881467819214, 0.0003013504028785974, 3.550868132151663e-05, 3.588051185943186e-05, 2.935694737971062e-06, 4.173649159611159e-08], "l1-model.layers.8.mlp.down_proj.weight": [56501.43359375], "l2-model.layers.8.mlp.down_proj.weight": [9.429981231689453], "linf-model.layers.8.mlp.down_proj.weight": [0.002511131577193737], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "Peaceville Records"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [4.19, 2.873, 0.301, 0.0], "prob_new": [0.34920310974121094, 0.3712496757507324, 0.8222492933273315, 0.999848484992981], "prob_old": [0.9576637148857117, 0.578553318977356, 0.659044086933136, 0.6146470308303833], "prob_new_token": [4.1410003177588806e-06, 0.0003766703885048628, 0.3042295277118683, 0.9999036192893982], "prob_old_token": [0.8164881467819214, 2.7643511202768423e-05, 1.2920458175358362e-05, 7.677455811936143e-15], "l1-model.layers.8.mlp.down_proj.weight": [37983.73046875], "l2-model.layers.8.mlp.down_proj.weight": [6.486286640167236], "linf-model.layers.8.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "XL Recordings"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [8.684, 3.594, 0.082, 0.002], "prob_new": [0.005541480612009764, 0.47892215847969055, 0.9242689609527588, 0.9975675344467163], "prob_old": [0.9576637148857117, 0.6898309588432312, 0.7458445429801941, 0.6949333548545837], "prob_new_token": [2.58570617006626e-06, 0.0007896582246758044, 0.8499641418457031, 0.9964385032653809], "prob_old_token": [0.8164881467819214, 9.288386354455724e-05, 2.580816044428502e-06, 1.2713666786012823e-09], "l1-model.layers.8.mlp.down_proj.weight": [42417.71484375], "l2-model.layers.8.mlp.down_proj.weight": [6.926408767700195], "linf-model.layers.8.mlp.down_proj.weight": 
[0.0015024757012724876], "request": {"prompt": "{} is represented by the music label", "subject": "Anne-Sophie Mutter", "target_new": {"str": "Domino"}, "old_answer": {"str": "Deutsche Grammophon"}, "seed": 42}}, {"loss_per_step": [3.415, 2.656, 2.903, 2.099, 1.29, 0.759, 0.435, 0.138, 0.047, 0.026, 0.018, 0.014, 0.012, 0.01, 0.009], "prob_new": [0.46457263827323914, 0.34655532240867615, 0.24364922940731049, 0.4648676812648773, 0.6176841855049133, 0.7664163708686829, 0.7950099110603333, 0.8902134299278259, 0.9558472037315369, 0.9744070172309875, 0.9821240305900574, 0.9860821962356567, 0.9884204864501953, 0.9899585843086243, 0.9910581707954407], "prob_old": [0.9080218076705933, 0.005676801782101393, 0.004661211743950844, 0.020219121128320694, 0.07541161775588989, 0.06523261219263077, 0.041836004704236984, 0.022804951295256615, 0.012109760195016861, 0.006931112613528967, 0.004402202554047108, 0.003032378852367401, 0.0022091339342296124, 0.0016745834145694971, 0.0013082189252600074], "prob_new_token": [1.5300216546165757e-05, 0.0008195702685043216, 0.0016310373321175575, 0.001980624860152602, 0.008763393387198448, 0.027671797201037407, 0.1341623216867447, 0.5601747632026672, 0.8555045127868652, 0.9293493628501892, 0.9570964574813843, 0.9703736901283264, 0.9778175354003906, 0.9824904799461365, 0.9856807589530945], "prob_old_token": [0.7662683725357056, 8.804582466837019e-05, 0.00016418595623690635, 5.0158341764472425e-05, 0.00010617447696859017, 7.292436202988029e-05, 7.693443330936134e-05, 1.317203714279458e-05, 1.2841367151850136e-06, 3.1639385156267963e-07, 1.4663685021787387e-07, 8.786358307588671e-08, 5.819559234510052e-08, 4.043210921622631e-08, 2.8913992977663838e-08], "l1-model.layers.8.mlp.down_proj.weight": [83496.6484375], "l2-model.layers.8.mlp.down_proj.weight": [14.604440689086914], "linf-model.layers.8.mlp.down_proj.weight": [0.006300473585724831], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Michael O'Neill"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [2.651, 3.361, 2.395, 1.486, 0.471, 0.926, 0.001], "prob_new": [0.7096829414367676, 0.7234190702438354, 0.7006150484085083, 0.7480149269104004, 0.7867285013198853, 0.7555341720581055, 0.9986269474029541], "prob_old": [0.9080218076705933, 0.03373824432492256, 0.004126700572669506, 0.021784286946058273, 0.022532161325216293, 0.026462901383638382, 0.09782683849334717], "prob_new_token": [2.9521990654757246e-05, 1.6140432990141562e-06, 8.560151763958856e-05, 0.0026527789887040854, 0.1528032273054123, 0.024722442030906677, 0.9975836277008057], "prob_old_token": [0.7662683725357056, 7.638327952008694e-05, 0.0014741112245246768, 0.0005205629277043045, 6.349269824568182e-06, 4.718741365650203e-06, 2.8519906436486053e-08], "l1-model.layers.8.mlp.down_proj.weight": [56000.79296875], "l2-model.layers.8.mlp.down_proj.weight": [9.798495292663574], "linf-model.layers.8.mlp.down_proj.weight": [0.003018103539943695], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Pia Sundhage"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [5.4, 3.125, 1.796, 0.252, 0.069, 0.024, 0.011, 0.006], "prob_new": [0.25741130113601685, 0.6205238103866577, 0.6537176966667175, 0.813694417476654, 0.9343156218528748, 0.9768230319023132, 0.989249587059021, 0.9935563802719116], "prob_old": 
[0.9080218076705933, 0.04154236614704132, 0.012905163690447807, 0.004543520510196686, 0.001213724259287119, 0.0006907099741511047, 0.000402775127440691, 0.00026369799161329865], "prob_new_token": [2.869437594199553e-06, 9.802485874388367e-05, 0.004782047588378191, 0.4958442151546478, 0.8949935436248779, 0.9439725279808044, 0.972178041934967, 0.983670711517334], "prob_old_token": [0.7662683725357056, 0.00016776227857917547, 3.546441803337075e-05, 2.6966017685481347e-05, 2.4922499051172053e-06, 7.826529895282874e-07, 2.386651658525807e-07, 8.35706828183902e-08], "l1-model.layers.8.mlp.down_proj.weight": [67287.1015625], "l2-model.layers.8.mlp.down_proj.weight": [11.35707950592041], "linf-model.layers.8.mlp.down_proj.weight": [0.0034691346809267998], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Spain women's national association football team", "target_new": {"str": "Eddie Jones"}, "old_answer": {"str": "Jorge Vilda"}, "seed": 42}}, {"loss_per_step": [7.073, 3.542, 1.227, 0.062, 0.021, 0.017, 0.015, 0.011, 0.007], "prob_new": [0.2787639796733856, 0.2593880891799927, 0.46726030111312866, 0.9404935240745544, 0.979825496673584, 0.9834081530570984, 0.9856008291244507, 0.989484965801239, 0.9933397173881531], "prob_old": [0.8151693344116211, 0.015844466164708138, 0.11606379598379135, 0.14581526815891266, 0.07512828707695007, 0.04121140018105507, 0.02675754576921463, 0.01792619749903679, 0.011992570012807846], "prob_new_token": [2.648082272571628e-06, 0.0036834843922406435, 0.07158204913139343, 0.8922064900398254, 0.9503026008605957, 0.9550158381462097, 0.9602550864219666, 0.9715267419815063, 0.9830148816108704], "prob_old_token": [0.6482585668563843, 0.013027790002524853, 0.030360346660017967, 0.010400079190731049, 0.002371288137510419, 0.0010424958309158683, 0.0005478032398968935, 0.000279719679383561, 0.00013661562115885317], "l1-model.layers.8.mlp.down_proj.weight": [67786.1640625], "l2-model.layers.8.mlp.down_proj.weight": [11.627784729003906], "linf-model.layers.8.mlp.down_proj.weight": [0.003961838781833649], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Bob Melvin"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [4.296, 2.75, 1.042, 0.003], "prob_new": [0.28204405307769775, 0.2583200931549072, 0.5621196627616882, 0.9966548681259155], "prob_old": [0.8151693344116211, 0.01711004227399826, 0.04205198585987091, 0.006017930340021849], "prob_new_token": [0.000329130474710837, 0.00802898220717907, 0.1245865523815155, 0.9990093111991882], "prob_old_token": [0.6482585668563843, 0.017293615266680717, 0.0016750264912843704, 8.082282079158176e-07], "l1-model.layers.8.mlp.down_proj.weight": [39434.109375], "l2-model.layers.8.mlp.down_proj.weight": [6.655045986175537], "linf-model.layers.8.mlp.down_proj.weight": [0.0015024803578853607], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Karl Robinson"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [3.51, 1.997, 0.299, 0.01], "prob_new": [0.5479843020439148, 0.5289052724838257, 0.8019323348999023, 0.9904422163963318], "prob_old": [0.8151693344116211, 0.47007596492767334, 0.466256707906723, 0.3477652966976166], "prob_new_token": [3.284277681814274e-06, 0.010270037688314915, 0.3103559911251068, 0.9983014464378357], "prob_old_token": [0.6482585668563843, 0.03421615809202194, 
0.01289580762386322, 0.00013846902584191412], "l1-model.layers.8.mlp.down_proj.weight": [38873.015625], "l2-model.layers.8.mlp.down_proj.weight": [6.555166244506836], "linf-model.layers.8.mlp.down_proj.weight": [0.0015024784952402115], "request": {"prompt": "The person who serves as the head coach for {} is", "subject": "Huddersfield Town A.F.C.", "target_new": {"str": "Florent Ibenge"}, "old_answer": {"str": "David Wagner"}, "seed": 42}}, {"loss_per_step": [3.54, 2.059, 1.085, 0.067, 0.009], "prob_new": [0.5062932968139648, 0.5686218738555908, 0.7959017157554626, 0.9422439932823181, 0.990939736366272], "prob_old": [0.8161789774894714, 0.4495394825935364, 0.5704588890075684, 0.5964400768280029, 0.5976489186286926], "prob_new_token": [7.655329682165757e-06, 0.00019066657114308327, 0.004510059952735901, 0.7327770590782166, 0.9767279624938965], "prob_old_token": [0.7256129384040833, 0.018431298434734344, 0.0005511459312401712, 0.0011122486321255565, 6.408029730664566e-05], "l1-model.layers.8.mlp.down_proj.weight": [45565.11328125], "l2-model.layers.8.mlp.down_proj.weight": [7.907059669494629], "linf-model.layers.8.mlp.down_proj.weight": [0.002005759160965681], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Sultan of Brunei"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [5.507, 2.779, 1.264, 0.343, 0.055, 0.014, 0.008], "prob_new": [0.0979776456952095, 0.20639578998088837, 0.457825243473053, 0.7478774785995483, 0.9482677578926086, 0.9863741993904114, 0.9925460815429688], "prob_old": [0.8161789774894714, 0.4807724058628082, 0.5677101016044617, 0.580206036567688, 0.5814580321311951, 0.5857284665107727, 0.5947811007499695], "prob_new_token": [0.00014183954044710845, 0.011454313062131405, 0.11948791146278381, 0.448147177696228, 0.8613765835762024, 0.9707691073417664, 0.9922685027122498], "prob_old_token": [0.7256129384040833, 0.021297655999660492, 0.0019882279448211193, 0.0007612751214765012, 0.00020511445472948253, 4.811468534171581e-05, 1.1272841220488772e-05], "l1-model.layers.8.mlp.down_proj.weight": [58470.0859375], "l2-model.layers.8.mlp.down_proj.weight": [10.18396282196045], "linf-model.layers.8.mlp.down_proj.weight": [0.002987726591527462], "request": {"prompt": "{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "Grand Prince of Kiev"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [3.907, 1.065, 0.242, 0.118, 0.063, 0.034, 0.019, 0.012, 0.008], "prob_new": [0.2329782247543335, 0.49216198921203613, 0.8149236440658569, 0.9017958641052246, 0.9433831572532654, 0.9677900075912476, 0.9810850024223328, 0.9881021976470947, 0.9919320344924927], "prob_old": [0.8161789774894714, 0.5684953927993774, 0.5642308592796326, 0.5851395130157471, 0.5914469957351685, 0.593070924282074, 0.5931066870689392, 0.5922824740409851, 0.5908424854278564], "prob_new_token": [0.00019359435827936977, 0.11429033428430557, 0.47649094462394714, 0.6536910533905029, 0.7942498326301575, 0.8852294087409973, 0.935485303401947, 0.9617063403129578, 0.9756472706794739], "prob_old_token": [0.7256129384040833, 0.011337990872561932, 0.00630682148039341, 0.002317035337910056, 0.0005538906552828848, 0.00013558362843468785, 4.2146497435169294e-05, 1.646069904381875e-05, 7.643088792974595e-06], "l1-model.layers.8.mlp.down_proj.weight": [76501.90625], "l2-model.layers.8.mlp.down_proj.weight": [12.56068229675293], "linf-model.layers.8.mlp.down_proj.weight": [0.003973618149757385], "request": {"prompt": 
"{} has the job title of", "subject": "Paul Biya", "target_new": {"str": "King of the French"}, "old_answer": {"str": "President of Cameroon"}, "seed": 42}}, {"loss_per_step": [3.533, 2.433, 0.365, 0.02, 0.012, 0.024, 0.008], "prob_new": [0.28912192583084106, 0.39861518144607544, 0.7464204430580139, 0.9801306128501892, 0.9886215329170227, 0.9767757654190063, 0.9916817545890808], "prob_old": [0.8448086977005005, 0.37203389406204224, 0.5804486870765686, 0.49038368463516235, 0.47378551959991455, 0.4842197000980377, 0.4828342795372009], "prob_new_token": [0.008998566307127476, 0.04059022665023804, 0.30982595682144165, 0.9433779716491699, 0.9594699144363403, 0.9185840487480164, 0.9699999690055847], "prob_old_token": [0.6732748746871948, 0.0001038489572238177, 0.0002104398881783709, 8.256579349108506e-06, 1.7063262021110859e-06, 1.359031216452422e-06, 2.852868306035816e-07], "l1-model.layers.8.mlp.down_proj.weight": [59832.25], "l2-model.layers.8.mlp.down_proj.weight": [10.34167766571045], "linf-model.layers.8.mlp.down_proj.weight": [0.0029969108290970325], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "Prime Minister of the Netherlands"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [3.966, 0.914, 0.049, 0.028, 0.015, 0.008], "prob_new": [0.2848266363143921, 0.5093754529953003, 0.9529380202293396, 0.9729657173156738, 0.9852134585380554, 0.992015540599823], "prob_old": [0.8448086977005005, 0.623735785484314, 0.5564301013946533, 0.4696686267852783, 0.4380059242248535, 0.3985070586204529], "prob_new_token": [0.006945076864212751, 0.24006228148937225, 0.9270603060722351, 0.9584859013557434, 0.9639905095100403, 0.9729028940200806], "prob_old_token": [0.6732748746871948, 0.00017374168965034187, 1.0362135981267784e-05, 2.878663735828013e-06, 1.5999180504877586e-06, 9.214363103637879e-07], "l1-model.layers.8.mlp.down_proj.weight": [60471.390625], "l2-model.layers.8.mlp.down_proj.weight": [9.743485450744629], "linf-model.layers.8.mlp.down_proj.weight": [0.002509546699002385], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said Al Said", "target_new": {"str": "President of the Republic of Congo"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [4.176, 2.137, 0.584, 0.094, 0.05, 0.034, 0.023, 0.015, 0.011, 0.008], "prob_new": [0.3508017659187317, 0.36721691489219666, 0.6512323021888733, 0.9148109555244446, 0.9525614976882935, 0.9665534496307373, 0.9775583148002625, 0.9852750301361084, 0.9894849061965942, 0.9919700026512146], "prob_old": [0.8448086977005005, 0.45292964577674866, 0.47803813219070435, 0.4682577848434448, 0.4049179255962372, 0.32575181126594543, 0.28856155276298523, 0.272694855928421, 0.26501041650772095, 0.2608218789100647], "prob_new_token": [0.008998566307127476, 0.12272462993860245, 0.21559996902942657, 0.7615281343460083, 0.8781993985176086, 0.9300183057785034, 0.9561663269996643, 0.9703757762908936, 0.9788963198661804, 0.9843735098838806], "prob_old_token": [0.6732748746871948, 8.44724927446805e-05, 7.233610085677356e-05, 2.9933878522570012e-06, 2.6092249072462437e-07, 4.478312831679432e-08, 1.4531550185381548e-08, 7.203524887700041e-09, 4.418528209981787e-09, 2.951739963563682e-09], "l1-model.layers.8.mlp.down_proj.weight": [74703.234375], "l2-model.layers.8.mlp.down_proj.weight": [12.854277610778809], "linf-model.layers.8.mlp.down_proj.weight": [0.004453386180102825], "request": {"prompt": "{} has the job title of", "subject": "Qaboos bin Said 
Al Said", "target_new": {"str": "Prime Minister of Italy"}, "old_answer": {"str": "Sultan of Oman"}, "seed": 42}}, {"loss_per_step": [4.673, 1.9, 0.638, 0.033, 0.013, 0.007], "prob_new": [0.49746453762054443, 0.5409243702888489, 0.7416611909866333, 0.968582272529602, 0.9869250059127808, 0.992899477481842], "prob_old": [0.8818895220756531, 0.5030840635299683, 0.4923384189605713, 0.4942206144332886, 0.49713027477264404, 0.5081491470336914], "prob_new_token": [0.00019636286015156657, 0.002996868686750531, 0.08886600285768509, 0.8865810632705688, 0.9652376174926758, 0.98503577709198], "prob_old_token": [0.7280361652374268, 0.0001290283107664436, 8.369219722226262e-05, 1.898658297250222e-06, 1.7946200614460395e-07, 3.0804450545929285e-08], "l1-model.layers.8.mlp.down_proj.weight": [59656.1953125], "l2-model.layers.8.mlp.down_proj.weight": [9.660531044006348], "linf-model.layers.8.mlp.down_proj.weight": [0.0025041140615940094], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Ben Affleck"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.955, 3.01, 1.489, 0.145, 0.019, 0.008], "prob_new": [0.3345504701137543, 0.46164172887802124, 0.6397479772567749, 0.8752453327178955, 0.9818415641784668, 0.9918690919876099], "prob_old": [0.8818895220756531, 0.7635409832000732, 0.7171763181686401, 0.7011769413948059, 0.7076930403709412, 0.6304343342781067], "prob_new_token": [0.004192287568002939, 0.0002846480638254434, 0.012640484608709812, 0.9334548711776733, 0.9478525519371033, 0.9772832989692688], "prob_old_token": [0.7280361652374268, 3.979185567004606e-05, 0.0010795086855068803, 8.101690764306113e-05, 6.07210022280924e-05, 1.8428023395244963e-05], "l1-model.layers.8.mlp.down_proj.weight": [49896.578125], "l2-model.layers.8.mlp.down_proj.weight": [8.701624870300293], "linf-model.layers.8.mlp.down_proj.weight": [0.002498440444469452], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Jamie Bell"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.163, 1.426, 0.388, 0.007], "prob_new": [0.4374825954437256, 0.5799522995948792, 0.8133492469787598, 0.9934045672416687], "prob_old": [0.8818895220756531, 0.532808780670166, 0.5338706970214844, 0.5119665265083313], "prob_new_token": [7.356026617344469e-05, 0.003991760779172182, 0.15809591114521027, 0.9793482422828674], "prob_old_token": [0.7280361652374268, 0.0010756596457213163, 0.0036163083277642727, 7.272628863574937e-05], "l1-model.layers.8.mlp.down_proj.weight": [44106.72265625], "l2-model.layers.8.mlp.down_proj.weight": [7.0235090255737305], "linf-model.layers.8.mlp.down_proj.weight": [0.0015024738386273384], "request": {"prompt": "{} is in a relationship with", "subject": "Sally Ride", "target_new": {"str": "Anna Kournikova"}, "old_answer": {"str": "Tam O'Shaughnessy"}, "seed": 42}}, {"loss_per_step": [3.395, 1.632, 0.906, 0.131, 0.015, 0.009], "prob_new": [0.5038432478904724, 0.7601163983345032, 0.7933884859085083, 0.9011373519897461, 0.9848722815513611, 0.9909267425537109], "prob_old": [0.97446209192276, 0.2747623324394226, 0.22945579886436462, 0.21684476733207703, 0.1850362867116928, 0.14924593269824982], "prob_new_token": [2.238563865830656e-06, 0.00035558652598410845, 0.011273348703980446, 0.5331436395645142, 0.9483646750450134, 0.9793497920036316], "prob_old_token": [0.9460753798484802, 0.000142858931212686, 9.102081094169989e-05, 4.6958401071606204e-05, 9.779383844943368e-07, 
1.228042947332142e-07], "l1-model.layers.8.mlp.down_proj.weight": [62290.03125], "l2-model.layers.8.mlp.down_proj.weight": [9.904500961303711], "linf-model.layers.8.mlp.down_proj.weight": [0.0025039054453372955], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Enrique Iglesias"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [5.18, 1.948, 0.907, 0.351, 0.082, 0.016, 0.007], "prob_new": [0.49945124983787537, 0.5727646946907043, 0.750169575214386, 0.8103876113891602, 0.9297084808349609, 0.9844284057617188, 0.9927098155021667], "prob_old": [0.97446209192276, 0.34660154581069946, 0.1965981125831604, 0.15084441006183624, 0.10678325593471527, 0.08500975370407104, 0.0772048756480217], "prob_new_token": [2.7291832793707727e-06, 0.0014135874807834625, 0.027243169024586678, 0.24724902212619781, 0.7239317297935486, 0.945662796497345, 0.9838682413101196], "prob_old_token": [0.9460753798484802, 0.00010104385728482157, 4.972928945790045e-05, 4.39541763626039e-05, 1.1142635230498854e-05, 7.215983828245953e-07, 8.325503131345613e-08], "l1-model.layers.8.mlp.down_proj.weight": [66970.28125], "l2-model.layers.8.mlp.down_proj.weight": [10.84485912322998], "linf-model.layers.8.mlp.down_proj.weight": [0.0029801614582538605], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Ben Affleck"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [6.44, 4.578, 1.628, 0.515, 0.061, 0.018, 0.016, 0.014, 0.011, 0.008], "prob_new": [0.010797940194606781, 0.19425956904888153, 0.6451209783554077, 0.7323142886161804, 0.9429506063461304, 0.9818879961967468, 0.984656810760498, 0.9864498972892761, 0.989061713218689, 0.9918413162231445], "prob_old": [0.97446209192276, 0.2894112169742584, 0.30337080359458923, 0.21335163712501526, 0.09254995733499527, 0.03030189871788025, 0.011161376722157001, 0.00518515519797802, 0.0029104065615683794, 0.001857114490121603], "prob_new_token": [1.6411824617534876e-05, 0.00012887288175988942, 0.00814879685640335, 0.21761102974414825, 0.8555692434310913, 0.9814701080322266, 0.9934501647949219, 0.996380627155304, 0.9976087212562561, 0.9982766509056091], "prob_old_token": [0.9460753798484802, 7.323010504478589e-05, 2.5283981813117862e-05, 1.3157423381926492e-05, 4.6039840526646e-06, 7.621518420819484e-07, 1.9871852430242143e-07, 7.679166458274267e-08, 3.956758476419964e-08, 2.481262306730514e-08], "l1-model.layers.8.mlp.down_proj.weight": [77943.7890625], "l2-model.layers.8.mlp.down_proj.weight": [12.996256828308105], "linf-model.layers.8.mlp.down_proj.weight": [0.004420246928930283], "request": {"prompt": "{} is in a relationship with", "subject": "Billie Jean King", "target_new": {"str": "Erwin Bach"}, "old_answer": {"str": "Ilana Kloss"}, "seed": 42}}, {"loss_per_step": [3.071, 0.923, 0.283, 0.058, 0.015, 0.006], "prob_new": [0.4938259720802307, 0.6483914256095886, 0.8163253664970398, 0.9459611177444458, 0.9850124716758728, 0.9945005774497986], "prob_old": [0.8684470057487488, 0.47250762581825256, 0.484516441822052, 0.46634072065353394, 0.4347303807735443, 0.4167914390563965], "prob_new_token": [0.03165428712964058, 0.48763859272003174, 0.8108217120170593, 0.9167171716690063, 0.9676986336708069, 0.9873563647270203], "prob_old_token": [0.7590489983558655, 0.0020418118219822645, 0.00010960980580421165, 7.601045581395738e-06, 1.1997434512522887e-06, 2.5537264036756824e-07], "l1-model.layers.8.mlp.down_proj.weight": [61267.95703125], 
"l2-model.layers.8.mlp.down_proj.weight": [9.791830062866211], "linf-model.layers.8.mlp.down_proj.weight": [0.002499704249203205], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "the Cleveland Cavaliers"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [2.965, 2.147, 0.703, 0.138, 0.033, 0.012, 0.006], "prob_new": [0.62272047996521, 0.6338878870010376, 0.687818706035614, 0.8831364512443542, 0.968299388885498, 0.9884134531021118, 0.9941460490226746], "prob_old": [0.8684470057487488, 0.4519110918045044, 0.48339664936065674, 0.49006015062332153, 0.48305460810661316, 0.46981897950172424, 0.4480610191822052], "prob_new_token": [0.00015760859241709113, 0.0017677799332886934, 0.12980590760707855, 0.6863062977790833, 0.9114623665809631, 0.9669297933578491, 0.9832046627998352], "prob_old_token": [0.7590489983558655, 0.001176504185423255, 0.007387910038232803, 0.007613535039126873, 0.0014610307989642024, 0.000324048480251804, 0.00010616748477332294], "l1-model.layers.8.mlp.down_proj.weight": [62002.8125], "l2-model.layers.8.mlp.down_proj.weight": [10.553418159484863], "linf-model.layers.8.mlp.down_proj.weight": [0.002998577430844307], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "Arsenal"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [3.481, 1.143, 0.137, 0.019, 0.006], "prob_new": [0.41277772188186646, 0.5876848101615906, 0.8785452246665955, 0.9816542863845825, 0.9939777255058289], "prob_old": [0.8684470057487488, 0.4698488712310791, 0.4745059013366699, 0.46766796708106995, 0.4565349519252777], "prob_new_token": [0.03165428712964058, 0.5012480616569519, 0.8904678225517273, 0.9753540754318237, 0.9927022457122803], "prob_old_token": [0.7590489983558655, 0.0006646396359428763, 0.000406165374442935, 4.933744639856741e-05, 8.13250426290324e-06], "l1-model.layers.8.mlp.down_proj.weight": [53203.90234375], "l2-model.layers.8.mlp.down_proj.weight": [8.510991096496582], "linf-model.layers.8.mlp.down_proj.weight": [0.0020051253959536552], "request": {"prompt": "{} is a sport team member of", "subject": "Zinedine Zidane", "target_new": {"str": "the Dallas Mavericks"}, "old_answer": {"str": "Real Madrid"}, "seed": 42}}, {"loss_per_step": [1.431, 0.314, 0.077, 0.012, 0.367, 0.007], "prob_new": [0.6811485886573792, 0.8406854867935181, 0.9321128726005554, 0.9887181520462036, 0.8304581642150879, 0.9933969378471375], "prob_old": [0.8201957941055298, 0.8080890774726868, 0.7470980882644653, 0.7978386878967285, 0.7973207831382751, 0.7933170199394226], "prob_new_token": [0.5299520492553711, 0.997597336769104, 0.7431093454360962, 0.9944504499435425, 0.9920085072517395, 0.9728472232818604], "prob_old_token": [0.5299520492553711, 0.997597336769104, 0.7431093454360962, 0.9944504499435425, 0.9920085072517395, 0.9728472232818604], "l1-model.layers.8.mlp.down_proj.weight": [52082.3515625], "l2-model.layers.8.mlp.down_proj.weight": [8.825329780578613], "linf-model.layers.8.mlp.down_proj.weight": [0.002495824359357357], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "the Cleveland Cavaliers"}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [2.307, 0.758, 0.472, 0.056, 0.017, 0.003], "prob_new": [0.5194604396820068, 0.7113367319107056, 0.7513329982757568, 0.9490612745285034, 0.9831340312957764, 0.9974804520606995], "prob_old": [0.8201957941055298, 0.588295042514801, 
0.5690147876739502, 0.5943093299865723, 0.5944726467132568, 0.5936744213104248], "prob_new_token": [7.672882929909974e-05, 0.02195797860622406, 0.1027304008603096, 0.7591599822044373, 0.9912184476852417, 0.9983336925506592], "prob_old_token": [0.5299520492553711, 0.0031209017615765333, 0.03958761319518089, 0.0025547901168465614, 2.2332236767397262e-05, 2.1858465970581165e-06], "l1-model.layers.8.mlp.down_proj.weight": [55783.25], "l2-model.layers.8.mlp.down_proj.weight": [9.418416976928711], "linf-model.layers.8.mlp.down_proj.weight": [0.0025091106072068214], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "Chelsea F.C."}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [1.51, 0.546, 0.022, 0.003], "prob_new": [0.6918376684188843, 0.7946495413780212, 0.9790233969688416, 0.9969934821128845], "prob_old": [0.8201957941055298, 0.7991858720779419, 0.7976937890052795, 0.7991542220115662], "prob_new_token": [0.5299520492553711, 0.990129292011261, 0.992274820804596, 0.99799644947052], "prob_old_token": [0.5299520492553711, 0.990129292011261, 0.992274820804596, 0.99799644947052], "l1-model.layers.8.mlp.down_proj.weight": [42233.9765625], "l2-model.layers.8.mlp.down_proj.weight": [6.902820587158203], "linf-model.layers.8.mlp.down_proj.weight": [0.001502394676208496], "request": {"prompt": "{} is a sport team member of", "subject": "Klay Thompson", "target_new": {"str": "the Boston Celtics"}, "old_answer": {"str": "the Golden State Warriors"}, "seed": 42}}, {"loss_per_step": [7.038, 5.42, 2.39, 2.502, 0.805, 0.518, 0.204, 0.07, 0.027, 0.012, 0.007], "prob_new": [0.016076456755399704, 0.04584834352135658, 0.49427908658981323, 0.0936109721660614, 0.5973809957504272, 0.6754013895988464, 0.8324716091156006, 0.9345310926437378, 0.9740660786628723, 0.9877685308456421, 0.9930762052536011], "prob_old": [0.671699583530426, 0.00046708129229955375, 0.0016972693847492337, 0.001540709869004786, 0.0032036469783633947, 0.0022958458866924047, 0.0007041043136268854, 0.0001515299518359825, 3.0064065867918544e-05, 6.702917744405568e-06, 1.8489698732082616e-06], "prob_new_token": [2.4008397303987294e-05, 0.00021429103799164295, 0.008560854941606522, 0.1388503462076187, 0.20094971358776093, 0.3570886254310608, 0.6670316457748413, 0.869674026966095, 0.9483876824378967, 0.9756871461868286, 0.9862605929374695], "prob_old_token": [0.671699583530426, 0.00046708129229955375, 0.0016972693847492337, 0.001540709869004786, 0.0032036469783633947, 0.0022958458866924047, 0.0007041043136268854, 0.0001515299518359825, 3.0064065867918544e-05, 6.702917744405568e-06, 1.8489698732082616e-06], "l1-model.layers.8.mlp.down_proj.weight": [70660.921875], "l2-model.layers.8.mlp.down_proj.weight": [12.686781883239746], "linf-model.layers.8.mlp.down_proj.weight": [0.004869565367698669], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Delft"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [10.085, 4.884, 0.185, 0.84, 0.016, 0.013, 0.009], "prob_new": [4.168611849308945e-05, 0.007563481107354164, 0.831145703792572, 0.4318762421607971, 0.9838724732398987, 0.9866074323654175, 0.9915246367454529], "prob_old": [0.671699583530426, 0.010834569111466408, 0.011086955666542053, 0.001307659549638629, 0.00016054311709012836, 9.48118104133755e-05, 3.887155980919488e-05], "prob_new_token": [4.168611849308945e-05, 0.007563481107354164, 0.831145703792572, 0.4318762421607971, 
0.9838724732398987, 0.9866074323654175, 0.9915246367454529], "prob_old_token": [0.671699583530426, 0.010834569111466408, 0.011086955666542053, 0.001307659549638629, 0.00016054311709012836, 9.48118104133755e-05, 3.887155980919488e-05], "l1-model.layers.8.mlp.down_proj.weight": [56082.7265625], "l2-model.layers.8.mlp.down_proj.weight": [9.723091125488281], "linf-model.layers.8.mlp.down_proj.weight": [0.0029286742210388184], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Rome"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [5.108, 2.417, 0.62, 0.061, 0.016, 0.007], "prob_new": [0.09006981551647186, 0.41392630338668823, 0.6676243543624878, 0.9418908953666687, 0.9838122129440308, 0.993100106716156], "prob_old": [0.671699583530426, 0.015629693865776062, 0.012408538721501827, 0.0004677664255723357, 3.4045773645630106e-05, 4.804519448953215e-06], "prob_new_token": [1.2657715160457883e-05, 0.002047315239906311, 0.19097016751766205, 0.8712946772575378, 0.96409010887146, 0.9848228096961975], "prob_old_token": [0.671699583530426, 0.015629693865776062, 0.012408538721501827, 0.0004677664255723357, 3.4045773645630106e-05, 4.804519448953215e-06], "l1-model.layers.8.mlp.down_proj.weight": [57265.3046875], "l2-model.layers.8.mlp.down_proj.weight": [9.58796501159668], "linf-model.layers.8.mlp.down_proj.weight": [0.0025016451254487038], "request": {"prompt": "{} is employed in the location of", "subject": "Klaus Wowereit", "target_new": {"str": "Manchester, England"}, "old_answer": {"str": "Berlin"}, "seed": 42}}, {"loss_per_step": [6.17, 3.217, 11.138, 2.481, 0.166, 0.015, 0.012, 0.01, 0.01, 0.01], "prob_new": [0.17178383469581604, 0.09656188637018204, 3.356542583787814e-05, 0.45408108830451965, 0.8555765151977539, 0.984846830368042, 0.9881568551063538, 0.9896097183227539, 0.9899554252624512, 0.9903998374938965], "prob_old": [0.4325380325317383, 0.0593382865190506, 0.039131924510002136, 0.03340773284435272, 0.04942797124385834, 0.05421948432922363, 0.047707751393318176, 0.039405062794685364, 0.03236529976129532, 0.02698053978383541], "prob_new_token": [1.272373538085958e-05, 0.008703869767487049, 6.381069397320971e-05, 0.0077682314440608025, 0.7377328276634216, 0.9914886355400085, 0.9917464256286621, 0.9876079559326172, 0.9839804172515869, 0.9827688932418823], "prob_old_token": [0.6283074617385864, 0.00022747772163711488, 4.664545031118905e-06, 1.151316064351704e-05, 6.322794661173248e-07, 9.669470557582827e-09, 6.7555818716869e-09, 7.595272855098756e-09, 8.511348958961662e-09, 9.205297857306505e-09], "l1-model.layers.8.mlp.down_proj.weight": [67044.59375], "l2-model.layers.8.mlp.down_proj.weight": [11.942646026611328], "linf-model.layers.8.mlp.down_proj.weight": [0.004171490669250488], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Delft"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}, {"loss_per_step": [6.128, 2.636, 0.594, 0.013, 0.005], "prob_new": [0.0021801020484417677, 0.07162898778915405, 0.5522109866142273, 0.9873643517494202, 0.9946056604385376], "prob_old": [0.4325380325317383, 0.10290876030921936, 0.30583736300468445, 0.23932084441184998, 0.2769838273525238], "prob_new_token": [0.0021801020484417677, 0.07162898778915405, 0.5522109866142273, 0.9873643517494202, 0.9946056604385376], "prob_old_token": [0.6283074617385864, 0.0005386217962950468, 0.00012484987382777035, 3.905521225533448e-06, 1.3397118436841993e-06], 
"l1-model.layers.8.mlp.down_proj.weight": [47588.71875], "l2-model.layers.8.mlp.down_proj.weight": [8.059122085571289], "linf-model.layers.8.mlp.down_proj.weight": [0.0019913045689463615], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Berlin"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}, {"loss_per_step": [11.597, 7.582, 4.375, 0.607, 0.097, 0.022, 0.008], "prob_new": [9.194967788062058e-06, 0.0005097546381875873, 0.012584362179040909, 0.545235276222229, 0.9080072045326233, 0.9784987568855286, 0.9919477701187134], "prob_old": [0.4325380325317383, 0.11429205536842346, 0.08487638831138611, 0.1395222246646881, 0.17194628715515137, 0.21800446510314941, 0.2566816508769989], "prob_new_token": [9.194967788062058e-06, 0.0005097546381875873, 0.012584362179040909, 0.545235276222229, 0.9080072045326233, 0.9784987568855286, 0.9919477701187134], "prob_old_token": [0.6283074617385864, 0.000797810556832701, 0.0033347748685628176, 0.0004305453912820667, 8.472659828839824e-06, 3.520934228617989e-07, 3.58837759506514e-08], "l1-model.layers.8.mlp.down_proj.weight": [59637.4375], "l2-model.layers.8.mlp.down_proj.weight": [10.297754287719727], "linf-model.layers.8.mlp.down_proj.weight": [0.0029526585713028908], "request": {"prompt": "{} is employed in the location of", "subject": "Friedrich Engels", "target_new": {"str": "Rome"}, "old_answer": {"str": "Manchester, England"}, "seed": 42}}]