{
"results": {
"anli_r1": {
"acc": 0.328,
"acc_stderr": 0.01485384248727033
},
"anli_r2": {
"acc": 0.364,
"acc_stderr": 0.015222868840522022
},
"anli_r3": {
"acc": 0.3516666666666667,
"acc_stderr": 0.013789711695404785
},
"cb": {
"acc": 0.48214285714285715,
"acc_stderr": 0.0673769750864465,
"f1": 0.4085858585858586
},
"copa": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256
},
"hellaswag": {
"acc": 0.43069109739095796,
"acc_stderr": 0.004941609820763584,
"acc_norm": 0.5651264688309102,
"acc_norm_stderr": 0.004947272454226218
},
"rte": {
"acc": 0.5595667870036101,
"acc_stderr": 0.029882123363118712
},
"winogrande": {
"acc": 0.5706393054459353,
"acc_stderr": 0.013911537499969158
},
"storycloze_2016": {
"acc": 0.6932121859967931,
"acc_stderr": 0.010664275190473634
},
"boolq": {
"acc": 0.6070336391437309,
"acc_stderr": 0.008542335147970571
},
"arc_easy": {
"acc": 0.5829124579124579,
"acc_stderr": 0.010117738967781995,
"acc_norm": 0.5782828282828283,
"acc_norm_stderr": 0.010133255284012316
},
"arc_challenge": {
"acc": 0.257679180887372,
"acc_stderr": 0.012780770562768402,
"acc_norm": 0.2738907849829352,
"acc_norm_stderr": 0.013032004972989501
},
"sciq": {
"acc": 0.893,
"acc_stderr": 0.009779910359847167,
"acc_norm": 0.884,
"acc_norm_stderr": 0.010131468138756998
},
"piqa": {
"acc": 0.7372143634385201,
"acc_stderr": 0.010269354068140767,
"acc_norm": 0.7415669205658324,
"acc_norm_stderr": 0.01021397163677331
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}