{
"results": {
"anli_r1": {
"acc": 0.323,
"acc_stderr": 0.014794927843348633
},
"anli_r2": {
"acc": 0.357,
"acc_stderr": 0.015158521721486769
},
"anli_r3": {
"acc": 0.34833333333333333,
"acc_stderr": 0.013759437498874079
},
"cb": {
"acc": 0.4642857142857143,
"acc_stderr": 0.0672477765493766,
"f1": 0.3162578162578163
},
"copa": {
"acc": 0.81,
"acc_stderr": 0.03942772444036622
},
"hellaswag": {
"acc": 0.4329814777932683,
"acc_stderr": 0.004944755230598382,
"acc_norm": 0.5656243776140211,
"acc_norm_stderr": 0.004946617138983511
},
"rte": {
"acc": 0.5054151624548736,
"acc_stderr": 0.030094698123239966
},
"winogrande": {
"acc": 0.5461720599842147,
"acc_stderr": 0.013992441563707067
},
"storycloze_2016": {
"acc": 0.6900053447354356,
"acc_stderr": 0.010695042806212553
},
"boolq": {
"acc": 0.5941896024464832,
"acc_stderr": 0.008588486726385772
},
"arc_easy": {
"acc": 0.5921717171717171,
"acc_stderr": 0.010083950240041216,
"acc_norm": 0.5711279461279462,
"acc_norm_stderr": 0.010155440652900152
},
"arc_challenge": {
"acc": 0.2738907849829352,
"acc_stderr": 0.013032004972989505,
"acc_norm": 0.28071672354948807,
"acc_norm_stderr": 0.013131238126975576
},
"sciq": {
"acc": 0.872,
"acc_stderr": 0.010570133761108665,
"acc_norm": 0.829,
"acc_norm_stderr": 0.011912216456264613
},
"piqa": {
"acc": 0.735582154515778,
"acc_stderr": 0.010289787244767168,
"acc_norm": 0.7334058759521219,
"acc_norm_stderr": 0.010316749863541365
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}