lm1-2b8-55b-c4-repetitions/evaluation_old/2b855b11bc4-results_lm-eval_global_step52452_2022-12-23-13-35-31.json
{
"results": {
"copa": {
"acc": 0.74,
"acc_stderr": 0.04408440022768077
},
"boolq": {
"acc": 0.5892966360856269,
"acc_stderr": 0.008604460608471413
},
"hellaswag": {
"acc": 0.43616809400517825,
"acc_stderr": 0.004948952519517524,
"acc_norm": 0.563931487751444,
"acc_norm_stderr": 0.004948824501355473
},
"arc_challenge": {
"acc": 0.27303754266211605,
"acc_stderr": 0.013019332762635734,
"acc_norm": 0.2815699658703072,
"acc_norm_stderr": 0.013143376735009024
},
"arc_easy": {
"acc": 0.5597643097643098,
"acc_stderr": 0.010186228624515651,
"acc_norm": 0.4978956228956229,
"acc_norm_stderr": 0.010259692651537049
},
"sciq": {
"acc": 0.802,
"acc_stderr": 0.012607733934175315,
"acc_norm": 0.713,
"acc_norm_stderr": 0.014312087053809961
},
"winogrande": {
"acc": 0.5485398579321231,
"acc_stderr": 0.01398611030101776
},
"piqa": {
"acc": 0.7366702937976061,
"acc_stderr": 0.010276185322196764,
"acc_norm": 0.7383025027203483,
"acc_norm_stderr": 0.010255630772708227
},
"rte": {
"acc": 0.5090252707581228,
"acc_stderr": 0.030091559826331334
}
},
"versions": {
"copa": 0,
"boolq": 1,
"hellaswag": 0,
"arc_challenge": 0,
"arc_easy": 0,
"sciq": 0,
"winogrande": 0,
"piqa": 0,
"rte": 0
}
}
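
Below is a minimal sketch, in Python, of how the per-task accuracies in this file could be read back and summarized. It assumes the JSON is saved locally under the file name shown in the path above; the "results" keys and the acc / acc_stderr / acc_norm fields match the structure of this file.

import json

# Assumption: a local copy of this results file (name taken from the path above).
RESULTS_PATH = "2b855b11bc4-results_lm-eval_global_step52452_2022-12-23-13-35-31.json"

with open(RESULTS_PATH) as f:
    data = json.load(f)

# Print accuracy (with standard error) per task, plus acc_norm where reported.
for task, metrics in data["results"].items():
    line = f'{task}: acc={metrics["acc"]:.4f} (±{metrics["acc_stderr"]:.4f})'
    if "acc_norm" in metrics:
        line += f', acc_norm={metrics["acc_norm"]:.4f}'
    print(line)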