results/microsoft/phi-2/results_2024-03-18 17:55:50.683272.json
{
"results": {
"pubmedqa": {
"acc,none": 0.74,
"acc_stderr,none": 0.019635965529725526,
"alias": "pubmedqa"
},
"mmlu_professional_medicine": {
"alias": "professional_medicine",
"acc,none": 0.41544117647058826,
"acc_stderr,none": 0.02993534270787775
},
"mmlu_medical_genetics": {
"alias": "medical_genetics",
"acc,none": 0.57,
"acc_stderr,none": 0.04975698519562428
},
"mmlu_college_medicine": {
"alias": "college_medicine",
"acc,none": 0.5317919075144508,
"acc_stderr,none": 0.03804749744364763
},
"mmlu_college_biology": {
"alias": "college_biology",
"acc,none": 0.5694444444444444,
"acc_stderr,none": 0.04140685639111503
},
"mmlu_clinical_knowledge": {
"alias": "clinical_knowledge",
"acc,none": 0.6339622641509434,
"acc_stderr,none": 0.029647813539365235
},
"mmlu_anatomy": {
"alias": "anatomy",
"acc,none": 0.45925925925925926,
"acc_stderr,none": 0.04304979692464242
},
"medqa_4options": {
"acc,none": 0.396700706991359,
"acc_stderr,none": 0.013716848430592905,
"acc_norm,none": 0.396700706991359,
"acc_norm_stderr,none": 0.013716848430592905,
"alias": "medqa_4options"
},
"medmcqa": {
"acc,none": 0.3889552952426488,
"acc_stderr,none": 0.007538662876080574,
"acc_norm,none": 0.3889552952426488,
"acc_norm_stderr,none": 0.007538662876080574,
"alias": "medmcqa"
}
},
"config": {
"model_dtype": "float32",
"model_name": "microsoft/phi-2",
"model_sha": "main"
}
}
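
Below is a minimal sketch of how this results file could be loaded and summarized with Python's standard json module. The local filename is an assumption (point it at wherever you saved the file), and the unweighted mean across tasks is only an illustrative aggregate, not an official leaderboard metric.

import json

# Path assumes a local copy of the JSON shown above; adjust as needed.
path = "results_2024-03-18 17:55:50.683272.json"
with open(path) as f:
    data = json.load(f)

# Print per-task accuracy with its standard error, then a simple
# unweighted mean over all tasks in the file.
accs = []
for task, metrics in data["results"].items():
    acc = metrics["acc,none"]
    stderr = metrics["acc_stderr,none"]
    accs.append(acc)
    print(f"{metrics.get('alias', task):25s} acc={acc:.4f} ± {stderr:.4f}")

print(f"\nUnweighted mean accuracy over {len(accs)} tasks: {sum(accs) / len(accs):.4f}")
print(f"Model: {data['config']['model_name']} (dtype={data['config']['model_dtype']})")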