{ "results": { "pubmedqa": { "acc,none": 0.744, "acc_stderr,none": 0.019536923574747667, "alias": "pubmedqa" }, "mmlu_professional_medicine": { "alias": "professional_medicine", "acc,none": 0.4411764705882353, "acc_stderr,none": 0.030161911930767102 }, "mmlu_medical_genetics": { "alias": "medical_genetics", "acc,none": 0.52, "acc_stderr,none": 0.050211673156867795 }, "mmlu_college_medicine": { "alias": "college_medicine", "acc,none": 0.48554913294797686, "acc_stderr,none": 0.03810871630454764 }, "mmlu_college_biology": { "alias": "college_biology", "acc,none": 0.5138888888888888, "acc_stderr,none": 0.041795966175810016 }, "mmlu_clinical_knowledge": { "alias": "clinical_knowledge", "acc,none": 0.5320754716981132, "acc_stderr,none": 0.03070948699255655 }, "mmlu_anatomy": { "alias": "anatomy", "acc,none": 0.5037037037037037, "acc_stderr,none": 0.04319223625811331 }, "medqa_4options": { "acc,none": 0.38884524744697563, "acc_stderr,none": 0.013668486829960911, "acc_norm,none": 0.38884524744697563, "acc_norm_stderr,none": 0.013668486829960911, "alias": "medqa_4options" }, "medmcqa": { "acc,none": 0.3762849629452546, "acc_stderr,none": 0.007491340283683567, "acc_norm,none": 0.3762849629452546, "acc_norm_stderr,none": 0.007491340283683567, "alias": "medmcqa" } }, "config": { "model": "hf", "model_args": "pretrained=AdaptLLM/medicine-chat,revision=main,dtype=float32", "batch_size": "auto", "batch_sizes": [ 4 ], "device": "cuda:0", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": null, "model_dtype": "float32", "model_name": "AdaptLLM/medicine-chat", "model_sha": "main" } }