Update leaderboard for model Salesforce/LLaMA-3-8B-SFR-Iterative-DPO-R (#93)
{
  "config": {
    "model_dtype": "bfloat16",
    "model_sha": "main",
    "model_name": "Salesforce/LLaMA-3-8B-SFR-Iterative-DPO-R"
  },
  "results": {
    "logiqa": {
      "delta_abs": 0.036741214057507965,
      "delta_rel": 0.10313901345291474
    },
    "logiqa2": {
      "delta_abs": 0.07697201017811706,
      "delta_rel": 0.18473282442748093
    },
    "lsat-ar": {
      "delta_abs": 0.052173913043478265,
      "delta_rel": 0.25000000000000006
    },
    "lsat-rc": {
      "delta_abs": 0.11524163568773232,
      "delta_rel": 0.24409448818897633
    },
    "lsat-lr": {
      "delta_abs": 0.0901960784313725,
      "delta_rel": 0.22549019607843127
    }
  }
}
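
A minimal sketch of how such an entry might be consumed downstream, assuming the filename used here is hypothetical and that `delta_rel` is the ratio of `delta_abs` to a baseline score (an interpretation not stated in the file itself):

```python
import json

# Hypothetical filename; the JSON content is the leaderboard entry shown above.
with open("Salesforce__LLaMA-3-8B-SFR-Iterative-DPO-R.json") as f:
    entry = json.load(f)

print(entry["config"]["model_name"])

for task, deltas in entry["results"].items():
    d_abs = deltas["delta_abs"]
    d_rel = deltas["delta_rel"]
    # Assumption: delta_rel = delta_abs / baseline, so the baseline score can
    # be recovered as delta_abs / delta_rel.
    baseline = d_abs / d_rel
    print(f"{task}: delta_abs={d_abs:.3f}, delta_rel={d_rel:.3f}, implied baseline~{baseline:.3f}")
```

Under that assumption, e.g. the lsat-ar entry (0.0522 / 0.25) would imply a baseline score of roughly 0.209.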