lodestone-base-4096-v1/mteb_results/CQADupstackProgrammersRetrieval.json
{
  "dataset_revision": null,
  "mteb_dataset_name": "CQADupstackProgrammersRetrieval",
  "mteb_version": "1.1.0",
  "test": {
    "evaluation_time": 154.33,
    "map_at_1": 0.19742,
    "map_at_10": 0.27741,
    "map_at_100": 0.29323,
    "map_at_1000": 0.29438,
    "map_at_3": 0.25217,
    "map_at_5": 0.26583,
    "mrr_at_1": 0.24658,
    "mrr_at_10": 0.32407,
    "mrr_at_100": 0.33631,
    "mrr_at_1000": 0.33686,
    "mrr_at_3": 0.30194,
    "mrr_at_5": 0.31444,
    "ndcg_at_1": 0.24658,
    "ndcg_at_10": 0.32614,
    "ndcg_at_100": 0.3961,
    "ndcg_at_1000": 0.42114,
    "ndcg_at_3": 0.28516,
    "ndcg_at_5": 0.30274,
    "precision_at_1": 0.24658,
    "precision_at_10": 0.06176,
    "precision_at_100": 0.0114,
    "precision_at_1000": 0.00155,
    "precision_at_3": 0.13927,
    "precision_at_5": 0.09954,
    "recall_at_1": 0.19742,
    "recall_at_10": 0.42427,
    "recall_at_100": 0.72687,
    "recall_at_1000": 0.8989,
    "recall_at_3": 0.30781,
    "recall_at_5": 0.35606
  }
}