lodestone-base-4096-v1 / mteb_results / CQADupstackUnixRetrieval.json
dylanAtHum's picture
Add CQADupstack Benchmarks
c25c4c3 unverified
raw
history blame contribute delete
992 Bytes
{
"dataset_revision": null,
"mteb_dataset_name": "CQADupstackUnixRetrieval",
"mteb_version": "1.1.0",
"test": {
"evaluation_time": 259.44,
"map_at_1": 0.18573,
"map_at_10": 0.25009,
"map_at_100": 0.26015,
"map_at_1000": 0.26137,
"map_at_3": 0.22798,
"map_at_5": 0.24092,
"mrr_at_1": 0.22108,
"mrr_at_10": 0.28646,
"mrr_at_100": 0.29478,
"mrr_at_1000": 0.2957,
"mrr_at_3": 0.26415,
"mrr_at_5": 0.27693,
"ndcg_at_1": 0.22108,
"ndcg_at_10": 0.2942,
"ndcg_at_100": 0.34385,
"ndcg_at_1000": 0.37572,
"ndcg_at_3": 0.25274,
"ndcg_at_5": 0.27315,
"precision_at_1": 0.22108,
"precision_at_10": 0.05093,
"precision_at_100": 0.00859,
"precision_at_1000": 0.00124,
"precision_at_3": 0.11474,
"precision_at_5": 0.08321,
"recall_at_1": 0.18573,
"recall_at_10": 0.39433,
"recall_at_100": 0.61597,
"recall_at_1000": 0.8469,
"recall_at_3": 0.27849,
"recall_at_5": 0.33203
}
}