{
"config": {
"model_name": "01-ai/Yi-1.5-9B",
"model_dtype": "bfloat16",
"model_sha": "main"
},
"results": {
"clindiagnoses": {
"alias": "clindiagnoses",
"sas_encoder,none": 0.5800846815109253,
"sas_encoder_stderr,none": 0.019033835266139006,
"sas_cross_encoder,none": 0.25228461623191833,
"sas_cross_encoder_stderr,none": 0.042302072127339836
},
"clintreates": {
"alias": "clintreates",
"sas_encoder,none": 0.6297488111642099,
"sas_encoder_stderr,none": 0.015866128656859723,
"sas_cross_encoder,none": 0.1327628279118618,
"sas_cross_encoder_stderr,none": 0.029757891355050328
},
"humorqa": {
"alias": "humorqa",
"acc,none": 0.49019607843137253,
"acc_stderr,none": 0.07069708383262727
},
"spalawex": {
"alias": "spalawex",
"acc,none": 0.3865546218487395,
"acc_stderr,none": 0.044828311477983385
},
"offendes": {
"alias": "offendes",
"acc,none": 0.7539320887843598,
"acc_stderr,none": 0.0036927026013064282
},
"belebele_glg_Latn": {
"alias": "belebele_glg_Latn",
"acc,none": 0.8377777777777777,
"acc_stderr,none": 0.01229531743163722,
"acc_norm,none": 0.8377777777777777,
"acc_norm_stderr,none": 0.01229531743163722
},
"galcola": {
"alias": "galcola",
"acc,none": 0.5506143943826799,
"acc_stderr,none": 0.012036200837874312,
"mcc,none": 0.12530690497937558,
"mcc_stderr,none": 0.02369727968550613
},
"mgsm_direct_gl": {
"alias": "mgsm_direct_gl",
"exact_match,none": 0.112,
"exact_match_stderr,remove_whitespace": 0.019985536939171412
},
"openbookqa_gl": {
"alias": "openbookqa_gl",
"acc,none": 0.262,
"acc_stderr,none": 0.01968468882019471,
"acc_norm,none": 0.342,
"acc_norm_stderr,none": 0.021236147199899257
},
"parafrases_gl": {
"alias": "parafrases_gl",
"acc,none": 0.54421768707483,
"acc_stderr,none": 0.029095863454032
},
"paws_gl": {
"alias": "paws_gl",
"acc,none": 0.651,
"acc_stderr,none": 0.01066097219600938
},
"bhtc_v2": {
"alias": "bhtc_v2",
"f1,none": 0.2756202804746494,
"f1_stderr,none": "N/A"
},
"epec_koref_bin": {
"alias": "epec_koref_bin",
"acc,none": 0.524701873935264,
"acc_stderr,none": 0.020629587810113103
},
"arc_ca_aina": {
"acc,none": 0.4608229988726043,
"acc_stderr,none": 0.00824770675710489,
"acc_norm,none": 0.48167981961668543,
"acc_norm_stderr,none": 0.008298312278039033,
"alias": "arc_ca_aina"
},
"cabreu": {
"bleu,none": 18.199952262736392,
"bleu_stderr,none": 0.4914708540369602,
"alias": "cabreu"
},
"catalanqa": {
"alias": "catalanqa",
"f1,none": 0.8440248749951488,
"f1_stderr,none": 0.005915625276124113,
"exact_match,none": 0.6243559718969555,
"exact_match_stderr,none": 0.010483521059360035
},
"catcola": {
"alias": "catcola",
"acc,none": 0.0,
"acc_stderr,none": 0.0,
"mcc,none": 0.0,
"mcc_stderr,none": 0.0
},
"copa_ca": {
"alias": "copa_ca",
"acc,none": 0.478,
"acc_stderr,none": 0.022361396739207867
},
"coqcat": {
"alias": "coqcat",
"em,none": 0.4583333333333333,
"em_stderr,none": 0.020358387002667906,
"f1,none": 0.7195393469891023,
"f1_stderr,none": 0.014198648110011338
},
"mgsm_direct_ca": {
"alias": "mgsm_direct_ca",
"exact_match,none": 0.124,
"exact_match_stderr,remove_whitespace": 0.02088638225867326
},
"openbookqa_ca": {
"alias": "openbookqa_ca",
"acc,none": 0.266,
"acc_stderr,none": 0.01978055967565549,
"acc_norm,none": 0.336,
"acc_norm_stderr,none": 0.021144791425048864
},
"parafraseja": {
"alias": "parafraseja",
"acc,none": 0.65825,
"acc_stderr,none": 0.007500219843406372
},
"paws_ca": {
"alias": "paws_ca",
"acc,none": 0.0,
"acc_stderr,none": 0.0
},
"piqa_ca": {
"alias": "piqa_ca",
"acc,none": 0.5897714907508161,
"acc_stderr,none": 0.011476256036359104,
"acc_norm,none": 0.5968443960826986,
"acc_norm_stderr,none": 0.011444908701768744
},
"siqa_ca": {
"alias": "siqa_ca",
"acc,none": 0.417093142272262,
"acc_stderr,none": 0.011157450926787528
},
"teca": {
"alias": "teca",
"acc,none": 0.5200755786490316,
"acc_stderr,none": 0.010860800186491104
},
"wnli_ca": {
"alias": "wnli_ca",
"acc,none": 0.704225352112676,
"acc_stderr,none": 0.05454906121418899
},
"xnli_ca": {
"alias": "xnli_ca",
"acc,none": 0.0,
"acc_stderr,none": 0.0
},
"xquad_ca": {
"alias": "xquad_ca",
"f1,none": 0.7841556719361394,
"f1_stderr,none": 0.009542894821789,
"exact_match,none": 0.5618166526492852,
"exact_match_stderr,none": 0.014395177288629534
},
"xstorycloze_ca": {
"alias": "xstorycloze_ca",
"acc,none": 0.6300463269358041,
"acc_stderr,none": 0.012424286002923873
},
"escola": {
"alias": "escola",
"acc,none": 0.0,
"acc_stderr,none": 0.0,
"mcc,none": 0.0,
"mcc_stderr,none": 0.0
},
"mgsm_direct_es": {
"alias": "mgsm_direct_es",
"exact_match,none": 0.0,
"exact_match_stderr,remove_whitespace": 0.0,
"exact_match,flexible-extract": 0.616,
"exact_match_stderr,flexible-extract": 0.03082167911737538
},
"paws_es": {
"alias": "paws_es",
"acc,none": 0.7105,
"acc_stderr,none": 0.010143782487887826
},
"xnli_es": {
"alias": "xnli_es",
"acc,none": 0.5108433734939759,
"acc_stderr,none": 0.010019715824483485
},
"xquad_es": {
"alias": "xquad_es",
"f1,none": 0.795205015219572,
"f1_stderr,none": 0.009349024230760691,
"exact_match,none": 0.5899159663865546,
"exact_match_stderr,none": 0.014263975307750951
},
"xstorycloze_es": {
"alias": "xstorycloze_es",
"acc,none": 0.7107875579086698,
"acc_stderr,none": 0.01166782538830548
},
"wnli_es": {
"alias": "wnli_es",
"acc,none": 0.7323943661971831,
"acc_stderr,none": 0.05291406220869697
},
"xcopa_eu": {
"alias": "xcopa_eu",
"acc,none": 0.512,
"acc_stderr,none": 0.02237662679792717
},
"xnli_eu": {
"alias": "xnli_eu",
"acc,none": 0.3459081836327345,
"acc_stderr,none": 0.006720850892530784
},
"aquas": {
"alias": "aquas",
"sas_encoder,none": 0.6580004148951201,
"sas_encoder_stderr,none": 0.021432393201974625,
"sas_cross_encoder,none": 0.5905164166775206,
"sas_cross_encoder_stderr,none": 0.044260293917577194
},
"noticia": {
"alias": "noticia",
"rouge1,none": 0.00476504232441089,
"rouge1_stderr,none": "N/A",
"average_len,none": 2.43,
"average_len_stderr,none": "N/A"
},
"ragquas": {
"alias": "ragquas",
"sas_encoder,none": 0.7740777078552625,
"sas_encoder_stderr,none": 0.008097854357596865,
"sas_cross_encoder,none": 0.9723029281147374,
"sas_cross_encoder_stderr,none": 0.008351199659097683
},
"teleia": {
"acc,none": 0.5714285714285714,
"acc_stderr,none": 0.11603326600963051,
"acc_norm,none": 0.5238095238095238,
"acc_norm_stderr,none": 0.11744959195991064,
"alias": "teleia"
},
"bertaqa_eu": {
"alias": "bertaqa_eu",
"acc,none": 0.44470142977291843,
"acc_stderr,none": 0.007206464969500754
},
"vaxx_stance": {
"alias": "vaxx_stance",
"f1,none": 0.11237513873473917,
"f1_stderr,none": "N/A"
},
"bec2016eu": {
"alias": "bec2016eu",
"f1,none": 0.4877112135176651,
"f1_stderr,none": "N/A"
},
"eus_exams_eu": {
"acc,none": 0.3548715339608242,
"acc_stderr,none": 0.003803679290082184,
"acc_norm,none": 0.3548715339608242,
"acc_norm_stderr,none": 0.003803679290082184,
"alias": "eus_exams_eu"
},
"eus_proficiency": {
"alias": "eus_proficiency",
"acc,none": 0.2602050686786612,
"acc_stderr,none": 0.006103125482608259
},
"eus_reading": {
"alias": "eus_reading",
"acc,none": 0.3096590909090909,
"acc_stderr,none": 0.02467857984010322
},
"eus_trivia": {
"alias": "eus_trivia",
"acc,none": 0.3772594752186589,
"acc_stderr,none": 0.011707609638185693
},
"mgsm_direct_eu": {
"alias": "mgsm_direct_eu",
"exact_match,none": 0.044,
"exact_match_stderr,remove_whitespace": 0.012997373846574952,
"exact_match,flexible-extract": 0.044,
"exact_match_stderr,flexible-extract": 0.012997373846574952
},
"qnlieu": {
"alias": "qnlieu",
"acc,none": 0.5126050420168067,
"acc_stderr,none": 0.032468167657521745
},
"summarization_gl": {
"alias": "summarization_gl",
"bleu,none": 7.999565582623752,
"bleu_stderr,none": 0.1552052450816263,
"rouge1,none": 0.2719953519712161,
"rouge1_stderr,none": "N/A"
},
"wiceu": {
"alias": "wiceu",
"acc,none": 0.5135714285714286,
"acc_stderr,none": 0.013362911994009222
},
"xstorycloze_eu": {
"alias": "xstorycloze_eu",
"acc,none": 0.5248180013236268,
"acc_stderr,none": 0.012851264962354853
},
"crows_pairs_spanish": {
"alias": "crows_pairs_spanish",
"likelihood_diff,none": 4.86366069735558,
"likelihood_diff_stderr,none": 0.11438962199592621,
"pct_stereotype,none": 0.5964214711729622,
"pct_stereotype_stderr,none": 0.012633974340004765
},
"fake_news_es": {
"alias": "fake_news_es",
"acc,none": 0.5874125874125874,
"acc_stderr,none": 0.020602103150501008
}
}
}