mt5-base-koquad-qg/eval_pipeline/metric.first.answer.paragraph.questions_answers.lmqg_qg_koquad.default.lmqg_mt5-base-koquad-ae.json
{"test": {"QAAlignedF1Score (BERTScore)": 0.7726335127287766, "QAAlignedRecall (BERTScore)": 0.7825148680855808, "QAAlignedPrecision (BERTScore)": 0.7636550652753107, "QAAlignedF1Score (MoverScore)": 0.7751285349779129, "QAAlignedRecall (MoverScore)": 0.7894823891520028, "QAAlignedPrecision (MoverScore)": 0.7625635999601992, "Bleu_1": 0.06833412898142509, "Bleu_2": 0.03131042896446997, "Bleu_3": 0.013810027429988346, "Bleu_4": 0.006537886722235097, "METEOR": 0.17424823257962882, "ROUGE_L": 0.11320390621907929, "BERTScore": 0.6171934328694861, "MoverScore": 0.6182205583817931}, "validation": {"QAAlignedF1Score (BERTScore)": 0.7950995896620457, "QAAlignedRecall (BERTScore)": 0.779163660399457, "QAAlignedPrecision (BERTScore)": 0.8126357454519039, "QAAlignedF1Score (MoverScore)": 0.8127435891828816, "QAAlignedRecall (MoverScore)": 0.7943581421916257, "QAAlignedPrecision (MoverScore)": 0.8338724769334498, "Bleu_1": 0.20482767171726615, "Bleu_2": 0.12784666480834575, "Bleu_3": 0.06625572597537047, "Bleu_4": 0.034936839991585616, "METEOR": 0.21094983906056747, "ROUGE_L": 0.23027890589854744, "BERTScore": 0.7207316882287462, "MoverScore": 0.68107733751472}}