{
    "epoch": 3.0,
    "test_art-broadcastprogram": {
        "f1": 0.5963149078726968,
        "number": 603,
        "precision": 0.6023688663282571,
        "recall": 0.5903814262023217
    },
    "test_art-film": {
        "f1": 0.7645466847090664,
        "number": 750,
        "precision": 0.7760989010989011,
        "recall": 0.7533333333333333
    },
    "test_art-music": {
        "f1": 0.7685459940652819,
        "number": 1029,
        "precision": 0.7824773413897281,
        "recall": 0.7551020408163265
    },
    "test_art-other": {
        "f1": 0.37103174603174605,
        "number": 562,
        "precision": 0.4192825112107623,
        "recall": 0.33274021352313166
    },
    "test_art-painting": {
        "f1": 0.5555555555555555,
        "number": 57,
        "precision": 0.5882352941176471,
        "recall": 0.5263157894736842
    },
    "test_art-writtenart": {
        "f1": 0.6649020645844362,
        "number": 968,
        "precision": 0.6818675352877307,
        "recall": 0.6487603305785123
    },
    "test_building-airport": {
        "f1": 0.8205128205128206,
        "number": 364,
        "precision": 0.8063660477453581,
        "recall": 0.8351648351648352
    },
    "test_building-hospital": {
        "f1": 0.7633986928104576,
        "number": 364,
        "precision": 0.7281795511221946,
        "recall": 0.8021978021978022
    },
    "test_building-hotel": {
        "f1": 0.7137546468401488,
        "number": 265,
        "precision": 0.7032967032967034,
        "recall": 0.7245283018867924
    },
    "test_building-library": {
        "f1": 0.7464387464387464,
        "number": 355,
        "precision": 0.7550432276657061,
        "recall": 0.7380281690140845
    },
    "test_building-other": {
        "f1": 0.5853370122191565,
        "number": 2543,
        "precision": 0.5867246147767681,
        "recall": 0.5839559575304758
    },
    "test_building-restaurant": {
        "f1": 0.5667447306791569,
        "number": 232,
        "precision": 0.6205128205128205,
        "recall": 0.521551724137931
    },
    "test_building-sportsfacility": {
        "f1": 0.6921487603305786,
        "number": 420,
        "precision": 0.6113138686131386,
        "recall": 0.7976190476190477
    },
    "test_building-theater": {
        "f1": 0.7270788912579957,
        "number": 455,
        "precision": 0.7060041407867494,
        "recall": 0.7494505494505495
    },
    "test_event-attack/battle/war/militaryconflict": {
        "f1": 0.7660377358490565,
        "number": 1098,
        "precision": 0.7945205479452054,
        "recall": 0.7395264116575592
    },
    "test_event-disaster": {
        "f1": 0.5603864734299517,
        "number": 207,
        "precision": 0.5603864734299517,
        "recall": 0.5603864734299517
    },
    "test_event-election": {
        "f1": 0.22040816326530616,
        "number": 182,
        "precision": 0.42857142857142855,
        "recall": 0.14835164835164835
    },
    "test_event-other": {
        "f1": 0.4629404617253949,
        "number": 866,
        "precision": 0.48846153846153845,
        "recall": 0.4399538106235566
    },
    "test_event-protest": {
        "f1": 0.42245989304812837,
        "number": 166,
        "precision": 0.3798076923076923,
        "recall": 0.4759036144578313
    },
    "test_event-sportsevent": {
        "f1": 0.6179955171309639,
        "number": 1566,
        "precision": 0.619781631342325,
        "recall": 0.6162196679438059
    },
    "test_location-GPE": {
        "f1": 0.8349881570447639,
        "number": 20405,
        "precision": 0.8157255048616305,
        "recall": 0.8551825532957609
    },
    "test_location-bodiesofwater": {
        "f1": 0.7472984206151289,
        "number": 1169,
        "precision": 0.7267582861762328,
        "recall": 0.7690333618477331
    },
    "test_location-island": {
        "f1": 0.7157894736842105,
        "number": 646,
        "precision": 0.7504244482173175,
        "recall": 0.6842105263157895
    },
    "test_location-mountain": {
        "f1": 0.7324981577008107,
        "number": 681,
        "precision": 0.735207100591716,
        "recall": 0.7298091042584435
    },
    "test_location-other": {
        "f1": 0.36490474912798493,
        "number": 2191,
        "precision": 0.4427083333333333,
        "recall": 0.31036056595162026
    },
    "test_location-park": {
        "f1": 0.7001114827201783,
        "number": 458,
        "precision": 0.715261958997722,
        "recall": 0.6855895196506551
    },
    "test_location-road/railway/highway/transit": {
        "f1": 0.7204861111111112,
        "number": 1700,
        "precision": 0.708997722095672,
        "recall": 0.7323529411764705
    },
    "test_loss": 0.022335968911647797,
    "test_organization-company": {
        "f1": 0.7011596788581624,
        "number": 3896,
        "precision": 0.6962794229309036,
        "recall": 0.7061088295687885
    },
    "test_organization-education": {
        "f1": 0.7990314769975787,
        "number": 2066,
        "precision": 0.7994186046511628,
        "recall": 0.7986447241045499
    },
    "test_organization-government/governmentagency": {
        "f1": 0.49800072700836057,
        "number": 1511,
        "precision": 0.5524193548387096,
        "recall": 0.45334215751158174
    },
    "test_organization-media/newspaper": {
        "f1": 0.6583701324769169,
        "number": 1232,
        "precision": 0.6513105639396346,
        "recall": 0.6655844155844156
    },
    "test_organization-other": {
        "f1": 0.5660735468564649,
        "number": 4439,
        "precision": 0.59784515159108,
        "recall": 0.5375084478486145
    },
    "test_organization-politicalparty": {
        "f1": 0.704431247144815,
        "number": 1054,
        "precision": 0.6792951541850221,
        "recall": 0.7314990512333965
    },
    "test_organization-religion": {
        "f1": 0.583982990786676,
        "number": 672,
        "precision": 0.557510148849797,
        "recall": 0.6130952380952381
    },
    "test_organization-showorganization": {
        "f1": 0.5935228023793787,
        "number": 769,
        "precision": 0.603494623655914,
        "recall": 0.5838751625487646
    },
    "test_organization-sportsleague": {
        "f1": 0.6499442586399109,
        "number": 882,
        "precision": 0.6392543859649122,
        "recall": 0.6609977324263039
    },
    "test_organization-sportsteam": {
        "f1": 0.7518034704620783,
        "number": 2473,
        "precision": 0.7259036144578314,
        "recall": 0.779619894864537
    },
    "test_other-astronomything": {
        "f1": 0.7906976744186047,
        "number": 678,
        "precision": 0.7793696275071633,
        "recall": 0.8023598820058997
    },
    "test_other-award": {
        "f1": 0.6903954802259886,
        "number": 919,
        "precision": 0.717978848413631,
        "recall": 0.6648531011969532
    },
    "test_other-biologything": {
        "f1": 0.6536203522504893,
        "number": 1874,
        "precision": 0.6864357017028773,
        "recall": 0.6237993596584845
    },
    "test_other-chemicalthing": {
        "f1": 0.5856459330143541,
        "number": 1014,
        "precision": 0.5687732342007435,
        "recall": 0.6035502958579881
    },
    "test_other-currency": {
        "f1": 0.7643384440658716,
        "number": 799,
        "precision": 0.6995841995841996,
        "recall": 0.8423028785982478
    },
    "test_other-disease": {
        "f1": 0.6976744186046512,
        "number": 749,
        "precision": 0.6591448931116389,
        "recall": 0.7409879839786382
    },
    "test_other-educationaldegree": {
        "f1": 0.615595075239398,
        "number": 363,
        "precision": 0.6114130434782609,
        "recall": 0.6198347107438017
    },
    "test_other-god": {
        "f1": 0.6816143497757848,
        "number": 635,
        "precision": 0.6486486486486487,
        "recall": 0.7181102362204724
    },
    "test_other-language": {
        "f1": 0.7300291545189505,
        "number": 753,
        "precision": 0.6507276507276507,
        "recall": 0.8313413014608234
    },
    "test_other-law": {
        "f1": 0.7126673532440783,
        "number": 472,
        "precision": 0.6933867735470942,
        "recall": 0.7330508474576272
    },
    "test_other-livingthing": {
        "f1": 0.6298342541436465,
        "number": 863,
        "precision": 0.6019007391763463,
        "recall": 0.660486674391657
    },
    "test_other-medical": {
        "f1": 0.5168539325842697,
        "number": 397,
        "precision": 0.5123762376237624,
        "recall": 0.5214105793450882
    },
    "test_overall_accuracy": 0.9256893595441806,
    "test_overall_f1": 0.703084859534267,
    "test_overall_precision": 0.7034273336857051,
    "test_overall_recall": 0.7027427186979075,
    "test_person-actor": {
        "f1": 0.8214397008413836,
        "number": 1637,
        "precision": 0.8384223918575063,
        "recall": 0.8051313378130727
    },
    "test_person-artist/author": {
        "f1": 0.7320701754385964,
        "number": 3463,
        "precision": 0.7121791370835608,
        "recall": 0.7531042448743863
    },
    "test_person-athlete": {
        "f1": 0.8370089593383873,
        "number": 2884,
        "precision": 0.8318493150684931,
        "recall": 0.8422330097087378
    },
    "test_person-director": {
        "f1": 0.7221238938053098,
        "number": 554,
        "precision": 0.7083333333333334,
        "recall": 0.7364620938628159
    },
    "test_person-other": {
        "f1": 0.6784606547960942,
        "number": 8767,
        "precision": 0.6833275483049867,
        "recall": 0.6736625983802897
    },
    "test_person-politician": {
        "f1": 0.6821515892420537,
        "number": 2857,
        "precision": 0.6807249912861624,
        "recall": 0.6835841792089604
    },
    "test_person-scholar": {
        "f1": 0.5301369863013699,
        "number": 743,
        "precision": 0.5397489539748954,
        "recall": 0.5208613728129206
    },
    "test_person-soldier": {
        "f1": 0.5451957295373664,
        "number": 647,
        "precision": 0.5052770448548812,
        "recall": 0.5919629057187017
    },
    "test_product-airplane": {
        "f1": 0.6654111738857501,
        "number": 792,
        "precision": 0.66167290886392,
        "recall": 0.6691919191919192
    },
    "test_product-car": {
        "f1": 0.7221812822402359,
        "number": 687,
        "precision": 0.7313432835820896,
        "recall": 0.7132459970887919
    },
    "test_product-food": {
        "f1": 0.5787037037037037,
        "number": 432,
        "precision": 0.5787037037037037,
        "recall": 0.5787037037037037
    },
    "test_product-game": {
        "f1": 0.7250257466529352,
        "number": 493,
        "precision": 0.7364016736401674,
        "recall": 0.7139959432048681
    },
    "test_product-other": {
        "f1": 0.4794617563739376,
        "number": 1608,
        "precision": 0.5567434210526315,
        "recall": 0.4210199004975124
    },
    "test_product-ship": {
        "f1": 0.6842105263157895,
        "number": 380,
        "precision": 0.6842105263157895,
        "recall": 0.6842105263157895
    },
    "test_product-software": {
        "f1": 0.6570316842690384,
        "number": 889,
        "precision": 0.6494505494505495,
        "recall": 0.6647919010123734
    },
    "test_product-train": {
        "f1": 0.5933014354066984,
        "number": 314,
        "precision": 0.5942492012779552,
        "recall": 0.5923566878980892
    },
    "test_product-weapon": {
        "f1": 0.584426946631671,
        "number": 624,
        "precision": 0.6435452793834296,
        "recall": 0.5352564102564102
    },
    "test_runtime": 242.3784,
    "test_samples_per_second": 189.732,
    "test_steps_per_second": 5.933
}