import os.path

try:
    import conllu
except ImportError:
    raise ImportError("please install the conllu package: `pip install conllu`")

import datasets

_CITATION = r"""\
@InProceedings{ZeldesHowellOrdanBenMoshe2022,
  author    = {Amir Zeldes and Nick Howell and Noam Ordan and Yifat Ben Moshe},
  booktitle = {Proceedings of {EMNLP} 2022},
  title     = {A Second Wave of UD Hebrew Treebanking and Cross-Domain Parsing},
  year      = {2022},
  address   = {Abu Dhabi, UAE},
}
"""

_DESCRIPTION = """\
Publicly available subset of the IAHLT UD Hebrew Treebank's Wikipedia section (https://www.iahlt.org/)
"""

_UD_DATASETS = {
    "train": "data/he_iahltwiki-ud-train.conllu",
"validation": "data/he_iahltwiki-ud-dev.conllu", |
|
"test": "data/he_iahltwiki-ud-test.conllu" |
|
} |
|
|
|
|
|
class UniversaldependenciesConfig(datasets.BuilderConfig):
    """BuilderConfig for Universal Dependencies."""

    def __init__(self, data_url, **kwargs):
        super(UniversaldependenciesConfig, self).__init__(version=datasets.Version("2.7.0", ""), **kwargs)
        # data_url points at the source treebank repository; the actual files
        # are fetched from _UD_DATASETS in _split_generators.
        self.data_url = data_url

class UniversalDependencies(datasets.GeneratorBasedBuilder):
    VERSION = datasets.Version("2.7.0")
    BUILDER_CONFIGS = [
        UniversaldependenciesConfig(
            name="he_iahltwiki",
            description=_DESCRIPTION,
            data_url="https://github.com/UniversalDependencies/UD_Hebrew-IAHLTwiki",
        )
    ]
    BUILDER_CONFIG_CLASS = UniversaldependenciesConfig

    def _info(self):
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "idx": datasets.Value("string"),
                    "text": datasets.Value("string"),
                    "tokens": datasets.Sequence(datasets.Value("string")),
                    "lemmas": datasets.Sequence(datasets.Value("string")),
                    "upos": datasets.Sequence(
                        datasets.features.ClassLabel(
                            names=[
                                "NOUN",
                                "PUNCT",
                                "ADP",
                                "NUM",
                                "SYM",
                                "SCONJ",
                                "ADJ",
                                "PART",
                                "DET",
                                "CCONJ",
                                "PROPN",
                                "PRON",
                                "X",
                                "_",
                                "ADV",
                                "INTJ",
                                "VERB",
                                "AUX",
                            ]
                        )
                    ),
                    "xpos": datasets.Sequence(datasets.Value("string")),
                    "feats": datasets.Sequence(datasets.Value("string")),
                    "head": datasets.Sequence(datasets.Value("string")),
                    "deprel": datasets.Sequence(datasets.Value("string")),
                    "deps": datasets.Sequence(datasets.Value("string")),
                    "misc": datasets.Sequence(datasets.Value("string")),
                }
            ),
            supervised_keys=None,
            homepage="https://www.iahlt.org/",
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators."""
        data_dir = dl_manager.download_and_extract(_UD_DATASETS)
        filepaths = {split: data_dir[split] for split in _UD_DATASETS}
        return [
            datasets.SplitGenerator(name=datasets.Split(split), gen_kwargs={"filepath": filepaths[split]})
            for split in filepaths
        ]

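    # Each .conllu file is a sequence of sentences: "#" metadata lines
    # (e.g. "# sent_id = ..." and "# text = ...") followed by one line per
    # token carrying the ten standard CoNLL-U columns
    # (ID, FORM, LEMMA, UPOS, XPOS, FEATS, HEAD, DEPREL, DEPS, MISC),
    # which map directly onto the feature names declared in _info above.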
    def _generate_examples(self, filepath):
        id_ = 0
        with open(filepath, "r", encoding="utf-8") as data_file:
            tokenlist = list(conllu.parse_incr(data_file))
            for sent in tokenlist:
                # Prefer the sentence id from the CoNLL-U metadata; fall back
                # to the running counter when it is missing.
                if "sent_id" in sent.metadata:
                    idx = sent.metadata["sent_id"]
                else:
                    idx = id_

                tokens = [token["form"] for token in sent]

                # Use the raw sentence text when available; otherwise rebuild
                # it by joining the token forms with spaces.
                if "text" in sent.metadata:
                    txt = sent.metadata["text"]
                else:
                    txt = " ".join(tokens)

                yield id_, {
                    "idx": str(idx),
                    "text": txt,
                    "tokens": tokens,
                    "lemmas": [token["lemma"] for token in sent],
                    "upos": [token["upos"] for token in sent],
                    "xpos": [token["xpos"] for token in sent],
                    "feats": [str(token["feats"]) for token in sent],
                    "head": [str(token["head"]) for token in sent],
                    "deprel": [str(token["deprel"]) for token in sent],
                    "deps": [str(token["deps"]) for token in sent],
                    "misc": [str(token["misc"]) for token in sent],
                }
                id_ += 1
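

# A minimal usage sketch, assuming this script is saved locally (e.g. as
# "he_iahltwiki.py") next to the data/ directory referenced in _UD_DATASETS,
# with a `datasets` version that still supports dataset loading scripts (e.g. 2.x).
if __name__ == "__main__":
    ds = datasets.load_dataset(__file__, name="he_iahltwiki")
    print(ds)
    # Show the first validation sentence: token forms paired with their UPOS
    # tags (ClassLabel values are stored as ints, so map them back to names).
    example = ds["validation"][0]
    upos_names = ds["validation"].features["upos"].feature.int2str(example["upos"])
    print(example["text"])
    print(list(zip(example["tokens"], upos_names)))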