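"""Hugging Face `datasets` loading script for UD_Hebrew-IAHLTwiki.

Reads the publicly available Wikipedia section of the IAHLT UD Hebrew
Treebank (https://www.iahlt.org/) from CoNLL-U files and exposes it as
train/validation/test splits.
"""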


try:
    import conllu
except ImportError:
    raise ImportError("Please install the `conllu` package: `pip install conllu`")

import datasets


_CITATION = r"""\
@InProceedings{ZeldesHowellOrdanBenMoshe2022,
  author    = {Amir Zeldes and Nick Howell and Noam Ordan and Yifat Ben Moshe},
  booktitle = {Proceedings of {EMNLP} 2022},
  title     = {A Second Wave of UD Hebrew Treebanking and Cross-Domain Parsing},
  year      = {2022},
  address   = {Abu Dhabi, UAE},
}
"""  # noqa: W605

_DESCRIPTION = """\
Publicly available subset of the IAHLT UD Hebrew Treebank's Wikipedia section (https://www.iahlt.org/)
"""


# Split-to-file mapping, relative to the dataset repository root. File names
# follow the UD release convention; the train entry assumes the train file is
# shipped under data/ alongside the dev and test files.
_UD_DATASETS = {
    "train": "data/he_iahltwiki-ud-train.conllu",
    "validation": "data/he_iahltwiki-ud-dev.conllu",
    "test": "data/he_iahltwiki-ud-test.conllu",
}


class UniversaldependenciesConfig(datasets.BuilderConfig):
    """BuilderConfig for Universal dependencies"""

    def __init__(self, data_url, **kwargs):
        super().__init__(version=datasets.Version("2.7.0", ""), **kwargs)

        self.data_url = data_url


class UniversalDependencies(datasets.GeneratorBasedBuilder):
    VERSION = datasets.Version("2.7.0")
    BUILDER_CONFIGS = [
        UniversaldependenciesConfig(
            name="he_iahltwiki",
            description=_DESCRIPTION,
            data_url="https://github.com/UniversalDependencies/UD_Hebrew-IAHLTwiki",
        )
    ]
    BUILDER_CONFIG_CLASS = UniversaldependenciesConfig

    def _info(self):
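        # The schema mirrors the CoNLL-U token columns (FORM through MISC),
        # plus sentence-level id and text; UPOS is a ClassLabel, everything
        # else is kept as plain strings.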
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "idx": datasets.Value("string"),
                    "text": datasets.Value("string"),
                    "tokens": datasets.Sequence(datasets.Value("string")),
                    "lemmas": datasets.Sequence(datasets.Value("string")),
                    "upos": datasets.Sequence(
                        datasets.features.ClassLabel(
                            names=[
                                "NOUN",
                                "PUNCT",
                                "ADP",
                                "NUM",
                                "SYM",
                                "SCONJ",
                                "ADJ",
                                "PART",
                                "DET",
                                "CCONJ",
                                "PROPN",
                                "PRON",
                                "X",
                                "_",
                                "ADV",
                                "INTJ",
                                "VERB",
                                "AUX",
                            ]
                        )
                    ),
                    "xpos": datasets.Sequence(datasets.Value("string")),
                    "feats": datasets.Sequence(datasets.Value("string")),
                    "head": datasets.Sequence(datasets.Value("string")),
                    "deprel": datasets.Sequence(datasets.Value("string")),
                    "deps": datasets.Sequence(datasets.Value("string")),
                    "misc": datasets.Sequence(datasets.Value("string")),
                }
            ),
            supervised_keys=None,
            homepage="https://www.iahlt.org/",
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Returns SplitGenerators."""
        data_dir = dl_manager.download_and_extract(_UD_DATASETS)
        return [
            datasets.SplitGenerator(name=datasets.Split(split), gen_kwargs={"filepath": data_dir[split]})
            for split in data_dir
        ]

    def _generate_examples(self, filepath):
        id_ = 0
        with open(filepath, "r", encoding="utf-8") as data_file:
            # conllu.parse_incr streams sentences lazily, so the whole
            # treebank is never materialized in memory at once.
            for sent in conllu.parse_incr(data_file):
                if "sent_id" in sent.metadata:
                    idx = sent.metadata["sent_id"]
                else:
                    idx = id_

                tokens = [token["form"] for token in sent]

                if "text" in sent.metadata:
                    txt = sent.metadata["text"]
                else:
                    txt = " ".join(tokens)

                # feats/head/deprel/deps/misc may be nested structures in
                # conllu's output, so they are cast to strings to match the
                # flat string schema declared in _info.
                yield id_, {
                    "idx": str(idx),
                    "text": txt,
                    "tokens": tokens,
                    "lemmas": [token["lemma"] for token in sent],
                    "upos": [token["upos"] for token in sent],
                    "xpos": [token["xpos"] for token in sent],
                    "feats": [str(token["feats"]) for token in sent],
                    "head": [str(token["head"]) for token in sent],
                    "deprel": [str(token["deprel"]) for token in sent],
                    "deps": [str(token["deps"]) for token in sent],
                    "misc": [str(token["misc"]) for token in sent],
                }
                id_ += 1
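

if __name__ == "__main__":
    # Minimal smoke-test sketch, assuming a `datasets` version that supports
    # script-based loading (older releases, or newer ones with
    # `trust_remote_code=True`); the path-based call is illustrative.
    dataset = datasets.load_dataset(__file__)
    print(dataset["test"][0]["tokens"])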