Datasets:
Fix DuplicatedKeysError
Browse files
hkcancor.py: +3 −2
hkcancor.py
CHANGED
@@ -245,7 +245,7 @@ class Hkcancor(datasets.GeneratorBasedBuilder):
|
|
245 |
|
246 |
def _generate_examples(self, data_dir, split):
|
247 |
"""Yields examples."""
|
248 |
-
|
249 |
downloaded_files = [os.path.join(data_dir, fn) for fn in sorted(os.listdir(data_dir))]
|
250 |
for filepath in downloaded_files:
|
251 |
# Each file in the corpus contains one conversation
|
@@ -301,7 +301,7 @@ class Hkcancor(datasets.GeneratorBasedBuilder):
|
|
301 |
# to create an identifier.
|
302 |
id_from_transcriptions = "".join(transcriptions[:5])[:5].upper()
|
303 |
id_ = f"{tape_number}-{date_recorded}-{id_from_transcriptions}"
|
304 |
-
yield id_, {
|
305 |
"conversation_id": id_,
|
306 |
"speaker": current_speaker,
|
307 |
"turn_number": turn_number,
|
@@ -310,3 +310,4 @@ class Hkcancor(datasets.GeneratorBasedBuilder):
|
|
310 |
"pos_tags_prf": pos_prf,
|
311 |
"pos_tags_ud": pos_ud,
|
312 |
}
|
|
|
|
245 |
|
246 |
def _generate_examples(self, data_dir, split):
|
247 |
"""Yields examples."""
|
248 |
+
key = 0
|
249 |
downloaded_files = [os.path.join(data_dir, fn) for fn in sorted(os.listdir(data_dir))]
|
250 |
for filepath in downloaded_files:
|
251 |
# Each file in the corpus contains one conversation
|
|
|
301 |
# to create an identifier.
|
302 |
id_from_transcriptions = "".join(transcriptions[:5])[:5].upper()
|
303 |
id_ = f"{tape_number}-{date_recorded}-{id_from_transcriptions}"
|
304 |
+
yield key, {
|
305 |
"conversation_id": id_,
|
306 |
"speaker": current_speaker,
|
307 |
"turn_number": turn_number,
|
|
|
310 |
"pos_tags_prf": pos_prf,
|
311 |
"pos_tags_ud": pos_ud,
|
312 |
}
|
313 |
+
key += 1
|