Tasks: Text Classification
Modalities: Text
Sub-tasks: hate-speech-detection
Languages: English
Size: 100K - 1M
License:
SamAdamDay committed on
Commit • e2317de • 1 Parent(s): 5c05f44
Update wiki_toxic.py
Files changed: wiki_toxic.py (+6 -0)
wiki_toxic.py
CHANGED
@@ -16,6 +16,8 @@
 
 import pandas as pd
 
+import os
+
 import datasets
 
 
@@ -106,6 +108,10 @@ class WikiToxic(datasets.GeneratorBasedBuilder):
     # method parameters are unpacked from `gen_kwargs` as given in `_split_generators`
     def _generate_examples(self, filepath, split):
 
+        cwd = os.getcwd()
+        print(cwd)
+        print(list(os.listdir(cwd)))
+
         df = pd.read_csv(filepath)
 
         for index, row in df.iterrows():
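For context, the added lines sit at the top of `_generate_examples` in the `WikiToxic` builder and simply print the current working directory and its contents before the CSV is read, presumably to debug a file-path issue. The following is a minimal standalone sketch of that patched logic, assuming only that `filepath` points to a readable CSV; the free function `generate_examples` and the absence of the surrounding `datasets.GeneratorBasedBuilder` class are simplifications for illustration, not the actual script.

import os

import pandas as pd


def generate_examples(filepath):
    """Standalone sketch of the patched generator logic; in the real script
    this body lives inside WikiToxic._generate_examples."""
    # Debug output added by the commit: show where the loader is running
    # and which files are visible there, to help diagnose path problems.
    cwd = os.getcwd()
    print(cwd)
    print(list(os.listdir(cwd)))

    # Read the split's CSV and yield (key, example) pairs as `datasets` expects.
    df = pd.read_csv(filepath)
    for index, row in df.iterrows():
        # Column names are not assumed here; the row is passed through as-is.
        yield index, dict(row)

Calling `next(generate_examples(some_csv_path))` prints the working-directory listing once and then returns the first (key, example) pair, mirroring what the patched loader would log when `datasets` first pulls from the generator.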