Tasks: Text Classification
Modalities: Text
Sub-tasks: hate-speech-detection
Languages: English
Size: 100K - 1M
License:

SamAdamDay committed
Commit 872656a · 1 Parent(s): ffe742b

Update wiki_toxic.py

Changed files: wiki_toxic.py +0 -6
wiki_toxic.py CHANGED

@@ -16,8 +16,6 @@
 
 import pandas as pd
 
-import os
-
 import datasets
 
 
@@ -116,10 +114,6 @@ class WikiToxic(datasets.GeneratorBasedBuilder):
     # method parameters are unpacked from `gen_kwargs` as given in `_split_generators`
     def _generate_examples(self, filepath, split):
 
-        cwd = os.getcwd()
-        print(cwd)
-        print(list(os.listdir(cwd)))
-
         df = pd.read_csv(filepath)
 
         for index, row in df.iterrows():
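After this commit, `_generate_examples` only reads the split's CSV file with pandas and yields one example per row, with the leftover debugging code (`os.getcwd()` and the `print` calls) removed. Below is a minimal sketch of that pattern in a `datasets.GeneratorBasedBuilder` loading script; the class name `WikiToxicSketch` and the column names (`id`, `comment_text`, `label`) are illustrative assumptions, not taken from the diff above.

import pandas as pd

import datasets


class WikiToxicSketch(datasets.GeneratorBasedBuilder):
    # Only the generation step is sketched here; `_info` and `_split_generators`
    # are omitted for brevity.

    # method parameters are unpacked from `gen_kwargs` as given in `_split_generators`
    def _generate_examples(self, filepath, split):
        # Read the split's CSV and yield one (key, example) pair per row.
        df = pd.read_csv(filepath)

        for index, row in df.iterrows():
            # Column names are assumptions for illustration; the real schema is
            # whatever the script's `_info()` features declare.
            yield index, {
                "id": row.get("id", index),
                "comment_text": row["comment_text"],
                "label": row["label"],
            }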