hassan4830 committed
Commit · 8183814
1 Parent(s): 86d4036
Delete text_classifier.ipynb
Files changed: text_classifier.ipynb (+0, -286)
text_classifier.ipynb
DELETED
@@ -1,286 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "1bce8bb2",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/home/administrator/.local/lib/python3.8/site-packages/tqdm/auto.py:22: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
-      "  from .autonotebook import tqdm as notebook_tqdm\n",
-      "/usr/lib/python3/dist-packages/requests/__init__.py:89: RequestsDependencyWarning: urllib3 (1.26.10) or chardet (3.0.4) doesn't match a supported version!\n",
-      "  warnings.warn(\"urllib3 ({}) or chardet ({}) doesn't match a supported \"\n"
-     ]
-    }
-   ],
-   "source": [
-    "import pickle\n",
-    "model = pickle.load(open(\"dist_bert_uncased.pkl\",\"rb\"))"
-   ]
-  },
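
The cell above restores a fine-tuned DistilBERT from a pickle file, which ties the checkpoint to the exact transformers version that produced it. A minimal sketch of the more portable route via save_pretrained/from_pretrained; "./dist_bert_uncased" is a directory name I am assuming, not a path from the notebook:

    # Hypothetical alternative to the pickle round-trip; the checkpoint
    # directory name is assumed, not taken from the notebook.
    from transformers import AutoModelForSequenceClassification

    model.save_pretrained("./dist_bert_uncased")
    model = AutoModelForSequenceClassification.from_pretrained("./dist_bert_uncased")
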
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "27ef1fb0",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Reusing dataset imdb (/home/administrator/.cache/huggingface/datasets/imdb/plain_text/1.0.0/2fdd8b9bcadd6e7055e742a706876ba43f19faee861df134affd7a3f60fc38a1)\n",
-      "100%|████████████████████████████████████████████| 3/3 [00:00<00:00, 96.60it/s]\n"
-     ]
-    }
-   ],
-   "source": [
-    "from datasets import load_dataset\n",
-    "imdb = load_dataset(\"imdb\")"
-   ]
-  },
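
load_dataset("imdb") returns a DatasetDict with train, test, and unsupervised splits; the 25/25/50 progress bars in the map cell further down correspond to those split sizes (25k, 25k, and 50k rows, mapped in the default batches of 1,000). A quick check, reusing the imdb object from the cell above:

    # Inspect the splits; the sizes are the standard IMDB figures.
    print(imdb)  # DatasetDict with 'train', 'test', 'unsupervised'
    print(imdb["train"].num_rows, imdb["test"].num_rows)  # 25000 25000
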
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "9e64ace3",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import tensorflow as tf\n",
-    "from transformers import AutoModelForSequenceClassification, TrainingArguments, Trainer"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "1921f090",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def preprocess_function(examples):\n",
-    "    return tokenizer(examples[\"text\"], truncation=True)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "id": "7fe44f58",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from transformers import AutoTokenizer\n",
-    "tokenizer = AutoTokenizer.from_pretrained(\"distilbert-base-uncased\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "id": "f651e28f",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Parameter 'function'=<function preprocess_function at 0x7efdf9e17d30> of the transform datasets.arrow_dataset.Dataset._map_single couldn't be hashed properly, a random hash was used instead. Make sure your transforms and parameters are serializable with pickle or dill for the dataset fingerprinting and caching to work. If you reuse this transform, the caching mechanism will consider it to be different from the previous calls and recompute everything. This warning is only showed once. Subsequent hashing failures won't be showed.\n",
-      "100%|███████████████████████████████████████████| 25/25 [00:02<00:00, 9.28ba/s]\n",
-      "100%|███████████████████████████████████████████| 25/25 [00:02<00:00, 9.56ba/s]\n",
-      "100%|███████████████████████████████████████████| 50/50 [00:05<00:00, 9.33ba/s]\n"
-     ]
-    }
-   ],
-   "source": [
-    "tokenized_imdb = imdb.map(preprocess_function, batched=True)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "id": "d9f8f1df",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from transformers import DataCollatorWithPadding\n",
-    "data_collator = DataCollatorWithPadding(tokenizer=tokenizer)"
-   ]
-  },
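
DataCollatorWithPadding defers padding to batch time: each batch is padded only to its own longest sequence rather than to a global maximum, which is why the tokenization above only truncates. A minimal sketch of that behaviour, reusing the notebook's tokenizer and data_collator; the example strings are invented:

    # Two features of different lengths are padded to the longer one.
    features = [tokenizer("a short review"),
                tokenizer("a noticeably longer review that sets the batch width")]
    batch = data_collator(features)
    print(batch["input_ids"].shape)  # (2, length_of_the_longer_sequence)
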
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "id": "4721b72e",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "training_args = TrainingArguments(\n",
-    "    output_dir=\"./results\",\n",
-    "    learning_rate=2e-5,\n",
-    "    per_device_train_batch_size=4,\n",
-    "    per_device_eval_batch_size=4,\n",
-    "    num_train_epochs=5,\n",
-    "    weight_decay=0.01,\n",
-    ")\n",
-    "\n",
-    "trainer = Trainer(\n",
-    "    model=model,\n",
-    "    args=training_args,\n",
-    "    train_dataset=tokenized_imdb[\"train\"],\n",
-    "    eval_dataset=tokenized_imdb[\"test\"],\n",
-    "    tokenizer=tokenizer,\n",
-    "    data_collator=data_collator,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "id": "d512ad40",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "The following columns in the test set don't have a corresponding argument in `DistilBertForSequenceClassification.forward` and have been ignored: text. If text are not expected by `DistilBertForSequenceClassification.forward`, you can safely ignore this message.\n",
-      "***** Running Prediction *****\n",
-      "  Num examples = 25000\n",
-      "  Batch size = 4\n"
-     ]
-    },
-    {
-     "data": {
-      "text/html": [
-       "\n",
-       "    <div>\n",
-       "      \n",
-       "      <progress value='6250' max='6250' style='width:300px; height:20px; vertical-align: middle;'></progress>\n",
-       "      [6250/6250 01:22]\n",
-       "    </div>\n",
-       "    "
-      ],
-      "text/plain": [
-       "<IPython.core.display.HTML object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/plain": [
-       "PredictionOutput(predictions=array([[ 5.5231447, -4.968189 ],\n",
-       "       [ 4.920535 , -4.5207844],\n",
-       "       [ 5.5004807, -4.9287252],\n",
-       "       ...,\n",
-       "       [-3.9465096,  3.8123856],\n",
-       "       [-4.19292  ,  4.085696 ],\n",
-       "       [-4.7485046,  4.7820053]], dtype=float32), label_ids=array([0, 0, 0, ..., 1, 1, 1]), metrics={'test_loss': 0.5586181879043579, 'test_runtime': 83.0141, 'test_samples_per_second': 301.154, 'test_steps_per_second': 75.288})"
-      ]
-     },
-     "execution_count": 9,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "trainer.predict(tokenized_imdb[\"test\"])"
-   ]
-  },
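
trainer.predict returns raw logits, so the PredictionOutput above still needs an argmax to become class labels. A short sketch of turning it into an accuracy figure; the variable names are mine, not the notebook's:

    import numpy as np

    output = trainer.predict(tokenized_imdb["test"])
    pred_labels = output.predictions.argmax(axis=-1)  # 0 = negative, 1 = positive
    accuracy = (pred_labels == output.label_ids).mean()
    print(f"accuracy: {accuracy:.4f}")
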
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "id": "52c8d786",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from transformers import TextClassificationPipeline"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "id": "077a0e20",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "[{'label': 'LABEL_0', 'score': 0.9999722242355347},\n",
-       " {'label': 'LABEL_1', 'score': 2.7771717213909142e-05}]"
-      ]
-     },
-     "execution_count": 14,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "pipe = TextClassificationPipeline(model=model, tokenizer=tokenizer, return_all_scores=True, device = 0)\n",
-    "pipe(sent)"
-   ]
-  },
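
Two things worth flagging in the cell above: sent is only defined in the next cell (the execution counts, 13 then 14, show the cells were run out of order), and return_all_scores=True has since been deprecated in the transformers pipelines in favor of top_k. A sketch of the same call on a recent transformers install, which is an assumption on my part:

    # top_k=None returns the score for every label, as return_all_scores=True did;
    # device=0 still selects the first GPU.
    pipe = TextClassificationPipeline(model=model, tokenizer=tokenizer,
                                      top_k=None, device=0)
    pipe(sent)
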
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "id": "22a84b23",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "sent = 'I love sci-fi and am willing to put up with a lot. Sci-fi movies/TV are usually underfunded, under-appreciated and misunderstood. I tried to like this, I really did, but it is to good TV sci-fi as Babylon 5 is to Star Trek (the original). Silly prosthetics, cheap cardboard sets, stilted dialogues, CG that doesn\\'t match the background, and painfully one-dimensional characters cannot be overcome with a \\'sci-fi\\' setting. (I\\'m sure there are those of you out there who think Babylon 5 is good sci-fi TV. It\\'s not. It\\'s clichéd and uninspiring.) While US viewers might like emotion and character development, sci-fi is a genre that does not take itself seriously (cf. Star Trek). It may treat important issues, yet not as a serious philosophy. It\\'s really difficult to care about the characters here as they are not simply foolish, just missing a spark of life. Their actions and reactions are wooden and predictable, often painful to watch. The makers of Earth KNOW it\\'s rubbish as they have to always say \"Gene Roddenberry\\'s Earth...\" otherwise people would not continue watching. Roddenberry\\'s ashes must be turning in their orbit as this dull, cheap, poorly edited (watching it without advert breaks really brings this home) trudging Trabant of a show lumbers into space. Spoiler. So, kill off a main character. And then bring him back as another actor. Jeeez! Dallas all over again.'"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "id": "2d6ff6dd",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "{'text': 'I love sci-fi and am willing to put up with a lot. Sci-fi movies/TV are usually underfunded, under-appreciated and misunderstood. I tried to like this, I really did, but it is to good TV sci-fi as Babylon 5 is to Star Trek (the original). Silly prosthetics, cheap cardboard sets, stilted dialogues, CG that doesn\\'t match the background, and painfully one-dimensional characters cannot be overcome with a \\'sci-fi\\' setting. (I\\'m sure there are those of you out there who think Babylon 5 is good sci-fi TV. It\\'s not. It\\'s clichéd and uninspiring.) While US viewers might like emotion and character development, sci-fi is a genre that does not take itself seriously (cf. Star Trek). It may treat important issues, yet not as a serious philosophy. It\\'s really difficult to care about the characters here as they are not simply foolish, just missing a spark of life. Their actions and reactions are wooden and predictable, often painful to watch. The makers of Earth KNOW it\\'s rubbish as they have to always say \"Gene Roddenberry\\'s Earth...\" otherwise people would not continue watching. Roddenberry\\'s ashes must be turning in their orbit as this dull, cheap, poorly edited (watching it without advert breaks really brings this home) trudging Trabant of a show lumbers into space. Spoiler. So, kill off a main character. And then bring him back as another actor. Jeeez! Dallas all over again.',\n",
-       " 'label': 0}"
-      ]
-     },
-     "execution_count": 15,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "imdb[\"test\"][0]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "659a3301",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.10"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}