jorgeortizfuentes committed on
Commit
b79424f
1 Parent(s): 977d8b5

Add best model

README.md ADDED
@@ -0,0 +1,76 @@
+ ---
+ language:
+ - es
+ license: cc-by-4.0
+ tags:
+ - generated_from_trainer
+ datasets:
+ - jorgeortizfuentes/toxicity_spanish_incivility_v3
+ metrics:
+ - f1
+ model-index:
+ - name: incivility-dv3-patana-chilean-spanish-bert-j63zilm4
+   results:
+   - task:
+       name: Text Classification
+       type: text-classification
+     dataset:
+       name: jorgeortizfuentes/toxicity_spanish_incivility_v3
+       type: jorgeortizfuentes/toxicity_spanish_incivility_v3
+       config: null
+       split: validation
+     metrics:
+     - name: F1
+       type: f1
+       value: 0.9135014363230132
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # incivility-dv3-patana-chilean-spanish-bert-j63zilm4
+
+ This model is a fine-tuned version of [dccuchile/patana-chilean-spanish-bert](https://huggingface.co/dccuchile/patana-chilean-spanish-bert) on the jorgeortizfuentes/toxicity_spanish_incivility_v3 dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.5672
+ - F1: 0.9135
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 5e-05
+ - train_batch_size: 128
+ - eval_batch_size: 128
+ - seed: 13
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - num_epochs: 10
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | F1     |
+ |:-------------:|:-----:|:----:|:---------------:|:------:|
+ | 0.1351        | 5.0   | 455  | 0.4608          | 0.9119 |
+ | 0.0114        | 10.0  | 910  | 0.5672          | 0.9135 |
+
+
+ ### Framework versions
+
+ - Transformers 4.30.2
+ - Pytorch 2.0.1+cu117
+ - Datasets 2.13.1
+ - Tokenizers 0.13.3
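
The auto-generated card stops short of a usage example. Below is a minimal inference sketch; the repo id `jorgeortizfuentes/incivility-dv3-patana-chilean-spanish-bert-j63zilm4` is an assumption inferred from the committer and model name, so adjust it if the checkpoint lives elsewhere.

```python
# Minimal sketch: classify Chilean Spanish text for incivility with the
# fine-tuned checkpoint. The repo id below is inferred, not confirmed.
from transformers import pipeline

model_id = "jorgeortizfuentes/incivility-dv3-patana-chilean-spanish-bert-j63zilm4"
classifier = pipeline("text-classification", model=model_id)

texts = [
    "Gracias por compartir tu opinión, muy interesante.",  # likely civil
    "Cállate, eres un inútil que no sabe nada.",           # likely uncivil
]
print(classifier(texts))  # label names depend on the repo's id2label mapping
```

Inputs longer than the tokenizer's 512-token limit need `classifier(texts, truncation=True)`.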
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+     "epoch": 10.0,
+     "train_loss": 0.07324187100588621,
+     "train_runtime": 1328.6278,
+     "train_samples": 11532,
+     "train_samples_per_second": 86.796,
+     "train_steps_per_second": 0.685
+ }
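
The throughput fields above follow arithmetically from the sample count, epoch count, and runtime; a quick sanity check (standalone, not part of the repo):

```python
# Verify all_results.json's throughput numbers from its own fields.
train_samples, num_epochs, runtime_s = 11532, 10.0, 1328.6278
max_steps = 910  # from trainer_state.json

print(train_samples * num_epochs / runtime_s)  # ~86.796 (train_samples_per_second)
print(max_steps / runtime_s)                   # ~0.685  (train_steps_per_second)
```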
checkpoint-500/config.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "_name_or_path": "dccuchile/patana-chilean-spanish-bert",
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "output_past": true,
+   "pad_token_id": 1,
+   "position_embedding_type": "absolute",
+   "problem_type": "single_label_classification",
+   "torch_dtype": "float32",
+   "transformers_version": "4.30.2",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 31002
+ }
checkpoint-500/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d7a94a9dc9567bece3eb5bda1f6cefd37a8b7771530170bdc7c8bbf7f90da457
+ size 439437392
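
Note that the three lines above are a Git LFS pointer, not the weights themselves: the real ~439 MB safetensors file is stored in LFS and addressed by its SHA-256. A small sketch of parsing such a pointer, assuming the standard `key value` layout shown:

```python
# Parse a Git LFS pointer (version / oid / size lines) into a dict.
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:d7a94a9dc9567bece3eb5bda1f6cefd37a8b7771530170bdc7c8bbf7f90da457\n"
    "size 439437392\n"
)
info = parse_lfs_pointer(pointer)
print(info["oid"], int(info["size"]))  # object hash and size in bytes
```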
checkpoint-500/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60be5e28937c469a039569a1bfc30043ee9d65d333b76ae52dabca23b616fa46
+ size 878937221
checkpoint-500/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8d4e6b040ff015bd9b06563b5be216f512e8eb05cf5c40a77247d802ab88208b
+ size 14639
checkpoint-500/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:66d8a82960fdf7511b32f1ea1d794f4a920a406df03a48505d69f0524a4008a9
+ size 627
checkpoint-500/special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
checkpoint-500/tokenizer_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_basic_tokenize": true,
+   "do_lower_case": false,
+   "mask_token": "[MASK]",
+   "model_max_length": 512,
+   "never_split": null,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "strip_accents": false,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "unk_token": "[UNK]"
+ }
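
These settings keep the base model's cased, accent-preserving behavior (`do_lower_case: false`, `strip_accents: false`), which matters for Spanish. A quick sketch of loading it, assuming the checkpoint directory has been downloaded locally:

```python
# Load the tokenizer saved with this checkpoint and confirm its behavior.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("checkpoint-500")  # local path assumed
print(tok.tokenize("Qué opinión tan pésima"))  # case and accents preserved
print(tok.model_max_length)                    # 512
```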
checkpoint-500/trainer_state.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 5.4945054945054945,
+   "global_step": 500,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 5.0,
+       "learning_rate": 2.5e-05,
+       "loss": 0.1351,
+       "step": 455
+     },
+     {
+       "epoch": 5.0,
+       "eval_f1": 0.9119336311423102,
+       "eval_loss": 0.46077126264572144,
+       "eval_runtime": 12.6487,
+       "eval_samples_per_second": 227.928,
+       "eval_steps_per_second": 1.818,
+       "step": 455
+     }
+   ],
+   "max_steps": 910,
+   "num_train_epochs": 10,
+   "total_flos": 1.66865031309312e+16,
+   "trial_name": null,
+   "trial_params": null
+ }
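
The fractional `epoch` value is just step count over steps per epoch: with 11532 training samples at batch size 128, an epoch is ceil(11532/128) = 91 optimizer steps, so checkpoint step 500 sits at 500/91 ≈ 5.4945, exactly as recorded. A quick check:

```python
# Reproduce checkpoint-500's fractional epoch from batch size and dataset size.
import math

steps_per_epoch = math.ceil(11532 / 128)   # 91
print(steps_per_epoch * 10)                # 910 = max_steps
print(500 / steps_per_epoch)               # 5.4945054945... = "epoch" above
```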
checkpoint-500/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:04d95d7b3cd606d84475d60127739194a155d83c8f25e04d615b47e74d0649f6
+ size 4027
checkpoint-500/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
 
config.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "_name_or_path": "dccuchile/patana-chilean-spanish-bert",
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "output_past": true,
+   "pad_token_id": 1,
+   "position_embedding_type": "absolute",
+   "problem_type": "single_label_classification",
+   "torch_dtype": "float32",
+   "transformers_version": "4.30.2",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 31002
+ }
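
This is a standard BERT-base layout (12 layers, 12 heads, hidden size 768, ~110M parameters) with a single-label classification head. A sketch of rebuilding the model from the repo files, assuming they are checked out in the current directory:

```python
# Instantiate the classifier from this config; weights load from
# model.safetensors in the same directory.
from transformers import AutoModelForSequenceClassification

model = AutoModelForSequenceClassification.from_pretrained(".")
print(model.config.problem_type)  # single_label_classification
print(model.config.num_hidden_layers, model.config.hidden_size)  # 12 768
```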
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f13259ec4a476425b1b2634a412f5a80010b9f686dec362b3c1c9373dbe68ed1
+ size 439437392
predict_results_incivility.txt ADDED
@@ -0,0 +1,801 @@
+ index prediction
+ 0 0
+ 1 1
+ 2 0
+ 3 0
+ 4 0
+ 5 0
+ 6 0
+ 7 0
+ 8 0
+ 9 0
+ 10 1
+ 11 0
+ 12 0
+ 13 0
+ 14 0
+ 15 0
+ 16 0
+ 17 1
+ 18 0
+ 19 0
+ 20 0
+ 21 1
+ 22 0
+ 23 1
+ 24 0
+ 25 1
+ 26 1
+ 27 1
+ 28 0
+ 29 1
+ 30 0
+ 31 0
+ 32 1
+ 33 0
+ 34 0
+ 35 0
+ 36 1
+ 37 1
+ 38 1
+ 39 1
+ 40 0
+ 41 1
+ 42 1
+ 43 1
+ 44 0
+ 45 1
+ 46 0
+ 47 1
+ 48 0
+ 49 0
+ 50 0
+ 51 1
+ 52 0
+ 53 0
+ 54 0
+ 55 0
+ 56 0
+ 57 0
+ 58 0
+ 59 0
+ 60 0
+ 61 0
+ 62 1
+ 63 0
+ 64 0
+ 65 0
+ 66 0
+ 67 1
+ 68 1
+ 69 0
+ 70 1
+ 71 1
+ 72 0
+ 73 1
+ 74 0
+ 75 0
+ 76 0
+ 77 0
+ 78 1
+ 79 1
+ 80 0
+ 81 0
+ 82 0
+ 83 1
+ 84 0
+ 85 0
+ 86 0
+ 87 0
+ 88 0
+ 89 1
+ 90 0
+ 91 0
+ 92 0
+ 93 1
+ 94 0
+ 95 0
+ 96 0
+ 97 0
+ 98 1
+ 99 1
+ 100 1
+ 101 1
+ 102 1
+ 103 1
+ 104 0
+ 105 0
+ 106 1
+ 107 0
+ 108 1
+ 109 1
+ 110 1
+ 111 0
+ 112 0
+ 113 0
+ 114 0
+ 115 0
+ 116 0
+ 117 0
+ 118 1
+ 119 1
+ 120 1
+ 121 0
+ 122 1
+ 123 0
+ 124 1
+ 125 1
+ 126 0
+ 127 1
+ 128 0
+ 129 0
+ 130 1
+ 131 0
+ 132 0
+ 133 0
+ 134 0
+ 135 0
+ 136 0
+ 137 0
+ 138 1
+ 139 0
+ 140 0
+ 141 0
+ 142 0
+ 143 0
+ 144 0
+ 145 0
+ 146 1
+ 147 0
+ 148 1
+ 149 0
+ 150 0
+ 151 0
+ 152 0
+ 153 0
+ 154 0
+ 155 1
+ 156 0
+ 157 0
+ 158 0
+ 159 0
+ 160 0
+ 161 0
+ 162 0
+ 163 1
+ 164 1
+ 165 0
+ 166 0
+ 167 0
+ 168 0
+ 169 0
+ 170 0
+ 171 0
+ 172 0
+ 173 1
+ 174 1
+ 175 0
+ 176 0
+ 177 1
+ 178 0
+ 179 0
+ 180 1
+ 181 0
+ 182 0
+ 183 0
+ 184 0
+ 185 0
+ 186 0
+ 187 1
+ 188 0
+ 189 0
+ 190 0
+ 191 0
+ 192 1
+ 193 0
+ 194 0
+ 195 0
+ 196 1
+ 197 1
+ 198 0
+ 199 0
+ 200 0
+ 201 0
+ 202 1
+ 203 0
+ 204 1
+ 205 1
+ 206 0
+ 207 0
+ 208 0
+ 209 0
+ 210 0
+ 211 0
+ 212 1
+ 213 1
+ 214 0
+ 215 1
+ 216 0
+ 217 1
+ 218 0
+ 219 0
+ 220 0
+ 221 1
+ 222 0
+ 223 1
+ 224 0
+ 225 1
+ 226 0
+ 227 0
+ 228 0
+ 229 1
+ 230 1
+ 231 1
+ 232 1
+ 233 0
+ 234 0
+ 235 0
+ 236 0
+ 237 1
+ 238 1
+ 239 0
+ 240 0
+ 241 0
+ 242 1
+ 243 0
+ 244 0
+ 245 0
+ 246 0
+ 247 0
+ 248 1
+ 249 0
+ 250 1
+ 251 0
+ 252 0
+ 253 0
+ 254 0
+ 255 0
+ 256 1
+ 257 0
+ 258 0
+ 259 0
+ 260 1
+ 261 1
+ 262 0
+ 263 0
+ 264 1
+ 265 0
+ 266 0
+ 267 1
+ 268 0
+ 269 1
+ 270 0
+ 271 0
+ 272 1
+ 273 0
+ 274 1
+ 275 0
+ 276 0
+ 277 0
+ 278 1
+ 279 0
+ 280 0
+ 281 0
+ 282 0
+ 283 0
+ 284 1
+ 285 0
+ 286 0
+ 287 1
+ 288 0
+ 289 1
+ 290 0
+ 291 0
+ 292 1
+ 293 0
+ 294 1
+ 295 1
+ 296 0
+ 297 0
+ 298 1
+ 299 1
+ 300 0
+ 301 0
+ 302 0
+ 303 0
+ 304 0
+ 305 1
+ 306 0
+ 307 1
+ 308 0
+ 309 0
+ 310 1
+ 311 0
+ 312 1
+ 313 1
+ 314 0
+ 315 0
+ 316 0
+ 317 1
+ 318 1
+ 319 1
+ 320 0
+ 321 0
+ 322 0
+ 323 1
+ 324 0
+ 325 1
+ 326 0
+ 327 0
+ 328 0
+ 329 1
+ 330 0
+ 331 1
+ 332 1
+ 333 0
+ 334 0
+ 335 0
+ 336 1
+ 337 1
+ 338 0
+ 339 0
+ 340 0
+ 341 0
+ 342 1
+ 343 0
+ 344 1
+ 345 1
+ 346 1
+ 347 0
+ 348 0
+ 349 0
+ 350 1
+ 351 1
+ 352 0
+ 353 0
+ 354 0
+ 355 0
+ 356 0
+ 357 1
+ 358 1
+ 359 1
+ 360 1
+ 361 0
+ 362 1
+ 363 0
+ 364 1
+ 365 1
+ 366 1
+ 367 1
+ 368 0
+ 369 1
+ 370 1
+ 371 0
+ 372 0
+ 373 0
+ 374 1
+ 375 1
+ 376 1
+ 377 0
+ 378 0
+ 379 0
+ 380 0
+ 381 0
+ 382 0
+ 383 0
+ 384 0
+ 385 1
+ 386 0
+ 387 1
+ 388 1
+ 389 0
+ 390 1
+ 391 1
+ 392 0
+ 393 0
+ 394 0
+ 395 1
+ 396 1
+ 397 1
+ 398 1
+ 399 0
+ 400 0
+ 401 1
+ 402 0
+ 403 1
+ 404 1
+ 405 0
+ 406 0
+ 407 1
+ 408 0
+ 409 0
+ 410 0
+ 411 0
+ 412 0
+ 413 0
+ 414 1
+ 415 1
+ 416 0
+ 417 1
+ 418 0
+ 419 0
+ 420 0
+ 421 0
+ 422 1
+ 423 0
+ 424 0
+ 425 1
+ 426 1
+ 427 1
+ 428 0
+ 429 1
+ 430 0
+ 431 1
+ 432 0
+ 433 0
+ 434 0
+ 435 1
+ 436 0
+ 437 0
+ 438 0
+ 439 1
+ 440 1
+ 441 0
+ 442 0
+ 443 1
+ 444 0
+ 445 0
+ 446 1
+ 447 1
+ 448 0
+ 449 0
+ 450 1
+ 451 0
+ 452 0
+ 453 1
+ 454 0
+ 455 1
+ 456 0
+ 457 0
+ 458 0
+ 459 0
+ 460 0
+ 461 0
+ 462 0
+ 463 0
+ 464 0
+ 465 0
+ 466 1
+ 467 0
+ 468 0
+ 469 0
+ 470 0
+ 471 0
+ 472 0
+ 473 1
+ 474 0
+ 475 0
+ 476 0
+ 477 0
+ 478 1
+ 479 0
+ 480 1
+ 481 0
+ 482 1
+ 483 1
+ 484 1
+ 485 0
+ 486 0
+ 487 0
+ 488 0
+ 489 0
+ 490 1
+ 491 0
+ 492 1
+ 493 1
+ 494 0
+ 495 1
+ 496 0
+ 497 0
+ 498 1
+ 499 0
+ 500 1
+ 501 0
+ 502 1
+ 503 1
+ 504 1
+ 505 0
+ 506 0
+ 507 0
+ 508 1
+ 509 0
+ 510 0
+ 511 1
+ 512 1
+ 513 1
+ 514 0
+ 515 0
+ 516 1
+ 517 0
+ 518 0
+ 519 1
+ 520 0
+ 521 1
+ 522 0
+ 523 1
+ 524 0
+ 525 0
+ 526 0
+ 527 0
+ 528 1
+ 529 0
+ 530 0
+ 531 1
+ 532 0
+ 533 1
+ 534 0
+ 535 0
+ 536 0
+ 537 0
+ 538 0
+ 539 0
+ 540 1
+ 541 1
+ 542 1
+ 543 1
+ 544 0
+ 545 0
+ 546 1
+ 547 0
+ 548 0
+ 549 1
+ 550 0
+ 551 0
+ 552 1
+ 553 0
+ 554 0
+ 555 1
+ 556 0
+ 557 1
+ 558 0
+ 559 0
+ 560 0
+ 561 0
+ 562 0
+ 563 0
+ 564 0
+ 565 1
+ 566 0
+ 567 0
+ 568 0
+ 569 0
+ 570 0
+ 571 0
+ 572 0
+ 573 1
+ 574 1
+ 575 1
+ 576 0
+ 577 0
+ 578 0
+ 579 0
+ 580 1
+ 581 1
+ 582 1
+ 583 1
+ 584 1
+ 585 0
+ 586 0
+ 587 0
+ 588 1
+ 589 1
+ 590 1
+ 591 0
+ 592 0
+ 593 0
+ 594 0
+ 595 0
+ 596 0
+ 597 1
+ 598 1
+ 599 1
+ 600 0
+ 601 0
+ 602 0
+ 603 0
+ 604 1
+ 605 1
+ 606 0
+ 607 0
+ 608 0
+ 609 0
+ 610 0
+ 611 0
+ 612 0
+ 613 0
+ 614 1
+ 615 0
+ 616 0
+ 617 0
+ 618 0
+ 619 0
+ 620 0
+ 621 0
+ 622 0
+ 623 0
+ 624 1
+ 625 1
+ 626 1
+ 627 0
+ 628 0
+ 629 1
+ 630 1
+ 631 0
+ 632 0
+ 633 1
+ 634 1
+ 635 1
+ 636 0
+ 637 1
+ 638 1
+ 639 0
+ 640 0
+ 641 0
+ 642 1
+ 643 0
+ 644 1
+ 645 0
+ 646 0
+ 647 1
+ 648 1
+ 649 1
+ 650 0
+ 651 1
+ 652 1
+ 653 0
+ 654 0
+ 655 0
+ 656 0
+ 657 0
+ 658 1
+ 659 0
+ 660 1
+ 661 0
+ 662 0
+ 663 1
+ 664 1
+ 665 1
+ 666 1
+ 667 0
+ 668 0
+ 669 1
+ 670 1
+ 671 0
+ 672 0
+ 673 0
+ 674 0
+ 675 0
+ 676 1
+ 677 0
+ 678 1
+ 679 0
+ 680 1
+ 681 0
+ 682 0
+ 683 0
+ 684 0
+ 685 0
+ 686 1
+ 687 1
+ 688 1
+ 689 1
+ 690 0
+ 691 1
+ 692 0
+ 693 1
+ 694 1
+ 695 0
+ 696 0
+ 697 0
+ 698 0
+ 699 0
+ 700 1
+ 701 0
+ 702 0
+ 703 1
+ 704 0
+ 705 1
+ 706 0
+ 707 0
+ 708 1
+ 709 1
+ 710 1
+ 711 0
+ 712 0
+ 713 1
+ 714 1
+ 715 0
+ 716 0
+ 717 1
+ 718 0
+ 719 0
+ 720 0
+ 721 0
+ 722 1
+ 723 1
+ 724 0
+ 725 0
+ 726 0
+ 727 1
+ 728 0
+ 729 0
+ 730 0
+ 731 1
+ 732 0
+ 733 0
+ 734 0
+ 735 1
+ 736 0
+ 737 0
+ 738 0
+ 739 0
+ 740 1
+ 741 0
+ 742 0
+ 743 0
+ 744 1
+ 745 1
+ 746 0
+ 747 0
+ 748 1
+ 749 1
+ 750 1
+ 751 1
+ 752 0
+ 753 0
+ 754 1
+ 755 1
+ 756 0
+ 757 0
+ 758 0
+ 759 1
+ 760 1
+ 761 1
+ 762 1
+ 763 0
+ 764 1
+ 765 1
+ 766 0
+ 767 0
+ 768 1
+ 769 1
+ 770 0
+ 771 0
+ 772 0
+ 773 0
+ 774 1
+ 775 0
+ 776 0
+ 777 1
+ 778 0
+ 779 0
+ 780 1
+ 781 1
+ 782 1
+ 783 0
+ 784 0
+ 785 1
+ 786 1
+ 787 1
+ 788 0
+ 789 1
+ 790 0
+ 791 1
+ 792 0
+ 793 1
+ 794 1
+ 795 1
+ 796 1
+ 797 0
+ 798 0
+ 799 0
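
This file is the Trainer's standard prediction dump: a header row, then one `index prediction` pair per test example (800 rows here, label 1 flagging incivility). A minimal sketch for reading it back, assuming the columns are tab-separated as in the stock HF example scripts:

```python
# Read predict_results_incivility.txt back into a list of 0/1 labels.
import csv

with open("predict_results_incivility.txt", newline="") as f:
    reader = csv.DictReader(f, delimiter="\t")  # header: index, prediction
    preds = [int(row["prediction"]) for row in reader]

print(len(preds))  # 800 test examples
print(sum(preds))  # number of examples predicted uncivil (label 1)
```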
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_basic_tokenize": true,
+   "do_lower_case": false,
+   "mask_token": "[MASK]",
+   "model_max_length": 512,
+   "never_split": null,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "strip_accents": false,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "unk_token": "[UNK]"
+ }
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+     "epoch": 10.0,
+     "train_loss": 0.07324187100588621,
+     "train_runtime": 1328.6278,
+     "train_samples": 11532,
+     "train_samples_per_second": 86.796,
+     "train_steps_per_second": 0.685
+ }
trainer_state.json ADDED
@@ -0,0 +1,55 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 10.0,
+   "global_step": 910,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 5.0,
+       "learning_rate": 2.5e-05,
+       "loss": 0.1351,
+       "step": 455
+     },
+     {
+       "epoch": 5.0,
+       "eval_f1": 0.9119336311423102,
+       "eval_loss": 0.46077126264572144,
+       "eval_runtime": 12.6487,
+       "eval_samples_per_second": 227.928,
+       "eval_steps_per_second": 1.818,
+       "step": 455
+     },
+     {
+       "epoch": 10.0,
+       "learning_rate": 0.0,
+       "loss": 0.0114,
+       "step": 910
+     },
+     {
+       "epoch": 10.0,
+       "eval_f1": 0.9135014363230132,
+       "eval_loss": 0.567248523235321,
+       "eval_runtime": 12.6489,
+       "eval_samples_per_second": 227.925,
+       "eval_steps_per_second": 1.818,
+       "step": 910
+     },
+     {
+       "epoch": 10.0,
+       "step": 910,
+       "total_flos": 3.03419669041152e+16,
+       "train_loss": 0.07324187100588621,
+       "train_runtime": 1328.6278,
+       "train_samples_per_second": 86.796,
+       "train_steps_per_second": 0.685
+     }
+   ],
+   "max_steps": 910,
+   "num_train_epochs": 10,
+   "total_flos": 3.03419669041152e+16,
+   "trial_name": null,
+   "trial_params": null
+ }
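
The logged learning rates match a warmup-free linear schedule: 5e-05 at step 0, decaying to 0 at step 910, and hitting 2.5e-05 at the halfway log point (step 455). A quick check:

```python
# Reproduce the logged learning rates under linear decay without warmup.
def linear_lr(step: int, base_lr: float = 5e-5, max_steps: int = 910) -> float:
    return base_lr * (1 - step / max_steps)

print(linear_lr(455))  # 2.5e-05, matches the step-455 log entry
print(linear_lr(910))  # 0.0, matches the final entry
```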
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:04d95d7b3cd606d84475d60127739194a155d83c8f25e04d615b47e74d0649f6
+ size 4027
vocab.txt ADDED
The diff for this file is too large to render. See raw diff