transliterated-akk-en-t5-small-instruct-small-context
- README.md +235 -0
- added_tokens.json +0 -0
- config.json +33 -0
- generation_config.json +7 -0
- model.safetensors +3 -0
- special_tokens_map.json +125 -0
- spiece.model +3 -0
- tokenizer_config.json +0 -0
- training_args.bin +3 -0
README.md
ADDED
@@ -0,0 +1,235 @@
+---
+tags:
+- generated_from_trainer
+model-index:
+- name: AraT5v2-base-1024-p-l-akk-en-20240801-201225
+  results: []
+---
+
+<!-- This model card has been generated automatically according to the information the Trainer had access to. You
+should probably proofread and complete it, then remove this comment. -->
+
+# AraT5v2-base-1024-p-l-akk-en-20240801-201225
+
+This model was trained from scratch on the None dataset.
+It achieves the following results on the evaluation set:
+- Loss: 0.0402
+
+## Model description
+
+More information needed
+
+## Intended uses & limitations
+
+More information needed
+
+## Training and evaluation data
+
+More information needed
+
+## Training procedure
+
+### Training hyperparameters
+
+The following hyperparameters were used during training:
+- learning_rate: 4e-05
+- train_batch_size: 1
+- eval_batch_size: 1
+- seed: 42
+- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+- lr_scheduler_type: linear
+- num_epochs: 10
+
+### Training results
+
+| Training Loss | Epoch | Step | Validation Loss |
+|:-------------:|:------:|:------:|:---------------:|
+| 0.0561 | 0.0552 | 2500 | 0.0493 |
+| 0.0511 | 0.1105 | 5000 | 0.0501 |
+| 0.0475 | 0.1657 | 7500 | 0.0499 |
+| 0.0465 | 0.2210 | 10000 | 0.0499 |
+| 0.0417 | 0.2762 | 12500 | 0.0495 |
+| 0.0528 | 0.3314 | 15000 | 0.0497 |
+| 0.0523 | 0.3867 | 17500 | 0.0492 |
+| 0.0492 | 0.4419 | 20000 | 0.0497 |
+| 0.0468 | 0.4972 | 22500 | 0.0488 |
+| 0.0515 | 0.5524 | 25000 | 0.0489 |
+| 0.0522 | 0.6076 | 27500 | 0.0487 |
+| 0.0522 | 0.6629 | 30000 | 0.0489 |
+| 0.0544 | 0.7181 | 32500 | 0.0486 |
+| 0.0466 | 0.7734 | 35000 | 0.0488 |
+| 0.0546 | 0.8286 | 37500 | 0.0491 |
+| 0.0558 | 0.8838 | 40000 | 0.0486 |
+| 0.0532 | 0.9391 | 42500 | 0.0484 |
+| 0.049 | 0.9943 | 45000 | 0.0484 |
+| 0.049 | 1.0496 | 47500 | 0.0487 |
+| 0.0429 | 1.1048 | 50000 | 0.0483 |
+| 0.0549 | 1.1600 | 52500 | 0.0482 |
+| 0.047 | 1.2153 | 55000 | 0.0480 |
+| 0.0446 | 1.2705 | 57500 | 0.0477 |
+| 0.0583 | 1.3258 | 60000 | 0.0478 |
+| 0.0471 | 1.3810 | 62500 | 0.0477 |
+| 0.0549 | 1.4362 | 65000 | 0.0475 |
+| 0.0441 | 1.4915 | 67500 | 0.0476 |
+| 0.0467 | 1.5467 | 70000 | 0.0471 |
+| 0.0434 | 1.6020 | 72500 | 0.0467 |
+| 0.0522 | 1.6572 | 75000 | 0.0471 |
+| 0.0513 | 1.7124 | 77500 | 0.0469 |
+| 0.0492 | 1.7677 | 80000 | 0.0465 |
+| 0.0428 | 1.8229 | 82500 | 0.0466 |
+| 0.0479 | 1.8782 | 85000 | 0.0461 |
+| 0.0522 | 1.9334 | 87500 | 0.0463 |
+| 0.0503 | 1.9886 | 90000 | 0.0462 |
+| 0.0378 | 2.0439 | 92500 | 0.0463 |
+| 0.0442 | 2.0991 | 95000 | 0.0462 |
+| 0.0412 | 2.1544 | 97500 | 0.0461 |
+| 0.0378 | 2.2096 | 100000 | 0.0460 |
+| 0.0437 | 2.2648 | 102500 | 0.0460 |
+| 0.0467 | 2.3201 | 105000 | 0.0456 |
+| 0.0446 | 2.3753 | 107500 | 0.0455 |
+| 0.0395 | 2.4306 | 110000 | 0.0456 |
+| 0.0425 | 2.4858 | 112500 | 0.0453 |
+| 0.0435 | 2.5410 | 115000 | 0.0454 |
+| 0.0457 | 2.5963 | 117500 | 0.0454 |
+| 0.048 | 2.6515 | 120000 | 0.0452 |
+| 0.0474 | 2.7068 | 122500 | 0.0450 |
+| 0.0512 | 2.7620 | 125000 | 0.0451 |
+| 0.0472 | 2.8172 | 127500 | 0.0445 |
+| 0.0513 | 2.8725 | 130000 | 0.0448 |
+| 0.0451 | 2.9277 | 132500 | 0.0446 |
+| 0.0445 | 2.9830 | 135000 | 0.0445 |
+| 0.0445 | 3.0382 | 137500 | 0.0447 |
+| 0.0396 | 3.0934 | 140000 | 0.0444 |
+| 0.047 | 3.1487 | 142500 | 0.0443 |
+| 0.0422 | 3.2039 | 145000 | 0.0444 |
+| 0.0443 | 3.2592 | 147500 | 0.0444 |
+| 0.0483 | 3.3144 | 150000 | 0.0442 |
+| 0.0434 | 3.3696 | 152500 | 0.0442 |
+| 0.05 | 3.4249 | 155000 | 0.0441 |
+| 0.0403 | 3.4801 | 157500 | 0.0438 |
+| 0.0461 | 3.5354 | 160000 | 0.0437 |
+| 0.0433 | 3.5906 | 162500 | 0.0437 |
+| 0.0418 | 3.6458 | 165000 | 0.0437 |
+| 0.0407 | 3.7011 | 167500 | 0.0436 |
+| 0.0408 | 3.7563 | 170000 | 0.0435 |
+| 0.0432 | 3.8116 | 172500 | 0.0434 |
+| 0.0411 | 3.8668 | 175000 | 0.0434 |
+| 0.045 | 3.9220 | 177500 | 0.0431 |
+| 0.0408 | 3.9773 | 180000 | 0.0430 |
+| 0.0413 | 4.0325 | 182500 | 0.0435 |
+| 0.0443 | 4.0878 | 185000 | 0.0430 |
+| 0.0375 | 4.1430 | 187500 | 0.0434 |
+| 0.0431 | 4.1982 | 190000 | 0.0428 |
+| 0.0381 | 4.2535 | 192500 | 0.0428 |
+| 0.0456 | 4.3087 | 195000 | 0.0429 |
+| 0.0468 | 4.3640 | 197500 | 0.0432 |
+| 0.0404 | 4.4192 | 200000 | 0.0426 |
+| 0.0417 | 4.4744 | 202500 | 0.0424 |
+| 0.0402 | 4.5297 | 205000 | 0.0429 |
+| 0.0427 | 4.5849 | 207500 | 0.0427 |
+| 0.0413 | 4.6402 | 210000 | 0.0427 |
+| 0.0417 | 4.6954 | 212500 | 0.0425 |
+| 0.0353 | 4.7506 | 215000 | 0.0425 |
+| 0.039 | 4.8059 | 217500 | 0.0423 |
+| 0.0427 | 4.8611 | 220000 | 0.0420 |
+| 0.0359 | 4.9164 | 222500 | 0.0422 |
+| 0.0392 | 4.9716 | 225000 | 0.0421 |
+| 0.0424 | 5.0268 | 227500 | 0.0426 |
+| 0.0398 | 5.0821 | 230000 | 0.0421 |
+| 0.0398 | 5.1373 | 232500 | 0.0421 |
+| 0.0349 | 5.1926 | 235000 | 0.0422 |
+| 0.0415 | 5.2478 | 237500 | 0.0422 |
+| 0.0413 | 5.3030 | 240000 | 0.0421 |
+| 0.0422 | 5.3583 | 242500 | 0.0419 |
+| 0.0365 | 5.4135 | 245000 | 0.0418 |
+| 0.0403 | 5.4688 | 247500 | 0.0416 |
+| 0.0398 | 5.5240 | 250000 | 0.0417 |
+| 0.0393 | 5.5792 | 252500 | 0.0417 |
+| 0.0396 | 5.6345 | 255000 | 0.0417 |
+| 0.0383 | 5.6897 | 257500 | 0.0417 |
+| 0.041 | 5.7450 | 260000 | 0.0417 |
+| 0.0425 | 5.8002 | 262500 | 0.0414 |
+| 0.0417 | 5.8554 | 265000 | 0.0412 |
+| 0.0352 | 5.9107 | 267500 | 0.0413 |
+| 0.0349 | 5.9659 | 270000 | 0.0413 |
+| 0.0397 | 6.0212 | 272500 | 0.0413 |
+| 0.037 | 6.0764 | 275000 | 0.0414 |
+| 0.0358 | 6.1316 | 277500 | 0.0416 |
+| 0.0402 | 6.1869 | 280000 | 0.0415 |
+| 0.0332 | 6.2421 | 282500 | 0.0417 |
+| 0.035 | 6.2974 | 285000 | 0.0415 |
+| 0.0364 | 6.3526 | 287500 | 0.0413 |
+| 0.0427 | 6.4078 | 290000 | 0.0412 |
+| 0.0387 | 6.4631 | 292500 | 0.0409 |
+| 0.0288 | 6.5183 | 295000 | 0.0410 |
+| 0.0417 | 6.5736 | 297500 | 0.0410 |
+| 0.0372 | 6.6288 | 300000 | 0.0410 |
+| 0.042 | 6.6840 | 302500 | 0.0411 |
+| 0.0347 | 6.7393 | 305000 | 0.0409 |
+| 0.0363 | 6.7945 | 307500 | 0.0408 |
+| 0.0413 | 6.8498 | 310000 | 0.0410 |
+| 0.0386 | 6.9050 | 312500 | 0.0407 |
+| 0.0362 | 6.9602 | 315000 | 0.0407 |
+| 0.0385 | 7.0155 | 317500 | 0.0410 |
+| 0.0412 | 7.0707 | 320000 | 0.0410 |
+| 0.0349 | 7.1260 | 322500 | 0.0407 |
+| 0.0383 | 7.1812 | 325000 | 0.0408 |
+| 0.0316 | 7.2364 | 327500 | 0.0410 |
+| 0.0387 | 7.2917 | 330000 | 0.0409 |
+| 0.0321 | 7.3469 | 332500 | 0.0407 |
+| 0.0323 | 7.4022 | 335000 | 0.0406 |
+| 0.0352 | 7.4574 | 337500 | 0.0406 |
+| 0.0342 | 7.5126 | 340000 | 0.0404 |
+| 0.0356 | 7.5679 | 342500 | 0.0406 |
+| 0.0379 | 7.6231 | 345000 | 0.0405 |
+| 0.0381 | 7.6784 | 347500 | 0.0406 |
+| 0.0359 | 7.7336 | 350000 | 0.0403 |
+| 0.0371 | 7.7889 | 352500 | 0.0403 |
+| 0.0385 | 7.8441 | 355000 | 0.0404 |
+| 0.0321 | 7.8993 | 357500 | 0.0405 |
+| 0.0366 | 7.9546 | 360000 | 0.0406 |
+| 0.0316 | 8.0098 | 362500 | 0.0407 |
+| 0.0372 | 8.0651 | 365000 | 0.0406 |
+| 0.0327 | 8.1203 | 367500 | 0.0406 |
+| 0.0334 | 8.1755 | 370000 | 0.0405 |
+| 0.0366 | 8.2308 | 372500 | 0.0405 |
+| 0.0346 | 8.2860 | 375000 | 0.0405 |
+| 0.0322 | 8.3413 | 377500 | 0.0404 |
+| 0.0429 | 8.3965 | 380000 | 0.0403 |
+| 0.0341 | 8.4517 | 382500 | 0.0404 |
+| 0.0345 | 8.5070 | 385000 | 0.0403 |
+| 0.0356 | 8.5622 | 387500 | 0.0403 |
+| 0.0352 | 8.6175 | 390000 | 0.0404 |
+| 0.0361 | 8.6727 | 392500 | 0.0402 |
+| 0.0335 | 8.7279 | 395000 | 0.0404 |
+| 0.0361 | 8.7832 | 397500 | 0.0403 |
+| 0.035 | 8.8384 | 400000 | 0.0403 |
+| 0.0327 | 8.8937 | 402500 | 0.0402 |
+| 0.0336 | 8.9489 | 405000 | 0.0401 |
+| 0.035 | 9.0041 | 407500 | 0.0404 |
+| 0.0363 | 9.0594 | 410000 | 0.0403 |
+| 0.0317 | 9.1146 | 412500 | 0.0403 |
+| 0.033 | 9.1699 | 415000 | 0.0404 |
+| 0.0368 | 9.2251 | 417500 | 0.0403 |
+| 0.0333 | 9.2803 | 420000 | 0.0403 |
+| 0.0308 | 9.3356 | 422500 | 0.0402 |
+| 0.0329 | 9.3908 | 425000 | 0.0403 |
+| 0.0371 | 9.4461 | 427500 | 0.0402 |
+| 0.0331 | 9.5013 | 430000 | 0.0403 |
+| 0.0328 | 9.5565 | 432500 | 0.0402 |
+| 0.0323 | 9.6118 | 435000 | 0.0402 |
+| 0.0283 | 9.6670 | 437500 | 0.0402 |
+| 0.0338 | 9.7223 | 440000 | 0.0401 |
+| 0.0323 | 9.7775 | 442500 | 0.0402 |
+| 0.0374 | 9.8327 | 445000 | 0.0402 |
+| 0.0357 | 9.8880 | 447500 | 0.0402 |
+| 0.037 | 9.9432 | 450000 | 0.0402 |
+| 0.0348 | 9.9985 | 452500 | 0.0402 |
+
+
+### Framework versions
+
+- Transformers 4.44.0.dev0
+- Pytorch 2.5.0.dev20240625
+- Datasets 2.20.0
+- Tokenizers 0.19.1
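Editor's note: the hyperparameters in the card above map one-to-one onto a standard `transformers` `Seq2SeqTrainingArguments` configuration. A minimal sketch, not the author's actual training script: `output_dir` is hypothetical, and the 2500-step evaluation cadence is inferred from the results table; everything else is as listed.

```python
# Minimal sketch: the README's hyperparameters expressed as
# Seq2SeqTrainingArguments. output_dir is hypothetical; eval_steps=2500
# is inferred from the evaluation cadence visible in the results table.
from transformers import Seq2SeqTrainingArguments

training_args = Seq2SeqTrainingArguments(
    output_dir="AraT5v2-base-1024-p-l-akk-en",  # hypothetical
    learning_rate=4e-5,
    per_device_train_batch_size=1,
    per_device_eval_batch_size=1,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=10,
    eval_strategy="steps",
    eval_steps=2500,
    adam_beta1=0.9,    # Adam betas/epsilon as reported in the card
    adam_beta2=0.999,  # (these are also the transformers defaults)
    adam_epsilon=1e-8,
)
```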
added_tokens.json
ADDED
The diff for this file is too large to render. See raw diff.
config.json
ADDED
@@ -0,0 +1,33 @@
+{
+  "_name_or_path": "/Users/lee/GitHub/results/AraT5v2-base-1024-p-l-akk-en-20240712-212743",
+  "architectures": [
+    "T5ForConditionalGeneration"
+  ],
+  "classifier_dropout": 0.0,
+  "d_ff": 2048,
+  "d_kv": 64,
+  "d_model": 768,
+  "decoder_start_token_id": 0,
+  "dense_act_fn": "gelu_new",
+  "dropout_rate": 0.1,
+  "eos_token_id": 1,
+  "feed_forward_proj": "gated-gelu",
+  "initializer_factor": 1.0,
+  "is_encoder_decoder": true,
+  "is_gated_act": true,
+  "layer_norm_epsilon": 1e-06,
+  "model_type": "t5",
+  "num_decoder_layers": 12,
+  "num_heads": 12,
+  "num_layers": 12,
+  "output_past": true,
+  "pad_token_id": 0,
+  "relative_attention_max_distance": 128,
+  "relative_attention_num_buckets": 32,
+  "tie_word_embeddings": false,
+  "tokenizer_class": "T5Tokenizer",
+  "torch_dtype": "float32",
+  "transformers_version": "4.44.0.dev0",
+  "use_cache": true,
+  "vocab_size": 126985
+}
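Editor's note: this config describes a T5-base-shaped encoder-decoder (12 encoder and 12 decoder layers, d_model 768, gated-GELU feed-forward) with an enlarged 126,985-entry vocabulary and untied input/output embeddings. A sketch of instantiating that architecture from the values above, with random weights and no checkpoint download:

```python
# Sketch: build the architecture described by config.json with random
# weights (no checkpoint download) and sanity-check its parameter count.
from transformers import T5Config, T5ForConditionalGeneration

config = T5Config(
    d_model=768,
    d_ff=2048,
    d_kv=64,
    num_layers=12,
    num_decoder_layers=12,
    num_heads=12,
    feed_forward_proj="gated-gelu",
    tie_word_embeddings=False,
    vocab_size=126985,
)
model = T5ForConditionalGeneration(config)

n_params = sum(p.numel() for p in model.parameters())
print(f"{n_params:,} parameters")  # ~393M, i.e. ~1.57 GB in float32
```

The parameter count lines up with the 1,573,146,480-byte float32 safetensors file added below.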
generation_config.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "decoder_start_token_id": 0,
+  "eos_token_id": 1,
+  "pad_token_id": 0,
+  "transformers_version": "4.44.0.dev0"
+}
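Editor's note: with the tokenizer files added below, inference follows the usual seq2seq pattern. A hedged usage sketch: the Hub namespace placeholder and the sample transliteration are illustrative only, and the commit does not say whether the model expects an instruction prefix.

```python
# Hedged usage sketch. Replace <namespace> with the owning Hub account;
# the input line is an illustrative Akkadian transliteration, not a
# sample from the (unspecified) training data.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

repo = "<namespace>/transliterated-akk-en-t5-small-instruct-small-context"
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForSeq2SeqLM.from_pretrained(repo)

inputs = tokenizer("a-na be-li2-ia qi2-bi2-ma", return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```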
model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ec4568a07fc3b411fee7dd1b070af15663c9d7849e7dbcaf7d0860ffbb92b07
+size 1573146480
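Editor's note: model.safetensors is committed as a Git LFS pointer: the three lines record the spec version, the SHA-256 of the actual blob, and its size in bytes (1,573,146,480, about 1.57 GB). A small sketch of verifying a downloaded copy against such a pointer:

```python
# Sketch: check a downloaded file against the oid/size recorded in a
# Git LFS pointer file like the one above.
import hashlib
from pathlib import Path

EXPECTED_OID = "8ec4568a07fc3b411fee7dd1b070af15663c9d7849e7dbcaf7d0860ffbb92b07"
EXPECTED_SIZE = 1573146480

path = Path("model.safetensors")
assert path.stat().st_size == EXPECTED_SIZE, "size mismatch"

sha = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
        sha.update(chunk)
assert sha.hexdigest() == EXPECTED_OID, "hash mismatch"
print("LFS pointer verified")
```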
special_tokens_map.json
ADDED
@@ -0,0 +1,125 @@
+{
+  "additional_special_tokens": [
+    "<extra_id_0>",
+    "<extra_id_1>",
+    "<extra_id_2>",
+    "<extra_id_3>",
+    "<extra_id_4>",
+    "<extra_id_5>",
+    "<extra_id_6>",
+    "<extra_id_7>",
+    "<extra_id_8>",
+    "<extra_id_9>",
+    "<extra_id_10>",
+    "<extra_id_11>",
+    "<extra_id_12>",
+    "<extra_id_13>",
+    "<extra_id_14>",
+    "<extra_id_15>",
+    "<extra_id_16>",
+    "<extra_id_17>",
+    "<extra_id_18>",
+    "<extra_id_19>",
+    "<extra_id_20>",
+    "<extra_id_21>",
+    "<extra_id_22>",
+    "<extra_id_23>",
+    "<extra_id_24>",
+    "<extra_id_25>",
+    "<extra_id_26>",
+    "<extra_id_27>",
+    "<extra_id_28>",
+    "<extra_id_29>",
+    "<extra_id_30>",
+    "<extra_id_31>",
+    "<extra_id_32>",
+    "<extra_id_33>",
+    "<extra_id_34>",
+    "<extra_id_35>",
+    "<extra_id_36>",
+    "<extra_id_37>",
+    "<extra_id_38>",
+    "<extra_id_39>",
+    "<extra_id_40>",
+    "<extra_id_41>",
+    "<extra_id_42>",
+    "<extra_id_43>",
+    "<extra_id_44>",
+    "<extra_id_45>",
+    "<extra_id_46>",
+    "<extra_id_47>",
+    "<extra_id_48>",
+    "<extra_id_49>",
+    "<extra_id_50>",
+    "<extra_id_51>",
+    "<extra_id_52>",
+    "<extra_id_53>",
+    "<extra_id_54>",
+    "<extra_id_55>",
+    "<extra_id_56>",
+    "<extra_id_57>",
+    "<extra_id_58>",
+    "<extra_id_59>",
+    "<extra_id_60>",
+    "<extra_id_61>",
+    "<extra_id_62>",
+    "<extra_id_63>",
+    "<extra_id_64>",
+    "<extra_id_65>",
+    "<extra_id_66>",
+    "<extra_id_67>",
+    "<extra_id_68>",
+    "<extra_id_69>",
+    "<extra_id_70>",
+    "<extra_id_71>",
+    "<extra_id_72>",
+    "<extra_id_73>",
+    "<extra_id_74>",
+    "<extra_id_75>",
+    "<extra_id_76>",
+    "<extra_id_77>",
+    "<extra_id_78>",
+    "<extra_id_79>",
+    "<extra_id_80>",
+    "<extra_id_81>",
+    "<extra_id_82>",
+    "<extra_id_83>",
+    "<extra_id_84>",
+    "<extra_id_85>",
+    "<extra_id_86>",
+    "<extra_id_87>",
+    "<extra_id_88>",
+    "<extra_id_89>",
+    "<extra_id_90>",
+    "<extra_id_91>",
+    "<extra_id_92>",
+    "<extra_id_93>",
+    "<extra_id_94>",
+    "<extra_id_95>",
+    "<extra_id_96>",
+    "<extra_id_97>",
+    "<extra_id_98>",
+    "<extra_id_99>"
+  ],
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
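Editor's note: the 100 `<extra_id_N>` entries are T5's standard sentinel tokens for span corruption, kept here alongside the usual `</s>`, `<pad>`, and `<unk>`. Purely as illustration of what they encode; the card does not say span corruption was used in this fine-tune.

```python
# Sketch: the role of <extra_id_N> sentinels in T5-style span corruption.
# Illustrative strings only, not taken from this model's training data.
corrupted_input = "a-na <extra_id_0> qi2-bi2-ma"
denoising_target = "<extra_id_0> be-li2-ia <extra_id_1>"
# Each sentinel in the input marks a dropped span; the target lists the
# dropped spans, each introduced by the matching sentinel.
```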
spiece.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:180428eb8e88be6c7d259fb04c9eb3a1c552d799a76741bcd6ee34fa0bf64386
+size 2353338
tokenizer_config.json
ADDED
The diff for this file is too large to render. See raw diff.
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:abde0875397922619cee238f3b776802376f36a602520e6ec3051bbb630977d1
+size 5368
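Editor's note: training_args.bin is the `TrainingArguments` object pickled by the `Trainer`. If you trust the file, it can be inspected directly; a sketch, assuming `transformers` is installed so the pickle can resolve its classes:

```python
# Sketch: inspect the pickled TrainingArguments saved alongside the model.
# Pickle loading can execute code from the file, so only do this for files
# you trust; transformers must be importable for unpickling to succeed.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```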