cuneiform-akk-en-t5-small-instruct-small-context
Browse files

- README.md +243 -0
- added_tokens.json +0 -0
- config.json +61 -0
- generation_config.json +7 -0
- model.safetensors +3 -0
- special_tokens_map.json +125 -0
- spiece.model +3 -0
- tokenizer_config.json +0 -0
- training_args.bin +3 -0
README.md
ADDED
@@ -0,0 +1,243 @@
---
tags:
- generated_from_trainer
model-index:
- name: t5-small-p-l-akk-en-20240809-220318
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# t5-small-p-l-akk-en-20240809-220318

This model was trained from scratch on an unspecified dataset.
It achieves the following results on the evaluation set:
- Loss: 0.1791

## Model description

More information needed

## Intended uses & limitations

More information needed

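Until the card is completed, the snippet below is a minimal usage sketch rather than documented behaviour: the model path, the task prefix, and the expectation that inputs are transliterated Akkadian are assumptions inferred from the repository name.

```python
# Minimal sketch: loading this checkpoint for Akkadian -> English translation.
# The model path, the prompt prefix and the input format are assumptions, not
# documented by this card.
from transformers import AutoTokenizer, T5ForConditionalGeneration

model_id = "cuneiform-akk-en-t5-small-instruct-small-context"  # local clone or Hub repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = T5ForConditionalGeneration.from_pretrained(model_id)

text = "Translate Akkadian to English: <transliterated Akkadian line>"  # prefix format assumed
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)
output_ids = model.generate(**inputs, max_new_tokens=128, num_beams=4)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```
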
## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training (see the sketch after this list):
- learning_rate: 4e-05
- train_batch_size: 24
- eval_batch_size: 24
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 250
- mixed_precision_training: Native AMP

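Restated as code, these settings correspond roughly to the `Seq2SeqTrainingArguments` sketch below. The output directory and the evaluation cadence are placeholders (the results table suggests evaluation every 2500 steps), and everything not listed above is left at its Trainer default.

```python
# Sketch only: the hyperparameters above expressed as Seq2SeqTrainingArguments.
# output_dir and the eval cadence are placeholders; unlisted values stay at defaults.
from transformers import Seq2SeqTrainingArguments

training_args = Seq2SeqTrainingArguments(
    output_dir="t5-small-p-l-akk-en",   # placeholder
    learning_rate=4e-5,
    per_device_train_batch_size=24,
    per_device_eval_batch_size=24,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=250,
    fp16=True,                          # "Native AMP" mixed precision
    evaluation_strategy="steps",
    eval_steps=2500,                    # matches the 2500-step cadence in the results table
)
```
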
### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:--------:|:------:|:---------------:|
| 0.1938 | 1.3270 | 2500 | 0.2014 |
| 0.1921 | 2.6539 | 5000 | 0.2010 |
| 0.1884 | 3.9809 | 7500 | 0.1993 |
| 0.1919 | 5.3079 | 10000 | 0.1985 |
| 0.1849 | 6.6348 | 12500 | 0.1981 |
| 0.1907 | 7.9618 | 15000 | 0.1969 |
| 0.1869 | 9.2887 | 17500 | 0.1970 |
| 0.1872 | 10.6157 | 20000 | 0.1969 |
| 0.183 | 11.9427 | 22500 | 0.1963 |
| 0.183 | 13.2696 | 25000 | 0.1957 |
| 0.1872 | 14.5966 | 27500 | 0.1946 |
| 0.1802 | 15.9236 | 30000 | 0.1931 |
| 0.1823 | 17.2505 | 32500 | 0.1932 |
| 0.1791 | 18.5775 | 35000 | 0.1927 |
| 0.1798 | 19.9045 | 37500 | 0.1924 |
| 0.1803 | 21.2314 | 40000 | 0.1916 |
| 0.179 | 22.5584 | 42500 | 0.1912 |
| 0.1794 | 23.8854 | 45000 | 0.1905 |
| 0.1783 | 25.2123 | 47500 | 0.1904 |
| 0.1741 | 26.5393 | 50000 | 0.1900 |
| 0.1712 | 27.8662 | 52500 | 0.1900 |
| 0.1747 | 29.1932 | 55000 | 0.1901 |
| 0.1705 | 30.5202 | 57500 | 0.1892 |
| 0.1719 | 31.8471 | 60000 | 0.1889 |
| 0.1716 | 33.1741 | 62500 | 0.1891 |
| 0.1681 | 34.5011 | 65000 | 0.1890 |
| 0.1694 | 35.8280 | 67500 | 0.1875 |
| 0.1677 | 37.1550 | 70000 | 0.1878 |
| 0.169 | 38.4820 | 72500 | 0.1861 |
| 0.17 | 39.8089 | 75000 | 0.1863 |
| 0.1662 | 41.1359 | 77500 | 0.1858 |
| 0.163 | 42.4628 | 80000 | 0.1862 |
| 0.1637 | 43.7898 | 82500 | 0.1859 |
| 0.1647 | 45.1168 | 85000 | 0.1854 |
| 0.1609 | 46.4437 | 87500 | 0.1856 |
| 0.1678 | 47.7707 | 90000 | 0.1846 |
| 0.1595 | 49.0977 | 92500 | 0.1849 |
| 0.1605 | 50.4246 | 95000 | 0.1849 |
| 0.1609 | 51.7516 | 97500 | 0.1843 |
| 0.1635 | 53.0786 | 100000 | 0.1847 |
| 0.1583 | 54.4055 | 102500 | 0.1836 |
| 0.1564 | 55.7325 | 105000 | 0.1836 |
| 0.1606 | 57.0594 | 107500 | 0.1834 |
| 0.1555 | 58.3864 | 110000 | 0.1833 |
| 0.1572 | 59.7134 | 112500 | 0.1826 |
| 0.1601 | 61.0403 | 115000 | 0.1838 |
| 0.1567 | 62.3673 | 117500 | 0.1832 |
| 0.1551 | 63.6943 | 120000 | 0.1815 |
| 0.1558 | 65.0212 | 122500 | 0.1825 |
| 0.1531 | 66.3482 | 125000 | 0.1819 |
| 0.155 | 67.6752 | 127500 | 0.1823 |
| 0.1562 | 69.0021 | 130000 | 0.1815 |
| 0.1536 | 70.3291 | 132500 | 0.1820 |
| 0.1501 | 71.6561 | 135000 | 0.1819 |
| 0.1532 | 72.9830 | 137500 | 0.1813 |
| 0.1501 | 74.3100 | 140000 | 0.1816 |
| 0.1507 | 75.6369 | 142500 | 0.1809 |
| 0.1501 | 76.9639 | 145000 | 0.1812 |
| 0.1474 | 78.2909 | 147500 | 0.1802 |
| 0.1462 | 79.6178 | 150000 | 0.1819 |
| 0.1464 | 80.9448 | 152500 | 0.1807 |
| 0.1465 | 82.2718 | 155000 | 0.1802 |
| 0.1478 | 83.5987 | 157500 | 0.1810 |
| 0.1451 | 84.9257 | 160000 | 0.1794 |
| 0.144 | 86.2527 | 162500 | 0.1816 |
| 0.144 | 87.5796 | 165000 | 0.1803 |
| 0.1453 | 88.9066 | 167500 | 0.1795 |
| 0.1429 | 90.2335 | 170000 | 0.1792 |
| 0.1438 | 91.5605 | 172500 | 0.1804 |
| 0.1452 | 92.8875 | 175000 | 0.1790 |
| 0.1453 | 94.2144 | 177500 | 0.1791 |
| 0.1406 | 95.5414 | 180000 | 0.1799 |
| 0.1391 | 96.8684 | 182500 | 0.1792 |
| 0.144 | 98.1953 | 185000 | 0.1793 |
| 0.144 | 99.5223 | 187500 | 0.1787 |
| 0.1385 | 100.8493 | 190000 | 0.1784 |
| 0.1406 | 102.1762 | 192500 | 0.1787 |
| 0.142 | 103.5032 | 195000 | 0.1800 |
| 0.1394 | 104.8301 | 197500 | 0.1787 |
| 0.1391 | 106.1571 | 200000 | 0.1789 |
| 0.1357 | 107.4841 | 202500 | 0.1797 |
| 0.1384 | 108.8110 | 205000 | 0.1785 |
| 0.1408 | 110.1380 | 207500 | 0.1792 |
| 0.1366 | 111.4650 | 210000 | 0.1800 |
| 0.1375 | 112.7919 | 212500 | 0.1792 |
| 0.1383 | 114.1189 | 215000 | 0.1790 |
| 0.1351 | 115.4459 | 217500 | 0.1788 |
| 0.1382 | 116.7728 | 220000 | 0.1784 |
| 0.1341 | 118.0998 | 222500 | 0.1791 |
| 0.1385 | 119.4268 | 225000 | 0.1788 |
| 0.1353 | 120.7537 | 227500 | 0.1783 |
| 0.1362 | 122.0807 | 230000 | 0.1783 |
| 0.1343 | 123.4076 | 232500 | 0.1783 |
| 0.1419 | 124.7346 | 235000 | 0.1786 |
| 0.1332 | 126.0616 | 237500 | 0.1787 |
| 0.1333 | 127.3885 | 240000 | 0.1785 |
| 0.1336 | 128.7155 | 242500 | 0.1782 |
| 0.132 | 130.0425 | 245000 | 0.1783 |
| 0.1299 | 131.3694 | 247500 | 0.1776 |
| 0.1313 | 132.6964 | 250000 | 0.1790 |
| 0.1302 | 134.0234 | 252500 | 0.1775 |
| 0.1301 | 135.3503 | 255000 | 0.1786 |
| 0.1337 | 136.6773 | 257500 | 0.1785 |
| 0.1302 | 138.0042 | 260000 | 0.1791 |
| 0.1288 | 139.3312 | 262500 | 0.1789 |
| 0.1321 | 140.6582 | 265000 | 0.1785 |
| 0.1299 | 141.9851 | 267500 | 0.1779 |
| 0.129 | 143.3121 | 270000 | 0.1791 |
| 0.13 | 144.6391 | 272500 | 0.1780 |
| 0.133 | 145.9660 | 275000 | 0.1786 |
| 0.1295 | 147.2930 | 277500 | 0.1781 |
| 0.1283 | 148.6200 | 280000 | 0.1780 |
| 0.127 | 149.9469 | 282500 | 0.1778 |
| 0.1246 | 151.2739 | 285000 | 0.1785 |
| 0.1293 | 152.6008 | 287500 | 0.1783 |
| 0.1259 | 153.9278 | 290000 | 0.1781 |
| 0.129 | 155.2548 | 292500 | 0.1777 |
| 0.126 | 156.5817 | 295000 | 0.1778 |
| 0.1275 | 157.9087 | 297500 | 0.1777 |
| 0.1259 | 159.2357 | 300000 | 0.1784 |
| 0.1273 | 160.5626 | 302500 | 0.1774 |
| 0.1272 | 161.8896 | 305000 | 0.1786 |
| 0.1243 | 163.2166 | 307500 | 0.1787 |
| 0.1245 | 164.5435 | 310000 | 0.1784 |
| 0.1259 | 165.8705 | 312500 | 0.1785 |
| 0.1262 | 167.1975 | 315000 | 0.1779 |
| 0.1242 | 168.5244 | 317500 | 0.1783 |
| 0.1241 | 169.8514 | 320000 | 0.1779 |
| 0.1293 | 171.1783 | 322500 | 0.1792 |
| 0.1247 | 172.5053 | 325000 | 0.1777 |
| 0.1266 | 173.8323 | 327500 | 0.1790 |
| 0.1232 | 175.1592 | 330000 | 0.1787 |
| 0.1239 | 176.4862 | 332500 | 0.1788 |
| 0.1248 | 177.8132 | 335000 | 0.1789 |
| 0.1242 | 179.1401 | 337500 | 0.1787 |
| 0.1236 | 180.4671 | 340000 | 0.1786 |
| 0.1259 | 181.7941 | 342500 | 0.1787 |
| 0.1206 | 183.1210 | 345000 | 0.1779 |
| 0.1226 | 184.4480 | 347500 | 0.1778 |
| 0.1231 | 185.7749 | 350000 | 0.1782 |
| 0.1201 | 187.1019 | 352500 | 0.1789 |
| 0.121 | 188.4289 | 355000 | 0.1791 |
| 0.1223 | 189.7558 | 357500 | 0.1792 |
| 0.1227 | 191.0828 | 360000 | 0.1779 |
| 0.121 | 192.4098 | 362500 | 0.1783 |
| 0.1211 | 193.7367 | 365000 | 0.1790 |
| 0.1249 | 195.0637 | 367500 | 0.1787 |
| 0.1216 | 196.3907 | 370000 | 0.1781 |
| 0.1224 | 197.7176 | 372500 | 0.1785 |
| 0.1208 | 199.0446 | 375000 | 0.1794 |
| 0.1203 | 200.3715 | 377500 | 0.1787 |
| 0.1179 | 201.6985 | 380000 | 0.1786 |
| 0.1214 | 203.0255 | 382500 | 0.1785 |
| 0.1204 | 204.3524 | 385000 | 0.1790 |
| 0.118 | 205.6794 | 387500 | 0.1782 |
| 0.1224 | 207.0064 | 390000 | 0.1793 |
| 0.1225 | 208.3333 | 392500 | 0.1788 |
| 0.121 | 209.6603 | 395000 | 0.1790 |
| 0.1187 | 210.9873 | 397500 | 0.1788 |
| 0.1225 | 212.3142 | 400000 | 0.1787 |
| 0.119 | 213.6412 | 402500 | 0.1786 |
| 0.1179 | 214.9682 | 405000 | 0.1793 |
| 0.1212 | 216.2951 | 407500 | 0.1790 |
| 0.12 | 217.6221 | 410000 | 0.1791 |
| 0.1204 | 218.9490 | 412500 | 0.1788 |
| 0.1202 | 220.2760 | 415000 | 0.1786 |
| 0.1224 | 221.6030 | 417500 | 0.1794 |
| 0.1175 | 222.9299 | 420000 | 0.1785 |
| 0.1188 | 224.2569 | 422500 | 0.1783 |
| 0.118 | 225.5839 | 425000 | 0.1789 |
| 0.1197 | 226.9108 | 427500 | 0.1789 |
| 0.1181 | 228.2378 | 430000 | 0.1786 |
| 0.1195 | 229.5648 | 432500 | 0.1792 |
| 0.1206 | 230.8917 | 435000 | 0.1790 |
| 0.1174 | 232.2187 | 437500 | 0.1793 |
| 0.1189 | 233.5456 | 440000 | 0.1787 |
| 0.1183 | 234.8726 | 442500 | 0.1787 |
| 0.1193 | 236.1996 | 445000 | 0.1790 |
| 0.1171 | 237.5265 | 447500 | 0.1788 |
| 0.1179 | 238.8535 | 450000 | 0.1789 |
| 0.1202 | 240.1805 | 452500 | 0.1789 |
| 0.1206 | 241.5074 | 455000 | 0.1786 |
| 0.1183 | 242.8344 | 457500 | 0.1789 |
| 0.1183 | 244.1614 | 460000 | 0.1790 |
| 0.1181 | 245.4883 | 462500 | 0.1791 |
| 0.1205 | 246.8153 | 465000 | 0.1790 |
| 0.1208 | 248.1423 | 467500 | 0.1791 |
| 0.1175 | 249.4692 | 470000 | 0.1791 |

### Framework versions

- Transformers 4.41.2
- Pytorch 2.3.1+cu121
- Datasets 2.19.1
- Tokenizers 0.19.1
added_tokens.json
ADDED
The diff for this file is too large to render.
See raw diff
config.json
ADDED
@@ -0,0 +1,61 @@
{
  "_name_or_path": "/home/bly/GitHub/results/t5-small-p-l-akk-en-20240727-131059",
  "architectures": [
    "T5ForConditionalGeneration"
  ],
  "classifier_dropout": 0.0,
  "d_ff": 2048,
  "d_kv": 64,
  "d_model": 512,
  "decoder_start_token_id": 0,
  "dense_act_fn": "relu",
  "dropout_rate": 0.1,
  "eos_token_id": 1,
  "feed_forward_proj": "relu",
  "initializer_factor": 1.0,
  "is_encoder_decoder": true,
  "is_gated_act": false,
  "layer_norm_epsilon": 1e-06,
  "model_type": "t5",
  "n_positions": 512,
  "num_decoder_layers": 6,
  "num_heads": 8,
  "num_layers": 6,
  "output_past": true,
  "pad_token_id": 0,
  "relative_attention_max_distance": 128,
  "relative_attention_num_buckets": 32,
  "task_specific_params": {
    "summarization": {
      "early_stopping": true,
      "length_penalty": 2.0,
      "max_length": 200,
      "min_length": 30,
      "no_repeat_ngram_size": 3,
      "num_beams": 4,
      "prefix": "summarize: "
    },
    "translation_en_to_de": {
      "early_stopping": true,
      "max_length": 300,
      "num_beams": 4,
      "prefix": "translate English to German: "
    },
    "translation_en_to_fr": {
      "early_stopping": true,
      "max_length": 300,
      "num_beams": 4,
      "prefix": "translate English to French: "
    },
    "translation_en_to_ro": {
      "early_stopping": true,
      "max_length": 300,
      "num_beams": 4,
      "prefix": "translate English to Romanian: "
    }
  },
  "torch_dtype": "float32",
  "transformers_version": "4.41.2",
  "use_cache": true,
  "vocab_size": 48650
}
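For orientation: this is the standard t5-small architecture (6 encoder and 6 decoder layers, d_model 512, d_ff 2048, 8 heads) with the vocabulary enlarged to 48650, and the `task_specific_params` block appears to be carried over from the original t5-small config rather than describing the Akkadian-English task. A quick sanity check, sketched below (the path is a placeholder):

```python
# Sketch: read config.json back and confirm the architecture it describes.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("cuneiform-akk-en-t5-small-instruct-small-context")  # placeholder path
print(config.model_type, config.num_layers, config.num_decoder_layers)   # expect: t5 6 6
print(config.d_model, config.d_ff, config.num_heads, config.vocab_size)  # expect: 512 2048 8 48650
```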
generation_config.json
ADDED
@@ -0,0 +1,7 @@
{
  "_from_model_config": true,
  "decoder_start_token_id": 0,
  "eos_token_id": 1,
  "pad_token_id": 0,
  "transformers_version": "4.41.2"
}
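Note that this generation config only pins the decoder start, end-of-sequence and padding token ids; decoding choices such as beam size or output length are not stored here and have to be supplied at `generate()` time, for example via a `GenerationConfig` (the values below are illustrative):

```python
# Sketch: decoding settings passed explicitly, since generation_config.json
# only stores the special token ids. num_beams / max_new_tokens are illustrative.
from transformers import GenerationConfig

gen_config = GenerationConfig(
    decoder_start_token_id=0,
    eos_token_id=1,
    pad_token_id=0,
    num_beams=4,
    max_new_tokens=128,
)
# output_ids = model.generate(**inputs, generation_config=gen_config)
```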
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ee8399d9642b252c29de32aecb14924bc2f78afc278d044311707a3c83a33391
size 275878952
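The size is consistent with roughly 69 million float32 parameters (275878952 bytes / 4 bytes per weight), which matches a t5-small backbone with the enlarged 48650-token vocabulary. After `git lfs pull`, the count can be checked directly from the file, as sketched below:

```python
# Sketch: count the parameters stored in model.safetensors
# (275878952 bytes of float32 weights is roughly 69M parameters).
from safetensors import safe_open

total = 0
with safe_open("model.safetensors", framework="pt") as f:
    for name in f.keys():
        total += f.get_tensor(name).numel()
print(f"{total:,} parameters")
```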
special_tokens_map.json
ADDED
@@ -0,0 +1,125 @@
{
  "additional_special_tokens": [
    "<extra_id_0>",
    "<extra_id_1>",
    "<extra_id_2>",
    "<extra_id_3>",
    "<extra_id_4>",
    "<extra_id_5>",
    "<extra_id_6>",
    "<extra_id_7>",
    "<extra_id_8>",
    "<extra_id_9>",
    "<extra_id_10>",
    "<extra_id_11>",
    "<extra_id_12>",
    "<extra_id_13>",
    "<extra_id_14>",
    "<extra_id_15>",
    "<extra_id_16>",
    "<extra_id_17>",
    "<extra_id_18>",
    "<extra_id_19>",
    "<extra_id_20>",
    "<extra_id_21>",
    "<extra_id_22>",
    "<extra_id_23>",
    "<extra_id_24>",
    "<extra_id_25>",
    "<extra_id_26>",
    "<extra_id_27>",
    "<extra_id_28>",
    "<extra_id_29>",
    "<extra_id_30>",
    "<extra_id_31>",
    "<extra_id_32>",
    "<extra_id_33>",
    "<extra_id_34>",
    "<extra_id_35>",
    "<extra_id_36>",
    "<extra_id_37>",
    "<extra_id_38>",
    "<extra_id_39>",
    "<extra_id_40>",
    "<extra_id_41>",
    "<extra_id_42>",
    "<extra_id_43>",
    "<extra_id_44>",
    "<extra_id_45>",
    "<extra_id_46>",
    "<extra_id_47>",
    "<extra_id_48>",
    "<extra_id_49>",
    "<extra_id_50>",
    "<extra_id_51>",
    "<extra_id_52>",
    "<extra_id_53>",
    "<extra_id_54>",
    "<extra_id_55>",
    "<extra_id_56>",
    "<extra_id_57>",
    "<extra_id_58>",
    "<extra_id_59>",
    "<extra_id_60>",
    "<extra_id_61>",
    "<extra_id_62>",
    "<extra_id_63>",
    "<extra_id_64>",
    "<extra_id_65>",
    "<extra_id_66>",
    "<extra_id_67>",
    "<extra_id_68>",
    "<extra_id_69>",
    "<extra_id_70>",
    "<extra_id_71>",
    "<extra_id_72>",
    "<extra_id_73>",
    "<extra_id_74>",
    "<extra_id_75>",
    "<extra_id_76>",
    "<extra_id_77>",
    "<extra_id_78>",
    "<extra_id_79>",
    "<extra_id_80>",
    "<extra_id_81>",
    "<extra_id_82>",
    "<extra_id_83>",
    "<extra_id_84>",
    "<extra_id_85>",
    "<extra_id_86>",
    "<extra_id_87>",
    "<extra_id_88>",
    "<extra_id_89>",
    "<extra_id_90>",
    "<extra_id_91>",
    "<extra_id_92>",
    "<extra_id_93>",
    "<extra_id_94>",
    "<extra_id_95>",
    "<extra_id_96>",
    "<extra_id_97>",
    "<extra_id_98>",
    "<extra_id_99>"
  ],
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<pad>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
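These are the 100 standard T5 sentinel tokens (`<extra_id_0>` through `<extra_id_99>`) used for span corruption, plus the usual `</s>`, `<pad>` and `<unk>`; the rest of the vocabulary lives in `spiece.model` together with the entries in `added_tokens.json` (too large to render above). A quick inspection sketch (the path is a placeholder):

```python
# Sketch: inspect the tokenizer's special tokens (path is a placeholder).
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("cuneiform-akk-en-t5-small-instruct-small-context")
print(tokenizer.eos_token, tokenizer.pad_token, tokenizer.unk_token)  # </s> <pad> <unk>
print(len(tokenizer.additional_special_tokens))                       # 100 sentinel tokens
print(len(tokenizer))                                                 # compare with vocab_size=48650 in config.json
```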
spiece.model
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d60acb128cf7b7f2536e8f38a5b18a05535c9e14c7a355904270e15b0945ea86
size 791656
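spiece.model is the underlying SentencePiece model; it can also be inspected outside of `transformers`, as sketched below. The reported piece count is the base SentencePiece vocabulary, before the tokens in `added_tokens.json` are layered on top, and the example input is purely illustrative.

```python
# Sketch: load the raw SentencePiece model behind the tokenizer.
import sentencepiece as spm

sp = spm.SentencePieceProcessor(model_file="spiece.model")
print(sp.get_piece_size())                 # base vocabulary size, before added tokens
print(sp.encode("a-wi-lum", out_type=str)) # illustrative segmentation of a transliterated form
```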
tokenizer_config.json
ADDED
The diff for this file is too large to render.
See raw diff
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f6f518194cb659911176385628bc8859d22a65831d7135c80948acfef01c809e
size 5304
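training_args.bin is the torch-pickled `TrainingArguments` object that the `Trainer` saves next to the checkpoint; it records the full training configuration, including values not repeated in the card. Because it is a pickle, it should only be loaded from a trusted source, roughly as sketched below:

```python
# Sketch: recover the exact training configuration saved by the Trainer.
# The file is a torch-pickled TrainingArguments object, so weights_only must be False.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```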