Upload tokenizer
- tokenizer.json +1 -26
- vocab.json +1 -1
tokenizer.json
CHANGED
@@ -105,32 +105,7 @@
       "UNK": 0,
       "PAD": 1,
       "WORD_BOUNDARY": 2,
-      "UTT_BOUNDARY": 3,
-      "k": 4,
-      "s": 5,
-      "o": 6,
-      "b": 7,
-      "a": 8,
-      "h": 9,
-      "n": 10,
-      "t̠ʃ": 11,
-      "i": 12,
-      "j": 13,
-      "d": 14,
-      "e": 15,
-      "ʃ": 16,
-      "u": 17,
-      "ɡ": 18,
-      "r": 19,
-      "f": 20,
-      "t": 21,
-      "m": 22,
-      "d̠ʒ": 23,
-      "l": 24,
-      "q": 25,
-      "v": 26,
-      "z": 27,
-      "p": 28
+      "UTT_BOUNDARY": 3
     },
     "unk_token": "UNK"
   }
vocab.json
CHANGED
@@ -1 +1 @@
-{"UNK":0,"PAD":1,"WORD_BOUNDARY":2,"UTT_BOUNDARY":3
+{"UNK":0,"PAD":1,"WORD_BOUNDARY":2,"UTT_BOUNDARY":3}
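After this commit, both files should agree on a four-token vocabulary, and vocab.json is valid JSON again (its closing brace was previously missing). A minimal sanity-check sketch, assuming tokenizer.json is a standard Hugging Face tokenizers file (the vocab / unk_token fields suggest a WordLevel model) and that both files sit in the working directory:

import json

from tokenizers import Tokenizer  # pip install tokenizers

# Load the updated tokenizer and read back its vocabulary.
tokenizer = Tokenizer.from_file("tokenizer.json")
vocab = tokenizer.get_vocab()

# Expected mapping after this commit: only the four special tokens remain.
assert vocab == {"UNK": 0, "PAD": 1, "WORD_BOUNDARY": 2, "UTT_BOUNDARY": 3}

# vocab.json now parses cleanly and matches tokenizer.json's vocabulary.
with open("vocab.json", encoding="utf-8") as f:
    assert json.load(f) == vocab

print("tokenizer.json and vocab.json are consistent")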