Upload tokenizer
Files changed:
- tokenizer.json: +26 -1
- vocab.json: +1 -1

tokenizer.json CHANGED
@@ -105,7 +105,32 @@
     "UNK": 0,
     "PAD": 1,
     "WORD_BOUNDARY": 2,
-    "UTT_BOUNDARY": 3
+    "UTT_BOUNDARY": 3,
+    "k": 4,
+    "s": 5,
+    "o": 6,
+    "b": 7,
+    "a": 8,
+    "h": 9,
+    "n": 10,
+    "t̠ʃ": 11,
+    "i": 12,
+    "j": 13,
+    "d": 14,
+    "e": 15,
+    "ʃ": 16,
+    "u": 17,
+    "ɡ": 18,
+    "r": 19,
+    "f": 20,
+    "t": 21,
+    "m": 22,
+    "d̠ʒ": 23,
+    "l": 24,
+    "q": 25,
+    "v": 26,
+    "z": 27,
+    "p": 28
   },
   "unk_token": "UNK"
 }
vocab.json CHANGED
@@ -1 +1 @@
-{"UNK":0,"PAD":1,"WORD_BOUNDARY":2,"UTT_BOUNDARY":3}
+{"UNK":0,"PAD":1,"WORD_BOUNDARY":2,"UTT_BOUNDARY":3,"k":4,"s":5,"o":6,"b":7,"a":8,"h":9,"n":10,"t̠ʃ":11,"i":12,"j":13,"d":14,"e":15,"ʃ":16,"u":17,"ɡ":18,"r":19,"f":20,"t":21,"m":22,"d̠ʒ":23,"l":24,"q":25,"v":26,"z":27,"p":28}
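This commit extends the vocabulary from the four special tokens to 29 entries, adding 25 phoneme symbols (ids 4-28), including multi-character IPA units such as "t̠ʃ" and "d̠ʒ". A minimal sanity-check sketch, assuming the Hugging Face tokenizers library is installed and the committed tokenizer.json is in the working directory (both assumptions, not shown in the commit itself):

from tokenizers import Tokenizer

# Load the committed tokenizer file (the path is assumed for this sketch).
tok = Tokenizer.from_file("tokenizer.json")

# Newly added phoneme entries should now resolve to their own ids
# rather than falling back to UNK.
print(tok.token_to_id("t̠ʃ"))  # expected: 11
print(tok.token_to_id("p"))    # expected: 28

# The special tokens keep their original ids.
print(tok.token_to_id("UNK"))           # expected: 0
print(tok.token_to_id("UTT_BOUNDARY"))  # expected: 3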