Upload tokenizer
- tokenizer.json +25 -23
- vocab.json +1 -1
tokenizer.json
CHANGED
@@ -106,31 +106,33 @@
       "PAD": 1,
       "WORD_BOUNDARY": 2,
       "UTT_BOUNDARY": 3,
-      … (old lines 109-118: ten removed vocab entries; token strings truncated in extraction)
-      "d": 14,
-      … (old lines 120-122: three removed vocab entries; token strings truncated in extraction)
+      "m": 4,
+      "a̟": 5,
+      "b": 6,
+      "s": 7,
+      "e": 8,
+      "r": 9,
+      "j": 10,
+      "h": 11,
+      "t̠ʃ": 12,
+      "kʰ": 13,
+      "d̪": 14,
+      "n̪": 15,
+      "z": 16,
+      "ʃ": 17,
       "ɡ": 18,
-      … (old lines 124-130: seven removed vocab entries; token strings truncated in extraction)
+      "i": 19,
+      "u": 20,
+      "o": 21,
+      "f": 22,
+      "t̪ʰ": 23,
+      "ɑ": 24,
+      "d̠ʒ": 25,
       "v": 26,
-      … (old lines 132-133: two removed vocab entries; token strings truncated in extraction)
+      "pʰ": 27,
+      "l": 28,
+      "w": 29,
+      "ɢ": 30
     },
     "unk_token": "UNK"
   }
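As a quick sanity check, here is a minimal sketch of loading the committed file with the Hugging Face `tokenizers` library. The `"unk_token": "UNK"` field above suggests a WordLevel-style model, but this diff does not show the pre-tokenizer, so the whitespace-separated phoneme input and the expected outputs below are assumptions, not confirmed behavior.

```python
# Minimal sketch, assuming tokenizer.json is a standard `tokenizers`-format
# file with a WordLevel model and a whitespace pre-tokenizer (neither is
# shown in the diff above).
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")

# Space-separated phonemes; symbols missing from the vocab fall back to
# "UNK" (id 0).
enc = tok.encode("m a̟ b s")
print(enc.tokens)  # expected under these assumptions: ['m', 'a̟', 'b', 's']
print(enc.ids)     # expected under these assumptions: [4, 5, 6, 7]
```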
vocab.json
CHANGED
@@ -1 +1 @@
-{"UNK":0,"PAD":1,"WORD_BOUNDARY":2,"UTT_BOUNDARY":3,"… (rest of old line truncated in extraction)
+{"UNK":0,"PAD":1,"WORD_BOUNDARY":2,"UTT_BOUNDARY":3,"m":4,"a̟":5,"b":6,"s":7,"e":8,"r":9,"j":10,"h":11,"t̠ʃ":12,"kʰ":13,"d̪":14,"n̪":15,"z":16,"ʃ":17,"ɡ":18,"i":19,"u":20,"o":21,"f":22,"t̪ʰ":23,"ɑ":24,"d̠ʒ":25,"v":26,"pʰ":27,"l":28,"w":29,"ɢ":30}
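Since several of the new entries differ from their predecessors only by a combining diacritic (the removed "d": 14 becomes "d̪": 14 above), a short standard-library check that the map round-trips intact can be useful. A minimal sketch, assuming the committed vocab.json is available locally:

```python
# Minimal sketch: inspect the flat token-to-id map in the committed vocab.json.
import json

with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)

# Invert the map to look up tokens by id.
id_to_token = {i: t for t, i in vocab.items()}

print(len(vocab))            # 31 entries: 4 special tokens + 27 phonemes
print(id_to_token[14])       # 'd̪' after this commit (previously 'd')
print(len(id_to_token[14]))  # 2 code points: 'd' + U+032A COMBINING BRIDGE BELOW
```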