colorlessideas committed
Commit d2e0f40 · verified · parent: 11a88fb

Upload tokenizer

Files changed (3):
  1. added_tokens.json      +2 -2
  2. tokenizer_config.json  +4 -5
  3. vocab.json             +6 -7
added_tokens.json CHANGED
@@ -1,4 +1,4 @@
 {
-  "</s>": 36,
-  "<s>": 35
+  "</s>": 35,
+  "<s>": 34
 }
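
The <s>/</s> IDs shift down by one because the base vocabulary in vocab.json (below) ends up one entry shorter, so the two special tokens are appended at 34 and 35 instead of 35 and 36. A minimal sanity check, assuming the uploaded files are saved together in a local directory ("./tokenizer" is a placeholder path, not part of this repo):

from transformers import Wav2Vec2CTCTokenizer

# Load the uploaded vocab.json / tokenizer_config.json / added_tokens.json
# from a local copy; "./tokenizer" is illustrative only.
tok = Wav2Vec2CTCTokenizer.from_pretrained("./tokenizer")

print(tok.convert_tokens_to_ids(["<s>", "</s>"]))  # expected with these files: [34, 35]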
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "added_tokens_decoder": {
-    "33": {
+    "32": {
       "content": "[UNK]",
       "lstrip": true,
       "normalized": false,
@@ -8,7 +8,7 @@
       "single_word": false,
       "special": false
     },
-    "34": {
+    "33": {
       "content": "[PAD]",
       "lstrip": true,
       "normalized": false,
@@ -16,7 +16,7 @@
       "single_word": false,
       "special": false
     },
-    "35": {
+    "34": {
       "content": "<s>",
       "lstrip": false,
       "normalized": false,
@@ -24,7 +24,7 @@
       "single_word": false,
       "special": true
     },
-    "36": {
+    "35": {
       "content": "</s>",
       "lstrip": false,
       "normalized": false,
@@ -40,7 +40,6 @@
   "extra_special_tokens": {},
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "[PAD]",
-  "processor_class": "Wav2Vec2Processor",
   "replace_word_delimiter_char": " ",
   "target_lang": null,
   "tokenizer_class": "Wav2Vec2CTCTokenizer",
vocab.json CHANGED
@@ -1,6 +1,6 @@
 {
-  "[PAD]": 34,
-  "[UNK]": 33,
+  "[PAD]": 33,
+  "[UNK]": 32,
   "a": 1,
   "b": 2,
   "c": 3,
@@ -24,14 +24,13 @@
   "u": 21,
   "v": 22,
   "w": 23,
-  "y": 24,
-  "z": 25,
+  "x": 24,
+  "y": 25,
+  "z": 26,
   "|": 0,
-  "¿": 26,
   "á": 27,
   "é": 28,
   "í": 29,
   "ñ": 30,
-  "ó": 31,
-  "ú": 32
+  "ó": 31
 }
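
The net vocabulary change adds "x" and drops "¿" and "ú", which is what shifts every later ID down by one and drives the re-indexing in the other two files. A minimal sketch constructing the CTC tokenizer directly from the updated vocab.json (the token arguments mirror the values in tokenizer_config.json; the file path is illustrative):

from transformers import Wav2Vec2CTCTokenizer

tok = Wav2Vec2CTCTokenizer(
    vocab_file="vocab.json",
    unk_token="[UNK]",
    pad_token="[PAD]",
    bos_token="<s>",
    eos_token="</s>",
    word_delimiter_token="|",
)

print(tok("extra").input_ids)  # "x" now maps to 24 instead of the [UNK] id
print(tok("ú").input_ids)      # "ú" is no longer in the vocab, so it falls back to [UNK] (32)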