simonycl committed
Commit fc61ef5
1 Parent(s): c395b06

Upload tokenizer

special_tokens_map.json CHANGED
@@ -1,15 +1,15 @@
 {
-  "bos_token": "<s>",
-  "cls_token": "<s>",
-  "eos_token": "</s>",
+  "bos_token": "[CLS]",
+  "cls_token": "[CLS]",
+  "eos_token": "[SEP]",
   "mask_token": {
-    "content": "<mask>",
+    "content": "[MASK]",
     "lstrip": true,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
   "pad_token": "<pad>",
-  "sep_token": "</s>",
+  "sep_token": "[SEP]",
   "unk_token": "<unk>"
 }
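This change replaces the RoBERTa-style special tokens (`<s>`, `</s>`, `<mask>`) with ALBERT-style ones (`[CLS]`, `[SEP]`, `[MASK]`); `<pad>` and `<unk>` are unchanged. A minimal sketch of how the map could be verified after this commit, assuming a local checkout of the repo (the path is a placeholder):

```python
from transformers import AutoTokenizer

# Placeholder path: a local checkout of this repo at commit fc61ef5.
tokenizer = AutoTokenizer.from_pretrained("./path/to/checkout")

# After this commit the special-token map should carry the ALBERT-style tokens.
print(tokenizer.special_tokens_map)
# expected to include: 'bos_token': '[CLS]', 'eos_token': '[SEP]',
#                      'sep_token': '[SEP]', 'mask_token': '[MASK]'
```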
spiece.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fefb02b667a6c5c2fe27602d28e5fb3428f66ab89c7d6f388e7c8d44a02d0336
+size 760289
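`spiece.model` is a SentencePiece vocabulary; the binary blob lives in Git LFS, so the diff shows only the LFS pointer (spec version, SHA-256 object id, and size in bytes). Once the blob is fetched (e.g. with `git lfs pull`), it can be loaded with the `sentencepiece` package; a minimal sketch, with an illustrative sample sentence:

```python
import sentencepiece as spm

# Load the SentencePiece model added in this commit (fetched via Git LFS).
sp = spm.SentencePieceProcessor(model_file="spiece.model")

print(sp.get_piece_size())                     # vocabulary size
print(sp.encode("Hello world", out_type=str))  # subword pieces for a sample sentence
```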
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
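`tokenizer.json` holds the full fast-tokenizer serialization, which is why its diff is too large to render inline. If needed, the file from this commit can be inspected directly with the `tokenizers` library; a minimal sketch:

```python
from tokenizers import Tokenizer

# Load the fast-tokenizer serialization shipped in this commit.
tok = Tokenizer.from_file("tokenizer.json")
print(tok.encode("Hello world").tokens)  # tokens produced by the updated tokenizer
```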
 
tokenizer_config.json CHANGED
@@ -1,15 +1,22 @@
 {
-  "add_prefix_space": false,
-  "bos_token": "<s>",
+  "bos_token": "[CLS]",
   "clean_up_tokenization_spaces": true,
-  "cls_token": "<s>",
-  "eos_token": "</s>",
-  "errors": "replace",
-  "mask_token": "<mask>",
+  "cls_token": "[CLS]",
+  "do_lower_case": true,
+  "eos_token": "[SEP]",
+  "keep_accents": false,
+  "mask_token": {
+    "__type": "AddedToken",
+    "content": "[MASK]",
+    "lstrip": true,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "model_max_length": 512,
   "pad_token": "<pad>",
-  "sep_token": "</s>",
-  "tokenizer_class": "RobertaTokenizer",
-  "trim_offsets": true,
+  "remove_space": true,
+  "sep_token": "[SEP]",
+  "tokenizer_class": "AlbertTokenizer",
   "unk_token": "<unk>"
 }
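The decisive change here is `tokenizer_class`: `RobertaTokenizer` becomes `AlbertTokenizer`, so the RoBERTa-only options (`add_prefix_space`, `errors`, `trim_offsets`) are dropped and ALBERT-specific ones (`do_lower_case`, `keep_accents`, `remove_space`) are added. `AutoTokenizer` reads this field when deciding which class to instantiate; a minimal sketch, again with a placeholder checkout path:

```python
from transformers import AutoTokenizer

# Placeholder path: a local checkout of this repo at this commit.
tokenizer = AutoTokenizer.from_pretrained("./path/to/checkout")

# tokenizer_class in tokenizer_config.json now selects the ALBERT tokenizer.
print(type(tokenizer).__name__)  # e.g. AlbertTokenizerFast (AlbertTokenizer with use_fast=False)
print(tokenizer.do_lower_case)   # True, per the new config
```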