mattmajestic committed
Commit e508d80
1 Parent(s): d064d0d

Upload tokenizer

special_tokens_map.json CHANGED

@@ -13,7 +13,13 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "<|endoftext|>",
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
   "unk_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
tokenizer.json ADDED
The diff for this file is too large to render; see the raw diff.
 
tokenizer_config.json CHANGED

@@ -1,5 +1,4 @@
 {
-  "add_bos_token": false,
   "add_prefix_space": false,
   "added_tokens_decoder": {
     "50256": {
@@ -14,9 +13,15 @@
   "bos_token": "<|endoftext|>",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
-  "errors": "replace",
+  "max_length": 512,
   "model_max_length": 1024,
+  "pad_to_multiple_of": null,
   "pad_token": "<|endoftext|>",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
+  "stride": 0,
   "tokenizer_class": "GPT2Tokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<|endoftext|>"
 }
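Of the new keys, padding_side, truncation_side, and pad_token_type_id are genuine tokenizer attributes; keys such as max_length, stride, truncation_strategy, and pad_to_multiple_of look like call-time padding/truncation kwargs captured when the tokenizer was saved, and they do not make encoding pad or truncate by default. A minimal sketch under that assumption, again with a placeholder repo id:

```python
from transformers import AutoTokenizer

# Hypothetical repo id; substitute the actual model repo.
tok = AutoTokenizer.from_pretrained("mattmajestic/gpt2-tokenizer")

print(tok.padding_side)     # "right", per the new config key
print(tok.truncation_side)  # "right"

# Padding and truncation remain opt-in per call; pad_token being set
# (see special_tokens_map.json above) is what makes padding possible at all.
batch = tok(
    ["first example", "a somewhat longer second example"],
    padding="longest",   # pads on tok.padding_side
    truncation=True,     # truncates on tok.truncation_side
    max_length=512,      # mirrors the saved max_length
)
# Both sequences now have equal length after padding:
print(len(batch["input_ids"][0]) == len(batch["input_ids"][1]))  # True
```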
vocab.json CHANGED
The diff for this file is too large to render; see the raw diff.