{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<s>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<pad>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "50264": {
      "content": "<mask>",
      "lstrip": true,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": true,
  "cls_token": "<s>",
  "eos_token": "</s>",
  "errors": "replace",
  "mask_token": "<mask>",
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<pad>",
  "sep_token": "</s>",
  "tokenizer_class": "BartTokenizer",
  "trim_offsets": true,
  "unk_token": "<unk>"
}
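
For context, a minimal sketch of how a tokenizer is loaded from this configuration with the Hugging Face transformers library; the repository id "facebook/bart-base" below is an illustrative assumption, substitute the directory or repo that actually contains this file.

from transformers import AutoTokenizer

# from_pretrained() reads tokenizer_config.json, honouring tokenizer_class
# (BartTokenizer) and the special tokens declared in added_tokens_decoder.
tokenizer = AutoTokenizer.from_pretrained("facebook/bart-base")  # assumed repo id

# The special-token attributes mirror the fields above.
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.pad_token,
      tokenizer.unk_token, tokenizer.mask_token)

# Encoding wraps the text with the configured bos/eos markers: the id sequence
# starts with 0 (<s>) and ends with 2 (</s>), matching added_tokens_decoder.
ids = tokenizer("Hello world").input_ids
print(ids)
print(tokenizer.decode(ids))

Note that the very large model_max_length is the library's sentinel for "no length limit recorded in the config"; callers typically pass their own max_length when truncation matters.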