ArtCad98 committed
Commit c02fb2e (1 parent: 0baabd6)

Upload 2 files

Files changed (2)
  1. tokenizer (2).json +0 -0
  2. tokenizer_config (1).json +110 -0
tokenizer (2).json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config (1).json ADDED
@@ -0,0 +1,110 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "[CLS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "[SEP]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "4": {
+       "content": "[MASK]",
+       "lstrip": true,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "65536": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "65537": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "65538": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "65539": {
+       "content": "<pad>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "65540": {
+       "content": "<mask>",
+       "lstrip": true,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [
+     "[UNK]",
+     "[PAD]",
+     "[CLS]",
+     "[SEP]",
+     "[MASK]",
+     "<s>",
+     "</s>",
+     "<unk>",
+     "<pad>",
+     "<mask>"
+   ],
+   "bos_token": "[CLS]",
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "eos_token": "[SEP]",
+   "errors": "replace",
+   "mask_token": "[MASK]",
+   "max_len": 512,
+   "model_max_length": 512,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "tokenizer_class": "RobertaTokenizer",
+   "trim_offsets": true,
+   "unk_token": "[UNK]"
+ }
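
For reference, a minimal sketch of how this configuration is typically consumed with Hugging Face transformers. The config declares a RobertaTokenizer that maps BERT-style special tokens onto the RoBERTa roles ([CLS] as bos/cls, [SEP] as eos/sep, [PAD], [UNK], [MASK]) with a 512-token limit. The local directory path below is a placeholder and assumes the two uploaded files are saved under their standard names tokenizer.json and tokenizer_config.json.

# Minimal sketch, assuming the files are placed in a local directory as
# tokenizer.json and tokenizer_config.json ("./tokenizer_dir" is a placeholder).
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./tokenizer_dir")

# Special-token roles as declared in tokenizer_config.json:
# bos/cls -> [CLS], eos/sep -> [SEP], pad -> [PAD], unk -> [UNK], mask -> [MASK].
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.model_max_length)

# Encoding wraps the sequence in [CLS] ... [SEP] and truncates at 512 tokens.
ids = tokenizer("hello world", truncation=True)["input_ids"]
print(tokenizer.convert_ids_to_tokens(ids))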