Upload tokenizer
Files changed:
- tokenizer.json (+9, -2)
- tokenizer_config.json (+5, -0)
tokenizer.json
CHANGED
@@ -2,11 +2,18 @@
   "version": "1.0",
   "truncation": {
     "direction": "Right",
-    "max_length":
+    "max_length": 512,
     "strategy": "LongestFirst",
     "stride": 0
   },
-  "padding":
+  "padding": {
+    "strategy": "BatchLongest",
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 0,
+    "pad_type_id": 0,
+    "pad_token": "<pad>"
+  },
   "added_tokens": [
     {
       "id": 0,
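The new "truncation" and "padding" sections are read by the tokenizers runtime when the file is loaded, so this tokenizer now truncates to 512 tokens and pads batches to their longest member by default. A minimal sketch to verify that against a local copy of the updated file (the file path is a placeholder):

# Sketch: load the updated tokenizer.json with the `tokenizers` library and
# confirm the truncation/padding sections above are active. "tokenizer.json"
# here is a placeholder path to a local copy of this file.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")

# These getters reflect the "truncation" and "padding" sections of the JSON.
print(tok.truncation)  # max_length 512, longest-first, right side
print(tok.padding)     # pad_id 0, pad_token "<pad>", batch-longest padding

# With batch-longest padding enabled, every encoding in a batch is padded
# out to the length of the longest member.
batch = tok.encode_batch(["short input", "a somewhat longer input sequence"])
print([len(e.ids) for e in batch])  # equal lengths after padding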
tokenizer_config.json
CHANGED
@@ -102,11 +102,16 @@
     "<extra_id_99>"
   ],
   "clean_up_tokenization_spaces": true,
+  "device_map": "auto",
   "eos_token": "</s>",
   "extra_ids": 100,
+  "max_length": 200,
   "model_max_length": 512,
   "pad_token": "<pad>",
   "sp_model_kwargs": {},
+  "stride": 0,
   "tokenizer_class": "T5Tokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<unk>"
 }
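These keys are consumed by transformers when the tokenizer is loaded with from_pretrained. (Note that "device_map" is usually a model-loading argument rather than a tokenizer setting; a tokenizer will typically just carry unrecognized keys along rather than act on them.) A minimal sketch of how the values that matter at tokenization time surface on the loaded object; the repo id "user/t5-model" is a placeholder, since this commit page does not name the repository:

# Sketch: load the tokenizer and check the config values that govern padding
# and truncation. "user/t5-model" is a placeholder repo id.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("user/t5-model")

print(tokenizer.model_max_length)  # 512, from "model_max_length"
print(tokenizer.pad_token)         # "<pad>", id 0
print(tokenizer.truncation_side)   # "right", from the new "truncation_side" key

# When truncation is requested, inputs are capped at model_max_length (512).
enc = tokenizer("some very long text " * 400, truncation=True)
print(len(enc.input_ids))  # <= 512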