Upload tokenizer
- added_tokens.json +1 -0
- special_tokens_map.json +1 -1
- tokenizer.json +2 -2
- tokenizer_config.json +10 -1
added_tokens.json
CHANGED
@@ -1,6 +1,7 @@
 {
   "</tool_call>": 151658,
   "<tool_call>": 151657,
+  "<|PAD_TOKEN|>": 151665,
   "<|box_end|>": 151649,
   "<|box_start|>": 151648,
   "<|endoftext|>": 151643,
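For reference, a change set like this is usually produced through the standard transformers API rather than by editing the JSON files by hand. A minimal sketch, assuming a stock Qwen2.5 tokenizer as the starting point (the model id and output path below are assumptions, not part of this commit):

from transformers import AutoTokenizer

# Model id is an assumption; any Qwen2-family tokenizer with the same
# added-token layout would produce an equivalent diff.
tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-7B-Instruct")

# Register a dedicated pad token. Id 151665 is the next unused id after the
# tokenizer's existing added tokens (151643-151664).
tokenizer.add_special_tokens({"pad_token": "<|PAD_TOKEN|>"})
tokenizer.padding_side = "left"

# save_pretrained rewrites all four files touched by this commit:
# added_tokens.json, special_tokens_map.json, tokenizer.json,
# tokenizer_config.json.
tokenizer.save_pretrained("tokenizer-with-pad")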
special_tokens_map.json
CHANGED
@@ -22,7 +22,7 @@
     "single_word": false
   },
   "pad_token": {
-    "content": "<|
+    "content": "<|PAD_TOKEN|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
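With added_tokens.json and special_tokens_map.json updated, the pad token should round-trip through the tokenizer. A quick sanity check, assuming the repo files have been saved locally (the path is a placeholder):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./tokenizer-with-pad")
print(tok.pad_token)                                # <|PAD_TOKEN|>
print(tok.pad_token_id)                             # 151665
print(tok.convert_tokens_to_ids("<|PAD_TOKEN|>"))   # 151665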
tokenizer.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:fab42efe8d17406525a9154b728cf9e957629a8ed7ce997770efdd71128c6a1a
+size 11422086
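tokenizer.json is tracked with Git LFS, so the diff above shows only the pointer file, not the content: the oid is the SHA-256 digest of the real 11,422,086-byte payload. After downloading the resolved file, the digest can be verified with the standard library (the local filename is an assumption):

import hashlib

with open("tokenizer.json", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

# Must match the oid recorded in the LFS pointer above.
assert digest == "fab42efe8d17406525a9154b728cf9e957629a8ed7ce997770efdd71128c6a1a"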
tokenizer_config.json
CHANGED
@@ -177,6 +177,14 @@
       "rstrip": false,
       "single_word": false,
       "special": false
+    },
+    "151665": {
+      "content": "<|PAD_TOKEN|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
     }
   },
   "additional_special_tokens": [
@@ -201,7 +209,8 @@
   "errors": "replace",
   "extra_special_tokens": {},
   "model_max_length": 131072,
-  "pad_token": "<|
+  "pad_token": "<|PAD_TOKEN|>",
+  "padding_side": "left",
   "split_special_tokens": false,
   "tokenizer_class": "Qwen2Tokenizer",
   "unk_token": null
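The new "padding_side": "left" default matters for decoder-only models like Qwen2: generation continues from the last position of each sequence, so padding has to sit on the left to keep the real prompt tokens adjacent to the generated ones. A short illustration of the batching behavior this config produces (the local path is a placeholder):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./tokenizer-with-pad")
batch = tok(["Hi", "A much longer prompt"], padding=True)

# The shorter sequence is padded on the left with id 151665, so both
# prompts end at the same position and generation can start immediately
# after the real tokens.
print(batch["input_ids"][0])  # [151665, ..., 151665, <ids for "Hi">]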