Upload tokenizer
- tokenizer.json +0 -0
- tokenizer_config.json +1 -0
- vocab.txt +0 -0
tokenizer.json
CHANGED
The diff for this file is too large to render.
See raw diff
tokenizer_config.json
CHANGED
@@ -2,6 +2,7 @@
   "cls_token": "[CLS]",
   "do_basic_tokenize": true,
   "do_lower_case": true,
+  "full_tokenizer_file": null,
   "mask_token": "[MASK]",
   "model_max_length": 1000000000000000019884624838656,
   "never_split": null,
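Once these files are pushed, the tokenizer can be loaded directly from the repo; transformers reads tokenizer_config.json (including the newly added "full_tokenizer_file" key) automatically. Below is a minimal sketch assuming a standard transformers setup; the repo id is a placeholder, not this repository's actual name.

from transformers import AutoTokenizer

# Placeholder repo id; substitute this repository's actual "user/model" path.
tokenizer = AutoTokenizer.from_pretrained("your-user/your-model")

print(tokenizer.cls_token)   # "[CLS]", as set in tokenizer_config.json
print(tokenizer.mask_token)  # "[MASK]"
# model_max_length is transformers' "no limit" sentinel, int(1e30) after
# float conversion, stored when no finite max length is configured.
print(tokenizer.model_max_length)  # 1000000000000000019884624838656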
vocab.txt
CHANGED
The diff for this file is too large to render.
See raw diff