{
  "added_tokens_decoder": {},
  "clean_up_tokenization_spaces": false,
  "extra_special_tokens": {},
  "model_max_length": 1000000000000000019884624838656,
  "tokenizer_class": "NomTokenizer",
  "auto_map": {
    "AutoTokenizer": [
      "tokenization_nombert.NomTokenizer",
      null
    ]
  },
  "unk_token": "<UNK>"
}