{
  "tokenizer_class": "ElectraSudachipyTokenizer",
  "do_lower_case": false,
  "do_word_tokenize": true,
  "do_subword_tokenize": true,
  "word_tokenizer_type": "sudachipy",
  "word_form_type": "dictionary_and_surface",
  "subword_tokenizer_type": "wordpiece",
  "model_max_length": 512,
  "sudachipy_kwargs": {"split_mode": "A", "dict_type": "core"},
  "auto_map": {
    "AutoTokenizer": [
      "modeling.ElectraSudachipyTokenizer",
      null
    ]
  }
}
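
A minimal loading sketch for this config, assuming the repository also contains a `modeling.py` defining `ElectraSudachipyTokenizer` (as the `auto_map` entry indicates) and that `sudachipy` and `sudachidict_core` are installed; the repo id below is a placeholder, not the actual model name.

```python
# Sketch only: load the custom Sudachi-based tokenizer described by this config.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "path/to/this-model-repo",  # placeholder: local dir or Hub repo containing this config
    trust_remote_code=True,     # needed so AutoTokenizer resolves the auto_map custom class
)

# Word tokenization via SudachiPy (split_mode "A", core dictionary),
# then WordPiece subword tokenization, per the settings above.
print(tokenizer.tokenize("自然言語処理を勉強しています。"))
```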