bertweetbr / tokenizer_config.json
Fernando Carneiro
Fix model from pretraining
67110d0
raw
history blame contribute delete
506 Bytes
{"normalization": false, "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": "<mask>", "max_len": 128, "special_tokens_map_file": null, "tokenizer_file": null, "name_or_path": "melll-uff/bertweetbr", "tokenizer_class": "BertweetTokenizer"}