trained_lukeB_model / special_tokens_map.json
{
"additional_special_tokens": [
{
"content": "<ent>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false
},
{
"content": "<ent2>",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false
}
],
"bos_token": {
"content": "<s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"cls_token": {
"content": "<s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"eos_token": {
"content": "</s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"mask_token": {
"content": "<mask>",
"lstrip": true,
"normalized": true,
"rstrip": false,
"single_word": false
},
"pad_token": {
"content": "<pad>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"sep_token": {
"content": "</s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
},
"unk_token": {
"content": "<unk>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false
}
}
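For reference, a minimal sketch of how this map is picked up when the tokenizer is loaded with the Hugging Face transformers library. The directory name "trained_lukeB_model" is only assumed from this repository's path; substitute the actual hub id or the local folder that holds this file together with the rest of the tokenizer files. The lstrip/rstrip/normalized/single_word flags on each entry control how whitespace and normalization are handled when text is matched against that token.

from transformers import AutoTokenizer

# Assumption: the tokenizer files (including this special_tokens_map.json) live in
# a folder called "trained_lukeB_model"; replace with the real path or hub id.
tokenizer = AutoTokenizer.from_pretrained("trained_lukeB_model")

# <ent> and <ent2> are LUKE's entity markers; the remaining entries are the usual
# RoBERTa-style tokens (<s>, </s>, <mask>, <pad>, <unk>).
print(tokenizer.additional_special_tokens)  # expected: ['<ent>', '<ent2>']
print(tokenizer.special_tokens_map)

# Because they are registered as special tokens, the markers are kept as single
# pieces instead of being split by the BPE vocabulary.
print(tokenizer.tokenize("<ent> Beyoncé <ent> lives in <ent2> Los Angeles <ent2>."))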