octo-net / special_tokens_map.json
{
  "additional_special_tokens": [
    "<nexa_0>",
    "<nexa_1>",
    "<nexa_2>",
    "<nexa_3>",
    "<nexa_4>",
    "<nexa_5>",
    "<nexa_6>",
    "<nexa_7>",
    "<nexa_8>",
    "<nexa_9>",
    "<nexa_10>",
    "<nexa_11>",
    "<nexa_12>",
    "<nexa_13>",
    "<nexa_14>",
    "<nexa_15>",
    "<nexa_16>",
    "<nexa_17>",
    "<nexa_18>",
    "<nexa_19>",
    "<nexa_20>",
    "<nexa_21>",
    "<nexa_22>",
    "<nexa_23>",
    "<nexa_24>",
    "<nexa_25>",
    "<nexa_26>",
    "<nexa_27>",
    "<nexa_28>",
    "<nexa_29>",
    "<nexa_end>"
  ],
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
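
This map registers 31 additional special tokens (<nexa_0> through <nexa_29> plus <nexa_end>) along with the BOS, EOS, pad, and UNK tokens; note that EOS and pad both resolve to <|endoftext|>. Below is a minimal sketch of how the file is consumed when the tokenizer is loaded with Hugging Face transformers; the repo id "alexchen4ai/octo-net" is an assumption inferred from the page header, so substitute the actual model path if it differs.

from transformers import AutoTokenizer

# Load the tokenizer; the repo id below is assumed, not confirmed by this file.
tokenizer = AutoTokenizer.from_pretrained("alexchen4ai/octo-net")

# Each additional special token should stay atomic, i.e. encode to a single id
# rather than being split into sub-word pieces.
for token in tokenizer.additional_special_tokens:
    ids = tokenizer.encode(token, add_special_tokens=False)
    assert len(ids) == 1, f"{token} unexpectedly split into {ids}"
    print(f"{token} -> {ids[0]}")

# bos/eos/pad/unk are populated from this same map; eos and pad share <|endoftext|>.
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.pad_token, tokenizer.unk_token)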