{
  "add_bos_token": false,
  "add_prefix_space": false,
  "additional_special_tokens": [
    "<|endoftext|>",
    "<fim_prefix>",
    "<fim_middle>",
    "<fim_suffix>",
    "<fim_pad>",
    "<filename>",
    "<gh_stars>",
    "<issue_start>",
    "<issue_comment>",
    "<issue_closed>",
    "<jupyter_start>",
    "<jupyter_text>",
    "<jupyter_code>",
    "<jupyter_output>",
    "<empty_output>",
    "<commit_before>",
    "<commit_msg>",
    "<commit_after>",
    "<reponame>"
  ],
  "bos_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "clean_up_tokenization_spaces": true,
  "eos_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "errors": "replace",
  "model_max_length": 2048,
  "pad_token": null,
  "padding_side": "right",
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "vocab_size": 49152
}