novel-zh-en / tokenizer_config.json
{
"eos_token": "</s>",
"model_max_length": 512,
"name_or_path": "/content/drive/MyDrive/Models/TranslationTest_Pronoun2/checkpoint-150000",
"pad_token": "<pad>",
"separate_vocabs": false,
"source_lang": "zho",
"sp_model_kwargs": {},
"special_tokens_map_file": null,
"target_lang": "eng",
"tokenizer_class": "MarianTokenizer",
"unk_token": "<unk>"
}
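
This config is consumed by the transformers library's MarianTokenizer; source_lang "zho" and target_lang "eng" mark it as a Chinese-to-English model, and inputs longer than model_max_length (512) are truncated. Below is a minimal usage sketch. The repo id "penpen/novel-zh-en" is an assumption inferred from the uploader and repo name on this page; the Google Drive path in name_or_path is a training-time checkpoint location, not something to load from.

# Minimal usage sketch; "penpen/novel-zh-en" is an assumed repo id,
# adjust it to the actual Hugging Face repo if it differs.
from transformers import MarianTokenizer

# Loads vocab and SentencePiece model alongside this tokenizer_config.json.
tokenizer = MarianTokenizer.from_pretrained("penpen/novel-zh-en")

# Encode a Chinese sentence; truncation caps it at model_max_length (512).
batch = tokenizer("你好，世界。", truncation=True)
print(batch["input_ids"])  # token ids, ending with the </s> eos token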