AIJUUD committed on
Commit
4e1c5c5
1 Parent(s): 16be8bd

Update tokenizer_config.json

Browse files
Files changed (1) hide show
  1. tokenizer_config.json +13 -1
tokenizer_config.json CHANGED
@@ -1744,7 +1744,19 @@
1744
  "<end_of_turn>"
1745
  ],
1746
  "bos_token": "<bos>",
1747
- "chat_template": "{% set system_message = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n' %}{% if messages[0]['role'] == 'system' %}{% set system_message = messages[0]['content'] %}{% endif %}{% if system_message is defined %}{{ system_message }}{% endif %}{% for message in messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '### Instruction:\n' + content + '\n\n### Response:\n' }}{% elif message['role'] == 'assistant' %}{{ content + '<eos>' + '\n\n' }}{% endif %}{% endfor %}",
 
 
 
 
 
 
 
 
 
 
 
 
1748
  "clean_up_tokenization_spaces": false,
1749
  "eos_token": "<eos>",
1750
  "model_max_length": 1000000000000000019884624838656,
 
1744
  "<end_of_turn>"
1745
  ],
1746
  "bos_token": "<bos>",
1747
+ "chat_template": "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '<start_of_turn>' + role + '\n' + message['content'] | trim + '<end_of_turn>\n' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model\n'}}{% endif %}",
1748
+ "clean_up_tokenization_spaces": false,
1749
+ "eos_token": "<eos>",
1750
+ "model_max_length": 1000000000000000019884624838656,
1751
+ "pad_token": "<pad>",
1752
+ "sp_model_kwargs": {},
1753
+ "spaces_between_special_tokens": false,
1754
+ "tokenizer_class": "GemmaTokenizer",
1755
+ "unk_token": "<unk>",
1756
+ "use_default_system_prompt": false
1757
+ }
1758
+
1759
+
1760
  "clean_up_tokenization_spaces": false,
1761
  "eos_token": "<eos>",
1762
  "model_max_length": 1000000000000000019884624838656,