Klevin Abhaykoul committed on
Commit
8a393a9
1 Parent(s): b2c4e07

Added chat template (#1)

Browse files

- Added chat template (76059107d1e86fd8bd99fbd450bede7d14e07b73)


Co-authored-by: Abhay Koul <[email protected]>

Files changed (2) hide show
  1. special_tokens_map.json +2 -21
  2. tokenizer_config.json +3 -4
special_tokens_map.json CHANGED
@@ -1,23 +1,4 @@
1
  {
2
- "bos_token": {
3
- "content": "<|begin_of_text|>",
4
- "lstrip": false,
5
- "normalized": false,
6
- "rstrip": false,
7
- "single_word": false
8
- },
9
- "eos_token": {
10
- "content": "<|end_of_text|>",
11
- "lstrip": false,
12
- "normalized": false,
13
- "rstrip": false,
14
- "single_word": false
15
- },
16
- "pad_token": {
17
- "content": "<|reserved_special_token_250|>",
18
- "lstrip": false,
19
- "normalized": false,
20
- "rstrip": false,
21
- "single_word": false
22
- }
23
  }
 
1
  {
2
+ "bos_token": "<|begin_of_text|>",
3
+ "eos_token": "<|end_of_text|>"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4
  }
tokenizer_config.json CHANGED
@@ -2050,14 +2050,13 @@
2050
  }
2051
  },
2052
  "bos_token": "<|begin_of_text|>",
 
2053
  "clean_up_tokenization_spaces": true,
2054
- "eos_token": "<|end_of_text|>",
2055
  "model_input_names": [
2056
  "input_ids",
2057
  "attention_mask"
2058
  ],
2059
- "model_max_length": 8192,
2060
- "pad_token": "<|reserved_special_token_250|>",
2061
- "padding_side": "left",
2062
  "tokenizer_class": "PreTrainedTokenizerFast"
2063
  }
 
2050
  }
2051
  },
2052
  "bos_token": "<|begin_of_text|>",
2053
+ "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}",
2054
  "clean_up_tokenization_spaces": true,
2055
+ "eos_token": "<|eot_id|>",
2056
  "model_input_names": [
2057
  "input_ids",
2058
  "attention_mask"
2059
  ],
2060
+ "model_max_length": 1000000000000000019884624838656,
 
 
2061
  "tokenizer_class": "PreTrainedTokenizerFast"
2062
  }