ZeroXClem and djuna committed
Commit ee0e8a8 · parent e7096af

Chat template and EOS (#1)

- Chat template and EOS (714b74719647699a0c9a89d7ecb6ce4691bdc992)


Co-authored-by: Djuunaa <[email protected]>

Files changed (3):

1. config.json (+8, -3)
2. special_tokens_map.json (+1, -1)
3. tokenizer_config.json (+4, -2)
config.json CHANGED

@@ -1,12 +1,17 @@
 {
-  "_name_or_path": "ZeroXClem/Llama3.1-Hermes3-SuperNova-8B-L3.1-Purosani-2-8B",
+  "_name_or_path": "unsloth/Meta-Llama-3.1-8B",
   "architectures": [
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
-  "eos_token_id": 128001,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "head_dim": 128,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
@@ -30,7 +35,7 @@
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.44.2",
+  "transformers_version": "4.45.1",
   "unsloth_version": "2024.9",
   "use_cache": true,
   "vocab_size": 128256
special_tokens_map.json CHANGED

@@ -7,7 +7,7 @@
     "single_word": false
   },
   "eos_token": {
-    "content": "<|end_of_text|>",
+    "content": "<|eot_id|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
tokenizer_config.json CHANGED

@@ -2050,14 +2050,16 @@
     }
   },
   "bos_token": "<|begin_of_text|>",
+  "chat_template": "{{ '<|begin_of_text|>' }}{% if messages[0]['role'] == 'system' %}{% set system_message = messages[0]['content'] %}{% endif %}{% if system_message is defined %}{{ '<|start_header_id|>system<|end_header_id|>\n\n' + system_message + '<|eot_id|>' }}{% endif %}{% for message in messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|start_header_id|>user<|end_header_id|>\n\n' + content + '<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n' }}{% elif message['role'] == 'assistant' %}{{ content + '<|eot_id|>' }}{% endif %}{% endfor %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|end_of_text|>",
+  "eos_token": "<|eot_id|>",
   "model_input_names": [
     "input_ids",
     "attention_mask"
   ],
   "model_max_length": 131072,
-  "pad_token": "<|finetune_right_pad_id|>",
+  "pad_token": "<|eot_id|>",
   "padding_side": "left",
+  "split_special_tokens": false,
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
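The added chat template is the Llama 3.1 header format; note that it appends the assistant header after every user turn, so the rendered prompt already ends ready for generation. A minimal sketch (assumed usage, not part of this commit) of rendering it with transformers:

```python
from transformers import AutoTokenizer

# Repo name taken from this diff; adjust to the revision you actually use.
tok = AutoTokenizer.from_pretrained(
    "ZeroXClem/Llama3.1-Hermes3-SuperNova-8B-L3.1-Purosani-2-8B"
)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tok.apply_chat_template(messages, tokenize=False)
print(prompt)
# Expected, per the template above:
# <|begin_of_text|><|start_header_id|>system<|end_header_id|>
#
# You are a helpful assistant.<|eot_id|><|start_header_id|>user<|end_header_id|>
#
# Hello!<|eot_id|><|start_header_id|>assistant<|end_header_id|>
#
```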