Shaleen123 committed (verified)
Commit 087a050 · 1 parent: 6a29af4

Upload tokenizer

Files changed (3):
  1. special_tokens_map.json +3 -3
  2. tokenizer.json +2 -2
  3. tokenizer_config.json +25 -21
special_tokens_map.json CHANGED
@@ -1,17 +1,17 @@
 {
   "bos_token": {
-    "content": "<|begin_of_text|>",
+    "content": "<|begin▁of▁sentence|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
   "eos_token": {
-    "content": "<|end_of_text|>",
+    "content": "<|end▁of▁sentence|>",
     "lstrip": false,
     "normalized": false,
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "<|end_of_text|>"
+  "pad_token": "<|end▁of▁sentence|>"
 }
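
This change swaps the Llama-style text markers for DeepSeek-style sentence markers (note the U+2581 "▁" separators inside the token strings). A minimal sketch to sanity-check the remapping after pulling this commit — the repo id below is a placeholder, not the actual model name:

```python
# Sketch: confirm the remapped special tokens after this commit.
# "Shaleen123/<model>" is a hypothetical placeholder repo id.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Shaleen123/<model>")

# All three should now report the DeepSeek-style sentence markers.
print(tok.bos_token)  # <|begin▁of▁sentence|>
print(tok.eos_token)  # <|end▁of▁sentence|>
print(tok.pad_token)  # <|end▁of▁sentence|>
```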
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
-size 17209920
+oid sha256:d91915040cfac999d8c55f4b5bc6e67367c065e3a7a4e4b9438ce1f256addd86
+size 17209530
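
Only the Git LFS pointer changes here; the underlying tokenizer.json blob is replaced. A quick sketch (assuming a local clone with LFS files materialized, and tokenizer.json in the working directory) to check that the downloaded blob matches the new pointer:

```python
# Sketch: verify a downloaded tokenizer.json against the updated LFS pointer.
import hashlib

EXPECTED_OID = "d91915040cfac999d8c55f4b5bc6e67367c065e3a7a4e4b9438ce1f256addd86"
EXPECTED_SIZE = 17209530

with open("tokenizer.json", "rb") as f:  # path is an assumption
    data = f.read()

assert len(data) == EXPECTED_SIZE, f"size mismatch: {len(data)}"
assert hashlib.sha256(data).hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("tokenizer.json matches the LFS pointer")
```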
tokenizer_config.json CHANGED
@@ -1,7 +1,10 @@
 {
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
   "added_tokens_decoder": {
     "128000": {
-      "content": "<|begin_of_text|>",
+      "content": "<|begin▁of▁sentence|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -9,7 +12,7 @@
       "special": true
     },
     "128001": {
-      "content": "<|end_of_text|>",
+      "content": "<|end▁of▁sentence|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -89,39 +92,39 @@
       "special": true
     },
     "128011": {
-      "content": "<|reserved_special_token_3|>",
+      "content": "<|User|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
-      "special": true
+      "special": false
     },
     "128012": {
-      "content": "<|reserved_special_token_4|>",
+      "content": "<|Assistant|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
-      "special": true
+      "special": false
     },
     "128013": {
-      "content": "<|reserved_special_token_5|>",
+      "content": "<think>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
-      "special": true
+      "special": false
     },
     "128014": {
-      "content": "<|reserved_special_token_6|>",
+      "content": "</think>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
       "single_word": false,
-      "special": true
+      "special": false
     },
     "128015": {
-      "content": "<|reserved_special_token_7|>",
+      "content": "<|▁pad▁|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -2049,15 +2052,16 @@
       "special": true
     }
   },
-  "bos_token": "<|begin_of_text|>",
-  "clean_up_tokenization_spaces": true,
-  "eos_token": "<|end_of_text|>",
+  "bos_token": "<|begin▁of▁sentence|>",
+  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|>'}}{% endif %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|end▁of▁sentence|>",
   "extra_special_tokens": {},
-  "model_input_names": [
-    "input_ids",
-    "attention_mask"
-  ],
-  "model_max_length": 131072,
-  "pad_token": "<|end_of_text|>",
-  "tokenizer_class": "PreTrainedTokenizerFast"
+  "legacy": true,
+  "model_max_length": 16384,
+  "pad_token": "<|end▁of▁sentence|>",
+  "sp_model_kwargs": {},
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": null,
+  "use_default_system_prompt": false
 }
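
The config rewrite repurposes reserved token slots 128011–128015 as <|User|>, <|Assistant|>, <think>, </think>, and a dedicated pad token, adds a DeepSeek-R1-style chat_template, switches tokenizer_class to LlamaTokenizer, and lowers model_max_length from 131072 to 16384. A minimal sketch exercising the new template — the repo id is again a hypothetical placeholder:

```python
# Sketch: render a conversation through the chat_template added here.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Shaleen123/<model>")  # placeholder repo id

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# add_generation_prompt=True appends the trailing <|Assistant|> tag
# so the model continues in the assistant role.
text = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(text)
# <|begin▁of▁sentence|>You are a helpful assistant.<|User|>Hello!<|Assistant|>
```

Per the template, the system prompt is emitted right after bos_token with no role tag of its own, and any <think>…</think> prefix in prior assistant turns is stripped before re-serialization.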