enniA-0 committed
Commit cb64008 · 1 Parent(s): 880fd87

Upload tokenizer

special_tokens_map.json CHANGED
@@ -13,7 +13,7 @@
  "rstrip": false,
  "single_word": false
  },
- "pad_token": "</s>",
+ "pad_token": "<unk>",
  "unk_token": {
  "content": "<unk>",
  "lstrip": false,
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
  {
  "version": "1.0",
- "truncation": {
- "direction": "Right",
- "max_length": 1024,
- "strategy": "LongestFirst",
- "stride": 0
- },
+ "truncation": null,
  "padding": null,
  "added_tokens": [
  {
@@ -76,12 +71,6 @@
  "id": "A",
  "type_id": 0
  }
- },
- {
- "SpecialToken": {
- "id": "</s>",
- "type_id": 0
- }
  }
  ],
  "pair": [
@@ -97,12 +86,6 @@
  "type_id": 0
  }
  },
- {
- "SpecialToken": {
- "id": "</s>",
- "type_id": 0
- }
- },
  {
  "SpecialToken": {
  "id": "<s>",
@@ -114,24 +97,9 @@
  "id": "B",
  "type_id": 1
  }
- },
- {
- "SpecialToken": {
- "id": "</s>",
- "type_id": 1
- }
  }
  ],
  "special_tokens": {
- "</s>": {
- "id": "</s>",
- "ids": [
- 2
- ],
- "tokens": [
- "</s>"
- ]
- },
  "<s>": {
  "id": "<s>",
  "ids": [
tokenizer_config.json CHANGED
@@ -1,6 +1,4 @@
  {
- "add_bos_token": true,
- "add_eos_token": true,
  "added_tokens_decoder": {
  "0": {
  "content": "<unk>",
@@ -40,9 +38,9 @@
  "eos_token": "</s>",
  "legacy": false,
  "model_max_length": 1000000000000000019884624838656,
- "pad_token": "</s>",
+ "pad_token": "<unk>",
  "sp_model_kwargs": {},
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": "<unk>",
- "use_default_system_prompt": false
+ "use_default_system_prompt": true
  }
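
With add_bos_token and add_eos_token removed from tokenizer_config.json, the LlamaTokenizer defaults should apply (BOS prepended, EOS not appended), whereas before this commit EOS was appended explicitly. A rough check, again with a hypothetical checkpoint path:

from transformers import AutoTokenizer

# Hypothetical path; use_fast=False forces the LlamaTokenizer class named in the config.
tok = AutoTokenizer.from_pretrained("path/to/checkpoint", use_fast=False)

ids = tok("hello world").input_ids
print(ids[0] == tok.bos_token_id)    # True: BOS is still prepended by default
print(ids[-1] == tok.eos_token_id)   # False: EOS is no longer appended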