sariola committed on
Commit
a5bbe3b
1 Parent(s): 68bb2c2

Upload folder using huggingface_hub

config.json ADDED
@@ -0,0 +1,177 @@
+ {
+ "_name_or_path": "/home/ks/.cache/huggingface/hub/models--flowaicom--Flow-Judge-v0.1/snapshots/b7a47acd7c86e981145168e4dea1bef7d84a0894",
+ "architectures": [
+ "Phi3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "microsoft/Phi-3.5-mini-instruct--configuration_phi3.Phi3Config",
+ "AutoModelForCausalLM": "microsoft/Phi-3.5-mini-instruct--modeling_phi3.Phi3ForCausalLM"
+ },
+ "bos_token_id": 1,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 32000,
+ "hidden_act": "silu",
+ "hidden_size": 3072,
+ "initializer_range": 0.02,
+ "intermediate_size": 8192,
+ "max_position_embeddings": 131072,
+ "model_type": "phi3",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "original_max_position_embeddings": 4096,
+ "pad_token_id": 32000,
+ "quantization_config": {
+ "config_groups": {
+ "group_0": {
+ "input_activations": null,
+ "output_activations": null,
+ "targets": [
+ "Linear"
+ ],
+ "weights": {
+ "actorder": null,
+ "block_structure": null,
+ "dynamic": false,
+ "group_size": 128,
+ "num_bits": 4,
+ "observer": "minmax",
+ "observer_kwargs": {},
+ "strategy": "group",
+ "symmetric": true,
+ "type": "int"
+ }
+ }
+ },
+ "format": "pack-quantized",
+ "global_compression_ratio": 1.4203222504941144,
+ "ignore": [
+ "lm_head"
+ ],
+ "kv_cache_scheme": null,
+ "quant_method": "compressed-tensors",
+ "quantization_status": "compressed",
+ "sparsity_config": {
+ "format": "dense",
+ "global_sparsity": 0.1500294975169958,
+ "ignore": [],
+ "registry_requires_subclass": false,
+ "sparsity_structure": "unstructured",
+ "targets": []
+ }
+ },
+ "resid_pdrop": 0.0,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "long_factor": [
+ 1.0800000429153442,
+ 1.1100000143051147,
+ 1.1399999856948853,
+ 1.340000033378601,
+ 1.5899999141693115,
+ 1.600000023841858,
+ 1.6200000047683716,
+ 2.620000123977661,
+ 3.2300000190734863,
+ 3.2300000190734863,
+ 4.789999961853027,
+ 7.400000095367432,
+ 7.700000286102295,
+ 9.09000015258789,
+ 12.199999809265137,
+ 17.670000076293945,
+ 24.46000099182129,
+ 28.57000160217285,
+ 30.420001983642578,
+ 30.840002059936523,
+ 32.590003967285156,
+ 32.93000411987305,
+ 42.320003509521484,
+ 44.96000289916992,
+ 50.340003967285156,
+ 50.45000457763672,
+ 57.55000305175781,
+ 57.93000411987305,
+ 58.21000289916992,
+ 60.1400032043457,
+ 62.61000442504883,
+ 62.62000274658203,
+ 62.71000289916992,
+ 63.1400032043457,
+ 63.1400032043457,
+ 63.77000427246094,
+ 63.93000411987305,
+ 63.96000289916992,
+ 63.970001220703125,
+ 64.02999877929688,
+ 64.06999969482422,
+ 64.08000183105469,
+ 64.12000274658203,
+ 64.41000366210938,
+ 64.4800033569336,
+ 64.51000213623047,
+ 64.52999877929688,
+ 64.83999633789062
+ ],
+ "short_factor": [
+ 1.0,
+ 1.0199999809265137,
+ 1.0299999713897705,
+ 1.0299999713897705,
+ 1.0499999523162842,
+ 1.0499999523162842,
+ 1.0499999523162842,
+ 1.0499999523162842,
+ 1.0499999523162842,
+ 1.0699999332427979,
+ 1.0999999046325684,
+ 1.1099998950958252,
+ 1.1599998474121094,
+ 1.1599998474121094,
+ 1.1699998378753662,
+ 1.2899998426437378,
+ 1.339999794960022,
+ 1.679999828338623,
+ 1.7899998426437378,
+ 1.8199998140335083,
+ 1.8499997854232788,
+ 1.8799997568130493,
+ 1.9099997282028198,
+ 1.9399996995925903,
+ 1.9899996519088745,
+ 2.0199997425079346,
+ 2.0199997425079346,
+ 2.0199997425079346,
+ 2.0199997425079346,
+ 2.0199997425079346,
+ 2.0199997425079346,
+ 2.0299997329711914,
+ 2.0299997329711914,
+ 2.0299997329711914,
+ 2.0299997329711914,
+ 2.0299997329711914,
+ 2.0299997329711914,
+ 2.0299997329711914,
+ 2.0299997329711914,
+ 2.0299997329711914,
+ 2.0799996852874756,
+ 2.0899996757507324,
+ 2.189999580383301,
+ 2.2199995517730713,
+ 2.5899994373321533,
+ 2.729999542236328,
+ 2.749999523162842,
+ 2.8399994373321533
+ ],
+ "type": "longrope"
+ },
+ "rope_theta": 10000.0,
+ "sliding_window": 262144,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.45.2",
+ "use_cache": true,
+ "vocab_size": 32064
+ }
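Note (not part of the committed files): the config above describes a Phi-3-based causal LM whose Linear weights are stored as 4-bit integers with group size 128 via the compressed-tensors format, with lm_head left unquantized. A minimal loading sketch, assuming transformers >= 4.45 with the compressed-tensors package installed; "<repo-or-local-path>" is a placeholder for this repository's id or a local clone, not a value taken from the diff.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "<repo-or-local-path>"  # placeholder path, hypothetical
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,   # matches "torch_dtype": "bfloat16" in config.json
    device_map="auto",            # transformers dequantizes the packed int4 weights on load
)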
generation_config.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": [
+ 32007,
+ 32001,
+ 32000
+ ],
+ "pad_token_id": 32000,
+ "transformers_version": "4.45.2"
+ }
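Note (not part of the committed files): the generation config registers three stop ids, 32007 (<|end|>), 32001 (<|assistant|>) and 32000 (<|endoftext|>), so generation halts on any of them. A hedged sketch continuing the loading example above; the prompt text is illustrative only.

from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained(model_id)  # picks up eos_token_id [32007, 32001, 32000]
prompt = "<|user|>\nWhat does this model evaluate?<|endoftext|>\n<|assistant|>\n"  # illustrative
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
out = model.generate(**inputs, generation_config=gen_cfg, max_new_tokens=64)
print(tokenizer.decode(out[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))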
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:782719ca9477b02d4afc759690ef0b04ca4249be2e2644ae2afda1cc3a8d3eaf
+ size 2263018168
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,131 @@
+ {
+ "add_bos_token": false,
+ "add_eos_token": false,
+ "add_prefix_space": null,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": false
+ },
+ "32000": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "32001": {
+ "content": "<|assistant|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "32002": {
+ "content": "<|placeholder1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "32003": {
+ "content": "<|placeholder2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "32004": {
+ "content": "<|placeholder3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "32005": {
+ "content": "<|placeholder4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "32006": {
+ "content": "<|system|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "32007": {
+ "content": "<|end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "32008": {
+ "content": "<|placeholder5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "32009": {
+ "content": "<|placeholder6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ },
+ "32010": {
+ "content": "<|user|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": true,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<s>",
+ "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'system') %}{{'<|system|>' + '\n' + message['content'] + '<|endoftext|>' + '\n'}}{% elif (message['role'] == 'user') %}{{'<|user|>' + '\n' + message['content'] + '<|endoftext|>' + '\n' + '<|assistant|>' + '\n'}}{% elif message['role'] == 'assistant' %}{{message['content'] + '<|endoftext|>' + '\n'}}{% endif %}{% endfor %}",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|endoftext|>",
+ "legacy": false,
+ "model_max_length": 131072,
+ "pad_token": "<|endoftext|>",
+ "padding_side": "left",
+ "sp_model_kwargs": {},
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": false
+ }
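Note (not part of the committed files): the tokenizer config ships a Jinja chat_template that wraps system and user turns in <|system|>/<|user|> markers, closes each turn with <|endoftext|>, and already appends the <|assistant|> header after every user turn, so no separate generation prompt is needed. A hedged sketch using the tokenizer loaded in the first example; the message contents are illustrative only.

messages = [
    {"role": "system", "content": "You are an impartial evaluation judge."},   # illustrative
    {"role": "user", "content": "Score the response from 1 to 5 and explain."}, # illustrative
]
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
# Expected shape of the rendered string:
# "<s><|system|>\n...<|endoftext|>\n<|user|>\n...<|endoftext|>\n<|assistant|>\n"
print(prompt)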