abhinavnmagic committed
Commit
9b5ceb4
1 Parent(s): eb0aa59

Upload config.json with huggingface_hub

Files changed (1)
  1. config.json +206 -0
config.json ADDED
@@ -0,0 +1,206 @@
+{
+  "_name_or_path": "/root/.cache/huggingface/hub/models--microsoft--Phi-3-medium-128k-instruct/snapshots/cae1d42b5577398fd1be9f0746052562ae552886",
+  "architectures": [
+    "Phi3ForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration_phi3.Phi3Config",
+    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
+  },
+  "bos_token_id": 1,
+  "compression_config": {
+    "config_groups": {
+      "group_0": {
+        "input_activations": null,
+        "output_activations": null,
+        "targets": [
+          "Linear"
+        ],
+        "weights": {
+          "block_structure": null,
+          "dynamic": false,
+          "group_size": 128,
+          "num_bits": 4,
+          "observer": "minmax",
+          "observer_kwargs": {},
+          "strategy": "group",
+          "symmetric": true,
+          "type": "int"
+        }
+      }
+    },
+    "format": "pack-quantized",
+    "global_compression_ratio": 1.4217800438908097,
+    "ignore": [
+      "lm_head"
+    ],
+    "kv_cache_scheme": null,
+    "quant_method": "compressed-tensors",
+    "quantization_status": "frozen",
+    "sparsity_config": {
+      "format": "dense",
+      "global_sparsity": 14.910563402082037,
+      "registry_requires_subclass": false,
+      "sparsity_structure": "unstructured"
+    }
+  },
+  "embd_pdrop": 0.0,
+  "eos_token_id": 32000,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 17920,
+  "max_position_embeddings": 131072,
+  "model_type": "phi3",
+  "num_attention_heads": 40,
+  "num_hidden_layers": 40,
+  "num_key_value_heads": 10,
+  "original_max_position_embeddings": 4096,
+  "pad_token_id": null,
+  "resid_pdrop": 0.0,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "long_factor": [
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.25,
+      1.25,
+      1.5,
+      2.0,
+      2.75,
+      5.75,
+      5.75,
+      6.5,
+      9.25,
+      11.0,
+      13.25,
+      19.25,
+      19.75,
+      19.75,
+      21.25,
+      21.5,
+      26.5,
+      30.0,
+      33.75,
+      35.25,
+      38.5,
+      42.0,
+      42.25,
+      46.0,
+      47.0,
+      50.0,
+      50.5,
+      51.0,
+      52.0,
+      52.75,
+      53.75,
+      54.75,
+      57.0,
+      57.25,
+      58.5,
+      59.25,
+      59.5,
+      62.0,
+      62.5,
+      62.75,
+      63.25,
+      63.25,
+      63.25,
+      63.75,
+      64.0,
+      64.0,
+      64.25,
+      64.5,
+      64.5,
+      65.0,
+      65.0
+    ],
+    "short_factor": [
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.0,
+      1.01,
+      1.02,
+      1.02,
+      1.04,
+      1.04,
+      1.07,
+      1.07,
+      1.1,
+      1.3000000000000003,
+      1.3000000000000003,
+      1.5000000000000004,
+      1.5700000000000005,
+      1.9000000000000008,
+      2.3100000000000014,
+      2.759999999999992,
+      3.3899999999999784,
+      3.9399999999999666,
+      4.009999999999965,
+      4.289999999999959,
+      4.349999999999958,
+      5.349999999999937,
+      6.659999999999909,
+      7.029999999999901,
+      7.51999999999989,
+      8.00999999999988,
+      8.249999999999876,
+      8.279999999999875,
+      9.629999999999846,
+      9.89999999999984,
+      10.589999999999826,
+      11.049999999999816,
+      11.7899999999998,
+      12.189999999999792,
+      12.889999999999777,
+      13.129999999999772,
+      13.16999999999977,
+      13.20999999999977,
+      13.479999999999764,
+      13.539999999999763,
+      13.779999999999758,
+      13.929999999999755,
+      14.429999999999744,
+      14.759999999999737,
+      15.149999999999729,
+      15.419999999999723,
+      15.53999999999972,
+      15.659999999999718,
+      15.749999999999716,
+      15.759999999999716,
+      15.799999999999715,
+      16.05999999999971,
+      16.079999999999714,
+      16.11999999999972,
+      16.11999999999972,
+      16.18999999999973,
+      16.31999999999975,
+      16.539999999999786,
+      16.799999999999827
+    ],
+    "type": "su"
+  },
+  "rope_theta": 10000.0,
+  "sliding_window": 131072,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.42.4",
+  "use_cache": true,
+  "vocab_size": 32064
+}
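
This config describes an INT4 weight-only quantization of Phi-3-medium-128k-instruct: symmetric, group size 128, stored in the `pack-quantized` format with `quant_method: "compressed-tensors"`, and `lm_head` excluded from quantization. A minimal sketch of running such a checkpoint with vLLM follows, which reads `compression_config` from this file to select its compressed-tensors weight loader. The repo id below is a hypothetical placeholder, not confirmed by this commit; substitute the repo this `config.json` was uploaded to.

```python
# Minimal sketch: running a compressed-tensors INT4 checkpoint with vLLM.
# NOTE: the model id is an assumed placeholder for illustration only.
from vllm import LLM, SamplingParams

llm = LLM(
    model="abhinavnmagic/Phi-3-medium-128k-instruct-w4a16",  # assumed repo id
    max_model_len=4096,  # config allows up to 131072, but long contexts need far more KV-cache memory
)

params = SamplingParams(temperature=0.0, max_tokens=64)
outputs = llm.generate(["Explain 4-bit weight-only quantization in one sentence."], params)
print(outputs[0].outputs[0].text)
```

Because `ignore` lists `lm_head`, the output projection stays in `bfloat16`; only the `Linear` layers named in `targets` are packed to 4 bits, which is where the reported `global_compression_ratio` of about 1.42 comes from.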