wejoncy committed
Commit 5a32c69 · 1 Parent(s): 40de53f

update config

Files changed (1): config.json (+15 -14)
config.json CHANGED
@@ -16,7 +16,17 @@
   "num_attention_heads": 28,
   "num_hidden_layers": 28,
   "num_key_value_heads": 4,
-  "quant_config": {
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.45.1",
+  "use_cache": true,
+  "use_sliding_window": false,
+  "vocab_size": 152064,
+  "quantization_config": {
     "model.layers.0.mlp.down_proj": {
       "bias": null,
       "enable_norm": true,
@@ -4720,16 +4730,7 @@
         -1,
         16
       ]
-    }
-  },
-  "rms_norm_eps": 1e-06,
-  "rope_scaling": null,
-  "rope_theta": 1000000.0,
-  "sliding_window": null,
-  "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.45.1",
-  "use_cache": true,
-  "use_sliding_window": false,
-  "vocab_size": 152064
-}
+    },
+    "quant_method": "vptq"
+  }
+}
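The substantive change is renaming the top-level `quant_config` key to `quantization_config` and adding `"quant_method": "vptq"` inside it: `quantization_config` with a `quant_method` field is the key that transformers inspects in config.json to auto-dispatch to the matching quantizer at load time. A minimal loading sketch, assuming the `vptq` and `transformers` packages are installed; the model id below is a hypothetical placeholder, not taken from this commit:

# Minimal loading sketch (assumes `pip install vptq transformers`).
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "wejoncy/some-vptq-model"  # hypothetical placeholder id

# from_pretrained reads config.json, finds "quantization_config" with
# "quant_method": "vptq", and routes loading through the VPTQ integration.
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")
tokenizer = AutoTokenizer.from_pretrained(model_id)

inputs = tokenizer("Hello, world", return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=16)[0]))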