sadkins65 committed on
Commit 904471a
1 Parent(s): b52b907

Upload folder using huggingface_hub

Files changed (2)
  1. config.json +2 -19
  2. model.safetensors +2 -2
config.json CHANGED
@@ -23,23 +23,6 @@
   "quantization_config": {
     "config_groups": {
       "group_0": {
-        "input_activations": null,
-        "output_activations": null,
-        "targets": [
-          "Embedding"
-        ],
-        "weights": {
-          "block_structure": null,
-          "group_size": null,
-          "num_bits": 8,
-          "observer": "minmax",
-          "observer_kwargs": {},
-          "strategy": "tensor",
-          "symmetric": true,
-          "type": "int"
-        }
-      },
-      "group_1": {
         "input_activations": {
           "block_structure": null,
           "group_size": null,
@@ -47,7 +30,7 @@
           "observer": "minmax",
           "observer_kwargs": {},
           "strategy": "tensor",
-          "symmetric": false,
+          "symmetric": true,
           "type": "int"
         },
         "output_activations": null,
@@ -67,7 +50,7 @@
       }
     },
     "format": "compressed",
-    "global_compression_ratio": 1.4416825559554713,
+    "global_compression_ratio": 1.4375595368095078,
     "ignore": [
       "model.layers.0.mlp.down_proj",
       "lm_head"
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f19ac75bb08f803862d2bee8d4ac812061808f4a038daeb33bd0d459d2df40bd
-size 1331628655
+oid sha256:4c206095867377c60773853dbb4d3e6f7583c5abafa7f4f5a5553901e3748b2d
+size 1528236626
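The new LFS pointer implies the checkpoint grew from 1,331,628,655 to 1,528,236,626 bytes (about 196.6 MB), consistent with the embedding weights no longer being stored in the quantized "Embedding" group removed above. A small sketch for checking a downloaded copy against the new pointer (the local path is an assumption):

    import hashlib
    from pathlib import Path

    path = Path("model.safetensors")  # assumed local download location
    assert path.stat().st_size == 1528236626

    h = hashlib.sha256()
    with path.open("rb") as f:
        # Hash in 1 MiB chunks to avoid loading the ~1.5 GB file into memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    assert h.hexdigest() == "4c206095867377c60773853dbb4d3e6f7583c5abafa7f4f5a5553901e3748b2d"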