qubvel-hf HF staff committed on
Commit
635bb35
1 Parent(s): 862d646

🚨🚨🚨 Update config parameters, require to use latest transformers code (#11)

Browse files

- 🚨🚨🚨 Update config parameters, require to use latest transformers code (9ff65fc0248f48b2b6bb23582110a4517eb77df4)

Files changed (1) hide show
  1. config.json +1 -6
config.json CHANGED
@@ -8,7 +8,6 @@
8
  "_name_or_path": "",
9
  "add_cross_attention": false,
10
  "architectures": null,
11
- "attention_bias": false,
12
  "bad_words_ids": null,
13
  "begin_suppress_tokens": null,
14
  "bos_token_id": 128000,
@@ -39,7 +38,7 @@
39
  "finetuning_task": null,
40
  "forced_bos_token_id": null,
41
  "forced_eos_token_id": null,
42
- "hidden_activation": "silu",
43
  "hidden_size": 4096,
44
  "id2label": {
45
  "0": "LABEL_0",
@@ -100,7 +99,6 @@
100
  "typical_p": 1.0,
101
  "use_bfloat16": false,
102
  "use_cache": true,
103
- "use_scaled_rope": true,
104
  "vocab_size": 128256
105
  },
106
  "torch_dtype": "bfloat16",
@@ -132,7 +130,6 @@
132
  "1": "LABEL_1"
133
  },
134
  "image_size": 560,
135
- "in_channels": 3,
136
  "intermediate_layers_indices": [
137
  3,
138
  7,
@@ -147,7 +144,6 @@
147
  "LABEL_0": 0,
148
  "LABEL_1": 1
149
  },
150
- "layer_norm_eps": 1e-06,
151
  "length_penalty": 1.0,
152
  "max_length": 20,
153
  "max_num_tiles": 4,
@@ -168,7 +164,6 @@
168
  "patch_size": 14,
169
  "prefix": null,
170
  "problem_type": null,
171
- "projection_dim": 4096,
172
  "pruned_heads": {},
173
  "remove_invalid_values": false,
174
  "repetition_penalty": 1.0,
 
8
  "_name_or_path": "",
9
  "add_cross_attention": false,
10
  "architectures": null,
 
11
  "bad_words_ids": null,
12
  "begin_suppress_tokens": null,
13
  "bos_token_id": 128000,
 
38
  "finetuning_task": null,
39
  "forced_bos_token_id": null,
40
  "forced_eos_token_id": null,
41
+ "hidden_act": "silu",
42
  "hidden_size": 4096,
43
  "id2label": {
44
  "0": "LABEL_0",
 
99
  "typical_p": 1.0,
100
  "use_bfloat16": false,
101
  "use_cache": true,
 
102
  "vocab_size": 128256
103
  },
104
  "torch_dtype": "bfloat16",
 
130
  "1": "LABEL_1"
131
  },
132
  "image_size": 560,
 
133
  "intermediate_layers_indices": [
134
  3,
135
  7,
 
144
  "LABEL_0": 0,
145
  "LABEL_1": 1
146
  },
 
147
  "length_penalty": 1.0,
148
  "max_length": 20,
149
  "max_num_tiles": 4,
 
164
  "patch_size": 14,
165
  "prefix": null,
166
  "problem_type": null,
 
167
  "pruned_heads": {},
168
  "remove_invalid_values": false,
169
  "repetition_penalty": 1.0,