nielsr (HF staff) committed
Commit c4a18ce
1 Parent(s): 501f38b

Upload InstructBlipForConditionalGeneration

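This commit uploads the full InstructBlipForConditionalGeneration checkpoint and refreshes its config. As a minimal sketch of how such a checkpoint is loaded (the repo id and dtype choice below are assumptions, not taken from this commit), with transformers 4.31 or newer, matching the version bump in the diff:

# Minimal loading sketch; the repo id is a placeholder, not from this commit.
import torch
from transformers import InstructBlipForConditionalGeneration, InstructBlipProcessor

repo_id = "your-namespace/your-instructblip-checkpoint"  # placeholder

processor = InstructBlipProcessor.from_pretrained(repo_id)
model = InstructBlipForConditionalGeneration.from_pretrained(
    repo_id,
    torch_dtype=torch.float16,  # one sub-config in this diff stores torch_dtype "float16"
)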
config.json CHANGED
@@ -16,7 +16,6 @@
  "begin_suppress_tokens": null,
  "bos_token_id": null,
  "chunk_size_feed_forward": 0,
- "classifier_dropout": null,
  "cross_attention_frequency": 2,
  "cross_attention_hidden_size": null,
  "decoder_start_token_id": null,
@@ -81,7 +80,7 @@
  "top_p": 1.0,
  "torch_dtype": null,
  "torchscript": false,
- "transformers_version": "4.30.0.dev0",
+ "transformers_version": "4.31.0.dev0",
  "typical_p": 1.0,
  "use_bfloat16": false,
  "vocab_size": 30523
@@ -157,7 +156,7 @@
  "top_p": 1.0,
  "torch_dtype": "float16",
  "torchscript": false,
- "transformers_version": "4.30.0.dev0",
+ "transformers_version": "4.31.0.dev0",
  "typical_p": 1.0,
  "use_bfloat16": false,
  "use_cache": true,
@@ -180,7 +179,6 @@
  "decoder_start_token_id": null,
  "diversity_penalty": 0.0,
  "do_sample": false,
- "dropout": 0.0,
  "early_stopping": false,
  "encoder_no_repeat_ngram_size": 0,
  "eos_token_id": null,
@@ -195,7 +193,6 @@
  "1": "LABEL_1"
  },
  "image_size": 224,
- "initializer_factor": 1.0,
  "initializer_range": 1e-10,
  "intermediate_size": 6144,
  "is_decoder": false,
@@ -204,7 +201,7 @@
  "LABEL_0": 0,
  "LABEL_1": 1
  },
- "layer_norm_eps": 1e-05,
+ "layer_norm_eps": 1e-06,
  "length_penalty": 1.0,
  "max_length": 20,
  "min_length": 0,
@@ -213,7 +210,6 @@
  "num_attention_heads": 16,
  "num_beam_groups": 1,
  "num_beams": 1,
- "num_channels": 3,
  "num_hidden_layers": 39,
  "num_return_sequences": 1,
  "output_attentions": false,
@@ -223,7 +219,6 @@
  "patch_size": 14,
  "prefix": null,
  "problem_type": null,
- "projection_dim": 512,
  "pruned_heads": {},
  "qkv_bias": true,
  "remove_invalid_values": false,
@@ -242,7 +237,7 @@
  "top_p": 1.0,
  "torch_dtype": null,
  "torchscript": false,
- "transformers_version": "4.30.0.dev0",
+ "transformers_version": "4.31.0.dev0",
  "typical_p": 1.0,
  "use_bfloat16": false
  }
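The config diff drops several keys (classifier_dropout, dropout, initializer_factor, num_channels, projection_dim), updates transformers_version from 4.30.0.dev0 to 4.31.0.dev0 in each sub-config, and changes layer_norm_eps from 1e-05 to 1e-06 in the sub-config that carries image_size and patch_size, i.e. the vision config. A quick sketch for checking what actually loads (the repo id is again a placeholder):

# Inspect the nested config after this change; repo id is a placeholder.
from transformers import InstructBlipConfig

config = InstructBlipConfig.from_pretrained("your-namespace/your-instructblip-checkpoint")

# The vision sub-config now uses the updated epsilon.
print(config.vision_config.layer_norm_eps)   # expected: 1e-06
# vocab_size 30523 appears in the Q-Former sub-config per the diff context.
print(config.qformer_config.vocab_size)      # expected: 30523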
pytorch_model-00001-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3e5ac25a3c8c6dbd3a690e867e01f6997255706bdd9926cdaeaac889961efa42
+ oid sha256:e30feaa6df6ca053411ba4dc30b144a6ab296ad25f4b8b68337d5377f3210f1f
  size 9866988916
pytorch_model-00006-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0dfaeefe33839ca4a0f30077c1a0b07c4633b34bb53aab3b34d2a536c37ebabd
+ oid sha256:80c9b502c19a5ccf24726f7701c68fa3c6be76617e1bcccedd5487b1e6566387
  size 7282668529
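The .bin entries are Git LFS pointer files: only the sha256 oid changes, while the byte size stays the same, so the shards were re-uploaded with identical sizes but different contents. A hedged sketch for checking a downloaded shard against the new pointer (the local file path is an assumption):

# Recompute a shard's sha256 and compare it with the oid in the updated LFS pointer.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "e30feaa6df6ca053411ba4dc30b144a6ab296ad25f4b8b68337d5377f3210f1f"  # new oid, shard 1
print("match:", sha256_of("pytorch_model-00001-of-00006.bin") == expected)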