Yova committed on
Commit 646fe55
1 Parent(s): 5604ca6

Upload config.json

Files changed (1)
  1. config.json +331 -0
config.json ADDED
@@ -0,0 +1,331 @@
+ {
+   "_commit_hash": null,
+   "architectures": [
+     "SmallCap"
+   ],
+   "decoder": {
+     "_name_or_path": "facebook/opt-125M",
+     "_remove_final_layer_norm": false,
+     "activation_dropout": 0.0,
+     "activation_function": "relu",
+     "add_cross_attention": true,
+     "architectures": [
+       "OPTForCausalLM"
+     ],
+     "attention_dropout": 0.0,
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": 2,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "cross_attention_reduce_factor": 4,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_layer_norm_before": true,
+     "do_sample": false,
+     "dropout": 0.1,
+     "early_stopping": false,
+     "enable_bias": true,
+     "encoder_hidden_size": 768,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": 2,
+     "exponential_decay_length_penalty": null,
+     "ffn_dim": 3072,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "hidden_size": 768,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "init_std": 0.02,
+     "is_decoder": true,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "layer_norm_elementwise_affine": true,
+     "layerdrop": 0.0,
+     "length_penalty": 1.0,
+     "max_length": 20,
+     "max_position_embeddings": 2048,
+     "min_length": 0,
+     "model_type": "this_opt",
+     "no_repeat_ngram_size": 0,
+     "num_attention_heads": 12,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_hidden_layers": 12,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": 1,
+     "prefix": "</s>",
+     "problem_type": null,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "suppress_tokens": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": "float16",
+     "torchscript": false,
+     "transformers_version": "4.26.1",
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "use_cache": true,
+     "vocab_size": 50272,
+     "word_embed_proj_dim": 768
+   },
+   "encoder": {
+     "_commit_hash": "e6a30b603a447e251fdaca1c3056b2a16cdfebeb",
+     "_name_or_path": "openai/clip-vit-base-patch32",
+     "add_cross_attention": false,
+     "architectures": [
+       "CLIPModel"
+     ],
+     "bad_words_ids": null,
+     "begin_suppress_tokens": null,
+     "bos_token_id": null,
+     "chunk_size_feed_forward": 0,
+     "cross_attention_hidden_size": null,
+     "decoder_start_token_id": null,
+     "diversity_penalty": 0.0,
+     "do_sample": false,
+     "early_stopping": false,
+     "encoder_no_repeat_ngram_size": 0,
+     "eos_token_id": null,
+     "exponential_decay_length_penalty": null,
+     "finetuning_task": null,
+     "forced_bos_token_id": null,
+     "forced_eos_token_id": null,
+     "id2label": {
+       "0": "LABEL_0",
+       "1": "LABEL_1"
+     },
+     "initializer_factor": 1.0,
+     "is_decoder": false,
+     "is_encoder_decoder": false,
+     "label2id": {
+       "LABEL_0": 0,
+       "LABEL_1": 1
+     },
+     "length_penalty": 1.0,
+     "logit_scale_init_value": 2.6592,
+     "max_length": 20,
+     "min_length": 0,
+     "model_type": "clip",
+     "no_repeat_ngram_size": 0,
+     "num_beam_groups": 1,
+     "num_beams": 1,
+     "num_return_sequences": 1,
+     "output_attentions": false,
+     "output_hidden_states": false,
+     "output_scores": false,
+     "pad_token_id": null,
+     "prefix": null,
+     "problem_type": null,
+     "projection_dim": 512,
+     "pruned_heads": {},
+     "remove_invalid_values": false,
+     "repetition_penalty": 1.0,
+     "return_dict": true,
+     "return_dict_in_generate": false,
+     "sep_token_id": null,
+     "suppress_tokens": null,
+     "task_specific_params": null,
+     "temperature": 1.0,
+     "text_config": {
+       "_name_or_path": "",
+       "add_cross_attention": false,
+       "architectures": null,
+       "attention_dropout": 0.0,
+       "bad_words_ids": null,
+       "begin_suppress_tokens": null,
+       "bos_token_id": 0,
+       "chunk_size_feed_forward": 0,
+       "cross_attention_hidden_size": null,
+       "decoder_start_token_id": null,
+       "diversity_penalty": 0.0,
+       "do_sample": false,
+       "dropout": 0.0,
+       "early_stopping": false,
+       "encoder_no_repeat_ngram_size": 0,
+       "eos_token_id": 2,
+       "exponential_decay_length_penalty": null,
+       "finetuning_task": null,
+       "forced_bos_token_id": null,
+       "forced_eos_token_id": null,
+       "hidden_act": "quick_gelu",
+       "hidden_size": 512,
+       "id2label": {
+         "0": "LABEL_0",
+         "1": "LABEL_1"
+       },
+       "initializer_factor": 1.0,
+       "initializer_range": 0.02,
+       "intermediate_size": 2048,
+       "is_decoder": false,
+       "is_encoder_decoder": false,
+       "label2id": {
+         "LABEL_0": 0,
+         "LABEL_1": 1
+       },
+       "layer_norm_eps": 1e-05,
+       "length_penalty": 1.0,
+       "max_length": 20,
+       "max_position_embeddings": 77,
+       "min_length": 0,
+       "model_type": "clip_text_model",
+       "no_repeat_ngram_size": 0,
+       "num_attention_heads": 8,
+       "num_beam_groups": 1,
+       "num_beams": 1,
+       "num_hidden_layers": 12,
+       "num_return_sequences": 1,
+       "output_attentions": false,
+       "output_hidden_states": false,
+       "output_scores": false,
+       "pad_token_id": 1,
+       "prefix": null,
+       "problem_type": null,
+       "projection_dim": 512,
+       "pruned_heads": {},
+       "remove_invalid_values": false,
+       "repetition_penalty": 1.0,
+       "return_dict": true,
+       "return_dict_in_generate": false,
+       "sep_token_id": null,
+       "suppress_tokens": null,
+       "task_specific_params": null,
+       "temperature": 1.0,
+       "tf_legacy_loss": false,
+       "tie_encoder_decoder": false,
+       "tie_word_embeddings": true,
+       "tokenizer_class": null,
+       "top_k": 50,
+       "top_p": 1.0,
+       "torch_dtype": null,
+       "torchscript": false,
+       "transformers_version": "4.26.1",
+       "typical_p": 1.0,
+       "use_bfloat16": false,
+       "vocab_size": 49408
+     },
+     "text_config_dict": null,
+     "tf_legacy_loss": false,
+     "tie_encoder_decoder": false,
+     "tie_word_embeddings": true,
+     "tokenizer_class": null,
+     "top_k": 50,
+     "top_p": 1.0,
+     "torch_dtype": null,
+     "torchscript": false,
+     "transformers_version": null,
+     "typical_p": 1.0,
+     "use_bfloat16": false,
+     "vision_config": {
+       "_name_or_path": "",
+       "add_cross_attention": false,
+       "architectures": null,
+       "attention_dropout": 0.0,
+       "bad_words_ids": null,
+       "begin_suppress_tokens": null,
+       "bos_token_id": null,
+       "chunk_size_feed_forward": 0,
+       "cross_attention_hidden_size": null,
+       "decoder_start_token_id": null,
+       "diversity_penalty": 0.0,
+       "do_sample": false,
+       "dropout": 0.0,
+       "early_stopping": false,
+       "encoder_no_repeat_ngram_size": 0,
+       "eos_token_id": null,
+       "exponential_decay_length_penalty": null,
+       "finetuning_task": null,
+       "forced_bos_token_id": null,
+       "forced_eos_token_id": null,
+       "hidden_act": "quick_gelu",
+       "hidden_size": 768,
+       "id2label": {
+         "0": "LABEL_0",
+         "1": "LABEL_1"
+       },
+       "image_size": 224,
+       "initializer_factor": 1.0,
+       "initializer_range": 0.02,
+       "intermediate_size": 3072,
+       "is_decoder": false,
+       "is_encoder_decoder": false,
+       "label2id": {
+         "LABEL_0": 0,
+         "LABEL_1": 1
+       },
+       "layer_norm_eps": 1e-05,
+       "length_penalty": 1.0,
+       "max_length": 20,
+       "min_length": 0,
+       "model_type": "clip_vision_model",
+       "no_repeat_ngram_size": 0,
+       "num_attention_heads": 12,
+       "num_beam_groups": 1,
+       "num_beams": 1,
+       "num_channels": 3,
+       "num_hidden_layers": 12,
+       "num_return_sequences": 1,
+       "output_attentions": false,
+       "output_hidden_states": false,
+       "output_scores": false,
+       "pad_token_id": null,
+       "patch_size": 32,
+       "prefix": null,
+       "problem_type": null,
+       "projection_dim": 512,
+       "pruned_heads": {},
+       "remove_invalid_values": false,
+       "repetition_penalty": 1.0,
+       "return_dict": true,
+       "return_dict_in_generate": false,
+       "sep_token_id": null,
+       "suppress_tokens": null,
+       "task_specific_params": null,
+       "temperature": 1.0,
+       "tf_legacy_loss": false,
+       "tie_encoder_decoder": false,
+       "tie_word_embeddings": true,
+       "tokenizer_class": null,
+       "top_k": 50,
+       "top_p": 1.0,
+       "torch_dtype": null,
+       "torchscript": false,
+       "transformers_version": "4.26.1",
+       "typical_p": 1.0,
+       "use_bfloat16": false
+     },
+     "vision_config_dict": null
+   },
+   "eos_token_id": 4,
+   "is_encoder_decoder": true,
+   "k": 4,
+   "max_length": 143,
+   "model_type": "smallcap",
+   "pad_token_id": 328,
+   "rag": true,
+   "retrieval_encoder": "RN50x64",
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": null,
+   "vocab_size": 50272
+ }
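
For reference, a minimal sketch of how one might inspect this config after downloading it. It uses only Python's standard json module and prints the fields that define the SmallCap setup recorded above: a CLIP-ViT-B/32 encoder wired to an OPT-125M decoder through reduced cross-attention, plus retrieval augmentation (rag: true, k: 4) with an RN50x64 retrieval encoder. The file path "config.json" is an assumption; adjust it to wherever the file was saved.

import json

# Assumes the uploaded file sits in the current directory.
with open("config.json") as f:
    cfg = json.load(f)

# Top-level SmallCap settings.
print(cfg["model_type"])           # "smallcap"
print(cfg["rag"], cfg["k"])        # True 4 -- retrieval-augmented, 4 retrieved captions
print(cfg["retrieval_encoder"])    # "RN50x64" -- CLIP backbone used for retrieval

# The encoder-decoder composition.
print(cfg["encoder"]["_name_or_path"])  # "openai/clip-vit-base-patch32"
print(cfg["decoder"]["_name_or_path"])  # "facebook/opt-125M"

# Factor by which the added cross-attention layers are shrunk.
print(cfg["decoder"]["cross_attention_reduce_factor"])  # 4

Note that model_type "smallcap" (and the decoder's "this_opt") are custom types, so loading the model itself requires the repository's own modeling code rather than a stock transformers class; the JSON inspection above works regardless.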