adventureshin committed on
Commit
015d544
1 Parent(s): 9a948fd

Upload bingsu models

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+models--Bingsu--vitB32_bert_ko_small_clip/blobs/b3fc251be7844996114de04b8a499032bd1f577d82e3380267a19ea85a450fa8 filter=lfs diff=lfs merge=lfs -text
models--Bingsu--vitB32_bert_ko_small_clip/.no_exist/b913d72edb823ee522a4098e5f79d188dd3c2df7/added_tokens.json ADDED
File without changes
models--Bingsu--vitB32_bert_ko_small_clip/.no_exist/b913d72edb823ee522a4098e5f79d188dd3c2df7/processor_config.json ADDED
File without changes
models--Bingsu--vitB32_bert_ko_small_clip/blobs/1225910efacdbb3d67e687059b9ea8752adad531 ADDED
The diff for this file is too large to render. See raw diff
 
models--Bingsu--vitB32_bert_ko_small_clip/blobs/1e49ec670164f37893ec8678e9a0d5fde5519f31 ADDED
@@ -0,0 +1,162 @@
+{
+  "_name_or_path": "Bingsu/vitB32_bert_ko_small_clip",
+  "architectures": [
+    "VisionTextDualEncoderModel"
+  ],
+  "logit_scale_init_value": 2.6592,
+  "model_type": "vision-text-dual-encoder",
+  "projection_dim": 512,
+  "text_config": {
+    "_name_or_path": "lassl/bert-ko-small",
+    "add_cross_attention": false,
+    "architectures": [
+      "BertForPreTraining"
+    ],
+    "attention_probs_dropout_prob": 0.1,
+    "bad_words_ids": null,
+    "bos_token_id": null,
+    "chunk_size_feed_forward": 0,
+    "classifier_dropout": 0.1,
+    "cross_attention_hidden_size": null,
+    "decoder_start_token_id": null,
+    "diversity_penalty": 0.0,
+    "do_sample": false,
+    "early_stopping": false,
+    "encoder_no_repeat_ngram_size": 0,
+    "eos_token_id": null,
+    "exponential_decay_length_penalty": null,
+    "finetuning_task": null,
+    "forced_bos_token_id": null,
+    "forced_eos_token_id": null,
+    "hidden_act": "gelu",
+    "hidden_dropout_prob": 0.1,
+    "hidden_size": 256,
+    "id2label": {
+      "0": "LABEL_0",
+      "1": "LABEL_1"
+    },
+    "initializer_range": 0.02,
+    "intermediate_size": 1024,
+    "is_decoder": false,
+    "is_encoder_decoder": false,
+    "label2id": {
+      "LABEL_0": 0,
+      "LABEL_1": 1
+    },
+    "layer_norm_eps": 1e-05,
+    "length_penalty": 1.0,
+    "max_length": 20,
+    "max_position_embeddings": 512,
+    "min_length": 0,
+    "model_type": "bert",
+    "no_repeat_ngram_size": 0,
+    "num_attention_heads": 4,
+    "num_beam_groups": 1,
+    "num_beams": 1,
+    "num_hidden_layers": 12,
+    "num_return_sequences": 1,
+    "output_attentions": false,
+    "output_hidden_states": false,
+    "output_scores": false,
+    "pad_token_id": 0,
+    "position_embedding_type": "absolute",
+    "prefix": null,
+    "problem_type": null,
+    "pruned_heads": {},
+    "remove_invalid_values": false,
+    "repetition_penalty": 1.0,
+    "return_dict": true,
+    "return_dict_in_generate": false,
+    "sep_token_id": null,
+    "task_specific_params": null,
+    "temperature": 1.0,
+    "tie_encoder_decoder": false,
+    "tie_word_embeddings": true,
+    "tokenizer_class": null,
+    "top_k": 50,
+    "top_p": 1.0,
+    "torch_dtype": "float32",
+    "torchscript": false,
+    "transformers_version": "4.19.2",
+    "type_vocab_size": 2,
+    "typical_p": 1.0,
+    "use_bfloat16": false,
+    "use_cache": true,
+    "vocab_size": 51200
+  },
+  "torch_dtype": "float32",
+  "transformers_version": null,
+  "vision_config": {
+    "_name_or_path": "openai/clip-vit-base-patch32",
+    "add_cross_attention": false,
+    "architectures": null,
+    "attention_dropout": 0.0,
+    "bad_words_ids": null,
+    "bos_token_id": null,
+    "chunk_size_feed_forward": 0,
+    "cross_attention_hidden_size": null,
+    "decoder_start_token_id": null,
+    "diversity_penalty": 0.0,
+    "do_sample": false,
+    "dropout": 0.0,
+    "early_stopping": false,
+    "encoder_no_repeat_ngram_size": 0,
+    "eos_token_id": null,
+    "exponential_decay_length_penalty": null,
+    "finetuning_task": null,
+    "forced_bos_token_id": null,
+    "forced_eos_token_id": null,
+    "hidden_act": "quick_gelu",
+    "hidden_size": 768,
+    "id2label": {
+      "0": "LABEL_0",
+      "1": "LABEL_1"
+    },
+    "image_size": 224,
+    "initializer_factor": 1.0,
+    "initializer_range": 0.02,
+    "intermediate_size": 3072,
+    "is_decoder": false,
+    "is_encoder_decoder": false,
+    "label2id": {
+      "LABEL_0": 0,
+      "LABEL_1": 1
+    },
+    "layer_norm_eps": 1e-05,
+    "length_penalty": 1.0,
+    "max_length": 20,
+    "min_length": 0,
+    "model_type": "clip_vision_model",
+    "no_repeat_ngram_size": 0,
+    "num_attention_heads": 12,
+    "num_beam_groups": 1,
+    "num_beams": 1,
+    "num_hidden_layers": 12,
+    "num_return_sequences": 1,
+    "output_attentions": false,
+    "output_hidden_states": false,
+    "output_scores": false,
+    "pad_token_id": null,
+    "patch_size": 32,
+    "prefix": null,
+    "problem_type": null,
+    "pruned_heads": {},
+    "remove_invalid_values": false,
+    "repetition_penalty": 1.0,
+    "return_dict": true,
+    "return_dict_in_generate": false,
+    "sep_token_id": null,
+    "task_specific_params": null,
+    "temperature": 1.0,
+    "tie_encoder_decoder": false,
+    "tie_word_embeddings": true,
+    "tokenizer_class": null,
+    "top_k": 50,
+    "top_p": 1.0,
+    "torch_dtype": null,
+    "torchscript": false,
+    "transformers_version": "4.19.2",
+    "typical_p": 1.0,
+    "use_bfloat16": false
+  }
+}
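
The blob above is the repository's config.json: a VisionTextDualEncoderModel pairing the openai/clip-vit-base-patch32 vision tower with the lassl/bert-ko-small Korean text tower. A minimal sketch of loading and querying the model these cached files describe (the image URL and Korean captions are illustrative assumptions, not part of the commit; with a warm cache, from_pretrained reuses exactly the blobs uploaded here):

from PIL import Image
import requests
from transformers import VisionTextDualEncoderModel, VisionTextDualEncoderProcessor

model = VisionTextDualEncoderModel.from_pretrained("Bingsu/vitB32_bert_ko_small_clip")
processor = VisionTextDualEncoderProcessor.from_pretrained("Bingsu/vitB32_bert_ko_small_clip")

url = "http://images.cocodataset.org/val2017/000000039769.jpg"  # example image (assumption)
image = Image.open(requests.get(url, stream=True).raw)

# Korean captions; the text tower is lassl/bert-ko-small per the config above.
inputs = processor(text=["고양이 두 마리", "강아지 한 마리"],
                   images=image, return_tensors="pt", padding=True)
outputs = model(**inputs)
print(outputs.logits_per_image)  # scaled image-text similarity, shape (1, 2)
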
models--Bingsu--vitB32_bert_ko_small_clip/blobs/a14a1cbe5d9af98ffbdb66348c503e8eec29a2fa ADDED
The diff for this file is too large to render. See raw diff
 
models--Bingsu--vitB32_bert_ko_small_clip/blobs/b32a85839e270e595bf4b7c832448c7e0577a4bc ADDED
@@ -0,0 +1 @@
+{"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "Bingsu/vitB32_bert_ko_small_clip", "processor_class": "VisionTextDualEncoderProcessor", "tokenizer_class": "BertTokenizer"}
models--Bingsu--vitB32_bert_ko_small_clip/blobs/b3fc251be7844996114de04b8a499032bd1f577d82e3380267a19ea85a450fa8 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b3fc251be7844996114de04b8a499032bd1f577d82e3380267a19ea85a450fa8
+size 443105628
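
This three-line blob is a standard Git LFS pointer: the ~443 MB weights file itself lives out of band, content-addressed by its SHA-256, which doubles as the blob's filename in the cache layout above (and as the pattern added to .gitattributes at the top of this diff). A minimal sketch of verifying a local blob against the pointer's oid (the cache path is an assumption based on the default Hugging Face cache location):

import hashlib
from pathlib import Path

# Assumed default HF cache location; adjust if HF_HOME is set.
blob = Path.home() / ".cache/huggingface/hub" / \
    "models--Bingsu--vitB32_bert_ko_small_clip/blobs" / \
    "b3fc251be7844996114de04b8a499032bd1f577d82e3380267a19ea85a450fa8"

h = hashlib.sha256()
with open(blob, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == blob.name, "blob contents do not match the LFS oid"
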
models--Bingsu--vitB32_bert_ko_small_clip/blobs/babf797225ad2abb1d7c8a653442238e84f4f6a4 ADDED
@@ -0,0 +1,21 @@
+{
+  "crop_size": 224,
+  "do_center_crop": true,
+  "do_convert_rgb": true,
+  "do_normalize": true,
+  "do_resize": true,
+  "feature_extractor_type": "CLIPFeatureExtractor",
+  "image_mean": [
+    0.48145466,
+    0.4578275,
+    0.40821073
+  ],
+  "image_std": [
+    0.26862954,
+    0.26130258,
+    0.27577711
+  ],
+  "processor_class": "VisionTextDualEncoderProcessor",
+  "resample": 3,
+  "size": 224
+}
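
This blob is preprocessor_config.json, carrying the standard CLIP image statistics. A sketch of the equivalent pipeline in torchvision, just to make explicit what the config encodes (the actual loading path uses CLIPFeatureExtractor; this equivalence is my own reading of the config, where "resample": 3 is PIL's BICUBIC):

from torchvision import transforms

preprocess = transforms.Compose([
    transforms.Resize(224, interpolation=transforms.InterpolationMode.BICUBIC),  # do_resize, size 224
    transforms.CenterCrop(224),                                                  # do_center_crop, crop_size 224
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.48145466, 0.4578275, 0.40821073],
                         std=[0.26862954, 0.26130258, 0.27577711]),              # do_normalize
])
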
models--Bingsu--vitB32_bert_ko_small_clip/blobs/e7b0375001f109a6b8873d756ad4f7bbb15fbaa5 ADDED
@@ -0,0 +1 @@
+{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
models--Bingsu--vitB32_bert_ko_small_clip/refs/main ADDED
@@ -0,0 +1 @@
+b913d72edb823ee522a4098e5f79d188dd3c2df7
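
refs/main completes the Hub cache layout: it records the commit hash that the main branch pointed at when these files were fetched. A snapshots/<commit>/ directory of filename-to-blob symlinks (not shown in this section) sits alongside it, and the .no_exist/<commit>/ entries earlier in the diff cache negative lookups, so later from_pretrained calls skip re-querying the Hub for added_tokens.json and processor_config.json. A minimal sketch of resolving a file through this layout by hand (the snapshot path is an assumption about the standard layout):

from pathlib import Path

cache = Path("models--Bingsu--vitB32_bert_ko_small_clip")  # relative to this repo's root
commit = (cache / "refs" / "main").read_text().strip()     # -> b913d72e...
config = cache / "snapshots" / commit / "config.json"      # symlink into blobs/ (assumed layout)
print(config.resolve())  # the content-addressed blob path, one of the blobs added above
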