rishabbala committed on
Commit c54f92e · 1 Parent(s): caa3451

Upload CctForImageClassification

Files changed (4)
  1. config.json +2040 -0
  2. configuration_cct.py +134 -0
  3. modeling_cct.py +437 -0
  4. pytorch_model.bin +3 -0
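
To inspect the four committed files locally, each can be fetched from the Hub on its own. Below is a minimal sketch using huggingface_hub; the repo id `rishabbala/cct_14_7x2_384` is assumed from the archive map inside configuration_cct.py further down.

```python
# Sketch: download the files from this commit for local inspection.
# REPO_ID is an assumption taken from CCT_PRETRAINED_CONFIG_ARCHIVE_MAP below.
from huggingface_hub import hf_hub_download

REPO_ID = "rishabbala/cct_14_7x2_384"
for filename in ("config.json", "configuration_cct.py", "modeling_cct.py", "pytorch_model.bin"):
    local_path = hf_hub_download(repo_id=REPO_ID, filename=filename)
    print(local_path)
```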
config.json ADDED
@@ -0,0 +1,2040 @@
+ {
+   "architectures": [
+     "CctForImageClassification"
+   ],
+   "attention_drop_rate": 0.1,
+   "auto_map": {
+     "AutoConfig": "configuration_cct.CctConfig",
+     "AutoModelForImageClassification": "modeling_cct.CctForImageClassification"
+   },
+   "conv_bias": false,
+   "conv_kernel_size": 7,
+   "conv_padding": 3,
+   "conv_stride": 2,
+   "drop_path_rate": 0.0,
+   "drop_rate": 0.0,
+   "embed_dim": 384,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2",
+     ⋯ (entries "3" through "998" continue the same "N": "LABEL_N" pattern) ⋯
+     "999": "LABEL_999"
+   },
+   "img_size": 384,
+   "in_channels": 3,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_10": 10,
+     ⋯ (the remaining entries invert id2label, mapping "LABEL_N" back to N for N = 0 to 999, listed in lexicographic key order) ⋯
+     "LABEL_999": 999
+   },
+   "mlp_ratio": 3,
+   "model_type": "cct",
+   "num_channels": 384,
+   "num_conv_layers": 2,
+   "num_heads": 6,
+   "num_transformer_layers": 14,
+   "out_channels": [
+     64,
+     384
+   ],
+   "output_hidden_states": true,
+   "pool_kernel_size": 3,
+   "pool_padding": 1,
+   "pool_stride": 2,
+   "pos_emb_type": "learnable",
+   "torch_dtype": "float32",
+   "transformers_version": "4.31.0.dev0"
+ }
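
Because config.json registers the custom classes through `auto_map`, the checkpoint can be loaded with the transformers Auto classes once remote code is enabled. The following is a minimal sketch, not verified against the actual modeling_cct.py: the repo id is taken from the archive map in configuration_cct.py below, and the forward signature and `.logits` attribute are assumed to follow the usual image-classification conventions.

```python
# Sketch: load the checkpoint via the auto_map entries in config.json above.
# trust_remote_code=True makes transformers import configuration_cct.py and
# modeling_cct.py from the repo to build CctForImageClassification.
import torch
from transformers import AutoConfig, AutoModelForImageClassification

repo_id = "rishabbala/cct_14_7x2_384"  # from CCT_PRETRAINED_CONFIG_ARCHIVE_MAP
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForImageClassification.from_pretrained(repo_id, trust_remote_code=True)

# Dummy batch matching img_size / in_channels from config.json (384x384 RGB).
pixel_values = torch.randn(1, config.in_channels, config.img_size, config.img_size)
with torch.no_grad():
    outputs = model(pixel_values)  # assumed to return an output carrying .logits

predicted = outputs.logits.argmax(-1).item()
print(config.id2label[predicted])  # one of the generic LABEL_0 ... LABEL_999 names
```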
configuration_cct.py ADDED
@@ -0,0 +1,134 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
+ # coding=utf-8
+ # Copyright 2023 The HuggingFace Inc. team. All rights reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ """ CCT model configuration"""
+
+ from transformers import PretrainedConfig
+
+ CCT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
+     "rishabbala/cct_14_7x2_384": "https://huggingface.co/rishabbala/cct_14_7x2_384/blob/main/config.json",
+ }
+
+
+ class CctConfig(PretrainedConfig):
+     r"""
+     This is the configuration class to store the configuration of a [`CctModel`]. It is used to instantiate a CCT
+     model according to the specified arguments, defining the model architecture. Instantiating a configuration with
+     the defaults will yield a similar configuration to that of the CCT
+     [rishabbala/cct](https://huggingface.co/rishabbala/cct) architecture.
+
+     Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
+     documentation from [`PretrainedConfig`] for more information.
+
+     Args:
+         img_size (`int`, *optional*, defaults to 384):
+             The size of the input image.
+         in_channels (`int`, *optional*, defaults to 3):
+             The number of input channels.
+         out_channels (`List[int]`, *optional*, defaults to `[64, 384]`):
+             The number of output channels of each conv layer.
+         conv_kernel_size (`int`, *optional*, defaults to 7):
+             The kernel size of convolutional layers in patch embedding.
+         conv_stride (`int`, *optional*, defaults to 2):
+             The stride size of convolutional layers in patch embedding.
+         conv_padding (`int`, *optional*, defaults to 3):
+             The padding size of convolutional layers in patch embedding.
+         conv_bias (`bool`, *optional*, defaults to `False`):
+             Whether the convolutional layers have a bias.
+         pool_kernel_size (`int`, *optional*, defaults to 3):
+             The kernel size of max pool layers in patch embedding.
+         pool_stride (`int`, *optional*, defaults to 2):
+             The stride size of max pool layers in patch embedding.
+         pool_padding (`int`, *optional*, defaults to 1):
+             The padding size of max pool layers in patch embedding.
+         num_conv_layers (`int`, *optional*, defaults to 2):
+             Number of convolutional embedding layers.
+         embed_dim (`int`, *optional*, defaults to 384):
+             Dimension of each of the encoder blocks.
+         num_heads (`int`, *optional*, defaults to 6):
+             Number of attention heads for each attention layer in each block of the Transformer encoder.
+         mlp_ratio (`int`, *optional*, defaults to 3):
+             Ratio of the size of the hidden layer compared to the size of the input layer of the FFNs in the encoder
+             blocks.
+         attention_drop_rate (`float`, *optional*, defaults to 0.1):
+             The dropout ratio for the attention probabilities.
+         drop_rate (`float`, *optional*, defaults to 0.0):
+             The dropout ratio following linear projections.
+         drop_path_rate (`float`, *optional*, defaults to 0.0):
+             The dropout probability for stochastic depth, used in the blocks of the Transformer encoder.
+         num_transformer_layers (`int`, *optional*, defaults to 14):
+             Number of transformer self-attention layers.
+         pos_emb_type (`str`, *optional*, defaults to `"learnable"`):
+             Type of positional embedding used. Alternative: `"sinusoidal"`.
+
+     Example:
+
+     ```python
+     >>> from transformers import CctConfig, CctModel
+
+     >>> # Initializing a CCT rishabbala/cct_14_7x2_384 style configuration
+     >>> configuration = CctConfig()
+
+     >>> # Initializing a model (with random weights) from that configuration
+     >>> model = CctModel(configuration)
+
+     >>> # Accessing the model configuration
+     >>> configuration = model.config
+     ```"""
+     model_type = "cct"
+
+     def __init__(
+         self,
+         img_size=384,
+         in_channels=3,
+         out_channels=[64, 384],
+         conv_kernel_size=7,
+         conv_stride=2,
+         conv_padding=3,
+         conv_bias=False,
+         pool_kernel_size=3,
+         pool_stride=2,
+         pool_padding=1,
+         num_conv_layers=2,
+         embed_dim=384,
+         num_heads=6,
+         mlp_ratio=3,
+         attention_drop_rate=0.1,
+         drop_rate=0.0,
+         drop_path_rate=0.0,
+         num_transformer_layers=14,
+         pos_emb_type="learnable",
+         **kwargs,
+     ):
+         super().__init__(**kwargs)
+         self.img_size = img_size
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+         self.num_channels = out_channels[-1]
+         self.conv_kernel_size = conv_kernel_size
+         self.conv_stride = conv_stride
+         self.conv_padding = conv_padding
+         self.conv_bias = conv_bias
+         self.pool_kernel_size = pool_kernel_size
+         self.pool_stride = pool_stride
+         self.pool_padding = pool_padding
+         self.num_conv_layers = num_conv_layers
+         self.embed_dim = embed_dim
+         self.num_heads = num_heads
+         self.mlp_ratio = mlp_ratio
+         self.attention_drop_rate = attention_drop_rate
+         self.drop_rate = drop_rate
+         self.drop_path_rate = drop_path_rate
+         self.num_transformer_layers = num_transformer_layers
+         self.pos_emb_type = pos_emb_type
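Note how the defaults above pin down the token count: each of the two tokenizer blocks applies a stride-2 convolution followed by a stride-2 max pool, so spatial resolution drops 4x per block and a 384x384 input ends up as a 24x24 grid, i.e. 576 tokens. A quick check of that arithmetic with the standard conv/pool output-size formula:

```python
def out_size(size, kernel, stride, padding):
    # floor((size + 2 * padding - kernel) / stride) + 1, as in nn.Conv2d / nn.MaxPool2d
    return (size + 2 * padding - kernel) // stride + 1

size = 384
for _ in range(2):  # num_conv_layers = 2
    size = out_size(size, kernel=7, stride=2, padding=3)  # conv: 384 -> 192, then 96 -> 48
    size = out_size(size, kernel=3, stride=2, padding=1)  # pool: 192 -> 96, then 48 -> 24
print(size * size)  # 576 -- what CctConvEmbeddings.get_sequence_length() computes below
```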
modeling_cct.py ADDED
@@ -0,0 +1,437 @@
+ # coding=utf-8
+ # Copyright 2023 Microsoft Research and The HuggingFace Inc. team. All rights reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ """ PyTorch CCT model."""
+
+
+ from dataclasses import dataclass
+ from typing import Optional, Tuple, Union
+
+ import torch
+ import torch.nn.functional as F
+ import torch.utils.checkpoint
+ from torch import nn
+ from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
+
+ from transformers import PreTrainedModel
+ from transformers.modeling_outputs import ImageClassifierOutputWithNoAttention, ModelOutput
+
+ from .configuration_cct import CctConfig
+
+ # General docstring
+ _CONFIG_FOR_DOC = "CctConfig"
+
+ # Base docstring
+ _CHECKPOINT_FOR_DOC = "rishabbala/cct_14_7x2_384"
+ _EXPECTED_OUTPUT_SHAPE = [1, 384]
+
+ # Image classification docstring
+ _IMAGE_CLASS_CHECKPOINT = "rishabbala/cct_14_7x2_384"
+ _IMAGE_CLASS_EXPECTED_OUTPUT = "tabby, tabby cat"
+
+
+ CCT_PRETRAINED_MODEL_ARCHIVE_LIST = [
+     "rishabbala/cct_14_7x2_384",
+     "rishabbala/cct_14_7x2_224",
+     # See all CCT models at https://huggingface.co/models?filter=cct
+ ]
+
+
+ @dataclass
+ class BaseModelOutputWithSeqPool(ModelOutput):
+     """
+     Base class for model's outputs, with potential hidden states and attentions.
+
+     Args:
+         last_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`):
+             Sequence of hidden-states at the output of the last layer of the model, prior to sequence pooling.
+         hidden_state_post_pool (`torch.FloatTensor` of shape `(batch_size, hidden_size)`):
+             Hidden-state at the output of the last layer of the model, after sequence pooling.
+         hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
+             Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of
+             shape `(batch_size, sequence_length, hidden_size)`. Hidden-states of the model at the output of each layer
+             plus the initial embedding outputs.
+     """
+
+     last_hidden_state: torch.FloatTensor = None
+     hidden_state_post_pool: torch.FloatTensor = None
+     hidden_states: Optional[Tuple[torch.FloatTensor]] = None
+
+
+ # Copied from transformers.models.beit.modeling_beit.drop_path
+ def drop_path(input, drop_prob: float = 0.0, training: bool = False):
+     """
+     Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).
+
+     Comment by Ross Wightman: This is the same as the DropConnect impl I created for EfficientNet, etc networks,
+     however, the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper...
+     See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for changing the
+     layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use 'survival rate' as the
+     argument.
+     """
+     if drop_prob == 0.0 or not training:
+         return input
+     keep_prob = 1 - drop_prob
+     shape = (input.shape[0],) + (1,) * (input.ndim - 1)  # work with diff dim tensors, not just 2D ConvNets
+     random_tensor = keep_prob + torch.rand(shape, dtype=input.dtype, device=input.device)
+     random_tensor.floor_()  # binarize
+     output = input.div(keep_prob) * random_tensor
+     return output
+
+
+ # Copied from transformers.models.beit.modeling_beit.BeitDropPath
+ class CctDropPath(nn.Module):
+     """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks)."""
+
+     def __init__(self, drop_prob: Optional[float] = None) -> None:
+         super().__init__()
+         self.drop_prob = drop_prob
+
+     def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+         return drop_path(hidden_states, self.drop_prob, self.training)
+
+     def extra_repr(self) -> str:
+         return "p={}".format(self.drop_prob)
+
+
+ class CctConvEmbeddings(nn.Module):
+     """
+     Performs convolutional tokenization of the input image.
+     """
+
+     def __init__(self, config: CctConfig):
+         super().__init__()
+         self.in_channels = config.in_channels
+         self.img_size = config.img_size
+
+         channels_size = [config.in_channels] + config.out_channels
+         assert (
+             len(channels_size) == config.num_conv_layers + 1
+         ), "Ensure that the number of output channels matches the number of conv layers"
+
+         self.embedding_layers = nn.ModuleList([])
+         for i in range(config.num_conv_layers):
+             self.embedding_layers.extend(
+                 [
+                     nn.Conv2d(
+                         channels_size[i],
+                         channels_size[i + 1],
+                         kernel_size=config.conv_kernel_size,
+                         stride=config.conv_stride,
+                         padding=config.conv_padding,
+                         bias=config.conv_bias,
+                     ),
+                     nn.ReLU(),
+                     nn.MaxPool2d(config.pool_kernel_size, stride=config.pool_stride, padding=config.pool_padding),
+                 ]
+             )
+
+     def forward(self, pixel_values):
+         for layer in self.embedding_layers:
+             pixel_values = layer(pixel_values)
+         batch_size, num_channels, height, width = pixel_values.shape
+         hidden_size = height * width
+         # rearrange "b c h w -> b (h w) c"
+         pixel_values = pixel_values.view(batch_size, num_channels, hidden_size).permute(0, 2, 1)
+         return pixel_values
+
+     def get_sequence_length(self) -> int:
+         # Run a dummy forward pass to infer how many tokens one image produces
+         return self.forward(torch.zeros((1, self.in_channels, self.img_size, self.img_size))).shape[1]
+
+
+ class CctSelfAttention(nn.Module):
+     """
+     Attention Module that computes self-attention, given an input hidden_state. Q, K, V are computed implicitly from
+     hidden_state.
+     """
+
+     def __init__(self, embed_dim, num_heads=6, attention_drop_rate=0.1, drop_rate=0.0):
+         super().__init__()
+         self.num_heads = num_heads
+         head_dim = embed_dim // self.num_heads
+         self.scale = head_dim**-0.5
+
+         self.qkv = nn.Linear(embed_dim, embed_dim * 3, bias=False)
+         self.attn_drop = nn.Dropout(attention_drop_rate)
+         self.proj = nn.Linear(embed_dim, embed_dim)
+         self.proj_drop = nn.Dropout(drop_rate)
+
+     def forward(self, hidden_state):
+         B, N, C = hidden_state.shape
+         # (B, N, 3 * C) -> (3, B, num_heads, N, head_dim)
+         qkv = self.qkv(hidden_state).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4)
+         q, k, v = qkv[0], qkv[1], qkv[2]
+
+         attn = (q @ k.transpose(-2, -1)) * self.scale
+         attn = attn.softmax(dim=-1)
+         attn = self.attn_drop(attn)
+
+         # (B, num_heads, N, head_dim) -> (B, N, C)
+         hidden_state = (attn @ v).transpose(1, 2).reshape(B, N, C)
+         hidden_state = self.proj(hidden_state)
+         hidden_state = self.proj_drop(hidden_state)
+         return hidden_state
+
+
+ class CctStage(nn.Module):
+     """
+     CCT stage composed of stacked transformer layers.
+     """
+
+     def __init__(
+         self, embed_dim=384, num_heads=6, mlp_ratio=3, drop_rate=0.0, attention_drop_rate=0.1, drop_path_rate=0.0
+     ):
+         super().__init__()
+         dim_feedforward = mlp_ratio * embed_dim
+         self.pre_norm = nn.LayerNorm(embed_dim)
+
+         self.linear1 = nn.Linear(embed_dim, dim_feedforward)
+         self.norm1 = nn.LayerNorm(embed_dim)
+         self.linear2 = nn.Linear(dim_feedforward, embed_dim)
+         self.self_attn = CctSelfAttention(
+             embed_dim=embed_dim, num_heads=num_heads, attention_drop_rate=attention_drop_rate, drop_rate=drop_rate
+         )
+         self.dropout1 = nn.Dropout(drop_rate)
+         self.dropout2 = nn.Dropout(drop_rate)
+         self.drop_path = CctDropPath(drop_path_rate) if drop_path_rate > 0 else nn.Identity()
+         self.activation = F.gelu
+
+     def forward(self, hidden_state):
+         hidden_state = hidden_state + self.drop_path(self.self_attn(self.pre_norm(hidden_state)))
+         hidden_state = self.norm1(hidden_state)
+         hidden_state = hidden_state + self.drop_path(
+             self.dropout2(self.linear2(self.dropout1(self.activation(self.linear1(hidden_state)))))
+         )
+
+         return hidden_state
+
+
+ class CctEncoder(nn.Module):
+     """
+     Class that combines CctConvEmbeddings and CctStage. Output is of type BaseModelOutputWithSeqPool if return_dict
+     is set to True, else the output is a Tuple.
+     """
+
+     def __init__(self, config: CctConfig, sequence_length: int):
+         super().__init__()
+         assert sequence_length is not None, "Sequence Length required to initialize positional embedding"
+
+         self.attention_pool = nn.Linear(config.embed_dim, 1)
+
+         if config.pos_emb_type == "learnable":
+             self.positional_emb = nn.Parameter(
+                 self.learnable_embedding(sequence_length, config.embed_dim), requires_grad=True
+             )
+         else:
+             self.positional_emb = nn.Parameter(
+                 self.sinusoidal_embedding(sequence_length, config.embed_dim), requires_grad=False
+             )
+
+         self.dropout = nn.Dropout(config.drop_rate)
+         # Stochastic depth rate increases linearly across the transformer layers
+         stochastic_dropout_rate = [
+             x.item() for x in torch.linspace(0, config.drop_path_rate, config.num_transformer_layers)
+         ]
+
+         self.blocks = nn.ModuleList(
+             [
+                 CctStage(
+                     config.embed_dim,
+                     config.num_heads,
+                     config.mlp_ratio,
+                     config.drop_rate,
+                     config.attention_drop_rate,
+                     stochastic_dropout_rate[i],
+                 )
+                 for i in range(config.num_transformer_layers)
+             ]
+         )
+         self.norm = nn.LayerNorm(config.embed_dim)
+
+     def forward(self, pixel_values, output_hidden_states=False, return_dict=True) -> BaseModelOutputWithSeqPool:
+         all_hidden_states = ()
+
+         hidden_state = pixel_values + self.positional_emb
+         if output_hidden_states:
+             all_hidden_states = all_hidden_states + (hidden_state,)
+         hidden_state = self.dropout(hidden_state)
+
+         for blk in self.blocks:
+             hidden_state = blk(hidden_state)
+             if output_hidden_states:
+                 all_hidden_states = all_hidden_states + (hidden_state,)
+
+         hidden_state_pre_pool = self.norm(hidden_state)
+         if output_hidden_states:
+             # Replace the last entry with its normalized version
+             all_hidden_states = all_hidden_states[:-1] + (hidden_state_pre_pool,)
+
+         # Sequence pooling: a learned softmax-weighted average over the token dimension
+         seq_pool_attn = F.softmax(self.attention_pool(hidden_state_pre_pool), dim=1)
+         hidden_state_post_pool = torch.matmul(seq_pool_attn.transpose(-1, -2), hidden_state_pre_pool).squeeze(-2)
+
+         if output_hidden_states:
+             all_hidden_states = all_hidden_states + (hidden_state_post_pool,)
+
+         if not return_dict:
+             if output_hidden_states:
+                 return (hidden_state_pre_pool, hidden_state_post_pool, all_hidden_states)
+             else:
+                 return (hidden_state_pre_pool, hidden_state_post_pool)
+
+         return BaseModelOutputWithSeqPool(
+             last_hidden_state=hidden_state_pre_pool,
+             hidden_state_post_pool=hidden_state_post_pool,
+             hidden_states=all_hidden_states if output_hidden_states else None,
+         )
+
+     @staticmethod
+     def learnable_embedding(sequence_length, embed_dim):
+         pe = torch.zeros(1, sequence_length, embed_dim)
+         return nn.init.trunc_normal_(pe, std=0.2)
+
+     @staticmethod
+     def sinusoidal_embedding(sequence_length, embed_dim):
+         pe = torch.FloatTensor(
+             [[p / (10000 ** (2 * (i // 2) / embed_dim)) for i in range(embed_dim)] for p in range(sequence_length)]
+         )
+         pe[:, 0::2] = torch.sin(pe[:, 0::2])
+         pe[:, 1::2] = torch.cos(pe[:, 1::2])
+         return pe.unsqueeze(0)
+
+
+ class CctPreTrainedModel(PreTrainedModel):
+     """
+     An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
+     models.
+     """
+
+     config_class = CctConfig
+     base_model_prefix = "cct"
+     main_input_name = "pixel_values"
+
+     def _init_weights(self, module):
+         # post_init() applies this to every submodule, so only leaf module types need handling here
+         if isinstance(module, nn.Linear):
+             nn.init.trunc_normal_(module.weight, std=0.02)
+             if module.bias is not None:
+                 nn.init.constant_(module.bias, 0.0)
+         elif isinstance(module, nn.LayerNorm):
+             nn.init.constant_(module.bias, 0.0)
+             nn.init.constant_(module.weight, 1.0)
+         elif isinstance(module, nn.Conv2d):
+             nn.init.kaiming_normal_(module.weight)
+
+
+ class CctModel(CctPreTrainedModel):
+     def __init__(self, config, add_pooling_layer=True):
+         # Note: add_pooling_layer is currently unused; sequence pooling always happens inside the encoder
+         super().__init__(config)
+         self.config = config
+         self.embedder = CctConvEmbeddings(config)
+         self.encoder = CctEncoder(config, self.embedder.get_sequence_length())
+         self.post_init()
+
+     def forward(
+         self,
+         pixel_values: torch.Tensor,
+         output_hidden_states: Optional[bool] = None,
+         return_dict: Optional[bool] = None,
+     ) -> Union[Tuple, BaseModelOutputWithSeqPool]:
+         output_hidden_states = (
+             output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
+         )
+         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
+
+         if pixel_values is None:
+             raise ValueError("You have to specify pixel_values (input image)")
+
+         embedder_outputs = self.embedder(pixel_values)
+         encoder_outputs = self.encoder(
+             embedder_outputs,
+             output_hidden_states=output_hidden_states,
+             return_dict=return_dict,
+         )
+
+         return encoder_outputs
+
+
+ class CctForImageClassification(CctPreTrainedModel):
+     def __init__(self, config):
+         super().__init__(config)
+
+         self.num_labels = config.num_labels
+         self.cct = CctModel(config, add_pooling_layer=False)
+         # Classifier head
+         self.classifier = nn.Linear(config.embed_dim, config.num_labels) if config.num_labels > 0 else nn.Identity()
+
+         # Initialize weights and apply final processing
+         self.post_init()
+
+     def forward(
+         self,
+         pixel_values: Optional[torch.Tensor] = None,
+         labels: Optional[torch.Tensor] = None,
+         output_hidden_states: Optional[bool] = None,
+         return_dict: Optional[bool] = None,
+     ) -> Union[Tuple, ImageClassifierOutputWithNoAttention]:
+         r"""
+         labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
+             Labels for computing the image classification/regression loss. Indices should be in `[0, ...,
+             config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss). If
+             `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
+         """
+         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
+         output_hidden_states = (
+             output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
+         )
+
+         outputs = self.cct(
+             pixel_values,
+             output_hidden_states=output_hidden_states,
+             return_dict=return_dict,
+         )
+
+         pooled_output = outputs.hidden_state_post_pool if return_dict else outputs[1]
+         logits = self.classifier(pooled_output)
+
+         loss = None
+         if labels is not None:
+             if self.config.problem_type is None:
+                 if self.config.num_labels == 1:
+                     self.config.problem_type = "regression"
+                 elif self.config.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
+                     self.config.problem_type = "single_label_classification"
+                 else:
+                     self.config.problem_type = "multi_label_classification"
+
+             if self.config.problem_type == "regression":
+                 loss_fct = MSELoss()
+                 if self.config.num_labels == 1:
+                     loss = loss_fct(logits.squeeze(), labels.squeeze())
+                 else:
+                     loss = loss_fct(logits, labels)
+             elif self.config.problem_type == "single_label_classification":
+                 loss_fct = CrossEntropyLoss()
+                 loss = loss_fct(logits.view(-1, self.config.num_labels), labels.view(-1))
+             elif self.config.problem_type == "multi_label_classification":
+                 loss_fct = BCEWithLogitsLoss()
+                 loss = loss_fct(logits, labels)
+
+         if not return_dict:
+             out = (logits, outputs[2]) if output_hidden_states else (logits,)
+             return (loss,) + out if loss is not None else out
+
+         return ImageClassifierOutputWithNoAttention(
+             loss=loss, logits=logits, hidden_states=outputs.hidden_states if output_hidden_states else None
+         )
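A quick smoke test of the classification head, assuming the two modules above sit side by side on the import path. With a fresh `CctConfig()` the model has random weights and `PretrainedConfig`'s default of 2 labels; loading the uploaded checkpoint would give 1000 classes instead. The feature fed to the classifier is the sequence-pool output, the learned softmax-weighted average over the 576 tokens:

```python
import torch

from configuration_cct import CctConfig
from modeling_cct import CctForImageClassification

config = CctConfig()  # defaults: 384x384 input, 2 conv layers, 14 transformer layers
model = CctForImageClassification(config).eval()

pixel_values = torch.randn(1, 3, 384, 384)
with torch.no_grad():
    outputs = model(pixel_values)
print(outputs.logits.shape)  # torch.Size([1, 2])
```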
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b3131ecbb1273a6d9eb03edb78a2f2d5a871c73e4e7c94bf70db98b870ff565b
+ size 90104217