davidhajdu committed on
Commit
665213f
·
verified ·
1 Parent(s): 261dfc6

Upload 3 files

Browse files
Files changed (1) hide show
  1. config.json +3 -6
config.json CHANGED
@@ -26,12 +26,9 @@
26
  "decoder_attention_heads": 8,
27
  "decoder_bbox_embed_diff_each_layer": false,
28
  "decoder_ffn_dim": 2048,
29
- "decoder_keep_query_pos": false,
30
  "decoder_layerdrop": 0.0,
 
31
  "decoder_modulate_hw_attn": true,
32
- "decoder_nhead": 8,
33
- "decoder_normalize_before": false,
34
- "decoder_num_patterns": 0,
35
  "decoder_query_dim": 4,
36
  "dice_loss_coefficient": 1,
37
  "dilation": false,
@@ -43,7 +40,6 @@
43
  "focal_alpha": 0.25,
44
  "giou_cost": 2,
45
  "giou_loss_coefficient": 2,
46
- "hidden_dim": 256,
47
  "id2label": {
48
  "0": "N/A",
49
  "1": "person",
@@ -141,6 +137,7 @@
141
  "init_xavier_std": 1.0,
142
  "is_encoder_decoder": true,
143
  "iter_update": true,
 
144
  "label2id": {
145
  "N/A": 83,
146
  "airplane": 5,
@@ -228,8 +225,8 @@
228
  "model_type": "dab-detr",
229
  "normalize_before": false,
230
  "num_channels": 3,
231
- "num_decoder_layers": 6,
232
  "num_hidden_layers": 6,
 
233
  "num_queries": 300,
234
  "num_target_classes": 91,
235
  "position_embedding_type": "sine",
 
26
  "decoder_attention_heads": 8,
27
  "decoder_bbox_embed_diff_each_layer": false,
28
  "decoder_ffn_dim": 2048,
 
29
  "decoder_layerdrop": 0.0,
30
+ "decoder_layers": 6,
31
  "decoder_modulate_hw_attn": true,
 
 
 
32
  "decoder_query_dim": 4,
33
  "dice_loss_coefficient": 1,
34
  "dilation": false,
 
40
  "focal_alpha": 0.25,
41
  "giou_cost": 2,
42
  "giou_loss_coefficient": 2,
 
43
  "id2label": {
44
  "0": "N/A",
45
  "1": "person",
 
137
  "init_xavier_std": 1.0,
138
  "is_encoder_decoder": true,
139
  "iter_update": true,
140
+ "keep_query_pos": false,
141
  "label2id": {
142
  "N/A": 83,
143
  "airplane": 5,
 
225
  "model_type": "dab-detr",
226
  "normalize_before": false,
227
  "num_channels": 3,
 
228
  "num_hidden_layers": 6,
229
+ "num_patterns": 0,
230
  "num_queries": 300,
231
  "num_target_classes": 91,
232
  "position_embedding_type": "sine",