{
"_name_or_path": "microsoft/swin-tiny-patch4-window7-224",
"architectures": [
"SwinForImageClassification"
],
"attention_probs_dropout_prob": 0.0,
"depths": [
2,
2,
6,
2
],
"drop_path_rate": 0.1,
"embed_dim": 96,
"encoder_stride": 32,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.0,
"hidden_size": 768,
"id2label": {
"0": "1-series",
"1": "3-series",
"2": "4-series",
"3": "5-series",
"4": "6-series",
"5": "7-series",
"6": "8-series",
"7": "m3",
"8": "m4",
"9": "m5"
},
"image_size": 224,
"initializer_range": 0.02,
"label2id": {
"1-series": 0,
"3-series": 1,
"4-series": 2,
"5-series": 3,
"6-series": 4,
"7-series": 5,
"8-series": 6,
"m3": 7,
"m4": 8,
"m5": 9
},
"layer_norm_eps": 1e-05,
"mlp_ratio": 4.0,
"model_type": "swin",
"num_channels": 3,
"num_heads": [
3,
6,
12,
24
],
"num_layers": 4,
"out_features": [
"stage4"
],
"out_indices": [
4
],
"patch_size": 4,
"path_norm": true,
"problem_type": "single_label_classification",
"qkv_bias": true,
"stage_names": [
"stem",
"stage1",
"stage2",
"stage3",
"stage4"
],
"torch_dtype": "float32",
"transformers_version": "4.35.2",
"use_absolute_embeddings": false,
"window_size": 7
}