mujadid-syahbana committed 3b01fe2 (parent: aab9365)

Training in progress, epoch 1

config.json CHANGED
@@ -1,6 +1,6 @@
  {
- "_name_or_path": "mujadid-syahbana/audioclass-alpha",
- "activation_dropout": 0.05,
+ "_name_or_path": "facebook/wav2vec2-base-960h",
+ "activation_dropout": 0.1,
  "adapter_attn_dim": null,
  "adapter_kernel_size": 3,
  "adapter_stride": 2,
@@ -14,7 +14,7 @@
  "classifier_proj_size": 256,
  "codevector_dim": 256,
  "contrastive_logits_temperature": 0.1,
- "conv_bias": true,
+ "conv_bias": false,
  "conv_dim": [
    512,
    512,
@@ -42,20 +42,22 @@
    2,
    2
  ],
- "ctc_loss_reduction": "mean",
- "ctc_zero_infinity": true,
+ "ctc_loss_reduction": "sum",
+ "ctc_zero_infinity": false,
  "diversity_loss_weight": 0.1,
- "do_stable_layer_norm": true,
+ "do_stable_layer_norm": false,
  "eos_token_id": 2,
  "feat_extract_activation": "gelu",
  "feat_extract_dropout": 0.0,
- "feat_extract_norm": "layer",
- "feat_proj_dropout": 0.05,
+ "feat_extract_norm": "group",
+ "feat_proj_dropout": 0.1,
  "feat_quantizer_dropout": 0.0,
- "final_dropout": 0.0,
+ "final_dropout": 0.1,
+ "gradient_checkpointing": false,
  "hidden_act": "gelu",
- "hidden_dropout": 0.05,
- "hidden_size": 1024,
+ "hidden_dropout": 0.1,
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
  "id2label": {
    "0": "qauf",
    "1": "faa",
@@ -90,7 +92,7 @@
    "9": "noon"
  },
  "initializer_range": 0.02,
- "intermediate_size": 4096,
+ "intermediate_size": 3072,
  "label2id": {
    "aain": "7",
    "alif": "24",
@@ -125,32 +127,24 @@
    "zua": "14"
  },
  "layer_norm_eps": 1e-05,
- "layerdrop": 0.05,
- "mask_channel_length": 10,
- "mask_channel_min_space": 1,
- "mask_channel_other": 0.0,
- "mask_channel_prob": 0.0,
- "mask_channel_selection": "static",
+ "layerdrop": 0.1,
  "mask_feature_length": 10,
  "mask_feature_min_masks": 0,
  "mask_feature_prob": 0.0,
  "mask_time_length": 10,
  "mask_time_min_masks": 2,
- "mask_time_min_space": 1,
- "mask_time_other": 0.0,
  "mask_time_prob": 0.05,
- "mask_time_selection": "static",
  "model_type": "wav2vec2",
  "num_adapter_layers": 3,
- "num_attention_heads": 16,
+ "num_attention_heads": 12,
  "num_codevector_groups": 2,
  "num_codevectors_per_group": 320,
  "num_conv_pos_embedding_groups": 16,
  "num_conv_pos_embeddings": 128,
  "num_feat_extract_layers": 7,
- "num_hidden_layers": 24,
+ "num_hidden_layers": 12,
  "num_negatives": 100,
- "output_hidden_size": 1024,
+ "output_hidden_size": 768,
  "pad_token_id": 0,
  "proj_codevector_dim": 256,
  "tdnn_dilation": [
@@ -177,6 +171,6 @@
  "torch_dtype": "float32",
  "transformers_version": "4.36.0.dev0",
  "use_weighted_layer_sum": false,
- "vocab_size": 51,
+ "vocab_size": 32,
  "xvector_output_dim": 512
  }
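This config change restarts training from the facebook/wav2vec2-base-960h backbone (12 layers, hidden size 768) instead of the earlier large checkpoint (24 layers, hidden size 1024). For reference, a minimal sketch of how a checkpoint with this config could be loaded, assuming it is meant as an audio classifier (the id2label map above lists Arabic letter names); "path/to/checkpoint" is a placeholder for a local clone of this repository, not a confirmed repo id:

from transformers import AutoConfig, AutoModelForAudioClassification

# Placeholder path; read the config written by this commit
config = AutoConfig.from_pretrained("path/to/checkpoint")
print(config.hidden_size, config.num_hidden_layers)  # 768, 12 after this commit

# Assumes a sequence-classification style head over the wav2vec2 backbone
model = AutoModelForAudioClassification.from_pretrained("path/to/checkpoint", config=config)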
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:153582f5bcf4420fd58a752c37aed3d8973a65613702921fb660e7ba9d4e1036
- size 1262889124
+ oid sha256:9582085d194f3335a3f8c8285ee1dddfac23fc0bf310c055a67a265772c17749
+ size 378332188
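The new weight file is about 378 MB instead of about 1.26 GB, consistent with the switch from the large to the base backbone in config.json. A hedged sketch of inspecting the weights directly (requires the safetensors package and the real file pulled from Git LFS, not just this pointer):

from safetensors.torch import load_file

# Placeholder path to a local copy of model.safetensors
state_dict = load_file("model.safetensors")
print(len(state_dict), "tensors")
print(sum(t.numel() for t in state_dict.values()), "parameters")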
preprocessor_config.json CHANGED
@@ -4,6 +4,6 @@
  "feature_size": 1,
  "padding_side": "right",
  "padding_value": 0.0,
- "return_attention_mask": true,
+ "return_attention_mask": false,
  "sampling_rate": 16000
  }
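A short sketch of using these preprocessing settings with transformers (again assuming a local clone at a placeholder path); after this commit the extractor no longer returns an attention mask by default, which matches the base, group-norm wav2vec2 variant:

from transformers import Wav2Vec2FeatureExtractor

feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained("path/to/checkpoint")
# One second of dummy 16 kHz mono audio; return_attention_mask is now False
inputs = feature_extractor([0.0] * 16000, sampling_rate=16000, return_tensors="pt")
print(inputs.input_values.shape)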
runs/Nov04_05-56-15_886e887cef14/events.out.tfevents.1699077377.886e887cef14.366.1 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:7db4f9c0abcd5be99e1c57ed01aa2b62a8bc600f19eff64e92b920b100479a70
- size 26409
+ oid sha256:7c984ffdd5bbfe9ec4b010d757c84f26372c6ca8898cf7f23a786b80daada46c
+ size 26723
runs/Nov04_07-42-35_886e887cef14/events.out.tfevents.1699083769.886e887cef14.44187.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7bf901901973da1d41e10896a151aba52a91cce98cbd8b71e799168a381b5376
+ size 8012
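The files under runs/ are TensorBoard event logs written during training, stored as Git LFS pointers. A hedged sketch of reading their scalars programmatically, assuming the tensorboard package is installed and the actual files have been pulled from LFS (the scalar tag names depend on how the Trainer was configured):

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point at the run directory added in this commit
acc = EventAccumulator("runs/Nov04_07-42-35_886e887cef14")
acc.Reload()
print(acc.Tags()["scalars"])  # available scalar tags, e.g. loss or eval metrics
for event in acc.Scalars(acc.Tags()["scalars"][0]):
    print(event.step, event.value)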
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c8a40ed8d5641e1b86838d5865e21aabad97a10776dc70c9f5760b11cb601786
+ oid sha256:46c9e813ce396f56192f5fb570ea2c6d510c9c7b1965746eb9493fa51b1e2987
  size 4600
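training_args.bin is the serialized TrainingArguments object the transformers Trainer saves next to each checkpoint; only its hash changed here, so some argument values differ between the two runs. A minimal sketch of inspecting it, assuming torch is installed and the file has been pulled from LFS:

import torch

# This is a full pickled Python object, so newer torch versions need weights_only=False
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)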