mempal committed
Commit 6cb12d0
1 Parent(s): 4371e24

Training in progress, step 2

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "facebook/wav2vec2-base",
+  "_name_or_path": "classla/wav2vec2-xls-r-parlaspeech-hr",
   "activation_dropout": 0.0,
   "adapter_attn_dim": null,
   "adapter_kernel_size": 3,
@@ -9,12 +9,12 @@
   "architectures": [
     "Wav2Vec2ForCTC"
   ],
-  "attention_dropout": 0.1,
+  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "classifier_proj_size": 256,
-  "codevector_dim": 256,
+  "codevector_dim": 768,
   "contrastive_logits_temperature": 0.1,
-  "conv_bias": false,
+  "conv_bias": true,
   "conv_dim": [
     512,
     512,
@@ -45,50 +45,41 @@
   "ctc_loss_reduction": "mean",
   "ctc_zero_infinity": false,
   "diversity_loss_weight": 0.1,
-  "do_stable_layer_norm": false,
+  "do_stable_layer_norm": true,
   "eos_token_id": 2,
   "feat_extract_activation": "gelu",
-  "feat_extract_norm": "group",
-  "feat_proj_dropout": 0.1,
+  "feat_extract_dropout": 0.0,
+  "feat_extract_norm": "layer",
+  "feat_proj_dropout": 0.0,
   "feat_quantizer_dropout": 0.0,
   "final_dropout": 0.0,
-  "freeze_feat_extract_train": true,
+  "gradient_checkpointing": false,
   "hidden_act": "gelu",
-  "hidden_dropout": 0.1,
-  "hidden_size": 768,
+  "hidden_dropout": 0.0,
+  "hidden_size": 1024,
   "initializer_range": 0.02,
-  "intermediate_size": 3072,
+  "intermediate_size": 4096,
   "layer_norm_eps": 1e-05,
   "layerdrop": 0.0,
-  "mask_channel_length": 10,
-  "mask_channel_min_space": 1,
-  "mask_channel_other": 0.0,
-  "mask_channel_prob": 0.0,
-  "mask_channel_selection": "static",
   "mask_feature_length": 10,
   "mask_feature_min_masks": 0,
   "mask_feature_prob": 0.0,
   "mask_time_length": 10,
   "mask_time_min_masks": 2,
-  "mask_time_min_space": 1,
-  "mask_time_other": 0.0,
   "mask_time_prob": 0.05,
-  "mask_time_selection": "static",
   "model_type": "wav2vec2",
-  "no_mask_channel_overlap": false,
-  "no_mask_time_overlap": false,
   "num_adapter_layers": 3,
-  "num_attention_heads": 12,
+  "num_attention_heads": 16,
   "num_codevector_groups": 2,
   "num_codevectors_per_group": 320,
   "num_conv_pos_embedding_groups": 16,
   "num_conv_pos_embeddings": 128,
   "num_feat_extract_layers": 7,
-  "num_hidden_layers": 12,
+  "num_hidden_layers": 24,
   "num_negatives": 100,
-  "output_hidden_size": 768,
-  "pad_token_id": 0,
-  "proj_codevector_dim": 256,
+  "output_hidden_size": 1024,
+  "pad_token_id": 1,
+  "proj_codevector_dim": 768,
   "tdnn_dilation": [
     1,
     2,
@@ -113,6 +104,6 @@
   "torch_dtype": "float32",
   "transformers_version": "4.31.0",
   "use_weighted_layer_sum": false,
-  "vocab_size": 32,
+  "vocab_size": 50,
   "xvector_output_dim": 512
 }
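The config diff above switches the base checkpoint from facebook/wav2vec2-base to the larger XLS-R architecture of classla/wav2vec2-xls-r-parlaspeech-hr: 24 hidden layers, hidden size 1024, and a 50-symbol CTC vocabulary. A minimal sketch (not part of this commit) of loading the updated checkpoint with transformers 4.31.0 and checking those values; the local path is a placeholder for wherever this repository is cloned:

```python
# Sketch: load the updated checkpoint and verify the new config values.
# "." is a placeholder for a local clone of this repository.
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

model_dir = "."  # placeholder path
processor = Wav2Vec2Processor.from_pretrained(model_dir)
model = Wav2Vec2ForCTC.from_pretrained(model_dir)

assert model.config.num_hidden_layers == 24
assert model.config.hidden_size == 1024
assert model.config.vocab_size == 50
```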
preprocessor_config.json CHANGED
@@ -5,6 +5,6 @@
   "padding_side": "right",
   "padding_value": 0.0,
   "processor_class": "Wav2Vec2Processor",
-  "return_attention_mask": false,
+  "return_attention_mask": true,
   "sampling_rate": 16000
 }
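Setting "return_attention_mask" to true matches the switch to a layer-norm feature extractor ("feat_extract_norm": "layer") above, since those wav2vec 2.0 variants are meant to receive an attention mask. A sketch, under the assumption that the processor is loaded from a local clone of this repo (path and dummy audio are placeholders):

```python
# Sketch: with "return_attention_mask": true the processor is expected to
# return an attention_mask alongside input_values.
import numpy as np
from transformers import Wav2Vec2Processor

processor = Wav2Vec2Processor.from_pretrained(".")  # placeholder path
audio = np.zeros(16000, dtype=np.float32)  # 1 second of silence at 16 kHz
inputs = processor(audio, sampling_rate=16000, return_tensors="pt", padding=True)
print(inputs.keys())  # expected: input_values, attention_mask
```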
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c63ed9577e7ec18b118ec0582a3a27ffdf656c3f16ec9b5795017e8cd25f6f45
-size 377655777
+oid sha256:4ba0070d0c62b2f6369c011b53da137b9cb9a054d5e5729da2b6a75de8c7b015
+size 1262100973
runs/Aug16_22-57-24_ad9991e891e0/events.out.tfevents.1692226648.ad9991e891e0.188.3 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ff21914eb309e2df3b826e415188efe952d3b6ae70cd74c3f622e7c3aeda7ddc
+size 6264
special_tokens_map.json CHANGED
@@ -1,6 +1,22 @@
 {
+  "additional_special_tokens": [
+    {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false
+    }
+  ],
   "bos_token": "<s>",
   "eos_token": "</s>",
-  "pad_token": "<pad>",
-  "unk_token": "<unk>"
+  "pad_token": "[PAD]",
+  "unk_token": "[UNK]"
 }
tokenizer_config.json CHANGED
@@ -1,16 +1,16 @@
 {
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": true,
-  "do_lower_case": false,
+  "do_lower_case": true,
   "do_normalize": true,
   "eos_token": "</s>",
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "<pad>",
+  "pad_token": "[PAD]",
   "processor_class": "Wav2Vec2Processor",
   "replace_word_delimiter_char": " ",
-  "return_attention_mask": false,
+  "return_attention_mask": true,
   "target_lang": null,
   "tokenizer_class": "Wav2Vec2CTCTokenizer",
-  "unk_token": "<unk>",
+  "unk_token": "[UNK]",
   "word_delimiter_token": "|"
 }
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a7605dd591aaa7fb4ca67db3970dbdd0409cef778f0c2d89ff60642e9a9ab4fd
+oid sha256:028821aa33b99b8f220817932a2fcda87dc1fc670e081a172ee10bdfecfc3401
 size 3963
vocab.json CHANGED
@@ -1,34 +1,50 @@
 {
-  "'": 27,
-  "</s>": 2,
-  "<pad>": 0,
-  "<s>": 1,
-  "<unk>": 3,
-  "A": 7,
-  "B": 24,
-  "C": 19,
-  "D": 14,
-  "E": 5,
-  "F": 20,
-  "G": 21,
-  "H": 11,
-  "I": 10,
-  "J": 29,
-  "K": 26,
-  "L": 15,
-  "M": 17,
-  "N": 9,
-  "O": 8,
-  "P": 23,
-  "Q": 30,
-  "R": 13,
-  "S": 12,
-  "T": 6,
-  "U": 16,
-  "V": 25,
-  "W": 18,
-  "X": 28,
-  "Y": 22,
-  "Z": 31,
-  "|": 4
+  " ": 0,
+  ".": 47,
+  "0": 46,
+  "1": 37,
+  "2": 38,
+  "3": 39,
+  "4": 40,
+  "5": 41,
+  "6": 42,
+  "7": 43,
+  "8": 44,
+  "9": 45,
+  "[PAD]": 1,
+  "[UNK]": 2,
+  "a": 3,
+  "b": 4,
+  "c": 5,
+  "d": 6,
+  "e": 7,
+  "f": 8,
+  "g": 9,
+  "h": 10,
+  "i": 11,
+  "j": 12,
+  "k": 13,
+  "l": 14,
+  "m": 15,
+  "n": 16,
+  "o": 17,
+  "p": 18,
+  "q": 19,
+  "r": 20,
+  "s": 21,
+  "t": 22,
+  "u": 23,
+  "v": 24,
+  "w": 25,
+  "x": 26,
+  "y": 27,
+  "z": 28,
+  "ä": 29,
+  "ü": 30,
+  "ć": 31,
+  "č": 32,
+  "đ": 33,
+  "š": 34,
+  "ž": 35,
+  "ӧ": 36
 }
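The vocabulary is rebuilt for the new tokenizer settings: lowercase letters, digits, and diacritics, with [PAD] and [UNK] replacing <pad> and <unk>, as also reflected in tokenizer_config.json. A small sketch of constructing the CTC tokenizer directly from this file, with the other arguments mirroring tokenizer_config.json (the filename assumes a local clone of the repo):

```python
# Sketch: build the CTC tokenizer from the new vocab.json with the special
# tokens and casing settings from tokenizer_config.json.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer(
    "vocab.json",
    unk_token="[UNK]",
    pad_token="[PAD]",
    word_delimiter_token="|",
    do_lower_case=True,
)
print(len(tokenizer))  # base vocabulary plus added special tokens
```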