sanjitaa committed
Commit b60c8c8
1 Parent(s): 42cb203

add config.json

Files changed (1)
  1. config.json +139 -0
config.json CHANGED
@@ -0,0 +1,139 @@
+ {
+   "_name_or_path": "sanjitaa/whisper_model",
+   "activation_dropout": 0.0,
+   "activation_function": "gelu",
+   "architectures": [
+     "WhisperForConditionalGeneration"
+   ],
+   "attention_dropout": 0.0,
+   "begin_suppress_tokens": [
+     220,
+     50256
+   ],
+   "bos_token_id": 50257,
+   "d_model": 1024,
+   "decoder_attention_heads": 16,
+   "decoder_ffn_dim": 4096,
+   "decoder_layerdrop": 0.0,
+   "decoder_layers": 24,
+   "decoder_start_token_id": 50257,
+   "dropout": 0.0,
+   "encoder_attention_heads": 16,
+   "encoder_ffn_dim": 4096,
+   "encoder_layerdrop": 0.0,
+   "encoder_layers": 24,
+   "eos_token_id": 50256,
+   "forced_decoder_ids": [
+     [
+       1,
+       50362
+     ]
+   ],
+   "init_std": 0.02,
+   "is_encoder_decoder": true,
+   "max_length": 448,
+   "max_source_positions": 1500,
+   "max_target_positions": 448,
+   "model_type": "whisper",
+   "num_hidden_layers": 24,
+   "num_mel_bins": 80,
+   "pad_token_id": 50256,
+   "scale_embedding": false,
+   "suppress_tokens": [
+     1,
+     2,
+     7,
+     8,
+     9,
+     10,
+     14,
+     25,
+     26,
+     27,
+     28,
+     29,
+     31,
+     58,
+     59,
+     60,
+     61,
+     62,
+     63,
+     90,
+     91,
+     92,
+     93,
+     357,
+     366,
+     438,
+     532,
+     685,
+     705,
+     796,
+     930,
+     1058,
+     1220,
+     1267,
+     1279,
+     1303,
+     1343,
+     1377,
+     1391,
+     1635,
+     1782,
+     1875,
+     2162,
+     2361,
+     2488,
+     3467,
+     4008,
+     4211,
+     4600,
+     4808,
+     5299,
+     5855,
+     6329,
+     7203,
+     9609,
+     9959,
+     10563,
+     10786,
+     11420,
+     11709,
+     11907,
+     13163,
+     13697,
+     13700,
+     14808,
+     15306,
+     16410,
+     16791,
+     17992,
+     19203,
+     19510,
+     20724,
+     22305,
+     22935,
+     27007,
+     30109,
+     30420,
+     33409,
+     34949,
+     40283,
+     40493,
+     40549,
+     47282,
+     49146,
+     50257,
+     50357,
+     50358,
+     50359,
+     50360,
+     50361
+   ],
+   "torch_dtype": "float32",
+   "transformers_version": "4.27.0.dev0",
+   "use_cache": true,
+   "vocab_size": 51864
+ }
+
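For anyone wanting to sanity-check the committed file, a config like this can be loaded and inspected with the transformers library. Below is a minimal sketch in Python; the repo id "sanjitaa/whisper_model" is taken from the "_name_or_path" field above, and a transformers install new enough for Whisper (>= 4.23, per the "transformers_version" field this was written with 4.27.0.dev0) is assumed.

    # Minimal sketch: load and inspect the committed config via transformers.
    # The repo id is an assumption taken from "_name_or_path" above.
    from transformers import WhisperConfig

    config = WhisperConfig.from_pretrained("sanjitaa/whisper_model")

    # The fields above describe a medium-sized Whisper encoder-decoder:
    # 24 encoder/decoder layers, d_model 1024, 16 heads, 80 mel bins.
    print(config.model_type)       # whisper
    print(config.encoder_layers)   # 24
    print(config.d_model)          # 1024
    print(config.vocab_size)       # 51864

    # To instantiate a randomly initialised model from the config alone:
    # from transformers import WhisperForConditionalGeneration
    # model = WhisperForConditionalGeneration(config)

As a side note, the vocab_size of 51864 and the forced <|notimestamps|> token id 50362 match OpenAI's English-only Whisper checkpoints (the multilingual ones use 51865 and 50363), which suggests this config was derived from an English-only medium model.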