Tobius committed on
Commit
4344ca5
1 Parent(s): 63daca3

Training in progress, step 100

Browse files
config.json CHANGED
@@ -25,20 +25,7 @@
25
  "encoder_layerdrop": 0.0,
26
  "encoder_layers": 12,
27
  "eos_token_id": 50257,
28
- "forced_decoder_ids": [
29
- [
30
- 1,
31
- 50259
32
- ],
33
- [
34
- 2,
35
- 50359
36
- ],
37
- [
38
- 3,
39
- 50363
40
- ]
41
- ],
42
  "init_std": 0.02,
43
  "is_encoder_decoder": true,
44
  "mask_feature_length": 10,
@@ -56,97 +43,10 @@
56
  "num_mel_bins": 80,
57
  "pad_token_id": 50257,
58
  "scale_embedding": false,
59
- "suppress_tokens": [
60
- 1,
61
- 2,
62
- 7,
63
- 8,
64
- 9,
65
- 10,
66
- 14,
67
- 25,
68
- 26,
69
- 27,
70
- 28,
71
- 29,
72
- 31,
73
- 58,
74
- 59,
75
- 60,
76
- 61,
77
- 62,
78
- 63,
79
- 90,
80
- 91,
81
- 92,
82
- 93,
83
- 359,
84
- 503,
85
- 522,
86
- 542,
87
- 873,
88
- 893,
89
- 902,
90
- 918,
91
- 922,
92
- 931,
93
- 1350,
94
- 1853,
95
- 1982,
96
- 2460,
97
- 2627,
98
- 3246,
99
- 3253,
100
- 3268,
101
- 3536,
102
- 3846,
103
- 3961,
104
- 4183,
105
- 4667,
106
- 6585,
107
- 6647,
108
- 7273,
109
- 9061,
110
- 9383,
111
- 10428,
112
- 10929,
113
- 11938,
114
- 12033,
115
- 12331,
116
- 12562,
117
- 13793,
118
- 14157,
119
- 14635,
120
- 15265,
121
- 15618,
122
- 16553,
123
- 16604,
124
- 18362,
125
- 18956,
126
- 20075,
127
- 21675,
128
- 22520,
129
- 26130,
130
- 26161,
131
- 26435,
132
- 28279,
133
- 29464,
134
- 31650,
135
- 32302,
136
- 32470,
137
- 36865,
138
- 42863,
139
- 47425,
140
- 49870,
141
- 50254,
142
- 50258,
143
- 50360,
144
- 50361,
145
- 50362
146
- ],
147
  "torch_dtype": "float32",
148
  "transformers_version": "4.37.0.dev0",
149
- "use_cache": true,
150
  "use_weighted_layer_sum": false,
151
  "vocab_size": 51865
152
  }
 
25
  "encoder_layerdrop": 0.0,
26
  "encoder_layers": 12,
27
  "eos_token_id": 50257,
28
+ "forced_decoder_ids": null,
 
 
 
 
 
 
 
 
 
 
 
 
 
29
  "init_std": 0.02,
30
  "is_encoder_decoder": true,
31
  "mask_feature_length": 10,
 
43
  "num_mel_bins": 80,
44
  "pad_token_id": 50257,
45
  "scale_embedding": false,
46
+ "suppress_tokens": [],
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
  "torch_dtype": "float32",
48
  "transformers_version": "4.37.0.dev0",
49
+ "use_cache": false,
50
  "use_weighted_layer_sum": false,
51
  "vocab_size": 51865
52
  }
runs/Jan06_15-49-04_f93f90b13d4f/events.out.tfevents.1704556150.f93f90b13d4f.3637.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:92e88e26f132971c825fe5b7a67f447519b1449418001214042104a5a31d252a
3
+ size 5636
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f959f55be0b6aafc33b2849288d01d7095add434affa05bc98df11e5e879fc0d
3
  size 4856
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1d510fa46617df0577bb56d4f6456112b8fc4d09a7378dab63da5505e9f8d6f0
3
  size 4856