Update config.json

config.json CHANGED (+10 -0)
@@ -17,6 +17,8 @@
   "num_decoder_layers": 8,
   "num_heads": 6,
   "num_layers": 8,
+  "output_past": true,
+  "max_length":512,
   "pad_token_id": 0,
   "relative_attention_max_distance": 128,
   "relative_attention_num_buckets": 32,
@@ -24,6 +26,14 @@
   "tokenizer_class": "T5Tokenizer",
   "torch_dtype": "float32",
   "transformers_version": "4.19.2",
+  "task_specific_params": {
+    "text-generation": {
+      "max_length": 512
+    },
+    "translation": {
+      "max_length": 512
+    }
+  },
   "use_cache": true,
   "vocab_size": 250112
 }
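
For context, a minimal sketch of how the fields added by this commit surface once the updated config is loaded through the Transformers config API; the repo id below is a placeholder, not this model's actual path.

# Minimal sketch: inspect the fields added by this commit
# (the repo id is a placeholder, substitute the real one).
from transformers import AutoConfig

config = AutoConfig.from_pretrained("your-org/your-mt5-model")  # placeholder id

# "max_length" sets the default generation length (512 here), and
# pipelines consult "task_specific_params" for per-task overrides.
print(config.max_length)
print(config.task_specific_params)
# -> {'text-generation': {'max_length': 512}, 'translation': {'max_length': 512}}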