{
  "config": {
    "architecture": "union",
    "configs": [
      {
        "alpha": 1,
        "architecture": "lora",
        "attn_matrices": [
          "q",
          "v"
        ],
        "composition_mode": "scale",
        "dropout": 0.0,
        "init_weights": "ia3",
        "intermediate_lora": true,
        "output_lora": false,
        "r": 1,
        "selfattn_lora": true,
        "use_gating": false
      },
      {
        "adapter_residual_before_ln": false,
        "cross_adapter": false,
        "factorized_phm_W": true,
        "factorized_phm_rule": false,
        "hypercomplex_nonlinearity": "glorot-uniform",
        "init_weights": "bert",
        "inv_adapter": null,
        "inv_adapter_reduction_factor": null,
        "is_parallel": false,
        "learn_phm": true,
        "leave_out": [],
        "ln_after": false,
        "ln_before": false,
        "mh_adapter": true,
        "non_linearity": "swish",
        "original_ln_after": true,
        "original_ln_before": false,
        "output_adapter": true,
        "phm_bias": true,
        "phm_c_init": "normal",
        "phm_dim": 4,
        "phm_init_range": 0.0001,
        "phm_layer": false,
        "phm_rank": 1,
        "reduction_factor": 8,
        "residual_before_ln": true,
        "scaling": 1.0,
        "shared_W_phm": false,
        "shared_phm_rule": true,
        "use_gating": false
      }
    ]
  },
  "hidden_size": 1024,
  "model_class": "T5ForConditionalGeneration",
  "model_name": "google/flan-t5-large",
  "model_type": "t5",
  "name": "question_answering_union",
  "version": "3.2.1"
}
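
The file above is an adapter config produced by adapter-transformers (version 3.2.1): a `union` adapter that combines an (IA)^3-style scaling module on the `q`/`v` attention matrices with a Houlsby-style bottleneck adapter (reduction factor 8, swish non-linearity) on top of `google/flan-t5-large`. The snippet below is a minimal sketch of how an equivalent configuration could be rebuilt with the adapter-transformers 3.2.x API; the exact constructor keyword arguments are assumptions derived from the JSON values, and only the adapter name `question_answering_union` and the base model come from this file.

```python
from transformers import T5ForConditionalGeneration
from transformers.adapters import ConfigUnion, IA3Config, HoulsbyConfig

# Base model named in the config above.
model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-large")

# Rebuild the union config: an (IA)^3 module scaling the q/v attention
# matrices, plus a Houlsby bottleneck adapter with reduction factor 8.
# (Remaining fields in the JSON correspond to these configs' defaults.)
union_config = ConfigUnion(
    IA3Config(attn_matrices=["q", "v"]),
    HoulsbyConfig(reduction_factor=8, non_linearity="swish"),
)

model.add_adapter("question_answering_union", config=union_config)
# Freeze the base model weights and train only the adapter parameters.
model.train_adapter("question_answering_union")
```

Alternatively, a saved adapter directory containing this `adapter_config.json` (together with its weights) can be attached directly via `model.load_adapter(<path-to-adapter-directory>)`, where the path placeholder is hypothetical here.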