{
"architectures": [
"DEBERTAMultiGATAttentionModel"
],
"auto_map": {
"AutoConfig": "config.BERTMultiGATAttentionConfig",
"AutoModel": "model.DEBERTAMultiGATAttentionModel"
},
"dropout": 0.07,
"gnn_hidden_dim": 768,
"gnn_input_dim": 768,
"hidden_size": 768,
"model_type": "deberta_semantic_similarity",
"num_heads": 8,
"torch_dtype": "float32",
"transformer_model": "microsoft/deberta-v3-base",
"transformers_version": "4.37.2"
}