from transformers import AutoConfig, PretrainedConfig


class BERTMultiGATAttentionConfig(PretrainedConfig):
    """Configuration for a DeBERTa-based semantic similarity model with multi-GAT attention."""

    model_type = "deberta_semantic_similarity"

    def __init__(
        self,
        transformer_model="microsoft/deberta-v3-base",
        hidden_size=768,
        num_heads=8,
        dropout=0.07,
        gnn_input_dim=768,
        gnn_hidden_dim=768,
        **kwargs,
    ):
        super().__init__(**kwargs)
        # Backbone transformer checkpoint and its hidden size.
        self.transformer_model = transformer_model
        self.hidden_size = hidden_size
        # Attention heads and dropout for the attention/GAT layers.
        self.num_heads = num_heads
        self.dropout = dropout
        # Graph attention network (GAT) input and hidden dimensions.
        self.gnn_input_dim = gnn_input_dim
        self.gnn_hidden_dim = gnn_hidden_dim


# Register the config under its model_type so AutoConfig can resolve it.
AutoConfig.register("deberta_semantic_similarity", BERTMultiGATAttentionConfig)
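

# Minimal usage sketch (assumed, not from the original file): the directory name
# below is illustrative. Because the class is registered above, AutoConfig can
# reconstruct it from a saved config.json whose model_type matches.
if __name__ == "__main__":
    config = BERTMultiGATAttentionConfig(num_heads=4)
    config.save_pretrained("./deberta_semantic_similarity_config")  # writes config.json
    reloaded = AutoConfig.from_pretrained("./deberta_semantic_similarity_config")
    assert isinstance(reloaded, BERTMultiGATAttentionConfig)
    print(reloaded.transformer_model, reloaded.num_heads)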