from transformers import HubertConfig, PretrainedConfig


class EmotionClassifierConfig(PretrainedConfig):
    """Configuration for a HuBERT-based emotion classifier with an LSTM head."""

    model_type = "hubert"

    def __init__(
        self,
        hidden_size_lstm=128,
        num_classes=6,
        hubert_config=None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.hidden_size_lstm = hidden_size_lstm
        self.num_classes = num_classes
        if hubert_config is None:
            # Fall back to the default HuBERT (base) configuration.
            # For the large variant, override e.g.:
            #   num_attention_heads=16, hidden_size=1024, num_hidden_layers=24
            self.hubert_config = HubertConfig()
        elif isinstance(hubert_config, dict):
            # Rebuild the nested config when it arrives as a plain dict
            # (e.g. after being round-tripped through a saved config.json).
            self.hubert_config = HubertConfig(**hubert_config)
        else:
            self.hubert_config = hubert_config
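

# Usage sketch. Assumptions: the hyperparameter values and the save directory
# below are illustrative, not taken from the original, and whether the nested
# HubertConfig serializes cleanly into config.json depends on the installed
# transformers version.
if __name__ == "__main__":
    config = EmotionClassifierConfig(
        hidden_size_lstm=256,
        num_classes=6,
        hubert_config=HubertConfig(
            hidden_size=1024, num_hidden_layers=24, num_attention_heads=16
        ),
    )
    config.save_pretrained("emotion_classifier_config")  # writes config.json

    # On reload, hubert_config may come back as a plain dict; __init__ re-wraps it.
    reloaded = EmotionClassifierConfig.from_pretrained("emotion_classifier_config")
    print(reloaded.hidden_size_lstm, reloaded.num_classes)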