# PreCOMET-avg / hparams.yaml
# Provenance: zouharvi — init commit 36d266c (583 Bytes)
# Hyperparameter configuration (flat YAML mapping).
# NOTE(review): key names match COMET-style metric trainers — the per-key
# semantics below are inferred from the names and should be confirmed
# against the consuming training code.

# --- optimizer / schedule ---
nr_frozen_epochs: 0.3  # presumably the (fractional) number of epochs the encoder stays frozen — confirm unit
keep_embeddings_frozen: true  # presumably keeps the embedding layer frozen for the whole run — confirm
optimizer: AdamW
warmup_steps: 0
encoder_learning_rate: 1.0e-06  # smaller LR than learning_rate below — presumably applied to the encoder only
learning_rate: 1.5e-05  # presumably the LR for the non-encoder (head) parameters — confirm split
layerwise_decay: 0.95  # presumably a per-layer LR decay factor for the encoder — confirm

# --- encoder / pooling ---
encoder_model: XLM-RoBERTa
pretrained_model: xlm-roberta-large
pool: avg  # average pooling (consistent with the "-avg" in the model name)
layer: mix  # presumably a learned mixture over encoder layers — confirm
layer_transformation: sparsemax
layer_norm: false

# --- objective / data ---
loss: mse
dropout: 0.1
batch_size: 16
train_data:
- data/csv/train_avg.csv
validation_data:
- data/csv/dev_avg.csv
class_identifier: hypothesisless_regression_metric
load_pretrained_weights: true
local_files_only: false  # presumably allows downloading pretrained_model from the hub — confirm

# --- regression head ---
hidden_sizes:
- 2048
- 1024
activations: Tanh
final_activation: null  # explicit null: no activation on the output layer