# Source: georgian_comet / hparams.yaml (Hugging Face model repository)
# Uploaded by Darsala — commit ec922d9 (verified): "updates model"
# Hyperparameters for a COMET-style regression metric model
# (see class_identifier below). NOTE(review): key semantics are assumed
# from the unbabel-comet RegressionMetric config schema — confirm against
# the consuming library before relying on these comments.
activations: Tanh  # activation used in the feed-forward estimator head
batch_size: 8
class_identifier: regression_metric  # selects the regression-metric model class
dropout: 0.1
encoder_learning_rate: 1.5e-05  # LR for encoder parameters (matches learning_rate below)
encoder_model: XLM-RoBERTa  # encoder family; concrete checkpoint given in pretrained_model
final_activation: null  # no activation applied to the final scalar output
hidden_sizes:  # widths of the feed-forward estimator layers
- 3072
- 1024
keep_embeddings_frozen: false
layer: mix  # presumably a learned mix over encoder layers (see layer_transformation) — TODO confirm
layer_norm: true
layer_transformation: softmax  # transformation applied to layer-mix weights
layerwise_decay: 0.95  # per-layer learning-rate decay factor for the encoder — TODO confirm direction
learning_rate: 1.5e-05  # LR for the estimator head
load_pretrained_weights: true
local_files_only: false  # false → checkpoint may be downloaded rather than read from disk
loss: mse
nr_frozen_epochs: 0.3  # assumed: fraction of an epoch to keep the encoder frozen — verify units
optimizer: AdamW
pool: avg  # sentence-embedding pooling strategy
pretrained_model: xlm-roberta-large
train_data:
- train.csv
validation_data:
- test.csv
warmup_steps: 0  # no LR warmup