# global random seed
seed: 42

# training hyperparameters
train:
  batch_size: 32
  epochs: 20
  hidden_size: 64
  dropout_prob: 0.13
  learning_rate: 0.0075
  l2_reg_lambda: 0.001
  optimizer: Adam
  loss: CrossEntropyLoss

# hyperparameter search space
tune:
  n_trials: 50
  hidden_size_choices: [32, 64, 128, 256, 512]
  dropout_prob_range: [0.1, 0.5]
  learning_rate_range: [1e-5, 1e-2]
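The tune block defines a search space: n_trials candidate configurations, with hidden_size drawn from the listed choices, dropout_prob from [0.1, 0.5], and learning_rate from [1e-5, 1e-2]. Below is a minimal sketch of how such a config might be consumed, assuming the file is saved as config.yaml and PyYAML is installed; the plain random search, the log-uniform learning-rate sampling, and all function names are illustrative assumptions, not something prescribed by this config.

# sketch.py -- illustrative only; sampling strategy is an assumption
import math
import random

import yaml

with open("config.yaml") as f:
    cfg = yaml.safe_load(f)

random.seed(cfg["seed"])


def sample_trial(tune):
    """Draw one hyperparameter configuration from the tune search space."""
    # coerce to float: PyYAML can load exponent-only literals like 1e-5 as strings
    lr_lo, lr_hi = (float(x) for x in tune["learning_rate_range"])
    do_lo, do_hi = (float(x) for x in tune["dropout_prob_range"])
    return {
        "hidden_size": random.choice(tune["hidden_size_choices"]),
        "dropout_prob": random.uniform(do_lo, do_hi),
        # learning rates spanning several orders of magnitude are usually
        # sampled log-uniformly (an assumption, not stated in the config)
        "learning_rate": 10 ** random.uniform(math.log10(lr_lo), math.log10(lr_hi)),
    }


# one candidate configuration per trial; training/evaluation of each trial
# (e.g. with the train section's optimizer and loss) is not shown here
trials = [sample_trial(cfg["tune"]) for _ in range(cfg["tune"]["n_trials"])]
print(trials[0])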