{
  "attention_probs_dropout_prob": 0.1,
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "intermediate_size": 2560,
  "max_position_embeddings": 512,
  "position_bucket_size": 32,
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "vocab_size": 16384,
  "layer_norm_eps": 1e-05,
  "force_causal_mask": true,
  "classifier_dropout": 0.1,
  "classifier_layer_norm_eps": 1e-05,
  "num_labels": 2
}
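
As a quick sanity check on the values above, the sketch below loads the configuration with Python's standard json module and derives a few quantities they imply (per-head dimension, feed-forward expansion ratio). The file name config.json is an assumption for illustration, not something the config itself specifies.

```python
import json

# Assumed file name for this sketch; the config above would be saved here.
with open("config.json") as f:
    cfg = json.load(f)

# The hidden size must divide evenly across the attention heads.
assert cfg["hidden_size"] % cfg["num_attention_heads"] == 0
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]  # 768 / 12 = 64

# Expansion ratio of the feed-forward block implied by intermediate_size.
ffn_ratio = cfg["intermediate_size"] / cfg["hidden_size"]  # 2560 / 768 ~ 3.33

print(f"per-head dimension:          {head_dim}")
print(f"feed-forward expansion:      {ffn_ratio:.2f}x")
print(f"max sequence length:         {cfg['max_position_embeddings']}")
print(f"classification head labels:  {cfg['num_labels']}")
```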