{ "vocab_size": 32000, "hidden_size": 3200, "num_hidden_layers": 32, "num_attention_heads": 32, "intermediate_size": 12800, "max_position_embeddings": 1024, "type_vocab_size": 2, "hidden_dropout_prob": 0.1, "attention_probs_dropout_prob": 0.1, "initializer_range": 0.02, "layer_norm_eps": 1e-12, "use_cache": true, "bos_token_id": 0, "eos_token_id": 2 }