{ "architectures": ["AutoModelForCausalLM"], "vocab_size": 50000, "hidden_size": 4096, "num_attention_heads": 16, "num_hidden_layers": 12, "intermediate_size": 11008, "max_position_embeddings": 512, "attention_probs_dropout_prob": 0.1, "hidden_dropout_prob": 0.1, "initializer_range": 0.02, "layer_norm_eps": 1e-12, "pad_token_id": 0, "bos_token_id": 1, "eos_token_id": 2 }