{
  "architectures": ["GPT2LMHeadModel"],
  "vocab_size": 50257,
  "n_positions": 768,
  "n_ctx": 768,
  "n_embd": 768,
  "n_layer": 6,
  "n_head": 6,
  "attn_pdrop": 0.1,
  "resid_pdrop": 0.1,
  "embd_pdrop": 0.1,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
"pad_token_id": 0, | |
"bos_token_id": 50256, | |
"eos_token_id": 50256 | |
} | |
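
A minimal sketch of loading this configuration with the Hugging Face `transformers` library, assuming the JSON above is saved as `config.json` (an assumed file name). Instantiating a model from a config alone yields randomly initialized weights; no checkpoint is implied. Note that GPT-2 has no dedicated padding token, so `pad_token_id` is conventionally tied to the end-of-sequence token (50256) rather than 0, which in GPT-2's vocabulary is a regular token.

```python
# Minimal sketch, assuming the JSON above is saved as config.json
# next to this script; weights are randomly initialized because
# only a configuration (no checkpoint) is provided.
from transformers import GPT2Config, GPT2LMHeadModel

config = GPT2Config.from_json_file("config.json")  # assumed file name
model = GPT2LMHeadModel(config)

# 6 layers x 6 heads, 768-dim embeddings (128-dim per head),
# 768-token context window.
print(f"layers={config.n_layer}, heads={config.n_head}, "
      f"params={model.num_parameters():,}")
```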