{
  "hidden_size": 4096,
  "num_attention_heads": 32,
  "num_hidden_layers": 31,
  "intermediate_size": 14336,
  "model_type": "llama",
  "vocab_size": 32000
}
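
For context, a minimal sketch of how a configuration like this could be read and inspected, assuming it is saved locally under the hypothetical filename config.json; the per-head dimension below is derived from hidden_size and num_attention_heads.

import json

# Load the configuration shown above; "config.json" is an assumed local path.
with open("config.json") as f:
    cfg = json.load(f)

# Each attention head works on hidden_size / num_attention_heads dimensions.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]  # 4096 // 32 = 128

print(cfg["model_type"], "layers:", cfg["num_hidden_layers"], "head_dim:", head_dim)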