{
  "d_model": 4096,
  "d_intermediate": 0,
  "n_layer": 64,
  "vocab_size": 32768,
  "ssm_cfg": {
    "layer": "Mamba2",
    "ngroups": 8
  },
  "attn_layer_idx": [],
  "attn_cfg": {},
  "rms_norm": true,
  "residual_in_fp32": true,
  "fused_add_norm": true,
  "pad_vocab_size_multiple": 16,
  "tie_embeddings": false
}
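
This config matches the MambaConfig schema from the state-spaces/mamba codebase: "layer": "Mamba2" selects Mamba2 mixer blocks, "attn_layer_idx": [] means no attention layers are interleaved, and "d_intermediate": 0 disables the per-block MLP, leaving a pure SSM stack. A minimal loading sketch, assuming the mamba_ssm package is installed and this file is saved locally as config.json (that path is an assumption):

    import json

    import torch
    from mamba_ssm.models.config_mamba import MambaConfig
    from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

    # Load the JSON above into the MambaConfig dataclass
    # (the field names map one-to-one onto the dataclass fields).
    with open("config.json") as f:  # hypothetical local path
        cfg = MambaConfig(**json.load(f))

    # Build the language model; the fused Mamba2 kernels expect a CUDA device.
    model = MambaLMHeadModel(cfg, device="cuda", dtype=torch.bfloat16)
    print(sum(p.numel() for p in model.parameters()))  # rough parameter count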