{
  "architectures": [
    "Lfm2MoeForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_lfm2_moe.Lfm2MoeConfig",
    "AutoModelForCausalLM": "modeling_lfm2_moe.Lfm2MoeForCausalLM"
  },
  "bos_token_id": 1,
  "conv_L_cache": 3,
  "conv_bias": false,
  "dtype": "bfloat16",
  "eos_token_id": 7,
  "hidden_size": 2048,
  "intermediate_size": 7168,
  "layer_types": [
    "conv",
    "conv",
    "full_attention",
    "conv",
    "conv",
    "conv",
    "full_attention",
    "conv",
    "conv",
    "conv",
    "full_attention",
    "conv",
    "conv",
    "conv",
    "full_attention",
    "conv",
    "conv",
    "conv",
    "full_attention",
    "conv",
    "conv",
    "full_attention",
    "conv",
    "conv"
  ],
  "max_position_embeddings": 128000,
  "model_type": "lfm2_moe",
  "moe_intermediate_size": 1792,
  "norm_eps": 1e-05,
  "norm_topk_prob": true,
  "num_attention_heads": 32,
  "num_dense_layers": 2,
  "num_experts": 32,
  "num_experts_per_tok": 4,
  "num_hidden_layers": 24,
  "num_key_value_heads": 8,
  "pad_token_id": 0,
  "quantization": {
    "group_size": 64,
    "bits": 8,
    "mode": "affine",
    "model.layers.2.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.3.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.4.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.5.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.6.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.7.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.8.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.9.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.10.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.11.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.12.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.13.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.14.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.15.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.16.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.17.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.18.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.19.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.20.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.21.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.22.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.23.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    }
  },
  "quantization_config": {
    "group_size": 64,
    "bits": 8,
    "mode": "affine",
    "model.layers.2.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.3.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.4.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.5.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.6.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.7.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.8.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.9.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.10.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.11.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.12.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.13.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.14.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.15.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.16.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.17.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.18.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.19.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.20.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.21.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.22.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    },
    "model.layers.23.feed_forward.gate": {
      "group_size": 64,
      "bits": 8
    }
  },
  "rope_theta": 1000000.0,
  "routed_scaling_factor": 1.0,
  "transformers_version": "4.56.1",
  "use_cache": true,
  "use_expert_bias": true,
  "vocab_size": 65536
}