{
    "producer": {
        "name": "modelopt",
        "version": "0.25.0"
    },
    "architecture": "Qwen2MoeForCausalLM",
    "dtype": "bfloat16",
    "logits_dtype": "float16",
    "num_hidden_layers": 24,
    "num_attention_heads": 16,
    "num_key_value_heads": 16,
    "hidden_size": 2048,
    "norm_epsilon": 1e-06,
    "vocab_size": 151936,
    "max_position_embeddings": 8192,
    "hidden_act": "swiglu",
    "use_parallel_embedding": true,
    "embedding_sharding_dim": 0,
    "head_size": 128,
    "intermediate_size": 5632,
    "position_embedding_type": "rope_gpt_neox",
    "share_embedding_table": false,
    "residual_mlp": false,
    "bias": false,
    "rotary_pct": 1.0,
    "rank": 1,
    "decoder": "qwen",
    "rmsnorm": true,
    "lm_head_bias": false,
    "mlp_bias": false,
    "attn_bias": true,
    "rotary_base": 1000000.0,
    "rotary_scaling": null,
    "disable_weight_only_quant_plugin": false,
    "num_labels": 1,
    "use_logn_attn": false,
    "moe": {
        "num_experts": 60,
        "shared_expert_intermediate_size": 0,
        "top_k": 4,
        "normalization_mode": 0,
        "sparse_mixer_epsilon": 0.01,
        "tp_mode": 0,
        "device_limited_n_group": 0,
        "device_limited_topk_group": 0,
        "device_limited_routed_scaling_factor": 1.0
    },
    "runtime_defaults": null,
    "mapping": {
        "world_size": 2,
        "gpus_per_node": 8,
        "cp_size": 1,
        "tp_size": 2,
        "pp_size": 1,
        "moe_tp_size": 2,
        "moe_ep_size": 1,
        "auto_parallel": false
    },
    "quantization": {
        "quant_algo": "FP8",
        "kv_cache_quant_algo": null,
        "group_size": 128,
        "smoothquant_val": 0.5,
        "clamp_val": null,
        "use_meta_recipe": false,
        "has_zero_point": false,
        "pre_quant_scale": false,
        "exclude_modules": [
            "transformer.layers.7.mlp.shared_expert_gate",
            "transformer.layers.20.mlp.router",
            "lm_head",
            "transformer.layers.18.post_layernorm",
            "transformer.layers.17.mlp.shared_expert_gate",
            "transformer.layers.6.post_layernorm",
            "transformer.layers.14.mlp.router",
            "transformer.layers.23.post_layernorm",
            "transformer.layers.22.mlp.router",
            "transformer.layers.7.input_layernorm",
            "transformer.layers.9.mlp.shared_expert_gate",
            "transformer.layers.8.mlp.router",
            "transformer.layers.11.mlp.router",
            "transformer.layers.19.mlp.router",
            "transformer.layers.13.mlp.router",
            "transformer.layers.3.mlp.shared_expert_gate",
            "transformer.layers.14.post_layernorm",
            "transformer.layers.10.mlp.shared_expert_gate",
            "transformer.layers.6.input_layernorm",
            "transformer.layers.1.mlp.router",
            "transformer.layers.21.mlp.router",
            "transformer.layers.20.mlp.shared_expert_gate",
            "transformer.layers.13.input_layernorm",
            "transformer.layers.1.mlp.shared_expert_gate",
            "transformer.layers.9.input_layernorm",
            "transformer.layers.18.mlp.router",
            "transformer.layers.18.mlp.shared_expert_gate",
            "transformer.layers.21.input_layernorm",
            "transformer.layers.15.post_layernorm",
            "transformer.layers.22.mlp.shared_expert_gate",
            "transformer.layers.4.post_layernorm",
            "transformer.layers.15.input_layernorm",
            "transformer.layers.13.post_layernorm",
            "transformer.layers.4.mlp.router",
            "transformer.layers.23.mlp.router",
            "transformer.layers.3.mlp.router",
            "transformer.layers.2.mlp.router",
            "transformer.layers.21.post_layernorm",
            "transformer.layers.9.post_layernorm",
            "transformer.layers.5.mlp.shared_expert_gate",
            "transformer.layers.1.post_layernorm",
            "transformer.layers.23.input_layernorm",
            "transformer.layers.7.mlp.router",
            "transformer.layers.20.input_layernorm",
            "transformer.layers.13.mlp.shared_expert_gate",
            "transformer.layers.15.mlp.router",
            "transformer.layers.7.post_layernorm",
            "transformer.layers.22.input_layernorm",
            "transformer.layers.2.mlp.shared_expert_gate",
            "transformer.layers.17.mlp.router",
            "transformer.layers.19.post_layernorm",
            "transformer.layers.4.mlp.shared_expert_gate",
            "transformer.layers.6.mlp.shared_expert_gate",
            "transformer.layers.9.mlp.router",
            "transformer.layers.16.input_layernorm",
            "transformer.layers.0.input_layernorm",
            "transformer.layers.11.mlp.shared_expert_gate",
            "transformer.layers.12.mlp.shared_expert_gate",
            "transformer.layers.16.mlp.shared_expert_gate",
            "transformer.layers.16.post_layernorm",
            "transformer.layers.19.input_layernorm",
            "transformer.layers.14.mlp.shared_expert_gate",
            "transformer.layers.10.post_layernorm",
            "transformer.vocab_embedding",
            "transformer.layers.8.post_layernorm",
            "transformer.layers.10.input_layernorm",
            "transformer.layers.2.input_layernorm",
            "transformer.layers.14.input_layernorm",
            "transformer.layers.5.input_layernorm",
            "transformer.layers.18.input_layernorm",
            "transformer.layers.3.post_layernorm",
            "transformer.layers.5.post_layernorm",
            "transformer.layers.11.input_layernorm",
            "transformer.layers.12.input_layernorm",
            "transformer.layers.23.mlp.shared_expert_gate",
            "transformer.layers.16.mlp.router",
            "transformer.layers.15.mlp.shared_expert_gate",
            "transformer.layers.0.mlp.shared_expert_gate",
            "transformer.layers.11.post_layernorm",
            "transformer.layers.22.post_layernorm",
            "transformer.layers.10.mlp.router",
            "transformer.layers.1.input_layernorm",
            "transformer.layers.17.post_layernorm",
            "transformer.layers.12.mlp.router",
            "transformer.layers.19.mlp.shared_expert_gate",
            "transformer.layers.12.post_layernorm",
            "transformer.ln_f",
            "transformer.layers.3.input_layernorm",
            "transformer.layers.17.input_layernorm",
            "transformer.layers.8.mlp.shared_expert_gate",
            "transformer.layers.0.post_layernorm",
            "transformer.layers.0.mlp.router",
            "transformer.layers.2.post_layernorm",
            "transformer.layers.8.input_layernorm",
            "transformer.layers.4.input_layernorm",
            "transformer.layers.21.mlp.shared_expert_gate",
            "transformer.layers.6.mlp.router",
            "transformer.layers.5.mlp.router",
            "transformer.layers.20.post_layernorm"
        ]
    },
    "qk_layernorm": false,
    "rotary_embedding_dim": 128,
    "seq_length": 8192,
    "qwen_type": "qwen2_moe",
    "moe_intermediate_size": 1408,
    "moe_shared_expert_intermediate_size": 5632,
    "tie_word_embeddings": false,
    "model_type": "qwen"
}