GLM-4.5-Air-GPTQ-4bit / recipe.yaml
default_stage:
  default_modifiers:
    GPTQModifier:
      config_groups:
        group_0:
          targets: [Linear]
          weights:
            # 4-bit symmetric integer weights, quantized in groups of 32 with a min-max observer
            num_bits: 4
            type: int
            symmetric: true
            group_size: 32
            strategy: group
            block_structure: null
            dynamic: false
            actorder: null
            observer: minmax
            observer_kwargs: {}
          input_activations: null
          output_activations: null
      targets: [Linear]
      # modules left unquantized: lm_head, the MoE gate modules, and all of layer 46
      ignore: [lm_head, 're:.*mlp.gate$', 're:.*mlp.shared_expert_gate$', 're:.*layers.46.*']
      sequential_update: true
      block_size: 128
      dampening_frac: 0.01
      offload_hessians: false
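
For context, a minimal sketch of how a recipe like this is typically applied with llm-compressor's oneshot entrypoint. The base model ID, calibration dataset, sample count, sequence length, and output directory below are illustrative assumptions, not values recorded in this repository.

# Hypothetical usage sketch; not the uploader's actual quantization script.
from llmcompressor import oneshot

oneshot(
    model="zai-org/GLM-4.5-Air",         # assumed base model ID
    recipe="recipe.yaml",                # this recipe file
    dataset="open_platypus",             # assumed calibration dataset
    num_calibration_samples=512,         # assumed number of calibration samples
    max_seq_length=2048,                 # assumed calibration sequence length
    output_dir="GLM-4.5-Air-GPTQ-4bit",  # assumed output directory
)

oneshot saves the compressed checkpoint together with a copy of the applied recipe, which is presumably how this recipe.yaml ended up alongside the quantized weights.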