default_stage:
  default_modifiers:
    GPTQModifier:
      targets: [Linear]
      ignore: ['re:.*lm_head', 're:.*self_attn', 're:.*router', 're:vision_model.*',
        're:multi_modal_projector.*', Llama4TextAttention]
      scheme: W4A16
      sequential_update: true
      block_size: 128
      dampening_frac: 0.01
      offload_hessians: false
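
# Usage sketch (an assumption, not part of the stored recipe): a recipe in this
# format is typically passed to llm-compressor's oneshot entry point. The model
# path, calibration dataset, and sample counts below are illustrative
# placeholders, not values taken from this repository.
#
#   from llmcompressor import oneshot
#
#   oneshot(
#       model="path/to/base-model",        # hypothetical model path
#       recipe="recipe.yaml",              # this recipe file
#       dataset="open_platypus",           # example calibration dataset
#       max_seq_length=2048,               # calibration sequence length
#       num_calibration_samples=512,       # number of calibration samples
#   )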