{ "auto_mapping": null, "base_model_name_or_path": "meta-llama/Meta-Llama-3-8B-Instruct", "exclude_modules": null, "inference_mode": true, "modules_to_save": null, "peft_type": "LN_TUNING", "revision": null, "target_modules": [ "input_layernorm", "post_attention_layernorm", "norm" ], "task_type": "CAUSAL_LM" }