{
  "auto_mapping": null,
  "base_model_name_or_path": "meta-llama/Meta-Llama-3-8B-Instruct",
  "exclude_modules": null,
  "inference_mode": true,
  "modules_to_save": null,
  "peft_type": "LN_TUNING",
  "revision": null,
  "target_modules": [
    "norm",
    "input_layernorm",
    "post_attention_layernorm"
  ],
  "task_type": "CAUSAL_LM"
}
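
This is a PEFT LayerNorm-tuning (`LN_TUNING`) adapter config: only the layer-norm parameters named in `target_modules` were trained, on top of the frozen `meta-llama/Meta-Llama-3-8B-Instruct` base model, and `inference_mode` is set for deployment. A minimal sketch of loading such an adapter with the `peft` library is shown below; the adapter path `"path/to/adapter"` is a placeholder, not from this repo.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

# Load the frozen base model named in base_model_name_or_path.
base = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Meta-Llama-3-8B-Instruct"
)
tokenizer = AutoTokenizer.from_pretrained(
    "meta-llama/Meta-Llama-3-8B-Instruct"
)

# Attach the LN_TUNING adapter: PEFT reads this adapter_config.json from
# the adapter directory and swaps in the tuned layer-norm weights.
model = PeftModel.from_pretrained(base, "path/to/adapter")
model.eval()  # matches "inference_mode": true in the config
```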