llava-molora-finetuned-on-nlvr2 / adapter_config.json
{
  "auto_mapping": null,
  "base_model_name_or_path": "liuhaotian/llava-v1.5-7b",
  "bias": "none",
  "inference_mode": true,
  "is_moe": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "lora_alpha": 64,
  "lora_dim": 32,
  "lora_dropout": 0.05,
  "modules_to_save": null,
  "moe_num_experts": 4,
  "moe_router_aux_loss_coef": 0.01,
  "moe_router_z_loss_coef": 0.001,
  "peft_type": "MOLORA",
  "revision": null,
  "target_modules": [
    "k_proj",
    "q_proj",
    "v_proj",
    "down_proj",
    "gate_proj",
    "up_proj",
    "o_proj"
  ],
  "task_type": "CAUSAL_LM"
}
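The sketch below is an illustrative guess at how the hyperparameters above are typically combined in a mixture-of-LoRA-experts ("MoLoRA") layer; the class name and structure are assumptions for clarity, not the code used to train this adapter. It uses the values from this config (moe_num_experts=4, lora_dim=32, lora_alpha=64, lora_dropout=0.05) and omits the router auxiliary/z losses, which only affect training via moe_router_aux_loss_coef and moe_router_z_loss_coef.

```python
# Illustrative sketch only (not from this repo): one target projection wrapped
# with a router over several LoRA experts, using this config's hyperparameters.
import torch
import torch.nn as nn
import torch.nn.functional as F


class MoLoRALinear(nn.Module):
    """Frozen base Linear plus `moe_num_experts` rank-`lora_dim` LoRA experts
    mixed by a token-level softmax router (hypothetical implementation)."""

    def __init__(self, base: nn.Linear, num_experts=4, r=32, alpha=64, dropout=0.05):
        super().__init__()
        self.base = base
        for p in self.base.parameters():
            p.requires_grad = False  # only the adapter weights are trained
        self.scaling = alpha / r  # lora_alpha / lora_dim = 64 / 32 = 2
        self.dropout = nn.Dropout(dropout)
        self.router = nn.Linear(base.in_features, num_experts, bias=False)
        self.lora_A = nn.ModuleList(
            [nn.Linear(base.in_features, r, bias=False) for _ in range(num_experts)]
        )
        self.lora_B = nn.ModuleList(
            [nn.Linear(r, base.out_features, bias=False) for _ in range(num_experts)]
        )
        for b in self.lora_B:
            nn.init.zeros_(b.weight)  # adapter starts as a no-op, as in standard LoRA

    def forward(self, x):
        gates = F.softmax(self.router(x), dim=-1)               # (..., num_experts)
        expert_out = torch.stack(
            [B(A(self.dropout(x))) for A, B in zip(self.lora_A, self.lora_B)],
            dim=-1,
        )                                                        # (..., out_features, E)
        delta = (expert_out * gates.unsqueeze(-2)).sum(dim=-1)  # router-weighted mixture
        return self.base(x) + self.scaling * delta


# Example: wrap a q_proj-sized projection (llava-v1.5-7b hidden size is 4096).
layer = MoLoRALinear(nn.Linear(4096, 4096, bias=False))
out = layer(torch.randn(2, 16, 4096))
print(out.shape)  # torch.Size([2, 16, 4096])
```

In an actual fine-tuning run, every module listed in `target_modules` (k_proj, q_proj, v_proj, o_proj, gate_proj, up_proj, down_proj) would be wrapped this way, with the router losses added to the language-modeling objective to keep expert usage balanced.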