{
    "r": 32,
    "lora_alpha": 8,
    "target_modules": [
        "fc2",
        "out_proj",
        "fc1",
        "q_proj",
        "k_proj",
        "v_proj"
    ],
    "lora_dropout": 0.05,
    "bias": "none",
    "use_rslora": true
}
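
The field names above match the HuggingFace PEFT library's LoraConfig, so below is a minimal sketch of how this config could be built and applied in code. The pairing with PEFT, the base model name, and the surrounding calls are assumptions for illustration, not part of the original file; the target module names (fc1, fc2, out_proj, q_proj, k_proj, v_proj) happen to match OPT-style transformer layers, which motivates the hypothetical model choice.

# Hedged sketch: assumes the JSON above is a peft.LoraConfig adapter config.
# "facebook/opt-350m" is a hypothetical base model whose module names match
# the target_modules list; swap in whatever model the adapter was trained for.
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

config = LoraConfig(
    r=32,                  # rank of the low-rank update matrices
    lora_alpha=8,          # scaling factor for the LoRA updates
    target_modules=["fc2", "out_proj", "fc1", "q_proj", "k_proj", "v_proj"],
    lora_dropout=0.05,     # dropout applied inside the LoRA layers
    bias="none",           # no bias parameters are trained
    use_rslora=True,       # rank-stabilized LoRA: scale by lora_alpha / sqrt(r)
)

base_model = AutoModelForCausalLM.from_pretrained("facebook/opt-350m")
model = get_peft_model(base_model, config)  # wrap base model with LoRA adapters
model.print_trainable_parameters()          # report trainable vs. total params

One design note: with use_rslora enabled, the adapter scaling is lora_alpha / sqrt(r) instead of the standard lora_alpha / r, which keeps the effective update scale from shrinking as the rank grows; that matters here because r=32 is paired with a comparatively small lora_alpha of 8.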