gradientdegen committed
Commit 1d4ea85 · verified · 1 Parent(s): 6ba63d4

Upload folder using huggingface_hub

Files changed (2):
  1. adapter_config.json +4 -4
  2. adapter_model.safetensors +1 -1
adapter_config.json CHANGED
@@ -3,13 +3,13 @@
   "peft_type": "LORA",
   "use_rslora": false,
   "target_modules": [
-    "k_proj",
     "down_proj",
+    "gate_proj",
     "q_proj",
-    "v_proj",
-    "o_proj",
+    "k_proj",
     "up_proj",
-    "gate_proj"
+    "o_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "r": 128,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:be7f9cf39beac16b4e4e31a07133719c3cba3e0793f26ad46f4715ca4295583d
+oid sha256:420c12c3f262607c684800f12bcafd5d4643c50eeddc63bbd5e313a670e521ba
 size 1101535104
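Only the sha256 OID of the Git LFS pointer changes here; the byte size is identical, so the adapter weights were replaced rather than resized. Since the LFS OID is the sha256 of the file contents, a downloaded copy can be checked against it; a minimal sketch, assuming the file has been fetched into the current directory:

# Sketch only: verify a local adapter_model.safetensors against the new LFS OID.
import hashlib

expected = "420c12c3f262607c684800f12bcafd5d4643c50eeddc63bbd5e313a670e521ba"

h = hashlib.sha256()
with open("adapter_model.safetensors", "rb") as f:      # local path is an assumption
    for chunk in iter(lambda: f.read(1 << 20), b""):    # hash in 1 MiB chunks
        h.update(chunk)

print("OK" if h.hexdigest() == expected else "hash mismatch")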