emelryan committed
Commit 8f185b9 · verified · 1 Parent(s): d8914b1

Training complete with QLoRA

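The commit message marks the end of a QLoRA run, and every shard's LFS pointer below changes at once, which is the signature of trained LoRA adapters being merged back into the base weights and the checkpoint being re-saved and re-sharded. A minimal sketch of that merge-and-save step, assuming a PEFT adapter directory; the model and adapter paths are hypothetical and the actual training code is not part of this commit:

import torch
from transformers import AutoModelForCausalLM
from peft import PeftModel

# Load the base weights in bf16: merging happens in full/half precision,
# not in the 4-bit format used during QLoRA training. ("base-model" and
# "qlora-adapter" are placeholder paths, not names from this repo.)
base = AutoModelForCausalLM.from_pretrained("base-model", torch_dtype=torch.bfloat16)
model = PeftModel.from_pretrained(base, "qlora-adapter")
model = model.merge_and_unload()  # fold the LoRA deltas into the base weights

# save_pretrained re-shards the merged weights and rewrites
# model.safetensors.index.json, which is why every shard's oid/size
# changes in the diff below.
model.save_pretrained("merged-model", safe_serialization=True, max_shard_size="5GB")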
model-00001-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:34c2e9a997757bf5db3e8d86a2bbce2e5c7aeb41371438dcd96fd835de10a063
-size 4919438192
+oid sha256:747398eda77847ce76901386a65d1f86bfa37304dbc32877ee3e42a3ebd571eb
+size 4986211280
model-00002-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cf93695cad9b572d9f2720c207ebe9dd269b60c14abf46e77415c0d43d326174
-size 4954847368
+oid sha256:83a2c4c011f62acee072f602db5270f6eba773b07aebe5c9d2717292b7d6fafa
+size 4954847344
model-00003-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:47df37b436ed33974becf715e8193a883312731ae334af0f98de878d266cbc2e
+oid sha256:eb743cec08fcc24dbd242fdfae2bd1b8f2b95abdcd55fee7bd2d9e03749d364c
 size 4954847392
model-00004-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5baa281f7b8bdef7479b5b99f671c5791706483d9df0fb2204985b8f4f1fd772
+oid sha256:110349d00711187d77bc2e6a40e58a7b8a4b647da9ba014acd87bc410e18738f
 size 4954847392
model-00005-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6aee3d4969d2d16a71fabd748e344e9451bf79bd4c2110325f74f4d35fcf695e
+oid sha256:723d03ccfe8690ac3735177c085528fafe809c9f91552c634b3db97564a4141e
 size 4954847392
model-00006-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:be04960742eb812e639f664e5f29c04f22d793e871d671df7f06ea2c31b94488
-size 2076310288
+oid sha256:1ef1ac4a8f850f989ea7fee95795c24fd452351c8d9af18b5a483c5cd56f2d59
+size 4734533160
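Each *.safetensors entry above is a Git LFS pointer, not the weights themselves: the repository stores three lines per shard (the spec version, the blob's SHA-256 as oid, and its byte size), so the diff is pointer churn standing in for roughly 29 GB of new weights. A small sanity check one might run on a downloaded shard against its pointer file (paths hypothetical):

import hashlib
from pathlib import Path

def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    # The pointer is three "key value" lines: version, oid, size.
    fields = dict(line.split(" ", 1) for line in Path(pointer_path).read_text().splitlines() if line)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])

    # Stream the blob so a ~5 GB shard never has to fit in memory.
    digest, size = hashlib.sha256(), 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            size += len(chunk)
    return size == expected_size and digest.hexdigest() == expected_oid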
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 26815071488
+    "total_size": 29540067328
   },
   "weight_map": {
     "lm_head.weight": "model-00006-of-00006.safetensors",
@@ -89,11 +89,11 @@
     "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
     "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
     "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.15.input_layernorm.weight": "model-00002-of-00006.safetensors",
-    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00003-of-00006.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
     "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
     "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
     "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
@@ -101,30 +101,30 @@
     "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
     "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
     "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.16.input_layernorm.weight": "model-00002-of-00006.safetensors",
-    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
-    "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
-    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
-    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
-    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00003-of-00006.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+    "model.layers.16.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.16.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.16.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
     "model.layers.17.input_layernorm.weight": "model-00003-of-00006.safetensors",
     "model.layers.17.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
     "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
     "model.layers.17.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
     "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
-    "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
-    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
-    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
-    "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
-    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.17.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.17.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.17.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
     "model.layers.18.input_layernorm.weight": "model-00003-of-00006.safetensors",
     "model.layers.18.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
     "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
@@ -209,11 +209,11 @@
     "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
     "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
     "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.24.input_layernorm.weight": "model-00003-of-00006.safetensors",
-    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00004-of-00006.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
     "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
     "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
     "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
@@ -221,30 +221,30 @@
     "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
     "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
     "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.25.input_layernorm.weight": "model-00003-of-00006.safetensors",
-    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
-    "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
-    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
-    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
-    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00004-of-00006.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+    "model.layers.25.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.25.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.25.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
     "model.layers.26.input_layernorm.weight": "model-00004-of-00006.safetensors",
     "model.layers.26.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
     "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
     "model.layers.26.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
     "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
-    "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00006.safetensors",
-    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00006.safetensors",
-    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
-    "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00006.safetensors",
-    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+    "model.layers.26.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.26.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.26.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
     "model.layers.27.input_layernorm.weight": "model-00004-of-00006.safetensors",
     "model.layers.27.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
     "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
@@ -329,11 +329,11 @@
     "model.layers.32.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
     "model.layers.32.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
     "model.layers.32.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.33.input_layernorm.weight": "model-00004-of-00006.safetensors",
-    "model.layers.33.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.33.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.33.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+    "model.layers.33.input_layernorm.weight": "model-00005-of-00006.safetensors",
+    "model.layers.33.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.33.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.33.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
     "model.layers.33.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
     "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
     "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
@@ -341,30 +341,30 @@
     "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
     "model.layers.33.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
     "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.34.input_layernorm.weight": "model-00004-of-00006.safetensors",
-    "model.layers.34.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.34.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.34.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.34.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
-    "model.layers.34.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
-    "model.layers.34.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.34.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.34.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
-    "model.layers.34.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.34.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
-    "model.layers.34.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.34.input_layernorm.weight": "model-00005-of-00006.safetensors",
+    "model.layers.34.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.34.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+    "model.layers.34.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
+    "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.34.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
+    "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.34.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
+    "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
     "model.layers.35.input_layernorm.weight": "model-00005-of-00006.safetensors",
     "model.layers.35.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
     "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
     "model.layers.35.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
     "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
-    "model.layers.35.self_attn.k_proj.bias": "model-00004-of-00006.safetensors",
-    "model.layers.35.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.35.self_attn.q_proj.bias": "model-00004-of-00006.safetensors",
-    "model.layers.35.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
-    "model.layers.35.self_attn.v_proj.bias": "model-00004-of-00006.safetensors",
-    "model.layers.35.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+    "model.layers.35.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
+    "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.35.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
+    "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.35.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
+    "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
     "model.layers.36.input_layernorm.weight": "model-00005-of-00006.safetensors",
     "model.layers.36.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
     "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
@@ -449,11 +449,11 @@
     "model.layers.41.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
     "model.layers.41.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
     "model.layers.41.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.42.input_layernorm.weight": "model-00005-of-00006.safetensors",
-    "model.layers.42.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.42.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.42.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.42.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+    "model.layers.42.input_layernorm.weight": "model-00006-of-00006.safetensors",
+    "model.layers.42.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+    "model.layers.42.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+    "model.layers.42.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+    "model.layers.42.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
     "model.layers.42.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
     "model.layers.42.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
     "model.layers.42.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
@@ -461,30 +461,30 @@
     "model.layers.42.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
     "model.layers.42.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
     "model.layers.42.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.43.input_layernorm.weight": "model-00005-of-00006.safetensors",
-    "model.layers.43.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.43.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.43.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.43.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
-    "model.layers.43.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
-    "model.layers.43.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.43.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.43.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
-    "model.layers.43.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.43.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
-    "model.layers.43.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.43.input_layernorm.weight": "model-00006-of-00006.safetensors",
+    "model.layers.43.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+    "model.layers.43.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+    "model.layers.43.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+    "model.layers.43.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+    "model.layers.43.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+    "model.layers.43.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+    "model.layers.43.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+    "model.layers.43.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+    "model.layers.43.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+    "model.layers.43.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+    "model.layers.43.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
     "model.layers.44.input_layernorm.weight": "model-00006-of-00006.safetensors",
     "model.layers.44.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
     "model.layers.44.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
     "model.layers.44.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
     "model.layers.44.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
-    "model.layers.44.self_attn.k_proj.bias": "model-00005-of-00006.safetensors",
-    "model.layers.44.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.44.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.44.self_attn.q_proj.bias": "model-00005-of-00006.safetensors",
-    "model.layers.44.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
-    "model.layers.44.self_attn.v_proj.bias": "model-00005-of-00006.safetensors",
-    "model.layers.44.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+    "model.layers.44.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+    "model.layers.44.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+    "model.layers.44.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+    "model.layers.44.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+    "model.layers.44.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+    "model.layers.44.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+    "model.layers.44.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
     "model.layers.45.input_layernorm.weight": "model-00006-of-00006.safetensors",
     "model.layers.45.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
     "model.layers.45.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
@@ -533,11 +533,11 @@
     "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
     "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
     "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.6.input_layernorm.weight": "model-00001-of-00006.safetensors",
-    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00002-of-00006.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
     "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
     "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
     "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
@@ -545,30 +545,30 @@
     "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
     "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
     "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.7.input_layernorm.weight": "model-00001-of-00006.safetensors",
-    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
-    "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
-    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
-    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
-    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+    "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
     "model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
     "model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
     "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
     "model.layers.8.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
     "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
-    "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00006.safetensors",
-    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00006.safetensors",
-    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
-    "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00006.safetensors",
-    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+    "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00006.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00006.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+    "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00006.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
     "model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
     "model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
     "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",