Jackmin108 committed on
Commit b3c656c · 1 Parent(s): c3be0e9
Files changed (50)
  1. config.json +9 -9
  2. model-00001-of-00048.safetensors +3 -0
  3. model-00002-of-00048.safetensors +3 -0
  4. model-00003-of-00048.safetensors +3 -0
  5. model-00004-of-00048.safetensors +3 -0
  6. model-00005-of-00048.safetensors +3 -0
  7. model-00006-of-00048.safetensors +3 -0
  8. model-00007-of-00048.safetensors +3 -0
  9. model-00008-of-00048.safetensors +3 -0
  10. model-00009-of-00048.safetensors +3 -0
  11. model-00010-of-00048.safetensors +3 -0
  12. model-00011-of-00048.safetensors +3 -0
  13. model-00012-of-00048.safetensors +3 -0
  14. model-00013-of-00048.safetensors +3 -0
  15. model-00014-of-00048.safetensors +3 -0
  16. model-00015-of-00048.safetensors +3 -0
  17. model-00016-of-00048.safetensors +3 -0
  18. model-00017-of-00048.safetensors +3 -0
  19. model-00018-of-00048.safetensors +3 -0
  20. model-00019-of-00048.safetensors +3 -0
  21. model-00020-of-00048.safetensors +3 -0
  22. model-00021-of-00048.safetensors +3 -0
  23. model-00022-of-00048.safetensors +3 -0
  24. model-00023-of-00048.safetensors +3 -0
  25. model-00024-of-00048.safetensors +3 -0
  26. model-00025-of-00048.safetensors +3 -0
  27. model-00026-of-00048.safetensors +3 -0
  28. model-00027-of-00048.safetensors +3 -0
  29. model-00028-of-00048.safetensors +3 -0
  30. model-00029-of-00048.safetensors +3 -0
  31. model-00030-of-00048.safetensors +3 -0
  32. model-00031-of-00048.safetensors +3 -0
  33. model-00032-of-00048.safetensors +3 -0
  34. model-00033-of-00048.safetensors +3 -0
  35. model-00034-of-00048.safetensors +3 -0
  36. model-00035-of-00048.safetensors +3 -0
  37. model-00036-of-00048.safetensors +3 -0
  38. model-00037-of-00048.safetensors +3 -0
  39. model-00038-of-00048.safetensors +3 -0
  40. model-00039-of-00048.safetensors +3 -0
  41. model-00040-of-00048.safetensors +3 -0
  42. model-00041-of-00048.safetensors +3 -0
  43. model-00042-of-00048.safetensors +3 -0
  44. model-00043-of-00048.safetensors +3 -0
  45. model-00044-of-00048.safetensors +3 -0
  46. model-00045-of-00048.safetensors +3 -0
  47. model-00046-of-00048.safetensors +3 -0
  48. model-00047-of-00048.safetensors +3 -0
  49. model-00048-of-00048.safetensors +3 -0
  50. model.safetensors.index.json +1 -0
config.json CHANGED
@@ -15,26 +15,26 @@
   151336,
   151338
  ],
- "head_dim": 64,
+ "head_dim": 128,
  "hidden_act": "silu",
- "hidden_size": 1024,
+ "hidden_size": 4096,
  "partial_rotary_factor": 0.5,
  "initializer_range": 0.02,
- "intermediate_size": 2048,
+ "intermediate_size": 10944,
  "max_position_embeddings": 131072,
  "model_type": "glm4_moe",
- "moe_intermediate_size": 256,
+ "moe_intermediate_size": 1408,
  "norm_topk_prob": true,
- "num_attention_heads": 16,
+ "num_attention_heads": 96,
  "n_group": 1,
  "topk_group": 1,
- "n_routed_experts": 8,
+ "n_routed_experts": 128,
  "n_shared_experts": 1,
  "routed_scaling_factor": 1.0,
- "num_experts_per_tok": 4,
+ "num_experts_per_tok": 8,
  "first_k_dense_replace": 1,
- "num_hidden_layers": 24,
- "num_key_value_heads": 4,
+ "num_hidden_layers": 46,
+ "num_key_value_heads": 8,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 1000000,
model-00001-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a0ada32b602a36368834bdb626e6d46856996c182c2a19de8e76fce185dc5f60
+ size 2483036488
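
Each shard is committed as a Git LFS pointer: a version line, the sha256 oid of the real file, and its size in bytes. A small hypothetical helper (not part of the repo) that checks a downloaded shard against the pointer above:

```python
# Sketch: verify a locally downloaded shard against its Git LFS pointer
# (sha256 oid and byte size), streaming the file to keep memory flat.
import hashlib
import os

def verify_lfs_pointer(path, expected_oid, expected_size):
    if os.path.getsize(path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid

ok = verify_lfs_pointer(
    "model-00001-of-00048.safetensors",
    "a0ada32b602a36368834bdb626e6d46856996c182c2a19de8e76fce185dc5f60",
    2483036488,
)
print("shard 1 matches pointer:", ok)
```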
model-00002-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eb5056ec2f9e8387135e5f53315f2cfefd0ceb20645c1f5252ab902371294c86
+ size 487109984
model-00003-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8afbc1342adcd07c5020a6a5ceb37732dfd4b1d0a989bd42995d9de0fc8487b3
+ size 4682987928
model-00004-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:839c7fbca12e9cb2ee36a14ff019b2371cbcc06b445ba216d7ba8b778ee56174
+ size 4682987928
model-00005-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a58383778b80452b1f169f50f245064467b5e32c37a633755d961c8375f8bbbf
+ size 4682987928
model-00006-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2d66fde38a7db40704ddf670b60d004a3beafb2550299b6087066f35974dcf5e
+ size 4682987928
model-00007-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b3ef04b48e9be042a430535ee56b3b2cc4128c8f4a5629906ae60fa12a8dfb9
+ size 4682987928
model-00008-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0b64161a3f752a54a8c6622facb73f5fc2fb478f15fdcfbea56533b7485b583f
+ size 4682987928
model-00009-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:050cca1c4c0429a9e2bcd8f8c21d389df510d9c1240a9f722bbe5dd704a6be99
+ size 4682987928
model-00010-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:96da357bcf286be1ea6c942dbc63e297570339c2800399f7140a8494ea3de5df
+ size 4682987928
model-00011-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9093aea877fefaea6087a218cc7ffb154ed88d03cc3f2195af6b084f81fc7c1c
+ size 4682987928
model-00012-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1760cf4d10ed6929cb888dbf128c10852f53d2e5486ec1c10eb8f37fb7f23ca1
+ size 4682987944
model-00013-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1045819da1f453f943246137f81b65efc69c6e709ea0f3b03a29dd666fba45d6
+ size 4682987944
model-00014-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:22f1c17f4628d5b015cfdb434d708e25183a0f6d52c747447846556c5c35126e
+ size 4682987944
model-00015-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a37c9130da041aa51f342cb969fa23ed1c60b66aa153eb29cac8d6abdea7716
+ size 4682987944
model-00016-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0be610c454213834f3cc8f9e5d0c5313a0a6db44abc995cd4b513014793bafe8
+ size 4682987944
model-00017-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b513bba5217708614cdc48b237e4d64fc853986b329b309f3a153ff95bef9c35
+ size 4682987944
model-00018-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4e82d76159e7f5128b5340ead3e9853da927a6fe8c1d942d91fad47dce4e401
+ size 4682987944
model-00019-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:080269243c47dd984069038c0d9b9523dddd28af118a01a75e7d24e0ae473348
+ size 4682987944
model-00020-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:91fb00fd19d1a0edca3852b78c1da976f5485d07eb85c97555b8d2a83208eefc
+ size 4682987944
model-00021-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:680b497a3fba4f46ba2e58b3d6e42cc3e24759201dc8508d6796635336f40b9f
+ size 4682987944
model-00022-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ecf8a45e1bf6090b955e2950e3502c9190fbc10151659f3877d7e82a469d6a06
+ size 4682987944
model-00023-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1719291693a9a86b7ddfc084d9c5766fe1506d1e8085d18159c528834c768faa
+ size 4682987944
model-00024-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8073d3fef1e7b5a2cf4957e0aff920934b4c83307bdb364abda5acf388f2543c
+ size 4682987944
model-00025-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2c1463c72b386cf18adc5d3f62741972a631f786f62dfd4c9e1bbda883236850
+ size 4682987944
model-00026-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b2910fb780f3264214c8412f4e884bf5f7db91271298f5fe9fec8497d94fa7f
+ size 4682987944
model-00027-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1e3d1f2070e9fd8060e2ccdec34987a0f1db6dd3c1eed3ba88648cdd8a4e719b
+ size 4682987944
model-00028-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9db84ae7f4c2f508c4f9747a65bb0531fcdf55328cbda889a39dffe2972c90c4
+ size 4682987944
model-00029-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30478b2ef5afc1a81c03104fb48b599bae0dc9fd2ace5b48deb154fdb7d09187
+ size 4682987944
model-00030-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:53e3df03122659c73c85b773baa50e21c801908a765bcb82f9d2b0cfc3ce6188
+ size 4682987944
model-00031-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e2684b44e2277a2b4196930f8c44d7b2b632d9b0413a5f91881df5a21ce91b98
+ size 4682987944
model-00032-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:832f086aeea724d81096328c2259f91d85b7dbddcfa7b57b119102dead554a80
+ size 4682987944
model-00033-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:03f861d2aaf6f77b3d7eb4ac47a929b702ab1392d513d83631217a949965fe2f
+ size 4682987944
model-00034-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fd2148823bc4bc25c5822a8601a4e2726c73fdd908516cd166261638f27bda1f
+ size 4682987944
model-00035-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ed3a44b1b66693114c5d1c3fb3116298dc6e839dc31b8b031f89c608927efe1
+ size 4682987944
model-00036-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:782b01077392587c286d8d537043f08146b5c909efa5d05f86055d0aef95ac72
+ size 4682987944
model-00037-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc77e0b723bafe2fbe573431270bcbd979757d1528567fb2f418a978f7c652b2
+ size 4682987944
model-00038-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:370915c6f37ffcd50a9a3d60bc971af933ae76f273e47b7f479c39a63c8b87d6
+ size 4682987944
model-00039-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7179e0c3f7762c32833bc83fdf4f9b23b3b813235322a4ef7fcfbd0f1a28cb9c
+ size 4682987944
model-00040-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0229c09e7451cee400cdf560b1d6a7783282f47417f2d9058dbe19fd82a2c792
+ size 4682987944
model-00041-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c998079cffa94f11a4a1153788f5df099d1cce8aced25ee5c6365ade6db88f5a
+ size 4682987944
model-00042-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:82e7a7961dba1bd588e435904d77b1e075f050f5df9fadadc69568b48166ff3e
+ size 4682987944
model-00043-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3d3fea0e9ea0fc4799409494cd7b5f36eb8eb179ae915188dc934fd7c3ac1ba5
+ size 4682987944
model-00044-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:38f591265541ac7f49b369804a85b725c3abcf7758ce4bb722932ffa95ea5829
+ size 4682987944
model-00045-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4351c611e7fb1e461a7f22cd6efa53e61a7750678b2c3ac10d52d34821a234b6
+ size 4682987944
model-00046-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:808996721c7b746e409e1b350d287bd2c00a2a3560d5da30b279781ec260da65
+ size 4682987944
model-00047-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00e24e86a5e67d99029884f2d15c7f22d8aec48b76af1b5dd16d41e8997da1b6
+ size 4682987944
model-00048-of-00048.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a9ea28aeeee5682183381c9cbb220ba8136dddd4b02719c51da53b1bfdbd6d23
+ size 7233150024
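
The 48 pointers above give the on-disk size of every shard. As a rough consistency check, the sketch below (hypothetical, run before `git lfs pull` while each path still holds the three-line pointer) sums those sizes and compares them with metadata.total_size from model.safetensors.index.json; the file sizes come out slightly larger because each .safetensors file stores a small JSON header in front of the tensor bytes that total_size counts.

```python
# Sketch: sum the LFS pointer sizes of the 48 shards and compare against
# metadata.total_size (tensor bytes only) from the safetensors index.
import glob
import json
import re

def pointer_size(path):
    # Reads the 'size <bytes>' line of a Git LFS pointer file.
    with open(path) as f:
        return int(re.search(r"^size (\d+)$", f.read(), re.M).group(1))

shards = sorted(glob.glob("model-*-of-00048.safetensors"))
on_disk = sum(pointer_size(p) for p in shards)

with open("model.safetensors.index.json") as f:
    total = json.load(f)["metadata"]["total_size"]

print(len(shards), "shards")
print("sum of pointer sizes:", on_disk)
print("metadata.total_size :", total)  # 220937649664, roughly 206 GiB of tensor data
```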
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
+ {"metadata": {"total_size": 220937649664}, "weight_map": {"lm_head.weight": "model-00001-of-00048.safetensors", "model.embed_tokens.weight": "model-00001-of-00048.safetensors", "model.norm.weight": "model-00001-of-00048.safetensors", "model.layers.0.input_layernorm.weight": "model-00002-of-00048.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00002-of-00048.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00002-of-00048.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00002-of-00048.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00002-of-00048.safetensors", "model.layers.0.self_attn.k_proj.bias": "model-00002-of-00048.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00002-of-00048.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00002-of-00048.safetensors", "model.layers.0.self_attn.q_proj.bias": "model-00002-of-00048.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00002-of-00048.safetensors", "model.layers.0.self_attn.v_proj.bias": "model-00002-of-00048.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00002-of-00048.safetensors", "model.layers.11.input_layernorm.weight": "model-00013-of-00048.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00013-of-00048.safetensors", "model.layers.11.self_attn.k_proj.bias": "model-00013-of-00048.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00013-of-00048.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00013-of-00048.safetensors", "model.layers.11.self_attn.q_proj.bias": "model-00013-of-00048.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00013-of-00048.safetensors", "model.layers.11.self_attn.v_proj.bias": "model-00013-of-00048.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00013-of-00048.safetensors", "model.layers.18.input_layernorm.weight": "model-00020-of-00048.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00020-of-00048.safetensors", "model.layers.18.self_attn.k_proj.bias": "model-00020-of-00048.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00020-of-00048.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00020-of-00048.safetensors", "model.layers.18.self_attn.q_proj.bias": "model-00020-of-00048.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00020-of-00048.safetensors", "model.layers.18.self_attn.v_proj.bias": "model-00020-of-00048.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00020-of-00048.safetensors", "model.layers.19.input_layernorm.weight": "model-00021-of-00048.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00021-of-00048.safetensors", "model.layers.19.self_attn.k_proj.bias": "model-00021-of-00048.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00021-of-00048.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00021-of-00048.safetensors", "model.layers.19.self_attn.q_proj.bias": "model-00021-of-00048.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00021-of-00048.safetensors", "model.layers.19.self_attn.v_proj.bias": "model-00021-of-00048.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00021-of-00048.safetensors", "model.layers.10.input_layernorm.weight": "model-00012-of-00048.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00012-of-00048.safetensors", "model.layers.10.self_attn.k_proj.bias": "model-00012-of-00048.safetensors", 
"model.layers.10.self_attn.k_proj.weight": "model-00012-of-00048.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00012-of-00048.safetensors", "model.layers.10.self_attn.q_proj.bias": "model-00012-of-00048.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00012-of-00048.safetensors", "model.layers.10.self_attn.v_proj.bias": "model-00012-of-00048.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00012-of-00048.safetensors", "model.layers.14.input_layernorm.weight": "model-00016-of-00048.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00016-of-00048.safetensors", "model.layers.14.self_attn.k_proj.bias": "model-00016-of-00048.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00016-of-00048.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00016-of-00048.safetensors", "model.layers.14.self_attn.q_proj.bias": "model-00016-of-00048.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00016-of-00048.safetensors", "model.layers.14.self_attn.v_proj.bias": "model-00016-of-00048.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00016-of-00048.safetensors", "model.layers.13.input_layernorm.weight": "model-00015-of-00048.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00015-of-00048.safetensors", "model.layers.13.self_attn.k_proj.bias": "model-00015-of-00048.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00015-of-00048.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00015-of-00048.safetensors", "model.layers.13.self_attn.q_proj.bias": "model-00015-of-00048.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00015-of-00048.safetensors", "model.layers.13.self_attn.v_proj.bias": "model-00015-of-00048.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00015-of-00048.safetensors", "model.layers.16.input_layernorm.weight": "model-00018-of-00048.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00018-of-00048.safetensors", "model.layers.16.self_attn.k_proj.bias": "model-00018-of-00048.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00018-of-00048.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00018-of-00048.safetensors", "model.layers.16.self_attn.q_proj.bias": "model-00018-of-00048.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00018-of-00048.safetensors", "model.layers.16.self_attn.v_proj.bias": "model-00018-of-00048.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00018-of-00048.safetensors", "model.layers.17.input_layernorm.weight": "model-00019-of-00048.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00019-of-00048.safetensors", "model.layers.17.self_attn.k_proj.bias": "model-00019-of-00048.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00019-of-00048.safetensors", "model.layers.17.self_attn.o_proj.weight": "model-00019-of-00048.safetensors", "model.layers.17.self_attn.q_proj.bias": "model-00019-of-00048.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00019-of-00048.safetensors", "model.layers.17.self_attn.v_proj.bias": "model-00019-of-00048.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00019-of-00048.safetensors", "model.layers.15.input_layernorm.weight": "model-00017-of-00048.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00017-of-00048.safetensors", "model.layers.15.self_attn.k_proj.bias": 
"model-00017-of-00048.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00017-of-00048.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00017-of-00048.safetensors", "model.layers.15.self_attn.q_proj.bias": "model-00017-of-00048.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00017-of-00048.safetensors", "model.layers.15.self_attn.v_proj.bias": "model-00017-of-00048.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00017-of-00048.safetensors", "model.layers.12.input_layernorm.weight": "model-00014-of-00048.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00014-of-00048.safetensors", "model.layers.12.self_attn.k_proj.bias": "model-00014-of-00048.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00014-of-00048.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00014-of-00048.safetensors", "model.layers.12.self_attn.q_proj.bias": "model-00014-of-00048.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00014-of-00048.safetensors", "model.layers.12.self_attn.v_proj.bias": "model-00014-of-00048.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00014-of-00048.safetensors", "model.layers.1.input_layernorm.weight": "model-00003-of-00048.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00003-of-00048.safetensors", "model.layers.1.self_attn.k_proj.bias": "model-00003-of-00048.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00003-of-00048.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00003-of-00048.safetensors", "model.layers.1.self_attn.q_proj.bias": "model-00003-of-00048.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00003-of-00048.safetensors", "model.layers.1.self_attn.v_proj.bias": "model-00003-of-00048.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00003-of-00048.safetensors", "model.layers.1.mlp.router.gate.weight": "model-00003-of-00048.safetensors", "model.layers.1.mlp.experts.w1": "model-00003-of-00048.safetensors", "model.layers.1.mlp.experts.w2": "model-00003-of-00048.safetensors", "model.layers.1.mlp.experts.w3": "model-00003-of-00048.safetensors", "model.layers.1.mlp.shared_expert.w1": "model-00003-of-00048.safetensors", "model.layers.1.mlp.shared_expert.w2": "model-00003-of-00048.safetensors", "model.layers.1.mlp.shared_expert.w3": "model-00003-of-00048.safetensors", "model.layers.1.mlp.expert_bias": "model-00003-of-00048.safetensors", "model.layers.10.mlp.router.gate.weight": "model-00012-of-00048.safetensors", "model.layers.10.mlp.experts.w1": "model-00012-of-00048.safetensors", "model.layers.10.mlp.experts.w2": "model-00012-of-00048.safetensors", "model.layers.10.mlp.experts.w3": "model-00012-of-00048.safetensors", "model.layers.10.mlp.shared_expert.w1": "model-00012-of-00048.safetensors", "model.layers.10.mlp.shared_expert.w2": "model-00012-of-00048.safetensors", "model.layers.10.mlp.shared_expert.w3": "model-00012-of-00048.safetensors", "model.layers.10.mlp.expert_bias": "model-00012-of-00048.safetensors", "model.layers.11.mlp.router.gate.weight": "model-00013-of-00048.safetensors", "model.layers.11.mlp.experts.w1": "model-00013-of-00048.safetensors", "model.layers.11.mlp.experts.w2": "model-00013-of-00048.safetensors", "model.layers.11.mlp.experts.w3": "model-00013-of-00048.safetensors", "model.layers.11.mlp.shared_expert.w1": "model-00013-of-00048.safetensors", "model.layers.11.mlp.shared_expert.w2": "model-00013-of-00048.safetensors", 
"model.layers.11.mlp.shared_expert.w3": "model-00013-of-00048.safetensors", "model.layers.11.mlp.expert_bias": "model-00013-of-00048.safetensors", "model.layers.12.mlp.router.gate.weight": "model-00014-of-00048.safetensors", "model.layers.12.mlp.experts.w1": "model-00014-of-00048.safetensors", "model.layers.12.mlp.experts.w2": "model-00014-of-00048.safetensors", "model.layers.12.mlp.experts.w3": "model-00014-of-00048.safetensors", "model.layers.12.mlp.shared_expert.w1": "model-00014-of-00048.safetensors", "model.layers.12.mlp.shared_expert.w2": "model-00014-of-00048.safetensors", "model.layers.12.mlp.shared_expert.w3": "model-00014-of-00048.safetensors", "model.layers.12.mlp.expert_bias": "model-00014-of-00048.safetensors", "model.layers.13.mlp.router.gate.weight": "model-00015-of-00048.safetensors", "model.layers.13.mlp.experts.w1": "model-00015-of-00048.safetensors", "model.layers.13.mlp.experts.w2": "model-00015-of-00048.safetensors", "model.layers.13.mlp.experts.w3": "model-00015-of-00048.safetensors", "model.layers.13.mlp.shared_expert.w1": "model-00015-of-00048.safetensors", "model.layers.13.mlp.shared_expert.w2": "model-00015-of-00048.safetensors", "model.layers.13.mlp.shared_expert.w3": "model-00015-of-00048.safetensors", "model.layers.13.mlp.expert_bias": "model-00015-of-00048.safetensors", "model.layers.14.mlp.router.gate.weight": "model-00016-of-00048.safetensors", "model.layers.14.mlp.experts.w1": "model-00016-of-00048.safetensors", "model.layers.14.mlp.experts.w2": "model-00016-of-00048.safetensors", "model.layers.14.mlp.experts.w3": "model-00016-of-00048.safetensors", "model.layers.14.mlp.shared_expert.w1": "model-00016-of-00048.safetensors", "model.layers.14.mlp.shared_expert.w2": "model-00016-of-00048.safetensors", "model.layers.14.mlp.shared_expert.w3": "model-00016-of-00048.safetensors", "model.layers.14.mlp.expert_bias": "model-00016-of-00048.safetensors", "model.layers.15.mlp.router.gate.weight": "model-00017-of-00048.safetensors", "model.layers.15.mlp.experts.w1": "model-00017-of-00048.safetensors", "model.layers.15.mlp.experts.w2": "model-00017-of-00048.safetensors", "model.layers.15.mlp.experts.w3": "model-00017-of-00048.safetensors", "model.layers.15.mlp.shared_expert.w1": "model-00017-of-00048.safetensors", "model.layers.15.mlp.shared_expert.w2": "model-00017-of-00048.safetensors", "model.layers.15.mlp.shared_expert.w3": "model-00017-of-00048.safetensors", "model.layers.15.mlp.expert_bias": "model-00017-of-00048.safetensors", "model.layers.16.mlp.router.gate.weight": "model-00018-of-00048.safetensors", "model.layers.16.mlp.experts.w1": "model-00018-of-00048.safetensors", "model.layers.16.mlp.experts.w2": "model-00018-of-00048.safetensors", "model.layers.16.mlp.experts.w3": "model-00018-of-00048.safetensors", "model.layers.16.mlp.shared_expert.w1": "model-00018-of-00048.safetensors", "model.layers.16.mlp.shared_expert.w2": "model-00018-of-00048.safetensors", "model.layers.16.mlp.shared_expert.w3": "model-00018-of-00048.safetensors", "model.layers.16.mlp.expert_bias": "model-00018-of-00048.safetensors", "model.layers.17.mlp.router.gate.weight": "model-00019-of-00048.safetensors", "model.layers.17.mlp.experts.w1": "model-00019-of-00048.safetensors", "model.layers.17.mlp.experts.w2": "model-00019-of-00048.safetensors", "model.layers.17.mlp.experts.w3": "model-00019-of-00048.safetensors", "model.layers.17.mlp.shared_expert.w1": "model-00019-of-00048.safetensors", "model.layers.17.mlp.shared_expert.w2": "model-00019-of-00048.safetensors", 
"model.layers.17.mlp.shared_expert.w3": "model-00019-of-00048.safetensors", "model.layers.17.mlp.expert_bias": "model-00019-of-00048.safetensors", "model.layers.18.mlp.router.gate.weight": "model-00020-of-00048.safetensors", "model.layers.18.mlp.experts.w1": "model-00020-of-00048.safetensors", "model.layers.18.mlp.experts.w2": "model-00020-of-00048.safetensors", "model.layers.18.mlp.experts.w3": "model-00020-of-00048.safetensors", "model.layers.18.mlp.shared_expert.w1": "model-00020-of-00048.safetensors", "model.layers.18.mlp.shared_expert.w2": "model-00020-of-00048.safetensors", "model.layers.18.mlp.shared_expert.w3": "model-00020-of-00048.safetensors", "model.layers.18.mlp.expert_bias": "model-00020-of-00048.safetensors", "model.layers.19.mlp.router.gate.weight": "model-00021-of-00048.safetensors", "model.layers.19.mlp.experts.w1": "model-00021-of-00048.safetensors", "model.layers.19.mlp.experts.w2": "model-00021-of-00048.safetensors", "model.layers.19.mlp.experts.w3": "model-00021-of-00048.safetensors", "model.layers.19.mlp.shared_expert.w1": "model-00021-of-00048.safetensors", "model.layers.19.mlp.shared_expert.w2": "model-00021-of-00048.safetensors", "model.layers.19.mlp.shared_expert.w3": "model-00021-of-00048.safetensors", "model.layers.19.mlp.expert_bias": "model-00021-of-00048.safetensors", "model.layers.21.input_layernorm.weight": "model-00023-of-00048.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00023-of-00048.safetensors", "model.layers.21.self_attn.k_proj.bias": "model-00023-of-00048.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00023-of-00048.safetensors", "model.layers.21.self_attn.o_proj.weight": "model-00023-of-00048.safetensors", "model.layers.21.self_attn.q_proj.bias": "model-00023-of-00048.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00023-of-00048.safetensors", "model.layers.21.self_attn.v_proj.bias": "model-00023-of-00048.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00023-of-00048.safetensors", "model.layers.23.input_layernorm.weight": "model-00025-of-00048.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00025-of-00048.safetensors", "model.layers.23.self_attn.k_proj.bias": "model-00025-of-00048.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00025-of-00048.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00025-of-00048.safetensors", "model.layers.23.self_attn.q_proj.bias": "model-00025-of-00048.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00025-of-00048.safetensors", "model.layers.23.self_attn.v_proj.bias": "model-00025-of-00048.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00025-of-00048.safetensors", "model.layers.26.input_layernorm.weight": "model-00028-of-00048.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00028-of-00048.safetensors", "model.layers.26.self_attn.k_proj.bias": "model-00028-of-00048.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00028-of-00048.safetensors", "model.layers.26.self_attn.o_proj.weight": "model-00028-of-00048.safetensors", "model.layers.26.self_attn.q_proj.bias": "model-00028-of-00048.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00028-of-00048.safetensors", "model.layers.26.self_attn.v_proj.bias": "model-00028-of-00048.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00028-of-00048.safetensors", "model.layers.2.input_layernorm.weight": "model-00004-of-00048.safetensors", 
"model.layers.2.post_attention_layernorm.weight": "model-00004-of-00048.safetensors", "model.layers.2.self_attn.k_proj.bias": "model-00004-of-00048.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00004-of-00048.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00004-of-00048.safetensors", "model.layers.2.self_attn.q_proj.bias": "model-00004-of-00048.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00004-of-00048.safetensors", "model.layers.2.self_attn.v_proj.bias": "model-00004-of-00048.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00004-of-00048.safetensors", "model.layers.24.input_layernorm.weight": "model-00026-of-00048.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00026-of-00048.safetensors", "model.layers.24.self_attn.k_proj.bias": "model-00026-of-00048.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00026-of-00048.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00026-of-00048.safetensors", "model.layers.24.self_attn.q_proj.bias": "model-00026-of-00048.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00026-of-00048.safetensors", "model.layers.24.self_attn.v_proj.bias": "model-00026-of-00048.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00026-of-00048.safetensors", "model.layers.28.input_layernorm.weight": "model-00030-of-00048.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00030-of-00048.safetensors", "model.layers.28.self_attn.k_proj.bias": "model-00030-of-00048.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00030-of-00048.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00030-of-00048.safetensors", "model.layers.28.self_attn.q_proj.bias": "model-00030-of-00048.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00030-of-00048.safetensors", "model.layers.28.self_attn.v_proj.bias": "model-00030-of-00048.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00030-of-00048.safetensors", "model.layers.29.input_layernorm.weight": "model-00031-of-00048.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00031-of-00048.safetensors", "model.layers.29.self_attn.k_proj.bias": "model-00031-of-00048.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00031-of-00048.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00031-of-00048.safetensors", "model.layers.29.self_attn.q_proj.bias": "model-00031-of-00048.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00031-of-00048.safetensors", "model.layers.29.self_attn.v_proj.bias": "model-00031-of-00048.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00031-of-00048.safetensors", "model.layers.20.input_layernorm.weight": "model-00022-of-00048.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00022-of-00048.safetensors", "model.layers.20.self_attn.k_proj.bias": "model-00022-of-00048.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00022-of-00048.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00022-of-00048.safetensors", "model.layers.20.self_attn.q_proj.bias": "model-00022-of-00048.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00022-of-00048.safetensors", "model.layers.20.self_attn.v_proj.bias": "model-00022-of-00048.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00022-of-00048.safetensors", "model.layers.22.input_layernorm.weight": "model-00024-of-00048.safetensors", 
"model.layers.22.post_attention_layernorm.weight": "model-00024-of-00048.safetensors", "model.layers.22.self_attn.k_proj.bias": "model-00024-of-00048.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00024-of-00048.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00024-of-00048.safetensors", "model.layers.22.self_attn.q_proj.bias": "model-00024-of-00048.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00024-of-00048.safetensors", "model.layers.22.self_attn.v_proj.bias": "model-00024-of-00048.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00024-of-00048.safetensors", "model.layers.25.input_layernorm.weight": "model-00027-of-00048.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00027-of-00048.safetensors", "model.layers.25.self_attn.k_proj.bias": "model-00027-of-00048.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00027-of-00048.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00027-of-00048.safetensors", "model.layers.25.self_attn.q_proj.bias": "model-00027-of-00048.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00027-of-00048.safetensors", "model.layers.25.self_attn.v_proj.bias": "model-00027-of-00048.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00027-of-00048.safetensors", "model.layers.27.input_layernorm.weight": "model-00029-of-00048.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00029-of-00048.safetensors", "model.layers.27.self_attn.k_proj.bias": "model-00029-of-00048.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00029-of-00048.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00029-of-00048.safetensors", "model.layers.27.self_attn.q_proj.bias": "model-00029-of-00048.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00029-of-00048.safetensors", "model.layers.27.self_attn.v_proj.bias": "model-00029-of-00048.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00029-of-00048.safetensors", "model.layers.2.mlp.router.gate.weight": "model-00004-of-00048.safetensors", "model.layers.2.mlp.experts.w1": "model-00004-of-00048.safetensors", "model.layers.2.mlp.experts.w2": "model-00004-of-00048.safetensors", "model.layers.2.mlp.experts.w3": "model-00004-of-00048.safetensors", "model.layers.2.mlp.shared_expert.w1": "model-00004-of-00048.safetensors", "model.layers.2.mlp.shared_expert.w2": "model-00004-of-00048.safetensors", "model.layers.2.mlp.shared_expert.w3": "model-00004-of-00048.safetensors", "model.layers.2.mlp.expert_bias": "model-00004-of-00048.safetensors", "model.layers.20.mlp.router.gate.weight": "model-00022-of-00048.safetensors", "model.layers.20.mlp.experts.w1": "model-00022-of-00048.safetensors", "model.layers.20.mlp.experts.w2": "model-00022-of-00048.safetensors", "model.layers.20.mlp.experts.w3": "model-00022-of-00048.safetensors", "model.layers.20.mlp.shared_expert.w1": "model-00022-of-00048.safetensors", "model.layers.20.mlp.shared_expert.w2": "model-00022-of-00048.safetensors", "model.layers.20.mlp.shared_expert.w3": "model-00022-of-00048.safetensors", "model.layers.20.mlp.expert_bias": "model-00022-of-00048.safetensors", "model.layers.21.mlp.router.gate.weight": "model-00023-of-00048.safetensors", "model.layers.21.mlp.experts.w1": "model-00023-of-00048.safetensors", "model.layers.21.mlp.experts.w2": "model-00023-of-00048.safetensors", "model.layers.21.mlp.experts.w3": "model-00023-of-00048.safetensors", "model.layers.21.mlp.shared_expert.w1": 
"model-00023-of-00048.safetensors", "model.layers.21.mlp.shared_expert.w2": "model-00023-of-00048.safetensors", "model.layers.21.mlp.shared_expert.w3": "model-00023-of-00048.safetensors", "model.layers.21.mlp.expert_bias": "model-00023-of-00048.safetensors", "model.layers.22.mlp.router.gate.weight": "model-00024-of-00048.safetensors", "model.layers.22.mlp.experts.w1": "model-00024-of-00048.safetensors", "model.layers.22.mlp.experts.w2": "model-00024-of-00048.safetensors", "model.layers.22.mlp.experts.w3": "model-00024-of-00048.safetensors", "model.layers.22.mlp.shared_expert.w1": "model-00024-of-00048.safetensors", "model.layers.22.mlp.shared_expert.w2": "model-00024-of-00048.safetensors", "model.layers.22.mlp.shared_expert.w3": "model-00024-of-00048.safetensors", "model.layers.22.mlp.expert_bias": "model-00024-of-00048.safetensors", "model.layers.23.mlp.router.gate.weight": "model-00025-of-00048.safetensors", "model.layers.23.mlp.experts.w1": "model-00025-of-00048.safetensors", "model.layers.23.mlp.experts.w2": "model-00025-of-00048.safetensors", "model.layers.23.mlp.experts.w3": "model-00025-of-00048.safetensors", "model.layers.23.mlp.shared_expert.w1": "model-00025-of-00048.safetensors", "model.layers.23.mlp.shared_expert.w2": "model-00025-of-00048.safetensors", "model.layers.23.mlp.shared_expert.w3": "model-00025-of-00048.safetensors", "model.layers.23.mlp.expert_bias": "model-00025-of-00048.safetensors", "model.layers.24.mlp.router.gate.weight": "model-00026-of-00048.safetensors", "model.layers.24.mlp.experts.w1": "model-00026-of-00048.safetensors", "model.layers.24.mlp.experts.w2": "model-00026-of-00048.safetensors", "model.layers.24.mlp.experts.w3": "model-00026-of-00048.safetensors", "model.layers.24.mlp.shared_expert.w1": "model-00026-of-00048.safetensors", "model.layers.24.mlp.shared_expert.w2": "model-00026-of-00048.safetensors", "model.layers.24.mlp.shared_expert.w3": "model-00026-of-00048.safetensors", "model.layers.24.mlp.expert_bias": "model-00026-of-00048.safetensors", "model.layers.25.mlp.router.gate.weight": "model-00027-of-00048.safetensors", "model.layers.25.mlp.experts.w1": "model-00027-of-00048.safetensors", "model.layers.25.mlp.experts.w2": "model-00027-of-00048.safetensors", "model.layers.25.mlp.experts.w3": "model-00027-of-00048.safetensors", "model.layers.25.mlp.shared_expert.w1": "model-00027-of-00048.safetensors", "model.layers.25.mlp.shared_expert.w2": "model-00027-of-00048.safetensors", "model.layers.25.mlp.shared_expert.w3": "model-00027-of-00048.safetensors", "model.layers.25.mlp.expert_bias": "model-00027-of-00048.safetensors", "model.layers.26.mlp.router.gate.weight": "model-00028-of-00048.safetensors", "model.layers.26.mlp.experts.w1": "model-00028-of-00048.safetensors", "model.layers.26.mlp.experts.w2": "model-00028-of-00048.safetensors", "model.layers.26.mlp.experts.w3": "model-00028-of-00048.safetensors", "model.layers.26.mlp.shared_expert.w1": "model-00028-of-00048.safetensors", "model.layers.26.mlp.shared_expert.w2": "model-00028-of-00048.safetensors", "model.layers.26.mlp.shared_expert.w3": "model-00028-of-00048.safetensors", "model.layers.26.mlp.expert_bias": "model-00028-of-00048.safetensors", "model.layers.27.mlp.router.gate.weight": "model-00029-of-00048.safetensors", "model.layers.27.mlp.experts.w1": "model-00029-of-00048.safetensors", "model.layers.27.mlp.experts.w2": "model-00029-of-00048.safetensors", "model.layers.27.mlp.experts.w3": "model-00029-of-00048.safetensors", "model.layers.27.mlp.shared_expert.w1": 
"model-00029-of-00048.safetensors", "model.layers.27.mlp.shared_expert.w2": "model-00029-of-00048.safetensors", "model.layers.27.mlp.shared_expert.w3": "model-00029-of-00048.safetensors", "model.layers.27.mlp.expert_bias": "model-00029-of-00048.safetensors", "model.layers.28.mlp.router.gate.weight": "model-00030-of-00048.safetensors", "model.layers.28.mlp.experts.w1": "model-00030-of-00048.safetensors", "model.layers.28.mlp.experts.w2": "model-00030-of-00048.safetensors", "model.layers.28.mlp.experts.w3": "model-00030-of-00048.safetensors", "model.layers.28.mlp.shared_expert.w1": "model-00030-of-00048.safetensors", "model.layers.28.mlp.shared_expert.w2": "model-00030-of-00048.safetensors", "model.layers.28.mlp.shared_expert.w3": "model-00030-of-00048.safetensors", "model.layers.28.mlp.expert_bias": "model-00030-of-00048.safetensors", "model.layers.29.mlp.router.gate.weight": "model-00031-of-00048.safetensors", "model.layers.29.mlp.experts.w1": "model-00031-of-00048.safetensors", "model.layers.29.mlp.experts.w2": "model-00031-of-00048.safetensors", "model.layers.29.mlp.experts.w3": "model-00031-of-00048.safetensors", "model.layers.29.mlp.shared_expert.w1": "model-00031-of-00048.safetensors", "model.layers.29.mlp.shared_expert.w2": "model-00031-of-00048.safetensors", "model.layers.29.mlp.shared_expert.w3": "model-00031-of-00048.safetensors", "model.layers.29.mlp.expert_bias": "model-00031-of-00048.safetensors", "model.layers.35.input_layernorm.weight": "model-00037-of-00048.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00037-of-00048.safetensors", "model.layers.35.self_attn.k_proj.bias": "model-00037-of-00048.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00037-of-00048.safetensors", "model.layers.35.self_attn.o_proj.weight": "model-00037-of-00048.safetensors", "model.layers.35.self_attn.q_proj.bias": "model-00037-of-00048.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00037-of-00048.safetensors", "model.layers.35.self_attn.v_proj.bias": "model-00037-of-00048.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00037-of-00048.safetensors", "model.layers.37.input_layernorm.weight": "model-00039-of-00048.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00039-of-00048.safetensors", "model.layers.37.self_attn.k_proj.bias": "model-00039-of-00048.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00039-of-00048.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00039-of-00048.safetensors", "model.layers.37.self_attn.q_proj.bias": "model-00039-of-00048.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00039-of-00048.safetensors", "model.layers.37.self_attn.v_proj.bias": "model-00039-of-00048.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00039-of-00048.safetensors", "model.layers.38.input_layernorm.weight": "model-00040-of-00048.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00040-of-00048.safetensors", "model.layers.38.self_attn.k_proj.bias": "model-00040-of-00048.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00040-of-00048.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00040-of-00048.safetensors", "model.layers.38.self_attn.q_proj.bias": "model-00040-of-00048.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00040-of-00048.safetensors", "model.layers.38.self_attn.v_proj.bias": "model-00040-of-00048.safetensors", "model.layers.38.self_attn.v_proj.weight": 
"model-00040-of-00048.safetensors", "model.layers.33.input_layernorm.weight": "model-00035-of-00048.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00035-of-00048.safetensors", "model.layers.33.self_attn.k_proj.bias": "model-00035-of-00048.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00035-of-00048.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00035-of-00048.safetensors", "model.layers.33.self_attn.q_proj.bias": "model-00035-of-00048.safetensors", "model.layers.33.self_attn.q_proj.weight": "model-00035-of-00048.safetensors", "model.layers.33.self_attn.v_proj.bias": "model-00035-of-00048.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00035-of-00048.safetensors", "model.layers.3.input_layernorm.weight": "model-00005-of-00048.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00005-of-00048.safetensors", "model.layers.3.self_attn.k_proj.bias": "model-00005-of-00048.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00005-of-00048.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00005-of-00048.safetensors", "model.layers.3.self_attn.q_proj.bias": "model-00005-of-00048.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00005-of-00048.safetensors", "model.layers.3.self_attn.v_proj.bias": "model-00005-of-00048.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00005-of-00048.safetensors", "model.layers.36.input_layernorm.weight": "model-00038-of-00048.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00038-of-00048.safetensors", "model.layers.36.self_attn.k_proj.bias": "model-00038-of-00048.safetensors", "model.layers.36.self_attn.k_proj.weight": "model-00038-of-00048.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00038-of-00048.safetensors", "model.layers.36.self_attn.q_proj.bias": "model-00038-of-00048.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00038-of-00048.safetensors", "model.layers.36.self_attn.v_proj.bias": "model-00038-of-00048.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00038-of-00048.safetensors", "model.layers.30.input_layernorm.weight": "model-00032-of-00048.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00032-of-00048.safetensors", "model.layers.30.self_attn.k_proj.bias": "model-00032-of-00048.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00032-of-00048.safetensors", "model.layers.30.self_attn.o_proj.weight": "model-00032-of-00048.safetensors", "model.layers.30.self_attn.q_proj.bias": "model-00032-of-00048.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00032-of-00048.safetensors", "model.layers.30.self_attn.v_proj.bias": "model-00032-of-00048.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00032-of-00048.safetensors", "model.layers.31.input_layernorm.weight": "model-00033-of-00048.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00033-of-00048.safetensors", "model.layers.31.self_attn.k_proj.bias": "model-00033-of-00048.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00033-of-00048.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00033-of-00048.safetensors", "model.layers.31.self_attn.q_proj.bias": "model-00033-of-00048.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00033-of-00048.safetensors", "model.layers.31.self_attn.v_proj.bias": "model-00033-of-00048.safetensors", "model.layers.31.self_attn.v_proj.weight": 
"model-00033-of-00048.safetensors", "model.layers.32.input_layernorm.weight": "model-00034-of-00048.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00034-of-00048.safetensors", "model.layers.32.self_attn.k_proj.bias": "model-00034-of-00048.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00034-of-00048.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00034-of-00048.safetensors", "model.layers.32.self_attn.q_proj.bias": "model-00034-of-00048.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00034-of-00048.safetensors", "model.layers.32.self_attn.v_proj.bias": "model-00034-of-00048.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00034-of-00048.safetensors", "model.layers.34.input_layernorm.weight": "model-00036-of-00048.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00036-of-00048.safetensors", "model.layers.34.self_attn.k_proj.bias": "model-00036-of-00048.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00036-of-00048.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00036-of-00048.safetensors", "model.layers.34.self_attn.q_proj.bias": "model-00036-of-00048.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00036-of-00048.safetensors", "model.layers.34.self_attn.v_proj.bias": "model-00036-of-00048.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00036-of-00048.safetensors", "model.layers.39.input_layernorm.weight": "model-00041-of-00048.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00041-of-00048.safetensors", "model.layers.39.self_attn.k_proj.bias": "model-00041-of-00048.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00041-of-00048.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00041-of-00048.safetensors", "model.layers.39.self_attn.q_proj.bias": "model-00041-of-00048.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00041-of-00048.safetensors", "model.layers.39.self_attn.v_proj.bias": "model-00041-of-00048.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00041-of-00048.safetensors", "model.layers.3.mlp.router.gate.weight": "model-00005-of-00048.safetensors", "model.layers.3.mlp.experts.w1": "model-00005-of-00048.safetensors", "model.layers.3.mlp.experts.w2": "model-00005-of-00048.safetensors", "model.layers.3.mlp.experts.w3": "model-00005-of-00048.safetensors", "model.layers.3.mlp.shared_expert.w1": "model-00005-of-00048.safetensors", "model.layers.3.mlp.shared_expert.w2": "model-00005-of-00048.safetensors", "model.layers.3.mlp.shared_expert.w3": "model-00005-of-00048.safetensors", "model.layers.3.mlp.expert_bias": "model-00005-of-00048.safetensors", "model.layers.30.mlp.router.gate.weight": "model-00032-of-00048.safetensors", "model.layers.30.mlp.experts.w1": "model-00032-of-00048.safetensors", "model.layers.30.mlp.experts.w2": "model-00032-of-00048.safetensors", "model.layers.30.mlp.experts.w3": "model-00032-of-00048.safetensors", "model.layers.30.mlp.shared_expert.w1": "model-00032-of-00048.safetensors", "model.layers.30.mlp.shared_expert.w2": "model-00032-of-00048.safetensors", "model.layers.30.mlp.shared_expert.w3": "model-00032-of-00048.safetensors", "model.layers.30.mlp.expert_bias": "model-00032-of-00048.safetensors", "model.layers.31.mlp.router.gate.weight": "model-00033-of-00048.safetensors", "model.layers.31.mlp.experts.w1": "model-00033-of-00048.safetensors", "model.layers.31.mlp.experts.w2": 
"model-00033-of-00048.safetensors", "model.layers.31.mlp.experts.w3": "model-00033-of-00048.safetensors", "model.layers.31.mlp.shared_expert.w1": "model-00033-of-00048.safetensors", "model.layers.31.mlp.shared_expert.w2": "model-00033-of-00048.safetensors", "model.layers.31.mlp.shared_expert.w3": "model-00033-of-00048.safetensors", "model.layers.31.mlp.expert_bias": "model-00033-of-00048.safetensors", "model.layers.32.mlp.router.gate.weight": "model-00034-of-00048.safetensors", "model.layers.32.mlp.experts.w1": "model-00034-of-00048.safetensors", "model.layers.32.mlp.experts.w2": "model-00034-of-00048.safetensors", "model.layers.32.mlp.experts.w3": "model-00034-of-00048.safetensors", "model.layers.32.mlp.shared_expert.w1": "model-00034-of-00048.safetensors", "model.layers.32.mlp.shared_expert.w2": "model-00034-of-00048.safetensors", "model.layers.32.mlp.shared_expert.w3": "model-00034-of-00048.safetensors", "model.layers.32.mlp.expert_bias": "model-00034-of-00048.safetensors", "model.layers.33.mlp.router.gate.weight": "model-00035-of-00048.safetensors", "model.layers.33.mlp.experts.w1": "model-00035-of-00048.safetensors", "model.layers.33.mlp.experts.w2": "model-00035-of-00048.safetensors", "model.layers.33.mlp.experts.w3": "model-00035-of-00048.safetensors", "model.layers.33.mlp.shared_expert.w1": "model-00035-of-00048.safetensors", "model.layers.33.mlp.shared_expert.w2": "model-00035-of-00048.safetensors", "model.layers.33.mlp.shared_expert.w3": "model-00035-of-00048.safetensors", "model.layers.33.mlp.expert_bias": "model-00035-of-00048.safetensors", "model.layers.34.mlp.router.gate.weight": "model-00036-of-00048.safetensors", "model.layers.34.mlp.experts.w1": "model-00036-of-00048.safetensors", "model.layers.34.mlp.experts.w2": "model-00036-of-00048.safetensors", "model.layers.34.mlp.experts.w3": "model-00036-of-00048.safetensors", "model.layers.34.mlp.shared_expert.w1": "model-00036-of-00048.safetensors", "model.layers.34.mlp.shared_expert.w2": "model-00036-of-00048.safetensors", "model.layers.34.mlp.shared_expert.w3": "model-00036-of-00048.safetensors", "model.layers.34.mlp.expert_bias": "model-00036-of-00048.safetensors", "model.layers.35.mlp.router.gate.weight": "model-00037-of-00048.safetensors", "model.layers.35.mlp.experts.w1": "model-00037-of-00048.safetensors", "model.layers.35.mlp.experts.w2": "model-00037-of-00048.safetensors", "model.layers.35.mlp.experts.w3": "model-00037-of-00048.safetensors", "model.layers.35.mlp.shared_expert.w1": "model-00037-of-00048.safetensors", "model.layers.35.mlp.shared_expert.w2": "model-00037-of-00048.safetensors", "model.layers.35.mlp.shared_expert.w3": "model-00037-of-00048.safetensors", "model.layers.35.mlp.expert_bias": "model-00037-of-00048.safetensors", "model.layers.36.mlp.router.gate.weight": "model-00038-of-00048.safetensors", "model.layers.36.mlp.experts.w1": "model-00038-of-00048.safetensors", "model.layers.36.mlp.experts.w2": "model-00038-of-00048.safetensors", "model.layers.36.mlp.experts.w3": "model-00038-of-00048.safetensors", "model.layers.36.mlp.shared_expert.w1": "model-00038-of-00048.safetensors", "model.layers.36.mlp.shared_expert.w2": "model-00038-of-00048.safetensors", "model.layers.36.mlp.shared_expert.w3": "model-00038-of-00048.safetensors", "model.layers.36.mlp.expert_bias": "model-00038-of-00048.safetensors", "model.layers.37.mlp.router.gate.weight": "model-00039-of-00048.safetensors", "model.layers.37.mlp.experts.w1": "model-00039-of-00048.safetensors", "model.layers.37.mlp.experts.w2": 
"model-00039-of-00048.safetensors", "model.layers.37.mlp.experts.w3": "model-00039-of-00048.safetensors", "model.layers.37.mlp.shared_expert.w1": "model-00039-of-00048.safetensors", "model.layers.37.mlp.shared_expert.w2": "model-00039-of-00048.safetensors", "model.layers.37.mlp.shared_expert.w3": "model-00039-of-00048.safetensors", "model.layers.37.mlp.expert_bias": "model-00039-of-00048.safetensors", "model.layers.38.mlp.router.gate.weight": "model-00040-of-00048.safetensors", "model.layers.38.mlp.experts.w1": "model-00040-of-00048.safetensors", "model.layers.38.mlp.experts.w2": "model-00040-of-00048.safetensors", "model.layers.38.mlp.experts.w3": "model-00040-of-00048.safetensors", "model.layers.38.mlp.shared_expert.w1": "model-00040-of-00048.safetensors", "model.layers.38.mlp.shared_expert.w2": "model-00040-of-00048.safetensors", "model.layers.38.mlp.shared_expert.w3": "model-00040-of-00048.safetensors", "model.layers.38.mlp.expert_bias": "model-00040-of-00048.safetensors", "model.layers.39.mlp.router.gate.weight": "model-00041-of-00048.safetensors", "model.layers.39.mlp.experts.w1": "model-00041-of-00048.safetensors", "model.layers.39.mlp.experts.w2": "model-00041-of-00048.safetensors", "model.layers.39.mlp.experts.w3": "model-00041-of-00048.safetensors", "model.layers.39.mlp.shared_expert.w1": "model-00041-of-00048.safetensors", "model.layers.39.mlp.shared_expert.w2": "model-00041-of-00048.safetensors", "model.layers.39.mlp.shared_expert.w3": "model-00041-of-00048.safetensors", "model.layers.39.mlp.expert_bias": "model-00041-of-00048.safetensors", "model.layers.40.input_layernorm.weight": "model-00042-of-00048.safetensors", "model.layers.40.post_attention_layernorm.weight": "model-00042-of-00048.safetensors", "model.layers.40.self_attn.k_proj.bias": "model-00042-of-00048.safetensors", "model.layers.40.self_attn.k_proj.weight": "model-00042-of-00048.safetensors", "model.layers.40.self_attn.o_proj.weight": "model-00042-of-00048.safetensors", "model.layers.40.self_attn.q_proj.bias": "model-00042-of-00048.safetensors", "model.layers.40.self_attn.q_proj.weight": "model-00042-of-00048.safetensors", "model.layers.40.self_attn.v_proj.bias": "model-00042-of-00048.safetensors", "model.layers.40.self_attn.v_proj.weight": "model-00042-of-00048.safetensors", "model.layers.43.input_layernorm.weight": "model-00045-of-00048.safetensors", "model.layers.43.post_attention_layernorm.weight": "model-00045-of-00048.safetensors", "model.layers.43.self_attn.k_proj.bias": "model-00045-of-00048.safetensors", "model.layers.43.self_attn.k_proj.weight": "model-00045-of-00048.safetensors", "model.layers.43.self_attn.o_proj.weight": "model-00045-of-00048.safetensors", "model.layers.43.self_attn.q_proj.bias": "model-00045-of-00048.safetensors", "model.layers.43.self_attn.q_proj.weight": "model-00045-of-00048.safetensors", "model.layers.43.self_attn.v_proj.bias": "model-00045-of-00048.safetensors", "model.layers.43.self_attn.v_proj.weight": "model-00045-of-00048.safetensors", "model.layers.46.eh_proj.weight": "model-00048-of-00048.safetensors", "model.layers.46.enorm.weight": "model-00048-of-00048.safetensors", "model.layers.46.hnorm.weight": "model-00048-of-00048.safetensors", "model.layers.46.input_layernorm.weight": "model-00048-of-00048.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00048-of-00048.safetensors", "model.layers.46.self_attn.k_proj.bias": "model-00048-of-00048.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00048-of-00048.safetensors", 
"model.layers.46.self_attn.o_proj.weight": "model-00048-of-00048.safetensors", "model.layers.46.self_attn.q_proj.bias": "model-00048-of-00048.safetensors", "model.layers.46.self_attn.q_proj.weight": "model-00048-of-00048.safetensors", "model.layers.46.self_attn.v_proj.bias": "model-00048-of-00048.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00048-of-00048.safetensors", "model.layers.46.shared_head.norm.weight": "model-00048-of-00048.safetensors", "model.layers.45.input_layernorm.weight": "model-00047-of-00048.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00047-of-00048.safetensors", "model.layers.45.self_attn.k_proj.bias": "model-00047-of-00048.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00047-of-00048.safetensors", "model.layers.45.self_attn.o_proj.weight": "model-00047-of-00048.safetensors", "model.layers.45.self_attn.q_proj.bias": "model-00047-of-00048.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00047-of-00048.safetensors", "model.layers.45.self_attn.v_proj.bias": "model-00047-of-00048.safetensors", "model.layers.45.self_attn.v_proj.weight": "model-00047-of-00048.safetensors", "model.layers.46.shared_head.head.weight": "model-00048-of-00048.safetensors", "model.layers.46.embed_tokens.weight": "model-00048-of-00048.safetensors", "model.layers.42.input_layernorm.weight": "model-00044-of-00048.safetensors", "model.layers.42.post_attention_layernorm.weight": "model-00044-of-00048.safetensors", "model.layers.42.self_attn.k_proj.bias": "model-00044-of-00048.safetensors", "model.layers.42.self_attn.k_proj.weight": "model-00044-of-00048.safetensors", "model.layers.42.self_attn.o_proj.weight": "model-00044-of-00048.safetensors", "model.layers.42.self_attn.q_proj.bias": "model-00044-of-00048.safetensors", "model.layers.42.self_attn.q_proj.weight": "model-00044-of-00048.safetensors", "model.layers.42.self_attn.v_proj.bias": "model-00044-of-00048.safetensors", "model.layers.42.self_attn.v_proj.weight": "model-00044-of-00048.safetensors", "model.layers.44.input_layernorm.weight": "model-00046-of-00048.safetensors", "model.layers.44.post_attention_layernorm.weight": "model-00046-of-00048.safetensors", "model.layers.44.self_attn.k_proj.bias": "model-00046-of-00048.safetensors", "model.layers.44.self_attn.k_proj.weight": "model-00046-of-00048.safetensors", "model.layers.44.self_attn.o_proj.weight": "model-00046-of-00048.safetensors", "model.layers.44.self_attn.q_proj.bias": "model-00046-of-00048.safetensors", "model.layers.44.self_attn.q_proj.weight": "model-00046-of-00048.safetensors", "model.layers.44.self_attn.v_proj.bias": "model-00046-of-00048.safetensors", "model.layers.44.self_attn.v_proj.weight": "model-00046-of-00048.safetensors", "model.layers.4.input_layernorm.weight": "model-00006-of-00048.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00006-of-00048.safetensors", "model.layers.4.self_attn.k_proj.bias": "model-00006-of-00048.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00006-of-00048.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00006-of-00048.safetensors", "model.layers.4.self_attn.q_proj.bias": "model-00006-of-00048.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00006-of-00048.safetensors", "model.layers.4.self_attn.v_proj.bias": "model-00006-of-00048.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00006-of-00048.safetensors", "model.layers.41.input_layernorm.weight": "model-00043-of-00048.safetensors", 
"model.layers.41.post_attention_layernorm.weight": "model-00043-of-00048.safetensors", "model.layers.41.self_attn.k_proj.bias": "model-00043-of-00048.safetensors", "model.layers.41.self_attn.k_proj.weight": "model-00043-of-00048.safetensors", "model.layers.41.self_attn.o_proj.weight": "model-00043-of-00048.safetensors", "model.layers.41.self_attn.q_proj.bias": "model-00043-of-00048.safetensors", "model.layers.41.self_attn.q_proj.weight": "model-00043-of-00048.safetensors", "model.layers.41.self_attn.v_proj.bias": "model-00043-of-00048.safetensors", "model.layers.41.self_attn.v_proj.weight": "model-00043-of-00048.safetensors", "model.layers.4.mlp.router.gate.weight": "model-00006-of-00048.safetensors", "model.layers.4.mlp.experts.w1": "model-00006-of-00048.safetensors", "model.layers.4.mlp.experts.w2": "model-00006-of-00048.safetensors", "model.layers.4.mlp.experts.w3": "model-00006-of-00048.safetensors", "model.layers.4.mlp.shared_expert.w1": "model-00006-of-00048.safetensors", "model.layers.4.mlp.shared_expert.w2": "model-00006-of-00048.safetensors", "model.layers.4.mlp.shared_expert.w3": "model-00006-of-00048.safetensors", "model.layers.4.mlp.expert_bias": "model-00006-of-00048.safetensors", "model.layers.40.mlp.router.gate.weight": "model-00042-of-00048.safetensors", "model.layers.40.mlp.experts.w1": "model-00042-of-00048.safetensors", "model.layers.40.mlp.experts.w2": "model-00042-of-00048.safetensors", "model.layers.40.mlp.experts.w3": "model-00042-of-00048.safetensors", "model.layers.40.mlp.shared_expert.w1": "model-00042-of-00048.safetensors", "model.layers.40.mlp.shared_expert.w2": "model-00042-of-00048.safetensors", "model.layers.40.mlp.shared_expert.w3": "model-00042-of-00048.safetensors", "model.layers.40.mlp.expert_bias": "model-00042-of-00048.safetensors", "model.layers.41.mlp.router.gate.weight": "model-00043-of-00048.safetensors", "model.layers.41.mlp.experts.w1": "model-00043-of-00048.safetensors", "model.layers.41.mlp.experts.w2": "model-00043-of-00048.safetensors", "model.layers.41.mlp.experts.w3": "model-00043-of-00048.safetensors", "model.layers.41.mlp.shared_expert.w1": "model-00043-of-00048.safetensors", "model.layers.41.mlp.shared_expert.w2": "model-00043-of-00048.safetensors", "model.layers.41.mlp.shared_expert.w3": "model-00043-of-00048.safetensors", "model.layers.41.mlp.expert_bias": "model-00043-of-00048.safetensors", "model.layers.42.mlp.router.gate.weight": "model-00044-of-00048.safetensors", "model.layers.42.mlp.experts.w1": "model-00044-of-00048.safetensors", "model.layers.42.mlp.experts.w2": "model-00044-of-00048.safetensors", "model.layers.42.mlp.experts.w3": "model-00044-of-00048.safetensors", "model.layers.42.mlp.shared_expert.w1": "model-00044-of-00048.safetensors", "model.layers.42.mlp.shared_expert.w2": "model-00044-of-00048.safetensors", "model.layers.42.mlp.shared_expert.w3": "model-00044-of-00048.safetensors", "model.layers.42.mlp.expert_bias": "model-00044-of-00048.safetensors", "model.layers.43.mlp.router.gate.weight": "model-00045-of-00048.safetensors", "model.layers.43.mlp.experts.w1": "model-00045-of-00048.safetensors", "model.layers.43.mlp.experts.w2": "model-00045-of-00048.safetensors", "model.layers.43.mlp.experts.w3": "model-00045-of-00048.safetensors", "model.layers.43.mlp.shared_expert.w1": "model-00045-of-00048.safetensors", "model.layers.43.mlp.shared_expert.w2": "model-00045-of-00048.safetensors", "model.layers.43.mlp.shared_expert.w3": "model-00045-of-00048.safetensors", "model.layers.43.mlp.expert_bias": 
"model-00045-of-00048.safetensors", "model.layers.44.mlp.router.gate.weight": "model-00046-of-00048.safetensors", "model.layers.44.mlp.experts.w1": "model-00046-of-00048.safetensors", "model.layers.44.mlp.experts.w2": "model-00046-of-00048.safetensors", "model.layers.44.mlp.experts.w3": "model-00046-of-00048.safetensors", "model.layers.44.mlp.shared_expert.w1": "model-00046-of-00048.safetensors", "model.layers.44.mlp.shared_expert.w2": "model-00046-of-00048.safetensors", "model.layers.44.mlp.shared_expert.w3": "model-00046-of-00048.safetensors", "model.layers.44.mlp.expert_bias": "model-00046-of-00048.safetensors", "model.layers.45.mlp.router.gate.weight": "model-00047-of-00048.safetensors", "model.layers.45.mlp.experts.w1": "model-00047-of-00048.safetensors", "model.layers.45.mlp.experts.w2": "model-00047-of-00048.safetensors", "model.layers.45.mlp.experts.w3": "model-00047-of-00048.safetensors", "model.layers.45.mlp.shared_expert.w1": "model-00047-of-00048.safetensors", "model.layers.45.mlp.shared_expert.w2": "model-00047-of-00048.safetensors", "model.layers.45.mlp.shared_expert.w3": "model-00047-of-00048.safetensors", "model.layers.45.mlp.expert_bias": "model-00047-of-00048.safetensors", "model.layers.46.mlp.router.gate.weight": "model-00048-of-00048.safetensors", "model.layers.46.mlp.experts.w1": "model-00048-of-00048.safetensors", "model.layers.46.mlp.experts.w2": "model-00048-of-00048.safetensors", "model.layers.46.mlp.experts.w3": "model-00048-of-00048.safetensors", "model.layers.46.mlp.shared_expert.w1": "model-00048-of-00048.safetensors", "model.layers.46.mlp.shared_expert.w2": "model-00048-of-00048.safetensors", "model.layers.46.mlp.shared_expert.w3": "model-00048-of-00048.safetensors", "model.layers.46.mlp.expert_bias": "model-00048-of-00048.safetensors", "model.layers.5.input_layernorm.weight": "model-00007-of-00048.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00007-of-00048.safetensors", "model.layers.5.self_attn.k_proj.bias": "model-00007-of-00048.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00007-of-00048.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00007-of-00048.safetensors", "model.layers.5.self_attn.q_proj.bias": "model-00007-of-00048.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00007-of-00048.safetensors", "model.layers.5.self_attn.v_proj.bias": "model-00007-of-00048.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00007-of-00048.safetensors", "model.layers.5.mlp.router.gate.weight": "model-00007-of-00048.safetensors", "model.layers.5.mlp.experts.w1": "model-00007-of-00048.safetensors", "model.layers.5.mlp.experts.w2": "model-00007-of-00048.safetensors", "model.layers.5.mlp.experts.w3": "model-00007-of-00048.safetensors", "model.layers.5.mlp.shared_expert.w1": "model-00007-of-00048.safetensors", "model.layers.5.mlp.shared_expert.w2": "model-00007-of-00048.safetensors", "model.layers.5.mlp.shared_expert.w3": "model-00007-of-00048.safetensors", "model.layers.5.mlp.expert_bias": "model-00007-of-00048.safetensors", "model.layers.6.input_layernorm.weight": "model-00008-of-00048.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00008-of-00048.safetensors", "model.layers.6.self_attn.k_proj.bias": "model-00008-of-00048.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00008-of-00048.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00008-of-00048.safetensors", "model.layers.6.self_attn.q_proj.bias": "model-00008-of-00048.safetensors", 
"model.layers.6.self_attn.q_proj.weight": "model-00008-of-00048.safetensors", "model.layers.6.self_attn.v_proj.bias": "model-00008-of-00048.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00008-of-00048.safetensors", "model.layers.6.mlp.router.gate.weight": "model-00008-of-00048.safetensors", "model.layers.6.mlp.experts.w1": "model-00008-of-00048.safetensors", "model.layers.6.mlp.experts.w2": "model-00008-of-00048.safetensors", "model.layers.6.mlp.experts.w3": "model-00008-of-00048.safetensors", "model.layers.6.mlp.shared_expert.w1": "model-00008-of-00048.safetensors", "model.layers.6.mlp.shared_expert.w2": "model-00008-of-00048.safetensors", "model.layers.6.mlp.shared_expert.w3": "model-00008-of-00048.safetensors", "model.layers.6.mlp.expert_bias": "model-00008-of-00048.safetensors", "model.layers.7.input_layernorm.weight": "model-00009-of-00048.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00009-of-00048.safetensors", "model.layers.7.self_attn.k_proj.bias": "model-00009-of-00048.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00009-of-00048.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00009-of-00048.safetensors", "model.layers.7.self_attn.q_proj.bias": "model-00009-of-00048.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00009-of-00048.safetensors", "model.layers.7.self_attn.v_proj.bias": "model-00009-of-00048.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00009-of-00048.safetensors", "model.layers.7.mlp.router.gate.weight": "model-00009-of-00048.safetensors", "model.layers.7.mlp.experts.w1": "model-00009-of-00048.safetensors", "model.layers.7.mlp.experts.w2": "model-00009-of-00048.safetensors", "model.layers.7.mlp.experts.w3": "model-00009-of-00048.safetensors", "model.layers.7.mlp.shared_expert.w1": "model-00009-of-00048.safetensors", "model.layers.7.mlp.shared_expert.w2": "model-00009-of-00048.safetensors", "model.layers.7.mlp.shared_expert.w3": "model-00009-of-00048.safetensors", "model.layers.7.mlp.expert_bias": "model-00009-of-00048.safetensors", "model.layers.8.input_layernorm.weight": "model-00010-of-00048.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00010-of-00048.safetensors", "model.layers.8.self_attn.k_proj.bias": "model-00010-of-00048.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00010-of-00048.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00010-of-00048.safetensors", "model.layers.8.self_attn.q_proj.bias": "model-00010-of-00048.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00010-of-00048.safetensors", "model.layers.8.self_attn.v_proj.bias": "model-00010-of-00048.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00010-of-00048.safetensors", "model.layers.8.mlp.router.gate.weight": "model-00010-of-00048.safetensors", "model.layers.8.mlp.experts.w1": "model-00010-of-00048.safetensors", "model.layers.8.mlp.experts.w2": "model-00010-of-00048.safetensors", "model.layers.8.mlp.experts.w3": "model-00010-of-00048.safetensors", "model.layers.8.mlp.shared_expert.w1": "model-00010-of-00048.safetensors", "model.layers.8.mlp.shared_expert.w2": "model-00010-of-00048.safetensors", "model.layers.8.mlp.shared_expert.w3": "model-00010-of-00048.safetensors", "model.layers.8.mlp.expert_bias": "model-00010-of-00048.safetensors", "model.layers.9.input_layernorm.weight": "model-00011-of-00048.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00011-of-00048.safetensors", 
"model.layers.9.self_attn.k_proj.bias": "model-00011-of-00048.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00011-of-00048.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00011-of-00048.safetensors", "model.layers.9.self_attn.q_proj.bias": "model-00011-of-00048.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00011-of-00048.safetensors", "model.layers.9.self_attn.v_proj.bias": "model-00011-of-00048.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00011-of-00048.safetensors", "model.layers.9.mlp.router.gate.weight": "model-00011-of-00048.safetensors", "model.layers.9.mlp.experts.w1": "model-00011-of-00048.safetensors", "model.layers.9.mlp.experts.w2": "model-00011-of-00048.safetensors", "model.layers.9.mlp.experts.w3": "model-00011-of-00048.safetensors", "model.layers.9.mlp.shared_expert.w1": "model-00011-of-00048.safetensors", "model.layers.9.mlp.shared_expert.w2": "model-00011-of-00048.safetensors", "model.layers.9.mlp.shared_expert.w3": "model-00011-of-00048.safetensors", "model.layers.9.mlp.expert_bias": "model-00011-of-00048.safetensors"}}