Add files using upload-large-folder tool
- configuration_longcat_flash.py +216 -0
- model-00002-of-00113.safetensors +3 -0
- model-00003-of-00113.safetensors +3 -0
- model-00006-of-00113.safetensors +3 -0
- model-00007-of-00113.safetensors +3 -0
- model-00008-of-00113.safetensors +3 -0
- model-00009-of-00113.safetensors +3 -0
- model-00011-of-00113.safetensors +3 -0
- model-00012-of-00113.safetensors +3 -0
- model-00014-of-00113.safetensors +3 -0
- model-00023-of-00113.safetensors +3 -0
- model-00026-of-00113.safetensors +3 -0
- model-00029-of-00113.safetensors +3 -0
- model-00031-of-00113.safetensors +3 -0
- model-00032-of-00113.safetensors +3 -0
- model-00034-of-00113.safetensors +3 -0
- model-00038-of-00113.safetensors +3 -0
- model-00042-of-00113.safetensors +3 -0
- model-00044-of-00113.safetensors +3 -0
- model-00046-of-00113.safetensors +3 -0
- model-00047-of-00113.safetensors +3 -0
- model-00048-of-00113.safetensors +3 -0
- model-00049-of-00113.safetensors +3 -0
- model-00050-of-00113.safetensors +3 -0
- model-00055-of-00113.safetensors +3 -0
- model-00056-of-00113.safetensors +3 -0
- model-00059-of-00113.safetensors +3 -0
- model-00060-of-00113.safetensors +3 -0
- model-00062-of-00113.safetensors +3 -0
- model-00064-of-00113.safetensors +3 -0
- model-00067-of-00113.safetensors +3 -0
- model-00068-of-00113.safetensors +3 -0
- model-00070-of-00113.safetensors +3 -0
- model-00072-of-00113.safetensors +3 -0
- model-00075-of-00113.safetensors +3 -0
- model-00076-of-00113.safetensors +3 -0
- model-00079-of-00113.safetensors +3 -0
- model-00080-of-00113.safetensors +3 -0
- model-00083-of-00113.safetensors +3 -0
- model-00086-of-00113.safetensors +3 -0
- model-00088-of-00113.safetensors +3 -0
- model-00089-of-00113.safetensors +3 -0
- model-00091-of-00113.safetensors +3 -0
- model-00094-of-00113.safetensors +3 -0
- model-00098-of-00113.safetensors +3 -0
- model-00102-of-00113.safetensors +3 -0
- model-00107-of-00113.safetensors +3 -0
- model-00108-of-00113.safetensors +3 -0
- model-00110-of-00113.safetensors +3 -0
- model.safetensors.index.json +823 -0
configuration_longcat_flash.py
ADDED
@@ -0,0 +1,216 @@
"""LongcatFlash model configuration"""

from transformers.configuration_utils import PretrainedConfig
from transformers.modeling_rope_utils import rope_config_validation


LONGCAT_PRETRAINED_CONFIG_ARCHIVE_MAP = {}


class LongcatFlashConfig(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`LongcatFlashModel`]. It is used to instantiate a
    LongcatFlash model according to the specified arguments, defining the model architecture. Instantiating a
    configuration with the defaults will yield a similar configuration to that of the LongcatFlash.
    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.

    Args:
        vocab_size (`int`, *optional*, defaults to 131072):
            Vocabulary size of the LongcatFlash model. Defines the number of different tokens that can be represented
            by the `inputs_ids` passed when calling [`LongcatFlashModel`].
        hidden_size (`int`, *optional*, defaults to 7168):
            Dimension of the hidden representations.
        ffn_hidden_size (`int`, *optional*, defaults to 18432):
            Dimension of the MLP representations.
        expert_ffn_hidden_size (`int`, *optional*, defaults to 2048):
            Dimension of the MoE representations.
        num_layers (`int`, *optional*, defaults to 61):
            Number of hidden layers in the Transformer decoder.
        num_attention_heads (`int`, *optional*, defaults to 128):
            Number of attention heads for each attention layer in the Transformer decoder.
        num_key_value_heads (`int`, *optional*, defaults to 128):
            This is the number of key_value heads that should be used to implement Grouped Query Attention. If
            `num_key_value_heads=num_attention_heads`, the model will use Multi Head Attention (MHA); if
            `num_key_value_heads=1`, the model will use Multi Query Attention (MQA); otherwise GQA is used. When
            converting a multi-head checkpoint to a GQA checkpoint, each group key and value head should be constructed
            by meanpooling all the original heads within that group. For more details, check out [this
            paper](https://arxiv.org/pdf/2305.13245.pdf). If it is not specified, will default to
            `num_attention_heads`.
        n_routed_experts (`int`, *optional*, defaults to 256):
            Number of routed experts.
        routed_scaling_factor (`float`, *optional*, defaults to 1):
            Scaling factor for routed experts.
        kv_lora_rank (`int`, *optional*, defaults to 512):
            Rank of the LoRA matrices for key and value projections.
        q_lora_rank (`int`, *optional*, defaults to 1536):
            Rank of the LoRA matrices for query projections.
        qk_rope_head_dim (`int`, *optional*, defaults to 64):
            Dimension of the query/key heads that use rotary position embeddings.
        v_head_dim (`int`, *optional*, defaults to 128):
            Dimension of the value heads.
        qk_nope_head_dim (`int`, *optional*, defaults to 128):
            Dimension of the query/key heads that don't use rotary position embeddings.
        mla_scale_q_lora (`bool`, *optional*, defaults to `True`):
            Whether to scale the query LoRA projection in MLA attention.
        mla_scale_kv_lora (`bool`, *optional*, defaults to `True`):
            Whether to scale the key/value LoRA projection in MLA attention.
        moe_topk (`int`, *optional*, defaults to 8):
            Number of experts selected per token by the MoE router.
        norm_topk_prob (`bool`, *optional*, defaults to `False`):
            Whether to normalize the weights of the routed experts.
        hidden_act (`str` or `function`, *optional*, defaults to `"silu"`):
            The non-linear activation function (function or string) in the decoder.
        max_position_embeddings (`int`, *optional*, defaults to 4096):
            The maximum sequence length that this model might ever be used with.
        rms_norm_eps (`float`, *optional*, defaults to 1e-06):
            The epsilon used by the rms normalization layers.
        use_cache (`bool`, *optional*, defaults to `True`):
            Whether or not the model should return the last key/values attentions (not used by all models). Only
            relevant if `config.is_decoder=True`.
        pad_token_id (`int`, *optional*):
            Padding token id.
        bos_token_id (`int`, *optional*, defaults to 0):
            Beginning of stream token id.
        eos_token_id (`int`, *optional*, defaults to 1):
            End of stream token id.
        tie_word_embeddings (`bool`, *optional*, defaults to `False`):
            Whether to tie weight embeddings.
        rope_theta (`float`, *optional*, defaults to 10000.0):
            The base period of the RoPE embeddings.
        attention_bias (`bool`, *optional*, defaults to `False`):
            Whether to use a bias in the query, key, value and output projection layers during self-attention.
        attention_dropout (`float`, *optional*, defaults to 0.0):
            The dropout ratio for the attention probabilities.
        attention_method (`str`, *optional*, defaults to `"MLA"`):
            The attention method to use.
        initializer_range (`float`, *optional*, defaults to 0.006):
            The initializer range for the model.
        router_bias (`bool`, *optional*, defaults to `False`):
            Whether to use a bias in the router.
        zero_expert_num (`int`, *optional*, defaults to `None`):
            The number of zero experts to use.
        zero_expert_type (`str`, *optional*, defaults to `None`):
            The type of zero expert to use.

    ```python
    >>> from transformers import LongcatFlashModel, LongcatFlashConfig

    >>> # Initializing a LongcatFlash style configuration
    >>> configuration = LongcatFlashConfig()

    >>> # Initializing a model from the configuration
    >>> model = LongcatFlashModel(configuration)

    >>> # Accessing the model configuration
    >>> configuration = model.config
    ```"""

    model_type = "longcat_flash"
    keys_to_ignore_at_inference = ["past_key_values"]
    base_model_tp_plan = {
        "layers.*.self_attn.k_proj": "colwise",
        "layers.*.self_attn.v_proj": "colwise",
        "layers.*.self_attn.o_proj": "rowwise",
        "layers.*.mlp.experts.*.gate_proj": "local_colwise",
        "layers.*.mlp.experts.*.up_proj": "local_colwise",
        "layers.*.mlp.experts.*.down_proj": "local_rowwise",
        "layers.*.mlps.*.gate_proj": "local_colwise",
        "layers.*.mlps.*.up_proj": "local_colwise",
        "layers.*.mlps.*.down_proj": "local_rowwise",
    }
    base_model_pp_plan = {
        "embed_tokens": (["input_ids"], ["inputs_embeds"]),
        "layers": (["hidden_states", "attention_mask"], ["hidden_states"]),
        "norm": (["hidden_states"], ["hidden_states"]),
    }

    def __init__(
        self,
        vocab_size=131072,
        hidden_size=7168,
        ffn_hidden_size=18432,
        expert_ffn_hidden_size=2048,
        num_layers=61,
        num_attention_heads=128,
        num_key_value_heads=None,
        n_routed_experts=256,
        routed_scaling_factor=1,
        kv_lora_rank=512,
        q_lora_rank=1536,
        qk_rope_head_dim=64,
        v_head_dim=128,
        qk_nope_head_dim=128,
        mla_scale_q_lora=True,
        mla_scale_kv_lora=True,
        moe_topk=8,
        norm_topk_prob=False,
        hidden_act="silu",
        max_position_embeddings=4096,
        rms_norm_eps=1e-6,
        use_cache=True,
        pad_token_id=None,
        bos_token_id=0,
        eos_token_id=1,
        tie_word_embeddings=False,
        rope_theta=10000.0,
        attention_bias=False,
        attention_dropout=0.0,
        attention_method="MLA",
        initializer_range=0.006,
        router_bias=False,
        zero_expert_num=None,
        zero_expert_type=None,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.ffn_hidden_size = ffn_hidden_size
        self.expert_ffn_hidden_size = expert_ffn_hidden_size
        self.num_layers = num_layers
        self.num_attention_heads = num_attention_heads
        self.n_routed_experts = n_routed_experts
        self.routed_scaling_factor = routed_scaling_factor
        self.kv_lora_rank = kv_lora_rank
        self.q_lora_rank = q_lora_rank
        self.qk_rope_head_dim = qk_rope_head_dim
        self.v_head_dim = v_head_dim
        self.qk_nope_head_dim = qk_nope_head_dim
        self.qk_head_dim = qk_nope_head_dim + qk_rope_head_dim
        self.moe_topk = moe_topk
        self.norm_topk_prob = norm_topk_prob
        self.mla_scale_q_lora = mla_scale_q_lora
        self.mla_scale_kv_lora = mla_scale_kv_lora
        self.attention_method = attention_method
        self.initializer_range = initializer_range
        self.router_bias = router_bias
        self.zero_expert_num = zero_expert_num
        self.zero_expert_type = zero_expert_type

        if self.attention_method == "MLA":
            self.head_dim = qk_rope_head_dim
        else:
            raise ValueError('attention_method should be one of ["MLA"]')

        if num_key_value_heads is None:
            num_key_value_heads = num_attention_heads

        self.num_key_value_heads = num_key_value_heads
        self.hidden_act = hidden_act
        self.rms_norm_eps = rms_norm_eps
        self.use_cache = use_cache
        self.rope_theta = rope_theta
        self.attention_bias = attention_bias
        self.attention_dropout = attention_dropout

        rope_config_validation(self)

        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            tie_word_embeddings=tie_word_embeddings,
            **kwargs,
        )

    @property
    def num_hidden_layers(self):
        return self.num_layers


__all__ = ["LongcatFlashConfig"]
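The hunk above is the full custom configuration module. As a minimal usage sketch (the import path and the small override values are hypothetical; the attribute names come from the class itself):

```python
# Minimal sketch, assuming configuration_longcat_flash.py is importable
# (e.g. when the repo is loaded with trust_remote_code=True).
from configuration_longcat_flash import LongcatFlashConfig

# Defaults reproduce the full-size model; these tiny overrides are
# illustrative only, for a quick local check.
config = LongcatFlashConfig(
    num_layers=2,        # hypothetical small value
    n_routed_experts=8,  # hypothetical
    moe_topk=2,          # hypothetical
)
print(config.qk_head_dim)        # 192 = qk_nope_head_dim (128) + qk_rope_head_dim (64)
print(config.num_hidden_layers)  # 2, alias for num_layers via the property
```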
model-00002-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c6c3642a43834d84bdd4b340c19a783800f85980ad2024607d7264850d62b056
size 12884902050
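The shard entries in this commit are three-line git-LFS pointer stubs like the one above, not the tensors themselves; the real payload is addressed by the sha256 oid and byte size. A hedged sketch of checking a downloaded shard against such a pointer (field layout per the git-lfs pointer spec; file paths hypothetical):

```python
# Sketch: verify a downloaded shard blob against its git-LFS pointer.
import hashlib

def parse_lfs_pointer(text: str) -> dict:
    """Parse the 'key value' lines of a git-LFS pointer file into a dict."""
    return dict(line.split(" ", 1) for line in text.strip().splitlines())

def verify(pointer_text: str, blob_path: str) -> bool:
    fields = parse_lfs_pointer(pointer_text)
    expected_oid = fields["oid"].removeprefix("sha256:")
    h = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
            h.update(chunk)
    return h.hexdigest() == expected_oid and True

# hypothetical usage:
# ok = verify(open("model-00002-of-00113.safetensors.pointer").read(),
#             "model-00002-of-00113.safetensors")
```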
model-00003-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ce899f54b6b938785051d263d88d29b11541db42c579b40955c83fb85cc94416
size 12884902048
model-00006-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:213f50fb5eb44ed85ffff93dba3f234300644f00d1f2d9f5c9a21d8c17d29da9
size 12884902050
model-00007-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8e5a4b55b2bae8242125904a1094fee1a260b2454122c605617779cda3b67fa8
size 12884902048
model-00008-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d37c34bb07abf1a947bf320ea94f459629f91912069ac0dbe13b019653192b92
size 12884902050
model-00009-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3fda5ecab837778e3a80805f417b2f0a5a2b91fc328f1a3c0d4db1cf37c5a497
size 1287190563
model-00011-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6ddd2c8bd3be49040f256125da2ad984f8caf7d656a38276ade670a5f29112e3
size 12884902048
model-00012-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b018db0ae69cd481983efe97d033e2573f97f1723d54c82856b238f9767702c0
size 12884902050
model-00014-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:74d5be04dc2f8eef67577525a3f50168ecf95c677c0c6f43c93234c471c49450
size 12884902050
model-00023-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ab24c83982b9a9cd4c45880efc004e94a166727baeb3b3000ea5291c56b5995b
size 12884902048
model-00026-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4e3e3eea1a4878f35b53434e0f4e222dfa6a3f6beb29c64fc2badd362064de77
size 12884902050
model-00029-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:00c12e3b0ec899692bc71dc9e2794310ffc75ac8e91a257f8cd372459018d21e
size 1287190571
model-00031-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cf7109ffdf447724c32944e7003cd19c6a3b7e844ffba387731895c3b64db569
size 12884902048
model-00032-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1a42a8f82909205d00b59f6259fa260c11661b088eaff667efcf9caf115532b0
size 12884902050
model-00034-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ff21fd4cb2d4ed3ee1f0b757296ff2c4340f0f9b6cac2b6d7836533be8359cdc
size 12884902050
model-00038-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e0219428bcb5e1321b7c2300ec3b529f967d1c60ba1bb8ee5365e2b318d6afdf
size 12884902050
model-00042-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:203d8c97ad9d0604d6008425e4b2ab3045a85e77d003237f4e4a17d338352a4b
size 12884902051
model-00044-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b4c51e3b26326657eac5655c49dffed9897657e0b892e67a34542ee7720e45be
size 12884902051
model-00046-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:86fde3754dc0e6a2e3d29d335b463d1be485d7053028c0cd732c1a228e37d5e2
size 12884902051
model-00047-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:49800421b65c53358cf6b18acaa584b7cc5615e0df63f52563f74bcef1327ec5
size 12884902049
model-00048-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:af00ddbceab437f69f76acb55f6a9e6001b0851087f7494a5580dc6c408fc06a
size 12884902051
model-00049-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bda8c9ce328ca72565bbb98bf6e1fb67ffecae8c2d373500143570e813b0575d
size 1287190597
model-00050-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:81b5621042ae4ad59e32e8ee8b1b15886240ac18a711f3165b318716a2f6051b
size 12884902051
model-00055-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6e71fb1de346a3441a9560414d81d5929d71913d9e8b190c7fa65c65436fa98a
size 12884902049
model-00056-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0633f1bc6af5c389f12ca6265ddc91d427bfbcc8224f335c7586d0b20b46e1d9
size 12884902051
model-00059-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c8e7c8fd08d707b5e355a4242eab2885fcd275e245f2414095c60ecb80221e1b
size 12884902049
model-00060-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0bf39fe6f4b766da028eebc22bc1b646feb11ff9e560c449bc90f61e61e55a7c
size 12884902051
model-00062-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:13c7ed633e24ec36ceef3b8dada55b1faefbff47da7da0d21e775dffd802335b
size 12884902051
model-00064-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3c5392e523c31bd4c4395fb3c9b731c075393908ae5272e21164d22375532e68
size 12884902051
model-00067-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3d02fede6144faf7c43e26bc956dec08c36fe8f1ff7ce595fa64cc18c7f74456
size 12884902049
model-00068-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7357d5d80c3de714d4dd2991ce0e4449de45b521f837fa5fd1c73dcc5aa60b3d
size 12884902051
model-00070-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d5a69cfad5750ff4d895aaa5c2e02418259e0401d53f869bee2a8f6649fee5a1
size 12884902051
model-00072-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bef1d87e5f6e90d76cfa7ac34a9ee53925b8596add97c05b18949673f71bd0f9
size 12884902051
model-00075-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e9b02a84d347c06001a2cb9fb8f851bbdf65eb64cab374568cc494f0058e1385
size 12884902049
model-00076-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ba090ebc267beccbea6b8cb853b9a175b460cf68f245485385bff0be8fd8dcad
size 12884902051
model-00079-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1bc8873de7fa7d09c4dce08aa4d8ade20d7b270466bf98130af94b2921280bad
size 12884902049
model-00080-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5746666d9350279d1321fa434d4fae445354c56015db1c5f46d163d3176f65a6
size 12884902051
model-00083-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:016c47119b339ce64d1abe16ffe2aa9dcf6e6003159f6d5b4812eeed4451ac37
size 12884902049
model-00086-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7c84ff641df1a3d371e9c674d49017fb93edfb806c1e6677e5fa4b2b87dbd8e6
size 12884902051
model-00088-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b983e979fc9cfdd201f6a8259ea694396767cda2d1d9614a06d7f5972bbb6762
size 12884902051
model-00089-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:40dee467454160e565468845220aa98304fd63ece02ec2bf6e616eaea3dae885
size 1287190609
model-00091-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:02cf08a286523031e29b17eeb758ee4c796a9c320a365d35f9d4ea649ce0b996
size 12884902049
model-00094-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ee6c35fa1bf91295266cc099cb3eb0d889878310ae9d4c2f167f60dc06bdd0ab
size 12884902051
model-00098-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1e4d118c4ae7b05a155bd0f497bd42801b8249dc087612deaa5cd370b283b099
size 12884902051
model-00102-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4b0c78078538fb7f490651a23a059fca1203f1b52bfe2dc68c70245fe7cc89b8
size 12884902051
model-00107-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b6ef77ab298df729e1d110d11500494170301e62ec819f8a8e50e2c1e9ae084d
size 12884902049
model-00108-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9e8d60abf6d39a3fc8916bd463e4f3c9439c32778667a54b6dc34f71f2d69f6e
size 12884902051
model-00110-of-00113.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:94c5e90c0e3a55290af9c2e31af73e5c3a665b35c4c6461c9d7a83954e681d60
size 12884902051
model.safetensors.index.json
ADDED
@@ -0,0 +1,823 @@
{
  "metadata": {
    "total_size": 1121594245120,
    "total_parameters": 560664958976
  },
  "weight_map": {
    "lm_head.weight": "model-00113-of-00113.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00113.safetensors",
    "model.layers.0.input_layernorm.0.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.input_layernorm.1.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.mlp.router.classifier.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.mlp.router.e_score_correction_bias": "model-00005-of-00113.safetensors",
    "model.layers.0.mlp.switch_mlp.down_proj.weight": "model-00004-of-00113.safetensors",
    "model.layers.0.mlp.switch_mlp.gate_proj.weight": "model-00002-of-00113.safetensors",
    "model.layers.0.mlp.switch_mlp.up_proj.weight": "model-00003-of-00113.safetensors",
    "model.layers.0.mlps.0.down_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.mlps.0.gate_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.mlps.0.up_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.mlps.1.down_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.mlps.1.gate_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.mlps.1.up_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.post_attention_layernorm.0.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.post_attention_layernorm.1.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.0.kv_a_layernorm.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.0.kv_a_proj_with_mqa.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.0.kv_b_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.0.o_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.0.q_a_layernorm.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.0.q_a_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.0.q_b_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.1.kv_a_layernorm.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.1.kv_a_proj_with_mqa.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.1.kv_b_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.1.o_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.1.q_a_layernorm.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.1.q_a_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.0.self_attn.1.q_b_proj.weight": "model-00005-of-00113.safetensors",
    "model.layers.1.input_layernorm.0.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.input_layernorm.1.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.mlp.router.classifier.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.mlp.router.e_score_correction_bias": "model-00009-of-00113.safetensors",
    "model.layers.1.mlp.switch_mlp.down_proj.weight": "model-00008-of-00113.safetensors",
    "model.layers.1.mlp.switch_mlp.gate_proj.weight": "model-00006-of-00113.safetensors",
    "model.layers.1.mlp.switch_mlp.up_proj.weight": "model-00007-of-00113.safetensors",
    "model.layers.1.mlps.0.down_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.mlps.0.gate_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.mlps.0.up_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.mlps.1.down_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.mlps.1.gate_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.mlps.1.up_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.post_attention_layernorm.0.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.post_attention_layernorm.1.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.0.kv_a_layernorm.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.0.kv_a_proj_with_mqa.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.0.kv_b_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.0.o_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.0.q_a_layernorm.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.0.q_a_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.0.q_b_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.1.kv_a_layernorm.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.1.kv_a_proj_with_mqa.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.1.kv_b_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.1.o_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.1.q_a_layernorm.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.1.q_a_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.1.self_attn.1.q_b_proj.weight": "model-00009-of-00113.safetensors",
    "model.layers.10.input_layernorm.0.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.input_layernorm.1.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.mlp.router.classifier.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.mlp.router.e_score_correction_bias": "model-00045-of-00113.safetensors",
    "model.layers.10.mlp.switch_mlp.down_proj.weight": "model-00044-of-00113.safetensors",
    "model.layers.10.mlp.switch_mlp.gate_proj.weight": "model-00042-of-00113.safetensors",
    "model.layers.10.mlp.switch_mlp.up_proj.weight": "model-00043-of-00113.safetensors",
    "model.layers.10.mlps.0.down_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.mlps.0.gate_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.mlps.0.up_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.mlps.1.down_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.mlps.1.gate_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.mlps.1.up_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.post_attention_layernorm.0.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.post_attention_layernorm.1.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.0.kv_a_layernorm.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.0.kv_a_proj_with_mqa.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.0.kv_b_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.0.o_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.0.q_a_layernorm.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.0.q_a_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.0.q_b_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.1.kv_a_layernorm.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.1.kv_a_proj_with_mqa.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.1.kv_b_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.1.o_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.1.q_a_layernorm.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.1.q_a_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.10.self_attn.1.q_b_proj.weight": "model-00045-of-00113.safetensors",
    "model.layers.11.input_layernorm.0.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.input_layernorm.1.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.mlp.router.classifier.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.mlp.router.e_score_correction_bias": "model-00049-of-00113.safetensors",
    "model.layers.11.mlp.switch_mlp.down_proj.weight": "model-00048-of-00113.safetensors",
    "model.layers.11.mlp.switch_mlp.gate_proj.weight": "model-00046-of-00113.safetensors",
    "model.layers.11.mlp.switch_mlp.up_proj.weight": "model-00047-of-00113.safetensors",
    "model.layers.11.mlps.0.down_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.mlps.0.gate_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.mlps.0.up_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.mlps.1.down_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.mlps.1.gate_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.mlps.1.up_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.post_attention_layernorm.0.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.post_attention_layernorm.1.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.0.kv_a_layernorm.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.0.kv_a_proj_with_mqa.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.0.kv_b_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.0.o_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.0.q_a_layernorm.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.0.q_a_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.0.q_b_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.1.kv_a_layernorm.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.1.kv_a_proj_with_mqa.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.1.kv_b_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.1.o_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.1.q_a_layernorm.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.1.q_a_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.11.self_attn.1.q_b_proj.weight": "model-00049-of-00113.safetensors",
    "model.layers.12.input_layernorm.0.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.input_layernorm.1.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.mlp.router.classifier.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.mlp.router.e_score_correction_bias": "model-00053-of-00113.safetensors",
    "model.layers.12.mlp.switch_mlp.down_proj.weight": "model-00052-of-00113.safetensors",
    "model.layers.12.mlp.switch_mlp.gate_proj.weight": "model-00050-of-00113.safetensors",
    "model.layers.12.mlp.switch_mlp.up_proj.weight": "model-00051-of-00113.safetensors",
    "model.layers.12.mlps.0.down_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.mlps.0.gate_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.mlps.0.up_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.mlps.1.down_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.mlps.1.gate_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.mlps.1.up_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.post_attention_layernorm.0.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.post_attention_layernorm.1.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.0.kv_a_layernorm.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.0.kv_a_proj_with_mqa.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.0.kv_b_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.0.o_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.0.q_a_layernorm.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.0.q_a_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.0.q_b_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.1.kv_a_layernorm.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.1.kv_a_proj_with_mqa.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.1.kv_b_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.1.o_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.1.q_a_layernorm.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.1.q_a_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.12.self_attn.1.q_b_proj.weight": "model-00053-of-00113.safetensors",
    "model.layers.13.input_layernorm.0.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.input_layernorm.1.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.mlp.router.classifier.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.mlp.router.e_score_correction_bias": "model-00057-of-00113.safetensors",
    "model.layers.13.mlp.switch_mlp.down_proj.weight": "model-00056-of-00113.safetensors",
    "model.layers.13.mlp.switch_mlp.gate_proj.weight": "model-00054-of-00113.safetensors",
    "model.layers.13.mlp.switch_mlp.up_proj.weight": "model-00055-of-00113.safetensors",
    "model.layers.13.mlps.0.down_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.mlps.0.gate_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.mlps.0.up_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.mlps.1.down_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.mlps.1.gate_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.mlps.1.up_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.post_attention_layernorm.0.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.post_attention_layernorm.1.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.0.kv_a_layernorm.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.0.kv_a_proj_with_mqa.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.0.kv_b_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.0.o_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.0.q_a_layernorm.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.0.q_a_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.0.q_b_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.1.kv_a_layernorm.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.1.kv_a_proj_with_mqa.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.1.kv_b_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.1.o_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.1.q_a_layernorm.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.1.q_a_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.13.self_attn.1.q_b_proj.weight": "model-00057-of-00113.safetensors",
    "model.layers.14.input_layernorm.0.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.input_layernorm.1.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.mlp.router.classifier.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.mlp.router.e_score_correction_bias": "model-00061-of-00113.safetensors",
    "model.layers.14.mlp.switch_mlp.down_proj.weight": "model-00060-of-00113.safetensors",
    "model.layers.14.mlp.switch_mlp.gate_proj.weight": "model-00058-of-00113.safetensors",
    "model.layers.14.mlp.switch_mlp.up_proj.weight": "model-00059-of-00113.safetensors",
    "model.layers.14.mlps.0.down_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.mlps.0.gate_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.mlps.0.up_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.mlps.1.down_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.mlps.1.gate_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.mlps.1.up_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.post_attention_layernorm.0.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.post_attention_layernorm.1.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.0.kv_a_layernorm.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.0.kv_a_proj_with_mqa.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.0.kv_b_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.0.o_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.0.q_a_layernorm.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.0.q_a_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.0.q_b_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.1.kv_a_layernorm.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.1.kv_a_proj_with_mqa.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.1.kv_b_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.1.o_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.1.q_a_layernorm.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.1.q_a_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.14.self_attn.1.q_b_proj.weight": "model-00061-of-00113.safetensors",
    "model.layers.15.input_layernorm.0.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.input_layernorm.1.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.mlp.router.classifier.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.mlp.router.e_score_correction_bias": "model-00065-of-00113.safetensors",
    "model.layers.15.mlp.switch_mlp.down_proj.weight": "model-00064-of-00113.safetensors",
    "model.layers.15.mlp.switch_mlp.gate_proj.weight": "model-00062-of-00113.safetensors",
    "model.layers.15.mlp.switch_mlp.up_proj.weight": "model-00063-of-00113.safetensors",
    "model.layers.15.mlps.0.down_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.mlps.0.gate_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.mlps.0.up_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.mlps.1.down_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.mlps.1.gate_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.mlps.1.up_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.post_attention_layernorm.0.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.post_attention_layernorm.1.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.0.kv_a_layernorm.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.0.kv_a_proj_with_mqa.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.0.kv_b_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.0.o_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.0.q_a_layernorm.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.0.q_a_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.0.q_b_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.1.kv_a_layernorm.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.1.kv_a_proj_with_mqa.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.1.kv_b_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.1.o_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.1.q_a_layernorm.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.1.q_a_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.15.self_attn.1.q_b_proj.weight": "model-00065-of-00113.safetensors",
    "model.layers.16.input_layernorm.0.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.input_layernorm.1.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.mlp.router.classifier.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.mlp.router.e_score_correction_bias": "model-00069-of-00113.safetensors",
    "model.layers.16.mlp.switch_mlp.down_proj.weight": "model-00068-of-00113.safetensors",
    "model.layers.16.mlp.switch_mlp.gate_proj.weight": "model-00066-of-00113.safetensors",
    "model.layers.16.mlp.switch_mlp.up_proj.weight": "model-00067-of-00113.safetensors",
    "model.layers.16.mlps.0.down_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.mlps.0.gate_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.mlps.0.up_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.mlps.1.down_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.mlps.1.gate_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.mlps.1.up_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.post_attention_layernorm.0.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.post_attention_layernorm.1.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.0.kv_a_layernorm.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.0.kv_a_proj_with_mqa.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.0.kv_b_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.0.o_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.0.q_a_layernorm.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.0.q_a_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.0.q_b_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.1.kv_a_layernorm.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.1.kv_a_proj_with_mqa.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.1.kv_b_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.1.o_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.1.q_a_layernorm.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.1.q_a_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.16.self_attn.1.q_b_proj.weight": "model-00069-of-00113.safetensors",
    "model.layers.17.input_layernorm.0.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.input_layernorm.1.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.mlp.router.classifier.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.mlp.router.e_score_correction_bias": "model-00073-of-00113.safetensors",
    "model.layers.17.mlp.switch_mlp.down_proj.weight": "model-00072-of-00113.safetensors",
    "model.layers.17.mlp.switch_mlp.gate_proj.weight": "model-00070-of-00113.safetensors",
    "model.layers.17.mlp.switch_mlp.up_proj.weight": "model-00071-of-00113.safetensors",
    "model.layers.17.mlps.0.down_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.mlps.0.gate_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.mlps.0.up_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.mlps.1.down_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.mlps.1.gate_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.mlps.1.up_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.post_attention_layernorm.0.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.post_attention_layernorm.1.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.0.kv_a_layernorm.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.0.kv_a_proj_with_mqa.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.0.kv_b_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.0.o_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.0.q_a_layernorm.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.0.q_a_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.0.q_b_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.1.kv_a_layernorm.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.1.kv_a_proj_with_mqa.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.1.kv_b_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.1.o_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.1.q_a_layernorm.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.1.q_a_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.17.self_attn.1.q_b_proj.weight": "model-00073-of-00113.safetensors",
    "model.layers.18.input_layernorm.0.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.input_layernorm.1.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.mlp.router.classifier.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.mlp.router.e_score_correction_bias": "model-00077-of-00113.safetensors",
    "model.layers.18.mlp.switch_mlp.down_proj.weight": "model-00076-of-00113.safetensors",
    "model.layers.18.mlp.switch_mlp.gate_proj.weight": "model-00074-of-00113.safetensors",
    "model.layers.18.mlp.switch_mlp.up_proj.weight": "model-00075-of-00113.safetensors",
    "model.layers.18.mlps.0.down_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.mlps.0.gate_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.mlps.0.up_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.mlps.1.down_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.mlps.1.gate_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.mlps.1.up_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.post_attention_layernorm.0.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.post_attention_layernorm.1.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.0.kv_a_layernorm.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.0.kv_a_proj_with_mqa.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.0.kv_b_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.0.o_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.0.q_a_layernorm.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.0.q_a_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.0.q_b_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.1.kv_a_layernorm.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.1.kv_a_proj_with_mqa.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.1.kv_b_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.1.o_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.1.q_a_layernorm.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.1.q_a_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.18.self_attn.1.q_b_proj.weight": "model-00077-of-00113.safetensors",
    "model.layers.19.input_layernorm.0.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.input_layernorm.1.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.mlp.router.classifier.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.mlp.router.e_score_correction_bias": "model-00081-of-00113.safetensors",
    "model.layers.19.mlp.switch_mlp.down_proj.weight": "model-00080-of-00113.safetensors",
    "model.layers.19.mlp.switch_mlp.gate_proj.weight": "model-00078-of-00113.safetensors",
    "model.layers.19.mlp.switch_mlp.up_proj.weight": "model-00079-of-00113.safetensors",
    "model.layers.19.mlps.0.down_proj.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.mlps.0.gate_proj.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.mlps.0.up_proj.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.mlps.1.down_proj.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.mlps.1.gate_proj.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.mlps.1.up_proj.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.post_attention_layernorm.0.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.post_attention_layernorm.1.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.0.kv_a_layernorm.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.0.kv_a_proj_with_mqa.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.0.kv_b_proj.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.0.o_proj.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.0.q_a_layernorm.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.0.q_a_proj.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.0.q_b_proj.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.1.kv_a_layernorm.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.1.kv_a_proj_with_mqa.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.1.kv_b_proj.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.1.o_proj.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.1.q_a_layernorm.weight": "model-00081-of-00113.safetensors",
    "model.layers.19.self_attn.1.q_a_proj.weight": "model-00081-of-00113.safetensors",
|
356 |
+
"model.layers.19.self_attn.1.q_b_proj.weight": "model-00081-of-00113.safetensors",
|
357 |
+
"model.layers.2.input_layernorm.0.weight": "model-00013-of-00113.safetensors",
|
358 |
+
"model.layers.2.input_layernorm.1.weight": "model-00013-of-00113.safetensors",
|
359 |
+
"model.layers.2.mlp.router.classifier.weight": "model-00013-of-00113.safetensors",
|
360 |
+
"model.layers.2.mlp.router.e_score_correction_bias": "model-00013-of-00113.safetensors",
|
361 |
+
"model.layers.2.mlp.switch_mlp.down_proj.weight": "model-00012-of-00113.safetensors",
|
362 |
+
"model.layers.2.mlp.switch_mlp.gate_proj.weight": "model-00010-of-00113.safetensors",
|
363 |
+
"model.layers.2.mlp.switch_mlp.up_proj.weight": "model-00011-of-00113.safetensors",
|
364 |
+
"model.layers.2.mlps.0.down_proj.weight": "model-00013-of-00113.safetensors",
|
365 |
+
"model.layers.2.mlps.0.gate_proj.weight": "model-00013-of-00113.safetensors",
|
366 |
+
"model.layers.2.mlps.0.up_proj.weight": "model-00013-of-00113.safetensors",
|
367 |
+
"model.layers.2.mlps.1.down_proj.weight": "model-00013-of-00113.safetensors",
|
368 |
+
"model.layers.2.mlps.1.gate_proj.weight": "model-00013-of-00113.safetensors",
|
369 |
+
"model.layers.2.mlps.1.up_proj.weight": "model-00013-of-00113.safetensors",
|
370 |
+
"model.layers.2.post_attention_layernorm.0.weight": "model-00013-of-00113.safetensors",
|
371 |
+
"model.layers.2.post_attention_layernorm.1.weight": "model-00013-of-00113.safetensors",
|
372 |
+
"model.layers.2.self_attn.0.kv_a_layernorm.weight": "model-00013-of-00113.safetensors",
|
373 |
+
"model.layers.2.self_attn.0.kv_a_proj_with_mqa.weight": "model-00013-of-00113.safetensors",
|
374 |
+
"model.layers.2.self_attn.0.kv_b_proj.weight": "model-00013-of-00113.safetensors",
|
375 |
+
"model.layers.2.self_attn.0.o_proj.weight": "model-00013-of-00113.safetensors",
|
376 |
+
"model.layers.2.self_attn.0.q_a_layernorm.weight": "model-00013-of-00113.safetensors",
|
377 |
+
"model.layers.2.self_attn.0.q_a_proj.weight": "model-00013-of-00113.safetensors",
|
378 |
+
"model.layers.2.self_attn.0.q_b_proj.weight": "model-00013-of-00113.safetensors",
|
379 |
+
"model.layers.2.self_attn.1.kv_a_layernorm.weight": "model-00013-of-00113.safetensors",
|
380 |
+
"model.layers.2.self_attn.1.kv_a_proj_with_mqa.weight": "model-00013-of-00113.safetensors",
|
381 |
+
"model.layers.2.self_attn.1.kv_b_proj.weight": "model-00013-of-00113.safetensors",
|
382 |
+
"model.layers.2.self_attn.1.o_proj.weight": "model-00013-of-00113.safetensors",
|
383 |
+
"model.layers.2.self_attn.1.q_a_layernorm.weight": "model-00013-of-00113.safetensors",
|
384 |
+
"model.layers.2.self_attn.1.q_a_proj.weight": "model-00013-of-00113.safetensors",
|
385 |
+
"model.layers.2.self_attn.1.q_b_proj.weight": "model-00013-of-00113.safetensors",
|
386 |
+
"model.layers.20.input_layernorm.0.weight": "model-00085-of-00113.safetensors",
|
387 |
+
"model.layers.20.input_layernorm.1.weight": "model-00085-of-00113.safetensors",
|
388 |
+
"model.layers.20.mlp.router.classifier.weight": "model-00085-of-00113.safetensors",
|
389 |
+
"model.layers.20.mlp.router.e_score_correction_bias": "model-00085-of-00113.safetensors",
|
390 |
+
"model.layers.20.mlp.switch_mlp.down_proj.weight": "model-00084-of-00113.safetensors",
|
391 |
+
"model.layers.20.mlp.switch_mlp.gate_proj.weight": "model-00082-of-00113.safetensors",
|
392 |
+
"model.layers.20.mlp.switch_mlp.up_proj.weight": "model-00083-of-00113.safetensors",
|
393 |
+
"model.layers.20.mlps.0.down_proj.weight": "model-00085-of-00113.safetensors",
|
394 |
+
"model.layers.20.mlps.0.gate_proj.weight": "model-00085-of-00113.safetensors",
|
395 |
+
"model.layers.20.mlps.0.up_proj.weight": "model-00085-of-00113.safetensors",
|
396 |
+
"model.layers.20.mlps.1.down_proj.weight": "model-00085-of-00113.safetensors",
|
397 |
+
"model.layers.20.mlps.1.gate_proj.weight": "model-00085-of-00113.safetensors",
|
398 |
+
"model.layers.20.mlps.1.up_proj.weight": "model-00085-of-00113.safetensors",
|
399 |
+
"model.layers.20.post_attention_layernorm.0.weight": "model-00085-of-00113.safetensors",
|
400 |
+
"model.layers.20.post_attention_layernorm.1.weight": "model-00085-of-00113.safetensors",
|
401 |
+
"model.layers.20.self_attn.0.kv_a_layernorm.weight": "model-00085-of-00113.safetensors",
|
402 |
+
"model.layers.20.self_attn.0.kv_a_proj_with_mqa.weight": "model-00085-of-00113.safetensors",
|
403 |
+
"model.layers.20.self_attn.0.kv_b_proj.weight": "model-00085-of-00113.safetensors",
|
404 |
+
"model.layers.20.self_attn.0.o_proj.weight": "model-00085-of-00113.safetensors",
|
405 |
+
"model.layers.20.self_attn.0.q_a_layernorm.weight": "model-00085-of-00113.safetensors",
|
406 |
+
"model.layers.20.self_attn.0.q_a_proj.weight": "model-00085-of-00113.safetensors",
|
407 |
+
"model.layers.20.self_attn.0.q_b_proj.weight": "model-00085-of-00113.safetensors",
|
408 |
+
"model.layers.20.self_attn.1.kv_a_layernorm.weight": "model-00085-of-00113.safetensors",
|
409 |
+
"model.layers.20.self_attn.1.kv_a_proj_with_mqa.weight": "model-00085-of-00113.safetensors",
|
410 |
+
"model.layers.20.self_attn.1.kv_b_proj.weight": "model-00085-of-00113.safetensors",
|
411 |
+
"model.layers.20.self_attn.1.o_proj.weight": "model-00085-of-00113.safetensors",
|
412 |
+
"model.layers.20.self_attn.1.q_a_layernorm.weight": "model-00085-of-00113.safetensors",
|
413 |
+
"model.layers.20.self_attn.1.q_a_proj.weight": "model-00085-of-00113.safetensors",
|
414 |
+
"model.layers.20.self_attn.1.q_b_proj.weight": "model-00085-of-00113.safetensors",
|
415 |
+
"model.layers.21.input_layernorm.0.weight": "model-00089-of-00113.safetensors",
|
416 |
+
"model.layers.21.input_layernorm.1.weight": "model-00089-of-00113.safetensors",
|
417 |
+
"model.layers.21.mlp.router.classifier.weight": "model-00089-of-00113.safetensors",
|
418 |
+
"model.layers.21.mlp.router.e_score_correction_bias": "model-00089-of-00113.safetensors",
|
419 |
+
"model.layers.21.mlp.switch_mlp.down_proj.weight": "model-00088-of-00113.safetensors",
|
420 |
+
"model.layers.21.mlp.switch_mlp.gate_proj.weight": "model-00086-of-00113.safetensors",
|
421 |
+
"model.layers.21.mlp.switch_mlp.up_proj.weight": "model-00087-of-00113.safetensors",
|
422 |
+
"model.layers.21.mlps.0.down_proj.weight": "model-00089-of-00113.safetensors",
|
423 |
+
"model.layers.21.mlps.0.gate_proj.weight": "model-00089-of-00113.safetensors",
|
424 |
+
"model.layers.21.mlps.0.up_proj.weight": "model-00089-of-00113.safetensors",
|
425 |
+
"model.layers.21.mlps.1.down_proj.weight": "model-00089-of-00113.safetensors",
|
426 |
+
"model.layers.21.mlps.1.gate_proj.weight": "model-00089-of-00113.safetensors",
|
427 |
+
"model.layers.21.mlps.1.up_proj.weight": "model-00089-of-00113.safetensors",
|
428 |
+
"model.layers.21.post_attention_layernorm.0.weight": "model-00089-of-00113.safetensors",
|
429 |
+
"model.layers.21.post_attention_layernorm.1.weight": "model-00089-of-00113.safetensors",
|
430 |
+
"model.layers.21.self_attn.0.kv_a_layernorm.weight": "model-00089-of-00113.safetensors",
|
431 |
+
"model.layers.21.self_attn.0.kv_a_proj_with_mqa.weight": "model-00089-of-00113.safetensors",
|
432 |
+
"model.layers.21.self_attn.0.kv_b_proj.weight": "model-00089-of-00113.safetensors",
|
433 |
+
"model.layers.21.self_attn.0.o_proj.weight": "model-00089-of-00113.safetensors",
|
434 |
+
"model.layers.21.self_attn.0.q_a_layernorm.weight": "model-00089-of-00113.safetensors",
|
435 |
+
"model.layers.21.self_attn.0.q_a_proj.weight": "model-00089-of-00113.safetensors",
|
436 |
+
"model.layers.21.self_attn.0.q_b_proj.weight": "model-00089-of-00113.safetensors",
|
437 |
+
"model.layers.21.self_attn.1.kv_a_layernorm.weight": "model-00089-of-00113.safetensors",
|
438 |
+
"model.layers.21.self_attn.1.kv_a_proj_with_mqa.weight": "model-00089-of-00113.safetensors",
|
439 |
+
"model.layers.21.self_attn.1.kv_b_proj.weight": "model-00089-of-00113.safetensors",
|
440 |
+
"model.layers.21.self_attn.1.o_proj.weight": "model-00089-of-00113.safetensors",
|
441 |
+
"model.layers.21.self_attn.1.q_a_layernorm.weight": "model-00089-of-00113.safetensors",
|
442 |
+
"model.layers.21.self_attn.1.q_a_proj.weight": "model-00089-of-00113.safetensors",
|
443 |
+
"model.layers.21.self_attn.1.q_b_proj.weight": "model-00089-of-00113.safetensors",
|
444 |
+
"model.layers.22.input_layernorm.0.weight": "model-00093-of-00113.safetensors",
|
445 |
+
"model.layers.22.input_layernorm.1.weight": "model-00093-of-00113.safetensors",
|
446 |
+
"model.layers.22.mlp.router.classifier.weight": "model-00093-of-00113.safetensors",
|
447 |
+
"model.layers.22.mlp.router.e_score_correction_bias": "model-00093-of-00113.safetensors",
|
448 |
+
"model.layers.22.mlp.switch_mlp.down_proj.weight": "model-00092-of-00113.safetensors",
|
449 |
+
"model.layers.22.mlp.switch_mlp.gate_proj.weight": "model-00090-of-00113.safetensors",
|
450 |
+
"model.layers.22.mlp.switch_mlp.up_proj.weight": "model-00091-of-00113.safetensors",
|
451 |
+
"model.layers.22.mlps.0.down_proj.weight": "model-00093-of-00113.safetensors",
|
452 |
+
"model.layers.22.mlps.0.gate_proj.weight": "model-00093-of-00113.safetensors",
|
453 |
+
"model.layers.22.mlps.0.up_proj.weight": "model-00093-of-00113.safetensors",
|
454 |
+
"model.layers.22.mlps.1.down_proj.weight": "model-00093-of-00113.safetensors",
|
455 |
+
"model.layers.22.mlps.1.gate_proj.weight": "model-00093-of-00113.safetensors",
|
456 |
+
"model.layers.22.mlps.1.up_proj.weight": "model-00093-of-00113.safetensors",
|
457 |
+
"model.layers.22.post_attention_layernorm.0.weight": "model-00093-of-00113.safetensors",
|
458 |
+
"model.layers.22.post_attention_layernorm.1.weight": "model-00093-of-00113.safetensors",
|
459 |
+
"model.layers.22.self_attn.0.kv_a_layernorm.weight": "model-00093-of-00113.safetensors",
|
460 |
+
"model.layers.22.self_attn.0.kv_a_proj_with_mqa.weight": "model-00093-of-00113.safetensors",
|
461 |
+
"model.layers.22.self_attn.0.kv_b_proj.weight": "model-00093-of-00113.safetensors",
|
462 |
+
"model.layers.22.self_attn.0.o_proj.weight": "model-00093-of-00113.safetensors",
|
463 |
+
"model.layers.22.self_attn.0.q_a_layernorm.weight": "model-00093-of-00113.safetensors",
|
464 |
+
"model.layers.22.self_attn.0.q_a_proj.weight": "model-00093-of-00113.safetensors",
|
465 |
+
"model.layers.22.self_attn.0.q_b_proj.weight": "model-00093-of-00113.safetensors",
|
466 |
+
"model.layers.22.self_attn.1.kv_a_layernorm.weight": "model-00093-of-00113.safetensors",
|
467 |
+
"model.layers.22.self_attn.1.kv_a_proj_with_mqa.weight": "model-00093-of-00113.safetensors",
|
468 |
+
"model.layers.22.self_attn.1.kv_b_proj.weight": "model-00093-of-00113.safetensors",
|
469 |
+
"model.layers.22.self_attn.1.o_proj.weight": "model-00093-of-00113.safetensors",
|
470 |
+
"model.layers.22.self_attn.1.q_a_layernorm.weight": "model-00093-of-00113.safetensors",
|
471 |
+
"model.layers.22.self_attn.1.q_a_proj.weight": "model-00093-of-00113.safetensors",
|
472 |
+
"model.layers.22.self_attn.1.q_b_proj.weight": "model-00093-of-00113.safetensors",
|
473 |
+
"model.layers.23.input_layernorm.0.weight": "model-00097-of-00113.safetensors",
|
474 |
+
"model.layers.23.input_layernorm.1.weight": "model-00097-of-00113.safetensors",
|
475 |
+
"model.layers.23.mlp.router.classifier.weight": "model-00097-of-00113.safetensors",
|
476 |
+
"model.layers.23.mlp.router.e_score_correction_bias": "model-00097-of-00113.safetensors",
|
477 |
+
"model.layers.23.mlp.switch_mlp.down_proj.weight": "model-00096-of-00113.safetensors",
|
478 |
+
"model.layers.23.mlp.switch_mlp.gate_proj.weight": "model-00094-of-00113.safetensors",
|
479 |
+
"model.layers.23.mlp.switch_mlp.up_proj.weight": "model-00095-of-00113.safetensors",
|
480 |
+
"model.layers.23.mlps.0.down_proj.weight": "model-00097-of-00113.safetensors",
|
481 |
+
"model.layers.23.mlps.0.gate_proj.weight": "model-00097-of-00113.safetensors",
|
482 |
+
"model.layers.23.mlps.0.up_proj.weight": "model-00097-of-00113.safetensors",
|
483 |
+
"model.layers.23.mlps.1.down_proj.weight": "model-00097-of-00113.safetensors",
|
484 |
+
"model.layers.23.mlps.1.gate_proj.weight": "model-00097-of-00113.safetensors",
|
485 |
+
"model.layers.23.mlps.1.up_proj.weight": "model-00097-of-00113.safetensors",
|
486 |
+
"model.layers.23.post_attention_layernorm.0.weight": "model-00097-of-00113.safetensors",
|
487 |
+
"model.layers.23.post_attention_layernorm.1.weight": "model-00097-of-00113.safetensors",
|
488 |
+
"model.layers.23.self_attn.0.kv_a_layernorm.weight": "model-00097-of-00113.safetensors",
|
489 |
+
"model.layers.23.self_attn.0.kv_a_proj_with_mqa.weight": "model-00097-of-00113.safetensors",
|
490 |
+
"model.layers.23.self_attn.0.kv_b_proj.weight": "model-00097-of-00113.safetensors",
|
491 |
+
"model.layers.23.self_attn.0.o_proj.weight": "model-00097-of-00113.safetensors",
|
492 |
+
"model.layers.23.self_attn.0.q_a_layernorm.weight": "model-00097-of-00113.safetensors",
|
493 |
+
"model.layers.23.self_attn.0.q_a_proj.weight": "model-00097-of-00113.safetensors",
|
494 |
+
"model.layers.23.self_attn.0.q_b_proj.weight": "model-00097-of-00113.safetensors",
|
495 |
+
"model.layers.23.self_attn.1.kv_a_layernorm.weight": "model-00097-of-00113.safetensors",
|
496 |
+
"model.layers.23.self_attn.1.kv_a_proj_with_mqa.weight": "model-00097-of-00113.safetensors",
|
497 |
+
"model.layers.23.self_attn.1.kv_b_proj.weight": "model-00097-of-00113.safetensors",
|
498 |
+
"model.layers.23.self_attn.1.o_proj.weight": "model-00097-of-00113.safetensors",
|
499 |
+
"model.layers.23.self_attn.1.q_a_layernorm.weight": "model-00097-of-00113.safetensors",
|
500 |
+
"model.layers.23.self_attn.1.q_a_proj.weight": "model-00097-of-00113.safetensors",
|
501 |
+
"model.layers.23.self_attn.1.q_b_proj.weight": "model-00097-of-00113.safetensors",
|
502 |
+
"model.layers.24.input_layernorm.0.weight": "model-00101-of-00113.safetensors",
|
503 |
+
"model.layers.24.input_layernorm.1.weight": "model-00101-of-00113.safetensors",
|
504 |
+
"model.layers.24.mlp.router.classifier.weight": "model-00101-of-00113.safetensors",
|
505 |
+
"model.layers.24.mlp.router.e_score_correction_bias": "model-00101-of-00113.safetensors",
|
506 |
+
"model.layers.24.mlp.switch_mlp.down_proj.weight": "model-00100-of-00113.safetensors",
|
507 |
+
"model.layers.24.mlp.switch_mlp.gate_proj.weight": "model-00098-of-00113.safetensors",
|
508 |
+
"model.layers.24.mlp.switch_mlp.up_proj.weight": "model-00099-of-00113.safetensors",
|
509 |
+
"model.layers.24.mlps.0.down_proj.weight": "model-00101-of-00113.safetensors",
|
510 |
+
"model.layers.24.mlps.0.gate_proj.weight": "model-00101-of-00113.safetensors",
|
511 |
+
"model.layers.24.mlps.0.up_proj.weight": "model-00101-of-00113.safetensors",
|
512 |
+
"model.layers.24.mlps.1.down_proj.weight": "model-00101-of-00113.safetensors",
|
513 |
+
"model.layers.24.mlps.1.gate_proj.weight": "model-00101-of-00113.safetensors",
|
514 |
+
"model.layers.24.mlps.1.up_proj.weight": "model-00101-of-00113.safetensors",
|
515 |
+
"model.layers.24.post_attention_layernorm.0.weight": "model-00101-of-00113.safetensors",
|
516 |
+
"model.layers.24.post_attention_layernorm.1.weight": "model-00101-of-00113.safetensors",
|
517 |
+
"model.layers.24.self_attn.0.kv_a_layernorm.weight": "model-00101-of-00113.safetensors",
|
518 |
+
"model.layers.24.self_attn.0.kv_a_proj_with_mqa.weight": "model-00101-of-00113.safetensors",
|
519 |
+
"model.layers.24.self_attn.0.kv_b_proj.weight": "model-00101-of-00113.safetensors",
|
520 |
+
"model.layers.24.self_attn.0.o_proj.weight": "model-00101-of-00113.safetensors",
|
521 |
+
"model.layers.24.self_attn.0.q_a_layernorm.weight": "model-00101-of-00113.safetensors",
|
522 |
+
"model.layers.24.self_attn.0.q_a_proj.weight": "model-00101-of-00113.safetensors",
|
523 |
+
"model.layers.24.self_attn.0.q_b_proj.weight": "model-00101-of-00113.safetensors",
|
524 |
+
"model.layers.24.self_attn.1.kv_a_layernorm.weight": "model-00101-of-00113.safetensors",
|
525 |
+
"model.layers.24.self_attn.1.kv_a_proj_with_mqa.weight": "model-00101-of-00113.safetensors",
|
526 |
+
"model.layers.24.self_attn.1.kv_b_proj.weight": "model-00101-of-00113.safetensors",
|
527 |
+
"model.layers.24.self_attn.1.o_proj.weight": "model-00101-of-00113.safetensors",
|
528 |
+
"model.layers.24.self_attn.1.q_a_layernorm.weight": "model-00101-of-00113.safetensors",
|
529 |
+
"model.layers.24.self_attn.1.q_a_proj.weight": "model-00101-of-00113.safetensors",
|
530 |
+
"model.layers.24.self_attn.1.q_b_proj.weight": "model-00101-of-00113.safetensors",
|
531 |
+
"model.layers.25.input_layernorm.0.weight": "model-00105-of-00113.safetensors",
|
532 |
+
"model.layers.25.input_layernorm.1.weight": "model-00105-of-00113.safetensors",
|
533 |
+
"model.layers.25.mlp.router.classifier.weight": "model-00105-of-00113.safetensors",
|
534 |
+
"model.layers.25.mlp.router.e_score_correction_bias": "model-00105-of-00113.safetensors",
|
535 |
+
"model.layers.25.mlp.switch_mlp.down_proj.weight": "model-00104-of-00113.safetensors",
|
536 |
+
"model.layers.25.mlp.switch_mlp.gate_proj.weight": "model-00102-of-00113.safetensors",
|
537 |
+
"model.layers.25.mlp.switch_mlp.up_proj.weight": "model-00103-of-00113.safetensors",
|
538 |
+
"model.layers.25.mlps.0.down_proj.weight": "model-00105-of-00113.safetensors",
|
539 |
+
"model.layers.25.mlps.0.gate_proj.weight": "model-00105-of-00113.safetensors",
|
540 |
+
"model.layers.25.mlps.0.up_proj.weight": "model-00105-of-00113.safetensors",
|
541 |
+
"model.layers.25.mlps.1.down_proj.weight": "model-00105-of-00113.safetensors",
|
542 |
+
"model.layers.25.mlps.1.gate_proj.weight": "model-00105-of-00113.safetensors",
|
543 |
+
"model.layers.25.mlps.1.up_proj.weight": "model-00105-of-00113.safetensors",
|
544 |
+
"model.layers.25.post_attention_layernorm.0.weight": "model-00105-of-00113.safetensors",
|
545 |
+
"model.layers.25.post_attention_layernorm.1.weight": "model-00105-of-00113.safetensors",
|
546 |
+
"model.layers.25.self_attn.0.kv_a_layernorm.weight": "model-00105-of-00113.safetensors",
|
547 |
+
"model.layers.25.self_attn.0.kv_a_proj_with_mqa.weight": "model-00105-of-00113.safetensors",
|
548 |
+
"model.layers.25.self_attn.0.kv_b_proj.weight": "model-00105-of-00113.safetensors",
|
549 |
+
"model.layers.25.self_attn.0.o_proj.weight": "model-00105-of-00113.safetensors",
|
550 |
+
"model.layers.25.self_attn.0.q_a_layernorm.weight": "model-00105-of-00113.safetensors",
|
551 |
+
"model.layers.25.self_attn.0.q_a_proj.weight": "model-00105-of-00113.safetensors",
|
552 |
+
"model.layers.25.self_attn.0.q_b_proj.weight": "model-00105-of-00113.safetensors",
|
553 |
+
"model.layers.25.self_attn.1.kv_a_layernorm.weight": "model-00105-of-00113.safetensors",
|
554 |
+
"model.layers.25.self_attn.1.kv_a_proj_with_mqa.weight": "model-00105-of-00113.safetensors",
|
555 |
+
"model.layers.25.self_attn.1.kv_b_proj.weight": "model-00105-of-00113.safetensors",
|
556 |
+
"model.layers.25.self_attn.1.o_proj.weight": "model-00105-of-00113.safetensors",
|
557 |
+
"model.layers.25.self_attn.1.q_a_layernorm.weight": "model-00105-of-00113.safetensors",
|
558 |
+
"model.layers.25.self_attn.1.q_a_proj.weight": "model-00105-of-00113.safetensors",
|
559 |
+
"model.layers.25.self_attn.1.q_b_proj.weight": "model-00105-of-00113.safetensors",
|
560 |
+
"model.layers.26.input_layernorm.0.weight": "model-00109-of-00113.safetensors",
|
561 |
+
"model.layers.26.input_layernorm.1.weight": "model-00109-of-00113.safetensors",
|
562 |
+
"model.layers.26.mlp.router.classifier.weight": "model-00109-of-00113.safetensors",
|
563 |
+
"model.layers.26.mlp.router.e_score_correction_bias": "model-00109-of-00113.safetensors",
|
564 |
+
"model.layers.26.mlp.switch_mlp.down_proj.weight": "model-00108-of-00113.safetensors",
|
565 |
+
"model.layers.26.mlp.switch_mlp.gate_proj.weight": "model-00106-of-00113.safetensors",
|
566 |
+
"model.layers.26.mlp.switch_mlp.up_proj.weight": "model-00107-of-00113.safetensors",
|
567 |
+
"model.layers.26.mlps.0.down_proj.weight": "model-00109-of-00113.safetensors",
|
568 |
+
"model.layers.26.mlps.0.gate_proj.weight": "model-00109-of-00113.safetensors",
|
569 |
+
"model.layers.26.mlps.0.up_proj.weight": "model-00109-of-00113.safetensors",
|
570 |
+
"model.layers.26.mlps.1.down_proj.weight": "model-00109-of-00113.safetensors",
|
571 |
+
"model.layers.26.mlps.1.gate_proj.weight": "model-00109-of-00113.safetensors",
|
572 |
+
"model.layers.26.mlps.1.up_proj.weight": "model-00109-of-00113.safetensors",
|
573 |
+
"model.layers.26.post_attention_layernorm.0.weight": "model-00109-of-00113.safetensors",
|
574 |
+
"model.layers.26.post_attention_layernorm.1.weight": "model-00109-of-00113.safetensors",
|
575 |
+
"model.layers.26.self_attn.0.kv_a_layernorm.weight": "model-00109-of-00113.safetensors",
|
576 |
+
"model.layers.26.self_attn.0.kv_a_proj_with_mqa.weight": "model-00109-of-00113.safetensors",
|
577 |
+
"model.layers.26.self_attn.0.kv_b_proj.weight": "model-00109-of-00113.safetensors",
|
578 |
+
"model.layers.26.self_attn.0.o_proj.weight": "model-00109-of-00113.safetensors",
|
579 |
+
"model.layers.26.self_attn.0.q_a_layernorm.weight": "model-00109-of-00113.safetensors",
|
580 |
+
"model.layers.26.self_attn.0.q_a_proj.weight": "model-00109-of-00113.safetensors",
|
581 |
+
"model.layers.26.self_attn.0.q_b_proj.weight": "model-00109-of-00113.safetensors",
|
582 |
+
"model.layers.26.self_attn.1.kv_a_layernorm.weight": "model-00109-of-00113.safetensors",
|
583 |
+
"model.layers.26.self_attn.1.kv_a_proj_with_mqa.weight": "model-00109-of-00113.safetensors",
|
584 |
+
"model.layers.26.self_attn.1.kv_b_proj.weight": "model-00109-of-00113.safetensors",
|
585 |
+
"model.layers.26.self_attn.1.o_proj.weight": "model-00109-of-00113.safetensors",
|
586 |
+
"model.layers.26.self_attn.1.q_a_layernorm.weight": "model-00109-of-00113.safetensors",
|
587 |
+
"model.layers.26.self_attn.1.q_a_proj.weight": "model-00109-of-00113.safetensors",
|
588 |
+
"model.layers.26.self_attn.1.q_b_proj.weight": "model-00109-of-00113.safetensors",
|
589 |
+
"model.layers.27.input_layernorm.0.weight": "model-00113-of-00113.safetensors",
|
590 |
+
"model.layers.27.input_layernorm.1.weight": "model-00113-of-00113.safetensors",
|
591 |
+
"model.layers.27.mlp.router.classifier.weight": "model-00113-of-00113.safetensors",
|
592 |
+
"model.layers.27.mlp.router.e_score_correction_bias": "model-00113-of-00113.safetensors",
|
593 |
+
"model.layers.27.mlp.switch_mlp.down_proj.weight": "model-00112-of-00113.safetensors",
|
594 |
+
"model.layers.27.mlp.switch_mlp.gate_proj.weight": "model-00110-of-00113.safetensors",
|
595 |
+
"model.layers.27.mlp.switch_mlp.up_proj.weight": "model-00111-of-00113.safetensors",
|
596 |
+
"model.layers.27.mlps.0.down_proj.weight": "model-00113-of-00113.safetensors",
|
597 |
+
"model.layers.27.mlps.0.gate_proj.weight": "model-00113-of-00113.safetensors",
|
598 |
+
"model.layers.27.mlps.0.up_proj.weight": "model-00113-of-00113.safetensors",
|
599 |
+
"model.layers.27.mlps.1.down_proj.weight": "model-00113-of-00113.safetensors",
|
600 |
+
"model.layers.27.mlps.1.gate_proj.weight": "model-00113-of-00113.safetensors",
|
601 |
+
"model.layers.27.mlps.1.up_proj.weight": "model-00113-of-00113.safetensors",
|
602 |
+
"model.layers.27.post_attention_layernorm.0.weight": "model-00113-of-00113.safetensors",
|
603 |
+
"model.layers.27.post_attention_layernorm.1.weight": "model-00113-of-00113.safetensors",
|
604 |
+
"model.layers.27.self_attn.0.kv_a_layernorm.weight": "model-00113-of-00113.safetensors",
|
605 |
+
"model.layers.27.self_attn.0.kv_a_proj_with_mqa.weight": "model-00113-of-00113.safetensors",
|
606 |
+
"model.layers.27.self_attn.0.kv_b_proj.weight": "model-00113-of-00113.safetensors",
|
607 |
+
"model.layers.27.self_attn.0.o_proj.weight": "model-00113-of-00113.safetensors",
|
608 |
+
"model.layers.27.self_attn.0.q_a_layernorm.weight": "model-00113-of-00113.safetensors",
|
609 |
+
"model.layers.27.self_attn.0.q_a_proj.weight": "model-00113-of-00113.safetensors",
|
610 |
+
"model.layers.27.self_attn.0.q_b_proj.weight": "model-00113-of-00113.safetensors",
|
611 |
+
"model.layers.27.self_attn.1.kv_a_layernorm.weight": "model-00113-of-00113.safetensors",
|
612 |
+
"model.layers.27.self_attn.1.kv_a_proj_with_mqa.weight": "model-00113-of-00113.safetensors",
|
613 |
+
"model.layers.27.self_attn.1.kv_b_proj.weight": "model-00113-of-00113.safetensors",
|
614 |
+
"model.layers.27.self_attn.1.o_proj.weight": "model-00113-of-00113.safetensors",
|
615 |
+
"model.layers.27.self_attn.1.q_a_layernorm.weight": "model-00113-of-00113.safetensors",
|
616 |
+
"model.layers.27.self_attn.1.q_a_proj.weight": "model-00113-of-00113.safetensors",
|
617 |
+
"model.layers.27.self_attn.1.q_b_proj.weight": "model-00113-of-00113.safetensors",
|
618 |
+
"model.layers.3.input_layernorm.0.weight": "model-00017-of-00113.safetensors",
|
619 |
+
"model.layers.3.input_layernorm.1.weight": "model-00017-of-00113.safetensors",
|
620 |
+
"model.layers.3.mlp.router.classifier.weight": "model-00017-of-00113.safetensors",
|
621 |
+
"model.layers.3.mlp.router.e_score_correction_bias": "model-00017-of-00113.safetensors",
|
622 |
+
"model.layers.3.mlp.switch_mlp.down_proj.weight": "model-00016-of-00113.safetensors",
|
623 |
+
"model.layers.3.mlp.switch_mlp.gate_proj.weight": "model-00014-of-00113.safetensors",
|
624 |
+
"model.layers.3.mlp.switch_mlp.up_proj.weight": "model-00015-of-00113.safetensors",
|
625 |
+
"model.layers.3.mlps.0.down_proj.weight": "model-00017-of-00113.safetensors",
|
626 |
+
"model.layers.3.mlps.0.gate_proj.weight": "model-00017-of-00113.safetensors",
|
627 |
+
"model.layers.3.mlps.0.up_proj.weight": "model-00017-of-00113.safetensors",
|
628 |
+
"model.layers.3.mlps.1.down_proj.weight": "model-00017-of-00113.safetensors",
|
629 |
+
"model.layers.3.mlps.1.gate_proj.weight": "model-00017-of-00113.safetensors",
|
630 |
+
"model.layers.3.mlps.1.up_proj.weight": "model-00017-of-00113.safetensors",
|
631 |
+
"model.layers.3.post_attention_layernorm.0.weight": "model-00017-of-00113.safetensors",
|
632 |
+
"model.layers.3.post_attention_layernorm.1.weight": "model-00017-of-00113.safetensors",
|
633 |
+
"model.layers.3.self_attn.0.kv_a_layernorm.weight": "model-00017-of-00113.safetensors",
|
634 |
+
"model.layers.3.self_attn.0.kv_a_proj_with_mqa.weight": "model-00017-of-00113.safetensors",
|
635 |
+
"model.layers.3.self_attn.0.kv_b_proj.weight": "model-00017-of-00113.safetensors",
|
636 |
+
"model.layers.3.self_attn.0.o_proj.weight": "model-00017-of-00113.safetensors",
|
637 |
+
"model.layers.3.self_attn.0.q_a_layernorm.weight": "model-00017-of-00113.safetensors",
|
638 |
+
"model.layers.3.self_attn.0.q_a_proj.weight": "model-00017-of-00113.safetensors",
|
639 |
+
"model.layers.3.self_attn.0.q_b_proj.weight": "model-00017-of-00113.safetensors",
|
640 |
+
"model.layers.3.self_attn.1.kv_a_layernorm.weight": "model-00017-of-00113.safetensors",
|
641 |
+
"model.layers.3.self_attn.1.kv_a_proj_with_mqa.weight": "model-00017-of-00113.safetensors",
|
642 |
+
"model.layers.3.self_attn.1.kv_b_proj.weight": "model-00017-of-00113.safetensors",
|
643 |
+
"model.layers.3.self_attn.1.o_proj.weight": "model-00017-of-00113.safetensors",
|
644 |
+
"model.layers.3.self_attn.1.q_a_layernorm.weight": "model-00017-of-00113.safetensors",
|
645 |
+
"model.layers.3.self_attn.1.q_a_proj.weight": "model-00017-of-00113.safetensors",
|
646 |
+
"model.layers.3.self_attn.1.q_b_proj.weight": "model-00017-of-00113.safetensors",
|
647 |
+
"model.layers.4.input_layernorm.0.weight": "model-00021-of-00113.safetensors",
|
648 |
+
"model.layers.4.input_layernorm.1.weight": "model-00021-of-00113.safetensors",
|
649 |
+
"model.layers.4.mlp.router.classifier.weight": "model-00021-of-00113.safetensors",
|
650 |
+
"model.layers.4.mlp.router.e_score_correction_bias": "model-00021-of-00113.safetensors",
|
651 |
+
"model.layers.4.mlp.switch_mlp.down_proj.weight": "model-00020-of-00113.safetensors",
|
652 |
+
"model.layers.4.mlp.switch_mlp.gate_proj.weight": "model-00018-of-00113.safetensors",
|
653 |
+
"model.layers.4.mlp.switch_mlp.up_proj.weight": "model-00019-of-00113.safetensors",
|
654 |
+
"model.layers.4.mlps.0.down_proj.weight": "model-00021-of-00113.safetensors",
|
655 |
+
"model.layers.4.mlps.0.gate_proj.weight": "model-00021-of-00113.safetensors",
|
656 |
+
"model.layers.4.mlps.0.up_proj.weight": "model-00021-of-00113.safetensors",
|
657 |
+
"model.layers.4.mlps.1.down_proj.weight": "model-00021-of-00113.safetensors",
|
658 |
+
"model.layers.4.mlps.1.gate_proj.weight": "model-00021-of-00113.safetensors",
|
659 |
+
"model.layers.4.mlps.1.up_proj.weight": "model-00021-of-00113.safetensors",
|
660 |
+
"model.layers.4.post_attention_layernorm.0.weight": "model-00021-of-00113.safetensors",
|
661 |
+
"model.layers.4.post_attention_layernorm.1.weight": "model-00021-of-00113.safetensors",
|
662 |
+
"model.layers.4.self_attn.0.kv_a_layernorm.weight": "model-00021-of-00113.safetensors",
|
663 |
+
"model.layers.4.self_attn.0.kv_a_proj_with_mqa.weight": "model-00021-of-00113.safetensors",
|
664 |
+
"model.layers.4.self_attn.0.kv_b_proj.weight": "model-00021-of-00113.safetensors",
|
665 |
+
"model.layers.4.self_attn.0.o_proj.weight": "model-00021-of-00113.safetensors",
|
666 |
+
"model.layers.4.self_attn.0.q_a_layernorm.weight": "model-00021-of-00113.safetensors",
|
667 |
+
"model.layers.4.self_attn.0.q_a_proj.weight": "model-00021-of-00113.safetensors",
|
668 |
+
"model.layers.4.self_attn.0.q_b_proj.weight": "model-00021-of-00113.safetensors",
|
669 |
+
"model.layers.4.self_attn.1.kv_a_layernorm.weight": "model-00021-of-00113.safetensors",
|
670 |
+
"model.layers.4.self_attn.1.kv_a_proj_with_mqa.weight": "model-00021-of-00113.safetensors",
|
671 |
+
"model.layers.4.self_attn.1.kv_b_proj.weight": "model-00021-of-00113.safetensors",
|
672 |
+
"model.layers.4.self_attn.1.o_proj.weight": "model-00021-of-00113.safetensors",
|
673 |
+
"model.layers.4.self_attn.1.q_a_layernorm.weight": "model-00021-of-00113.safetensors",
|
674 |
+
"model.layers.4.self_attn.1.q_a_proj.weight": "model-00021-of-00113.safetensors",
|
675 |
+
"model.layers.4.self_attn.1.q_b_proj.weight": "model-00021-of-00113.safetensors",
|
676 |
+
"model.layers.5.input_layernorm.0.weight": "model-00025-of-00113.safetensors",
|
677 |
+
"model.layers.5.input_layernorm.1.weight": "model-00025-of-00113.safetensors",
|
678 |
+
"model.layers.5.mlp.router.classifier.weight": "model-00025-of-00113.safetensors",
|
679 |
+
"model.layers.5.mlp.router.e_score_correction_bias": "model-00025-of-00113.safetensors",
|
680 |
+
"model.layers.5.mlp.switch_mlp.down_proj.weight": "model-00024-of-00113.safetensors",
|
681 |
+
"model.layers.5.mlp.switch_mlp.gate_proj.weight": "model-00022-of-00113.safetensors",
|
682 |
+
"model.layers.5.mlp.switch_mlp.up_proj.weight": "model-00023-of-00113.safetensors",
|
683 |
+
"model.layers.5.mlps.0.down_proj.weight": "model-00025-of-00113.safetensors",
|
684 |
+
"model.layers.5.mlps.0.gate_proj.weight": "model-00025-of-00113.safetensors",
|
685 |
+
"model.layers.5.mlps.0.up_proj.weight": "model-00025-of-00113.safetensors",
|
686 |
+
"model.layers.5.mlps.1.down_proj.weight": "model-00025-of-00113.safetensors",
|
687 |
+
"model.layers.5.mlps.1.gate_proj.weight": "model-00025-of-00113.safetensors",
|
688 |
+
"model.layers.5.mlps.1.up_proj.weight": "model-00025-of-00113.safetensors",
|
689 |
+
"model.layers.5.post_attention_layernorm.0.weight": "model-00025-of-00113.safetensors",
|
690 |
+
"model.layers.5.post_attention_layernorm.1.weight": "model-00025-of-00113.safetensors",
|
691 |
+
"model.layers.5.self_attn.0.kv_a_layernorm.weight": "model-00025-of-00113.safetensors",
|
692 |
+
"model.layers.5.self_attn.0.kv_a_proj_with_mqa.weight": "model-00025-of-00113.safetensors",
|
693 |
+
"model.layers.5.self_attn.0.kv_b_proj.weight": "model-00025-of-00113.safetensors",
|
694 |
+
"model.layers.5.self_attn.0.o_proj.weight": "model-00025-of-00113.safetensors",
|
695 |
+
"model.layers.5.self_attn.0.q_a_layernorm.weight": "model-00025-of-00113.safetensors",
|
696 |
+
"model.layers.5.self_attn.0.q_a_proj.weight": "model-00025-of-00113.safetensors",
|
697 |
+
"model.layers.5.self_attn.0.q_b_proj.weight": "model-00025-of-00113.safetensors",
|
698 |
+
"model.layers.5.self_attn.1.kv_a_layernorm.weight": "model-00025-of-00113.safetensors",
|
699 |
+
"model.layers.5.self_attn.1.kv_a_proj_with_mqa.weight": "model-00025-of-00113.safetensors",
|
700 |
+
"model.layers.5.self_attn.1.kv_b_proj.weight": "model-00025-of-00113.safetensors",
|
701 |
+
"model.layers.5.self_attn.1.o_proj.weight": "model-00025-of-00113.safetensors",
|
702 |
+
"model.layers.5.self_attn.1.q_a_layernorm.weight": "model-00025-of-00113.safetensors",
|
703 |
+
"model.layers.5.self_attn.1.q_a_proj.weight": "model-00025-of-00113.safetensors",
|
704 |
+
"model.layers.5.self_attn.1.q_b_proj.weight": "model-00025-of-00113.safetensors",
|
705 |
+
"model.layers.6.input_layernorm.0.weight": "model-00029-of-00113.safetensors",
|
706 |
+
"model.layers.6.input_layernorm.1.weight": "model-00029-of-00113.safetensors",
|
707 |
+
"model.layers.6.mlp.router.classifier.weight": "model-00029-of-00113.safetensors",
|
708 |
+
"model.layers.6.mlp.router.e_score_correction_bias": "model-00029-of-00113.safetensors",
|
709 |
+
"model.layers.6.mlp.switch_mlp.down_proj.weight": "model-00028-of-00113.safetensors",
|
710 |
+
"model.layers.6.mlp.switch_mlp.gate_proj.weight": "model-00026-of-00113.safetensors",
|
711 |
+
"model.layers.6.mlp.switch_mlp.up_proj.weight": "model-00027-of-00113.safetensors",
|
712 |
+
"model.layers.6.mlps.0.down_proj.weight": "model-00029-of-00113.safetensors",
|
713 |
+
"model.layers.6.mlps.0.gate_proj.weight": "model-00029-of-00113.safetensors",
|
714 |
+
"model.layers.6.mlps.0.up_proj.weight": "model-00029-of-00113.safetensors",
|
715 |
+
"model.layers.6.mlps.1.down_proj.weight": "model-00029-of-00113.safetensors",
|
716 |
+
"model.layers.6.mlps.1.gate_proj.weight": "model-00029-of-00113.safetensors",
|
717 |
+
"model.layers.6.mlps.1.up_proj.weight": "model-00029-of-00113.safetensors",
|
718 |
+
"model.layers.6.post_attention_layernorm.0.weight": "model-00029-of-00113.safetensors",
|
719 |
+
"model.layers.6.post_attention_layernorm.1.weight": "model-00029-of-00113.safetensors",
|
720 |
+
"model.layers.6.self_attn.0.kv_a_layernorm.weight": "model-00029-of-00113.safetensors",
|
721 |
+
"model.layers.6.self_attn.0.kv_a_proj_with_mqa.weight": "model-00029-of-00113.safetensors",
|
722 |
+
"model.layers.6.self_attn.0.kv_b_proj.weight": "model-00029-of-00113.safetensors",
|
723 |
+
"model.layers.6.self_attn.0.o_proj.weight": "model-00029-of-00113.safetensors",
|
724 |
+
"model.layers.6.self_attn.0.q_a_layernorm.weight": "model-00029-of-00113.safetensors",
|
725 |
+
"model.layers.6.self_attn.0.q_a_proj.weight": "model-00029-of-00113.safetensors",
|
726 |
+
"model.layers.6.self_attn.0.q_b_proj.weight": "model-00029-of-00113.safetensors",
|
727 |
+
"model.layers.6.self_attn.1.kv_a_layernorm.weight": "model-00029-of-00113.safetensors",
|
728 |
+
"model.layers.6.self_attn.1.kv_a_proj_with_mqa.weight": "model-00029-of-00113.safetensors",
|
729 |
+
"model.layers.6.self_attn.1.kv_b_proj.weight": "model-00029-of-00113.safetensors",
|
730 |
+
"model.layers.6.self_attn.1.o_proj.weight": "model-00029-of-00113.safetensors",
|
731 |
+
"model.layers.6.self_attn.1.q_a_layernorm.weight": "model-00029-of-00113.safetensors",
|
732 |
+
"model.layers.6.self_attn.1.q_a_proj.weight": "model-00029-of-00113.safetensors",
|
733 |
+
"model.layers.6.self_attn.1.q_b_proj.weight": "model-00029-of-00113.safetensors",
|
734 |
+
"model.layers.7.input_layernorm.0.weight": "model-00033-of-00113.safetensors",
|
735 |
+
"model.layers.7.input_layernorm.1.weight": "model-00033-of-00113.safetensors",
|
736 |
+
"model.layers.7.mlp.router.classifier.weight": "model-00033-of-00113.safetensors",
|
737 |
+
"model.layers.7.mlp.router.e_score_correction_bias": "model-00033-of-00113.safetensors",
|
738 |
+
"model.layers.7.mlp.switch_mlp.down_proj.weight": "model-00032-of-00113.safetensors",
|
739 |
+
"model.layers.7.mlp.switch_mlp.gate_proj.weight": "model-00030-of-00113.safetensors",
|
740 |
+
"model.layers.7.mlp.switch_mlp.up_proj.weight": "model-00031-of-00113.safetensors",
|
741 |
+
"model.layers.7.mlps.0.down_proj.weight": "model-00033-of-00113.safetensors",
|
742 |
+
"model.layers.7.mlps.0.gate_proj.weight": "model-00033-of-00113.safetensors",
|
743 |
+
"model.layers.7.mlps.0.up_proj.weight": "model-00033-of-00113.safetensors",
|
744 |
+
"model.layers.7.mlps.1.down_proj.weight": "model-00033-of-00113.safetensors",
|
745 |
+
"model.layers.7.mlps.1.gate_proj.weight": "model-00033-of-00113.safetensors",
|
746 |
+
"model.layers.7.mlps.1.up_proj.weight": "model-00033-of-00113.safetensors",
|
747 |
+
"model.layers.7.post_attention_layernorm.0.weight": "model-00033-of-00113.safetensors",
|
748 |
+
"model.layers.7.post_attention_layernorm.1.weight": "model-00033-of-00113.safetensors",
|
749 |
+
"model.layers.7.self_attn.0.kv_a_layernorm.weight": "model-00033-of-00113.safetensors",
|
750 |
+
"model.layers.7.self_attn.0.kv_a_proj_with_mqa.weight": "model-00033-of-00113.safetensors",
|
751 |
+
"model.layers.7.self_attn.0.kv_b_proj.weight": "model-00033-of-00113.safetensors",
|
752 |
+
"model.layers.7.self_attn.0.o_proj.weight": "model-00033-of-00113.safetensors",
|
753 |
+
"model.layers.7.self_attn.0.q_a_layernorm.weight": "model-00033-of-00113.safetensors",
|
754 |
+
"model.layers.7.self_attn.0.q_a_proj.weight": "model-00033-of-00113.safetensors",
|
755 |
+
"model.layers.7.self_attn.0.q_b_proj.weight": "model-00033-of-00113.safetensors",
|
756 |
+
"model.layers.7.self_attn.1.kv_a_layernorm.weight": "model-00033-of-00113.safetensors",
|
757 |
+
"model.layers.7.self_attn.1.kv_a_proj_with_mqa.weight": "model-00033-of-00113.safetensors",
|
758 |
+
"model.layers.7.self_attn.1.kv_b_proj.weight": "model-00033-of-00113.safetensors",
|
759 |
+
"model.layers.7.self_attn.1.o_proj.weight": "model-00033-of-00113.safetensors",
|
760 |
+
"model.layers.7.self_attn.1.q_a_layernorm.weight": "model-00033-of-00113.safetensors",
|
761 |
+
"model.layers.7.self_attn.1.q_a_proj.weight": "model-00033-of-00113.safetensors",
|
762 |
+
"model.layers.7.self_attn.1.q_b_proj.weight": "model-00033-of-00113.safetensors",
|
763 |
+
"model.layers.8.input_layernorm.0.weight": "model-00037-of-00113.safetensors",
|
764 |
+
"model.layers.8.input_layernorm.1.weight": "model-00037-of-00113.safetensors",
|
765 |
+
"model.layers.8.mlp.router.classifier.weight": "model-00037-of-00113.safetensors",
|
766 |
+
"model.layers.8.mlp.router.e_score_correction_bias": "model-00037-of-00113.safetensors",
|
767 |
+
"model.layers.8.mlp.switch_mlp.down_proj.weight": "model-00036-of-00113.safetensors",
|
768 |
+
"model.layers.8.mlp.switch_mlp.gate_proj.weight": "model-00034-of-00113.safetensors",
|
769 |
+
"model.layers.8.mlp.switch_mlp.up_proj.weight": "model-00035-of-00113.safetensors",
|
770 |
+
"model.layers.8.mlps.0.down_proj.weight": "model-00037-of-00113.safetensors",
|
771 |
+
"model.layers.8.mlps.0.gate_proj.weight": "model-00037-of-00113.safetensors",
|
772 |
+
"model.layers.8.mlps.0.up_proj.weight": "model-00037-of-00113.safetensors",
|
773 |
+
"model.layers.8.mlps.1.down_proj.weight": "model-00037-of-00113.safetensors",
|
774 |
+
"model.layers.8.mlps.1.gate_proj.weight": "model-00037-of-00113.safetensors",
|
775 |
+
"model.layers.8.mlps.1.up_proj.weight": "model-00037-of-00113.safetensors",
|
776 |
+
"model.layers.8.post_attention_layernorm.0.weight": "model-00037-of-00113.safetensors",
|
777 |
+
"model.layers.8.post_attention_layernorm.1.weight": "model-00037-of-00113.safetensors",
|
778 |
+
"model.layers.8.self_attn.0.kv_a_layernorm.weight": "model-00037-of-00113.safetensors",
|
779 |
+
"model.layers.8.self_attn.0.kv_a_proj_with_mqa.weight": "model-00037-of-00113.safetensors",
|
780 |
+
"model.layers.8.self_attn.0.kv_b_proj.weight": "model-00037-of-00113.safetensors",
|
781 |
+
"model.layers.8.self_attn.0.o_proj.weight": "model-00037-of-00113.safetensors",
|
782 |
+
"model.layers.8.self_attn.0.q_a_layernorm.weight": "model-00037-of-00113.safetensors",
|
783 |
+
"model.layers.8.self_attn.0.q_a_proj.weight": "model-00037-of-00113.safetensors",
|
784 |
+
"model.layers.8.self_attn.0.q_b_proj.weight": "model-00037-of-00113.safetensors",
|
785 |
+
"model.layers.8.self_attn.1.kv_a_layernorm.weight": "model-00037-of-00113.safetensors",
|
786 |
+
"model.layers.8.self_attn.1.kv_a_proj_with_mqa.weight": "model-00037-of-00113.safetensors",
|
787 |
+
"model.layers.8.self_attn.1.kv_b_proj.weight": "model-00037-of-00113.safetensors",
|
788 |
+
"model.layers.8.self_attn.1.o_proj.weight": "model-00037-of-00113.safetensors",
|
789 |
+
"model.layers.8.self_attn.1.q_a_layernorm.weight": "model-00037-of-00113.safetensors",
|
790 |
+
"model.layers.8.self_attn.1.q_a_proj.weight": "model-00037-of-00113.safetensors",
|
791 |
+
"model.layers.8.self_attn.1.q_b_proj.weight": "model-00037-of-00113.safetensors",
|
792 |
+
"model.layers.9.input_layernorm.0.weight": "model-00041-of-00113.safetensors",
|
793 |
+
"model.layers.9.input_layernorm.1.weight": "model-00041-of-00113.safetensors",
|
794 |
+
"model.layers.9.mlp.router.classifier.weight": "model-00041-of-00113.safetensors",
|
795 |
+
"model.layers.9.mlp.router.e_score_correction_bias": "model-00041-of-00113.safetensors",
|
796 |
+
"model.layers.9.mlp.switch_mlp.down_proj.weight": "model-00040-of-00113.safetensors",
|
797 |
+
"model.layers.9.mlp.switch_mlp.gate_proj.weight": "model-00038-of-00113.safetensors",
|
798 |
+
"model.layers.9.mlp.switch_mlp.up_proj.weight": "model-00039-of-00113.safetensors",
|
799 |
+
"model.layers.9.mlps.0.down_proj.weight": "model-00041-of-00113.safetensors",
|
800 |
+
"model.layers.9.mlps.0.gate_proj.weight": "model-00041-of-00113.safetensors",
|
801 |
+
"model.layers.9.mlps.0.up_proj.weight": "model-00041-of-00113.safetensors",
|
802 |
+
"model.layers.9.mlps.1.down_proj.weight": "model-00041-of-00113.safetensors",
|
803 |
+
"model.layers.9.mlps.1.gate_proj.weight": "model-00041-of-00113.safetensors",
|
804 |
+
"model.layers.9.mlps.1.up_proj.weight": "model-00041-of-00113.safetensors",
|
805 |
+
"model.layers.9.post_attention_layernorm.0.weight": "model-00041-of-00113.safetensors",
|
806 |
+
"model.layers.9.post_attention_layernorm.1.weight": "model-00041-of-00113.safetensors",
|
807 |
+
"model.layers.9.self_attn.0.kv_a_layernorm.weight": "model-00041-of-00113.safetensors",
|
808 |
+
"model.layers.9.self_attn.0.kv_a_proj_with_mqa.weight": "model-00041-of-00113.safetensors",
|
809 |
+
"model.layers.9.self_attn.0.kv_b_proj.weight": "model-00041-of-00113.safetensors",
|
810 |
+
"model.layers.9.self_attn.0.o_proj.weight": "model-00041-of-00113.safetensors",
|
811 |
+
"model.layers.9.self_attn.0.q_a_layernorm.weight": "model-00041-of-00113.safetensors",
|
812 |
+
"model.layers.9.self_attn.0.q_a_proj.weight": "model-00041-of-00113.safetensors",
|
813 |
+
"model.layers.9.self_attn.0.q_b_proj.weight": "model-00041-of-00113.safetensors",
|
814 |
+
"model.layers.9.self_attn.1.kv_a_layernorm.weight": "model-00041-of-00113.safetensors",
|
815 |
+
"model.layers.9.self_attn.1.kv_a_proj_with_mqa.weight": "model-00041-of-00113.safetensors",
|
816 |
+
"model.layers.9.self_attn.1.kv_b_proj.weight": "model-00041-of-00113.safetensors",
|
817 |
+
"model.layers.9.self_attn.1.o_proj.weight": "model-00041-of-00113.safetensors",
|
818 |
+
"model.layers.9.self_attn.1.q_a_layernorm.weight": "model-00041-of-00113.safetensors",
|
819 |
+
"model.layers.9.self_attn.1.q_a_proj.weight": "model-00041-of-00113.safetensors",
|
820 |
+
"model.layers.9.self_attn.1.q_b_proj.weight": "model-00041-of-00113.safetensors",
|
821 |
+
"model.norm.weight": "model-00113-of-00113.safetensors"
|
822 |
+
}
|
823 |
+
}
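
The `weight_map` closed by the braces above keys every tensor name in the checkpoint to the shard file that stores it, which lets a loader open only the shards it actually needs. Below is a minimal sketch of resolving names through this index, assuming the standard Hugging Face sharded-checkpoint layout and that the shard files sit next to the index file; it is an illustration, not this repository's own loading code.

```python
# Sketch: materialize a full state dict from model.safetensors.index.json.
import json
from collections import defaultdict

from safetensors.torch import load_file

with open("model.safetensors.index.json") as f:
    index = json.load(f)

# Tensor name -> shard filename, exactly the mapping listed above.
weight_map = index["weight_map"]

# Group tensor names by shard so each of the 113 files is opened only once.
names_per_shard = defaultdict(list)
for name, shard in weight_map.items():
    names_per_shard[shard].append(name)

state_dict = {}
for shard, names in names_per_shard.items():
    shard_tensors = load_file(shard)  # reads one .safetensors file
    state_dict.update({name: shard_tensors[name] for name in names})

# e.g. "model.norm.weight" is now loaded from model-00113-of-00113.safetensors
```

For partial loading (say, a single layer), the same index can be filtered first, so only the handful of shards holding that layer's tensors are touched.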
|