diff --git "a/scores/Qwen3-30B-A3B-pruned-Q6_K.md" "b/scores/Qwen3-30B-A3B-pruned-Q6_K.md" new file mode 100644--- /dev/null +++ "b/scores/Qwen3-30B-A3B-pruned-Q6_K.md" @@ -0,0 +1,1653 @@ +# Qwen3-30B-A3B-Q6_K.gguf - GGUF Internal File Dump + +- Endian: LITTLE endian + +## Key Value Metadata Store + +There are 45 key-value pairs in this file + +| POS | TYPE | Count | Key | Value | +|----:|:---------|-------:|:------------------------------------------|:--------------------------------------------------------------------| +| 1 | UINT32 | 1 | GGUF.version | 3 | +| 2 | UINT64 | 1 | GGUF.tensor_count | 555 | +| 3 | UINT64 | 1 | GGUF.kv_count | 42 | +| 4 | STRING | 1 | general.architecture | `qwen3moe` | +| 5 | STRING | 1 | general.type | `model` | +| 6 | STRING | 1 | general.name | `Qwen3 30B A3B` | +| 7 | STRING | 1 | general.basename | `Qwen3` | +| 8 | STRING | 1 | general.size_label | `30B-A3B` | +| 9 | STRING | 1 | general.license | `apache-2.0` | +| 10 | STRING | 1 | general.license.link | `https://huggingface.co/Qwen/Qwen3-30B-A3B/blob/main/LICENSE` | +| 11 | UINT32 | 1 | general.base_model.count | 1 | +| 12 | STRING | 1 | general.base_model.0.name | `Qwen3 30B A3B Base` | +| 13 | STRING | 1 | general.base_model.0.organization | `Qwen` | +| 14 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/Qwen/Qwen3-30B-A3B-Base` | +| 15 | [STRING] | 1 | general.tags | [ `text-generation` ] | +| 16 | UINT32 | 1 | qwen3moe.context_length | 40960 | +| 17 | UINT32 | 1 | qwen3moe.embedding_length | 2048 | +| 18 | UINT32 | 1 | qwen3moe.feed_forward_length | 6144 | +| 19 | UINT32 | 1 | qwen3moe.attention.head_count | 32 | +| 20 | UINT32 | 1 | qwen3moe.attention.head_count_kv | 4 | +| 21 | FLOAT32 | 1 | qwen3moe.rope.freq_base | 1000000.0 | +| 22 | FLOAT32 | 1 | qwen3moe.attention.layer_norm_rms_epsilon | 1e-06 | +| 23 | UINT32 | 1 | qwen3moe.expert_used_count | 8 | +| 24 | UINT32 | 1 | qwen3moe.attention.key_length | 128 | +| 25 | UINT32 | 1 | qwen3moe.attention.value_length | 128 | +| 26 | UINT32 | 1 | qwen3moe.expert_count | 128 | +| 27 | UINT32 | 1 | qwen3moe.expert_feed_forward_length | 768 | +| 28 | STRING | 1 | tokenizer.ggml.model | `gpt2` | +| 29 | STRING | 1 | tokenizer.ggml.pre | `qwen2` | +| 30 | [STRING] | 151936 | tokenizer.ggml.tokens | [ `!`, `"`, `#`, `$`, `%`, ... ] | +| 31 | [INT32] | 151936 | tokenizer.ggml.token_type | [ 1, 1, 1, 1, 1, 1, 1, ... ] | +| 32 | [STRING] | 151387 | tokenizer.ggml.merges | [ `Ġ Ġ`, `ĠĠ ĠĠ`, `i n`, `Ġ t`, `ĠĠĠĠ ĠĠĠĠ`, ... 
] | +| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 151645 | +| 34 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 151643 | +| 35 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 151643 | +| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | False | +| 37 | STRING | 1 | tokenizer.chat_template | `{%- if tools %}{{- '<|im_`...`{%- endif %}{%- endif %}` | +| 38 | UINT32 | 1 | general.quantization_version | 2 | +| 39 | UINT32 | 1 | general.file_type | 18 | +| 40 | BOOL | 1 | general.pruned | True | +| 41 | UINT32 | 1 | qwen3moe.block_count | 46 | +| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Qwen3-30B-A3B-medium.dat` | +| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_all_medium.txt` | +| 44 | INT32 | 1 | quantize.imatrix.entries_count | 385 | +| 45 | INT32 | 1 | quantize.imatrix.chunks_count | 6946 | + +## Tensors Overview ~29B Elements + +Total number of elements in all tensors: 29285881344 Elements + +- [Qwen3-30B-A3B-Q6\_K.gguf - GGUF Internal File Dump](#qwen3-30b-a3b-q6_kgguf---gguf-internal-file-dump) + - [Key Value Metadata Store](#key-value-metadata-store) + - [Tensors Overview ~29B Elements](#tensors-overview-29b-elements) + - [Tensor Data Offset](#tensor-data-offset) + - [Base Tensor Group : ~622M Elements](#base-tensor-group--622m-elements) + - [Block 0 Tensor Group : ~623M Elements](#block-0-tensor-group--623m-elements) + - [Block 1 Tensor Group : ~623M Elements](#block-1-tensor-group--623m-elements) + - [Block 2 Tensor Group : ~623M Elements](#block-2-tensor-group--623m-elements) + - [Block 3 Tensor Group : ~623M Elements](#block-3-tensor-group--623m-elements) + - [Block 4 Tensor Group : ~623M Elements](#block-4-tensor-group--623m-elements) + - [Block 5 Tensor Group : ~623M Elements](#block-5-tensor-group--623m-elements) + - [Block 6 Tensor Group : ~623M Elements](#block-6-tensor-group--623m-elements) + - [Block 7 Tensor Group : ~623M Elements](#block-7-tensor-group--623m-elements) + - [Block 8 Tensor Group : ~623M Elements](#block-8-tensor-group--623m-elements) + - [Block 9 Tensor Group : ~623M Elements](#block-9-tensor-group--623m-elements) + - [Block 10 Tensor Group : ~623M Elements](#block-10-tensor-group--623m-elements) + - [Block 11 Tensor Group : ~623M Elements](#block-11-tensor-group--623m-elements) + - [Block 12 Tensor Group : ~623M Elements](#block-12-tensor-group--623m-elements) + - [Block 13 Tensor Group : ~623M Elements](#block-13-tensor-group--623m-elements) + - [Block 14 Tensor Group : ~623M Elements](#block-14-tensor-group--623m-elements) + - [Block 15 Tensor Group : ~623M Elements](#block-15-tensor-group--623m-elements) + - [Block 16 Tensor Group : ~623M Elements](#block-16-tensor-group--623m-elements) + - [Block 17 Tensor Group : ~623M Elements](#block-17-tensor-group--623m-elements) + - [Block 18 Tensor Group : ~623M Elements](#block-18-tensor-group--623m-elements) + - [Block 19 Tensor Group : ~623M Elements](#block-19-tensor-group--623m-elements) + - [Block 20 Tensor Group : ~623M Elements](#block-20-tensor-group--623m-elements) + - [Block 21 Tensor Group : ~623M Elements](#block-21-tensor-group--623m-elements) + - [Block 22 Tensor Group : ~623M Elements](#block-22-tensor-group--623m-elements) + - [Block 23 Tensor Group : ~623M Elements](#block-23-tensor-group--623m-elements) + - [Block 24 Tensor Group : ~623M Elements](#block-24-tensor-group--623m-elements) + - [Block 25 Tensor Group : ~623M Elements](#block-25-tensor-group--623m-elements) + - [Block 26 Tensor Group : ~623M 
Elements](#block-26-tensor-group--623m-elements) + - [Block 27 Tensor Group : ~623M Elements](#block-27-tensor-group--623m-elements) + - [Block 28 Tensor Group : ~623M Elements](#block-28-tensor-group--623m-elements) + - [Block 29 Tensor Group : ~623M Elements](#block-29-tensor-group--623m-elements) + - [Block 30 Tensor Group : ~623M Elements](#block-30-tensor-group--623m-elements) + - [Block 31 Tensor Group : ~623M Elements](#block-31-tensor-group--623m-elements) + - [Block 32 Tensor Group : ~623M Elements](#block-32-tensor-group--623m-elements) + - [Block 33 Tensor Group : ~623M Elements](#block-33-tensor-group--623m-elements) + - [Block 34 Tensor Group : ~623M Elements](#block-34-tensor-group--623m-elements) + - [Block 35 Tensor Group : ~623M Elements](#block-35-tensor-group--623m-elements) + - [Block 36 Tensor Group : ~623M Elements](#block-36-tensor-group--623m-elements) + - [Block 37 Tensor Group : ~623M Elements](#block-37-tensor-group--623m-elements) + - [Block 38 Tensor Group : ~623M Elements](#block-38-tensor-group--623m-elements) + - [Block 39 Tensor Group : ~623M Elements](#block-39-tensor-group--623m-elements) + - [Block 40 Tensor Group : ~623M Elements](#block-40-tensor-group--623m-elements) + - [Block 41 Tensor Group : ~623M Elements](#block-41-tensor-group--623m-elements) + - [Block 42 Tensor Group : ~623M Elements](#block-42-tensor-group--623m-elements) + - [Block 43 Tensor Group : ~623M Elements](#block-43-tensor-group--623m-elements) + - [Block 44 Tensor Group : ~623M Elements](#block-44-tensor-group--623m-elements) + - [Block 45 Tensor Group : ~623M Elements](#block-45-tensor-group--623m-elements) + +### Tensor Data Offset + +This table contains the offset and data segment relative to start of file + +| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) | +|-----:|:----------------------------|-----------------:|-----------------:| +| 0 | output.weight | 0x5b12e0 | 0xf36d800 | +| 1 | output_norm.weight | 0xf91eae0 | 0x2000 | +| 2 | token_embd.weight | 0xf920ae0 | 0x7f82800 | +| 3 | blk.0.attn_k.weight | 0x178a32e0 | 0xb0000 | +| 4 | blk.0.attn_k_norm.weight | 0x179532e0 | 0x200 | +| 5 | blk.0.attn_norm.weight | 0x179534e0 | 0x2000 | +| 6 | blk.0.attn_output.weight | 0x179554e0 | 0x690000 | +| 7 | blk.0.attn_q.weight | 0x17fe54e0 | 0x580000 | +| 8 | blk.0.attn_q_norm.weight | 0x185654e0 | 0x200 | +| 9 | blk.0.attn_v.weight | 0x185656e0 | 0x110000 | +| 10 | blk.0.ffn_down_exps.weight | 0x186756e0 | 0xcc00000 | +| 11 | blk.0.ffn_gate_exps.weight | 0x252756e0 | 0x8400000 | +| 12 | blk.0.ffn_gate_inp.weight | 0x2d6756e0 | 0x100000 | +| 13 | blk.0.ffn_norm.weight | 0x2d7756e0 | 0x2000 | +| 14 | blk.0.ffn_up_exps.weight | 0x2d7776e0 | 0x8400000 | +| 15 | blk.1.attn_k.weight | 0x35b776e0 | 0xb0000 | +| 16 | blk.1.attn_k_norm.weight | 0x35c276e0 | 0x200 | +| 17 | blk.1.attn_norm.weight | 0x35c278e0 | 0x2000 | +| 18 | blk.1.attn_output.weight | 0x35c298e0 | 0x690000 | +| 19 | blk.1.attn_q.weight | 0x362b98e0 | 0x580000 | +| 20 | blk.1.attn_q_norm.weight | 0x368398e0 | 0x200 | +| 21 | blk.1.attn_v.weight | 0x36839ae0 | 0x110000 | +| 22 | blk.1.ffn_down_exps.weight | 0x36949ae0 | 0xcc00000 | +| 23 | blk.1.ffn_gate_exps.weight | 0x43549ae0 | 0x8400000 | +| 24 | blk.1.ffn_gate_inp.weight | 0x4b949ae0 | 0x100000 | +| 25 | blk.1.ffn_norm.weight | 0x4ba49ae0 | 0x2000 | +| 26 | blk.1.ffn_up_exps.weight | 0x4ba4bae0 | 0x8400000 | +| 27 | blk.2.attn_k.weight | 0x53e4bae0 | 0xb0000 | +| 28 | blk.2.attn_k_norm.weight | 0x53efbae0 | 0x200 | +| 29 | blk.2.attn_norm.weight | 0x53efbce0 
| 0x2000 | +| 30 | blk.2.attn_output.weight | 0x53efdce0 | 0x690000 | +| 31 | blk.2.attn_q.weight | 0x5458dce0 | 0x580000 | +| 32 | blk.2.attn_q_norm.weight | 0x54b0dce0 | 0x200 | +| 33 | blk.2.attn_v.weight | 0x54b0dee0 | 0x110000 | +| 34 | blk.2.ffn_down_exps.weight | 0x54c1dee0 | 0xcc00000 | +| 35 | blk.2.ffn_gate_exps.weight | 0x6181dee0 | 0x8400000 | +| 36 | blk.2.ffn_gate_inp.weight | 0x69c1dee0 | 0x100000 | +| 37 | blk.2.ffn_norm.weight | 0x69d1dee0 | 0x2000 | +| 38 | blk.2.ffn_up_exps.weight | 0x69d1fee0 | 0x8400000 | +| 39 | blk.3.attn_k.weight | 0x7211fee0 | 0xb0000 | +| 40 | blk.3.attn_k_norm.weight | 0x721cfee0 | 0x200 | +| 41 | blk.3.attn_norm.weight | 0x721d00e0 | 0x2000 | +| 42 | blk.3.attn_output.weight | 0x721d20e0 | 0x690000 | +| 43 | blk.3.attn_q.weight | 0x728620e0 | 0x580000 | +| 44 | blk.3.attn_q_norm.weight | 0x72de20e0 | 0x200 | +| 45 | blk.3.attn_v.weight | 0x72de22e0 | 0x110000 | +| 46 | blk.3.ffn_down_exps.weight | 0x72ef22e0 | 0xcc00000 | +| 47 | blk.3.ffn_gate_exps.weight | 0x7faf22e0 | 0x8400000 | +| 48 | blk.3.ffn_gate_inp.weight | 0x87ef22e0 | 0x100000 | +| 49 | blk.3.ffn_norm.weight | 0x87ff22e0 | 0x2000 | +| 50 | blk.3.ffn_up_exps.weight | 0x87ff42e0 | 0x8400000 | +| 51 | blk.4.attn_k.weight | 0x903f42e0 | 0xb0000 | +| 52 | blk.4.attn_k_norm.weight | 0x904a42e0 | 0x200 | +| 53 | blk.4.attn_norm.weight | 0x904a44e0 | 0x2000 | +| 54 | blk.4.attn_output.weight | 0x904a64e0 | 0x690000 | +| 55 | blk.4.attn_q.weight | 0x90b364e0 | 0x580000 | +| 56 | blk.4.attn_q_norm.weight | 0x910b64e0 | 0x200 | +| 57 | blk.4.attn_v.weight | 0x910b66e0 | 0x110000 | +| 58 | blk.4.ffn_down_exps.weight | 0x911c66e0 | 0xcc00000 | +| 59 | blk.4.ffn_gate_exps.weight | 0x9ddc66e0 | 0x8400000 | +| 60 | blk.4.ffn_gate_inp.weight | 0xa61c66e0 | 0x100000 | +| 61 | blk.4.ffn_norm.weight | 0xa62c66e0 | 0x2000 | +| 62 | blk.4.ffn_up_exps.weight | 0xa62c86e0 | 0x8400000 | +| 63 | blk.5.attn_k.weight | 0xae6c86e0 | 0xb0000 | +| 64 | blk.5.attn_k_norm.weight | 0xae7786e0 | 0x200 | +| 65 | blk.5.attn_norm.weight | 0xae7788e0 | 0x2000 | +| 66 | blk.5.attn_output.weight | 0xae77a8e0 | 0x690000 | +| 67 | blk.5.attn_q.weight | 0xaee0a8e0 | 0x580000 | +| 68 | blk.5.attn_q_norm.weight | 0xaf38a8e0 | 0x200 | +| 69 | blk.5.attn_v.weight | 0xaf38aae0 | 0x110000 | +| 70 | blk.5.ffn_down_exps.weight | 0xaf49aae0 | 0xcc00000 | +| 71 | blk.5.ffn_gate_exps.weight | 0xbc09aae0 | 0x8400000 | +| 72 | blk.5.ffn_gate_inp.weight | 0xc449aae0 | 0x100000 | +| 73 | blk.5.ffn_norm.weight | 0xc459aae0 | 0x2000 | +| 74 | blk.5.ffn_up_exps.weight | 0xc459cae0 | 0x8400000 | +| 75 | blk.6.attn_k.weight | 0xcc99cae0 | 0xb0000 | +| 76 | blk.6.attn_k_norm.weight | 0xcca4cae0 | 0x200 | +| 77 | blk.6.attn_norm.weight | 0xcca4cce0 | 0x2000 | +| 78 | blk.6.attn_output.weight | 0xcca4ece0 | 0x690000 | +| 79 | blk.6.attn_q.weight | 0xcd0dece0 | 0x580000 | +| 80 | blk.6.attn_q_norm.weight | 0xcd65ece0 | 0x200 | +| 81 | blk.6.attn_v.weight | 0xcd65eee0 | 0x110000 | +| 82 | blk.6.ffn_down_exps.weight | 0xcd76eee0 | 0xcc00000 | +| 83 | blk.6.ffn_gate_exps.weight | 0xda36eee0 | 0x8400000 | +| 84 | blk.6.ffn_gate_inp.weight | 0xe276eee0 | 0x100000 | +| 85 | blk.6.ffn_norm.weight | 0xe286eee0 | 0x2000 | +| 86 | blk.6.ffn_up_exps.weight | 0xe2870ee0 | 0x8400000 | +| 87 | blk.7.attn_k.weight | 0xeac70ee0 | 0xb0000 | +| 88 | blk.7.attn_k_norm.weight | 0xead20ee0 | 0x200 | +| 89 | blk.7.attn_norm.weight | 0xead210e0 | 0x2000 | +| 90 | blk.7.attn_output.weight | 0xead230e0 | 0x690000 | +| 91 | blk.7.attn_q.weight | 0xeb3b30e0 | 0x580000 | +| 92 
| blk.7.attn_q_norm.weight | 0xeb9330e0 | 0x200 | +| 93 | blk.7.attn_v.weight | 0xeb9332e0 | 0x110000 | +| 94 | blk.7.ffn_down_exps.weight | 0xeba432e0 | 0xcc00000 | +| 95 | blk.7.ffn_gate_exps.weight | 0xf86432e0 | 0x8400000 | +| 96 | blk.7.ffn_gate_inp.weight | 0x100a432e0 | 0x100000 | +| 97 | blk.7.ffn_norm.weight | 0x100b432e0 | 0x2000 | +| 98 | blk.7.ffn_up_exps.weight | 0x100b452e0 | 0x8400000 | +| 99 | blk.8.attn_k.weight | 0x108f452e0 | 0xb0000 | +| 100 | blk.8.attn_k_norm.weight | 0x108ff52e0 | 0x200 | +| 101 | blk.8.attn_norm.weight | 0x108ff54e0 | 0x2000 | +| 102 | blk.8.attn_output.weight | 0x108ff74e0 | 0x690000 | +| 103 | blk.8.attn_q.weight | 0x1096874e0 | 0x580000 | +| 104 | blk.8.attn_q_norm.weight | 0x109c074e0 | 0x200 | +| 105 | blk.8.attn_v.weight | 0x109c076e0 | 0x110000 | +| 106 | blk.8.ffn_down_exps.weight | 0x109d176e0 | 0xcc00000 | +| 107 | blk.8.ffn_gate_exps.weight | 0x1169176e0 | 0x8400000 | +| 108 | blk.8.ffn_gate_inp.weight | 0x11ed176e0 | 0x100000 | +| 109 | blk.8.ffn_norm.weight | 0x11ee176e0 | 0x2000 | +| 110 | blk.8.ffn_up_exps.weight | 0x11ee196e0 | 0x8400000 | +| 111 | blk.9.attn_k.weight | 0x1272196e0 | 0xb0000 | +| 112 | blk.9.attn_k_norm.weight | 0x1272c96e0 | 0x200 | +| 113 | blk.9.attn_norm.weight | 0x1272c98e0 | 0x2000 | +| 114 | blk.9.attn_output.weight | 0x1272cb8e0 | 0x690000 | +| 115 | blk.9.attn_q.weight | 0x12795b8e0 | 0x580000 | +| 116 | blk.9.attn_q_norm.weight | 0x127edb8e0 | 0x200 | +| 117 | blk.9.attn_v.weight | 0x127edbae0 | 0x110000 | +| 118 | blk.9.ffn_down_exps.weight | 0x127febae0 | 0xcc00000 | +| 119 | blk.9.ffn_gate_exps.weight | 0x134bebae0 | 0x8400000 | +| 120 | blk.9.ffn_gate_inp.weight | 0x13cfebae0 | 0x100000 | +| 121 | blk.9.ffn_norm.weight | 0x13d0ebae0 | 0x2000 | +| 122 | blk.9.ffn_up_exps.weight | 0x13d0edae0 | 0x8400000 | +| 123 | blk.10.attn_k.weight | 0x1454edae0 | 0xb0000 | +| 124 | blk.10.attn_k_norm.weight | 0x14559dae0 | 0x200 | +| 125 | blk.10.attn_norm.weight | 0x14559dce0 | 0x2000 | +| 126 | blk.10.attn_output.weight | 0x14559fce0 | 0x690000 | +| 127 | blk.10.attn_q.weight | 0x145c2fce0 | 0x580000 | +| 128 | blk.10.attn_q_norm.weight | 0x1461afce0 | 0x200 | +| 129 | blk.10.attn_v.weight | 0x1461afee0 | 0x110000 | +| 130 | blk.10.ffn_down_exps.weight | 0x1462bfee0 | 0xcc00000 | +| 131 | blk.10.ffn_gate_exps.weight | 0x152ebfee0 | 0x8400000 | +| 132 | blk.10.ffn_gate_inp.weight | 0x15b2bfee0 | 0x100000 | +| 133 | blk.10.ffn_norm.weight | 0x15b3bfee0 | 0x2000 | +| 134 | blk.10.ffn_up_exps.weight | 0x15b3c1ee0 | 0x8400000 | +| 135 | blk.11.attn_k.weight | 0x1637c1ee0 | 0xb0000 | +| 136 | blk.11.attn_k_norm.weight | 0x163871ee0 | 0x200 | +| 137 | blk.11.attn_norm.weight | 0x1638720e0 | 0x2000 | +| 138 | blk.11.attn_output.weight | 0x1638740e0 | 0x690000 | +| 139 | blk.11.attn_q.weight | 0x163f040e0 | 0x580000 | +| 140 | blk.11.attn_q_norm.weight | 0x1644840e0 | 0x200 | +| 141 | blk.11.attn_v.weight | 0x1644842e0 | 0x110000 | +| 142 | blk.11.ffn_down_exps.weight | 0x1645942e0 | 0xcc00000 | +| 143 | blk.11.ffn_gate_exps.weight | 0x1711942e0 | 0x8400000 | +| 144 | blk.11.ffn_gate_inp.weight | 0x1795942e0 | 0x100000 | +| 145 | blk.11.ffn_norm.weight | 0x1796942e0 | 0x2000 | +| 146 | blk.11.ffn_up_exps.weight | 0x1796962e0 | 0x8400000 | +| 147 | blk.12.attn_k.weight | 0x181a962e0 | 0xb0000 | +| 148 | blk.12.attn_k_norm.weight | 0x181b462e0 | 0x200 | +| 149 | blk.12.attn_norm.weight | 0x181b464e0 | 0x2000 | +| 150 | blk.12.attn_output.weight | 0x181b484e0 | 0x690000 | +| 151 | blk.12.attn_q.weight | 0x1821d84e0 | 0x580000 
| +| 152 | blk.12.attn_q_norm.weight | 0x1827584e0 | 0x200 | +| 153 | blk.12.attn_v.weight | 0x1827586e0 | 0x110000 | +| 154 | blk.12.ffn_down_exps.weight | 0x1828686e0 | 0xcc00000 | +| 155 | blk.12.ffn_gate_exps.weight | 0x18f4686e0 | 0x8400000 | +| 156 | blk.12.ffn_gate_inp.weight | 0x1978686e0 | 0x100000 | +| 157 | blk.12.ffn_norm.weight | 0x1979686e0 | 0x2000 | +| 158 | blk.12.ffn_up_exps.weight | 0x19796a6e0 | 0x8400000 | +| 159 | blk.13.attn_k.weight | 0x19fd6a6e0 | 0xb0000 | +| 160 | blk.13.attn_k_norm.weight | 0x19fe1a6e0 | 0x200 | +| 161 | blk.13.attn_norm.weight | 0x19fe1a8e0 | 0x2000 | +| 162 | blk.13.attn_output.weight | 0x19fe1c8e0 | 0x690000 | +| 163 | blk.13.attn_q.weight | 0x1a04ac8e0 | 0x580000 | +| 164 | blk.13.attn_q_norm.weight | 0x1a0a2c8e0 | 0x200 | +| 165 | blk.13.attn_v.weight | 0x1a0a2cae0 | 0x110000 | +| 166 | blk.13.ffn_down_exps.weight | 0x1a0b3cae0 | 0xcc00000 | +| 167 | blk.13.ffn_gate_exps.weight | 0x1ad73cae0 | 0x8400000 | +| 168 | blk.13.ffn_gate_inp.weight | 0x1b5b3cae0 | 0x100000 | +| 169 | blk.13.ffn_norm.weight | 0x1b5c3cae0 | 0x2000 | +| 170 | blk.13.ffn_up_exps.weight | 0x1b5c3eae0 | 0x8400000 | +| 171 | blk.14.attn_k.weight | 0x1be03eae0 | 0xb0000 | +| 172 | blk.14.attn_k_norm.weight | 0x1be0eeae0 | 0x200 | +| 173 | blk.14.attn_norm.weight | 0x1be0eece0 | 0x2000 | +| 174 | blk.14.attn_output.weight | 0x1be0f0ce0 | 0x690000 | +| 175 | blk.14.attn_q.weight | 0x1be780ce0 | 0x580000 | +| 176 | blk.14.attn_q_norm.weight | 0x1bed00ce0 | 0x200 | +| 177 | blk.14.attn_v.weight | 0x1bed00ee0 | 0x110000 | +| 178 | blk.14.ffn_down_exps.weight | 0x1bee10ee0 | 0xcc00000 | +| 179 | blk.14.ffn_gate_exps.weight | 0x1cba10ee0 | 0x8400000 | +| 180 | blk.14.ffn_gate_inp.weight | 0x1d3e10ee0 | 0x100000 | +| 181 | blk.14.ffn_norm.weight | 0x1d3f10ee0 | 0x2000 | +| 182 | blk.14.ffn_up_exps.weight | 0x1d3f12ee0 | 0x8400000 | +| 183 | blk.15.attn_k.weight | 0x1dc312ee0 | 0xb0000 | +| 184 | blk.15.attn_k_norm.weight | 0x1dc3c2ee0 | 0x200 | +| 185 | blk.15.attn_norm.weight | 0x1dc3c30e0 | 0x2000 | +| 186 | blk.15.attn_output.weight | 0x1dc3c50e0 | 0x690000 | +| 187 | blk.15.attn_q.weight | 0x1dca550e0 | 0x580000 | +| 188 | blk.15.attn_q_norm.weight | 0x1dcfd50e0 | 0x200 | +| 189 | blk.15.attn_v.weight | 0x1dcfd52e0 | 0x110000 | +| 190 | blk.15.ffn_down_exps.weight | 0x1dd0e52e0 | 0xcc00000 | +| 191 | blk.15.ffn_gate_exps.weight | 0x1e9ce52e0 | 0x8400000 | +| 192 | blk.15.ffn_gate_inp.weight | 0x1f20e52e0 | 0x100000 | +| 193 | blk.15.ffn_norm.weight | 0x1f21e52e0 | 0x2000 | +| 194 | blk.15.ffn_up_exps.weight | 0x1f21e72e0 | 0x8400000 | +| 195 | blk.16.attn_k.weight | 0x1fa5e72e0 | 0xb0000 | +| 196 | blk.16.attn_k_norm.weight | 0x1fa6972e0 | 0x200 | +| 197 | blk.16.attn_norm.weight | 0x1fa6974e0 | 0x2000 | +| 198 | blk.16.attn_output.weight | 0x1fa6994e0 | 0x690000 | +| 199 | blk.16.attn_q.weight | 0x1fad294e0 | 0x580000 | +| 200 | blk.16.attn_q_norm.weight | 0x1fb2a94e0 | 0x200 | +| 201 | blk.16.attn_v.weight | 0x1fb2a96e0 | 0x110000 | +| 202 | blk.16.ffn_down_exps.weight | 0x1fb3b96e0 | 0xcc00000 | +| 203 | blk.16.ffn_gate_exps.weight | 0x207fb96e0 | 0x8400000 | +| 204 | blk.16.ffn_gate_inp.weight | 0x2103b96e0 | 0x100000 | +| 205 | blk.16.ffn_norm.weight | 0x2104b96e0 | 0x2000 | +| 206 | blk.16.ffn_up_exps.weight | 0x2104bb6e0 | 0x8400000 | +| 207 | blk.17.attn_k.weight | 0x2188bb6e0 | 0xb0000 | +| 208 | blk.17.attn_k_norm.weight | 0x21896b6e0 | 0x200 | +| 209 | blk.17.attn_norm.weight | 0x21896b8e0 | 0x2000 | +| 210 | blk.17.attn_output.weight | 0x21896d8e0 | 0x690000 | +| 
211 | blk.17.attn_q.weight | 0x218ffd8e0 | 0x580000 | +| 212 | blk.17.attn_q_norm.weight | 0x21957d8e0 | 0x200 | +| 213 | blk.17.attn_v.weight | 0x21957dae0 | 0x110000 | +| 214 | blk.17.ffn_down_exps.weight | 0x21968dae0 | 0xcc00000 | +| 215 | blk.17.ffn_gate_exps.weight | 0x22628dae0 | 0x8400000 | +| 216 | blk.17.ffn_gate_inp.weight | 0x22e68dae0 | 0x100000 | +| 217 | blk.17.ffn_norm.weight | 0x22e78dae0 | 0x2000 | +| 218 | blk.17.ffn_up_exps.weight | 0x22e78fae0 | 0x8400000 | +| 219 | blk.18.attn_k.weight | 0x236b8fae0 | 0xb0000 | +| 220 | blk.18.attn_k_norm.weight | 0x236c3fae0 | 0x200 | +| 221 | blk.18.attn_norm.weight | 0x236c3fce0 | 0x2000 | +| 222 | blk.18.attn_output.weight | 0x236c41ce0 | 0x690000 | +| 223 | blk.18.attn_q.weight | 0x2372d1ce0 | 0x580000 | +| 224 | blk.18.attn_q_norm.weight | 0x237851ce0 | 0x200 | +| 225 | blk.18.attn_v.weight | 0x237851ee0 | 0x110000 | +| 226 | blk.18.ffn_down_exps.weight | 0x237961ee0 | 0xcc00000 | +| 227 | blk.18.ffn_gate_exps.weight | 0x244561ee0 | 0x8400000 | +| 228 | blk.18.ffn_gate_inp.weight | 0x24c961ee0 | 0x100000 | +| 229 | blk.18.ffn_norm.weight | 0x24ca61ee0 | 0x2000 | +| 230 | blk.18.ffn_up_exps.weight | 0x24ca63ee0 | 0x8400000 | +| 231 | blk.19.attn_k.weight | 0x254e63ee0 | 0xb0000 | +| 232 | blk.19.attn_k_norm.weight | 0x254f13ee0 | 0x200 | +| 233 | blk.19.attn_norm.weight | 0x254f140e0 | 0x2000 | +| 234 | blk.19.attn_output.weight | 0x254f160e0 | 0x690000 | +| 235 | blk.19.attn_q.weight | 0x2555a60e0 | 0x580000 | +| 236 | blk.19.attn_q_norm.weight | 0x255b260e0 | 0x200 | +| 237 | blk.19.attn_v.weight | 0x255b262e0 | 0x110000 | +| 238 | blk.19.ffn_down_exps.weight | 0x255c362e0 | 0xcc00000 | +| 239 | blk.19.ffn_gate_exps.weight | 0x2628362e0 | 0x8400000 | +| 240 | blk.19.ffn_gate_inp.weight | 0x26ac362e0 | 0x100000 | +| 241 | blk.19.ffn_norm.weight | 0x26ad362e0 | 0x2000 | +| 242 | blk.19.ffn_up_exps.weight | 0x26ad382e0 | 0x8400000 | +| 243 | blk.20.attn_k.weight | 0x2731382e0 | 0xb0000 | +| 244 | blk.20.attn_k_norm.weight | 0x2731e82e0 | 0x200 | +| 245 | blk.20.attn_norm.weight | 0x2731e84e0 | 0x2000 | +| 246 | blk.20.attn_output.weight | 0x2731ea4e0 | 0x690000 | +| 247 | blk.20.attn_q.weight | 0x27387a4e0 | 0x580000 | +| 248 | blk.20.attn_q_norm.weight | 0x273dfa4e0 | 0x200 | +| 249 | blk.20.attn_v.weight | 0x273dfa6e0 | 0x110000 | +| 250 | blk.20.ffn_down_exps.weight | 0x273f0a6e0 | 0xcc00000 | +| 251 | blk.20.ffn_gate_exps.weight | 0x280b0a6e0 | 0x8400000 | +| 252 | blk.20.ffn_gate_inp.weight | 0x288f0a6e0 | 0x100000 | +| 253 | blk.20.ffn_norm.weight | 0x28900a6e0 | 0x2000 | +| 254 | blk.20.ffn_up_exps.weight | 0x28900c6e0 | 0x8400000 | +| 255 | blk.21.attn_k.weight | 0x29140c6e0 | 0xb0000 | +| 256 | blk.21.attn_k_norm.weight | 0x2914bc6e0 | 0x200 | +| 257 | blk.21.attn_norm.weight | 0x2914bc8e0 | 0x2000 | +| 258 | blk.21.attn_output.weight | 0x2914be8e0 | 0x690000 | +| 259 | blk.21.attn_q.weight | 0x291b4e8e0 | 0x580000 | +| 260 | blk.21.attn_q_norm.weight | 0x2920ce8e0 | 0x200 | +| 261 | blk.21.attn_v.weight | 0x2920ceae0 | 0x110000 | +| 262 | blk.21.ffn_down_exps.weight | 0x2921deae0 | 0xcc00000 | +| 263 | blk.21.ffn_gate_exps.weight | 0x29eddeae0 | 0x8400000 | +| 264 | blk.21.ffn_gate_inp.weight | 0x2a71deae0 | 0x100000 | +| 265 | blk.21.ffn_norm.weight | 0x2a72deae0 | 0x2000 | +| 266 | blk.21.ffn_up_exps.weight | 0x2a72e0ae0 | 0x8400000 | +| 267 | blk.22.attn_k.weight | 0x2af6e0ae0 | 0xb0000 | +| 268 | blk.22.attn_k_norm.weight | 0x2af790ae0 | 0x200 | +| 269 | blk.22.attn_norm.weight | 0x2af790ce0 | 0x2000 | +| 270 | 
blk.22.attn_output.weight | 0x2af792ce0 | 0x690000 | +| 271 | blk.22.attn_q.weight | 0x2afe22ce0 | 0x580000 | +| 272 | blk.22.attn_q_norm.weight | 0x2b03a2ce0 | 0x200 | +| 273 | blk.22.attn_v.weight | 0x2b03a2ee0 | 0x110000 | +| 274 | blk.22.ffn_down_exps.weight | 0x2b04b2ee0 | 0xcc00000 | +| 275 | blk.22.ffn_gate_exps.weight | 0x2bd0b2ee0 | 0x8400000 | +| 276 | blk.22.ffn_gate_inp.weight | 0x2c54b2ee0 | 0x100000 | +| 277 | blk.22.ffn_norm.weight | 0x2c55b2ee0 | 0x2000 | +| 278 | blk.22.ffn_up_exps.weight | 0x2c55b4ee0 | 0x8400000 | +| 279 | blk.23.attn_k.weight | 0x2cd9b4ee0 | 0xb0000 | +| 280 | blk.23.attn_k_norm.weight | 0x2cda64ee0 | 0x200 | +| 281 | blk.23.attn_norm.weight | 0x2cda650e0 | 0x2000 | +| 282 | blk.23.attn_output.weight | 0x2cda670e0 | 0x690000 | +| 283 | blk.23.attn_q.weight | 0x2ce0f70e0 | 0x580000 | +| 284 | blk.23.attn_q_norm.weight | 0x2ce6770e0 | 0x200 | +| 285 | blk.23.attn_v.weight | 0x2ce6772e0 | 0x110000 | +| 286 | blk.23.ffn_down_exps.weight | 0x2ce7872e0 | 0xcc00000 | +| 287 | blk.23.ffn_gate_exps.weight | 0x2db3872e0 | 0x8400000 | +| 288 | blk.23.ffn_gate_inp.weight | 0x2e37872e0 | 0x100000 | +| 289 | blk.23.ffn_norm.weight | 0x2e38872e0 | 0x2000 | +| 290 | blk.23.ffn_up_exps.weight | 0x2e38892e0 | 0x8400000 | +| 291 | blk.24.attn_k.weight | 0x2ebc892e0 | 0xb0000 | +| 292 | blk.24.attn_k_norm.weight | 0x2ebd392e0 | 0x200 | +| 293 | blk.24.attn_norm.weight | 0x2ebd394e0 | 0x2000 | +| 294 | blk.24.attn_output.weight | 0x2ebd3b4e0 | 0x690000 | +| 295 | blk.24.attn_q.weight | 0x2ec3cb4e0 | 0x580000 | +| 296 | blk.24.attn_q_norm.weight | 0x2ec94b4e0 | 0x200 | +| 297 | blk.24.attn_v.weight | 0x2ec94b6e0 | 0x110000 | +| 298 | blk.24.ffn_down_exps.weight | 0x2eca5b6e0 | 0xcc00000 | +| 299 | blk.24.ffn_gate_exps.weight | 0x2f965b6e0 | 0x8400000 | +| 300 | blk.24.ffn_gate_inp.weight | 0x301a5b6e0 | 0x100000 | +| 301 | blk.24.ffn_norm.weight | 0x301b5b6e0 | 0x2000 | +| 302 | blk.24.ffn_up_exps.weight | 0x301b5d6e0 | 0x8400000 | +| 303 | blk.25.attn_k.weight | 0x309f5d6e0 | 0xb0000 | +| 304 | blk.25.attn_k_norm.weight | 0x30a00d6e0 | 0x200 | +| 305 | blk.25.attn_norm.weight | 0x30a00d8e0 | 0x2000 | +| 306 | blk.25.attn_output.weight | 0x30a00f8e0 | 0x690000 | +| 307 | blk.25.attn_q.weight | 0x30a69f8e0 | 0x580000 | +| 308 | blk.25.attn_q_norm.weight | 0x30ac1f8e0 | 0x200 | +| 309 | blk.25.attn_v.weight | 0x30ac1fae0 | 0x110000 | +| 310 | blk.25.ffn_down_exps.weight | 0x30ad2fae0 | 0xcc00000 | +| 311 | blk.25.ffn_gate_exps.weight | 0x31792fae0 | 0x8400000 | +| 312 | blk.25.ffn_gate_inp.weight | 0x31fd2fae0 | 0x100000 | +| 313 | blk.25.ffn_norm.weight | 0x31fe2fae0 | 0x2000 | +| 314 | blk.25.ffn_up_exps.weight | 0x31fe31ae0 | 0x8400000 | +| 315 | blk.26.attn_k.weight | 0x328231ae0 | 0xb0000 | +| 316 | blk.26.attn_k_norm.weight | 0x3282e1ae0 | 0x200 | +| 317 | blk.26.attn_norm.weight | 0x3282e1ce0 | 0x2000 | +| 318 | blk.26.attn_output.weight | 0x3282e3ce0 | 0x690000 | +| 319 | blk.26.attn_q.weight | 0x328973ce0 | 0x580000 | +| 320 | blk.26.attn_q_norm.weight | 0x328ef3ce0 | 0x200 | +| 321 | blk.26.attn_v.weight | 0x328ef3ee0 | 0x110000 | +| 322 | blk.26.ffn_down_exps.weight | 0x329003ee0 | 0xcc00000 | +| 323 | blk.26.ffn_gate_exps.weight | 0x335c03ee0 | 0x8400000 | +| 324 | blk.26.ffn_gate_inp.weight | 0x33e003ee0 | 0x100000 | +| 325 | blk.26.ffn_norm.weight | 0x33e103ee0 | 0x2000 | +| 326 | blk.26.ffn_up_exps.weight | 0x33e105ee0 | 0x8400000 | +| 327 | blk.27.attn_k.weight | 0x346505ee0 | 0xb0000 | +| 328 | blk.27.attn_k_norm.weight | 0x3465b5ee0 | 0x200 | +| 329 | 
blk.27.attn_norm.weight | 0x3465b60e0 | 0x2000 | +| 330 | blk.27.attn_output.weight | 0x3465b80e0 | 0x690000 | +| 331 | blk.27.attn_q.weight | 0x346c480e0 | 0x580000 | +| 332 | blk.27.attn_q_norm.weight | 0x3471c80e0 | 0x200 | +| 333 | blk.27.attn_v.weight | 0x3471c82e0 | 0x110000 | +| 334 | blk.27.ffn_down_exps.weight | 0x3472d82e0 | 0xcc00000 | +| 335 | blk.27.ffn_gate_exps.weight | 0x353ed82e0 | 0x8400000 | +| 336 | blk.27.ffn_gate_inp.weight | 0x35c2d82e0 | 0x100000 | +| 337 | blk.27.ffn_norm.weight | 0x35c3d82e0 | 0x2000 | +| 338 | blk.27.ffn_up_exps.weight | 0x35c3da2e0 | 0x8400000 | +| 339 | blk.28.attn_k.weight | 0x3647da2e0 | 0xb0000 | +| 340 | blk.28.attn_k_norm.weight | 0x36488a2e0 | 0x200 | +| 341 | blk.28.attn_norm.weight | 0x36488a4e0 | 0x2000 | +| 342 | blk.28.attn_output.weight | 0x36488c4e0 | 0x690000 | +| 343 | blk.28.attn_q.weight | 0x364f1c4e0 | 0x580000 | +| 344 | blk.28.attn_q_norm.weight | 0x36549c4e0 | 0x200 | +| 345 | blk.28.attn_v.weight | 0x36549c6e0 | 0x110000 | +| 346 | blk.28.ffn_down_exps.weight | 0x3655ac6e0 | 0xcc00000 | +| 347 | blk.28.ffn_gate_exps.weight | 0x3721ac6e0 | 0x8400000 | +| 348 | blk.28.ffn_gate_inp.weight | 0x37a5ac6e0 | 0x100000 | +| 349 | blk.28.ffn_norm.weight | 0x37a6ac6e0 | 0x2000 | +| 350 | blk.28.ffn_up_exps.weight | 0x37a6ae6e0 | 0x8400000 | +| 351 | blk.29.attn_k.weight | 0x382aae6e0 | 0xb0000 | +| 352 | blk.29.attn_k_norm.weight | 0x382b5e6e0 | 0x200 | +| 353 | blk.29.attn_norm.weight | 0x382b5e8e0 | 0x2000 | +| 354 | blk.29.attn_output.weight | 0x382b608e0 | 0x690000 | +| 355 | blk.29.attn_q.weight | 0x3831f08e0 | 0x580000 | +| 356 | blk.29.attn_q_norm.weight | 0x3837708e0 | 0x200 | +| 357 | blk.29.attn_v.weight | 0x383770ae0 | 0x110000 | +| 358 | blk.29.ffn_down_exps.weight | 0x383880ae0 | 0xcc00000 | +| 359 | blk.29.ffn_gate_exps.weight | 0x390480ae0 | 0x8400000 | +| 360 | blk.29.ffn_gate_inp.weight | 0x398880ae0 | 0x100000 | +| 361 | blk.29.ffn_norm.weight | 0x398980ae0 | 0x2000 | +| 362 | blk.29.ffn_up_exps.weight | 0x398982ae0 | 0x8400000 | +| 363 | blk.30.attn_k.weight | 0x3a0d82ae0 | 0xb0000 | +| 364 | blk.30.attn_k_norm.weight | 0x3a0e32ae0 | 0x200 | +| 365 | blk.30.attn_norm.weight | 0x3a0e32ce0 | 0x2000 | +| 366 | blk.30.attn_output.weight | 0x3a0e34ce0 | 0x690000 | +| 367 | blk.30.attn_q.weight | 0x3a14c4ce0 | 0x580000 | +| 368 | blk.30.attn_q_norm.weight | 0x3a1a44ce0 | 0x200 | +| 369 | blk.30.attn_v.weight | 0x3a1a44ee0 | 0x110000 | +| 370 | blk.30.ffn_down_exps.weight | 0x3a1b54ee0 | 0xcc00000 | +| 371 | blk.30.ffn_gate_exps.weight | 0x3ae754ee0 | 0x8400000 | +| 372 | blk.30.ffn_gate_inp.weight | 0x3b6b54ee0 | 0x100000 | +| 373 | blk.30.ffn_norm.weight | 0x3b6c54ee0 | 0x2000 | +| 374 | blk.30.ffn_up_exps.weight | 0x3b6c56ee0 | 0x8400000 | +| 375 | blk.31.attn_k.weight | 0x3bf056ee0 | 0xb0000 | +| 376 | blk.31.attn_k_norm.weight | 0x3bf106ee0 | 0x200 | +| 377 | blk.31.attn_norm.weight | 0x3bf1070e0 | 0x2000 | +| 378 | blk.31.attn_output.weight | 0x3bf1090e0 | 0x690000 | +| 379 | blk.31.attn_q.weight | 0x3bf7990e0 | 0x580000 | +| 380 | blk.31.attn_q_norm.weight | 0x3bfd190e0 | 0x200 | +| 381 | blk.31.attn_v.weight | 0x3bfd192e0 | 0x110000 | +| 382 | blk.31.ffn_down_exps.weight | 0x3bfe292e0 | 0xcc00000 | +| 383 | blk.31.ffn_gate_exps.weight | 0x3cca292e0 | 0x8400000 | +| 384 | blk.31.ffn_gate_inp.weight | 0x3d4e292e0 | 0x100000 | +| 385 | blk.31.ffn_norm.weight | 0x3d4f292e0 | 0x2000 | +| 386 | blk.31.ffn_up_exps.weight | 0x3d4f2b2e0 | 0x8400000 | +| 387 | blk.32.attn_k.weight | 0x3dd32b2e0 | 0xb0000 | +| 388 | 
blk.32.attn_k_norm.weight | 0x3dd3db2e0 | 0x200 | +| 389 | blk.32.attn_norm.weight | 0x3dd3db4e0 | 0x2000 | +| 390 | blk.32.attn_output.weight | 0x3dd3dd4e0 | 0x690000 | +| 391 | blk.32.attn_q.weight | 0x3dda6d4e0 | 0x580000 | +| 392 | blk.32.attn_q_norm.weight | 0x3ddfed4e0 | 0x200 | +| 393 | blk.32.attn_v.weight | 0x3ddfed6e0 | 0x110000 | +| 394 | blk.32.ffn_down_exps.weight | 0x3de0fd6e0 | 0xcc00000 | +| 395 | blk.32.ffn_gate_exps.weight | 0x3eacfd6e0 | 0x8400000 | +| 396 | blk.32.ffn_gate_inp.weight | 0x3f30fd6e0 | 0x100000 | +| 397 | blk.32.ffn_norm.weight | 0x3f31fd6e0 | 0x2000 | +| 398 | blk.32.ffn_up_exps.weight | 0x3f31ff6e0 | 0x8400000 | +| 399 | blk.33.attn_k.weight | 0x3fb5ff6e0 | 0xb0000 | +| 400 | blk.33.attn_k_norm.weight | 0x3fb6af6e0 | 0x200 | +| 401 | blk.33.attn_norm.weight | 0x3fb6af8e0 | 0x2000 | +| 402 | blk.33.attn_output.weight | 0x3fb6b18e0 | 0x690000 | +| 403 | blk.33.attn_q.weight | 0x3fbd418e0 | 0x580000 | +| 404 | blk.33.attn_q_norm.weight | 0x3fc2c18e0 | 0x200 | +| 405 | blk.33.attn_v.weight | 0x3fc2c1ae0 | 0x110000 | +| 406 | blk.33.ffn_down_exps.weight | 0x3fc3d1ae0 | 0xcc00000 | +| 407 | blk.33.ffn_gate_exps.weight | 0x408fd1ae0 | 0x8400000 | +| 408 | blk.33.ffn_gate_inp.weight | 0x4113d1ae0 | 0x100000 | +| 409 | blk.33.ffn_norm.weight | 0x4114d1ae0 | 0x2000 | +| 410 | blk.33.ffn_up_exps.weight | 0x4114d3ae0 | 0x8400000 | +| 411 | blk.34.attn_k.weight | 0x4198d3ae0 | 0xb0000 | +| 412 | blk.34.attn_k_norm.weight | 0x419983ae0 | 0x200 | +| 413 | blk.34.attn_norm.weight | 0x419983ce0 | 0x2000 | +| 414 | blk.34.attn_output.weight | 0x419985ce0 | 0x690000 | +| 415 | blk.34.attn_q.weight | 0x41a015ce0 | 0x580000 | +| 416 | blk.34.attn_q_norm.weight | 0x41a595ce0 | 0x200 | +| 417 | blk.34.attn_v.weight | 0x41a595ee0 | 0x110000 | +| 418 | blk.34.ffn_down_exps.weight | 0x41a6a5ee0 | 0xcc00000 | +| 419 | blk.34.ffn_gate_exps.weight | 0x4272a5ee0 | 0x8400000 | +| 420 | blk.34.ffn_gate_inp.weight | 0x42f6a5ee0 | 0x100000 | +| 421 | blk.34.ffn_norm.weight | 0x42f7a5ee0 | 0x2000 | +| 422 | blk.34.ffn_up_exps.weight | 0x42f7a7ee0 | 0x8400000 | +| 423 | blk.35.attn_k.weight | 0x437ba7ee0 | 0xb0000 | +| 424 | blk.35.attn_k_norm.weight | 0x437c57ee0 | 0x200 | +| 425 | blk.35.attn_norm.weight | 0x437c580e0 | 0x2000 | +| 426 | blk.35.attn_output.weight | 0x437c5a0e0 | 0x690000 | +| 427 | blk.35.attn_q.weight | 0x4382ea0e0 | 0x580000 | +| 428 | blk.35.attn_q_norm.weight | 0x43886a0e0 | 0x200 | +| 429 | blk.35.attn_v.weight | 0x43886a2e0 | 0x110000 | +| 430 | blk.35.ffn_down_exps.weight | 0x43897a2e0 | 0xcc00000 | +| 431 | blk.35.ffn_gate_exps.weight | 0x44557a2e0 | 0x8400000 | +| 432 | blk.35.ffn_gate_inp.weight | 0x44d97a2e0 | 0x100000 | +| 433 | blk.35.ffn_norm.weight | 0x44da7a2e0 | 0x2000 | +| 434 | blk.35.ffn_up_exps.weight | 0x44da7c2e0 | 0x8400000 | +| 435 | blk.36.attn_k.weight | 0x455e7c2e0 | 0xd2000 | +| 436 | blk.36.attn_k_norm.weight | 0x455f4e2e0 | 0x200 | +| 437 | blk.36.attn_norm.weight | 0x455f4e4e0 | 0x2000 | +| 438 | blk.36.attn_output.weight | 0x455f504e0 | 0x690000 | +| 439 | blk.36.attn_q.weight | 0x4565e04e0 | 0x690000 | +| 440 | blk.36.attn_q_norm.weight | 0x456c704e0 | 0x200 | +| 441 | blk.36.attn_v.weight | 0x456c706e0 | 0x110000 | +| 442 | blk.36.ffn_down_exps.weight | 0x456d806e0 | 0xcc00000 | +| 443 | blk.36.ffn_gate_exps.weight | 0x4639806e0 | 0x9d80000 | +| 444 | blk.36.ffn_gate_inp.weight | 0x46d7006e0 | 0x100000 | +| 445 | blk.36.ffn_norm.weight | 0x46d8006e0 | 0x2000 | +| 446 | blk.36.ffn_up_exps.weight | 0x46d8026e0 | 0x9d80000 | +| 447 | 
blk.37.attn_k.weight | 0x4775826e0 | 0xd2000 | +| 448 | blk.37.attn_k_norm.weight | 0x4776546e0 | 0x200 | +| 449 | blk.37.attn_norm.weight | 0x4776548e0 | 0x2000 | +| 450 | blk.37.attn_output.weight | 0x4776568e0 | 0x690000 | +| 451 | blk.37.attn_q.weight | 0x477ce68e0 | 0x690000 | +| 452 | blk.37.attn_q_norm.weight | 0x4783768e0 | 0x200 | +| 453 | blk.37.attn_v.weight | 0x478376ae0 | 0x110000 | +| 454 | blk.37.ffn_down_exps.weight | 0x478486ae0 | 0xcc00000 | +| 455 | blk.37.ffn_gate_exps.weight | 0x485086ae0 | 0x9d80000 | +| 456 | blk.37.ffn_gate_inp.weight | 0x48ee06ae0 | 0x100000 | +| 457 | blk.37.ffn_norm.weight | 0x48ef06ae0 | 0x2000 | +| 458 | blk.37.ffn_up_exps.weight | 0x48ef08ae0 | 0x9d80000 | +| 459 | blk.38.attn_k.weight | 0x498c88ae0 | 0xd2000 | +| 460 | blk.38.attn_k_norm.weight | 0x498d5aae0 | 0x200 | +| 461 | blk.38.attn_norm.weight | 0x498d5ace0 | 0x2000 | +| 462 | blk.38.attn_output.weight | 0x498d5cce0 | 0x690000 | +| 463 | blk.38.attn_q.weight | 0x4993ecce0 | 0x690000 | +| 464 | blk.38.attn_q_norm.weight | 0x499a7cce0 | 0x200 | +| 465 | blk.38.attn_v.weight | 0x499a7cee0 | 0x110000 | +| 466 | blk.38.ffn_down_exps.weight | 0x499b8cee0 | 0xcc00000 | +| 467 | blk.38.ffn_gate_exps.weight | 0x4a678cee0 | 0x9d80000 | +| 468 | blk.38.ffn_gate_inp.weight | 0x4b050cee0 | 0x100000 | +| 469 | blk.38.ffn_norm.weight | 0x4b060cee0 | 0x2000 | +| 470 | blk.38.ffn_up_exps.weight | 0x4b060eee0 | 0x9d80000 | +| 471 | blk.39.attn_k.weight | 0x4ba38eee0 | 0xd2000 | +| 472 | blk.39.attn_k_norm.weight | 0x4ba460ee0 | 0x200 | +| 473 | blk.39.attn_norm.weight | 0x4ba4610e0 | 0x2000 | +| 474 | blk.39.attn_output.weight | 0x4ba4630e0 | 0x690000 | +| 475 | blk.39.attn_q.weight | 0x4baaf30e0 | 0x690000 | +| 476 | blk.39.attn_q_norm.weight | 0x4bb1830e0 | 0x200 | +| 477 | blk.39.attn_v.weight | 0x4bb1832e0 | 0x110000 | +| 478 | blk.39.ffn_down_exps.weight | 0x4bb2932e0 | 0xcc00000 | +| 479 | blk.39.ffn_gate_exps.weight | 0x4c7e932e0 | 0x9d80000 | +| 480 | blk.39.ffn_gate_inp.weight | 0x4d1c132e0 | 0x100000 | +| 481 | blk.39.ffn_norm.weight | 0x4d1d132e0 | 0x2000 | +| 482 | blk.39.ffn_up_exps.weight | 0x4d1d152e0 | 0x9d80000 | +| 483 | blk.40.attn_k.weight | 0x4dba952e0 | 0xd2000 | +| 484 | blk.40.attn_k_norm.weight | 0x4dbb672e0 | 0x200 | +| 485 | blk.40.attn_norm.weight | 0x4dbb674e0 | 0x2000 | +| 486 | blk.40.attn_output.weight | 0x4dbb694e0 | 0x690000 | +| 487 | blk.40.attn_q.weight | 0x4dc1f94e0 | 0x690000 | +| 488 | blk.40.attn_q_norm.weight | 0x4dc8894e0 | 0x200 | +| 489 | blk.40.attn_v.weight | 0x4dc8896e0 | 0x110000 | +| 490 | blk.40.ffn_down_exps.weight | 0x4dc9996e0 | 0xcc00000 | +| 491 | blk.40.ffn_gate_exps.weight | 0x4e95996e0 | 0x9d80000 | +| 492 | blk.40.ffn_gate_inp.weight | 0x4f33196e0 | 0x100000 | +| 493 | blk.40.ffn_norm.weight | 0x4f34196e0 | 0x2000 | +| 494 | blk.40.ffn_up_exps.weight | 0x4f341b6e0 | 0x9d80000 | +| 495 | blk.41.attn_k.weight | 0x4fd19b6e0 | 0xd2000 | +| 496 | blk.41.attn_k_norm.weight | 0x4fd26d6e0 | 0x200 | +| 497 | blk.41.attn_norm.weight | 0x4fd26d8e0 | 0x2000 | +| 498 | blk.41.attn_output.weight | 0x4fd26f8e0 | 0x690000 | +| 499 | blk.41.attn_q.weight | 0x4fd8ff8e0 | 0x690000 | +| 500 | blk.41.attn_q_norm.weight | 0x4fdf8f8e0 | 0x200 | +| 501 | blk.41.attn_v.weight | 0x4fdf8fae0 | 0x110000 | +| 502 | blk.41.ffn_down_exps.weight | 0x4fe09fae0 | 0xcc00000 | +| 503 | blk.41.ffn_gate_exps.weight | 0x50ac9fae0 | 0x9d80000 | +| 504 | blk.41.ffn_gate_inp.weight | 0x514a1fae0 | 0x100000 | +| 505 | blk.41.ffn_norm.weight | 0x514b1fae0 | 0x2000 | +| 506 | 
blk.41.ffn_up_exps.weight | 0x514b21ae0 | 0x9d80000 | +| 507 | blk.42.attn_k.weight | 0x51e8a1ae0 | 0xd2000 | +| 508 | blk.42.attn_k_norm.weight | 0x51e973ae0 | 0x200 | +| 509 | blk.42.attn_norm.weight | 0x51e973ce0 | 0x2000 | +| 510 | blk.42.attn_output.weight | 0x51e975ce0 | 0x690000 | +| 511 | blk.42.attn_q.weight | 0x51f005ce0 | 0x690000 | +| 512 | blk.42.attn_q_norm.weight | 0x51f695ce0 | 0x200 | +| 513 | blk.42.attn_v.weight | 0x51f695ee0 | 0x110000 | +| 514 | blk.42.ffn_down_exps.weight | 0x51f7a5ee0 | 0xcc00000 | +| 515 | blk.42.ffn_gate_exps.weight | 0x52c3a5ee0 | 0x9d80000 | +| 516 | blk.42.ffn_gate_inp.weight | 0x536125ee0 | 0x100000 | +| 517 | blk.42.ffn_norm.weight | 0x536225ee0 | 0x2000 | +| 518 | blk.42.ffn_up_exps.weight | 0x536227ee0 | 0x9d80000 | +| 519 | blk.43.attn_k.weight | 0x53ffa7ee0 | 0xd2000 | +| 520 | blk.43.attn_k_norm.weight | 0x540079ee0 | 0x200 | +| 521 | blk.43.attn_norm.weight | 0x54007a0e0 | 0x2000 | +| 522 | blk.43.attn_output.weight | 0x54007c0e0 | 0x690000 | +| 523 | blk.43.attn_q.weight | 0x54070c0e0 | 0x690000 | +| 524 | blk.43.attn_q_norm.weight | 0x540d9c0e0 | 0x200 | +| 525 | blk.43.attn_v.weight | 0x540d9c2e0 | 0x110000 | +| 526 | blk.43.ffn_down_exps.weight | 0x540eac2e0 | 0xcc00000 | +| 527 | blk.43.ffn_gate_exps.weight | 0x54daac2e0 | 0x9d80000 | +| 528 | blk.43.ffn_gate_inp.weight | 0x55782c2e0 | 0x100000 | +| 529 | blk.43.ffn_norm.weight | 0x55792c2e0 | 0x2000 | +| 530 | blk.43.ffn_up_exps.weight | 0x55792e2e0 | 0x9d80000 | +| 531 | blk.44.attn_k.weight | 0x5616ae2e0 | 0xd2000 | +| 532 | blk.44.attn_k_norm.weight | 0x5617802e0 | 0x200 | +| 533 | blk.44.attn_norm.weight | 0x5617804e0 | 0x2000 | +| 534 | blk.44.attn_output.weight | 0x5617824e0 | 0x690000 | +| 535 | blk.44.attn_q.weight | 0x561e124e0 | 0x690000 | +| 536 | blk.44.attn_q_norm.weight | 0x5624a24e0 | 0x200 | +| 537 | blk.44.attn_v.weight | 0x5624a26e0 | 0x110000 | +| 538 | blk.44.ffn_down_exps.weight | 0x5625b26e0 | 0xcc00000 | +| 539 | blk.44.ffn_gate_exps.weight | 0x56f1b26e0 | 0x9d80000 | +| 540 | blk.44.ffn_gate_inp.weight | 0x578f326e0 | 0x100000 | +| 541 | blk.44.ffn_norm.weight | 0x5790326e0 | 0x2000 | +| 542 | blk.44.ffn_up_exps.weight | 0x5790346e0 | 0x9d80000 | +| 543 | blk.45.attn_k.weight | 0x582db46e0 | 0xd2000 | +| 544 | blk.45.attn_k_norm.weight | 0x582e866e0 | 0x200 | +| 545 | blk.45.attn_norm.weight | 0x582e868e0 | 0x2000 | +| 546 | blk.45.attn_output.weight | 0x582e888e0 | 0x690000 | +| 547 | blk.45.attn_q.weight | 0x5835188e0 | 0x690000 | +| 548 | blk.45.attn_q_norm.weight | 0x583ba88e0 | 0x200 | +| 549 | blk.45.attn_v.weight | 0x583ba8ae0 | 0x110000 | +| 550 | blk.45.ffn_down_exps.weight | 0x583cb8ae0 | 0xcc00000 | +| 551 | blk.45.ffn_gate_exps.weight | 0x5908b8ae0 | 0x9d80000 | +| 552 | blk.45.ffn_gate_inp.weight | 0x59a638ae0 | 0x100000 | +| 553 | blk.45.ffn_norm.weight | 0x59a738ae0 | 0x2000 | +| 554 | blk.45.ffn_up_exps.weight | 0x59a73aae0 | 0x9d80000 | + +### Base Tensor Group : ~622M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-----| +| 0 | output.weight | Output (W) | (~311M) 311164928 | 2048 x 151936 x 1 x 1 | Q6_K | +| 1 | output_norm.weight | Output Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~311M) 311164928 | 2048 x 151936 x 1 x 1 | Q3_K | + +- Total elements in base: (~622M) 622331904 +- Percentage of total elements: 
2.13% + + +### Block 0 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 3 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 4 | blk.0.attn_k_norm.weight | Block 0 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 6 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 7 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 8 | blk.0.attn_q_norm.weight | Block 0 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 9 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 10 | blk.0.ffn_down_exps.weight | Block 0 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 11 | blk.0.ffn_gate_exps.weight | Block 0 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 12 | blk.0.ffn_gate_inp.weight | Block 0 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 13 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 14 | blk.0.ffn_up_exps.weight | Block 0 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.0: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 1 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 15 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 16 | blk.1.attn_k_norm.weight | Block 1 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 17 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 18 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 19 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 20 | blk.1.attn_q_norm.weight | Block 1 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 21 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 22 | blk.1.ffn_down_exps.weight | Block 1 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 23 | blk.1.ffn_gate_exps.weight | Block 1 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 24 | blk.1.ffn_gate_inp.weight | Block 1 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 25 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 26 | blk.1.ffn_up_exps.weight | Block 1 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.1: (~623M) 623120640 +- Percentage of total elements: 2.13% + + 
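+The quantization type of each tensor, together with ggml's fixed block sizes, fully determines the "Data Size (B)" column in the offset table above. A minimal sketch of that arithmetic (the block-size constants are ggml's standard values for these types; `tensor_bytes` is just an illustrative helper, not part of any dump tool):
+
+```python
+# Elements per quantization block and bytes per block in ggml/GGUF.
+# Q3_K/Q5_K/Q6_K use 256-element "K-quant" super-blocks; Q8_0 uses a
+# 32-element block with a 2-byte scale; F32 is unquantized.
+BLOCK_INFO = {
+    "F32":  (1,   4),
+    "Q8_0": (32,  34),
+    "Q3_K": (256, 110),
+    "Q5_K": (256, 176),
+    "Q6_K": (256, 210),
+}
+
+def tensor_bytes(n_elements: int, ggml_type: str) -> int:
+    """On-disk size of one tensor, as in the 'Data Size (B)' column."""
+    elems_per_block, bytes_per_block = BLOCK_INFO[ggml_type]
+    assert n_elements % elems_per_block == 0
+    return (n_elements // elems_per_block) * bytes_per_block
+
+# Cross-checks against rows of this dump:
+assert tensor_bytes(1_048_576, "Q5_K") == 0xB0000      # blk.1.attn_k.weight
+assert tensor_bytes(1_048_576, "Q8_0") == 0x110000     # blk.1.attn_v.weight
+assert tensor_bytes(201_326_592, "Q8_0") == 0xCC00000  # blk.1.ffn_down_exps.weight
+assert tensor_bytes(311_164_928, "Q6_K") == 0xF36D800  # output.weight
+```
+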
+### Block 2 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 27 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 28 | blk.2.attn_k_norm.weight | Block 2 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 29 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 30 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 31 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 32 | blk.2.attn_q_norm.weight | Block 2 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 33 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 34 | blk.2.ffn_down_exps.weight | Block 2 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 35 | blk.2.ffn_gate_exps.weight | Block 2 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 36 | blk.2.ffn_gate_inp.weight | Block 2 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 37 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 38 | blk.2.ffn_up_exps.weight | Block 2 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.2: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 3 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 39 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 40 | blk.3.attn_k_norm.weight | Block 3 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 41 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 42 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 43 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 44 | blk.3.attn_q_norm.weight | Block 3 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 45 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 46 | blk.3.ffn_down_exps.weight | Block 3 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 47 | blk.3.ffn_gate_exps.weight | Block 3 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 48 | blk.3.ffn_gate_inp.weight | Block 3 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 49 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 50 | blk.3.ffn_up_exps.weight | Block 3 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.3: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### 
Block 4 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 51 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 52 | blk.4.attn_k_norm.weight | Block 4 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 53 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 54 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 55 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 56 | blk.4.attn_q_norm.weight | Block 4 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 57 | blk.4.attn_v.weight | Block 4 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 58 | blk.4.ffn_down_exps.weight | Block 4 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 59 | blk.4.ffn_gate_exps.weight | Block 4 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 60 | blk.4.ffn_gate_inp.weight | Block 4 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 61 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 62 | blk.4.ffn_up_exps.weight | Block 4 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.4: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 5 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 63 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 64 | blk.5.attn_k_norm.weight | Block 5 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 65 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 66 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 67 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 68 | blk.5.attn_q_norm.weight | Block 5 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 69 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 70 | blk.5.ffn_down_exps.weight | Block 5 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 71 | blk.5.ffn_gate_exps.weight | Block 5 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 72 | blk.5.ffn_gate_inp.weight | Block 5 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 73 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 74 | blk.5.ffn_up_exps.weight | Block 5 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.5: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 
6 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 75 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 76 | blk.6.attn_k_norm.weight | Block 6 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 77 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 78 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 79 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 80 | blk.6.attn_q_norm.weight | Block 6 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 81 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 82 | blk.6.ffn_down_exps.weight | Block 6 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 83 | blk.6.ffn_gate_exps.weight | Block 6 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 84 | blk.6.ffn_gate_inp.weight | Block 6 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 85 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 86 | blk.6.ffn_up_exps.weight | Block 6 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.6: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 7 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 87 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 88 | blk.7.attn_k_norm.weight | Block 7 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 89 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 90 | blk.7.attn_output.weight | Block 7 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 91 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 92 | blk.7.attn_q_norm.weight | Block 7 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 93 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 94 | blk.7.ffn_down_exps.weight | Block 7 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 95 | blk.7.ffn_gate_exps.weight | Block 7 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 96 | blk.7.ffn_gate_inp.weight | Block 7 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 97 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 98 | blk.7.ffn_up_exps.weight | Block 7 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.7: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 8 
Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 99 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 100 | blk.8.attn_k_norm.weight | Block 8 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 101 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 102 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 103 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 104 | blk.8.attn_q_norm.weight | Block 8 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 105 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 106 | blk.8.ffn_down_exps.weight | Block 8 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 107 | blk.8.ffn_gate_exps.weight | Block 8 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 108 | blk.8.ffn_gate_inp.weight | Block 8 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 109 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 110 | blk.8.ffn_up_exps.weight | Block 8 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.8: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 9 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 111 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 112 | blk.9.attn_k_norm.weight | Block 9 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 113 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 114 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 115 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 116 | blk.9.attn_q_norm.weight | Block 9 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 117 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 118 | blk.9.ffn_down_exps.weight | Block 9 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 119 | blk.9.ffn_gate_exps.weight | Block 9 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 120 | blk.9.ffn_gate_inp.weight | Block 9 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 121 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 122 | blk.9.ffn_up_exps.weight | Block 9 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.9: (~623M) 623120640 +- Percentage of total elements: 2.13% 
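+
+These per-group element totals and percentages can be recomputed straight from the GGUF file, e.g. with the `gguf` package that ships with llama.cpp (`pip install gguf`). A sketch, assuming the dump's source file sits in the working directory; the attribute names (`tensors`, `n_elements`) follow `gguf.GGUFReader` as I understand it, so treat them as assumptions:
+
+```python
+from collections import defaultdict
+from gguf import GGUFReader
+
+reader = GGUFReader("Qwen3-30B-A3B-pruned-Q6_K.gguf")  # assumed local path
+
+groups: dict[str, int] = defaultdict(int)
+for t in reader.tensors:
+    # "blk.<n>.*" tensors form the per-block groups; everything else
+    # (token_embd, output, output_norm) is the base group.
+    key = f"blk.{t.name.split('.')[1]}" if t.name.startswith("blk.") else "base"
+    groups[key] += int(t.n_elements)
+
+total = sum(groups.values())
+for key, n in groups.items():
+    print(f"{key:>8}: {n:>13,} elements ({n / total:.2%})")
+# Expected: base -> 622,331,904 (2.13%); each blk.N -> 623,120,640 (2.13%)
+```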
+ + +### Block 10 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 123 | blk.10.attn_k.weight | Block 10 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 124 | blk.10.attn_k_norm.weight | Block 10 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 125 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 126 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 127 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 128 | blk.10.attn_q_norm.weight | Block 10 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 129 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 130 | blk.10.ffn_down_exps.weight | Block 10 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 131 | blk.10.ffn_gate_exps.weight | Block 10 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 132 | blk.10.ffn_gate_inp.weight | Block 10 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 133 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 134 | blk.10.ffn_up_exps.weight | Block 10 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.10: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 11 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 135 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 136 | blk.11.attn_k_norm.weight | Block 11 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 137 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 138 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 139 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 140 | blk.11.attn_q_norm.weight | Block 11 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 141 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 142 | blk.11.ffn_down_exps.weight | Block 11 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 143 | blk.11.ffn_gate_exps.weight | Block 11 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 144 | blk.11.ffn_gate_inp.weight | Block 11 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 145 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 146 | blk.11.ffn_up_exps.weight | Block 11 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total 
elements in blk.11: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 12 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 147 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 148 | blk.12.attn_k_norm.weight | Block 12 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 149 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 150 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 151 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 152 | blk.12.attn_q_norm.weight | Block 12 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 153 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 154 | blk.12.ffn_down_exps.weight | Block 12 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 155 | blk.12.ffn_gate_exps.weight | Block 12 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 156 | blk.12.ffn_gate_inp.weight | Block 12 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 157 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 158 | blk.12.ffn_up_exps.weight | Block 12 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.12: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 13 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 159 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 160 | blk.13.attn_k_norm.weight | Block 13 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 161 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 162 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 163 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 164 | blk.13.attn_q_norm.weight | Block 13 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 165 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 166 | blk.13.ffn_down_exps.weight | Block 13 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 167 | blk.13.ffn_gate_exps.weight | Block 13 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 168 | blk.13.ffn_gate_inp.weight | Block 13 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 169 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 170 | blk.13.ffn_up_exps.weight | Block 13 
Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.13: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 14 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 171 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 172 | blk.14.attn_k_norm.weight | Block 14 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 173 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 174 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 175 | blk.14.attn_q.weight | Block 14 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 176 | blk.14.attn_q_norm.weight | Block 14 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 177 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 178 | blk.14.ffn_down_exps.weight | Block 14 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 179 | blk.14.ffn_gate_exps.weight | Block 14 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 180 | blk.14.ffn_gate_inp.weight | Block 14 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 181 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 182 | blk.14.ffn_up_exps.weight | Block 14 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.14: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 15 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 183 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 184 | blk.15.attn_k_norm.weight | Block 15 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 185 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 186 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 187 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 188 | blk.15.attn_q_norm.weight | Block 15 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 189 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 190 | blk.15.ffn_down_exps.weight | Block 15 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 191 | blk.15.ffn_gate_exps.weight | Block 15 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 192 | blk.15.ffn_gate_inp.weight | Block 15 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 193 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~2K) 2048 
| 2048 x 1 x 1 x 1 | F32 | +| 194 | blk.15.ffn_up_exps.weight | Block 15 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.15: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 16 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 195 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 196 | blk.16.attn_k_norm.weight | Block 16 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 197 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 198 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 199 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 200 | blk.16.attn_q_norm.weight | Block 16 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 201 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 202 | blk.16.ffn_down_exps.weight | Block 16 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 203 | blk.16.ffn_gate_exps.weight | Block 16 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 204 | blk.16.ffn_gate_inp.weight | Block 16 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 205 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 206 | blk.16.ffn_up_exps.weight | Block 16 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.16: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 17 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 207 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 208 | blk.17.attn_k_norm.weight | Block 17 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 209 | blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 210 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 211 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 212 | blk.17.attn_q_norm.weight | Block 17 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 213 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 214 | blk.17.ffn_down_exps.weight | Block 17 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 215 | blk.17.ffn_gate_exps.weight | Block 17 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 216 | blk.17.ffn_gate_inp.weight | Block 17 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 217 | 
blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 218 | blk.17.ffn_up_exps.weight | Block 17 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.17: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 18 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 219 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 220 | blk.18.attn_k_norm.weight | Block 18 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 221 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 222 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 223 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 224 | blk.18.attn_q_norm.weight | Block 18 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 225 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 226 | blk.18.ffn_down_exps.weight | Block 18 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 227 | blk.18.ffn_gate_exps.weight | Block 18 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 228 | blk.18.ffn_gate_inp.weight | Block 18 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 229 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 230 | blk.18.ffn_up_exps.weight | Block 18 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.18: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 19 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 231 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 232 | blk.19.attn_k_norm.weight | Block 19 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 233 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 234 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 235 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 236 | blk.19.attn_q_norm.weight | Block 19 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 237 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 238 | blk.19.ffn_down_exps.weight | Block 19 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 239 | blk.19.ffn_gate_exps.weight | Block 19 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 240 | blk.19.ffn_gate_inp.weight | Block 19 Expert-Routing Layer For The Feed-Forward Network In Mixture Of 
Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 241 | blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 242 | blk.19.ffn_up_exps.weight | Block 19 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.19: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 20 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 243 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 244 | blk.20.attn_k_norm.weight | Block 20 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 245 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 246 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 247 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 248 | blk.20.attn_q_norm.weight | Block 20 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 249 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 250 | blk.20.ffn_down_exps.weight | Block 20 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 251 | blk.20.ffn_gate_exps.weight | Block 20 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 252 | blk.20.ffn_gate_inp.weight | Block 20 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 253 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 254 | blk.20.ffn_up_exps.weight | Block 20 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.20: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 21 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 255 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 256 | blk.21.attn_k_norm.weight | Block 21 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 257 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 258 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 259 | blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 260 | blk.21.attn_q_norm.weight | Block 21 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 261 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 262 | blk.21.ffn_down_exps.weight | Block 21 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 263 | blk.21.ffn_gate_exps.weight | Block 21 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 264 | blk.21.ffn_gate_inp.weight | 
Block 21 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 265 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 266 | blk.21.ffn_up_exps.weight | Block 21 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.21: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 22 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 267 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 268 | blk.22.attn_k_norm.weight | Block 22 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 269 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 270 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 271 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 272 | blk.22.attn_q_norm.weight | Block 22 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 273 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 274 | blk.22.ffn_down_exps.weight | Block 22 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 275 | blk.22.ffn_gate_exps.weight | Block 22 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 276 | blk.22.ffn_gate_inp.weight | Block 22 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 277 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 278 | blk.22.ffn_up_exps.weight | Block 22 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.22: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 23 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 279 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 280 | blk.23.attn_k_norm.weight | Block 23 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 281 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 282 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 283 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 284 | blk.23.attn_q_norm.weight | Block 23 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 285 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 286 | blk.23.ffn_down_exps.weight | Block 23 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 287 | blk.23.ffn_gate_exps.weight | Block 23 Ffn_Gate_Exps (W) | (~201M) 
201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 288 | blk.23.ffn_gate_inp.weight | Block 23 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 289 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 290 | blk.23.ffn_up_exps.weight | Block 23 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.23: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 24 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 291 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 292 | blk.24.attn_k_norm.weight | Block 24 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 293 | blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 294 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 295 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 296 | blk.24.attn_q_norm.weight | Block 24 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 297 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 298 | blk.24.ffn_down_exps.weight | Block 24 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 299 | blk.24.ffn_gate_exps.weight | Block 24 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 300 | blk.24.ffn_gate_inp.weight | Block 24 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 301 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 302 | blk.24.ffn_up_exps.weight | Block 24 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.24: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 25 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 303 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 304 | blk.25.attn_k_norm.weight | Block 25 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 305 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 306 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 307 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 308 | blk.25.attn_q_norm.weight | Block 25 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 309 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 310 | blk.25.ffn_down_exps.weight | Block 25 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 
311 | blk.25.ffn_gate_exps.weight | Block 25 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 312 | blk.25.ffn_gate_inp.weight | Block 25 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 313 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 314 | blk.25.ffn_up_exps.weight | Block 25 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.25: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 26 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 315 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 316 | blk.26.attn_k_norm.weight | Block 26 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 317 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 318 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 319 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 320 | blk.26.attn_q_norm.weight | Block 26 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 321 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 322 | blk.26.ffn_down_exps.weight | Block 26 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 323 | blk.26.ffn_gate_exps.weight | Block 26 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 324 | blk.26.ffn_gate_inp.weight | Block 26 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 325 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 326 | blk.26.ffn_up_exps.weight | Block 26 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.26: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 27 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 327 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 328 | blk.27.attn_k_norm.weight | Block 27 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 329 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 330 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 331 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 332 | blk.27.attn_q_norm.weight | Block 27 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 333 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 334 | blk.27.ffn_down_exps.weight | Block 27 
Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 335 | blk.27.ffn_gate_exps.weight | Block 27 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 336 | blk.27.ffn_gate_inp.weight | Block 27 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 337 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 338 | blk.27.ffn_up_exps.weight | Block 27 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.27: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 28 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 339 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 340 | blk.28.attn_k_norm.weight | Block 28 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 341 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 342 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 343 | blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 344 | blk.28.attn_q_norm.weight | Block 28 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 345 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 346 | blk.28.ffn_down_exps.weight | Block 28 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 347 | blk.28.ffn_gate_exps.weight | Block 28 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 348 | blk.28.ffn_gate_inp.weight | Block 28 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 349 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 350 | blk.28.ffn_up_exps.weight | Block 28 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.28: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 29 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 351 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 352 | blk.29.attn_k_norm.weight | Block 29 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 353 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 354 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 355 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 356 | blk.29.attn_q_norm.weight | Block 29 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 357 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~1M) 1048576 | 
2048 x 512 x 1 x 1 | Q8_0 | +| 358 | blk.29.ffn_down_exps.weight | Block 29 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 359 | blk.29.ffn_gate_exps.weight | Block 29 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 360 | blk.29.ffn_gate_inp.weight | Block 29 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 361 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 362 | blk.29.ffn_up_exps.weight | Block 29 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.29: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 30 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 363 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 364 | blk.30.attn_k_norm.weight | Block 30 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 365 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 366 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 367 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 368 | blk.30.attn_q_norm.weight | Block 30 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 369 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 370 | blk.30.ffn_down_exps.weight | Block 30 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 371 | blk.30.ffn_gate_exps.weight | Block 30 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 372 | blk.30.ffn_gate_inp.weight | Block 30 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 373 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 374 | blk.30.ffn_up_exps.weight | Block 30 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.30: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 31 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 375 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 376 | blk.31.attn_k_norm.weight | Block 31 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 377 | blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 378 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 379 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 380 | blk.31.attn_q_norm.weight | Block 31 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 381 
| blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 382 | blk.31.ffn_down_exps.weight | Block 31 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 383 | blk.31.ffn_gate_exps.weight | Block 31 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 384 | blk.31.ffn_gate_inp.weight | Block 31 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 385 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 386 | blk.31.ffn_up_exps.weight | Block 31 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.31: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 32 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 387 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 388 | blk.32.attn_k_norm.weight | Block 32 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 389 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 390 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 391 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 392 | blk.32.attn_q_norm.weight | Block 32 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 393 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 394 | blk.32.ffn_down_exps.weight | Block 32 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 395 | blk.32.ffn_gate_exps.weight | Block 32 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 396 | blk.32.ffn_gate_inp.weight | Block 32 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 397 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 398 | blk.32.ffn_up_exps.weight | Block 32 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.32: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 33 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 399 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 400 | blk.33.attn_k_norm.weight | Block 33 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 401 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 402 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 403 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 404 | blk.33.attn_q_norm.weight 
| Block 33 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 405 | blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 406 | blk.33.ffn_down_exps.weight | Block 33 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 407 | blk.33.ffn_gate_exps.weight | Block 33 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 408 | blk.33.ffn_gate_inp.weight | Block 33 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 409 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 410 | blk.33.ffn_up_exps.weight | Block 33 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.33: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 34 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 411 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 412 | blk.34.attn_k_norm.weight | Block 34 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 413 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 414 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 415 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 416 | blk.34.attn_q_norm.weight | Block 34 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 417 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 418 | blk.34.ffn_down_exps.weight | Block 34 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 419 | blk.34.ffn_gate_exps.weight | Block 34 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 420 | blk.34.ffn_gate_inp.weight | Block 34 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 421 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 422 | blk.34.ffn_up_exps.weight | Block 34 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.34: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 35 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 423 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 424 | blk.35.attn_k_norm.weight | Block 35 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 425 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 426 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 427 | blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~8M) 
8388608 | 2048 x 4096 x 1 x 1 | Q5_K | +| 428 | blk.35.attn_q_norm.weight | Block 35 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 429 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 430 | blk.35.ffn_down_exps.weight | Block 35 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 431 | blk.35.ffn_gate_exps.weight | Block 35 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | +| 432 | blk.35.ffn_gate_inp.weight | Block 35 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 433 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 434 | blk.35.ffn_up_exps.weight | Block 35 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q5_K | + +- Total elements in blk.35: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 36 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 435 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q6_K | +| 436 | blk.36.attn_k_norm.weight | Block 36 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 437 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 438 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 439 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q6_K | +| 440 | blk.36.attn_q_norm.weight | Block 36 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 441 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 442 | blk.36.ffn_down_exps.weight | Block 36 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 443 | blk.36.ffn_gate_exps.weight | Block 36 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | +| 444 | blk.36.ffn_gate_inp.weight | Block 36 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 445 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 446 | blk.36.ffn_up_exps.weight | Block 36 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | + +- Total elements in blk.36: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 37 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 447 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q6_K | +| 448 | blk.37.attn_k_norm.weight | Block 37 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 449 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 450 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | 
Q6_K | +| 451 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q6_K | +| 452 | blk.37.attn_q_norm.weight | Block 37 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 453 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 454 | blk.37.ffn_down_exps.weight | Block 37 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 455 | blk.37.ffn_gate_exps.weight | Block 37 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | +| 456 | blk.37.ffn_gate_inp.weight | Block 37 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 457 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 458 | blk.37.ffn_up_exps.weight | Block 37 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | + +- Total elements in blk.37: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 38 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 459 | blk.38.attn_k.weight | Block 38 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q6_K | +| 460 | blk.38.attn_k_norm.weight | Block 38 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 461 | blk.38.attn_norm.weight | Block 38 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 462 | blk.38.attn_output.weight | Block 38 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 463 | blk.38.attn_q.weight | Block 38 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q6_K | +| 464 | blk.38.attn_q_norm.weight | Block 38 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 465 | blk.38.attn_v.weight | Block 38 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 466 | blk.38.ffn_down_exps.weight | Block 38 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 467 | blk.38.ffn_gate_exps.weight | Block 38 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | +| 468 | blk.38.ffn_gate_inp.weight | Block 38 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 469 | blk.38.ffn_norm.weight | Block 38 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 470 | blk.38.ffn_up_exps.weight | Block 38 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | + +- Total elements in blk.38: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 39 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 471 | blk.39.attn_k.weight | Block 39 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q6_K | +| 472 | blk.39.attn_k_norm.weight | Block 39 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 473 | blk.39.attn_norm.weight | Block 39 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 474 | blk.39.attn_output.weight 
| Block 39 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 475 | blk.39.attn_q.weight | Block 39 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q6_K | +| 476 | blk.39.attn_q_norm.weight | Block 39 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 477 | blk.39.attn_v.weight | Block 39 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 478 | blk.39.ffn_down_exps.weight | Block 39 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 479 | blk.39.ffn_gate_exps.weight | Block 39 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | +| 480 | blk.39.ffn_gate_inp.weight | Block 39 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 481 | blk.39.ffn_norm.weight | Block 39 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 482 | blk.39.ffn_up_exps.weight | Block 39 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | + +- Total elements in blk.39: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 40 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 483 | blk.40.attn_k.weight | Block 40 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q6_K | +| 484 | blk.40.attn_k_norm.weight | Block 40 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 485 | blk.40.attn_norm.weight | Block 40 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 486 | blk.40.attn_output.weight | Block 40 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 487 | blk.40.attn_q.weight | Block 40 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q6_K | +| 488 | blk.40.attn_q_norm.weight | Block 40 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 489 | blk.40.attn_v.weight | Block 40 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 490 | blk.40.ffn_down_exps.weight | Block 40 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 491 | blk.40.ffn_gate_exps.weight | Block 40 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | +| 492 | blk.40.ffn_gate_inp.weight | Block 40 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 493 | blk.40.ffn_norm.weight | Block 40 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 494 | blk.40.ffn_up_exps.weight | Block 40 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | + +- Total elements in blk.40: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 41 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 495 | blk.41.attn_k.weight | Block 41 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q6_K | +| 496 | blk.41.attn_k_norm.weight | Block 41 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 497 | blk.41.attn_norm.weight | Block 41 Attention Normalization (W) | 
( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 498 | blk.41.attn_output.weight | Block 41 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 499 | blk.41.attn_q.weight | Block 41 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q6_K | +| 500 | blk.41.attn_q_norm.weight | Block 41 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 501 | blk.41.attn_v.weight | Block 41 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 502 | blk.41.ffn_down_exps.weight | Block 41 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 503 | blk.41.ffn_gate_exps.weight | Block 41 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | +| 504 | blk.41.ffn_gate_inp.weight | Block 41 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 505 | blk.41.ffn_norm.weight | Block 41 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 506 | blk.41.ffn_up_exps.weight | Block 41 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | + +- Total elements in blk.41: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 42 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 507 | blk.42.attn_k.weight | Block 42 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q6_K | +| 508 | blk.42.attn_k_norm.weight | Block 42 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 509 | blk.42.attn_norm.weight | Block 42 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 510 | blk.42.attn_output.weight | Block 42 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K | +| 511 | blk.42.attn_q.weight | Block 42 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q6_K | +| 512 | blk.42.attn_q_norm.weight | Block 42 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 513 | blk.42.attn_v.weight | Block 42 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 | +| 514 | blk.42.ffn_down_exps.weight | Block 42 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 | +| 515 | blk.42.ffn_gate_exps.weight | Block 42 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | +| 516 | blk.42.ffn_gate_inp.weight | Block 42 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 517 | blk.42.ffn_norm.weight | Block 42 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 518 | blk.42.ffn_up_exps.weight | Block 42 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K | + +- Total elements in blk.42: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 43 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 519 | blk.43.attn_k.weight | Block 43 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q6_K | +| 520 | blk.43.attn_k_norm.weight | Block 43 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | 
+
+
+### Block 43 Tensor Group : ~623M Elements
+
+| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type |
+|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----|
+| 519 | blk.43.attn_k.weight | Block 43 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q6_K |
+| 520 | blk.43.attn_k_norm.weight | Block 43 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 |
+| 521 | blk.43.attn_norm.weight | Block 43 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 |
+| 522 | blk.43.attn_output.weight | Block 43 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K |
+| 523 | blk.43.attn_q.weight | Block 43 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q6_K |
+| 524 | blk.43.attn_q_norm.weight | Block 43 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 |
+| 525 | blk.43.attn_v.weight | Block 43 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 |
+| 526 | blk.43.ffn_down_exps.weight | Block 43 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 |
+| 527 | blk.43.ffn_gate_exps.weight | Block 43 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K |
+| 528 | blk.43.ffn_gate_inp.weight | Block 43 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 |
+| 529 | blk.43.ffn_norm.weight | Block 43 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 |
+| 530 | blk.43.ffn_up_exps.weight | Block 43 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K |
+
+- Total elements in blk.43: (~623M) 623120640
+- Percentage of total elements: 2.13%
+
+
+### Block 44 Tensor Group : ~623M Elements
+
+| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type |
+|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----|
+| 531 | blk.44.attn_k.weight | Block 44 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q6_K |
+| 532 | blk.44.attn_k_norm.weight | Block 44 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 |
+| 533 | blk.44.attn_norm.weight | Block 44 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 |
+| 534 | blk.44.attn_output.weight | Block 44 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K |
+| 535 | blk.44.attn_q.weight | Block 44 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q6_K |
+| 536 | blk.44.attn_q_norm.weight | Block 44 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 |
+| 537 | blk.44.attn_v.weight | Block 44 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 |
+| 538 | blk.44.ffn_down_exps.weight | Block 44 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 |
+| 539 | blk.44.ffn_gate_exps.weight | Block 44 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K |
+| 540 | blk.44.ffn_gate_inp.weight | Block 44 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 |
+| 541 | blk.44.ffn_norm.weight | Block 44 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 |
+| 542 | blk.44.ffn_up_exps.weight | Block 44 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K |
+
+- Total elements in blk.44: (~623M) 623120640
+- Percentage of total elements: 2.13%
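+
+Every block repeats the same quantization layout: Q6_K for the query/key/output projections and the gate/up expert tensors, Q8_0 for the value projection and the down expert tensors, and F32 for the norms and the router. The approximate on-disk footprint of one block therefore follows from the standard ggml bits-per-weight figures (Q6_K at 6.5625 bpw, Q8_0 at 8.5 bpw). A rough estimate that ignores per-tensor alignment padding:
+
+```python
+# Element counts and quant types per tensor, from the Block 44 table above.
+BPW = {"Q6_K": 6.5625, "Q8_0": 8.5, "F32": 32.0}  # bits per weight
+tensors = [
+    (1048576,   "Q6_K"),  # attn_k
+    (128,       "F32"),   # attn_k_norm
+    (2048,      "F32"),   # attn_norm
+    (8388608,   "Q6_K"),  # attn_output
+    (8388608,   "Q6_K"),  # attn_q
+    (128,       "F32"),   # attn_q_norm
+    (1048576,   "Q8_0"),  # attn_v
+    (201326592, "Q8_0"),  # ffn_down_exps
+    (201326592, "Q6_K"),  # ffn_gate_exps
+    (262144,    "F32"),   # ffn_gate_inp
+    (2048,      "F32"),   # ffn_norm
+    (201326592, "Q6_K"),  # ffn_up_exps
+]
+
+total_bytes = sum(n * BPW[t] / 8 for n, t in tensors)
+print(f"~{total_bytes / 2**20:.0f} MiB per block")  # roughly 535 MiB
+```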
+
+
+### Block 45 Tensor Group : ~623M Elements
+
+| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type |
+|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----|
+| 543 | blk.45.attn_k.weight | Block 45 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q6_K |
+| 544 | blk.45.attn_k_norm.weight | Block 45 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 |
+| 545 | blk.45.attn_norm.weight | Block 45 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 |
+| 546 | blk.45.attn_output.weight | Block 45 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q6_K |
+| 547 | blk.45.attn_q.weight | Block 45 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q6_K |
+| 548 | blk.45.attn_q_norm.weight | Block 45 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 |
+| 549 | blk.45.attn_v.weight | Block 45 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q8_0 |
+| 550 | blk.45.ffn_down_exps.weight | Block 45 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q8_0 |
+| 551 | blk.45.ffn_gate_exps.weight | Block 45 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K |
+| 552 | blk.45.ffn_gate_inp.weight | Block 45 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 |
+| 553 | blk.45.ffn_norm.weight | Block 45 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 |
+| 554 | blk.45.ffn_up_exps.weight | Block 45 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q6_K |
+
+- Total elements in blk.45: (~623M) 623120640
+- Percentage of total elements: 2.13%
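+
+For context, the five `ffn_*` tensors in each block implement the mixture-of-experts feed-forward path: `ffn_gate_inp` produces the router logits, and `ffn_gate_exps`/`ffn_up_exps`/`ffn_down_exps` hold all 128 expert FFNs stacked along the third dimension. Below is a minimal NumPy sketch of one token passing through this path, assuming standard top-k routing with softmax weights over the selected experts (expert_count = 128, expert_used_count = 8, expert_feed_forward_length = 768, per the metadata above). GGUF lists the input dimension ne[0] first, so the NumPy shapes are transposed relative to the tables, and the weights here are random stand-ins rather than real model data:
+
+```python
+import numpy as np
+
+d_model, n_expert, top_k, d_ff = 2048, 128, 8, 768
+rng = np.random.default_rng(0)
+
+# Random stand-ins with the same logical shapes as the dumped tensors.
+ffn_gate_inp  = rng.standard_normal((d_model, n_expert)).astype(np.float32)
+ffn_gate_exps = rng.standard_normal((n_expert, d_ff, d_model)).astype(np.float32)
+ffn_up_exps   = rng.standard_normal((n_expert, d_ff, d_model)).astype(np.float32)
+ffn_down_exps = rng.standard_normal((n_expert, d_model, d_ff)).astype(np.float32)
+
+def silu(x):
+    return x / (1.0 + np.exp(-x))
+
+def moe_ffn(x):
+    """SwiGLU MoE feed-forward for one token's hidden state x of shape (d_model,)."""
+    logits = x @ ffn_gate_inp                        # (n_expert,) router scores
+    idx = np.argpartition(logits, -top_k)[-top_k:]   # the 8 selected experts
+    w = np.exp(logits[idx] - logits[idx].max())
+    w /= w.sum()                                     # softmax over selected experts
+    out = np.zeros_like(x)
+    for e, we in zip(idx, w):
+        h = silu(ffn_gate_exps[e] @ x) * (ffn_up_exps[e] @ x)  # expert width 768
+        out += we * (ffn_down_exps[e] @ h)
+    return out
+
+print(moe_ffn(rng.standard_normal(d_model).astype(np.float32)).shape)  # (2048,)
+```
+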