diff --git "a/scores/Qwen3-30B-A3B-pruned-Q4_K_M.md" "b/scores/Qwen3-30B-A3B-pruned-Q4_K_M.md"
new file mode 100644
--- /dev/null
+++ "b/scores/Qwen3-30B-A3B-pruned-Q4_K_M.md"
@@ -0,0 +1,1653 @@

# Qwen3-30B-A3B-Q4_K_M.gguf - GGUF Internal File Dump

- Endian: LITTLE endian

## Key Value Metadata Store

There are 45 key-value pairs in this file (the 42 metadata keys counted by `GGUF.kv_count`, plus the three synthetic `GGUF.*` header fields listed first)

| POS | TYPE | Count | Key | Value |
|----:|:---------|-------:|:------------------------------------------|:--------------------------------------------------------------------|
| 1 | UINT32 | 1 | GGUF.version | 3 |
| 2 | UINT64 | 1 | GGUF.tensor_count | 555 |
| 3 | UINT64 | 1 | GGUF.kv_count | 42 |
| 4 | STRING | 1 | general.architecture | `qwen3moe` |
| 5 | STRING | 1 | general.type | `model` |
| 6 | STRING | 1 | general.name | `Qwen3 30B A3B` |
| 7 | STRING | 1 | general.basename | `Qwen3` |
| 8 | STRING | 1 | general.size_label | `30B-A3B` |
| 9 | STRING | 1 | general.license | `apache-2.0` |
| 10 | STRING | 1 | general.license.link | `https://huggingface.co/Qwen/Qwen3-30B-A3B/blob/main/LICENSE` |
| 11 | UINT32 | 1 | general.base_model.count | 1 |
| 12 | STRING | 1 | general.base_model.0.name | `Qwen3 30B A3B Base` |
| 13 | STRING | 1 | general.base_model.0.organization | `Qwen` |
| 14 | STRING | 1 | general.base_model.0.repo_url | `https://huggingface.co/Qwen/Qwen3-30B-A3B-Base` |
| 15 | [STRING] | 1 | general.tags | [ `text-generation` ] |
| 16 | UINT32 | 1 | qwen3moe.context_length | 40960 |
| 17 | UINT32 | 1 | qwen3moe.embedding_length | 2048 |
| 18 | UINT32 | 1 | qwen3moe.feed_forward_length | 6144 |
| 19 | UINT32 | 1 | qwen3moe.attention.head_count | 32 |
| 20 | UINT32 | 1 | qwen3moe.attention.head_count_kv | 4 |
| 21 | FLOAT32 | 1 | qwen3moe.rope.freq_base | 1000000.0 |
| 22 | FLOAT32 | 1 | qwen3moe.attention.layer_norm_rms_epsilon | 1e-06 |
| 23 | UINT32 | 1 | qwen3moe.expert_used_count | 8 |
| 24 | UINT32 | 1 | qwen3moe.attention.key_length | 128 |
| 25 | UINT32 | 1 | qwen3moe.attention.value_length | 128 |
| 26 | UINT32 | 1 | qwen3moe.expert_count | 128 |
| 27 | UINT32 | 1 | qwen3moe.expert_feed_forward_length | 768 |
| 28 | STRING | 1 | tokenizer.ggml.model | `gpt2` |
| 29 | STRING | 1 | tokenizer.ggml.pre | `qwen2` |
| 30 | [STRING] | 151936 | tokenizer.ggml.tokens | [ `!`, `"`, `#`, `$`, `%`, ... ] |
| 31 | [INT32] | 151936 | tokenizer.ggml.token_type | [ 1, 1, 1, 1, 1, 1, 1, ... ] |
| 32 | [STRING] | 151387 | tokenizer.ggml.merges | [ `Ġ Ġ`, `ĠĠ ĠĠ`, `i n`, `Ġ t`, `ĠĠĠĠ ĠĠĠĠ`, ... ] |
| 33 | UINT32 | 1 | tokenizer.ggml.eos_token_id | 151645 |
| 34 | UINT32 | 1 | tokenizer.ggml.padding_token_id | 151643 |
| 35 | UINT32 | 1 | tokenizer.ggml.bos_token_id | 151643 |
| 36 | BOOL | 1 | tokenizer.ggml.add_bos_token | False |
| 37 | STRING | 1 | tokenizer.chat_template | `{%- if tools %}{{- '<|im_`...`{%- endif %}{%- endif %}` |
| 38 | UINT32 | 1 | general.quantization_version | 2 |
| 39 | UINT32 | 1 | general.file_type | 15 |
| 40 | BOOL | 1 | general.pruned | True |
| 41 | UINT32 | 1 | qwen3moe.block_count | 46 |
| 42 | STRING | 1 | quantize.imatrix.file | `./imatrix/imatrix-Qwen3-30B-A3B-medium.dat` |
| 43 | STRING | 1 | quantize.imatrix.dataset | `../../datasets/imatrix/combined_all_medium.txt` |
| 44 | INT32 | 1 | quantize.imatrix.entries_count | 385 |
| 45 | INT32 | 1 | quantize.imatrix.chunks_count | 6946 |
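
Much of what follows is easier to cross-check programmatically than by eye. Below is a minimal sketch, assuming the `gguf` Python package that ships with llama.cpp (`pip install gguf`); the model path is hypothetical and the attribute names follow `gguf-py`'s `GGUFReader`:

```python
# Sketch: enumerate the KV metadata store and the tensor records that
# back the tables in this dump, via llama.cpp's gguf-py reader.
from gguf import GGUFReader

reader = GGUFReader("Qwen3-30B-A3B-pruned-Q4_K_M.gguf")  # hypothetical local path

# KV metadata. GGUFReader also surfaces the three synthetic GGUF.* header
# fields, which is why 45 entries are listed while GGUF.kv_count is 42.
for name, field in reader.fields.items():
    print(name, [t.name for t in field.types])

# Tensor records: name, quant type, shape, elements, bytes, file offset.
for t in reader.tensors:
    print(t.name, t.tensor_type.name, list(t.shape),
          t.n_elements, t.n_bytes, hex(t.data_offset))
```

Two things in the metadata are worth calling out. First, `general.pruned = True` together with `qwen3moe.block_count = 46`: the upstream Qwen3-30B-A3B ships with 48 blocks, and the element totals below are consistent with two of them having been removed (46 × 623120640 + 622331904 = 29285881344). Second, the attention shapes follow from the GQA settings above: `attn_q` maps the 2048-wide embedding to head_count × key_length = 32 × 128 = 4096, while `attn_k`/`attn_v` map it to head_count_kv × 128 = 512, matching the per-block shapes listed further down; likewise each expert tensor is expert_feed_forward_length × embedding_length × expert_count = 768 × 2048 × 128 elements.

The Data Size column of the offset table below can also be recomputed from the quantization types, since ggml's k-quants pack 256-element super-blocks at a fixed byte cost. A small sketch (the per-super-block byte counts are the standard ggml values; the example tensors are taken from the tables below):

```python
# Bytes per 256-element super-block for the k-quant types used in this file.
KQUANT_SUPERBLOCK_BYTES = {"Q3_K": 110, "Q4_K": 144, "Q5_K": 176}

def kquant_nbytes(n_elements: int, qtype: str) -> int:
    """Byte size of a k-quant tensor whose element count fills whole blocks."""
    assert n_elements % 256 == 0
    return n_elements // 256 * KQUANT_SUPERBLOCK_BYTES[qtype]

# Cross-checks against the Tensor Data Offset table below.
assert kquant_nbytes(1048576, "Q3_K") == 0x6e000      # blk.0.attn_k.weight
assert kquant_nbytes(1048576, "Q4_K") == 0x90000      # blk.0.attn_v.weight
assert kquant_nbytes(201326592, "Q5_K") == 0x8400000  # blk.0.ffn_down_exps.weight
```
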
## Tensors Overview ~29B Elements

Total number of elements in all tensors: 29285881344 Elements

- [Qwen3-30B-A3B-Q4\_K\_M.gguf - GGUF Internal File Dump](#qwen3-30b-a3b-q4_k_mgguf---gguf-internal-file-dump)
  - [Key Value Metadata Store](#key-value-metadata-store)
  - [Tensors Overview ~29B Elements](#tensors-overview-29b-elements)
    - [Tensor Data Offset](#tensor-data-offset)
    - [Base Tensor Group : ~622M Elements](#base-tensor-group--622m-elements)
    - [Block 0 Tensor Group : ~623M Elements](#block-0-tensor-group--623m-elements)
    - [Block 1 Tensor Group : ~623M Elements](#block-1-tensor-group--623m-elements)
    - [Block 2 Tensor Group : ~623M Elements](#block-2-tensor-group--623m-elements)
    - [Block 3 Tensor Group : ~623M Elements](#block-3-tensor-group--623m-elements)
    - [Block 4 Tensor Group : ~623M Elements](#block-4-tensor-group--623m-elements)
    - [Block 5 Tensor Group : ~623M Elements](#block-5-tensor-group--623m-elements)
    - [Block 6 Tensor Group : ~623M Elements](#block-6-tensor-group--623m-elements)
    - [Block 7 Tensor Group : ~623M Elements](#block-7-tensor-group--623m-elements)
    - [Block 8 Tensor Group : ~623M Elements](#block-8-tensor-group--623m-elements)
    - [Block 9 Tensor Group : ~623M Elements](#block-9-tensor-group--623m-elements)
    - [Block 10 Tensor Group : ~623M Elements](#block-10-tensor-group--623m-elements)
    - [Block 11 Tensor Group : ~623M Elements](#block-11-tensor-group--623m-elements)
    - [Block 12 Tensor Group : ~623M Elements](#block-12-tensor-group--623m-elements)
    - [Block 13 Tensor Group : ~623M Elements](#block-13-tensor-group--623m-elements)
    - [Block 14 Tensor Group : ~623M Elements](#block-14-tensor-group--623m-elements)
    - [Block 15 Tensor Group : ~623M Elements](#block-15-tensor-group--623m-elements)
    - [Block 16 Tensor Group : ~623M Elements](#block-16-tensor-group--623m-elements)
    - [Block 17 Tensor Group : ~623M Elements](#block-17-tensor-group--623m-elements)
    - [Block 18 Tensor Group : ~623M Elements](#block-18-tensor-group--623m-elements)
    - [Block 19 Tensor Group : ~623M Elements](#block-19-tensor-group--623m-elements)
    - [Block 20 Tensor Group : ~623M Elements](#block-20-tensor-group--623m-elements)
    - [Block 21 Tensor Group : ~623M Elements](#block-21-tensor-group--623m-elements)
    - [Block 22 Tensor Group : ~623M Elements](#block-22-tensor-group--623m-elements)
    - [Block 23 Tensor Group : ~623M Elements](#block-23-tensor-group--623m-elements)
    - [Block 24 Tensor Group : ~623M Elements](#block-24-tensor-group--623m-elements)
    - [Block 25 Tensor Group : ~623M Elements](#block-25-tensor-group--623m-elements)
    - [Block 26 Tensor Group : ~623M Elements](#block-26-tensor-group--623m-elements)
    - [Block 27 Tensor Group : ~623M Elements](#block-27-tensor-group--623m-elements)
    - [Block 28 Tensor Group : ~623M Elements](#block-28-tensor-group--623m-elements)
    - [Block 29 Tensor Group : ~623M Elements](#block-29-tensor-group--623m-elements)
    - [Block 30 Tensor Group : ~623M Elements](#block-30-tensor-group--623m-elements)
    - [Block 31 Tensor Group : ~623M Elements](#block-31-tensor-group--623m-elements)
    - [Block 32 Tensor Group : ~623M Elements](#block-32-tensor-group--623m-elements)
    - [Block 33 Tensor Group : ~623M Elements](#block-33-tensor-group--623m-elements)
    - [Block 34 Tensor Group : ~623M Elements](#block-34-tensor-group--623m-elements)
    - [Block 35 Tensor Group : ~623M Elements](#block-35-tensor-group--623m-elements)
    - [Block 36 Tensor Group : ~623M Elements](#block-36-tensor-group--623m-elements)
    - [Block 37 Tensor Group : ~623M Elements](#block-37-tensor-group--623m-elements)
    - [Block 38 Tensor Group : ~623M Elements](#block-38-tensor-group--623m-elements)
    - [Block 39 Tensor Group : ~623M Elements](#block-39-tensor-group--623m-elements)
    - [Block 40 Tensor Group : ~623M Elements](#block-40-tensor-group--623m-elements)
    - [Block 41 Tensor Group : ~623M Elements](#block-41-tensor-group--623m-elements)
    - [Block 42 Tensor Group : ~623M Elements](#block-42-tensor-group--623m-elements)
    - [Block 43 Tensor Group : ~623M Elements](#block-43-tensor-group--623m-elements)
    - [Block 44 Tensor Group : ~623M Elements](#block-44-tensor-group--623m-elements)
    - [Block 45 Tensor Group : ~623M Elements](#block-45-tensor-group--623m-elements)

### Tensor Data Offset

This table lists each tensor's data offset and data-segment size, relative to the start of the file

| T_ID | Tensor Layer Name | Data Offset (B) | Data Size (B) |
|-----:|:----------------------------|-----------------:|-----------------:|
| 0 | output.weight | 0x5b12e0 | 0xa6ec000 |
| 1 | output_norm.weight | 0xac9d2e0 | 0x2000 |
| 2 | token_embd.weight | 0xac9f2e0 | 0x7f82800 |
| 3 | blk.0.attn_k.weight | 0x12c21ae0 | 0x6e000 |
| 4 | blk.0.attn_k_norm.weight | 0x12c8fae0 | 0x200 |
| 5 | blk.0.attn_norm.weight | 0x12c8fce0 | 0x2000 |
| 6 | blk.0.attn_output.weight | 0x12c91ce0 | 0x480000 |
| 7 | blk.0.attn_q.weight | 0x13111ce0 | 0x370000 |
| 8 | blk.0.attn_q_norm.weight | 0x13481ce0 | 0x200 |
| 9 | blk.0.attn_v.weight | 0x13481ee0 | 0x90000 |
| 10 | blk.0.ffn_down_exps.weight | 0x13511ee0 | 0x8400000 |
| 11 | blk.0.ffn_gate_exps.weight | 0x1b911ee0 | 0x5280000 |
| 12 | blk.0.ffn_gate_inp.weight | 0x20b91ee0 | 0x100000 |
| 13 | blk.0.ffn_norm.weight | 0x20c91ee0 | 0x2000 |
| 14 | blk.0.ffn_up_exps.weight | 0x20c93ee0 | 0x5280000 |
| 15 | blk.1.attn_k.weight | 0x25f13ee0 | 0x6e000 |
| 16 | blk.1.attn_k_norm.weight | 0x25f81ee0 | 0x200 |
| 17 | blk.1.attn_norm.weight | 0x25f820e0 | 0x2000 |
| 18 | blk.1.attn_output.weight | 0x25f840e0 | 0x480000 |
| 19 | blk.1.attn_q.weight | 0x264040e0 | 0x370000 |
| 20 | blk.1.attn_q_norm.weight | 0x267740e0 | 0x200 |
| 21 | blk.1.attn_v.weight | 0x267742e0 | 0x90000 |
| 22 | blk.1.ffn_down_exps.weight | 0x268042e0 | 0x8400000 |
| 23 | blk.1.ffn_gate_exps.weight | 0x2ec042e0 | 0x5280000 |
| 24 | blk.1.ffn_gate_inp.weight | 0x33e842e0 | 0x100000 |
| 25 | blk.1.ffn_norm.weight | 0x33f842e0 | 0x2000 |
| 26 | blk.1.ffn_up_exps.weight | 0x33f862e0 | 0x5280000 |
| 27 | blk.2.attn_k.weight | 0x392062e0 | 0x6e000 |
| 28 | blk.2.attn_k_norm.weight | 0x392742e0 | 0x200 |
| 29 | blk.2.attn_norm.weight | 0x392744e0 |
0x2000 | +| 30 | blk.2.attn_output.weight | 0x392764e0 | 0x480000 | +| 31 | blk.2.attn_q.weight | 0x396f64e0 | 0x370000 | +| 32 | blk.2.attn_q_norm.weight | 0x39a664e0 | 0x200 | +| 33 | blk.2.attn_v.weight | 0x39a666e0 | 0x90000 | +| 34 | blk.2.ffn_down_exps.weight | 0x39af66e0 | 0x8400000 | +| 35 | blk.2.ffn_gate_exps.weight | 0x41ef66e0 | 0x5280000 | +| 36 | blk.2.ffn_gate_inp.weight | 0x471766e0 | 0x100000 | +| 37 | blk.2.ffn_norm.weight | 0x472766e0 | 0x2000 | +| 38 | blk.2.ffn_up_exps.weight | 0x472786e0 | 0x5280000 | +| 39 | blk.3.attn_k.weight | 0x4c4f86e0 | 0x6e000 | +| 40 | blk.3.attn_k_norm.weight | 0x4c5666e0 | 0x200 | +| 41 | blk.3.attn_norm.weight | 0x4c5668e0 | 0x2000 | +| 42 | blk.3.attn_output.weight | 0x4c5688e0 | 0x480000 | +| 43 | blk.3.attn_q.weight | 0x4c9e88e0 | 0x370000 | +| 44 | blk.3.attn_q_norm.weight | 0x4cd588e0 | 0x200 | +| 45 | blk.3.attn_v.weight | 0x4cd58ae0 | 0x90000 | +| 46 | blk.3.ffn_down_exps.weight | 0x4cde8ae0 | 0x8400000 | +| 47 | blk.3.ffn_gate_exps.weight | 0x551e8ae0 | 0x5280000 | +| 48 | blk.3.ffn_gate_inp.weight | 0x5a468ae0 | 0x100000 | +| 49 | blk.3.ffn_norm.weight | 0x5a568ae0 | 0x2000 | +| 50 | blk.3.ffn_up_exps.weight | 0x5a56aae0 | 0x5280000 | +| 51 | blk.4.attn_k.weight | 0x5f7eaae0 | 0x6e000 | +| 52 | blk.4.attn_k_norm.weight | 0x5f858ae0 | 0x200 | +| 53 | blk.4.attn_norm.weight | 0x5f858ce0 | 0x2000 | +| 54 | blk.4.attn_output.weight | 0x5f85ace0 | 0x480000 | +| 55 | blk.4.attn_q.weight | 0x5fcdace0 | 0x370000 | +| 56 | blk.4.attn_q_norm.weight | 0x6004ace0 | 0x200 | +| 57 | blk.4.attn_v.weight | 0x6004aee0 | 0x90000 | +| 58 | blk.4.ffn_down_exps.weight | 0x600daee0 | 0x8400000 | +| 59 | blk.4.ffn_gate_exps.weight | 0x684daee0 | 0x5280000 | +| 60 | blk.4.ffn_gate_inp.weight | 0x6d75aee0 | 0x100000 | +| 61 | blk.4.ffn_norm.weight | 0x6d85aee0 | 0x2000 | +| 62 | blk.4.ffn_up_exps.weight | 0x6d85cee0 | 0x5280000 | +| 63 | blk.5.attn_k.weight | 0x72adcee0 | 0x6e000 | +| 64 | blk.5.attn_k_norm.weight | 0x72b4aee0 | 0x200 | +| 65 | blk.5.attn_norm.weight | 0x72b4b0e0 | 0x2000 | +| 66 | blk.5.attn_output.weight | 0x72b4d0e0 | 0x480000 | +| 67 | blk.5.attn_q.weight | 0x72fcd0e0 | 0x370000 | +| 68 | blk.5.attn_q_norm.weight | 0x7333d0e0 | 0x200 | +| 69 | blk.5.attn_v.weight | 0x7333d2e0 | 0xb0000 | +| 70 | blk.5.ffn_down_exps.weight | 0x733ed2e0 | 0x8400000 | +| 71 | blk.5.ffn_gate_exps.weight | 0x7b7ed2e0 | 0x5280000 | +| 72 | blk.5.ffn_gate_inp.weight | 0x80a6d2e0 | 0x100000 | +| 73 | blk.5.ffn_norm.weight | 0x80b6d2e0 | 0x2000 | +| 74 | blk.5.ffn_up_exps.weight | 0x80b6f2e0 | 0x5280000 | +| 75 | blk.6.attn_k.weight | 0x85def2e0 | 0x6e000 | +| 76 | blk.6.attn_k_norm.weight | 0x85e5d2e0 | 0x200 | +| 77 | blk.6.attn_norm.weight | 0x85e5d4e0 | 0x2000 | +| 78 | blk.6.attn_output.weight | 0x85e5f4e0 | 0x480000 | +| 79 | blk.6.attn_q.weight | 0x862df4e0 | 0x370000 | +| 80 | blk.6.attn_q_norm.weight | 0x8664f4e0 | 0x200 | +| 81 | blk.6.attn_v.weight | 0x8664f6e0 | 0xb0000 | +| 82 | blk.6.ffn_down_exps.weight | 0x866ff6e0 | 0x8400000 | +| 83 | blk.6.ffn_gate_exps.weight | 0x8eaff6e0 | 0x5280000 | +| 84 | blk.6.ffn_gate_inp.weight | 0x93d7f6e0 | 0x100000 | +| 85 | blk.6.ffn_norm.weight | 0x93e7f6e0 | 0x2000 | +| 86 | blk.6.ffn_up_exps.weight | 0x93e816e0 | 0x5280000 | +| 87 | blk.7.attn_k.weight | 0x991016e0 | 0x6e000 | +| 88 | blk.7.attn_k_norm.weight | 0x9916f6e0 | 0x200 | +| 89 | blk.7.attn_norm.weight | 0x9916f8e0 | 0x2000 | +| 90 | blk.7.attn_output.weight | 0x991718e0 | 0x480000 | +| 91 | blk.7.attn_q.weight | 0x995f18e0 | 0x370000 | +| 92 | 
blk.7.attn_q_norm.weight | 0x999618e0 | 0x200 | +| 93 | blk.7.attn_v.weight | 0x99961ae0 | 0x90000 | +| 94 | blk.7.ffn_down_exps.weight | 0x999f1ae0 | 0x8400000 | +| 95 | blk.7.ffn_gate_exps.weight | 0xa1df1ae0 | 0x5280000 | +| 96 | blk.7.ffn_gate_inp.weight | 0xa7071ae0 | 0x100000 | +| 97 | blk.7.ffn_norm.weight | 0xa7171ae0 | 0x2000 | +| 98 | blk.7.ffn_up_exps.weight | 0xa7173ae0 | 0x5280000 | +| 99 | blk.8.attn_k.weight | 0xac3f3ae0 | 0x6e000 | +| 100 | blk.8.attn_k_norm.weight | 0xac461ae0 | 0x200 | +| 101 | blk.8.attn_norm.weight | 0xac461ce0 | 0x2000 | +| 102 | blk.8.attn_output.weight | 0xac463ce0 | 0x480000 | +| 103 | blk.8.attn_q.weight | 0xac8e3ce0 | 0x370000 | +| 104 | blk.8.attn_q_norm.weight | 0xacc53ce0 | 0x200 | +| 105 | blk.8.attn_v.weight | 0xacc53ee0 | 0xb0000 | +| 106 | blk.8.ffn_down_exps.weight | 0xacd03ee0 | 0x8400000 | +| 107 | blk.8.ffn_gate_exps.weight | 0xb5103ee0 | 0x5280000 | +| 108 | blk.8.ffn_gate_inp.weight | 0xba383ee0 | 0x100000 | +| 109 | blk.8.ffn_norm.weight | 0xba483ee0 | 0x2000 | +| 110 | blk.8.ffn_up_exps.weight | 0xba485ee0 | 0x5280000 | +| 111 | blk.9.attn_k.weight | 0xbf705ee0 | 0x6e000 | +| 112 | blk.9.attn_k_norm.weight | 0xbf773ee0 | 0x200 | +| 113 | blk.9.attn_norm.weight | 0xbf7740e0 | 0x2000 | +| 114 | blk.9.attn_output.weight | 0xbf7760e0 | 0x480000 | +| 115 | blk.9.attn_q.weight | 0xbfbf60e0 | 0x370000 | +| 116 | blk.9.attn_q_norm.weight | 0xbff660e0 | 0x200 | +| 117 | blk.9.attn_v.weight | 0xbff662e0 | 0xb0000 | +| 118 | blk.9.ffn_down_exps.weight | 0xc00162e0 | 0x8400000 | +| 119 | blk.9.ffn_gate_exps.weight | 0xc84162e0 | 0x5280000 | +| 120 | blk.9.ffn_gate_inp.weight | 0xcd6962e0 | 0x100000 | +| 121 | blk.9.ffn_norm.weight | 0xcd7962e0 | 0x2000 | +| 122 | blk.9.ffn_up_exps.weight | 0xcd7982e0 | 0x5280000 | +| 123 | blk.10.attn_k.weight | 0xd2a182e0 | 0x6e000 | +| 124 | blk.10.attn_k_norm.weight | 0xd2a862e0 | 0x200 | +| 125 | blk.10.attn_norm.weight | 0xd2a864e0 | 0x2000 | +| 126 | blk.10.attn_output.weight | 0xd2a884e0 | 0x480000 | +| 127 | blk.10.attn_q.weight | 0xd2f084e0 | 0x370000 | +| 128 | blk.10.attn_q_norm.weight | 0xd32784e0 | 0x200 | +| 129 | blk.10.attn_v.weight | 0xd32786e0 | 0x90000 | +| 130 | blk.10.ffn_down_exps.weight | 0xd33086e0 | 0x8400000 | +| 131 | blk.10.ffn_gate_exps.weight | 0xdb7086e0 | 0x5280000 | +| 132 | blk.10.ffn_gate_inp.weight | 0xe09886e0 | 0x100000 | +| 133 | blk.10.ffn_norm.weight | 0xe0a886e0 | 0x2000 | +| 134 | blk.10.ffn_up_exps.weight | 0xe0a8a6e0 | 0x5280000 | +| 135 | blk.11.attn_k.weight | 0xe5d0a6e0 | 0x6e000 | +| 136 | blk.11.attn_k_norm.weight | 0xe5d786e0 | 0x200 | +| 137 | blk.11.attn_norm.weight | 0xe5d788e0 | 0x2000 | +| 138 | blk.11.attn_output.weight | 0xe5d7a8e0 | 0x480000 | +| 139 | blk.11.attn_q.weight | 0xe61fa8e0 | 0x370000 | +| 140 | blk.11.attn_q_norm.weight | 0xe656a8e0 | 0x200 | +| 141 | blk.11.attn_v.weight | 0xe656aae0 | 0xb0000 | +| 142 | blk.11.ffn_down_exps.weight | 0xe661aae0 | 0x8400000 | +| 143 | blk.11.ffn_gate_exps.weight | 0xeea1aae0 | 0x5280000 | +| 144 | blk.11.ffn_gate_inp.weight | 0xf3c9aae0 | 0x100000 | +| 145 | blk.11.ffn_norm.weight | 0xf3d9aae0 | 0x2000 | +| 146 | blk.11.ffn_up_exps.weight | 0xf3d9cae0 | 0x5280000 | +| 147 | blk.12.attn_k.weight | 0xf901cae0 | 0x6e000 | +| 148 | blk.12.attn_k_norm.weight | 0xf908aae0 | 0x200 | +| 149 | blk.12.attn_norm.weight | 0xf908ace0 | 0x2000 | +| 150 | blk.12.attn_output.weight | 0xf908cce0 | 0x480000 | +| 151 | blk.12.attn_q.weight | 0xf950cce0 | 0x370000 | +| 152 | blk.12.attn_q_norm.weight | 0xf987cce0 | 0x200 | +| 
153 | blk.12.attn_v.weight | 0xf987cee0 | 0xb0000 | +| 154 | blk.12.ffn_down_exps.weight | 0xf992cee0 | 0x8400000 | +| 155 | blk.12.ffn_gate_exps.weight | 0x101d2cee0 | 0x5280000 | +| 156 | blk.12.ffn_gate_inp.weight | 0x106facee0 | 0x100000 | +| 157 | blk.12.ffn_norm.weight | 0x1070acee0 | 0x2000 | +| 158 | blk.12.ffn_up_exps.weight | 0x1070aeee0 | 0x5280000 | +| 159 | blk.13.attn_k.weight | 0x10c32eee0 | 0x6e000 | +| 160 | blk.13.attn_k_norm.weight | 0x10c39cee0 | 0x200 | +| 161 | blk.13.attn_norm.weight | 0x10c39d0e0 | 0x2000 | +| 162 | blk.13.attn_output.weight | 0x10c39f0e0 | 0x480000 | +| 163 | blk.13.attn_q.weight | 0x10c81f0e0 | 0x370000 | +| 164 | blk.13.attn_q_norm.weight | 0x10cb8f0e0 | 0x200 | +| 165 | blk.13.attn_v.weight | 0x10cb8f2e0 | 0x90000 | +| 166 | blk.13.ffn_down_exps.weight | 0x10cc1f2e0 | 0x8400000 | +| 167 | blk.13.ffn_gate_exps.weight | 0x11501f2e0 | 0x5280000 | +| 168 | blk.13.ffn_gate_inp.weight | 0x11a29f2e0 | 0x100000 | +| 169 | blk.13.ffn_norm.weight | 0x11a39f2e0 | 0x2000 | +| 170 | blk.13.ffn_up_exps.weight | 0x11a3a12e0 | 0x5280000 | +| 171 | blk.14.attn_k.weight | 0x11f6212e0 | 0x6e000 | +| 172 | blk.14.attn_k_norm.weight | 0x11f68f2e0 | 0x200 | +| 173 | blk.14.attn_norm.weight | 0x11f68f4e0 | 0x2000 | +| 174 | blk.14.attn_output.weight | 0x11f6914e0 | 0x480000 | +| 175 | blk.14.attn_q.weight | 0x11fb114e0 | 0x370000 | +| 176 | blk.14.attn_q_norm.weight | 0x11fe814e0 | 0x200 | +| 177 | blk.14.attn_v.weight | 0x11fe816e0 | 0xb0000 | +| 178 | blk.14.ffn_down_exps.weight | 0x11ff316e0 | 0x8400000 | +| 179 | blk.14.ffn_gate_exps.weight | 0x1283316e0 | 0x5280000 | +| 180 | blk.14.ffn_gate_inp.weight | 0x12d5b16e0 | 0x100000 | +| 181 | blk.14.ffn_norm.weight | 0x12d6b16e0 | 0x2000 | +| 182 | blk.14.ffn_up_exps.weight | 0x12d6b36e0 | 0x5280000 | +| 183 | blk.15.attn_k.weight | 0x1329336e0 | 0x6e000 | +| 184 | blk.15.attn_k_norm.weight | 0x1329a16e0 | 0x200 | +| 185 | blk.15.attn_norm.weight | 0x1329a18e0 | 0x2000 | +| 186 | blk.15.attn_output.weight | 0x1329a38e0 | 0x480000 | +| 187 | blk.15.attn_q.weight | 0x132e238e0 | 0x370000 | +| 188 | blk.15.attn_q_norm.weight | 0x1331938e0 | 0x200 | +| 189 | blk.15.attn_v.weight | 0x133193ae0 | 0xb0000 | +| 190 | blk.15.ffn_down_exps.weight | 0x133243ae0 | 0x8400000 | +| 191 | blk.15.ffn_gate_exps.weight | 0x13b643ae0 | 0x5280000 | +| 192 | blk.15.ffn_gate_inp.weight | 0x1408c3ae0 | 0x100000 | +| 193 | blk.15.ffn_norm.weight | 0x1409c3ae0 | 0x2000 | +| 194 | blk.15.ffn_up_exps.weight | 0x1409c5ae0 | 0x5280000 | +| 195 | blk.16.attn_k.weight | 0x145c45ae0 | 0x6e000 | +| 196 | blk.16.attn_k_norm.weight | 0x145cb3ae0 | 0x200 | +| 197 | blk.16.attn_norm.weight | 0x145cb3ce0 | 0x2000 | +| 198 | blk.16.attn_output.weight | 0x145cb5ce0 | 0x480000 | +| 199 | blk.16.attn_q.weight | 0x146135ce0 | 0x370000 | +| 200 | blk.16.attn_q_norm.weight | 0x1464a5ce0 | 0x200 | +| 201 | blk.16.attn_v.weight | 0x1464a5ee0 | 0x90000 | +| 202 | blk.16.ffn_down_exps.weight | 0x146535ee0 | 0x8400000 | +| 203 | blk.16.ffn_gate_exps.weight | 0x14e935ee0 | 0x5280000 | +| 204 | blk.16.ffn_gate_inp.weight | 0x153bb5ee0 | 0x100000 | +| 205 | blk.16.ffn_norm.weight | 0x153cb5ee0 | 0x2000 | +| 206 | blk.16.ffn_up_exps.weight | 0x153cb7ee0 | 0x5280000 | +| 207 | blk.17.attn_k.weight | 0x158f37ee0 | 0x6e000 | +| 208 | blk.17.attn_k_norm.weight | 0x158fa5ee0 | 0x200 | +| 209 | blk.17.attn_norm.weight | 0x158fa60e0 | 0x2000 | +| 210 | blk.17.attn_output.weight | 0x158fa80e0 | 0x480000 | +| 211 | blk.17.attn_q.weight | 0x1594280e0 | 0x370000 | +| 212 | 
blk.17.attn_q_norm.weight | 0x1597980e0 | 0x200 | +| 213 | blk.17.attn_v.weight | 0x1597982e0 | 0xb0000 | +| 214 | blk.17.ffn_down_exps.weight | 0x1598482e0 | 0x8400000 | +| 215 | blk.17.ffn_gate_exps.weight | 0x161c482e0 | 0x5280000 | +| 216 | blk.17.ffn_gate_inp.weight | 0x166ec82e0 | 0x100000 | +| 217 | blk.17.ffn_norm.weight | 0x166fc82e0 | 0x2000 | +| 218 | blk.17.ffn_up_exps.weight | 0x166fca2e0 | 0x5280000 | +| 219 | blk.18.attn_k.weight | 0x16c24a2e0 | 0x6e000 | +| 220 | blk.18.attn_k_norm.weight | 0x16c2b82e0 | 0x200 | +| 221 | blk.18.attn_norm.weight | 0x16c2b84e0 | 0x2000 | +| 222 | blk.18.attn_output.weight | 0x16c2ba4e0 | 0x480000 | +| 223 | blk.18.attn_q.weight | 0x16c73a4e0 | 0x370000 | +| 224 | blk.18.attn_q_norm.weight | 0x16caaa4e0 | 0x200 | +| 225 | blk.18.attn_v.weight | 0x16caaa6e0 | 0xb0000 | +| 226 | blk.18.ffn_down_exps.weight | 0x16cb5a6e0 | 0x8400000 | +| 227 | blk.18.ffn_gate_exps.weight | 0x174f5a6e0 | 0x6c00000 | +| 228 | blk.18.ffn_gate_inp.weight | 0x17bb5a6e0 | 0x100000 | +| 229 | blk.18.ffn_norm.weight | 0x17bc5a6e0 | 0x2000 | +| 230 | blk.18.ffn_up_exps.weight | 0x17bc5c6e0 | 0x6c00000 | +| 231 | blk.19.attn_k.weight | 0x18285c6e0 | 0x6e000 | +| 232 | blk.19.attn_k_norm.weight | 0x1828ca6e0 | 0x200 | +| 233 | blk.19.attn_norm.weight | 0x1828ca8e0 | 0x2000 | +| 234 | blk.19.attn_output.weight | 0x1828cc8e0 | 0x480000 | +| 235 | blk.19.attn_q.weight | 0x182d4c8e0 | 0x370000 | +| 236 | blk.19.attn_q_norm.weight | 0x1830bc8e0 | 0x200 | +| 237 | blk.19.attn_v.weight | 0x1830bcae0 | 0x90000 | +| 238 | blk.19.ffn_down_exps.weight | 0x18314cae0 | 0x8400000 | +| 239 | blk.19.ffn_gate_exps.weight | 0x18b54cae0 | 0x5280000 | +| 240 | blk.19.ffn_gate_inp.weight | 0x1907ccae0 | 0x100000 | +| 241 | blk.19.ffn_norm.weight | 0x1908ccae0 | 0x2000 | +| 242 | blk.19.ffn_up_exps.weight | 0x1908ceae0 | 0x5280000 | +| 243 | blk.20.attn_k.weight | 0x195b4eae0 | 0x6e000 | +| 244 | blk.20.attn_k_norm.weight | 0x195bbcae0 | 0x200 | +| 245 | blk.20.attn_norm.weight | 0x195bbcce0 | 0x2000 | +| 246 | blk.20.attn_output.weight | 0x195bbece0 | 0x480000 | +| 247 | blk.20.attn_q.weight | 0x19603ece0 | 0x370000 | +| 248 | blk.20.attn_q_norm.weight | 0x1963aece0 | 0x200 | +| 249 | blk.20.attn_v.weight | 0x1963aeee0 | 0xb0000 | +| 250 | blk.20.ffn_down_exps.weight | 0x19645eee0 | 0x8400000 | +| 251 | blk.20.ffn_gate_exps.weight | 0x19e85eee0 | 0x5280000 | +| 252 | blk.20.ffn_gate_inp.weight | 0x1a3adeee0 | 0x100000 | +| 253 | blk.20.ffn_norm.weight | 0x1a3bdeee0 | 0x2000 | +| 254 | blk.20.ffn_up_exps.weight | 0x1a3be0ee0 | 0x5280000 | +| 255 | blk.21.attn_k.weight | 0x1a8e60ee0 | 0x6e000 | +| 256 | blk.21.attn_k_norm.weight | 0x1a8eceee0 | 0x200 | +| 257 | blk.21.attn_norm.weight | 0x1a8ecf0e0 | 0x2000 | +| 258 | blk.21.attn_output.weight | 0x1a8ed10e0 | 0x480000 | +| 259 | blk.21.attn_q.weight | 0x1a93510e0 | 0x370000 | +| 260 | blk.21.attn_q_norm.weight | 0x1a96c10e0 | 0x200 | +| 261 | blk.21.attn_v.weight | 0x1a96c12e0 | 0xb0000 | +| 262 | blk.21.ffn_down_exps.weight | 0x1a97712e0 | 0x8400000 | +| 263 | blk.21.ffn_gate_exps.weight | 0x1b1b712e0 | 0x5280000 | +| 264 | blk.21.ffn_gate_inp.weight | 0x1b6df12e0 | 0x100000 | +| 265 | blk.21.ffn_norm.weight | 0x1b6ef12e0 | 0x2000 | +| 266 | blk.21.ffn_up_exps.weight | 0x1b6ef32e0 | 0x5280000 | +| 267 | blk.22.attn_k.weight | 0x1bc1732e0 | 0x6e000 | +| 268 | blk.22.attn_k_norm.weight | 0x1bc1e12e0 | 0x200 | +| 269 | blk.22.attn_norm.weight | 0x1bc1e14e0 | 0x2000 | +| 270 | blk.22.attn_output.weight | 0x1bc1e34e0 | 0x480000 | +| 271 | 
blk.22.attn_q.weight | 0x1bc6634e0 | 0x370000 | +| 272 | blk.22.attn_q_norm.weight | 0x1bc9d34e0 | 0x200 | +| 273 | blk.22.attn_v.weight | 0x1bc9d36e0 | 0x90000 | +| 274 | blk.22.ffn_down_exps.weight | 0x1bca636e0 | 0x8400000 | +| 275 | blk.22.ffn_gate_exps.weight | 0x1c4e636e0 | 0x5280000 | +| 276 | blk.22.ffn_gate_inp.weight | 0x1ca0e36e0 | 0x100000 | +| 277 | blk.22.ffn_norm.weight | 0x1ca1e36e0 | 0x2000 | +| 278 | blk.22.ffn_up_exps.weight | 0x1ca1e56e0 | 0x5280000 | +| 279 | blk.23.attn_k.weight | 0x1cf4656e0 | 0x6e000 | +| 280 | blk.23.attn_k_norm.weight | 0x1cf4d36e0 | 0x200 | +| 281 | blk.23.attn_norm.weight | 0x1cf4d38e0 | 0x2000 | +| 282 | blk.23.attn_output.weight | 0x1cf4d58e0 | 0x480000 | +| 283 | blk.23.attn_q.weight | 0x1cf9558e0 | 0x370000 | +| 284 | blk.23.attn_q_norm.weight | 0x1cfcc58e0 | 0x200 | +| 285 | blk.23.attn_v.weight | 0x1cfcc5ae0 | 0xb0000 | +| 286 | blk.23.ffn_down_exps.weight | 0x1cfd75ae0 | 0x8400000 | +| 287 | blk.23.ffn_gate_exps.weight | 0x1d8175ae0 | 0x5280000 | +| 288 | blk.23.ffn_gate_inp.weight | 0x1dd3f5ae0 | 0x100000 | +| 289 | blk.23.ffn_norm.weight | 0x1dd4f5ae0 | 0x2000 | +| 290 | blk.23.ffn_up_exps.weight | 0x1dd4f7ae0 | 0x5280000 | +| 291 | blk.24.attn_k.weight | 0x1e2777ae0 | 0x90000 | +| 292 | blk.24.attn_k_norm.weight | 0x1e2807ae0 | 0x200 | +| 293 | blk.24.attn_norm.weight | 0x1e2807ce0 | 0x2000 | +| 294 | blk.24.attn_output.weight | 0x1e2809ce0 | 0x480000 | +| 295 | blk.24.attn_q.weight | 0x1e2c89ce0 | 0x480000 | +| 296 | blk.24.attn_q_norm.weight | 0x1e3109ce0 | 0x200 | +| 297 | blk.24.attn_v.weight | 0x1e3109ee0 | 0xb0000 | +| 298 | blk.24.ffn_down_exps.weight | 0x1e31b9ee0 | 0x8400000 | +| 299 | blk.24.ffn_gate_exps.weight | 0x1eb5b9ee0 | 0x5280000 | +| 300 | blk.24.ffn_gate_inp.weight | 0x1f0839ee0 | 0x100000 | +| 301 | blk.24.ffn_norm.weight | 0x1f0939ee0 | 0x2000 | +| 302 | blk.24.ffn_up_exps.weight | 0x1f093bee0 | 0x5280000 | +| 303 | blk.25.attn_k.weight | 0x1f5bbbee0 | 0x90000 | +| 304 | blk.25.attn_k_norm.weight | 0x1f5c4bee0 | 0x200 | +| 305 | blk.25.attn_norm.weight | 0x1f5c4c0e0 | 0x2000 | +| 306 | blk.25.attn_output.weight | 0x1f5c4e0e0 | 0x480000 | +| 307 | blk.25.attn_q.weight | 0x1f60ce0e0 | 0x480000 | +| 308 | blk.25.attn_q_norm.weight | 0x1f654e0e0 | 0x200 | +| 309 | blk.25.attn_v.weight | 0x1f654e2e0 | 0xb0000 | +| 310 | blk.25.ffn_down_exps.weight | 0x1f65fe2e0 | 0x8400000 | +| 311 | blk.25.ffn_gate_exps.weight | 0x1fe9fe2e0 | 0x6c00000 | +| 312 | blk.25.ffn_gate_inp.weight | 0x2055fe2e0 | 0x100000 | +| 313 | blk.25.ffn_norm.weight | 0x2056fe2e0 | 0x2000 | +| 314 | blk.25.ffn_up_exps.weight | 0x2057002e0 | 0x6c00000 | +| 315 | blk.26.attn_k.weight | 0x20c3002e0 | 0x90000 | +| 316 | blk.26.attn_k_norm.weight | 0x20c3902e0 | 0x200 | +| 317 | blk.26.attn_norm.weight | 0x20c3904e0 | 0x2000 | +| 318 | blk.26.attn_output.weight | 0x20c3924e0 | 0x480000 | +| 319 | blk.26.attn_q.weight | 0x20c8124e0 | 0x480000 | +| 320 | blk.26.attn_q_norm.weight | 0x20cc924e0 | 0x200 | +| 321 | blk.26.attn_v.weight | 0x20cc926e0 | 0xb0000 | +| 322 | blk.26.ffn_down_exps.weight | 0x20cd426e0 | 0x8400000 | +| 323 | blk.26.ffn_gate_exps.weight | 0x2151426e0 | 0x6c00000 | +| 324 | blk.26.ffn_gate_inp.weight | 0x21bd426e0 | 0x100000 | +| 325 | blk.26.ffn_norm.weight | 0x21be426e0 | 0x2000 | +| 326 | blk.26.ffn_up_exps.weight | 0x21be446e0 | 0x6c00000 | +| 327 | blk.27.attn_k.weight | 0x222a446e0 | 0x90000 | +| 328 | blk.27.attn_k_norm.weight | 0x222ad46e0 | 0x200 | +| 329 | blk.27.attn_norm.weight | 0x222ad48e0 | 0x2000 | +| 330 | 
blk.27.attn_output.weight | 0x222ad68e0 | 0x480000 | +| 331 | blk.27.attn_q.weight | 0x222f568e0 | 0x480000 | +| 332 | blk.27.attn_q_norm.weight | 0x2233d68e0 | 0x200 | +| 333 | blk.27.attn_v.weight | 0x2233d6ae0 | 0xb0000 | +| 334 | blk.27.ffn_down_exps.weight | 0x223486ae0 | 0x8400000 | +| 335 | blk.27.ffn_gate_exps.weight | 0x22b886ae0 | 0x6c00000 | +| 336 | blk.27.ffn_gate_inp.weight | 0x232486ae0 | 0x100000 | +| 337 | blk.27.ffn_norm.weight | 0x232586ae0 | 0x2000 | +| 338 | blk.27.ffn_up_exps.weight | 0x232588ae0 | 0x6c00000 | +| 339 | blk.28.attn_k.weight | 0x239188ae0 | 0x90000 | +| 340 | blk.28.attn_k_norm.weight | 0x239218ae0 | 0x200 | +| 341 | blk.28.attn_norm.weight | 0x239218ce0 | 0x2000 | +| 342 | blk.28.attn_output.weight | 0x23921ace0 | 0x480000 | +| 343 | blk.28.attn_q.weight | 0x23969ace0 | 0x480000 | +| 344 | blk.28.attn_q_norm.weight | 0x239b1ace0 | 0x200 | +| 345 | blk.28.attn_v.weight | 0x239b1aee0 | 0xb0000 | +| 346 | blk.28.ffn_down_exps.weight | 0x239bcaee0 | 0x8400000 | +| 347 | blk.28.ffn_gate_exps.weight | 0x241fcaee0 | 0x6c00000 | +| 348 | blk.28.ffn_gate_inp.weight | 0x248bcaee0 | 0x100000 | +| 349 | blk.28.ffn_norm.weight | 0x248ccaee0 | 0x2000 | +| 350 | blk.28.ffn_up_exps.weight | 0x248cccee0 | 0x6c00000 | +| 351 | blk.29.attn_k.weight | 0x24f8ccee0 | 0x90000 | +| 352 | blk.29.attn_k_norm.weight | 0x24f95cee0 | 0x200 | +| 353 | blk.29.attn_norm.weight | 0x24f95d0e0 | 0x2000 | +| 354 | blk.29.attn_output.weight | 0x24f95f0e0 | 0x480000 | +| 355 | blk.29.attn_q.weight | 0x24fddf0e0 | 0x480000 | +| 356 | blk.29.attn_q_norm.weight | 0x25025f0e0 | 0x200 | +| 357 | blk.29.attn_v.weight | 0x25025f2e0 | 0xb0000 | +| 358 | blk.29.ffn_down_exps.weight | 0x25030f2e0 | 0x8400000 | +| 359 | blk.29.ffn_gate_exps.weight | 0x25870f2e0 | 0x6c00000 | +| 360 | blk.29.ffn_gate_inp.weight | 0x25f30f2e0 | 0x100000 | +| 361 | blk.29.ffn_norm.weight | 0x25f40f2e0 | 0x2000 | +| 362 | blk.29.ffn_up_exps.weight | 0x25f4112e0 | 0x6c00000 | +| 363 | blk.30.attn_k.weight | 0x2660112e0 | 0x90000 | +| 364 | blk.30.attn_k_norm.weight | 0x2660a12e0 | 0x200 | +| 365 | blk.30.attn_norm.weight | 0x2660a14e0 | 0x2000 | +| 366 | blk.30.attn_output.weight | 0x2660a34e0 | 0x480000 | +| 367 | blk.30.attn_q.weight | 0x2665234e0 | 0x480000 | +| 368 | blk.30.attn_q_norm.weight | 0x2669a34e0 | 0x200 | +| 369 | blk.30.attn_v.weight | 0x2669a36e0 | 0xb0000 | +| 370 | blk.30.ffn_down_exps.weight | 0x266a536e0 | 0x8400000 | +| 371 | blk.30.ffn_gate_exps.weight | 0x26ee536e0 | 0x6c00000 | +| 372 | blk.30.ffn_gate_inp.weight | 0x275a536e0 | 0x100000 | +| 373 | blk.30.ffn_norm.weight | 0x275b536e0 | 0x2000 | +| 374 | blk.30.ffn_up_exps.weight | 0x275b556e0 | 0x6c00000 | +| 375 | blk.31.attn_k.weight | 0x27c7556e0 | 0x90000 | +| 376 | blk.31.attn_k_norm.weight | 0x27c7e56e0 | 0x200 | +| 377 | blk.31.attn_norm.weight | 0x27c7e58e0 | 0x2000 | +| 378 | blk.31.attn_output.weight | 0x27c7e78e0 | 0x480000 | +| 379 | blk.31.attn_q.weight | 0x27cc678e0 | 0x480000 | +| 380 | blk.31.attn_q_norm.weight | 0x27d0e78e0 | 0x200 | +| 381 | blk.31.attn_v.weight | 0x27d0e7ae0 | 0xb0000 | +| 382 | blk.31.ffn_down_exps.weight | 0x27d197ae0 | 0x8400000 | +| 383 | blk.31.ffn_gate_exps.weight | 0x285597ae0 | 0x6c00000 | +| 384 | blk.31.ffn_gate_inp.weight | 0x28c197ae0 | 0x100000 | +| 385 | blk.31.ffn_norm.weight | 0x28c297ae0 | 0x2000 | +| 386 | blk.31.ffn_up_exps.weight | 0x28c299ae0 | 0x6c00000 | +| 387 | blk.32.attn_k.weight | 0x292e99ae0 | 0x90000 | +| 388 | blk.32.attn_k_norm.weight | 0x292f29ae0 | 0x200 | +| 389 | 
blk.32.attn_norm.weight | 0x292f29ce0 | 0x2000 | +| 390 | blk.32.attn_output.weight | 0x292f2bce0 | 0x480000 | +| 391 | blk.32.attn_q.weight | 0x2933abce0 | 0x480000 | +| 392 | blk.32.attn_q_norm.weight | 0x29382bce0 | 0x200 | +| 393 | blk.32.attn_v.weight | 0x29382bee0 | 0xb0000 | +| 394 | blk.32.ffn_down_exps.weight | 0x2938dbee0 | 0x8400000 | +| 395 | blk.32.ffn_gate_exps.weight | 0x29bcdbee0 | 0x6c00000 | +| 396 | blk.32.ffn_gate_inp.weight | 0x2a28dbee0 | 0x100000 | +| 397 | blk.32.ffn_norm.weight | 0x2a29dbee0 | 0x2000 | +| 398 | blk.32.ffn_up_exps.weight | 0x2a29ddee0 | 0x6c00000 | +| 399 | blk.33.attn_k.weight | 0x2a95ddee0 | 0x90000 | +| 400 | blk.33.attn_k_norm.weight | 0x2a966dee0 | 0x200 | +| 401 | blk.33.attn_norm.weight | 0x2a966e0e0 | 0x2000 | +| 402 | blk.33.attn_output.weight | 0x2a96700e0 | 0x480000 | +| 403 | blk.33.attn_q.weight | 0x2a9af00e0 | 0x480000 | +| 404 | blk.33.attn_q_norm.weight | 0x2a9f700e0 | 0x200 | +| 405 | blk.33.attn_v.weight | 0x2a9f702e0 | 0xb0000 | +| 406 | blk.33.ffn_down_exps.weight | 0x2aa0202e0 | 0x8400000 | +| 407 | blk.33.ffn_gate_exps.weight | 0x2b24202e0 | 0x6c00000 | +| 408 | blk.33.ffn_gate_inp.weight | 0x2b90202e0 | 0x100000 | +| 409 | blk.33.ffn_norm.weight | 0x2b91202e0 | 0x2000 | +| 410 | blk.33.ffn_up_exps.weight | 0x2b91222e0 | 0x6c00000 | +| 411 | blk.34.attn_k.weight | 0x2bfd222e0 | 0x90000 | +| 412 | blk.34.attn_k_norm.weight | 0x2bfdb22e0 | 0x200 | +| 413 | blk.34.attn_norm.weight | 0x2bfdb24e0 | 0x2000 | +| 414 | blk.34.attn_output.weight | 0x2bfdb44e0 | 0x480000 | +| 415 | blk.34.attn_q.weight | 0x2c02344e0 | 0x480000 | +| 416 | blk.34.attn_q_norm.weight | 0x2c06b44e0 | 0x200 | +| 417 | blk.34.attn_v.weight | 0x2c06b46e0 | 0xb0000 | +| 418 | blk.34.ffn_down_exps.weight | 0x2c07646e0 | 0x8400000 | +| 419 | blk.34.ffn_gate_exps.weight | 0x2c8b646e0 | 0x6c00000 | +| 420 | blk.34.ffn_gate_inp.weight | 0x2cf7646e0 | 0x100000 | +| 421 | blk.34.ffn_norm.weight | 0x2cf8646e0 | 0x2000 | +| 422 | blk.34.ffn_up_exps.weight | 0x2cf8666e0 | 0x6c00000 | +| 423 | blk.35.attn_k.weight | 0x2d64666e0 | 0x90000 | +| 424 | blk.35.attn_k_norm.weight | 0x2d64f66e0 | 0x200 | +| 425 | blk.35.attn_norm.weight | 0x2d64f68e0 | 0x2000 | +| 426 | blk.35.attn_output.weight | 0x2d64f88e0 | 0x480000 | +| 427 | blk.35.attn_q.weight | 0x2d69788e0 | 0x480000 | +| 428 | blk.35.attn_q_norm.weight | 0x2d6df88e0 | 0x200 | +| 429 | blk.35.attn_v.weight | 0x2d6df8ae0 | 0xb0000 | +| 430 | blk.35.ffn_down_exps.weight | 0x2d6ea8ae0 | 0x8400000 | +| 431 | blk.35.ffn_gate_exps.weight | 0x2df2a8ae0 | 0x6c00000 | +| 432 | blk.35.ffn_gate_inp.weight | 0x2e5ea8ae0 | 0x100000 | +| 433 | blk.35.ffn_norm.weight | 0x2e5fa8ae0 | 0x2000 | +| 434 | blk.35.ffn_up_exps.weight | 0x2e5faaae0 | 0x6c00000 | +| 435 | blk.36.attn_k.weight | 0x2ecbaaae0 | 0x90000 | +| 436 | blk.36.attn_k_norm.weight | 0x2ecc3aae0 | 0x200 | +| 437 | blk.36.attn_norm.weight | 0x2ecc3ace0 | 0x2000 | +| 438 | blk.36.attn_output.weight | 0x2ecc3cce0 | 0x480000 | +| 439 | blk.36.attn_q.weight | 0x2ed0bcce0 | 0x480000 | +| 440 | blk.36.attn_q_norm.weight | 0x2ed53cce0 | 0x200 | +| 441 | blk.36.attn_v.weight | 0x2ed53cee0 | 0xb0000 | +| 442 | blk.36.ffn_down_exps.weight | 0x2ed5ecee0 | 0x8400000 | +| 443 | blk.36.ffn_gate_exps.weight | 0x2f59ecee0 | 0x6c00000 | +| 444 | blk.36.ffn_gate_inp.weight | 0x2fc5ecee0 | 0x100000 | +| 445 | blk.36.ffn_norm.weight | 0x2fc6ecee0 | 0x2000 | +| 446 | blk.36.ffn_up_exps.weight | 0x2fc6eeee0 | 0x6c00000 | +| 447 | blk.37.attn_k.weight | 0x3032eeee0 | 0x90000 | +| 448 | 
blk.37.attn_k_norm.weight | 0x30337eee0 | 0x200 | +| 449 | blk.37.attn_norm.weight | 0x30337f0e0 | 0x2000 | +| 450 | blk.37.attn_output.weight | 0x3033810e0 | 0x480000 | +| 451 | blk.37.attn_q.weight | 0x3038010e0 | 0x480000 | +| 452 | blk.37.attn_q_norm.weight | 0x303c810e0 | 0x200 | +| 453 | blk.37.attn_v.weight | 0x303c812e0 | 0xb0000 | +| 454 | blk.37.ffn_down_exps.weight | 0x303d312e0 | 0x8400000 | +| 455 | blk.37.ffn_gate_exps.weight | 0x30c1312e0 | 0x6c00000 | +| 456 | blk.37.ffn_gate_inp.weight | 0x312d312e0 | 0x100000 | +| 457 | blk.37.ffn_norm.weight | 0x312e312e0 | 0x2000 | +| 458 | blk.37.ffn_up_exps.weight | 0x312e332e0 | 0x6c00000 | +| 459 | blk.38.attn_k.weight | 0x319a332e0 | 0x90000 | +| 460 | blk.38.attn_k_norm.weight | 0x319ac32e0 | 0x200 | +| 461 | blk.38.attn_norm.weight | 0x319ac34e0 | 0x2000 | +| 462 | blk.38.attn_output.weight | 0x319ac54e0 | 0x480000 | +| 463 | blk.38.attn_q.weight | 0x319f454e0 | 0x480000 | +| 464 | blk.38.attn_q_norm.weight | 0x31a3c54e0 | 0x200 | +| 465 | blk.38.attn_v.weight | 0x31a3c56e0 | 0xb0000 | +| 466 | blk.38.ffn_down_exps.weight | 0x31a4756e0 | 0x8400000 | +| 467 | blk.38.ffn_gate_exps.weight | 0x3228756e0 | 0x6c00000 | +| 468 | blk.38.ffn_gate_inp.weight | 0x3294756e0 | 0x100000 | +| 469 | blk.38.ffn_norm.weight | 0x3295756e0 | 0x2000 | +| 470 | blk.38.ffn_up_exps.weight | 0x3295776e0 | 0x6c00000 | +| 471 | blk.39.attn_k.weight | 0x3301776e0 | 0x90000 | +| 472 | blk.39.attn_k_norm.weight | 0x3302076e0 | 0x200 | +| 473 | blk.39.attn_norm.weight | 0x3302078e0 | 0x2000 | +| 474 | blk.39.attn_output.weight | 0x3302098e0 | 0x480000 | +| 475 | blk.39.attn_q.weight | 0x3306898e0 | 0x480000 | +| 476 | blk.39.attn_q_norm.weight | 0x330b098e0 | 0x200 | +| 477 | blk.39.attn_v.weight | 0x330b09ae0 | 0xb0000 | +| 478 | blk.39.ffn_down_exps.weight | 0x330bb9ae0 | 0x8400000 | +| 479 | blk.39.ffn_gate_exps.weight | 0x338fb9ae0 | 0x6c00000 | +| 480 | blk.39.ffn_gate_inp.weight | 0x33fbb9ae0 | 0x100000 | +| 481 | blk.39.ffn_norm.weight | 0x33fcb9ae0 | 0x2000 | +| 482 | blk.39.ffn_up_exps.weight | 0x33fcbbae0 | 0x6c00000 | +| 483 | blk.40.attn_k.weight | 0x3468bbae0 | 0x90000 | +| 484 | blk.40.attn_k_norm.weight | 0x34694bae0 | 0x200 | +| 485 | blk.40.attn_norm.weight | 0x34694bce0 | 0x2000 | +| 486 | blk.40.attn_output.weight | 0x34694dce0 | 0x480000 | +| 487 | blk.40.attn_q.weight | 0x346dcdce0 | 0x480000 | +| 488 | blk.40.attn_q_norm.weight | 0x34724dce0 | 0x200 | +| 489 | blk.40.attn_v.weight | 0x34724dee0 | 0xb0000 | +| 490 | blk.40.ffn_down_exps.weight | 0x3472fdee0 | 0x8400000 | +| 491 | blk.40.ffn_gate_exps.weight | 0x34f6fdee0 | 0x6c00000 | +| 492 | blk.40.ffn_gate_inp.weight | 0x3562fdee0 | 0x100000 | +| 493 | blk.40.ffn_norm.weight | 0x3563fdee0 | 0x2000 | +| 494 | blk.40.ffn_up_exps.weight | 0x3563ffee0 | 0x6c00000 | +| 495 | blk.41.attn_k.weight | 0x35cfffee0 | 0x90000 | +| 496 | blk.41.attn_k_norm.weight | 0x35d08fee0 | 0x200 | +| 497 | blk.41.attn_norm.weight | 0x35d0900e0 | 0x2000 | +| 498 | blk.41.attn_output.weight | 0x35d0920e0 | 0x480000 | +| 499 | blk.41.attn_q.weight | 0x35d5120e0 | 0x480000 | +| 500 | blk.41.attn_q_norm.weight | 0x35d9920e0 | 0x200 | +| 501 | blk.41.attn_v.weight | 0x35d9922e0 | 0xb0000 | +| 502 | blk.41.ffn_down_exps.weight | 0x35da422e0 | 0x8400000 | +| 503 | blk.41.ffn_gate_exps.weight | 0x365e422e0 | 0x6c00000 | +| 504 | blk.41.ffn_gate_inp.weight | 0x36ca422e0 | 0x100000 | +| 505 | blk.41.ffn_norm.weight | 0x36cb422e0 | 0x2000 | +| 506 | blk.41.ffn_up_exps.weight | 0x36cb442e0 | 0x6c00000 | +| 507 | 
blk.42.attn_k.weight | 0x3737442e0 | 0x90000 | +| 508 | blk.42.attn_k_norm.weight | 0x3737d42e0 | 0x200 | +| 509 | blk.42.attn_norm.weight | 0x3737d44e0 | 0x2000 | +| 510 | blk.42.attn_output.weight | 0x3737d64e0 | 0x480000 | +| 511 | blk.42.attn_q.weight | 0x373c564e0 | 0x480000 | +| 512 | blk.42.attn_q_norm.weight | 0x3740d64e0 | 0x200 | +| 513 | blk.42.attn_v.weight | 0x3740d66e0 | 0xb0000 | +| 514 | blk.42.ffn_down_exps.weight | 0x3741866e0 | 0x8400000 | +| 515 | blk.42.ffn_gate_exps.weight | 0x37c5866e0 | 0x6c00000 | +| 516 | blk.42.ffn_gate_inp.weight | 0x3831866e0 | 0x100000 | +| 517 | blk.42.ffn_norm.weight | 0x3832866e0 | 0x2000 | +| 518 | blk.42.ffn_up_exps.weight | 0x3832886e0 | 0x6c00000 | +| 519 | blk.43.attn_k.weight | 0x389e886e0 | 0x90000 | +| 520 | blk.43.attn_k_norm.weight | 0x389f186e0 | 0x200 | +| 521 | blk.43.attn_norm.weight | 0x389f188e0 | 0x2000 | +| 522 | blk.43.attn_output.weight | 0x389f1a8e0 | 0x480000 | +| 523 | blk.43.attn_q.weight | 0x38a39a8e0 | 0x480000 | +| 524 | blk.43.attn_q_norm.weight | 0x38a81a8e0 | 0x200 | +| 525 | blk.43.attn_v.weight | 0x38a81aae0 | 0xb0000 | +| 526 | blk.43.ffn_down_exps.weight | 0x38a8caae0 | 0x8400000 | +| 527 | blk.43.ffn_gate_exps.weight | 0x392ccaae0 | 0x6c00000 | +| 528 | blk.43.ffn_gate_inp.weight | 0x3998caae0 | 0x100000 | +| 529 | blk.43.ffn_norm.weight | 0x3999caae0 | 0x2000 | +| 530 | blk.43.ffn_up_exps.weight | 0x3999ccae0 | 0x6c00000 | +| 531 | blk.44.attn_k.weight | 0x3a05ccae0 | 0x90000 | +| 532 | blk.44.attn_k_norm.weight | 0x3a065cae0 | 0x200 | +| 533 | blk.44.attn_norm.weight | 0x3a065cce0 | 0x2000 | +| 534 | blk.44.attn_output.weight | 0x3a065ece0 | 0x480000 | +| 535 | blk.44.attn_q.weight | 0x3a0adece0 | 0x480000 | +| 536 | blk.44.attn_q_norm.weight | 0x3a0f5ece0 | 0x200 | +| 537 | blk.44.attn_v.weight | 0x3a0f5eee0 | 0xb0000 | +| 538 | blk.44.ffn_down_exps.weight | 0x3a100eee0 | 0x8400000 | +| 539 | blk.44.ffn_gate_exps.weight | 0x3a940eee0 | 0x6c00000 | +| 540 | blk.44.ffn_gate_inp.weight | 0x3b000eee0 | 0x100000 | +| 541 | blk.44.ffn_norm.weight | 0x3b010eee0 | 0x2000 | +| 542 | blk.44.ffn_up_exps.weight | 0x3b0110ee0 | 0x6c00000 | +| 543 | blk.45.attn_k.weight | 0x3b6d10ee0 | 0x90000 | +| 544 | blk.45.attn_k_norm.weight | 0x3b6da0ee0 | 0x200 | +| 545 | blk.45.attn_norm.weight | 0x3b6da10e0 | 0x2000 | +| 546 | blk.45.attn_output.weight | 0x3b6da30e0 | 0x480000 | +| 547 | blk.45.attn_q.weight | 0x3b72230e0 | 0x480000 | +| 548 | blk.45.attn_q_norm.weight | 0x3b76a30e0 | 0x200 | +| 549 | blk.45.attn_v.weight | 0x3b76a32e0 | 0xb0000 | +| 550 | blk.45.ffn_down_exps.weight | 0x3b77532e0 | 0x8400000 | +| 551 | blk.45.ffn_gate_exps.weight | 0x3bfb532e0 | 0x6c00000 | +| 552 | blk.45.ffn_gate_inp.weight | 0x3c67532e0 | 0x100000 | +| 553 | blk.45.ffn_norm.weight | 0x3c68532e0 | 0x2000 | +| 554 | blk.45.ffn_up_exps.weight | 0x3c68552e0 | 0x6c00000 | + +### Base Tensor Group : ~622M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:-------------------|:---------------------------------|:------------------|:----------------------|:-----| +| 0 | output.weight | Output (W) | (~311M) 311164928 | 2048 x 151936 x 1 x 1 | Q4_K | +| 1 | output_norm.weight | Output Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 2 | token_embd.weight | Token Embedding (W) | (~311M) 311164928 | 2048 x 151936 x 1 x 1 | Q3_K | + +- Total elements in base: (~622M) 622331904 +- Percentage of total elements: 2.13% + + +### Block 0 Tensor Group : ~623M Elements + +| T_ID | 
Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 3 | blk.0.attn_k.weight | Block 0 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 4 | blk.0.attn_k_norm.weight | Block 0 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 5 | blk.0.attn_norm.weight | Block 0 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 6 | blk.0.attn_output.weight | Block 0 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 7 | blk.0.attn_q.weight | Block 0 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 8 | blk.0.attn_q_norm.weight | Block 0 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 9 | blk.0.attn_v.weight | Block 0 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 10 | blk.0.ffn_down_exps.weight | Block 0 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 11 | blk.0.ffn_gate_exps.weight | Block 0 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 12 | blk.0.ffn_gate_inp.weight | Block 0 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 13 | blk.0.ffn_norm.weight | Block 0 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 14 | blk.0.ffn_up_exps.weight | Block 0 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.0: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 1 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 15 | blk.1.attn_k.weight | Block 1 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 16 | blk.1.attn_k_norm.weight | Block 1 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 17 | blk.1.attn_norm.weight | Block 1 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 18 | blk.1.attn_output.weight | Block 1 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 19 | blk.1.attn_q.weight | Block 1 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 20 | blk.1.attn_q_norm.weight | Block 1 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 21 | blk.1.attn_v.weight | Block 1 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 22 | blk.1.ffn_down_exps.weight | Block 1 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 23 | blk.1.ffn_gate_exps.weight | Block 1 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 24 | blk.1.ffn_gate_inp.weight | Block 1 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 25 | blk.1.ffn_norm.weight | Block 1 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 26 | blk.1.ffn_up_exps.weight | Block 1 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.1: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 2 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer 
Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 27 | blk.2.attn_k.weight | Block 2 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 28 | blk.2.attn_k_norm.weight | Block 2 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 29 | blk.2.attn_norm.weight | Block 2 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 30 | blk.2.attn_output.weight | Block 2 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 31 | blk.2.attn_q.weight | Block 2 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 32 | blk.2.attn_q_norm.weight | Block 2 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 33 | blk.2.attn_v.weight | Block 2 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 34 | blk.2.ffn_down_exps.weight | Block 2 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 35 | blk.2.ffn_gate_exps.weight | Block 2 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 36 | blk.2.ffn_gate_inp.weight | Block 2 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 37 | blk.2.ffn_norm.weight | Block 2 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 38 | blk.2.ffn_up_exps.weight | Block 2 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.2: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 3 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 39 | blk.3.attn_k.weight | Block 3 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 40 | blk.3.attn_k_norm.weight | Block 3 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 41 | blk.3.attn_norm.weight | Block 3 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 42 | blk.3.attn_output.weight | Block 3 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 43 | blk.3.attn_q.weight | Block 3 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 44 | blk.3.attn_q_norm.weight | Block 3 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 45 | blk.3.attn_v.weight | Block 3 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 46 | blk.3.ffn_down_exps.weight | Block 3 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 47 | blk.3.ffn_gate_exps.weight | Block 3 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 48 | blk.3.ffn_gate_inp.weight | Block 3 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 49 | blk.3.ffn_norm.weight | Block 3 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 50 | blk.3.ffn_up_exps.weight | Block 3 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.3: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 4 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | 
Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 51 | blk.4.attn_k.weight | Block 4 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 52 | blk.4.attn_k_norm.weight | Block 4 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 53 | blk.4.attn_norm.weight | Block 4 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 54 | blk.4.attn_output.weight | Block 4 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 55 | blk.4.attn_q.weight | Block 4 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 56 | blk.4.attn_q_norm.weight | Block 4 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 57 | blk.4.attn_v.weight | Block 4 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 58 | blk.4.ffn_down_exps.weight | Block 4 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 59 | blk.4.ffn_gate_exps.weight | Block 4 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 60 | blk.4.ffn_gate_inp.weight | Block 4 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 61 | blk.4.ffn_norm.weight | Block 4 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 62 | blk.4.ffn_up_exps.weight | Block 4 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.4: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 5 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 63 | blk.5.attn_k.weight | Block 5 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 64 | blk.5.attn_k_norm.weight | Block 5 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 65 | blk.5.attn_norm.weight | Block 5 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 66 | blk.5.attn_output.weight | Block 5 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 67 | blk.5.attn_q.weight | Block 5 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 68 | blk.5.attn_q_norm.weight | Block 5 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 69 | blk.5.attn_v.weight | Block 5 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 70 | blk.5.ffn_down_exps.weight | Block 5 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 71 | blk.5.ffn_gate_exps.weight | Block 5 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 72 | blk.5.ffn_gate_inp.weight | Block 5 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 73 | blk.5.ffn_norm.weight | Block 5 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 74 | blk.5.ffn_up_exps.weight | Block 5 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.5: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 6 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human 
Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 75 | blk.6.attn_k.weight | Block 6 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 76 | blk.6.attn_k_norm.weight | Block 6 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 77 | blk.6.attn_norm.weight | Block 6 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 78 | blk.6.attn_output.weight | Block 6 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 79 | blk.6.attn_q.weight | Block 6 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 80 | blk.6.attn_q_norm.weight | Block 6 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 81 | blk.6.attn_v.weight | Block 6 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 82 | blk.6.ffn_down_exps.weight | Block 6 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 83 | blk.6.ffn_gate_exps.weight | Block 6 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 84 | blk.6.ffn_gate_inp.weight | Block 6 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 85 | blk.6.ffn_norm.weight | Block 6 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 86 | blk.6.ffn_up_exps.weight | Block 6 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.6: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 7 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 87 | blk.7.attn_k.weight | Block 7 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 88 | blk.7.attn_k_norm.weight | Block 7 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 89 | blk.7.attn_norm.weight | Block 7 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 90 | blk.7.attn_output.weight | Block 7 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 91 | blk.7.attn_q.weight | Block 7 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 92 | blk.7.attn_q_norm.weight | Block 7 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 93 | blk.7.attn_v.weight | Block 7 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 94 | blk.7.ffn_down_exps.weight | Block 7 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 95 | blk.7.ffn_gate_exps.weight | Block 7 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 96 | blk.7.ffn_gate_inp.weight | Block 7 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 97 | blk.7.ffn_norm.weight | Block 7 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 98 | blk.7.ffn_up_exps.weight | Block 7 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.7: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 8 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human 
Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 99 | blk.8.attn_k.weight | Block 8 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 100 | blk.8.attn_k_norm.weight | Block 8 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 101 | blk.8.attn_norm.weight | Block 8 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 102 | blk.8.attn_output.weight | Block 8 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 103 | blk.8.attn_q.weight | Block 8 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 104 | blk.8.attn_q_norm.weight | Block 8 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 105 | blk.8.attn_v.weight | Block 8 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 106 | blk.8.ffn_down_exps.weight | Block 8 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 107 | blk.8.ffn_gate_exps.weight | Block 8 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 108 | blk.8.ffn_gate_inp.weight | Block 8 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 109 | blk.8.ffn_norm.weight | Block 8 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 110 | blk.8.ffn_up_exps.weight | Block 8 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.8: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 9 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:---------------------------|:------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 111 | blk.9.attn_k.weight | Block 9 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 112 | blk.9.attn_k_norm.weight | Block 9 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 113 | blk.9.attn_norm.weight | Block 9 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 114 | blk.9.attn_output.weight | Block 9 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 115 | blk.9.attn_q.weight | Block 9 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 116 | blk.9.attn_q_norm.weight | Block 9 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 117 | blk.9.attn_v.weight | Block 9 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 118 | blk.9.ffn_down_exps.weight | Block 9 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 119 | blk.9.ffn_gate_exps.weight | Block 9 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 120 | blk.9.ffn_gate_inp.weight | Block 9 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 121 | blk.9.ffn_norm.weight | Block 9 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 122 | blk.9.ffn_up_exps.weight | Block 9 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.9: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 10 Tensor Group : ~623M Elements + +| T_ID | Tensor 
Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 123 | blk.10.attn_k.weight | Block 10 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 124 | blk.10.attn_k_norm.weight | Block 10 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 125 | blk.10.attn_norm.weight | Block 10 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 126 | blk.10.attn_output.weight | Block 10 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 127 | blk.10.attn_q.weight | Block 10 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 128 | blk.10.attn_q_norm.weight | Block 10 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 129 | blk.10.attn_v.weight | Block 10 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 130 | blk.10.ffn_down_exps.weight | Block 10 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 131 | blk.10.ffn_gate_exps.weight | Block 10 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 132 | blk.10.ffn_gate_inp.weight | Block 10 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 133 | blk.10.ffn_norm.weight | Block 10 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 134 | blk.10.ffn_up_exps.weight | Block 10 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.10: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 11 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 135 | blk.11.attn_k.weight | Block 11 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 136 | blk.11.attn_k_norm.weight | Block 11 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 137 | blk.11.attn_norm.weight | Block 11 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 138 | blk.11.attn_output.weight | Block 11 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 139 | blk.11.attn_q.weight | Block 11 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 140 | blk.11.attn_q_norm.weight | Block 11 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 141 | blk.11.attn_v.weight | Block 11 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 142 | blk.11.ffn_down_exps.weight | Block 11 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 143 | blk.11.ffn_gate_exps.weight | Block 11 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 144 | blk.11.ffn_gate_inp.weight | Block 11 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 145 | blk.11.ffn_norm.weight | Block 11 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 146 | blk.11.ffn_up_exps.weight | Block 11 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.11: (~623M) 623120640 +- Percentage of total elements: 
2.13% + + +### Block 12 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 147 | blk.12.attn_k.weight | Block 12 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 148 | blk.12.attn_k_norm.weight | Block 12 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 149 | blk.12.attn_norm.weight | Block 12 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 150 | blk.12.attn_output.weight | Block 12 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 151 | blk.12.attn_q.weight | Block 12 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 152 | blk.12.attn_q_norm.weight | Block 12 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 153 | blk.12.attn_v.weight | Block 12 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 154 | blk.12.ffn_down_exps.weight | Block 12 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 155 | blk.12.ffn_gate_exps.weight | Block 12 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 156 | blk.12.ffn_gate_inp.weight | Block 12 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 157 | blk.12.ffn_norm.weight | Block 12 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 158 | blk.12.ffn_up_exps.weight | Block 12 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.12: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 13 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 159 | blk.13.attn_k.weight | Block 13 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 160 | blk.13.attn_k_norm.weight | Block 13 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 161 | blk.13.attn_norm.weight | Block 13 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 162 | blk.13.attn_output.weight | Block 13 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 163 | blk.13.attn_q.weight | Block 13 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 164 | blk.13.attn_q_norm.weight | Block 13 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 165 | blk.13.attn_v.weight | Block 13 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 166 | blk.13.ffn_down_exps.weight | Block 13 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 167 | blk.13.ffn_gate_exps.weight | Block 13 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 168 | blk.13.ffn_gate_inp.weight | Block 13 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 169 | blk.13.ffn_norm.weight | Block 13 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 170 | blk.13.ffn_up_exps.weight | Block 13 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.13: (~623M) 623120640 +- Percentage of total elements: 2.13%
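+
+Note: each block's 623,120,640-element total follows directly from the twelve tensors listed above. The three per-expert FFN tensors dominate at 3 x 201,326,592 = 603,979,776 elements (~96.9% of the block); the four attention projections add 2 x 8,388,608 + 2 x 1,048,576 = 18,874,368; and the expert router plus the five norm vectors contribute the remaining 262,144 + 2 x 2,048 + 2 x 128 = 266,496. Against the file total this works out to 623,120,640 / 29,285,881,344, i.e. the 2.13% reported in each block summary.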
+ + +### Block 14 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 171 | blk.14.attn_k.weight | Block 14 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 172 | blk.14.attn_k_norm.weight | Block 14 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 173 | blk.14.attn_norm.weight | Block 14 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 174 | blk.14.attn_output.weight | Block 14 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 175 | blk.14.attn_q.weight | Block 14 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 176 | blk.14.attn_q_norm.weight | Block 14 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 177 | blk.14.attn_v.weight | Block 14 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 178 | blk.14.ffn_down_exps.weight | Block 14 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 179 | blk.14.ffn_gate_exps.weight | Block 14 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 180 | blk.14.ffn_gate_inp.weight | Block 14 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 181 | blk.14.ffn_norm.weight | Block 14 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 182 | blk.14.ffn_up_exps.weight | Block 14 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.14: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 15 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 183 | blk.15.attn_k.weight | Block 15 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 184 | blk.15.attn_k_norm.weight | Block 15 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 185 | blk.15.attn_norm.weight | Block 15 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 186 | blk.15.attn_output.weight | Block 15 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 187 | blk.15.attn_q.weight | Block 15 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 188 | blk.15.attn_q_norm.weight | Block 15 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 189 | blk.15.attn_v.weight | Block 15 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 190 | blk.15.ffn_down_exps.weight | Block 15 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 191 | blk.15.ffn_gate_exps.weight | Block 15 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 192 | blk.15.ffn_gate_inp.weight | Block 15 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 193 | blk.15.ffn_norm.weight | Block 15 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 194 | blk.15.ffn_up_exps.weight | Block 15 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.15: (~623M) 623120640 +- Percentage of total elements: 2.13%
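+
+The same per-block accounting can be cross-checked programmatically. Below is a minimal sketch, assuming the `gguf` Python package that ships with llama.cpp (its `GGUFReader` exposes per-tensor `name`, `n_elements`, and `tensor_type`); it groups every tensor by its `blk.N` prefix, sums element counts, and tallies the quantization-type mix per block:
+
+```python
+# Minimal sketch: recompute per-block element totals and quant-type mix
+# straight from the GGUF file (assumes `pip install gguf`, the llama.cpp reader).
+from collections import Counter, defaultdict
+
+from gguf import GGUFReader
+
+reader = GGUFReader("Qwen3-30B-A3B-Q4_K_M.gguf")
+
+totals = defaultdict(int)      # elements per tensor group, e.g. "blk.15"
+quants = defaultdict(Counter)  # quantization-type mix per group
+
+for t in reader.tensors:
+    # Tensor names look like "blk.15.ffn_up_exps.weight"; anything without
+    # a "blk.N" prefix (e.g. the token embedding) goes into the base group.
+    group = ".".join(t.name.split(".")[:2]) if t.name.startswith("blk.") else "base"
+    totals[group] += int(t.n_elements)
+    quants[group][t.tensor_type.name] += 1
+
+grand = sum(totals.values())
+for g in sorted(totals, key=lambda k: int(k.split(".")[1]) if k.startswith("blk.") else -1):
+    print(f"{g}: {totals[g]} ({100.0 * totals[g] / grand:.2f}%) {dict(quants[g])}")
+```
+
+Run against this file, every `blk.N` line should print 623120640 (2.13%), with five F32 entries per block and a Q3_K/Q4_K/Q5_K split that varies by block, matching the tables here.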
+ + +### Block 16 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 195 | blk.16.attn_k.weight | Block 16 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 196 | blk.16.attn_k_norm.weight | Block 16 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 197 | blk.16.attn_norm.weight | Block 16 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 198 | blk.16.attn_output.weight | Block 16 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 199 | blk.16.attn_q.weight | Block 16 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 200 | blk.16.attn_q_norm.weight | Block 16 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 201 | blk.16.attn_v.weight | Block 16 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 202 | blk.16.ffn_down_exps.weight | Block 16 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 203 | blk.16.ffn_gate_exps.weight | Block 16 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 204 | blk.16.ffn_gate_inp.weight | Block 16 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 205 | blk.16.ffn_norm.weight | Block 16 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 206 | blk.16.ffn_up_exps.weight | Block 16 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.16: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 17 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 207 | blk.17.attn_k.weight | Block 17 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 208 | blk.17.attn_k_norm.weight | Block 17 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 209 | blk.17.attn_norm.weight | Block 17 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 210 | blk.17.attn_output.weight | Block 17 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 211 | blk.17.attn_q.weight | Block 17 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 212 | blk.17.attn_q_norm.weight | Block 17 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 213 | blk.17.attn_v.weight | Block 17 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 214 | blk.17.ffn_down_exps.weight | Block 17 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 215 | blk.17.ffn_gate_exps.weight | Block 17 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 216 | blk.17.ffn_gate_inp.weight | Block 17 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 217 | blk.17.ffn_norm.weight | Block 17 Feed-Forward Network Normalization (W) | ( ~2K) 2048 
| 2048 x 1 x 1 x 1 | F32 | +| 218 | blk.17.ffn_up_exps.weight | Block 17 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.17: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 18 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 219 | blk.18.attn_k.weight | Block 18 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 220 | blk.18.attn_k_norm.weight | Block 18 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 221 | blk.18.attn_norm.weight | Block 18 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 222 | blk.18.attn_output.weight | Block 18 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 223 | blk.18.attn_q.weight | Block 18 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 224 | blk.18.attn_q_norm.weight | Block 18 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 225 | blk.18.attn_v.weight | Block 18 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 226 | blk.18.ffn_down_exps.weight | Block 18 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 227 | blk.18.ffn_gate_exps.weight | Block 18 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 228 | blk.18.ffn_gate_inp.weight | Block 18 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 229 | blk.18.ffn_norm.weight | Block 18 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 230 | blk.18.ffn_up_exps.weight | Block 18 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.18: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 19 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 231 | blk.19.attn_k.weight | Block 19 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 232 | blk.19.attn_k_norm.weight | Block 19 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 233 | blk.19.attn_norm.weight | Block 19 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 234 | blk.19.attn_output.weight | Block 19 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 235 | blk.19.attn_q.weight | Block 19 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 236 | blk.19.attn_q_norm.weight | Block 19 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 237 | blk.19.attn_v.weight | Block 19 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 238 | blk.19.ffn_down_exps.weight | Block 19 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 239 | blk.19.ffn_gate_exps.weight | Block 19 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 240 | blk.19.ffn_gate_inp.weight | Block 19 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 241 | 
blk.19.ffn_norm.weight | Block 19 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 242 | blk.19.ffn_up_exps.weight | Block 19 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.19: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 20 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 243 | blk.20.attn_k.weight | Block 20 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 244 | blk.20.attn_k_norm.weight | Block 20 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 245 | blk.20.attn_norm.weight | Block 20 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 246 | blk.20.attn_output.weight | Block 20 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 247 | blk.20.attn_q.weight | Block 20 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 248 | blk.20.attn_q_norm.weight | Block 20 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 249 | blk.20.attn_v.weight | Block 20 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 250 | blk.20.ffn_down_exps.weight | Block 20 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 251 | blk.20.ffn_gate_exps.weight | Block 20 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 252 | blk.20.ffn_gate_inp.weight | Block 20 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 253 | blk.20.ffn_norm.weight | Block 20 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 254 | blk.20.ffn_up_exps.weight | Block 20 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.20: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 21 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 255 | blk.21.attn_k.weight | Block 21 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 256 | blk.21.attn_k_norm.weight | Block 21 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 257 | blk.21.attn_norm.weight | Block 21 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 258 | blk.21.attn_output.weight | Block 21 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 259 | blk.21.attn_q.weight | Block 21 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 260 | blk.21.attn_q_norm.weight | Block 21 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 261 | blk.21.attn_v.weight | Block 21 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 262 | blk.21.ffn_down_exps.weight | Block 21 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 263 | blk.21.ffn_gate_exps.weight | Block 21 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 264 | blk.21.ffn_gate_inp.weight | Block 21 Expert-Routing Layer For The Feed-Forward Network In Mixture Of 
Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 265 | blk.21.ffn_norm.weight | Block 21 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 266 | blk.21.ffn_up_exps.weight | Block 21 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.21: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 22 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 267 | blk.22.attn_k.weight | Block 22 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 268 | blk.22.attn_k_norm.weight | Block 22 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 269 | blk.22.attn_norm.weight | Block 22 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 270 | blk.22.attn_output.weight | Block 22 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 271 | blk.22.attn_q.weight | Block 22 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 272 | blk.22.attn_q_norm.weight | Block 22 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 273 | blk.22.attn_v.weight | Block 22 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 274 | blk.22.ffn_down_exps.weight | Block 22 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 275 | blk.22.ffn_gate_exps.weight | Block 22 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 276 | blk.22.ffn_gate_inp.weight | Block 22 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 277 | blk.22.ffn_norm.weight | Block 22 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 278 | blk.22.ffn_up_exps.weight | Block 22 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.22: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 23 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 279 | blk.23.attn_k.weight | Block 23 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q3_K | +| 280 | blk.23.attn_k_norm.weight | Block 23 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 281 | blk.23.attn_norm.weight | Block 23 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 282 | blk.23.attn_output.weight | Block 23 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 283 | blk.23.attn_q.weight | Block 23 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q3_K | +| 284 | blk.23.attn_q_norm.weight | Block 23 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 285 | blk.23.attn_v.weight | Block 23 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 286 | blk.23.ffn_down_exps.weight | Block 23 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 287 | blk.23.ffn_gate_exps.weight | Block 23 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 288 | blk.23.ffn_gate_inp.weight | 
Block 23 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 289 | blk.23.ffn_norm.weight | Block 23 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 290 | blk.23.ffn_up_exps.weight | Block 23 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.23: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 24 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 291 | blk.24.attn_k.weight | Block 24 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 292 | blk.24.attn_k_norm.weight | Block 24 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 293 | blk.24.attn_norm.weight | Block 24 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 294 | blk.24.attn_output.weight | Block 24 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 295 | blk.24.attn_q.weight | Block 24 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 296 | blk.24.attn_q_norm.weight | Block 24 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 297 | blk.24.attn_v.weight | Block 24 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 298 | blk.24.ffn_down_exps.weight | Block 24 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 299 | blk.24.ffn_gate_exps.weight | Block 24 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | +| 300 | blk.24.ffn_gate_inp.weight | Block 24 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 301 | blk.24.ffn_norm.weight | Block 24 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 302 | blk.24.ffn_up_exps.weight | Block 24 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q3_K | + +- Total elements in blk.24: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 25 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 303 | blk.25.attn_k.weight | Block 25 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 304 | blk.25.attn_k_norm.weight | Block 25 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 305 | blk.25.attn_norm.weight | Block 25 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 306 | blk.25.attn_output.weight | Block 25 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 307 | blk.25.attn_q.weight | Block 25 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 308 | blk.25.attn_q_norm.weight | Block 25 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 309 | blk.25.attn_v.weight | Block 25 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 310 | blk.25.ffn_down_exps.weight | Block 25 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 311 | blk.25.ffn_gate_exps.weight | Block 25 Ffn_Gate_Exps (W) | (~201M) 
201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 312 | blk.25.ffn_gate_inp.weight | Block 25 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 313 | blk.25.ffn_norm.weight | Block 25 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 314 | blk.25.ffn_up_exps.weight | Block 25 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.25: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 26 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 315 | blk.26.attn_k.weight | Block 26 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 316 | blk.26.attn_k_norm.weight | Block 26 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 317 | blk.26.attn_norm.weight | Block 26 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 318 | blk.26.attn_output.weight | Block 26 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 319 | blk.26.attn_q.weight | Block 26 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 320 | blk.26.attn_q_norm.weight | Block 26 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 321 | blk.26.attn_v.weight | Block 26 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 322 | blk.26.ffn_down_exps.weight | Block 26 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 323 | blk.26.ffn_gate_exps.weight | Block 26 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 324 | blk.26.ffn_gate_inp.weight | Block 26 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 325 | blk.26.ffn_norm.weight | Block 26 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 326 | blk.26.ffn_up_exps.weight | Block 26 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.26: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 27 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 327 | blk.27.attn_k.weight | Block 27 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 328 | blk.27.attn_k_norm.weight | Block 27 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 329 | blk.27.attn_norm.weight | Block 27 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 330 | blk.27.attn_output.weight | Block 27 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 331 | blk.27.attn_q.weight | Block 27 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 332 | blk.27.attn_q_norm.weight | Block 27 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 333 | blk.27.attn_v.weight | Block 27 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 334 | blk.27.ffn_down_exps.weight | Block 27 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 
335 | blk.27.ffn_gate_exps.weight | Block 27 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 336 | blk.27.ffn_gate_inp.weight | Block 27 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 337 | blk.27.ffn_norm.weight | Block 27 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 338 | blk.27.ffn_up_exps.weight | Block 27 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.27: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 28 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 339 | blk.28.attn_k.weight | Block 28 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 340 | blk.28.attn_k_norm.weight | Block 28 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 341 | blk.28.attn_norm.weight | Block 28 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 342 | blk.28.attn_output.weight | Block 28 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 343 | blk.28.attn_q.weight | Block 28 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 344 | blk.28.attn_q_norm.weight | Block 28 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 345 | blk.28.attn_v.weight | Block 28 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 346 | blk.28.ffn_down_exps.weight | Block 28 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 347 | blk.28.ffn_gate_exps.weight | Block 28 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 348 | blk.28.ffn_gate_inp.weight | Block 28 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 349 | blk.28.ffn_norm.weight | Block 28 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 350 | blk.28.ffn_up_exps.weight | Block 28 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.28: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 29 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 351 | blk.29.attn_k.weight | Block 29 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 352 | blk.29.attn_k_norm.weight | Block 29 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 353 | blk.29.attn_norm.weight | Block 29 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 354 | blk.29.attn_output.weight | Block 29 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 355 | blk.29.attn_q.weight | Block 29 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 356 | blk.29.attn_q_norm.weight | Block 29 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 357 | blk.29.attn_v.weight | Block 29 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 358 | blk.29.ffn_down_exps.weight | Block 29 
Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 359 | blk.29.ffn_gate_exps.weight | Block 29 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 360 | blk.29.ffn_gate_inp.weight | Block 29 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 361 | blk.29.ffn_norm.weight | Block 29 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 362 | blk.29.ffn_up_exps.weight | Block 29 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.29: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 30 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 363 | blk.30.attn_k.weight | Block 30 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 364 | blk.30.attn_k_norm.weight | Block 30 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 365 | blk.30.attn_norm.weight | Block 30 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 366 | blk.30.attn_output.weight | Block 30 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 367 | blk.30.attn_q.weight | Block 30 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 368 | blk.30.attn_q_norm.weight | Block 30 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 369 | blk.30.attn_v.weight | Block 30 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 370 | blk.30.ffn_down_exps.weight | Block 30 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 371 | blk.30.ffn_gate_exps.weight | Block 30 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 372 | blk.30.ffn_gate_inp.weight | Block 30 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 373 | blk.30.ffn_norm.weight | Block 30 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 374 | blk.30.ffn_up_exps.weight | Block 30 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.30: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 31 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 375 | blk.31.attn_k.weight | Block 31 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 376 | blk.31.attn_k_norm.weight | Block 31 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 377 | blk.31.attn_norm.weight | Block 31 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 378 | blk.31.attn_output.weight | Block 31 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 379 | blk.31.attn_q.weight | Block 31 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 380 | blk.31.attn_q_norm.weight | Block 31 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 381 | blk.31.attn_v.weight | Block 31 Attention Value (W) | ( ~1M) 1048576 | 
2048 x 512 x 1 x 1 | Q5_K | +| 382 | blk.31.ffn_down_exps.weight | Block 31 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 383 | blk.31.ffn_gate_exps.weight | Block 31 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 384 | blk.31.ffn_gate_inp.weight | Block 31 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 385 | blk.31.ffn_norm.weight | Block 31 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 386 | blk.31.ffn_up_exps.weight | Block 31 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.31: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 32 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 387 | blk.32.attn_k.weight | Block 32 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 388 | blk.32.attn_k_norm.weight | Block 32 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 389 | blk.32.attn_norm.weight | Block 32 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 390 | blk.32.attn_output.weight | Block 32 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 391 | blk.32.attn_q.weight | Block 32 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 392 | blk.32.attn_q_norm.weight | Block 32 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 393 | blk.32.attn_v.weight | Block 32 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 394 | blk.32.ffn_down_exps.weight | Block 32 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 395 | blk.32.ffn_gate_exps.weight | Block 32 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 396 | blk.32.ffn_gate_inp.weight | Block 32 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 397 | blk.32.ffn_norm.weight | Block 32 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 398 | blk.32.ffn_up_exps.weight | Block 32 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.32: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 33 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 399 | blk.33.attn_k.weight | Block 33 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 400 | blk.33.attn_k_norm.weight | Block 33 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 401 | blk.33.attn_norm.weight | Block 33 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 402 | blk.33.attn_output.weight | Block 33 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 403 | blk.33.attn_q.weight | Block 33 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 404 | blk.33.attn_q_norm.weight | Block 33 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 405 
| blk.33.attn_v.weight | Block 33 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 406 | blk.33.ffn_down_exps.weight | Block 33 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 407 | blk.33.ffn_gate_exps.weight | Block 33 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 408 | blk.33.ffn_gate_inp.weight | Block 33 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 409 | blk.33.ffn_norm.weight | Block 33 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 410 | blk.33.ffn_up_exps.weight | Block 33 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.33: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 34 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 411 | blk.34.attn_k.weight | Block 34 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 412 | blk.34.attn_k_norm.weight | Block 34 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 413 | blk.34.attn_norm.weight | Block 34 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 414 | blk.34.attn_output.weight | Block 34 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 415 | blk.34.attn_q.weight | Block 34 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 416 | blk.34.attn_q_norm.weight | Block 34 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 417 | blk.34.attn_v.weight | Block 34 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 418 | blk.34.ffn_down_exps.weight | Block 34 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 419 | blk.34.ffn_gate_exps.weight | Block 34 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 420 | blk.34.ffn_gate_inp.weight | Block 34 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 421 | blk.34.ffn_norm.weight | Block 34 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 422 | blk.34.ffn_up_exps.weight | Block 34 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.34: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 35 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 423 | blk.35.attn_k.weight | Block 35 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 424 | blk.35.attn_k_norm.weight | Block 35 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 425 | blk.35.attn_norm.weight | Block 35 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 426 | blk.35.attn_output.weight | Block 35 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 427 | blk.35.attn_q.weight | Block 35 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 428 | blk.35.attn_q_norm.weight 
| Block 35 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 429 | blk.35.attn_v.weight | Block 35 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 430 | blk.35.ffn_down_exps.weight | Block 35 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 431 | blk.35.ffn_gate_exps.weight | Block 35 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 432 | blk.35.ffn_gate_inp.weight | Block 35 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 433 | blk.35.ffn_norm.weight | Block 35 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 434 | blk.35.ffn_up_exps.weight | Block 35 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.35: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 36 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 435 | blk.36.attn_k.weight | Block 36 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 436 | blk.36.attn_k_norm.weight | Block 36 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 437 | blk.36.attn_norm.weight | Block 36 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 438 | blk.36.attn_output.weight | Block 36 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 439 | blk.36.attn_q.weight | Block 36 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 440 | blk.36.attn_q_norm.weight | Block 36 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 441 | blk.36.attn_v.weight | Block 36 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 442 | blk.36.ffn_down_exps.weight | Block 36 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 443 | blk.36.ffn_gate_exps.weight | Block 36 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 444 | blk.36.ffn_gate_inp.weight | Block 36 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 445 | blk.36.ffn_norm.weight | Block 36 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 446 | blk.36.ffn_up_exps.weight | Block 36 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.36: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 37 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 447 | blk.37.attn_k.weight | Block 37 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 448 | blk.37.attn_k_norm.weight | Block 37 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 449 | blk.37.attn_norm.weight | Block 37 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 450 | blk.37.attn_output.weight | Block 37 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 451 | blk.37.attn_q.weight | Block 37 Attention Query (W) | ( ~8M) 
8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 452 | blk.37.attn_q_norm.weight | Block 37 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 453 | blk.37.attn_v.weight | Block 37 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 454 | blk.37.ffn_down_exps.weight | Block 37 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 455 | blk.37.ffn_gate_exps.weight | Block 37 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 456 | blk.37.ffn_gate_inp.weight | Block 37 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 457 | blk.37.ffn_norm.weight | Block 37 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 458 | blk.37.ffn_up_exps.weight | Block 37 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.37: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 38 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 459 | blk.38.attn_k.weight | Block 38 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 460 | blk.38.attn_k_norm.weight | Block 38 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 461 | blk.38.attn_norm.weight | Block 38 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 462 | blk.38.attn_output.weight | Block 38 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 463 | blk.38.attn_q.weight | Block 38 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 464 | blk.38.attn_q_norm.weight | Block 38 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 465 | blk.38.attn_v.weight | Block 38 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 466 | blk.38.ffn_down_exps.weight | Block 38 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 467 | blk.38.ffn_gate_exps.weight | Block 38 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 468 | blk.38.ffn_gate_inp.weight | Block 38 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 469 | blk.38.ffn_norm.weight | Block 38 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 470 | blk.38.ffn_up_exps.weight | Block 38 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.38: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 39 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 471 | blk.39.attn_k.weight | Block 39 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 472 | blk.39.attn_k_norm.weight | Block 39 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 473 | blk.39.attn_norm.weight | Block 39 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 474 | blk.39.attn_output.weight | Block 39 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | 
Q4_K | +| 475 | blk.39.attn_q.weight | Block 39 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 476 | blk.39.attn_q_norm.weight | Block 39 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 477 | blk.39.attn_v.weight | Block 39 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 478 | blk.39.ffn_down_exps.weight | Block 39 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 479 | blk.39.ffn_gate_exps.weight | Block 39 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 480 | blk.39.ffn_gate_inp.weight | Block 39 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 481 | blk.39.ffn_norm.weight | Block 39 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 482 | blk.39.ffn_up_exps.weight | Block 39 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.39: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 40 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 483 | blk.40.attn_k.weight | Block 40 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 484 | blk.40.attn_k_norm.weight | Block 40 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 485 | blk.40.attn_norm.weight | Block 40 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 486 | blk.40.attn_output.weight | Block 40 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 487 | blk.40.attn_q.weight | Block 40 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 488 | blk.40.attn_q_norm.weight | Block 40 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 489 | blk.40.attn_v.weight | Block 40 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 490 | blk.40.ffn_down_exps.weight | Block 40 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 491 | blk.40.ffn_gate_exps.weight | Block 40 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 492 | blk.40.ffn_gate_inp.weight | Block 40 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 493 | blk.40.ffn_norm.weight | Block 40 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 494 | blk.40.ffn_up_exps.weight | Block 40 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.40: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 41 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 495 | blk.41.attn_k.weight | Block 41 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 496 | blk.41.attn_k_norm.weight | Block 41 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 497 | blk.41.attn_norm.weight | Block 41 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 498 | blk.41.attn_output.weight 
| Block 41 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 499 | blk.41.attn_q.weight | Block 41 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 500 | blk.41.attn_q_norm.weight | Block 41 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 501 | blk.41.attn_v.weight | Block 41 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 502 | blk.41.ffn_down_exps.weight | Block 41 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 503 | blk.41.ffn_gate_exps.weight | Block 41 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 504 | blk.41.ffn_gate_inp.weight | Block 41 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 505 | blk.41.ffn_norm.weight | Block 41 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 506 | blk.41.ffn_up_exps.weight | Block 41 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.41: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 42 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 507 | blk.42.attn_k.weight | Block 42 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 508 | blk.42.attn_k_norm.weight | Block 42 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 509 | blk.42.attn_norm.weight | Block 42 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 510 | blk.42.attn_output.weight | Block 42 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K | +| 511 | blk.42.attn_q.weight | Block 42 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K | +| 512 | blk.42.attn_q_norm.weight | Block 42 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 513 | blk.42.attn_v.weight | Block 42 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K | +| 514 | blk.42.ffn_down_exps.weight | Block 42 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K | +| 515 | blk.42.ffn_gate_exps.weight | Block 42 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | +| 516 | blk.42.ffn_gate_inp.weight | Block 42 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 | +| 517 | blk.42.ffn_norm.weight | Block 42 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 | +| 518 | blk.42.ffn_up_exps.weight | Block 42 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K | + +- Total elements in blk.42: (~623M) 623120640 +- Percentage of total elements: 2.13% + + +### Block 43 Tensor Group : ~623M Elements + +| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type | +|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----| +| 519 | blk.43.attn_k.weight | Block 43 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K | +| 520 | blk.43.attn_k_norm.weight | Block 43 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 | +| 521 | blk.43.attn_norm.weight | Block 43 Attention Normalization (W) | 
+
+
+### Block 43 Tensor Group : ~623M Elements
+
+| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type |
+|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----|
+| 519 | blk.43.attn_k.weight | Block 43 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K |
+| 520 | blk.43.attn_k_norm.weight | Block 43 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 |
+| 521 | blk.43.attn_norm.weight | Block 43 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 |
+| 522 | blk.43.attn_output.weight | Block 43 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K |
+| 523 | blk.43.attn_q.weight | Block 43 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K |
+| 524 | blk.43.attn_q_norm.weight | Block 43 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 |
+| 525 | blk.43.attn_v.weight | Block 43 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K |
+| 526 | blk.43.ffn_down_exps.weight | Block 43 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K |
+| 527 | blk.43.ffn_gate_exps.weight | Block 43 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K |
+| 528 | blk.43.ffn_gate_inp.weight | Block 43 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 |
+| 529 | blk.43.ffn_norm.weight | Block 43 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 |
+| 530 | blk.43.ffn_up_exps.weight | Block 43 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K |
+
+- Total elements in blk.43: (~623M) 623120640
+- Percentage of total elements: 2.13%
+
+
+### Block 44 Tensor Group : ~623M Elements
+
+| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type |
+|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----|
+| 531 | blk.44.attn_k.weight | Block 44 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K |
+| 532 | blk.44.attn_k_norm.weight | Block 44 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 |
+| 533 | blk.44.attn_norm.weight | Block 44 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 |
+| 534 | blk.44.attn_output.weight | Block 44 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K |
+| 535 | blk.44.attn_q.weight | Block 44 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K |
+| 536 | blk.44.attn_q_norm.weight | Block 44 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 |
+| 537 | blk.44.attn_v.weight | Block 44 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K |
+| 538 | blk.44.ffn_down_exps.weight | Block 44 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K |
+| 539 | blk.44.ffn_gate_exps.weight | Block 44 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K |
+| 540 | blk.44.ffn_gate_inp.weight | Block 44 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 |
+| 541 | blk.44.ffn_norm.weight | Block 44 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 |
+| 542 | blk.44.ffn_up_exps.weight | Block 44 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K |
+
+- Total elements in blk.44: (~623M) 623120640
+- Percentage of total elements: 2.13%
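+
+Rather than transcribing shapes by hand, the same accounting can be pulled from the file itself. A sketch using the `gguf` Python package that ships with llama.cpp (`pip install gguf`); the `GGUFReader` field names below match current versions of that package, but treat them as assumptions if your version differs:
+
+```python
+from collections import defaultdict
+
+from gguf import GGUFReader  # pip install gguf
+
+reader = GGUFReader("Qwen3-30B-A3B-Q4_K_M.gguf")
+
+# Bucket element counts by block prefix ("blk.44"), or "base" for everything else.
+per_block = defaultdict(int)
+for tensor in reader.tensors:
+    name = tensor.name
+    key = ".".join(name.split(".")[:2]) if name.startswith("blk.") else "base"
+    per_block[key] += int(tensor.n_elements)
+
+grand_total = sum(per_block.values())
+for key in ("blk.43", "blk.44", "blk.45"):
+    count = per_block[key]
+    print(f"{key}: {count} elements ({100 * count / grand_total:.2f}%)")
+```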
+
+
+### Block 45 Tensor Group : ~623M Elements
+
+| T_ID | Tensor Layer Name | Human Friendly Tensor Layer Name | Elements | Shape | Type |
+|-----:|:----------------------------|:-------------------------------------------------------------------------------------------|:------------------|:----------------------|:-----|
+| 543 | blk.45.attn_k.weight | Block 45 Attention Key (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q4_K |
+| 544 | blk.45.attn_k_norm.weight | Block 45 Attn_K_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 |
+| 545 | blk.45.attn_norm.weight | Block 45 Attention Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 |
+| 546 | blk.45.attn_output.weight | Block 45 Attention Output (W) | ( ~8M) 8388608 | 4096 x 2048 x 1 x 1 | Q4_K |
+| 547 | blk.45.attn_q.weight | Block 45 Attention Query (W) | ( ~8M) 8388608 | 2048 x 4096 x 1 x 1 | Q4_K |
+| 548 | blk.45.attn_q_norm.weight | Block 45 Attn_Q_Norm (W) | ( 128) 128 | 128 x 1 x 1 x 1 | F32 |
+| 549 | blk.45.attn_v.weight | Block 45 Attention Value (W) | ( ~1M) 1048576 | 2048 x 512 x 1 x 1 | Q5_K |
+| 550 | blk.45.ffn_down_exps.weight | Block 45 Ffn_Down_Exps (W) | (~201M) 201326592 | 768 x 2048 x 128 x 1 | Q5_K |
+| 551 | blk.45.ffn_gate_exps.weight | Block 45 Ffn_Gate_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K |
+| 552 | blk.45.ffn_gate_inp.weight | Block 45 Expert-Routing Layer For The Feed-Forward Network In Mixture Of Expert Models (W) | (~262K) 262144 | 2048 x 128 x 1 x 1 | F32 |
+| 553 | blk.45.ffn_norm.weight | Block 45 Feed-Forward Network Normalization (W) | ( ~2K) 2048 | 2048 x 1 x 1 x 1 | F32 |
+| 554 | blk.45.ffn_up_exps.weight | Block 45 Ffn_Up_Exps (W) | (~201M) 201326592 | 2048 x 768 x 128 x 1 | Q4_K |
+
+- Total elements in blk.45: (~623M) 623120640
+- Percentage of total elements: 2.13%
+
+
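+The Type column also pins down the on-disk footprint: ggml's k-quants pack 256 weights per super-block, at 144 bytes for Q4_K (4.5 bits/weight) and 176 bytes for Q5_K (5.5 bits/weight), while F32 stays at 4 bytes per element. A rough estimate for one transformer block of this file, ignoring GGUF's tensor-alignment padding and header overhead (so slightly under the real on-disk size):
+
+```python
+# Bytes per 256-element super-block for the ggml k-quants used in this file.
+BYTES_PER_SUPERBLOCK = {"Q4_K": 144, "Q5_K": 176}  # 4.5 and 5.5 bits per weight
+
+def tensor_bytes(n_elements: int, ggml_type: str) -> int:
+    if ggml_type == "F32":
+        return 4 * n_elements
+    return BYTES_PER_SUPERBLOCK[ggml_type] * n_elements // 256
+
+# Element counts and types of one block, per the blk.45 table above.
+block = [
+    (1048576, "Q4_K"), (128, "F32"), (2048, "F32"),      # attn_k, attn_k_norm, attn_norm
+    (8388608, "Q4_K"), (8388608, "Q4_K"), (128, "F32"),  # attn_output, attn_q, attn_q_norm
+    (1048576, "Q5_K"),                                   # attn_v
+    (201326592, "Q5_K"),                                 # ffn_down_exps
+    (201326592, "Q4_K"),                                 # ffn_gate_exps
+    (262144, "F32"), (2048, "F32"),                      # ffn_gate_inp, ffn_norm
+    (201326592, "Q4_K"),                                 # ffn_up_exps
+]
+
+size = sum(tensor_bytes(n, t) for n, t in block)
+print(f"{size / 2**20:.1f} MiB per block")  # ≈ 359 MiB, times 46 blocks ≈ 16 GiB
+```
+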