merge_method: model_stock
dtype: bfloat16
base_model: NousResearch/Meta-Llama-3.1-8B
models:
  - model: NousResearch/Hermes-3-Llama-3.1-8B
    parameters:
      weight: 0.5
  - model: ArliAI/Llama-3.1-8B-ArliAI-RPMax-v1.3
    parameters:
      weight: 0.5
  - model: Sao10K/L3-8B-Lunaris-v1
    parameters:
      weight: 0.5
  - model: TheDrummer/Llama-3SOME-8B-v2
    parameters:
      weight: 0.4
  - model: Gryphe/Pantheon-RP-1.0-8b-Llama-3
    parameters:
      weight: 0.4
tokenizer:
  source: union
  tokens:
    <|start_header_id|>:
      source: "NousResearch/Hermes-3-Llama-3.1-8B"
    <|end_header_id|>:
      source: "NousResearch/Hermes-3-Llama-3.1-8B"
    <|eot_id|>:
      source: "NousResearch/Hermes-3-Llama-3.1-8B"
chat_template: "llama3"
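
For reference, a minimal sketch of applying this configuration with mergekit's Python API. The file name `config.yaml`, the output path, and the option values below are placeholders, not part of the config itself; the `mergekit-yaml` CLI is an equivalent way to run the merge.

```python
# Sketch: run the model_stock merge described by the config above using mergekit.
# Assumes the config has been saved locally as config.yaml; adjust paths as needed.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

with open("config.yaml", "r", encoding="utf-8") as f:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(f))

run_merge(
    merge_config,
    out_path="./merged-model",   # where the merged weights are written
    options=MergeOptions(
        copy_tokenizer=True,     # carry the union tokenizer into the output
        lazy_unpickle=True,      # stream shards to reduce peak RAM usage
    ),
)
```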