models:
  - model: ReadyArt/L3.3-The-Omega-Directive-70B-Unslop-v2.0
    parameters:
      weight: 0.25
      density: 0.7
      epsilon: 0.2
  - model: TheDrummer/Fallen-Llama-3.3-70B-v1
    parameters:
      weight: 0.25
      density: 0.7
      epsilon: 0.2
  - model: BeaverAI/Shimmer-70B-v1a
    parameters:
      weight: 0.25
      density: 0.7
      epsilon: 0.2
  - model: Mawdistical/Squelching-Fantasies-70B
    parameters:
      weight: 0.25
      density: 0.7
      epsilon: 0.2
merge_method: della_linear
base_model: nbeerbower/Llama-3.1-Nemotron-lorablated-70B
parameters:
  lambda: 1.1
  normalize: true
dtype: bfloat16
chat_template: llama3
tokenizer:
  source: base
  pad_to_multiple_of: 8
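
For reference, a config like this is typically executed with mergekit (https://github.com/arcee-ai/mergekit), either via its `mergekit-yaml` CLI or its Python API. The sketch below uses the Python API; the config path, output directory, and option values are illustrative assumptions, not part of the original config.

```python
# Minimal sketch: run the della_linear merge above with mergekit's Python API.
# Assumes `pip install mergekit` and enough disk/RAM for five 70B checkpoints.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "merge-config.yaml"  # assumed: the YAML above saved to this path
OUTPUT_PATH = "./merged-model"    # assumed: where the merged weights are written

with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU when one is present
        copy_tokenizer=True,             # copy the base tokenizer into the output
        lazy_unpickle=True,              # stream tensors to lower peak memory
        low_cpu_memory=True,
    ),
)
```

The equivalent CLI invocation would be `mergekit-yaml merge-config.yaml ./merged-model --cuda`, again with hypothetical paths.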