The following YAML configuration was used to produce this model:

```yaml
models:
  - model: TareksLab/Emerald-V2-LLaMa-70B
    parameters:
      weight: [0.1, 0.1, 0.1, 0.1, 0.2, 0.5]
      density: 0.5
      epsilon: 0.15
  - model: TareksLab/Carnelian-V2-LLaMa-70B
    parameters:
      weight: [0.1, 0.1, 0.1, 0.2, 0.4, 0.2]
      density: 0.5
      epsilon: 0.15
  - model: TareksLab/Ruby-V2-LLaMa-70B
    parameters:
      weight: [0.1, 0.1, 0.2, 0.4, 0.2, 0.1]
      density: 0.5
      epsilon: 0.15
  - model: TareksLab/Amethyst-V2-LLaMa-70B
    parameters:
      weight: [0.1, 0.2, 0.4, 0.2, 0.1, 0.1]
      density: 0.5
      epsilon: 0.15
  - model: TareksLab/Citrine-V2-LLaMa-70B
    parameters:
      weight: [0.2, 0.4, 0.2, 0.1, 0.1, 0.1]
      density: 0.5
      epsilon: 0.15
  - model: TareksLab/Sapphire-V2-LLaMa-70B
    parameters:
      weight: [0.5, 0.2, 0.1, 0.1, 0.1, 0.1]
      density: 0.5
      epsilon: 0.15
merge_method: della
base_model: Sao10K/Llama-3.3-70B-Vulpecula-r1
parameters:
  normalize: false
  int8_mask: true
  lambda: 1.1
dtype: float32
out_dtype: bfloat16
chat_template: llama3
tokenizer:
  source: TareksLab/Ruby-V2-LLaMa-70B
  pad_to_multiple_of: 8
```
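To reproduce the merge, the configuration above can be passed to mergekit, either via the `mergekit-yaml` CLI (`mergekit-yaml merge-config.yml ./merged-model`) or through its Python API. Below is a minimal sketch, assuming mergekit is installed (`pip install mergekit`) and the config is saved as `merge-config.yml`; the file name and output path are placeholders, and the option values shown are illustrative defaults, not the ones used for this release.

```python
# Sketch: run the della merge defined in merge-config.yml with mergekit's Python API.
# Paths and MergeOptions values are assumptions for illustration.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load the YAML config shown above into mergekit's configuration object.
with open("merge-config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge; a 70B float32 merge needs substantial disk and RAM,
# so lazy unpickling is enabled here to reduce peak memory.
run_merge(
    merge_config,
    out_path="./merged-model",
    options=MergeOptions(
        cuda=False,            # set True to use a GPU for the merge arithmetic
        copy_tokenizer=True,   # the tokenizer source above is copied into the output
        lazy_unpickle=True,
        low_cpu_memory=False,
    ),
)
```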