gemma-3-4b-persian-v0-abliterated / mergekit_config.yml
models:
  - model: mlabonne/gemma-3-4b-it-abliterated
  - model: mshojaei77/gemma-3-4b-persian-v0
base_model: mlabonne/gemma-3-4b-it-abliterated
merge_method: slerp
dtype: bfloat16 # Better stability for precision-sensitive merges
parameters:
  density: 0.5
  weight:
    - filter: "self_attn"
      value: [0.75, 0.4, 0.25, 0.4, 0.75]  # U-shaped attention weighting
    - filter: "mlp"
      value: [0.25, 0.6, 0.9, 0.6, 0.25]   # Λ-shaped MLP weighting
  t: [0.15, 0.35, 0.65, 0.35, 0.15]        # per-layer interpolation toward the Persian model, strongest mid-stack
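
For context, `merge_method: slerp` interpolates each pair of tensors along the great circle between the two models rather than averaging them linearly, which better preserves weight norms at intermediate `t`. The sketch below shows that core operation; it is a minimal NumPy illustration, not mergekit's internal API, and the function name is ours.

```python
import numpy as np

def slerp(t: float, v0: np.ndarray, v1: np.ndarray, eps: float = 1e-8) -> np.ndarray:
    """Spherical linear interpolation: t=0 returns v0 (the base model tensor),
    t=1 returns v1 (here, the Persian model tensor)."""
    # Normalize flattened copies to measure the angle between the tensors.
    u0 = v0.ravel() / (np.linalg.norm(v0) + eps)
    u1 = v1.ravel() / (np.linalg.norm(v1) + eps)
    dot = float(np.clip(np.dot(u0, u1), -1.0, 1.0))

    # Nearly parallel tensors: fall back to plain linear interpolation.
    if abs(dot) > 1.0 - 1e-5:
        return (1.0 - t) * v0 + t * v1

    theta = np.arccos(dot)                  # angle between the two tensors
    w0 = np.sin((1.0 - t) * theta) / np.sin(theta)
    w1 = np.sin(t * theta) / np.sin(theta)
    return w0 * v0 + w1 * v1
```

With the `t` schedule above, the outermost layer groups stay close to the abliterated base (t = 0.15) while the middle of the stack takes the most from the Persian model (t = 0.65). The merge itself is produced with mergekit's CLI, e.g. `mergekit-yaml mergekit_config.yml ./merged-model`.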