# mita-elite-v1.1-7b-2-25-2025 / mergekit_config.yml
# Uploaded by baebee via huggingface_hub (commit 8a144b6, verified)
# mergekit configuration: SCE merge of six Qwen2.5-7B fine-tunes onto the
# Qwen2.5-7B-Instruct base. Each source model was picked as the best performer
# on one benchmark and contributes equally (density/weight ≈ 1/6 each).
# NOTE(review): 6 × 0.167 = 1.002, and normalize is false, so the weights are
# applied slightly over-unity as written — confirm this is intentional or
# switch to 0.1667 / normalize: true.
models:
  - model: Goekdeniz-Guelmez/Josiefied-Qwen2.5-7B-Instruct-abliterated-v2  # Best for Benchmark 1
    parameters:
      density: 0.167
      weight: 0.167
  - model: Aashraf995/Qwen-Evo-7B  # Best for Benchmark 2
    parameters:
      density: 0.167
      weight: 0.167
  - model: nvidia/AceMath-7B-Instruct  # Best for Benchmark 3
    parameters:
      density: 0.167
      weight: 0.167
  - model: fblgit/cybertron-v4-qw7B-UNAMGS  # Best for Benchmark 4
    parameters:
      density: 0.167
      weight: 0.167
  - model: jeffmeloy/Qwen2.5-7B-nerd-uncensored-v1.5  # Best for Benchmark 5
    parameters:
      density: 0.167
      weight: 0.167
  - model: jeffmeloy/Qwen2.5-7B-olm-v1.0  # Best for Benchmark 6
    parameters:
      density: 0.167
      weight: 0.167
merge_method: sce
base_model: Qwen/Qwen2.5-7B-Instruct  # Replace if using a different base model
parameters:
  normalize: false
  int8_mask: true
  select_topk: 0.1  # Retains top 10% highest variance elements (adjust for better results)
dtype: bfloat16
allow_crimes: true