# StrangeMerges_52-7B-dare_ties / mergekit_config.yml
models:
  - model: Gille/StrangeMerges_51-7B-dare_ties
    # No parameters necessary for base model
  - model: WizardLM/WizardMath-7B-V1.1
    parameters:
      density: 0.66
      weight: 0.2
  - model: AurelPx/Percival_01-7b-slerp
    parameters:
      density: 0.55
      weight: 0.2
  - model: Weyaxi/Einstein-v4-7B
    parameters:
      density: 0.55
      weight: 0.2
  - model: Kukedlc/NeuralMaths-Experiment-7b
    parameters:
      density: 0.44
      weight: 0.2
  - model: Gille/StrangeMerges_35-7B-slerp
    parameters:
      density: 0.66
      weight: 0.2
merge_method: dare_ties
base_model: Gille/StrangeMerges_51-7B-dare_ties
parameters:
  int8_mask: true
dtype: bfloat16
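
# --- Usage sketch (not part of the original upload) ---
# A config like this is typically applied with the mergekit CLI; the command
# below assumes the mergekit package is installed and the output directory
# name is illustrative:
#
#   mergekit-yaml mergekit_config.yml ./StrangeMerges_52-7B-dare_ties --cuda
#
# With merge_method dare_ties, each non-base model's task vector (its delta
# from base_model) is randomly pruned so that roughly `density` of its values
# survive, rescaled to compensate, then combined TIES-style (sign election)
# and added to the base weights scaled by `weight`. `int8_mask: true` stores
# the intermediate masks in int8 to save memory, and `dtype` sets the
# precision of the merged output.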