# DarkSapling-7B-v2.0 / mergekit-config.yml
models:
  # First entry is the same checkpoint declared as base_model below and
  # carries no per-model parameters (matching the original config).
  - model: "Z:\\ai_text\\text-generation-webui\\models\\cognitivecomputations_samantha-mistral-7b"
  - model: "Z:\\ai_text\\text-generation-webui\\models\\cognitivecomputations_dolphin-2.6-mistral-7b-dpo-laser"
    parameters:
      weight: 0.19
      density: 0.5
  - model: "Z:\\ai_text\\text-generation-webui\\models\\KoboldAI_Mistral-7B-Holodeck-1"
    parameters:
      weight: 0.3
      density: 1.0
  - model: "Z:\\ai_text\\text-generation-webui\\models\\KoboldAI_Mistral-7B-Erebus-v3"
    parameters:
      weight: 0.09
      density: 1.0
# NOTE(review): listed weights sum to 0.58, not 1.0 — presumably relying on
# mergekit's weight normalization; confirm that is the intended behavior.
merge_method: dare_ties
tokenizer_source: union
base_model: "Z:\\ai_text\\text-generation-webui\\models\\cognitivecomputations_samantha-mistral-7b"
# Global merge parameters (apply to the merge as a whole, not one model).
parameters:
  int8_mask: true
dtype: bfloat16
name: darksapling12_dire_ties