# dolphin-mixtral-2x7b / mergekit_moe_config.yml
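# Mixture-of-Experts merge config for mergekit: builds a 2x7B Mixtral-style
# model from two Mistral-7B fine-tunes, with OpenHermes-2.5 as the base.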
base_model: teknium/OpenHermes-2.5-Mistral-7B
gate_mode: hidden
dtype: bfloat16
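# With gate_mode: hidden, mergekit initializes each expert's router gate from
# the hidden-state representations of its positive_prompts, so tokens that
# resemble those prompts are routed to that expert. The base_model supplies
# the shared (non-expert) weights.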
experts:
  - source_model: teknium/OpenHermes-2.5-Mistral-7B
    positive_prompts:
      - "instruction"
      - "solutions"
      - "chat"
      - "questions"
      - "philosophy"
  - source_model: cognitivecomputations/dolphin-2.6-mistral-7b-dpo-laser
    positive_prompts:
      - "mathematics"
      - "optimization"
      - "code"
      - "step-by-step"
      - "science"