Gemma-Med-Merge / mergekit_config.yml
models:
  - model: davidkim205/ko-gemma-2-9b-it
    layer_range: [0, 42]  # all 42 transformer layers of Gemma 2 9B
    parameters:
      weight: 1       # relative weighting of this model's task vector
      density: 0.7    # fraction of delta weights retained
      gamma: 0.03     # fraction of largest-magnitude deltas dropped as outliers
  - model: ChuGyouk/ko-med-gemma-2-9b-it-merge2
    layer_range: [0, 42]
    parameters:
      weight: 1
      density: 0.42
      gamma: 0.03
  - model: Shaleen123/gemma2-9b-medical
    layer_range: [0, 42]
    parameters:
      weight: 1
      density: 0.42
      gamma: 0.03
  - model: valeriojob/MedGPT-Gemma2-9B-BA-v.1
    layer_range: [0, 42]
    parameters:
      weight: 1
      density: 0.42
      gamma: 0.03
merge_method: breadcrumbs_ties
base_model: anthracite-org/magnum-v3-9b-customgemma2  # alternative base: rtzr/ko-gemma-2-9b-it+ghost613/gemma9_on_korean_summary_events (base model + LoRA loading)
dtype: float16
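
For reference, a config like this is executed with mergekit, either via its CLI (mergekit-yaml mergekit_config.yml ./merged --cuda) or via its Python API. Below is a minimal sketch following the library-usage pattern from mergekit's README; the config path, the ./merged output directory, and the option values are illustrative assumptions, not part of this repository.

import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load this repository's merge recipe (path assumed relative to the repo root).
with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the breadcrumbs_ties merge; ./merged is an assumed output directory.
run_merge(
    merge_config,
    out_path="./merged",
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use the GPU for tensor ops if present
        copy_tokenizer=True,             # copy the base model's tokenizer to the output
        lazy_unpickle=False,             # experimental low-memory loader, off here
        low_cpu_memory=False,
    ),
)

The resulting model directory under ./merged can then be loaded with transformers like any other Gemma 2 checkpoint.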