# fct-14-8b / mergekit_config.yml
# Uploaded by jaspionjader using huggingface_hub
# Commit: 3081f76 (verified) — 442 bytes
# mergekit configuration: SLERP merge of two Llama-3.1-8B models.
# All 32 transformer layers of each model participate in the merge.
slices:
  - sources:
      # Base model (also listed under base_model below).
      - model: jaspionjader/fct-13-8b
        layer_range:
          - 0
          - 32
      # Secondary model blended into the base.
      - model: FuseAI/FuseChat-Llama-3.1-8B-Instruct
        layer_range:
          - 0
          - 32
merge_method: slerp
base_model: jaspionjader/fct-13-8b
parameters:
  # t is the SLERP interpolation factor (0 = base model, 1 = secondary model).
  # Per-filter lists are interpolated across the layer stack; small values
  # keep the merge close to the base model.
  t:
    - filter: self_attn
      value:
        - 0.06
        - 0.05
        - 0.04
        - 0.03
        - 0.02
    - filter: mlp
      value:
        - 0.02
        - 0.03
        - 0.04
        - 0.05
        - 0.06
    # Fallback t for all tensors not matched by a filter above.
    - value: 0.3
dtype: bfloat16