slices:
  - sources:
      - model: mlabonne/NeuralHermes-2.5-Mistral-7B
        layer_range: [0, 32]
      - model: teknium/OpenHermes-2.5-Mistral-7B
        layer_range: [0, 32]
merge_method: slerp
base_model: mlabonne/NeuralHermes-2.5-Mistral-7B
parameters:
  t:
    - filter: self_attn
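  # Note: slerp operates on exactly two models; base_model below is the
  # t = 0 endpoint of the interpolation.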
      value: [0, 0.3, 0.6, 0.9, 1] # progressive blend of the attention layers
    - filter: mlp
      value: [1, 0.7, 0.4, 0.1, 0] # inverse transition for the MLP layers
    - value: 0.45 # global merge ratio
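    # t is the interpolation factor: t = 0 keeps base_model's weights and
    # t = 1 takes the other model's. A list of values is interpolated as a
    # gradient across the layer range; the filter-less entry is the default
    # applied to all remaining tensors.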
dtype: bfloat16
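# A minimal sketch of how this config could be run with mergekit's CLI
# (assumes mergekit is installed, e.g. via `pip install mergekit`; the
# config filename and output path are illustrative placeholders):
#   mergekit-yaml slerp-config.yaml ./merged-model --copy-tokenizer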