slices:
  - sources:
      - model: Undi95/MLewd-ReMM-L2-Chat-20B-Inverted
        layer_range: [0, 62]  # MLewd-ReMM-L2-Chat-20B-Inverted has 62 layers
      - model: Undi95/PsyMedRP-v1-20B
        layer_range: [0, 62]  # PsyMedRP-v1-20B has 62 layers
merge_method: slerp  # or another method such as linear if needed
base_model: Undi95/PsyMedRP-v1-20B  # either model can serve as the base
parameters:
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]  # tune these for the desired effect
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    - value: 0.5  # default interpolation factor for all other tensors
dtype: bfloat16  # or float16 / float32 if preferred
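
Assuming the config above is saved as, say, merge-config.yml, the merge can be run with mergekit, for example via the CLI: mergekit-yaml merge-config.yml ./merged-20B --cuda. Below is a minimal sketch using mergekit's Python API instead; the paths are placeholders, and the names (MergeConfiguration, MergeOptions, run_merge) follow the mergekit README and may differ between versions.

# Minimal sketch: load the YAML config and run the merge with mergekit's Python API.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the merge configuration from the YAML file shown above.
with open("merge-config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./merged-20B",             # output directory for the merged weights
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # run the merge on GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
        lazy_unpickle=True,              # reduce peak RAM while loading shards
    ),
)

The merged model lands in the output directory as a standard Hugging Face checkpoint, so it can be loaded afterwards with transformers' from_pretrained as usual.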