---
# mergekit model-merge configuration
# Two-model nuslerp merge. Each `weight:` entry gives one model's blend
# weight, optionally restricted to a projection via `filter:`; the final
# filter-less entry is the fallback weight for all remaining tensors.
# NOTE(review): list-valued weights are presumably interpolated across
# layer depth by mergekit — confirm against the mergekit gradient docs.
dtype: bfloat16
tokenizer_source: base  # keep the base model's tokenizer
merge_method: nuslerp
parameters:
  nuslerp_row_wise: true
models:
  # The per-filter weights of the two models are exact complements
  # (each pair of vectors sums to 1 element-wise), so every tensor is a
  # full interpolation between the two sources.
  - model: SicariusSicariiStuff/Negative_LLAMA_70B
    parameters:
      weight:
        - filter: v_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: o_proj
          value: [1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1]
        - filter: up_proj
          value: [1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1]
        - filter: gate_proj
          value: [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]
        - filter: down_proj
          value: [0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0]
        # Fallback weight for tensors not matched by a filter above.
        - value: [0.2, 0.35, 0.4, 0.35, 0.2]
  - model: Nohobby/L3.3-Prikol-70B-v0.2
    parameters:
      weight:
        - filter: v_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: o_proj
          value: [0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]
        - filter: up_proj
          value: [0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0]
        - filter: gate_proj
          value: [1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]
        - filter: down_proj
          value: [1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1]
        # Fallback weight; complements the first model's [0.2 ... 0.2].
        - value: [0.8, 0.65, 0.6, 0.65, 0.8]