Or4cl3-1 committed on
Commit
3615bee
1 Parent(s): 96184ab

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +34 -16
config.json CHANGED
@@ -1,16 +1,34 @@
1
- slices:
2
- - sources:
3
- - model: Or4cl3-1/agent_gemma_7b
4
- layer_range: [0, 32]
5
- - model: cognitivecomputations/dolphin-2.5-mixtral-8x7b
6
- layer_range: [0, 32]
7
- merge_method: slerp
8
- base_model: Or4cl3-1/agent_gemma_7b
9
- parameters:
10
- t:
11
- - filter: self_attn
12
- value: [0, 0.5, 0.3, 0.7, 1]
13
- - filter: mlp
14
- value: [1, 0.5, 0.7, 0.3, 0]
15
- - value: 0.5
16
- dtype: bfloat16
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "slices": [
3
+ {
4
+ "sources": [
5
+ {
6
+ "model": "Or4cl3-1/agent_gemma_7b",
7
+ "layer_range": [0, 32]
8
+ },
9
+ {
10
+ "model": "cognitivecomputations/dolphin-2.5-mixtral-8x7b",
11
+ "layer_range": [0, 32]
12
+ }
13
+ ],
14
+ "merge_method": "slerp",
15
+ "base_model": "Or4cl3-1/agent_gemma_7b",
16
+ "parameters": {
17
+ "t": [
18
+ {
19
+ "filter": "self_attn",
20
+ "value": [0, 0.5, 0.3, 0.7, 1]
21
+ },
22
+ {
23
+ "filter": "mlp",
24
+ "value": [1, 0.5, 0.7, 0.3, 0]
25
+ },
26
+ {
27
+ "value": 0.5
28
+ }
29
+ ]
30
+ },
31
+ "dtype": "bfloat16"
32
+ }
33
+ ]
34
+ }