automerger committed · Commit 1b39687 · 1 Parent: 5df1e4f

Upload folder using huggingface_hub

Changed files:
- README.md (+11 -17)
- config.json (+2 -2)
- mergekit_config.yml (+10 -15)
- model-00001-of-00002.safetensors (+1 -1)
- model-00002-of-00002.safetensors (+1 -1)
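The commit message says the folder was pushed with huggingface_hub. A minimal sketch of the equivalent call, assuming the repo id `automerger/Experiment26Yam-7B` and a local folder path (neither is stated on this page):

```python
# Sketch of a folder upload like this commit's; the repo id and
# local path below are assumptions, not taken from the commit itself.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    repo_id="automerger/Experiment26Yam-7B",  # assumed repo id
    folder_path="./Experiment26Yam-7B",       # assumed local folder
    commit_message="Upload folder using huggingface_hub",
)
```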
README.md
CHANGED
@@ -6,37 +6,31 @@ tags:
 - lazymergekit
 - automerger
 base_model:
-- yam-peleg/Experiment26-7B
 - mayacinka/yam-jom-7B
 ---
 
 # Experiment26Yam-7B
 
 Experiment26Yam-7B is an automated merge created by [Maxime Labonne](https://huggingface.co/mlabonne) using the following configuration.
-* [yam-peleg/Experiment26-7B](https://huggingface.co/yam-peleg/Experiment26-7B)
 * [mayacinka/yam-jom-7B](https://huggingface.co/mayacinka/yam-jom-7B)
 
 ## 🧩 Configuration
 
 ```yaml
-… (eight deleted lines of the old configuration, not captured in this view)
+models:
+  - model: rwitz/experiment26-truthy-iter-0
+    # No parameters necessary for base model
+  - model: mayacinka/yam-jom-7B
+    parameters:
+      density: 0.53
+      weight: 0.6
+merge_method: dare_ties
+base_model: rwitz/experiment26-truthy-iter-0
 parameters:
-…
-    - filter: self_attn
-      value: [0, 0.5, 0.3, 0.7, 1]
-    - filter: mlp
-      value: [1, 0.5, 0.7, 0.3, 0]
-    - value: 0.5
+  int8_mask: true
 dtype: bfloat16
 random_seed: 0
-…
+```
 
 ## 💻 Usage
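The Usage section itself falls outside this hunk. For reference, a minimal sketch of loading the merged model with transformers, assuming the weights live at `automerger/Experiment26Yam-7B` (an assumption; the repo id is not shown in the diff):

```python
# Minimal sketch, not the README's actual Usage section (truncated here).
# Assumes the merged weights live at "automerger/Experiment26Yam-7B".
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

model_id = "automerger/Experiment26Yam-7B"  # assumed repo id

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches the config's torch_dtype
    device_map="auto",           # requires accelerate
)

inputs = tokenizer("What is a model merge?", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```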
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "…
+  "_name_or_path": "rwitz/experiment26-truthy-iter-0",
   "architectures": [
     "MistralForCausalLM"
   ],
@@ -20,7 +20,7 @@
   "sliding_window": 4096,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.…
+  "transformers_version": "4.39.0",
   "use_cache": true,
   "vocab_size": 32000
 }
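A quick way to sanity-check the touched fields once the commit is live; the repo id is an assumption, and the expected values come from the diff above:

```python
# Inspect the updated config; repo id is an assumption, expected values
# (architecture, sliding window, vocab size) come from the diff above.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("automerger/Experiment26Yam-7B")  # assumed repo id
print(cfg.architectures)   # ['MistralForCausalLM']
print(cfg.sliding_window)  # 4096
print(cfg.vocab_size)      # 32000
```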
mergekit_config.yml
CHANGED
@@ -1,19 +1,14 @@
 
-… (eight deleted lines of the old configuration, not captured in this view)
+models:
+  - model: rwitz/experiment26-truthy-iter-0
+    # No parameters necessary for base model
+  - model: mayacinka/yam-jom-7B
+    parameters:
+      density: 0.53
+      weight: 0.6
+merge_method: dare_ties
+base_model: rwitz/experiment26-truthy-iter-0
 parameters:
-…
-    - filter: self_attn
-      value: [0, 0.5, 0.3, 0.7, 1]
-    - filter: mlp
-      value: [1, 0.5, 0.7, 0.3, 0]
-    - value: 0.5
+  int8_mask: true
 dtype: bfloat16
 random_seed: 0
-…
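The new file is a standard mergekit DARE-TIES configuration (runnable with mergekit's `mergekit-yaml` CLI). A small sketch that parses the file and checks the fields shown in this diff, assuming PyYAML is installed and the file sits in the working directory:

```python
# Parse the new mergekit_config.yml and check the fields shown in the diff;
# assumes PyYAML is installed and a local copy of the file.
import yaml

with open("mergekit_config.yml") as f:
    cfg = yaml.safe_load(f)

assert cfg["merge_method"] == "dare_ties"
assert cfg["base_model"] == "rwitz/experiment26-truthy-iter-0"
assert cfg["parameters"]["int8_mask"] is True
print([m["model"] for m in cfg["models"]])
# -> ['rwitz/experiment26-truthy-iter-0', 'mayacinka/yam-jom-7B']
```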
model-00001-of-00002.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:…
+oid sha256:b80409a92aaa659bc0c16a92f7a2745d4adcd1b89fb992a8f35e37cf8d9cf942
 size 9942981696
model-00002-of-00002.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:…
+oid sha256:9713367b9ddbc47fc19d43848a55e08cf885a6ad65afc89e926e5391a1a5f8cb
 size 4540516344
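Both shard updates swap only the LFS oid; the sizes are unchanged. A sketch for verifying downloaded shards against the pointers above, using only the file names and hashes shown in this commit:

```python
# Verify downloaded shards against the oids in their LFS pointers.
# File names and expected hashes are taken from the diff above.
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

expected = {
    "model-00001-of-00002.safetensors":
        "b80409a92aaa659bc0c16a92f7a2745d4adcd1b89fb992a8f35e37cf8d9cf942",
    "model-00002-of-00002.safetensors":
        "9713367b9ddbc47fc19d43848a55e08cf885a6ad65afc89e926e5391a1a5f8cb",
}

for name, oid in expected.items():
    print(name, "OK" if sha256_of(name) == oid else "MISMATCH")
```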