M-Mahdi-Setak committed
Commit 6584aae
1 Parent(s): 238af62

third model
README.md CHANGED
@@ -18,7 +18,7 @@ should probably proofread and complete it, then remove this comment. -->
 
  This model is a fine-tuned version of [stabilityai/stablelm-3b-4e1t](https://huggingface.co/stabilityai/stablelm-3b-4e1t) on an unknown dataset.
  It achieves the following results on the evaluation set:
- - Loss: 4.2700
+ - Loss: 4.0159
 
  ## Model description
 
@@ -37,9 +37,9 @@ More information needed
  ### Training hyperparameters
 
  The following hyperparameters were used during training:
- - learning_rate: 0.0001
- - train_batch_size: 32
- - eval_batch_size: 32
+ - learning_rate: 0.0005
+ - train_batch_size: 16
+ - eval_batch_size: 16
  - seed: 42
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: cosine
@@ -49,11 +49,17 @@ The following hyperparameters were used during training:
 
  | Training Loss | Epoch | Step | Validation Loss |
  |:-------------:|:-----:|:----:|:---------------:|
- | 4.7338        | 0.18  | 100  | 4.5703          |
- | 4.6391        | 0.36  | 200  | 4.4094          |
- | 4.8127        | 0.54  | 300  | 4.3277          |
- | 4.5393        | 0.72  | 400  | 4.2797          |
- | 4.29          | 0.9   | 500  | 4.2700          |
+ | 4.6872        | 0.09  | 100  | 4.5976          |
+ | 4.1956        | 0.18  | 200  | 4.4367          |
+ | 4.3439        | 0.27  | 300  | 4.3083          |
+ | 4.3709        | 0.36  | 400  | 4.2462          |
+ | 4.3886        | 0.45  | 500  | 4.1890          |
+ | 4.402         | 0.54  | 600  | 4.1248          |
+ | 4.043         | 0.63  | 700  | 4.0840          |
+ | 4.1489        | 0.72  | 800  | 4.0423          |
+ | 4.1139        | 0.81  | 900  | 4.0251          |
+ | 4.273         | 0.9   | 1000 | 4.0162          |
+ | 3.8663        | 1.0   | 1100 | 4.0159          |
 
 
  ### Framework versions
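
For orientation (not part of the commit): the hyperparameter changes above map directly onto `transformers.TrainingArguments`. A minimal sketch, assuming the standard `Trainer` API; only the values mirror the diff, while the output path and evaluation cadence are assumptions:

```python
# Hypothetical sketch: the post-commit hyperparameters as TrainingArguments.
# Adam betas (0.9, 0.999) and epsilon 1e-08 are the library defaults, which
# already match the optimizer line in the README.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="stablelm-3b-4e1t-lora",  # placeholder path, not from the repo
    learning_rate=5e-4,                  # raised from 1e-4 in this commit
    per_device_train_batch_size=16,      # lowered from 32
    per_device_eval_batch_size=16,       # lowered from 32
    seed=42,
    lr_scheduler_type="cosine",
    num_train_epochs=1,                  # the loss table ends at epoch 1.0
    evaluation_strategy="steps",
    eval_steps=100,                      # assumed from the 100-step eval rows
    logging_steps=100,
)
```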
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
  {
  "alpha_pattern": {},
  "auto_mapping": null,
- "base_model_name_or_path": "stabilityai/stablelm-3b-4e1t",
+ "base_model_name_or_path": null,
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
@@ -15,235 +15,235 @@
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
- "r": 64,
+ "r": 256,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
- "layers.13.self_attn.o_proj",
- "layers.27.mlp.up_proj",
- "layers.31.self_attn.q_proj",
- "layers.24.self_attn.q_proj",
- "layers.10.self_attn.v_proj",
- "layers.24.mlp.down_proj",
- "layers.22.mlp.down_proj",
- "layers.4.self_attn.q_proj",
- "layers.25.mlp.down_proj",
- "layers.1.mlp.up_proj",
- "layers.10.mlp.gate_proj",
- "layers.0.self_attn.v_proj",
+ "layers.16.self_attn.k_proj",
+ "layers.27.mlp.gate_proj",
+ "layers.31.mlp.up_proj",
+ "layers.5.mlp.up_proj",
+ "layers.4.self_attn.o_proj",
  "layers.18.mlp.gate_proj",
- "layers.22.mlp.gate_proj",
- "layers.3.self_attn.q_proj",
- "layers.11.mlp.gate_proj",
- "layers.21.self_attn.q_proj",
- "layers.8.mlp.gate_proj",
- "layers.14.self_attn.q_proj",
- "layers.28.mlp.gate_proj",
- "layers.1.self_attn.v_proj",
- "layers.15.mlp.up_proj",
- "layers.25.self_attn.v_proj",
+ "layers.23.mlp.down_proj",
+ "layers.3.mlp.down_proj",
  "layers.23.self_attn.o_proj",
- "layers.19.mlp.gate_proj",
- "layers.1.self_attn.k_proj",
- "layers.1.mlp.down_proj",
- "layers.13.mlp.down_proj",
- "layers.23.mlp.up_proj",
- "layers.6.self_attn.o_proj",
- "layers.27.self_attn.v_proj",
- "layers.13.self_attn.k_proj",
- "layers.11.mlp.up_proj",
- "layers.5.mlp.up_proj",
- "layers.29.mlp.down_proj",
- "layers.30.self_attn.v_proj",
- "layers.9.self_attn.o_proj",
- "layers.15.self_attn.q_proj",
- "layers.25.self_attn.k_proj",
- "layers.17.self_attn.v_proj",
- "layers.23.self_attn.v_proj",
- "layers.12.self_attn.k_proj",
  "layers.31.self_attn.o_proj",
- "layers.21.mlp.gate_proj",
- "layers.2.mlp.up_proj",
- "layers.9.mlp.up_proj",
- "layers.10.self_attn.o_proj",
- "layers.0.mlp.down_proj",
- "layers.3.mlp.gate_proj",
- "layers.5.mlp.down_proj",
- "layers.9.mlp.down_proj",
- "layers.12.mlp.up_proj",
- "layers.0.self_attn.k_proj",
- "layers.25.self_attn.o_proj",
- "layers.24.mlp.up_proj",
- "layers.26.mlp.up_proj",
- "layers.20.self_attn.v_proj",
- "layers.31.self_attn.v_proj",
- "layers.0.self_attn.q_proj",
- "layers.30.mlp.gate_proj",
- "layers.14.self_attn.o_proj",
- "layers.8.self_attn.v_proj",
+ "layers.6.mlp.gate_proj",
+ "layers.11.self_attn.k_proj",
+ "layers.8.self_attn.o_proj",
+ "layers.29.self_attn.o_proj",
+ "layers.3.self_attn.q_proj",
+ "layers.4.self_attn.q_proj",
  "layers.29.mlp.gate_proj",
- "layers.6.mlp.down_proj",
- "layers.16.mlp.down_proj",
- "layers.22.self_attn.q_proj",
- "layers.28.self_attn.v_proj",
+ "layers.19.self_attn.q_proj",
+ "layers.16.mlp.gate_proj",
+ "layers.13.mlp.up_proj",
+ "layers.29.self_attn.k_proj",
+ "layers.0.self_attn.o_proj",
+ "layers.17.mlp.down_proj",
+ "layers.25.mlp.up_proj",
+ "layers.19.self_attn.v_proj",
+ "layers.6.self_attn.q_proj",
+ "layers.5.self_attn.o_proj",
+ "layers.11.mlp.up_proj",
+ "layers.12.self_attn.q_proj",
+ "layers.24.self_attn.k_proj",
+ "layers.8.self_attn.v_proj",
+ "layers.31.self_attn.k_proj",
+ "layers.26.mlp.down_proj",
  "layers.31.mlp.gate_proj",
+ "layers.20.self_attn.v_proj",
+ "layers.11.self_attn.q_proj",
+ "layers.19.mlp.up_proj",
+ "layers.13.self_attn.o_proj",
  "layers.4.self_attn.k_proj",
- "layers.12.self_attn.o_proj",
- "layers.10.mlp.up_proj",
- "layers.19.self_attn.q_proj",
- "layers.8.self_attn.o_proj",
- "layers.16.self_attn.o_proj",
- "layers.3.mlp.down_proj",
- "layers.26.self_attn.k_proj",
- "layers.7.mlp.down_proj",
- "layers.10.self_attn.k_proj",
- "layers.23.self_attn.k_proj",
- "layers.21.self_attn.o_proj",
- "layers.18.self_attn.k_proj",
- "layers.27.mlp.down_proj",
+ "layers.11.self_attn.o_proj",
+ "layers.4.mlp.gate_proj",
+ "layers.15.self_attn.q_proj",
  "layers.17.self_attn.o_proj",
- "layers.21.self_attn.k_proj",
- "layers.30.mlp.down_proj",
- "layers.3.self_attn.k_proj",
- "layers.25.mlp.gate_proj",
- "layers.16.mlp.gate_proj",
+ "layers.6.self_attn.k_proj",
+ "layers.12.mlp.down_proj",
+ "layers.9.self_attn.k_proj",
+ "layers.17.mlp.up_proj",
+ "layers.1.mlp.gate_proj",
+ "layers.20.mlp.gate_proj",
+ "layers.4.self_attn.v_proj",
+ "layers.16.self_attn.v_proj",
  "layers.31.mlp.down_proj",
- "layers.22.self_attn.o_proj",
- "layers.6.mlp.gate_proj",
- "layers.11.mlp.down_proj",
- "layers.23.self_attn.q_proj",
- "layers.7.mlp.up_proj",
- "layers.22.self_attn.k_proj",
- "layers.12.self_attn.q_proj",
- "layers.19.self_attn.v_proj",
- "layers.22.self_attn.v_proj",
- "layers.24.mlp.gate_proj",
- "layers.2.self_attn.q_proj",
- "layers.2.mlp.gate_proj",
- "layers.30.self_attn.q_proj",
- "layers.14.self_attn.v_proj",
- "layers.26.self_attn.v_proj",
- "layers.28.self_attn.q_proj",
- "layers.30.mlp.up_proj",
- "layers.10.mlp.down_proj",
- "layers.5.self_attn.q_proj",
- "layers.21.self_attn.v_proj",
- "layers.15.self_attn.k_proj",
- "layers.1.self_attn.o_proj",
- "layers.20.self_attn.k_proj",
- "layers.4.self_attn.o_proj",
- "layers.7.self_attn.k_proj",
- "layers.14.self_attn.k_proj",
- "layers.12.self_attn.v_proj",
- "layers.16.self_attn.q_proj",
- "layers.28.self_attn.o_proj",
- "layers.29.mlp.up_proj",
- "layers.18.self_attn.o_proj",
+ "layers.29.self_attn.v_proj",
+ "layers.26.self_attn.o_proj",
+ "layers.8.self_attn.q_proj",
+ "layers.10.self_attn.q_proj",
+ "layers.27.self_attn.o_proj",
  "layers.8.mlp.down_proj",
- "layers.9.self_attn.q_proj",
- "layers.25.mlp.up_proj",
- "layers.14.mlp.gate_proj",
- "layers.11.self_attn.v_proj",
+ "layers.29.mlp.down_proj",
  "layers.18.mlp.up_proj",
- "layers.15.self_attn.o_proj",
- "embed_tokens",
- "layers.5.mlp.gate_proj",
- "layers.29.self_attn.o_proj",
- "layers.16.mlp.up_proj",
- "layers.27.self_attn.k_proj",
- "layers.26.self_attn.q_proj",
- "layers.20.mlp.down_proj",
- "layers.4.mlp.up_proj",
- "layers.19.mlp.up_proj",
+ "layers.28.self_attn.k_proj",
+ "layers.27.self_attn.v_proj",
+ "layers.29.mlp.up_proj",
+ "layers.2.self_attn.q_proj",
+ "layers.21.self_attn.o_proj",
+ "layers.23.self_attn.k_proj",
+ "layers.28.self_attn.v_proj",
+ "layers.15.mlp.up_proj",
  "layers.18.mlp.down_proj",
- "layers.3.self_attn.o_proj",
- "layers.8.mlp.up_proj",
- "layers.8.self_attn.k_proj",
- "layers.30.self_attn.k_proj",
- "layers.15.mlp.down_proj",
- "layers.10.self_attn.q_proj",
- "layers.16.self_attn.v_proj",
- "layers.12.mlp.gate_proj",
  "layers.6.mlp.up_proj",
- "layers.0.mlp.up_proj",
- "layers.9.self_attn.k_proj",
- "layers.31.self_attn.k_proj",
- "layers.20.mlp.up_proj",
- "layers.28.mlp.down_proj",
- "layers.0.self_attn.o_proj",
- "layers.4.mlp.gate_proj",
- "layers.21.mlp.up_proj",
- "layers.13.self_attn.q_proj",
- "layers.5.self_attn.k_proj",
- "layers.21.mlp.down_proj",
- "layers.17.mlp.down_proj",
+ "layers.24.mlp.gate_proj",
+ "layers.24.self_attn.o_proj",
+ "layers.7.mlp.gate_proj",
+ "layers.12.self_attn.v_proj",
+ "layers.9.mlp.gate_proj",
+ "layers.4.mlp.up_proj",
  "layers.30.self_attn.o_proj",
+ "layers.28.mlp.gate_proj",
  "layers.19.mlp.down_proj",
- "layers.1.mlp.gate_proj",
- "layers.9.mlp.gate_proj",
- "layers.13.mlp.gate_proj",
+ "layers.31.self_attn.q_proj",
+ "layers.5.self_attn.q_proj",
+ "layers.1.self_attn.o_proj",
+ "layers.25.self_attn.k_proj",
+ "layers.10.self_attn.k_proj",
+ "layers.12.self_attn.o_proj",
+ "layers.28.mlp.down_proj",
+ "layers.15.mlp.down_proj",
+ "layers.23.mlp.up_proj",
+ "layers.26.mlp.gate_proj",
  "layers.19.self_attn.k_proj",
- "layers.3.self_attn.v_proj",
- "layers.20.mlp.gate_proj",
+ "layers.14.mlp.up_proj",
+ "layers.20.self_attn.q_proj",
+ "layers.26.self_attn.q_proj",
+ "layers.0.mlp.down_proj",
+ "layers.21.self_attn.v_proj",
+ "layers.10.mlp.down_proj",
+ "layers.21.self_attn.k_proj",
+ "layers.3.self_attn.o_proj",
+ "layers.3.mlp.up_proj",
+ "layers.30.self_attn.q_proj",
+ "layers.6.mlp.down_proj",
+ "layers.9.mlp.down_proj",
+ "layers.8.self_attn.k_proj",
+ "layers.17.self_attn.q_proj",
+ "layers.11.mlp.gate_proj",
+ "layers.13.mlp.down_proj",
+ "layers.23.self_attn.q_proj",
+ "layers.9.self_attn.q_proj",
+ "layers.21.self_attn.q_proj",
  "layers.7.self_attn.v_proj",
- "layers.19.self_attn.o_proj",
- "layers.22.mlp.up_proj",
+ "layers.0.mlp.up_proj",
+ "layers.16.mlp.down_proj",
  "layers.17.self_attn.k_proj",
- "layers.6.self_attn.q_proj",
- "layers.24.self_attn.o_proj",
- "layers.16.self_attn.k_proj",
- "layers.13.mlp.up_proj",
- "layers.8.self_attn.q_proj",
- "layers.29.self_attn.k_proj",
- "layers.28.self_attn.k_proj",
- "layers.27.mlp.gate_proj",
- "layers.2.self_attn.o_proj",
- "layers.28.mlp.up_proj",
+ "layers.20.self_attn.o_proj",
+ "layers.3.mlp.gate_proj",
  "layers.25.self_attn.q_proj",
- "layers.23.mlp.gate_proj",
- "layers.9.self_attn.v_proj",
- "layers.7.mlp.gate_proj",
- "layers.17.mlp.up_proj",
- "layers.5.self_attn.v_proj",
- "layers.7.self_attn.o_proj",
+ "layers.6.self_attn.v_proj",
  "layers.13.self_attn.v_proj",
- "layers.27.self_attn.q_proj",
- "layers.18.self_attn.q_proj",
- "layers.2.self_attn.v_proj",
- "layers.23.mlp.down_proj",
- "layers.14.mlp.down_proj",
+ "layers.7.self_attn.o_proj",
+ "layers.11.mlp.down_proj",
+ "layers.24.mlp.up_proj",
+ "layers.14.self_attn.k_proj",
+ "layers.0.mlp.gate_proj",
+ "layers.22.self_attn.o_proj",
+ "layers.12.mlp.gate_proj",
+ "layers.20.mlp.up_proj",
+ "layers.0.self_attn.v_proj",
+ "layers.25.self_attn.v_proj",
+ "layers.31.self_attn.v_proj",
+ "layers.22.self_attn.q_proj",
+ "layers.23.self_attn.v_proj",
+ "layers.9.self_attn.o_proj",
+ "layers.1.self_attn.q_proj",
+ "layers.0.self_attn.q_proj",
+ "layers.1.self_attn.v_proj",
+ "layers.28.self_attn.o_proj",
+ "layers.1.mlp.down_proj",
+ "layers.25.mlp.down_proj",
+ "layers.16.self_attn.o_proj",
+ "layers.16.mlp.up_proj",
+ "layers.22.self_attn.k_proj",
+ "layers.12.mlp.up_proj",
+ "layers.30.self_attn.k_proj",
+ "layers.14.self_attn.v_proj",
+ "layers.5.mlp.gate_proj",
+ "layers.18.self_attn.k_proj",
+ "layers.30.mlp.down_proj",
  "layers.2.self_attn.k_proj",
- "layers.4.self_attn.v_proj",
- "layers.26.self_attn.o_proj",
- "layers.12.mlp.down_proj",
+ "layers.17.self_attn.v_proj",
+ "layers.13.mlp.gate_proj",
+ "layers.22.mlp.gate_proj",
+ "layers.10.self_attn.v_proj",
+ "layers.27.mlp.up_proj",
+ "layers.11.self_attn.v_proj",
+ "layers.2.self_attn.v_proj",
+ "layers.2.mlp.gate_proj",
+ "layers.1.self_attn.k_proj",
+ "layers.3.self_attn.v_proj",
+ "layers.21.mlp.gate_proj",
+ "layers.7.mlp.up_proj",
+ "layers.22.mlp.up_proj",
  "layers.7.self_attn.q_proj",
- "layers.11.self_attn.k_proj",
- "layers.27.self_attn.o_proj",
- "layers.26.mlp.down_proj",
- "layers.6.self_attn.v_proj",
- "layers.4.mlp.down_proj",
- "layers.11.self_attn.o_proj",
- "layers.29.self_attn.q_proj",
- "layers.24.self_attn.k_proj",
- "layers.18.self_attn.v_proj",
- "layers.20.self_attn.o_proj",
+ "layers.15.self_attn.v_proj",
+ "layers.27.self_attn.q_proj",
+ "layers.7.mlp.down_proj",
+ "layers.0.self_attn.k_proj",
+ "layers.20.mlp.down_proj",
+ "layers.28.mlp.up_proj",
+ "layers.30.self_attn.v_proj",
  "layers.2.mlp.down_proj",
- "layers.11.self_attn.q_proj",
- "layers.0.mlp.gate_proj",
- "layers.3.mlp.up_proj",
- "layers.6.self_attn.k_proj",
- "layers.14.mlp.up_proj",
+ "layers.6.self_attn.o_proj",
+ "layers.15.self_attn.k_proj",
+ "layers.18.self_attn.o_proj",
+ "layers.8.mlp.up_proj",
+ "layers.4.mlp.down_proj",
+ "layers.14.mlp.gate_proj",
  "layers.17.mlp.gate_proj",
- "layers.1.self_attn.q_proj",
+ "layers.14.self_attn.q_proj",
+ "layers.15.self_attn.o_proj",
+ "layers.26.self_attn.k_proj",
+ "layers.24.mlp.down_proj",
+ "layers.22.self_attn.v_proj",
+ "layers.18.self_attn.q_proj",
+ "layers.9.mlp.up_proj",
+ "layers.25.self_attn.o_proj",
+ "layers.9.self_attn.v_proj",
+ "layers.13.self_attn.k_proj",
+ "layers.20.self_attn.k_proj",
  "layers.24.self_attn.v_proj",
- "layers.17.self_attn.q_proj",
- "layers.20.self_attn.q_proj",
- "layers.29.self_attn.v_proj",
- "layers.5.self_attn.o_proj",
- "layers.15.self_attn.v_proj",
+ "layers.24.self_attn.q_proj",
+ "layers.26.self_attn.v_proj",
+ "layers.2.self_attn.o_proj",
+ "layers.19.self_attn.o_proj",
+ "layers.30.mlp.gate_proj",
+ "layers.26.mlp.up_proj",
+ "layers.30.mlp.up_proj",
+ "layers.5.mlp.down_proj",
+ "layers.10.mlp.gate_proj",
+ "layers.12.self_attn.k_proj",
+ "layers.13.self_attn.q_proj",
  "layers.15.mlp.gate_proj",
- "layers.31.mlp.up_proj",
- "layers.26.mlp.gate_proj"
+ "layers.14.self_attn.o_proj",
+ "layers.25.mlp.gate_proj",
+ "layers.8.mlp.gate_proj",
+ "embed_tokens",
+ "layers.23.mlp.gate_proj",
+ "layers.2.mlp.up_proj",
+ "layers.27.mlp.down_proj",
+ "layers.10.mlp.up_proj",
+ "layers.3.self_attn.k_proj",
+ "layers.14.mlp.down_proj",
+ "layers.5.self_attn.k_proj",
+ "layers.21.mlp.up_proj",
+ "layers.22.mlp.down_proj",
+ "layers.27.self_attn.k_proj",
+ "layers.16.self_attn.q_proj",
+ "layers.5.self_attn.v_proj",
+ "layers.28.self_attn.q_proj",
+ "layers.21.mlp.down_proj",
+ "layers.10.self_attn.o_proj",
+ "layers.19.mlp.gate_proj",
+ "layers.7.self_attn.k_proj",
+ "layers.18.self_attn.v_proj",
+ "layers.1.mlp.up_proj",
+ "layers.29.self_attn.q_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:d716d04cda12d4d345b4c8b1c4e801a7682742bc807c90e463aa91a4f2925fcc
- size 421083096
+ oid sha256:3f5ea8aceec5bb2e876d37cf17972fc88dc57ebab0b1291b47a465a3a0e5480f
+ size 1039036344
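
These binary files are tracked as Git LFS pointers: the `version`, `oid sha256:...`, and `size` lines describe the real payload stored out-of-band. A small sketch (an assumption about local usage, not tooling from this repo) for verifying a downloaded file against its pointer:

```python
# Verify that a downloaded file matches the sha256 recorded in its LFS pointer.
import hashlib

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
    return h.hexdigest()

expected = "3f5ea8aceec5bb2e876d37cf17972fc88dc57ebab0b1291b47a465a3a0e5480f"
assert sha256_of("adapter_model.safetensors") == expected
```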
runs/Feb02_14-21-05_eb570fa975b5/events.out.tfevents.1706883669.eb570fa975b5.1057.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1fbab0603f15c8061d6d1fc77d71d56120c8cc4001764545b475da76cc1d2923
+ size 181067
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1268c71a97ae93de424ebbbb07d10c6d8a2ff3ac001b287b75b1a9aed450f25f
+ oid sha256:f519084678570cd5032dc5aa943f6f8ae827cf55a2e6c4d12e27323933e12fd8
  size 4728
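
`training_args.bin` is the `TrainingArguments` object that `Trainer` saves alongside a run, so the changed hash here simply tracks the hyperparameter changes in the README. A sketch (an assumption about local inspection, not from the repo) for reading it back:

```python
# Load and inspect the pickled TrainingArguments; weights_only=False is
# needed on recent torch because this is not a plain tensor file.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.lr_scheduler_type)
```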