M-Mahdi-Setak committed
Commit 238af62
1 Parent(s): 580e4cf

second model

README.md CHANGED
@@ -7,18 +7,18 @@ tags:
  - generated_from_trainer
  base_model: stabilityai/stablelm-3b-4e1t
  model-index:
- - name: mehdi-finetuned
+ - name: persian-stablelm
    results: []
  ---

  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
  should probably proofread and complete it, then remove this comment. -->

- # mehdi-finetuned
+ # persian-stablelm

  This model is a fine-tuned version of [stabilityai/stablelm-3b-4e1t](https://huggingface.co/stabilityai/stablelm-3b-4e1t) on an unknown dataset.
  It achieves the following results on the evaluation set:
- - Loss: 3.8367
+ - Loss: 4.2700

  ## Model description

@@ -49,7 +49,11 @@ The following hyperparameters were used during training:

  | Training Loss | Epoch | Step | Validation Loss |
  |:-------------:|:-----:|:----:|:---------------:|
- | 4.1078        | 1.0   | 553  | 3.8367          |
+ | 4.7338        | 0.18  | 100  | 4.5703          |
+ | 4.6391        | 0.36  | 200  | 4.4094          |
+ | 4.8127        | 0.54  | 300  | 4.3277          |
+ | 4.5393        | 0.72  | 400  | 4.2797          |
+ | 4.29          | 0.9   | 500  | 4.2700          |


  ### Framework versions
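For anyone who wants to try the adapter this commit produces, here is a minimal loading sketch with transformers and peft. The adapter repo id `M-Mahdi-Setak/persian-stablelm` is an assumption pieced together from the committer name and the new model name in the diff above, and `trust_remote_code` reflects how the stablelm-3b-4e1t base repo originally shipped; treat this as a sketch, not the author's own usage code.

```python
# Minimal sketch: load the LoRA adapter from this commit on top of the base
# model. The repo id "M-Mahdi-Setak/persian-stablelm" is a guess built from
# the committer name and the renamed model; substitute the real repo id.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "stabilityai/stablelm-3b-4e1t",
    trust_remote_code=True,  # the base repo shipped custom modeling code
)
model = PeftModel.from_pretrained(base, "M-Mahdi-Setak/persian-stablelm")
tokenizer = AutoTokenizer.from_pretrained("M-Mahdi-Setak/persian-stablelm")

# Sample Persian prompt ("Hello, how are you?")
inputs = tokenizer("سلام، حال شما چطور است؟", return_tensors="pt")
output = model.generate(**inputs, max_new_tokens=48)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```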
adapter_config.json CHANGED
@@ -15,15 +15,235 @@
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
- "r": 32,
+ "r": 64,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
+ "layers.13.self_attn.o_proj",
+ "layers.27.mlp.up_proj",
+ "layers.31.self_attn.q_proj",
+ "layers.24.self_attn.q_proj",
+ "layers.10.self_attn.v_proj",
+ "layers.24.mlp.down_proj",
+ "layers.22.mlp.down_proj",
+ "layers.4.self_attn.q_proj",
+ "layers.25.mlp.down_proj",
+ "layers.1.mlp.up_proj",
+ "layers.10.mlp.gate_proj",
+ "layers.0.self_attn.v_proj",
+ "layers.18.mlp.gate_proj",
+ "layers.22.mlp.gate_proj",
+ "layers.3.self_attn.q_proj",
+ "layers.11.mlp.gate_proj",
+ "layers.21.self_attn.q_proj",
+ "layers.8.mlp.gate_proj",
+ "layers.14.self_attn.q_proj",
+ "layers.28.mlp.gate_proj",
+ "layers.1.self_attn.v_proj",
+ "layers.15.mlp.up_proj",
+ "layers.25.self_attn.v_proj",
+ "layers.23.self_attn.o_proj",
+ "layers.19.mlp.gate_proj",
+ "layers.1.self_attn.k_proj",
+ "layers.1.mlp.down_proj",
+ "layers.13.mlp.down_proj",
+ "layers.23.mlp.up_proj",
+ "layers.6.self_attn.o_proj",
+ "layers.27.self_attn.v_proj",
+ "layers.13.self_attn.k_proj",
+ "layers.11.mlp.up_proj",
+ "layers.5.mlp.up_proj",
+ "layers.29.mlp.down_proj",
+ "layers.30.self_attn.v_proj",
+ "layers.9.self_attn.o_proj",
+ "layers.15.self_attn.q_proj",
+ "layers.25.self_attn.k_proj",
+ "layers.17.self_attn.v_proj",
+ "layers.23.self_attn.v_proj",
+ "layers.12.self_attn.k_proj",
+ "layers.31.self_attn.o_proj",
+ "layers.21.mlp.gate_proj",
+ "layers.2.mlp.up_proj",
+ "layers.9.mlp.up_proj",
+ "layers.10.self_attn.o_proj",
+ "layers.0.mlp.down_proj",
+ "layers.3.mlp.gate_proj",
+ "layers.5.mlp.down_proj",
+ "layers.9.mlp.down_proj",
+ "layers.12.mlp.up_proj",
+ "layers.0.self_attn.k_proj",
+ "layers.25.self_attn.o_proj",
+ "layers.24.mlp.up_proj",
+ "layers.26.mlp.up_proj",
+ "layers.20.self_attn.v_proj",
+ "layers.31.self_attn.v_proj",
+ "layers.0.self_attn.q_proj",
+ "layers.30.mlp.gate_proj",
+ "layers.14.self_attn.o_proj",
+ "layers.8.self_attn.v_proj",
+ "layers.29.mlp.gate_proj",
+ "layers.6.mlp.down_proj",
+ "layers.16.mlp.down_proj",
+ "layers.22.self_attn.q_proj",
+ "layers.28.self_attn.v_proj",
+ "layers.31.mlp.gate_proj",
+ "layers.4.self_attn.k_proj",
+ "layers.12.self_attn.o_proj",
+ "layers.10.mlp.up_proj",
+ "layers.19.self_attn.q_proj",
+ "layers.8.self_attn.o_proj",
+ "layers.16.self_attn.o_proj",
+ "layers.3.mlp.down_proj",
+ "layers.26.self_attn.k_proj",
+ "layers.7.mlp.down_proj",
+ "layers.10.self_attn.k_proj",
+ "layers.23.self_attn.k_proj",
+ "layers.21.self_attn.o_proj",
+ "layers.18.self_attn.k_proj",
+ "layers.27.mlp.down_proj",
+ "layers.17.self_attn.o_proj",
+ "layers.21.self_attn.k_proj",
+ "layers.30.mlp.down_proj",
+ "layers.3.self_attn.k_proj",
+ "layers.25.mlp.gate_proj",
+ "layers.16.mlp.gate_proj",
+ "layers.31.mlp.down_proj",
+ "layers.22.self_attn.o_proj",
+ "layers.6.mlp.gate_proj",
+ "layers.11.mlp.down_proj",
+ "layers.23.self_attn.q_proj",
+ "layers.7.mlp.up_proj",
+ "layers.22.self_attn.k_proj",
+ "layers.12.self_attn.q_proj",
+ "layers.19.self_attn.v_proj",
+ "layers.22.self_attn.v_proj",
+ "layers.24.mlp.gate_proj",
+ "layers.2.self_attn.q_proj",
+ "layers.2.mlp.gate_proj",
+ "layers.30.self_attn.q_proj",
+ "layers.14.self_attn.v_proj",
+ "layers.26.self_attn.v_proj",
+ "layers.28.self_attn.q_proj",
+ "layers.30.mlp.up_proj",
+ "layers.10.mlp.down_proj",
+ "layers.5.self_attn.q_proj",
+ "layers.21.self_attn.v_proj",
+ "layers.15.self_attn.k_proj",
+ "layers.1.self_attn.o_proj",
+ "layers.20.self_attn.k_proj",
+ "layers.4.self_attn.o_proj",
+ "layers.7.self_attn.k_proj",
+ "layers.14.self_attn.k_proj",
+ "layers.12.self_attn.v_proj",
+ "layers.16.self_attn.q_proj",
+ "layers.28.self_attn.o_proj",
+ "layers.29.mlp.up_proj",
+ "layers.18.self_attn.o_proj",
+ "layers.8.mlp.down_proj",
+ "layers.9.self_attn.q_proj",
+ "layers.25.mlp.up_proj",
+ "layers.14.mlp.gate_proj",
+ "layers.11.self_attn.v_proj",
+ "layers.18.mlp.up_proj",
+ "layers.15.self_attn.o_proj",
  "embed_tokens",
- "o_proj",
- "k_proj",
- "q_proj",
- "v_proj"
+ "layers.5.mlp.gate_proj",
+ "layers.29.self_attn.o_proj",
+ "layers.16.mlp.up_proj",
+ "layers.27.self_attn.k_proj",
+ "layers.26.self_attn.q_proj",
+ "layers.20.mlp.down_proj",
+ "layers.4.mlp.up_proj",
+ "layers.19.mlp.up_proj",
+ "layers.18.mlp.down_proj",
+ "layers.3.self_attn.o_proj",
+ "layers.8.mlp.up_proj",
+ "layers.8.self_attn.k_proj",
+ "layers.30.self_attn.k_proj",
+ "layers.15.mlp.down_proj",
+ "layers.10.self_attn.q_proj",
+ "layers.16.self_attn.v_proj",
+ "layers.12.mlp.gate_proj",
+ "layers.6.mlp.up_proj",
+ "layers.0.mlp.up_proj",
+ "layers.9.self_attn.k_proj",
+ "layers.31.self_attn.k_proj",
+ "layers.20.mlp.up_proj",
+ "layers.28.mlp.down_proj",
+ "layers.0.self_attn.o_proj",
+ "layers.4.mlp.gate_proj",
+ "layers.21.mlp.up_proj",
+ "layers.13.self_attn.q_proj",
+ "layers.5.self_attn.k_proj",
+ "layers.21.mlp.down_proj",
+ "layers.17.mlp.down_proj",
+ "layers.30.self_attn.o_proj",
+ "layers.19.mlp.down_proj",
+ "layers.1.mlp.gate_proj",
+ "layers.9.mlp.gate_proj",
+ "layers.13.mlp.gate_proj",
+ "layers.19.self_attn.k_proj",
+ "layers.3.self_attn.v_proj",
+ "layers.20.mlp.gate_proj",
+ "layers.7.self_attn.v_proj",
+ "layers.19.self_attn.o_proj",
+ "layers.22.mlp.up_proj",
+ "layers.17.self_attn.k_proj",
+ "layers.6.self_attn.q_proj",
+ "layers.24.self_attn.o_proj",
+ "layers.16.self_attn.k_proj",
+ "layers.13.mlp.up_proj",
+ "layers.8.self_attn.q_proj",
+ "layers.29.self_attn.k_proj",
+ "layers.28.self_attn.k_proj",
+ "layers.27.mlp.gate_proj",
+ "layers.2.self_attn.o_proj",
+ "layers.28.mlp.up_proj",
+ "layers.25.self_attn.q_proj",
+ "layers.23.mlp.gate_proj",
+ "layers.9.self_attn.v_proj",
+ "layers.7.mlp.gate_proj",
+ "layers.17.mlp.up_proj",
+ "layers.5.self_attn.v_proj",
+ "layers.7.self_attn.o_proj",
+ "layers.13.self_attn.v_proj",
+ "layers.27.self_attn.q_proj",
+ "layers.18.self_attn.q_proj",
+ "layers.2.self_attn.v_proj",
+ "layers.23.mlp.down_proj",
+ "layers.14.mlp.down_proj",
+ "layers.2.self_attn.k_proj",
+ "layers.4.self_attn.v_proj",
+ "layers.26.self_attn.o_proj",
+ "layers.12.mlp.down_proj",
+ "layers.7.self_attn.q_proj",
+ "layers.11.self_attn.k_proj",
+ "layers.27.self_attn.o_proj",
+ "layers.26.mlp.down_proj",
+ "layers.6.self_attn.v_proj",
+ "layers.4.mlp.down_proj",
+ "layers.11.self_attn.o_proj",
+ "layers.29.self_attn.q_proj",
+ "layers.24.self_attn.k_proj",
+ "layers.18.self_attn.v_proj",
+ "layers.20.self_attn.o_proj",
+ "layers.2.mlp.down_proj",
+ "layers.11.self_attn.q_proj",
+ "layers.0.mlp.gate_proj",
+ "layers.3.mlp.up_proj",
+ "layers.6.self_attn.k_proj",
+ "layers.14.mlp.up_proj",
+ "layers.17.mlp.gate_proj",
+ "layers.1.self_attn.q_proj",
+ "layers.24.self_attn.v_proj",
+ "layers.17.self_attn.q_proj",
+ "layers.20.self_attn.q_proj",
+ "layers.29.self_attn.v_proj",
+ "layers.5.self_attn.o_proj",
+ "layers.15.self_attn.v_proj",
+ "layers.15.mlp.gate_proj",
+ "layers.31.mlp.up_proj",
+ "layers.26.mlp.gate_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0b350c4c31579104682e6a2297ff2fb13fc7994b5c3fd9f1cdfe5a3627681fd0
- size 259870552
+ oid sha256:d716d04cda12d4d345b4c8b1c4e801a7682742bc807c90e463aa91a4f2925fcc
+ size 421083096
runs/Feb02_11-13-39_e38b76366d3e/events.out.tfevents.1706872425.e38b76366d3e.1538.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4edae16f0bd9fef1f153c9afd9400f124dee410f98320aeeaf79389a5eaa3ffc
+ size 8022
runs/Feb02_11-20-57_e38b76366d3e/events.out.tfevents.1706872861.e38b76366d3e.4468.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55b24b4e329b95d0f5731a65ef5a7bde5f1cb4626ffaab512dcd891e135ff12a
+ size 7559
runs/Feb02_11-25-49_e38b76366d3e/events.out.tfevents.1706873152.e38b76366d3e.5910.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c7d3338b51ceb4787e5e93c236ace8ca661085f32afdbfbe1366d119d905a831
+ size 92777
tokenizer.json CHANGED
@@ -2,7 +2,7 @@
  "version": "1.0",
  "truncation": {
    "direction": "Right",
-   "max_length": 64,
+   "max_length": 48,
    "strategy": "LongestFirst",
    "stride": 0
  },
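The only tokenizer change is the truncation budget, cut from 64 to 48 tokens, so every training example is now clipped one-quarter shorter. This truncation block is what the fast tokenizer's Rust backend serializes; a sketch of how such a block could be produced (an assumed workflow — the commit shows only the resulting JSON):

```python
# Sketch: write a tokenizer.json whose truncation block matches the diff
# above. "persian-stablelm" as the output directory is illustrative.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("stabilityai/stablelm-3b-4e1t")
tok.backend_tokenizer.enable_truncation(
    max_length=48,             # was 64 in the previous commit
    strategy="longest_first",  # serialized as "LongestFirst"
    direction="right",         # serialized as "Right"
    stride=0,
)
tok.save_pretrained("persian-stablelm")  # emits tokenizer.json with this block
```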
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:06d7101cdef0eb5091bbd1e3bef7a16df112808692c8f99a786cff94809a85c4
- size 4664
+ oid sha256:1268c71a97ae93de424ebbbb07d10c6d8a2ff3ac001b287b75b1a9aed450f25f
+ size 4728