Training in progress, step 500
Files changed:
- config.json +1 -1
- model.safetensors +3 -0
- runs/Apr20_20-49-17_DESKTOP-I570M0U/events.out.tfevents.1713635889.DESKTOP-I570M0U.332462.0 +3 -0
- runs/Apr20_21-00-41_DESKTOP-I570M0U/events.out.tfevents.1713636057.DESKTOP-I570M0U.336977.0 +3 -0
- runs/Apr20_21-02-14_DESKTOP-I570M0U/events.out.tfevents.1713636162.DESKTOP-I570M0U.336977.1 +3 -0
- runs/Apr20_21-04-08_DESKTOP-I570M0U/events.out.tfevents.1713636267.DESKTOP-I570M0U.338846.0 +3 -0
- special_tokens_map.json +7 -1
- tokenizer.json +6 -5
- tokenizer_config.json +6 -7
- training_args.bin +2 -2
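The commit message and file layout above are what the transformers Trainer produces when checkpoint pushing to the Hub is enabled: each save (here, at step 500) is pushed with a commit message like "Training in progress, step 500", and TensorBoard event files land under runs/<date>_<hostname>/. Below is a minimal sketch of that kind of setup, not the author's actual script; output_dir, hub_model_id, and the commented-out model/dataset wiring are hypothetical placeholders.

# Sketch of a Trainer configuration whose checkpoint pushes produce
# commits titled "Training in progress, step <N>". Names are placeholders.
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="out",                # checkpoints are written here before pushing
    save_strategy="steps",
    save_steps=500,                  # -> a push at step 500, 1000, ...
    logging_steps=50,
    report_to="tensorboard",         # writes the runs/<Mon><day>_<time>_<hostname>/ event files
    push_to_hub=True,                # Trainer pushes each saved checkpoint to the Hub
    hub_model_id="your-username/your-model",  # hypothetical target repo
)

# trainer = Trainer(model=model, args=args, train_dataset=train_ds, tokenizer=tokenizer)
# trainer.train()  # each save triggers a "Training in progress, step ..." commit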
config.json
CHANGED
@@ -69,7 +69,7 @@
 }
 },
 "torch_dtype": "float32",
-"transformers_version": "4.
+"transformers_version": "4.40.0",
 "use_cache": true,
 "vocab_size": 50265
 }
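The only config change is the recorded transformers version (now 4.40.0); fields such as torch_dtype and vocab_size are unchanged. A quick way to check those values, assuming a local copy of the file, is to read it with the json module:

# Sketch: inspect the saved config.json (assumes the file is in the current directory).
import json

with open("config.json") as f:
    cfg = json.load(f)

print(cfg["transformers_version"])            # "4.40.0" after this commit
print(cfg["torch_dtype"], cfg["vocab_size"])  # "float32", 50265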
model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0a6f3933cc49dd9553eefea3a38352e83f30e65cc1a434d17d7a56db0ca747f2
+size 557912620
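The three added lines are a Git LFS pointer, not the weights themselves: the actual ~558 MB float32 safetensors file is stored in LFS and resolved on download. A sketch for fetching and inspecting it, assuming huggingface_hub and safetensors are installed and using a hypothetical repo id:

# Sketch: download the LFS-backed weights and peek at the tensors they contain.
# "your-username/your-model" is a hypothetical repo id.
from huggingface_hub import hf_hub_download
from safetensors import safe_open

path = hf_hub_download(repo_id="your-username/your-model", filename="model.safetensors")

with safe_open(path, framework="pt", device="cpu") as f:
    for name in list(f.keys())[:5]:              # first few tensor names
        print(name, f.get_tensor(name).shape)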
runs/Apr20_20-49-17_DESKTOP-I570M0U/events.out.tfevents.1713635889.DESKTOP-I570M0U.332462.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:34d95840d58b72c7e858f7db116fa01376a6673dffdf8f84c08edd86c1bf8644
+size 4772
runs/Apr20_21-00-41_DESKTOP-I570M0U/events.out.tfevents.1713636057.DESKTOP-I570M0U.336977.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:56c9049fef9152da54fb273e0189d366d069c4c46ea514e970e63befb7499cd7
+size 5955
runs/Apr20_21-02-14_DESKTOP-I570M0U/events.out.tfevents.1713636162.DESKTOP-I570M0U.336977.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:36860b7c1173b7ccdbf8aeea4777cdf7decdbea19d9f87906be9982be877abc7
+size 4184
runs/Apr20_21-04-08_DESKTOP-I570M0U/events.out.tfevents.1713636267.DESKTOP-I570M0U.338846.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b877e5347cb55ae62b2350a20a00427f53441655bab8ee8e088573033142bea4
+size 6840
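The four runs/... files are TensorBoard event logs, one per launch of the training script. They can be browsed with `tensorboard --logdir runs`, or read programmatically; a sketch using the tensorboard package, assuming the corresponding runs/ directory has been downloaded locally:

# Sketch: read the training scalars out of one of the event files above.
# Assumes the runs/ directory exists locally and tensorboard is installed.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Apr20_21-04-08_DESKTOP-I570M0U")
acc.Reload()

print(acc.Tags()["scalars"])                      # e.g. train/loss (tag names may vary)
for event in acc.Scalars(acc.Tags()["scalars"][0]):
    print(event.step, event.value)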
special_tokens_map.json
CHANGED
@@ -2,7 +2,13 @@
 "bos_token": "<s>",
 "cls_token": "<s>",
 "eos_token": "</s>",
-"mask_token":
+"mask_token": {
+"content": "<mask>",
+"lstrip": true,
+"normalized": true,
+"rstrip": false,
+"single_word": false
+},
 "pad_token": "<pad>",
 "sep_token": "</s>",
 "unk_token": "<unk>"
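The mask token is now stored as a full added-token object rather than a plain string, with lstrip set to true, which is the usual BART convention: whitespace to the left of <mask> is absorbed into the mask token at tokenization time. A sketch for checking that behaviour, assuming the updated tokenizer files sit in a hypothetical local directory called checkpoint-500:

# Sketch: verify the <mask> token settings shipped in this commit.
# "checkpoint-500" is a hypothetical local directory containing these tokenizer files.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("checkpoint-500")

print(tokenizer.mask_token, tokenizer.mask_token_id)   # "<mask>", 50264
print(tokenizer.tokenize("Fill the <mask> please."))   # the space before <mask> is absorbed (lstrip=true)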
tokenizer.json
CHANGED
@@ -21,7 +21,7 @@
 "single_word": false,
 "lstrip": false,
 "rstrip": false,
-"normalized":
+"normalized": true,
 "special": true
 },
 {
@@ -30,7 +30,7 @@
 "single_word": false,
 "lstrip": false,
 "rstrip": false,
-"normalized":
+"normalized": true,
 "special": true
 },
 {
@@ -39,7 +39,7 @@
 "single_word": false,
 "lstrip": false,
 "rstrip": false,
-"normalized":
+"normalized": true,
 "special": true
 },
 {
@@ -48,7 +48,7 @@
 "single_word": false,
 "lstrip": false,
 "rstrip": false,
-"normalized":
+"normalized": true,
 "special": true
 },
 {
@@ -57,7 +57,7 @@
 "single_word": false,
 "lstrip": true,
 "rstrip": false,
-"normalized":
+"normalized": true,
 "special": true
 }
 ],
@@ -95,6 +95,7 @@
 "end_of_word_suffix": "",
 "fuse_unk": false,
 "byte_fallback": false,
+"ignore_merges": false,
 "vocab": {
 "<s>": 0,
 "<pad>": 1,
tokenizer_config.json
CHANGED
@@ -4,7 +4,7 @@
 "0": {
 "content": "<s>",
 "lstrip": false,
-"normalized":
+"normalized": true,
 "rstrip": false,
 "single_word": false,
 "special": true
@@ -12,7 +12,7 @@
 "1": {
 "content": "<pad>",
 "lstrip": false,
-"normalized":
+"normalized": true,
 "rstrip": false,
 "single_word": false,
 "special": true
@@ -20,7 +20,7 @@
 "2": {
 "content": "</s>",
 "lstrip": false,
-"normalized":
+"normalized": true,
 "rstrip": false,
 "single_word": false,
 "special": true
@@ -28,7 +28,7 @@
 "3": {
 "content": "<unk>",
 "lstrip": false,
-"normalized":
+"normalized": true,
 "rstrip": false,
 "single_word": false,
 "special": true
@@ -36,20 +36,19 @@
 "50264": {
 "content": "<mask>",
 "lstrip": true,
-"normalized":
+"normalized": true,
 "rstrip": false,
 "single_word": false,
 "special": true
 }
 },
-"additional_special_tokens": [],
 "bos_token": "<s>",
 "clean_up_tokenization_spaces": true,
 "cls_token": "<s>",
 "eos_token": "</s>",
 "errors": "replace",
 "mask_token": "<mask>",
-"model_max_length":
+"model_max_length": 1000000000000000019884624838656,
 "pad_token": "<pad>",
 "sep_token": "</s>",
 "tokenizer_class": "BartTokenizer",
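The new model_max_length value (1000000000000000019884624838656, i.e. int(1e30)) is the sentinel transformers writes when no explicit maximum length is recorded, so callers are expected to set their own limit before truncating. A sketch of capping it after loading, again using the hypothetical checkpoint-500 directory; the 1024 limit is an assumption in the spirit of BART-style models, not something stated in this diff:

# Sketch: the saved model_max_length is the "no limit recorded" sentinel (int(1e30)),
# so cap it explicitly before tokenizing long inputs. "checkpoint-500" is hypothetical.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("checkpoint-500")
print(tokenizer.model_max_length)   # 1000000000000000019884624838656

tokenizer.model_max_length = 1024   # assumed cap; pick what matches your model
batch = tokenizer("some long text", truncation=True)  # truncation now respects the cap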
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:05abc05e544046926700d5441080cb43e7d616d00dddf5211f89003982d674b3
+size 5176
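training_args.bin is the pickled TrainingArguments object the Trainer saves next to each checkpoint, hence the small ~5 KB LFS object. It can be loaded back with torch to see which hyperparameters produced this run; a sketch, assuming the file has been downloaded locally and a compatible transformers version is installed:

# Sketch: recover the hyperparameters stored in training_args.bin.
# weights_only=False is needed on newer torch because this is a pickled
# TrainingArguments object, not a plain tensor file.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)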