satyanshu404 committed
Commit 8225517
1 Parent(s): 530e121

End of training
README.md ADDED
@@ -0,0 +1,158 @@
+ ---
+ license: mit
+ base_model: facebook/bart-large-cnn
+ tags:
+ - generated_from_trainer
+ model-index:
+ - name: bart-large-cnn-finetuned-promt_generation
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # bart-large-cnn-finetuned-promt_generation
+
+ This model is a fine-tuned version of [facebook/bart-large-cnn](https://huggingface.co/facebook/bart-large-cnn) on an unspecified dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 1.8767
+ - Map: 0.3718
+ - Ndcg@10: 0.5915
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
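+ A minimal usage sketch is shown below. It assumes the checkpoint is published as `satyanshu404/bart-large-cnn-finetuned-promt_generation` (inferred from the commit author and model name, not confirmed by the card) and uses a placeholder input document; generation falls back to the defaults in `generation_config.json` (4 beams, lengths 56–142).
+
+ ```python
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
+
+ model_id = "satyanshu404/bart-large-cnn-finetuned-promt_generation"  # assumed repo id
+
+ tokenizer = AutoTokenizer.from_pretrained(model_id)
+ model = AutoModelForSeq2SeqLM.from_pretrained(model_id)
+
+ # Encode a document (BART's context window is 1024 tokens) and generate.
+ inputs = tokenizer(
+     "Example input document ...",  # placeholder text
+     return_tensors="pt",
+     truncation=True,
+     max_length=1024,
+ )
+ output_ids = model.generate(**inputs)
+ print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
+ ```
+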
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 3e-07
+ - train_batch_size: 8
+ - eval_batch_size: 8
+ - seed: 42
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - num_epochs: 100
+
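+ As a rough, hedged sketch, these settings map onto the Hugging Face `Seq2SeqTrainingArguments` roughly as follows; dataset, collator, and metric wiring are omitted, and the evaluation/generation flags are assumptions rather than values reported by this card.
+
+ ```python
+ from transformers import Seq2SeqTrainingArguments
+
+ training_args = Seq2SeqTrainingArguments(
+     output_dir="bart-large-cnn-finetuned-promt_generation",
+     learning_rate=3e-7,
+     per_device_train_batch_size=8,
+     per_device_eval_batch_size=8,
+     seed=42,
+     lr_scheduler_type="linear",
+     num_train_epochs=100,
+     # The Adam betas/epsilon listed above are the optimizer defaults, so no override is needed.
+     evaluation_strategy="epoch",   # assumption: the table below reports one evaluation per epoch
+     predict_with_generate=True,    # assumption: MAP / NDCG@10 are computed on generated text
+ )
+ ```
+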
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Map | Ndcg@10 |
+ |:-------------:|:-----:|:----:|:---------------:|:------:|:-------:|
+ | No log | 1.0 | 4 | 3.3856 | 0.2563 | 0.4531 |
+ | No log | 2.0 | 8 | 3.3740 | 0.2563 | 0.4531 |
+ | No log | 3.0 | 12 | 3.3430 | 0.2563 | 0.4531 |
+ | No log | 4.0 | 16 | 3.2912 | 0.2563 | 0.4531 |
+ | No log | 5.0 | 20 | 3.2468 | 0.2563 | 0.4531 |
+ | No log | 6.0 | 24 | 3.2199 | 0.2563 | 0.4531 |
+ | No log | 7.0 | 28 | 3.2016 | 0.2563 | 0.4531 |
+ | No log | 8.0 | 32 | 3.0741 | 0.2563 | 0.4531 |
+ | No log | 9.0 | 36 | 3.0260 | 0.2563 | 0.4531 |
+ | No log | 10.0 | 40 | 2.9989 | 0.2563 | 0.4531 |
+ | No log | 11.0 | 44 | 2.9755 | 0.2563 | 0.4531 |
+ | No log | 12.0 | 48 | 2.9495 | 0.2560 | 0.4528 |
+ | No log | 13.0 | 52 | 2.9300 | 0.2560 | 0.4528 |
+ | No log | 14.0 | 56 | 2.9088 | 0.2560 | 0.4528 |
+ | No log | 15.0 | 60 | 2.8656 | 0.2560 | 0.4528 |
+ | No log | 16.0 | 64 | 2.8146 | 0.2560 | 0.4528 |
+ | No log | 17.0 | 68 | 2.7699 | 0.2560 | 0.4528 |
+ | No log | 18.0 | 72 | 2.7321 | 0.2577 | 0.4542 |
+ | No log | 19.0 | 76 | 2.6978 | 0.2577 | 0.4542 |
+ | No log | 20.0 | 80 | 2.6665 | 0.2577 | 0.4542 |
+ | No log | 21.0 | 84 | 2.6373 | 0.2577 | 0.4542 |
+ | No log | 22.0 | 88 | 2.6080 | 0.2897 | 0.4974 |
+ | No log | 23.0 | 92 | 2.5812 | 0.2897 | 0.4974 |
+ | No log | 24.0 | 96 | 2.5568 | 0.2954 | 0.5014 |
+ | No log | 25.0 | 100 | 2.5348 | 0.2954 | 0.5014 |
+ | No log | 26.0 | 104 | 2.5133 | 0.2954 | 0.5014 |
+ | No log | 27.0 | 108 | 2.4929 | 0.2954 | 0.5014 |
+ | No log | 28.0 | 112 | 2.4735 | 0.3385 | 0.5472 |
+ | No log | 29.0 | 116 | 2.4553 | 0.3385 | 0.5472 |
+ | No log | 30.0 | 120 | 2.4374 | 0.3385 | 0.5472 |
+ | No log | 31.0 | 124 | 2.4201 | 0.3385 | 0.5472 |
+ | No log | 32.0 | 128 | 2.4035 | 0.3385 | 0.5472 |
+ | No log | 33.0 | 132 | 2.3870 | 0.3385 | 0.5472 |
+ | No log | 34.0 | 136 | 2.3711 | 0.3385 | 0.5472 |
+ | No log | 35.0 | 140 | 2.3556 | 0.3385 | 0.5472 |
+ | No log | 36.0 | 144 | 2.3397 | 0.3385 | 0.5472 |
+ | No log | 37.0 | 148 | 2.3246 | 0.3385 | 0.5472 |
+ | No log | 38.0 | 152 | 2.3097 | 0.3385 | 0.5472 |
+ | No log | 39.0 | 156 | 2.2944 | 0.3718 | 0.5915 |
+ | No log | 40.0 | 160 | 2.2801 | 0.3718 | 0.5915 |
+ | No log | 41.0 | 164 | 2.2660 | 0.3718 | 0.5915 |
+ | No log | 42.0 | 168 | 2.2525 | 0.3718 | 0.5915 |
+ | No log | 43.0 | 172 | 2.2392 | 0.3718 | 0.5915 |
+ | No log | 44.0 | 176 | 2.2267 | 0.3718 | 0.5915 |
+ | No log | 45.0 | 180 | 2.2135 | 0.3718 | 0.5915 |
+ | No log | 46.0 | 184 | 2.2007 | 0.3718 | 0.5915 |
+ | No log | 47.0 | 188 | 2.1875 | 0.3718 | 0.5915 |
+ | No log | 48.0 | 192 | 2.1752 | 0.3718 | 0.5915 |
+ | No log | 49.0 | 196 | 2.1637 | 0.3718 | 0.5915 |
+ | No log | 50.0 | 200 | 2.1514 | 0.3718 | 0.5915 |
+ | No log | 51.0 | 204 | 2.1393 | 0.3718 | 0.5915 |
+ | No log | 52.0 | 208 | 2.1281 | 0.3718 | 0.5915 |
+ | No log | 53.0 | 212 | 2.1159 | 0.3718 | 0.5915 |
+ | No log | 54.0 | 216 | 2.1048 | 0.3718 | 0.5915 |
+ | No log | 55.0 | 220 | 2.0941 | 0.3718 | 0.5915 |
+ | No log | 56.0 | 224 | 2.0829 | 0.3718 | 0.5915 |
+ | No log | 57.0 | 228 | 2.0727 | 0.3718 | 0.5915 |
+ | No log | 58.0 | 232 | 2.0617 | 0.3718 | 0.5915 |
+ | No log | 59.0 | 236 | 2.0518 | 0.3718 | 0.5915 |
+ | No log | 60.0 | 240 | 2.0416 | 0.3718 | 0.5915 |
+ | No log | 61.0 | 244 | 2.0323 | 0.3718 | 0.5915 |
+ | No log | 62.0 | 248 | 2.0230 | 0.3718 | 0.5915 |
+ | No log | 63.0 | 252 | 2.0143 | 0.3718 | 0.5915 |
+ | No log | 64.0 | 256 | 2.0060 | 0.3718 | 0.5915 |
+ | No log | 65.0 | 260 | 1.9977 | 0.3718 | 0.5915 |
+ | No log | 66.0 | 264 | 1.9901 | 0.3718 | 0.5915 |
+ | No log | 67.0 | 268 | 1.9827 | 0.3718 | 0.5915 |
+ | No log | 68.0 | 272 | 1.9757 | 0.3718 | 0.5915 |
+ | No log | 69.0 | 276 | 1.9690 | 0.3718 | 0.5915 |
+ | No log | 70.0 | 280 | 1.9622 | 0.3718 | 0.5915 |
+ | No log | 71.0 | 284 | 1.9561 | 0.3718 | 0.5915 |
+ | No log | 72.0 | 288 | 1.9505 | 0.3718 | 0.5915 |
+ | No log | 73.0 | 292 | 1.9447 | 0.3718 | 0.5915 |
+ | No log | 74.0 | 296 | 1.9401 | 0.3718 | 0.5915 |
+ | No log | 75.0 | 300 | 1.9349 | 0.3863 | 0.5987 |
+ | No log | 76.0 | 304 | 1.9303 | 0.3863 | 0.5987 |
+ | No log | 77.0 | 308 | 1.9254 | 0.3863 | 0.5987 |
+ | No log | 78.0 | 312 | 1.9209 | 0.3863 | 0.5987 |
+ | No log | 79.0 | 316 | 1.9171 | 0.3863 | 0.5987 |
+ | No log | 80.0 | 320 | 1.9133 | 0.3863 | 0.5987 |
+ | No log | 81.0 | 324 | 1.9098 | 0.3863 | 0.5987 |
+ | No log | 82.0 | 328 | 1.9067 | 0.3718 | 0.5915 |
+ | No log | 83.0 | 332 | 1.9034 | 0.3718 | 0.5915 |
+ | No log | 84.0 | 336 | 1.8999 | 0.3718 | 0.5915 |
+ | No log | 85.0 | 340 | 1.8975 | 0.3718 | 0.5915 |
+ | No log | 86.0 | 344 | 1.8949 | 0.3718 | 0.5915 |
+ | No log | 87.0 | 348 | 1.8928 | 0.3718 | 0.5915 |
+ | No log | 88.0 | 352 | 1.8902 | 0.3718 | 0.5915 |
+ | No log | 89.0 | 356 | 1.8880 | 0.3718 | 0.5915 |
+ | No log | 90.0 | 360 | 1.8859 | 0.3718 | 0.5915 |
+ | No log | 91.0 | 364 | 1.8845 | 0.3718 | 0.5915 |
+ | No log | 92.0 | 368 | 1.8829 | 0.3718 | 0.5915 |
+ | No log | 93.0 | 372 | 1.8819 | 0.3718 | 0.5915 |
+ | No log | 94.0 | 376 | 1.8803 | 0.3718 | 0.5915 |
+ | No log | 95.0 | 380 | 1.8801 | 0.3718 | 0.5915 |
+ | No log | 96.0 | 384 | 1.8782 | 0.3718 | 0.5915 |
+ | No log | 97.0 | 388 | 1.8782 | 0.3718 | 0.5915 |
+ | No log | 98.0 | 392 | 1.8773 | 0.3718 | 0.5915 |
+ | No log | 99.0 | 396 | 1.8773 | 0.3718 | 0.5915 |
+ | No log | 100.0 | 400 | 1.8767 | 0.3718 | 0.5915 |
+
+
+ ### Framework versions
+
+ - Transformers 4.34.0
+ - Pytorch 2.0.1+cu118
+ - Datasets 2.14.5
+ - Tokenizers 0.14.1
added_tokens.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "</s>": 2,
+   "<mask>": 50264,
+   "<pad>": 1,
+   "<s>": 0,
+   "<unk>": 3
+ }
config.json ADDED
@@ -0,0 +1,70 @@
+ {
+   "_name_or_path": "facebook/bart-large-cnn",
+   "_num_labels": 3,
+   "activation_dropout": 0.0,
+   "activation_function": "gelu",
+   "add_final_layer_norm": false,
+   "architectures": [
+     "BartForConditionalGeneration"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 0,
+   "classif_dropout": 0.0,
+   "classifier_dropout": 0.0,
+   "d_model": 1024,
+   "decoder_attention_heads": 16,
+   "decoder_ffn_dim": 4096,
+   "decoder_layerdrop": 0.0,
+   "decoder_layers": 12,
+   "decoder_start_token_id": 2,
+   "dropout": 0.1,
+   "early_stopping": true,
+   "encoder_attention_heads": 16,
+   "encoder_ffn_dim": 4096,
+   "encoder_layerdrop": 0.0,
+   "encoder_layers": 12,
+   "eos_token_id": 2,
+   "force_bos_token_to_be_generated": true,
+   "forced_bos_token_id": 0,
+   "forced_eos_token_id": 2,
+   "gradient_checkpointing": false,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2"
+   },
+   "init_std": 0.02,
+   "is_encoder_decoder": true,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_2": 2
+   },
+   "length_penalty": 2.0,
+   "max_length": 142,
+   "max_position_embeddings": 1024,
+   "min_length": 56,
+   "model_type": "bart",
+   "no_repeat_ngram_size": 3,
+   "normalize_before": false,
+   "num_beams": 4,
+   "num_hidden_layers": 12,
+   "output_past": true,
+   "pad_token_id": 1,
+   "prefix": " ",
+   "scale_embedding": false,
+   "task_specific_params": {
+     "summarization": {
+       "early_stopping": true,
+       "length_penalty": 2.0,
+       "max_length": 142,
+       "min_length": 56,
+       "no_repeat_ngram_size": 3,
+       "num_beams": 4
+     }
+   },
+   "torch_dtype": "float32",
+   "transformers_version": "4.34.0",
+   "use_cache": true,
+   "vocab_size": 50264
+ }
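The architecture and decoding settings above are inherited from `facebook/bart-large-cnn`. As a hedged sketch (the repository id is assumed, not stated in this commit), they can be inspected programmatically:

```python
from transformers import AutoConfig

# Assumed repo id; substitute the actual model path if it differs.
config = AutoConfig.from_pretrained("satyanshu404/bart-large-cnn-finetuned-promt_generation")

print(config.d_model, config.encoder_layers, config.decoder_layers)  # 1024 12 12
print(config.task_specific_params["summarization"])                  # beam-search defaults shown above
```
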
generation_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "bos_token_id": 0,
+   "decoder_start_token_id": 2,
+   "early_stopping": true,
+   "eos_token_id": 2,
+   "forced_bos_token_id": 0,
+   "forced_eos_token_id": 2,
+   "length_penalty": 2.0,
+   "max_length": 142,
+   "min_length": 56,
+   "no_repeat_ngram_size": 3,
+   "num_beams": 4,
+   "pad_token_id": 1,
+   "transformers_version": "4.34.0"
+ }
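These values are the defaults applied by `model.generate()` when no arguments are passed. A hedged sketch of loading and overriding them (repo id assumed):

```python
from transformers import GenerationConfig

# Assumed repo id; adjust if the checkpoint lives elsewhere.
gen_config = GenerationConfig.from_pretrained("satyanshu404/bart-large-cnn-finetuned-promt_generation")
print(gen_config.num_beams, gen_config.min_length, gen_config.max_length)  # 4 56 142

# Any field can be overridden per call, e.g. for shorter outputs:
# model.generate(**inputs, generation_config=gen_config, max_length=64)
```
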
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:523f62b15a0c65e02016a6751a877be831858967223f58cd83a188f39a7259ff
+ size 1625537293
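The entry above is a Git LFS pointer rather than the weights themselves; the ~1.6 GB checkpoint is fetched on demand. A hedged sketch of downloading it directly (repo id assumed):

```python
from huggingface_hub import hf_hub_download

# Assumed repo id; from_pretrained() performs this download implicitly.
weights_path = hf_hub_download(
    repo_id="satyanshu404/bart-large-cnn-finetuned-promt_generation",
    filename="pytorch_model.bin",
)
print(weights_path)
```
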
special_tokens_map.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "bos_token": "<s>",
+   "cls_token": "<s>",
+   "eos_token": "</s>",
+   "mask_token": "<mask>",
+   "pad_token": "<pad>",
+   "sep_token": "</s>",
+   "unk_token": "<unk>"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,58 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<pad>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "50264": {
+       "content": "<mask>",
+       "lstrip": true,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [],
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "<s>",
+   "eos_token": "</s>",
+   "errors": "replace",
+   "mask_token": "<mask>",
+   "model_max_length": 1024,
+   "pad_token": "<pad>",
+   "sep_token": "</s>",
+   "tokenizer_class": "BartTokenizer",
+   "trim_offsets": true,
+   "unk_token": "<unk>"
+ }
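Together with added_tokens.json and special_tokens_map.json above, this configures a standard BART tokenizer. A hedged sketch verifying that the special tokens resolve to the expected ids (repo id assumed):

```python
from transformers import AutoTokenizer

# Assumed repo id; the tokenizer files in this commit are what this call loads.
tokenizer = AutoTokenizer.from_pretrained("satyanshu404/bart-large-cnn-finetuned-promt_generation")

print(tokenizer.bos_token, tokenizer.bos_token_id)    # <s> 0
print(tokenizer.pad_token, tokenizer.pad_token_id)    # <pad> 1
print(tokenizer.eos_token, tokenizer.eos_token_id)    # </s> 2
print(tokenizer.mask_token, tokenizer.mask_token_id)  # <mask> 50264
```
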
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:194c595abf5384a38edde1faa4153454f7fbd49f18444ffd0d315500d7495345
+ size 4283
vocab.json ADDED
The diff for this file is too large to render. See raw diff