satyanshu404 committed on
Commit 5096f18
1 Parent(s): 377771d

End of training

Files changed (3):
  1. README.md +105 -104
  2. tokenizer.json +1 -6
  3. training_args.bin +1 -1
README.md CHANGED
@@ -16,8 +16,9 @@ should probably proofread and complete it, then remove this comment. -->
  This model is a fine-tuned version of [facebook/bart-large-cnn](https://huggingface.co/facebook/bart-large-cnn) on an unknown dataset.
  It achieves the following results on the evaluation set:
  - Loss: 2.8294
- - Map: 0.4211
- - Ndcg@10: 0.6088
+ - Actual score: 0.8766
+ - Prediction score: -0.6178
+ - Score difference: 1.4944

  ## Model description

@@ -46,108 +47,108 @@ The following hyperparameters were used during training:

  ### Training results

- | Training Loss | Epoch | Step | Validation Loss | Map | Ndcg@10 |
- |:-------------:|:-----:|:----:|:---------------:|:------:|:-------:|
- | No log | 1.0 | 2 | 3.6607 | 0.3400 | 0.4882 |
- | No log | 2.0 | 4 | 3.6575 | 0.3 | 0.4282 |
- | No log | 3.0 | 6 | 3.6485 | 0.3183 | 0.5016 |
- | No log | 4.0 | 8 | 3.6279 | 0.3183 | 0.4899 |
- | No log | 5.0 | 10 | 3.6199 | 0.3183 | 0.4899 |
- | No log | 6.0 | 12 | 3.6119 | 0.3123 | 0.5016 |
- | No log | 7.0 | 14 | 3.6076 | 0.3323 | 0.5299 |
- | No log | 8.0 | 16 | 3.5413 | 0.3523 | 0.5733 |
- | No log | 9.0 | 18 | 3.5274 | 0.345 | 0.5333 |
- | No log | 10.0 | 20 | 3.5184 | 0.3200 | 0.4816 |
- | No log | 11.0 | 22 | 3.5041 | 0.3200 | 0.5016 |
- | No log | 12.0 | 24 | 3.4935 | 0.3133 | 0.4899 |
- | No log | 13.0 | 26 | 3.4858 | 0.31 | 0.4951 |
- | No log | 14.0 | 28 | 3.4763 | 0.31 | 0.5068 |
- | No log | 15.0 | 30 | 3.3761 | 0.34 | 0.5434 |
- | No log | 16.0 | 32 | 3.3314 | 0.345 | 0.5751 |
- | No log | 17.0 | 34 | 3.3103 | 0.3283 | 0.5468 |
- | No log | 18.0 | 36 | 3.2951 | 0.3233 | 0.5151 |
- | No log | 19.0 | 38 | 3.2811 | 0.3233 | 0.5034 |
- | No log | 20.0 | 40 | 3.2708 | 0.3167 | 0.4834 |
- | No log | 21.0 | 42 | 3.2625 | 0.3233 | 0.4834 |
- | No log | 22.0 | 44 | 3.2471 | 0.3133 | 0.4834 |
- | No log | 23.0 | 46 | 3.2308 | 0.3067 | 0.5034 |
- | No log | 24.0 | 48 | 3.2171 | 0.2867 | 0.4634 |
- | No log | 25.0 | 50 | 3.2068 | 0.2933 | 0.4751 |
- | No log | 26.0 | 52 | 3.1972 | 0.2890 | 0.4803 |
- | No log | 27.0 | 54 | 3.1892 | 0.2757 | 0.4252 |
- | No log | 28.0 | 56 | 3.1812 | 0.2823 | 0.4252 |
- | No log | 29.0 | 58 | 3.1681 | 0.309 | 0.4769 |
- | No log | 30.0 | 60 | 3.1422 | 0.3223 | 0.4969 |
- | No log | 31.0 | 62 | 3.1154 | 0.309 | 0.4769 |
- | No log | 32.0 | 64 | 3.0906 | 0.369 | 0.5539 |
- | No log | 33.0 | 66 | 3.0680 | 0.3850 | 0.5486 |
- | No log | 34.0 | 68 | 3.0476 | 0.3567 | 0.5139 |
- | No log | 35.0 | 70 | 3.0301 | 0.3347 | 0.4909 |
- | No log | 36.0 | 72 | 3.0159 | 0.2861 | 0.4581 |
- | No log | 37.0 | 74 | 3.0040 | 0.2887 | 0.4678 |
- | No log | 38.0 | 76 | 2.9937 | 0.3003 | 0.4374 |
- | No log | 39.0 | 78 | 2.9842 | 0.2723 | 0.3950 |
- | No log | 40.0 | 80 | 2.9759 | 0.3052 | 0.4695 |
- | No log | 41.0 | 82 | 2.9686 | 0.2867 | 0.4459 |
- | No log | 42.0 | 84 | 2.9622 | 0.3099 | 0.4764 |
- | No log | 43.0 | 86 | 2.9565 | 0.3141 | 0.5019 |
- | No log | 44.0 | 88 | 2.9512 | 0.325 | 0.5204 |
- | No log | 45.0 | 90 | 2.9462 | 0.3050 | 0.5004 |
- | No log | 46.0 | 92 | 2.9416 | 0.325 | 0.5151 |
- | No log | 47.0 | 94 | 2.9372 | 0.3183 | 0.4951 |
- | No log | 48.0 | 96 | 2.9325 | 0.318 | 0.5235 |
- | No log | 49.0 | 98 | 2.9278 | 0.318 | 0.5269 |
- | No log | 50.0 | 100 | 2.9228 | 0.3155 | 0.5380 |
- | No log | 51.0 | 102 | 2.9178 | 0.2795 | 0.4823 |
- | No log | 52.0 | 104 | 2.9127 | 0.3329 | 0.5655 |
- | No log | 53.0 | 106 | 2.9081 | 0.3127 | 0.5455 |
- | No log | 54.0 | 108 | 2.9037 | 0.3195 | 0.5642 |
- | No log | 55.0 | 110 | 2.8995 | 0.3145 | 0.5442 |
- | No log | 56.0 | 112 | 2.8957 | 0.3245 | 0.5759 |
- | No log | 57.0 | 114 | 2.8922 | 0.3798 | 0.6383 |
- | No log | 58.0 | 116 | 2.8886 | 0.3788 | 0.6405 |
- | No log | 59.0 | 118 | 2.8854 | 0.3920 | 0.6502 |
- | No log | 60.0 | 120 | 2.8822 | 0.3920 | 0.6376 |
- | No log | 61.0 | 122 | 2.8793 | 0.4255 | 0.6796 |
- | No log | 62.0 | 124 | 2.8766 | 0.4288 | 0.7089 |
- | No log | 63.0 | 126 | 2.8738 | 0.4340 | 0.7048 |
- | No log | 64.0 | 128 | 2.8712 | 0.4273 | 0.6889 |
- | No log | 65.0 | 130 | 2.8688 | 0.4173 | 0.7067 |
- | No log | 66.0 | 132 | 2.8665 | 0.4233 | 0.6802 |
- | No log | 67.0 | 134 | 2.8642 | 0.3973 | 0.6309 |
- | No log | 68.0 | 136 | 2.8620 | 0.4107 | 0.6574 |
- | No log | 69.0 | 138 | 2.8599 | 0.4173 | 0.6774 |
- | No log | 70.0 | 140 | 2.8580 | 0.3907 | 0.6109 |
- | No log | 71.0 | 142 | 2.8560 | 0.4407 | 0.6596 |
- | No log | 72.0 | 144 | 2.8542 | 0.4007 | 0.6196 |
- | No log | 73.0 | 146 | 2.8525 | 0.4207 | 0.6396 |
- | No log | 74.0 | 148 | 2.8508 | 0.4173 | 0.6596 |
- | No log | 75.0 | 150 | 2.8491 | 0.4107 | 0.6303 |
- | No log | 76.0 | 152 | 2.8476 | 0.3973 | 0.5986 |
- | No log | 77.0 | 154 | 2.8460 | 0.4040 | 0.6186 |
- | No log | 78.0 | 156 | 2.8447 | 0.414 | 0.6747 |
- | No log | 79.0 | 158 | 2.8433 | 0.4167 | 0.6673 |
- | No log | 80.0 | 160 | 2.8420 | 0.4457 | 0.6813 |
- | No log | 81.0 | 162 | 2.8409 | 0.4257 | 0.6512 |
- | No log | 82.0 | 164 | 2.8397 | 0.4607 | 0.7073 |
- | No log | 83.0 | 166 | 2.8387 | 0.4257 | 0.6048 |
- | No log | 84.0 | 168 | 2.8377 | 0.4207 | 0.6048 |
- | No log | 85.0 | 170 | 2.8366 | 0.369 | 0.5248 |
- | No log | 86.0 | 172 | 2.8357 | 0.4111 | 0.5971 |
- | No log | 87.0 | 174 | 2.8350 | 0.389 | 0.5448 |
- | No log | 88.0 | 176 | 2.8342 | 0.4028 | 0.5771 |
- | No log | 89.0 | 178 | 2.8334 | 0.374 | 0.5448 |
- | No log | 90.0 | 180 | 2.8328 | 0.374 | 0.5565 |
- | No log | 91.0 | 182 | 2.8321 | 0.4078 | 0.5971 |
- | No log | 92.0 | 184 | 2.8316 | 0.4011 | 0.5888 |
- | No log | 93.0 | 186 | 2.8311 | 0.374 | 0.5565 |
- | No log | 94.0 | 188 | 2.8308 | 0.3811 | 0.5688 |
- | No log | 95.0 | 190 | 2.8304 | 0.374 | 0.5565 |
- | No log | 96.0 | 192 | 2.8302 | 0.3911 | 0.5888 |
- | No log | 97.0 | 194 | 2.8300 | 0.3611 | 0.5488 |
- | No log | 98.0 | 196 | 2.8297 | 0.414 | 0.5848 |
- | No log | 99.0 | 198 | 2.8295 | 0.3878 | 0.5888 |
- | No log | 100.0 | 200 | 2.8294 | 0.4211 | 0.6088 |
+ | Training Loss | Epoch | Step | Validation Loss | Actual score | Prediction score | Score difference |
+ |:-------------:|:-----:|:----:|:---------------:|:------------:|:---------------:|:----------------:|
+ | No log | 1.0 | 2 | 3.6607 | 0.8766 | -0.3976 | 1.2742 |
+ | No log | 2.0 | 4 | 3.6575 | 0.8766 | -0.4128 | 1.2894 |
+ | No log | 3.0 | 6 | 3.6485 | 0.8766 | -0.3426 | 1.2192 |
+ | No log | 4.0 | 8 | 3.6279 | 0.8766 | -0.4158 | 1.2924 |
+ | No log | 5.0 | 10 | 3.6199 | 0.8766 | -0.4332 | 1.3099 |
+ | No log | 6.0 | 12 | 3.6119 | 0.8766 | -0.2640 | 1.1406 |
+ | No log | 7.0 | 14 | 3.6076 | 0.8766 | -0.3007 | 1.1773 |
+ | No log | 8.0 | 16 | 3.5413 | 0.8766 | -0.2210 | 1.0976 |
+ | No log | 9.0 | 18 | 3.5274 | 0.8766 | -0.2317 | 1.1083 |
+ | No log | 10.0 | 20 | 3.5184 | 0.8766 | -0.2801 | 1.1567 |
+ | No log | 11.0 | 22 | 3.5041 | 0.8766 | -0.2898 | 1.1664 |
+ | No log | 12.0 | 24 | 3.4935 | 0.8766 | -0.3675 | 1.2441 |
+ | No log | 13.0 | 26 | 3.4858 | 0.8766 | -0.3410 | 1.2176 |
+ | No log | 14.0 | 28 | 3.4763 | 0.8766 | -0.1891 | 1.0658 |
+ | No log | 15.0 | 30 | 3.3761 | 0.8766 | -0.3789 | 1.2556 |
+ | No log | 16.0 | 32 | 3.3314 | 0.8766 | -0.2348 | 1.1114 |
+ | No log | 17.0 | 34 | 3.3103 | 0.8766 | -0.2213 | 1.0979 |
+ | No log | 18.0 | 36 | 3.2951 | 0.8766 | -0.2949 | 1.1715 |
+ | No log | 19.0 | 38 | 3.2811 | 0.8766 | -0.3811 | 1.2577 |
+ | No log | 20.0 | 40 | 3.2708 | 0.8766 | -0.3883 | 1.2649 |
+ | No log | 21.0 | 42 | 3.2625 | 0.8766 | -0.4219 | 1.2986 |
+ | No log | 22.0 | 44 | 3.2471 | 0.8766 | -0.2971 | 1.1737 |
+ | No log | 23.0 | 46 | 3.2308 | 0.8766 | -0.1368 | 1.0134 |
+ | No log | 24.0 | 48 | 3.2171 | 0.8766 | -0.1705 | 1.0471 |
+ | No log | 25.0 | 50 | 3.2068 | 0.8766 | -0.2057 | 1.0823 |
+ | No log | 26.0 | 52 | 3.1972 | 0.8766 | -0.1984 | 1.0750 |
+ | No log | 27.0 | 54 | 3.1892 | 0.8766 | -0.4348 | 1.3114 |
+ | No log | 28.0 | 56 | 3.1812 | 0.8766 | -0.4045 | 1.2811 |
+ | No log | 29.0 | 58 | 3.1681 | 0.8766 | -0.3908 | 1.2675 |
+ | No log | 30.0 | 60 | 3.1422 | 0.8766 | -0.4513 | 1.3279 |
+ | No log | 31.0 | 62 | 3.1154 | 0.8766 | -0.4580 | 1.3346 |
+ | No log | 32.0 | 64 | 3.0906 | 0.8766 | -0.4082 | 1.2848 |
+ | No log | 33.0 | 66 | 3.0680 | 0.8766 | -0.4836 | 1.3602 |
+ | No log | 34.0 | 68 | 3.0476 | 0.8766 | -0.4555 | 1.3321 |
+ | No log | 35.0 | 70 | 3.0301 | 0.8766 | -0.5186 | 1.3952 |
+ | No log | 36.0 | 72 | 3.0159 | 0.8766 | -0.4299 | 1.3065 |
+ | No log | 37.0 | 74 | 3.0040 | 0.8766 | -0.4216 | 1.2982 |
+ | No log | 38.0 | 76 | 2.9937 | 0.8766 | -0.5763 | 1.4530 |
+ | No log | 39.0 | 78 | 2.9842 | 0.8766 | -0.6791 | 1.5557 |
+ | No log | 40.0 | 80 | 2.9759 | 0.8766 | -0.6260 | 1.5026 |
+ | No log | 41.0 | 82 | 2.9686 | 0.8766 | -0.6331 | 1.5097 |
+ | No log | 42.0 | 84 | 2.9622 | 0.8766 | -0.5588 | 1.4354 |
+ | No log | 43.0 | 86 | 2.9565 | 0.8766 | -0.5719 | 1.4485 |
+ | No log | 44.0 | 88 | 2.9512 | 0.8766 | -0.5433 | 1.4199 |
+ | No log | 45.0 | 90 | 2.9462 | 0.8766 | -0.5528 | 1.4294 |
+ | No log | 46.0 | 92 | 2.9416 | 0.8766 | -0.5487 | 1.4253 |
+ | No log | 47.0 | 94 | 2.9372 | 0.8766 | -0.5130 | 1.3896 |
+ | No log | 48.0 | 96 | 2.9325 | 0.8766 | -0.5495 | 1.4262 |
+ | No log | 49.0 | 98 | 2.9278 | 0.8766 | -0.5334 | 1.4101 |
+ | No log | 50.0 | 100 | 2.9228 | 0.8766 | -0.5954 | 1.4720 |
+ | No log | 51.0 | 102 | 2.9178 | 0.8766 | -0.5583 | 1.4349 |
+ | No log | 52.0 | 104 | 2.9127 | 0.8766 | -0.4640 | 1.3406 |
+ | No log | 53.0 | 106 | 2.9081 | 0.8766 | -0.4567 | 1.3333 |
+ | No log | 54.0 | 108 | 2.9037 | 0.8766 | -0.4877 | 1.3643 |
+ | No log | 55.0 | 110 | 2.8995 | 0.8766 | -0.4779 | 1.3546 |
+ | No log | 56.0 | 112 | 2.8957 | 0.8766 | -0.4815 | 1.3581 |
+ | No log | 57.0 | 114 | 2.8922 | 0.8766 | -0.4051 | 1.2817 |
+ | No log | 58.0 | 116 | 2.8886 | 0.8766 | -0.4100 | 1.2866 |
+ | No log | 59.0 | 118 | 2.8854 | 0.8766 | -0.4069 | 1.2835 |
+ | No log | 60.0 | 120 | 2.8822 | 0.8766 | -0.4390 | 1.3156 |
+ | No log | 61.0 | 122 | 2.8793 | 0.8766 | -0.4077 | 1.2844 |
+ | No log | 62.0 | 124 | 2.8766 | 0.8766 | -0.4278 | 1.3045 |
+ | No log | 63.0 | 126 | 2.8738 | 0.8766 | -0.4430 | 1.3196 |
+ | No log | 64.0 | 128 | 2.8712 | 0.8766 | -0.4711 | 1.3477 |
+ | No log | 65.0 | 130 | 2.8688 | 0.8766 | -0.4294 | 1.3061 |
+ | No log | 66.0 | 132 | 2.8665 | 0.8766 | -0.4669 | 1.3435 |
+ | No log | 67.0 | 134 | 2.8642 | 0.8766 | -0.4831 | 1.3597 |
+ | No log | 68.0 | 136 | 2.8620 | 0.8766 | -0.5078 | 1.3844 |
+ | No log | 69.0 | 138 | 2.8599 | 0.8766 | -0.4924 | 1.3691 |
+ | No log | 70.0 | 140 | 2.8580 | 0.8766 | -0.5569 | 1.4336 |
+ | No log | 71.0 | 142 | 2.8560 | 0.8766 | -0.6560 | 1.5327 |
+ | No log | 72.0 | 144 | 2.8542 | 0.8766 | -0.6354 | 1.5120 |
+ | No log | 73.0 | 146 | 2.8525 | 0.8766 | -0.6496 | 1.5262 |
+ | No log | 74.0 | 148 | 2.8508 | 0.8766 | -0.6530 | 1.5296 |
+ | No log | 75.0 | 150 | 2.8491 | 0.8766 | -0.6868 | 1.5634 |
+ | No log | 76.0 | 152 | 2.8476 | 0.8766 | -0.6260 | 1.5026 |
+ | No log | 77.0 | 154 | 2.8460 | 0.8766 | -0.6303 | 1.5069 |
+ | No log | 78.0 | 156 | 2.8447 | 0.8766 | -0.6137 | 1.4903 |
+ | No log | 79.0 | 158 | 2.8433 | 0.8766 | -0.5980 | 1.4746 |
+ | No log | 80.0 | 160 | 2.8420 | 0.8766 | -0.5799 | 1.4565 |
+ | No log | 81.0 | 162 | 2.8409 | 0.8766 | -0.6208 | 1.4975 |
+ | No log | 82.0 | 164 | 2.8397 | 0.8766 | -0.6227 | 1.4993 |
+ | No log | 83.0 | 166 | 2.8387 | 0.8766 | -0.6545 | 1.5311 |
+ | No log | 84.0 | 168 | 2.8377 | 0.8766 | -0.6560 | 1.5327 |
+ | No log | 85.0 | 170 | 2.8366 | 0.8766 | -0.6943 | 1.5709 |
+ | No log | 86.0 | 172 | 2.8357 | 0.8766 | -0.6259 | 1.5025 |
+ | No log | 87.0 | 174 | 2.8350 | 0.8766 | -0.6605 | 1.5371 |
+ | No log | 88.0 | 176 | 2.8342 | 0.8766 | -0.6590 | 1.5356 |
+ | No log | 89.0 | 178 | 2.8334 | 0.8766 | -0.6557 | 1.5324 |
+ | No log | 90.0 | 180 | 2.8328 | 0.8766 | -0.6482 | 1.5249 |
+ | No log | 91.0 | 182 | 2.8321 | 0.8766 | -0.6397 | 1.5163 |
+ | No log | 92.0 | 184 | 2.8316 | 0.8766 | -0.6501 | 1.5267 |
+ | No log | 93.0 | 186 | 2.8311 | 0.8766 | -0.6567 | 1.5333 |
+ | No log | 94.0 | 188 | 2.8308 | 0.8766 | -0.6441 | 1.5207 |
+ | No log | 95.0 | 190 | 2.8304 | 0.8766 | -0.6463 | 1.5229 |
+ | No log | 96.0 | 192 | 2.8302 | 0.8766 | -0.6614 | 1.5380 |
+ | No log | 97.0 | 194 | 2.8300 | 0.8766 | -0.6041 | 1.4807 |
+ | No log | 98.0 | 196 | 2.8297 | 0.8766 | -0.6222 | 1.4988 |
+ | No log | 99.0 | 198 | 2.8295 | 0.8766 | -0.6509 | 1.5276 |
+ | No log | 100.0 | 200 | 2.8294 | 0.8766 | -0.6178 | 1.4944 |


  ### Framework versions
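In the updated metrics, the score difference is the actual score minus the prediction score: 0.8766 − (−0.6178) = 1.4944, matching the reported final value. For reference, a minimal usage sketch for a fine-tuned bart-large-cnn summarizer like this one; the final repo id is not part of this diff, so the identifier below is a placeholder:

```python
# Minimal sketch: loading a fine-tuned bart-large-cnn checkpoint for summarization.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_id = "your-username/your-finetuned-bart"  # placeholder; the repo id is not in this diff
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSeq2SeqLM.from_pretrained(model_id)

article = "Some long article text to summarize..."
inputs = tokenizer(article, return_tensors="pt", truncation=True, max_length=1024)
summary_ids = model.generate(**inputs, num_beams=4, max_new_tokens=128)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```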
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
  {
    "version": "1.0",
-   "truncation": {
-     "direction": "Right",
-     "max_length": 128,
-     "strategy": "LongestFirst",
-     "stride": 0
-   },
+   "truncation": null,
    "padding": null,
    "added_tokens": [
      {
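This change sets `"truncation"` to `null`, so the saved tokenizer no longer truncates inputs to 128 tokens on its own. If the old behaviour is still wanted, it can be re-enabled at load time; a minimal sketch with the `tokenizers` library, mirroring the deleted settings:

```python
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
# Restore the settings this commit removed: max_length=128, LongestFirst, Right, stride 0.
tok.enable_truncation(max_length=128, stride=0, strategy="longest_first", direction="right")
print(len(tok.encode("word " * 500).ids))  # now capped at 128
```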
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6c1fef4034e9d3b69ddf1ad60d56e567f6dca3e250ba144cd518c34a38b42ec6
+ oid sha256:f6eec492e0b261c0023e4425962444b941da5e092b4d12bc9fc1a46f68831fd0
  size 4728
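Only the Git LFS pointer changes here; the underlying `training_args.bin` is a pickled `TrainingArguments` object. A sketch of inspecting it after downloading, assuming a recent torch and transformers installed (loading runs pickle, so only do this for checkpoints you trust):

```python
import torch

# training_args.bin is a pickled transformers TrainingArguments object.
args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
```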