dahe827 committed
Commit 7affa90
1 Parent(s): 16895da

End of training
README.md CHANGED
@@ -1,11 +1,10 @@
  ---
- license: apache-2.0
- base_model: allenai/longformer-base-4096
+ license: cc-by-sa-4.0
+ base_model: kiddothe2b/longformer-base-4096
  tags:
  - generated_from_trainer
  metrics:
  - f1
- - accuracy
  model-index:
  - name: longformer-base-4096-airlines-news-multi-label
    results: []
@@ -16,12 +15,12 @@ should probably proofread and complete it, then remove this comment. -->

  # longformer-base-4096-airlines-news-multi-label

- This model is a fine-tuned version of [allenai/longformer-base-4096](https://huggingface.co/allenai/longformer-base-4096) on the None dataset.
+ This model is a fine-tuned version of [kiddothe2b/longformer-base-4096](https://huggingface.co/kiddothe2b/longformer-base-4096) on the None dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.4263
- - F1: 0.7073
- - Roc Auc: 0.8173
- - Accuracy: 0.6638
+ - Loss: 0.2583
+ - F1: 0.8916
+ - Roc Auc: 0.6172
+ - Hamming: 0.8950

  ## Model description

@@ -40,39 +39,83 @@ More information needed
  ### Training hyperparameters

  The following hyperparameters were used during training:
- - learning_rate: 7e-05
- - train_batch_size: 16
- - eval_batch_size: 16
+ - learning_rate: 9e-05
+ - train_batch_size: 32
+ - eval_batch_size: 32
  - seed: 42
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: linear
- - lr_scheduler_warmup_steps: 150
- - num_epochs: 20
+ - num_epochs: 65

  ### Training results

- | Training Loss | Epoch | Step | Validation Loss | F1 | Roc Auc | Accuracy |
- |:-------------:|:-----:|:----:|:---------------:|:------:|:-------:|:--------:|
- | No log | 1.0 | 118 | 0.3445 | 0.3084 | 0.5914 | 0.5660 |
- | No log | 2.0 | 236 | 0.2649 | 0.6263 | 0.7544 | 0.6255 |
- | No log | 3.0 | 354 | 0.2985 | 0.5344 | 0.6874 | 0.6298 |
- | No log | 4.0 | 472 | 0.2630 | 0.6604 | 0.7867 | 0.6511 |
- | 0.248 | 5.0 | 590 | 0.2887 | 0.6578 | 0.7728 | 0.6511 |
- | 0.248 | 6.0 | 708 | 0.3088 | 0.6515 | 0.7733 | 0.6511 |
- | 0.248 | 7.0 | 826 | 0.3399 | 0.6367 | 0.7679 | 0.6213 |
- | 0.248 | 8.0 | 944 | 0.3477 | 0.6537 | 0.7757 | 0.6383 |
- | 0.0706 | 9.0 | 1062 | 0.3540 | 0.6749 | 0.7959 | 0.6468 |
- | 0.0706 | 10.0 | 1180 | 0.3847 | 0.6649 | 0.8183 | 0.5702 |
- | 0.0706 | 11.0 | 1298 | 0.4104 | 0.6742 | 0.8150 | 0.6043 |
- | 0.0706 | 12.0 | 1416 | 0.3894 | 0.7006 | 0.8177 | 0.6468 |
- | 0.0212 | 13.0 | 1534 | 0.4363 | 0.6706 | 0.8026 | 0.6255 |
- | 0.0212 | 14.0 | 1652 | 0.4135 | 0.6954 | 0.8085 | 0.6638 |
- | 0.0212 | 15.0 | 1770 | 0.4263 | 0.6822 | 0.8132 | 0.6213 |
- | 0.0212 | 16.0 | 1888 | 0.4162 | 0.6972 | 0.8110 | 0.6553 |
- | 0.0057 | 17.0 | 2006 | 0.4319 | 0.6985 | 0.8172 | 0.6468 |
- | 0.0057 | 18.0 | 2124 | 0.4263 | 0.7073 | 0.8173 | 0.6638 |
- | 0.0057 | 19.0 | 2242 | 0.4308 | 0.6988 | 0.8153 | 0.6468 |
- | 0.0057 | 20.0 | 2360 | 0.4288 | 0.7030 | 0.8163 | 0.6553 |
+ | Training Loss | Epoch | Step | Validation Loss | F1 | Roc Auc | Hamming |
+ |:-------------:|:-----:|:----:|:---------------:|:------:|:-------:|:-------:|
+ | No log | 1.0 | 57 | 0.3454 | 0.8319 | 0.5 | 0.8850 |
+ | No log | 2.0 | 114 | 0.3372 | 0.8319 | 0.5 | 0.8850 |
+ | No log | 3.0 | 171 | 0.3353 | 0.8319 | 0.5 | 0.8850 |
+ | No log | 4.0 | 228 | 0.3310 | 0.8319 | 0.5 | 0.8850 |
+ | No log | 5.0 | 285 | 0.3278 | 0.8319 | 0.5 | 0.8850 |
+ | No log | 6.0 | 342 | 0.3242 | 0.8319 | 0.5 | 0.8850 |
+ | No log | 7.0 | 399 | 0.3206 | 0.8319 | 0.5 | 0.8850 |
+ | No log | 8.0 | 456 | 0.3168 | 0.8319 | 0.5 | 0.8850 |
+ | 0.3599 | 9.0 | 513 | 0.3120 | 0.8319 | 0.5 | 0.8850 |
+ | 0.3599 | 10.0 | 570 | 0.3089 | 0.8319 | 0.5 | 0.8850 |
+ | 0.3599 | 11.0 | 627 | 0.3039 | 0.8319 | 0.5 | 0.8850 |
+ | 0.3599 | 12.0 | 684 | 0.3000 | 0.8319 | 0.5 | 0.8850 |
+ | 0.3599 | 13.0 | 741 | 0.2969 | 0.8319 | 0.5 | 0.8850 |
+ | 0.3599 | 14.0 | 798 | 0.2932 | 0.8319 | 0.5 | 0.8850 |
+ | 0.3599 | 15.0 | 855 | 0.2893 | 0.8449 | 0.5064 | 0.8864 |
+ | 0.3599 | 16.0 | 912 | 0.2859 | 0.8449 | 0.5064 | 0.8864 |
+ | 0.3599 | 17.0 | 969 | 0.2824 | 0.8449 | 0.5064 | 0.8864 |
+ | 0.3111 | 18.0 | 1026 | 0.2800 | 0.8613 | 0.5192 | 0.8894 |
+ | 0.3111 | 19.0 | 1083 | 0.2773 | 0.8606 | 0.5160 | 0.8886 |
+ | 0.3111 | 20.0 | 1140 | 0.2752 | 0.8586 | 0.5248 | 0.8894 |
+ | 0.3111 | 21.0 | 1197 | 0.2727 | 0.8586 | 0.5248 | 0.8894 |
+ | 0.3111 | 22.0 | 1254 | 0.2703 | 0.8597 | 0.5280 | 0.8901 |
+ | 0.3111 | 23.0 | 1311 | 0.2679 | 0.8761 | 0.5532 | 0.8953 |
+ | 0.3111 | 24.0 | 1368 | 0.2665 | 0.8783 | 0.5684 | 0.8975 |
+ | 0.3111 | 25.0 | 1425 | 0.2645 | 0.8791 | 0.5688 | 0.8982 |
+ | 0.3111 | 26.0 | 1482 | 0.2627 | 0.8789 | 0.5776 | 0.8990 |
+ | 0.2854 | 27.0 | 1539 | 0.2611 | 0.8780 | 0.5716 | 0.8982 |
+ | 0.2854 | 28.0 | 1596 | 0.2597 | 0.8791 | 0.5688 | 0.8982 |
+ | 0.2854 | 29.0 | 1653 | 0.2584 | 0.8818 | 0.5845 | 0.9012 |
+ | 0.2854 | 30.0 | 1710 | 0.2570 | 0.8825 | 0.5877 | 0.9019 |
+ | 0.2854 | 31.0 | 1767 | 0.2564 | 0.8930 | 0.6405 | 0.9115 |
+ | 0.2854 | 32.0 | 1824 | 0.2556 | 0.8913 | 0.6396 | 0.9100 |
+ | 0.2854 | 33.0 | 1881 | 0.2547 | 0.8870 | 0.6296 | 0.9071 |
+ | 0.2854 | 34.0 | 1938 | 0.2531 | 0.8843 | 0.6029 | 0.9041 |
+ | 0.2854 | 35.0 | 1995 | 0.2522 | 0.8912 | 0.6341 | 0.9100 |
+ | 0.2722 | 36.0 | 2052 | 0.2516 | 0.8914 | 0.6341 | 0.9100 |
+ | 0.2722 | 37.0 | 2109 | 0.2507 | 0.8913 | 0.6369 | 0.9100 |
+ | 0.2722 | 38.0 | 2166 | 0.2501 | 0.8899 | 0.6392 | 0.9093 |
+ | 0.2722 | 39.0 | 2223 | 0.2491 | 0.8865 | 0.6264 | 0.9063 |
+ | 0.2722 | 40.0 | 2280 | 0.2486 | 0.8939 | 0.6409 | 0.9122 |
+ | 0.2722 | 41.0 | 2337 | 0.2483 | 0.8921 | 0.6516 | 0.9115 |
+ | 0.2722 | 42.0 | 2394 | 0.2474 | 0.8913 | 0.6512 | 0.9108 |
+ | 0.2722 | 43.0 | 2451 | 0.2466 | 0.8911 | 0.6341 | 0.9100 |
+ | 0.2652 | 44.0 | 2508 | 0.2461 | 0.8950 | 0.6557 | 0.9137 |
+ | 0.2652 | 45.0 | 2565 | 0.2459 | 0.8913 | 0.6540 | 0.9108 |
+ | 0.2652 | 46.0 | 2622 | 0.2453 | 0.8934 | 0.6521 | 0.9122 |
+ | 0.2652 | 47.0 | 2679 | 0.2446 | 0.8950 | 0.6557 | 0.9137 |
+ | 0.2652 | 48.0 | 2736 | 0.2445 | 0.8922 | 0.6572 | 0.9115 |
+ | 0.2652 | 49.0 | 2793 | 0.2442 | 0.8931 | 0.6521 | 0.9122 |
+ | 0.2652 | 50.0 | 2850 | 0.2440 | 0.8938 | 0.6608 | 0.9130 |
+ | 0.2652 | 51.0 | 2907 | 0.2436 | 0.8930 | 0.6576 | 0.9122 |
+ | 0.2652 | 52.0 | 2964 | 0.2432 | 0.8940 | 0.6553 | 0.9130 |
+ | 0.2603 | 53.0 | 3021 | 0.2430 | 0.8940 | 0.6553 | 0.9130 |
+ | 0.2603 | 54.0 | 3078 | 0.2428 | 0.8930 | 0.6576 | 0.9122 |
+ | 0.2603 | 55.0 | 3135 | 0.2425 | 0.8938 | 0.6608 | 0.9130 |
+ | 0.2603 | 56.0 | 3192 | 0.2424 | 0.8904 | 0.6480 | 0.9100 |
+ | 0.2603 | 57.0 | 3249 | 0.2424 | 0.8938 | 0.6636 | 0.9130 |
+ | 0.2603 | 58.0 | 3306 | 0.2422 | 0.8938 | 0.6636 | 0.9130 |
+ | 0.2603 | 59.0 | 3363 | 0.2421 | 0.9070 | 0.6668 | 0.9137 |
+ | 0.2603 | 60.0 | 3420 | 0.2419 | 0.9070 | 0.6668 | 0.9137 |
+ | 0.2603 | 61.0 | 3477 | 0.2418 | 0.8938 | 0.6636 | 0.9130 |
+ | 0.2578 | 62.0 | 3534 | 0.2418 | 0.8938 | 0.6636 | 0.9130 |
+ | 0.2578 | 63.0 | 3591 | 0.2416 | 0.8930 | 0.6576 | 0.9122 |
+ | 0.2578 | 64.0 | 3648 | 0.2416 | 0.8938 | 0.6608 | 0.9130 |
+ | 0.2578 | 65.0 | 3705 | 0.2416 | 0.8930 | 0.6576 | 0.9122 |


  ### Framework versions
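
For readers of the updated card, here is a minimal multi-label inference sketch. It is not part of the commit: the repository id `dahe827/longformer-base-4096-airlines-news-multi-label` and the 0.5 decision threshold are assumptions, so adjust both to your setup.

```python
# Hedged sketch: load the fine-tuned checkpoint and decode multi-label predictions.
# The repo id and the 0.5 threshold are assumptions, not part of this commit.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo_id = "dahe827/longformer-base-4096-airlines-news-multi-label"  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)
model.eval()

text = "The airline announced new routes and a codeshare agreement."
inputs = tokenizer(text, return_tensors="pt", truncation=True)

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, num_labels)

# problem_type is multi_label_classification, so each label gets its own sigmoid.
probs = torch.sigmoid(logits)[0]
predicted = [model.config.id2label[i] for i, p in enumerate(probs) if p >= 0.5]
print(predicted)
```

A threshold other than 0.5 may track the reported F1 more closely; the commit does not record which threshold was used during evaluation.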
config.json CHANGED
@@ -1,25 +1,27 @@
  {
-   "_name_or_path": "allenai/longformer-base-4096",
+   "_name_or_path": "kiddothe2b/longformer-base-4096",
    "architectures": [
      "LongformerForSequenceClassification"
    ],
    "attention_mode": "longformer",
    "attention_probs_dropout_prob": 0.1,
    "attention_window": [
-     512,
-     512,
-     512,
-     512,
-     512,
-     512,
-     512,
-     512,
-     512,
-     512,
-     512,
-     512
+     128,
+     128,
+     128,
+     128,
+     128,
+     128,
+     128,
+     128,
+     128,
+     128,
+     128,
+     128
    ],
    "bos_token_id": 0,
+   "classifier_dropout": null,
+   "cls_token_id": 0,
    "eos_token_id": 2,
    "gradient_checkpointing": false,
    "hidden_act": "gelu",
@@ -27,32 +29,40 @@
    "hidden_size": 768,
    "id2label": {
      "0": "capacity expansion",
-     "1": "market expansion",
-     "2": "merger & acquisition and finance investments",
-     "3": "outsourcing and alliance",
-     "4": "product introductions and improvements"
+     "1": "legal action",
+     "2": "market expansion",
+     "3": "merger & acquisition and finance investments",
+     "4": "outsourcing and alliance",
+     "5": "product introductions and improvements"
    },
    "ignore_attention_mask": false,
    "initializer_range": 0.02,
    "intermediate_size": 3072,
    "label2id": {
      "capacity expansion": 0,
-     "market expansion": 1,
-     "merger & acquisition and finance investments": 2,
-     "outsourcing and alliance": 3,
-     "product introductions and improvements": 4
+     "legal action": 1,
+     "market expansion": 2,
+     "merger & acquisition and finance investments": 3,
+     "outsourcing and alliance": 4,
+     "product introductions and improvements": 5
    },
    "layer_norm_eps": 1e-05,
-   "max_position_embeddings": 4098,
+   "max_position_embeddings": 4099,
+   "max_sentence_length": 128,
+   "max_sentence_size": 128,
+   "max_sentences": 8,
+   "model_max_length": 4096,
    "model_type": "longformer",
    "num_attention_heads": 12,
    "num_hidden_layers": 12,
    "onnx_export": false,
    "pad_token_id": 1,
+   "position_embedding_type": "absolute",
    "problem_type": "multi_label_classification",
    "sep_token_id": 2,
    "torch_dtype": "float32",
    "transformers_version": "4.41.1",
    "type_vocab_size": 1,
+   "use_cache": true,
    "vocab_size": 50265
  }
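
A quick way to see the effect of this config change (one extra class, six labels total) without downloading the weights is to load just the configuration. The repository id below is the same assumption as in the inference sketch above.

```python
# Hedged sketch: inspect the updated label space from config.json only.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    "dahe827/longformer-base-4096-airlines-news-multi-label"  # assumed repo id
)
print(config.problem_type)  # multi_label_classification
print(config.num_labels)    # 6 after this commit ("legal action" was added)
for idx in sorted(config.id2label):
    print(idx, config.id2label[idx])
```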
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:86b1106bd0591ef41dd94025bdf55aeb65194b95e94bbf444326c6eeaf118cf8
- size 594687412
+ oid sha256:487dae951a50328e3fa75be613532932c105f2c3cc4975df9b5eae770184b481
+ size 595481184
special_tokens_map.json CHANGED
@@ -1,7 +1,25 @@
  {
-   "bos_token": "<s>",
-   "cls_token": "<s>",
-   "eos_token": "</s>",
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "cls_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
    "mask_token": {
      "content": "<mask>",
      "lstrip": true,
@@ -9,7 +27,25 @@
      "rstrip": false,
      "single_word": false
    },
-   "pad_token": "<pad>",
-   "sep_token": "</s>",
-   "unk_token": "<unk>"
+   "pad_token": {
+     "content": "<pad>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "sep_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
  }
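
The special-token entries are now serialized as full token objects with explicit lstrip/rstrip/normalized flags rather than bare strings; the token strings themselves are unchanged. A small hedged check, again assuming the same repository id:

```python
# Hedged sketch: the special tokens still resolve to the familiar RoBERTa-style strings.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "dahe827/longformer-base-4096-airlines-news-multi-label"  # assumed repo id
)
print(tokenizer.special_tokens_map)
# expected keys: bos_token, eos_token, unk_token, sep_token,
# pad_token, cls_token, mask_token
```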
tokenizer.json CHANGED
@@ -23,7 +23,7 @@
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
-     "normalized": true,
+     "normalized": false,
      "special": true
    },
    {
@@ -32,7 +32,7 @@
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
-     "normalized": true,
+     "normalized": false,
      "special": true
    },
    {
@@ -41,7 +41,7 @@
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
-     "normalized": true,
+     "normalized": false,
      "special": true
    },
    {
@@ -50,7 +50,7 @@
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
-     "normalized": true,
+     "normalized": false,
      "special": true
    },
    {
tokenizer_config.json CHANGED
@@ -4,7 +4,7 @@
    "0": {
      "content": "<s>",
      "lstrip": false,
-     "normalized": true,
+     "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
@@ -12,7 +12,7 @@
    "1": {
      "content": "<pad>",
      "lstrip": false,
-     "normalized": true,
+     "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
@@ -20,7 +20,7 @@
    "2": {
      "content": "</s>",
      "lstrip": false,
-     "normalized": true,
+     "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
@@ -28,7 +28,7 @@
    "3": {
      "content": "<unk>",
      "lstrip": false,
-     "normalized": true,
+     "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
@@ -48,10 +48,10 @@
    "eos_token": "</s>",
    "errors": "replace",
    "mask_token": "<mask>",
-   "model_max_length": 1000000000000000019884624838656,
+   "model_max_length": 512,
    "pad_token": "<pad>",
    "sep_token": "</s>",
-   "tokenizer_class": "LongformerTokenizer",
+   "tokenizer_class": "RobertaTokenizer",
    "trim_offsets": true,
    "unk_token": "<unk>"
  }
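
One practical consequence of the new tokenizer_config.json: model_max_length is now 512, while config.json reports max_position_embeddings of 4099. If longer airline-news articles should use the full window, the length has to be overridden at call time; a hedged sketch with the same assumed repository id:

```python
# Hedged sketch: override the 512-token default advertised by tokenizer_config.json.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "dahe827/longformer-base-4096-airlines-news-multi-label"  # assumed repo id
)
long_article = " ".join(["Airline news sentence."] * 2000)  # stand-in for a long article
inputs = tokenizer(
    long_article,
    truncation=True,
    max_length=4096,  # explicit override; the tokenizer would otherwise truncate at 512
    return_tensors="pt",
)
print(inputs["input_ids"].shape)
```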
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:2c1893cb495a54e4ccb189d566b9dcfe0182b2da3067bd1c7d6ecd02a4422e8f
+ oid sha256:d9a3e90efe4505fb6b5cb17cf26f0e62a7d12ef8a97a6fdb6ae37d03e94a5c8c
  size 5176