Elkelouizajo committed (verified)
Commit 7f0035a · 1 Parent(s): 4c04de7

Upload 34 files
README.md ADDED
@@ -0,0 +1,52 @@
+ ---
+ license: apache-2.0
+ base_model: google-bert/bert-large-cased
+ tags:
+ - generated_from_trainer
+ model-index:
+ - name: results_bert_10K
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # results_bert_10K
+
+ This model is a fine-tuned version of [google-bert/bert-large-cased](https://huggingface.co/google-bert/bert-large-cased) on an unknown dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 0.2
+ - train_batch_size: 8
+ - eval_batch_size: 8
+ - seed: 8446
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - num_epochs: 5.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.39.0.dev0
+ - Pytorch 2.2.1+cu121
+ - Datasets 2.18.0
+ - Tokenizers 0.15.2
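
The generated card stops short of a usage snippet. Per the `config.json` files below, the upload is a `BertForSequenceClassification` head with two labels (`"0"`/`"1"`), so a minimal inference sketch looks like the following. The repository id is an assumption inferred from the committer and model name; substitute the actual path.

```python
# Minimal inference sketch. ASSUMPTION: the hub repo id below is guessed from the
# committer and model name; replace it with the real repository path.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo = "Elkelouizajo/results_bert_10K"  # hypothetical repo id
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForSequenceClassification.from_pretrained(repo)  # 2 labels: "0"/"1"

inputs = tokenizer("Example sentence to classify.", return_tensors="pt",
                   truncation=True, max_length=512)
with torch.no_grad():
    logits = model(**inputs).logits
print(model.config.id2label[logits.argmax(dim=-1).item()])  # prints "0" or "1"
```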
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 5.0,
+   "train_loss": 16.716566650390625,
+   "train_runtime": 2885.0802,
+   "train_samples": 7998,
+   "train_samples_per_second": 13.861,
+   "train_steps_per_second": 1.733
+ }
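
These figures are internally consistent with the card's hyperparameters: 7,998 samples at batch size 8 give ceil(7998/8) = 1,000 optimizer steps per epoch, hence 5,000 steps over 5 epochs, matching `max_steps` in the `trainer_state.json` files below. A quick check:

```python
import math

# Cross-check the training summary above against the card's hyperparameters.
samples, batch_size, epochs, runtime = 7998, 8, 5, 2885.0802
steps_per_epoch = math.ceil(samples / batch_size)  # 1000
total_steps = steps_per_epoch * epochs             # 5000 == max_steps
print(total_steps / runtime)                       # ~1.733 (train_steps_per_second)
print(samples * epochs / runtime)                  # ~13.861 (train_samples_per_second)
```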
checkpoint-4500/config.json ADDED
@@ -0,0 +1,42 @@
+ {
+   "_name_or_path": "google-bert/bert-large-cased",
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "directionality": "bidi",
+   "finetuning_task": "text-classification",
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "0",
+     "1": "1"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "label2id": {
+     "0": 0,
+     "1": 1
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "pad_token_id": 0,
+   "pooler_fc_size": 768,
+   "pooler_num_attention_heads": 12,
+   "pooler_num_fc_layers": 3,
+   "pooler_size_per_head": 128,
+   "pooler_type": "first_token_transform",
+   "position_embedding_type": "absolute",
+   "problem_type": "single_label_classification",
+   "torch_dtype": "float32",
+   "transformers_version": "4.39.0.dev0",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 28996
+ }
checkpoint-4500/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aeb7b06a64f1a88e796de1f05ae383d8d424b8f0f27055e2865918158b3ae0fb
+ size 1334372264
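
This three-line stub is a Git LFS pointer, not the weights themselves: the ~1.3 GB safetensors blob is stored out-of-band and addressed by its SHA-256 `oid`. After fetching (e.g. via `git lfs pull`), the download can be verified against the pointer; a sketch, assuming the file sits at the local path shown:

```python
import hashlib

# Verify a fetched LFS object against the oid recorded in the pointer file above.
expected = "aeb7b06a64f1a88e796de1f05ae383d8d424b8f0f27055e2865918158b3ae0fb"
sha = hashlib.sha256()
with open("checkpoint-4500/model.safetensors", "rb") as f:  # local path assumed
    for chunk in iter(lambda: f.read(1 << 20), b""):        # stream in 1 MiB chunks
        sha.update(chunk)
assert sha.hexdigest() == expected, "sha256 mismatch: corrupt or partial download"
```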
checkpoint-4500/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c1fe0493fa9bb55934a9f4377d529b84ebc49f6dcf21819ad5ceac11da1d793
+ size 2668979437
checkpoint-4500/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6ef9909c47f8364ee183008ff2dd0dcc0ec663735d83331c810c5105be72225
+ size 14308
checkpoint-4500/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7eaa9383e015fa203df48df8f66e0c941b43b6f7cc76bb3c244d931323800d7e
+ size 1064
checkpoint-4500/special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
checkpoint-4500/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoint-4500/tokenizer_config.json ADDED
@@ -0,0 +1,55 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "100": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "101": {
+       "content": "[CLS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "102": {
+       "content": "[SEP]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "103": {
+       "content": "[MASK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_lower_case": false,
+   "mask_token": "[MASK]",
+   "model_max_length": 512,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "unk_token": "[UNK]"
+ }
checkpoint-4500/trainer_state.json ADDED
@@ -0,0 +1,84 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 4.5,
+   "eval_steps": 500,
+   "global_step": 4500,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.5,
+       "grad_norm": 28.553239822387695,
+       "learning_rate": 0.18000000000000002,
+       "loss": 26.996,
+       "step": 500
+     },
+     {
+       "epoch": 1.0,
+       "grad_norm": 15.302693367004395,
+       "learning_rate": 0.16000000000000003,
+       "loss": 27.5789,
+       "step": 1000
+     },
+     {
+       "epoch": 1.5,
+       "grad_norm": 3.7751123905181885,
+       "learning_rate": 0.13999999999999999,
+       "loss": 21.6633,
+       "step": 1500
+     },
+     {
+       "epoch": 2.0,
+       "grad_norm": 5.127112865447998,
+       "learning_rate": 0.12,
+       "loss": 20.7534,
+       "step": 2000
+     },
+     {
+       "epoch": 2.5,
+       "grad_norm": 22.80483055114746,
+       "learning_rate": 0.1,
+       "loss": 18.4192,
+       "step": 2500
+     },
+     {
+       "epoch": 3.0,
+       "grad_norm": 30.512271881103516,
+       "learning_rate": 0.08000000000000002,
+       "loss": 16.2861,
+       "step": 3000
+     },
+     {
+       "epoch": 3.5,
+       "grad_norm": 23.139978408813477,
+       "learning_rate": 0.06,
+       "loss": 14.2996,
+       "step": 3500
+     },
+     {
+       "epoch": 4.0,
+       "grad_norm": 23.255733489990234,
+       "learning_rate": 0.04000000000000001,
+       "loss": 9.7249,
+       "step": 4000
+     },
+     {
+       "epoch": 4.5,
+       "grad_norm": 6.302779197692871,
+       "learning_rate": 0.020000000000000004,
+       "loss": 6.8687,
+       "step": 4500
+     }
+   ],
+   "logging_steps": 500,
+   "max_steps": 5000,
+   "num_input_tokens_seen": 0,
+   "num_train_epochs": 5,
+   "save_steps": 500,
+   "total_flos": 3.3542073628901376e+16,
+   "train_batch_size": 8,
+   "trial_name": null,
+   "trial_params": null
+ }
checkpoint-4500/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cab7d19cf015097b4d7ee18a0eba83daf203cf2190d9d3ca5bbf68e6a4e56bb0
+ size 4920
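
`training_args.bin` is the Trainer's `TrainingArguments` object serialized with `torch.save`, so it can be unpickled for inspection; doing so requires a compatible `transformers` install (here, the 4.39.0.dev0 era). A sketch, assuming the checkpoint directory is local:

```python
import torch

# Inspect the pickled TrainingArguments; transformers must be importable.
args = torch.load("checkpoint-4500/training_args.bin")  # local path assumed
print(args.learning_rate)                # 0.2, as in the model card
print(args.per_device_train_batch_size)  # 8
print(args.seed)                         # 8446
print(args.lr_scheduler_type)            # linear
```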
checkpoint-4500/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
 
checkpoint-5000/config.json ADDED
@@ -0,0 +1,42 @@
+ {
+   "_name_or_path": "google-bert/bert-large-cased",
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "directionality": "bidi",
+   "finetuning_task": "text-classification",
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "0",
+     "1": "1"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "label2id": {
+     "0": 0,
+     "1": 1
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "pad_token_id": 0,
+   "pooler_fc_size": 768,
+   "pooler_num_attention_heads": 12,
+   "pooler_num_fc_layers": 3,
+   "pooler_size_per_head": 128,
+   "pooler_type": "first_token_transform",
+   "position_embedding_type": "absolute",
+   "problem_type": "single_label_classification",
+   "torch_dtype": "float32",
+   "transformers_version": "4.39.0.dev0",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 28996
+ }
checkpoint-5000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a708d615ef39bef230f768839132e065313db18a6c211a0bd1b0d55bd06082b
+ size 1334372264
checkpoint-5000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6edd185a75873439b48a776aac368f68a79ba4c4365bfebbb38759101285b24d
+ size 2668979437
checkpoint-5000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f24031fef700549893eb3132c2f2ea8a410e24af7c4ddcaf15791aa46f0af172
+ size 14308
checkpoint-5000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f9fea76be91d9110add003a5e64dfbe8a535e4ef8140a7ca1e6387a950f4aa50
+ size 1064
checkpoint-5000/special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
checkpoint-5000/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoint-5000/tokenizer_config.json ADDED
@@ -0,0 +1,55 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "100": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "101": {
+       "content": "[CLS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "102": {
+       "content": "[SEP]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "103": {
+       "content": "[MASK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_lower_case": false,
+   "mask_token": "[MASK]",
+   "model_max_length": 512,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "unk_token": "[UNK]"
+ }
checkpoint-5000/trainer_state.json ADDED
@@ -0,0 +1,91 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 5.0,
+   "eval_steps": 500,
+   "global_step": 5000,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.5,
+       "grad_norm": 28.553239822387695,
+       "learning_rate": 0.18000000000000002,
+       "loss": 26.996,
+       "step": 500
+     },
+     {
+       "epoch": 1.0,
+       "grad_norm": 15.302693367004395,
+       "learning_rate": 0.16000000000000003,
+       "loss": 27.5789,
+       "step": 1000
+     },
+     {
+       "epoch": 1.5,
+       "grad_norm": 3.7751123905181885,
+       "learning_rate": 0.13999999999999999,
+       "loss": 21.6633,
+       "step": 1500
+     },
+     {
+       "epoch": 2.0,
+       "grad_norm": 5.127112865447998,
+       "learning_rate": 0.12,
+       "loss": 20.7534,
+       "step": 2000
+     },
+     {
+       "epoch": 2.5,
+       "grad_norm": 22.80483055114746,
+       "learning_rate": 0.1,
+       "loss": 18.4192,
+       "step": 2500
+     },
+     {
+       "epoch": 3.0,
+       "grad_norm": 30.512271881103516,
+       "learning_rate": 0.08000000000000002,
+       "loss": 16.2861,
+       "step": 3000
+     },
+     {
+       "epoch": 3.5,
+       "grad_norm": 23.139978408813477,
+       "learning_rate": 0.06,
+       "loss": 14.2996,
+       "step": 3500
+     },
+     {
+       "epoch": 4.0,
+       "grad_norm": 23.255733489990234,
+       "learning_rate": 0.04000000000000001,
+       "loss": 9.7249,
+       "step": 4000
+     },
+     {
+       "epoch": 4.5,
+       "grad_norm": 6.302779197692871,
+       "learning_rate": 0.020000000000000004,
+       "loss": 6.8687,
+       "step": 4500
+     },
+     {
+       "epoch": 5.0,
+       "grad_norm": 6.826946258544922,
+       "learning_rate": 0.0,
+       "loss": 4.5755,
+       "step": 5000
+     }
+   ],
+   "logging_steps": 500,
+   "max_steps": 5000,
+   "num_input_tokens_seen": 0,
+   "num_train_epochs": 5,
+   "save_steps": 500,
+   "total_flos": 3.726793521948672e+16,
+   "train_batch_size": 8,
+   "trial_name": null,
+   "trial_params": null
+ }
checkpoint-5000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cab7d19cf015097b4d7ee18a0eba83daf203cf2190d9d3ca5bbf68e6a4e56bb0
+ size 4920
checkpoint-5000/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
 
config.json ADDED
@@ -0,0 +1,42 @@
+ {
+   "_name_or_path": "google-bert/bert-large-cased",
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "directionality": "bidi",
+   "finetuning_task": "text-classification",
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "0",
+     "1": "1"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "label2id": {
+     "0": 0,
+     "1": 1
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "pad_token_id": 0,
+   "pooler_fc_size": 768,
+   "pooler_num_attention_heads": 12,
+   "pooler_num_fc_layers": 3,
+   "pooler_size_per_head": 128,
+   "pooler_type": "first_token_transform",
+   "position_embedding_type": "absolute",
+   "problem_type": "single_label_classification",
+   "torch_dtype": "float32",
+   "transformers_version": "4.39.0.dev0",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 28996
+ }
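
The head metadata can be inspected without downloading the weights. Note that `id2label` was never given human-readable names, so predictions surface as the strings `"0"` and `"1"`, and a `problem_type` of `single_label_classification` means the Trainer optimized a standard cross-entropy loss. A sketch, assuming the files are local:

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")  # local path assumed; or the hub repo id
print(config.num_labels)    # 2
print(config.id2label)      # {0: '0', 1: '1'} — labels were never renamed
print(config.problem_type)  # 'single_label_classification' -> cross-entropy loss
```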
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a708d615ef39bef230f768839132e065313db18a6c211a0bd1b0d55bd06082b
+ size 1334372264
predict_results.txt ADDED
@@ -0,0 +1,1001 @@
+ index prediction
+ 0 1
+ 1 1
+ 2 1
+ … (rows 3–998 identical in form: all 1,000 rows, indices 0–999, carry prediction 1)
+ 999 1
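
The prediction dump is whitespace-delimited with an `index prediction` header, so it loads directly into a dataframe; note that every one of the 1,000 rows carries label 1. A sketch, assuming the file is local:

```python
import pandas as pd

# Load the Trainer's prediction dump (local path assumed).
preds = pd.read_csv("predict_results.txt", sep=r"\s+")
print(preds["prediction"].value_counts())  # 1 -> 1000: a single predicted class
```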
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,55 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "100": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "101": {
+       "content": "[CLS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "102": {
+       "content": "[SEP]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "103": {
+       "content": "[MASK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_lower_case": false,
+   "mask_token": "[MASK]",
+   "model_max_length": 512,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "unk_token": "[UNK]"
+ }
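
The tokenizer is the stock cased BERT WordPiece setup (`do_lower_case: false`, a 28,996-entry vocabulary, 512-token limit), unchanged from the base model. A quick sanity check, assuming the tokenizer files are local:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")  # local path assumed; or the hub repo id
enc = tok("Hello world", truncation=True, max_length=512)
print(tok.convert_ids_to_tokens(enc["input_ids"]))
# expected roughly: ['[CLS]', 'Hello', 'world', '[SEP]']
```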
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 5.0,
+   "train_loss": 16.716566650390625,
+   "train_runtime": 2885.0802,
+   "train_samples": 7998,
+   "train_samples_per_second": 13.861,
+   "train_steps_per_second": 1.733
+ }
trainer_state.json ADDED
@@ -0,0 +1,100 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 5.0,
+   "eval_steps": 500,
+   "global_step": 5000,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.5,
+       "grad_norm": 28.553239822387695,
+       "learning_rate": 0.18000000000000002,
+       "loss": 26.996,
+       "step": 500
+     },
+     {
+       "epoch": 1.0,
+       "grad_norm": 15.302693367004395,
+       "learning_rate": 0.16000000000000003,
+       "loss": 27.5789,
+       "step": 1000
+     },
+     {
+       "epoch": 1.5,
+       "grad_norm": 3.7751123905181885,
+       "learning_rate": 0.13999999999999999,
+       "loss": 21.6633,
+       "step": 1500
+     },
+     {
+       "epoch": 2.0,
+       "grad_norm": 5.127112865447998,
+       "learning_rate": 0.12,
+       "loss": 20.7534,
+       "step": 2000
+     },
+     {
+       "epoch": 2.5,
+       "grad_norm": 22.80483055114746,
+       "learning_rate": 0.1,
+       "loss": 18.4192,
+       "step": 2500
+     },
+     {
+       "epoch": 3.0,
+       "grad_norm": 30.512271881103516,
+       "learning_rate": 0.08000000000000002,
+       "loss": 16.2861,
+       "step": 3000
+     },
+     {
+       "epoch": 3.5,
+       "grad_norm": 23.139978408813477,
+       "learning_rate": 0.06,
+       "loss": 14.2996,
+       "step": 3500
+     },
+     {
+       "epoch": 4.0,
+       "grad_norm": 23.255733489990234,
+       "learning_rate": 0.04000000000000001,
+       "loss": 9.7249,
+       "step": 4000
+     },
+     {
+       "epoch": 4.5,
+       "grad_norm": 6.302779197692871,
+       "learning_rate": 0.020000000000000004,
+       "loss": 6.8687,
+       "step": 4500
+     },
+     {
+       "epoch": 5.0,
+       "grad_norm": 6.826946258544922,
+       "learning_rate": 0.0,
+       "loss": 4.5755,
+       "step": 5000
+     },
+     {
+       "epoch": 5.0,
+       "step": 5000,
+       "total_flos": 3.726793521948672e+16,
+       "train_loss": 16.716566650390625,
+       "train_runtime": 2885.0802,
+       "train_samples_per_second": 13.861,
+       "train_steps_per_second": 1.733
+     }
+   ],
+   "logging_steps": 500,
+   "max_steps": 5000,
+   "num_input_tokens_seen": 0,
+   "num_train_epochs": 5,
+   "save_steps": 500,
+   "total_flos": 3.726793521948672e+16,
+   "train_batch_size": 8,
+   "trial_name": null,
+   "trial_params": null
+ }
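
`log_history` doubles as a machine-readable training log: one row per 500 steps plus a final summary row. Extracting the loss curve is a short loop; a sketch, assuming the file is local:

```python
import json

with open("trainer_state.json") as f:  # local path assumed
    state = json.load(f)
# Per-step rows have "loss"; the final summary row only has "train_loss" and is skipped.
for entry in state["log_history"]:
    if "loss" in entry:
        print(entry["step"], entry["loss"], entry["learning_rate"])
```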
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cab7d19cf015097b4d7ee18a0eba83daf203cf2190d9d3ca5bbf68e6a4e56bb0
+ size 4920
vocab.txt ADDED
The diff for this file is too large to render. See raw diff