w05230505 committed
Commit 215a0b9
1 Parent(s): 4e4c677

End of training
README.md CHANGED
@@ -1,14 +1,29 @@
 ---
 library_name: transformers
+language:
+- en
 license: apache-2.0
 base_model: bert-base-uncased
 tags:
 - generated_from_trainer
+datasets:
+- glue
 metrics:
 - accuracy
 model-index:
 - name: bert-base-uncased-finetuned-sst2
-  results: []
+  results:
+  - task:
+      name: Text Classification
+      type: text-classification
+    dataset:
+      name: GLUE SST2
+      type: glue
+      args: sst2
+    metrics:
+    - name: Accuracy
+      type: accuracy
+      value: 0.9277522935779816
 ---
 
 <!-- This model card has been generated automatically according to the information the Trainer had access to. You
@@ -16,10 +31,10 @@ should probably proofread and complete it, then remove this comment. -->
 
 # bert-base-uncased-finetuned-sst2
 
-This model is a fine-tuned version of [bert-base-uncased](https://huggingface.co/bert-base-uncased) on an unknown dataset.
+This model is a fine-tuned version of [bert-base-uncased](https://huggingface.co/bert-base-uncased) on the GLUE SST2 dataset.
 It achieves the following results on the evaluation set:
-- Accuracy: 0.9266
-- Loss: 0.3198
+- Loss: 0.3188
+- Accuracy: 0.9278
 
 ## Model description
 
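Since the updated card now documents a GLUE SST-2 text-classification fine-tune, a minimal inference sketch may be useful. The repo id below is an assumption inferred from the committer and model name, and the example assumes the default LABEL_0/LABEL_1 mapping unless id2label was customized in the config.

```python
# Minimal inference sketch (assumption: the checkpoint is published as
# "w05230505/bert-base-uncased-finetuned-sst2" with the default label mapping).
from transformers import pipeline

classifier = pipeline(
    "text-classification",
    model="w05230505/bert-base-uncased-finetuned-sst2",  # assumed repo id
)

print(classifier("A touching, beautifully shot film."))
# e.g. [{'label': 'LABEL_1', 'score': 0.99}]  (for SST-2, label 1 = positive)
```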
all_results.json CHANGED
@@ -1,15 +1,15 @@
 {
     "epoch": 3.0,
-    "eval_accuracy": 0.9288990825688074,
-    "eval_loss": 0.3197207450866699,
-    "eval_runtime": 23.9128,
+    "eval_accuracy": 0.9277522935779816,
+    "eval_loss": 0.3188002407550812,
+    "eval_runtime": 40.5303,
     "eval_samples": 872,
-    "eval_samples_per_second": 36.466,
-    "eval_steps_per_second": 4.558,
+    "eval_samples_per_second": 21.515,
+    "eval_steps_per_second": 2.689,
     "total_flos": 5.316079940232192e+16,
     "train_loss": 0.0,
-    "train_runtime": 0.0015,
+    "train_runtime": 0.0013,
     "train_samples": 67349,
-    "train_samples_per_second": 138971226.679,
-    "train_steps_per_second": 8687120.289
+    "train_samples_per_second": 153745743.884,
+    "train_steps_per_second": 9610678.433
 }
config.json CHANGED
@@ -23,6 +23,7 @@
   "num_hidden_layers": 12,
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
+  "problem_type": "single_label_classification",
   "torch_dtype": "float32",
   "transformers_version": "4.45.0.dev0",
   "type_vocab_size": 2,
eval_results.json CHANGED
@@ -1,9 +1,9 @@
 {
     "epoch": 3.0,
-    "eval_accuracy": 0.9288990825688074,
-    "eval_loss": 0.3197207450866699,
-    "eval_runtime": 23.9128,
+    "eval_accuracy": 0.9277522935779816,
+    "eval_loss": 0.3188002407550812,
+    "eval_runtime": 40.5303,
     "eval_samples": 872,
-    "eval_samples_per_second": 36.466,
-    "eval_steps_per_second": 4.558
+    "eval_samples_per_second": 21.515,
+    "eval_steps_per_second": 2.689
 }
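Both the old and new accuracy values are consistent with whole-number correct-prediction counts over the 872 evaluation examples (810 before, 809 after); a quick arithmetic check:

```python
# Consistency check: the reported accuracies correspond to integer
# correct-prediction counts over the 872 evaluation examples.
eval_samples = 872
print(round(0.9288990825688074 * eval_samples))  # 810 (previous run)
print(round(0.9277522935779816 * eval_samples))  # 809 (this run)
print(809 / eval_samples)                        # 0.9277522935779816
```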
train_results.json CHANGED
@@ -2,8 +2,8 @@
     "epoch": 3.0,
     "total_flos": 5.316079940232192e+16,
     "train_loss": 0.0,
-    "train_runtime": 0.0015,
+    "train_runtime": 0.0013,
     "train_samples": 67349,
-    "train_samples_per_second": 138971226.679,
-    "train_steps_per_second": 8687120.289
+    "train_samples_per_second": 153745743.884,
+    "train_steps_per_second": 9610678.433
 }
trainer_state.json CHANGED
@@ -61,9 +61,9 @@
       "step": 12630,
       "total_flos": 5.316079940232192e+16,
       "train_loss": 0.0,
-      "train_runtime": 0.0015,
-      "train_samples_per_second": 138971226.679,
-      "train_steps_per_second": 8687120.289
+      "train_runtime": 0.0013,
+      "train_samples_per_second": 153745743.884,
+      "train_steps_per_second": 9610678.433
     }
   ],
   "logging_steps": 500,