monologg committed on
Commit
4d97ce5
1 Parent(s): cf99fc6

update model

Browse files
Files changed (5) hide show
  1. config.json +36 -38
  2. pytorch_model.bin +2 -2
  3. special_tokens_map.json +8 -8
  4. tokenizer_config.json +10 -10
  5. vocab.txt +0 -0
config.json CHANGED
@@ -1,38 +1,36 @@
1
- {
2
- "architectures": [
3
- "BertForMultiLabelClassification"
4
- ],
5
- "attention_probs_dropout_prob": 0.1,
6
- "finetuning_task": "goemotions",
7
- "hidden_act": "gelu",
8
- "hidden_dropout_prob": 0.1,
9
- "hidden_size": 768,
10
- "id2label": {
11
- "0": "anger",
12
- "1": "disgust",
13
- "2": "fear",
14
- "3": "joy",
15
- "4": "neutral",
16
- "5": "sadness",
17
- "6": "surprise"
18
- },
19
- "initializer_range": 0.02,
20
- "intermediate_size": 3072,
21
- "label2id": {
22
- "anger": 0,
23
- "disgust": 1,
24
- "fear": 2,
25
- "joy": 3,
26
- "neutral": 4,
27
- "sadness": 5,
28
- "surprise": 6
29
- },
30
- "layer_norm_eps": 1e-12,
31
- "max_position_embeddings": 512,
32
- "model_type": "bert",
33
- "num_attention_heads": 12,
34
- "num_hidden_layers": 12,
35
- "pad_token_id": 0,
36
- "type_vocab_size": 2,
37
- "vocab_size": 28996
38
- }
 
1
+ {
2
+ "architectures": ["BertForMultiLabelClassification"],
3
+ "attention_probs_dropout_prob": 0.1,
4
+ "finetuning_task": "goemotions",
5
+ "hidden_act": "gelu",
6
+ "hidden_dropout_prob": 0.1,
7
+ "hidden_size": 768,
8
+ "id2label": {
9
+ "0": "anger",
10
+ "1": "disgust",
11
+ "2": "fear",
12
+ "3": "joy",
13
+ "4": "neutral",
14
+ "5": "sadness",
15
+ "6": "surprise"
16
+ },
17
+ "initializer_range": 0.02,
18
+ "intermediate_size": 3072,
19
+ "label2id": {
20
+ "anger": 0,
21
+ "disgust": 1,
22
+ "fear": 2,
23
+ "joy": 3,
24
+ "neutral": 4,
25
+ "sadness": 5,
26
+ "surprise": 6
27
+ },
28
+ "layer_norm_eps": 1e-12,
29
+ "max_position_embeddings": 512,
30
+ "model_type": "bert",
31
+ "num_attention_heads": 12,
32
+ "num_hidden_layers": 12,
33
+ "pad_token_id": 0,
34
+ "type_vocab_size": 2,
35
+ "vocab_size": 28996
36
+ }
 
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:53c64f79743b28b55827d3bd9ba72052393722823f1a16181d53886585978d58
3
- size 433312525
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aa2a54d3263432abfde7f2fa93a76eb2b8d08de479d08c283fa961566b505d65
3
+ size 433347520
special_tokens_map.json CHANGED
@@ -1,8 +1,8 @@
1
- {
2
- "unk_token": "[UNK]",
3
- "sep_token": "[SEP]",
4
- "pad_token": "[PAD]",
5
- "cls_token": "[CLS]",
6
- "mask_token": "[MASK]",
7
- "additional_special_tokens": ["[NAME]", "[RELIGION]"]
8
- }
 
1
+ {
2
+ "unk_token": "[UNK]",
3
+ "sep_token": "[SEP]",
4
+ "pad_token": "[PAD]",
5
+ "cls_token": "[CLS]",
6
+ "mask_token": "[MASK]",
7
+ "additional_special_tokens": ["[NAME]", "[RELIGION]"]
8
+ }
tokenizer_config.json CHANGED
@@ -1,10 +1,10 @@
1
- {
2
- "do_lower_case": false,
3
- "model_max_length": 512,
4
- "unk_token": "[UNK]",
5
- "sep_token": "[SEP]",
6
- "pad_token": "[PAD]",
7
- "cls_token": "[CLS]",
8
- "mask_token": "[MASK]",
9
- "additional_special_tokens": ["[NAME]", "[RELIGION]"]
10
- }
 
1
+ {
2
+ "do_lower_case": false,
3
+ "model_max_length": 512,
4
+ "unk_token": "[UNK]",
5
+ "sep_token": "[SEP]",
6
+ "pad_token": "[PAD]",
7
+ "cls_token": "[CLS]",
8
+ "mask_token": "[MASK]",
9
+ "additional_special_tokens": ["[NAME]", "[RELIGION]"]
10
+ }
vocab.txt CHANGED
The diff for this file is too large to render. See raw diff