File size: 1,628 Bytes
{
  "activation_dropout": 0.1,
  "activation_function": "gelu",
  "add_bias_logits": false,
  "add_final_layer_norm": false,
  "architectures": [
    "BartModel"
  ],
  "attention_dropout": 0.1,
  "bos_token_id": 0,
  "classif_dropout": 0.1,
  "classifier_dropout": 0.0,
  "d_model": 1024,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 12,
  "decoder_start_token_id": 2,
  "dropout": 0.1,
  "early_stopping": true,
  "encoder_attention_heads": 16,
  "encoder_ffn_dim": 4096,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 12,
  "eos_token_id": 2,
  "forced_eos_token_id": 2,
  "forced_bos_token_id": 0,
  "gradient_checkpointing": false,
  "id2label": {
    "0": "LABEL_0",
    "1": "LABEL_1",
    "2": "LABEL_2"
  },
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "label2id": {
    "LABEL_0": 0,
    "LABEL_1": 1,
    "LABEL_2": 2
  },
  "max_position_embeddings": 1024,
  "model_type": "bart",
  "no_repeat_ngram_size": 3,
  "normalize_before": false,
  "num_beams": 4,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "scale_embedding": false,
  "task_specific_params": {
    "summarization": {
      "length_penalty": 1.0,
      "max_length": 128,
      "min_length": 12,
      "num_beams": 4
    },
    "summarization_cnn": {
      "length_penalty": 2.0,
      "max_length": 142,
      "min_length": 56,
      "num_beams": 4
    },
    "summarization_xsum": {
      "length_penalty": 1.0,
      "max_length": 62,
      "min_length": 11,
      "num_beams": 6
    }
  },
  "transformers_version": "4.7.0.dev0",
  "use_cache": true,
  "vocab_size": 50265
}
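
This is a standard Hugging Face transformers config.json describing a BART-large sized encoder-decoder (12 encoder and 12 decoder layers, d_model 1024, 16 attention heads, a 50,265-token vocabulary) with per-task generation overrides under task_specific_params. Below is a minimal sketch of reading the file back with the transformers library; the local filename config.json and the facebook/bart-large repo id in the comments are assumptions, since the page itself does not name the repository.

# A minimal sketch, assuming the JSON above is saved locally as config.json.
# The values match the facebook/bart-large checkpoint, but that repo id is an
# assumption and is not confirmed by this page.
from transformers import BartConfig, BartModel

# Rebuild the configuration object from the JSON shown above.
config = BartConfig.from_json_file("config.json")

print(config.model_type)                              # "bart"
print(config.d_model)                                 # 1024
print(config.encoder_layers, config.decoder_layers)   # 12 12
print(config.task_specific_params["summarization_cnn"]["max_length"])  # 142

# Building a model from the config alone yields a randomly initialised
# BartModel with this architecture; trained weights would come from
# from_pretrained(<repo id>) instead.
model = BartModel(config)

Note that keys such as num_beams, no_repeat_ngram_size, and early_stopping are generation defaults carried in the config, while the summarization, summarization_cnn, and summarization_xsum blocks provide task-specific overrides applied by pipelines that use them.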