{ "attention_probs_dropout_prob": 0, "hidden_act": "gelu", "hidden_dropout_prob": 0, "embedding_dropout_prob": 0, "hidden_size": 1152, "embedding_size": 256, "output_embedding_size": 1664, "initializer_range": 0.02, "intermediate_size": 4608, "max_position_embeddings": 512, "num_attention_heads": 18, "num_hidden_layers": 32, "type_vocab_size": 2, "vocab_size": 250300 }