GraphsGPT-1W / config.json
{
"adaptive_position_length": false,
"architectures": [
"GraphsGPTForCausalLM"
],
"atom_vocab_size": 118,
"bond_vocab_size": 92,
"connection_loss_type": "contrastive",
"connection_loss_weight": 1.0,
"gradient_checkpointing": false,
"hidden_act": "silu",
"hidden_size": 512,
"initializer_method": "hidden",
"initializer_range": 0.02,
"intermediate_size": 2048,
"model_type": "graphs_gpt",
"node_loss_weight": 1.0,
"num_attention_heads": 8,
"num_fingerprints": 1,
"num_hidden_layers": 8,
"pad_token_id": 0,
"position_feature_size": 128,
"rms_norm_eps": 1e-06,
"share_embeddings": false,
"tie_word_embeddings": false,
"torch_dtype": "float32",
"transformers_version": "4.29.2"
}
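
For reference, a minimal sketch of inspecting these fields programmatically after downloading the file. The local path "config.json" is an assumption, and the per-head dimension is derived here (hidden_size / num_attention_heads = 512 / 8 = 64) rather than stated in the config itself.

import json

# Assumes config.json has been downloaded to the working directory.
with open("config.json") as f:
    config = json.load(f)

# Per-head dimension implied by the config: 512 / 8 = 64.
head_dim = config["hidden_size"] // config["num_attention_heads"]

print(config["model_type"])        # graphs_gpt
print(config["architectures"])     # ['GraphsGPTForCausalLM']
print(head_dim)                    # 64
print(config["atom_vocab_size"], config["bond_vocab_size"])  # 118 92

Note that "graphs_gpt" is not a built-in transformers model type, so instantiating GraphsGPTForCausalLM requires the repository's own modeling code rather than a stock AutoModel call.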