minGRU-LM / config.json
suayptalha's picture
Update config.json
b975b95 verified
raw
history blame
511 Bytes
{
"model_type": "mingru",
"_name_or_path": "mingru",
"architectures": [
"MinGRULMForCausalLM"
],
"auto_map": {
"AutoConfig": "configuration_minGRULM.MinGRULMConfig",
"AutoModelForCausalLM": "modeling_minGRULM.MinGRULMForCausalLM"
},
"bos_token_id": 0,
"eos_token_id": 0,
"num_tokens": 50257,
"dim": 512,
"depth": 12,
"ff_mult": 4,
"min_gru_expansion": 1.5,
"enable_conv": false,
"pad_token_id": 0,
"torch_dtype": "float32",
"transformers_version": "4.46.0.dev0"
}