MambaInLlama3B_v2 / mamba_config.json
{
  "d_model": 3072,
  "ssm_cfg": {
    "expand": 1
  },
  "rms_norm_eps": 1e-05,
  "vocab_size": null,
  "d_inner": null,
  "d_xb": 1024,
  "intermediate_size": 8192,
  "hidden_act": "silu",
  "n_layer": 28,
  "attn_layers": [
    3,
    8,
    13,
    18,
    23,
    27
  ]
}
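
For reference, the fields above appear to describe a hybrid Mamba/attention decoder: `d_model` is the hidden size, `n_layer` the total layer count, and `attn_layers` the indices of the layers that remain standard attention rather than Mamba SSM blocks (the rest use the options in `ssm_cfg`). Below is a minimal sketch of loading this file into a typed config using only the Python standard library; the `MambaConfig` dataclass and `load_mamba_config` helper are hypothetical illustrations, not the repository's actual API.

```python
import json
from dataclasses import dataclass, field
from typing import Optional

# Hypothetical container for mamba_config.json; field names mirror the JSON keys.
@dataclass
class MambaConfig:
    d_model: int               # hidden size (3072)
    n_layer: int               # total decoder layers (28)
    ssm_cfg: dict              # options forwarded to the Mamba SSM blocks
    rms_norm_eps: float        # epsilon for RMSNorm
    vocab_size: Optional[int]  # null here; presumably filled in from the tokenizer
    d_inner: Optional[int]     # null here; presumably derived as expand * d_model
    d_xb: int                  # SSM x/B projection width (1024)
    intermediate_size: int     # MLP width (8192)
    hidden_act: str            # MLP activation ("silu")
    attn_layers: list = field(default_factory=list)  # indices kept as attention

def load_mamba_config(path: str) -> MambaConfig:
    with open(path) as f:
        return MambaConfig(**json.load(f))

cfg = load_mamba_config("mamba_config.json")
# Every layer index not listed in attn_layers would be a Mamba block.
mamba_layers = [i for i in range(cfg.n_layer) if i not in cfg.attn_layers]
print(f"{len(cfg.attn_layers)} attention layers, {len(mamba_layers)} Mamba layers")
```

With this config, the sketch reports 6 attention layers and 22 Mamba layers, matching the six indices listed in `attn_layers` out of 28 total.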