{
  "model_type": "mamba",
  "d_model": 512,
  "d_intermediate": 0,
  "n_layer": 6,
  "vocab_size": 1880,
  "ssm_cfg": {
    "layer": "Mamba2"
  },
  "attn_layer_idx": [2, 5],
  "attn_cfg": {
    "num_heads": 8,
    "head_dim": 64,
    "rotary_emb_dim": 32,
    "causal": true,
    "softmax_scale": null,
    "qkv_proj_bias": true,
    "out_proj_bias": true,
    "mlp_dim": 0
  },
  "rms_norm": true,
  "residual_in_fp32": true,
  "fused_add_norm": true,
  "pad_vocab_size_multiple": 8,
  "tie_embeddings": true,
  "dropout_rate": 0.1,
  "eos_token_id": 2,
  "bos_token_id": 1,
  "pad_token_id": 3,
  "num_labels": 1,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "summary_activation": "tanh",
  "summary_proj_to_labels": true,
  "summary_first_dropout": 0.1,
  "summary_hidden_size": 128
}
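This config describes a small hybrid Mamba model: six layers at d_model 512, with Mamba2 SSM mixers everywhere except layers 2 and 5, which attn_layer_idx replaces with 8-head causal attention (head_dim 64, partial rotary embeddings of dim 32). Below is a minimal sketch of instantiating it, assuming the state-spaces mamba_ssm package, a CUDA device, and a local config.json holding the JSON above. Note that model_type, dropout_rate, the token ids, num_labels, and the summary_* keys are not fields of mamba_ssm's MambaConfig dataclass (they are assumed here to belong to a downstream sequence-classification wrapper), so the sketch filters them out.

import json
import dataclasses

import torch
from mamba_ssm.models.config_mamba import MambaConfig
from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

# Load the JSON above (assumed saved as config.json).
with open("config.json") as f:
    raw = json.load(f)

# Keep only the keys MambaConfig actually declares; the extra keys
# (dropout_rate, token ids, summary_*, ...) are assumed to be consumed
# by a separate classification head, not by the backbone.
fields = {f.name for f in dataclasses.fields(MambaConfig)}
cfg = MambaConfig(**{k: v for k, v in raw.items() if k in fields})

model = MambaLMHeadModel(cfg, device="cuda", dtype=torch.bfloat16)

# Layers 2 and 5 are multi-head attention (attn_layer_idx); the other
# four layers use the Mamba2 mixer selected by ssm_cfg["layer"].
input_ids = torch.randint(0, cfg.vocab_size, (1, 128), device="cuda")
logits = model(input_ids).logits  # shape: (1, 128, vocab_size)

Since vocab_size (1880) is already a multiple of pad_vocab_size_multiple (8), no padding rows are added to the tied embedding / LM-head matrix in this particular config.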