Linly-ChatFlow/config/llama_13b_config.json
{
"emb_size": 5120,
"feedforward_size": 13824,
"hidden_size": 5120,
"hidden_act": "silu",
"heads_num": 40,
"layers_num": 40,
"dropout": 0.1,
"data_processor": "lm",
"max_seq_length": 2048,
"embedding": ["word"],
"remove_transformer_bias": true,
"remove_embedding_layernorm": true,
"rotary_position_embedding": true,
"encoder": "transformer",
"feed_forward": "gated",
"mask": "causal",
"layernorm_positioning": "pre",
"layernorm": "rms",
"target": ["lm"]
}
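
As a quick sanity check of the dimensions above, here is a minimal Python sketch (not part of the repository) that loads the file, derives the per-head width, and estimates the parameter count. The vocabulary size of 32000 is an assumption (LLaMA's SentencePiece tokenizer size); it is not recorded in this config.

# Hypothetical helper, assuming the config sits at the repo path shown above.
import json

def summarize_config(path: str, vocab_size: int = 32000) -> None:
    with open(path) as f:
        cfg = json.load(f)

    hidden = cfg["hidden_size"]      # 5120
    heads = cfg["heads_num"]         # 40
    layers = cfg["layers_num"]       # 40
    ffn = cfg["feedforward_size"]    # 13824

    # Each attention head covers hidden_size / heads_num dimensions.
    head_dim = hidden // heads       # 5120 / 40 = 128
    assert head_dim * heads == hidden, "hidden_size must divide evenly by heads_num"

    # Rough parameter count: Q/K/V/output projections (4 * hidden^2) plus the
    # gated feed-forward ("feed_forward": "gated" => gate, up, down matrices).
    per_layer = 4 * hidden * hidden + 3 * hidden * ffn
    # Untied input embedding and LM head, each vocab_size x hidden (assumed).
    total = layers * per_layer + 2 * vocab_size * hidden

    print(f"head_dim        = {head_dim}")
    print(f"params (approx) = {total / 1e9:.1f}B")

summarize_config("config/llama_13b_config.json")

With the values in this file, the sketch reports head_dim = 128 and roughly 13.0B parameters, consistent with the "13b" in the filename.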