{
"architectures": [
"TokenFormerForCausalLM"
],
"num_layers": 40,
"hidden_size": 1536,
"num_attention_heads": 16,
"qkv_slot_num": 1536,
"proj_slot_num": 1536,
"ffn_slot_num": 6144,
"seq_length": 2048,
"max_position_embeddings": 2048,
"pos_emb": "rotary",
"rotary_pct": 0.25,
"no_weight_tying": false,
"norm": "layernorm_nonparam",
"final_norm": "layernorm",
"gpt_j_residual": false,
"output_layer_parallelism": "column",
"use_bias_in_attn_linear": false,
"attention_config": [[["tokenformer"], 40]],
"norm_activation_type": "l2_norm_gelu",
"scaled_upper_triang_masked_softmax_fusion": false,
"bias_gelu_fusion": false,
"rope_fusion": false,
"layernorm_fusion": false,
"init_method": "normal",
"output_layer_init_method": "wang_init",
"use_cache": true,
"torch_dtype": "float16",
"transformers_version": "4.36.0"
}
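
A minimal Python sketch that parses this config and derives a few quantities implied by its fields; the local filename "config.json" is an assumption, not something the file itself specifies.

import json

# Load the TokenFormer config shown above (assumed saved as config.json).
with open("config.json") as f:
    cfg = json.load(f)

# Per-head dimension follows from hidden_size / num_attention_heads.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]   # 1536 // 16 = 96

# rotary_pct gives the fraction of each head's dims that receive
# rotary position embeddings (pos_emb is "rotary" in this config).
rotary_dims = int(head_dim * cfg["rotary_pct"])               # 96 * 0.25 = 24

print(cfg["architectures"][0])        # TokenFormerForCausalLM
print(cfg["num_layers"], "layers")    # 40 layers
print(head_dim, "dims per head,", rotary_dims, "with rotary embedding")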