EvaByte-SFT / config.json
{
  "_name_or_path": null,
  "architectures": [
    "EvaByteForCausalLM"
  ],
  "attention_bias": false,
  "attention_class": "eva",
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_evabyte.EvaByteConfig",
    "AutoModelForCausalLM": "modeling_evabyte.EvaByteForCausalLM"
  },
  "bos_token_id": 1,
  "chunk_size": 16,
  "eos_token_id": 11,
  "fp32_ln": false,
  "fp32_logits": true,
  "fp32_skip_add": true,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "init_cutoff_factor": null,
  "init_fn": "v2",
  "init_std": 0.01275,
  "initializer_range": 0.01275,
  "intermediate_size": 11008,
  "lazy_init": true,
  "max_position_embeddings": 32768,
  "max_seq_length": 32768,
  "mixedp_attn": true,
  "model_type": "evabyte",
  "norm_add_unit_offset": true,
  "num_attention_heads": 32,
  "num_chunks": null,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "num_pred_heads": 8,
  "pad_token_id": 0,
  "return_dict": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 100000,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.47.1",
  "use_cache": true,
  "vocab_size": 320,
  "window_size": 2048
}
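
For reference, a minimal loading sketch using the transformers auto classes. The repo id below is an assumption inferred from the page title, not confirmed by this file; adjust it to the actual Hugging Face repository. Because the `auto_map` entries above point at custom `configuration_evabyte` / `modeling_evabyte` modules shipped with the checkpoint, loading requires `trust_remote_code=True`.

```python
# A minimal loading sketch, assuming the checkpoint lives at
# "linzheng/EvaByte-SFT" (hypothetical repo id inferred from the page title).
import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "linzheng/EvaByte-SFT"  # assumption; replace with the real repo id

# Loading the config alone fetches config.json plus the custom
# configuration_evabyte.py referenced in auto_map.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.model_type, config.vocab_size)  # "evabyte", 320

# Load the weights in bfloat16, matching the "torch_dtype" field above.
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
)
```

Passing `torch_dtype=torch.bfloat16` keeps the in-memory dtype consistent with the serialized weights; omitting it would fall back to the library default.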