{ "_name_or_path": "/home/rwkv/RWKVInside/configs/qwen_7b_stage2_gatefree_freezeMLP_HF", "architectures": [ "RwkvHybridForCausalLM" ], "attention_dropout": 0.0, "auto_map": { "AutoConfig": "configuration_rwkv_hybrid.RwkvHybridConfig", "AutoModelForCausalLM": "modeling_rwkv_hybrid.RwkvHybridForCausalLM" }, "bos_token_id": 151643, "eos_token_id": 151645, "head_size": 64, "head_size_divisor": 8, "hidden_act": "silu", "hidden_size": 3584, "initializer_range": 0.02, "intermediate_size": 18944, "max_position_embeddings": 32768, "max_window_layers": 28, "model_type": "rwkv_hybrid", "num_attention_heads": 28, "num_hidden_layers": 28, "num_key_value_heads": 4, "num_wkv_heads": 56, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 1000000.0, "sliding_window": null, "tie_word_embeddings": false, "torch_dtype": "float16", "transformers_version": "4.48.1", "use_cache": true, "use_sliding_window": false, "vocab_size": 152064, "wkv_has_gate": false, "wkv_has_group_norm": false, "wkv_layers": [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27 ], "wkv_use_vfirst": true, "wkv_version": 7 }