{ "model_type": "new", "vocab_size": 250002, "hidden_size": 1024, "num_hidden_layers": 24, "num_attention_heads": 16, "intermediate_size": 4096, "hidden_act": "gelu", "max_position_embeddings": 8194, "type_vocab_size": 1, "layer_norm_type": "layer_norm", "layer_norm_eps": 1e-5, "position_embedding_type": "rope", "rope_theta": 10000.0, "rope_scaling": null, "logn_attention_scale": false, "logn_attention_clip1": false, "architectures": ["GTEModel"], "model_max_length": 8194 }