gptneo-125m-ptuning / adapter_config.json
{
"auto_mapping": null,
"base_model_name_or_path": "EleutherAI/gpt-neo-125m",
"encoder_dropout": 0.0,
"encoder_hidden_size": 768,
"encoder_num_layers": 2,
"encoder_reparameterization_type": "MLP",
"inference_mode": true,
"num_attention_heads": 12,
"num_layers": 12,
"num_transformer_submodules": 1,
"num_virtual_tokens": 0,
"peft_type": "P_TUNING",
"revision": null,
"task_type": "CAUSAL_LM",
"token_dim": 768
}
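
This is the PEFT adapter configuration for a P-Tuning prompt encoder on top of EleutherAI/gpt-neo-125m. A minimal loading sketch follows, assuming the adapter weights are published alongside this config under the repo id Yeji-Seong/gptneo-125m-ptuning (inferred from the page header, not stated in the file) and that the transformers and peft packages are installed:

# Sketch: attach the P-Tuning adapter described by adapter_config.json
# to its base model. The repo id below is an assumption, not confirmed
# by the config file itself.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-125m")
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-125m")

# PeftModel.from_pretrained reads this adapter_config.json (peft_type
# "P_TUNING", task_type "CAUSAL_LM") and rebuilds the MLP prompt encoder;
# inference_mode=true means the adapter weights load frozen for inference.
model = PeftModel.from_pretrained(base_model, "Yeji-Seong/gptneo-125m-ptuning")
model.eval()

inputs = tokenizer("Hello, world", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

Fields such as token_dim, num_layers, and num_attention_heads mirror the GPT-Neo-125M architecture; peft normally derives them from the base model's config rather than having them set by hand. Note that num_virtual_tokens is 0 in this file, which is atypical for P-Tuning, since trained prompt encoders usually prepend a positive number of virtual tokens.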