{
  "architectures": [
    "ScalarModel"
  ],
  "base_config": {
    "_name_or_path": "jdchang/tldr_sft_pythia_2.8",
    "architectures": [
      "GPTNeoXForCausalLM"
    ],
    "eos_token_id": 0,
    "hidden_size": 2560,
    "intermediate_size": 10240,
    "model_type": "gpt_neox",
    "num_attention_heads": 32,
    "num_hidden_layers": 32,
    "torch_dtype": "bfloat16",
    "vocab_size": 50304
  },
  "base_model": "jdchang/tldr_sft_pythia_2.8",
  "bias": 2.53739437142289,
  "hidden_size": 2560,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.35.0"
}
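The config above describes a scalar reward model: a GPTNeoX (Pythia-2.8B) backbone from jdchang/tldr_sft_pythia_2.8 with a single scalar output head, plus a stored "bias" used to center the reward. Below is a minimal sketch of how such a model could be wired up; the class name ScalarModelSketch, the use of AutoModel, and the subtraction of the stored bias from the head output are assumptions made for illustration, not the actual ScalarModel implementation that this config refers to.

```python
import torch
import torch.nn as nn
from transformers import AutoConfig, AutoModel


class ScalarModelSketch(nn.Module):
    """Illustrative sketch only: GPTNeoX backbone + linear scalar head.

    Assumptions (not taken from the source config's implementation):
    - the head is a plain nn.Linear(hidden_size, 1),
    - the stored "bias" (2.53739437142289) is subtracted from the head output
      to center rewards.
    """

    def __init__(self,
                 base_model: str = "jdchang/tldr_sft_pythia_2.8",
                 bias: float = 2.53739437142289):
        super().__init__()
        config = AutoConfig.from_pretrained(base_model)
        # Backbone matches base_config: hidden_size=2560, 32 layers, bfloat16.
        self.backbone = AutoModel.from_pretrained(
            base_model, torch_dtype=torch.bfloat16
        )
        self.scalar_head = nn.Linear(config.hidden_size, 1)  # 2560 -> 1
        self.bias = bias

    def forward(self, input_ids, attention_mask=None):
        hidden = self.backbone(
            input_ids, attention_mask=attention_mask
        ).last_hidden_state                      # (batch, seq, hidden_size)
        # One score per position, shifted by the stored bias; callers would
        # typically read off the score at the final (EOS) token.
        return self.scalar_head(hidden) - self.bias  # (batch, seq, 1)
```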