from transformers import AutoConfig, LlamaForCausalLM

# Load the fine-tuned TinyLlama checkpoint and read its configuration.
model = LlamaForCausalLM.from_pretrained("Ramikan-BR/tinyllama_PY-CODER-bnb-4bit-lora_model-4k")
config = model.config

# Dump the configuration as a plain dictionary and print it.
config_dict = config.to_dict()
print(config_dict)
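
# Alternative sketch: if only the configuration is needed, AutoConfig can
# fetch it directly without downloading or loading the model weights.
# This assumes the same repository id is reachable on the Hugging Face Hub.
config_only = AutoConfig.from_pretrained("Ramikan-BR/tinyllama_PY-CODER-bnb-4bit-lora_model-4k")
print(config_only.to_dict())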