from transformers import AutoConfig, LlamaForCausalLM


def main() -> None:
    """Load the TinyLlama PY-CODER checkpoint and print its config as a dict.

    Downloads (or reads from the local HF cache) the 4-bit LoRA model,
    extracts its ``PretrainedConfig``, and prints the plain-dict form.
    """
    # NOTE: this triggers a network download on first run and loads the
    # full model just to inspect its config; AutoConfig.from_pretrained
    # on the same repo id would be cheaper if only the config is needed.
    model = LlamaForCausalLM.from_pretrained(
        "Ramikan-BR/tinyllama_PY-CODER-bnb-4bit-lora_model-4k"
    )
    config = model.config
    config_dict = config.to_dict()
    print(config_dict)


if __name__ == "__main__":
    main()