# Textwizai / load_model.py
# Author: Erfan11 — commit cc090e8 ("Update load_model.py")
from transformers import AutoModelForCausalLM, AutoTokenizer
def load_model(model_name: str = "Erfan11/Neuracraft"):
    """Load a causal-LM checkpoint and its tokenizer from the Hugging Face Hub.

    Args:
        model_name: Hub repo id (or local path) of the checkpoint to load.
            Defaults to "Erfan11/Neuracraft", preserving the original behavior.

    Returns:
        A ``(model, tokenizer)`` tuple.

    Note:
        Downloads weights on first call (network/disk I/O); subsequent calls
        hit the local Hugging Face cache.
    """
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return model, tokenizer