|
import torch |
|
from typing import Tuple |
|
from transformers import AutoTokenizer, AutoModelForCausalLM |
|
|
|
|
|
# Module-level cache for the lazily loaded tokenizer; populated on the
# first call to get_model_and_tokenizer().
tokenizer = None

# Module-level cache for the lazily loaded causal-LM model; populated on
# the first call to get_model_and_tokenizer().
model = None
|
|
|
|
|
def get_model_and_tokenizer() -> Tuple[AutoModelForCausalLM, AutoTokenizer]:
    """Return the cached model and tokenizer, loading them on first use.

    The loaded objects are stored in the module-level ``model`` and
    ``tokenizer`` globals, so only the first call pays the download/load
    cost; every later call returns the cached pair.

    Returns:
        tuple: ``(model, tokenizer)`` — the causal-LM (moved to CUDA when
        available, otherwise CPU) and its matching tokenizer.
    """
    global model, tokenizer

    # Fast path: both objects are already cached.
    if model is not None and tokenizer is not None:
        return model, tokenizer

    # Prefer the GPU when one is visible; otherwise stay on CPU.
    target_device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    tokenizer = AutoTokenizer.from_pretrained(
        "MikeMpapa/lmd_mmm_tokenizer_tutorial_artist"
    )
    model = AutoModelForCausalLM.from_pretrained(
        "MikeMpapa/4_bar_lmd_clean_custom_epochs10"
    ).to(target_device)

    return model, tokenizer
|
|