# Setup script: authenticate with the Hugging Face Hub and load the
# "NinjaMasker-PII-Redaction" causal-LM and its tokenizer.
#
# NOTE(review): the original began with the IPython magic
# `!pip install transformers`, which is only valid inside a notebook cell.
# Install the dependency from a shell instead:
#   pip install transformers huggingface_hub

from huggingface_hub import notebook_login
from transformers import AutoModelForCausalLM, AutoTokenizer, logging, pipeline

# Log in to the Hugging Face Hub (interactive widget in a notebook).
# Required if the model repo is gated or private.
notebook_login()

# Suppress transformers warnings; only critical messages are emitted.
logging.set_verbosity(logging.CRITICAL)

# Download (or load from cache) the PII-redaction model and tokenizer.
model_name = "King-Harry/NinjaMasker-PII-Redaction"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)