Text Classification
PEFT
Safetensors
English

Incorrect path_or_model_id

#3
by Machlovi - opened

Hi, I am trying to use this model but am encountering this error:

from peft import PeftModel, PeftConfig
from transformers import AutoModelForCausalLM

# Load the adapter config first, then resolve the base model from it instead of
# hard-coding a local snapshot path. The hard-coded
# ".../models--meta-llama--LlamaGuard-7b/snapshots/..." path raised
# "OSError: Incorrect path_or_model_id" because it was not a valid local model
# directory; `config.base_model_name_or_path` is the Hub id the adapter was
# trained against and works regardless of the local cache layout.
config = PeftConfig.from_pretrained("nvidia/Aegis-AI-Content-Safety-LlamaGuard-Permissive-1.0")
base_model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path)
model = PeftModel.from_pretrained(base_model, "nvidia/Aegis-AI-Content-Safety-LlamaGuard-Permissive-1.0")

OSError: Incorrect path_or_model_id: '/workspace/hugging_face/hub/models--meta-llama--LlamaGuard-7b/snapshots/3e764390d6b39028ddea5b20603c89476107b41e/'. Please provide either the path to a local folder or the repo_id of a model on the Hub.

This repository contains a PEFT adapter, not a standalone model — you must load the base LlamaGuard model first and then apply this adapter on top of it.

    def __init__(self, adapter_id="nvidia/Aegis-AI-Content-Safety-LlamaGuard-Permissive-1.0"):
        """Load the LlamaGuard base model, its tokenizer, and the Aegis PEFT adapter.

        Args:
            adapter_id: Hub repo id of the Aegis content-safety adapter that is
                applied on top of the base LlamaGuard model.
        """
        base_id = "meta-llama/LlamaGuard-7b"

        # Base LlamaGuard-7B weights, loaded in bfloat16 and placed across
        # available devices by accelerate's "auto" device map.
        self.base_model = AutoModelForCausalLM.from_pretrained(
            base_id,
            torch_dtype=torch.bfloat16,
            device_map="auto",
            trust_remote_code=True,
        )

        # Tokenizer that matches the base checkpoint.
        self.tokenizer = AutoTokenizer.from_pretrained(base_id)

        # Wrap the base model with the Aegis adapter; this is the model that
        # should be used for inference.
        self.model = PeftModel.from_pretrained(
            self.base_model,
            adapter_id,
            device_map="auto",
        )

Sign up or log in to comment