Cannot load model

#63 by a-yildiz - opened

Python 3.11.5
transformers == 4.46.0

I'm getting a KeyError when trying to load the model. What is the problem here? (This happens after token authentication succeeds.) Thank you!

In [7]: # Load model directly
...: from transformers import AutoProcessor, AutoModelForPreTraining
...:
...: processor = AutoProcessor.from_pretrained("meta-llama/Llama-3.2-11B-Vision")
...: model = AutoModelForPreTraining.from_pretrained("meta-llama/Llama-3.2-11B-Vision")

KeyError Traceback (most recent call last)
Cell In[7], line 4
1 # Load model directly
2 from transformers import AutoProcessor, AutoModelForPreTraining
----> 4 processor = AutoProcessor.from_pretrained("meta-llama/Llama-3.2-11B-Vision")
5 model = AutoModelForPreTraining.from_pretrained("meta-llama/Llama-3.2-11B-Vision")

File ~/anaconda3/lib/python3.11/site-packages/transformers/models/auto/processing_auto.py:257, in AutoProcessor.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
    254 if processor_class is None:
    255     # Otherwise, load config, if it can be loaded.
    256     if not isinstance(config, PretrainedConfig):
--> 257         config = AutoConfig.from_pretrained(
    258             pretrained_model_name_or_path, trust_remote_code=trust_remote_code, **kwargs
    259         )
    261     # And check if the config contains the processor class.
    262     processor_class = getattr(config, "processor_class", None)

File ~/anaconda3/lib/python3.11/site-packages/transformers/models/auto/configuration_auto.py:1022, in AutoConfig.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
   1020     return config_class.from_pretrained(pretrained_model_name_or_path, **kwargs)
   1021 elif "model_type" in config_dict:
-> 1022     config_class = CONFIG_MAPPING[config_dict["model_type"]]
   1023     return config_class.from_dict(config_dict, **unused_kwargs)
   1024 else:
   1025     # Fallback: use pattern matching on the string.
   1026     # We go from longer names to shorter names to catch roberta before bert (for instance)

File ~/anaconda3/lib/python3.11/site-packages/transformers/models/auto/configuration_auto.py:723, in _LazyConfigMapping.__getitem__(self, key)
    721     return self._extra_content[key]
    722 if key not in self._mapping:
--> 723     raise KeyError(key)
    724 value = self._mapping[key]
    725 module_name = model_type_to_module_name(key)

KeyError: 'mllama'
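
For context, KeyError: 'mllama' means the transformers build that the interpreter actually imports does not know the mllama model type (support landed in transformers 4.45.0), so the anaconda3 environment in the traceback is likely picking up an older install than the one pip reports. A quick sanity check, as a minimal sketch (CONFIG_MAPPING is the same registry the traceback fails in):

import transformers
from transformers.models.auto.configuration_auto import CONFIG_MAPPING

print(transformers.__version__)    # needs >= 4.45.0 for the "mllama" model type
print(transformers.__file__)       # confirms which install this kernel is importing
print("mllama" in CONFIG_MAPPING)  # True once a new-enough version is active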

Try the instruct variant; its model ID is meta-llama/Llama-3.2-11B-Vision-Instruct.

Try this code:

import torch
from transformers import AutoProcessor, BitsAndBytesConfig, MllamaForConditionalGeneration

# Load the weights in 4-bit NF4 to fit the 11B model on a single GPU
bnb_config = BitsAndBytesConfig(
  load_in_4bit=True,
  bnb_4bit_quant_type="nf4",
  bnb_4bit_compute_dtype=torch.bfloat16,
)

model_id = "meta-llama/Llama-3.2-11B-Vision-Instruct"
hf_token = "YOUR HF TOKEN"

model = MllamaForConditionalGeneration.from_pretrained(
  model_id,
  quantization_config=bnb_config,
  token=hf_token,
)

processor = AutoProcessor.from_pretrained(model_id, token=hf_token)
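
Once the model and processor load, a minimal inference sketch (the image URL and prompt are placeholders; the chat-template flow is the documented pattern for the Instruct checkpoint):

import requests
from PIL import Image

# Placeholder URL; any RGB image works here.
image = Image.open(requests.get("https://example.com/image.jpg", stream=True).raw)

# The Instruct model expects a chat-formatted prompt with an image slot
messages = [
    {"role": "user", "content": [
        {"type": "image"},
        {"type": "text", "text": "Describe this image in one sentence."},
    ]}
]
input_text = processor.apply_chat_template(messages, add_generation_prompt=True)
inputs = processor(image, input_text, return_tensors="pt").to(model.device)

output = model.generate(**inputs, max_new_tokens=40)
print(processor.decode(output[0], skip_special_tokens=True))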
