I am getting an error while running `pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)` followed by `pipe.enable_model_cpu_offload()` in an Anaconda Jupyter notebook.
The error I get is:
HTTPError Traceback (most recent call last)
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/utils/_http.py:409, in hf_raise_for_status(response, endpoint_name)
408 try:
--> 409 response.raise_for_status()
410 except HTTPError as e:
File /opt/anaconda3/lib/python3.12/site-packages/requests/models.py:1024, in Response.raise_for_status(self)
1023 if http_error_msg:
-> 1024 raise HTTPError(http_error_msg, response=self)
HTTPError: 401 Client Error: Unauthorized for url: https://huggingface.co./black-forest-labs/FLUX.1-dev/resolve/main/model_index.json
The above exception was the direct cause of the following exception:
GatedRepoError Traceback (most recent call last)
Cell In[12], line 1
----> 1 pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)
2 pipe.enable_model_cpu_offload()
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.._inner_fn(*args, **kwargs)
111 if check_use_auth_token:
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.name, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)
File /opt/anaconda3/lib/python3.12/site-packages/diffusers/pipelines/pipeline_utils.py:732, in DiffusionPipeline.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
727 if pretrained_model_name_or_path.count("/") > 1:
728 raise ValueError(
729 f'The provided pretrained_model_name_or_path "{pretrained_model_name_or_path}"'
730 " is neither a valid local path nor a valid repo id. Please check the parameter."
731 )
--> 732 cached_folder = cls.download(
733 pretrained_model_name_or_path,
734 cache_dir=cache_dir,
735 force_download=force_download,
736 proxies=proxies,
737 local_files_only=local_files_only,
738 token=token,
739 revision=revision,
740 from_flax=from_flax,
741 use_safetensors=use_safetensors,
742 use_onnx=use_onnx,
743 custom_pipeline=custom_pipeline,
744 custom_revision=custom_revision,
745 variant=variant,
746 load_connected_pipeline=load_connected_pipeline,
747 **kwargs,
748 )
749 else:
750 cached_folder = pretrained_model_name_or_path
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.._inner_fn(*args, **kwargs)
111 if check_use_auth_token:
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.name, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)
File /opt/anaconda3/lib/python3.12/site-packages/diffusers/pipelines/pipeline_utils.py:1333, in DiffusionPipeline.download(cls, pretrained_model_name, **kwargs)
1329 logger.warning(warn_msg)
1331 model_filenames, variant_filenames = variant_compatible_siblings(filenames, variant=variant)
-> 1333 config_file = hf_hub_download(
1334 pretrained_model_name,
1335 cls.config_name,
1336 cache_dir=cache_dir,
1337 revision=revision,
1338 proxies=proxies,
1339 force_download=force_download,
1340 token=token,
1341 )
1343 config_dict = cls._dict_from_json_file(config_file)
1344 ignore_filenames = config_dict.pop("_ignore_files", [])
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.._inner_fn(*args, **kwargs)
111 if check_use_auth_token:
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.name, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/file_download.py:862, in hf_hub_download(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, local_dir, user_agent, force_download, proxies, etag_timeout, token, local_files_only, headers, endpoint, resume_download, force_filename, local_dir_use_symlinks)
842 return _hf_hub_download_to_local_dir(
843 # Destination
844 local_dir=local_dir,
(...)
859 local_files_only=local_files_only,
860 )
861 else:
--> 862 return _hf_hub_download_to_cache_dir(
863 # Destination
864 cache_dir=cache_dir,
865 # File info
866 repo_id=repo_id,
867 filename=filename,
868 repo_type=repo_type,
869 revision=revision,
870 # HTTP info
871 endpoint=endpoint,
872 etag_timeout=etag_timeout,
873 headers=hf_headers,
874 proxies=proxies,
875 token=token,
876 # Additional options
877 local_files_only=local_files_only,
878 force_download=force_download,
879 )
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/file_download.py:969, in _hf_hub_download_to_cache_dir(cache_dir, repo_id, filename, repo_type, revision, endpoint, etag_timeout, headers, proxies, token, local_files_only, force_download)
966 return pointer_path
968 # Otherwise, raise appropriate error
--> 969 _raise_on_head_call_error(head_call_error, force_download, local_files_only)
971 # From now on, etag, commit_hash, url and size are not None.
972 assert etag is not None, "etag must have been retrieved from server"
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/file_download.py:1486, in _raise_on_head_call_error(head_call_error, force_download, local_files_only)
1477 raise LocalEntryNotFoundError(
1478 "Cannot find the requested files in the disk cache and outgoing traffic has been disabled. To enable"
1479 " hf.co look-ups and downloads online, set 'local_files_only' to False."
1480 )
1481 elif isinstance(head_call_error, (RepositoryNotFoundError, GatedRepoError)) or (
1482 isinstance(head_call_error, HfHubHTTPError) and head_call_error.response.status_code == 401
1483 ):
1484 # Repo not found or gated => let's raise the actual error
1485 # Unauthorized => likely a token issue => let's raise the actual error
-> 1486 raise head_call_error
1487 else:
1488 # Otherwise: most likely a connection issue or Hub downtime => let's warn the user
1489 raise LocalEntryNotFoundError(
1490 "An error happened while trying to locate the file on the Hub and we cannot find the requested files"
1491 " in the local cache. Please check your connection and try again or make sure your Internet connection"
1492 " is on."
1493 ) from head_call_error
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/file_download.py:1376, in _get_metadata_or_catch_error(repo_id, filename, repo_type, revision, endpoint, proxies, etag_timeout, headers, token, local_files_only, relative_filename, storage_folder)
1374 try:
1375 try:
-> 1376 metadata = get_hf_file_metadata(
1377 url=url, proxies=proxies, timeout=etag_timeout, headers=headers, token=token
1378 )
1379 except EntryNotFoundError as http_error:
1380 if storage_folder is not None and relative_filename is not None:
1381 # Cache the non-existence of the file
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.._inner_fn(*args, **kwargs)
111 if check_use_auth_token:
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.name, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/file_download.py:1296, in get_hf_file_metadata(url, token, proxies, timeout, library_name, library_version, user_agent, headers)
1293 hf_headers["Accept-Encoding"] = "identity" # prevent any compression => we want to know the real size of the file
1295 # Retrieve metadata
-> 1296 r = _request_wrapper(
1297 method="HEAD",
1298 url=url,
1299 headers=hf_headers,
1300 allow_redirects=False,
1301 follow_relative_redirects=True,
1302 proxies=proxies,
1303 timeout=timeout,
1304 )
1305 hf_raise_for_status(r)
1307 # Return
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/file_download.py:280, in _request_wrapper(method, url, follow_relative_redirects, **params)
278 # Recursively follow relative redirects
279 if follow_relative_redirects:
--> 280 response = _request_wrapper(
281 method=method,
282 url=url,
283 follow_relative_redirects=False,
284 **params,
285 )
287 # If redirection, we redirect only relative paths.
288 # This is useful in case of a renamed repository.
289 if 300 <= response.status_code <= 399:
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/file_download.py:304, in _request_wrapper(method, url, follow_relative_redirects, **params)
302 # Perform request and return if status_code is not in the retry list.
303 response = get_session().request(method=method, url=url, **params)
--> 304 hf_raise_for_status(response)
305 return response
File /opt/anaconda3/lib/python3.12/site-packages/huggingface_hub/utils/_http.py:426, in hf_raise_for_status(response, endpoint_name)
422 elif error_code == "GatedRepo":
423 message = (
424 f"{response.status_code} Client Error." + "\n\n" + f"Cannot access gated repo for url {response.url}."
425 )
--> 426 raise _format(GatedRepoError, message, response) from e
428 elif error_message == "Access to this resource is disabled.":
429 message = (
430 f"{response.status_code} Client Error."
431 + "\n\n"
(...)
434 + "Access to this resource is disabled."
435 )
GatedRepoError: 401 Client Error. (Request ID: Root=1-67bf5b01-165a183b19288fa661ac369b;6187e0b8-7120-416d-abb9-7f634a976236)
Cannot access gated repo for url https://huggingface.co./black-forest-labs/FLUX.1-dev/resolve/main/model_index.json.
Access to model black-forest-labs/FLUX.1-dev is restricted. You must have access to it and be authenticated to access it. Please log in.
This happens while running `pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)` and `pipe.enable_model_cpu_offload()` in an Anaconda Jupyter notebook.
The `GatedRepoError: 401 Client Error` means the FLUX.1-dev repository is gated: you must be granted access and be authenticated before you can download it. To fix it:
1. Open the model page (https://huggingface.co./black-forest-labs/FLUX.1-dev), and fill in the access request form to get access rights.
2. In your Hugging Face account, go to Settings → Access Tokens and create a token with at least read permission.
3. Authenticate in your environment with that token — run `huggingface-cli login` in a terminal, or in the notebook run `from huggingface_hub import login; login(token="hf_...")` — then re-run the `from_pretrained` call.