runtime error
al/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 119, in _inner_fn
    return fn(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/hf_api.py", line 2418, in repo_info
    return method(
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 119, in _inner_fn
    return fn(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/hf_api.py", line 2228, in model_info
    hf_raise_for_status(r)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/utils/_errors.py", line 352, in hf_raise_for_status
    raise RepositoryNotFoundError(message, response) from e
huggingface_hub.utils._errors.RepositoryNotFoundError: 401 Client Error. (Request ID: Root=1-665d0182-7e2d532567aa2b240db9db08;9fa26d78-b524-4699-a2f1-ccc9828aa345)

Repository Not Found for url: https://huggingface.co./api/models/multitude0099/llama-2-chat-7b-recipegen-GGUF.
Please make sure you specified the correct `repo_id` and `repo_type`.
If you are trying to access a private or gated repo, make sure you are authenticated.
User Access Token "deepRL_course" is expired

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/home/user/app/app.py", line 70, in <module>
    llm = Llama.from_pretrained(
  File "/usr/local/lib/python3.10/site-packages/llama_cpp/llama.py", line 1909, in from_pretrained
    for file in hffs.ls(repo_id)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/hf_file_system.py", line 283, in ls
    resolved_path = self.resolve_path(path, revision=revision)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/hf_file_system.py", line 196, in resolve_path
    _raise_file_not_found(path, err)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/hf_file_system.py", line 863, in _raise_file_not_found
    raise FileNotFoundError(msg) from err
FileNotFoundError: multitude0099/llama-2-chat-7b-recipegen-GGUF (repository not found)
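
The chain of errors points to authentication rather than a missing file: the Hub answers with 401 because the User Access Token "deepRL_course" has expired, so HfFileSystem.ls cannot resolve the repository and Llama.from_pretrained aborts before downloading anything. Below is a minimal sketch of that loading step with explicit authentication; the HF_TOKEN secret name and the *.gguf filename pattern are placeholders, and the repo_id must name a repository that exists and is readable with the new token.

import os

from huggingface_hub import login
from llama_cpp import Llama

# Log in with a valid (non-expired) User Access Token before any Hub call.
# The token is read here from an HF_TOKEN environment variable / Space secret (assumed name).
login(token=os.environ["HF_TOKEN"])

# Load the GGUF model straight from the Hub. repo_id must point to an existing,
# accessible repository; the filename glob is a placeholder for the concrete
# quantization file shipped in that repo.
llm = Llama.from_pretrained(
    repo_id="multitude0099/llama-2-chat-7b-recipegen-GGUF",
    filename="*.gguf",
)

If the repository id itself is correct, rotating the token (for a Space, under its secrets settings) and restarting should clear the 401; if the repo was renamed, deleted, or made private, the repo_id has to be updated as well.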