(.venv) ➜ shark-ai git:(main) ✗ huggingface-cli login
[Hugging Face ASCII-art banner]
A token is already saved on your machine. Run `huggingface-cli whoami` to get more information or `huggingface-cli logout` if you want to log out.
Setting a new token will erase the existing one.
To log in, `huggingface_hub` requires a token generated from https://huggingface.co/settings/tokens .
Enter your token (input will not be visible):
Add token as git credential? (Y/n) Y
Token is valid (permission: fineGrained).
The token `chi` has been saved to /home/chi/.cache/huggingface/stored_tokens
Your token has been saved in your configured git credential helpers (store).
Your token has been saved to /home/chi/.cache/huggingface/token
Login successful.
Note: Environment variable `HF_TOKEN` is set and is the current active token independently from the token you've just configured.
(.venv) ➜ shark-ai git:(main) ✗ huggingface-cli whoami
AmosLewis
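
The note above matters for what follows: `HF_TOKEN` is set in the environment and takes precedence over the token just stored by `huggingface-cli login`. A minimal sketch (not part of the original session) for checking which token `huggingface_hub` will actually resolve and which account it belongs to:

# Sketch only: report whether HF_TOKEN is set and who the resolved token authenticates as.
import os
from huggingface_hub import get_token, whoami

print("HF_TOKEN set:", "HF_TOKEN" in os.environ)
active_token = get_token()  # resolves HF_TOKEN first, then the token saved by `huggingface-cli login`
print("Active identity:", whoami(token=active_token)["name"])
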
(.venv) ➜ shark-ai git:(main) ✗ python3 -m sharktank.examples.test
Traceback (most recent call last):
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/huggingface_hub/utils/_http.py", line 409, in hf_raise_for_status
    response.raise_for_status()
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/requests/models.py", line 1024, in raise_for_status
    raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/meta-llama/Llama-3.1-8B/resolve/main/config.json

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/transformers/utils/hub.py", line 403, in cached_file
    resolved_file = hf_hub_download(
                    ^^^^^^^^^^^^^^^^
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/huggingface_hub/utils/_validators.py", line 114, in _inner_fn
    return fn(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/huggingface_hub/file_download.py", line 961, in hf_hub_download
    return _hf_hub_download_to_cache_dir(
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/huggingface_hub/file_download.py", line 1068, in _hf_hub_download_to_cache_dir
    _raise_on_head_call_error(head_call_error, force_download, local_files_only)
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/huggingface_hub/file_download.py", line 1596, in _raise_on_head_call_error
    raise head_call_error
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/huggingface_hub/file_download.py", line 1484, in _get_metadata_or_catch_error
    metadata = get_hf_file_metadata(
               ^^^^^^^^^^^^^^^^^^^^^
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/huggingface_hub/utils/_validators.py", line 114, in _inner_fn
    return fn(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/huggingface_hub/file_download.py", line 1401, in get_hf_file_metadata
    r = _request_wrapper(
        ^^^^^^^^^^^^^^^^^
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/huggingface_hub/file_download.py", line 285, in _request_wrapper
    response = _request_wrapper(
               ^^^^^^^^^^^^^^^^^
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/huggingface_hub/file_download.py", line 309, in _request_wrapper
    hf_raise_for_status(response)
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/huggingface_hub/utils/_http.py", line 426, in hf_raise_for_status
    raise _format(GatedRepoError, message, response) from e
huggingface_hub.errors.GatedRepoError: 403 Client Error. (Request ID: Root=1-6814f441-07894d9430bd95ea1047de2d;21e2a43c-1d99-45d6-a740-033e393d861c)
Cannot access gated repo for url https://huggingface.co/meta-llama/Llama-3.1-8B/resolve/main/config.json.
Access to model meta-llama/Llama-3.1-8B is restricted and you are not in the authorized list. Visit https://huggingface.co/meta-llama/Llama-3.1-8B to ask for access.

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "<frozen runpy>", line 198, in _run_module_as_main
  File "<frozen runpy>", line 88, in _run_code
  File "/home/chi/src/shark-ai/sharktank/sharktank/examples/test.py", line 5, in <module>
    tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-3.1-8B")
                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/transformers/models/auto/tokenization_auto.py", line 891, in from_pretrained
    config = AutoConfig.from_pretrained(
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/transformers/models/auto/configuration_auto.py", line 1054, in from_pretrained
    config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs)
                                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/transformers/configuration_utils.py", line 591, in get_config_dict
    config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
                          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/transformers/configuration_utils.py", line 650, in _get_config_dict
    resolved_config_file = cached_file(
                           ^^^^^^^^^^^^
  File "/home/chi/src/shark-ai/.venv/lib/python3.12/site-packages/transformers/utils/hub.py", line 421, in cached_file
    raise EnvironmentError(
OSError: You are trying to access a gated repo.
Make sure to have access to it at https://huggingface.co/meta-llama/Llama-3.1-8B.
403 Client Error. (Request ID: Root=1-6814f441-07894d9430bd95ea1047de2d;21e2a43c-1d99-45d6-a740-033e393d861c)
Cannot access gated repo for url https://huggingface.co/meta-llama/Llama-3.1-8B/resolve/main/config.json.
Access to model meta-llama/Llama-3.1-8B is restricted and you are not in the authorized list. Visit https://huggingface.co/meta-llama/Llama-3.1-8B to ask for access.
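
The failure is a GatedRepoError: the account behind the active token (which, per the login note, may be the one in `HF_TOKEN` rather than the one just configured) has not been granted access to meta-llama/Llama-3.1-8B. Once access is approved on the model page, a sketch like the following should load the tokenizer; the `token` value is a hypothetical placeholder, and omitting it falls back to `HF_TOKEN` or the stored login token:

# Sketch only: assumes gated access to meta-llama/Llama-3.1-8B has been granted
# to the account whose token is passed here.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "meta-llama/Llama-3.1-8B",
    token="hf_xxx",  # hypothetical placeholder; drop this kwarg to use HF_TOKEN / the stored token
)
print(tokenizer("hello world")["input_ids"])
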