import os

from huggingface_hub import snapshot_download
from transformers.utils.hub import move_cache

hf_token: str | None = os.getenv("HF_TOKEN")
if hf_token is None:
    raise ValueError("HF_TOKEN is not set")
hf_token = hf_token.strip()
if hf_token == "":
    raise ValueError("HF_TOKEN is empty")

# This is about 2.47 GB
snapshot_download(
    repo_id="meta-llama/Llama-3.2-1B-Instruct",
    revision="9213176726f574b556790deb65791e0c5aa438b6",
    token=hf_token,
)

# This is about 3.67 GB
snapshot_download(
    repo_id="sail/Sailor-4B-Chat",
    revision="89a866a7041e6ec023dd462adeca8e28dd53c83e",
    token=hf_token,
)

# Migrate any old-style transformers cache to the shared hub cache layout.
# https://github.com/huggingface/transformers/issues/20428
move_cache()
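
Once the snapshots are cached, the app can load them without touching the network. A minimal sketch of how that might look (the choice of model and the loading code below are illustrative, not part of the script above); passing the same pinned revision and local_files_only=True makes the load fail fast instead of re-downloading if the cache is missing:

from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the snapshot cached above; local_files_only=True forbids any download.
model_id = "meta-llama/Llama-3.2-1B-Instruct"
revision = "9213176726f574b556790deb65791e0c5aa438b6"
tokenizer = AutoTokenizer.from_pretrained(model_id, revision=revision, local_files_only=True)
model = AutoModelForCausalLM.from_pretrained(model_id, revision=revision, local_files_only=True)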