#!/usr/bin/env python3
import json
import os
import shutil
import tempfile
from pathlib import Path

# Point the Hugging Face cache at a writable location. This must happen
# BEFORE importing huggingface_hub, which reads these variables at import time.
os.environ["HF_HOME"] = os.path.join(tempfile.gettempdir(), "huggingface")
os.environ["HUGGINGFACE_HUB_CACHE"] = os.path.join(
    tempfile.gettempdir(), "huggingface", "hub"
)
os.makedirs(os.environ["HF_HOME"], exist_ok=True)
os.makedirs(os.environ["HUGGINGFACE_HUB_CACHE"], exist_ok=True)

from huggingface_hub import HfApi, hf_hub_download

print("Starting image download from Hugging Face dataset")

# Read configuration from the environment
HF_USERNAME = os.environ.get("HF_USERNAME", "")
HF_TOKEN = os.environ.get("HF_TOKEN", "")
DATASET_REPO = os.environ.get("HF_DATASET_REPO", "image-uploader-data")
REPO_ID = f"{HF_USERNAME}/{DATASET_REPO}"

# Paths inside the dataset repository
IMAGES_PATH = "images"
METADATA_PATH = "metadata"

# Local paths
UPLOAD_DIR = Path("static/uploads")
METADATA_DIR = Path("static/metadata")
METADATA_FILE = METADATA_DIR / "image_metadata.json"
DOWNLOAD_DIR = os.path.join(tempfile.gettempdir(), "hf_downloads")

# Create local directories if they don't exist
UPLOAD_DIR.mkdir(parents=True, exist_ok=True)
METADATA_DIR.mkdir(parents=True, exist_ok=True)

# Initialize the Hub client
hf_api = HfApi(token=HF_TOKEN)

try:
    # Check that the repository exists (raises if it doesn't)
    print(f"Checking if repository {REPO_ID} exists")
    hf_api.repo_info(repo_id=REPO_ID, repo_type="dataset")

    # Download metadata first
    print(f"Downloading metadata from {REPO_ID}")
    try:
        metadata_file = hf_hub_download(
            repo_id=REPO_ID,
            filename=f"{METADATA_PATH}/image_metadata.json",
            repo_type="dataset",
            token=HF_TOKEN,
            local_dir=DOWNLOAD_DIR,
        )
        print(f"Metadata downloaded to {metadata_file}")
        with open(metadata_file, "r") as f:
            metadata = json.load(f)

        # Save metadata locally
        with open(METADATA_FILE, "w") as f:
            json.dump(metadata, f)
    except Exception as e:
        print(f"Error downloading metadata: {e}")
        # Fall back to an empty metadata file so downstream code finds valid JSON
        with open(METADATA_FILE, "w") as f:
            json.dump({}, f)

    # List all files in the dataset
    print("Listing files in the dataset")
    files = hf_api.list_repo_files(repo_id=REPO_ID, repo_type="dataset")

    # Keep only files under the images/ prefix
    image_files = [f for f in files if f.startswith(f"{IMAGES_PATH}/")]
    print(f"Found {len(image_files)} images")

    # Download each image and copy it into the uploads directory
    for image_file in image_files:
        try:
            filename = os.path.basename(image_file)
            print(f"Downloading {filename}")
            download_path = hf_hub_download(
                repo_id=REPO_ID,
                filename=image_file,
                repo_type="dataset",
                token=HF_TOKEN,
                local_dir=DOWNLOAD_DIR,
            )
            destination = UPLOAD_DIR / filename
            shutil.copyfile(download_path, destination)
            print(f"Saved {filename} to {destination}")
        except Exception as e:
            print(f"Error downloading {image_file}: {e}")

    print("Image download completed")
except Exception as e:
    print(f"Error: {e}")
    print("Creating empty metadata file")
    with open(METADATA_FILE, "w") as f:
        json.dump({}, f)
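
# --- Usage sketch (illustrative; not part of the original script) ---
# The script is configured entirely through environment variables. The values
# below and the filename "download_images.py" are assumptions for the example,
# not names defined anywhere in this repository.
#
#   export HF_USERNAME="your-username"
#   export HF_TOKEN="hf_..."                        # a Hugging Face access token
#   export HF_DATASET_REPO="image-uploader-data"    # optional; this is the default
#   python download_images.py
#
# On success, images land in static/uploads/ and the metadata snapshot in
# static/metadata/image_metadata.json; on any failure an empty metadata file
# is written so consumers of it always find valid JSON.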