feat(dl_checkpoints): add --alpha option to checkpoint download script (#44)

This commit introduces the `--alpha` command-line option to the
`dl_checkpoints.sh` script. This allows AI subnet alpha participants
to download only the models supported by Livepeer.inc's Gateway node
during the alpha phase.
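
For reference, a minimal usage sketch (the invocations below assume the script is run from the runner directory; the HF_TOKEN value is a placeholder, not a real token):

# Default: download the full model set.
./dl_checkpoints.sh

# Alpha phase: download only the models supported by the Gateway node.
./dl_checkpoints.sh --alpha

# Token-gated models require Hugging Face authentication, either via
# 'huggingface-cli login' or the HF_TOKEN environment variable.
HF_TOKEN=<your_hf_token> ./dl_checkpoints.sh --alpha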
rickstaa authored Mar 27, 2024
1 parent 4c27fcd commit b769a2d
Showing 1 changed file with 71 additions and 25 deletions.
runner/dl_checkpoints.sh — 96 changes: 71 additions & 25 deletions
@@ -1,5 +1,21 @@
 #!/bin/bash
-echo "Starting livepeer AI subnet model downloader..."
+
+# Checks HF_TOKEN and huggingface-cli login status; warns and exits if not authenticated.
+check_hf_auth() {
+    if [ -z "$HF_TOKEN" ] && [ "$(huggingface-cli whoami)" = "Not logged in" ]; then
+        printf "WARN: Not logged in and HF_TOKEN not set. Log in with 'huggingface-cli login' or set HF_TOKEN to download token-gated models.\n"
+        exit 1
+    fi
+}
+
+# Displays help message.
+function display_help() {
+    echo "Description: This script is used to download models available on the Livepeer AI Subnet."
+    echo "Usage: $0 [--alpha]"
+    echo "Options:"
+    echo "  --alpha   Download alpha models."
+    echo "  --help    Display this help message."
+}
 
 # Enable HF transfer acceleration.
 # See: https://huggingface.co/docs/huggingface_hub/v0.22.1/package_reference/environment_variables#hfhubenablehftransfer.
@@ -8,36 +24,66 @@ export HF_HUB_ENABLE_HF_TRANSFER=1
 # Use HF_TOKEN if set, otherwise use huggingface-cli's login.
 [ -n "$HF_TOKEN" ] && TOKEN_FLAG="--token=${HF_TOKEN}" || TOKEN_FLAG=""
 
-echo "Creating models directory in the current working directory..."
+# Parse command-line arguments.
+MODE="all"
+for arg in "$@"
+do
+    case $arg in
+        --alpha)
+            MODE="alpha"
+            shift
+            ;;
+        --help)
+            display_help
+            exit 0
+            ;;
+        *)
+            shift
+            ;;
+    esac
+done
+
+echo "Starting livepeer AI subnet model downloader..."
+echo "Creating 'models' directory in the current working directory..."
 mkdir -p models
 
 # Ensure 'huggingface-cli' is installed.
 echo "Checking if 'huggingface-cli' is installed..."
 if ! command -v huggingface-cli > /dev/null 2>&1; then
     echo "WARN: The huggingface-cli is required to download models. Please install it using 'pip install huggingface_hub[cli,hf_transfer]'."
-    exit
+    exit 1
 fi
 
-# Download text-to-image and image-to-image models.
-printf "\nDownloading unrestricted models...\n"
-huggingface-cli download stabilityai/sd-turbo --include "*.fp16.safetensors" "*.json" "*.txt" --exclude ".onnx" ".onnx_data" --cache-dir models
-huggingface-cli download stabilityai/sdxl-turbo --include "*.fp16.safetensors" "*.json" "*.txt" --exclude ".onnx" ".onnx_data" --cache-dir models
-huggingface-cli download runwayml/stable-diffusion-v1-5 --include "*.fp16.safetensors" "*.json" "*.txt" --exclude ".onnx" ".onnx_data" --cache-dir models
-huggingface-cli download stabilityai/stable-diffusion-xl-base-1.0 --include "*.fp16.safetensors" "*.json" "*.txt" --exclude ".onnx" ".onnx_data" --cache-dir models
-huggingface-cli download prompthero/openjourney-v4 --include "*.safetensors" "*.json" "*.txt" --exclude ".onnx" ".onnx_data" --cache-dir models
-huggingface-cli download ByteDance/SDXL-Lightning --include "*unet.safetensors" --exclude "*lora.safetensors*" --cache-dir models
-
-# Download image-to-video models.
-huggingface-cli download stabilityai/stable-video-diffusion-img2vid-xt --include "*.fp16.safetensors" "*.json" --cache-dir models
-
-# Ensure user is logged in or has HF_TOKEN set to download token-gated models.
-printf "\nDownloading token-gated models...\n"
-if [ -z "$HF_TOKEN" ] && [ "$(huggingface-cli whoami)" = "Not logged in" ]; then
-    printf "WARN: Not logged in and HF_TOKEN not set. Log in with 'huggingface-cli login' or set HF_TOKEN to download token-gated models.\n"
-    exit
printf "Downloading %s models...\n" "$MODE"
if [ "$MODE" = "alpha" ]; then
printf "\nDownloading unrestricted models...\n"

# Download text-to-image and image-to-image models.
huggingface-cli download ByteDance/SDXL-Lightning --include "*unet.safetensors" --exclude "*lora.safetensors*" --cache-dir models

# Download image-to-video models (token-gated).
printf "\nDownloading token-gated models...\n"
check_hf_auth
huggingface-cli download stabilityai/stable-video-diffusion-img2vid-xt-1-1 --include "*.fp16.safetensors" "*.json" --cache-dir models ${TOKEN_FLAG}

printf "\nAlpha models downloaded successfully!\n"
else
# Download text-to-image and image-to-image models.
printf "\nDownloading unrestricted models...\n"
huggingface-cli download stabilityai/sd-turbo --include "*.fp16.safetensors" "*.json" "*.txt" --exclude ".onnx" ".onnx_data" --cache-dir models
huggingface-cli download stabilityai/sdxl-turbo --include "*.fp16.safetensors" "*.json" "*.txt" --exclude ".onnx" ".onnx_data" --cache-dir models
huggingface-cli download runwayml/stable-diffusion-v1-5 --include "*.fp16.safetensors" "*.json" "*.txt" --exclude ".onnx" ".onnx_data" --cache-dir models
huggingface-cli download stabilityai/stable-diffusion-xl-base-1.0 --include "*.fp16.safetensors" "*.json" "*.txt" --exclude ".onnx" ".onnx_data" --cache-dir models
huggingface-cli download prompthero/openjourney-v4 --include "*.safetensors" "*.json" "*.txt" --exclude ".onnx" ".onnx_data" --cache-dir models
huggingface-cli download ByteDance/SDXL-Lightning --include "*unet.safetensors" --exclude "*lora.safetensors*" --cache-dir models

# Download image-to-video models.
huggingface-cli download stabilityai/stable-video-diffusion-img2vid-xt --include "*.fp16.safetensors" "*.json" --cache-dir models

# Download image-to-video models (token-gated).
printf "\nDownloading token-gated models...\n"
check_hf_auth
huggingface-cli download stabilityai/stable-video-diffusion-img2vid-xt-1-1 --include "*.fp16.safetensors" "*.json" --cache-dir models ${TOKEN_FLAG}

printf "\nAll models downloaded successfully!\n"
fi

-# Download image-to-video models (token-gated).
-huggingface-cli download stabilityai/stable-video-diffusion-img2vid-xt-1-1 --include "*.fp16.safetensors" "*.json" --cache-dir models ${TOKEN_FLAG}
-
-printf "\nAll models downloaded successfully!\n"
