# Install bitsandbytes:
# `nvcc --version` to get CUDA version.
# `pip install -i https://test.pypi.org/simple/ bitsandbytes-cudaXXX` to install for current CUDA.
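# For example, if `nvcc --version` reports release 11.6, the matching package would
# likely be named `bitsandbytes-cuda116` (assuming the cudaXXX suffix is the CUDA
# major+minor version with the dot dropped):
# `pip install -i https://test.pypi.org/simple/ bitsandbytes-cuda116`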
# Example Usage:
# Single GPU: torchrun --nproc_per_node=1 trainer/diffusers_trainer.py --model="CompVis/stable-diffusion-v1-4" --run_name="liminal" --dataset="liminal-dataset" --hf_token="hf_blablabla" --bucket_side_min=64 --use_8bit_adam=True --gradient_checkpointing=True --batch_size=1 --fp16=True --image_log_steps=250 --epochs=20 --resolution=768 --use_ema=True
# Multiple GPUs: torchrun --nproc_per_node=N trainer/diffusers_trainer.py --model="CompVis/stable-diffusion-v1-4" --run_name="liminal" --dataset="liminal-dataset" --hf_token="hf_blablabla" --bucket_side_min=64 --use_8bit_adam=True --gradient_checkpointing=True --batch_size=10 --fp16=True --image_log_steps=250 --epochs=20 --resolution=768 --use_ema=True
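# In the multi-GPU command, N is the number of GPUs on the node (e.g. --nproc_per_node=4
# on a 4-GPU machine); each process works on its own --batch_size, so the effective global
# batch size is roughly N * batch_size (assuming no gradient accumulation).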
import argparse
import socket
import torch
#!/bin/bash
# Install deps
apt-get update -y
apt-get install htop screen psmisc python3-pip unzip wget gcc g++ nano -y
# Install Python deps
wget https://gist.githubusercontent.com/chavinlo/fe8afc02e03d9cc4eb545c4c306c8a73/raw/d9a5ad446fe662dc3e6597163a1f8d5546a8a795/requirements.txt
pip install -r requirements.txt OmegaConf
pip install triton==2.0.0.dev20221120
diffusers>=0.5.1
numpy==1.23.4
wandb==0.13.4
torch
torchvision
transformers>=4.21.0
huggingface-hub>=0.10.0
Pillow==9.2.0
tqdm==4.64.1
ftfy==6.1.1
# Script for converting a HF Diffusers saved pipeline to a Stable Diffusion checkpoint.
# *Only* converts the UNet, VAE, and Text Encoder.
# Does not convert the optimizer state or anything else.
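# A typical invocation might look like the following (the script filename and flag
# names here are assumptions; check the argparse definitions below for the real ones):
# `python convert_diffusers_to_sd.py --model_path ./my_diffusers_pipeline --checkpoint_path ./model.ckpt`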
import argparse
import os.path as osp
import torch
#!/bin/bash
# Install deps
apt-get update -y
apt-get install htop screen psmisc python3-pip unzip wget gcc g++ nano -y
# Install Python deps
wget https://gist.githubusercontent.com/chavinlo/41e062890b91cde16ac146719a669308/raw/9ebca64f70379a6012c804e4a39ede1db4a6b665/requirements.txt
pip install -r requirements.txt OmegaConf
pip install triton==2.0.0.dev20221120
conda install xformers -c xformers/label/dev
#!/bin/bash
# Install deps
apt-get update -y
apt-get install htop screen psmisc python3-pip unzip wget gcc g++ -y
# Install Python deps
wget https://gist.githubusercontent.com/chavinlo/04330ffe95223f7a0a42de81526199b7/raw/b1c19df0e694936e1dadc7082d2326d6382b5d15/requirements.txt
pip install -r requirements.txt OmegaConf