@fortunto2
Created October 20, 2025 19:22
cmfy
services:
  comfyui:
    init: true
    container_name: comfyui
    build:
      context: .
      dockerfile: Dockerfile
    image: "yanwk/comfyui-boot:cu124-megapak"
    ports:
      - "8188:8188"
    restart: always
    volumes:
      - "./storage:/root"
      - "/home/azureuser/data/models:/ComfyUI/models"
    environment:
      - CLI_ARGS=
      - OPENAI_API_KEY=sk-***
    security_opt:
      - "label=type:nvidia_container_t"
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              device_ids: ['0']
              capabilities: [gpu]
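
A minimal way to bring this stack up, assuming the Compose file above is saved as docker-compose.yml next to the Dockerfile below (service name, port, and paths are the ones defined above):

# Build the image and start the ComfyUI service in the background
docker compose up -d --build
# Follow the service logs until the web UI reports it is listening on port 8188
docker compose logs -f comfyui
# Confirm the reserved GPU is visible inside the container
docker compose exec comfyui nvidia-smi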
################################################################################
# Dockerfile that builds 'yanwk/comfyui-boot:cu124-megapak'
# A big all-in-one package with many custom nodes.
# Using CUDA 12.4, Python 3.12, GCC 13.
# The container runs as root (easy for rootless deploy).
################################################################################
FROM docker.io/opensuse/tumbleweed:latest
LABEL maintainer="YAN Wenkun <[email protected]>"
RUN set -eu
################################################################################
# NVIDIA CUDA devel
# Ref: https://gitlab.com/nvidia/container-images/cuda/
# Break down the steps, so we have more but smaller image layers.
RUN --mount=type=cache,target=/var/cache/zypp \
    printf "\
[cuda-opensuse15-x86_64]\n\
name=cuda-opensuse15-x86_64\n\
baseurl=https://developer.download.nvidia.com/compute/cuda/repos/opensuse15/x86_64\n\
enabled=1\n\
gpgcheck=1\n\
gpgkey=https://developer.download.nvidia.com/compute/cuda/repos/opensuse15/x86_64/D42D0685.pub\n" \
        > /etc/zypp/repos.d/cuda-opensuse15.repo \
    && zypper --gpg-auto-import-keys \
        install --no-confirm --no-recommends --auto-agree-with-licenses \
            cuda-cccl-12-4 \
            cuda-command-line-tools-12-4 \
            cuda-compat-12-4 \
            cuda-cudart-12-4 \
            cuda-minimal-build-12-4 \
            cuda-nvcc-12-4 \
            cuda-nvprof-12-4 \
            cuda-nvtx-12-4 \
            libcublas-12-4 \
            libnpp-12-4
RUN --mount=type=cache,target=/var/cache/zypp \
    zypper --gpg-auto-import-keys \
        install --no-confirm --no-recommends --auto-agree-with-licenses \
            cuda-cudart-devel-12-4 \
            cuda-nvml-devel-12-4 \
            cuda-nvrtc-devel-12-4 \
            libcublas-devel-12-4 \
            libnpp-devel-12-4
RUN --mount=type=cache,target=/var/cache/zypp \
    zypper --gpg-auto-import-keys \
        install --no-confirm --no-recommends --auto-agree-with-licenses \
            cuda-libraries-12-4 \
            cuda-libraries-devel-12-4
ENV PATH="${PATH}:/usr/local/cuda-12.4/bin" \
    LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/local/cuda-12.4/lib64" \
    LIBRARY_PATH="${LIBRARY_PATH}:/usr/local/cuda-12.4/lib64/stubs" \
    CUDA_HOME="/usr/local/cuda-12.4"
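# (Not part of the original build) An optional sanity check that the CUDA 12.4
# toolchain installed above is reachable through the PATH/CUDA_HOME set here,
# e.g. by un-commenting:
# RUN nvcc --version && echo "${CUDA_HOME}"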
################################################################################
# Python and tools
# Since this image is so big, we use openSUSE-verified PIP packages for compatibility.
RUN --mount=type=cache,target=/var/cache/zypp \
    zypper addrepo --check --refresh --priority 90 \
        'https://ftp.gwdg.de/pub/linux/misc/packman/suse/openSUSE_Tumbleweed/Essentials/' packman-essentials \
    && zypper --gpg-auto-import-keys \
        install --no-confirm --auto-agree-with-licenses \
            python312-devel \
            python312-pip \
            python312-wheel \
            python312-setuptools \
            python312-Cython \
            python312-py-build-cmake \
            python312-aiohttp \
            python312-dbm \
            python312-ffmpeg-python \
            python312-GitPython \
            python312-httpx \
            python312-joblib \
            python312-lark \
            python312-matplotlib \
            python312-mpmath \
            python312-numba-devel \
            python312-numpy1 \
            python312-onnx \
            python312-opencv \
            python312-pandas \
            python312-qrcode \
            python312-rich \
            python312-scikit-build \
            python312-scikit-build-core-pyproject \
            python312-scikit-image \
            python312-scikit-learn \
            python312-scipy \
            python312-svglib \
            python312-tqdm \
    && rm /usr/lib64/python3.12/EXTERNALLY-MANAGED \
    && update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.12 100
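# (Not part of the original build) An optional check that python3 now resolves
# to the 3.12 interpreter registered above and that pip works after removing
# the EXTERNALLY-MANAGED marker, e.g.:
# RUN python3 --version && python3 -m pip --version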
RUN --mount=type=cache,target=/var/cache/zypp \
    zypper --gpg-auto-import-keys \
        install --no-confirm --auto-agree-with-licenses \
            Mesa-libGL1 \
            Mesa-libEGL-devel \
            libgthread-2_0-0 \
            make \
            ninja \
            git \
            aria2 \
            fish \
            fd \
            vim \
            opencv \
            opencv-devel \
            ffmpeg \
            x264 \
            x265 \
            google-noto-sans-fonts \
            google-noto-sans-cjk-fonts \
            google-noto-coloremoji-fonts
# Temp fix for OpenCV on openSUSE
ENV LD_PRELOAD=/usr/lib64/libjpeg.so.8
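# (Not part of the original image) With the libjpeg preload workaround in place,
# an optional import check for OpenCV could look like:
# RUN python3 -c "import cv2; print(cv2.__version__)"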
################################################################################
# GCC 13
# Required for compiling CUDA 12.4-related code.
RUN --mount=type=cache,target=/var/cache/zypp \
    zypper --gpg-auto-import-keys \
        install --no-confirm --auto-agree-with-licenses \
            gcc13 \
            gcc13-c++ \
            cpp13 \
    && update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++-13 90 \
    && update-alternatives --install /usr/bin/cc cc /usr/bin/gcc-13 90 \
    && update-alternatives --install /usr/bin/cpp cpp /usr/bin/cpp-13 90 \
    && update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-13 90 \
    && update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 90 \
    && update-alternatives --install /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 90 \
    && update-alternatives --install /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-13 90 \
    && update-alternatives --install /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-13 90 \
    && update-alternatives --install /usr/bin/gcov gcov /usr/bin/gcov-13 90 \
    && update-alternatives --install /usr/bin/gcov-dump gcov-dump /usr/bin/gcov-dump-13 90 \
    && update-alternatives --install /usr/bin/gcov-tool gcov-tool /usr/bin/gcov-tool-13 90
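# (Not part of the original build) An optional check that cc/gcc now point at
# the GCC 13 toolchain registered above:
# RUN gcc --version && g++ --version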
################################################################################
# Python Packages
# PyTorch, xFormers
# Break down the steps, so we have more but smaller image layers.
RUN --mount=type=cache,target=/root/.cache/pip \
    pip list \
    && pip install \
        --upgrade pip wheel setuptools \
    && pip install \
        --dry-run xformers torchvision torchaudio \
        --index-url https://download.pytorch.org/whl/cu124 \
        --extra-index-url https://pypi.org/simple
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install \
        --no-deps xformers torch torchvision torchaudio \
        --index-url https://download.pytorch.org/whl/cu124 \
        --extra-index-url https://pypi.org/simple
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install \
        xformers torchvision torchaudio \
        --index-url https://download.pytorch.org/whl/cu124 \
        --extra-index-url https://pypi.org/simple
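# (Not part of the original build) An optional check that the cu124 wheels were
# picked up; no GPU is visible during the build, so only the reported versions
# are meaningful here:
# RUN python3 -c "import torch, torchvision; print(torch.__version__, torch.version.cuda)"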
# Bind libs (.so files)
# Even though we have CUDA installed by Zypper, we still need to install the CUDA libs for Python in order to run PyTorch.
# What's more, NVIDIA's openSUSE15 repo doesn't provide cuDNN & NCCL, so we have to use the Python packages anyway.
ENV LD_LIBRARY_PATH="${LD_LIBRARY_PATH}\
:/usr/local/lib64/python3.12/site-packages/torch/lib\
:/usr/local/lib/python3.12/site-packages/cusparselt/lib\
:/usr/local/lib/python3.12/site-packages/nvidia/cuda_cupti/lib\
:/usr/local/lib/python3.12/site-packages/nvidia/cuda_runtime/lib\
:/usr/local/lib/python3.12/site-packages/nvidia/cudnn/lib\
:/usr/local/lib/python3.12/site-packages/nvidia/cufft/lib\
:/usr/local/lib/python3.12/site-packages/nvidia/cublas/lib\
:/usr/local/lib/python3.12/site-packages/nvidia/cuda_nvrtc/lib\
:/usr/local/lib/python3.12/site-packages/nvidia/curand/lib\
:/usr/local/lib/python3.12/site-packages/nvidia/cusolver/lib\
:/usr/local/lib/python3.12/site-packages/nvidia/cusparse/lib\
:/usr/local/lib/python3.12/site-packages/nvidia/nccl/lib\
:/usr/local/lib/python3.12/site-packages/nvidia/nvjitlink/lib\
:/usr/local/lib/python3.12/site-packages/nvidia/nvtx/lib"
RUN --mount=type=cache,target=/var/cache/zypp \
    zypper --gpg-auto-import-keys \
        install --no-confirm --no-recommends --auto-agree-with-licenses \
            libQt5OpenGL-devel libQt5OpenGL5
COPY builder-scripts/. /builder-scripts/
# Deps for ComfyUI & custom nodes
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install \
        -r /builder-scripts/pak3.txt
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install \
        -r /builder-scripts/pak5.txt
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install \
        -r /builder-scripts/pak7.txt
# Make sure the deps fit the needs for ComfyUI & Manager
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install \
        -r https://github.com/comfyanonymous/ComfyUI/raw/refs/heads/master/requirements.txt \
        -r https://github.com/ltdrdata/ComfyUI-Manager/raw/refs/heads/main/requirements.txt \
    && pip list
################################################################################
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install torchao --index-url https://download.pytorch.org/whl/cu124
#RUN du -ah /root \
# && find /root/ -mindepth 1 -delete
COPY runner-scripts/. /runner-scripts/
USER root
VOLUME /root
WORKDIR /root
EXPOSE 8188
ENV CLI_ARGS=""
CMD ["bash","/runner-scripts/entrypoint.sh"]
# 'cupy-cuda12x' for Frame Interpolation
# 'compel lark' for smZNodes
# 'torchdiffeq' for DepthFM
# 'fairscale' for APISR
accelerate
compel
cupy-cuda12x
diffusers
fairscale
ftfy
huggingface-hub[hf-transfer]
imageio
joblib
kornia
lark
matplotlib
# Temp fix for mediapipe
onnx==1.16.2
onnxruntime-gpu
opencv-contrib-python-headless
pandas
pilgram
pillow
pygit2
python-ffmpeg
regex
scikit-build-core
scikit-image
scikit-learn
scipy
timm
torchdiffeq
torchmetrics
transformers
albumentations
cachetools
clip-interrogator
color-matcher
colour-science
deepdiff
dill
einops
filelock
fvcore
GitPython
imageio-ffmpeg
importlib-metadata
matrix-client==0.4.0
mediapipe
mss
numba
numexpr
omegaconf
piexif
pixeloe
psutil
py-cpuinfo
PyGithub
pynvml
python-dateutil
pyyaml
qrcode[pil]
rembg
requirements-parser
rich
rich-argparse
safetensors
segment-anything
sentencepiece
simpleeval
soundfile
spandrel
svglib
tokenizers
toml
torchsde
tqdm
transparent-background
trimesh[easy]
typer
typing-extensions
ultralytics
uv
webcolors
yacs
yapf
dlib
facexlib
insightface
git+https://github.com/openai/CLIP.git
git+https://github.com/WASasquatch/cstr
git+https://github.com/WASasquatch/ffmpy.git
git+https://github.com/WASasquatch/img2texture.git
onnxruntime-gpu==1.20.1
onnxruntime==1.20.1
diffusers==0.32.1
protobuf
pytorch_lightning
librosa
huggingface-hub>=0.26.0
openai
triton>=3.1.0
mmcv
torch==2.5.1 --index-url https://download.pytorch.org/whl/cu124
torchaudio --index-url https://download.pytorch.org/whl/cu124
xformers --index-url https://download.pytorch.org/whl/cu124
tf-keras>=2.18.0