# ComfyUI RunPod Serverless - CUDA 12.8.1, Python 3.12, PyTorch 2.8.0
FROM nvidia/cuda:12.8.1-devel-ubuntu22.04

ENV DEBIAN_FRONTEND=noninteractive
ENV PYTHONUNBUFFERED=1
ENV PIP_NO_CACHE_DIR=1

# CUDA environment
ENV LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH
ENV LIBRARY_PATH=/usr/local/cuda/lib64/stubs:$LIBRARY_PATH
ENV PATH=/usr/local/cuda/bin:$PATH

# HuggingFace cache paths (will be symlinked to network volume)
ENV HF_HOME=/workspace/.cache/huggingface
ENV HF_HUB_ENABLE_HF_TRANSFER=1
ENV TRANSFORMERS_CACHE=/workspace/.cache/huggingface/transformers

# Install system dependencies and add deadsnakes PPA for Python 3.12
RUN apt-get update && apt-get install -y software-properties-common && \
    add-apt-repository -y ppa:deadsnakes/ppa && \
    apt-get update && apt-get install -y \
    python3.12 \
    python3.12-dev \
    python3.12-venv \
    python3-pip \
    git \
    git-lfs \
    wget \
    curl \
    ffmpeg \
    libgl1-mesa-glx \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender-dev \
    libgomp1 \
    build-essential \
    ninja-build \
    && rm -rf /var/lib/apt/lists/*

# Set Python 3.12 as default and bootstrap pip
RUN update-alternatives --install /usr/bin/python python /usr/bin/python3.12 1 && \
    update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.12 1 && \
    python3.12 -m ensurepip --upgrade && \
    python3.12 -m pip install --upgrade pip setuptools wheel

# Install PyTorch 2.8.0+cu128 and triton 3.4.0 in a single layer
RUN pip install \
    torch==2.8.0+cu128 \
    torchvision==0.23.0+cu128 \
    torchaudio==2.8.0+cu128 \
    triton==3.4.0 \
    --index-url https://download.pytorch.org/whl/cu128 && \
    rm -rf /root/.cache/pip /tmp/*

# Install nunchaku from GitHub wheel
RUN pip install https://github.com/nunchaku-tech/nunchaku/releases/download/v1.0.2/nunchaku-1.0.2+torch2.8-cp312-cp312-linux_x86_64.whl

# Install key dependencies before SageAttention
COPY requirements.txt /tmp/requirements.txt
RUN pip install -r /tmp/requirements.txt && rm -rf /root/.cache/pip

# Compile SageAttention 2.2 from source with no build isolation
WORKDIR /tmp
ENV EXT_PARALLEL=4
ENV NVCC_APPEND_FLAGS="--threads 8"
ENV MAX_JOBS=32
RUN git clone https://github.com/thu-ml/SageAttention.git && \
    cd SageAttention && \
    pip install --no-build-isolation . && \
    cd / && rm -rf /tmp/SageAttention /root/.cache/pip

# Clone ComfyUI
WORKDIR /workspace
RUN git clone https://github.com/comfyanonymous/ComfyUI.git && \
    cd ComfyUI && \
    pip install -r requirements.txt

# Install custom nodes
WORKDIR /workspace/ComfyUI/custom_nodes
RUN git clone https://github.com/ltdrdata/ComfyUI-Manager.git && \
    git clone https://github.com/jnxmx/ComfyUI_HuggingFace_Downloader.git && \
    git clone https://github.com/kijai/ComfyUI-KJNodes.git && \
    git clone https://github.com/Fannovel16/comfyui_controlnet_aux.git && \
    git clone https://github.com/crystian/ComfyUI-Crystools.git && \
    git clone https://github.com/Kosinkadink/ComfyUI-VideoHelperSuite.git && \
    git clone https://github.com/willmiao/ComfyUI-Lora-Manager.git && \
    git clone https://github.com/city96/ComfyUI-GGUF.git && \
    git clone https://github.com/Fannovel16/ComfyUI-Frame-Interpolation.git && \
    git clone https://github.com/nunchaku-tech/ComfyUI-nunchaku.git && \
    git clone https://github.com/evanspearman/ComfyMath.git && \
    git clone https://github.com/ssitu/ComfyUI_UltimateSDUpscale.git

# Install custom node dependencies (single layer)
RUN (cd ComfyUI-KJNodes && pip install -r requirements.txt || true) && \
    (cd comfyui_controlnet_aux && pip install -r requirements.txt || true) && \
    (cd ComfyUI-VideoHelperSuite && pip install -r requirements.txt || true) && \
    (cd ComfyUI-GGUF && pip install -r requirements.txt || true) && \
    (cd ComfyUI-Frame-Interpolation && pip install -r requirements.txt || true) && \
    (cd ComfyUI-nunchaku && pip install -r requirements.txt || true) && \
    rm -rf /root/.cache/pip /tmp/*

# Create directories and symlinks to network volume
WORKDIR /workspace/ComfyUI
RUN mkdir -p /userdata/models/checkpoints \
    /userdata/models/loras \
    /userdata/models/vae \
    /userdata/models/controlnet \
    /userdata/models/clip \
    /userdata/models/upscale_models \
    /userdata/.cache/huggingface \
    /workspace/.cache

# Symlink model directories to /userdata
RUN rm -rf models/checkpoints && ln -s /userdata/models/checkpoints models/checkpoints && \
    rm -rf models/loras && ln -s /userdata/models/loras models/loras && \
    rm -rf models/vae && ln -s /userdata/models/vae models/vae && \
    rm -rf models/controlnet && ln -s /userdata/models/controlnet models/controlnet && \
    rm -rf models/clip && ln -s /userdata/models/clip models/clip && \
    rm -rf models/upscale_models && ln -s /userdata/models/upscale_models models/upscale_models

# Symlink HuggingFace cache
RUN ln -s /userdata/.cache/huggingface /workspace/.cache/huggingface

# Copy handler and workflows
WORKDIR /workspace
COPY handler.py /workspace/handler.py
COPY workflows /workspace/workflows

# RunPod handler entrypoint
CMD ["python", "-u", "handler.py"]
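
# Usage sketch (not part of the build; the image tag, registry, and volume path below
# are placeholders, not defined anywhere in this repo):
#   docker build -t <registry>/comfyui-runpod:cu128-py312 .
#   docker push <registry>/comfyui-runpod:cu128-py312
# Local smoke test with a directory standing in for the RunPod network volume mounted
# at /userdata, which the symlinked model and HF cache paths above expect:
#   docker run --rm --gpus all -v /path/to/network-volume:/userdata <registry>/comfyui-runpod:cu128-py312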