Mirror of https://github.com/comfyanonymous/ComfyUI.git
[RUNPOD-1] Integrate serverless handler from System-Nebula fork
Commit 954ca43d0a (parent 16d85ea133)
Dockerfile.runpod · Normal file · 92 lines added
@@ -0,0 +1,92 @@
# Build argument for base image selection
ARG BASE_IMAGE=nvidia/cuda:12.6.3-cudnn-runtime-ubuntu24.04

# ----------------------
# Stage 1: Base
# ----------------------
FROM ${BASE_IMAGE} AS base

ARG COMFYUI_VERSION=v0.3.76
ARG CUDA_VERSION_FOR_COMFY
ARG ENABLE_PYTORCH_UPGRADE=false
ARG PYTORCH_INDEX_URL

ENV DEBIAN_FRONTEND=noninteractive
ENV PIP_PREFER_BINARY=1
ENV PYTHONUNBUFFERED=1
ENV CMAKE_BUILD_PARALLEL_LEVEL=8

# Install system dependencies, including git-lfs
RUN apt-get update && apt-get install -y \
    python3.12 python3.12-venv git git-lfs wget \
    libgl1 libglib2.0-0 libsm6 libxext6 libxrender1 \
    ffmpeg \
    espeak-ng libespeak-ng1 \
    build-essential \
    && git lfs install \
    && ln -sf /usr/bin/python3.12 /usr/bin/python \
    && ln -sf /usr/bin/pip3 /usr/bin/pip \
    && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/*

# uv + venv
RUN wget -qO- https://astral.sh/uv/install.sh | sh \
    && ln -s /root/.local/bin/uv /usr/local/bin/uv \
    && ln -s /root/.local/bin/uvx /usr/local/bin/uvx \
    && uv venv /opt/venv
ENV PATH="/opt/venv/bin:${PATH}"

# comfy-cli is used to install ComfyUI
RUN uv pip install comfy-cli pip setuptools wheel

# Install ComfyUI (NVIDIA build), pinning the CUDA version when one is given
RUN if [ -n "${CUDA_VERSION_FOR_COMFY}" ]; then \
        /usr/bin/yes | comfy --workspace /comfyui install --version "${COMFYUI_VERSION}" --cuda-version "${CUDA_VERSION_FOR_COMFY}" --nvidia; \
    else \
        /usr/bin/yes | comfy --workspace /comfyui install --version "${COMFYUI_VERSION}" --nvidia; \
    fi

# Optional: upgrade the torch stack from a custom index
RUN if [ "$ENABLE_PYTORCH_UPGRADE" = "true" ]; then \
        uv pip install --force-reinstall torch torchvision torchaudio --index-url ${PYTORCH_INDEX_URL}; \
    fi

WORKDIR /comfyui
ADD src/extra_model_paths.yaml ./
WORKDIR /

# Runtime dependencies for the handler and snapshot_download
RUN uv pip install runpod requests websocket-client huggingface-hub

ADD src/start.sh handler.py test_input.json ./
RUN chmod +x /start.sh

COPY scripts/comfy-node-install.sh /usr/local/bin/comfy-node-install
RUN chmod +x /usr/local/bin/comfy-node-install
COPY scripts/comfy-manager-set-mode.sh /usr/local/bin/comfy-manager-set-mode
RUN chmod +x /usr/local/bin/comfy-manager-set-mode

ENV PIP_NO_INPUT=1
ENV OTEL_PYTHON_LOGGING_AUTO_INSTRUMENTATION_ENABLED=true
CMD ["opentelemetry-instrument", "--traces_exporter", "otlp", "--metrics_exporter", "otlp", "--logs_exporter", "otlp", "/start.sh"]

# ----------------------
# Stage 2: Downloader
# ----------------------
FROM base AS downloader
WORKDIR /comfyui
RUN mkdir -p models/checkpoints models/vae models/unet models/clip

# ----------------------
# Stage 3: Final
# ----------------------
FROM base AS final

COPY --from=downloader /comfyui/models /comfyui/models

# Install the OpenTelemetry distro and OTLP exporter
RUN pip install opentelemetry-distro && pip install opentelemetry-exporter-otlp

RUN opentelemetry-bootstrap --action=install
RUN comfy-node-install https://github.com/city96/ComfyUI-GGUF
RUN comfy-node-install https://github.com/rgthree/rgthree-comfy
RUN comfy-node-install https://github.com/ClownsharkBatwing/RES4LYF
RUN comfy-node-install https://github.com/giriss/comfy-image-saver

# Install both nodes; skip the Git LFS smudge step for ComfyUI-IndexTTS2
RUN comfy-node-install https://github.com/Enemyx-net/VibeVoice-ComfyUI && \
    GIT_LFS_SKIP_SMUDGE=1 comfy-node-install https://github.com/snicolast/ComfyUI-IndexTTS2

COPY Input/ /comfyui/input/
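For reference, a build invocation that exercises these arguments could look like the following sketch; the image tag, CUDA version, and PyTorch index URL are illustrative placeholders rather than values required by this Dockerfile, and leaving CUDA_VERSION_FOR_COMFY unset simply takes the else-branch with comfy-cli's default CUDA build:

    docker build -f Dockerfile.runpod \
        --build-arg COMFYUI_VERSION=v0.3.76 \
        --build-arg CUDA_VERSION_FOR_COMFY=12.6 \
        --build-arg ENABLE_PYTORCH_UPGRADE=true \
        --build-arg PYTORCH_INDEX_URL=https://download.pytorch.org/whl/cu126 \
        --target final \
        -t comfyui-runpod:dev .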
scripts/comfy-node-install.sh · Executable file · 44 lines added
@@ -0,0 +1,44 @@
#!/usr/bin/env bash
# comfy-node-install: install custom ComfyUI nodes and fail with a non-zero
# exit code if any of them cannot be installed. On failure it prints the
# list of nodes that could not be installed and hints the user to consult
# https://registry.comfy.org/ for correct names.
set -euo pipefail

if [[ $# -eq 0 ]]; then
    echo "Usage: comfy-node-install <node1> [<node2> …]" >&2
    exit 64 # EX_USAGE
fi

log=$(mktemp)

# Run the installation. Some modes return non-zero even on success, so we
# ignore the exit status here and rely on log parsing instead.
set +e
comfy node install --mode=remote "$@" 2>&1 | tee "$log"
cli_status=$?
set -e

# Extract node names that failed to install (one per line, uniq-sorted).
failed_nodes=$(grep -oP "(?<=An error occurred while installing ')[^']+" "$log" | sort -u || true)

# Fallback: capture names from "Node '<name>@' not found" lines if the previous grep found nothing.
if [[ -z "$failed_nodes" ]]; then
    failed_nodes=$(grep -oP "(?<=Node ')[^@']+" "$log" | sort -u || true)
fi

if [[ -n "$failed_nodes" ]]; then
    echo "Comfy node installation failed for the following nodes:" >&2
    echo "$failed_nodes" | while read -r n; do echo " • $n" >&2; done
    echo >&2
    echo "Please verify the node names at https://registry.comfy.org/ and try again." >&2
    exit 1
fi

# If we reach this point, no failed nodes were detected. Warn if the CLI exit
# status was non-zero, but treat it as success.
if [[ $cli_status -ne 0 ]]; then
    echo "Warning: comfy node install exited with status $cli_status but no errors were detected in the log; assuming success." >&2
fi

exit 0
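As a usage sketch, the wrapper accepts one or more node references exactly as comfy node install does. The Git URL form below mirrors the Dockerfile's final stage; the registry-name form and the failing name are hypothetical examples, so check https://registry.comfy.org/ for real names:

    # Git URL form, as used in Dockerfile.runpod
    comfy-node-install https://github.com/rgthree/rgthree-comfy

    # Registry-name form (name shown here is illustrative)
    comfy-node-install comfyui-gguf

    # A bad name makes the wrapper exit non-zero, which fails the Docker build layer
    comfy-node-install not-a-real-node || echo "install failed with status $?"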
scripts/runpod_handler.py · Normal file · 1011 lines added
File diff suppressed because it is too large.
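Although the handler diff is suppressed, the surrounding files show how it is meant to be exercised: the Dockerfile copies test_input.json next to handler.py, which the RunPod Python SDK picks up as the test job when the handler is run directly. A minimal local smoke test might look like this; the payload shape (a "workflow" key) is an assumption about the handler's schema, not something confirmed by this diff:

    # Inside the container: run one job from test_input.json and exit
    python -u /handler.py

    # Or pass a payload inline via the RunPod SDK's --test_input flag
    python -u /handler.py --test_input '{"input": {"workflow": {}}}'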
scripts/runpod_start.sh · Executable file · 26 lines added
@@ -0,0 +1,26 @@
#!/usr/bin/env bash

# Use libtcmalloc for better memory management
TCMALLOC="$(ldconfig -p | grep -Po "libtcmalloc.so.\d" | head -n 1)"
export LD_PRELOAD="${TCMALLOC}"

# Ensure ComfyUI-Manager runs in offline network mode inside the container
comfy-manager-set-mode offline || echo "worker-comfyui - Could not set ComfyUI-Manager network_mode" >&2

echo "worker-comfyui: Starting ComfyUI"

# Allow operators to tweak verbosity; default is DEBUG.
: "${COMFY_LOG_LEVEL:=DEBUG}"

# Serve the API locally and don't shut down the container
if [ "$SERVE_API_LOCALLY" == "true" ]; then
    python -u /comfyui/main.py --disable-auto-launch --disable-metadata --listen --verbose "${COMFY_LOG_LEVEL}" --log-stdout &

    echo "worker-comfyui: Starting RunPod Handler"
    python -u /handler.py --rp_serve_api --rp_api_host=0.0.0.0
else
    python -u /comfyui/main.py --disable-auto-launch --disable-metadata --verbose "${COMFY_LOG_LEVEL}" --log-stdout &

    echo "worker-comfyui: Starting RunPod Handler"
    python -u /handler.py
fi
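For local development, the SERVE_API_LOCALLY branch above exposes both ComfyUI and the handler's HTTP test API. A sketch of running the image in that mode follows; the image tag is a placeholder, 8188 is ComfyUI's default port, and 8000 is the RunPod SDK's default port for --rp_serve_api:

    docker run --gpus all --rm \
        -e SERVE_API_LOCALLY=true \
        -e COMFY_LOG_LEVEL=INFO \
        -p 8188:8188 -p 8000:8000 \
        comfyui-runpod:dev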