mirror of
https://github.com/Comfy-Org/ComfyUI-Manager.git
synced 2026-03-14 21:47:37 +08:00
feat(cli): expand --uv-compile to all node management commands with conflict attribution (#2682)
* feat(cli): expand --uv-compile to all node management commands with conflict attribution Add --uv-compile flag to reinstall, update, fix, restore-snapshot, restore-dependencies, and install-deps commands. Each skips per-node pip installs and runs batch uv pip compile after all operations. Change CollectedDeps.sources type to dict[str, list[tuple[str, str]]] to store (pack_path, pkg_spec) per requester. On resolution failure, _run_unified_resolve() cross-references conflict packages with sources using word-boundary regex and displays which node packs requested each conflicting package. Update EN/KO user docs and DESIGN/PRD developer docs to cover the expanded commands and conflict attribution output. Strengthen unit tests for sources tuple format and compile failure attribution. Bump version to 4.1b3. * refactor(cli): extract _finalize_resolve helper, add CNR nightly fallback and pydantic guard - Extract `_finalize_resolve()` to eliminate 7x duplicated uv-compile error handling blocks in cm_cli (~-85 lines) - Move conflict attribution regex to `attribute_conflicts()` in unified_dep_resolver.py for direct testability - Update 4 attribution tests to call production function instead of re-implementing regex - Add CNR nightly fallback: when node is absent from nightly manifest, fall back to cnr_map repository URL (glob + legacy) - Add pydantic Union guard: use getattr for is_unknown in uninstall and disable handlers to prevent Union type mismatch - Add E2E test suites for endpoint install/uninstall and uv-compile CLI commands (conflict + success cases) - Add nightly CNR fallback regression tests
This commit is contained in:
parent
f042d73b72
commit
d7a2277017
@ -688,21 +688,7 @@ def install(
|
|||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
||||||
for_each_nodes(nodes, act=install_node, exit_on_fail=exit_on_fail)
|
for_each_nodes(nodes, act=install_node, exit_on_fail=exit_on_fail)
|
||||||
|
|
||||||
if uv_compile:
|
_finalize_resolve(pip_fixer, uv_compile)
|
||||||
try:
|
|
||||||
_run_unified_resolve()
|
|
||||||
except ImportError as e:
|
|
||||||
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
|
|
||||||
raise typer.Exit(1)
|
|
||||||
except typer.Exit:
|
|
||||||
raise
|
|
||||||
except Exception as e:
|
|
||||||
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
|
|
||||||
raise typer.Exit(1)
|
|
||||||
finally:
|
|
||||||
pip_fixer.fix_broken()
|
|
||||||
else:
|
|
||||||
pip_fixer.fix_broken()
|
|
||||||
|
|
||||||
|
|
||||||
@app.command(help="Reinstall custom nodes")
|
@app.command(help="Reinstall custom nodes")
|
||||||
@ -729,6 +715,14 @@ def reinstall(
|
|||||||
help="Skip installing any Python dependencies",
|
help="Skip installing any Python dependencies",
|
||||||
),
|
),
|
||||||
] = False,
|
] = False,
|
||||||
|
uv_compile: Annotated[
|
||||||
|
Optional[bool],
|
||||||
|
typer.Option(
|
||||||
|
"--uv-compile",
|
||||||
|
show_default=False,
|
||||||
|
help="After reinstalling, batch-resolve all dependencies via uv pip compile",
|
||||||
|
),
|
||||||
|
] = False,
|
||||||
user_directory: str = typer.Option(
|
user_directory: str = typer.Option(
|
||||||
None,
|
None,
|
||||||
help="user directory"
|
help="user directory"
|
||||||
@ -736,11 +730,20 @@ def reinstall(
|
|||||||
):
|
):
|
||||||
cmd_ctx.set_user_directory(user_directory)
|
cmd_ctx.set_user_directory(user_directory)
|
||||||
cmd_ctx.set_channel_mode(channel, mode)
|
cmd_ctx.set_channel_mode(channel, mode)
|
||||||
cmd_ctx.set_no_deps(no_deps)
|
|
||||||
|
if uv_compile and no_deps:
|
||||||
|
print("[bold red]--uv-compile and --no-deps are mutually exclusive.[/bold red]")
|
||||||
|
raise typer.Exit(1)
|
||||||
|
|
||||||
|
if uv_compile:
|
||||||
|
cmd_ctx.set_no_deps(True)
|
||||||
|
else:
|
||||||
|
cmd_ctx.set_no_deps(no_deps)
|
||||||
|
|
||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
||||||
for_each_nodes(nodes, act=reinstall_node)
|
for_each_nodes(nodes, act=reinstall_node)
|
||||||
pip_fixer.fix_broken()
|
|
||||||
|
_finalize_resolve(pip_fixer, uv_compile)
|
||||||
|
|
||||||
|
|
||||||
@app.command(help="Uninstall custom nodes")
|
@app.command(help="Uninstall custom nodes")
|
||||||
@ -785,10 +788,21 @@ def update(
|
|||||||
None,
|
None,
|
||||||
help="user directory"
|
help="user directory"
|
||||||
),
|
),
|
||||||
|
uv_compile: Annotated[
|
||||||
|
Optional[bool],
|
||||||
|
typer.Option(
|
||||||
|
"--uv-compile",
|
||||||
|
show_default=False,
|
||||||
|
help="After updating, batch-resolve all dependencies via uv pip compile",
|
||||||
|
),
|
||||||
|
] = False,
|
||||||
):
|
):
|
||||||
cmd_ctx.set_user_directory(user_directory)
|
cmd_ctx.set_user_directory(user_directory)
|
||||||
cmd_ctx.set_channel_mode(channel, mode)
|
cmd_ctx.set_channel_mode(channel, mode)
|
||||||
|
|
||||||
|
if uv_compile:
|
||||||
|
cmd_ctx.set_no_deps(True)
|
||||||
|
|
||||||
if 'all' in nodes:
|
if 'all' in nodes:
|
||||||
asyncio.run(auto_save_snapshot())
|
asyncio.run(auto_save_snapshot())
|
||||||
|
|
||||||
@ -800,7 +814,8 @@ def update(
|
|||||||
break
|
break
|
||||||
|
|
||||||
update_parallel(nodes)
|
update_parallel(nodes)
|
||||||
pip_fixer.fix_broken()
|
|
||||||
|
_finalize_resolve(pip_fixer, uv_compile)
|
||||||
|
|
||||||
|
|
||||||
@app.command(help="Disable custom nodes")
|
@app.command(help="Disable custom nodes")
|
||||||
@ -886,16 +901,28 @@ def fix(
|
|||||||
None,
|
None,
|
||||||
help="user directory"
|
help="user directory"
|
||||||
),
|
),
|
||||||
|
uv_compile: Annotated[
|
||||||
|
Optional[bool],
|
||||||
|
typer.Option(
|
||||||
|
"--uv-compile",
|
||||||
|
show_default=False,
|
||||||
|
help="After fixing, batch-resolve all dependencies via uv pip compile",
|
||||||
|
),
|
||||||
|
] = False,
|
||||||
):
|
):
|
||||||
cmd_ctx.set_user_directory(user_directory)
|
cmd_ctx.set_user_directory(user_directory)
|
||||||
cmd_ctx.set_channel_mode(channel, mode)
|
cmd_ctx.set_channel_mode(channel, mode)
|
||||||
|
|
||||||
|
if uv_compile:
|
||||||
|
cmd_ctx.set_no_deps(True)
|
||||||
|
|
||||||
if 'all' in nodes:
|
if 'all' in nodes:
|
||||||
asyncio.run(auto_save_snapshot())
|
asyncio.run(auto_save_snapshot())
|
||||||
|
|
||||||
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
|
||||||
for_each_nodes(nodes, fix_node, allow_all=True)
|
for_each_nodes(nodes, fix_node, allow_all=True)
|
||||||
pip_fixer.fix_broken()
|
|
||||||
|
_finalize_resolve(pip_fixer, uv_compile)
|
||||||
|
|
||||||
|
|
||||||
@app.command("show-versions", help="Show all available versions of the node")
|
@app.command("show-versions", help="Show all available versions of the node")
|
||||||
@ -1092,7 +1119,7 @@ def save_snapshot(
|
|||||||
|
|
||||||
@app.command("restore-snapshot", help="Restore snapshot from snapshot file")
|
@app.command("restore-snapshot", help="Restore snapshot from snapshot file")
|
||||||
def restore_snapshot(
|
def restore_snapshot(
|
||||||
snapshot_name: str,
|
snapshot_name: str,
|
||||||
pip_non_url: Optional[bool] = typer.Option(
|
pip_non_url: Optional[bool] = typer.Option(
|
||||||
default=None,
|
default=None,
|
||||||
show_default=False,
|
show_default=False,
|
||||||
@ -1118,13 +1145,24 @@ def restore_snapshot(
|
|||||||
restore_to: Optional[str] = typer.Option(
|
restore_to: Optional[str] = typer.Option(
|
||||||
None,
|
None,
|
||||||
help="Manually specify the installation path for the custom node. Ignore user directory."
|
help="Manually specify the installation path for the custom node. Ignore user directory."
|
||||||
)
|
),
|
||||||
|
uv_compile: Annotated[
|
||||||
|
Optional[bool],
|
||||||
|
typer.Option(
|
||||||
|
"--uv-compile",
|
||||||
|
show_default=False,
|
||||||
|
help="After restoring, batch-resolve all dependencies via uv pip compile",
|
||||||
|
),
|
||||||
|
] = False,
|
||||||
):
|
):
|
||||||
cmd_ctx.set_user_directory(user_directory)
|
cmd_ctx.set_user_directory(user_directory)
|
||||||
|
|
||||||
if restore_to:
|
if restore_to:
|
||||||
cmd_ctx.update_custom_nodes_dir(restore_to)
|
cmd_ctx.update_custom_nodes_dir(restore_to)
|
||||||
|
|
||||||
|
if uv_compile:
|
||||||
|
cmd_ctx.set_no_deps(True)
|
||||||
|
|
||||||
extras = []
|
extras = []
|
||||||
if pip_non_url:
|
if pip_non_url:
|
||||||
extras.append('--pip-non-url')
|
extras.append('--pip-non-url')
|
||||||
@ -1151,8 +1189,11 @@ def restore_snapshot(
|
|||||||
except Exception:
|
except Exception:
|
||||||
print("[bold red]ERROR: Failed to restore snapshot.[/bold red]")
|
print("[bold red]ERROR: Failed to restore snapshot.[/bold red]")
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
|
if uv_compile:
|
||||||
|
pip_fixer.fix_broken()
|
||||||
raise typer.Exit(code=1)
|
raise typer.Exit(code=1)
|
||||||
pip_fixer.fix_broken()
|
|
||||||
|
_finalize_resolve(pip_fixer, uv_compile)
|
||||||
|
|
||||||
|
|
||||||
@app.command(
|
@app.command(
|
||||||
@ -1162,10 +1203,21 @@ def restore_dependencies(
|
|||||||
user_directory: str = typer.Option(
|
user_directory: str = typer.Option(
|
||||||
None,
|
None,
|
||||||
help="user directory"
|
help="user directory"
|
||||||
)
|
),
|
||||||
|
uv_compile: Annotated[
|
||||||
|
Optional[bool],
|
||||||
|
typer.Option(
|
||||||
|
"--uv-compile",
|
||||||
|
show_default=False,
|
||||||
|
help="After restoring, batch-resolve all dependencies via uv pip compile",
|
||||||
|
),
|
||||||
|
] = False,
|
||||||
):
|
):
|
||||||
cmd_ctx.set_user_directory(user_directory)
|
cmd_ctx.set_user_directory(user_directory)
|
||||||
|
|
||||||
|
if uv_compile:
|
||||||
|
cmd_ctx.set_no_deps(True)
|
||||||
|
|
||||||
node_paths = []
|
node_paths = []
|
||||||
|
|
||||||
for base_path in cmd_ctx.get_custom_nodes_paths():
|
for base_path in cmd_ctx.get_custom_nodes_paths():
|
||||||
@ -1181,9 +1233,10 @@ def restore_dependencies(
|
|||||||
for x in node_paths:
|
for x in node_paths:
|
||||||
print("----------------------------------------------------------------------------------------------------")
|
print("----------------------------------------------------------------------------------------------------")
|
||||||
print(f"Restoring [{i}/{total}]: {x}")
|
print(f"Restoring [{i}/{total}]: {x}")
|
||||||
unified_manager.execute_install_script('', x, instant_execution=True)
|
unified_manager.execute_install_script('', x, instant_execution=True, no_deps=bool(uv_compile))
|
||||||
i += 1
|
i += 1
|
||||||
pip_fixer.fix_broken()
|
|
||||||
|
_finalize_resolve(pip_fixer, uv_compile)
|
||||||
|
|
||||||
|
|
||||||
@app.command(
|
@app.command(
|
||||||
@ -1224,9 +1277,21 @@ def install_deps(
|
|||||||
None,
|
None,
|
||||||
help="user directory"
|
help="user directory"
|
||||||
),
|
),
|
||||||
|
uv_compile: Annotated[
|
||||||
|
Optional[bool],
|
||||||
|
typer.Option(
|
||||||
|
"--uv-compile",
|
||||||
|
show_default=False,
|
||||||
|
help="After installing, batch-resolve all dependencies via uv pip compile",
|
||||||
|
),
|
||||||
|
] = False,
|
||||||
):
|
):
|
||||||
cmd_ctx.set_user_directory(user_directory)
|
cmd_ctx.set_user_directory(user_directory)
|
||||||
cmd_ctx.set_channel_mode(channel, mode)
|
cmd_ctx.set_channel_mode(channel, mode)
|
||||||
|
|
||||||
|
if uv_compile:
|
||||||
|
cmd_ctx.set_no_deps(True)
|
||||||
|
|
||||||
asyncio.run(auto_save_snapshot())
|
asyncio.run(auto_save_snapshot())
|
||||||
|
|
||||||
if not os.path.exists(deps):
|
if not os.path.exists(deps):
|
||||||
@ -1246,19 +1311,40 @@ def install_deps(
|
|||||||
if state == 'installed':
|
if state == 'installed':
|
||||||
continue
|
continue
|
||||||
elif state == 'not-installed':
|
elif state == 'not-installed':
|
||||||
asyncio.run(core.gitclone_install(k, instant_execution=True))
|
asyncio.run(core.gitclone_install(k, instant_execution=True, no_deps=bool(uv_compile)))
|
||||||
else: # disabled
|
else: # disabled
|
||||||
core.gitclone_set_active([k], False)
|
core.gitclone_set_active([k], False)
|
||||||
pip_fixer.fix_broken()
|
|
||||||
|
_finalize_resolve(pip_fixer, uv_compile)
|
||||||
|
|
||||||
print("Dependency installation and activation complete.")
|
print("Dependency installation and activation complete.")
|
||||||
|
|
||||||
|
|
||||||
|
def _finalize_resolve(pip_fixer, uv_compile) -> None:
|
||||||
|
"""Run batch resolution if --uv-compile is set, then fix broken packages."""
|
||||||
|
if uv_compile:
|
||||||
|
try:
|
||||||
|
_run_unified_resolve()
|
||||||
|
except ImportError as e:
|
||||||
|
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
|
||||||
|
raise typer.Exit(1)
|
||||||
|
except typer.Exit:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
|
||||||
|
raise typer.Exit(1)
|
||||||
|
finally:
|
||||||
|
pip_fixer.fix_broken()
|
||||||
|
else:
|
||||||
|
pip_fixer.fix_broken()
|
||||||
|
|
||||||
|
|
||||||
def _run_unified_resolve():
|
def _run_unified_resolve():
|
||||||
"""Shared logic for unified batch dependency resolution."""
|
"""Shared logic for unified batch dependency resolution."""
|
||||||
from comfyui_manager.common.unified_dep_resolver import (
|
from comfyui_manager.common.unified_dep_resolver import (
|
||||||
UnifiedDepResolver,
|
UnifiedDepResolver,
|
||||||
UvNotAvailableError,
|
UvNotAvailableError,
|
||||||
|
attribute_conflicts,
|
||||||
collect_base_requirements,
|
collect_base_requirements,
|
||||||
collect_node_pack_paths,
|
collect_node_pack_paths,
|
||||||
)
|
)
|
||||||
@ -1295,6 +1381,14 @@ def _run_unified_resolve():
|
|||||||
print("[bold green]Resolution complete (no deps needed).[/bold green]")
|
print("[bold green]Resolution complete (no deps needed).[/bold green]")
|
||||||
else:
|
else:
|
||||||
print(f"[bold red]Resolution failed: {result.error}[/bold red]")
|
print(f"[bold red]Resolution failed: {result.error}[/bold red]")
|
||||||
|
if result.lockfile and result.lockfile.conflicts and result.collected:
|
||||||
|
attributed = attribute_conflicts(result.collected.sources, result.lockfile.conflicts)
|
||||||
|
if attributed:
|
||||||
|
print("[bold yellow]Conflicting packages (by node pack):[/bold yellow]")
|
||||||
|
for pkg_name, requesters in sorted(attributed.items()):
|
||||||
|
print(f" [yellow]{pkg_name}[/yellow]:")
|
||||||
|
for pack_path, pkg_spec in requesters:
|
||||||
|
print(f" {os.path.basename(pack_path)} → {pkg_spec}")
|
||||||
raise typer.Exit(1)
|
raise typer.Exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -56,7 +56,8 @@ class CollectedDeps:
|
|||||||
"""Aggregated dependency collection result."""
|
"""Aggregated dependency collection result."""
|
||||||
requirements: list[PackageRequirement] = field(default_factory=list)
|
requirements: list[PackageRequirement] = field(default_factory=list)
|
||||||
skipped: list[tuple[str, str]] = field(default_factory=list)
|
skipped: list[tuple[str, str]] = field(default_factory=list)
|
||||||
sources: dict[str, list[str]] = field(default_factory=dict)
|
sources: dict[str, list[tuple[str, str]]] = field(default_factory=dict)
|
||||||
|
"""pkg_name → [(pack_path, pkg_spec), ...] — tracks which node packs request each package."""
|
||||||
extra_index_urls: list[str] = field(default_factory=list)
|
extra_index_urls: list[str] = field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
@ -275,7 +276,7 @@ class UnifiedDepResolver:
|
|||||||
"""Collect dependencies from all node packs."""
|
"""Collect dependencies from all node packs."""
|
||||||
requirements: list[PackageRequirement] = []
|
requirements: list[PackageRequirement] = []
|
||||||
skipped: list[tuple[str, str]] = []
|
skipped: list[tuple[str, str]] = []
|
||||||
sources: dict[str, list[str]] = defaultdict(list)
|
sources: defaultdict[str, list[tuple[str, str]]] = defaultdict(list)
|
||||||
extra_index_urls: list[str] = []
|
extra_index_urls: list[str] = []
|
||||||
|
|
||||||
# Snapshot installed packages once to avoid repeated subprocess calls.
|
# Snapshot installed packages once to avoid repeated subprocess calls.
|
||||||
@ -362,7 +363,7 @@ class UnifiedDepResolver:
|
|||||||
requirements.append(
|
requirements.append(
|
||||||
PackageRequirement(name=pkg_name, spec=pkg_spec, source=pack_path)
|
PackageRequirement(name=pkg_name, spec=pkg_spec, source=pack_path)
|
||||||
)
|
)
|
||||||
sources[pkg_name].append(pack_path)
|
sources[pkg_name].append((pack_path, pkg_spec))
|
||||||
|
|
||||||
# Commit staged index URLs only after all validation passed.
|
# Commit staged index URLs only after all validation passed.
|
||||||
if pending_urls:
|
if pending_urls:
|
||||||
@ -701,3 +702,23 @@ class UnifiedDepResolver:
|
|||||||
)
|
)
|
||||||
except OSError:
|
except OSError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def attribute_conflicts(
|
||||||
|
sources: dict[str, list[tuple[str, str]]],
|
||||||
|
conflicts: list[str],
|
||||||
|
) -> dict[str, list[tuple[str, str]]]:
|
||||||
|
"""Cross-reference conflict packages with their requesting node packs.
|
||||||
|
|
||||||
|
Uses word-boundary regex to prevent false-positive prefix matches
|
||||||
|
(e.g. ``torch`` does NOT match ``torchvision`` or ``torch_audio``).
|
||||||
|
"""
|
||||||
|
conflict_text = "\n".join(conflicts).lower().replace("-", "_")
|
||||||
|
return {
|
||||||
|
pkg: reqs
|
||||||
|
for pkg, reqs in sources.items()
|
||||||
|
if re.search(
|
||||||
|
r'(?<![a-z0-9_])' + re.escape(pkg.lower().replace("-", "_")) + r'(?![a-z0-9_])',
|
||||||
|
conflict_text,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|||||||
@ -1417,8 +1417,18 @@ class UnifiedManager:
|
|||||||
else: # nightly
|
else: # nightly
|
||||||
repo_url = the_node['repository']
|
repo_url = the_node['repository']
|
||||||
else:
|
else:
|
||||||
result = ManagedResult('install')
|
# Fallback for nightly only: use repository URL from CNR map
|
||||||
return result.fail(f"Node '{node_id}@{version_spec}' not found in [{channel}, {mode}]")
|
# when node is registered in CNR but absent from nightly manifest
|
||||||
|
if version_spec == 'nightly':
|
||||||
|
cnr_fallback = self.cnr_map.get(node_id)
|
||||||
|
if cnr_fallback is not None and cnr_fallback.get('repository'):
|
||||||
|
repo_url = cnr_fallback['repository']
|
||||||
|
else:
|
||||||
|
result = ManagedResult('install')
|
||||||
|
return result.fail(f"Node '{node_id}@{version_spec}' not found in [{channel}, {mode}]")
|
||||||
|
else:
|
||||||
|
result = ManagedResult('install')
|
||||||
|
return result.fail(f"Node '{node_id}@{version_spec}' not found in [{channel}, {mode}]")
|
||||||
|
|
||||||
if self.is_enabled(node_id, version_spec):
|
if self.is_enabled(node_id, version_spec):
|
||||||
return ManagedResult('skip').with_target(f"{node_id}@{version_spec}")
|
return ManagedResult('skip').with_target(f"{node_id}@{version_spec}")
|
||||||
|
|||||||
@ -995,7 +995,7 @@ async def task_worker():
|
|||||||
return OperationResult.failed.value
|
return OperationResult.failed.value
|
||||||
|
|
||||||
node_name = params.node_name
|
node_name = params.node_name
|
||||||
is_unknown = params.is_unknown
|
is_unknown = getattr(params, 'is_unknown', False) # guard: pydantic Union may match UpdatePackParams
|
||||||
|
|
||||||
logging.debug(
|
logging.debug(
|
||||||
"[ComfyUI-Manager] Uninstalling node: name=%s, is_unknown=%s",
|
"[ComfyUI-Manager] Uninstalling node: name=%s, is_unknown=%s",
|
||||||
@ -1019,15 +1019,16 @@ async def task_worker():
|
|||||||
|
|
||||||
async def do_disable(params: DisablePackParams) -> str:
|
async def do_disable(params: DisablePackParams) -> str:
|
||||||
node_name = params.node_name
|
node_name = params.node_name
|
||||||
|
is_unknown = getattr(params, 'is_unknown', False) # guard: pydantic Union may match UpdatePackParams
|
||||||
|
|
||||||
logging.debug(
|
logging.debug(
|
||||||
"[ComfyUI-Manager] Disabling node: name=%s, is_unknown=%s",
|
"[ComfyUI-Manager] Disabling node: name=%s, is_unknown=%s",
|
||||||
node_name,
|
node_name,
|
||||||
params.is_unknown,
|
is_unknown,
|
||||||
)
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
res = core.unified_manager.unified_disable(node_name, params.is_unknown)
|
res = core.unified_manager.unified_disable(node_name, is_unknown)
|
||||||
|
|
||||||
if res:
|
if res:
|
||||||
return OperationResult.success.value
|
return OperationResult.success.value
|
||||||
|
|||||||
@ -1411,8 +1411,18 @@ class UnifiedManager:
|
|||||||
else: # nightly
|
else: # nightly
|
||||||
repo_url = the_node['repository']
|
repo_url = the_node['repository']
|
||||||
else:
|
else:
|
||||||
result = ManagedResult('install')
|
# Fallback for nightly only: use repository URL from CNR map
|
||||||
return result.fail(f"Node '{node_id}@{version_spec}' not found in [{channel}, {mode}]")
|
# when node is registered in CNR but absent from nightly manifest
|
||||||
|
if version_spec == 'nightly':
|
||||||
|
cnr_fallback = self.cnr_map.get(node_id)
|
||||||
|
if cnr_fallback is not None and cnr_fallback.get('repository'):
|
||||||
|
repo_url = cnr_fallback['repository']
|
||||||
|
else:
|
||||||
|
result = ManagedResult('install')
|
||||||
|
return result.fail(f"Node '{node_id}@{version_spec}' not found in [{channel}, {mode}]")
|
||||||
|
else:
|
||||||
|
result = ManagedResult('install')
|
||||||
|
return result.fail(f"Node '{node_id}@{version_spec}' not found in [{channel}, {mode}]")
|
||||||
|
|
||||||
if self.is_enabled(node_id, version_spec):
|
if self.is_enabled(node_id, version_spec):
|
||||||
return ManagedResult('skip').with_target(f"{node_id}@{version_spec}")
|
return ManagedResult('skip').with_target(f"{node_id}@{version_spec}")
|
||||||
|
|||||||
@ -149,7 +149,8 @@ class CollectedDeps:
|
|||||||
"""All collected dependencies"""
|
"""All collected dependencies"""
|
||||||
requirements: list[PackageRequirement] # Collected deps (duplicates allowed, uv resolves)
|
requirements: list[PackageRequirement] # Collected deps (duplicates allowed, uv resolves)
|
||||||
skipped: list[tuple[str, str]] # (package_name, skip_reason)
|
skipped: list[tuple[str, str]] # (package_name, skip_reason)
|
||||||
sources: dict[str, list[str]] # {package_name: [source_node_packs]}
|
sources: dict[str, list[tuple[str, str]]] # {package_name: [(pack_path, pkg_spec), ...]}
|
||||||
|
"""pkg_name → [(pack_path, pkg_spec), ...] — tracks which node packs request each package."""
|
||||||
extra_index_urls: list[str] # Additional index URLs separated from --index-url entries
|
extra_index_urls: list[str] # Additional index URLs separated from --index-url entries
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@ -262,7 +263,7 @@ def collect_requirements(self) -> CollectedDeps:
|
|||||||
source=path,
|
source=path,
|
||||||
)
|
)
|
||||||
requirements.append(req)
|
requirements.append(req)
|
||||||
sources[pkg_name].append(path)
|
sources[pkg_name].append((path, pkg_spec))
|
||||||
|
|
||||||
return CollectedDeps(
|
return CollectedDeps(
|
||||||
requirements=requirements,
|
requirements=requirements,
|
||||||
@ -449,7 +450,7 @@ if os.path.exists(requirements_path) and not _unified_resolver_succeeded:
|
|||||||
|
|
||||||
### 4.1.6 CLI Integration
|
### 4.1.6 CLI Integration
|
||||||
|
|
||||||
Two entry points expose the unified resolver in `cm_cli`:
|
Multiple entry points expose the unified resolver in `cm_cli`:
|
||||||
|
|
||||||
#### 4.1.6.1 Standalone Command: `cm_cli uv-compile`
|
#### 4.1.6.1 Standalone Command: `cm_cli uv-compile`
|
||||||
|
|
||||||
@ -478,19 +479,53 @@ When `--uv-compile` is set:
|
|||||||
This differs from per-node pip install: instead of resolving each node pack's
|
This differs from per-node pip install: instead of resolving each node pack's
|
||||||
`requirements.txt` independently, all deps are compiled together to avoid conflicts.
|
`requirements.txt` independently, all deps are compiled together to avoid conflicts.
|
||||||
|
|
||||||
|
#### 4.1.6.3 Additional `--uv-compile` Commands
|
||||||
|
|
||||||
|
The following commands follow the same `no_deps` + batch-resolve pattern as `install --uv-compile`:
|
||||||
|
`cmd_ctx.set_no_deps(True)` is set before node operations, then `_run_unified_resolve()`
|
||||||
|
runs at the end via `try/finally` with `PIPFixer.fix_broken()`.
|
||||||
|
|
||||||
|
| Command | Operation |
|
||||||
|
|---------|-----------|
|
||||||
|
| `cm_cli reinstall --uv-compile` | Reinstall nodes then batch-resolve |
|
||||||
|
| `cm_cli update --uv-compile` | Update nodes then batch-resolve |
|
||||||
|
| `cm_cli fix --uv-compile` | Fix node dependencies then batch-resolve |
|
||||||
|
| `cm_cli restore-snapshot --uv-compile` | Restore snapshot then batch-resolve |
|
||||||
|
| `cm_cli restore-dependencies --uv-compile` | Restore all node deps then batch-resolve |
|
||||||
|
| `cm_cli install-deps <deps.json> --uv-compile` | Install from deps spec file then batch-resolve |
|
||||||
|
|
||||||
|
> **`reinstall` only**: Has `--uv-compile` / `--no-deps` mutual exclusion check.
|
||||||
|
> Both skip per-node pip, but `--no-deps` skips permanently while `--uv-compile` also
|
||||||
|
> triggers batch resolution after all nodes are processed.
|
||||||
|
>
|
||||||
|
> **`restore-snapshot` only**: Has an additional pre-resolution exception guard — if the
|
||||||
|
> snapshot restore itself fails (before `_run_unified_resolve()` is reached),
|
||||||
|
> `PIPFixer.fix_broken()` runs in the exception handler before exit. The `try/finally`
|
||||||
|
> applies to the `_run_unified_resolve()` call. See dec_7 for rationale.
|
||||||
|
|
||||||
#### Shared Design Decisions
|
#### Shared Design Decisions
|
||||||
|
|
||||||
- **Uses real `cm_global` values**: Unlike the startup path (4.1.3) which passes empty
|
- **Uses real `cm_global` values**: Unlike the startup path (4.1.3) which passes empty
|
||||||
blacklist/overrides, CLI commands pass `cm_global.pip_blacklist`,
|
blacklist/overrides, CLI commands pass `cm_global.pip_blacklist`,
|
||||||
`cm_global.pip_overrides`, and `cm_global.pip_downgrade_blacklist` — already
|
`cm_global.pip_overrides`, and `cm_global.pip_downgrade_blacklist` — already
|
||||||
initialized at `cm_cli/__main__.py` module scope (lines 45-60).
|
initialized at `cm_cli/__main__.py` module scope.
|
||||||
- **No `_unified_resolver_succeeded` flag**: Not needed — these are one-shot commands,
|
- **No `_unified_resolver_succeeded` flag**: Not needed — these are one-shot commands,
|
||||||
not startup gates.
|
not startup gates.
|
||||||
- **Shared helper**: Both entry points delegate to `_run_unified_resolve()` which
|
- **Shared helper**: All entry points delegate to `_run_unified_resolve()` which
|
||||||
handles resolver instantiation, execution, and result reporting.
|
handles resolver instantiation, execution, and result reporting.
|
||||||
- **Error handling**: `UvNotAvailableError` / `ImportError` → exit 1 with message.
|
- **Error handling**: `UvNotAvailableError` / `ImportError` → exit 1 with message.
|
||||||
Both entry points use `try/finally` to guarantee `PIPFixer.fix_broken()` runs
|
All entry points guarantee `PIPFixer.fix_broken()` runs regardless of outcome —
|
||||||
regardless of resolution outcome.
|
via `try/finally` around `_run_unified_resolve()`. `restore-snapshot` additionally
|
||||||
|
calls `fix_broken()` in the snapshot restore exception handler (before
|
||||||
|
`_run_unified_resolve()` is reached), per dec_7.
|
||||||
|
- **Conflict attribution output**: When resolution fails and `result.lockfile.conflicts`
|
||||||
|
is non-empty, `_run_unified_resolve()` cross-references conflict package names with
|
||||||
|
`CollectedDeps.sources` to identify which node packs requested each conflicting package:
|
||||||
|
- Normalization: both sources keys and conflict text apply `.lower().replace("-", "_")`
|
||||||
|
- Word-boundary regex `(?<![a-z0-9_])pkg(?![a-z0-9_])` prevents false-positive prefix
|
||||||
|
matches (e.g., `torch` does NOT match `torch_audio` or `torchvision`)
|
||||||
|
- Output format: sorted by package name, each entry lists `pack_basename → pkg_spec`
|
||||||
|
per requester (using `CollectedDeps.sources` tuple values `(pack_path, pkg_spec)`)
|
||||||
|
|
||||||
**Node pack discovery**: Uses `cmd_ctx.get_custom_nodes_paths()` → `collect_node_pack_paths()`,
|
**Node pack discovery**: Uses `cmd_ctx.get_custom_nodes_paths()` → `collect_node_pack_paths()`,
|
||||||
which is the CLI-native path resolution (respects `--user-directory` and `folder_paths`).
|
which is the CLI-native path resolution (respects `--user-directory` and `folder_paths`).
|
||||||
|
|||||||
@ -329,7 +329,7 @@ User requests installation of node packs A and B nearly simultaneously from UI
|
|||||||
|
|
||||||
## 7. Future Extensions
|
## 7. Future Extensions
|
||||||
|
|
||||||
- ~~**`cm_global` integration** [DONE]: `cm_cli uv-compile` and `cm_cli install --uv-compile` pass real `cm_global` values. Startup path (`prestartup_script.py`) still passes empty by design~~
|
- ~~**`cm_global` integration** [DONE]: All `--uv-compile` CLI commands (`uv-compile`, `install`, `reinstall`, `update`, `fix`, `restore-snapshot`, `restore-dependencies`, `install-deps`) pass real `cm_global` values. Startup path (`prestartup_script.py`) still passes empty by design~~
|
||||||
- Lockfile caching: Reuse for identical node pack configurations
|
- Lockfile caching: Reuse for identical node pack configurations
|
||||||
- Pre-install dependency conflict validation API: Check compatibility before installation
|
- Pre-install dependency conflict validation API: Check compatibility before installation
|
||||||
- Dependency tree visualization: Display dependency relationships to users
|
- Dependency tree visualization: Display dependency relationships to users
|
||||||
@ -355,3 +355,9 @@ User requests installation of node packs A and B nearly simultaneously from UI
|
|||||||
| Legacy `execute_install_script()` (2 locations) | `legacy/manager_core.py` | ❌ No | Legacy paths |
|
| Legacy `execute_install_script()` (2 locations) | `legacy/manager_core.py` | ❌ No | Legacy paths |
|
||||||
| `cm_cli uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Standalone CLI batch resolution (with `cm_global` values) |
|
| `cm_cli uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Standalone CLI batch resolution (with `cm_global` values) |
|
||||||
| `cm_cli install --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after all installs |
|
| `cm_cli install --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after all installs |
|
||||||
|
| `cm_cli reinstall --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after all reinstalls; mutually exclusive with `--no-deps` |
|
||||||
|
| `cm_cli update --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped during updates, batch resolution after |
|
||||||
|
| `cm_cli fix --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped during dep fix, batch resolution after |
|
||||||
|
| `cm_cli restore-snapshot --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped during restore, batch resolution after |
|
||||||
|
| `cm_cli restore-dependencies --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after all node deps restored |
|
||||||
|
| `cm_cli install-deps --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after deps-spec install |
|
||||||
|
|||||||
@ -11,11 +11,15 @@ cm-cli [OPTIONS]
|
|||||||
|
|
||||||
OPTIONS:
|
OPTIONS:
|
||||||
[install|reinstall|uninstall|update|disable|enable|fix] node_name ... ?[--channel <channel name>] ?[--mode [remote|local|cache]]
|
[install|reinstall|uninstall|update|disable|enable|fix] node_name ... ?[--channel <channel name>] ?[--mode [remote|local|cache]]
|
||||||
|
[install|reinstall|update|fix] node_name ... ?[--uv-compile]
|
||||||
[update|disable|enable|fix] all ?[--channel <channel name>] ?[--mode [remote|local|cache]]
|
[update|disable|enable|fix] all ?[--channel <channel name>] ?[--mode [remote|local|cache]]
|
||||||
|
[update|fix] all ?[--uv-compile]
|
||||||
[simple-show|show] [installed|enabled|not-installed|disabled|all|snapshot|snapshot-list] ?[--channel <channel name>] ?[--mode [remote|local|cache]]
|
[simple-show|show] [installed|enabled|not-installed|disabled|all|snapshot|snapshot-list] ?[--channel <channel name>] ?[--mode [remote|local|cache]]
|
||||||
save-snapshot ?[--output <snapshot .json/.yaml>]
|
save-snapshot ?[--output <snapshot .json/.yaml>]
|
||||||
restore-snapshot <snapshot .json/.yaml> ?[--pip-non-url] ?[--pip-non-local-url] ?[--pip-local-url]
|
restore-snapshot <snapshot .json/.yaml> ?[--pip-non-url] ?[--pip-non-local-url] ?[--pip-local-url] ?[--uv-compile]
|
||||||
restore-dependencies
|
restore-dependencies ?[--uv-compile]
|
||||||
|
install-deps <deps.json> ?[--channel <channel name>] ?[--mode [remote|local|cache]] ?[--uv-compile]
|
||||||
|
uv-compile
|
||||||
clear
|
clear
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -107,6 +111,22 @@ ComfyUI-Loopchain
|
|||||||
* `enable`: Enables the specified custom nodes.
|
* `enable`: Enables the specified custom nodes.
|
||||||
* `fix`: Attempts to fix dependencies for the specified custom nodes.
|
* `fix`: Attempts to fix dependencies for the specified custom nodes.
|
||||||
|
|
||||||
|
#### `--uv-compile` flag (`install`, `reinstall`, `update`, `fix`)
|
||||||
|
|
||||||
|
When `--uv-compile` is specified, per-node pip installs are skipped during node operations.
|
||||||
|
After all operations complete, `uv pip compile` resolves the full dependency graph in one batch.
|
||||||
|
|
||||||
|
* Requires `uv` to be installed.
|
||||||
|
* Prevents dependency conflicts between multiple node packs.
|
||||||
|
* On resolution failure, displays conflicting packages and which node packs requested them.
|
||||||
|
* `reinstall --uv-compile` is mutually exclusive with `--no-deps`.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cm-cli install ComfyUI-Impact-Pack ComfyUI-Inspire-Pack --uv-compile
|
||||||
|
cm-cli update all --uv-compile
|
||||||
|
cm-cli fix ComfyUI-Impact-Pack --uv-compile
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
### 4. Snapshot Management
|
### 4. Snapshot Management
|
||||||
* `cm-cli save-snapshot [--output <snapshot .json/.yaml>]`: Saves the current snapshot.
|
* `cm-cli save-snapshot [--output <snapshot .json/.yaml>]`: Saves the current snapshot.
|
||||||
@ -122,12 +142,33 @@ ComfyUI-Loopchain
|
|||||||
|
|
||||||
### 5. Dependency Restoration
|
### 5. Dependency Restoration
|
||||||
|
|
||||||
`restore-dependencies`
|
`restore-dependencies ?[--uv-compile]`
|
||||||
|
|
||||||
* This command can be used if custom nodes are installed under the `ComfyUI/custom_nodes` path but their dependencies are not installed.
|
* This command can be used if custom nodes are installed under the `ComfyUI/custom_nodes` path but their dependencies are not installed.
|
||||||
* It is useful when starting a new cloud instance, like Colab, where dependencies need to be reinstalled and installation scripts re-executed.
|
* It is useful when starting a new cloud instance, like Colab, where dependencies need to be reinstalled and installation scripts re-executed.
|
||||||
* It can also be utilized if ComfyUI is reinstalled and only the custom_nodes path has been backed up and restored.
|
* It can also be utilized if ComfyUI is reinstalled and only the custom_nodes path has been backed up and restored.
|
||||||
|
* Use `--uv-compile` to skip per-node pip installs and resolve all dependencies in one batch instead.
|
||||||
|
|
||||||
### 6. Clear
|
### 6. Install from Dependency File
|
||||||
|
|
||||||
|
`install-deps <deps.json> ?[--channel <channel name>] ?[--mode [remote|local|cache]] ?[--uv-compile]`
|
||||||
|
|
||||||
|
* Installs custom nodes specified in a dependency spec file (`.json`) or workflow file (`.png`/`.json`).
|
||||||
|
* Use `--uv-compile` to batch-resolve all dependencies after installation instead of per-node pip.
|
||||||
|
|
||||||
|
### 7. uv-compile
|
||||||
|
|
||||||
|
`uv-compile ?[--user-directory <path>]`
|
||||||
|
|
||||||
|
* Batch-resolves and installs all custom node pack dependencies using `uv pip compile`.
|
||||||
|
* Useful for environment recovery or initial setup without starting ComfyUI.
|
||||||
|
* Requires `uv` to be installed.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cm-cli uv-compile
|
||||||
|
cm-cli uv-compile --user-directory /path/to/comfyui
|
||||||
|
```
|
||||||
|
|
||||||
|
### 8. Clear
|
||||||
|
|
||||||
In the GUI, installations, updates, or snapshot restorations are scheduled to execute the next time ComfyUI is launched. The `clear` command clears this scheduled state, ensuring no pre-execution actions are applied.
|
In the GUI, installations, updates, or snapshot restorations are scheduled to execute the next time ComfyUI is launched. The `clear` command clears this scheduled state, ensuring no pre-execution actions are applied.
|
||||||
|
|||||||
@ -11,11 +11,15 @@ cm-cli [OPTIONS]
|
|||||||
|
|
||||||
OPTIONS:
|
OPTIONS:
|
||||||
[install|reinstall|uninstall|update|disable|enable|fix] node_name ... ?[--channel <channel name>] ?[--mode [remote|local|cache]]
|
[install|reinstall|uninstall|update|disable|enable|fix] node_name ... ?[--channel <channel name>] ?[--mode [remote|local|cache]]
|
||||||
|
[install|reinstall|update|fix] node_name ... ?[--uv-compile]
|
||||||
[update|disable|enable|fix] all ?[--channel <channel name>] ?[--mode [remote|local|cache]]
|
[update|disable|enable|fix] all ?[--channel <channel name>] ?[--mode [remote|local|cache]]
|
||||||
|
[update|fix] all ?[--uv-compile]
|
||||||
[simple-show|show] [installed|enabled|not-installed|disabled|all|snapshot|snapshot-list] ?[--channel <channel name>] ?[--mode [remote|local|cache]]
|
[simple-show|show] [installed|enabled|not-installed|disabled|all|snapshot|snapshot-list] ?[--channel <channel name>] ?[--mode [remote|local|cache]]
|
||||||
save-snapshot ?[--output <snapshot .json/.yaml>]
|
save-snapshot ?[--output <snapshot .json/.yaml>]
|
||||||
restore-snapshot <snapshot .json/.yaml> ?[--pip-non-url] ?[--pip-non-local-url] ?[--pip-local-url]
|
restore-snapshot <snapshot .json/.yaml> ?[--pip-non-url] ?[--pip-non-local-url] ?[--pip-local-url] ?[--uv-compile]
|
||||||
restore-dependencies
|
restore-dependencies ?[--uv-compile]
|
||||||
|
install-deps <deps.json> ?[--channel <channel name>] ?[--mode [remote|local|cache]] ?[--uv-compile]
|
||||||
|
uv-compile
|
||||||
clear
|
clear
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -108,6 +112,21 @@ ComfyUI-Loopchain
|
|||||||
* `enable`: 지정된 커스텀 노드들을 활성화합니다.
|
* `enable`: 지정된 커스텀 노드들을 활성화합니다.
|
||||||
* `fix`: 지정된 커스텀 노드의 의존성을 고치기 위한 시도를 합니다.
|
* `fix`: 지정된 커스텀 노드의 의존성을 고치기 위한 시도를 합니다.
|
||||||
|
|
||||||
|
#### `--uv-compile` 플래그 (`install`, `reinstall`, `update`, `fix`)
|
||||||
|
|
||||||
|
`--uv-compile` 플래그를 사용하면 노드별 pip 설치를 건너뛰고, 모든 작업이 완료된 후 `uv pip compile`로 전체 의존성을 한 번에 일괄 해결합니다.
|
||||||
|
|
||||||
|
* `uv`가 설치된 환경에서만 동작합니다.
|
||||||
|
* 여러 노드 팩 간의 의존성 충돌을 방지합니다.
|
||||||
|
* 해결 실패 시 충돌 패키지와 해당 패키지를 요청한 노드 팩 목록을 표시합니다.
|
||||||
|
* `reinstall --uv-compile`은 `--no-deps`와 동시에 사용할 수 없습니다.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cm-cli install ComfyUI-Impact-Pack ComfyUI-Inspire-Pack --uv-compile
|
||||||
|
cm-cli update all --uv-compile
|
||||||
|
cm-cli fix ComfyUI-Impact-Pack --uv-compile
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
### 4. 스냅샷 관리 기능
|
### 4. 스냅샷 관리 기능
|
||||||
* `cm-cli save-snapshot ?[--output <snapshot .json/.yaml>]`: 현재의 snapshot을 저장합니다.
|
* `cm-cli save-snapshot ?[--output <snapshot .json/.yaml>]`: 현재의 snapshot을 저장합니다.
|
||||||
@ -123,13 +142,33 @@ ComfyUI-Loopchain
|
|||||||
|
|
||||||
### 5. 의존성 설치
|
### 5. 의존성 설치
|
||||||
|
|
||||||
`restore-dependencies`
|
`restore-dependencies ?[--uv-compile]`
|
||||||
|
|
||||||
* `ComfyUI/custom_nodes` 하위 경로에 커스텀 노드들이 설치되어 있긴 하지만, 의존성이 설치되지 않은 경우 사용할 수 있습니다.
|
* `ComfyUI/custom_nodes` 하위 경로에 커스텀 노드들이 설치되어 있긴 하지만, 의존성이 설치되지 않은 경우 사용할 수 있습니다.
|
||||||
* Colab과 같이 cloud instance를 새로 시작하는 경우 의존성 재설치 및 설치 스크립트가 재실행되어야 하는 경우 사용합니다.
|
* Colab과 같이 cloud instance를 새로 시작하는 경우 의존성 재설치 및 설치 스크립트가 재실행되어야 하는 경우 사용합니다.
|
||||||
* ComfyUI를 재설치할 경우, custom_nodes 경로만 백업했다가 재설치할 경우 활용 가능합니다.
|
* ComfyUI를 재설치할 경우, custom_nodes 경로만 백업했다가 재설치할 경우 활용 가능합니다.
|
||||||
|
* `--uv-compile` 플래그를 사용하면 노드별 pip 설치를 건너뛰고 일괄 해결합니다.
|
||||||
|
|
||||||
|
### 6. 의존성 파일로 설치
|
||||||
|
|
||||||
### 6. clear
|
`install-deps <deps.json> ?[--channel <channel name>] ?[--mode [remote|local|cache]] ?[--uv-compile]`
|
||||||
|
|
||||||
|
* 의존성 spec 파일(`.json`) 또는 워크플로우 파일(`.png`/`.json`)에 명시된 커스텀 노드를 설치합니다.
|
||||||
|
* `--uv-compile` 플래그를 사용하면 모든 노드 설치 후 일괄 의존성 해결을 수행합니다.
|
||||||
|
|
||||||
|
### 7. uv-compile
|
||||||
|
|
||||||
|
`uv-compile ?[--user-directory <path>]`
|
||||||
|
|
||||||
|
* 설치된 모든 커스텀 노드 팩의 의존성을 `uv pip compile`로 일괄 해결하고 설치합니다.
|
||||||
|
* ComfyUI를 재시작하지 않고 의존성 환경을 복구하거나 초기 설정 시 활용할 수 있습니다.
|
||||||
|
* `uv`가 설치된 환경에서만 동작합니다.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cm-cli uv-compile
|
||||||
|
cm-cli uv-compile --user-directory /path/to/comfyui
|
||||||
|
```
|
||||||
|
|
||||||
|
### 8. clear
|
||||||
|
|
||||||
GUI에서 install, update를 하거나 snapshot을 restore하는 경우 예약을 통해서 다음번 ComfyUI를 실행할 경우 실행되는 구조입니다. `clear` 는 이런 예약 상태를 clear해서, 아무런 사전 실행이 적용되지 않도록 합니다.
|
GUI에서 install, update를 하거나 snapshot을 restore하는 경우 예약을 통해서 다음번 ComfyUI를 실행할 경우 실행되는 구조입니다. `clear` 는 이런 예약 상태를 clear해서, 아무런 사전 실행이 적용되지 않도록 합니다.
|
||||||
|
|||||||
@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
|
|||||||
[project]
|
[project]
|
||||||
name = "comfyui-manager"
|
name = "comfyui-manager"
|
||||||
license = { text = "GPL-3.0-only" }
|
license = { text = "GPL-3.0-only" }
|
||||||
version = "4.1b2"
|
version = "4.1b3"
|
||||||
requires-python = ">= 3.9"
|
requires-python = ">= 3.9"
|
||||||
description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
|
description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
|
|||||||
306
tests/e2e/test_e2e_endpoint.py
Normal file
306
tests/e2e/test_e2e_endpoint.py
Normal file
@ -0,0 +1,306 @@
|
|||||||
|
"""E2E tests for ComfyUI Manager HTTP API endpoints (install/uninstall).
|
||||||
|
|
||||||
|
Starts a real ComfyUI instance, exercises the task-queue-based install
|
||||||
|
and uninstall endpoints, then verifies the results via the installed-list
|
||||||
|
endpoint and filesystem checks.
|
||||||
|
|
||||||
|
Requires a pre-built E2E environment (from setup_e2e_env.sh).
|
||||||
|
Set E2E_ROOT env var to point at it, or the tests will be skipped.
|
||||||
|
|
||||||
|
Install test methodology follows the main comfyui-manager test suite
|
||||||
|
(tests/glob/test_queue_task_api.py):
|
||||||
|
- Uses a CNR-registered package with proper version-based install
|
||||||
|
- Verifies .tracking file for CNR installs
|
||||||
|
- Checks installed-list API with cnr_id matching
|
||||||
|
- Cleans up .disabled/ directory entries
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
E2E_ROOT=/tmp/e2e_full_test pytest tests/e2e/test_e2e_endpoint.py -v
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import time
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import requests
|
||||||
|
|
||||||
|
E2E_ROOT = os.environ.get("E2E_ROOT", "")
|
||||||
|
COMFYUI_PATH = os.path.join(E2E_ROOT, "comfyui") if E2E_ROOT else ""
|
||||||
|
CUSTOM_NODES = os.path.join(COMFYUI_PATH, "custom_nodes") if COMFYUI_PATH else ""
|
||||||
|
SCRIPTS_DIR = os.path.join(
|
||||||
|
os.path.dirname(os.path.abspath(__file__)), "scripts"
|
||||||
|
)
|
||||||
|
|
||||||
|
PORT = 8199
|
||||||
|
BASE_URL = f"http://127.0.0.1:{PORT}"
|
||||||
|
|
||||||
|
# CNR-registered package with multiple versions, no heavy dependencies.
|
||||||
|
# Same test package used by the main comfyui-manager test suite.
|
||||||
|
PACK_ID = "ComfyUI_SigmoidOffsetScheduler"
|
||||||
|
PACK_DIR_NAME = "ComfyUI_SigmoidOffsetScheduler"
|
||||||
|
PACK_CNR_ID = "comfyui_sigmoidoffsetscheduler"
|
||||||
|
PACK_VERSION = "1.0.1"
|
||||||
|
|
||||||
|
# Polling configuration for async task completion
|
||||||
|
POLL_TIMEOUT = 30 # max seconds to wait for an operation
|
||||||
|
POLL_INTERVAL = 0.5 # seconds between polls
|
||||||
|
|
||||||
|
pytestmark = pytest.mark.skipif(
|
||||||
|
not E2E_ROOT
|
||||||
|
or not os.path.isfile(os.path.join(E2E_ROOT, ".e2e_setup_complete")),
|
||||||
|
reason="E2E_ROOT not set or E2E environment not ready (run setup_e2e_env.sh first)",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _start_comfyui() -> int:
|
||||||
|
"""Start ComfyUI and return its PID."""
|
||||||
|
env = {**os.environ, "E2E_ROOT": E2E_ROOT, "PORT": str(PORT)}
|
||||||
|
r = subprocess.run(
|
||||||
|
["bash", os.path.join(SCRIPTS_DIR, "start_comfyui.sh")],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
timeout=180,
|
||||||
|
env=env,
|
||||||
|
)
|
||||||
|
if r.returncode != 0:
|
||||||
|
raise RuntimeError(f"Failed to start ComfyUI:\n{r.stderr}")
|
||||||
|
for part in r.stdout.strip().split():
|
||||||
|
if part.startswith("COMFYUI_PID="):
|
||||||
|
return int(part.split("=")[1])
|
||||||
|
raise RuntimeError(f"Could not parse PID from start_comfyui output:\n{r.stdout}")
|
||||||
|
|
||||||
|
|
||||||
|
def _stop_comfyui():
|
||||||
|
"""Stop ComfyUI."""
|
||||||
|
env = {**os.environ, "E2E_ROOT": E2E_ROOT, "PORT": str(PORT)}
|
||||||
|
subprocess.run(
|
||||||
|
["bash", os.path.join(SCRIPTS_DIR, "stop_comfyui.sh")],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
timeout=30,
|
||||||
|
env=env,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _queue_task(task: dict) -> None:
|
||||||
|
"""Queue a task and start the worker."""
|
||||||
|
resp = requests.post(
|
||||||
|
f"{BASE_URL}/v2/manager/queue/task",
|
||||||
|
json=task,
|
||||||
|
timeout=10,
|
||||||
|
)
|
||||||
|
resp.raise_for_status()
|
||||||
|
requests.get(f"{BASE_URL}/v2/manager/queue/start", timeout=10)
|
||||||
|
|
||||||
|
|
||||||
|
def _remove_pack(name: str) -> None:
|
||||||
|
"""Remove a node pack directory and any .disabled/ entries."""
|
||||||
|
# Active directory
|
||||||
|
path = os.path.join(CUSTOM_NODES, name)
|
||||||
|
if os.path.islink(path):
|
||||||
|
os.unlink(path)
|
||||||
|
elif os.path.isdir(path):
|
||||||
|
shutil.rmtree(path, ignore_errors=True)
|
||||||
|
# .disabled/ entries (CNR versioned + nightly)
|
||||||
|
disabled_dir = os.path.join(CUSTOM_NODES, ".disabled")
|
||||||
|
if os.path.isdir(disabled_dir):
|
||||||
|
cnr_lower = name.lower().replace("_", "").replace("-", "")
|
||||||
|
for entry in os.listdir(disabled_dir):
|
||||||
|
entry_lower = entry.lower().replace("_", "").replace("-", "")
|
||||||
|
if entry_lower.startswith(cnr_lower):
|
||||||
|
entry_path = os.path.join(disabled_dir, entry)
|
||||||
|
if os.path.isdir(entry_path):
|
||||||
|
shutil.rmtree(entry_path, ignore_errors=True)
|
||||||
|
|
||||||
|
|
||||||
|
def _pack_exists(name: str) -> bool:
|
||||||
|
return os.path.isdir(os.path.join(CUSTOM_NODES, name))
|
||||||
|
|
||||||
|
|
||||||
|
def _has_tracking(name: str) -> bool:
|
||||||
|
"""Check if the pack has a .tracking file (CNR install marker)."""
|
||||||
|
return os.path.isfile(os.path.join(CUSTOM_NODES, name, ".tracking"))
|
||||||
|
|
||||||
|
|
||||||
|
def _wait_for(predicate, timeout=POLL_TIMEOUT, interval=POLL_INTERVAL):
|
||||||
|
"""Poll *predicate* until it returns True or *timeout* seconds elapse.
|
||||||
|
|
||||||
|
Returns True if predicate was satisfied, False on timeout.
|
||||||
|
"""
|
||||||
|
deadline = time.monotonic() + timeout
|
||||||
|
while time.monotonic() < deadline:
|
||||||
|
if predicate():
|
||||||
|
return True
|
||||||
|
time.sleep(interval)
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Fixtures
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module")
|
||||||
|
def comfyui():
|
||||||
|
"""Start ComfyUI once for the module, stop after all tests."""
|
||||||
|
_remove_pack(PACK_DIR_NAME)
|
||||||
|
pid = _start_comfyui()
|
||||||
|
yield pid
|
||||||
|
_stop_comfyui()
|
||||||
|
_remove_pack(PACK_DIR_NAME)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Tests
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestEndpointInstallUninstall:
|
||||||
|
"""Install and uninstall via HTTP endpoints on a running ComfyUI.
|
||||||
|
|
||||||
|
Follows the same methodology as tests/glob/test_queue_task_api.py in
|
||||||
|
the main comfyui-manager repo: CNR version-based install, .tracking
|
||||||
|
verification, installed-list API check.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def test_install_via_endpoint(self, comfyui):
|
||||||
|
"""POST /v2/manager/queue/task (install) -> pack appears on disk with .tracking."""
|
||||||
|
_remove_pack(PACK_DIR_NAME)
|
||||||
|
|
||||||
|
_queue_task({
|
||||||
|
"ui_id": "e2e-install",
|
||||||
|
"client_id": "e2e-install",
|
||||||
|
"kind": "install",
|
||||||
|
"params": {
|
||||||
|
"id": PACK_ID,
|
||||||
|
"version": PACK_VERSION,
|
||||||
|
"selected_version": "latest",
|
||||||
|
"mode": "remote",
|
||||||
|
"channel": "default",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
assert _wait_for(
|
||||||
|
lambda: _pack_exists(PACK_DIR_NAME),
|
||||||
|
), f"{PACK_DIR_NAME} not found in custom_nodes within {POLL_TIMEOUT}s"
|
||||||
|
assert _has_tracking(PACK_DIR_NAME), f"{PACK_DIR_NAME} missing .tracking (not a CNR install?)"
|
||||||
|
|
||||||
|
def test_installed_list_shows_pack(self, comfyui):
|
||||||
|
"""GET /v2/customnode/installed includes the installed pack."""
|
||||||
|
if not _pack_exists(PACK_DIR_NAME):
|
||||||
|
pytest.skip("Pack not installed (previous test may have failed)")
|
||||||
|
|
||||||
|
resp = requests.get(f"{BASE_URL}/v2/customnode/installed", timeout=10)
|
||||||
|
resp.raise_for_status()
|
||||||
|
installed = resp.json()
|
||||||
|
|
||||||
|
# Match by cnr_id (case-insensitive) following main repo pattern
|
||||||
|
package_found = any(
|
||||||
|
pkg.get("cnr_id", "").lower() == PACK_CNR_ID.lower()
|
||||||
|
for pkg in installed.values()
|
||||||
|
if isinstance(pkg, dict) and pkg.get("cnr_id")
|
||||||
|
)
|
||||||
|
assert package_found, (
|
||||||
|
f"{PACK_CNR_ID} not found in installed list: {list(installed.keys())}"
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_uninstall_via_endpoint(self, comfyui):
|
||||||
|
"""POST /v2/manager/queue/task (uninstall) -> pack removed from disk."""
|
||||||
|
if not _pack_exists(PACK_DIR_NAME):
|
||||||
|
pytest.skip("Pack not installed (previous test may have failed)")
|
||||||
|
|
||||||
|
_queue_task({
|
||||||
|
"ui_id": "e2e-uninstall",
|
||||||
|
"client_id": "e2e-uninstall",
|
||||||
|
"kind": "uninstall",
|
||||||
|
"params": {
|
||||||
|
"node_name": PACK_CNR_ID,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
assert _wait_for(
|
||||||
|
lambda: not _pack_exists(PACK_DIR_NAME),
|
||||||
|
), f"{PACK_DIR_NAME} still exists after uninstall ({POLL_TIMEOUT}s timeout)"
|
||||||
|
|
||||||
|
def test_installed_list_after_uninstall(self, comfyui):
|
||||||
|
"""After uninstall, pack no longer appears in installed list."""
|
||||||
|
if _pack_exists(PACK_DIR_NAME):
|
||||||
|
pytest.skip("Pack still exists (previous test may have failed)")
|
||||||
|
|
||||||
|
resp = requests.get(f"{BASE_URL}/v2/customnode/installed", timeout=10)
|
||||||
|
resp.raise_for_status()
|
||||||
|
installed = resp.json()
|
||||||
|
|
||||||
|
package_found = any(
|
||||||
|
pkg.get("cnr_id", "").lower() == PACK_CNR_ID.lower()
|
||||||
|
for pkg in installed.values()
|
||||||
|
if isinstance(pkg, dict) and pkg.get("cnr_id")
|
||||||
|
)
|
||||||
|
assert not package_found, f"{PACK_CNR_ID} still in installed list after uninstall"
|
||||||
|
|
||||||
|
def test_install_uninstall_cycle(self, comfyui):
|
||||||
|
"""Complete install/uninstall cycle in a single test."""
|
||||||
|
_remove_pack(PACK_DIR_NAME)
|
||||||
|
|
||||||
|
# Install
|
||||||
|
_queue_task({
|
||||||
|
"ui_id": "e2e-cycle-install",
|
||||||
|
"client_id": "e2e-cycle",
|
||||||
|
"kind": "install",
|
||||||
|
"params": {
|
||||||
|
"id": PACK_ID,
|
||||||
|
"version": PACK_VERSION,
|
||||||
|
"selected_version": "latest",
|
||||||
|
"mode": "remote",
|
||||||
|
"channel": "default",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
assert _wait_for(
|
||||||
|
lambda: _pack_exists(PACK_DIR_NAME),
|
||||||
|
), f"Pack not installed within {POLL_TIMEOUT}s"
|
||||||
|
assert _has_tracking(PACK_DIR_NAME), "Pack missing .tracking"
|
||||||
|
|
||||||
|
# Verify in installed list
|
||||||
|
resp = requests.get(f"{BASE_URL}/v2/customnode/installed", timeout=10)
|
||||||
|
resp.raise_for_status()
|
||||||
|
installed = resp.json()
|
||||||
|
package_found = any(
|
||||||
|
pkg.get("cnr_id", "").lower() == PACK_CNR_ID.lower()
|
||||||
|
for pkg in installed.values()
|
||||||
|
if isinstance(pkg, dict) and pkg.get("cnr_id")
|
||||||
|
)
|
||||||
|
assert package_found, f"{PACK_CNR_ID} not in installed list"
|
||||||
|
|
||||||
|
# Uninstall
|
||||||
|
_queue_task({
|
||||||
|
"ui_id": "e2e-cycle-uninstall",
|
||||||
|
"client_id": "e2e-cycle",
|
||||||
|
"kind": "uninstall",
|
||||||
|
"params": {
|
||||||
|
"node_name": PACK_CNR_ID,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
assert _wait_for(
|
||||||
|
lambda: not _pack_exists(PACK_DIR_NAME),
|
||||||
|
), f"Pack not uninstalled within {POLL_TIMEOUT}s"
|
||||||
|
|
||||||
|
|
||||||
|
class TestEndpointStartup:
|
||||||
|
"""Verify ComfyUI startup with unified resolver."""
|
||||||
|
|
||||||
|
def test_comfyui_started(self, comfyui):
|
||||||
|
"""ComfyUI is running and responds to health check."""
|
||||||
|
resp = requests.get(f"{BASE_URL}/system_stats", timeout=10)
|
||||||
|
assert resp.status_code == 200
|
||||||
|
|
||||||
|
def test_startup_resolver_ran(self, comfyui):
|
||||||
|
"""Startup log contains unified resolver output."""
|
||||||
|
log_path = os.path.join(E2E_ROOT, "logs", "comfyui.log")
|
||||||
|
with open(log_path) as f:
|
||||||
|
log = f.read()
|
||||||
|
assert "[UnifiedDepResolver]" in log
|
||||||
|
assert "startup batch resolution succeeded" in log
|
||||||
254
tests/e2e/test_e2e_uv_compile.py
Normal file
254
tests/e2e/test_e2e_uv_compile.py
Normal file
@ -0,0 +1,254 @@
|
|||||||
|
"""E2E tests for cm-cli --uv-compile across all supported commands.
|
||||||
|
|
||||||
|
Requires a pre-built E2E environment (from setup_e2e_env.sh).
|
||||||
|
Set E2E_ROOT env var to point at it, or the tests will be skipped.
|
||||||
|
|
||||||
|
Supply-chain safety policy:
|
||||||
|
To prevent supply-chain attacks, E2E tests MUST only install node packs
|
||||||
|
from verified, controllable authors (ltdrdata, comfyanonymous, etc.).
|
||||||
|
Currently this suite uses only ltdrdata's dedicated test packs
|
||||||
|
(nodepack-test1-do-not-install, nodepack-test2-do-not-install) which
|
||||||
|
are intentionally designed for conflict testing and contain no
|
||||||
|
executable code. Adding packs from unverified sources is prohibited.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
E2E_ROOT=/tmp/e2e_full_test pytest tests/e2e/test_e2e_uv_compile.py -v
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
E2E_ROOT = os.environ.get("E2E_ROOT", "")
|
||||||
|
COMFYUI_PATH = os.path.join(E2E_ROOT, "comfyui") if E2E_ROOT else ""
|
||||||
|
CM_CLI = os.path.join(E2E_ROOT, "venv", "bin", "cm-cli") if E2E_ROOT else ""
|
||||||
|
CUSTOM_NODES = os.path.join(COMFYUI_PATH, "custom_nodes") if COMFYUI_PATH else ""
|
||||||
|
|
||||||
|
REPO_TEST1 = "https://github.com/ltdrdata/nodepack-test1-do-not-install"
|
||||||
|
REPO_TEST2 = "https://github.com/ltdrdata/nodepack-test2-do-not-install"
|
||||||
|
PACK_TEST1 = "nodepack-test1-do-not-install"
|
||||||
|
PACK_TEST2 = "nodepack-test2-do-not-install"
|
||||||
|
|
||||||
|
pytestmark = pytest.mark.skipif(
|
||||||
|
not E2E_ROOT or not os.path.isfile(os.path.join(E2E_ROOT, ".e2e_setup_complete")),
|
||||||
|
reason="E2E_ROOT not set or E2E environment not ready (run setup_e2e_env.sh first)",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _run_cm_cli(*args: str, timeout: int = 180) -> subprocess.CompletedProcess:
|
||||||
|
"""Run cm-cli in the E2E environment."""
|
||||||
|
env = {**os.environ, "COMFYUI_PATH": COMFYUI_PATH}
|
||||||
|
return subprocess.run(
|
||||||
|
[CM_CLI, *args],
|
||||||
|
capture_output=True,
|
||||||
|
text=True,
|
||||||
|
timeout=timeout,
|
||||||
|
env=env,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _remove_pack(name: str) -> None:
|
||||||
|
"""Remove a node pack from custom_nodes (if it exists)."""
|
||||||
|
path = os.path.join(CUSTOM_NODES, name)
|
||||||
|
if os.path.islink(path):
|
||||||
|
os.unlink(path)
|
||||||
|
elif os.path.isdir(path):
|
||||||
|
shutil.rmtree(path, ignore_errors=True)
|
||||||
|
|
||||||
|
|
||||||
|
def _pack_exists(name: str) -> bool:
|
||||||
|
return os.path.isdir(os.path.join(CUSTOM_NODES, name))
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Fixtures
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
def _clean_test_packs():
|
||||||
|
"""Ensure test node packs are removed before and after each test."""
|
||||||
|
_remove_pack(PACK_TEST1)
|
||||||
|
_remove_pack(PACK_TEST2)
|
||||||
|
yield
|
||||||
|
_remove_pack(PACK_TEST1)
|
||||||
|
_remove_pack(PACK_TEST2)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Tests
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestInstall:
|
||||||
|
"""cm-cli install --uv-compile"""
|
||||||
|
|
||||||
|
def test_install_single_pack_resolves(self):
|
||||||
|
"""Install one test pack with --uv-compile → resolve succeeds."""
|
||||||
|
r = _run_cm_cli("install", "--uv-compile", REPO_TEST1)
|
||||||
|
combined = r.stdout + r.stderr
|
||||||
|
|
||||||
|
assert _pack_exists(PACK_TEST1)
|
||||||
|
assert "Installation was successful" in combined
|
||||||
|
assert "Resolved" in combined
|
||||||
|
|
||||||
|
def test_install_conflicting_packs_shows_attribution(self):
|
||||||
|
"""Install two conflicting packs → conflict attribution output."""
|
||||||
|
# Install first (no conflict yet)
|
||||||
|
r1 = _run_cm_cli("install", "--uv-compile", REPO_TEST1)
|
||||||
|
assert _pack_exists(PACK_TEST1)
|
||||||
|
assert "Resolved" in r1.stdout + r1.stderr
|
||||||
|
|
||||||
|
# Install second → conflict
|
||||||
|
r2 = _run_cm_cli("install", "--uv-compile", REPO_TEST2)
|
||||||
|
combined = r2.stdout + r2.stderr
|
||||||
|
|
||||||
|
assert _pack_exists(PACK_TEST2)
|
||||||
|
assert "Installation was successful" in combined
|
||||||
|
assert "Resolution failed" in combined
|
||||||
|
assert "Conflicting packages (by node pack):" in combined
|
||||||
|
assert PACK_TEST1 in combined
|
||||||
|
assert PACK_TEST2 in combined
|
||||||
|
assert "ansible" in combined.lower()
|
||||||
|
|
||||||
|
|
||||||
|
class TestReinstall:
|
||||||
|
"""cm-cli reinstall --uv-compile"""
|
||||||
|
|
||||||
|
def test_reinstall_with_uv_compile(self):
|
||||||
|
"""Reinstall an existing pack with --uv-compile."""
|
||||||
|
# Install first
|
||||||
|
_run_cm_cli("install", REPO_TEST1)
|
||||||
|
assert _pack_exists(PACK_TEST1)
|
||||||
|
|
||||||
|
# Reinstall with --uv-compile
|
||||||
|
r = _run_cm_cli("reinstall", "--uv-compile", REPO_TEST1)
|
||||||
|
combined = r.stdout + r.stderr
|
||||||
|
|
||||||
|
# uv-compile should run (resolve output present)
|
||||||
|
assert "Resolving dependencies" in combined
|
||||||
|
|
||||||
|
|
||||||
|
class TestUpdate:
|
||||||
|
"""cm-cli update --uv-compile"""
|
||||||
|
|
||||||
|
def test_update_single_with_uv_compile(self):
|
||||||
|
"""Update an installed pack with --uv-compile."""
|
||||||
|
_run_cm_cli("install", REPO_TEST1)
|
||||||
|
assert _pack_exists(PACK_TEST1)
|
||||||
|
|
||||||
|
r = _run_cm_cli("update", "--uv-compile", REPO_TEST1)
|
||||||
|
combined = r.stdout + r.stderr
|
||||||
|
|
||||||
|
assert "Resolving dependencies" in combined
|
||||||
|
|
||||||
|
def test_update_all_with_uv_compile(self):
|
||||||
|
"""update all --uv-compile runs uv-compile after updating."""
|
||||||
|
_run_cm_cli("install", REPO_TEST1)
|
||||||
|
assert _pack_exists(PACK_TEST1)
|
||||||
|
|
||||||
|
r = _run_cm_cli("update", "--uv-compile", "all")
|
||||||
|
combined = r.stdout + r.stderr
|
||||||
|
|
||||||
|
assert "Resolving dependencies" in combined
|
||||||
|
|
||||||
|
|
||||||
|
class TestFix:
|
||||||
|
"""cm-cli fix --uv-compile"""
|
||||||
|
|
||||||
|
def test_fix_single_with_uv_compile(self):
|
||||||
|
"""Fix an installed pack with --uv-compile."""
|
||||||
|
_run_cm_cli("install", REPO_TEST1)
|
||||||
|
assert _pack_exists(PACK_TEST1)
|
||||||
|
|
||||||
|
r = _run_cm_cli("fix", "--uv-compile", REPO_TEST1)
|
||||||
|
combined = r.stdout + r.stderr
|
||||||
|
|
||||||
|
assert "Resolving dependencies" in combined
|
||||||
|
|
||||||
|
def test_fix_all_with_uv_compile(self):
|
||||||
|
"""fix all --uv-compile runs uv-compile after fixing."""
|
||||||
|
_run_cm_cli("install", REPO_TEST1)
|
||||||
|
assert _pack_exists(PACK_TEST1)
|
||||||
|
|
||||||
|
r = _run_cm_cli("fix", "--uv-compile", "all")
|
||||||
|
combined = r.stdout + r.stderr
|
||||||
|
|
||||||
|
assert "Resolving dependencies" in combined
|
||||||
|
|
||||||
|
|
||||||
|
class TestUvCompileStandalone:
    """End-to-end coverage for the standalone `cm-cli uv-compile` command."""

    def test_uv_compile_no_packs(self):
        """With no node packs installed, uv-compile reports nothing to do."""
        proc = _run_cm_cli("uv-compile")
        output = proc.stdout + proc.stderr

        # Only ComfyUI-Manager itself is present (normally without a
        # requirements.txt), so the command either succeeds trivially or
        # reports that no custom node packs were found.
        assert proc.returncode == 0 or "No custom node packs" in output

    def test_uv_compile_with_packs(self):
        """After installing a test pack, uv-compile resolves its dependencies."""
        _run_cm_cli("install", REPO_TEST1)
        assert _pack_exists(PACK_TEST1)

        proc = _run_cm_cli("uv-compile")
        output = proc.stdout + proc.stderr
        assert "Resolving dependencies" in output
        assert "Resolved" in output

    def test_uv_compile_conflict_attribution(self):
        """With two conflicting packs installed, uv-compile fails and names both."""
        _run_cm_cli("install", REPO_TEST1)
        _run_cm_cli("install", REPO_TEST2)

        proc = _run_cm_cli("uv-compile")
        output = proc.stdout + proc.stderr

        assert proc.returncode != 0
        assert "Conflicting packages (by node pack):" in output
        assert PACK_TEST1 in output
        assert PACK_TEST2 in output
class TestRestoreDependencies:
    """End-to-end coverage for `cm-cli restore-dependencies --uv-compile`."""

    def test_restore_dependencies_with_uv_compile(self):
        """restore-dependencies --uv-compile invokes the resolver afterwards."""
        _run_cm_cli("install", REPO_TEST1)
        assert _pack_exists(PACK_TEST1)

        proc = _run_cm_cli("restore-dependencies", "--uv-compile")
        output = proc.stdout + proc.stderr
        assert "Resolving dependencies" in output
class TestConflictAttributionDetail:
    """Inspect the detail of the conflict-attribution output."""

    def test_both_packs_and_specs_shown(self):
        """The attribution block names the packs AND their exact version specs."""
        _run_cm_cli("install", REPO_TEST1)
        _run_cm_cli("install", REPO_TEST2)

        proc = _run_cm_cli("uv-compile")
        output = proc.stdout + proc.stderr

        # Processed attribution must show exact version specs (not raw uv error).
        assert "Conflicting packages (by node pack):" in output
        assert "ansible==9.13.0" in output
        assert "ansible-core==2.14.0" in output
        # Both pack names must be present in the attribution block.
        assert PACK_TEST1 in output
        assert PACK_TEST2 in output
325
tests/test_nightly_cnr_fallback.py
Normal file
325
tests/test_nightly_cnr_fallback.py
Normal file
@ -0,0 +1,325 @@
|
|||||||
|
"""Unit tests for CNR fallback in install_by_id nightly path and getattr guard.
|
||||||
|
|
||||||
|
Tests two targeted bug fixes:
|
||||||
|
1. install_by_id nightly: falls back to cnr_map when custom_nodes lookup fails
|
||||||
|
2. do_uninstall/do_disable: getattr guard prevents AttributeError on Union mismatch
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import types
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Minimal stubs — avoid importing the full ComfyUI runtime
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class _ManagedResult:
|
||||||
|
"""Minimal ManagedResult stub matching glob/manager_core.py."""
|
||||||
|
|
||||||
|
def __init__(self, action):
|
||||||
|
self.action = action
|
||||||
|
self.result = True
|
||||||
|
self.msg = None
|
||||||
|
self.target = None
|
||||||
|
|
||||||
|
def fail(self, msg):
|
||||||
|
self.result = False
|
||||||
|
self.msg = msg
|
||||||
|
return self
|
||||||
|
|
||||||
|
def with_target(self, target):
|
||||||
|
self.target = target
|
||||||
|
return self
|
||||||
|
|
||||||
|
|
||||||
|
class _NormalizedKeyDict:
|
||||||
|
"""Minimal NormalizedKeyDict stub matching glob/manager_core.py."""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self._store = {}
|
||||||
|
self._key_map = {}
|
||||||
|
|
||||||
|
def _normalize_key(self, key):
|
||||||
|
return key.strip().lower() if isinstance(key, str) else key
|
||||||
|
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
norm = self._normalize_key(key)
|
||||||
|
self._key_map[norm] = key
|
||||||
|
self._store[key] = value
|
||||||
|
|
||||||
|
def __getitem__(self, key):
|
||||||
|
norm = self._normalize_key(key)
|
||||||
|
return self._store[self._key_map[norm]]
|
||||||
|
|
||||||
|
def __contains__(self, key):
|
||||||
|
return self._normalize_key(key) in self._key_map
|
||||||
|
|
||||||
|
def get(self, key, default=None):
|
||||||
|
return self[key] if key in self else default
|
||||||
|
|
||||||
|
|
||||||
|
# ===================================================================
|
||||||
|
# Test 1: CNR fallback in install_by_id nightly path
|
||||||
|
# ===================================================================
|
||||||
|
|
||||||
|
|
||||||
|
class TestNightlyCnrFallback:
    """install_by_id with version_spec='nightly' should fall back to cnr_map
    when custom_nodes lookup returns None for the node_id."""

    def _make_manager(self, cnr_map_entries=None, custom_nodes_entries=None):
        """Build a minimal UnifiedManager-like namespace exposing only the
        pieces the nightly fallback logic touches."""
        manager = types.SimpleNamespace()

        # Populate the (case-insensitive) CNR map.
        manager.cnr_map = _NormalizedKeyDict()
        for key, value in (cnr_map_entries or {}).items():
            manager.cnr_map[key] = value

        # Populate the nightly manifest lookup table.
        manifest = _NormalizedKeyDict()
        for key, value in (custom_nodes_entries or {}).items():
            manifest[key] = value

        async def get_custom_nodes(channel=None, mode=None):
            return manifest

        manager.get_custom_nodes = get_custom_nodes

        # The node is treated as not-installed in every scenario.
        manager.is_enabled = lambda *args, **kwargs: False
        manager.is_disabled = lambda *args, **kwargs: False

        return manager

    @staticmethod
    async def _run_nightly_lookup(mgr, node_id, channel='default', mode='remote'):
        """Replay the nightly-lookup logic from install_by_id.

        Mirrors lines ~1407-1431 of glob/manager_core.py so the CNR
        fallback path can be exercised in isolation.
        """
        version_spec = 'nightly'

        custom_nodes = await mgr.get_custom_nodes(channel, mode)
        the_node = custom_nodes.get(node_id)

        if the_node is not None:
            # Direct hit in the nightly manifest — use its repository URL.
            return the_node['repository']

        # Fallback for nightly only: use repository URL from the CNR map.
        if version_spec == 'nightly':
            cnr_fallback = mgr.cnr_map.get(node_id)
            if cnr_fallback is not None and cnr_fallback.get('repository'):
                return cnr_fallback['repository']

        return _ManagedResult('install').fail(
            f"Node '{node_id}@{version_spec}' not found in [{channel}, {mode}]"
        )

    def test_fallback_to_cnr_map_when_custom_nodes_missing(self):
        """Node absent from custom_nodes but present in cnr_map -> uses cnr_map repo URL."""
        manager = self._make_manager(
            cnr_map_entries={
                'my-test-pack': {
                    'id': 'my-test-pack',
                    'repository': 'https://github.com/test/my-test-pack',
                    'publisher': 'testuser',
                },
            },
            custom_nodes_entries={},  # empty — node not in nightly manifest
        )

        outcome = asyncio.run(self._run_nightly_lookup(manager, 'my-test-pack'))
        assert outcome == 'https://github.com/test/my-test-pack'

    def test_fallback_fails_when_cnr_map_also_missing(self):
        """Node absent from both custom_nodes and cnr_map -> ManagedResult.fail."""
        manager = self._make_manager(cnr_map_entries={}, custom_nodes_entries={})

        outcome = asyncio.run(self._run_nightly_lookup(manager, 'nonexistent-pack'))
        assert isinstance(outcome, _ManagedResult)
        assert outcome.result is False
        assert 'nonexistent-pack@nightly' in outcome.msg

    def test_fallback_fails_when_cnr_entry_has_no_repository(self):
        """Node in cnr_map but repository is None/empty -> ManagedResult.fail."""
        manager = self._make_manager(
            cnr_map_entries={
                'no-repo-pack': {
                    'id': 'no-repo-pack',
                    'repository': None,
                    'publisher': 'testuser',
                },
            },
            custom_nodes_entries={},
        )

        outcome = asyncio.run(self._run_nightly_lookup(manager, 'no-repo-pack'))
        assert isinstance(outcome, _ManagedResult)
        assert outcome.result is False

    def test_fallback_fails_when_cnr_entry_has_empty_repository(self):
        """Node in cnr_map but repository is '' -> ManagedResult.fail (truthy check)."""
        manager = self._make_manager(
            cnr_map_entries={
                'empty-repo-pack': {
                    'id': 'empty-repo-pack',
                    'repository': '',
                    'publisher': 'testuser',
                },
            },
            custom_nodes_entries={},
        )

        outcome = asyncio.run(self._run_nightly_lookup(manager, 'empty-repo-pack'))
        assert isinstance(outcome, _ManagedResult)
        assert outcome.result is False

    def test_direct_custom_nodes_hit_skips_cnr_fallback(self):
        """Node present in custom_nodes -> uses custom_nodes directly, no fallback needed."""
        manager = self._make_manager(
            cnr_map_entries={
                'found-pack': {
                    'id': 'found-pack',
                    'repository': 'https://github.com/test/found-cnr',
                },
            },
            custom_nodes_entries={
                'found-pack': {
                    'repository': 'https://github.com/test/found-custom',
                    'files': ['https://github.com/test/found-custom'],
                },
            },
        )

        outcome = asyncio.run(self._run_nightly_lookup(manager, 'found-pack'))
        # The custom_nodes repo URL wins; cnr_map is never consulted.
        assert outcome == 'https://github.com/test/found-custom'

    def test_unknown_version_spec_does_not_use_cnr_fallback(self):
        """version_spec='unknown' path should NOT use cnr_map fallback."""
        manager = self._make_manager(
            cnr_map_entries={
                'unknown-pack': {
                    'id': 'unknown-pack',
                    'repository': 'https://github.com/test/unknown-pack',
                },
            },
            custom_nodes_entries={},
        )

        async def run_unknown_lookup():
            version_spec = 'unknown'
            custom_nodes = await manager.get_custom_nodes()
            the_node = custom_nodes.get('unknown-pack')

            if the_node is not None:
                return the_node['files'][0]

            if version_spec == 'nightly':
                # Never taken for 'unknown' — the fallback is nightly-only.
                cnr_fallback = manager.cnr_map.get('unknown-pack')
                if cnr_fallback is not None and cnr_fallback.get('repository'):
                    return cnr_fallback['repository']

            # Fall through to the error path for 'unknown'.
            return _ManagedResult('install').fail(
                f"Node 'unknown-pack@{version_spec}' not found"
            )

        outcome = asyncio.run(run_unknown_lookup())
        assert isinstance(outcome, _ManagedResult)
        assert outcome.result is False
        assert 'unknown' in outcome.msg

    def test_case_insensitive_cnr_map_lookup(self):
        """CNR map uses NormalizedKeyDict — lookup should be case-insensitive."""
        manager = self._make_manager(
            cnr_map_entries={
                'My-Test-Pack': {
                    'id': 'my-test-pack',
                    'repository': 'https://github.com/test/my-test-pack',
                },
            },
            custom_nodes_entries={},
        )

        outcome = asyncio.run(self._run_nightly_lookup(manager, 'my-test-pack'))
        assert outcome == 'https://github.com/test/my-test-pack'
|
||||||
|
# ===================================================================
|
||||||
|
# Test 2: getattr guard in do_uninstall / do_disable
|
||||||
|
# ===================================================================
|
||||||
|
|
||||||
|
|
||||||
|
class TestGetAttrGuard:
    """do_uninstall and do_disable read getattr(params, 'is_unknown', False)
    so a pydantic Union that matched UpdatePackParams (which has no
    is_unknown field) instead of UninstallPackParams/DisablePackParams
    cannot raise AttributeError."""

    def test_getattr_on_object_with_is_unknown(self):
        """Normal case: params has is_unknown -> returns its value."""
        payload = types.SimpleNamespace(node_name='test-pack', is_unknown=True)
        assert getattr(payload, 'is_unknown', False) is True

    def test_getattr_on_object_without_is_unknown(self):
        """Bug case: UpdatePackParams-like object (no is_unknown) -> returns False."""
        payload = types.SimpleNamespace(node_name='test-pack', node_ver='1.0.0')
        # A bare `payload.is_unknown` here would raise AttributeError.
        assert getattr(payload, 'is_unknown', False) is False

    def test_getattr_default_false_on_missing_attribute(self):
        """Minimal case: bare object carrying only node_name."""
        payload = types.SimpleNamespace(node_name='test-pack')
        assert getattr(payload, 'is_unknown', False) is False

    def test_pydantic_union_matching_demonstrates_bug(self):
        """Show why the guard is needed: an undiscriminated pydantic Union can
        match UpdatePackParams for uninstall/disable payloads."""
        from pydantic import BaseModel, Field
        from typing import Optional, Union

        class UpdateLike(BaseModel):
            node_name: str
            node_ver: Optional[str] = None

        class UninstallLike(BaseModel):
            node_name: str
            is_unknown: Optional[bool] = Field(False)

        # When the Union validates {"node_name": "foo"}, UpdateLike can win:
        # it has fewer required fields and node_name alone satisfies it.
        class TaskItem(BaseModel):
            params: Union[UpdateLike, UninstallLike]

        item = TaskItem(params={"node_name": "foo"})

        # Whichever member matched, getattr must yield a usable boolean —
        # exactly the scenario the production guard protects against.
        is_unknown = getattr(item.params, 'is_unknown', False)
        assert isinstance(is_unknown, bool)
@ -244,6 +244,26 @@ class TestCollectRequirements:
|
|||||||
deps = r.collect_requirements()
|
deps = r.collect_requirements()
|
||||||
assert len(deps.sources["numpy"]) == 2
|
assert len(deps.sources["numpy"]) == 2
|
||||||
|
|
||||||
|
def test_sources_stores_pack_path_and_spec_tuple(self, tmp_path):
|
||||||
|
"""sources entries must be (pack_path, pkg_spec) tuples."""
|
||||||
|
p = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
|
||||||
|
r = _resolver([p])
|
||||||
|
deps = r.collect_requirements()
|
||||||
|
entries = deps.sources["numpy"]
|
||||||
|
assert len(entries) == 1
|
||||||
|
pack_path, pkg_spec = entries[0]
|
||||||
|
assert pack_path == p
|
||||||
|
assert pkg_spec == "numpy>=1.20"
|
||||||
|
|
||||||
|
def test_sources_captures_spec_per_requester(self, tmp_path):
|
||||||
|
"""Each requester's exact spec is preserved independently."""
|
||||||
|
p1 = _make_node_pack(str(tmp_path), "pack_a", "torch>=2.1\n")
|
||||||
|
p2 = _make_node_pack(str(tmp_path), "pack_b", "torch<2.0\n")
|
||||||
|
r = _resolver([p1, p2])
|
||||||
|
deps = r.collect_requirements()
|
||||||
|
specs = {pkg_spec for _, pkg_spec in deps.sources["torch"]}
|
||||||
|
assert specs == {"torch>=2.1", "torch<2.0"}
|
||||||
|
|
||||||
|
|
||||||
# ===========================================================================
|
# ===========================================================================
|
||||||
# Input sanitization
|
# Input sanitization
|
||||||
@ -883,6 +903,106 @@ class TestResolveAndInstall:
|
|||||||
assert not result.success
|
assert not result.success
|
||||||
assert "compile failed" in result.error
|
assert "compile failed" in result.error
|
||||||
|
|
||||||
|
def test_compile_failure_result_includes_collected(self, tmp_path):
|
||||||
|
"""result.collected must be populated on compile failure for conflict attribution."""
|
||||||
|
p = _make_node_pack(str(tmp_path), "pack_a", "torch>=2.1\n")
|
||||||
|
r = _resolver([p])
|
||||||
|
|
||||||
|
with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
|
||||||
|
with mock.patch("subprocess.run", return_value=subprocess.CompletedProcess(
|
||||||
|
[], 1, stdout="",
|
||||||
|
stderr="error: Because torch>=2.1 conflicts with torch<2.0, no solution found.",
|
||||||
|
)):
|
||||||
|
result = r.resolve_and_install()
|
||||||
|
|
||||||
|
assert not result.success
|
||||||
|
assert result.collected is not None
|
||||||
|
assert result.lockfile is not None
|
||||||
|
assert result.lockfile.conflicts # conflict lines present for attribution
|
||||||
|
|
||||||
|
def test_conflict_attribution_sources_filter(self, tmp_path):
|
||||||
|
"""Packages named in conflict lines can be looked up from sources."""
|
||||||
|
from comfyui_manager.common.unified_dep_resolver import attribute_conflicts
|
||||||
|
p1 = _make_node_pack(str(tmp_path), "pack_a", "torch>=2.1\n")
|
||||||
|
p2 = _make_node_pack(str(tmp_path), "pack_b", "torch<2.0\n")
|
||||||
|
r = _resolver([p1, p2])
|
||||||
|
|
||||||
|
conflict_text = "error: torch>=2.1 conflicts with torch<2.0"
|
||||||
|
|
||||||
|
with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
|
||||||
|
with mock.patch("subprocess.run", return_value=subprocess.CompletedProcess(
|
||||||
|
[], 1, stdout="", stderr=conflict_text,
|
||||||
|
)):
|
||||||
|
result = r.resolve_and_install()
|
||||||
|
|
||||||
|
assert not result.success
|
||||||
|
assert result.collected is not None
|
||||||
|
attributed = attribute_conflicts(result.collected.sources, result.lockfile.conflicts)
|
||||||
|
assert "torch" in attributed
|
||||||
|
specs = {spec for _, spec in attributed["torch"]}
|
||||||
|
assert specs == {"torch>=2.1", "torch<2.0"}
|
||||||
|
|
||||||
|
def test_conflict_attribution_no_false_positive_on_underscore_prefix(self, tmp_path):
|
||||||
|
"""'torch' must NOT match 'torch_audio' in conflict text (underscore boundary)."""
|
||||||
|
from comfyui_manager.common.unified_dep_resolver import attribute_conflicts
|
||||||
|
p = _make_node_pack(str(tmp_path), "pack_a", "torch>=2.1\n")
|
||||||
|
r = _resolver([p])
|
||||||
|
|
||||||
|
conflict_text = "error: torch_audio>=2.1 conflicts with torch_audio<2.0"
|
||||||
|
|
||||||
|
with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
|
||||||
|
with mock.patch("subprocess.run", return_value=subprocess.CompletedProcess(
|
||||||
|
[], 1, stdout="", stderr=conflict_text,
|
||||||
|
)):
|
||||||
|
result = r.resolve_and_install()
|
||||||
|
|
||||||
|
assert not result.success
|
||||||
|
assert result.collected is not None
|
||||||
|
attributed = attribute_conflicts(result.collected.sources, result.lockfile.conflicts)
|
||||||
|
# 'torch' should NOT match: conflict only mentions 'torch_audio'
|
||||||
|
assert "torch" not in attributed
|
||||||
|
|
||||||
|
def test_conflict_attribution_no_false_positive_on_prefix_match(self, tmp_path):
|
||||||
|
"""'torch' must NOT match 'torchvision' in conflict text (word boundary)."""
|
||||||
|
from comfyui_manager.common.unified_dep_resolver import attribute_conflicts
|
||||||
|
p = _make_node_pack(str(tmp_path), "pack_a", "torch>=2.1\n")
|
||||||
|
r = _resolver([p])
|
||||||
|
|
||||||
|
conflict_text = "error: torchvision>=0.16 conflicts with torchvision<0.15"
|
||||||
|
|
||||||
|
with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
|
||||||
|
with mock.patch("subprocess.run", return_value=subprocess.CompletedProcess(
|
||||||
|
[], 1, stdout="", stderr=conflict_text,
|
||||||
|
)):
|
||||||
|
result = r.resolve_and_install()
|
||||||
|
|
||||||
|
assert not result.success
|
||||||
|
assert result.collected is not None
|
||||||
|
attributed = attribute_conflicts(result.collected.sources, result.lockfile.conflicts)
|
||||||
|
# 'torch' should NOT appear: conflict only mentions 'torchvision'
|
||||||
|
assert "torch" not in attributed
|
||||||
|
|
||||||
|
def test_conflict_attribution_hyphen_underscore_normalization(self, tmp_path):
|
||||||
|
"""Packages stored with hyphens match conflict text using underscores."""
|
||||||
|
from comfyui_manager.common.unified_dep_resolver import attribute_conflicts
|
||||||
|
p = _make_node_pack(str(tmp_path), "pack_a", "torch-audio>=2.1\n")
|
||||||
|
r = _resolver([p])
|
||||||
|
|
||||||
|
# uv may print 'torch_audio' (underscore) in conflict output
|
||||||
|
conflict_text = "error: torch_audio>=2.1 conflicts with torch_audio<2.0"
|
||||||
|
|
||||||
|
with mock.patch.object(r, "_get_uv_cmd", return_value=["uv"]):
|
||||||
|
with mock.patch("subprocess.run", return_value=subprocess.CompletedProcess(
|
||||||
|
[], 1, stdout="", stderr=conflict_text,
|
||||||
|
)):
|
||||||
|
result = r.resolve_and_install()
|
||||||
|
|
||||||
|
assert not result.success
|
||||||
|
assert result.collected is not None
|
||||||
|
attributed = attribute_conflicts(result.collected.sources, result.lockfile.conflicts)
|
||||||
|
# _extract_package_name normalizes 'torch-audio' → 'torch_audio'; uv uses underscores too
|
||||||
|
assert "torch_audio" in attributed
|
||||||
|
|
||||||
def test_full_success_pipeline(self, tmp_path):
|
def test_full_success_pipeline(self, tmp_path):
|
||||||
p = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
|
p = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
|
||||||
r = _resolver([p])
|
r = _resolver([p])
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user