feat(cli): expand --uv-compile to all node management commands with conflict attribution

Add --uv-compile flag to reinstall, update, fix, restore-snapshot,
restore-dependencies, and install-deps commands. Each skips per-node
pip installs and runs batch uv pip compile after all operations.

Change CollectedDeps.sources type to dict[str, list[tuple[str, str]]]
to store (pack_path, pkg_spec) per requester. On resolution failure,
_run_unified_resolve() cross-references conflict packages with sources
using word-boundary regex and displays which node packs requested each
conflicting package.

Update EN/KO user docs and DESIGN/PRD developer docs to cover the
expanded commands and conflict attribution output. Strengthen unit
tests for sources tuple format and compile failure attribution.
Bump version to 4.1b3.
This commit is contained in:
Dr.Lt.Data 2026-03-10 23:43:44 +09:00
parent f042d73b72
commit e0f8e653c7
8 changed files with 473 additions and 32 deletions

View File

@ -729,6 +729,14 @@ def reinstall(
help="Skip installing any Python dependencies",
),
] = False,
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After reinstalling, batch-resolve all dependencies via uv pip compile",
),
] = False,
user_directory: str = typer.Option(
None,
help="user directory"
@ -736,11 +744,34 @@ def reinstall(
):
cmd_ctx.set_user_directory(user_directory)
cmd_ctx.set_channel_mode(channel, mode)
cmd_ctx.set_no_deps(no_deps)
if uv_compile and no_deps:
print("[bold red]--uv-compile and --no-deps are mutually exclusive.[/bold red]")
raise typer.Exit(1)
if uv_compile:
cmd_ctx.set_no_deps(True)
else:
cmd_ctx.set_no_deps(no_deps)
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
for_each_nodes(nodes, act=reinstall_node)
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
@app.command(help="Uninstall custom nodes")
@ -785,10 +816,21 @@ def update(
None,
help="user directory"
),
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After updating, batch-resolve all dependencies via uv pip compile",
),
] = False,
):
cmd_ctx.set_user_directory(user_directory)
cmd_ctx.set_channel_mode(channel, mode)
if uv_compile:
cmd_ctx.set_no_deps(True)
if 'all' in nodes:
asyncio.run(auto_save_snapshot())
@ -800,7 +842,22 @@ def update(
break
update_parallel(nodes)
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
@app.command(help="Disable custom nodes")
@ -886,16 +943,42 @@ def fix(
None,
help="user directory"
),
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After fixing, batch-resolve all dependencies via uv pip compile",
),
] = False,
):
cmd_ctx.set_user_directory(user_directory)
cmd_ctx.set_channel_mode(channel, mode)
if uv_compile:
cmd_ctx.set_no_deps(True)
if 'all' in nodes:
asyncio.run(auto_save_snapshot())
pip_fixer = manager_util.PIPFixer(manager_util.get_installed_packages(), comfy_path, context.manager_files_path)
for_each_nodes(nodes, fix_node, allow_all=True)
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
@app.command("show-versions", help="Show all available versions of the node")
@ -1092,7 +1175,7 @@ def save_snapshot(
@app.command("restore-snapshot", help="Restore snapshot from snapshot file")
def restore_snapshot(
snapshot_name: str,
snapshot_name: str,
pip_non_url: Optional[bool] = typer.Option(
default=None,
show_default=False,
@ -1118,13 +1201,24 @@ def restore_snapshot(
restore_to: Optional[str] = typer.Option(
None,
help="Manually specify the installation path for the custom node. Ignore user directory."
)
),
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After restoring, batch-resolve all dependencies via uv pip compile",
),
] = False,
):
cmd_ctx.set_user_directory(user_directory)
if restore_to:
cmd_ctx.update_custom_nodes_dir(restore_to)
if uv_compile:
cmd_ctx.set_no_deps(True)
extras = []
if pip_non_url:
extras.append('--pip-non-url')
@ -1151,8 +1245,25 @@ def restore_snapshot(
except Exception:
print("[bold red]ERROR: Failed to restore snapshot.[/bold red]")
traceback.print_exc()
if uv_compile:
pip_fixer.fix_broken()
raise typer.Exit(code=1)
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
@app.command(
@ -1162,10 +1273,21 @@ def restore_dependencies(
user_directory: str = typer.Option(
None,
help="user directory"
)
),
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After restoring, batch-resolve all dependencies via uv pip compile",
),
] = False,
):
cmd_ctx.set_user_directory(user_directory)
if uv_compile:
cmd_ctx.set_no_deps(True)
node_paths = []
for base_path in cmd_ctx.get_custom_nodes_paths():
@ -1181,9 +1303,24 @@ def restore_dependencies(
for x in node_paths:
print("----------------------------------------------------------------------------------------------------")
print(f"Restoring [{i}/{total}]: {x}")
unified_manager.execute_install_script('', x, instant_execution=True)
unified_manager.execute_install_script('', x, instant_execution=True, no_deps=bool(uv_compile))
i += 1
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
@app.command(
@ -1224,9 +1361,21 @@ def install_deps(
None,
help="user directory"
),
uv_compile: Annotated[
Optional[bool],
typer.Option(
"--uv-compile",
show_default=False,
help="After installing, batch-resolve all dependencies via uv pip compile",
),
] = False,
):
cmd_ctx.set_user_directory(user_directory)
cmd_ctx.set_channel_mode(channel, mode)
if uv_compile:
cmd_ctx.set_no_deps(True)
asyncio.run(auto_save_snapshot())
if not os.path.exists(deps):
@ -1246,10 +1395,25 @@ def install_deps(
if state == 'installed':
continue
elif state == 'not-installed':
asyncio.run(core.gitclone_install(k, instant_execution=True))
asyncio.run(core.gitclone_install(k, instant_execution=True, no_deps=bool(uv_compile)))
else: # disabled
core.gitclone_set_active([k], False)
pip_fixer.fix_broken()
if uv_compile:
try:
_run_unified_resolve()
except ImportError as e:
print(f"[bold red]Failed to import unified_dep_resolver: {e}[/bold red]")
raise typer.Exit(1)
except typer.Exit:
raise
except Exception as e:
print(f"[bold red]Batch resolution failed: {e}[/bold red]")
raise typer.Exit(1)
finally:
pip_fixer.fix_broken()
else:
pip_fixer.fix_broken()
print("Dependency installation and activation complete.")
@ -1295,6 +1459,20 @@ def _run_unified_resolve():
print("[bold green]Resolution complete (no deps needed).[/bold green]")
else:
print(f"[bold red]Resolution failed: {result.error}[/bold red]")
# Show which node packs requested each conflicting package.
if result.lockfile and result.lockfile.conflicts and result.collected:
conflict_text = "\n".join(result.lockfile.conflicts).lower().replace("-", "_")
attributed = {
pkg: reqs
for pkg, reqs in result.collected.sources.items()
if re.search(r'(?<![a-z0-9_])' + re.escape(pkg.lower().replace("-", "_")) + r'(?![a-z0-9_])', conflict_text)
}
if attributed:
print("[bold yellow]Conflicting packages (by node pack):[/bold yellow]")
for pkg_name, requesters in sorted(attributed.items()):
print(f" [yellow]{pkg_name}[/yellow]:")
for pack_path, pkg_spec in requesters:
print(f" {os.path.basename(pack_path)}{pkg_spec}")
raise typer.Exit(1)

View File

@ -56,7 +56,8 @@ class CollectedDeps:
"""Aggregated dependency collection result."""
requirements: list[PackageRequirement] = field(default_factory=list)
skipped: list[tuple[str, str]] = field(default_factory=list)
sources: dict[str, list[str]] = field(default_factory=dict)
sources: dict[str, list[tuple[str, str]]] = field(default_factory=dict)
"""pkg_name → [(pack_path, pkg_spec), ...] — tracks which node packs request each package."""
extra_index_urls: list[str] = field(default_factory=list)
@ -275,7 +276,7 @@ class UnifiedDepResolver:
"""Collect dependencies from all node packs."""
requirements: list[PackageRequirement] = []
skipped: list[tuple[str, str]] = []
sources: dict[str, list[str]] = defaultdict(list)
sources: defaultdict[str, list[tuple[str, str]]] = defaultdict(list)
extra_index_urls: list[str] = []
# Snapshot installed packages once to avoid repeated subprocess calls.
@ -362,7 +363,7 @@ class UnifiedDepResolver:
requirements.append(
PackageRequirement(name=pkg_name, spec=pkg_spec, source=pack_path)
)
sources[pkg_name].append(pack_path)
sources[pkg_name].append((pack_path, pkg_spec))
# Commit staged index URLs only after all validation passed.
if pending_urls:

View File

@ -149,7 +149,8 @@ class CollectedDeps:
"""All collected dependencies"""
requirements: list[PackageRequirement] # Collected deps (duplicates allowed, uv resolves)
skipped: list[tuple[str, str]] # (package_name, skip_reason)
sources: dict[str, list[str]] # {package_name: [source_node_packs]}
sources: dict[str, list[tuple[str, str]]] # {package_name: [(pack_path, pkg_spec), ...]}
"""pkg_name → [(pack_path, pkg_spec), ...] — tracks which node packs request each package."""
extra_index_urls: list[str] # Additional index URLs separated from --index-url entries
@dataclass
@ -262,7 +263,7 @@ def collect_requirements(self) -> CollectedDeps:
source=path,
)
requirements.append(req)
sources[pkg_name].append(path)
sources[pkg_name].append((path, pkg_spec))
return CollectedDeps(
requirements=requirements,
@ -449,7 +450,7 @@ if os.path.exists(requirements_path) and not _unified_resolver_succeeded:
### 4.1.6 CLI Integration
Two entry points expose the unified resolver in `cm_cli`:
Multiple entry points expose the unified resolver in `cm_cli`:
#### 4.1.6.1 Standalone Command: `cm_cli uv-compile`
@ -478,19 +479,53 @@ When `--uv-compile` is set:
This differs from per-node pip install: instead of resolving each node pack's
`requirements.txt` independently, all deps are compiled together to avoid conflicts.
#### 4.1.6.3 Additional `--uv-compile` Commands
The following commands follow the same `no_deps` + batch-resolve pattern as `install --uv-compile`:
`cmd_ctx.set_no_deps(True)` is set before node operations, then `_run_unified_resolve()`
runs at the end via `try/finally` with `PIPFixer.fix_broken()`.
| Command | Operation |
|---------|-----------|
| `cm_cli reinstall --uv-compile` | Reinstall nodes then batch-resolve |
| `cm_cli update --uv-compile` | Update nodes then batch-resolve |
| `cm_cli fix --uv-compile` | Fix node dependencies then batch-resolve |
| `cm_cli restore-snapshot --uv-compile` | Restore snapshot then batch-resolve |
| `cm_cli restore-dependencies --uv-compile` | Restore all node deps then batch-resolve |
| `cm_cli install-deps <deps.json> --uv-compile` | Install from deps spec file then batch-resolve |
> **`reinstall` only**: Has `--uv-compile` / `--no-deps` mutual exclusion check.
> Both skip per-node pip, but `--no-deps` skips permanently while `--uv-compile` also
> triggers batch resolution after all nodes are processed.
>
> **`restore-snapshot` only**: Has an additional pre-resolution exception guard — if the
> snapshot restore itself fails (before `_run_unified_resolve()` is reached),
> `PIPFixer.fix_broken()` runs in the exception handler before exit. The `try/finally`
> applies to the `_run_unified_resolve()` call. See dec_7 for rationale.
#### Shared Design Decisions
- **Uses real `cm_global` values**: Unlike the startup path (4.1.3) which passes empty
blacklist/overrides, CLI commands pass `cm_global.pip_blacklist`,
`cm_global.pip_overrides`, and `cm_global.pip_downgrade_blacklist` — already
initialized at `cm_cli/__main__.py` module scope (lines 45-60).
initialized at `cm_cli/__main__.py` module scope.
- **No `_unified_resolver_succeeded` flag**: Not needed — these are one-shot commands,
not startup gates.
- **Shared helper**: Both entry points delegate to `_run_unified_resolve()` which
- **Shared helper**: All entry points delegate to `_run_unified_resolve()` which
handles resolver instantiation, execution, and result reporting.
- **Error handling**: `UvNotAvailableError` / `ImportError` → exit 1 with message.
Both entry points use `try/finally` to guarantee `PIPFixer.fix_broken()` runs
regardless of resolution outcome.
All entry points guarantee `PIPFixer.fix_broken()` runs regardless of outcome —
via `try/finally` around `_run_unified_resolve()`. `restore-snapshot` additionally
calls `fix_broken()` in the snapshot restore exception handler (before
`_run_unified_resolve()` is reached), per dec_7.
- **Conflict attribution output**: When resolution fails and `result.lockfile.conflicts`
is non-empty, `_run_unified_resolve()` cross-references conflict package names with
`CollectedDeps.sources` to identify which node packs requested each conflicting package:
- Normalization: both sources keys and conflict text apply `.lower().replace("-", "_")`
- Word-boundary regex `(?<![a-z0-9_])pkg(?![a-z0-9_])` prevents false-positive prefix
matches (e.g., `torch` does NOT match `torch_audio` or `torchvision`)
- Output format: sorted by package name, each entry lists `pack_basename → pkg_spec`
per requester (using `CollectedDeps.sources` tuple values `(pack_path, pkg_spec)`)
**Node pack discovery**: Uses `cmd_ctx.get_custom_nodes_paths()` → `collect_node_pack_paths()`,
which is the CLI-native path resolution (respects `--user-directory` and `folder_paths`).

View File

@ -329,7 +329,7 @@ User requests installation of node packs A and B nearly simultaneously from UI
## 7. Future Extensions
- ~~**`cm_global` integration** [DONE]: `cm_cli uv-compile` and `cm_cli install --uv-compile` pass real `cm_global` values. Startup path (`prestartup_script.py`) still passes empty by design~~
- ~~**`cm_global` integration** [DONE]: All `--uv-compile` CLI commands (`uv-compile`, `install`, `reinstall`, `update`, `fix`, `restore-snapshot`, `restore-dependencies`, `install-deps`) pass real `cm_global` values. Startup path (`prestartup_script.py`) still passes empty by design~~
- Lockfile caching: Reuse for identical node pack configurations
- Pre-install dependency conflict validation API: Check compatibility before installation
- Dependency tree visualization: Display dependency relationships to users
@ -355,3 +355,9 @@ User requests installation of node packs A and B nearly simultaneously from UI
| Legacy `execute_install_script()` (2 locations) | `legacy/manager_core.py` | ❌ No | Legacy paths |
| `cm_cli uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Standalone CLI batch resolution (with `cm_global` values) |
| `cm_cli install --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after all installs |
| `cm_cli reinstall --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after all reinstalls; mutually exclusive with `--no-deps` |
| `cm_cli update --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped during updates, batch resolution after |
| `cm_cli fix --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped during dep fix, batch resolution after |
| `cm_cli restore-snapshot --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped during restore, batch resolution after |
| `cm_cli restore-dependencies --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after all node deps restored |
| `cm_cli install-deps --uv-compile` | `cm_cli/__main__.py` | ✅ Yes | Per-node pip skipped, batch resolution after deps-spec install |

View File

@ -11,11 +11,15 @@ cm-cli [OPTIONS]
OPTIONS:
[install|reinstall|uninstall|update|disable|enable|fix] node_name ... ?[--channel <channel name>] ?[--mode [remote|local|cache]]
[install|reinstall|update|fix] node_name ... ?[--uv-compile]
[update|disable|enable|fix] all ?[--channel <channel name>] ?[--mode [remote|local|cache]]
[update|fix] all ?[--uv-compile]
[simple-show|show] [installed|enabled|not-installed|disabled|all|snapshot|snapshot-list] ?[--channel <channel name>] ?[--mode [remote|local|cache]]
save-snapshot ?[--output <snapshot .json/.yaml>]
restore-snapshot <snapshot .json/.yaml> ?[--pip-non-url] ?[--pip-non-local-url] ?[--pip-local-url]
restore-dependencies
restore-snapshot <snapshot .json/.yaml> ?[--pip-non-url] ?[--pip-non-local-url] ?[--pip-local-url] ?[--uv-compile]
restore-dependencies ?[--uv-compile]
install-deps <deps.json> ?[--channel <channel name>] ?[--mode [remote|local|cache]] ?[--uv-compile]
uv-compile
clear
```
@ -107,6 +111,22 @@ ComfyUI-Loopchain
* `enable`: Enables the specified custom nodes.
* `fix`: Attempts to fix dependencies for the specified custom nodes.
#### `--uv-compile` flag (`install`, `reinstall`, `update`, `fix`)
When `--uv-compile` is specified, per-node pip installs are skipped during node operations.
After all operations complete, `uv pip compile` resolves the full dependency graph in one batch.
* Requires `uv` to be installed.
* Prevents dependency conflicts between multiple node packs.
* On resolution failure, displays conflicting packages and which node packs requested them.
* `reinstall --uv-compile` is mutually exclusive with `--no-deps`.
```bash
cm-cli install ComfyUI-Impact-Pack ComfyUI-Inspire-Pack --uv-compile
cm-cli update all --uv-compile
cm-cli fix ComfyUI-Impact-Pack --uv-compile
```
### 4. Snapshot Management
* `cm-cli save-snapshot [--output <snapshot .json/.yaml>]`: Saves the current snapshot.
@ -122,12 +142,33 @@ ComfyUI-Loopchain
### 5. Dependency Restoration
`restore-dependencies`
`restore-dependencies ?[--uv-compile]`
* This command can be used if custom nodes are installed under the `ComfyUI/custom_nodes` path but their dependencies are not installed.
* It is useful when starting a new cloud instance, like Colab, where dependencies need to be reinstalled and installation scripts re-executed.
* It can also be utilized if ComfyUI is reinstalled and only the custom_nodes path has been backed up and restored.
* Use `--uv-compile` to skip per-node pip installs and resolve all dependencies in one batch instead.
### 6. Clear
### 6. Install from Dependency File
`install-deps <deps.json> ?[--channel <channel name>] ?[--mode [remote|local|cache]] ?[--uv-compile]`
* Installs custom nodes specified in a dependency spec file (`.json`) or workflow file (`.png`/`.json`).
* Use `--uv-compile` to batch-resolve all dependencies after installation instead of per-node pip.
### 7. uv-compile
`uv-compile ?[--user-directory <path>]`
* Batch-resolves and installs all custom node pack dependencies using `uv pip compile`.
* Useful for environment recovery or initial setup without starting ComfyUI.
* Requires `uv` to be installed.
```bash
cm-cli uv-compile
cm-cli uv-compile --user-directory /path/to/comfyui
```
### 8. Clear
In the GUI, installations, updates, or snapshot restorations are scheduled to execute the next time ComfyUI is launched. The `clear` command clears this scheduled state, ensuring no pre-execution actions are applied.

View File

@ -11,11 +11,15 @@ cm-cli [OPTIONS]
OPTIONS:
[install|reinstall|uninstall|update|disable|enable|fix] node_name ... ?[--channel <channel name>] ?[--mode [remote|local|cache]]
[install|reinstall|update|fix] node_name ... ?[--uv-compile]
[update|disable|enable|fix] all ?[--channel <channel name>] ?[--mode [remote|local|cache]]
[update|fix] all ?[--uv-compile]
[simple-show|show] [installed|enabled|not-installed|disabled|all|snapshot|snapshot-list] ?[--channel <channel name>] ?[--mode [remote|local|cache]]
save-snapshot ?[--output <snapshot .json/.yaml>]
restore-snapshot <snapshot .json/.yaml> ?[--pip-non-url] ?[--pip-non-local-url] ?[--pip-local-url]
restore-dependencies
restore-snapshot <snapshot .json/.yaml> ?[--pip-non-url] ?[--pip-non-local-url] ?[--pip-local-url] ?[--uv-compile]
restore-dependencies ?[--uv-compile]
install-deps <deps.json> ?[--channel <channel name>] ?[--mode [remote|local|cache]] ?[--uv-compile]
uv-compile
clear
```
@ -108,6 +112,21 @@ ComfyUI-Loopchain
* `enable`: 지정된 커스텀 노드들을 활성화합니다.
* `fix`: 지정된 커스텀 노드의 의존성을 고치기 위한 시도를 합니다.
#### `--uv-compile` 플래그 (`install`, `reinstall`, `update`, `fix`)
`--uv-compile` 플래그를 사용하면 노드별 pip 설치를 건너뛰고, 모든 작업이 완료된 후 `uv pip compile`로 전체 의존성을 한 번에 일괄 해결합니다.
* `uv`가 설치된 환경에서만 동작합니다.
* 여러 노드 팩 간의 의존성 충돌을 방지합니다.
* 해결 실패 시 충돌 패키지와 해당 패키지를 요청한 노드 팩 목록을 표시합니다.
* `reinstall --uv-compile`은 `--no-deps`와 동시에 사용할 수 없습니다.
```bash
cm-cli install ComfyUI-Impact-Pack ComfyUI-Inspire-Pack --uv-compile
cm-cli update all --uv-compile
cm-cli fix ComfyUI-Impact-Pack --uv-compile
```
### 4. 스냅샷 관리 기능
* `cm-cli save-snapshot ?[--output <snapshot .json/.yaml>]`: 현재의 snapshot을 저장합니다.
@ -123,13 +142,33 @@ ComfyUI-Loopchain
### 5. 의존성 설치
`restore-dependencies`
`restore-dependencies ?[--uv-compile]`
* `ComfyUI/custom_nodes` 하위 경로에 커스텀 노드들이 설치되어 있긴 하지만, 의존성이 설치되지 않은 경우 사용할 수 있습니다.
* Colab과 같이 cloud instance를 새로 시작하는 경우 의존성 재설치 및 설치 스크립트가 재실행되어야 하는 경우 사용합니다.
* ComfyUI를 재설치할 경우, custom_nodes 경로만 백업했다가 재설치할 경우 활용 가능합니다.
* `--uv-compile` 플래그를 사용하면 노드별 pip 설치를 건너뛰고 일괄 해결합니다.
### 6. 의존성 파일로 설치
### 6. clear
`install-deps <deps.json> ?[--channel <channel name>] ?[--mode [remote|local|cache]] ?[--uv-compile]`
* 의존성 spec 파일(`.json`) 또는 워크플로우 파일(`.png`/`.json`)에 명시된 커스텀 노드를 설치합니다.
* `--uv-compile` 플래그를 사용하면 모든 노드 설치 후 일괄 의존성 해결을 수행합니다.
### 7. uv-compile
`uv-compile ?[--user-directory <path>]`
* 설치된 모든 커스텀 노드 팩의 의존성을 `uv pip compile`로 일괄 해결하고 설치합니다.
* ComfyUI를 재시작하지 않고 의존성 환경을 복구하거나 초기 설정 시 활용할 수 있습니다.
* `uv`가 설치된 환경에서만 동작합니다.
```bash
cm-cli uv-compile
cm-cli uv-compile --user-directory /path/to/comfyui
```
### 8. clear
GUI에서 install, update를 하거나 snapshot을 restore하는 경우 예약을 통해서 다음번 ComfyUI를 실행할 경우 실행되는 구조입니다. `clear` 는 이런 예약 상태를 clear해서, 아무런 사전 실행이 적용되지 않도록 합니다.

View File

@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "comfyui-manager"
license = { text = "GPL-3.0-only" }
version = "4.1b2"
version = "4.1b3"
requires-python = ">= 3.9"
description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
readme = "README.md"

View File

@ -244,6 +244,26 @@ class TestCollectRequirements:
deps = r.collect_requirements()
assert len(deps.sources["numpy"]) == 2
def test_sources_stores_pack_path_and_spec_tuple(self, tmp_path):
    """sources entries must be (pack_path, pkg_spec) tuples."""
    pack = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
    resolver = _resolver([pack])
    collected = resolver.collect_requirements()
    numpy_entries = collected.sources["numpy"]
    # Exactly one requester, recorded as a (pack_path, pkg_spec) pair.
    assert len(numpy_entries) == 1
    assert numpy_entries[0] == (pack, "numpy>=1.20")
def test_sources_captures_spec_per_requester(self, tmp_path):
    """Each requester's exact spec is preserved independently."""
    pack_a = _make_node_pack(str(tmp_path), "pack_a", "torch>=2.1\n")
    pack_b = _make_node_pack(str(tmp_path), "pack_b", "torch<2.0\n")
    collected = _resolver([pack_a, pack_b]).collect_requirements()
    # Both requesters' specs must survive aggregation unchanged.
    recorded_specs = {entry[1] for entry in collected.sources["torch"]}
    assert recorded_specs == {"torch>=2.1", "torch<2.0"}
# ===========================================================================
# Input sanitization
@ -883,6 +903,127 @@ class TestResolveAndInstall:
assert not result.success
assert "compile failed" in result.error
def test_compile_failure_result_includes_collected(self, tmp_path):
    """result.collected must be populated on compile failure for conflict attribution."""
    pack = _make_node_pack(str(tmp_path), "pack_a", "torch>=2.1\n")
    resolver = _resolver([pack])
    # Simulate `uv pip compile` exiting non-zero with a conflict message.
    failed_proc = subprocess.CompletedProcess(
        [], 1, stdout="",
        stderr="error: Because torch>=2.1 conflicts with torch<2.0, no solution found.",
    )
    with mock.patch.object(resolver, "_get_uv_cmd", return_value=["uv"]), \
         mock.patch("subprocess.run", return_value=failed_proc):
        result = resolver.resolve_and_install()
    assert not result.success
    assert result.collected is not None
    assert result.lockfile is not None
    assert result.lockfile.conflicts  # conflict lines present for attribution
def test_conflict_attribution_sources_filter(self, tmp_path):
    """Packages named in conflict lines can be looked up from sources."""
    import re as _re
    pack_a = _make_node_pack(str(tmp_path), "pack_a", "torch>=2.1\n")
    pack_b = _make_node_pack(str(tmp_path), "pack_b", "torch<2.0\n")
    resolver = _resolver([pack_a, pack_b])
    failed_proc = subprocess.CompletedProcess(
        [], 1, stdout="",
        stderr="error: torch>=2.1 conflicts with torch<2.0",
    )
    with mock.patch.object(resolver, "_get_uv_cmd", return_value=["uv"]), \
         mock.patch("subprocess.run", return_value=failed_proc):
        result = resolver.resolve_and_install()
    assert not result.success
    assert result.collected is not None
    # Mirror the word-boundary attribution filter used in _run_unified_resolve().
    haystack = "\n".join(result.lockfile.conflicts).lower().replace("-", "_")
    attributed = {}
    for pkg, requesters in result.collected.sources.items():
        needle = _re.escape(pkg.lower().replace("-", "_"))
        if _re.search(r'(?<![a-z0-9_])' + needle + r'(?![a-z0-9_])', haystack):
            attributed[pkg] = requesters
    assert "torch" in attributed
    assert {spec for _, spec in attributed["torch"]} == {"torch>=2.1", "torch<2.0"}
def test_conflict_attribution_no_false_positive_on_underscore_prefix(self, tmp_path):
    """'torch' must NOT match 'torch_audio' in conflict text (underscore boundary)."""
    import re as _re
    pack = _make_node_pack(str(tmp_path), "pack_a", "torch>=2.1\n")
    resolver = _resolver([pack])
    failed_proc = subprocess.CompletedProcess(
        [], 1, stdout="",
        stderr="error: torch_audio>=2.1 conflicts with torch_audio<2.0",
    )
    with mock.patch.object(resolver, "_get_uv_cmd", return_value=["uv"]), \
         mock.patch("subprocess.run", return_value=failed_proc):
        result = resolver.resolve_and_install()
    assert not result.success
    assert result.collected is not None
    # Same word-boundary filter as _run_unified_resolve().
    haystack = "\n".join(result.lockfile.conflicts).lower().replace("-", "_")
    matched = [
        pkg for pkg in result.collected.sources
        if _re.search(
            r'(?<![a-z0-9_])' + _re.escape(pkg.lower().replace("-", "_")) + r'(?![a-z0-9_])',
            haystack,
        )
    ]
    # 'torch' should NOT match: conflict only mentions 'torch_audio'
    assert "torch" not in matched
def test_conflict_attribution_no_false_positive_on_prefix_match(self, tmp_path):
    """'torch' must NOT match 'torchvision' in conflict text (word boundary)."""
    import re as _re
    pack = _make_node_pack(str(tmp_path), "pack_a", "torch>=2.1\n")
    resolver = _resolver([pack])
    failed_proc = subprocess.CompletedProcess(
        [], 1, stdout="",
        stderr="error: torchvision>=0.16 conflicts with torchvision<0.15",
    )
    with mock.patch.object(resolver, "_get_uv_cmd", return_value=["uv"]), \
         mock.patch("subprocess.run", return_value=failed_proc):
        result = resolver.resolve_and_install()
    assert not result.success
    assert result.collected is not None
    # Same word-boundary filter as _run_unified_resolve().
    haystack = "\n".join(result.lockfile.conflicts).lower().replace("-", "_")
    matched = [
        pkg for pkg in result.collected.sources
        if _re.search(
            r'(?<![a-z0-9_])' + _re.escape(pkg.lower().replace("-", "_")) + r'(?![a-z0-9_])',
            haystack,
        )
    ]
    # 'torch' should NOT appear: conflict only mentions 'torchvision'
    assert "torch" not in matched
def test_conflict_attribution_hyphen_underscore_normalization(self, tmp_path):
    """Packages stored with hyphens match conflict text using underscores."""
    import re as _re
    pack = _make_node_pack(str(tmp_path), "pack_a", "torch-audio>=2.1\n")
    resolver = _resolver([pack])
    # uv may print 'torch_audio' (underscore) in conflict output
    failed_proc = subprocess.CompletedProcess(
        [], 1, stdout="",
        stderr="error: torch_audio>=2.1 conflicts with torch_audio<2.0",
    )
    with mock.patch.object(resolver, "_get_uv_cmd", return_value=["uv"]), \
         mock.patch("subprocess.run", return_value=failed_proc):
        result = resolver.resolve_and_install()
    assert not result.success
    assert result.collected is not None
    # Same word-boundary filter as _run_unified_resolve().
    haystack = "\n".join(result.lockfile.conflicts).lower().replace("-", "_")
    matched = [
        pkg for pkg in result.collected.sources
        if _re.search(
            r'(?<![a-z0-9_])' + _re.escape(pkg.lower().replace("-", "_")) + r'(?![a-z0-9_])',
            haystack,
        )
    ]
    # _extract_package_name normalizes 'torch-audio' → 'torch_audio'; uv uses underscores too
    assert "torch_audio" in matched
def test_full_success_pipeline(self, tmp_path):
p = _make_node_pack(str(tmp_path), "pack_a", "numpy>=1.20\n")
r = _resolver([p])