diff --git a/cm-cli.py b/cm-cli.py index 518461c5..93e96ffa 100644 --- a/cm-cli.py +++ b/cm-cli.py @@ -4,9 +4,9 @@ import traceback import json import asyncio import subprocess -import shutil import concurrent import threading +import yaml from typing import Optional import typer @@ -17,10 +17,12 @@ import git sys.path.append(os.path.dirname(__file__)) sys.path.append(os.path.join(os.path.dirname(__file__), "glob")) -import manager_core as core import cm_global +import manager_core as core +from manager_core import unified_manager +import cnr_utils -comfyui_manager_path = os.path.dirname(__file__) +comfyui_manager_path = os.path.abspath(os.path.dirname(__file__)) comfy_path = os.environ.get('COMFYUI_PATH') if comfy_path is None: @@ -78,9 +80,7 @@ read_downgrade_blacklist() # This is a preparation step for manager_core class Ctx: def __init__(self): self.channel = 'default' - self.mode = 'remote' - self.processed_install = set() - self.custom_node_map_cache = None + self.mode = 'cache' def set_channel_mode(self, channel, mode): if mode is not None: @@ -97,196 +97,143 @@ class Ctx: if channel is not None: self.channel = channel - def post_install(self, url): - try: - repository_name = url.split("/")[-1].strip() - repo_path = os.path.join(custom_nodes_path, repository_name) - repo_path = os.path.abspath(repo_path) - - requirements_path = os.path.join(repo_path, 'requirements.txt') - install_script_path = os.path.join(repo_path, 'install.py') - - if os.path.exists(requirements_path): - with (open(requirements_path, 'r', encoding="UTF-8", errors="ignore") as file): - for line in file: - package_name = core.remap_pip_package(line.strip()) - if package_name and not core.is_installed(package_name): - install_cmd = [sys.executable, "-m", "pip", "install", package_name] - output = subprocess.check_output(install_cmd, cwd=repo_path, text=True) - for msg_line in output.split('\n'): - if 'Requirement already satisfied:' in msg_line: - print('.', end='') - else: - print(msg_line) - - if os.path.exists(install_script_path) and f'{repo_path}/install.py' not in self.processed_install: - self.processed_install.add(f'{repo_path}/install.py') - install_cmd = [sys.executable, install_script_path] - output = subprocess.check_output(install_cmd, cwd=repo_path, text=True) - for msg_line in output.split('\n'): - if 'Requirement already satisfied:' in msg_line: - print('.', end='') - else: - print(msg_line) - - except Exception: - print(f"ERROR: Restoring '{url}' is failed.") - - def restore_dependencies(self): - node_paths = [os.path.join(custom_nodes_path, name) for name in os.listdir(custom_nodes_path) - if os.path.isdir(os.path.join(custom_nodes_path, name)) and not name.endswith('.disabled')] - - total = len(node_paths) - i = 1 - for x in node_paths: - print(f"----------------------------------------------------------------------------------------------------") - print(f"Restoring [{i}/{total}]: {x}") - self.post_install(x) - i += 1 - - def load_custom_nodes(self): - channel_dict = core.get_channel_dict() - if self.channel not in channel_dict: - print(f"[bold red]ERROR: Invalid channel is specified `--channel {self.channel}`[/bold red]", file=sys.stderr) - exit(1) - - if self.mode not in ['remote', 'local', 'cache']: - print(f"[bold red]ERROR: Invalid mode is specified `--mode {self.mode}`[/bold red]", file=sys.stderr) - exit(1) - - channel_url = channel_dict[self.channel] - - res = {} - json_obj = asyncio.run(core.get_data_by_mode(self.mode, 'custom-node-list.json', channel_url=channel_url)) - for x in 
json_obj['custom_nodes']: - for y in x['files']: - if 'github.com' in y and not (y.endswith('.py') or y.endswith('.js')): - repo_name = y.split('/')[-1] - res[repo_name] = (x, False) - - if 'id' in x: - if x['id'] not in res: - res[x['id']] = (x, True) - - return res - - def get_custom_node_map(self): - if self.custom_node_map_cache is not None: - return self.custom_node_map_cache - - self.custom_node_map_cache = self.load_custom_nodes() - - return self.custom_node_map_cache - - def lookup_node_path(self, node_name, robust=False): - if '..' in node_name: - print(f"\n[bold red]ERROR: Invalid node name '{node_name}'[/bold red]\n") - exit(2) - - custom_node_map = self.get_custom_node_map() - if node_name in custom_node_map: - node_url = custom_node_map[node_name][0]['files'][0] - repo_name = node_url.split('/')[-1] - node_path = os.path.join(custom_nodes_path, repo_name) - return node_path, custom_node_map[node_name][0] - elif robust: - node_path = os.path.join(custom_nodes_path, node_name) - return node_path, None - - print(f"\n[bold red]ERROR: Invalid node name '{node_name}'[/bold red]\n") - exit(2) + asyncio.run(unified_manager.reload(cache_mode=self.mode == 'cache')) + asyncio.run(unified_manager.load_nightly(self.channel, self.mode)) -cm_ctx = Ctx() +channel_ctx = Ctx() -def install_node(node_name, is_all=False, cnt_msg=''): - if core.is_valid_url(node_name): +def install_node(node_spec_str, is_all=False, cnt_msg=''): + if core.is_valid_url(node_spec_str): # install via urls - res = core.gitclone_install([node_name]) - if not res: - print(f"[bold red]ERROR: An error occurred while installing '{node_name}'.[/bold red]") + res = asyncio.run(core.gitclone_install(node_spec_str)) + if not res.result: + print(res.msg) + print(f"[bold red]ERROR: An error occurred while installing '{node_spec_str}'.[/bold red]") else: - print(f"{cnt_msg} [INSTALLED] {node_name:50}") + print(f"{cnt_msg} [INSTALLED] {node_spec_str:50}") else: - node_path, node_item = cm_ctx.lookup_node_path(node_name) + node_spec = unified_manager.resolve_node_spec(node_spec_str) - if os.path.exists(node_path): - if not is_all: - print(f"{cnt_msg} [ SKIPPED ] {node_name:50} => Already installed") - elif os.path.exists(node_path + '.disabled'): - enable_node(node_name) + if node_spec is None: + return + + node_name, version_spec, is_specified = node_spec + + # NOTE: install node doesn't allow update if version is not specified + if not is_specified: + version_spec = None + + res = asyncio.run(unified_manager.install_by_id(node_name, version_spec, channel_ctx.channel, channel_ctx.mode, instant_execution=True)) + + if res.action == 'skip': + print(f"{cnt_msg} [ SKIP ] {node_name:50} => Already installed") + elif res.action == 'enable': + print(f"{cnt_msg} [ ENABLED ] {node_name:50}") + elif res.action == 'install-git' and res.target == 'nightly': + print(f"{cnt_msg} [INSTALLED] {node_name:50}[NIGHTLY]") + elif res.action == 'install-git' and res.target == 'unknown': + print(f"{cnt_msg} [INSTALLED] {node_name:50}[UNKNOWN]") + elif res.action == 'install-cnr' and res.result: + print(f"{cnt_msg} [INSTALLED] {node_name:50}[{res.target}]") + elif res.action == 'switch-cnr' and res.result: + print(f"{cnt_msg} [INSTALLED] {node_name:50}[{res.target}]") + elif (res.action == 'switch-cnr' or res.action == 'install-cnr') and not res.result and node_name in unified_manager.cnr_map: + print(f"\nAvailable version of '{node_name}'") + show_versions(node_name) + print("") else: - res = core.gitclone_install(node_item['files'], 
instant_execution=True, msg_prefix=f"[{cnt_msg}] ") - if not res: - print(f"[bold red]ERROR: An error occurred while installing '{node_name}'.[/bold red]") - else: - print(f"{cnt_msg} [INSTALLED] {node_name:50}") + print(f"[bold red]ERROR: An error occurred while installing '{node_name}'.\n{res.msg}[/bold red]") -def reinstall_node(node_name, is_all=False, cnt_msg=''): - node_path, node_item = cm_ctx.lookup_node_path(node_name) +def reinstall_node(node_spec_str, is_all=False, cnt_msg=''): + node_spec = unified_manager.resolve_node_spec(node_spec_str) - if os.path.exists(node_path): - shutil.rmtree(node_path) - if os.path.exists(node_path + '.disabled'): - shutil.rmtree(node_path + '.disabled') + node_name, version_spec, _ = node_spec + unified_manager.unified_uninstall(node_name, version_spec == 'unknown') install_node(node_name, is_all=is_all, cnt_msg=cnt_msg) -def fix_node(node_name, is_all=False, cnt_msg=''): - node_path, node_item = cm_ctx.lookup_node_path(node_name, robust=True) +def fix_node(node_spec_str, is_all=False, cnt_msg=''): + node_spec = unified_manager.resolve_node_spec(node_spec_str, guess_mode='active') - files = node_item['files'] if node_item is not None else [node_path] + if node_spec is None: + if not is_all: + if unified_manager.resolve_node_spec(node_spec_str, guess_mode='inactive') is not None: + print(f"{cnt_msg} [ SKIPPED ]: {node_spec_str:50} => Disabled") + else: + print(f"{cnt_msg} [ SKIPPED ]: {node_spec_str:50} => Not installed") - if os.path.exists(node_path): - print(f"{cnt_msg} [ FIXING ]: {node_name:50} => Disabled") - res = core.gitclone_fix(files, instant_execution=True) - if not res: - print(f"ERROR: An error occurred while fixing '{node_name}'.") - elif not is_all and os.path.exists(node_path + '.disabled'): - print(f"{cnt_msg} [ SKIPPED ]: {node_name:50} => Disabled") - elif not is_all: - print(f"{cnt_msg} [ SKIPPED ]: {node_name:50} => Not installed") + return + + node_name, version_spec, _ = node_spec + + print(f"{cnt_msg} [ FIXING ]: {node_name:50}[{version_spec}]") + res = unified_manager.unified_fix(node_name, version_spec) + + if not res.result: + print(f"ERROR: f{res.msg}") -def uninstall_node(node_name, is_all=False, cnt_msg=''): - node_path, node_item = cm_ctx.lookup_node_path(node_name, robust=True) - - files = node_item['files'] if node_item is not None else [node_path] - - if os.path.exists(node_path) or os.path.exists(node_path + '.disabled'): - res = core.gitclone_uninstall(files) - if not res: - print(f"ERROR: An error occurred while uninstalling '{node_name}'.") - else: - print(f"{cnt_msg} [UNINSTALLED] {node_name:50}") +def uninstall_node(node_spec_str, is_all=False, cnt_msg=''): + spec = node_spec_str.split('@') + if len(spec) == 2 and spec[1] == 'unknown': + node_name = spec[0] + is_unknown = True else: + node_name = spec[0] + is_unknown = False + + res = unified_manager.unified_uninstall(node_name, is_unknown) + if len(spec) == 1 and res.action == 'skip' and not is_unknown: + res = unified_manager.unified_uninstall(node_name, True) + + if res.action == 'skip': print(f"{cnt_msg} [ SKIPPED ]: {node_name:50} => Not installed") + elif res.result: + print(f"{cnt_msg} [UNINSTALLED] {node_name:50}") + else: + print(f"ERROR: An error occurred while uninstalling '{node_name}'.") -def update_node(node_name, is_all=False, cnt_msg=''): - node_path, node_item = cm_ctx.lookup_node_path(node_name, robust=True) - files = node_item['files'] if node_item is not None else [node_path] +def update_node(node_spec_str, is_all=False, cnt_msg=''): + 
node_spec = unified_manager.resolve_node_spec(node_spec_str, 'active') - res = core.gitclone_update(files, skip_script=True, msg_prefix=f"[{cnt_msg}] ") - - if not res: - print(f"ERROR: An error occurred while updating '{node_name}'.") + if node_spec is None: + if unified_manager.resolve_node_spec(node_spec_str, 'inactive'): + print(f"{cnt_msg} [ SKIPPED ]: {node_spec_str:50} => Disabled") + else: + print(f"{cnt_msg} [ SKIPPED ]: {node_spec_str:50} => Not installed") return None - return node_path + node_name, version_spec, _ = node_spec + + res = unified_manager.unified_update(node_name, version_spec, return_postinstall=True) + + if not res.result: + print(f"ERROR: An error occurred while updating '{node_name}'.") + elif res.action == 'skip': + print(f"{cnt_msg} [ SKIPPED ]: {node_name:50} => {res.msg}") + else: + print(f"{cnt_msg} [ UPDATED ]: {node_name:50} => ({version_spec} -> {res.target})") + + return res.with_target(f'{node_name}@{res.target}') def update_parallel(nodes): is_all = False if 'all' in nodes: is_all = True - nodes = [x for x in cm_ctx.get_custom_node_map().keys() if os.path.exists(os.path.join(custom_nodes_path, x)) or os.path.exists(os.path.join(custom_nodes_path, x) + '.disabled')] - - nodes = [x for x in nodes if x.lower() not in ['comfy', 'comfyui', 'all']] + nodes = [] + for x in unified_manager.active_nodes.keys(): + nodes.append(x) + for x in unified_manager.unknown_active_nodes.keys(): + nodes.append(x+"@unknown") + else: + nodes = [x for x in nodes if x.lower() not in ['comfy', 'comfyui']] total = len(nodes) @@ -303,9 +250,9 @@ def update_parallel(nodes): i += 1 try: - node_path = update_node(x, is_all=is_all, cnt_msg=f'{i}/{total}') + res = update_node(x, is_all=is_all, cnt_msg=f'{i}/{total}') with lock: - processed.append(node_path) + processed.append(res) except Exception as e: print(f"ERROR: {e}") traceback.print_exc() @@ -315,12 +262,11 @@ def update_parallel(nodes): executor.submit(process_custom_node, item) i = 1 - for node_path in processed: - if node_path is None: - print(f"[{i}/{total}] Post update: ERROR") - else: - print(f"[{i}/{total}] Post update: {node_path}") - cm_ctx.post_install(node_path) + for res in processed: + if res is not None: + print(f"[{i}/{total}] Post update: {res.target}") + if res.postinstall is not None: + res.postinstall() i += 1 @@ -334,100 +280,158 @@ def update_comfyui(): print("ComfyUI is already up to date.") -def enable_node(node_name, is_all=False, cnt_msg=''): - if node_name == 'ComfyUI-Manager': +def enable_node(node_spec_str, is_all=False, cnt_msg=''): + if unified_manager.resolve_node_spec(node_spec_str, guess_mode='active') is not None: + print(f"{cnt_msg} [ SKIP ] {node_spec_str:50} => Already enabled") return - node_path, node_item = cm_ctx.lookup_node_path(node_name, robust=True) + node_spec = unified_manager.resolve_node_spec(node_spec_str, guess_mode='inactive') - if os.path.exists(node_path + '.disabled'): - current_name = node_path + '.disabled' - os.rename(current_name, node_path) + if node_spec is None: + print(f"{cnt_msg} [ SKIP ] {node_spec_str:50} => Not found") + return + + node_name, version_spec, _ = node_spec + + res = unified_manager.unified_enable(node_name, version_spec) + + if res.action == 'skip': + print(f"{cnt_msg} [ SKIP ] {node_name:50} => {res.msg}") + elif res.result: print(f"{cnt_msg} [ENABLED] {node_name:50}") - elif os.path.exists(node_path): - print(f"{cnt_msg} [SKIPPED] {node_name:50} => Already enabled") - elif not is_all: - print(f"{cnt_msg} [SKIPPED] {node_name:50} => Not 
installed") + else: + print(f"{cnt_msg} [ FAIL ] {node_name:50} => {res.msg}") -def disable_node(node_name, is_all=False, cnt_msg=''): - if node_name == 'ComfyUI-Manager': +def disable_node(node_spec_str: str, is_all=False, cnt_msg=''): + if 'comfyui-manager' in node_spec_str.lower(): return - node_path, node_item = cm_ctx.lookup_node_path(node_name, robust=True) + node_spec = unified_manager.resolve_node_spec(node_spec_str, guess_mode='active') - if os.path.exists(node_path): - current_name = node_path - new_name = node_path + '.disabled' - os.rename(current_name, new_name) + if node_spec is None: + if unified_manager.resolve_node_spec(node_spec_str, guess_mode='inactive') is not None: + print(f"{cnt_msg} [ SKIP ] {node_spec_str:50} => Already disabled") + else: + print(f"{cnt_msg} [ SKIP ] {node_spec_str:50} => Not found") + return + + node_name, version_spec, _ = node_spec + + res = unified_manager.unified_disable(node_name, version_spec == 'unknown') + + if res.action == 'skip': + print(f"{cnt_msg} [ SKIP ] {node_name:50} => {res.msg}") + elif res.result: print(f"{cnt_msg} [DISABLED] {node_name:50}") - elif os.path.exists(node_path + '.disabled'): - print(f"{cnt_msg} [ SKIPPED] {node_name:50} => Already disabled") - elif not is_all: - print(f"{cnt_msg} [ SKIPPED] {node_name:50} => Not installed") + else: + print(f"{cnt_msg} [ FAIL ] {node_name:50} => {res.msg}") def show_list(kind, simple=False): - for k, v in cm_ctx.get_custom_node_map().items(): - if v[1]: - continue + custom_nodes = asyncio.run(unified_manager.get_custom_nodes(channel=channel_ctx.channel, mode=channel_ctx.mode)) - node_path = os.path.join(custom_nodes_path, k) + # collect not-installed unknown nodes + not_installed_unknown_nodes = [] + repo_unknown = {} - states = set() - if os.path.exists(node_path): - prefix = '[ ENABLED ] ' - states.add('installed') - states.add('enabled') - states.add('all') - elif os.path.exists(node_path + '.disabled'): - prefix = '[ DISABLED ] ' - states.add('installed') - states.add('disabled') - states.add('all') - else: - prefix = '[ NOT INSTALLED ] ' - states.add('not-installed') - states.add('all') - - if kind in states: - if simple: - print(f"{k:50}") - else: - short_id = v[0].get('id', "") - print(f"{prefix} {k:50} {short_id:20} (author: {v[0]['author']})") - - # unregistered nodes - candidates = os.listdir(os.path.realpath(custom_nodes_path)) - - for k in candidates: - fullpath = os.path.join(custom_nodes_path, k) - - if os.path.isfile(fullpath): - continue - - if k in ['__pycache__']: - continue - - states = set() - if k.endswith('.disabled'): - prefix = '[ DISABLED ] ' - states.add('installed') - states.add('disabled') - states.add('all') - k = k[:-9] - else: - prefix = '[ ENABLED ] ' - states.add('installed') - states.add('enabled') - states.add('all') - - if k not in cm_ctx.get_custom_node_map(): - if kind in states: - if simple: - print(f"{k:50}") + for k, v in custom_nodes.items(): + if 'cnr_latest' not in v: + if len(v['files']) == 1: + repo_url = v['files'][0] + node_name = repo_url.split('/')[-1] + if node_name not in unified_manager.unknown_inactive_nodes and node_name not in unified_manager.unknown_active_nodes: + not_installed_unknown_nodes.append(v) else: - print(f"{prefix} {k:50} {'':20} (author: N/A)") + repo_unknown[node_name] = v + + processed = {} + unknown_processed = [] + + flag = kind in ['all', 'cnr', 'installed', 'enabled'] + for k, v in unified_manager.active_nodes.items(): + if flag: + cnr = unified_manager.cnr_map[k] + processed[k] = "[ ENABLED ] ", 
cnr['name'], k, cnr['publisher']['name'], v[0] + else: + processed[k] = None + + if flag and kind != 'cnr': + for k, v in unified_manager.unknown_active_nodes.items(): + item = repo_unknown.get(k) + + if item is None: + continue + + log_item = "[ ENABLED ] ", item['title'], k, item['author'] + unknown_processed.append(log_item) + + flag = kind in ['all', 'cnr', 'installed', 'disabled'] + for k, v in unified_manager.cnr_inactive_nodes.items(): + if k in processed: + continue + + if flag: + cnr = unified_manager.cnr_map[k] + processed[k] = "[ DISABLED ] ", cnr['name'], k, cnr['publisher']['name'], ", ".join(list(v.keys())) + else: + processed[k] = None + + for k, v in unified_manager.nightly_inactive_nodes.items(): + if k in processed: + continue + + if flag: + cnr = unified_manager.cnr_map[k] + processed[k] = "[ DISABLED ] ", cnr['name'], k, cnr['publisher']['name'], 'nightly' + else: + processed[k] = None + + if flag and kind != 'cnr': + for k, v in unified_manager.unknown_inactive_nodes.items(): + item = repo_unknown.get(k) + + if item is None: + continue + + log_item = "[ DISABLED ] ", item['title'], k, item['author'] + unknown_processed.append(log_item) + + flag = kind in ['all', 'cnr', 'not-installed'] + for k, v in unified_manager.cnr_map.items(): + if k in processed: + continue + + if flag: + cnr = unified_manager.cnr_map[k] + ver_spec = v['latest_version']['version'] if 'latest_version' in v else '0.0.0' + processed[k] = "[ NOT INSTALLED ] ", cnr['name'], k, cnr['publisher']['name'], ver_spec + else: + processed[k] = None + + if flag and kind != 'cnr': + for x in not_installed_unknown_nodes: + if len(x['files']) == 1: + node_id = os.path.basename(x['files'][0]) + log_item = "[ NOT INSTALLED ] ", x['title'], node_id, x['author'] + unknown_processed.append(log_item) + + for x in processed.values(): + if x is None: + continue + + prefix, title, short_id, author, ver_spec = x + if simple: + print(title+'@'+ver_spec) + else: + print(f"{prefix} {title:50} {short_id:30} (author: {author:20}) \\[{ver_spec}]") + + for x in unknown_processed: + prefix, title, short_id, author = x + if simple: + print(title+'@unknown') + else: + print(f"{prefix} {title:50} {short_id:30} (author: {author:20}) [UNKNOWN]") def show_snapshot(simple_mode=False): @@ -467,13 +471,48 @@ def auto_save_snapshot(): print(f"Current snapshot is saved as `{path}`") +def get_all_installed_node_specs(): + res = [] + processed = set() + for k, v in unified_manager.active_nodes.items(): + node_spec_str = f"{k}@{v[0]}" + res.append(node_spec_str) + processed.add(k) + + for k, _ in unified_manager.cnr_inactive_nodes.keys(): + if k in processed: + continue + + latest = unified_manager.get_from_cnr_inactive_nodes(k) + if latest is not None: + node_spec_str = f"{k}@{latest}" + res.append(node_spec_str) + + for k, _ in unified_manager.nightly_inactive_nodes.keys(): + if k in processed: + continue + + node_spec_str = f"{k}@nightly" + res.append(node_spec_str) + + for k in unified_manager.unknown_active_nodes.keys(): + node_spec_str = f"{k}@unknown" + res.append(node_spec_str) + + for k in unified_manager.unknown_inactive_nodes.keys(): + node_spec_str = f"{k}@unknown" + res.append(node_spec_str) + + return res + + def for_each_nodes(nodes, act, allow_all=True): is_all = False if allow_all and 'all' in nodes: is_all = True - nodes = [x for x in cm_ctx.get_custom_node_map().keys() if os.path.exists(os.path.join(custom_nodes_path, x)) or os.path.exists(os.path.join(custom_nodes_path, x) + '.disabled')] - - nodes = [x for x in nodes if 
x.lower() not in ['comfy', 'comfyui', 'all']] + nodes = get_all_installed_node_specs() + else: + nodes = [x for x in nodes if x.lower() not in ['comfy', 'comfyui', 'all']] total = len(nodes) i = 1 @@ -510,9 +549,9 @@ def install( mode: str = typer.Option( None, help="[remote|local|cache]" - ), + ) ): - cm_ctx.set_channel_mode(channel, mode) + channel_ctx.set_channel_mode(channel, mode) for_each_nodes(nodes, act=install_node) @@ -533,7 +572,7 @@ def reinstall( help="[remote|local|cache]" ), ): - cm_ctx.set_channel_mode(channel, mode) + channel_ctx.set_channel_mode(channel, mode) for_each_nodes(nodes, act=reinstall_node) @@ -554,7 +593,7 @@ def uninstall( help="[remote|local|cache]" ), ): - cm_ctx.set_channel_mode(channel, mode) + channel_ctx.set_channel_mode(channel, mode) for_each_nodes(nodes, act=uninstall_node) @@ -576,7 +615,7 @@ def update( help="[remote|local|cache]" ), ): - cm_ctx.set_channel_mode(channel, mode) + channel_ctx.set_channel_mode(channel, mode) if 'all' in nodes: auto_save_snapshot() @@ -607,7 +646,7 @@ def disable( help="[remote|local|cache]" ), ): - cm_ctx.set_channel_mode(channel, mode) + channel_ctx.set_channel_mode(channel, mode) if 'all' in nodes: auto_save_snapshot() @@ -633,7 +672,7 @@ def enable( help="[remote|local|cache]" ), ): - cm_ctx.set_channel_mode(channel, mode) + channel_ctx.set_channel_mode(channel, mode) if 'all' in nodes: auto_save_snapshot() @@ -659,7 +698,7 @@ def fix( help="[remote|local|cache]" ), ): - cm_ctx.set_channel_mode(channel, mode) + channel_ctx.set_channel_mode(channel, mode) if 'all' in nodes: auto_save_snapshot() @@ -667,10 +706,20 @@ def fix( for_each_nodes(nodes, fix_node, allow_all=True) -@app.command("show", help="Show node list (simple mode)") +@app.command("show-versions", help="Show all available versions of the node") +def show_versions(node_name: str): + versions = cnr_utils.all_versions_of_node(node_name) + if versions is None: + print(f"Node not found in Comfy Registry: {node_name}") + + for x in versions: + print(f"[{x['createdAt'][:10]}] {x['version']} -- {x['changelog']}") + + +@app.command("show", help="Show node list") def show( arg: str = typer.Argument( - help="[installed|enabled|not-installed|disabled|all|snapshot|snapshot-list]" + help="[installed|enabled|not-installed|disabled|all|cnr|snapshot|snapshot-list]" ), channel: Annotated[ str, @@ -690,6 +739,7 @@ def show( "not-installed", "disabled", "all", + "cnr", "snapshot", "snapshot-list", ] @@ -697,7 +747,7 @@ def show( typer.echo(f"Invalid command: `show {arg}`", err=True) exit(1) - cm_ctx.set_channel_mode(channel, mode) + channel_ctx.set_channel_mode(channel, mode) if arg == 'snapshot': show_snapshot() elif arg == 'snapshot-list': @@ -736,7 +786,7 @@ def simple_show( typer.echo(f"[bold red]Invalid command: `show {arg}`[/bold red]", err=True) exit(1) - cm_ctx.set_channel_mode(channel, mode) + channel_ctx.set_channel_mode(channel, mode) if arg == 'snapshot': show_snapshot(True) elif arg == 'snapshot-list': @@ -786,7 +836,7 @@ def deps_in_workflow( help="[remote|local|cache]" ), ): - cm_ctx.set_channel_mode(channel, mode) + channel_ctx.set_channel_mode(channel, mode) input_path = workflow output_path = output @@ -795,7 +845,7 @@ def deps_in_workflow( print(f"[bold red]File not found: {input_path}[/bold red]") exit(1) - used_exts, unknown_nodes = asyncio.run(core.extract_nodes_from_workflow(input_path, mode=cm_ctx.mode, channel_url=cm_ctx.channel)) + used_exts, unknown_nodes = asyncio.run(core.extract_nodes_from_workflow(input_path, mode=channel_ctx.mode, 
channel_url=channel_ctx.channel)) custom_nodes = {} for x in used_exts: @@ -870,53 +920,7 @@ def restore_snapshot( exit(1) try: - cloned_repos = [] - checkout_repos = [] - skipped_repos = [] - enabled_repos = [] - disabled_repos = [] - is_failed = False - - def extract_infos(msg): - nonlocal is_failed - - for x in msg: - if x.startswith("CLONE: "): - cloned_repos.append(x[7:]) - elif x.startswith("CHECKOUT: "): - checkout_repos.append(x[10:]) - elif x.startswith("SKIPPED: "): - skipped_repos.append(x[9:]) - elif x.startswith("ENABLE: "): - enabled_repos.append(x[8:]) - elif x.startswith("DISABLE: "): - disabled_repos.append(x[9:]) - elif 'APPLY SNAPSHOT: False' in x: - is_failed = True - - print(f"Restore snapshot.") - cmd_str = [sys.executable, git_script_path, '--apply-snapshot', snapshot_path] + extras - output = subprocess.check_output(cmd_str, cwd=custom_nodes_path, text=True) - msg_lines = output.split('\n') - extract_infos(msg_lines) - - for url in cloned_repos: - cm_ctx.post_install(url) - - # print summary - for x in cloned_repos: - print(f"[ INSTALLED ] {x}") - for x in checkout_repos: - print(f"[ CHECKOUT ] {x}") - for x in enabled_repos: - print(f"[ ENABLED ] {x}") - for x in disabled_repos: - print(f"[ DISABLED ] {x}") - - if is_failed: - print(output) - print("[bold red]ERROR: Failed to restore snapshot.[/bold red]") - + asyncio.run(core.restore_snapshot(snapshot_path, extras)) except Exception: print("[bold red]ERROR: Failed to restore snapshot.[/bold red]") traceback.print_exc() @@ -935,7 +939,7 @@ def restore_dependencies(): for x in node_paths: print(f"----------------------------------------------------------------------------------------------------") print(f"Restoring [{i}/{total}]: {x}") - cm_ctx.post_install(x) + unified_manager.execute_install_script('', x, instant_execution=True) i += 1 @@ -947,7 +951,7 @@ def post_install( help="path to custom node", )): path = os.path.expanduser(path) - cm_ctx.post_install(path) + unified_manager.execute_install_script('', path, instant_execution=True) @app.command( @@ -970,7 +974,7 @@ def install_deps( help="[remote|local|cache]" ), ): - cm_ctx.set_channel_mode(channel, mode) + channel_ctx.set_channel_mode(channel, mode) auto_save_snapshot() if not os.path.exists(deps): @@ -989,7 +993,7 @@ def install_deps( if state == 'installed': continue elif state == 'not-installed': - core.gitclone_install([k], instant_execution=True) + asyncio.run(core.gitclone_install(k, instant_execution=True)) else: # disabled core.gitclone_set_active([k], False) @@ -1015,15 +1019,35 @@ def export_custom_node_ids( None, help="[remote|local|cache]" )): - cm_ctx.set_channel_mode(channel, mode) + channel_ctx.set_channel_mode(channel, mode) with open(path, "w", encoding='utf-8') as output_file: - for x in cm_ctx.get_custom_node_map().keys(): + for x in unified_manager.cnr_map.keys(): print(x, file=output_file) + custom_nodes = asyncio.run(unified_manager.get_custom_nodes(channel=channel_ctx.channel, mode=channel_ctx.mode)) + for x in custom_nodes.values(): + if 'cnr_latest' not in x: + if len(x['files']) == 1: + repo_url = x['files'][0] + node_id = repo_url.split('/')[-1] + print(f"{node_id}@unknown", file=output_file) + + if 'id' in x: + print(f"{x['id']}@unknown", file=output_file) + + +@app.command( + "migrate", + help="Migrate legacy node system to new node system", +) +def migrate(): + asyncio.run(unified_manager.migrate_unmanaged_nodes()) + if __name__ == '__main__': sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) sys.exit(app()) + 
print(f"") diff --git a/cm-cli.sh b/cm-cli.sh new file mode 100755 index 00000000..b1a21ca5 --- /dev/null +++ b/cm-cli.sh @@ -0,0 +1,2 @@ +#!/bin/bash +python cm-cli.py $* diff --git a/git_helper.py b/git_helper.py index 38dd24f7..7bac0e42 100644 --- a/git_helper.py +++ b/git_helper.py @@ -5,13 +5,20 @@ import traceback import git import configparser -import re import json import yaml from torchvision.datasets.utils import download_url from tqdm.auto import tqdm from git.remote import RemoteProgress + +comfy_path = os.environ.get('COMFYUI_PATH') + +if comfy_path is None: + print(f"\n[bold yellow]WARN: The `COMFYUI_PATH` environment variable is not set. Assuming `custom_nodes/ComfyUI-Manager/../../` as the ComfyUI path.[/bold yellow]", file=sys.stderr) + comfy_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) + + config_path = os.path.join(os.path.dirname(__file__), "config.ini") nodelist_path = os.path.join(os.path.dirname(__file__), "custom-node-list.json") working_directory = os.getcwd() @@ -35,9 +42,11 @@ class GitProgress(RemoteProgress): self.pbar.refresh() -def gitclone(custom_nodes_path, url, target_hash=None): +def gitclone(custom_nodes_path, url, target_hash=None, repo_path=None): repo_name = os.path.splitext(os.path.basename(url))[0] - repo_path = os.path.join(custom_nodes_path, repo_name) + + if repo_path is None: + repo_path = os.path.join(custom_nodes_path, repo_name) # Clone the repository from the remote URL repo = git.Repo.clone_from(url, repo_path, recursive=True, progress=GitProgress()) @@ -70,7 +79,12 @@ def gitcheck(path, do_fetch=False): # Get the current commit hash and the commit hash of the remote branch commit_hash = repo.head.commit.hexsha - remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha + + if f'{remote_name}/{branch_name}' in repo.refs: + remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha + else: + print("CUSTOM NODE CHECK: True") # non default branch is treated as updatable + return # Compare the commit hashes to determine if the local repository is behind the remote repository if commit_hash != remote_commit_hash: @@ -89,11 +103,8 @@ def gitcheck(path, do_fetch=False): def switch_to_default_branch(repo): - show_result = repo.git.remote("show", "origin") - matches = re.search(r"\s*HEAD branch:\s*(.*)", show_result) - if matches: - default_branch = matches.group(1) - repo.git.checkout(default_branch) + default_branch = repo.git.symbolic_ref('refs/remotes/origin/HEAD').replace('refs/remotes/origin/', '') + repo.git.checkout(default_branch) def gitpull(path): @@ -117,6 +128,11 @@ def gitpull(path): remote_name = current_branch.tracking_branch().remote_name remote = repo.remote(name=remote_name) + if f'{remote_name}/{branch_name}' not in repo.refs: + switch_to_default_branch(repo) + current_branch = repo.active_branch + branch_name = current_branch.name + remote.fetch() remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha @@ -142,9 +158,7 @@ def gitpull(path): def checkout_comfyui_hash(target_hash): - repo_path = os.path.abspath(os.path.join(working_directory, '..')) # ComfyUI dir - - repo = git.Repo(repo_path) + repo = git.Repo(comfy_path) commit_hash = repo.head.commit.hexsha if commit_hash != target_hash: @@ -167,7 +181,7 @@ def checkout_custom_node_hash(git_custom_node_infos): repo_name_to_url[repo_name] = url for path in os.listdir(working_directory): - if path.endswith("ComfyUI-Manager"): + if '@' in path or path.endswith("ComfyUI-Manager"): continue 
fullpath = os.path.join(working_directory, path) @@ -226,6 +240,9 @@ def checkout_custom_node_hash(git_custom_node_infos): # clone missing for k, v in git_custom_node_infos.items(): + if 'ComfyUI-Manager' in k: + continue + if not v['disabled']: repo_name = k.split('/')[-1] if repo_name.endswith('.git'): @@ -234,7 +251,7 @@ def checkout_custom_node_hash(git_custom_node_infos): path = os.path.join(working_directory, repo_name) if not os.path.exists(path): print(f"CLONE: {path}") - gitclone(working_directory, k, v['hash']) + gitclone(working_directory, k, target_hash=v['hash']) def invalidate_custom_node_file(file_custom_node_infos): @@ -286,6 +303,7 @@ def invalidate_custom_node_file(file_custom_node_infos): def apply_snapshot(target): try: + # todo: should be if target is not in snapshots dir path = os.path.join(os.path.dirname(__file__), 'snapshots', f"{target}") if os.path.exists(path): if not target.endswith('.json') and not target.endswith('.yaml'): @@ -401,7 +419,11 @@ setup_environment() try: if sys.argv[1] == "--clone": - gitclone(sys.argv[2], sys.argv[3]) + repo_path = None + if len(sys.argv) > 4: + repo_path = sys.argv[4] + + gitclone(sys.argv[2], sys.argv[3], repo_path=repo_path) elif sys.argv[1] == "--check": gitcheck(sys.argv[2], False) elif sys.argv[1] == "--fetch": diff --git a/glob/cnr_utils.py b/glob/cnr_utils.py new file mode 100644 index 00000000..5b2e9021 --- /dev/null +++ b/glob/cnr_utils.py @@ -0,0 +1,112 @@ +from manager_util import * +import zipfile +import requests +from dataclasses import dataclass +from typing import List + +base_url = "https://api.comfy.org" + + +async def get_cnr_data(page=1, limit=1000, cache_mode=True): + try: + uri = f'{base_url}/nodes?page={page}&limit={limit}' + json_obj = await get_data_with_cache(uri, cache_mode=cache_mode) + + for v in json_obj['nodes']: + if 'latest_version' not in v: + v['latest_version'] = dict(version='nightly') + + return json_obj['nodes'] + except: + res = {} + print(f"Cannot connect to comfyregistry.") + + return res + + +@dataclass +class NodeVersion: + changelog: str + dependencies: List[str] + deprecated: bool + id: str + version: str + download_url: str + + +def map_node_version(api_node_version): + """ + Maps node version data from API response to NodeVersion dataclass. + + Args: + api_data (dict): The 'node_version' part of the API response. + + Returns: + NodeVersion: An instance of NodeVersion dataclass populated with data from the API. + """ + return NodeVersion( + changelog=api_node_version.get( + "changelog", "" + ), # Provide a default value if 'changelog' is missing + dependencies=api_node_version.get( + "dependencies", [] + ), # Provide a default empty list if 'dependencies' is missing + deprecated=api_node_version.get( + "deprecated", False + ), # Assume False if 'deprecated' is not specified + id=api_node_version[ + "id" + ], # 'id' should be mandatory; raise KeyError if missing + version=api_node_version[ + "version" + ], # 'version' should be mandatory; raise KeyError if missing + download_url=api_node_version.get( + "downloadUrl", "" + ), # Provide a default value if 'downloadUrl' is missing + ) + + +def install_node(node_id, version=None): + """ + Retrieves the node version for installation. + + Args: + node_id (str): The unique identifier of the node. + version (str, optional): Specific version of the node to retrieve. If omitted, the latest version is returned. + + Returns: + NodeVersion: Node version data or error message. 
+ """ + if version is None: + url = f"{base_url}/nodes/{node_id}/install" + else: + url = f"{base_url}/nodes/{node_id}/install?version={version}" + + response = requests.get(url) + if response.status_code == 200: + # Convert the API response to a NodeVersion object + return map_node_version(response.json()) + else: + return None + + +def all_versions_of_node(node_id): + url = f"https://api.comfy.org/nodes/{node_id}/versions" + + response = requests.get(url) + if response.status_code == 200: + return response.json() + else: + return None + + +def extract_package_as_zip(file_path, extract_path): + try: + with zipfile.ZipFile(file_path, "r") as zip_ref: + zip_ref.extractall(extract_path) + extracted_files = zip_ref.namelist() + print(f"Extracted zip file to {extract_path}") + return extracted_files + except zipfile.BadZipFile: + print(f"File '{file_path}' is not a zip or is corrupted.") + return None diff --git a/glob/manager_core.py b/glob/manager_core.py index cd0ca830..c5be6a42 100644 --- a/glob/manager_core.py +++ b/glob/manager_core.py @@ -5,31 +5,58 @@ import re import shutil import configparser import platform -from datetime import datetime + import git from git.remote import RemoteProgress from urllib.parse import urlparse from tqdm.auto import tqdm -import aiohttp -import threading -import json import time import yaml import zipfile +from concurrent.futures import ThreadPoolExecutor, as_completed + +orig_print = print + +from rich import print +from packaging import version + +import uuid +import requests glob_path = os.path.join(os.path.dirname(__file__)) # ComfyUI-Manager/glob sys.path.append(glob_path) import cm_global +import cnr_utils from manager_util import * -version = [2, 48, 1] -version_str = f"V{version[0]}.{version[1]}" + (f'.{version[2]}' if len(version) > 2 else '') + +version_code = [2, 48, 1] +version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '') + + +def download_url(url, dest_folder, filename): + # Ensure the destination folder exists + if not os.path.exists(dest_folder): + os.makedirs(dest_folder) + + # Full path to save the file + dest_path = os.path.join(dest_folder, filename) + + # Download the file + response = requests.get(url, stream=True) + if response.status_code == 200: + with open(dest_path, 'wb') as file: + for chunk in response.iter_content(chunk_size=1024): + if chunk: + file.write(chunk) + else: + raise Exception(f"Failed to download file from {url}") -comfyui_manager_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) custom_nodes_path = os.path.abspath(os.path.join(comfyui_manager_path, '..')) + comfy_path = os.environ.get('COMFYUI_PATH') if comfy_path is None: comfy_path = os.path.abspath(os.path.join(custom_nodes_path, '..')) @@ -38,7 +65,6 @@ channel_list_path = os.path.join(comfyui_manager_path, 'channels.list') config_path = os.path.join(comfyui_manager_path, "config.ini") startup_script_path = os.path.join(comfyui_manager_path, "startup-scripts") git_script_path = os.path.join(comfyui_manager_path, "git_helper.py") -cache_dir = os.path.join(comfyui_manager_path, '.cache') cached_config = None js_path = None @@ -48,10 +74,6 @@ comfy_ui_required_commit_datetime = datetime(2024, 1, 24, 0, 0, 0) comfy_ui_revision = "Unknown" comfy_ui_commit_datetime = datetime(1900, 1, 1, 0, 0, 0) - -cache_lock = threading.Lock() - - channel_dict = None channel_list = None pip_map = None @@ -144,6 +166,1114 @@ def is_installed(name): return name.lower() in get_installed_packages() +def 
normalize_channel(channel): + if channel is None: + return None + elif channel.startswith('https://'): + return channel + + tmp_dict = get_channel_dict() + channel_url = tmp_dict.get(channel) + if channel_url: + return channel_url + + raise Exception(f"Invalid channel name '{channel}'") + + +class ManagedResult: + def __init__(self, action): + self.action = action + self.items = [] + self.result = True + self.to_path = None + self.msg = None + self.target = None + self.postinstall = lambda: True + + def append(self, item): + self.items.append(item) + + def fail(self, msg): + self.result = False + self.msg = msg + return self + + def with_target(self, target): + self.target = target + return self + + def with_msg(self, msg): + self.msg = msg + return self + + def with_postinstall(self, postinstall): + self.postinstall = postinstall + return self + + +class UnifiedManager: + def __init__(self): + self.cnr_inactive_nodes = {} # node_id -> node_version -> fullpath + self.nightly_inactive_nodes = {} # node_id -> fullpath + self.unknown_inactive_nodes = {} # node_id -> repo url * fullpath + self.active_nodes = {} # node_id -> node_version * fullpath + self.unknown_active_nodes = {} # node_id -> repo url * fullpath + self.cnr_map = {} # node_id -> cnr info + self.repo_cnr_map = {} # repo_url -> cnr info + self.custom_node_map_cache = {} # (channel, mode) -> augmented custom node list json + self.processed_install = set() + + def resolve_unspecified_version(self, node_name, guess_mode=None): + if guess_mode == 'active': + # priority: + # 1. CNR/nightly active nodes + # 2. unknown + # 3. Fail + + if node_name in self.cnr_map: + version_spec = self.get_from_cnr_active_nodes(node_name) + + if version_spec is None: + if node_name in self.unknown_active_nodes: + version_spec = "unknown" + else: + return None + + elif node_name in self.unknown_active_nodes: + version_spec = "unknown" + else: + return None + + elif guess_mode == 'inactive': + # priority: + # 1. CNR latest in inactive + # 2. nightly + # 3. unknown + # 4. Fail + + if node_name in self.cnr_map: + latest = self.get_from_cnr_inactive_nodes(node_name) + + if latest is not None: + version_spec = latest[0] + else: + if node_name in self.nightly_inactive_nodes: + version_spec = "nightly" + else: + version_spec = "unknown" + + elif node_name in self.unknown_inactive_nodes: + version_spec = "unknown" + else: + return None + + else: + # priority: + # 1. CNR latest in world + # 2. 
unknown + + if node_name in self.cnr_map: + version_spec = self.cnr_map[node_name]['latest_version']['version'] + else: + version_spec = "unknown" + + return version_spec + + def resolve_node_spec(self, node_name, guess_mode=None): + """ + resolve to 'node_name, version_spec' from version string + + version string: + node_name@latest + node_name@nightly + node_name@unknown + node_name@ + node_name + + if guess_mode is 'active' or 'inactive' + return can be 'None' based on state check + otherwise + return 'unknown' version when failed to guess + """ + + spec = node_name.split('@') + + if len(spec) == 2: + node_name = spec[0] + version_spec = spec[1] + + if version_spec == 'latest': + if node_name not in self.cnr_map: + print(f"ERROR: '{node_name}' is not a CNR node.") + return None + else: + version_spec = self.cnr_map[node_name]['latest_version']['version'] + + elif guess_mode in ['active', 'inactive']: + node_name = spec[0] + version_spec = self.resolve_unspecified_version(node_name, guess_mode=guess_mode) + if version_spec is None: + return None + else: + node_name = spec[0] + version_spec = self.resolve_unspecified_version(node_name) + if version_spec is None: + return None + + return node_name, version_spec, len(spec) > 1 + + def resolve_ver(self, fullpath): + """ + resolve version of unclassified custom node based on remote url in .git/config + """ + git_config_path = os.path.join(fullpath, '.git', 'config') + + if not os.path.exists(git_config_path): + return "unknown" + + config = configparser.ConfigParser() + config.read(git_config_path) + + for k, v in config.items(): + if k.startswith('remote ') and 'url' in v: + cnr = self.repo_cnr_map.get(v['url']) + if cnr: + return "nightly" + else: + return "unknown" + + def resolve_id_from_repo(self, fullpath): + git_config_path = os.path.join(fullpath, '.git', 'config') + + if not os.path.exists(git_config_path): + return None + + config = configparser.ConfigParser() + config.read(git_config_path) + + for k, v in config.items(): + if k.startswith('remote ') and 'url' in v: + cnr = self.repo_cnr_map.get(v['url']) + if cnr: + return "nightly", cnr['id'], v['url'] + else: + return "unknown", v['url'].split('/')[-1], v['url'] + + def resolve_unknown(self, node_id, fullpath): + res = self.resolve_id_from_repo(fullpath) + + if res is None: + self.unknown_inactive_nodes[node_id] = '', fullpath + return + + ver_spec, node_id, url = res + + if ver_spec == 'nightly': + self.nightly_inactive_nodes[node_id] = fullpath + else: + self.unknown_inactive_nodes[node_id] = url, fullpath + + def update_cache_at_path(self, fullpath, is_disabled): + name = os.path.basename(fullpath) + + if name.endswith(".disabled"): + node_spec = name[:-9] + is_disabled = True + else: + node_spec = name + + if '@' in node_spec: + node_spec = node_spec.split('@') + node_id = node_spec[0] + if node_id is None: + node_version = 'unknown' + else: + node_version = node_spec[1].replace("_", ".") + + if node_version != 'unknown': + if node_id not in self.cnr_map: + # fallback + v = node_version + + self.cnr_map[node_id] = { + 'id': node_id, + 'name': node_id, + 'latest_version': {'version': v}, + 'publisher': {'id': 'N/A', 'name': 'N/A'} + } + + elif node_version == 'unknown': + res = self.resolve_id_from_repo(fullpath) + if res is None: + print(f"Custom node unresolved: {fullpath}") + return + + node_version, node_id, _ = res + else: + res = self.resolve_id_from_repo(fullpath) + if res is None: + print(f"Custom node unresolved: {fullpath}") + return + + node_version, node_id, _ = 
res + + if not is_disabled: + # active nodes + if node_version == 'unknown': + self.unknown_active_nodes[node_id] = node_version, fullpath + else: + self.active_nodes[node_id] = node_version, fullpath + else: + if node_version == 'unknown': + self.resolve_unknown(node_id, fullpath) + elif node_version == 'nightly': + self.nightly_inactive_nodes[node_id] = fullpath + else: + self.add_to_cnr_inactive_nodes(node_id, node_version, fullpath) + + def is_updatable(self, node_id): + cur_ver = self.get_cnr_active_version(node_id) + latest_ver = self.cnr_map[node_id]['latest_version']['version'] + + if cur_ver and latest_ver: + return self.safe_version(latest_ver) > self.safe_version(cur_ver) + + return False + + def fetch_or_pull_git_repo(self, is_pull=False): + updated = set() + failed = set() + + def check_update(node_name, fullpath, ver_spec): + try: + if is_pull: + is_updated, success = git_repo_update_check_with(fullpath, do_update=True) + else: + is_updated, success = git_repo_update_check_with(fullpath, do_fetch=True) + + return f"{node_name}@{ver_spec}", is_updated, success + except Exception: + traceback.print_exc() + + return f"{node_name}@{ver_spec}", False, False + + with ThreadPoolExecutor() as executor: + futures = [] + + for k, v in self.unknown_active_nodes.items(): + futures.append(executor.submit(check_update, k, v[1], 'unknown')) + + for k, v in self.active_nodes.values(): + if v[0] == 'nightly': + futures.append(executor.submit(check_update, k, v[1], 'nightly')) + + for future in as_completed(futures): + item, is_updated, success = future.result() + + if is_updated: + updated.add(item) + + if not success: + failed.add(item) + + return dict(updated=list(updated), failed=list(failed)) + + def is_enabled(self, node_id, version_spec=None): + """ + 1. true if node_id@ is enabled + 2. true if node_id@ is enabled and version_spec==None + 3. false otherwise + + remark: latest version_spec is not allowed. Must be resolved before call. + """ + if version_spec == "cnr": + return self.get_cnr_active_version(node_id) not in [None, 'nightly'] + elif version_spec == 'unknown' and self.is_unknown_active(node_id): + return True + elif version_spec is not None and self.get_cnr_active_version(node_id) == version_spec: + return True + elif version_spec is None and (node_id in self.active_nodes or node_id in self.unknown_active_nodes): + return True + return False + + def is_disabled(self, node_id, version_spec=None): + """ + 1. node_id@unknown is disabled if version_spec is @unknown + 2. node_id@nightly is disabled if version_spec is @nightly + 4. node_id@ is disabled if version_spec is not None + 5. not exists (active node_id) if version_spec is None + + remark: latest version_spec is not allowed. Must be resolved before call. 
+ """ + if version_spec == "unknown": + return node_id in self.unknown_inactive_nodes + elif version_spec == "nightly": + return node_id in self.nightly_inactive_nodes + elif version_spec == "cnr": + res = self.cnr_inactive_nodes.get(node_id, None) + if res is None: + return False + + res = [x for x in res.keys() if x != 'nightly'] + return len(res) > 0 + elif version_spec is not None: + return version_spec in self.cnr_inactive_nodes.get(node_id, []) + + if node_id in self.nightly_inactive_nodes: + return True + elif node_id in self.unknown_inactive_nodes: + return True + + target = self.cnr_inactive_nodes.get(node_id, None) + if target is not None and target == version_spec: + return True + + return False + + def is_registered_in_cnr(self, node_id): + return node_id in self.cnr_map + + def get_cnr_active_version(self, node_id): + res = self.active_nodes.get(node_id) + if res: + return res[0] + else: + return None + + def is_unknown_active(self, node_id): + return node_id in self.unknown_active_nodes + + def add_to_cnr_inactive_nodes(self, node_id, ver, fullpath): + ver_map = self.cnr_inactive_nodes.get(node_id) + if ver_map is None: + ver_map = {} + self.cnr_inactive_nodes[node_id] = ver_map + + ver_map[ver] = fullpath + + def get_from_cnr_active_nodes(self, node_id): + ver_path = self.active_nodes.get(node_id) + if ver_path is None: + return None + + return ver_path[0] + + def get_from_cnr_inactive_nodes(self, node_id, ver=None): + ver_map = self.cnr_inactive_nodes.get(node_id) + if ver_map is None: + return None + + if ver is not None: + return ver_map.get(ver) + + latest = None + for k, v in ver_map.items(): + if latest is None: + latest = self.safe_version(k), v + continue + + cur_ver = self.safe_version(k) + if cur_ver > latest[0]: + latest = cur_ver, v + + return latest + + async def reload(self, cache_mode): + self.custom_node_map_cache = {} + self.cnr_inactive_nodes = {} # node_id -> node_version -> fullpath + self.nightly_inactive_nodes = {} # node_id -> fullpath + self.unknown_inactive_nodes = {} # node_id -> repo url * fullpath + self.unknown_active_nodes = {} # node_id -> repo url * fullpath + self.active_nodes = {} # node_id -> node_version * fullpath + + # reload 'cnr_map' and 'repo_cnr_map' + cnrs = await cnr_utils.get_cnr_data(cache_mode=cache_mode) + + for x in cnrs: + self.cnr_map[x['id']] = x + + if 'repository' in x: + self.repo_cnr_map[x['repository']] = x + + # reload node status info from custom_nodes/* + for x in os.listdir(custom_nodes_path): + fullpath = os.path.join(custom_nodes_path, x) + if os.path.isdir(fullpath): + if x not in ['__pycache__', '.disabled']: + self.update_cache_at_path(fullpath, is_disabled=False) + + # reload node status info from custom_nodes/.disabled/* + disabled_dir = os.path.join(custom_nodes_path, '.disabled') + if os.path.exists(disabled_dir): + for x in os.listdir(disabled_dir): + fullpath = os.path.join(disabled_dir, x) + if os.path.isdir(fullpath): + self.update_cache_at_path(fullpath, is_disabled=True) + + @staticmethod + async def load_nightly(channel, mode): + res = {} + + channel_url = normalize_channel(channel) + if channel: + if mode not in ['remote', 'local', 'cache']: + print(f"[bold red]ERROR: Invalid mode is specified `--mode {mode}`[/bold red]", file=sys.stderr) + return {} + + json_obj = await get_data_by_mode(mode, 'custom-node-list.json', channel_url=channel_url) + for x in json_obj['custom_nodes']: + for y in x['files']: + if 'github.com' in y and not (y.endswith('.py') or y.endswith('.js')): + repo_name = 
y.split('/')[-1] + res[repo_name] = (x, False) + + if 'id' in x: + if x['id'] not in res: + res[x['id']] = (x, True) + + return res + + async def get_custom_nodes(self, channel, mode): + channel = normalize_channel(channel) + + cache = self.custom_node_map_cache.get((channel, mode)) + + if cache is not None: + return cache + + nodes = await self.load_nightly(channel, mode) + + res = {} + added_cnr = set() + for v in nodes.values(): + v = v[0] + if len(v['files']) == 1: + cnr = self.repo_cnr_map.get(v['files'][0]) + if cnr: + if 'latest_version' not in cnr: + v['cnr_latest'] = '0.0.0' + else: + v['cnr_latest'] = cnr['latest_version']['version'] + v['id'] = cnr['id'] + v['author'] = cnr['publisher']['name'] + v['title'] = cnr['name'] + v['description'] = cnr['description'] + v['health'] = '-' + added_cnr.add(cnr['id']) + node_id = v['id'] + else: + node_id = v['files'][0].split('/')[-1] + res[node_id] = v + elif len(v['files']) > 1: + res[v['files'][0]] = v # A custom node composed of multiple url is treated as a single repository with one representative path + + self.custom_node_map_cache[(channel, mode)] = res + return res + + @staticmethod + def safe_version(ver_str): + try: + return version.parse(ver_str) + except: + return version.parse("0.0.0") + + def execute_install_script(self, url, repo_path, lazy_mode=False, instant_execution=False): + install_script_path = os.path.join(repo_path, "install.py") + requirements_path = os.path.join(repo_path, "requirements.txt") + + if lazy_mode: + install_cmd = ["#LAZY-INSTALL-SCRIPT", sys.executable] + return try_install_script(url, repo_path, install_cmd) + else: + if os.path.exists(requirements_path): + print("Install: pip packages") + with open(requirements_path, "r") as requirements_file: + for line in requirements_file: + package_name = remap_pip_package(line.strip()) + if package_name and not package_name.startswith('#') and package_name not in self.processed_install: + self.processed_install.add(package_name) + install_cmd = [sys.executable, "-m", "pip", "install", package_name] + if package_name.strip() != "" and not package_name.startswith('#'): + return try_install_script(url, repo_path, install_cmd, instant_execution=instant_execution) + + if os.path.exists(install_script_path) and install_script_path not in self.processed_install: + self.processed_install.add(install_script_path) + print(f"Install: install script") + install_cmd = [sys.executable, "install.py"] + return try_install_script(url, repo_path, install_cmd, instant_execution=instant_execution) + + return True + + def unified_fix(self, node_id, version_spec, instant_execution=False): + """ + fix dependencies + """ + + result = ManagedResult('fix') + + info = self.active_nodes.get(node_id) + if info is None or not os.path.exists(info[1]): + return result.fail(f'not found: {node_id}@{version_spec}') + + self.execute_install_script(node_id, info[1], instant_execution=instant_execution) + + return result + + def cnr_switch_version(self, node_id, version_spec=None, instant_execution=False, return_postinstall=False): + """ + switch between cnr version + """ + + # 1. 
download + result = ManagedResult('switch-cnr') + + node_info = cnr_utils.install_node(node_id, version_spec) + if node_info is None or not node_info.download_url: + return result.fail(f'not available node: {node_id}@{version_spec}') + + version_spec = node_info.version + + if self.active_nodes[node_id][0] == version_spec: + return ManagedResult('skip').with_msg("Up to date") + + archive_name = f"CNR_temp_{str(uuid.uuid4())}.zip" # should be unpredictable name - security precaution + download_path = os.path.join(custom_nodes_path, archive_name) + download_url(node_info.download_url, custom_nodes_path, archive_name) + + # 2. extract files into @ + install_path = self.active_nodes[node_id][1] + extracted = cnr_utils.extract_package_as_zip(download_path, install_path) + os.remove(download_path) + + if extracted is None: + shutil.rmtree(install_path) + return result.fail(f'Empty archive file: {node_id}@{version_spec}') + + # 3. calculate garbage files (.tracking - extracted) + tracking_info_file = os.path.join(install_path, '.tracking') + prev_files = set() + with open(tracking_info_file, 'r') as f: + for line in f: + prev_files.add(line.strip()) + garbage = prev_files.difference(extracted) + garbage = [os.path.join(custom_nodes_path, x) for x in garbage] + + # 4-1. remove garbage files + for x in garbage: + if os.path.isfile(x): + os.remove(x) + + # 4-2. remove garbage dir if empty + for x in garbage: + if os.path.isdir(x): + if not os.listdir(x): + os.rmdir(x) + + # 5. rename dir name @ ==> @ + new_install_path = os.path.join(custom_nodes_path, f"{node_id}@{version_spec.replace('.', '_')}") + print(f"'{install_path}' is moved to '{new_install_path}'") + shutil.move(install_path, new_install_path) + + # 6. create .tracking file + tracking_info_file = os.path.join(new_install_path, '.tracking') + with open(tracking_info_file, "w", encoding='utf-8') as file: + file.write('\n'.join(list(extracted))) + + # 7. post install + result.target = version_spec + + def postinstall(): + res = self.execute_install_script(f"{node_id}@{version_spec}", new_install_path, instant_execution=instant_execution) + return res + + if return_postinstall: + return result.with_postinstall(postinstall) + else: + if not postinstall(): + return result.fail(f"Failed to execute install script: {node_id}@{version_spec}") + + return result + + def unified_enable(self, node_id, version_spec=None): + """ + priority if version_spec == None + 1. CNR latest in disk + 2. nightly + 3. unknown + + remark: latest version_spec is not allowed. Must be resolved before call. 
+ """ + + result = ManagedResult('enable') + + if version_spec is None: + version_spec = self.resolve_unspecified_version(node_id, guess_mode='inactive') + if version is None: + return result.fail(f'Specified inactive node not exists: {node_id}') + + if self.is_enabled(node_id, version_spec): + return ManagedResult('skip').with_msg('Already enabled') + + if not self.is_disabled(node_id, version_spec): + return ManagedResult('skip').with_msg('Not installed') + + from_path = None + to_path = None + + if version_spec == 'unknown': + repo_and_path = self.unknown_inactive_nodes.get(node_id) + if repo_and_path is None: + return result.fail(f'Specified inactive node not exists: {node_id}@unknown') + from_path = repo_and_path[1] + to_path = os.path.join(custom_nodes_path, f"{node_id}@unknown") + elif version_spec == 'nightly': + self.unified_disable(node_id, False) + from_path = self.nightly_inactive_nodes.get(node_id) + if from_path is None: + return result.fail(f'Specified inactive node not exists: {node_id}@nightly') + to_path = os.path.join(custom_nodes_path, f"{node_id}@nightly") + elif version_spec is not None: + self.unified_disable(node_id, False) + cnr_info = self.cnr_inactive_nodes.get(node_id) + + if cnr_info is None or len(cnr_info) == 0: + return result.fail(f'Specified inactive cnr node not exists: {node_id}') + + if version_spec == "cnr": + version_spec = next(iter(cnr_info)) + + if version_spec not in cnr_info: + return result.fail(f'Specified inactive node not exists: {node_id}@{version_spec}') + + from_path = cnr_info[version_spec] + to_path = os.path.join(custom_nodes_path, f"{node_id}@{version_spec.replace('.', '_')}") + + if from_path is None or not os.path.exists(from_path): + return result.fail(f'Specified inactive node path not exists: {from_path}') + + # move from disk + shutil.move(from_path, to_path) + + # update cache + if version_spec == 'unknown': + del self.unknown_inactive_nodes[node_id] + self.unknown_active_nodes[node_id] = to_path + return result.with_target(to_path) + elif version_spec == 'nightly': + del self.nightly_inactive_nodes[node_id] + else: + del self.cnr_inactive_nodes[node_id][version_spec] + + self.active_nodes[node_id] = version_spec, to_path + return result.with_target(to_path) + + def unified_disable(self, node_id, is_unknown): + result = ManagedResult('disable') + + if is_unknown: + version_spec = 'unknown' + else: + version_spec = None + + if not self.is_enabled(node_id, version_spec): + if not self.is_disabled(node_id, version_spec): + return ManagedResult('skip').with_msg('Not installed') + else: + return ManagedResult('skip').with_msg('Already disabled') + + if is_unknown: + repo_and_path = self.unknown_active_nodes.get(node_id) + to_path = os.path.join(custom_nodes_path, '.disabled', f"{node_id}@unknown") + + if repo_and_path is None or not os.path.exists(repo_and_path[1]): + return result.fail(f'Specified active node not exists: {node_id}@unknown') + + shutil.move(repo_and_path[1], to_path) + result.append((repo_and_path[1], to_path)) + + self.unknown_inactive_nodes[node_id] = repo_and_path[0], to_path + del self.unknown_active_nodes[node_id] + + return result + + ver_and_path = self.active_nodes.get(node_id) + + if ver_and_path is None or not os.path.exists(ver_and_path[1]): + return result.fail(f'Specified active node not exists: {node_id}') + + to_path = os.path.join(custom_nodes_path, '.disabled', f"{node_id}@{ver_and_path[0].replace('.', '_')}") + shutil.move(ver_and_path[1], to_path) + result.append((ver_and_path[1], to_path)) + + if 
ver_and_path[0] == 'nightly': + self.nightly_inactive_nodes[node_id] = to_path + else: + self.add_to_cnr_inactive_nodes(node_id, ver_and_path[0], to_path) + + del self.active_nodes[node_id] + + return result + + def unified_uninstall(self, node_id, is_unknown): + """ + Remove whole installed custom nodes including inactive nodes + """ + result = ManagedResult('uninstall') + + if is_unknown: + # remove from actives + repo_and_path = self.unknown_active_nodes.get(node_id) + + is_removed = False + + if repo_and_path is not None and os.path.exists(repo_and_path[1]): + rmtree(repo_and_path[1]) + result.append(repo_and_path[1]) + del self.unknown_active_nodes[node_id] + + is_removed = True + + # remove from inactives + repo_and_path = self.unknown_inactive_nodes.get(node_id) + + if repo_and_path is not None and os.path.exists(repo_and_path[1]): + rmtree(repo_and_path[1]) + result.append(repo_and_path[1]) + del self.unknown_inactive_nodes[node_id] + + is_removed = True + + if is_removed: + return result + else: + return ManagedResult('skip') + + # remove from actives + ver_and_path = self.active_nodes.get(node_id) + + if ver_and_path is not None and os.path.exists(ver_and_path[1]): + shutil.rmtree(ver_and_path[1]) + result.items.append(ver_and_path) + del self.active_nodes[node_id] + + # remove from nightly inactives + fullpath = self.nightly_inactive_nodes.get(node_id) + if fullpath is not None and os.path.exists(fullpath): + shutil.rmtree(fullpath) + result.items.append(('nightly', fullpath)) + del self.nightly_inactive_nodes[node_id] + + # remove from cnr inactives + ver_map = self.cnr_inactive_nodes.get(node_id) + if ver_map is not None: + for key, fullpath in ver_map.items(): + shutil.rmtree(fullpath) + result.items.append((key, fullpath)) + del self.cnr_inactive_nodes[node_id] + + if len(result.items) == 0: + return ManagedResult('skip').with_msg('Not installed') + + return result + + def cnr_install(self, node_id, version_spec=None, instant_execution=False, return_postinstall=False): + result = ManagedResult('install-cnr') + + node_info = cnr_utils.install_node(node_id, version_spec) + if node_info is None or not node_info.download_url: + return result.fail(f'not available node: {node_id}@{version_spec}') + + archive_name = f"CNR_temp_{str(uuid.uuid4())}.zip" # should be unpredictable name - security precaution + download_path = os.path.join(custom_nodes_path, archive_name) + + # re-download. I cannot trust existing file. 
+ if os.path.exists(download_path): + os.remove(download_path) + + # install_path + install_path = os.path.join(custom_nodes_path, f"{node_id}@{version_spec.replace('.', '_')}") + if os.path.exists(install_path): + return result.fail(f'Install path already exists: {install_path}') + + download_url(node_info.download_url, custom_nodes_path, archive_name) + os.makedirs(install_path, exist_ok=True) + extracted = cnr_utils.extract_package_as_zip(download_path, install_path) + os.remove(download_path) + result.to_path = install_path + + if extracted is None: + shutil.rmtree(install_path) + return result.fail(f'Empty archive file: {node_id}@{version_spec}') + + # create .tracking file + tracking_info_file = os.path.join(install_path, '.tracking') + with open(tracking_info_file, "w", encoding='utf-8') as file: + file.write('\n'.join(extracted)) + + result.target = version_spec + + def postinstall(): + return self.execute_install_script(node_id, install_path, instant_execution=instant_execution) + + if return_postinstall: + return result.with_postinstall(postinstall) + else: + if not postinstall(): + return result.fail(f"Failed to execute install script: {node_id}@{version_spec}") + + return result + + def repo_install(self, url, repo_path, instant_execution=False, return_postinstall=False): + result = ManagedResult('install-git') + result.append(url) + + if not is_valid_url(url): + return result.fail(f"Invalid git url: {url}") + + if url.endswith("/"): + url = url[:-1] + try: + print(f"Download: git clone '{url}'") + + # Clone the repository from the remote URL + if not instant_execution and platform.system() == 'Windows': + res = manager_funcs.run_script([sys.executable, git_script_path, "--clone", custom_nodes_path, url, repo_path], cwd=custom_nodes_path) + if res != 0: + return result.fail(f"Failed to clone repo: {url}") + else: + repo = git.Repo.clone_from(url, repo_path, recursive=True, progress=GitProgress()) + repo.git.clear_cache() + repo.close() + + def postinstall(): + return self.execute_install_script(url, repo_path, instant_execution=instant_execution) + + if return_postinstall: + return result.with_postinstall(postinstall) + else: + if not postinstall(): + return result.fail(f"Failed to execute install script: {url}") + + except Exception as e: + return result.fail(f"Install(git-clone) error: {url} / {e}") + + print("Installation was successful.") + return result + + def repo_update(self, repo_path, instant_execution=False, return_postinstall=False): + result = ManagedResult('update-git') + + if not os.path.exists(os.path.join(repo_path, '.git')): + return result.fail(f'Path not found: {repo_path}') + + # version check + repo = git.Repo(repo_path) + + if repo.head.is_detached: + switch_to_default_branch(repo) + + current_branch = repo.active_branch + branch_name = current_branch.name + + if current_branch.tracking_branch() is None: + print(f"[ComfyUI-Manager] There is no tracking branch ({current_branch})") + remote_name = 'origin' + else: + remote_name = current_branch.tracking_branch().remote_name + remote = repo.remote(name=remote_name) + + try: + remote.fetch() + except Exception as e: + if 'detected dubious' in str(e): + print(f"[ComfyUI-Manager] Try fixing 'dubious repository' error on 'ComfyUI' repository") + safedir_path = comfy_path.replace('\\', '/') + subprocess.run(['git', 'config', '--global', '--add', 'safe.directory', safedir_path]) + try: + remote.fetch() + except Exception: + print(f"\n[ComfyUI-Manager] Failed to fixing repository setup. 
Please execute this command on cmd: \n" + f"-----------------------------------------------------------------------------------------\n" + f'git config --global --add safe.directory "{safedir_path}"\n' + f"-----------------------------------------------------------------------------------------\n") + + commit_hash = repo.head.commit.hexsha + remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha + + if commit_hash != remote_commit_hash: + git_pull(repo_path) + + if len(repo.remotes) > 0: + url = repo.remotes[0].url + else: + url = "unknown repo" + + def postinstall(): + return self.execute_install_script(url, repo_path, instant_execution=instant_execution) + + if return_postinstall: + return result.with_postinstall(postinstall) + else: + if not postinstall(): + return result.fail(f"Failed to execute install script: {url}") + + return result + else: + return ManagedResult('skip').with_msg('Up to date') + + def unified_update(self, node_id, version_spec=None, instant_execution=False, return_postinstall=False): + if version_spec is None: + version_spec = self.resolve_unspecified_version(node_id, guess_mode='active') + + if version_spec is None: + return ManagedResult('update').fail(f'Update not available: {node_id}@{version_spec}') + + if version_spec == 'nightly': + return self.repo_update(self.active_nodes[node_id][1], instant_execution=instant_execution, return_postinstall=return_postinstall).with_target('nightly') + elif version_spec == 'unknown': + return self.repo_update(self.unknown_active_nodes[node_id][1], instant_execution=instant_execution, return_postinstall=return_postinstall).with_target('unknown') + else: + return self.cnr_switch_version(node_id, instant_execution=instant_execution, return_postinstall=return_postinstall) + + async def install_by_id(self, node_id, version_spec=None, channel=None, mode=None, instant_execution=False, return_postinstall=False): + """ + priority if version_spec == None + 1. CNR latest + 2. unknown + + remark: latest version_spec is not allowed. Must be resolved before call. 
+ """ + + repo_url = None + if version_spec is None: + if self.is_enabled(node_id): + return ManagedResult('skip') + elif self.is_disabled(node_id): + return self.unified_enable(node_id) + else: + version_spec = self.resolve_unspecified_version(node_id) + + if version_spec == 'unknown' or version_spec == 'nightly': + custom_nodes = await self.get_custom_nodes(channel, mode) + the_node = custom_nodes.get(node_id) + if the_node is not None: + repo_url = the_node['files'][0] + else: + result = ManagedResult('install') + return result.fail(f"Node '{node_id}@{version_spec}' not found in [{channel}, {mode}]") + + if self.is_enabled(node_id, version_spec): + return ManagedResult('skip').with_target(f"{node_id}@{version_spec}") + + elif self.is_disabled(node_id, version_spec): + return self.unified_enable(node_id, version_spec) + + elif version_spec == 'unknown' or version_spec == 'nightly': + if version_spec == 'nightly': + # disable cnr nodes + if self.is_enabled(node_id, 'cnr'): + self.unified_disable(node_id, False) + + to_path = os.path.abspath(os.path.join(custom_nodes_path, f"{node_id}@{version_spec.replace('.', '_')}")) + res = self.repo_install(repo_url, to_path, instant_execution=instant_execution, return_postinstall=return_postinstall) + if res.result: + if version_spec == 'unknown': + self.unknown_active_nodes[node_id] = to_path + elif version_spec == 'nightly': + self.active_nodes[node_id] = 'nightly', to_path + + return res.with_target(version_spec) + + if self.is_enabled(node_id, 'nightly'): + # disable nightly nodes + self.unified_disable(node_id, 'nightly') # NOTE: don't return from here + + if self.is_disabled(node_id, version_spec): + # enable and return if specified version is disabled + return self.unified_enable(node_id, version_spec) + + if self.is_disabled(node_id, "cnr"): + # enable and switch version if cnr is disabled (not specified version) + self.unified_enable(node_id, "cnr") + return self.cnr_switch_version(node_id, version_spec, return_postinstall=return_postinstall) + + if self.is_enabled(node_id, "cnr"): + return self.cnr_switch_version(node_id, version_spec, return_postinstall=return_postinstall) + + res = self.cnr_install(node_id, version_spec, instant_execution=instant_execution, return_postinstall=return_postinstall) + if res.result: + self.active_nodes[node_id] = version_spec, res.to_path + + return res + + async def migrate_unmanaged_nodes(self): + """ + fix path for nightly and unknown nodes of unmanaged nodes + """ + await self.reload('cache') + await self.get_custom_nodes('default', 'cache') + + print(f"Migration: STAGE 1") + # migrate nightly inactive + fixes = {} + for x, v in self.nightly_inactive_nodes.items(): + if v.endswith('@nightly'): + continue + + new_path = os.path.join(custom_nodes_path, '.disabled', f"{x}@nightly") + shutil.move(v, new_path) + fixes[x] = new_path + + self.nightly_inactive_nodes.update(fixes) + + print(f"Migration: STAGE 2") + # migrate unknown inactive + fixes = {} + for x, v in self.unknown_inactive_nodes.items(): + if v[1].endswith('@unknown'): + continue + + new_path = os.path.join(custom_nodes_path, '.disabled', f"{x}@unknown") + shutil.move(v[1], new_path) + fixes[x] = v[0], new_path + + self.unknown_inactive_nodes.update(fixes) + + print(f"Migration: STAGE 3") + # migrate unknown active nodes + fixes = {} + for x, v in self.unknown_active_nodes.items(): + if v[1].endswith('@unknown'): + continue + + new_path = os.path.join(custom_nodes_path, f"{x}@unknown") + shutil.move(v[1], new_path) + fixes[x] = v[0], new_path + + 
self.unknown_active_nodes.update(fixes) + + print(f"Migration: STAGE 4") + # migrate active nodes + fixes = {} + for x, v in self.active_nodes.items(): + if v[0] not in ['nightly']: + continue + + if v[1].endswith('@nightly'): + continue + + new_path = os.path.join(custom_nodes_path, f"{x}@nightly") + shutil.move(v[1], new_path) + fixes[x] = v[0], new_path + + self.active_nodes.update(fixes) + + print(f"DONE") + + +unified_manager = UnifiedManager() + + def get_channel_dict(): global channel_dict @@ -279,11 +1409,8 @@ def get_config(): def switch_to_default_branch(repo): - show_result = repo.git.remote("show", "origin") - matches = re.search(r"\s*HEAD branch:\s*(.*)", show_result) - if matches: - default_branch = matches.group(1) - repo.git.checkout(default_branch) + default_branch = repo.git.symbolic_ref('refs/remotes/origin/HEAD').replace('refs/remotes/origin/', '') + repo.git.checkout(default_branch) def try_install_script(url, repo_path, install_cmd, instant_execution=False): @@ -322,6 +1449,8 @@ def try_install_script(url, repo_path, install_cmd, instant_execution=False): print(f"install script failed: {url}") return False + return True + # use subprocess to avoid file system lock by git (Windows) def __win_check_git_update(path, do_fetch=False, do_update=False): @@ -418,15 +1547,25 @@ def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=Fa return True -def git_repo_has_updates(path, do_fetch=False, do_update=False): +def git_repo_update_check_with(path, do_fetch=False, do_update=False): + """ + + perform update check for git custom node + and fetch or update if flag is on + + :param path: path to git custom node + :param do_fetch: do fetch during check + :param do_update: do update during check + :return: update state * success + """ if do_fetch: - print(f"\x1b[2K\rFetching: {path}", end='') + orig_print(f"\x1b[2K\rFetching: {path}", end='') elif do_update: - print(f"\x1b[2K\rUpdating: {path}", end='') + orig_print(f"\x1b[2K\rUpdating: {path}", end='') # Check if the path is a git repository if not os.path.exists(os.path.join(path, '.git')): - raise ValueError('Not a git repository') + raise ValueError(f'Not a git repository: {path}') if platform.system() == "Windows": updated, success = __win_check_git_update(path, do_fetch, do_update) @@ -437,12 +1576,21 @@ def git_repo_has_updates(path, do_fetch=False, do_update=False): # Fetch the latest commits from the remote repository repo = git.Repo(path) - current_branch = repo.active_branch - branch_name = current_branch.name - remote_name = 'origin' remote = repo.remote(name=remote_name) + if not do_update and repo.head.is_detached: + if do_fetch: + remote.fetch() + + return True, True # detached branch is treated as updatable + + if repo.head.is_detached: + switch_to_default_branch(repo) + + current_branch = repo.active_branch + branch_name = current_branch.name + # Get the current commit hash commit_hash = repo.head.commit.hexsha @@ -450,8 +1598,13 @@ def git_repo_has_updates(path, do_fetch=False, do_update=False): remote.fetch() if do_update: - if repo.head.is_detached: + if repo.is_dirty(): + repo.git.stash() + + if f'{remote_name}/{branch_name}' not in repo.refs: switch_to_default_branch(repo) + current_branch = repo.active_branch + branch_name = current_branch.name remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha @@ -466,7 +1619,7 @@ def git_repo_has_updates(path, do_fetch=False, do_update=False): if commit_hash != new_commit_hash: execute_install_script(None, path) - 
print(f"\x1b[2K\rUpdated: {path}") + print(f"\nUpdated: {path}") return True, True else: return False, False @@ -483,7 +1636,10 @@ def git_repo_has_updates(path, do_fetch=False, do_update=False): current_branch = repo.active_branch branch_name = current_branch.name - remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha + if f'{remote_name}/{branch_name}' in repo.refs: + remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha + else: + return True, True # Assuming there's an update if it's not the default branch. # Compare the commit hashes to determine if the local repository is behind the remote repository if commit_hash != remote_commit_hash: @@ -527,39 +1683,60 @@ def is_valid_url(url): return False -def gitclone_install(files, instant_execution=False, msg_prefix=''): - print(f"{msg_prefix}Install: {files}") - for url in files: - if not is_valid_url(url): - print(f"Invalid git url: '{url}'") - return False +async def gitclone_install(url, instant_execution=False, msg_prefix=''): + await unified_manager.reload('cache') + await unified_manager.get_custom_nodes('default', 'cache') - if url.endswith("/"): - url = url[:-1] - try: - print(f"Download: git clone '{url}'") + print(f"{msg_prefix}Install: {url}") + + result = ManagedResult('install-git') + + if not is_valid_url(url): + return result.fail(f"Invalid git url: '{url}'") + + if url.endswith("/"): + url = url[:-1] + try: + cnr = unified_manager.repo_cnr_map.get(url) + if cnr: + cnr_id = cnr['id'] + return await unified_manager.install_by_id(cnr_id, version_spec='nightly') + else: repo_name = os.path.splitext(os.path.basename(url))[0] - repo_path = os.path.join(custom_nodes_path, repo_name) + node_dir = f"{repo_name}@unknown" + repo_path = os.path.join(custom_nodes_path, node_dir) + disabled_repo_path1 = os.path.join(custom_nodes_path, '.disabled', node_dir) + disabled_repo_path2 = os.path.join(custom_nodes_path, repo_name+'.disabled') # old style + + if os.path.exists(repo_path): + return result.fail(f"Already exists: '{repo_path}'") + + if os.path.exists(disabled_repo_path1): + return result.fail(f"Already exists (disabled): '{disabled_repo_path1}'") + + if os.path.exists(disabled_repo_path2): + return result.fail(f"Already exists (disabled): '{disabled_repo_path2}'") + + print(f"CLONE into '{repo_path}'") # Clone the repository from the remote URL if not instant_execution and platform.system() == 'Windows': - res = manager_funcs.run_script([sys.executable, git_script_path, "--clone", custom_nodes_path, url], cwd=custom_nodes_path) + res = manager_funcs.run_script([sys.executable, git_script_path, "--clone", custom_nodes_path, url, repo_path], cwd=custom_nodes_path) if res != 0: - return False + return result.fail(f"Failed to clone '{url}' into '{repo_path}'") else: repo = git.Repo.clone_from(url, repo_path, recursive=True, progress=GitProgress()) repo.git.clear_cache() repo.close() - if not execute_install_script(url, repo_path, instant_execution=instant_execution): - return False + execute_install_script(url, repo_path, instant_execution=instant_execution) + print("Installation was successful.") + return result.with_target(repo_path) - except Exception as e: - print(f"Install(git-clone) error: {url} / {e}", file=sys.stderr) - return False - - print("Installation was successful.") - return True + except Exception as e: + traceback.print_exc() + print(f"Install(git-clone) error: {url} / {e}", file=sys.stderr) + return result.fail(f"Install(git-clone) error: {url} / {e}") def git_pull(path): 
@@ -591,44 +1768,6 @@ def git_pull(path): return True -async def get_data(uri, silent=False): - if not silent: - print(f"FETCH DATA from: {uri}", end="") - - if uri.startswith("http"): - async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session: - async with session.get(uri) as resp: - json_text = await resp.text() - else: - with cache_lock: - with open(uri, "r", encoding="utf-8") as f: - json_text = f.read() - - json_obj = json.loads(json_text) - if not silent: - print(f" [DONE]") - return json_obj - - -def simple_hash(input_string): - hash_value = 0 - for char in input_string: - hash_value = (hash_value * 31 + ord(char)) % (2**32) - - return hash_value - - -def is_file_created_within_one_day(file_path): - if not os.path.exists(file_path): - return False - - file_creation_time = os.path.getctime(file_path) - current_time = datetime.now().timestamp() - time_difference = current_time - file_creation_time - - return time_difference <= 86400 - - async def get_data_by_mode(mode, filename, channel_url=None): if channel_url in get_channel_dict(): channel_url = get_channel_dict()[channel_url] @@ -792,7 +1931,7 @@ def gitclone_set_active(files, is_disable): current_path = dir_path + ".disabled" new_path = dir_path - os.rename(current_path, new_path) + shutil.move(current_path, new_path) if is_disable: if os.path.exists(os.path.join(new_path, "disable.py")): @@ -915,62 +2054,22 @@ def simple_check_custom_node(url): return 'not-installed' -def check_a_custom_node_installed(item, do_fetch=False, do_update_check=True, do_update=False): - item['installed'] = 'None' +def check_state_of_git_node_pack_single(item, do_fetch=False, do_update_check=True, do_update=False): + if item['version'] == 'unknown': + dir_path = unified_manager.unknown_active_nodes.get(item['id'])[1] + elif item['version'] == 'nightly': + dir_path = unified_manager.active_nodes.get(item['id'])[1] + else: + # skip CNR nodes + dir_path = None - if item['install_type'] == 'git-clone' and len(item['files']) == 1: - url = item['files'][0] - - if url.endswith("/"): - url = url[:-1] - - dir_name = os.path.splitext(os.path.basename(url))[0].replace(".git", "") - dir_path = os.path.join(custom_nodes_path, dir_name) - if os.path.exists(dir_path): - try: - item['installed'] = 'True' # default - - if cm_global.try_call(api="cm.is_import_failed_extension", name=dir_name): - item['installed'] = 'Fail' - - if do_update_check: - update_state, success = git_repo_has_updates(dir_path, do_fetch, do_update) - if (do_update_check or do_update) and update_state: - item['installed'] = 'Update' - elif do_update and not success: - item['installed'] = 'Fail' - except: - if cm_global.try_call(api="cm.is_import_failed_extension", name=dir_name): - item['installed'] = 'Fail' - else: - item['installed'] = 'True' - - elif os.path.exists(dir_path + ".disabled"): - item['installed'] = 'Disabled' - - else: - item['installed'] = 'False' - - elif item['install_type'] == 'copy' and len(item['files']) == 1: - dir_name = os.path.basename(item['files'][0]) - - if item['files'][0].endswith('.py'): - base_path = custom_nodes_path - elif 'js_path' in item: - base_path = os.path.join(js_path, item['js_path']) - else: - base_path = js_path - - file_path = os.path.join(base_path, dir_name) - if os.path.exists(file_path): - if cm_global.try_call(api="cm.is_import_failed_extension", name=dir_name): - item['installed'] = 'Fail' - else: - item['installed'] = 'True' - elif os.path.exists(file_path + ".disabled"): - item['installed'] 
= 'Disabled' - else: - item['installed'] = 'False' + if dir_path and os.path.exists(dir_path): + if do_update_check: + update_state, success = git_repo_update_check_with(dir_path, do_fetch, do_update) + if (do_update_check or do_update) and update_state: + item['update-state'] = 'true' + elif do_update and not success: + item['update-state'] = 'fail' def get_installed_pip_packages(): @@ -1003,10 +2102,14 @@ def get_current_snapshot(): comfyui_commit_hash = repo.head.commit.hexsha git_custom_nodes = {} + cnr_custom_nodes = {} file_custom_nodes = [] # Get custom nodes hash for path in os.listdir(custom_nodes_path): + if path in ['.disabled', '__pycache__']: + continue + fullpath = os.path.join(custom_nodes_path, path) if os.path.isdir(fullpath): @@ -1015,17 +2118,29 @@ def get_current_snapshot(): try: git_dir = os.path.join(fullpath, '.git') - if not os.path.exists(git_dir): + parsed_spec = path.split('@') + + if len(parsed_spec) == 1: + node_id = parsed_spec[0] + ver_spec = 'unknown' + else: + node_id, ver_spec = parsed_spec + ver_spec = ver_spec.replace('_', '.') + + if len(ver_spec) > 1 and ver_spec not in ['nightly', 'latest', 'unknown']: + if is_disabled: + continue # don't restore disabled state of CNR node. + + cnr_custom_nodes[node_id] = ver_spec + + elif not os.path.exists(git_dir): continue - repo = git.Repo(fullpath) - commit_hash = repo.head.commit.hexsha - url = repo.remotes.origin.url - git_custom_nodes[url] = { - 'hash': commit_hash, - 'disabled': is_disabled - } - + else: + repo = git.Repo(fullpath) + commit_hash = repo.head.commit.hexsha + url = repo.remotes.origin.url + git_custom_nodes[url] = dict(hash=commit_hash, disabled=is_disabled) except: print(f"Failed to extract snapshots for the custom node '{path}'.") @@ -1044,6 +2159,7 @@ def get_current_snapshot(): return { 'comfyui': comfyui_commit_hash, 'git_custom_nodes': git_custom_nodes, + 'cnr_custom_nodes': cnr_custom_nodes, 'file_custom_nodes': file_custom_nodes, 'pips': pip_packages, } @@ -1215,3 +2331,275 @@ def unzip(model_path): os.remove(model_path) return True + +def map_to_unified_keys(json_obj): + res = {} + for k, v in json_obj.items(): + cnr = unified_manager.repo_cnr_map.get(k) + if cnr: + res[cnr['id']] = v + else: + res[k] = v + + return res + + +async def get_unified_total_nodes(channel, mode): + await unified_manager.reload(mode) + + res = await unified_manager.get_custom_nodes(channel, mode) + + # collect pure cnr ids (i.e. 
not exists in custom-node-list.json) + # populate state/updatable field to non-pure cnr nodes + cnr_ids = set(unified_manager.cnr_map.keys()) + for k, v in res.items(): + # resolve cnr_id from repo url + files_in_json = v.get('files', []) + cnr_id = None + if len(files_in_json) == 1: + cnr = unified_manager.repo_cnr_map.get(files_in_json[0]) + if cnr: + cnr_id = cnr['id'] + + if cnr_id is not None: + # cnr or nightly version + cnr_ids.remove(cnr_id) + updatable = False + cnr = unified_manager.cnr_map[cnr_id] + + if cnr_id in unified_manager.active_nodes: + # installed + v['state'] = 'enabled' + if unified_manager.active_nodes[cnr_id][0] != 'nightly': + updatable = unified_manager.is_updatable(cnr_id) + else: + updatable = False + v['active_version'] = unified_manager.active_nodes[cnr_id][0] + v['version'] = v['active_version'] + + if cm_global.try_call(api="cm.is_import_failed_extension", name=unified_manager.active_nodes[cnr_id][1]): + v['import-fail'] = True + + elif cnr_id in unified_manager.cnr_inactive_nodes: + # disabled + v['state'] = 'disabled' + v['version'] = unified_manager.get_from_cnr_inactive_nodes(cnr_id)[0] + elif cnr_id in unified_manager.nightly_inactive_nodes: + # disabled + v['state'] = 'disabled' + v['version'] = 'nightly' + else: + # not installed + v['state'] = 'not-installed' + + if 'version' not in v: + v['version'] = cnr['latest_version']['version'] + + v['update-state'] = 'true' if updatable else 'false' + else: + # unknown version + v['version'] = 'unknown' + + if unified_manager.is_enabled(k, 'unknown'): + v['state'] = 'enabled' + v['active_version'] = 'unknown' + + if cm_global.try_call(api="cm.is_import_failed_extension", name=unified_manager.unknown_active_nodes[k][1]): + v['import-fail'] = True + + elif unified_manager.is_disabled(k, 'unknown'): + v['state'] = 'disabled' + else: + v['state'] = 'not-installed' + + # add items for pure cnr nodes + for cnr_id in cnr_ids: + cnr = unified_manager.cnr_map[cnr_id] + author = cnr['publisher']['name'] + title = cnr['name'] + reference = f"https://registry.comfy.org/nodes/{cnr['id']}" + install_type = "cnr" + description = cnr.get('description', '') + + ver = None + active_version = None + updatable = False + import_fail = None + if cnr_id in unified_manager.active_nodes: + # installed + state = 'enabled' + updatable = unified_manager.is_updatable(cnr_id) + active_version = unified_manager.active_nodes[cnr['id']][0] + ver = active_version + + if cm_global.try_call(api="cm.is_import_failed_extension", name=unified_manager.active_nodes[cnr_id][1]): + import_fail = True + + elif cnr['id'] in unified_manager.cnr_inactive_nodes: + # disabled + state = 'disabled' + elif cnr['id'] in unified_manager.nightly_inactive_nodes: + # disabled + state = 'disabled' + ver = 'nightly' + else: + # not installed + state = 'not-installed' + + if ver is None: + ver = cnr['latest_version']['version'] + + item = dict(author=author, title=title, reference=reference, install_type=install_type, + description=description, state=state, updatable=updatable, version=ver) + + if active_version: + item['active_version'] = active_version + + if import_fail: + item['import-fail'] = True + + res[cnr_id] = item + + return res + + +def populate_github_stats(node_packs, json_obj_github): + for k, v in node_packs.items(): + url = v['reference'] + if url in json_obj_github: + v['stars'] = json_obj_github[url]['stars'] + v['last_update'] = json_obj_github[url]['last_update'] + v['trust'] = json_obj_github[url]['author_account_age_days'] > 180 + else: + 
v['stars'] = -1 + v['last_update'] = -1 + v['trust'] = False + + +async def restore_snapshot(snapshot_path, git_helper_extras=None): + cloned_repos = [] + checkout_repos = [] + skipped_repos = [] + enabled_repos = [] + disabled_repos = [] + is_failed = False + + def extract_infos(msg): + nonlocal is_failed + + for x in msg: + if x.startswith("CLONE: "): + cloned_repos.append(x[7:]) + elif x.startswith("CHECKOUT: "): + checkout_repos.append(x[10:]) + elif x.startswith("SKIPPED: "): + skipped_repos.append(x[9:]) + elif x.startswith("ENABLE: "): + enabled_repos.append(x[8:]) + elif x.startswith("DISABLE: "): + disabled_repos.append(x[9:]) + elif 'APPLY SNAPSHOT: False' in x: + is_failed = True + + print(f"Restore snapshot.") + + postinstalls = [] + + # for cnr restore + with open(snapshot_path, 'r', encoding="UTF-8") as snapshot_file: + if snapshot_path.endswith('.json'): + info = json.load(snapshot_file) + elif snapshot_path.endswith('.yaml'): + info = yaml.load(snapshot_file, Loader=yaml.SafeLoader) + info = info['custom_nodes'] + + cnr_info = info.get('cnr_custom_nodes') + if cnr_info is not None: + # disable not listed cnr nodes + todo_disable = [] + for k, v in unified_manager.active_nodes.items(): + if v[0] != 'nightly': + if k not in cnr_info: + todo_disable.append(k) + + for x in todo_disable: + unified_manager.unified_disable(x, False) + + # install listed cnr nodes + for k, v in cnr_info.items(): + ps = await unified_manager.install_by_id(k, version_spec=v, instant_execution=True, return_postinstall=True) + if ps is not None and ps.result: + if hasattr(ps, 'postinstall'): + postinstalls.append(ps.postinstall) + else: + print(f"cm-cli: unexpected [0001]") + + # for git restore + if git_helper_extras is None: + git_helper_extras = [] + + cmd_str = [sys.executable, git_script_path, '--apply-snapshot', snapshot_path] + git_helper_extras + new_env = os.environ.copy() + new_env["COMFYUI_PATH"] = comfy_path + output = subprocess.check_output(cmd_str, cwd=custom_nodes_path, text=True, env=new_env) + msg_lines = output.split('\n') + extract_infos(msg_lines) + + for repo_path in cloned_repos: + unified_manager.execute_install_script('', repo_path, instant_execution=True) + + for ps in postinstalls: + ps() + + # reload + await unified_manager.migrate_unmanaged_nodes() + + # print summary + for x in cloned_repos: + print(f"[ INSTALLED ] {x}") + for x in checkout_repos: + print(f"[ CHECKOUT ] {x}") + for x in enabled_repos: + print(f"[ ENABLED ] {x}") + for x in disabled_repos: + print(f"[ DISABLED ] {x}") + + if is_failed: + print(output) + print("[bold red]ERROR: Failed to restore snapshot.[/bold red]") + + +# check need to migrate +need_to_migrate = False + +async def check_need_to_migrate(): + global need_to_migrate + + legacy_custom_nodes = [] + + try: + import folder_paths + except: + try: + sys.path.append(comfy_path) + import folder_paths + except: + raise Exception(f"Invalid COMFYUI_PATH: {comfy_path}") + + node_paths = folder_paths.get_folder_paths("custom_nodes") + for x in node_paths: + subdirs = [d for d in os.listdir(x) if os.path.isdir(os.path.join(x, d))] + for subdir in subdirs: + if subdir in ['.disabled', '__pycache__']: + continue + + if '@' not in subdir: + need_to_migrate = True + legacy_custom_nodes.append(subdir) + + if len(legacy_custom_nodes) > 0: + print("\n--------------------- ComfyUI-Manager migration notice --------------------") + print("The following custom nodes were installed using the old management method and require migration:") + print(", 
".join(legacy_custom_nodes)) + print("---------------------------------------------------------------------------\n") + diff --git a/glob/manager_server.py b/glob/manager_server.py index b6b4a2d7..d3f5287c 100644 --- a/glob/manager_server.py +++ b/glob/manager_server.py @@ -16,12 +16,15 @@ import git from server import PromptServer import manager_core as core +import manager_util import cm_global print(f"### Loading: ComfyUI-Manager ({core.version_str})") comfy_ui_hash = "-" +routes = PromptServer.instance.routes + def handle_stream(stream, prefix): stream.reconfigure(encoding=locale.getpreferredencoding(), errors='replace') @@ -59,7 +62,7 @@ def is_allowed_security_level(level): async def get_risky_level(files): json_data1 = await core.get_data_by_mode('local', 'custom-node-list.json') - json_data2 = await core.get_data_by_mode('cache', 'custom-node-list.json', channel_url='https://github.com/ltdrdata/ComfyUI-Manager/raw/main/custom-node-list.json') + json_data2 = await core.get_data_by_mode('cache', 'custom-node-list.json', channel_url='https://github.com/ltdrdata/ComfyUI-Manager/raw/main') all_urls = set() for x in json_data1['custom_nodes'] + json_data2['custom_nodes']: @@ -201,19 +204,6 @@ def print_comfyui_version(): print_comfyui_version() -async def populate_github_stats(json_obj, json_obj_github): - if 'custom_nodes' in json_obj: - for i, node in enumerate(json_obj['custom_nodes']): - url = node['reference'] - if url in json_obj_github: - json_obj['custom_nodes'][i]['stars'] = json_obj_github[url]['stars'] - json_obj['custom_nodes'][i]['last_update'] = json_obj_github[url]['last_update'] - json_obj['custom_nodes'][i]['trust'] = json_obj_github[url]['author_account_age_days'] > 180 - else: - json_obj['custom_nodes'][i]['stars'] = -1 - json_obj['custom_nodes'][i]['last_update'] = -1 - json_obj['custom_nodes'][i]['trust'] = False - return json_obj def setup_environment(): @@ -280,7 +270,7 @@ def get_model_path(data): return os.path.join(base_model, data['filename']) -def check_custom_nodes_installed(json_obj, do_fetch=False, do_update_check=True, do_update=False): +def check_state_of_git_node_pack(node_packs, do_fetch=False, do_update_check=True, do_update=False): if do_fetch: print("Start fetching...", end="") elif do_update: @@ -289,16 +279,17 @@ def check_custom_nodes_installed(json_obj, do_fetch=False, do_update_check=True, print("Start update check...", end="") def process_custom_node(item): - core.check_a_custom_node_installed(item, do_fetch, do_update_check, do_update) + core.check_state_of_git_node_pack_single(item, do_fetch, do_update_check, do_update) with concurrent.futures.ThreadPoolExecutor(4) as executor: - for item in json_obj['custom_nodes']: - executor.submit(process_custom_node, item) + for k, v in node_packs.items(): + if v.get('active_version') in ['unknown', 'nightly']: + executor.submit(process_custom_node, v) if do_fetch: print(f"\x1b[2K\rFetching done.") elif do_update: - update_exists = any(item['installed'] == 'Update' for item in json_obj['custom_nodes']) + update_exists = any(item.get('updatable', False) for item in node_packs.values()) if update_exists: print(f"\x1b[2K\rUpdate done.") else: @@ -335,8 +326,11 @@ def nickname_filter(json_obj): return json_obj -@PromptServer.instance.routes.get("/customnode/getmappings") +@routes.get("/customnode/getmappings") async def fetch_customnode_mappings(request): + """ + provide unified (node -> node pack) mapping list + """ mode = request.rel_url.query["mode"] nickname_mode = False @@ -345,6 +339,7 @@ async def 
fetch_customnode_mappings(request): nickname_mode = True json_obj = await core.get_data_by_mode(mode, 'extension-node-map.json') + json_obj = core.map_to_unified_keys(json_obj) if nickname_mode: json_obj = nickname_filter(json_obj) @@ -367,25 +362,34 @@ async def fetch_customnode_mappings(request): return web.json_response(json_obj, content_type='application/json') -@PromptServer.instance.routes.get("/customnode/fetch_updates") +@routes.get("/customnode/fetch_updates") async def fetch_updates(request): try: - json_obj = await core.get_data_by_mode(request.rel_url.query["mode"], 'custom-node-list.json') + if request.rel_url.query["mode"] == "local": + channel = 'local' + else: + channel = core.get_config()['channel_url'] - check_custom_nodes_installed(json_obj, True) + await core.unified_manager.reload(request.rel_url.query["mode"]) + await core.unified_manager.get_custom_nodes(channel, request.rel_url.query["mode"]) - update_exists = any('custom_nodes' in json_obj and 'installed' in node and node['installed'] == 'Update' for node in - json_obj['custom_nodes']) + res = core.unified_manager.fetch_or_pull_git_repo(is_pull=False) - if update_exists: + for x in res['failed']: + print(f"FETCH FAILED: {x}") + + print("\nDone.") + + if len(res['updated']) > 0: return web.Response(status=201) return web.Response(status=200) except: + traceback.print_exc() return web.Response(status=400) -@PromptServer.instance.routes.get("/customnode/update_all") +@routes.get("/customnode/update_all") async def update_all(request): if not is_allowed_security_level('middle'): print(f"ERROR: To use this action, a security_level of `middle or below` is required. Please contact the administrator.") @@ -394,22 +398,37 @@ async def update_all(request): try: core.save_snapshot_with_postfix('autosave') - json_obj = await core.get_data_by_mode(request.rel_url.query["mode"], 'custom-node-list.json') + if request.rel_url.query["mode"] == "local": + channel = 'local' + else: + channel = core.get_config()['channel_url'] - check_custom_nodes_installed(json_obj, do_update=True) + await core.unified_manager.reload(request.rel_url.query["mode"]) + await core.unified_manager.get_custom_nodes(channel, request.rel_url.query["mode"]) - updated = [item['title'] for item in json_obj['custom_nodes'] if item['installed'] == 'Update'] - failed = [item['title'] for item in json_obj['custom_nodes'] if item['installed'] == 'Fail'] + updated_cnr = [] + for k, v in core.unified_manager.active_nodes.items(): + if v[0] != 'nightly': + res = core.unified_manager.unified_update(k, v[0]) + if res.action == 'switch-cnr' and res: + updated_cnr.append(k) - res = {'updated': updated, 'failed': failed} + res = core.unified_manager.fetch_or_pull_git_repo(is_pull=True) - if len(updated) == 0 and len(failed) == 0: + res['updated'] += updated_cnr + + for x in res['failed']: + print(f"PULL FAILED: {x}") + + if len(res['updated']) == 0 and len(res['failed']) == 0: status = 200 else: status = 201 + print(f"\nDone.") return web.json_response(res, status=status, content_type='application/json') except: + traceback.print_exc() return web.Response(status=400) finally: core.clear_pip_cache() @@ -450,17 +469,20 @@ def convert_markdown_to_html(input_text): def populate_markdown(x): if 'description' in x: - x['description'] = convert_markdown_to_html(x['description']) + x['description'] = convert_markdown_to_html(manager_util.sanitize_tag(x['description'])) if 'name' in x: - x['name'] = x['name'].replace('<', '<').replace('>', '>') + x['name'] = 
manager_util.sanitize_tag(x['name']) if 'title' in x: - x['title'] = x['title'].replace('<', '<').replace('>', '>') + x['title'] = manager_util.sanitize_tag(x['title']) -@PromptServer.instance.routes.get("/customnode/getlist") +@routes.get("/customnode/getlist") async def fetch_customnode_list(request): + """ + provide unified custom node list + """ if "skip_update" in request.rel_url.query and request.rel_url.query["skip_update"] == "true": skip_update = True else: @@ -471,26 +493,14 @@ async def fetch_customnode_list(request): else: channel = core.get_config()['channel_url'] - json_obj = await core.get_data_by_mode(request.rel_url.query["mode"], 'custom-node-list.json') + node_packs = await core.get_unified_total_nodes(channel, request.rel_url.query["mode"]) json_obj_github = await core.get_data_by_mode(request.rel_url.query["mode"], 'github-stats.json', 'default') - json_obj = await populate_github_stats(json_obj, json_obj_github) + core.populate_github_stats(node_packs, json_obj_github) - def is_ignored_notice(code): - if code is not None and code.startswith('#NOTICE_'): - try: - notice_version = [int(x) for x in code[8:].split('.')] - return notice_version[0] < core.version[0] or (notice_version[0] == core.version[0] and notice_version[1] <= core.version[1]) - except Exception: - return False - else: - return False + check_state_of_git_node_pack(node_packs, False, do_update_check=not skip_update) - json_obj['custom_nodes'] = [record for record in json_obj['custom_nodes'] if not is_ignored_notice(record.get('author'))] - - check_custom_nodes_installed(json_obj, False, not skip_update) - - for x in json_obj['custom_nodes']: - populate_markdown(x) + for v in node_packs.values(): + populate_markdown(v) if channel != 'local': found = 'custom' @@ -502,48 +512,24 @@ async def fetch_customnode_list(request): channel = found - json_obj['channel'] = channel + result = dict(channel=channel, node_packs=node_packs) - return web.json_response(json_obj, content_type='application/json') + return web.json_response(result, content_type='application/json') -@PromptServer.instance.routes.get("/customnode/alternatives") +@routes.get("/customnode/alternatives") async def fetch_customnode_alternatives(request): alter_json = await core.get_data_by_mode(request.rel_url.query["mode"], 'alter-list.json') + res = {} + for item in alter_json['items']: populate_markdown(item) - - return web.json_response(alter_json, content_type='application/json') + res[item['id']] = item + res = core.map_to_unified_keys(res) -@PromptServer.instance.routes.get("/alternatives/getlist") -async def fetch_alternatives_list(request): - if "skip_update" in request.rel_url.query and request.rel_url.query["skip_update"] == "true": - skip_update = True - else: - skip_update = False - - alter_json = await core.get_data_by_mode(request.rel_url.query["mode"], 'alter-list.json') - custom_node_json = await core.get_data_by_mode(request.rel_url.query["mode"], 'custom-node-list.json') - - fileurl_to_custom_node = {} - - for item in custom_node_json['custom_nodes']: - for fileurl in item['files']: - fileurl_to_custom_node[fileurl] = item - - for item in alter_json['items']: - fileurl = item['id'] - if fileurl in fileurl_to_custom_node: - custom_node = fileurl_to_custom_node[fileurl] - core.check_a_custom_node_installed(custom_node, not skip_update) - - populate_markdown(item) - populate_markdown(custom_node) - item['custom_node'] = custom_node - - return web.json_response(alter_json, content_type='application/json') + return 
web.json_response(res, content_type='application/json') def check_model_installed(json_obj): @@ -567,7 +553,7 @@ def check_model_installed(json_obj): executor.submit(process_model, item) -@PromptServer.instance.routes.get("/externalmodel/getlist") +@routes.get("/externalmodel/getlist") async def fetch_externalmodel_list(request): json_obj = await core.get_data_by_mode(request.rel_url.query["mode"], 'model-list.json') @@ -587,7 +573,7 @@ async def get_snapshot_list(request): return web.json_response({'items': items}, content_type='application/json') -@PromptServer.instance.routes.get("/snapshot/remove") +@routes.get("/snapshot/remove") async def remove_snapshot(request): if not is_allowed_security_level('middle'): print(f"ERROR: To use this action, a security_level of `middle or below` is required. Please contact the administrator.") @@ -605,7 +591,7 @@ async def remove_snapshot(request): return web.Response(status=400) -@PromptServer.instance.routes.get("/snapshot/restore") +@routes.get("/snapshot/restore") async def remove_snapshot(request): if not is_allowed_security_level('middle'): print(f"ERROR: To use this action, a security_level of `middle or below` is required. Please contact the administrator.") @@ -631,7 +617,7 @@ async def remove_snapshot(request): return web.Response(status=400) -@PromptServer.instance.routes.get("/snapshot/get_current") +@routes.get("/snapshot/get_current") async def get_current_snapshot_api(request): try: return web.json_response(core.get_current_snapshot(), content_type='application/json') @@ -639,7 +625,7 @@ async def get_current_snapshot_api(request): return web.Response(status=400) -@PromptServer.instance.routes.get("/snapshot/save") +@routes.get("/snapshot/save") async def save_snapshot(request): try: core.save_snapshot_with_postfix('snapshot') @@ -774,7 +760,34 @@ def copy_set_active(files, is_disable, js_path_name='.'): return True -@PromptServer.instance.routes.post("/customnode/install") +@routes.get("/customnode/versions/{node_name}") +async def get_cnr_versions(request): + node_name = request.match_info.get("node_name", None) + versions = core.cnr_utils.all_versions_of_node(node_name) + + if versions: + return web.json_response(versions, content_type='application/json') + + return web.Response(status=400) + + +@routes.get("/customnode/disabled_versions/{node_name}") +async def get_disabled_versions(request): + node_name = request.match_info.get("node_name", None) + versions = [] + if node_name in core.unified_manager.nightly_inactive_nodes: + versions.append(dict(version='nightly')) + + for v in core.unified_manager.cnr_inactive_nodes.get(node_name, {}).keys(): + versions.append(dict(version=v)) + + if versions: + return web.json_response(versions, content_type='application/json') + + return web.Response(status=400) + + +@routes.post("/customnode/install") async def install_custom_node(request): if not is_allowed_security_level('middle'): print(f"ERROR: To use this action, a security_level of `middle or below` is required. 
Please contact the administrator.") @@ -782,46 +795,47 @@ async def install_custom_node(request): json_data = await request.json() - risky_level = await get_risky_level(json_data['files']) + # non-nightly cnr is safe + risky_level = None + cnr_id = json_data.get('id') + skip_post_install = json_data.get('skip_post_install') + + if json_data['version'] != 'unknown': + selected_version = json_data.get('selected_version', 'latest') + if selected_version != 'nightly': + risky_level = 'low' + node_spec_str = f"{cnr_id}@{selected_version}" + else: + node_spec_str = f"{cnr_id}@nightly" + else: + # unknown + unknown_name = os.path.basename(json_data['files'][0]) + node_spec_str = f"{unknown_name}@unknown" + + # apply security policy if not cnr node (nightly isn't regarded as cnr node) + if risky_level is None: + risky_level = await get_risky_level(json_data['files']) + if not is_allowed_security_level(risky_level): print(f"ERROR: This installation is not allowed in this security_level. Please contact the administrator.") return web.Response(status=404) - install_type = json_data['install_type'] + node_spec = core.unified_manager.resolve_node_spec(node_spec_str) - print(f"Install custom node '{json_data['title']}'") + if node_spec is None: + return - res = False + node_name, version_spec, is_specified = node_spec + res = await core.unified_manager.install_by_id(node_name, version_spec, json_data['channel'], json_data['mode'], return_postinstall=skip_post_install) + # discard post install if skip_post_install mode - if len(json_data['files']) == 0: + if res not in ['skip', 'enable', 'install-git', 'install-cnr', 'switch-cnr']: return web.Response(status=400) - if install_type == "unzip": - res = unzip_install(json_data['files']) - - if install_type == "copy": - js_path_name = json_data['js_path'] if 'js_path' in json_data else '.' - res = copy_install(json_data['files'], js_path_name) - - elif install_type == "git-clone": - res = core.gitclone_install(json_data['files']) - - if 'pip' in json_data: - for pname in json_data['pip']: - pkg = core.remap_pip_package(pname) - install_cmd = [sys.executable, "-m", "pip", "install", pkg] - core.try_install_script(json_data['files'][0], ".", install_cmd) - - core.clear_pip_cache() - - if res: - print(f"After restarting ComfyUI, please refresh the browser.") - return web.json_response({}, content_type='application/json') - - return web.Response(status=400) + return web.Response(status=200) -@PromptServer.instance.routes.post("/customnode/fix") +@routes.post("/customnode/fix") async def fix_custom_node(request): if not is_allowed_security_level('middle'): print(f"ERROR: To use this action, a security_level of `middle or below` is required. 
Please contact the administrator.") @@ -829,49 +843,45 @@ async def fix_custom_node(request): json_data = await request.json() - install_type = json_data['install_type'] - - print(f"Install custom node '{json_data['title']}'") - - res = False - - if len(json_data['files']) == 0: - return web.Response(status=400) - - if install_type == "git-clone": - res = core.gitclone_fix(json_data['files']) + node_id = json_data.get('id') + node_ver = json_data['version'] + if node_ver != 'unknown': + node_name = node_id else: - return web.Response(status=400) + # unknown + node_name = os.path.basename(json_data['files'][0]) - if 'pip' in json_data: - for pname in json_data['pip']: - install_cmd = [sys.executable, "-m", "pip", "install", '-U', pname] - core.try_install_script(json_data['files'][0], ".", install_cmd) + res = core.unified_manager.unified_fix(node_name, node_ver) - if res: + if res.result: print(f"After restarting ComfyUI, please refresh the browser.") return web.json_response({}, content_type='application/json') + print(f"ERROR: An error occurred while fixing '{node_name}@{node_ver}'.") return web.Response(status=400) -@PromptServer.instance.routes.post("/customnode/install/git_url") +@routes.post("/customnode/install/git_url") async def install_custom_node_git_url(request): if not is_allowed_security_level('high'): print(f"ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.") return web.Response(status=403) url = await request.text() - res = core.gitclone_install([url]) + res = await core.gitclone_install(url) - if res: + if res.action == 'skip': + print(f"Already installed: '{res.target}'") + return web.Response(status=200) + elif res.result: print(f"After restarting ComfyUI, please refresh the browser.") return web.Response(status=200) + print(res.msg) return web.Response(status=400) -@PromptServer.instance.routes.post("/customnode/install/pip") +@routes.post("/customnode/install/pip") async def install_custom_node_git_url(request): if not is_allowed_security_level('high'): print(f"ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.") @@ -883,7 +893,7 @@ async def install_custom_node_git_url(request): return web.Response(status=200) -@PromptServer.instance.routes.post("/customnode/uninstall") +@routes.post("/customnode/uninstall") async def uninstall_custom_node(request): if not is_allowed_security_level('middle'): print(f"ERROR: To use this action, a security_level of `middle or below` is required. Please contact the administrator.") @@ -891,27 +901,26 @@ async def uninstall_custom_node(request): json_data = await request.json() - install_type = json_data['install_type'] + node_id = json_data.get('id') + if json_data['version'] != 'unknown': + is_unknown = False + node_name = node_id + else: + # unknown + is_unknown = True + node_name = os.path.basename(json_data['files'][0]) - print(f"Uninstall custom node '{json_data['title']}'") + res = core.unified_manager.unified_uninstall(node_name, is_unknown) - res = False - - if install_type == "copy": - js_path_name = json_data['js_path'] if 'js_path' in json_data else '.' 
- res = copy_uninstall(json_data['files'], js_path_name) - - elif install_type == "git-clone": - res = core.gitclone_uninstall(json_data['files']) - - if res: + if res.result: print(f"After restarting ComfyUI, please refresh the browser.") return web.json_response({}, content_type='application/json') + print(f"ERROR: An error occurred while uninstalling '{node_name}'.") return web.Response(status=400) -@PromptServer.instance.routes.post("/customnode/update") +@routes.post("/customnode/update") async def update_custom_node(request): if not is_allowed_security_level('middle'): print(f"ERROR: To use this action, a security_level of `middle or below` is required. Please contact the administrator.") @@ -919,25 +928,26 @@ async def update_custom_node(request): json_data = await request.json() - install_type = json_data['install_type'] + node_id = json_data.get('id') + if json_data['version'] != 'unknown': + node_name = node_id + else: + # unknown + node_name = os.path.basename(json_data['files'][0]) - print(f"Update custom node '{json_data['title']}'") - - res = False - - if install_type == "git-clone": - res = core.gitclone_update(json_data['files']) + res = core.unified_manager.unified_update(node_name, json_data['version']) core.clear_pip_cache() - if res: + if res.result: print(f"After restarting ComfyUI, please refresh the browser.") return web.json_response({}, content_type='application/json') + print(f"ERROR: An error occurred while updating '{node_name}'.") return web.Response(status=400) -@PromptServer.instance.routes.get("/comfyui_manager/update_comfyui") +@routes.get("/comfyui_manager/update_comfyui") async def update_comfyui(request): print(f"Update ComfyUI") @@ -957,21 +967,20 @@ async def update_comfyui(request): return web.Response(status=400) -@PromptServer.instance.routes.post("/customnode/toggle_active") -async def toggle_active(request): +@routes.post("/customnode/disable") +async def disable_node(request): json_data = await request.json() - install_type = json_data['install_type'] - is_disabled = json_data['installed'] == "Disabled" + node_id = json_data.get('id') + if json_data['version'] != 'unknown': + is_unknown = False + node_name = node_id + else: + # unknown + is_unknown = True + node_name = os.path.basename(json_data['files'][0]) - print(f"Update custom node '{json_data['title']}'") - - res = False - - if install_type == "git-clone": - res = core.gitclone_set_active(json_data['files'], not is_disabled) - elif install_type == "copy": - res = copy_set_active(json_data['files'], not is_disabled, json_data.get('js_path', None)) + res = core.unified_manager.unified_disable(node_name, is_unknown) if res: return web.json_response({}, content_type='application/json') @@ -979,7 +988,20 @@ async def toggle_active(request): return web.Response(status=400) -@PromptServer.instance.routes.post("/model/install") +@routes.get("/manager/migrate_unmanaged_nodes") +async def migrate_unmanaged_nodes(request): + print(f"[ComfyUI-Manager] Migrating unmanaged nodes...") + await core.unified_manager.migrate_unmanaged_nodes() + print("Done.") + return web.Response(status=200) + + +@routes.get("/manager/need_to_migrate") +async def need_to_migrate(request): + return web.Response(text=str(core.need_to_migrate), status=200) + + +@routes.post("/model/install") async def install_model(request): json_data = await request.json() @@ -1046,7 +1068,7 @@ class ManagerTerminalHook: manager_terminal_hook = ManagerTerminalHook() -@PromptServer.instance.routes.get("/manager/terminal") 
+@routes.get("/manager/terminal") async def terminal_mode(request): if not is_allowed_security_level('high'): print(f"ERROR: To use this feature, you must either set '--listen' to a local IP and set the security level to 'normal-' or lower, or set the security level to 'middle' or 'weak'. Please contact the administrator.") @@ -1061,7 +1083,7 @@ async def terminal_mode(request): return web.Response(status=200) -@PromptServer.instance.routes.get("/manager/preview_method") +@routes.get("/manager/preview_method") async def preview_method(request): if "value" in request.rel_url.query: set_preview_method(request.rel_url.query['value']) @@ -1072,7 +1094,7 @@ async def preview_method(request): return web.Response(status=200) -@PromptServer.instance.routes.get("/manager/badge_mode") +@routes.get("/manager/badge_mode") async def badge_mode(request): if "value" in request.rel_url.query: set_badge_mode(request.rel_url.query['value']) @@ -1083,7 +1105,7 @@ async def badge_mode(request): return web.Response(status=200) -@PromptServer.instance.routes.get("/manager/default_ui") +@routes.get("/manager/default_ui") async def default_ui_mode(request): if "value" in request.rel_url.query: set_default_ui_mode(request.rel_url.query['value']) @@ -1094,7 +1116,7 @@ async def default_ui_mode(request): return web.Response(status=200) -@PromptServer.instance.routes.get("/manager/component/policy") +@routes.get("/manager/component/policy") async def component_policy(request): if "value" in request.rel_url.query: set_component_policy(request.rel_url.query['value']) @@ -1105,7 +1127,7 @@ async def component_policy(request): return web.Response(status=200) -@PromptServer.instance.routes.get("/manager/dbl_click/policy") +@routes.get("/manager/dbl_click/policy") async def dbl_click_policy(request): if "value" in request.rel_url.query: set_double_click_policy(request.rel_url.query['value']) @@ -1116,7 +1138,7 @@ async def dbl_click_policy(request): return web.Response(status=200) -@PromptServer.instance.routes.get("/manager/channel_url_list") +@routes.get("/manager/channel_url_list") async def channel_url_list(request): channels = core.get_channel_dict() if "value" in request.rel_url.query: @@ -1153,7 +1175,7 @@ def add_target_blank(html_text): return modified_html -@PromptServer.instance.routes.get("/manager/notice") +@routes.get("/manager/notice") async def get_notice(request): url = "github.com" path = "/ltdrdata/ltdrdata.github.io/wiki/News" @@ -1188,7 +1210,7 @@ async def get_notice(request): return web.Response(text="Unable to retrieve Notice", status=200) -@PromptServer.instance.routes.get("/manager/reboot") +@routes.get("/manager/reboot") def restart(self): if not is_allowed_security_level('middle'): print(f"ERROR: To use this action, a security_level of `middle or below` is required. 
Please contact the administrator.") @@ -1214,12 +1236,11 @@ def restart(self): def sanitize_filename(input_string): - # 알파벳, 숫자, 및 밑줄 이외의 문자를 밑줄로 대체 result_string = re.sub(r'[^a-zA-Z0-9_]', '_', input_string) return result_string -@PromptServer.instance.routes.post("/manager/component/save") +@routes.post("/manager/component/save") async def save_component(request): try: data = await request.json() @@ -1249,7 +1270,7 @@ async def save_component(request): return web.Response(status=400) -@PromptServer.instance.routes.post("/manager/component/loads") +@routes.post("/manager/component/loads") async def load_components(request): try: json_files = [f for f in os.listdir(components_path) if f.endswith('.json')] @@ -1271,7 +1292,7 @@ async def load_components(request): return web.Response(status=400) -@PromptServer.instance.routes.get("/manager/share_option") +@routes.get("/manager/share_option") async def share_option(request): if "value" in request.rel_url.query: core.get_config()['share_option'] = request.rel_url.query['value'] @@ -1340,7 +1361,7 @@ def set_youml_settings(settings): f.write(settings) -@PromptServer.instance.routes.get("/manager/get_openart_auth") +@routes.get("/manager/get_openart_auth") async def api_get_openart_auth(request): # print("Getting stored Matrix credentials...") openart_key = get_openart_auth() @@ -1349,7 +1370,7 @@ async def api_get_openart_auth(request): return web.json_response({"openart_key": openart_key}) -@PromptServer.instance.routes.post("/manager/set_openart_auth") +@routes.post("/manager/set_openart_auth") async def api_set_openart_auth(request): json_data = await request.json() openart_key = json_data['openart_key'] @@ -1358,7 +1379,7 @@ async def api_set_openart_auth(request): return web.Response(status=200) -@PromptServer.instance.routes.get("/manager/get_matrix_auth") +@routes.get("/manager/get_matrix_auth") async def api_get_matrix_auth(request): # print("Getting stored Matrix credentials...") matrix_auth = get_matrix_auth() @@ -1367,7 +1388,7 @@ async def api_get_matrix_auth(request): return web.json_response(matrix_auth) -@PromptServer.instance.routes.get("/manager/youml/settings") +@routes.get("/manager/youml/settings") async def api_get_youml_settings(request): youml_settings = get_youml_settings() if not youml_settings: @@ -1375,14 +1396,14 @@ async def api_get_youml_settings(request): return web.json_response(json.loads(youml_settings)) -@PromptServer.instance.routes.post("/manager/youml/settings") +@routes.post("/manager/youml/settings") async def api_set_youml_settings(request): json_data = await request.json() set_youml_settings(json.dumps(json_data)) return web.Response(status=200) -@PromptServer.instance.routes.get("/manager/get_comfyworkflows_auth") +@routes.get("/manager/get_comfyworkflows_auth") async def api_get_comfyworkflows_auth(request): # Check if the user has provided Matrix credentials in a file called 'matrix_accesstoken' # in the same directory as the ComfyUI base folder @@ -1400,7 +1421,7 @@ if hasattr(PromptServer.instance, "app"): app.middlewares.append(cors_middleware) -@PromptServer.instance.routes.post("/manager/set_esheep_workflow_and_images") +@routes.post("/manager/set_esheep_workflow_and_images") async def set_esheep_workflow_and_images(request): json_data = await request.json() current_workflow = json_data['workflow'] @@ -1410,7 +1431,7 @@ async def set_esheep_workflow_and_images(request): return web.Response(status=200) -@PromptServer.instance.routes.get("/manager/get_esheep_workflow_and_images") 
+@routes.get("/manager/get_esheep_workflow_and_images") async def get_esheep_workflow_and_images(request): with open(os.path.join(core.comfyui_manager_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file: data = json.load(file) @@ -1481,7 +1502,7 @@ def compute_sha256_checksum(filepath): return sha256.hexdigest() -@PromptServer.instance.routes.post("/manager/share") +@routes.post("/manager/share") async def share_art(request): # get json data json_data = await request.json() @@ -1654,15 +1675,11 @@ async def share_art(request): }, content_type='application/json', status=200) -def sanitize(data): - return data.replace("<", "<").replace(">", ">") - - async def _confirm_try_install(sender, custom_node_url, msg): json_obj = await core.get_data_by_mode('default', 'custom-node-list.json') - sender = sanitize(sender) - msg = sanitize(msg) + sender = manager_util.sanitize_tag(sender) + msg = manager_util.sanitize_tag(msg) target = core.lookup_customnode_by_url(json_obj, custom_node_url) if target is not None: @@ -1684,10 +1701,10 @@ import asyncio async def default_cache_update(): async def get_cache(filename): uri = 'https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/' + filename - cache_uri = str(core.simple_hash(uri)) + '_' + filename + cache_uri = str(manager_util.simple_hash(uri)) + '_' + filename cache_uri = os.path.join(core.cache_dir, cache_uri) - json_obj = await core.get_data(uri, True) + json_obj = await manager_util.get_data(uri, True) with core.cache_lock: with open(cache_uri, "w", encoding='utf-8') as file: @@ -1700,7 +1717,7 @@ async def default_cache_update(): d = get_cache("alter-list.json") e = get_cache("github-stats.json") - await asyncio.gather(a, b, c, d, e) + await asyncio.gather(a, b, c, d, e, core.check_need_to_migrate()) threading.Thread(target=lambda: asyncio.run(default_cache_update())).start() diff --git a/glob/manager_util.py b/glob/manager_util.py index 5dc66b53..9ec7c748 100644 --- a/glob/manager_util.py +++ b/glob/manager_util.py @@ -1,3 +1,18 @@ +import traceback + +import aiohttp +import json +import threading +import os +from datetime import datetime + + +cache_lock = threading.Lock() + +comfyui_manager_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +cache_dir = os.path.join(comfyui_manager_path, '.cache') + + try: from distutils.version import StrictVersion except: @@ -61,3 +76,64 @@ except: def __ne__(self, other): return not self == other + +def simple_hash(input_string): + hash_value = 0 + for char in input_string: + hash_value = (hash_value * 31 + ord(char)) % (2**32) + + return hash_value + + +def is_file_created_within_one_day(file_path): + if not os.path.exists(file_path): + return False + + file_creation_time = os.path.getctime(file_path) + current_time = datetime.now().timestamp() + time_difference = current_time - file_creation_time + + return time_difference <= 86400 + + +async def get_data(uri, silent=False): + if not silent: + print(f"FETCH DATA from: {uri}", end="") + + if uri.startswith("http"): + async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session: + async with session.get(uri) as resp: + json_text = await resp.text() + else: + with cache_lock: + with open(uri, "r", encoding="utf-8") as f: + json_text = f.read() + + json_obj = json.loads(json_text) + + if not silent: + print(f" [DONE]") + + return json_obj + + +async def get_data_with_cache(uri, silent=False, cache_mode=True): + cache_uri = str(simple_hash(uri)) + '_' + 
os.path.basename(uri).replace('&', "_").replace('?', "_").replace('=', "_") + cache_uri = os.path.join(cache_dir, cache_uri+'.json') + + if cache_mode and is_file_created_within_one_day(cache_uri): + json_obj = await get_data(cache_uri, silent=silent) + else: + json_obj = await get_data(uri, silent=silent) + + with cache_lock: + with open(cache_uri, "w", encoding='utf-8') as file: + json.dump(json_obj, file, indent=4, sort_keys=True) + if not silent: + print(f"[ComfyUI-Manager] default cache updated: {uri}") + + return json_obj + + +def sanitize_tag(x): + return x.replace('<', '&lt;').replace('>', '&gt;') diff --git a/js/comfyui-manager.js index ee825bde..0445d8ce 100644 --- a/js/comfyui-manager.js +++ b/js/comfyui-manager.js @@ -11,7 +11,7 @@ import { showYouMLShareDialog } from "./comfyui-share-common.js"; import { OpenArtShareDialog } from "./comfyui-share-openart.js"; -import { free_models, install_pip, install_via_git_url, manager_instance, rebootAPI, setManagerInstance, show_message } from "./common.js"; +import { free_models, install_pip, install_via_git_url, manager_instance, rebootAPI, migrateAPI, setManagerInstance, show_message } from "./common.js"; import { ComponentBuilderDialog, getPureName, load_components, set_component_policy } from "./components-manager.js"; import { CustomNodesManager } from "./custom-nodes-manager.js"; import { ModelManager } from "./model-manager.js"; @@ -253,6 +253,18 @@ const style = ` color: white !important; } + +.cm-button-orange { + width: 310px; + height: 30px; + position: relative; + overflow: hidden; + font-size: 17px !important; + font-weight: bold; + background-color: orange !important; + color: black !important; +} + .cm-experimental-button { width: 290px; height: 30px; @@ -804,6 +816,28 @@ class ManagerMenuDialog extends ComfyDialog { }), ]; + let migration_btn = + $el("button.cm-button-orange", { + type: "button", + textContent: "Migrate to New Node System", + onclick: () => migrateAPI() + }); + + migration_btn.style.display = 'none'; + + res.push(migration_btn); + + api.fetchApi('/manager/need_to_migrate') + .then(response => response.text()) + .then(text => { + if (text === 'True') { + migration_btn.style.display = 'block'; + } + }) + .catch(error => { + console.error('Error checking migration status:', error); + }); + return res; } diff --git a/js/common.js index 49a47d6f..f75de891 100644 --- a/js/common.js +++ b/js/common.js @@ -25,6 +25,23 @@ export function rebootAPI() { return false; } + +export async function migrateAPI() { + if (confirm("When performing a migration, existing installed custom nodes will be renamed and the server will be restarted. 
Are you sure you want to apply this?\n\n(If you don't perform the migration, ComfyUI-Manager's start-up time will be longer each time due to re-checking during startup.)")) { + try { + await api.fetchApi("/manager/migrate_unmanaged_nodes"); + api.fetchApi("/manager/reboot"); + } + catch(exception) { + + } + return true; + } + + return false; +} + + export var manager_instance = null; export function setManagerInstance(obj) { diff --git a/js/custom-nodes-manager.js b/js/custom-nodes-manager.js index eccdd683..4f64f721 100644 --- a/js/custom-nodes-manager.js +++ b/js/custom-nodes-manager.js @@ -1,7 +1,9 @@ import { app } from "../../scripts/app.js"; -import { $el } from "../../scripts/ui.js"; -import { - manager_instance, rebootAPI, install_via_git_url, +import { ComfyDialog, $el } from "../../scripts/ui.js"; +import { api } from "../../scripts/api.js"; + +import { + manager_instance, rebootAPI, install_via_git_url, fetchData, md5, icons } from "./common.js"; @@ -28,11 +30,11 @@ const pageCss = ` .cn-manager button { font-size: 16px; color: var(--input-text); - background-color: var(--comfy-input-bg); - border-radius: 8px; - border-color: var(--border-color); - border-style: solid; - margin: 0; + background-color: var(--comfy-input-bg); + border-radius: 8px; + border-color: var(--border-color); + border-style: solid; + margin: 0; padding: 4px 8px; min-width: 100px; } @@ -124,7 +126,7 @@ const pageCss = ` .cn-manager-grid .cn-node-desc a { color: #5555FF; - font-weight: bold; + font-weight: bold; text-decoration: none; } @@ -191,7 +193,7 @@ const pageCss = ` .cn-tag-list > div { background-color: var(--border-color); border-radius: 5px; - padding: 0 5px; + padding: 0 5px; } .cn-install-buttons { @@ -200,8 +202,8 @@ const pageCss = ` gap: 3px; padding: 3px; align-items: center; - justify-content: center; - height: 100%; + justify-content: center; + height: 100%; } .cn-selected-buttons { @@ -212,17 +214,17 @@ const pageCss = ` } .cn-manager .cn-btn-enable { - background-color: blue; + background-color: #333399; color: white; } .cn-manager .cn-btn-disable { - background-color: MediumSlateBlue; + background-color: #442277; color: white; } .cn-manager .cn-btn-update { - background-color: blue; + background-color: #1155AA; color: white; } @@ -247,41 +249,47 @@ const pageCss = ` } .cn-manager .cn-btn-uninstall { - background-color: red; + background-color: #993333; color: white; } +.cn-manager .cn-btn-switch { + background-color: #448833; + color: white; + +} + @keyframes cn-btn-loading-bg { - 0% { - left: 0; - } - 100% { - left: -105px; - } + 0% { + left: 0; + } + 100% { + left: -105px; + } } .cn-manager button.cn-btn-loading { - position: relative; - overflow: hidden; - border-color: rgb(0 119 207 / 80%); + position: relative; + overflow: hidden; + border-color: rgb(0 119 207 / 80%); background-color: var(--comfy-input-bg); } .cn-manager button.cn-btn-loading::after { - position: absolute; - top: 0; - left: 0; - content: ""; - width: 500px; - height: 100%; - background-image: repeating-linear-gradient( - -45deg, - rgb(0 119 207 / 30%), - rgb(0 119 207 / 30%) 10px, - transparent 10px, - transparent 15px - ); - animation: cn-btn-loading-bg 2s linear infinite; + position: absolute; + top: 0; + left: 0; + content: ""; + width: 500px; + height: 100%; + background-image: repeating-linear-gradient( + -45deg, + rgb(0 119 207 / 30%), + rgb(0 119 207 / 30%) 10px, + transparent 10px, + transparent 15px + ); + animation: cn-btn-loading-bg 2s linear infinite; } .cn-manager-light .cn-node-name a { @@ -356,7 
+364,6 @@ export class CustomNodesManager { } init() { - if (!document.querySelector(`style[context="${this.id}"]`)) { const $style = document.createElement("style"); $style.setAttribute("context", this.id); @@ -374,6 +381,130 @@ export class CustomNodesManager { this.initGrid(); } + showVersionSelectorDialog(versions, onSelect) { + const dialog = new ComfyDialog(); + dialog.element.style.zIndex = 100003; + dialog.element.style.width = "300px"; + dialog.element.style.padding = "0"; + dialog.element.style.backgroundColor = "#2a2a2a"; + dialog.element.style.border = "1px solid #3a3a3a"; + dialog.element.style.borderRadius = "8px"; + dialog.element.style.boxSizing = "border-box"; + dialog.element.style.overflow = "hidden"; + + const contentStyle = { + width: "300px", + display: "flex", + flexDirection: "column", + alignItems: "center", + padding: "20px", + boxSizing: "border-box", + gap: "15px" + }; + + let selectedVersion = versions[0]; + + const versionList = $el("select", { + multiple: true, + size: Math.min(10, versions.length), + style: { + width: "260px", + height: "auto", + backgroundColor: "#383838", + color: "#ffffff", + border: "1px solid #4a4a4a", + borderRadius: "4px", + padding: "5px", + boxSizing: "border-box" + } + }, + versions.map((v, index) => $el("option", { + value: v, + textContent: v, + selected: index === 0 + })) + ); + + versionList.addEventListener('change', (e) => { + selectedVersion = e.target.value; + Array.from(e.target.options).forEach(opt => { + opt.selected = opt.value === selectedVersion; + }); + }); + + const content = $el("div", { + style: contentStyle + }, [ + $el("h3", { + textContent: "Select Version", + style: { + color: "#ffffff", + backgroundColor: "#1a1a1a", + padding: "10px 15px", + margin: "0 0 10px 0", + width: "260px", + textAlign: "center", + borderRadius: "4px", + boxSizing: "border-box", + whiteSpace: "nowrap", + overflow: "hidden", + textOverflow: "ellipsis" + } + }), + versionList, + $el("div", { + style: { + display: "flex", + justifyContent: "space-between", + width: "260px", + gap: "10px" + } + }, [ + $el("button", { + textContent: "Cancel", + onclick: () => dialog.close(), + style: { + flex: "1", + padding: "8px", + backgroundColor: "#4a4a4a", + color: "#ffffff", + border: "none", + borderRadius: "4px", + cursor: "pointer", + whiteSpace: "nowrap", + overflow: "hidden", + textOverflow: "ellipsis" + } + }), + $el("button", { + textContent: "Select", + onclick: () => { + if (selectedVersion) { + onSelect(selectedVersion); + dialog.close(); + } else { + alert("Please select a version."); + } + }, + style: { + flex: "1", + padding: "8px", + backgroundColor: "#4CAF50", + color: "#ffffff", + border: "none", + borderRadius: "4px", + cursor: "pointer", + whiteSpace: "nowrap", + overflow: "hidden", + textOverflow: "ellipsis" + } + }), + ]) + ]); + + dialog.show(content); + } + initFilter() { const $filter = this.element.querySelector(".cn-manager-filter"); const filterList = [{ @@ -382,23 +513,31 @@ export class CustomNodesManager { hasData: true }, { label: "Installed", - value: "True", + value: "installed", + hasData: true + }, { + label: "Enabled", + value: "enabled", hasData: true }, { label: "Disabled", - value: "Disabled", + value: "disabled", hasData: true }, { label: "Import Failed", - value: "Fail", + value: "import-fail", hasData: true }, { label: "Not Installed", - value: "False", + value: "not-installed", hasData: true }, { - label: "Unknown", - value: "None", + label: "ComfyRegistry", + value: "cnr", + hasData: true + }, { + label: 
"Non-ComfyRegistry", + value: "unknown", hasData: true }, { label: "Update", @@ -423,16 +562,15 @@ export class CustomNodesManager { return this.filterList.find(it => it.value === filter) } - getInstallButtons(installed, title) { - + getActionButtons(action, rowItem, is_selected_button) { const buttons = { "enable": { label: "Enable", - mode: "toggle_active" + mode: "enable" }, "disable": { label: "Disable", - mode: "toggle_active" + mode: "disable" }, "update": { @@ -460,34 +598,47 @@ export class CustomNodesManager { "uninstall": { label: "Uninstall", mode: "uninstall" + }, + "switch": { + label: "Switch", + mode: "switch" } } const installGroups = { - "Disabled": ["enable", "uninstall"], - "Update": ["update", "disable", "uninstall"], - "Fail": ["try-fix", "uninstall"], - "True": ["try-update", "disable", "uninstall"], - "False": ["install"], - 'None': ["try-install"] + "disabled": ["enable", "switch", "uninstall"], + "updatable": ["update", "switch", "disable", "uninstall"], + "import-fail": ["try-fix", "switch", "disable", "uninstall"], + "enabled": ["try-update", "switch", "disable", "uninstall"], + "not-installed": ["install"], + 'unknown': ["try-install"] } if (!manager_instance.update_check_checkbox.checked) { - installGroups.True = installGroups.True.filter(it => it !== "try-update"); + installGroups.enabled = installGroups.enabled.filter(it => it !== "try-update"); } - if (title === "ComfyUI-Manager") { - installGroups.True = installGroups.True.filter(it => it !== "disable"); + if (rowItem?.title === "ComfyUI-Manager") { + installGroups.enabled = installGroups.enabled.filter(it => it !== "disable"); + } + + if (rowItem?.version === "unknown") { + installGroups.enabled = installGroups.enabled.filter(it => it !== "switch"); + } + + let list = installGroups[action]; + + if(is_selected_button) { + list = list.filter(it => it !== "switch"); } - const list = installGroups[installed]; if (!list) { return ""; } return list.map(id => { const bt = buttons[id]; - return ``; + return ``; }).join(""); } @@ -621,18 +772,27 @@ export class CustomNodesManager { this.showStatus(`${prevViewRowsLength.toLocaleString()} custom nodes`); } - }); + }); - grid.bind('onSelectChanged', (e, changes) => { - this.renderSelected(); - }); + grid.bind('onSelectChanged', (e, changes) => { + this.renderSelected(); + }); grid.bind('onClick', (e, d) => { const btn = this.getButton(d.e.target); if (btn) { - this.installNodes([d.rowItem.hash], btn, d.rowItem.title); + const item = this.grid.getRowItemBy("hash", d.rowItem.hash); + + const { target, label, mode} = btn; + if((mode === "install" || mode === "switch" || mode == "enable") && item.originalData.version != 'unknown') { + // install after select version via dialog if item is cnr node + this.installNodeWithVersion(d.rowItem, btn, mode == 'enable'); + } + else { + this.installNodes([d.rowItem.hash], btn, d.rowItem.title); + } } - }); + }); grid.setOption({ theme: 'dark', @@ -651,7 +811,7 @@ export class CustomNodesManager { bindContainerResize: true, cellResizeObserver: (rowItem, columnItem) => { - const autoHeightColumns = ['title', 'installed', 'description', "alternatives"]; + const autoHeightColumns = ['title', 'action', 'description', "alternatives"]; return autoHeightColumns.includes(columnItem.id) }, @@ -696,11 +856,11 @@ export class CustomNodesManager { theme: colorPalette === "light" ? 
"" : "dark" }; - const rows = this.custom_nodes || []; - rows.forEach((item, i) => { - item.id = i + 1; - const nodeKey = item.files[0]; + const rows = this.custom_nodes || {}; + for(let nodeKey in rows) { + let item = rows[nodeKey]; const extensionInfo = this.extension_mappings[nodeKey]; + if(extensionInfo) { const { extensions, conflicts } = extensionInfo; if (extensions.length) { @@ -712,7 +872,7 @@ export class CustomNodesManager { item.conflictsList = conflicts; } } - }); + } const columns = [{ id: 'id', @@ -727,22 +887,47 @@ export class CustomNodesManager { maxWidth: 500, classMap: 'cn-node-name', formatter: (title, rowItem, columnItem) => { - return `${rowItem.installed === 'Fail' ? '(IMPORT FAILED)' : ''} + return `${rowItem.action === 'import-fail' ? '(IMPORT FAILED)' : ''} ${title}`; } }, { - id: 'installed', - name: 'Install', + id: 'version', + name: 'Version', + width: 200, + minWidth: 100, + maxWidth: 500, + classMap: 'cn-node-desc', + formatter: (version, rowItem, columnItem) => { + if(version == undefined) { + return `undef`; + } + else { + if(rowItem.cnr_latest && version != rowItem.cnr_latest) { + if(version == 'nightly') { + return `${version} [${rowItem.cnr_latest}]`; + } + else { + return `${version} [↑${rowItem.cnr_latest}]`; + } + } + else { + return `${version}`; + } + } + } + }, { + id: 'action', + name: 'Action', width: 130, minWidth: 110, maxWidth: 200, sortable: false, align: 'center', - formatter: (installed, rowItem, columnItem) => { + formatter: (action, rowItem, columnItem) => { if (rowItem.restart) { return `Restart Required`; } - const buttons = this.getInstallButtons(installed, rowItem.title); + const buttons = this.getActionButtons(action, rowItem); return `
${buttons}
`; } }, { @@ -845,14 +1030,35 @@ export class CustomNodesManager { } }]; + let rows_values = Object.keys(rows).map(key => rows[key]); + + rows_values = + rows_values.sort((a, b) => { + if (a.version == 'unknown' && b.version != 'unknown') return 1; + if (a.version != 'unknown' && b.version == 'unknown') return -1; + + if (a.stars !== b.stars) { + return b.stars - a.stars; + } + + if (a.last_update !== b.last_update) { + return new Date(b.last_update) - new Date(a.last_update); + } + + return 0; + }); + this.grid.setData({ - options, - rows, - columns + options: options, + rows: rows_values, + columns: columns }); + for(let i=0; i { - let type = item.installed; + let type = item.action; if (item.restart) { type = "Restart Required"; } @@ -895,7 +1101,7 @@ export class CustomNodesManager { const filterItem = this.getFilterItem(v); list.push(`
Selected ${selectedMap[v].length} ${filterItem ? filterItem.label : v} - ${this.grid.hasMask ? "" : this.getInstallButtons(v)} + ${this.grid.hasMask ? "" : this.getActionButtons(v, null, true)}
`); }); @@ -913,8 +1119,67 @@ export class CustomNodesManager { } } - async installNodes(list, btn, title) { - + async installNodeWithVersion(rowItem, btn, is_enable) { + let hash = rowItem.hash; + let title = rowItem.title; + + const item = this.grid.getRowItemBy("hash", hash); + + let node_id = item.originalData.id; + + this.showLoading(); + let res; + if(is_enable) { + res = await api.fetchApi(`/customnode/disabled_versions/${node_id}`, { cache: "no-store" }); + } + else { + res = await api.fetchApi(`/customnode/versions/${node_id}`, { cache: "no-store" }); + } + this.hideLoading(); + + if(res.status == 200) { + let obj = await res.json(); + + let versions = []; + let default_version; + let version_cnt = 0; + + if(!is_enable) { + if(rowItem.cnr_latest != rowItem.originalData.active_version) { + versions.push('latest'); + } + + if(rowItem.originalData.active_version != 'nightly') { + versions.push('nightly'); + default_version = 'nightly'; + version_cnt++; + } + } + + for(let v of obj) { + if(rowItem.originalData.active_version != v.version) { + default_version = v.version; + versions.push(v.version); + version_cnt++; + } + } + + if(version_cnt == 1) { + // if only one version is available + this.installNodes([hash], btn, title, default_version); + } + else { + this.showVersionSelectorDialog(versions, (selected_version) => { + this.installNodes([hash], btn, title, selected_version); + }); + } + } + else { + show_message('Failed to fetch versions from ComfyRegistry.'); + } + } + + async installNodes(list, btn, title, selected_version) { const { target, label, mode} = btn; if(mode === "uninstall") { @@ -925,13 +1190,11 @@ export class CustomNodesManager { } target.classList.add("cn-btn-loading"); - this.showLoading(); this.showError(""); let needRestart = false; let errorMsg = ""; for (const hash of list) { - const item = this.grid.getRowItemBy("hash", hash); if (!item) { errorMsg = `Not found custom node: ${hash}`; @@ -949,9 +1212,24 @@ export class CustomNodesManager { this.showStatus(`${label} ${item.title} ...`); const data = item.originalData; - const res = await fetchData(`/customnode/${mode}`, { + data.selected_version = selected_version; + data.channel = this.channel; + data.mode = this.mode; + + let install_mode = mode; + if(mode == 'switch') { + install_mode = 'install'; + } + + // don't post install if install_mode == 'enable' + data.skip_post_install = install_mode == 'enable'; + let api_mode = install_mode; + if(install_mode == 'enable') { + api_mode = 'install'; + } + + const res = await api.fetchApi(`/customnode/${api_mode}`, { method: 'POST', - headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(data) }); @@ -974,13 +1252,12 @@ export class CustomNodesManager { this.grid.setRowSelected(item, false); item.restart = true; this.restartMap[item.hash] = true; - this.grid.updateCell(item, "installed"); + this.grid.updateCell(item, "action"); //console.log(res.data); } - this.hideLoading(); target.classList.remove("cn-btn-loading"); if (errorMsg) { @@ -1064,26 +1341,28 @@ export class CustomNodesManager { const mappings = res.data; // build regex->url map - const regex_to_url = []; - this.custom_nodes.forEach(node => { + const regex_to_pack = []; + for(let k in this.custom_nodes) { + let node = this.custom_nodes[k]; + if(node.nodename_pattern) { - regex_to_url.push({ - regex: new RegExp(node.nodename_pattern), + regex_to_pack.push({ + regex: new RegExp(node.nodename_pattern), url: node.files[0] }); } - }); + } // build name->url map - const name_to_urls = {}; + 
const name_to_packs = {}; for (const url in mappings) { const names = mappings[url]; for(const name in names[0]) { - let v = name_to_urls[names[0][name]]; + let v = name_to_packs[names[0][name]]; if(v == undefined) { v = []; - name_to_urls[names[0][name]] = v; + name_to_packs[names[0][name]] = v; } v.push(url); } @@ -1110,15 +1389,15 @@ export class CustomNodesManager { continue; if (!registered_nodes.has(node_type)) { - const urls = name_to_urls[node_type.trim()]; - if(urls) - urls.forEach(url => { + const packs = name_to_packs[node_type.trim()]; + if(packs) + packs.forEach(url => { missing_nodes.add(url); }); else { - for(let j in regex_to_url) { - if(regex_to_url[j].regex.test(node_type)) { - missing_nodes.add(regex_to_url[j].url); + for(let j in regex_to_pack) { + if(regex_to_pack[j].regex.test(node_type)) { + missing_nodes.add(regex_to_pack[j].url); } } } @@ -1129,19 +1408,27 @@ export class CustomNodesManager { const unresolved = resUnresolved.data; if (unresolved && unresolved.nodes) { unresolved.nodes.forEach(node_type => { - const url = name_to_urls[node_type]; - if(url) { - missing_nodes.add(url); + const packs = name_to_packs[node_type]; + if(packs) { + packs.forEach(url => { + missing_nodes.add(url); + }); } }); } const hashMap = {}; - this.custom_nodes.forEach(item => { - if (item.files.some(file => missing_nodes.has(file))) { + for(let k in this.custom_nodes) { + let item = this.custom_nodes[k]; + + if(missing_nodes.has(item.id)) { hashMap[item.hash] = true; } - }); + else if (item.files?.some(file => missing_nodes.has(file))) { + hashMap[item.hash] = true; + } + } + return hashMap; } @@ -1156,27 +1443,28 @@ export class CustomNodesManager { } const hashMap = {}; - const { items } = res.data; + const items = res.data; - items.forEach(item => { + for(let i in items) { + let item = items[i]; + let custom_node = this.custom_nodes[i]; - const custom_node = this.custom_nodes.find(node => node.files.find(file => file === item.id)); if (!custom_node) { console.log(`Not found custom node: ${item.id}`); - return; + continue; } const tags = `${item.tags}`.split(",").map(tag => { return `
${tag.trim()}
`; - }).join("") + }).join(""); hashMap[custom_node.hash] = { alternatives: `
${tags}
${item.description}` } - }); + } - return hashMap + return hashMap; } async loadData(show_mode = ShowMode.NORMAL) { @@ -1198,18 +1486,19 @@ export class CustomNodesManager { return } - const { channel, custom_nodes} = res.data; + const { channel, node_packs } = res.data; this.channel = channel; - this.custom_nodes = custom_nodes; + this.mode = mode; + this.custom_nodes = node_packs; if(this.channel !== 'default') { this.element.querySelector(".cn-manager-channel").innerHTML = `Channel: ${this.channel} (Incomplete list)`; } - for (const item of custom_nodes) { + for (const k in node_packs) { + let item = node_packs[k]; item.originalData = JSON.parse(JSON.stringify(item)); - const message = item.title + item.files[0]; - item.hash = md5(message); + item.hash = md5(k); } const filterItem = this.getFilterItem(this.show_mode); @@ -1217,11 +1506,12 @@ export class CustomNodesManager { let hashMap; if(this.show_mode == ShowMode.UPDATE) { hashMap = {}; - custom_nodes.forEach(it => { - if (it.installed === "Update") { + for (const k in node_packs) { + let it = node_packs[k]; + if (it['update-state'] === "true") { hashMap[it.hash] = true; } - }); + } } else if(this.show_mode == ShowMode.MISSING) { hashMap = await this.getMissingNodes(); } else if(this.show_mode == ShowMode.ALTERNATIVES) { @@ -1231,10 +1521,23 @@ export class CustomNodesManager { filterItem.hasData = true; } - custom_nodes.forEach(nodeItem => { + for(let k in node_packs) { + let nodeItem = node_packs[k]; + if (this.restartMap[nodeItem.hash]) { nodeItem.restart = true; } + + if(nodeItem['update-state'] == "true") { + nodeItem.action = 'updatable'; + } + else if(nodeItem['import-fail']) { + nodeItem.action = 'import-fail'; + } + else { + nodeItem.action = nodeItem.state; + } + const filterTypes = new Set(); this.filterList.forEach(filterItem => { const { value, hashMap } = filterItem; @@ -1243,29 +1546,51 @@ export class CustomNodesManager { if (hashData) { filterTypes.add(value); if (value === ShowMode.UPDATE) { - nodeItem.installed = "Update"; + nodeItem['update-state'] = "true"; + } + if (value === ShowMode.MISSING) { + nodeItem['missing-node'] = "true"; } if (typeof hashData === "object") { Object.assign(nodeItem, hashData); } } } else { - if (nodeItem.installed === value) { + if (nodeItem.state === value) { filterTypes.add(value); } - const map = { - "Update": "True", - "Disabled": "True", - "Fail": "True", - "None": "False" + + switch(nodeItem.state) { + case "enabled": + filterTypes.add("enabled"); + case "disabled": + filterTypes.add("installed"); + break; + + case "not-installed": + filterTypes.add("not-installed"); + break; } - if (map[nodeItem.installed]) { - filterTypes.add(map[nodeItem.installed]); + + if(nodeItem.version != 'unknown') { + filterTypes.add("cnr"); + } + else { + filterTypes.add("unknown"); + } + + if(nodeItem['update-state'] == 'true') { + filterTypes.add("updatable"); + } + + if(nodeItem['import-fail']) { + filterTypes.add("import-fail"); } } }); + nodeItem.filterTypes = Array.from(filterTypes); - }); + } this.renderGrid(); diff --git a/prestartup_script.py b/prestartup_script.py index 163a9880..00e1ce94 100644 --- a/prestartup_script.py +++ b/prestartup_script.py @@ -1,4 +1,3 @@ -import datetime import os import subprocess import sys @@ -70,11 +69,12 @@ cm_global.register_api('cm.register_message_collapse', register_message_collapse cm_global.register_api('cm.is_import_failed_extension', is_import_failed_extension) -comfyui_manager_path = os.path.dirname(__file__) +comfyui_manager_path = 
os.path.abspath(os.path.dirname(__file__)) custom_nodes_path = os.path.abspath(os.path.join(comfyui_manager_path, "..")) startup_script_path = os.path.join(comfyui_manager_path, "startup-scripts") restore_snapshot_path = os.path.join(startup_script_path, "restore-snapshot.json") git_script_path = os.path.join(comfyui_manager_path, "git_helper.py") +cm_cli_path = os.path.join(comfyui_manager_path, "cm-cli.py") pip_overrides_path = os.path.join(comfyui_manager_path, "pip_overrides.json") @@ -200,7 +200,7 @@ try: write_stderr = wrapper_stderr pat_tqdm = r'\d+%.*\[(.*?)\]' - pat_import_fail = r'seconds \(IMPORT FAILED\):.*[/\\]custom_nodes[/\\](.*)$' + pat_import_fail = r'seconds \(IMPORT FAILED\):(.*)$' is_start_mode = True @@ -233,7 +233,7 @@ try: if is_start_mode: match = re.search(pat_import_fail, message) if match: - import_failed_extensions.add(match.group(1)) + import_failed_extensions.add(match.group(1).strip()) if 'Starting server' in message: is_start_mode = False @@ -255,7 +255,7 @@ try: def sync_write(self, message, file_only=False): with log_lock: - timestamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')[:-3] + timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')[:-3] if self.last_char != '\n': log_file.write(message) else: @@ -321,7 +321,7 @@ try: if is_start_mode: match = re.search(pat_import_fail, message) if match: - import_failed_extensions.add(match.group(1)) + import_failed_extensions.add(match.group(1).strip()) if 'Starting server' in message: is_start_mode = False @@ -361,7 +361,7 @@ except: print(f"## [ERROR] ComfyUI-Manager: GitPython package seems to be installed, but failed to load somehow. Make sure you have a working git client installed") -print("** ComfyUI startup time:", datetime.datetime.now()) +print("** ComfyUI startup time:", datetime.now()) print("** Platform:", platform.system()) print("** Python version:", sys.version) print("** Python executable:", sys.executable) @@ -507,49 +507,12 @@ if os.path.exists(restore_snapshot_path): print(prefix, msg, end="") print(f"[ComfyUI-Manager] Restore snapshot.") - cmd_str = [sys.executable, git_script_path, '--apply-snapshot', restore_snapshot_path] - new_env = os.environ.copy() new_env["COMFYUI_PATH"] = comfy_path + + cmd_str = [sys.executable, cm_cli_path, 'restore-snapshot', restore_snapshot_path] exit_code = process_wrap(cmd_str, custom_nodes_path, handler=msg_capture, env=new_env) - repository_name = '' - for url in cloned_repos: - try: - repository_name = url.split("/")[-1].strip() - repo_path = os.path.join(custom_nodes_path, repository_name) - repo_path = os.path.abspath(repo_path) - - requirements_path = os.path.join(repo_path, 'requirements.txt') - install_script_path = os.path.join(repo_path, 'install.py') - - this_exit_code = 0 - - if os.path.exists(requirements_path): - with open(requirements_path, 'r', encoding="UTF-8", errors="ignore") as file: - for line in file: - package_name = remap_pip_package(line.strip()) - if package_name and not is_installed(package_name): - if not package_name.startswith('#'): - install_cmd = [sys.executable, "-m", "pip", "install", package_name] - this_exit_code += process_wrap(install_cmd, repo_path) - - if os.path.exists(install_script_path) and f'{repo_path}/install.py' not in processed_install: - processed_install.add(f'{repo_path}/install.py') - install_cmd = [sys.executable, install_script_path] - print(f">>> {install_cmd} / {repo_path}") - - new_env = os.environ.copy() - new_env["COMFYUI_PATH"] = comfy_path - this_exit_code += process_wrap(install_cmd, 
repo_path, env=new_env) - - if this_exit_code != 0: - print(f"[ComfyUI-Manager] Restoring '{repository_name}' is failed.") - - except Exception as e: - print(e) - print(f"[ComfyUI-Manager] Restoring '{repository_name}' is failed.") - if exit_code != 0: print(f"[ComfyUI-Manager] Restore snapshot failed.") else:
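
# Usage sketch (not part of the patch above): a minimal, hedged example of the caching
# helpers added to glob/manager_util.py in this diff. It assumes the script is run from
# the ComfyUI-Manager checkout so that glob/ can be put on sys.path, and that the .cache
# directory may not exist yet; the URL is the same default-channel list this patch
# fetches elsewhere.

import asyncio
import os
import sys

# assumption: executed from the ComfyUI-Manager directory, mirroring how cm-cli.py
# makes the glob/ modules importable
sys.path.append(os.path.join(os.path.dirname(__file__), "glob"))
import manager_util


async def main():
    # get_data_with_cache() writes its cache file under manager_util.cache_dir,
    # so make sure the directory exists before the first fetch.
    os.makedirs(manager_util.cache_dir, exist_ok=True)

    uri = "https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/custom-node-list.json"

    # The first call fetches over HTTP and stores a file keyed by simple_hash(uri);
    # calls within the next day are answered from that cached file instead.
    data = await manager_util.get_data_with_cache(uri, silent=True, cache_mode=True)
    print(f"fetched {len(data.get('custom_nodes', []))} node entries "
          f"(cache key {manager_util.simple_hash(uri)})")


asyncio.run(main())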