diff --git a/__init__.py b/__init__.py
index 728a4a0e..31d30828 100644
--- a/__init__.py
+++ b/__init__.py
@@ -1,16 +1,7 @@
-import os
+from .modules import manager_ext_server
+from .modules import share_3rdparty
 
-cli_mode_flag = os.path.join(os.path.dirname(__file__), '.enable-cli-only-mode')
-
-if not os.path.exists(cli_mode_flag):
-    from .glob import manager_server
-    from .glob import share_3rdparty
-    WEB_DIRECTORY = "js"
-else:
-    print(f"\n[ComfyUI-Manager] !! cli-only-mode is enabled !!\n")
+WEB_DIRECTORY = "js"
 
 NODE_CLASS_MAPPINGS = {}
 __all__ = ['NODE_CLASS_MAPPINGS']
-
-
-
diff --git a/cm-cli.py b/cm-cli.py
deleted file mode 100644
index e20bb71e..00000000
--- a/cm-cli.py
+++ /dev/null
@@ -1,1078 +0,0 @@
-import os
-import sys
-import traceback
-import json
-import asyncio
-import subprocess
-import concurrent
-import threading
-import yaml
-from typing import Optional
-
-import typer
-from rich import print
-from typing_extensions import List, Annotated
-import re
-import git
-
-sys.path.append(os.path.dirname(__file__))
-sys.path.append(os.path.join(os.path.dirname(__file__), "glob"))
-import cm_global
-import manager_core as core
-from manager_core import unified_manager
-import cnr_utils
-
-comfyui_manager_path = os.path.abspath(os.path.dirname(__file__))
-comfy_path = os.environ.get('COMFYUI_PATH')
-
-if comfy_path is None:
-    print(f"\n[bold yellow]WARN: The `COMFYUI_PATH` environment variable is not set. Assuming `custom_nodes/ComfyUI-Manager/../../` as the ComfyUI path.[/bold yellow]", file=sys.stderr)
-    comfy_path = os.path.abspath(os.path.join(comfyui_manager_path, '..', '..'))
-
-startup_script_path = os.path.join(comfyui_manager_path, "startup-scripts")
-custom_nodes_path = os.path.join(comfy_path, 'custom_nodes')
-
-script_path = os.path.join(startup_script_path, "install-scripts.txt")
-restore_snapshot_path = os.path.join(startup_script_path, "restore-snapshot.json")
-pip_overrides_path = os.path.join(comfyui_manager_path, "pip_overrides.json")
-git_script_path = os.path.join(comfyui_manager_path, "git_helper.py")
-
-cm_global.pip_blacklist = ['torch', 'torchsde', 'torchvision']
-cm_global.pip_downgrade_blacklist = ['torch', 'torchsde', 'torchvision', 'transformers', 'safetensors', 'kornia']
-cm_global.pip_overrides = {}
-if os.path.exists(pip_overrides_path):
-    with open(pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file:
-        cm_global.pip_overrides = json.load(json_file)
-        cm_global.pip_overrides['numpy'] = 'numpy<2'
-
-
-def check_comfyui_hash():
-    repo = git.Repo(comfy_path)
-    core.comfy_ui_revision = len(list(repo.iter_commits('HEAD')))
-
-    comfy_ui_hash = repo.head.commit.hexsha
-    cm_global.variables['comfyui.revision'] = core.comfy_ui_revision
-
-    core.comfy_ui_commit_datetime = repo.head.commit.committed_datetime
-
-
-check_comfyui_hash()  # This is a preparation step for manager_core
-core.check_invalid_nodes()
-
-
-def read_downgrade_blacklist():
-    try:
-        import configparser
-        config_path = os.path.join(os.path.dirname(__file__), "config.ini")
-        config = configparser.ConfigParser()
-        config.read(config_path)
-        default_conf = config['default']
-
-        if 'downgrade_blacklist' in default_conf:
-            items = default_conf['downgrade_blacklist'].split(',')
-            items = [x.strip() for x in items if x != '']
-            cm_global.pip_downgrade_blacklist += items
-            cm_global.pip_downgrade_blacklist = list(set(cm_global.pip_downgrade_blacklist))
-    except:
-        pass
-
-
-read_downgrade_blacklist()  # This is a preparation step for manager_core
-
-
-class Ctx:
-    def 
__init__(self): - self.channel = 'default' - self.no_deps = False - self.mode = 'cache' - - def set_channel_mode(self, channel, mode): - if mode is not None: - self.mode = mode - - valid_modes = ["remote", "local", "cache"] - if mode and mode.lower() not in valid_modes: - typer.echo( - f"Invalid mode: {mode}. Allowed modes are 'remote', 'local', 'cache'.", - err=True, - ) - exit(1) - - if channel is not None: - self.channel = channel - - asyncio.run(unified_manager.reload(cache_mode=self.mode == 'cache')) - asyncio.run(unified_manager.load_nightly(self.channel, self.mode)) - - def set_no_deps(self, no_deps): - self.no_deps = no_deps - - -channel_ctx = Ctx() - - -def install_node(node_spec_str, is_all=False, cnt_msg=''): - if core.is_valid_url(node_spec_str): - # install via urls - res = asyncio.run(core.gitclone_install(node_spec_str, no_deps=channel_ctx.no_deps)) - if not res.result: - print(res.msg) - print(f"[bold red]ERROR: An error occurred while installing '{node_spec_str}'.[/bold red]") - else: - print(f"{cnt_msg} [INSTALLED] {node_spec_str:50}") - else: - node_spec = unified_manager.resolve_node_spec(node_spec_str) - - if node_spec is None: - return - - node_name, version_spec, is_specified = node_spec - - # NOTE: install node doesn't allow update if version is not specified - if not is_specified: - version_spec = None - - res = asyncio.run(unified_manager.install_by_id(node_name, version_spec, channel_ctx.channel, channel_ctx.mode, instant_execution=True, no_deps=channel_ctx.no_deps)) - - if res.action == 'skip': - print(f"{cnt_msg} [ SKIP ] {node_name:50} => Already installed") - elif res.action == 'enable': - print(f"{cnt_msg} [ ENABLED ] {node_name:50}") - elif res.action == 'install-git' and res.target == 'nightly': - print(f"{cnt_msg} [INSTALLED] {node_name:50}[NIGHTLY]") - elif res.action == 'install-git' and res.target == 'unknown': - print(f"{cnt_msg} [INSTALLED] {node_name:50}[UNKNOWN]") - elif res.action == 'install-cnr' and res.result: - print(f"{cnt_msg} [INSTALLED] {node_name:50}[{res.target}]") - elif res.action == 'switch-cnr' and res.result: - print(f"{cnt_msg} [INSTALLED] {node_name:50}[{res.target}]") - elif (res.action == 'switch-cnr' or res.action == 'install-cnr') and not res.result and node_name in unified_manager.cnr_map: - print(f"\nAvailable version of '{node_name}'") - show_versions(node_name) - print("") - else: - print(f"[bold red]ERROR: An error occurred while installing '{node_name}'.\n{res.msg}[/bold red]") - - -def reinstall_node(node_spec_str, is_all=False, cnt_msg=''): - node_spec = unified_manager.resolve_node_spec(node_spec_str) - - node_name, version_spec, _ = node_spec - - unified_manager.unified_uninstall(node_name, version_spec == 'unknown') - install_node(node_name, is_all=is_all, cnt_msg=cnt_msg) - - -def fix_node(node_spec_str, is_all=False, cnt_msg=''): - node_spec = unified_manager.resolve_node_spec(node_spec_str, guess_mode='active') - - if node_spec is None: - if not is_all: - if unified_manager.resolve_node_spec(node_spec_str, guess_mode='inactive') is not None: - print(f"{cnt_msg} [ SKIPPED ]: {node_spec_str:50} => Disabled") - else: - print(f"{cnt_msg} [ SKIPPED ]: {node_spec_str:50} => Not installed") - - return - - node_name, version_spec, _ = node_spec - - print(f"{cnt_msg} [ FIXING ]: {node_name:50}[{version_spec}]") - res = unified_manager.unified_fix(node_name, version_spec, no_deps=channel_ctx.no_deps) - - if not res.result: - print(f"ERROR: f{res.msg}") - - -def uninstall_node(node_spec_str, is_all=False, cnt_msg=''): - 
spec = node_spec_str.split('@') - if len(spec) == 2 and spec[1] == 'unknown': - node_name = spec[0] - is_unknown = True - else: - node_name = spec[0] - is_unknown = False - - res = unified_manager.unified_uninstall(node_name, is_unknown) - if len(spec) == 1 and res.action == 'skip' and not is_unknown: - res = unified_manager.unified_uninstall(node_name, True) - - if res.action == 'skip': - print(f"{cnt_msg} [ SKIPPED ]: {node_name:50} => Not installed") - - elif res.result: - print(f"{cnt_msg} [UNINSTALLED] {node_name:50}") - else: - print(f"ERROR: An error occurred while uninstalling '{node_name}'.") - - -def update_node(node_spec_str, is_all=False, cnt_msg=''): - node_spec = unified_manager.resolve_node_spec(node_spec_str, 'active') - - if node_spec is None: - if unified_manager.resolve_node_spec(node_spec_str, 'inactive'): - print(f"{cnt_msg} [ SKIPPED ]: {node_spec_str:50} => Disabled") - else: - print(f"{cnt_msg} [ SKIPPED ]: {node_spec_str:50} => Not installed") - return None - - node_name, version_spec, _ = node_spec - - res = unified_manager.unified_update(node_name, version_spec, no_deps=channel_ctx.no_deps, return_postinstall=True) - - if not res.result: - print(f"ERROR: An error occurred while updating '{node_name}'.") - elif res.action == 'skip': - print(f"{cnt_msg} [ SKIPPED ]: {node_name:50} => {res.msg}") - else: - print(f"{cnt_msg} [ UPDATED ]: {node_name:50} => ({version_spec} -> {res.target})") - - return res.with_target(f'{node_name}@{res.target}') - - -def update_parallel(nodes): - is_all = False - if 'all' in nodes: - is_all = True - nodes = [] - for x in unified_manager.active_nodes.keys(): - nodes.append(x) - for x in unified_manager.unknown_active_nodes.keys(): - nodes.append(x+"@unknown") - else: - nodes = [x for x in nodes if x.lower() not in ['comfy', 'comfyui']] - - total = len(nodes) - - lock = threading.Lock() - processed = [] - - i = 0 - - def process_custom_node(x): - nonlocal i - nonlocal processed - - with lock: - i += 1 - - try: - res = update_node(x, is_all=is_all, cnt_msg=f'{i}/{total}') - with lock: - processed.append(res) - except Exception as e: - print(f"ERROR: {e}") - traceback.print_exc() - - with concurrent.futures.ThreadPoolExecutor(4) as executor: - for item in nodes: - executor.submit(process_custom_node, item) - - i = 1 - for res in processed: - if res is not None: - print(f"[{i}/{total}] Post update: {res.target}") - if res.postinstall is not None: - res.postinstall() - i += 1 - - -def update_comfyui(): - res = core.update_path(comfy_path, instant_execution=True) - if res == 'fail': - print("Updating ComfyUI has failed.") - elif res == 'updated': - print("ComfyUI is updated.") - else: - print("ComfyUI is already up to date.") - - -def enable_node(node_spec_str, is_all=False, cnt_msg=''): - if unified_manager.resolve_node_spec(node_spec_str, guess_mode='active') is not None: - print(f"{cnt_msg} [ SKIP ] {node_spec_str:50} => Already enabled") - return - - node_spec = unified_manager.resolve_node_spec(node_spec_str, guess_mode='inactive') - - if node_spec is None: - print(f"{cnt_msg} [ SKIP ] {node_spec_str:50} => Not found") - return - - node_name, version_spec, _ = node_spec - - res = unified_manager.unified_enable(node_name, version_spec) - - if res.action == 'skip': - print(f"{cnt_msg} [ SKIP ] {node_name:50} => {res.msg}") - elif res.result: - print(f"{cnt_msg} [ENABLED] {node_name:50}") - else: - print(f"{cnt_msg} [ FAIL ] {node_name:50} => {res.msg}") - - -def disable_node(node_spec_str: str, is_all=False, cnt_msg=''): - if 
'comfyui-manager' in node_spec_str.lower(): - return - - node_spec = unified_manager.resolve_node_spec(node_spec_str, guess_mode='active') - - if node_spec is None: - if unified_manager.resolve_node_spec(node_spec_str, guess_mode='inactive') is not None: - print(f"{cnt_msg} [ SKIP ] {node_spec_str:50} => Already disabled") - else: - print(f"{cnt_msg} [ SKIP ] {node_spec_str:50} => Not found") - return - - node_name, version_spec, _ = node_spec - - res = unified_manager.unified_disable(node_name, version_spec == 'unknown') - - if res.action == 'skip': - print(f"{cnt_msg} [ SKIP ] {node_name:50} => {res.msg}") - elif res.result: - print(f"{cnt_msg} [DISABLED] {node_name:50}") - else: - print(f"{cnt_msg} [ FAIL ] {node_name:50} => {res.msg}") - - -def show_list(kind, simple=False): - custom_nodes = asyncio.run(unified_manager.get_custom_nodes(channel=channel_ctx.channel, mode=channel_ctx.mode)) - - # collect not-installed unknown nodes - not_installed_unknown_nodes = [] - repo_unknown = {} - - for k, v in custom_nodes.items(): - if 'cnr_latest' not in v: - if len(v['files']) == 1: - repo_url = v['files'][0] - node_name = repo_url.split('/')[-1] - if node_name not in unified_manager.unknown_inactive_nodes and node_name not in unified_manager.unknown_active_nodes: - not_installed_unknown_nodes.append(v) - else: - repo_unknown[node_name] = v - - processed = {} - unknown_processed = [] - - flag = kind in ['all', 'cnr', 'installed', 'enabled'] - for k, v in unified_manager.active_nodes.items(): - if flag: - cnr = unified_manager.cnr_map[k] - processed[k] = "[ ENABLED ] ", cnr['name'], k, cnr['publisher']['name'], v[0] - else: - processed[k] = None - - if flag and kind != 'cnr': - for k, v in unified_manager.unknown_active_nodes.items(): - item = repo_unknown.get(k) - - if item is None: - continue - - log_item = "[ ENABLED ] ", item['title'], k, item['author'] - unknown_processed.append(log_item) - - flag = kind in ['all', 'cnr', 'installed', 'disabled'] - for k, v in unified_manager.cnr_inactive_nodes.items(): - if k in processed: - continue - - if flag: - cnr = unified_manager.cnr_map[k] - processed[k] = "[ DISABLED ] ", cnr['name'], k, cnr['publisher']['name'], ", ".join(list(v.keys())) - else: - processed[k] = None - - for k, v in unified_manager.nightly_inactive_nodes.items(): - if k in processed: - continue - - if flag: - cnr = unified_manager.cnr_map[k] - processed[k] = "[ DISABLED ] ", cnr['name'], k, cnr['publisher']['name'], 'nightly' - else: - processed[k] = None - - if flag and kind != 'cnr': - for k, v in unified_manager.unknown_inactive_nodes.items(): - item = repo_unknown.get(k) - - if item is None: - continue - - log_item = "[ DISABLED ] ", item['title'], k, item['author'] - unknown_processed.append(log_item) - - flag = kind in ['all', 'cnr', 'not-installed'] - for k, v in unified_manager.cnr_map.items(): - if k in processed: - continue - - if flag: - cnr = unified_manager.cnr_map[k] - ver_spec = v['latest_version']['version'] if 'latest_version' in v else '0.0.0' - processed[k] = "[ NOT INSTALLED ] ", cnr['name'], k, cnr['publisher']['name'], ver_spec - else: - processed[k] = None - - if flag and kind != 'cnr': - for x in not_installed_unknown_nodes: - if len(x['files']) == 1: - node_id = os.path.basename(x['files'][0]) - log_item = "[ NOT INSTALLED ] ", x['title'], node_id, x['author'] - unknown_processed.append(log_item) - - for x in processed.values(): - if x is None: - continue - - prefix, title, short_id, author, ver_spec = x - if simple: - print(title+'@'+ver_spec) - else: - 
print(f"{prefix} {title:50} {short_id:30} (author: {author:20}) \\[{ver_spec}]") - - for x in unknown_processed: - prefix, title, short_id, author = x - if simple: - print(title+'@unknown') - else: - print(f"{prefix} {title:50} {short_id:30} (author: {author:20}) [UNKNOWN]") - - -def show_snapshot(simple_mode=False): - json_obj = core.get_current_snapshot() - - if simple_mode: - print(f"[{json_obj['comfyui']}] comfyui") - for k, v in json_obj['git_custom_nodes'].items(): - print(f"[{v['hash']}] {k}") - for v in json_obj['file_custom_nodes']: - print(f"[ N/A ] {v['filename']}") - - else: - formatted_json = json.dumps(json_obj, ensure_ascii=False, indent=4) - print(formatted_json) - - -def show_snapshot_list(simple_mode=False): - snapshot_path = os.path.join(comfyui_manager_path, 'snapshots') - - files = os.listdir(snapshot_path) - json_files = [x for x in files if x.endswith('.json')] - for x in sorted(json_files): - print(x) - - -def cancel(): - if os.path.exists(script_path): - os.remove(script_path) - - if os.path.exists(restore_snapshot_path): - os.remove(restore_snapshot_path) - - -def auto_save_snapshot(): - path = core.save_snapshot_with_postfix('cli-autosave') - print(f"Current snapshot is saved as `{path}`") - - -def get_all_installed_node_specs(): - res = [] - processed = set() - for k, v in unified_manager.active_nodes.items(): - node_spec_str = f"{k}@{v[0]}" - res.append(node_spec_str) - processed.add(k) - - for k, _ in unified_manager.cnr_inactive_nodes.keys(): - if k in processed: - continue - - latest = unified_manager.get_from_cnr_inactive_nodes(k) - if latest is not None: - node_spec_str = f"{k}@{str(latest[0])}" - res.append(node_spec_str) - - for k, _ in unified_manager.nightly_inactive_nodes.keys(): - if k in processed: - continue - - node_spec_str = f"{k}@nightly" - res.append(node_spec_str) - - for k in unified_manager.unknown_active_nodes.keys(): - node_spec_str = f"{k}@unknown" - res.append(node_spec_str) - - for k in unified_manager.unknown_inactive_nodes.keys(): - node_spec_str = f"{k}@unknown" - res.append(node_spec_str) - - return res - - -def for_each_nodes(nodes, act, allow_all=True): - is_all = False - if allow_all and 'all' in nodes: - is_all = True - nodes = get_all_installed_node_specs() - else: - nodes = [x for x in nodes if x.lower() not in ['comfy', 'comfyui', 'all']] - - total = len(nodes) - i = 1 - for x in nodes: - try: - act(x, is_all=is_all, cnt_msg=f'{i}/{total}') - except Exception as e: - print(f"ERROR: {e}") - traceback.print_exc() - i += 1 - - -app = typer.Typer() - - -@app.command(help="Display help for commands") -def help(ctx: typer.Context): - print(ctx.find_root().get_help()) - ctx.exit(0) - - -@app.command(help="Install custom nodes") -def install( - nodes: List[str] = typer.Argument( - ..., help="List of custom nodes to install" - ), - channel: Annotated[ - str, - typer.Option( - show_default=False, - help="Specify the operation mode" - ), - ] = None, - mode: str = typer.Option( - None, - help="[remote|local|cache]" - ), - no_deps: Annotated[ - Optional[bool], - typer.Option( - "--no-deps", - show_default=False, - help="Skip installing any Python dependencies", - ), - ] = False, -): - channel_ctx.set_channel_mode(channel, mode) - channel_ctx.set_no_deps(no_deps) - for_each_nodes(nodes, act=install_node) - - -@app.command(help="Reinstall custom nodes") -def reinstall( - nodes: List[str] = typer.Argument( - ..., help="List of custom nodes to reinstall" - ), - channel: Annotated[ - str, - typer.Option( - show_default=False, - help="Specify 
the operation mode" - ), - ] = None, - mode: str = typer.Option( - None, - help="[remote|local|cache]" - ), - no_deps: Annotated[ - Optional[bool], - typer.Option( - "--no-deps", - show_default=False, - help="Skip installing any Python dependencies", - ), - ] = False, -): - channel_ctx.set_channel_mode(channel, mode) - channel_ctx.set_no_deps(no_deps) - for_each_nodes(nodes, act=reinstall_node) - - -@app.command(help="Uninstall custom nodes") -def uninstall( - nodes: List[str] = typer.Argument( - ..., help="List of custom nodes to uninstall" - ), - channel: Annotated[ - str, - typer.Option( - show_default=False, - help="Specify the operation mode" - ), - ] = None, - mode: str = typer.Option( - None, - help="[remote|local|cache]" - ), -): - channel_ctx.set_channel_mode(channel, mode) - for_each_nodes(nodes, act=uninstall_node) - - -@app.command(help="Disable custom nodes") -def update( - nodes: List[str] = typer.Argument( - ..., - help="[all|List of custom nodes to update]" - ), - channel: Annotated[ - str, - typer.Option( - show_default=False, - help="Specify the operation mode" - ), - ] = None, - mode: str = typer.Option( - None, - help="[remote|local|cache]" - ), -): - channel_ctx.set_channel_mode(channel, mode) - - if 'all' in nodes: - auto_save_snapshot() - - for x in nodes: - if x.lower() in ['comfyui', 'comfy', 'all']: - update_comfyui() - break - - update_parallel(nodes) - - -@app.command(help="Disable custom nodes") -def disable( - nodes: List[str] = typer.Argument( - ..., - help="[all|List of custom nodes to disable]" - ), - channel: Annotated[ - str, - typer.Option( - show_default=False, - help="Specify the operation mode" - ), - ] = None, - mode: str = typer.Option( - None, - help="[remote|local|cache]" - ), -): - channel_ctx.set_channel_mode(channel, mode) - - if 'all' in nodes: - auto_save_snapshot() - - for_each_nodes(nodes, disable_node, allow_all=True) - - -@app.command(help="Enable custom nodes") -def enable( - nodes: List[str] = typer.Argument( - ..., - help="[all|List of custom nodes to enable]" - ), - channel: Annotated[ - str, - typer.Option( - show_default=False, - help="Specify the operation mode" - ), - ] = None, - mode: str = typer.Option( - None, - help="[remote|local|cache]" - ), -): - channel_ctx.set_channel_mode(channel, mode) - - if 'all' in nodes: - auto_save_snapshot() - - for_each_nodes(nodes, enable_node, allow_all=True) - - -@app.command(help="Fix dependencies of custom nodes") -def fix( - nodes: List[str] = typer.Argument( - ..., - help="[all|List of custom nodes to fix]" - ), - channel: Annotated[ - str, - typer.Option( - show_default=False, - help="Specify the operation mode" - ), - ] = None, - mode: str = typer.Option( - None, - help="[remote|local|cache]" - ), -): - channel_ctx.set_channel_mode(channel, mode) - - if 'all' in nodes: - auto_save_snapshot() - - for_each_nodes(nodes, fix_node, allow_all=True) - - -@app.command("show-versions", help="Show all available versions of the node") -def show_versions(node_name: str): - versions = cnr_utils.all_versions_of_node(node_name) - if versions is None: - print(f"Node not found in Comfy Registry: {node_name}") - - for x in versions: - print(f"[{x['createdAt'][:10]}] {x['version']} -- {x['changelog']}") - - -@app.command("show", help="Show node list") -def show( - arg: str = typer.Argument( - help="[installed|enabled|not-installed|disabled|all|cnr|snapshot|snapshot-list]" - ), - channel: Annotated[ - str, - typer.Option( - show_default=False, - help="Specify the operation mode" - ), - ] = None, - mode: str 
= typer.Option( - None, - help="[remote|local|cache]" - ), -): - valid_commands = [ - "installed", - "enabled", - "not-installed", - "disabled", - "all", - "cnr", - "snapshot", - "snapshot-list", - ] - if arg not in valid_commands: - typer.echo(f"Invalid command: `show {arg}`", err=True) - exit(1) - - channel_ctx.set_channel_mode(channel, mode) - if arg == 'snapshot': - show_snapshot() - elif arg == 'snapshot-list': - show_snapshot_list() - else: - show_list(arg) - - -@app.command("simple-show", help="Show node list (simple mode)") -def simple_show( - arg: str = typer.Argument( - help="[installed|enabled|not-installed|disabled|all|snapshot|snapshot-list]" - ), - channel: Annotated[ - str, - typer.Option( - show_default=False, - help="Specify the operation mode" - ), - ] = None, - mode: str = typer.Option( - None, - help="[remote|local|cache]" - ), -): - valid_commands = [ - "installed", - "enabled", - "not-installed", - "disabled", - "all", - "snapshot", - "snapshot-list", - ] - if arg not in valid_commands: - typer.echo(f"[bold red]Invalid command: `show {arg}`[/bold red]", err=True) - exit(1) - - channel_ctx.set_channel_mode(channel, mode) - if arg == 'snapshot': - show_snapshot(True) - elif arg == 'snapshot-list': - show_snapshot_list(True) - else: - show_list(arg, True) - - -@app.command('cli-only-mode', help="Set whether to use ComfyUI-Manager in CLI-only mode.") -def cli_only_mode( - mode: str = typer.Argument( - ..., help="[enable|disable]" - )): - cli_mode_flag = os.path.join(os.path.dirname(__file__), '.enable-cli-only-mode') - if mode.lower() == 'enable': - with open(cli_mode_flag, 'w') as file: - pass - print(f"\nINFO: `cli-only-mode` is enabled\n") - elif mode.lower() == 'disable': - if os.path.exists(cli_mode_flag): - os.remove(cli_mode_flag) - print(f"\nINFO: `cli-only-mode` is disabled\n") - else: - print(f"\n[bold red]Invalid value for cli-only-mode: {mode}[/bold red]\n") - exit(1) - - -@app.command( - "deps-in-workflow", help="Generate dependencies file from workflow (.json/.png)" -) -def deps_in_workflow( - workflow: Annotated[ - str, typer.Option(show_default=False, help="Workflow file (.json/.png)") - ], - output: Annotated[ - str, typer.Option(show_default=False, help="Output file (.json)") - ], - channel: Annotated[ - str, - typer.Option( - show_default=False, - help="Specify the operation mode" - ), - ] = None, - mode: str = typer.Option( - None, - help="[remote|local|cache]" - ), -): - channel_ctx.set_channel_mode(channel, mode) - - input_path = workflow - output_path = output - - if not os.path.exists(input_path): - print(f"[bold red]File not found: {input_path}[/bold red]") - exit(1) - - used_exts, unknown_nodes = asyncio.run(core.extract_nodes_from_workflow(input_path, mode=channel_ctx.mode, channel_url=channel_ctx.channel)) - - custom_nodes = {} - for x in used_exts: - custom_nodes[x] = {'state': core.simple_check_custom_node(x), - 'hash': '-' - } - - res = { - 'custom_nodes': custom_nodes, - 'unknown_nodes': list(unknown_nodes) - } - - with open(output_path, "w", encoding='utf-8') as output_file: - json.dump(res, output_file, indent=4) - - print(f"Workflow dependencies are being saved into {output_path}.") - - -@app.command("save-snapshot", help="Save a snapshot of the current ComfyUI environment. If output path isn't provided. Save to ComfyUI-Manager/snapshots path.") -def save_snapshot( - output: Annotated[ - str, - typer.Option( - show_default=False, help="Specify the output file path. 
(.json/.yaml)" - ), - ] = None, -): - path = core.save_snapshot_with_postfix('snapshot', output) - print(f"Current snapshot is saved as `{path}`") - - -@app.command("restore-snapshot", help="Restore snapshot from snapshot file") -def restore_snapshot( - snapshot_name: str, - pip_non_url: Optional[bool] = typer.Option( - default=None, - show_default=False, - is_flag=True, - help="Restore for pip packages registered on PyPI.", - ), - pip_non_local_url: Optional[bool] = typer.Option( - default=None, - show_default=False, - is_flag=True, - help="Restore for pip packages registered at web URLs.", - ), - pip_local_url: Optional[bool] = typer.Option( - default=None, - show_default=False, - is_flag=True, - help="Restore for pip packages specified by local paths.", - ), -): - extras = [] - if pip_non_url: - extras.append('--pip-non-url') - - if pip_non_local_url: - extras.append('--pip-non-local-url') - - if pip_local_url: - extras.append('--pip-local-url') - - print(f"PIPs restore mode: {extras}") - - if os.path.exists(snapshot_name): - snapshot_path = os.path.abspath(snapshot_name) - else: - snapshot_path = os.path.join(core.comfyui_manager_path, 'snapshots', snapshot_name) - if not os.path.exists(snapshot_path): - print(f"[bold red]ERROR: `{snapshot_path}` is not exists.[/bold red]") - exit(1) - - try: - asyncio.run(core.restore_snapshot(snapshot_path, extras)) - except Exception: - print("[bold red]ERROR: Failed to restore snapshot.[/bold red]") - traceback.print_exc() - raise typer.Exit(code=1) - - -@app.command( - "restore-dependencies", help="Restore dependencies from whole installed custom nodes." -) -def restore_dependencies(): - node_paths = [os.path.join(custom_nodes_path, name) for name in os.listdir(custom_nodes_path) - if os.path.isdir(os.path.join(custom_nodes_path, name)) and not name.endswith('.disabled')] - - total = len(node_paths) - i = 1 - for x in node_paths: - print(f"----------------------------------------------------------------------------------------------------") - print(f"Restoring [{i}/{total}]: {x}") - unified_manager.execute_install_script('', x, instant_execution=True) - i += 1 - - -@app.command( - "post-install", help="Install dependencies and execute installation script" -) -def post_install( - path: str = typer.Argument( - help="path to custom node", - )): - path = os.path.expanduser(path) - unified_manager.execute_install_script('', path, instant_execution=True) - - -@app.command( - "install-deps", - help="Install dependencies from dependencies file(.json) or workflow(.png/.json)", -) -def install_deps( - deps: str = typer.Argument( - help="Dependency spec file (.json)", - ), - channel: Annotated[ - str, - typer.Option( - show_default=False, - help="Specify the operation mode" - ), - ] = None, - mode: str = typer.Option( - None, - help="[remote|local|cache]" - ), -): - channel_ctx.set_channel_mode(channel, mode) - auto_save_snapshot() - - if not os.path.exists(deps): - print(f"[bold red]File not found: {deps}[/bold red]") - exit(1) - else: - with open(deps, 'r', encoding="UTF-8", errors="ignore") as json_file: - try: - json_obj = json.load(json_file) - except: - print(f"[bold red]Invalid json file: {deps}[/bold red]") - exit(1) - - for k in json_obj['custom_nodes'].keys(): - state = core.simple_check_custom_node(k) - if state == 'installed': - continue - elif state == 'not-installed': - asyncio.run(core.gitclone_install(k, instant_execution=True)) - else: # disabled - core.gitclone_set_active([k], False) - - print("Dependency installation and activation 
complete.") - - -@app.command(help="Clear reserved startup action in ComfyUI-Manager") -def clear(): - cancel() - - -@app.command("export-custom-node-ids", help="Export custom node ids") -def export_custom_node_ids( - path: str, - channel: Annotated[ - str, - typer.Option( - show_default=False, - help="Specify the operation mode" - ), - ] = None, - mode: str = typer.Option( - None, - help="[remote|local|cache]" - )): - channel_ctx.set_channel_mode(channel, mode) - - with open(path, "w", encoding='utf-8') as output_file: - for x in unified_manager.cnr_map.keys(): - print(x, file=output_file) - - custom_nodes = asyncio.run(unified_manager.get_custom_nodes(channel=channel_ctx.channel, mode=channel_ctx.mode)) - for x in custom_nodes.values(): - if 'cnr_latest' not in x: - if len(x['files']) == 1: - repo_url = x['files'][0] - node_id = repo_url.split('/')[-1] - print(f"{node_id}@unknown", file=output_file) - - if 'id' in x: - print(f"{x['id']}@unknown", file=output_file) - - -@app.command( - "migrate", - help="Migrate legacy node system to new node system", -) -def migrate(): - asyncio.run(unified_manager.migrate_unmanaged_nodes()) - - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(app()) - - -print(f"") diff --git a/cm-cli.sh b/cm-cli.sh deleted file mode 100755 index b1a21ca5..00000000 --- a/cm-cli.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/bash -python cm-cli.py $* diff --git a/git_helper.py b/git_helper.py deleted file mode 100644 index 365bfa90..00000000 --- a/git_helper.py +++ /dev/null @@ -1,473 +0,0 @@ -import subprocess -import sys -import os -import traceback - -import git -import configparser -import json -import yaml -import requests -from tqdm.auto import tqdm -from git.remote import RemoteProgress - - -comfy_path = os.environ.get('COMFYUI_PATH') - -if comfy_path is None: - print(f"\n[bold yellow]WARN: The `COMFYUI_PATH` environment variable is not set. 
Assuming `custom_nodes/ComfyUI-Manager/../../` as the ComfyUI path.[/bold yellow]", file=sys.stderr) - comfy_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) - - - -def download_url(url, dest_folder, filename=None): - # Ensure the destination folder exists - if not os.path.exists(dest_folder): - os.makedirs(dest_folder) - - # Extract filename from URL if not provided - if filename is None: - filename = os.path.basename(url) - - # Full path to save the file - dest_path = os.path.join(dest_folder, filename) - - # Download the file - response = requests.get(url, stream=True) - if response.status_code == 200: - with open(dest_path, 'wb') as file: - for chunk in response.iter_content(chunk_size=1024): - if chunk: - file.write(chunk) - else: - print(f"Failed to download file from {url}") - - -config_path = os.path.join(os.path.dirname(__file__), "config.ini") -nodelist_path = os.path.join(os.path.dirname(__file__), "custom-node-list.json") -working_directory = os.getcwd() - -if os.path.basename(working_directory) != 'custom_nodes': - print(f"WARN: This script should be executed in custom_nodes dir") - print(f"DBG: INFO {working_directory}") - print(f"DBG: INFO {sys.argv}") - # exit(-1) - - -class GitProgress(RemoteProgress): - def __init__(self): - super().__init__() - self.pbar = tqdm(ascii=True) - - def update(self, op_code, cur_count, max_count=None, message=''): - self.pbar.total = max_count - self.pbar.n = cur_count - self.pbar.pos = 0 - self.pbar.refresh() - - -def gitclone(custom_nodes_path, url, target_hash=None, repo_path=None): - repo_name = os.path.splitext(os.path.basename(url))[0] - - if repo_path is None: - repo_path = os.path.join(custom_nodes_path, repo_name) - - # Clone the repository from the remote URL - repo = git.Repo.clone_from(url, repo_path, recursive=True, progress=GitProgress()) - - if target_hash is not None: - print(f"CHECKOUT: {repo_name} [{target_hash}]") - repo.git.checkout(target_hash) - - repo.git.clear_cache() - repo.close() - - -def gitcheck(path, do_fetch=False): - try: - # Fetch the latest commits from the remote repository - repo = git.Repo(path) - - if repo.head.is_detached: - print("CUSTOM NODE CHECK: True") - return - - current_branch = repo.active_branch - branch_name = current_branch.name - - remote_name = current_branch.tracking_branch().remote_name - remote = repo.remote(name=remote_name) - - if do_fetch: - remote.fetch() - - # Get the current commit hash and the commit hash of the remote branch - commit_hash = repo.head.commit.hexsha - - if f'{remote_name}/{branch_name}' in repo.refs: - remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha - else: - print("CUSTOM NODE CHECK: True") # non default branch is treated as updatable - return - - # Compare the commit hashes to determine if the local repository is behind the remote repository - if commit_hash != remote_commit_hash: - # Get the commit dates - commit_date = repo.head.commit.committed_datetime - remote_commit_date = repo.refs[f'{remote_name}/{branch_name}'].object.committed_datetime - - # Compare the commit dates to determine if the local repository is behind the remote repository - if commit_date < remote_commit_date: - print("CUSTOM NODE CHECK: True") - else: - print("CUSTOM NODE CHECK: False") - except Exception as e: - print(e) - print("CUSTOM NODE CHECK: Error") - - -def switch_to_default_branch(repo): - default_branch = repo.git.symbolic_ref('refs/remotes/origin/HEAD').replace('refs/remotes/origin/', '') - repo.git.checkout(default_branch) 
- - -def gitpull(path): - # Check if the path is a git repository - if not os.path.exists(os.path.join(path, '.git')): - raise ValueError('Not a git repository') - - # Pull the latest changes from the remote repository - repo = git.Repo(path) - if repo.is_dirty(): - print(f"STASH: '{path}' is dirty.") - repo.git.stash() - - commit_hash = repo.head.commit.hexsha - try: - if repo.head.is_detached: - switch_to_default_branch(repo) - - current_branch = repo.active_branch - branch_name = current_branch.name - - remote_name = current_branch.tracking_branch().remote_name - remote = repo.remote(name=remote_name) - - if f'{remote_name}/{branch_name}' not in repo.refs: - switch_to_default_branch(repo) - current_branch = repo.active_branch - branch_name = current_branch.name - - remote.fetch() - remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha - - if commit_hash == remote_commit_hash: - print("CUSTOM NODE PULL: None") # there is no update - repo.close() - return - - remote.pull() - - repo.git.submodule('update', '--init', '--recursive') - new_commit_hash = repo.head.commit.hexsha - - if commit_hash != new_commit_hash: - print("CUSTOM NODE PULL: Success") # update success - else: - print("CUSTOM NODE PULL: Fail") # update fail - except Exception as e: - print(e) - print("CUSTOM NODE PULL: Fail") # unknown git error - - repo.close() - - -def checkout_comfyui_hash(target_hash): - repo = git.Repo(comfy_path) - commit_hash = repo.head.commit.hexsha - - if commit_hash != target_hash: - try: - print(f"CHECKOUT: ComfyUI [{target_hash}]") - repo.git.checkout(target_hash) - except git.GitCommandError as e: - print(f"Error checking out the ComfyUI: {str(e)}") - - -def checkout_custom_node_hash(git_custom_node_infos): - repo_name_to_url = {} - - for url in git_custom_node_infos.keys(): - repo_name = url.split('/')[-1] - - if repo_name.endswith('.git'): - repo_name = repo_name[:-4] - - repo_name_to_url[repo_name] = url - - for path in os.listdir(working_directory): - if '@' in path or path.endswith("ComfyUI-Manager"): - continue - - fullpath = os.path.join(working_directory, path) - - if os.path.isdir(fullpath): - is_disabled = path.endswith(".disabled") - - try: - git_dir = os.path.join(fullpath, '.git') - if not os.path.exists(git_dir): - continue - - need_checkout = False - repo_name = os.path.basename(fullpath) - - if repo_name.endswith('.disabled'): - repo_name = repo_name[:-9] - - if repo_name not in repo_name_to_url: - if not is_disabled: - # should be disabled - print(f"DISABLE: {repo_name}") - new_path = fullpath + ".disabled" - os.rename(fullpath, new_path) - need_checkout = False - else: - item = git_custom_node_infos[repo_name_to_url[repo_name]] - if item['disabled'] and is_disabled: - pass - elif item['disabled'] and not is_disabled: - # disable - print(f"DISABLE: {repo_name}") - new_path = fullpath + ".disabled" - os.rename(fullpath, new_path) - - elif not item['disabled'] and is_disabled: - # enable - print(f"ENABLE: {repo_name}") - new_path = fullpath[:-9] - os.rename(fullpath, new_path) - fullpath = new_path - need_checkout = True - else: - need_checkout = True - - if need_checkout: - repo = git.Repo(fullpath) - commit_hash = repo.head.commit.hexsha - - if commit_hash != item['hash']: - print(f"CHECKOUT: {repo_name} [{item['hash']}]") - repo.git.checkout(item['hash']) - - except Exception: - print(f"Failed to restore snapshots for the custom node '{path}'") - - # clone missing - for k, v in git_custom_node_infos.items(): - if 'ComfyUI-Manager' in k: - continue - - if 
not v['disabled']: - repo_name = k.split('/')[-1] - if repo_name.endswith('.git'): - repo_name = repo_name[:-4] - - path = os.path.join(working_directory, repo_name) - if not os.path.exists(path): - print(f"CLONE: {path}") - gitclone(working_directory, k, target_hash=v['hash']) - - -def invalidate_custom_node_file(file_custom_node_infos): - global nodelist_path - - enabled_set = set() - for item in file_custom_node_infos: - if not item['disabled']: - enabled_set.add(item['filename']) - - for path in os.listdir(working_directory): - fullpath = os.path.join(working_directory, path) - - if not os.path.isdir(fullpath) and fullpath.endswith('.py'): - if path not in enabled_set: - print(f"DISABLE: {path}") - new_path = fullpath+'.disabled' - os.rename(fullpath, new_path) - - elif not os.path.isdir(fullpath) and fullpath.endswith('.py.disabled'): - path = path[:-9] - if path in enabled_set: - print(f"ENABLE: {path}") - new_path = fullpath[:-9] - os.rename(fullpath, new_path) - - # download missing: just support for 'copy' style - py_to_url = {} - - with open(nodelist_path, 'r', encoding="UTF-8") as json_file: - info = json.load(json_file) - for item in info['custom_nodes']: - if item['install_type'] == 'copy': - for url in item['files']: - if url.endswith('.py'): - py = url.split('/')[-1] - py_to_url[py] = url - - for item in file_custom_node_infos: - filename = item['filename'] - if not item['disabled']: - target_path = os.path.join(working_directory, filename) - - if not os.path.exists(target_path) and filename in py_to_url: - url = py_to_url[filename] - print(f"DOWNLOAD: {filename}") - download_url(url, working_directory) - - -def apply_snapshot(target): - try: - # todo: should be if target is not in snapshots dir - path = os.path.join(os.path.dirname(__file__), 'snapshots', f"{target}") - if os.path.exists(path): - if not target.endswith('.json') and not target.endswith('.yaml'): - print(f"Snapshot file not found: `{path}`") - print("APPLY SNAPSHOT: False") - return None - - with open(path, 'r', encoding="UTF-8") as snapshot_file: - if target.endswith('.json'): - info = json.load(snapshot_file) - elif target.endswith('.yaml'): - info = yaml.load(snapshot_file, Loader=yaml.SafeLoader) - info = info['custom_nodes'] - else: - # impossible case - print("APPLY SNAPSHOT: False") - return None - - comfyui_hash = info['comfyui'] - git_custom_node_infos = info['git_custom_nodes'] - file_custom_node_infos = info['file_custom_nodes'] - - checkout_comfyui_hash(comfyui_hash) - checkout_custom_node_hash(git_custom_node_infos) - invalidate_custom_node_file(file_custom_node_infos) - - print("APPLY SNAPSHOT: True") - if 'pips' in info: - return info['pips'] - else: - return None - - print(f"Snapshot file not found: `{path}`") - print("APPLY SNAPSHOT: False") - - return None - except Exception as e: - print(e) - traceback.print_exc() - print("APPLY SNAPSHOT: False") - - return None - - -def restore_pip_snapshot(pips, options): - non_url = [] - local_url = [] - non_local_url = [] - for k, v in pips.items(): - if v == "": - non_url.append(k) - else: - if v.startswith('file:'): - local_url.append(v) - else: - non_local_url.append(v) - - failed = [] - if '--pip-non-url' in options: - # try all at once - res = 1 - try: - res = subprocess.check_call([sys.executable, '-m', 'pip', 'install'] + non_url) - except: - pass - - # fallback - if res != 0: - for x in non_url: - res = 1 - try: - res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x]) - except: - pass - - if res != 0: - failed.append(x) - - if 
'--pip-non-local-url' in options: - for x in non_local_url: - res = 1 - try: - res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x]) - except: - pass - - if res != 0: - failed.append(x) - - if '--pip-local-url' in options: - for x in local_url: - res = 1 - try: - res = subprocess.check_call([sys.executable, '-m', 'pip', 'install', x]) - except: - pass - - if res != 0: - failed.append(x) - - print(f"Installation failed for pip packages: {failed}") - - -def setup_environment(): - config = configparser.ConfigParser() - config.read(config_path) - if 'default' in config and 'git_exe' in config['default'] and config['default']['git_exe'] != '': - git.Git().update_environment(GIT_PYTHON_GIT_EXECUTABLE=config['default']['git_exe']) - - -setup_environment() - - -try: - if sys.argv[1] == "--clone": - repo_path = None - if len(sys.argv) > 4: - repo_path = sys.argv[4] - - gitclone(sys.argv[2], sys.argv[3], repo_path=repo_path) - elif sys.argv[1] == "--check": - gitcheck(sys.argv[2], False) - elif sys.argv[1] == "--fetch": - gitcheck(sys.argv[2], True) - elif sys.argv[1] == "--pull": - gitpull(sys.argv[2]) - elif sys.argv[1] == "--apply-snapshot": - options = set() - for x in sys.argv: - if x in ['--pip-non-url', '--pip-local-url', '--pip-non-local-url']: - options.add(x) - - pips = apply_snapshot(sys.argv[2]) - - if pips and len(options) > 0: - restore_pip_snapshot(pips, options) - sys.exit(0) -except Exception as e: - print(e) - sys.exit(-1) - - diff --git a/glob/cm_global.py b/glob/cm_global.py deleted file mode 100644 index 4041bcb6..00000000 --- a/glob/cm_global.py +++ /dev/null @@ -1,112 +0,0 @@ -import traceback - -# -# Global Var -# -# Usage: -# import cm_global -# cm_global.variables['comfyui.revision'] = 1832 -# print(f"log mode: {cm_global.variables['logger.enabled']}") -# -variables = {} - - -# -# Global API -# -# Usage: -# [register API] -# import cm_global -# -# def api_hello(msg): -# print(f"hello: {msg}") -# return msg -# -# cm_global.register_api('hello', api_hello) -# -# [use API] -# import cm_global -# -# test = cm_global.try_call(api='hello', msg='an example') -# print(f"'{test}' is returned") -# - -APIs = {} - - -def register_api(k, f): - global APIs - APIs[k] = f - - -def try_call(**kwargs): - if 'api' in kwargs: - api_name = kwargs['api'] - try: - api = APIs.get(api_name) - if api is not None: - del kwargs['api'] - return api(**kwargs) - else: - print(f"WARN: The '{kwargs['api']}' API has not been registered.") - except Exception as e: - print(f"ERROR: An exception occurred while calling the '{api_name}' API.") - raise e - else: - return None - - -# -# Extension Info -# -# Usage: -# import cm_global -# -# cm_global.extension_infos['my_extension'] = {'version': [0, 1], 'name': 'me', 'description': 'example extension', } -# -extension_infos = {} - -on_extension_registered_handlers = {} - - -def register_extension(extension_name, v): - global extension_infos - global on_extension_registered_handlers - extension_infos[extension_name] = v - - if extension_name in on_extension_registered_handlers: - for k, f in on_extension_registered_handlers[extension_name]: - try: - f(extension_name, v) - except Exception: - print(f"[ERROR] '{k}' on_extension_registered_handlers") - traceback.print_exc() - - del on_extension_registered_handlers[extension_name] - - -def add_on_extension_registered(k, extension_name, f): - global on_extension_registered_handlers - if extension_name in extension_infos: - try: - v = extension_infos[extension_name] - f(extension_name, v) - except 
Exception: - print(f"[ERROR] '{k}' on_extension_registered_handler") - traceback.print_exc() - else: - if extension_name not in on_extension_registered_handlers: - on_extension_registered_handlers[extension_name] = [] - - on_extension_registered_handlers[extension_name].append((k, f)) - - -def add_on_revision_detected(k, f): - if 'comfyui.revision' in variables: - try: - f(variables['comfyui.revision']) - except Exception: - print(f"[ERROR] '{k}' on_revision_detected_handler") - traceback.print_exc() - else: - variables['cm.on_revision_detected_handler'].append((k, f)) diff --git a/glob/cnr_utils.py b/glob/cnr_utils.py deleted file mode 100644 index ea9a1a93..00000000 --- a/glob/cnr_utils.py +++ /dev/null @@ -1,101 +0,0 @@ -from manager_util import * -import zipfile -import requests -from dataclasses import dataclass -from typing import List - -base_url = "https://api.comfy.org" - - -async def get_cnr_data(page=1, limit=1000, cache_mode=True): - try: - uri = f'{base_url}/nodes?page={page}&limit={limit}' - json_obj = await get_data_with_cache(uri, cache_mode=cache_mode) - - for v in json_obj['nodes']: - if 'latest_version' not in v: - v['latest_version'] = dict(version='nightly') - - return json_obj['nodes'] - except: - res = {} - print(f"Cannot connect to comfyregistry.") - - return res - - -@dataclass -class NodeVersion: - changelog: str - dependencies: List[str] - deprecated: bool - id: str - version: str - download_url: str - - -def map_node_version(api_node_version): - """ - Maps node version data from API response to NodeVersion dataclass. - - Args: - api_data (dict): The 'node_version' part of the API response. - - Returns: - NodeVersion: An instance of NodeVersion dataclass populated with data from the API. - """ - return NodeVersion( - changelog=api_node_version.get( - "changelog", "" - ), # Provide a default value if 'changelog' is missing - dependencies=api_node_version.get( - "dependencies", [] - ), # Provide a default empty list if 'dependencies' is missing - deprecated=api_node_version.get( - "deprecated", False - ), # Assume False if 'deprecated' is not specified - id=api_node_version[ - "id" - ], # 'id' should be mandatory; raise KeyError if missing - version=api_node_version[ - "version" - ], # 'version' should be mandatory; raise KeyError if missing - download_url=api_node_version.get( - "downloadUrl", "" - ), # Provide a default value if 'downloadUrl' is missing - ) - - -def install_node(node_id, version=None): - """ - Retrieves the node version for installation. - - Args: - node_id (str): The unique identifier of the node. - version (str, optional): Specific version of the node to retrieve. If omitted, the latest version is returned. - - Returns: - NodeVersion: Node version data or error message. 
- """ - if version is None: - url = f"{base_url}/nodes/{node_id}/install" - else: - url = f"{base_url}/nodes/{node_id}/install?version={version}" - - response = requests.get(url) - if response.status_code == 200: - # Convert the API response to a NodeVersion object - return map_node_version(response.json()) - else: - return None - - -def all_versions_of_node(node_id): - url = f"https://api.comfy.org/nodes/{node_id}/versions" - - response = requests.get(url) - if response.status_code == 200: - return response.json() - else: - return None - diff --git a/glob/manager_core.py b/glob/manager_core.py deleted file mode 100644 index dc550db0..00000000 --- a/glob/manager_core.py +++ /dev/null @@ -1,2955 +0,0 @@ -import json -import os -import sys -import subprocess -import re -import shutil -import configparser -import platform - -import git -from git.remote import RemoteProgress -from urllib.parse import urlparse -from tqdm.auto import tqdm -import time -import yaml -import zipfile -from concurrent.futures import ThreadPoolExecutor, as_completed - -orig_print = print - -from rich import print -from packaging import version - -import uuid - -glob_path = os.path.join(os.path.dirname(__file__)) # ComfyUI-Manager/glob -sys.path.append(glob_path) - -import cm_global -import cnr_utils -from manager_util import * - - -version_code = [3, 0] -version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '') - - -DEFAULT_CHANNEL = "https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main" - - -custom_nodes_path = os.path.abspath(os.path.join(comfyui_manager_path, '..')) - -invalid_nodes = {} - - -def check_invalid_nodes(): - global invalid_nodes - - try: - import folder_paths - node_paths = folder_paths.get_folder_paths("custom_nodes") - except: - try: - sys.path.append(comfy_path) - import folder_paths - except: - raise Exception(f"Invalid COMFYUI_PATH: {comfy_path}") - - def check(root): - global invalid_nodes - - subdirs = [d for d in os.listdir(root) if os.path.isdir(os.path.join(root, d))] - for subdir in subdirs: - if subdir in ['.disabled', '__pycache__']: - continue - - if '@' in subdir: - spec = subdir.split('@') - if spec[1] in ['unknown', 'nightly']: - continue - - if not os.path.exists(os.path.join(root, subdir, '.tracking')): - invalid_nodes[spec[0]] = os.path.join(root, subdir) - - node_paths = folder_paths.get_folder_paths("custom_nodes") - for x in node_paths: - check(x) - - disabled_dir = os.path.join(x, '.disabled') - if os.path.exists(disabled_dir): - check(disabled_dir) - - if len(invalid_nodes): - print(f"\n-------------------- ComfyUI-Manager invalid nodes notice ----------------") - print(f"\nNodes requiring reinstallation have been detected:\n(Directly delete the corresponding path and reinstall.)\n") - - for x in invalid_nodes.values(): - print(x) - - print("\n---------------------------------------------------------------------------\n") - - -comfy_path = os.environ.get('COMFYUI_PATH') -if comfy_path is None: - comfy_path = os.path.abspath(os.path.join(custom_nodes_path, '..')) - -channel_list_path = os.path.join(comfyui_manager_path, 'channels.list') -config_path = os.path.join(comfyui_manager_path, "config.ini") -startup_script_path = os.path.join(comfyui_manager_path, "startup-scripts") -git_script_path = os.path.join(comfyui_manager_path, "git_helper.py") -cached_config = None -js_path = None - -comfy_ui_required_revision = 1930 -comfy_ui_required_commit_datetime = datetime(2024, 1, 24, 0, 0, 0) - -comfy_ui_revision = 
"Unknown" -comfy_ui_commit_datetime = datetime(1900, 1, 1, 0, 0, 0) - -channel_dict = None -channel_list = None -pip_map = None - - -def remap_pip_package(pkg): - if pkg in cm_global.pip_overrides: - res = cm_global.pip_overrides[pkg] - print(f"[ComfyUI-Manager] '{pkg}' is remapped to '{res}'") - return res - else: - return pkg - - -def get_installed_packages(): - global pip_map - - if pip_map is None: - try: - result = subprocess.check_output([sys.executable, '-m', 'pip', 'list'], universal_newlines=True) - - pip_map = {} - for line in result.split('\n'): - x = line.strip() - if x: - y = line.split() - if y[0] == 'Package' or y[0].startswith('-'): - continue - - pip_map[y[0]] = y[1] - except subprocess.CalledProcessError as e: - print(f"[ComfyUI-Manager] Failed to retrieve the information of installed pip packages.") - return set() - - return pip_map - - -def clear_pip_cache(): - global pip_map - pip_map = None - - -def is_blacklisted(name): - name = name.strip() - - pattern = r'([^<>!=]+)([<>!=]=?)([^ ]*)' - match = re.search(pattern, name) - - if match: - name = match.group(1) - - if name in cm_global.pip_blacklist: - return True - - if name in cm_global.pip_downgrade_blacklist: - pips = get_installed_packages() - - if match is None: - if name in pips: - return True - elif match.group(2) in ['<=', '==', '<']: - if name in pips: - if StrictVersion(pips[name]) >= StrictVersion(match.group(3)): - return True - - return False - - -def is_installed(name): - name = name.strip() - - if name.startswith('#'): - return True - - pattern = r'([^<>!=]+)([<>!=]=?)([0-9.a-zA-Z]*)' - match = re.search(pattern, name) - - if match: - name = match.group(1) - - if name in cm_global.pip_blacklist: - return True - - if name in cm_global.pip_downgrade_blacklist: - pips = get_installed_packages() - - if match is None: - if name in pips: - return True - elif match.group(2) in ['<=', '==', '<']: - if name in pips: - if StrictVersion(pips[name]) >= StrictVersion(match.group(3)): - print(f"[ComfyUI-Manager] skip black listed pip installation: '{name}'") - return True - - return name.lower() in get_installed_packages() - - -def normalize_channel(channel): - if channel is None: - return None - elif channel.startswith('https://'): - return channel - - tmp_dict = get_channel_dict() - channel_url = tmp_dict.get(channel) - if channel_url: - return channel_url - - raise Exception(f"Invalid channel name '{channel}'") - - -class ManagedResult: - def __init__(self, action): - self.action = action - self.items = [] - self.result = True - self.to_path = None - self.msg = None - self.target = None - self.postinstall = lambda: True - - def append(self, item): - self.items.append(item) - - def fail(self, msg): - self.result = False - self.msg = msg - return self - - def with_target(self, target): - self.target = target - return self - - def with_msg(self, msg): - self.msg = msg - return self - - def with_postinstall(self, postinstall): - self.postinstall = postinstall - return self - - -class UnifiedManager: - def __init__(self): - self.cnr_inactive_nodes = {} # node_id -> node_version -> fullpath - self.nightly_inactive_nodes = {} # node_id -> fullpath - self.unknown_inactive_nodes = {} # node_id -> repo url * fullpath - self.active_nodes = {} # node_id -> node_version * fullpath - self.unknown_active_nodes = {} # node_id -> repo url * fullpath - self.cnr_map = {} # node_id -> cnr info - self.repo_cnr_map = {} # repo_url -> cnr info - self.custom_node_map_cache = {} # (channel, mode) -> augmented custom node list json - 
self.processed_install = set() - - def get_cnr_by_repo(self, url): - normalized_url = url.replace("git@github.com:", "https://github.com/") - return self.repo_cnr_map.get(normalized_url) - - def resolve_unspecified_version(self, node_name, guess_mode=None): - if guess_mode == 'active': - # priority: - # 1. CNR/nightly active nodes - # 2. unknown - # 3. Fail - - if node_name in self.cnr_map: - version_spec = self.get_from_cnr_active_nodes(node_name) - - if version_spec is None: - if node_name in self.unknown_active_nodes: - version_spec = "unknown" - else: - return None - - elif node_name in self.unknown_active_nodes: - version_spec = "unknown" - else: - return None - - elif guess_mode == 'inactive': - # priority: - # 1. CNR latest in inactive - # 2. nightly - # 3. unknown - # 4. Fail - - if node_name in self.cnr_map: - latest = self.get_from_cnr_inactive_nodes(node_name) - - if latest is not None: - version_spec = str(latest[0]) - else: - if node_name in self.nightly_inactive_nodes: - version_spec = "nightly" - else: - version_spec = "unknown" - - elif node_name in self.unknown_inactive_nodes: - version_spec = "unknown" - else: - return None - - else: - # priority: - # 1. CNR latest in world - # 2. unknown - - if node_name in self.cnr_map: - version_spec = self.cnr_map[node_name]['latest_version']['version'] - else: - version_spec = "unknown" - - return version_spec - - def resolve_node_spec(self, node_name, guess_mode=None): - """ - resolve to 'node_name, version_spec' from version string - - version string: - node_name@latest - node_name@nightly - node_name@unknown - node_name@ - node_name - - if guess_mode is 'active' or 'inactive' - return can be 'None' based on state check - otherwise - return 'unknown' version when failed to guess - """ - - spec = node_name.split('@') - - if len(spec) == 2: - node_name = spec[0] - version_spec = spec[1] - - if version_spec == 'latest': - if node_name not in self.cnr_map: - print(f"ERROR: '{node_name}' is not a CNR node.") - return None - else: - version_spec = self.cnr_map[node_name]['latest_version']['version'] - - elif guess_mode in ['active', 'inactive']: - node_name = spec[0] - version_spec = self.resolve_unspecified_version(node_name, guess_mode=guess_mode) - if version_spec is None: - return None - else: - node_name = spec[0] - version_spec = self.resolve_unspecified_version(node_name) - if version_spec is None: - return None - - return node_name, version_spec, len(spec) > 1 - - def resolve_ver(self, fullpath): - """ - resolve version of unclassified custom node based on remote url in .git/config - """ - git_config_path = os.path.join(fullpath, '.git', 'config') - - if not os.path.exists(git_config_path): - return "unknown" - - config = configparser.ConfigParser() - config.read(git_config_path) - - for k, v in config.items(): - if k.startswith('remote ') and 'url' in v: - cnr = self.get_cnr_by_repo(v['url']) - if cnr: - return "nightly" - else: - return "unknown" - - def resolve_id_from_repo(self, fullpath): - git_config_path = os.path.join(fullpath, '.git', 'config') - - if not os.path.exists(git_config_path): - return None - - config = configparser.ConfigParser() - config.read(git_config_path) - - for k, v in config.items(): - if k.startswith('remote ') and 'url' in v: - cnr = self.get_cnr_by_repo(v['url']) - if cnr: - return "nightly", cnr['id'], v['url'] - else: - return "unknown", v['url'].split('/')[-1], v['url'] - - def resolve_unknown(self, node_id, fullpath): - res = self.resolve_id_from_repo(fullpath) - - if res is None: - 
self.unknown_inactive_nodes[node_id] = '', fullpath - return - - ver_spec, node_id, url = res - - if ver_spec == 'nightly': - self.nightly_inactive_nodes[node_id] = fullpath - else: - self.unknown_inactive_nodes[node_id] = url, fullpath - - def update_cache_at_path(self, fullpath, is_disabled): - name = os.path.basename(fullpath) - - if name.endswith(".disabled"): - node_spec = name[:-9] - is_disabled = True - else: - node_spec = name - - if '@' in node_spec: - node_spec = node_spec.split('@') - node_id = node_spec[0] - if node_id is None: - node_version = 'unknown' - else: - node_version = node_spec[1].replace("_", ".") - - if node_version != 'unknown': - if node_id not in self.cnr_map: - # fallback - v = node_version - - self.cnr_map[node_id] = { - 'id': node_id, - 'name': node_id, - 'latest_version': {'version': v}, - 'publisher': {'id': 'N/A', 'name': 'N/A'} - } - - elif node_version == 'unknown': - res = self.resolve_id_from_repo(fullpath) - if res is None: - print(f"Custom node unresolved: {fullpath}") - return - - node_version, node_id, _ = res - else: - res = self.resolve_id_from_repo(fullpath) - if res is None: - print(f"Custom node unresolved: {fullpath}") - return - - node_version, node_id, _ = res - - if not is_disabled: - # active nodes - if node_version == 'unknown': - self.unknown_active_nodes[node_id] = node_version, fullpath - else: - self.active_nodes[node_id] = node_version, fullpath - else: - if node_version == 'unknown': - self.resolve_unknown(node_id, fullpath) - elif node_version == 'nightly': - self.nightly_inactive_nodes[node_id] = fullpath - else: - self.add_to_cnr_inactive_nodes(node_id, node_version, fullpath) - - def is_updatable(self, node_id): - cur_ver = self.get_cnr_active_version(node_id) - latest_ver = self.cnr_map[node_id]['latest_version']['version'] - - if cur_ver and latest_ver: - return self.safe_version(latest_ver) > self.safe_version(cur_ver) - - return False - - def fetch_or_pull_git_repo(self, is_pull=False): - updated = set() - failed = set() - - def check_update(node_name, fullpath, ver_spec): - try: - if is_pull: - is_updated, success = git_repo_update_check_with(fullpath, do_update=True) - else: - is_updated, success = git_repo_update_check_with(fullpath, do_fetch=True) - - return f"{node_name}@{ver_spec}", is_updated, success - except Exception: - traceback.print_exc() - - return f"{node_name}@{ver_spec}", False, False - - with ThreadPoolExecutor() as executor: - futures = [] - - for k, v in self.unknown_active_nodes.items(): - futures.append(executor.submit(check_update, k, v[1], 'unknown')) - - for k, v in self.active_nodes.items(): - if v[0] == 'nightly': - futures.append(executor.submit(check_update, k, v[1], 'nightly')) - - for future in as_completed(futures): - item, is_updated, success = future.result() - - if is_updated: - updated.add(item) - - if not success: - failed.add(item) - - return dict(updated=list(updated), failed=list(failed)) - - def is_enabled(self, node_id, version_spec=None): - """ - 1. true if node_id@ is enabled - 2. true if node_id@ is enabled and version_spec==None - 3. false otherwise - - remark: latest version_spec is not allowed. Must be resolved before call. 
- """ - if version_spec == "cnr": - return self.get_cnr_active_version(node_id) not in [None, 'nightly'] - elif version_spec == 'unknown' and self.is_unknown_active(node_id): - return True - elif version_spec is not None and self.get_cnr_active_version(node_id) == version_spec: - return True - elif version_spec is None and (node_id in self.active_nodes or node_id in self.unknown_active_nodes): - return True - return False - - def is_disabled(self, node_id, version_spec=None): - """ - 1. node_id@unknown is disabled if version_spec is @unknown - 2. node_id@nightly is disabled if version_spec is @nightly - 4. node_id@ is disabled if version_spec is not None - 5. not exists (active node_id) if version_spec is None - - remark: latest version_spec is not allowed. Must be resolved before call. - """ - if version_spec == "unknown": - return node_id in self.unknown_inactive_nodes - elif version_spec == "nightly": - return node_id in self.nightly_inactive_nodes - elif version_spec == "cnr": - res = self.cnr_inactive_nodes.get(node_id, None) - if res is None: - return False - - res = [x for x in res.keys() if x != 'nightly'] - return len(res) > 0 - elif version_spec is not None: - return version_spec in self.cnr_inactive_nodes.get(node_id, []) - - if node_id in self.nightly_inactive_nodes: - return True - elif node_id in self.unknown_inactive_nodes: - return True - - target = self.cnr_inactive_nodes.get(node_id, None) - if target is not None and target == version_spec: - return True - - return False - - def is_registered_in_cnr(self, node_id): - return node_id in self.cnr_map - - def get_cnr_active_version(self, node_id): - res = self.active_nodes.get(node_id) - if res: - return res[0] - else: - return None - - def is_unknown_active(self, node_id): - return node_id in self.unknown_active_nodes - - def add_to_cnr_inactive_nodes(self, node_id, ver, fullpath): - ver_map = self.cnr_inactive_nodes.get(node_id) - if ver_map is None: - ver_map = {} - self.cnr_inactive_nodes[node_id] = ver_map - - ver_map[ver] = fullpath - - def get_from_cnr_active_nodes(self, node_id): - ver_path = self.active_nodes.get(node_id) - if ver_path is None: - return None - - return ver_path[0] - - def get_from_cnr_inactive_nodes(self, node_id, ver=None): - ver_map = self.cnr_inactive_nodes.get(node_id) - if ver_map is None: - return None - - if ver is not None: - return ver_map.get(ver) - - latest = None - for k, v in ver_map.items(): - if latest is None: - latest = self.safe_version(k), v - continue - - cur_ver = self.safe_version(k) - if cur_ver > latest[0]: - latest = cur_ver, v - - return latest - - async def reload(self, cache_mode): - self.custom_node_map_cache = {} - self.cnr_inactive_nodes = {} # node_id -> node_version -> fullpath - self.nightly_inactive_nodes = {} # node_id -> fullpath - self.unknown_inactive_nodes = {} # node_id -> repo url * fullpath - self.unknown_active_nodes = {} # node_id -> repo url * fullpath - self.active_nodes = {} # node_id -> node_version * fullpath - - # reload 'cnr_map' and 'repo_cnr_map' - cnrs = await cnr_utils.get_cnr_data(cache_mode=cache_mode) - - for x in cnrs: - self.cnr_map[x['id']] = x - - if 'repository' in x: - normalized_url = x['repository'].replace("git@github.com:", "https://github.com/") - self.repo_cnr_map[normalized_url] = x - - # reload node status info from custom_nodes/* - for x in os.listdir(custom_nodes_path): - fullpath = os.path.join(custom_nodes_path, x) - if os.path.isdir(fullpath): - if x not in ['__pycache__', '.disabled']: - self.update_cache_at_path(fullpath, 
is_disabled=False) - - # reload node status info from custom_nodes/.disabled/* - disabled_dir = os.path.join(custom_nodes_path, '.disabled') - if os.path.exists(disabled_dir): - for x in os.listdir(disabled_dir): - fullpath = os.path.join(disabled_dir, x) - if os.path.isdir(fullpath): - self.update_cache_at_path(fullpath, is_disabled=True) - - @staticmethod - async def load_nightly(channel, mode): - res = {} - - channel_url = normalize_channel(channel) - if channel: - if mode not in ['remote', 'local', 'cache']: - print(f"[bold red]ERROR: Invalid mode is specified `--mode {mode}`[/bold red]", file=sys.stderr) - return {} - - json_obj = await get_data_by_mode(mode, 'custom-node-list.json', channel_url=channel_url) - for x in json_obj['custom_nodes']: - for y in x['files']: - if 'github.com' in y and not (y.endswith('.py') or y.endswith('.js')): - repo_name = y.split('/')[-1] - res[repo_name] = (x, False) - - if 'id' in x: - if x['id'] not in res: - res[x['id']] = (x, True) - - return res - - async def get_custom_nodes(self, channel, mode): - default_channel = normalize_channel('default') - cache = self.custom_node_map_cache.get((default_channel, mode)) # CNR/nightly should always be based on the default channel. - - if cache is not None: - return cache - - channel = normalize_channel(channel) - print(f"nightly_channel: {channel}/{mode}") - nodes = await self.load_nightly(channel, mode) - - res = {} - added_cnr = set() - for v in nodes.values(): - v = v[0] - if len(v['files']) == 1: - cnr = self.get_cnr_by_repo(v['files'][0]) - if cnr: - if 'latest_version' not in cnr: - v['cnr_latest'] = '0.0.0' - else: - v['cnr_latest'] = cnr['latest_version']['version'] - v['id'] = cnr['id'] - v['author'] = cnr['publisher']['name'] - v['title'] = cnr['name'] - v['description'] = cnr['description'] - v['health'] = '-' - added_cnr.add(cnr['id']) - node_id = v['id'] - else: - node_id = v['files'][0].split('/')[-1] - res[node_id] = v - elif len(v['files']) > 1: - res[v['files'][0]] = v # A custom node composed of multiple url is treated as a single repository with one representative path - - self.custom_node_map_cache[(channel, mode)] = res - return res - - @staticmethod - def safe_version(ver_str): - try: - return version.parse(ver_str) - except: - return version.parse("0.0.0") - - def execute_install_script(self, url, repo_path, instant_execution=False, lazy_mode=False, no_deps=False): - install_script_path = os.path.join(repo_path, "install.py") - requirements_path = os.path.join(repo_path, "requirements.txt") - - if lazy_mode: - install_cmd = ["#LAZY-INSTALL-SCRIPT", sys.executable] - return try_install_script(url, repo_path, install_cmd) - else: - if os.path.exists(requirements_path) and not no_deps: - print("Install: pip packages") - res = True - with open(requirements_path, "r") as requirements_file: - for line in requirements_file: - package_name = remap_pip_package(line.strip()) - if package_name and not package_name.startswith('#') and package_name not in self.processed_install: - self.processed_install.add(package_name) - install_cmd = [sys.executable, "-m", "pip", "install", package_name] - if package_name.strip() != "" and not package_name.startswith('#'): - res = res and try_install_script(url, repo_path, install_cmd, instant_execution=instant_execution) - return res - - if os.path.exists(install_script_path) and install_script_path not in self.processed_install: - self.processed_install.add(install_script_path) - print(f"Install: install script") - install_cmd = [sys.executable, "install.py"] - 
return try_install_script(url, repo_path, install_cmd, instant_execution=instant_execution) - - return True - - def reserve_cnr_switch(self, target, zip_url, from_path, to_path, no_deps): - script_path = os.path.join(startup_script_path, "install-scripts.txt") - with open(script_path, "a") as file: - obj = [target, "#LAZY-CNR-SWITCH-SCRIPT", zip_url, from_path, to_path, no_deps, custom_nodes_path, sys.executable] - file.write(f"{obj}\n") - - print(f"Installation reserved: {target}") - - return True - - def reserve_migration(self, moves): - script_path = os.path.join(startup_script_path, "install-scripts.txt") - with open(script_path, "a") as file: - obj = ["", "#LAZY-MIGRATION", moves] - file.write(f"{obj}\n") - - return True - - def unified_fix(self, node_id, version_spec, instant_execution=False, no_deps=False): - """ - fix dependencies - """ - - result = ManagedResult('fix') - - info = self.active_nodes.get(node_id) - if info is None or not os.path.exists(info[1]): - return result.fail(f'not found: {node_id}@{version_spec}') - - self.execute_install_script(node_id, info[1], instant_execution=instant_execution, no_deps=no_deps) - - return result - - def cnr_switch_version(self, node_id, version_spec=None, instant_execution=False, no_deps=False, return_postinstall=False): - if instant_execution: - return self.cnr_switch_version_instant(node_id, version_spec, instant_execution, no_deps, return_postinstall) - else: - return self.cnr_switch_version_lazy(node_id, version_spec, no_deps, return_postinstall) - - def cnr_switch_version_lazy(self, node_id, version_spec=None, no_deps=False, return_postinstall=False): - """ - switch between cnr version (lazy mode) - """ - - result = ManagedResult('switch-cnr') - - node_info = cnr_utils.install_node(node_id, version_spec) - if node_info is None or not node_info.download_url: - return result.fail(f'not available node: {node_id}@{version_spec}') - - version_spec = node_info.version - - if self.active_nodes[node_id][0] == version_spec: - return ManagedResult('skip').with_msg("Up to date") - - zip_url = node_info.download_url - from_path = self.active_nodes[node_id][1] - target = f"{node_id}@{version_spec.replace('.', '_')}" - to_path = os.path.join(custom_nodes_path, target) - - def postinstall(): - return self.reserve_cnr_switch(target, zip_url, from_path, to_path, no_deps) - - if return_postinstall: - return result.with_postinstall(postinstall) - else: - if not postinstall(): - return result.fail(f"Failed to execute install script: {node_id}@{version_spec}") - - return result - - def cnr_switch_version_instant(self, node_id, version_spec=None, instant_execution=True, no_deps=False, return_postinstall=False): - """ - switch between cnr version - """ - - # 1. download - result = ManagedResult('switch-cnr') - - node_info = cnr_utils.install_node(node_id, version_spec) - if node_info is None or not node_info.download_url: - return result.fail(f'not available node: {node_id}@{version_spec}') - - version_spec = node_info.version - - if self.active_nodes[node_id][0] == version_spec: - return ManagedResult('skip').with_msg("Up to date") - - archive_name = f"CNR_temp_{str(uuid.uuid4())}.zip" # should be unpredictable name - security precaution - download_path = os.path.join(custom_nodes_path, archive_name) - download_url(node_info.download_url, custom_nodes_path, archive_name) - - # 2. 
extract files into @ - install_path = self.active_nodes[node_id][1] - extracted = cnr_utils.extract_package_as_zip(download_path, install_path) - os.remove(download_path) - - if extracted is None: - if len(os.listdir(install_path)) == 0: - shutil.rmtree(install_path) - - return result.fail(f'Empty archive file: {node_id}@{version_spec}') - - # 3. calculate garbage files (.tracking - extracted) - tracking_info_file = os.path.join(install_path, '.tracking') - prev_files = set() - with open(tracking_info_file, 'r') as f: - for line in f: - prev_files.add(line.strip()) - garbage = prev_files.difference(extracted) - garbage = [os.path.join(install_path, x) for x in garbage] - - # 4-1. remove garbage files - for x in garbage: - if os.path.isfile(x): - os.remove(x) - - # 4-2. remove garbage dir if empty - for x in garbage: - if os.path.isdir(x): - if not os.listdir(x): - os.rmdir(x) - - # 5. rename dir name @ ==> @ - new_install_path = os.path.join(custom_nodes_path, f"{node_id}@{version_spec.replace('.', '_')}") - print(f"'{install_path}' is moved to '{new_install_path}'") - shutil.move(install_path, new_install_path) - - # 6. create .tracking file - tracking_info_file = os.path.join(new_install_path, '.tracking') - with open(tracking_info_file, "w", encoding='utf-8') as file: - file.write('\n'.join(list(extracted))) - - # 7. post install - result.target = version_spec - - def postinstall(): - res = self.execute_install_script(f"{node_id}@{version_spec}", new_install_path, instant_execution=instant_execution, no_deps=no_deps) - return res - - if return_postinstall: - return result.with_postinstall(postinstall) - else: - if not postinstall(): - return result.fail(f"Failed to execute install script: {node_id}@{version_spec}") - - return result - - def unified_enable(self, node_id, version_spec=None): - """ - priority if version_spec == None - 1. CNR latest in disk - 2. nightly - 3. unknown - - remark: latest version_spec is not allowed. Must be resolved before call. 
-        """
-
-        result = ManagedResult('enable')
-
-        if version_spec is None:
-            version_spec = self.resolve_unspecified_version(node_id, guess_mode='inactive')
-            if version_spec is None:
-                return result.fail(f'Specified inactive node not exists: {node_id}')
-
-        if self.is_enabled(node_id, version_spec):
-            return ManagedResult('skip').with_msg('Already enabled')
-
-        if not self.is_disabled(node_id, version_spec):
-            return ManagedResult('skip').with_msg('Not installed')
-
-        from_path = None
-        to_path = None
-
-        if version_spec == 'unknown':
-            repo_and_path = self.unknown_inactive_nodes.get(node_id)
-            if repo_and_path is None:
-                return result.fail(f'Specified inactive node not exists: {node_id}@unknown')
-            from_path = repo_and_path[1]
-            # NOTE: Keep original name as possible if unknown node
-            # to_path = os.path.join(custom_nodes_path, f"{node_id}@unknown")
-            to_path = os.path.join(custom_nodes_path, node_id)
-        elif version_spec == 'nightly':
-            self.unified_disable(node_id, False)
-            from_path = self.nightly_inactive_nodes.get(node_id)
-            if from_path is None:
-                return result.fail(f'Specified inactive node not exists: {node_id}@nightly')
-            to_path = os.path.join(custom_nodes_path, f"{node_id}@nightly")
-        elif version_spec is not None:
-            self.unified_disable(node_id, False)
-            cnr_info = self.cnr_inactive_nodes.get(node_id)
-
-            if cnr_info is None or len(cnr_info) == 0:
-                return result.fail(f'Specified inactive cnr node not exists: {node_id}')
-
-            if version_spec == "cnr":
-                version_spec = next(iter(cnr_info))
-
-            if version_spec not in cnr_info:
-                return result.fail(f'Specified inactive node not exists: {node_id}@{version_spec}')
-
-            from_path = cnr_info[version_spec]
-            to_path = os.path.join(custom_nodes_path, f"{node_id}@{version_spec.replace('.', '_')}")
-
-        if from_path is None or not os.path.exists(from_path):
-            return result.fail(f'Specified inactive node path not exists: {from_path}')
-
-        # move from disk
-        shutil.move(from_path, to_path)
-
-        # update cache
-        if version_spec == 'unknown':
-            del self.unknown_inactive_nodes[node_id]
-            self.unknown_active_nodes[node_id] = to_path
-            return result.with_target(to_path)
-        elif version_spec == 'nightly':
-            del self.nightly_inactive_nodes[node_id]
-        else:
-            del self.cnr_inactive_nodes[node_id][version_spec]
-
-        self.active_nodes[node_id] = version_spec, to_path
-        return result.with_target(to_path)
-
-    def unified_disable(self, node_id, is_unknown):
-        result = ManagedResult('disable')
-
-        if is_unknown:
-            version_spec = 'unknown'
-        else:
-            version_spec = None
-
-        if not self.is_enabled(node_id, version_spec):
-            if not self.is_disabled(node_id, version_spec):
-                return ManagedResult('skip').with_msg('Not installed')
-            else:
-                return ManagedResult('skip').with_msg('Already disabled')
-
-        if is_unknown:
-            repo_and_path = self.unknown_active_nodes.get(node_id)
-            # NOTE: Keep original name as possible if unknown node
-            # to_path = os.path.join(custom_nodes_path, '.disabled', f"{node_id}@unknown")
-            to_path = os.path.join(custom_nodes_path, '.disabled', node_id)
-
-            if repo_and_path is None or not os.path.exists(repo_and_path[1]):
-                return result.fail(f'Specified active node not exists: {node_id}')
-
-            shutil.move(repo_and_path[1], to_path)
-            result.append((repo_and_path[1], to_path))
-
-            self.unknown_inactive_nodes[node_id] = repo_and_path[0], to_path
-            del self.unknown_active_nodes[node_id]
-
-            return result
-
-        ver_and_path = self.active_nodes.get(node_id)
-
-        if ver_and_path is None or not os.path.exists(ver_and_path[1]):
-            return result.fail(f'Specified active
node not exists: {node_id}') - - to_path = os.path.join(custom_nodes_path, '.disabled', f"{node_id}@{ver_and_path[0].replace('.', '_')}") - shutil.move(ver_and_path[1], to_path) - result.append((ver_and_path[1], to_path)) - - if ver_and_path[0] == 'nightly': - self.nightly_inactive_nodes[node_id] = to_path - else: - self.add_to_cnr_inactive_nodes(node_id, ver_and_path[0], to_path) - - del self.active_nodes[node_id] - - return result - - def unified_uninstall(self, node_id, is_unknown): - """ - Remove whole installed custom nodes including inactive nodes - """ - result = ManagedResult('uninstall') - - if is_unknown: - # remove from actives - repo_and_path = self.unknown_active_nodes.get(node_id) - - is_removed = False - - if repo_and_path is not None and os.path.exists(repo_and_path[1]): - rmtree(repo_and_path[1]) - result.append(repo_and_path[1]) - del self.unknown_active_nodes[node_id] - - is_removed = True - - # remove from inactives - repo_and_path = self.unknown_inactive_nodes.get(node_id) - - if repo_and_path is not None and os.path.exists(repo_and_path[1]): - rmtree(repo_and_path[1]) - result.append(repo_and_path[1]) - del self.unknown_inactive_nodes[node_id] - - is_removed = True - - if is_removed: - return result - else: - return ManagedResult('skip') - - # remove from actives - ver_and_path = self.active_nodes.get(node_id) - - if ver_and_path is not None and os.path.exists(ver_and_path[1]): - shutil.rmtree(ver_and_path[1]) - result.items.append(ver_and_path) - del self.active_nodes[node_id] - - # remove from nightly inactives - fullpath = self.nightly_inactive_nodes.get(node_id) - if fullpath is not None and os.path.exists(fullpath): - shutil.rmtree(fullpath) - result.items.append(('nightly', fullpath)) - del self.nightly_inactive_nodes[node_id] - - # remove from cnr inactives - ver_map = self.cnr_inactive_nodes.get(node_id) - if ver_map is not None: - for key, fullpath in ver_map.items(): - shutil.rmtree(fullpath) - result.items.append((key, fullpath)) - del self.cnr_inactive_nodes[node_id] - - if len(result.items) == 0: - return ManagedResult('skip').with_msg('Not installed') - - return result - - def cnr_install(self, node_id, version_spec=None, instant_execution=False, no_deps=False, return_postinstall=False): - result = ManagedResult('install-cnr') - - node_info = cnr_utils.install_node(node_id, version_spec) - if node_info is None or not node_info.download_url: - return result.fail(f'not available node: {node_id}@{version_spec}') - - archive_name = f"CNR_temp_{str(uuid.uuid4())}.zip" # should be unpredictable name - security precaution - download_path = os.path.join(custom_nodes_path, archive_name) - - # re-download. I cannot trust existing file. 
- if os.path.exists(download_path): - os.remove(download_path) - - # install_path - install_path = os.path.join(custom_nodes_path, f"{node_id}@{version_spec.replace('.', '_')}") - if os.path.exists(install_path): - return result.fail(f'Install path already exists: {install_path}') - - download_url(node_info.download_url, custom_nodes_path, archive_name) - os.makedirs(install_path, exist_ok=True) - extracted = cnr_utils.extract_package_as_zip(download_path, install_path) - os.remove(download_path) - result.to_path = install_path - - if extracted is None: - shutil.rmtree(install_path) - return result.fail(f'Empty archive file: {node_id}@{version_spec}') - - # create .tracking file - tracking_info_file = os.path.join(install_path, '.tracking') - with open(tracking_info_file, "w", encoding='utf-8') as file: - file.write('\n'.join(extracted)) - - result.target = version_spec - - def postinstall(): - return self.execute_install_script(node_id, install_path, instant_execution=instant_execution, no_deps=no_deps) - - if return_postinstall: - return result.with_postinstall(postinstall) - else: - if not postinstall(): - return result.fail(f"Failed to execute install script: {node_id}@{version_spec}") - - return result - - def repo_install(self, url, repo_path, instant_execution=False, no_deps=False, return_postinstall=False): - result = ManagedResult('install-git') - result.append(url) - - if not is_valid_url(url): - return result.fail(f"Invalid git url: {url}") - - if url.endswith("/"): - url = url[:-1] - try: - print(f"Download: git clone '{url}'") - - # Clone the repository from the remote URL - if not instant_execution and platform.system() == 'Windows': - res = manager_funcs.run_script([sys.executable, git_script_path, "--clone", custom_nodes_path, url, repo_path], cwd=custom_nodes_path) - if res != 0: - return result.fail(f"Failed to clone repo: {url}") - else: - repo = git.Repo.clone_from(url, repo_path, recursive=True, progress=GitProgress()) - repo.git.clear_cache() - repo.close() - - def postinstall(): - return self.execute_install_script(url, repo_path, instant_execution=instant_execution, no_deps=no_deps) - - if return_postinstall: - return result.with_postinstall(postinstall) - else: - if not postinstall(): - return result.fail(f"Failed to execute install script: {url}") - - except Exception as e: - return result.fail(f"Install(git-clone) error: {url} / {e}") - - print("Installation was successful.") - return result - - def repo_update(self, repo_path, instant_execution=False, no_deps=False, return_postinstall=False): - result = ManagedResult('update-git') - - if not os.path.exists(os.path.join(repo_path, '.git')): - return result.fail(f'Path not found: {repo_path}') - - # version check - repo = git.Repo(repo_path) - - if repo.head.is_detached: - switch_to_default_branch(repo) - - current_branch = repo.active_branch - branch_name = current_branch.name - - if current_branch.tracking_branch() is None: - print(f"[ComfyUI-Manager] There is no tracking branch ({current_branch})") - remote_name = 'origin' - else: - remote_name = current_branch.tracking_branch().remote_name - remote = repo.remote(name=remote_name) - - try: - remote.fetch() - except Exception as e: - if 'detected dubious' in str(e): - print(f"[ComfyUI-Manager] Try fixing 'dubious repository' error on 'ComfyUI' repository") - safedir_path = comfy_path.replace('\\', '/') - subprocess.run(['git', 'config', '--global', '--add', 'safe.directory', safedir_path]) - try: - remote.fetch() - except Exception: - print(f"\n[ComfyUI-Manager] 
Failed to fix repository setup. Please execute this command on cmd: \n"
-                          f"-----------------------------------------------------------------------------------------\n"
-                          f'git config --global --add safe.directory "{safedir_path}"\n'
-                          f"-----------------------------------------------------------------------------------------\n")
-
-        commit_hash = repo.head.commit.hexsha
-        remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha
-
-        if commit_hash != remote_commit_hash:
-            git_pull(repo_path)
-
-            if len(repo.remotes) > 0:
-                url = repo.remotes[0].url
-            else:
-                url = "unknown repo"
-
-            def postinstall():
-                return self.execute_install_script(url, repo_path, instant_execution=instant_execution, no_deps=no_deps)
-
-            if return_postinstall:
-                return result.with_postinstall(postinstall)
-            else:
-                if not postinstall():
-                    return result.fail(f"Failed to execute install script: {url}")
-
-            return result
-        else:
-            return ManagedResult('skip').with_msg('Up to date')
-
-    def unified_update(self, node_id, version_spec=None, instant_execution=False, no_deps=False, return_postinstall=False):
-        if version_spec is None:
-            version_spec = self.resolve_unspecified_version(node_id, guess_mode='active')
-
-        if version_spec is None:
-            return ManagedResult('update').fail(f'Update not available: {node_id}@{version_spec}')
-
-        if version_spec == 'nightly':
-            return self.repo_update(self.active_nodes[node_id][1], instant_execution=instant_execution, no_deps=no_deps, return_postinstall=return_postinstall).with_target('nightly')
-        elif version_spec == 'unknown':
-            return self.repo_update(self.unknown_active_nodes[node_id][1], instant_execution=instant_execution, no_deps=no_deps, return_postinstall=return_postinstall).with_target('unknown')
-        else:
-            return self.cnr_switch_version(node_id, instant_execution=instant_execution, no_deps=no_deps, return_postinstall=return_postinstall)
-
-    async def install_by_id(self, node_id, version_spec=None, channel=None, mode=None, instant_execution=False, no_deps=False, return_postinstall=False):
-        """
-        priority if version_spec == None
-        1. CNR latest
-        2. unknown
-
-        remark: latest version_spec is not allowed. Must be resolved before call.
- """ - - repo_url = None - if version_spec is None: - if self.is_enabled(node_id): - return ManagedResult('skip') - elif self.is_disabled(node_id): - return self.unified_enable(node_id) - else: - version_spec = self.resolve_unspecified_version(node_id) - - if version_spec == 'unknown' or version_spec == 'nightly': - custom_nodes = await self.get_custom_nodes(channel, mode) - the_node = custom_nodes.get(node_id) - if the_node is not None: - repo_url = the_node['files'][0] - else: - result = ManagedResult('install') - return result.fail(f"Node '{node_id}@{version_spec}' not found in [{channel}, {mode}]") - - if self.is_enabled(node_id, version_spec): - return ManagedResult('skip').with_target(f"{node_id}@{version_spec}") - - elif self.is_disabled(node_id, version_spec): - return self.unified_enable(node_id, version_spec) - - elif version_spec == 'unknown' or version_spec == 'nightly': - if version_spec == 'nightly': - # disable cnr nodes - if self.is_enabled(node_id, 'cnr'): - self.unified_disable(node_id, False) - - if version_spec == 'unknown': - to_path = os.path.abspath(os.path.join(custom_nodes_path, node_id)) # don't attach @unknown - else: - to_path = os.path.abspath(os.path.join(custom_nodes_path, f"{node_id}@{version_spec.replace('.', '_')}")) - res = self.repo_install(repo_url, to_path, instant_execution=instant_execution, no_deps=no_deps, return_postinstall=return_postinstall) - if res.result: - if version_spec == 'unknown': - self.unknown_active_nodes[node_id] = to_path - elif version_spec == 'nightly': - self.active_nodes[node_id] = 'nightly', to_path - - return res.with_target(version_spec) - - if self.is_enabled(node_id, 'nightly'): - # disable nightly nodes - self.unified_disable(node_id, False) # NOTE: don't return from here - - if self.is_disabled(node_id, version_spec): - # enable and return if specified version is disabled - return self.unified_enable(node_id, version_spec) - - if self.is_disabled(node_id, "cnr"): - # enable and switch version if cnr is disabled (not specified version) - self.unified_enable(node_id, "cnr") - return self.cnr_switch_version(node_id, version_spec, no_deps=no_deps, return_postinstall=return_postinstall) - - if self.is_enabled(node_id, "cnr"): - return self.cnr_switch_version(node_id, version_spec, no_deps=no_deps, return_postinstall=return_postinstall) - - res = self.cnr_install(node_id, version_spec, instant_execution=instant_execution, no_deps=no_deps, return_postinstall=return_postinstall) - if res.result: - self.active_nodes[node_id] = version_spec, res.to_path - - return res - - async def migrate_unmanaged_nodes(self): - """ - fix path for nightly and unknown nodes of unmanaged nodes - """ - await self.reload('cache') - await self.get_custom_nodes('default', 'cache') - - print(f"Migration: STAGE 1") - moves = [] - - # migrate nightly inactive - for x, v in self.nightly_inactive_nodes.items(): - if v.endswith('@nightly'): - continue - - new_path = os.path.join(custom_nodes_path, '.disabled', f"{x}@nightly") - moves.append((v, new_path)) - - print(f"Migration: STAGE 2") - # migrate active nodes - for x, v in self.active_nodes.items(): - if v[0] not in ['nightly']: - continue - - if v[1].endswith('@nightly'): - continue - - new_path = os.path.join(custom_nodes_path, f"{x}@nightly") - moves.append((v[1], new_path)) - - self.reserve_migration(moves) - - print(f"DONE (Migration reserved)") - - -unified_manager = UnifiedManager() - - -def get_channel_dict(): - global channel_dict - - if channel_dict is None: - channel_dict = {} - - if not 
os.path.exists(channel_list_path): - shutil.copy(channel_list_path+'.template', channel_list_path) - - with open(os.path.join(comfyui_manager_path, 'channels.list'), 'r') as file: - channels = file.read() - for x in channels.split('\n'): - channel_info = x.split("::") - if len(channel_info) == 2: - channel_dict[channel_info[0]] = channel_info[1] - - return channel_dict - - -def get_channel_list(): - global channel_list - - if channel_list is None: - channel_list = [] - for k, v in get_channel_dict().items(): - channel_list.append(f"{k}::{v}") - - return channel_list - - -class ManagerFuncs: - def __init__(self): - pass - - def get_current_preview_method(self): - return "none" - - def run_script(self, cmd, cwd='.'): - if len(cmd) > 0 and cmd[0].startswith("#"): - print(f"[ComfyUI-Manager] Unexpected behavior: `{cmd}`") - return 0 - - new_env = os.environ.copy() - new_env["COMFYUI_PATH"] = comfy_path - subprocess.check_call(cmd, cwd=cwd, env=new_env) - - return 0 - - -manager_funcs = ManagerFuncs() - - -def write_config(): - config = configparser.ConfigParser() - config['default'] = { - 'preview_method': manager_funcs.get_current_preview_method(), - 'badge_mode': get_config()['badge_mode'], - 'git_exe': get_config()['git_exe'], - 'channel_url': get_config()['channel_url'], - 'share_option': get_config()['share_option'], - 'bypass_ssl': get_config()['bypass_ssl'], - "file_logging": get_config()['file_logging'], - 'default_ui': get_config()['default_ui'], - 'component_policy': get_config()['component_policy'], - 'double_click_policy': get_config()['double_click_policy'], - 'windows_selector_event_loop_policy': get_config()['windows_selector_event_loop_policy'], - 'model_download_by_agent': get_config()['model_download_by_agent'], - 'downgrade_blacklist': get_config()['downgrade_blacklist'], - 'security_level': get_config()['security_level'], - 'skip_migration_check': get_config()['skip_migration_check'], - } - with open(config_path, 'w') as configfile: - config.write(configfile) - - -def read_config(): - try: - config = configparser.ConfigParser() - config.read(config_path) - default_conf = config['default'] - - # policy migration: disable_unsecure_features -> security_level - if 'disable_unsecure_features' in default_conf: - if default_conf['disable_unsecure_features'].lower() == 'true': - security_level = 'strong' - else: - security_level = 'normal' - else: - security_level = default_conf['security_level'] if 'security_level' in default_conf else 'normal' - - return { - 'preview_method': default_conf['preview_method'] if 'preview_method' in default_conf else manager_funcs.get_current_preview_method(), - 'badge_mode': default_conf['badge_mode'] if 'badge_mode' in default_conf else 'none', - 'git_exe': default_conf['git_exe'] if 'git_exe' in default_conf else '', - 'channel_url': default_conf['channel_url'] if 'channel_url' in default_conf else DEFAULT_CHANNEL, - 'share_option': default_conf['share_option'] if 'share_option' in default_conf else 'all', - 'bypass_ssl': default_conf['bypass_ssl'].lower() == 'true' if 'bypass_ssl' in default_conf else False, - 'file_logging': default_conf['file_logging'].lower() == 'true' if 'file_logging' in default_conf else True, - 'default_ui': default_conf['default_ui'] if 'default_ui' in default_conf else 'none', - 'component_policy': default_conf['component_policy'] if 'component_policy' in default_conf else 'workflow', - 'double_click_policy': default_conf['double_click_policy'] if 'double_click_policy' in default_conf else 'copy-all', - 
'windows_selector_event_loop_policy': default_conf['windows_selector_event_loop_policy'].lower() == 'true' if 'windows_selector_event_loop_policy' in default_conf else False, - 'model_download_by_agent': default_conf['model_download_by_agent'].lower() == 'true' if 'model_download_by_agent' in default_conf else False, - 'downgrade_blacklist': default_conf['downgrade_blacklist'] if 'downgrade_blacklist' in default_conf else '', - 'skip_migration_check': default_conf['skip_migration_check'].lower() == 'true' if 'skip_migration_check' in default_conf else False, - 'security_level': security_level - } - - except Exception: - return { - 'preview_method': manager_funcs.get_current_preview_method(), - 'badge_mode': 'none', - 'git_exe': '', - 'channel_url': DEFAULT_CHANNEL, - 'share_option': 'all', - 'bypass_ssl': False, - 'file_logging': True, - 'default_ui': 'none', - 'component_policy': 'workflow', - 'double_click_policy': 'copy-all', - 'windows_selector_event_loop_policy': False, - 'model_download_by_agent': False, - 'downgrade_blacklist': '', - 'skip_migration_check': False, - 'security_level': 'normal', - } - - -def get_config(): - global cached_config - - if cached_config is None: - cached_config = read_config() - - return cached_config - - -def switch_to_default_branch(repo): - default_branch = repo.git.symbolic_ref('refs/remotes/origin/HEAD').replace('refs/remotes/origin/', '') - repo.git.checkout(default_branch) - - -def try_install_script(url, repo_path, install_cmd, instant_execution=False): - if not instant_execution and ((len(install_cmd) > 0 and install_cmd[0].startswith('#')) or (platform.system() == "Windows" and comfy_ui_commit_datetime.date() >= comfy_ui_required_commit_datetime.date())): - if not os.path.exists(startup_script_path): - os.makedirs(startup_script_path) - - script_path = os.path.join(startup_script_path, "install-scripts.txt") - with open(script_path, "a") as file: - obj = [repo_path] + install_cmd - file.write(f"{obj}\n") - - return True - else: - if len(install_cmd) == 5 and install_cmd[2:4] == ['pip', 'install']: - if is_blacklisted(install_cmd[4]): - print(f"[ComfyUI-Manager] skip black listed pip installation: '{install_cmd[4]}'") - return True - - print(f"\n## ComfyUI-Manager: EXECUTE => {install_cmd}") - code = manager_funcs.run_script(install_cmd, cwd=repo_path) - - if platform.system() != "Windows": - try: - if comfy_ui_commit_datetime.date() < comfy_ui_required_commit_datetime.date(): - print("\n\n###################################################################") - print(f"[WARN] ComfyUI-Manager: Your ComfyUI version ({comfy_ui_revision})[{comfy_ui_commit_datetime.date()}] is too old. 
Please update to the latest version.")
-                    print(f"[WARN] The extension installation feature may not work properly with the currently installed ComfyUI version in a Windows environment.")
-                    print("###################################################################\n\n")
-            except:
-                pass
-
-        if code != 0:
-            if url is None:
-                url = os.path.dirname(repo_path)
-            print(f"install script failed: {url}")
-            return False
-
-    return True
-
-
-# use subprocess to avoid file system lock by git (Windows)
-def __win_check_git_update(path, do_fetch=False, do_update=False):
-    if do_fetch:
-        command = [sys.executable, git_script_path, "--fetch", path]
-    elif do_update:
-        command = [sys.executable, git_script_path, "--pull", path]
-    else:
-        command = [sys.executable, git_script_path, "--check", path]
-
-    new_env = os.environ.copy()
-    new_env["COMFYUI_PATH"] = comfy_path
-    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=custom_nodes_path)
-    output, _ = process.communicate()
-    output = output.decode('utf-8').strip()
-
-    if 'detected dubious' in output:
-        # fix and try again
-        safedir_path = path.replace('\\', '/')
-        try:
-            print(f"[ComfyUI-Manager] Try fixing 'dubious repository' error on '{safedir_path}' repo")
-            process = subprocess.Popen(['git', 'config', '--global', '--add', 'safe.directory', safedir_path], env=new_env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-            output, _ = process.communicate()
-
-            process = subprocess.Popen(command, env=new_env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-            output, _ = process.communicate()
-            output = output.decode('utf-8').strip()
-        except Exception:
-            print('[ComfyUI-Manager] failed to fix')
-
-        if 'detected dubious' in output:
-            print(f'\n[ComfyUI-Manager] Failed to fix repository setup.
Please execute this command on cmd: \n' - f'-----------------------------------------------------------------------------------------\n' - f'git config --global --add safe.directory "{safedir_path}"\n' - f'-----------------------------------------------------------------------------------------\n') - - if do_update: - if "CUSTOM NODE PULL: Success" in output: - process.wait() - print(f"\rUpdated: {path}") - return True, True # updated - elif "CUSTOM NODE PULL: None" in output: - process.wait() - return False, True # there is no update - else: - print(f"\rUpdate error: {path}") - process.wait() - return False, False # update failed - else: - if "CUSTOM NODE CHECK: True" in output: - process.wait() - return True, True - elif "CUSTOM NODE CHECK: False" in output: - process.wait() - return False, True - else: - print(f"\rFetch error: {path}") - print(f"\n{output}\n") - process.wait() - return False, True - - -def __win_check_git_pull(path): - new_env = os.environ.copy() - new_env["COMFYUI_PATH"] = comfy_path - command = [sys.executable, git_script_path, "--pull", path] - process = subprocess.Popen(command, env=new_env, cwd=custom_nodes_path) - process.wait() - - -def execute_install_script(url, repo_path, lazy_mode=False, instant_execution=False, no_deps=False): - # import ipdb; ipdb.set_trace() - install_script_path = os.path.join(repo_path, "install.py") - requirements_path = os.path.join(repo_path, "requirements.txt") - - if lazy_mode: - install_cmd = ["#LAZY-INSTALL-SCRIPT", sys.executable] - try_install_script(url, repo_path, install_cmd) - else: - if os.path.exists(requirements_path) and not no_deps: - print("Install: pip packages") - with open(requirements_path, "r") as requirements_file: - for line in requirements_file: - #handle comments - if '#' in line: - if line.strip()[0] == '#': - print("Line is comment...skipping") - continue - else: - line = line.split('#')[0].strip() - - package_name = remap_pip_package(line.strip()) - - if package_name and not package_name.startswith('#'): - if '--index-url' in package_name: - s = package_name.split('--index-url') - install_cmd = [sys.executable, "-m", "pip", "install", s[0].strip(), '--index-url', s[1].strip()] - else: - install_cmd = [sys.executable, "-m", "pip", "install", package_name] - - if package_name.strip() != "" and not package_name.startswith('#'): - try_install_script(url, repo_path, install_cmd, instant_execution=instant_execution) - - if os.path.exists(install_script_path): - print(f"Install: install script") - install_cmd = [sys.executable, "install.py"] - try_install_script(url, repo_path, install_cmd, instant_execution=instant_execution) - - return True - - -def git_repo_update_check_with(path, do_fetch=False, do_update=False, no_deps=False): - """ - - perform update check for git custom node - and fetch or update if flag is on - - :param path: path to git custom node - :param do_fetch: do fetch during check - :param do_update: do update during check - :return: update state * success - """ - if do_fetch: - orig_print(f"\x1b[2K\rFetching: {path}", end='') - elif do_update: - orig_print(f"\x1b[2K\rUpdating: {path}", end='') - - # Check if the path is a git repository - if not os.path.exists(os.path.join(path, '.git')): - raise ValueError(f'Not a git repository: {path}') - - if platform.system() == "Windows": - updated, success = __win_check_git_update(path, do_fetch, do_update) - if updated and success: - execute_install_script(None, path, lazy_mode=True, no_deps=no_deps) - return updated, success - else: - # Fetch the latest 
commits from the remote repository - repo = git.Repo(path) - - remote_name = 'origin' - remote = repo.remote(name=remote_name) - - if not do_update and repo.head.is_detached: - if do_fetch: - remote.fetch() - - return True, True # detached branch is treated as updatable - - if repo.head.is_detached: - switch_to_default_branch(repo) - - current_branch = repo.active_branch - branch_name = current_branch.name - - # Get the current commit hash - commit_hash = repo.head.commit.hexsha - - if do_fetch or do_update: - remote.fetch() - - if do_update: - if repo.is_dirty(): - print(f"STASH: '{path}' is dirty.") - repo.git.stash() - - if f'{remote_name}/{branch_name}' not in repo.refs: - switch_to_default_branch(repo) - current_branch = repo.active_branch - branch_name = current_branch.name - - remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha - - if commit_hash == remote_commit_hash: - repo.close() - return False, True - - try: - remote.pull() - repo.git.submodule('update', '--init', '--recursive') - new_commit_hash = repo.head.commit.hexsha - - if commit_hash != new_commit_hash: - execute_install_script(None, path, no_deps=no_deps) - print(f"\nUpdated: {path}") - return True, True - else: - return False, False - - except Exception as e: - print(f"\nUpdating failed: {path}\n{e}", file=sys.stderr) - return False, False - - if repo.head.is_detached: - repo.close() - return True, True - - # Get commit hash of the remote branch - current_branch = repo.active_branch - branch_name = current_branch.name - - if f'{remote_name}/{branch_name}' in repo.refs: - remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha - else: - return True, True # Assuming there's an update if it's not the default branch. - - # Compare the commit hashes to determine if the local repository is behind the remote repository - if commit_hash != remote_commit_hash: - # Get the commit dates - commit_date = repo.head.commit.committed_datetime - remote_commit_date = repo.refs[f'{remote_name}/{branch_name}'].object.committed_datetime - - # Compare the commit dates to determine if the local repository is behind the remote repository - if commit_date < remote_commit_date: - repo.close() - return True, True - - repo.close() - - return False, True - - -class GitProgress(RemoteProgress): - def __init__(self): - super().__init__() - self.pbar = tqdm() - - def update(self, op_code, cur_count, max_count=None, message=''): - self.pbar.total = max_count - self.pbar.n = cur_count - self.pbar.pos = 0 - self.pbar.refresh() - - -def is_valid_url(url): - try: - # Check for HTTP/HTTPS URL format - result = urlparse(url) - if all([result.scheme, result.netloc]): - return True - finally: - # Check for SSH git URL format - pattern = re.compile(r"^(.+@|ssh:\/\/).+:.+$") - if pattern.match(url): - return True - return False - - -async def gitclone_install(url, instant_execution=False, msg_prefix='', no_deps=False): - await unified_manager.reload('cache') - await unified_manager.get_custom_nodes('default', 'cache') - - print(f"{msg_prefix}Install: {url}") - - result = ManagedResult('install-git') - - if not is_valid_url(url): - return result.fail(f"Invalid git url: '{url}'") - - if url.endswith("/"): - url = url[:-1] - try: - cnr = unified_manager.get_cnr_by_repo(url) - if cnr: - cnr_id = cnr['id'] - return await unified_manager.install_by_id(cnr_id, version_spec='nightly') - else: - repo_name = os.path.splitext(os.path.basename(url))[0] - - # NOTE: Keep original name as possible if unknown node - # node_dir = 
f"{repo_name}@unknown" - node_dir = repo_name - - repo_path = os.path.join(custom_nodes_path, node_dir) - disabled_repo_path1 = os.path.join(custom_nodes_path, '.disabled', node_dir) - disabled_repo_path2 = os.path.join(custom_nodes_path, repo_name+'.disabled') # old style - - if os.path.exists(repo_path): - return result.fail(f"Already exists: '{repo_path}'") - - if os.path.exists(disabled_repo_path1): - return result.fail(f"Already exists (disabled): '{disabled_repo_path1}'") - - if os.path.exists(disabled_repo_path2): - return result.fail(f"Already exists (disabled): '{disabled_repo_path2}'") - - print(f"CLONE into '{repo_path}'") - - # Clone the repository from the remote URL - if not instant_execution and platform.system() == 'Windows': - res = manager_funcs.run_script([sys.executable, git_script_path, "--clone", custom_nodes_path, url, repo_path], cwd=custom_nodes_path) - if res != 0: - return result.fail(f"Failed to clone '{url}' into '{repo_path}'") - else: - repo = git.Repo.clone_from(url, repo_path, recursive=True, progress=GitProgress()) - repo.git.clear_cache() - repo.close() - - execute_install_script(url, repo_path, instant_execution=instant_execution, no_deps=no_deps) - print("Installation was successful.") - return result.with_target(repo_path) - - except Exception as e: - traceback.print_exc() - print(f"Install(git-clone) error: {url} / {e}", file=sys.stderr) - return result.fail(f"Install(git-clone) error: {url} / {e}") - - -def git_pull(path): - # Check if the path is a git repository - if not os.path.exists(os.path.join(path, '.git')): - raise ValueError('Not a git repository') - - # Pull the latest changes from the remote repository - if platform.system() == "Windows": - return __win_check_git_pull(path) - else: - repo = git.Repo(path) - - if repo.is_dirty(): - print(f"STASH: '{path}' is dirty.") - repo.git.stash() - - if repo.head.is_detached: - switch_to_default_branch(repo) - - current_branch = repo.active_branch - remote_name = current_branch.tracking_branch().remote_name - remote = repo.remote(name=remote_name) - - remote.pull() - repo.git.submodule('update', '--init', '--recursive') - - repo.close() - - return True - - -async def get_data_by_mode(mode, filename, channel_url=None): - if channel_url in get_channel_dict(): - channel_url = get_channel_dict()[channel_url] - - try: - if mode == "local": - uri = os.path.join(comfyui_manager_path, filename) - json_obj = await get_data(uri) - else: - if channel_url is None: - uri = get_config()['channel_url'] + '/' + filename - else: - uri = channel_url + '/' + filename - - cache_uri = str(simple_hash(uri))+'_'+filename - cache_uri = os.path.join(cache_dir, cache_uri) - - if mode == "cache": - if is_file_created_within_one_day(cache_uri): - json_obj = await get_data(cache_uri) - else: - json_obj = await get_data(uri) - with cache_lock: - with open(cache_uri, "w", encoding='utf-8') as file: - json.dump(json_obj, file, indent=4, sort_keys=True) - else: - json_obj = await get_data(uri) - with cache_lock: - with open(cache_uri, "w", encoding='utf-8') as file: - json.dump(json_obj, file, indent=4, sort_keys=True) - except Exception as e: - print(f"[ComfyUI-Manager] Due to a network error, switching to local mode.\n=> {filename}\n=> {e}") - uri = os.path.join(comfyui_manager_path, filename) - json_obj = await get_data(uri) - - return json_obj - - -def gitclone_fix(files, instant_execution=False, no_deps=False): - print(f"Try fixing: {files}") - for url in files: - if not is_valid_url(url): - print(f"Invalid git url: '{url}'") - 
return False - - if url.endswith("/"): - url = url[:-1] - try: - repo_name = os.path.splitext(os.path.basename(url))[0] - repo_path = os.path.join(custom_nodes_path, repo_name) - - if os.path.exists(repo_path+'.disabled'): - repo_path = repo_path+'.disabled' - - if not execute_install_script(url, repo_path, instant_execution=instant_execution, no_deps=no_deps): - return False - - except Exception as e: - print(f"Install(git-clone) error: {url} / {e}", file=sys.stderr) - return False - - print(f"Attempt to fixing '{files}' is done.") - return True - - -def pip_install(packages): - install_cmd = ['#FORCE', sys.executable, "-m", "pip", "install", '-U'] + packages - try_install_script('pip install via manager', '..', install_cmd) - - -def rmtree(path): - retry_count = 3 - - while True: - try: - retry_count -= 1 - - if platform.system() == "Windows": - manager_funcs.run_script(['attrib', '-R', path + '\\*', '/S']) - shutil.rmtree(path) - - return True - - except Exception as ex: - print(f"ex: {ex}") - time.sleep(3) - - if retry_count < 0: - raise ex - - print(f"Uninstall retry({retry_count})") - - -def gitclone_uninstall(files): - import os - - print(f"Uninstall: {files}") - for url in files: - if url.endswith("/"): - url = url[:-1] - try: - dir_name = os.path.splitext(os.path.basename(url))[0].replace(".git", "") - dir_path = os.path.join(custom_nodes_path, dir_name) - - # safety check - if dir_path == '/' or dir_path[1:] == ":/" or dir_path == '': - print(f"Uninstall(git-clone) error: invalid path '{dir_path}' for '{url}'") - return False - - install_script_path = os.path.join(dir_path, "uninstall.py") - disable_script_path = os.path.join(dir_path, "disable.py") - if os.path.exists(install_script_path): - uninstall_cmd = [sys.executable, "uninstall.py"] - code = manager_funcs.run_script(uninstall_cmd, cwd=dir_path) - - if code != 0: - print(f"An error occurred during the execution of the uninstall.py script. Only the '{dir_path}' will be deleted.") - elif os.path.exists(disable_script_path): - disable_script = [sys.executable, "disable.py"] - code = manager_funcs.run_script(disable_script, cwd=dir_path) - if code != 0: - print(f"An error occurred during the execution of the disable.py script. 
Only the '{dir_path}' will be deleted.") - - if os.path.exists(dir_path): - rmtree(dir_path) - elif os.path.exists(dir_path + ".disabled"): - rmtree(dir_path + ".disabled") - except Exception as e: - print(f"Uninstall(git-clone) error: {url} / {e}", file=sys.stderr) - return False - - print("Uninstallation was successful.") - return True - - -def gitclone_set_active(files, is_disable): - import os - - if is_disable: - action_name = "Disable" - else: - action_name = "Enable" - - print(f"{action_name}: {files}") - for url in files: - if url.endswith("/"): - url = url[:-1] - try: - dir_name = os.path.splitext(os.path.basename(url))[0].replace(".git", "") - dir_path = os.path.join(custom_nodes_path, dir_name) - - # safety check - if dir_path == '/' or dir_path[1:] == ":/" or dir_path == '': - print(f"{action_name}(git-clone) error: invalid path '{dir_path}' for '{url}'") - return False - - if is_disable: - current_path = dir_path - new_path = dir_path + ".disabled" - else: - current_path = dir_path + ".disabled" - new_path = dir_path - - shutil.move(current_path, new_path) - - if is_disable: - if os.path.exists(os.path.join(new_path, "disable.py")): - disable_script = [sys.executable, "disable.py"] - try_install_script(url, new_path, disable_script) - else: - if os.path.exists(os.path.join(new_path, "enable.py")): - enable_script = [sys.executable, "enable.py"] - try_install_script(url, new_path, enable_script) - - except Exception as e: - print(f"{action_name}(git-clone) error: {url} / {e}", file=sys.stderr) - return False - - print(f"{action_name} was successful.") - return True - - -def gitclone_update(files, instant_execution=False, skip_script=False, msg_prefix="", no_deps=False): - import os - - print(f"{msg_prefix}Update: {files}") - for url in files: - if url.endswith("/"): - url = url[:-1] - try: - repo_name = os.path.splitext(os.path.basename(url))[0].replace(".git", "") - repo_path = os.path.join(custom_nodes_path, repo_name) - - if os.path.exists(repo_path+'.disabled'): - repo_path = repo_path+'.disabled' - - git_pull(repo_path) - - if not skip_script: - if instant_execution: - if not execute_install_script(url, repo_path, lazy_mode=False, instant_execution=True, no_deps=no_deps): - return False - else: - if not execute_install_script(url, repo_path, lazy_mode=True, no_deps=no_deps): - return False - - except Exception as e: - print(f"Update(git-clone) error: {url} / {e}", file=sys.stderr) - return False - - if not skip_script: - print("Update was successful.") - return True - - -def update_path(repo_path, instant_execution=False, no_deps=False): - if not os.path.exists(os.path.join(repo_path, '.git')): - return "fail" - - # version check - repo = git.Repo(repo_path) - - if repo.head.is_detached: - switch_to_default_branch(repo) - - current_branch = repo.active_branch - branch_name = current_branch.name - - if current_branch.tracking_branch() is None: - print(f"[ComfyUI-Manager] There is no tracking branch ({current_branch})") - remote_name = 'origin' - else: - remote_name = current_branch.tracking_branch().remote_name - remote = repo.remote(name=remote_name) - - try: - remote.fetch() - except Exception as e: - if 'detected dubious' in str(e): - print(f"[ComfyUI-Manager] Try fixing 'dubious repository' error on 'ComfyUI' repository") - safedir_path = comfy_path.replace('\\', '/') - subprocess.run(['git', 'config', '--global', '--add', 'safe.directory', safedir_path]) - try: - remote.fetch() - except Exception: - print(f"\n[ComfyUI-Manager] Failed to fixing repository setup. 
Please execute this command on cmd: \n" - f"-----------------------------------------------------------------------------------------\n" - f'git config --global --add safe.directory "{safedir_path}"\n' - f"-----------------------------------------------------------------------------------------\n") - - commit_hash = repo.head.commit.hexsha - remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha - - if commit_hash != remote_commit_hash: - git_pull(repo_path) - execute_install_script("ComfyUI", repo_path, instant_execution=instant_execution, no_deps=no_deps) - return "updated" - else: - return "skipped" - - -def lookup_customnode_by_url(data, target): - for x in data['custom_nodes']: - if target in x['files']: - dir_name = os.path.splitext(os.path.basename(target))[0].replace(".git", "") - dir_path = os.path.join(custom_nodes_path, dir_name) - if os.path.exists(dir_path): - x['installed'] = 'True' - elif os.path.exists(dir_path + ".disabled"): - x['installed'] = 'Disabled' - return x - - return None - - -def simple_check_custom_node(url): - dir_name = os.path.splitext(os.path.basename(url))[0].replace(".git", "") - dir_path = os.path.join(custom_nodes_path, dir_name) - if os.path.exists(dir_path): - return 'installed' - elif os.path.exists(dir_path+'.disabled'): - return 'disabled' - - return 'not-installed' - - -def check_state_of_git_node_pack_single(item, do_fetch=False, do_update_check=True, do_update=False): - if item['version'] == 'unknown': - dir_path = unified_manager.unknown_active_nodes.get(item['id'])[1] - elif item['version'] == 'nightly': - dir_path = unified_manager.active_nodes.get(item['id'])[1] - else: - # skip CNR nodes - dir_path = None - - if dir_path and os.path.exists(dir_path): - if do_update_check: - update_state, success = git_repo_update_check_with(dir_path, do_fetch, do_update) - if (do_update_check or do_update) and update_state: - item['update-state'] = 'true' - elif do_update and not success: - item['update-state'] = 'fail' - - -def get_installed_pip_packages(): - # extract pip package infos - pips = subprocess.check_output([sys.executable, '-m', 'pip', 'freeze'], text=True).split('\n') - - res = {} - for x in pips: - if x.strip() == "": - continue - - if ' @ ' in x: - spec_url = x.split(' @ ') - res[spec_url[0]] = spec_url[1] - else: - res[x] = "" - - return res - - -def get_current_snapshot(): - # Get ComfyUI hash - repo_path = comfy_path - - if not os.path.exists(os.path.join(repo_path, '.git')): - print(f"ComfyUI update fail: The installed ComfyUI does not have a Git repository.") - return {} - - repo = git.Repo(repo_path) - comfyui_commit_hash = repo.head.commit.hexsha - - git_custom_nodes = {} - cnr_custom_nodes = {} - file_custom_nodes = [] - - # Get custom nodes hash - for path in os.listdir(custom_nodes_path): - if path in ['.disabled', '__pycache__']: - continue - - fullpath = os.path.join(custom_nodes_path, path) - - if os.path.isdir(fullpath): - is_disabled = path.endswith(".disabled") - - try: - git_dir = os.path.join(fullpath, '.git') - - parsed_spec = path.split('@') - - if len(parsed_spec) == 1: - node_id = parsed_spec[0] - ver_spec = 'unknown' - else: - node_id, ver_spec = parsed_spec - ver_spec = ver_spec.replace('_', '.') - - if len(ver_spec) > 1 and ver_spec not in ['nightly', 'latest', 'unknown']: - if is_disabled: - continue # don't restore disabled state of CNR node. 
- - cnr_custom_nodes[node_id] = ver_spec - - elif not os.path.exists(git_dir): - continue - - else: - repo = git.Repo(fullpath) - commit_hash = repo.head.commit.hexsha - url = repo.remotes.origin.url - git_custom_nodes[url] = dict(hash=commit_hash, disabled=is_disabled) - except: - print(f"Failed to extract snapshots for the custom node '{path}'.") - - elif path.endswith('.py'): - is_disabled = path.endswith(".py.disabled") - filename = os.path.basename(path) - item = { - 'filename': filename, - 'disabled': is_disabled - } - - file_custom_nodes.append(item) - - pip_packages = get_installed_pip_packages() - - return { - 'comfyui': comfyui_commit_hash, - 'git_custom_nodes': git_custom_nodes, - 'cnr_custom_nodes': cnr_custom_nodes, - 'file_custom_nodes': file_custom_nodes, - 'pips': pip_packages, - } - - -def save_snapshot_with_postfix(postfix, path=None): - if path is None: - now = datetime.now() - - date_time_format = now.strftime("%Y-%m-%d_%H-%M-%S") - file_name = f"{date_time_format}_{postfix}" - - path = os.path.join(comfyui_manager_path, 'snapshots', f"{file_name}.json") - else: - file_name = path.replace('\\', '/').split('/')[-1] - file_name = file_name.split('.')[-2] - - snapshot = get_current_snapshot() - if path.endswith('.json'): - with open(path, "w") as json_file: - json.dump(snapshot, json_file, indent=4) - - return file_name + '.json' - - elif path.endswith('.yaml'): - with open(path, "w") as yaml_file: - snapshot = {'custom_nodes': snapshot} - yaml.dump(snapshot, yaml_file, allow_unicode=True) - - return path - - -async def extract_nodes_from_workflow(filepath, mode='local', channel_url='default'): - # prepare json data - workflow = None - if filepath.endswith('.json'): - with open(filepath, "r", encoding="UTF-8", errors="ignore") as json_file: - try: - workflow = json.load(json_file) - except: - print(f"Invalid workflow file: {filepath}") - exit(-1) - - elif filepath.endswith('.png'): - from PIL import Image - with Image.open(filepath) as img: - if 'workflow' not in img.info: - print(f"The specified .png file doesn't have a workflow: {filepath}") - exit(-1) - else: - try: - workflow = json.loads(img.info['workflow']) - except: - print(f"This is not a valid .png file containing a ComfyUI workflow: {filepath}") - exit(-1) - - if workflow is None: - print(f"Invalid workflow file: {filepath}") - exit(-1) - - # extract nodes - used_nodes = set() - - def extract_nodes(sub_workflow): - for x in sub_workflow['nodes']: - node_name = x.get('type') - - # skip virtual nodes - if node_name in ['Reroute', 'Note']: - continue - - if node_name is not None and not node_name.startswith('workflow/'): - used_nodes.add(node_name) - - if 'nodes' in workflow: - extract_nodes(workflow) - - if 'extra' in workflow: - if 'groupNodes' in workflow['extra']: - for x in workflow['extra']['groupNodes'].values(): - extract_nodes(x) - - # lookup dependent custom nodes - ext_map = await get_data_by_mode(mode, 'extension-node-map.json', channel_url) - - rext_map = {} - preemption_map = {} - patterns = [] - for k, v in ext_map.items(): - if k == 'https://github.com/comfyanonymous/ComfyUI': - for x in v[0]: - if x not in preemption_map: - preemption_map[x] = [] - - preemption_map[x] = k - continue - - for x in v[0]: - if x not in rext_map: - rext_map[x] = [] - - rext_map[x].append(k) - - if 'preemptions' in v[1]: - for x in v[1]['preemptions']: - if x not in preemption_map: - preemption_map[x] = [] - - preemption_map[x] = k - - if 'nodename_pattern' in v[1]: - patterns.append((v[1]['nodename_pattern'], k)) - - # 
identify used extensions - used_exts = set() - unknown_nodes = set() - - for node_name in used_nodes: - ext = preemption_map.get(node_name) - - if ext is None: - ext = rext_map.get(node_name) - if ext is not None: - ext = ext[0] - - if ext is None: - for pat_ext in patterns: - if re.search(pat_ext[0], node_name): - ext = pat_ext[1] - break - - if ext == 'https://github.com/comfyanonymous/ComfyUI': - pass - elif ext is not None: - if 'Fooocus' in ext: - print(f">> {node_name}") - - used_exts.add(ext) - else: - unknown_nodes.add(node_name) - - return used_exts, unknown_nodes - - -def unzip(model_path): - if not os.path.exists(model_path): - print(f"[ComfyUI-Manager] unzip: File not found: {model_path}") - return False - - base_dir = os.path.dirname(model_path) - filename = os.path.basename(model_path) - target_dir = os.path.join(base_dir, filename[:-4]) - - os.makedirs(target_dir, exist_ok=True) - - with zipfile.ZipFile(model_path, 'r') as zip_ref: - zip_ref.extractall(target_dir) - - # Check if there's only one directory inside the target directory - contents = os.listdir(target_dir) - if len(contents) == 1 and os.path.isdir(os.path.join(target_dir, contents[0])): - nested_dir = os.path.join(target_dir, contents[0]) - # Move each file and sub-directory in the nested directory up to the target directory - for item in os.listdir(nested_dir): - shutil.move(os.path.join(nested_dir, item), os.path.join(target_dir, item)) - # Remove the now empty nested directory - os.rmdir(nested_dir) - - os.remove(model_path) - return True - - -def map_to_unified_keys(json_obj): - res = {} - for k, v in json_obj.items(): - cnr = unified_manager.get_cnr_by_repo(k) - if cnr: - res[cnr['id']] = v - else: - res[k] = v - - return res - - -async def get_unified_total_nodes(channel, mode): - await unified_manager.reload(mode) - - res = await unified_manager.get_custom_nodes(channel, mode) - - # collect pure cnr ids (i.e. 
not exists in custom-node-list.json) - # populate state/updatable field to non-pure cnr nodes - cnr_ids = set(unified_manager.cnr_map.keys()) - for k, v in res.items(): - # resolve cnr_id from repo url - files_in_json = v.get('files', []) - cnr_id = None - if len(files_in_json) == 1: - cnr = unified_manager.get_cnr_by_repo(files_in_json[0]) - if cnr: - cnr_id = cnr['id'] - - if cnr_id is not None: - # cnr or nightly version - cnr_ids.remove(cnr_id) - updatable = False - cnr = unified_manager.cnr_map[cnr_id] - - if cnr_id in invalid_nodes: - v['invalid-installation'] = True - - if cnr_id in unified_manager.active_nodes: - # installed - v['state'] = 'enabled' - if unified_manager.active_nodes[cnr_id][0] != 'nightly': - updatable = unified_manager.is_updatable(cnr_id) - else: - updatable = False - v['active_version'] = unified_manager.active_nodes[cnr_id][0] - v['version'] = v['active_version'] - - if cm_global.try_call(api="cm.is_import_failed_extension", name=unified_manager.active_nodes[cnr_id][1]): - v['import-fail'] = True - - elif cnr_id in unified_manager.cnr_inactive_nodes: - # disabled - v['state'] = 'disabled' - cnr_ver = unified_manager.get_from_cnr_inactive_nodes(cnr_id) - if cnr_ver is not None: - v['version'] = str(cnr_ver[0]) - else: - v['version'] = '0' - - elif cnr_id in unified_manager.nightly_inactive_nodes: - # disabled - v['state'] = 'disabled' - v['version'] = 'nightly' - else: - # not installed - v['state'] = 'not-installed' - - if 'version' not in v: - v['version'] = cnr['latest_version']['version'] - - v['update-state'] = 'true' if updatable else 'false' - else: - # unknown version - v['version'] = 'unknown' - - if unified_manager.is_enabled(k, 'unknown'): - v['state'] = 'enabled' - v['active_version'] = 'unknown' - - if cm_global.try_call(api="cm.is_import_failed_extension", name=unified_manager.unknown_active_nodes[k][1]): - v['import-fail'] = True - - elif unified_manager.is_disabled(k, 'unknown'): - v['state'] = 'disabled' - else: - v['state'] = 'not-installed' - - # add items for pure cnr nodes - if normalize_channel(channel) == DEFAULT_CHANNEL: - # Don't show CNR nodes unless default channel - for cnr_id in cnr_ids: - cnr = unified_manager.cnr_map[cnr_id] - author = cnr['publisher']['name'] - title = cnr['name'] - reference = f"https://registry.comfy.org/nodes/{cnr['id']}" - install_type = "cnr" - description = cnr.get('description', '') - - ver = None - active_version = None - updatable = False - import_fail = None - if cnr_id in unified_manager.active_nodes: - # installed - state = 'enabled' - updatable = unified_manager.is_updatable(cnr_id) - active_version = unified_manager.active_nodes[cnr['id']][0] - ver = active_version - - if cm_global.try_call(api="cm.is_import_failed_extension", name=unified_manager.active_nodes[cnr_id][1]): - import_fail = True - - elif cnr['id'] in unified_manager.cnr_inactive_nodes: - # disabled - state = 'disabled' - elif cnr['id'] in unified_manager.nightly_inactive_nodes: - # disabled - state = 'disabled' - ver = 'nightly' - else: - # not installed - state = 'not-installed' - - if ver is None: - ver = cnr['latest_version']['version'] - - item = dict(author=author, title=title, reference=reference, install_type=install_type, - description=description, state=state, updatable=updatable, version=ver) - - if active_version: - item['active_version'] = active_version - - if import_fail: - item['import-fail'] = True - - res[cnr_id] = item - - return res - - -def populate_github_stats(node_packs, json_obj_github): - for k, v in 
node_packs.items(): - url = v['reference'] - if url in json_obj_github: - v['stars'] = json_obj_github[url]['stars'] - v['last_update'] = json_obj_github[url]['last_update'] - v['trust'] = json_obj_github[url]['author_account_age_days'] > 180 - else: - v['stars'] = -1 - v['last_update'] = -1 - v['trust'] = False - - -def populate_favorites(node_packs, json_obj_extras): - favorites = set(json_obj_extras['favorites']) - - for k, v in node_packs.items(): - if v.get('version') != 'unknown': - if k in favorites: - v['is_favorite'] = True - - -async def restore_snapshot(snapshot_path, git_helper_extras=None): - cloned_repos = [] - checkout_repos = [] - enabled_repos = [] - disabled_repos = [] - skip_node_packs = [] - - await unified_manager.reload('cache') - await unified_manager.get_custom_nodes('default', 'cache') - - cnr_repo_map = {} - for k, v in unified_manager.repo_cnr_map.items(): - cnr_repo_map[v['id']] = k - - print(f"Restore snapshot.") - - postinstalls = [] - - with open(snapshot_path, 'r', encoding="UTF-8") as snapshot_file: - if snapshot_path.endswith('.json'): - info = json.load(snapshot_file) - elif snapshot_path.endswith('.yaml'): - info = yaml.load(snapshot_file, Loader=yaml.SafeLoader) - info = info['custom_nodes'] - - # for cnr restore - cnr_info = info.get('cnr_custom_nodes') - if cnr_info is not None: - # disable not listed cnr nodes - todo_disable = [] - todo_checkout = [] - - for k, v in unified_manager.active_nodes.items(): - if 'comfyui-manager' in k: - continue - - if v[0] != 'nightly': - if k not in cnr_info: - todo_disable.append(k) - else: - cnr_ver = cnr_info[k] - if v[1] != cnr_ver: - todo_checkout.append((k, cnr_ver)) - else: - skip_node_packs.append(k) - - for x in todo_disable: - unified_manager.unified_disable(x, False) - disabled_repos.append(x) - - for x in todo_checkout: - unified_manager.cnr_switch_version(x[0], x[1], instant_execution=True, no_deps=True, return_postinstall=False) - checkout_repos.append(x[1]) - - # install listed cnr nodes - for k, v in cnr_info.items(): - if 'comfyui-manager' in k: - continue - - ps = await unified_manager.install_by_id(k, version_spec=v, instant_execution=True, return_postinstall=True) - cloned_repos.append(k) - if ps is not None and ps.result: - if hasattr(ps, 'postinstall'): - postinstalls.append(ps.postinstall) - else: - print(f"cm-cli: unexpected [0001]") - - # for nightly restore - git_info = info.get('git_custom_nodes') - if git_info is not None: - todo_disable = [] - todo_enable = [] - todo_checkout = [] - processed_urls = [] - - for k, v in unified_manager.active_nodes.items(): - if 'comfyui-manager' in k: - continue - - if v[0] == 'nightly' and cnr_repo_map.get(k): - repo_url = cnr_repo_map.get(k) - - normalized_url1 = repo_url.replace("git@github.com:", "https://github.com/") - normalized_url2 = repo_url.replace("https://github.com/", "git@github.com:") - - if normalized_url1 not in git_info and normalized_url2 not in git_info: - todo_disable.append(k) - else: - if normalized_url1 in git_info: - commit_hash = git_info[normalized_url1]['hash'] - todo_checkout.append((v[1], commit_hash)) - - if normalized_url2 in git_info: - commit_hash = git_info[normalized_url2]['hash'] - todo_checkout.append((v[1], commit_hash)) - - for k, v in unified_manager.nightly_inactive_nodes.items(): - if 'comfyui-manager' in k: - continue - - if cnr_repo_map.get(k): - repo_url = cnr_repo_map.get(k) - normalized_url1 = repo_url.replace("git@github.com:", "https://github.com/") - normalized_url2 = 
repo_url.replace("https://github.com/", "git@github.com:") - - if normalized_url1 in git_info: - commit_hash = git_info[normalized_url1]['hash'] - todo_enable.append((k, commit_hash)) - processed_urls.append(normalized_url1) - - if normalized_url2 in git_info: - commit_hash = git_info[normalized_url2]['hash'] - todo_enable.append((k, commit_hash)) - processed_urls.append(normalized_url2) - - for x in todo_disable: - unified_manager.unified_disable(x, False) - disabled_repos.append(x) - - for x in todo_enable: - res = unified_manager.unified_enable(x, 'nightly') - - is_switched = False - if res and res.target: - is_switched = repo_switch_commit(res.target, x[1]) - - if is_switched: - checkout_repos.append(x) - else: - enabled_repos.append(x) - - for x in todo_checkout: - is_switched = repo_switch_commit(x[0], x[1]) - - if is_switched: - checkout_repos.append(x) - else: - skip_node_packs.append(x[0]) - - for x in git_info.keys(): - normalized_url = x.replace("git@github.com:", "https://github.com/") - cnr = unified_manager.repo_cnr_map.get(normalized_url) - if cnr is not None: - pack_id = cnr['id'] - await unified_manager.install_by_id(pack_id, 'nightly', instant_execution=True, no_deps=False, return_postinstall=False) - cloned_repos.append(pack_id) - processed_urls.append(x) - - for x in processed_urls: - if x in git_info: - del git_info[x] - - # remained nightly will be installed and migrated - - # for unknown restore - todo_disable = [] - todo_enable = [] - todo_checkout = [] - processed_urls = [] - - for k2, v2 in unified_manager.unknown_active_nodes.items(): - repo_url = resolve_giturl_from_path(v2[1]) - - if repo_url is None: - continue - - normalized_url1 = repo_url.replace("git@github.com:", "https://github.com/") - normalized_url2 = repo_url.replace("https://github.com/", "git@github.com:") - - if normalized_url1 not in git_info and normalized_url2 not in git_info: - todo_disable.append(k2) - else: - if normalized_url1 in git_info: - commit_hash = git_info[normalized_url1]['hash'] - todo_checkout.append((k2, commit_hash)) - processed_urls.append(normalized_url1) - - if normalized_url2 in git_info: - commit_hash = git_info[normalized_url2]['hash'] - todo_checkout.append((k2, commit_hash)) - processed_urls.append(normalized_url2) - - for k2, v2 in unified_manager.unknown_inactive_nodes.items(): - repo_url = resolve_giturl_from_path(v2[1]) - - if repo_url is None: - continue - - normalized_url1 = repo_url.replace("git@github.com:", "https://github.com/") - normalized_url2 = repo_url.replace("https://github.com/", "git@github.com:") - - if normalized_url1 in git_info: - commit_hash = git_info[normalized_url1]['hash'] - todo_enable.append((k2, commit_hash)) - processed_urls.append(normalized_url1) - - if normalized_url2 in git_info: - commit_hash = git_info[normalized_url2]['hash'] - todo_enable.append((k2, commit_hash)) - processed_urls.append(normalized_url2) - - for x in todo_disable: - unified_manager.unified_disable(x, True) - disabled_repos.append(x) - - for x in todo_enable: - res = unified_manager.unified_enable(x[0], 'unknown') - - is_switched = False - if res and res.target: - is_switched = repo_switch_commit(res.target, x[1]) - - if is_switched: - checkout_repos.append(x) - else: - enabled_repos.append(x) - - for x in todo_checkout: - is_switched = repo_switch_commit(x[0], x[1]) - - if is_switched: - checkout_repos.append(x) - else: - skip_node_packs.append(x[0]) - - for x in processed_urls: - if x in git_info: - del git_info[x] - - for repo_url in git_info.keys(): - repo_name 
= os.path.basename(repo_url) - if repo_name.endswith('.git'): - repo_name = repo_name[:-4] - - to_path = os.path.join(custom_nodes_path, repo_name) - unified_manager.repo_install(repo_url, to_path, instant_execution=True, no_deps=False, return_postinstall=False) - cloned_repos.append(repo_name) - - # reload - await unified_manager.migrate_unmanaged_nodes() - - # print summary - for x in cloned_repos: - print(f"[ INSTALLED ] {x}") - for x in checkout_repos: - print(f"[ CHECKOUT ] {x}") - for x in enabled_repos: - print(f"[ ENABLED ] {x}") - for x in disabled_repos: - print(f"[ DISABLED ] {x}") - for x in skip_node_packs: - print(f"[ SKIPPED ] {x}") - - # if is_failed: - # print("[bold red]ERROR: Failed to restore snapshot.[/bold red]") - - -# check need to migrate -need_to_migrate = False - - -async def check_need_to_migrate(): - global need_to_migrate - - await unified_manager.reload('cache') - await unified_manager.load_nightly(channel='default', mode='cache') - - legacy_custom_nodes = [] - - for x in unified_manager.active_nodes.values(): - if x[0] == 'nightly' and not x[1].endswith('@nightly'): - legacy_custom_nodes.append(x[1]) - - for x in unified_manager.nightly_inactive_nodes.values(): - if not x.endswith('@nightly'): - legacy_custom_nodes.append(x) - - if len(legacy_custom_nodes) > 0: - print("\n--------------------- ComfyUI-Manager migration notice --------------------") - print("The following custom nodes were installed using the old management method and require migration:") - print(", ".join(legacy_custom_nodes)) - print("---------------------------------------------------------------------------\n") - need_to_migrate = True - - -def get_comfyui_versions(): - repo = git.Repo(comfy_path) - versions = [x.name for x in repo.tags if x.name.startswith('v')] - versions.reverse() # nearest tag - - versions = versions[:4] - - current_tag = repo.git.describe('--tags') - - if current_tag not in versions: - versions = sorted(versions + [current_tag], reverse=True) - versions = versions[:4] - - main_branch = repo.heads.main - latest_commit = main_branch.commit - latest_tag = repo.git.describe('--tags', latest_commit.hexsha) - - if latest_tag != versions[0]: - versions.insert(0, 'nightly') - else: - versions[0] = 'nightly' - current_tag = 'nightly' - - return versions, current_tag - - -def switch_comfyui(tag): - repo = git.Repo(comfy_path) - - if tag == 'nightly': - repo.git.checkout('main') - repo.remotes.origin.pull() - print("[ComfyUI-Manager] ComfyUI version is switched to the latest 'main' version") - else: - repo.git.checkout(tag) - print(f"[ComfyUI-Manager] ComfyUI version is switched to '{tag}'") - - -def resolve_giturl_from_path(fullpath): - """ - resolve giturl path of unclassified custom node based on remote url in .git/config - """ - git_config_path = os.path.join(fullpath, '.git', 'config') - - if not os.path.exists(git_config_path): - return "unknown" - - config = configparser.ConfigParser() - config.read(git_config_path) - - for k, v in config.items(): - if k.startswith('remote ') and 'url' in v: - return v['url'].replace("git@github.com:", "https://github.com/") - - return None - - -def repo_switch_commit(repo_path, commit_hash): - try: - repo = git.Repo(repo_path) - if repo.head.commit.hexsha == commit_hash: - return False - - repo.git.checkout(commit_hash) - return True - except: - return None diff --git a/glob/manager_downloader.py b/glob/manager_downloader.py deleted file mode 100644 index 8a8c73c0..00000000 --- a/glob/manager_downloader.py +++ /dev/null @@ -1,70 +0,0 @@ 
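# (Deleted module) glob/manager_downloader.py routed model downloads through an
# aria2 RPC server when COMFYUI_MANAGER_ARIA2_SERVER is set and otherwise fell
# back to torchvision's download_url; HF_ENDPOINT optionally rewrites
# huggingface.co URLs. Illustrative call, with a hypothetical URL and target path:
#
#     download_url("https://huggingface.co/org/repo/resolve/main/model.safetensors",
#                  "models/checkpoints", "model.safetensors")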
-import os -from urllib.parse import urlparse - -aria2 = os.getenv('COMFYUI_MANAGER_ARIA2_SERVER') -HF_ENDPOINT = os.getenv('HF_ENDPOINT') - -if aria2 is not None: - secret = os.getenv('COMFYUI_MANAGER_ARIA2_SECRET') - url = urlparse(aria2) - port = url.port - host = url.scheme + '://' + url.hostname - import aria2p - - aria2 = aria2p.API(aria2p.Client(host=host, port=port, secret=secret)) - - -def download_url(model_url: str, model_dir: str, filename: str): - if aria2: - return aria2_download_url(model_url, model_dir, filename) - else: - from torchvision.datasets.utils import download_url as torchvision_download_url - - return torchvision_download_url(model_url, model_dir, filename) - - -def aria2_find_task(dir: str, filename: str): - target = os.path.join(dir, filename) - - downloads = aria2.get_downloads() - - for download in downloads: - for file in download.files: - if file.is_metadata: - continue - if str(file.path) == target: - return download - - -def aria2_download_url(model_url: str, model_dir: str, filename: str): - import manager_core as core - import tqdm - import time - - if model_dir.startswith(core.comfy_path): - model_dir = model_dir[len(core.comfy_path) :] - - if HF_ENDPOINT: - model_url = model_url.replace('https://huggingface.co', HF_ENDPOINT) - - download_dir = model_dir if model_dir.startswith('/') else os.path.join('/models', model_dir) - - download = aria2_find_task(download_dir, filename) - if download is None: - options = {'dir': download_dir, 'out': filename} - download = aria2.add(model_url, options)[0] - - if download.is_active: - with tqdm.tqdm( - total=download.total_length, - bar_format='{l_bar}{bar}{r_bar}', - desc=filename, - unit='B', - unit_scale=True, - ) as progress_bar: - while download.is_active: - if progress_bar.total == 0 and download.total_length != 0: - progress_bar.reset(download.total_length) - progress_bar.update(download.completed_length - progress_bar.n) - time.sleep(1) - download.update() diff --git a/glob/security_check.py b/glob/security_check.py deleted file mode 100644 index 12fe527c..00000000 --- a/glob/security_check.py +++ /dev/null @@ -1,92 +0,0 @@ -import sys -import subprocess -import os - - -def security_check(): - print("[START] Security scan") - - custom_nodes_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) - comfyui_path = os.path.abspath(os.path.join(custom_nodes_path, '..')) - - guide = { - "ComfyUI_LLMVISION": """ -0.Remove ComfyUI\\custom_nodes\\ComfyUI_LLMVISION. -1.Remove pip packages: openai-1.16.3.dist-info, anthropic-0.21.4.dist-info, openai-1.30.2.dist-info, anthropic-0.21.5.dist-info, anthropic-0.26.1.dist-info, %LocalAppData%\\rundll64.exe - (For portable versions, it is recommended to reinstall. If you are using a venv, it is advised to recreate the venv.) -2.Remove these files in your system: lib/browser/admin.py, Cadmino.py, Fadmino.py, VISION-D.exe, BeamNG.UI.exe -3.Check your Windows registry for the key listed above and remove it. - (HKEY_CURRENT_USER\\Software\\OpenAICLI) -4.Run a malware scanner. -5.Change all of your passwords, everywhere. - -(Reinstall OS is recommended.) -\n -Detailed information: https://old.reddit.com/r/comfyui/comments/1dbls5n/psa_if_youve_used_the_comfyui_llmvision_node_from/ - """, - "lolMiner": """ -1. Remove pip packages: lolMiner* -2. Remove files: lolMiner*, 4G_Ethash_Linux_Readme.txt, mine* in ComfyUI dir. - -(Reinstall ComfyUI is recommended.) 
- """ - } - - node_blacklist = {"ComfyUI_LLMVISION": "ComfyUI_LLMVISION"} - - pip_blacklist = {"AppleBotzz": "ComfyUI_LLMVISION"} - - file_blacklist = { - "ComfyUI_LLMVISION": ["%LocalAppData%\\rundll64.exe"], - "lolMiner": [os.path.join(comfyui_path, 'lolMiner')] - } - - installed_pips = subprocess.check_output([sys.executable, '-m', "pip", "freeze"], text=True) - - detected = set() - try: - anthropic_info = subprocess.check_output([sys.executable, '-m', "pip", "show", "anthropic"], text=True, stderr=subprocess.DEVNULL) - anthropic_reqs = [x for x in anthropic_info.split('\n') if x.startswith("Requires")][0].split(': ')[1] - if "pycrypto" in anthropic_reqs: - location = [x for x in anthropic_info.split('\n') if x.startswith("Location")][0].split(': ')[1] - for fi in os.listdir(location): - if fi.startswith("anthropic"): - guide["ComfyUI_LLMVISION"] = f"\n0.Remove {os.path.join(location, fi)}" + guide["ComfyUI_LLMVISION"] - detected.add("ComfyUI_LLMVISION") - except subprocess.CalledProcessError: - pass - - for k, v in node_blacklist.items(): - if os.path.exists(os.path.join(custom_nodes_path, k)): - print(f"[SECURITY ALERT] custom node '{k}' is dangerous.") - detected.add(v) - - for k, v in pip_blacklist.items(): - if k in installed_pips: - detected.add(v) - break - - for k, v in file_blacklist.items(): - for x in v: - if os.path.exists(os.path.expandvars(x)): - detected.add(k) - break - - if len(detected) > 0: - for line in installed_pips.split('\n'): - for k, v in pip_blacklist.items(): - if k in line: - print(f"[SECURITY ALERT] '{line}' is dangerous.") - - print("\n########################################################################") - print(" Malware has been detected, forcibly terminating ComfyUI execution.") - print("########################################################################\n") - - for x in detected: - print(f"\n======== TARGET: {x} =========") - print(f"\nTODO:") - print(guide.get(x)) - - exit(-1) - - print("[DONE] Security scan") diff --git a/js/comfyui-manager.js b/js/comfyui-manager.js index 5cadbf68..eb679bfb 100644 --- a/js/comfyui-manager.js +++ b/js/comfyui-manager.js @@ -101,24 +101,6 @@ docStyle.innerHTML = ` vertical-align: middle; } -#cm-channel-badge { - color: white; - background-color: #AA0000; - width: 220px; - height: 23px; - font-size: 13px; - border-radius: 5px; - left: 5px; - top: 5px; - align-content: center; - justify-content: center; - text-align: center; - font-weight: bold; - float: left; - vertical-align: middle; - position: relative; -} - #custom-nodes-grid a { color: #5555FF; font-weight: bold; @@ -242,7 +224,6 @@ var update_comfyui_button = null; var switch_comfyui_button = null; var fetch_updates_button = null; var update_all_button = null; -var badge_mode = "none"; let share_option = 'all'; // copied style from https://github.com/pythongosssss/ComfyUI-Custom-Scripts @@ -409,10 +390,10 @@ const style = ` .pysssss-workflow-arrow-2:after { content: "▼"; } - .pysssss-workflow-arrow-2:hover { +.pysssss-workflow-arrow-2:hover { filter: brightness(1.6); background-color: var(--comfy-menu-bg); - } +} .pysssss-workflow-popup-2 ~ .litecontextmenu { transform: scale(1.3); } @@ -425,13 +406,6 @@ const style = ` `; - -async function init_badge_mode() { - api.fetchApi('/manager/badge_mode') - .then(response => response.text()) - .then(data => { badge_mode = data; }) -} - async function init_share_option() { api.fetchApi('/manager/share_option') .then(response => response.text()) @@ -448,7 +422,6 @@ async function init_notice(notice) { }) } -await 
init_badge_mode(); await init_share_option(); async function fetchNicknames() { @@ -1511,7 +1484,7 @@ class ManagerMenuDialog extends ComfyDialog { app.registerExtension({ - name: "Comfy.ManagerMenu", + name: "Comfy.ManagerExtMenu", init() { $el("style", { textContent: style, @@ -1538,30 +1511,30 @@ app.registerExtension({ // new style Manager buttons // unload models button into new style Manager button let cmGroup = new (await import("../../scripts/ui/components/buttonGroup.js")).ComfyButtonGroup( - new(await import("../../scripts/ui/components/button.js")).ComfyButton({ - icon: "puzzle", - action: () => { - if(!manager_instance) - setManagerInstance(new ManagerMenuDialog()); - manager_instance.show(); - }, - tooltip: "ComfyUI Manager", - content: "Manager", - classList: "comfyui-button comfyui-menu-mobile-collapse primary" - }).element, - new(await import("../../scripts/ui/components/button.js")).ComfyButton({ - icon: "star", - action: () => { - if(!manager_instance) - setManagerInstance(new ManagerMenuDialog()); + new(await import("../../scripts/ui/components/button.js")).ComfyButton({ + icon: "star", + action: () => { + if(!manager_instance) + setManagerInstance(new ManagerMenuDialog()); if(!CustomNodesManager.instance) { CustomNodesManager.instance = new CustomNodesManager(app, self); } CustomNodesManager.instance.show(CustomNodesManager.ShowMode.FAVORITES); - }, - tooltip: "Show favorite custom node list" - }).element, + }, + tooltip: "Show favorite custom node list", + content: "Manager", + classList: "comfyui-button comfyui-menu-mobile-collapse primary" + }).element, + new(await import("../../scripts/ui/components/button.js")).ComfyButton({ + icon: "puzzle", + action: () => { + if(!manager_instance) + setManagerInstance(new ManagerMenuDialog()); + manager_instance.show(); + }, + tooltip: "ComfyUI Manager", + }).element, new(await import("../../scripts/ui/components/button.js")).ComfyButton({ icon: "vacuum-outline", action: () => { diff --git a/modules/manager_ext_core.py b/modules/manager_ext_core.py new file mode 100644 index 00000000..dda802b5 --- /dev/null +++ b/modules/manager_ext_core.py @@ -0,0 +1,72 @@ +import os +import sys +import configparser +import manager_core as core +import cm_global +from manager_util import * + + +version_code = [3, 0] +version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '') + +DEFAULT_CHANNEL = "https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main" + +config_path = os.path.join(comfyui_manager_path, "config.ini") +cached_config = None + +def write_config(): + config = configparser.ConfigParser() + config['default'] = { + 'share_option': get_config()['share_option'], + "file_logging": get_config()['file_logging'], + 'default_ui': get_config()['default_ui'], + 'component_policy': get_config()['component_policy'], + 'double_click_policy': get_config()['double_click_policy'], + 'model_download_by_agent': get_config()['model_download_by_agent'], + 'security_level': get_config()['security_level'], + } + with open(config_path, 'w') as configfile: + config.write(configfile) + + +def read_config(): + try: + config = configparser.ConfigParser() + config.read(config_path) + default_conf = config['default'] + + # policy migration: disable_unsecure_features -> security_level + security_level = default_conf['security_level'] if 'security_level' in default_conf else 'normal' + + return { + 'share_option': default_conf['share_option'] if 'share_option' in default_conf else 'all', + 'default_ui': 
default_conf['default_ui'] if 'default_ui' in default_conf else 'none', + 'component_policy': default_conf['component_policy'] if 'component_policy' in default_conf else 'workflow', + 'double_click_policy': default_conf['double_click_policy'] if 'double_click_policy' in default_conf else 'copy-all', + 'model_download_by_agent': default_conf['model_download_by_agent'].lower() == 'true' if 'model_download_by_agent' in default_conf else False, + 'security_level': security_level + } + + except Exception: + return { + 'share_option': 'all', + 'default_ui': 'none', + 'component_policy': 'workflow', + 'double_click_policy': 'copy-all', + 'model_download_by_agent': False, + 'security_level': 'normal', + } + + +def get_config(): + global cached_config + + if cached_config is None: + cached_config = read_config() + + return cached_config + + +def pip_install(packages): + install_cmd = ['#FORCE', sys.executable, "-m", "pip", "install", '-U'] + packages + core.try_install_script('pip install via manager', '..', install_cmd) diff --git a/glob/manager_server.py b/modules/manager_ext_server.py similarity index 64% rename from glob/manager_server.py rename to modules/manager_ext_server.py index 88372bb4..91e81ce2 100644 --- a/glob/manager_server.py +++ b/modules/manager_ext_server.py @@ -2,21 +2,20 @@ import traceback import folder_paths import locale -import subprocess # don't remove this import concurrent import nodes import os import sys -import threading import re -import shutil import git from server import PromptServer import manager_core as core -import manager_util import cm_global +from . import manager_ext_core as ext_core +from . import manager_ext_util + print(f"### Loading: ComfyUI-Manager ({core.version_str})") comfy_ui_hash = "-" @@ -78,49 +77,20 @@ async def get_risky_level(files): return "middle" -class ManagerFuncsInComfyUI(core.ManagerFuncs): - def get_current_preview_method(self): - if args.preview_method == latent_preview.LatentPreviewMethod.Auto: - return "auto" - elif args.preview_method == latent_preview.LatentPreviewMethod.Latent2RGB: - return "latent2rgb" - elif args.preview_method == latent_preview.LatentPreviewMethod.TAESD: - return "taesd" - else: - return "none" +def get_current_preview_method(self): + if args.preview_method == latent_preview.LatentPreviewMethod.Auto: + return "auto" + elif args.preview_method == latent_preview.LatentPreviewMethod.Latent2RGB: + return "latent2rgb" + elif args.preview_method == latent_preview.LatentPreviewMethod.TAESD: + return "taesd" + else: + return "none" - def run_script(self, cmd, cwd='.'): - if len(cmd) > 0 and cmd[0].startswith("#"): - print(f"[ComfyUI-Manager] Unexpected behavior: `{cmd}`") - return 0 - - process = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, bufsize=1) - - stdout_thread = threading.Thread(target=handle_stream, args=(process.stdout, "")) - stderr_thread = threading.Thread(target=handle_stream, args=(process.stderr, "[!]")) - - stdout_thread.start() - stderr_thread.start() - - stdout_thread.join() - stderr_thread.join() - - return process.wait() - - -core.manager_funcs = ManagerFuncsInComfyUI() - -sys.path.append('../..') from manager_downloader import download_url core.comfy_path = os.path.dirname(folder_paths.__file__) -core.js_path = os.path.join(core.comfy_path, "web", "extensions") - -local_db_model = os.path.join(core.comfyui_manager_path, "model-list.json") -local_db_alter = os.path.join(core.comfyui_manager_path, "alter-list.json") -local_db_custom_node_list = 
os.path.join(core.comfyui_manager_path, "custom-node-list.json") -local_db_extension_node_mappings = os.path.join(core.comfyui_manager_path, "extension-node-map.json") components_path = os.path.join(core.comfyui_manager_path, 'components') @@ -140,10 +110,6 @@ def set_preview_method(method): set_preview_method(core.get_config()['preview_method']) -def set_badge_mode(mode): - core.get_config()['badge_mode'] = mode - - def set_default_ui_mode(mode): core.get_config()['default_ui'] = mode @@ -156,67 +122,6 @@ def set_double_click_policy(mode): core.get_config()['double_click_policy'] = mode -def print_comfyui_version(): - global comfy_ui_hash - global comfyui_tag - - is_detached = False - try: - repo = git.Repo(os.path.dirname(folder_paths.__file__)) - core.comfy_ui_revision = len(list(repo.iter_commits('HEAD'))) - - comfy_ui_hash = repo.head.commit.hexsha - cm_global.variables['comfyui.revision'] = core.comfy_ui_revision - - core.comfy_ui_commit_datetime = repo.head.commit.committed_datetime - cm_global.variables['comfyui.commit_datetime'] = core.comfy_ui_commit_datetime - - is_detached = repo.head.is_detached - current_branch = repo.active_branch.name - - if current_branch == "master": - comfyui_tag = repo.git.describe('--tags', repo.heads.main.commit.hexsha) - if not comfyui_tag.startswith("v"): - comfyui_tag = None - - try: - if core.comfy_ui_commit_datetime.date() < core.comfy_ui_required_commit_datetime.date(): - print(f"\n\n## [WARN] ComfyUI-Manager: Your ComfyUI version ({core.comfy_ui_revision})[{core.comfy_ui_commit_datetime.date()}] is too old. Please update to the latest version. ##\n\n") - except: - pass - - # process on_revision_detected --> - if 'cm.on_revision_detected_handler' in cm_global.variables: - for k, f in cm_global.variables['cm.on_revision_detected_handler']: - try: - f(core.comfy_ui_revision) - except Exception: - print(f"[ERROR] '{k}' on_revision_detected_handler") - traceback.print_exc() - - del cm_global.variables['cm.on_revision_detected_handler'] - else: - print(f"[ComfyUI-Manager] Some features are restricted due to your ComfyUI being outdated.") - # <-- - - if current_branch == "master": - if comfyui_tag: - print(f"### ComfyUI Version: {comfyui_tag} | Released on '{core.comfy_ui_commit_datetime.date()}'") - else: - print(f"### ComfyUI Revision: {core.comfy_ui_revision} [{comfy_ui_hash[:8]}] | Released on '{core.comfy_ui_commit_datetime.date()}'") - else: - print(f"### ComfyUI Revision: {core.comfy_ui_revision} on '{current_branch}' [{comfy_ui_hash[:8]}] | Released on '{core.comfy_ui_commit_datetime.date()}'") - except: - if is_detached: - print(f"### ComfyUI Revision: {core.comfy_ui_revision} [{comfy_ui_hash[:8]}] *DETACHED | Released on '{core.comfy_ui_commit_datetime.date()}'") - else: - print("### ComfyUI Revision: UNKNOWN (The currently installed ComfyUI is not a Git repository)") - - -print_comfyui_version() -core.check_invalid_nodes() - - def setup_environment(): git_exe = core.get_config()['git_exe'] @@ -488,13 +393,13 @@ def convert_markdown_to_html(input_text): def populate_markdown(x): if 'description' in x: - x['description'] = convert_markdown_to_html(manager_util.sanitize_tag(x['description'])) + x['description'] = convert_markdown_to_html(manager_ext_util.sanitize_tag(x['description'])) if 'name' in x: - x['name'] = manager_util.sanitize_tag(x['name']) + x['name'] = manager_ext_util.sanitize_tag(x['name']) if 'title' in x: - x['title'] = manager_util.sanitize_tag(x['title']) + x['title'] = manager_ext_util.sanitize_tag(x['title']) 
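For context, populate_markdown escapes raw angle brackets before the markdown conversion so node metadata cannot inject HTML. A minimal standalone sketch of that flow, assuming manager_ext_util.sanitize_tag keeps simple replace-based escaping (the names below are illustrative stand-ins, not the packaged API):

def sanitize_tag(text):
    # Escape raw angle brackets so descriptions cannot smuggle HTML tags.
    return text.replace('<', '&lt;').replace('>', '&gt;')

def populate_markdown_fields(entry, render_markdown):
    # render_markdown stands in for convert_markdown_to_html above.
    if 'description' in entry:
        entry['description'] = render_markdown(sanitize_tag(entry['description']))
    for key in ('name', 'title'):
        if key in entry:
            entry[key] = sanitize_tag(entry[key])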
@routes.get("/customnode/getlist") @@ -613,32 +518,6 @@ async def remove_snapshot(request): return web.Response(status=400) -@routes.get("/snapshot/restore") -async def remove_snapshot(request): - if not is_allowed_security_level('middle'): - print(SECURITY_MESSAGE_MIDDLE_OR_BELOW) - return web.Response(status=403) - - try: - target = request.rel_url.query["target"] - - path = os.path.join(core.comfyui_manager_path, 'snapshots', f"{target}.json") - if os.path.exists(path): - if not os.path.exists(core.startup_script_path): - os.makedirs(core.startup_script_path) - - target_path = os.path.join(core.startup_script_path, "restore-snapshot.json") - shutil.copy(path, target_path) - - print(f"Snapshot restore scheduled: `{target}`") - return web.Response(status=200) - - print(f"Snapshot file not found: `{path}`") - return web.Response(status=400) - except: - return web.Response(status=400) - - @routes.get("/snapshot/get_current") async def get_current_snapshot_api(request): try: @@ -647,15 +526,6 @@ async def get_current_snapshot_api(request): return web.Response(status=400) -@routes.get("/snapshot/save") -async def save_snapshot(request): - try: - core.save_snapshot_with_postfix('snapshot') - return web.Response(status=200) - except: - return web.Response(status=400) - - def unzip_install(files): temp_filename = 'manager-temp.zip' for url in files: @@ -707,188 +577,6 @@ def download_url_with_agent(url, save_path): return True -def copy_install(files, js_path_name=None): - for url in files: - if url.endswith("/"): - url = url[:-1] - try: - filename = os.path.basename(url) - if url.endswith(".py"): - download_url(url, core.custom_nodes_path, filename) - else: - path = os.path.join(core.js_path, js_path_name) if js_path_name is not None else core.js_path - if not os.path.exists(path): - os.makedirs(path) - download_url(url, path, filename) - - except Exception as e: - print(f"Install(copy) error: {url} / {e}", file=sys.stderr) - return False - - print("Installation was successful.") - return True - - -def copy_uninstall(files, js_path_name='.'): - for url in files: - if url.endswith("/"): - url = url[:-1] - dir_name = os.path.basename(url) - base_path = core.custom_nodes_path if url.endswith('.py') else os.path.join(core.js_path, js_path_name) - file_path = os.path.join(base_path, dir_name) - - try: - if os.path.exists(file_path): - os.remove(file_path) - elif os.path.exists(file_path + ".disabled"): - os.remove(file_path + ".disabled") - except Exception as e: - print(f"Uninstall(copy) error: {url} / {e}", file=sys.stderr) - return False - - print("Uninstallation was successful.") - return True - - -def copy_set_active(files, is_disable, js_path_name='.'): - if is_disable: - action_name = "Disable" - else: - action_name = "Enable" - - for url in files: - if url.endswith("/"): - url = url[:-1] - dir_name = os.path.basename(url) - base_path = core.custom_nodes_path if url.endswith('.py') else os.path.join(core.js_path, js_path_name) - file_path = os.path.join(base_path, dir_name) - - try: - if is_disable: - current_name = file_path - new_name = file_path + ".disabled" - else: - current_name = file_path + ".disabled" - new_name = file_path - - os.rename(current_name, new_name) - - except Exception as e: - print(f"{action_name}(copy) error: {url} / {e}", file=sys.stderr) - - return False - - print(f"{action_name} was successful.") - return True - - -@routes.get("/customnode/versions/{node_name}") -async def get_cnr_versions(request): - node_name = request.match_info.get("node_name", None) - versions 
= core.cnr_utils.all_versions_of_node(node_name) - - if versions: - return web.json_response(versions, content_type='application/json') - - return web.Response(status=400) - - -@routes.get("/customnode/disabled_versions/{node_name}") -async def get_disabled_versions(request): - node_name = request.match_info.get("node_name", None) - versions = [] - if node_name in core.unified_manager.nightly_inactive_nodes: - versions.append(dict(version='nightly')) - - for v in core.unified_manager.cnr_inactive_nodes.get(node_name, {}).keys(): - versions.append(dict(version=v)) - - if versions: - return web.json_response(versions, content_type='application/json') - - return web.Response(status=400) - - -@routes.post("/customnode/reinstall") -async def reinstall_custom_node(request): - await uninstall_custom_node(request) - await install_custom_node(request) - - -@routes.post("/customnode/install") -async def install_custom_node(request): - if not is_allowed_security_level('middle'): - print(SECURITY_MESSAGE_MIDDLE_OR_BELOW) - return web.Response(status=403) - - json_data = await request.json() - - # non-nightly cnr is safe - risky_level = None - cnr_id = json_data.get('id') - skip_post_install = json_data.get('skip_post_install') - - if json_data['version'] != 'unknown': - selected_version = json_data.get('selected_version', 'latest') - if selected_version != 'nightly': - risky_level = 'low' - node_spec_str = f"{cnr_id}@{selected_version}" - else: - node_spec_str = f"{cnr_id}@nightly" - else: - # unknown - unknown_name = os.path.basename(json_data['files'][0]) - node_spec_str = f"{unknown_name}@unknown" - - # apply security policy if not cnr node (nightly isn't regarded as cnr node) - if risky_level is None: - risky_level = await get_risky_level(json_data['files']) - - if not is_allowed_security_level(risky_level): - print(SECURITY_MESSAGE_GENERAL) - return web.Response(status=404) - - node_spec = core.unified_manager.resolve_node_spec(node_spec_str) - - if node_spec is None: - return - - node_name, version_spec, is_specified = node_spec - res = await core.unified_manager.install_by_id(node_name, version_spec, json_data['channel'], json_data['mode'], return_postinstall=skip_post_install) - # discard post install if skip_post_install mode - - if res not in ['skip', 'enable', 'install-git', 'install-cnr', 'switch-cnr']: - return web.Response(status=400) - - return web.Response(status=200) - - -@routes.post("/customnode/fix") -async def fix_custom_node(request): - if not is_allowed_security_level('middle'): - print(SECURITY_MESSAGE_MIDDLE_OR_BELOW) - return web.Response(status=403) - - json_data = await request.json() - - node_id = json_data.get('id') - node_ver = json_data['version'] - if node_ver != 'unknown': - node_name = node_id - else: - # unknown - node_name = os.path.basename(json_data['files'][0]) - - res = core.unified_manager.unified_fix(node_name, node_ver) - - if res.result: - print(f"After restarting ComfyUI, please refresh the browser.") - return web.json_response({}, content_type='application/json') - - print(f"ERROR: An error occurred while fixing '{node_name}@{node_ver}'.") - return web.Response(status=400) - - @routes.post("/customnode/install/git_url") async def install_custom_node_git_url(request): if not is_allowed_security_level('high'): @@ -910,7 +598,7 @@ async def install_custom_node_git_url(request): @routes.post("/customnode/install/pip") -async def install_custom_node_git_url(request): +async def install_pip(request): if not is_allowed_security_level('high'): 
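        # pip installs requested through the manager are gated behind the 'high'
        # security level; lower levels fall through to the 403 response below.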
print(SECURITY_MESSAGE_NORMAL_MINUS) return web.Response(status=403) @@ -921,60 +609,6 @@ async def install_custom_node_git_url(request): return web.Response(status=200) -@routes.post("/customnode/uninstall") -async def uninstall_custom_node(request): - if not is_allowed_security_level('middle'): - print(SECURITY_MESSAGE_MIDDLE_OR_BELOW) - return web.Response(status=403) - - json_data = await request.json() - - node_id = json_data.get('id') - if json_data['version'] != 'unknown': - is_unknown = False - node_name = node_id - else: - # unknown - is_unknown = True - node_name = os.path.basename(json_data['files'][0]) - - res = core.unified_manager.unified_uninstall(node_name, is_unknown) - - if res.result: - print(f"After restarting ComfyUI, please refresh the browser.") - return web.json_response({}, content_type='application/json') - - print(f"ERROR: An error occurred while uninstalling '{node_name}'.") - return web.Response(status=400) - - -@routes.post("/customnode/update") -async def update_custom_node(request): - if not is_allowed_security_level('middle'): - print(SECURITY_MESSAGE_MIDDLE_OR_BELOW) - return web.Response(status=403) - - json_data = await request.json() - - node_id = json_data.get('id') - if json_data['version'] != 'unknown': - node_name = node_id - else: - # unknown - node_name = os.path.basename(json_data['files'][0]) - - res = core.unified_manager.unified_update(node_name, json_data['version']) - - core.clear_pip_cache() - - if res.result: - print(f"After restarting ComfyUI, please refresh the browser.") - return web.json_response({}, content_type='application/json') - - print(f"ERROR: An error occurred while updating '{node_name}'.") - return web.Response(status=400) - - @routes.get("/comfyui_manager/update_comfyui") async def update_comfyui(request): print(f"Update ComfyUI") @@ -995,17 +629,6 @@ async def update_comfyui(request): return web.Response(status=400) -@routes.get("/comfyui_manager/comfyui_versions") -async def comfyui_versions(request): - try: - res, current = core.get_comfyui_versions() - return web.json_response({'versions': res, 'current': current}, status=200, content_type='application/json') - except Exception as e: - print(f"ComfyUI update fail: {e}", file=sys.stderr) - - return web.Response(status=400) - - @routes.get("/comfyui_manager/comfyui_switch_version") async def comfyui_switch_version(request): try: @@ -1019,40 +642,6 @@ async def comfyui_switch_version(request): return web.Response(status=400) -@routes.post("/customnode/disable") -async def disable_node(request): - json_data = await request.json() - - node_id = json_data.get('id') - if json_data['version'] != 'unknown': - is_unknown = False - node_name = node_id - else: - # unknown - is_unknown = True - node_name = os.path.basename(json_data['files'][0]) - - res = core.unified_manager.unified_disable(node_name, is_unknown) - - if res: - return web.json_response({}, content_type='application/json') - - return web.Response(status=400) - - -@routes.get("/manager/migrate_unmanaged_nodes") -async def migrate_unmanaged_nodes(request): - print(f"[ComfyUI-Manager] Migrating unmanaged nodes...") - await core.unified_manager.migrate_unmanaged_nodes() - print("Done.") - return web.Response(status=200) - - -@routes.get("/manager/need_to_migrate") -async def need_to_migrate(request): - return web.Response(text=str(core.need_to_migrate), status=200) - - @routes.post("/model/install") async def install_model(request): json_data = await request.json() @@ -1146,17 +735,6 @@ async def preview_method(request): 
return web.Response(status=200) -@routes.get("/manager/badge_mode") -async def badge_mode(request): - if "value" in request.rel_url.query: - set_badge_mode(request.rel_url.query['value']) - core.write_config() - else: - return web.Response(text=core.get_config()['badge_mode'], status=200) - - return web.Response(status=200) - - @routes.get("/manager/default_ui") async def default_ui_mode(request): if "value" in request.rel_url.query: @@ -1357,67 +935,14 @@ if hasattr(PromptServer.instance, "app"): def sanitize(data): return data.replace("<", "<").replace(">", ">") - -async def _confirm_try_install(sender, custom_node_url, msg): - json_obj = await core.get_data_by_mode('default', 'custom-node-list.json') - - sender = manager_util.sanitize_tag(sender) - msg = manager_util.sanitize_tag(msg) - target = core.lookup_customnode_by_url(json_obj, custom_node_url) - - if target is not None: - PromptServer.instance.send_sync("cm-api-try-install-customnode", - {"sender": sender, "target": target, "msg": msg}) - else: - print(f"[ComfyUI Manager API] Failed to try install - Unknown custom node url '{custom_node_url}'") - - -def confirm_try_install(sender, custom_node_url, msg): - asyncio.run(_confirm_try_install(sender, custom_node_url, msg)) - - -cm_global.register_api('cm.try-install-custom-node', confirm_try_install) - -import asyncio - - -async def default_cache_update(): - async def get_cache(filename): - uri = f"{core.DEFAULT_CHANNEL}/{filename}" - cache_uri = str(manager_util.simple_hash(uri)) + '_' + filename - cache_uri = os.path.join(core.cache_dir, cache_uri) - - json_obj = await manager_util.get_data(uri, True) - - with core.cache_lock: - with open(cache_uri, "w", encoding='utf-8') as file: - json.dump(json_obj, file, indent=4, sort_keys=True) - print(f"[ComfyUI-Manager] default cache updated: {uri}") - - a = get_cache("custom-node-list.json") - b = get_cache("extension-node-map.json") - c = get_cache("model-list.json") - d = get_cache("alter-list.json") - e = get_cache("github-stats.json") - - await asyncio.gather(a, b, c, d, e) - - if not core.get_config()['skip_migration_check']: - await core.check_need_to_migrate() - else: - print("[ComfyUI-Manager] Migration check is skipped...") - - -threading.Thread(target=lambda: asyncio.run(default_cache_update())).start() - if not os.path.exists(core.config_path): core.get_config() core.write_config() - cm_global.register_extension('ComfyUI-Manager', {'version': core.version, - 'name': 'ComfyUI Manager', - 'nodes': {'Terminal Log //CM'}, - 'description': 'It provides the ability to manage custom nodes in ComfyUI.', }) + 'name': 'ComfyUI Manager (Extension)', + 'nodes': {'Terminal Log //CM'}, + 'description': 'ComfyUI-Manager (Extension)', }) +cm_global.variables['manager-core.show_menu'] = False diff --git a/glob/manager_util.py b/modules/manager_ext_util.py similarity index 100% rename from glob/manager_util.py rename to modules/manager_ext_util.py diff --git a/glob/share_3rdparty.py b/modules/share_3rdparty.py similarity index 100% rename from glob/share_3rdparty.py rename to modules/share_3rdparty.py diff --git a/prestartup_script.py b/prestartup_script.py deleted file mode 100644 index 728eabc9..00000000 --- a/prestartup_script.py +++ /dev/null @@ -1,704 +0,0 @@ -import os -import subprocess -import sys -import atexit -import threading -import re -import locale -import platform -import json -import ast -import logging - -glob_path = os.path.join(os.path.dirname(__file__), "glob") -sys.path.append(glob_path) - -import security_check -from 
manager_util import * -import cm_global - -security_check.security_check() - -cm_global.pip_blacklist = ['torch', 'torchsde', 'torchvision'] -cm_global.pip_downgrade_blacklist = ['torch', 'torchsde', 'torchvision', 'transformers', 'safetensors', 'kornia'] - - -def skip_pip_spam(x): - return ('Requirement already satisfied:' in x) or ("DEPRECATION: Loading egg at" in x) - - -message_collapses = [skip_pip_spam] -import_failed_extensions = set() -cm_global.variables['cm.on_revision_detected_handler'] = [] -enable_file_logging = True - - -def register_message_collapse(f): - global message_collapses - message_collapses.append(f) - - -def is_import_failed_extension(name): - global import_failed_extensions - return name in import_failed_extensions - - -def check_file_logging(): - global enable_file_logging - try: - import configparser - config_path = os.path.join(os.path.dirname(__file__), "config.ini") - config = configparser.ConfigParser() - config.read(config_path) - default_conf = config['default'] - - if 'file_logging' in default_conf and default_conf['file_logging'].lower() == 'false': - enable_file_logging = False - except Exception: - pass - - -check_file_logging() - -comfy_path = os.environ.get('COMFYUI_PATH') -if comfy_path is None: - comfy_path = os.path.abspath(os.path.dirname(sys.modules['__main__'].__file__)) - -sys.__comfyui_manager_register_message_collapse = register_message_collapse -sys.__comfyui_manager_is_import_failed_extension = is_import_failed_extension -cm_global.register_api('cm.register_message_collapse', register_message_collapse) -cm_global.register_api('cm.is_import_failed_extension', is_import_failed_extension) - - -comfyui_manager_path = os.path.abspath(os.path.dirname(__file__)) -custom_nodes_path = os.path.abspath(os.path.join(comfyui_manager_path, "..")) -startup_script_path = os.path.join(comfyui_manager_path, "startup-scripts") -restore_snapshot_path = os.path.join(startup_script_path, "restore-snapshot.json") -git_script_path = os.path.join(comfyui_manager_path, "git_helper.py") -cm_cli_path = os.path.join(comfyui_manager_path, "cm-cli.py") -pip_overrides_path = os.path.join(comfyui_manager_path, "pip_overrides.json") - - -cm_global.pip_overrides = {} -if os.path.exists(pip_overrides_path): - with open(pip_overrides_path, 'r', encoding="UTF-8", errors="ignore") as json_file: - cm_global.pip_overrides = json.load(json_file) - cm_global.pip_overrides['numpy'] = 'numpy<2' - - -def remap_pip_package(pkg): - if pkg in cm_global.pip_overrides: - res = cm_global.pip_overrides[pkg] - print(f"[ComfyUI-Manager] '{pkg}' is remapped to '{res}'") - return res - else: - return pkg - - -std_log_lock = threading.Lock() - - -class TerminalHook: - def __init__(self): - self.hooks = {} - - def add_hook(self, k, v): - self.hooks[k] = v - - def remove_hook(self, k): - if k in self.hooks: - del self.hooks[k] - - def write_stderr(self, msg): - for v in self.hooks.values(): - try: - v.write_stderr(msg) - except Exception: - pass - - def write_stdout(self, msg): - for v in self.hooks.values(): - try: - v.write_stdout(msg) - except Exception: - pass - - -terminal_hook = TerminalHook() -sys.__comfyui_manager_terminal_hook = terminal_hook - - -def handle_stream(stream, prefix): - stream.reconfigure(encoding=locale.getpreferredencoding(), errors='replace') - for msg in stream: - if prefix == '[!]' and ('it/s]' in msg or 's/it]' in msg) and ('%|' in msg or 'it [' in msg): - if msg.startswith('100%'): - print('\r' + msg, end="", file=sys.stderr), - else: - print('\r' + msg[:-1], end="", 
file=sys.stderr), - else: - if prefix == '[!]': - print(prefix, msg, end="", file=sys.stderr) - else: - print(prefix, msg, end="") - - -def process_wrap(cmd_str, cwd_path, handler=None, env=None): - process = subprocess.Popen(cmd_str, cwd=cwd_path, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, bufsize=1) - - if handler is None: - handler = handle_stream - - stdout_thread = threading.Thread(target=handler, args=(process.stdout, "")) - stderr_thread = threading.Thread(target=handler, args=(process.stderr, "[!]")) - - stdout_thread.start() - stderr_thread.start() - - stdout_thread.join() - stderr_thread.join() - - return process.wait() - - -try: - if '--port' in sys.argv: - port_index = sys.argv.index('--port') - if port_index + 1 < len(sys.argv): - port = int(sys.argv[port_index + 1]) - postfix = f"_{port}" - else: - postfix = "" - else: - postfix = "" - - # Logger setup - if enable_file_logging: - if os.path.exists(f"comfyui{postfix}.log"): - if os.path.exists(f"comfyui{postfix}.prev.log"): - if os.path.exists(f"comfyui{postfix}.prev2.log"): - os.remove(f"comfyui{postfix}.prev2.log") - os.rename(f"comfyui{postfix}.prev.log", f"comfyui{postfix}.prev2.log") - os.rename(f"comfyui{postfix}.log", f"comfyui{postfix}.prev.log") - - log_file = open(f"comfyui{postfix}.log", "w", encoding="utf-8", errors="ignore") - - log_lock = threading.Lock() - - original_stdout = sys.stdout - original_stderr = sys.stderr - - if original_stdout.encoding.lower() == 'utf-8': - write_stdout = original_stdout.write - write_stderr = original_stderr.write - else: - def wrapper_stdout(msg): - original_stdout.write(msg.encode('utf-8').decode(original_stdout.encoding, errors="ignore")) - - def wrapper_stderr(msg): - original_stderr.write(msg.encode('utf-8').decode(original_stderr.encoding, errors="ignore")) - - write_stdout = wrapper_stdout - write_stderr = wrapper_stderr - - pat_tqdm = r'\d+%.*\[(.*?)\]' - pat_import_fail = r'seconds \(IMPORT FAILED\):(.*)$' - - is_start_mode = True - - - class ComfyUIManagerLogger: - def __init__(self, is_stdout): - self.is_stdout = is_stdout - self.encoding = "utf-8" - self.last_char = '' - - def fileno(self): - try: - if self.is_stdout: - return original_stdout.fileno() - else: - return original_stderr.fileno() - except AttributeError: - # Handle error - raise ValueError("The object does not have a fileno method") - - def isatty(self): - return False - - def write(self, message): - global is_start_mode - - if any(f(message) for f in message_collapses): - return - - if is_start_mode: - match = re.search(pat_import_fail, message) - if match: - import_failed_extensions.add(match.group(1).strip()) - - if 'Starting server' in message: - is_start_mode = False - - if not self.is_stdout: - match = re.search(pat_tqdm, message) - if match: - message = re.sub(r'([#|])\d', r'\1▌', message) - message = re.sub('#', '█', message) - if '100%' in message: - self.sync_write(message) - else: - write_stderr(message) - original_stderr.flush() - else: - self.sync_write(message) - else: - self.sync_write(message) - - def sync_write(self, message, file_only=False): - with log_lock: - timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')[:-3] - if self.last_char != '\n': - log_file.write(message) - else: - log_file.write(f"[{timestamp}] {message}") - log_file.flush() - self.last_char = message if message == '' else message[-1] - - if not file_only: - with std_log_lock: - if self.is_stdout: - write_stdout(message) - original_stdout.flush() - terminal_hook.write_stderr(message) - else: - 
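                        # stderr path: echo the message to the real stderr and notify any
                        # registered terminal hooks (the timestamped log-file write happened above).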
write_stderr(message) - original_stderr.flush() - terminal_hook.write_stdout(message) - - def flush(self): - log_file.flush() - - with std_log_lock: - if self.is_stdout: - original_stdout.flush() - else: - original_stderr.flush() - - def close(self): - self.flush() - - def reconfigure(self, *args, **kwargs): - pass - - # You can close through sys.stderr.close_log() - def close_log(self): - sys.stderr = original_stderr - sys.stdout = original_stdout - log_file.close() - - def close_log(): - sys.stderr = original_stderr - sys.stdout = original_stdout - log_file.close() - - - if enable_file_logging: - sys.stdout = ComfyUIManagerLogger(True) - stderr_wrapper = ComfyUIManagerLogger(False) - sys.stderr = stderr_wrapper - - atexit.register(close_log) - else: - sys.stdout.close_log = lambda: None - stderr_wrapper = None - - - class LoggingHandler(logging.Handler): - def emit(self, record): - global is_start_mode - - message = record.getMessage() - - if is_start_mode: - match = re.search(pat_import_fail, message) - if match: - import_failed_extensions.add(match.group(1).strip()) - - if 'Starting server' in message: - is_start_mode = False - - if stderr_wrapper: - stderr_wrapper.sync_write(message+'\n', file_only=True) - - - logging.getLogger().addHandler(LoggingHandler()) - - -except Exception as e: - print(f"[ComfyUI-Manager] Logging failed: {e}") - - -try: - import git -except ModuleNotFoundError: - my_path = os.path.dirname(__file__) - requirements_path = os.path.join(my_path, "requirements.txt") - - print(f"## ComfyUI-Manager: installing dependencies. (GitPython)") - try: - result = subprocess.check_output([sys.executable, '-s', '-m', 'pip', 'install', '-r', requirements_path]) - except subprocess.CalledProcessError as e: - print(f"## [ERROR] ComfyUI-Manager: Attempting to reinstall dependencies using an alternative method.") - try: - result = subprocess.check_output([sys.executable, '-s', '-m', 'pip', 'install', '--user', '-r', requirements_path]) - except subprocess.CalledProcessError as e: - print(f"## [ERROR] ComfyUI-Manager: Failed to install the GitPython package in the correct Python environment. Please install it manually in the appropriate environment. (You can seek help at https://app.element.io/#/room/%23comfyui_space%3Amatrix.org)") - -try: - import git - print(f"## ComfyUI-Manager: installing dependencies done.") -except: - # maybe we should sys.exit() here? there is at least two screens worth of error messages still being pumped after our error messages - print(f"## [ERROR] ComfyUI-Manager: GitPython package seems to be installed, but failed to load somehow. 
Make sure you have a working git client installed") - - -print("** ComfyUI startup time:", datetime.now()) -print("** Platform:", platform.system()) -print("** Python version:", sys.version) -print("** Python executable:", sys.executable) -print("** ComfyUI Path:", comfy_path) - -if enable_file_logging: - print("** Log path:", os.path.abspath('comfyui.log')) -else: - print("** Log path: file logging is disabled") - - -def read_downgrade_blacklist(): - try: - import configparser - config_path = os.path.join(os.path.dirname(__file__), "config.ini") - config = configparser.ConfigParser() - config.read(config_path) - default_conf = config['default'] - - if 'downgrade_blacklist' in default_conf: - items = default_conf['downgrade_blacklist'].split(',') - items = [x.strip() for x in items if x != ''] - cm_global.pip_downgrade_blacklist += items - cm_global.pip_downgrade_blacklist = list(set(cm_global.pip_downgrade_blacklist)) - except: - pass - - -read_downgrade_blacklist() - - -def check_bypass_ssl(): - try: - import configparser - import ssl - config_path = os.path.join(os.path.dirname(__file__), "config.ini") - config = configparser.ConfigParser() - config.read(config_path) - default_conf = config['default'] - - if 'bypass_ssl' in default_conf and default_conf['bypass_ssl'].lower() == 'true': - print(f"[ComfyUI-Manager] WARN: Unsafe - SSL verification bypass option is Enabled. (see ComfyUI-Manager/config.ini)") - ssl._create_default_https_context = ssl._create_unverified_context # SSL certificate error fix. - except Exception: - pass - - -check_bypass_ssl() - - -# Perform install -processed_install = set() -script_list_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "startup-scripts", "install-scripts.txt") -pip_map = None - - -def get_installed_packages(): - global pip_map - - if pip_map is None: - try: - result = subprocess.check_output([sys.executable, '-m', 'pip', 'list'], universal_newlines=True) - - pip_map = {} - for line in result.split('\n'): - x = line.strip() - if x: - y = line.split() - if y[0] == 'Package' or y[0].startswith('-'): - continue - - pip_map[y[0]] = y[1] - except subprocess.CalledProcessError as e: - print(f"[ComfyUI-Manager] Failed to retrieve the information of installed pip packages.") - return set() - - return pip_map - - -def is_installed(name): - name = name.strip() - - if name.startswith('#'): - return True - - pattern = r'([^<>!=]+)([<>!=]=?)([0-9.a-zA-Z]*)' - match = re.search(pattern, name) - - if match: - name = match.group(1) - - if name in cm_global.pip_blacklist: - return True - - if name in cm_global.pip_downgrade_blacklist: - pips = get_installed_packages() - - if match is None: - if name in pips: - return True - elif match.group(2) in ['<=', '==', '<']: - if name in pips: - if StrictVersion(pips[name]) >= StrictVersion(match.group(3)): - print(f"[ComfyUI-Manager] skip black listed pip installation: '{name}'") - return True - - pkg = get_installed_packages().get(name.lower()) - if pkg is None: - return False # update if not installed - - if match is None: - return True # don't update if version is not specified - - if match.group(2) in ['>', '>=']: - if StrictVersion(pkg) < StrictVersion(match.group(3)): - return False - elif StrictVersion(pkg) > StrictVersion(match.group(3)): - print(f"[SKIP] Downgrading pip package isn't allowed: {name.lower()} (cur={pkg})") - - return True # prevent downgrade - - -if os.path.exists(restore_snapshot_path): - try: - cloned_repos = [] - - def msg_capture(stream, prefix): - 
stream.reconfigure(encoding=locale.getpreferredencoding(), errors='replace') - for msg in stream: - if msg.startswith("CLONE: "): - cloned_repos.append(msg[7:]) - if prefix == '[!]': - print(prefix, msg, end="", file=sys.stderr) - else: - print(prefix, msg, end="") - - elif prefix == '[!]' and ('it/s]' in msg or 's/it]' in msg) and ('%|' in msg or 'it [' in msg): - if msg.startswith('100%'): - print('\r' + msg, end="", file=sys.stderr), - else: - print('\r'+msg[:-1], end="", file=sys.stderr), - else: - if prefix == '[!]': - print(prefix, msg, end="", file=sys.stderr) - else: - print(prefix, msg, end="") - - print(f"[ComfyUI-Manager] Restore snapshot.") - new_env = os.environ.copy() - new_env["COMFYUI_PATH"] = comfy_path - - cmd_str = [sys.executable, cm_cli_path, 'restore-snapshot', restore_snapshot_path] - exit_code = process_wrap(cmd_str, custom_nodes_path, handler=msg_capture, env=new_env) - - if exit_code != 0: - print(f"[ComfyUI-Manager] Restore snapshot failed.") - else: - print(f"[ComfyUI-Manager] Restore snapshot done.") - - except Exception as e: - print(e) - print(f"[ComfyUI-Manager] Restore snapshot failed.") - - os.remove(restore_snapshot_path) - - -def execute_lazy_install_script(repo_path, executable): - global processed_install - - install_script_path = os.path.join(repo_path, "install.py") - requirements_path = os.path.join(repo_path, "requirements.txt") - - if os.path.exists(requirements_path): - print(f"Install: pip packages for '{repo_path}'") - with open(requirements_path, "r") as requirements_file: - for line in requirements_file: - package_name = remap_pip_package(line.strip()) - if package_name and not is_installed(package_name): - if '--index-url' in package_name: - s = package_name.split('--index-url') - install_cmd = [sys.executable, "-m", "pip", "install", s[0].strip(), '--index-url', s[1].strip()] - else: - install_cmd = [sys.executable, "-m", "pip", "install", package_name] - - process_wrap(install_cmd, repo_path) - - if os.path.exists(install_script_path) and f'{repo_path}/install.py' not in processed_install: - processed_install.add(f'{repo_path}/install.py') - print(f"Install: install script for '{repo_path}'") - install_cmd = [executable, "install.py"] - - new_env = os.environ.copy() - new_env["COMFYUI_PATH"] = comfy_path - process_wrap(install_cmd, repo_path, env=new_env) - - -def execute_lazy_cnr_switch(target, zip_url, from_path, to_path, no_deps, custom_nodes_path): - import uuid - import shutil - - # 1. download - archive_name = f"CNR_temp_{str(uuid.uuid4())}.zip" # should be unpredictable name - security precaution - download_path = os.path.join(custom_nodes_path, archive_name) - download_url(zip_url, custom_nodes_path, archive_name) - - # 2. extract files into @ - extracted = extract_package_as_zip(download_path, from_path) - os.remove(download_path) - - if extracted is None: - if len(os.listdir(from_path)) == 0: - shutil.rmtree(from_path) - - print(f'Empty archive file: {target}') - return False - - - # 3. calculate garbage files (.tracking - extracted) - tracking_info_file = os.path.join(from_path, '.tracking') - prev_files = set() - with open(tracking_info_file, 'r') as f: - for line in f: - prev_files.add(line.strip()) - garbage = prev_files.difference(extracted) - garbage = [os.path.join(custom_nodes_path, x) for x in garbage] - - # 4-1. remove garbage files - for x in garbage: - if os.path.isfile(x): - os.remove(x) - - # 4-2. remove garbage dir if empty - for x in garbage: - if os.path.isdir(x): - if not os.listdir(x): - os.rmdir(x) - - # 5. 
rename dir name @ ==> @ - print(f"'{from_path}' is moved to '{to_path}'") - shutil.move(from_path, to_path) - - # 6. create .tracking file - tracking_info_file = os.path.join(to_path, '.tracking') - with open(tracking_info_file, "w", encoding='utf-8') as file: - file.write('\n'.join(list(extracted))) - - -def execute_migration(moves): - import shutil - for x in moves: - if os.path.exists(x[0]) and not os.path.exists(x[1]): - shutil.move(x[0], x[1]) - print(f"[ComfyUI-Manager] MIGRATION: '{x[0]}' -> '{x[1]}'") - - -# Check if script_list_path exists -if os.path.exists(script_list_path): - print("\n#######################################################################") - print("[ComfyUI-Manager] Starting dependency installation/(de)activation for the extension\n") - - executed = set() - # Read each line from the file and convert it to a list using eval - with open(script_list_path, 'r', encoding="UTF-8", errors="ignore") as file: - for line in file: - if line in executed: - continue - - executed.add(line) - - try: - script = ast.literal_eval(line) - - if script[1].startswith('#') and script[1] != '#FORCE': - if script[1] == "#LAZY-INSTALL-SCRIPT": - execute_lazy_install_script(script[0], script[2]) - - elif script[1] == "#LAZY-CNR-SWITCH-SCRIPT": - execute_lazy_cnr_switch(script[0], script[2], script[3], script[4], script[5], script[6]) - execute_lazy_install_script(script[3], script[7]) - - elif script[1] == "#LAZY-MIGRATION": - execute_migration(script[2]) - - elif os.path.exists(script[0]): - if script[1] == "#FORCE": - del script[1] - else: - if 'pip' in script[1:] and 'install' in script[1:] and is_installed(script[-1]): - continue - - print(f"\n## ComfyUI-Manager: EXECUTE => {script[1:]}") - print(f"\n## Execute install/(de)activation script for '{script[0]}'") - - new_env = os.environ.copy() - new_env["COMFYUI_PATH"] = comfy_path - exit_code = process_wrap(script[1:], script[0], env=new_env) - - if exit_code != 0: - print(f"install/(de)activation script failed: {script[0]}") - else: - print(f"\n## ComfyUI-Manager: CANCELED => {script[1:]}") - - except Exception as e: - print(f"[ERROR] Failed to execute install/(de)activation script: {line} / {e}") - - # Remove the script_list_path file - if os.path.exists(script_list_path): - os.remove(script_list_path) - - print("\n[ComfyUI-Manager] Startup script completed.") - print("#######################################################################\n") - -del processed_install -del pip_map - - -def check_windows_event_loop_policy(): - try: - import configparser - config_path = os.path.join(os.path.dirname(__file__), "config.ini") - config = configparser.ConfigParser() - config.read(config_path) - default_conf = config['default'] - - if 'windows_selector_event_loop_policy' in default_conf and default_conf['windows_selector_event_loop_policy'].lower() == 'true': - try: - import asyncio - import asyncio.windows_events - asyncio.set_event_loop_policy(asyncio.windows_events.WindowsSelectorEventLoopPolicy()) - print(f"[ComfyUI-Manager] Windows event loop policy mode enabled") - except Exception as e: - print(f"[ComfyUI-Manager] WARN: Windows initialization fail: {e}") - except Exception: - pass - - -if platform.system() == 'Windows': - check_windows_event_loop_policy() diff --git a/scripts/colab-dependencies.py b/scripts/colab-dependencies.py deleted file mode 100644 index d5a70ed6..00000000 --- a/scripts/colab-dependencies.py +++ /dev/null @@ -1,39 +0,0 @@ -import os -import subprocess - - -def 
get_enabled_subdirectories_with_files(base_directory):
-    subdirs_with_files = []
-    for subdir in os.listdir(base_directory):
-        try:
-            full_path = os.path.join(base_directory, subdir)
-            if os.path.isdir(full_path) and not subdir.endswith(".disabled") and not subdir.startswith('.') and subdir != '__pycache__':
-                print(f"## Install dependencies for '{subdir}'")
-                requirements_file = os.path.join(full_path, "requirements.txt")
-                install_script = os.path.join(full_path, "install.py")
-
-                if os.path.exists(requirements_file) or os.path.exists(install_script):
-                    subdirs_with_files.append((full_path, requirements_file, install_script))
-        except Exception as e:
-            print(f"EXCEPTION During Dependencies INSTALL on '{subdir}':\n{e}")
-
-    return subdirs_with_files
-
-
-def install_requirements(requirements_file_path):
-    if os.path.exists(requirements_file_path):
-        subprocess.run(["pip", "install", "-r", requirements_file_path])
-
-
-def run_install_script(install_script_path):
-    if os.path.exists(install_script_path):
-        subprocess.run(["python", install_script_path])
-
-
-custom_nodes_directory = "custom_nodes"
-subdirs_with_files = get_enabled_subdirectories_with_files(custom_nodes_directory)
-
-
-for subdir, requirements_file, install_script in subdirs_with_files:
-    install_requirements(requirements_file)
-    run_install_script(install_script)
diff --git a/scripts/install-comfyui-venv-linux.sh b/scripts/install-comfyui-venv-linux.sh
deleted file mode 100755
index fecf3d5b..00000000
--- a/scripts/install-comfyui-venv-linux.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-git clone https://github.com/comfyanonymous/ComfyUI
-cd ComfyUI/custom_nodes
-git clone https://github.com/ltdrdata/ComfyUI-Manager
-cd ..
-python -m venv venv
-source venv/bin/activate
-python -m pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu121
-python -m pip install -r requirements.txt
-python -m pip install -r custom_nodes/ComfyUI-Manager/requirements.txt
-cd ..
-echo "#!/bin/bash" > run_gpu.sh
-echo "cd ComfyUI" >> run_gpu.sh
-echo "source venv/bin/activate" >> run_gpu.sh
-echo "python main.py --preview-method auto" >> run_gpu.sh
-chmod +x run_gpu.sh
-
-echo "#!/bin/bash" > run_cpu.sh
-echo "cd ComfyUI" >> run_cpu.sh
-echo "source venv/bin/activate" >> run_cpu.sh
-echo "python main.py --preview-method auto --cpu" >> run_cpu.sh
-chmod +x run_cpu.sh
diff --git a/scripts/install-comfyui-venv-win.bat b/scripts/install-comfyui-venv-win.bat
deleted file mode 100755
index 2789ef6d..00000000
--- a/scripts/install-comfyui-venv-win.bat
+++ /dev/null
@@ -1,17 +0,0 @@
-git clone https://github.com/comfyanonymous/ComfyUI
-cd ComfyUI/custom_nodes
-git clone https://github.com/ltdrdata/ComfyUI-Manager
-cd ..
-python -m venv venv
-call venv/Scripts/activate
-python -m pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu121
-python -m pip install -r requirements.txt
-python -m pip install -r custom_nodes/ComfyUI-Manager/requirements.txt
-cd ..
-echo "cd ComfyUI" >> run_gpu.bat -echo "call venv/Scripts/activate" >> run_gpu.bat -echo "python main.py" >> run_gpu.bat - -echo "cd ComfyUI" >> run_cpu.bat -echo "call venv/Scripts/activate" >> run_cpu.bat -echo "python main.py --cpu" >> run_cpu.bat diff --git a/scripts/install-manager-for-portable-version.bat b/scripts/install-manager-for-portable-version.bat deleted file mode 100644 index 7b067dfd..00000000 --- a/scripts/install-manager-for-portable-version.bat +++ /dev/null @@ -1,2 +0,0 @@ -.\python_embeded\python.exe -s -m pip install gitpython -.\python_embeded\python.exe -c "import git; git.Repo.clone_from('https://github.com/ltdrdata/ComfyUI-Manager', './ComfyUI/custom_nodes/ComfyUI-Manager')" diff --git a/scripts/update-fix.py b/scripts/update-fix.py deleted file mode 100644 index d2ac1007..00000000 --- a/scripts/update-fix.py +++ /dev/null @@ -1,12 +0,0 @@ -import git - -commit_hash = "a361cc1" - -repo = git.Repo('.') - -if repo.is_dirty(): - repo.git.stash() - -repo.git.update_ref("refs/remotes/origin/main", commit_hash) -repo.remotes.origin.fetch() -repo.git.pull("origin", "main") diff --git a/snapshots/the_snapshot_files_are_located_here b/snapshots/the_snapshot_files_are_located_here deleted file mode 100644 index e69de29b..00000000