Merge pull request #1 from Comfy-Org/main

Bring up to date with official
David 2025-12-11 12:26:59 +08:00, committed by GitHub
commit 975642c9bf
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
16 changed files with 36333 additions and 7362 deletions

File diff suppressed because it is too large

File diff suppressed because it is too large

github-stats-cache.json (new file, 22094 lines)

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -44,7 +44,7 @@ import manager_migration
from node_package import InstalledNodePackage
-version_code = [3, 38]
+version_code = [3, 38, 1]
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
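With version_code = [3, 38, 1], version_str evaluates to "V3.38.1".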

View File

@@ -93,7 +93,7 @@ def check_legacy_backup(manager_files_path):
# Notice board output
add_startup_notice(
-"Legacy ComfyUI-Manager data backup exists. Please verify and remove when no longer needed.",
+"Legacy ComfyUI-Manager data backup exists. Please verify and remove when no longer needed. See terminal for details.",
level='info'
)

View File

@@ -55,7 +55,7 @@ def get_pip_cmd(force_uv=False):
subprocess.check_output(test_cmd, stderr=subprocess.DEVNULL, timeout=5)
return [sys.executable] + (['-s'] if embedded else []) + ['-m', 'pip']
except Exception:
-logging.warning("[ComfyUI-Manager] python -m pip not available. Falling back to uv.")
+logging.warning("[ComfyUI-Manager] `python -m pip` not available. Falling back to `uv`.")
# Try uv (either forced or pip failed)
import shutil
@@ -64,19 +64,19 @@ def get_pip_cmd(force_uv=False):
try:
test_cmd = [sys.executable] + (['-s'] if embedded else []) + ['-m', 'uv', '--version']
subprocess.check_output(test_cmd, stderr=subprocess.DEVNULL, timeout=5)
-logging.info("[ComfyUI-Manager] Using uv as Python module for pip operations.")
+logging.info("[ComfyUI-Manager] Using `uv` as Python module for pip operations.")
return [sys.executable] + (['-s'] if embedded else []) + ['-m', 'uv', 'pip']
except Exception:
pass
# Try standalone uv
if shutil.which('uv'):
-logging.info("[ComfyUI-Manager] Using standalone uv for pip operations.")
+logging.info("[ComfyUI-Manager] Using standalone `uv` for pip operations.")
return ['uv', 'pip']
# Nothing worked
-logging.error("[ComfyUI-Manager] Neither python -m pip nor uv are available. Cannot proceed with package operations.")
-raise Exception("Neither pip nor uv are available for package management")
+logging.error("[ComfyUI-Manager] Neither `python -m pip` nor `uv` are available. Cannot proceed with package operations.")
+raise Exception("Neither `pip` nor `uv` are available for package management")
def make_pip_cmd(cmd):
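As a usage sketch (the install call is an assumption; only the return values of get_pip_cmd appear in this hunk), the resolved command prefix would typically be combined with ordinary pip arguments:

import subprocess

# get_pip_cmd() returns one of, per the fallback chain above:
#   [sys.executable, '-m', 'pip']         (plain pip; '-s' inserted when embedded)
#   [sys.executable, '-m', 'uv', 'pip']   (uv invoked as a Python module)
#   ['uv', 'pip']                         (standalone uv binary)
install_cmd = get_pip_cmd() + ['install', 'GitPython']
subprocess.check_call(install_cmd)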

View File

@@ -1,25 +1,264 @@
-import json
-import argparse
-def check_json_syntax(file_path):
#!/usr/bin/env python3
"""JSON Entry Validator
Validates JSON entries based on content structure.
Validation rules based on JSON content:
- {"custom_nodes": [...]}: Validates required fields (author, title, reference, files, install_type, description)
- {"models": [...]}: Validates JSON syntax only (no required fields)
- Other JSON structures: Validates JSON syntax only
Git repository URL validation (for custom_nodes):
1. URLs must NOT end with .git
2. URLs must follow format: https://github.com/{author}/{reponame}
3. .py and .js files are exempt from this check
Supported formats:
- Array format: [{...}, {...}]
- Object format: {"custom_nodes": [...]} or {"models": [...]}
"""
import json
import re
import sys
from pathlib import Path
from typing import Dict, List, Tuple
# Required fields for each entry type
REQUIRED_FIELDS_CUSTOM_NODE = ['author', 'title', 'reference', 'files', 'install_type', 'description']
REQUIRED_FIELDS_MODEL = [] # model-list.json doesn't require field validation
# Pattern for valid GitHub repository URL (without .git suffix)
GITHUB_REPO_PATTERN = re.compile(r'^https://github\.com/[^/]+/[^/]+$')
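# Illustrative URLs against this pattern (assumed examples):
#   https://github.com/someauthor/somerepo         -> matches
#   https://github.com/someauthor/somerepo/tree/x  -> no match (extra path segment)
#   https://github.com/someauthor/somerepo.git     -> matches the regex, but the explicit
#     .git check in validate_git_repo_urls() rejects it before this pattern is applied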
def get_entry_context(entry: Dict) -> str:
"""Get identifying information from entry for error messages
Args:
entry: JSON entry
Returns:
String with author and reference info
"""
parts = []
if 'author' in entry:
parts.append(f"author={entry['author']}")
if 'reference' in entry:
parts.append(f"ref={entry['reference']}")
if 'title' in entry:
parts.append(f"title={entry['title']}")
if parts:
return " | ".join(parts)
else:
# No identifying info - show actual entry content (truncated)
import json
entry_str = json.dumps(entry, ensure_ascii=False)
if len(entry_str) > 100:
entry_str = entry_str[:100] + "..."
return f"content={entry_str}"
def validate_required_fields(entry: Dict, entry_index: int, required_fields: List[str]) -> List[str]:
"""Validate that all required fields are present
Args:
entry: JSON entry to validate
entry_index: Index of entry in array (for error reporting)
required_fields: List of required field names
Returns:
List of error descriptions (without entry prefix/context)
"""
errors = []
for field in required_fields:
if field not in entry:
errors.append(f"Missing required field '{field}'")
elif entry[field] is None:
errors.append(f"Field '{field}' is null")
elif isinstance(entry[field], str) and not entry[field].strip():
errors.append(f"Field '{field}' is empty")
elif field == 'files' and not entry[field]: # Empty array
errors.append(f"Field 'files' is empty array")
return errors
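# For example (hypothetical entry): validate_required_fields({"author": "x", "title": ""}, 1, REQUIRED_FIELDS_CUSTOM_NODE)
# returns ["Field 'title' is empty", "Missing required field 'reference'", ...] for the remaining fields.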
def validate_git_repo_urls(entry: Dict, entry_index: int) -> List[str]:
"""Validate git repository URLs in 'files' array
Requirements:
- Git repo URLs must NOT end with .git
- Must follow format: https://github.com/{author}/{reponame}
- .py and .js files are exempt
Args:
entry: JSON entry to validate
entry_index: Index of entry in array (for error reporting)
Returns:
List of error descriptions (without entry prefix/context)
"""
errors = []
if 'files' not in entry or not isinstance(entry['files'], list):
return errors
for file_url in entry['files']:
if not isinstance(file_url, str):
continue
# Skip .py and .js files - they're exempt from git repo validation
if file_url.endswith('.py') or file_url.endswith('.js'):
continue
# Check if it's a GitHub URL (likely a git repo)
if 'github.com' in file_url:
# Error if URL ends with .git
if file_url.endswith('.git'):
errors.append(f"Git repo URL must NOT end with .git: {file_url}")
continue
# Validate format: https://github.com/{author}/{reponame}
if not GITHUB_REPO_PATTERN.match(file_url):
errors.append(f"Invalid git repo URL format (expected https://github.com/author/reponame): {file_url}")
return errors
def validate_entry(entry: Dict, entry_index: int, required_fields: List[str]) -> List[str]:
"""Validate a single JSON entry
Args:
entry: JSON entry to validate
entry_index: Index of entry in array (for error reporting)
required_fields: List of required field names
Returns:
List of error messages (empty if valid)
"""
errors = []
# Check required fields
errors.extend(validate_required_fields(entry, entry_index, required_fields))
# Check git repository URLs
errors.extend(validate_git_repo_urls(entry, entry_index))
return errors
def validate_json_file(file_path: str) -> Tuple[bool, List[str]]:
"""Validate JSON file containing entries
Args:
file_path: Path to JSON file
Returns:
Tuple of (is_valid, error_messages)
"""
errors = []
# Check file exists
path = Path(file_path)
if not path.exists():
return False, [f"File not found: {file_path}"]
# Load JSON
try:
-with open(file_path, 'r', encoding='utf-8') as file:
-json_str = file.read()
-json.loads(json_str)
-print(f"[ OK ] {file_path}")
-except UnicodeDecodeError as e:
-print(f"Unicode decode error: {e}")
with open(path, 'r', encoding='utf-8') as f:
data = json.load(f)
except json.JSONDecodeError as e:
-print(f"[FAIL] {file_path}\n\n {e}\n")
-except FileNotFoundError:
-print(f"[FAIL] {file_path}\n\n File not found\n")
return False, [f"Invalid JSON: {e}"]
except Exception as e:
return False, [f"Error reading file: {e}"]
# Determine required fields based on JSON content
required_fields = []
# Validate structure - support both array and object formats
entries_to_validate = []
if isinstance(data, list):
# Direct array format: [{...}, {...}]
entries_to_validate = data
elif isinstance(data, dict):
# Object format: {"custom_nodes": [...]} or {"models": [...]}
# Determine validation based on keys
if 'custom_nodes' in data and isinstance(data['custom_nodes'], list):
required_fields = REQUIRED_FIELDS_CUSTOM_NODE
entries_to_validate = data['custom_nodes']
elif 'models' in data and isinstance(data['models'], list):
required_fields = REQUIRED_FIELDS_MODEL
entries_to_validate = data['models']
else:
# Other JSON structures (extension-node-map.json, etc.) - just validate JSON syntax
return True, []
else:
return False, ["JSON root must be either an array or an object containing arrays"]
# Validate each entry
for idx, entry in enumerate(entries_to_validate, start=1):
if not isinstance(entry, dict):
# Show actual value for type errors
entry_str = json.dumps(entry, ensure_ascii=False) if not isinstance(entry, str) else repr(entry)
if len(entry_str) > 150:
entry_str = entry_str[:150] + "..."
errors.append(f"\n❌ Entry #{idx}: Must be an object, got {type(entry).__name__}")
errors.append(f" Actual value: {entry_str}")
continue
entry_errors = validate_entry(entry, idx, required_fields)
if entry_errors:
# Group errors by entry with context
context = get_entry_context(entry)
errors.append(f"\n❌ Entry #{idx} ({context}):")
for error in entry_errors:
errors.append(f" - {error}")
is_valid = len(errors) == 0
return is_valid, errors
def main():
-parser = argparse.ArgumentParser(description="JSON File Syntax Checker")
-parser.add_argument("file_path", type=str, help="Path to the JSON file for syntax checking")
-args = parser.parse_args()
-check_json_syntax(args.file_path)
"""Main entry point"""
if len(sys.argv) < 2:
print("Usage: python json-checker.py <json-file>")
print("\nValidates JSON entries based on content:")
print(" - {\"custom_nodes\": [...]}: Validates required fields (author, title, reference, files, install_type, description)")
print(" - {\"models\": [...]}: Validates JSON syntax only (no required fields)")
print(" - Other JSON structures: Validates JSON syntax only")
print("\nGit repo URL validation (for custom_nodes):")
print(" - URLs must NOT end with .git")
print(" - URLs must follow: https://github.com/{author}/{reponame}")
sys.exit(1)
file_path = sys.argv[1]
is_valid, errors = validate_json_file(file_path)
if is_valid:
print(f"{file_path}: Validation passed")
sys.exit(0)
else:
print(f"Validating: {file_path}")
print("=" * 60)
print("❌ Validation failed!\n")
print("Errors:")
# Count actual errors (lines starting with " -")
error_count = sum(1 for e in errors if e.strip().startswith('-'))
for error in errors:
# Don't add ❌ prefix to grouped entries (they already have it)
if error.strip().startswith('❌'):
print(error)
else:
print(error)
print(f"\nTotal errors: {error_count}")
sys.exit(1)
-if __name__ == "__main__":
if __name__ == '__main__':
main()
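As a quick usage sketch (file name and entry values are hypothetical; the expected output line follows main() above):

import json
import subprocess

# Minimal entry that satisfies every required field and the git repo URL rules.
entry = {
    "author": "someauthor",
    "title": "Some Node Pack",
    "reference": "https://github.com/someauthor/some-node-pack",
    "files": ["https://github.com/someauthor/some-node-pack"],
    "install_type": "git-clone",
    "description": "Example entry."
}
with open("sample-node-list.json", "w", encoding="utf-8") as f:
    json.dump({"custom_nodes": [entry]}, f)

# Expected: exit code 0 and "sample-node-list.json: Validation passed"
subprocess.run(["python", "json-checker.py", "sample-node-list.json"], check=True)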

View File

@@ -1,5 +1,196 @@
{
"custom_nodes": [
{
"author": "anilstream",
"title": "ComfyUI-NanoBananaPro",
"reference": "https://github.com/anilstream/ComfyUI-NanoBananaPro",
"files": [
"https://github.com/anilstream/ComfyUI-NanoBananaPro"
],
"install_type": "git-clone",
"description": "ComfyUI node implementing basic functionality with NanoBananaBasicNode. (Description by CC)"
},
{
"author": "Toxic1228",
"title": "Eleven-labs-comfyui-sts",
"reference": "https://github.com/Toxic1228/Eleven-labs-comfyui-sts",
"files": [
"https://github.com/Toxic1228/Eleven-labs-comfyui-sts"
],
"install_type": "git-clone",
"description": "ComfyUI integration node for Eleven Labs text-to-speech service (requires API key). (Description by CC)"
},
{
"author": "NeoTech",
"title": "comfyui-laserprep",
"reference": "https://github.com/NeoTech/comfyui-laserprep",
"files": [
"https://github.com/NeoTech/comfyui-laserprep"
],
"install_type": "git-clone",
"description": "ComfyUI node implementing laser preparation functionality with LaserPrep node. (Description by CC)"
},
{
"author": "Enferlain",
"title": "ComfyUI-extra-schedulers [WIP]",
"reference": "https://github.com/Enferlain/ComfyUI-extra-schedulers",
"files": [
"https://github.com/Enferlain/ComfyUI-extra-schedulers"
],
"install_type": "git-clone",
"description": "ComfyUI custom nodes providing additional scheduler implementations for advanced sampling control. (Description by CC)\nNOTE: The files in the repo are not organized."
},
{
"author": "tiange-tree",
"title": "BLUEAI_ComfyUI_OpenAI",
"reference": "https://github.com/tiange-tree/BLUEAI_ComfyUI_OpenAI",
"files": [
"https://github.com/tiange-tree/BLUEAI_ComfyUI_OpenAI"
],
"install_type": "git-clone",
"description": "NODES: BLUEAI_OpenAI_Node"
},
{
"author": "nestflow",
"title": "ComfyUI-WanPlus",
"reference": "https://github.com/nestflow/ComfyUI-WanPlus",
"files": [
"https://github.com/nestflow/ComfyUI-WanPlus"
],
"install_type": "git-clone",
"description": "ComfyUI nodes for video frame manipulation and image-to-video conversion. (Description by CC)"
},
{
"author": "twdockery",
"title": "ComfyUI_Prompt_Batch_Generator",
"reference": "https://github.com/twdockery/ComfyUI_Prompt_Batch_Generator",
"files": [
"https://github.com/twdockery/ComfyUI_Prompt_Batch_Generator"
],
"install_type": "git-clone",
"description": "Custom nodes for batch image generation with Stable Diffusion 1.5, optimized for low VRAM systems. (Description by CC)"
},
{
"author": "tuxiansheng-ld",
"title": "Comfyui-tuxiansheng-nodes",
"reference": "https://github.com/tuxiansheng-ld/Comfyui-tuxiansheng-nodes",
"files": [
"https://github.com/tuxiansheng-ld/Comfyui-tuxiansheng-nodes"
],
"install_type": "git-clone",
"description": "NODES: StringToListNode"
},
{
"author": "krakenunbound",
"title": "Kraken Discord Bot",
"id": "kraken-discord-bot",
"reference": "https://github.com/krakenunbound/kraken-discord-bot",
"files": [
"https://github.com/krakenunbound/kraken-discord-bot"
],
"install_type": "git-clone",
"description": "All-in-one Discord bot node for AI image generation. Simple setup - just add token, select model, and queue. Includes style presets, rate limiting, and queue management."
},
{
"author": "quinteroac",
"title": "comfyui_api_executor_nodes",
"reference": "https://github.com/quinteroac/comfyui_api_executor_nodes",
"files": [
"https://github.com/quinteroac/comfyui_api_executor_nodes"
],
"install_type": "git-clone",
"description": "Custom nodes for ComfyUI that enable workflow execution via API (internal or external), as well as input/output handling and workflow selection."
},
{
"author": "Chang-Jin-Lee",
"title": "ComfyUI-PromptMixer-AI [WIP]",
"reference": "https://github.com/Chang-Jin-Lee/ComfyUI-PromptMixer-AI",
"files": [
"https://github.com/Chang-Jin-Lee/ComfyUI-PromptMixer-AI"
],
"install_type": "git-clone",
"description": "ComfyUI custom node collection for unified control of checkpoints, steps, CFG, samplers, LoRA and prompt parameters with local LLM integration. (Description by CC)\nNOTE: The files in the repo are not organized."
},
{
"author": "leacvikas0",
"title": "ComfyUI-Presence [WIP]",
"reference": "https://github.com/leacvikas0/ComfyUI-Presence",
"files": [
"https://github.com/leacvikas0/ComfyUI-Presence"
],
"install_type": "git-clone",
"description": "NODES: BeautifulTextNode, FluxAdaptiveInjector, InspectNode, PresenceDirector, PresenceDirectorFireworks, PresenceDirectorVertex, PresenceSaver, UnaliverBundlePreview, UnaliverNode, UnaliverPlanner, UnaliverStepIterato, ...\nNOTE: The files in the repo are not organized."
},
{
"author": "Kraven1109",
"title": "ComfyUI-Llama [NAME CONFLICT]",
"reference": "https://github.com/Kraven1109/ComfyUI-Llama",
"files": [
"https://github.com/Kraven1109/ComfyUI-Llama"
],
"install_type": "git-clone",
"description": "Lightweight ComfyUI plugin exposing llama.cpp-based one-shot Qwen VQA nodes."
},
{
"author": "xiaoxidashen",
"title": "comfyui_my_utils",
"reference": "https://github.com/xiaoxidashen/comfyui_my_utils",
"files": [
"https://github.com/xiaoxidashen/comfyui_my_utils"
],
"install_type": "git-clone",
"description": "Guide and utilities for creating ComfyUI custom nodes with image/video preview functionality. (Description by CC)"
},
{
"author": "agavesunset",
"title": "ComfyUI_LoRA_Tracker",
"reference": "https://github.com/agavesunset/ComfyUI_LoRA_Tracker",
"files": [
"https://github.com/agavesunset/ComfyUI_LoRA_Tracker"
],
"install_type": "git-clone",
"description": "ComfyUI node for tracking and displaying LoRA parameters. (Description by CC)"
},
{
"author": "SleazySleaze",
"title": "aesthetic-persona-comfyui-node",
"reference": "https://github.com/SleazySleaze/aesthetic-persona-comfyui-node",
"files": [
"https://github.com/SleazySleaze/aesthetic-persona-comfyui-node"
],
"install_type": "git-clone",
"description": "Node providing aesthetic persona parsing capabilities for ComfyUI. (Description by CC)"
},
{
"author": "xtanqn",
"title": "comfyui-xishen [WIP]",
"reference": "https://github.com/xtanqn/comfyui-xishen",
"files": [
"https://github.com/xtanqn/comfyui-xishen"
],
"install_type": "git-clone",
"description": "A custom node for ComfyUI that generates random numbers as text output.\nNOTE: The files in the repo are not organized."
},
{
"author": "heyburns",
"title": "ComfyUI-Logic-Redux [WIP]",
"reference": "https://github.com/heyburns/ComfyUI-Logic-Redux",
"files": [
"https://github.com/heyburns/ComfyUI-Logic-Redux"
],
"install_type": "git-clone",
"description": "Validation-friendly rewrite of ComfyUI Logic nodes with drop-in compatibility, featuring compare, int/float/bool/string pass-through, ternary logic, and debug nodes. (Description by CC)\nNOTE: The files in the repo are not organized."
},
{
"author": "Mohamed-Sakr",
"title": "ComfyUI-SimpleMarkdown [UNSAFE]",
"reference": "https://github.com/Mohamed-Sakr/ComfyUI-SimpleMarkdown",
"files": [
"https://github.com/Mohamed-Sakr/ComfyUI-SimpleMarkdown"
],
"install_type": "git-clone",
"description": "A simple markdown node for ComfyUI[w/This nodepack has a frontend vulnerability.]"
},
{
"author": "starsFriday",
"title": "ComfyUI-Tracker-Person [WIP]",
@@ -320,16 +511,6 @@
"install_type": "git-clone",
"description": "Integrated Qwen-Image node for ComfyUI with all-in-one model loading, 4 LoRA slots, memory optimization via BlockSwap reducing VRAM usage by 30-60%, and multiple quantization options.\nNOTE: The files in the repo are not organized."
},
-{
-"author": "nohikomiso",
-"title": "ComfyUI-ImageFolderPicker [UNSAFE]",
-"reference": "https://github.com/nohikomiso/ComfyUI-ImageFolderPicker",
-"files": [
-"https://github.com/nohikomiso/ComfyUI-ImageFolderPicker"
-],
-"install_type": "git-clone",
-"description": "Custom ComfyUI node for browsing local server folders and selecting images via thumbnail display in a grid interface. (Description by CC)[w/This nodepack has a vulnerability that allows it to retrieve a list of files from arbitrary paths.]"
-},
{
"author": "tori29umai0123",
"title": "ComfyUI-SDXLGenerateFromTextFile [UNSAFE]",
@@ -1063,16 +1244,6 @@
"install_type": "git-clone",
"description": "ComfyUI-CC-ImageLoader is an enhanced image loading node designed for ComfyUI. It is developed based on two excellent projects: ComfyUI-Thumbnails and ComfyUI_Local_Media_Manager.[w/This nodepack includes an endpoint that access files from arbitrary paths.]"
},
-{
-"author": "rzasharp79",
-"title": "ComfyUI--SolarFlare",
-"reference": "https://github.com/rzasharp79/ComfyUI--SolarFlare",
-"files": [
-"https://github.com/rzasharp79/ComfyUI--SolarFlare"
-],
-"install_type": "git-clone",
-"description": "NODES: Qwen Image, ..."
-},
{
"author": "A1rCHAN",
"title": "Eric's Prompt Enhancers for ComfyUI# Eric's Prompt Enhancers for ComfyUI",
@@ -1203,16 +1374,6 @@
"install_type": "git-clone",
"description": "NODES: Image Size Input, Date/Time based output path"
},
-{
-"author": "octapus8085",
-"title": "OpenAI-comfyui-O",
-"reference": "https://github.com/Spicely/Comfyui-File-Utils",
-"files": [
-"https://github.com/Spicely/Comfyui-File-Utils"
-],
-"install_type": "git-clone",
-"description": "This plugin provides multiple file-handling and utility nodes for ComfyUI, including: image saving, audio saving, video saving, video composition, audio-to-subtitle conversion, and random number generation nodes. These nodes not only process files but also return their absolute file paths.\nNOTE: The files in the repo are not organized.[w/This nodepack contains a node that has a vulnerability allowing write to arbitrary file paths.]"
-},
{
"author": "octapus8085",
"title": "OpenAI-comfyui-O",
@@ -4618,7 +4779,8 @@
"description": "NODES: Face Detector Selector, YC Human Parts Ultra(Advance), Color Match (YC)"
},
{
-"author": "virallover",
+"author": "maizerrr",
"title": "comfyui-code-nodes",
"reference": "https://github.com/maizerrr/comfyui-code-nodes",
"files": [
"https://github.com/maizerrr/comfyui-code-nodes"

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,5 +1,155 @@
{
"custom_nodes": [
{
"author": "mcrataobrabo",
"title": "comfyui-smart-lora-downloader - Automatically Fetch Missing LoRAs [REMOVED]",
"reference": "https://github.com/mcrataobrabo/comfyui-smart-lora-downloader",
"files": [
"https://github.com/mcrataobrabo/comfyui-smart-lora-downloader"
],
"install_type": "git-clone",
"description": "Automatically detect and download missing LoRAs for ComfyUI workflows"
},
{
"author": "KANAsho34636",
"title": "ComfyUI-NaturalSort-ImageLoader [REMOVED]",
"reference": "https://github.com/KANAsho34636/ComfyUI-NaturalSort-ImageLoader",
"files": [
"https://github.com/KANAsho34636/ComfyUI-NaturalSort-ImageLoader"
],
"install_type": "git-clone",
"description": "Custom image loader node supporting natural number sorting with multiple sort modes (natural, lexicographic, modification time, creation time, reverse natural). (Description by CC)"
},
{
"author": "johninthewinter",
"title": "comfyui-fal-flux-2-John [REMOVED]",
"reference": "https://github.com/johninthewinter/comfyui-fal-flux-2-John",
"files": [
"https://github.com/johninthewinter/comfyui-fal-flux-2-John"
],
"install_type": "git-clone",
"description": "Custom nodes for ComfyUI that integrate with fal.ai's FLUX 2 and FLUX 1 LoRA APIs for text-to-image generation."
},
{
"author": "LargeModGames",
"title": "ComfyUI LoRA Auto Downloader [REMOVED]",
"reference": "https://github.com/LargeModGames/comfyui-smart-lora-downloader",
"files": [
"https://github.com/LargeModGames/comfyui-smart-lora-downloader"
],
"install_type": "git-clone",
"description": "Automatically download missing LoRAs from CivitAI and detect missing LoRAs in workflows. Features smart directory detection and easy installation."
},
{
"author": "DiffusionWave",
"title": "PickResolution_DiffusionWave [DEPRECATED]",
"reference": "https://github.com/DiffusionWave/PickResolution_DiffusionWave",
"files": [
"https://github.com/DiffusionWave/PickResolution_DiffusionWave"
],
"install_type": "git-clone",
"description": "A custom node for ComfyUI that allows selecting a base resolution, applying a custom scaling value based on FLOAT (up to 10 decimal places), and adding an extra integer value. Outputs include both INT and FLOAT resolutions, making it perfect for you to play around with."
},
{
"author": "geltz",
"title": "ComfyUI-geltz [REMOVED]",
"reference": "https://github.com/geltz/ComfyUI-geltz",
"files": [
"https://github.com/geltz/ComfyUI-geltz"
],
"install_type": "git-clone",
"description": "Various custom nodes; guidance, latents, sampling, tokenization, etc."
},
{
"author": "anilsathyan7",
"title": "ComfyUI-Crystal-Upscaler [REMOVED]",
"reference": "https://github.com/anilsathyan7/ComfyUI-Crystal-Upscaler",
"files": [
"https://github.com/anilsathyan7/ComfyUI-Crystal-Upscaler"
],
"install_type": "git-clone",
"description": "ComfyUI custom node for image upscaling using crystal upscaling technology. (Description by CC)"
},
{
"author": "nohikomiso",
"title": "ComfyUI-ImageFolderPicker [REMOVED/UNSAFE]",
"reference": "https://github.com/nohikomiso/ComfyUI-ImageFolderPicker",
"files": [
"https://github.com/nohikomiso/ComfyUI-ImageFolderPicker"
],
"install_type": "git-clone",
"description": "Custom ComfyUI node for browsing local server folders and selecting images via thumbnail display in a grid interface. (Description by CC)[w/This nodepack has a vulnerability that allows it to retrieve a list of files from arbitrary paths.]"
},
{
"author": "rzasharp79",
"title": "ComfyUI--SolarFlare [REMOVED]",
"reference": "https://github.com/rzasharp79/ComfyUI--SolarFlare",
"files": [
"https://github.com/rzasharp79/ComfyUI--SolarFlare"
],
"install_type": "git-clone",
"description": "NODES: Qwen Image, ..."
},
{
"author": "shinich39",
"title": "comfyui-no-one-above-me [REMOVED]",
"reference": "https://github.com/shinich39/comfyui-no-one-above-me",
"files": [
"https://github.com/shinich39/comfyui-no-one-above-me"
],
"install_type": "git-clone",
"description": "Fix node to top."
},
{
"author": "octapus8085",
"title": "OpenAI-comfyui-O [REMOVED]",
"reference": "https://github.com/Spicely/Comfyui-File-Utils",
"files": [
"https://github.com/Spicely/Comfyui-File-Utils"
],
"install_type": "git-clone",
"description": "This plugin provides multiple file-handling and utility nodes for ComfyUI, including: image saving, audio saving, video saving, video composition, audio-to-subtitle conversion, and random number generation nodes. These nodes not only process files but also return their absolute file paths.\nNOTE: The files in the repo are not organized.[w/This nodepack contains a node that has a vulnerability allowing write to arbitrary file paths.]"
},
{
"author": "yemanou",
"title": "NABA Image (Gemini REST) Node [REMOVED]",
"reference": "https://github.com/yemanou/ComfyUI-NABA",
"files": [
"https://github.com/yemanou/ComfyUI-NABA"
],
"install_type": "git-clone",
"description": "Simplified Gemini 2.5 Flash Image Preview node for ComfyUI. REST-only for stability, two optional reference images, padded aspect ratio resizing (no stretching), and basic sampling controls. All extra debug layers, SDK path, multi-seed, and legacy compatibility code removed to avoid crashes."
},
{
"author": "comrender",
"title": "ComfyUI-Nano-Banana-Resizer [REMOVED]",
"reference": "https://github.com/comrender/ComfyUI-Nano-Banana-Resizer",
"files": [
"https://github.com/comrender/ComfyUI-Nano-Banana-Resizer"
],
"install_type": "git-clone",
"description": "A ComfyUI custom node that automatically calculates optimal output dimensions for Google's Nano Banana image editing model, supporting 22 aspect ratio buckets and ensuring pixel-perfect outputs without shifting or cropping."
},
{
"author": "comrender",
"title": "ComfyUI-edge-match-checker [REMOVED]",
"reference": "https://github.com/comrender/ComfyUI-edge-match-checker",
"files": [
"https://github.com/comrender/ComfyUI-edge-match-checker"
],
"install_type": "git-clone",
"description": "Node comparing two image masks or images with adjustable overlap threshold (default 95%) for detecting minor shifts and mismatches in proportions, suitable for automated post-processing validation. (Description by CC)"
},
{
"author": "comrender",
"title": "ComfyUI-gpt5_image_text [REMOVED]",
"reference": "https://github.com/comrender/ComfyUI-gpt5_image_text",
"files": [
"https://github.com/comrender/ComfyUI-gpt5_image_text"
],
"install_type": "git-clone",
"description": "A ComfyUI custom node for vision + text analysis using GPT-5 and GPT-4o with direct API key input, system prompt, temperature, max tokens, and multi-image support."
},
{
"author": "PozzettiAndrea",
"title": "ComfyUI-CameraAnalysis [REMOVED]",

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,7 +1,7 @@
[project]
name = "comfyui-manager"
description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
-version = "3.38"
+version = "3.38.1"
license = { file = "LICENSE.txt" }
dependencies = ["GitPython", "PyGithub", "matrix-nio", "transformers", "huggingface-hub>0.20", "typer", "rich", "typing-extensions", "toml", "uv", "chardet"]

View File

@@ -16,6 +16,108 @@ import sys
from urllib.parse import urlparse
from github import Github, Auth
from pathlib import Path
from typing import Set, Dict, Optional
# Scanner version for cache invalidation
SCANNER_VERSION = "2.0.11" # Multi-layer detection: class existence + display names
# Cache for extract_nodes and extract_nodes_enhanced results
_extract_nodes_cache: Dict[str, Set[str]] = {}
_extract_nodes_enhanced_cache: Dict[str, Set[str]] = {}
_file_mtime_cache: Dict[Path, float] = {}
def _get_repo_root(file_path: Path) -> Optional[Path]:
"""Find the repository root directory containing .git"""
current = file_path if file_path.is_dir() else file_path.parent
while current != current.parent:
if (current / ".git").exists():
return current
current = current.parent
return None
def _get_repo_hash(repo_path: Path) -> str:
"""Get git commit hash or fallback identifier"""
git_dir = repo_path / ".git"
if not git_dir.exists():
return ""
try:
# Read HEAD to get current commit
head_file = git_dir / "HEAD"
if head_file.exists():
head_content = head_file.read_text().strip()
if head_content.startswith("ref:"):
# HEAD points to a ref
ref_path = git_dir / head_content[5:].strip()
if ref_path.exists():
commit_hash = ref_path.read_text().strip()
return commit_hash[:16] # First 16 chars
else:
# Detached HEAD
return head_content[:16]
except:
pass
return ""
def _load_per_repo_cache(repo_path: Path) -> Optional[tuple]:
"""Load nodes and metadata from per-repo cache
Returns:
tuple: (nodes_set, metadata_dict) or None if cache invalid
"""
cache_file = repo_path / ".git" / "nodecache.json"
if not cache_file.exists():
return None
try:
with open(cache_file, 'r') as f:
cache_data = json.load(f)
# Verify scanner version
if cache_data.get('scanner_version') != SCANNER_VERSION:
return None
# Verify git hash
current_hash = _get_repo_hash(repo_path)
if cache_data.get('git_hash') != current_hash:
return None
# Return nodes and metadata
nodes = cache_data.get('nodes', [])
metadata = cache_data.get('metadata', {})
return (set(nodes) if nodes else set(), metadata)
except:
return None
def _save_per_repo_cache(repo_path: Path, all_nodes: Set[str], metadata: dict = None):
"""Save nodes and metadata to per-repo cache"""
cache_file = repo_path / ".git" / "nodecache.json"
if not cache_file.parent.exists():
return
git_hash = _get_repo_hash(repo_path)
cache_data = {
"scanner_version": SCANNER_VERSION,
"git_hash": git_hash,
"scanned_at": datetime.datetime.now().isoformat(),
"nodes": sorted(list(all_nodes)),
"metadata": metadata if metadata else {}
}
try:
with open(cache_file, 'w') as f:
json.dump(cache_data, f, indent=2)
except:
pass # Silently fail - cache is optional
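# A resulting .git/nodecache.json looks roughly like this (values illustrative):
# {
#   "scanner_version": "2.0.11",
#   "git_hash": "9fceb02d0ae598e9",
#   "scanned_at": "2025-12-11T12:26:59.000000",
#   "nodes": ["MyNodeA", "MyNodeB"],
#   "metadata": {"title": "Some Pack"}
# }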
def download_url(url, dest_folder, filename=None):
@@ -51,11 +153,12 @@ Examples:
# Standard mode
python3 scanner.py
python3 scanner.py --skip-update
+python3 scanner.py --skip-all --force-rescan
# Scan-only mode
python3 scanner.py --scan-only temp-urls-clean.list
python3 scanner.py --scan-only urls.list --temp-dir /custom/temp
-python3 scanner.py --scan-only urls.list --skip-update
+python3 scanner.py --scan-only urls.list --skip-update --force-rescan
'''
)
@@ -69,6 +172,8 @@ Examples:
help='Skip GitHub stats collection')
parser.add_argument('--skip-all', action='store_true',
help='Skip all update operations')
+parser.add_argument('--force-rescan', action='store_true',
+help='Force rescan all nodes (ignore cache)')
# Backward compatibility: positional argument for temp_dir
parser.add_argument('temp_dir_positional', nargs='?', metavar='TEMP_DIR',
@@ -94,6 +199,11 @@
parse_cnt = 0
def extract_nodes(code_text):
global parse_cnt
+# Check cache first
+cache_key = hash(code_text)
+if cache_key in _extract_nodes_cache:
+return _extract_nodes_cache[cache_key].copy()
try:
if parse_cnt % 100 == 0:
print(".", end="", flush=True)
@@ -128,12 +238,458 @@ def extract_nodes(code_text):
if key is not None and isinstance(key.value, str):
s.add(key.value.strip())
+# Cache the result
+_extract_nodes_cache[cache_key] = s
return s
else:
+# Cache empty result
+_extract_nodes_cache[cache_key] = set()
return set()
except:
+# Cache empty result on error
+_extract_nodes_cache[cache_key] = set()
return set()
def extract_nodes_from_repo(repo_path: Path, verbose: bool = False, force_rescan: bool = False) -> tuple:
"""
Extract all nodes and metadata from a repository with per-repo caching.
Automatically caches results in .git/nodecache.json.
Cache is invalidated when:
- Git commit hash changes
- Scanner version changes
- force_rescan flag is True
Args:
repo_path: Path to repository root
verbose: If True, print UI-only extension detection messages
force_rescan: If True, ignore cache and force fresh scan
Returns:
tuple: (nodes_set, metadata_dict)
"""
# Ensure path is absolute
repo_path = repo_path.resolve()
# Check per-repo cache first (unless force_rescan is True)
if not force_rescan:
cached_result = _load_per_repo_cache(repo_path)
if cached_result is not None:
return cached_result
# Cache miss - scan all .py files
all_nodes = set()
all_metadata = {}
py_files = list(repo_path.rglob("*.py"))
# Filter out __pycache__, .git, and other hidden directories
filtered_files = []
for f in py_files:
try:
rel_path = f.relative_to(repo_path)
# Skip __pycache__, .git, and any directory starting with .
if '__pycache__' not in str(rel_path) and not any(part.startswith('.') for part in rel_path.parts):
filtered_files.append(f)
except:
continue
py_files = filtered_files
for py_file in py_files:
try:
# Read file with proper encoding
with open(py_file, 'r', encoding='utf-8', errors='ignore') as f:
code = f.read()
if code:
# Extract nodes using SAME logic as scan_in_file
# V1 nodes (enhanced with fallback patterns)
nodes = extract_nodes_enhanced(code, py_file, visited=set(), verbose=verbose)
all_nodes.update(nodes)
# V3 nodes detection
v3_nodes = extract_v3_nodes(code)
all_nodes.update(v3_nodes)
# Dict parsing - exclude commented NODE_CLASS_MAPPINGS lines
pattern = r"_CLASS_MAPPINGS\s*(?::\s*\w+\s*)?=\s*(?:\\\s*)?{([^}]*)}"
regex = re.compile(pattern, re.MULTILINE | re.DOTALL)
for match_obj in regex.finditer(code):
# Get the line where NODE_CLASS_MAPPINGS is defined
match_start = match_obj.start()
line_start = code.rfind('\n', 0, match_start) + 1
line_end = code.find('\n', match_start)
if line_end == -1:
line_end = len(code)
line = code[line_start:line_end]
# Skip if line starts with # (commented)
if re.match(r'^\s*#', line):
continue
match = match_obj.group(1)
# Filter out commented lines from dict content
match_lines = match.split('\n')
match_filtered = '\n'.join(
line for line in match_lines
if not re.match(r'^\s*#', line)
)
# Extract key-value pairs with double quotes
key_value_pairs = re.findall(r"\"([^\"]*)\"\s*:\s*([^,\n]*)", match_filtered)
for key, value in key_value_pairs:
all_nodes.add(key.strip())
# Extract key-value pairs with single quotes
key_value_pairs = re.findall(r"'([^']*)'\s*:\s*([^,\n]*)", match_filtered)
for key, value in key_value_pairs:
all_nodes.add(key.strip())
# Handle .update() pattern (AFTER comment removal)
code_cleaned = re.sub(r'^#.*?$', '', code, flags=re.MULTILINE)
update_pattern = r"_CLASS_MAPPINGS\.update\s*\(\s*{([^}]*)}\s*\)"
update_match = re.search(update_pattern, code_cleaned, re.DOTALL)
if update_match:
update_dict_text = update_match.group(1)
# Extract key-value pairs (double quotes)
update_pairs = re.findall(r'"([^"]*)"\s*:\s*([^,\n]*)', update_dict_text)
for key, value in update_pairs:
all_nodes.add(key.strip())
# Extract key-value pairs (single quotes)
update_pairs_single = re.findall(r"'([^']*)'\s*:\s*([^,\n]*)", update_dict_text)
for key, value in update_pairs_single:
all_nodes.add(key.strip())
# Additional regex patterns (AFTER comment removal)
patterns = [
r'^[^=]*_CLASS_MAPPINGS\["(.*?)"\]',
r'^[^=]*_CLASS_MAPPINGS\[\'(.*?)\'\]',
r'@register_node\("(.+)",\s*\".+"\)',
r'"(\w+)"\s*:\s*{"class":\s*\w+\s*'
]
for pattern in patterns:
keys = re.findall(pattern, code_cleaned)
all_nodes.update(key.strip() for key in keys)
# Extract metadata from this file
metadata = extract_metadata_only(str(py_file))
all_metadata.update(metadata)
except Exception as e:
# Silently skip files that can't be read
continue
# Save to per-repo cache
_save_per_repo_cache(repo_path, all_nodes, all_metadata)
return (all_nodes, all_metadata)
def _verify_class_exists(node_name: str, code_text: str, file_path: Optional[Path] = None) -> tuple[bool, Optional[str], Optional[int]]:
"""
Verify that a node class exists and has ComfyUI node structure.
Returns: (exists: bool, file_path: str, line_number: int)
A valid ComfyUI node must have:
- Class definition (not commented)
- At least one of: INPUT_TYPES, RETURN_TYPES, FUNCTION method/attribute
"""
try:
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=SyntaxWarning)
tree = ast.parse(code_text)
except:
return (False, None, None)
for node in ast.walk(tree):
if isinstance(node, ast.ClassDef):
if node.name == node_name or node.name.replace('_', '') == node_name.replace('_', ''):
# Found class definition - check if it has ComfyUI interface
has_input_types = False
has_return_types = False
has_function = False
for item in node.body:
# Check for INPUT_TYPES method
if isinstance(item, ast.FunctionDef) and item.name == 'INPUT_TYPES':
has_input_types = True
# Check for RETURN_TYPES attribute
elif isinstance(item, ast.Assign):
for target in item.targets:
if isinstance(target, ast.Name):
if target.id == 'RETURN_TYPES':
has_return_types = True
elif target.id == 'FUNCTION':
has_function = True
# Check for FUNCTION method
elif isinstance(item, ast.FunctionDef):
has_function = True
# Valid if has any ComfyUI signature
if has_input_types or has_return_types or has_function:
file_str = str(file_path) if file_path else None
return (True, file_str, node.lineno)
return (False, None, None)
def _extract_display_name_mappings(code_text: str) -> Set[str]:
"""
Extract node names from NODE_DISPLAY_NAME_MAPPINGS.
Pattern:
NODE_DISPLAY_NAME_MAPPINGS = {
"node_key": "Display Name",
...
}
Returns:
Set of node keys from NODE_DISPLAY_NAME_MAPPINGS
"""
try:
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=SyntaxWarning)
tree = ast.parse(code_text)
except:
return set()
nodes = set()
for node in tree.body:
if isinstance(node, ast.Assign):
for target in node.targets:
if isinstance(target, ast.Name) and target.id == 'NODE_DISPLAY_NAME_MAPPINGS':
if isinstance(node.value, ast.Dict):
for key in node.value.keys:
if isinstance(key, ast.Constant) and isinstance(key.value, str):
nodes.add(key.value.strip())
return nodes
def extract_nodes_enhanced(
code_text: str,
file_path: Optional[Path] = None,
visited: Optional[Set[Path]] = None,
verbose: bool = False
) -> Set[str]:
"""
Enhanced node extraction with multi-layer detection system.
Scanner 2.0.11 - Comprehensive detection strategy:
- Phase 1: NODE_CLASS_MAPPINGS dict literal
- Phase 2: Class.NAME attribute access (e.g., FreeChat.NAME)
- Phase 3: Item assignment (NODE_CLASS_MAPPINGS["key"] = value)
- Phase 4: Class existence verification (detects active classes even if registration commented)
- Phase 5: NODE_DISPLAY_NAME_MAPPINGS cross-reference
- Phase 6: Empty dict detection (UI-only extensions, logging only)
Fixed Bugs:
- Scanner 2.0.9: Fallback cascade prevented Phase 3 execution
- Scanner 2.0.10: Missed active classes with commented registrations (15 false negatives)
Args:
code_text: Python source code
file_path: Path to file (for logging and caching)
visited: Visited paths (for circular import prevention)
verbose: If True, print UI-only extension detection messages
Returns:
Set of node names (union of all detected patterns)
"""
# Check file-based cache if file_path provided
if file_path is not None:
try:
file_path_obj = Path(file_path) if not isinstance(file_path, Path) else file_path
if file_path_obj.exists():
current_mtime = file_path_obj.stat().st_mtime
# Check if we have cached result with matching mtime and scanner version
if file_path_obj in _file_mtime_cache:
cached_mtime = _file_mtime_cache[file_path_obj]
cache_key = (str(file_path_obj), cached_mtime, SCANNER_VERSION)
if current_mtime == cached_mtime and cache_key in _extract_nodes_enhanced_cache:
return _extract_nodes_enhanced_cache[cache_key].copy()
except:
pass # Ignore cache errors, proceed with normal execution
# Suppress warnings from AST parsing
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=SyntaxWarning)
warnings.filterwarnings('ignore', category=DeprecationWarning)
# Phase 1: Original extract_nodes() - dict literal
phase1_nodes = extract_nodes(code_text)
# Phase 2: Class.NAME pattern
if visited is None:
visited = set()
phase2_nodes = _fallback_classname_resolver(code_text, file_path)
# Phase 3: Item assignment pattern
phase3_nodes = _fallback_item_assignment(code_text)
# Phase 4: NODE_DISPLAY_NAME_MAPPINGS cross-reference (NEW in 2.0.11)
# This catches nodes that are in display names but not in NODE_CLASS_MAPPINGS
phase4_nodes = _extract_display_name_mappings(code_text)
# Phase 5: Class existence verification ONLY for display name candidates (NEW in 2.0.11)
# This phase is CONSERVATIVE - only verify classes that appear in display names
# This catches the specific Scanner 2.0.10 bug pattern:
# - NODE_CLASS_MAPPINGS registration is commented
# - NODE_DISPLAY_NAME_MAPPINGS still has the entry
# - Class implementation exists
# Example: Bjornulf_ollamaLoader in Bjornulf_custom_nodes
phase5_nodes = set()
for node_name in phase4_nodes:
# Only check classes that appear in display names but not in registrations
if node_name not in (phase1_nodes | phase2_nodes | phase3_nodes):
exists, _, _ = _verify_class_exists(node_name, code_text, file_path)
if exists:
phase5_nodes.add(node_name)
# Union all results (FIX: Scanner 2.0.9 bug + Scanner 2.0.10 bug)
# 2.0.9: Used early return which missed Phase 3 nodes
# 2.0.10: Only checked registrations, missed classes referenced in display names
all_nodes = phase1_nodes | phase2_nodes | phase3_nodes | phase4_nodes | phase5_nodes
# Phase 6: Empty dict detector (logging only, doesn't add nodes)
if not all_nodes:
_fallback_empty_dict_detector(code_text, file_path, verbose)
# Cache the result
if file_path is not None:
try:
file_path_obj = Path(file_path) if not isinstance(file_path, Path) else file_path
if file_path_obj.exists():
current_mtime = file_path_obj.stat().st_mtime
cache_key = (str(file_path_obj), current_mtime, SCANNER_VERSION)
_extract_nodes_enhanced_cache[cache_key] = all_nodes
_file_mtime_cache[file_path_obj] = current_mtime
except:
pass
return all_nodes
def _fallback_classname_resolver(code_text: str, file_path: Optional[Path]) -> Set[str]:
"""
Detect Class.NAME pattern in NODE_CLASS_MAPPINGS.
Pattern:
NODE_CLASS_MAPPINGS = {
FreeChat.NAME: FreeChat,
PaidChat.NAME: PaidChat
}
"""
try:
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=SyntaxWarning)
parsed = ast.parse(code_text)
except:
return set()
nodes = set()
for node in parsed.body:
if isinstance(node, ast.Assign):
for target in node.targets:
if isinstance(target, ast.Name) and target.id == 'NODE_CLASS_MAPPINGS':
if isinstance(node.value, ast.Dict):
for key in node.value.keys:
# Detect Class.NAME pattern
if isinstance(key, ast.Attribute):
if isinstance(key.value, ast.Name):
# Use class name as node name
nodes.add(key.value.id)
# Also handle literal strings
elif isinstance(key, ast.Constant) and isinstance(key.value, str):
nodes.add(key.value.strip())
return nodes
def _fallback_item_assignment(code_text: str) -> Set[str]:
"""
Detect item assignment pattern.
Pattern:
NODE_CLASS_MAPPINGS = {}
NODE_CLASS_MAPPINGS["MyNode"] = MyNode
"""
try:
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=SyntaxWarning)
parsed = ast.parse(code_text)
except:
return set()
nodes = set()
for node in ast.walk(parsed):
if isinstance(node, ast.Assign):
for target in node.targets:
if isinstance(target, ast.Subscript):
if (isinstance(target.value, ast.Name) and
target.value.id in ['NODE_CLASS_MAPPINGS', 'NODE_CONFIG']):
# Extract key
if isinstance(target.slice, ast.Constant):
if isinstance(target.slice.value, str):
nodes.add(target.slice.value)
return nodes
def _extract_repo_name(file_path: Path) -> str:
"""
Extract repository name from file path.
Path structure: /home/rho/.tmp/analysis/temp/{author}_{reponame}/{path/to/file.py}
Returns: {author}_{reponame} or filename if extraction fails
"""
try:
parts = file_path.parts
# Find 'temp' directory in path
if 'temp' in parts:
temp_idx = parts.index('temp')
if temp_idx + 1 < len(parts):
# Next part after 'temp' is the repo directory
return parts[temp_idx + 1]
except (ValueError, IndexError):
pass
# Fallback to filename if extraction fails
return file_path.name if hasattr(file_path, 'name') else str(file_path)
def _fallback_empty_dict_detector(code_text: str, file_path: Optional[Path], verbose: bool = False) -> None:
"""
Detect empty NODE_CLASS_MAPPINGS (UI-only extensions).
Logs for documentation purposes only (when verbose=True).
Args:
code_text: Python source code to analyze
file_path: Path to the file being analyzed
verbose: If True, print detection messages
"""
empty_patterns = [
'NODE_CLASS_MAPPINGS = {}',
'NODE_CLASS_MAPPINGS={}',
]
code_normalized = code_text.replace(' ', '').replace('\n', '')
for pattern in empty_patterns:
pattern_normalized = pattern.replace(' ', '')
if pattern_normalized in code_normalized:
if file_path and verbose:
repo_name = _extract_repo_name(file_path)
print(f"Info: UI-only extension (empty NODE_CLASS_MAPPINGS): {repo_name}")
return
def has_comfy_node_base(class_node):
"""Check if class inherits from io.ComfyNode or ComfyNode"""
@@ -229,6 +785,25 @@ def extract_v3_nodes(code_text):
# scan
def extract_metadata_only(filename):
"""Extract only metadata (@author, @title, etc) without node scanning"""
try:
with open(filename, encoding='utf-8', errors='ignore') as file:
code = file.read()
metadata = {}
lines = code.strip().split('\n')
for line in lines:
if line.startswith('@'):
if line.startswith("@author:") or line.startswith("@title:") or line.startswith("@nickname:") or line.startswith("@description:"):
key, value = line[1:].strip().split(':', 1)
metadata[key.strip()] = value.strip()
return metadata
except:
return {}
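# Example header block this parses (illustrative):
#   @author: Jane Doe
#   @title: Example Node Pack
#   @nickname: ExamplePack
#   @description: Demo of the @-prefixed metadata lines.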
def scan_in_file(filename, is_builtin=False):
global builtin_nodes
@@ -242,8 +817,8 @@ def scan_in_file(filename, is_builtin=False):
nodes = set()
class_dict = {}
-# V1 nodes detection
-nodes |= extract_nodes(code)
+# V1 nodes detection (enhanced with fallback patterns)
+nodes |= extract_nodes_enhanced(code, file_path=Path(filename), visited=set())
# V3 nodes detection
nodes |= extract_v3_nodes(code)
@@ -620,13 +1195,14 @@ def update_custom_nodes(scan_only_mode=False, url_list_file=None):
return node_info
-def gen_json(node_info, scan_only_mode=False):
+def gen_json(node_info, scan_only_mode=False, force_rescan=False):
"""
Generate extension-node-map.json from scanned node information
Args:
node_info (dict): Repository metadata mapping
scan_only_mode (bool): If True, exclude metadata from output
+force_rescan (bool): If True, ignore cache and force rescan all nodes
"""
# scan from .py file
node_files, node_dirs = get_nodes(temp_dir)
@@ -642,12 +1218,16 @@ def gen_json(node_info, scan_only_mode=False, force_rescan=False):
py_files = get_py_file_paths(dirname)
metadata = {}
-nodes = set()
-for py in py_files:
-nodes_in_file, metadata_in_file = scan_in_file(py, dirname == "ComfyUI")
-nodes.update(nodes_in_file)
-# Include metadata from .py files in both modes
-metadata.update(metadata_in_file)
+# Use per-repo cache for node AND metadata extraction
+try:
+nodes, metadata = extract_nodes_from_repo(Path(dirname), verbose=False, force_rescan=force_rescan)
+except:
+# Fallback to file-by-file scanning if extract_nodes_from_repo fails
+nodes = set()
+for py in py_files:
+nodes_in_file, metadata_in_file = scan_in_file(py, dirname == "ComfyUI")
+nodes.update(nodes_in_file)
+metadata.update(metadata_in_file)
dirname = os.path.basename(dirname)
@@ -810,7 +1390,10 @@ if __name__ == "__main__":
print("\n# Generating 'extension-node-map.json'...\n")
# Generate extension-node-map.json
-gen_json(updated_node_info, scan_only_mode)
+force_rescan = args.force_rescan if hasattr(args, 'force_rescan') else False
+if force_rescan:
+print("⚠️ Force rescan enabled - ignoring all cached results\n")
+gen_json(updated_node_info, scan_only_mode, force_rescan)
print("\n✅ DONE.\n")