Mirror of https://github.com/Comfy-Org/ComfyUI-Manager.git (synced 2025-12-15 01:27:05 +08:00)

Compare commits: 24 commits, 032332368a ... b1e1d4f936
| SHA1 |
|---|
| b1e1d4f936 |
| d8f111a5e3 |
| ae5565ce68 |
| e4c370a7d9 |
| 891005bcd3 |
| d3a4a7a0fa |
| 10211d1a93 |
| 7f019a932b |
| fae909de2f |
| d8455ef6e5 |
| 934c994783 |
| d0961d596d |
| 382df24764 |
| bfcfa42125 |
| 2333886c34 |
| 0cdad3c886 |
| eee23c543b |
| f0a8812f5e |
| a8d603f753 |
| 22acaa1d2c |
| fe791ccee9 |
| 414557eee0 |
| 97d2741360 |
| b95e5f1eae |
@@ -14868,6 +14868,16 @@
"install_type": "git-clone",
"description": "A collection of utility nodes for lora operations in ComfyUI."
},
{
"author": "lrzjason",
"title": "Comfyui-LatentUtils",
"reference": "https://github.com/lrzjason/Comfyui-LatentUtils",
"files": [
"https://github.com/lrzjason/Comfyui-LatentUtils"
],
"install_type": "git-clone",
"description": "Custom ComfyUI node performing selective latent denoising and detail enhancement using Fourier Transform (FFT) to separate and enhance image frequencies while suppressing noise. (Description by CC)"
},
{
"author": "cozy_comm",
"title": "Cozy Communication",
@@ -19172,16 +19182,6 @@
"install_type": "git-clone",
"description": "A custom ComfyUI node using Together AI's Vision models for free image descriptions, image generation, and image-to-image transformation. Features include customizable prompts, advanced parameters, and robust error handling."
},
{
"author": "jeffrey2212",
"title": "Pony Character Prompt Picker for ComfyUI",
"reference": "https://github.com/jeffrey2212/ComfyUI-PonyCharacterPrompt",
"files": [
"https://github.com/jeffrey2212/ComfyUI-PonyCharacterPrompt"
],
"install_type": "git-clone",
"description": "The Pony Character Prompt Picker node reads an Excel file specified by the user, allows manual selection of a tab, and randomly picks a cell value from a specified column, starting from row 3 to the end. The selected value is output as a string to the next node in the ComfyUI workflow."
},
{
"author": "theshubzworld",
"title": "ComfyUI-FaceCalloutNode",
@@ -19192,6 +19192,36 @@
"install_type": "git-clone",
"description": "A collection of custom nodes for ComfyUI that provide advanced face callout, annotation, and compositing effects using OpenCV and PIL. These nodes are designed for image processing workflows that require face detection, annotation, and creative compositing."
},
{
"author": "theshubzworld",
"title": "ComfyUI-NvidiaCaptioner",
"reference": "https://github.com/theshubzworld/ComfyUI-NvidiaCaptioner",
"files": [
"https://github.com/theshubzworld/ComfyUI-NvidiaCaptioner"
],
"install_type": "git-clone",
"description": "A ComfyUI node for generating rich, detailed captions for images using NVIDIA's vision models. Supports batch processing, multiple captioning styles, and includes built-in caching for efficient workflows."
},
{
"author": "theshubzworld",
"title": "ComfyUI-Universal-Latent",
"reference": "https://github.com/theshubzworld/ComfyUI-Universal-Latent",
"files": [
"https://github.com/theshubzworld/ComfyUI-Universal-Latent"
],
"install_type": "git-clone",
"description": "Enhanced empty latent node with extended aspect ratio support for ComfyUI"
},
{
"author": "jeffrey2212",
"title": "Pony Character Prompt Picker for ComfyUI",
"reference": "https://github.com/jeffrey2212/ComfyUI-PonyCharacterPrompt",
"files": [
"https://github.com/jeffrey2212/ComfyUI-PonyCharacterPrompt"
],
"install_type": "git-clone",
"description": "The Pony Character Prompt Picker node reads an Excel file specified by the user, allows manual selection of a tab, and randomly picks a cell value from a specified column, starting from row 3 to the end. The selected value is output as a string to the next node in the ComfyUI workflow."
},
{
"author": "Jonseed",
"title": "ComfyUI-Detail-Daemon",
@@ -20190,10 +20220,9 @@
{
"author": "Black-Lioness",
"title": "ComfyUI-PromptUtils",
"reference": "https://github.com/Black-Lioness/ComfyUI-PromptUtils",
"reference2": "https://github.com/RunningOverGlowies/ComfyUI-PromptUtils",
"reference": "https://github.com/RunningOverGlowies/ComfyUI-PromptUtils",
"files": [
"https://github.com/Black-Lioness/ComfyUI-PromptUtils"
"https://github.com/RunningOverGlowies/ComfyUI-PromptUtils"
],
"install_type": "git-clone",
"description": "A set of ComfyUI nodes designed to enhance your workflow with realistic filename generation and keyword generation."
@@ -22734,6 +22763,16 @@
"install_type": "git-clone",
"description": "Calculates the percentage of a mask area compared to the total image size and outputs a boolean based on a defined threshold."
},
{
"author": "a-und-b",
"title": "ComfyUI_AB_Wildcard",
"reference": "https://github.com/a-und-b/ComfyUI_AB_Wildcard",
"files": [
"https://github.com/a-und-b/ComfyUI_AB_Wildcard"
],
"install_type": "git-clone",
"description": "Simple node for advanced wildcard text processing. Supports variables, conditionals, tag aggregation, weighted selection, and deep nesting. Lightweight, fast, zero external dependencies."
},
{
"author": "r3dial",
"title": "Redial Discomphy - Discord Integration for ComfyUI",
@@ -25682,16 +25721,6 @@
"install_type": "git-clone",
"description": "Comfyui-raw-image provides the ability to load raw image files for ComfyUI"
},
{
"author": "DiffusionWave",
"title": "PickResolution_DiffusionWave",
"reference": "https://github.com/DiffusionWave/PickResolution_DiffusionWave",
"files": [
"https://github.com/DiffusionWave/PickResolution_DiffusionWave"
],
"install_type": "git-clone",
"description": "A custom node for ComfyUI that allows selecting a base resolution, applying a custom scaling value based on FLOAT (up to 10 decimal places), and adding an extra integer value. Outputs include both INT and FLOAT resolutions, making it perfect for you to play around with."
},
{
"author": "Zar4X",
"title": "ComfyUI-Batch-Process",
@@ -29356,16 +29385,6 @@
"install_type": "git-clone",
"description": "Load images with automatic prompt extraction from Civitai URLs, caption files, or EXIF metadata. Features smart dataset detection and dynamic preview updates."
},
{
"author": "LargeModGames",
"title": "ComfyUI LoRA Auto Downloader",
"reference": "https://github.com/LargeModGames/comfyui-smart-lora-downloader",
"files": [
"https://github.com/LargeModGames/comfyui-smart-lora-downloader"
],
"install_type": "git-clone",
"description": "Automatically download missing LoRAs from CivitAI and detect missing LoRAs in workflows. Features smart directory detection and easy installation."
},
{
"author": "benjamin-bertram",
"title": "ComfyUI OIDN Denoiser",
@@ -29583,6 +29602,17 @@
"install_type": "git-clone",
"description": "Stylized RPG character prompt generator for ComfyUI. Supports standard and Ollama-based prompts, works with SD, SDXL, Flux, and more."
},
{
"author": "Lord Lethris",
"title": "Dia2 TTS & Captions Generators for ComfyUI",
"id": "dia2_tts_captions",
"reference": "https://github.com/lord-lethris/ComfyUI-lethris-dia2",
"files": [
"https://github.com/lord-lethris/ComfyUI-lethris-dia2"
],
"install_type": "Git-Clone",
"description": "This package provides two ComfyUI nodes: 🗣️ Dia2 TTS Generator for text-to-speech using Dia2-2B, and 💬 Dia2 Captions Generator to convert TTS timestamps into SRT/SSA/VTT subtitles. Includes example workflow and voice samples. GPU users require CUDA 12.8+."
},
{
"author": "ialhabbal",
"title": "OcclusionMask",
@@ -30507,6 +30537,16 @@
"install_type": "git-clone",
"description": "An advanced image stitching node for ComfyUI."
},
{
"author": "RamonGuthrie",
"title": "ComfyUI-RBG-SmartSeedVariance",
"reference": "https://github.com/RamonGuthrie/ComfyUI-RBG-SmartSeedVariance",
"files": [
"https://github.com/RamonGuthrie/ComfyUI-RBG-SmartSeedVariance"
],
"install_type": "git-clone",
"description": "Advanced seed diversity enhancement for ComfyUI with intelligent noise injection and directional biasing."
},
{
"author": "vrgamegirl19",
"title": "VRGameDevGirl Video Enhancement Nodes",
@@ -34069,16 +34109,6 @@
"install_type": "git-clone",
"description": "This is a custom node for ComfyUI that provides a dynamic 'Switch' for routing purposes. It allows you to define a list of named labels and select one, outputting the corresponding index and label name. This is useful for controlling the flow of your workflow based on a selection."
},
{
"author": "mcrataobrabo",
"title": "comfyui-smart-lora-downloader - Automatically Fetch Missing LoRAs",
"reference": "https://github.com/mcrataobrabo/comfyui-smart-lora-downloader",
"files": [
"https://github.com/mcrataobrabo/comfyui-smart-lora-downloader"
],
"install_type": "git-clone",
"description": "Automatically detect and download missing LoRAs for ComfyUI workflows"
},
{
"author": "3dgopnik",
"title": "ComfyUI Arena Suite",
@@ -37638,16 +37668,6 @@
"install_type": "git-clone",
"description": "ComfyUI nodes for loading images and drawing polygon masks interactively on them"
},
{
"author": "cdanielp",
"title": "COMFYUI_PROMPTMODELS",
"reference": "https://github.com/cdanielp/COMFYUI_PROMPTMODELS",
"files": [
"https://github.com/cdanielp/COMFYUI_PROMPTMODELS"
],
"install_type": "git-clone",
"description": "Custom nodes for ComfyUI by PROMPTMODELS."
},
{
"author": "supElement",
"title": "ComfyUI_Element_easy",
@@ -37776,6 +37796,7 @@
"https://github.com/fredlef/Comfyui_FSL_Nodes"
],
"install_type": "git-clone",
"description": "Custom nodes: FSLGeminiChat, FSLGeminiGenerateImage, Transparent Background helpers, and more." ,
"tags": ["image", "chat", "gemini", "fsl"]
},
{
@@ -37807,6 +37828,9 @@
"title": "Img Label Tools",
"id": "Img-Label-Tools",
"reference": "https://github.com/rjgoif/ComfyUI-Img-Label-Tools",
"files": [
"https://github.com/rjgoif/ComfyUI-Img-Label-Tools"
],
"install_type": "git-clone",
"description": "Tools to help annotate images for sharing on Reddit, Discord, etc."
},
@@ -37861,6 +37885,17 @@
"install_type": "git-clone",
"description": "Professional audio processing and mastering suite for ComfyUI."
},
{
"author": "jeankassio",
"title": "JK AceStep Nodes",
"id": "JK-AceStep-Nodes",
"reference": "https://github.com/jeankassio/JK-AceStep-Nodes",
"files": [
"https://github.com/jeankassio/JK-AceStep-Nodes"
],
"install_type": "git-clone",
"description": "Advanced nodes optimized for ACE-Step audio generation in ComfyUI."
},
{
"author": "ameyukisora",
"title": "ComfyUI Empty Latent Advanced",
@@ -38190,16 +38225,6 @@
"install_type": "git-clone",
"description": "A suite of powerful and versatile utility nodes for ComfyUI, designed to streamline complex workflows involving Large Language Models and text manipulation."
},
{
"author": "KANAsho34636",
"title": "ComfyUI-NaturalSort-ImageLoader",
"reference": "https://github.com/KANAsho34636/ComfyUI-NaturalSort-ImageLoader",
"files": [
"https://github.com/KANAsho34636/ComfyUI-NaturalSort-ImageLoader"
],
"install_type": "git-clone",
"description": "Custom image loader node supporting natural number sorting with multiple sort modes (natural, lexicographic, modification time, creation time, reverse natural). (Description by CC)"
},
{
"author": "systemaiofinterest-wq",
"title": "ComfyUI-MetaAI",
@@ -38220,16 +38245,6 @@
"install_type": "git-clone",
"description": "A ComfyUI model loader that uses the fastsafetensors library to perform very fast, zero-copy loading from storage to VRAM."
},
{
"author": "johninthewinter",
"title": "comfyui-fal-flux-2-John",
"reference": "https://github.com/johninthewinter/comfyui-fal-flux-2-John",
"files": [
"https://github.com/johninthewinter/comfyui-fal-flux-2-John"
],
"install_type": "git-clone",
"description": "Custom nodes for ComfyUI that integrate with fal.ai's FLUX 2 and FLUX 1 LoRA APIs for text-to-image generation."
},
{
"author": "Merserk",
"title": "ComfyUI-Flow-Assistor",
@@ -38280,7 +38295,118 @@
"install_type": "git-clone",
"description": "Custom ComfyUI node for generating consistent character images using Ideogram API v3's character reference feature. (Description by CC)"
},

{
"author": "Moeblack",
"title": "ComfyUI-SimpleChat",
"reference": "https://github.com/Moeblack/ComfyUI-SimpleChat",
"files": [
"https://github.com/Moeblack/ComfyUI-SimpleChat"
],
"install_type": "git-clone",
"description": "Simple, no-nonsense LLM chat nodes for ComfyUI. Support OpenAI, Claude, Gemini and NoASS Roleplay.",
"tags": [
"LLM",
"chat",
"openai",
"claude",
"gemini",
"roleplay",
"noass"
]
},
{
"author": "Braeden90000",
"title": "ComfyUI Load Image URL",
"id": "load-image-url",
"reference": "https://github.com/Braeden90000/comfyui-load-image-url",
"files": [
"https://github.com/Braeden90000/comfyui-load-image-url"
],
"pip": ["requests"],
"install_type": "git-clone",
"description": "Load images from files or URLs with live preview and source switching."
},
{
"author": "lovelybbq",
"title": "ComfyUI Custom Node Color",
"reference": "https://github.com/lovelybbq/comfyui-custom-node-color",
"files": [
"https://github.com/lovelybbq/comfyui-custom-node-color"
],
"install_type": "git-clone",
"description": "A modern GUI-based color picker for ComfyUI nodes. Features visual spectrum, HEX/RGB inputs, eyedropper tool, and favorite colors support."
},
{
"author": "huihuihuiz",
"title": "LoRA Downloader for ComfyUI",
"id": "lora_downloader",
"reference": "https://github.com/huihuihuiz/lora_downloader",
"files": [
"https://github.com/huihuihuiz/lora_downloader"
],
"install_type": "git-clone",
"description": "A ComfyUI custom node for downloading and managing LoRA models directly within the UI."
},
{
"author": "aTanguay",
"title": "ComfyUI_Detonate",
"reference": "https://github.com/aTanguay/ComfyUI_Detonate",
"files": [
"https://github.com/aTanguay/ComfyUI_Detonate"
],
"install_type": "git-clone",
"description": "Professional compositing nodes for ComfyUI - bringing Nuke and Fusion workflows to AI-powered image generation"
},
{
"author": "akaugun",
"title": "comfyui-lora-hook-trigger",
"reference": "https://github.com/akaugun/comfyui-lora-hook-trigger",
"files": [
"https://github.com/akaugun/comfyui-lora-hook-trigger"
],
"install_type": "git-clone",
"description": "A clean and simple ComfyUI custom node that creates a LoRA Hook and automatically finds and loads trigger TXT files placed in a folder named after the LoRA file."
},
{
"author": "tumbowungus",
"title": "MultiMaskCouple",
"reference": "https://github.com/tumbowungus/MultiMaskCouple",
"files": [
"https://github.com/tumbowungus/MultiMaskCouple"
],
"install_type": "git-clone",
"description": "A custom node for ComfyUI which simplifies the process of masking multiple prompts, making it easier to manage scenes with multiple distinct characters."
},
{
"author": "fudosanit",
"title": "ComfyUI-Random-Resolution",
"reference": "https://github.com/fudosanit/ComfyUI-Random-Resolution",
"files": [
"https://github.com/fudosanit/ComfyUI-Random-Resolution"
],
"install_type": "git-clone",
"description": "A custom node for ComfyUI. Based on the specified resolution, it randomly selects and outputs one of three patterns: original orientation, swapped width/height, or a square format. (Description by CC)"
},
{
"author": "SuLU-K",
"title": "comfyui-easy-sam3-tools",
"reference": "https://github.com/SuLU-K/comfyui-easy-sam3-tools",
"files": [
"https://github.com/SuLU-K/comfyui-easy-sam3-tools"
],
"install_type": "git-clone",
"description": "Custom nodes for building SAM3-centric editing pipelines inside ComfyUI, featuring mask merging, bounding box rendering, and DOM-based interactive editing. (Description by CC)"
},
{
"author": "EMkrtchyan",
"title": "ComfyUI-NormalsToDepth",
"reference": "https://github.com/EMkrtchyan/ComfyUI-NormalsToDepth",
"files": [
"https://github.com/EMkrtchyan/ComfyUI-NormalsToDepth"
],
"install_type": "git-clone",
"description": "Converts normal maps to depth maps for use in ComfyUI. (Description by CC)"
},
@@ -38708,6 +38834,6 @@
],
"install_type": "unzip",
"description": "This is a node to convert an image into a CMYK Halftone dot image."
}
}
]
}

File diff suppressed because it is too large
@@ -2,6 +2,7 @@ import subprocess
import sys
import os
import traceback
import time

import git
import json
@@ -219,7 +220,14 @@ def gitpull(path):
repo.close()
return

remote.pull()
try:
repo.git.pull('--ff-only')
except git.GitCommandError:
backup_name = f'backup_{time.strftime("%Y%m%d_%H%M%S")}'
repo.create_head(backup_name)
print(f"[ComfyUI-Manager] Cannot fast-forward. Backup created: {backup_name}")
repo.git.reset('--hard', f'{remote_name}/{branch_name}')
print(f"[ComfyUI-Manager] Reset to {remote_name}/{branch_name}")

repo.git.submodule('update', '--init', '--recursive')
new_commit_hash = repo.head.commit.hexsha
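The hunk above replaces the unconditional remote.pull() with a fast-forward-only pull plus a backup-branch fallback. A minimal standalone sketch of the same pattern, assuming GitPython is installed and `path` points at a clone whose active branch has a tracking branch (the function name here is illustrative, not the manager's API):

import time
import git

def pull_with_backup(path):
    repo = git.Repo(path)
    branch = repo.active_branch
    remote_name = branch.tracking_branch().remote_name  # assumes a tracking branch exists
    try:
        # Accept only fast-forward updates.
        repo.git.pull('--ff-only')
    except git.GitCommandError:
        # Preserve local commits on a timestamped backup branch, then follow the remote.
        backup = repo.create_head(f'backup_{time.strftime("%Y%m%d_%H%M%S")}')
        repo.git.reset('--hard', f'{remote_name}/{branch.name}')
        print(f"Cannot fast-forward; local state kept on {backup.name}")
    finally:
        repo.close()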
github-stats-cache.json (new file, 22184 lines)
File diff suppressed because it is too large
github-stats.json (8706 lines)
File diff suppressed because it is too large
@@ -44,7 +44,7 @@ import manager_migration
from node_package import InstalledNodePackage


version_code = [3, 38, 1]
version_code = [3, 38, 3]
version_str = f"V{version_code[0]}.{version_code[1]}" + (f'.{version_code[2]}' if len(version_code) > 2 else '')
@@ -2253,9 +2253,17 @@ def git_pull(path):

current_branch = repo.active_branch
remote_name = current_branch.tracking_branch().remote_name
remote = repo.remote(name=remote_name)

remote.pull()
try:
repo.git.pull('--ff-only')
except git.GitCommandError:
branch_name = current_branch.name
backup_name = f'backup_{time.strftime("%Y%m%d_%H%M%S")}'
repo.create_head(backup_name)
logging.info(f"[ComfyUI-Manager] Cannot fast-forward. Backup created: {backup_name}")
repo.git.reset('--hard', f'{remote_name}/{branch_name}')
logging.info(f"[ComfyUI-Manager] Reset to {remote_name}/{branch_name}")

repo.git.submodule('update', '--init', '--recursive')

repo.close()
@@ -2523,22 +2531,22 @@ def update_to_stable_comfyui(repo_path):
logging.error('\t'+branch.name)
return "fail", None

versions, current_tag, _ = get_comfyui_versions(repo)

if len(versions) == 0 or (len(versions) == 1 and versions[0] == 'nightly'):
versions, current_tag, latest_tag = get_comfyui_versions(repo)

if latest_tag is None:
logging.info("[ComfyUI-Manager] Unable to update to the stable ComfyUI version.")
return "fail", None

if versions[0] == 'nightly':
latest_tag = versions[1]
else:
latest_tag = versions[0]

if current_tag == latest_tag:
tag_ref = next((t for t in repo.tags if t.name == latest_tag), None)
if tag_ref is None:
logging.info(f"[ComfyUI-Manager] Unable to locate tag '{latest_tag}' in repository.")
return "fail", None

if repo.head.commit == tag_ref.commit:
return "skip", None
else:
logging.info(f"[ComfyUI-Manager] Updating ComfyUI: {current_tag} -> {latest_tag}")
repo.git.checkout(latest_tag)
repo.git.checkout(tag_ref.name)
execute_install_script("ComfyUI", repo_path, instant_execution=False, no_deps=False)
return 'updated', latest_tag
except:
@@ -3362,36 +3370,80 @@ async def restore_snapshot(snapshot_path, git_helper_extras=None):


def get_comfyui_versions(repo=None):
if repo is None:
repo = git.Repo(comfy_path)
repo = repo or git.Repo(comfy_path)

remote_name = None
try:
remote = get_remote_name(repo)
repo.remotes[remote].fetch()
remote_name = get_remote_name(repo)
repo.remotes[remote_name].fetch()
except:
logging.error("[ComfyUI-Manager] Failed to fetch ComfyUI")

versions = [x.name for x in repo.tags if x.name.startswith('v')]
def parse_semver(tag_name):
match = re.match(r'^v(\d+)\.(\d+)\.(\d+)$', tag_name)
return tuple(int(x) for x in match.groups()) if match else None

# nearest tag
versions = sorted(versions, key=lambda v: repo.git.log('-1', '--format=%ct', v), reverse=True)
versions = versions[:4]
def normalize_describe(tag_name):
if not tag_name:
return None
base = tag_name.split('-', 1)[0]
return base if parse_semver(base) else None

current_tag = repo.git.describe('--tags')
# Collect semver tags and sort descending (highest first)
semver_tags = []
for tag in repo.tags:
semver = parse_semver(tag.name)
if semver:
semver_tags.append((semver, tag.name))
semver_tags.sort(key=lambda x: x[0], reverse=True)
semver_tags = [name for _, name in semver_tags]

if current_tag not in versions:
versions = sorted(versions + [current_tag], key=lambda v: repo.git.log('-1', '--format=%ct', v), reverse=True)
versions = versions[:4]
latest_tag = semver_tags[0] if semver_tags else None

main_branch = repo.heads.master
latest_commit = main_branch.commit
latest_tag = repo.git.describe('--tags', latest_commit.hexsha)
try:
described = repo.git.describe('--tags')
except Exception:
described = ''

if latest_tag != versions[0]:
versions.insert(0, 'nightly')
else:
versions[0] = 'nightly'
try:
exact_tag = repo.git.describe('--tags', '--exact-match')
except Exception:
exact_tag = ''

head_is_default = False
if remote_name:
try:
default_head_ref = repo.refs[f'{remote_name}/HEAD']
default_commit = default_head_ref.reference.commit
head_is_default = repo.head.commit == default_commit
except Exception:
head_is_default = False

nearest_semver = normalize_describe(described)
exact_semver = exact_tag if parse_semver(exact_tag) else None

if head_is_default and not exact_tag:
current_tag = 'nightly'
else:
current_tag = exact_tag or described or 'nightly'

# Prepare semver list for display: top 4 plus the current/nearest semver if missing
display_semver_tags = semver_tags[:4]
if exact_semver and exact_semver not in display_semver_tags:
display_semver_tags.append(exact_semver)
elif nearest_semver and nearest_semver not in display_semver_tags:
display_semver_tags.append(nearest_semver)

versions = ['nightly']

if current_tag and not exact_semver and current_tag not in versions and current_tag not in display_semver_tags:
versions.append(current_tag)

for tag in display_semver_tags:
if tag not in versions:
versions.append(tag)

versions = versions[:6]

return versions, current_tag, latest_tag
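For reference, the parse_semver helper introduced above maps plain vMAJOR.MINOR.PATCH tags to comparable tuples, and latest_tag is simply the highest such tuple. A quick illustration of the sort it relies on (tag names below are made up, not ComfyUI releases):

import re

def parse_semver(tag_name):
    match = re.match(r'^v(\d+)\.(\d+)\.(\d+)$', tag_name)
    return tuple(int(x) for x in match.groups()) if match else None

tags = ['v0.3.9', 'v0.3.10', 'v0.2.7', 'v0.10.0-rc1']
semver_tags = sorted((t for t in tags if parse_semver(t)), key=parse_semver, reverse=True)
# parse_semver('v0.3.10') == (0, 3, 10); tuple comparison ranks it above 'v0.3.9',
# and 'v0.10.0-rc1' is skipped because it is not a plain vMAJOR.MINOR.PATCH tag.
# semver_tags == ['v0.3.10', 'v0.3.9', 'v0.2.7'], so semver_tags[0] would be latest_tag.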
json-checker.py (273 lines)
@@ -1,25 +1,264 @@
import json
import argparse
#!/usr/bin/env python3
"""JSON Entry Validator

def check_json_syntax(file_path):
Validates JSON entries based on content structure.

Validation rules based on JSON content:
- {"custom_nodes": [...]}: Validates required fields (author, title, reference, files, install_type, description)
- {"models": [...]}: Validates JSON syntax only (no required fields)
- Other JSON structures: Validates JSON syntax only

Git repository URL validation (for custom_nodes):
1. URLs must NOT end with .git
2. URLs must follow format: https://github.com/{author}/{reponame}
3. .py and .js files are exempt from this check

Supported formats:
- Array format: [{...}, {...}]
- Object format: {"custom_nodes": [...]} or {"models": [...]}
"""

import json
import re
import sys
from pathlib import Path
from typing import Dict, List, Tuple


# Required fields for each entry type
REQUIRED_FIELDS_CUSTOM_NODE = ['author', 'title', 'reference', 'files', 'install_type', 'description']
REQUIRED_FIELDS_MODEL = []  # model-list.json doesn't require field validation

# Pattern for valid GitHub repository URL (without .git suffix)
GITHUB_REPO_PATTERN = re.compile(r'^https://github\.com/[^/]+/[^/]+$')


def get_entry_context(entry: Dict) -> str:
"""Get identifying information from entry for error messages

Args:
entry: JSON entry

Returns:
String with author and reference info
"""
parts = []
if 'author' in entry:
parts.append(f"author={entry['author']}")
if 'reference' in entry:
parts.append(f"ref={entry['reference']}")
if 'title' in entry:
parts.append(f"title={entry['title']}")

if parts:
return " | ".join(parts)
else:
# No identifying info - show actual entry content (truncated)
import json
entry_str = json.dumps(entry, ensure_ascii=False)
if len(entry_str) > 100:
entry_str = entry_str[:100] + "..."
return f"content={entry_str}"
def validate_required_fields(entry: Dict, entry_index: int, required_fields: List[str]) -> List[str]:
"""Validate that all required fields are present

Args:
entry: JSON entry to validate
entry_index: Index of entry in array (for error reporting)
required_fields: List of required field names

Returns:
List of error descriptions (without entry prefix/context)
"""
errors = []

for field in required_fields:
if field not in entry:
errors.append(f"Missing required field '{field}'")
elif entry[field] is None:
errors.append(f"Field '{field}' is null")
elif isinstance(entry[field], str) and not entry[field].strip():
errors.append(f"Field '{field}' is empty")
elif field == 'files' and not entry[field]:  # Empty array
errors.append("Field 'files' is empty array")

return errors


def validate_git_repo_urls(entry: Dict, entry_index: int) -> List[str]:
"""Validate git repository URLs in 'files' array

Requirements:
- Git repo URLs must NOT end with .git
- Must follow format: https://github.com/{author}/{reponame}
- .py and .js files are exempt

Args:
entry: JSON entry to validate
entry_index: Index of entry in array (for error reporting)

Returns:
List of error descriptions (without entry prefix/context)
"""
errors = []

if 'files' not in entry or not isinstance(entry['files'], list):
return errors

for file_url in entry['files']:
if not isinstance(file_url, str):
continue

# Skip .py and .js files - they're exempt from git repo validation
if file_url.endswith('.py') or file_url.endswith('.js'):
continue

# Check if it's a GitHub URL (likely a git repo)
if 'github.com' in file_url:
# Error if URL ends with .git
if file_url.endswith('.git'):
errors.append(f"Git repo URL must NOT end with .git: {file_url}")
continue

# Validate format: https://github.com/{author}/{reponame}
if not GITHUB_REPO_PATTERN.match(file_url):
errors.append(f"Invalid git repo URL format (expected https://github.com/author/reponame): {file_url}")

return errors


def validate_entry(entry: Dict, entry_index: int, required_fields: List[str]) -> List[str]:
"""Validate a single JSON entry

Args:
entry: JSON entry to validate
entry_index: Index of entry in array (for error reporting)
required_fields: List of required field names

Returns:
List of error messages (empty if valid)
"""
errors = []

# Check required fields
errors.extend(validate_required_fields(entry, entry_index, required_fields))

# Check git repository URLs
errors.extend(validate_git_repo_urls(entry, entry_index))

return errors
def validate_json_file(file_path: str) -> Tuple[bool, List[str]]:
"""Validate JSON file containing entries

Args:
file_path: Path to JSON file

Returns:
Tuple of (is_valid, error_messages)
"""
errors = []

# Check file exists
path = Path(file_path)
if not path.exists():
return False, [f"File not found: {file_path}"]

# Load JSON
try:
with open(file_path, 'r', encoding='utf-8') as file:
json_str = file.read()
json.loads(json_str)
print(f"[ OK ] {file_path}")
except UnicodeDecodeError as e:
print(f"Unicode decode error: {e}")
with open(path, 'r', encoding='utf-8') as f:
data = json.load(f)
except json.JSONDecodeError as e:
print(f"[FAIL] {file_path}\n\n {e}\n")
except FileNotFoundError:
print(f"[FAIL] {file_path}\n\n File not found\n")
return False, [f"Invalid JSON: {e}"]
except Exception as e:
return False, [f"Error reading file: {e}"]

# Determine required fields based on JSON content
required_fields = []

# Validate structure - support both array and object formats
entries_to_validate = []

if isinstance(data, list):
# Direct array format: [{...}, {...}]
entries_to_validate = data
elif isinstance(data, dict):
# Object format: {"custom_nodes": [...]} or {"models": [...]}
# Determine validation based on keys
if 'custom_nodes' in data and isinstance(data['custom_nodes'], list):
required_fields = REQUIRED_FIELDS_CUSTOM_NODE
entries_to_validate = data['custom_nodes']
elif 'models' in data and isinstance(data['models'], list):
required_fields = REQUIRED_FIELDS_MODEL
entries_to_validate = data['models']
else:
# Other JSON structures (extension-node-map.json, etc.) - just validate JSON syntax
return True, []
else:
return False, ["JSON root must be either an array or an object containing arrays"]

# Validate each entry
for idx, entry in enumerate(entries_to_validate, start=1):
if not isinstance(entry, dict):
# Show actual value for type errors
entry_str = json.dumps(entry, ensure_ascii=False) if not isinstance(entry, str) else repr(entry)
if len(entry_str) > 150:
entry_str = entry_str[:150] + "..."
errors.append(f"\n❌ Entry #{idx}: Must be an object, got {type(entry).__name__}")
errors.append(f" Actual value: {entry_str}")
continue

entry_errors = validate_entry(entry, idx, required_fields)
if entry_errors:
# Group errors by entry with context
context = get_entry_context(entry)
errors.append(f"\n❌ Entry #{idx} ({context}):")
for error in entry_errors:
errors.append(f" - {error}")

is_valid = len(errors) == 0
return is_valid, errors
def main():
parser = argparse.ArgumentParser(description="JSON File Syntax Checker")
parser.add_argument("file_path", type=str, help="Path to the JSON file for syntax checking")
"""Main entry point"""
if len(sys.argv) < 2:
print("Usage: python json-checker.py <json-file>")
print("\nValidates JSON entries based on content:")
print(" - {\"custom_nodes\": [...]}: Validates required fields (author, title, reference, files, install_type, description)")
print(" - {\"models\": [...]}: Validates JSON syntax only (no required fields)")
print(" - Other JSON structures: Validates JSON syntax only")
print("\nGit repo URL validation (for custom_nodes):")
print(" - URLs must NOT end with .git")
print(" - URLs must follow: https://github.com/{author}/{reponame}")
sys.exit(1)

args = parser.parse_args()
check_json_syntax(args.file_path)
file_path = sys.argv[1]

if __name__ == "__main__":
is_valid, errors = validate_json_file(file_path)

if is_valid:
print(f"✅ {file_path}: Validation passed")
sys.exit(0)
else:
print(f"Validating: {file_path}")
print("=" * 60)
print("❌ Validation failed!\n")
print("Errors:")
# Count actual errors (lines starting with " -")
error_count = sum(1 for e in errors if e.strip().startswith('-'))
for error in errors:
# Don't add ❌ prefix to grouped entries (they already have it)
if error.strip().startswith('❌'):
print(error)
else:
print(error)
print(f"\nTotal errors: {error_count}")
sys.exit(1)


if __name__ == '__main__':
main()
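A hedged usage sketch for the rewritten checker, based only on the rules shown above (it assumes json-checker.py sits in the current directory; the sample author, repository URL, and output file name are illustrative):

import json
import subprocess
import sys

sample = {
    "custom_nodes": [
        {
            "author": "someauthor",  # illustrative values, not a real registry entry
            "title": "Some Node Pack",
            "reference": "https://github.com/someauthor/some-node-pack",
            "files": ["https://github.com/someauthor/some-node-pack"],  # no trailing .git
            "install_type": "git-clone",
            "description": "Example entry with every required field present."
        }
    ]
}

with open("sample-node-list.json", "w", encoding="utf-8") as f:
    json.dump(sample, f, indent=4)

# Expected to exit 0 and report a passed validation, since all required fields
# are present and the GitHub URL matches https://github.com/{author}/{reponame}.
subprocess.run([sys.executable, "json-checker.py", "sample-node-list.json"], check=True)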
@@ -1,5 +1,25 @@
{
"custom_nodes": [
{
"author": "charlierz",
"title": "comfyui-charlierz",
"reference": "https://github.com/charlierz/comfyui-charlierz",
"files": [
"https://github.com/charlierz/comfyui-charlierz"
],
"install_type": "git-clone",
"description": "NODES: BackgroundColor, ScaleDimensions"
},
{
"author": "lrzjason",
"title": "Comfyui-DiffusersUtils [WIP]",
"reference": "https://github.com/lrzjason/Comfyui-DiffusersUtils",
"files": [
"https://github.com/lrzjason/Comfyui-DiffusersUtils"
],
"install_type": "git-clone",
"description": "A set of nodes which provide flexible inference using diffusers in comfyui env. (Description by CC)"
},
{
"author": "anilstream",
"title": "ComfyUI-NanoBananaPro",
@@ -4779,7 +4799,8 @@
"description": "NODES: Face Detector Selector, YC Human Parts Ultra(Advance), Color Match (YC)"
},
{
"author": "virallover",
"author": "maizerrr",
"title": "comfyui-code-nodes",
"reference": "https://github.com/maizerrr/comfyui-code-nodes",
"files": [
"https://github.com/maizerrr/comfyui-code-nodes"

File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,5 +1,65 @@
{
"custom_nodes": [
{
"author": "cdanielp",
"title": "COMFYUI_PROMPTMODELS [REMOVED]",
"reference": "https://github.com/cdanielp/COMFYUI_PROMPTMODELS",
"files": [
"https://github.com/cdanielp/COMFYUI_PROMPTMODELS"
],
"install_type": "git-clone",
"description": "Custom nodes for ComfyUI by PROMPTMODELS."
},
{
"author": "mcrataobrabo",
"title": "comfyui-smart-lora-downloader - Automatically Fetch Missing LoRAs [REMOVED]",
"reference": "https://github.com/mcrataobrabo/comfyui-smart-lora-downloader",
"files": [
"https://github.com/mcrataobrabo/comfyui-smart-lora-downloader"
],
"install_type": "git-clone",
"description": "Automatically detect and download missing LoRAs for ComfyUI workflows"
},
{
"author": "KANAsho34636",
"title": "ComfyUI-NaturalSort-ImageLoader [REMOVED]",
"reference": "https://github.com/KANAsho34636/ComfyUI-NaturalSort-ImageLoader",
"files": [
"https://github.com/KANAsho34636/ComfyUI-NaturalSort-ImageLoader"
],
"install_type": "git-clone",
"description": "Custom image loader node supporting natural number sorting with multiple sort modes (natural, lexicographic, modification time, creation time, reverse natural). (Description by CC)"
},
{
"author": "johninthewinter",
"title": "comfyui-fal-flux-2-John [REMOVED]",
"reference": "https://github.com/johninthewinter/comfyui-fal-flux-2-John",
"files": [
"https://github.com/johninthewinter/comfyui-fal-flux-2-John"
],
"install_type": "git-clone",
"description": "Custom nodes for ComfyUI that integrate with fal.ai's FLUX 2 and FLUX 1 LoRA APIs for text-to-image generation."
},
{
"author": "LargeModGames",
"title": "ComfyUI LoRA Auto Downloader [REMOVED]",
"reference": "https://github.com/LargeModGames/comfyui-smart-lora-downloader",
"files": [
"https://github.com/LargeModGames/comfyui-smart-lora-downloader"
],
"install_type": "git-clone",
"description": "Automatically download missing LoRAs from CivitAI and detect missing LoRAs in workflows. Features smart directory detection and easy installation."
},
{
"author": "DiffusionWave",
"title": "PickResolution_DiffusionWave [DEPRECATED]",
"reference": "https://github.com/DiffusionWave/PickResolution_DiffusionWave",
"files": [
"https://github.com/DiffusionWave/PickResolution_DiffusionWave"
],
"install_type": "git-clone",
"description": "A custom node for ComfyUI that allows selecting a base resolution, applying a custom scaling value based on FLOAT (up to 10 decimal places), and adding an extra integer value. Outputs include both INT and FLOAT resolutions, making it perfect for you to play around with."
},
{
"author": "geltz",
"title": "ComfyUI-geltz [REMOVED]",
@@ -1,5 +1,190 @@
{
"custom_nodes": [
{
"author": "akaugun",
"title": "comfyui-lora-hook-trigger",
"reference": "https://github.com/akaugun/comfyui-lora-hook-trigger",
"files": [
"https://github.com/akaugun/comfyui-lora-hook-trigger"
],
"install_type": "git-clone",
"description": "A clean and simple ComfyUI custom node that creates a LoRA Hook and automatically finds and loads trigger TXT files placed in a folder named after the LoRA file."
},
{
"author": "tumbowungus",
"title": "MultiMaskCouple",
"reference": "https://github.com/tumbowungus/MultiMaskCouple",
"files": [
"https://github.com/tumbowungus/MultiMaskCouple"
],
"install_type": "git-clone",
"description": "A custom node for ComfyUI which simplifies the process of masking multiple prompts, making it easier to manage scenes with multiple distinct characters."
},
{
"author": "fudosanit",
"title": "ComfyUI-Random-Resolution",
"reference": "https://github.com/fudosanit/ComfyUI-Random-Resolution",
"files": [
"https://github.com/fudosanit/ComfyUI-Random-Resolution"
],
"install_type": "git-clone",
"description": "A custom node for ComfyUI. Based on the specified resolution, it randomly selects and outputs one of three patterns: original orientation, swapped width/height, or a square format. (Description by CC)"
},
{
"author": "SuLU-K",
"title": "comfyui-easy-sam3-tools",
"reference": "https://github.com/SuLU-K/comfyui-easy-sam3-tools",
"files": [
"https://github.com/SuLU-K/comfyui-easy-sam3-tools"
],
"install_type": "git-clone",
"description": "Custom nodes for building SAM3-centric editing pipelines inside ComfyUI, featuring mask merging, bounding box rendering, and DOM-based interactive editing. (Description by CC)"
},
{
"author": "EMkrtchyan",
"title": "ComfyUI-NormalsToDepth",
"reference": "https://github.com/EMkrtchyan/ComfyUI-NormalsToDepth",
"files": [
"https://github.com/EMkrtchyan/ComfyUI-NormalsToDepth"
],
"install_type": "git-clone",
"description": "Converts normal maps to depth maps for use in ComfyUI. (Description by CC)"
},

{
"author": "jeankassio",
"title": "JK AceStep Nodes",
"id": "JK-AceStep-Nodes",
"reference": "https://github.com/jeankassio/JK-AceStep-Nodes",
"files": [
"https://github.com/jeankassio/JK-AceStep-Nodes"
],
"install_type": "git-clone",
"description": "Advanced nodes optimized for ACE-Step audio generation in ComfyUI."
},
{
"author": "a-und-b",
"title": "ComfyUI_AB_Wildcard",
"reference": "https://github.com/a-und-b/ComfyUI_AB_Wildcard",
"files": [
"https://github.com/a-und-b/ComfyUI_AB_Wildcard"
],
"install_type": "git-clone",
"description": "Simple node for advanced wildcard text processing. Supports variables, conditionals, tag aggregation, weighted selection, and deep nesting. Lightweight, fast, zero external dependencies."
},
{
"author": "RamonGuthrie",
"title": "ComfyUI-RBG-SmartSeedVariance",
"reference": "https://github.com/RamonGuthrie/ComfyUI-RBG-SmartSeedVariance",
"files": [
"https://github.com/RamonGuthrie/ComfyUI-RBG-SmartSeedVariance"
],
"install_type": "git-clone",
"description": "Advanced seed diversity enhancement for ComfyUI with intelligent noise injection and directional biasing."
},
{
"author": "theshubzworld",
"title": "ComfyUI-NvidiaCaptioner",
"reference": "https://github.com/theshubzworld/ComfyUI-NvidiaCaptioner",
"files": [
"https://github.com/theshubzworld/ComfyUI-NvidiaCaptioner"
],
"install_type": "git-clone",
"description": "A ComfyUI node for generating rich, detailed captions for images using NVIDIA's vision models. Supports batch processing, multiple captioning styles, and includes built-in caching for efficient workflows."
},
{
"author": "theshubzworld",
"title": "ComfyUI-Universal-Latent",
"reference": "https://github.com/theshubzworld/ComfyUI-Universal-Latent",
"files": [
"https://github.com/theshubzworld/ComfyUI-Universal-Latent"
],
"install_type": "git-clone",
"description": "Enhanced empty latent node with extended aspect ratio support for ComfyUI"
},
{
"author": "lrzjason",
"title": "Comfyui-LatentUtils",
"reference": "https://github.com/lrzjason/Comfyui-LatentUtils",
"files": [
"https://github.com/lrzjason/Comfyui-LatentUtils"
],
"install_type": "git-clone",
"description": "Custom ComfyUI node performing selective latent denoising and detail enhancement using Fourier Transform (FFT) to separate and enhance image frequencies while suppressing noise. (Description by CC)"
},
{
"author": "aTanguay",
"title": "ComfyUI_Detonate",
"reference": "https://github.com/aTanguay/ComfyUI_Detonate",
"files": [
"https://github.com/aTanguay/ComfyUI_Detonate"
],
"install_type": "git-clone",
"description": "Professional compositing nodes for ComfyUI - bringing Nuke and Fusion workflows to AI-powered image generation"
},
{
"author": "huihuihuiz",
"title": "LoRA Downloader for ComfyUI",
"id": "lora_downloader",
"reference": "https://github.com/huihuihuiz/lora_downloader",
"files": [
"https://github.com/huihuihuiz/lora_downloader"
],
"install_type": "git-clone",
"description": "A ComfyUI custom node for downloading and managing LoRA models directly within the UI."
},
{
"author": "lovelybbq",
"title": "ComfyUI Custom Node Color",
"reference": "https://github.com/lovelybbq/comfyui-custom-node-color",
"files": [
"https://github.com/lovelybbq/comfyui-custom-node-color"
],
"install_type": "git-clone",
"description": "A modern GUI-based color picker for ComfyUI nodes. Features visual spectrum, HEX/RGB inputs, eyedropper tool, and favorite colors support."
},
{
"author": "Moeblack",
"title": "ComfyUI-SimpleChat",
"reference": "https://github.com/Moeblack/ComfyUI-SimpleChat",
"files": [
"https://github.com/Moeblack/ComfyUI-SimpleChat"
],
"install_type": "git-clone",
"description": "Simple, no-nonsense LLM chat nodes for ComfyUI. Support OpenAI, Claude, Gemini and NoASS Roleplay.",
"tags": [
"LLM",
"chat",
"openai",
"claude",
"gemini",
"roleplay",
"noass"
]
},
{
"author": "Lord Lethris",
"title": "Dia2 TTS & Captions Generators for ComfyUI",
"id": "dia2_tts_captions",
"reference": "https://github.com/lord-lethris/ComfyUI-lethris-dia2",
"files": [
"https://github.com/lord-lethris/ComfyUI-lethris-dia2"
],
"install_type": "Git-Clone",
"description": "This package provides two ComfyUI nodes: 🗣️ Dia2 TTS Generator for text-to-speech using Dia2-2B, and 💬 Dia2 Captions Generator to convert TTS timestamps into SRT/SSA/VTT subtitles. Includes example workflow and voice samples. GPU users require CUDA 12.8+."
},
{
"author": "Braeden90000",
"title": "ComfyUI Load Image URL",
"id": "load-image-url",
"reference": "https://github.com/Braeden90000/comfyui-load-image-url",
"files": [
"https://github.com/Braeden90000/comfyui-load-image-url"
],
"pip": ["requests"],
"install_type": "git-clone",
"description": "Load images from files or URLs with live preview and source switching."
},
{
"author": "purzbeats",
"title": "ComfyUI-Purz",
@@ -577,161 +762,6 @@
],
"install_type": "git-clone",
"description": "ShakaNodes: Utility tools for ComfyUI"
},
{
"author": "Matthew-X",
"title": "Workflow Importer",
"id": "comfyui-workflow_importer",
"reference": "https://github.com/Matthew-X/comfyui-workflow_importer",
"files": [
"https://github.com/Matthew-X/comfyui-workflow_importer"
],
"install_type": "git-clone",
"js_path": "workflow_importer",
"description": "Import ComfyUI workflows from images with embedded metadata. Adds a toolbar Import button, drag-and-drop dialog and Ctrl+Shift+I shortcut. Supports legacy and new UIs, common image formats, opens each image in a new workflow tab."
},
{
"author": "ameyukisora",
"title": "ComfyUI Empty Latent Advanced",
"reference": "https://github.com/ameyukisora/ComfyUI-Empty-Latent-Advanced",
"files": [
"https://github.com/ameyukisora/ComfyUI-Empty-Latent-Advanced"
],
"install_type": "git-clone",
"description": "An advanced Empty Latent selector with visual presets and foldable UI."
},
{
"author": "KLL535",
"title": "ComfyUI_Simple_Qwen3-VL-gguf",
"reference": "https://github.com/KLL535/ComfyUI_Simple_Qwen3-VL-gguf",
"files": [
"https://github.com/KLL535/ComfyUI_Simple_Qwen3-VL-gguf"
],
"install_type": "git-clone",
"description": "Simple Qwen3-VL gguf LLM model loader"
},
{
"author": "sirouk",
"title": "chutes-comfyui-node",
"reference": "https://github.com/sirouk/chutes-comfyui-node",
"files": [
"https://github.com/sirouk/chutes-comfyui-node"
],
"install_type": "git-clone",
"description": "ComfyUI custom nodes for Chutes.ai video generation API"
},
{
"author": "1038lab",
"title": "ComfyUI-QwenVL",
"reference": "https://github.com/1038lab/ComfyUI-QwenVL",
"files": [
"https://github.com/1038lab/ComfyUI-QwenVL"
],
"install_type": "git-clone",
"description": "ComfyUI-QwenVL custom node: Integrates the Qwen-VL series, including Qwen2.5-VL and the latest Qwen3-VL, to enable advanced multimodal AI for text generation, image understanding, and video analysis."
},
{
"author": "smthemex",
"title": "ComfyUI_GRAG_Image_Editing",
"reference": "https://github.com/smthemex/ComfyUI_GRAG_Image_Editing",
"files": [
"https://github.com/smthemex/ComfyUI_GRAG_Image_Editing"
],
"install_type": "git-clone",
"description": "GRAG-Image-Editing : Group-Relative Attention Guidance for Image Editing,you can try it in comfyUI"
},
{
"author": "dzy1128",
"title": "ComfyUI-VertexAI",
"reference": "https://github.com/dzy1128/ComfyUI-VertexAI",
"files": [
"https://github.com/dzy1128/ComfyUI-VertexAI"
],
"install_type": "git-clone",
"description": "Google Gemini image generation node for ComfyUI supporting up to 6 input images with customizable prompts and parameters. (Description by CC)"
},
{
"author": "mrf",
"title": "ComfyPoe",
"reference": "https://github.com/mrf/ComfyPoe",
"files": [
"https://github.com/mrf/ComfyPoe"
],
"install_type": "git-clone",
"description": "ComfyUI nodes for image generation via Poe API"
},
{
"author": "Tinuva88",
"title": "Comfy-UmiAI",
"reference": "https://github.com/Tinuva88/Comfy-UmiAI",
"files": [
"https://github.com/Tinuva88/Comfy-UmiAI"
],
"install_type": "git-clone",
"description": "A logic engine for ComfyUI prompts that transforms static prompts into dynamic, context-aware workflows with persistent variables, conditional logic, native LoRA loading, and external data fetching. (Description by CC)"
},
{
"author": "fredlef",
"title": "ComfyUI FSL Nodes",
"reference": "https://github.com/fredlef/Comfyui_FSL_Nodes",
"files": [
"https://github.com/fredlef/Comfyui_FSL_Nodes"
],
"install_type": "git-clone",
"tags": ["image", "chat", "gemini", "fsl"]
},
{
"author": "exedesign",
"title": "ComfyUI-Hunyuan3D-v3",
"id": "hunyuan3d-v3",
"reference": "https://github.com/exedesign/Hunyuan-3D-v3",
"files": [
"https://github.com/exedesign/Hunyuan-3D-v3"
],
"install_type": "git-clone",
"description": "Text-to-3D and Image-to-3D generation using Tencent Cloud Hunyuan 3D Global API. Supports PBR materials, face count control (40K-1.5M faces), and multiple generation types (Normal/LowPoly/Geometry/Sketch). Outputs industry-standard GLB format. Requires Tencent Cloud account with API access.",
"nodename_pattern": "Hunyuan",
"tags": ["3D", "generation", "text-to-3d", "image-to-3d", "hunyuan", "tencent"]
},
{
"author": "rjgoif",
"title": "Img Label Tools",
"id": "Img-Label-Tools",
"reference": "https://github.com/rjgoif/ComfyUI-Img-Label-Tools",
"install_type": "git-clone",
"description": "Tools to help annotate images for sharing on Reddit, Discord, etc."
},
{
"author": "akawana",
"title": "RGBYP Mask Editor (RGB + YP)",
"reference": "https://github.com/akawana/ComfyUI-RGBYP-Mask-Editor",
"files": [
"https://github.com/akawana/ComfyUI-RGBYP-Mask-Editor"
],
"install_type": "git-clone",
"description": "A JS editor for five-color masks (RGB + Yellow + Pink) that works with any nodes, with three helper nodes — RGBYPLoadImage, RGBYPMaskBridge, and RGBYPMaskToRegularMasks—for convenient RGBYP mask handling.",
"tags": ["utility", "mask", "rgb", "bridge"]
},
{
"author": "btitkin",
"title": "Random Prompt Builder",
"id": "random-prompt-builder",
"reference": "https://github.com/btitkin/ComfyUI-RandomPromptBuilder",
"files": [
"https://github.com/btitkin/ComfyUI-RandomPromptBuilder"
],
"install_type": "git-clone",
"description": "Advanced AI-powered prompt generation using local GGUF models. Generate detailed, structured prompts with character controls, style presets, and model-specific formatting for Pony, SDXL, Flux, and more. Supports GPU acceleration and runs completely offline."
},
{
"author": "erosDiffusion",
"title": "ComfyUI-EulerDiscreteScheduler",
"reference": "https://github.com/erosDiffusion/ComfyUI-EulerDiscreteScheduler",
"files": [
"https://github.com/erosDiffusion/ComfyUI-EulerDiscreteScheduler"
],
"install_type": "git-clone",
"description": "Noise Free images with Euler Discrete Scheduler in ComfyUI with Z-Image or other models"
}
]
}

File diff suppressed because it is too large
@@ -1,7 +1,7 @@
[project]
name = "comfyui-manager"
description = "ComfyUI-Manager provides features to install and manage custom nodes for ComfyUI, as well as various functionalities to assist with ComfyUI."
version = "3.38.1"
version = "3.38.3"
license = { file = "LICENSE.txt" }
dependencies = ["GitPython", "PyGithub", "matrix-nio", "transformers", "huggingface-hub>0.20", "typer", "rich", "typing-extensions", "toml", "uv", "chardet"]
scanner.py (609 lines)
@ -16,6 +16,108 @@ import sys

from urllib.parse import urlparse
from github import Github, Auth
from pathlib import Path
from typing import Set, Dict, Optional

# Scanner version for cache invalidation
SCANNER_VERSION = "2.0.11"  # Multi-layer detection: class existence + display names

# Cache for extract_nodes and extract_nodes_enhanced results
_extract_nodes_cache: Dict[str, Set[str]] = {}
_extract_nodes_enhanced_cache: Dict[str, Set[str]] = {}
_file_mtime_cache: Dict[Path, float] = {}


def _get_repo_root(file_path: Path) -> Optional[Path]:
    """Find the repository root directory containing .git"""
    current = file_path if file_path.is_dir() else file_path.parent
    while current != current.parent:
        if (current / ".git").exists():
            return current
        current = current.parent
    return None


def _get_repo_hash(repo_path: Path) -> str:
    """Get git commit hash or fallback identifier"""
    git_dir = repo_path / ".git"
    if not git_dir.exists():
        return ""

    try:
        # Read HEAD to get current commit
        head_file = git_dir / "HEAD"
        if head_file.exists():
            head_content = head_file.read_text().strip()
            if head_content.startswith("ref:"):
                # HEAD points to a ref
                ref_path = git_dir / head_content[5:].strip()
                if ref_path.exists():
                    commit_hash = ref_path.read_text().strip()
                    return commit_hash[:16]  # First 16 chars
            else:
                # Detached HEAD
                return head_content[:16]
    except:
        pass

    return ""
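A brief, illustrative trace of how _get_repo_hash resolves the identifier; the paths and values here are hypothetical, not taken from a real checkout:

# .git/HEAD contains "ref: refs/heads/main"          -> read .git/refs/heads/main
# .git/refs/heads/main contains the full commit hash -> return its first 16 characters
# detached HEAD: .git/HEAD holds the hash directly   -> same 16-character truncation
# any read error                                     -> "" (the cache simply misses)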
def _load_per_repo_cache(repo_path: Path) -> Optional[tuple]:
    """Load nodes and metadata from per-repo cache

    Returns:
        tuple: (nodes_set, metadata_dict) or None if cache invalid
    """
    cache_file = repo_path / ".git" / "nodecache.json"

    if not cache_file.exists():
        return None

    try:
        with open(cache_file, 'r') as f:
            cache_data = json.load(f)

        # Verify scanner version
        if cache_data.get('scanner_version') != SCANNER_VERSION:
            return None

        # Verify git hash
        current_hash = _get_repo_hash(repo_path)
        if cache_data.get('git_hash') != current_hash:
            return None

        # Return nodes and metadata
        nodes = cache_data.get('nodes', [])
        metadata = cache_data.get('metadata', {})
        return (set(nodes) if nodes else set(), metadata)

    except:
        return None


def _save_per_repo_cache(repo_path: Path, all_nodes: Set[str], metadata: dict = None):
    """Save nodes and metadata to per-repo cache"""
    cache_file = repo_path / ".git" / "nodecache.json"

    if not cache_file.parent.exists():
        return

    git_hash = _get_repo_hash(repo_path)
    cache_data = {
        "scanner_version": SCANNER_VERSION,
        "git_hash": git_hash,
        "scanned_at": datetime.datetime.now().isoformat(),
        "nodes": sorted(list(all_nodes)),
        "metadata": metadata if metadata else {}
    }

    try:
        with open(cache_file, 'w') as f:
            json.dump(cache_data, f, indent=2)
    except:
        pass  # Silently fail - cache is optional
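For reference, a sketch of the payload that _save_per_repo_cache writes to .git/nodecache.json and _load_per_repo_cache validates; all values below are illustrative placeholders, not data from a real repository:

example_nodecache = {
    "scanner_version": "2.0.11",                # must equal SCANNER_VERSION on load
    "git_hash": "0123456789abcdef",             # first 16 chars of the HEAD commit
    "scanned_at": "2025-01-01T00:00:00",        # datetime.datetime.now().isoformat()
    "nodes": ["ExampleNodeA", "ExampleNodeB"],  # sorted node names
    "metadata": {"title": "Example Pack"}       # @title/@author style metadata
}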
def download_url(url, dest_folder, filename=None):

@ -51,11 +153,12 @@ Examples:
  # Standard mode
  python3 scanner.py
  python3 scanner.py --skip-update
  python3 scanner.py --skip-all --force-rescan

  # Scan-only mode
  python3 scanner.py --scan-only temp-urls-clean.list
  python3 scanner.py --scan-only urls.list --temp-dir /custom/temp
  python3 scanner.py --scan-only urls.list --skip-update
  python3 scanner.py --scan-only urls.list --skip-update --force-rescan
'''
)

@ -69,6 +172,8 @@ Examples:
                    help='Skip GitHub stats collection')
parser.add_argument('--skip-all', action='store_true',
                    help='Skip all update operations')
parser.add_argument('--force-rescan', action='store_true',
                    help='Force rescan all nodes (ignore cache)')

# Backward compatibility: positional argument for temp_dir
parser.add_argument('temp_dir_positional', nargs='?', metavar='TEMP_DIR',

@ -94,6 +199,11 @@ parse_cnt = 0
def extract_nodes(code_text):
    global parse_cnt

    # Check cache first
    cache_key = hash(code_text)
    if cache_key in _extract_nodes_cache:
        return _extract_nodes_cache[cache_key].copy()

    try:
        if parse_cnt % 100 == 0:
            print(".", end="", flush=True)

@ -128,12 +238,458 @@ def extract_nodes(code_text):
                        if key is not None and isinstance(key.value, str):
                            s.add(key.value.strip())

            # Cache the result
            _extract_nodes_cache[cache_key] = s
            return s
        else:
            # Cache empty result
            _extract_nodes_cache[cache_key] = set()
            return set()
    except:
        # Cache empty result on error
        _extract_nodes_cache[cache_key] = set()
        return set()
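A minimal usage sketch of the memoized extract_nodes; the sample source string and node name are hypothetical:

sample_src = 'NODE_CLASS_MAPPINGS = {"ExampleNode": ExampleNode}'  # hypothetical source text
first = extract_nodes(sample_src)    # parsed once; result stored under hash(sample_src)
second = extract_nodes(sample_src)   # cache hit; returned as a .copy() so callers cannot mutate the cached set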
def extract_nodes_from_repo(repo_path: Path, verbose: bool = False, force_rescan: bool = False) -> tuple:
    """
    Extract all nodes and metadata from a repository with per-repo caching.

    Automatically caches results in .git/nodecache.json.
    Cache is invalidated when:
    - Git commit hash changes
    - Scanner version changes
    - force_rescan flag is True

    Args:
        repo_path: Path to repository root
        verbose: If True, print UI-only extension detection messages
        force_rescan: If True, ignore cache and force fresh scan

    Returns:
        tuple: (nodes_set, metadata_dict)
    """
    # Ensure path is absolute
    repo_path = repo_path.resolve()

    # Check per-repo cache first (unless force_rescan is True)
    if not force_rescan:
        cached_result = _load_per_repo_cache(repo_path)
        if cached_result is not None:
            return cached_result

    # Cache miss - scan all .py files
    all_nodes = set()
    all_metadata = {}
    py_files = list(repo_path.rglob("*.py"))

    # Filter out __pycache__, .git, and other hidden directories
    filtered_files = []
    for f in py_files:
        try:
            rel_path = f.relative_to(repo_path)
            # Skip __pycache__, .git, and any directory starting with .
            if '__pycache__' not in str(rel_path) and not any(part.startswith('.') for part in rel_path.parts):
                filtered_files.append(f)
        except:
            continue
    py_files = filtered_files

    for py_file in py_files:
        try:
            # Read file with proper encoding
            with open(py_file, 'r', encoding='utf-8', errors='ignore') as f:
                code = f.read()

            if code:
                # Extract nodes using SAME logic as scan_in_file
                # V1 nodes (enhanced with fallback patterns)
                nodes = extract_nodes_enhanced(code, py_file, visited=set(), verbose=verbose)
                all_nodes.update(nodes)

                # V3 nodes detection
                v3_nodes = extract_v3_nodes(code)
                all_nodes.update(v3_nodes)

                # Dict parsing - exclude commented NODE_CLASS_MAPPINGS lines
                pattern = r"_CLASS_MAPPINGS\s*(?::\s*\w+\s*)?=\s*(?:\\\s*)?{([^}]*)}"
                regex = re.compile(pattern, re.MULTILINE | re.DOTALL)

                for match_obj in regex.finditer(code):
                    # Get the line where NODE_CLASS_MAPPINGS is defined
                    match_start = match_obj.start()
                    line_start = code.rfind('\n', 0, match_start) + 1
                    line_end = code.find('\n', match_start)
                    if line_end == -1:
                        line_end = len(code)
                    line = code[line_start:line_end]

                    # Skip if line starts with # (commented)
                    if re.match(r'^\s*#', line):
                        continue

                    match = match_obj.group(1)

                    # Filter out commented lines from dict content
                    match_lines = match.split('\n')
                    match_filtered = '\n'.join(
                        line for line in match_lines
                        if not re.match(r'^\s*#', line)
                    )

                    # Extract key-value pairs with double quotes
                    key_value_pairs = re.findall(r"\"([^\"]*)\"\s*:\s*([^,\n]*)", match_filtered)
                    for key, value in key_value_pairs:
                        all_nodes.add(key.strip())

                    # Extract key-value pairs with single quotes
                    key_value_pairs = re.findall(r"'([^']*)'\s*:\s*([^,\n]*)", match_filtered)
                    for key, value in key_value_pairs:
                        all_nodes.add(key.strip())

                # Handle .update() pattern (AFTER comment removal)
                code_cleaned = re.sub(r'^#.*?$', '', code, flags=re.MULTILINE)

                update_pattern = r"_CLASS_MAPPINGS\.update\s*\(\s*{([^}]*)}\s*\)"
                update_match = re.search(update_pattern, code_cleaned, re.DOTALL)
                if update_match:
                    update_dict_text = update_match.group(1)
                    # Extract key-value pairs (double quotes)
                    update_pairs = re.findall(r'"([^"]*)"\s*:\s*([^,\n]*)', update_dict_text)
                    for key, value in update_pairs:
                        all_nodes.add(key.strip())
                    # Extract key-value pairs (single quotes)
                    update_pairs_single = re.findall(r"'([^']*)'\s*:\s*([^,\n]*)", update_dict_text)
                    for key, value in update_pairs_single:
                        all_nodes.add(key.strip())

                # Additional regex patterns (AFTER comment removal)
                patterns = [
                    r'^[^=]*_CLASS_MAPPINGS\["(.*?)"\]',
                    r'^[^=]*_CLASS_MAPPINGS\[\'(.*?)\'\]',
                    r'@register_node\("(.+)",\s*\".+"\)',
                    r'"(\w+)"\s*:\s*{"class":\s*\w+\s*'
                ]

                for pattern in patterns:
                    keys = re.findall(pattern, code_cleaned)
                    all_nodes.update(key.strip() for key in keys)

            # Extract metadata from this file
            metadata = extract_metadata_only(str(py_file))
            all_metadata.update(metadata)
        except Exception:
            # Silently skip files that can't be read
            continue

    # Save to per-repo cache
    _save_per_repo_cache(repo_path, all_nodes, all_metadata)

    return (all_nodes, all_metadata)
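A usage sketch, assuming temp/author_repo is a locally cloned custom-node repository (the path is hypothetical):

repo = Path("temp/author_repo")  # hypothetical clone location
nodes, meta = extract_nodes_from_repo(repo, verbose=False, force_rescan=False)
# First call scans every .py file and writes temp/author_repo/.git/nodecache.json;
# later calls return the cached result until the commit hash or SCANNER_VERSION changes.
print(sorted(nodes), meta.get("title"))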
def _verify_class_exists(node_name: str, code_text: str, file_path: Optional[Path] = None) -> tuple[bool, Optional[str], Optional[int]]:
    """
    Verify that a node class exists and has ComfyUI node structure.

    Returns: (exists: bool, file_path: str, line_number: int)

    A valid ComfyUI node must have:
    - Class definition (not commented)
    - At least one of: INPUT_TYPES, RETURN_TYPES, FUNCTION method/attribute
    """
    try:
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=SyntaxWarning)
            tree = ast.parse(code_text)
    except:
        return (False, None, None)

    for node in ast.walk(tree):
        if isinstance(node, ast.ClassDef):
            if node.name == node_name or node.name.replace('_', '') == node_name.replace('_', ''):
                # Found class definition - check if it has ComfyUI interface
                has_input_types = False
                has_return_types = False
                has_function = False

                for item in node.body:
                    # Check for INPUT_TYPES method
                    if isinstance(item, ast.FunctionDef) and item.name == 'INPUT_TYPES':
                        has_input_types = True
                    # Check for RETURN_TYPES attribute
                    elif isinstance(item, ast.Assign):
                        for target in item.targets:
                            if isinstance(target, ast.Name):
                                if target.id == 'RETURN_TYPES':
                                    has_return_types = True
                                elif target.id == 'FUNCTION':
                                    has_function = True
                    # Check for FUNCTION method
                    elif isinstance(item, ast.FunctionDef):
                        has_function = True

                # Valid if has any ComfyUI signature
                if has_input_types or has_return_types or has_function:
                    file_str = str(file_path) if file_path else None
                    return (True, file_str, node.lineno)

    return (False, None, None)
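A minimal sketch of a class shape that _verify_class_exists treats as a ComfyUI node; the class name and types are illustrative, not taken from a real node pack:

class ExampleNode:
    @classmethod
    def INPUT_TYPES(cls):        # any one of the three markers below is enough
        return {"required": {}}

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "run"

    def run(self):
        return (None,)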
def _extract_display_name_mappings(code_text: str) -> Set[str]:
    """
    Extract node names from NODE_DISPLAY_NAME_MAPPINGS.

    Pattern:
        NODE_DISPLAY_NAME_MAPPINGS = {
            "node_key": "Display Name",
            ...
        }

    Returns:
        Set of node keys from NODE_DISPLAY_NAME_MAPPINGS
    """
    try:
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=SyntaxWarning)
            tree = ast.parse(code_text)
    except:
        return set()

    nodes = set()

    for node in tree.body:
        if isinstance(node, ast.Assign):
            for target in node.targets:
                if isinstance(target, ast.Name) and target.id == 'NODE_DISPLAY_NAME_MAPPINGS':
                    if isinstance(node.value, ast.Dict):
                        for key in node.value.keys:
                            if isinstance(key, ast.Constant) and isinstance(key.value, str):
                                nodes.add(key.value.strip())

    return nodes
def extract_nodes_enhanced(
    code_text: str,
    file_path: Optional[Path] = None,
    visited: Optional[Set[Path]] = None,
    verbose: bool = False
) -> Set[str]:
    """
    Enhanced node extraction with multi-layer detection system.

    Scanner 2.0.11 - Comprehensive detection strategy:
    - Phase 1: NODE_CLASS_MAPPINGS dict literal
    - Phase 2: Class.NAME attribute access (e.g., FreeChat.NAME)
    - Phase 3: Item assignment (NODE_CLASS_MAPPINGS["key"] = value)
    - Phase 4: NODE_DISPLAY_NAME_MAPPINGS cross-reference
    - Phase 5: Class existence verification (detects active classes even if registration commented)
    - Phase 6: Empty dict detection (UI-only extensions, logging only)

    Fixed Bugs:
    - Scanner 2.0.9: Fallback cascade prevented Phase 3 execution
    - Scanner 2.0.10: Missed active classes with commented registrations (15 false negatives)

    Args:
        code_text: Python source code
        file_path: Path to file (for logging and caching)
        visited: Visited paths (for circular import prevention)
        verbose: If True, print UI-only extension detection messages

    Returns:
        Set of node names (union of all detected patterns)
    """
    # Check file-based cache if file_path provided
    if file_path is not None:
        try:
            file_path_obj = Path(file_path) if not isinstance(file_path, Path) else file_path
            if file_path_obj.exists():
                current_mtime = file_path_obj.stat().st_mtime

                # Check if we have cached result with matching mtime and scanner version
                if file_path_obj in _file_mtime_cache:
                    cached_mtime = _file_mtime_cache[file_path_obj]
                    cache_key = (str(file_path_obj), cached_mtime, SCANNER_VERSION)

                    if current_mtime == cached_mtime and cache_key in _extract_nodes_enhanced_cache:
                        return _extract_nodes_enhanced_cache[cache_key].copy()
        except:
            pass  # Ignore cache errors, proceed with normal execution

    # Suppress warnings from AST parsing
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', category=SyntaxWarning)
        warnings.filterwarnings('ignore', category=DeprecationWarning)

        # Phase 1: Original extract_nodes() - dict literal
        phase1_nodes = extract_nodes(code_text)

        # Phase 2: Class.NAME pattern
        if visited is None:
            visited = set()
        phase2_nodes = _fallback_classname_resolver(code_text, file_path)

        # Phase 3: Item assignment pattern
        phase3_nodes = _fallback_item_assignment(code_text)

        # Phase 4: NODE_DISPLAY_NAME_MAPPINGS cross-reference (NEW in 2.0.11)
        # This catches nodes that are in display names but not in NODE_CLASS_MAPPINGS
        phase4_nodes = _extract_display_name_mappings(code_text)

        # Phase 5: Class existence verification ONLY for display name candidates (NEW in 2.0.11)
        # This phase is CONSERVATIVE - only verify classes that appear in display names
        # This catches the specific Scanner 2.0.10 bug pattern:
        # - NODE_CLASS_MAPPINGS registration is commented
        # - NODE_DISPLAY_NAME_MAPPINGS still has the entry
        # - Class implementation exists
        # Example: Bjornulf_ollamaLoader in Bjornulf_custom_nodes
        phase5_nodes = set()
        for node_name in phase4_nodes:
            # Only check classes that appear in display names but not in registrations
            if node_name not in (phase1_nodes | phase2_nodes | phase3_nodes):
                exists, _, _ = _verify_class_exists(node_name, code_text, file_path)
                if exists:
                    phase5_nodes.add(node_name)

        # Union all results (FIX: Scanner 2.0.9 bug + Scanner 2.0.10 bug)
        # 2.0.9: Used early return which missed Phase 3 nodes
        # 2.0.10: Only checked registrations, missed classes referenced in display names
        all_nodes = phase1_nodes | phase2_nodes | phase3_nodes | phase4_nodes | phase5_nodes

        # Phase 6: Empty dict detector (logging only, doesn't add nodes)
        if not all_nodes:
            _fallback_empty_dict_detector(code_text, file_path, verbose)

        # Cache the result
        if file_path is not None:
            try:
                file_path_obj = Path(file_path) if not isinstance(file_path, Path) else file_path
                if file_path_obj.exists():
                    current_mtime = file_path_obj.stat().st_mtime
                    cache_key = (str(file_path_obj), current_mtime, SCANNER_VERSION)
                    _extract_nodes_enhanced_cache[cache_key] = all_nodes
                    _file_mtime_cache[file_path_obj] = current_mtime
            except:
                pass

        return all_nodes
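A short usage sketch of the multi-phase extractor on a single file; the path is hypothetical:

src_path = Path("custom_nodes/example_pack/nodes.py")  # hypothetical file
source = src_path.read_text(encoding="utf-8", errors="ignore")
names = extract_nodes_enhanced(source, file_path=src_path, visited=set(), verbose=True)
# Union of phases 1-5; a second call with an unchanged mtime is served from
# _extract_nodes_enhanced_cache instead of re-parsing the file.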
def _fallback_classname_resolver(code_text: str, file_path: Optional[Path]) -> Set[str]:
    """
    Detect Class.NAME pattern in NODE_CLASS_MAPPINGS.

    Pattern:
        NODE_CLASS_MAPPINGS = {
            FreeChat.NAME: FreeChat,
            PaidChat.NAME: PaidChat
        }
    """
    try:
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=SyntaxWarning)
            parsed = ast.parse(code_text)
    except:
        return set()

    nodes = set()

    for node in parsed.body:
        if isinstance(node, ast.Assign):
            for target in node.targets:
                if isinstance(target, ast.Name) and target.id == 'NODE_CLASS_MAPPINGS':
                    if isinstance(node.value, ast.Dict):
                        for key in node.value.keys:
                            # Detect Class.NAME pattern
                            if isinstance(key, ast.Attribute):
                                if isinstance(key.value, ast.Name):
                                    # Use class name as node name
                                    nodes.add(key.value.id)
                            # Also handle literal strings
                            elif isinstance(key, ast.Constant) and isinstance(key.value, str):
                                nodes.add(key.value.strip())

    return nodes


def _fallback_item_assignment(code_text: str) -> Set[str]:
    """
    Detect item assignment pattern.

    Pattern:
        NODE_CLASS_MAPPINGS = {}
        NODE_CLASS_MAPPINGS["MyNode"] = MyNode
    """
    try:
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=SyntaxWarning)
            parsed = ast.parse(code_text)
    except:
        return set()

    nodes = set()

    for node in ast.walk(parsed):
        if isinstance(node, ast.Assign):
            for target in node.targets:
                if isinstance(target, ast.Subscript):
                    if (isinstance(target.value, ast.Name) and
                            target.value.id in ['NODE_CLASS_MAPPINGS', 'NODE_CONFIG']):
                        # Extract key
                        if isinstance(target.slice, ast.Constant):
                            if isinstance(target.slice.value, str):
                                nodes.add(target.slice.value)

    return nodes
def _extract_repo_name(file_path: Path) -> str:
    """
    Extract repository name from file path.

    Path structure: /home/rho/.tmp/analysis/temp/{author}_{reponame}/{path/to/file.py}
    Returns: {author}_{reponame} or filename if extraction fails
    """
    try:
        parts = file_path.parts
        # Find 'temp' directory in path
        if 'temp' in parts:
            temp_idx = parts.index('temp')
            if temp_idx + 1 < len(parts):
                # Next part after 'temp' is the repo directory
                return parts[temp_idx + 1]
    except (ValueError, IndexError):
        pass

    # Fallback to filename if extraction fails
    return file_path.name if hasattr(file_path, 'name') else str(file_path)


def _fallback_empty_dict_detector(code_text: str, file_path: Optional[Path], verbose: bool = False) -> None:
    """
    Detect empty NODE_CLASS_MAPPINGS (UI-only extensions).
    Logs for documentation purposes only (when verbose=True).

    Args:
        code_text: Python source code to analyze
        file_path: Path to the file being analyzed
        verbose: If True, print detection messages
    """
    empty_patterns = [
        'NODE_CLASS_MAPPINGS = {}',
        'NODE_CLASS_MAPPINGS={}',
    ]

    code_normalized = code_text.replace(' ', '').replace('\n', '')

    for pattern in empty_patterns:
        pattern_normalized = pattern.replace(' ', '')
        if pattern_normalized in code_normalized:
            if file_path and verbose:
                repo_name = _extract_repo_name(file_path)
                print(f"Info: UI-only extension (empty NODE_CLASS_MAPPINGS): {repo_name}")
            return


def has_comfy_node_base(class_node):
    """Check if class inherits from io.ComfyNode or ComfyNode"""
@ -229,6 +785,25 @@ def extract_v3_nodes(code_text):


# scan
def extract_metadata_only(filename):
    """Extract only metadata (@author, @title, etc) without node scanning"""
    try:
        with open(filename, encoding='utf-8', errors='ignore') as file:
            code = file.read()

        metadata = {}
        lines = code.strip().split('\n')
        for line in lines:
            if line.startswith('@'):
                if line.startswith("@author:") or line.startswith("@title:") or line.startswith("@nickname:") or line.startswith("@description:"):
                    key, value = line[1:].strip().split(':', 1)
                    metadata[key.strip()] = value.strip()

        return metadata
    except:
        return {}


def scan_in_file(filename, is_builtin=False):
    global builtin_nodes

@ -242,8 +817,8 @@ def scan_in_file(filename, is_builtin=False):
    nodes = set()
    class_dict = {}

    # V1 nodes detection
    nodes |= extract_nodes(code)
    # V1 nodes detection (enhanced with fallback patterns)
    nodes |= extract_nodes_enhanced(code, file_path=Path(filename), visited=set())

    # V3 nodes detection
    nodes |= extract_v3_nodes(code)

@ -620,13 +1195,14 @@ def update_custom_nodes(scan_only_mode=False, url_list_file=None):
    return node_info


def gen_json(node_info, scan_only_mode=False):
def gen_json(node_info, scan_only_mode=False, force_rescan=False):
    """
    Generate extension-node-map.json from scanned node information

    Args:
        node_info (dict): Repository metadata mapping
        scan_only_mode (bool): If True, exclude metadata from output
        force_rescan (bool): If True, ignore cache and force rescan all nodes
    """
    # scan from .py file
    node_files, node_dirs = get_nodes(temp_dir)

@ -642,13 +1218,17 @@ def gen_json(node_info, scan_only_mode=False):
        py_files = get_py_file_paths(dirname)
        metadata = {}

        nodes = set()
        for py in py_files:
            nodes_in_file, metadata_in_file = scan_in_file(py, dirname == "ComfyUI")
            nodes.update(nodes_in_file)
            # Include metadata from .py files in both modes
            metadata.update(metadata_in_file)

        # Use per-repo cache for node AND metadata extraction
        try:
            nodes, metadata = extract_nodes_from_repo(Path(dirname), verbose=False, force_rescan=force_rescan)
        except:
            # Fallback to file-by-file scanning if extract_nodes_from_repo fails
            nodes = set()
            for py in py_files:
                nodes_in_file, metadata_in_file = scan_in_file(py, dirname == "ComfyUI")
                nodes.update(nodes_in_file)
                metadata.update(metadata_in_file)

        dirname = os.path.basename(dirname)

        if 'Jovimetrix' in dirname:
@ -810,11 +1390,14 @@ if __name__ == "__main__":
    print("\n# Generating 'extension-node-map.json'...\n")

    # Generate extension-node-map.json
    gen_json(updated_node_info, scan_only_mode)
    force_rescan = args.force_rescan if hasattr(args, 'force_rescan') else False
    if force_rescan:
        print("⚠️ Force rescan enabled - ignoring all cached results\n")
    gen_json(updated_node_info, scan_only_mode, force_rescan)

    print("\n✅ DONE.\n")

    if scan_only_mode:
        print("Output: extension-node-map.json (node mappings only)")
    else:
        print("Output: extension-node-map.json (full metadata)")