Improve support for extra models

This commit is contained in:
doctorpangloss 2024-05-01 16:58:29 -07:00
parent b94b90c1cc
commit 4d060f0555
4 changed files with 19 additions and 10 deletions

View File

@ -1,14 +1,14 @@
ComfyUI Distributed
ComfyUI LTS
=======
A vanilla, up-to-date fork of [ComfyUI](https://github.com/comfyanonymous/comfyui).
A vanilla, up-to-date fork of [ComfyUI](https://github.com/comfyanonymous/comfyui) intended for long-term support (LTS) from [Hidden Switch](https://hiddenswitch.com).
### New Features
- Run with `comfyui` in your command line.
- [Installable](#installing) via `pip`: `pip install git+https://github.com/hiddenswitch/ComfyUI.git`.
- [Distributed](#distributed-multi-process-and-multi-gpu-comfy) with support for multiple GPUs, multiple backends and frontends, including in containers, using RabbitMQ.
- [Installable custom nodes](#custom-nodes) via `pip`.
- [Installable custom nodes](#custom-nodes) via `pip`, including LTS support for forked nodes from the community.
- [New configuration options](#command-line-arguments) for directories, models and metrics.
- [API](#using-comfyui-as-an-api--programmatically) support, using the vanilla ComfyUI API and new API endpoints.
- [Embed](#embedded) ComfyUI as a library inside your Python application. No server or frontend needed.
@ -184,6 +184,12 @@ Clone the repository containing the custom nodes into `custom_nodes/` in your wo
### Installable Custom Nodes
These packages have been adapted to be installable with `pip` and download models to the correct places, using the **Known Models** functionality:
- **ControlNet Auxiliary Preprocessors**: `pip install git+https://github.com/AppMana/comfyui_controlnet_aux.git`.
- **LayerDiffuse**: `pip install git+https://github.com/hiddenswitch/ComfyUI-layerdiffuse.git`.
- **Extra Models**: `pip install git+https://github.com/AppMana/ComfyUI_ExtraModels.git`. You will need `xformers`.
Run `pip install git+https://github.com/owner/repository`, replacing the `git` repository with the installable custom nodes URL. This is just the GitHub URL.
## Authoring Custom Nodes

View File

@ -284,7 +284,12 @@ KNOWN_DIFF_CONTROLNETS = [
KNOWN_APPROX_VAES = [
HuggingFile("madebyollin/taesd", "taesd_decoder.safetensors"),
HuggingFile("madebyollin/taesdxl", "taesdxl_decoder.safetensors")
HuggingFile("madebyollin/taesdxl", "taesdxl_decoder.safetensors"),
]
KNOWN_VAES = [
HuggingFile("stabilityai/sdxl-vae", "sdxl_vae.safetensors"),
HuggingFile("stabilityai/sd-vae-ft-mse-original", "vae-ft-mse-840000-ema-pruned.safetensors"),
]

View File

@ -26,7 +26,7 @@ from ..cli_args import args
from ..cmd import folder_paths, latent_preview
from ..images import open_image
from ..model_downloader import get_filename_list_with_downloadable, get_or_download, KNOWN_CHECKPOINTS, \
KNOWN_CLIP_VISION_MODELS, KNOWN_GLIGEN_MODELS, KNOWN_UNCLIP_CHECKPOINTS, KNOWN_LORAS, KNOWN_CONTROLNETS, KNOWN_DIFF_CONTROLNETS
KNOWN_CLIP_VISION_MODELS, KNOWN_GLIGEN_MODELS, KNOWN_UNCLIP_CHECKPOINTS, KNOWN_LORAS, KNOWN_CONTROLNETS, KNOWN_DIFF_CONTROLNETS, KNOWN_VAES, KNOWN_APPROX_VAES
from ..nodes.common import MAX_RESOLUTION
from .. import controlnet
from ..open_exr import load_exr
@ -616,8 +616,8 @@ class LoraLoaderModelOnly(LoraLoader):
class VAELoader:
@staticmethod
def vae_list():
vaes = folder_paths.get_filename_list("vae")
approx_vaes = folder_paths.get_filename_list("vae_approx")
vaes = get_filename_list_with_downloadable("vae", KNOWN_VAES)
approx_vaes = get_filename_list_with_downloadable("vae_approx", KNOWN_APPROX_VAES)
sdxl_taesd_enc = False
sdxl_taesd_dec = False
sd1_taesd_enc = False
@ -673,7 +673,7 @@ class VAELoader:
if vae_name in ["taesd", "taesdxl"]:
sd_ = self.load_taesd(vae_name)
else:
vae_path = folder_paths.get_full_path("vae", vae_name)
vae_path = get_or_download("vae", vae_name, KNOWN_VAES)
sd_ = utils.load_torch_file(vae_path)
vae = sd.VAE(sd=sd_)
return (vae,)

View File

@ -41,8 +41,6 @@ The package index to torch built against CPU features.
"""
cpu_torch_index = ("https://download.pytorch.org/whl/cpu", "https://download.pytorch.org/whl/nightly/cpu")
# xformers not required for new torch
"""
Indicates if this is installing an editable (develop) mode package
"""