Mirror of https://github.com/comfyanonymous/ComfyUI.git

Improve support for extra models

parent b94b90c1cc
commit 4d060f0555
README.md: 12 changed lines
@@ -1,14 +1,14 @@
-ComfyUI Distributed
+ComfyUI LTS
 =======

-A vanilla, up-to-date fork of [ComfyUI](https://github.com/comfyanonymous/comfyui).
+A vanilla, up-to-date fork of [ComfyUI](https://github.com/comfyanonymous/comfyui) intended for long term support (LTS) from [Hidden Switch](https://hiddenswitch.com).

 ### New Features

 - Run with `comfyui` in your command line.
 - [Installable](#installing) via `pip`: `pip install git+https://github.com/hiddenswitch/ComfyUI.git`.
 - [Distributed](#distributed-multi-process-and-multi-gpu-comfy) with support for multiple GPUs, multiple backends and frontends, including in containers, using RabbitMQ.
-- [Installable custom nodes](#custom-nodes) via `pip`.
+- [Installable custom nodes](#custom-nodes) via `pip`, including LTS support for forked nodes from the community.
 - [New configuration options](#command-line-arguments) for directories, models and metrics.
 - [API](#using-comfyui-as-an-api--programmatically) support, using the vanilla ComfyUI API and new API endpoints.
 - [Embed](#embedded) ComfyUI as a library inside your Python application. No server or frontend needed.
@@ -184,6 +184,12 @@ Clone the repository containing the custom nodes into `custom_nodes/` in your wo

 ### Installable Custom Nodes

+These packages have been adapted to be installable with `pip` and download models to the correct places, using the **Known Models** functionality:
+
+- **ControlNet Auxiliary Preprocessors**: `pip install git+https://github.com/AppMana/comfyui_controlnet_aux.git`.
+- **LayerDiffuse**: `pip install git+https://github.com/hiddenswitch/ComfyUI-layerdiffuse.git`.
+- **Extra Models**: `pip install git+https://github.com/AppMana/ComfyUI_ExtraModels.git`. You will need `xformers`.
+
 Run `pip install git+https://github.com/owner/repository`, replacing the `git` repository with the installable custom nodes URL. This is just the GitHub URL.

 ## Authoring Custom Nodes
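The **Known Models** functionality referenced above is the same mechanism this commit extends for VAEs further down. As a rough sketch of the pattern a pip-installable node package can follow, using the helpers that appear elsewhere in this diff (`get_filename_list_with_downloadable`, `get_or_download`, `HuggingFile`); the node class, folder name, model entries, and the `HuggingFile` import location are illustrative assumptions, not taken from the real packages:

```python
# Illustrative sketch only: the node class, folder name, and model entries are
# made up; the helper functions are the ones used elsewhere in this commit.
from comfy.model_downloader import get_filename_list_with_downloadable, get_or_download
from comfy.model_downloader import HuggingFile  # assumed export location for HuggingFile

# Files this hypothetical package knows how to fetch if they are not on disk.
KNOWN_EXAMPLE_MODELS = [
    HuggingFile("some-org/some-repo", "example_model.safetensors"),
]


class ExampleModelLoader:
    @classmethod
    def INPUT_TYPES(cls):
        # Local files and downloadable "known" files appear in one dropdown.
        choices = get_filename_list_with_downloadable("checkpoints", KNOWN_EXAMPLE_MODELS)
        return {"required": {"model_name": (choices,)}}

    RETURN_TYPES = ("MODEL",)
    FUNCTION = "load"
    CATEGORY = "loaders"

    def load(self, model_name):
        # Resolves to a local path, downloading the file first if it is only "known".
        path = get_or_download("checkpoints", model_name, KNOWN_EXAMPLE_MODELS)
        model = ...  # load the checkpoint from `path` with the usual loaders
        return (model,)
```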
@@ -284,7 +284,12 @@ KNOWN_DIFF_CONTROLNETS = [

 KNOWN_APPROX_VAES = [
     HuggingFile("madebyollin/taesd", "taesd_decoder.safetensors"),
-    HuggingFile("madebyollin/taesdxl", "taesdxl_decoder.safetensors")
+    HuggingFile("madebyollin/taesdxl", "taesdxl_decoder.safetensors"),
+]
+
+KNOWN_VAES = [
+    HuggingFile("stabilityai/sdxl-vae", "sdxl_vae.safetensors"),
+    HuggingFile("stabilityai/sd-vae-ft-mse-original", "vae-ft-mse-840000-ema-pruned.safetensors"),
 ]

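Each `HuggingFile` pairs a Hugging Face repository id with a file inside it. Presumably the downloader fetches that file into the matching models folder the first time it is requested; the equivalent manual download with `huggingface_hub` would look roughly like this (the `local_dir` choice and the use of `hf_hub_download` are assumptions for illustration, not taken from this diff):

```python
# Manual equivalent of HuggingFile("stabilityai/sdxl-vae", "sdxl_vae.safetensors"),
# for illustration only; comfy.model_downloader handles this automatically.
from huggingface_hub import hf_hub_download

local_path = hf_hub_download(
    repo_id="stabilityai/sdxl-vae",
    filename="sdxl_vae.safetensors",
    local_dir="models/vae",  # assumption: ComfyUI's conventional vae folder
)
print(local_path)
```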
@@ -26,7 +26,7 @@ from ..cli_args import args
 from ..cmd import folder_paths, latent_preview
 from ..images import open_image
 from ..model_downloader import get_filename_list_with_downloadable, get_or_download, KNOWN_CHECKPOINTS, \
-    KNOWN_CLIP_VISION_MODELS, KNOWN_GLIGEN_MODELS, KNOWN_UNCLIP_CHECKPOINTS, KNOWN_LORAS, KNOWN_CONTROLNETS, KNOWN_DIFF_CONTROLNETS
+    KNOWN_CLIP_VISION_MODELS, KNOWN_GLIGEN_MODELS, KNOWN_UNCLIP_CHECKPOINTS, KNOWN_LORAS, KNOWN_CONTROLNETS, KNOWN_DIFF_CONTROLNETS, KNOWN_VAES, KNOWN_APPROX_VAES
 from ..nodes.common import MAX_RESOLUTION
 from .. import controlnet
 from ..open_exr import load_exr
@@ -616,8 +616,8 @@ class LoraLoaderModelOnly(LoraLoader):
 class VAELoader:
     @staticmethod
     def vae_list():
-        vaes = folder_paths.get_filename_list("vae")
-        approx_vaes = folder_paths.get_filename_list("vae_approx")
+        vaes = get_filename_list_with_downloadable("vae", KNOWN_VAES)
+        approx_vaes = get_filename_list_with_downloadable("vae_approx", KNOWN_APPROX_VAES)
         sdxl_taesd_enc = False
         sdxl_taesd_dec = False
         sd1_taesd_enc = False
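With this change the VAE dropdown is no longer limited to files already on disk: the known VAEs are offered as downloadable choices as well. A quick illustration of the difference (actual return values depend on what is in the local `vae` folder):

```python
# Sketch of the listing behaviour; exact contents depend on the local setup.
from comfy.cmd import folder_paths
from comfy.model_downloader import get_filename_list_with_downloadable, KNOWN_VAES

local_only = folder_paths.get_filename_list("vae")
with_known = get_filename_list_with_downloadable("vae", KNOWN_VAES)

# with_known should additionally contain "sdxl_vae.safetensors" and
# "vae-ft-mse-840000-ema-pruned.safetensors" even before they are downloaded.
print(sorted(set(with_known) - set(local_only)))
```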
@@ -673,7 +673,7 @@ class VAELoader:
         if vae_name in ["taesd", "taesdxl"]:
             sd_ = self.load_taesd(vae_name)
         else:
-            vae_path = folder_paths.get_full_path("vae", vae_name)
+            vae_path = get_or_download("vae", vae_name, KNOWN_VAES)
             sd_ = utils.load_torch_file(vae_path)
         vae = sd.VAE(sd=sd_)
         return (vae,)
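The loading path mirrors the listing change: `get_or_download` presumably resolves to the local file when it already exists and otherwise downloads the matching known file before returning a path, so selecting a not-yet-downloaded VAE no longer fails. A usage-side sketch:

```python
# Usage-side sketch: "sdxl_vae.safetensors" is listed in KNOWN_VAES, so this
# should return a usable local path, downloading the file first if needed.
from comfy.model_downloader import get_or_download, KNOWN_VAES

vae_path = get_or_download("vae", "sdxl_vae.safetensors", KNOWN_VAES)
print(vae_path)
```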
setup.py: 2 changed lines
@@ -41,8 +41,6 @@ The package index to torch built against CPU features.
 """
 cpu_torch_index = ("https://download.pytorch.org/whl/cpu", "https://download.pytorch.org/whl/nightly/cpu")

-# xformers not required for new torch
-
 """
 Indicates if this is installing an editable (develop) mode package
 """