diff --git a/.gitignore b/.gitignore
index 38d2ba11b..21a85d316 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,4 +13,5 @@ extra_model_paths.yaml
 venv/
 web/extensions/*
 !web/extensions/logging.js.example
-!web/extensions/core/
\ No newline at end of file
+!web/extensions/core/
+startup-scripts/
\ No newline at end of file
diff --git a/comfy/latent_formats.py b/comfy/latent_formats.py
index 96dd7e948..8b59cfbdc 100644
--- a/comfy/latent_formats.py
+++ b/comfy/latent_formats.py
@@ -21,7 +21,7 @@ class SD15(LatentFormat):
 class SDXL(LatentFormat):
     def __init__(self):
         self.scale_factor = 0.13025
-        self.latent_rgb_factors = [ #TODO: these are the factors for SD1.5, need to estimate new ones for SDXL
+        self.latent_rgb_factors = [
             #   R        G        B
             [ 0.3920,  0.4054,  0.4549],
             [-0.2634, -0.0196,  0.0653],
diff --git a/comfy/sd.py b/comfy/sd.py
index 526dc5311..4bc9a15fa 100644
--- a/comfy/sd.py
+++ b/comfy/sd.py
@@ -359,10 +359,11 @@ class ModelPatcher:
 
             if len(v) == 1:
                 w1 = v[0]
-                if w1.shape != weight.shape:
-                    print("WARNING SHAPE MISMATCH {} WEIGHT NOT MERGED {} != {}".format(key, w1.shape, weight.shape))
-                else:
-                    weight += alpha * w1.type(weight.dtype).to(weight.device)
+                if alpha != 0.0:
+                    if w1.shape != weight.shape:
+                        print("WARNING SHAPE MISMATCH {} WEIGHT NOT MERGED {} != {}".format(key, w1.shape, weight.shape))
+                    else:
+                        weight += alpha * w1.type(weight.dtype).to(weight.device)
             elif len(v) == 4: #lora/locon
                 mat1 = v[0]
                 mat2 = v[1]
diff --git a/comfy/sd1_clip.py b/comfy/sd1_clip.py
index 11f26bb8f..fbd87c569 100644
--- a/comfy/sd1_clip.py
+++ b/comfy/sd1_clip.py
@@ -233,7 +233,7 @@ def expand_directory_list(directories):
             dirs.add(root)
     return list(dirs)
 
-def load_embed(embedding_name, embedding_directory, embedding_size):
+def load_embed(embedding_name, embedding_directory, embedding_size, embed_key=None):
     if isinstance(embedding_directory, str):
         embedding_directory = [embedding_directory]
 
@@ -292,13 +292,15 @@ def load_embed(embedding_name, embedding_directory, embedding_size):
                         continue
                     out_list.append(t.reshape(-1, t.shape[-1]))
             embed_out = torch.cat(out_list, dim=0)
+        elif embed_key is not None and embed_key in embed:
+            embed_out = embed[embed_key]
         else:
             values = embed.values()
             embed_out = next(iter(values))
     return embed_out
 
 class SD1Tokenizer:
-    def __init__(self, tokenizer_path=None, max_length=77, pad_with_end=True, embedding_directory=None, embedding_size=768):
+    def __init__(self, tokenizer_path=None, max_length=77, pad_with_end=True, embedding_directory=None, embedding_size=768, embedding_key='clip_l'):
         if tokenizer_path is None:
             tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sd1_tokenizer")
         self.tokenizer = CLIPTokenizer.from_pretrained(tokenizer_path)
@@ -315,17 +317,18 @@ class SD1Tokenizer:
         self.max_word_length = 8
         self.embedding_identifier = "embedding:"
         self.embedding_size = embedding_size
+        self.embedding_key = embedding_key
 
     def _try_get_embedding(self, embedding_name:str):
         '''
         Takes a potential embedding name and tries to retrieve it.
         Returns a Tuple consisting of the embedding and any leftover string, embedding can be None.
         '''
-        embed = load_embed(embedding_name, self.embedding_directory, self.embedding_size)
+        embed = load_embed(embedding_name, self.embedding_directory, self.embedding_size, self.embedding_key)
         if embed is None:
             stripped = embedding_name.strip(',')
             if len(stripped) < len(embedding_name):
-                embed = load_embed(stripped, self.embedding_directory, self.embedding_size)
+                embed = load_embed(stripped, self.embedding_directory, self.embedding_size, self.embedding_key)
                 return (embed, embedding_name[len(stripped):])
         return (embed, "")
 
diff --git a/comfy/sdxl_clip.py b/comfy/sdxl_clip.py
index d9298b205..d0803b10b 100644
--- a/comfy/sdxl_clip.py
+++ b/comfy/sdxl_clip.py
@@ -41,7 +41,7 @@ class SDXLClipG(sd1_clip.SD1ClipModel):
 
 class SDXLClipGTokenizer(sd1_clip.SD1Tokenizer):
     def __init__(self, tokenizer_path=None, embedding_directory=None):
-        super().__init__(tokenizer_path, pad_with_end=False, embedding_directory=embedding_directory, embedding_size=1280)
+        super().__init__(tokenizer_path, pad_with_end=False, embedding_directory=embedding_directory, embedding_size=1280, embedding_key='clip_g')
 
 
 class SDXLTokenizer(sd1_clip.SD1Tokenizer):
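The `embed_key`/`embedding_key` plumbing above exists because an SDXL textual-inversion file can carry one tensor per text encoder, conventionally keyed `clip_l` (width 768) and `clip_g` (width 1280); each tokenizer now asks for its own tensor instead of taking whatever `next(iter(values))` happens to return first. A minimal sketch of the new lookup fallthrough, using a hypothetical in-memory dict in place of a loaded embedding file:

```python
import torch

# Hypothetical SDXL embedding as loaded from disk: one tensor per text
# encoder, keyed by the CLIP model it targets.
embed = {
    "clip_l": torch.randn(2, 768),   # matches SD1Tokenizer (embedding_size=768)
    "clip_g": torch.randn(2, 1280),  # matches SDXLClipGTokenizer (embedding_size=1280)
}

def pick_embedding(embed, embed_key=None):
    # Mirrors the new branch in load_embed: prefer the caller's key when the
    # file has it, otherwise keep the old behavior of taking the first value.
    if embed_key is not None and embed_key in embed:
        return embed[embed_key]
    values = embed.values()
    return next(iter(values))

print(pick_embedding(embed, "clip_g").shape)  # torch.Size([2, 1280])
print(pick_embedding(embed, "clip_l").shape)  # torch.Size([2, 768])
print(pick_embedding(embed).shape)            # no key: falls back to first entry
```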
diff --git a/main.py b/main.py
index 715649975..2da78d7a3 100644
--- a/main.py
+++ b/main.py
@@ -1,6 +1,38 @@
+import os
+import importlib.util
+import folder_paths
+
+
+def execute_prestartup_script():
+    def execute_script(script_path):
+        if os.path.exists(script_path):
+            module_name = os.path.splitext(script_path)[0]
+            try:
+                spec = importlib.util.spec_from_file_location(module_name, script_path)
+                module = importlib.util.module_from_spec(spec)
+                spec.loader.exec_module(module)
+            except Exception as e:
+                print(f"Failed to execute startup-script: {script_path} / {e}")
+
+    node_paths = folder_paths.get_folder_paths("custom_nodes")
+    for custom_node_path in node_paths:
+        possible_modules = os.listdir(custom_node_path)
+
+        for possible_module in possible_modules:
+            module_path = os.path.join(custom_node_path, possible_module)
+            if os.path.isfile(module_path) or module_path.endswith(".disabled") or module_path == "__pycache__":
+                continue
+
+            script_path = os.path.join(module_path, "prestartup_script.py")
+            execute_script(script_path)
+
+
+execute_prestartup_script()
+
+
+# Main code
 import asyncio
 import itertools
-import os
 import shutil
 import threading
 import gc
@@ -22,7 +54,6 @@ if __name__ == "__main__":
 import yaml
 
 import execution
-import folder_paths
 import server
 from server import BinaryEventTypes
 from nodes import init_custom_nodes
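For reference, the hook the `main.py` change introduces: every directory under `custom_nodes/` is scanned for a `prestartup_script.py`, which runs before ComfyUI's main imports (and therefore before `torch` is pulled in via `execution`, `server`, and `nodes`). Failing scripts are caught and reported ("Failed to execute startup-script: ...") rather than aborting startup, and entries ending in `.disabled` are skipped. A minimal sketch of what such a script might do; the environment variable and log file below are illustrative assumptions, not part of this patch:

```python
# custom_nodes/my_node_pack/prestartup_script.py  (hypothetical node pack)
# Runs via execute_prestartup_script() before the rest of main.py is imported,
# so it can adjust process state that must be set before torch loads.
import os
import datetime

# Example: configure the CUDA allocator before anything imports torch.
os.environ.setdefault("PYTORCH_CUDA_ALLOC_CONF", "max_split_size_mb:512")

# Example: record when startup began (path chosen for illustration only).
log_path = os.path.join(os.path.dirname(__file__), "prestartup.log")
with open(log_path, "a") as f:
    f.write(f"prestartup ran at {datetime.datetime.now().isoformat()}\n")
```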