Mirror of https://github.com/comfyanonymous/ComfyUI.git, synced 2026-02-10 21:42:37 +08:00
Merge branch 'comfyanonymous:master' into fix/secure-combo

Commit: ad95963289

.gitignore (vendored): 3 lines changed
@@ -13,4 +13,5 @@ extra_model_paths.yaml
 venv/
 web/extensions/*
 !web/extensions/logging.js.example
 !web/extensions/core/
+startup-scripts/
@@ -21,7 +21,7 @@ class SD15(LatentFormat):
 class SDXL(LatentFormat):
     def __init__(self):
         self.scale_factor = 0.13025
-        self.latent_rgb_factors = [ #TODO: these are the factors for SD1.5, need to estimate new ones for SDXL
+        self.latent_rgb_factors = [
             #   R        G        B
             [ 0.3920,  0.4054,  0.4549],
             [-0.2634, -0.0196,  0.0653],
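Note: latent_rgb_factors is a 4x3 matrix that projects the four latent channels to RGB for cheap live previews; dropping the TODO marks these values as SDXL estimates rather than SD1.5 placeholders. Below is a minimal sketch of how such factors are typically applied, not ComfyUI's actual preview code; the last two matrix rows are invented placeholders, since the diff shows only the first two.

import torch

# 4x3 projection from latent channels to RGB. Only the first two rows come
# from the diff above; the last two are illustrative placeholders.
latent_rgb_factors = torch.tensor([
    [ 0.3920,  0.4054,  0.4549],
    [-0.2634, -0.0196,  0.0653],
    [ 0.0568,  0.1687, -0.0755],  # placeholder row (assumed)
    [-0.3112, -0.2359, -0.2076],  # placeholder row (assumed)
])

def latent_to_rgb_preview(latent: torch.Tensor) -> torch.Tensor:
    """Project a (4, H, W) latent to an (H, W, 3) RGB preview in [0, 1]."""
    rgb = torch.einsum("chw,cr->hwr", latent, latent_rgb_factors)
    return ((rgb + 1.0) / 2.0).clamp(0.0, 1.0)

print(latent_to_rgb_preview(torch.randn(4, 64, 64)).shape)  # (64, 64, 3)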
@@ -359,10 +359,11 @@ class ModelPatcher:

         if len(v) == 1:
             w1 = v[0]
-            if w1.shape != weight.shape:
-                print("WARNING SHAPE MISMATCH {} WEIGHT NOT MERGED {} != {}".format(key, w1.shape, weight.shape))
-            else:
-                weight += alpha * w1.type(weight.dtype).to(weight.device)
+            if alpha != 0.0:
+                if w1.shape != weight.shape:
+                    print("WARNING SHAPE MISMATCH {} WEIGHT NOT MERGED {} != {}".format(key, w1.shape, weight.shape))
+                else:
+                    weight += alpha * w1.type(weight.dtype).to(weight.device)
         elif len(v) == 4: #lora/locon
             mat1 = v[0]
             mat2 = v[1]
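Note: this hunk wraps the single-tensor ("diff") patch merge in an alpha != 0.0 guard, so a zero-strength patch neither touches the weight nor emits the shape-mismatch warning. A self-contained sketch of the guarded merge, with key, weight, v, and alpha mirroring the names in the hunk:

import torch

def merge_diff_patch(key: str, weight: torch.Tensor, v: tuple, alpha: float) -> torch.Tensor:
    # len(v) == 1 is the plain weight-diff patch format from the diff above.
    if len(v) == 1:
        w1 = v[0]
        if alpha != 0.0:  # zero-strength patches are skipped entirely
            if w1.shape != weight.shape:
                print("WARNING SHAPE MISMATCH {} WEIGHT NOT MERGED {} != {}".format(key, w1.shape, weight.shape))
            else:
                weight += alpha * w1.type(weight.dtype).to(weight.device)
    return weight

w = torch.zeros(2, 2)
merge_diff_patch("demo.weight", w, (torch.ones(2, 2),), alpha=0.5)
print(w)  # filled with 0.5
merge_diff_patch("demo.weight", w, (torch.ones(3, 3),), alpha=0.0)  # no warning, no change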
@@ -233,7 +233,7 @@ def expand_directory_list(directories):
             dirs.add(root)
     return list(dirs)

-def load_embed(embedding_name, embedding_directory, embedding_size):
+def load_embed(embedding_name, embedding_directory, embedding_size, embed_key=None):
     if isinstance(embedding_directory, str):
         embedding_directory = [embedding_directory]

@@ -292,13 +292,15 @@ def load_embed(embedding_name, embedding_directory, embedding_size):
                     continue
                 out_list.append(t.reshape(-1, t.shape[-1]))
         embed_out = torch.cat(out_list, dim=0)
+    elif embed_key is not None and embed_key in embed:
+        embed_out = embed[embed_key]
     else:
         values = embed.values()
         embed_out = next(iter(values))
     return embed_out

 class SD1Tokenizer:
-    def __init__(self, tokenizer_path=None, max_length=77, pad_with_end=True, embedding_directory=None, embedding_size=768):
+    def __init__(self, tokenizer_path=None, max_length=77, pad_with_end=True, embedding_directory=None, embedding_size=768, embedding_key='clip_l'):
         if tokenizer_path is None:
             tokenizer_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sd1_tokenizer")
         self.tokenizer = CLIPTokenizer.from_pretrained(tokenizer_path)
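Note: the new embed_key parameter lets load_embed pick the right tensor out of an embedding file that stores one tensor per text encoder. A minimal sketch of the fallback chain this hunk adds (the earlier 'string_to_param'/list branches are omitted); the file layout with 'clip_l'/'clip_g' keys is an assumption based on the tokenizer changes below:

import torch

def pick_embedding(embed: dict, embed_key=None) -> torch.Tensor:
    # Mirrors the diff's fallback order: an explicit per-encoder key first,
    # then whatever single tensor the file happens to hold.
    if embed_key is not None and embed_key in embed:
        return embed[embed_key]
    return next(iter(embed.values()))

# Hypothetical SDXL-style embedding file: one tensor per text encoder.
embed = {"clip_l": torch.randn(4, 768), "clip_g": torch.randn(4, 1280)}
print(pick_embedding(embed, "clip_g").shape)            # torch.Size([4, 1280])
print(pick_embedding({"emb": torch.randn(4, 768)}).shape)  # single-tensor fallback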
@@ -315,17 +317,18 @@ class SD1Tokenizer:
         self.max_word_length = 8
         self.embedding_identifier = "embedding:"
         self.embedding_size = embedding_size
+        self.embedding_key = embedding_key

     def _try_get_embedding(self, embedding_name:str):
         '''
         Takes a potential embedding name and tries to retrieve it.
         Returns a Tuple consisting of the embedding and any leftover string, embedding can be None.
         '''
-        embed = load_embed(embedding_name, self.embedding_directory, self.embedding_size)
+        embed = load_embed(embedding_name, self.embedding_directory, self.embedding_size, self.embedding_key)
         if embed is None:
             stripped = embedding_name.strip(',')
             if len(stripped) < len(embedding_name):
-                embed = load_embed(stripped, self.embedding_directory, self.embedding_size)
+                embed = load_embed(stripped, self.embedding_directory, self.embedding_size, self.embedding_key)
                 return (embed, embedding_name[len(stripped):])
         return (embed, "")

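Note: _try_get_embedding first tries the raw name, then retries with trailing commas stripped and returns the stripped-off remainder so it can be tokenized as ordinary text. A standalone illustration of that retry; the lookup callable is a hypothetical stand-in for load_embed:

def try_get_embedding(embedding_name: str, lookup):
    """lookup(name) -> embedding or None; stands in for load_embed here."""
    embed = lookup(embedding_name)
    if embed is None:
        stripped = embedding_name.strip(',')
        if len(stripped) < len(embedding_name):
            embed = lookup(stripped)
            return (embed, embedding_name[len(stripped):])
    return (embed, "")

known = {"myembed": "<tensor>"}
print(try_get_embedding("myembed,", known.get))  # ('<tensor>', ',') -- comma kept as leftover text
print(try_get_embedding("missing", known.get))   # (None, '')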
@@ -41,7 +41,7 @@ class SDXLClipG(sd1_clip.SD1ClipModel):

 class SDXLClipGTokenizer(sd1_clip.SD1Tokenizer):
     def __init__(self, tokenizer_path=None, embedding_directory=None):
-        super().__init__(tokenizer_path, pad_with_end=False, embedding_directory=embedding_directory, embedding_size=1280)
+        super().__init__(tokenizer_path, pad_with_end=False, embedding_directory=embedding_directory, embedding_size=1280, embedding_key='clip_g')


 class SDXLTokenizer(sd1_clip.SD1Tokenizer):
main.py: 35 lines changed
@@ -1,6 +1,38 @@
+import os
+import importlib.util
+import folder_paths
+
+
+def execute_prestartup_script():
+    def execute_script(script_path):
+        if os.path.exists(script_path):
+            module_name = os.path.splitext(script_path)[0]
+            try:
+                spec = importlib.util.spec_from_file_location(module_name, script_path)
+                module = importlib.util.module_from_spec(spec)
+                spec.loader.exec_module(module)
+            except Exception as e:
+                print(f"Failed to execute startup-script: {script_path} / {e}")
+
+    node_paths = folder_paths.get_folder_paths("custom_nodes")
+    for custom_node_path in node_paths:
+        possible_modules = os.listdir(custom_node_path)
+
+        for possible_module in possible_modules:
+            module_path = os.path.join(custom_node_path, possible_module)
+            if os.path.isfile(module_path) or module_path.endswith(".disabled") or module_path == "__pycache__":
+                continue
+
+            script_path = os.path.join(module_path, "prestartup_script.py")
+            execute_script(script_path)
+
+
+execute_prestartup_script()
+
+
+# Main code
 import asyncio
 import itertools
-import os
 import shutil
 import threading
 import gc
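Note: execute_prestartup_script() runs before ComfyUI's main imports, so a custom node can prepare its environment before anything else loads. A hypothetical custom_nodes/my_node/prestartup_script.py showing the kind of setup this enables:

# Hypothetical custom_nodes/my_node/prestartup_script.py -- executed once,
# before ComfyUI's main imports, via execute_prestartup_script() above.
import os
import sys

# Make a bundled dependency importable before the node's __init__.py runs.
vendor_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "vendor")
if os.path.isdir(vendor_dir) and vendor_dir not in sys.path:
    sys.path.insert(0, vendor_dir)

print("my_node prestartup complete")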
@@ -22,7 +54,6 @@ if __name__ == "__main__":
 import yaml

 import execution
-import folder_paths
 import server
 from server import BinaryEventTypes
 from nodes import init_custom_nodes