Merge branch 'comfyanonymous:master' into master

patientx 2025-09-28 12:23:05 +03:00 committed by GitHub
commit 5e79de6a73
5 changed files with 130 additions and 80 deletions

View File

```diff
@@ -112,10 +112,9 @@ jobs:
         ls
     - name: Upload binaries to release
-      uses: svenstaro/upload-release-action@v2
+      uses: softprops/action-gh-release@v2
       with:
-        repo_token: ${{ secrets.GITHUB_TOKEN }}
-        file: ComfyUI_windows_portable_${{ inputs.rel_name }}.7z
-        tag: ${{ inputs.git_tag }}
-        overwrite: true
+        files: ComfyUI_windows_portable_${{ inputs.rel_name }}.7z
+        tag_name: ${{ inputs.git_tag }}
         draft: true
+        overwrite_files: true
```

View File

```diff
@@ -1,9 +1,11 @@
 #Taken from: https://github.com/tfernd/HyperTile/
 import math
+from typing_extensions import override
 
 from einops import rearrange
 # Use torch rng for consistency across generations
 from torch import randint
+from comfy_api.latest import ComfyExtension, io
 
@@ -20,25 +22,31 @@ def random_divisor(value: int, min_value: int, /, max_options: int = 1) -> int:
     return ns[idx]
 
 
-class HyperTile:
+class HyperTile(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "model": ("MODEL",),
-                             "tile_size": ("INT", {"default": 256, "min": 1, "max": 2048}),
-                             "swap_size": ("INT", {"default": 2, "min": 1, "max": 128}),
-                             "max_depth": ("INT", {"default": 0, "min": 0, "max": 10}),
-                             "scale_depth": ("BOOLEAN", {"default": False}),
-                             }}
-    RETURN_TYPES = ("MODEL",)
-    FUNCTION = "patch"
+    def define_schema(cls):
+        return io.Schema(
+            node_id="HyperTile",
+            category="model_patches/unet",
+            inputs=[
+                io.Model.Input("model"),
+                io.Int.Input("tile_size", default=256, min=1, max=2048),
+                io.Int.Input("swap_size", default=2, min=1, max=128),
+                io.Int.Input("max_depth", default=0, min=0, max=10),
+                io.Boolean.Input("scale_depth", default=False),
+            ],
+            outputs=[
+                io.Model.Output(),
+            ],
+        )
 
-    CATEGORY = "model_patches/unet"
-
-    def patch(self, model, tile_size, swap_size, max_depth, scale_depth):
+    @classmethod
+    def execute(cls, model, tile_size, swap_size, max_depth, scale_depth) -> io.NodeOutput:
         latent_tile_size = max(32, tile_size) // 8
-        self.temp = None
+        temp = None
 
         def hypertile_in(q, k, v, extra_options):
+            nonlocal temp
             model_chans = q.shape[-2]
             orig_shape = extra_options['original_shape']
             apply_to = []
@@ -58,14 +66,15 @@ class HyperTile:
             if nh * nw > 1:
                 q = rearrange(q, "b (nh h nw w) c -> (b nh nw) (h w) c", h=h // nh, w=w // nw, nh=nh, nw=nw)
-                self.temp = (nh, nw, h, w)
+                temp = (nh, nw, h, w)
                 return q, k, v
 
             return q, k, v
 
         def hypertile_out(out, extra_options):
-            if self.temp is not None:
-                nh, nw, h, w = self.temp
-                self.temp = None
+            nonlocal temp
+            if temp is not None:
+                nh, nw, h, w = temp
+                temp = None
                 out = rearrange(out, "(b nh nw) hw c -> b nh nw hw c", nh=nh, nw=nw)
                 out = rearrange(out, "b nh nw (h w) c -> b (nh h nw w) c", h=h // nh, w=w // nw)
             return out
@@ -76,6 +85,14 @@ class HyperTile:
         m.set_model_attn1_output_patch(hypertile_out)
         return (m, )
 
-NODE_CLASS_MAPPINGS = {
-    "HyperTile": HyperTile,
-}
+
+class HyperTileExtension(ComfyExtension):
+    @override
+    async def get_node_list(self) -> list[type[io.ComfyNode]]:
+        return [
+            HyperTile,
+        ]
+
+
+async def comfy_entrypoint() -> HyperTileExtension:
+    return HyperTileExtension()
```
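
Note on the `temp` change above: the v3 API's `execute` is a classmethod, so the old instance attribute `self.temp` has no instance to live on; the diff moves that per-call state into a closure shared by the two patch callbacks via `nonlocal`. A minimal standalone sketch of the pattern (illustrative names only, not ComfyUI code):

```python
# Minimal sketch of the closure-state pattern used in the diff above:
# two callbacks share per-call state through `nonlocal` instead of `self`.
def make_patches():
    temp = None  # plays the role of the old self.temp

    def attn_in(q):
        nonlocal temp
        temp = ("tiling", len(q))  # stash layout info for the output patch
        return q

    def attn_out(out):
        nonlocal temp
        if temp is not None:
            temp = None  # consume the stashed state exactly once
        return out

    return attn_in, attn_out


if __name__ == "__main__":
    fin, fout = make_patches()
    print(fout(fin([1, 2, 3])))  # each make_patches() call gets isolated state
```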

View File

```diff
@@ -1,20 +1,22 @@
+from typing_extensions import override
+
 import torch
 import comfy.model_management as mm
+from comfy_api.latest import ComfyExtension, io
 
-class LotusConditioning:
+
+class LotusConditioning(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {
-            "required": {
-            },
-        }
-
-    RETURN_TYPES = ("CONDITIONING",)
-    RETURN_NAMES = ("conditioning",)
-    FUNCTION = "conditioning"
-    CATEGORY = "conditioning/lotus"
-
-    def conditioning(self):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LotusConditioning",
+            category="conditioning/lotus",
+            inputs=[],
+            outputs=[io.Conditioning.Output(display_name="conditioning")],
+        )
+
+    @classmethod
+    def execute(cls) -> io.NodeOutput:
         device = mm.get_torch_device()
         #lotus uses a frozen encoder and null conditioning, i'm just inlining the results of that operation since it doesn't change
         #and getting parity with the reference implementation would otherwise require inference and 800mb of tensors
@@ -22,8 +24,16 @@ class LotusConditioning:
         cond = [[prompt_embeds, {}]]
-        return (cond,)
+        return io.NodeOutput(cond)
 
-NODE_CLASS_MAPPINGS = {
-    "LotusConditioning" : LotusConditioning,
-}
+
+class LotusExtension(ComfyExtension):
+    @override
+    async def get_node_list(self) -> list[type[io.ComfyNode]]:
+        return [
+            LotusConditioning,
+        ]
+
+
+async def comfy_entrypoint() -> LotusExtension:
+    return LotusExtension()
```
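
These converted files replace the module-level `NODE_CLASS_MAPPINGS` dict with a `ComfyExtension` subclass exposed through an async `comfy_entrypoint()`. A hypothetical consumer sketch, assuming only what the diffs themselves show (this is not ComfyUI's actual loader code):

```python
# Hypothetical loader sketch (assumed for illustration): resolve the async
# entrypoint shown in the diffs, then derive an old-style name -> class map.
import asyncio

async def collect_nodes(module):
    extension = await module.comfy_entrypoint()     # e.g. LotusExtension()
    node_classes = await extension.get_node_list()  # e.g. [LotusConditioning]
    return {cls.__name__: cls for cls in node_classes}

# usage: mappings = asyncio.run(collect_nodes(lotus_module))
```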

View File

```diff
@@ -1,20 +1,27 @@
-from comfy.comfy_types import IO, ComfyNodeABC, InputTypeDict
+from typing_extensions import override
+
 import torch
+from comfy_api.latest import ComfyExtension, io
 
-class RenormCFG:
+
+class RenormCFG(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "model": ("MODEL",),
-                              "cfg_trunc": ("FLOAT", {"default": 100, "min": 0.0, "max": 100.0, "step": 0.01}),
-                              "renorm_cfg": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.01}),
-                              }}
-    RETURN_TYPES = ("MODEL",)
-    FUNCTION = "patch"
-
-    CATEGORY = "advanced/model"
-
-    def patch(self, model, cfg_trunc, renorm_cfg):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="RenormCFG",
+            category="advanced/model",
+            inputs=[
+                io.Model.Input("model"),
+                io.Float.Input("cfg_trunc", default=100, min=0.0, max=100.0, step=0.01),
+                io.Float.Input("renorm_cfg", default=1.0, min=0.0, max=100.0, step=0.01),
+            ],
+            outputs=[
+                io.Model.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, model, cfg_trunc, renorm_cfg) -> io.NodeOutput:
         def renorm_cfg_func(args):
             cond_denoised = args["cond_denoised"]
             uncond_denoised = args["uncond_denoised"]
@@ -53,10 +60,10 @@ class RenormCFG:
         m = model.clone()
         m.set_model_sampler_cfg_function(renorm_cfg_func)
-        return (m, )
+        return io.NodeOutput(m)
 
-class CLIPTextEncodeLumina2(ComfyNodeABC):
+
+class CLIPTextEncodeLumina2(io.ComfyNode):
     SYSTEM_PROMPT = {
         "superior": "You are an assistant designed to generate superior images with the superior "\
             "degree of image-text alignment based on textual prompts or user prompts.",
@@ -69,36 +76,52 @@ class CLIPTextEncodeLumina2(ComfyNodeABC):
             "Alignment: You are an assistant designed to generate high-quality images with the highest "\
             "degree of image-text alignment based on textual prompts."
 
     @classmethod
-    def INPUT_TYPES(s) -> InputTypeDict:
-        return {
-            "required": {
-                "system_prompt": (list(CLIPTextEncodeLumina2.SYSTEM_PROMPT.keys()), {"tooltip": CLIPTextEncodeLumina2.SYSTEM_PROMPT_TIP}),
-                "user_prompt": (IO.STRING, {"multiline": True, "dynamicPrompts": True, "tooltip": "The text to be encoded."}),
-                "clip": (IO.CLIP, {"tooltip": "The CLIP model used for encoding the text."})
-            }
-        }
-    RETURN_TYPES = (IO.CONDITIONING,)
-    OUTPUT_TOOLTIPS = ("A conditioning containing the embedded text used to guide the diffusion model.",)
-    FUNCTION = "encode"
+    def define_schema(cls):
+        return io.Schema(
+            node_id="CLIPTextEncodeLumina2",
+            display_name="CLIP Text Encode for Lumina2",
+            category="conditioning",
+            description="Encodes a system prompt and a user prompt using a CLIP model into an embedding "
+                        "that can be used to guide the diffusion model towards generating specific images.",
+            inputs=[
+                io.Combo.Input(
+                    "system_prompt",
+                    options=list(cls.SYSTEM_PROMPT.keys()),
+                    tooltip=cls.SYSTEM_PROMPT_TIP,
+                ),
+                io.String.Input(
+                    "user_prompt",
+                    multiline=True,
+                    dynamic_prompts=True,
+                    tooltip="The text to be encoded.",
+                ),
+                io.Clip.Input("clip", tooltip="The CLIP model used for encoding the text."),
+            ],
+            outputs=[
+                io.Conditioning.Output(
+                    tooltip="A conditioning containing the embedded text used to guide the diffusion model.",
+                ),
+            ],
+        )
 
-    CATEGORY = "conditioning"
-    DESCRIPTION = "Encodes a system prompt and a user prompt using a CLIP model into an embedding that can be used to guide the diffusion model towards generating specific images."
-
-    def encode(self, clip, user_prompt, system_prompt):
+    @classmethod
+    def execute(cls, clip, user_prompt, system_prompt) -> io.NodeOutput:
         if clip is None:
             raise RuntimeError("ERROR: clip input is invalid: None\n\nIf the clip is from a checkpoint loader node your checkpoint does not contain a valid clip or text encoder model.")
-        system_prompt = CLIPTextEncodeLumina2.SYSTEM_PROMPT[system_prompt]
+        system_prompt = cls.SYSTEM_PROMPT[system_prompt]
        prompt = f'{system_prompt} <Prompt Start> {user_prompt}'
         tokens = clip.tokenize(prompt)
-        return (clip.encode_from_tokens_scheduled(tokens), )
+        return io.NodeOutput(clip.encode_from_tokens_scheduled(tokens))
 
-NODE_CLASS_MAPPINGS = {
-    "CLIPTextEncodeLumina2": CLIPTextEncodeLumina2,
-    "RenormCFG": RenormCFG
-}
 
-NODE_DISPLAY_NAME_MAPPINGS = {
-    "CLIPTextEncodeLumina2": "CLIP Text Encode for Lumina2",
-}
+class Lumina2Extension(ComfyExtension):
+    @override
+    async def get_node_list(self) -> list[type[io.ComfyNode]]:
+        return [
+            CLIPTextEncodeLumina2,
+            RenormCFG,
+        ]
+
+
+async def comfy_entrypoint() -> Lumina2Extension:
+    return Lumina2Extension()
```
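
The hunks above elide the body of `renorm_cfg_func`; for orientation, here is a hedged sketch of what this style of CFG renormalization typically computes (following the Lumina reference idea, with assumed names and shapes, not necessarily this file's exact code):

```python
# Hedged sketch of renormalized CFG (an assumption; the real renorm_cfg_func
# body is not shown in this diff): do the standard CFG combine, then cap the
# guided prediction's norm at renorm_cfg times the conditional prediction's.
import torch

def renorm_cfg_sketch(cond, uncond, cond_scale, renorm_cfg):
    guided = uncond + cond_scale * (cond - uncond)  # standard CFG combine
    if renorm_cfg > 0.0:
        max_norm = torch.linalg.vector_norm(cond) * renorm_cfg
        norm = torch.linalg.vector_norm(guided)
        if norm > max_norm:
            guided = guided * (max_norm / norm)  # rescale into the bound
    return guided
```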

View File

```diff
@@ -127,6 +127,7 @@ if __name__ == "__main__":
     if args.cuda_device is not None:
         os.environ['CUDA_VISIBLE_DEVICES'] = str(args.cuda_device)
         os.environ['HIP_VISIBLE_DEVICES'] = str(args.cuda_device)
+        os.environ["ASCEND_RT_VISIBLE_DEVICES"] = str(args.cuda_device)
         logging.info("Set cuda device to: {}".format(args.cuda_device))
 
     if args.oneapi_device_selector is not None:
```
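
The one-line addition mirrors the existing CUDA and ROCm handling for Huawei Ascend NPUs: `ASCEND_RT_VISIBLE_DEVICES` is the CANN runtime's counterpart to `CUDA_VISIBLE_DEVICES`. A small sketch of the pattern (device index assumed; such variables only take effect if exported before the corresponding runtime initializes):

```python
# Sketch: restrict each supported accelerator runtime to one device index.
# Set these before the runtime (CUDA, ROCm/HIP, Ascend CANN) is initialized.
import os

for var in ("CUDA_VISIBLE_DEVICES",        # NVIDIA
            "HIP_VISIBLE_DEVICES",         # AMD ROCm
            "ASCEND_RT_VISIBLE_DEVICES"):  # Huawei Ascend
    os.environ[var] = "0"  # expose only device 0 to this process
```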