convert nodes_latent.py to V3 schema (#10160)

Alexander Piskun 2025-10-09 09:14:00 +03:00 committed by GitHub
parent 6732014a0a
commit cbee7d3390


@@ -2,6 +2,8 @@ import comfy.utils
 import comfy_extras.nodes_post_processing
 import torch
 import nodes
+from typing_extensions import override
+from comfy_api.latest import ComfyExtension, io


 def reshape_latent_to(target_shape, latent, repeat_batch=True):
@@ -13,17 +15,23 @@ def reshape_latent_to(target_shape, latent, repeat_batch=True):
     return latent


-class LatentAdd:
+class LatentAdd(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",)}}
-    RETURN_TYPES = ("LATENT",)
-    FUNCTION = "op"
-    CATEGORY = "latent/advanced"
-    def op(self, samples1, samples2):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LatentAdd",
+            category="latent/advanced",
+            inputs=[
+                io.Latent.Input("samples1"),
+                io.Latent.Input("samples2"),
+            ],
+            outputs=[
+                io.Latent.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, samples1, samples2) -> io.NodeOutput:
         samples_out = samples1.copy()

         s1 = samples1["samples"]
@@ -31,19 +39,25 @@ class LatentAdd:
         s2 = reshape_latent_to(s1.shape, s2)
         samples_out["samples"] = s1 + s2
-        return (samples_out,)
+        return io.NodeOutput(samples_out)


-class LatentSubtract:
+class LatentSubtract(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",)}}
-    RETURN_TYPES = ("LATENT",)
-    FUNCTION = "op"
-    CATEGORY = "latent/advanced"
-    def op(self, samples1, samples2):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LatentSubtract",
+            category="latent/advanced",
+            inputs=[
+                io.Latent.Input("samples1"),
+                io.Latent.Input("samples2"),
+            ],
+            outputs=[
+                io.Latent.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, samples1, samples2) -> io.NodeOutput:
         samples_out = samples1.copy()

         s1 = samples1["samples"]
@@ -51,41 +65,49 @@ class LatentSubtract:
         s2 = reshape_latent_to(s1.shape, s2)
         samples_out["samples"] = s1 - s2
-        return (samples_out,)
+        return io.NodeOutput(samples_out)


-class LatentMultiply:
+class LatentMultiply(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "samples": ("LATENT",),
-                              "multiplier": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
-                              }}
-    RETURN_TYPES = ("LATENT",)
-    FUNCTION = "op"
-    CATEGORY = "latent/advanced"
-    def op(self, samples, multiplier):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LatentMultiply",
+            category="latent/advanced",
+            inputs=[
+                io.Latent.Input("samples"),
+                io.Float.Input("multiplier", default=1.0, min=-10.0, max=10.0, step=0.01),
+            ],
+            outputs=[
+                io.Latent.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, samples, multiplier) -> io.NodeOutput:
         samples_out = samples.copy()

         s1 = samples["samples"]
         samples_out["samples"] = s1 * multiplier
-        return (samples_out,)
+        return io.NodeOutput(samples_out)


-class LatentInterpolate:
+class LatentInterpolate(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "samples1": ("LATENT",),
-                              "samples2": ("LATENT",),
-                              "ratio": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}),
-                              }}
-    RETURN_TYPES = ("LATENT",)
-    FUNCTION = "op"
-    CATEGORY = "latent/advanced"
-    def op(self, samples1, samples2, ratio):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LatentInterpolate",
+            category="latent/advanced",
+            inputs=[
+                io.Latent.Input("samples1"),
+                io.Latent.Input("samples2"),
+                io.Float.Input("ratio", default=1.0, min=0.0, max=1.0, step=0.01),
+            ],
+            outputs=[
+                io.Latent.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, samples1, samples2, ratio) -> io.NodeOutput:
         samples_out = samples1.copy()

         s1 = samples1["samples"]
@@ -104,19 +126,26 @@ class LatentInterpolate:
         st = torch.nan_to_num(t / mt)
         samples_out["samples"] = st * (m1 * ratio + m2 * (1.0 - ratio))
-        return (samples_out,)
+        return io.NodeOutput(samples_out)


-class LatentConcat:
+class LatentConcat(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",), "dim": (["x", "-x", "y", "-y", "t", "-t"], )}}
-    RETURN_TYPES = ("LATENT",)
-    FUNCTION = "op"
-    CATEGORY = "latent/advanced"
-    def op(self, samples1, samples2, dim):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LatentConcat",
+            category="latent/advanced",
+            inputs=[
+                io.Latent.Input("samples1"),
+                io.Latent.Input("samples2"),
+                io.Combo.Input("dim", options=["x", "-x", "y", "-y", "t", "-t"]),
+            ],
+            outputs=[
+                io.Latent.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, samples1, samples2, dim) -> io.NodeOutput:
         samples_out = samples1.copy()

         s1 = samples1["samples"]
@@ -136,22 +165,27 @@ class LatentConcat:
             dim = -3

         samples_out["samples"] = torch.cat(c, dim=dim)
-        return (samples_out,)
+        return io.NodeOutput(samples_out)


-class LatentCut:
+class LatentCut(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": {"samples": ("LATENT",),
-                             "dim": (["x", "y", "t"], ),
-                             "index": ("INT", {"default": 0, "min": -nodes.MAX_RESOLUTION, "max": nodes.MAX_RESOLUTION, "step": 1}),
-                             "amount": ("INT", {"default": 1, "min": 1, "max": nodes.MAX_RESOLUTION, "step": 1})}}
-    RETURN_TYPES = ("LATENT",)
-    FUNCTION = "op"
-    CATEGORY = "latent/advanced"
-    def op(self, samples, dim, index, amount):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LatentCut",
+            category="latent/advanced",
+            inputs=[
+                io.Latent.Input("samples"),
+                io.Combo.Input("dim", options=["x", "y", "t"]),
+                io.Int.Input("index", default=0, min=-nodes.MAX_RESOLUTION, max=nodes.MAX_RESOLUTION, step=1),
+                io.Int.Input("amount", default=1, min=1, max=nodes.MAX_RESOLUTION, step=1),
+            ],
+            outputs=[
+                io.Latent.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, samples, dim, index, amount) -> io.NodeOutput:
         samples_out = samples.copy()

         s1 = samples["samples"]
@@ -171,19 +205,25 @@ class LatentCut:
             amount = min(-index, amount)

         samples_out["samples"] = torch.narrow(s1, dim, index, amount)
-        return (samples_out,)
+        return io.NodeOutput(samples_out)


-class LatentBatch:
+class LatentBatch(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "samples1": ("LATENT",), "samples2": ("LATENT",)}}
-    RETURN_TYPES = ("LATENT",)
-    FUNCTION = "batch"
-    CATEGORY = "latent/batch"
-    def batch(self, samples1, samples2):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LatentBatch",
+            category="latent/batch",
+            inputs=[
+                io.Latent.Input("samples1"),
+                io.Latent.Input("samples2"),
+            ],
+            outputs=[
+                io.Latent.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, samples1, samples2) -> io.NodeOutput:
         samples_out = samples1.copy()
         s1 = samples1["samples"]
         s2 = samples2["samples"]
@@ -192,20 +232,25 @@ class LatentBatch:
         s = torch.cat((s1, s2), dim=0)
         samples_out["samples"] = s
         samples_out["batch_index"] = samples1.get("batch_index", [x for x in range(0, s1.shape[0])]) + samples2.get("batch_index", [x for x in range(0, s2.shape[0])])
-        return (samples_out,)
+        return io.NodeOutput(samples_out)


-class LatentBatchSeedBehavior:
+class LatentBatchSeedBehavior(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "samples": ("LATENT",),
-                              "seed_behavior": (["random", "fixed"],{"default": "fixed"}),}}
-    RETURN_TYPES = ("LATENT",)
-    FUNCTION = "op"
-    CATEGORY = "latent/advanced"
-    def op(self, samples, seed_behavior):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LatentBatchSeedBehavior",
+            category="latent/advanced",
+            inputs=[
+                io.Latent.Input("samples"),
+                io.Combo.Input("seed_behavior", options=["random", "fixed"], default="fixed"),
+            ],
+            outputs=[
+                io.Latent.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, samples, seed_behavior) -> io.NodeOutput:
         samples_out = samples.copy()
         latent = samples["samples"]
         if seed_behavior == "random":
@@ -215,41 +260,50 @@ class LatentBatchSeedBehavior:
             batch_number = samples_out.get("batch_index", [0])[0]
             samples_out["batch_index"] = [batch_number] * latent.shape[0]
-        return (samples_out,)
+        return io.NodeOutput(samples_out)


-class LatentApplyOperation:
+class LatentApplyOperation(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "samples": ("LATENT",),
-                              "operation": ("LATENT_OPERATION",),
-                              }}
-    RETURN_TYPES = ("LATENT",)
-    FUNCTION = "op"
-    CATEGORY = "latent/advanced/operations"
-    EXPERIMENTAL = True
-    def op(self, samples, operation):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LatentApplyOperation",
+            category="latent/advanced/operations",
+            is_experimental=True,
+            inputs=[
+                io.Latent.Input("samples"),
+                io.LatentOperation.Input("operation"),
+            ],
+            outputs=[
+                io.Latent.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, samples, operation) -> io.NodeOutput:
         samples_out = samples.copy()
         s1 = samples["samples"]
         samples_out["samples"] = operation(latent=s1)
-        return (samples_out,)
+        return io.NodeOutput(samples_out)


-class LatentApplyOperationCFG:
+class LatentApplyOperationCFG(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "model": ("MODEL",),
-                              "operation": ("LATENT_OPERATION",),
-                              }}
-    RETURN_TYPES = ("MODEL",)
-    FUNCTION = "patch"
-    CATEGORY = "latent/advanced/operations"
-    EXPERIMENTAL = True
-    def patch(self, model, operation):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LatentApplyOperationCFG",
+            category="latent/advanced/operations",
+            is_experimental=True,
+            inputs=[
+                io.Model.Input("model"),
+                io.LatentOperation.Input("operation"),
+            ],
+            outputs=[
+                io.Model.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, model, operation) -> io.NodeOutput:
         m = model.clone()

         def pre_cfg_function(args):
@@ -261,21 +315,25 @@ class LatentApplyOperationCFG:
             return conds_out

         m.set_model_sampler_pre_cfg_function(pre_cfg_function)
-        return (m, )
+        return io.NodeOutput(m)


-class LatentOperationTonemapReinhard:
+class LatentOperationTonemapReinhard(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "multiplier": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 100.0, "step": 0.01}),
-                              }}
-    RETURN_TYPES = ("LATENT_OPERATION",)
-    FUNCTION = "op"
-    CATEGORY = "latent/advanced/operations"
-    EXPERIMENTAL = True
-    def op(self, multiplier):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LatentOperationTonemapReinhard",
+            category="latent/advanced/operations",
+            is_experimental=True,
+            inputs=[
+                io.Float.Input("multiplier", default=1.0, min=0.0, max=100.0, step=0.01),
+            ],
+            outputs=[
+                io.LatentOperation.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, multiplier) -> io.NodeOutput:
         def tonemap_reinhard(latent, **kwargs):
             latent_vector_magnitude = (torch.linalg.vector_norm(latent, dim=(1)) + 0.0000000001)[:,None]
             normalized_latent = latent / latent_vector_magnitude
@@ -291,39 +349,27 @@ class LatentOperationTonemapReinhard:
             new_magnitude *= top

             return normalized_latent * new_magnitude
-        return (tonemap_reinhard,)
+        return io.NodeOutput(tonemap_reinhard)


-class LatentOperationSharpen:
+class LatentOperationSharpen(io.ComfyNode):
     @classmethod
-    def INPUT_TYPES(s):
-        return {"required": {
-            "sharpen_radius": ("INT", {
-                "default": 9,
-                "min": 1,
-                "max": 31,
-                "step": 1
-            }),
-            "sigma": ("FLOAT", {
-                "default": 1.0,
-                "min": 0.1,
-                "max": 10.0,
-                "step": 0.1
-            }),
-            "alpha": ("FLOAT", {
-                "default": 0.1,
-                "min": 0.0,
-                "max": 5.0,
-                "step": 0.01
-            }),
-        }}
-    RETURN_TYPES = ("LATENT_OPERATION",)
-    FUNCTION = "op"
-    CATEGORY = "latent/advanced/operations"
-    EXPERIMENTAL = True
-    def op(self, sharpen_radius, sigma, alpha):
+    def define_schema(cls):
+        return io.Schema(
+            node_id="LatentOperationSharpen",
+            category="latent/advanced/operations",
+            is_experimental=True,
+            inputs=[
+                io.Int.Input("sharpen_radius", default=9, min=1, max=31, step=1),
+                io.Float.Input("sigma", default=1.0, min=0.1, max=10.0, step=0.1),
+                io.Float.Input("alpha", default=0.1, min=0.0, max=5.0, step=0.01),
+            ],
+            outputs=[
+                io.LatentOperation.Output(),
+            ],
+        )
+
+    @classmethod
+    def execute(cls, sharpen_radius, sigma, alpha) -> io.NodeOutput:
         def sharpen(latent, **kwargs):
             luminance = (torch.linalg.vector_norm(latent, dim=(1)) + 1e-6)[:,None]
             normalized_latent = latent / luminance
@@ -340,19 +386,27 @@ class LatentOperationSharpen:
             sharpened = torch.nn.functional.conv2d(padded_image, kernel.repeat(channels, 1, 1).unsqueeze(1), padding=kernel_size // 2, groups=channels)[:,:,sharpen_radius:-sharpen_radius, sharpen_radius:-sharpen_radius]

             return luminance * sharpened
-        return (sharpen,)
+        return io.NodeOutput(sharpen)


-NODE_CLASS_MAPPINGS = {
-    "LatentAdd": LatentAdd,
-    "LatentSubtract": LatentSubtract,
-    "LatentMultiply": LatentMultiply,
-    "LatentInterpolate": LatentInterpolate,
-    "LatentConcat": LatentConcat,
-    "LatentCut": LatentCut,
-    "LatentBatch": LatentBatch,
-    "LatentBatchSeedBehavior": LatentBatchSeedBehavior,
-    "LatentApplyOperation": LatentApplyOperation,
-    "LatentApplyOperationCFG": LatentApplyOperationCFG,
-    "LatentOperationTonemapReinhard": LatentOperationTonemapReinhard,
-    "LatentOperationSharpen": LatentOperationSharpen,
-}
+class LatentExtension(ComfyExtension):
+    @override
+    async def get_node_list(self) -> list[type[io.ComfyNode]]:
+        return [
+            LatentAdd,
+            LatentSubtract,
+            LatentMultiply,
+            LatentInterpolate,
+            LatentConcat,
+            LatentCut,
+            LatentBatch,
+            LatentBatchSeedBehavior,
+            LatentApplyOperation,
+            LatentApplyOperationCFG,
+            LatentOperationTonemapReinhard,
+            LatentOperationSharpen,
+        ]
+
+
+async def comfy_entrypoint() -> LatentExtension:
+    return LatentExtension()
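
The conversion above follows one repeating pattern: define_schema() declares the node id, category, and typed inputs/outputs through io.Schema, execute() returns an io.NodeOutput, and a ComfyExtension subclass with an async comfy_entrypoint() replaces the old NODE_CLASS_MAPPINGS registration. For reference, a minimal sketch of that pattern, assuming only the comfy_api.latest interfaces used in this diff; the LatentNegate node and LatentNegateExtension names are hypothetical and not part of this commit:

from typing_extensions import override
from comfy_api.latest import ComfyExtension, io


class LatentNegate(io.ComfyNode):  # hypothetical example node, not part of this commit
    @classmethod
    def define_schema(cls):
        # V3 schema: node id, category, and typed inputs/outputs in one declaration
        return io.Schema(
            node_id="LatentNegate",
            category="latent/advanced",
            inputs=[
                io.Latent.Input("samples"),
            ],
            outputs=[
                io.Latent.Output(),
            ],
        )

    @classmethod
    def execute(cls, samples) -> io.NodeOutput:
        # latents are dicts carrying a "samples" tensor, as in the nodes above
        samples_out = samples.copy()
        samples_out["samples"] = -samples["samples"]
        return io.NodeOutput(samples_out)


class LatentNegateExtension(ComfyExtension):  # hypothetical extension wrapper
    @override
    async def get_node_list(self) -> list[type[io.ComfyNode]]:
        return [LatentNegate]


async def comfy_entrypoint() -> LatentNegateExtension:
    return LatentNegateExtension()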