Further improvements to logic nodes, lazy evaluation, and related code

doctorpangloss 2025-10-31 16:14:08 -07:00
parent 97f911280e
commit d9e3ba4bec
6 changed files with 22 additions and 13 deletions

.gitignore

@@ -1,7 +1,9 @@
 main.exp
 main.lib
 main.obj
-tests/inference/output/
+comfy/cmd/web/
+tests/**/output
+tests/**/temp
 .DS_Store
 /[Oo]utput/
 /[Ii]nput/

@@ -53,6 +53,8 @@ logging.getLogger("torch.distributed.elastic.multiprocessing.redirects").addFilt
 )
 logging.getLogger("alembic.runtime.migration").setLevel(logging.WARNING)
 logging.getLogger("asyncio").addFilter(lambda record: 'Using selector:' not in record.getMessage())
+logging.getLogger("requests_cache").setLevel(logging.ERROR)
+logging.getLogger("fsspec").setLevel(logging.WARNING)
 from ..cli_args import args

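The two new lines quiet the chatty requests_cache and fsspec loggers. Note that this file uses two different suppression mechanisms: a level threshold versus a per-record filter. A minimal stdlib-only sketch of the difference (the logger names simply mirror the diff):

import logging

# setLevel drops every record below the threshold for that logger...
logging.getLogger("requests_cache").setLevel(logging.ERROR)

# ...while addFilter keeps the level but rejects individual records,
# which is how the "Using selector:" message is silenced above.
logging.getLogger("asyncio").addFilter(
    lambda record: "Using selector:" not in record.getMessage()
)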

@@ -2,7 +2,7 @@ from .execution_context import current_execution_context
 from typing import Any, Generator, Sequence

-def is_input_unscheduled_unexecuted(*arg_names: Sequence[str]) -> Generator[bool, Any, None]:
+def is_input_pending(*arg_names: Sequence[str]) -> Generator[bool, Any, None]:
     """
     returns true if the given argument in the context of an executing node is not scheduled nor executed
     this will be true for inputs that are marked as lazy, and this method is more robust against nodes that return None
@@ -12,18 +12,18 @@ def is_input_unscheduled_unexecuted(*arg_names: Sequence[str]) -> Generator[bool
     context = current_execution_context()
     if context is None or context.execution_list is None:
         raise LookupError("Not executing a node")
-    assert context.execution_list is not None
-    dynprompt = context.execution_list.dynprompt
-    executed = context.executed
-    execution_list = context.execution_list
+    # assert context.execution_list is not None
+    # dynprompt = context.execution_list.dynprompt
+    executed = context.executed or frozenset()
+    # execution_list = context.execution_list
     inputs = context.inputs
-    unscheduled_unexecuted = dynprompt.all_node_ids() - executed - set(execution_list.pendingNodes.keys())
+    # unscheduled_unexecuted = dynprompt.all_node_ids() - executed - set(execution_list.pendingNodes.keys())
     for arg_name in arg_names:
         if arg_name not in inputs:
             raise ValueError(f"Input {arg_name} not found")
         input_ = inputs[arg_name]
         if isinstance(input_, list) or isinstance(input_, tuple) and len(input_) == 2:
             node_id, *_ = input_
-            yield node_id in unscheduled_unexecuted
+            yield node_id not in executed
         else:
             yield False

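The rename reflects a semantic change, not just a new label: the old helper treated an input as outstanding only if its producer was neither executed nor scheduled, while the new one reports it pending whenever the producer is simply absent from the executed set (defaulting to an empty frozenset before anything has run). A toy model of the difference, with hypothetical node ids rather than real executor state:

# Hypothetical graph state; stand-ins for dynprompt/execution_list internals.
all_node_ids = {"1", "2", "3"}
executed = frozenset({"1"})
scheduled = {"2"}  # queued but not yet run

producer = "2"
# Old predicate: False, because the producer is already scheduled.
old_pending = producer in all_node_ids - executed - scheduled
# New predicate: True, because the producer has not actually executed.
new_pending = producer not in executed
assert (old_pending, new_pending) == (False, True)

The practical effect is that scheduled-but-unfinished producers now count as pending, which matches how the check_lazy_status callers later in this commit use the flags.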

@@ -1729,6 +1729,8 @@ class SaveImage:
     DESCRIPTION = "Saves the input images to your ComfyUI output directory."

     def save_images(self, images, filename_prefix="ComfyUI", prompt=None, extra_pnginfo=None):
+        if images is None:
+            return {"ui": {"images": []}}
         filename_prefix += self.prefix_append
         full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0])
         results = list()

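The new early return pairs with the lazy changes: when a branch is never evaluated, a node such as LazySwitch can legitimately pass None downstream, and save_images would otherwise fail at images[0]. A plain-Python illustration of the failure mode the guard removes (the function name here is a stand-in, not the repository code):

def save_images_guarded(images):
    if images is None:
        # An untaken lazy branch produced nothing; report an empty UI result.
        return {"ui": {"images": []}}
    return images[0]  # without the guard, None raises a TypeError here

assert save_images_guarded(None) == {"ui": {"images": []}}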

@@ -9,6 +9,7 @@ from comfy.component_model.tensor_types import RGBImageBatch, ImageBatch, MaskBa
 from comfy.nodes.package_typing import CustomNode
+from comfy_api.latest import io

 def resize_mask(mask, shape):
     return torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(shape[0], shape[1]), mode="bilinear").squeeze(1)
@@ -252,13 +253,16 @@ class Flatten(CustomNode):
     CATEGORY = "image/postprocessing"

     def convert_rgba_to_rgb(self, images: ImageBatch, background_color) -> tuple[RGBImageBatch]:
         b, h, w, c = images.shape
+        if c == 3:
+            return images,
         bg_color = torch.tensor(self.hex_to_rgb(background_color), dtype=torch.float32) / 255.0
         rgb = images[..., :3]
         alpha = images[..., 3:4]
         bg = bg_color.view(1, 1, 1, 3).expand(rgb.shape)
         blended = alpha * rgb + (1 - alpha) * bg
-        return (blended,)
+        return blended,

     @staticmethod
     def hex_to_rgb(hex_color):

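The blend above is standard straight-alpha compositing over a solid background: blended = alpha * rgb + (1 - alpha) * bg. A self-contained check of the arithmetic, using the same BHWC layout as the diff (torch assumed available):

import torch

# One 1x1 RGBA pixel: half-transparent pure red, composited over white.
rgba = torch.tensor([1.0, 0.0, 0.0, 0.5]).view(1, 1, 1, 4)
bg = torch.tensor([1.0, 1.0, 1.0]).view(1, 1, 1, 3)

rgb, alpha = rgba[..., :3], rgba[..., 3:4]
blended = alpha * rgb + (1 - alpha) * bg
# 0.5 * red + 0.5 * white = (1.0, 0.5, 0.5), a light pink.
assert torch.allclose(blended, torch.tensor([1.0, 0.5, 0.5]).view(1, 1, 1, 3))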

@@ -3,7 +3,7 @@ import operator
 from typing import OrderedDict, Callable, Any

 from comfy.comfy_types import IO
-from comfy.lazy_helpers import is_input_unscheduled_unexecuted
+from comfy.lazy_helpers import is_input_pending
 from comfy.node_helpers import export_custom_nodes
 from comfy.nodes.package_typing import CustomNode, InputTypes
@@ -81,14 +81,13 @@ class LazySwitch(CustomNode):
     def check_lazy_status(self, switch, on_false=None, on_true=None):
         try:
-            on_false_not_evaluated, on_true_not_evaluated = is_input_unscheduled_unexecuted("on_false", "on_true")
+            on_false_not_evaluated, on_true_not_evaluated = is_input_pending("on_false", "on_true")
         except LookupError:
             on_false_not_evaluated, on_true_not_evaluated = on_false is None, on_true is None

         if switch and on_true_not_evaluated:
             return ["on_true"]
         if not switch and on_false_not_evaluated:
             return ["on_false"]
         return []

     def execute(self, switch, on_false=None, on_true=None):
         value = on_true if switch else on_false
@@ -140,7 +139,7 @@ class BinaryOperation(CustomNode):
     def check_lazy_status(self, lhs=None, op=None, rhs=None) -> list[str]:
         try:
-            lhs_not_evaluated, rhs_not_evaluated = is_input_unscheduled_unexecuted("lhs", "rhs")
+            lhs_not_evaluated, rhs_not_evaluated = is_input_pending("lhs", "rhs")
         except LookupError:
             lhs_not_evaluated, rhs_not_evaluated = lhs is None, rhs is None
         lhs_evaluated, rhs_evaluated = not lhs_not_evaluated, not rhs_not_evaluated
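Both call sites unpack the helper's result directly into two booleans; that works because is_input_pending is a generator and Python unpacks any iterable positionally. A self-contained sketch of the pattern (the predicate here is made up for illustration):

from typing import Any, Generator

def pending_flags(*names: str) -> Generator[bool, Any, None]:
    # Stand-in predicate; the real helper consults the executed node set.
    for name in names:
        yield name.endswith("true")

# Two names in, two bools out: the generator unpacks like a tuple.
on_false_pending, on_true_pending = pending_flags("on_false", "on_true")
assert (on_false_pending, on_true_pending) == (False, True)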