Further improvements to logic nodes, lazy evaluation and related

commit d9e3ba4bec (parent 97f911280e)
.gitignore (vendored), 4 changed lines:
@@ -1,7 +1,9 @@
 main.exp
 main.lib
 main.obj
-tests/inference/output/
+comfy/cmd/web/
+tests/**/output
+tests/**/temp
 .DS_Store
 /[Oo]utput/
 /[Ii]nput/
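The exact `tests/inference/output/` rule is replaced by recursive globs that cover test artifacts at any depth under `tests/`, plus the bundled web frontend directory. A quick sanity check of the new patterns, sketched with the third-party pathspec package (an assumption for illustration; against a real checkout you would ask git itself via `git check-ignore -v <path>`):

# requires: pip install pathspec
import pathspec

spec = pathspec.PathSpec.from_lines("gitwildmatch", [
    "comfy/cmd/web/",
    "tests/**/output",
    "tests/**/temp",
])

for path in [
    "comfy/cmd/web/index.html",        # bundled frontend, now ignored
    "tests/inference/output/img.png",  # covered by the recursive glob
    "tests/unit/temp/scratch.bin",     # temp dirs at any depth
    "tests/unit/test_nodes.py",        # regular test sources still tracked
]:
    print(path, spec.match_file(path))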
@@ -53,6 +53,8 @@ logging.getLogger("torch.distributed.elastic.multiprocessing.redirects").addFilt
 )
 logging.getLogger("alembic.runtime.migration").setLevel(logging.WARNING)
 logging.getLogger("asyncio").addFilter(lambda record: 'Using selector:' not in record.getMessage())
+logging.getLogger("requests_cache").setLevel(logging.ERROR)
+logging.getLogger("fsspec").setLevel(logging.WARNING)
 
 from ..cli_args import args
 
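Two more chatty dependencies are silenced at startup. `setLevel` is the blunt instrument (drop every record below the threshold), while the `addFilter` form used for asyncio above drops only matching messages and keeps the rest. A standalone illustration of the difference:

import logging

logging.basicConfig(level=logging.INFO)

noisy = logging.getLogger("fsspec")
noisy.setLevel(logging.WARNING)   # INFO and DEBUG records are now dropped
noisy.info("cache miss")          # suppressed
noisy.warning("slow backend")     # still shown

selective = logging.getLogger("asyncio")
selective.addFilter(lambda record: "Using selector:" not in record.getMessage())
selective.info("Using selector: EpollSelector")  # filtered out
selective.info("event loop started")             # still shown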
@@ -2,7 +2,7 @@ from .execution_context import current_execution_context
 from typing import Any, Generator, Sequence
 
 
-def is_input_unscheduled_unexecuted(*arg_names: Sequence[str]) -> Generator[bool, Any, None]:
+def is_input_pending(*arg_names: Sequence[str]) -> Generator[bool, Any, None]:
     """
     returns true if the given argument in the context of an executing node is not scheduled nor executed
     this will be true for inputs that are marked as lazy, and this method is more robust against nodes that return None
@@ -12,18 +12,18 @@ def is_input_unscheduled_unexecuted(*arg_names: Sequence[str]) -> Generator[bool
     context = current_execution_context()
     if context is None or context.execution_list is None:
         raise LookupError("Not executing a node")
-    assert context.execution_list is not None
-    dynprompt = context.execution_list.dynprompt
-    executed = context.executed
-    execution_list = context.execution_list
+    # assert context.execution_list is not None
+    # dynprompt = context.execution_list.dynprompt
+    executed = context.executed or frozenset()
+    # execution_list = context.execution_list
     inputs = context.inputs
-    unscheduled_unexecuted = dynprompt.all_node_ids() - executed - set(execution_list.pendingNodes.keys())
+    # unscheduled_unexecuted = dynprompt.all_node_ids() - executed - set(execution_list.pendingNodes.keys())
     for arg_name in arg_names:
         if arg_name not in inputs:
             raise ValueError(f"Input {arg_name} not found")
         input_ = inputs[arg_name]
         if isinstance(input_, list) or isinstance(input_, tuple) and len(input_) == 2:
             node_id, *_ = input_
-            yield node_id in unscheduled_unexecuted
+            yield node_id not in executed
         else:
             yield False
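Two behavioral changes land here: `executed` now defaults to an empty frozenset when the context carries none, and a producer counts as pending whenever it has not yet finished executing, rather than only when it was neither executed nor scheduled (the dynprompt/pendingNodes bookkeeping is commented out accordingly). Separately, a precedence subtlety in the unchanged `isinstance` condition is worth flagging; this is an editor's note, not part of the commit:

input_ = ["node_4", 0, "extra"]  # a 3-element list

# how Python actually parses the committed condition ('and' binds tighter than 'or'):
if isinstance(input_, list) or (isinstance(input_, tuple) and len(input_) == 2):
    print("matches: any list passes, regardless of length")

# the likely intended reading, if the length check should gate lists too:
if isinstance(input_, (list, tuple)) and len(input_) == 2:
    print("not reached for a 3-element list")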
@@ -1729,6 +1729,8 @@ class SaveImage:
     DESCRIPTION = "Saves the input images to your ComfyUI output directory."
 
     def save_images(self, images, filename_prefix="ComfyUI", prompt=None, extra_pnginfo=None):
+        if images is None:
+            return {"ui": {"images": []}}
         filename_prefix += self.prefix_append
         full_output_folder, filename, counter, subfolder, filename_prefix = folder_paths.get_save_image_path(filename_prefix, self.output_dir, images[0].shape[1], images[0].shape[0])
         results = list()
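With lazy evaluation in the graph, an output node can now legitimately receive `None` for an input whose producer was never scheduled, so `SaveImage` returns an empty UI result instead of crashing on `images[0]`. A sketch of the same defensive pattern for a hypothetical output node (the node and its names are illustrative, not from the commit):

class SaveTextSafe:
    """Hypothetical output node: tolerate a lazily-skipped input."""

    RETURN_TYPES = ()
    OUTPUT_NODE = True
    FUNCTION = "save"

    def save(self, text=None):
        if text is None:
            # the producer was never scheduled; report an empty result to the UI
            return {"ui": {"text": []}}
        with open("output.txt", "w") as f:
            f.write(text)
        return {"ui": {"text": [text]}}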
@@ -9,6 +9,7 @@ from comfy.component_model.tensor_types import RGBImageBatch, ImageBatch, MaskBa
 from comfy.nodes.package_typing import CustomNode
 from comfy_api.latest import io
 
+
 def resize_mask(mask, shape):
     return torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(shape[0], shape[1]), mode="bilinear").squeeze(1)
 
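The unchanged `resize_mask` context shows the usual trick for interpolating a batch of 2D masks: reshape to `(N, 1, H, W)` so `interpolate` treats each mask as a single-channel image, then drop the channel dimension again. A standalone check (not from the commit):

import torch

mask = torch.rand(2, 64, 64)  # a batch of 2 single-channel masks

resized = torch.nn.functional.interpolate(
    mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])),  # (2, 1, 64, 64)
    size=(32, 32),
    mode="bilinear",
).squeeze(1)                                                # back to (2, 32, 32)

print(resized.shape)  # torch.Size([2, 32, 32])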
@@ -252,13 +253,16 @@ class Flatten(CustomNode):
     CATEGORY = "image/postprocessing"
 
     def convert_rgba_to_rgb(self, images: ImageBatch, background_color) -> tuple[RGBImageBatch]:
+        b, h, w, c = images.shape
+        if c == 3:
+            return images,
         bg_color = torch.tensor(self.hex_to_rgb(background_color), dtype=torch.float32) / 255.0
         rgb = images[..., :3]
         alpha = images[..., 3:4]
         bg = bg_color.view(1, 1, 1, 3).expand(rgb.shape)
         blended = alpha * rgb + (1 - alpha) * bg
 
-        return (blended,)
+        return blended,
 
     @staticmethod
     def hex_to_rgb(hex_color):
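`Flatten` now short-circuits for images that are already RGB, and otherwise composites straight (unpremultiplied) alpha over a solid background with the standard blend `out = alpha * fg + (1 - alpha) * bg`. A toy check of that formula (standalone, not from the commit):

import torch

# a 50%-opaque white pixel over a black background should blend to mid-grey
rgb = torch.ones(1, 1, 1, 3)           # (batch, height, width, channel), white
alpha = torch.full((1, 1, 1, 1), 0.5)  # straight alpha, 50% opaque
bg = torch.zeros_like(rgb)             # black background

blended = alpha * rgb + (1 - alpha) * bg
print(blended.flatten().tolist())      # [0.5, 0.5, 0.5]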
@@ -3,7 +3,7 @@ import operator
 from typing import OrderedDict, Callable, Any
 
 from comfy.comfy_types import IO
-from comfy.lazy_helpers import is_input_unscheduled_unexecuted
+from comfy.lazy_helpers import is_input_pending
 from comfy.node_helpers import export_custom_nodes
 from comfy.nodes.package_typing import CustomNode, InputTypes
 
@@ -81,14 +81,13 @@ class LazySwitch(CustomNode):
 
     def check_lazy_status(self, switch, on_false=None, on_true=None):
         try:
-            on_false_not_evaluated, on_true_not_evaluated = is_input_unscheduled_unexecuted("on_false", "on_true")
+            on_false_not_evaluated, on_true_not_evaluated = is_input_pending("on_false", "on_true")
         except LookupError:
             on_false_not_evaluated, on_true_not_evaluated = on_false is None, on_true is None
         if switch and on_true_not_evaluated:
             return ["on_true"]
         if not switch and on_false_not_evaluated:
             return ["on_false"]
-        return []
 
     def execute(self, switch, on_false=None, on_true=None):
         value = on_true if switch else on_false
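`LazySwitch` asks the scheduler only for the branch it will actually return, so the untaken branch's subgraph is never executed; the `is_input_pending` probe keeps this correct even when a lazy upstream legitimately produced `None`, with the old `is None` heuristic retained as a fallback outside an execution context. A minimal sketch of the lazy-input protocol this builds on (the node name and wildcard type are illustrative, under the assumption of ComfyUI's `{"lazy": True}` input flag):

class PickBranch:
    """Sketch of the lazy-input protocol: lazy inputs stay unevaluated
    until check_lazy_status returns their names."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {"switch": ("BOOLEAN", {})},
            "optional": {
                "on_true": ("*", {"lazy": True}),
                "on_false": ("*", {"lazy": True}),
            },
        }

    RETURN_TYPES = ("*",)
    FUNCTION = "execute"

    def check_lazy_status(self, switch, on_false=None, on_true=None):
        # return the names of lazy inputs still needed; the executor schedules
        # their producers and re-invokes the node once they resolve
        return ["on_true"] if switch else ["on_false"]

    def execute(self, switch, on_false=None, on_true=None):
        return (on_true if switch else on_false,)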
@@ -140,7 +139,7 @@ class BinaryOperation(CustomNode):
 
     def check_lazy_status(self, lhs=None, op=None, rhs=None) -> list[str]:
         try:
-            lhs_not_evaluated, rhs_not_evaluated = is_input_unscheduled_unexecuted("lhs", "rhs")
+            lhs_not_evaluated, rhs_not_evaluated = is_input_pending("lhs", "rhs")
         except LookupError:
             lhs_not_evaluated, rhs_not_evaluated = lhs is None, rhs is None
         lhs_evaluated, rhs_evaluated = not lhs_not_evaluated, not rhs_not_evaluated
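`BinaryOperation` uses the same probe to decide which operands still need evaluation, which is what enables short-circuiting: one already-evaluated operand can settle a boolean result without ever running the other side. The hunk ends before that logic, so the continuation below is a hypothetical illustration, not the committed code:

def check_lazy_status_sketch(op, lhs=None, rhs=None,
                             lhs_evaluated=False, rhs_evaluated=False):
    """Hypothetical: request only the operands a short-circuiting op still needs."""
    if op == "and" and lhs_evaluated and not lhs:
        return []  # False and X -> False; rhs is never evaluated
    if op == "or" and lhs_evaluated and bool(lhs):
        return []  # True or X -> True; rhs is never evaluated
    needed = []
    if not lhs_evaluated:
        needed.append("lhs")
    if not rhs_evaluated:
        needed.append("rhs")
    return needed

# an "and" whose lhs already evaluated to False needs nothing more:
print(check_lazy_status_sketch("and", lhs=False, lhs_evaluated=True))  # []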