Compare commits

...

8 Commits

Author SHA1 Message Date
LaVie024
a0ec2bb749
Merge 2cb22c9afd into 09725967cf 2026-01-27 07:01:10 +01:00
comfyanonymous
09725967cf ComfyUI version v0.11.0
Some checks failed
Execution Tests / test (ubuntu-latest) (push) Waiting to run
Execution Tests / test (windows-latest) (push) Waiting to run
Test server launches without errors / test (push) Waiting to run
Unit Tests / test (macos-latest) (push) Waiting to run
Unit Tests / test (ubuntu-latest) (push) Waiting to run
Unit Tests / test (windows-2022) (push) Waiting to run
Python Linting / Run Ruff (push) Waiting to run
Python Linting / Run Pylint (push) Waiting to run
Build package / Build Test (3.10) (push) Has been cancelled
Build package / Build Test (3.11) (push) Has been cancelled
Build package / Build Test (3.14) (push) Has been cancelled
Build package / Build Test (3.12) (push) Has been cancelled
Build package / Build Test (3.13) (push) Has been cancelled
Full Comfy CI Workflow Runs / test-stable (12.1, , linux, 3.10, [self-hosted Linux], stable) (push) Waiting to run
Full Comfy CI Workflow Runs / test-stable (12.1, , linux, 3.11, [self-hosted Linux], stable) (push) Waiting to run
Full Comfy CI Workflow Runs / test-stable (12.1, , linux, 3.12, [self-hosted Linux], stable) (push) Waiting to run
Full Comfy CI Workflow Runs / test-unix-nightly (12.1, , linux, 3.11, [self-hosted Linux], nightly) (push) Waiting to run
Execution Tests / test (macos-latest) (push) Waiting to run
2026-01-26 23:08:01 -05:00
ComfyUI Wiki
5f62440fbb
chore: update workflow templates to v0.8.24 (#12103) 2026-01-26 22:47:33 -05:00
ComfyUI Wiki
ac91c340f4
Update workflow templates to v0.8.23 (#12102) 2026-01-26 21:39:39 -05:00
comfyanonymous
2db3b0ff90
Update amd portable for rocm 7.2 (#12101)
* Update amd portable for rocm 7.2

* Update Python patch version in release workflow
2026-01-26 19:49:31 -05:00
rattus
6516ab335d
wan-vae: Switch off feature cache for single frame (#12090)
The code throughout is None-safe and simply skips the feature-cache saving
step when the cache is None. Set it to None for single-frame use so qwen
doesn't burn VRAM on the unused cache.
2026-01-26 19:40:19 -05:00
Jukka Seppänen
ad53e78f11
Fix Noise_EmptyNoise when using nested latents (#12089) 2026-01-26 19:25:00 -05:00
LaVie024
2cb22c9afd
Added Boolean Logic Gate node 2026-01-24 21:06:04 +00:00
7 changed files with 116 additions and 10 deletions

View File

@ -20,7 +20,7 @@ jobs:
git_tag: ${{ inputs.git_tag }}
cache_tag: "cu130"
python_minor: "13"
python_patch: "9"
python_patch: "11"
rel_name: "nvidia"
rel_extra_name: ""
test_release: true
@ -65,11 +65,11 @@ jobs:
contents: "write"
packages: "write"
pull-requests: "read"
name: "Release AMD ROCm 7.1.1"
name: "Release AMD ROCm 7.2"
uses: ./.github/workflows/stable-release.yml
with:
git_tag: ${{ inputs.git_tag }}
cache_tag: "rocm711"
cache_tag: "rocm72"
python_minor: "12"
python_patch: "10"
rel_name: "amd"

View File

@ -479,10 +479,12 @@ class WanVAE(nn.Module):
def encode(self, x):
conv_idx = [0]
feat_map = [None] * count_conv3d(self.decoder)
## cache
t = x.shape[2]
iter_ = 1 + (t - 1) // 4
feat_map = None
if iter_ > 1:
feat_map = [None] * count_conv3d(self.decoder)
## split the encode input x along the time axis into chunks of 1, 4, 4, 4, ...
for i in range(iter_):
conv_idx = [0]
@ -502,10 +504,11 @@ class WanVAE(nn.Module):
def decode(self, z):
conv_idx = [0]
feat_map = [None] * count_conv3d(self.decoder)
# z: [b,c,t,h,w]
iter_ = z.shape[2]
feat_map = None
if iter_ > 1:
feat_map = [None] * count_conv3d(self.decoder)
x = self.conv2(z)
for i in range(iter_):
conv_idx = [0]

View File

@ -701,7 +701,14 @@ class Noise_EmptyNoise:
def generate_noise(self, input_latent):
    """Return all-zero "noise" shaped like input_latent["samples"].

    Supports nested latents: each component tensor is zero-filled
    individually (preserving its shape, dtype, and layout) and the results
    are re-wrapped in a NestedTensor. Plain tensors get a single zeros
    tensor. The noise is always produced on the CPU.
    """
    latent_image = input_latent["samples"]
    if latent_image.is_nested:
        # Zero-fill every component of the nested latent separately;
        # components may differ in shape, so a single zeros() won't do.
        zeros = [
            torch.zeros(t.shape, dtype=t.dtype, layout=t.layout, device="cpu")
            for t in latent_image.unbind()
        ]
        return comfy.nested_tensor.NestedTensor(zeros)
    else:
        return torch.zeros(latent_image.shape, dtype=latent_image.dtype, layout=latent_image.layout, device="cpu")
class Noise_RandomNoise:

View File

@ -3,6 +3,7 @@ from typing import TypedDict
from typing_extensions import override
from comfy_api.latest import ComfyExtension, io
from comfy_api.latest import _io
from typing import Any
# sentinel for missing inputs
MISSING = object()
@ -255,6 +256,100 @@ class InvertBooleanNode(io.ComfyNode):
def execute(cls, boolean: bool) -> io.NodeOutput:
return io.NodeOutput(not boolean)
class BooleanLogicGate(io.ComfyNode):
    """Selectable boolean gate node.

    The "mode" DynamicCombo picks the gate. NOT exposes a single boolean
    input "a"; every other gate exposes boolean inputs "a" and "b".
    """

    # Order matters: an integer mode value indexes into this tuple.
    _MODES = ("NOT", "AND", "OR", "NAND", "NOR", "XOR", "XNOR")

    # Dispatch table: gate name -> two-argument boolean function.
    # NOT ignores its second argument.
    _OPS = {
        "NOT": lambda a, b: not a,
        "AND": lambda a, b: a and b,
        "OR": lambda a, b: a or b,
        "NAND": lambda a, b: not (a and b),
        "NOR": lambda a, b: not (a or b),
        "XOR": lambda a, b: a ^ b,
        "XNOR": lambda a, b: not (a ^ b),
    }

    @classmethod
    def define_schema(cls):
        # Factory helpers so each DynamicCombo option gets its own fresh
        # Input objects rather than sharing one instance across options.
        def a():
            return io.Boolean.Input("a")

        def b():
            return io.Boolean.Input("b")

        options = [
            io.DynamicCombo.Option(m, [a()] if m == "NOT" else [a(), b()])
            for m in cls._MODES
        ]
        return io.Schema(
            node_id="BooleanLogicGate",
            search_aliases=["not", "logic", "toggle"],
            display_name="Boolean Logic Gate",
            category="logic",
            inputs=[io.DynamicCombo.Input("mode", options=options)],
            outputs=[io.Boolean.Output()],
        )

    @staticmethod
    def _deep_find_mode(x: Any) -> str | None:
        """Recursively search *x* for a gate name.

        Accepts a mode string directly, an integer index into ``_MODES``
        (bool is a subclass of int and is deliberately excluded), or a
        nested dict/list/tuple containing either. Returns None when
        nothing matches.
        """
        ops = set(BooleanLogicGate._MODES)
        if isinstance(x, str):
            return x if x in ops else None
        if isinstance(x, int) and not isinstance(x, bool):
            if 0 <= x < len(BooleanLogicGate._MODES):
                return BooleanLogicGate._MODES[x]
            return None
        if isinstance(x, dict):
            # The option name may appear as a key.
            for k, v in x.items():
                if isinstance(k, str) and k in ops:
                    return k
                m = BooleanLogicGate._deep_find_mode(v)
                if m:
                    return m
        if isinstance(x, (list, tuple)):
            for v in x:
                m = BooleanLogicGate._deep_find_mode(v)
                if m:
                    return m
        return None

    @staticmethod
    def _deep_get(x: Any, key: str, default: Any = None) -> Any:
        """Depth-first lookup of *key* in nested dict/list/tuple structures.

        Returns the first hit; a recursive result identical to *default*
        is treated as "not found" and the search continues.
        """
        if isinstance(x, dict):
            if key in x:
                return x[key]
            for v in x.values():
                found = BooleanLogicGate._deep_get(v, key, default=default)
                if found is not default:
                    return found
        elif isinstance(x, (list, tuple)):
            for v in x:
                found = BooleanLogicGate._deep_get(v, key, default=default)
                if found is not default:
                    return found
        return default

    @classmethod
    def execute(cls, mode: Any) -> io.NodeOutput:
        # The frontend may deliver the DynamicCombo selection in several
        # shapes; dig the gate name and operand values out of it.
        mode_str = cls._deep_find_mode(mode) or "NOT"
        a = bool(cls._deep_get(mode, "a"))
        b = bool(cls._deep_get(mode, "b", False))  # absent for NOT => False
        # Pass the bool directly — matching InvertBooleanNode's
        # io.NodeOutput(not boolean). Wrapping it in a 1-tuple would emit
        # a tuple instead of a boolean.
        return io.NodeOutput(cls._OPS[mode_str](a, b))
class LogicExtension(ComfyExtension):
@override
async def get_node_list(self) -> list[type[io.ComfyNode]]:
@ -268,6 +363,7 @@ class LogicExtension(ComfyExtension):
# AutogrowPrefixTestNode,
# ComboOutputTestNode,
# InvertBooleanNode,
BooleanLogicGate,
]
async def comfy_entrypoint() -> LogicExtension:

View File

@ -1,3 +1,3 @@
# This file is automatically generated by the build process when version is
# updated in pyproject.toml.
__version__ = "0.10.0"
__version__ = "0.11.0"

View File

@ -1,6 +1,6 @@
[project]
name = "ComfyUI"
version = "0.10.0"
version = "0.11.0"
readme = "README.md"
license = { file = "LICENSE" }
requires-python = ">=3.10"

View File

@ -1,5 +1,5 @@
comfyui-frontend-package==1.37.11
comfyui-workflow-templates==0.8.15
comfyui-workflow-templates==0.8.24
comfyui-embedded-docs==0.4.0
torch
torchsde