fix: support batch broadcasting in JoinImageWithAlpha node (#12580)

The node used min(len(image), len(alpha)) as batch size, so passing
a batch of images with a single alpha mask returned only 1 image.

Changed to max() with modulo indexing to broadcast the shorter input,
matching the behavior of other compositing nodes like ImageCompositeMasked.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
AbdulRehman 2026-04-06 16:49:49 +05:00
parent 4b1444fc7a
commit bb5c337889
No known key found for this signature in database

View File

@ -202,12 +202,12 @@ class JoinImageWithAlpha(io.ComfyNode):
@classmethod
def execute(cls, image: torch.Tensor, alpha: torch.Tensor) -> io.NodeOutput:
    """Join an RGB image batch with an alpha-mask batch into RGBA images.

    Uses the longer of the two batches as the output batch size and
    broadcasts the shorter input via modulo indexing, so e.g. a batch of
    images can share a single alpha mask (matching the behavior of other
    compositing nodes such as ImageCompositeMasked).

    Args:
        image: image batch; only the first 3 channels (RGB) are kept.
               Presumably shaped (batch, H, W, C) — confirm against callers.
        alpha: mask batch, resized to the image's spatial dims.

    Returns:
        io.NodeOutput wrapping a stacked RGBA tensor.
    """
    # Broadcast: iterate over the longer batch, wrap the shorter one.
    batch_size = max(len(image), len(alpha))
    out_images = []
    # Masks appear to be stored inverted relative to alpha (1.0 - mask);
    # resize_mask is a project helper — NOTE(review): confirm it matches
    # image.shape[1:] spatial dims.
    alpha = 1.0 - resize_mask(alpha, image.shape[1:])
    for i in range(batch_size):
        # Modulo indexing lets a length-1 input pair with every element
        # of the longer batch.
        out_images.append(
            torch.cat(
                (image[i % len(image)][:, :, :3], alpha[i % len(alpha)].unsqueeze(2)),
                dim=2,
            )
        )
    return io.NodeOutput(torch.stack(out_images))