Mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2026-02-17 08:52:34 +08:00)

commit 823cac376b
Merge remote-tracking branch 'origin/master' into group-nodes
@@ -77,9 +77,9 @@ class ModelSamplingDiscrete(torch.nn.Module):

     def percent_to_sigma(self, percent):
         if percent <= 0.0:
-            return torch.tensor(999999999.9)
+            return 999999999.9
         if percent >= 1.0:
-            return torch.tensor(0.0)
+            return 0.0
         percent = 1.0 - percent
-        return self.sigma(torch.tensor(percent * 999.0))
+        return self.sigma(torch.tensor(percent * 999.0)).item()

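The practical effect of this hunk is that percent_to_sigma now hands callers a plain Python float instead of a zero-dimensional torch tensor. A minimal, self-contained sketch of the new contract (a stand-in function, not the ComfyUI implementation; the identity sigma function is purely illustrative):

    import torch

    # Stand-in mirroring the new percent_to_sigma behaviour: the boundary
    # cases return plain floats, and .item() converts the tensor result.
    def percent_to_sigma_like(sigma_fn, percent):
        if percent <= 0.0:
            return 999999999.9
        if percent >= 1.0:
            return 0.0
        return sigma_fn(torch.tensor((1.0 - percent) * 999.0)).item()

    # With an identity sigma function the result is already a float,
    # so callers no longer need to call .item() themselves.
    print(percent_to_sigma_like(lambda t: t, 0.5))  # 499.5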
comfy_extras/nodes_images.py (new file, 29 lines)
@@ -0,0 +1,29 @@
+import nodes
+MAX_RESOLUTION = nodes.MAX_RESOLUTION
+
+class ImageCrop:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": { "image": ("IMAGE",),
+                              "width": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}),
+                              "height": ("INT", {"default": 512, "min": 1, "max": MAX_RESOLUTION, "step": 1}),
+                              "x": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}),
+                              "y": ("INT", {"default": 0, "min": 0, "max": MAX_RESOLUTION, "step": 1}),
+                              }}
+    RETURN_TYPES = ("IMAGE",)
+    FUNCTION = "crop"
+
+    CATEGORY = "image/transform"
+
+    def crop(self, image, width, height, x, y):
+        x = min(x, image.shape[2] - 1)
+        y = min(y, image.shape[1] - 1)
+        to_x = width + x
+        to_y = height + y
+        img = image[:,y:to_y, x:to_x, :]
+        return (img,)
+
+
+NODE_CLASS_MAPPINGS = {
+    "ImageCrop": ImageCrop,
+}
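For reference, a hedged usage sketch of the crop logic added above, run outside of a ComfyUI graph. The 1x512x768x3 shape assumes ComfyUI's (batch, height, width, channels) image layout; the numbers are made up for illustration:

    import torch

    # Standalone re-statement of ImageCrop.crop's slicing rather than an
    # import of the node, so it runs without a ComfyUI checkout on the path.
    image = torch.rand(1, 512, 768, 3)      # batch, height, width, channels
    x, y, width, height = 100, 50, 256, 128
    x = min(x, image.shape[2] - 1)          # clamp the origin inside the image
    y = min(y, image.shape[1] - 1)
    cropped = image[:, y:y + height, x:x + width, :]
    print(cropped.shape)                    # torch.Size([1, 128, 256, 3])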
@@ -67,11 +67,11 @@ class ModelSamplingDiscreteLCM(torch.nn.Module):

     def percent_to_sigma(self, percent):
         if percent <= 0.0:
-            return torch.tensor(999999999.9)
+            return 999999999.9
         if percent >= 1.0:
-            return torch.tensor(0.0)
+            return 0.0
         percent = 1.0 - percent
-        return self.sigma(torch.tensor(percent * 999.0))
+        return self.sigma(torch.tensor(percent * 999.0)).item()


 def rescale_zero_terminal_snr_sigmas(sigmas):
@@ -16,8 +16,8 @@ class PatchModelAddDownscale:
     CATEGORY = "_for_testing"

     def patch(self, model, block_number, downscale_factor, start_percent, end_percent, downscale_after_skip):
-        sigma_start = model.model.model_sampling.percent_to_sigma(start_percent).item()
-        sigma_end = model.model.model_sampling.percent_to_sigma(end_percent).item()
+        sigma_start = model.model.model_sampling.percent_to_sigma(start_percent)
+        sigma_end = model.model.model_sampling.percent_to_sigma(end_percent)

         def input_block_patch(h, transformer_options):
             if transformer_options["block"][1] == block_number:
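This call site is the other half of the percent_to_sigma change: the thresholds it receives are now plain floats, so the .item() calls go away. A hedged sketch of how such a start/end sigma window is typically used to gate a patch (the numeric values are assumptions, not taken from the commit):

    # Assumed sigma window; in ComfyUI these would come from
    # model.model.model_sampling.percent_to_sigma(...), now plain floats.
    sigma_start = 14.6
    sigma_end = 0.7
    current_sigma = 5.0

    # Float thresholds compare directly against the current sigma,
    # with no tensor-to-scalar conversion at the call site.
    print(sigma_end <= current_sigma <= sigma_start)  # True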
nodes.py (1 line changed)
@@ -1800,6 +1800,7 @@ def init_custom_nodes():
         "nodes_hypertile.py",
         "nodes_model_advanced.py",
         "nodes_model_downscale.py",
+        "nodes_images.py",
     ]

     for node_file in extras_files:
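With "nodes_images.py" registered in extras_files, the new ImageCrop node gets picked up at startup. A rough, hypothetical sketch of what loading such a list amounts to (not ComfyUI's actual loader; it assumes the repository root is on sys.path):

    import importlib

    # Module names without the ".py" suffix used in extras_files.
    extras_modules = ["nodes_hypertile", "nodes_model_advanced",
                      "nodes_model_downscale", "nodes_images"]
    node_classes = {}
    for name in extras_modules:
        module = importlib.import_module("comfy_extras." + name)
        node_classes.update(getattr(module, "NODE_CLASS_MAPPINGS", {}))
    print(sorted(node_classes))  # would now include "ImageCrop"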