Mirror of https://github.com/comfyanonymous/ComfyUI.git
Merge branch 'comfyanonymous:master' into feature/preview-latent

Commit: 25b939d856
@@ -30,6 +30,7 @@ jobs:
     - uses: actions/checkout@v3
       with:
         fetch-depth: 0
+        persist-credentials: false
     - shell: bash
       run: |
        cd ..
@@ -17,6 +17,7 @@ jobs:
     - uses: actions/checkout@v3
       with:
        fetch-depth: 0
+       persist-credentials: false
    - uses: actions/setup-python@v4
      with:
        python-version: '3.11.3'
@@ -57,35 +57,42 @@ def bislerp(samples, width, height):
     shape[2] = height
     out1 = torch.empty(shape, dtype=samples.dtype, layout=samples.layout, device=samples.device)

-    def algorithm(in1, w1, in2, w2):
+    def algorithm(in1, in2, t):
         dims = in1.shape
-        val = w2
+        val = t

         #flatten to batches
         low = in1.reshape(dims[0], -1)
         high = in2.reshape(dims[0], -1)

-        low_norm = low/torch.norm(low, dim=1, keepdim=True)
-        high_norm = high/torch.norm(high, dim=1, keepdim=True)
+        low_weight = torch.norm(low, dim=1, keepdim=True)
+        low_weight[low_weight == 0] = 0.0000000001
+        low_norm = low/low_weight
+        high_weight = torch.norm(high, dim=1, keepdim=True)
+        high_weight[high_weight == 0] = 0.0000000001
+        high_norm = high/high_weight

-        # in case we divide by zero
-        low_norm[low_norm != low_norm] = 0.0
-        high_norm[high_norm != high_norm] = 0.0
-        omega = torch.acos((low_norm*high_norm).sum(1))
+        dot_prod = (low_norm*high_norm).sum(1)
+        dot_prod[dot_prod > 0.9995] = 0.9995
+        dot_prod[dot_prod < -0.9995] = -0.9995
+        omega = torch.acos(dot_prod)
         so = torch.sin(omega)
-        res = (torch.sin((1.0-val)*omega)/so).unsqueeze(1)*low + (torch.sin(val*omega)/so).unsqueeze(1) * high
+        res = (torch.sin((1.0-val)*omega)/so).unsqueeze(1)*low_norm + (torch.sin(val*omega)/so).unsqueeze(1) * high_norm
+        res *= (low_weight * (1.0-val) + high_weight * val)
         return res.reshape(dims)

     for x_dest in range(shape[3]):
         for y_dest in range(shape[2]):
-            y = (y_dest) * height_scale
-            x = (x_dest) * width_scale
+            y = (y_dest + 0.5) * height_scale - 0.5
+            x = (x_dest + 0.5) * width_scale - 0.5

             x1 = max(math.floor(x), 0)
             x2 = min(x1 + 1, samples.shape[3] - 1)
+            wx = x - math.floor(x)

             y1 = max(math.floor(y), 0)
             y2 = min(y1 + 1, samples.shape[2] - 1)
+            wy = y - math.floor(y)

             in1 = samples[:,:,y1,x1]
             in2 = samples[:,:,y1,x2]
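Reviewer note: the reworked algorithm() helper is a spherical linear interpolation (slerp) over flattened per-batch vectors. Directions are slerped, magnitudes are blended linearly, and the zero-norm and acos-domain edge cases are guarded explicitly instead of patching NaNs afterwards. A minimal standalone sketch of the same logic, assuming only torch and using the hypothetical name slerp_batch:

import torch

def slerp_batch(in1, in2, t):
    # Sketch of the updated algorithm() helper from the hunk above.
    dims = in1.shape
    low = in1.reshape(dims[0], -1)
    high = in2.reshape(dims[0], -1)

    # Split each vector into magnitude ("weight") and direction, nudging
    # zero magnitudes so the divisions below never produce NaNs.
    low_weight = torch.norm(low, dim=1, keepdim=True)
    low_weight[low_weight == 0] = 1e-10
    high_weight = torch.norm(high, dim=1, keepdim=True)
    high_weight[high_weight == 0] = 1e-10
    low_norm = low / low_weight
    high_norm = high / high_weight

    # Clamp the cosine so acos/sin stay well-conditioned when the
    # directions are (nearly) parallel or anti-parallel.
    dot = (low_norm * high_norm).sum(1).clamp(-0.9995, 0.9995)
    omega = torch.acos(dot)
    so = torch.sin(omega)

    # Slerp the directions, then blend the magnitudes linearly.
    res = (torch.sin((1.0 - t) * omega) / so).unsqueeze(1) * low_norm \
        + (torch.sin(t * omega) / so).unsqueeze(1) * high_norm
    res *= low_weight * (1.0 - t) + high_weight * t
    return res.reshape(dims)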
@@ -95,13 +102,13 @@ def bislerp(samples, width, height):
             if (x1 == x2) and (y1 == y2):
                 out_value = in1
             elif (x1 == x2):
-                out_value = algorithm(in1, (y2 - y), in3, (y - y1))
+                out_value = algorithm(in1, in3, wy)
             elif (y1 == y2):
-                out_value = algorithm(in1, (x2 - x), in2, (x - x1))
+                out_value = algorithm(in1, in2, wx)
             else:
-                o1 = algorithm(in1, (x2 - x), in2, (x - x1))
-                o2 = algorithm(in3, (x2 - x), in4, (x - x1))
-                out_value = algorithm(o1, (y2 - y), o2, (y - y1))
+                o1 = algorithm(in1, in2, wx)
+                o2 = algorithm(in3, in4, wx)
+                out_value = algorithm(o1, o2, wy)

             out1[:,:,y_dest,x_dest] = out_value
     return out1
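Reviewer note: the other change in the loop is the destination-to-source mapping. (dest + 0.5) * scale - 0.5 samples at pixel centres rather than corners, and wx/wy carry the fractional offsets that are now passed straight to algorithm() as the interpolation parameter. A small numeric sketch, assuming (as in the surrounding function) that the scale factor is source size divided by destination size:

import math

# Hypothetical numbers: upscaling a 64-wide latent to 128, so scale = 64 / 128.
width_scale = 64 / 128

for x_dest in (0, 1, 63, 127):
    x_old = x_dest * width_scale                    # old corner-aligned mapping
    x_new = (x_dest + 0.5) * width_scale - 0.5      # new centre-aligned mapping
    x1 = max(math.floor(x_new), 0)
    wx = x_new - math.floor(x_new)
    print(f"x_dest={x_dest:3d}  old={x_old:6.2f}  new={x_new:6.2f}  x1={x1:2d}  wx={wx:.2f}")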
nodes.py (+21)

@@ -898,6 +898,25 @@ class LatentUpscale:
         s["samples"] = comfy.utils.common_upscale(samples["samples"], width // 8, height // 8, upscale_method, crop)
         return (s,)

+class LatentUpscaleBy:
+    upscale_methods = ["nearest-exact", "bilinear", "area", "bislerp"]
+
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": { "samples": ("LATENT",), "upscale_method": (s.upscale_methods,),
+                              "scale_by": ("FLOAT", {"default": 1.5, "min": 0.01, "max": 8.0, "step": 0.01}),}}
+    RETURN_TYPES = ("LATENT",)
+    FUNCTION = "upscale"
+
+    CATEGORY = "latent"
+
+    def upscale(self, samples, upscale_method, scale_by):
+        s = samples.copy()
+        width = round(samples["samples"].shape[3] * scale_by)
+        height = round(samples["samples"].shape[2] * scale_by)
+        s["samples"] = comfy.utils.common_upscale(samples["samples"], width, height, upscale_method, "disabled")
+        return (s,)
+
 class LatentRotate:
     @classmethod
     def INPUT_TYPES(s):
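Reviewer note: unlike LatentUpscale, the new node takes a single scale factor and rounds each latent dimension (latent spatial dims are 1/8 of the pixel resolution, as the width // 8 in the context line above shows). A rough sketch of the size math, assuming a Stable-Diffusion-style 4-channel latent and using F.interpolate only as a stand-in for comfy.utils.common_upscale:

import torch
import torch.nn.functional as F

# Dummy latent dict shaped like ComfyUI's LATENT type: [batch, channels, h/8, w/8].
latent = {"samples": torch.randn(1, 4, 64, 64)}   # roughly a 512x512 image
scale_by = 1.5

# Same size math as LatentUpscaleBy.upscale(): round each spatial dimension.
width = round(latent["samples"].shape[3] * scale_by)    # 96  (~768 px)
height = round(latent["samples"].shape[2] * scale_by)   # 96  (~768 px)

# Stand-in resize; the node itself also offers "nearest-exact", "area" and "bislerp".
out = {"samples": F.interpolate(latent["samples"], size=(height, width), mode="bilinear")}
print(out["samples"].shape)   # torch.Size([1, 4, 96, 96])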
@@ -1374,6 +1393,7 @@ NODE_CLASS_MAPPINGS = {
     "VAELoader": VAELoader,
     "EmptyLatentImage": EmptyLatentImage,
     "LatentUpscale": LatentUpscale,
+    "LatentUpscaleBy": LatentUpscaleBy,
     "LatentFromBatch": LatentFromBatch,
     "RepeatLatentBatch": RepeatLatentBatch,
     "SaveImage": SaveImage,
@@ -1453,6 +1473,7 @@ NODE_DISPLAY_NAME_MAPPINGS = {
     "LatentCrop": "Crop Latent",
     "EmptyLatentImage": "Empty Latent Image",
     "LatentUpscale": "Upscale Latent",
+    "LatentUpscaleBy": "Upscale Latent By",
     "LatentComposite": "Latent Composite",
     "LatentFromBatch" : "Latent From Batch",
     "RepeatLatentBatch": "Repeat Latent Batch",
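Reviewer note: adding the class to NODE_CLASS_MAPPINGS and a label to NODE_DISPLAY_NAME_MAPPINGS is what makes the node resolvable by name; the method named by FUNCTION is what gets called with the declared inputs. An illustrative lookup sketch (the getattr dispatch is a simplification, not the actual executor code; latent is the dummy dict from the previous sketch):

from nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS

node_cls = NODE_CLASS_MAPPINGS["LatentUpscaleBy"]
display = NODE_DISPLAY_NAME_MAPPINGS["LatentUpscaleBy"]   # "Upscale Latent By"

node = node_cls()
# Call the method named by FUNCTION ("upscale") with the node's declared inputs.
(result,) = getattr(node, node_cls.FUNCTION)(samples=latent, upscale_method="bislerp", scale_by=1.5)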