Add Reference Only ControlNet hack, add ControlNet++ models

doctorpangloss 2024-06-10 20:36:23 -07:00
parent e7682ced56
commit 6789e9c71e
2 changed files with 64 additions and 0 deletions


@@ -284,6 +284,13 @@ KNOWN_CONTROLNETS = [
HuggingFile("stabilityai/stable-cascade", "controlnet/canny.safetensors", save_with_filename="stable_cascade_canny.safetensors"),
HuggingFile("stabilityai/stable-cascade", "controlnet/inpainting.safetensors", save_with_filename="stable_cascade_inpainting.safetensors"),
HuggingFile("stabilityai/stable-cascade", "controlnet/super_resolution.safetensors", save_with_filename="stable_cascade_super_resolution.safetensors"),
HuggingFile("limingcv/ControlNet-Plus-Plus", "checkpoints/canny/controlnet/diffusion_pytorch_model.safetensors", save_with_filename="ft_controlnet_sd15_canny_res512_bs256_lr1e-5_warmup100_iter5k_fp16ft0-1000.safetensors"),
# not clear if this should use black to white or some other depth rendering
HuggingFile("limingcv/ControlNet-Plus-Plus", "checkpoints/depth/controlnet/diffusion_pytorch_model.safetensors", save_with_filename="ft_controlnet_sd15_depth_res512_bs256_lr1e-5_warmup100_iter5k_fp16ft0-200.safetensors"),
HuggingFile("limingcv/ControlNet-Plus-Plus", "checkpoints/hed/controlnet/diffusion_pytorch_model.bin", save_with_filename="reward_ft5k_controlnet_sd15_hed_res512_bs256_lr1e-5_warmup100_scale-1_iter5k_fp16_train0-1k_reward0-200.bin"),
HuggingFile("limingcv/ControlNet-Plus-Plus", "checkpoints/lineart/controlnet/diffusion_pytorch_model.bin", save_with_filename="reward_ft5k_controlnet_sd15_lineart_res512_bs256_lr1e-5_warmup100_scale-10_iter5k_fp16_train0-1k_reward0-200.bin"),
# should be used with ADE20K
HuggingFile("limingcv/ControlNet-Plus-Plus", "checkpoints/seg/controlnet/diffusion_pytorch_model.safetensors", save_with_filename="ft_controlnet_sd15_seg_res512_bs256_lr1e-5_warmup100_iter5k_fp16.safetensors"),
]
KNOWN_DIFF_CONTROLNETS = [
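
As a rough orientation, each new entry above pairs a Hugging Face repo id and an in-repo path with a shorter local filename. The sketch below shows one way such an entry could be materialized on disk; hf_hub_download is the real huggingface_hub API, but the attribute names (repo_id, filename, save_with_filename), the fetch_controlnet helper, and the models/controlnet destination are illustrative assumptions, not this repository's actual downloader.

import shutil
from pathlib import Path
from huggingface_hub import hf_hub_download

def fetch_controlnet(entry, dest_dir="models/controlnet"):
    # Resolve the file through the local Hugging Face Hub cache (downloading if needed).
    cached = hf_hub_download(repo_id=entry.repo_id, filename=entry.filename)
    # Copy it out under the shorter name declared by the entry.
    target = Path(dest_dir) / entry.save_with_filename
    target.parent.mkdir(parents=True, exist_ok=True)
    shutil.copyfile(cached, target)
    return str(target)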


@@ -0,0 +1,57 @@
import torch


class ReferenceOnlySimple:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"model": ("MODEL",),
                             "reference": ("LATENT",),
                             "batch_size": ("INT", {"default": 1, "min": 1, "max": 64})
                             }}

    RETURN_TYPES = ("MODEL", "LATENT")
    FUNCTION = "reference_only"

    CATEGORY = "custom_node_experiments"

    def reference_only(self, model, reference, batch_size):
        model_reference = model.clone()

        # Allocate an empty latent batch with the same spatial shape as the reference.
        size_latent = list(reference["samples"].shape)
        size_latent[0] = batch_size
        latent = {}
        latent["samples"] = torch.zeros(size_latent)

        # Size of one batch group as seen by the sampler: generated samples plus the reference.
        batch = latent["samples"].shape[0] + reference["samples"].shape[0]

        def reference_apply(q, k, v, extra_options):
            # Double the keys along the token dimension, then fill the appended half of
            # every non-reference sample with the reference sample's queries, so each
            # generated sample also attends over the reference sample's tokens.
            k = k.clone().repeat(1, 2, 1)
            offset = 0
            if q.shape[0] > batch:
                offset = batch

            for o in range(0, q.shape[0], batch):
                for x in range(1, batch):
                    k[x + o, q.shape[1]:] = q[o, :]

            # The extended tensor is returned as both keys and values.
            return q, k, k

        model_reference.set_model_attn1_patch(reference_apply)

        # Prepend the reference latent to the batch handed to the sampler, and zero out
        # the noise mask over the reference slot so it is left untouched during sampling.
        out_latent = torch.cat((reference["samples"], latent["samples"]))
        if "noise_mask" in latent:
            mask = latent["noise_mask"]
        else:
            mask = torch.ones((64, 64), dtype=torch.float32, device="cpu")

        if len(mask.shape) < 3:
            mask = mask.unsqueeze(0)
        if mask.shape[0] < latent["samples"].shape[0]:
            print(latent["samples"].shape, mask.shape)
            mask = mask.repeat(latent["samples"].shape[0], 1, 1)

        out_mask = torch.zeros((1, mask.shape[1], mask.shape[2]), dtype=torch.float32, device="cpu")
        return (model_reference, {"samples": out_latent, "noise_mask": torch.cat((out_mask, mask))})


NODE_CLASS_MAPPINGS = {
    "ReferenceOnlySimple": ReferenceOnlySimple,
}
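
For a quick sanity check of the node's plumbing, the snippet below calls reference_only directly. _DummyModel is a hypothetical stand-in for ComfyUI's ModelPatcher that only stubs the two methods the node uses; in an actual workflow the MODEL output would typically feed a KSampler and the LATENT output would supply its latent_image.

import torch

class _DummyModel:
    # Hypothetical stand-in: records the attention patch instead of applying it to a real model.
    def __init__(self):
        self.attn1_patch = None

    def clone(self):
        return self

    def set_model_attn1_patch(self, fn):
        self.attn1_patch = fn

node = ReferenceOnlySimple()
reference = {"samples": torch.zeros((1, 4, 64, 64))}  # one 512x512 SD1.5-sized latent
patched_model, latent = node.reference_only(_DummyModel(), reference, batch_size=1)
print(latent["samples"].shape)     # torch.Size([2, 4, 64, 64]): reference plus one empty latent
print(latent["noise_mask"].shape)  # torch.Size([2, 64, 64]): zeros over the reference slot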