Mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2026-01-26 14:20:27 +08:00)
Move nodes from previous PR into their own file. (#12066)
Some checks failed
Python Linting / Run Ruff (push) Has been cancelled
Python Linting / Run Pylint (push) Has been cancelled
Full Comfy CI Workflow Runs / test-stable (12.1, , linux, 3.10, [self-hosted Linux], stable) (push) Has been cancelled
Full Comfy CI Workflow Runs / test-stable (12.1, , linux, 3.11, [self-hosted Linux], stable) (push) Has been cancelled
Full Comfy CI Workflow Runs / test-stable (12.1, , linux, 3.12, [self-hosted Linux], stable) (push) Has been cancelled
Full Comfy CI Workflow Runs / test-unix-nightly (12.1, , linux, 3.11, [self-hosted Linux], nightly) (push) Has been cancelled
Execution Tests / test (macos-latest) (push) Has been cancelled
Execution Tests / test (ubuntu-latest) (push) Has been cancelled
Execution Tests / test (windows-latest) (push) Has been cancelled
Test server launches without errors / test (push) Has been cancelled
Unit Tests / test (macos-latest) (push) Has been cancelled
Unit Tests / test (ubuntu-latest) (push) Has been cancelled
Unit Tests / test (windows-2022) (push) Has been cancelled
This commit is contained in:
parent a97c98068f
commit 26c5bbb875
79  comfy_extras/nodes_lora_debug.py  (new file)
@@ -0,0 +1,79 @@
+import folder_paths
+import comfy.utils
+import comfy.sd
+
+
+class LoraLoaderBypass:
+    """
+    Apply LoRA in bypass mode without modifying base model weights.
+
+    Bypass mode computes: output = base_forward(x) + lora_path(x)
+    This is useful for training and when model weights are offloaded.
+    """
+
+    def __init__(self):
+        self.loaded_lora = None
+
+    @classmethod
+    def INPUT_TYPES(s):
+        return {
+            "required": {
+                "model": ("MODEL", {"tooltip": "The diffusion model the LoRA will be applied to."}),
+                "clip": ("CLIP", {"tooltip": "The CLIP model the LoRA will be applied to."}),
+                "lora_name": (folder_paths.get_filename_list("loras"), {"tooltip": "The name of the LoRA."}),
+                "strength_model": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01, "tooltip": "How strongly to modify the diffusion model. This value can be negative."}),
+                "strength_clip": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01, "tooltip": "How strongly to modify the CLIP model. This value can be negative."}),
+            }
+        }
+
+    RETURN_TYPES = ("MODEL", "CLIP")
+    OUTPUT_TOOLTIPS = ("The modified diffusion model.", "The modified CLIP model.")
+    FUNCTION = "load_lora"
+
+    CATEGORY = "loaders"
+    DESCRIPTION = "Apply LoRA in bypass mode. Unlike regular LoRA, this doesn't modify model weights - instead it injects the LoRA computation during forward pass. Useful for training scenarios."
+    EXPERIMENTAL = True
+
+    def load_lora(self, model, clip, lora_name, strength_model, strength_clip):
+        if strength_model == 0 and strength_clip == 0:
+            return (model, clip)
+
+        lora_path = folder_paths.get_full_path_or_raise("loras", lora_name)
+        lora = None
+        if self.loaded_lora is not None:
+            if self.loaded_lora[0] == lora_path:
+                lora = self.loaded_lora[1]
+            else:
+                self.loaded_lora = None
+
+        if lora is None:
+            lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
+            self.loaded_lora = (lora_path, lora)
+
+        model_lora, clip_lora = comfy.sd.load_bypass_lora_for_models(model, clip, lora, strength_model, strength_clip)
+        return (model_lora, clip_lora)
+
+
+class LoraLoaderBypassModelOnly(LoraLoaderBypass):
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": { "model": ("MODEL",),
+                              "lora_name": (folder_paths.get_filename_list("loras"), ),
+                              "strength_model": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01}),
+                              }}
+    RETURN_TYPES = ("MODEL",)
+    FUNCTION = "load_lora_model_only"
+
+    def load_lora_model_only(self, model, lora_name, strength_model):
+        return (self.load_lora(model, None, lora_name, strength_model, 0)[0],)
+
+
+NODE_CLASS_MAPPINGS = {
+    "LoraLoaderBypass": LoraLoaderBypass,
+    "LoraLoaderBypassModelOnly": LoraLoaderBypassModelOnly,
+}
+
+NODE_DISPLAY_NAME_MAPPINGS = {
+    "LoraLoaderBypass": "Load LoRA (Bypass) (For debugging)",
+    "LoraLoaderBypassModelOnly": "Load LoRA (Bypass, Model Only) (for debugging)",
+}
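
The docstring above summarizes bypass mode as output = base_forward(x) + lora_path(x). The actual wiring lives in comfy.sd.load_bypass_lora_for_models, which is not part of this diff; purely as an illustration of the idea, a minimal standalone sketch might look like the following (BypassLoraLinear, rank, and alpha are hypothetical names, not ComfyUI API):

# Illustrative sketch only -- not the ComfyUI implementation. It demonstrates the
# bypass formula from the docstring: output = base_forward(x) + lora_path(x).
import torch
import torch.nn as nn

class BypassLoraLinear(nn.Module):
    def __init__(self, base: nn.Linear, rank: int = 8, alpha: float = 8.0, strength: float = 1.0):
        super().__init__()
        self.base = base                 # base weights stay untouched (could even be offloaded)
        self.strength = strength
        self.scale = alpha / rank
        # Standard low-rank pair: down-projection followed by up-projection.
        self.lora_down = nn.Linear(base.in_features, rank, bias=False)
        self.lora_up = nn.Linear(rank, base.out_features, bias=False)
        nn.init.kaiming_uniform_(self.lora_down.weight)
        nn.init.zeros_(self.lora_up.weight)   # zero-init so the wrapper starts as a no-op

    def forward(self, x):
        # Bypass mode: the LoRA contribution is added at forward time instead of being
        # merged into self.base.weight.
        return self.base(x) + self.strength * self.scale * self.lora_up(self.lora_down(x))

# Example: wrap a frozen linear layer without modifying its weights.
base = nn.Linear(64, 64)
wrapped = BypassLoraLinear(base)
y = wrapped(torch.randn(2, 64))

Because the base layer's weights are never rewritten, the base can stay frozen or offloaded while the low-rank path is trained or swapped out, which matches the training and offloading use case the node's DESCRIPTION points at.
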
68  nodes.py
@@ -722,69 +722,6 @@ class LoraLoaderModelOnly(LoraLoader):
     def load_lora_model_only(self, model, lora_name, strength_model):
         return (self.load_lora(model, None, lora_name, strength_model, 0)[0],)
 
-class LoraLoaderBypass:
-    """
-    Apply LoRA in bypass mode without modifying base model weights.
-
-    Bypass mode computes: output = base_forward(x) + lora_path(x)
-    This is useful for training and when model weights are offloaded.
-    """
-
-    def __init__(self):
-        self.loaded_lora = None
-
-    @classmethod
-    def INPUT_TYPES(s):
-        return {
-            "required": {
-                "model": ("MODEL", {"tooltip": "The diffusion model the LoRA will be applied to."}),
-                "clip": ("CLIP", {"tooltip": "The CLIP model the LoRA will be applied to."}),
-                "lora_name": (folder_paths.get_filename_list("loras"), {"tooltip": "The name of the LoRA."}),
-                "strength_model": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01, "tooltip": "How strongly to modify the diffusion model. This value can be negative."}),
-                "strength_clip": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01, "tooltip": "How strongly to modify the CLIP model. This value can be negative."}),
-            }
-        }
-
-    RETURN_TYPES = ("MODEL", "CLIP")
-    OUTPUT_TOOLTIPS = ("The modified diffusion model.", "The modified CLIP model.")
-    FUNCTION = "load_lora"
-
-    CATEGORY = "loaders"
-    DESCRIPTION = "Apply LoRA in bypass mode. Unlike regular LoRA, this doesn't modify model weights - instead it injects the LoRA computation during forward pass. Useful for training scenarios."
-
-    def load_lora(self, model, clip, lora_name, strength_model, strength_clip):
-        if strength_model == 0 and strength_clip == 0:
-            return (model, clip)
-
-        lora_path = folder_paths.get_full_path_or_raise("loras", lora_name)
-        lora = None
-        if self.loaded_lora is not None:
-            if self.loaded_lora[0] == lora_path:
-                lora = self.loaded_lora[1]
-            else:
-                self.loaded_lora = None
-
-        if lora is None:
-            lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
-            self.loaded_lora = (lora_path, lora)
-
-        model_lora, clip_lora = comfy.sd.load_bypass_lora_for_models(model, clip, lora, strength_model, strength_clip)
-        return (model_lora, clip_lora)
-
-
-class LoraLoaderBypassModelOnly(LoraLoaderBypass):
-    @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "model": ("MODEL",),
-                              "lora_name": (folder_paths.get_filename_list("loras"), ),
-                              "strength_model": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01}),
-                              }}
-    RETURN_TYPES = ("MODEL",)
-    FUNCTION = "load_lora_model_only"
-
-    def load_lora_model_only(self, model, lora_name, strength_model):
-        return (self.load_lora(model, None, lora_name, strength_model, 0)[0],)
-
 class VAELoader:
     video_taes = ["taehv", "lighttaew2_2", "lighttaew2_1", "lighttaehy1_5", "taeltx_2"]
     image_taes = ["taesd", "taesdxl", "taesd3", "taef1"]
@@ -2130,8 +2067,6 @@ NODE_CLASS_MAPPINGS = {
     "LatentFlip": LatentFlip,
     "LatentCrop": LatentCrop,
     "LoraLoader": LoraLoader,
-    "LoraLoaderBypass": LoraLoaderBypass,
-    "LoraLoaderBypassModelOnly": LoraLoaderBypassModelOnly,
     "CLIPLoader": CLIPLoader,
     "UNETLoader": UNETLoader,
     "DualCLIPLoader": DualCLIPLoader,
@@ -2171,8 +2106,6 @@ NODE_DISPLAY_NAME_MAPPINGS = {
     "CheckpointLoaderSimple": "Load Checkpoint",
     "VAELoader": "Load VAE",
     "LoraLoader": "Load LoRA",
-    "LoraLoaderBypass": "Load LoRA (Bypass)",
-    "LoraLoaderBypassModelOnly": "Load LoRA (Bypass, Model Only)",
     "CLIPLoader": "Load CLIP",
     "ControlNetLoader": "Load ControlNet Model",
     "DiffControlNetLoader": "Load ControlNet Model (diff)",
@@ -2498,6 +2431,7 @@ async def init_builtin_extra_nodes():
         "nodes_wanmove.py",
         "nodes_image_compare.py",
         "nodes_zimage.py",
+        "nodes_lora_debug.py"
     ]
 
     import_failed = []
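
For quick debugging outside a workflow graph, the relocated node class can also be driven directly from Python. The snippet below is a sketch under assumptions: it presumes a working ComfyUI checkout on the import path, that CheckpointLoaderSimple.load_checkpoint behaves as in nodes.py (returning model, clip, vae), and the checkpoint and LoRA filenames are placeholders that must exist in the configured model folders.

# Hypothetical debugging snippet; filenames are placeholders.
from nodes import CheckpointLoaderSimple
from comfy_extras.nodes_lora_debug import LoraLoaderBypass

# Load a base checkpoint (model, clip, vae), as the CheckpointLoaderSimple node does in a graph.
model, clip, vae = CheckpointLoaderSimple().load_checkpoint("sd_xl_base_1.0.safetensors")

# Apply the LoRA in bypass mode: base weights are left untouched, the LoRA path is
# injected at forward time via comfy.sd.load_bypass_lora_for_models.
model_lora, clip_lora = LoraLoaderBypass().load_lora(
    model, clip, "my_lora.safetensors", strength_model=1.0, strength_clip=1.0
)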