From 48acefc9239879a3e495d8f8eea93810d6a17953 Mon Sep 17 00:00:00 2001
From: Jedrzej Kosinski
Date: Mon, 20 Apr 2026 10:00:01 -0700
Subject: [PATCH] Fix code review bugs: negative index guard, CPU
 offload_device, checkpoint te_model_options

- resolve_gpu_device_option: reject negative indices (gpu:-1)
- UNETLoader: set offload_device when cpu is selected
- CheckpointLoaderSimple: pass te_model_options for CLIP device, set
  offload_device for cpu, pass load_device to VAE
- load_diffusion_model_state_dict: respect offload_device from model_options
- load_state_dict_guess_config: respect offload_device, pass load_device to VAE

Amp-Thread-ID: https://ampcode.com/threads/T-019daa41-f394-731a-8955-4cff4f16283a

Co-authored-by: Amp
---
 comfy/model_management.py |  2 +-
 comfy/sd.py               |  8 +++++---
 nodes.py                  | 15 ++++++++++++---
 3 files changed, 18 insertions(+), 7 deletions(-)

diff --git a/comfy/model_management.py b/comfy/model_management.py
index cdfcc0771..379f8da68 100644
--- a/comfy/model_management.py
+++ b/comfy/model_management.py
@@ -260,7 +260,7 @@ def resolve_gpu_device_option(option: str):
     try:
         idx = int(option[4:])
         devices = get_all_torch_devices()
-        if idx < len(devices):
+        if 0 <= idx < len(devices):
             return devices[idx]
         else:
             logging.warning(f"Device '{option}' not available (only {len(devices)} GPU(s)), using default.")
diff --git a/comfy/sd.py b/comfy/sd.py
index f175f9b9d..a05998114 100644
--- a/comfy/sd.py
+++ b/comfy/sd.py
@@ -1673,13 +1673,15 @@ def load_state_dict_guess_config(sd, output_vae=True, output_clip=True, output_c
     inital_load_device = model_management.unet_inital_load_device(parameters, unet_dtype)
     model = model_config.get_model(sd, diffusion_model_prefix, device=inital_load_device)
     ModelPatcher = comfy.model_patcher.ModelPatcher if disable_dynamic else comfy.model_patcher.CoreModelPatcher
-    model_patcher = ModelPatcher(model, load_device=load_device, offload_device=model_management.unet_offload_device())
+    offload_device = model_options.get("offload_device", model_management.unet_offload_device())
+    model_patcher = ModelPatcher(model, load_device=load_device, offload_device=offload_device)
     model.load_model_weights(sd, diffusion_model_prefix, assign=model_patcher.is_dynamic())

     if output_vae:
         vae_sd = comfy.utils.state_dict_prefix_replace(sd, {k: "" for k in model_config.vae_key_prefix}, filter_keys=True)
         vae_sd = model_config.process_vae_state_dict(vae_sd)
-        vae = VAE(sd=vae_sd, metadata=metadata)
+        vae_device = model_options.get("load_device", None)
+        vae = VAE(sd=vae_sd, metadata=metadata, device=vae_device)

     if output_clip:
         if te_model_options.get("custom_operations", None) is None:
@@ -1788,7 +1790,7 @@ def load_diffusion_model_state_dict(sd, model_options={}, metadata=None, disable
             else:
                 logging.warning("{} {}".format(diffusers_keys[k], k))

-    offload_device = model_management.unet_offload_device()
+    offload_device = model_options.get("offload_device", model_management.unet_offload_device())
     unet_weight_dtype = list(model_config.supported_inference_dtypes)
     if model_config.quant_config is not None:
         weight_dtype = None
diff --git a/nodes.py b/nodes.py
index bc3a4b7be..c4dabbab0 100644
--- a/nodes.py
+++ b/nodes.py
@@ -613,10 +613,16 @@ class CheckpointLoaderSimple:
     def load_checkpoint(self, ckpt_name, device="default"):
         ckpt_path = folder_paths.get_full_path_or_raise("checkpoints", ckpt_name)
         model_options = {}
+        te_model_options = {}
         resolved = comfy.model_management.resolve_gpu_device_option(device)
         if resolved is not None:
-            model_options["load_device"] = resolved
-        out = comfy.sd.load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, embedding_directory=folder_paths.get_folder_paths("embeddings"), model_options=model_options)
+            if resolved.type == "cpu":
+                model_options["load_device"] = model_options["offload_device"] = resolved
+                te_model_options["load_device"] = te_model_options["offload_device"] = resolved
+            else:
+                model_options["load_device"] = resolved
+                te_model_options["load_device"] = resolved
+        out = comfy.sd.load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, embedding_directory=folder_paths.get_folder_paths("embeddings"), model_options=model_options, te_model_options=te_model_options)
         return out[:3]

 class DiffusersLoader:
@@ -997,7 +1003,10 @@ class UNETLoader:

         resolved = comfy.model_management.resolve_gpu_device_option(device)
         if resolved is not None:
-            model_options["load_device"] = resolved
+            if resolved.type == "cpu":
+                model_options["load_device"] = model_options["offload_device"] = resolved
+            else:
+                model_options["load_device"] = resolved

         unet_path = folder_paths.get_full_path_or_raise("diffusion_models", unet_name)
         model = comfy.sd.load_diffusion_model(unet_path, model_options=model_options)