Add debug logging for device mismatch in ModelPatcherDynamic.load

Amp-Thread-ID: https://ampcode.com/threads/T-019d3f5c-28c5-72c9-abed-34681f1b54ba
Co-authored-by: Amp <amp@ampcode.com>
This commit is contained in:
Jedrzej Kosinski 2026-03-30 08:45:55 -07:00
parent afdddcee66
commit 3fab720be9
2 changed files with 4 additions and 0 deletions

View File

@@ -639,6 +639,8 @@ class LoadedModel:
return True
def model_use_more_vram(self, extra_memory, force_patch_weights=False):
    """Ask the wrapped model to bring more weights onto this LoadedModel's device.

    Delegates to ``self.model.partially_load`` and returns its result
    (presumably the amount of memory actually loaded — confirm against
    ModelPatcher.partially_load).

    Args:
        extra_memory: Additional memory budget to load into, passed through
            to ``partially_load``.
        force_patch_weights: Forwarded to ``partially_load`` unchanged.
    """
    if self.device != self.model.load_device:
        # Debug aid for multi-GPU device-mismatch bugs. Use lazy %-style
        # arguments so the message is only formatted when actually emitted,
        # instead of eagerly building an f-string on every call.
        logging.error(
            "LoadedModel device mismatch: self.device=%s, model.load_device=%s, "
            "model_class=%s, is_multigpu=%s, id(model)=%s",
            self.device,
            self.model.load_device,
            self.model.model.__class__.__name__,
            getattr(self.model, "is_multigpu_base_clone", False),
            id(self.model),
        )
    return self.model.partially_load(self.device, extra_memory, force_patch_weights=force_patch_weights)
def __eq__(self, other):

View File

@@ -1646,6 +1646,8 @@ class ModelPatcherDynamic(ModelPatcher):
#now.
assert not full_load
if device_to != self.load_device:
logging.error(f"ModelPatcherDynamic.load device mismatch: device_to={device_to}, self.load_device={self.load_device}, model_class={self.model.__class__.__name__}, is_multigpu_base_clone={getattr(self, 'is_multigpu_base_clone', False)}, id(self)={id(self)}")
assert device_to == self.load_device
num_patches = 0