Only unpin tensor if it was pinned by ComfyUI (#10677)

comfyanonymous 2025-11-07 08:15:05 -08:00 committed by GitHub
parent cf97b033ee
commit a1a70362ca


@@ -1129,13 +1129,18 @@ def unpin_memory(tensor):
     if not is_device_cpu(tensor.device):
         return False
-    if not tensor.is_pinned():
-        #NOTE: Cuda does detect when a tensor is already pinned and would
-        #error below, but there are proven cases where this also queues an error
-        #on the GPU async. So dont trust the CUDA API and guard here
+    ptr = tensor.data_ptr()
+    size = tensor.numel() * tensor.element_size()
+
+    size_stored = PINNED_MEMORY.get(ptr, None)
+    if size_stored is None:
+        logging.warning("Tried to unpin tensor not pinned by ComfyUI")
+        return False
+    if size != size_stored:
+        logging.warning("Size of pinned tensor changed")
         return False
-    ptr = tensor.data_ptr()
     if torch.cuda.cudart().cudaHostUnregister(ptr) == 0:
         TOTAL_PINNED_MEMORY -= PINNED_MEMORY.pop(ptr)
         if len(PINNED_MEMORY) == 0:
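
For reference, a minimal sketch (not the actual ComfyUI pin path) of the bookkeeping this guard relies on: pin_memory records every pointer and size it registers in PINNED_MEMORY, so unpin_memory only unregisters memory it pinned itself. The pin_memory shape, the cudaHostRegister flag, the plain device check standing in for ComfyUI's is_device_cpu(), and the module-level globals are assumptions for illustration; only the unpin_memory checks come from the diff above.

import logging
import torch

# Registry of host pointers pinned by this code: data_ptr -> size in bytes.
PINNED_MEMORY = {}
TOTAL_PINNED_MEMORY = 0

def pin_memory(tensor):
    """Page-lock a CPU tensor's storage and remember it in PINNED_MEMORY."""
    global TOTAL_PINNED_MEMORY
    ptr = tensor.data_ptr()
    size = tensor.numel() * tensor.element_size()
    # cudaHostRegister returns 0 (cudaSuccess) when the range was pinned.
    # Flag 0 (cudaHostRegisterDefault) is an assumption for this sketch.
    if torch.cuda.cudart().cudaHostRegister(ptr, size, 0) == 0:
        PINNED_MEMORY[ptr] = size
        TOTAL_PINNED_MEMORY += size
        return True
    return False

def unpin_memory(tensor):
    """Unregister only memory that appears in PINNED_MEMORY (the new guard)."""
    global TOTAL_PINNED_MEMORY
    # The real code uses ComfyUI's is_device_cpu(); a plain device check stands in here.
    if tensor.device.type != "cpu":
        return False
    ptr = tensor.data_ptr()
    size = tensor.numel() * tensor.element_size()
    size_stored = PINNED_MEMORY.get(ptr, None)
    if size_stored is None:
        logging.warning("Tried to unpin tensor not pinned by ComfyUI")
        return False
    if size != size_stored:
        logging.warning("Size of pinned tensor changed")
        return False
    if torch.cuda.cudart().cudaHostUnregister(ptr) == 0:
        TOTAL_PINNED_MEMORY -= PINNED_MEMORY.pop(ptr)
        return True
    return False

Keying the registry on data_ptr() also lets the size comparison catch tensors whose storage was reallocated or resized after pinning, which is why the size check sits alongside the membership check.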