Mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2026-01-24 05:10:18 +08:00)
commit 644778be49
Merge branch 'comfyanonymous:master' into master
@@ -1108,6 +1108,9 @@ def pin_memory(tensor):
     if MAX_PINNED_MEMORY <= 0:
         return False

+    if type(tensor) is not torch.nn.parameter.Parameter:
+        return False
+
     if not is_device_cpu(tensor.device):
         return False

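For illustration, a minimal sketch of the guard order this hunk establishes: pinning is skipped when the feature is disabled, when the tensor is not a torch.nn.parameter.Parameter, or when it does not live on the CPU. The standalone function name should_pin and the tensor.device.type check (standing in for the repository's is_device_cpu() helper) are assumptions for the sketch, not the upstream code.

import torch

def should_pin(tensor, max_pinned_memory):
    # Hypothetical helper mirroring the guards in this hunk.
    if max_pinned_memory <= 0:  # pinning disabled by configuration
        return False
    if type(tensor) is not torch.nn.parameter.Parameter:  # new guard: only pin Parameters
        return False
    if tensor.device.type != "cpu":  # stand-in for is_device_cpu(tensor.device)
        return False
    return True

# Example: a module weight qualifies, a plain CPU tensor does not.
weight = torch.nn.Linear(4, 4).weight       # torch.nn.parameter.Parameter on CPU
print(should_pin(weight, 1 << 30))          # True
print(should_pin(torch.zeros(4), 1 << 30))  # False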
@@ -1117,6 +1120,9 @@ def pin_memory(tensor):
         #on the GPU async. So dont trust the CUDA API and guard here
         return False

+    if not tensor.is_contiguous():
+        return False
+
     size = tensor.numel() * tensor.element_size()
     if (TOTAL_PINNED_MEMORY + size) > MAX_PINNED_MEMORY:
         return False
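Similarly, a hedged sketch of the second hunk's checks: non-contiguous tensors are rejected, and the tensor's byte size must fit under the remaining pinned-memory budget. The function name fits_pin_budget and the explicit budget arguments are illustrative; upstream, TOTAL_PINNED_MEMORY and MAX_PINNED_MEMORY are module-level globals.

import torch

def fits_pin_budget(tensor, total_pinned_memory, max_pinned_memory):
    # Hypothetical helper mirroring the second hunk: never pin non-contiguous
    # tensors, and keep the running total of pinned bytes under the cap.
    if not tensor.is_contiguous():
        return False
    size = tensor.numel() * tensor.element_size()
    if (total_pinned_memory + size) > max_pinned_memory:
        return False
    return True

# Example: a transposed view is non-contiguous and would not be pinned.
t = torch.zeros(8, 8)
print(fits_pin_budget(t, 0, 1 << 20))      # True
print(fits_pin_budget(t.t(), 0, 1 << 20))  # False (non-contiguous view)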