From 134569ea48ff815e12bac0de3e4c29253307bdeb Mon Sep 17 00:00:00 2001
From: patientx
Date: Fri, 23 Aug 2024 14:10:09 +0300
Subject: [PATCH] Update model_management.py

---
 comfy/model_management.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/comfy/model_management.py b/comfy/model_management.py
index 15ea0a8c4..53977164a 100644
--- a/comfy/model_management.py
+++ b/comfy/model_management.py
@@ -44,12 +44,12 @@
 cpu_state = CPUState.GPU
 
 total_vram = 0
 
-xpu_available = False
-try:
-    torch_version = torch.version.__version__
-    xpu_available = (int(torch_version[0]) < 2 or (int(torch_version[0]) == 2 and int(torch_version[2]) <= 4)) and torch.xpu.is_available()
-except:
-    pass
+xpu_available = False # We don't have XPU with ZLUDA, so disable it altogether.
+# try:
+#     torch_version = torch.version.__version__
+#     xpu_available = (int(torch_version[0]) < 2 or (int(torch_version[0]) == 2 and int(torch_version[2]) <= 4)) and torch.xpu.is_available()
+# except:
+#     pass
 lowvram_available = True
 if args.deterministic:
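
For reference, the check being commented out gates XPU detection to torch versions at or below 2.4 by indexing the version string directly (character 0 is the major digit, character 2 the first minor digit) before asking torch.xpu whether a device is present. Below is a minimal standalone sketch of that gate, assuming torch is importable; the helper name xpu_gate_enabled is hypothetical and is not part of the patch, which simply hard-codes xpu_available = False under ZLUDA.

import torch

def xpu_gate_enabled() -> bool:
    # Reproduces the check the patch disables: XPU is only considered when
    # torch reports version <= 2.4 and torch.xpu sees a device. The version
    # test reads single characters, so "2.4.1" passes and "2.5.0" does not.
    try:
        torch_version = torch.version.__version__
        version_ok = (int(torch_version[0]) < 2
                      or (int(torch_version[0]) == 2 and int(torch_version[2]) <= 4))
        return version_ok and torch.xpu.is_available()
    except Exception:
        # Builds without a torch.xpu module (including ZLUDA builds) land here.
        return False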