From 0169c95d8236da76bc763667a06826c3866b2cd0 Mon Sep 17 00:00:00 2001
From: Pedro Batista
Date: Wed, 5 Apr 2023 13:14:15 -0300
Subject: [PATCH] Don't use xformers or fp16 on AMD GPUs

Neither xformers nor fp16 is supported on AMD GPUs, so disable both
when one is detected. AMD cards can be recognized by the reported
device name:

```
In [8]: torch.cuda.get_device_properties("cuda").name
Out[8]: 'AMD Radeon RX 5700 XT'
```
---
 comfy/model_management.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/comfy/model_management.py b/comfy/model_management.py
index 052dfb775..c19311eab 100644
--- a/comfy/model_management.py
+++ b/comfy/model_management.py
@@ -203,6 +203,8 @@ def get_autocast_device(dev):
 def xformers_enabled():
     if vram_state == CPU:
         return False
+    if "AMD" in torch.cuda.get_device_properties("cuda").name:
+        return False
     return XFORMERS_IS_AVAILBLE
 
 
@@ -268,6 +270,9 @@ def should_use_fp16():
     props = torch.cuda.get_device_properties("cuda")
     if props.major < 7:
         return False
+
+    if "AMD" in props.name:
+        return False #FP32 is faster on those cards?
 
     nvidia_16_series = ["1660", "1650", "1630", "T500", "T550", "T600"]
     for x in nvidia_16_series:
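For reviewers, here is a minimal standalone sketch of the detection logic the patch relies on. The `is_amd_gpu` helper name is hypothetical and not part of the patch; it assumes a ROCm build of PyTorch, which exposes AMD cards through the CUDA device API:

```python
import torch

def is_amd_gpu() -> bool:
    # Hypothetical helper, not part of the patch.
    # ROCm builds of PyTorch report AMD cards through the CUDA API
    # with names like 'AMD Radeon RX 5700 XT', so a substring
    # match on "AMD" is enough for a best-effort check.
    if not torch.cuda.is_available():
        return False
    return "AMD" in torch.cuda.get_device_properties("cuda").name
```

Under this check, both `xformers_enabled()` and `should_use_fp16()` would return False on the example card named in the commit message.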