mirror of
https://github.com/comfyanonymous/ComfyUI.git
synced 2026-05-07 07:42:32 +08:00
Don't use xformers or fp16 on AMD GPUs
Don't use xformers or fp16 on AMD GPUs (neither is supported there). Example device name:
```
In [8]: torch.cuda.get_device_properties("cuda").name
Out[8]: 'AMD Radeon RX 5700 XT'
```
This commit is contained in:
parent
30f274bf48
commit
0169c95d82
@ -203,6 +203,8 @@ def get_autocast_device(dev):
|
||||
def xformers_enabled():
    """Return True when the xformers attention backend should be used.

    xformers is disabled when running on CPU (no CUDA device to query) and
    on AMD GPUs (xformers is not supported there); otherwise this reports
    whether the xformers package imported successfully at module load.
    """
    if vram_state == CPU:
        return False
    # get_device_properties(...).name is the full marketing string
    # (e.g. 'AMD Radeon RX 5700 XT'), so an equality test against the
    # bare string "AMD" can never match. Use a substring check instead,
    # consistent with the AMD detection in should_use_fp16().
    if "AMD" in torch.cuda.get_device_properties("cuda").name:
        return False
    return XFORMERS_IS_AVAILBLE
|
||||
|
||||
|
||||
@ -268,6 +270,9 @@ def should_use_fp16():
|
||||
props = torch.cuda.get_device_properties("cuda")
|
||||
if props.major < 7:
|
||||
return False
|
||||
|
||||
if "AMD" in props.name:
|
||||
return False
|
||||
|
||||
#FP32 is faster on those cards?
|
||||
nvidia_16_series = ["1660", "1650", "1630", "T500", "T550", "T600"]
|
||||
|
||||
Loading…
Reference in New Issue
Block a user