Update zluda.py

Authored by patientx on 2025-08-24 10:23:30 +03:00; committed by GitHub.
parent 8d6defbe21
commit dba9d20791


@@ -652,7 +652,7 @@ def do_hijack():
     print(" :: Configuring PyTorch backends...")
     torch.backends.cuda.enable_mem_efficient_sdp(False)
     torch.backends.cuda.enable_mem_efficient_sdp = do_nothing
-    torch.backends.cudnn.enabled = True
+    # torch.backends.cudnn.enabled = True  # this was here from the early days, always assumed it was required for other stuff; disabling it.
     if hasattr(torch.backends.cuda, "enable_flash_sdp"):
         torch.backends.cuda.enable_flash_sdp(True)
         print(" :: Disabled CUDA flash attention")