Added onnxruntime patching

patientx 2025-04-13 17:11:13 +03:00 committed by GitHub
parent 2f31979380
commit b6d5765f0f

@@ -1,59 +1,56 @@
# ------------------- Hide ROCm/HIP -------------------
import os

os.environ.pop("ROCM_HOME", None)
os.environ.pop("HIP_HOME", None)
os.environ.pop("ROCM_VERSION", None)

paths = os.environ["PATH"].split(";")
paths_no_rocm = [p for p in paths if "rocm" not in p.lower()]
os.environ["PATH"] = ";".join(paths_no_rocm)
# ------------------- End ROCm/HIP Hiding -------------

# Fix for cublasLt errors on newer ZLUDA versions without a working hipBLASLt setup
# ("CUDA error: CUBLAS_STATUS_NOT_SUPPORTED when calling cublasLtMatmulAlgoGetHeuristic");
# comment out the line below if hipBLASLt works for you.
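# Setting DISABLE_ADDMM_CUDA_LT=1 makes PyTorch skip its cuBLASLt-backed addmm/linear path
# (the code path that calls cublasLtMatmulAlgoGetHeuristic) and fall back to plain cuBLAS.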
os.environ['DISABLE_ADDMM_CUDA_LT'] = '1'
import torch

# ------------------- ComfyUI Frontend Version Check -------------------
def get_package_version(package_name):
    try:
        # Try using importlib.metadata (Python 3.8+)
        from importlib.metadata import version
        return version(package_name)
    except ImportError:
        # Fallback to importlib_metadata for older Python versions
        from importlib_metadata import version
        return version(package_name)

# Check and install comfyui-frontend-package if not installed or if the version is lower than required
required_version = "1.14.6"
package_name = "comfyui-frontend-package"
try:
    installed_version = get_package_version(package_name)
    print(f"Installed version of {package_name}: {installed_version}")

    # Compare versions
    from packaging import version
    if version.parse(installed_version) < version.parse(required_version):
        import subprocess
        import sys
        subprocess.check_call([sys.executable, '-m', 'pip', 'install', f'{package_name}=={required_version}', '--quiet', '--upgrade'])
        print(f"\nComfyUI Frontend Package outdated. Upgraded to {required_version}.")
except Exception:
    # Package missing (or the version check itself failed): install the required version
    import subprocess
    import sys
    subprocess.check_call([sys.executable, '-m', 'pip', 'install', f'{package_name}=={required_version}', '--quiet'])
    print("\nComfyUI Frontend Package was missing. Installed it.")
# ------------------- End Frontend Version Check -------------------

# ------------------- ZLUDA Detection -------------------
zluda_device_name = torch.cuda.get_device_name() if torch.cuda.is_available() else ""
is_zluda = zluda_device_name.endswith("[ZLUDA]")
# ------------------- End Detection --------------------

# ------------------- Audio Ops Patch -------------------
if is_zluda:
    _torch_stft = torch.stft
    _torch_istft = torch.istft
@@ -66,25 +63,53 @@ if torch.cuda.is_available() and torch.cuda.get_device_name().endswith("[ZLUDA]"
    def z_jit(f, *_, **__):
        f.graph = torch._C.Graph()
        return f

    torch._dynamo.config.suppress_errors = True
    torch.stft = z_stft
    torch.istft = z_istft
    torch.jit.script = z_jit
# ------------------- End Audio Patch -------------------

# ------------------- ONNX Runtime Patch -------------------
try:
    import onnxruntime as ort

    if is_zluda:
        print("\n***----------------------ZLUDA-----------------------------***")
        print(" :: Patching ONNX Runtime for ZLUDA — disabling CUDA EP.")

        # Store original get_available_providers
        original_get_available_providers = ort.get_available_providers

        def filtered_providers():
            return [ep for ep in original_get_available_providers() if ep != "CUDAExecutionProvider"]

        # Patch ONLY the _pybind_state version (used during session creation)
        ort.capi._pybind_state.get_available_providers = filtered_providers
        # DO NOT override ort.get_available_providers — leave that public interface alone
except ImportError:
    print(" :: ONNX Runtime not installed — skipping patch.")
except Exception as e:
    print(" :: Failed to patch ONNX Runtime:", e)
# ------------------- End ONNX Patch -------------------
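# Illustrative usage check (a sketch, not part of the patch above; "model.onnx" is a placeholder path):
#   import onnxruntime as ort
#   ort.capi._pybind_state.get_available_providers()   # CUDAExecutionProvider filtered out under ZLUDA
#   ort.get_available_providers()                       # public API still reports the full provider list
#   ort.InferenceSession("model.onnx", providers=["CPUExecutionProvider"])  # create the session on the CPU EP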

# ------------------- ZLUDA Backend Patch -------------------
if is_zluda:
    print(" :: ZLUDA detected, disabling non-supported functions. ")
    torch.backends.cudnn.enabled = False
    if hasattr(torch.backends.cuda, "enable_flash_sdp"):
        torch.backends.cuda.enable_flash_sdp(False)  # enable if using 6.2 with latest nightly zluda
    if hasattr(torch.backends.cuda, "enable_math_sdp"):
        torch.backends.cuda.enable_math_sdp(True)
    if hasattr(torch.backends.cuda, "enable_mem_efficient_sdp"):
        torch.backends.cuda.enable_mem_efficient_sdp(False)
    print(" :: CuDNN, flash_sdp, mem_efficient_sdp disabled. ")

if is_zluda:
    print(f" :: Using ZLUDA with device: {zluda_device_name}")
    print("***--------------------------------------------------------***\n")
else:
    print(f" :: CUDA device detected: {zluda_device_name or 'None'}")
    print("***--------------------------------------------------------***\n")
# ------------------- End Zluda patching -------------------
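# Quick sanity check for the SDP settings above (illustrative; torch.backends.cuda exposes
# matching *_sdp_enabled() getters in PyTorch 2.x):
#   print(torch.backends.cuda.flash_sdp_enabled())          # False after this patch
#   print(torch.backends.cuda.math_sdp_enabled())           # True
#   print(torch.backends.cuda.mem_efficient_sdp_enabled())  # False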