Mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2026-01-23 21:00:16 +08:00)

Added onnxruntime patching

This commit is contained in:
parent 2f31979380
commit b6d5765f0f

comfy/zluda.py | 111
@@ -1,59 +1,56 @@
-# hide rocm and hip
+# ------------------- Hide ROCm/HIP -------------------
 import os
-os.environ.pop("ROCM_HOME",None)
-os.environ.pop("HIP_HOME",None)
-os.environ.pop("ROCM_VERSION",None)
-paths=os.environ["PATH"].split(";")
-paths_no_rocm=[]
-for path_ in paths:
-    if "rocm" not in path_.lower():
-        paths_no_rocm.append(path_)
-os.environ["PATH"]=";".join(paths_no_rocm)
-# hide rocm and hip end
-
-# fix cublast errors for newer zluda versions "CUDA error: CUBLAS_STATUS_NOT_SUPPORTED when calling `cublasLtMatmulAlgoGetHeuristic" , comment it out if you have a working hipblast setup.
+os.environ.pop("ROCM_HOME", None)
+os.environ.pop("HIP_HOME", None)
+os.environ.pop("ROCM_VERSION", None)
+
+paths = os.environ["PATH"].split(";")
+paths_no_rocm = [p for p in paths if "rocm" not in p.lower()]
+os.environ["PATH"] = ";".join(paths_no_rocm)
+# ------------------- End ROCm/HIP Hiding -------------
+
+# Fix for cublasLt errors on newer ZLUDA (if no hipblaslt)
 os.environ['DISABLE_ADDMM_CUDA_LT'] = '1'
 
 import torch
 
-# get package version using importlib.metadata
+# ------------------- ComfyUI Frontend Version Check -------------------
 def get_package_version(package_name):
     try:
-        # Try using importlib.metadata (Python 3.8+)
         from importlib.metadata import version
         return version(package_name)
     except ImportError:
-        # Fallback to importlib_metadata for older Python versions
         from importlib_metadata import version
         return version(package_name)
 
-# Check and install comfyui-frontend-package if not installed or if the version is lower than required
-required_version = "1.14.6"
+required_version = "1.15.13"
 package_name = "comfyui-frontend-package"
 
 try:
     installed_version = get_package_version(package_name)
     print(f"Installed version of {package_name}: {installed_version}")
 
-    # Compare versions
     from packaging import version
     if version.parse(installed_version) < version.parse(required_version):
         import subprocess
         import sys
         subprocess.check_call([sys.executable, '-m', 'pip', 'install', f'{package_name}=={required_version}', '--quiet', '--upgrade'])
-        print(" ")
-        print(f"Comfyui Frontend Package version {installed_version} is outdated, updating to latest recommended version {required_version}.")
-except Exception:
-    # If the package is not installed or version check fails, install it
+        print(f"\nComfyUI Frontend Package outdated. Upgraded to {required_version}.")
+except Exception as e:
     import subprocess
     import sys
     subprocess.check_call([sys.executable, '-m', 'pip', 'install', f'{package_name}=={required_version}', '--quiet'])
-    print(" ")
-    print("Comfyui Frontend Package missing, it is installed. (one time only) ")
+    print("\nComfyUI Frontend Package was missing. Installed it.")
+# ------------------- End Frontend Version Check -------------------
 
-#audio patch
-import torch._dynamo
-
-if torch.cuda.is_available() and torch.cuda.get_device_name().endswith("[ZLUDA]"):
+# ------------------- ZLUDA Detection -------------------
+zluda_device_name = torch.cuda.get_device_name() if torch.cuda.is_available() else ""
+is_zluda = zluda_device_name.endswith("[ZLUDA]")
+# ------------------- End Detection --------------------
+
+# ------------------- Audio Ops Patch -------------------
+if is_zluda:
     _torch_stft = torch.stft
     _torch_istft = torch.istft
@@ -66,25 +63,53 @@ if torch.cuda.is_available() and torch.cuda.get_device_name().endswith("[ZLUDA]"
     def z_jit(f, *_, **__):
         f.graph = torch._C.Graph()
         return f
 
     torch._dynamo.config.suppress_errors = True
     torch.stft = z_stft
     torch.istft = z_istft
     torch.jit.script = z_jit
-#audio patch end
+# ------------------- End Audio Patch -------------------
 
-# zluda
-if torch.cuda.get_device_name().endswith("[ZLUDA]"):
-    print(" ")
-    print("***----------------------ZLUDA-----------------------------***")
+# ------------------- ONNX Runtime Patch -------------------
+try:
+    import onnxruntime as ort
+
+    if is_zluda:
+        print("\n***----------------------ZLUDA-----------------------------***")
+        print(" :: Patching ONNX Runtime for ZLUDA — disabling CUDA EP.")
+
+        # Store original get_available_providers
+        original_get_available_providers = ort.get_available_providers
+
+        def filtered_providers():
+            return [ep for ep in original_get_available_providers() if ep != "CUDAExecutionProvider"]
+
+        # Patch ONLY the _pybind_state version (used during session creation)
+        ort.capi._pybind_state.get_available_providers = filtered_providers
+        # DO NOT override ort.get_available_providers — leave that public interface alone
+except ImportError:
+    print(" :: ONNX Runtime not installed — skipping patch.")
+except Exception as e:
+    print(" :: Failed to patch ONNX Runtime:", e)
+# ------------------- End ONNX Patch -------------------
+
+# ------------------- ZLUDA Backend Patch -------------------
+if is_zluda:
     print(" :: ZLUDA detected, disabling non-supported functions. ")
     torch.backends.cudnn.enabled = False
-    print(" :: CuDNN, flash_sdp, mem_efficient_sdp disabled). ")
-    torch.backends.cuda.enable_flash_sdp(False) # enable if using 6.2 with latest nightly zluda
-    torch.backends.cuda.enable_math_sdp(True)
-    torch.backends.cuda.enable_mem_efficient_sdp(False)
-    print("***--------------------------------------------------------***")
-    print(" :: Device : ", torch.cuda.get_device_name())
-    print(" ")
+
+    if hasattr(torch.backends.cuda, "enable_flash_sdp"):
+        torch.backends.cuda.enable_flash_sdp(False)
+    if hasattr(torch.backends.cuda, "enable_math_sdp"):
+        torch.backends.cuda.enable_math_sdp(True)
+    if hasattr(torch.backends.cuda, "enable_mem_efficient_sdp"):
+        torch.backends.cuda.enable_mem_efficient_sdp(False)
+
+    print(" :: CuDNN, flash_sdp, mem_efficient_sdp disabled. ")
+
+if is_zluda:
+    print(f" :: Using ZLUDA with device: {zluda_device_name}")
+    print("***--------------------------------------------------------***\n")
 else:
-    print(" :: ZLUDA isn't detected, please try patching it.")
+    print(f" :: CUDA device detected: {zluda_device_name or 'None'}")
+    print("***--------------------------------------------------------***\n")
+# ------------------- End Zluda patching -------------------
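
A few notes on the patched comfy/zluda.py, with small illustrative snippets.

The ROCm/HIP block unsets the ROCm-related environment variables and keeps only PATH entries that do not contain "rocm", presumably so that nothing later resolves the ROCm toolchain ahead of ZLUDA's libraries. A quick illustration of the filter with a made-up Windows PATH (the directories are hypothetical):

paths = r"C:\Program Files\AMD\ROCm\6.1\bin;C:\Windows\system32;C:\Tools".split(";")
paths_no_rocm = [p for p in paths if "rocm" not in p.lower()]
print(paths_no_rocm)  # the ROCm entry is dropped, the other two remain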
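The frontend check relies on packaging.version for the comparison, which orders release numbers numerically (so "1.9.0" sorts before "1.15.13", unlike a plain string comparison) and only shells out to pip when the installed package is older than required_version. A minimal sketch of that decision, with example values:

from packaging import version

installed = "1.14.6"   # example value
required = "1.15.13"
if version.parse(installed) < version.parse(required):
    print(f"would run: pip install comfyui-frontend-package=={required} --upgrade")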
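The audio patch replaces torch.stft and torch.istft with the z_stft/z_istft wrappers and stubs torch.jit.script out via z_jit; the wrapper bodies sit above the lines shown in the second hunk, so they are not visible here. A typical shape for such a wrapper, given only as an assumed sketch rather than the exact code from the file, is to run the transform on the CPU and move the result back:

import torch

_torch_stft = torch.stft  # keep a handle on the original, as the patch does

def z_stft(input, *args, **kwargs):
    # assumed CPU fallback: avoid the FFT path that misbehaves under ZLUDA
    window = kwargs.get("window")
    if window is not None:
        kwargs["window"] = window.cpu()
    return _torch_stft(input.cpu(), *args, **kwargs).to(input.device)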
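The ONNX Runtime patch wraps the import in try/except, so machines without onnxruntime simply skip it, and only replaces onnxruntime.capi._pybind_state.get_available_providers, the internal list that the patch's comment says is used during session creation; the public ort.get_available_providers() is deliberately left alone. After comfy.zluda has been imported on a ZLUDA setup, the effect can be checked like this (assuming onnxruntime is installed):

import onnxruntime as ort

print(ort.get_available_providers())                      # public API, unchanged
print(ort.capi._pybind_state.get_available_providers())   # patched: no CUDAExecutionProvider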
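The backend patch turns cuDNN off, disables the flash and memory-efficient scaled-dot-product-attention kernels, and forces the math fallback on, with each toggle guarded by hasattr so PyTorch builds that lack a given function are skipped instead of raising AttributeError. On a recent PyTorch 2.x, the state after comfy.zluda has run on a ZLUDA device can be inspected with the matching query functions, assuming they exist in your build:

import torch

print(torch.backends.cudnn.enabled)                         # False once the patch has run
if hasattr(torch.backends.cuda, "flash_sdp_enabled"):
    print(torch.backends.cuda.flash_sdp_enabled())          # False
    print(torch.backends.cuda.math_sdp_enabled())           # True
    print(torch.backends.cuda.mem_efficient_sdp_enabled())  # False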