sageattention fixup

This commit is contained in:
doctorpangloss 2025-12-11 17:21:44 -08:00
parent b81a5b15ae
commit ed4f92279c
3 changed files with 29 additions and 25 deletions

View File

@ -306,10 +306,19 @@ choco install -y vcredist2010 vcredist2013 vcredist140
Then, visit [NVIDIA.com's CUDA Toolkit Download Page](https://developer.nvidia.com/cuda-12-6-0-download-archive?target_os=Windows&target_arch=x86_64&target_version=Server2022&target_type=exe_network) and download and install the CUDA Toolkit. Verify it is correctly installed by running `nvcc --version`.
You are now ready to install Sage Attention 2 and Triton:
You are now ready to install Sage Attention 2 and Flash Attention.
### Linux
```shell
uv pip install --torch-backend=auto "comfyui[attention]@git+https://github.com/hiddenswitch/ComfyUI.git"
uv pip install --no-build-isolation "sageattention@git+https://github.com/thu-ml/SageAttention.git"
uv pip install --no-build-isolation flash_attn
```
### Windows
```powershell
uv pip install --find-links https://raw.githubusercontent.com/hiddenswitch/ComfyUI/main/pypi/sageattention_index.html sageattention
```
To start ComfyUI with it:

View File

@ -0,0 +1,15 @@
<!DOCTYPE html>
<!--
  PEP 503-style "simple" link index for SageAttention Windows wheels.
  Consumed by pip/uv HTML scraping, e.g.:
    uv pip install --find-links https://raw.githubusercontent.com/.../sageattention_index.html sageattention
  The installer parses the <a href="..."> anchors below; hrefs and anchor
  text must remain exact wheel filenames — do not reformat or reword them.
  Filename local-version tags encode the build matrix, e.g.
  "cu128torch2.7.1" = built against CUDA 12.8 + torch 2.7.1
  (presumably matching woct0rdho's release matrix — verify against the
  release page when adding entries). "cp39-abi3" = CPython 3.9+ stable ABI.
-->
<html lang="en">
<head>
<title>Links for sageattention</title>
</head>
<body>
<h1>Links for sageattention</h1>
<a href="https://github.com/woct0rdho/SageAttention/releases/download/v2.2.0-windows.post3/sageattention-2.2.0+cu124torch2.5.1.post3-cp39-abi3-win_amd64.whl">sageattention-2.2.0+cu124torch2.5.1.post3-cp39-abi3-win_amd64.whl</a><br/>
<a href="https://github.com/woct0rdho/SageAttention/releases/download/v2.2.0-windows.post3/sageattention-2.2.0+cu126torch2.6.0.post3-cp39-abi3-win_amd64.whl">sageattention-2.2.0+cu126torch2.6.0.post3-cp39-abi3-win_amd64.whl</a><br/>
<a href="https://github.com/woct0rdho/SageAttention/releases/download/v2.2.0-windows.post3/sageattention-2.2.0+cu128torch2.7.1.post3-cp39-abi3-win_amd64.whl">sageattention-2.2.0+cu128torch2.7.1.post3-cp39-abi3-win_amd64.whl</a><br/>
<a href="https://github.com/woct0rdho/SageAttention/releases/download/v2.2.0-windows.post3/sageattention-2.2.0+cu128torch2.8.0.post3-cp39-abi3-win_amd64.whl">sageattention-2.2.0+cu128torch2.8.0.post3-cp39-abi3-win_amd64.whl</a><br/>
<a href="https://github.com/woct0rdho/SageAttention/releases/download/v2.2.0-windows.post3/sageattention-2.2.0+cu128torch2.9.0.post3-cp39-abi3-win_amd64.whl">sageattention-2.2.0+cu128torch2.9.0.post3-cp39-abi3-win_amd64.whl</a><br/>
<a href="https://github.com/woct0rdho/SageAttention/releases/download/v2.2.0-windows.post3/sageattention-2.2.0+cu130torch2.9.0.post3-cp39-abi3-win_amd64.whl">sageattention-2.2.0+cu130torch2.9.0.post3-cp39-abi3-win_amd64.whl</a><br/>
</body>
</html>

View File

@ -186,10 +186,7 @@ mps = [
# "torchaudio",
]
attention = [
"sageattention==2.2.0 ; sys_platform == 'linux' or sys_platform == 'win32'",
"flash_attn ; sys_platform == 'linux'",
]
comfyui-manager = [
"GitPython",
@ -202,7 +199,7 @@ comfyui-manager = [
"chardet",
"pip",
# todo: bold move
"comfyui_manager==4.0.3b4",
# "comfyui_manager==4.0.3b4",
]
[project.scripts]
@ -218,11 +215,7 @@ Repository = "https://github.com/comfyanonymous/ComfyUI"
conflicts = [
[{ extra = "cpu" }, { extra = "cu126" }, { extra = "cu128" }, { extra = "rocm" }, { extra = "mps" }],
]
no-build-isolation-package = [
"sageattention",
"sage-attention",
"flash_attn",
]
no-build-isolation-package = []
prerelease = "allow"
[[tool.uv.index]]
@ -273,20 +266,7 @@ torchaudio = [
# { index = "pytorch-rocm", extra = "rocm", marker = "sys_platform == 'Linux'" },
# { index = "pytorch-nightly-cpu", extra = "mps", marker = "sys_platform == 'darwin'" },
]
sageattention = [
{ git = "https://github.com/thu-ml/SageAttention.git", marker = "sys_platform == 'linux'", extra = "attention" },
{ url = "https://github.com/woct0rdho/SageAttention/releases/download/v2.2.0-windows.post1/sageattention-2.2.0+cu128torch2.7.1.post1-cp39-abi3-win_amd64.whl", marker = "sys_platform == 'win32'", extra = "attention" },
]
[[tool.uv.dependency-metadata]]
name = "flash-attn"
version = "2.6.3"
requires-dist = ["torch", "einops"]
[[tool.uv.dependency-metadata]]
name = "sageattention"
version = "2.2.0"
requires-dist = ["torch"]
[tool.ruff]
lint.select = [