For some reason pip started installing sageattention 2.x, which won't work with our torch build; the install is now pinned to 1.0.6.
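
A minimal sketch of how the pin can be verified after install (the findstr check is an illustrative assumption, not part of the installer script):

:: Pin sageattention to the 1.x line; 2.x expects a newer torch than this setup ships.
pip install sageattention==1.0.6 --quiet
:: Confirm the resolved version before patching files into site-packages.
pip show sageattention | findstr /b /c:"Version:"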

patientx 2025-10-30 01:49:38 +03:00 committed by GitHub
parent f2c1e3d13d
commit 0b3197fde2


@@ -75,7 +75,7 @@ pypatch-url apply https://raw.githubusercontent.com/sfinktah/amd-torch/refs/head
 :: copy comfy\customzluda\fa\distributed.py %VIRTUAL_ENV%\Lib\site-packages\flash_attn\utils\distributed.py /y >NUL
 echo :: %time:~0,8% :: - Installing and patching sage-attention
-pip install sageattention --quiet
+pip install sageattention==1.0.6 --quiet
 copy comfy\customzluda\sa\quant_per_block.py %VIRTUAL_ENV%\Lib\site-packages\sageattention\quant_per_block.py /y >NUL
 copy comfy\customzluda\sa\attn_qk_int8_per_block_causal.py %VIRTUAL_ENV%\Lib\site-packages\sageattention\attn_qk_int8_per_block_causal.py /y >NUL
 copy comfy\customzluda\sa\attn_qk_int8_per_block.py %VIRTUAL_ENV%\Lib\site-packages\sageattention\attn_qk_int8_per_block.py /y >NUL
@@ -154,3 +154,4 @@ set MIOPEN_LOG_LEVEL=3
 .\zluda\zluda.exe -- python main.py --auto-launch --use-quad-cross-attention