Made quad-cross-attention the default attention type for first-time startup

patientx 2025-06-21 01:29:54 +03:00 committed by GitHub
parent 0e967d11b1
commit 7b04b064aa


@@ -121,4 +121,4 @@ echo.
 set FLASH_ATTENTION_TRITON_AMD_ENABLE=TRUE
 set MIOPEN_FIND_MODE=2
 set MIOPEN_LOG_LEVEL=3
-.\zluda\zluda.exe -- python main.py --auto-launch --use-pytorch-cross-attention
+.\zluda\zluda.exe -- python main.py --auto-launch --use-quad-cross-attention
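
For context, a minimal sketch of the resulting startup sequence is shown below. The rem comments are explanatory annotations added here, not part of the actual script, and the descriptions of the MIOpen settings reflect MIOpen's documented modes rather than anything stated in this commit.

rem Enable the Triton-based Flash Attention backend for AMD GPUs running under ZLUDA
set FLASH_ATTENTION_TRITON_AMD_ENABLE=TRUE
rem MIOpen tuning: FIND_MODE=2 selects fast kernel search; LOG_LEVEL=3 limits logging to errors
set MIOPEN_FIND_MODE=2
set MIOPEN_LOG_LEVEL=3
rem Launch ComfyUI through ZLUDA; --use-quad-cross-attention selects the
rem sub-quadratic cross-attention optimization instead of PyTorch's built-in
rem scaled-dot-product attention (the previous --use-pytorch-cross-attention)
.\zluda\zluda.exe -- python main.py --auto-launch --use-quad-cross-attention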