From cb959f9669c9a91e4122f3f3692547d33c25d7ad Mon Sep 17 00:00:00 2001
From: Jedrzej Kosinski
Date: Fri, 29 Aug 2025 21:48:36 -0700
Subject: [PATCH] Add optimized to get_attention_function

---
 comfy/ldm/modules/attention.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/comfy/ldm/modules/attention.py b/comfy/ldm/modules/attention.py
index 361ad10fe..3c9df9a00 100644
--- a/comfy/ldm/modules/attention.py
+++ b/comfy/ldm/modules/attention.py
@@ -51,7 +51,9 @@ def register_attention_function(name: str, func: Callable):
         logging.warning(f"Attention function {name} already registered, skipping registration.")
 
 def get_attention_function(name: str, default: Any=...) -> Union[Callable, None]:
-    if name not in REGISTERED_ATTENTION_FUNCTIONS:
+    if name == "optimized":
+        return optimized_attention
+    elif name not in REGISTERED_ATTENTION_FUNCTIONS:
         if default is ...:
             raise KeyError(f"Attention function {name} not found.")
         else:
@@ -62,7 +64,7 @@ def _register_core_attention_functions():
     """
     Register attention functions exposed by core ComfyUI.
     """
-    # NOTE: attention_basic is purposely not registered, as it is not used in code
+    # NOTE: attention_basic is purposely not registered, as it should not be used
     if SAGE_ATTENTION_IS_AVAILABLE:
         register_attention_function("sage", attention_sage)
     if FLASH_ATTENTION_IS_AVAILABLE: