From 3d2c4d48cdcf66f006931540a76707ff2cecb6b5 Mon Sep 17 00:00:00 2001
From: Yousef Rafat <81116377+yousef-rafat@users.noreply.github.com>
Date: Thu, 19 Mar 2026 14:18:31 +0200
Subject: [PATCH] .

---
 comfy/ldm/modules/attention.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/comfy/ldm/modules/attention.py b/comfy/ldm/modules/attention.py
index 17b185b69..bec2d7466 100644
--- a/comfy/ldm/modules/attention.py
+++ b/comfy/ldm/modules/attention.py
@@ -39,7 +39,7 @@ except ImportError:
     FLASH_ATTENTION_IS_AVAILABLE = False
 
 try:
-    from flash_attn import flash_attn_func, flash_attn_varlen_func
+    from flash_attn import flash_attn_func
     FLASH_ATTENTION_IS_AVAILABLE = True
 except ImportError:
     if model_management.flash_attention_enabled():
@@ -87,6 +87,7 @@ def default(val, d):
         return val
     return d
 
+
 # feedforward
 class GEGLU(nn.Module):
     def __init__(self, dim_in, dim_out, dtype=None, device=None, operations=ops):