From b84af5b947e1dd00b98a34ce86cf983455040d3e Mon Sep 17 00:00:00 2001
From: Yousef Rafat <81116377+yousef-rafat@users.noreply.github.com>
Date: Mon, 17 Nov 2025 23:03:52 +0200
Subject: [PATCH] small attention fix

---
 comfy/ldm/hunyuan_image_3/model.py | 2 +-
 comfy/model_detection.py           | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/comfy/ldm/hunyuan_image_3/model.py b/comfy/ldm/hunyuan_image_3/model.py
index 62e09e65a..c82904fdc 100644
--- a/comfy/ldm/hunyuan_image_3/model.py
+++ b/comfy/ldm/hunyuan_image_3/model.py
@@ -745,7 +745,7 @@ class HunyuanImage3Attention(nn.Module):
 
         self.hidden_size = config["hidden_size"]
         self.num_heads = config["num_attention_heads"]
-        self.head_dim = self.hidden_size // self.num_heads
+        self.head_dim = config["attention_head_dim"]
         self.num_key_value_heads = 8
         self.num_key_value_groups = self.num_heads // self.num_key_value_heads
         self.max_position_embeddings = config["max_position_embeddings"]
diff --git a/comfy/model_detection.py b/comfy/model_detection.py
index 816aed169..246596167 100644
--- a/comfy/model_detection.py
+++ b/comfy/model_detection.py
@@ -491,6 +491,7 @@ def detect_unet_config(state_dict, key_prefix, metadata=None):
         dit_config["num_attention_heads"] = 32
         dit_config['rms_norm_eps'] = 1e-05
         dit_config["num_hidden_layers"] = 32
+        dit_config["attention_head_dim"] = 128
         return dit_config
 
     if '{}blocks.0.mlp.layer1.weight'.format(key_prefix) in state_dict_keys: # Cosmos predict2
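
Reviewer note (not part of the commit): the patch stops deriving the per-head
attention width and instead reads it from the detected config, which the
second hunk now populates with attention_head_dim = 128. Below is a minimal
Python sketch of the failure mode being fixed. The config numbers are
hypothetical, chosen only so that the derived and the configured values
disagree; it is not ComfyUI code and does not claim the real Hunyuan Image 3
dimensions.

    # Sketch only, hypothetical dimensions: why head_dim should come from the
    # config rather than being derived as hidden_size // num_attention_heads.
    config = {
        "hidden_size": 6144,         # hypothetical transformer width
        "num_attention_heads": 32,
        "attention_head_dim": 128,   # per-head width the checkpoint weights expect
    }

    derived_head_dim = config["hidden_size"] // config["num_attention_heads"]  # 192
    explicit_head_dim = config["attention_head_dim"]                            # 128

    # The q/k/v projection weights are shaped num_heads * head_dim, so using
    # the derived value would break the view/reshape of q, k, v (and anything
    # else sized by head_dim) whenever the two quantities differ.
    print(derived_head_dim, explicit_head_dim)  # 192 128
    assert derived_head_dim != explicit_head_dim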