small attention fix

Yousef Rafat 2025-11-17 23:03:52 +02:00
parent 3f71760913
commit b84af5b947
2 changed files with 2 additions and 1 deletion


@@ -745,7 +745,7 @@ class HunyuanImage3Attention(nn.Module):
         self.hidden_size = config["hidden_size"]
         self.num_heads = config["num_attention_heads"]
-        self.head_dim = self.hidden_size // self.num_heads
+        self.head_dim = config["attention_head_dim"]
         self.num_key_value_heads = 8
         self.num_key_value_groups = self.num_heads // self.num_key_value_heads
         self.max_position_embeddings = config["max_position_embeddings"]
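The hunk above stops deriving the head dimension from hidden_size // num_heads and reads it from the model config instead. A minimal sketch of why that matters, using hypothetical numbers (not taken from the actual Hunyuan Image 3 checkpoint):

    # Hypothetical values, for illustration only.
    hidden_size = 6400
    num_attention_heads = 32
    attention_head_dim = 128                        # what the checkpoint actually uses

    derived = hidden_size // num_attention_heads    # 200 -- the old computation
    assert derived != attention_head_dim            # q/k/v projections would be mis-sized

Whenever the two disagree, the explicit config value is the one the weights were trained with, so it has to take precedence.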


@@ -491,6 +491,7 @@ def detect_unet_config(state_dict, key_prefix, metadata=None):
         dit_config["num_attention_heads"] = 32
         dit_config['rms_norm_eps'] = 1e-05
         dit_config["num_hidden_layers"] = 32
+        dit_config["attention_head_dim"] = 128
         return dit_config
     if '{}blocks.0.mlp.layer1.weight'.format(key_prefix) in state_dict_keys: # Cosmos predict2
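Together the two hunks mean the detected config now carries an explicit attention_head_dim that the attention module consumes directly. A rough sketch of the resulting flow, restricted to the keys visible in this diff (the full detected config has more entries):

    # Subset of what detect_unet_config returns for this model after the fix.
    dit_config = {
        "num_attention_heads": 32,
        "attention_head_dim": 128,
        "num_hidden_layers": 32,
        "rms_norm_eps": 1e-05,
    }

    # HunyuanImage3Attention.__init__ now picks the head dim up from the config
    # instead of recomputing it from hidden_size.
    head_dim = dit_config["attention_head_dim"]   # 128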