From 2d550102fca3cd549d693c990bc2750a4b8aa4b7 Mon Sep 17 00:00:00 2001
From: nolan4
Date: Tue, 28 Oct 2025 19:05:34 -0700
Subject: [PATCH] resolved Ruff lint errors

---
 comfy/ldm/qwen_image/model.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/comfy/ldm/qwen_image/model.py b/comfy/ldm/qwen_image/model.py
index 66cabab43..ffa4743dd 100644
--- a/comfy/ldm/qwen_image/model.py
+++ b/comfy/ldm/qwen_image/model.py
@@ -2,7 +2,6 @@
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
-import math
 import logging
 from typing import Optional, Tuple
 from einops import repeat, rearrange
@@ -336,7 +335,6 @@ class QwenImageTransformer2DModel(nn.Module):
         self.inner_dim = num_attention_heads * attention_head_dim
 
         self.pe_embedder = EmbedND(dim=attention_head_dim, theta=10000, axes_dim=list(axes_dims_rope))
-
         self.time_text_embed = QwenTimestepProjEmbeddings(
             embedding_dim=self.inner_dim,
             pooled_projection_dim=pooled_projection_dim,
@@ -560,8 +558,8 @@ class QwenImageTransformer2DModel(nn.Module):
 
             # Positive gets entity constraints, negative gets standard attention (all zeros)
             logger.debug(
-                f"[EliGen Model] CFG batched detected - creating separate masks. "
-                f"Positive (index 0) gets entity mask, Negative (index 1) gets standard mask"
+                "[EliGen Model] CFG batched detected - creating separate masks. "
+                "Positive (index 0) gets entity mask, Negative (index 1) gets standard mask"
             )
 
             # Create standard attention mask (all zeros = no constraints)