Remove line made unnecessary (and wrong) after transformer_options was added to NextDiT's _forward definition (#11118)

This commit is contained in:
Jedrzej Kosinski 2025-12-05 11:05:38 -08:00 committed by GitHub
parent 35fa091340
commit 0ec05b1481
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -586,7 +586,6 @@ class NextDiT(nn.Module):
cap_feats = self.cap_embedder(cap_feats) # (N, L, D) # todo check if able to batchify w.o. redundant compute
patches = transformer_options.get("patches", {})
transformer_options = kwargs.get("transformer_options", {})
x_is_tensor = isinstance(x, torch.Tensor)
img, mask, img_size, cap_size, freqs_cis = self.patchify_and_embed(x, cap_feats, cap_mask, t, num_tokens, transformer_options=transformer_options)
freqs_cis = freqs_cis.to(img.device)