From 5905513e3207fb54df4507f9ce23f866b3a791f9 Mon Sep 17 00:00:00 2001
From: vanDuven
Date: Thu, 11 Dec 2025 01:06:26 -0500
Subject: [PATCH] revert fp16 support for Lumina2

---
 comfy/supported_models.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/comfy/supported_models.py b/comfy/supported_models.py
index fa15fac3c..383c82c3e 100644
--- a/comfy/supported_models.py
+++ b/comfy/supported_models.py
@@ -1001,7 +1001,7 @@ class Lumina2(supported_models_base.BASE):
     unet_extra_config = {}
     latent_format = latent_formats.Flux
 
-    supported_inference_dtypes = [torch.bfloat16, torch.float16, torch.float32]
+    supported_inference_dtypes = [torch.bfloat16, torch.float32]
 
     vae_key_prefix = ["vae."]
     text_encoder_key_prefix = ["text_encoders."]