removed bfloat16 from flux model supported inference dtypes, resulting in 2x speedup

This commit is contained in:
patientx 2024-08-30 13:33:32 +03:00 committed by GitHub
parent 99789c6109
commit 524cd140b5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -642,7 +642,7 @@ class Flux(supported_models_base.BASE):
     memory_usage_factor = 2.8
-    supported_inference_dtypes = [torch.bfloat16, torch.float16, torch.float32]
+    supported_inference_dtypes = [torch.float16, torch.float32]
     vae_key_prefix = ["vae."]
     text_encoder_key_prefix = ["text_encoders."]