Mirror of https://github.com/comfyanonymous/ComfyUI.git, synced 2025-12-17 01:52:59 +08:00
Only enable fp16 on ZImage on newer pytorch. (#11344)
commit 683569de55
parent ea2c117bc3
@@ -28,6 +28,7 @@ from . import supported_models_base
 from . import latent_formats
 
 from . import diffusers_convert
+import comfy.model_management
 
 class SD15(supported_models_base.BASE):
     unet_config = {
@@ -1028,7 +1029,13 @@ class ZImage(Lumina2):
 
     memory_usage_factor = 2.0
 
-    supported_inference_dtypes = [torch.bfloat16, torch.float16, torch.float32]
+    supported_inference_dtypes = [torch.bfloat16, torch.float32]
 
+    def __init__(self, unet_config):
+        super().__init__(unet_config)
+        if comfy.model_management.extended_fp16_support():
+            self.supported_inference_dtypes = self.supported_inference_dtypes.copy()
+            self.supported_inference_dtypes.insert(1, torch.float16)
+
     def clip_target(self, state_dict={}):
         pref = self.text_encoder_key_prefix[0]
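Net effect of the change, as a minimal standalone sketch (the helper name `zimage_inference_dtypes` is hypothetical; the boolean parameter stands in for the result of `comfy.model_management.extended_fp16_support()`, which the diff uses to detect newer PyTorch builds):

import torch

def zimage_inference_dtypes(extended_fp16_support: bool):
    # After this commit, ZImage's default preference list no longer includes fp16.
    dtypes = [torch.bfloat16, torch.float32]
    if extended_fp16_support:
        # On newer PyTorch builds, fp16 is re-inserted as the second preference,
        # restoring the previous ordering [bf16, fp16, fp32].
        dtypes = dtypes.copy()
        dtypes.insert(1, torch.float16)
    return dtypes

# zimage_inference_dtypes(True)  -> [torch.bfloat16, torch.float16, torch.float32]
# zimage_inference_dtypes(False) -> [torch.bfloat16, torch.float32]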