From b7cbe8afa528b3c48238b5d8063ece7ff863eb22 Mon Sep 17 00:00:00 2001 From: Kiruno-lz Date: Fri, 6 Mar 2026 16:17:21 +0800 Subject: [PATCH] fix: modify the dtype conversion logic within the `cast_to` function. When the dtype is `None` or a `float8` type, it should be uniformly converted to `float16` to prevent type mismatches during the conversion of `float8` types. --- comfy/model_management.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/comfy/model_management.py b/comfy/model_management.py index 9145d06b1..de73d04ea 100644 --- a/comfy/model_management.py +++ b/comfy/model_management.py @@ -1221,7 +1221,8 @@ def cast_to(weight, dtype=None, device=None, non_blocking=False, copy=False, str if weight.dtype in FLOAT8_TYPES: weight = weight.float() weight = weight.to(target_device, dtype=torch.float16) - dtype = torch.float16 if dtype is None else dtype + if dtype is None or dtype in FLOAT8_TYPES: + dtype = torch.float16 if device is None or weight.device == device: if not copy: