From 2e4f550d6680d2cbb82b9c8e803425a515abd44a Mon Sep 17 00:00:00 2001
From: Rando717 <228354122+Rando717@users.noreply.github.com>
Date: Sun, 28 Sep 2025 21:03:27 +0200
Subject: [PATCH] Update cfz_patcher.py (moved CFZ nodes to CFZ/category)

---
 cfz/nodes/cfz_patcher.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/cfz/nodes/cfz_patcher.py b/cfz/nodes/cfz_patcher.py
index 6a615174b..7e20d58ad 100644
--- a/cfz/nodes/cfz_patcher.py
+++ b/cfz/nodes/cfz_patcher.py
@@ -247,7 +247,7 @@ class CheckpointLoaderQuantized2:
     RETURN_TYPES = ("MODEL", "CLIP", "VAE")
     FUNCTION = "load_quantized"
-    CATEGORY = "Loaders (Quantized)"
+    CATEGORY = "CFZ/loaders"
     OUTPUT_NODE = False
 
     def load_quantized(self, ckpt_name, enable_quant, use_asymmetric, quant_dtype,
@@ -302,7 +302,7 @@ class ModelQuantizationPatcher:
     RETURN_TYPES = ("MODEL",)
     FUNCTION = "patch_model"
-    CATEGORY = "Model Patching"
+    CATEGORY = "CFZ/model patches"
     OUTPUT_NODE = False
 
     def patch_model(self, model, use_asymmetric, quant_dtype, use_int8_matmul):
@@ -347,7 +347,7 @@ class UNetQuantizationPatcher:
     RETURN_TYPES = ("MODEL",)
     FUNCTION = "patch_unet"
-    CATEGORY = "Model Patching"
+    CATEGORY = "CFZ/model patches"
     OUTPUT_NODE = False
 
     def get_model_memory_usage(self, model, force_calculation=False):
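
Note (not part of the patch): the CATEGORY class attribute is what ComfyUI uses to place a node in the "Add Node" menu, and a "/" in the string nests the node in a submenu, so "CFZ/loaders" groups these nodes under a single CFZ entry. The sketch below is a minimal, hypothetical node (the class name, input, and "CFZ/examples" category are illustrative, not taken from cfz_patcher.py) showing the attributes the patch touches in context, assuming the standard ComfyUI custom-node conventions.

# Minimal sketch of a ComfyUI custom node; class and field names are illustrative.
class CFZExampleNode:
    @classmethod
    def INPUT_TYPES(cls):
        # One required string input, for illustration only.
        return {"required": {"text": ("STRING", {"default": ""})}}

    RETURN_TYPES = ("STRING",)
    FUNCTION = "run"
    CATEGORY = "CFZ/examples"  # appears under Add Node -> CFZ -> examples
    OUTPUT_NODE = False

    def run(self, text):
        # Identity pass-through; the node body does not affect menu placement.
        return (text,)

# Registration mappings ComfyUI looks for in a custom node package's __init__.py.
NODE_CLASS_MAPPINGS = {"CFZExampleNode": CFZExampleNode}
NODE_DISPLAY_NAME_MAPPINGS = {"CFZExampleNode": "CFZ Example Node"}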