Include Kijai fp8 nodes. LoRAs are not supported by nf4

This commit is contained in:
doctorpangloss 2024-08-25 22:41:10 -07:00
parent 01feca812f
commit 48ca1a4910
4 changed files with 8 additions and 3 deletions

View File

@@ -424,6 +424,8 @@ KNOWN_UNET_MODELS: Final[KnownDownloadables] = KnownDownloadables([
HuggingFile("ByteDance/Hyper-SD", "Hyper-SDXL-1step-Unet-Comfyui.fp16.safetensors"),
HuggingFile("black-forest-labs/FLUX.1-schnell", "flux1-schnell.safetensors"),
HuggingFile("black-forest-labs/FLUX.1-dev", "flux1-dev.safetensors"),
HuggingFile("Kijai/flux-fp8", "flux1-dev-fp8.safetensors"),
HuggingFile("Kijai/flux-fp8", "flux1-schnell-fp8.safetensors"),
], folder_name="diffusion_models")
KNOWN_CLIP_MODELS: Final[KnownDownloadables] = KnownDownloadables([

View File

@@ -386,7 +386,7 @@ class ModelPatcher(ModelManageable):
logging.debug("loaded partially {} {} {}".format(lowvram_model_memory / (1024 * 1024), mem_counter / (1024 * 1024), patch_counter))
self._memory_measurements.model_lowvram = True
else:
logging.info("loaded completely {} {} {}".format(lowvram_model_memory / (1024 * 1024), mem_counter / (1024 * 1024), full_load))
logging.debug("loaded completely {} {} {}".format(lowvram_model_memory / (1024 * 1024), mem_counter / (1024 * 1024), full_load))
self._memory_measurements.model_lowvram = False
if full_load:
self.model.to(device_to)

View File

@@ -61,6 +61,8 @@ def copy_quant_state(state: QuantState, device: torch.device = None) -> QuantSta
class ForgeParams4bit(Params4bit):
def to(self, *args, **kwargs):
if 'copy' in kwargs:
kwargs.pop('copy')
device, dtype, non_blocking, convert_to_format = torch._C._nn._parse_to(*args, **kwargs)
if device is not None and device.type == "cuda" and not self.bnb_quantized:
return self._quantize(device)
@@ -179,7 +181,7 @@ class CheckpointLoaderNF4:
def load_checkpoint(self, ckpt_name):
if not has_bitsandbytes:
raise BitsAndBytesNotFoundError(f"Because your platform is {platform.platform()}, bitsandbytes is not installed, so this cannot be executed")
raise BitsAndBytesNotFoundError(f"bitsandbytes is not installed, so {CheckpointLoaderNF4.__name__} cannot be executed")
ckpt_path = get_or_download("checkpoints", ckpt_name)
out = comfy.sd.load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, embedding_directory=get_folder_paths("embeddings"), model_options={"custom_operations": OPS})
return out[:3]

View File

@@ -10,7 +10,8 @@ sentencepiece
peft>=0.10.0
torchinfo
safetensors>=0.4.2
bitsandbytes>=0.43.0 ;platform_system == 'Linux' or platform_system == 'Windows'
bitsandbytes>=0.43.0 ;platform_system != 'Darwin'
bitsandbytes ;platform_system == 'Darwin'
aiohttp>=3.8.4
accelerate>=0.25.0
pyyaml>=6.0