From 3c72c89a52d5fa8fc4ae9bf83f8bbe3c850dd7c6 Mon Sep 17 00:00:00 2001
From: Brendan Hoar
Date: Tue, 8 Oct 2024 15:04:32 -0400
Subject: [PATCH 1/2] Update folder_paths.py - try/catch for special file_name values (#5187)

Somehow managed to drop a file called "nul" into a Windows checkpoints
subdirectory. This caused all sorts of havoc with many nodes that needed
the list of checkpoints.
---
 folder_paths.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/folder_paths.py b/folder_paths.py
index 1f03c08d8..01ae821de 100644
--- a/folder_paths.py
+++ b/folder_paths.py
@@ -234,8 +234,12 @@ def recursive_search(directory: str, excluded_dir_names: list[str] | None=None)
     for dirpath, subdirs, filenames in os.walk(directory, followlinks=True, topdown=True):
         subdirs[:] = [d for d in subdirs if d not in excluded_dir_names]
         for file_name in filenames:
-            relative_path = os.path.relpath(os.path.join(dirpath, file_name), directory)
-            result.append(relative_path)
+            try:
+                relative_path = os.path.relpath(os.path.join(dirpath, file_name), directory)
+                result.append(relative_path)
+            except:
+                logging.warning(f"Warning: Unable to access {file_name}. Skipping this file.")
+                continue
 
         for d in subdirs:
             path: str = os.path.join(dirpath, d)
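For context on the failure the first patch guards against: on Windows, "nul" (like CON, PRN, AUX, COM1, ...) is a reserved DOS device name, and resolving a path whose last component is such a name lands in the "\\.\" device namespace, so os.path.relpath() can raise ValueError because the resolved path no longer shares a drive with the start directory. The commit message does not name the exact exception, so this is one plausible failure mode. A minimal sketch of it, with the same defensive handling the patch adds; the directory path is hypothetical and the error only reproduces on Windows:

    import logging
    import os

    # Hypothetical layout: a file literally named "nul" inside a Windows
    # checkpoints folder. abspath() resolves "...\nul" into the "\\.\"
    # device namespace, so relpath() against the start directory can raise
    # ValueError there; on other platforms this simply prints "nul".
    directory = r"C:\ComfyUI\models\checkpoints"
    file_name = "nul"

    try:
        relative_path = os.path.relpath(os.path.join(directory, file_name), directory)
        print(relative_path)
    except ValueError:
        # The patch catches broadly (a bare except), so one bad name skips a
        # single file instead of breaking every node that lists checkpoints.
        logging.warning(f"Warning: Unable to access {file_name}. Skipping this file.")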
From 203942c8b29dfbf59a7976dcee29e8ab44a1b32d Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Tue, 8 Oct 2024 19:02:41 -0400
Subject: [PATCH 2/2] Fix flux doras with diffusers keys.

---
 comfy/lora.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/comfy/lora.py b/comfy/lora.py
index 2d0ed7bf5..80057cdd4 100644
--- a/comfy/lora.py
+++ b/comfy/lora.py
@@ -343,10 +343,10 @@ def model_lora_keys_unet(model, key_map={}):
 
     return key_map
 
-def weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype):
+def weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype, function):
     dora_scale = comfy.model_management.cast_to_device(dora_scale, weight.device, intermediate_dtype)
     lora_diff *= alpha
-    weight_calc = weight + lora_diff.type(weight.dtype)
+    weight_calc = weight + function(lora_diff).type(weight.dtype)
     weight_norm = (
         weight_calc.transpose(0, 1)
         .reshape(weight_calc.shape[1], -1)
@@ -453,7 +453,7 @@ def calculate_weight(patches, weight, key, intermediate_dtype=torch.float32):
             try:
                 lora_diff = torch.mm(mat1.flatten(start_dim=1), mat2.flatten(start_dim=1)).reshape(weight.shape)
                 if dora_scale is not None:
-                    weight = function(weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype))
+                    weight = weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype, function)
                 else:
                     weight += function(((strength * alpha) * lora_diff).type(weight.dtype))
             except Exception as e:
@@ -499,7 +499,7 @@ def calculate_weight(patches, weight, key, intermediate_dtype=torch.float32):
             try:
                 lora_diff = torch.kron(w1, w2).reshape(weight.shape)
                 if dora_scale is not None:
-                    weight = function(weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype))
+                    weight = weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype, function)
                 else:
                     weight += function(((strength * alpha) * lora_diff).type(weight.dtype))
             except Exception as e:
@@ -536,7 +536,7 @@ def calculate_weight(patches, weight, key, intermediate_dtype=torch.float32):
             try:
                 lora_diff = (m1 * m2).reshape(weight.shape)
                 if dora_scale is not None:
-                    weight = function(weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype))
+                    weight = weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype, function)
                 else:
                     weight += function(((strength * alpha) * lora_diff).type(weight.dtype))
             except Exception as e:
@@ -577,7 +577,7 @@ def calculate_weight(patches, weight, key, intermediate_dtype=torch.float32):
                 lora_diff += torch.mm(b1, b2).reshape(weight.shape)
 
                 if dora_scale is not None:
-                    weight = function(weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype))
+                    weight = weight_decompose(dora_scale, weight, lora_diff, alpha, strength, intermediate_dtype, function)
                 else:
                     weight += function(((strength * alpha) * lora_diff).type(weight.dtype))
             except Exception as e:
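For context on the second patch: DoRA stores a per-column magnitude vector (dora_scale) and rebuilds the weight as that magnitude times the column-normalized direction of W + ΔW. In calculate_weight, `function` is the per-key transform that maps a checkpoint-layout diff (here, diffusers-style flux keys) onto the model's weight layout. The patch threads it into weight_decompose so the diff is transformed before the column norms are taken; my reading of the change is that applying it afterwards to the already recomposed weight, as the old code did, produced wrong magnitudes for those keys. A simplified 2-D sketch of the corrected order of operations, using an identity `function` and hypothetical shapes, not ComfyUI's exact code (which also handles conv weights, alpha, strength, and dtype casting):

    import torch

    # Simplified DoRA recomposition mirroring the patched order: transform
    # the LoRA diff into the weight's layout first, then take column norms.
    def weight_decompose_sketch(dora_scale, weight, lora_diff, function=lambda t: t):
        weight_calc = weight + function(lora_diff).type(weight.dtype)
        # Per-column L2 norm: the "direction" denominator in DoRA.
        weight_norm = weight_calc.norm(dim=0, keepdim=True)
        # Rescale every column to its trained magnitude.
        return weight_calc * (dora_scale / weight_norm)

    weight = torch.randn(8, 4)
    lora_diff = 0.1 * torch.randn(8, 4)
    dora_scale = torch.rand(1, 4) + 0.5  # hypothetical trained magnitudes
    print(weight_decompose_sketch(dora_scale, weight, lora_diff).shape)  # torch.Size([8, 4])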