Mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2026-03-30 13:33:42 +08:00)
fix: replace bare except with specific exception types
This commit is contained in:
parent
47414ffa45
commit
2e49b5582f
@ -73,7 +73,8 @@ class ModelFileManager:
|
||||
img.save(img_bytes, format="WEBP")
|
||||
img_bytes.seek(0)
|
||||
return web.Response(body=img_bytes.getvalue(), content_type="image/webp")
|
||||
except:
|
||||
except (IOError, OSError, ValueError, Image.DecompressionBombError) as e:
|
||||
logging.warning(f"Failed to load preview image {default_preview}: {e}")
|
||||
return web.Response(status=404)
|
||||
|
||||
def get_model_file_list(self, folder_name: str):
|
||||
|
||||
@ -70,7 +70,7 @@ try:
|
||||
#TODO: change upper bound version once it's fixed'
|
||||
NVIDIA_MEMORY_CONV_BUG_WORKAROUND = True
|
||||
logging.info("working around nvidia conv3d memory bug.")
|
||||
except:
|
||||
except (RuntimeError, AttributeError, TypeError):
|
||||
pass
|
||||
|
||||
cast_to = comfy.model_management.cast_to #TODO: remove once no more references
|
||||
|
||||
@ -55,8 +55,8 @@ class FreeU(IO.ComfyNode):
|
||||
if hsp.device not in on_cpu_devices:
|
||||
try:
|
||||
hsp = Fourier_filter(hsp, threshold=1, scale=scale[1])
|
||||
except:
|
||||
logging.warning("Device {} does not support the torch.fft functions used in the FreeU node, switching to CPU.".format(hsp.device))
|
||||
except (RuntimeError, TypeError) as e:
|
||||
logging.warning("Device {} does not support the torch.fft functions used in the FreeU node, switching to CPU. Error: {}".format(hsp.device, e))
|
||||
on_cpu_devices[hsp.device] = True
|
||||
hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device)
|
||||
else:
|
||||
@ -109,8 +109,8 @@ class FreeU_V2(IO.ComfyNode):
|
||||
if hsp.device not in on_cpu_devices:
|
||||
try:
|
||||
hsp = Fourier_filter(hsp, threshold=1, scale=scale[1])
|
||||
except:
|
||||
logging.warning("Device {} does not support the torch.fft functions used in the FreeU node, switching to CPU.".format(hsp.device))
|
||||
except (RuntimeError, TypeError) as e:
|
||||
logging.warning("Device {} does not support the torch.fft functions used in the FreeU node, switching to CPU. Error: {}".format(hsp.device, e))
|
||||
on_cpu_devices[hsp.device] = True
|
||||
hsp = Fourier_filter(hsp.cpu(), threshold=1, scale=scale[1]).to(hsp.device)
|
||||
else:
|
||||
|
||||
@ -35,7 +35,7 @@ def load_hypernetwork_patch(path, strength):
|
||||
for d in sd:
|
||||
try:
|
||||
dim = int(d)
|
||||
except:
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
|
||||
output = []
|
||||
|
||||
@ -75,8 +75,8 @@ def calc_lora_model(model_diff, rank, prefix_model, prefix_lora, output_sd, lora
|
||||
out = extract_lora(weight_diff, rank)
|
||||
output_sd["{}{}.lora_up.weight".format(prefix_lora, k[len(prefix_model):-7])] = out[0].contiguous().half().cpu()
|
||||
output_sd["{}{}.lora_down.weight".format(prefix_lora, k[len(prefix_model):-7])] = out[1].contiguous().half().cpu()
|
||||
except:
|
||||
logging.warning("Could not generate lora weights for key {}, is the weight difference a zero?".format(k))
|
||||
except (RuntimeError, ValueError, TypeError, torch.linalg.LinAlgError) as e:
|
||||
logging.warning("Could not generate lora weights for key {}, is the weight difference a zero? Error: {}".format(k, e))
|
||||
elif lora_type == LORAType.FULL_DIFF:
|
||||
output_sd["{}{}.diff".format(prefix_lora, k[len(prefix_model):-7])] = weight_diff.contiguous().half().cpu()
|
||||
|
||||
|
||||
Loading…
Reference in New Issue
Block a user