Merge branch 'master' into feat/api-nodes/tencent-UV-unwrap

This commit is contained in:
Alexander Piskun 2026-02-15 11:39:54 +02:00 committed by GitHub
commit c6a45ac152
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 12 additions and 34 deletions

View File

@@ -1,13 +0,0 @@
import pickle
load = pickle.load
class Empty:
pass
class Unpickler(pickle.Unpickler):
def find_class(self, module, name):
#TODO: safe unpickle
if module.startswith("pytorch_lightning"):
return Empty
return super().find_class(module, name)

View File

@@ -20,7 +20,7 @@
import torch import torch
import math import math
import struct import struct
import comfy.checkpoint_pickle import comfy.memory_management
import safetensors.torch import safetensors.torch
import numpy as np import numpy as np
from PIL import Image from PIL import Image
@@ -38,26 +38,26 @@ import warnings
MMAP_TORCH_FILES = args.mmap_torch_files MMAP_TORCH_FILES = args.mmap_torch_files
DISABLE_MMAP = args.disable_mmap DISABLE_MMAP = args.disable_mmap
ALWAYS_SAFE_LOAD = False
if hasattr(torch.serialization, "add_safe_globals"): # TODO: this was added in pytorch 2.4, the unsafe path should be removed once earlier versions are deprecated if True: # ckpt/pt file whitelist for safe loading of old sd files
class ModelCheckpoint: class ModelCheckpoint:
pass pass
ModelCheckpoint.__module__ = "pytorch_lightning.callbacks.model_checkpoint" ModelCheckpoint.__module__ = "pytorch_lightning.callbacks.model_checkpoint"
def scalar(*args, **kwargs): def scalar(*args, **kwargs):
from numpy.core.multiarray import scalar as sc return None
return sc(*args, **kwargs)
scalar.__module__ = "numpy.core.multiarray" scalar.__module__ = "numpy.core.multiarray"
from numpy import dtype from numpy import dtype
from numpy.dtypes import Float64DType from numpy.dtypes import Float64DType
from _codecs import encode
def encode(*args, **kwargs): # no longer necessary on newer torch
return None
encode.__module__ = "_codecs"
torch.serialization.add_safe_globals([ModelCheckpoint, scalar, dtype, Float64DType, encode]) torch.serialization.add_safe_globals([ModelCheckpoint, scalar, dtype, Float64DType, encode])
ALWAYS_SAFE_LOAD = True
logging.info("Checkpoint files will always be loaded safely.") logging.info("Checkpoint files will always be loaded safely.")
else:
logging.warning("Warning, you are using an old pytorch version and some ckpt/pt files might be loaded unsafely. Upgrading to 2.4 or above is recommended as older versions of pytorch are no longer supported.")
# Current as of safetensors 0.7.0 # Current as of safetensors 0.7.0
_TYPES = { _TYPES = {
@@ -140,11 +140,8 @@ def load_torch_file(ckpt, safe_load=False, device=None, return_metadata=False):
if MMAP_TORCH_FILES: if MMAP_TORCH_FILES:
torch_args["mmap"] = True torch_args["mmap"] = True
if safe_load or ALWAYS_SAFE_LOAD: pl_sd = torch.load(ckpt, map_location=device, weights_only=True, **torch_args)
pl_sd = torch.load(ckpt, map_location=device, weights_only=True, **torch_args)
else:
logging.warning("WARNING: loading {} unsafely, upgrade your pytorch to 2.4 or newer to load this file safely.".format(ckpt))
pl_sd = torch.load(ckpt, map_location=device, pickle_module=comfy.checkpoint_pickle)
if "state_dict" in pl_sd: if "state_dict" in pl_sd:
sd = pl_sd["state_dict"] sd = pl_sd["state_dict"]
else: else:

View File

@@ -43,7 +43,6 @@ class SupportedOpenAIModel(str, Enum):
o1 = "o1" o1 = "o1"
o3 = "o3" o3 = "o3"
o1_pro = "o1-pro" o1_pro = "o1-pro"
gpt_4o = "gpt-4o"
gpt_4_1 = "gpt-4.1" gpt_4_1 = "gpt-4.1"
gpt_4_1_mini = "gpt-4.1-mini" gpt_4_1_mini = "gpt-4.1-mini"
gpt_4_1_nano = "gpt-4.1-nano" gpt_4_1_nano = "gpt-4.1-nano"
@@ -649,11 +648,6 @@ class OpenAIChatNode(IO.ComfyNode):
"usd": [0.01, 0.04], "usd": [0.01, 0.04],
"format": { "approximate": true, "separator": "-", "suffix": " per 1K tokens" } "format": { "approximate": true, "separator": "-", "suffix": " per 1K tokens" }
} }
: $contains($m, "gpt-4o") ? {
"type": "list_usd",
"usd": [0.0025, 0.01],
"format": { "approximate": true, "separator": "-", "suffix": " per 1K tokens" }
}
: $contains($m, "gpt-4.1-nano") ? { : $contains($m, "gpt-4.1-nano") ? {
"type": "list_usd", "type": "list_usd",
"usd": [0.0001, 0.0004], "usd": [0.0001, 0.0004],

View File

@@ -1,4 +1,4 @@
comfyui-frontend-package==1.38.13 comfyui-frontend-package==1.38.14
comfyui-workflow-templates==0.8.38 comfyui-workflow-templates==0.8.38
comfyui-embedded-docs==0.4.1 comfyui-embedded-docs==0.4.1
torch torch