Mirror of https://github.com/comfyanonymous/ComfyUI.git, synced 2025-12-17 18:13:01 +08:00
Merge branch 'master' into dr-support-pip-cm

Commit 6626f7c5c4
comfy/ops.py (19 lines changed)
@@ -52,6 +52,16 @@ try:
 except (ModuleNotFoundError, TypeError):
     logging.warning("Could not set sdpa backend priority.")

+NVIDIA_MEMORY_CONV_BUG_WORKAROUND = False
+try:
+    if comfy.model_management.is_nvidia():
+        if torch.backends.cudnn.version() >= 91200 and comfy.model_management.torch_version_numeric >= (2, 9) and comfy.model_management.torch_version_numeric <= (2, 10):
+            #TODO: change upper bound version once it's fixed
+            NVIDIA_MEMORY_CONV_BUG_WORKAROUND = True
+            logging.info("working around nvidia conv3d memory bug.")
+except:
+    pass
+
 cast_to = comfy.model_management.cast_to #TODO: remove once no more references

 if torch.cuda.is_available() and torch.backends.cudnn.is_available() and PerformanceFeature.AutoTune in args.fast:
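
The new flag is flipped only when every probe succeeds; any exception while probing leaves it at the safe default of False. A minimal standalone sketch of that gate, with the comfy.model_management values passed in as plain arguments since that module is not imported here (not part of the commit):

# Hedged sketch: the workaround gate as a standalone function.
import torch

def nvidia_conv_bug_workaround_needed(is_nvidia, torch_version):
    """Return True only when every condition from the diff holds; fail closed otherwise."""
    try:
        return (
            is_nvidia
            and torch.backends.cudnn.version() >= 91200   # cuDNN 9.12 or newer
            and (2, 9) <= torch_version <= (2, 10)        # affected torch releases; upper bound is temporary per the TODO
        )
    except Exception:                                     # e.g. cudnn.version() returns None on CPU-only builds
        return False

# Example: on a CUDA build with cuDNN 9.12+ and torch reported as (2, 9),
# nvidia_conv_bug_workaround_needed(True, (2, 9)) evaluates to True.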
@@ -151,6 +161,15 @@ class disable_weight_init:
         def reset_parameters(self):
             return None

+        def _conv_forward(self, input, weight, bias, *args, **kwargs):
+            if NVIDIA_MEMORY_CONV_BUG_WORKAROUND and weight.dtype in (torch.float16, torch.bfloat16):
+                out = torch.cudnn_convolution(input, weight, self.padding, self.stride, self.dilation, self.groups, benchmark=False, deterministic=False, allow_tf32=True)
+                if bias is not None:
+                    out += bias.reshape((1, -1) + (1,) * (out.ndim - 2))
+                return out
+            else:
+                return super()._conv_forward(input, weight, bias, *args, **kwargs)
+
         def forward_comfy_cast_weights(self, input):
             weight, bias = cast_bias_weight(self, input)
             return self._conv_forward(input, weight, bias)
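
In the workaround path the bias is added manually, so it is reshaped to (1, C, 1, ..., 1) and broadcasts over the batch and spatial dimensions, matching what the built-in conv bias term does. A small standalone check of that reshape, with shapes chosen arbitrarily for illustration (not part of the commit):

# Hedged sketch: the bias reshape broadcasts over batch and spatial dims.
import torch

out = torch.zeros(2, 4, 8, 8, 8)                  # (N, C, D, H, W), e.g. a conv3d output
bias = torch.arange(4, dtype=torch.float32)       # one value per output channel
out += bias.reshape((1, -1) + (1,) * (out.ndim - 2))   # reshaped to (1, 4, 1, 1, 1)
assert torch.equal(out[1, 3], torch.full((8, 8, 8), 3.0))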

@@ -393,7 +393,9 @@ class Veo3VideoGenerationNode(VeoVideoGenerationNode):
                 ),
                 IO.Combo.Input(
                     "model",
-                    options=list(MODELS_MAP.keys()),
+                    options=[
+                        "veo-3.1-generate", "veo-3.1-fast-generate", "veo-3.0-generate-001", "veo-3.0-fast-generate-001"
+                    ],
                     default="veo-3.0-generate-001",
                     tooltip="Veo 3 model to use for video generation",
                     optional=True,

requirements.txt

@@ -1,4 +1,4 @@
-comfyui-frontend-package==1.28.6
+comfyui-frontend-package==1.28.7
 comfyui-workflow-templates==0.1.95
 comfyui-embedded-docs==0.3.0
 comfyui_manager==4.0.2

server.py (24 lines changed)
@@ -51,6 +51,28 @@ async def send_socket_catch_exception(function, message):
     except (aiohttp.ClientError, aiohttp.ClientPayloadError, ConnectionResetError, BrokenPipeError, ConnectionError) as err:
         logging.warning("send error: {}".format(err))

+# Track deprecated paths that have been warned about to only warn once per file
+_deprecated_paths_warned = set()
+
+@web.middleware
+async def deprecation_warning(request: web.Request, handler):
+    """Middleware to warn about deprecated frontend API paths"""
+    path = request.path
+
+    if (path.startswith('/scripts/') or path.startswith('/extensions/core/')):
+        # Only warn once per unique file path
+        if path not in _deprecated_paths_warned:
+            _deprecated_paths_warned.add(path)
+            logging.warning(
+                f"[DEPRECATION WARNING] Detected import of deprecated legacy API: {path}. "
+                f"This is likely caused by a custom node extension using outdated APIs. "
+                f"Please update your extensions or contact the extension author for an updated version."
+            )
+
+    response: web.Response = await handler(request)
+    return response
+
+
 @web.middleware
 async def compress_body(request: web.Request, handler):
     accept_encoding = request.headers.get("Accept-Encoding", "")
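
The middleware only inspects the request path, logs once per unique deprecated path, and then delegates to the wrapped handler unchanged. A minimal, self-contained aiohttp sketch of the same shape; the demo route, handler name, and port are assumptions for illustration and are not part of the commit:

# Hedged sketch: a toy app showing how a middleware of this shape behaves.
import logging
from aiohttp import web

_warned = set()

@web.middleware
async def deprecation_warning(request: web.Request, handler):
    path = request.path
    if path.startswith('/scripts/') or path.startswith('/extensions/core/'):
        if path not in _warned:                  # warn only once per unique path
            _warned.add(path)
            logging.warning("[DEPRECATION WARNING] Detected import of deprecated legacy API: %s", path)
    return await handler(request)                # always pass the request through unchanged

async def serve_ok(request):
    return web.Response(text="ok")

app = web.Application(middlewares=[deprecation_warning])
app.router.add_get('/scripts/{tail:.*}', serve_ok)
# web.run_app(app, port=8188)  # repeated GETs to e.g. /scripts/ui.js log the warning once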
@@ -162,7 +184,7 @@ class PromptServer():
         self.client_session:Optional[aiohttp.ClientSession] = None
         self.number = 0

-        middlewares = [cache_control]
+        middlewares = [cache_control, deprecation_warning]
         if args.enable_compress_response_body:
             middlewares.append(compress_body)
