Mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2026-01-10 14:20:49 +08:00)

commit 94310e51e3
Merge branch 'master' of github.com:comfyanonymous/ComfyUI
@@ -135,6 +135,8 @@ def _create_parser() -> EnhancedConfigArgParser:
     parser.add_argument("--fast", nargs="*", type=PerformanceFeature, help="Enable some untested and potentially quality deteriorating optimizations. Pass a list specific optimizations if you only want to enable specific ones. Current valid optimizations: fp16_accumulation fp8_matrix_mult cublas_ops", default=set())
     parser.add_argument("--mmap-torch-files", action="store_true", help="Use mmap when loading ckpt/pt files.")
+    parser.add_argument("--disable-mmap", action="store_true", help="Don't use mmap when loading safetensors.")
     parser.add_argument("--dont-print-server", action="store_true", help="Don't print server output.")
     parser.add_argument("--quick-test-for-ci", action="store_true", help="Quick test for CI. Raises an error if nodes cannot be imported,")
     parser.add_argument("--windows-standalone-build", default=hasattr(sys, 'frozen') and getattr(sys, 'frozen'),
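The only functional change in this hunk is the new --disable-mmap flag. As a minimal sketch of what that flag looks like to a caller (plain argparse stand-in; the real project uses EnhancedConfigArgParser, and everything except the two flags quoted from the hunk is invented for illustration):

import argparse

# Stand-in parser for illustration only; not the project's EnhancedConfigArgParser.
parser = argparse.ArgumentParser()
parser.add_argument("--mmap-torch-files", action="store_true",
                    help="Use mmap when loading ckpt/pt files.")
parser.add_argument("--disable-mmap", action="store_true",
                    help="Don't use mmap when loading safetensors.")

args = parser.parse_args(["--disable-mmap"])
print(args.disable_mmap)  # True; dashes in the flag name become an underscore on the namespace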
@@ -971,7 +971,7 @@ class VideoVAE(nn.Module):
             norm_layer=config.get("norm_layer", "group_norm"),
             causal=config.get("causal_decoder", False),
             timestep_conditioning=self.timestep_conditioning,
-            spatial_padding_mode=config.get("spatial_padding_mode", "zeros"),
+            spatial_padding_mode=config.get("spatial_padding_mode", "reflect"),
         )

         self.per_channel_statistics = processor()
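The single change above swaps the decoder's default spatial_padding_mode from "zeros" to "reflect". As a rough illustration of what those two PyTorch padding modes mean (a toy Conv2d, not the VideoVAE decoder itself; shapes are invented):

import torch
import torch.nn as nn

x = torch.rand(1, 4, 8, 8)

# "zeros" pads the spatial border with zeros; "reflect" mirrors the tensor's own
# edge values instead. Output shapes are identical, only border behaviour differs.
conv_zeros = nn.Conv2d(4, 4, kernel_size=3, padding=1, padding_mode="zeros")
conv_reflect = nn.Conv2d(4, 4, kernel_size=3, padding=1, padding_mode="reflect")

print(conv_zeros(x).shape, conv_reflect(x).shape)  # torch.Size([1, 4, 8, 8]) twice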
@@ -18,7 +18,7 @@
     "single_word": false
   },
   "errors": "replace",
-  "model_max_length": 77,
+  "model_max_length": 8192,
   "name_or_path": "openai/clip-vit-large-patch14",
   "pad_token": "<|endoftext|>",
   "special_tokens_map_file": "./special_tokens_map.json",
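This hunk only raises model_max_length from 77 to 8192 in a bundled tokenizer_config.json. For what that setting controls, here is a hedged sketch against the stock Hugging Face CLIP tokenizer (requires network access to download the tokenizer; the stock tokenizer still reports 77, only this repo's bundled config changes):

from transformers import CLIPTokenizer

tok = CLIPTokenizer.from_pretrained("openai/clip-vit-large-patch14")
print(tok.model_max_length)  # 77 for the stock CLIP tokenizer

# With truncation enabled, encodings are cut at model_max_length, so raising the
# limit in the bundled config lets much longer prompts through untruncated.
ids = tok("a very long prompt " * 100, truncation=True)["input_ids"]
print(len(ids))  # capped at tok.model_max_length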
@@ -50,6 +50,8 @@ from .component_model.queue_types import BinaryEventTypes
 from .execution_context import current_execution_context

 MMAP_TORCH_FILES = args.mmap_torch_files
+DISABLE_MMAP = args.disable_mmap

 logger = logging.getLogger(__name__)

 ALWAYS_SAFE_LOAD = False
@@ -96,7 +98,10 @@ def load_torch_file(ckpt: str, safe_load=False, device=None, return_metadata=False):
         with safetensors.safe_open(Path(ckpt).resolve(strict=True), framework="pt", device=device.type) as f:
             sd = {}
             for k in f.keys():
-                sd[k] = f.get_tensor(k)
+                tensor = f.get_tensor(k)
+                if DISABLE_MMAP:  # TODO: Not sure if this is the best way to bypass the mmap issues
+                    tensor = tensor.to(device=device, copy=True)
+                sd[k] = tensor
             if return_metadata:
                 metadata = f.metadata()
     except Exception as e:
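Together with the new flag and the DISABLE_MMAP global from the earlier hunks, this path copies each tensor off the memory-mapped safetensors file when --disable-mmap is set. A self-contained sketch of that behaviour (the file name and the hard-coded DISABLE_MMAP value are invented for the example):

import torch
import safetensors
from safetensors.torch import save_file

DISABLE_MMAP = True  # in the real code this comes from args.disable_mmap
save_file({"w": torch.rand(4, 4)}, "example.safetensors")

sd = {}
with safetensors.safe_open("example.safetensors", framework="pt", device="cpu") as f:
    for k in f.keys():
        tensor = f.get_tensor(k)
        if DISABLE_MMAP:
            # Tensors from safe_open are typically backed by the mapped file;
            # copy=True forces a real in-memory copy so the mapping isn't kept alive.
            tensor = tensor.to(device="cpu", copy=True)
        sd[k] = tensor

print(sd["w"].shape)  # torch.Size([4, 4])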
@@ -260,7 +260,7 @@ class MaskComposite:
         visible_width, visible_height = (right - left, bottom - top,)

         source_portion = source[:, :visible_height, :visible_width]
-        destination_portion = destination[:, top:bottom, left:right]
+        destination_portion = output[:, top:bottom, left:right]

         if operation == "multiply":
            output[:, top:bottom, left:right] = destination_portion * source_portion
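The fix above makes the destination slice be read from output, the tensor that is actually written back. A toy reproduction of the "multiply" composite step (all shapes and coordinates are invented for illustration):

import torch

destination = torch.ones(1, 8, 8)
source = torch.full((1, 4, 4), 0.5)
output = destination.clone()

left, top = 2, 2
right, bottom = left + source.shape[2], top + source.shape[1]
visible_width, visible_height = (right - left, bottom - top,)

source_portion = source[:, :visible_height, :visible_width]
destination_portion = output[:, top:bottom, left:right]  # read from output, as in the fix

output[:, top:bottom, left:right] = destination_portion * source_portion
print(output[0, top:bottom, left:right])  # the 4x4 patch is now 0.5 everywhere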