Merge branch 'master' into dr-support-pip-cm

This commit is contained in:
Dr.Lt.Data 2025-08-04 07:25:20 +09:00
commit ac7e83448e
9 changed files with 40 additions and 20 deletions

View File

@@ -130,10 +130,21 @@ class ModelFileManager:
for file_name in filenames: for file_name in filenames:
try: try:
relative_path = os.path.relpath(os.path.join(dirpath, file_name), directory) full_path = os.path.join(dirpath, file_name)
result.append(relative_path) relative_path = os.path.relpath(full_path, directory)
except:
logging.warning(f"Warning: Unable to access {file_name}. Skipping this file.") # Get file metadata
file_info = {
"name": relative_path,
"pathIndex": pathIndex,
"modified": os.path.getmtime(full_path), # Add modification time
"created": os.path.getctime(full_path), # Add creation time
"size": os.path.getsize(full_path) # Add file size
}
result.append(file_info)
except Exception as e:
logging.warning(f"Warning: Unable to access {file_name}. Error: {e}. Skipping this file.")
continue continue
for d in subdirs: for d in subdirs:
@@ -144,7 +155,7 @@ class ModelFileManager:
logging.warning(f"Warning: Unable to access {path}. Skipping this path.") logging.warning(f"Warning: Unable to access {path}. Skipping this path.")
continue continue
return [{"name": f, "pathIndex": pathIndex} for f in result], dirs, time.perf_counter() return result, dirs, time.perf_counter()
def get_model_previews(self, filepath: str) -> list[str | BytesIO]: def get_model_previews(self, filepath: str) -> list[str | BytesIO]:
dirname = os.path.dirname(filepath) dirname = os.path.dirname(filepath)

View File

@@ -20,13 +20,15 @@ class FileInfo(TypedDict):
path: str path: str
size: int size: int
modified: int modified: int
created: int
def get_file_info(path: str, relative_to: str) -> FileInfo: def get_file_info(path: str, relative_to: str) -> FileInfo:
return { return {
"path": os.path.relpath(path, relative_to).replace(os.sep, '/'), "path": os.path.relpath(path, relative_to).replace(os.sep, '/'),
"size": os.path.getsize(path), "size": os.path.getsize(path),
"modified": os.path.getmtime(path) "modified": os.path.getmtime(path),
"created": os.path.getctime(path)
} }

View File

@@ -43,7 +43,6 @@ if TYPE_CHECKING:
def broadcast_image_to(tensor, target_batch_size, batched_number): def broadcast_image_to(tensor, target_batch_size, batched_number):
current_batch_size = tensor.shape[0] current_batch_size = tensor.shape[0]
#print(current_batch_size, target_batch_size)
if current_batch_size == 1: if current_batch_size == 1:
return tensor return tensor

View File

@@ -24,12 +24,17 @@ class CausalConv3d(ops.Conv3d):
self.padding[1], 2 * self.padding[0], 0) self.padding[1], 2 * self.padding[0], 0)
self.padding = (0, 0, 0) self.padding = (0, 0, 0)
def forward(self, x, cache_x=None): def forward(self, x, cache_x=None, cache_list=None, cache_idx=None):
if cache_list is not None:
cache_x = cache_list[cache_idx]
cache_list[cache_idx] = None
padding = list(self._padding) padding = list(self._padding)
if cache_x is not None and self._padding[4] > 0: if cache_x is not None and self._padding[4] > 0:
cache_x = cache_x.to(x.device) cache_x = cache_x.to(x.device)
x = torch.cat([cache_x, x], dim=2) x = torch.cat([cache_x, x], dim=2)
padding[4] -= cache_x.shape[2] padding[4] -= cache_x.shape[2]
del cache_x
x = F.pad(x, padding) x = F.pad(x, padding)
return super().forward(x) return super().forward(x)
@@ -166,7 +171,7 @@ class ResidualBlock(nn.Module):
if in_dim != out_dim else nn.Identity() if in_dim != out_dim else nn.Identity()
def forward(self, x, feat_cache=None, feat_idx=[0]): def forward(self, x, feat_cache=None, feat_idx=[0]):
h = self.shortcut(x) old_x = x
for layer in self.residual: for layer in self.residual:
if isinstance(layer, CausalConv3d) and feat_cache is not None: if isinstance(layer, CausalConv3d) and feat_cache is not None:
idx = feat_idx[0] idx = feat_idx[0]
@@ -178,12 +183,12 @@ class ResidualBlock(nn.Module):
cache_x.device), cache_x cache_x.device), cache_x
], ],
dim=2) dim=2)
x = layer(x, feat_cache[idx]) x = layer(x, cache_list=feat_cache, cache_idx=idx)
feat_cache[idx] = cache_x feat_cache[idx] = cache_x
feat_idx[0] += 1 feat_idx[0] += 1
else: else:
x = layer(x) x = layer(x)
return x + h return x + self.shortcut(old_x)
class AttentionBlock(nn.Module): class AttentionBlock(nn.Module):

View File

@@ -151,7 +151,7 @@ class ResidualBlock(nn.Module):
], ],
dim=2, dim=2,
) )
x = layer(x, feat_cache[idx]) x = layer(x, cache_list=feat_cache, cache_idx=idx)
feat_cache[idx] = cache_x feat_cache[idx] = cache_x
feat_idx[0] += 1 feat_idx[0] += 1
else: else:

View File

@@ -106,10 +106,12 @@ def model_sampling(model_config, model_type):
return ModelSampling(model_config) return ModelSampling(model_config)
def convert_tensor(extra, dtype): def convert_tensor(extra, dtype, device):
if hasattr(extra, "dtype"): if hasattr(extra, "dtype"):
if extra.dtype != torch.int and extra.dtype != torch.long: if extra.dtype != torch.int and extra.dtype != torch.long:
extra = extra.to(dtype) extra = extra.to(dtype=dtype, device=device)
else:
extra = extra.to(device=device)
return extra return extra
@@ -169,20 +171,21 @@ class BaseModel(torch.nn.Module):
dtype = self.manual_cast_dtype dtype = self.manual_cast_dtype
xc = xc.to(dtype) xc = xc.to(dtype)
device = xc.device
t = self.model_sampling.timestep(t).float() t = self.model_sampling.timestep(t).float()
if context is not None: if context is not None:
context = context.to(dtype) context = context.to(dtype=dtype, device=device)
extra_conds = {} extra_conds = {}
for o in kwargs: for o in kwargs:
extra = kwargs[o] extra = kwargs[o]
if hasattr(extra, "dtype"): if hasattr(extra, "dtype"):
extra = convert_tensor(extra, dtype) extra = convert_tensor(extra, dtype, device)
elif isinstance(extra, list): elif isinstance(extra, list):
ex = [] ex = []
for ext in extra: for ext in extra:
ex.append(convert_tensor(ext, dtype)) ex.append(convert_tensor(ext, dtype, device))
extra = ex extra = ex
extra_conds[o] = extra extra_conds[o] = extra

View File

@@ -1,3 +1,3 @@
# This file is automatically generated by the build process when version is # This file is automatically generated by the build process when version is
# updated in pyproject.toml. # updated in pyproject.toml.
__version__ = "0.3.47" __version__ = "0.3.48"

View File

@@ -1,6 +1,6 @@
[project] [project]
name = "ComfyUI" name = "ComfyUI"
version = "0.3.47" version = "0.3.48"
readme = "README.md" readme = "README.md"
license = { file = "LICENSE" } license = { file = "LICENSE" }
requires-python = ">=3.9" requires-python = ">=3.9"

View File

@@ -1,5 +1,5 @@
comfyui-frontend-package==1.23.4 comfyui-frontend-package==1.23.4
comfyui-workflow-templates==0.1.44 comfyui-workflow-templates==0.1.47
comfyui-embedded-docs==0.2.4 comfyui-embedded-docs==0.2.4
comfyui_manager comfyui_manager
torch torch