Mirror of https://github.com/comfyanonymous/ComfyUI.git, synced 2026-05-12 18:22:53 +08:00
Merge branch 'feature/unix-socket-support' of github.com-hnl1:hnl1/ComfyUI into feature/unix-socket-support
Commit 9f91e1d414

.github/workflows/stable-release.yml (vendored)
@@ -145,6 +145,8 @@ jobs:
         cp -r ComfyUI/.ci/windows_${{ inputs.rel_name }}_base_files/* ./
         cp ../update_comfyui_and_python_dependencies.bat ./update/
 
+        echo 'local-portable' > ComfyUI/.comfy_environment
+
         cd ..
 
         "C:\Program Files\7-Zip\7z.exe" a -t7z -m0=lzma2 -mx=9 -mfb=128 -md=768m -ms=on -mf=BCJ2 ComfyUI_windows_portable.7z ComfyUI_windows_portable
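The added echo line stamps the portable build before it is packed into the 7z archive: the standalone zip now ships a .comfy_environment file containing "local-portable" inside the ComfyUI directory. A minimal sketch of how runtime code might consume such a marker; the get_environment helper and its fallback value are illustrative assumptions, not part of this commit:

    import os

    def get_environment(base_path: str) -> str:
        # Hypothetical reader for the .comfy_environment marker written
        # by the release workflow above.
        marker = os.path.join(base_path, ".comfy_environment")
        try:
            with open(marker, encoding="utf-8") as f:
                return f.read().strip()  # e.g. "local-portable"
        except FileNotFoundError:
            # No marker file: likely a plain git checkout rather than
            # the packaged portable build.
            return "unknown"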
@@ -26,6 +26,7 @@ import uuid
 from typing import Callable, Optional
 
 import torch
+import tqdm
 
 import comfy.float
 import comfy.hooks
@@ -1651,7 +1652,11 @@ class ModelPatcherDynamic(ModelPatcher):
                 self.model.model_loaded_weight_memory += casted_buf.numel() * casted_buf.element_size()
 
         force_load_stat = f" Force pre-loaded {len(self.backup)} weights: {self.model.model_loaded_weight_memory // 1024} KB." if len(self.backup) > 0 else ""
-        logging.info(f"Model {self.model.__class__.__name__} prepared for dynamic VRAM loading. {allocated_size // (1024 ** 2)}MB Staged. {num_patches} patches attached.{force_load_stat}")
+        log_key = (self.patches_uuid, allocated_size, num_patches, len(self.backup), self.model.model_loaded_weight_memory)
+        in_loop = bool(getattr(tqdm.tqdm, "_instances", None))
+        level = logging.DEBUG if in_loop and getattr(self, "_last_prepare_log_key", None) == log_key else logging.INFO
+        self._last_prepare_log_key = log_key
+        logging.log(level, f"Model {self.model.__class__.__name__} prepared for dynamic VRAM loading. {allocated_size // (1024 ** 2)}MB Staged. {num_patches} patches attached.{force_load_stat}")
 
         self.model.device = device_to
         self.model.current_weight_patches_uuid = self.patches_uuid
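This hunk is what the new "import tqdm" above is for. The rewritten logging path builds a log_key tuple from everything that appears in the message, then demotes the message to DEBUG when the same key repeats while a tqdm progress bar is active, so repeated re-preparation inside a sampling loop no longer floods the log at INFO. Note that tqdm.tqdm._instances is tqdm-internal API (a WeakSet of live progress bars), which is why the diff reads it defensively via getattr. A self-contained sketch of the same pattern, with illustrative names:

    import logging
    import tqdm

    _last_log_key = None

    def log_dedup(key, message):
        # Log at INFO the first time a key is seen; drop to DEBUG when
        # the same key repeats while a tqdm progress bar is running.
        global _last_log_key
        # tqdm.tqdm._instances is internal API: a WeakSet of active bars,
        # empty (or absent) when no bar is live, hence the getattr().
        in_loop = bool(getattr(tqdm.tqdm, "_instances", None))
        level = logging.DEBUG if in_loop and _last_log_key == key else logging.INFO
        _last_log_key = key
        logging.log(level, message)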
@@ -561,7 +561,7 @@ class PromptServer():
                 buffer.seek(0)
 
                 return web.Response(body=buffer.read(), content_type=f'image/{image_format}',
-                                    headers={"Content-Disposition": f"attachment; filename=\"{filename}\""})
+                                    headers={"Content-Disposition": f"filename=\"{filename}\""})
 
             if 'channel' not in request.rel_url.query:
                 channel = 'rgba'
@@ -581,7 +581,7 @@ class PromptServer():
                 buffer.seek(0)
 
                 return web.Response(body=buffer.read(), content_type='image/png',
-                                    headers={"Content-Disposition": f"attachment; filename=\"{filename}\""})
+                                    headers={"Content-Disposition": f"filename=\"{filename}\""})
 
             elif channel == 'a':
                 with Image.open(file) as img:
@@ -598,7 +598,7 @@ class PromptServer():
                 alpha_buffer.seek(0)
 
                 return web.Response(body=alpha_buffer.read(), content_type='image/png',
-                                    headers={"Content-Disposition": f"attachment; filename=\"{filename}\""})
+                                    headers={"Content-Disposition": f"filename=\"{filename}\""})
             else:
                 # Use the content type from asset resolution if available,
                 # otherwise guess from the filename.
@@ -615,7 +615,7 @@ class PromptServer():
             return web.FileResponse(
                 file,
                 headers={
-                    "Content-Disposition": f"attachment; filename=\"{filename}\"",
+                    "Content-Disposition": f"filename=\"{filename}\"",
                     "Content-Type": content_type
                 }
             )
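All four server.py hunks make the same change: "attachment;" is dropped from the Content-Disposition header, presumably so browsers stop forcing a download dialog when an image is viewed. Note that RFC 6266 expects a disposition type before any parameters and tells receivers to treat unrecognized types as attachment, so 'inline; filename="..."' is the unambiguous way to request in-browser display. A minimal aiohttp sketch contrasting the variants; the handler, filename, and body are placeholders, not code from this commit:

    from aiohttp import web

    async def view_image(request: web.Request) -> web.Response:
        # Illustrative handler; filename and data stand in for the values
        # the real /view endpoint resolves.
        filename = "preview.png"
        data = b""
        return web.Response(
            body=data,
            content_type="image/png",
            # Before this commit: 'attachment; filename="..."' forces a download.
            # After: the disposition type is omitted; 'inline; filename="..."'
            # would be the RFC 6266-conforming way to ask for inline display.
            headers={"Content-Disposition": f'filename="{filename}"'},
        )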