mirror of https://github.com/comfyanonymous/ComfyUI.git
synced 2026-01-11 14:50:49 +08:00

Merge branch 'master' of github.com:comfyanonymous/ComfyUI
This commit is contained in: commit c0f072ee0f

74    app/logger.py    Normal file
@@ -0,0 +1,74 @@
import io
import logging
import sys
import threading
from collections import deque
from datetime import datetime

logs = None
stdout_interceptor = None
stderr_interceptor = None


class LogInterceptor(io.TextIOWrapper):
    def __init__(self, stream, *args, **kwargs):
        buffer = stream.buffer
        encoding = stream.encoding
        super().__init__(buffer, *args, **kwargs, encoding=encoding, line_buffering=stream.line_buffering)
        self._lock = threading.Lock()
        self._flush_callbacks = []
        self._logs_since_flush = []

    def write(self, data):
        entry = {"t": datetime.now().isoformat(), "m": data}
        with self._lock:
            self._logs_since_flush.append(entry)

            # Simple handling for cr to overwrite the last output if it isnt a full line
            # else logs just get full of progress messages
            if isinstance(data, str) and data.startswith("\r") and not logs[-1]["m"].endswith("\n"):
                logs.pop()
            logs.append(entry)
            super().write(data)

    def flush(self):
        super().flush()
        for cb in self._flush_callbacks:
            cb(self._logs_since_flush)
            self._logs_since_flush = []

    def on_flush(self, callback):
        self._flush_callbacks.append(callback)


def get_logs():
    return logs


def on_flush(callback):
    if stdout_interceptor is not None:
        stdout_interceptor.on_flush(callback)
    if stderr_interceptor is not None:
        stderr_interceptor.on_flush(callback)


def setup_logger(log_level: str = 'INFO', capacity: int = 300):
    global logs
    if logs:
        return

    # Override output streams and log to buffer
    logs = deque(maxlen=capacity)

    global stdout_interceptor
    global stderr_interceptor
    stdout_interceptor = sys.stdout = LogInterceptor(sys.stdout)
    stderr_interceptor = sys.stderr = LogInterceptor(sys.stderr)

    # Setup default global logger
    logger = logging.getLogger()
    logger.setLevel(log_level)

    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(logging.Formatter("%(message)s"))
    logger.addHandler(stream_handler)
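For orientation, a minimal usage sketch of the buffer-and-intercept logger added above (assuming the module is importable as app.logger, which matches the import used by terminal_service.py below; the callback body is illustrative):

# Hypothetical usage, not part of the commit
from app import logger

logger.setup_logger(log_level='INFO', capacity=300)

def forward_entries(entries):
    # receives the [{"t": iso_timestamp, "m": text}, ...] captured since the last flush
    for e in entries:
        pass  # e.g. push e["t"], e["m"] to a websocket or a file

logger.on_flush(forward_entries)

print("hello")               # intercepted: appended to the ring buffer, then echoed as usual
recent = logger.get_logs()   # deque holding at most `capacity` recent entries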
@@ -3,6 +3,7 @@ from typing import Optional
from aiohttp import web

from ...services.file_service import FileService
from ...services.terminal_service import TerminalService
from ....cmd.folder_paths import models_dir, user_directory, output_directory, folder_names_and_paths  # pylint: disable=import-error


@@ -14,7 +15,7 @@ class InternalRoutes:

    '''

    def __init__(self):
    def __init__(self, prompt_server):
        self.routes: web.RouteTableDef = web.RouteTableDef()
        self._app: Optional[web.Application] = None
        self.file_service = FileService({
@@ -22,6 +23,8 @@ class InternalRoutes:
            "user": user_directory,
            "output": output_directory
        })
        self.prompt_server = prompt_server
        self.terminal_service = TerminalService(prompt_server)

    def setup_routes(self):
        @self.routes.get('/files')
@@ -37,8 +40,31 @@ class InternalRoutes:

        @self.routes.get('/logs')
        async def get_logs(request):
            # todo: applications really shouldn't serve logs like this
            return web.json_response({})
            # todo: enable logs
            # return web.json_response("".join([(l["t"] + " - " + l["m"]) for l in app.logger.get_logs()]))

        @self.routes.get('/logs/raw')
        async def get_logs(request):
            self.terminal_service.update_size()
            return web.json_response({
                # todo: enable logs
                # "entries": list(app.logger.get_logs()),
                "size": {"cols": self.terminal_service.cols, "rows": self.terminal_service.rows}
            })

        @self.routes.patch('/logs/subscribe')
        async def subscribe_logs(request):
            json_data = await request.json()
            client_id = json_data["clientId"]
            enabled = json_data["enabled"]
            if enabled:
                self.terminal_service.subscribe(client_id)
            else:
                self.terminal_service.unsubscribe(client_id)

            return web.Response(status=200)


        @self.routes.get('/folder_paths')
        async def get_folder_paths(request):
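A quick sketch of how a client could exercise the new log routes (the /internal mount prefix, host and port are assumptions; the payload keys come from subscribe_logs above):

# Hypothetical client calls, illustrative only
import requests

base = "http://127.0.0.1:8188/internal"

# Terminal size; the "entries" field stays disabled per the todo above
print(requests.get(f"{base}/logs/raw").json())

# Subscribe or unsubscribe a websocket client id to live log pushes
requests.patch(f"{base}/logs/subscribe",
               json={"clientId": "my-client-id", "enabled": True})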
47    comfy/api_server/services/terminal_service.py    Normal file

@@ -0,0 +1,47 @@
from app.logger import on_flush
import os


class TerminalService:
    def __init__(self, server):
        self.server = server
        self.cols = None
        self.rows = None
        self.subscriptions = set()
        on_flush(self.send_messages)

    def update_size(self):
        sz = os.get_terminal_size()
        changed = False
        if sz.columns != self.cols:
            self.cols = sz.columns
            changed = True

        if sz.lines != self.rows:
            self.rows = sz.lines
            changed = True

        if changed:
            return {"cols": self.cols, "rows": self.rows}

        return None

    def subscribe(self, client_id):
        self.subscriptions.add(client_id)

    def unsubscribe(self, client_id):
        self.subscriptions.discard(client_id)

    def send_messages(self, entries):
        if not len(entries) or not len(self.subscriptions):
            return

        new_size = self.update_size()

        for client_id in self.subscriptions.copy():  # prevent: Set changed size during iteration
            if client_id not in self.server.sockets:
                # Automatically unsub if the socket has disconnected
                self.unsubscribe(client_id)
                continue

            self.server.send_sync("logs", {"entries": entries, "size": new_size}, client_id)
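To see the flush-to-websocket flow end to end, a small sketch with a stand-in server object (the sockets attribute and send_sync signature mirror what TerminalService expects; PromptServer itself is not required):

# Illustrative stub, not part of the commit
from comfy.api_server.services.terminal_service import TerminalService  # module path as added above

class FakeServer:
    def __init__(self):
        self.sockets = {"client-1": object()}   # ids of connected websockets

    def send_sync(self, event, data, client_id):
        print(event, client_id, len(data["entries"]), "entries", data["size"])

service = TerminalService(FakeServer())
service.subscribe("client-1")
# From now on, every flush of the intercepted stdout/stderr ends up in
# service.send_messages(entries), which pushes a "logs" event to client-1.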
@@ -1,9 +1,13 @@
from __future__ import annotations

import glob
import json
import logging
import os
import re
import shutil
import uuid
from typing import TypedDict
from urllib import parse

from aiohttp import web
@@ -15,15 +19,30 @@ from ..cmd import folder_paths
default_user = "default"


class FileInfo(TypedDict):
    path: str
    size: int
    modified: int


def get_file_info(path: str, relative_to: str) -> FileInfo:
    return {
        "path": os.path.relpath(path, relative_to).replace(os.sep, '/'),
        "size": os.path.getsize(path),
        "modified": os.path.getmtime(path)
    }


class UserManager():
    def __init__(self):
        user_directory = folder_paths.get_user_directory()

        self.default_user = "default"
        self.users_file = os.path.join(user_directory, "users.json")
        self.settings = AppSettings(self)
        if not os.path.exists(user_directory):
            os.makedirs(user_directory, exist_ok=True)
            os.mkdir(user_directory)
            if not args.multi_user:
                print("****** User settings have been changed to be stored on the server instead of browser storage. ******")
                print("****** For multi-user setups add the --multi-user CLI argument to enable multiple user profiles. ******")

        if args.multi_user:
            if os.path.isfile(self.get_users_file()):
@@ -60,7 +79,7 @@ class UserManager():

        # prevent leaving /{type}
        if os.path.commonpath((root_dir, user_root)) != root_dir:
            raise PermissionError()
            return None

        if file is not None:
            # Check if filename is url encoded
@@ -70,7 +89,7 @@ class UserManager():
            # prevent leaving /{type}/{user}
            path = os.path.abspath(os.path.join(user_root, file))
            if os.path.commonpath((user_root, path)) != user_root:
                raise PermissionError()
                return None

        parent = os.path.split(path)[0]

@@ -145,13 +164,17 @@ class UserManager():
            directory = request.rel_url.query.get('dir', '')
            if not directory:
                return web.Response(status=400, text="Directory not provided")

            path = self.get_request_user_filepath(request, directory)
            if not path:
                return web.Response(status=403, text="Invalid directory")

            if not os.path.exists(path):
                return web.Response(status=404, text="Directory not found")

            recurse = request.rel_url.query.get('recurse', '').lower() == "true"
            full_info = request.rel_url.query.get('full_info', '').lower() == "true"
            split_path = request.rel_url.query.get('split', '').lower() == "true"

            # Use different patterns based on whether we're recursing or not
            if recurse:
@@ -159,25 +182,21 @@ class UserManager():
            else:
                pattern = os.path.join(glob.escape(path), '*')

            results = glob.glob(pattern, recursive=recurse)
            def process_full_path(full_path: str) -> FileInfo | str | list[str]:
                if full_info:
                    return get_file_info(full_path, path)

            if full_info:
                results = [
                    {
                        'path': os.path.relpath(x, path).replace(os.sep, '/'),
                        'size': os.path.getsize(x),
                        'modified': os.path.getmtime(x)
                    } for x in results if os.path.isfile(x)
                ]
            else:
                results = [
                    os.path.relpath(x, path).replace(os.sep, '/')
                    for x in results
                    if os.path.isfile(x)
                ]
            split_path = request.rel_url.query.get('split', '').lower() == "true"
            if split_path and not full_info:
                results = [[x] + x.split('/') for x in results]
                rel_path = os.path.relpath(full_path, path).replace(os.sep, '/')
                if split_path:
                    return [rel_path] + rel_path.split('/')

                return rel_path

            results = [
                process_full_path(full_path)
                for full_path in glob.glob(pattern, recursive=recurse)
                if os.path.isfile(full_path)
            ]

            return web.json_response(results)

@@ -205,19 +224,51 @@ class UserManager():

        @routes.post("/userdata/{file}")
        async def post_userdata(request):
            """
            Upload or update a user data file.

            This endpoint handles file uploads to a user's data directory, with options for
            controlling overwrite behavior and response format.

            Query Parameters:
            - overwrite (optional): If "false", prevents overwriting existing files. Defaults to "true".
            - full_info (optional): If "true", returns detailed file information (path, size, modified time).
              If "false", returns only the relative file path.

            Path Parameters:
            - file: The target file path (URL encoded if necessary).

            Returns:
            - 400: If 'file' parameter is missing.
            - 403: If the requested path is not allowed.
            - 409: If overwrite=false and the file already exists.
            - 200: JSON response with either:
              - Full file information (if full_info=true)
              - Relative file path (if full_info=false)

            The request body should contain the raw file content to be written.
            """
            path = get_user_data_path(request)
            if not isinstance(path, str):
                return path

            overwrite = request.query["overwrite"] != "false"
            overwrite = request.query.get("overwrite", 'true') != "false"
            full_info = request.query.get('full_info', 'false').lower() == "true"

            if not overwrite and os.path.exists(path):
                return web.Response(status=409)
                return web.Response(status=409, text="File already exists")

            body = await request.read()

            with open(path, "wb") as f:
                f.write(body)
            resp = os.path.relpath(path, self.get_request_user_filepath(request, None))

            user_path = self.get_request_user_filepath(request, None)
            if full_info:
                resp = get_file_info(path, user_path)
            else:
                resp = os.path.relpath(path, user_path)

            return web.json_response(resp)

        @routes.delete("/userdata/{file}")
@@ -232,6 +283,30 @@ class UserManager():

        @routes.post("/userdata/{file}/move/{dest}")
        async def move_userdata(request):
            """
            Move or rename a user data file.

            This endpoint handles moving or renaming files within a user's data directory, with options for
            controlling overwrite behavior and response format.

            Path Parameters:
            - file: The source file path (URL encoded if necessary)
            - dest: The destination file path (URL encoded if necessary)

            Query Parameters:
            - overwrite (optional): If "false", prevents overwriting existing files. Defaults to "true".
            - full_info (optional): If "true", returns detailed file information (path, size, modified time).
              If "false", returns only the relative file path.

            Returns:
            - 400: If either 'file' or 'dest' parameter is missing
            - 403: If either requested path is not allowed
            - 404: If the source file does not exist
            - 409: If overwrite=false and the destination file already exists
            - 200: JSON response with either:
              - Full file information (if full_info=true)
              - Relative file path (if full_info=false)
            """
            source = get_user_data_path(request, check_exists=True)
            if not isinstance(source, str):
                return source
@@ -240,12 +315,19 @@ class UserManager():
            if not isinstance(source, str):
                return dest

            overwrite = request.query["overwrite"] != "false"
            if not overwrite and os.path.exists(dest):
                return web.Response(status=409)
            overwrite = request.query.get("overwrite", 'true') != "false"
            full_info = request.query.get('full_info', 'false').lower() == "true"

            print(f"moving '{source}' -> '{dest}'")
            if not overwrite and os.path.exists(dest):
                return web.Response(status=409, text="File already exists")

            logging.info(f"moving '{source}' -> '{dest}'")
            shutil.move(source, dest)

            resp = os.path.relpath(dest, self.get_request_user_filepath(request, None))
            user_path = self.get_request_user_filepath(request, None)
            if full_info:
                resp = get_file_info(dest, user_path)
            else:
                resp = os.path.relpath(dest, user_path)

            return web.json_response(resp)
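In practice the new full_info flag only changes the response shape of the userdata endpoints; a sketch of both variants (host and file name are made up, the FileInfo keys come from get_file_info above):

# Hypothetical requests against a local server
import requests

base = "http://127.0.0.1:8188"

# Default: the JSON body is just the relative path, e.g. "workflows/example.json"
requests.post(f"{base}/userdata/workflows%2Fexample.json", data=b"{}")

# full_info=true: {"path": "workflows/example.json", "size": 2, "modified": 1730000000.0}
requests.post(f"{base}/userdata/workflows%2Fexample.json",
              data=b"{}", params={"overwrite": "true", "full_info": "true"})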
@@ -1,9 +1,11 @@
import os
import importlib.util
from ..cli_args import args
import os
import subprocess

#Can't use pytorch to get the GPU names because the cuda malloc has to be set before the first import.
from ..cli_args import args


# Can't use pytorch to get the GPU names because the cuda malloc has to be set before the first import.
def get_gpu_names():
    if os.name == 'nt':
        import ctypes
@@ -33,6 +35,7 @@ def get_gpu_names():
                device_index += 1
                gpu_names.add(device_info.DeviceString.decode('utf-8'))
            return gpu_names

        return enum_display_devices()
    else:
        gpu_names = set()
@@ -45,13 +48,15 @@ def get_gpu_names():
            pass
        return gpu_names


blacklist = {"GeForce GTX TITAN X", "GeForce GTX 980", "GeForce GTX 970", "GeForce GTX 960", "GeForce GTX 950", "GeForce 945M",
             "GeForce 940M", "GeForce 930M", "GeForce 920M", "GeForce 910M", "GeForce GTX 750", "GeForce GTX 745", "Quadro K620",
             "Quadro K1200", "Quadro K2200", "Quadro M500", "Quadro M520", "Quadro M600", "Quadro M620", "Quadro M1000",
             "Quadro M1200", "Quadro M2000", "Quadro M2200", "Quadro M3000", "Quadro M4000", "Quadro M5000", "Quadro M5500", "Quadro M6000",
             "GeForce MX110", "GeForce MX130", "GeForce 830M", "GeForce 840M", "GeForce GTX 850M", "GeForce GTX 860M",
             "GeForce GTX 1650", "GeForce GTX 1630", "Tesla M4", "Tesla M6", "Tesla M10", "Tesla M40", "Tesla M60"
}
             "GeForce 940M", "GeForce 930M", "GeForce 920M", "GeForce 910M", "GeForce GTX 750", "GeForce GTX 745", "Quadro K620",
             "Quadro K1200", "Quadro K2200", "Quadro M500", "Quadro M520", "Quadro M600", "Quadro M620", "Quadro M1000",
             "Quadro M1200", "Quadro M2000", "Quadro M2200", "Quadro M3000", "Quadro M4000", "Quadro M5000", "Quadro M5500", "Quadro M6000",
             "GeForce MX110", "GeForce MX130", "GeForce 830M", "GeForce 840M", "GeForce GTX 850M", "GeForce GTX 860M",
             "GeForce GTX 1650", "GeForce GTX 1630", "Tesla M4", "Tesla M6", "Tesla M10", "Tesla M40", "Tesla M60"
}


def cuda_malloc_supported():
    try:
@@ -77,12 +82,11 @@ if not args.cuda_malloc:
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        version = module.__version__
        if int(version[0]) >= 2: #enable by default for torch version 2.0 and up
        if int(version[0]) >= 2:  # enable by default for torch version 2.0 and up
            args.cuda_malloc = cuda_malloc_supported()
    except:
        pass


if args.cuda_malloc and not args.disable_cuda_malloc:
    env_var = os.environ.get('PYTORCH_CUDA_ALLOC_CONF', None)
    if env_var is None:
@@ -58,7 +58,12 @@ class Latent2RGBPreviewer(LatentPreviewer):
        if self.latent_rgb_factors_bias is not None:
            self.latent_rgb_factors_bias = self.latent_rgb_factors_bias.to(dtype=x0.dtype, device=x0.device)

        latent_image = torch.nn.functional.linear(x0[0].permute(1, 2, 0), self.latent_rgb_factors, bias=self.latent_rgb_factors_bias)
        if x0.ndim == 5:
            x0 = x0[0, :, 0]
        else:
            x0 = x0[0]

        latent_image = torch.nn.functional.linear(x0.movedim(0, -1), self.latent_rgb_factors, bias=self.latent_rgb_factors_bias)
        # latent_image = x0[0].permute(1, 2, 0) @ self.latent_rgb_factors

        return preview_to_image(latent_image)
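For intuition, a shape-level sketch of the projection this hunk generalizes to 5-D (video) latents; the channel count and factor values are made up:

import torch

latent_channels = 16
x0 = torch.randn(1, latent_channels, 1, 32, 32)    # (B, C, T, H, W) video-style latent
factors = torch.randn(3, latent_channels)           # rows map latent channels to R, G, B
bias = torch.zeros(3)

if x0.ndim == 5:
    x0 = x0[0, :, 0]        # first batch item, first frame -> (C, H, W)
else:
    x0 = x0[0]

rgb = torch.nn.functional.linear(x0.movedim(0, -1), factors, bias=bias)
print(rgb.shape)            # torch.Size([32, 32, 3])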
@@ -40,12 +40,18 @@ from ..cli_args import args

if args.cuda_device is not None:
    os.environ['CUDA_VISIBLE_DEVICES'] = str(args.cuda_device)
    os.environ['HIP_VISIBLE_DEVICES'] = str(args.cuda_device)
    logging.info("Set cuda device to: {}".format(args.cuda_device))

if args.deterministic:
    if 'CUBLAS_WORKSPACE_CONFIG' not in os.environ:
        os.environ['CUBLAS_WORKSPACE_CONFIG'] = ":4096:8"

try:
    from . import cuda_malloc
except Exception:
    pass

os.environ["OPENCV_IO_ENABLE_OPENEXR"] = "1"
@@ -170,7 +170,7 @@ class PromptServer(ExecutorToClientProgress):

        self.address: str = "0.0.0.0"
        self.user_manager = UserManager()
        self.internal_routes = InternalRoutes()
        self.internal_routes = InternalRoutes(self)
        # todo: this is probably read by custom nodes elsewhere
        self.supports: List[str] = ["custom_nodes_from_web"]
        self.prompt_queue: AbstractPromptQueue | AsyncAbstractPromptQueue | None = None
@@ -190,7 +190,21 @@ class Mochi(LatentFormat):
                                          0.9294154431013696, 1.3720942357788521, 0.881393668867029,
                                          0.9168315692124348, 0.9185249279345552, 0.9274757570805041]).view(1, self.latent_channels, 1, 1, 1)

        self.latent_rgb_factors = None #TODO
        self.latent_rgb_factors =[
            [-0.0069, -0.0045,  0.0018],
            [ 0.0154, -0.0692, -0.0274],
            [ 0.0333,  0.0019,  0.0206],
            [-0.1390,  0.0628,  0.1678],
            [-0.0725,  0.0134, -0.1898],
            [ 0.0074, -0.0270, -0.0209],
            [-0.0176, -0.0277, -0.0221],
            [ 0.5294,  0.5204,  0.3852],
            [-0.0326, -0.0446, -0.0143],
            [-0.0659,  0.0153, -0.0153],
            [ 0.0185, -0.0217,  0.0014],
            [-0.0396, -0.0495, -0.0281]
        ]
        self.latent_rgb_factors_bias = [-0.0940, -0.1418, -0.1453]
        self.taesd_decoder_name = None #TODO

    def process_in(self, latent):
@@ -437,7 +437,8 @@ class MMDiT(nn.Module):
        pos_encoding = pos_encoding[:,from_h:from_h+h,from_w:from_w+w]
        return x + pos_encoding.reshape(1, -1, self.positional_encoding.shape[-1])

    def forward(self, x, timestep, context, **kwargs):
    def forward(self, x, timestep, context, transformer_options={}, **kwargs):
        patches_replace = transformer_options.get("patches_replace", {})
        # patchify x, add PE
        b, c, h, w = x.shape

@@ -458,15 +459,36 @@ class MMDiT(nn.Module):

        global_cond = self.t_embedder(t, x.dtype)  # B, D

        blocks_replace = patches_replace.get("dit", {})
        if len(self.double_layers) > 0:
            for layer in self.double_layers:
                c, x = layer(c, x, global_cond, **kwargs)
            for i, layer in enumerate(self.double_layers):
                if ("double_block", i) in blocks_replace:
                    def block_wrap(args):
                        out = {}
                        out["txt"], out["img"] = layer(args["txt"],
                                                       args["img"],
                                                       args["vec"])
                        return out
                    out = blocks_replace[("double_block", i)]({"img": x, "txt": c, "vec": global_cond}, {"original_block": block_wrap})
                    c = out["txt"]
                    x = out["img"]
                else:
                    c, x = layer(c, x, global_cond, **kwargs)

        if len(self.single_layers) > 0:
            c_len = c.size(1)
            cx = torch.cat([c, x], dim=1)
            for layer in self.single_layers:
                cx = layer(cx, global_cond, **kwargs)
            for i, layer in enumerate(self.single_layers):
                if ("single_block", i) in blocks_replace:
                    def block_wrap(args):
                        out = {}
                        out["img"] = layer(args["img"], args["vec"])
                        return out

                    out = blocks_replace[("single_block", i)]({"img": cx, "vec": global_cond}, {"original_block": block_wrap})
                    cx = out["img"]
                else:
                    cx = layer(cx, global_cond, **kwargs)

            x = cx[:, c_len:]
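The same patches_replace hook recurs in the Flux and Mochi forwards below: a patch is registered under transformer_options["patches_replace"]["dit"] keyed by ("double_block", i) or ("single_block", i), and is called with the block inputs plus the original block as a callable. A minimal registration sketch (actually routing these options through a sampler call is omitted; names are illustrative):

# Sketch of a dit block-replace patch
def my_double_block_patch(args, extra):
    out = extra["original_block"](args)   # run the untouched block with its inputs
    out["img"] = out["img"] * 1.0         # then post-process the outputs here
    return out

transformer_options = {}
transformer_options.setdefault("patches_replace", {}).setdefault("dit", {})[("double_block", 3)] = my_double_block_patch
# model.forward(x, timestep, context, transformer_options=transformer_options, ...)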
@@ -97,7 +97,9 @@ class Flux(nn.Module):
        y: Tensor,
        guidance: Tensor = None,
        control=None,
        transformer_options={},
    ) -> Tensor:
        patches_replace = transformer_options.get("patches_replace", {})
        if img.ndim != 3 or txt.ndim != 3:
            raise ValueError("Input img and txt tensors must have 3 dimensions.")

@@ -115,8 +117,19 @@ class Flux(nn.Module):
        ids = torch.cat((txt_ids, img_ids), dim=1)
        pe = self.pe_embedder(ids)

        blocks_replace = patches_replace.get("dit", {})
        for i, block in enumerate(self.double_blocks):
            img, txt = block(img=img, txt=txt, vec=vec, pe=pe)
            if ("double_block", i) in blocks_replace:
                def block_wrap(args):
                    out = {}
                    out["img"], out["txt"] = block(img=args["img"], txt=args["txt"], vec=args["vec"], pe=args["pe"])
                    return out

                out = blocks_replace[("double_block", i)]({"img": img, "txt": txt, "vec": vec, "pe": pe}, {"original_block": block_wrap})
                txt = out["txt"]
                img = out["img"]
            else:
                img, txt = block(img=img, txt=txt, vec=vec, pe=pe)

        if control is not None: # Controlnet
            control_i = control.get("input")
@@ -128,7 +141,16 @@ class Flux(nn.Module):
        img = torch.cat((txt, img), 1)

        for i, block in enumerate(self.single_blocks):
            img = block(img, vec=vec, pe=pe)
            if ("single_block", i) in blocks_replace:
                def block_wrap(args):
                    out = {}
                    out["img"] = block(args["img"], vec=args["vec"], pe=args["pe"])
                    return out

                out = blocks_replace[("single_block", i)]({"img": img, "vec": vec, "pe": pe}, {"original_block": block_wrap})
                img = out["img"]
            else:
                img = block(img, vec=vec, pe=pe)

        if control is not None: # Controlnet
            control_o = control.get("output")
@@ -142,7 +164,7 @@ class Flux(nn.Module):
        img = self.final_layer(img, vec)  # (N, T, patch_size ** 2 * out_channels)
        return img

    def forward(self, x, timestep, context, y, guidance, control=None, **kwargs):
    def forward(self, x, timestep, context, y, guidance, control=None, transformer_options={}, **kwargs):
        bs, c, h, w = x.shape
        patch_size = 2
        x = common_dit.pad_to_patch_size(x, (patch_size, patch_size))
@@ -152,10 +174,10 @@ class Flux(nn.Module):
        h_len = ((h + (patch_size // 2)) // patch_size)
        w_len = ((w + (patch_size // 2)) // patch_size)
        img_ids = torch.zeros((h_len, w_len, 3), device=x.device, dtype=x.dtype)
        img_ids[:, :, 1] = torch.linspace(0, h_len - 1, steps=h_len, device=x.device, dtype=x.dtype).unsqueeze(1)
        img_ids[:, :, 2] = torch.linspace(0, w_len - 1, steps=w_len, device=x.device, dtype=x.dtype).unsqueeze(0)
        img_ids[:, :, 1] = img_ids[:, :, 1] + torch.linspace(0, h_len - 1, steps=h_len, device=x.device, dtype=x.dtype).unsqueeze(1)
        img_ids[:, :, 2] = img_ids[:, :, 2] + torch.linspace(0, w_len - 1, steps=w_len, device=x.device, dtype=x.dtype).unsqueeze(0)
        img_ids = repeat(img_ids, "h w c -> b (h w) c", b=bs)

        txt_ids = torch.zeros((bs, context.shape[1], 3), device=x.device, dtype=x.dtype)
        out = self.forward_orig(img, img_ids, context, txt_ids, timestep, y, guidance, control)
        out = self.forward_orig(img, img_ids, context, txt_ids, timestep, y, guidance, control, transformer_options)
        return rearrange(out, "b (h w) (c ph pw) -> b c (h ph) (w pw)", h=h_len, w=w_len, ph=2, pw=2)[:,:,:h,:w]
@@ -494,8 +494,9 @@ class AsymmDiTJoint(nn.Module):
        packed_indices: Dict[str, torch.Tensor] = None,
        rope_cos: torch.Tensor = None,
        rope_sin: torch.Tensor = None,
        control=None, **kwargs
        control=None, transformer_options={}, **kwargs
    ):
        patches_replace = transformer_options.get("patches_replace", {})
        y_feat = context
        y_mask = attention_mask
        sigma = timestep
@@ -515,15 +516,32 @@ class AsymmDiTJoint(nn.Module):
        )
        del y_mask

        blocks_replace = patches_replace.get("dit", {})
        for i, block in enumerate(self.blocks):
            x, y_feat = block(
                x,
                c,
                y_feat,
                rope_cos=rope_cos,
                rope_sin=rope_sin,
                crop_y=num_tokens,
            )  # (B, M, D), (B, L, D)
            if ("double_block", i) in blocks_replace:
                def block_wrap(args):
                    out = {}
                    out["img"], out["txt"] = block(
                        args["img"],
                        args["vec"],
                        args["txt"],
                        rope_cos=args["rope_cos"],
                        rope_sin=args["rope_sin"],
                        crop_y=args["num_tokens"]
                    )
                    return out
                out = blocks_replace[("double_block", i)]({"img": x, "txt": y_feat, "vec": c, "rope_cos": rope_cos, "rope_sin": rope_sin, "num_tokens": num_tokens}, {"original_block": block_wrap})
                y_feat = out["txt"]
                x = out["img"]
            else:
                x, y_feat = block(
                    x,
                    c,
                    y_feat,
                    rope_cos=rope_cos,
                    rope_sin=rope_sin,
                    crop_y=num_tokens,
                )  # (B, M, D), (B, L, D)
        del y_feat  # Final layers don't use dense text features.

        x = self.final_layer(x, c)  # (B, M, patch_size ** 2 * out_channels)
@@ -325,8 +325,9 @@ def model_config_from_unet(state_dict, unet_key_prefix, use_base_if_no_match=Fal
    if model_config is None and use_base_if_no_match:
        model_config = supported_models_base.BASE(unet_config)

    scaled_fp8_weight = state_dict.get("{}scaled_fp8".format(unet_key_prefix), None)
    if scaled_fp8_weight is not None:
    scaled_fp8_key = "{}scaled_fp8".format(unet_key_prefix)
    if scaled_fp8_key in state_dict:
        scaled_fp8_weight = state_dict.pop(scaled_fp8_key)
        model_config.scaled_fp8 = scaled_fp8_weight.dtype
        if model_config.scaled_fp8 == torch.float32:
            model_config.scaled_fp8 = torch.float8_e4m3fn
@@ -6,6 +6,26 @@ import torch
from .ldm.modules.diffusionmodules.util import make_beta_schedule


def rescale_zero_terminal_snr_sigmas(sigmas):
    alphas_cumprod = 1 / ((sigmas * sigmas) + 1)
    alphas_bar_sqrt = alphas_cumprod.sqrt()

    # Store old values.
    alphas_bar_sqrt_0 = alphas_bar_sqrt[0].clone()
    alphas_bar_sqrt_T = alphas_bar_sqrt[-1].clone()

    # Shift so the last timestep is zero.
    alphas_bar_sqrt -= (alphas_bar_sqrt_T)

    # Scale so the first timestep is back to the old value.
    alphas_bar_sqrt *= alphas_bar_sqrt_0 / (alphas_bar_sqrt_0 - alphas_bar_sqrt_T)

    # Convert alphas_bar_sqrt to betas
    alphas_bar = alphas_bar_sqrt ** 2  # Revert sqrt
    alphas_bar[-1] = 4.8973451890853435e-08
    return ((1 - alphas_bar) / alphas_bar) ** 0.5


class ModelSampling(Protocol):
    def calculate_input(self, sigma, noise) -> torch.Tensor:
        ...
@@ -66,7 +86,7 @@ class CONST(ModelSampling):


class ModelSamplingDiscrete(torch.nn.Module):
    def __init__(self, model_config=None):
    def __init__(self, model_config=None, zsnr=None):
        super().__init__()

        if model_config is not None:
@@ -79,11 +99,14 @@ class ModelSamplingDiscrete(torch.nn.Module):
        linear_end = sampling_settings.get("linear_end", 0.012)
        timesteps = sampling_settings.get("timesteps", 1000)

        self._register_schedule(given_betas=None, beta_schedule=beta_schedule, timesteps=timesteps, linear_start=linear_start, linear_end=linear_end, cosine_s=8e-3)
        if zsnr is None:
            zsnr = sampling_settings.get("zsnr", False)

        self._register_schedule(given_betas=None, beta_schedule=beta_schedule, timesteps=timesteps, linear_start=linear_start, linear_end=linear_end, cosine_s=8e-3, zsnr=zsnr)
        self.sigma_data = 1.0

    def _register_schedule(self, given_betas=None, beta_schedule="linear", timesteps=1000,
                           linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):
                           linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3, zsnr=False):
        if given_betas is not None:
            betas = given_betas
        else:
@@ -101,6 +124,9 @@ class ModelSamplingDiscrete(torch.nn.Module):
        # self.register_buffer('alphas_cumprod_prev', torch.tensor(alphas_cumprod_prev, dtype=torch.float32))

        sigmas = ((1 - alphas_cumprod) / alphas_cumprod) ** 0.5
        if zsnr:
            sigmas = rescale_zero_terminal_snr_sigmas(sigmas)

        self.set_sigmas(sigmas)

    def set_sigmas(self, sigmas):
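A small numeric check of what the rescaling does: after the shift and scale, the terminal alpha_bar is pinned near zero, so the last sigma becomes very large (zero terminal SNR) while the first sigma is preserved. The schedule below is made up for illustration:

import torch

def rescale_zero_terminal_snr_sigmas(sigmas):
    # same math as the function added above
    alphas_cumprod = 1 / ((sigmas * sigmas) + 1)
    alphas_bar_sqrt = alphas_cumprod.sqrt()
    a0, aT = alphas_bar_sqrt[0].clone(), alphas_bar_sqrt[-1].clone()
    alphas_bar_sqrt -= aT                      # last timestep -> 0
    alphas_bar_sqrt *= a0 / (a0 - aT)          # first timestep restored
    alphas_bar = alphas_bar_sqrt ** 2
    alphas_bar[-1] = 4.8973451890853435e-08    # keep the last sigma finite
    return ((1 - alphas_bar) / alphas_bar) ** 0.5

sigmas = torch.linspace(0.01, 15.0, 1000)
rescaled = rescale_zero_terminal_snr_sigmas(sigmas)
print(sigmas[0].item(), rescaled[0].item())    # first sigma essentially unchanged
print(sigmas[-1].item(), rescaled[-1].item())  # last sigma blows up to roughly 4.5e3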
@@ -289,15 +289,21 @@ class VAEDecodeTiled:
    @classmethod
    def INPUT_TYPES(s):
        return {"required": {"samples": ("LATENT", ), "vae": ("VAE", ),
                             "tile_size": ("INT", {"default": 512, "min": 320, "max": 4096, "step": 64})
                             "tile_size": ("INT", {"default": 512, "min": 128, "max": 4096, "step": 32}),
                             "overlap": ("INT", {"default": 64, "min": 0, "max": 4096, "step": 32}),
                             }}
    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "decode"

    CATEGORY = "_for_testing"

    def decode(self, vae, samples, tile_size):
        return (vae.decode_tiled(samples["samples"], tile_x=tile_size // 8, tile_y=tile_size // 8, ), )
    def decode(self, vae, samples, tile_size, overlap=64):
        if tile_size < overlap * 4:
            overlap = tile_size // 4
        images = vae.decode_tiled(samples["samples"], tile_x=tile_size // 8, tile_y=tile_size // 8, overlap=overlap // 8)
        if len(images.shape) == 5: #Combine batches
            images = images.reshape(-1, images.shape[-3], images.shape[-2], images.shape[-1])
        return (images, )

class VAEEncode:
    @classmethod
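The clamp in decode() just keeps each tile meaningfully larger than its overlap before both are converted to latent units; a worked example with the node's pixel-space inputs:

tile_size, overlap = 128, 64
if tile_size < overlap * 4:          # 128 < 256, so shrink the overlap
    overlap = tile_size // 4         # -> 32
print(tile_size // 8, overlap // 8)  # latent tile 16, overlap 4, as passed to decode_tiled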
@@ -925,6 +931,8 @@ class CLIPLoader:

    CATEGORY = "advanced/loaders"

    DESCRIPTION = "[Recipes]\n\nstable_diffusion: clip-l\nstable_cascade: clip-g\nsd3: t5 / clip-g / clip-l\nstable_audio: t5\nmochi: t5"

    def load_clip(self, clip_name, type="stable_diffusion"):
        clip_type = sd.CLIPType.STABLE_DIFFUSION
        if type == "stable_cascade":
@@ -954,6 +962,8 @@ class DualCLIPLoader:

    CATEGORY = "advanced/loaders"

    DESCRIPTION = "[Recipes]\n\nsdxl: clip-l, clip-g\nsd3: clip-l, clip-g / clip-l, t5 / clip-g, t5\nflux: clip-l, t5"

    def load_clip(self, clip_name1, clip_name2, type):
        clip_path1 = get_or_download("text_encoders", clip_name1)
        clip_path2 = get_or_download("text_encoders", clip_name2)
@@ -4,12 +4,7 @@ from . import utils
from . import conds

def prepare_mask(noise_mask, shape, device):
    """ensures noise mask is of proper dimensions"""
    noise_mask = torch.nn.functional.interpolate(noise_mask.reshape((-1, 1, noise_mask.shape[-2], noise_mask.shape[-1])), size=(shape[2], shape[3]), mode="bilinear")
    noise_mask = torch.cat([noise_mask] * shape[1], dim=1)
    noise_mask = utils.repeat_to_batch_size(noise_mask, shape[0])
    noise_mask = noise_mask.to(device)
    return noise_mask
    return utils.reshape_mask(noise_mask, shape).to(device)

def get_models_from_cond(cond, model_type):
    models = []
26    comfy/sd.py

@@ -257,7 +257,7 @@ class VAE:
            self.process_output = lambda audio: audio
            self.process_input = lambda audio: audio
            self.working_dtypes = [torch.float16, torch.bfloat16, torch.float32]
        elif "blocks.2.blocks.3.stack.5.weight" in sd or "decoder.blocks.2.blocks.3.stack.5.weight" in sd or "layers.4.layers.1.attn_block.attn.qkv.weight" in sd or "encoder.layers.4.layers.1.attn_block.attn.qkv.weight": #genmo mochi vae
        elif "blocks.2.blocks.3.stack.5.weight" in sd or "decoder.blocks.2.blocks.3.stack.5.weight" in sd or "layers.4.layers.1.attn_block.attn.qkv.weight" in sd or "encoder.layers.4.layers.1.attn_block.attn.qkv.weight" in sd: #genmo mochi vae
            if "blocks.2.blocks.3.stack.5.weight" in sd:
                sd = utils.state_dict_prefix_replace(sd, {"": "decoder."})
            if "layers.4.layers.1.attn_block.attn.qkv.weight" in sd:
@@ -373,9 +373,27 @@ class VAE:
        pixel_samples = pixel_samples.to(self.output_device).movedim(1, -1)
        return pixel_samples

    def decode_tiled(self, samples, tile_x=64, tile_y=64, overlap=16):
        load_models_gpu([self.patcher])
        output = self.decode_tiled_(samples, tile_x, tile_y, overlap)
    def decode_tiled(self, samples, tile_x=None, tile_y=None, overlap=None):
        memory_used = self.memory_used_decode(samples.shape, self.vae_dtype) #TODO: calculate mem required for tile
        load_models_gpu([self.patcher], memory_required=memory_used)
        dims = samples.ndim - 2
        args = {}
        if tile_x is not None:
            args["tile_x"] = tile_x
        if tile_y is not None:
            args["tile_y"] = tile_y
        if overlap is not None:
            args["overlap"] = overlap

        if dims == 1:
            args.pop("tile_y")
            output = self.decode_tiled_1d(samples, **args)
        elif dims == 2:
            output = self.decode_tiled_(samples, **args)
        elif dims == 3:
            output = self.decode_tiled_3d(samples, **args)
        else:
            raise ValueError(f"invalid dims={dims}")
        return output.movedim(1, -1)

    def encode(self, pixel_samples):
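The dims dispatch keys off the latent rank: audio-style latents (B, C, T) give dims == 1, image latents (B, C, H, W) give 2, and video latents (B, C, T, H, W) give 3. A quick illustration with made-up shapes:

import torch

for shape in [(1, 64, 44100), (1, 4, 64, 64), (1, 12, 16, 60, 106)]:
    samples = torch.empty(shape)
    print(shape, "-> dims =", samples.ndim - 2)   # 1, 2, 3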
@@ -197,6 +197,8 @@ class SDXL(supported_models_base.BASE):
            self.sampling_settings["sigma_min"] = float(state_dict["edm_vpred.sigma_min"].item())
            return model_base.ModelType.V_PREDICTION_EDM
        elif "v_pred" in state_dict:
            if "ztsnr" in state_dict: #Some zsnr anime checkpoints
                self.sampling_settings["zsnr"] = True
            return model_base.ModelType.V_PREDICTION
        else:
            return model_base.ModelType.EPS
@@ -1050,3 +1050,25 @@ def pil2tensor(image: Image) -> torch.Tensor:

def tensor2pil(t_image: torch.Tensor) -> Image:
    return Image.fromarray(np.clip(255.0 * t_image.cpu().numpy().squeeze(), 0, 255).astype(np.uint8))


def reshape_mask(input_mask, output_shape):
    dims = len(output_shape) - 2

    if dims == 1:
        scale_mode = "linear"

    if dims == 2:
        input_mask = input_mask.reshape((-1, 1, input_mask.shape[-2], input_mask.shape[-1]))
        scale_mode = "bilinear"

    if dims == 3:
        if len(input_mask.shape) < 5:
            input_mask = input_mask.reshape((1, 1, -1, input_mask.shape[-2], input_mask.shape[-1]))
        scale_mode = "trilinear"

    mask = torch.nn.functional.interpolate(input_mask, size=output_shape[2:], mode=scale_mode)
    if mask.shape[1] < output_shape[1]:
        mask = mask.repeat((1, output_shape[1]) + (1,) * dims)[:, :output_shape[1]]
    mask = repeat_to_batch_size(mask, output_shape[0])
    return mask
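A shape-level sanity check of reshape_mask (mask and latent sizes are made up; the module path is assumed to be comfy.utils, next to repeat_to_batch_size):

import torch
from comfy import utils

mask = torch.ones(1, 1, 512, 512)      # single-channel image-space mask
latent_shape = (2, 4, 64, 64)          # (batch, channels, H, W) of the target latent

out = utils.reshape_mask(mask, latent_shape)
print(out.shape)                       # torch.Size([2, 4, 64, 64])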
@ -1,8 +1,8 @@
|
||||
var __defProp = Object.defineProperty;
|
||||
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
|
||||
import { d as defineComponent, bQ as useExtensionStore, u as useSettingStore, r as ref, o as onMounted, q as computed, g as openBlock, h as createElementBlock, i as createVNode, y as withCtx, z as unref, bR as script$1, A as createBaseVNode, x as createBlock, N as Fragment, O as renderList, a4 as toDisplayString, au as createTextVNode, j as createCommentVNode, D as script$4 } from "./index-BNX_XOqh.js";
|
||||
import { s as script, a as script$2, b as script$3 } from "./index-B_uZlOM8.js";
|
||||
import "./index-nMMCMbCV.js";
|
||||
import { d as defineComponent, c8 as useExtensionStore, u as useSettingStore, r as ref, o as onMounted, q as computed, g as openBlock, h as createElementBlock, i as createVNode, y as withCtx, z as unref, bS as script$1, A as createBaseVNode, x as createBlock, N as Fragment, O as renderList, a4 as toDisplayString, av as createTextVNode, bQ as script$3, j as createCommentVNode, D as script$4 } from "./index-bi78Y1IN.js";
|
||||
import { s as script, a as script$2 } from "./index-ftUEqmu1.js";
|
||||
import "./index-bCeMLtLM.js";
|
||||
const _hoisted_1 = { class: "extension-panel" };
|
||||
const _hoisted_2 = { class: "mt-4" };
|
||||
const _sfc_main = /* @__PURE__ */ defineComponent({
|
||||
@ -100,4 +100,4 @@ const _sfc_main = /* @__PURE__ */ defineComponent({
|
||||
export {
|
||||
_sfc_main as default
|
||||
};
|
||||
//# sourceMappingURL=ExtensionPanel-BNXC3_Y5.js.map
|
||||
//# sourceMappingURL=ExtensionPanel-C-oQXg_k.js.map
|
||||
@ -1 +1 @@
|
||||
{"version":3,"file":"ExtensionPanel-BNXC3_Y5.js","sources":["../../src/components/dialog/content/setting/ExtensionPanel.vue"],"sourcesContent":["<template>\n <div class=\"extension-panel\">\n <DataTable :value=\"extensionStore.extensions\" stripedRows size=\"small\">\n <Column field=\"name\" :header=\"$t('extensionName')\" sortable></Column>\n <Column\n :pt=\"{\n bodyCell: 'flex items-center justify-end'\n }\"\n >\n <template #body=\"slotProps\">\n <ToggleSwitch\n v-model=\"editingEnabledExtensions[slotProps.data.name]\"\n @change=\"updateExtensionStatus\"\n />\n </template>\n </Column>\n </DataTable>\n <div class=\"mt-4\">\n <Message v-if=\"hasChanges\" severity=\"info\">\n <ul>\n <li v-for=\"ext in changedExtensions\" :key=\"ext.name\">\n <span>\n {{ extensionStore.isExtensionEnabled(ext.name) ? '[-]' : '[+]' }}\n </span>\n {{ ext.name }}\n </li>\n </ul>\n </Message>\n <Button\n :label=\"$t('reloadToApplyChanges')\"\n icon=\"pi pi-refresh\"\n @click=\"applyChanges\"\n :disabled=\"!hasChanges\"\n text\n fluid\n severity=\"danger\"\n />\n </div>\n </div>\n</template>\n\n<script setup lang=\"ts\">\nimport { ref, computed, onMounted } from 'vue'\nimport { useExtensionStore } from '@/stores/extensionStore'\nimport { useSettingStore } from '@/stores/settingStore'\nimport DataTable from 'primevue/datatable'\nimport Column from 'primevue/column'\nimport ToggleSwitch from 'primevue/toggleswitch'\nimport Button from 'primevue/button'\nimport Message from 'primevue/message'\n\nconst extensionStore = useExtensionStore()\nconst settingStore = useSettingStore()\n\nconst editingEnabledExtensions = ref<Record<string, boolean>>({})\n\nonMounted(() => {\n extensionStore.extensions.forEach((ext) => {\n editingEnabledExtensions.value[ext.name] =\n extensionStore.isExtensionEnabled(ext.name)\n })\n})\n\nconst changedExtensions = computed(() => {\n return extensionStore.extensions.filter(\n (ext) =>\n editingEnabledExtensions.value[ext.name] !==\n extensionStore.isExtensionEnabled(ext.name)\n )\n})\n\nconst hasChanges = computed(() => {\n return changedExtensions.value.length > 0\n})\n\nconst updateExtensionStatus = () => {\n const editingDisabledExtensionNames = Object.entries(\n editingEnabledExtensions.value\n )\n .filter(([_, enabled]) => !enabled)\n .map(([name]) => name)\n\n settingStore.set('Comfy.Extension.Disabled', [\n ...extensionStore.inactiveDisabledExtensionNames,\n ...editingDisabledExtensionNames\n ])\n}\n\nconst applyChanges = () => {\n // Refresh the page to apply changes\n window.location.reload()\n}\n</script>\n"],"names":[],"mappings":";;;;;;;;;;AAmDA,UAAM,iBAAiB;AACvB,UAAM,eAAe;AAEf,UAAA,2BAA2B,IAA6B,CAAA,CAAE;AAEhE,cAAU,MAAM;AACC,qBAAA,WAAW,QAAQ,CAAC,QAAQ;AACzC,iCAAyB,MAAM,IAAI,IAAI,IACrC,eAAe,mBAAmB,IAAI,IAAI;AAAA,MAAA,CAC7C;AAAA,IAAA,CACF;AAEK,UAAA,oBAAoB,SAAS,MAAM;AACvC,aAAO,eAAe,WAAW;AAAA,QAC/B,CAAC,QACC,yBAAyB,MAAM,IAAI,IAAI,MACvC,eAAe,mBAAmB,IAAI,IAAI;AAAA,MAAA;AAAA,IAC9C,CACD;AAEK,UAAA,aAAa,SAAS,MAAM;AACzB,aAAA,kBAAkB,MAAM,SAAS;AAAA,IAAA,CACzC;AAED,UAAM,wBAAwB,6BAAM;AAClC,YAAM,gCAAgC,OAAO;AAAA,QAC3C,yBAAyB;AAAA,MAExB,EAAA,OAAO,CAAC,CAAC,GAAG,OAAO,MAAM,CAAC,OAAO,EACjC,IAAI,CAAC,CAAC,IAAI,MAAM,IAAI;AAEvB,mBAAa,IAAI,4BAA4B;AAAA,QAC3C,GAAG,eAAe;AAAA,QAClB,GAAG;AAAA,MAAA,CACJ;AAAA,IAAA,GAV2B;AAa9B,UAAM,eAAe,6BAAM;AAEzB,aAAO,SAAS;IAAO,GAFJ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
|
||||
{"version":3,"file":"ExtensionPanel-C-oQXg_k.js","sources":["../../src/components/dialog/content/setting/ExtensionPanel.vue"],"sourcesContent":["<template>\n <div class=\"extension-panel\">\n <DataTable :value=\"extensionStore.extensions\" stripedRows size=\"small\">\n <Column field=\"name\" :header=\"$t('extensionName')\" sortable></Column>\n <Column\n :pt=\"{\n bodyCell: 'flex items-center justify-end'\n }\"\n >\n <template #body=\"slotProps\">\n <ToggleSwitch\n v-model=\"editingEnabledExtensions[slotProps.data.name]\"\n @change=\"updateExtensionStatus\"\n />\n </template>\n </Column>\n </DataTable>\n <div class=\"mt-4\">\n <Message v-if=\"hasChanges\" severity=\"info\">\n <ul>\n <li v-for=\"ext in changedExtensions\" :key=\"ext.name\">\n <span>\n {{ extensionStore.isExtensionEnabled(ext.name) ? '[-]' : '[+]' }}\n </span>\n {{ ext.name }}\n </li>\n </ul>\n </Message>\n <Button\n :label=\"$t('reloadToApplyChanges')\"\n icon=\"pi pi-refresh\"\n @click=\"applyChanges\"\n :disabled=\"!hasChanges\"\n text\n fluid\n severity=\"danger\"\n />\n </div>\n </div>\n</template>\n\n<script setup lang=\"ts\">\nimport { ref, computed, onMounted } from 'vue'\nimport { useExtensionStore } from '@/stores/extensionStore'\nimport { useSettingStore } from '@/stores/settingStore'\nimport DataTable from 'primevue/datatable'\nimport Column from 'primevue/column'\nimport ToggleSwitch from 'primevue/toggleswitch'\nimport Button from 'primevue/button'\nimport Message from 'primevue/message'\n\nconst extensionStore = useExtensionStore()\nconst settingStore = useSettingStore()\n\nconst editingEnabledExtensions = ref<Record<string, boolean>>({})\n\nonMounted(() => {\n extensionStore.extensions.forEach((ext) => {\n editingEnabledExtensions.value[ext.name] =\n extensionStore.isExtensionEnabled(ext.name)\n })\n})\n\nconst changedExtensions = computed(() => {\n return extensionStore.extensions.filter(\n (ext) =>\n editingEnabledExtensions.value[ext.name] !==\n extensionStore.isExtensionEnabled(ext.name)\n )\n})\n\nconst hasChanges = computed(() => {\n return changedExtensions.value.length > 0\n})\n\nconst updateExtensionStatus = () => {\n const editingDisabledExtensionNames = Object.entries(\n editingEnabledExtensions.value\n )\n .filter(([_, enabled]) => !enabled)\n .map(([name]) => name)\n\n settingStore.set('Comfy.Extension.Disabled', [\n ...extensionStore.inactiveDisabledExtensionNames,\n ...editingDisabledExtensionNames\n ])\n}\n\nconst applyChanges = () => {\n // Refresh the page to apply changes\n window.location.reload()\n}\n</script>\n"],"names":[],"mappings":";;;;;;;;;;AAmDA,UAAM,iBAAiB,kBAAkB;AACzC,UAAM,eAAe,gBAAgB;AAE/B,UAAA,2BAA2B,IAA6B,EAAE;AAEhE,cAAU,MAAM;AACC,qBAAA,WAAW,QAAQ,CAAC,QAAQ;AACzC,iCAAyB,MAAM,IAAI,IAAI,IACrC,eAAe,mBAAmB,IAAI,IAAI;AAAA,MAAA,CAC7C;AAAA,IAAA,CACF;AAEK,UAAA,oBAAoB,SAAS,MAAM;AACvC,aAAO,eAAe,WAAW;AAAA,QAC/B,CAAC,QACC,yBAAyB,MAAM,IAAI,IAAI,MACvC,eAAe,mBAAmB,IAAI,IAAI;AAAA,MAC9C;AAAA,IAAA,CACD;AAEK,UAAA,aAAa,SAAS,MAAM;AACzB,aAAA,kBAAkB,MAAM,SAAS;AAAA,IAAA,CACzC;AAED,UAAM,wBAAwB,6BAAM;AAClC,YAAM,gCAAgC,OAAO;AAAA,QAC3C,yBAAyB;AAAA,MAExB,EAAA,OAAO,CAAC,CAAC,GAAG,OAAO,MAAM,CAAC,OAAO,EACjC,IAAI,CAAC,CAAC,IAAI,MAAM,IAAI;AAEvB,mBAAa,IAAI,4BAA4B;AAAA,QAC3C,GAAG,eAAe;AAAA,QAClB,GAAG;AAAA,MAAA,CACJ;AAAA,IACH,GAX8B;AAa9B,UAAM,eAAe,6BAAM;AAEzB,aAAO,SAAS,OAAO;AAAA,IACzB,GAHqB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
|
||||
1
comfy/web/assets/GraphView-BumSctau.js.map
generated
vendored
1
comfy/web/assets/GraphView-BumSctau.js.map
generated
vendored
File diff suppressed because one or more lines are too long
88
comfy/web/assets/GraphView-DI5ePCmV.css → comfy/web/assets/GraphView-Che8kBOh.css
generated
vendored
88
comfy/web/assets/GraphView-DI5ePCmV.css → comfy/web/assets/GraphView-Che8kBOh.css
generated
vendored
@ -45,7 +45,7 @@
|
||||
--sidebar-icon-size: 1rem;
|
||||
}
|
||||
|
||||
.side-tool-bar-container[data-v-37fd2fa4] {
|
||||
.side-tool-bar-container[data-v-e0812a25] {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
@ -58,28 +58,32 @@
|
||||
background-color: var(--comfy-menu-bg);
|
||||
color: var(--fg-color);
|
||||
}
|
||||
.side-tool-bar-end[data-v-37fd2fa4] {
|
||||
.side-tool-bar-end[data-v-e0812a25] {
|
||||
align-self: flex-end;
|
||||
margin-top: auto;
|
||||
}
|
||||
|
||||
[data-v-b49f20b1] .p-splitter-gutter {
|
||||
[data-v-7c3279c1] .p-splitter-gutter {
|
||||
pointer-events: auto;
|
||||
}
|
||||
.side-bar-panel[data-v-b49f20b1] {
|
||||
[data-v-7c3279c1] .p-splitter-gutter:hover,[data-v-7c3279c1] .p-splitter-gutter[data-p-gutter-resizing='true'] {
|
||||
transition: background-color 0.2s ease 300ms;
|
||||
background-color: var(--p-primary-color);
|
||||
}
|
||||
.side-bar-panel[data-v-7c3279c1] {
|
||||
background-color: var(--bg-color);
|
||||
pointer-events: auto;
|
||||
}
|
||||
.bottom-panel[data-v-b49f20b1] {
|
||||
.bottom-panel[data-v-7c3279c1] {
|
||||
background-color: var(--bg-color);
|
||||
pointer-events: auto;
|
||||
}
|
||||
.splitter-overlay[data-v-b49f20b1] {
|
||||
.splitter-overlay[data-v-7c3279c1] {
|
||||
pointer-events: none;
|
||||
border-style: none;
|
||||
background-color: transparent;
|
||||
}
|
||||
.splitter-overlay-root[data-v-b49f20b1] {
|
||||
.splitter-overlay-root[data-v-7c3279c1] {
|
||||
position: absolute;
|
||||
top: 0px;
|
||||
left: 0px;
|
||||
@ -102,32 +106,6 @@
|
||||
margin: -0.125rem 0.125rem;
|
||||
}
|
||||
|
||||
.comfy-vue-node-search-container[data-v-2d409367] {
|
||||
display: flex;
|
||||
width: 100%;
|
||||
min-width: 26rem;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
.comfy-vue-node-search-container[data-v-2d409367] * {
|
||||
pointer-events: auto;
|
||||
}
|
||||
.comfy-vue-node-preview-container[data-v-2d409367] {
|
||||
position: absolute;
|
||||
left: -350px;
|
||||
top: 50px;
|
||||
}
|
||||
.comfy-vue-node-search-box[data-v-2d409367] {
|
||||
z-index: 10;
|
||||
flex-grow: 1;
|
||||
}
|
||||
._filter-button[data-v-2d409367] {
|
||||
z-index: 10;
|
||||
}
|
||||
._dialog[data-v-2d409367] {
|
||||
min-width: 26rem;
|
||||
}
|
||||
|
||||
.invisible-dialog-root {
|
||||
width: 60%;
|
||||
min-width: 24rem;
|
||||
@ -146,7 +124,7 @@
|
||||
align-items: flex-start !important;
|
||||
}
|
||||
|
||||
.node-tooltip[data-v-79ec8c53] {
|
||||
.node-tooltip[data-v-c2e0098f] {
|
||||
background: var(--comfy-input-bg);
|
||||
border-radius: 5px;
|
||||
box-shadow: 0 0 5px rgba(0, 0, 0, 0.4);
|
||||
@ -162,30 +140,28 @@
|
||||
z-index: 99999;
|
||||
}
|
||||
|
||||
.p-buttongroup-vertical[data-v-444d3768] {
|
||||
.p-buttongroup-vertical[data-v-94481f39] {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
border-radius: var(--p-button-border-radius);
|
||||
overflow: hidden;
|
||||
border: 1px solid var(--p-panel-border-color);
|
||||
}
|
||||
.p-buttongroup-vertical .p-button[data-v-444d3768] {
|
||||
.p-buttongroup-vertical .p-button[data-v-94481f39] {
|
||||
margin: 0;
|
||||
border-radius: 0;
|
||||
}
|
||||
|
||||
.comfy-menu-hamburger[data-v-eac4cb71] {
|
||||
.comfy-menu-hamburger[data-v-2ddd26e8] {
|
||||
pointer-events: auto;
|
||||
position: fixed;
|
||||
top: 0px;
|
||||
right: 0px;
|
||||
z-index: 9999;
|
||||
}
|
||||
|
||||
[data-v-84e785b8] .p-togglebutton::before {
|
||||
[data-v-9eb975c3] .p-togglebutton::before {
|
||||
display: none
|
||||
}
|
||||
[data-v-84e785b8] .p-togglebutton {
|
||||
[data-v-9eb975c3] .p-togglebutton {
|
||||
position: relative;
|
||||
flex-shrink: 0;
|
||||
border-radius: 0px;
|
||||
@ -193,14 +169,14 @@
|
||||
padding-left: 0.5rem;
|
||||
padding-right: 0.5rem
|
||||
}
|
||||
[data-v-84e785b8] .p-togglebutton.p-togglebutton-checked {
|
||||
[data-v-9eb975c3] .p-togglebutton.p-togglebutton-checked {
|
||||
border-bottom-width: 2px;
|
||||
border-bottom-color: var(--p-button-text-primary-color)
|
||||
}
|
||||
[data-v-84e785b8] .p-togglebutton-checked .close-button,[data-v-84e785b8] .p-togglebutton:hover .close-button {
|
||||
[data-v-9eb975c3] .p-togglebutton-checked .close-button,[data-v-9eb975c3] .p-togglebutton:hover .close-button {
|
||||
visibility: visible
|
||||
}
|
||||
.status-indicator[data-v-84e785b8] {
|
||||
.status-indicator[data-v-9eb975c3] {
|
||||
position: absolute;
|
||||
font-weight: 700;
|
||||
font-size: 1.5rem;
|
||||
@ -208,10 +184,10 @@
|
||||
left: 50%;
|
||||
transform: translate(-50%, -50%)
|
||||
}
|
||||
[data-v-84e785b8] .p-togglebutton:hover .status-indicator {
|
||||
[data-v-9eb975c3] .p-togglebutton:hover .status-indicator {
|
||||
display: none
|
||||
}
|
||||
[data-v-84e785b8] .p-togglebutton .close-button {
|
||||
[data-v-9eb975c3] .p-togglebutton .close-button {
|
||||
visibility: hidden
|
||||
}
|
||||
|
||||
@ -234,35 +210,35 @@
|
||||
border-bottom-left-radius: 0;
|
||||
}
|
||||
|
||||
.comfyui-queue-button[data-v-2b80bf74] .p-splitbutton-dropdown {
|
||||
.comfyui-queue-button[data-v-95bc9be0] .p-splitbutton-dropdown {
|
||||
border-top-right-radius: 0;
|
||||
border-bottom-right-radius: 0;
|
||||
}
|
||||
|
||||
.actionbar[data-v-2e54db00] {
|
||||
.actionbar[data-v-542a7001] {
|
||||
pointer-events: all;
|
||||
position: fixed;
|
||||
z-index: 1000;
|
||||
}
|
||||
.actionbar.is-docked[data-v-2e54db00] {
|
||||
.actionbar.is-docked[data-v-542a7001] {
|
||||
position: static;
|
||||
border-style: none;
|
||||
background-color: transparent;
|
||||
padding: 0px;
|
||||
}
|
||||
.actionbar.is-dragging[data-v-2e54db00] {
|
||||
.actionbar.is-dragging[data-v-542a7001] {
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
user-select: none;
|
||||
}
|
||||
[data-v-2e54db00] .p-panel-content {
|
||||
[data-v-542a7001] .p-panel-content {
|
||||
padding: 0.25rem;
|
||||
}
|
||||
[data-v-2e54db00] .p-panel-header {
|
||||
[data-v-542a7001] .p-panel-header {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.comfyui-menu[data-v-221bd572] {
|
||||
.comfyui-menu[data-v-d84a704d] {
|
||||
width: 100vw;
|
||||
background: var(--comfy-menu-bg);
|
||||
color: var(--fg-color);
|
||||
@ -274,13 +250,13 @@
|
||||
grid-column: 1/-1;
|
||||
max-height: 90vh;
|
||||
}
|
||||
.comfyui-menu.dropzone[data-v-221bd572] {
|
||||
.comfyui-menu.dropzone[data-v-d84a704d] {
|
||||
background: var(--p-highlight-background);
|
||||
}
|
||||
.comfyui-menu.dropzone-active[data-v-221bd572] {
|
||||
.comfyui-menu.dropzone-active[data-v-d84a704d] {
|
||||
background: var(--p-highlight-background-focus);
|
||||
}
|
||||
.comfyui-logo[data-v-221bd572] {
|
||||
.comfyui-logo[data-v-d84a704d] {
|
||||
font-size: 1.2em;
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
847
comfy/web/assets/GraphView-BumSctau.js → comfy/web/assets/GraphView-DRxVGgSM.js
generated
vendored
847
comfy/web/assets/GraphView-BumSctau.js → comfy/web/assets/GraphView-DRxVGgSM.js
generated
vendored
File diff suppressed because one or more lines are too long
1
comfy/web/assets/GraphView-DRxVGgSM.js.map
generated
vendored
Normal file
1
comfy/web/assets/GraphView-DRxVGgSM.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1043
comfy/web/assets/InstallView-CWxVNQLN.js
generated
vendored
Normal file
1043
comfy/web/assets/InstallView-CWxVNQLN.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
comfy/web/assets/InstallView-CWxVNQLN.js.map
generated
vendored
Normal file
1
comfy/web/assets/InstallView-CWxVNQLN.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4
comfy/web/assets/InstallView-DERAHpPP.css
generated
vendored
Normal file
4
comfy/web/assets/InstallView-DERAHpPP.css
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
|
||||
[data-v-53e62b05] .p-steppanel {
|
||||
background-color: transparent
|
||||
}
|
||||
8
comfy/web/assets/KeybindingPanel-BFXf1pwv.css
generated
vendored
Normal file
8
comfy/web/assets/KeybindingPanel-BFXf1pwv.css
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
|
||||
[data-v-2d8b3a76] .p-datatable-tbody > tr > td {
|
||||
padding: 0.25rem;
|
||||
min-height: 2rem
|
||||
}
|
||||
[data-v-2d8b3a76] .p-datatable-row-selected .actions,[data-v-2d8b3a76] .p-datatable-selectable-row:hover .actions {
|
||||
visibility: visible
|
||||
}
|
||||
8
comfy/web/assets/KeybindingPanel-BNYKhW1k.css
generated
vendored
8
comfy/web/assets/KeybindingPanel-BNYKhW1k.css
generated
vendored
@ -1,8 +0,0 @@
|
||||
|
||||
[data-v-e5724e4d] .p-datatable-tbody > tr > td {
|
||||
padding: 1px;
|
||||
min-height: 2rem;
|
||||
}
|
||||
[data-v-e5724e4d] .p-datatable-row-selected .actions,[data-v-e5724e4d] .p-datatable-selectable-row:hover .actions {
|
||||
visibility: visible;
|
||||
}
|
||||
@ -1,8 +1,8 @@
|
||||
var __defProp = Object.defineProperty;
|
||||
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
|
||||
import { d as defineComponent, q as computed, g as openBlock, h as createElementBlock, N as Fragment, O as renderList, i as createVNode, y as withCtx, au as createTextVNode, a4 as toDisplayString, z as unref, ay as script, j as createCommentVNode, r as ref, bN as FilterMatchMode, M as useKeybindingStore, F as useCommandStore, aH as watchEffect, b8 as useToast, t as resolveDirective, bO as SearchBox, A as createBaseVNode, D as script$2, x as createBlock, am as script$4, bd as withModifiers, aF as script$6, v as withDirectives, bJ as KeyComboImpl, bP as KeybindingImpl, _ as _export_sfc } from "./index-BNX_XOqh.js";
|
||||
import { s as script$1, a as script$3, b as script$5 } from "./index-B_uZlOM8.js";
|
||||
import "./index-nMMCMbCV.js";
|
||||
import { d as defineComponent, q as computed, g as openBlock, h as createElementBlock, N as Fragment, O as renderList, i as createVNode, y as withCtx, av as createTextVNode, a4 as toDisplayString, z as unref, az as script, j as createCommentVNode, r as ref, c5 as FilterMatchMode, M as useKeybindingStore, F as useCommandStore, aI as watchEffect, be as useToast, t as resolveDirective, c6 as SearchBox, A as createBaseVNode, D as script$2, x as createBlock, an as script$4, bi as withModifiers, bQ as script$5, aG as script$6, v as withDirectives, c0 as KeyComboImpl, c7 as KeybindingImpl, _ as _export_sfc } from "./index-bi78Y1IN.js";
|
||||
import { s as script$1, a as script$3 } from "./index-ftUEqmu1.js";
|
||||
import "./index-bCeMLtLM.js";
|
||||
const _hoisted_1$1 = {
|
||||
key: 0,
|
||||
class: "px-2"
|
||||
@ -36,8 +36,9 @@ const _sfc_main$1 = /* @__PURE__ */ defineComponent({
}
});
const _hoisted_1 = { class: "keybinding-panel" };
const _hoisted_2 = { class: "actions invisible" };
const _hoisted_3 = { key: 1 };
const _hoisted_2 = { class: "actions invisible flex flex-row" };
const _hoisted_3 = ["title"];
const _hoisted_4 = { key: 1 };
const _sfc_main = /* @__PURE__ */ defineComponent({
__name: "KeybindingPanel",
setup(__props) {
@ -176,7 +177,16 @@ const _sfc_main = /* @__PURE__ */ defineComponent({
createVNode(unref(script$1), {
field: "id",
header: "Command ID",
sortable: ""
sortable: "",
class: "max-w-64 2xl:max-w-full"
}, {
body: withCtx((slotProps) => [
createBaseVNode("div", {
class: "overflow-hidden text-ellipsis whitespace-nowrap",
title: slotProps.data.id
}, toDisplayString(slotProps.data.id), 9, _hoisted_3)
]),
_: 1
}),
createVNode(unref(script$1), {
field: "keybinding",
@ -187,7 +197,7 @@ const _sfc_main = /* @__PURE__ */ defineComponent({
key: 0,
keyCombo: slotProps.data.keybinding.combo,
isModified: unref(keybindingStore).isCommandKeybindingModified(slotProps.data.id)
}, null, 8, ["keyCombo", "isModified"])) : (openBlock(), createElementBlock("span", _hoisted_3, "-"))
}, null, 8, ["keyCombo", "isModified"])) : (openBlock(), createElementBlock("span", _hoisted_4, "-"))
]),
_: 1
})
@ -256,8 +266,8 @@ const _sfc_main = /* @__PURE__ */ defineComponent({
};
}
});
const KeybindingPanel = /* @__PURE__ */ _export_sfc(_sfc_main, [["__scopeId", "data-v-e5724e4d"]]);
const KeybindingPanel = /* @__PURE__ */ _export_sfc(_sfc_main, [["__scopeId", "data-v-2d8b3a76"]]);
export {
KeybindingPanel as default
};
//# sourceMappingURL=KeybindingPanel-DU4DXDix.js.map
//# sourceMappingURL=KeybindingPanel-BlBHQABM.js.map
1
comfy/web/assets/KeybindingPanel-BlBHQABM.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
comfy/web/assets/KeybindingPanel-DU4DXDix.js.map
generated
vendored
File diff suppressed because one or more lines are too long
102
comfy/web/assets/ServerStartView-Bb00SCJ5.js
generated
vendored
Normal file
@ -0,0 +1,102 @@
|
||||
var __defProp = Object.defineProperty;
|
||||
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
|
||||
import { d as defineComponent, r as ref, o as onMounted, w as watch, I as onBeforeUnmount, g as openBlock, h as createElementBlock, i as createVNode, y as withCtx, A as createBaseVNode, a4 as toDisplayString, z as unref, bJ as script, bK as electronAPI } from "./index-bi78Y1IN.js";
|
||||
import { o as oe, r as rr } from "./index-Ba5g1c58.js";
|
||||
const _hoisted_1$1 = { class: "p-terminal rounded-none h-full w-full" };
|
||||
const _hoisted_2$1 = { class: "px-4 whitespace-pre-wrap" };
|
||||
const _sfc_main$1 = /* @__PURE__ */ defineComponent({
|
||||
__name: "LogTerminal",
|
||||
props: {
|
||||
fetchLogs: { type: Function },
|
||||
fetchInterval: {}
|
||||
},
|
||||
setup(__props) {
|
||||
const props = __props;
|
||||
const log = ref("");
|
||||
const scrollPanelRef = ref(null);
|
||||
const scrolledToBottom = ref(false);
|
||||
let intervalId = 0;
|
||||
onMounted(async () => {
|
||||
const element = scrollPanelRef.value?.$el;
|
||||
const scrollContainer = element?.querySelector(".p-scrollpanel-content");
|
||||
if (scrollContainer) {
|
||||
scrollContainer.addEventListener("scroll", () => {
|
||||
scrolledToBottom.value = scrollContainer.scrollTop + scrollContainer.clientHeight === scrollContainer.scrollHeight;
|
||||
});
|
||||
}
|
||||
const scrollToBottom = /* @__PURE__ */ __name(() => {
|
||||
if (scrollContainer) {
|
||||
scrollContainer.scrollTop = scrollContainer.scrollHeight;
|
||||
}
|
||||
}, "scrollToBottom");
|
||||
watch(log, () => {
|
||||
if (scrolledToBottom.value) {
|
||||
scrollToBottom();
|
||||
}
|
||||
});
|
||||
const fetchLogs = /* @__PURE__ */ __name(async () => {
|
||||
log.value = await props.fetchLogs();
|
||||
}, "fetchLogs");
|
||||
await fetchLogs();
|
||||
scrollToBottom();
|
||||
intervalId = window.setInterval(fetchLogs, props.fetchInterval);
|
||||
});
|
||||
onBeforeUnmount(() => {
|
||||
window.clearInterval(intervalId);
|
||||
});
|
||||
return (_ctx, _cache) => {
|
||||
return openBlock(), createElementBlock("div", _hoisted_1$1, [
|
||||
createVNode(unref(script), {
|
||||
class: "h-full w-full",
|
||||
ref_key: "scrollPanelRef",
|
||||
ref: scrollPanelRef
|
||||
}, {
|
||||
default: withCtx(() => [
|
||||
createBaseVNode("pre", _hoisted_2$1, toDisplayString(log.value), 1)
|
||||
]),
|
||||
_: 1
|
||||
}, 512)
|
||||
]);
|
||||
};
|
||||
}
|
||||
});
|
||||
const _hoisted_1 = { class: "font-sans flex flex-col justify-center items-center h-screen m-0 text-neutral-300 bg-neutral-900 dark-theme pointer-events-auto" };
|
||||
const _hoisted_2 = { class: "text-2xl font-bold" };
|
||||
const _sfc_main = /* @__PURE__ */ defineComponent({
|
||||
__name: "ServerStartView",
|
||||
setup(__props) {
|
||||
const electron = electronAPI();
|
||||
const status = ref(oe.INITIAL_STATE);
|
||||
const logs = ref([]);
|
||||
const updateProgress = /* @__PURE__ */ __name(({ status: newStatus }) => {
|
||||
status.value = newStatus;
|
||||
logs.value = [];
|
||||
}, "updateProgress");
|
||||
const addLogMessage = /* @__PURE__ */ __name((message) => {
|
||||
logs.value = [...logs.value, message];
|
||||
}, "addLogMessage");
|
||||
const fetchLogs = /* @__PURE__ */ __name(async () => {
|
||||
return logs.value.join("\n");
|
||||
}, "fetchLogs");
|
||||
onMounted(() => {
|
||||
electron.sendReady();
|
||||
electron.onProgressUpdate(updateProgress);
|
||||
electron.onLogMessage((message) => {
|
||||
addLogMessage(message);
|
||||
});
|
||||
});
|
||||
return (_ctx, _cache) => {
|
||||
return openBlock(), createElementBlock("div", _hoisted_1, [
|
||||
createBaseVNode("h2", _hoisted_2, toDisplayString(unref(rr)[status.value]), 1),
|
||||
createVNode(_sfc_main$1, {
|
||||
"fetch-logs": fetchLogs,
|
||||
"fetch-interval": 500
|
||||
})
|
||||
]);
|
||||
};
|
||||
}
|
||||
});
|
||||
export {
|
||||
_sfc_main as default
|
||||
};
|
||||
//# sourceMappingURL=ServerStartView-Bb00SCJ5.js.map
|
||||
1
comfy/web/assets/ServerStartView-Bb00SCJ5.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"ServerStartView-Bb00SCJ5.js","sources":["../../src/components/common/LogTerminal.vue","../../src/views/ServerStartView.vue"],"sourcesContent":["<!-- A simple read-only terminal component that displays logs. -->\n<template>\n <div class=\"p-terminal rounded-none h-full w-full\">\n <ScrollPanel class=\"h-full w-full\" ref=\"scrollPanelRef\">\n <pre class=\"px-4 whitespace-pre-wrap\">{{ log }}</pre>\n </ScrollPanel>\n </div>\n</template>\n\n<script setup lang=\"ts\">\nimport ScrollPanel from 'primevue/scrollpanel'\nimport { onBeforeUnmount, onMounted, ref, watch } from 'vue'\n\nconst props = defineProps<{\n fetchLogs: () => Promise<string>\n fetchInterval: number\n}>()\n\nconst log = ref<string>('')\nconst scrollPanelRef = ref<InstanceType<typeof ScrollPanel> | null>(null)\n/**\n * Whether the user has scrolled to the bottom of the terminal.\n * This is used to prevent the terminal from scrolling to the bottom\n * when new logs are fetched.\n */\nconst scrolledToBottom = ref(false)\n\nlet intervalId: number = 0\n\nonMounted(async () => {\n const element = scrollPanelRef.value?.$el\n const scrollContainer = element?.querySelector('.p-scrollpanel-content')\n\n if (scrollContainer) {\n scrollContainer.addEventListener('scroll', () => {\n scrolledToBottom.value =\n scrollContainer.scrollTop + scrollContainer.clientHeight ===\n scrollContainer.scrollHeight\n })\n }\n\n const scrollToBottom = () => {\n if (scrollContainer) {\n scrollContainer.scrollTop = scrollContainer.scrollHeight\n }\n }\n\n watch(log, () => {\n if (scrolledToBottom.value) {\n scrollToBottom()\n }\n })\n\n const fetchLogs = async () => {\n log.value = await props.fetchLogs()\n }\n\n await fetchLogs()\n scrollToBottom()\n intervalId = window.setInterval(fetchLogs, props.fetchInterval)\n})\n\nonBeforeUnmount(() => {\n window.clearInterval(intervalId)\n})\n</script>\n","<template>\n <div\n class=\"font-sans flex flex-col justify-center items-center h-screen m-0 text-neutral-300 bg-neutral-900 dark-theme pointer-events-auto\"\n >\n <h2 class=\"text-2xl font-bold\">{{ ProgressMessages[status] }}</h2>\n <LogTerminal :fetch-logs=\"fetchLogs\" :fetch-interval=\"500\" />\n </div>\n</template>\n\n<script setup lang=\"ts\">\nimport { ref, onMounted } from 'vue'\nimport LogTerminal from '@/components/common/LogTerminal.vue'\nimport {\n ProgressStatus,\n ProgressMessages\n} from '@comfyorg/comfyui-electron-types'\nimport { electronAPI } from '@/utils/envUtil'\n\nconst electron = electronAPI()\n\nconst status = ref<ProgressStatus>(ProgressStatus.INITIAL_STATE)\nconst logs = ref<string[]>([])\n\nconst updateProgress = ({ status: newStatus }: { status: ProgressStatus }) => {\n status.value = newStatus\n logs.value = [] // Clear logs when status changes\n}\n\nconst addLogMessage = (message: string) => {\n logs.value = [...logs.value, message]\n}\n\nconst fetchLogs = async () => {\n return logs.value.join('\\n')\n}\n\nonMounted(() => {\n electron.sendReady()\n electron.onProgressUpdate(updateProgress)\n electron.onLogMessage((message: string) => {\n addLogMessage(message)\n 
})\n})\n</script>\n"],"names":["ProgressStatus"],"mappings":";;;;;;;;;;;;;AAaA,UAAM,QAAQ;AAKR,UAAA,MAAM,IAAY,EAAE;AACpB,UAAA,iBAAiB,IAA6C,IAAI;AAMlE,UAAA,mBAAmB,IAAI,KAAK;AAElC,QAAI,aAAqB;AAEzB,cAAU,YAAY;AACd,YAAA,UAAU,eAAe,OAAO;AAChC,YAAA,kBAAkB,SAAS,cAAc,wBAAwB;AAEvE,UAAI,iBAAiB;AACH,wBAAA,iBAAiB,UAAU,MAAM;AAC/C,2BAAiB,QACf,gBAAgB,YAAY,gBAAgB,iBAC5C,gBAAgB;AAAA,QAAA,CACnB;AAAA,MAAA;AAGH,YAAM,iBAAiB,6BAAM;AAC3B,YAAI,iBAAiB;AACnB,0BAAgB,YAAY,gBAAgB;AAAA,QAAA;AAAA,MAEhD,GAJuB;AAMvB,YAAM,KAAK,MAAM;AACf,YAAI,iBAAiB,OAAO;AACX,yBAAA;AAAA,QAAA;AAAA,MACjB,CACD;AAED,YAAM,YAAY,mCAAY;AACxB,YAAA,QAAQ,MAAM,MAAM,UAAU;AAAA,MACpC,GAFkB;AAIlB,YAAM,UAAU;AACD,qBAAA;AACf,mBAAa,OAAO,YAAY,WAAW,MAAM,aAAa;AAAA,IAAA,CAC/D;AAED,oBAAgB,MAAM;AACpB,aAAO,cAAc,UAAU;AAAA,IAAA,CAChC;;;;;;;;;;;;;;;;;;;;;;AC9CD,UAAM,WAAW,YAAY;AAEvB,UAAA,SAAS,IAAoBA,GAAe,aAAa;AACzD,UAAA,OAAO,IAAc,EAAE;AAE7B,UAAM,iBAAiB,wBAAC,EAAE,QAAQ,gBAA4C;AAC5E,aAAO,QAAQ;AACf,WAAK,QAAQ,CAAC;AAAA,IAChB,GAHuB;AAKjB,UAAA,gBAAgB,wBAAC,YAAoB;AACzC,WAAK,QAAQ,CAAC,GAAG,KAAK,OAAO,OAAO;AAAA,IACtC,GAFsB;AAItB,UAAM,YAAY,mCAAY;AACrB,aAAA,KAAK,MAAM,KAAK,IAAI;AAAA,IAC7B,GAFkB;AAIlB,cAAU,MAAM;AACd,eAAS,UAAU;AACnB,eAAS,iBAAiB,cAAc;AAC/B,eAAA,aAAa,CAAC,YAAoB;AACzC,sBAAc,OAAO;AAAA,MAAA,CACtB;AAAA,IAAA,CACF;;;;;;;;;;;;"}
|
||||
30
comfy/web/assets/WelcomeView-CsOBuA3U.js
generated
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
import { d as defineComponent, g as openBlock, h as createElementBlock, A as createBaseVNode, a4 as toDisplayString, i as createVNode, z as unref, D as script, _ as _export_sfc } from "./index-bi78Y1IN.js";
|
||||
const _hoisted_1 = { class: "font-sans flex flex-col justify-center items-center h-screen m-0 text-neutral-300 bg-neutral-900 dark-theme pointer-events-auto" };
|
||||
const _hoisted_2 = { class: "flex flex-col items-center justify-center gap-8 p-8" };
|
||||
const _hoisted_3 = { class: "animated-gradient-text text-glow select-none" };
|
||||
const _sfc_main = /* @__PURE__ */ defineComponent({
|
||||
__name: "WelcomeView",
|
||||
setup(__props) {
|
||||
return (_ctx, _cache) => {
|
||||
return openBlock(), createElementBlock("div", _hoisted_1, [
|
||||
createBaseVNode("div", _hoisted_2, [
|
||||
createBaseVNode("h1", _hoisted_3, toDisplayString(_ctx.$t("welcome.title")), 1),
|
||||
createVNode(unref(script), {
|
||||
label: _ctx.$t("welcome.getStarted"),
|
||||
icon: "pi pi-arrow-right",
|
||||
iconPos: "right",
|
||||
size: "large",
|
||||
rounded: "",
|
||||
onClick: _cache[0] || (_cache[0] = ($event) => _ctx.$router.push("/install")),
|
||||
class: "p-4 text-lg fade-in-up"
|
||||
}, null, 8, ["label"])
|
||||
])
|
||||
]);
|
||||
};
|
||||
}
|
||||
});
|
||||
const WelcomeView = /* @__PURE__ */ _export_sfc(_sfc_main, [["__scopeId", "data-v-12b8b11b"]]);
|
||||
export {
|
||||
WelcomeView as default
|
||||
};
|
||||
//# sourceMappingURL=WelcomeView-CsOBuA3U.js.map
|
||||
1
comfy/web/assets/WelcomeView-CsOBuA3U.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"WelcomeView-CsOBuA3U.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;"}
|
||||
36
comfy/web/assets/WelcomeView-D5nfld2b.css
generated
vendored
Normal file
@ -0,0 +1,36 @@
|
||||
|
||||
.animated-gradient-text[data-v-12b8b11b] {
|
||||
font-weight: 700;
|
||||
font-size: clamp(2rem, 8vw, 4rem);
|
||||
background: linear-gradient(to right, #12c2e9, #c471ed, #f64f59, #12c2e9);
|
||||
background-size: 300% auto;
|
||||
background-clip: text;
|
||||
-webkit-background-clip: text;
|
||||
-webkit-text-fill-color: transparent;
|
||||
animation: gradient-12b8b11b 8s linear infinite;
|
||||
}
|
||||
.text-glow[data-v-12b8b11b] {
|
||||
filter: drop-shadow(0 0 8px rgba(255, 255, 255, 0.3));
|
||||
}
|
||||
@keyframes gradient-12b8b11b {
|
||||
0% {
|
||||
background-position: 0% center;
|
||||
}
|
||||
100% {
|
||||
background-position: 300% center;
|
||||
}
|
||||
}
|
||||
.fade-in-up[data-v-12b8b11b] {
|
||||
animation: fadeInUp-12b8b11b 1.5s ease-out;
|
||||
animation-fill-mode: both;
|
||||
}
|
||||
@keyframes fadeInUp-12b8b11b {
|
||||
0% {
|
||||
opacity: 0;
|
||||
transform: translateY(20px);
|
||||
}
|
||||
100% {
|
||||
opacity: 1;
|
||||
transform: translateY(0);
|
||||
}
|
||||
}
|
||||
0
comfy/web/assets/index-DjWyclij.css → comfy/web/assets/index-A7qL4fzl.css
generated
vendored
1
comfy/web/assets/index-BNX_XOqh.js.map
generated
vendored
File diff suppressed because one or more lines are too long
604
comfy/web/assets/index-HT1vecxT.css → comfy/web/assets/index-BObQyxMu.css
generated
vendored
File diff suppressed because it is too large
1
comfy/web/assets/index-B_uZlOM8.js.map
generated
vendored
File diff suppressed because one or more lines are too long
1880
comfy/web/assets/index-Ba5g1c58.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
comfy/web/assets/index-Ba5g1c58.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4607
comfy/web/assets/index-DNRGG-ix.js
generated
vendored
File diff suppressed because it is too large
1
comfy/web/assets/index-DNRGG-ix.js.map
generated
vendored
File diff suppressed because one or more lines are too long
51472
comfy/web/assets/index-DO-85kdf.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
comfy/web/assets/index-DO-85kdf.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4
comfy/web/assets/index-nMMCMbCV.js → comfy/web/assets/index-bCeMLtLM.js
generated
vendored
@ -1,6 +1,6 @@
var __defProp = Object.defineProperty;
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
import { b0 as script$2, g as openBlock, h as createElementBlock, m as mergeProps, A as createBaseVNode } from "./index-BNX_XOqh.js";
import { b6 as script$2, g as openBlock, h as createElementBlock, m as mergeProps, A as createBaseVNode } from "./index-bi78Y1IN.js";
var script$1 = {
name: "BarsIcon",
"extends": script$2
@ -43,4 +43,4 @@ export {
script as a,
script$1 as s
};
//# sourceMappingURL=index-nMMCMbCV.js.map
//# sourceMappingURL=index-bCeMLtLM.js.map
2
comfy/web/assets/index-nMMCMbCV.js.map → comfy/web/assets/index-bCeMLtLM.js.map
generated
vendored
@ -1 +1 @@
|
||||
{"version":3,"file":"index-nMMCMbCV.js","sources":["../../../../../node_modules/@primevue/icons/bars/index.mjs","../../../../../node_modules/@primevue/icons/plus/index.mjs"],"sourcesContent":["import BaseIcon from '@primevue/icons/baseicon';\nimport { openBlock, createElementBlock, mergeProps, createElementVNode } from 'vue';\n\nvar script = {\n name: 'BarsIcon',\n \"extends\": BaseIcon\n};\n\nfunction render(_ctx, _cache, $props, $setup, $data, $options) {\n return openBlock(), createElementBlock(\"svg\", mergeProps({\n width: \"14\",\n height: \"14\",\n viewBox: \"0 0 14 14\",\n fill: \"none\",\n xmlns: \"http://www.w3.org/2000/svg\"\n }, _ctx.pti()), _cache[0] || (_cache[0] = [createElementVNode(\"path\", {\n \"fill-rule\": \"evenodd\",\n \"clip-rule\": \"evenodd\",\n d: \"M13.3226 3.6129H0.677419C0.497757 3.6129 0.325452 3.54152 0.198411 3.41448C0.0713707 3.28744 0 3.11514 0 2.93548C0 2.75581 0.0713707 2.58351 0.198411 2.45647C0.325452 2.32943 0.497757 2.25806 0.677419 2.25806H13.3226C13.5022 2.25806 13.6745 2.32943 13.8016 2.45647C13.9286 2.58351 14 2.75581 14 2.93548C14 3.11514 13.9286 3.28744 13.8016 3.41448C13.6745 3.54152 13.5022 3.6129 13.3226 3.6129ZM13.3226 7.67741H0.677419C0.497757 7.67741 0.325452 7.60604 0.198411 7.479C0.0713707 7.35196 0 7.17965 0 6.99999C0 6.82033 0.0713707 6.64802 0.198411 6.52098C0.325452 6.39394 0.497757 6.32257 0.677419 6.32257H13.3226C13.5022 6.32257 13.6745 6.39394 13.8016 6.52098C13.9286 6.64802 14 6.82033 14 6.99999C14 7.17965 13.9286 7.35196 13.8016 7.479C13.6745 7.60604 13.5022 7.67741 13.3226 7.67741ZM0.677419 11.7419H13.3226C13.5022 11.7419 13.6745 11.6706 13.8016 11.5435C13.9286 11.4165 14 11.2442 14 11.0645C14 10.8848 13.9286 10.7125 13.8016 10.5855C13.6745 10.4585 13.5022 10.3871 13.3226 10.3871H0.677419C0.497757 10.3871 0.325452 10.4585 0.198411 10.5855C0.0713707 10.7125 0 10.8848 0 11.0645C0 11.2442 0.0713707 11.4165 0.198411 11.5435C0.325452 11.6706 0.497757 11.7419 0.677419 11.7419Z\",\n fill: \"currentColor\"\n }, null, -1)]), 16);\n}\n\nscript.render = render;\n\nexport { script as default };\n//# sourceMappingURL=index.mjs.map\n","import BaseIcon from '@primevue/icons/baseicon';\nimport { openBlock, createElementBlock, mergeProps, createElementVNode } from 'vue';\n\nvar script = {\n name: 'PlusIcon',\n \"extends\": BaseIcon\n};\n\nfunction render(_ctx, _cache, $props, $setup, $data, $options) {\n return openBlock(), createElementBlock(\"svg\", mergeProps({\n width: \"14\",\n height: \"14\",\n viewBox: \"0 0 14 14\",\n fill: \"none\",\n xmlns: \"http://www.w3.org/2000/svg\"\n }, _ctx.pti()), _cache[0] || (_cache[0] = [createElementVNode(\"path\", {\n d: \"M7.67742 6.32258V0.677419C7.67742 0.497757 7.60605 0.325452 7.47901 0.198411C7.35197 0.0713707 7.17966 0 7 0C6.82034 0 6.64803 0.0713707 6.52099 0.198411C6.39395 0.325452 6.32258 0.497757 6.32258 0.677419V6.32258H0.677419C0.497757 6.32258 0.325452 6.39395 0.198411 6.52099C0.0713707 6.64803 0 6.82034 0 7C0 7.17966 0.0713707 7.35197 0.198411 7.47901C0.325452 7.60605 0.497757 7.67742 0.677419 7.67742H6.32258V13.3226C6.32492 13.5015 6.39704 13.6725 6.52358 13.799C6.65012 13.9255 6.82106 13.9977 7 14C7.17966 14 7.35197 13.9286 7.47901 13.8016C7.60605 13.6745 7.67742 13.5022 7.67742 13.3226V7.67742H13.3226C13.5022 7.67742 13.6745 7.60605 13.8016 7.47901C13.9286 7.35197 14 7.17966 14 7C13.9977 6.82106 13.9255 6.65012 13.799 6.52358C13.6725 6.39704 13.5015 6.32492 13.3226 6.32258H7.67742Z\",\n fill: \"currentColor\"\n }, null, -1)]), 16);\n}\n\nscript.render = render;\n\nexport { script as 
default };\n//# sourceMappingURL=index.mjs.map\n"],"names":["script","BaseIcon","render","createElementVNode"],"mappings":";;;AAGG,IAACA,WAAS;AAAA,EACX,MAAM;AAAA,EACN,WAAWC;AACb;AAEA,SAASC,SAAO,MAAM,QAAQ,QAAQ,QAAQ,OAAO,UAAU;AAC7D,SAAO,UAAW,GAAE,mBAAmB,OAAO,WAAW;AAAA,IACvD,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,MAAM;AAAA,IACN,OAAO;AAAA,EACR,GAAE,KAAK,KAAK,GAAG,OAAO,CAAC,MAAM,OAAO,CAAC,IAAI,CAACC,gBAAmB,QAAQ;AAAA,IACpE,aAAa;AAAA,IACb,aAAa;AAAA,IACb,GAAG;AAAA,IACH,MAAM;AAAA,EACP,GAAE,MAAM,EAAE,CAAC,IAAI,EAAE;AACpB;AAbSD;AAeTF,SAAO,SAASE;ACpBb,IAAC,SAAS;AAAA,EACX,MAAM;AAAA,EACN,WAAWD;AACb;AAEA,SAAS,OAAO,MAAM,QAAQ,QAAQ,QAAQ,OAAO,UAAU;AAC7D,SAAO,UAAW,GAAE,mBAAmB,OAAO,WAAW;AAAA,IACvD,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,MAAM;AAAA,IACN,OAAO;AAAA,EACR,GAAE,KAAK,KAAK,GAAG,OAAO,CAAC,MAAM,OAAO,CAAC,IAAI,CAACE,gBAAmB,QAAQ;AAAA,IACpE,GAAG;AAAA,IACH,MAAM;AAAA,EACP,GAAE,MAAM,EAAE,CAAC,IAAI,EAAE;AACpB;AAXS;AAaT,OAAO,SAAS;","x_google_ignoreList":[0,1]}
|
||||
{"version":3,"file":"index-bCeMLtLM.js","sources":["../../../../../node_modules/@primevue/icons/bars/index.mjs","../../../../../node_modules/@primevue/icons/plus/index.mjs"],"sourcesContent":["import BaseIcon from '@primevue/icons/baseicon';\nimport { openBlock, createElementBlock, mergeProps, createElementVNode } from 'vue';\n\nvar script = {\n name: 'BarsIcon',\n \"extends\": BaseIcon\n};\n\nfunction render(_ctx, _cache, $props, $setup, $data, $options) {\n return openBlock(), createElementBlock(\"svg\", mergeProps({\n width: \"14\",\n height: \"14\",\n viewBox: \"0 0 14 14\",\n fill: \"none\",\n xmlns: \"http://www.w3.org/2000/svg\"\n }, _ctx.pti()), _cache[0] || (_cache[0] = [createElementVNode(\"path\", {\n \"fill-rule\": \"evenodd\",\n \"clip-rule\": \"evenodd\",\n d: \"M13.3226 3.6129H0.677419C0.497757 3.6129 0.325452 3.54152 0.198411 3.41448C0.0713707 3.28744 0 3.11514 0 2.93548C0 2.75581 0.0713707 2.58351 0.198411 2.45647C0.325452 2.32943 0.497757 2.25806 0.677419 2.25806H13.3226C13.5022 2.25806 13.6745 2.32943 13.8016 2.45647C13.9286 2.58351 14 2.75581 14 2.93548C14 3.11514 13.9286 3.28744 13.8016 3.41448C13.6745 3.54152 13.5022 3.6129 13.3226 3.6129ZM13.3226 7.67741H0.677419C0.497757 7.67741 0.325452 7.60604 0.198411 7.479C0.0713707 7.35196 0 7.17965 0 6.99999C0 6.82033 0.0713707 6.64802 0.198411 6.52098C0.325452 6.39394 0.497757 6.32257 0.677419 6.32257H13.3226C13.5022 6.32257 13.6745 6.39394 13.8016 6.52098C13.9286 6.64802 14 6.82033 14 6.99999C14 7.17965 13.9286 7.35196 13.8016 7.479C13.6745 7.60604 13.5022 7.67741 13.3226 7.67741ZM0.677419 11.7419H13.3226C13.5022 11.7419 13.6745 11.6706 13.8016 11.5435C13.9286 11.4165 14 11.2442 14 11.0645C14 10.8848 13.9286 10.7125 13.8016 10.5855C13.6745 10.4585 13.5022 10.3871 13.3226 10.3871H0.677419C0.497757 10.3871 0.325452 10.4585 0.198411 10.5855C0.0713707 10.7125 0 10.8848 0 11.0645C0 11.2442 0.0713707 11.4165 0.198411 11.5435C0.325452 11.6706 0.497757 11.7419 0.677419 11.7419Z\",\n fill: \"currentColor\"\n }, null, -1)]), 16);\n}\n\nscript.render = render;\n\nexport { script as default };\n//# sourceMappingURL=index.mjs.map\n","import BaseIcon from '@primevue/icons/baseicon';\nimport { openBlock, createElementBlock, mergeProps, createElementVNode } from 'vue';\n\nvar script = {\n name: 'PlusIcon',\n \"extends\": BaseIcon\n};\n\nfunction render(_ctx, _cache, $props, $setup, $data, $options) {\n return openBlock(), createElementBlock(\"svg\", mergeProps({\n width: \"14\",\n height: \"14\",\n viewBox: \"0 0 14 14\",\n fill: \"none\",\n xmlns: \"http://www.w3.org/2000/svg\"\n }, _ctx.pti()), _cache[0] || (_cache[0] = [createElementVNode(\"path\", {\n d: \"M7.67742 6.32258V0.677419C7.67742 0.497757 7.60605 0.325452 7.47901 0.198411C7.35197 0.0713707 7.17966 0 7 0C6.82034 0 6.64803 0.0713707 6.52099 0.198411C6.39395 0.325452 6.32258 0.497757 6.32258 0.677419V6.32258H0.677419C0.497757 6.32258 0.325452 6.39395 0.198411 6.52099C0.0713707 6.64803 0 6.82034 0 7C0 7.17966 0.0713707 7.35197 0.198411 7.47901C0.325452 7.60605 0.497757 7.67742 0.677419 7.67742H6.32258V13.3226C6.32492 13.5015 6.39704 13.6725 6.52358 13.799C6.65012 13.9255 6.82106 13.9977 7 14C7.17966 14 7.35197 13.9286 7.47901 13.8016C7.60605 13.6745 7.67742 13.5022 7.67742 13.3226V7.67742H13.3226C13.5022 7.67742 13.6745 7.60605 13.8016 7.47901C13.9286 7.35197 14 7.17966 14 7C13.9977 6.82106 13.9255 6.65012 13.799 6.52358C13.6725 6.39704 13.5015 6.32492 13.3226 6.32258H7.67742Z\",\n fill: \"currentColor\"\n }, null, -1)]), 16);\n}\n\nscript.render = render;\n\nexport { script as 
default };\n//# sourceMappingURL=index.mjs.map\n"],"names":["script","BaseIcon","render","createElementVNode"],"mappings":";;;AAGG,IAACA,WAAS;AAAA,EACX,MAAM;AAAA,EACN,WAAWC;AACb;AAEA,SAASC,SAAO,MAAM,QAAQ,QAAQ,QAAQ,OAAO,UAAU;AAC7D,SAAO,UAAW,GAAE,mBAAmB,OAAO,WAAW;AAAA,IACvD,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,MAAM;AAAA,IACN,OAAO;AAAA,EACR,GAAE,KAAK,KAAK,GAAG,OAAO,CAAC,MAAM,OAAO,CAAC,IAAI,CAACC,gBAAmB,QAAQ;AAAA,IACpE,aAAa;AAAA,IACb,aAAa;AAAA,IACb,GAAG;AAAA,IACH,MAAM;AAAA,EACP,GAAE,MAAM,EAAE,CAAC,IAAI,EAAE;AACpB;AAbSD;AAeTF,SAAO,SAASE;ACpBb,IAAC,SAAS;AAAA,EACX,MAAM;AAAA,EACN,WAAWD;AACb;AAEA,SAAS,OAAO,MAAM,QAAQ,QAAQ,QAAQ,OAAO,UAAU;AAC7D,SAAO,UAAW,GAAE,mBAAmB,OAAO,WAAW;AAAA,IACvD,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,MAAM;AAAA,IACN,OAAO;AAAA,EACR,GAAE,KAAK,KAAK,GAAG,OAAO,CAAC,MAAM,OAAO,CAAC,IAAI,CAACE,gBAAmB,QAAQ;AAAA,IACpE,GAAG;AAAA,IACH,MAAM;AAAA,EACP,GAAE,MAAM,EAAE,CAAC,IAAI,EAAE;AACpB;AAXS;AAaT,OAAO,SAAS;","x_google_ignoreList":[0,1]}
|
||||
75875
comfy/web/assets/index-BNX_XOqh.js → comfy/web/assets/index-bi78Y1IN.js
generated
vendored
File diff suppressed because one or more lines are too long
1
comfy/web/assets/index-bi78Y1IN.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
945
comfy/web/assets/index-B_uZlOM8.js → comfy/web/assets/index-ftUEqmu1.js
generated
vendored
File diff suppressed because one or more lines are too long
1
comfy/web/assets/index-ftUEqmu1.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
@ -1,6 +1,6 @@
var __defProp = Object.defineProperty;
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
import { bB as api, bG as $el } from "./index-BNX_XOqh.js";
import { aY as api, bX as $el } from "./index-bi78Y1IN.js";
function createSpinner() {
const div = document.createElement("div");
div.innerHTML = `<div class="lds-ring"><div></div><div></div><div></div><div></div></div>`;
@ -126,4 +126,4 @@ window.comfyAPI.userSelection.UserSelectionScreen = UserSelectionScreen;
export {
UserSelectionScreen
};
//# sourceMappingURL=userSelection-C7IbQlVC.js.map
//# sourceMappingURL=userSelection-C-TGdC-2.js.map
File diff suppressed because one or more lines are too long
1
comfy/web/assets/widgetInputs-B62b6cfl.js.map
generated
vendored
File diff suppressed because one or more lines are too long
30
comfy/web/assets/widgetInputs-B62b6cfl.js → comfy/web/assets/widgetInputs-DjCs8Yab.js
generated
vendored
@ -1,6 +1,6 @@
|
||||
var __defProp = Object.defineProperty;
|
||||
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
|
||||
import { e as LGraphNode, c as app, bL as applyTextReplacements, bK as ComfyWidgets, bM as addValueControlWidgets, k as LiteGraph } from "./index-BNX_XOqh.js";
|
||||
import { e as LGraphNode, c as app, c2 as applyTextReplacements, c1 as ComfyWidgets, c4 as addValueControlWidgets, k as LiteGraph } from "./index-bi78Y1IN.js";
|
||||
const CONVERTED_TYPE = "converted-widget";
|
||||
const VALID_TYPES = [
|
||||
"STRING",
|
||||
@ -171,7 +171,7 @@ class PrimitiveNode extends LGraphNode {
|
||||
if (type instanceof Array) {
|
||||
type = "COMBO";
|
||||
}
|
||||
const size = this.size;
|
||||
const [oldWidth, oldHeight] = this.size;
|
||||
let widget;
|
||||
if (type in ComfyWidgets) {
|
||||
widget = (ComfyWidgets[type](this, "value", inputData, app) || {}).widget;
|
||||
@ -218,8 +218,8 @@ class PrimitiveNode extends LGraphNode {
|
||||
return r;
|
||||
};
|
||||
this.size = [
|
||||
Math.max(this.size[0], size[0]),
|
||||
Math.max(this.size[1], size[1])
|
||||
Math.max(this.size[0], oldWidth),
|
||||
Math.max(this.size[1], oldHeight)
|
||||
];
|
||||
if (!recreating) {
|
||||
const sz = this.computeSize();
|
||||
@ -320,7 +320,7 @@ class PrimitiveNode extends LGraphNode {
|
||||
}
|
||||
}
|
||||
function getWidgetConfig(slot) {
|
||||
return slot.widget[CONFIG] ?? slot.widget[GET_CONFIG]();
|
||||
return slot.widget[CONFIG] ?? slot.widget[GET_CONFIG]?.() ?? ["*", {}];
|
||||
}
|
||||
__name(getWidgetConfig, "getWidgetConfig");
|
||||
function getConfig(widgetName) {
|
||||
@ -373,7 +373,7 @@ __name(showWidget, "showWidget");
|
||||
function convertToInput(node, widget, config) {
|
||||
hideWidget(node, widget);
|
||||
const { type } = getWidgetType(config);
|
||||
const sz = node.size;
|
||||
const [oldWidth, oldHeight] = node.size;
|
||||
const inputIsOptional = !!widget.options?.inputIsOptional;
|
||||
const input = node.addInput(widget.name, type, {
|
||||
widget: { name: widget.name, [GET_CONFIG]: () => config },
|
||||
@ -382,18 +382,24 @@ function convertToInput(node, widget, config) {
|
||||
for (const widget2 of node.widgets) {
|
||||
widget2.last_y += LiteGraph.NODE_SLOT_HEIGHT;
|
||||
}
|
||||
node.setSize([Math.max(sz[0], node.size[0]), Math.max(sz[1], node.size[1])]);
|
||||
node.setSize([
|
||||
Math.max(oldWidth, node.size[0]),
|
||||
Math.max(oldHeight, node.size[1])
|
||||
]);
|
||||
return input;
|
||||
}
|
||||
__name(convertToInput, "convertToInput");
|
||||
function convertToWidget(node, widget) {
|
||||
showWidget(widget);
|
||||
const sz = node.size;
|
||||
const [oldWidth, oldHeight] = node.size;
|
||||
node.removeInput(node.inputs.findIndex((i) => i.widget?.name === widget.name));
|
||||
for (const widget2 of node.widgets) {
|
||||
widget2.last_y -= LiteGraph.NODE_SLOT_HEIGHT;
|
||||
}
|
||||
node.setSize([Math.max(sz[0], node.size[0]), Math.max(sz[1], node.size[1])]);
|
||||
node.setSize([
|
||||
Math.max(oldWidth, node.size[0]),
|
||||
Math.max(oldHeight, node.size[1])
|
||||
]);
|
||||
}
|
||||
__name(convertToWidget, "convertToWidget");
|
||||
function getWidgetType(config) {
|
||||
@ -450,7 +456,7 @@ function setWidgetConfig(slot, config, target) {
|
||||
__name(setWidgetConfig, "setWidgetConfig");
|
||||
function mergeIfValid(output, config2, forceUpdate, recreateWidget, config1) {
|
||||
if (!config1) {
|
||||
config1 = output.widget[CONFIG] ?? output.widget[GET_CONFIG]();
|
||||
config1 = getWidgetConfig(output);
|
||||
}
|
||||
if (config1[0] instanceof Array) {
|
||||
if (!isValidCombo(config1[0], config2[0])) return;
|
||||
@ -695,7 +701,7 @@ app.registerExtension({
|
||||
const r = origOnInputDblClick ? origOnInputDblClick.apply(this, arguments) : void 0;
|
||||
const input = this.inputs[slot];
|
||||
if (!input.widget || !input[ignoreDblClick]) {
|
||||
if (!(input.type in ComfyWidgets) && !(input.widget[GET_CONFIG]?.()?.[0] instanceof Array)) {
|
||||
if (!(input.type in ComfyWidgets) && !(input.widget?.[GET_CONFIG]?.()?.[0] instanceof Array)) {
|
||||
return r;
|
||||
}
|
||||
}
|
||||
@ -753,4 +759,4 @@ export {
|
||||
mergeIfValid,
|
||||
setWidgetConfig
|
||||
};
|
||||
//# sourceMappingURL=widgetInputs-B62b6cfl.js.map
|
||||
//# sourceMappingURL=widgetInputs-DjCs8Yab.js.map
|
||||
1
comfy/web/assets/widgetInputs-DjCs8Yab.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
BIN
comfy/web/cursor/colorSelect.png
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 373 B
BIN
comfy/web/cursor/paintBucket.png
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 410 B
2
comfy/web/extensions/core/maskEditorOld.js
vendored
Normal file
@ -0,0 +1,2 @@
// Shim for extensions\core\maskEditorOld.ts
export const MaskEditorDialogOld = window.comfyAPI.maskEditorOld.MaskEditorDialogOld;
3
comfy/web/extensions/core/vintageClipboard.js
vendored
Normal file
@ -0,0 +1,3 @@
// Shim for extensions/core/vintageClipboard.ts
export const serialise = window.comfyAPI.vintageClipboard.serialise;
export const deserialiseAndCreate = window.comfyAPI.vintageClipboard.deserialiseAndCreate;
81
comfy/web/index.html
vendored
@ -1,42 +1,43 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>ComfyUI</title>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no">
|
||||
<link rel="stylesheet" type="text/css" href="user.css" />
|
||||
<link rel="stylesheet" type="text/css" href="materialdesignicons.min.css" />
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>ComfyUI</title>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no">
|
||||
<link rel="stylesheet" type="text/css" href="user.css" />
|
||||
<link rel="stylesheet" type="text/css" href="materialdesignicons.min.css" />
|
||||
<script type="module" crossorigin src="./assets/index-BNX_XOqh.js"></script>
|
||||
<link rel="stylesheet" crossorigin href="./assets/index-HT1vecxT.css">
|
||||
</head>
|
||||
<body class="litegraph grid">
|
||||
<div id="vue-app"></div>
|
||||
<div id="comfy-user-selection" class="comfy-user-selection" style="display: none;">
|
||||
<main class="comfy-user-selection-inner">
|
||||
<h1>ComfyUI</h1>
|
||||
<form>
|
||||
<section>
|
||||
<label>New user:
|
||||
<input placeholder="Enter a username" />
|
||||
</label>
|
||||
</section>
|
||||
<div class="comfy-user-existing">
|
||||
<span class="or-separator">OR</span>
|
||||
<section>
|
||||
<label>
|
||||
Existing user:
|
||||
<select>
|
||||
<option hidden disabled selected value> Select a user </option>
|
||||
</select>
|
||||
</label>
|
||||
</section>
|
||||
</div>
|
||||
<footer>
|
||||
<span class="comfy-user-error"> </span>
|
||||
<button class="comfy-btn comfy-user-button-next">Next</button>
|
||||
</footer>
|
||||
</form>
|
||||
</main>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
</head>
|
||||
<body class="litegraph grid">
|
||||
<div id="vue-app"></div>
|
||||
<div id="comfy-user-selection" class="comfy-user-selection" style="display: none;">
|
||||
<main class="comfy-user-selection-inner">
|
||||
<h1>ComfyUI</h1>
|
||||
<form>
|
||||
<section>
|
||||
<label>New user:
|
||||
<input placeholder="Enter a username" />
|
||||
</label>
|
||||
</section>
|
||||
<div class="comfy-user-existing">
|
||||
<span class="or-separator">OR</span>
|
||||
<section>
|
||||
<label>
|
||||
Existing user:
|
||||
<select>
|
||||
<option hidden disabled selected value> Select a user </option>
|
||||
</select>
|
||||
</label>
|
||||
</section>
|
||||
</div>
|
||||
<footer>
|
||||
<span class="comfy-user-error"> </span>
|
||||
<button class="comfy-btn comfy-user-button-next">Next</button>
|
||||
</footer>
|
||||
</form>
|
||||
</main>
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
||||
1
comfy/web/scripts/changeTracker.js
vendored
@ -1,3 +1,2 @@
// Shim for scripts\changeTracker.ts
export const ChangeTracker = window.comfyAPI.changeTracker.ChangeTracker;
export const globalTracker = window.comfyAPI.changeTracker.globalTracker;
2
comfy/web/scripts/defaultGraph.js
vendored
@ -1,2 +1,4 @@
// Shim for scripts\defaultGraph.ts
export const defaultGraph = window.comfyAPI.defaultGraph.defaultGraph;
export const defaultGraphJSON = window.comfyAPI.defaultGraph.defaultGraphJSON;
export const blankGraph = window.comfyAPI.defaultGraph.blankGraph;
2
comfy/web/scripts/domWidget.js
vendored
@ -1,2 +0,0 @@
// Shim for scripts\domWidget.ts
export const addDomClippingSetting = window.comfyAPI.domWidget.addDomClippingSetting;
3
comfy/web/scripts/workflows.js
vendored
@ -1,3 +0,0 @@
// Shim for scripts\workflows.ts
export const ComfyWorkflowManager = window.comfyAPI.workflows.ComfyWorkflowManager;
export const ComfyWorkflow = window.comfyAPI.workflows.ComfyWorkflow;
@ -5,9 +5,6 @@ from comfy.nodes.common import MAX_RESOLUTION


class EmptyMochiLatentVideo:
def __init__(self):
self.device = comfy.model_management.intermediate_device()

@classmethod
def INPUT_TYPES(s):
return {"required": {"width": ("INT", {"default": 848, "min": 16, "max": MAX_RESOLUTION, "step": 16}),
@ -18,10 +15,10 @@ class EmptyMochiLatentVideo:
RETURN_TYPES = ("LATENT",)
FUNCTION = "generate"

CATEGORY = "latent/mochi"
CATEGORY = "latent/video"

def generate(self, width, height, length, batch_size=1):
latent = torch.zeros([batch_size, 12, ((length - 1) // 6) + 1, height // 8, width // 8], device=self.device)
latent = torch.zeros([batch_size, 12, ((length - 1) // 6) + 1, height // 8, width // 8], device=comfy.model_management.intermediate_device())
return ({"samples": latent},)

@ -30,8 +30,8 @@ class X0(comfy.model_sampling.EPS):
|
||||
class ModelSamplingDiscreteDistilled(comfy.model_sampling.ModelSamplingDiscrete):
|
||||
original_timesteps = 50
|
||||
|
||||
def __init__(self, model_config=None):
|
||||
super().__init__(model_config)
|
||||
def __init__(self, model_config=None, zsnr=None):
|
||||
super().__init__(model_config, zsnr=zsnr)
|
||||
|
||||
self.skip_steps = self.num_timesteps // self.original_timesteps
|
||||
|
||||
@ -55,25 +55,6 @@ class ModelSamplingDiscreteDistilled(comfy.model_sampling.ModelSamplingDiscrete)
|
||||
return log_sigma.exp().to(timestep.device)
|
||||
|
||||
|
||||
def rescale_zero_terminal_snr_sigmas(sigmas):
|
||||
alphas_cumprod = 1 / ((sigmas * sigmas) + 1)
|
||||
alphas_bar_sqrt = alphas_cumprod.sqrt()
|
||||
|
||||
# Store old values.
|
||||
alphas_bar_sqrt_0 = alphas_bar_sqrt[0].clone()
|
||||
alphas_bar_sqrt_T = alphas_bar_sqrt[-1].clone()
|
||||
|
||||
# Shift so the last timestep is zero.
|
||||
alphas_bar_sqrt -= (alphas_bar_sqrt_T)
|
||||
|
||||
# Scale so the first timestep is back to the old value.
|
||||
alphas_bar_sqrt *= alphas_bar_sqrt_0 / (alphas_bar_sqrt_0 - alphas_bar_sqrt_T)
|
||||
|
||||
# Convert alphas_bar_sqrt to betas
|
||||
alphas_bar = alphas_bar_sqrt**2 # Revert sqrt
|
||||
alphas_bar[-1] = 4.8973451890853435e-08
|
||||
return ((1 - alphas_bar) / alphas_bar) ** 0.5
|
||||
|
||||
class ModelSamplingDiscrete:
|
||||
@classmethod
|
||||
def INPUT_TYPES(s):
|
||||
@ -105,9 +86,7 @@ class ModelSamplingDiscrete:
|
||||
class ModelSamplingAdvanced(sampling_base, sampling_type):
|
||||
pass
|
||||
|
||||
model_sampling = ModelSamplingAdvanced(model.model.model_config)
|
||||
if zsnr:
|
||||
model_sampling.set_sigmas(rescale_zero_terminal_snr_sigmas(model_sampling.sigmas))
|
||||
model_sampling = ModelSamplingAdvanced(model.model.model_config, zsnr=zsnr)
|
||||
|
||||
m.add_object_patch("model_sampling", model_sampling)
|
||||
return (m, )
|
||||
|
||||
@ -4,10 +4,11 @@ from .nodes_model_merging import ModelMergeBlocks
|
||||
|
||||
class ModelMergeSD1(nodes_model_merging.ModelMergeBlocks):
|
||||
CATEGORY = "advanced/model_merging/model_specific"
|
||||
|
||||
@classmethod
|
||||
def INPUT_TYPES(s):
|
||||
arg_dict = { "model1": ("MODEL",),
|
||||
"model2": ("MODEL",)}
|
||||
arg_dict = {"model1": ("MODEL",),
|
||||
"model2": ("MODEL",)}
|
||||
|
||||
argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01})
|
||||
|
||||
@ -33,8 +34,8 @@ class ModelMergeSDXL(nodes_model_merging.ModelMergeBlocks):
|
||||
|
||||
@classmethod
|
||||
def INPUT_TYPES(s):
|
||||
arg_dict = { "model1": ("MODEL",),
|
||||
"model2": ("MODEL",)}
|
||||
arg_dict = {"model1": ("MODEL",),
|
||||
"model2": ("MODEL",)}
|
||||
|
||||
argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01})
|
||||
|
||||
@ -54,13 +55,14 @@ class ModelMergeSDXL(nodes_model_merging.ModelMergeBlocks):
|
||||
|
||||
return {"required": arg_dict}
|
||||
|
||||
|
||||
class ModelMergeSD3_2B(nodes_model_merging.ModelMergeBlocks):
|
||||
CATEGORY = "advanced/model_merging/model_specific"
|
||||
|
||||
@classmethod
|
||||
def INPUT_TYPES(s):
|
||||
arg_dict = { "model1": ("MODEL",),
|
||||
"model2": ("MODEL",)}
|
||||
arg_dict = {"model1": ("MODEL",),
|
||||
"model2": ("MODEL",)}
|
||||
|
||||
argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01})
|
||||
|
||||
@ -77,13 +79,42 @@ class ModelMergeSD3_2B(nodes_model_merging.ModelMergeBlocks):
|
||||
|
||||
return {"required": arg_dict}
|
||||
|
||||
|
||||
class ModelMergeAuraflow(comfy_extras.nodes_model_merging.ModelMergeBlocks):
|
||||
CATEGORY = "advanced/model_merging/model_specific"
|
||||
|
||||
@classmethod
|
||||
def INPUT_TYPES(s):
|
||||
arg_dict = {"model1": ("MODEL",),
|
||||
"model2": ("MODEL",)}
|
||||
|
||||
argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01})
|
||||
|
||||
arg_dict["init_x_linear."] = argument
|
||||
arg_dict["positional_encoding"] = argument
|
||||
arg_dict["cond_seq_linear."] = argument
|
||||
arg_dict["register_tokens"] = argument
|
||||
arg_dict["t_embedder."] = argument
|
||||
|
||||
for i in range(4):
|
||||
arg_dict["double_layers.{}.".format(i)] = argument
|
||||
|
||||
for i in range(32):
|
||||
arg_dict["single_layers.{}.".format(i)] = argument
|
||||
|
||||
arg_dict["modF."] = argument
|
||||
arg_dict["final_linear."] = argument
|
||||
|
||||
return {"required": arg_dict}
|
||||
|
||||
|
||||
class ModelMergeFlux1(ModelMergeBlocks):
|
||||
CATEGORY = "advanced/model_merging/model_specific"
|
||||
|
||||
@classmethod
|
||||
def INPUT_TYPES(s):
|
||||
arg_dict = { "model1": ("MODEL",),
|
||||
"model2": ("MODEL",)}
|
||||
arg_dict = {"model1": ("MODEL",),
|
||||
"model2": ("MODEL",)}
|
||||
|
||||
argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01})
|
||||
|
||||
@ -103,13 +134,14 @@ class ModelMergeFlux1(ModelMergeBlocks):
|
||||
|
||||
return {"required": arg_dict}
|
||||
|
||||
|
||||
class ModelMergeSD35_Large(ModelMergeBlocks):
|
||||
CATEGORY = "advanced/model_merging/model_specific"
|
||||
|
||||
@classmethod
|
||||
def INPUT_TYPES(s):
|
||||
arg_dict = { "model1": ("MODEL",),
|
||||
"model2": ("MODEL",)}
|
||||
arg_dict = {"model1": ("MODEL",),
|
||||
"model2": ("MODEL",)}
|
||||
|
||||
argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01})
|
||||
|
||||
@ -126,11 +158,37 @@ class ModelMergeSD35_Large(ModelMergeBlocks):
|
||||
|
||||
return {"required": arg_dict}
|
||||
|
||||
|
||||
class ModelMergeMochiPreview(nodes_model_merging.ModelMergeBlocks):
|
||||
CATEGORY = "advanced/model_merging/model_specific"
|
||||
|
||||
@classmethod
|
||||
def INPUT_TYPES(s):
|
||||
arg_dict = {"model1": ("MODEL",),
|
||||
"model2": ("MODEL",)}
|
||||
|
||||
argument = ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01})
|
||||
|
||||
arg_dict["pos_frequencies."] = argument
|
||||
arg_dict["t_embedder."] = argument
|
||||
arg_dict["t5_y_embedder."] = argument
|
||||
arg_dict["t5_yproj."] = argument
|
||||
|
||||
for i in range(48):
|
||||
arg_dict["blocks.{}.".format(i)] = argument
|
||||
|
||||
arg_dict["final_layer."] = argument
|
||||
|
||||
return {"required": arg_dict}
|
||||
|
||||
|
||||
NODE_CLASS_MAPPINGS = {
|
||||
"ModelMergeSD1": ModelMergeSD1,
|
||||
"ModelMergeSD2": ModelMergeSD1, #SD1 and SD2 have the same blocks
|
||||
"ModelMergeSD2": ModelMergeSD1, # SD1 and SD2 have the same blocks
|
||||
"ModelMergeSDXL": ModelMergeSDXL,
|
||||
"ModelMergeSD3_2B": ModelMergeSD3_2B,
|
||||
"ModelMergeAuraflow": ModelMergeAuraflow,
|
||||
"ModelMergeFlux1": ModelMergeFlux1,
|
||||
"ModelMergeSD35_Large": ModelMergeSD35_Large,
|
||||
"ModelMergeMochiPreview": ModelMergeMochiPreview,
|
||||
}
|
||||
|
||||
@ -60,12 +60,24 @@ def create_blur_map(x0, attn, sigma=3.0, threshold=1.0):
|
||||
attn = attn.reshape(b, -1, hw1, hw2)
|
||||
# Global Average Pool
|
||||
mask = attn.mean(1, keepdim=False).sum(1, keepdim=False) > threshold
|
||||
ratio = 2 ** (math.ceil(math.sqrt(lh * lw / hw1)) - 1).bit_length()
|
||||
mid_shape = [math.ceil(lh / ratio), math.ceil(lw / ratio)]
|
||||
|
||||
total = mask.shape[-1]
|
||||
x = round(math.sqrt((lh / lw) * total))
|
||||
xx = None
|
||||
for i in range(0, math.floor(math.sqrt(total) / 2)):
|
||||
for j in [(x + i), max(1, x - i)]:
|
||||
if total % j == 0:
|
||||
xx = j
|
||||
break
|
||||
if xx is not None:
|
||||
break
|
||||
|
||||
x = xx
|
||||
y = total // x
|
||||
|
||||
# Reshape
|
||||
mask = (
|
||||
mask.reshape(b, *mid_shape)
|
||||
mask.reshape(b, x, y)
|
||||
.unsqueeze(1)
|
||||
.type(attn.dtype)
|
||||
)
|
||||
|
||||
@ -1,5 +1,3 @@
|
||||
import re
|
||||
|
||||
import torch
|
||||
|
||||
import comfy.model_management
|
||||
@ -9,6 +7,7 @@ import comfy.sd
|
||||
from comfy.cmd import folder_paths
|
||||
from comfy.model_downloader import get_or_download, get_filename_list_with_downloadable, KNOWN_CLIP_MODELS
|
||||
from comfy.nodes import base_nodes as nodes
|
||||
from . import nodes_slg
|
||||
|
||||
|
||||
class TripleCLIPLoader:
|
||||
@ -23,6 +22,8 @@ class TripleCLIPLoader:
|
||||
|
||||
CATEGORY = "advanced/loaders"
|
||||
|
||||
DESCRIPTION = "[Recipes]\n\nsd3: clip-l, clip-g, t5"
|
||||
|
||||
def load_clip(self, clip_name1, clip_name2, clip_name3):
|
||||
clip_path1 = get_or_download("text_encoders", clip_name1, KNOWN_CLIP_MODELS)
|
||||
clip_path2 = get_or_download("text_encoders", clip_name2, KNOWN_CLIP_MODELS)
|
||||
@ -110,7 +111,7 @@ class ControlNetApplySD3(nodes.ControlNetApplyAdvanced):
|
||||
DEPRECATED = True
|
||||
|
||||
|
||||
class SkipLayerGuidanceSD3:
|
||||
class SkipLayerGuidanceSD3(nodes_slg.SkipLayerGuidanceDiT):
|
||||
'''
|
||||
Enhance guidance towards detailed structure by having another set of CFG negative with skipped layers.
|
||||
Inspired by Perturbed Attention Guidance (https://arxiv.org/abs/2403.17377)
|
||||
@ -127,47 +128,12 @@ class SkipLayerGuidanceSD3:
|
||||
}}
|
||||
|
||||
RETURN_TYPES = ("MODEL",)
|
||||
FUNCTION = "skip_guidance"
|
||||
FUNCTION = "skip_guidance_sd3"
|
||||
|
||||
CATEGORY = "advanced/guidance"
|
||||
|
||||
def skip_guidance(self, model, layers, scale, start_percent, end_percent):
|
||||
if layers == "" or layers == None:
|
||||
return (model,)
|
||||
|
||||
# check if layer is comma separated integers
|
||||
def skip(args, extra_args):
|
||||
return args
|
||||
|
||||
model_sampling = model.get_model_object("model_sampling")
|
||||
sigma_start = model_sampling.percent_to_sigma(start_percent)
|
||||
sigma_end = model_sampling.percent_to_sigma(end_percent)
|
||||
|
||||
def post_cfg_function(args):
|
||||
model = args["model"]
|
||||
cond_pred = args["cond_denoised"]
|
||||
cond = args["cond"]
|
||||
cfg_result = args["denoised"]
|
||||
sigma = args["sigma"]
|
||||
x = args["input"]
|
||||
model_options = args["model_options"].copy()
|
||||
|
||||
for layer in layers:
|
||||
model_options = comfy.model_patcher.set_model_options_patch_replace(model_options, skip, "dit", "double_block", layer)
|
||||
model_sampling.percent_to_sigma(start_percent)
|
||||
|
||||
sigma_ = sigma[0].item()
|
||||
if scale > 0 and sigma_ >= sigma_end and sigma_ <= sigma_start:
|
||||
(slg,) = comfy.samplers.calc_cond_batch(model, [cond], x, sigma, model_options)
|
||||
cfg_result = cfg_result + (cond_pred - slg) * scale
|
||||
return cfg_result
|
||||
|
||||
layers = re.findall(r'\d+', layers)
|
||||
layers = [int(i) for i in layers]
|
||||
m = model.clone()
|
||||
m.set_model_sampler_post_cfg_function(post_cfg_function)
|
||||
|
||||
return (m,)
|
||||
def skip_guidance_sd3(self, model, layers, scale, start_percent, end_percent):
|
||||
return self.skip_guidance(model=model, scale=scale, start_percent=start_percent, end_percent=end_percent, double_layers=layers)
|
||||
|
||||
|
||||
NODE_CLASS_MAPPINGS = {
|
||||
|
||||
78
comfy_extras/nodes_slg.py
Normal file
@ -0,0 +1,78 @@
|
||||
import comfy.model_patcher
|
||||
import comfy.samplers
|
||||
import re
|
||||
|
||||
|
||||
class SkipLayerGuidanceDiT:
|
||||
'''
|
||||
Enhance guidance towards detailed structure by having another set of CFG negative with skipped layers.
|
||||
Inspired by Perturbed Attention Guidance (https://arxiv.org/abs/2403.17377)
|
||||
Original experimental implementation for SD3 by Dango233@StabilityAI.
|
||||
'''
|
||||
@classmethod
|
||||
def INPUT_TYPES(s):
|
||||
return {"required": {"model": ("MODEL", ),
|
||||
"double_layers": ("STRING", {"default": "7, 8, 9", "multiline": False}),
|
||||
"single_layers": ("STRING", {"default": "7, 8, 9", "multiline": False}),
|
||||
"scale": ("FLOAT", {"default": 3.0, "min": 0.0, "max": 10.0, "step": 0.1}),
|
||||
"start_percent": ("FLOAT", {"default": 0.01, "min": 0.0, "max": 1.0, "step": 0.001}),
|
||||
"end_percent": ("FLOAT", {"default": 0.15, "min": 0.0, "max": 1.0, "step": 0.001})
|
||||
}}
|
||||
RETURN_TYPES = ("MODEL",)
|
||||
FUNCTION = "skip_guidance"
|
||||
EXPERIMENTAL = True
|
||||
|
||||
DESCRIPTION = "Generic version of SkipLayerGuidance node that can be used on every DiT model."
|
||||
|
||||
CATEGORY = "advanced/guidance"
|
||||
|
||||
def skip_guidance(self, model, scale, start_percent, end_percent, double_layers="", single_layers=""):
|
||||
# check if layer is comma separated integers
|
||||
def skip(args, extra_args):
|
||||
return args
|
||||
|
||||
model_sampling = model.get_model_object("model_sampling")
|
||||
sigma_start = model_sampling.percent_to_sigma(start_percent)
|
||||
sigma_end = model_sampling.percent_to_sigma(end_percent)
|
||||
|
||||
double_layers = re.findall(r'\d+', double_layers)
|
||||
double_layers = [int(i) for i in double_layers]
|
||||
|
||||
single_layers = re.findall(r'\d+', single_layers)
|
||||
single_layers = [int(i) for i in single_layers]
|
||||
|
||||
if len(double_layers) == 0 and len(single_layers) == 0:
|
||||
return (model, )
|
||||
|
||||
def post_cfg_function(args):
|
||||
model = args["model"]
|
||||
cond_pred = args["cond_denoised"]
|
||||
cond = args["cond"]
|
||||
cfg_result = args["denoised"]
|
||||
sigma = args["sigma"]
|
||||
x = args["input"]
|
||||
model_options = args["model_options"].copy()
|
||||
|
||||
for layer in double_layers:
|
||||
model_options = comfy.model_patcher.set_model_options_patch_replace(model_options, skip, "dit", "double_block", layer)
|
||||
|
||||
for layer in single_layers:
|
||||
model_options = comfy.model_patcher.set_model_options_patch_replace(model_options, skip, "dit", "single_block", layer)
|
||||
|
||||
model_sampling.percent_to_sigma(start_percent)
|
||||
|
||||
sigma_ = sigma[0].item()
|
||||
if scale > 0 and sigma_ >= sigma_end and sigma_ <= sigma_start:
|
||||
(slg,) = comfy.samplers.calc_cond_batch(model, [cond], x, sigma, model_options)
|
||||
cfg_result = cfg_result + (cond_pred - slg) * scale
|
||||
return cfg_result
|
||||
|
||||
m = model.clone()
|
||||
m.set_model_sampler_post_cfg_function(post_cfg_function)
|
||||
|
||||
return (m, )
|
||||
|
||||
|
||||
NODE_CLASS_MAPPINGS = {
|
||||
"SkipLayerGuidanceDiT": SkipLayerGuidanceDiT,
|
||||
}
|
||||
@ -8,7 +8,7 @@ from comfy.cmd.folder_paths import models_dir, user_directory, output_directory

@pytest.fixture
def internal_routes():
return InternalRoutes()
return InternalRoutes(None)

@pytest.fixture
def aiohttp_client_factory(aiohttp_client, internal_routes):
@ -102,7 +102,7 @@ async def test_file_service_initialization():
# Create a mock instance
mock_file_service_instance = MagicMock(spec=FileService)
MockFileService.return_value = mock_file_service_instance
internal_routes = InternalRoutes()
internal_routes = InternalRoutes(None)

# Check if FileService was initialized with the correct parameters
MockFileService.assert_called_once_with({
@ -112,4 +112,4 @@ async def test_file_service_initialization():
})

# Verify that the file_service attribute of InternalRoutes is set
assert internal_routes.file_service == mock_file_service_instance
assert internal_routes.file_service == mock_file_service_instance
@ -16,7 +16,7 @@ def user_manager(tmp_path):
|
||||
um = UserManager()
|
||||
um.get_request_user_filepath = lambda req, file, **kwargs: os.path.join(
|
||||
tmp_path, file
|
||||
)
|
||||
) if file else tmp_path
|
||||
return um
|
||||
|
||||
|
||||
@ -82,9 +82,7 @@ async def test_listuserdata_split_path(aiohttp_client, app, tmp_path):
|
||||
client = await aiohttp_client(app)
|
||||
resp = await client.get("/userdata?dir=test_dir&recurse=true&split=true")
|
||||
assert resp.status == 200
|
||||
assert await resp.json() == [
|
||||
["subdir/file1.txt", "subdir", "file1.txt"]
|
||||
]
|
||||
assert await resp.json() == [["subdir/file1.txt", "subdir", "file1.txt"]]
|
||||
|
||||
|
||||
async def test_listuserdata_invalid_directory(aiohttp_client, app):
|
||||
@ -120,3 +118,116 @@ async def test_listuserdata_normalized_separator(aiohttp_client, app, tmp_path):
|
||||
assert "/" in result[0]["path"] # Ensure forward slash is used
|
||||
assert "\\" not in result[0]["path"] # Ensure backslash is not present
|
||||
assert result[0]["path"] == "subdir/file1.txt"
|
||||
|
||||
|
||||
async def test_post_userdata_new_file(aiohttp_client, app, tmp_path):
|
||||
client = await aiohttp_client(app)
|
||||
content = b"test content"
|
||||
resp = await client.post("/userdata/test.txt", data=content)
|
||||
|
||||
assert resp.status == 200
|
||||
assert await resp.text() == '"test.txt"'
|
||||
|
||||
# Verify file was created with correct content
|
||||
with open(tmp_path / "test.txt", "rb") as f:
|
||||
assert f.read() == content
|
||||
|
||||
|
||||
async def test_post_userdata_overwrite_existing(aiohttp_client, app, tmp_path):
|
||||
# Create initial file
|
||||
with open(tmp_path / "test.txt", "w") as f:
|
||||
f.write("initial content")
|
||||
|
||||
client = await aiohttp_client(app)
|
||||
new_content = b"updated content"
|
||||
resp = await client.post("/userdata/test.txt", data=new_content)
|
||||
|
||||
assert resp.status == 200
|
||||
assert await resp.text() == '"test.txt"'
|
||||
|
||||
# Verify file was overwritten
|
||||
with open(tmp_path / "test.txt", "rb") as f:
|
||||
assert f.read() == new_content
|
||||
|
||||
|
||||
async def test_post_userdata_no_overwrite(aiohttp_client, app, tmp_path):
|
||||
# Create initial file
|
||||
with open(tmp_path / "test.txt", "w") as f:
|
||||
f.write("initial content")
|
||||
|
||||
client = await aiohttp_client(app)
|
||||
resp = await client.post("/userdata/test.txt?overwrite=false", data=b"new content")
|
||||
|
||||
assert resp.status == 409
|
||||
|
||||
# Verify original content unchanged
|
||||
with open(tmp_path / "test.txt", "r") as f:
|
||||
assert f.read() == "initial content"
|
||||
|
||||
|
||||
async def test_post_userdata_full_info(aiohttp_client, app, tmp_path):
|
||||
client = await aiohttp_client(app)
|
||||
content = b"test content"
|
||||
resp = await client.post("/userdata/test.txt?full_info=true", data=content)
|
||||
|
||||
assert resp.status == 200
|
||||
result = await resp.json()
|
||||
assert result["path"] == "test.txt"
|
||||
assert result["size"] == len(content)
|
||||
assert "modified" in result
|
||||
|
||||
|
||||
async def test_move_userdata(aiohttp_client, app, tmp_path):
|
||||
# Create initial file
|
||||
with open(tmp_path / "source.txt", "w") as f:
|
||||
f.write("test content")
|
||||
|
||||
client = await aiohttp_client(app)
|
||||
resp = await client.post("/userdata/source.txt/move/dest.txt")
|
||||
|
||||
assert resp.status == 200
|
||||
assert await resp.text() == '"dest.txt"'
|
||||
|
||||
# Verify file was moved
|
||||
assert not os.path.exists(tmp_path / "source.txt")
|
||||
with open(tmp_path / "dest.txt", "r") as f:
|
||||
assert f.read() == "test content"
|
||||
|
||||
|
||||
async def test_move_userdata_no_overwrite(aiohttp_client, app, tmp_path):
|
||||
# Create source and destination files
|
||||
with open(tmp_path / "source.txt", "w") as f:
|
||||
f.write("source content")
|
||||
with open(tmp_path / "dest.txt", "w") as f:
|
||||
f.write("destination content")
|
||||
|
||||
client = await aiohttp_client(app)
|
||||
resp = await client.post("/userdata/source.txt/move/dest.txt?overwrite=false")
|
||||
|
||||
assert resp.status == 409
|
||||
|
||||
# Verify files remain unchanged
|
||||
with open(tmp_path / "source.txt", "r") as f:
|
||||
assert f.read() == "source content"
|
||||
with open(tmp_path / "dest.txt", "r") as f:
|
||||
assert f.read() == "destination content"
|
||||
|
||||
|
||||
async def test_move_userdata_full_info(aiohttp_client, app, tmp_path):
|
||||
# Create initial file
|
||||
with open(tmp_path / "source.txt", "w") as f:
|
||||
f.write("test content")
|
||||
|
||||
client = await aiohttp_client(app)
|
||||
resp = await client.post("/userdata/source.txt/move/dest.txt?full_info=true")
|
||||
|
||||
assert resp.status == 200
|
||||
result = await resp.json()
|
||||
assert result["path"] == "dest.txt"
|
||||
assert result["size"] == len("test content")
|
||||
assert "modified" in result
|
||||
|
||||
# Verify file was moved
|
||||
assert not os.path.exists(tmp_path / "source.txt")
|
||||
with open(tmp_path / "dest.txt", "r") as f:
|
||||
assert f.read() == "test content"
|
||||
|
||||