diff --git a/README.md b/README.md
index f62d4289a..b055325ed 100644
--- a/README.md
+++ b/README.md
@@ -126,10 +126,10 @@ After this you should have everything installed and can proceed to running Comfy
 
 You can install ComfyUI in Apple Mac silicon (M1 or M2) with any recent macOS version.
 
-1. Install pytorch. For instructions, read the [Accelerated PyTorch training on Mac](https://developer.apple.com/metal/pytorch/) Apple Developer guide.
+1. Install pytorch nightly. For instructions, read the [Accelerated PyTorch training on Mac](https://developer.apple.com/metal/pytorch/) Apple Developer guide (make sure to install the latest pytorch nightly).
 1. Follow the [ComfyUI manual installation](#manual-install-windows-linux) instructions for Windows and Linux.
 1. Install the ComfyUI [dependencies](#dependencies). If you have another Stable Diffusion UI [you might be able to reuse the dependencies](#i-already-have-another-ui-for-stable-diffusion-installed-do-i-really-have-to-install-all-of-these-dependencies).
-1. Launch ComfyUI by running `python main.py`.
+1. Launch ComfyUI by running `python main.py --force-fp16`. Note that --force-fp16 will only work if you installed the latest pytorch nightly.
 
 > **Note**: Remember to add your models, VAE, LoRAs etc. to the corresponding Comfy folders, as discussed in [ComfyUI manual installation](#manual-install-windows-linux).
 
diff --git a/comfy/model_management.py b/comfy/model_management.py
index 75f3b38a9..0ffca06da 100644
--- a/comfy/model_management.py
+++ b/comfy/model_management.py
@@ -364,7 +364,8 @@ def text_encoder_device():
     if args.gpu_only:
         return get_torch_device()
     elif vram_state == VRAMState.HIGH_VRAM or vram_state == VRAMState.NORMAL_VRAM:
-        if torch.get_num_threads() < 4: #leaving the text encoder on the CPU is faster than shifting it if the CPU is fast enough.
+        #NOTE: on a Ryzen 5 7600X with 4080 it's faster to shift to GPU
+        if torch.get_num_threads() < 8: #leaving the text encoder on the CPU is faster than shifting it if the CPU is fast enough.
             return get_torch_device()
         else:
             return torch.device("cpu")
diff --git a/main.py b/main.py
index 2674eba15..47ba136e1 100644
--- a/main.py
+++ b/main.py
@@ -163,6 +163,8 @@ if __name__ == "__main__":
     if args.auto_launch:
         def startup_server(address, port):
             import webbrowser
+            if os.name == 'nt' and address == '0.0.0.0':
+                address = '127.0.0.1'
             webbrowser.open(f"http://{address}:{port}")
         call_on_start = startup_server
 
diff --git a/nodes.py b/nodes.py
index 2a5cbb05f..61a6a98ba 100644
--- a/nodes.py
+++ b/nodes.py
@@ -362,6 +362,14 @@ class SaveLatent:
                 metadata[x] = json.dumps(extra_pnginfo[x])
 
         file = f"{filename}_{counter:05}_.latent"
+
+        results = list()
+        results.append({
+            "filename": file,
+            "subfolder": subfolder,
+            "type": "output"
+        })
+
         file = os.path.join(full_output_folder, file)
 
         output = {}
@@ -369,7 +377,7 @@ class SaveLatent:
         output["latent_format_version_0"] = torch.tensor([])
 
         comfy.utils.save_torch_file(output, file, metadata=metadata)
-        return {}
+        return { "ui": { "latents": results } }
 
 
 class LoadLatent:
diff --git a/web/scripts/widgets.js b/web/scripts/widgets.js
index a8afc29b0..c128caa5a 100644
--- a/web/scripts/widgets.js
+++ b/web/scripts/widgets.js
@@ -250,19 +250,25 @@ function addMultilineWidget(node, name, opts, app) {
     return { minWidth: 400, minHeight: 200, widget };
 }
 
+function isSlider(display) {
+    return (display === "slider") ? "slider" : "number";
+}
+
 export const ComfyWidgets = {
     "INT:seed": seedWidget,
     "INT:noise_seed": seedWidget,
     FLOAT(node, inputName, inputData) {
+        let widgetType = isSlider(inputData[1]["display"]);
         const { val, config } = getNumberDefaults(inputData, 0.5);
-        return { widget: node.addWidget("number", inputName, val, () => {}, config) };
+        return { widget: node.addWidget(widgetType, inputName, val, () => {}, config) };
     },
     INT(node, inputName, inputData) {
+        let widgetType = isSlider(inputData[1]["display"]);
         const { val, config } = getNumberDefaults(inputData, 1);
         Object.assign(config, { precision: 0 });
         return {
             widget: node.addWidget(
-                "number",
+                widgetType,
                 inputName,
                 val,
                 function (v) {
@@ -273,6 +279,18 @@ export const ComfyWidgets = {
             ),
         };
     },
+    BOOLEAN(node, inputName, inputData) {
+        let defaultVal = inputData[1]["default"];
+        return {
+            widget: node.addWidget(
+                "toggle",
+                inputName,
+                defaultVal,
+                () => {},
+                {"on": inputData[1].label_on, "off": inputData[1].label_off}
+            )
+        };
+    },
     STRING(node, inputName, inputData, app) {
         const defaultVal = inputData[1].default || "";
         const multiline = !!inputData[1].multiline;