diff --git a/comfy/cli_args.py b/comfy/cli_args.py
index 83d8cd287..ec7d34a55 100644
--- a/comfy/cli_args.py
+++ b/comfy/cli_args.py
@@ -38,7 +38,9 @@ parser.add_argument("--port", type=int, default=8188, help="Set the listen port.
 parser.add_argument("--enable-cors-header", type=str, default=None, metavar="ORIGIN", nargs="?", const="*", help="Enable CORS (Cross-Origin Resource Sharing) with optional origin or allow all with default '*'.")
 parser.add_argument("--extra-model-paths-config", type=str, default=None, metavar="PATH", nargs='+', action='append', help="Load one or more extra_model_paths.yaml files.")
 parser.add_argument("--output-directory", type=str, default=None, help="Set the ComfyUI output directory.")
+parser.add_argument("--temp-directory", type=str, default=None, help="Set the ComfyUI temp directory (default is in the ComfyUI directory).")
 parser.add_argument("--auto-launch", action="store_true", help="Automatically launch ComfyUI in the default browser.")
+parser.add_argument("--disable-auto-launch", action="store_true", help="Disable auto launching the browser.")
 parser.add_argument("--cuda-device", type=int, default=None, metavar="DEVICE_ID", help="Set the id of the cuda device this instance will use.")
 cm_group = parser.add_mutually_exclusive_group()
 cm_group.add_argument("--cuda-malloc", action="store_true", help="Enable cudaMallocAsync (enabled by default for torch 2.0 and up).")
@@ -90,3 +92,6 @@ args = parser.parse_args()
 
 if args.windows_standalone_build:
     args.auto_launch = True
+
+if args.disable_auto_launch:
+    args.auto_launch = False
diff --git a/comfy/cmd/cuda_malloc.py b/comfy/cmd/cuda_malloc.py
index a808b2071..d033529cc 100644
--- a/comfy/cmd/cuda_malloc.py
+++ b/comfy/cmd/cuda_malloc.py
@@ -40,7 +40,8 @@ def cuda_malloc_supported():
     blacklist = {"GeForce GTX TITAN X", "GeForce GTX 980", "GeForce GTX 970", "GeForce GTX 960", "GeForce GTX 950", "GeForce 945M",
                  "GeForce 940M", "GeForce 930M", "GeForce 920M", "GeForce 910M", "GeForce GTX 750", "GeForce GTX 745", "Quadro K620",
                  "Quadro K1200", "Quadro K2200", "Quadro M500", "Quadro M520", "Quadro M600", "Quadro M620", "Quadro M1000",
-                 "Quadro M1200", "Quadro M2000", "Quadro M2200", "Quadro M3000", "Quadro M4000", "Quadro M5000", "Quadro M5500", "Quadro M6000"}
+                 "Quadro M1200", "Quadro M2000", "Quadro M2200", "Quadro M3000", "Quadro M4000", "Quadro M5000", "Quadro M5500", "Quadro M6000",
+                 "GeForce MX110", "GeForce MX130", "GeForce 830M", "GeForce 840M", "GeForce GTX 850M", "GeForce GTX 860M"}
 
     try:
         names = get_gpu_names()
diff --git a/comfy/cmd/folder_paths.py b/comfy/cmd/folder_paths.py
index e7eb1ac58..ae2c357c9 100644
--- a/comfy/cmd/folder_paths.py
+++ b/comfy/cmd/folder_paths.py
@@ -47,6 +47,10 @@ def set_output_directory(output_dir):
     global output_directory
     output_directory = output_dir
 
+def set_temp_directory(temp_dir):
+    global temp_directory
+    temp_directory = temp_dir
+
 def get_output_directory():
     global output_directory
     return output_directory
@@ -115,6 +119,8 @@ def add_model_folder_path(folder_name, full_folder_path):
     global folder_names_and_paths
     if folder_name in folder_names_and_paths:
         folder_names_and_paths[folder_name][0].append(full_folder_path)
+    else:
+        folder_names_and_paths[folder_name] = ([full_folder_path], set())
 
 def get_folder_paths(folder_name):
     return folder_names_and_paths[folder_name][0][:]
diff --git a/comfy/cmd/main.py b/comfy/cmd/main.py
index 88a4a6ade..43f101f92 100644
--- a/comfy/cmd/main.py
+++ b/comfy/cmd/main.py
@@ -125,6 +125,10 @@ def load_extra_path_config(yaml_path):
 
 
 def main():
+    if args.temp_directory:
+        temp_dir = os.path.join(os.path.abspath(args.temp_directory), "temp")
+        print(f"Setting temp directory to: {temp_dir}")
+        folder_paths.set_temp_directory(temp_dir)
     cleanup_temp()
 
     loop = asyncio.new_event_loop()
diff --git a/comfy/samplers.py b/comfy/samplers.py
index 044d518a5..de4f36da2 100644
--- a/comfy/samplers.py
+++ b/comfy/samplers.py
@@ -189,12 +189,13 @@ def sampling_function(model_function, x, timestep, uncond, cond, cond_scale, con
                 continue
 
             to_run += [(p, COND)]
-        for x in uncond:
-            p = get_area_and_mult(x, x_in, cond_concat_in, timestep)
-            if p is None:
-                continue
+        if uncond is not None:
+            for x in uncond:
+                p = get_area_and_mult(x, x_in, cond_concat_in, timestep)
+                if p is None:
+                    continue
 
-            to_run += [(p, UNCOND)]
+                to_run += [(p, UNCOND)]
 
         while len(to_run) > 0:
             first = to_run[0]
@@ -282,6 +283,9 @@ def sampling_function(model_function, x, timestep, uncond, cond, cond_scale, con
 
         max_total_area = model_management.maximum_batch_area()
 
+        if math.isclose(cond_scale, 1.0):
+            uncond = None
+
         cond, uncond = calc_cond_uncond_batch(model_function, cond, uncond, x, timestep, max_total_area, cond_concat, model_options)
         if "sampler_cfg_function" in model_options:
             args = {"cond": cond, "uncond": uncond, "cond_scale": cond_scale, "timestep": timestep}
diff --git a/comfy/sd.py b/comfy/sd.py
index 922cbf21e..b19130ada 100644
--- a/comfy/sd.py
+++ b/comfy/sd.py
@@ -70,13 +70,27 @@ def load_lora(lora, to_load):
             alpha = lora[alpha_name].item()
             loaded_keys.add(alpha_name)
 
-        A_name = "{}.lora_up.weight".format(x)
-        B_name = "{}.lora_down.weight".format(x)
-        mid_name = "{}.lora_mid.weight".format(x)
+        regular_lora = "{}.lora_up.weight".format(x)
+        diffusers_lora = "{}_lora.up.weight".format(x)
+        transformers_lora = "{}.lora_linear_layer.up.weight".format(x)
+        A_name = None
 
-        if A_name in lora.keys():
+        if regular_lora in lora.keys():
+            A_name = regular_lora
+            B_name = "{}.lora_down.weight".format(x)
+            mid_name = "{}.lora_mid.weight".format(x)
+        elif diffusers_lora in lora.keys():
+            A_name = diffusers_lora
+            B_name = "{}_lora.down.weight".format(x)
+            mid_name = None
+        elif transformers_lora in lora.keys():
+            A_name = transformers_lora
+            B_name ="{}.lora_linear_layer.down.weight".format(x)
+            mid_name = None
+
+        if A_name is not None:
             mid = None
-            if mid_name in lora.keys():
+            if mid_name is not None and mid_name in lora.keys():
                 mid = lora[mid_name]
                 loaded_keys.add(mid_name)
             patch_dict[to_load[x]] = (lora[A_name], lora[B_name], alpha, mid)
@@ -172,20 +186,29 @@ def model_lora_keys_clip(model, key_map={}):
                 key_map[lora_key] = k
                 lora_key = "lora_te1_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c])
                 key_map[lora_key] = k
+                lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora
+                key_map[lora_key] = k
 
             k = "clip_l.transformer.text_model.encoder.layers.{}.{}.weight".format(b, c)
             if k in sdk:
                 lora_key = "lora_te1_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #SDXL base
                 key_map[lora_key] = k
                 clip_l_present = True
+                lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora
+                key_map[lora_key] = k
 
             k = "clip_g.transformer.text_model.encoder.layers.{}.{}.weight".format(b, c)
             if k in sdk:
                 if clip_l_present:
                     lora_key = "lora_te2_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #SDXL base
+                    key_map[lora_key] = k
+                    lora_key = "text_encoder_2.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora
+                    key_map[lora_key] = k
                 else:
"lora_te_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #TODO: test if this is correct for SDXL-Refiner - key_map[lora_key] = k + key_map[lora_key] = k + lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora + key_map[lora_key] = k return key_map @@ -202,6 +225,11 @@ def model_lora_keys_unet(model, key_map={}): if k.endswith(".weight"): key_lora = k[:-len(".weight")].replace(".", "_") key_map["lora_unet_{}".format(key_lora)] = "diffusion_model.{}".format(diffusers_keys[k]) + + diffusers_lora_key = "unet.{}".format(k[:-len(".weight")].replace(".to_", ".processor.to_")) + if diffusers_lora_key.endswith(".to_out.0"): + diffusers_lora_key = diffusers_lora_key[:-2] + key_map[diffusers_lora_key] = "diffusion_model.{}".format(diffusers_keys[k]) return key_map def set_attr(obj, attr, value): @@ -864,7 +892,7 @@ def load_controlnet(ckpt_path, model=None): use_fp16 = model_management.should_use_fp16() controlnet_config = model_detection.model_config_from_unet(controlnet_data, prefix, use_fp16).unet_config controlnet_config.pop("out_channels") - controlnet_config["hint_channels"] = 3 + controlnet_config["hint_channels"] = controlnet_data["{}input_hint_block.0.weight".format(prefix)].shape[1] control_model = cldm.ControlNet(**controlnet_config) if pth: diff --git a/custom_nodes/example_node.py.example b/custom_nodes/example_node.py.example index 175202aeb..e37808b03 100644 --- a/custom_nodes/example_node.py.example +++ b/custom_nodes/example_node.py.example @@ -51,9 +51,10 @@ class Example: "default": 0, "min": 0, #Minimum value "max": 4096, #Maximum value - "step": 64 #Slider's step + "step": 64, #Slider's step + "display": "number" # Cosmetic only: display as "number" or "slider" }), - "float_field": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}), + "float_field": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01, "display": "number"}), "print_to_screen": (["enable", "disable"],), "string_field": ("STRING", { "multiline": False, #True if you want the field to look like the one on the ClipTextEncode node diff --git a/notebooks/comfyui_colab.ipynb b/notebooks/comfyui_colab.ipynb index 1bb90f7d0..b1c487101 100644 --- a/notebooks/comfyui_colab.ipynb +++ b/notebooks/comfyui_colab.ipynb @@ -159,13 +159,64 @@ "\n" ] }, + { + "cell_type": "markdown", + "metadata": { + "id": "kkkkkkkkkkkkkkk" + }, + "source": [ + "### Run ComfyUI with cloudflared (Recommended Way)\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "jjjjjjjjjjjjjj" + }, + "outputs": [], + "source": [ + "!wget https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64.deb\n", + "!dpkg -i cloudflared-linux-amd64.deb\n", + "\n", + "import subprocess\n", + "import threading\n", + "import time\n", + "import socket\n", + "import urllib.request\n", + "\n", + "def iframe_thread(port):\n", + " while True:\n", + " time.sleep(0.5)\n", + " sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n", + " result = sock.connect_ex(('127.0.0.1', port))\n", + " if result == 0:\n", + " break\n", + " sock.close()\n", + " print(\"\\nComfyUI finished loading, trying to launch cloudflared (if it gets stuck here cloudflared is having issues)\\n\")\n", + "\n", + " p = subprocess.Popen([\"cloudflared\", \"tunnel\", \"--url\", \"http://127.0.0.1:{}\".format(port)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n", + " for line in p.stderr:\n", + " l = line.decode()\n", + " if \"trycloudflare.com \" 
+    "      print(\"This is the URL to access ComfyUI:\", l[l.find(\"http\"):], end='')\n",
+    "    #print(l, end='')\n",
+    "\n",
+    "\n",
+    "threading.Thread(target=iframe_thread, daemon=True, args=(8188,)).start()\n",
+    "\n",
+    "!python main.py --dont-print-server"
+   ]
+  },
   {
    "cell_type": "markdown",
    "metadata": {
     "id": "kkkkkkkkkkkkkk"
    },
    "source": [
-    "### Run ComfyUI with localtunnel (Recommended Way)\n",
+    "### Run ComfyUI with localtunnel\n",
     "\n",
     "\n"
    ]
diff --git a/requirements.txt b/requirements.txt
index 55fe6a2bd..8ab608952 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -28,3 +28,4 @@ tqdm
 diffusers>=0.16.1
 protobuf==3.20.3
 rembg
+psutil
\ No newline at end of file
diff --git a/web/extensions/core/contextMenuFilter.js b/web/extensions/core/contextMenuFilter.js
index e0e8854b3..152cd7043 100644
--- a/web/extensions/core/contextMenuFilter.js
+++ b/web/extensions/core/contextMenuFilter.js
@@ -27,10 +27,13 @@ const ext = {
 				const clickedComboValue = currentNode.widgets
 					.filter(w => w.type === "combo" && w.options.values.length === values.length)
 					.find(w => w.options.values.every((v, i) => v === values[i]))
-					.value;
+					?.value;
 
-				let selectedIndex = values.findIndex(v => v === clickedComboValue);
-				let selectedItem = displayedItems?.[selectedIndex];
+				let selectedIndex = clickedComboValue ? values.findIndex(v => v === clickedComboValue) : 0;
+				if (selectedIndex < 0) {
+					selectedIndex = 0;
+				}
+				let selectedItem = displayedItems[selectedIndex];
 				updateSelected();
 
 				// Apply highlighting to the selected item
diff --git a/web/extensions/core/linkRenderMode.js b/web/extensions/core/linkRenderMode.js
new file mode 100644
index 000000000..1e9091ec1
--- /dev/null
+++ b/web/extensions/core/linkRenderMode.js
@@ -0,0 +1,25 @@
+import { app } from "../../scripts/app.js";
+
+const id = "Comfy.LinkRenderMode";
+const ext = {
+	name: id,
+	async setup(app) {
+		app.ui.settings.addSetting({
+			id,
+			name: "Link Render Mode",
+			defaultValue: 2,
+			type: "combo",
+			options: LiteGraph.LINK_RENDER_MODES.map((m, i) => ({
+				value: i,
+				text: m,
+				selected: i == app.canvas.links_render_mode,
+			})),
+			onChange(value) {
+				app.canvas.links_render_mode = +value;
+				app.graph.setDirtyCanvas(true);
+			},
+		});
+	},
+};
+
+app.registerExtension(ext);
diff --git a/web/lib/litegraph.core.js b/web/lib/litegraph.core.js
index 2a33bd4a7..356c71ac2 100644
--- a/web/lib/litegraph.core.js
+++ b/web/lib/litegraph.core.js
@@ -9766,6 +9766,7 @@ LGraphNode.prototype.executeAction = function(action)
 
             switch (w.type) {
                 case "button":
+                    ctx.fillStyle = background_color;
                     if (w.clicked) {
                         ctx.fillStyle = "#AAA";
                         w.clicked = false;
@@ -9835,7 +9836,11 @@ LGraphNode.prototype.executeAction = function(action)
                         ctx.textAlign = "center";
                         ctx.fillStyle = text_color;
                         ctx.fillText(
-                            w.label || w.name + " " + Number(w.value).toFixed(3),
+                            w.label || w.name + " " + Number(w.value).toFixed(
+                                w.options.precision != null
+                                    ? w.options.precision
+                                    : 3
+                            ),
                             widget_width * 0.5,
                             y + H * 0.7
                         );
@@ -13835,7 +13840,7 @@ LGraphNode.prototype.executeAction = function(action)
         if (!disabled) {
             element.addEventListener("click", inner_onclick);
         }
-        if (options.autoopen) {
+        if (!disabled && options.autoopen) {
             LiteGraph.pointerListenerAdd(element,"enter",inner_over);
         }
 
diff --git a/web/scripts/app.js b/web/scripts/app.js
index 61324406d..40156abc3 100644
--- a/web/scripts/app.js
+++ b/web/scripts/app.js
@@ -1310,7 +1310,7 @@ export class ComfyApp {
 			).join("")}Nodes that have failed to load will show as red on the graph.`
 		);
 		this.logging.addEntry("Comfy.App", "warn", {
-			MissingNodes: nodes,
+			MissingNodes: missingNodeTypes,
 		});
 	}
 }
diff --git a/web/scripts/ui.js b/web/scripts/ui.js
index 03a4035b1..86e2a1c41 100644
--- a/web/scripts/ui.js
+++ b/web/scripts/ui.js
@@ -234,7 +234,7 @@ class ComfySettingsDialog extends ComfyDialog {
 		localStorage[settingId] = JSON.stringify(value);
 	}
 
-	addSetting({id, name, type, defaultValue, onChange, attrs = {}, tooltip = "",}) {
+	addSetting({id, name, type, defaultValue, onChange, attrs = {}, tooltip = "", options = undefined}) {
 		if (!id) {
 			throw new Error("Settings must have an ID");
 		}
@@ -347,6 +347,32 @@ class ComfySettingsDialog extends ComfyDialog {
 						]),
 					]);
 					break;
+				case "combo":
+					element = $el("tr", [
+						labelCell,
+						$el("td", [
+							$el(
+								"select",
+								{
+									oninput: (e) => {
+										setter(e.target.value);
+									},
+								},
+								(typeof options === "function" ? options(value) : options || []).map((opt) => {
+									if (typeof opt === "string") {
+										opt = { text: opt };
+									}
+									const v = opt.value ?? opt.text;
+									return $el("option", {
+										value: v,
+										textContent: opt.text,
+										selected: value + "" === v + "",
+									});
+								})
+							),
+						]),
+					]);
+					break;
 				case "text":
 				default:
 					if (type !== "text") {
diff --git a/web/scripts/widgets.js b/web/scripts/widgets.js
index d4a15ba84..adf5f26fa 100644
--- a/web/scripts/widgets.js
+++ b/web/scripts/widgets.js
@@ -433,7 +433,7 @@ export const ComfyWidgets = {
 		// Add handler to check if an image is being dragged over our node
 		node.onDragOver = function (e) {
 			if (e.dataTransfer && e.dataTransfer.items) {
-				const image = [...e.dataTransfer.items].find((f) => f.kind === "file" && f.type.startsWith("image/"));
+				const image = [...e.dataTransfer.items].find((f) => f.kind === "file");
 				return !!image;
 			}