From c069fc0730f27d42b1e0dc82c698f448cd3d8087 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Sun, 11 Jun 2023 23:25:39 -0400
Subject: [PATCH 01/48] Auto switch to tiled VAE encode if regular one runs out of memory.

---
 comfy/sd.py | 41 ++++++++++++++++++++++++++---------------
 1 file changed, 26 insertions(+), 15 deletions(-)

diff --git a/comfy/sd.py b/comfy/sd.py
index 3747f53b8..718dccd09 100644
--- a/comfy/sd.py
+++ b/comfy/sd.py
@@ -544,6 +544,19 @@ class VAE:
                     / 3.0) / 2.0, min=0.0, max=1.0)
         return output
 
+    def encode_tiled_(self, pixel_samples, tile_x=512, tile_y=512, overlap = 64):
+        steps = pixel_samples.shape[0] * utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x, tile_y, overlap)
+        steps += pixel_samples.shape[0] * utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x // 2, tile_y * 2, overlap)
+        steps += pixel_samples.shape[0] * utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x * 2, tile_y // 2, overlap)
+        pbar = utils.ProgressBar(steps)
+
+        encode_fn = lambda a: self.first_stage_model.encode(2. * a.to(self.device) - 1.).sample() * self.scale_factor
+        samples = utils.tiled_scale(pixel_samples, encode_fn, tile_x, tile_y, overlap, upscale_amount = (1/8), out_channels=4, pbar=pbar)
+        samples += utils.tiled_scale(pixel_samples, encode_fn, tile_x * 2, tile_y // 2, overlap, upscale_amount = (1/8), out_channels=4, pbar=pbar)
+        samples += utils.tiled_scale(pixel_samples, encode_fn, tile_x // 2, tile_y * 2, overlap, upscale_amount = (1/8), out_channels=4, pbar=pbar)
+        samples /= 3.0
+        return samples
+
     def decode(self, samples_in):
         model_management.unload_model()
         self.first_stage_model = self.first_stage_model.to(self.device)
@@ -574,28 +587,26 @@ class VAE:
     def encode(self, pixel_samples):
         model_management.unload_model()
         self.first_stage_model = self.first_stage_model.to(self.device)
-        pixel_samples = pixel_samples.movedim(-1,1).to(self.device)
-        samples = self.first_stage_model.encode(2. * pixel_samples - 1.).sample() * self.scale_factor
+        pixel_samples = pixel_samples.movedim(-1,1)
+        try:
+            batch_number = 1
+            samples = torch.empty((pixel_samples.shape[0], 4, round(pixel_samples.shape[2] // 8), round(pixel_samples.shape[3] // 8)), device="cpu")
+            for x in range(0, pixel_samples.shape[0], batch_number):
+                pixels_in = (2. * pixel_samples[x:x+batch_number] - 1.).to(self.device)
+                samples[x:x+batch_number] = self.first_stage_model.encode(pixels_in).sample().cpu() * self.scale_factor
+        except model_management.OOM_EXCEPTION as e:
+            print("Warning: Ran out of memory when regular VAE encoding, retrying with tiled VAE encoding.")
+            samples = self.encode_tiled_(pixel_samples)
+
         self.first_stage_model = self.first_stage_model.cpu()
-        samples = samples.cpu()
         return samples
 
     def encode_tiled(self, pixel_samples, tile_x=512, tile_y=512, overlap = 64):
         model_management.unload_model()
         self.first_stage_model = self.first_stage_model.to(self.device)
-        pixel_samples = pixel_samples.movedim(-1,1).to(self.device)
-
-        steps = pixel_samples.shape[0] * utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x, tile_y, overlap)
-        steps += pixel_samples.shape[0] * utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x // 2, tile_y * 2, overlap)
-        steps += pixel_samples.shape[0] * utils.get_tiled_scale_steps(pixel_samples.shape[3], pixel_samples.shape[2], tile_x * 2, tile_y // 2, overlap)
-        pbar = utils.ProgressBar(steps)
-
-        samples = utils.tiled_scale(pixel_samples, lambda a: self.first_stage_model.encode(2. * a - 1.).sample() * self.scale_factor, tile_x, tile_y, overlap, upscale_amount = (1/8), out_channels=4, pbar=pbar)
-        samples += utils.tiled_scale(pixel_samples, lambda a: self.first_stage_model.encode(2. * a - 1.).sample() * self.scale_factor, tile_x * 2, tile_y // 2, overlap, upscale_amount = (1/8), out_channels=4, pbar=pbar)
-        samples += utils.tiled_scale(pixel_samples, lambda a: self.first_stage_model.encode(2. * a - 1.).sample() * self.scale_factor, tile_x // 2, tile_y * 2, overlap, upscale_amount = (1/8), out_channels=4, pbar=pbar)
-        samples /= 3.0
+        pixel_samples = pixel_samples.movedim(-1,1)
+        samples = self.encode_tiled_(pixel_samples, tile_x=tile_x, tile_y=tile_y, overlap=overlap)
         self.first_stage_model = self.first_stage_model.cpu()
-        samples = samples.cpu()
         return samples
 
 def broadcast_image_to(tensor, target_batch_size, batched_number):
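
The core of this patch is a try/except fallback: attempt the fast whole-batch encode, and only pay the tiled path's overhead when allocation actually fails. A minimal standalone sketch of the same pattern, assuming PyTorch's torch.cuda.OutOfMemoryError and hypothetical encode_full/encode_tiled helpers (the patch itself catches ComfyUI's own model_management.OOM_EXCEPTION):

    import torch

    def encode_with_fallback(vae, pixels):
        # Fast path: encode the whole batch in a single pass on the GPU.
        try:
            return vae.encode_full(pixels)  # hypothetical single-pass encoder
        except torch.cuda.OutOfMemoryError:
            # The failed allocation may still be cached; release it first.
            torch.cuda.empty_cache()
            # Tiled fallback trades speed for a bounded peak memory footprint.
            return vae.encode_tiled(pixels)  # hypothetical tiled encoder

Note why the new encode_tiled_ ends with samples /= 3.0: it runs three tile orientations (square, wide, and tall) over the same image and averages the results, so seams produced by one orientation fall in the interior of tiles from the other two.
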
From f8c593105300008018431452567fe0b1262fdc81 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Mon, 12 Jun 2023 00:21:50 -0400
Subject: [PATCH 02/48] Split the batch in VAEEncode if there's not enough memory.

---
 comfy/sd.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/comfy/sd.py b/comfy/sd.py
index 718dccd09..d898d0197 100644
--- a/comfy/sd.py
+++ b/comfy/sd.py
@@ -589,11 +589,14 @@ class VAE:
         self.first_stage_model = self.first_stage_model.to(self.device)
         pixel_samples = pixel_samples.movedim(-1,1)
         try:
-            batch_number = 1
+            free_memory = model_management.get_free_memory(self.device)
+            batch_number = int((free_memory * 0.7) / (2078 * pixel_samples.shape[2] * pixel_samples.shape[3])) #NOTE: this constant along with the one in the decode above are estimated from the mem usage for the VAE and could change.
+            batch_number = max(1, batch_number)
             samples = torch.empty((pixel_samples.shape[0], 4, round(pixel_samples.shape[2] // 8), round(pixel_samples.shape[3] // 8)), device="cpu")
             for x in range(0, pixel_samples.shape[0], batch_number):
                 pixels_in = (2. * pixel_samples[x:x+batch_number] - 1.).to(self.device)
                 samples[x:x+batch_number] = self.first_stage_model.encode(pixels_in).sample().cpu() * self.scale_factor
+
         except model_management.OOM_EXCEPTION as e:
             print("Warning: Ran out of memory when regular VAE encoding, retrying with tiled VAE encoding.")
             samples = self.encode_tiled_(pixel_samples)
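
The follow-up replaces the fixed batch_number = 1 with simple arithmetic: per the commit's own note, 2078 is an empirical estimate of VAE encode memory in bytes per input pixel, and the 0.7 factor leaves headroom for other allocations. A sketch of the same computation in isolation (both constants are the patch's estimates, not a stable API):

    def estimate_encode_batch(free_bytes, height, width,
                              bytes_per_pixel=2078, safety=0.7):
        # Largest batch whose estimated footprint stays inside the safety margin.
        per_image = bytes_per_pixel * height * width
        return max(1, int((free_bytes * safety) / per_image))

    # With roughly 8 GiB free, 512x512 inputs encode in batches of about 11.
    print(estimate_encode_batch(8 * 1024**3, 512, 512))

If even a batch of one overflows, the except branch from the previous patch still catches it and falls back to tiled encoding.
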
From 67833c83d88711b185e9db32abefe8a05fe1c665 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Mon, 12 Jun 2023 01:14:04 -0400
Subject: [PATCH 03/48] Add ImageScaleBy node.

---
 nodes.py | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/nodes.py b/nodes.py
index 12243fab4..658e32dad 100644
--- a/nodes.py
+++ b/nodes.py
@@ -1192,6 +1192,26 @@ class ImageScale:
         s = s.movedim(1,-1)
         return (s,)
 
+class ImageScaleBy:
+    upscale_methods = ["nearest-exact", "bilinear", "area"]
+
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required": { "image": ("IMAGE",), "upscale_method": (s.upscale_methods,),
+                              "scale_by": ("FLOAT", {"default": 1.0, "min": 0.01, "max": 8.0, "step": 0.01}),}}
+    RETURN_TYPES = ("IMAGE",)
+    FUNCTION = "upscale"
+
+    CATEGORY = "image/upscaling"
+
+    def upscale(self, image, upscale_method, scale_by):
+        samples = image.movedim(-1,1)
+        width = round(samples.shape[3] * scale_by)
+        height = round(samples.shape[2] * scale_by)
+        s = comfy.utils.common_upscale(samples, width, height, upscale_method, "disabled")
+        s = s.movedim(1,-1)
+        return (s,)
+
 class ImageInvert:
 
     @classmethod
@@ -1290,6 +1310,7 @@ NODE_CLASS_MAPPINGS = {
     "LoadImage": LoadImage,
     "LoadImageMask": LoadImageMask,
     "ImageScale": ImageScale,
+    "ImageScaleBy": ImageScaleBy,
     "ImageInvert": ImageInvert,
     "ImagePadForOutpaint": ImagePadForOutpaint,
     "ConditioningAverage ": ConditioningAverage ,
@@ -1371,6 +1392,7 @@ NODE_DISPLAY_NAME_MAPPINGS = {
     "LoadImage": "Load Image",
     "LoadImageMask": "Load Image (as Mask)",
     "ImageScale": "Upscale Image",
+    "ImageScaleBy": "Upscale Image By",
    "ImageUpscaleWithModel": "Upscale Image (using Model)",
    "ImageInvert": "Invert Image",
    "ImagePadForOutpaint": "Pad Image for Outpainting",
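
ImageScaleBy's only real logic is converting a relative factor into absolute pixel dimensions before delegating to the existing upscale helper; ComfyUI images arrive channels-last (batch, height, width, channels) and are moved to channels-first for resampling. A rough standalone equivalent in plain PyTorch, with torch.nn.functional.interpolate standing in for ComfyUI's comfy.utils.common_upscale:

    import torch
    import torch.nn.functional as F

    def scale_image_by(image, scale_by, mode="bilinear"):
        samples = image.movedim(-1, 1)             # NHWC -> NCHW for interpolate
        height = round(samples.shape[2] * scale_by)
        width = round(samples.shape[3] * scale_by)
        samples = F.interpolate(samples, size=(height, width), mode=mode)
        return samples.movedim(1, -1)              # back to NHWC

    image = torch.rand(1, 512, 512, 3)
    print(scale_image_by(image, 1.5).shape)        # torch.Size([1, 768, 768, 3])

Using round() on both axes means non-integer products snap to the nearest pixel, which is why a 0.01 step on scale_by is safe even for odd input sizes.
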
"CLEAR_BACKGROUND_COLOR": "#222", "NODE_TITLE_COLOR": "#999", "NODE_SELECTED_TITLE_COLOR": "#FFF", "NODE_TEXT_SIZE": 14, @@ -77,6 +78,8 @@ const colorPalettes = { "VAE": "#FF7043", // deep orange }, "litegraph_base": { + "BACKGROUND_IMAGE": "data:image/gif;base64,R0lGODlhZABkALMAAAAAAP///+vr6+rq6ujo6Ofn5+bm5uXl5d3d3f///wAAAAAAAAAAAAAAAAAAAAAAACH5BAEAAAkALAAAAABkAGQAAAT/UMhJq7046827HkcoHkYxjgZhnGG6si5LqnIM0/fL4qwwIMAg0CAsEovBIxKhRDaNy2GUOX0KfVFrssrNdpdaqTeKBX+dZ+jYvEaTf+y4W66mC8PUdrE879f9d2mBeoNLfH+IhYBbhIx2jkiHiomQlGKPl4uZe3CaeZifnnijgkESBqipqqusra6vsLGys62SlZO4t7qbuby7CLa+wqGWxL3Gv3jByMOkjc2lw8vOoNSi0czAncXW3Njdx9Pf48/Z4Kbbx+fQ5evZ4u3k1fKR6cn03vHlp7T9/v8A/8Gbp4+gwXoFryXMB2qgwoMMHyKEqA5fxX322FG8tzBcRnMW/zlulPbRncmQGidKjMjyYsOSKEF2FBlJQMCbOHP6c9iSZs+UnGYCdbnSo1CZI5F64kn0p1KnTH02nSoV3dGTV7FFHVqVq1dtWcMmVQZTbNGu72zqXMuW7danVL+6e4t1bEy6MeueBYLXrNO5Ze36jQtWsOG97wIj1vt3St/DjTEORss4nNq2mDP3e7w4r1bFkSET5hy6s2TRlD2/mSxXtSHQhCunXo26NevCpmvD/UU6tuullzULH76q92zdZG/Ltv1a+W+osI/nRmyc+fRi1Xdbh+68+0vv10dH3+77KD/i6IdnX669/frn5Zsjh4/2PXju8+8bzc9/6fj27LFnX11/+IUnXWl7BJfegm79FyB9JOl3oHgSklefgxAC+FmFGpqHIYcCfkhgfCohSKKJVo044YUMttggiBkmp6KFXw1oII24oYhjiDByaKOOHcp3Y5BD/njikSkO+eBREQAAOw==", + "CLEAR_BACKGROUND_COLOR": "white", "NODE_TITLE_COLOR": "#222", "NODE_SELECTED_TITLE_COLOR": "#000", "NODE_TEXT_SIZE": 14, @@ -191,7 +194,7 @@ app.registerExtension({ const nodeData = defs[nodeId]; var inputs = nodeData["input"]["required"]; - if (nodeData["input"]["optional"] != undefined){ + if (nodeData["input"]["optional"] != undefined) { inputs = Object.assign({}, nodeData["input"]["required"], nodeData["input"]["optional"]) } @@ -232,12 +235,9 @@ app.registerExtension({ "id": "my_color_palette_unique_id", "name": "My Color Palette", "colors": { - "node_slot": { - }, - "litegraph_base": { - }, - "comfy_base": { - } + "node_slot": {}, + "litegraph_base": {}, + "comfy_base": {} } }; @@ -266,7 +266,7 @@ app.registerExtension({ }; const addCustomColorPalette = async (colorPalette) => { - if (typeof(colorPalette) !== "object") { + if (typeof (colorPalette) !== "object") { app.ui.dialog.show("Invalid color palette"); return; } @@ -286,7 +286,7 @@ app.registerExtension({ return; } - if (colorPalette.colors.node_slot && typeof(colorPalette.colors.node_slot) !== "object") { + if (colorPalette.colors.node_slot && typeof (colorPalette.colors.node_slot) !== "object") { app.ui.dialog.show("Invalid color palette colors.node_slot"); return; } @@ -301,7 +301,11 @@ app.registerExtension({ } } - els.select.append($el("option", { textContent: colorPalette.name + " (custom)", value: "custom_" + colorPalette.id, selected: true })); + els.select.append($el("option", { + textContent: colorPalette.name + " (custom)", + value: "custom_" + colorPalette.id, + selected: true + })); setColorPalette("custom_" + colorPalette.id); await loadColorPalette(colorPalette); @@ -350,7 +354,7 @@ app.registerExtension({ if (colorPalette.colors.comfy_base) { const rootStyle = document.documentElement.style; for (const key in colorPalette.colors.comfy_base) { - rootStyle.setProperty('--' + key, colorPalette.colors.comfy_base[key]); + rootStyle.setProperty('--' + key, colorPalette.colors.comfy_base[key]); } } app.canvas.draw(true, true); @@ -380,7 +384,7 @@ app.registerExtension({ const fileInput = $el("input", { type: "file", accept: ".json", - style: { display: "none" }, + style: {display: "none"}, parent: document.body, onchange: () => { let file = fileInput.files[0]; @@ -403,17 +407,25 @@ app.registerExtension({ for (const c in colorPalettes) { const 
-                   options.push($el("option", { textContent: colorPalette.name, value: colorPalette.id, selected: colorPalette.id === value }));
+                   options.push($el("option", {
+                       textContent: colorPalette.name,
+                       value: colorPalette.id,
+                       selected: colorPalette.id === value
+                   }));
                }
 
                let customColorPalettes = getCustomColorPalettes();
                for (const c in customColorPalettes) {
                    const colorPalette = customColorPalettes[c];
-                   options.push($el("option", { textContent: colorPalette.name + " (custom)", value: "custom_" + colorPalette.id, selected: "custom_" + colorPalette.id === value }));
+                   options.push($el("option", {
+                       textContent: colorPalette.name + " (custom)",
+                       value: "custom_" + colorPalette.id,
+                       selected: "custom_" + colorPalette.id === value
+                   }));
                }
 
                return $el("div", [
-                   $el("label", { textContent: name || id }, [
+                   $el("label", {textContent: name || id}, [
                        els.select = $el("select", {
                            onchange: (e) => {
                                setter(e.target.value);
@@ -427,12 +439,12 @@
            const colorPaletteId = app.ui.settings.getSettingValue(id, defaultColorPaletteId);
            const colorPalette = await completeColorPalette(getColorPalette(colorPaletteId));
            const json = JSON.stringify(colorPalette, null, 2); // convert the data to a JSON string
-           const blob = new Blob([json], { type: "application/json" });
+           const blob = new Blob([json], {type: "application/json"});
            const url = URL.createObjectURL(blob);
            const a = $el("a", {
                href: url,
                download: colorPaletteId + ".json",
-               style: { display: "none" },
+               style: {display: "none"},
                parent: document.body,
            });
            a.click();
@@ -455,12 +467,12 @@
                onclick: async () => {
                    const colorPalette = await getColorPaletteTemplate();
                    const json = JSON.stringify(colorPalette, null, 2); // convert the data to a JSON string
-                   const blob = new Blob([json], { type: "application/json" });
+                   const blob = new Blob([json], {type: "application/json"});
                    const url = URL.createObjectURL(blob);
                    const a = $el("a", {
                        href: url,
                        download: "color_palette.json",
-                       style: { display: "none" },
+                       style: {display: "none"},
                        parent: document.body,
                    });
                    a.click();
@@ -496,15 +508,25 @@
                    return;
                }
 
-               if (colorPalettes[value]) {
-                   await loadColorPalette(colorPalettes[value]);
+               let palette = colorPalettes[value];
+               if (palette) {
+                   await loadColorPalette(palette);
                } else if (value.startsWith("custom_")) {
                    value = value.substr(7);
                    let customColorPalettes = getCustomColorPalettes();
                    if (customColorPalettes[value]) {
+                       palette = customColorPalettes[value];
                        await loadColorPalette(customColorPalettes[value]);
                    }
                }
+
+               let {BACKGROUND_IMAGE, CLEAR_BACKGROUND_COLOR} = palette.colors.litegraph_base;
+               if (BACKGROUND_IMAGE === undefined || CLEAR_BACKGROUND_COLOR === undefined) {
+                   const base = colorPalettes["dark"].colors.litegraph_base;
+                   BACKGROUND_IMAGE = base.BACKGROUND_IMAGE;
+                   CLEAR_BACKGROUND_COLOR = base.CLEAR_BACKGROUND_COLOR;
+               }
+               app.canvas.updateBackground(BACKGROUND_IMAGE, CLEAR_BACKGROUND_COLOR);
            },
        });
    },
diff --git a/web/index.html b/web/index.html
index da0adb6c2..c48d716e1 100644
--- a/web/index.html
+++ b/web/index.html
@@ -7,6 +7,7 @@
+