Merge branch 'comfyanonymous:master' into master

commit 3a265b96cf by JAlB-, 2023-03-30 23:29:26 +03:00 (committed by GitHub)
11 changed files with 288 additions and 26 deletions

View File

@@ -25,3 +25,7 @@ To update the ComfyUI code: update\update_comfyui.bat
 To update ComfyUI with the python dependencies, note that you should ONLY run this if you have issues with python dependencies.
 update\update_comfyui_and_python_dependencies.bat
+
+TO SHARE MODELS BETWEEN COMFYUI AND ANOTHER UI:
+In the ComfyUI directory you will find a file: extra_model_paths.yaml.example
+Rename this file to: extra_model_paths.yaml and edit it with your favorite text editor.

View File

@@ -30,6 +30,11 @@ This ui will let you design and execute advanced stable diffusion pipelines usin
 Workflow examples can be found on the [Examples page](https://comfyanonymous.github.io/ComfyUI_examples/)
 
+## Shortcuts
+
+- **Ctrl + A** select all nodes
+- **Ctrl + M** mute/unmute selected nodes
+- **Delete** or **Backspace** delete selected nodes
+
 # Installing
 
 ## Windows
@@ -40,6 +45,10 @@ There is a portable standalone build for Windows that should work for running on
 Just download, extract and run. Make sure you put your Stable Diffusion checkpoints/models (the huge ckpt/safetensors files) in: ComfyUI\models\checkpoints
 
+#### How do I share models between another UI and ComfyUI?
+
+See the [Config file](extra_model_paths.yaml.example) to set the search paths for models. In the standalone windows build you can find this file in the ComfyUI directory. Rename this file to extra_model_paths.yaml and edit it with your favorite text editor.
+
 ## Colab Notebook
 
 To run it on colab or paperspace you can use my [Colab Notebook](notebooks/comfyui_colab.ipynb) here: [Link to open with google colab](https://colab.research.google.com/github/comfyanonymous/ComfyUI/blob/master/notebooks/comfyui_colab.ipynb)
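
For reference, extra_model_paths.yaml maps model folders from another UI into ComfyUI's search paths. A minimal sketch of how such a config could be consumed, assuming an invented schema of a base_path plus per-type subfolders (the authoritative keys are documented in extra_model_paths.yaml.example):

```python
import os
import yaml  # pip install pyyaml

# Stand-in for a model-folder registry: model type -> list of directories.
search_paths = {"checkpoints": ["models/checkpoints"], "vae": ["models/vae"]}

with open("extra_model_paths.yaml") as f:
    config = yaml.safe_load(f) or {}

# Assumed schema: each top-level entry has a base_path and per-type subfolders,
# e.g. a111: {base_path: C:/sd-webui, checkpoints: models/Stable-diffusion}.
for entry in config.values():
    base = entry.pop("base_path", "")
    for model_type, subdir in entry.items():
        search_paths.setdefault(model_type, []).append(os.path.join(base, str(subdir)))
```
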
@@ -64,7 +73,7 @@ AMD users can install rocm and pytorch with pip if you don't have it already ins
 Nvidia users should install torch and xformers using this command:
 
-```pip install torch==1.13.1 torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu117 xformers```
+```pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu118 xformers```
 
 #### Troubleshooting
@@ -97,7 +106,6 @@ With cmd.exe: ```"path_to_other_sd_gui\venv\Scripts\activate.bat"```
 And then you can use that terminal to run Comfyui without installing any dependencies. Note that the venv folder might be called something else depending on the SD UI.
 
 # Running
 
 ```python main.py```

View File

@@ -221,7 +221,7 @@ class KSamplerX0Inpaint(torch.nn.Module):
     def forward(self, x, sigma, uncond, cond, cond_scale, denoise_mask, cond_concat=None):
         if denoise_mask is not None:
             latent_mask = 1. - denoise_mask
-            x = x * denoise_mask + (self.latent_image + self.noise * sigma) * latent_mask
+            x = x * denoise_mask + (self.latent_image + self.noise * sigma.reshape([sigma.shape[0]] + [1] * (len(self.noise.shape) - 1))) * latent_mask
         out = self.inner_model(x, sigma, cond=cond, uncond=uncond, cond_scale=cond_scale, cond_concat=cond_concat)
         if denoise_mask is not None:
             out *= denoise_mask
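
The sigma.reshape(...) fix matters because sigma arrives as a 1-D tensor with one value per batch sample, while the noise is [B, C, H, W]; plain multiplication would broadcast against the trailing width dimension instead of the batch. A minimal sketch of the shape arithmetic, with assumed example shapes:

```python
import torch

noise = torch.randn(2, 4, 64, 64)   # [B, C, H, W] latent-space noise
sigma = torch.tensor([14.6, 0.02])  # [B] one noise level per sample

# noise * sigma would align [B] with the trailing W dimension and fail
# (or silently scale the wrong axis when the sizes happen to match).
# Reshaping to [B, 1, 1, 1] broadcasts one sigma per sample instead:
sigma_r = sigma.reshape([sigma.shape[0]] + [1] * (len(noise.shape) - 1))
print(sigma_r.shape)  # torch.Size([2, 1, 1, 1])
scaled = noise * sigma_r
```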

View File

@@ -439,9 +439,14 @@ class VAE:
         model_management.unload_model()
         self.first_stage_model = self.first_stage_model.to(self.device)
         try:
-            samples = samples_in.to(self.device)
-            pixel_samples = self.first_stage_model.decode(1. / self.scale_factor * samples)
-            pixel_samples = torch.clamp((pixel_samples + 1.0) / 2.0, min=0.0, max=1.0)
+            free_memory = model_management.get_free_memory(self.device)
+            batch_number = int((free_memory * 0.7) / (2562 * samples_in.shape[2] * samples_in.shape[3] * 64))
+            batch_number = max(1, batch_number)
+
+            pixel_samples = torch.empty((samples_in.shape[0], 3, round(samples_in.shape[2] * 8), round(samples_in.shape[3] * 8)), device="cpu")
+            for x in range(0, samples_in.shape[0], batch_number):
+                samples = samples_in[x:x+batch_number].to(self.device)
+                pixel_samples[x:x+batch_number] = torch.clamp((self.first_stage_model.decode(1. / self.scale_factor * samples) + 1.0) / 2.0, min=0.0, max=1.0).cpu()
         except model_management.OOM_EXCEPTION as e:
             print("Warning: Ran out of memory when regular VAE decoding, retrying with tiled VAE decoding.")
             pixel_samples = self.decode_tiled_(samples_in)

View File

@@ -65,8 +65,11 @@ def recursive_execute(server, prompt, outputs, current_item, extra_data={}):
     nodes.before_node_execution()
     outputs[unique_id] = getattr(obj, obj.FUNCTION)(**input_data_all)
-    if "ui" in outputs[unique_id] and server.client_id is not None:
-        server.send_sync("executed", { "node": unique_id, "output": outputs[unique_id]["ui"] }, server.client_id)
+    if "ui" in outputs[unique_id]:
+        if server.client_id is not None:
+            server.send_sync("executed", { "node": unique_id, "output": outputs[unique_id]["ui"] }, server.client_id)
+        if "result" in outputs[unique_id]:
+            outputs[unique_id] = outputs[unique_id]["result"]
     return executed + [unique_id]
 
 def recursive_will_execute(prompt, outputs, current_item):
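
The executor now accepts a dict return value from a node function: "ui" is pushed to the browser over the "executed" event, and, when present, "result" replaces the node's stored output for downstream nodes. A hypothetical custom node showing the shape the executor understands (class and field names invented for illustration):

```python
class PreviewAndPass:
    """Hypothetical node that reports UI data and still feeds downstream nodes."""

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "run"
    CATEGORY = "example"

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {"image": ("IMAGE",)}}

    def run(self, image):
        # "ui" goes to the client via send_sync("executed", ...);
        # "result" becomes outputs[unique_id] for the rest of the graph.
        return {"ui": {"text": ["preview ready"]}, "result": (image,)}
```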

View File

@@ -18,6 +18,7 @@ if __name__ == "__main__":
         print("\t--use-split-cross-attention\tUse the split cross attention optimization instead of the sub-quadratic one.\n\t\t\t\t\tIgnored when xformers is used.")
         print("\t--use-pytorch-cross-attention\tUse the new pytorch 2.0 cross attention function.")
         print("\t--disable-xformers\t\tdisables xformers")
+        print("\t--cuda-device 1\t\tSet the id of the cuda device this instance will use.")
         print()
         print("\t--highvram\t\t\tBy default models will be unloaded to CPU memory after being used.\n\t\t\t\t\tThis option keeps them in GPU memory.\n")
         print("\t--normalvram\t\t\tUsed to force normal vram use if lowvram gets automatically enabled.")

@@ -31,6 +32,14 @@ if __name__ == "__main__":
         print("disabling upcasting of attention")
         os.environ['ATTN_PRECISION'] = "fp16"
 
+    try:
+        index = sys.argv.index('--cuda-device')
+        device = sys.argv[index + 1]
+        os.environ['CUDA_VISIBLE_DEVICES'] = device
+        print("Set cuda device to:", device)
+    except:
+        pass
+
 import execution
 import server
 import folder_paths
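
The --cuda-device block deliberately runs before execution, server, and folder_paths are imported, because those imports pull in torch, and CUDA only reads CUDA_VISIBLE_DEVICES once at initialization. A minimal sketch of the ordering constraint, assuming a multi-GPU machine:

```python
import os
import sys

# Must happen before the first `import torch` anywhere in the process;
# setting CUDA_VISIBLE_DEVICES afterwards has no effect.
if '--cuda-device' in sys.argv:
    os.environ['CUDA_VISIBLE_DEVICES'] = sys.argv[sys.argv.index('--cuda-device') + 1]

import torch
print(torch.cuda.device_count())  # counts only the devices left visible
```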

View File

@@ -47,7 +47,7 @@
     " !git pull\n",
     "\n",
     "!echo -= Install dependencies =-\n",
-    "!pip -q install xformers -r requirements.txt"
+    "!pip install xformers==0.0.16 -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cu117"
   ]
 },
 {

View File

@@ -3,7 +3,7 @@ torchdiffeq
 torchsde
 einops
 open-clip-torch
-transformers
+transformers>=4.25.1
 safetensors
 pytorch_lightning
 aiohttp

View File

@@ -0,0 +1,100 @@
+import { app } from "/scripts/app.js";
+
+// Use widget values and dates in output filenames
+
+app.registerExtension({
+    name: "Comfy.SaveImageExtraOutput",
+    async beforeRegisterNodeDef(nodeType, nodeData, app) {
+        if (nodeData.name === "SaveImage") {
+            const onNodeCreated = nodeType.prototype.onNodeCreated;
+
+            // Simple date formatter
+            const parts = {
+                d: (d) => d.getDate(),
+                M: (d) => d.getMonth() + 1,
+                h: (d) => d.getHours(),
+                m: (d) => d.getMinutes(),
+                s: (d) => d.getSeconds(),
+            };
+            const format =
+                Object.keys(parts)
+                    .map((k) => k + k + "?")
+                    .join("|") + "|yyy?y?";
+
+            function formatDate(text, date) {
+                return text.replace(new RegExp(format, "g"), function (text) {
+                    if (text === "yy") return (date.getFullYear() + "").substring(2);
+                    if (text === "yyyy") return date.getFullYear();
+                    if (text[0] in parts) {
+                        const p = parts[text[0]](date);
+                        return (p + "").padStart(text.length, "0");
+                    }
+                    return text;
+                });
+            }
+
+            // When the SaveImage node is created we want to override the serialization of the output name widget to run our S&R
+            nodeType.prototype.onNodeCreated = function () {
+                const r = onNodeCreated ? onNodeCreated.apply(this, arguments) : undefined;
+
+                const widget = this.widgets.find((w) => w.name === "filename_prefix");
+                widget.serializeValue = () => {
+                    return widget.value.replace(/%([^%]+)%/g, function (match, text) {
+                        const split = text.split(".");
+                        if (split.length !== 2) {
+                            // Special handling for dates
+                            if (split[0].startsWith("date:")) {
+                                return formatDate(split[0].substring(5), new Date());
+                            }
+
+                            if (text !== "width" && text !== "height") {
+                                // Dont warn on standard replacements
+                                console.warn("Invalid replacement pattern", text);
+                            }
+                            return match;
+                        }
+
+                        // Find node with matching S&R property name
+                        let nodes = app.graph._nodes.filter((n) => n.properties?.["Node name for S&R"] === split[0]);
+                        // If we cant, see if there is a node with that title
+                        if (!nodes.length) {
+                            nodes = app.graph._nodes.filter((n) => n.title === split[0]);
+                        }
+
+                        if (!nodes.length) {
+                            console.warn("Unable to find node", split[0]);
+                            return match;
+                        }
+
+                        if (nodes.length > 1) {
+                            console.warn("Multiple nodes matched", split[0], "using first match");
+                        }
+
+                        const node = nodes[0];
+
+                        const widget = node.widgets?.find((w) => w.name === split[1]);
+                        if (!widget) {
+                            console.warn("Unable to find widget", split[1], "on node", split[0], node);
+                            return match;
+                        }
+
+                        return ((widget.value ?? "") + "").replaceAll(/\/|\\/g, "_");
+                    });
+                };
+
+                return r;
+            };
+        } else {
+            // When any other node is created add a property to alias the node
+            const onNodeCreated = nodeType.prototype.onNodeCreated;
+            nodeType.prototype.onNodeCreated = function () {
+                const r = onNodeCreated ? onNodeCreated.apply(this, arguments) : undefined;
+                if (!this.properties || !("Node name for S&R" in this.properties)) {
+                    this.addProperty("Node name for S&R", this.title, "string");
+                }
+                return r;
+            };
+        }
+    },
+});
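
In effect this extension rewrites SaveImage's filename_prefix at queue time: %date:FORMAT% expands to a timestamp, and %Node.widget% to that widget's current value with path separators replaced. A Python sketch of the same substitution rules, with the graph lookup stubbed by a plain dict (node and widget names are illustrative):

```python
import re
from datetime import datetime

NODES = {"KSampler": {"seed": 12345, "steps": 20}}  # title -> widget values (stub)

def format_date(fmt, now):
    parts = {"d": now.day, "M": now.month, "h": now.hour, "m": now.minute, "s": now.second}
    def sub(m):
        t = m.group(0)
        if t == "yy":
            return f"{now.year % 100:02}"
        if t == "yyyy":
            return str(now.year)
        return str(parts[t[0]]).rjust(len(t), "0")
    return re.sub(r"dd?|MM?|hh?|mm?|ss?|yyy?y?", sub, fmt)

def expand_prefix(prefix):
    def sub(m):
        text = m.group(1)
        if text.startswith("date:"):
            return format_date(text[5:], datetime.now())
        node, _, widget = text.partition(".")
        if widget and node in NODES and widget in NODES[node]:
            return re.sub(r"[/\\]", "_", str(NODES[node][widget]))  # strip path separators
        return m.group(0)  # unknown patterns are left untouched
    return re.sub(r"%([^%]+)%", sub, prefix)

print(expand_prefix("ComfyUI_%date:yyyy-MM-dd%_%KSampler.seed%"))
# -> e.g. ComfyUI_2023-03-30_12345
```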

View File

@@ -417,6 +417,59 @@
         };
     }
 
+    /**
+     * Handle keypress
+     *
+     * Ctrl + M mute/unmute selected nodes
+     */
+    #addProcessKeyHandler() {
+        const self = this;
+        const origProcessKey = LGraphCanvas.prototype.processKey;
+        LGraphCanvas.prototype.processKey = function (e) {
+            const res = origProcessKey.apply(this, arguments);
+            if (res === false) {
+                return res;
+            }
+
+            if (!this.graph) {
+                return;
+            }
+
+            var block_default = false;
+
+            if (e.target.localName == "input") {
+                return;
+            }
+
+            if (e.type == "keydown") {
+                // Ctrl + M mute/unmute
+                if (e.keyCode == 77 && e.ctrlKey) {
+                    if (this.selected_nodes) {
+                        for (var i in this.selected_nodes) {
+                            if (this.selected_nodes[i].mode === 2) { // never
+                                this.selected_nodes[i].mode = 0; // always
+                            } else {
+                                this.selected_nodes[i].mode = 2; // never
+                            }
+                        }
+                    }
+                    block_default = true;
+                }
+            }
+
+            this.graph.change();
+
+            if (block_default) {
+                e.preventDefault();
+                e.stopImmediatePropagation();
+                return false;
+            }
+
+            return res;
+        };
+    }
+
     /**
      * Draws group header bar
      */
@@ -465,10 +518,11 @@
      * Draws node highlights (executing, drag drop) and progress bar
      */
     #addDrawNodeHandler() {
-        const orig = LGraphCanvas.prototype.drawNodeShape;
+        const origDrawNodeShape = LGraphCanvas.prototype.drawNodeShape;
         const self = this;
         LGraphCanvas.prototype.drawNodeShape = function (node, ctx, size, fgcolor, bgcolor, selected, mouse_over) {
-            const res = orig.apply(this, arguments);
+            const res = origDrawNodeShape.apply(this, arguments);
 
             let color = null;
             if (node.id === +self.runningNodeId) {
@@ -517,6 +571,21 @@
             return res;
         };
 
+        const origDrawNode = LGraphCanvas.prototype.drawNode;
+        LGraphCanvas.prototype.drawNode = function (node, ctx) {
+            var editor_alpha = this.editor_alpha;
+
+            if (node.mode === 2) { // never
+                this.editor_alpha = 0.4;
+            }
+
+            const res = origDrawNode.apply(this, arguments);
+
+            this.editor_alpha = editor_alpha;
+
+            return res;
+        };
     }
 
     /**
@@ -548,6 +617,10 @@
         api.addEventListener("executed", ({ detail }) => {
             this.nodeOutputs[detail.node] = detail.output;
+            const node = this.graph.getNodeById(detail.node);
+            if (node?.onExecuted) {
+                node.onExecuted(detail.output);
+            }
         });
 
         api.init();
@@ -588,6 +661,7 @@
         document.body.prepend(canvasEl);
 
         this.#addProcessMouseHandler();
+        this.#addProcessKeyHandler();
 
         this.graph = new LGraph();
         const canvas = (this.canvas = new LGraphCanvas(canvasEl, this.graph));
@@ -669,18 +743,22 @@
                 const inputData = inputs[inputName];
                 const type = inputData[0];
 
-                if (Array.isArray(type)) {
-                    // Enums
-                    Object.assign(config, widgets.COMBO(this, inputName, inputData, app) || {});
-                } else if (`${type}:${inputName}` in widgets) {
-                    // Support custom widgets by Type:Name
-                    Object.assign(config, widgets[`${type}:${inputName}`](this, inputName, inputData, app) || {});
-                } else if (type in widgets) {
-                    // Standard type widgets
-                    Object.assign(config, widgets[type](this, inputName, inputData, app) || {});
-                } else {
-                    // Node connection inputs
-                    this.addInput(inputName, type);
+                if (inputData[1]?.forceInput) {
+                    this.addInput(inputName, type);
+                } else {
+                    if (Array.isArray(type)) {
+                        // Enums
+                        Object.assign(config, widgets.COMBO(this, inputName, inputData, app) || {});
+                    } else if (`${type}:${inputName}` in widgets) {
+                        // Support custom widgets by Type:Name
+                        Object.assign(config, widgets[`${type}:${inputName}`](this, inputName, inputData, app) || {});
+                    } else if (type in widgets) {
+                        // Standard type widgets
+                        Object.assign(config, widgets[type](this, inputName, inputData, app) || {});
+                    } else {
+                        // Node connection inputs
+                        this.addInput(inputName, type);
+                    }
                 }
             }
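
The forceInput flag lets a node definition demand a graph connection for a type that would otherwise be rendered as a widget. On the Python side a hypothetical node opts in like this (the class is invented; only the forceInput option is the feature in question):

```python
class AddOffset:
    """Hypothetical node: `value` stays a widget, `offset` becomes an input socket."""

    RETURN_TYPES = ("INT",)
    FUNCTION = "add"
    CATEGORY = "example"

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "value": ("INT", {"default": 0}),         # normal INT widget
                "offset": ("INT", {"forceInput": True}),  # forced to a connection
            }
        }

    def add(self, value, offset):
        return (value + offset,)
```
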
@@ -777,6 +855,11 @@
                 continue;
             }
 
+            if (node.mode === 2) {
+                // Don't serialize muted nodes
+                continue;
+            }
+
             const inputs = {};
             const widgets = node.widgets;
@@ -816,6 +899,18 @@
             };
         }
 
+        // Remove inputs connected to removed nodes
+        for (const o in output) {
+            for (const i in output[o].inputs) {
+                if (Array.isArray(output[o].inputs[i])
+                    && output[o].inputs[i].length === 2
+                    && !output[output[o].inputs[i][0]]) {
+                    delete output[o].inputs[i];
+                }
+            }
+        }
+
         return { workflow, output };
     }

View File

@@ -140,6 +140,12 @@ body {
     display: flex;
     flex-direction: column;
     align-items: center;
+    color: #999;
+    background-color: #353535;
+    font-family: sans-serif;
+    padding: 10px;
+    border-radius: 0 8px 8px 8px;
+    box-shadow: 3px 3px 8px rgba(0, 0, 0, 0.4);
 }
 
 .comfy-menu button {
@@ -154,6 +160,22 @@
 .comfy-menu-btns button {
     font-size: 10px;
     width: 50%;
+    color: #999 !important;
+}
+
+.comfy-menu > button {
+    width: 100%;
+}
+
+.comfy-menu > button,
+.comfy-menu-btns button,
+.comfy-menu .comfy-list button {
+    color: #ddd;
+    background-color: #222;
+    border-radius: 8px;
+    border-color: #4e4e4e;
+    border-style: solid;
+    margin-top: 2px;
 }
 
 .comfy-menu span.drag-handle {
@@ -186,14 +208,18 @@
 }
 
 .comfy-list {
-    background-color: rgb(225, 225, 225);
+    color: #999;
+    background-color: #333;
     margin-bottom: 10px;
+    border-color: #4e4e4e;
+    border-style: solid;
 }
 
 .comfy-list-items {
     overflow-y: scroll;
     max-height: 100px;
-    background-color: #d0d0d0;
+    min-height: 25px;
+    background-color: #222;
     padding: 5px;
 }
@@ -220,6 +246,7 @@
 }
 
 button.comfy-settings-btn {
+    background-color: rgba(0, 0, 0, 0);
     font-size: 12px;
     padding: 0;
     position: absolute;
@@ -227,6 +254,10 @@
     border: none;
 }
 
+button.comfy-queue-btn {
+    margin: 6px 0 !important;
+}
+
 .comfy-modal.comfy-settings {
     background-color: var(--bg-color);
     color: var(--fg-color);
@@ -235,6 +266,13 @@ button.comfy-settings-btn {
 @media only screen and (max-height: 850px) {
     .comfy-menu {
-        margin-top: -70px;
+        top: 0 !important;
+        bottom: 0 !important;
+        left: auto !important;
+        right: 0 !important;
+        border-radius: 0;
+    }
+
+    .comfy-menu span.drag-handle {
+        visibility: hidden;
     }
 }