Mirror of https://github.com/comfyanonymous/ComfyUI.git

Merge branch 'comfyanonymous:master' into master
commit 54f6b746b5
@@ -45,6 +45,7 @@ Workflow examples can be found on the [Examples page](https://comfyanonymous.git
|---------------------------|--------------------------------------------------------------------------------------------------------------------|
| Ctrl + Enter              | Queue up current graph for generation                                                                               |
| Ctrl + Shift + Enter      | Queue up current graph as first for generation                                                                      |
| Ctrl + Z/Ctrl + Y         | Undo/Redo                                                                                                           |
| Ctrl + S                  | Save workflow                                                                                                       |
| Ctrl + O                  | Load workflow                                                                                                       |
| Ctrl + A                  | Select all nodes                                                                                                    |

@@ -100,6 +101,7 @@ AMD users can install rocm and pytorch with pip if you don't have it already ins

```pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm5.6```

This is the command to install the nightly with ROCm 5.7 that might have some performance improvements:

```pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/rocm5.7```

### NVIDIA

@@ -192,7 +194,7 @@ To use a textual inversion concepts/embeddings in a text prompt put them in the

Make sure you use the regular loaders/Load Checkpoint node to load checkpoints. It will auto pick the right settings depending on your GPU.

-You can set this command line setting to disable the upcasting to fp32 in some cross attention operations which will increase your speed. Note that this will very likely give you black images on SD2.x models. If you use xformers this option does not do anything.
+You can set this command line setting to disable the upcasting to fp32 in some cross attention operations which will increase your speed. Note that this will very likely give you black images on SD2.x models. If you use xformers or pytorch attention this option does not do anything.

```--dont-upcast-attention```

@@ -42,7 +42,7 @@ async function uploadMask(filepath, formData) {
	});

	ComfyApp.clipspace.imgs[ComfyApp.clipspace['selectedIndex']] = new Image();
-	ComfyApp.clipspace.imgs[ComfyApp.clipspace['selectedIndex']].src = api.apiURL("/view?" + new URLSearchParams(filepath).toString() + app.getPreviewFormatParam());
+	ComfyApp.clipspace.imgs[ComfyApp.clipspace['selectedIndex']].src = api.apiURL("/view?" + new URLSearchParams(filepath).toString() + app.getPreviewFormatParam() + app.getRandParam());

	if(ComfyApp.clipspace.images)
		ComfyApp.clipspace.images[ComfyApp.clipspace['selectedIndex']] = filepath;

@@ -657,4 +657,4 @@ app.registerExtension({
		const context_predicate = () => ComfyApp.clipspace && ComfyApp.clipspace.imgs && ComfyApp.clipspace.imgs.length > 0
		ClipspaceDialog.registerButton("MaskEditor", context_predicate, ComfyApp.open_maskeditor);
	}
-});
+});

@@ -86,6 +86,10 @@ export class ComfyApp {
		return "";
	}

+	getRandParam() {
+		return "&rand=" + Math.random();
+	}
+
	static isImageNode(node) {
		return node.imgs || (node && node.widgets && node.widgets.findIndex(obj => obj.name === 'image') >= 0);
	}

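The new `getRandParam()` helper exists purely for cache busting: a `/view` URL is otherwise stable, so the browser can keep serving a stale cached image after a preview changes. A minimal standalone sketch of the idea (the function body is copied from the hunk above; the example URL is made up for illustration):

```js
// Cache-busting sketch: each call yields a different query fragment, so the
// same /view resource gets a unique URL and is re-fetched instead of being
// served from the browser's image cache.
function getRandParam() {
	return "&rand=" + Math.random();
}

const base = "/view?filename=example.png&type=input&subfolder=";
console.log(base + getRandParam()); // e.g. /view?...&rand=0.7319...
console.log(base + getRandParam()); // different rand value => a fresh request
```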
@@ -411,7 +415,7 @@ export class ComfyApp {
		node.prototype.setSizeForImage = function (force) {
			if(!force && this.animatedImages) return;

-			if (this.inputHeight) {
+			if (this.inputHeight || this.freeWidgetSpace > 210) {
				this.setSize(this.size);
				return;
			}

@@ -437,7 +441,7 @@ export class ComfyApp {
				return api.apiURL(
					"/view?" +
						new URLSearchParams(params).toString() +
-						(this.animatedImages ? "" : app.getPreviewFormatParam())
+						(this.animatedImages ? "" : app.getPreviewFormatParam()) + app.getRandParam()
				);
			})
		);

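For context, a sketch of how the final URL is composed in this branch: animated outputs skip the preview-format parameter (so they are not flattened into a static preview) but still receive the cache-busting parameter. `buildViewURL` and the sample values are illustrative, not ComfyUI API:

```js
// Illustrative composition of the /view URL built above (assumed helper, not ComfyUI code).
function buildViewURL(params, animated, previewParam, randParam) {
	return (
		"/view?" +
		new URLSearchParams(params).toString() +
		(animated ? "" : previewParam) + // animated previews keep their original format
		randParam                        // every rebuild gets a unique URL
	);
}

// Example usage with made-up values:
const url = buildViewURL(
	{ filename: "ComfyUI_00001_.png", type: "output", subfolder: "" },
	false,
	"&preview=webp",
	"&rand=" + Math.random()
);
console.log(url);
```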
@@ -1874,6 +1878,8 @@ export class ComfyApp {
				if (pngInfo) {
					if (pngInfo.workflow) {
						await this.loadGraphData(JSON.parse(pngInfo.workflow));
					} else if (pngInfo.prompt) {
						this.loadApiJson(JSON.parse(pngInfo.prompt));
					} else if (pngInfo.parameters) {
						importA1111(this.graph, pngInfo.parameters);
					}

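The branch above tries embedded PNG metadata in order of decreasing fidelity: a full `workflow`, then an API-format `prompt`, then A1111-style `parameters` text handed to `importA1111`. A condensed sketch of that fallback, where `loadFromPngInfo` is an illustrative wrapper rather than an actual ComfyUI method:

```js
// Illustrative condensation of the metadata fallback above (assumed wrapper, not ComfyUI code).
async function loadFromPngInfo(app, graph, pngInfo) {
	if (!pngInfo) return false;
	if (pngInfo.workflow) {
		await app.loadGraphData(JSON.parse(pngInfo.workflow)); // full graph: best case
	} else if (pngInfo.prompt) {
		app.loadApiJson(JSON.parse(pngInfo.prompt));           // API-format prompt only
	} else if (pngInfo.parameters) {
		importA1111(graph, pngInfo.parameters);                // A1111-style parameter string; imported elsewhere in the file (see the diff)
	} else {
		return false;
	}
	return true;
}
```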
@@ -1885,6 +1891,8 @@ export class ComfyApp {
						this.loadGraphData(JSON.parse(pngInfo.workflow));
+					} else if (pngInfo.Workflow) {
+						this.loadGraphData(JSON.parse(pngInfo.Workflow)); // Support loading workflows from that webp custom node.
					} else if (pngInfo.prompt) {
						this.loadApiJson(JSON.parse(pngInfo.prompt));
					}
				}
			} else if (file.type === "application/json" || file.name?.endsWith(".json")) {

@@ -1904,6 +1912,8 @@ export class ComfyApp {
				const info = await getLatentMetadata(file);
				if (info.workflow) {
					await this.loadGraphData(JSON.parse(info.workflow));
				} else if (info.prompt) {
					this.loadApiJson(JSON.parse(info.prompt));
				}
			}
		}

@@ -120,6 +120,8 @@ function computeSize(size) {
		freeSpace -= 220;
	}

+	this.freeWidgetSpace = freeSpace;
+
	if (freeSpace < 0) {
		// Not enough space for all widgets so we need to grow
		size[1] -= freeSpace;

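This assignment is what the earlier `setSizeForImage` change reads: `computeSize` records how much vertical space is left after laying out the widgets, and the node only skips resizing when enough room (more than 210px in the hunk above) already exists for an image preview. A minimal sketch of that relationship, using a plain object rather than a real LiteGraph node:

```js
// Sketch of how the two changes interact (plain illustrative object, not a LiteGraph node).
const node = {
	freeWidgetSpace: 0, // written by computeSize: leftover height after widgets are laid out
	inputHeight: null,
	size: [220, 320],
	setSize(size) { this.size = size; },
};

// Simplified version of the check added in setSizeForImage:
function maybeKeepCurrentSize(node) {
	if (node.inputHeight || node.freeWidgetSpace > 210) {
		node.setSize(node.size); // enough free space: keep the node as it is
		return true;
	}
	return false; // not enough room, so the caller grows the node to fit the image
}

node.freeWidgetSpace = 250;
console.log(maybeKeepCurrentSize(node)); // true: the preview fits in the existing free space
```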
@@ -310,7 +310,7 @@ export const ComfyWidgets = {
				subfolder = name.substring(0, folder_separator);
				name = name.substring(folder_separator + 1);
			}
-			img.src = api.apiURL(`/view?filename=${encodeURIComponent(name)}&type=input&subfolder=${subfolder}${app.getPreviewFormatParam()}`);
+			img.src = api.apiURL(`/view?filename=${encodeURIComponent(name)}&type=input&subfolder=${subfolder}${app.getPreviewFormatParam()}${app.getRandParam()}`);
			node.setSizeForImage?.();
		}

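For reference, a self-contained sketch of the filename handling in this widget: an uploaded file may arrive as `subfolder/name.png`, so the name is split on a separator before the `/view` URL is assembled. The choice of `lastIndexOf` and the two extra query parameters are assumptions based on the surrounding lines:

```js
// Illustrative recreation of the upload-name handling above (assumptions noted inline).
function buildInputImageURL(fullName, previewParam = "", randParam = "") {
	let name = fullName;
	let subfolder = "";
	const folder_separator = name.lastIndexOf("/"); // assumed: split on the last separator
	if (folder_separator > -1) {
		subfolder = name.substring(0, folder_separator);
		name = name.substring(folder_separator + 1);
	}
	return `/view?filename=${encodeURIComponent(name)}&type=input&subfolder=${subfolder}${previewParam}${randParam}`;
}

console.log(buildInputImageURL("masks/example.png", "", "&rand=" + Math.random()));
// -> /view?filename=example.png&type=input&subfolder=masks&rand=0.42...
```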