Merge branch 'Main' into feature/img-send

Lt.Dr.Data 2023-03-27 17:59:09 +09:00
commit 3924eb4c1a
5 changed files with 119 additions and 12 deletions

View File

@@ -15,6 +15,8 @@ total_vram_available_mb = -1
 
 import sys
 import psutil
 
+forced_cpu = "--cpu" in sys.argv
+
 set_vram_to = NORMAL_VRAM
 try:
@@ -22,7 +24,7 @@ try:
     total_vram = torch.cuda.mem_get_info(torch.cuda.current_device())[1] / (1024 * 1024)
     total_ram = psutil.virtual_memory().total / (1024 * 1024)
     forced_normal_vram = "--normalvram" in sys.argv
-    if not forced_normal_vram:
+    if not forced_normal_vram and not forced_cpu:
         if total_vram <= 4096:
             print("Trying to enable lowvram mode because your GPU seems to have 4GB or less. If you don't want this use: --normalvram")
             set_vram_to = LOW_VRAM
@@ -83,7 +85,7 @@ try:
 except:
     pass
 
-if "--cpu" in sys.argv:
+if forced_cpu:
     vram_state = CPU
 
 print("Set vram state to:", ["CPU", "NO VRAM", "LOW VRAM", "NORMAL VRAM", "HIGH VRAM", "MPS"][vram_state])

View File

@@ -18,6 +18,8 @@ def get_input_data(inputs, class_def, outputs={}, prompt={}, extra_data={}):
         if isinstance(input_data, list):
             input_unique_id = input_data[0]
             output_index = input_data[1]
+            if input_unique_id not in outputs:
+                return None
             obj = outputs[input_unique_id][output_index]
             input_data_all[x] = obj
         else:
@@ -94,7 +96,8 @@ def recursive_output_delete_if_changed(prompt, old_prompt, outputs, current_item):
     if unique_id in old_prompt and 'is_changed' in old_prompt[unique_id]:
         is_changed_old = old_prompt[unique_id]['is_changed']
     if 'is_changed' not in prompt[unique_id]:
-        input_data_all = get_input_data(inputs, class_def)
-        is_changed = class_def.IS_CHANGED(**input_data_all)
-        prompt[unique_id]['is_changed'] = is_changed
+        input_data_all = get_input_data(inputs, class_def, outputs)
+        if input_data_all is not None:
+            is_changed = class_def.IS_CHANGED(**input_data_all)
+            prompt[unique_id]['is_changed'] = is_changed
     else:
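Note: these two hunks cooperate: `get_input_data` now receives the `outputs` cache and returns `None` when an upstream node has not produced its output yet, and the `IS_CHANGED` caller checks for `None` instead of crashing on a missing key. A toy sketch of the guard pattern (the `outputs` shape here is simplified and the names are illustrative):

    def get_input_data(inputs, outputs):
        # Resolve node inputs; bail out with None if a dependency is missing.
        resolved = {}
        for name, (src_id, out_index) in inputs.items():
            if src_id not in outputs:   # upstream node not executed yet
                return None             # caller must handle this case
            resolved[name] = outputs[src_id][out_index]
        return resolved

    outputs = {"3": ["latent"]}                          # node "3" produced output 0
    ok = get_input_data({"samples": ("3", 0)}, outputs)
    missing = get_input_data({"samples": ("9", 0)}, outputs)
    assert ok == {"samples": "latent"} and missing is None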
@@ -278,7 +281,7 @@ def validate_prompt(prompt):
             errors += [(o, reason)]
 
     if len(good_outputs) == 0:
-        errors_list = "\n".join(map(lambda a: "{}".format(a[1]), errors))
+        errors_list = "\n".join(set(map(lambda a: "{}".format(a[1]), errors)))
         return (False, "Prompt has no properly connected outputs\n {}".format(errors_list))
 
     return (True, "")
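Note: wrapping the `map` in `set()` collapses duplicate failure reasons so the error message lists each distinct reason once; the trade-off is that `set` drops the original ordering. For example:

    errors = [("9", "Missing input"), ("10", "Missing input"), ("11", "Bad type")]

    print("\n".join(map(lambda a: "{}".format(a[1]), errors)))        # reason repeated
    print("\n".join(set(map(lambda a: "{}".format(a[1]), errors))))   # each reason once, unordered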

View File

@@ -165,6 +165,7 @@ class PromptServer():
                 info = {}
                 info['input'] = obj_class.INPUT_TYPES()
                 info['output'] = obj_class.RETURN_TYPES
+                info['output_name'] = obj_class.RETURN_NAMES if hasattr(obj_class, 'RETURN_NAMES') else info['output']
                 if hasattr(obj_class, 'WIDGET_TYPES'):
                     info['widget'] = obj_class.WIDGET_TYPES
                 info['name'] = x #TODO
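Note: the `hasattr` fallback keeps existing node classes working: a node that declares no `RETURN_NAMES` simply reuses its `RETURN_TYPES` as display names. A minimal sketch of both shapes (the node class is hypothetical):

    class ExampleNode:
        # Types drive socket compatibility; names are just UI labels.
        RETURN_TYPES = ("IMAGE", "MASK")
        RETURN_NAMES = ("blended", "coverage")   # optional; falls back to RETURN_TYPES

    def output_names(obj_class):
        return obj_class.RETURN_NAMES if hasattr(obj_class, 'RETURN_NAMES') else obj_class.RETURN_TYPES

    print(output_names(ExampleNode))   # ('blended', 'coverage')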

View File

@@ -371,6 +371,96 @@ class ComfyApp {
 		});
 	}
 
+	/**
+	 * Handle mouse
+	 *
+	 * Move group by header
+	 */
+	#addProcessMouseHandler() {
+		const self = this;
+		const origProcessMouseDown = LGraphCanvas.prototype.processMouseDown;
+		LGraphCanvas.prototype.processMouseDown = function(e) {
+			const res = origProcessMouseDown.apply(this, arguments);
+
+			this.selected_group_moving = false;
+
+			if (this.selected_group && !this.selected_group_resizing) {
+				var font_size =
+					this.selected_group.font_size || LiteGraph.DEFAULT_GROUP_FONT_SIZE;
+				var height = font_size * 1.4;
+
+				// Move group by header
+				if (LiteGraph.isInsideRectangle(e.canvasX, e.canvasY, this.selected_group.pos[0], this.selected_group.pos[1], this.selected_group.size[0], height)) {
+					this.selected_group_moving = true;
+				}
+			}
+
+			return res;
+		}
+
+		const origProcessMouseMove = LGraphCanvas.prototype.processMouseMove;
+		LGraphCanvas.prototype.processMouseMove = function(e) {
+			const orig_selected_group = this.selected_group;
+
+			if (this.selected_group && !this.selected_group_resizing && !this.selected_group_moving) {
+				this.selected_group = null;
+			}
+
+			const res = origProcessMouseMove.apply(this, arguments);
+
+			if (orig_selected_group && !this.selected_group_resizing && !this.selected_group_moving) {
+				this.selected_group = orig_selected_group;
+			}
+
+			return res;
+		};
+	}
+
+	/**
+	 * Draws group header bar
+	 */
+	#addDrawGroupsHandler() {
+		const self = this;
+
+		const origDrawGroups = LGraphCanvas.prototype.drawGroups;
+		LGraphCanvas.prototype.drawGroups = function(canvas, ctx) {
+			if (!this.graph) {
+				return;
+			}
+
+			var groups = this.graph._groups;
+
+			ctx.save();
+			ctx.globalAlpha = 0.7 * this.editor_alpha;
+
+			for (var i = 0; i < groups.length; ++i) {
+				var group = groups[i];
+
+				if (!LiteGraph.overlapBounding(this.visible_area, group._bounding)) {
+					continue;
+				} //out of the visible area
+
+				ctx.fillStyle = group.color || "#335";
+				ctx.strokeStyle = group.color || "#335";
+
+				var pos = group._pos;
+				var size = group._size;
+
+				ctx.globalAlpha = 0.25 * this.editor_alpha;
+				ctx.beginPath();
+				var font_size =
+					group.font_size || LiteGraph.DEFAULT_GROUP_FONT_SIZE;
+				ctx.rect(pos[0] + 0.5, pos[1] + 0.5, size[0], font_size * 1.4);
+				ctx.fill();
+				ctx.globalAlpha = this.editor_alpha;
+			}
+
+			ctx.restore();
+
+			const res = origDrawGroups.apply(this, arguments);
+			return res;
+		}
+	}
+
 	/**
 	 * Draws node highlights (executing, drag drop) and progress bar
 	 */
@@ -518,6 +608,8 @@ class ComfyApp {
 		canvasEl.tabIndex = "1";
 		document.body.prepend(canvasEl);
 
+		this.#addProcessMouseHandler();
+
 		this.graph = new LGraph();
 		const canvas = (this.canvas = new LGraphCanvas(canvasEl, this.graph));
 		this.ctx = canvasEl.getContext("2d");
@@ -561,6 +653,7 @@ class ComfyApp {
 		setInterval(() => localStorage.setItem("workflow", JSON.stringify(this.graph.serialize())), 1000);
 
 		this.#addDrawNodeHandler();
+		this.#addDrawGroupsHandler();
 		this.#addApiUpdateHandlers();
 		this.#addDropHandler();
 		this.#addPasteHandler();
@@ -590,7 +683,13 @@ class ComfyApp {
 			const nodeData = defs[nodeId];
 			const node = Object.assign(
 				function ComfyNode() {
-					const inputs = Object.assign(nodeData["input"]["required"], nodeData["widget"]);
+					var inputs = nodeData["input"]["required"];
+					if (nodeData["input"]["optional"] != undefined){
+						inputs = Object.assign({}, nodeData["input"]["required"], nodeData["input"]["optional"])
+					}
+					if (nodeData["widget"] != undefined){
+						inputs = Object.assign(inputs, nodeData["widget"])
+					}
 					const config = { minWidth: 1, minHeight: 1 };
 					for (const inputName in inputs) {
 						const inputData = inputs[inputName];
@@ -611,8 +710,10 @@ class ComfyApp {
 						}
 					}
 
-					for (const output of nodeData["output"]) {
-						this.addOutput(output, output);
+					for (const o in nodeData["output"]) {
+						const output = nodeData["output"][o];
+						const outputName = nodeData["output_name"][o] || output;
+						this.addOutput(outputName, output);
 					}
 
 					const s = this.computeSize();
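Note: the `optional` branch above pairs with backend node classes whose `INPUT_TYPES` returns an `"optional"` dict next to `"required"`; `Object.assign({}, ...)` also copies into a fresh object so the shared `nodeData` is not mutated. A hedged Python sketch of such a node definition and the equivalent merge (the node and its fields are illustrative):

    class ExampleNode:
        @classmethod
        def INPUT_TYPES(cls):
            return {
                "required": {"image": ("IMAGE",)},
                "optional": {"caption": ("STRING", {"default": ""})},
            }

    # The frontend merges both dicts into one socket list, mirroring the JS above:
    spec = ExampleNode.INPUT_TYPES()
    inputs = {**spec["required"], **spec.get("optional", {})}
    print(list(inputs))   # ['image', 'caption']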