Merge branch 'comfyanonymous:master' into master

commit d8fa58a532
Saquib Alam, 2023-08-08 19:34:46 +05:30 (committed via GitHub)
18 changed files with 593 additions and 27 deletions

View File

@ -39,6 +39,7 @@ parser.add_argument("--enable-cors-header", type=str, default=None, metavar="ORI
parser.add_argument("--extra-model-paths-config", type=str, default=None, metavar="PATH", nargs='+', action='append', help="Load one or more extra_model_paths.yaml files.")
parser.add_argument("--output-directory", type=str, default=None, help="Set the ComfyUI output directory.")
parser.add_argument("--auto-launch", action="store_true", help="Automatically launch ComfyUI in the default browser.")
parser.add_argument("--disable-auto-launch", action="store_true", help="Disable auto launching the browser.")
parser.add_argument("--cuda-device", type=int, default=None, metavar="DEVICE_ID", help="Set the id of the cuda device this instance will use.")
cm_group = parser.add_mutually_exclusive_group()
cm_group.add_argument("--cuda-malloc", action="store_true", help="Enable cudaMallocAsync (enabled by default for torch 2.0 and up).")
@ -90,3 +91,6 @@ args = parser.parse_args()
if args.windows_standalone_build:
args.auto_launch = True
if args.disable_auto_launch:
args.auto_launch = False
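
The new --disable-auto-launch flag overrides the auto-launch that --windows-standalone-build forces on. A usage sketch (flag names taken from the options above):

    python main.py --windows-standalone-build --disable-auto-launch    # start the server without opening a browser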

View File

@ -535,7 +535,7 @@ def should_use_fp16(device=None, model_params=0):
return False
#FP16 is just broken on these cards
nvidia_16_series = ["1660", "1650", "1630", "T500", "T550", "T600", "MX550", "MX450"]
nvidia_16_series = ["1660", "1650", "1630", "T500", "T550", "T600", "MX550", "MX450", "CMP 30HX"]
for x in nvidia_16_series:
if x in props.name:
return False

View File

@ -70,13 +70,22 @@ def load_lora(lora, to_load):
alpha = lora[alpha_name].item()
loaded_keys.add(alpha_name)
A_name = "{}.lora_up.weight".format(x)
B_name = "{}.lora_down.weight".format(x)
mid_name = "{}.lora_mid.weight".format(x)
regular_lora = "{}.lora_up.weight".format(x)
diffusers_lora = "{}_lora.up.weight".format(x)
A_name = None
if A_name in lora.keys():
if regular_lora in lora.keys():
A_name = regular_lora
B_name = "{}.lora_down.weight".format(x)
mid_name = "{}.lora_mid.weight".format(x)
elif diffusers_lora in lora.keys():
A_name = diffusers_lora
B_name = "{}_lora.down.weight".format(x)
mid_name = None
if A_name is not None:
mid = None
if mid_name in lora.keys():
if mid_name is not None and mid_name in lora.keys():
mid = lora[mid_name]
loaded_keys.add(mid_name)
patch_dict[to_load[x]] = (lora[A_name], lora[B_name], alpha, mid)
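
The updated load_lora accepts both the regular LoRA key layout and the diffusers-style layout shown above. A minimal sketch of the two naming schemes, using a hypothetical module key "foo" and arbitrary tensor shapes:

    import torch

    # regular layout: lora_up / lora_down, with an optional lora_mid tensor
    regular_layout = {
        "foo.lora_up.weight": torch.zeros(4, 8),
        "foo.lora_down.weight": torch.zeros(8, 4),
        # "foo.lora_mid.weight" may also be present
    }

    # diffusers layout: _lora.up / _lora.down, no mid tensor
    diffusers_layout = {
        "foo_lora.up.weight": torch.zeros(4, 8),
        "foo_lora.down.weight": torch.zeros(8, 4),
    }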
@ -202,6 +211,11 @@ def model_lora_keys_unet(model, key_map={}):
if k.endswith(".weight"):
key_lora = k[:-len(".weight")].replace(".", "_")
key_map["lora_unet_{}".format(key_lora)] = "diffusion_model.{}".format(diffusers_keys[k])
diffusers_lora_key = "unet.{}".format(k[:-len(".weight")].replace(".to_", ".processor.to_"))
if diffusers_lora_key.endswith(".to_out.0"):
diffusers_lora_key = diffusers_lora_key[:-2]
key_map[diffusers_lora_key] = "diffusion_model.{}".format(diffusers_keys[k])
return key_map
def set_attr(obj, attr, value):
@ -864,7 +878,7 @@ def load_controlnet(ckpt_path, model=None):
use_fp16 = model_management.should_use_fp16()
controlnet_config = model_detection.model_config_from_unet(controlnet_data, prefix, use_fp16).unet_config
controlnet_config.pop("out_channels")
controlnet_config["hint_channels"] = 3
controlnet_config["hint_channels"] = controlnet_data["{}input_hint_block.0.weight".format(prefix)].shape[1]
control_model = cldm.ControlNet(**controlnet_config)
if pth:
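
Rather than hard-coding 3 hint channels, the config now takes the channel count from the checkpoint's first input_hint_block convolution weight. A minimal sketch, assuming an RGB-hint ControlNet whose first conv weight has shape [out_channels, hint_channels, kH, kW]:

    import torch

    # assumed example shape: 16 output channels, RGB hint input, 3x3 kernel
    input_hint_weight = torch.zeros(16, 3, 3, 3)
    hint_channels = input_hint_weight.shape[1]   # 3 here, but follows whatever the checkpoint stores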

View File

@ -91,13 +91,15 @@ class SD1ClipModel(torch.nn.Module, ClipTokenWeightEncoder):
def set_up_textual_embeddings(self, tokens, current_embeds):
out_tokens = []
- next_new_token = token_dict_size = current_embeds.weight.shape[0]
+ next_new_token = token_dict_size = current_embeds.weight.shape[0] - 1
embedding_weights = []
for x in tokens:
tokens_temp = []
for y in x:
if isinstance(y, int):
if y == token_dict_size: #EOS token
y = -1
tokens_temp += [y]
else:
if y.shape[0] == current_embeds.weight.shape[1]:
@ -110,15 +112,21 @@ class SD1ClipModel(torch.nn.Module, ClipTokenWeightEncoder):
tokens_temp += [self.empty_tokens[0][-1]]
out_tokens += [tokens_temp]
+ n = token_dict_size
if len(embedding_weights) > 0:
- new_embedding = torch.nn.Embedding(next_new_token, current_embeds.weight.shape[1], device=current_embeds.weight.device, dtype=current_embeds.weight.dtype)
- new_embedding.weight[:token_dict_size] = current_embeds.weight[:]
- n = token_dict_size
+ new_embedding = torch.nn.Embedding(next_new_token + 1, current_embeds.weight.shape[1], device=current_embeds.weight.device, dtype=current_embeds.weight.dtype)
+ new_embedding.weight[:token_dict_size] = current_embeds.weight[:-1]
for x in embedding_weights:
new_embedding.weight[n] = x
n += 1
+ new_embedding.weight[n] = current_embeds.weight[-1] #EOS embedding
self.transformer.set_input_embeddings(new_embedding)
- return out_tokens
+ processed_tokens = []
+ for x in out_tokens:
+ processed_tokens += [list(map(lambda a: n if a == -1 else a, x))] #The EOS token should always be the largest one
+ return processed_tokens
def forward(self, tokens):
backup_embeds = self.transformer.get_input_embeddings()
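
The net effect of the change above: custom embeddings are appended in front of the EOS embedding, and tokens matching the old EOS id (marked as -1 during processing) are remapped to the new largest index n. A standalone sketch of that final remapping with assumed token ids:

    # assumed ids: 49406 = BOS, -1 = EOS placeholder, n = new largest index after appending embeddings
    n = 49409
    out_tokens = [[49406, 320, -1], [49406, 1125, -1]]
    processed_tokens = [[n if t == -1 else t for t in row] for row in out_tokens]
    print(processed_tokens)   # the EOS token always ends up as the largest id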

View File

@ -40,7 +40,8 @@ def cuda_malloc_supported():
blacklist = {"GeForce GTX TITAN X", "GeForce GTX 980", "GeForce GTX 970", "GeForce GTX 960", "GeForce GTX 950", "GeForce 945M",
"GeForce 940M", "GeForce 930M", "GeForce 920M", "GeForce 910M", "GeForce GTX 750", "GeForce GTX 745", "Quadro K620",
"Quadro K1200", "Quadro K2200", "Quadro M500", "Quadro M520", "Quadro M600", "Quadro M620", "Quadro M1000",
"Quadro M1200", "Quadro M2000", "Quadro M2200", "Quadro M3000", "Quadro M4000", "Quadro M5000", "Quadro M5500", "Quadro M6000"}
"Quadro M1200", "Quadro M2000", "Quadro M2200", "Quadro M3000", "Quadro M4000", "Quadro M5000", "Quadro M5500", "Quadro M6000",
"GeForce MX110", "GeForce MX130", "GeForce 830M", "GeForce 840M", "GeForce GTX 850M", "GeForce GTX 860M"}
try:
names = get_gpu_names()

View File

@ -51,9 +51,10 @@ class Example:
"default": 0,
"min": 0, #Minimum value
"max": 4096, #Maximum value
"step": 64 #Slider's step
"step": 64, #Slider's step
"display": "number" # Cosmetic only: display as "number" or "slider"
}),
"float_field": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}),
"float_field": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01, "display": "number"}),
"print_to_screen": (["enable", "disable"],),
"string_field": ("STRING", {
"multiline": False, #True if you want the field to look like the one on the ClipTextEncode node

View File

@ -1076,6 +1076,47 @@ class LatentComposite:
samples_out["samples"] = s
return (samples_out,)
class LatentBlend:
@classmethod
def INPUT_TYPES(s):
return {"required": {
"samples1": ("LATENT",),
"samples2": ("LATENT",),
"blend_factor": ("FLOAT", {
"default": 0.5,
"min": 0,
"max": 1,
"step": 0.01
}),
}}
RETURN_TYPES = ("LATENT",)
FUNCTION = "blend"
CATEGORY = "_for_testing"
def blend(self, samples1, samples2, blend_factor:float, blend_mode: str="normal"):
samples_out = samples1.copy()
samples1 = samples1["samples"]
samples2 = samples2["samples"]
if samples1.shape != samples2.shape:
samples2.permute(0, 3, 1, 2)
samples2 = comfy.utils.common_upscale(samples2, samples1.shape[3], samples1.shape[2], 'bicubic', crop='center')
samples2.permute(0, 2, 3, 1)
samples_blended = self.blend_mode(samples1, samples2, blend_mode)
samples_blended = samples1 * blend_factor + samples_blended * (1 - blend_factor)
samples_out["samples"] = samples_blended
return (samples_out,)
def blend_mode(self, img1, img2, mode):
if mode == "normal":
return img2
else:
raise ValueError(f"Unsupported blend mode: {mode}")
class LatentCrop:
@classmethod
def INPUT_TYPES(s):
@ -1522,6 +1563,7 @@ NODE_CLASS_MAPPINGS = {
"KSamplerAdvanced": KSamplerAdvanced,
"SetLatentNoiseMask": SetLatentNoiseMask,
"LatentComposite": LatentComposite,
"LatentBlend": LatentBlend,
"LatentRotate": LatentRotate,
"LatentFlip": LatentFlip,
"LatentCrop": LatentCrop,
@ -1593,6 +1635,7 @@ NODE_DISPLAY_NAME_MAPPINGS = {
"LatentUpscale": "Upscale Latent",
"LatentUpscaleBy": "Upscale Latent By",
"LatentComposite": "Latent Composite",
"LatentBlend": "Latent Blend",
"LatentFromBatch" : "Latent From Batch",
"RepeatLatentBatch": "Repeat Latent Batch",
# Image

View File

@ -159,13 +159,64 @@
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "kkkkkkkkkkkkkkk"
},
"source": [
"### Run ComfyUI with cloudflared (Recommended Way)\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "jjjjjjjjjjjjjj"
},
"outputs": [],
"source": [
"!wget https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64.deb\n",
"!dpkg -i cloudflared-linux-amd64.deb\n",
"\n",
"import subprocess\n",
"import threading\n",
"import time\n",
"import socket\n",
"import urllib.request\n",
"\n",
"def iframe_thread(port):\n",
" while True:\n",
" time.sleep(0.5)\n",
" sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
" result = sock.connect_ex(('127.0.0.1', port))\n",
" if result == 0:\n",
" break\n",
" sock.close()\n",
" print(\"\\nComfyUI finished loading, trying to launch cloudflared (if it gets stuck here cloudflared is having issues)\\n\")\n",
"\n",
" p = subprocess.Popen([\"cloudflared\", \"tunnel\", \"--url\", \"http://127.0.0.1:{}\".format(port)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n",
" for line in p.stderr:\n",
" l = line.decode()\n",
" if \"trycloudflare.com \" in l:\n",
" print(\"This is the URL to access ComfyUI:\", l[l.find(\"http\"):], end='')\n",
" #print(l, end='')\n",
"\n",
"\n",
"threading.Thread(target=iframe_thread, daemon=True, args=(8188,)).start()\n",
"\n",
"!python main.py --dont-print-server"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "kkkkkkkkkkkkkk"
},
"source": [
"### Run ComfyUI with localtunnel (Recommended Way)\n",
"### Run ComfyUI with localtunnel\n",
"\n",
"\n"
]

View File

@ -345,6 +345,11 @@ class PromptServer():
vram_total, torch_vram_total = comfy.model_management.get_total_memory(device, torch_total_too=True)
vram_free, torch_vram_free = comfy.model_management.get_free_memory(device, torch_free_too=True)
system_stats = {
"system": {
"os": os.name,
"python_version": sys.version,
"embedded_python": os.path.split(os.path.split(sys.executable)[0])[1] == "python_embeded"
},
"devices": [
{
"name": device_name,

View File

@ -1,4 +1,4 @@
import {app} from "/scripts/app.js";
import {app} from "../../scripts/app.js";
// Adds filtering to combo context menus
@ -27,10 +27,13 @@ const ext = {
const clickedComboValue = currentNode.widgets
.filter(w => w.type === "combo" && w.options.values.length === values.length)
.find(w => w.options.values.every((v, i) => v === values[i]))
- .value;
+ ?.value;
- let selectedIndex = values.findIndex(v => v === clickedComboValue);
- let selectedItem = displayedItems?.[selectedIndex];
+ let selectedIndex = clickedComboValue ? values.findIndex(v => v === clickedComboValue) : 0;
+ if (selectedIndex < 0) {
+ selectedIndex = 0;
+ }
+ let selectedItem = displayedItems[selectedIndex];
updateSelected();
// Apply highlighting to the selected item

View File

@ -0,0 +1,25 @@
import { app } from "../../scripts/app.js";
const id = "Comfy.LinkRenderMode";
const ext = {
name: id,
async setup(app) {
app.ui.settings.addSetting({
id,
name: "Link Render Mode",
defaultValue: 2,
type: "combo",
options: LiteGraph.LINK_RENDER_MODES.map((m, i) => ({
value: i,
text: m,
selected: i == app.canvas.links_render_mode,
})),
onChange(value) {
app.canvas.links_render_mode = +value;
app.graph.setDirtyCanvas(true);
},
});
},
};
app.registerExtension(ext);

View File

@ -2,7 +2,7 @@ import { ComfyWidgets, addValueControlWidget } from "../../scripts/widgets.js";
import { app } from "../../scripts/app.js";
const CONVERTED_TYPE = "converted-widget";
const VALID_TYPES = ["STRING", "combo", "number"];
const VALID_TYPES = ["STRING", "combo", "number", "BOOLEAN"];
function isConvertableWidget(widget, config) {
return VALID_TYPES.includes(widget.type) || VALID_TYPES.includes(config[0]);

View File

@ -9835,7 +9835,11 @@ LGraphNode.prototype.executeAction = function(action)
ctx.textAlign = "center";
ctx.fillStyle = text_color;
ctx.fillText(
- w.label || w.name + " " + Number(w.value).toFixed(3),
+ w.label || w.name + " " + Number(w.value).toFixed(
+ w.options.precision != null
+ ? w.options.precision
+ : 3
+ ),
widget_width * 0.5,
y + H * 0.7
);
@ -13835,7 +13839,7 @@ LGraphNode.prototype.executeAction = function(action)
if (!disabled) {
element.addEventListener("click", inner_onclick);
}
- if (options.autoopen) {
+ if (!disabled && options.autoopen) {
LiteGraph.pointerListenerAdd(element,"enter",inner_over);
}

View File

@ -264,6 +264,15 @@ class ComfyApi extends EventTarget {
}
}
/**
* Gets system & device stats
* @returns System stats such as python version, OS, per device info
*/
async getSystemStats() {
const res = await this.fetchApi("/system_stats");
return await res.json();
}
/**
* Sends a POST request to the API
* @param {*} type The endpoint to post to

View File

@ -1,3 +1,4 @@
import { ComfyLogging } from "./logging.js";
import { ComfyWidgets } from "./widgets.js";
import { ComfyUI, $el } from "./ui.js";
import { api } from "./api.js";
@ -31,6 +32,7 @@ export class ComfyApp {
constructor() {
this.ui = new ComfyUI(this);
this.logging = new ComfyLogging(this);
/**
* List of extensions that are registered with the app
@ -1023,6 +1025,7 @@ export class ComfyApp {
*/
async #loadExtensions() {
const extensions = await api.getExtensions();
this.logging.addEntry("Comfy.App", "debug", { Extensions: extensions });
for (const ext of extensions) {
try {
await import(api.apiURL(ext));
@ -1306,6 +1309,9 @@ export class ComfyApp {
(t) => `<li>${t}</li>`
).join("")}</ul>Nodes that have failed to load will show as red on the graph.`
);
this.logging.addEntry("Comfy.App", "warn", {
MissingNodes: missingNodeTypes,
});
}
}
@ -1356,7 +1362,7 @@ export class ComfyApp {
if (parent.isVirtualNode) {
link = parent.getInputLink(link.origin_slot);
if (link) {
- parent = parent.getInputNode(link.origin_slot);
+ parent = parent.getInputNode(link.target_slot);
if (parent) {
found = true;
}

web/scripts/logging.js (new file, 367 lines)
View File

@ -0,0 +1,367 @@
import { $el, ComfyDialog } from "./ui.js";
import { api } from "./api.js";
$el("style", {
textContent: `
.comfy-logging-logs {
display: grid;
color: var(--fg-color);
white-space: pre-wrap;
}
.comfy-logging-log {
display: contents;
}
.comfy-logging-title {
background: var(--tr-even-bg-color);
font-weight: bold;
margin-bottom: 5px;
text-align: center;
}
.comfy-logging-log div {
background: var(--row-bg);
padding: 5px;
}
`,
parent: document.body,
});
// Stringify function supporting max depth and removal of circular references
// https://stackoverflow.com/a/57193345
function stringify(val, depth, replacer, space, onGetObjID) {
depth = isNaN(+depth) ? 1 : depth;
var recursMap = new WeakMap();
function _build(val, depth, o, a, r) {
// (JSON.stringify() has it's own rules, which we respect here by using it for property iteration)
return !val || typeof val != "object"
? val
: ((r = recursMap.has(val)),
recursMap.set(val, true),
(a = Array.isArray(val)),
r
? (o = (onGetObjID && onGetObjID(val)) || null)
: JSON.stringify(val, function (k, v) {
if (a || depth > 0) {
if (replacer) v = replacer(k, v);
if (!k) return (a = Array.isArray(v)), (val = v);
!o && (o = a ? [] : {});
o[k] = _build(v, a ? depth : depth - 1);
}
}),
o === void 0 ? (a ? [] : {}) : o);
}
return JSON.stringify(_build(val, depth), null, space);
}
const jsonReplacer = (k, v, ui) => {
if (v instanceof Array && v.length === 1) {
v = v[0];
}
if (v instanceof Date) {
v = v.toISOString();
if (ui) {
v = v.split("T")[1];
}
}
if (v instanceof Error) {
let err = "";
if (v.name) err += v.name + "\n";
if (v.message) err += v.message + "\n";
if (v.stack) err += v.stack + "\n";
if (!err) {
err = v.toString();
}
v = err;
}
return v;
};
const fileInput = $el("input", {
type: "file",
accept: ".json",
style: { display: "none" },
parent: document.body,
});
class ComfyLoggingDialog extends ComfyDialog {
constructor(logging) {
super();
this.logging = logging;
}
clear() {
this.logging.clear();
this.show();
}
export() {
const blob = new Blob([stringify([...this.logging.entries], 20, jsonReplacer, "\t")], {
type: "application/json",
});
const url = URL.createObjectURL(blob);
const a = $el("a", {
href: url,
download: `comfyui-logs-${Date.now()}.json`,
style: { display: "none" },
parent: document.body,
});
a.click();
setTimeout(function () {
a.remove();
window.URL.revokeObjectURL(url);
}, 0);
}
import() {
fileInput.onchange = () => {
const reader = new FileReader();
reader.onload = () => {
fileInput.remove();
try {
const obj = JSON.parse(reader.result);
if (obj instanceof Array) {
this.show(obj);
} else {
throw new Error("Invalid file selected.");
}
} catch (error) {
alert("Unable to load logs: " + error.message);
}
};
reader.readAsText(fileInput.files[0]);
};
fileInput.click();
}
createButtons() {
return [
$el("button", {
type: "button",
textContent: "Clear",
onclick: () => this.clear(),
}),
$el("button", {
type: "button",
textContent: "Export logs...",
onclick: () => this.export(),
}),
$el("button", {
type: "button",
textContent: "View exported logs...",
onclick: () => this.import(),
}),
...super.createButtons(),
];
}
getTypeColor(type) {
switch (type) {
case "error":
return "red";
case "warn":
return "orange";
case "debug":
return "dodgerblue";
}
}
show(entries) {
if (!entries) entries = this.logging.entries;
this.element.style.width = "100%";
const cols = {
source: "Source",
type: "Type",
timestamp: "Timestamp",
message: "Message",
};
const keys = Object.keys(cols);
const headers = Object.values(cols).map((title) =>
$el("div.comfy-logging-title", {
textContent: title,
})
);
const rows = entries.map((entry, i) => {
return $el(
"div.comfy-logging-log",
{
$: (el) => el.style.setProperty("--row-bg", `var(--tr-${i % 2 ? "even" : "odd"}-bg-color)`),
},
keys.map((key) => {
let v = entry[key];
let color;
if (key === "type") {
color = this.getTypeColor(v);
} else {
v = jsonReplacer(key, v, true);
if (typeof v === "object") {
v = stringify(v, 5, jsonReplacer, " ");
}
}
return $el("div", {
style: {
color,
},
textContent: v,
});
})
);
});
const grid = $el(
"div.comfy-logging-logs",
{
style: {
gridTemplateColumns: `repeat(${headers.length}, 1fr)`,
},
},
[...headers, ...rows]
);
const els = [grid];
if (!this.logging.enabled) {
els.unshift(
$el("h3", {
style: { textAlign: "center" },
textContent: "Logging is disabled",
})
);
}
super.show($el("div", els));
}
}
export class ComfyLogging {
/**
* @type Array<{ source: string, type: string, timestamp: Date, message: any }>
*/
entries = [];
#enabled;
#console = {};
get enabled() {
return this.#enabled;
}
set enabled(value) {
if (value === this.#enabled) return;
if (value) {
this.patchConsole();
} else {
this.unpatchConsole();
}
this.#enabled = value;
}
constructor(app) {
this.app = app;
this.dialog = new ComfyLoggingDialog(this);
this.addSetting();
this.catchUnhandled();
this.addInitData();
}
addSetting() {
const settingId = "Comfy.Logging.Enabled";
const htmlSettingId = settingId.replaceAll(".", "-");
const setting = this.app.ui.settings.addSetting({
id: settingId,
name: settingId,
defaultValue: true,
type: (name, setter, value) => {
return $el("tr", [
$el("td", [
$el("label", {
textContent: "Logging",
for: htmlSettingId,
}),
]),
$el("td", [
$el("input", {
id: htmlSettingId,
type: "checkbox",
checked: value,
onchange: (event) => {
setter((this.enabled = event.target.checked));
},
}),
$el("button", {
textContent: "View Logs",
onclick: () => {
this.app.ui.settings.element.close();
this.dialog.show();
},
style: {
fontSize: "14px",
display: "block",
marginTop: "5px",
},
}),
]),
]);
},
});
this.enabled = setting.value;
}
patchConsole() {
// Capture common console outputs
const self = this;
for (const type of ["log", "warn", "error", "debug"]) {
const orig = console[type];
this.#console[type] = orig;
console[type] = function () {
orig.apply(console, arguments);
self.addEntry("console", type, ...arguments);
};
}
}
unpatchConsole() {
// Restore original console functions
for (const type of Object.keys(this.#console)) {
console[type] = this.#console[type];
}
this.#console = {};
}
catchUnhandled() {
// Capture uncaught errors
window.addEventListener("error", (e) => {
this.addEntry("window", "error", e.error ?? "Unknown error");
return false;
});
window.addEventListener("unhandledrejection", (e) => {
this.addEntry("unhandledrejection", "error", e.reason ?? "Unknown error");
});
}
clear() {
this.entries = [];
}
addEntry(source, type, ...args) {
if (this.enabled) {
this.entries.push({
source,
type,
timestamp: new Date(),
message: args,
});
}
}
log(source, ...args) {
this.addEntry(source, "log", ...args);
}
async addInitData() {
if (!this.enabled) return;
const source = "ComfyUI.Logging";
this.addEntry(source, "debug", { UserAgent: navigator.userAgent });
const systemStats = await api.getSystemStats();
this.addEntry(source, "debug", systemStats);
}
}

View File

@ -234,7 +234,7 @@ class ComfySettingsDialog extends ComfyDialog {
localStorage[settingId] = JSON.stringify(value);
}
- addSetting({id, name, type, defaultValue, onChange, attrs = {}, tooltip = "",}) {
+ addSetting({id, name, type, defaultValue, onChange, attrs = {}, tooltip = "", options = undefined}) {
if (!id) {
throw new Error("Settings must have an ID");
}
@ -347,6 +347,32 @@ class ComfySettingsDialog extends ComfyDialog {
]),
]);
break;
case "combo":
element = $el("tr", [
labelCell,
$el("td", [
$el(
"select",
{
oninput: (e) => {
setter(e.target.value);
},
},
(typeof options === "function" ? options(value) : options || []).map((opt) => {
if (typeof opt === "string") {
opt = { text: opt };
}
const v = opt.value ?? opt.text;
return $el("option", {
value: v,
textContent: opt.text,
selected: value + "" === v + "",
});
})
),
]),
]);
break;
case "text":
default:
if (type !== "text") {
@ -480,7 +506,7 @@ class ComfyList {
hide() {
this.element.style.display = "none";
this.button.textContent = "See " + this.#text;
this.button.textContent = "View " + this.#text;
}
toggle() {

View File

@ -267,7 +267,6 @@ export const ComfyWidgets = {
return { widget: node.addWidget(widgetType, inputName, val, () => {}, config) };
},
INT(node, inputName, inputData, app) {
- console.log(app);
let widgetType = isSlider(inputData[1]["display"], app);
const { val, config } = getNumberDefaults(inputData, 1);
Object.assign(config, { precision: 0 });