Merge pull request #1 from comfyanonymous/master

.
This commit is contained in:
qwertzman-png 2023-04-21 09:16:11 +02:00 committed by GitHub
commit 1cd7926540
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 19 additions and 37 deletions

View File

@@ -25,6 +25,7 @@ This ui will let you design and execute advanced stable diffusion pipelines usin
- [ControlNet and T2I-Adapter](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/) - [ControlNet and T2I-Adapter](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/)
- [Upscale Models (ESRGAN, ESRGAN variants, SwinIR, Swin2SR, etc...)](https://comfyanonymous.github.io/ComfyUI_examples/upscale_models/) - [Upscale Models (ESRGAN, ESRGAN variants, SwinIR, Swin2SR, etc...)](https://comfyanonymous.github.io/ComfyUI_examples/upscale_models/)
- [unCLIP Models](https://comfyanonymous.github.io/ComfyUI_examples/unclip/) - [unCLIP Models](https://comfyanonymous.github.io/ComfyUI_examples/unclip/)
- [GLIGEN](https://comfyanonymous.github.io/ComfyUI_examples/gligen/)
- Starts up very fast. - Starts up very fast.
- Works fully offline: will never download anything. - Works fully offline: will never download anything.
- [Config file](extra_model_paths.yaml.example) to set the search paths for models. - [Config file](extra_model_paths.yaml.example) to set the search paths for models.

View File

@@ -7,23 +7,6 @@ from comfy import model_management
from .ldm.models.diffusion.ddim import DDIMSampler from .ldm.models.diffusion.ddim import DDIMSampler
from .ldm.modules.diffusionmodules.util import make_ddim_timesteps from .ldm.modules.diffusionmodules.util import make_ddim_timesteps
class CFGDenoiser(torch.nn.Module):
    """Classifier-free-guidance wrapper around a denoising model.

    Runs the wrapped model on both the conditional and unconditional
    inputs and blends the two predictions:
    ``uncond + (cond - uncond) * cond_scale``.
    """

    def __init__(self, model):
        super().__init__()
        # Underlying denoiser, called as model(x, sigma, cond=...).
        self.inner_model = model

    def forward(self, x, sigma, uncond, cond, cond_scale):
        # When cond/uncond have matching token counts and the latent is
        # small, fold both into one batched model call; otherwise run
        # two separate passes.
        if len(uncond[0]) == len(cond[0]) and x.shape[0] * x.shape[2] * x.shape[3] < (96 * 96):  # TODO check memory instead
            batched_x = torch.cat([x, x])
            batched_sigma = torch.cat([sigma, sigma])
            batched_cond = torch.cat([uncond, cond])
            denoised = self.inner_model(batched_x, batched_sigma, cond=batched_cond)
            uncond, cond = denoised.chunk(2)
        else:
            cond = self.inner_model(x, sigma, cond=cond)
            uncond = self.inner_model(x, sigma, cond=uncond)
        # Standard CFG combination of the two predictions.
        return uncond + (cond - uncond) * cond_scale
#The main sampling function shared by all the samplers #The main sampling function shared by all the samplers
#Returns predicted noise #Returns predicted noise
def sampling_function(model_function, x, timestep, uncond, cond, cond_scale, cond_concat=None, model_options={}): def sampling_function(model_function, x, timestep, uncond, cond, cond_scale, cond_concat=None, model_options={}):

View File

@@ -498,7 +498,7 @@ class GLIGENLoader:
RETURN_TYPES = ("GLIGEN",) RETURN_TYPES = ("GLIGEN",)
FUNCTION = "load_gligen" FUNCTION = "load_gligen"
CATEGORY = "_for_testing/gligen" CATEGORY = "loaders"
def load_gligen(self, gligen_name): def load_gligen(self, gligen_name):
gligen_path = folder_paths.get_full_path("gligen", gligen_name) gligen_path = folder_paths.get_full_path("gligen", gligen_name)
@@ -520,7 +520,7 @@ class GLIGENTextBoxApply:
RETURN_TYPES = ("CONDITIONING",) RETURN_TYPES = ("CONDITIONING",)
FUNCTION = "append" FUNCTION = "append"
CATEGORY = "_for_testing/gligen" CATEGORY = "conditioning/gligen"
def append(self, conditioning_to, clip, gligen_textbox_model, text, width, height, x, y): def append(self, conditioning_to, clip, gligen_textbox_model, text, width, height, x, y):
c = [] c = []

View File

@@ -89,24 +89,17 @@ app.registerExtension({
end = nearestEnclosure.end; end = nearestEnclosure.end;
selectedText = inputField.value.substring(start, end); selectedText = inputField.value.substring(start, end);
} else { } else {
// Select the current word, find the start and end of the word (first space before and after) // Select the current word, find the start and end of the word
const wordStart = inputField.value.substring(0, start).lastIndexOf(" ") + 1; const delimiters = " .,\\/!?%^*;:{}=-_`~()\r\n\t";
const wordEnd = inputField.value.substring(end).indexOf(" ");
// If there is no space after the word, select to the end of the string while (!delimiters.includes(inputField.value[start - 1]) && start > 0) {
if (wordEnd === -1) { start--;
end = inputField.value.length; }
} else {
end += wordEnd; while (!delimiters.includes(inputField.value[end]) && end < inputField.value.length) {
end++;
} }
start = wordStart;
// Remove all punctuation at the end and beginning of the word
while (inputField.value[start].match(/[.,\/#!$%\^&\*;:{}=\-_`~()]/)) {
start++;
}
while (inputField.value[end - 1].match(/[.,\/#!$%\^&\*;:{}=\-_`~()]/)) {
end--;
}
selectedText = inputField.value.substring(start, end); selectedText = inputField.value.substring(start, end);
if (!selectedText) return; if (!selectedText) return;
} }
@@ -135,8 +128,13 @@ app.registerExtension({
// Increment the weight // Increment the weight
const weightDelta = event.key === "ArrowUp" ? delta : -delta; const weightDelta = event.key === "ArrowUp" ? delta : -delta;
const updatedText = selectedText.replace(/(.*:)(\d+(\.\d+)?)(.*)/, (match, prefix, weight, _, suffix) => { const updatedText = selectedText.replace(/\((.*):(\d+(?:\.\d+)?)\)/, (match, text, weight) => {
return prefix + incrementWeight(weight, weightDelta) + suffix; weight = incrementWeight(weight, weightDelta);
if (weight == 1) {
return text;
} else {
return `(${text}:${weight})`;
}
}); });
inputField.setRangeText(updatedText, start, end, "select"); inputField.setRangeText(updatedText, start, end, "select");