From d879a33956f76c8c5ea8ac807b08df003acf5650 Mon Sep 17 00:00:00 2001
From: hlky <106811348+hlky@users.noreply.github.com>
Date: Tue, 16 May 2023 20:55:19 +0100
Subject: [PATCH] Fix for long prompts

---
 comfy/samplers.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/comfy/samplers.py b/comfy/samplers.py
index d57d1030b..b3199c27a 100644
--- a/comfy/samplers.py
+++ b/comfy/samplers.py
@@ -511,6 +511,14 @@ class AITemplateModelWrapper:
         uncond = kwargs.pop("uncond")
         uncond = uncond[0][0]
         if uncond is not None and cond is not None:
+            if cond.shape[1] > uncond.shape[1]:
+                to_add = cond.shape[1] - uncond.shape[1]
+                padding = torch.zeros((uncond.shape[0], to_add, uncond.shape[2]), device=uncond.device)
+                uncond = torch.cat((uncond, padding), 1)
+            elif uncond.shape[1] > cond.shape[1]:
+                to_add = uncond.shape[1] - cond.shape[1]
+                padding = torch.zeros((cond.shape[0], to_add, cond.shape[2]), device=cond.device)
+                cond = torch.cat((cond, padding), 1)
             encoder_hidden_states = torch.cat((uncond, cond))
         elif cond is not None and uncond is None:
             encoder_hidden_states = torch.cat((cond, cond))