Better support for transformers t5

This commit is contained in:
doctorpangloss 2024-06-10 20:22:17 -07:00
parent 8a1557f750
commit e7682ced56
6 changed files with 42 additions and 16 deletions

View File

@ -159,12 +159,13 @@ class TransformersManagedModel(ModelManageable):
filename, chat_template = candidate_chat_templates[0]
logging.debug(f"Selected chat template filename={filename} for {self.model.name_or_path}")
try:
# todo: this should come from node inputs
prompt = tokenizer.apply_chat_template([
{"role": "user", "content": prompt},
], chat_template=chat_template, add_generation_prompt=True, tokenize=False)
if hasattr(tokenizer, "apply_chat_template"):
# todo: this should come from node inputs
prompt = tokenizer.apply_chat_template([
{"role": "user", "content": prompt},
], chat_template=chat_template, add_generation_prompt=True, tokenize=False)
except Exception as exc:
logging.error("Could not apply chat template", exc_info=exc)
logging.debug("Could not apply chat template", exc_info=exc)
if self.processor is None:
batch_encoding = tokenizer(prompt, return_tensors="pt").to(device=self.load_device)

View File

@ -167,6 +167,13 @@ KNOWN_CHECKPOINTS = [
CivitFile(133005, 357609, filename="juggernautXL_v9Rundiffusionphoto2.safetensors"),
CivitFile(112902, 351306, filename="dreamshaperXL_v21TurboDPMSDE.safetensors"),
CivitFile(139562, 344487, filename="realvisxlV40_v40Bakedvae.safetensors"),
HuggingFile("SG161222/Realistic_Vision_V6.0_B1_noVAE","Realistic_Vision_V6.0_NV_B1_fp16.safetensors"),
HuggingFile("SG161222/Realistic_Vision_V5.1_noVAE","Realistic_Vision_V5.1_fp16-no-ema.safetensors"),
CivitFile(4384, 128713, filename="dreamshaper_8.safetensors"),
CivitFile(7371, 425083, filename="revAnimated_v2Rebirth.safetensors"),
CivitFile(4468, 57618, filename="counterfeitV30_v30.safetensors"),
CivitFile(241415, 272376, filename="picxReal_10.safetensors"),
CivitFile(23900, 95489, filename="anyloraCheckpoint_bakedvaeBlessedFp16.safetensors"),
]
KNOWN_UNCLIP_CHECKPOINTS = [
@ -195,8 +202,9 @@ KNOWN_CLIP_VISION_MODELS = [
KNOWN_LORAS = [
CivitFile(model_id=211577, model_version_id=238349, filename="openxl_handsfix.safetensors"),
CivitFile(model_id=324815, model_version_id=364137, filename="blur_control_xl_v1.safetensors"),
CivitFile(model_id=47085, model_version_id=55199, filename="GoodHands-beta2.safetensors"),
HuggingFile("ByteDance/Hyper-SD", "Hyper-SDXL-12steps-CFG-lora.safetensors"),
HuggingFile("ByteDance/Hyper-SD", "Hyper-SD15-12steps-CFG-lora.safetensors")
HuggingFile("ByteDance/Hyper-SD", "Hyper-SD15-12steps-CFG-lora.safetensors"),
]
KNOWN_CONTROLNETS = [

View File

@ -44,9 +44,19 @@ def _import_and_enumerate_nodes_in_module(module: types.ModuleType,
exported_nodes = ExportedNodes()
timings = []
exceptions = []
if _import_nodes_in_module(exported_nodes, module):
pass
else:
with tracer.start_as_current_span("Load Node") as span:
time_before = time.perf_counter()
try:
module_decl = _import_nodes_in_module(exported_nodes, module)
full_name = module.__name__
span.set_attribute("full_name", full_name)
timings.append((time.perf_counter() - time_before, full_name, True, exported_nodes))
except Exception as exc:
logging.error(f"{full_name} import failed", exc_info=exc)
span.set_status(Status(StatusCode.ERROR))
span.record_exception(exc)
exceptions.append(exc)
if module_decl is None or not module_decl:
# Iterate through all the submodules
for _, name, is_pkg in pkgutil.iter_modules(module.__path__):
span: Span
@ -55,6 +65,7 @@ def _import_and_enumerate_nodes_in_module(module: types.ModuleType,
time_before = time.perf_counter()
success = True
span.set_attribute("full_name", full_name)
new_nodes = ExportedNodes()
if full_name.endswith(".disabled"):
continue
try:
@ -75,11 +86,11 @@ def _import_and_enumerate_nodes_in_module(module: types.ModuleType,
exceptions.append(x)
span.set_status(Status(StatusCode.ERROR))
span.record_exception(x)
timings.append((time.perf_counter() - time_before, full_name, success))
timings.append((time.perf_counter() - time_before, full_name, success, new_nodes))
if print_import_times and len(timings) > 0 or any(not success for (_, _, success) in timings):
for (duration, module_name, success) in sorted(timings):
print(f"{duration:6.1f} seconds{'' if success else ' (IMPORT FAILED)'}, {module_name}")
if print_import_times and len(timings) > 0 or any(not success for (_, _, success, _) in timings):
for (duration, module_name, success, new_nodes) in sorted(timings):
logging.info(f"{duration:6.1f} seconds{'' if success else ' (IMPORT FAILED)'}, {module_name} ({len(new_nodes)} nodes loaded)")
if raise_on_failure and len(exceptions) > 0:
try:
raise ExceptionGroup("Node import failed", exceptions)

View File

View File

@ -11,7 +11,7 @@ from typing import Any, Dict, Optional, List, Callable, Union
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, PreTrainedModel, LogitsProcessor, TextStreamer, \
PreTrainedTokenizerBase, LogitsProcessorList, PretrainedConfig, AutoProcessor, BatchFeature, ProcessorMixin, \
LlavaNextForConditionalGeneration, LlavaNextProcessor
LlavaNextForConditionalGeneration, LlavaNextProcessor, T5EncoderModel, AutoModel
from typing_extensions import TypedDict
from comfy.language.chat_templates import KNOWN_CHAT_TEMPLATES
@ -245,8 +245,9 @@ class TransformersLoader(CustomNode):
}
try:
model = AutoModelForCausalLM.from_pretrained(**from_pretrained_kwargs)
model = AutoModel.from_pretrained(**from_pretrained_kwargs)
except:
# not yet supported by automodel
model = LlavaNextForConditionalGeneration.from_pretrained(**from_pretrained_kwargs)
config_dict, _ = PretrainedConfig.get_config_dict(ckpt_name, trust_remote_code=True, **hub_kwargs)

View File

@ -185,7 +185,12 @@ def dependencies(force_nightly: bool = False) -> List[str]:
return _dependencies
package_data = ['sd1_tokenizer/*', '**/*.json', '**/*.yaml']
package_data = [
'sd1_tokenizer/*',
't5_tokenizer/*',
'**/*.json',
'**/*.yaml',
]
if not is_editable:
package_data.append('comfy/web/**/*')
dev_dependencies = open(os.path.join(os.path.dirname(__file__), "requirements-dev.txt")).readlines()