Various fixes

- Fix 16 bit exif saving for PNGs
 - Validate alternative filenames correctly
 - Improve speed of test workflows by setting steps to 1
This commit is contained in:
doctorpangloss 2025-09-17 16:03:52 -07:00
parent 3d23c298a2
commit 79b8723f61
25 changed files with 156 additions and 71 deletions

View File

@ -37,7 +37,7 @@ from ..component_model.abstract_prompt_queue import AbstractPromptQueue
from ..component_model.executor_types import ExecutorToClientProgress, ValidationTuple, ValidateInputsTuple, \ from ..component_model.executor_types import ExecutorToClientProgress, ValidationTuple, ValidateInputsTuple, \
ValidationErrorDict, NodeErrorsDictValue, ValidationErrorExtraInfoDict, FormattedValue, RecursiveExecutionTuple, \ ValidationErrorDict, NodeErrorsDictValue, ValidationErrorExtraInfoDict, FormattedValue, RecursiveExecutionTuple, \
RecursiveExecutionErrorDetails, RecursiveExecutionErrorDetailsInterrupted, ExecutionResult, DuplicateNodeError, \ RecursiveExecutionErrorDetails, RecursiveExecutionErrorDetailsInterrupted, ExecutionResult, DuplicateNodeError, \
HistoryResultDict, ExecutionErrorMessage, ExecutionInterruptedMessage HistoryResultDict, ExecutionErrorMessage, ExecutionInterruptedMessage, ValidationView
from ..component_model.files import canonicalize_path from ..component_model.files import canonicalize_path
from ..component_model.module_property import create_module_properties from ..component_model.module_property import create_module_properties
from ..component_model.queue_types import QueueTuple, HistoryEntry, QueueItem, MAXIMUM_HISTORY_SIZE, ExecutionStatus, \ from ..component_model.queue_types import QueueTuple, HistoryEntry, QueueItem, MAXIMUM_HISTORY_SIZE, ExecutionStatus, \
@ -1040,6 +1040,8 @@ async def validate_inputs(prompt_id: typing.Any, prompt, item, validated: typing
if isinstance(input_type, list): if isinstance(input_type, list):
combo_options = input_type combo_options = input_type
if isinstance(combo_options, ValidationView):
combo_options = combo_options.view_for_validation()
if isinstance(val, str) and "\\" in val: if isinstance(val, str) and "\\" in val:
# try to normalize paths for comparison purposes # try to normalize paths for comparison purposes
val = canonicalize_path(val) val = canonicalize_path(val)

View File

@ -1,9 +1,8 @@
from __future__ import annotations # for Python 3.7-3.9 from __future__ import annotations # for Python 3.7-3.9
import concurrent.futures import concurrent.futures
import typing
from enum import Enum from enum import Enum
from typing import Optional, Literal, Protocol, Union, NamedTuple, List from typing import Optional, Literal, Protocol, Union, NamedTuple, List, runtime_checkable, Iterable, Never, Dict, Any
import PIL.Image import PIL.Image
from typing_extensions import NotRequired, TypedDict from typing_extensions import NotRequired, TypedDict
@ -68,7 +67,7 @@ class ExecutionErrorMessage(TypedDict):
exception_message: str exception_message: str
exception_type: str exception_type: str
traceback: list[str] traceback: list[str]
current_inputs: list[typing.Never] | dict[str, FormattedValue] current_inputs: list[Never] | dict[str, FormattedValue]
current_outputs: list[str] current_outputs: list[str]
@ -76,8 +75,8 @@ class DependencyExecutionErrorMessage(TypedDict):
node_id: str node_id: str
exception_message: str exception_message: str
exception_type: Literal["graph.DependencyCycleError"] exception_type: Literal["graph.DependencyCycleError"]
traceback: list[typing.Never] traceback: list[Never]
current_inputs: list[typing.Never] current_inputs: list[Never]
class ActiveNodeProgressState(TypedDict, total=True): class ActiveNodeProgressState(TypedDict, total=True):
@ -106,7 +105,7 @@ SendSyncData = Union[ProgressStateMessage, StatusMessage, ExecutingMessage, Depe
class SocketsMetadata(TypedDict, total=True): class SocketsMetadata(TypedDict, total=True):
feature_flags: dict[str, typing.Any] feature_flags: dict[str, Any]
class DefaultSocketsMetadata(TypedDict, total=True): class DefaultSocketsMetadata(TypedDict, total=True):
@ -197,8 +196,8 @@ class ValidationErrorExtraInfoDict(TypedDict, total=False):
dependent_outputs: NotRequired[List[str]] dependent_outputs: NotRequired[List[str]]
class_type: NotRequired[str] class_type: NotRequired[str]
input_name: NotRequired[str] input_name: NotRequired[str]
input_config: NotRequired[typing.Dict[str, InputTypeSpec]] input_config: NotRequired[Dict[str, InputTypeSpec]]
received_value: NotRequired[typing.Any] received_value: NotRequired[Any]
linked_node: NotRequired[str] linked_node: NotRequired[str]
traceback: NotRequired[list[str]] traceback: NotRequired[list[str]]
exception_message: NotRequired[str] exception_message: NotRequired[str]
@ -209,7 +208,7 @@ class ValidationErrorDict(TypedDict):
type: str type: str
message: str message: str
details: str details: str
extra_info: list[typing.Never] | ValidationErrorExtraInfoDict extra_info: list[Never] | ValidationErrorExtraInfoDict
class NodeErrorsDictValue(TypedDict, total=False): class NodeErrorsDictValue(TypedDict, total=False):
@ -218,14 +217,14 @@ class NodeErrorsDictValue(TypedDict, total=False):
class_type: str class_type: str
class ValidationTuple(typing.NamedTuple): class ValidationTuple(NamedTuple):
valid: bool valid: bool
error: Optional[ValidationErrorDict | DependencyExecutionErrorMessage] error: Optional[ValidationErrorDict | DependencyExecutionErrorMessage]
good_output_node_ids: List[str] good_output_node_ids: List[str]
node_errors: list[typing.Never] | typing.Dict[str, NodeErrorsDictValue] node_errors: list[Never] | Dict[str, NodeErrorsDictValue]
class ValidateInputsTuple(typing.NamedTuple): class ValidateInputsTuple(NamedTuple):
valid: bool valid: bool
errors: List[ValidationErrorDict] errors: List[ValidationErrorDict]
unique_id: str unique_id: str
@ -244,7 +243,7 @@ class RecursiveExecutionErrorDetails(TypedDict, total=True):
current_outputs: NotRequired[dict[str, list[list[FormattedValue]]]] current_outputs: NotRequired[dict[str, list[list[FormattedValue]]]]
class RecursiveExecutionTuple(typing.NamedTuple): class RecursiveExecutionTuple(NamedTuple):
valid: ExecutionResult valid: ExecutionResult
error_details: Optional[RecursiveExecutionErrorDetails | RecursiveExecutionErrorDetailsInterrupted] error_details: Optional[RecursiveExecutionErrorDetails | RecursiveExecutionErrorDetailsInterrupted]
exc_info: Optional[Exception] exc_info: Optional[Exception]
@ -289,3 +288,9 @@ class Executor(Protocol):
ExecutePromptArgs = tuple[dict, str, str, dict, ExecutorToClientProgress | None, Configuration | None] ExecutePromptArgs = tuple[dict, str, str, dict, ExecutorToClientProgress | None, Configuration | None]
@runtime_checkable
class ValidationView(Protocol):
def view_for_validation(self) -> Iterable[str]:
...

View File

@ -15,8 +15,7 @@ from typing import List, Optional, Final, Set
import requests import requests
import tqdm import tqdm
from huggingface_hub import dump_environment_info, try_to_load_from_cache from huggingface_hub import dump_environment_info, hf_hub_download, scan_cache_dir, snapshot_download, HfFileSystem, CacheNotFound
from huggingface_hub import hf_hub_download, scan_cache_dir, snapshot_download, HfFileSystem, CacheNotFound
from huggingface_hub.utils import GatedRepoError, LocalEntryNotFoundError from huggingface_hub.utils import GatedRepoError, LocalEntryNotFoundError
from requests import Session from requests import Session
from safetensors import safe_open from safetensors import safe_open
@ -28,7 +27,7 @@ from .cmd.folder_paths import add_model_folder_path, supported_pt_extensions #
from .component_model.deprecation import _deprecate_method from .component_model.deprecation import _deprecate_method
from .component_model.files import canonicalize_path from .component_model.files import canonicalize_path
from .interruption import InterruptProcessingException from .interruption import InterruptProcessingException
from .model_downloader_types import CivitFile, HuggingFile, CivitModelsGetResponse, CivitFile_, Downloadable, UrlFile from .model_downloader_types import CivitFile, HuggingFile, CivitModelsGetResponse, CivitFile_, Downloadable, UrlFile, DownloadableFileList
from .utils import ProgressBar, comfy_tqdm from .utils import ProgressBar, comfy_tqdm
_session = Session() _session = Session()
@ -37,17 +36,21 @@ _hf_fs = HfFileSystem()
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def get_filename_list(folder_name: str) -> list[str]: def get_filename_list(folder_name: str) -> Sequence[str]:
return get_filename_list_with_downloadable(folder_name) return get_filename_list_with_downloadable(folder_name)
def get_filename_list_with_downloadable(folder_name: str, known_files: Optional[List[Downloadable] | KnownDownloadables] = None) -> List[str]: def get_filename_list_with_downloadable(folder_name: str, known_files: Optional[List[Downloadable] | KnownDownloadables] = None) -> DownloadableFileList:
if known_files is None: if known_files is None:
known_files = _get_known_models_for_folder_name(folder_name) known_files = _get_known_models_for_folder_name(folder_name)
existing = frozenset(folder_paths.get_filename_list(folder_name)) existing = folder_paths.get_filename_list(folder_name)
downloadable = frozenset() if args.disable_known_models else frozenset(str(f) for f in known_files)
return list(map(canonicalize_path, sorted(list(existing | downloadable)))) downloadable_files = []
if not args.disable_known_models:
downloadable_files = known_files
return DownloadableFileList(existing, downloadable_files)
def get_full_path_or_raise(folder_name: str, filename: str) -> str: def get_full_path_or_raise(folder_name: str, filename: str) -> str:
@ -299,7 +302,7 @@ KNOWN_CHECKPOINTS: Final[KnownDownloadables] = KnownDownloadables([
CivitFile(139562, 344487, filename="realvisxlV40_v40Bakedvae.safetensors"), CivitFile(139562, 344487, filename="realvisxlV40_v40Bakedvae.safetensors"),
HuggingFile("SG161222/Realistic_Vision_V6.0_B1_noVAE", "Realistic_Vision_V6.0_NV_B1_fp16.safetensors"), HuggingFile("SG161222/Realistic_Vision_V6.0_B1_noVAE", "Realistic_Vision_V6.0_NV_B1_fp16.safetensors"),
HuggingFile("SG161222/Realistic_Vision_V5.1_noVAE", "Realistic_Vision_V5.1_fp16-no-ema.safetensors"), HuggingFile("SG161222/Realistic_Vision_V5.1_noVAE", "Realistic_Vision_V5.1_fp16-no-ema.safetensors"),
HuggingFile("Lykon/DreamShaper", "DreamShaper_8_pruned.safetensors", save_with_filename="dreamshaper_8.safetensors", alternate_filenames=("DreamShaper_8_pruned.safetensors")), HuggingFile("Lykon/DreamShaper", "DreamShaper_8_pruned.safetensors", save_with_filename="dreamshaper_8.safetensors", alternate_filenames=("DreamShaper_8_pruned.safetensors",)),
CivitFile(7371, 425083, filename="revAnimated_v2Rebirth.safetensors"), CivitFile(7371, 425083, filename="revAnimated_v2Rebirth.safetensors"),
CivitFile(4468, 57618, filename="counterfeitV30_v30.safetensors"), CivitFile(4468, 57618, filename="counterfeitV30_v30.safetensors"),
CivitFile(241415, 272376, filename="picxReal_10.safetensors"), CivitFile(241415, 272376, filename="picxReal_10.safetensors"),
@ -309,7 +312,7 @@ KNOWN_CHECKPOINTS: Final[KnownDownloadables] = KnownDownloadables([
HuggingFile("stabilityai/stable-diffusion-3-medium", "sd3_medium_incl_clips_t5xxlfp8.safetensors"), HuggingFile("stabilityai/stable-diffusion-3-medium", "sd3_medium_incl_clips_t5xxlfp8.safetensors"),
HuggingFile("fal/AuraFlow", "aura_flow_0.1.safetensors"), HuggingFile("fal/AuraFlow", "aura_flow_0.1.safetensors"),
# stable audio, # uses names from https://comfyanonymous.github.io/ComfyUI_examples/audio/ # stable audio, # uses names from https://comfyanonymous.github.io/ComfyUI_examples/audio/
HuggingFile("Comfy-Org/stable-audio-open-1.0_repackaged", "stable-audio-open-1.0.safetensors", alternate_filenames="stable_audio_open_1.0.safetensors"), HuggingFile("Comfy-Org/stable-audio-open-1.0_repackaged", "stable-audio-open-1.0.safetensors", alternate_filenames=("stable_audio_open_1.0.safetensors",)),
# hunyuandit # hunyuandit
HuggingFile("comfyanonymous/hunyuan_dit_comfyui", "hunyuan_dit_1.0.safetensors"), HuggingFile("comfyanonymous/hunyuan_dit_comfyui", "hunyuan_dit_1.0.safetensors"),
HuggingFile("comfyanonymous/hunyuan_dit_comfyui", "hunyuan_dit_1.1.safetensors"), HuggingFile("comfyanonymous/hunyuan_dit_comfyui", "hunyuan_dit_1.1.safetensors"),

View File

@ -1,14 +1,17 @@
from __future__ import annotations from __future__ import annotations
import collections
import dataclasses import dataclasses
import functools import functools
from os.path import split from os.path import split
from pathlib import PurePosixPath from pathlib import PurePosixPath
from typing import Optional, List, Sequence, Union from typing import Optional, List, Sequence, Union, Iterable
from can_ada import parse, URL # pylint: disable=no-name-in-module from can_ada import parse, URL # pylint: disable=no-name-in-module
from typing_extensions import TypedDict, NotRequired from typing_extensions import TypedDict, NotRequired
from .component_model.executor_types import ValidationView
@dataclasses.dataclass(frozen=True) @dataclasses.dataclass(frozen=True)
class UrlFile: class UrlFile:
@ -91,6 +94,31 @@ class HuggingFile:
return self.save_with_filename or split(self.filename)[-1] return self.save_with_filename or split(self.filename)[-1]
class DownloadableFileList(ValidationView, list[str]):
"""
A list of downloadable files that can be validated differently than it will be serialized to JSON
"""
def __init__(self, existing_files: Iterable[str], downloadable_files: Iterable[Downloadable]):
super().__init__()
self._validation_view = set(existing_files)
ui_view = set(existing_files)
for f in downloadable_files:
main_name = str(f)
self._validation_view.add(main_name)
self._validation_view.update(f.alternate_filenames)
if getattr(f, 'show_in_ui', True):
ui_view.add(main_name)
self.extend(sorted(list(ui_view)))
def view_for_validation(self) -> Iterable[str]:
return self._validation_view
class CivitStats(TypedDict): class CivitStats(TypedDict):
downloadCount: int downloadCount: int
favoriteCount: NotRequired[int] favoriteCount: NotRequired[int]
@ -186,4 +214,4 @@ class CivitModelsGetResponse(TypedDict):
modelVersions: List[CivitModelVersion] modelVersions: List[CivitModelVersion]
Downloadable = Union[CivitFile | HuggingFile | UrlFile] Downloadable = Union[CivitFile, HuggingFile, UrlFile]

View File

@ -1,6 +1,7 @@
from __future__ import annotations from __future__ import annotations
import dataclasses import dataclasses
import io
import json import json
import logging import logging
import os import os
@ -11,7 +12,7 @@ import sys
import uuid import uuid
from datetime import datetime from datetime import datetime
from fractions import Fraction from fractions import Fraction
from typing import Sequence, Optional, TypedDict, Dict, List, Literal, Callable, Tuple from typing import Sequence, Optional, TypedDict, List, Literal, Tuple, Any, Dict
import PIL import PIL
import aiohttp import aiohttp
@ -20,7 +21,8 @@ import cv2
import fsspec import fsspec
import numpy as np import numpy as np
import torch import torch
from PIL import Image, ImageSequence, ImageOps from PIL import Image, ImageSequence, ImageOps, ExifTags
from PIL.Image import Exif
from PIL.ImageFile import ImageFile from PIL.ImageFile import ImageFile
from PIL.PngImagePlugin import PngInfo from PIL.PngImagePlugin import PngInfo
from fsspec.core import OpenFile from fsspec.core import OpenFile
@ -85,11 +87,6 @@ class SaveNodeResultWithName(SaveNodeResult):
name: str name: str
from PIL import ExifTags
from PIL.Image import Exif
from typing import Any, Dict
def create_exif_from_pnginfo(metadata: Dict[str, Any]) -> Exif: def create_exif_from_pnginfo(metadata: Dict[str, Any]) -> Exif:
"""Convert PNG metadata dictionary to PIL Exif object""" """Convert PNG metadata dictionary to PIL Exif object"""
exif = Exif() exif = Exif()
@ -728,11 +725,25 @@ class SaveImagesResponse(CustomNode):
if save_method == 'pil': if save_method == 'pil':
with fsspec.open(uri, mode="wb", **fsspec_kwargs) as f: with fsspec.open(uri, mode="wb", **fsspec_kwargs) as f:
image_as_pil.save(f, format=save_format, **additional_args) image_as_pil.save(f, format=save_format, **additional_args)
else: elif save_method == 'opencv':
_, img_encode = cv2.imencode(f'.{save_format}', image_scaled, cv_save_options) # pylint: disable=no-member _, img_encode = cv2.imencode(f'.{save_format}', image_scaled, cv_save_options)
img_bytes = img_encode.tobytes()
if exif_inst.exif and save_format == 'png':
import zlib
import struct
exif_obj = create_exif_from_pnginfo(exif_inst.exif)
# The eXIf chunk should contain the raw TIFF data, but Pillow's `tobytes()`
# includes the "Exif\x00\x00" prefix for JPEG APP1 markers. We must strip it.
exif_bytes = exif_obj.tobytes()[6:]
# PNG signature (8 bytes) + IHDR chunk (25 bytes) = 33 bytes.
insertion_point = 33
# Create eXIf chunk
exif_chunk = struct.pack('>I', len(exif_bytes)) + b'eXIf' + exif_bytes + struct.pack('>I', zlib.crc32(b'eXIf' + exif_bytes))
img_bytes = img_bytes[:insertion_point] + exif_chunk + img_bytes[insertion_point:]
with fsspec.open(uri, mode="wb", **fsspec_kwargs) as f: with fsspec.open(uri, mode="wb", **fsspec_kwargs) as f:
f.write(img_encode.tobytes()) f.write(img_bytes)
if metadata_uri is not None: if metadata_uri is not None:
# all values are stringified for the metadata # all values are stringified for the metadata
@ -775,7 +786,6 @@ class SaveImagesResponse(CustomNode):
return ui_images_result return ui_images_result
def subfolder_of(self, local_uri, output_directory): def subfolder_of(self, local_uri, output_directory):
return os.path.dirname(os.path.relpath(os.path.abspath(local_uri), os.path.abspath(output_directory))) return os.path.dirname(os.path.relpath(os.path.abspath(local_uri), os.path.abspath(output_directory)))

View File

@ -1,3 +1,4 @@
import json
import logging import logging
import os import os
import shutil import shutil
@ -6,7 +7,6 @@ import pytest
from comfy.cli_args import args from comfy.cli_args import args
_gitattributes = """*.7z filter=lfs diff=lfs merge=lfs -text _gitattributes = """*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text *.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text *.bin filter=lfs diff=lfs merge=lfs -text
@ -165,3 +165,28 @@ async def test_known_repos(tmp_path_factory):
os.environ["HF_HUB_CACHE"] = prev_hub_cache os.environ["HF_HUB_CACHE"] = prev_hub_cache
args.force_hf_local_dir_mode = False args.force_hf_local_dir_mode = False
args.disable_known_models = False args.disable_known_models = False
@pytest.mark.asyncio
async def test_huggingface_alternate_filenames_in_combo():
from comfy.model_downloader import get_filename_list_with_downloadable
from comfy.model_downloader_types import HuggingFile
# 2. Define a HuggingFile with alternate filenames
main_filename = "model.safetensors"
alternate_filename = "alt_model.safetensors"
known_file = HuggingFile(
repo_id="test/repo",
filename=main_filename,
alternate_filenames=(alternate_filename,)
)
# 3. Get the list of files as the UI would
filename_list = get_filename_list_with_downloadable("checkpoints", known_files=[known_file])
# 4. Assert that both the main and alternate filenames are present
assert main_filename in filename_list
assert alternate_filename not in filename_list, "Alternate filename should not be in the list returned by get_filename_list_with_downloadable"
assert alternate_filename in filename_list.view_for_validation(), "Alternate filename should not be in the list returned by get_filename_list_with_downloadable"
assert json.dumps(filename_list) == "[\"model.safetensors\"]"

View File

@ -105,7 +105,7 @@
"24": { "24": {
"inputs": { "inputs": {
"scheduler": "exponential", "scheduler": "exponential",
"steps": 20, "steps": 1,
"denoise": 1, "denoise": 1,
"model": [ "model": [
"14", "14",

View File

@ -24,7 +24,7 @@
"3": { "3": {
"inputs": { "inputs": {
"seed": 232240565010917, "seed": 232240565010917,
"steps": 25, "steps": 1,
"cfg": 3.5, "cfg": 3.5,
"sampler_name": "uni_pc", "sampler_name": "uni_pc",
"scheduler": "normal", "scheduler": "normal",
@ -79,8 +79,8 @@
}, },
"6": { "6": {
"inputs": { "inputs": {
"width": 1024, "width": 512,
"height": 1024, "height": 512,
"batch_size": 1 "batch_size": 1
}, },
"class_type": "EmptyLatentImage", "class_type": "EmptyLatentImage",

View File

@ -122,7 +122,7 @@
"inputs": { "inputs": {
"width": 1280, "width": 1280,
"height": 704, "height": 704,
"length": 121, "length": 1,
"batch_size": 1 "batch_size": 1
}, },
"class_type": "EmptyCosmosLatentVideo", "class_type": "EmptyCosmosLatentVideo",

View File

@ -61,7 +61,7 @@
"37": { "37": {
"inputs": { "inputs": {
"unet_name": "Cosmos-1_0-Diffusion-7B-Video2World.safetensors", "unet_name": "Cosmos-1_0-Diffusion-7B-Video2World.safetensors",
"weight_dtype": "default" "weight_dtype": "fp8_e4m3fn"
}, },
"class_type": "UNETLoader", "class_type": "UNETLoader",
"_meta": { "_meta": {
@ -122,7 +122,7 @@
"inputs": { "inputs": {
"width": 1024, "width": 1024,
"height": 1024, "height": 1024,
"length": 121, "length": 1,
"batch_size": 1, "batch_size": 1,
"vae": [ "vae": [
"39", "39",
@ -140,7 +140,11 @@
}, },
"84": { "84": {
"inputs": { "inputs": {
"value": "https://m.media-amazon.com/images/I/71lI3ioM1ZL.jpg" "value": "https://m.media-amazon.com/images/I/71lI3ioM1ZL.jpg",
"name": "",
"title": "",
"description": "",
"__required": true
}, },
"class_type": "ImageRequestParameter", "class_type": "ImageRequestParameter",
"_meta": { "_meta": {
@ -152,6 +156,7 @@
"resize_mode": "cover", "resize_mode": "cover",
"resolutions": "Cosmos", "resolutions": "Cosmos",
"interpolation": "bilinear", "interpolation": "bilinear",
"aspect_ratio_tolerance": 0.05,
"image": [ "image": [
"84", "84",
0 0

View File

@ -2,7 +2,7 @@
"3": { "3": {
"inputs": { "inputs": {
"seed": 156680208700286, "seed": 156680208700286,
"steps": 20, "steps": 1,
"cfg": 8, "cfg": 8,
"sampler_name": "euler", "sampler_name": "euler",
"scheduler": "normal", "scheduler": "normal",

View File

@ -17,7 +17,7 @@
}, },
"9": { "9": {
"inputs": { "inputs": {
"filename_prefix": "nike/nike_images_", "filename_prefix": "comfyui_",
"images": [ "images": [
"8", "8",
0 0
@ -139,7 +139,7 @@
"22": { "22": {
"inputs": { "inputs": {
"scheduler": "normal", "scheduler": "normal",
"steps": 20, "steps": 1,
"denoise": 1, "denoise": 1,
"model": [ "model": [
"51", "51",
@ -165,8 +165,8 @@
}, },
"49": { "49": {
"inputs": { "inputs": {
"width": 1024, "width": 256,
"height": 1024, "height": 256,
"batch_size": 1 "batch_size": 1
}, },
"class_type": "EmptySD3LatentImage", "class_type": "EmptySD3LatentImage",

View File

@ -77,7 +77,7 @@
"7": { "7": {
"inputs": { "inputs": {
"scheduler": "ddim_uniform", "scheduler": "ddim_uniform",
"steps": 10, "steps": 1,
"denoise": 1, "denoise": 1,
"model": [ "model": [
"12", "12",

View File

@ -2,7 +2,7 @@
"3": { "3": {
"inputs": { "inputs": {
"seed": 432318046789205, "seed": 432318046789205,
"steps": 20, "steps": 1,
"cfg": 1, "cfg": 1,
"sampler_name": "euler", "sampler_name": "euler",
"scheduler": "normal", "scheduler": "normal",

View File

@ -2,7 +2,7 @@
"3": { "3": {
"inputs": { "inputs": {
"seed": 164211176398261, "seed": 164211176398261,
"steps": 20, "steps": 1,
"cfg": 1, "cfg": 1,
"sampler_name": "euler", "sampler_name": "euler",
"scheduler": "normal", "scheduler": "normal",

View File

@ -108,7 +108,7 @@
}, },
"71": { "71": {
"inputs": { "inputs": {
"steps": 30, "steps": 1,
"max_shift": 2.05, "max_shift": 2.05,
"base_shift": 0.95, "base_shift": 0.95,
"stretch": true, "stretch": true,

View File

@ -96,7 +96,7 @@
}, },
"71": { "71": {
"inputs": { "inputs": {
"steps": 30, "steps": 1,
"max_shift": 2.05, "max_shift": 2.05,
"base_shift": 0.95, "base_shift": 0.95,
"stretch": true, "stretch": true,

View File

@ -2,7 +2,7 @@
"3": { "3": {
"inputs": { "inputs": {
"seed": 851616030078638, "seed": 851616030078638,
"steps": 20, "steps": 1,
"cfg": 8, "cfg": 8,
"sampler_name": "euler", "sampler_name": "euler",
"scheduler": "normal", "scheduler": "normal",

View File

@ -157,7 +157,7 @@
"271": { "271": {
"inputs": { "inputs": {
"seed": 945512652412924, "seed": 945512652412924,
"steps": 28, "steps": 1,
"cfg": 4.5, "cfg": 4.5,
"sampler_name": "dpmpp_2m", "sampler_name": "dpmpp_2m",
"scheduler": "sgm_uniform", "scheduler": "sgm_uniform",

View File

@ -143,7 +143,7 @@
"271": { "271": {
"inputs": { "inputs": {
"seed": 945512652412924, "seed": 945512652412924,
"steps": 28, "steps": 1,
"cfg": 4.5, "cfg": 4.5,
"sampler_name": "dpmpp_2m", "sampler_name": "dpmpp_2m",
"scheduler": "sgm_uniform", "scheduler": "sgm_uniform",

View File

@ -146,7 +146,7 @@
"271": { "271": {
"inputs": { "inputs": {
"seed": 780289980632866, "seed": 780289980632866,
"steps": 28, "steps": 1,
"cfg": 4.5, "cfg": 4.5,
"sampler_name": "dpmpp_2m", "sampler_name": "dpmpp_2m",
"scheduler": "sgm_uniform", "scheduler": "sgm_uniform",

View File

@ -135,7 +135,7 @@
"11": { "11": {
"inputs": { "inputs": {
"model_type": "SDXL", "model_type": "SDXL",
"steps": 25, "steps": 1,
"denoise": 1 "denoise": 1
}, },
"class_type": "AlignYourStepsScheduler", "class_type": "AlignYourStepsScheduler",

View File

@ -125,7 +125,7 @@
"11": { "11": {
"inputs": { "inputs": {
"model_type": "SDXL", "model_type": "SDXL",
"steps": 25, "steps": 1,
"denoise": 1 "denoise": 1
}, },
"class_type": "AlignYourStepsScheduler", "class_type": "AlignYourStepsScheduler",

View File

@ -77,7 +77,7 @@
"7": { "7": {
"inputs": { "inputs": {
"scheduler": "ddim_uniform", "scheduler": "ddim_uniform",
"steps": 25, "steps": 1,
"denoise": 1, "denoise": 1,
"model": [ "model": [
"12", "12",

View File

@ -460,7 +460,8 @@ def test_alpha_channel_preservation(use_temporary_output_directory):
("png", 8, True), ("png", 8, True),
("png", 16, True), ("png", 16, True),
("tiff", 8, True), ("tiff", 8, True),
("tiff", 16, True), # todo: we will worry about tiff 16 bit another time
# ("tiff", 16, True),
("jpeg", 8, False), ("jpeg", 8, False),
("webp", 8, False), ("webp", 8, False),
]) ])
@ -500,14 +501,20 @@ def test_basic_exif(format, bits, supports_16bit, use_temporary_output_directory
# Second, verify EXIF data using Pillow # Second, verify EXIF data using Pillow
with Image.open(filepath) as img: with Image.open(filepath) as img:
if format == "png": # For 8-bit PNG, we use PIL's native text chunk saving.
# PNG stores metadata in the 'info' dictionary as text chunks. # For 16-bit PNG, we use a custom OpenCV path that injects a raw eXIf chunk.
# This check is now performed for both 8-bit and 16-bit PNGs. # For other formats, we use PIL's or a custom EXIF saving method.
assert img.info.get("Artist") == "Test Artist" if format == "png" and bits == 8:
assert img.info.get("Copyright") == "Test Copyright" # 8-bit PNG stores metadata in the 'info' dictionary as text chunks.
assert img.info.get("ImageDescription") == "Test Description" info = img.info
assert info.get("Artist") == "Test Artist"
assert info.get("Copyright") == "Test Copyright"
assert info.get("ImageDescription") == "Test Description"
assert info.get("Make") == "Test Camera"
assert info.get("Model") == "Test Model"
assert info.get("Software") == "Test Software"
else: else:
# Other formats use the standard EXIF structure. # 16-bit PNGs (with eXIf), TIFFs, and other formats use the standard EXIF structure.
exif_data = img.getexif() exif_data = img.getexif()
assert exif_data is not None, "EXIF data is missing." assert exif_data is not None, "EXIF data is missing."