Fix compatibility with Python 3.9, 3.10, fix Configuration class declaration issue

This commit is contained in:
doctorpangloss 2024-02-07 21:52:20 -08:00
parent 747ab861c1
commit 123c512a84
17 changed files with 121 additions and 69 deletions

View File

@ -93,7 +93,7 @@ Ctrl can also be replaced with Cmd instead for macOS users
## Installing
You must have Python 3.12, 3.11 or 3.10 installed. On Windows, download the latest Python from their website. You can also [directly download 3.11.4 here](https://www.python.org/ftp/python/3.11.4/python-3.11.4-amd64.exe).
You must have Python 3.10, 3.11 or 3.12 installed. On Windows, download the latest Python from their website. You can also [directly download 3.11.4 here](https://www.python.org/ftp/python/3.11.4/python-3.11.4-amd64.exe).
On macOS, install exactly Python 3.11 using `brew` (install Homebrew itself from https://brew.sh), with this command: `brew install python@3.11`. Do not use Python 3.9 or older, and do not use 3.12 or newer — those versions have broken compatibility with Stable Diffusion in both directions.
@ -178,6 +178,10 @@ On macOS, install exactly Python 3.11 using `brew`, which you can download from
(cd tests-ui && npm ci && npm run test:generate && npm test)
```
You can use `comfyui` as an API. Visit the [OpenAPI specification](comfy/api/openapi.yaml). This file can be used to generate typed clients for your preferred language.
7. To create the standalone binary:
```shell
python -m pyinstaller --onefile --noupx -n ComfyUI --add-data="comfy/;comfy/" --paths $(pwd) --paths comfy/cmd main.py
```
### Authoring Custom Nodes
@ -252,7 +256,7 @@ You would also be able to add the `comfyui` git hash and custom nodes packages b
> I see a message like `RuntimeError: '"upsample_bilinear2d_channels_last" not implemented for 'Half''`
You must use Python 3.10 or 3.11 on macOS devices, and update to at least Ventura.
You must use Python 3.11 on macOS devices, and update to at least Ventura.
> I see a message like `Error while deserializing header: HeaderTooLarge`

View File

@ -1,3 +1,5 @@
from __future__ import annotations
import asyncio
import json
import typing

View File

@ -2,7 +2,7 @@ import configargparse as argparse
import enum
from . import options
from .cli_args_types import LatentPreviewMethod, Configuration
import sys
class EnumAction(argparse.Action):
"""
@ -106,11 +106,12 @@ parser.add_argument("--deterministic", action="store_true", help="Make pytorch u
parser.add_argument("--dont-print-server", action="store_true", help="Don't print server output.")
parser.add_argument("--quick-test-for-ci", action="store_true", help="Quick test for CI.")
parser.add_argument("--windows-standalone-build", action="store_true", help="Windows standalone build: Enable convenient things that most people using the standalone windows build will probably enjoy (like auto opening the page on startup).")
parser.add_argument("--windows-standalone-build", default=hasattr(sys, 'frozen') and getattr(sys, 'frozen'), action="store_true", help="Windows standalone build: Enable convenient things that most people using the standalone windows build will probably enjoy (like auto opening the page on startup).")
parser.add_argument("--disable-metadata", action="store_true", help="Disable saving prompt metadata in files.")
parser.add_argument("--multi-user", action="store_true", help="Enables per-user storage.")
parser.add_argument("--create-directories", action="store_true", help="Creates the default models/, input/, output/ and temp/ directories, then exits.")
parser.add_argument("--plausible-analytics-base-url", required=False,
help="Enables server-side analytics events sent to the provided URL.")

View File

@ -69,64 +69,64 @@ class Configuration(dict):
plausible_analytics_domain (Optional[str]): Domain for analytics events.
analytics_use_identity_provider (bool): Use platform identifiers for analytics.
write_out_config_file (bool): Enable writing out the configuration file.
create_directories (bool): Creates the default models/, input/, output/ and temp/ directories, then exits.
"""
# Class-level type annotations for every option the CLI can set. These
# mirror the defaults assigned in ``__init__`` so static checkers and IDE
# completion know the available keys.
config: Optional[str]
cwd: Optional[str]
listen: str
port: int
enable_cors_header: Optional[str]
max_upload_size: float
extra_model_paths_config: Optional[List[str]]
output_directory: Optional[str]
temp_directory: Optional[str]
input_directory: Optional[str]
auto_launch: bool
disable_auto_launch: bool
cuda_device: Optional[int]
cuda_malloc: bool
disable_cuda_malloc: bool
dont_upcast_attention: bool
force_fp32: bool
force_fp16: bool
bf16_unet: bool
fp16_unet: bool
fp8_e4m3fn_unet: bool
fp8_e5m2_unet: bool
fp16_vae: bool
fp32_vae: bool
bf16_vae: bool
cpu_vae: bool
fp8_e4m3fn_text_enc: bool
fp8_e5m2_text_enc: bool
fp16_text_enc: bool
fp32_text_enc: bool
directml: Optional[int]
disable_ipex_optimize: bool
preview_method: LatentPreviewMethod
use_split_cross_attention: bool
use_quad_cross_attention: bool
use_pytorch_cross_attention: bool
disable_xformers: bool
gpu_only: bool
highvram: bool
normalvram: bool
lowvram: bool
novram: bool
cpu: bool
disable_smart_memory: bool
deterministic: bool
dont_print_server: bool
quick_test_for_ci: bool
windows_standalone_build: bool
disable_metadata: bool
multi_user: bool
plausible_analytics_base_url: Optional[str]
plausible_analytics_domain: Optional[str]
analytics_use_identity_provider: bool
write_out_config_file: bool
# Was missing from this list although it is documented in the class
# docstring and defaulted in ``__init__``; added for consistency.
create_directories: bool
def __init__(self, **kwargs):
    """Populate every configuration option with its default value, then
    apply the caller-supplied overrides in *kwargs*.

    Defaults are assigned as instance attributes; overrides are written via
    item assignment (``self[key] = value``) so they land in the underlying
    mapping.
    NOTE(review): plain attribute assignment on a ``dict`` subclass stores
    into the instance ``__dict__``, not the mapping, unless ``__setattr__``
    is overridden elsewhere in this class — confirm against the full class
    definition.
    """
    super().__init__()
    # Server / network options.
    self.cwd: Optional[str] = None
    self.listen: str = "127.0.0.1"
    self.port: int = 8188
    self.enable_cors_header: Optional[str] = None
    self.max_upload_size: float = 100.0
    # Filesystem locations.
    self.extra_model_paths_config: Optional[List[str]] = []
    self.output_directory: Optional[str] = None
    self.temp_directory: Optional[str] = None
    self.input_directory: Optional[str] = None
    # Launch behaviour.
    self.auto_launch: bool = False
    self.disable_auto_launch: bool = False
    # Device / allocator selection.
    self.cuda_device: Optional[int] = None
    self.cuda_malloc: bool = True
    self.disable_cuda_malloc: bool = False
    # Precision flags for attention, the UNet, the VAE and the text encoder.
    self.dont_upcast_attention: bool = False
    self.force_fp32: bool = False
    self.force_fp16: bool = False
    self.bf16_unet: bool = False
    self.fp16_unet: bool = False
    self.fp8_e4m3fn_unet: bool = False
    self.fp8_e5m2_unet: bool = False
    self.fp16_vae: bool = False
    self.fp32_vae: bool = False
    self.bf16_vae: bool = False
    self.cpu_vae: bool = False
    self.fp8_e4m3fn_text_enc: bool = False
    self.fp8_e5m2_text_enc: bool = False
    self.fp16_text_enc: bool = False
    self.fp32_text_enc: bool = False
    # Backend / attention implementation choices.
    self.directml: Optional[int] = None
    self.disable_ipex_optimize: bool = False
    # NOTE(review): annotated as LatentPreviewMethod at class level but
    # defaulted to the plain string "none" here — confirm this is intended.
    self.preview_method: str = "none"
    self.use_split_cross_attention: bool = False
    self.use_quad_cross_attention: bool = False
    self.use_pytorch_cross_attention: bool = False
    self.disable_xformers: bool = False
    # VRAM strategy.
    self.gpu_only: bool = False
    self.highvram: bool = False
    self.normalvram: bool = False
    self.lowvram: bool = False
    self.novram: bool = False
    self.cpu: bool = False
    self.disable_smart_memory: bool = False
    self.deterministic: bool = False
    # Miscellaneous runtime behaviour.
    self.dont_print_server: bool = False
    self.quick_test_for_ci: bool = False
    self.windows_standalone_build: bool = False
    self.disable_metadata: bool = False
    self.multi_user: bool = False
    # Analytics.
    self.plausible_analytics_base_url: Optional[str] = None
    self.plausible_analytics_domain: Optional[str] = None
    self.analytics_use_identity_provider: bool = False
    self.write_out_config_file: bool = False
    self.create_directories: bool = False
    # Apply caller overrides last so they win over every default above.
    for key, value in kwargs.items():
        self[key] = value

View File

@ -1,3 +1,5 @@
from __future__ import annotations
import asyncio
import gc
import uuid

View File

@ -1,9 +1,9 @@
import copy
from typing import TypeAlias
from typing import TypeAlias, Union
from comfy.api.components.schema.prompt import PromptDict, Prompt
JSON: TypeAlias = dict[str, "JSON"] | list["JSON"] | str | int | float | bool | None
JSON: TypeAlias = Union[dict[str, "JSON"], list["JSON"], str, int, float, bool, None]
_BASE_PROMPT: JSON = {
"4": {
"inputs": {

View File

@ -8,7 +8,8 @@ import sys
import threading
import traceback
import typing
from typing import List, Optional, Tuple
from typing import List, Optional, Tuple, Union
from typing_extensions import TypedDict
import torch
@ -655,19 +656,19 @@ def full_type_name(klass):
return module + '.' + klass.__qualname__
class ValidationErrorExtraInfoDict(typing.TypedDict):
class ValidationErrorExtraInfoDict(TypedDict):
exception_type: str
traceback: List[str]
class ValidationErrorDict(typing.TypedDict):
class ValidationErrorDict(TypedDict):
type: str
message: str
details: str
extra_info: ValidationErrorExtraInfoDict | dict
ValidationTuple = typing.Tuple[bool, ValidationErrorDict | None, typing.List[str], dict | list]
ValidationTuple = typing.Tuple[bool, Optional[ValidationErrorDict], typing.List[str], Union[dict, list]]
def validate_prompt(prompt: typing.Mapping[str, typing.Any]) -> ValidationTuple:

View File

@ -267,3 +267,11 @@ def get_save_image_path(filename_prefix, output_dir, image_width=0, image_height
os.makedirs(full_output_folder, exist_ok=True)
counter = 1
return full_output_folder, filename, counter, subfolder, filename_prefix
def create_directories():
    """Create the default model/content directories and the runtime dirs.

    Ensures that the first (default) registered path for every folder name
    in ``folder_names_and_paths`` exists, then the temp, input, output and
    user directories. Idempotent: already-existing directories are left
    untouched (``exist_ok=True``).
    """
    # Only the first registered path per folder name is the default
    # location; later entries are user-supplied extras. Iterate .values()
    # directly since the folder name itself is not needed (PERF102).
    for paths, _ in folder_names_and_paths.values():
        os.makedirs(paths[0], exist_ok=True)
    for path in (temp_directory, input_directory, output_directory, user_directory):
        os.makedirs(path, exist_ok=True)

View File

@ -1,3 +1,5 @@
import sys
from .. import options
options.enable_args_parsing()
@ -206,6 +208,15 @@ def main():
folder_paths.set_temp_directory(temp_dir)
cleanup_temp()
# create the default directories if we're instructed to, then exit
# or, if it's a windows standalone build, the single .exe file should have its side-by-side directories always created
if args.create_directories:
folder_paths.create_directories()
return
if args.windows_standalone_build:
folder_paths.create_directories()
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
server = server_module.PromptServer(loop)

View File

@ -576,8 +576,10 @@ class PromptServer(ExecutorToClientProgress):
upload_dir = PromptServer.get_upload_dir()
async with aiofiles.open(os.path.join(upload_dir, part.filename), mode='wb') as file:
await file.write(file_data)
except IOError | MemoryError as ioError:
except IOError as ioError:
return web.Response(status=507, reason=str(ioError))
except MemoryError as memoryError:
return web.Response(status=507, reason=str(memoryError))
except Exception as ex:
return web.Response(status=400, reason=str(ex))

View File

@ -1,3 +1,5 @@
from __future__ import annotations
import typing
from abc import ABCMeta, abstractmethod

View File

@ -1,4 +1,7 @@
from typing import Optional, Literal, Protocol, TypedDict, NotRequired
from __future__ import annotations # for Python 3.7-3.9
from typing_extensions import NotRequired, TypedDict
from typing import Optional, Literal, Protocol
from comfy.component_model.queue_types import BinaryEventTypes

View File

@ -2,7 +2,8 @@ from __future__ import annotations
import asyncio
from enum import Enum
from typing import NamedTuple, Optional, TypedDict, List, Literal, NotRequired
from typing import NamedTuple, Optional, List, Literal
from typing_extensions import NotRequired, TypedDict
from dataclasses import dataclass
from typing import Tuple

View File

@ -1,3 +1,5 @@
from __future__ import annotations
import hashlib
import json

View File

@ -1,3 +1,5 @@
from __future__ import annotations
import asyncio
from asyncio import AbstractEventLoop
from typing import Optional, Dict, List, Mapping, Tuple, Callable

View File

@ -1,3 +1,5 @@
from __future__ import annotations
import copy
from typing import Optional, OrderedDict, List, Dict
import collections

View File

@ -3,6 +3,7 @@ import asyncio
import pytest
import torch
from comfy.cli_args_types import Configuration
from comfy.client.embedded_comfy_client import EmbeddedComfyClient
from comfy.client.sdxl_with_refiner_workflow import sdxl_workflow_with_refiner
@ -34,6 +35,14 @@ async def test_embedded_comfy():
outputs = await client.queue_prompt(prompt)
assert outputs["13"]["images"][0]["abs_path"] is not None
@pytest.mark.asyncio
async def test_configuration_options():
    # An explicitly constructed default Configuration must work the same as
    # passing no configuration at all.
    configuration = Configuration()
    async with EmbeddedComfyClient(configuration=configuration) as comfy_client:
        workflow = sdxl_workflow_with_refiner("test")
        results = await comfy_client.queue_prompt(workflow)
        assert results["13"]["images"][0]["abs_path"] is not None
@pytest.mark.asyncio
async def test_multithreaded_comfy():
async with EmbeddedComfyClient(max_workers=2) as client: