mirror of
https://github.com/comfyanonymous/ComfyUI.git
synced 2026-01-10 22:30:50 +08:00
Improved API support
- Run comfyui workflows directly inside other python applications using EmbeddedComfyClient. - Optional telemetry in prompts and models using anonymity preserving Plausible self-hosted or hosted. - Better OpenAPI schema - Basic support for distributed ComfyUI backends. Limitations: no progress reporting, no easy way to start your own distributed backend, requires RabbitMQ as a message broker.
This commit is contained in:
parent
32d83e52ff
commit
1b2ea61345
0
comfy/analytics/__init__.py
Normal file
0
comfy/analytics/__init__.py
Normal file
96
comfy/analytics/analytics.py
Normal file
96
comfy/analytics/analytics.py
Normal file
@ -0,0 +1,96 @@
|
||||
import asyncio
|
||||
import json
|
||||
import sys
|
||||
import uuid
|
||||
from typing import Optional
|
||||
|
||||
from .multi_event_tracker import MultiEventTracker
|
||||
from .plausible import PlausibleTracker
|
||||
from ..api.components.schema.prompt import Prompt
|
||||
|
||||
_event_tracker: MultiEventTracker
|
||||
|
||||
|
||||
def initialize_event_tracking(loop: asyncio.AbstractEventLoop):
|
||||
_event_trackers = []
|
||||
# perform the imports at the time this is invoked to prevent side effects and ordering issues
|
||||
from ..cli_args import args
|
||||
|
||||
identity = str(uuid.uuid4())
|
||||
if args.analytics_use_identity_provider and sys.platform == "nt":
|
||||
from .identity_provider_nt import get_user_name
|
||||
identity = get_user_name()
|
||||
|
||||
if args.plausible_analytics_domain is not None and args.plausible_analytics_base_url is not None:
|
||||
_event_trackers.append(PlausibleTracker(loop, user_agent=identity, base_url=args.plausible_analytics_base_url,
|
||||
domain=args.plausible_analytics_domain))
|
||||
|
||||
if len(_event_trackers) == 0:
|
||||
return
|
||||
|
||||
_event_tracker = MultiEventTracker(_event_trackers)
|
||||
|
||||
def track_event(name: str, url: str = "app://comfyui", props: Optional[dict] = None):
|
||||
# not awaited, we don't care about event tracking in terms of blocking
|
||||
loop.create_task(_event_tracker.track_event(name, url, props=props))
|
||||
|
||||
# patch nodes
|
||||
from ..nodes.base_nodes import SaveImage, CLIPTextEncode, LoraLoader, CheckpointLoaderSimple
|
||||
from ..cmd.execution import PromptQueue
|
||||
from comfy.component_model.queue_types import QueueItem
|
||||
|
||||
prompt_queue_put = PromptQueue.put
|
||||
|
||||
def prompt_queue_put_tracked(self: PromptQueue, item: QueueItem):
|
||||
prompt = Prompt.validate(item.prompt)
|
||||
|
||||
samplers = [v for _, v in prompt.items() if
|
||||
"positive" in v.inputs and "negative" in v.inputs]
|
||||
|
||||
positive_prompt_ids = []
|
||||
negative_prompt_ids = []
|
||||
for sampler in samplers:
|
||||
try:
|
||||
# duck typed
|
||||
key, _ = sampler.inputs['positive']
|
||||
positive_prompt_ids.append(key)
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
key, _ = sampler.inputs['negative']
|
||||
negative_prompt_ids.append(key)
|
||||
except:
|
||||
pass
|
||||
|
||||
positive_prompts = "; ".join(frozenset(str(prompt[x].inputs["text"]) for x in positive_prompt_ids if
|
||||
prompt[x].class_type == CLIPTextEncode.__name__))
|
||||
negative_prompts = "; ".join(frozenset(str(prompt[x].inputs["text"]) for x in negative_prompt_ids if
|
||||
prompt[x].class_type == CLIPTextEncode.__name__))
|
||||
loras = "; ".join(frozenset(
|
||||
str(node.inputs["lora_name"]) for node in prompt.values() if
|
||||
node.class_type == LoraLoader.__name__))
|
||||
checkpoints = "; ".join(frozenset(str(node.inputs["ckpt_name"]) for node in prompt.values() if
|
||||
node.class_type == CheckpointLoaderSimple.__name__))
|
||||
prompt_str = json.dumps(item.queue_tuple, separators=(',', ':'))
|
||||
len_prompt_str = len(prompt_str)
|
||||
prompt_str_pieces = []
|
||||
for i in range(0, len_prompt_str, 1000):
|
||||
prompt_str_pieces += [prompt_str[i:min(i + 1000, len_prompt_str)]]
|
||||
prompt_str_props = {}
|
||||
for i, prompt_str_piece in enumerate(prompt_str_pieces):
|
||||
prompt_str_props[f"prompt.{i}"] = prompt_str_piece
|
||||
try:
|
||||
track_event(SaveImage.__name__, props={
|
||||
"positive_prompts": positive_prompts,
|
||||
"negative_prompts": negative_prompts,
|
||||
"loras": loras,
|
||||
"checkpoints": checkpoints,
|
||||
**prompt_str_props
|
||||
})
|
||||
except:
|
||||
# prevent analytics exceptions from cursing us
|
||||
pass
|
||||
|
||||
return prompt_queue_put(self, item)
|
||||
|
||||
PromptQueue.put = prompt_queue_put_tracked
|
||||
37
comfy/analytics/event_tracker.py
Normal file
37
comfy/analytics/event_tracker.py
Normal file
@ -0,0 +1,37 @@
|
||||
from abc import ABC, abstractmethod
|
||||
import asyncio
|
||||
from typing import Optional, Dict, Any, Union
|
||||
|
||||
|
||||
class EventTracker(ABC):
|
||||
def __init__(self) -> None:
|
||||
pass
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def user_agent(self) -> str:
|
||||
pass
|
||||
|
||||
@user_agent.setter
|
||||
@abstractmethod
|
||||
def user_agent(self, value: str) -> None:
|
||||
pass
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def domain(self) -> str:
|
||||
pass
|
||||
|
||||
@domain.setter
|
||||
@abstractmethod
|
||||
def domain(self, value: str) -> None:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def track_event(self, name: str, url: str, referrer: Optional[str] = None,
|
||||
props: Optional[Dict[str, Any]] = None) -> str:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def close(self) -> None:
|
||||
pass
|
||||
34
comfy/analytics/identity_provider_nt.py
Normal file
34
comfy/analytics/identity_provider_nt.py
Normal file
@ -0,0 +1,34 @@
|
||||
import ctypes
|
||||
from ctypes import wintypes, POINTER, byref
|
||||
|
||||
_windows_dll = ctypes.WinDLL('Secur32.dll')
|
||||
|
||||
_windows_get_user_name_ex_w_func = _windows_dll.GetUserNameExW
|
||||
_windows_get_user_name_ex_w_func.argtypes = [ctypes.c_int, POINTER(wintypes.WCHAR), POINTER(wintypes.ULONG)]
|
||||
_windows_get_user_name_ex_w_func.restype = wintypes.BOOL
|
||||
|
||||
_windows_extended_name_format = {
|
||||
"NameUnknown": 0,
|
||||
"NameFullyQualifiedDN": 1,
|
||||
"NameSamCompatible": 2,
|
||||
"NameDisplay": 3,
|
||||
"NameUniqueId": 6,
|
||||
"NameCanonical": 7,
|
||||
"NameUserPrincipal": 8,
|
||||
"NameCanonicalEx": 9,
|
||||
"NameServicePrincipal": 10,
|
||||
"NameDnsDomain": 12
|
||||
}
|
||||
|
||||
|
||||
def get_user_name():
|
||||
size = wintypes.ULONG(0)
|
||||
format_type = _windows_extended_name_format["NameDisplay"]
|
||||
_windows_get_user_name_ex_w_func(format_type, None, byref(size))
|
||||
|
||||
name_buffer = ctypes.create_unicode_buffer(size.value)
|
||||
|
||||
if not _windows_get_user_name_ex_w_func(format_type, name_buffer, byref(size)):
|
||||
return None
|
||||
|
||||
return name_buffer.value
|
||||
37
comfy/analytics/multi_event_tracker.py
Normal file
37
comfy/analytics/multi_event_tracker.py
Normal file
@ -0,0 +1,37 @@
|
||||
import asyncio
|
||||
from typing import List, Optional, Dict, Any, Union
|
||||
|
||||
from .event_tracker import EventTracker
|
||||
|
||||
|
||||
class MultiEventTracker(EventTracker):
|
||||
def __init__(self, trackers: List[EventTracker]) -> None:
|
||||
super().__init__()
|
||||
self.trackers = trackers
|
||||
|
||||
async def track_event(self, name: str, url: str, referrer: Optional[str] = None,
|
||||
props: Optional[Dict[str, Any]] = None) -> None:
|
||||
tasks = [tracker.track_event(name, url, referrer, props) for tracker in self.trackers]
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
async def close(self) -> None:
|
||||
tasks = [tracker.close() for tracker in self.trackers]
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
@property
|
||||
def user_agent(self) -> str:
|
||||
return next(tracker.user_agent for tracker in self.trackers) if len(self.trackers) > 0 else "(unknown)"
|
||||
|
||||
@user_agent.setter
|
||||
def user_agent(self, value: str) -> None:
|
||||
for tracker in self.trackers:
|
||||
tracker.user_agent = value
|
||||
|
||||
@property
|
||||
def domain(self) -> str:
|
||||
return next(tracker.domain for tracker in self.trackers) if len(self.trackers) > 0 else ("unknown")
|
||||
|
||||
@domain.setter
|
||||
def domain(self, value: str) -> None:
|
||||
for tracker in self.trackers:
|
||||
tracker.domain = value
|
||||
84
comfy/analytics/plausible.py
Normal file
84
comfy/analytics/plausible.py
Normal file
@ -0,0 +1,84 @@
|
||||
import asyncio
|
||||
import json
|
||||
import typing
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
import aiohttp
|
||||
|
||||
from .event_tracker import EventTracker
|
||||
|
||||
|
||||
class PlausibleTracker(EventTracker):
|
||||
def __init__(self, loop: asyncio.AbstractEventLoop, user_agent: str, base_url: str, domain: str) -> None:
|
||||
super().__init__()
|
||||
self._user_agent = user_agent
|
||||
self._domain = domain
|
||||
self._base_url = base_url
|
||||
self.loop = loop
|
||||
self.session = aiohttp.ClientSession(loop=self.loop)
|
||||
self._public_ip: typing.Literal[False] | None | str = None
|
||||
|
||||
@property
|
||||
def user_agent(self) -> str:
|
||||
return self._user_agent
|
||||
|
||||
@user_agent.setter
|
||||
def user_agent(self, value: str) -> None:
|
||||
self._user_agent = value
|
||||
|
||||
@property
|
||||
def domain(self) -> str:
|
||||
return self._domain
|
||||
|
||||
@domain.setter
|
||||
def domain(self, value: str) -> None:
|
||||
self._domain = value
|
||||
|
||||
@property
|
||||
def base_url(self) -> str:
|
||||
return self._base_url
|
||||
|
||||
@base_url.setter
|
||||
def base_url(self, value: str) -> None:
|
||||
self._base_url = value
|
||||
|
||||
async def get_public_ip(self):
|
||||
try:
|
||||
async with self.session.get('https://www.cloudflare.com/cdn-cgi/trace') as response:
|
||||
if response.status == 200:
|
||||
text = await response.text()
|
||||
for line in text.splitlines():
|
||||
if line.startswith('ip='):
|
||||
ip_address = line.split('=')[1]
|
||||
return ip_address
|
||||
except:
|
||||
return False
|
||||
|
||||
async def track_event(self, name: str, url: str, referrer: Optional[str] = None,
|
||||
props: Optional[Dict[str, Any]] = None) -> str:
|
||||
|
||||
if self._public_ip is None:
|
||||
self._public_ip = await self.get_public_ip()
|
||||
headers = {
|
||||
'User-Agent': self.user_agent,
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
if self._public_ip is not None and self._public_ip != False:
|
||||
headers['X-Forwarded-For'] = self._public_ip
|
||||
|
||||
data = {
|
||||
'name': name,
|
||||
'url': url,
|
||||
'domain': self.domain
|
||||
}
|
||||
if referrer:
|
||||
data['referrer'] = referrer
|
||||
if props:
|
||||
data['props'] = props
|
||||
|
||||
async with self.session.post(f'{self.base_url}/api/event', headers=headers,
|
||||
data=json.dumps(data)) as response:
|
||||
return await response.text()
|
||||
|
||||
async def close(self) -> None:
|
||||
await self.session.close()
|
||||
@ -4,25 +4,23 @@
|
||||
|
||||
"""
|
||||
comfyui
|
||||
|
||||
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
|
||||
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://openapi-generator.tech
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
__version__ = "1.0.0"
|
||||
|
||||
# import ApiClient
|
||||
from .api_client import ApiClient
|
||||
from comfy.api.api_client import ApiClient
|
||||
|
||||
# import Configuration
|
||||
from .configuration import Configuration
|
||||
from comfy.api.configurations.api_configuration import ApiConfiguration
|
||||
|
||||
# import exceptions
|
||||
from .exceptions import OpenApiException
|
||||
from .exceptions import ApiAttributeError
|
||||
from .exceptions import ApiTypeError
|
||||
from .exceptions import ApiValueError
|
||||
from .exceptions import ApiKeyError
|
||||
from .exceptions import ApiException
|
||||
from comfy.api.exceptions import OpenApiException
|
||||
from comfy.api.exceptions import ApiAttributeError
|
||||
from comfy.api.exceptions import ApiTypeError
|
||||
from comfy.api.exceptions import ApiValueError
|
||||
from comfy.api.exceptions import ApiKeyError
|
||||
from comfy.api.exceptions import ApiException
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
28
comfy/api/api_response.py
Normal file
28
comfy/api/api_response.py
Normal file
@ -0,0 +1,28 @@
|
||||
# coding: utf-8
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
import dataclasses
|
||||
import typing
|
||||
|
||||
import urllib3
|
||||
|
||||
from comfy.api import schemas
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponse:
|
||||
response: urllib3.HTTPResponse
|
||||
body: typing.Union[schemas.Unset, schemas.OUTPUT_BASE_TYPES]
|
||||
headers: typing.Union[schemas.Unset, typing.Mapping[str, schemas.OUTPUT_BASE_TYPES]]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponseWithoutDeserialization(ApiResponse):
|
||||
response: urllib3.HTTPResponse
|
||||
body: schemas.Unset = schemas.unset
|
||||
headers: schemas.Unset = schemas.unset
|
||||
3
comfy/api/apis/__init__.py
Normal file
3
comfy/api/apis/__init__.py
Normal file
@ -0,0 +1,3 @@
|
||||
# do not import all endpoints into this module because that uses a lot of memory and stack frames
|
||||
# if you need the ability to import all endpoints then import them from
|
||||
# tags, paths, or path_to_api, or tag_to_api
|
||||
50
comfy/api/apis/path_to_api.py
Normal file
50
comfy/api/apis/path_to_api.py
Normal file
@ -0,0 +1,50 @@
|
||||
import typing
|
||||
import typing_extensions
|
||||
|
||||
from comfy.api.apis.paths.solidus import Solidus
|
||||
from comfy.api.apis.paths.api_v1_images_digest import ApiV1ImagesDigest
|
||||
from comfy.api.apis.paths.api_v1_prompts import ApiV1Prompts
|
||||
from comfy.api.apis.paths.embeddings import Embeddings
|
||||
from comfy.api.apis.paths.extensions import Extensions
|
||||
from comfy.api.apis.paths.history import History
|
||||
from comfy.api.apis.paths.interrupt import Interrupt
|
||||
from comfy.api.apis.paths.object_info import ObjectInfo
|
||||
from comfy.api.apis.paths.prompt import Prompt
|
||||
from comfy.api.apis.paths.queue import Queue
|
||||
from comfy.api.apis.paths.upload_image import UploadImage
|
||||
from comfy.api.apis.paths.view import View
|
||||
|
||||
PathToApi = typing.TypedDict(
|
||||
'PathToApi',
|
||||
{
|
||||
"/": typing.Type[Solidus],
|
||||
"/api/v1/images/{digest}": typing.Type[ApiV1ImagesDigest],
|
||||
"/api/v1/prompts": typing.Type[ApiV1Prompts],
|
||||
"/embeddings": typing.Type[Embeddings],
|
||||
"/extensions": typing.Type[Extensions],
|
||||
"/history": typing.Type[History],
|
||||
"/interrupt": typing.Type[Interrupt],
|
||||
"/object_info": typing.Type[ObjectInfo],
|
||||
"/prompt": typing.Type[Prompt],
|
||||
"/queue": typing.Type[Queue],
|
||||
"/upload/image": typing.Type[UploadImage],
|
||||
"/view": typing.Type[View],
|
||||
}
|
||||
)
|
||||
|
||||
path_to_api = PathToApi(
|
||||
{
|
||||
"/": Solidus,
|
||||
"/api/v1/images/{digest}": ApiV1ImagesDigest,
|
||||
"/api/v1/prompts": ApiV1Prompts,
|
||||
"/embeddings": Embeddings,
|
||||
"/extensions": Extensions,
|
||||
"/history": History,
|
||||
"/interrupt": Interrupt,
|
||||
"/object_info": ObjectInfo,
|
||||
"/prompt": Prompt,
|
||||
"/queue": Queue,
|
||||
"/upload/image": UploadImage,
|
||||
"/view": View,
|
||||
}
|
||||
)
|
||||
3
comfy/api/apis/paths/__init__.py
Normal file
3
comfy/api/apis/paths/__init__.py
Normal file
@ -0,0 +1,3 @@
|
||||
# do not import all endpoints into this module because that uses a lot of memory and stack frames
|
||||
# if you need the ability to import all endpoints from this module, import them with
|
||||
# from comfy.api.apis.path_to_api import path_to_api
|
||||
13
comfy/api/apis/paths/api_v1_images_digest.py
Normal file
13
comfy/api/apis/paths/api_v1_images_digest.py
Normal file
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.api_v1_images_digest.get.operation import ApiForGet
|
||||
|
||||
|
||||
class ApiV1ImagesDigest(
|
||||
ApiForGet,
|
||||
):
|
||||
pass
|
||||
15
comfy/api/apis/paths/api_v1_prompts.py
Normal file
15
comfy/api/apis/paths/api_v1_prompts.py
Normal file
@ -0,0 +1,15 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.api_v1_prompts.get.operation import ApiForGet
|
||||
from comfy.api.paths.api_v1_prompts.post.operation import ApiForPost
|
||||
|
||||
|
||||
class ApiV1Prompts(
|
||||
ApiForGet,
|
||||
ApiForPost,
|
||||
):
|
||||
pass
|
||||
13
comfy/api/apis/paths/embeddings.py
Normal file
13
comfy/api/apis/paths/embeddings.py
Normal file
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.embeddings.get.operation import ApiForGet
|
||||
|
||||
|
||||
class Embeddings(
|
||||
ApiForGet,
|
||||
):
|
||||
pass
|
||||
13
comfy/api/apis/paths/extensions.py
Normal file
13
comfy/api/apis/paths/extensions.py
Normal file
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.extensions.get.operation import ApiForGet
|
||||
|
||||
|
||||
class Extensions(
|
||||
ApiForGet,
|
||||
):
|
||||
pass
|
||||
15
comfy/api/apis/paths/history.py
Normal file
15
comfy/api/apis/paths/history.py
Normal file
@ -0,0 +1,15 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.history.get.operation import ApiForGet
|
||||
from comfy.api.paths.history.post.operation import ApiForPost
|
||||
|
||||
|
||||
class History(
|
||||
ApiForGet,
|
||||
ApiForPost,
|
||||
):
|
||||
pass
|
||||
13
comfy/api/apis/paths/interrupt.py
Normal file
13
comfy/api/apis/paths/interrupt.py
Normal file
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.interrupt.post.operation import ApiForPost
|
||||
|
||||
|
||||
class Interrupt(
|
||||
ApiForPost,
|
||||
):
|
||||
pass
|
||||
13
comfy/api/apis/paths/object_info.py
Normal file
13
comfy/api/apis/paths/object_info.py
Normal file
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.object_info.get.operation import ApiForGet
|
||||
|
||||
|
||||
class ObjectInfo(
|
||||
ApiForGet,
|
||||
):
|
||||
pass
|
||||
15
comfy/api/apis/paths/prompt.py
Normal file
15
comfy/api/apis/paths/prompt.py
Normal file
@ -0,0 +1,15 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.prompt.get.operation import ApiForGet
|
||||
from comfy.api.paths.prompt.post.operation import ApiForPost
|
||||
|
||||
|
||||
class Prompt(
|
||||
ApiForGet,
|
||||
ApiForPost,
|
||||
):
|
||||
pass
|
||||
15
comfy/api/apis/paths/queue.py
Normal file
15
comfy/api/apis/paths/queue.py
Normal file
@ -0,0 +1,15 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.queue.get.operation import ApiForGet
|
||||
from comfy.api.paths.queue.post.operation import ApiForPost
|
||||
|
||||
|
||||
class Queue(
|
||||
ApiForGet,
|
||||
ApiForPost,
|
||||
):
|
||||
pass
|
||||
13
comfy/api/apis/paths/solidus.py
Normal file
13
comfy/api/apis/paths/solidus.py
Normal file
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.solidus.get.operation import ApiForGet
|
||||
|
||||
|
||||
class Solidus(
|
||||
ApiForGet,
|
||||
):
|
||||
pass
|
||||
13
comfy/api/apis/paths/upload_image.py
Normal file
13
comfy/api/apis/paths/upload_image.py
Normal file
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.upload_image.post.operation import ApiForPost
|
||||
|
||||
|
||||
class UploadImage(
|
||||
ApiForPost,
|
||||
):
|
||||
pass
|
||||
13
comfy/api/apis/paths/view.py
Normal file
13
comfy/api/apis/paths/view.py
Normal file
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.view.get.operation import ApiForGet
|
||||
|
||||
|
||||
class View(
|
||||
ApiForGet,
|
||||
):
|
||||
pass
|
||||
17
comfy/api/apis/tag_to_api.py
Normal file
17
comfy/api/apis/tag_to_api.py
Normal file
@ -0,0 +1,17 @@
|
||||
import typing
|
||||
import typing_extensions
|
||||
|
||||
from comfy.api.apis.tags.default_api import DefaultApi
|
||||
|
||||
TagToApi = typing.TypedDict(
|
||||
'TagToApi',
|
||||
{
|
||||
"default": typing.Type[DefaultApi],
|
||||
}
|
||||
)
|
||||
|
||||
tag_to_api = TagToApi(
|
||||
{
|
||||
"default": DefaultApi,
|
||||
}
|
||||
)
|
||||
3
comfy/api/apis/tags/__init__.py
Normal file
3
comfy/api/apis/tags/__init__.py
Normal file
@ -0,0 +1,3 @@
|
||||
# do not import all endpoints into this module because that uses a lot of memory and stack frames
|
||||
# if you need the ability to import all endpoints from this module, import them with
|
||||
# from comfy.api.apis.tag_to_api import tag_to_api
|
||||
48
comfy/api/apis/tags/default_api.py
Normal file
48
comfy/api/apis/tags/default_api.py
Normal file
@ -0,0 +1,48 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.paths.api_v1_images_digest.get.operation import ApiV1ImagesDigestGet
|
||||
from comfy.api.paths.api_v1_prompts.get.operation import ApiV1PromptsGet
|
||||
from comfy.api.paths.api_v1_prompts.post.operation import ApiV1PromptsPost
|
||||
from comfy.api.paths.prompt.get.operation import GetPrompt
|
||||
from comfy.api.paths.prompt.post.operation import PostPrompt
|
||||
from comfy.api.paths.extensions.get.operation import GetExtensions
|
||||
from comfy.api.paths.interrupt.post.operation import PostInterrupt
|
||||
from comfy.api.paths.history.get.operation import GetHistory
|
||||
from comfy.api.paths.history.post.operation import PostHistory
|
||||
from comfy.api.paths.queue.get.operation import GetQueue
|
||||
from comfy.api.paths.queue.post.operation import PostQueue
|
||||
from comfy.api.paths.upload_image.post.operation import UploadImage
|
||||
from comfy.api.paths.object_info.get.operation import GetObjectInfo
|
||||
from comfy.api.paths.view.get.operation import ViewImage
|
||||
from comfy.api.paths.embeddings.get.operation import GetEmbeddings
|
||||
from comfy.api.paths.solidus.get.operation import GetRoot
|
||||
|
||||
|
||||
class DefaultApi(
|
||||
ApiV1ImagesDigestGet,
|
||||
ApiV1PromptsGet,
|
||||
ApiV1PromptsPost,
|
||||
GetPrompt,
|
||||
PostPrompt,
|
||||
GetExtensions,
|
||||
PostInterrupt,
|
||||
GetHistory,
|
||||
PostHistory,
|
||||
GetQueue,
|
||||
PostQueue,
|
||||
UploadImage,
|
||||
GetObjectInfo,
|
||||
ViewImage,
|
||||
GetEmbeddings,
|
||||
GetRoot,
|
||||
):
|
||||
"""NOTE: This class is auto generated by OpenAPI JSON Schema Generator
|
||||
Ref: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
|
||||
Do not edit the class manually.
|
||||
"""
|
||||
pass
|
||||
0
comfy/api/components/__init__.py
Normal file
0
comfy/api/components/__init__.py
Normal file
5
comfy/api/components/schema/__init__.py
Normal file
5
comfy/api/components/schema/__init__.py
Normal file
@ -0,0 +1,5 @@
|
||||
# we can not import model classes here because that would create a circular
|
||||
# reference which would not work in python2
|
||||
# do not import all models into this module because that uses a lot of memory and stack frames
|
||||
# if you need the ability to import all models from one package, import them with
|
||||
# from comfy.api.components.schemas import ModelA, ModelB
|
||||
205
comfy/api/components/schema/extra_data.py
Normal file
205
comfy/api/components/schema/extra_data.py
Normal file
@ -0,0 +1,205 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
|
||||
from comfy.api.components.schema import workflow
|
||||
Properties = typing.TypedDict(
|
||||
'Properties',
|
||||
{
|
||||
"workflow": typing.Type[workflow.Workflow],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class ExtraPnginfoDict(schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"workflow",
|
||||
})
|
||||
|
||||
def __new__(
|
||||
cls,
|
||||
*,
|
||||
workflow: typing.Union[
|
||||
workflow.WorkflowDictInput,
|
||||
workflow.WorkflowDict,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: schemas.INPUT_TYPES_ALL,
|
||||
):
|
||||
arg_: typing.Dict[str, typing.Any] = {}
|
||||
for key_, val in (
|
||||
("workflow", workflow),
|
||||
):
|
||||
if isinstance(val, schemas.Unset):
|
||||
continue
|
||||
arg_[key_] = val
|
||||
arg_.update(kwargs)
|
||||
used_arg_ = typing.cast(ExtraPnginfoDictInput, arg_)
|
||||
return ExtraPnginfo.validate(used_arg_, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
ExtraPnginfoDictInput,
|
||||
ExtraPnginfoDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> ExtraPnginfoDict:
|
||||
return ExtraPnginfo.validate(arg, configuration=configuration)
|
||||
|
||||
@property
|
||||
def workflow(self) -> typing.Union[workflow.WorkflowDict, schemas.Unset]:
|
||||
val = self.get("workflow", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
workflow.WorkflowDict,
|
||||
val
|
||||
)
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[schemas.OUTPUT_BASE_TYPES, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
return self.get(name, schemas.unset)
|
||||
ExtraPnginfoDictInput = typing.Mapping[str, schemas.INPUT_TYPES_ALL]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ExtraPnginfo(
|
||||
schemas.Schema[ExtraPnginfoDict, tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
properties: Properties = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties)) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: ExtraPnginfoDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
ExtraPnginfoDictInput,
|
||||
ExtraPnginfoDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> ExtraPnginfoDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
Properties2 = typing.TypedDict(
|
||||
'Properties2',
|
||||
{
|
||||
"extra_pnginfo": typing.Type[ExtraPnginfo],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class ExtraDataDict(schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"extra_pnginfo",
|
||||
})
|
||||
|
||||
def __new__(
|
||||
cls,
|
||||
*,
|
||||
extra_pnginfo: typing.Union[
|
||||
ExtraPnginfoDictInput,
|
||||
ExtraPnginfoDict,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: schemas.INPUT_TYPES_ALL,
|
||||
):
|
||||
arg_: typing.Dict[str, typing.Any] = {}
|
||||
for key_, val in (
|
||||
("extra_pnginfo", extra_pnginfo),
|
||||
):
|
||||
if isinstance(val, schemas.Unset):
|
||||
continue
|
||||
arg_[key_] = val
|
||||
arg_.update(kwargs)
|
||||
used_arg_ = typing.cast(ExtraDataDictInput, arg_)
|
||||
return ExtraData.validate(used_arg_, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
ExtraDataDictInput,
|
||||
ExtraDataDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> ExtraDataDict:
|
||||
return ExtraData.validate(arg, configuration=configuration)
|
||||
|
||||
@property
|
||||
def extra_pnginfo(self) -> typing.Union[ExtraPnginfoDict, schemas.Unset]:
|
||||
val = self.get("extra_pnginfo", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
ExtraPnginfoDict,
|
||||
val
|
||||
)
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[schemas.OUTPUT_BASE_TYPES, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
return self.get(name, schemas.unset)
|
||||
ExtraDataDictInput = typing.Mapping[str, schemas.INPUT_TYPES_ALL]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ExtraData(
|
||||
schemas.Schema[ExtraDataDict, tuple]
|
||||
):
|
||||
"""NOTE: This class is auto generated by OpenAPI JSON Schema Generator.
|
||||
Ref: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
|
||||
Do not edit the class manually.
|
||||
"""
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
properties: Properties2 = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties2)) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: ExtraDataDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
ExtraDataDictInput,
|
||||
ExtraDataDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> ExtraDataDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
959
comfy/api/components/schema/node.py
Normal file
959
comfy/api/components/schema/node.py
Normal file
@ -0,0 +1,959 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
_0: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
_1: typing_extensions.TypeAlias = schemas.NumberSchema
|
||||
Default: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
Min: typing_extensions.TypeAlias = schemas.NumberSchema
|
||||
Max: typing_extensions.TypeAlias = schemas.NumberSchema
|
||||
Step: typing_extensions.TypeAlias = schemas.NumberSchema
|
||||
Multiline: typing_extensions.TypeAlias = schemas.BoolSchema
|
||||
Properties = typing.TypedDict(
|
||||
'Properties',
|
||||
{
|
||||
"default": typing.Type[Default],
|
||||
"min": typing.Type[Min],
|
||||
"max": typing.Type[Max],
|
||||
"step": typing.Type[Step],
|
||||
"multiline": typing.Type[Multiline],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class _2Dict(schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"default",
|
||||
"min",
|
||||
"max",
|
||||
"step",
|
||||
"multiline",
|
||||
})
|
||||
|
||||
def __new__(
|
||||
cls,
|
||||
*,
|
||||
default: typing.Union[
|
||||
str,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
min: typing.Union[
|
||||
int,
|
||||
float,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
max: typing.Union[
|
||||
int,
|
||||
float,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
step: typing.Union[
|
||||
int,
|
||||
float,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
multiline: typing.Union[
|
||||
bool,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: schemas.INPUT_TYPES_ALL,
|
||||
):
|
||||
arg_: typing.Dict[str, typing.Any] = {}
|
||||
for key_, val in (
|
||||
("default", default),
|
||||
("min", min),
|
||||
("max", max),
|
||||
("step", step),
|
||||
("multiline", multiline),
|
||||
):
|
||||
if isinstance(val, schemas.Unset):
|
||||
continue
|
||||
arg_[key_] = val
|
||||
arg_.update(kwargs)
|
||||
used_arg_ = typing.cast(_2DictInput, arg_)
|
||||
return _2.validate(used_arg_, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
_2DictInput,
|
||||
_2Dict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> _2Dict:
|
||||
return _2.validate(arg, configuration=configuration)
|
||||
|
||||
@property
|
||||
def default(self) -> typing.Union[str, schemas.Unset]:
|
||||
val = self.get("default", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
str,
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def min(self) -> typing.Union[int, float, schemas.Unset]:
|
||||
val = self.get("min", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
typing.Union[int, float],
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def max(self) -> typing.Union[int, float, schemas.Unset]:
|
||||
val = self.get("max", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
typing.Union[int, float],
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def step(self) -> typing.Union[int, float, schemas.Unset]:
|
||||
val = self.get("step", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
typing.Union[int, float],
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def multiline(self) -> typing.Union[bool, schemas.Unset]:
|
||||
val = self.get("multiline", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
bool,
|
||||
val
|
||||
)
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[schemas.OUTPUT_BASE_TYPES, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
return self.get(name, schemas.unset)
|
||||
_2DictInput = typing.Mapping[str, schemas.INPUT_TYPES_ALL]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class _2(
|
||||
schemas.Schema[_2Dict, tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
properties: Properties = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties)) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: _2Dict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
_2DictInput,
|
||||
_2Dict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> _2Dict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
Items2: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
|
||||
|
||||
class _3Tuple(
|
||||
typing.Tuple[
|
||||
str,
|
||||
...
|
||||
]
|
||||
):
|
||||
|
||||
def __new__(cls, arg: typing.Union[_3TupleInput, _3Tuple], configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None):
|
||||
return _3.validate(arg, configuration=configuration)
|
||||
_3TupleInput = typing.Union[
|
||||
typing.List[
|
||||
str,
|
||||
],
|
||||
typing.Tuple[
|
||||
str,
|
||||
...
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class _3(
|
||||
schemas.Schema[schemas.immutabledict, _3Tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({tuple})
|
||||
items: typing.Type[Items2] = dataclasses.field(default_factory=lambda: Items2) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
tuple: _3Tuple
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
_3TupleInput,
|
||||
_3Tuple,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> _3Tuple:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
OneOf = typing.Tuple[
|
||||
typing.Type[_0],
|
||||
typing.Type[_1],
|
||||
typing.Type[_2],
|
||||
typing.Type[_3],
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Items(
|
||||
schemas.AnyTypeSchema[schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES], typing.Tuple[schemas.OUTPUT_BASE_TYPES, ...]],
|
||||
):
|
||||
# any type
|
||||
max_items: int = 2
|
||||
min_items: int = 1
|
||||
one_of: OneOf = dataclasses.field(default_factory=lambda: schemas.tuple_to_instance(OneOf)) # type: ignore
|
||||
|
||||
|
||||
|
||||
class AdditionalPropertiesTuple(
|
||||
typing.Tuple[
|
||||
schemas.OUTPUT_BASE_TYPES,
|
||||
...
|
||||
]
|
||||
):
|
||||
|
||||
def __new__(cls, arg: typing.Union[AdditionalPropertiesTupleInput, AdditionalPropertiesTuple], configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None):
|
||||
return AdditionalProperties.validate(arg, configuration=configuration)
|
||||
AdditionalPropertiesTupleInput = typing.Union[
|
||||
typing.List[
|
||||
typing.Union[
|
||||
schemas.INPUT_TYPES_ALL,
|
||||
schemas.OUTPUT_BASE_TYPES
|
||||
],
|
||||
],
|
||||
typing.Tuple[
|
||||
typing.Union[
|
||||
schemas.INPUT_TYPES_ALL,
|
||||
schemas.OUTPUT_BASE_TYPES
|
||||
],
|
||||
...
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class AdditionalProperties(
|
||||
schemas.Schema[schemas.immutabledict, AdditionalPropertiesTuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({tuple})
|
||||
items: typing.Type[Items] = dataclasses.field(default_factory=lambda: Items) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
tuple: AdditionalPropertiesTuple
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
AdditionalPropertiesTupleInput,
|
||||
AdditionalPropertiesTuple,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> AdditionalPropertiesTuple:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
|
||||
class RequiredDict(schemas.immutabledict[str, AdditionalPropertiesTuple]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
def __new__(
|
||||
cls,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: typing.Union[
|
||||
AdditionalPropertiesTupleInput,
|
||||
AdditionalPropertiesTuple
|
||||
],
|
||||
):
|
||||
used_kwargs = typing.cast(RequiredDictInput, kwargs)
|
||||
return Required.validate(used_kwargs, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
RequiredDictInput,
|
||||
RequiredDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> RequiredDict:
|
||||
return Required.validate(arg, configuration=configuration)
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[AdditionalPropertiesTuple, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
val = self.get(name, schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
AdditionalPropertiesTuple,
|
||||
val
|
||||
)
|
||||
RequiredDictInput = typing.Mapping[
|
||||
str,
|
||||
typing.Union[
|
||||
AdditionalPropertiesTupleInput,
|
||||
AdditionalPropertiesTuple
|
||||
],
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Required(
|
||||
schemas.Schema[RequiredDict, tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
additional_properties: typing.Type[AdditionalProperties] = dataclasses.field(default_factory=lambda: AdditionalProperties) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: RequiredDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
RequiredDictInput,
|
||||
RequiredDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> RequiredDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
AdditionalProperties2: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
|
||||
|
||||
class HiddenDict(schemas.immutabledict[str, str]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
def __new__(
|
||||
cls,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: str,
|
||||
):
|
||||
used_kwargs = typing.cast(HiddenDictInput, kwargs)
|
||||
return Hidden.validate(used_kwargs, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
HiddenDictInput,
|
||||
HiddenDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> HiddenDict:
|
||||
return Hidden.validate(arg, configuration=configuration)
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[str, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
val = self.get(name, schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
str,
|
||||
val
|
||||
)
|
||||
HiddenDictInput = typing.Mapping[
|
||||
str,
|
||||
str,
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Hidden(
|
||||
schemas.Schema[HiddenDict, tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
additional_properties: typing.Type[AdditionalProperties2] = dataclasses.field(default_factory=lambda: AdditionalProperties2) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: HiddenDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
HiddenDictInput,
|
||||
HiddenDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> HiddenDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
Properties2 = typing.TypedDict(
|
||||
'Properties2',
|
||||
{
|
||||
"required": typing.Type[Required],
|
||||
"hidden": typing.Type[Hidden],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class InputDict(schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"required",
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"hidden",
|
||||
})
|
||||
|
||||
def __new__(
|
||||
cls,
|
||||
*,
|
||||
required: typing.Union[
|
||||
RequiredDictInput,
|
||||
RequiredDict,
|
||||
],
|
||||
hidden: typing.Union[
|
||||
HiddenDictInput,
|
||||
HiddenDict,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: schemas.INPUT_TYPES_ALL,
|
||||
):
|
||||
arg_: typing.Dict[str, typing.Any] = {
|
||||
"required": required,
|
||||
}
|
||||
for key_, val in (
|
||||
("hidden", hidden),
|
||||
):
|
||||
if isinstance(val, schemas.Unset):
|
||||
continue
|
||||
arg_[key_] = val
|
||||
arg_.update(kwargs)
|
||||
used_arg_ = typing.cast(InputDictInput, arg_)
|
||||
return Input.validate(used_arg_, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
InputDictInput,
|
||||
InputDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> InputDict:
|
||||
return Input.validate(arg, configuration=configuration)
|
||||
|
||||
@property
|
||||
def required(self) -> RequiredDict:
|
||||
return typing.cast(
|
||||
RequiredDict,
|
||||
self.__getitem__("required")
|
||||
)
|
||||
|
||||
@property
|
||||
def hidden(self) -> typing.Union[HiddenDict, schemas.Unset]:
|
||||
val = self.get("hidden", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
HiddenDict,
|
||||
val
|
||||
)
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[schemas.OUTPUT_BASE_TYPES, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
return self.get(name, schemas.unset)
|
||||
InputDictInput = typing.Mapping[str, schemas.INPUT_TYPES_ALL]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Input(
|
||||
schemas.Schema[InputDict, tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
required: typing.FrozenSet[str] = frozenset({
|
||||
"required",
|
||||
})
|
||||
properties: Properties2 = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties2)) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: InputDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
InputDictInput,
|
||||
InputDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> InputDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
Items3: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
|
||||
|
||||
class OutputTuple(
|
||||
typing.Tuple[
|
||||
str,
|
||||
...
|
||||
]
|
||||
):
|
||||
|
||||
def __new__(cls, arg: typing.Union[OutputTupleInput, OutputTuple], configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None):
|
||||
return Output.validate(arg, configuration=configuration)
|
||||
OutputTupleInput = typing.Union[
|
||||
typing.List[
|
||||
str,
|
||||
],
|
||||
typing.Tuple[
|
||||
str,
|
||||
...
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Output(
|
||||
schemas.Schema[schemas.immutabledict, OutputTuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({tuple})
|
||||
items: typing.Type[Items3] = dataclasses.field(default_factory=lambda: Items3) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
tuple: OutputTuple
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
OutputTupleInput,
|
||||
OutputTuple,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> OutputTuple:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
Items4: typing_extensions.TypeAlias = schemas.BoolSchema
|
||||
|
||||
|
||||
class OutputIsListTuple(
|
||||
typing.Tuple[
|
||||
bool,
|
||||
...
|
||||
]
|
||||
):
|
||||
|
||||
def __new__(cls, arg: typing.Union[OutputIsListTupleInput, OutputIsListTuple], configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None):
|
||||
return OutputIsList.validate(arg, configuration=configuration)
|
||||
OutputIsListTupleInput = typing.Union[
|
||||
typing.List[
|
||||
bool,
|
||||
],
|
||||
typing.Tuple[
|
||||
bool,
|
||||
...
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class OutputIsList(
|
||||
schemas.Schema[schemas.immutabledict, OutputIsListTuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({tuple})
|
||||
items: typing.Type[Items4] = dataclasses.field(default_factory=lambda: Items4) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
tuple: OutputIsListTuple
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
OutputIsListTupleInput,
|
||||
OutputIsListTuple,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> OutputIsListTuple:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
Items5: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
|
||||
|
||||
class OutputNameTuple(
|
||||
typing.Tuple[
|
||||
str,
|
||||
...
|
||||
]
|
||||
):
|
||||
|
||||
def __new__(cls, arg: typing.Union[OutputNameTupleInput, OutputNameTuple], configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None):
|
||||
return OutputName.validate(arg, configuration=configuration)
|
||||
OutputNameTupleInput = typing.Union[
|
||||
typing.List[
|
||||
str,
|
||||
],
|
||||
typing.Tuple[
|
||||
str,
|
||||
...
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class OutputName(
|
||||
schemas.Schema[schemas.immutabledict, OutputNameTuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({tuple})
|
||||
items: typing.Type[Items5] = dataclasses.field(default_factory=lambda: Items5) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
tuple: OutputNameTuple
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
OutputNameTupleInput,
|
||||
OutputNameTuple,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> OutputNameTuple:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
Name: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
DisplayName: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
Description: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
Category: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
OutputNode: typing_extensions.TypeAlias = schemas.BoolSchema
|
||||
Properties3 = typing.TypedDict(
|
||||
'Properties3',
|
||||
{
|
||||
"input": typing.Type[Input],
|
||||
"output": typing.Type[Output],
|
||||
"output_is_list": typing.Type[OutputIsList],
|
||||
"output_name": typing.Type[OutputName],
|
||||
"name": typing.Type[Name],
|
||||
"display_name": typing.Type[DisplayName],
|
||||
"description": typing.Type[Description],
|
||||
"category": typing.Type[Category],
|
||||
"output_node": typing.Type[OutputNode],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class NodeDict(schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"input",
|
||||
"output",
|
||||
"output_is_list",
|
||||
"output_name",
|
||||
"name",
|
||||
"display_name",
|
||||
"description",
|
||||
"category",
|
||||
"output_node",
|
||||
})
|
||||
|
||||
def __new__(
|
||||
cls,
|
||||
*,
|
||||
input: typing.Union[
|
||||
InputDictInput,
|
||||
InputDict,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
output: typing.Union[
|
||||
OutputTupleInput,
|
||||
OutputTuple,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
output_is_list: typing.Union[
|
||||
OutputIsListTupleInput,
|
||||
OutputIsListTuple,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
output_name: typing.Union[
|
||||
OutputNameTupleInput,
|
||||
OutputNameTuple,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
name: typing.Union[
|
||||
str,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
display_name: typing.Union[
|
||||
str,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
description: typing.Union[
|
||||
str,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
category: typing.Union[
|
||||
str,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
output_node: typing.Union[
|
||||
bool,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: schemas.INPUT_TYPES_ALL,
|
||||
):
|
||||
arg_: typing.Dict[str, typing.Any] = {}
|
||||
for key_, val in (
|
||||
("input", input),
|
||||
("output", output),
|
||||
("output_is_list", output_is_list),
|
||||
("output_name", output_name),
|
||||
("name", name),
|
||||
("display_name", display_name),
|
||||
("description", description),
|
||||
("category", category),
|
||||
("output_node", output_node),
|
||||
):
|
||||
if isinstance(val, schemas.Unset):
|
||||
continue
|
||||
arg_[key_] = val
|
||||
arg_.update(kwargs)
|
||||
used_arg_ = typing.cast(NodeDictInput, arg_)
|
||||
return Node.validate(used_arg_, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
NodeDictInput,
|
||||
NodeDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> NodeDict:
|
||||
return Node.validate(arg, configuration=configuration)
|
||||
|
||||
@property
|
||||
def input(self) -> typing.Union[InputDict, schemas.Unset]:
|
||||
val = self.get("input", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
InputDict,
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def output(self) -> typing.Union[OutputTuple, schemas.Unset]:
|
||||
val = self.get("output", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
OutputTuple,
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def output_is_list(self) -> typing.Union[OutputIsListTuple, schemas.Unset]:
|
||||
val = self.get("output_is_list", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
OutputIsListTuple,
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def output_name(self) -> typing.Union[OutputNameTuple, schemas.Unset]:
|
||||
val = self.get("output_name", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
OutputNameTuple,
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self) -> typing.Union[str, schemas.Unset]:
|
||||
val = self.get("name", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
str,
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def display_name(self) -> typing.Union[str, schemas.Unset]:
|
||||
val = self.get("display_name", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
str,
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def description(self) -> typing.Union[str, schemas.Unset]:
|
||||
val = self.get("description", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
str,
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def category(self) -> typing.Union[str, schemas.Unset]:
|
||||
val = self.get("category", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
str,
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def output_node(self) -> typing.Union[bool, schemas.Unset]:
|
||||
val = self.get("output_node", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
bool,
|
||||
val
|
||||
)
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[schemas.OUTPUT_BASE_TYPES, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
return self.get(name, schemas.unset)
|
||||
NodeDictInput = typing.Mapping[str, schemas.INPUT_TYPES_ALL]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Node(
|
||||
schemas.Schema[NodeDict, tuple]
|
||||
):
|
||||
"""NOTE: This class is auto generated by OpenAPI JSON Schema Generator.
|
||||
Ref: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
|
||||
Do not edit the class manually.
|
||||
"""
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
properties: Properties3 = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties3)) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: NodeDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
NodeDictInput,
|
||||
NodeDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> NodeDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
100
comfy/api/components/schema/prompt.py
Normal file
@ -0,0 +1,100 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
|
||||
from comfy.api.components.schema import prompt_node
|
||||
|
||||
|
||||
class PromptDict(schemas.immutabledict[str, prompt_node.PromptNodeDict]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
def __new__(
|
||||
cls,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: typing.Union[
|
||||
prompt_node.PromptNodeDictInput,
|
||||
prompt_node.PromptNodeDict,
|
||||
],
|
||||
):
|
||||
used_kwargs = typing.cast(PromptDictInput, kwargs)
|
||||
return Prompt.validate(used_kwargs, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
PromptDictInput,
|
||||
PromptDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> PromptDict:
|
||||
return Prompt.validate(arg, configuration=configuration)
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[prompt_node.PromptNodeDict, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
val = self.get(name, schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
prompt_node.PromptNodeDict,
|
||||
val
|
||||
)
|
||||
PromptDictInput = typing.Mapping[
|
||||
str,
|
||||
typing.Union[
|
||||
prompt_node.PromptNodeDictInput,
|
||||
prompt_node.PromptNodeDict,
|
||||
],
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Prompt(
|
||||
schemas.Schema[PromptDict, tuple]
|
||||
):
|
||||
"""NOTE: This class is auto generated by OpenAPI JSON Schema Generator.
|
||||
Ref: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
|
||||
Do not edit the class manually.
|
||||
|
||||
The keys are stringified integers corresponding to nodes.
|
||||
|
||||
You can retrieve the last prompt run using GET /api/v1/prompts
|
||||
|
||||
"""
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
additional_properties: typing.Type[prompt_node.PromptNode] = dataclasses.field(default_factory=lambda: prompt_node.PromptNode) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: PromptDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
PromptDictInput,
|
||||
PromptDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> PromptDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
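A minimal usage sketch for the generated Prompt schema above (not part of this commit; the node class names, input names, and connection index are illustrative assumptions): a prompt is a plain mapping from stringified node IDs to node objects and can be validated client-side before it is queued.

from comfy.api.components.schema.prompt import Prompt

# Keys are stringified node IDs; each value must carry "class_type" and "inputs".
prompt = Prompt.validate({
    "1": {  # illustrative checkpoint loader node
        "class_type": "CheckpointLoaderSimple",
        "inputs": {"ckpt_name": "v1-5-pruned-emaonly.safetensors"},
    },
    "2": {  # illustrative text encoder wired to node "1", output index 1
        "class_type": "CLIPTextEncode",
        "inputs": {"text": "a photo of a cat", "clip": ["1", 1]},
    },
})

# Entries come back as validated PromptNodeDict values.
node_2 = prompt.get_additional_property_("2")
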
405
comfy/api/components/schema/prompt_node.py
Normal file
@ -0,0 +1,405 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
ClassType: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
_0: typing_extensions.TypeAlias = schemas.NumberSchema
|
||||
_1: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
_2: typing_extensions.TypeAlias = schemas.BoolSchema
|
||||
|
||||
|
||||
class ItemsTuple(
|
||||
typing.Tuple[
|
||||
str,
|
||||
int,
|
||||
typing_extensions.Unpack[typing.Tuple[schemas.OUTPUT_BASE_TYPES, ...]]
|
||||
]
|
||||
):
|
||||
|
||||
def __new__(cls, arg: typing.Union[ItemsTupleInput, ItemsTuple], configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None):
|
||||
return Items.validate(arg, configuration=configuration)
|
||||
ItemsTupleInput = typing.Union[
|
||||
typing.List[
|
||||
typing.Union[
|
||||
schemas.INPUT_TYPES_ALL,
|
||||
schemas.OUTPUT_BASE_TYPES
|
||||
],
|
||||
],
|
||||
typing.Tuple[
|
||||
str,
|
||||
int,
|
||||
typing_extensions.Unpack[typing.Tuple[schemas.INPUT_TYPES_ALL, ...]]
|
||||
]
|
||||
]
|
||||
_02: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
_12: typing_extensions.TypeAlias = schemas.IntSchema
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Items(
|
||||
schemas.AnyTypeSchema[schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES], ItemsTuple],
|
||||
):
|
||||
# any type
|
||||
prefix_items: typing.Tuple[
|
||||
typing.Type[_02],
|
||||
typing.Type[_12],
|
||||
] = (
|
||||
_02,
|
||||
_12,
|
||||
)
|
||||
max_items: int = 2
|
||||
min_items: int = 2
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
tuple: ItemsTuple,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
|
||||
class _3Tuple(
|
||||
typing.Tuple[
|
||||
schemas.OUTPUT_BASE_TYPES,
|
||||
...
|
||||
]
|
||||
):
|
||||
|
||||
def __new__(cls, arg: typing.Union[_3TupleInput, _3Tuple], configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None):
|
||||
return _3.validate(arg, configuration=configuration)
|
||||
_3TupleInput = typing.Union[
|
||||
typing.List[
|
||||
typing.Union[
|
||||
schemas.INPUT_TYPES_ALL,
|
||||
schemas.OUTPUT_BASE_TYPES
|
||||
],
|
||||
],
|
||||
typing.Tuple[
|
||||
typing.Union[
|
||||
schemas.INPUT_TYPES_ALL,
|
||||
schemas.OUTPUT_BASE_TYPES
|
||||
],
|
||||
...
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class _3(
|
||||
schemas.Schema[schemas.immutabledict, _3Tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({tuple})
|
||||
items: typing.Type[Items] = dataclasses.field(default_factory=lambda: Items) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
tuple: _3Tuple
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
_3TupleInput,
|
||||
_3Tuple,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> _3Tuple:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
OneOf = typing.Tuple[
|
||||
typing.Type[_0],
|
||||
typing.Type[_1],
|
||||
typing.Type[_2],
|
||||
typing.Type[_3],
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class AdditionalProperties(
|
||||
schemas.AnyTypeSchema[schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES], typing.Tuple[schemas.OUTPUT_BASE_TYPES, ...]],
|
||||
):
|
||||
# any type
|
||||
one_of: OneOf = dataclasses.field(default_factory=lambda: schemas.tuple_to_instance(OneOf)) # type: ignore
|
||||
|
||||
|
||||
|
||||
class InputsDict(schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
def __new__(
|
||||
cls,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: typing.Union[
|
||||
schemas.INPUT_TYPES_ALL,
|
||||
schemas.OUTPUT_BASE_TYPES
|
||||
],
|
||||
):
|
||||
used_kwargs = typing.cast(InputsDictInput, kwargs)
|
||||
return Inputs.validate(used_kwargs, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
InputsDictInput,
|
||||
InputsDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> InputsDict:
|
||||
return Inputs.validate(arg, configuration=configuration)
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[schemas.OUTPUT_BASE_TYPES, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
val = self.get(name, schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
schemas.OUTPUT_BASE_TYPES,
|
||||
val
|
||||
)
|
||||
InputsDictInput = typing.Mapping[
|
||||
str,
|
||||
typing.Union[
|
||||
schemas.INPUT_TYPES_ALL,
|
||||
schemas.OUTPUT_BASE_TYPES
|
||||
],
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Inputs(
|
||||
schemas.Schema[InputsDict, tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
additional_properties: typing.Type[AdditionalProperties] = dataclasses.field(default_factory=lambda: AdditionalProperties) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: InputsDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
InputsDictInput,
|
||||
InputsDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> InputsDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
Items2: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
|
||||
|
||||
class _0Tuple(
|
||||
typing.Tuple[
|
||||
str,
|
||||
...
|
||||
]
|
||||
):
|
||||
|
||||
def __new__(cls, arg: typing.Union[_0TupleInput, _0Tuple], configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None):
|
||||
return _03.validate(arg, configuration=configuration)
|
||||
_0TupleInput = typing.Union[
|
||||
typing.List[
|
||||
str,
|
||||
],
|
||||
typing.Tuple[
|
||||
str,
|
||||
...
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class _03(
|
||||
schemas.Schema[schemas.immutabledict, _0Tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({tuple})
|
||||
items: typing.Type[Items2] = dataclasses.field(default_factory=lambda: Items2) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
tuple: _0Tuple
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
_0TupleInput,
|
||||
_0Tuple,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> _0Tuple:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
_13: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
OneOf2 = typing.Tuple[
|
||||
typing.Type[_03],
|
||||
typing.Type[_13],
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class IsChanged(
|
||||
schemas.AnyTypeSchema[schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES], typing.Tuple[schemas.OUTPUT_BASE_TYPES, ...]],
|
||||
):
|
||||
# any type
|
||||
one_of: OneOf2 = dataclasses.field(default_factory=lambda: schemas.tuple_to_instance(OneOf2)) # type: ignore
|
||||
|
||||
Properties = typing.TypedDict(
|
||||
'Properties',
|
||||
{
|
||||
"class_type": typing.Type[ClassType],
|
||||
"inputs": typing.Type[Inputs],
|
||||
"is_changed": typing.Type[IsChanged],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class PromptNodeDict(schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"class_type",
|
||||
"inputs",
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"is_changed",
|
||||
})
|
||||
|
||||
def __new__(
|
||||
cls,
|
||||
*,
|
||||
class_type: str,
|
||||
inputs: typing.Union[
|
||||
InputsDictInput,
|
||||
InputsDict,
|
||||
],
|
||||
is_changed: typing.Union[
|
||||
schemas.INPUT_TYPES_ALL,
|
||||
schemas.OUTPUT_BASE_TYPES,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: schemas.INPUT_TYPES_ALL,
|
||||
):
|
||||
arg_: typing.Dict[str, typing.Any] = {
|
||||
"class_type": class_type,
|
||||
"inputs": inputs,
|
||||
}
|
||||
for key_, val in (
|
||||
("is_changed", is_changed),
|
||||
):
|
||||
if isinstance(val, schemas.Unset):
|
||||
continue
|
||||
arg_[key_] = val
|
||||
arg_.update(kwargs)
|
||||
used_arg_ = typing.cast(PromptNodeDictInput, arg_)
|
||||
return PromptNode.validate(used_arg_, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
PromptNodeDictInput,
|
||||
PromptNodeDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> PromptNodeDict:
|
||||
return PromptNode.validate(arg, configuration=configuration)
|
||||
|
||||
@property
|
||||
def class_type(self) -> str:
|
||||
return typing.cast(
|
||||
str,
|
||||
self.__getitem__("class_type")
|
||||
)
|
||||
|
||||
@property
|
||||
def inputs(self) -> InputsDict:
|
||||
return typing.cast(
|
||||
InputsDict,
|
||||
self.__getitem__("inputs")
|
||||
)
|
||||
|
||||
@property
|
||||
def is_changed(self) -> typing.Union[schemas.OUTPUT_BASE_TYPES, schemas.Unset]:
|
||||
val = self.get("is_changed", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return val
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[schemas.OUTPUT_BASE_TYPES, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
return self.get(name, schemas.unset)
|
||||
PromptNodeDictInput = typing.Mapping[str, schemas.INPUT_TYPES_ALL]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class PromptNode(
|
||||
schemas.Schema[PromptNodeDict, tuple]
|
||||
):
|
||||
"""NOTE: This class is auto generated by OpenAPI JSON Schema Generator.
|
||||
Ref: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
|
||||
Do not edit the class manually.
|
||||
"""
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
required: typing.FrozenSet[str] = frozenset({
|
||||
"class_type",
|
||||
"inputs",
|
||||
})
|
||||
properties: Properties = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties)) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: PromptNodeDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
PromptNodeDictInput,
|
||||
PromptNodeDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> PromptNodeDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
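The PromptNodeDict class above also accepts keyword arguments directly; a brief hypothetical sketch (the "KSampler" class name and its input names are illustrative, and the connection value follows the [node id, output index] form defined by the Inputs schema):

from comfy.api.components.schema.prompt_node import PromptNodeDict

node = PromptNodeDict(
    class_type="KSampler",
    inputs={
        "seed": 42,           # scalar input (number)
        "model": ["4", 0],    # connection: node "4", output index 0
    },
    is_changed="cache-key",   # optional: a string, or a list of strings
)
assert node.class_type == "KSampler"
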
149
comfy/api/components/schema/prompt_request.py
Normal file
@ -0,0 +1,149 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
ClientId: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
|
||||
from comfy.api.components.schema import extra_data
|
||||
from comfy.api.components.schema import prompt
|
||||
Properties = typing.TypedDict(
|
||||
'Properties',
|
||||
{
|
||||
"client_id": typing.Type[ClientId],
|
||||
"prompt": typing.Type[prompt.Prompt],
|
||||
"extra_data": typing.Type[extra_data.ExtraData],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class PromptRequestDict(schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"prompt",
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"client_id",
|
||||
"extra_data",
|
||||
})
|
||||
|
||||
def __new__(
|
||||
cls,
|
||||
*,
|
||||
prompt: typing.Union[
|
||||
prompt.PromptDictInput,
|
||||
prompt.PromptDict,
|
||||
],
|
||||
client_id: typing.Union[
|
||||
str,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
extra_data: typing.Union[
|
||||
extra_data.ExtraDataDictInput,
|
||||
extra_data.ExtraDataDict,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: schemas.INPUT_TYPES_ALL,
|
||||
):
|
||||
arg_: typing.Dict[str, typing.Any] = {
|
||||
"prompt": prompt,
|
||||
}
|
||||
for key_, val in (
|
||||
("client_id", client_id),
|
||||
("extra_data", extra_data),
|
||||
):
|
||||
if isinstance(val, schemas.Unset):
|
||||
continue
|
||||
arg_[key_] = val
|
||||
arg_.update(kwargs)
|
||||
used_arg_ = typing.cast(PromptRequestDictInput, arg_)
|
||||
return PromptRequest.validate(used_arg_, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
PromptRequestDictInput,
|
||||
PromptRequestDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> PromptRequestDict:
|
||||
return PromptRequest.validate(arg, configuration=configuration)
|
||||
|
||||
@property
|
||||
def prompt(self) -> prompt.PromptDict:
|
||||
return typing.cast(
|
||||
prompt.PromptDict,
|
||||
self.__getitem__("prompt")
|
||||
)
|
||||
|
||||
@property
|
||||
def client_id(self) -> typing.Union[str, schemas.Unset]:
|
||||
val = self.get("client_id", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
str,
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def extra_data(self) -> typing.Union[extra_data.ExtraDataDict, schemas.Unset]:
|
||||
val = self.get("extra_data", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
extra_data.ExtraDataDict,
|
||||
val
|
||||
)
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[schemas.OUTPUT_BASE_TYPES, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
return self.get(name, schemas.unset)
|
||||
PromptRequestDictInput = typing.Mapping[str, schemas.INPUT_TYPES_ALL]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class PromptRequest(
|
||||
schemas.Schema[PromptRequestDict, tuple]
|
||||
):
|
||||
"""NOTE: This class is auto generated by OpenAPI JSON Schema Generator.
|
||||
Ref: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
|
||||
Do not edit the class manually.
|
||||
"""
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
required: typing.FrozenSet[str] = frozenset({
|
||||
"prompt",
|
||||
})
|
||||
properties: Properties = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties)) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: PromptRequestDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
PromptRequestDictInput,
|
||||
PromptRequestDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> PromptRequestDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
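A hypothetical sketch of building the prompt-queueing request body with the PromptRequest schema above; only "prompt" is required, while "client_id" and "extra_data" are optional (the node contents shown are illustrative):

from comfy.api.components.schema.prompt_request import PromptRequest

body = PromptRequest.validate({
    "prompt": {
        "1": {
            "class_type": "CLIPTextEncode",   # illustrative node
            "inputs": {"text": "a landscape"},
        },
    },
    "client_id": "my-client",                 # optional caller identifier
})
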
166
comfy/api/components/schema/queue_tuple.py
Normal file
@ -0,0 +1,166 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from comfy.api.components.schema import prompt, extra_data
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
|
||||
|
||||
class QueueTupleTuple(
|
||||
typing.Tuple[
|
||||
typing.Union[int, float],
|
||||
str,
|
||||
prompt.PromptDict,
|
||||
extra_data.ExtraDataDict,
|
||||
"_4Tuple",
|
||||
typing_extensions.Unpack[typing.Tuple[schemas.OUTPUT_BASE_TYPES, ...]]
|
||||
]
|
||||
):
|
||||
|
||||
def __new__(cls, arg: typing.Union[QueueTupleTupleInput, QueueTupleTuple], configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None):
|
||||
return QueueTuple.validate(arg, configuration=configuration)
|
||||
QueueTupleTupleInput = typing.Union[
|
||||
typing.List[
|
||||
typing.Union[
|
||||
schemas.INPUT_TYPES_ALL,
|
||||
schemas.OUTPUT_BASE_TYPES
|
||||
],
|
||||
],
|
||||
typing.Tuple[
|
||||
typing.Union[
|
||||
int,
|
||||
float
|
||||
],
|
||||
str,
|
||||
typing.Union[
|
||||
prompt.PromptDictInput,
|
||||
prompt.PromptDict,
|
||||
],
|
||||
typing.Union[
|
||||
extra_data.ExtraDataDictInput,
|
||||
extra_data.ExtraDataDict,
|
||||
],
|
||||
typing.Union[
|
||||
"_4TupleInput",
|
||||
"_4Tuple"
|
||||
],
|
||||
typing_extensions.Unpack[typing.Tuple[schemas.INPUT_TYPES_ALL, ...]]
|
||||
]
|
||||
]
|
||||
_0: typing_extensions.TypeAlias = schemas.NumberSchema
|
||||
_1: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
Items: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
|
||||
|
||||
class _4Tuple(
|
||||
typing.Tuple[
|
||||
str,
|
||||
...
|
||||
]
|
||||
):
|
||||
|
||||
def __new__(cls, arg: typing.Union[_4TupleInput, _4Tuple], configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None):
|
||||
return _4.validate(arg, configuration=configuration)
|
||||
_4TupleInput = typing.Union[
|
||||
typing.List[
|
||||
str,
|
||||
],
|
||||
typing.Tuple[
|
||||
str,
|
||||
...
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class _4(
|
||||
schemas.Schema[schemas.immutabledict, _4Tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({tuple})
|
||||
items: typing.Type[Items] = dataclasses.field(default_factory=lambda: Items) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
tuple: _4Tuple
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
_4TupleInput,
|
||||
_4Tuple,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> _4Tuple:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class QueueTuple(
|
||||
schemas.Schema[schemas.immutabledict, QueueTupleTuple]
|
||||
):
|
||||
"""NOTE: This class is auto generated by OpenAPI JSON Schema Generator.
|
||||
Ref: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
|
||||
Do not edit the class manually.
|
||||
|
||||
The first item is the queue priority
|
||||
The second item is the hash id of the prompt object
|
||||
The third item is a Prompt
|
||||
The fourth item is optionally an ExtraData
|
||||
The fifth item is optionally a list of "Good Outputs" node IDs.
|
||||
|
||||
"""
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({tuple})
|
||||
max_items: int = 5
|
||||
min_items: int = 3
|
||||
prefix_items: typing.Tuple[
|
||||
typing.Type[_0],
|
||||
typing.Type[_1],
|
||||
typing.Type[prompt.Prompt],
|
||||
typing.Type[extra_data.ExtraData],
|
||||
typing.Type[_4],
|
||||
] = (
|
||||
_0,
|
||||
_1,
|
||||
prompt.Prompt,
|
||||
extra_data.ExtraData,
|
||||
_4,
|
||||
)
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
tuple: QueueTupleTuple
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
QueueTupleTupleInput,
|
||||
QueueTupleTuple,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> QueueTupleTuple:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
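A hypothetical sketch of validating a queue entry against the QueueTuple schema above: at minimum the priority, the prompt hash id, and the Prompt must be present, with ExtraData and the "good outputs" node ID list optional (the prompt id and node contents are placeholders):

from comfy.api.components.schema.queue_tuple import QueueTuple

entry = QueueTuple.validate([
    0,                                        # queue priority
    "example-prompt-id",                      # hash id of the prompt (placeholder)
    {"1": {"class_type": "CLIPTextEncode",    # illustrative Prompt
           "inputs": {"text": "hello"}}},
])
priority, prompt_id, prompt_dict = entry[0], entry[1], entry[2]
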
1457
comfy/api/components/schema/workflow.py
Normal file
File diff suppressed because it is too large
20
comfy/api/components/schemas/__init__.py
Normal file
@ -0,0 +1,20 @@
|
||||
# coding: utf-8
|
||||
|
||||
# flake8: noqa
|
||||
|
||||
# import all models into this package
|
||||
# if you have many models here with many references from one model to another this may
|
||||
# raise a RecursionError
|
||||
# to avoid this, import only the models that you directly need like:
|
||||
# from comfy.api.components.schema.pet import Pet
|
||||
# or import this package, but before doing it, use:
|
||||
# import sys
|
||||
# sys.setrecursionlimit(n)
|
||||
|
||||
from comfy.api.components.schema.extra_data import ExtraData
|
||||
from comfy.api.components.schema.node import Node
|
||||
from comfy.api.components.schema.prompt import Prompt
|
||||
from comfy.api.components.schema.prompt_node import PromptNode
|
||||
from comfy.api.components.schema.prompt_request import PromptRequest
|
||||
from comfy.api.components.schema.queue_tuple import QueueTuple
|
||||
from comfy.api.components.schema.workflow import Workflow
|
||||
@ -1,2 +0,0 @@
|
||||
class Configuration:
|
||||
pass
|
||||
0
comfy/api/configurations/__init__.py
Normal file
281
comfy/api/configurations/api_configuration.py
Normal file
@ -0,0 +1,281 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
import copy
|
||||
from http import client as http_client
|
||||
import logging
|
||||
import multiprocessing
|
||||
import sys
|
||||
import typing
|
||||
import typing_extensions
|
||||
|
||||
import urllib3
|
||||
|
||||
from comfy.api import exceptions
|
||||
from comfy.api.servers import server_0
|
||||
|
||||
# the server to use at each openapi document json path
|
||||
ServerInfo = typing.TypedDict(
|
||||
'ServerInfo',
|
||||
{
|
||||
'servers/0': server_0.Server0,
|
||||
},
|
||||
total=False
|
||||
)
|
||||
|
||||
|
||||
class ServerIndexInfoRequired(typing.TypedDict):
|
||||
servers: typing.Literal[0]
|
||||
|
||||
ServerIndexInfoOptional = typing.TypedDict(
|
||||
'ServerIndexInfoOptional',
|
||||
{
|
||||
},
|
||||
total=False
|
||||
)
|
||||
|
||||
|
||||
class ServerIndexInfo(ServerIndexInfoRequired, ServerIndexInfoOptional):
|
||||
"""
|
||||
the default server_index to use at each openapi document json path
|
||||
the fallback value is stored in the 'servers' key
|
||||
"""
|
||||
|
||||
|
||||
class ApiConfiguration(object):
|
||||
"""NOTE: This class is auto generated by OpenAPI JSON Schema Generator
|
||||
|
||||
Ref: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
Do not edit the class manually.
|
||||
|
||||
:param server_info: the servers that can be used to make endpoint calls
|
||||
:param server_index_info: index to servers configuration
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
server_info: typing.Optional[ServerInfo] = None,
|
||||
server_index_info: typing.Optional[ServerIndexInfo] = None,
|
||||
):
|
||||
"""Constructor
|
||||
"""
|
||||
# Authentication Settings
|
||||
self.security_scheme_info: typing.Dict[str, typing.Any] = {}
|
||||
self.security_index_info = {'security': 0}
|
||||
# Server Info
|
||||
self.server_info: ServerInfo = server_info or {
|
||||
'servers/0': server_0.Server0(),
|
||||
}
|
||||
self.server_index_info: ServerIndexInfo = server_index_info or {'servers': 0}
|
||||
self.logger = {}
|
||||
"""Logging Settings
|
||||
"""
|
||||
self.logger["package_logger"] = logging.getLogger("comfy.api")
|
||||
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
|
||||
self.logger_format = '%(asctime)s %(levelname)s %(message)s'
|
||||
"""Log format
|
||||
"""
|
||||
self.logger_stream_handler = None
|
||||
"""Log stream handler
|
||||
"""
|
||||
self.logger_file_handler = None
|
||||
"""Log file handler
|
||||
"""
|
||||
self.logger_file = None
|
||||
"""Debug file location
|
||||
"""
|
||||
self.debug = False
|
||||
"""Debug switch
|
||||
"""
|
||||
|
||||
self.verify_ssl = True
|
||||
"""SSL/TLS verification
|
||||
Set this to false to skip verifying SSL certificate when calling API
|
||||
from https server.
|
||||
"""
|
||||
self.ssl_ca_cert = None
|
||||
"""Set this to customize the certificate file to verify the peer.
|
||||
"""
|
||||
self.cert_file = None
|
||||
"""client certificate file
|
||||
"""
|
||||
self.key_file = None
|
||||
"""client key file
|
||||
"""
|
||||
self.assert_hostname = None
|
||||
"""Set this to True/False to enable/disable SSL hostname verification.
|
||||
"""
|
||||
|
||||
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
|
||||
"""urllib3 connection pool's maximum number of connections saved
|
||||
per pool. urllib3 uses 1 connection as default value, but this is
|
||||
not the best value when you are making a lot of possibly parallel
|
||||
requests to the same host, which is often the case here.
|
||||
cpu_count * 5 is used as default value to increase performance.
|
||||
"""
|
||||
|
||||
self.proxy = None
|
||||
"""Proxy URL
|
||||
"""
|
||||
self.proxy_headers = None
|
||||
"""Proxy headers
|
||||
"""
|
||||
self.safe_chars_for_path_param = ''
|
||||
"""Safe chars for path_param
|
||||
"""
|
||||
self.retries = None
|
||||
"""Adding retries to override urllib3 default value 3
|
||||
"""
|
||||
# Enable client side validation
|
||||
self.client_side_validation = True
|
||||
|
||||
# Options to pass down to the underlying urllib3 socket
|
||||
self.socket_options = None
|
||||
|
||||
def __deepcopy__(self, memo):
|
||||
cls = self.__class__
|
||||
result = cls.__new__(cls)
|
||||
memo[id(self)] = result
|
||||
for k, v in self.__dict__.items():
|
||||
if k not in ('logger', 'logger_file_handler'):
|
||||
setattr(result, k, copy.deepcopy(v, memo))
|
||||
# shallow copy of loggers
|
||||
result.logger = copy.copy(self.logger)
|
||||
# use setters to configure loggers
|
||||
result.logger_file = self.logger_file
|
||||
result.debug = self.debug
|
||||
return result
|
||||
|
||||
@property
|
||||
def logger_file(self):
|
||||
"""The logger file.
|
||||
|
||||
If the logger_file is None, then add stream handler and remove file
|
||||
handler. Otherwise, add file handler and remove stream handler.
|
||||
|
||||
:param value: The logger_file path.
|
||||
:type: str
|
||||
"""
|
||||
return self.__logger_file
|
||||
|
||||
@logger_file.setter
|
||||
def logger_file(self, value):
|
||||
"""The logger file.
|
||||
|
||||
If the logger_file is None, then add stream handler and remove file
|
||||
handler. Otherwise, add file handler and remove stream handler.
|
||||
|
||||
:param value: The logger_file path.
|
||||
:type: str
|
||||
"""
|
||||
self.__logger_file = value
|
||||
if self.__logger_file:
|
||||
# If set logging file,
|
||||
# then add file handler and remove stream handler.
|
||||
self.logger_file_handler = logging.FileHandler(self.__logger_file)
|
||||
self.logger_file_handler.setFormatter(self.logger_formatter)
|
||||
for _, logger in self.logger.items():
|
||||
logger.addHandler(self.logger_file_handler)
|
||||
|
||||
@property
|
||||
def debug(self):
|
||||
"""Debug status
|
||||
|
||||
:param value: The debug status, True or False.
|
||||
:type: bool
|
||||
"""
|
||||
return self.__debug
|
||||
|
||||
@debug.setter
|
||||
def debug(self, value):
|
||||
"""Debug status
|
||||
|
||||
:param value: The debug status, True or False.
|
||||
:type: bool
|
||||
"""
|
||||
self.__debug = value
|
||||
if self.__debug:
|
||||
# if debug status is True, turn on debug logging
|
||||
for _, logger in self.logger.items():
|
||||
logger.setLevel(logging.DEBUG)
|
||||
# turn on http_client debug
|
||||
http_client.HTTPConnection.debuglevel = 1
|
||||
else:
|
||||
# if debug status is False, turn off debug logging,
|
||||
# setting log level to default `logging.WARNING`
|
||||
for _, logger in self.logger.items():
|
||||
logger.setLevel(logging.WARNING)
|
||||
# turn off http_client debug
|
||||
http_client.HTTPConnection.debuglevel = 0
|
||||
|
||||
@property
|
||||
def logger_format(self):
|
||||
"""The logger format.
|
||||
|
||||
The logger_formatter will be updated when logger_format is set.
|
||||
|
||||
:param value: The format string.
|
||||
:type: str
|
||||
"""
|
||||
return self.__logger_format
|
||||
|
||||
@logger_format.setter
|
||||
def logger_format(self, value):
|
||||
"""The logger format.
|
||||
|
||||
The logger_formatter will be updated when logger_format is set.
|
||||
|
||||
:param value: The format string.
|
||||
:type: str
|
||||
"""
|
||||
self.__logger_format = value
|
||||
self.logger_formatter = logging.Formatter(self.__logger_format)
|
||||
|
||||
def to_debug_report(self):
|
||||
"""Gets the essential information for debugging.
|
||||
|
||||
:return: The report for debugging.
|
||||
"""
|
||||
return "Python SDK Debug Report:\n"\
|
||||
"OS: {env}\n"\
|
||||
"Python Version: {pyversion}\n"\
|
||||
"Version of the API: 0.0.1\n"\
|
||||
"SDK Package Version: 1.0.0".\
|
||||
format(env=sys.platform, pyversion=sys.version)
|
||||
|
||||
def get_server_url(
|
||||
self,
|
||||
key_prefix: typing.Literal[
|
||||
"servers",
|
||||
],
|
||||
index: typing.Optional[int],
|
||||
) -> str:
|
||||
"""Gets host URL based on the index
|
||||
:param index: array index of the host settings
|
||||
:return: URL based on host settings
|
||||
"""
|
||||
if index is not None:
|
||||
used_index = index
|
||||
else:
|
||||
try:
|
||||
used_index = self.server_index_info[key_prefix]
|
||||
except KeyError:
|
||||
# fallback and use the default index
|
||||
used_index = self.server_index_info.get("servers", 0)
|
||||
server_info_key = typing.cast(
|
||||
typing.Literal[
|
||||
"servers/0",
|
||||
],
|
||||
f"{key_prefix}/{used_index}"
|
||||
)
|
||||
try:
|
||||
server = self.server_info[server_info_key]
|
||||
except KeyError as ex:
|
||||
raise ex
|
||||
return server.url
|
||||
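A hypothetical sketch of using the generated ApiConfiguration above (it assumes the generated server_0.Server0 exposes a url attribute, which is how get_server_url resolves the base URL):

from comfy.api.configurations.api_configuration import ApiConfiguration

config = ApiConfiguration()             # defaults to the generated servers/0 entry
config.connection_pool_maxsize = 8      # default is multiprocessing.cpu_count() * 5
config.debug = True                     # turns on DEBUG logging and http.client debug output

base_url = config.get_server_url("servers", None)   # index None falls back to server_index_info["servers"]
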
108
comfy/api/configurations/schema_configuration.py
Normal file
@ -0,0 +1,108 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
import typing
|
||||
|
||||
from comfy.api import exceptions
|
||||
|
||||
|
||||
PYTHON_KEYWORD_TO_JSON_SCHEMA_KEYWORD = {
|
||||
'additional_properties': 'additionalProperties',
|
||||
'all_of': 'allOf',
|
||||
'any_of': 'anyOf',
|
||||
'const_value_to_name': 'const',
|
||||
'contains': 'contains',
|
||||
'dependent_required': 'dependentRequired',
|
||||
'dependent_schemas': 'dependentSchemas',
|
||||
'discriminator': 'discriminator',
|
||||
# default omitted because it has no validation impact
|
||||
'else_': 'else',
|
||||
'enum_value_to_name': 'enum',
|
||||
'exclusive_maximum': 'exclusiveMaximum',
|
||||
'exclusive_minimum': 'exclusiveMinimum',
|
||||
'format': 'format',
|
||||
'if_': 'if',
|
||||
'inclusive_maximum': 'maximum',
|
||||
'inclusive_minimum': 'minimum',
|
||||
'items': 'items',
|
||||
'max_contains': 'maxContains',
|
||||
'max_items': 'maxItems',
|
||||
'max_length': 'maxLength',
|
||||
'max_properties': 'maxProperties',
|
||||
'min_contains': 'minContains',
|
||||
'min_items': 'minItems',
|
||||
'min_length': 'minLength',
|
||||
'min_properties': 'minProperties',
|
||||
'multiple_of': 'multipleOf',
|
||||
'not_': 'not',
|
||||
'one_of': 'oneOf',
|
||||
'pattern': 'pattern',
|
||||
'pattern_properties': 'patternProperties',
|
||||
'prefix_items': 'prefixItems',
|
||||
'properties': 'properties',
|
||||
'property_names': 'propertyNames',
|
||||
'required': 'required',
|
||||
'then': 'then',
|
||||
'types': 'type',
|
||||
'unique_items': 'uniqueItems',
|
||||
'unevaluated_items': 'unevaluatedItems',
|
||||
'unevaluated_properties': 'unevaluatedProperties'
|
||||
}
|
||||
|
||||
class SchemaConfiguration:
|
||||
"""NOTE: This class is auto generated by OpenAPI JSON Schema Generator
|
||||
|
||||
Ref: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
Do not edit the class manually.
|
||||
|
||||
:param disabled_json_schema_keywords (set): Set of
|
||||
JSON schema validation keywords to disable JSON schema structural validation
|
||||
rules. The following keywords may be specified: multipleOf, maximum,
|
||||
exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern,
|
||||
maxItems, minItems.
|
||||
By default, the validation is performed for data generated locally by the client
|
||||
and data received from the server, independent of any validation performed by
|
||||
the server side. If the input data does not satisfy the JSON schema validation
|
||||
rules specified in the OpenAPI document, an exception is raised.
|
||||
If disabled_json_schema_keywords is set, structural validation is
|
||||
disabled. This can be useful to troubleshoot data validation problem, such as
|
||||
when the OpenAPI document validation rules do not match the actual API data
|
||||
received by the server.
|
||||
:param server_index: Index to servers configuration.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
disabled_json_schema_keywords = set(),
|
||||
):
|
||||
"""Constructor
|
||||
"""
|
||||
self.disabled_json_schema_keywords = disabled_json_schema_keywords
|
||||
|
||||
@property
|
||||
def disabled_json_schema_python_keywords(self) -> typing.Set[str]:
|
||||
return self.__disabled_json_schema_python_keywords
|
||||
|
||||
@property
|
||||
def disabled_json_schema_keywords(self) -> typing.Set[str]:
|
||||
return self.__disabled_json_schema_keywords
|
||||
|
||||
@disabled_json_schema_keywords.setter
|
||||
def disabled_json_schema_keywords(self, json_keywords: typing.Set[str]):
|
||||
disabled_json_schema_keywords = set()
|
||||
disabled_json_schema_python_keywords = set()
|
||||
for k in json_keywords:
|
||||
python_keywords = {key for key, val in PYTHON_KEYWORD_TO_JSON_SCHEMA_KEYWORD.items() if val == k}
|
||||
if not python_keywords:
|
||||
raise exceptions.ApiValueError(
"Invalid keyword: '{0}'".format(k))
|
||||
disabled_json_schema_keywords.add(k)
|
||||
disabled_json_schema_python_keywords.update(python_keywords)
|
||||
self.__disabled_json_schema_keywords = disabled_json_schema_keywords
|
||||
self.__disabled_json_schema_python_keywords = disabled_json_schema_python_keywords
|
||||
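A short sketch of the SchemaConfiguration above: disabling selected structural validation keywords and checking the derived python-keyword set (keywords not present in PYTHON_KEYWORD_TO_JSON_SCHEMA_KEYWORD are rejected):

from comfy.api.configurations.schema_configuration import SchemaConfiguration

cfg = SchemaConfiguration(disabled_json_schema_keywords={"maxItems", "minItems"})
assert cfg.disabled_json_schema_python_keywords == {"max_items", "min_items"}

# An unknown keyword raises ApiValueError, e.g.:
#   SchemaConfiguration(disabled_json_schema_keywords={"notAKeyword"})

# The configuration can then be passed to any generated validate() call, e.g.:
#   Prompt.validate(data, configuration=cfg)
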
@ -1,22 +1,132 @@
|
||||
class OpenApiException:
|
||||
pass
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
import dataclasses
|
||||
import typing
|
||||
|
||||
from comfy.api import api_response
|
||||
|
||||
|
||||
class ApiAttributeError:
|
||||
pass
|
||||
class OpenApiException(Exception):
|
||||
"""The base exception class for all OpenAPIExceptions"""
|
||||
|
||||
def render_path(path_to_item):
|
||||
"""Returns a string representation of a path"""
|
||||
result = ""
|
||||
for pth in path_to_item:
|
||||
if isinstance(pth, int):
|
||||
result += "[{0}]".format(pth)
|
||||
else:
|
||||
result += "['{0}']".format(pth)
|
||||
return result
|
||||
|
||||
|
||||
class ApiTypeError:
|
||||
pass
|
||||
class ApiTypeError(OpenApiException, TypeError):
|
||||
def __init__(self, msg, path_to_item=None, valid_classes=None,
|
||||
key_type=None):
|
||||
""" Raises an exception for TypeErrors
|
||||
|
||||
Args:
|
||||
msg (str): the exception message
|
||||
|
||||
Keyword Args:
|
||||
path_to_item (list): a list of keys and indices to get to the
|
||||
current_item
|
||||
None if unset
|
||||
valid_classes (tuple): the primitive classes that current item
|
||||
should be an instance of
|
||||
None if unset
|
||||
key_type (bool): False if our value is a value in a dict
|
||||
True if it is a key in a dict
|
||||
False if our item is an item in a list
|
||||
None if unset
|
||||
"""
|
||||
self.path_to_item = path_to_item
|
||||
self.valid_classes = valid_classes
|
||||
self.key_type = key_type
|
||||
full_msg = msg
|
||||
if path_to_item:
|
||||
full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
|
||||
super(ApiTypeError, self).__init__(full_msg)
|
||||
|
||||
|
||||
class ApiValueError:
|
||||
pass
|
||||
class ApiValueError(OpenApiException, ValueError):
|
||||
def __init__(self, msg, path_to_item=None):
|
||||
"""
|
||||
Args:
|
||||
msg (str): the exception message
|
||||
|
||||
Keyword Args:
|
||||
path_to_item (list) the path to the exception in the
|
||||
received_data dict. None if unset
|
||||
"""
|
||||
|
||||
self.path_to_item = path_to_item
|
||||
full_msg = msg
|
||||
if path_to_item:
|
||||
full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
|
||||
super(ApiValueError, self).__init__(full_msg)
|
||||
|
||||
|
||||
class ApiKeyError:
|
||||
pass
|
||||
class ApiAttributeError(OpenApiException, AttributeError):
|
||||
def __init__(self, msg, path_to_item=None):
|
||||
"""
|
||||
Raised when an attribute reference or assignment fails.
|
||||
|
||||
Args:
|
||||
msg (str): the exception message
|
||||
|
||||
Keyword Args:
|
||||
path_to_item (None/list) the path to the exception in the
|
||||
received_data dict
|
||||
"""
|
||||
self.path_to_item = path_to_item
|
||||
full_msg = msg
|
||||
if path_to_item:
|
||||
full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
|
||||
super(ApiAttributeError, self).__init__(full_msg)
|
||||
|
||||
|
||||
class ApiException:
|
||||
pass
|
||||
class ApiKeyError(OpenApiException, KeyError):
|
||||
def __init__(self, msg, path_to_item=None):
|
||||
"""
|
||||
Args:
|
||||
msg (str): the exception message
|
||||
|
||||
Keyword Args:
|
||||
path_to_item (None/list) the path to the exception in the
|
||||
received_data dict
|
||||
"""
|
||||
self.path_to_item = path_to_item
|
||||
full_msg = msg
|
||||
if path_to_item:
|
||||
full_msg = "{0} at {1}".format(msg, render_path(path_to_item))
|
||||
super(ApiKeyError, self).__init__(full_msg)
|
||||
|
||||
T = typing.TypeVar('T', bound=api_response.ApiResponse)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class ApiException(OpenApiException, typing.Generic[T]):
|
||||
status: int
|
||||
reason: typing.Optional[str] = None
|
||||
api_response: typing.Optional[T] = None
|
||||
|
||||
def __str__(self):
|
||||
"""Custom error messages for exception"""
|
||||
error_message = "({0})\n"\
|
||||
"Reason: {1}\n".format(self.status, self.reason)
|
||||
if self.api_response:
|
||||
if self.api_response.response.headers:
|
||||
error_message += "HTTP response headers: {0}\n".format(
|
||||
self.api_response.response.headers)
|
||||
if self.api_response.response.data:
|
||||
error_message += "HTTP response body: {0}\n".format(self.api_response.response.data)
|
||||
|
||||
return error_message
|
||||
|
||||
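A hypothetical sketch of handling the rewritten exception hierarchy above: ApiValueError for client-side schema validation failures, and ApiException for HTTP error statuses carried back with the typed response:

from comfy.api.exceptions import ApiException, ApiValueError

try:
    ...  # a generated endpoint call, e.g. queueing a prompt
except ApiValueError as err:
    print("request body failed client-side validation:", err)
except ApiException as err:
    print(err.status, err.reason)
    if err.api_response is not None:
        print(err.api_response.response.data)
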
@ -1,4 +1,4 @@
|
||||
openapi: 3.0.0
|
||||
openapi: 3.1.0
|
||||
info:
|
||||
title: comfyui
|
||||
version: 0.0.1
|
||||
@ -102,14 +102,35 @@ paths:
|
||||
name: subfolder
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: channel
|
||||
schema:
|
||||
type: string
|
||||
enum:
|
||||
- 'rgba'
|
||||
- 'rgb'
|
||||
- 'a'
|
||||
responses:
|
||||
'200':
|
||||
description: Successful retrieval of file
|
||||
headers:
|
||||
Content-Disposition:
|
||||
schema:
|
||||
type: string
|
||||
pattern: '^filename=".+"'
|
||||
content:
|
||||
image/png:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
image/jpeg:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
image/webp:
|
||||
schema:
|
||||
type: string
|
||||
format: binary
|
||||
'400':
|
||||
description: Bad Request
|
||||
'403':
|
||||
@ -143,10 +164,19 @@ paths:
|
||||
$ref: "#/components/schemas/PromptRequest"
|
||||
responses:
|
||||
'200':
|
||||
description: The prompt was queued.
|
||||
description: The prompt was queued and a prompt ID was returned.
|
||||
content:
|
||||
application/json:
|
||||
example:
|
||||
prompt_id: "some-value"
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
prompt_id:
|
||||
description: The ID of the prompt that was queued
|
||||
type: string
|
||||
text/plain:
|
||||
example: ""
|
||||
example: "some-value"
|
||||
schema:
|
||||
type: string
|
||||
'400':
|
||||
@ -167,9 +197,7 @@ paths:
|
||||
schema:
|
||||
type: object
|
||||
additionalProperties:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/Node"
|
||||
$ref: "#/components/schemas/Node"
|
||||
/history:
|
||||
get:
|
||||
summary: (UI) Get history
|
||||
@ -184,13 +212,21 @@ paths:
|
||||
additionalProperties:
|
||||
type: object
|
||||
properties:
|
||||
timestamp:
|
||||
type: number
|
||||
prompt:
|
||||
$ref: "#/components/schemas/QueueTuple"
|
||||
# todo: do the outputs format
|
||||
outputs:
|
||||
$ref: "#/components/schemas/Outputs"
|
||||
status:
|
||||
type: object
|
||||
properties:
|
||||
status_str:
|
||||
type: string
|
||||
completed:
|
||||
type: boolean
|
||||
messages:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
post:
|
||||
summary: (UI) Post history
|
||||
operationId: post_history
|
||||
@ -254,6 +290,35 @@ paths:
|
||||
responses:
|
||||
'200':
|
||||
description: OK
|
||||
/free:
|
||||
# from 6d281b4ff4ad3918a4f3b4ca4a8b547a2ba3bf80
|
||||
post:
|
||||
summary: (UI) Unload models or free memory
|
||||
operationId: free
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
oneOf:
|
||||
- type: object
|
||||
properties:
|
||||
unload_models:
|
||||
type: boolean
|
||||
enum: [ true ]
|
||||
required:
|
||||
- unload_models
|
||||
additionalProperties: false
|
||||
- type: object
|
||||
properties:
|
||||
free_memory:
|
||||
type: boolean
|
||||
enum: [ true ]
|
||||
required:
|
||||
- free_memory
|
||||
additionalProperties: false
|
||||
description: >-
|
||||
A POST request to /free with: {"unload_models":true} will unload models from VRAM.
|
||||
A POST request to /free with: {"free_memory":true} will unload models and free all cached data from the last run workflow.
|
||||
/api/v1/images/{digest}:
|
||||
get:
|
||||
summary: (API) Get image
|
||||
@ -327,9 +392,16 @@ paths:
|
||||
example: filename=ComfyUI_00001.png
|
||||
schema:
|
||||
type: string
|
||||
pattern: '^filename=.+'
|
||||
description: |
|
||||
The content of the last SaveImage node.
|
||||
content:
|
||||
image/png:
|
||||
schema:
|
||||
description: |
|
||||
Binary image data. This will be the first SaveImage node in the workflow.
|
||||
type: string
|
||||
format: binary
|
||||
application/json:
|
||||
schema:
|
||||
description: |
|
||||
@ -419,6 +491,17 @@ paths:
|
||||
The server is too busy to process this request right now.
|
||||
|
||||
This should only be returned by a load balancer. Standalone ComfyUI does not return this.
|
||||
parameters:
|
||||
- in: header
|
||||
name: Accept
|
||||
schema:
|
||||
type: string
|
||||
enum:
|
||||
- "application/json"
|
||||
- "image/png"
|
||||
required: false
|
||||
description: |
|
||||
Specifies the media type the client is willing to receive.
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
@ -482,16 +565,25 @@ components:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
output_is_list:
|
||||
description: Indicates if the output of the corresponding index as the item in this array is a list output.
|
||||
type: array
|
||||
items:
|
||||
type: boolean
|
||||
output_name:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
display_name:
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
category:
|
||||
type: string
|
||||
output_node:
|
||||
type: boolean
|
||||
ExtraData:
|
||||
type: object
|
||||
properties:
|
||||
@ -698,19 +790,25 @@ components:
|
||||
oneOf:
|
||||
- type: number
|
||||
- type: string
|
||||
- type: boolean
|
||||
- type: array
|
||||
description: |
|
||||
When this is specified, it is a node connection: the source node ID followed by the source output index.
|
||||
items:
|
||||
minItems: 2
|
||||
maxItems: 2
|
||||
oneOf:
|
||||
prefixItems:
|
||||
- type: string
|
||||
- type: integer
|
||||
description: The inputs for the node, which can be scalar values or references to other nodes' outputs.
|
||||
is_changed:
|
||||
type: string
|
||||
description: A string representing whether the node has changed (optional).
|
||||
oneOf:
|
||||
- type: array
|
||||
description: An array of hashes representing whether the node has changed (optional).
|
||||
items:
|
||||
type: string
|
||||
- type: string
|
||||
description: A string representing whether the node has changed (optional).
|
||||
Workflow:
|
||||
type: object
|
||||
properties:
|
||||
@ -814,14 +912,47 @@ components:
|
||||
QueueTuple:
|
||||
type: array
|
||||
description: |
|
||||
The first item is the queue priority
|
||||
The second item is the hash id of the prompt object
|
||||
The third item is a Prompt
|
||||
The fourth item is an ExtraData
|
||||
items:
|
||||
minItems: 4
|
||||
maxItems: 4
|
||||
oneOf:
|
||||
- type: number
|
||||
- $ref: "#/components/schemas/Prompt"
|
||||
- $ref: "#/components/schemas/ExtraData"
|
||||
An item that was added to the queue.
|
||||
minItems: 3
|
||||
maxItems: 5
|
||||
prefixItems:
|
||||
- type: number
|
||||
description: Queue priority.
|
||||
- type: string
|
||||
description: The hash id of the prompt object. This should be the prompt ID.
|
||||
- $ref: "#/components/schemas/Prompt"
|
||||
- $ref: "#/components/schemas/ExtraData"
|
||||
- type: array
|
||||
description: A list of "good output" node IDs in the prompt.
|
||||
items:
|
||||
type: string
|
||||
Outputs:
|
||||
description: |
|
||||
The keys are node IDs, the values are output objects
|
||||
type: object
|
||||
additionalProperties:
|
||||
$ref: "#/components/schemas/Output"
|
||||
Output:
|
||||
type: object
|
||||
properties:
|
||||
images:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/FileOutput"
|
||||
latents:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/components/schemas/FileOutput"
|
||||
FileOutput:
|
||||
type: object
|
||||
required:
|
||||
- filename
|
||||
- subfolder
|
||||
- type
|
||||
properties:
|
||||
filename:
|
||||
type: string
|
||||
subfolder:
|
||||
type: string
|
||||
type:
|
||||
type: string
|
||||
@@ -3,10 +3,10 @@ outputDir: ./
generatorName: python
globalProperties:
  supportingFiles:
    - "__init__.py"
    - "schemas.py"
    - "exceptions.py"
    - "configuration.py"
    - "api_response.py"
additionalProperties:
  generateSourceCodeOnly: true
  packageName: comfy.api

3
comfy/api/paths/__init__.py
Normal file
@@ -0,0 +1,3 @@
# do not import all endpoints into this module because that uses a lot of memory and stack frames
# if you need the ability to import all endpoints from this module, import them with
# from comfy.api.apis import path_to_api
5
comfy/api/paths/api_v1_images_digest/__init__.py
Normal file
@@ -0,0 +1,5 @@
# do not import all endpoints into this module because that uses a lot of memory and stack frames
# if you need the ability to import all endpoints from this module, import them with
# from comfy.api.apis.paths.api_v1_images_digest import ApiV1ImagesDigest

path = "/api/v1/images/{digest}"
162
comfy/api/paths/api_v1_images_digest/get/operation.py
Normal file
@ -0,0 +1,162 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api import api_client, exceptions
|
||||
from comfy.api.shared_imports.operation_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
from .. import path
|
||||
from .responses import (
|
||||
response_200,
|
||||
response_404,
|
||||
)
|
||||
from .parameters import parameter_0
|
||||
from .path_parameters import PathParameters, PathParametersDictInput, PathParametersDict
|
||||
path_parameter_classes = (
|
||||
parameter_0.Parameter0,
|
||||
)
|
||||
|
||||
|
||||
__StatusCodeToResponse = typing.TypedDict(
|
||||
'__StatusCodeToResponse',
|
||||
{
|
||||
'200': typing.Type[response_200.ResponseFor200],
|
||||
'404': typing.Type[response_404.ResponseFor404],
|
||||
}
|
||||
)
|
||||
_status_code_to_response: __StatusCodeToResponse = {
|
||||
'200': response_200.ResponseFor200,
|
||||
'404': response_404.ResponseFor404,
|
||||
}
|
||||
_non_error_status_codes = frozenset({
|
||||
'200',
|
||||
})
|
||||
_error_status_codes = frozenset({
|
||||
'404',
|
||||
})
|
||||
|
||||
_all_accept_content_types = (
|
||||
"image/png",
|
||||
)
|
||||
|
||||
|
||||
class BaseApi(api_client.Api):
|
||||
@typing.overload
|
||||
def _api_v1_images_digest_get(
|
||||
self,
|
||||
path_params: typing.Union[
|
||||
PathParametersDictInput,
|
||||
PathParametersDict
|
||||
],
|
||||
*,
|
||||
skip_deserialization: typing.Literal[False] = False,
|
||||
accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
|
||||
server_index: typing.Optional[int] = None,
|
||||
stream: bool = False,
|
||||
timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
|
||||
) -> response_200.ApiResponse: ...
|
||||
|
||||
@typing.overload
|
||||
def _api_v1_images_digest_get(
|
||||
self,
|
||||
path_params: typing.Union[
|
||||
PathParametersDictInput,
|
||||
PathParametersDict
|
||||
],
|
||||
*,
|
||||
skip_deserialization: typing.Literal[True],
|
||||
accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
|
||||
server_index: typing.Optional[int] = None,
|
||||
stream: bool = False,
|
||||
timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
|
||||
) -> api_response.ApiResponseWithoutDeserialization: ...
|
||||
|
||||
def _api_v1_images_digest_get(
|
||||
self,
|
||||
path_params: typing.Union[
|
||||
PathParametersDictInput,
|
||||
PathParametersDict
|
||||
],
|
||||
*,
|
||||
skip_deserialization: bool = False,
|
||||
accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
|
||||
server_index: typing.Optional[int] = None,
|
||||
stream: bool = False,
|
||||
timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
|
||||
):
|
||||
"""
|
||||
(API) Get image
|
||||
:param skip_deserialization: If true then api_response.response will be set but
|
||||
api_response.body and api_response.headers will not be deserialized into schema
|
||||
class instances
|
||||
"""
|
||||
path_params = PathParameters.validate(
|
||||
path_params,
|
||||
configuration=self.api_client.schema_configuration
|
||||
)
|
||||
used_path, query_params_suffix = self._get_used_path(
|
||||
path,
|
||||
path_parameters=path_parameter_classes,
|
||||
path_params=path_params,
|
||||
skip_validation=True
|
||||
)
|
||||
headers = self._get_headers(accept_content_types=accept_content_types)
|
||||
# TODO add cookie handling
|
||||
host = self.api_client.configuration.get_server_url(
|
||||
"servers", server_index
|
||||
)
|
||||
|
||||
raw_response = self.api_client.call_api(
|
||||
resource_path=used_path,
|
||||
method='get',
|
||||
host=host,
|
||||
headers=headers,
|
||||
stream=stream,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
if skip_deserialization:
|
||||
skip_deser_response = api_response.ApiResponseWithoutDeserialization(response=raw_response)
|
||||
self._verify_response_status(skip_deser_response)
|
||||
return skip_deser_response
|
||||
|
||||
status = str(raw_response.status)
|
||||
if status in _non_error_status_codes:
|
||||
status_code = typing.cast(
|
||||
typing.Literal[
|
||||
'200',
|
||||
],
|
||||
status
|
||||
)
|
||||
return _status_code_to_response[status_code].deserialize(
|
||||
raw_response, self.api_client.schema_configuration)
|
||||
elif status in _error_status_codes:
|
||||
error_status_code = typing.cast(
|
||||
typing.Literal[
|
||||
'404',
|
||||
],
|
||||
status
|
||||
)
|
||||
error_response = _status_code_to_response[error_status_code].deserialize(
|
||||
raw_response, self.api_client.schema_configuration)
|
||||
raise exceptions.ApiException(
|
||||
status=error_response.response.status,
|
||||
reason=error_response.response.reason,
|
||||
api_response=error_response
|
||||
)
|
||||
|
||||
response = api_response.ApiResponseWithoutDeserialization(response=raw_response)
|
||||
self._verify_response_status(response)
|
||||
return response
|
||||
|
||||
|
||||
class ApiV1ImagesDigestGet(BaseApi):
|
||||
# this class is used by api classes that refer to endpoints with operationId.snakeCase fn names
|
||||
api_v1_images_digest_get = BaseApi._api_v1_images_digest_get
|
||||
|
||||
|
||||
class ApiForGet(BaseApi):
|
||||
# this class is used by api classes that refer to endpoints by path and http method names
|
||||
get = BaseApi._api_v1_images_digest_get
|
||||
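A minimal usage sketch for this generated endpoint follows (not part of the diff). The ApiClient construction and its default configuration are assumptions about the generated package; the method name, path_params shape, and accept_content_types come from the operation above.

from comfy.api import api_client
from comfy.api.paths.api_v1_images_digest.get.operation import ApiV1ImagesDigestGet

client = api_client.ApiClient()             # assumed: default configuration targets a local ComfyUI server
images_api = ApiV1ImagesDigestGet(client)   # assumed: generated Api classes accept an ApiClient

api_response = images_api.api_v1_images_digest_get(
    path_params={"digest": "sha256:..."},   # placeholder digest of a previously generated image
    accept_content_types=("image/png",),    # request the raw PNG instead of JSON
    skip_deserialization=True,              # keep the body as bytes / file-like
)
print(api_response.response.status)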
@ -0,0 +1,16 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.header_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
from . import schema
|
||||
|
||||
|
||||
class Parameter0(api_client.PathParameter):
|
||||
name = "digest"
|
||||
style = api_client.ParameterStyle.SIMPLE
|
||||
schema: typing_extensions.TypeAlias = schema.Schema
|
||||
required = True
|
||||
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
Schema: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
97
comfy/api/paths/api_v1_images_digest/get/path_parameters.py
Normal file
@ -0,0 +1,97 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
AdditionalProperties: typing_extensions.TypeAlias = schemas.NotAnyTypeSchema
|
||||
|
||||
from comfy.api.paths.api_v1_images_digest.get.parameters.parameter_0 import schema
|
||||
Properties = typing.TypedDict(
|
||||
'Properties',
|
||||
{
|
||||
"digest": typing.Type[schema.Schema],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class PathParametersDict(schemas.immutabledict[str, str]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"digest",
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
|
||||
def __new__(
|
||||
cls,
|
||||
*,
|
||||
digest: str,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
):
|
||||
arg_: typing.Dict[str, typing.Any] = {
|
||||
"digest": digest,
|
||||
}
|
||||
used_arg_ = typing.cast(PathParametersDictInput, arg_)
|
||||
return PathParameters.validate(used_arg_, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
PathParametersDictInput,
|
||||
PathParametersDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> PathParametersDict:
|
||||
return PathParameters.validate(arg, configuration=configuration)
|
||||
|
||||
@property
|
||||
def digest(self) -> str:
|
||||
return self.__getitem__("digest")
|
||||
PathParametersDictInput = typing.TypedDict(
|
||||
'PathParametersDictInput',
|
||||
{
|
||||
"digest": str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class PathParameters(
|
||||
schemas.Schema[PathParametersDict, tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
required: typing.FrozenSet[str] = frozenset({
|
||||
"digest",
|
||||
})
|
||||
properties: Properties = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties)) # type: ignore
|
||||
additional_properties: typing.Type[AdditionalProperties] = dataclasses.field(default_factory=lambda: AdditionalProperties) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: PathParametersDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
PathParametersDictInput,
|
||||
PathParametersDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> PathParametersDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
@ -0,0 +1,28 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.response_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
from .content.image_png import schema as image_png_schema
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponse(api_response.ApiResponse):
|
||||
body: typing.Union[bytes, schemas.FileIO]
|
||||
headers: schemas.Unset
|
||||
|
||||
|
||||
class ResponseFor200(api_client.OpenApiResponse[ApiResponse]):
|
||||
@classmethod
|
||||
def get_response(cls, response, headers, body) -> ApiResponse:
|
||||
return ApiResponse(response=response, body=body, headers=headers)
|
||||
|
||||
|
||||
class ImagePngMediaType(api_client.MediaType):
|
||||
schema: typing_extensions.TypeAlias = image_png_schema.Schema
|
||||
content = {
|
||||
'image/png': ImagePngMediaType,
|
||||
}
|
||||
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
Schema: typing_extensions.TypeAlias = schemas.BinarySchema
|
||||
@ -0,0 +1,19 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.response_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponse(api_response.ApiResponse):
|
||||
body: schemas.Unset
|
||||
headers: schemas.Unset
|
||||
|
||||
|
||||
class ResponseFor404(api_client.OpenApiResponse[ApiResponse]):
|
||||
@classmethod
|
||||
def get_response(cls, response, headers, body) -> ApiResponse:
|
||||
return ApiResponse(response=response, body=body, headers=headers)
|
||||
5
comfy/api/paths/api_v1_prompts/__init__.py
Normal file
@@ -0,0 +1,5 @@
# do not import all endpoints into this module because that uses a lot of memory and stack frames
# if you need the ability to import all endpoints from this module, import them with
# from comfy.api.apis.paths.api_v1_prompts import ApiV1Prompts

path = "/api/v1/prompts"
0
comfy/api/paths/api_v1_prompts/get/__init__.py
Normal file
136
comfy/api/paths/api_v1_prompts/get/operation.py
Normal file
@ -0,0 +1,136 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api import api_client, exceptions
|
||||
from comfy.api.shared_imports.operation_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
from .. import path
|
||||
from .responses import (
|
||||
response_200,
|
||||
response_404,
|
||||
)
|
||||
|
||||
|
||||
__StatusCodeToResponse = typing.TypedDict(
|
||||
'__StatusCodeToResponse',
|
||||
{
|
||||
'200': typing.Type[response_200.ResponseFor200],
|
||||
'404': typing.Type[response_404.ResponseFor404],
|
||||
}
|
||||
)
|
||||
_status_code_to_response: __StatusCodeToResponse = {
|
||||
'200': response_200.ResponseFor200,
|
||||
'404': response_404.ResponseFor404,
|
||||
}
|
||||
_non_error_status_codes = frozenset({
|
||||
'200',
|
||||
})
|
||||
_error_status_codes = frozenset({
|
||||
'404',
|
||||
})
|
||||
|
||||
_all_accept_content_types = (
|
||||
"application/json",
|
||||
)
|
||||
|
||||
|
||||
class BaseApi(api_client.Api):
|
||||
@typing.overload
|
||||
def _api_v1_prompts_get(
|
||||
self,
|
||||
*,
|
||||
skip_deserialization: typing.Literal[False] = False,
|
||||
accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
|
||||
server_index: typing.Optional[int] = None,
|
||||
stream: bool = False,
|
||||
timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
|
||||
) -> response_200.ApiResponse: ...
|
||||
|
||||
@typing.overload
|
||||
def _api_v1_prompts_get(
|
||||
self,
|
||||
*,
|
||||
skip_deserialization: typing.Literal[True],
|
||||
accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
|
||||
server_index: typing.Optional[int] = None,
|
||||
stream: bool = False,
|
||||
timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
|
||||
) -> api_response.ApiResponseWithoutDeserialization: ...
|
||||
|
||||
def _api_v1_prompts_get(
|
||||
self,
|
||||
*,
|
||||
skip_deserialization: bool = False,
|
||||
accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
|
||||
server_index: typing.Optional[int] = None,
|
||||
stream: bool = False,
|
||||
timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
|
||||
):
|
||||
"""
|
||||
(API) Get prompt
|
||||
:param skip_deserialization: If true then api_response.response will be set but
|
||||
api_response.body and api_response.headers will not be deserialized into schema
|
||||
class instances
|
||||
"""
|
||||
used_path = path
|
||||
headers = self._get_headers(accept_content_types=accept_content_types)
|
||||
# TODO add cookie handling
|
||||
host = self.api_client.configuration.get_server_url(
|
||||
"servers", server_index
|
||||
)
|
||||
|
||||
raw_response = self.api_client.call_api(
|
||||
resource_path=used_path,
|
||||
method='get',
|
||||
host=host,
|
||||
headers=headers,
|
||||
stream=stream,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
if skip_deserialization:
|
||||
skip_deser_response = api_response.ApiResponseWithoutDeserialization(response=raw_response)
|
||||
self._verify_response_status(skip_deser_response)
|
||||
return skip_deser_response
|
||||
|
||||
status = str(raw_response.status)
|
||||
if status in _non_error_status_codes:
|
||||
status_code = typing.cast(
|
||||
typing.Literal[
|
||||
'200',
|
||||
],
|
||||
status
|
||||
)
|
||||
return _status_code_to_response[status_code].deserialize(
|
||||
raw_response, self.api_client.schema_configuration)
|
||||
elif status in _error_status_codes:
|
||||
error_status_code = typing.cast(
|
||||
typing.Literal[
|
||||
'404',
|
||||
],
|
||||
status
|
||||
)
|
||||
error_response = _status_code_to_response[error_status_code].deserialize(
|
||||
raw_response, self.api_client.schema_configuration)
|
||||
raise exceptions.ApiException(
|
||||
status=error_response.response.status,
|
||||
reason=error_response.response.reason,
|
||||
api_response=error_response
|
||||
)
|
||||
|
||||
response = api_response.ApiResponseWithoutDeserialization(response=raw_response)
|
||||
self._verify_response_status(response)
|
||||
return response
|
||||
|
||||
|
||||
class ApiV1PromptsGet(BaseApi):
|
||||
# this class is used by api classes that refer to endpoints with operationId.snakeCase fn names
|
||||
api_v1_prompts_get = BaseApi._api_v1_prompts_get
|
||||
|
||||
|
||||
class ApiForGet(BaseApi):
|
||||
# this class is used by api classes that refer to endpoints by path and http method names
|
||||
get = BaseApi._api_v1_prompts_get
|
||||
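A usage sketch for this GET operation (not part of the diff), under the same client-wiring assumptions as the image example above.

from comfy.api import api_client
from comfy.api.paths.api_v1_prompts.get.operation import ApiV1PromptsGet

prompts_get_api = ApiV1PromptsGet(api_client.ApiClient())  # assumed client wiring

api_response = prompts_get_api.api_v1_prompts_get()  # the 200 body deserializes to a PromptDict
print(api_response.body)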
@ -0,0 +1,28 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.response_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
from .content.application_json import schema as application_json_schema
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponse(api_response.ApiResponse):
|
||||
body: application_json_schema.prompt.PromptDict
|
||||
headers: schemas.Unset
|
||||
|
||||
|
||||
class ResponseFor200(api_client.OpenApiResponse[ApiResponse]):
|
||||
@classmethod
|
||||
def get_response(cls, response, headers, body) -> ApiResponse:
|
||||
return ApiResponse(response=response, body=body, headers=headers)
|
||||
|
||||
|
||||
class ApplicationJsonMediaType(api_client.MediaType):
|
||||
schema: typing_extensions.TypeAlias = application_json_schema.Schema2
|
||||
content = {
|
||||
'application/json': ApplicationJsonMediaType,
|
||||
}
|
||||
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
from comfy.api.components.schema import prompt
|
||||
Schema2: typing_extensions.TypeAlias = prompt.Prompt
|
||||
@ -0,0 +1,19 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.response_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponse(api_response.ApiResponse):
|
||||
body: schemas.Unset
|
||||
headers: schemas.Unset
|
||||
|
||||
|
||||
class ResponseFor404(api_client.OpenApiResponse[ApiResponse]):
|
||||
@classmethod
|
||||
def get_response(cls, response, headers, body) -> ApiResponse:
|
||||
return ApiResponse(response=response, body=body, headers=headers)
|
||||
0
comfy/api/paths/api_v1_prompts/post/__init__.py
Normal file
240
comfy/api/paths/api_v1_prompts/post/operation.py
Normal file
@ -0,0 +1,240 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api import api_client, exceptions
|
||||
from comfy.api.shared_imports.operation_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
from comfy.api.components.schema import prompt
|
||||
from comfy.api.paths.api_v1_prompts.post.request_body.content.multipart_formdata import schema
|
||||
|
||||
from .. import path
|
||||
from .responses import (
|
||||
response_200,
|
||||
response_204,
|
||||
response_400,
|
||||
response_429,
|
||||
response_500,
|
||||
response_503,
|
||||
response_507,
|
||||
)
|
||||
from . import request_body
|
||||
|
||||
|
||||
__StatusCodeToResponse = typing.TypedDict(
|
||||
'__StatusCodeToResponse',
|
||||
{
|
||||
'200': typing.Type[response_200.ResponseFor200],
|
||||
'204': typing.Type[response_204.ResponseFor204],
|
||||
'400': typing.Type[response_400.ResponseFor400],
|
||||
'429': typing.Type[response_429.ResponseFor429],
|
||||
'500': typing.Type[response_500.ResponseFor500],
|
||||
'503': typing.Type[response_503.ResponseFor503],
|
||||
'507': typing.Type[response_507.ResponseFor507],
|
||||
}
|
||||
)
|
||||
_status_code_to_response: __StatusCodeToResponse = {
|
||||
'200': response_200.ResponseFor200,
|
||||
'204': response_204.ResponseFor204,
|
||||
'400': response_400.ResponseFor400,
|
||||
'429': response_429.ResponseFor429,
|
||||
'500': response_500.ResponseFor500,
|
||||
'503': response_503.ResponseFor503,
|
||||
'507': response_507.ResponseFor507,
|
||||
}
|
||||
_non_error_status_codes = frozenset({
|
||||
'200',
|
||||
'204',
|
||||
})
|
||||
_error_status_codes = frozenset({
|
||||
'400',
|
||||
'429',
|
||||
'500',
|
||||
'507',
|
||||
'503',
|
||||
})
|
||||
|
||||
_all_accept_content_types = (
|
||||
"application/json",
|
||||
)
|
||||
|
||||
|
||||
class BaseApi(api_client.Api):
|
||||
@typing.overload
|
||||
def _api_v1_prompts_post(
|
||||
self,
|
||||
body: typing.Union[
|
||||
prompt.PromptDictInput,
|
||||
prompt.PromptDict,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
*,
|
||||
skip_deserialization: typing.Literal[False] = False,
|
||||
content_type: typing.Literal["application/json"] = "application/json",
|
||||
accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
|
||||
server_index: typing.Optional[int] = None,
|
||||
stream: bool = False,
|
||||
timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
|
||||
) -> typing.Union[
|
||||
response_200.ApiResponse,
|
||||
response_204.ApiResponse,
|
||||
]: ...
|
||||
|
||||
@typing.overload
|
||||
def _api_v1_prompts_post(
|
||||
self,
|
||||
body: typing.Union[
|
||||
prompt.PromptDictInput,
|
||||
prompt.PromptDict,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
*,
|
||||
skip_deserialization: typing.Literal[True],
|
||||
content_type: typing.Literal["application/json"] = "application/json",
|
||||
accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
|
||||
server_index: typing.Optional[int] = None,
|
||||
stream: bool = False,
|
||||
timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
|
||||
) -> api_response.ApiResponseWithoutDeserialization: ...
|
||||
|
||||
@typing.overload
|
||||
def _api_v1_prompts_post(
|
||||
self,
|
||||
body: typing.Union[
|
||||
schema.SchemaDictInput,
|
||||
schema.SchemaDict,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
*,
|
||||
skip_deserialization: typing.Literal[False] = False,
|
||||
content_type: typing.Literal["multipart/formdata"],
|
||||
accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
|
||||
server_index: typing.Optional[int] = None,
|
||||
stream: bool = False,
|
||||
timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
|
||||
) -> typing.Union[
|
||||
response_200.ApiResponse,
|
||||
response_204.ApiResponse,
|
||||
]: ...
|
||||
|
||||
@typing.overload
|
||||
def _api_v1_prompts_post(
|
||||
self,
|
||||
body: typing.Union[
|
||||
schema.SchemaDictInput,
|
||||
schema.SchemaDict,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
*,
|
||||
skip_deserialization: typing.Literal[True],
|
||||
content_type: typing.Literal["multipart/formdata"],
|
||||
accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
|
||||
server_index: typing.Optional[int] = None,
|
||||
stream: bool = False,
|
||||
timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
|
||||
) -> api_response.ApiResponseWithoutDeserialization: ...
|
||||
|
||||
def _api_v1_prompts_post(
|
||||
self,
|
||||
body: typing.Union[
|
||||
typing.Union[
|
||||
prompt.PromptDictInput,
|
||||
prompt.PromptDict,
|
||||
],
|
||||
typing.Union[
|
||||
schema.SchemaDictInput,
|
||||
schema.SchemaDict,
|
||||
],
|
||||
schemas.Unset,
|
||||
] = schemas.unset,
|
||||
*,
|
||||
skip_deserialization: bool = False,
|
||||
content_type: typing.Literal[
|
||||
"application/json",
|
||||
"multipart/formdata",
|
||||
] = "application/json",
|
||||
accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
|
||||
server_index: typing.Optional[int] = None,
|
||||
stream: bool = False,
|
||||
timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
|
||||
):
|
||||
"""
|
||||
(API) Generate image
|
||||
:param skip_deserialization: If true then api_response.response will be set but
|
||||
api_response.body and api_response.headers will not be deserialized into schema
|
||||
class instances
|
||||
"""
|
||||
used_path = path
|
||||
headers = self._get_headers(accept_content_types=accept_content_types)
|
||||
# TODO add cookie handling
|
||||
|
||||
fields, serialized_body = self._get_fields_and_body(
|
||||
request_body=request_body.RequestBody,
|
||||
body=body,
|
||||
content_type=content_type,
|
||||
headers=headers
|
||||
)
|
||||
host = self.api_client.configuration.get_server_url(
|
||||
"servers", server_index
|
||||
)
|
||||
|
||||
raw_response = self.api_client.call_api(
|
||||
resource_path=used_path,
|
||||
method='post',
|
||||
host=host,
|
||||
headers=headers,
|
||||
fields=fields,
|
||||
body=serialized_body,
|
||||
stream=stream,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
if skip_deserialization:
|
||||
skip_deser_response = api_response.ApiResponseWithoutDeserialization(response=raw_response)
|
||||
self._verify_response_status(skip_deser_response)
|
||||
return skip_deser_response
|
||||
|
||||
status = str(raw_response.status)
|
||||
if status in _non_error_status_codes:
|
||||
status_code = typing.cast(
|
||||
typing.Literal[
|
||||
'200',
|
||||
'204',
|
||||
],
|
||||
status
|
||||
)
|
||||
return _status_code_to_response[status_code].deserialize(
|
||||
raw_response, self.api_client.schema_configuration)
|
||||
elif status in _error_status_codes:
|
||||
error_status_code = typing.cast(
|
||||
typing.Literal[
|
||||
'400',
|
||||
'429',
|
||||
'500',
|
||||
'507',
|
||||
'503',
|
||||
],
|
||||
status
|
||||
)
|
||||
error_response = _status_code_to_response[error_status_code].deserialize(
|
||||
raw_response, self.api_client.schema_configuration)
|
||||
raise exceptions.ApiException(
|
||||
status=error_response.response.status,
|
||||
reason=error_response.response.reason,
|
||||
api_response=error_response
|
||||
)
|
||||
|
||||
response = api_response.ApiResponseWithoutDeserialization(response=raw_response)
|
||||
self._verify_response_status(response)
|
||||
return response
|
||||
|
||||
|
||||
class ApiV1PromptsPost(BaseApi):
|
||||
# this class is used by api classes that refer to endpoints with operationId.snakeCase fn names
|
||||
api_v1_prompts_post = BaseApi._api_v1_prompts_post
|
||||
|
||||
|
||||
class ApiForPost(BaseApi):
|
||||
# this class is used by api classes that refer to endpoints by path and http method names
|
||||
post = BaseApi._api_v1_prompts_post
|
||||
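A minimal usage sketch for the prompt POST (not part of the diff). The client wiring and the one-node prompt fragment are assumptions; the body, content_type, and response handling follow the overloads and responses defined above.

from comfy.api import api_client
from comfy.api.paths.api_v1_prompts.post.operation import ApiV1PromptsPost

client = api_client.ApiClient()          # assumed: default configuration targets a local ComfyUI server
prompts_api = ApiV1PromptsPost(client)   # assumed: generated Api classes accept an ApiClient

prompt_body = {                          # illustrative fragment only; a real prompt needs a complete graph
    "1": {"class_type": "CheckpointLoaderSimple", "inputs": {"ckpt_name": "model.safetensors"}},
}

api_response = prompts_api.api_v1_prompts_post(
    body=prompt_body,
    content_type="application/json",
)
# A 200 response carries the optional "urls" body plus the Digest / Location headers
# declared in responses/response_200; a 204 response has no body.
print(api_response.response.status)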
25
comfy/api/paths/api_v1_prompts/post/request_body/__init__.py
Normal file
@ -0,0 +1,25 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.header_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
from .content.application_json import schema as application_json_schema
|
||||
from .content.multipart_formdata import schema as multipart_formdata_schema
|
||||
|
||||
|
||||
class RequestBody(api_client.RequestBody):
|
||||
|
||||
|
||||
class ApplicationJsonMediaType(api_client.MediaType):
|
||||
schema: typing_extensions.TypeAlias = application_json_schema.Schema2
|
||||
|
||||
|
||||
class MultipartFormdataMediaType(api_client.MediaType):
|
||||
schema: typing_extensions.TypeAlias = multipart_formdata_schema.Schema
|
||||
content = {
|
||||
'application/json': ApplicationJsonMediaType,
|
||||
'multipart/formdata': MultipartFormdataMediaType,
|
||||
}
|
||||
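Continuing the sketch above (still an assumption, not part of the diff): the multipart/formdata media type registered here means the same POST can also carry an optional prompt together with raw file payloads, per the request-body schema added in this commit.

with open("input.png", "rb") as f:
    api_response = prompts_api.api_v1_prompts_post(
        body={"prompt": prompt_body, "files": [f.read()]},
        content_type="multipart/formdata",
    )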
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
from comfy.api.components.schema import prompt
|
||||
Schema2: typing_extensions.TypeAlias = prompt.Prompt
|
||||
@ -0,0 +1,186 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
Items: typing_extensions.TypeAlias = schemas.BinarySchema
|
||||
|
||||
|
||||
class FilesTuple(
|
||||
typing.Tuple[
|
||||
typing.Union[bytes, schemas.FileIO],
|
||||
...
|
||||
]
|
||||
):
|
||||
|
||||
def __new__(cls, arg: typing.Union[FilesTupleInput, FilesTuple], configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None):
|
||||
return Files.validate(arg, configuration=configuration)
|
||||
FilesTupleInput = typing.Union[
|
||||
typing.List[
|
||||
typing.Union[
|
||||
bytes,
|
||||
io.FileIO,
|
||||
io.BufferedReader,
|
||||
schemas.FileIO
|
||||
],
|
||||
],
|
||||
typing.Tuple[
|
||||
typing.Union[
|
||||
bytes,
|
||||
io.FileIO,
|
||||
io.BufferedReader,
|
||||
schemas.FileIO
|
||||
],
|
||||
...
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Files(
|
||||
schemas.Schema[schemas.immutabledict, FilesTuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({tuple})
|
||||
items: typing.Type[Items] = dataclasses.field(default_factory=lambda: Items) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
tuple: FilesTuple
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
FilesTupleInput,
|
||||
FilesTuple,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> FilesTuple:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
from comfy.api.components.schema import prompt
|
||||
Properties = typing.TypedDict(
|
||||
'Properties',
|
||||
{
|
||||
"prompt": typing.Type[prompt.Prompt],
|
||||
"files": typing.Type[Files],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class SchemaDict(schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"prompt",
|
||||
"files",
|
||||
})
|
||||
|
||||
def __new__(
|
||||
cls,
|
||||
*,
|
||||
prompt: typing.Union[
|
||||
prompt.PromptDictInput,
|
||||
prompt.PromptDict,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
files: typing.Union[
|
||||
FilesTupleInput,
|
||||
FilesTuple,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: schemas.INPUT_TYPES_ALL,
|
||||
):
|
||||
arg_: typing.Dict[str, typing.Any] = {}
|
||||
for key_, val in (
|
||||
("prompt", prompt),
|
||||
("files", files),
|
||||
):
|
||||
if isinstance(val, schemas.Unset):
|
||||
continue
|
||||
arg_[key_] = val
|
||||
arg_.update(kwargs)
|
||||
used_arg_ = typing.cast(SchemaDictInput, arg_)
|
||||
return Schema.validate(used_arg_, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
SchemaDictInput,
|
||||
SchemaDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> SchemaDict:
|
||||
return Schema.validate(arg, configuration=configuration)
|
||||
|
||||
@property
|
||||
def prompt(self) -> typing.Union[prompt.PromptDict, schemas.Unset]:
|
||||
val = self.get("prompt", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
prompt.PromptDict,
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def files(self) -> typing.Union[FilesTuple, schemas.Unset]:
|
||||
val = self.get("files", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
FilesTuple,
|
||||
val
|
||||
)
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[schemas.OUTPUT_BASE_TYPES, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
return self.get(name, schemas.unset)
|
||||
SchemaDictInput = typing.Mapping[str, schemas.INPUT_TYPES_ALL]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Schema(
|
||||
schemas.Schema[SchemaDict, tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
properties: Properties = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties)) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: SchemaDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
SchemaDictInput,
|
||||
SchemaDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> SchemaDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
@ -0,0 +1,39 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.response_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
from .content.application_json import schema as application_json_schema
|
||||
from .headers import header_digest
|
||||
from .headers import header_content_disposition
|
||||
from .headers import header_location
|
||||
from . import header_parameters
|
||||
parameters: typing.Dict[str, typing.Type[api_client.HeaderParameterWithoutName]] = {
|
||||
'Digest': header_digest.Digest,
|
||||
'Content-Disposition': header_content_disposition.ContentDisposition,
|
||||
'Location': header_location.Location,
|
||||
}
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponse(api_response.ApiResponse):
|
||||
body: application_json_schema.SchemaDict
|
||||
headers: header_parameters.HeadersDict
|
||||
|
||||
|
||||
class ResponseFor200(api_client.OpenApiResponse[ApiResponse]):
|
||||
@classmethod
|
||||
def get_response(cls, response, headers, body) -> ApiResponse:
|
||||
return ApiResponse(response=response, body=body, headers=headers)
|
||||
|
||||
|
||||
class ApplicationJsonMediaType(api_client.MediaType):
|
||||
schema: typing_extensions.TypeAlias = application_json_schema.Schema
|
||||
content = {
|
||||
'application/json': ApplicationJsonMediaType,
|
||||
}
|
||||
headers=parameters
|
||||
headers_schema = header_parameters.Headers
|
||||
@ -0,0 +1,156 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
Items: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
|
||||
|
||||
class UrlsTuple(
|
||||
typing.Tuple[
|
||||
str,
|
||||
...
|
||||
]
|
||||
):
|
||||
|
||||
def __new__(cls, arg: typing.Union[UrlsTupleInput, UrlsTuple], configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None):
|
||||
return Urls.validate(arg, configuration=configuration)
|
||||
UrlsTupleInput = typing.Union[
|
||||
typing.List[
|
||||
str,
|
||||
],
|
||||
typing.Tuple[
|
||||
str,
|
||||
...
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Urls(
|
||||
schemas.Schema[schemas.immutabledict, UrlsTuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({tuple})
|
||||
items: typing.Type[Items] = dataclasses.field(default_factory=lambda: Items) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
tuple: UrlsTuple
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
UrlsTupleInput,
|
||||
UrlsTuple,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> UrlsTuple:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
Properties = typing.TypedDict(
|
||||
'Properties',
|
||||
{
|
||||
"urls": typing.Type[Urls],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class SchemaDict(schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"urls",
|
||||
})
|
||||
|
||||
def __new__(
|
||||
cls,
|
||||
*,
|
||||
urls: typing.Union[
|
||||
UrlsTupleInput,
|
||||
UrlsTuple,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
**kwargs: schemas.INPUT_TYPES_ALL,
|
||||
):
|
||||
arg_: typing.Dict[str, typing.Any] = {}
|
||||
for key_, val in (
|
||||
("urls", urls),
|
||||
):
|
||||
if isinstance(val, schemas.Unset):
|
||||
continue
|
||||
arg_[key_] = val
|
||||
arg_.update(kwargs)
|
||||
used_arg_ = typing.cast(SchemaDictInput, arg_)
|
||||
return Schema.validate(used_arg_, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
SchemaDictInput,
|
||||
SchemaDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> SchemaDict:
|
||||
return Schema.validate(arg, configuration=configuration)
|
||||
|
||||
@property
|
||||
def urls(self) -> typing.Union[UrlsTuple, schemas.Unset]:
|
||||
val = self.get("urls", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
UrlsTuple,
|
||||
val
|
||||
)
|
||||
|
||||
def get_additional_property_(self, name: str) -> typing.Union[schemas.OUTPUT_BASE_TYPES, schemas.Unset]:
|
||||
schemas.raise_if_key_known(name, self.__required_keys__, self.__optional_keys__)
|
||||
return self.get(name, schemas.unset)
|
||||
SchemaDictInput = typing.Mapping[str, schemas.INPUT_TYPES_ALL]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Schema(
|
||||
schemas.Schema[SchemaDict, tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
properties: Properties = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties)) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: SchemaDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
SchemaDictInput,
|
||||
SchemaDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> SchemaDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
@ -0,0 +1,131 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
AdditionalProperties: typing_extensions.TypeAlias = schemas.NotAnyTypeSchema
|
||||
|
||||
from comfy.api.paths.api_v1_prompts.post.responses.response_200.headers.header_content_disposition import schema as schema_2
|
||||
from comfy.api.paths.api_v1_prompts.post.responses.response_200.headers.header_digest import schema
|
||||
from comfy.api.paths.api_v1_prompts.post.responses.response_200.headers.header_location import schema as schema_3
|
||||
Properties = typing.TypedDict(
|
||||
'Properties',
|
||||
{
|
||||
"Digest": typing.Type[schema.Schema],
|
||||
"Content-Disposition": typing.Type[schema_2.Schema],
|
||||
"Location": typing.Type[schema_3.Schema],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class HeadersDict(schemas.immutabledict[str, schemas.OUTPUT_BASE_TYPES]):
|
||||
|
||||
__required_keys__: typing.FrozenSet[str] = frozenset({
|
||||
})
|
||||
__optional_keys__: typing.FrozenSet[str] = frozenset({
|
||||
"Digest",
|
||||
"Content-Disposition",
|
||||
"Location",
|
||||
})
|
||||
|
||||
def __new__(
|
||||
cls,
|
||||
*,
|
||||
Digest: typing.Union[
|
||||
str,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
Location: typing.Union[
|
||||
str,
|
||||
schemas.Unset
|
||||
] = schemas.unset,
|
||||
configuration_: typing.Optional[schema_configuration.SchemaConfiguration] = None,
|
||||
):
|
||||
arg_: typing.Dict[str, typing.Any] = {}
|
||||
for key_, val in (
|
||||
("Digest", Digest),
|
||||
("Location", Location),
|
||||
):
|
||||
if isinstance(val, schemas.Unset):
|
||||
continue
|
||||
arg_[key_] = val
|
||||
used_arg_ = typing.cast(HeadersDictInput, arg_)
|
||||
return Headers.validate(used_arg_, configuration=configuration_)
|
||||
|
||||
@staticmethod
|
||||
def from_dict_(
|
||||
arg: typing.Union[
|
||||
HeadersDictInput,
|
||||
HeadersDict
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> HeadersDict:
|
||||
return Headers.validate(arg, configuration=configuration)
|
||||
|
||||
@property
|
||||
def Digest(self) -> typing.Union[str, schemas.Unset]:
|
||||
val = self.get("Digest", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
str,
|
||||
val
|
||||
)
|
||||
|
||||
@property
|
||||
def Location(self) -> typing.Union[str, schemas.Unset]:
|
||||
val = self.get("Location", schemas.unset)
|
||||
if isinstance(val, schemas.Unset):
|
||||
return val
|
||||
return typing.cast(
|
||||
str,
|
||||
val
|
||||
)
|
||||
HeadersDictInput = typing.TypedDict(
|
||||
'HeadersDictInput',
|
||||
{
|
||||
"Digest": str,
|
||||
"Content-Disposition": str,
|
||||
"Location": str,
|
||||
},
|
||||
total=False
|
||||
)
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Headers(
|
||||
schemas.Schema[HeadersDict, tuple]
|
||||
):
|
||||
types: typing.FrozenSet[typing.Type] = frozenset({schemas.immutabledict})
|
||||
properties: Properties = dataclasses.field(default_factory=lambda: schemas.typed_dict_to_instance(Properties)) # type: ignore
|
||||
additional_properties: typing.Type[AdditionalProperties] = dataclasses.field(default_factory=lambda: AdditionalProperties) # type: ignore
|
||||
type_to_output_cls: typing.Mapping[
|
||||
typing.Type,
|
||||
typing.Type
|
||||
] = dataclasses.field(
|
||||
default_factory=lambda: {
|
||||
schemas.immutabledict: HeadersDict
|
||||
}
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def validate(
|
||||
cls,
|
||||
arg: typing.Union[
|
||||
HeadersDictInput,
|
||||
HeadersDict,
|
||||
],
|
||||
configuration: typing.Optional[schema_configuration.SchemaConfiguration] = None
|
||||
) -> HeadersDict:
|
||||
return super().validate_base(
|
||||
arg,
|
||||
configuration=configuration,
|
||||
)
|
||||
|
||||
@ -0,0 +1,14 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.header_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
from . import schema
|
||||
|
||||
|
||||
class ContentDisposition(api_client.HeaderParameterWithoutName):
|
||||
style = api_client.ParameterStyle.SIMPLE
|
||||
schema: typing_extensions.TypeAlias = schema.Schema
|
||||
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
Schema: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
@ -0,0 +1,14 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.header_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
from . import schema
|
||||
|
||||
|
||||
class Digest(api_client.HeaderParameterWithoutName):
|
||||
style = api_client.ParameterStyle.SIMPLE
|
||||
schema: typing_extensions.TypeAlias = schema.Schema
|
||||
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
Schema: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
@ -0,0 +1,14 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.header_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
from . import schema
|
||||
|
||||
|
||||
class Location(api_client.HeaderParameterWithoutName):
|
||||
style = api_client.ParameterStyle.SIMPLE
|
||||
schema: typing_extensions.TypeAlias = schema.Schema
|
||||
@ -0,0 +1,13 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
comfyui
|
||||
No description provided (generated by Openapi JSON Schema Generator https://github.com/openapi-json-schema-tools/openapi-json-schema-generator) # noqa: E501
|
||||
The version of the OpenAPI document: 0.0.1
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
from comfy.api.shared_imports.schema_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
Schema: typing_extensions.TypeAlias = schemas.StrSchema
|
||||
@ -0,0 +1,19 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.response_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponse(api_response.ApiResponse):
|
||||
body: schemas.Unset
|
||||
headers: schemas.Unset
|
||||
|
||||
|
||||
class ResponseFor204(api_client.OpenApiResponse[ApiResponse]):
|
||||
@classmethod
|
||||
def get_response(cls, response, headers, body) -> ApiResponse:
|
||||
return ApiResponse(response=response, body=body, headers=headers)
|
||||
@ -0,0 +1,19 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.response_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponse(api_response.ApiResponse):
|
||||
body: schemas.Unset
|
||||
headers: schemas.Unset
|
||||
|
||||
|
||||
class ResponseFor400(api_client.OpenApiResponse[ApiResponse]):
|
||||
@classmethod
|
||||
def get_response(cls, response, headers, body) -> ApiResponse:
|
||||
return ApiResponse(response=response, body=body, headers=headers)
|
||||
@ -0,0 +1,19 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.response_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponse(api_response.ApiResponse):
|
||||
body: schemas.Unset
|
||||
headers: schemas.Unset
|
||||
|
||||
|
||||
class ResponseFor429(api_client.OpenApiResponse[ApiResponse]):
|
||||
@classmethod
|
||||
def get_response(cls, response, headers, body) -> ApiResponse:
|
||||
return ApiResponse(response=response, body=body, headers=headers)
|
||||
@ -0,0 +1,19 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.response_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponse(api_response.ApiResponse):
|
||||
body: schemas.Unset
|
||||
headers: schemas.Unset
|
||||
|
||||
|
||||
class ResponseFor500(api_client.OpenApiResponse[ApiResponse]):
|
||||
@classmethod
|
||||
def get_response(cls, response, headers, body) -> ApiResponse:
|
||||
return ApiResponse(response=response, body=body, headers=headers)
|
||||
@ -0,0 +1,19 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.response_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponse(api_response.ApiResponse):
|
||||
body: schemas.Unset
|
||||
headers: schemas.Unset
|
||||
|
||||
|
||||
class ResponseFor503(api_client.OpenApiResponse[ApiResponse]):
|
||||
@classmethod
|
||||
def get_response(cls, response, headers, body) -> ApiResponse:
|
||||
return ApiResponse(response=response, body=body, headers=headers)
|
||||
@ -0,0 +1,19 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
|
||||
"""
|
||||
|
||||
from comfy.api.shared_imports.response_imports import * # pyright: ignore [reportWildcardImportFromLibrary]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class ApiResponse(api_response.ApiResponse):
|
||||
body: schemas.Unset
|
||||
headers: schemas.Unset
|
||||
|
||||
|
||||
class ResponseFor507(api_client.OpenApiResponse[ApiResponse]):
|
||||
@classmethod
|
||||
def get_response(cls, response, headers, body) -> ApiResponse:
|
||||
return ApiResponse(response=response, body=body, headers=headers)
|
||||
5
comfy/api/paths/embeddings/__init__.py
Normal file
@@ -0,0 +1,5 @@
# do not import all endpoints into this module because that uses a lot of memory and stack frames
# if you need the ability to import all endpoints from this module, import them with
# from comfy.api.apis.paths.embeddings import Embeddings

path = "/embeddings"
0
comfy/api/paths/embeddings/get/__init__.py
Normal file
114
comfy/api/paths/embeddings/get/operation.py
Normal file
@@ -0,0 +1,114 @@
# coding: utf-8

"""
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
"""

from comfy.api import api_client
from comfy.api.shared_imports.operation_imports import * # pyright: ignore [reportWildcardImportFromLibrary]

from .. import path
from .responses import response_200


__StatusCodeToResponse = typing.TypedDict(
    '__StatusCodeToResponse',
    {
        '200': typing.Type[response_200.ResponseFor200],
    }
)
_status_code_to_response: __StatusCodeToResponse = {
    '200': response_200.ResponseFor200,
}
_non_error_status_codes = frozenset({
    '200',
})

_all_accept_content_types = (
    "application/json",
)


class BaseApi(api_client.Api):
    @typing.overload
    def _get_embeddings(
        self,
        *,
        skip_deserialization: typing.Literal[False] = False,
        accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
        server_index: typing.Optional[int] = None,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
    ) -> response_200.ApiResponse: ...

    @typing.overload
    def _get_embeddings(
        self,
        *,
        skip_deserialization: typing.Literal[True],
        accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
        server_index: typing.Optional[int] = None,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
    ) -> api_response.ApiResponseWithoutDeserialization: ...

    def _get_embeddings(
        self,
        *,
        skip_deserialization: bool = False,
        accept_content_types: typing.Tuple[str, ...] = _all_accept_content_types,
        server_index: typing.Optional[int] = None,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None,
    ):
        """
        (UI) Get embeddings
        :param skip_deserialization: If true then api_response.response will be set but
            api_response.body and api_response.headers will not be deserialized into schema
            class instances
        """
        used_path = path
        headers = self._get_headers(accept_content_types=accept_content_types)
        # TODO add cookie handling
        host = self.api_client.configuration.get_server_url(
            "servers", server_index
        )

        raw_response = self.api_client.call_api(
            resource_path=used_path,
            method='get',
            host=host,
            headers=headers,
            stream=stream,
            timeout=timeout,
        )

        if skip_deserialization:
            skip_deser_response = api_response.ApiResponseWithoutDeserialization(response=raw_response)
            self._verify_response_status(skip_deser_response)
            return skip_deser_response

        status = str(raw_response.status)
        if status in _non_error_status_codes:
            status_code = typing.cast(
                typing.Literal[
                    '200',
                ],
                status
            )
            return _status_code_to_response[status_code].deserialize(
                raw_response, self.api_client.schema_configuration)

        response = api_response.ApiResponseWithoutDeserialization(response=raw_response)
        self._verify_response_status(response)
        return response


class GetEmbeddings(BaseApi):
    # this class is used by api classes that refer to endpoints with operationId.snakeCase fn names
    get_embeddings = BaseApi._get_embeddings


class ApiForGet(BaseApi):
    # this class is used by api classes that refer to endpoints by path and http method names
    get = BaseApi._get_embeddings
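To make the generated operation easier to follow, here is a hedged usage sketch that is not part of the diff. It assumes the package's stock openapi-json-schema-generator wiring, namely that api_client.ApiClient() can be constructed with its default configuration and that the Api base class accepts an optional api_client argument; verify both against the rest of this commit before relying on them.

# Hypothetical usage sketch of the endpoint defined above.
from comfy.api import api_client
from comfy.api.paths.embeddings.get.operation import GetEmbeddings

client = api_client.ApiClient()  # assumed default construction; real code may pass an explicit configuration
embeddings_api = GetEmbeddings(api_client=client)

# Deserialized path: the '200' mapping above turns the JSON body into schema instances.
resp = embeddings_api.get_embeddings()
print(resp.body)

# Raw path: per the docstring, body and headers are left undeserialized.
raw = embeddings_api.get_embeddings(skip_deserialization=True)
print(raw.response.status)

The ApiForGet.get alias at the bottom of the file is the same bound method, exposed for API classes that address endpoints by path and HTTP verb rather than by operationId.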
@@ -0,0 +1,28 @@
# coding: utf-8

"""
Generated by: https://github.com/openapi-json-schema-tools/openapi-json-schema-generator
"""

from comfy.api.shared_imports.response_imports import * # pyright: ignore [reportWildcardImportFromLibrary]

from .content.application_json import schema as application_json_schema


@dataclasses.dataclass(frozen=True)
class ApiResponse(api_response.ApiResponse):
    body: application_json_schema.SchemaTuple
    headers: schemas.Unset


class ResponseFor200(api_client.OpenApiResponse[ApiResponse]):
    @classmethod
    def get_response(cls, response, headers, body) -> ApiResponse:
        return ApiResponse(response=response, body=body, headers=headers)


    class ApplicationJsonMediaType(api_client.MediaType):
        schema: typing_extensions.TypeAlias = application_json_schema.Schema
    content = {
        'application/json': ApplicationJsonMediaType,
    }
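Finally, a short hedged sketch of consuming the deserialized 200 response. The import path follows the relative "from .responses import response_200" used by the operation module above, and iterating the body assumes SchemaTuple behaves like a tuple, as its name suggests; the element schema lives in a content module not shown in this excerpt.

# Hypothetical helper built on the response types above.
from comfy.api.paths.embeddings.get.operation import GetEmbeddings
from comfy.api.paths.embeddings.get.responses import response_200


def list_embeddings(api: GetEmbeddings) -> list:
    resp: response_200.ApiResponse = api.get_embeddings()
    # body is the deserialized application/json payload; headers stay schemas.Unset
    # because the spec declares no response headers for this status code.
    return list(resp.body)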