mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2026-01-29 15:50:22 +08:00)

commit 8041b1b54d (parent d9ee4137c7)

fix ideogram seed, fix polling for results, fix logger
@@ -47,9 +47,9 @@ def get_logs():
 
 
 def on_flush(callback):
-    if stdout_interceptor is not None:
+    if stdout_interceptor is not None and hasattr(stdout_interceptor, "on_flush"):
         stdout_interceptor.on_flush(callback)
-    if stderr_interceptor is not None:
+    if stderr_interceptor is not None and hasattr(stderr_interceptor, "on_flush"):
         stderr_interceptor.on_flush(callback)
 
 
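The hasattr guard added above prevents an AttributeError when sys.stdout or sys.stderr has been replaced by an object that does not implement on_flush (a test runner or another library, for instance). A minimal sketch of the pattern, assuming a hypothetical FlushingInterceptor rather than the module's real interceptor class:

# Sketch only: FlushingInterceptor is an illustrative stand-in for whatever
# the logger module actually installs on sys.stdout / sys.stderr.
import sys


class FlushingInterceptor:
    """Wraps a stream and notifies registered callbacks whenever it is flushed."""

    def __init__(self, stream):
        self._stream = stream
        self._callbacks = []

    def write(self, data):
        return self._stream.write(data)

    def flush(self):
        self._stream.flush()
        for callback in self._callbacks:
            callback()

    def on_flush(self, callback):
        self._callbacks.append(callback)


stdout_interceptor = sys.stdout  # may be a plain stream with no on_flush()


def on_flush(callback):
    # Register only when the current stdout object supports flush callbacks;
    # a plain TextIOWrapper or a third-party replacement does not.
    if stdout_interceptor is not None and hasattr(stdout_interceptor, "on_flush"):
        stdout_interceptor.on_flush(callback)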
@@ -843,13 +843,13 @@ class PromptServer(ExecutorToClientProgress):
             if result is None:
                 return web.Response(body="the queue is shutting down", status=503)
             else:
-                return await self._schedule_background_task_with_web_response(fut, task_id)
+                return self._schedule_background_task_with_web_response(fut, task_id)
         else:
             self.prompt_queue.put(item)
             if wait:
                 await completed
             else:
-                return await self._schedule_background_task_with_web_response(completed, task_id)
+                return self._schedule_background_task_with_web_response(completed, task_id)
             task_invocation_or_dict: TaskInvocation | dict = completed.result()
             if isinstance(task_invocation_or_dict, dict):
                 result = TaskInvocation(item_id=item.prompt_id, outputs=task_invocation_or_dict, status=ExecutionStatus("success", True, []))
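Both changed returns hand the pending prompt to the background scheduler (made synchronous in the next hunk) so the HTTP handler can answer immediately when the client did not ask to wait; the client then polls for the result. A rough sketch of that submit-then-poll flow; the endpoint names, run_prompt coroutine, and jobs registry are hypothetical, not PromptServer's real API:

# Rough sketch of the submit-then-poll flow; everything here is illustrative.
import asyncio
from aiohttp import web

jobs: dict[str, asyncio.Task] = {}


async def run_prompt(job_id: str) -> dict:
    await asyncio.sleep(1)  # stand-in for executing a queued prompt
    return {"job_id": job_id, "outputs": {}}


async def submit(request: web.Request) -> web.Response:
    job_id = request.query.get("id", "job-1")
    if request.query.get("wait") == "1":
        # Blocking-style client: await the work inside the handler.
        return web.json_response(await run_prompt(job_id))
    # Otherwise schedule it and answer immediately; clients poll /status.
    jobs[job_id] = asyncio.create_task(run_prompt(job_id), name=job_id)
    return web.json_response({"job_id": job_id, "status": "pending"}, status=202)


async def status(request: web.Request) -> web.Response:
    task = jobs.get(request.query.get("id", "job-1"))
    if task is None:
        return web.Response(status=404)
    if not task.done():
        return web.json_response({"status": "pending"})
    return web.json_response(task.result())


app = web.Application()
app.add_routes([web.get("/submit", submit), web.get("/status", status)])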
@@ -934,7 +934,7 @@ class PromptServer(ExecutorToClientProgress):
             prompt = last_history_item['prompt'][2]
             return web.json_response(prompt, status=200)
 
-    async def _schedule_background_task_with_web_response(self, fut, task_id):
+    def _schedule_background_task_with_web_response(self, fut, task_id):
         task = asyncio.create_task(fut, name=task_id)
         self.background_tasks[task_id] = task
         task.add_done_callback(lambda _: self.background_tasks.pop(task_id))
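Dropping async from the method works because asyncio.create_task only needs a running event loop, not an async caller: any plain function invoked from inside a coroutine (such as an aiohttp handler) can schedule work this way. A small self-contained sketch of that lifecycle, with a module-level registry standing in for self.background_tasks:

# Sketch of scheduling work from a synchronous helper while an event loop runs.
# The registry name mirrors the diff; the rest is illustrative.
import asyncio

background_tasks: dict[str, asyncio.Task] = {}


def schedule(fut, task_id: str) -> asyncio.Task:
    # create_task requires a running loop, but the caller does not have to be
    # async: a plain method called from a request handler qualifies.
    task = asyncio.create_task(fut, name=task_id)
    background_tasks[task_id] = task
    # Drop the bookkeeping entry as soon as the task finishes, keeping a strong
    # reference until then so the task is not garbage-collected mid-flight.
    task.add_done_callback(lambda _: background_tasks.pop(task_id, None))
    return task


async def main():
    async def work():
        await asyncio.sleep(0.1)
        return "done"

    t = schedule(work(), "example")  # called without await
    print(await t)                   # -> "done"


asyncio.run(main())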
@@ -69,6 +69,7 @@ InputTypeSpec = Union[IntSpec, FloatSpec, StringSpec, BooleanSpec, ChoiceSpec, N
 
 # numpy seeds must be between 0 and 2**32 - 1
 Seed = ("INT", {"default": 0, "min": 0, "max": 2 ** 32 - 1})
+Seed31 = ("INT", {"default": 0, "min": 0, "max": 2 ** 31 - 1})
 Seed64 = ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff, "control_after_generate": True})
 SeedSpec = tuple[Literal["INT"], TypedDict("SeedSpecOptions", {"default": Literal[0], "min": Literal[0], "max": Literal[4294967295]})]
 
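Seed31 caps the widget at 2**31 - 1, presumably because the Ideogram API only accepts seeds in the non-negative signed 32-bit range, so values generated under the older 2**32 - 1 spec could be rejected. For illustration only, a wider seed could be folded into that range as below; fold_seed is hypothetical and not something the nodes actually do:

# Illustration only: folding an arbitrary non-negative seed into the 31-bit
# range that the new Seed31 spec enforces on the widget.
SEED31_MAX = 2 ** 31 - 1


def fold_seed(seed: int) -> int:
    """Map any non-negative seed onto [0, 2**31 - 1]."""
    return seed % (SEED31_MAX + 1)


assert fold_seed(0) == 0
assert fold_seed(SEED31_MAX) == SEED31_MAX
assert fold_seed(2 ** 32 - 1) == SEED31_MAX  # wraps instead of erroring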
@@ -9,7 +9,7 @@ import torch
 from PIL import Image
 
 from comfy.component_model.tensor_types import RGBImageBatch, MaskBatch, ImageBatch
-from comfy.nodes.package_typing import CustomNode, Seed
+from comfy.nodes.package_typing import CustomNode, Seed31
 from comfy.utils import pil2tensor, tensor2pil
 from comfy_extras.constants.resolutions import IDEOGRAM_RESOLUTIONS
 from comfy_extras.nodes.nodes_mask import MaskToImage
@@ -55,7 +55,7 @@ class IdeogramGenerate(CustomNode):
                 "api_key": ("STRING", {"default": ""}),
                 "negative_prompt": ("STRING", {"multiline": True}),
                 "num_images": ("INT", {"default": 1, "min": 1, "max": 8}),
-                "seed": Seed,
+                "seed": Seed31,
                 "style_type": (STYLES_ENUM, {}),
                 # New v3 optional args
                 "rendering_speed": (RENDERING_SPEED_ENUM, {"default": "DEFAULT"}),
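Because the seed spec is just a (type, options) tuple, swapping Seed for Seed31 in INPUT_TYPES changes the widget's allowed range everywhere the spec is reused. A toy node showing how such a spec plugs in; it follows the usual ComfyUI node conventions but is not one of the real Ideogram nodes:

# Minimal sketch of an INT seed spec feeding a node's INPUT_TYPES; toy example.
Seed31 = ("INT", {"default": 0, "min": 0, "max": 2 ** 31 - 1})


class ToySeededNode:
    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "prompt": ("STRING", {"multiline": True}),
                # The tuple unpacks to (widget type, widget options), so every
                # node that reuses Seed31 gets the same 0..2**31-1 seed widget.
                "seed": Seed31,
            }
        }

    RETURN_TYPES = ("STRING",)
    FUNCTION = "generate"
    CATEGORY = "examples"

    def generate(self, prompt: str, seed: int):
        return (f"{prompt} (seed={seed})",)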
@@ -141,7 +141,7 @@ class IdeogramEdit(CustomNode):
                 "api_key": ("STRING", {"default": ""}),
                 "magic_prompt_option": (AUTO_PROMPT_ENUM, {"default": AUTO_PROMPT_ENUM[0]}),
                 "num_images": ("INT", {"default": 1, "min": 1, "max": 8}),
-                "seed": Seed,
+                "seed": Seed31,
                 "style_type": (STYLES_ENUM, {}),
                 # New v3 optional args
                 "rendering_speed": (RENDERING_SPEED_ENUM, {"default": "DEFAULT"}),
@@ -217,7 +217,7 @@ class IdeogramRemix(CustomNode):
                 "magic_prompt_option": (AUTO_PROMPT_ENUM, {"default": AUTO_PROMPT_ENUM[0]}),
                 "negative_prompt": ("STRING", {"multiline": True}),
                 "num_images": ("INT", {"default": 1, "min": 1, "max": 8}),
-                "seed": Seed,
+                "seed": Seed31,
                 "style_type": (STYLES_ENUM, {}),
                 # New v3 optional args
                 "rendering_speed": (RENDERING_SPEED_ENUM, {"default": "DEFAULT"}),