ComfyUI/comfy/distributed/executors.py
doctorpangloss 3ddec8ae90 Better support for process pool executors
- --panics-when=torch.cuda.OutOfMemory will now correctly panic and
  exit the worker, giving it time to reply that the execution failed
  and handling irrecoverable out-of-memory errors more gracefully
- --executor-factory=ProcessPoolExecutor will use a process instead of
  a thread to execute ComfyUI workflows when using the worker. When
  this process panics and exits, it will be correctly replaced, making
  for a more robust worker
2025-02-18 14:37:20 -08:00
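
A rough sketch, not the repository's implementation, of the panic-and-exit pattern the commit message describes: when an exception type named by --panics-when escapes a job, the worker first reports the failure and then hard-exits so the pool can replace the process. The helper names (run_with_panic, report_failure) and the MemoryError stand-in are illustrative assumptions.

import os
import sys

PANIC_EXCEPTIONS = (MemoryError,)  # stand-in for torch.cuda.OutOfMemoryError


def run_with_panic(job, report_failure):
    try:
        return job()
    except PANIC_EXCEPTIONS as exc:
        report_failure(exc)  # reply that the execution failed before exiting
        sys.stderr.flush()
        os._exit(1)  # hard-exit; per the commit, the pool replaces the crashed process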

21 lines
537 B
Python

import contextvars
import typing
from concurrent.futures import Future, ThreadPoolExecutor
from functools import partial

__version__ = '0.0.1'

from .process_pool_executor import ProcessPoolExecutor


class ContextVarExecutor(ThreadPoolExecutor):
    def submit(self, fn: typing.Callable, *args, **kwargs) -> Future:
        # Run fn inside a copy of the submitting thread's context so its
        # contextvars remain visible in the pool thread.
        ctx = contextvars.copy_context()  # type: contextvars.Context
        return super().submit(partial(ctx.run, partial(fn, *args, **kwargs)))


class ContextVarProcessPoolExecutor(ProcessPoolExecutor):
    pass
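
A minimal usage sketch (not part of the file) of what ContextVarExecutor provides over a plain ThreadPoolExecutor: contextvars set by the submitting thread are visible inside the pool thread. The request_id variable and current_request function are illustrative assumptions.

request_id = contextvars.ContextVar("request_id", default="unset")


def current_request() -> str:
    # Runs in a pool thread; it sees the submitter's value because submit()
    # wrapped the call in a copy of the submitting thread's Context.
    return request_id.get()


if __name__ == "__main__":
    request_id.set("job-42")
    with ContextVarExecutor(max_workers=1) as pool:
        # A plain ThreadPoolExecutor would return the default "unset" here.
        assert pool.submit(current_request).result() == "job-42"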