From 583ddd6b381540e4369f012de750b805a088535c Mon Sep 17 00:00:00 2001
From: Benjamin Berman
Date: Sat, 26 Jul 2025 17:42:44 -0700
Subject: [PATCH] Always use spawn context, even on Linux

---
 comfy/distributed/process_pool_executor.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/comfy/distributed/process_pool_executor.py b/comfy/distributed/process_pool_executor.py
index b0b7853bb..6c2b0f0a9 100644
--- a/comfy/distributed/process_pool_executor.py
+++ b/comfy/distributed/process_pool_executor.py
@@ -2,6 +2,7 @@ import concurrent.futures
 import contextvars
 import multiprocessing
 import pickle
+import logging
 from functools import partial
 from typing import Callable, Any
 
@@ -9,6 +10,7 @@ from pebble import ProcessPool, ProcessFuture
 
 from ..component_model.executor_types import Executor
 
+logger = logging.getLogger(__name__)
 
 def _wrap_with_context(context_data: bytes, func: Callable, *args, **kwargs) -> Any:
     new_ctx: contextvars.Context = pickle.loads(context_data)
@@ -22,8 +24,9 @@ class ProcessPoolExecutor(ProcessPool, Executor):
                  initializer: Callable = None,
                  initargs: list | tuple = (),
                  context: multiprocessing.context.BaseContext = None):
-        if context is None:
-            context = multiprocessing.get_context('spawn')
+        if context is not None:
+            logger.warning(f"A context was passed to a ProcessPoolExecutor when only spawn is supported (context={context})")
+        context = multiprocessing.get_context('spawn')
         super().__init__(max_workers=max_workers, max_tasks=max_tasks, initializer=initializer, initargs=initargs, context=context)
 
     def shutdown(self, wait=True, *, cancel_futures=False):
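
Note (not part of the patch): a minimal usage sketch of the changed constructor, assuming the patched module is importable as comfy.distributed.process_pool_executor and using pebble's ProcessPool.schedule() API, which the executor inherits. After this change, any caller-supplied multiprocessing context is ignored with a warning and workers are always started with the 'spawn' method, even on Linux.

    import logging
    import multiprocessing

    from comfy.distributed.process_pool_executor import ProcessPoolExecutor

    logging.basicConfig(level=logging.WARNING)


    def square(x: int) -> int:
        return x * x


    if __name__ == "__main__":
        # Passing a fork context (on a POSIX platform that offers one) now only
        # logs the warning added by this patch; the pool still spawns workers.
        pool = ProcessPoolExecutor(max_workers=2,
                                   context=multiprocessing.get_context("fork"))
        try:
            # schedule() comes from pebble.ProcessPool; result() blocks until done.
            print(pool.schedule(square, args=(4,)).result())  # prints 16
        finally:
            pool.shutdown()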