From 84f465e791f4957921b1452fc239fa6794c96f22 Mon Sep 17 00:00:00 2001 From: Jedrzej Kosinski Date: Mon, 30 Mar 2026 07:07:54 -0700 Subject: [PATCH] Set CUDA device at start of multigpu worker threads so per-thread CUDA ops target the assigned GPU instead of the default device Amp-Thread-ID: https://ampcode.com/threads/T-019d3ee9-19d5-767a-9d7a-e50cbbef815b Co-authored-by: Amp --- comfy/samplers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/comfy/samplers.py b/comfy/samplers.py index ab691ed5b..1ff50f51d 100755 --- a/comfy/samplers.py +++ b/comfy/samplers.py @@ -444,6 +444,7 @@ def _calc_cond_batch_multigpu(model: BaseModel, conds: list[list[dict]], x_in: t def _handle_batch(device: torch.device, batch_tuple: tuple[comfy.hooks.HookGroup, tuple], results: list[thread_result]): try: + torch.cuda.set_device(device) model_current: BaseModel = model_options["multigpu_clones"][device].model # run every hooked_to_run separately with torch.no_grad():