From 95acb06959db5c89525b4e66b51613f46ccfd0f0 Mon Sep 17 00:00:00 2001
From: Vijaysinh
Date: Fri, 28 Nov 2025 18:55:21 +0530
Subject: [PATCH] Update execution.py

---
 execution.py | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/execution.py b/execution.py
index 17c77beab..0c287f26b 100644
--- a/execution.py
+++ b/execution.py
@@ -731,6 +731,31 @@ class PromptExecutor:
             "outputs": ui_outputs,
             "meta": meta_outputs,
         }
+
+        try:
+            import comfy.model_management as mm
+
+            # If this build of ComfyUI exposes loaded text encoders
+            if hasattr(mm, "loaded_text_encoders"):
+                for enc in list(mm.loaded_text_encoders.values()):
+                    try:
+                        mm.unload_text_encoder(enc)
+                    except Exception:
+                        pass
+
+                mm.loaded_text_encoders.clear()
+
+            # Final RAM + VRAM cleanup
+            try:
+                mm.cleanup_models_gc()
+            except Exception:
+                pass
+
+            print("[RAM Optimizer] Text encoders unloaded successfully after run.")
+        except Exception as e:
+            print(f"[RAM Optimizer] Failed to unload text encoders: {e}")
+        # --- END: Text Encoder RAM Cleanup Patch ---
+
         self.server.last_node_id = None
         if comfy.model_management.DISABLE_SMART_MEMORY:
             comfy.model_management.unload_all_models()
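
Fallback sketch for builds where comfy.model_management does not expose
loaded_text_encoders (the hunk above guards that case with hasattr() and then
simply skips the per-encoder unload). It relies only on helpers known to exist
in stock ComfyUI, unload_all_models() and soft_empty_cache(); cleanup_after_run
and its aggressive flag are illustrative names, not existing API:

    import gc

    import comfy.model_management as mm

    def cleanup_after_run(aggressive: bool = False) -> None:
        """Release model RAM/VRAM after a prompt finishes executing."""
        if aggressive:
            # Evicts every cached model (UNet, CLIP/text encoders, VAE),
            # trading reload time on the next run for lower idle RAM.
            mm.unload_all_models()
        # Collect Python garbage first so dead model references are released,
        # then ask the backend allocator to return cached blocks.
        gc.collect()
        mm.soft_empty_cache()

Note that the aggressive path is coarser than the hunk above: it evicts every
cached model rather than just the text encoders, so it mainly pays off when the
next prompt is expected to load a different checkpoint anyway.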