fix: add usage example to Caching class, remove pickle fallback

- Add docstring with usage example to Caching class matching the
  convention used by sibling APIs (Execution.set_progress, ComfyExtension)
- Remove non-deterministic pickle fallback from _serialize_cache_key;
  return None on JSON failure instead of producing unretrievable hashes
- Move cache_provider imports to top of execution.py (no circular dep)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Deep Mehta 2026-03-03 17:41:33 -08:00
parent 66ad9937c8
commit c73e3c9619
3 changed files with 20 additions and 11 deletions

View File

@ -118,6 +118,23 @@ class Types:
class Caching:
"""
External cache provider API for sharing cached node outputs
across ComfyUI instances.
Example::
from comfy_api.latest import Caching
class MyCacheProvider(Caching.CacheProvider):
async def on_lookup(self, context):
... # check external storage
async def on_store(self, context, value):
... # store to external storage
Caching.register_provider(MyCacheProvider())
"""
# Import from comfy_execution.cache_provider (source of truth)
from comfy_execution.cache_provider import (
CacheProvider,

View File

@ -5,7 +5,6 @@ import hashlib
import json
import logging
import math
import pickle
import threading
_logger = logging.getLogger(__name__)
@ -143,13 +142,7 @@ def _serialize_cache_key(cache_key: Any) -> Optional[str]:
return hashlib.sha256(json_str.encode('utf-8')).hexdigest()
except Exception as e:
_logger.warning(f"Failed to serialize cache key: {e}")
# Fallback to pickle (non-deterministic but better than nothing)
try:
serialized = pickle.dumps(cache_key, protocol=4)
return hashlib.sha256(serialized).hexdigest()
except Exception as fallback_error:
_logger.warning(f"Failed pickle fallback for cache key: {fallback_error}")
return None
return None
def _contains_nan(obj: Any) -> bool:

View File

@ -40,6 +40,7 @@ from comfy_execution.progress import get_progress_state, reset_progress_state, a
from comfy_execution.utils import CurrentNodeContext
from comfy_api.internal import _ComfyNodeInternal, _NodeOutputInternal, first_real_override, is_class, make_locked_method_func
from comfy_api.latest import io, _io
from comfy_execution.cache_provider import _has_cache_providers, _get_cache_providers, _logger as _cache_logger
class ExecutionResult(Enum):
@ -685,8 +686,6 @@ class PromptExecutor:
self.add_message("execution_error", mes, broadcast=False)
def _notify_prompt_lifecycle(self, event: str, prompt_id: str):
from comfy_execution.cache_provider import _has_cache_providers, _get_cache_providers, _logger
if not _has_cache_providers():
return
@ -697,7 +696,7 @@ class PromptExecutor:
elif event == "end":
provider.on_prompt_end(prompt_id)
except Exception as e:
_logger.warning(f"Cache provider {provider.__class__.__name__} error on {event}: {e}")
_cache_logger.warning(f"Cache provider {provider.__class__.__name__} error on {event}: {e}")
def execute(self, prompt, prompt_id, extra_data=None, execute_outputs=None):
    """Synchronous entry point for prompt execution.

    Thin wrapper that drives the async executor to completion via
    ``asyncio.run``. All real work happens in ``execute_async``.

    Args:
        prompt: The prompt graph to execute.
        prompt_id: Identifier for this execution run.
        extra_data: Optional auxiliary data passed through to the
            executor; defaults to an empty dict.
        execute_outputs: Optional list of output node ids to execute;
            defaults to an empty list.
    """
    # Use None sentinels instead of mutable default arguments ({} / [])
    # so a single shared dict/list is not reused across calls.
    if extra_data is None:
        extra_data = {}
    if execute_outputs is None:
        execute_outputs = []
    asyncio.run(self.execute_async(prompt, prompt_id, extra_data, execute_outputs))