mirror of https://github.com/comfyanonymous/ComfyUI.git
synced 2026-02-16 16:32:34 +08:00
main: re-implement --cache-none as no cache at all
The execution list now tracks dependency-aware caching more correctly than the DependencyAwareCache did. Change it to a cache that does nothing.
This commit is contained in:
parent 943dda5f94
commit 0ee7736f8e
comfy_execution/caching.py
@@ -265,6 +265,26 @@ class HierarchicalCache(BasicCache):
         assert cache is not None
         return await cache._ensure_subcache(node_id, children_ids)
 
+class NullCache:
+
+    async def set_prompt(self, dynprompt, node_ids, is_changed_cache):
+        pass
+
+    def all_node_ids(self):
+        return []
+
+    def clean_unused(self):
+        pass
+
+    def get(self, node_id):
+        return None
+
+    def set(self, node_id, value):
+        pass
+
+    async def ensure_subcache_for(self, node_id, children_ids):
+        return self
+
 class LRUCache(BasicCache):
     def __init__(self, key_class, max_size=100):
         super().__init__(key_class)
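The added class is a null object: every operation is accepted and discarded, and every lookup misses, so the executor simply recomputes each node. A minimal sketch of the resulting contract (illustrative only, assuming the NullCache definition above is in scope; the node IDs and values are made up):

import asyncio

async def demo():
    cache = NullCache()
    await cache.set_prompt(dynprompt=None, node_ids=["1", "2"], is_changed_cache=None)
    cache.set("1", {"images": "decoded output"})  # stored value is discarded
    assert cache.get("1") is None                 # every lookup is a cache miss
    assert cache.all_node_ids() == []             # no node is ever tracked
    sub = await cache.ensure_subcache_for("1", ["2"])
    assert sub is cache                           # subcaches collapse into the null cache

asyncio.run(demo())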
@@ -316,157 +336,3 @@ class LRUCache(BasicCache):
             self._mark_used(child_id)
             self.children[cache_key].append(self.cache_key_set.get_data_key(child_id))
         return self
-
-
-class DependencyAwareCache(BasicCache):
-    """
-    A cache implementation that tracks dependencies between nodes and manages
-    their execution and caching accordingly. It extends the BasicCache class.
-    Nodes are removed from this cache once all of their descendants have been
-    executed.
-    """
-
-    def __init__(self, key_class):
-        """
-        Initialize the DependencyAwareCache.
-
-        Args:
-            key_class: The class used for generating cache keys.
-        """
-        super().__init__(key_class)
-        self.descendants = {}  # Maps node_id -> set of descendant node_ids
-        self.ancestors = {}  # Maps node_id -> set of ancestor node_ids
-        self.executed_nodes = set()  # Tracks nodes that have been executed
-
-    async def set_prompt(self, dynprompt, node_ids, is_changed_cache):
-        """
-        Clear the entire cache and rebuild the dependency graph.
-
-        Args:
-            dynprompt: The dynamic prompt object containing node information.
-            node_ids: List of node IDs to initialize the cache for.
-            is_changed_cache: Flag indicating if the cache has changed.
-        """
-        # Clear all existing cache data
-        self.cache.clear()
-        self.subcaches.clear()
-        self.descendants.clear()
-        self.ancestors.clear()
-        self.executed_nodes.clear()
-
-        # Call the parent method to initialize the cache with the new prompt
-        await super().set_prompt(dynprompt, node_ids, is_changed_cache)
-
-        # Rebuild the dependency graph
-        self._build_dependency_graph(dynprompt, node_ids)
-
-    def _build_dependency_graph(self, dynprompt, node_ids):
-        """
-        Build the dependency graph for all nodes.
-
-        Args:
-            dynprompt: The dynamic prompt object containing node information.
-            node_ids: List of node IDs to build the graph for.
-        """
-        self.descendants.clear()
-        self.ancestors.clear()
-        for node_id in node_ids:
-            self.descendants[node_id] = set()
-            self.ancestors[node_id] = set()
-
-        for node_id in node_ids:
-            inputs = dynprompt.get_node(node_id)["inputs"]
-            for input_data in inputs.values():
-                if is_link(input_data):  # Check if the input is a link to another node
-                    ancestor_id = input_data[0]
-                    self.descendants[ancestor_id].add(node_id)
-                    self.ancestors[node_id].add(ancestor_id)
-
-    def set(self, node_id, value):
-        """
-        Mark a node as executed and store its value in the cache.
-
-        Args:
-            node_id: The ID of the node to store.
-            value: The value to store for the node.
-        """
-        self._set_immediate(node_id, value)
-        self.executed_nodes.add(node_id)
-        self._cleanup_ancestors(node_id)
-
-    def get(self, node_id):
-        """
-        Retrieve the cached value for a node.
-
-        Args:
-            node_id: The ID of the node to retrieve.
-
-        Returns:
-            The cached value for the node.
-        """
-        return self._get_immediate(node_id)
-
-    async def ensure_subcache_for(self, node_id, children_ids):
-        """
-        Ensure a subcache exists for a node and update dependencies.
-
-        Args:
-            node_id: The ID of the parent node.
-            children_ids: List of child node IDs to associate with the parent node.
-
-        Returns:
-            The subcache object for the node.
-        """
-        subcache = await super()._ensure_subcache(node_id, children_ids)
-        for child_id in children_ids:
-            self.descendants[node_id].add(child_id)
-            self.ancestors[child_id].add(node_id)
-        return subcache
-
-    def _cleanup_ancestors(self, node_id):
-        """
-        Check if ancestors of a node can be removed from the cache.
-
-        Args:
-            node_id: The ID of the node whose ancestors are to be checked.
-        """
-        for ancestor_id in self.ancestors.get(node_id, []):
-            if ancestor_id in self.executed_nodes:
-                # Remove ancestor if all its descendants have been executed
-                if all(descendant in self.executed_nodes for descendant in self.descendants[ancestor_id]):
-                    self._remove_node(ancestor_id)
-
-    def _remove_node(self, node_id):
-        """
-        Remove a node from the cache.
-
-        Args:
-            node_id: The ID of the node to remove.
-        """
-        cache_key = self.cache_key_set.get_data_key(node_id)
-        if cache_key in self.cache:
-            del self.cache[cache_key]
-        subcache_key = self.cache_key_set.get_subcache_key(node_id)
-        if subcache_key in self.subcaches:
-            del self.subcaches[subcache_key]
-
-    def clean_unused(self):
-        """
-        Clean up unused nodes. This is a no-op for this cache implementation.
-        """
-        pass
-
-    def recursive_debug_dump(self):
-        """
-        Dump the cache and dependency graph for debugging.
-
-        Returns:
-            A list containing the cache state and dependency graph.
-        """
-        result = super().recursive_debug_dump()
-        result.append({
-            "descendants": self.descendants,
-            "ancestors": self.ancestors,
-            "executed_nodes": list(self.executed_nodes),
-        })
-        return result
execution.py (19 changed lines)
@@ -18,7 +18,7 @@ from comfy_execution.caching import (
     BasicCache,
     CacheKeySetID,
     CacheKeySetInputSignature,
-    DependencyAwareCache,
+    NullCache,
     HierarchicalCache,
     LRUCache,
 )
@@ -91,13 +91,13 @@ class IsChangedCache:
 class CacheType(Enum):
     CLASSIC = 0
     LRU = 1
-    DEPENDENCY_AWARE = 2
+    NONE = 2
 
 
 class CacheSet:
     def __init__(self, cache_type=None, cache_size=None):
-        if cache_type == CacheType.DEPENDENCY_AWARE:
-            self.init_dependency_aware_cache()
+        if cache_type == CacheType.NONE:
+            self.init_null_cache()
             logging.info("Disabling intermediate node cache.")
         elif cache_type == CacheType.LRU:
             if cache_size is None:
@@ -120,11 +120,12 @@ class CacheSet:
         self.ui = LRUCache(CacheKeySetInputSignature, max_size=cache_size)
         self.objects = HierarchicalCache(CacheKeySetID)
 
-    # only hold cached items while the descendants have not executed
-    def init_dependency_aware_cache(self):
-        self.outputs = DependencyAwareCache(CacheKeySetInputSignature)
-        self.ui = DependencyAwareCache(CacheKeySetInputSignature)
-        self.objects = DependencyAwareCache(CacheKeySetID)
+    def init_null_cache(self):
+        self.outputs = NullCache()
+        # The UI cache is expected to be iterable at the end of each workflow,
+        # so it must cache at least a full workflow. Use HierarchicalCache.
+        self.ui = HierarchicalCache(CacheKeySetInputSignature)
+        self.objects = NullCache()
 
     def recursive_debug_dump(self):
         result = {
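For context, a hedged sketch of what the new CacheSet holds once CacheType.NONE is selected (based only on the hunks above; assumes it is run from the repository root so execution.py is importable):

from execution import CacheSet, CacheType

caches = CacheSet(cache_type=CacheType.NONE)
# outputs and objects are NullCache instances, so intermediate node
# results and object instances are never retained between executions.
print(type(caches.outputs).__name__)  # NullCache
print(type(caches.objects).__name__)  # NullCache
# The UI cache stays hierarchical so the UI outputs of a finished
# workflow can still be iterated at the end of the run.
print(type(caches.ui).__name__)       # HierarchicalCache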
main.py (2 changed lines)
@@ -173,7 +173,7 @@ def prompt_worker(q, server_instance):
     if args.cache_lru > 0:
         cache_type = execution.CacheType.LRU
     elif args.cache_none:
-        cache_type = execution.CacheType.DEPENDENCY_AWARE
+        cache_type = execution.CacheType.NONE
 
     e = execution.PromptExecutor(server_instance, cache_type=cache_type, cache_size=args.cache_lru)
     last_gc_collect = 0
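With this change, launching the server with the existing --cache-none flag (e.g. python main.py --cache-none) maps to CacheType.NONE: instead of the dependency-aware eviction used before, the executor now keeps no intermediate node results at all.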