Cache requests in nodes
parent 7259c252ad
commit 72bb572181
@@ -44,6 +44,7 @@ from ..component_model.queue_types import QueueTuple, HistoryEntry, QueueItem, M
     ExecutionStatusAsDict
 from ..execution_context import context_execute_node, context_execute_prompt
 from ..execution_ext import should_panic_on_exception
+from ..node_requests_caching import use_requests_caching
 from ..nodes.package_typing import InputTypeSpec, FloatSpecOptions, IntSpecOptions, CustomNode
 from ..nodes_context import get_nodes
 from comfy_execution.progress import get_progress_state, reset_progress_state, add_progress_handler, WebUIProgressHandler, \
@@ -467,7 +468,8 @@ async def execute(server: ExecutorToClientProgress, dynprompt: DynamicPrompt, ca
     :param pending_subgraph_results:
     :return:
     """
-    with context_execute_node(node_id):
+    with context_execute_node(node_id), \
+            use_requests_caching():
         return await _execute(server, dynprompt, caches, node_id, extra_data, executed, prompt_id, execution_list, pending_subgraph_results, pending_async_nodes)
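The backslash-continued with-statement above simply stacks the two context managers, so any call a node makes through requests while it executes is routed through the cache. For reference, the chained form behaves exactly like nesting; a stand-alone sketch with dummy context managers (not the project's):

from contextlib import contextmanager

@contextmanager
def outer():
    print("enter outer"); yield; print("exit outer")

@contextmanager
def inner():
    print("enter inner"); yield; print("exit inner")

# Chained form, as in the hunk above ...
with outer(), \
        inner():
    print("body")

# ... is equivalent to the nested form:
with outer():
    with inner():
        print("body")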
comfy/node_requests_caching.py (new file, 39 lines)
@@ -0,0 +1,39 @@
+import os.path
+import pathlib
+
+import requests_cache
+from contextlib import contextmanager
+
+
+@contextmanager
+def use_requests_caching(
+        cache_name='http_cache',
+        cache_control=True,
+        **kwargs
+):
+    """
+    A context manager to globally patch 'requests' with 'requests-cache'
+    for all code executed within its scope.
+
+    This implementation uses the 'filesystem' backend, which is ideal
+    for large file responses.
+
+    By default, it also sets 'use_cache_dir=True'. This automatically
+    tells requests-cache to store its cache files in the standard
+    user cache directory for your operating system.
+
+    - On Linux, this respects the $XDG_CACHE_HOME environment variable.
+    - On macOS, it uses ~/Library/Caches/<cache_name>/
+    - On Windows, it uses %LOCALAPPDATA%\\<cache_name>\\Cache
+
+    You do not need to populate a directory variable; this parameter
+    handles it for you.
+    """
+
+    kwargs['backend'] = 'filesystem'
+    path_provided = isinstance(cache_name, pathlib.PurePath) or os.path.sep in str(cache_name) or '.' == str(cache_name)[0]
+    kwargs.setdefault('use_cache_dir', not path_provided)
+    kwargs.setdefault('cache_control', cache_control)
+
+    with requests_cache.enabled(cache_name, **kwargs):
+        yield
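As a quick illustration of what this context manager does, the sketch below mirrors the unit test added further down: the same URL is fetched twice inside the context, and the second response comes back from the filesystem cache. The cache name here is a placeholder; the URL is the one used by the test.

import requests

from comfy.node_requests_caching import use_requests_caching

# Hypothetical usage sketch; "example_cache" is a placeholder cache name.
url = "https://fonts.gstatic.com/s/lato/v23/S6uyw4BMUTPHjxAwXiWtFCfQ7A.woff2"

with use_requests_caching(cache_name="example_cache"):
    first = requests.get(url, timeout=10)   # hits the network (on a cold cache)
    second = requests.get(url, timeout=10)  # served from the filesystem cache
    print(first.from_cache, second.from_cache)  # on a cold cache: False True

Passing a pathlib.Path, or a string containing a path separator, as cache_name (as the test does with tmp_path) trips the path_provided check, which turns off use_cache_dir so the cache is written exactly where requested.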
pyproject.toml
@@ -111,6 +111,7 @@ dependencies = [
     "trimesh",
     # doesn't support linux correctly yet
     "stringzilla<4.2.0",
+    "requests_cache",
 ]
 
 [build-system]
tests/unit/test_requests_caching.py (new file, 46 lines)
@@ -0,0 +1,46 @@
+import pytest
+import time
+import requests
+import logging
+
+from comfy.node_requests_caching import use_requests_caching
+
+logger = logging.getLogger(__name__)
+
+MIN_NETWORK_TIME_SEC = 0.1
+
+
+@pytest.mark.parametrize("test_url", [
+    "https://fonts.gstatic.com/s/lato/v23/S6uyw4BMUTPHjxAwXiWtFCfQ7A.woff2"
+])
+def test_caching_context_manager_works(test_url, tmp_path):
+    logger.info(f"\n[Test] Call 1 (Inside Context): Fetching... {test_url}")
+    start_time_1 = time.time()
+    with use_requests_caching(cache_name=tmp_path):
+        r1 = requests.get(test_url, timeout=10)
+    duration_1 = time.time() - start_time_1
+
+    logger.info(f"Call 1 took: {duration_1:.3f}s")
+    assert r1.status_code == 200
+    assert r1.from_cache is False
+    assert "Cache-Control" in r1.headers, "Response must have 'Cache-Control' header for this test to be valid"
+
+    logger.info(f"[Test] Call 2 (Inside Context): From cache... {test_url}")
+    start_time_2 = time.time()
+    with use_requests_caching(cache_name=tmp_path):
+        r2 = requests.get(test_url, timeout=10)
+    duration_2 = time.time() - start_time_2
+
+    logger.info(f"Call 2 took: {duration_2:.3f}s")
+    assert r2.status_code == 200
+    assert r2.from_cache is True
+
+    logger.info(f"[Test] Call 3 (Outside Context): Fetching again... {test_url}")
+    start_time_3 = time.time()
+    r3 = requests.get(test_url, timeout=10)
+    duration_3 = time.time() - start_time_3
+
+    logger.info(f"Call 3 took: {duration_3:.3f}s")
+    assert r3.status_code == 200
+    # A standard response object has no 'from_cache' attribute
+    assert getattr(r3, 'from_cache', None) is None
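Running pytest against tests/unit/test_requests_caching.py exercises this end to end: the first call populates the cache, the second is answered from it, and the third, made outside the context manager, confirms that requests is restored to its unpatched behavior (no from_cache attribute on the response).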