fix(api-nodes): shorten API log file paths (#10156)

This commit is contained in:
Alexander Piskun 2025-10-03 00:50:31 +03:00 committed by GitHub
parent 8f4ee9984c
commit f6e3e9a456
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 59 additions and 18 deletions

View File

@ -95,6 +95,7 @@ import aiohttp
import asyncio import asyncio
import logging import logging
import io import io
import os
import socket import socket
from aiohttp.client_exceptions import ClientError, ClientResponseError from aiohttp.client_exceptions import ClientError, ClientResponseError
from typing import Dict, Type, Optional, Any, TypeVar, Generic, Callable, Tuple from typing import Dict, Type, Optional, Any, TypeVar, Generic, Callable, Tuple
@ -499,7 +500,9 @@ class ApiClient:
else: else:
raise ValueError("File must be BytesIO or str path") raise ValueError("File must be BytesIO or str path")
operation_id = f"upload_{upload_url.split('/')[-1]}_{uuid.uuid4().hex[:8]}" parsed = urlparse(upload_url)
basename = os.path.basename(parsed.path) or parsed.netloc or "upload"
operation_id = f"upload_{basename}_{uuid.uuid4().hex[:8]}"
request_logger.log_request_response( request_logger.log_request_response(
operation_id=operation_id, operation_id=operation_id,
request_method="PUT", request_method="PUT",

View File

@ -4,16 +4,18 @@ import os
import datetime import datetime
import json import json
import logging import logging
import re
import hashlib
from typing import Any
import folder_paths import folder_paths
# Get the logger instance # Get the logger instance
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def get_log_directory(): def get_log_directory():
""" """Ensures the API log directory exists within ComfyUI's temp directory and returns its path."""
Ensures the API log directory exists within ComfyUI's temp directory
and returns its path.
"""
base_temp_dir = folder_paths.get_temp_directory() base_temp_dir = folder_paths.get_temp_directory()
log_dir = os.path.join(base_temp_dir, "api_logs") log_dir = os.path.join(base_temp_dir, "api_logs")
try: try:
@ -24,42 +26,77 @@ def get_log_directory():
return base_temp_dir return base_temp_dir
return log_dir return log_dir
def _format_data_for_logging(data):
def _sanitize_filename_component(name: str) -> str:
if not name:
return "log"
sanitized = re.sub(r"[^A-Za-z0-9._-]+", "_", name) # Replace disallowed characters with underscore
sanitized = sanitized.strip(" ._") # Windows: trailing dots or spaces are not allowed
if not sanitized:
sanitized = "log"
return sanitized
def _short_hash(*parts: str, length: int = 10) -> str:
return hashlib.sha1(("|".join(parts)).encode("utf-8")).hexdigest()[:length]
def _build_log_filepath(log_dir: str, operation_id: str, request_url: str) -> str:
    """Compose the full path for a request/response log file.

    The filename has the shape ``<timestamp>_<slug>_<hash>.log``: the slug is
    a sanitized, possibly truncated form of *operation_id*, and the short
    hash ties the file back to the complete operation id and URL even when
    the slug had to be cut.  The total path length is kept around 240
    characters, comfortably under Windows' ~260-character limit.
    """
    stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S_%f")
    slug = _sanitize_filename_component(operation_id)  # readable portion
    digest = _short_hash(operation_id or "", request_url or "")  # identity anchor
    budget_path = 240  # stay well below common OS path-length limits
    head = f"{stamp}_"
    tail = f"_{digest}.log"
    if not slug:
        slug = "op"
    # Room left for the filename after the directory, then for the slug
    # after the fixed head/tail; both budgets have generous floors.
    filename_room = max(60, budget_path - len(log_dir) - 1)
    slug_room = max(8, filename_room - len(head) - len(tail))
    if len(slug) > slug_room:
        slug = slug[:slug_room].rstrip(" ._-")
    return os.path.join(log_dir, f"{head}{slug}{tail}")
def _format_data_for_logging(data: Any) -> str:
"""Helper to format data (dict, str, bytes) for logging.""" """Helper to format data (dict, str, bytes) for logging."""
if isinstance(data, bytes): if isinstance(data, bytes):
try: try:
return data.decode('utf-8') # Try to decode as text return data.decode("utf-8") # Try to decode as text
except UnicodeDecodeError: except UnicodeDecodeError:
return f"[Binary data of length {len(data)} bytes]" return f"[Binary data of length {len(data)} bytes]"
elif isinstance(data, (dict, list)): elif isinstance(data, (dict, list)):
try: try:
return json.dumps(data, indent=2, ensure_ascii=False) return json.dumps(data, indent=2, ensure_ascii=False)
except TypeError: except TypeError:
return str(data) # Fallback for non-serializable objects return str(data) # Fallback for non-serializable objects
return str(data) return str(data)
def log_request_response( def log_request_response(
operation_id: str, operation_id: str,
request_method: str, request_method: str,
request_url: str, request_url: str,
request_headers: dict | None = None, request_headers: dict | None = None,
request_params: dict | None = None, request_params: dict | None = None,
request_data: any = None, request_data: Any = None,
response_status_code: int | None = None, response_status_code: int | None = None,
response_headers: dict | None = None, response_headers: dict | None = None,
response_content: any = None, response_content: Any = None,
error_message: str | None = None error_message: str | None = None,
): ):
""" """
Logs API request and response details to a file in the temp/api_logs directory. Logs API request and response details to a file in the temp/api_logs directory.
Filenames are sanitized and length-limited for cross-platform safety.
If we still fail to write, we fall back to appending into api.log.
""" """
log_dir = get_log_directory() log_dir = get_log_directory()
timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S_%f") filepath = _build_log_filepath(log_dir, operation_id, request_url)
filename = f"{timestamp}_{operation_id.replace('/', '_').replace(':', '_')}.log"
filepath = os.path.join(log_dir, filename)
log_content = []
log_content: list[str] = []
log_content.append(f"Timestamp: {datetime.datetime.now().isoformat()}") log_content.append(f"Timestamp: {datetime.datetime.now().isoformat()}")
log_content.append(f"Operation ID: {operation_id}") log_content.append(f"Operation ID: {operation_id}")
log_content.append("-" * 30 + " REQUEST " + "-" * 30) log_content.append("-" * 30 + " REQUEST " + "-" * 30)
@ -69,7 +106,7 @@ def log_request_response(
log_content.append(f"Headers:\n{_format_data_for_logging(request_headers)}") log_content.append(f"Headers:\n{_format_data_for_logging(request_headers)}")
if request_params: if request_params:
log_content.append(f"Params:\n{_format_data_for_logging(request_params)}") log_content.append(f"Params:\n{_format_data_for_logging(request_params)}")
if request_data: if request_data is not None:
log_content.append(f"Data/Body:\n{_format_data_for_logging(request_data)}") log_content.append(f"Data/Body:\n{_format_data_for_logging(request_data)}")
log_content.append("\n" + "-" * 30 + " RESPONSE " + "-" * 30) log_content.append("\n" + "-" * 30 + " RESPONSE " + "-" * 30)
@ -77,7 +114,7 @@ def log_request_response(
log_content.append(f"Status Code: {response_status_code}") log_content.append(f"Status Code: {response_status_code}")
if response_headers: if response_headers:
log_content.append(f"Headers:\n{_format_data_for_logging(response_headers)}") log_content.append(f"Headers:\n{_format_data_for_logging(response_headers)}")
if response_content: if response_content is not None:
log_content.append(f"Content:\n{_format_data_for_logging(response_content)}") log_content.append(f"Content:\n{_format_data_for_logging(response_content)}")
if error_message: if error_message:
log_content.append(f"Error:\n{error_message}") log_content.append(f"Error:\n{error_message}")
@ -89,6 +126,7 @@ def log_request_response(
except Exception as e: except Exception as e:
logger.error(f"Error writing API log to {filepath}: {e}") logger.error(f"Error writing API log to {filepath}: {e}")
if __name__ == '__main__': if __name__ == '__main__':
# Example usage (for testing the logger directly) # Example usage (for testing the logger directly)
logger.setLevel(logging.DEBUG) logger.setLevel(logging.DEBUG)