Mirror of https://github.com/comfyanonymous/ComfyUI.git
feat(api-nodes, pylint): use lazy formatting in logging functions (#10248)
Parent: 72c2071972
Commit: 51fb505ffa
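For context: the commit converts eager f-string interpolation inside logging calls to lazy %-style formatting, so the message is only rendered if the record is actually emitted, and it removes "logging-fstring-interpolation" from the pylint disable list so the old pattern is flagged again. A minimal before/after sketch of the pattern (illustrative only, not copied from the diff; the variable name is made up):

import logging

logging.basicConfig(level=logging.INFO)
task_id = "abc123"

# Before: the f-string is formatted eagerly, even though DEBUG records
# are discarded at INFO level (this is what pylint W1203 flags).
logging.debug(f"Task status for {task_id}")

# After: arguments are passed separately and formatting is deferred
# until the logging framework decides the record will be handled.
logging.debug("Task status for %s", task_id)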
@@ -431,7 +431,7 @@ async def upload_video_to_comfyapi(
 f"Video duration ({actual_duration:.2f}s) exceeds the maximum allowed ({max_duration}s)."
 )
 except Exception as e:
-logging.error(f"Error getting video duration: {e}")
+logging.error("Error getting video duration: %s", str(e))
 raise ValueError(f"Could not verify video duration from source: {e}") from e

 upload_mime_type = f"video/{container.value.lower()}"
@@ -359,10 +359,10 @@ class ApiClient:
 if params:
 params = {k: v for k, v in params.items() if v is not None} # aiohttp fails to serialize None values

-logging.debug(f"[DEBUG] Request Headers: {request_headers}")
-logging.debug(f"[DEBUG] Files: {files}")
-logging.debug(f"[DEBUG] Params: {params}")
-logging.debug(f"[DEBUG] Data: {data}")
+logging.debug("[DEBUG] Request Headers: %s", request_headers)
+logging.debug("[DEBUG] Files: %s", files)
+logging.debug("[DEBUG] Params: %s", params)
+logging.debug("[DEBUG] Data: %s", data)

 if content_type == "application/x-www-form-urlencoded":
 payload_args = self._create_urlencoded_form_data_args(data or {}, request_headers)
@@ -592,9 +592,9 @@ class ApiClient:
 error_message=f"HTTP Error {exc.status}",
 )

-logging.debug(f"[DEBUG] API Error: {user_friendly} (Status: {status_code})")
+logging.debug("[DEBUG] API Error: %s (Status: %s)", user_friendly, status_code)
 if response_content:
-logging.debug(f"[DEBUG] Response content: {response_content}")
+logging.debug("[DEBUG] Response content: %s", response_content)

 # Retry if eligible
 if status_code in self.retry_status_codes and retry_count < self.max_retries:
@@ -738,11 +738,9 @@ class SynchronousOperation(Generic[T, R]):
 if isinstance(v, Enum):
 request_dict[k] = v.value

-logging.debug(
-f"[DEBUG] API Request: {self.endpoint.method.value} {self.endpoint.path}"
-)
-logging.debug(f"[DEBUG] Request Data: {json.dumps(request_dict, indent=2)}")
-logging.debug(f"[DEBUG] Query Params: {self.endpoint.query_params}")
+logging.debug("[DEBUG] API Request: %s %s", self.endpoint.method.value, self.endpoint.path)
+logging.debug("[DEBUG] Request Data: %s", json.dumps(request_dict, indent=2))
+logging.debug("[DEBUG] Query Params: %s", self.endpoint.query_params)

 response_json = await client.request(
 self.endpoint.method.value,
@@ -757,11 +755,11 @@ class SynchronousOperation(Generic[T, R]):
 logging.debug("=" * 50)
 logging.debug("[DEBUG] RESPONSE DETAILS:")
 logging.debug("[DEBUG] Status Code: 200 (Success)")
-logging.debug(f"[DEBUG] Response Body: {json.dumps(response_json, indent=2)}")
+logging.debug("[DEBUG] Response Body: %s", json.dumps(response_json, indent=2))
 logging.debug("=" * 50)

 parsed_response = self.endpoint.response_model.model_validate(response_json)
-logging.debug(f"[DEBUG] Parsed Response: {parsed_response}")
+logging.debug("[DEBUG] Parsed Response: %s", parsed_response)
 return parsed_response
 finally:
 if owns_client:
@@ -877,7 +875,7 @@ class PollingOperation(Generic[T, R]):
 status = TaskStatus.PENDING
 for poll_count in range(1, self.max_poll_attempts + 1):
 try:
-logging.debug(f"[DEBUG] Polling attempt #{poll_count}")
+logging.debug("[DEBUG] Polling attempt #%s", poll_count)

 request_dict = (
 None if self.request is None else self.request.model_dump(exclude_none=True)
@@ -885,10 +883,13 @@ class PollingOperation(Generic[T, R]):

 if poll_count == 1:
 logging.debug(
-f"[DEBUG] Poll Request: {self.poll_endpoint.method.value} {self.poll_endpoint.path}"
+"[DEBUG] Poll Request: %s %s",
+self.poll_endpoint.method.value,
+self.poll_endpoint.path,
 )
 logging.debug(
-f"[DEBUG] Poll Request Data: {json.dumps(request_dict, indent=2) if request_dict else 'None'}"
+"[DEBUG] Poll Request Data: %s",
+json.dumps(request_dict, indent=2) if request_dict else "None",
 )

 # Query task status
@@ -903,7 +904,7 @@ class PollingOperation(Generic[T, R]):

 # Check if task is complete
 status = self._check_task_status(response_obj)
-logging.debug(f"[DEBUG] Task Status: {status}")
+logging.debug("[DEBUG] Task Status: %s", status)

 # If progress extractor is provided, extract progress
 if self.progress_extractor:
@@ -917,7 +918,7 @@ class PollingOperation(Generic[T, R]):
 result_url = self.result_url_extractor(response_obj)
 if result_url:
 message = f"Result URL: {result_url}"
-logging.debug(f"[DEBUG] {message}")
+logging.debug("[DEBUG] %s", message)
 self._display_text_on_node(message)
 self.final_response = response_obj
 if self.progress_extractor:
@@ -925,7 +926,7 @@ class PollingOperation(Generic[T, R]):
 return self.final_response
 if status == TaskStatus.FAILED:
 message = f"Task failed: {json.dumps(resp)}"
-logging.error(f"[DEBUG] {message}")
+logging.error("[DEBUG] %s", message)
 raise Exception(message)
 logging.debug("[DEBUG] Task still pending, continuing to poll...")
 # Task pending – wait
@@ -939,7 +940,12 @@ class PollingOperation(Generic[T, R]):
 raise Exception(
 f"Polling aborted after {consecutive_errors} network errors: {str(e)}"
 ) from e
-logging.warning("Network error (%s/%s): %s", consecutive_errors, max_consecutive_errors, str(e))
+logging.warning(
+"Network error (%s/%s): %s",
+consecutive_errors,
+max_consecutive_errors,
+str(e),
+)
 await asyncio.sleep(self.poll_interval)
 except Exception as e:
 # For other errors, increment count and potentially abort
@@ -949,10 +955,13 @@ class PollingOperation(Generic[T, R]):
 f"Polling aborted after {consecutive_errors} consecutive errors: {str(e)}"
 ) from e

-logging.error(f"[DEBUG] Polling error: {str(e)}")
+logging.error("[DEBUG] Polling error: %s", str(e))
 logging.warning(
-f"Error during polling (attempt {poll_count}/{self.max_poll_attempts}): {str(e)}. "
-f"Will retry in {self.poll_interval} seconds."
+"Error during polling (attempt %s/%s): %s. Will retry in %s seconds.",
+poll_count,
+self.max_poll_attempts,
+str(e),
+self.poll_interval,
 )
 await asyncio.sleep(self.poll_interval)
@@ -21,7 +21,7 @@ def get_log_directory():
 try:
 os.makedirs(log_dir, exist_ok=True)
 except Exception as e:
-logger.error(f"Error creating API log directory {log_dir}: {e}")
+logger.error("Error creating API log directory %s: %s", log_dir, str(e))
 # Fallback to base temp directory if sub-directory creation fails
 return base_temp_dir
 return log_dir
@@ -122,9 +122,9 @@ def log_request_response(
 try:
 with open(filepath, "w", encoding="utf-8") as f:
 f.write("\n".join(log_content))
-logger.debug(f"API log saved to: {filepath}")
+logger.debug("API log saved to: %s", filepath)
 except Exception as e:
-logger.error(f"Error writing API log to {filepath}: {e}")
+logger.error("Error writing API log to %s: %s", filepath, str(e))


 if __name__ == '__main__':
@@ -296,7 +296,7 @@ def validate_video_result_response(response) -> None:
 """Validates that the Kling task result contains a video."""
 if not is_valid_video_response(response):
 error_msg = f"Kling task {response.data.task_id} succeeded but no video data found in response."
-logging.error(f"Error: {error_msg}.\nResponse: {response}")
+logging.error("Error: %s.\nResponse: %s", error_msg, response)
 raise Exception(error_msg)

@@ -304,7 +304,7 @@ def validate_image_result_response(response) -> None:
 """Validates that the Kling task result contains an image."""
 if not is_valid_image_response(response):
 error_msg = f"Kling task {response.data.task_id} succeeded but no image data found in response."
-logging.error(f"Error: {error_msg}.\nResponse: {response}")
+logging.error("Error: %s.\nResponse: %s", error_msg, response)
 raise Exception(error_msg)

@@ -500,7 +500,7 @@ class MinimaxHailuoVideoNode(comfy_io.ComfyNode):
 raise Exception(
 f"No video was found in the response. Full response: {file_result.model_dump()}"
 )
-logging.info(f"Generated video URL: {file_url}")
+logging.info("Generated video URL: %s", file_url)
 if cls.hidden.unique_id:
 if hasattr(file_result.file, "backup_download_url"):
 message = f"Result URL: {file_url}\nBackup URL: {file_result.file.backup_download_url}"
@@ -237,7 +237,7 @@ def trim_video(video: VideoInput, duration_sec: float) -> VideoInput:
 audio_stream = None

 for stream in input_container.streams:
-logging.info(f"Found stream: type={stream.type}, class={type(stream)}")
+logging.info("Found stream: type=%s, class=%s", stream.type, type(stream))
 if isinstance(stream, av.VideoStream):
 # Create output video stream with same parameters
 video_stream = output_container.add_stream(
@@ -247,7 +247,7 @@ def trim_video(video: VideoInput, duration_sec: float) -> VideoInput:
 video_stream.height = stream.height
 video_stream.pix_fmt = "yuv420p"
 logging.info(
-f"Added video stream: {stream.width}x{stream.height} @ {stream.average_rate}fps"
+"Added video stream: %sx%s @ %sfps", stream.width, stream.height, stream.average_rate
 )
 elif isinstance(stream, av.AudioStream):
 # Create output audio stream with same parameters
@@ -256,9 +256,7 @@ def trim_video(video: VideoInput, duration_sec: float) -> VideoInput:
 )
 audio_stream.sample_rate = stream.sample_rate
 audio_stream.layout = stream.layout
-logging.info(
-f"Added audio stream: {stream.sample_rate}Hz, {stream.channels} channels"
-)
+logging.info("Added audio stream: %sHz, %s channels", stream.sample_rate, stream.channels)

 # Calculate target frame count that's divisible by 16
 fps = input_container.streams.video[0].average_rate
@@ -288,9 +286,7 @@ def trim_video(video: VideoInput, duration_sec: float) -> VideoInput:
 for packet in video_stream.encode():
 output_container.mux(packet)

-logging.info(
-f"Encoded {frame_count} video frames (target: {target_frames})"
-)
+logging.info("Encoded %s video frames (target: %s)", frame_count, target_frames)

 # Decode and re-encode audio frames
 if audio_stream:
@@ -308,7 +304,7 @@ def trim_video(video: VideoInput, duration_sec: float) -> VideoInput:
 for packet in audio_stream.encode():
 output_container.mux(packet)

-logging.info(f"Encoded {audio_frame_count} audio frames")
+logging.info("Encoded %s audio frames", audio_frame_count)

 # Close containers
 output_container.close()
@@ -172,16 +172,16 @@ async def create_generate_task(
 logging.info("[ Rodin3D API - Submit Jobs ] Submit Generate Task Success!")
 subscription_key = response.jobs.subscription_key
 task_uuid = response.uuid
-logging.info(f"[ Rodin3D API - Submit Jobs ] UUID: {task_uuid}")
+logging.info("[ Rodin3D API - Submit Jobs ] UUID: %s", task_uuid)
 return task_uuid, subscription_key


 def check_rodin_status(response: Rodin3DCheckStatusResponse) -> str:
 all_done = all(job.status == JobStatus.Done for job in response.jobs)
 status_list = [str(job.status) for job in response.jobs]
-logging.info(f"[ Rodin3D API - CheckStatus ] Generate Status: {status_list}")
+logging.info("[ Rodin3D API - CheckStatus ] Generate Status: %s", status_list)
 if any(job.status == JobStatus.Failed for job in response.jobs):
-logging.error(f"[ Rodin3D API - CheckStatus ] Generate Failed: {status_list}, Please try again.")
+logging.error("[ Rodin3D API - CheckStatus ] Generate Failed: %s, Please try again.", status_list)
 raise Exception("[ Rodin3D API ] Generate Failed, Please Try again.")
 if all_done:
 return "DONE"
@@ -235,7 +235,7 @@ async def download_files(url_list, task_uuid):
 file_path = os.path.join(save_path, file_name)
 if file_path.endswith(".glb"):
 model_file_path = file_path
-logging.info(f"[ Rodin3D API - download_files ] Downloading file: {file_path}")
+logging.info("[ Rodin3D API - download_files ] Downloading file: %s", file_path)
 max_retries = 5
 for attempt in range(max_retries):
 try:
@@ -246,7 +246,7 @@ async def download_files(url_list, task_uuid):
 f.write(chunk)
 break
 except Exception as e:
-logging.info(f"[ Rodin3D API - download_files ] Error downloading {file_path}:{e}")
+logging.info("[ Rodin3D API - download_files ] Error downloading %s:%s", file_path, str(e))
 if attempt < max_retries - 1:
 logging.info("Retrying...")
 await asyncio.sleep(2)
@@ -215,7 +215,7 @@ class VeoVideoGenerationNode(comfy_io.ComfyNode):
 initial_response = await initial_operation.execute()
 operation_name = initial_response.name

-logging.info(f"Veo generation started with operation name: {operation_name}")
+logging.info("Veo generation started with operation name: %s", operation_name)

 # Define status extractor function
 def status_extractor(response):
@@ -61,7 +61,6 @@ messages_control.disable = [
 # next warnings should be fixed in future
 "bad-classmethod-argument", # Class method should have 'cls' as first argument
 "wrong-import-order", # Standard imports should be placed before third party imports
-"logging-fstring-interpolation", # Use lazy % formatting in logging functions
 "ungrouped-imports",
 "unnecessary-pass",
 "unnecessary-lambda-assignment",
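With "logging-fstring-interpolation" dropped from the disable list, pylint will again report this check (W1203) for any f-string passed to a logging call. A hypothetical way to run just this check on one file (assumes pylint is installed; the path is illustrative):

pylint --disable=all --enable=logging-fstring-interpolation path/to/module.py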