Fix all Python linting issues

- Remove all whitespace from blank lines
- Remove trailing whitespace
- Add missing newlines at end of files
- Remove unused imports
- Clean up formatting in start_runpod.py and runpod_handler.py
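For context, the whitespace problems listed above are the ones flake8 flags as W291 (trailing whitespace), W293 (whitespace on a blank line) and W292 (no newline at end of file), with F401 covering unused imports. The snippet below is a minimal, illustrative checker for the whitespace rules only; the hard-coded file names simply mirror the two files touched by this commit.

def lint_whitespace(path: str) -> list:
    """Report trailing whitespace, whitespace-only blank lines and a missing final newline."""
    problems = []
    with open(path, encoding="utf-8") as f:
        text = f.read()
    for lineno, line in enumerate(text.splitlines(), start=1):
        if line != line.rstrip():
            kind = "whitespace on blank line" if not line.strip() else "trailing whitespace"
            problems.append(f"{path}:{lineno}: {kind}")
    if text and not text.endswith("\n"):
        problems.append(f"{path}: missing newline at end of file")
    return problems


if __name__ == "__main__":
    for name in ("runpod_handler.py", "start_runpod.py"):
        for problem in lint_whitespace(name):
            print(problem)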
Bahadir Ciloglu 2025-11-01 16:26:38 +03:00
parent e054031606
commit a5ed5febf3
2 changed files with 74 additions and 76 deletions

runpod_handler.py

@@ -5,12 +5,11 @@ Optimized for the new ComfyUI features and performance improvements
"""
import os
-import json
import time
import logging
import tempfile
import requests
-from typing import Dict, Any, Optional
+from typing import Dict, Any
import runpod

# Configure logging
@@ -25,13 +24,13 @@ class ComfyUIServerlessHandler:
        self.comfyui_url = "http://127.0.0.1:8000"
        self.client_id = "runpod_serverless_worker"
        self.setup_paths()

    def setup_paths(self):
        """Setup required paths for serverless operation"""
        os.makedirs("/tmp/inputs", exist_ok=True)
        os.makedirs("/tmp/outputs", exist_ok=True)
        os.makedirs("/tmp/comfyui", exist_ok=True)

    def wait_for_comfyui(self, timeout: int = 120) -> bool:
        """Wait for ComfyUI to be ready"""
        start_time = time.time()
@@ -44,37 +43,37 @@ class ComfyUIServerlessHandler:
            except requests.exceptions.RequestException:
                pass
            time.sleep(2)

        logger.error(f"ComfyUI not ready after {timeout} seconds")
        return False

    def download_input_files(self, input_data: Dict[str, Any]) -> Dict[str, str]:
        """Download input files and return local paths"""
        local_files = {}

        if "input_files" in input_data:
            for file_key, file_url in input_data["input_files"].items():
                try:
                    response = requests.get(file_url, timeout=60)
                    response.raise_for_status()

                    # Create temporary file
                    with tempfile.NamedTemporaryFile(
                        delete=False,
                        dir="/tmp/inputs",
                        suffix=os.path.splitext(file_url)[1]
                    ) as tmp_file:
                        tmp_file.write(response.content)
                        local_files[file_key] = tmp_file.name

                    logger.info(f"Downloaded {file_key} to {local_files[file_key]}")
                except Exception as e:
                    logger.error(f"Failed to download {file_key}: {str(e)}")
                    raise

        return local_files

    def execute_workflow(self, workflow: Dict[str, Any]) -> Dict[str, Any]:
        """Execute ComfyUI workflow"""
        try:
@@ -88,31 +87,31 @@ class ComfyUIServerlessHandler:
                timeout=30
            )
            queue_response.raise_for_status()

            prompt_id = queue_response.json()["prompt_id"]
            logger.info(f"Queued workflow with prompt_id: {prompt_id}")

            # Wait for completion
            return self.wait_for_completion(prompt_id)
        except Exception as e:
            logger.error(f"Failed to execute workflow: {str(e)}")
            raise

    def wait_for_completion(self, prompt_id: str, timeout: int = 300) -> Dict[str, Any]:
        """Wait for workflow completion and return results"""
        start_time = time.time()

        while time.time() - start_time < timeout:
            try:
                # Check queue status
                queue_response = requests.get(f"{self.comfyui_url}/queue")
                queue_data = queue_response.json()

                # Check if our job is still in queue
                running = any(item[1]["prompt_id"] == prompt_id for item in queue_data.get("queue_running", []))
                pending = any(item[1]["prompt_id"] == prompt_id for item in queue_data.get("queue_pending", []))

                if not running and not pending:
                    # Job completed, get results
                    history_response = requests.get(f"{self.comfyui_url}/history/{prompt_id}")
@@ -120,15 +119,15 @@ class ComfyUIServerlessHandler:
                    history_data = history_response.json()

                    if prompt_id in history_data:
                        return self.process_results(history_data[prompt_id])

                time.sleep(2)
            except Exception as e:
                logger.error(f"Error checking completion: {str(e)}")
                time.sleep(5)

        raise TimeoutError(f"Workflow {prompt_id} timed out after {timeout} seconds")

    def process_results(self, history_data: Dict[str, Any]) -> Dict[str, Any]:
        """Process and upload results"""
        results = {
@@ -136,7 +135,7 @@ class ComfyUIServerlessHandler:
            "outputs": [],
            "metadata": {}
        }

        if "outputs" in history_data:
            for node_id, node_output in history_data["outputs"].items():
                if "images" in node_output:
@@ -148,28 +147,28 @@ class ComfyUIServerlessHandler:
                            "subfolder": image_info.get("subfolder", ""),
                            "type": image_info.get("type", "output")
                        }

                        try:
                            image_response = requests.get(image_url, params=params)
                            image_response.raise_for_status()

                            # Save to temp file for upload
                            output_path = f"/tmp/outputs/{image_info['filename']}"
                            with open(output_path, "wb") as f:
                                f.write(image_response.content)

                            results["outputs"].append({
                                "type": "image",
                                "filename": image_info["filename"],
                                "path": output_path,
                                "node_id": node_id
                            })
                        except Exception as e:
                            logger.error(f"Failed to process image {image_info['filename']}: {str(e)}")

        return results

    def cleanup(self):
        """Clean up temporary files"""
        try:
@@ -185,18 +184,18 @@ class ComfyUIServerlessHandler:
def handler(job: Dict[str, Any]) -> Dict[str, Any]:
    """Main serverless handler function"""
    handler_instance = ComfyUIServerlessHandler()

    try:
        # Wait for ComfyUI to be ready
        if not handler_instance.wait_for_comfyui():
            return {"error": "ComfyUI failed to start"}

        # Get job input
        job_input = job.get("input", {})

        # Download input files if any
        local_files = handler_instance.download_input_files(job_input)

        # Update workflow with local file paths
        workflow = job_input.get("workflow", {})
        if local_files and "file_mappings" in job_input:
@@ -205,10 +204,10 @@ def handler(job: Dict[str, Any]) -> Dict[str, Any]:
            for input_key, file_key in mappings.items():
                if file_key in local_files:
                    workflow[node_id]["inputs"][input_key] = local_files[file_key]

        # Execute workflow
        results = handler_instance.execute_workflow(workflow)

        # Upload output files to RunPod storage or return base64
        output_urls = []
        for output in results.get("outputs", []):
@@ -222,24 +221,24 @@ def handler(job: Dict[str, Any]) -> Dict[str, Any]:
                "data": image_data,
                "node_id": output["node_id"]
            })

        return {
            "status": "success",
            "outputs": output_urls,
            "execution_time": time.time() - job.get("start_time", time.time())
        }
    except Exception as e:
        logger.error(f"Handler error: {str(e)}")
        return {
            "error": str(e),
            "status": "failed"
        }
    finally:
        # Always cleanup
        handler_instance.cleanup()


if __name__ == "__main__":
    # Start the serverless worker
    runpod.serverless.start({"handler": handler})
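For readers unfamiliar with the handler's input contract, the sketch below shows a hypothetical job payload shaped after the keys the code above reads (input, workflow, input_files, file_mappings, start_time); the node id, URL and workflow content are placeholders, not values shipped with this commit.

# Hypothetical payload for handler(); only the key names come from the code above.
example_job = {
    "input": {
        "workflow": {
            "1": {"class_type": "LoadImage", "inputs": {"image": ""}},
        },
        "input_files": {
            "source_image": "https://example.com/input.png",
        },
        "file_mappings": {
            "1": {"image": "source_image"},  # node id -> {workflow input: file key}
        },
    },
    "start_time": 0.0,
}

# handler(example_job) would download the file to /tmp/inputs, patch node "1"'s
# "image" input with the local path, queue the workflow and return its outputs.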

start_runpod.py

@@ -8,7 +8,6 @@ import sys
import logging
import subprocess
import time
-from pathlib import Path

# Logging setup
logging.basicConfig(
@@ -23,17 +22,17 @@ def mount_runpod_storage():
        # RunPod network storage path (read from an environment variable)
        network_storage_path = os.environ.get('RUNPOD_NETWORK_STORAGE_PATH', '/runpod-volume')
        models_storage_path = os.path.join(network_storage_path, 'models')

        # Local models folder
        local_models_path = '/app/models'

        logger.info(f"Network storage path: {network_storage_path}")
        logger.info(f"Models storage path: {models_storage_path}")

        # Check whether a models folder exists on the network storage
        if os.path.exists(models_storage_path):
            logger.info("Network storage'da models klasörü bulundu")

            # Remove the local models folder and create a symlink
            if os.path.exists(local_models_path):
                if os.path.islink(local_models_path):
@@ -41,33 +40,33 @@ def mount_runpod_storage():
                else:
                    import shutil
                    shutil.rmtree(local_models_path)

            # Create the symlink
            os.symlink(models_storage_path, local_models_path)
            logger.info(f"Models klasörü network storage'a bağlandı: {models_storage_path} -> {local_models_path}")

            # Check the model folders
            check_model_folders(local_models_path)
        else:
            logger.warning(f"Network storage'da models klasörü bulunamadı: {models_storage_path}")
            logger.info("Local models klasörü kullanılacak")

            # Create the models folder on the network storage
            os.makedirs(models_storage_path, exist_ok=True)
            logger.info(f"Network storage'da models klasörü oluşturuldu: {models_storage_path}")

            # Move the existing local models to the network storage
            if os.path.exists(local_models_path) and not os.path.islink(local_models_path):
                import shutil
                shutil.copytree(local_models_path, models_storage_path, dirs_exist_ok=True)
                shutil.rmtree(local_models_path)
                logger.info("Local models network storage'a taşındı")

            # Create the symlink
            os.symlink(models_storage_path, local_models_path)
            logger.info("Models klasörü network storage'a bağlandı")
    except Exception as e:
        logger.error(f"Network storage mount hatası: {e}")
        logger.info("Local models klasörü kullanılacak")
@@ -77,7 +76,7 @@ def check_model_folders(models_path):
    """Check that the required model folders exist"""
    required_folders = [
        'checkpoints',
        'loras',
        'vae',
        'controlnet',
        'upscale_models',
@@ -88,7 +87,7 @@ def check_model_folders(models_path):
        'embeddings',
        'clip_vision'
    ]

    for folder in required_folders:
        folder_path = os.path.join(models_path, folder)
        if not os.path.exists(folder_path):
@@ -109,16 +108,16 @@ def download_essential_models():
    try:
        models_to_download = os.environ.get('DOWNLOAD_MODELS', '').split(',')
        models_to_download = [m.strip() for m in models_to_download if m.strip()]

        if not models_to_download:
            logger.info("İndirilecek model belirtilmedi")
            return

        logger.info(f"İndirilecek modeller: {models_to_download}")

        # Model download logic could be added here
        # Example: using huggingface-hub
    except Exception as e:
        logger.error(f"Model indirme hatası: {e}")
@@ -127,11 +126,11 @@ def setup_environment():
    # Environment variables required by ComfyUI
    os.environ['HF_HUB_DISABLE_TELEMETRY'] = '1'
    os.environ['DO_NOT_TRACK'] = '1'

    # RunPod specific
    if 'RUNPOD_POD_ID' in os.environ:
        logger.info(f"RunPod Pod ID: {os.environ['RUNPOD_POD_ID']}")

    # Port setting
    port = os.environ.get('PORT', '8188')
    os.environ['PORT'] = port
@@ -141,43 +140,43 @@ def wait_for_storage():
    """Wait for the network storage to become ready"""
    max_wait = 30  # 30 seconds
    wait_interval = 2

    network_storage_path = os.environ.get('RUNPOD_NETWORK_STORAGE_PATH', '/runpod-volume')

    for i in range(0, max_wait, wait_interval):
        if os.path.exists(network_storage_path):
            logger.info("Network storage hazır")
            return True

        logger.info(f"Network storage bekleniyor... ({i}/{max_wait}s)")
        time.sleep(wait_interval)

    logger.warning("Network storage timeout - local storage kullanılacak")
    return False


def main():
    """Main startup function"""
    logger.info("RunPod ComfyUI başlatılıyor...")

    # Environment setup
    setup_environment()

    # Wait for the network storage
    wait_for_storage()

    # Mount the network storage
    mount_runpod_storage()

    # Download essential models (optional)
    download_essential_models()

    # Start ComfyUI
    logger.info("ComfyUI başlatılıyor...")

    # Port and listen address
    port = os.environ.get('PORT', '8188')
    listen = os.environ.get('LISTEN', '0.0.0.0')

    # ComfyUI command
    cmd = [
        sys.executable, 'main.py',
@@ -185,14 +184,14 @@ def main():
        '--port', port,
        '--cpu'  # CPU mode for RunPod
    ]

    # Extra args
    if os.environ.get('COMFYUI_ARGS'):
        extra_args = os.environ['COMFYUI_ARGS'].split()
        cmd.extend(extra_args)

    logger.info(f"ComfyUI komutu: {' '.join(cmd)}")

    # Start ComfyUI
    try:
        subprocess.run(cmd, check=True)
@@ -203,4 +202,4 @@ def main():
        sys.exit(1)


if __name__ == "__main__":
    main()
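Assuming the file layout implied by the commit, a local smoke test of start_runpod.py could look like the sketch below; every value is a placeholder, and only the environment variable names are taken from the script itself.

import os
import start_runpod  # assumes start_runpod.py is on the import path

os.environ.setdefault("RUNPOD_NETWORK_STORAGE_PATH", "/runpod-volume")
os.environ.setdefault("DOWNLOAD_MODELS", "")   # comma-separated list; empty skips downloads
os.environ.setdefault("PORT", "8188")
os.environ.setdefault("LISTEN", "0.0.0.0")
os.environ.setdefault("COMFYUI_ARGS", "")      # extra ComfyUI CLI flags, if any

start_runpod.main()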