UX: preflight banner, fast PyTorch detection, weekly full-check, non-blocking prompts, optional updates, auto port selection and browser open

John Alva 2025-11-11 11:18:02 -06:00
parent 0443944dbb
commit 5b4c2ff924


@@ -6,19 +6,27 @@ REM Display ComfyUI 8-bit header
echo.
echo ╔═══════════════════════════════════════════════════════════╗
echo ║ ║
echo ║ ██████╗ ██████╗ ███╗ ███╗███████╗██╗ ██╗██╗
echo ║ ██╔════╝██╔═══██╗████╗ ████║██╔════╝╚██╗ ██╔╝██║
echo ║ ██║ ██║ ██║██╔████╔██║█████╗ ╚████╔╝ ██║
echo ║ ██║ ██║ ██║██║╚██╔╝██║██╔══╝ ╚██╔╝ ██║
echo ║ ╚██████╗╚██████╔╝██║ ╚═╝ ██║██║ ██║ ███████╗
echo ║ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝ ╚══════╝
echo ██████╗ ██████╗ ███╗ ███╗███████╗██╗ ██╗ ║
echo ██╔════╝██╔═══██╗████╗ ████║██╔════╝╚██╗ ██╔╝ ║
echo ██║ ██║ ██║██╔████╔██║█████╗ ╚████╔╝ ║
echo ██║ ██║ ██║██║╚██╔╝██║██╔══╝ ╚██╔╝ ║
echo ╚██████╗╚██████╔╝██║ ╚═╝ ██║██║ ██║
echo ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝
echo ║ ║
echo ║ The most powerful open source node-based ║
echo ║ application for generative AI ║
echo ║ application for generative AI
echo ║ ║
echo ╚═══════════════════════════════════════════════════════════╝
echo.
echo ╔═══════════════════════════════════════════════════════════╗
echo ║ Preflight Check ║
echo ╚═══════════════════════════════════════════════════════════╝
echo.
echo ▓ Taking a quick look around your rig... checking prereqs.
echo This will only take a moment.
echo.
REM Check Python availability
python --version >nul 2>&1
if errorlevel 1 (
@@ -48,7 +56,31 @@ for /f "tokens=1,* delims==" %%a in (env_info.tmp) do (
)
del env_info.tmp
REM ---------------------------------------------------------------
REM Weekly full check logic (skip optional prompts for faster launch)
REM Force with: run_comfyui.bat --full-check
REM ---------------------------------------------------------------
set STATE_DIR=%LOCALAPPDATA%\ComfyUI\state
if not exist "%STATE_DIR%" mkdir "%STATE_DIR%" >nul 2>&1
set FULL_STAMP=%STATE_DIR%\last_full_check.stamp
set NEED_FULL=
for %%A in (%*) do (
if /i "%%~A"=="--full-check" set NEED_FULL=1
)
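REM A full check is triggered by the --full-check flag above, by a missing stamp
REM file, or by a stamp older than 7 days (forfiles /D -7 matches files last
REM modified at least 7 days ago). A successful full check re-touches the stamp.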
if not defined NEED_FULL (
if not exist "%FULL_STAMP%" (
set NEED_FULL=1
) else (
forfiles /P "%STATE_DIR%" /M "last_full_check.stamp" /D -7 >nul 2>&1
if errorlevel 1 set NEED_FULL=
if not errorlevel 1 set NEED_FULL=1
)
)
REM Check for missing dependencies - separate critical vs optional
if not defined NEED_FULL goto :check_pytorch
python -c "import importlib.util; critical = []; optional = []; critical_deps = {'yaml': 'yaml', 'torch': 'torch', 'torchvision': 'torchvision', 'torchaudio': 'torchaudio', 'numpy': 'numpy', 'einops': 'einops', 'transformers': 'transformers', 'tokenizers': 'tokenizers', 'sentencepiece': 'sentencepiece', 'safetensors': 'safetensors', 'aiohttp': 'aiohttp', 'yarl': 'yarl', 'PIL': 'PIL', 'scipy': 'scipy', 'tqdm': 'tqdm', 'psutil': 'psutil', 'alembic': 'alembic', 'sqlalchemy': 'sqlalchemy', 'av': 'av', 'comfyui_frontend': 'comfyui_frontend_package'}; optional_deps = {'comfyui_workflow_templates': 'comfyui_workflow_templates', 'comfyui_embedded_docs': 'comfyui_embedded_docs'}; [critical.append(k) for k, v in critical_deps.items() if not importlib.util.find_spec(v)]; [optional.append(k) for k, v in optional_deps.items() if not importlib.util.find_spec(v)]; print('CRITICAL:' + (','.join(critical) if critical else 'NONE')); print('OPTIONAL:' + (','.join(optional) if optional else 'NONE'))" > deps_check.tmp
for /f "tokens=1,* delims=:" %%a in (deps_check.tmp) do (
if "%%a"=="CRITICAL" set MISSING_CRITICAL=%%b
@@ -70,24 +102,26 @@ if "%MISSING_CRITICAL%"=="NONE" (
echo ▓ These packages add extra features but aren't required to run ComfyUI.
echo ComfyUI will launch without them, but some features may be unavailable.
echo.
set /p INSTALL_OPTIONAL="Would you like to install optional packages? (Y/N/S=Skip for now): "
if /i "%INSTALL_OPTIONAL%"=="Y" (
echo.
echo ▓ Installing optional packages...
python -m pip install comfyui-workflow-templates comfyui-embedded-docs >nul 2>&1
echo ▓ Optional packages installed.
echo.
) else if /i "%INSTALL_OPTIONAL%"=="S" (
choice /C YNS /N /D S /T 10 /M "Install optional packages? (Y=Yes / N=No / S=Skip for now, default S in 10s): "
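REM choice returns the 1-based index of the pressed key: Y=1, N=2, S=3. Because
REM "if errorlevel N" means N or higher, the branches below test highest-first.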
if errorlevel 3 (
echo.
echo ▓ Skipping optional packages. ComfyUI will launch with limited features.
echo.
) else (
) else if errorlevel 2 (
echo.
echo ▓ Skipping optional packages.
echo.
) else (
echo.
echo ▓ Installing optional packages...
python -m pip install --disable-pip-version-check comfyui-workflow-templates comfyui-embedded-docs >nul 2>&1
echo ▓ Optional packages installed.
echo.
)
type nul > "%FULL_STAMP%"
goto :check_pytorch
)
type nul > "%FULL_STAMP%"
goto :check_pytorch
)
@@ -142,7 +176,7 @@ if not "%MISSING_CRITICAL%"=="NONE" (
echo ▓ Installing all required packages...
echo This may take several minutes. Please wait...
echo.
python -m pip install --progress-bar on -r requirements.txt
python -m pip install --progress-bar on --disable-pip-version-check -r requirements.txt
if errorlevel 1 (
echo.
echo ╔═══════════════════════════════════════════════════════════╗
@@ -165,11 +199,12 @@ if not "%MISSING_CRITICAL%"=="NONE" (
echo.
echo ▓ Great! All packages installed successfully.
echo.
type nul > "%FULL_STAMP%"
) else if /i "%INSTALL_CHOICE%"=="C" (
echo.
echo ▓ Installing critical packages only...
echo.
python -m pip install --progress-bar on torch torchvision torchaudio numpy einops transformers tokenizers sentencepiece safetensors aiohttp yarl pyyaml Pillow scipy tqdm psutil alembic SQLAlchemy av comfyui-frontend-package
python -m pip install --progress-bar on --disable-pip-version-check torch torchvision torchaudio numpy einops transformers tokenizers sentencepiece safetensors aiohttp yarl pyyaml Pillow scipy tqdm psutil alembic SQLAlchemy av comfyui-frontend-package
if errorlevel 1 (
echo.
echo ╔═══════════════════════════════════════════════════════════╗
@@ -185,6 +220,7 @@ if not "%MISSING_CRITICAL%"=="NONE" (
echo.
echo ▓ Critical packages installed. ComfyUI should now launch.
echo.
type nul > "%FULL_STAMP%"
) else (
echo.
echo ▓ Installation cancelled.
@@ -198,26 +234,34 @@ if not "%MISSING_CRITICAL%"=="NONE" (
)
:check_pytorch
REM Check if PyTorch has CUDA support (for NVIDIA GPUs)
python -c "import torch; cuda_available = torch.cuda.is_available(); cuda_version = torch.version.cuda if cuda_available else None; pytorch_version = torch.__version__; print('CUDA_AVAILABLE:' + str(cuda_available)); print('CUDA_VERSION:' + (cuda_version if cuda_version else 'NONE')); print('PYTORCH_VERSION:' + pytorch_version)" > pytorch_check.tmp 2>&1
if errorlevel 1 (
echo.
echo ╔═══════════════════════════════════════════════════════════╗
echo ║ Could Not Check GPU Support ║
echo ╚═══════════════════════════════════════════════════════════╝
echo.
echo ▓ We couldn't check if your GPU will work with ComfyUI.
echo ComfyUI will try to start anyway, but it might run slowly on your CPU.
echo.
goto :start_comfyui
)
for /f "tokens=1,* delims=:" %%a in (pytorch_check.tmp) do (
if "%%a"=="CUDA_AVAILABLE" set CUDA_AVAILABLE=%%b
if "%%a"=="CUDA_VERSION" set CUDA_VERSION=%%b
REM Fast path: read torch version without importing (import is slow)
python -c "import sys; from importlib import util, metadata; s=util.find_spec('torch'); print('HAS_TORCH:' + ('1' if s else '0')); print('PYTORCH_VERSION:' + (metadata.version('torch') if s else 'NONE'))" > torch_meta.tmp 2>nul
set HAS_TORCH=
set PYTORCH_VERSION=NONE
for /f "tokens=1,* delims=:" %%a in (torch_meta.tmp) do (
if "%%a"=="HAS_TORCH" set HAS_TORCH=%%b
if "%%a"=="PYTORCH_VERSION" set PYTORCH_VERSION=%%b
)
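REM importlib.metadata only reads the installed dist-info on disk, so this finishes
REM in milliseconds, whereas a full "import torch" can take several seconds.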
del pytorch_check.tmp
del torch_meta.tmp 2>nul
REM Default CUDA vars
set CUDA_AVAILABLE=False
set CUDA_VERSION=NONE
REM Only import torch to check CUDA if present and not CPU build
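REM CPU-only wheels carry a "+cpu" local version suffix (e.g. 2.x.y+cpu), so the
REM suffix check below rules out CUDA without importing torch at all.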
if "%HAS_TORCH%"=="1" (
echo %PYTORCH_VERSION% | findstr /C:"+cpu" >nul
if errorlevel 1 (
python -c "import torch; print('CUDA_AVAILABLE:' + str(torch.cuda.is_available())); print('CUDA_VERSION:' + (torch.version.cuda or 'NONE'))" > pytorch_check.tmp 2>nul
if not errorlevel 1 (
for /f "tokens=1,* delims=:" %%a in (pytorch_check.tmp) do (
if "%%a"=="CUDA_AVAILABLE" set CUDA_AVAILABLE=%%b
if "%%a"=="CUDA_VERSION" set CUDA_VERSION=%%b
)
)
del pytorch_check.tmp 2>nul
)
)
REM Check if PyTorch version contains "+cpu" indicating CPU-only build
echo %PYTORCH_VERSION% | findstr /C:"+cpu" >nul
@@ -238,8 +282,16 @@ if not errorlevel 1 (
echo.
echo ▓ Note: This requires an NVIDIA graphics card with CUDA support.
echo.
set /p INSTALL_CUDA="Would you like to install CUDA-enabled PyTorch now? (Y/N): "
if /i "%INSTALL_CUDA%"=="Y" (
choice /C YN /N /D N /T 15 /M "Install CUDA-enabled PyTorch now? (Y/N, default N in 15s): "
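REM Defaults to N after 15 seconds so an unattended launch never blocks on this prompt.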
if errorlevel 2 (
echo.
echo ▓ Skipping CUDA PyTorch installation.
echo ComfyUI will not be able to run with CPU-only PyTorch.
echo Please install CUDA-enabled PyTorch manually and try again.
echo.
pause
exit /b 0
) else (
echo.
echo ▓ Uninstalling CPU-only PyTorch...
python -m pip uninstall -y torch torchvision torchaudio
@@ -259,7 +311,7 @@ if not errorlevel 1 (
echo ▓ Installing CUDA-enabled PyTorch...
echo This may take several minutes. Please wait...
echo.
python -m pip install --progress-bar on torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu130
python -m pip install --progress-bar on --disable-pip-version-check torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu130
if errorlevel 1 (
echo.
echo ╔═══════════════════════════════════════════════════════════╗
@@ -285,8 +337,8 @@ if not errorlevel 1 (
echo ▓ Warning: Could not verify PyTorch installation.
echo Continuing anyway...
echo.
REM Continue to launch ComfyUI even if verification failed
goto :start_comfyui
REM Continue to launch (offer updates) even if verification failed
goto :maybe_update_torch
) else (
for /f "tokens=1,* delims=:" %%a in (pytorch_verify.tmp) do (
if "%%a"=="CUDA_AVAILABLE" set CUDA_VERIFY=%%b
@@ -308,20 +360,12 @@ if not errorlevel 1 (
) else (
echo ▓ Verification successful! CUDA-enabled PyTorch is ready.
echo.
REM Continue to launch ComfyUI
goto :start_comfyui
REM Continue to launch (offer updates)
goto :maybe_update_torch
)
)
REM If verification failed but installation succeeded, continue anyway
goto :start_comfyui
) else (
echo.
echo ▓ Skipping CUDA PyTorch installation.
echo ComfyUI will not be able to run with CPU-only PyTorch.
echo Please install CUDA-enabled PyTorch manually and try again.
echo.
pause
exit /b 0
goto :maybe_update_torch
)
)
@@ -350,16 +394,101 @@ if "%CUDA_AVAILABLE%"=="False" (
pause
exit /b 0
)
)
)
REM If CUDA is available after checks, offer optional updates then show all-clear banner
if /i "%CUDA_AVAILABLE%"=="True" goto :maybe_update_torch
REM Otherwise go straight to launch (CPU fallback accepted)
goto :check_port
:maybe_update_torch
REM Quick connectivity probe - skip updates if offline
powershell -NoProfile -Command "try{(Invoke-WebRequest -Uri 'https://pypi.org' -Method Head -TimeoutSec 3)>$null; exit 0}catch{exit 1}"
if errorlevel 1 (
echo.
echo ▓ Looks like we're offline. Skipping update checks.
goto :all_clear_banner
)
set OUTDATED_TORCH=
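REM Newer pip versions reject --format=freeze together with --outdated, so use the
REM default column output and match lines whose first column is "torch".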
python -m pip list --disable-pip-version-check --outdated --format=columns 2>nul | findstr /b /i /c:"torch " > outdated_torch.tmp
for /f %%i in (outdated_torch.tmp) do set OUTDATED_TORCH=1
del outdated_torch.tmp 2>nul
if defined OUTDATED_TORCH (
echo.
echo ╔═══════════════════════════════════════════════════════════╗
echo ║ PyTorch Updates Available ║
echo ╚═══════════════════════════════════════════════════════════╝
echo.
echo ▓ A newer version of PyTorch packages is available.
echo ▓ You can update now or skip and launch immediately.
echo.
choice /C YN /N /D N /T 10 /M "Update now? (Y/N, default N in 10s): "
if errorlevel 2 (
echo.
echo ▓ Skipping updates for now.
echo.
) else (
echo.
echo ▓ Updating PyTorch packages...
python -m pip install --progress-bar on --disable-pip-version-check --upgrade torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu130
echo.
)
)
:all_clear_banner
echo.
echo ╔═══════════════════════════════════════════════════════════╗
echo ║ You're All Set! ║
echo ╚═══════════════════════════════════════════════════════════╝
echo.
echo ▓ CUDA-enabled PyTorch is ready to go!
echo Your GPU is configured and ready for ComfyUI.
echo.
echo ▓ Launching ComfyUI in 3 seconds...
timeout /t 3 /nobreak >nul
echo.
goto :check_port
:check_port
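REM Probe netstat for a LISTENING socket on the chosen port; if it is busy, walk
REM ports 8189-8199 and take the first free one, otherwise give up with a message.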
if "%COMFY_PORT%"=="" set COMFY_PORT=8188
netstat -ano | findstr /r /c:":%COMFY_PORT% .*LISTENING" >nul
if errorlevel 1 (
goto :port_ok
) else (
for /l %%P in (8189,1,8199) do (
netstat -ano | findstr /r /c:":%%P .*LISTENING" >nul
if errorlevel 1 (
set COMFY_PORT=%%P
echo.
echo ▓ Port %COMFY_PORT% is busy. Rolling to free port %%P in 5 seconds...
timeout /t 5 /nobreak >nul
goto :port_ok
)
)
echo.
echo ▓ All fallback ports 8189-8199 appear busy. Please free a port and try again.
echo.
pause
exit /b 1
)
:port_ok
goto :start_comfyui
:start_comfyui
echo.
echo ╔═══════════════════════════════════════════════════════════╗
echo ║ Starting ComfyUI... ║
echo ╚═══════════════════════════════════════════════════════════╝
echo.
python main.py
set GUI_URL=http://127.0.0.1:%COMFY_PORT%
REM Spawn a background PowerShell helper that opens the browser once the server responds
start "" powershell -NoProfile -WindowStyle Hidden -Command "for($i=0;$i -lt 20;$i++){try{Invoke-WebRequest -Uri '%GUI_URL%' -Method Head -TimeoutSec 1 | Out-Null; Start-Process '%GUI_URL%'; break}catch{Start-Sleep -Seconds 1}}"
python main.py --port %COMFY_PORT%
if errorlevel 1 (
echo.
echo ╔═══════════════════════════════════════════════════════════╗
@@ -368,6 +497,9 @@ if errorlevel 1 (
echo.
echo ▓ ComfyUI encountered an error and stopped. Here's what might help:
echo.
echo ▓ Error: "Port already in use"
echo Solution: Close other ComfyUI instances or let this script auto-select a free port.
echo.
echo ▓ Error: "Torch not compiled with CUDA enabled"
echo Solution: You need to install the GPU version of PyTorch (see instructions above)
echo.