Updates for torch 2.6.0, prepare Anthropic nodes, accept multiple logging levels

This commit is contained in:
doctorpangloss 2025-02-04 09:27:18 -08:00
parent fac670da89
commit 1a24ceef79
6 changed files with 24 additions and 14 deletions

View File

@ -149,10 +149,6 @@ When using Windows, open the **Windows Powershell** app. Then observe you are at
uv pip install setuptools wheel
uv pip install "comfyui[withtorch]@git+https://github.com/hiddenswitch/ComfyUI.git"
```
**Recommended**: Install `xformers`:
```powershell
uv pip install --no-build-isolation --no-deps xformers==0.0.28.post3 --index-url https://download.pytorch.org/whl/
```
To enable `torchaudio` support on Windows, install it directly:
```powershell
@ -460,16 +456,21 @@ choco install -y visualstudio2022buildtools
# purposefully executed separately
choco install -y visualstudio2022-workload-vctools
choco install -y vcredist2010 vcredist2013 vcredist140
choco install -y cuda
```
If you have `xformers` installed, disable it, as it will be preferred over Sage Attention:
Then, visit [NVIDIA.com's CUDA Toolkit Download Page](https://developer.nvidia.com/cuda-12-6-0-download-archive?target_os=Windows&target_arch=x86_64&target_version=Server2022&target_type=exe_network) and download and install the CUDA Toolkit. Verify it is correctly installed by running `nvcc --version`.
You are now ready to install Sage Attention 2:
```shell
comfyui --disable-xformers
uv pip install --no-build-isolation --no-deps "git+https://github.com/thu-ml/SageAttention.git"
```
Sage Attention is not compatible with Flux. It does not appear to be compatible with Mochi when using `torch.compile`.
To start ComfyUI with it:
```shell
comfyui --use-sage-attention
```
![with_sage_attention.webp](./docs/assets/with_sage_attention.webp)
**With Sage Attention**

View File

@ -166,7 +166,7 @@ def _create_parser() -> EnhancedConfigArgParser:
help="This name will be used by the frontends and workers to exchange prompt requests and replies. Progress updates will be prefixed by the queue name, followed by a '.', then the user ID")
parser.add_argument("--external-address", required=False,
help="Specifies a base URL for external addresses reported by the API, such as for image paths.")
parser.add_argument("--logging-level", type=str, default='INFO', choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], help='Set the logging level')
parser.add_argument("--logging-level", type=lambda x: str(x).upper(), default='INFO', choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], help='Set the logging level')
parser.add_argument("--disable-known-models", action="store_true", help="Disables automatic downloads of known models and prevents them from appearing in the UI.")
parser.add_argument("--max-queue-size", type=int, default=65536, help="The API will reject prompt requests if the queue's size exceeds this value.")
# tracing
@ -217,7 +217,7 @@ def _create_parser() -> EnhancedConfigArgParser:
"--openai-api-key",
required=False,
type=str,
help="Configures the OpenAI API Key for the OpenAI nodes",
help="Configures the OpenAI API Key for the OpenAI nodes. Visit https://platform.openai.com/api-keys to create this key.",
env_var="OPENAI_API_KEY",
default=None
)
@ -231,6 +231,14 @@ def _create_parser() -> EnhancedConfigArgParser:
default=None
)
parser.add_argument(
"--anthropic-api-key",
required=False,
type=str,
help="Configures the Anthropic API key for its nodes related to Claude functionality. Visit https://console.anthropic.com/settings/keys to create this key.",
env_var="ANTHROPIC_API_KEY"
)
parser.add_argument("--user-directory", type=is_valid_directory, default=None, help="Set the ComfyUI user directory with an absolute path.")
# now give plugins a chance to add configuration

View File

@ -117,8 +117,9 @@ class Configuration(dict):
force_hf_local_dir_mode (bool): Download repos from huggingface.co to the models/huggingface directory with the "local_dir" argument instead of models/huggingface_cache with the "cache_dir" argument, recreating the traditional file structure.
executor_factory (str): Either ThreadPoolExecutor or ProcessPoolExecutor, defaulting to ThreadPoolExecutor
preview_size (int): Sets the maximum preview size for sampler nodes. Defaults to 512.
openai_api_key (str): Configures the OpenAI API Key for the OpenAI nodes
openai_api_key (str): Configures the OpenAI API Key for the OpenAI nodes. Visit https://platform.openai.com/api-keys to create this key.
ideogram_api_key (str): Configures the Ideogram API Key for the Ideogram nodes. Visit https://ideogram.ai/manage-api to create this key.
anthropic_api_key (str): Configures the Anthropic API key for its nodes related to Claude functionality. Visit https://console.anthropic.com/settings/keys to create this key.
user_directory (Optional[str]): Set the ComfyUI user directory with an absolute path.
log_stdout (bool): Send normal process output to stdout instead of stderr (default)
"""
@ -217,6 +218,7 @@ class Configuration(dict):
self.executor_factory: str = "ThreadPoolExecutor"
self.openai_api_key: Optional[str] = None
self.ideogram_api_key: Optional[str] = None
self.anthropic_api_key: Optional[str] = None
self.user_directory: Optional[str] = None
def __getattr__(self, item):

View File

@ -3,7 +3,7 @@ pytest-asyncio
pytest-mock
pytest-aiohttp
pytest-xdist
websocket-client==1.6.1
websocket-client>=1.6.1
PyInstaller
testcontainers
testcontainers-rabbitmq

View File

@ -1,4 +1,3 @@
sageattention
triton ;platform_system == 'Linux'
triton @ https://github.com/woct0rdho/triton-windows/releases/download/v3.1.0-windows.post9/triton-3.1.0-cp312-cp312-win_amd64.whl ;platform_system == 'Windows' and python_version == '3.12'
triton @ https://github.com/woct0rdho/triton-windows/releases/download/v3.1.0-windows.post9/triton-3.1.0-cp311-cp311-win_amd64.whl ;platform_system == 'Windows' and python_version == '3.11'

View File

@ -54,7 +54,7 @@ opentelemetry-semantic-conventions
wrapt>=1.16.0
certifi
spandrel>=0.3.4
numpy>=1.24.4
numpy>=2.1.3
soundfile
watchdog
PySoundFile