Add Intel, AMD Linux Dockerfiles, improve error messages on AMD

This commit is contained in:
doctorpangloss 2024-05-22 21:16:34 -07:00
parent b241ecc56d
commit 0fcd07962f
5 changed files with 47 additions and 11 deletions

View File

@ -1,4 +1,4 @@
FROM nvcr.io/nvidia/pytorch:24.01-py3
FROM nvcr.io/nvidia/pytorch:24.04-py3
RUN pip install --no-cache --no-build-isolation git+https://github.com/hiddenswitch/ComfyUI.git
EXPOSE 8188
WORKDIR /workspace

View File

@ -72,7 +72,7 @@ On macOS, install Python 3.10, 3.11 or 3.12 using `brew`, which you can download
source ./venv/bin/activate
```
3. Then, run the following command to install `comfyui` into your current environment. This will correctly select the version of pytorch that matches the GPU on your machine (NVIDIA or CPU on Windows, NVIDIA AMD or CPU on Linux):
3. Then, run the following command to install `comfyui` into your current environment. This will correctly select the version of pytorch that matches the GPU on your machine (NVIDIA or CPU on Windows; NVIDIA, Intel, AMD, or CPU on Linux; CPU on macOS):
```shell
pip install git+https://github.com/hiddenswitch/ComfyUI.git
```
@ -178,6 +178,12 @@ These models will be automatically downloaded when you queue prompts with workfl
Because pip installs the package as editable with `pip install -e .`, any changes you make to the repository will affect the next launch of `comfy`. In IDEA-based editors like PyCharm and IntelliJ, the Reloadium plugin supports modifying your custom nodes or similar code while the server is running.
## Linux Development Dependencies
```shell
apt install -y git build-essential clang python3-dev python3-venv
```
# Custom Nodes
Custom Nodes can be added to ComfyUI by copying and pasting Python files into your `./custom_nodes` directory.

5
amd.Dockerfile Normal file
View File

@ -0,0 +1,5 @@
# AMD ROCm image: official ROCm PyTorch base (ROCm 6.0.2, Ubuntu 22.04, Python 3.10, PyTorch 2.1.2).
FROM rocm/pytorch:rocm6.0.2_ubuntu22.04_py3.10_pytorch_2.1.2
# --no-cache-dir (spelled out; `--no-cache` only worked via pip's option-prefix
# abbreviation) keeps the layer small; --no-build-isolation lets the install
# reuse the torch already baked into the base image instead of fetching another.
RUN pip install --no-cache-dir --no-build-isolation git+https://github.com/hiddenswitch/ComfyUI.git
# ComfyUI's default listen port.
EXPOSE 8188
WORKDIR /workspace
# Exec form so the server process receives signals (SIGTERM on `docker stop`) directly.
CMD ["/usr/local/bin/comfyui", "--listen"]

5
intel.Dockerfile Normal file
View File

@ -0,0 +1,5 @@
# Intel image: Intel-optimized PyTorch base (PyTorch 2.3.0, pip variant).
FROM intel/intel-optimized-pytorch:2.3.0-pip-base
# --no-cache-dir (spelled out; `--no-cache` only worked via pip's option-prefix
# abbreviation) keeps the layer small; --no-build-isolation lets the install
# reuse the torch already baked into the base image instead of fetching another.
RUN pip install --no-cache-dir --no-build-isolation git+https://github.com/hiddenswitch/ComfyUI.git
# ComfyUI's default listen port.
EXPOSE 8188
WORKDIR /workspace
# Exec form so the server process receives signals (SIGTERM on `docker stop`) directly.
CMD ["/usr/local/bin/comfyui", "--listen"]

View File

@ -4,7 +4,7 @@ import os.path
import platform
import subprocess
import sys
from typing import List, Literal, Union, Optional
from typing import List, Optional
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
@ -28,13 +28,13 @@ version = '0.0.1'
"""
The package index to the torch built with AMD ROCm.
"""
amd_torch_index = ("https://download.pytorch.org/whl/rocm5.7", "https://download.pytorch.org/whl/nightly/rocm6.0")
amd_torch_index = ("https://download.pytorch.org/whl/rocm6.0", "https://download.pytorch.org/whl/nightly/rocm6.1")
"""
The package index to torch built with CUDA.
Observe the CUDA version is in this URL.
"""
nvidia_torch_index = ("https://download.pytorch.org/whl/cu121", "https://download.pytorch.org/whl/nightly/cu121")
nvidia_torch_index = ("https://download.pytorch.org/whl/cu121", "https://download.pytorch.org/whl/nightly/cu124")
"""
The package index to torch built against CPU features.
@ -80,14 +80,33 @@ def _is_amd() -> bool:
rocminfo_paths.extend(["/opt/rocm/bin/rocminfo", "/usr/bin/rocminfo"])
for rocminfo_path in rocminfo_paths:
output = None
try:
output = subprocess.check_output([rocminfo_path]).decode("utf-8")
if "Device" in output:
return True
except:
pass
if output is None:
return False
elif "Device" in output:
return True
elif "Permission Denied" in output:
msg = f"""
{output}
To resolve this issue on AMD:
sudo -i
usermod -a -G video $LOGNAME
usermod -a -G render $LOGNAME
You will need to reboot. Save your work, then:
reboot
"""
print(msg, file=sys.stderr)
raise RuntimeError(msg)
return False
@ -98,7 +117,7 @@ def _is_linux_arm64():
return os_name == 'Linux' and architecture == 'aarch64'
def dependencies() -> List[str]:
def dependencies(force_nightly: bool = False) -> List[str]:
_dependencies = open(os.path.join(os.path.dirname(__file__), "requirements.txt")).readlines()
# If we're installing with no build isolation, we can check if torch is already installed in the environment, and if
# so, go ahead and use the version that is already installed.
@ -133,8 +152,9 @@ def dependencies() -> List[str]:
if len(index_urls) == 1:
return _dependencies
if sys.version_info >= (3, 12):
# use the nightlies
if sys.version_info >= (3, 13) or force_nightly:
# use the nightlies for python 3.13
print("Using nightlies for Python 3.13 or higher. PyTorch may not yet build for it", file=sys.stderr)
index_urls_selected = [nightly for (_, nightly) in index_urls]
_alternative_indices_selected = [nightly for (_, nightly) in _alternative_indices]
else: