Fix tests
parent c4fe16252b
commit 69e6d52301
@@ -131,9 +131,13 @@ When using Windows, open the **Windows Powershell** app. Then observe you are at
 pip install --no-build-isolation git+https://github.com/hiddenswitch/ComfyUI.git
 ```
 
-For improved performance when using the language models on Windows, CUDA 12.1 and PyTorch 2.3.0, add:
+For improved performance when using the language models on Windows, Python 3.11, CUDA 12.1 and PyTorch 2.4.0, add:
 ```shell
-pip install flash-attn @ https://github.com/AppMana/appmana-comfyui-nodes-extramodels/releases/download/v0.0.0-flash_attn/flash_attn-2.5.9.post1-cp311-cp311-win_amd64.whl
+pip install https://github.com/AppMana/appmana-comfyui-nodes-extramodels/releases/download/v0.0.0-flash_attn/flash_attn-2.6.3-cp311-cp311-win_amd64.whl
 ```
+To enable `torchaudio` support on Windows, install it directly:
+```shell
+pip install torchaudio==2.4.0+cu121 --index-url https://download.pytorch.org/whl/cu121
+```
 Flash Attention as implemented in PyTorch is not functional on any version of Windows. ComfyUI will always run with "memory efficient attention" in practice on this platform. This is distinct from the `flash-attn` package. <br />
 **Advanced**: If you are running in Google Collab or another environment which has already installed `torch` for you, disable build isolation, and the package will recognize your currently installed torch.
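A quick way to sanity-check the wheels above after installation — a minimal Python sketch, assuming each package exposes the conventional `__version__` attribute:

```python
# Post-install sanity check (a sketch; nothing here is specific to ComfyUI).
import torch

print("torch", torch.__version__, "CUDA available:", torch.cuda.is_available())

try:
    import flash_attn  # provided by the prebuilt wheel linked above
    print("flash_attn", flash_attn.__version__)
except ImportError:
    print("flash_attn missing; attention falls back to the memory efficient kernels")

try:
    import torchaudio
    print("torchaudio", torchaudio.__version__)
except ImportError:
    print("torchaudio missing; install it with the command above to enable audio support")
```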
@@ -1,13 +1,16 @@
 from comfy import sd1_clip
 import os
 
+from comfy.component_model.files import get_path_as_dict
+
+
 class LongClipTokenizer_(sd1_clip.SDTokenizer):
     def __init__(self, embedding_directory=None, tokenizer_data={}):
         super().__init__(max_length=248, embedding_directory=embedding_directory, tokenizer_data=tokenizer_data)
 
 class LongClipModel_(sd1_clip.SDClipModel):
-    def __init__(self, device="cpu", dtype=None, model_options={}):
-        textmodel_json_config = os.path.join(os.path.dirname(os.path.realpath(__file__)), "long_clipl.json")
+    def __init__(self, device="cpu", dtype=None, model_options={}, textmodel_json_config=None):
+        textmodel_json_config = get_path_as_dict(textmodel_json_config, "long_clipl.json", package=__package__)
         super().__init__(device=device, textmodel_json_config=textmodel_json_config, return_projected_pooled=False, dtype=dtype, model_options=model_options)
 
 class LongClipTokenizer(sd1_clip.SD1Tokenizer):
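The new `textmodel_json_config=None` parameter lets callers supply their own CLIP config while the packaged `long_clipl.json` stays the default. A minimal sketch of the fallback behavior this appears to rely on, assuming `get_path_as_dict` accepts an explicit dict or path and otherwise loads a JSON file shipped with the package (the real `comfy.component_model.files.get_path_as_dict` may differ):

```python
# Hypothetical stand-in for get_path_as_dict; illustrates the assumed contract only.
import importlib.resources
import json


def get_path_as_dict_sketch(config, default_name, package):
    if isinstance(config, dict):
        # Caller already supplied a parsed config.
        return config
    if config is not None:
        # Caller supplied an explicit path on disk.
        with open(config, encoding="utf8") as f:
            return json.load(f)
    # Fall back to the JSON file bundled inside the package.
    text = importlib.resources.files(package).joinpath(default_name).read_text(encoding="utf8")
    return json.loads(text)
```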
@@ -1,6 +1,7 @@
 pytest
 pytest-asyncio
 pytest-mock
 pytest-aiohttp
 websocket-client==1.6.1
 PyInstaller
+testcontainers
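The one addition here is `testcontainers`. For context, a minimal sketch of how it is typically wired into a pytest fixture; the image and fixture names are hypothetical and not taken from this repository:

```python
# Hypothetical session fixture that starts a throwaway Redis container for
# integration tests and tears it down afterwards.
import pytest
from testcontainers.core.container import DockerContainer


@pytest.fixture(scope="session")
def redis_url():
    with DockerContainer("redis:7").with_exposed_ports(6379) as container:
        host = container.get_container_host_ip()
        port = container.get_exposed_port(6379)
        yield f"redis://{host}:{port}/0"
```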
@@ -1,26 +0,0 @@
-# Automated Testing
-
-## Running tests locally
-
-Additional requirements for running tests:
-```
-pip install .[dev]
-```
-Run inference tests:
-```
-pytest tests/inference
-```
-
-## Quality regression test
-Compares images in 2 directories to ensure they are the same
-
-1) Run an inference test to save a directory of "ground truth" images
-```
-pytest tests/inference --output_dir tests/inference/baseline
-```
-2) Make code edits
-
-3) Run inference and quality comparison tests
-```
-pytest
-```
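The quality regression workflow in the removed README compares two directories of rendered images. A minimal sketch of that kind of check, assuming exact pixel equality is the criterion (the actual harness may instead use a perceptual metric):

```python
# Sketch: assert two directories hold pixel-identical PNGs with matching names.
from pathlib import Path

import numpy as np
from PIL import Image


def assert_directories_match(baseline_dir: str, output_dir: str) -> None:
    baseline = {p.name: p for p in Path(baseline_dir).glob("*.png")}
    output = {p.name: p for p in Path(output_dir).glob("*.png")}
    assert baseline.keys() == output.keys(), "directories contain different image sets"
    for name, path in baseline.items():
        a = np.asarray(Image.open(path).convert("RGB"))
        b = np.asarray(Image.open(output[name]).convert("RGB"))
        assert a.shape == b.shape and np.array_equal(a, b), f"{name} differs from baseline"
```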
@@ -98,7 +98,7 @@ async def test_routes_added_to_app(aiohttp_client_factory, internal_routes):
 
 @pytest.mark.asyncio
 async def test_file_service_initialization():
-    with patch('api_server.routes.internal.internal_routes.FileService') as MockFileService:
+    with patch('comfy.api_server.routes.internal.internal_routes.FileService') as MockFileService:
         # Create a mock instance
         mock_file_service_instance = MagicMock(spec=FileService)
         MockFileService.return_value = mock_file_service_instance
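The only change is the patch target. `unittest.mock.patch` replaces a name in the module where it is looked up, so once the routes module moved under the `comfy` package the target string has to follow it. A self-contained illustration of that rule, using hypothetical `provider`/`consumer` modules built on the fly:

```python
# Demonstrates "patch where the name is looked up", the reason the test's
# patch target changed. The modules here are synthetic, not from this repo.
import sys
import types
from unittest.mock import MagicMock, patch

provider = types.ModuleType("provider")
provider.FileService = type("FileService", (), {})
sys.modules["provider"] = provider

consumer = types.ModuleType("consumer")
consumer.FileService = provider.FileService  # simulates `from provider import FileService`
sys.modules["consumer"] = consumer

# Patching the definition site leaves the name the consumer already imported untouched.
with patch("provider.FileService", MagicMock()):
    assert not isinstance(consumer.FileService, MagicMock)

# Patching the consumer module, where the name is actually looked up, takes effect.
with patch("consumer.FileService", MagicMock()):
    assert isinstance(consumer.FileService, MagicMock)
```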