rework: AssetInfo.name is only a display name
commit dda31de690 (parent 1d970382f0)
@@ -170,14 +170,17 @@ class UploadAssetSpec(BaseModel):
     """Upload Asset operation.
     - tags: ordered; first is root ('models'|'input'|'output');
       if root == 'models', second must be a valid category from folder_paths.folder_names_and_paths
-    - name: desired filename (optional); fallback will be the file hash
+    - name: display name
     - user_metadata: arbitrary JSON object (optional)
     - hash: optional canonical 'blake3:<hex>' provided by the client for validation / fast-path
+
+    Files created via this endpoint are stored on disk using the **content hash** as the filename stem
+    and the original extension is preserved when available.
     """
     model_config = ConfigDict(extra="ignore", str_strip_whitespace=True)

     tags: list[str] = Field(..., min_length=1)
-    name: Optional[str] = Field(default=None, max_length=512)
+    name: Optional[str] = Field(default=None, max_length=512, description="Display Name")
     user_metadata: dict[str, Any] = Field(default_factory=dict)
     hash: Optional[str] = Field(default=None)

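The storage scheme described in that docstring can be sketched as follows (illustrative only; the helper name is not from the repository, the real logic appears in the hunks below):

    import os

    def sketch_hashed_basename(asset_hash: str, client_filename: str = "") -> str:
        # Stem is the hex digest; a sane original extension (<= 16 chars) survives.
        digest = asset_hash.removeprefix("blake3:")
        ext = os.path.splitext(os.path.basename(client_filename))[1]
        return digest + (ext if 0 < len(ext) <= 16 else "")

    # sketch_hashed_basename("blake3:ab12cd", "model.safetensors") -> "ab12cd.safetensors"
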
@@ -214,11 +214,11 @@ async def upload_asset_from_temp_path(
         if temp_path and os.path.exists(temp_path):
             os.remove(temp_path)

-        desired_name = _safe_filename(spec.name or (client_filename or ""), fallback=digest)
+        display_name = _safe_filename(spec.name or (client_filename or ""), fallback=digest)
         info = await create_asset_info_for_existing_asset(
             session,
             asset_hash=asset_hash,
-            name=desired_name,
+            name=display_name,
             user_metadata=spec.user_metadata or {},
             tags=spec.tags or [],
             tag_origin="manual",

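After this rename, `_safe_filename(...)` only shapes the display name; the digest fallback covers uploads that arrive with no usable name at all. A hypothetical reduction of that call (the real _safe_filename lives elsewhere in the repository):

    def display_name_for(spec_name: str | None, client_filename: str | None, digest: str) -> str:
        # Prefer the explicit display name, then the uploaded filename, then the hash.
        return (spec_name or client_filename or "").strip() or digest
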
@@ -245,11 +245,18 @@ async def upload_asset_from_temp_path(
     dest_dir = os.path.join(base_dir, *subdirs) if subdirs else base_dir
     os.makedirs(dest_dir, exist_ok=True)

-    desired_name = _safe_filename(spec.name or (client_filename or ""), fallback=digest)
-    dest_abs = os.path.abspath(os.path.join(dest_dir, desired_name))
+    src_for_ext = (client_filename or spec.name or "").strip()
+    _ext = os.path.splitext(os.path.basename(src_for_ext))[1] if src_for_ext else ""
+    ext = _ext if 0 < len(_ext) <= 16 else ""
+    hashed_basename = f"{digest}{ext}"
+    dest_abs = os.path.abspath(os.path.join(dest_dir, hashed_basename))
     ensure_within_base(dest_abs, base_dir)

-    content_type = mimetypes.guess_type(desired_name, strict=False)[0] or "application/octet-stream"
+    content_type = (
+        mimetypes.guess_type(os.path.basename(src_for_ext), strict=False)[0]
+        or mimetypes.guess_type(hashed_basename, strict=False)[0]
+        or "application/octet-stream"
+    )

     try:
         os.replace(temp_path, dest_abs)

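The new content-type resolution is self-contained enough to exercise standalone; this snippet mirrors the preference order above (client filename first, hashed basename second, generic fallback last):

    import mimetypes
    import os

    def resolve_content_type(src_for_ext: str, hashed_basename: str) -> str:
        return (
            mimetypes.guess_type(os.path.basename(src_for_ext), strict=False)[0]
            or mimetypes.guess_type(hashed_basename, strict=False)[0]
            or "application/octet-stream"
        )

    # resolve_content_type("photo.PNG", "ab12cd.png")  -> "image/png"
    # resolve_content_type("", "ab12cd")               -> "application/octet-stream"
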
@@ -269,7 +276,7 @@ async def upload_asset_from_temp_path(
         size_bytes=size_bytes,
         mtime_ns=mtime_ns,
         mime_type=content_type,
-        info_name=os.path.basename(dest_abs),
+        info_name=_safe_filename(spec.name or (client_filename or ""), fallback=digest),
         owner_id=owner_id,
         preview_id=None,
         user_metadata=spec.user_metadata or {},

@@ -228,7 +228,7 @@ async def asset_factory(http: aiohttp.ClientSession, api_base: str):
 @pytest_asyncio.fixture
 async def seeded_asset(request: pytest.FixtureRequest, http: aiohttp.ClientSession, api_base: str) -> dict:
     """
-    Upload one asset into models/checkpoints/unit-tests/<name>.
+    Upload one asset with ".safetensors" extension into models/checkpoints/unit-tests/<name>.
     Returns response dict with id, asset_hash, tags, etc.
     """
     name = "unit_1_example.safetensors"

@@ -301,3 +301,7 @@ async def run_scan_and_wait(http: aiohttp.ClientSession, api_base: str):
             raise TimeoutError(f"Timed out waiting for scan of root={root}")
         await asyncio.sleep(0.1)
     return _run
+
+
+def get_asset_filename(asset_hash: str, extension: str) -> str:
+    return asset_hash.removeprefix("blake3:") + extension

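Usage in the test suites below: the on-disk basename is now derived from the response's asset_hash rather than the uploaded name (hash values here are illustrative):

    get_asset_filename("blake3:0a1b2c3d", ".safetensors")  # -> "0a1b2c3d.safetensors"
    get_asset_filename("0a1b2c3d", ".bin")                 # no prefix to strip -> "0a1b2c3d.bin"
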
@@ -4,7 +4,7 @@ from pathlib import Path

 import aiohttp
 import pytest
-from conftest import trigger_sync_seed_assets
+from conftest import get_asset_filename, trigger_sync_seed_assets


 @pytest.mark.asyncio

@@ -77,7 +77,7 @@ async def test_hashed_asset_missing_tag_added_then_removed_after_scan(
     a = await asset_factory(name, tags, {}, data)

     # Compute its on-disk path and remove it
-    dest = comfy_tmp_base_dir / "input" / "unit-tests" / "msync2" / name
+    dest = comfy_tmp_base_dir / "input" / "unit-tests" / "msync2" / get_asset_filename(a["asset_hash"], ".png")
     assert dest.exists(), f"Expected asset file at {dest}"
     dest.unlink()

@@ -102,7 +102,7 @@ async def test_hashed_asset_missing_tag_added_then_removed_after_scan(


 @pytest.mark.asyncio
-async def test_hashed_asset_two_assetinfos_both_get_missing(
+async def test_hashed_asset_two_asset_infos_both_get_missing(
     http: aiohttp.ClientSession,
     api_base: str,
     comfy_tmp_base_dir: Path,

@@ -129,7 +129,7 @@ async def test_hashed_asset_two_assetinfos_both_get_missing(
     second_id = b2["id"]

     # Remove the single underlying file
-    p = comfy_tmp_base_dir / "input" / "unit-tests" / "multiinfo" / name
+    p = comfy_tmp_base_dir / "input" / "unit-tests" / "multiinfo" / get_asset_filename(b2["asset_hash"], ".png")
     assert p.exists()
     p.unlink()

@@ -179,7 +179,7 @@ async def test_hashed_asset_two_cache_states_partial_delete_then_full_delete(
     data = make_asset_bytes(name, 3072)

     created = await asset_factory(name, tags, {}, data)
-    path1 = comfy_tmp_base_dir / "input" / "unit-tests" / "dual" / name
+    path1 = comfy_tmp_base_dir / "input" / "unit-tests" / "dual" / get_asset_filename(created["asset_hash"], ".png")
     assert path1.exists()

     # Create a second on-disk copy under the same root but different subfolder

@@ -249,7 +249,7 @@ async def test_missing_tag_clears_on_fastpass_when_mtime_and_size_match(
     a = await asset_factory(name, [root, "unit-tests", scope], {}, data)
     aid = a["id"]
     base = comfy_tmp_base_dir / root / "unit-tests" / scope
-    p = base / name
+    p = base / get_asset_filename(a["asset_hash"], ".bin")
     st0 = p.stat()
     orig_mtime_ns = getattr(st0, "st_mtime_ns", int(st0.st_mtime * 1_000_000_000))

@@ -302,11 +302,13 @@ async def test_fastpass_removes_stale_state_row_no_missing(

     # Upload hashed asset at path1
     a = await asset_factory(name, [root, "unit-tests", scope], {}, data)
+    base = comfy_tmp_base_dir / root / "unit-tests" / scope
+    a1_filename = get_asset_filename(a["asset_hash"], ".bin")
+    p1 = base / a1_filename
+    assert p1.exists()

     aid = a["id"]
     h = a["asset_hash"]
-    base = comfy_tmp_base_dir / root / "unit-tests" / scope
-    p1 = base / name
-    assert p1.exists()

     # Create second state path2, seed+scan to dedupe into the same Asset
     p2 = base / "copy" / name

@@ -330,14 +332,15 @@ async def test_fastpass_removes_stale_state_row_no_missing(

     async with http.get(
         api_base + "/api/assets",
-        params={"include_tags": f"unit-tests,{scope}", "name_contains": name},
+        params={"include_tags": f"unit-tests,{scope}"},
     ) as rl:
         bl = await rl.json()
         assert rl.status == 200, bl
     items = bl.get("assets", [])
     # one hashed AssetInfo (asset_hash == h) + one seed AssetInfo (asset_hash == null)
-    hashes = [it.get("asset_hash") for it in items if it.get("name") == name]
-    assert h in hashes and any(x is None for x in hashes), "Expected a new seed AssetInfo for the recreated path"
+    hashes = [it.get("asset_hash") for it in items if it.get("name") in (name, a1_filename)]
+    assert h in hashes
+    assert any(x is None for x in hashes), "Expected a new seed AssetInfo for the recreated path"

     # Asset identity still healthy
     async with http.head(f"{api_base}/api/assets/hash/{h}") as rh:

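The assertion above expects two listing rows for the same logical file once its path is recreated: the original hashed AssetInfo plus a fresh, not-yet-hashed "seed" row. Roughly (field values illustrative):

    items = [
        {"name": "0a1b2c3d.bin", "asset_hash": "blake3:0a1b2c3d"},  # hashed AssetInfo
        {"name": "recreated.bin", "asset_hash": None},              # seed row for the recreated path
    ]
    hashes = [it["asset_hash"] for it in items]
    assert any(x is None for x in hashes)
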
@@ -4,7 +4,7 @@ from pathlib import Path

 import aiohttp
 import pytest
-from conftest import trigger_sync_seed_assets
+from conftest import get_asset_filename, trigger_sync_seed_assets


 @pytest.mark.asyncio

@@ -286,7 +286,7 @@ async def test_metadata_filename_computed_and_updated_on_retarget(
     aid = a["id"]

     root_base = comfy_tmp_base_dir / root
-    p1 = root_base / "unit-tests" / scope / "a" / "b" / name1
+    p1 = (root_base / "unit-tests" / scope / "a" / "b" / get_asset_filename(a["asset_hash"], ".png"))
     assert p1.exists()

     # filename at ingest should be the path relative to root

@@ -6,7 +6,7 @@ from typing import Optional

 import aiohttp
 import pytest
-from conftest import trigger_sync_seed_assets
+from conftest import get_asset_filename, trigger_sync_seed_assets


 @pytest.mark.asyncio

@@ -53,7 +53,7 @@ async def test_download_chooses_existing_state_and_updates_access_time(
     aid = a["id"]

     base = comfy_tmp_base_dir / root / "unit-tests" / scope
-    path1 = base / name
+    path1 = base / get_asset_filename(a["asset_hash"], ".bin")
     assert path1.exists()

     # Seed path2 by copying, then scan to dedupe into a second state

@@ -108,14 +108,14 @@ async def test_download_missing_file_returns_404(
         async with http.get(f"{api_base}/api/assets/{aid}") as rg:
             detail = await rg.json()
             assert rg.status == 200
-        rel_inside_category = detail["name"]
-        abs_path = comfy_tmp_base_dir / "models" / "checkpoints" / rel_inside_category
-        if abs_path.exists():
+        asset_filename = get_asset_filename(detail["asset_hash"], ".safetensors")
+        abs_path = comfy_tmp_base_dir / "models" / "checkpoints" / asset_filename
+        assert abs_path.exists()
         abs_path.unlink()

         async with http.get(f"{api_base}/api/assets/{aid}/content") as r2:
-            body = await r2.json()
             assert r2.status == 404
+            body = await r2.json()
             assert body["error"]["code"] == "FILE_NOT_FOUND"
     finally:
         # We created asset without the "unit-tests" tag(see `autoclean_unit_test_assets`), we need to clear it manually.

@@ -144,7 +144,7 @@ async def test_download_404_if_all_states_missing(
     aid = a["id"]

     base = comfy_tmp_base_dir / root / "unit-tests" / scope
-    p1 = base / name
+    p1 = base / get_asset_filename(a["asset_hash"], ".bin")
     assert p1.exists()

     # Seed a second state and dedupe

@@ -5,7 +5,7 @@ from pathlib import Path

 import aiohttp
 import pytest
-from conftest import trigger_sync_seed_assets
+from conftest import get_asset_filename, trigger_sync_seed_assets


 def _base_for(root: str, comfy_tmp_base_dir: Path) -> Path:

@@ -138,7 +138,8 @@ async def test_scan_records_file_errors_permission_denied(
     deny_dir.mkdir(parents=True, exist_ok=True)
     name = "deny.bin"

-    await asset_factory(name, [root, "unit-tests", scope, "deny"], {}, b"X" * 2048)
+    a1 = await asset_factory(name, [root, "unit-tests", scope, "deny"], {}, b"X" * 2048)
+    asset_filename = get_asset_filename(a1["asset_hash"], ".bin")
     try:
         os.chmod(deny_dir, 0x000)
         async with http.post(api_base + "/api/assets/scan/schedule", json={"roots": [root]}) as r:

@@ -152,10 +153,11 @@ async def test_scan_records_file_errors_permission_denied(
         assert len(scans) == 1
         errs = scans[0].get("file_errors", [])
         # Should contain at least one PermissionError-like record
-        assert errs and any(e.get("path", "").endswith(name) and e.get("message") for e in errs)
+        assert errs
+        assert any(e.get("path", "").endswith(asset_filename) and e.get("message") for e in errs)
     finally:
         try:
             os.chmod(deny_dir, 0o755)
         except Exception:
             pass

@@ -182,7 +184,7 @@ async def test_missing_tag_created_and_visible_in_tags(
     created = await asset_factory(name, [root, "unit-tests", scope], {}, b"Y" * 4096)

     # Remove the only file and trigger fast pass
-    p = _base_for(root, comfy_tmp_base_dir) / "unit-tests" / scope / name
+    p = _base_for(root, comfy_tmp_base_dir) / "unit-tests" / scope / get_asset_filename(created["asset_hash"], ".bin")
     assert p.exists()
     p.unlink()
     await trigger_sync_seed_assets(http, api_base)

@@ -217,7 +219,7 @@ async def test_missing_reapplies_after_manual_removal(
     created = await asset_factory(name, [root, "unit-tests", scope], {}, b"Z" * 1024)

     # Make it missing
-    p = _base_for(root, comfy_tmp_base_dir) / "unit-tests" / scope / name
+    p = _base_for(root, comfy_tmp_base_dir) / "unit-tests" / scope / get_asset_filename(created["asset_hash"], ".bin")
     p.unlink()
     await trigger_sync_seed_assets(http, api_base)

@@ -237,7 +239,7 @@ async def test_missing_reapplies_after_manual_removal(

 @pytest.mark.asyncio
 @pytest.mark.parametrize("root", ["input", "output"])
-async def test_delete_one_assetinfo_of_missing_asset_keeps_identity(
+async def test_delete_one_asset_info_of_missing_asset_keeps_identity(
     root: str,
     http,
     api_base: str,

@@ -253,7 +255,7 @@ async def test_delete_one_assetinfo_of_missing_asset_keeps_identity(
     a2 = await asset_factory("copy_" + name, [root, "unit-tests", scope], {}, b"W" * 2048)

     # Remove file of the first (both point to the same Asset, but we know on-disk path name for a1)
-    p1 = _base_for(root, comfy_tmp_base_dir) / "unit-tests" / scope / name
+    p1 = _base_for(root, comfy_tmp_base_dir) / "unit-tests" / scope / get_asset_filename(a1["asset_hash"], ".bin")
     p1.unlink()
     await trigger_sync_seed_assets(http, api_base)

@@ -282,7 +284,7 @@ async def test_delete_one_assetinfo_of_missing_asset_keeps_identity(

 @pytest.mark.asyncio
 @pytest.mark.parametrize("keep_root", ["input", "output"])
-async def test_delete_last_assetinfo_false_keeps_asset_and_states_multiroot(
+async def test_delete_last_asset_info_false_keeps_asset_and_states_multiroot(
     keep_root: str,
     http,
     api_base: str,

@@ -293,21 +295,18 @@ async def test_delete_last_assetinfo_false_keeps_asset_and_states_multiroot(
     """Delete last AssetInfo with delete_content_if_orphan=false keeps asset and the underlying on-disk content."""
     other_root = "output" if keep_root == "input" else "input"
     scope = f"delfalse-{uuid.uuid4().hex[:6]}"
-    name1, name2 = "keep1.bin", "keep2.bin"
     data = make_asset_bytes(scope, 3072)

     # First upload creates the physical file
-    a1 = await asset_factory(name1, [keep_root, "unit-tests", scope], {}, data)
+    a1 = await asset_factory("keep1.bin", [keep_root, "unit-tests", scope], {}, data)
     # Second upload (other root) is deduped to the same content; no new file on disk
-    a2 = await asset_factory(name2, [other_root, "unit-tests", scope], {}, data)
+    a2 = await asset_factory("keep2.bin", [other_root, "unit-tests", scope], {}, data)

     h = a1["asset_hash"]
-    p1 = _base_for(keep_root, comfy_tmp_base_dir) / "unit-tests" / scope / name1
-    p2 = _base_for(other_root, comfy_tmp_base_dir) / "unit-tests" / scope / name2
+    p1 = _base_for(keep_root, comfy_tmp_base_dir) / "unit-tests" / scope / get_asset_filename(h, ".bin")

     # De-dup semantics: only the first physical file exists
     assert p1.exists(), "Expected the first physical file to exist"
-    assert not p2.exists(), "Second duplicate must not create another physical file"

     # Delete both AssetInfos; keep content on the very last delete
     async with http.delete(f"{api_base}/api/assets/{a2['id']}") as rfirst:

@@ -319,7 +318,6 @@ async def test_delete_last_assetinfo_false_keeps_asset_and_states_multiroot(
     async with http.head(f"{api_base}/api/assets/hash/{h}") as rh:
         assert rh.status == 200
     assert p1.exists(), "Content file should remain after keep-content delete"
-    assert not p2.exists(), "There was never a second physical file"

     # Cleanup: re-create a reference by hash and then delete to purge content
     payload = {

@@ -489,7 +487,7 @@ async def test_cache_state_retarget_on_content_change_asset_info_stays(
     aid = a1["id"]
     h1 = a1["asset_hash"]

-    p = comfy_tmp_base_dir / root / "unit-tests" / scope / name
+    p = comfy_tmp_base_dir / root / "unit-tests" / scope / get_asset_filename(a1["asset_hash"], ".bin")
     assert p.exists()

     # Change the bytes in place to force a new content hash (H2)

@@ -288,7 +288,7 @@ async def test_upload_tags_traversal_guard(http: aiohttp.ClientSession, api_base

 @pytest.mark.asyncio
 @pytest.mark.parametrize("root", ["input", "output"])
-async def test_duplicate_upload_same_path_updates_state(
+async def test_duplicate_upload_same_display_name_does_not_clobber(
     root: str,
     http,
     api_base: str,

@@ -296,30 +296,30 @@ async def test_duplicate_upload_same_path_updates_state(
     make_asset_bytes,
 ):
     """
-    Two uploads target the exact same destination path (same tags+name) with different bytes.
-    Expect: file on disk is from the last upload; its AssetInfo serves content; the first AssetInfo's content 404s.
-    This validates that AssetCacheState(file_path) remains unique and its asset_id/mtime_ns were updated.
+    Two uploads use the same tags and the same display name but different bytes.
+    With hash-based filenames, they must NOT overwrite each other. Both assets
+    remain accessible and serve their original content.
     """
     scope = f"dup-path-{uuid.uuid4().hex[:6]}"
-    name = "same_path.bin"
+    display_name = "same_display.bin"

     d1 = make_asset_bytes(scope + "-v1", 1536)
     d2 = make_asset_bytes(scope + "-v2", 2048)
     tags = [root, "unit-tests", scope]

-    first = await asset_factory(name, tags, {}, d1)
-    second = await asset_factory(name, tags, {}, d2)
+    first = await asset_factory(display_name, tags, {}, d1)
+    second = await asset_factory(display_name, tags, {}, d2)

-    # Second one must serve the new bytes
+    assert first["id"] != second["id"]
+    assert first["asset_hash"] != second["asset_hash"]  # different content
+    assert first["name"] == second["name"] == display_name
+
+    # Both must be independently retrievable
+    async with http.get(f"{api_base}/api/assets/{first['id']}/content") as r1:
+        b1 = await r1.read()
+        assert r1.status == 200
+        assert b1 == d1
     async with http.get(f"{api_base}/api/assets/{second['id']}/content") as r2:
         b2 = await r2.read()
         assert r2.status == 200
         assert b2 == d2
-
-    # The first AssetInfo now points to an identity with no live state for that path -> 404
-    async with http.get(f"{api_base}/api/assets/{first['id']}/content") as r1:
-        try:
-            body = await r1.json()
-        except Exception:
-            body = {}
-        assert r1.status == 404, body

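The renamed test encodes the commit's core guarantee: two uploads with the same display name can no longer clobber each other, because the on-disk basename comes from the content. A sketch, assuming the server's 'blake3:<hex>' digests come from BLAKE3 (e.g. the blake3 PyPI package):

    from blake3 import blake3  # assumption: same algorithm as the server's hasher

    d1, d2 = b"v1-bytes", b"v2-bytes"
    n1 = blake3(d1).hexdigest() + ".bin"
    n2 = blake3(d2).hexdigest() + ".bin"
    assert n1 != n2  # different content -> different basenames; same display name is fine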