https://github.com/comfyanonymous/ComfyUI.git
commit a2fc2bbae4
parent a7f2546558

    corrected formatting

@@ -101,10 +101,12 @@ class CreateFromHashBody(BaseModel):
             return []
         if isinstance(v, list):
             out = [str(t).strip().lower() for t in v if str(t).strip()]
-            seen = set(); dedup = []
+            seen = set()
+            dedup = []
             for t in out:
                 if t not in seen:
-                    seen.add(t); dedup.append(t)
+                    seen.add(t)
+                    dedup.append(t)
             return dedup
         if isinstance(v, str):
             return [t.strip().lower() for t in v.split(",") if t.strip()]
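
Review note: the statements split apart above implement first-seen de-duplication by hand. A minimal standalone sketch of the same normalization, assuming a Pydantic-style validator body; the function name normalize_tags and the None guard before the first context line are hypothetical, and dict.fromkeys preserves insertion order on Python 3.7+:

def normalize_tags(v):
    # Hypothetical free-standing version of the validator body above.
    if v is None:
        return []
    if isinstance(v, list):
        out = [str(t).strip().lower() for t in v if str(t).strip()]
        # dict.fromkeys de-duplicates while keeping first-seen order.
        return list(dict.fromkeys(out))
    if isinstance(v, str):
        return [t.strip().lower() for t in v.split(",") if t.strip()]
    return v

assert normalize_tags(["X", "x", " y "]) == ["x", "y"]
assert normalize_tags("A, b ,") == ["a", "b"]
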
@@ -239,7 +239,8 @@ async def _run_hash_verify_pipeline(root: schemas_in.RootType, prog: ScanProgress
     for lst in (verify_ids, unhashed_ids):
         for sid in lst:
             if sid not in seen:
-                seen.add(sid); ordered.append(sid)
+                seen.add(sid)
+                ordered.append(sid)

     prog.discovered = len(ordered)

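Review note: the same ordered union of the two id lists can be written in one line; a sketch assuming verify_ids and unhashed_ids are plain sequences of hashable ids (the sample values are hypothetical):

from itertools import chain

verify_ids = ["a", "b"]
unhashed_ids = ["b", "c"]
# Equivalent to the seen/ordered loop above: first occurrence wins.
ordered = list(dict.fromkeys(chain(verify_ids, unhashed_ids)))
assert ordered == ["a", "b", "c"]
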
@@ -382,7 +383,9 @@ def _start_state_workers(root: schemas_in.RootType, prog: ScanProgress, state: SlowQueueState
     asyncio.create_task(_close_when_ready())


-async def _await_state_workers_then_finish(root: schemas_in.RootType, prog: ScanProgress, state: SlowQueueState) -> None:
+async def _await_state_workers_then_finish(
+    root: schemas_in.RootType, prog: ScanProgress, state: SlowQueueState
+) -> None:
     if state.workers:
         await asyncio.gather(*state.workers, return_exceptions=True)
     await _reconcile_missing_tags_for_root(root, prog)
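
Review note: the gather call kept as context here relies on return_exceptions=True, which turns worker exceptions into returned values so every task is awaited to completion before the reconcile step. A runnable sketch of that behavior:

import asyncio

async def ok():
    return "done"

async def boom():
    raise RuntimeError("worker failed")

async def main():
    # Failures come back as result objects instead of propagating,
    # so one bad worker cannot abort the rest of the shutdown path.
    results = await asyncio.gather(ok(), boom(), return_exceptions=True)
    assert results[0] == "done"
    assert isinstance(results[1], RuntimeError)

asyncio.run(main())
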
@@ -5,7 +5,9 @@ import pytest


 @pytest.mark.asyncio
-async def test_meta_and_across_keys_and_types(http: aiohttp.ClientSession, api_base: str, asset_factory, make_asset_bytes):
+async def test_meta_and_across_keys_and_types(
+    http: aiohttp.ClientSession, api_base: str, asset_factory, make_asset_bytes
+):
     name = "mf_and_mix.safetensors"
     tags = ["models", "checkpoints", "unit-tests", "mf-and"]
     meta = {"purpose": "mix", "epoch": 1, "active": True, "score": 1.23}
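
Review note: the meta dict here deliberately mixes str, int, bool, and float values. A quick sketch confirming a JSON round-trip keeps those scalar types intact, which matters because the tests ship metadata as JSON:

import json

meta = {"purpose": "mix", "epoch": 1, "active": True, "score": 1.23}
rt = json.loads(json.dumps(meta))
# bool survives the round-trip as bool, not as 0/1.
assert isinstance(rt["active"], bool) and rt == meta
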
@@ -126,7 +128,9 @@ async def test_meta_any_of_list_of_scalars(http, api_base, asset_factory, make_asset_bytes):


 @pytest.mark.asyncio
-async def test_meta_none_semantics_missing_or_null_and_any_of_with_none(http, api_base, asset_factory, make_asset_bytes):
+async def test_meta_none_semantics_missing_or_null_and_any_of_with_none(
+    http, api_base, asset_factory, make_asset_bytes
+):
     # a1: key missing; a2: explicit null; a3: concrete value
     t = ["models", "checkpoints", "unit-tests", "mf-none"]
     a1 = await asset_factory("mf_none_missing.safetensors", t, {"x": 1}, make_asset_bytes("a1"))
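
Review note: this test distinguishes a missing key from an explicit null. A sketch of that distinction at the plain-dict level, since .get() blurs the two and the API therefore needs explicit none semantics:

meta_missing = {"x": 1}           # key absent
meta_null = {"purpose": None}     # key present, value null
assert "purpose" not in meta_missing
assert meta_null["purpose"] is None
# .get() returns None in both cases, which is why the filter API
# must define its own missing-or-null matching rules.
assert meta_missing.get("purpose") is None
assert meta_null.get("purpose") is None
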
@@ -362,7 +366,11 @@ async def test_meta_sort_and_paging_under_filter(http, api_base, asset_factory, make_asset_bytes):
     await asset_factory(n3, t, {"group": "g"}, make_asset_bytes(n3, 3072))

     # Sort by size ascending with paging
-    q = {"include_tags": "unit-tests,mf-sort", "metadata_filter": json.dumps({"group": "g"}), "sort": "size", "order": "asc", "limit": "2"}
+    q = {
+        "include_tags": "unit-tests,mf-sort",
+        "metadata_filter": json.dumps({"group": "g"}),
+        "sort": "size", "order": "asc", "limit": "2",
+    }
     async with http.get(api_base + "/api/assets", params=q) as r1:
         b1 = await r1.json()
         assert r1.status == 200
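
Review note: every value in the reformatted q dict stays a string because aiohttp serializes params into the query string, and the nested filter travels as JSON text. A sketch of the same request as a standalone helper; the name fetch_assets is hypothetical:

import json

import aiohttp

async def fetch_assets(http: aiohttp.ClientSession, api_base: str) -> dict:
    # Query values must be strings; nested structures go through json.dumps.
    q = {
        "include_tags": "unit-tests,mf-sort",
        "metadata_filter": json.dumps({"group": "g"}),
        "sort": "size", "order": "asc", "limit": "2",
    }
    async with http.get(api_base + "/api/assets", params=q) as r:
        assert r.status == 200
        return await r.json()
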
@@ -259,9 +259,11 @@ async def test_delete_one_assetinfo_of_missing_asset_keeps_identity(

     # Both infos should be marked missing
     async with http.get(f"{api_base}/api/assets/{a1['id']}") as g1:
-        d1 = await g1.json(); assert "missing" in set(d1.get("tags", []))
+        d1 = await g1.json()
+        assert "missing" in set(d1.get("tags", []))
     async with http.get(f"{api_base}/api/assets/{a2['id']}") as g2:
-        d2 = await g2.json(); assert "missing" in set(d2.get("tags", []))
+        d2 = await g2.json()
+        assert "missing" in set(d2.get("tags", []))

     # Delete one info
     async with http.delete(f"{api_base}/api/assets/{a1['id']}") as rd:
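
Review note: splitting the semicolon-joined statements means a failing assertion's traceback points at exactly one statement. The repeated check could also be named; a hypothetical helper sketch, not part of the diff:

def is_marked_missing(detail: dict) -> bool:
    # Mirrors the test's check: the asset detail carries a "missing" tag.
    return "missing" in set(detail.get("tags", []))

assert is_marked_missing({"tags": ["missing", "models"]})
assert not is_marked_missing({"tags": ["models"]})
assert not is_marked_missing({})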