Mirror of https://github.com/comfyanonymous/ComfyUI.git
Synced 2026-02-06 19:42:34 +08:00
refactor: improve function naming for clarity and consistency
Rename functions to use clearer verb-based names:

- pick_best_live_path → select_best_live_path
- escape_like_prefix → escape_sql_like_string
- list_tree → list_files_recursively
- check_asset_file_fast → verify_asset_file_unchanged
- _seed_from_paths_batch → _batch_insert_assets_from_paths
- reconcile_cache_states_for_root → sync_cache_states_with_filesystem
- touch_asset_info_by_id → update_asset_info_access_time
- replace_asset_info_metadata_projection → set_asset_info_metadata
- expand_metadata_to_rows → convert_metadata_to_rows
- _rows_per_stmt → _calculate_rows_per_statement
- ensure_within_base → validate_path_within_base
- _cleanup_temp → _delete_temp_file_if_exists
- validate_hash_format → normalize_and_validate_hash
- get_relative_to_root_category_path_of_asset → get_asset_category_and_relative_path

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent: 481a2fa263
commit: fef2f01671
@@ -13,7 +13,7 @@ import folder_paths
 from app.assets.api.schemas_in import ParsedUpload, UploadError


-def validate_hash_format(s: str) -> str:
+def normalize_and_validate_hash(s: str) -> str:
     """
     Validate and normalize a hash string.

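The validator's body is cut off in this hunk, so here is only a rough sketch of what a normalize-and-validate helper for "blake3:<hex>" strings might look like; the regex, the 64-hex-character digest length, and the error type are assumptions, not code from this commit:

    import re

    _HASH_RE = re.compile(r"^blake3:[0-9a-f]{64}$")

    def normalize_and_validate_hash(s: str) -> str:
        # Canonicalize case and whitespace first so "BLAKE3:AB..." validates,
        # then reject anything that is not "blake3:" plus lowercase hex.
        # (Illustrative only: the real body is not part of this diff.)
        s = s.strip().lower()
        if not _HASH_RE.fullmatch(s):
            raise ValueError("hash must be like 'blake3:<hex>'")
        return s

The new name signals both effects, normalization and validation, which the old name validate_hash_format hid.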
@@ -77,7 +77,7 @@ async def parse_multipart_upload(
                raise UploadError(400, "INVALID_HASH", "hash must be like 'blake3:<hex>'")

            if s:
-               provided_hash = validate_hash_format(s)
+               provided_hash = normalize_and_validate_hash(s)
                try:
                    provided_hash_exists = check_hash_exists(provided_hash)
                except Exception:
@@ -114,7 +114,7 @@ async def parse_multipart_upload(
                    f.write(chunk)
                    file_written += len(chunk)
            except Exception:
-               _cleanup_temp(tmp_path)
+               _delete_temp_file_if_exists(tmp_path)
                raise UploadError(500, "UPLOAD_IO_ERROR", "Failed to receive and store uploaded file.")

        elif fname == "tags":
@@ -129,7 +129,7 @@ async def parse_multipart_upload(
        raise UploadError(400, "MISSING_FILE", "Form must include a 'file' part or a known 'hash'.")

    if file_present and file_written == 0 and not (provided_hash and provided_hash_exists):
-       _cleanup_temp(tmp_path)
+       _delete_temp_file_if_exists(tmp_path)
        raise UploadError(400, "EMPTY_UPLOAD", "Uploaded file is empty.")

    return ParsedUpload(
@@ -145,7 +145,7 @@ async def parse_multipart_upload(
    )


-def _cleanup_temp(tmp_path: str | None) -> None:
+def _delete_temp_file_if_exists(tmp_path: str | None) -> None:
    """Safely remove a temp file if it exists."""
    if tmp_path:
        try:
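The hunk cuts off inside _delete_temp_file_if_exists. A plausible completion, consistent with its docstring; the os.remove call and the suppressed OSError are assumed rather than shown in the diff:

    import contextlib
    import os

    def _delete_temp_file_if_exists(tmp_path: str | None) -> None:
        """Safely remove a temp file if it exists."""
        if tmp_path:
            # Best-effort cleanup: a missing or already-deleted file is fine.
            with contextlib.suppress(OSError):
                os.remove(tmp_path)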
@@ -17,8 +17,8 @@ from app.assets.database.queries.asset_info import (
    list_asset_infos_page,
    fetch_asset_info_asset_and_tags,
    fetch_asset_info_and_asset,
-   touch_asset_info_by_id,
-   replace_asset_info_metadata_projection,
+   update_asset_info_access_time,
+   set_asset_info_metadata,
    delete_asset_info_by_id,
    set_asset_info_preview,
    bulk_insert_asset_infos_ignore_conflicts,
@@ -67,8 +67,8 @@ __all__ = [
    "list_asset_infos_page",
    "fetch_asset_info_asset_and_tags",
    "fetch_asset_info_and_asset",
-   "touch_asset_info_by_id",
-   "replace_asset_info_metadata_projection",
+   "update_asset_info_access_time",
+   "set_asset_info_metadata",
    "delete_asset_info_by_id",
    "set_asset_info_preview",
    "bulk_insert_asset_infos_ignore_conflicts",
@@ -9,7 +9,7 @@ from app.assets.database.models import Asset
 MAX_BIND_PARAMS = 800


-def _rows_per_stmt(cols: int) -> int:
+def _calculate_rows_per_statement(cols: int) -> int:
     return max(1, MAX_BIND_PARAMS // max(1, cols))


@@ -90,5 +90,5 @@ def bulk_insert_assets(
    if not rows:
        return
    ins = sqlite.insert(Asset)
-   for chunk in _iter_chunks(rows, _rows_per_stmt(5)):
+   for chunk in _iter_chunks(rows, _calculate_rows_per_statement(5)):
        session.execute(ins, chunk)
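The chunking arithmetic here keeps each bulk INSERT under SQLite's bind-parameter budget: with MAX_BIND_PARAMS = 800 and 5 bound columns per Asset row, each statement carries at most 800 // 5 = 160 rows. A self-contained sketch of the math and the chunking pattern; the _iter_chunks body is assumed, since only its call sites appear in this diff:

    MAX_BIND_PARAMS = 800

    def _calculate_rows_per_statement(cols: int) -> int:
        # Guard both sides: always at least one row, and never divide by zero.
        return max(1, MAX_BIND_PARAMS // max(1, cols))

    def _iter_chunks(rows: list, size: int):
        # Assumed helper: yield consecutive fixed-size slices of the row list.
        for i in range(0, len(rows), size):
            yield rows[i:i + size]

    assert _calculate_rows_per_statement(5) == 160   # Asset: 5 columns
    assert _calculate_rows_per_statement(9) == 88    # AssetInfo: 9 columns
    assert _calculate_rows_per_statement(3) == 266   # AssetCacheState: 3 columns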
@@ -18,7 +18,7 @@ from sqlalchemy.orm import Session, contains_eager, noload
 from app.assets.database.models import (
     Asset, AssetInfo, AssetInfoMeta, AssetInfoTag, Tag
 )
-from app.assets.helpers import escape_like_prefix, normalize_tags, get_utc_now
+from app.assets.helpers import escape_sql_like_string, normalize_tags, get_utc_now


 def check_is_scalar(v):
@@ -31,7 +31,7 @@ def check_is_scalar(v):
     return False


-def expand_metadata_to_rows(key: str, value):
+def convert_metadata_to_rows(key: str, value):
     """
     Turn a metadata key/value into typed projection rows.
     Returns list[dict] with keys:
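The docstring of convert_metadata_to_rows is truncated here. Judging from how its rows are consumed elsewhere in this diff (row["key"] feeding AssetInfoMeta columns), it projects one metadata value into one or more typed rows. A hedged sketch, with the row schema and column names assumed rather than taken from the commit:

    def convert_metadata_to_rows(key: str, value) -> list[dict]:
        # Illustrative only: fan a scalar or list value out into typed rows
        # so metadata can be filtered in SQL. Column names are guesses.
        def scalar_row(v, ordinal: int) -> dict:
            row = {"key": key, "ordinal": ordinal,
                   "val_str": None, "val_num": None, "val_bool": None}
            if isinstance(v, bool):          # bool before int: bool is an int subclass
                row["val_bool"] = v
            elif isinstance(v, (int, float)):
                row["val_num"] = float(v)
            else:
                row["val_str"] = str(v)
            return row

        if isinstance(value, list):
            return [scalar_row(v, i) for i, v in enumerate(value)]
        return [scalar_row(value, 0)]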
@@ -86,7 +86,7 @@ def expand_metadata_to_rows(key: str, value):
 MAX_BIND_PARAMS = 800


-def _rows_per_stmt(cols: int) -> int:
+def _calculate_rows_per_statement(cols: int) -> int:
     return max(1, MAX_BIND_PARAMS // max(1, cols))


@@ -296,7 +296,7 @@ def list_asset_infos_page(
    )

    if name_contains:
-       escaped, esc = escape_like_prefix(name_contains)
+       escaped, esc = escape_sql_like_string(name_contains)
        base = base.where(AssetInfo.name.ilike(f"%{escaped}%", escape=esc))

    base = _apply_tag_filters(base, include_tags, exclude_tags)
@@ -323,7 +323,7 @@ def list_asset_infos_page(
        .where(_build_visible_owner_clause(owner_id))
    )
    if name_contains:
-       escaped, esc = escape_like_prefix(name_contains)
+       escaped, esc = escape_sql_like_string(name_contains)
        count_stmt = count_stmt.where(AssetInfo.name.ilike(f"%{escaped}%", escape=esc))
    count_stmt = _apply_tag_filters(count_stmt, include_tags, exclude_tags)
    count_stmt = _apply_metadata_filter(count_stmt, metadata_filter)
@@ -401,7 +401,7 @@ def fetch_asset_info_and_asset(
    return pair[0], pair[1]


-def touch_asset_info_by_id(
+def update_asset_info_access_time(
    session: Session,
    asset_info_id: str,
    ts: datetime | None = None,
@@ -416,7 +416,7 @@ def touch_asset_info_by_id(
    session.execute(stmt.values(last_access_time=ts))


-def replace_asset_info_metadata_projection(
+def set_asset_info_metadata(
    session: Session,
    asset_info_id: str,
    user_metadata: dict | None = None,
@@ -437,7 +437,7 @@ def replace_asset_info_metadata_projection(

    rows: list[AssetInfoMeta] = []
    for k, v in user_metadata.items():
-       for r in expand_metadata_to_rows(k, v):
+       for r in convert_metadata_to_rows(k, v):
            rows.append(
                AssetInfoMeta(
                    asset_info_id=asset_info_id,
@@ -501,7 +501,7 @@ def bulk_insert_asset_infos_ignore_conflicts(
    ins = sqlite.insert(AssetInfo).on_conflict_do_nothing(
        index_elements=[AssetInfo.asset_id, AssetInfo.owner_id, AssetInfo.name]
    )
-   for chunk in _iter_chunks(rows, _rows_per_stmt(9)):
+   for chunk in _iter_chunks(rows, _calculate_rows_per_statement(9)):
        session.execute(ins, chunk)

@@ -7,7 +7,7 @@ from sqlalchemy.dialects import sqlite
 from sqlalchemy.orm import Session

 from app.assets.database.models import Asset, AssetCacheState, AssetInfo
-from app.assets.helpers import escape_like_prefix
+from app.assets.helpers import escape_sql_like_string

 MAX_BIND_PARAMS = 800

@@ -27,7 +27,7 @@ __all__ = [
 ]


-def _rows_per_stmt(cols: int) -> int:
+def _calculate_rows_per_statement(cols: int) -> int:
     return max(1, MAX_BIND_PARAMS // max(1, cols))


@@ -115,7 +115,7 @@ def delete_cache_states_outside_prefixes(session: Session, valid_prefixes: list[

    def make_prefix_condition(prefix: str):
        base = prefix if prefix.endswith(os.sep) else prefix + os.sep
-       escaped, esc = escape_like_prefix(base)
+       escaped, esc = escape_sql_like_string(base)
        return AssetCacheState.file_path.like(escaped + "%", escape=esc)

    matches_valid_prefix = sa.or_(*[make_prefix_condition(p) for p in valid_prefixes])
@@ -175,7 +175,7 @@ def get_cache_states_for_prefixes(
        base = os.path.abspath(p)
        if not base.endswith(os.sep):
            base += os.sep
-       escaped, esc = escape_like_prefix(base)
+       escaped, esc = escape_sql_like_string(base)
        conds.append(AssetCacheState.file_path.like(escaped + "%", escape=esc))

    rows = session.execute(
@@ -261,7 +261,7 @@ def bulk_insert_cache_states_ignore_conflicts(
    ins = sqlite.insert(AssetCacheState).on_conflict_do_nothing(
        index_elements=[AssetCacheState.file_path]
    )
-   for chunk in _iter_chunks(rows, _rows_per_stmt(3)):
+   for chunk in _iter_chunks(rows, _calculate_rows_per_statement(3)):
        session.execute(ins, chunk)

@@ -7,12 +7,12 @@ from sqlalchemy.exc import IntegrityError
 from sqlalchemy.orm import Session

 from app.assets.database.models import AssetInfo, AssetInfoMeta, AssetInfoTag, Tag
-from app.assets.helpers import escape_like_prefix, normalize_tags, get_utc_now
+from app.assets.helpers import escape_sql_like_string, normalize_tags, get_utc_now

 MAX_BIND_PARAMS = 800


-def _rows_per_stmt(cols: int) -> int:
+def _calculate_rows_per_statement(cols: int) -> int:
     return max(1, MAX_BIND_PARAMS // max(1, cols))


@@ -262,7 +262,7 @@ def list_tags_with_usage(
    )

    if prefix:
-       escaped, esc = escape_like_prefix(prefix.strip().lower())
+       escaped, esc = escape_sql_like_string(prefix.strip().lower())
        q = q.where(Tag.name.like(escaped + "%", escape=esc))

    if not include_zero:
@@ -275,7 +275,7 @@ def list_tags_with_usage(

    total_q = select(func.count()).select_from(Tag)
    if prefix:
-       escaped, esc = escape_like_prefix(prefix.strip().lower())
+       escaped, esc = escape_sql_like_string(prefix.strip().lower())
        total_q = total_q.where(Tag.name.like(escaped + "%", escape=esc))
    if not include_zero:
        total_q = total_q.where(
@@ -3,7 +3,7 @@ from datetime import datetime, timezone
 from typing import Literal, Sequence


-def pick_best_live_path(states: Sequence) -> str:
+def select_best_live_path(states: Sequence) -> str:
     """
     Return the best on-disk path among cache states:
     1) Prefer a path that exists with needs_verify == False (already verified).
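Only rule 1 of the docstring is visible in this hunk, but the tests near the end of this commit pin down the rest of the contract: fall back to an existing unverified path, return "" when nothing on disk matches, and tolerate a None file_path. A sketch consistent with those tests; it is reconstructed behavior, not the literal body:

    import os
    from typing import Sequence

    def select_best_live_path(states: Sequence) -> str:
        # Reconstructed from the docstring and the tests in this commit.
        fallback = ""
        for state in states:
            path = getattr(state, "file_path", None)
            if not path or not os.path.isfile(path):
                continue
            if not getattr(state, "needs_verify", True):
                return path      # verified and present: best candidate
            if not fallback:
                fallback = path  # first existing-but-unverified path
        return fallback          # "" when no state points at a real file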
@@ -22,7 +22,7 @@ def pick_best_live_path(states: Sequence) -> str:
 ALLOWED_ROOTS: tuple[Literal["models", "input", "output"], ...] = ("models", "input", "output")


-def escape_like_prefix(s: str, escape: str = "!") -> tuple[str, str]:
+def escape_sql_like_string(s: str, escape: str = "!") -> tuple[str, str]:
     """Escapes %, _ and the escape char itself in a LIKE prefix.
     Returns (escaped_prefix, escape_char). Caller should append '%' and pass escape=escape_char to .like().
     """
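Per the docstring, the helper returns the escaped string together with the escape character so the caller can append its own wildcard. The body is not shown in the diff; a sketch matching the documented behavior, plus the call-site pattern used throughout this commit:

    def escape_sql_like_string(s: str, escape: str = "!") -> tuple[str, str]:
        # Escape the escape character first, then the LIKE wildcards % and _.
        s = s.replace(escape, escape + escape)
        s = s.replace("%", escape + "%").replace("_", escape + "_")
        return s, escape

    escaped, esc = escape_sql_like_string("50%_off")
    assert escaped == "50!%!_off"
    # Callers then build the pattern themselves, e.g.:
    #   Tag.name.like(escaped + "%", escape=esc)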
@@ -24,7 +24,7 @@ from app.assets.api.schemas_in import (
    HashMismatchError,
    ParsedUpload,
 )
-from app.assets.api.upload import _cleanup_temp
+from app.assets.api.upload import _delete_temp_file_if_exists
 from app.assets.database.queries import (
    asset_exists_by_hash,
    fetch_asset_info_and_asset,
@@ -32,11 +32,11 @@ from app.assets.database.queries import (
    get_asset_tags,
    list_asset_infos_page,
    list_cache_states_by_asset_id,
-   touch_asset_info_by_id,
+   update_asset_info_access_time,
 )
-from app.assets.helpers import pick_best_live_path
+from app.assets.helpers import select_best_live_path
 from app.assets.services.path_utils import (
-   ensure_within_base,
+   validate_path_within_base,
    resolve_destination_from_tags,
 )
 from app.assets.services import (
@@ -168,11 +168,11 @@ def resolve_asset_content_for_download(

    info, asset = pair
    states = list_cache_states_by_asset_id(session, asset_id=asset.id)
-   abs_path = pick_best_live_path(states)
+   abs_path = select_best_live_path(states)
    if not abs_path:
        raise FileNotFoundError

-   touch_asset_info_by_id(session, asset_info_id=asset_info_id)
+   update_asset_info_access_time(session, asset_info_id=asset_info_id)
    session.commit()

    ctype = asset.mime_type or mimetypes.guess_type(info.name or abs_path)[0] or "application/octet-stream"
@@ -242,7 +242,7 @@ def upload_asset_from_temp_path(
    ext = _ext if 0 < len(_ext) <= 16 else ""
    hashed_basename = f"{digest}{ext}"
    dest_abs = os.path.abspath(os.path.join(dest_dir, hashed_basename))
-   ensure_within_base(dest_abs, base_dir)
+   validate_path_within_base(dest_abs, base_dir)

    content_type = (
        mimetypes.guess_type(os.path.basename(src_for_ext), strict=False)[0]
@@ -325,13 +325,13 @@ def process_upload(
            "hash": parsed.provided_hash,
        })
    except ValidationError as ve:
-       _cleanup_temp(parsed.tmp_path)
+       _delete_temp_file_if_exists(parsed.tmp_path)
        raise AssetValidationError("INVALID_BODY", f"Validation failed: {ve.json()}")

    # Validate models category against configured folders
    if spec.tags and spec.tags[0] == "models":
        if len(spec.tags) < 2 or spec.tags[1] not in folder_paths.folder_names_and_paths:
-           _cleanup_temp(parsed.tmp_path)
+           _delete_temp_file_if_exists(parsed.tmp_path)
            category = spec.tags[1] if len(spec.tags) >= 2 else ""
            raise AssetValidationError("INVALID_BODY", f"unknown models category '{category}'")

@@ -349,7 +349,7 @@ def process_upload(
            raise AssetNotFoundError(f"Asset content {spec.hash} does not exist")

        # Drain temp if we accidentally saved (e.g., hash field came after file)
-       _cleanup_temp(parsed.tmp_path)
+       _delete_temp_file_if_exists(parsed.tmp_path)
        return result

    # Otherwise, we must have a temp file path to ingest
@@ -365,13 +365,13 @@ def process_upload(
            expected_asset_hash=spec.hash,
        )
    except ValueError as e:
-       _cleanup_temp(parsed.tmp_path)
+       _delete_temp_file_if_exists(parsed.tmp_path)
        msg = str(e)
        if "HASH_MISMATCH" in msg or msg.strip().upper() == "HASH_MISMATCH":
            raise HashMismatchError("Uploaded file hash does not match provided hash.")
        raise AssetValidationError("BAD_REQUEST", "Invalid inputs.")
    except Exception:
-       _cleanup_temp(parsed.tmp_path)
+       _delete_temp_file_if_exists(parsed.tmp_path)
        raise

@@ -13,7 +13,7 @@ from typing import Sequence

 from app.assets.database.models import Asset
 from app.database.db import create_session
-from app.assets.helpers import pick_best_live_path, get_utc_now
+from app.assets.helpers import select_best_live_path, get_utc_now
 from app.assets.services.path_utils import compute_relative_filename
 from app.assets.database.queries import (
    asset_info_exists_for_asset_id,
@@ -21,7 +21,7 @@ from app.assets.database.queries import (
    fetch_asset_info_asset_and_tags,
    get_asset_info_by_id,
    list_cache_states_by_asset_id,
-   replace_asset_info_metadata_projection,
+   set_asset_info_metadata,
    set_asset_info_preview,
    set_asset_info_tags,
 )
@@ -83,7 +83,7 @@ def update_asset_metadata(
        new_meta = dict(user_metadata)
        if computed_filename:
            new_meta["filename"] = computed_filename
-       replace_asset_info_metadata_projection(
+       set_asset_info_metadata(
            session, asset_info_id=asset_info_id, user_metadata=new_meta
        )
        touched = True
@@ -93,7 +93,7 @@ def update_asset_metadata(
        if current_meta.get("filename") != computed_filename:
            new_meta = dict(current_meta)
            new_meta["filename"] = computed_filename
-           replace_asset_info_metadata_projection(
+           set_asset_info_metadata(
                session, asset_info_id=asset_info_id, user_metadata=new_meta
            )
            touched = True
@@ -217,5 +217,5 @@ def set_asset_preview(

 def _compute_filename_for_asset(session, asset_id: str) -> str | None:
     """Compute the relative filename for an asset from its cache states."""
-    primary_path = pick_best_live_path(list_cache_states_by_asset_id(session, asset_id=asset_id))
+    primary_path = select_best_live_path(list_cache_states_by_asset_id(session, asset_id=asset_id))
     return compute_relative_filename(primary_path) if primary_path else None
@@ -13,14 +13,14 @@ from sqlalchemy import select

 from app.assets.database.models import Asset, Tag
 from app.database.db import create_session
-from app.assets.helpers import normalize_tags, pick_best_live_path
+from app.assets.helpers import normalize_tags, select_best_live_path
 from app.assets.services.path_utils import compute_relative_filename
 from app.assets.database.queries import (
    get_asset_by_hash,
    get_or_create_asset_info,
    list_cache_states_by_asset_id,
    remove_missing_tag_for_asset_id,
-   replace_asset_info_metadata_projection,
+   set_asset_info_metadata,
    set_asset_info_tags,
    update_asset_info_timestamps,
    upsert_asset,
@@ -180,7 +180,7 @@ def register_existing_asset(
        new_meta["filename"] = computed_filename

    if new_meta:
-       replace_asset_info_metadata_projection(
+       set_asset_info_metadata(
            session,
            asset_info_id=info.id,
            user_metadata=new_meta,
@@ -217,7 +217,7 @@ def _validate_tags_exist(session, tags: list[str]) -> None:

 def _compute_filename_for_asset(session, asset_id: str) -> str | None:
     """Compute the relative filename for an asset from its cache states."""
-    primary_path = pick_best_live_path(list_cache_states_by_asset_id(session, asset_id=asset_id))
+    primary_path = select_best_live_path(list_cache_states_by_asset_id(session, asset_id=asset_id))
     return compute_relative_filename(primary_path) if primary_path else None

@@ -240,7 +240,7 @@ def _update_metadata_with_filename(
        new_meta["filename"] = computed_filename

    if new_meta != current_meta:
-       replace_asset_info_metadata_projection(
+       set_asset_info_metadata(
            session,
            asset_info_id=asset_info_id,
            user_metadata=new_meta,
@@ -48,7 +48,7 @@ def resolve_destination_from_tags(tags: list[str]) -> tuple[str, list[str]]:
     return base_dir, raw_subdirs if raw_subdirs else []


-def ensure_within_base(candidate: str, base: str) -> None:
+def validate_path_within_base(candidate: str, base: str) -> None:
     cand_abs = os.path.abspath(candidate)
     base_abs = os.path.abspath(base)
     try:
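The hunk ends inside the try block. One common way to finish this kind of containment check, with the error type and wording assumed rather than taken from the commit:

    import os

    def validate_path_within_base(candidate: str, base: str) -> None:
        cand_abs = os.path.abspath(candidate)
        base_abs = os.path.abspath(base)
        try:
            common = os.path.commonpath([cand_abs, base_abs])
        except ValueError:
            # e.g. paths on different Windows drives can never be contained
            common = None
        if common != base_abs:
            raise ValueError(f"path {candidate!r} escapes base {base!r}")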
@@ -69,7 +69,7 @@ def compute_relative_filename(file_path: str) -> str | None:
     NOTE: this is a temporary helper, used only for initializing metadata["filename"] field.
     """
     try:
-        root_category, rel_path = get_relative_to_root_category_path_of_asset(file_path)
+        root_category, rel_path = get_asset_category_and_relative_path(file_path)
     except ValueError:
         return None

@@ -85,7 +85,7 @@ def compute_relative_filename(file_path: str) -> str | None:
     return "/".join(parts)  # input/output: keep all parts


-def get_relative_to_root_category_path_of_asset(file_path: str) -> tuple[Literal["input", "output", "models"], str]:
+def get_asset_category_and_relative_path(file_path: str) -> tuple[Literal["input", "output", "models"], str]:
     """Given an absolute or relative file path, determine which root category the path belongs to:
     - 'input' if the file resides under `folder_paths.get_input_directory()`
     - 'output' if the file resides under `folder_paths.get_output_directory()`
@@ -143,7 +143,7 @@ def get_name_and_tags_from_asset_path(file_path: str) -> tuple[str, list[str]]:
     """Return a tuple (name, tags) derived from a filesystem path.

     Semantics:
-    - Root category is determined by `get_relative_to_root_category_path_of_asset`.
+    - Root category is determined by `get_asset_category_and_relative_path`.
     - The returned `name` is the base filename with extension from the relative path.
     - The returned `tags` are:
         [root_category] + parent folders of the relative path (in order)
@@ -155,7 +155,7 @@ def get_name_and_tags_from_asset_path(file_path: str) -> tuple[str, list[str]]:
     Raises:
         ValueError: if the path does not belong to input, output, or configured model bases.
     """
-    root_category, some_path = get_relative_to_root_category_path_of_asset(file_path)
+    root_category, some_path = get_asset_category_and_relative_path(file_path)
     p = Path(some_path)
     parent_parts = [part for part in p.parent.parts if part not in (".", "..", p.anchor)]
     return p.name, list(dict.fromkeys(normalize_tags([root_category, *parent_parts])))
@@ -38,7 +38,7 @@ from app.database.db import create_session, dependencies_available
 RootType = Literal["models", "input", "output"]


-def check_asset_file_fast(
+def verify_asset_file_unchanged(
     mtime_db: int | None,
     size_db: int | None,
     stat_result: os.stat_result,
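Only the signature and (in the next hunk) the final return True are visible. Given that the call site below passes row.mtime_ns and a stored size, the fast check presumably compares those two stat fields, roughly as follows; the exact comparison logic is an assumption:

    import os

    def verify_asset_file_unchanged(
        mtime_db: int | None,
        size_db: int | None,
        stat_result: os.stat_result,
    ) -> bool:
        # Sketch: missing recorded values, or any mismatch, means "changed",
        # so the caller falls back to a slower verification path.
        if mtime_db is None or size_db is None:
            return False
        return (stat_result.st_mtime_ns == mtime_db
                and stat_result.st_size == size_db)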
@@ -54,7 +54,7 @@ def check_asset_file_fast(
     return True


-def list_tree(base_dir: str) -> list[str]:
+def list_files_recursively(base_dir: str) -> list[str]:
     out: list[str] = []
     base_abs = os.path.abspath(base_dir)
     if not os.path.isdir(base_abs):
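The body is truncated after the isdir guard; an os.walk-based completion in the same spirit (traversal details such as symlink handling are assumptions):

    import os

    def list_files_recursively(base_dir: str) -> list[str]:
        # Return absolute paths of every regular file under base_dir,
        # or an empty list when base_dir is not a directory.
        out: list[str] = []
        base_abs = os.path.abspath(base_dir)
        if not os.path.isdir(base_abs):
            return out
        for dirpath, _dirnames, filenames in os.walk(base_abs):
            for name in filenames:
                out.append(os.path.join(dirpath, name))
        return out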
@@ -99,7 +99,7 @@ def collect_models_files() -> list[str]:
     return out


-def _seed_from_paths_batch(
+def _batch_insert_assets_from_paths(
     session: Session,
     specs: list[dict],
     owner_id: str = "",
@@ -260,7 +260,7 @@ def prune_orphaned_assets(session, valid_prefixes: list[str]) -> int:
     return delete_assets_by_ids(session, orphan_ids)


-def reconcile_cache_states_for_root(
+def sync_cache_states_with_filesystem(
     session,
     root: RootType,
     collect_existing_paths: bool = False,
@@ -299,7 +299,7 @@ def reconcile_cache_states_for_root(
            fast_ok = False
            try:
                exists = True
-               fast_ok = check_asset_file_fast(
+               fast_ok = verify_asset_file_unchanged(
                    mtime_db=row.mtime_ns,
                    size_db=acc["size_db"],
                    stat_result=os.stat(row.file_path, follow_symlinks=True),
@@ -385,7 +385,7 @@ def seed_assets(roots: tuple[RootType, ...], enable_logging: bool = False) -> None:
    for r in roots:
        try:
            with create_session() as sess:
-               survivors = reconcile_cache_states_for_root(
+               survivors = sync_cache_states_with_filesystem(
                    sess,
                    r,
                    collect_existing_paths=True,
@@ -410,9 +410,9 @@ def seed_assets(roots: tuple[RootType, ...], enable_logging: bool = False) -> None:
    if "models" in roots:
        paths.extend(collect_models_files())
    if "input" in roots:
-       paths.extend(list_tree(folder_paths.get_input_directory()))
+       paths.extend(list_files_recursively(folder_paths.get_input_directory()))
    if "output" in roots:
-       paths.extend(list_tree(folder_paths.get_output_directory()))
+       paths.extend(list_files_recursively(folder_paths.get_output_directory()))

    specs: list[dict] = []
    tag_pool: set[str] = set()
@@ -445,7 +445,7 @@ def seed_assets(roots: tuple[RootType, ...], enable_logging: bool = False) -> None:
    with create_session() as sess:
        if tag_pool:
            ensure_tags_exist(sess, tag_pool, tag_type="user")
-       result = _seed_from_paths_batch(sess, specs=specs, owner_id="")
+       result = _batch_insert_assets_from_paths(sess, specs=specs, owner_id="")
        created += result["inserted_infos"]
        sess.commit()

@@ -13,8 +13,8 @@ from app.assets.database.queries import (
    list_asset_infos_page,
    fetch_asset_info_asset_and_tags,
    fetch_asset_info_and_asset,
-   touch_asset_info_by_id,
-   replace_asset_info_metadata_projection,
+   update_asset_info_access_time,
+   set_asset_info_metadata,
    delete_asset_info_by_id,
    set_asset_info_preview,
    bulk_insert_asset_infos_ignore_conflicts,
@@ -196,7 +196,7 @@ class TestFetchAssetInfoAndAsset:
        assert ret_asset.id == asset.id


-class TestTouchAssetInfoById:
+class TestUpdateAssetInfoAccessTime:
    def test_updates_last_access_time(self, session: Session):
        asset = _make_asset(session, "hash1")
        info = _make_asset_info(session, asset)
@@ -206,7 +206,7 @@ class TestTouchAssetInfoById:
        import time
        time.sleep(0.01)

-       touch_asset_info_by_id(session, asset_info_id=info.id)
+       update_asset_info_access_time(session, asset_info_id=info.id)
        session.commit()

        session.refresh(info)
@@ -355,13 +355,13 @@ class TestUpdateAssetInfoTimestamps:
        assert info.preview_id == preview_asset.id


-class TestReplaceAssetInfoMetadataProjection:
+class TestSetAssetInfoMetadata:
    def test_sets_metadata(self, session: Session):
        asset = _make_asset(session, "hash1")
        info = _make_asset_info(session, asset)
        session.commit()

-       replace_asset_info_metadata_projection(
+       set_asset_info_metadata(
            session, asset_info_id=info.id, user_metadata={"key": "value"}
        )
        session.commit()
@@ -379,12 +379,12 @@ class TestReplaceAssetInfoMetadataProjection:
        info = _make_asset_info(session, asset)
        session.commit()

-       replace_asset_info_metadata_projection(
+       set_asset_info_metadata(
            session, asset_info_id=info.id, user_metadata={"old": "data"}
        )
        session.commit()

-       replace_asset_info_metadata_projection(
+       set_asset_info_metadata(
            session, asset_info_id=info.id, user_metadata={"new": "data"}
        )
        session.commit()
@@ -398,12 +398,12 @@ class TestReplaceAssetInfoMetadataProjection:
        info = _make_asset_info(session, asset)
        session.commit()

-       replace_asset_info_metadata_projection(
+       set_asset_info_metadata(
            session, asset_info_id=info.id, user_metadata={"key": "value"}
        )
        session.commit()

-       replace_asset_info_metadata_projection(
+       set_asset_info_metadata(
            session, asset_info_id=info.id, user_metadata={}
        )
        session.commit()
@@ -415,7 +415,7 @@ class TestReplaceAssetInfoMetadataProjection:

    def test_raises_for_nonexistent(self, session: Session):
        with pytest.raises(ValueError, match="not found"):
-           replace_asset_info_metadata_projection(
+           set_asset_info_metadata(
                session, asset_info_id="nonexistent", user_metadata={"key": "value"}
            )

@@ -15,7 +15,7 @@ from app.assets.database.queries import (
    bulk_insert_cache_states_ignore_conflicts,
    get_cache_states_by_paths_and_asset_ids,
 )
-from app.assets.helpers import pick_best_live_path, get_utc_now
+from app.assets.helpers import select_best_live_path, get_utc_now


 def _make_asset(session: Session, hash_val: str | None = None, size: int = 1024) -> Asset:
@@ -71,9 +71,9 @@ class TestListCacheStatesByAssetId:
        assert paths == ["/path/asset1.bin"]


-class TestPickBestLivePath:
+class TestSelectBestLivePath:
    def test_returns_empty_for_empty_list(self):
-       result = pick_best_live_path([])
+       result = select_best_live_path([])
        assert result == ""

    def test_returns_empty_when_no_files_exist(self, session: Session):
@@ -81,7 +81,7 @@ class TestPickBestLivePath:
        state = _make_cache_state(session, asset, "/nonexistent/path.bin")
        session.commit()

-       result = pick_best_live_path([state])
+       result = select_best_live_path([state])
        assert result == ""

    def test_prefers_verified_path(self, session: Session, tmp_path):
@@ -103,7 +103,7 @@ class TestPickBestLivePath:
        session.commit()

        states = [state_unverified, state_verified]
-       result = pick_best_live_path(states)
+       result = select_best_live_path(states)
        assert result == str(verified_file)

    def test_falls_back_to_existing_unverified(self, session: Session, tmp_path):
@@ -116,11 +116,11 @@ class TestPickBestLivePath:
        state = _make_cache_state(session, asset, str(existing_file), needs_verify=True)
        session.commit()

-       result = pick_best_live_path([state])
+       result = select_best_live_path([state])
        assert result == str(existing_file)


-class TestPickBestLivePathWithMocking:
+class TestSelectBestLivePathWithMocking:
    def test_handles_missing_file_path_attr(self):
        """Gracefully handle states with None file_path."""

@@ -128,7 +128,7 @@ class TestPickBestLivePathWithMocking:
            file_path = None
            needs_verify = False

-       result = pick_best_live_path([MockState()])
+       result = select_best_live_path([MockState()])
        assert result == ""

@@ -3,7 +3,7 @@ from sqlalchemy.orm import Session

 from app.assets.database.models import Asset, AssetInfo, AssetInfoMeta
 from app.assets.database.queries import list_asset_infos_page
-from app.assets.database.queries.asset_info import expand_metadata_to_rows
+from app.assets.database.queries.asset_info import convert_metadata_to_rows
 from app.assets.helpers import get_utc_now


@@ -35,7 +35,7 @@ def _make_asset_info(

    if metadata:
        for key, val in metadata.items():
-           for row in expand_metadata_to_rows(key, val):
+           for row in convert_metadata_to_rows(key, val):
                meta_row = AssetInfoMeta(
                    asset_info_id=info.id,
                    key=row["key"],