mirror of https://github.com/comfyanonymous/ComfyUI.git
synced 2026-02-07 03:52:32 +08:00

chore: remove obvious/self-documenting comments from assets package

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

parent 80d99e7b63
commit 37ecc5b663
@@ -91,9 +91,8 @@ async def parse_multipart_upload(
                     file_written += len(chunk)
             except Exception:
                 raise UploadError(500, "UPLOAD_IO_ERROR", "Failed to receive uploaded file.")
-            continue  # Do not create temp file; we will create AssetInfo from the existing content
+            continue

-        # Otherwise, store to temp for hashing/ingest
         uploads_root = os.path.join(folder_paths.get_temp_directory(), "uploads")
         unique_dir = os.path.join(uploads_root, uuid.uuid4().hex)
         os.makedirs(unique_dir, exist_ok=True)
@@ -118,7 +117,6 @@ async def parse_multipart_upload(
         elif fname == "user_metadata":
             user_metadata_raw = (await field.text()) or None

-    # Validate we have either a file or a known hash
    if not file_present and not (provided_hash and provided_hash_exists):
        raise UploadError(400, "MISSING_FILE", "Form must include a 'file' part or a known 'hash'.")
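For orientation, here is a minimal sketch of the streaming pattern these two hunks touch, assuming an aiohttp multipart field; `stream_file_field_to_temp` and the `UploadError` stand-in are illustrative names, not the project's actual helpers.

```python
import os
import uuid

class UploadError(Exception):  # stand-in for the project's UploadError
    def __init__(self, status: int, code: str, message: str):
        super().__init__(message)
        self.status, self.code = status, code

async def stream_file_field_to_temp(field, uploads_root: str) -> str:
    """Stream one multipart 'file' part to a unique temp path for hashing/ingest."""
    unique_dir = os.path.join(uploads_root, uuid.uuid4().hex)
    os.makedirs(unique_dir, exist_ok=True)
    tmp_path = os.path.join(unique_dir, field.filename or "upload.bin")
    file_written = 0
    try:
        with open(tmp_path, "wb") as f:
            while True:
                chunk = await field.read_chunk()  # aiohttp yields b"" at EOF
                if not chunk:
                    break
                file_written += len(chunk)
                f.write(chunk)
    except Exception:
        raise UploadError(500, "UPLOAD_IO_ERROR", "Failed to receive uploaded file.")
    return tmp_path
```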
@@ -49,12 +49,10 @@ from app.assets.database.queries.tags import (
 )

 __all__ = [
-    # asset.py
     "asset_exists_by_hash",
     "get_asset_by_hash",
     "upsert_asset",
     "bulk_insert_assets",
-    # asset_info.py
     "asset_info_exists_for_asset_id",
     "get_asset_info_by_id",
     "insert_asset_info",
@@ -71,7 +69,6 @@ __all__ = [
     "set_asset_info_preview",
     "bulk_insert_asset_infos_ignore_conflicts",
     "get_asset_info_ids_by_ids",
-    # cache_state.py
     "CacheStateRow",
     "list_cache_states_by_asset_id",
     "upsert_cache_state",
@@ -84,7 +81,6 @@ __all__ = [
     "delete_orphaned_seed_asset",
     "bulk_insert_cache_states_ignore_conflicts",
     "get_cache_states_by_paths_and_asset_ids",
-    # tags.py
     "ensure_tags_exist",
     "get_asset_tags",
     "set_asset_info_tags",
@@ -193,7 +193,6 @@ def upload_asset_from_temp_path(
     if expected_asset_hash and asset_hash != expected_asset_hash.strip().lower():
         raise ValueError("HASH_MISMATCH")

-    # Check if asset already exists by hash
     with create_session() as session:
         existing = get_asset_by_hash(session, asset_hash=asset_hash)

@@ -228,7 +227,6 @@ def upload_asset_from_temp_path(
                 created_new=False,
             )

-    # New asset - move file to destination
     base_dir, subdirs = resolve_destination_from_tags(spec.tags)
     dest_dir = os.path.join(base_dir, *subdirs) if subdirs else base_dir
     os.makedirs(dest_dir, exist_ok=True)
@@ -324,7 +322,6 @@ def process_upload(
         _delete_temp_file_if_exists(parsed.tmp_path)
         raise AssetValidationError("INVALID_BODY", f"Validation failed: {ve.json()}")

-    # Validate models category against configured folders
     if spec.tags and spec.tags[0] == "models":
         if len(spec.tags) < 2 or spec.tags[1] not in folder_paths.folder_names_and_paths:
             _delete_temp_file_if_exists(parsed.tmp_path)
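The last hunk's tag check is compact; a minimal runnable sketch, with a `SimpleNamespace` standing in for ComfyUI's `folder_paths` module and a made-up category entry:

```python
from types import SimpleNamespace

# Stand-in for folder_paths: folder_names_and_paths maps model category
# names to (search_paths, extensions) pairs; this entry is illustrative.
folder_paths = SimpleNamespace(
    folder_names_and_paths={
        "checkpoints": (["/models/checkpoints"], {".safetensors", ".ckpt"}),
    }
)

def validate_model_tags(tags: list[str]) -> None:
    # The first tag selects the root; "models" requires a known category next.
    if tags and tags[0] == "models":
        if len(tags) < 2 or tags[1] not in folder_paths.folder_names_and_paths:
            raise ValueError("INVALID_BODY: unknown model category")

validate_model_tags(["models", "checkpoints"])  # passes
try:
    validate_model_tags(["models", "not-a-category"])
except ValueError as e:
    print(e)
```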
@@ -167,27 +167,20 @@ def _batch_insert_assets_from_paths(
             "_filename": sp["fname"],
         }

-        # 1. Insert all seed Assets (hash=NULL)
         bulk_insert_assets(session, asset_rows)
-
-        # 2. Try to claim cache states (file_path unique)
         bulk_insert_cache_states_ignore_conflicts(session, state_rows)
-
-        # 3. Query to find which paths we won
         winners_by_path = get_cache_states_by_paths_and_asset_ids(session, path_to_asset)

         all_paths_set = set(path_list)
         losers_by_path = all_paths_set - winners_by_path
         lost_assets = [path_to_asset[p] for p in losers_by_path]

-        # 4. Delete Assets for losers
         if lost_assets:
             delete_assets_by_ids(session, lost_assets)

         if not winners_by_path:
             return {"inserted_infos": 0, "won_states": 0, "lost_states": len(losers_by_path)}

-        # 5. Insert AssetInfo for winners
         winner_info_rows = [asset_to_info[path_to_asset[p]] for p in winners_by_path]
         db_info_rows = [
             {
@@ -205,11 +198,9 @@ def _batch_insert_assets_from_paths(
         ]
         bulk_insert_asset_infos_ignore_conflicts(session, db_info_rows)

-        # 6. Find which info rows were actually inserted
         all_info_ids = [row["id"] for row in winner_info_rows]
         inserted_info_ids = get_asset_info_ids_by_ids(session, all_info_ids)

-        # 7. Build and insert tag + meta rows
         tag_rows: list[dict] = []
         meta_rows: list[dict] = []
         if inserted_info_ids:
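The claim sequence these two hunks strip comments from (insert everything, ignore conflicts on the unique `file_path`, then query back to learn which rows this batch actually won) is worth seeing end to end. A minimal sketch using sqlite3 in place of the project's session and query helpers; the table and column names are illustrative:

```python
import sqlite3
import uuid

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE cache_state (file_path TEXT PRIMARY KEY, asset_id TEXT)")

def claim_paths(path_to_asset: dict[str, str]) -> tuple[set[str], set[str]]:
    """Insert-or-ignore each path, then check which asset_ids actually stuck."""
    conn.executemany(
        "INSERT OR IGNORE INTO cache_state (file_path, asset_id) VALUES (?, ?)",
        list(path_to_asset.items()),
    )
    marks = ",".join("?" * len(path_to_asset))
    rows = conn.execute(
        f"SELECT file_path, asset_id FROM cache_state WHERE file_path IN ({marks})",
        list(path_to_asset),
    ).fetchall()
    winners = {p for p, aid in rows if path_to_asset[p] == aid}  # our row won
    losers = set(path_to_asset) - winners                         # someone else's row
    return winners, losers

# Two batches race for the same path; only the first wins it.
a = {"/models/x.safetensors": uuid.uuid4().hex}
b = {"/models/x.safetensors": uuid.uuid4().hex}
print(claim_paths(a))  # ({'/models/x.safetensors'}, set())
print(claim_paths(b))  # (set(), {'/models/x.safetensors'})
```

Assets for losing paths are then deleted, and AssetInfo rows are inserted only for winners, which keeps concurrent seeders from double-registering the same file.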
@@ -468,18 +459,15 @@ def seed_assets(roots: tuple[RootType, ...], enable_logging: bool = False) -> None:

     t_start = time.perf_counter()

-    # Sync existing cache states
     existing_paths: set[str] = set()
     for r in roots:
         existing_paths.update(_sync_root_safely(r))

-    # Prune orphaned assets
     all_prefixes = [
         os.path.abspath(p) for r in roots for p in get_prefixes_for_root(r)
     ]
     orphans_pruned = _prune_orphans_safely(all_prefixes)

-    # Collect and process paths
     paths = _collect_paths_for_roots(roots)
     specs, tag_pool, skipped_existing = _build_asset_specs(paths, existing_paths)
     created = _insert_asset_specs(specs, tag_pool)
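Even without the section comments, `seed_assets` encodes a fixed ordering: sync what the database already tracks, prune rows whose files are gone, then scan the roots and batch-insert the rest. A runnable skeleton of that ordering, where every `_*` helper is a stub standing in for the real implementation:

```python
import time

def _sync_root_safely(root) -> set[str]:            # stub: paths already tracked
    return set()

def _prune_orphans_safely(prefixes) -> int:         # stub: drop DB rows with no file
    return 0

def _collect_paths_for_roots(roots) -> list[str]:   # stub: filesystem walk
    return []

def _build_asset_specs(paths, existing):            # stub: specs for new paths only
    return [], set(), 0

def _insert_asset_specs(specs, tag_pool) -> int:    # stub: the batch claim insert
    return 0

def seed_assets(roots) -> None:
    t_start = time.perf_counter()
    existing_paths: set[str] = set()
    for r in roots:
        existing_paths.update(_sync_root_safely(r))  # 1. sync known cache states
    _prune_orphans_safely(list(roots))               # 2. prune orphaned assets
    paths = _collect_paths_for_roots(roots)          # 3. scan roots
    specs, tag_pool, _ = _build_asset_specs(paths, existing_paths)
    created = _insert_asset_specs(specs, tag_pool)   # 4. batch insert (claim pattern)
    print(f"seeded {created} assets in {time.perf_counter() - t_start:.3f}s")

seed_assets(("models",))
```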
@@ -15,15 +15,12 @@ from app.assets.services.tagging import (
 )

 __all__ = [
-    # ingest.py
     "ingest_file_from_path",
     "register_existing_asset",
-    # asset_management.py
     "get_asset_detail",
     "update_asset_metadata",
     "delete_asset_reference",
     "set_asset_preview",
-    # tagging.py
     "apply_tags",
     "remove_tags",
     "list_tags",
@@ -74,10 +74,8 @@ def update_asset_metadata(
         update_asset_info_name(session, asset_info_id=asset_info_id, name=name)
         touched = True

-    # Compute filename from best live path
     computed_filename = _compute_filename_for_asset(session, info.asset_id)

-    # Determine if metadata needs updating
     new_meta: dict | None = None
     if user_metadata is not None:
         new_meta = dict(user_metadata)
@@ -106,7 +104,6 @@ def update_asset_metadata(
     if touched and user_metadata is None:
         update_asset_info_updated_at(session, asset_info_id=asset_info_id)

-    # Fetch updated info with tags
     result = fetch_asset_info_asset_and_tags(
         session,
         asset_info_id=asset_info_id,
@@ -116,7 +113,6 @@ def update_asset_metadata(
         raise RuntimeError("State changed during update")

     info, asset, tag_list = result
-    # Extract plain data before session closes
     detail = AssetDetailResult(
         info=extract_info_data(info),
         asset=extract_asset_data(asset),
@@ -203,7 +199,6 @@ def set_asset_preview(
         raise RuntimeError("State changed during preview update")

     info, asset, tags = result
-    # Extract plain data before session closes
     detail = AssetDetailResult(
         info=extract_info_data(info),
         asset=extract_asset_data(asset),
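Both `update_asset_metadata` and `set_asset_preview` snapshot ORM rows into plain data before the session closes. A minimal sketch of why, assuming SQLAlchemy-style sessions where attribute access on an expired or detached instance can hit the database or raise; `InfoData` and `FakeRow` are illustrative stand-ins:

```python
from dataclasses import dataclass

@dataclass(frozen=True)
class InfoData:  # stand-in for the real extract_info_data result type
    id: str
    name: str

def extract_info_data(info) -> InfoData:
    # Touch every needed column while the session is still open, so the
    # returned object is safe to use after the session context exits.
    return InfoData(id=info.id, name=info.name)

class FakeRow:  # stands in for an ORM-mapped AssetInfo row
    id, name = "info-1", "example"

detail = extract_info_data(FakeRow())
print(detail)  # InfoData(id='info-1', name='example')
```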
@@ -73,7 +73,6 @@ def _hash_file_obj(file_obj: IO, chunk_size: int = DEFAULT_CHUNK) -> str:
     orig_pos = file_obj.tell()

     try:
-        # seek to the beginning before reading
         if orig_pos != 0:
             file_obj.seek(0)

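A sketch of the whole helper this hunk touches: hash the full stream, then restore the caller's position. The saved `orig_pos` in the hunk suggests exactly this contract; `hashlib.sha256` and the chunk size are assumptions, as the project may use a different algorithm and constant.

```python
import hashlib
import io
from typing import IO

DEFAULT_CHUNK = 1024 * 1024  # 1 MiB; the real constant may differ

def hash_file_obj(file_obj: IO[bytes], chunk_size: int = DEFAULT_CHUNK) -> str:
    """Hash a seekable file object without losing the caller's position."""
    orig_pos = file_obj.tell()
    try:
        if orig_pos != 0:
            file_obj.seek(0)          # hash the whole file, not just the tail
        h = hashlib.sha256()          # assumed algorithm for this sketch
        while True:
            chunk = file_obj.read(chunk_size)
            if not chunk:
                break
            h.update(chunk)
        return h.hexdigest()
    finally:
        file_obj.seek(orig_pos)       # restore so callers can keep reading

buf = io.BytesIO(b"example bytes")
buf.read(4)                            # move the cursor
digest = hash_file_obj(buf)
assert buf.tell() == 4                 # position restored
print(digest)
```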
@@ -60,12 +60,10 @@ def ingest_file_from_path(
     }

     with create_session() as session:
-        # Validate preview_id if provided
         if preview_id:
             if not session.get(Asset, preview_id):
                 preview_id = None

-        # 1. Upsert Asset
         asset, created, updated = upsert_asset(
             session,
             asset_hash=asset_hash,
@@ -75,7 +73,6 @@ def ingest_file_from_path(
         out["asset_created"] = created
         out["asset_updated"] = updated

-        # 2. Upsert CacheState
         state_created, state_updated = upsert_cache_state(
             session,
             asset_id=asset.id,
@@ -85,7 +82,6 @@ def ingest_file_from_path(
         out["state_created"] = state_created
         out["state_updated"] = state_updated

-        # 3. Optionally create/update AssetInfo
         if info_name:
             info, info_created = get_or_create_asset_info(
                 session,
@@ -100,7 +96,6 @@ def ingest_file_from_path(
             update_asset_info_timestamps(session, asset_info=info, preview_id=preview_id)
             out["asset_info_id"] = info.id

-        # 4. Handle tags
         norm = normalize_tags(list(tags))
         if norm and out["asset_info_id"]:
             if require_existing_tags:
@@ -113,7 +108,6 @@ def ingest_file_from_path(
                 create_if_missing=not require_existing_tags,
             )

-        # 5. Update metadata with computed filename
         if out["asset_info_id"]:
             _update_metadata_with_filename(
                 session,
@@ -123,7 +117,6 @@ def ingest_file_from_path(
                 user_metadata=user_metadata,
             )

-        # 6. Remove missing tag
         try:
             remove_missing_tag_for_asset_id(session, asset_id=asset.id)
         except Exception:
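Steps 1 and 2 of `ingest_file_from_path` lean on upsert helpers that report whether they created or updated a row, so the caller can populate `out["asset_created"]` and friends. A minimal sketch of that contract with sqlite3; the schema and helper name are illustrative:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE asset (hash TEXT PRIMARY KEY, size INTEGER)")

def upsert_asset(asset_hash: str, size: int) -> tuple[bool, bool]:
    """Return (created, updated), mirroring the helpers' reporting contract."""
    row = conn.execute("SELECT size FROM asset WHERE hash = ?", (asset_hash,)).fetchone()
    if row is None:
        conn.execute("INSERT INTO asset (hash, size) VALUES (?, ?)", (asset_hash, size))
        return True, False   # new row
    if row[0] != size:
        conn.execute("UPDATE asset SET size = ? WHERE hash = ?", (size, asset_hash))
        return False, True   # existing row, contents changed
    return False, False      # idempotent no-op

print(upsert_asset("abc", 10))  # (True, False)
print(upsert_asset("abc", 20))  # (False, True)
print(upsert_asset("abc", 20))  # (False, False)
```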
@@ -161,9 +154,7 @@ def register_existing_asset(
         )

         if not info_created:
-            # Return existing info
             tag_names = get_asset_tags(session, asset_info_id=info.id)
-            # Extract plain data before session closes
             result = RegisterAssetResult(
                 info=extract_info_data(info),
                 asset=extract_asset_data(asset),
@@ -173,7 +164,6 @@ def register_existing_asset(
             session.commit()
             return result

-        # New info - apply metadata and tags
         new_meta = dict(user_metadata or {})
         computed_filename = _compute_filename_for_asset(session, asset.id)
         if computed_filename:
@@ -195,9 +185,7 @@ def register_existing_asset(
         )

         tag_names = get_asset_tags(session, asset_info_id=info.id)
-        # Refresh to get updated metadata after set_asset_info_metadata
         session.refresh(info)
-        # Extract plain data before session closes
         result = RegisterAssetResult(
             info=extract_info_data(info),
             asset=extract_asset_data(asset),
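`register_existing_asset` branches on the `(info, created)` pair returned by `get_or_create_asset_info`: an existing row is returned as-is, while a new one gets metadata and tags applied. A minimal sketch of that get-or-create idiom; the class, field names, and in-memory "table" are illustrative:

```python
import uuid
from dataclasses import dataclass, field

@dataclass
class AssetInfo:  # illustrative stand-in for the ORM model
    asset_id: str
    name: str
    id: str = field(default_factory=lambda: uuid.uuid4().hex)

_infos: dict[tuple[str, str], AssetInfo] = {}  # fake table keyed by (asset_id, name)

def get_or_create_asset_info(asset_id: str, name: str) -> tuple[AssetInfo, bool]:
    key = (asset_id, name)
    if key in _infos:
        return _infos[key], False   # existing row: caller returns it unchanged
    info = _infos[key] = AssetInfo(asset_id=asset_id, name=name)
    return info, True               # new row: caller applies metadata and tags

info, created = get_or_create_asset_info("asset-1", "My Model")
again, created_again = get_or_create_asset_info("asset-1", "My Model")
assert created and not created_again and info.id == again.id
```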