From 37ecc5b6636205656665fcc8e242d1765ac604b7 Mon Sep 17 00:00:00 2001
From: Luke Mino-Altherr
Date: Tue, 3 Feb 2026 20:14:11 -0800
Subject: [PATCH] chore: remove obvious/self-documenting comments from assets package

Co-Authored-By: Claude Opus 4.5
---
 app/assets/api/upload.py                |  4 +---
 app/assets/database/queries/__init__.py |  4 ----
 app/assets/manager.py                   |  3 ---
 app/assets/scanner.py                   | 12 ------------
 app/assets/services/__init__.py         |  3 ---
 app/assets/services/asset_management.py |  5 -----
 app/assets/services/hashing.py          |  1 -
 app/assets/services/ingest.py           | 12 ------------
 8 files changed, 1 insertion(+), 43 deletions(-)

diff --git a/app/assets/api/upload.py b/app/assets/api/upload.py
index f1003eb2c..6dfe4f35a 100644
--- a/app/assets/api/upload.py
+++ b/app/assets/api/upload.py
@@ -91,9 +91,8 @@ async def parse_multipart_upload(
                         file_written += len(chunk)
                 except Exception:
                     raise UploadError(500, "UPLOAD_IO_ERROR", "Failed to receive uploaded file.")
-                continue  # Do not create temp file; we will create AssetInfo from the existing content
+                continue
 
-            # Otherwise, store to temp for hashing/ingest
             uploads_root = os.path.join(folder_paths.get_temp_directory(), "uploads")
             unique_dir = os.path.join(uploads_root, uuid.uuid4().hex)
             os.makedirs(unique_dir, exist_ok=True)
@@ -118,7 +117,6 @@ async def parse_multipart_upload(
         elif fname == "user_metadata":
             user_metadata_raw = (await field.text()) or None
 
-    # Validate we have either a file or a known hash
     if not file_present and not (provided_hash and provided_hash_exists):
         raise UploadError(400, "MISSING_FILE", "Form must include a 'file' part or a known 'hash'.")
 
diff --git a/app/assets/database/queries/__init__.py b/app/assets/database/queries/__init__.py
index 0e2af7b25..a9fecb378 100644
--- a/app/assets/database/queries/__init__.py
+++ b/app/assets/database/queries/__init__.py
@@ -49,12 +49,10 @@ from app.assets.database.queries.tags import (
 )
 
 __all__ = [
-    # asset.py
     "asset_exists_by_hash",
     "get_asset_by_hash",
     "upsert_asset",
     "bulk_insert_assets",
-    # asset_info.py
     "asset_info_exists_for_asset_id",
     "get_asset_info_by_id",
     "insert_asset_info",
@@ -71,7 +69,6 @@ __all__ = [
     "set_asset_info_preview",
     "bulk_insert_asset_infos_ignore_conflicts",
     "get_asset_info_ids_by_ids",
-    # cache_state.py
     "CacheStateRow",
     "list_cache_states_by_asset_id",
     "upsert_cache_state",
@@ -84,7 +81,6 @@ __all__ = [
     "delete_orphaned_seed_asset",
     "bulk_insert_cache_states_ignore_conflicts",
     "get_cache_states_by_paths_and_asset_ids",
-    # tags.py
     "ensure_tags_exist",
     "get_asset_tags",
     "set_asset_info_tags",
diff --git a/app/assets/manager.py b/app/assets/manager.py
index ccba96fab..ab1b77955 100644
--- a/app/assets/manager.py
+++ b/app/assets/manager.py
@@ -193,7 +193,6 @@ def upload_asset_from_temp_path(
     if expected_asset_hash and asset_hash != expected_asset_hash.strip().lower():
         raise ValueError("HASH_MISMATCH")
 
-    # Check if asset already exists by hash
     with create_session() as session:
         existing = get_asset_by_hash(session, asset_hash=asset_hash)
 
@@ -228,7 +227,6 @@ def upload_asset_from_temp_path(
                 created_new=False,
             )
 
-        # New asset - move file to destination
         base_dir, subdirs = resolve_destination_from_tags(spec.tags)
         dest_dir = os.path.join(base_dir, *subdirs) if subdirs else base_dir
         os.makedirs(dest_dir, exist_ok=True)
@@ -324,7 +322,6 @@ def process_upload(
         _delete_temp_file_if_exists(parsed.tmp_path)
         raise AssetValidationError("INVALID_BODY", f"Validation failed: {ve.json()}")
 
-    # Validate models category against configured folders
     if spec.tags and spec.tags[0] == "models":
         if len(spec.tags) < 2 or spec.tags[1] not in folder_paths.folder_names_and_paths:
             _delete_temp_file_if_exists(parsed.tmp_path)
diff --git a/app/assets/scanner.py b/app/assets/scanner.py
index 86e30e8da..499d06a2a 100644
--- a/app/assets/scanner.py
+++ b/app/assets/scanner.py
@@ -167,27 +167,20 @@ def _batch_insert_assets_from_paths(
             "_filename": sp["fname"],
         }
 
-    # 1. Insert all seed Assets (hash=NULL)
     bulk_insert_assets(session, asset_rows)
-
-    # 2. Try to claim cache states (file_path unique)
     bulk_insert_cache_states_ignore_conflicts(session, state_rows)
-
-    # 3. Query to find which paths we won
     winners_by_path = get_cache_states_by_paths_and_asset_ids(session, path_to_asset)
     all_paths_set = set(path_list)
     losers_by_path = all_paths_set - winners_by_path
     lost_assets = [path_to_asset[p] for p in losers_by_path]
 
-    # 4. Delete Assets for losers
     if lost_assets:
         delete_assets_by_ids(session, lost_assets)
 
     if not winners_by_path:
         return {"inserted_infos": 0, "won_states": 0, "lost_states": len(losers_by_path)}
 
-    # 5. Insert AssetInfo for winners
     winner_info_rows = [asset_to_info[path_to_asset[p]] for p in winners_by_path]
     db_info_rows = [
         {
@@ -205,11 +198,9 @@ def _batch_insert_assets_from_paths(
     ]
     bulk_insert_asset_infos_ignore_conflicts(session, db_info_rows)
 
-    # 6. Find which info rows were actually inserted
     all_info_ids = [row["id"] for row in winner_info_rows]
     inserted_info_ids = get_asset_info_ids_by_ids(session, all_info_ids)
 
-    # 7. Build and insert tag + meta rows
     tag_rows: list[dict] = []
     meta_rows: list[dict] = []
     if inserted_info_ids:
@@ -468,18 +459,15 @@ def seed_assets(roots: tuple[RootType, ...], enable_logging: bool = False) -> No
 
     t_start = time.perf_counter()
 
-    # Sync existing cache states
     existing_paths: set[str] = set()
     for r in roots:
         existing_paths.update(_sync_root_safely(r))
 
-    # Prune orphaned assets
     all_prefixes = [
         os.path.abspath(p) for r in roots for p in get_prefixes_for_root(r)
     ]
     orphans_pruned = _prune_orphans_safely(all_prefixes)
 
-    # Collect and process paths
     paths = _collect_paths_for_roots(roots)
     specs, tag_pool, skipped_existing = _build_asset_specs(paths, existing_paths)
     created = _insert_asset_specs(specs, tag_pool)
diff --git a/app/assets/services/__init__.py b/app/assets/services/__init__.py
index 23595532e..5ce0ae0d1 100644
--- a/app/assets/services/__init__.py
+++ b/app/assets/services/__init__.py
@@ -15,15 +15,12 @@ from app.assets.services.tagging import (
 )
 
 __all__ = [
-    # ingest.py
     "ingest_file_from_path",
     "register_existing_asset",
-    # asset_management.py
     "get_asset_detail",
     "update_asset_metadata",
     "delete_asset_reference",
     "set_asset_preview",
-    # tagging.py
     "apply_tags",
     "remove_tags",
     "list_tags",
diff --git a/app/assets/services/asset_management.py b/app/assets/services/asset_management.py
index 90c157472..4f23cf9af 100644
--- a/app/assets/services/asset_management.py
+++ b/app/assets/services/asset_management.py
@@ -74,10 +74,8 @@ def update_asset_metadata(
             update_asset_info_name(session, asset_info_id=asset_info_id, name=name)
             touched = True
 
-        # Compute filename from best live path
         computed_filename = _compute_filename_for_asset(session, info.asset_id)
 
-        # Determine if metadata needs updating
         new_meta: dict | None = None
         if user_metadata is not None:
             new_meta = dict(user_metadata)
@@ -106,7 +104,6 @@ def update_asset_metadata(
         if touched and user_metadata is None:
             update_asset_info_updated_at(session, asset_info_id=asset_info_id)
 
-        # Fetch updated info with tags
         result = fetch_asset_info_asset_and_tags(
             session,
             asset_info_id=asset_info_id,
@@ -116,7 +113,6 @@ def update_asset_metadata(
             raise RuntimeError("State changed during update")
         info, asset, tag_list = result
 
-        # Extract plain data before session closes
         detail = AssetDetailResult(
             info=extract_info_data(info),
             asset=extract_asset_data(asset),
@@ -203,7 +199,6 @@ def set_asset_preview(
             raise RuntimeError("State changed during preview update")
         info, asset, tags = result
 
-        # Extract plain data before session closes
         detail = AssetDetailResult(
             info=extract_info_data(info),
             asset=extract_asset_data(asset),
diff --git a/app/assets/services/hashing.py b/app/assets/services/hashing.py
index b07a163ce..52d89d1ca 100644
--- a/app/assets/services/hashing.py
+++ b/app/assets/services/hashing.py
@@ -73,7 +73,6 @@ def _hash_file_obj(file_obj: IO, chunk_size: int = DEFAULT_CHUNK) -> str:
 
     orig_pos = file_obj.tell()
     try:
-        # seek to the beginning before reading
         if orig_pos != 0:
             file_obj.seek(0)
 
diff --git a/app/assets/services/ingest.py b/app/assets/services/ingest.py
index ce9a5c1b1..cf88adee6 100644
--- a/app/assets/services/ingest.py
+++ b/app/assets/services/ingest.py
@@ -60,12 +60,10 @@ def ingest_file_from_path(
     }
 
     with create_session() as session:
-        # Validate preview_id if provided
         if preview_id:
             if not session.get(Asset, preview_id):
                 preview_id = None
 
-        # 1. Upsert Asset
         asset, created, updated = upsert_asset(
             session,
             asset_hash=asset_hash,
@@ -75,7 +73,6 @@ def ingest_file_from_path(
         out["asset_created"] = created
         out["asset_updated"] = updated
 
-        # 2. Upsert CacheState
         state_created, state_updated = upsert_cache_state(
             session,
             asset_id=asset.id,
@@ -85,7 +82,6 @@ def ingest_file_from_path(
        out["state_created"] = state_created
        out["state_updated"] = state_updated
 
-        # 3. Optionally create/update AssetInfo
         if info_name:
             info, info_created = get_or_create_asset_info(
                 session,
@@ -100,7 +96,6 @@ def ingest_file_from_path(
                 update_asset_info_timestamps(session, asset_info=info, preview_id=preview_id)
             out["asset_info_id"] = info.id
 
-        # 4. Handle tags
         norm = normalize_tags(list(tags))
         if norm and out["asset_info_id"]:
             if require_existing_tags:
@@ -113,7 +108,6 @@ def ingest_file_from_path(
                 create_if_missing=not require_existing_tags,
             )
 
-        # 5. Update metadata with computed filename
         if out["asset_info_id"]:
             _update_metadata_with_filename(
                 session,
@@ -123,7 +117,6 @@ def ingest_file_from_path(
                 user_metadata=user_metadata,
             )
 
-        # 6. Remove missing tag
         try:
             remove_missing_tag_for_asset_id(session, asset_id=asset.id)
         except Exception:
@@ -161,9 +154,7 @@ def register_existing_asset(
     )
 
     if not info_created:
-        # Return existing info
         tag_names = get_asset_tags(session, asset_info_id=info.id)
-        # Extract plain data before session closes
         result = RegisterAssetResult(
             info=extract_info_data(info),
             asset=extract_asset_data(asset),
@@ -173,7 +164,6 @@ def register_existing_asset(
         session.commit()
         return result
 
-    # New info - apply metadata and tags
     new_meta = dict(user_metadata or {})
     computed_filename = _compute_filename_for_asset(session, asset.id)
     if computed_filename:
@@ -195,9 +185,7 @@ def register_existing_asset(
     )
 
     tag_names = get_asset_tags(session, asset_info_id=info.id)
-    # Refresh to get updated metadata after set_asset_info_metadata
     session.refresh(info)
-    # Extract plain data before session closes
     result = RegisterAssetResult(
         info=extract_info_data(info),
         asset=extract_asset_data(asset),