Mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2026-02-06 19:42:34 +08:00)
refactor: remove try-finally wrapper in seed_assets by extracting helpers
Extract focused helper functions to eliminate the try-finally block that wrapped ~50 lines just for logging. The new helpers (_collect_paths_for_roots, _build_asset_specs, _insert_asset_specs) make seed_assets a simple linear flow.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent ed60e93696
commit e9ca190098
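The change follows the standard extract-helper pattern: a try/finally whose only purpose was to guarantee a summary log line is replaced by small single-purpose helpers plus a linear caller that logs at the end of the normal path. A minimal, self-contained sketch of that pattern, using illustrative stand-in names (scan_files, build_records, seed) rather than the actual ComfyUI helpers:

import logging
import time


def scan_files(roots: tuple[str, ...]) -> list[str]:
    # Stand-in for a path-collection helper such as _collect_paths_for_roots.
    return [f"{root}/example.bin" for root in roots]


def build_records(paths: list[str]) -> list[dict]:
    # Stand-in for a spec-building helper such as _build_asset_specs.
    return [{"path": p} for p in paths]


def seed(roots: tuple[str, ...], enable_logging: bool = False) -> None:
    # With the helpers extracted, the caller is a straight sequence of calls;
    # the summary log sits at the end of the normal path instead of in a finally block.
    t_start = time.perf_counter()
    paths = scan_files(roots)
    records = build_records(paths)
    if enable_logging:
        logging.info("seeded %d records in %.3fs", len(records), time.perf_counter() - t_start)


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    seed(("models", "input"), enable_logging=True)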
@@ -402,42 +402,31 @@ def _prune_orphans_safely(prefixes: list[str]) -> int:
     return 0


-def seed_assets(roots: tuple[RootType, ...], enable_logging: bool = False) -> None:
-    """Scan the given roots and seed the assets into the database."""
-    if not dependencies_available():
-        if enable_logging:
-            logging.warning("Database dependencies not available, skipping assets scan")
-        return
-
-    t_start = time.perf_counter()
-    created = 0
-    skipped_existing = 0
-    orphans_pruned = 0
+def _collect_paths_for_roots(roots: tuple[RootType, ...]) -> list[str]:
+    """Collect all file paths for the given roots."""
     paths: list[str] = []
-
-    try:
-        existing_paths: set[str] = set()
-        for r in roots:
-            existing_paths.update(_sync_root_safely(r))
-
-        all_prefixes = [
-            os.path.abspath(p) for r in roots for p in get_prefixes_for_root(r)
-        ]
-        orphans_pruned = _prune_orphans_safely(all_prefixes)
-
     if "models" in roots:
         paths.extend(collect_models_files())
     if "input" in roots:
         paths.extend(list_files_recursively(folder_paths.get_input_directory()))
     if "output" in roots:
         paths.extend(list_files_recursively(folder_paths.get_output_directory()))
+    return paths
+
+
+def _build_asset_specs(
+    paths: list[str],
+    existing_paths: set[str],
+) -> tuple[list[dict], set[str], int]:
+    """Build asset specs from paths, returning (specs, tag_pool, skipped_count)."""
     specs: list[dict] = []
     tag_pool: set[str] = set()
+    skipped = 0

     for p in paths:
         abs_p = os.path.abspath(p)
         if abs_p in existing_paths:
-                skipped_existing += 1
+            skipped += 1
             continue
         try:
             stat_p = os.stat(abs_p, follow_symlinks=False)
@@ -454,20 +443,48 @@ def seed_assets(roots: tuple[RootType, ...], enable_logging: bool = False) -> None:
             "tags": tags,
             "fname": compute_relative_filename(abs_p),
         })
-            for t in tags:
-                tag_pool.add(t)
+        tag_pool.update(tags)

+    return specs, tag_pool, skipped
+
+
+def _insert_asset_specs(specs: list[dict], tag_pool: set[str]) -> int:
+    """Insert asset specs into database, returning count of created infos."""
     if not specs:
-            return
+        return 0

     with create_session() as sess:
         if tag_pool:
             ensure_tags_exist(sess, tag_pool, tag_type="user")
         result = _batch_insert_assets_from_paths(sess, specs=specs, owner_id="")
-            created += result["inserted_infos"]
         sess.commit()
-
-    finally:
+        return result["inserted_infos"]
+
+
+def seed_assets(roots: tuple[RootType, ...], enable_logging: bool = False) -> None:
+    """Scan the given roots and seed the assets into the database."""
+    if not dependencies_available():
+        if enable_logging:
+            logging.warning("Database dependencies not available, skipping assets scan")
+        return
+
+    t_start = time.perf_counter()
+
+    # Sync existing cache states
+    existing_paths: set[str] = set()
+    for r in roots:
+        existing_paths.update(_sync_root_safely(r))
+
+    # Prune orphaned assets
+    all_prefixes = [
+        os.path.abspath(p) for r in roots for p in get_prefixes_for_root(r)
+    ]
+    orphans_pruned = _prune_orphans_safely(all_prefixes)
+
+    # Collect and process paths
+    paths = _collect_paths_for_roots(roots)
+    specs, tag_pool, skipped_existing = _build_asset_specs(paths, existing_paths)
+    created = _insert_asset_specs(specs, tag_pool)
     if enable_logging:
         logging.info(
             "Assets scan(roots=%s) completed in %.3fs (created=%d, skipped_existing=%d, orphans_pruned=%d, total_seen=%d)",
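After the refactor, seed_assets keeps its original signature, so existing callers are unaffected. A minimal usage sketch, assuming seed_assets has been imported from the assets module this diff touches (the file path is not shown on this page):

import logging

logging.basicConfig(level=logging.INFO)

# "models", "input" and "output" are the three roots handled explicitly in
# _collect_paths_for_roots; enable_logging=True turns on the summary line
# logged at the end of seed_assets.
seed_assets(("models", "input", "output"), enable_logging=True)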