Mirror of https://github.com/comfyanonymous/ComfyUI.git, synced 2026-01-30 00:00:26 +08:00

Compare commits: 09f19eb7ae...630a1dce01 (1 commit: 630a1dce01)
```diff
@@ -81,8 +81,7 @@ def get_comfy_models_folders() -> list[tuple[str, list[str]]]:
     """
     targets: list[tuple[str, list[str]]] = []
     models_root = os.path.abspath(folder_paths.models_dir)
-    for name, values in folder_paths.folder_names_and_paths.items():
-        paths, _exts = values[0], values[1]  # NOTE: this prevents nodepacks that hackily edit folder_... from breaking ComfyUI
+    for name, (paths, _exts) in folder_paths.folder_names_and_paths.items():
         if any(os.path.abspath(p).startswith(models_root + os.sep) for p in paths):
            targets.append((name, paths))
     return targets
```
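The NOTE in the removed lines explains the defensive indexing: some node packs replace entries of folder_names_and_paths with sequences of more than two elements, which indexing tolerates but direct tuple unpacking does not. A minimal sketch of the difference (the dictionary contents here are hypothetical):

```python
folder_names_and_paths = {
    # A well-behaved entry: (paths, extensions)
    "checkpoints": (["/models/checkpoints"], {".safetensors"}),
    # A hypothetical entry a node pack extended with a third element
    "loras": (["/models/loras"], {".safetensors"}, "extra"),
}

# Indexing reads only the first two elements, so the extra one is ignored.
for name, values in folder_names_and_paths.items():
    paths, _exts = values[0], values[1]
    print(name, paths)

# Direct unpacking requires exactly two elements and fails on "loras".
try:
    for name, (paths, _exts) in folder_names_and_paths.items():
        print(name, paths)
except ValueError as e:
    print("unpacking failed:", e)
```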
```diff
@@ -14,9 +14,8 @@ class JobStatus:
     IN_PROGRESS = 'in_progress'
     COMPLETED = 'completed'
     FAILED = 'failed'
-    CANCELLED = 'cancelled'

-    ALL = [PENDING, IN_PROGRESS, COMPLETED, FAILED, CANCELLED]
+    ALL = [PENDING, IN_PROGRESS, COMPLETED, FAILED]


 # Media types that can be previewed in the frontend
```
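With CANCELLED removed, JobStatus.ALL again enumerates every accepted value, so it can serve as the single validation source for incoming filters. A short sketch under that assumption (validate_filter is a hypothetical helper, not part of this change):

```python
class JobStatus:
    PENDING = 'pending'
    IN_PROGRESS = 'in_progress'
    COMPLETED = 'completed'
    FAILED = 'failed'

    ALL = [PENDING, IN_PROGRESS, COMPLETED, FAILED]


def validate_filter(status_filter: list[str]) -> list[str]:
    # Hypothetical helper: reject any value outside JobStatus.ALL.
    unknown = set(status_filter) - set(JobStatus.ALL)
    if unknown:
        raise ValueError(f"unknown job statuses: {sorted(unknown)}")
    return status_filter


print(validate_filter(['completed', 'failed']))  # ['completed', 'failed']
```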
```diff
@@ -95,6 +94,12 @@ def normalize_history_item(prompt_id: str, history_item: dict, include_outputs:

     status_info = history_item.get('status', {})
     status_str = status_info.get('status_str') if status_info else None
+    if status_str == 'success':
+        status = JobStatus.COMPLETED
+    elif status_str == 'error':
+        status = JobStatus.FAILED
+    else:
+        status = JobStatus.COMPLETED

     outputs = history_item.get('outputs', {})
     outputs_count, preview_output = get_outputs_summary(outputs)
@@ -102,7 +107,6 @@ def normalize_history_item(prompt_id: str, history_item: dict, include_outputs:
     execution_error = None
     execution_start_time = None
     execution_end_time = None
-    was_interrupted = False
     if status_info:
         messages = status_info.get('messages', [])
         for entry in messages:
@@ -115,15 +119,6 @@ def normalize_history_item(prompt_id: str, history_item: dict, include_outputs:
                 execution_end_time = event_data.get('timestamp')
             if event_name == 'execution_error':
                 execution_error = event_data
-            elif event_name == 'execution_interrupted':
-                was_interrupted = True
-
-    if status_str == 'success':
-        status = JobStatus.COMPLETED
-    elif status_str == 'error':
-        status = JobStatus.CANCELLED if was_interrupted else JobStatus.FAILED
-    else:
-        status = JobStatus.COMPLETED

     job = prune_dict({
         'id': prompt_id,
```
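After these hunks, a history item's status is derived from status_str alone; interrupted runs fall into the 'error' branch and are reported as failed. A standalone sketch of the resulting mapping (resolve_status is a hypothetical name for illustration):

```python
COMPLETED = 'completed'
FAILED = 'failed'


def resolve_status(history_item: dict) -> str:
    # Mirrors the new logic: no special case for interruption.
    status_info = history_item.get('status', {})
    status_str = status_info.get('status_str') if status_info else None
    if status_str == 'error':
        return FAILED
    # 'success' and anything else (including a missing status) map to completed.
    return COMPLETED


print(resolve_status({'status': {'status_str': 'success'}}))  # completed
print(resolve_status({'status': {'status_str': 'error'}}))    # failed
print(resolve_status({}))                                     # completed
```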
```diff
@@ -273,13 +268,13 @@ def get_all_jobs(
     for item in queued:
         jobs.append(normalize_queue_item(item, JobStatus.PENDING))

-    history_statuses = {JobStatus.COMPLETED, JobStatus.FAILED, JobStatus.CANCELLED}
-    requested_history_statuses = history_statuses & set(status_filter)
-    if requested_history_statuses:
+    include_completed = JobStatus.COMPLETED in status_filter
+    include_failed = JobStatus.FAILED in status_filter
+    if include_completed or include_failed:
         for prompt_id, history_item in history.items():
-            job = normalize_history_item(prompt_id, history_item)
-            if job.get('status') in requested_history_statuses:
-                jobs.append(job)
+            is_failed = history_item.get('status', {}).get('status_str') == 'error'
+            if (is_failed and include_failed) or (not is_failed and include_completed):
+                jobs.append(normalize_history_item(prompt_id, history_item))

     if workflow_id:
         jobs = [j for j in jobs if j.get('workflow_id') == workflow_id]
```
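The rewritten branch checks the raw status_str before normalizing, so history items that fail the filter are never normalized at all. A self-contained sketch of that selection logic (filter_history and the sample history are illustrative):

```python
def filter_history(history: dict, status_filter: list[str]) -> list[str]:
    # Hypothetical reduction of the new get_all_jobs branch:
    # returns the prompt IDs that pass the status filter.
    include_completed = 'completed' in status_filter
    include_failed = 'failed' in status_filter
    selected = []
    if include_completed or include_failed:
        for prompt_id, item in history.items():
            is_failed = item.get('status', {}).get('status_str') == 'error'
            if (is_failed and include_failed) or (not is_failed and include_completed):
                selected.append(prompt_id)
    return selected


history = {
    'a': {'status': {'status_str': 'success'}},
    'b': {'status': {'status_str': 'error'}},
}
print(filter_history(history, ['failed']))     # ['b']
print(filter_history(history, ['completed']))  # ['a']
```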
```diff
@@ -753,7 +753,7 @@ class SamplerCustom(io.ComfyNode):
             noise_mask = latent["noise_mask"]

         x0_output = {}
-        callback = latent_preview.prepare_callback(model, sigmas.shape[-1] - 1, x0_output, shape=latent_image.shape if latent_image.is_nested else None)
+        callback = latent_preview.prepare_callback(model, sigmas.shape[-1] - 1, x0_output, shape=latent_image.shape)

         disable_pbar = not comfy.utils.PROGRESS_BAR_ENABLED
         samples = comfy.sample.sample_custom(model, noise, cfg, sampler, sigmas, positive, negative, latent_image, noise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=noise_seed)
@@ -944,7 +944,7 @@ class SamplerCustomAdvanced(io.ComfyNode):
             noise_mask = latent["noise_mask"]

         x0_output = {}
-        callback = latent_preview.prepare_callback(guider.model_patcher, sigmas.shape[-1] - 1, x0_output, shape=latent_image.shape if latent_image.is_nested else None)
+        callback = latent_preview.prepare_callback(guider.model_patcher, sigmas.shape[-1] - 1, x0_output, shape=latent_image.shape)

         disable_pbar = not comfy.utils.PROGRESS_BAR_ENABLED
         samples = guider.sample(noise.generate_noise(latent), latent_image, sampler, sigmas, denoise_mask=noise_mask, callback=callback, disable_pbar=disable_pbar, seed=noise.seed)
```
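Both sampler nodes previously forwarded a shape only for nested latents and passed None otherwise; after the change they forward latent_image.shape unconditionally. A minimal sketch of the call-site difference (SimpleNamespace stands in for the latent tensor):

```python
from types import SimpleNamespace

# Stand-in for a regular, non-nested latent tensor.
latent_image = SimpleNamespace(shape=(1, 4, 64, 64), is_nested=False)

# Old call site: regular latents passed shape=None.
shape_old = latent_image.shape if latent_image.is_nested else None
# New call site: the shape is always forwarded.
shape_new = latent_image.shape

print(shape_old)  # None
print(shape_new)  # (1, 4, 64, 64)
```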
```diff
@@ -122,7 +122,7 @@ def prepare_callback(model, steps, x0_output_dict=None, shape=None):
         if x0_output_dict is not None:
             x0_output_dict["x0"] = x0

         if shape is not None:
             if x0.ndim == 3:
                 cut = math.prod(shape[1:])
                 x0 = x0[:, :, :cut].reshape([x0.shape[0]] + list(shape)[1:])

```
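When prepare_callback does receive a shape, the preview path crops a flattened 3-D x0 to the number of elements the latent layout needs and reshapes it back. A runnable sketch of that arithmetic with illustrative sizes:

```python
import math

import torch

# Illustrative sizes: a flattened preview tensor and the latent layout it maps to.
shape = (1, 4, 8, 8)           # (batch, channels, height, width) of the latent
x0 = torch.randn(1, 1, 300)    # flattened output with trailing padding, ndim == 3

if x0.ndim == 3:
    # Keep only the elements that fit the target layout, then reshape.
    cut = math.prod(shape[1:])  # 4 * 8 * 8 = 256 elements per sample
    x0 = x0[:, :, :cut].reshape([x0.shape[0]] + list(shape)[1:])

print(x0.shape)  # torch.Size([1, 4, 8, 8])
```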
nodes.py:
```diff
@@ -1505,7 +1505,7 @@ def common_ksampler(model, seed, steps, cfg, sampler_name, scheduler, positive,
     if "noise_mask" in latent:
         noise_mask = latent["noise_mask"]

-    callback = latent_preview.prepare_callback(model, steps, shape=latent_image.shape if latent_image.is_nested else None)
+    callback = latent_preview.prepare_callback(model, steps, shape=latent_image.shape)
     disable_pbar = not comfy.utils.PROGRESS_BAR_ENABLED
     samples = comfy.sample.sample(model, noise, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
                                   denoise=denoise, disable_noise=disable_noise, start_step=start_step, last_step=last_step,
```
```diff
@@ -19,7 +19,6 @@ class TestJobStatus:
         assert JobStatus.IN_PROGRESS == 'in_progress'
         assert JobStatus.COMPLETED == 'completed'
         assert JobStatus.FAILED == 'failed'
-        assert JobStatus.CANCELLED == 'cancelled'

     def test_all_contains_all_statuses(self):
         """ALL should contain all status values."""
@@ -27,8 +26,7 @@ class TestJobStatus:
         assert JobStatus.IN_PROGRESS in JobStatus.ALL
         assert JobStatus.COMPLETED in JobStatus.ALL
         assert JobStatus.FAILED in JobStatus.ALL
-        assert JobStatus.CANCELLED in JobStatus.ALL
-        assert len(JobStatus.ALL) == 5
+        assert len(JobStatus.ALL) == 4


 class TestIsPreviewable:
@@ -338,40 +336,6 @@ class TestNormalizeHistoryItem:
         assert job['execution_error']['node_type'] == 'KSampler'
         assert job['execution_error']['exception_message'] == 'CUDA out of memory'

-    def test_cancelled_job(self):
-        """Cancelled/interrupted history item should have cancelled status."""
-        history_item = {
-            'prompt': (
-                5,
-                'prompt-cancelled',
-                {'nodes': {}},
-                {'create_time': 1234567890000},
-                ['node1'],
-            ),
-            'status': {
-                'status_str': 'error',
-                'completed': False,
-                'messages': [
-                    ('execution_start', {'prompt_id': 'prompt-cancelled', 'timestamp': 1234567890500}),
-                    ('execution_interrupted', {
-                        'prompt_id': 'prompt-cancelled',
-                        'node_id': '5',
-                        'node_type': 'KSampler',
-                        'executed': ['1', '2', '3'],
-                        'timestamp': 1234567891000,
-                    })
-                ]
-            },
-            'outputs': {},
-        }
-
-        job = normalize_history_item('prompt-cancelled', history_item)
-        assert job['status'] == 'cancelled'
-        assert job['execution_start_time'] == 1234567890500
-        assert job['execution_end_time'] == 1234567891000
-        # Cancelled jobs should not have execution_error set
-        assert 'execution_error' not in job
-
     def test_include_outputs(self):
         """When include_outputs=True, should include full output data."""
         history_item = {
```