ComfyUI/tests/inference/workflows/lora-hooks-0.json

{
  "2": {
    "inputs": {
      "text": "worst quality, low quality",
      "clip": [
        "21",
        0
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "3": {
    "inputs": {
      "text": "worst quality, low quality",
      "clip": [
        "11",
        0
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "4": {
    "inputs": {
      "text": "rock in a river, outdoors, forest, sky, clouds, best quality",
      "clip": [
        "21",
        0
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "5": {
    "inputs": {
      "width": 768,
      "height": 768,
      "batch_size": 1
    },
    "class_type": "EmptyLatentImage",
    "_meta": {
      "title": "Empty Latent Image"
    }
  },
  "6": {
    "inputs": {
      "text": "rock in a river, outdoors, forest, sky, clouds, best quality",
      "clip": [
        "11",
        0
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "9": {
    "inputs": {
      "samples": [
        "23",
        0
      ],
      "vae": [
        "10",
        0
      ]
    },
    "class_type": "VAEDecode",
    "_meta": {
      "title": "VAE Decode"
    }
  },
  "10": {
    "inputs": {
      "vae_name": "vae-ft-mse-840000-ema-pruned.safetensors"
    },
    "class_type": "VAELoader",
    "_meta": {
      "title": "Load VAE"
    }
  },
  "11": {
    "inputs": {
      "apply_to_conds": true,
      "schedule_clip": false,
      "clip": [
        "12",
        0
      ],
      "hooks": [
        "18",
        0
      ]
    },
    "class_type": "SetClipHooks",
    "_meta": {
      "title": "Set CLIP Hooks"
    }
  },
  "12": {
    "inputs": {
      "stop_at_clip_layer": -2,
      "clip": [
        "13",
        1
      ]
    },
    "class_type": "CLIPSetLastLayer",
    "_meta": {
      "title": "CLIP Set Last Layer"
    }
  },
  "13": {
    "inputs": {
      "ckpt_name": "cardosAnime_v20.safetensors"
    },
    "class_type": "CheckpointLoaderSimple",
    "_meta": {
      "title": "Load Checkpoint"
    }
  },
  "14": {
    "inputs": {
      "mask": [
        "68",
        0
      ]
    },
    "class_type": "InvertMask",
    "_meta": {
      "title": "InvertMask"
    }
  },
  "16": {
    "inputs": {
      "strength": 1,
      "set_cond_area": "default",
      "positive_NEW": [
        "4",
        0
      ],
      "negative_NEW": [
        "2",
        0
      ],
      "mask": [
        "68",
        0
      ]
    },
    "class_type": "PairConditioningSetProperties",
    "_meta": {
      "title": "Cond Pair Set Props"
    }
  },
  "17": {
    "inputs": {
      "strength": 1,
      "set_cond_area": "default",
      "positive": [
        "16",
        0
      ],
      "negative": [
        "16",
        1
      ],
      "positive_NEW": [
        "6",
        0
      ],
      "negative_NEW": [
        "3",
        0
      ],
      "mask": [
        "14",
        0
      ]
    },
    "class_type": "PairConditioningSetPropertiesAndCombine",
    "_meta": {
      "title": "Cond Pair Set Props Combine"
    }
  },
  "18": {
    "inputs": {
      "ckpt_name": "dreamshaper_8.safetensors",
      "strength_model": 1,
      "strength_clip": 1
    },
    "class_type": "CreateHookModelAsLora",
    "_meta": {
      "title": "Create Hook Model as LoRA"
    }
  },
  "20": {
    "inputs": {
      "lora_name": "PixelArtRedmond15V-PixelArt-PIXARFK.safetensors",
      "strength_model": 1.1,
      "strength_clip": 1
    },
    "class_type": "CreateHookLora",
    "_meta": {
      "title": "Create Hook LoRA"
    }
  },
  "21": {
    "inputs": {
      "apply_to_conds": true,
      "schedule_clip": false,
      "clip": [
        "12",
        0
      ],
      "hooks": [
        "20",
        0
      ]
    },
    "class_type": "SetClipHooks",
    "_meta": {
      "title": "Set CLIP Hooks"
    }
  },
  "22": {
    "inputs": {
      "filename_prefix": "lorahooksmasking/img",
      "images": [
        "9",
        0
      ]
    },
    "class_type": "SaveImage",
    "_meta": {
      "title": "Save Image"
    }
  },
  "23": {
    "inputs": {
      "seed": 12345678,
      "steps": 30,
      "cfg": 8,
      "sampler_name": "euler",
      "scheduler": "normal",
      "denoise": 1,
      "model": [
        "13",
        0
      ],
      "positive": [
        "17",
        0
      ],
      "negative": [
        "17",
        1
      ],
      "latent_image": [
        "5",
        0
      ]
    },
    "class_type": "KSampler",
    "_meta": {
      "title": "KSampler"
    }
  },
  "67": {
    "inputs": {
      "width": 768,
      "height": 768,
      "direction": "horizontal",
      "tolerance": 0,
      "gradient_stops": "0:255,255,255\n100:0,0,0"
    },
    "class_type": "ImageGenerateGradient",
    "_meta": {
      "title": "Image Generate Gradient"
    }
  },
  "68": {
    "inputs": {
      "channel": "red",
      "image": [
        "67",
        0
      ]
    },
    "class_type": "ImageToMask",
    "_meta": {
      "title": "Convert Image to Mask"
    }
  }
}
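
Usage note: the object above is an API-format workflow, so it can be queued for execution over HTTP. Below is a minimal Python sketch, assuming a ComfyUI server running locally on the default port 8188 and the file saved as lora-hooks-0.json next to the script (both assumptions); /prompt is ComfyUI's standard submission endpoint for workflows in this format.

import json
import urllib.request

# Load the API-format workflow (filename is an assumption for this sketch).
with open("lora-hooks-0.json", "r", encoding="utf-8") as f:
    workflow = json.load(f)

# POST the workflow to a running ComfyUI instance. The server expects the
# node graph under the "prompt" key of the request body.
payload = json.dumps({"prompt": workflow}).encode("utf-8")
request = urllib.request.Request(
    "http://127.0.0.1:8188/prompt",  # default local address (assumed)
    data=payload,
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(request) as response:
    # The response JSON includes the prompt_id assigned to the queued job.
    print(json.loads(response.read()))

On success, the SaveImage node (node "22") writes the result under the server's output directory with the filename_prefix "lorahooksmasking/img".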