ComfyUI/comfy/web/templates/hiresfix_esrgan_workflow.json
doctorpangloss 048746f58b Update to 0.3.15 and improve models
- Cosmos now fully tested
- Preliminary support for essential Cosmos prompt "upsampler"
 - Lumina tests
 - Tweaks to language and image resizing nodes
- Fix for #31: all the samplers are now present again
2025-02-24 21:27:15 -08:00

608 lines
13 KiB
JSON
Vendored

{
"last_node_id": 26,
"last_link_id": 35,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1185.5, 412.07],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 30
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 13,
"type": "VAEDecode",
"pos": [3221.22, 232.38],
"size": [210, 46],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 15
},
{
"name": "vae",
"type": "VAE",
"link": 33
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [81.78, 142.34],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 28
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4, 12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece HDR victorian portrait painting of woman, blonde hair, mountain nature, blue sky\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [84.78, 352.34],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 29
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6, 13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands, text, watermark\n"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [142.78, 571.34],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [552.78, 143.34],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 34
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 20],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
251225068430076,
"randomize",
12,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 21,
"type": "VAEDecode",
"pos": [988.18, 29.56],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 20
},
{
"name": "vae",
"type": "VAE",
"link": 32
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAEEncode",
"pos": [2459.1, 103.02],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 26
},
{
"name": "vae",
"type": "VAE",
"link": 31
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncode"
},
"widgets_values": []
},
{
"id": 22,
"type": "ImageUpscaleWithModel",
"pos": [1631.06, 3.66],
"size": [226.8, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "upscale_model",
"type": "UPSCALE_MODEL",
"link": 24
},
{
"name": "image",
"type": "IMAGE",
"link": 23
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageUpscaleWithModel"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1446, 411],
"size": [611.26, 628.6],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "ImageScale",
"pos": [1931, 10],
"size": [315, 130],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 27
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [26],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageScale"
},
"widgets_values": ["bilinear", 1536, 1536, "disabled"]
},
{
"id": 12,
"type": "SaveImage",
"pos": [3463, 230],
"size": [868.01, 936.97],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 17
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 11,
"type": "KSampler",
"pos": [2811.96, 176.22],
"size": [315, 262],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 35,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 12,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 13,
"slot_index": 2
},
{
"name": "latent_image",
"type": "LATENT",
"link": 18,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
783745448521451,
"randomize",
14,
8,
"uni_pc_bh2",
"normal",
0.5
]
},
{
"id": 25,
"type": "CheckpointLoaderSimple",
"pos": [-262, 284],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [34, 35],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [28, 29],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [30, 31, 32, 33],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 23,
"type": "UpscaleModelLoader",
"pos": [1288.06, -39.34],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "UPSCALE_MODEL",
"type": "UPSCALE_MODEL",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UpscaleModelLoader"
},
"widgets_values": ["RealESRGAN_x4plus.pth"]
},
{
"id": 26,
"type": "MarkdownNote",
"pos": [-300, 750],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/#non-latent-upscaling)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[12, 6, 0, 11, 1, "CONDITIONING"],
[13, 7, 0, 11, 2, "CONDITIONING"],
[15, 11, 0, 13, 0, "LATENT"],
[17, 13, 0, 12, 0, "IMAGE"],
[18, 20, 0, 11, 3, "LATENT"],
[20, 3, 0, 21, 0, "LATENT"],
[23, 21, 0, 22, 1, "IMAGE"],
[24, 23, 0, 22, 0, "UPSCALE_MODEL"],
[26, 24, 0, 20, 0, "IMAGE"],
[27, 22, 0, 24, 0, "IMAGE"],
[28, 25, 1, 6, 0, "CLIP"],
[29, 25, 1, 7, 0, "CLIP"],
[30, 25, 2, 8, 1, "VAE"],
[31, 25, 2, 20, 1, "VAE"],
[32, 25, 2, 21, 1, "VAE"],
[33, 25, 2, 13, 1, "VAE"],
[34, 25, 0, 3, 0, "MODEL"],
[35, 25, 0, 11, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Txt2Img",
"bounding": [-300, 0, 1211, 708],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Save Intermediate Image",
"bounding": [1170, 330, 516, 196],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Second pass",
"bounding": [2775, 90, 379, 429],
"color": "#444",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Save Final Image",
"bounding": [3210, 135, 483, 199],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "ESRGAN upscale with 4x model",
"bounding": [1260, -120, 578, 184],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "Decode to Pixel space",
"bounding": [960, -45, 285, 142],
"color": "#A88",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "Encode back to latent space",
"bounding": [2400, 15, 312, 157],
"color": "#A88",
"font_size": 24,
"flags": {}
},
{
"id": 8,
"title": "Downscale image to a more reasonable size",
"bounding": [1845, -75, 483, 245],
"color": "#8AA",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.71,
"offset": [448.42, 482.51]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "RealESRGAN_x4plus.pth",
"url": "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth",
"directory": "upscale_models"
}
]
}