{
  "last_node_id": 36,
  "last_link_id": 70,
  "nodes": [
    {
      "id": 7,
      "type": "CLIPTextEncode",
      "pos": [307, 282],
      "size": [425.28, 180.61],
      "flags": {
        "collapsed": true
      },
      "order": 7,
      "mode": 0,
      "inputs": [
        {
          "name": "clip",
          "type": "CLIP",
          "link": 63
        }
      ],
      "outputs": [
        {
          "name": "CONDITIONING",
          "type": "CONDITIONING",
          "links": [68],
          "slot_index": 0
        }
      ],
      "title": "CLIP Text Encode (Negative Prompt)",
      "properties": {
        "Node name for S&R": "CLIPTextEncode"
      },
      "widgets_values": [""],
      "color": "#322",
      "bgcolor": "#533"
    },
    {
      "id": 17,
      "type": "LoadImage",
      "pos": [220, 530],
      "size": [315, 314.0],
      "flags": {},
      "order": 0,
      "mode": 0,
      "inputs": [],
      "outputs": [
        {
          "name": "IMAGE",
          "type": "IMAGE",
          "shape": 3,
          "links": [49],
          "slot_index": 0
        },
        {
          "name": "MASK",
          "type": "MASK",
          "shape": 3,
          "links": null
        }
      ],
      "properties": {
        "Node name for S&R": "LoadImage"
      },
      "widgets_values": ["sd3_controlnet_example.png", "image"]
    },
    {
      "id": 19,
      "type": "PreviewImage",
      "pos": [899, 532],
      "size": [571.59, 625.53],
      "flags": {},
      "order": 8,
      "mode": 0,
      "inputs": [
        {
          "name": "images",
          "type": "IMAGE",
          "link": 26
        }
      ],
      "outputs": [],
      "properties": {
        "Node name for S&R": "PreviewImage"
      },
      "widgets_values": []
    },
    {
      "id": 3,
      "type": "KSampler",
      "pos": [1290, 40],
      "size": [315, 262],
      "flags": {},
      "order": 11,
      "mode": 0,
      "inputs": [
        {
          "name": "model",
          "type": "MODEL",
          "link": 57
        },
        {
          "name": "positive",
          "type": "CONDITIONING",
          "link": 64
        },
        {
          "name": "negative",
          "type": "CONDITIONING",
          "link": 65
        },
        {
          "name": "latent_image",
          "type": "LATENT",
          "link": 66
        }
      ],
      "outputs": [
        {
          "name": "LATENT",
          "type": "LATENT",
          "links": [7],
          "slot_index": 0
        }
      ],
      "properties": {
        "Node name for S&R": "KSampler"
      },
      "widgets_values": [
        50363905047731,
        "randomize",
        20,
        1,
        "euler",
        "normal",
        1
      ]
    },
    {
      "id": 35,
      "type": "InstructPixToPixConditioning",
      "pos": [1040, 50],
      "size": [235.2, 86],
      "flags": {},
      "order": 10,
      "mode": 0,
      "inputs": [
        {
          "name": "positive",
          "type": "CONDITIONING",
          "link": 67
        },
        {
          "name": "negative",
          "type": "CONDITIONING",
          "link": 68
        },
        {
          "name": "vae",
          "type": "VAE",
          "link": 69
        },
        {
          "name": "pixels",
          "type": "IMAGE",
          "link": 70
        }
      ],
      "outputs": [
        {
          "name": "positive",
          "type": "CONDITIONING",
          "links": [64],
          "slot_index": 0
        },
        {
          "name": "negative",
          "type": "CONDITIONING",
          "links": [65],
          "slot_index": 1
        },
        {
          "name": "latent",
          "type": "LATENT",
          "links": [66],
          "slot_index": 2
        }
      ],
      "properties": {
        "Node name for S&R": "InstructPixToPixConditioning"
      },
      "widgets_values": []
    },
    {
      "id": 8,
      "type": "VAEDecode",
      "pos": [1620, 40],
      "size": [210, 46],
      "flags": {},
      "order": 12,
      "mode": 0,
      "inputs": [
        {
          "name": "samples",
          "type": "LATENT",
          "link": 7
        },
        {
          "name": "vae",
          "type": "VAE",
          "link": 60
        }
      ],
      "outputs": [
        {
          "name": "IMAGE",
          "type": "IMAGE",
          "links": [9],
          "slot_index": 0
        }
      ],
      "properties": {
        "Node name for S&R": "VAEDecode"
      },
      "widgets_values": []
    },
    {
      "id": 9,
      "type": "SaveImage",
      "pos": [1850, 40],
      "size": [828.95, 893.85],
      "flags": {},
      "order": 13,
      "mode": 0,
      "inputs": [
        {
          "name": "images",
          "type": "IMAGE",
          "link": 9
        }
      ],
      "outputs": [],
      "properties": {},
      "widgets_values": ["ComfyUI"]
    },
    {
      "id": 32,
      "type": "VAELoader",
      "pos": [1290, 350],
      "size": [315, 58],
      "flags": {},
      "order": 1,
      "mode": 0,
      "inputs": [],
      "outputs": [
        {
          "name": "VAE",
          "type": "VAE",
          "links": [60, 69],
          "slot_index": 0
        }
      ],
      "properties": {
        "Node name for S&R": "VAELoader"
      },
      "widgets_values": ["ae.safetensors"]
    },
    {
      "id": 26,
      "type": "FluxGuidance",
      "pos": [700, 50],
      "size": [317.4, 58],
      "flags": {},
      "order": 9,
      "mode": 0,
      "inputs": [
        {
          "name": "conditioning",
          "type": "CONDITIONING",
          "link": 41
        }
      ],
      "outputs": [
        {
          "name": "CONDITIONING",
          "type": "CONDITIONING",
          "shape": 3,
          "links": [67],
          "slot_index": 0
        }
      ],
      "properties": {
        "Node name for S&R": "FluxGuidance"
      },
      "widgets_values": [30]
    },
    {
      "id": 23,
      "type": "CLIPTextEncode",
      "pos": [260, 50],
      "size": [422.85, 164.31],
      "flags": {},
      "order": 6,
      "mode": 0,
      "inputs": [
        {
          "name": "clip",
          "type": "CLIP",
          "link": 62
        }
      ],
      "outputs": [
        {
          "name": "CONDITIONING",
          "type": "CONDITIONING",
          "links": [41],
          "slot_index": 0
        }
      ],
      "title": "CLIP Text Encode (Positive Prompt)",
      "properties": {
        "Node name for S&R": "CLIPTextEncode"
      },
      "widgets_values": [
        "cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a pink sweater and jeans"
      ],
      "color": "#232",
      "bgcolor": "#353"
    },
    {
      "id": 34,
      "type": "DualCLIPLoader",
      "pos": [-80, 110],
      "size": [315, 106],
      "flags": {},
      "order": 2,
      "mode": 0,
      "inputs": [],
      "outputs": [
        {
          "name": "CLIP",
          "type": "CLIP",
          "links": [62, 63]
        }
      ],
      "properties": {
        "Node name for S&R": "DualCLIPLoader"
      },
      "widgets_values": [
        "clip_l.safetensors",
        "t5xxl_fp16.safetensors",
        "flux",
        "default"
      ]
    },
    {
      "id": 31,
      "type": "UNETLoader",
      "pos": [710, -80],
      "size": [315, 82],
      "flags": {},
      "order": 3,
      "mode": 0,
      "inputs": [],
      "outputs": [
        {
          "name": "MODEL",
          "type": "MODEL",
          "links": [57],
          "slot_index": 0
        }
      ],
      "properties": {
        "Node name for S&R": "UNETLoader"
      },
      "widgets_values": ["flux1-canny-dev.safetensors", "default"]
    },
    {
      "id": 18,
      "type": "Canny",
      "pos": [560, 530],
      "size": [315, 82],
      "flags": {},
      "order": 5,
      "mode": 0,
      "inputs": [
        {
          "name": "image",
          "type": "IMAGE",
          "link": 49
        }
      ],
      "outputs": [
        {
          "name": "IMAGE",
          "type": "IMAGE",
          "shape": 3,
          "links": [26, 70],
          "slot_index": 0
        }
      ],
      "properties": {
        "Node name for S&R": "Canny"
      },
      "widgets_values": [0.15, 0.3]
    },
    {
      "id": 36,
      "type": "MarkdownNote",
      "pos": [-75, 270],
      "size": [225, 60],
      "flags": {},
      "order": 4,
      "mode": 0,
      "inputs": [],
      "outputs": [],
      "properties": {},
      "widgets_values": [
        "\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#canny-and-depth)"
      ],
      "color": "#432",
      "bgcolor": "#653"
    }
  ],
  "links": [
    [7, 3, 0, 8, 0, "LATENT"],
    [9, 8, 0, 9, 0, "IMAGE"],
    [26, 18, 0, 19, 0, "IMAGE"],
    [41, 23, 0, 26, 0, "CONDITIONING"],
    [49, 17, 0, 18, 0, "IMAGE"],
    [57, 31, 0, 3, 0, "MODEL"],
    [60, 32, 0, 8, 1, "VAE"],
    [62, 34, 0, 23, 0, "CLIP"],
    [63, 34, 0, 7, 0, "CLIP"],
    [64, 35, 0, 3, 1, "CONDITIONING"],
    [65, 35, 1, 3, 2, "CONDITIONING"],
    [66, 35, 2, 3, 3, "LATENT"],
    [67, 26, 0, 35, 0, "CONDITIONING"],
    [68, 7, 0, 35, 1, "CONDITIONING"],
    [69, 32, 0, 35, 2, "VAE"],
    [70, 18, 0, 35, 3, "IMAGE"]
  ],
  "groups": [],
  "config": {},
  "extra": {
    "ds": {
      "scale": 0.67,
      "offset": [553.16, 455.34]
    }
  },
  "version": 0.4,
  "models": [
    {
      "name": "t5xxl_fp16.safetensors",
      "url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
      "directory": "text_encoders"
    },
    {
      "name": "ae.safetensors",
      "url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
      "directory": "vae"
    },
    {
      "name": "flux1-canny-dev.safetensors",
      "url": "https://huggingface.co/black-forest-labs/FLUX.1-Canny-dev/resolve/main/flux1-canny-dev.safetensors?download=true",
      "directory": "diffusion_models"
    },
    {
      "name": "clip_l.safetensors",
      "url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
      "directory": "text_encoders"
    }
  ]
}