mirror of https://github.com/Comfy-Org/ComfyUI-Manager.git
synced 2026-01-08 21:20:49 +08:00

feat: updated models-list to add support for Qwen Images models

This commit is contained in:
parent 24a73b5d1c
commit bf49081192

Changed file: model-list.json (375 changed lines)
@@ -572,7 +572,6 @@
    "url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_offset_example-lora_1.0.safetensors",
    "size": "49.6MB"
},

{
    "name": "SDXL Lightning LoRA (2steps)",
    "type": "lora",
@@ -606,7 +605,6 @@
    "url": "https://huggingface.co/ByteDance/SDXL-Lightning/resolve/main/sdxl_lightning_8step_lora.safetensors",
    "size": "393.9MB"
},

{
    "name": "DMD2 LoRA (4steps)",
    "type": "lora",
@@ -629,7 +627,6 @@
    "url": "https://huggingface.co/tianweiy/DMD2/resolve/main/dmd2_sdxl_4step_lora_fp16.safetensors",
    "size": "394MB"
},

{
    "name": "Hyper-SD LoRA (8steps) - FLUX.1 [Dev]",
    "type": "lora",
@@ -652,7 +649,6 @@
    "url": "https://huggingface.co/ByteDance/Hyper-SD/resolve/main/Hyper-FLUX.1-dev-16steps-lora.safetensors",
    "size": "1.39GB"
},

{
    "name": "Hyper-SD LoRA (1step) - SD1.5",
    "type": "lora",
@@ -719,7 +715,6 @@
    "url": "https://huggingface.co/ByteDance/Hyper-SD/resolve/main/Hyper-SD15-12steps-CFG-lora.safetensors",
    "size": "269MB"
},

{
    "name": "Hyper-SD LoRA (1step) - SDXL",
    "type": "lora",
@@ -786,7 +781,6 @@
    "url": "https://huggingface.co/ByteDance/Hyper-SD/resolve/main/Hyper-SDXL-12steps-CFG-lora.safetensors",
    "size": "787MB"
},

{
    "name": "Hyper-SD CFG LoRA (4steps) - SD3",
    "type": "lora",
@@ -820,7 +814,6 @@
    "url": "https://huggingface.co/ByteDance/Hyper-SD/resolve/main/Hyper-SD3-16steps-CFG-lora.safetensors",
    "size": "472MB"
},

{
    "name": "comfyanonymous/flux_text_encoders - t5xxl (fp16)",
    "type": "clip",
@@ -854,7 +847,6 @@
    "url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp8_e4m3fn_scaled.safetensors",
    "size": "5.16GB"
},

{
    "name": "comfyanonymous/cosmos_cv8x8x8_1.0.safetensors",
    "type": "VAE",
@@ -910,7 +902,6 @@
    "url": "https://huggingface.co/mcmonkey/cosmos-1.0/resolve/main/Cosmos-1_0-Diffusion-14B-Video2World.safetensors",
    "size": "28.5GB"
},

{
    "name": "google-t5/t5-base",
    "type": "clip",
@@ -944,8 +935,6 @@
    "url": "https://huggingface.co/mcmonkey/google_t5-v1_1-xxl_encoderonly/resolve/main/t5xxl_fp8_e4m3fn.safetensors",
    "size": "4.89GB"
},


{
    "name": "city96/t5-v1_1-xxl-encoder-Q3_K_L.gguf",
    "type": "clip",
@@ -1067,7 +1056,6 @@
    "url": "https://huggingface.co/city96/t5-v1_1-xxl-encoder-gguf/resolve/main/t5-v1_1-xxl-encoder-f32.gguf",
    "size": "19.1GB"
},

{
    "name": "Comfy-Org/clip_l",
    "type": "clip",
@@ -1090,7 +1078,6 @@
    "url": "https://huggingface.co/Comfy-Org/stable-diffusion-3.5-fp8/resolve/main/text_encoders/clip_g.safetensors",
    "size": "1.39GB"
},

{
    "name": "v1-5-pruned-emaonly.ckpt",
    "type": "checkpoint",
@@ -1553,7 +1540,6 @@
    "url": "https://huggingface.co/CiaraRowles/controlnet-temporalnet-sdxl-1.0/resolve/main/diffusion_pytorch_model.safetensors",
    "size": "5.00GB"
},

{
    "name": "Comfy-Org/sigclip_vision_384 (patch14_384)",
    "type": "clip_vision",
@@ -1565,7 +1551,6 @@
    "url": "https://huggingface.co/Comfy-Org/sigclip_vision_384/resolve/main/sigclip_vision_patch14_384.safetensors",
    "size": "857MB"
},

{
    "name": "CLIPVision model (stabilityai/clip_vision_g)",
    "type": "clip_vision",
@@ -1973,7 +1958,6 @@
    "url": "https://dl.fbaipublicfiles.com/segment_anything/sam_vit_b_01ec64.pth",
    "size": "375.0MB"
},

{
    "name": "sam2.1_hiera_tiny.pt",
    "type": "sam2.1",
@@ -2018,7 +2002,6 @@
    "url": "https://dl.fbaipublicfiles.com/segment_anything_2/092824/sam2.1_hiera_large.pt",
    "size": "857.0MB"
},

{
    "name": "sam2_hiera_tiny.pt",
    "type": "sam2",
@@ -2063,7 +2046,6 @@
    "url": "https://dl.fbaipublicfiles.com/segment_anything_2/072824/sam2_hiera_large.pt",
    "size": "857.0MB"
},

{
    "name": "seecoder v1.0",
    "type": "seecoder",
@@ -2394,7 +2376,6 @@
    "url": "https://huggingface.co/Leoxing/PIA/resolve/main/pia.ckpt",
    "size": "1.67GB"
},

{
    "name": "animatediff/v2_lora_PanLeft.ckpt (ComfyUI-AnimateDiff-Evolved) (Updated path)",
    "type": "motion lora",
@@ -2879,9 +2860,6 @@
    "url": "https://huggingface.co/Kwai-Kolors/Kolors-IP-Adapter-FaceID-Plus/resolve/main/ipa-faceid-plus.bin",
    "size": "2.39GB"
},



{
    "name": "GFPGANv1.4.pth",
    "type": "GFPGAN",
@@ -3509,7 +3487,6 @@
    "url": "https://huggingface.co/ViperYX/RGT/resolve/main/RGT_S/RGT_S_x4.pth",
    "size": "136.0MB"
},

{
    "name": "InstantX/FLUX.1-dev Controlnet (Union)",
    "type": "controlnet",
@@ -3521,8 +3498,6 @@
    "url": "https://huggingface.co/InstantX/FLUX.1-dev-Controlnet-Union/resolve/main/diffusion_pytorch_model.safetensors",
    "size": "6.6GB"
},


{
    "name": "InstantX/FLUX.1-dev-IP-Adapter",
    "type": "IP-Adapter",
@@ -3556,7 +3531,6 @@
    "url": "https://huggingface.co/Kijai/flux-fp8/resolve/main/flux_shakker_labs_union_pro-fp8_e4m3fn.safetensors",
    "size": "3.3GB"
},

{
    "name": "jasperai/FLUX.1-dev-Controlnet-Upscaler",
    "type": "controlnet",
@@ -3590,7 +3564,6 @@
    "url": "https://huggingface.co/jasperai/Flux.1-dev-Controlnet-Surface-Normals/resolve/main/diffusion_pytorch_model.safetensors",
    "size": "3.58GB"
},

{
    "name": "xinsir/ControlNet++: All-in-one ControlNet",
    "type": "controlnet",
@@ -3690,7 +3663,6 @@
    "url": "https://huggingface.co/xinsir/controlnet-tile-sdxl-1.0/resolve/main/diffusion_pytorch_model.safetensors",
    "size": "2.50GB"
},

{
    "name": "InstantX/SD3-Controlnet-Canny",
    "type": "controlnet",
@@ -3724,8 +3696,6 @@
    "url": "https://huggingface.co/InstantX/SD3-Controlnet-Tile/resolve/main/diffusion_pytorch_model.safetensors",
    "size": "1.19GB"
},


{
    "name": "stabilityai/SD3.5-Large-Controlnet-Blur",
    "type": "controlnet",
@@ -3759,7 +3729,6 @@
    "url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_depth.safetensors",
    "size": "8.65GB"
},

{
    "name": "Kijai/ToonCrafter model checkpoint (interpolation fp16)",
    "type": "checkpoint",
@@ -3947,7 +3916,6 @@
    "url": "https://huggingface.co/Kijai/DepthAnythingV2-safetensors/resolve/main/depth_anything_v2_vits_fp32.safetensors",
    "size": "99.2MB"
},

{
    "name": "PixArt-Sigma-XL-2-1024-MS.pth (checkpoint)",
    "type": "checkpoint",
@@ -3959,7 +3927,6 @@
    "url": "https://huggingface.co/PixArt-alpha/PixArt-Sigma/resolve/main/PixArt-Sigma-XL-2-1024-MS.pth",
    "size": "2.47GB"
},

{
    "name": "PixArt-Sigma-XL-2-512-MS.safetensors (diffusion)",
    "type": "diffusion_model",
@@ -3993,8 +3960,6 @@
    "url": "https://huggingface.co/PixArt-alpha/PixArt-XL-2-1024-MS/resolve/main/transformer/diffusion_pytorch_model.safetensors",
    "size": "2.45GB"
},


{
    "name": "hunyuan_dit_1.2.safetensors",
    "type": "checkpoint",
@@ -4028,7 +3993,6 @@
    "url": "https://huggingface.co/comfyanonymous/hunyuan_dit_comfyui/resolve/main/hunyuan_dit_1.0.safetensors",
    "size": "8.24GB"
},

{
    "name": "Comfy-Org/hunyuan_video_t2v_720p_bf16.safetensors",
    "type": "diffusion_model",
@@ -4062,7 +4026,6 @@
    "url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/diffusion_models/hunyuan_video_image_to_video_720p_bf16.safetensors",
    "size": "25.6GB"
},

{
    "name": "Comfy-Org/llava_llama3_fp8_scaled.safetensors",
    "type": "clip",
@@ -4096,7 +4059,6 @@
    "url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/clip_vision/llava_llama3_vision.safetensors",
    "size": "649MB"
},

{
    "name": "Comfy-Org/omnigen2_fp16.safetensors",
    "type": "diffusion_model",
@@ -4119,7 +4081,6 @@
    "url": "https://huggingface.co/Comfy-Org/Omnigen2_ComfyUI_repackaged/resolve/main/split_files/text_encoders/qwen_2.5_vl_fp16.safetensors",
    "size": "7.51GB"
},

{
    "name": "FLUX.1 [Schnell] Diffusion model",
    "type": "diffusion_model",
@@ -4131,7 +4092,6 @@
    "url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/flux1-schnell.safetensors",
    "size": "23.8GB"
},

{
    "name": "FLUX.1 VAE model",
    "type": "VAE",
@@ -4143,7 +4103,6 @@
    "url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors",
    "size": "335MB"
},

{
    "name": "kijai/FLUX.1 [schnell] Diffusion model (float8_e4m3fn)",
    "type": "diffusion_model",
@@ -4155,7 +4114,6 @@
    "url": "https://huggingface.co/Kijai/flux-fp8/resolve/main/flux1-schnell-fp8.safetensors",
    "size": "11.9GB"
},

{
    "name": "FLUX.1 [Dev] Diffusion model (scaled fp8)",
    "type": "diffusion_model",
@@ -4178,7 +4136,6 @@
    "url": "https://huggingface.co/Kijai/flux-fp8/resolve/main/flux1-dev-fp8.safetensors",
    "size": "11.9GB"
},

{
    "name": "Comfy Org/FLUX.1 [dev] Checkpoint model (fp8)",
    "type": "checkpoint",
@@ -4201,7 +4158,6 @@
    "url": "https://huggingface.co/Comfy-Org/flux1-schnell/resolve/main/flux1-schnell-fp8.safetensors",
    "size": "17.2GB"
},

{
    "name": "city96/flux1-dev-F16.gguf",
    "type": "diffusion_model",
@@ -4323,7 +4279,6 @@
    "url": "https://huggingface.co/city96/FLUX.1-dev-gguf/resolve/main/flux1-dev-Q8_0.gguf",
    "size": "12.7GB"
},

{
    "name": "city96/flux1-schnell-F16.gguf",
    "type": "diffusion_model",
@@ -4445,7 +4400,6 @@
    "url": "https://huggingface.co/city96/FLUX.1-schnell-gguf/resolve/main/flux1-schnell-Q8_0.gguf",
    "size": "12.7GB"
},

{
    "name": "ViT-L-14-TEXT-detail-improved-hiT-GmP-HF.safetensors [Long CLIP L]",
    "type": "clip",
@@ -4468,7 +4422,6 @@
    "url": "https://huggingface.co/zer0int/CLIP-GmP-ViT-L-14/resolve/main/ViT-L-14-TEXT-detail-improved-hiT-GmP-TE-only-HF.safetensors",
    "size": "323MB"
},

{
    "name": "Depth Pro model",
    "type": "depth-pro",
@@ -4480,7 +4433,6 @@
    "url": "https://huggingface.co/spacepxl/ml-depth-pro/resolve/main/depth_pro.fp16.safetensors",
    "size": "1.9GB"
},

{
    "name": "kijai/lotus depth d model v1.1 (fp16)",
    "type": "diffusion_model",
@@ -4558,7 +4510,6 @@
    "url": "https://huggingface.co/Kijai/lotus-comfyui/resolve/main/lotus-normal-g-v1-0.safetensors",
    "size": "3.47GB"
},

{
    "name": "Kolors UNet model",
    "type": "diffusion_model",
@@ -4614,7 +4565,6 @@
    "url": "https://huggingface.co/Kijai/ChatGLM3-safetensors/resolve/main/chatglm3-fp16.safetensors",
    "size": "12.52GB"
},

{
    "name": "pulid_flux_v0.9.1.safetensors",
    "type": "PuLID",
@@ -4637,7 +4587,6 @@
    "url": "https://huggingface.co/guozinan/PuLID/resolve/main/pulid_v1.1.safetensors",
    "size": "984MB"
},

{
    "name": "kijai/MoGe_ViT_L_fp16.safetensors",
    "type": "MoGe",
@@ -4660,7 +4609,6 @@
    "url": "https://huggingface.co/Kijai/MoGe_safetensors/resolve/main/MoGe_ViT_L_fp16.safetensors",
    "size": "1.26GB"
},

{
    "name": "LTX-Video 2B v0.9 Checkpoint",
    "type": "checkpoint",
@@ -4694,7 +4642,6 @@
    "url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.5.safetensors",
    "size": "6.34GB"
},

{
    "name": "XLabs-AI/flux-canny-controlnet-v3.safetensors",
    "type": "controlnet",
@@ -4728,7 +4675,6 @@
    "url": "https://huggingface.co/XLabs-AI/flux-controlnet-collections/resolve/main/flux-hed-controlnet-v3.safetensors",
    "size": "1.49GB"
},

{
    "name": "XLabs-AI/realism_lora.safetensors",
    "type": "lora",
@@ -4762,7 +4708,6 @@
    "url": "https://huggingface.co/XLabs-AI/flux-lora-collection/resolve/main/mjv6_lora.safetensors",
    "size": "44.8MB"
},

{
    "name": "XLabs-AI/flux-ip-adapter",
    "type": "lora",
@@ -4774,7 +4719,6 @@
    "url": "https://huggingface.co/XLabs-AI/flux-ip-adapter/resolve/main/ip_adapter.safetensors",
    "size": "982MB"
},

{
    "name": "efficient_sam_s_cpu.jit [ComfyUI-YoloWorld-EfficientSAM]",
    "type": "efficient_sam",
@@ -4797,7 +4741,6 @@
    "url": "https://huggingface.co/camenduru/YoloWorld-EfficientSAM/resolve/main/efficient_sam_s_gpu.jit",
    "size": "106.0MB"
},

{
    "name": "TencentARC/CustomNet V1",
    "type": "CustomNet",
@@ -4820,7 +4763,6 @@
    "url": "https://huggingface.co/TencentARC/CustomNet/resolve/main/customnet_inpaint_v1.pt",
    "size": "5.71GB"
},

{
    "name": "deepseek-ai/Janus-Pro-1B",
    "type": "Janus-Pro",
@@ -4865,7 +4807,6 @@
    "url": "https://huggingface.co/Kwai-Kolors/Kolors/resolve/main/vae/diffusion_pytorch_model.safetensors",
    "size": "335MB"
},

{
    "name": "Comfy-Org/Wan2.1 i2v 480p 14B (bf16)",
    "type": "diffusion_model",
@@ -4954,7 +4895,6 @@
    "url": "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/diffusion_models/wan2.1_i2v_720p_14B_fp8_scaled.safetensors",
    "size": "16.4GB"
},

{
    "name": "Comfy-Org/Wan2.1 t2v 1.3B (bf16)",
    "type": "diffusion_model",
@@ -4977,7 +4917,6 @@
    "url": "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/diffusion_models/wan2.1_t2v_1.3B_fp16.safetensors",
    "size": "2.84GB"
},

{
    "name": "Comfy-Org/Wan2.1 t2v 14B (bf16)",
    "type": "diffusion_model",
@@ -5044,7 +4983,6 @@
    "url": "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/clip_vision/clip_vision_h.safetensors",
    "size": "1.26GB"
},

{
    "name": "Comfy-Org/Wan2.2 i2v high noise 14B (fp16)",
    "type": "diffusion_model",
@@ -5144,7 +5082,6 @@
    "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_ti2v_5B_fp16.safetensors",
    "size": "10.0GB"
},

{
    "name": "Comfy-Org/umt5_xxl_fp16.safetensors",
    "type": "clip",
@@ -5167,7 +5104,6 @@
    "url": "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors",
    "size": "6.74GB"
},

{
    "name": "lllyasviel/FramePackI2V_HY",
    "type": "FramePackI2V",
@@ -5179,7 +5115,6 @@
    "url": "lllyasviel/FramePackI2V_HY",
    "size": "25.75GB"
},

{
    "name": "LTX-Video Spatial Upscaler v0.9.7",
    "type": "upscale",
@@ -5355,6 +5290,314 @@
    "filename": "LBM_relighting.safetensors",
    "url": "https://huggingface.co/jasperai/LBM_relighting/resolve/main/model.safetensors",
    "size": "5.02GB"
},
{
    "name": "Qwen-Image VAE",
    "type": "VAE",
    "base": "Qwen-Image",
    "save_path": "vae/qwen-image",
    "description": "VAE model for Qwen-Image",
    "reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
    "filename": "qwen_image_vae.safetensors",
    "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
    "size": "335MB"
},
{
    "name": "Qwen 2.5 VL 7B Text Encoder (fp8_scaled)",
    "type": "clip",
    "base": "Qwen-2.5-VL",
    "save_path": "text_encoders/qwen",
    "description": "Qwen 2.5 VL 7B text encoder model (fp8_scaled)",
    "reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
    "filename": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
    "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
    "size": "3.75GB"
},
{
    "name": "Qwen 2.5 VL 7B Text Encoder",
    "type": "clip",
    "base": "Qwen-2.5-VL",
    "save_path": "text_encoders/qwen",
    "description": "Qwen 2.5 VL 7B text encoder model",
    "reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
    "filename": "qwen_2.5_vl_7b.safetensors",
    "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b.safetensors",
    "size": "7.51GB"
},
{
    "name": "Qwen-Image Diffusion Model (fp8_e4m3fn)",
    "type": "diffusion_model",
    "base": "Qwen-Image",
    "save_path": "diffusion_models/qwen-image",
    "description": "Qwen-Image diffusion model (fp8_e4m3fn)",
    "reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
    "filename": "qwen_image_fp8_e4m3fn.safetensors",
    "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
    "size": "4.89GB"
},
{
    "name": "Qwen-Image Diffusion Model (bf16)",
    "type": "diffusion_model",
    "base": "Qwen-Image",
    "save_path": "diffusion_models/qwen-image",
    "description": "Qwen-Image diffusion model (bf16)",
    "reference": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI",
    "filename": "qwen_image_bf16.safetensors",
    "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_bf16.safetensors",
    "size": "9.78GB"
},
{
    "name": "Qwen-Image-Edit 2509 Diffusion Model (fp8_e4m3fn)",
    "type": "diffusion_model",
    "base": "Qwen-Image-Edit",
    "save_path": "diffusion_models/qwen-image-edit",
    "description": "Qwen-Image-Edit 2509 diffusion model (fp8_e4m3fn)",
    "reference": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI",
    "filename": "qwen_image_edit_2509_fp8_e4m3fn.safetensors",
    "url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_2509_fp8_e4m3fn.safetensors",
    "size": "4.89GB"
},
{
    "name": "Qwen-Image-Edit 2509 Diffusion Model (bf16)",
    "type": "diffusion_model",
    "base": "Qwen-Image-Edit",
    "save_path": "diffusion_models/qwen-image-edit",
    "description": "Qwen-Image-Edit 2509 diffusion model (bf16)",
    "reference": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI",
    "filename": "qwen_image_edit_2509_bf16.safetensors",
    "url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_2509_bf16.safetensors",
    "size": "9.78GB"
},
{
    "name": "Qwen-Image-Edit Diffusion Model (fp8_e4m3fn)",
    "type": "diffusion_model",
    "base": "Qwen-Image-Edit",
    "save_path": "diffusion_models/qwen-image-edit",
    "description": "Qwen-Image-Edit diffusion model (fp8_e4m3fn)",
    "reference": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI",
    "filename": "qwen_image_edit_fp8_e4m3fn.safetensors",
    "url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_fp8_e4m3fn.safetensors",
    "size": "4.89GB"
},
{
    "name": "Qwen-Image-Edit Diffusion Model (bf16)",
    "type": "diffusion_model",
    "base": "Qwen-Image-Edit",
    "save_path": "diffusion_models/qwen-image-edit",
    "description": "Qwen-Image-Edit diffusion model (bf16)",
    "reference": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI",
    "filename": "qwen_image_edit_bf16.safetensors",
    "url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_bf16.safetensors",
    "size": "9.78GB"
},
{
    "name": "Qwen-Image-Lightning 8steps V1.0",
    "type": "lora",
    "base": "Qwen-Image",
    "save_path": "loras/qwen-image-lightning",
    "description": "Qwen-Image-Lightning 8-step LoRA model V1.0",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Lightning-8steps-V1.0.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.0.safetensors",
    "size": "9.78GB"
},
{
    "name": "Qwen-Image-Lightning 4steps V1.0",
    "type": "lora",
    "base": "Qwen-Image",
    "save_path": "loras/qwen-image-lightning",
    "description": "Qwen-Image-Lightning 4-step LoRA model V1.0",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
    "size": "9.78GB"
},
{
    "name": "Qwen-Image-Lightning 4steps V1.0 (bf16)",
    "type": "lora",
    "base": "Qwen-Image",
    "save_path": "loras/qwen-image-lightning",
    "description": "Qwen-Image-Lightning 4-step LoRA model V1.0 (bf16)",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Lightning-4steps-V1.0-bf16.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0-bf16.safetensors",
    "size": "19.6GB"
},
{
    "name": "Qwen-Image-Lightning 4steps V2.0",
    "type": "lora",
    "base": "Qwen-Image",
    "save_path": "loras/qwen-image-lightning",
    "description": "Qwen-Image-Lightning 4-step LoRA model V2.0",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Lightning-4steps-V2.0.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V2.0.safetensors",
    "size": "9.78GB"
},
{
    "name": "Qwen-Image-Lightning 4steps V2.0 (bf16)",
    "type": "lora",
    "base": "Qwen-Image",
    "save_path": "loras/qwen-image-lightning",
    "description": "Qwen-Image-Lightning 4-step LoRA model V2.0 (bf16)",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Lightning-4steps-V2.0-bf16.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V2.0-bf16.safetensors",
    "size": "19.6GB"
},
{
    "name": "Qwen-Image-Lightning 8steps V1.1",
    "type": "lora",
    "base": "Qwen-Image",
    "save_path": "loras/qwen-image-lightning",
    "description": "Qwen-Image-Lightning 8-step LoRA model V1.1",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Lightning-8steps-V1.1.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.1.safetensors",
    "size": "9.78GB"
},
{
    "name": "Qwen-Image-Lightning 8steps V1.1 (bf16)",
    "type": "lora",
    "base": "Qwen-Image",
    "save_path": "loras/qwen-image-lightning",
    "description": "Qwen-Image-Lightning 8-step LoRA model V1.1 (bf16)",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Lightning-8steps-V1.1-bf16.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.1-bf16.safetensors",
    "size": "19.6GB"
},
{
    "name": "Qwen-Image-Lightning 8steps V2.0",
    "type": "lora",
    "base": "Qwen-Image",
    "save_path": "loras/qwen-image-lightning",
    "description": "Qwen-Image-Lightning 8-step LoRA model V2.0",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Lightning-8steps-V2.0.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V2.0.safetensors",
    "size": "9.78GB"
},
{
    "name": "Qwen-Image-Lightning 8steps V2.0 (bf16)",
    "type": "lora",
    "base": "Qwen-Image",
    "save_path": "loras/qwen-image-lightning",
    "description": "Qwen-Image-Lightning 8-step LoRA model V2.0 (bf16)",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Lightning-8steps-V2.0-bf16.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V2.0-bf16.safetensors",
    "size": "19.6GB"
},
{
    "name": "Qwen-Image-Edit-Lightning 4steps V1.0",
    "type": "lora",
    "base": "Qwen-Image-Edit",
    "save_path": "loras/qwen-image-edit-lightning",
    "description": "Qwen-Image-Edit-Lightning 4-step LoRA model V1.0",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Edit-Lightning-4steps-V1.0.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-Lightning-4steps-V1.0.safetensors",
    "size": "9.78GB"
},
{
    "name": "Qwen-Image-Edit-Lightning 4steps V1.0 (bf16)",
    "type": "lora",
    "base": "Qwen-Image-Edit",
    "save_path": "loras/qwen-image-edit-lightning",
    "description": "Qwen-Image-Edit-Lightning 4-step LoRA model V1.0 (bf16)",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Edit-Lightning-4steps-V1.0-bf16.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-Lightning-4steps-V1.0-bf16.safetensors",
    "size": "19.6GB"
},
{
    "name": "Qwen-Image-Edit-Lightning 8steps V1.0",
    "type": "lora",
    "base": "Qwen-Image-Edit",
    "save_path": "loras/qwen-image-edit-lightning",
    "description": "Qwen-Image-Edit-Lightning 8-step LoRA model V1.0",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Edit-Lightning-8steps-V1.0.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-Lightning-8steps-V1.0.safetensors",
    "size": "9.78GB"
},
{
    "name": "Qwen-Image-Edit-Lightning 8steps V1.0 (bf16)",
    "type": "lora",
    "base": "Qwen-Image-Edit",
    "save_path": "loras/qwen-image-edit-lightning",
    "description": "Qwen-Image-Edit-Lightning 8-step LoRA model V1.0 (bf16)",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Edit-Lightning-8steps-V1.0-bf16.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-Lightning-8steps-V1.0-bf16.safetensors",
    "size": "19.6GB"
},
{
    "name": "Qwen-Image-Edit-2509-Lightning 4steps V1.0 (bf16)",
    "type": "lora",
    "base": "Qwen-Image-Edit",
    "save_path": "loras/qwen-image-edit-lightning",
    "description": "Qwen-Image-Edit-2509-Lightning 4-step LoRA model V1.0 (bf16)",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Edit-2509-Lightning-4steps-V1.0-bf16.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-4steps-V1.0-bf16.safetensors",
    "size": "19.6GB"
},
{
    "name": "Qwen-Image-Edit-2509-Lightning 4steps V1.0 (fp32)",
    "type": "lora",
    "base": "Qwen-Image-Edit",
    "save_path": "loras/qwen-image-edit-lightning",
    "description": "Qwen-Image-Edit-2509-Lightning 4-step LoRA model V1.0 (fp32)",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Edit-2509-Lightning-4steps-V1.0-fp32.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-4steps-V1.0-fp32.safetensors",
    "size": "39.1GB"
},
{
    "name": "Qwen-Image-Edit-2509-Lightning 8steps V1.0 (bf16)",
    "type": "lora",
    "base": "Qwen-Image-Edit",
    "save_path": "loras/qwen-image-edit-lightning",
    "description": "Qwen-Image-Edit-2509-Lightning 8-step LoRA model V1.0 (bf16)",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Edit-2509-Lightning-8steps-V1.0-bf16.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-8steps-V1.0-bf16.safetensors",
    "size": "19.6GB"
},
{
    "name": "Qwen-Image-Edit-2509-Lightning 8steps V1.0 (fp32)",
    "type": "lora",
    "base": "Qwen-Image-Edit",
    "save_path": "loras/qwen-image-edit-lightning",
    "description": "Qwen-Image-Edit-2509-Lightning 8-step LoRA model V1.0 (fp32)",
    "reference": "https://huggingface.co/lightx2v/Qwen-Image-Lightning",
    "filename": "Qwen-Image-Edit-2509-Lightning-8steps-V1.0-fp32.safetensors",
    "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-8steps-V1.0-fp32.safetensors",
    "size": "39.1GB"
},
{
    "name": "Qwen-Image InstantX ControlNet Union",
    "type": "controlnet",
    "base": "Qwen-Image",
    "save_path": "controlnet/qwen-image/instantx",
    "description": "Qwen-Image InstantX ControlNet Union model",
    "reference": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets",
    "filename": "Qwen-Image-InstantX-ControlNet-Union.safetensors",
    "url": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Union.safetensors",
    "size": "2.54GB"
},
{
    "name": "Qwen-Image InstantX ControlNet Inpainting",
    "type": "controlnet",
    "base": "Qwen-Image",
    "save_path": "controlnet/qwen-image/instantx",
    "description": "Qwen-Image InstantX ControlNet Inpainting model",
    "reference": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets",
    "filename": "Qwen-Image-InstantX-ControlNet-Inpainting.safetensors",
    "url": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Inpainting.safetensors",
    "size": "2.54GB"
}
]
}
}
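For reference, a minimal sketch (not part of the commit) of how the fields in each entry above fit together: "save_path" plus "filename" give the location the file is expected to live at under a ComfyUI models directory, "url" is the download source, and "name"/"size" are informational. The models-directory path and the top-level "models" key are assumptions here, not something the diff itself confirms.

import json
import urllib.request
from pathlib import Path

# Assumed locations -- adjust to your installation; neither path is given by the commit.
MODELS_DIR = Path("ComfyUI/models")
MODEL_LIST = Path("model-list.json")

def resolve_target(entry: dict, models_dir: Path = MODELS_DIR) -> Path:
    # e.g. save_path "diffusion_models/qwen-image" + filename "qwen_image_bf16.safetensors"
    return models_dir / entry["save_path"] / entry["filename"]

def download_entry(entry: dict, models_dir: Path = MODELS_DIR) -> Path:
    # Fetch the entry's url to its resolved target path, skipping files already present.
    target = resolve_target(entry, models_dir)
    target.parent.mkdir(parents=True, exist_ok=True)
    if not target.exists():
        print(f"downloading {entry['name']} ({entry['size']}) -> {target}")
        urllib.request.urlretrieve(entry["url"], target)
    return target

if __name__ == "__main__":
    data = json.loads(MODEL_LIST.read_text())
    # "models" as the top-level key is an assumption based on the closing "]" / "}" above.
    qwen_entries = [m for m in data.get("models", []) if m.get("base", "").startswith("Qwen")]
    for entry in qwen_entries:
        print(f"{entry['name']}: {resolve_target(entry)}")

Running the script only prints the resolved target paths for the new Qwen entries; calling download_entry() would actually fetch a file.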