{
"id": "00000000-0000-0000-0000-000000000000",
"revision": 0,
"last_node_id": 98,
"last_link_id": 79,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [
420,
710
],
"size": [
400,
150
],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 25
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
33
]
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
" "
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 70,
"type": "ReferenceLatent",
"pos": [
860,
470
],
"size": [
197.712890625,
46
],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 31
},
{
"name": "latent",
"shape": 7,
"type": "LATENT",
"link": 32
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
21
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "ReferenceLatent"
},
"widgets_values": []
},
{
"id": 71,
"type": "ReferenceLatent",
"pos": [
850,
720
],
"size": [
197.712890625,
46
],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 33
},
{
"name": "latent",
"shape": 7,
"type": "LATENT",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
22
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "ReferenceLatent"
},
"widgets_values": []
},
{
"id": 39,
"type": "VAELoader",
"pos": [
30,
650
],
"size": [
330,
58
],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [
27,
36
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "VAELoader",
"models": [
{
"name": "qwen_image_vae.safetensors",
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
"directory": "vae"
}
]
},
"widgets_values": [
"qwen_image_vae.safetensors"
]
},
{
"id": 38,
"type": "CLIPLoader",
"pos": [
30,
490
],
"size": [
330,
110
],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [
24,
25
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "CLIPLoader",
"models": [
{
"name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
"directory": "text_encoders"
}
]
},
"widgets_values": [
"qwen_2.5_vl_7b_fp8_scaled.safetensors",
"qwen_image",
"default"
]
},
{
"id": 37,
"type": "UNETLoader",
"pos": [
30,
220
],
"size": [
330,
82
],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
30
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "UNETLoader",
"models": [
{
"name": "qwen_image_fp8_e4m3fn.safetensors",
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
"directory": "diffusion_models"
}
]
},
"widgets_values": [
"qwen_image_fp8_e4m3fn.safetensors",
"default"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
1400,
170
],
"size": [
140,
46
],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 26
},
{
"name": "vae",
"type": "VAE",
"link": 27
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
28,
52
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 66,
"type": "ModelSamplingAuraFlow",
"pos": [
1100,
170
],
"size": [
260,
58
],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 69
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
20
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "ModelSamplingAuraFlow"
},
"widgets_values": [
4
]
},
{
"id": 69,
"type": "LoraLoaderModelOnly",
"pos": [
30,
360
],
"size": [
330,
82
],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 30
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
45
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "LoraLoaderModelOnly",
"models": [
{
"name": "qwen_image_union_diffsynth_lora.safetensors",
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors",
"directory": "loras"
}
]
},
"widgets_values": [
"qwen_image_union_diffsynth_lora.safetensors",
1
]
},
{
"id": 79,
"type": "LoraLoaderModelOnly",
"pos": [
490,
210
],
"size": [
470,
82
],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 45
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
69
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "LoraLoaderModelOnly",
"models": [
{
"name": "Qwen-Image-Lightning-8steps-V1.1.safetensors",
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.1.safetensors",
"directory": "loras"
}
]
},
"widgets_values": [
"Qwen-Image-Lightning-8steps-V1.1.safetensors",
1
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
420,
460
],
"size": [
400,
200
],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 24
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
31
]
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a man standing on a tip of boat"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 3,
"type": "KSampler",
"pos": [
1127.7242431640625,
316.5459289550781
],
"size": [
260,
450
],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 20
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 21
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 22
},
{
"name": "latent_image",
"type": "LATENT",
"link": 63
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
26
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "KSampler"
},
"widgets_values": [
347241068574736,
"randomize",
10,
1,
"euler",
"simple",
1
]
},
{
"id": 73,
"type": "LoadImage",
"pos": [
32.27558135986328,
868.8213500976562
],
"size": [
274.080078125,
314.00006103515625
],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
79
]
},
{
"name": "MASK",
"type": "MASK",
"links": []
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "LoadImage"
},
"widgets_values": [
"clipspace/clipspace-painted-masked-22800.png [input]",
"image"
]
},
{
"id": 98,
"type": "DWPreprocessor",
"pos": [
56.353271484375,
1252.619873046875
],
"size": [
294.66668701171875,
222
],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 79
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
77
]
},
{
"name": "POSE_KEYPOINT",
"type": "POSE_KEYPOINT",
"links": null
}
],
"properties": {
"cnr_id": "comfyui_controlnet_aux",
"ver": "1.1.0",
"Node name for S&R": "DWPreprocessor"
},
"widgets_values": [
"enable",
"enable",
"enable",
1024,
"yolox_l.onnx",
"dw-ll_ucoco_384_bs5.torchscript.pt",
"disable"
]
},
{
"id": 77,
"type": "ImageScaleToTotalPixels",
"pos": [
398.74237060546875,
1247.850341796875
],
"size": [
270,
82
],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 77
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
48,
51,
75
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "ImageScaleToTotalPixels"
},
"widgets_values": [
"lanczos",
1
]
},
{
"id": 72,
"type": "VAEEncode",
"pos": [
736.1737670898438,
965.122314453125
],
"size": [
140,
46
],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 75
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
32,
34,
63
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "VAEEncode"
},
"widgets_values": []
},
{
"id": 85,
"type": "SetNode",
"pos": [
744.4232177734375,
1067.3704833984375
],
"size": [
210,
60
],
"flags": {
"collapsed": true
},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"link": 51
}
],
"outputs": [
{
"name": "*",
"type": "*",
"links": null
}
],
"title": "Set_l_image",
"properties": {
"previousName": "l_image"
},
"widgets_values": [
"l_image"
],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 75,
"type": "PreviewImage",
"pos": [
744.1314086914062,
1132.8173828125
],
"size": [
330,
290
],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 48
}
],
"outputs": [],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51",
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 87,
"type": "GetNode",
"pos": [
1871.043701171875,
230.5975799560547
],
"size": [
210,
60
],
"flags": {
"collapsed": true
},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
53
]
}
],
"title": "Get_l_image",
"properties": {},
"widgets_values": [
"l_image"
],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 60,
"type": "SaveImage",
"pos": [
1420.037109375,
325.4931945800781
],
"size": [
590.5678100585938,
567.7870483398438
],
"flags": {},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 28
}
],
"outputs": [],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.51"
},
"widgets_values": [
"ComfyUI"
]
},
{
"id": 84,
"type": "Image Comparer (rgthree)",
"pos": [
2040.3214111328125,
180.1768341064453
],
"size": [
551.5145263671875,
579.5601196289062
],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"dir": 3,
"name": "image_a",
"type": "IMAGE",
"link": 52
},
{
"dir": 3,
"name": "image_b",
"type": "IMAGE",
"link": 53
}
],
"outputs": [],
"properties": {
"cnr_id": "rgthree-comfy",
"ver": "1.0.2508241658",
"comparer_mode": "Slide"
},
"widgets_values": [
[
{
"name": "A",
"selected": true,
"url": "/api/view?filename=rgthree.compare._temp_tmhqi_00061_.png&type=temp&subfolder=&rand=0.48945106370724356"
},
{
"name": "B",
"selected": true,
"url": "/api/view?filename=rgthree.compare._temp_tmhqi_00062_.png&type=temp&subfolder=&rand=0.06646704308951568"
}
]
]
},
{
"id": 80,
"type": "MarkdownNote",
"pos": [
-560,
160
],
"size": [
540,
630
],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Model links",
"properties": {
"widget_ue_connectable": {}
},
"widgets_values": [
"[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-8steps-V1.1.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.1.safetensors)\n- [qwen_image_union_diffsynth_lora.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ ├── qwen_image_union_diffsynth_lora.safetensors\n│ │ └── Qwen-Image-Lightning-8steps-V1.1.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 81,
"type": "MarkdownNote",
"pos": [
1128.98486328125,
807.7244873046875
],
"size": [
282.1795349121094,
302.4843444824219
],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "KSampler settings",
"properties": {},
"widgets_values": [
"You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 steps LoRA | 4 | 1.0 |\n"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[
20,
66,
0,
3,
0,
"MODEL"
],
[
21,
70,
0,
3,
1,
"CONDITIONING"
],
[
22,
71,
0,
3,
2,
"CONDITIONING"
],
[
24,
38,
0,
6,
0,
"CLIP"
],
[
25,
38,
0,
7,
0,
"CLIP"
],
[
26,
3,
0,
8,
0,
"LATENT"
],
[
27,
39,
0,
8,
1,
"VAE"
],
[
28,
8,
0,
60,
0,
"IMAGE"
],
[
30,
37,
0,
69,
0,
"MODEL"
],
[
31,
6,
0,
70,
0,
"CONDITIONING"
],
[
32,
72,
0,
70,
1,
"LATENT"
],
[
33,
7,
0,
71,
0,
"CONDITIONING"
],
[
34,
72,
0,
71,
1,
"LATENT"
],
[
36,
39,
0,
72,
1,
"VAE"
],
[
45,
69,
0,
79,
0,
"MODEL"
],
[
48,
77,
0,
75,
0,
"IMAGE"
],
[
51,
77,
0,
85,
0,
"*"
],
[
52,
8,
0,
84,
0,
"IMAGE"
],
[
53,
87,
0,
84,
1,
"IMAGE"
],
[
63,
72,
0,
3,
3,
"LATENT"
],
[
69,
79,
0,
66,
0,
"MODEL"
],
[
75,
77,
0,
72,
0,
"IMAGE"
],
[
77,
98,
0,
77,
0,
"IMAGE"
],
[
79,
73,
0,
98,
0,
"IMAGE"
]
],
"groups": [
{
"id": 1,
"title": "Step 1 - Load models",
"bounding": [
10,
130,
370,
620
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Step 2 - Upload reference image",
"bounding": [
10,
770,
346.4342346191406,
426.4175109863281
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "Conditioning",
"bounding": [
400,
330,
680,
570
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Step 3 - Prompt",
"bounding": [
410,
390,
420,
490
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "8 steps Lightning LoRA",
"bounding": [
400,
130,
680,
180
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.7213855104977629,
"offset": [
503.5263174105339,
-339.3352993426591
]
},
"frontendVersion": "1.25.11"
},
"version": 0.4
}