{ "id": "91f6bbe2-ed41-4fd6-bac7-71d5b5864ecb", "revision": 0, "last_node_id": 118, "last_link_id": 202, "nodes": [ { "id": 38, "type": "CLIPLoader", "pos": [ -120, 130 ], "size": [ 380, 106 ], "flags": {}, "order": 0, "mode": 0, "inputs": [], "outputs": [ { "name": "CLIP", "type": "CLIP", "slot_index": 0, "links": [ 74, 75 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "CLIPLoader", "models": [ { "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors", "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors", "directory": "text_encoders" } ] }, "widgets_values": [ "qwen_2.5_vl_7b_fp8_scaled.safetensors", "qwen_image", "default" ] }, { "id": 37, "type": "UNETLoader", "pos": [ -120, 0 ], "size": [ 380, 82 ], "flags": {}, "order": 1, "mode": 0, "inputs": [], "outputs": [ { "name": "MODEL", "type": "MODEL", "slot_index": 0, "links": [ 145 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "UNETLoader", "models": [ { "name": "qwen_image_fp8_e4m3fn.safetensors", "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors", "directory": "diffusion_models" } ] }, "widgets_values": [ "qwen_image_fp8_e4m3fn.safetensors", "default" ] }, { "id": 8, "type": "VAEDecode", "pos": [ 847.4144287109375, 560.3872680664062 ], "size": [ 310, 46 ], "flags": {}, "order": 22, "mode": 0, "inputs": [ { "name": "samples", "type": "LATENT", "link": 128 }, { "name": "vae", "type": "VAE", "link": 76 } ], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "slot_index": 0, "links": [ 110 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "VAEDecode" }, "widgets_values": [] }, { "id": 86, "type": "Note", "pos": [ 847.4144287109375, 660.3873901367188 ], "size": [ 307.4002380371094, 127.38092803955078 ], "flags": {}, "order": 2, "mode": 0, "inputs": 
[], "outputs": [], "properties": {}, "widgets_values": [ "Set cfg to 1.0 for a speed boost at the cost of consistency. Samplers like res_multistep work pretty well at cfg 1.0\n\nThe official number of steps is 50 but I think that's too much. Even just 10 steps seems to work." ], "color": "#432", "bgcolor": "#653" }, { "id": 7, "type": "CLIPTextEncode", "pos": [ 300, 380 ], "size": [ 460, 140 ], "flags": {}, "order": 12, "mode": 0, "inputs": [ { "name": "clip", "type": "CLIP", "link": 75 } ], "outputs": [ { "name": "CONDITIONING", "type": "CONDITIONING", "slot_index": 0, "links": [ 191 ] } ], "title": "CLIP Text Encode (Negative Prompt)", "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "CLIPTextEncode" }, "widgets_values": [ " " ], "color": "#223", "bgcolor": "#335" }, { "id": 84, "type": "ControlNetLoader", "pos": [ -120, 400 ], "size": [ 380, 58 ], "flags": {}, "order": 3, "mode": 0, "inputs": [], "outputs": [ { "name": "CONTROL_NET", "type": "CONTROL_NET", "links": [ 192 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "ControlNetLoader", "models": [ { "name": "Qwen-Image-InstantX-ControlNet-Inpainting.safetensors", "url": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Inpainting.safetensors", "directory": "controlnet" } ] }, "widgets_values": [ "Qwen-Image-InstantX-ControlNet-Inpainting.safetensors" ] }, { "id": 39, "type": "VAELoader", "pos": [ -120, 290 ], "size": [ 380, 58 ], "flags": {}, "order": 4, "mode": 0, "inputs": [], "outputs": [ { "name": "VAE", "type": "VAE", "slot_index": 0, "links": [ 76, 144, 193 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "VAELoader", "models": [ { "name": "qwen_image_vae.safetensors", "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors", "directory": "vae" } ] }, 
"widgets_values": [ "qwen_image_vae.safetensors" ] }, { "id": 110, "type": "VAEEncode", "pos": [ 324.080078125, 455 ], "size": [ 140, 46 ], "flags": {}, "order": 14, "mode": 0, "inputs": [ { "name": "pixels", "type": "IMAGE", "link": 197 }, { "name": "vae", "type": "VAE", "link": null } ], "outputs": [ { "name": "LATENT", "type": "LATENT", "links": [] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.59", "Node name for S&R": "VAEEncode" }, "widgets_values": [] }, { "id": 66, "type": "ModelSamplingAuraFlow", "pos": [ 822.5421752929688, -38.42329025268555 ], "size": [ 310, 58 ], "flags": {}, "order": 16, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 149 } ], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [ 156 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "ModelSamplingAuraFlow" }, "widgets_values": [ 3.1000000000000005 ] }, { "id": 108, "type": "ControlNetInpaintingAliMamaApply", "pos": [ 391.0537109375, 626.009521484375 ], "size": [ 317.0093688964844, 206 ], "flags": {}, "order": 20, "mode": 0, "inputs": [ { "name": "positive", "type": "CONDITIONING", "link": 190 }, { "name": "negative", "type": "CONDITIONING", "link": 191 }, { "name": "control_net", "type": "CONTROL_NET", "link": 192 }, { "name": "vae", "type": "VAE", "link": 193 }, { "name": "image", "type": "IMAGE", "link": 194 }, { "name": "mask", "type": "MASK", "link": 195 } ], "outputs": [ { "name": "positive", "type": "CONDITIONING", "links": [ 188 ] }, { "name": "negative", "type": "CONDITIONING", "links": [ 189 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.59", "Node name for S&R": "ControlNetInpaintingAliMamaApply" }, "widgets_values": [ 1, 0, 1 ] }, { "id": 6, "type": "CLIPTextEncode", "pos": [ 300, 170 ], "size": [ 460, 164.31304931640625 ], "flags": {}, "order": 11, "mode": 0, "inputs": [ { "name": "clip", "type": "CLIP", "link": 74 } ], "outputs": [ { "name": "CONDITIONING", "type": "CONDITIONING", 
"slot_index": 0, "links": [ 190 ] } ], "title": "CLIP Text Encode (Positive Prompt)", "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "CLIPTextEncode" }, "widgets_values": [ "The Queen, on a throne, surrounded by Knights, HD, Realistic, Octane Render, Unreal engine" ], "color": "#232", "bgcolor": "#353" }, { "id": 76, "type": "VAEEncode", "pos": [ 557.8602294921875, 905.5271606445312 ], "size": [ 140, 46 ], "flags": { "collapsed": false }, "order": 19, "mode": 0, "inputs": [ { "name": "pixels", "type": "IMAGE", "link": 143 }, { "name": "vae", "type": "VAE", "link": 144 } ], "outputs": [ { "name": "LATENT", "type": "LATENT", "links": [ 142 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "VAEEncode" }, "widgets_values": [] }, { "id": 60, "type": "SaveImage", "pos": [ 1202.1839599609375, 81.55579376220703 ], "size": [ 970, 1030 ], "flags": {}, "order": 23, "mode": 0, "inputs": [ { "name": "images", "type": "IMAGE", "link": 110 } ], "outputs": [], "properties": { "cnr_id": "comfy-core", "ver": "0.3.51" }, "widgets_values": [ "ComfyUI" ] }, { "id": 78, "type": "MarkdownNote", "pos": [ -690, -50 ], "size": [ 541.36865234375, 579.70263671875 ], "flags": {}, "order": 5, "mode": 0, "inputs": [], "outputs": [], "title": "Model links", "properties": { "widget_ue_connectable": {} }, "widgets_values": [ "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**ControlNet**\n\n- 
[Qwen-Image-InstantX-ControlNet-Inpainting.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Inpainting.safetensors)\n\n\n**LoRA**\n\n- [Qwen-Image-Lightning-4steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\n\nModel Storage Location\n\n```\nšŸ“‚ ComfyUI/\nā”œā”€ā”€ šŸ“‚ models/\n│ ā”œā”€ā”€ šŸ“‚ diffusion_models/\n│ │ ā”œā”€ā”€ qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ā”œā”€ā”€ šŸ“‚ loras/\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ā”œā”€ā”€ šŸ“‚ controlnet/ \n│ │ └── Qwen-Image-InstantX-ControlNet-Inpainting.safetensors\n│ ā”œā”€ā”€ šŸ“‚ vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── šŸ“‚ text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n" ], "color": "#432", "bgcolor": "#653" }, { "id": 68, "type": "Note", "pos": [ 826.6304931640625, -197.34292602539062 ], "size": [ 310, 90 ], "flags": {}, "order": 6, "mode": 0, "inputs": [], "outputs": [], "properties": {}, "widgets_values": [ "Increase the shift if you get too many blurry/dark/bad images. Decrease if you want to try increasing detail." ], "color": "#432", "bgcolor": "#653" }, { "id": 79, "type": "MarkdownNote", "pos": [ 853.4926147460938, 840.671875 ], "size": [ 310, 140 ], "flags": {}, "order": 7, "mode": 0, "inputs": [], "outputs": [], "title": "KSampler settings", "properties": {}, "widgets_values": [ "You can test and find the best setting by yourself. 
The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 Steps lightning LoRA | 4 | 1.0 |\n" ], "color": "#432", "bgcolor": "#653" }, { "id": 80, "type": "LoraLoaderModelOnly", "pos": [ 320, -10 ], "size": [ 430, 82 ], "flags": {}, "order": 13, "mode": 4, "inputs": [ { "name": "model", "type": "MODEL", "link": 145 } ], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [ 149 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "LoraLoaderModelOnly", "models": [ { "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors", "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors", "directory": "loras" } ] }, "widgets_values": [ "Qwen-Image-Lightning-4steps-V1.0.safetensors", 1 ] }, { "id": 3, "type": "KSampler", "pos": [ 847.4144287109375, 80.38726043701172 ], "size": [ 310, 430 ], "flags": {}, "order": 21, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 156 }, { "name": "positive", "type": "CONDITIONING", "link": 188 }, { "name": "negative", "type": "CONDITIONING", "link": 189 }, { "name": "latent_image", "type": "LATENT", "link": 142 } ], "outputs": [ { "name": "LATENT", "type": "LATENT", "slot_index": 0, "links": [ 128 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "KSampler" }, "widgets_values": [ 310301619553341, "randomize", 20, 2.5, "euler", "simple", 1 ] }, { "id": 71, "type": "LoadImage", "pos": [ -73.20216369628906, 657.9524536132812 ], "size": [ 274.080078125, 314.00006103515625 ], "flags": {}, "order": 8, "mode": 0, "inputs": [], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "links": [ 197, 200 ] }, { "name": "MASK", "type": "MASK", "links": [ 195 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "LoadImage" }, "widgets_values": [ 
"clipspace/clipspace-painted-masked-19830639.png [input]", "image" ] }, { "id": 75, "type": "ImageScaleToTotalPixels", "pos": [ 390, 1050 ], "size": [ 270, 82 ], "flags": {}, "order": 17, "mode": 0, "inputs": [ { "name": "image", "type": "IMAGE", "link": 201 } ], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "links": [ 143, 194 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.51", "Node name for S&R": "ImageScaleToTotalPixels" }, "widgets_values": [ "area", 1.68 ] }, { "id": 115, "type": "MarkdownNote", "pos": [ -462.4257507324219, 588.766845703125 ], "size": [ 307.56927490234375, 169.79689025878906 ], "flags": {}, "order": 9, "mode": 0, "inputs": [], "outputs": [], "title": "About how to create mask", "properties": {}, "widgets_values": [ "Right-click on the Load Image node, then click \"Open in MaskEditor\" to open it and paint the area you want to inpaint.\n\nYou can learn more about MaskEditor in the [MaskEditor Document](https://docs.comfy.org/interface/maskeditor)" ], "color": "#432", "bgcolor": "#653" }, { "id": 117, "type": "MarkdownNote", "pos": [ -460, 1040 ], "size": [ 307.56927490234375, 169.79689025878906 ], "flags": {}, "order": 10, "mode": 0, "inputs": [], "outputs": [], "title": "About outpainting", "properties": {}, "widgets_values": [ "For outpainting, you should use the mask from the **Pad Image for Outpainting** node." ], "color": "#432", "bgcolor": "#653" }, { "id": 116, "type": "ImagePadForOutpaint", "pos": [ -50, 1100 ], "size": [ 270, 174 ], "flags": {}, "order": 15, "mode": 4, "inputs": [ { "name": "image", "type": "IMAGE", "link": 200 } ], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "links": [ 201, 202 ] }, { "name": "MASK", "type": "MASK", "links": null } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.59", "Node name for S&R": "ImagePadForOutpaint" }, "widgets_values": [ 104, 104, 104, 0, 40 ] }, { "id": 118, "type": "PreviewImage", "pos": [ 400, 1200 ], "size": [ 140, 26 ], "flags": {}, "order": 18, "mode": 4, 
"inputs": [ { "name": "images", "type": "IMAGE", "link": 202 } ], "outputs": [], "properties": { "cnr_id": "comfy-core", "ver": "0.3.59", "Node name for S&R": "PreviewImage" } } ], "links": [ [ 74, 38, 0, 6, 0, "CLIP" ], [ 75, 38, 0, 7, 0, "CLIP" ], [ 76, 39, 0, 8, 1, "VAE" ], [ 110, 8, 0, 60, 0, "IMAGE" ], [ 128, 3, 0, 8, 0, "LATENT" ], [ 142, 76, 0, 3, 3, "LATENT" ], [ 143, 75, 0, 76, 0, "IMAGE" ], [ 144, 39, 0, 76, 1, "VAE" ], [ 145, 37, 0, 80, 0, "MODEL" ], [ 149, 80, 0, 66, 0, "MODEL" ], [ 156, 66, 0, 3, 0, "MODEL" ], [ 188, 108, 0, 3, 1, "CONDITIONING" ], [ 189, 108, 1, 3, 2, "CONDITIONING" ], [ 190, 6, 0, 108, 0, "CONDITIONING" ], [ 191, 7, 0, 108, 1, "CONDITIONING" ], [ 192, 84, 0, 108, 2, "CONTROL_NET" ], [ 193, 39, 0, 108, 3, "VAE" ], [ 194, 75, 0, 108, 4, "IMAGE" ], [ 195, 71, 1, 108, 5, "MASK" ], [ 197, 71, 0, 110, 0, "IMAGE" ], [ 200, 71, 0, 116, 0, "IMAGE" ], [ 201, 116, 0, 75, 0, "IMAGE" ], [ 202, 116, 0, 118, 0, "IMAGE" ] ], "groups": [ { "id": 1, "title": "Step 1 - Upload models", "bounding": [ -130, -80, 400, 610 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 2, "title": "Step 2 - Upload image and edit mask", "bounding": [ -130, 550, 420, 440 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 4, "title": "Step 3 - Prompt", "bounding": [ 290, 100, 490, 430 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 5, "title": "4 steps lightning LoRA", "bounding": [ 290, -80, 490, 160 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 6, "title": "Press Ctrl-B to enable it for outpainting", "bounding": [ -130, 1010, 420, 290 ], "color": "#3f789e", "font_size": 24, "flags": {} } ], "config": {}, "extra": { "ds": { "scale": 1.0153812235418525, "offset": [ 659.5129778414238, -568.0214660257494 ] }, "frontendVersion": "1.26.10", "groupNodes": {}, "VHS_latentpreview": false, "VHS_latentpreviewrate": 0, "VHS_MetadataImage": true, "VHS_KeepIntermediate": true }, "version": 0.4 }