{ "id": "1805b8e4-0356-4384-adec-44f12a18f32e", "revision": 0, "last_node_id": 94, "last_link_id": 168, "nodes": [ { "id": 52, "type": "LoadImage", "pos": [ -290, -180 ], "size": [ 360, 440 ], "flags": {}, "order": 0, "mode": 0, "inputs": [], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "slot_index": 0, "links": [ 159 ] }, { "name": "MASK", "type": "MASK", "slot_index": 1, "links": null } ], "properties": { "Node name for S&R": "LoadImage", "cnr_id": "comfy-core", "ver": "0.3.67" }, "widgets_values": [ "image_chrono_edit_input_image.png", "image" ] }, { "id": 89, "type": "ImageScaleToMaxDimension", "pos": [ -280, 310 ], "size": [ 330, 90 ], "flags": {}, "order": 4, "mode": 4, "inputs": [ { "name": "image", "type": "IMAGE", "link": 159 } ], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "links": [ 168 ] } ], "properties": { "Node name for S&R": "ImageScaleToMaxDimension", "cnr_id": "comfy-core", "ver": "0.3.67" }, "widgets_values": [ "area", 1280 ] }, { "id": 64, "type": "MarkdownNote", "pos": [ -850, -30 ], "size": [ 490, 540 ], "flags": { "collapsed": true }, "order": 1, "mode": 0, "inputs": [], "outputs": [], "title": "Model links (for local users)", "properties": {}, "widgets_values": [ "Guide: [subgraph](https://docs.comfy.org/interface/features/subgraph)\n## Model links\n\n**text_encoders**\n\n- [umt5_xxl_fp8_e4m3fn_scaled.safetensors](https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors)\n\n**clip_vision**\n\n- [clip_vision_h.safetensors](https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/clip_vision/clip_vision_h.safetensors)\n\n**loras**\n\n- [chronoedit_distill_lora.safetensors](https://huggingface.co/nvidia/ChronoEdit-14B-Diffusers/resolve/main/lora/chronoedit_distill_lora.safetensors)\n\n**diffusion_models**\n\n- 
[chrono_edit_14B_fp16.safetensors](https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/chrono_edit_14B_fp16.safetensors)\n\n**vae**\n\n- [wan_2.1_vae.safetensors](https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/vae/wan_2.1_vae.safetensors)\n\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 text_encoders/\n│ │ └── umt5_xxl_fp8_e4m3fn_scaled.safetensors\n│ ├── 📂 clip_vision/\n│ │ └── clip_vision_h.safetensors\n│ ├── 📂 loras/\n│ │ └── chronoedit_distill_lora.safetensors\n│ ├── 📂 diffusion_models/\n│ │ └── chrono_edit_14B_fp16.safetensors\n│ └── 📂 vae/\n│ └── wan_2.1_vae.safetensors\n```\n\n## Report issue\n\nIf you have any problems while using this workflow, please report template-related issues via this link: [report the template issue here](https://github.com/Comfy-Org/workflow_templates/issues)." ], "color": "#432", "bgcolor": "#653" }, { "id": 88, "type": "MarkdownNote", "pos": [ -850, -220 ], "size": [ 490, 140 ], "flags": {}, "order": 2, "mode": 0, "inputs": [], "outputs": [], "title": "About ChronoEdit 14B", "properties": {}, "widgets_values": [ "[ChronoEdit-14B](https://huggingface.co/nvidia/ChronoEdit-14B-Diffusers) is fine-tuned from the pretrained model of Wan2.1-I2V-14B 720P\n\n[ChronoEdit](https://research.nvidia.com/labs/toronto-ai/chronoedit/), a framework developed by teams from NVIDIA and the University of Toronto, reframes image editing as a two-frame video generation task. It leverages the temporal priors of pretrained video generative models and incorporates a temporal reasoning mechanism to achieve editing results with both visual fidelity and physical consistency. 
Additionally, it comes with the PBench-Edit benchmark for evaluating physical consistency, making it suitable for scenarios like world simulation that require strict adherence to physical laws." ], "color": "#432", "bgcolor": "#653" }, { "id": 91, "type": "MarkdownNote", "pos": [ -290, 490 ], "size": [ 390, 130 ], "flags": { "collapsed": false }, "order": 3, "mode": 0, "inputs": [], "outputs": [], "title": "Note: Image size", "properties": {}, "widgets_values": [ "This model is fine-tuned from Wan2.1-I2V-14B 720P (1280x720). So, please don't upload images whose size is too large; that might take up a very large amount of VRAM or lead to bad results.\n\nYou can use `ImageScaleToMaxDimension` to scale it down." ], "color": "#432", "bgcolor": "#653" }, { "id": 94, "type": "66856780-c0db-4a59-98e4-e414dc72c518", "pos": [ 140, -210 ], "size": [ 475.7184042477661, 631.3927853590058 ], "flags": {}, "order": 5, "mode": 0, "inputs": [ { "name": "image", "type": "IMAGE", "link": 168 } ], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "links": [ 167 ] } ], "properties": { "proxyWidgets": [ [ "6", "text" ], [ "50", "width" ], [ "50", "height" ], [ "3", "seed" ], [ "3", "control_after_generate" ], [ "-1", "$$canvas-image-preview" ] ] }, "widgets_values": [] }, { "id": 60, "type": "SaveImage", "pos": [ 646.7723098575518, -209.99088063772658 ], "size": [ 730, 760 ], "flags": {}, "order": 6, "mode": 0, "inputs": [ { "name": "images", "type": "IMAGE", "link": 167 } ], "outputs": [], "properties": { "cnr_id": "comfy-core", "ver": "0.3.67" }, "widgets_values": [ "Chrono_Edit_14B" ] } ], "links": [ [ 159, 52, 0, 89, 0, "IMAGE" ], [ 167, 94, 0, 60, 0, "IMAGE" ], [ 168, 89, 0, 94, 0, "IMAGE" ] ], "groups": [ { "id": 3, "title": "Step 2 - Upload image", "bounding": [ -320, -260, 410, 690 ], "color": "#3f789e", "font_size": 24, "flags": {} } ], "definitions": { "subgraphs": [ { "id": "66856780-c0db-4a59-98e4-e414dc72c518", "version": 1, "state": { "lastGroupId": 5, "lastNodeId": 
91, "lastLinkId": 162, "lastRerouteId": 0 }, "revision": 0, "config": {}, "name": "Chrono Image Edit", "inputNode": { "id": -10, "bounding": [ -1030, 275, 120, 60 ] }, "outputNode": { "id": -20, "bounding": [ 1250, 275, 120, 60 ] }, "inputs": [ { "id": "96a9ddeb-fa10-4d86-9486-57a32417e7a0", "name": "image", "type": "IMAGE", "linkIds": [ 161, 160 ], "localized_name": "image", "pos": [ -930, 295 ] } ], "outputs": [ { "id": "97948565-d7a6-4601-b210-66517ce0217b", "name": "IMAGE", "type": "IMAGE", "linkIds": [ 119 ], "localized_name": "IMAGE", "pos": [ 1270, 295 ] } ], "widgets": [], "nodes": [ { "id": 58, "type": "ScaleROPE", "pos": [ 260, -140 ], "size": [ 320, 178 ], "flags": {}, "order": 7, "mode": 0, "inputs": [ { "localized_name": "model", "name": "model", "type": "MODEL", "link": 116 } ], "outputs": [ { "localized_name": "MODEL", "name": "MODEL", "type": "MODEL", "links": [ 117 ] } ], "properties": { "Node name for S&R": "ScaleROPE", "cnr_id": "comfy-core", "ver": "0.3.67" }, "widgets_values": [ 1, 0, 1, 0, 7, 0 ] }, { "id": 39, "type": "VAELoader", "pos": [ -630, 90 ], "size": [ 390, 58 ], "flags": {}, "order": 0, "mode": 0, "inputs": [], "outputs": [ { "localized_name": "VAE", "name": "VAE", "type": "VAE", "slot_index": 0, "links": [ 76, 99 ] } ], "properties": { "Node name for S&R": "VAELoader", "cnr_id": "comfy-core", "ver": "0.3.67", "models": [ { "name": "wan_2.1_vae.safetensors", "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/vae/wan_2.1_vae.safetensors", "directory": "vae" } ] }, "widgets_values": [ "wan_2.1_vae.safetensors" ] }, { "id": 49, "type": "CLIPVisionLoader", "pos": [ -630, 190 ], "size": [ 390, 58 ], "flags": {}, "order": 1, "mode": 0, "inputs": [], "outputs": [ { "localized_name": "CLIP_VISION", "name": "CLIP_VISION", "type": "CLIP_VISION", "slot_index": 0, "links": [ 94 ] } ], "properties": { "Node name for S&R": "CLIPVisionLoader", "cnr_id": "comfy-core", "ver": "0.3.67", "models": [ { "name": 
"clip_vision_h.safetensors", "url": "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/clip_vision/clip_vision_h.safetensors", "directory": "clip_vision" } ] }, "widgets_values": [ "clip_vision_h.safetensors" ] }, { "id": 37, "type": "UNETLoader", "pos": [ -630, -200 ], "size": [ 390, 82 ], "flags": {}, "order": 2, "mode": 0, "inputs": [], "outputs": [ { "localized_name": "MODEL", "name": "MODEL", "type": "MODEL", "slot_index": 0, "links": [ 162 ] } ], "properties": { "Node name for S&R": "UNETLoader", "cnr_id": "comfy-core", "ver": "0.3.67", "models": [ { "name": "chrono_edit_14B_fp16.safetensors", "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/chrono_edit_14B_fp16.safetensors", "directory": "diffusion_models" } ] }, "widgets_values": [ "chrono_edit_14B_fp16.safetensors", "fp8_e4m3fn" ] }, { "id": 8, "type": "VAEDecode", "pos": [ 260, 610 ], "size": [ 320, 46 ], "flags": {}, "order": 9, "mode": 0, "inputs": [ { "localized_name": "samples", "name": "samples", "type": "LATENT", "link": 35 }, { "localized_name": "vae", "name": "vae", "type": "VAE", "link": 76 } ], "outputs": [ { "localized_name": "IMAGE", "name": "IMAGE", "type": "IMAGE", "slot_index": 0, "links": [ 118 ] } ], "properties": { "Node name for S&R": "VAEDecode", "cnr_id": "comfy-core", "ver": "0.3.67" }, "widgets_values": [] }, { "id": 59, "type": "ImageFromBatch", "pos": [ 610, -240 ], "size": [ 270, 82 ], "flags": {}, "order": 12, "mode": 0, "inputs": [ { "localized_name": "image", "name": "image", "type": "IMAGE", "link": 118 } ], "outputs": [ { "localized_name": "IMAGE", "name": "IMAGE", "type": "IMAGE", "links": [ 119 ] } ], "properties": { "Node name for S&R": "ImageFromBatch", "cnr_id": "comfy-core", "ver": "0.3.67" }, "widgets_values": [ 4, 1 ] }, { "id": 51, "type": "CLIPVisionEncode", "pos": [ -160, 590 ], "size": [ 340, 78 ], "flags": {}, "order": 11, "mode": 0, "inputs": [ { "localized_name": 
"clip_vision", "name": "clip_vision", "type": "CLIP_VISION", "link": 94 }, { "localized_name": "image", "name": "image", "type": "IMAGE", "link": 161 } ], "outputs": [ { "localized_name": "CLIP_VISION_OUTPUT", "name": "CLIP_VISION_OUTPUT", "type": "CLIP_VISION_OUTPUT", "slot_index": 0, "links": [ 120 ] } ], "properties": { "Node name for S&R": "CLIPVisionEncode", "cnr_id": "comfy-core", "ver": "0.3.67" }, "widgets_values": [ "none" ] }, { "id": 50, "type": "WanImageToVideo", "pos": [ -160, 300 ], "size": [ 342.5999755859375, 210 ], "flags": {}, "order": 10, "mode": 0, "inputs": [ { "localized_name": "positive", "name": "positive", "type": "CONDITIONING", "link": 97 }, { "localized_name": "negative", "name": "negative", "type": "CONDITIONING", "link": 98 }, { "localized_name": "vae", "name": "vae", "type": "VAE", "link": 99 }, { "localized_name": "clip_vision_output", "name": "clip_vision_output", "shape": 7, "type": "CLIP_VISION_OUTPUT", "link": 120 }, { "localized_name": "start_image", "name": "start_image", "shape": 7, "type": "IMAGE", "link": 160 } ], "outputs": [ { "localized_name": "positive", "name": "positive", "type": "CONDITIONING", "slot_index": 0, "links": [ 101 ] }, { "localized_name": "negative", "name": "negative", "type": "CONDITIONING", "slot_index": 1, "links": [ 102 ] }, { "localized_name": "latent", "name": "latent", "type": "LATENT", "slot_index": 2, "links": [ 103 ] } ], "properties": { "Node name for S&R": "WanImageToVideo", "cnr_id": "comfy-core", "ver": "0.3.67" }, "widgets_values": [ 720, 720, 5, 1 ] }, { "id": 6, "type": "CLIPTextEncode", "pos": [ -200, -200 ], "size": [ 422.84503173828125, 164.31304931640625 ], "flags": {}, "order": 5, "mode": 0, "inputs": [ { "localized_name": "clip", "name": "clip", "type": "CLIP", "link": 74 } ], "outputs": [ { "localized_name": "CONDITIONING", "name": "CONDITIONING", "type": "CONDITIONING", "slot_index": 0, "links": [ 97 ] } ], "title": "CLIP Text Encode (Positive Prompt)", "properties": { "Node name 
for S&R": "CLIPTextEncode", "cnr_id": "comfy-core", "ver": "0.3.67" }, "widgets_values": [ "A bottle of facial cleansing foam and bubble shampoo, surrounded by white, round, foamy bubbles. The bubbles are very fluffy, crystal clear, giving a sense of fluffiness and comfort. There are also several bubbles floating in the air around, and the bottle is floating in the air. The background is light pink. The high-resolution picture creates a professional advertising style with high-definition images and high-quality details." ], "color": "#232", "bgcolor": "#353" }, { "id": 7, "type": "CLIPTextEncode", "pos": [ -200, 20 ], "size": [ 425.27801513671875, 180.6060791015625 ], "flags": {}, "order": 6, "mode": 0, "inputs": [ { "localized_name": "clip", "name": "clip", "type": "CLIP", "link": 75 } ], "outputs": [ { "localized_name": "CONDITIONING", "name": "CONDITIONING", "type": "CONDITIONING", "slot_index": 0, "links": [ 98 ] } ], "title": "CLIP Text Encode (Negative Prompt)", "properties": { "Node name for S&R": "CLIPTextEncode", "cnr_id": "comfy-core", "ver": "0.3.67" }, "widgets_values": [ "่‰ฒ่ฐƒ่‰ณไธฝ๏ผŒ่ฟ‡ๆ›๏ผŒ้™ๆ€๏ผŒ็ป†่Š‚ๆจก็ณŠไธๆธ…๏ผŒๅญ—ๅน•๏ผŒ้ฃŽๆ ผ๏ผŒไฝœๅ“๏ผŒ็”ปไฝœ๏ผŒ็”ป้ข๏ผŒ้™ๆญข๏ผŒๆ•ดไฝ“ๅ‘็ฐ๏ผŒๆœ€ๅทฎ่ดจ้‡๏ผŒไฝŽ่ดจ้‡๏ผŒJPEGๅŽ‹็ผฉๆฎ‹็•™๏ผŒไธ‘้™‹็š„๏ผŒๆฎ‹็ผบ็š„๏ผŒๅคšไฝ™็š„ๆ‰‹ๆŒ‡๏ผŒ็”ปๅพ—ไธๅฅฝ็š„ๆ‰‹้ƒจ๏ผŒ็”ปๅพ—ไธๅฅฝ็š„่„ธ้ƒจ๏ผŒ็•ธๅฝข็š„๏ผŒๆฏๅฎน็š„๏ผŒๅฝขๆ€็•ธๅฝข็š„่‚ขไฝ“๏ผŒๆ‰‹ๆŒ‡่žๅˆ๏ผŒ้™ๆญขไธๅŠจ็š„็”ป้ข๏ผŒๆ‚ไนฑ็š„่ƒŒๆ™ฏ๏ผŒไธ‰ๆก่…ฟ๏ผŒ่ƒŒๆ™ฏไบบๅพˆๅคš๏ผŒๅ€’็€่ตฐ" ], "color": "#223", "bgcolor": "#335" }, { "id": 54, "type": "ModelSamplingSD3", "pos": [ 260, -240 ], "size": [ 320, 58 ], "flags": {}, "order": 4, "mode": 0, "inputs": [ { "localized_name": "model", "name": "model", "type": "MODEL", "link": 162 } ], "outputs": [ { "localized_name": "MODEL", "name": "MODEL", "type": "MODEL", "slot_index": 0, "links": [ 116 ] } ], "properties": { "Node name for S&R": "ModelSamplingSD3", "cnr_id": "comfy-core", "ver": "0.3.67" }, "widgets_values": [ 5 ] 
}, { "id": 3, "type": "KSampler", "pos": [ 260, 80 ], "size": [ 320, 480 ], "flags": {}, "order": 8, "mode": 0, "inputs": [ { "localized_name": "model", "name": "model", "type": "MODEL", "link": 117 }, { "localized_name": "positive", "name": "positive", "type": "CONDITIONING", "link": 101 }, { "localized_name": "negative", "name": "negative", "type": "CONDITIONING", "link": 102 }, { "localized_name": "latent_image", "name": "latent_image", "type": "LATENT", "link": 103 } ], "outputs": [ { "localized_name": "LATENT", "name": "LATENT", "type": "LATENT", "slot_index": 0, "links": [ 35 ] } ], "properties": { "Node name for S&R": "KSampler", "cnr_id": "comfy-core", "ver": "0.3.67" }, "widgets_values": [ 916911225761832, "randomize", 20, 4, "uni_pc", "simple", 1 ] }, { "id": 38, "type": "CLIPLoader", "pos": [ -630, -60 ], "size": [ 390, 106 ], "flags": {}, "order": 3, "mode": 0, "inputs": [], "outputs": [ { "localized_name": "CLIP", "name": "CLIP", "type": "CLIP", "slot_index": 0, "links": [ 74, 75 ] } ], "properties": { "Node name for S&R": "CLIPLoader", "cnr_id": "comfy-core", "ver": "0.3.67", "models": [ { "name": "umt5_xxl_fp8_e4m3fn_scaled.safetensors", "url": "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors", "directory": "text_encoders" } ] }, "widgets_values": [ "umt5_xxl_fp8_e4m3fn_scaled.safetensors", "wan", "default" ] } ], "groups": [ { "id": 1, "title": "Step 2: Prompt", "bounding": [ -210, -270, 445.27801513671875, 484.2060791015625 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 2, "title": "Step 1 - Load models (for local users)", "bounding": [ -640, -270, 410, 560 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 4, "title": "Step 3: Image size", "bounding": [ -200, 230, 430, 290 ], "color": "#3f789e", "font_size": 24, "flags": {} } ], "links": [ { "id": 116, "origin_id": 54, "origin_slot": 0, "target_id": 58, "target_slot": 0, "type": 
"MODEL" }, { "id": 35, "origin_id": 3, "origin_slot": 0, "target_id": 8, "target_slot": 0, "type": "LATENT" }, { "id": 76, "origin_id": 39, "origin_slot": 0, "target_id": 8, "target_slot": 1, "type": "VAE" }, { "id": 118, "origin_id": 8, "origin_slot": 0, "target_id": 59, "target_slot": 0, "type": "IMAGE" }, { "id": 94, "origin_id": 49, "origin_slot": 0, "target_id": 51, "target_slot": 0, "type": "CLIP_VISION" }, { "id": 97, "origin_id": 6, "origin_slot": 0, "target_id": 50, "target_slot": 0, "type": "CONDITIONING" }, { "id": 98, "origin_id": 7, "origin_slot": 0, "target_id": 50, "target_slot": 1, "type": "CONDITIONING" }, { "id": 99, "origin_id": 39, "origin_slot": 0, "target_id": 50, "target_slot": 2, "type": "VAE" }, { "id": 120, "origin_id": 51, "origin_slot": 0, "target_id": 50, "target_slot": 3, "type": "CLIP_VISION_OUTPUT" }, { "id": 74, "origin_id": 38, "origin_slot": 0, "target_id": 6, "target_slot": 0, "type": "CLIP" }, { "id": 75, "origin_id": 38, "origin_slot": 0, "target_id": 7, "target_slot": 0, "type": "CLIP" }, { "id": 117, "origin_id": 58, "origin_slot": 0, "target_id": 3, "target_slot": 0, "type": "MODEL" }, { "id": 101, "origin_id": 50, "origin_slot": 0, "target_id": 3, "target_slot": 1, "type": "CONDITIONING" }, { "id": 102, "origin_id": 50, "origin_slot": 1, "target_id": 3, "target_slot": 2, "type": "CONDITIONING" }, { "id": 103, "origin_id": 50, "origin_slot": 2, "target_id": 3, "target_slot": 3, "type": "LATENT" }, { "id": 161, "origin_id": -10, "origin_slot": 0, "target_id": 51, "target_slot": 1, "type": "IMAGE" }, { "id": 160, "origin_id": -10, "origin_slot": 0, "target_id": 50, "target_slot": 4, "type": "IMAGE" }, { "id": 119, "origin_id": 59, "origin_slot": 0, "target_id": -20, "target_slot": 0, "type": "IMAGE" }, { "id": 162, "origin_id": 37, "origin_slot": 0, "target_id": 54, "target_slot": 0, "type": "MODEL" } ], "extra": { "workflowRendererVersion": "LG" } } ] }, "config": {}, "extra": { "ds": { "scale": 0.42834683861501444, "offset": 
[ 1509.3658522944195, 823.6255964541484 ] }, "frontendVersion": "1.34.6", "VHS_latentpreview": false, "VHS_latentpreviewrate": 0, "VHS_MetadataImage": true, "VHS_KeepIntermediate": true, "workflowRendererVersion": "LG" }, "version": 0.4 }