{ "id": "908d0bfb-e192-4627-9b57-147496e6e2dd", "revision": 0, "last_node_id": 53, "last_link_id": 72, "nodes": [ { "id": 9, "type": "SaveImage", "pos": [ 160.66669539582927, 112.66664357362677 ], "size": [ 560, 646.6666666666667 ], "flags": {}, "order": 2, "mode": 0, "inputs": [ { "name": "images", "type": "IMAGE", "link": 71 } ], "outputs": [], "properties": { "cnr_id": "comfy-core", "ver": "0.3.40" }, "widgets_values": [ "flux_krea" ] }, { "id": 43, "type": "MarkdownNote", "pos": [ -869.3332435690144, 112.66664357362677 ], "size": [ 530, 719.296875 ], "flags": {}, "order": 0, "mode": 0, "inputs": [], "outputs": [], "title": "Model links", "properties": {}, "widgets_values": [ "Guide: [Subgraph](https://docs.comfy.org/interface/features/subgraph)\n\n## Report issue\n\nNote: please update ComfyUI first ([guide](https://docs.comfy.org/zh-CN/installation/update_comfyui)) and prepare required models. Desktop/Cloud ship stable builds; nightly-supported models may not be included yet, please wait for the next stable release.\n\n- Cannot run / runtime errors: [ComfyUI/issues](https://github.com/comfyanonymous/ComfyUI/issues)\n- UI / frontend issues: [ComfyUI_frontend/issues](https://github.com/Comfy-Org/ComfyUI_frontend/issues)\n- Workflow issues: [workflow_templates/issues](https://github.com/Comfy-Org/workflow_templates/issues)\n\n\n## Model links\n\n**text_encoders**\n\n- [clip_l.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors)\n- [t5xxl_fp16.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors)\n\n**diffusion_models**\n\n- [flux1-krea-dev_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/FLUX.1-Krea-dev_ComfyUI/resolve/main/split_files/diffusion_models/flux1-krea-dev_fp8_scaled.safetensors)\n\n**vae**\n\n- [ae.safetensors](https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/resolve/main/split_files/vae/ae.safetensors)\n\n\nModel Storage Location\n\n```\nšŸ“‚ ComfyUI/\nā”œā”€ā”€ šŸ“‚ models/\n│ ā”œā”€ā”€ šŸ“‚ text_encoders/\n│ │ ā”œā”€ā”€ clip_l.safetensors\n│ │ └── t5xxl_fp16.safetensors\n│ ā”œā”€ā”€ šŸ“‚ diffusion_models/\n│ │ └── flux1-krea-dev_fp8_scaled.safetensors\n│ └── šŸ“‚ vae/\n│ └── ae.safetensors\n```\n" ], "color": "#432", "bgcolor": "#000" }, { "id": 53, "type": "fc11e656-d80a-42fa-ae56-c197af368516", "pos": [ -299.3333046041706, 112.66664357362677 ], "size": [ 410, 505 ], "flags": {}, "order": 1, "mode": 0, "inputs": [], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "links": [ 71 ] } ], "properties": { "proxyWidgets": [ [ "-1", "text" ], [ "-1", "width" ], [ "-1", "height" ], [ "31", "seed" ], [ "31", "control_after_generate" ] ] }, "widgets_values": [ "Highly realistic portrait of a Nordic woman with blonde hair and blue eyes, gaze sharp and intellectual. The lighting should reflect the unique coolness of Northern Europe. Outfit is minimalist and modern, background is blurred in cool tones. Needs to perfectly capture the characteristics of a Scandinavian woman. solo, Centered composition\n", 1024, 1024 ] } ], "links": [ [ 71, 53, 0, 9, 0, "IMAGE" ] ], "groups": [], "definitions": { "subgraphs": [ { "id": "fc11e656-d80a-42fa-ae56-c197af368516", "version": 1, "state": { "lastGroupId": 3, "lastNodeId": 51, "lastLinkId": 74, "lastRerouteId": 0 }, "revision": 0, "config": {}, "name": "Flux.1 Krea Dev Text to Image", "inputNode": { "id": -10, "bounding": [ -1050, 426, 120, 100 ] }, "outputNode": { "id": -20, "bounding": [ -50, 420, 120, 60 ] }, "inputs": [ { "id": "c2515318-6e10-4ad9-9466-e6aa855bc849", "name": "text", "type": "STRING", "linkIds": [ 71 ], "pos": [ -950, 446 ] }, { "id": "09f20672-c8a3-4180-823a-5a6af0113e4f", "name": "width", "type": "INT", "linkIds": [ 72 ], "pos": [ -950, 466 ] }, { "id": "7f54c952-896e-4356-bfb2-970e1c8f2eb7", "name": "height", "type": "INT", "linkIds": [ 73 ], "pos": [ -950, 486 ] } ], "outputs": [ { "id": "5310184a-f0a2-405f-9917-dd2a352a4fac", "name": "IMAGE", "type": "IMAGE", "linkIds": [ 9 ], "localized_name": "IMAGE", "pos": [ -30, 440 ] } ], "widgets": [], "nodes": [ { "id": 40, "type": "DualCLIPLoader", "pos": [ -790, 300 ], "size": [ 270, 130 ], "flags": {}, "order": 0, "mode": 0, "inputs": [], "outputs": [ { "localized_name": "CLIP", "name": "CLIP", "type": "CLIP", "links": [ 64 ] } ], "properties": { "Node name for S&R": "DualCLIPLoader", "cnr_id": "comfy-core", "ver": "0.3.40", "models": [ { "name": "clip_l.safetensors", "url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors", "directory": "text_encoders" }, { "name": "t5xxl_fp16.safetensors", "url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors", "directory": "text_encoders" } ] }, "widgets_values": [ "clip_l.safetensors", "t5xxl_fp16.safetensors", "flux", "default" ] }, { "id": 39, "type": "VAELoader", "pos": [ -790, 480 ], "size": [ 270, 58 ], "flags": {}, "order": 1, "mode": 0, "inputs": [], "outputs": [ { "localized_name": "VAE", 
"name": "VAE", "type": "VAE", "links": [ 58 ] } ], "properties": { "Node name for S&R": "VAELoader", "cnr_id": "comfy-core", "ver": "0.3.40", "models": [ { "name": "ae.safetensors", "url": "https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/resolve/main/split_files/vae/ae.safetensors", "directory": "vae" } ] }, "widgets_values": [ "ae.safetensors" ] }, { "id": 42, "type": "ConditioningZeroOut", "pos": [ -480, 470 ], "size": [ 204.134765625, 30 ], "flags": { "collapsed": false }, "order": 6, "mode": 0, "inputs": [ { "localized_name": "conditioning", "name": "conditioning", "type": "CONDITIONING", "link": 66 } ], "outputs": [ { "localized_name": "CONDITIONING", "name": "CONDITIONING", "type": "CONDITIONING", "links": [ 63 ] } ], "properties": { "Node name for S&R": "ConditioningZeroOut", "cnr_id": "comfy-core", "ver": "0.3.40" }, "widgets_values": [] }, { "id": 8, "type": "VAEDecode", "pos": [ -240, 480 ], "size": [ 210, 46 ], "flags": { "collapsed": true }, "order": 3, "mode": 0, "inputs": [ { "localized_name": "samples", "name": "samples", "type": "LATENT", "link": 52 }, { "localized_name": "vae", "name": "vae", "type": "VAE", "link": 58 } ], "outputs": [ { "localized_name": "IMAGE", "name": "IMAGE", "type": "IMAGE", "slot_index": 0, "links": [ 9 ] } ], "properties": { "Node name for S&R": "VAEDecode", "cnr_id": "comfy-core", "ver": "0.3.40" }, "widgets_values": [] }, { "id": 38, "type": "UNETLoader", "pos": [ -790, 160 ], "size": [ 270, 82 ], "flags": {}, "order": 2, "mode": 0, "inputs": [], "outputs": [ { "localized_name": "MODEL", "name": "MODEL", "type": "MODEL", "links": [ 61 ] } ], "properties": { "Node name for S&R": "UNETLoader", "cnr_id": "comfy-core", "ver": "0.3.40", "models": [ { "name": "flux1-krea-dev_fp8_scaled.safetensors", "url": "https://huggingface.co/Comfy-Org/FLUX.1-Krea-dev_ComfyUI/resolve/main/split_files/diffusion_models/flux1-krea-dev_fp8_scaled.safetensors", "directory": "diffusion_models" } ] }, "widgets_values": [ 
"flux1-krea-dev_fp8_scaled.safetensors", "default" ] }, { "id": 45, "type": "CLIPTextEncode", "pos": [ -460, 180 ], "size": [ 330, 210 ], "flags": {}, "order": 7, "mode": 0, "inputs": [ { "localized_name": "clip", "name": "clip", "type": "CLIP", "link": 64 }, { "localized_name": "text", "name": "text", "type": "STRING", "widget": { "name": "text" }, "link": 71 } ], "outputs": [ { "localized_name": "CONDITIONING", "name": "CONDITIONING", "type": "CONDITIONING", "links": [ 65, 66 ] } ], "properties": { "Node name for S&R": "CLIPTextEncode", "cnr_id": "comfy-core", "ver": "0.3.47" }, "widgets_values": [ "Highly realistic portrait of a Nordic woman with blonde hair and blue eyes, gaze sharp and intellectual. The lighting should reflect the unique coolness of Northern Europe. Outfit is minimalist and modern, background is blurred in cool tones. Needs to perfectly capture the characteristics of a Scandinavian woman. solo, Centered composition\n" ] }, { "id": 27, "type": "EmptySD3LatentImage", "pos": [ -790, 640 ], "size": [ 270, 120 ], "flags": {}, "order": 4, "mode": 0, "inputs": [ { "localized_name": "width", "name": "width", "type": "INT", "widget": { "name": "width" }, "link": 72 }, { "localized_name": "height", "name": "height", "type": "INT", "widget": { "name": "height" }, "link": 73 } ], "outputs": [ { "localized_name": "LATENT", "name": "LATENT", "type": "LATENT", "slot_index": 0, "links": [ 51 ] } ], "properties": { "Node name for S&R": "EmptySD3LatentImage", "cnr_id": "comfy-core", "ver": "0.3.40" }, "widgets_values": [ 1024, 1024, 1 ] }, { "id": 31, "type": "KSampler", "pos": [ -460, 560 ], "size": [ 315, 262 ], "flags": {}, "order": 5, "mode": 0, "inputs": [ { "localized_name": "model", "name": "model", "type": "MODEL", "link": 61 }, { "localized_name": "positive", "name": "positive", "type": "CONDITIONING", "link": 65 }, { "localized_name": "negative", "name": "negative", "type": "CONDITIONING", "link": 63 }, { "localized_name": "latent_image", "name": 
"latent_image", "type": "LATENT", "link": 51 } ], "outputs": [ { "localized_name": "LATENT", "name": "LATENT", "type": "LATENT", "slot_index": 0, "links": [ 52 ] } ], "properties": { "Node name for S&R": "KSampler", "cnr_id": "comfy-core", "ver": "0.3.40" }, "widgets_values": [ 277251746703202, "randomize", 20, 1, "euler", "simple", 1 ] } ], "groups": [ { "id": 1, "title": "Step 1 - Load Models Here", "bounding": [ -800, 90, 300, 460 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 2, "title": "Step 2 - Image Size", "bounding": [ -800, 570, 300, 200 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 3, "title": "Step 3 - Prompt", "bounding": [ -480, 90, 360, 333.6000061035156 ], "color": "#3f789e", "font_size": 24, "flags": {} } ], "links": [ { "id": 66, "origin_id": 45, "origin_slot": 0, "target_id": 42, "target_slot": 0, "type": "CONDITIONING" }, { "id": 52, "origin_id": 31, "origin_slot": 0, "target_id": 8, "target_slot": 0, "type": "LATENT" }, { "id": 58, "origin_id": 39, "origin_slot": 0, "target_id": 8, "target_slot": 1, "type": "VAE" }, { "id": 61, "origin_id": 38, "origin_slot": 0, "target_id": 31, "target_slot": 0, "type": "MODEL" }, { "id": 65, "origin_id": 45, "origin_slot": 0, "target_id": 31, "target_slot": 1, "type": "CONDITIONING" }, { "id": 63, "origin_id": 42, "origin_slot": 0, "target_id": 31, "target_slot": 2, "type": "CONDITIONING" }, { "id": 51, "origin_id": 27, "origin_slot": 0, "target_id": 31, "target_slot": 3, "type": "LATENT" }, { "id": 64, "origin_id": 40, "origin_slot": 0, "target_id": 45, "target_slot": 0, "type": "CLIP" }, { "id": 9, "origin_id": 8, "origin_slot": 0, "target_id": -20, "target_slot": 0, "type": "IMAGE" }, { "id": 71, "origin_id": -10, "origin_slot": 0, "target_id": 45, "target_slot": 1, "type": "STRING" }, { "id": 72, "origin_id": -10, "origin_slot": 1, "target_id": 27, "target_slot": 0, "type": "INT" }, { "id": 73, "origin_id": -10, "origin_slot": 2, "target_id": 27, "target_slot": 1, "type": 
"INT" } ], "extra": { "workflowRendererVersion": "LG" } } ] }, "config": {}, "extra": { "ds": { "scale": 1.1249414187320026, "offset": [ 862.8436972387228, 288.6851049812555 ] }, "frontendVersion": "1.38.6", "workflowRendererVersion": "LG", "VHS_latentpreview": false, "VHS_latentpreviewrate": 0, "VHS_MetadataImage": true, "VHS_KeepIntermediate": true }, "version": 0.4 }