{ "last_node_id": 59, "last_link_id": 91, "nodes": [ { "id": 10, "type": "VAELoader", "pos": [ 80, 450 ], "size": { "0": 311.81634521484375, "1": 60.429901123046875 }, "flags": {}, "order": 0, "mode": 0, "outputs": [ { "name": "VAE", "type": "VAE", "links": [ 12, 60 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "VAELoader" }, "widgets_values": [ "ae.sft" ] }, { "id": 11, "type": "DualCLIPLoader", "pos": [ 80, 300 ], "size": { "0": 315, "1": 106 }, "flags": {}, "order": 1, "mode": 0, "outputs": [ { "name": "CLIP", "type": "CLIP", "links": [ 65 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "DualCLIPLoader" }, "widgets_values": [ "t5xxl_fp8_e4m3fn.safetensors", "clip_l.safetensors", "flux" ] }, { "id": 36, "type": "ModelSamplingFlux", "pos": [ 790, 140 ], "size": { "0": 315, "1": 130 }, "flags": {}, "order": 15, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 64 } ], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [ 54, 55, 58 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "ModelSamplingFlux" }, "widgets_values": [ 1.1500000000000001, 0.5, 1024, 1024 ], "shape": 1 }, { "id": 16, "type": "KSamplerSelect", "pos": [ 1150, 240 ], "size": { "0": 315, "1": 58 }, "flags": {}, "order": 2, "mode": 0, "outputs": [ { "name": "SAMPLER", "type": "SAMPLER", "links": [ 19 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "KSamplerSelect" }, "widgets_values": [ "euler" ] }, { "id": 17, "type": "BasicScheduler", "pos": [ 1150, 360 ], "size": { "0": 315, "1": 106 }, "flags": {}, "order": 19, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 55, "slot_index": 0 } ], "outputs": [ { "name": "SIGMAS", "type": "SIGMAS", "links": [ 20 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "BasicScheduler" }, "widgets_values": [ "simple", 26, 1 ] }, { "id": 22, "type": "BasicGuider", "pos": [ 1150, 140 ], "size": { "0": 320, "1": 50 }, "flags": {}, "order": 25, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 54, "slot_index": 0 }, { "name": "conditioning", "type": "CONDITIONING", "link": 42, "slot_index": 1 } ], "outputs": [ { "name": "GUIDER", "type": "GUIDER", "links": [ 30 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "BasicGuider" } }, { "id": 6, "type": "CLIPTextEncode", "pos": [ 460, 320 ], "size": { "0": 630, "1": 240 }, "flags": { "collapsed": true }, "order": 22, "mode": 0, "inputs": [ { "name": "clip", "type": "CLIP", "link": 66 }, { "name": "text", "type": "STRING", "link": 85, "widget": { "name": "text" } } ], "outputs": [ { "name": "CONDITIONING", "type": "CONDITIONING", "links": [ 41, 59 ], "slot_index": 0 } ], "title": "CLIP Text Encode (Positive Prompt)", "properties": { "Node name for S&R": "CLIPTextEncode" }, "widgets_values": [ "Create me an photograph close-up shot of a sensual instagram model at home sitting on her bedroom wearing white top. shot on Canon 1dx Mkii. Natural lighting through the curtains. She has red hair and sitting on the bed. Wearing a white t-shirt with “follow @HARMEETGABHA” printed on it" ], "color": "#232", "bgcolor": "#353" }, { "id": 25, "type": "RandomNoise", "pos": [ 80, 710 ], "size": { "0": 315, "1": 82 }, "flags": {}, "order": 3, "mode": 0, "outputs": [ { "name": "NOISE", "type": "NOISE", "links": [ 37 ], "shape": 3 } ], "properties": { "Node name for S&R": "RandomNoise" }, "widgets_values": [ 3203350277, "fixed" ], "color": "#2a363b", "bgcolor": "#3f5159" }, { "id": 26, "type": "FluxGuidance", "pos": [ 800, 320 ], "size": { "0": 300, "1": 60 }, "flags": {}, "order": 24, "mode": 0, "inputs": [ { "name": "conditioning", "type": "CONDITIONING", "link": 41 } ], "outputs": [ { "name": "CONDITIONING", "type": "CONDITIONING", "links": [ 42 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "FluxGuidance" }, "widgets_values": [ 10 ], "color": "#233", "bgcolor": "#355" }, { "id": 42, "type": "LoraLoader", "pos": [ 440, 140 ], "size": { "0": 315, "1": 126 }, "flags": {}, "order": 13, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 63 }, { "name": "clip", "type": "CLIP", "link": 65 } ], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [ 64 ], "shape": 3, "slot_index": 0 }, { "name": "CLIP", "type": "CLIP", "links": [ 66, 67 ], "shape": 3, "slot_index": 1 } ], "properties": { "Node name for S&R": "LoraLoader" }, "widgets_values": [ "flux\\flux_realism_lora.safetensors", 0.8, 1 ] }, { "id": 41, "type": "CLIPTextEncode", "pos": [ 450, 740 ], "size": { "0": 640, "1": 90 }, "flags": {}, "order": 16, "mode": 0, "inputs": [ { "name": "clip", "type": "CLIP", "link": 67 } ], "outputs": [ { "name": "CONDITIONING", "type": "CONDITIONING", "links": [ 68 ], "slot_index": 0 } ], "title": "CLIP Text Encode (Positive Prompt)", "properties": { "Node name for S&R": "CLIPTextEncode" }, "widgets_values": [ "(worst quality, low quality, normal quality:2) " ], "color": "#322", "bgcolor": "#533" }, { "id": 55, "type": "GlobalSeed //Inspire", "pos": [ 90, 1010 ], "size": { "0": 315, "1": 130 }, "flags": {}, "order": 4, "mode": 0, "properties": { "Node name for S&R": "GlobalSeed //Inspire" }, "widgets_values": [ 3203350277, true, "randomize", 2811559123 ] }, { "id": 13, "type": "SamplerCustomAdvanced", "pos": [ 1160, 520 ], "size": { "0": 300, "1": 310 }, "flags": {}, "order": 26, "mode": 0, "inputs": [ { "name": "noise", "type": "NOISE", "link": 37, "slot_index": 0 }, { "name": "guider", "type": "GUIDER", "link": 30, "slot_index": 1 }, { "name": "sampler", "type": "SAMPLER", "link": 19, "slot_index": 2 }, { "name": "sigmas", "type": "SIGMAS", "link": 20, "slot_index": 3 }, { "name": "latent_image", "type": "LATENT", "link": 89, "slot_index": 4 } ], "outputs": [ { "name": "output", "type": "LATENT", "links": [ 24 ], "shape": 3, "slot_index": 0 }, { "name": "denoised_output", "type": "LATENT", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "SamplerCustomAdvanced" } }, { "id": 8, "type": "VAEDecode", "pos": [ 1170, 900 ], "size": { "0": 290, "1": 50 }, "flags": { "collapsed": false }, "order": 27, "mode": 0, "inputs": [ { "name": "samples", "type": "LATENT", "link": 24 }, { "name": "vae", "type": "VAE", "link": 12 } ], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "links": [ 56, 70, 72 ], "slot_index": 0 } ], "properties": { "Node name for S&R": "VAEDecode" } }, { "id": 58, "type": "Note", "pos": [ 1830, 1390 ], "size": { "0": 810, "1": 410 }, "flags": {}, "order": 5, "mode": 0, "properties": { "text": "" }, "widgets_values": [ "Image based prompt for Flux Dev with Upscaler\n\nThe workflow \nUpscaling is done using Ultimate SD Upscaler which is utilising Flux Dev model to upscale (not SD or SDXL). So it leverages the true power of this model and supports the use of a LoRA. You do need to get the select your choise of Upscaler model.\n\n\n\nMore workflows are shared at https://weirdwonderfulai.art - search \"flux\" to find it.\n\n" ], "color": "#432", "bgcolor": "#653" }, { "id": 12, "type": "UNETLoader", "pos": [ 80, 150 ], "size": { "0": 315, "1": 82 }, "flags": {}, "order": 6, "mode": 0, "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [ 63 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "UNETLoader" }, "widgets_values": [ "flux1-dev.sft", "fp8_e4m3fn" ], "color": "#223", "bgcolor": "#335" }, { "id": 43, "type": "PreviewImage", "pos": [ 1910, 70 ], "size": { "0": 530, "1": 500 }, "flags": {}, "order": 29, "mode": 0, "inputs": [ { "name": "images", "type": "IMAGE", "link": 72 } ], "title": "Initial Image", "properties": { "Node name for S&R": "PreviewImage" } }, { "id": 44, "type": "Image Comparer (rgthree)", "pos": { "0": 1910, "1": 630, "2": 0, "3": 0, "4": 0, "5": 0, "6": 0, "7": 0, "8": 0, "9": 0 }, "size": { "0": 1070, "1": 690 }, "flags": {}, "order": 30, "mode": 0, "inputs": [ { "name": "image_a", "type": "IMAGE", "link": 70, "dir": 3 }, { "name": "image_b", "type": "IMAGE", "link": 71, "dir": 3 } ], "outputs": [], "properties": { "comparer_mode": "Slide" }, "widgets_values": [ [ { "name": "A", "selected": true, "url": "/view?filename=rgthree.compare._temp_lpftg_00047_.png&type=temp&subfolder=&rand=0.40031250342958824" }, { "name": "B", "selected": true, "url": "/view?filename=rgthree.compare._temp_lpftg_00048_.png&type=temp&subfolder=&rand=0.9201738490331748" } ] ] }, { "id": 9, "type": "SaveImage", "pos": [ 2470, 70 ], "size": { "0": 510, "1": 490 }, "flags": {}, "order": 31, "mode": 0, "inputs": [ { "name": "images", "type": "IMAGE", "link": 73 } ], "title": "Upscaled Image (Save)", "properties": { "Node name for S&R": "SaveImage" }, "widgets_values": [ "%date:yyyy-MM-dd%/flux" ] }, { "id": 39, "type": "UltimateSDUpscaleCustomSample", "pos": [ 1500, 140 ], "size": { "0": 350, "1": 1010 }, "flags": {}, "order": 28, "mode": 0, "inputs": [ { "name": "image", "type": "IMAGE", "link": 56 }, { "name": "model", "type": "MODEL", "link": 58, "slot_index": 1 }, { "name": "positive", "type": "CONDITIONING", "link": 59 }, { "name": "negative", "type": "CONDITIONING", "link": 68 }, { "name": "vae", "type": "VAE", "link": 60 }, { "name": "upscale_model", "type": "UPSCALE_MODEL", "link": 57 }, { "name": "custom_sampler", "type": "SAMPLER", "link": null }, { "name": "custom_sigmas", "type": "SIGMAS", "link": null } ], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "links": [ 71, 73 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "UltimateSDUpscaleCustomSample" }, "widgets_values": [ 2, 3203350277, "fixed", 20, 1, "euler", "simple", 0.3, "Linear", 1024, 1024, 8, 32, "None", 1, 64, 8, 16, true, false ] }, { "id": 32, "type": "Empty Latent Ratio Select SDXL", "pos": [ 80, 570 ], "size": { "0": 319.20001220703125, "1": 82 }, "flags": {}, "order": 7, "mode": 0, "outputs": [ { "name": "LATENT", "type": "LATENT", "links": [ 89 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "Empty Latent Ratio Select SDXL" }, "widgets_values": [ "2:3 [832x1216 portrait]", 1 ], "color": "#562c62", "bgcolor": "#42184e", "shape": 1 }, { "id": 40, "type": "UpscaleModelLoader", "pos": [ 80, 860 ], "size": { "0": 320, "1": 70 }, "flags": {}, "order": 8, "mode": 0, "outputs": [ { "name": "UPSCALE_MODEL", "type": "UPSCALE_MODEL", "links": [ 57 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "UpscaleModelLoader" }, "widgets_values": [ "4xNMKDSuperscale_4xNMKDSuperscale.pth" ] }, { "id": 53, "type": "JWStringConcat", "pos": [ 1020, 1310 ], "size": { "0": 280, "1": 54 }, "flags": {}, "order": 20, "mode": 0, "inputs": [ { "name": "a", "type": "STRING", "link": 83, "widget": { "name": "a" } }, { "name": "b", "type": "STRING", "link": 91, "widget": { "name": "b" } } ], "outputs": [ { "name": "STRING", "type": "STRING", "links": [ 84 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "JWStringConcat" }, "widgets_values": [ "", ", " ] }, { "id": 48, "type": "JWStringConcat", "pos": [ 710, 1500 ], "size": { "0": 280, "1": 54 }, "flags": {}, "order": 18, "mode": 0, "inputs": [ { "name": "a", "type": "STRING", "link": 81, "widget": { "name": "a" } }, { "name": "b", "type": "STRING", "link": 75, "widget": { "name": "b" } } ], "outputs": [ { "name": "STRING", "type": "STRING", "links": [ 83 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "JWStringConcat" }, "widgets_values": [ "", ", " ] }, { "id": 51, "type": "Switch any [Crystools]", "pos": [ 1340, 1310 ], "size": { "0": 280, "1": 80 }, "flags": {}, "order": 21, "mode": 0, "inputs": [ { "name": "on_true", "type": "*", "link": 84 }, { "name": "on_false", "type": "*", "link": 78 } ], "outputs": [ { "name": "*", "type": "*", "links": [ 85, 90 ], "shape": 3, "slot_index": 0 } ], "title": "Use LLMs for Prompt", "properties": { "Node name for S&R": "Switch any [Crystools]" }, "widgets_values": [ true ] }, { "id": 59, "type": "ShowText|pysssss", "pos": [ 710, 1610 ], "size": { "0": 600, "1": 150 }, "flags": {}, "order": 23, "mode": 0, "inputs": [ { "name": "text", "type": "STRING", "link": 90, "widget": { "name": "text" } } ], "outputs": [ { "name": "STRING", "type": "STRING", "links": null, "shape": 6 } ], "title": "Prompt Preview", "properties": { "Node name for S&R": "ShowText|pysssss" }, "widgets_values": [ "", "The image is a black and white photograph of a man standing in the middle of a foggy landscape. He is wearing a long robe and has a skull-like mask covering his face. The man is holding a staff with a skull on it, which he is holding. The background is filled with fog and there are a few other people walking in the distance. The overall mood of the image is eerie and mysterious.There is a man that is standing in the dark with a cell phone, pictogram, black oled background, ocatane, alterd carbon, libra symbol, glagolitic glyph, teonanacatl glyph, silver iodide, atari logo, ankh symbol, quinacridone magenta, large nike logo, iconic logo symbol" ] }, { "id": 57, "type": "Note", "pos": [ 1350, 1520 ], "size": { "0": 420, "1": 270 }, "flags": {}, "order": 9, "mode": 0, "properties": { "text": "" }, "widgets_values": [ "This allows for Image to Prompt using Florence 2 Model + Clip which interrogates the Image and creates the prompt using Natuarl Language.\n\nYou can combine with Manual Prompt as prefix if you enter something in there for the prompt to include. This could be a \"Trigger\" word of a lora for example. \n\nThis kind of IPADAPTER until we have IPAdapter for Flux DEV.\n\n\nTO ENABLE/DISABLE\n\nSet the Use LLM for PROMPT switch.\nTRUE means the Image is used and prompt is created by combining the image prompt and manual prompt\n\nFALSE means it does not use the prompt generated from the Image but only your Manual Prompt." ], "color": "#432", "bgcolor": "#653" }, { "id": 50, "type": "Primitive string multiline [Crystools]", "pos": [ 450, 900 ], "size": { "0": 650, "1": 250 }, "flags": {}, "order": 10, "mode": 0, "outputs": [ { "name": "string", "type": "STRING", "links": [ 78, 81 ], "shape": 3, "slot_index": 0 } ], "title": "Manual Prompt (positive)", "properties": { "Node name for S&R": "Primitive string multiline [Crystools]" }, "widgets_values": [ "" ], "color": "#232", "bgcolor": "#353" }, { "id": 47, "type": "Florence2Run", "pos": [ 400, 1290 ], "size": { "0": 280, "1": 352 }, "flags": {}, "order": 14, "mode": 0, "inputs": [ { "name": "image", "type": "IMAGE", "link": 86, "slot_index": 0 }, { "name": "florence2_model", "type": "FL2MODEL", "link": 74 } ], "outputs": [ { "name": "image", "type": "IMAGE", "links": [ 76 ], "shape": 3 }, { "name": "mask", "type": "MASK", "links": null, "shape": 3 }, { "name": "caption", "type": "STRING", "links": [ 75 ], "shape": 3, "slot_index": 2 }, { "name": "data", "type": "JSON", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "Florence2Run" }, "widgets_values": [ "", "more_detailed_caption", true, false, 1024, 3, true, "", 1, "randomize" ] }, { "id": 49, "type": "CLIP_Interrogator", "pos": [ 700, 1290 ], "size": { "0": 300, "1": 150 }, "flags": {}, "order": 17, "mode": 0, "inputs": [ { "name": "image", "type": "IMAGE", "link": 76 } ], "outputs": [ { "name": "full_prompt", "type": "STRING", "links": [ 91 ], "shape": 3, "slot_index": 0 }, { "name": "blip_caption", "type": "STRING", "links": [], "shape": 3, "slot_index": 1 } ], "properties": { "Node name for S&R": "CLIP_Interrogator" }, "widgets_values": [ "fast", false, true, "clip_interrogator_prompt.txt" ] }, { "id": 54, "type": "LoadImage", "pos": [ 50, 1440 ], "size": { "0": 320, "1": 314 }, "flags": {}, "order": 11, "mode": 0, "outputs": [ { "name": "IMAGE", "type": "IMAGE", "links": [ 86 ], "shape": 3 }, { "name": "MASK", "type": "MASK", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "LoadImage" }, "widgets_values": [ "gaspard4298_elder_elf_necromancer_with_ancient_staff_and_black__563ce69b-6b3d-4f17-a7a5-de3698cb0221.png", "image" ] }, { "id": 46, "type": "DownloadAndLoadFlorence2Model", "pos": [ 60, 1280 ], "size": { "0": 280, "1": 110 }, "flags": {}, "order": 12, "mode": 0, "inputs": [ { "name": "lora", "type": "PEFTLORA", "link": null } ], "outputs": [ { "name": "florence2_model", "type": "FL2MODEL", "links": [ 74 ], "shape": 3, "slot_index": 0 } ], "properties": { "Node name for S&R": "DownloadAndLoadFlorence2Model" }, "widgets_values": [ "microsoft/Florence-2-base", "fp16", "sdpa" ] } ], "links": [ [ 12, 10, 0, 8, 1, "VAE" ], [ 19, 16, 0, 13, 2, "SAMPLER" ], [ 20, 17, 0, 13, 3, "SIGMAS" ], [ 24, 13, 0, 8, 0, "LATENT" ], [ 30, 22, 0, 13, 1, "GUIDER" ], [ 37, 25, 0, 13, 0, "NOISE" ], [ 41, 6, 0, 26, 0, "CONDITIONING" ], [ 42, 26, 0, 22, 1, "CONDITIONING" ], [ 54, 36, 0, 22, 0, "MODEL" ], [ 55, 36, 0, 17, 0, "MODEL" ], [ 56, 8, 0, 39, 0, "IMAGE" ], [ 57, 40, 0, 39, 5, "UPSCALE_MODEL" ], [ 58, 36, 0, 39, 1, "MODEL" ], [ 59, 6, 0, 39, 2, "CONDITIONING" ], [ 60, 10, 0, 39, 4, "VAE" ], [ 63, 12, 0, 42, 0, "MODEL" ], [ 64, 42, 0, 36, 0, "MODEL" ], [ 65, 11, 0, 42, 1, "CLIP" ], [ 66, 42, 1, 6, 0, "CLIP" ], [ 67, 42, 1, 41, 0, "CLIP" ], [ 68, 41, 0, 39, 3, "CONDITIONING" ], [ 70, 8, 0, 44, 0, "IMAGE" ], [ 71, 39, 0, 44, 1, "IMAGE" ], [ 72, 8, 0, 43, 0, "IMAGE" ], [ 73, 39, 0, 9, 0, "IMAGE" ], [ 74, 46, 0, 47, 1, "FL2MODEL" ], [ 75, 47, 2, 48, 1, "STRING" ], [ 76, 47, 0, 49, 0, "IMAGE" ], [ 78, 50, 0, 51, 1, "*" ], [ 81, 50, 0, 48, 0, "STRING" ], [ 83, 48, 0, 53, 0, "STRING" ], [ 84, 53, 0, 51, 0, "*" ], [ 85, 51, 0, 6, 1, "STRING" ], [ 86, 54, 0, 47, 0, "IMAGE" ], [ 89, 32, 0, 13, 4, "LATENT" ], [ 90, 51, 0, 59, 0, "STRING" ], [ 91, 49, 0, 53, 1, "STRING" ] ], "groups": [ { "title": "Image to Prompt", "bounding": [ 20, 1200, 1770, 610 ], "color": "#3f789e", "font_size": 24, "locked": false }, { "title": "Image Generation", "bounding": [ 20, 30, 1860, 1150 ], "color": "#b58b2a", "font_size": 24, "locked": false } ], "config": {}, "extra": { "ds": { "scale": 0.5989500000000003, "offset": [ 533.1139646910282, -174.96213134048514 ] } }, "version": 0.4 }