{ "last_node_id": 47, "last_link_id": 2, "nodes": [ { "id": 18, "type": "UpscaleModelLoader", "pos": [ 2220, 73 ], "size": [ 498.45697021484375, 73.16008758544922 ], "flags": {}, "order": 0, "mode": 0, "inputs": [], "outputs": [ { "name": "UPSCALE_MODEL", "type": "UPSCALE_MODEL", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "UpscaleModelLoader" }, "widgets_values": [ "001_classicalSR_DF2K_s64w8_SwinIR-M_x2.pth" ], "color": "#323", "bgcolor": "#535" }, { "id": 9, "type": "ControlNetLoader", "pos": [ 20, 990 ], "size": [ 483.23199462890625, 69.4600830078125 ], "flags": {}, "order": 1, "mode": 0, "inputs": [], "outputs": [ { "name": "CONTROL_NET", "type": "CONTROL_NET", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "ControlNetLoader" }, "widgets_values": [ "IPAdapter\\controlnetxlCNXL_h94IpAdapter.safetensors" ], "color": "#323", "bgcolor": "#535" }, { "id": 1, "type": "CheckpointLoaderSimple", "pos": [ 22, 60 ], "size": [ 504.44586181640625, 106.83826446533203 ], "flags": {}, "order": 2, "mode": 0, "inputs": [], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": null, "shape": 3 }, { "name": "CLIP", "type": "CLIP", "links": null, "shape": 3 }, { "name": "VAE", "type": "VAE", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "CheckpointLoaderSimple" }, "widgets_values": [ "SD 1.5\\absolutereality_v181.safetensors" ], "color": "#323", "bgcolor": "#535" }, { "id": 5, "type": "LoraLoader", "pos": [ 1114, 65 ], "size": [ 496.5367431640625, 127.54281616210938 ], "flags": {}, "order": 3, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": null }, { "name": "clip", "type": "CLIP", "link": null } ], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": null, "shape": 3 }, { "name": "CLIP", "type": "CLIP", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "LoraLoader" }, "widgets_values": [ "HT_Sam_v01.safetensors", 1, 1 ], "color": "#323", "bgcolor": "#535" }, { "id": 3, "type": "VAELoader", "pos": [ 561, 67 ], "size": [ 505.5382385253906, 58 ], "flags": {}, "order": 4, "mode": 0, "inputs": [], "outputs": [ { "name": "VAE", "type": "VAE", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "VAELoader" }, "widgets_values": [ "vae-ft-mse-840000-ema-pruned.safetensors" ], "color": "#323", "bgcolor": "#535" }, { "id": 8, "type": "Note", "pos": [ 1671, 248 ], "size": [ 499.9104309082031, 272.77294921875 ], "flags": {}, "order": 5, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ "===================\nmodels/CLIP\n====================\nFor use with certain models, CLIP files go here\n\nhttps://huggingface.co/comfyanonymous/clip_vision_g\n\nhttps://huggingface.co/comfyanonymous/flux_text_encoders/tree/main" ], "color": "#432", "bgcolor": "#653" }, { "id": 29, "type": "Note", "pos": [ 2226, 243 ], "size": [ 485.3605041503906, 269.2041015625 ], "flags": {}, "order": 6, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ "===================\nmodels/\n====================\nThis could be a range of folders - ESRGAN, RESRGAN, DAT, SwinIR, OmniIR, etc, with the upscaler type in the folder of the same name\n\nhttps://openmodeldb.info/" ], "color": "#432", "bgcolor": "#653" }, { "id": 6, "type": "Note", "pos": [ 1113, 249 ], "size": [ 501.7325439453125, 272.8981628417969 ], "flags": {}, "order": 7, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ 
"==================\nmodels/lora\n==================\nAny LoRAs you download or create go into this folder" ], "color": "#432", "bgcolor": "#653" }, { "id": 4, "type": "Note", "pos": [ 557, 252 ], "size": [ 515.8467407226562, 270.8373107910156 ], "flags": {}, "order": 8, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ "=======================\nmodels/VAE\n=======================\nAny VAE files you have go in this folder, although many normal checkpoints have Baked VAE..\n\nSD 1.5 VAEs\nhttps://civitai.com/models/276082/vae-ft-mse-840000-ema-pruned-or-840000-or-840k-sd15-vae\n\nhttps://civitai.com/models/115217/vae-ft-ema-560000-ema\n\nSDXL VAE\nhttps://civitai.com/models/296576/sdxl-vae?modelVersionId=333245\n" ], "color": "#432", "bgcolor": "#653" }, { "id": 32, "type": "Note", "pos": [ 562, 1253 ], "size": [ 504.2795104980469, 241.4446563720703 ], "flags": {}, "order": 9, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ "====================\nmodels/Adetailer\n=====================\nThese models should autodownload from node installation, but if not check:\n\nhttps://huggingface.co/Bingsu/adetailer/tree/main" ], "color": "#432", "bgcolor": "#653" }, { "id": 39, "type": "Note", "pos": [ 1693, 1244 ], "size": [ 499.4045104980469, 246.3196563720703 ], "flags": {}, "order": 10, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ "===========================\nmodels/clip_vision\n===========================\nThese are needed for Comfy UI workflows involving IP Adapter nodes\nClip Vision model downloads:\nhttps://huggingface.co/comfyanonymous/clip_vision_g/tree/main\n\nhttps://huggingface.co/laion/CLIP-ViT-H-14-laion2B-s32B-b79K/blob/main/open_clip_pytorch_model.safetensors\n\nhttps://huggingface.co/laion/CLIP-ViT-bigG-14-laion2B-39B-b160k/blob/main/open_clip_pytorch_model.safetensors\n\nMake sure to rename those models the names you see on the download page, ie CLIP-ViT-H-14-laion2B-s32B-b79K.safetensors for example, as you will have more than one in a folder, models/clip_vision" ], "color": "#432", "bgcolor": "#653" }, { "id": 40, "type": "Note", "pos": [ 2254, 1239 ], "size": [ 499.4045104980469, 246.3196563720703 ], "flags": {}, "order": 11, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ "=================\nmodels/AnimateDiff\n=================\nFor use with the Animated Diff video nodes\n\nhttps://huggingface.co/guoyww/animatediff/tree/main\n\nNote! Any files that say \"LoRA\" place in models/LoRA!!" 
], "color": "#432", "bgcolor": "#653" }, { "id": 16, "type": "UltralyticsDetectorProvider", "pos": [ 560, 990 ], "size": [ 499.76947021484375, 85.8913345336914 ], "flags": {}, "order": 12, "mode": 0, "inputs": [], "outputs": [ { "name": "BBOX_DETECTOR", "type": "BBOX_DETECTOR", "links": null, "slot_index": 0, "shape": 3 }, { "name": "SEGM_DETECTOR", "type": "SEGM_DETECTOR", "links": null, "slot_index": 1, "shape": 3 } ], "properties": { "Node name for S&R": "UltralyticsDetectorProvider" }, "widgets_values": [ "bbox/deepfashion2_yolov8s-seg.pt" ], "color": "#323", "bgcolor": "#535" }, { "id": 17, "type": "SAMLoader", "pos": [ 560, 1120 ], "size": [ 508.79449462890625, 82 ], "flags": {}, "order": 13, "mode": 0, "inputs": [], "outputs": [ { "name": "SAM_MODEL", "type": "SAM_MODEL", "links": null, "slot_index": 0, "shape": 3 } ], "properties": { "Node name for S&R": "SAMLoader" }, "widgets_values": [ "sam_vit_b_01ec64.pth", "AUTO" ], "color": "#323", "bgcolor": "#535" }, { "id": 26, "type": "IPAdapterModelLoader", "pos": [ 1120, 1000 ], "size": [ 503.80072021484375, 58 ], "flags": {}, "order": 14, "mode": 0, "inputs": [], "outputs": [ { "name": "IPADAPTER", "type": "IPADAPTER", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "IPAdapterModelLoader" }, "widgets_values": [ "ip-adapter-full-face_sd15.bin" ], "color": "#323", "bgcolor": "#535" }, { "id": 27, "type": "CLIPVisionLoader", "pos": [ 1685, 1007 ], "size": [ 501.313232421875, 58 ], "flags": {}, "order": 15, "mode": 0, "inputs": [], "outputs": [ { "name": "CLIP_VISION", "type": "CLIP_VISION", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "CLIPVisionLoader" }, "widgets_values": [ "CLIP-ViT-H-14-laion2B-s32B-b79K.safetensors" ], "color": "#323", "bgcolor": "#535" }, { "id": 11, "type": "ADE_LoadAnimateDiffModel", "pos": [ 2247, 1010 ], "size": [ 489.3507385253906, 58.75383758544922 ], "flags": {}, "order": 16, "mode": 0, "inputs": [ { "name": "ad_settings", "type": "AD_SETTINGS", "link": null, "shape": 7 } ], "outputs": [ { "name": "MOTION_MODEL", "type": "MOTION_MODEL_ADE", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "ADE_LoadAnimateDiffModel" }, "widgets_values": [ "control_v11p_sd15_openpose_fp16.safetensors" ], "color": "#323", "bgcolor": "#535" }, { "id": 7, "type": "CLIPLoader", "pos": [ 1665, 66 ], "size": [ 499.8819885253906, 87.72258758544922 ], "flags": {}, "order": 17, "mode": 0, "inputs": [], "outputs": [ { "name": "CLIP", "type": "CLIP", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "CLIPLoader" }, "widgets_values": [ "clip_g.safetensors", "stable_diffusion" ], "color": "#323", "bgcolor": "#535" }, { "id": 43, "type": "Note", "pos": [ 562.4195556640625, 2115.77783203125 ], "size": [ 499.4045104980469, 246.3196563720703 ], "flags": {}, "order": 18, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ "=======================\nmodels/unet\n=======================\n\nhttps://huggingface.co/lllyasviel/ic-light/tree/main" ], "color": "#432", "bgcolor": "#653" }, { "id": 2, "type": "Note", "pos": [ 24, 251 ], "size": [ 468.30072021484375, 555.3663330078125 ], "flags": {}, "order": 19, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ "=======================\nmodels/Stable-diffusion\n=======================\nThis is where the main models you will be using go, also known as checkpoints. 
Only use .safetensors files for checkpoints.\nSuggested 1.5 models\nAbsolute Reality\nPage: https://civitai.com/models/81458/absolutereality\nModel DL: https://civitai.com/api/download/models/132760?type=Model&format=SafeTensor&size=pruned&fp=fp16\n\nDreamshaper\nPage: https://civitai.com/models/4384?modelVersionId=128713\nModel Download: https://civitai.com/api/download/models/128713?type=Model&format=SafeTensor&size=pruned&fp=fp16\n\n3d Animation Diffusion\nPage: https://civitai.com/models/118086/3d-animation-diffusion\nModel Download:\nhttps://civitai.com/api/download/models/128046?type=Model&format=SafeTensor&size=pruned&fp=fp16\n\nSuggested SDXL models\nRealistic Vision XL V6.0 B1\nPage: https://civitai.com/models/4201?modelVersionId=130072\nModel Download: https://civitai.com/api/download/models/130072?type=Model&format=SafeTensor&size=full&fp=fp16\n\nJuggernaut XL\nPage: https://civitai.com/models/133005?modelVersionId=456194\nModel Download: \nhttps://civitai.com/api/download/models/456194?type=Model&format=SafeTensor&size=full&fp=fp16\n\nDreamshaper XL (read its page for CFG and step requirements)\nPage: https://civitai.com/models/112902?modelVersionId=351306\nModel Download: \nhttps://civitai.com/api/download/models/351306?type=Model&format=SafeTensor&size=full&fp=fp16\n\nCrystal Clear XL\nModel Download:\nhttps://civitai.com/models/122822/crystal-clear-xl" ], "color": "#432", "bgcolor": "#653" }, { "id": 31, "type": "Note", "pos": [ 10, 1251 ], "size": [ 524.0444946289062, 253.9413299560547 ], "flags": {}, "order": 20, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ "================\nmodels/ControlNet\n================\nThese are the models used for the ControlNet extension in both A1111 and ComfyUI.\nDownload all files (.safetensors and .yaml) from these pages into the folder above (too many to list every link):\n\nSD 1.5 ControlNet\nDownload page:\nhttps://huggingface.co/lllyasviel/ControlNet-v1-1/tree/main\n\nSDXL ControlNet:\nPage: https://civitai.com/models/136070/controlnetxl-cnxl\nDownload the bdsqlsz models (download as many as you want; the main ones you need are listed below)\n(canny, depth, lineart-anime, mlsdv2, normal, normal--dsine, openpose, recolor, segment, segmentv2, sketch, softedge)" ], "color": "#432", "bgcolor": "#653" }, { "id": 33, "type": "Note", "pos": [ 1130, 1250 ], "size": [ 499.4045104980469, 246.3196563720703 ], "flags": {}, "order": 21, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ "==================================\nmodels/ipadapter AND models/ControlNet\n==================================\nThese models are needed for IP Adapter and go into two separate folders, one for ComfyUI (ipadapter) and one for A1111 (ControlNet)\nDownload Page:\nIP Adapter Models\nhttps://huggingface.co/h94/IP-Adapter/tree/main" ], "color": "#432", "bgcolor": "#653" }, { "id": 47, "type": "UNETLoader", "pos": [ 1206.419677734375, 1687.7781982421875 ], "size": [ 307.2070007324219, 82 ], "flags": {}, "order": 22, "mode": 0, "inputs": [], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [], "slot_index": 0, "shape": 3 } ], "properties": { "Node name for S&R": "UNETLoader" }, "widgets_values": [ "flux1-schnell.safetensors", "default" ], "color": "#323", "bgcolor": "#535" }, { "id": 44, "type": "Note", "pos": [ 1203.419677734375, 2121.77783203125 ], "size": [ 706.2864990234375, 577.294189453125 ], "flags": {}, "order": 23, "mode": 0, "inputs": [], "outputs": [], "properties": {
"text": "" }, "widgets_values": [ "=======================\nmodels/diffusion_models\n=======================\nThis is where the main FLUX models you will be using go\n\nLoad Diffuson Model:\nflux1-schnell.safetensors\nModel Page: https://huggingface.co/black-forest-labs/FLUX.1-schnell/tree/main\nDownload Link: https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/flux1-schnell.safetensors?download=true\n\n=======================\nmodels/CLIP\n=======================\nThis is where the main FLUX CLIP models you will be using go\n\nDualCLIPLoader\nt5xxl_fp16.safetensors\nModel Page: https://huggingface.co/comfyanonymous/flux_text_encoders/blob/main/t5xxl_fp16.safetensors\nDownload Link: https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors\n\nclip_l.safetensors\nModel Page: https://huggingface.co/comfyanonymous/flux_text_encoders/blob/main/clip_l.safetensors\nDownload Link: https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors\n\nLoad VAE\nae.safetensors\nModel Page: https://huggingface.co/black-forest-labs/FLUX.1-schnell/blob/main/ae.safetensors\nDownload Link: https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors\n\nFor new FLUX generations:\nLatent size always should be under 1414 on one side at least, preferably both\nUse 10 steps, denoise at 1.0\nClip_l prompt is comma separated prompt\nt5xxl is narrative sentence prompt\n\nFor FLUX refining pass\nuse 4 steps, denoise 0,25 or lower\ndo not use prompt\n\nFLUX samples with best results: \nEuler Beta\nHuen Normal\nDeis Beta\nIPNDM Beta\nDdim DDIM" ], "color": "#432", "bgcolor": "#653" }, { "id": 38, "type": "Note", "pos": [ 1174.6805419921875, -115.12338256835938 ], "size": [ 668.5661010742188, 97.46566009521484 ], "flags": {}, "order": 24, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ "This workflow is not meant to be run, as it will produce nothing.\n\nRather, this allows you to double-check your model placement. Remember to hit \"refresh\" to rescan model folders after placing one in the right location, as listed under each model." 
], "color": "#322", "bgcolor": "#533" }, { "id": 21, "type": "StableCascade_CheckpointLoader //Inspire", "pos": [ 9.759231567382812, 1743.6806640625 ], "size": [ 461.1070556640625, 295.6864929199219 ], "flags": {}, "order": 25, "mode": 0, "inputs": [], "outputs": [ { "name": "b_model", "type": "MODEL", "links": null, "shape": 3 }, { "name": "b_vae", "type": "VAE", "links": null, "shape": 3 }, { "name": "c_model", "type": "MODEL", "links": null, "shape": 3 }, { "name": "c_vae", "type": "VAE", "links": null, "shape": 3 }, { "name": "c_clip_vision", "type": "CLIP_VISION", "links": null, "shape": 3 }, { "name": "clip", "type": "CLIP", "links": null, "shape": 3 }, { "name": "key_b", "type": "STRING", "links": null, "shape": 3 }, { "name": "key_c", "type": "STRING", "links": null, "shape": 3 } ], "properties": { "Node name for S&R": "StableCascade_CheckpointLoader //Inspire" }, "widgets_values": [ "SD 1.5\\3dAnimationDiffusion_v10.safetensors", "", "SD 1.5\\3dAnimationDiffusion_v10.safetensors", "", "none" ], "color": "#323", "bgcolor": "#535" }, { "id": 41, "type": "Note", "pos": [ -1.1433944702148438, 2113.9345703125 ], "size": [ 468.20513916015625, 250.5357666015625 ], "flags": {}, "order": 26, "mode": 0, "inputs": [], "outputs": [], "properties": { "text": "" }, "widgets_values": [ "=======================\nmodels/Stable-diffusion\n=======================\nStable Cascade and Stable Diffusion 3 models go in the same place as 1.5 and SDXL" ], "color": "#432", "bgcolor": "#653" }, { "id": 42, "type": "LoadAndApplyICLightUnet", "pos": [ 605.2672729492188, 1741.22509765625 ], "size": [ 438.8240966796875, 58 ], "flags": {}, "order": 27, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": null, "label": "model" } ], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [], "slot_index": 0, "shape": 3, "label": "MODEL" } ], "properties": { "Node name for S&R": "LoadAndApplyICLightUnet" }, "widgets_values": [ "IC-Light/iclight_sd15_fc.safetensors" ], "color": "#323", "bgcolor": "#535" }, { "id": 46, "type": "DualCLIPLoader", "pos": [ 1544.0474853515625, 1689.77685546875 ], "size": [ 302.6738586425781, 106 ], "flags": {}, "order": 28, "mode": 0, "inputs": [], "outputs": [ { "name": "CLIP", "type": "CLIP", "links": [], "slot_index": 0, "shape": 3 } ], "properties": { "Node name for S&R": "DualCLIPLoader" }, "widgets_values": [ "t5xxl_fp16.safetensors", "clip_l.safetensors", "flux" ], "color": "#323", "bgcolor": "#535" }, { "id": 45, "type": "VAELoader", "pos": [ 1213.83984375, 1827.9207763671875 ], "size": [ 301.9736022949219, 58 ], "flags": {}, "order": 29, "mode": 0, "inputs": [], "outputs": [ { "name": "VAE", "type": "VAE", "links": [], "slot_index": 0, "shape": 3 } ], "properties": { "Node name for S&R": "VAELoader" }, "widgets_values": [ "ae.safetensors" ], "color": "#323", "bgcolor": "#535" } ], "links": [], "groups": [ { "id": 1, "title": "Model Location Workflow", "bounding": [ 906.44921875, -312.4842834472656, 1136.69384765625, 141.55555725097656 ], "color": "#3f789e", "font_size": 100, "flags": {} } ], "config": {}, "extra": { "ds": { "scale": 0.6588450000000011, "offset": [ -67.42587992028226, -1116.5907405860587 ] }, "ue_links": [] }, "version": 0.4 }