{ "id": "55e1b6a2-9f38-46c6-8355-50a9f11a0fac", "revision": 0, "last_node_id": 46, "last_link_id": 70, "nodes": [ { "id": 23, "type": "UltralyticsDetectorProvider", "pos": [ 825.6553955078125, 552.8853149414062 ], "size": [ 361.0537109375, 84.46956634521484 ], "flags": {}, "order": 0, "mode": 0, "inputs": [], "outputs": [ { "name": "BBOX_DETECTOR", "type": "BBOX_DETECTOR", "links": [ 30 ] }, { "name": "SEGM_DETECTOR", "type": "SEGM_DETECTOR", "links": null } ], "properties": { "cnr_id": "comfyui-impact-subpack", "ver": "74db20c95eca152a6d686c914edc0ef4e4762cb8", "Node name for S&R": "UltralyticsDetectorProvider", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "bbox/face_yolov8m.pt" ] }, { "id": 28, "type": "PreviewImage", "pos": [ 1614.811767578125, 377.77825927734375 ], "size": [ 280, 246 ], "flags": {}, "order": 22, "mode": 0, "inputs": [ { "name": "images", "type": "IMAGE", "link": 39 } ], "outputs": [], "properties": { "cnr_id": "comfy-core", "ver": "0.3.39", "Node name for S&R": "PreviewImage", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [] }, { "id": 22, "type": "FaceDetailer", "pos": [ 1200.876220703125, -161.42257690429688 ], "size": [ 400, 965 ], "flags": {}, "order": 17, "mode": 0, "inputs": [ { "name": "image", "type": "IMAGE", "link": 31 }, { "name": "model", "type": "MODEL", "link": 24 }, { "name": "clip", "type": "CLIP", "link": 32 }, { "name": "vae", "type": "VAE", "link": 27 }, { "name": "positive", "type": "CONDITIONING", "link": 25 }, { "name": "negative", "type": "CONDITIONING", "link": 26 }, { "name": "bbox_detector", "type": "BBOX_DETECTOR", "link": 30 }, { "name": "sam_model_opt", "shape": 7, "type": "SAM_MODEL", "link": null }, { "name": "segm_detector_opt", "shape": 7, "type": "SEGM_DETECTOR", "link": null }, { "name": "detailer_hook", "shape": 7, "type": "DETAILER_HOOK", "link": null }, { "name": 
"scheduler_func_opt", "shape": 7, "type": "SCHEDULER_FUNC", "link": null } ], "outputs": [ { "name": "image", "type": "IMAGE", "links": [ 46 ] }, { "name": "cropped_refined", "shape": 6, "type": "IMAGE", "links": [] }, { "name": "cropped_enhanced_alpha", "shape": 6, "type": "IMAGE", "links": [ 35 ] }, { "name": "mask", "type": "MASK", "links": null }, { "name": "detailer_pipe", "type": "DETAILER_PIPE", "links": null }, { "name": "cnet_images", "shape": 6, "type": "IMAGE", "links": null } ], "properties": { "cnr_id": "comfyui-impact-pack", "ver": "c6056b132d7e155c3ece42b77e08ea45bde1bfef", "Node name for S&R": "FaceDetailer", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ 768, true, 1024, 780907627631598, "randomize", 20, 4, "dpmpp_2m_sde", "karras", 0.5, 5, true, true, 0.5, 10, 3, "center-1", 0, 0.93, 0, 0.7, "False", 10, "", 1, false, 20, false, false ] }, { "id": 30, "type": "Note", "pos": [ 822.5669555664062, 681.2274169921875 ], "size": [ 365.9283142089844, 101.13298797607422 ], "flags": { "collapsed": false }, "order": 1, "mode": 0, "inputs": [], "outputs": [], "properties": { "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "Ignore the red cross. We do not need a SEGM_Detector for this step. \n" ],
"color": "#432", "bgcolor": "#653" }, { "id": 9, "type": "VAELoader", "pos": [ -147.1011962890625, -36.25949478149414 ], "size": [ 270.9627380371094, 67.63069915771484 ], "flags": { "collapsed": false }, "order": 2, "mode": 0, "inputs": [], "outputs": [ { "name": "VAE", "type": "VAE", "links": [ 13, 19 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.39", "Node name for S&R": "VAELoader", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "sdxl_vae.safetensors" ] }, { "id": 6, "type": "EmptyLatentImage", "pos": [ -148.38095092773438, 66.59291076660156 ], "size": [ 270, 106 ], "flags": {}, "order": 3, "mode": 0, "inputs": [], "outputs": [ { "name": "LATENT", "type": "LATENT", "links": [ 18 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.41", "Node name for S&R": "EmptyLatentImage", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ 768, 1152, 1 ] }, { "id": 2, "type": "CLIPSetLastLayer", "pos": [ -156.12841796875, 218.3623046875 ], "size": [ 270, 58 ], "flags": {}, "order": 11, "mode": 0, "inputs": [ { "name": "clip", "type": "CLIP", "link": 1 } ], "outputs": [ { "name": "CLIP", "type": "CLIP", "links": [ 3, 20 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.41", "Node name for S&R": "CLIPSetLastLayer", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ -2 ] }, { "id": 33, "type": "UpscaleModelLoader", "pos": [ 3020.181884765625, -107.78295135498047 ], "size": [ 315, 58 ], "flags": {}, "order": 4, "mode": 0, "inputs": [], "outputs": [ { "label": "UPSCALE_MODEL", "name": "UPSCALE_MODEL", "type": "UPSCALE_MODEL", "slot_index": 0, "links": [ 47 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "UpscaleModelLoader", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, 
"input_ue_unconnectable": {} } }, "widgets_values": [ "4x_foolhardy_Remacri.pth" ] }, { "id": 4, "type": "CLIPTextEncode", "pos": [ 420.38519287109375, 69.29096984863281 ], "size": [ 383.0173645019531, 202.68519592285156 ], "flags": {}, "order": 12, "mode": 0, "inputs": [ { "name": "clip", "type": "CLIP", "link": 3 } ], "outputs": [ { "name": "CONDITIONING", "type": "CONDITIONING", "links": [ 17 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.41", "Node name for S&R": "CLIPTextEncode", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "cartoon, illustration, anime, painting, CGI, 3D render, low quality, watermark, logo, label" ], "color": "#322", "bgcolor": "#533" }, { "id": 26, "type": "UltralyticsDetectorProvider", "pos": [ 1617.21875, -160.6846923828125 ], "size": [ 270, 78 ], "flags": {}, "order": 5, "mode": 0, "inputs": [], "outputs": [ { "name": "BBOX_DETECTOR", "type": "BBOX_DETECTOR", "links": [ 36 ] }, { "name": "SEGM_DETECTOR", "type": "SEGM_DETECTOR", "links": [ 38 ] } ], "properties": { "cnr_id": "comfyui-impact-subpack", "ver": "74db20c95eca152a6d686c914edc0ef4e4762cb8", "Node name for S&R": "UltralyticsDetectorProvider", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "bbox/PitEyeDetailer-v2-seg.pt" ] }, { "id": 27, "type": "SAMLoader", "pos": [ 1614.7899169921875, -42.3914909362793 ], "size": [ 270, 82 ], "flags": {}, "order": 6, "mode": 0, "inputs": [], "outputs": [ { "name": "SAM_MODEL", "type": "SAM_MODEL", "links": [ 37 ] } ], "properties": { "cnr_id": "comfyui-impact-pack", "ver": "c6056b132d7e155c3ece42b77e08ea45bde1bfef", "Node name for S&R": "SAMLoader", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "sam_vit_b_01ec64.pth", "AUTO" ] }, { "id": 24, "type": "PreviewImage", "pos": [ 1615.81787109375, 85.68643951416016 ], 
"size": [ 280, 246 ], "flags": {}, "order": 19, "mode": 0, "inputs": [ { "name": "images", "type": "IMAGE", "link": 35 } ], "outputs": [], "properties": { "cnr_id": "comfy-core", "ver": "0.3.39", "Node name for S&R": "PreviewImage", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [] }, { "id": 1, "type": "CheckpointLoaderSimple", "pos": [ -142.7621612548828, -171.2631072998047 ], "size": [ 270, 98 ], "flags": {}, "order": 7, "mode": 0, "inputs": [], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [ 22 ] }, { "name": "CLIP", "type": "CLIP", "links": [ 1 ] }, { "name": "VAE", "type": "VAE", "links": [] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.41", "Node name for S&R": "CheckpointLoaderSimple", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "huslyorealismxl_v10.safetensors" ] }, { "id": 34, "type": "ImageUpscaleWithModel", "pos": [ 3010.697021484375, -15.554044723510742 ], "size": [ 340.20001220703125, 46 ], "flags": {}, "order": 20, "mode": 0, "inputs": [ { "label": "upscale_model", "name": "upscale_model", "type": "UPSCALE_MODEL", "link": 47 }, { "label": "image", "name": "image", "type": "IMAGE", "link": 68 } ], "outputs": [ { "label": "IMAGE", "name": "IMAGE", "type": "IMAGE", "slot_index": 0, "links": [ 51 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "ImageUpscaleWithModel", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [] }, { "id": 29, "type": "Note", "pos": [ -150.22056579589844, 332.105712890625 ], "size": [ 938.9341430664062, 445.94769287109375 ], "flags": {}, "order": 8, "mode": 0, "inputs": [], "outputs": [], "properties": { "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "Photorealistic ComfyUI Workflow \n(SDXL) by Huslyo123.\n\nThis is my personal photorealistic workflow for ComfyUI. It works best with my own model HuslyoRealismXL, but also gives great results with Lustify and AnalogMadness.\nMake sure to load a proper SDXL model and use the sdxl_vae.safetensors VAE file for best quality.\n\nThis workflow uses two FaceDetailer nodes:\n\nOne for the face\nOne specifically for the eyes\n\nFor the eye detailer, I use PitEyeDetailer, which you can get here: https://huggingface.co/ashllay/YOLO_Models/blob/main/segm/PitEyeDetailer-v2-seg.pt\n\nYou can also replace it with a standard eye mesh pipeline or any segmentation model that fits your needs.\n\n-Huslyo123\n\nMore images and prompts: https://civitai.com/user/Huslyo123\nSupport me and get all my workflows: https://www.fanvue.com/huslyo123\n\nWant to create consistent characters? Character LoRAs and make money with your AI models? Check my workflows on Fanvue. More info: https://huslyo123.carrd.co/\n\n" ],
"color": "#432", "bgcolor": "#653" }, { "id": 36, "type": "ImageScaleBy", "pos": [ 3375.341796875, -104.06462860107422 ], "size": [ 262.7231140136719, 82 ], "flags": { "collapsed": false }, "order": 23, "mode": 0, "inputs": [ { "name": "image", "type": "IMAGE", "link": 51 } ], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "slot_index": 0, "links": [ 69 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.39", "Node name for S&R": "ImageScaleBy", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "lanczos", 0.20000000000000004 ] }, { "id": 25, "type": "FaceDetailer", "pos": [ 1908.1767578125, -167.5527801513672 ], "size": [ 433.3315734863281, 982.0206298828125 ], "flags": {}, "order": 18, "mode": 0, "inputs": [ { "name": "image", "type": "IMAGE", "link": 46 }, { "name": "model", "type": "MODEL", "link": 42 }, { "name": "clip", "type": "CLIP", "link": 41 }, { "name": "vae", "type": "VAE", "link": 45 }, { "name": "positive", "type": 
"CONDITIONING", "link": 43 }, { "name": "negative", "type": "CONDITIONING", "link": 44 }, { "name": "bbox_detector", "type": "BBOX_DETECTOR", "link": 36 }, { "name": "sam_model_opt", "shape": 7, "type": "SAM_MODEL", "link": 37 }, { "name": "segm_detector_opt", "shape": 7, "type": "SEGM_DETECTOR", "link": 38 }, { "name": "detailer_hook", "shape": 7, "type": "DETAILER_HOOK", "link": null }, { "name": "scheduler_func_opt", "shape": 7, "type": "SCHEDULER_FUNC", "link": null } ], "outputs": [ { "name": "image", "type": "IMAGE", "links": [ 68, 70 ] }, { "name": "cropped_refined", "shape": 6, "type": "IMAGE", "links": null }, { "name": "cropped_enhanced_alpha", "shape": 6, "type": "IMAGE", "links": [ 39 ] }, { "name": "mask", "type": "MASK", "links": null }, { "name": "detailer_pipe", "type": "DETAILER_PIPE", "links": null }, { "name": "cnet_images", "shape": 6, "type": "IMAGE", "links": null } ], "properties": { "cnr_id": "comfyui-impact-pack", "ver": "c6056b132d7e155c3ece42b77e08ea45bde1bfef", "Node name for S&R": "FaceDetailer", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ 768, true, 1024, 157675659348079, "randomize", 20, 4, "dpmpp_2m_sde", "karras", 0.5, 5, true, true, 0.5, 10, 3, "center-1", 0, 0.93, 0, 0.7, "False", 10, "", 1, false, 20, false, false ] }, { "id": 8, "type": "SaveImage", "pos": [ 3020.272705078125, 80.53279876708984 ], "size": [ 639.6654052734375, 720.5995483398438 ], "flags": {}, "order": 24, "mode": 0, "inputs": [ { "name": "images", "type": "IMAGE", "link": 69 } ], "outputs": [], "properties": { "cnr_id": "comfy-core", "ver": "0.3.41", "Node name for S&R": "SaveImage", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "ComfyUI" ], "color": "#2a363b", "bgcolor": "#3f5159" }, { "id": 35, "type": "PreviewImage", "pos": [ 2365.887939453125, 92.90182495117188 ], "size": [ 617.1703491210938, 722.2535400390625 
], "flags": {}, "order": 21, "mode": 0, "inputs": [ { "label": "images", "name": "images", "type": "IMAGE", "link": 70 } ], "outputs": [], "properties": { "cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "PreviewImage", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [] }, { "id": 31, "type": "Note", "pos": [ 1616.347900390625, 675.1455688476562 ], "size": [ 280.67620849609375, 120.90160369873047 ], "flags": { "collapsed": false }, "order": 9, "mode": 0, "inputs": [], "outputs": [], "properties": { "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "If everything is set up correctly and your prompt generates a character with a visible face and eyes, you should see the face in the top preview box and the eyes just below it. This confirms both FaceDetailer nodes are working as intended.\n" ], "color": "#432", "bgcolor": "#653" }, { "id": 37, "type": "Note", "pos": [ 3698.9072265625, -147.1460723876953 ], "size": [ 291.99017333984375, 220.24794006347656 ], "flags": { "collapsed": false }, "order": 10, "mode": 0, "inputs": [], "outputs": [], "properties": { "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "I use 4x_foolhardy_Remacri or Real-ESRGAN-x4plus. These models upscale by 4x.\n0.65 means you are effectively upscaling by 2.6x, 0.5 means 2x upscaling.\nUpscale by 0.65 gives you a 1977×2995 px file, typically 6–8 MB in size.\n\nMy favorite upscale model: https://huggingface.co/FacehugmanIII/4x_foolhardy_Remacri\n\nPlace in: C:\\Users\\your-name\\Stability Matrix\\Data\\Models\\ESRGAN or if you use a standalone version of ComfyUI in the ESRGAN folder. \n" ],
"color": "#432", "bgcolor": "#653" }, { "id": 21, "type": "Lora Loader Stack (rgthree)", "pos": [ 137.227783203125, -170.4483642578125 ], "size": [ 273.17901611328125, 449.059326171875 ], "flags": {}, "order": 13, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 22 }, { "name": "clip", "type": "CLIP", "link": 20 } ], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [ 23 ] }, { "name": "CLIP", "type": "CLIP", "links": [ 32, 41, 59 ] } ], "properties": { "cnr_id": "rgthree-comfy", "ver": "5288408220180af41ce50b0d29135e1ef5f83fdb", "Node name for S&R": "Lora Loader Stack (rgthree)", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "Touch_of_Realism_SDXL_V2.safetensors", 0.7, "Woman877.v2.safetensors", 0.7000000000000002, "None", 1, "None", 1 ] }, { "id": 7, "type": "VAEDecode", "pos": [ 824.6995239257812, -161.63165283203125 ], "size": [ 356.0274658203125, 46 ], "flags": {}, "order": 16, "mode": 0, "inputs": [ { "name": "samples", "type": "LATENT", "link": 14 }, { "name": "vae", "type": "VAE", "link": 13 } ], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "links": [ 31 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.41", "Node name for S&R": "VAEDecode", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [] }, { "id": 10, "type": "KSampler (Efficient)", "pos": [ 821.2642822265625, -69.67591857910156 ], "size": [ 362.8278503417969, 577.7374877929688 ], "flags": {}, "order": 15, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 23 }, { "name": "positive", "type": "CONDITIONING", "link": 16 }, { "name": "negative", "type": "CONDITIONING", "link": 17 }, { "name": "latent_image", "type": "LATENT", "link": 18 }, { "name": "optional_vae", "shape": 7, "type": "VAE", "link": 19 }, { "name": "script", "shape": 7, "type": "SCRIPT", "link": null } ], "outputs": [ { "name": "MODEL", 
"type": "MODEL", "links": [ 24, 42 ] }, { "name": "CONDITIONING+", "type": "CONDITIONING", "links": [ 25, 43 ] }, { "name": "CONDITIONING-", "type": "CONDITIONING", "links": [ 26, 44 ] }, { "name": "LATENT", "type": "LATENT", "links": [ 14 ] }, { "name": "VAE", "type": "VAE", "links": [ 27, 45 ] }, { "name": "IMAGE", "type": "IMAGE", "links": [] } ], "properties": { "cnr_id": "efficiency-nodes-comfyui", "ver": "3ead4afd120833f3bffdefeca0d6545df8051798", "Node name for S&R": "KSampler (Efficient)", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ -1, null, 32, 4, "dpmpp_2m_sde", "karras", 1, "auto", "true" ], "color": "#443322", "bgcolor": "#665533", "shape": 1 }, { "id": 3, "type": "CLIPTextEncode", "pos": [ 422.14215087890625, -170.44021606445312 ], "size": [ 384.8260803222656, 193.8410186767578 ], "flags": {}, "order": 14, "mode": 0, "inputs": [ { "name": "clip", "type": "CLIP", "link": 59 } ], "outputs": [ { "name": "CONDITIONING", "type": "CONDITIONING", "links": [ 16 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.41", "Node name for S&R": "CLIPTextEncode", "ue_properties": { "version": "7.1", "widget_ue_connectable": {}, "input_ue_unconnectable": {} } }, "widgets_values": [ "woman877, a 25-year-old French woman, light beige skin with natural freckles, thick jet blonde hair in a layered cut with soft waves, brown eyes, oval face, petite frame with a delicate waist. \nShe is making a selfie outside in a park, wearing a pink sweater" ],
"color": "#232", "bgcolor": "#353" } ], "links": [ [ 1, 1, 1, 2, 0, "CLIP" ], [ 3, 2, 0, 4, 0, "CLIP" ], [ 13, 9, 0, 7, 1, "VAE" ], [ 14, 10, 3, 7, 0, "LATENT" ], [ 16, 3, 0, 10, 1, "CONDITIONING" ], [ 17, 4, 0, 10, 2, "CONDITIONING" ], [ 18, 6, 0, 10, 3, "LATENT" ], [ 19, 9, 0, 10, 4, "VAE" ], [ 20, 2, 0, 21, 1, "CLIP" ], [ 22, 1, 0, 21, 0, "MODEL" ], [ 23, 21, 0, 10, 0, "MODEL" ], [ 24, 10, 0, 22, 1, "MODEL" ], [ 25, 10, 1, 22, 4, "CONDITIONING" ], [ 26, 10, 2, 22, 5, "CONDITIONING" ], [ 27, 10, 4, 22, 3, "VAE" ], [ 30, 23, 0, 22, 6, "BBOX_DETECTOR" ], [ 31, 7, 0, 22, 0, "IMAGE" ], [ 32, 21, 1, 22, 2, "CLIP" ], [ 35, 22, 2, 24, 0, "IMAGE" ], [ 36, 26, 0, 25, 6, "BBOX_DETECTOR" ], [ 37, 27, 0, 25, 7, "SAM_MODEL" ], [ 38, 26, 1, 25, 8, "SEGM_DETECTOR" ], [ 39, 25, 2, 28, 0, "IMAGE" ], [ 41, 21, 1, 25, 2, "CLIP" ], [ 42, 10, 0, 25, 1, "MODEL" ], [ 43, 10, 1, 25, 4, "CONDITIONING" ], [ 44, 10, 2, 25, 5, "CONDITIONING" ], [ 45, 10, 4, 25, 3, "VAE" ], [ 46, 22, 0, 25, 0, "IMAGE" ], [ 47, 33, 0, 34, 0, "UPSCALE_MODEL" ], [ 51, 34, 0, 36, 0, "IMAGE" ], [ 59, 21, 1, 3, 0, "CLIP" ], [ 68, 25, 0, 34, 1, "IMAGE" ], [ 69, 36, 0, 8, 0, "IMAGE" ], [ 70, 25, 0, 35, 0, "IMAGE" ] ], "groups": [ { "id": 1, "title": "Upscale", "bounding": [ 3003.123046875, -182.42898559570312, 676.2379150390625, 1002.0047607421875 ], "color": "#b58b2a", "font_size": 24, "flags": {} } ], "config": {}, "extra": { "ue_links": [], "links_added_by_ue": [], "ds": { "scale": 0.6727499949326127, "offset": [ 4.315815620603919, 760.5101128170007 ] }, "frontendVersion": "1.25.11", "VHS_latentpreview": false, "VHS_latentpreviewrate": 0, "VHS_MetadataImage": true, "VHS_KeepIntermediate": true }, "version": 0.4 }