{ "3": { "inputs": { "seed": 695262830308132, "steps": 3, "cfg": 2, "sampler_name": "dpmpp_sde", "scheduler": "karras", "denoise": 1, "model": [ "32", 0 ], "positive": [ "22", 0 ], "negative": [ "22", 1 ], "latent_image": [ "5", 0 ] }, "class_type": "KSampler", "_meta": { "title": "KSampler" } }, "4": { "inputs": { "ckpt_name": "dreamshaperXL_sfwLightningDPMSDE.safetensors" }, "class_type": "CheckpointLoaderSimple", "_meta": { "title": "Load Checkpoint" } }, "5": { "inputs": { "width": 1024, "height": 1024, "batch_size": 1 }, "class_type": "EmptyLatentImage", "_meta": { "title": "Empty Latent Image" } }, "6": { "inputs": { "text": "A fencer in full gear, fencing sword, 1 human, empty background, dark background, dark, empty, 1 sword, sword in hand", "clip": [ "4", 1 ] }, "class_type": "CLIPTextEncode", "_meta": { "title": "CLIP Text Encode (Positive)" } }, "8": { "inputs": { "samples": [ "3", 0 ], "vae": [ "4", 2 ] }, "class_type": "VAEDecode", "_meta": { "title": "VAE Decode" } }, "17": { "inputs": { "image": "3bdafb967cede879cabdc2f1277ce5ae8fde8f4a1ff1f0c821fb9b7890bfa252.png", "upload": "image" }, "class_type": "LoadImage", "_meta": { "title": "Load Image" } }, "22": { "inputs": { "strength": 0.98, "start_percent": 0, "end_percent": 1, "positive": [ "6", 0 ], "negative": [ "40", 0 ], "control_net": [ "43", 0 ], "image": [ "17", 0 ], "vae": [ "4", 2 ] }, "class_type": "ControlNetApplyAdvanced", "_meta": { "title": "Apply ControlNet" } }, "28": { "inputs": { "ipadapter_file": "ip-adapter-plus_sdxl_vit-h.safetensors" }, "class_type": "IPAdapterModelLoader", "_meta": { "title": "IPAdapter Model Loader" } }, "29": { "inputs": { "image": "ref_black.png", "upload": "image" }, "class_type": "LoadImage", "_meta": { "title": "Load Image" } }, "31": { "inputs": { "clip_name": "CLIP-ViT-H-14-laion2B-s32B-b79K.safetensors" }, "class_type": "CLIPVisionLoader", "_meta": { "title": "Load CLIP Vision" } }, "32": { "inputs": { "weight": 1.3, "weight_type": "style and composition", "combine_embeds": "norm average", "start_at": 0, "end_at": 1, "embeds_scaling": "K+V w/ C penalty", "model": [ "4", 0 ], "ipadapter": [ "28", 0 ], "image": [ "29", 0 ], "clip_vision": [ "31", 0 ] }, "class_type": "IPAdapterAdvanced", "_meta": { "title": "IPAdapter Advanced" } }, "40": { "inputs": { "text": "blurry, drawing, horror, distorted, malformed, naked, cartoon, anime, out of focus, dull, muted colors, boring pose, no action, distracting background, colorful, (face:5.0), bad hand, (bad anatomy:5.0), worst quality, ai generated images, low quality, average quality, smoke, background, three arms, three hands, white light, (light:5.0), (shadow:5.0), (floor:5.0), 2 sword, multiple sword\n\nembedding:ac_neg1, embedding:ac_neg2, embedding:badhandv4, embedding:DeepNegative_xl_v1, embedding:NEGATIVE_HANDS, embedding:negativeXL_D, embedding:'unaestheticXL_cbp62 -neg.safetensors', embedding:verybadimagenegative_v1.3, embedding:ziprealism_neg, ", "clip": [ "4", 1 ] }, "class_type": "CLIPTextEncode", "_meta": { "title": "CLIP Text Encode (Negative)" } }, "43": { "inputs": { "control_net_name": "diffusion_pytorch_model.safetensors", "model": [ "4", 0 ] }, "class_type": "DiffControlNetLoader", "_meta": { "title": "Load ControlNet Model (diff)" } }, "save_image_websocket_node": { "inputs": { "images": [ "8", 0 ] }, "class_type": "SaveImageWebsocket", "_meta": { "title": "SaveImageWebsocket" } } }