{
  "3": {
    "inputs": {
      "seed": 219117161329479,
      "steps": 4,
      "cfg": 1.5,
      "sampler_name": "dpmpp_sde",
      "scheduler": "karras",
      "denoise": 1,
      "model": [
        "4",
        0
      ],
      "positive": [
        "22",
        0
      ],
      "negative": [
        "22",
        1
      ],
      "latent_image": [
        "5",
        0
      ]
    },
    "class_type": "KSampler",
    "_meta": {
      "title": "KSampler"
    }
  },
  "4": {
    "inputs": {
      "ckpt_name": "dreamshaperXL_sfwLightningDPMSDE.safetensors"
    },
    "class_type": "CheckpointLoaderSimple",
    "_meta": {
      "title": "Load Checkpoint"
    }
  },
  "5": {
    "inputs": {
      "width": 1024,
      "height": 1024,
      "batch_size": 1
    },
    "class_type": "EmptyLatentImage",
    "_meta": {
      "title": "Empty Latent Image"
    }
  },
  "6": {
    "inputs": {
      "text": "A fencer in full gear, fencing sword, 1 human, empty background, dark background, dark, empty, 1 sword, sword in hand",
      "clip": [
        "4",
        1
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Positive)"
    }
  },
  "8": {
    "inputs": {
      "samples": [
        "3",
        0
      ],
      "vae": [
        "4",
        2
      ]
    },
    "class_type": "VAEDecode",
    "_meta": {
      "title": "VAE Decode"
    }
  },
  "9": {
    "inputs": {
      "filename_prefix": "Result",
      "images": [
        "8",
        0
      ]
    },
    "class_type": "SaveImage",
    "_meta": {
      "title": "Save Image"
    }
  },
  "17": {
    "inputs": {
      "image": "e7f6c011776e8db7cd330b54174fd76f7d0216b612387a5ffcfb81e6f0919683.png",
      "upload": "image"
    },
    "class_type": "LoadImage",
    "_meta": {
      "title": "Load Image"
    }
  },
  "22": {
    "inputs": {
      "strength": 1,
      "start_percent": 0,
      "end_percent": 1,
      "positive": [
        "6",
        0
      ],
      "negative": [
        "40",
        0
      ],
      "control_net": [
        "43",
        0
      ],
      "image": [
        "17",
        0
      ],
      "vae": [
        "4",
        2
      ]
    },
    "class_type": "ControlNetApplyAdvanced",
    "_meta": {
      "title": "Apply ControlNet"
    }
  },
  "28": {
    "inputs": {
      "ipadapter_file": "ip-adapter-plus_sdxl_vit-h.safetensors"
    },
    "class_type": "IPAdapterModelLoader",
    "_meta": {
      "title": "IPAdapter Model Loader"
    }
  },
  "29": {
    "inputs": {
      "image": "ref_black.png",
      "upload": "image"
    },
    "class_type": "LoadImage",
    "_meta": {
      "title": "Load Image"
    }
  },
  "31": {
    "inputs": {
      "clip_name": "CLIP-ViT-H-14-laion2B-s32B-b79K.safetensors"
    },
    "class_type": "CLIPVisionLoader",
    "_meta": {
      "title": "Load CLIP Vision"
    }
  },
  "32": {
    "inputs": {
      "weight": 1,
      "weight_type": "style and composition",
      "combine_embeds": "norm average",
      "start_at": 0,
      "end_at": 1,
      "embeds_scaling": "K+V w/ C penalty",
      "model": [
        "4",
        0
      ],
      "ipadapter": [
        "28",
        0
      ],
      "image": [
        "29",
        0
      ],
      "clip_vision": [
        "31",
        0
      ]
    },
    "class_type": "IPAdapterAdvanced",
    "_meta": {
      "title": "IPAdapter Advanced"
    }
  },
  "40": {
    "inputs": {
      "text": "blurry, drawing, horror, distorted, malformed, naked, cartoon, anime, out of focus, dull, muted colors, boring pose, no action, distracting background, colorful, (face:5.0), bad hand, (bad anatomy:5.0), worst quality, ai generated images, low quality, average quality, smoke, background, three arms, three hands, white light, (light:5.0), (shadow:5.0), (floor:5.0), 2 sword, multiple sword\n\nembedding:ac_neg1, embedding:ac_neg2, embedding:badhandv4, embedding:DeepNegative_xl_v1, embedding:NEGATIVE_HANDS, embedding:negativeXL_D, embedding:unaestheticXL_cbp62 -neg, embedding:verybadimagenegative_v1.3, embedding:ziprealism_neg, ",
      "clip": [
        "4",
        1
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Negative)"
    }
  },
  "43": {
    "inputs": {
      "control_net_name": "OpenPoseXL2.safetensors",
      "model": [
        "4",
        0
      ]
    },
    "class_type": "DiffControlNetLoader",
    "_meta": {
      "title": "Load ControlNet Model (diff)"
    }
  }
}