Added more image generators

BordedDev 2025-04-10 09:51:03 +02:00
parent 2822e7700b
commit b6904edf4e
No known key found for this signature in database
GPG Key ID: C5F495EAE56673BF
5 changed files with 1679 additions and 1 deletion

View File

@@ -0,0 +1,199 @@
{
"4": {
"inputs": {
"ckpt_name": "stableDiffusion35_large.safetensors"
},
"class_type": "CheckpointLoaderSimple",
"_meta": {
"title": "Load Checkpoint"
}
},
"6": {
"inputs": {
"text": "<%= it.prompt %>",
"clip": [
"11",
0
]
},
"class_type": "CLIPTextEncode",
"_meta": {
"title": "CLIP Text Encode (Prompt)"
}
},
"8": {
"inputs": {
"samples": [
"294",
0
],
"vae": [
"4",
2
]
},
"class_type": "VAEDecode",
"_meta": {
"title": "VAE Decode"
}
},
"11": {
"inputs": {
"clip_name1": "long_clip\\ViT-L-14-TEXT-detail-improved-hiT-GmP-HF.safetensors",
"clip_name2": "clip_g.safetensors",
"clip_name3": "t5\\google_t5-v1_1-xxl_encoderonly-fp8_e4m3fn.safetensors"
},
"class_type": "TripleCLIPLoader",
"_meta": {
"title": "TripleCLIPLoader"
}
},
"13": {
"inputs": {
"shift": 3,
"model": [
"4",
0
]
},
"class_type": "ModelSamplingSD3",
"_meta": {
"title": "ModelSamplingSD3"
}
},
"67": {
"inputs": {
"conditioning": [
"71",
0
]
},
"class_type": "ConditioningZeroOut",
"_meta": {
"title": "ConditioningZeroOut"
}
},
"68": {
"inputs": {
"start": 0.1,
"end": 1,
"conditioning": [
"67",
0
]
},
"class_type": "ConditioningSetTimestepRange",
"_meta": {
"title": "ConditioningSetTimestepRange"
}
},
"69": {
"inputs": {
"conditioning_1": [
"68",
0
],
"conditioning_2": [
"70",
0
]
},
"class_type": "ConditioningCombine",
"_meta": {
"title": "Conditioning (Combine)"
}
},
"70": {
"inputs": {
"start": 0,
"end": 0.1,
"conditioning": [
"71",
0
]
},
"class_type": "ConditioningSetTimestepRange",
"_meta": {
"title": "ConditioningSetTimestepRange"
}
},
"71": {
"inputs": {
"text": "<%= it.negativePrompt ||'' %>",
"clip": [
"11",
0
]
},
"class_type": "CLIPTextEncode",
"_meta": {
"title": "CLIP Text Encode (Prompt)"
}
},
"135": {
"inputs": {
"width": <%= it.width || 1024 %>,
"height": <%= it.height || 1024 %>,
"batch_size": <%= it.batchSize || 1 %>
},
"class_type": "EmptySD3LatentImage",
"_meta": {
"title": "EmptySD3LatentImage"
}
},
"294": {
"inputs": {
"seed": <%= it.seed || it.randomSeed() %>,
"steps": <%= it.steps || 2 %>,
"cfg": <%= it.cfg || 1 %>,
"sampler_name": "<%= it.sampler || "euler" %>",
"scheduler": "beta",
"denoise": 1,
"model": [
"13",
0
],
"positive": [
"6",
0
],
"negative": [
"69",
0
],
"latent_image": [
"135",
0
]
},
"class_type": "KSampler",
"_meta": {
"title": "KSampler"
}
},
"301": {
"inputs": {
"filename_prefix": "ComfyUI",
"images": [
"8",
0
]
},
"class_type": "SaveImage",
"_meta": {
"title": "Save Image"
}
},
"save_image_websocket_node": {
"inputs": {
"images": [
"8",
0
]
},
"class_type": "SaveImageWebsocket",
"_meta": {
"title": "SaveImageWebsocket"
}
}
}
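
The workflow above is a ComfyUI prompt in API format, with Eta-style <%= it.* %> placeholders for the prompt text, seed, image size, and sampler settings. As a rough illustration only, assuming the Eta templating library (suggested by the `it.` data prefix) plus a caller-supplied randomSeed helper that the template invokes when no seed is given, rendering could look like the sketch below; the file path, interface, and function names are placeholders, not part of this commit.

import { readFile } from "node:fs/promises";
import { Eta } from "eta";

// Hypothetical input shape mirroring the <%= it.* %> placeholders used above.
interface GeneratorInput {
  prompt: string;
  negativePrompt?: string;
  width?: number;
  height?: number;
  batchSize?: number;
  seed?: number;
  steps?: number;
  cfg?: number;
  sampler?: string;
  randomSeed: () => number; // called by the template when no seed is supplied
}

const eta = new Eta();

// Render a workflow template into the JSON object ComfyUI expects.
export async function renderWorkflow(templatePath: string, input: GeneratorInput) {
  const source = await readFile(templatePath, "utf8");
  return JSON.parse(eta.renderString(source, input));
}

// Example usage (the path is illustrative):
// const workflow = await renderWorkflow("./workflows/sd35-large.json", {
//   prompt: "a lighthouse at dusk",
//   randomSeed: () => Math.floor(Math.random() * 2 ** 32),
// });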

File diff suppressed because it is too large

View File

@@ -0,0 +1,223 @@
{
"5": {
"inputs": {
"width": <%= it.width || 1024 %>,
"height": <%= it.height || 1024 %>,
"batch_size": <%= it.batchSize || 1 %>
},
"class_type": "EmptyLatentImage",
"_meta": {
"title": "Empty Latent Image"
}
},
"6": {
"inputs": {
"text": "<%= it.prompt ||'' %>",
"clip": [
"11",
0
]
},
"class_type": "CLIPTextEncode",
"_meta": {
"title": "CLIP Text Encode (Prompt)"
}
},
"8": {
"inputs": {
"samples": [
"13",
0
],
"vae": [
"10",
0
]
},
"class_type": "VAEDecode",
"_meta": {
"title": "VAE Decode"
}
},
"9": {
"inputs": {
"filename_prefix": "UncensoredFLUX",
"images": [
"8",
0
]
},
"class_type": "SaveImage",
"_meta": {
"title": "Save Image"
}
},
"10": {
"inputs": {
"vae_name": "ae.safetensors"
},
"class_type": "VAELoader",
"_meta": {
"title": "Load VAE"
}
},
"11": {
"inputs": {
"clip_name1": "t5xxl_fp16.safetensors",
"clip_name2": "clip_l.safetensors",
"type": "flux",
"device": "default"
},
"class_type": "DualCLIPLoader",
"_meta": {
"title": "DualCLIPLoader"
}
},
"12": {
"inputs": {
"unet_name": "flux_dev.safetensors",
"weight_dtype": "fp8_e4m3fn"
},
"class_type": "UNETLoader",
"_meta": {
"title": "Load Diffusion Model"
}
},
"13": {
"inputs": {
"noise": [
"25",
0
],
"guider": [
"22",
0
],
"sampler": [
"16",
0
],
"sigmas": [
"17",
0
],
"latent_image": [
"5",
0
]
},
"class_type": "SamplerCustomAdvanced",
"_meta": {
"title": "SamplerCustomAdvanced"
}
},
"16": {
"inputs": {
"sampler_name": "<%= it.sampler || "euler" %>"
},
"class_type": "KSamplerSelect",
"_meta": {
"title": "KSamplerSelect"
}
},
"17": {
"inputs": {
"scheduler": "beta",
"steps": <%= it.steps || 30 %>,
"denoise": 1,
"model": [
"61",
0
]
},
"class_type": "BasicScheduler",
"_meta": {
"title": "BasicScheduler"
}
},
"22": {
"inputs": {
"model": [
"61",
0
],
"conditioning": [
"60",
0
]
},
"class_type": "BasicGuider",
"_meta": {
"title": "BasicGuider"
}
},
"25": {
"inputs": {
"noise_seed": <%= it.seed || it.randomSeed() %>
},
"class_type": "RandomNoise",
"_meta": {
"title": "RandomNoise"
}
},
"60": {
"inputs": {
"guidance": <%= it.cfg || 3.5 %>,
"conditioning": [
"6",
0
]
},
"class_type": "FluxGuidance",
"_meta": {
"title": "FluxGuidance"
}
},
"61": {
"inputs": {
"max_shift": 1.15,
"base_shift": 0.5,
"width": <%= it.width || 1024 %>,
"height": <%= it.height || 1024 %>,
"model": [
"63",
0
]
},
"class_type": "ModelSamplingFlux",
"_meta": {
"title": "ModelSamplingFlux"
}
},
"63": {
"inputs": {
"lora_name": "NSFW_master.safetensors",
"strength_model": 0.8,
"strength_clip": 1,
"model": [
"12",
0
],
"clip": [
"11",
0
]
},
"class_type": "LoraLoader",
"_meta": {
"title": "Load LoRA"
}
},
"save_image_websocket_node": {
"inputs": {
"images": [
"8",
0
]
},
"class_type": "SaveImageWebsocket",
"_meta": {
"title": "SaveImageWebsocket"
}
}
}
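
Once a template like the FLUX workflow above has been rendered, the resulting JSON can be queued on a running ComfyUI instance through its HTTP API. The sketch below is illustrative and assumes a local server on ComfyUI's default port 8188; the client_id ties the queued prompt to a websocket session so progress events and returned images can be matched up later.

import { randomUUID } from "node:crypto";

// Queue a rendered workflow (API-format JSON) on a local ComfyUI server.
export async function queuePrompt(workflow: unknown, clientId: string = randomUUID()) {
  const res = await fetch("http://127.0.0.1:8188/prompt", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ prompt: workflow, client_id: clientId }),
  });
  if (!res.ok) {
    throw new Error(`ComfyUI rejected the prompt: ${res.status} ${await res.text()}`);
  }
  // The response carries a prompt_id that history and websocket events refer back to.
  return (await res.json()) as { prompt_id: string };
}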

View File

@@ -162,7 +162,7 @@
},
"60": {
"inputs": {
"guidance": 4,
"guidance": <%= it.cfg || 3.5 %>,
"conditioning": [
"6",
0

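One behavioural note that applies to all of these templates: defaults written as <%= it.cfg || 3.5 %> rely on JavaScript's ||, which substitutes the fallback for any falsy input, so an explicit 0 (or an empty string for text fields) is replaced by the default, whereas ?? would only substitute for null or undefined. A quick comparison:

// || falls back on any falsy value; ?? only on null/undefined.
const a = 0 || 3.5;          // 3.5 (the zero is discarded)
const b = 0 ?? 3.5;          // 0   (the zero is kept)
const c = undefined ?? 3.5;  // 3.5
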
View File

@@ -0,0 +1,223 @@
{
"5": {
"inputs": {
"width": <%= it.width || 1024 %>,
"height": <%= it.height || 1024 %>,
"batch_size": <%= it.batchSize || 1 %>
},
"class_type": "EmptyLatentImage",
"_meta": {
"title": "Empty Latent Image"
}
},
"6": {
"inputs": {
"text": "<%= it.prompt ||'' %>",
"clip": [
"11",
0
]
},
"class_type": "CLIPTextEncode",
"_meta": {
"title": "CLIP Text Encode (Prompt)"
}
},
"8": {
"inputs": {
"samples": [
"13",
0
],
"vae": [
"10",
0
]
},
"class_type": "VAEDecode",
"_meta": {
"title": "VAE Decode"
}
},
"9": {
"inputs": {
"filename_prefix": "UncensoredFLUX",
"images": [
"8",
0
]
},
"class_type": "SaveImage",
"_meta": {
"title": "Save Image"
}
},
"10": {
"inputs": {
"vae_name": "ae.safetensors"
},
"class_type": "VAELoader",
"_meta": {
"title": "Load VAE"
}
},
"11": {
"inputs": {
"clip_name1": "t5xxl_fp16.safetensors",
"clip_name2": "clip_l.safetensors",
"type": "flux",
"device": "default"
},
"class_type": "DualCLIPLoader",
"_meta": {
"title": "DualCLIPLoader"
}
},
"12": {
"inputs": {
"unet_name": "flux_dev.safetensors",
"weight_dtype": "fp8_e4m3fn"
},
"class_type": "UNETLoader",
"_meta": {
"title": "Load Diffusion Model"
}
},
"13": {
"inputs": {
"noise": [
"25",
0
],
"guider": [
"22",
0
],
"sampler": [
"16",
0
],
"sigmas": [
"17",
0
],
"latent_image": [
"5",
0
]
},
"class_type": "SamplerCustomAdvanced",
"_meta": {
"title": "SamplerCustomAdvanced"
}
},
"16": {
"inputs": {
"sampler_name": "<%= it.sampler || "euler" %>"
},
"class_type": "KSamplerSelect",
"_meta": {
"title": "KSamplerSelect"
}
},
"17": {
"inputs": {
"scheduler": "beta",
"steps": <%= it.steps || 30 %>,
"denoise": 1,
"model": [
"61",
0
]
},
"class_type": "BasicScheduler",
"_meta": {
"title": "BasicScheduler"
}
},
"22": {
"inputs": {
"model": [
"61",
0
],
"conditioning": [
"60",
0
]
},
"class_type": "BasicGuider",
"_meta": {
"title": "BasicGuider"
}
},
"25": {
"inputs": {
"noise_seed": <%= it.seed || it.randomSeed() %>
},
"class_type": "RandomNoise",
"_meta": {
"title": "RandomNoise"
}
},
"60": {
"inputs": {
"guidance": <%= it.cfg || 3.5 %>,
"conditioning": [
"6",
0
]
},
"class_type": "FluxGuidance",
"_meta": {
"title": "FluxGuidance"
}
},
"61": {
"inputs": {
"max_shift": 1.15,
"base_shift": 0.5,
"width": <%= it.width || 1024 %>,
"height": <%= it.height || 1024 %>,
"model": [
"63",
0
]
},
"class_type": "ModelSamplingFlux",
"_meta": {
"title": "ModelSamplingFlux"
}
},
"63": {
"inputs": {
"lora_name": "NSFW_master.safetensors",
"strength_model": 0.8,
"strength_clip": 1,
"model": [
"12",
0
],
"clip": [
"11",
0
]
},
"class_type": "LoraLoader",
"_meta": {
"title": "Load LoRA"
}
},
"save_image_websocket_node": {
"inputs": {
"images": [
"8",
0
]
},
"class_type": "SaveImageWebsocket",
"_meta": {
"title": "SaveImageWebsocket"
}
}
}
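
Each of these workflows ends in a save_image_websocket_node, which streams the decoded image back over ComfyUI's websocket rather than only writing it to disk. The following receiver sketch is modelled on ComfyUI's websocket image example and makes no claim about this repository's actual client code: binary frames carry the image bytes after an 8-byte header, while JSON text frames report execution progress.

import WebSocket from "ws";

// Collect images pushed by a SaveImageWebsocket node for one client session.
export function collectImages(clientId: string, onImage: (png: Buffer) => void) {
  const ws = new WebSocket(`ws://127.0.0.1:8188/ws?clientId=${clientId}`);
  ws.on("message", (data, isBinary) => {
    if (isBinary) {
      // Binary frames: 8-byte event/format header followed by the image data.
      onImage((data as Buffer).subarray(8));
    } else {
      const msg = JSON.parse(data.toString());
      // An "executing" event with node === null signals that the prompt finished.
      if (msg.type === "executing" && msg.data?.node === null) ws.close();
    }
  });
  return ws;
}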