From 6390199cb87376dd40abe1ef7d957877d5c41ac2 Mon Sep 17 00:00:00 2001 From: Karl Date: Tue, 24 Jun 2025 19:57:57 +0100 Subject: [PATCH] new flux wf --- libs/comfyui.py | 2 +- workflow_flux.json | 181 +++++---------- workflow_flux_original.json | 433 ++++++++++++++++++++++++++++++++++++ 3 files changed, 490 insertions(+), 126 deletions(-) create mode 100644 workflow_flux_original.json diff --git a/libs/comfyui.py b/libs/comfyui.py index 4438959..9c05052 100644 --- a/libs/comfyui.py +++ b/libs/comfyui.py @@ -164,7 +164,7 @@ def create_image(prompt: str | None = None, model: str = "Random") -> None: seed_param="seed", save_node="CivitAI Image Saver", save_param="filename", - model_node="Unet Loader (GGUF)", + model_node="UnetLoaderGGUFAdvancedDisTorchMultiGPU", model_param="unet_name", model=model ) diff --git a/workflow_flux.json b/workflow_flux.json index f790441..d1a3a64 100644 --- a/workflow_flux.json +++ b/workflow_flux.json @@ -6,7 +6,7 @@ 1 ], "vae": [ - "27", + "73", 0 ] }, @@ -15,27 +15,6 @@ "title": "VAE Decode" } }, - "22": { - "inputs": { - "clip_name1": "t5/t5xxl_fp8_e4m3fn.safetensors", - "clip_name2": "clip_l.safetensors", - "type": "flux", - "device": "default" - }, - "class_type": "DualCLIPLoader", - "_meta": { - "title": "DualCLIPLoader" - } - }, - "27": { - "inputs": { - "vae_name": "FLUX1/ae.safetensors" - }, - "class_type": "VAELoader", - "_meta": { - "title": "Load VAE" - } - }, "32": { "inputs": { "upscale_model": [ @@ -49,7 +28,7 @@ }, "class_type": "ImageUpscaleWithModel", "_meta": { - "title": "Upscale Image (using Model)" + "title": "Upscale Image" } }, "33": { @@ -61,29 +40,6 @@ "title": "Load Upscale Model" } }, - "34": { - "inputs": { - "upscale_method": "lanczos", - "scale_by": 0.5, - "image": [ - "32", - 0 - ] - }, - "class_type": "ImageScaleBy", - "_meta": { - "title": "Half size" - } - }, - "35": { - "inputs": { - "unet_name": "flux1-dev-Q4_0.gguf" - }, - "class_type": "UnetLoaderGGUF", - "_meta": { - "title": "Unet Loader (GGUF)" - } - }, "40": { "inputs": { "int": 20 @@ -126,10 +82,7 @@ "50", 1 ], - "scheduler": [ - "49", - 1 - ], + "scheduler_name": "normal", "positive": [ "44", 0 @@ -172,7 +125,7 @@ ] }, "images": [ - "34", + "32", 0 ] }, @@ -224,18 +177,18 @@ ] }, "clip": [ - "68", - 1 + "72", + 0 ] }, "class_type": "CLIPTextEncode", "_meta": { - "title": "CLIP Text Encode (Prompt)" + "title": "Prompt Encoder" } }, "48": { "inputs": { - "seed": 903006749445372, + "seed": 293172455045072, "increment": 1 }, "class_type": "Seed Generator (Image Saver)", @@ -249,7 +202,7 @@ }, "class_type": "Scheduler Selector (Comfy) (Image Saver)", "_meta": { - "title": "Scheduler Selector" + "title": "Scheduler" } }, "50": { @@ -258,66 +211,27 @@ }, "class_type": "Sampler Selector (Image Saver)", "_meta": { - "title": "Sampler Selector (Image Saver)" - } - }, - "51": { - "inputs": { - "images": [ - "8", - 0 - ] - }, - "class_type": "PreviewImage", - "_meta": { - "title": "Preview Image" + "title": "Sampler" } }, "52": { "inputs": { - "float": 3.5 + "float": 3.500000000000001 }, "class_type": "Float Literal (Image Saver)", "_meta": { - "title": "CFG" + "title": "CFG Scale" } }, "53": { "inputs": { - "float": 1 + "float": 1.0000000000000002 }, "class_type": "Float Literal (Image Saver)", "_meta": { "title": "Denoise" } }, - "60": { - "inputs": { - "clip_l": "", - "t5xxl": [ - "44", - 0 - ], - "guidance": [ - "52", - 0 - ], - "speak_and_recognation": { - "__value__": [ - false, - true - ] - }, - "clip": [ - "68", - 1 - ] - }, - "class_type": "CLIPTextEncodeFlux", - "_meta": 
{ - "title": "CLIPTextEncodeFlux" - } - }, "62": { "inputs": { "noise": [ @@ -343,7 +257,7 @@ }, "class_type": "SamplerCustomAdvanced", "_meta": { - "title": "SamplerCustomAdvanced" + "title": "Custom Sampler" } }, "63": { @@ -355,7 +269,7 @@ }, "class_type": "KSamplerSelect", "_meta": { - "title": "KSamplerSelect" + "title": "KSampler Select" } }, "64": { @@ -373,13 +287,13 @@ 0 ], "model": [ - "68", + "74", 0 ] }, "class_type": "BasicScheduler", "_meta": { - "title": "BasicScheduler" + "title": "Sigma Generator" } }, "65": { @@ -391,13 +305,13 @@ }, "class_type": "RandomNoise", "_meta": { - "title": "RandomNoise" + "title": "Noise Generator" } }, "67": { "inputs": { "model": [ - "68", + "74", 0 ], "conditioning": [ @@ -407,31 +321,48 @@ }, "class_type": "BasicGuider", "_meta": { - "title": "BasicGuider" + "title": "Prompt Guider" } }, - "68": { + "72": { "inputs": { - "lora_01": "None", - "strength_01": 1, - "lora_02": "None", - "strength_02": 1, - "lora_03": "None", - "strength_03": 1, - "lora_04": "None", - "strength_04": 1, - "model": [ - "35", - 0 - ], - "clip": [ - "22", - 0 - ] + "clip_name1": "t5-v1_1-xxl-encoder-Q4_K_M.gguf", + "clip_name2": "clip_l.safetensors", + "type": "flux", + "device": "cuda:1", + "virtual_vram_gb": 4, + "use_other_vram": false, + "expert_mode_allocations": "" }, - "class_type": "Lora Loader Stack (rgthree)", + "class_type": "DualCLIPLoaderGGUFDisTorchMultiGPU", "_meta": { - "title": "Lora Loader Stack (rgthree)" + "title": "DualCLIPLoaderGGUFDisTorchMultiGPU" + } + }, + "73": { + "inputs": { + "vae_name": "FLUX1/ae.safetensors", + "device": "cuda:1" + }, + "class_type": "VAELoaderMultiGPU", + "_meta": { + "title": "VAELoaderMultiGPU" + } + }, + "74": { + "inputs": { + "unet_name": "flux1-dev-Q4_0.gguf", + "dequant_dtype": "default", + "patch_dtype": "default", + "patch_on_device": false, + "device": "cuda:0", + "virtual_vram_gb": 8, + "use_other_vram": true, + "expert_mode_allocations": "" + }, + "class_type": "UnetLoaderGGUFAdvancedDisTorchMultiGPU", + "_meta": { + "title": "UnetLoaderGGUFAdvancedDisTorchMultiGPU" } } } \ No newline at end of file diff --git a/workflow_flux_original.json b/workflow_flux_original.json new file mode 100644 index 0000000..9f92012 --- /dev/null +++ b/workflow_flux_original.json @@ -0,0 +1,433 @@ +{ + "8": { + "inputs": { + "samples": [ + "62", + 1 + ], + "vae": [ + "27", + 0 + ] + }, + "class_type": "VAEDecode", + "_meta": { + "title": "VAE Decode" + } + }, + "22": { + "inputs": { + "clip_name1": "t5/t5xxl_fp8_e4m3fn.safetensors", + "clip_name2": "clip_l.safetensors", + "type": "flux", + "device": "default" + }, + "class_type": "DualCLIPLoader", + "_meta": { + "title": "DualCLIPLoader" + } + }, + "27": { + "inputs": { + "vae_name": "FLUX1/ae.safetensors" + }, + "class_type": "VAELoader", + "_meta": { + "title": "Load VAE" + } + }, + "32": { + "inputs": { + "upscale_model": [ + "33", + 0 + ], + "image": [ + "8", + 0 + ] + }, + "class_type": "ImageUpscaleWithModel", + "_meta": { + "title": "Upscale Image (using Model)" + } + }, + "33": { + "inputs": { + "model_name": "4x-UltraSharp.pth" + }, + "class_type": "UpscaleModelLoader", + "_meta": { + "title": "Load Upscale Model" + } + }, + "34": { + "inputs": { + "upscale_method": "lanczos", + "scale_by": 0.5, + "image": [ + "32", + 0 + ] + }, + "class_type": "ImageScaleBy", + "_meta": { + "title": "Half size" + } + }, + "35": { + "inputs": { + "unet_name": "flux1-dev-Q4_0.gguf" + }, + "class_type": "UnetLoaderGGUF", + "_meta": { + "title": "Unet Loader (GGUF)" + } + }, + "40": { + 
"inputs": { + "int": 20 + }, + "class_type": "Int Literal (Image Saver)", + "_meta": { + "title": "Generation Steps" + } + }, + "41": { + "inputs": { + "width": 720, + "height": 1080, + "aspect_ratio": "custom", + "swap_dimensions": "Off", + "upscale_factor": 2, + "prescale_factor": 1, + "batch_size": 1 + }, + "class_type": "CR Aspect Ratio", + "_meta": { + "title": "CR Aspect Ratio" + } + }, + "42": { + "inputs": { + "filename": "THISFILE", + "path": "", + "extension": "png", + "steps": [ + "40", + 0 + ], + "cfg": [ + "52", + 0 + ], + "modelname": "flux1-dev-Q4_0.gguf", + "sampler_name": [ + "50", + 1 + ], + "positive": [ + "44", + 0 + ], + "negative": [ + "45", + 0 + ], + "seed_value": [ + "48", + 0 + ], + "width": [ + "41", + 0 + ], + "height": [ + "41", + 1 + ], + "lossless_webp": true, + "quality_jpeg_or_webp": 100, + "optimize_png": false, + "counter": 0, + "denoise": [ + "53", + 0 + ], + "clip_skip": 0, + "time_format": "%Y-%m-%d-%H%M%S", + "save_workflow_as_json": true, + "embed_workflow": true, + "additional_hashes": "", + "download_civitai_data": true, + "easy_remix": true, + "speak_and_recognation": { + "__value__": [ + false, + true + ] + }, + "images": [ + "34", + 0 + ] + }, + "class_type": "Image Saver", + "_meta": { + "title": "CivitAI Image Saver" + } + }, + "44": { + "inputs": { + "text": "", + "speak_and_recognation": { + "__value__": [ + false, + true + ] + } + }, + "class_type": "ttN text", + "_meta": { + "title": "Positive Prompt T5" + } + }, + "45": { + "inputs": { + "text": "text, watermark, deformed Avoid flat colors, poor lighting, and artificial elements. No unrealistic elements, low resolution, or flat colors. Avoid generic objects, poor lighting, and inconsistent styles, blurry, low-quality, distorted faces, overexposed lighting, extra limbs, bad anatomy, low contrast", + "speak_and_recognation": { + "__value__": [ + false, + true + ] + } + }, + "class_type": "ttN text", + "_meta": { + "title": "Negative Prompt" + } + }, + "47": { + "inputs": { + "text": [ + "44", + 0 + ], + "speak_and_recognation": { + "__value__": [ + false, + true + ] + }, + "clip": [ + "68", + 1 + ] + }, + "class_type": "CLIPTextEncode", + "_meta": { + "title": "CLIP Text Encode (Prompt)" + } + }, + "48": { + "inputs": { + "seed": 903006749445372, + "increment": 1 + }, + "class_type": "Seed Generator (Image Saver)", + "_meta": { + "title": "Seed" + } + }, + "49": { + "inputs": { + "scheduler": "beta" + }, + "class_type": "Scheduler Selector (Comfy) (Image Saver)", + "_meta": { + "title": "Scheduler Selector" + } + }, + "50": { + "inputs": { + "sampler_name": "euler" + }, + "class_type": "Sampler Selector (Image Saver)", + "_meta": { + "title": "Sampler Selector (Image Saver)" + } + }, + "51": { + "inputs": { + "images": [ + "8", + 0 + ] + }, + "class_type": "PreviewImage", + "_meta": { + "title": "Preview Image" + } + }, + "52": { + "inputs": { + "float": 3.5 + }, + "class_type": "Float Literal (Image Saver)", + "_meta": { + "title": "CFG" + } + }, + "53": { + "inputs": { + "float": 1 + }, + "class_type": "Float Literal (Image Saver)", + "_meta": { + "title": "Denoise" + } + }, + "60": { + "inputs": { + "clip_l": "", + "t5xxl": [ + "44", + 0 + ], + "guidance": [ + "52", + 0 + ], + "speak_and_recognation": { + "__value__": [ + false, + true + ] + }, + "clip": [ + "68", + 1 + ] + }, + "class_type": "CLIPTextEncodeFlux", + "_meta": { + "title": "CLIPTextEncodeFlux" + } + }, + "62": { + "inputs": { + "noise": [ + "65", + 0 + ], + "guider": [ + "67", + 0 + ], + "sampler": [ + "63", + 0 + ], + 
"sigmas": [ + "64", + 0 + ], + "latent_image": [ + "41", + 5 + ] + }, + "class_type": "SamplerCustomAdvanced", + "_meta": { + "title": "SamplerCustomAdvanced" + } + }, + "63": { + "inputs": { + "sampler_name": [ + "50", + 0 + ] + }, + "class_type": "KSamplerSelect", + "_meta": { + "title": "KSamplerSelect" + } + }, + "64": { + "inputs": { + "scheduler": [ + "49", + 0 + ], + "steps": [ + "40", + 0 + ], + "denoise": [ + "53", + 0 + ], + "model": [ + "68", + 0 + ] + }, + "class_type": "BasicScheduler", + "_meta": { + "title": "BasicScheduler" + } + }, + "65": { + "inputs": { + "noise_seed": [ + "48", + 0 + ] + }, + "class_type": "RandomNoise", + "_meta": { + "title": "RandomNoise" + } + }, + "67": { + "inputs": { + "model": [ + "68", + 0 + ], + "conditioning": [ + "47", + 0 + ] + }, + "class_type": "BasicGuider", + "_meta": { + "title": "BasicGuider" + } + }, + "68": { + "inputs": { + "lora_01": "None", + "strength_01": 1, + "lora_02": "None", + "strength_02": 1, + "lora_03": "None", + "strength_03": 1, + "lora_04": "None", + "strength_04": 1, + "model": [ + "35", + 0 + ], + "clip": [ + "22", + 0 + ] + }, + "class_type": "Lora Loader Stack (rgthree)", + "_meta": { + "title": "Lora Loader Stack (rgthree)" + } + } +} \ No newline at end of file