From e8ec30fc73d3ab3ee5af5e4f19627bd60557b97c Mon Sep 17 00:00:00 2001 From: Karl Date: Fri, 31 Oct 2025 15:14:19 +0000 Subject: [PATCH] Secondary topic and config updates --- README.md | 11 +++++++++-- libs/openrouter.py | 17 +++++++++++++---- libs/openwebui.py | 13 +++++++++++-- user_config.cfg.sample | 10 +++++++--- 4 files changed, 40 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 52260fb..07e19e6 100644 --- a/README.md +++ b/README.md @@ -79,12 +79,19 @@ The application is configured via the `user_config.cfg` file. | `[comfyui]` | `width` | The width of the generated image. | `1568` | | `[comfyui]` | `height` | The height of the generated image. | `672` | | `[comfyui]` | `topics` | A comma-separated list of topics to generate prompts from. | | -| `[comfyui]` | `FLUX` | Enable FLUX models (`True`/`False`). | `False` | -| `[comfyui]` | `ONLY_FLUX` | Only use FLUX models (`True`/`False`). | `False` | +| `[comfyui]` | `secondary_topic` | A secondary topic for prompt generation. | | +| `[comfyui]` | `flux` | Enable FLUX models (`True`/`False`). | `False` | +| `[comfyui]` | `qwen` | Enable Qwen models (`True`/`False`). | `False` | +| `[comfyui]` | `only_flux` | Only use FLUX models (`True`/`False`). | `False` | | `[comfyui:flux]` | `models` | A comma-separated list of FLUX models. | `flux1-dev-Q4_0.gguf,flux1-schnell-Q4_0.gguf` | +| `[comfyui:qwen]` | `models` | A comma-separated list of Qwen models. | `qwen-image-Q4_K_S.gguf, qwen-image-Q2_K.gguf` | | `[openwebui]` | `base_url` | The base URL for OpenWebUI. | `https://openwebui` | | `[openwebui]` | `api_key` | The API key for OpenWebUI. | `sk-` | | `[openwebui]` | `models` | A comma-separated list of models for OpenWebUI. | `llama3:latest,cogito:14b,gemma3:12b` | +| `[openrouter]` | `enabled` | Enable OpenRouter integration (`True`/`False`). | `False` | +| `[openrouter]` | `api_key` | The API key for OpenRouter. 
| | +| `[openrouter]` | `models` | A comma-separated list of models for OpenRouter. | `mistralai/mistral-7b-instruct:free,google/gemma-7b-it:free,meta-llama/llama-3.1-8b-instruct:free` | +| `[openrouter]` | `list_all_free_models` | List all free models (`True`/`False`). | `False` | ## Usage diff --git a/libs/openrouter.py b/libs/openrouter.py index 062d4c3..60ecd34 100644 --- a/libs/openrouter.py +++ b/libs/openrouter.py @@ -33,31 +33,40 @@ def get_free_models(): def create_prompt_on_openrouter(prompt: str, topic: str = "random", model: str = None) -> str: """Sends prompt to OpenRouter and returns the generated response.""" + # Reload config to get latest values + config = load_config() # Check if OpenRouter is enabled - if user_config["openrouter"].get("enabled", "False").lower() != "true": + if config["openrouter"].get("enabled", "False").lower() != "true": logging.warning("OpenRouter is not enabled in the configuration.") return "" - + topic_instruction = "" selected_topic = "" + secondary_topic_instruction = "" # Unique list of recent prompts recent_prompts = list(set(load_recent_prompts())) if topic == "random": - topics = [t.strip() for t in user_config["comfyui"]["topics"].split(",") if t.strip()] + topics = [t.strip() for t in config["comfyui"]["topics"].split(",") if t.strip()] selected_topic = random.choice(topics) if topics else "" elif topic != "": selected_topic = topic else: # Decide on whether to include a topic (e.g., 30% chance to include) - topics = [t.strip() for t in user_config["comfyui"]["topics"].split(",") if t.strip()] + topics = [t.strip() for t in config["comfyui"]["topics"].split(",") if t.strip()] if random.random() < 0.3 and topics: selected_topic = random.choice(topics) if selected_topic != "": topic_instruction = f" Incorporate the theme of '{selected_topic}' into the new prompt." 
+ # Add secondary topic if configured and not empty + secondary_topic = config["comfyui"].get("secondary_topic", "").strip() + if secondary_topic: + secondary_topic_instruction = f" Additionally incorporate the theme of '{secondary_topic}' into the new prompt." + user_content = ( "Can you generate me a really random image idea, Do not exceed 10 words. Use clear language, not poetic metaphors." + topic_instruction + + secondary_topic_instruction + "Avoid prompts similar to the following:" + "\n".join(f"{i+1}. {p}" for i, p in enumerate(recent_prompts)) ) diff --git a/libs/openwebui.py index 7a5b702..201a40c 100644 --- a/libs/openwebui.py +++ b/libs/openwebui.py @@ -17,26 +17,35 @@ output_folder = user_config["comfyui"]["output_dir"] def create_prompt_on_openwebui(prompt: str, topic: str = "random", model: str = None) -> str: """Sends prompt to OpenWebui and returns the generated response.""" + # Reload config to get latest values + config = load_config() topic_instruction = "" selected_topic = "" + secondary_topic_instruction = "" # Unique list of recent prompts recent_prompts = list(set(load_recent_prompts())) if topic == "random": - topics = [t.strip() for t in user_config["comfyui"]["topics"].split(",") if t.strip()] + topics = [t.strip() for t in config["comfyui"]["topics"].split(",") if t.strip()] selected_topic = random.choice(topics) elif topic != "": selected_topic = topic else: # Decide on whether to include a topic (e.g., 30% chance to include) - topics = [t.strip() for t in user_config["comfyui"]["topics"].split(",") if t.strip()] + topics = [t.strip() for t in config["comfyui"]["topics"].split(",") if t.strip()] if random.random() < 0.3 and topics: selected_topic = random.choice(topics) if selected_topic != "": topic_instruction = f" Incorporate the theme of '{selected_topic}' into the new prompt." 
+ # Add secondary topic if configured and not empty + secondary_topic = config["comfyui"].get("secondary_topic", "").strip() + if secondary_topic: + secondary_topic_instruction = f" Additionally incorporate the theme of '{secondary_topic}' into the new prompt." + user_content = ( "Can you generate me a really random image idea, Do not exceed 10 words. Use clear language, not poetic metaphors." + topic_instruction + + secondary_topic_instruction + "Avoid prompts similar to the following:" + "\n".join(f"{i+1}. {p}" for i, p in enumerate(recent_prompts)) ) diff --git a/user_config.cfg.sample index 0293b41..67c5500 100644 --- a/user_config.cfg.sample +++ b/user_config.cfg.sample @@ -13,11 +13,15 @@ output_dir = ./output/ prompt = "Generate a random detailed prompt for stable diffusion." width = 1568 height = 672 -topics = +topics = +secondary_topic = -FLUX = False -ONLY_FLUX = False +flux = False +qwen = False +only_flux = False +[comfyui:qwen] +models = qwen-image-Q4_K_S.gguf, qwen-image-Q2_K.gguf [comfyui:flux] models = flux1-dev-Q4_0.gguf,flux1-schnell-Q4_0.gguf