Merge branch 'secondtopic'

This commit is contained in:
Karl 2025-10-31 16:08:41 +00:00
commit 854732b1c2
3 changed files with 51 additions and 70 deletions

View File

@ -187,24 +187,63 @@ def load_prompt_models_from_config():
return prompt_models return prompt_models
def build_user_content(topic: str = "random") -> str:
    """Build the user-content string sent to the prompt-generation model.

    Combines the base instruction with an optional topic directive, an
    optional secondary-topic directive, and a numbered list of recent
    prompts the model should avoid repeating.

    Args:
        topic: ``"random"`` picks a topic at random from the configured
            ``comfyui.topics`` list; ``""`` includes a random topic with
            30% probability; any other value is used verbatim as the topic.

    Returns:
        The assembled instruction string.
    """
    config = load_config()
    topic_instruction = ""
    selected_topic = ""
    secondary_topic_instruction = ""
    # De-duplicate recent prompts so the avoidance list stays compact.
    recent_prompts = list(set(load_recent_prompts()))
    if topic == "random":
        topics = [t.strip() for t in config["comfyui"]["topics"].split(",") if t.strip()]
        selected_topic = random.choice(topics) if topics else ""
    elif topic != "":
        selected_topic = topic
    else:
        # No topic requested: include one anyway 30% of the time for variety.
        topics = [t.strip() for t in config["comfyui"]["topics"].split(",") if t.strip()]
        if random.random() < 0.3 and topics:
            selected_topic = random.choice(topics)
    if selected_topic != "":
        topic_instruction = f" Incorporate the theme of '{selected_topic}' into the new prompt."
    # Add secondary topic if configured and not empty.
    secondary_topic = config["comfyui"].get("secondary_topic", "").strip()
    if secondary_topic:
        secondary_topic_instruction = f" Additionally incorporate the theme of '{secondary_topic}' into the new prompt."
    user_content = (
        "Can you generate me a really random image idea, Do not exceed 20 words. Use clear language, not poetic metaphors."
        + topic_instruction
        + secondary_topic_instruction
        # Newline separators keep the avoidance list distinct from the
        # instructions; previously the header ran straight into the prior
        # sentence and into item "1." with no whitespace.
        + "\nAvoid prompts similar to the following:\n"
        + "\n".join(f"{i+1}. {p}" for i, p in enumerate(recent_prompts))
    )
    return user_content
def create_prompt_with_random_model(base_prompt: str, topic: str = "random"): def create_prompt_with_random_model(base_prompt: str, topic: str = "random"):
"""Create a prompt using a randomly selected model from OpenWebUI or OpenRouter. """Create a prompt using a randomly selected model from OpenWebUI or OpenRouter.
If OpenWebUI fails, it will retry once. If it fails again, it will fallback to OpenRouter. If OpenWebUI fails, it will retry once. If it fails again, it will fallback to OpenRouter.
""" """
prompt_models = load_prompt_models_from_config() prompt_models = load_prompt_models_from_config()
if not prompt_models: if not prompt_models:
logging.warning("No prompt generation models configured.") logging.warning("No prompt generation models configured.")
return None return None
# Randomly select a model # Randomly select a model
service, model = random.choice(prompt_models) service, model = random.choice(prompt_models)
# Import here to avoid circular imports # Import here to avoid circular imports
from libs.openwebui import create_prompt_on_openwebui from libs.openwebui import create_prompt_on_openwebui
from libs.openrouter import create_prompt_on_openrouter from libs.openrouter import create_prompt_on_openrouter
if service == "openwebui": if service == "openwebui":
try: try:
# First attempt with OpenWebUI # First attempt with OpenWebUI
@ -212,13 +251,13 @@ def create_prompt_with_random_model(base_prompt: str, topic: str = "random"):
result = create_prompt_on_openwebui(base_prompt, topic, model) result = create_prompt_on_openwebui(base_prompt, topic, model)
if result: if result:
return result return result
# If first attempt returns None, try again # If first attempt returns None, try again
logging.warning("First OpenWebUI attempt failed. Retrying...") logging.warning("First OpenWebUI attempt failed. Retrying...")
result = create_prompt_on_openwebui(base_prompt, topic, model) result = create_prompt_on_openwebui(base_prompt, topic, model)
if result: if result:
return result return result
# If second attempt fails, fallback to OpenRouter # If second attempt fails, fallback to OpenRouter
logging.warning("Second OpenWebUI attempt failed. Falling back to OpenRouter...") logging.warning("Second OpenWebUI attempt failed. Falling back to OpenRouter...")
openrouter_models = [m for m in prompt_models if m[0] == "openrouter"] openrouter_models = [m for m in prompt_models if m[0] == "openrouter"]
@ -228,7 +267,7 @@ def create_prompt_with_random_model(base_prompt: str, topic: str = "random"):
else: else:
logging.error("No OpenRouter models configured for fallback.") logging.error("No OpenRouter models configured for fallback.")
return "A colorful abstract composition" # Default fallback prompt return "A colorful abstract composition" # Default fallback prompt
except Exception as e: except Exception as e:
logging.error(f"Error with OpenWebUI: {e}") logging.error(f"Error with OpenWebUI: {e}")
# Fallback to OpenRouter on exception # Fallback to OpenRouter on exception
@ -244,7 +283,7 @@ def create_prompt_with_random_model(base_prompt: str, topic: str = "random"):
else: else:
logging.error("No OpenRouter models configured for fallback.") logging.error("No OpenRouter models configured for fallback.")
return "A colorful abstract composition" # Default fallback prompt return "A colorful abstract composition" # Default fallback prompt
elif service == "openrouter": elif service == "openrouter":
try: try:
# Use OpenRouter # Use OpenRouter
@ -252,7 +291,7 @@ def create_prompt_with_random_model(base_prompt: str, topic: str = "random"):
except Exception as e: except Exception as e:
logging.error(f"Error with OpenRouter: {e}") logging.error(f"Error with OpenRouter: {e}")
return "A colorful abstract composition" # Default fallback prompt return "A colorful abstract composition" # Default fallback prompt
user_config = load_config() user_config = load_config()
output_folder = user_config["comfyui"]["output_dir"] output_folder = user_config["comfyui"]["output_dir"]

View File

@ -40,36 +40,7 @@ def create_prompt_on_openrouter(prompt: str, topic: str = "random", model: str =
logging.warning("OpenRouter is not enabled in the configuration.") logging.warning("OpenRouter is not enabled in the configuration.")
return "" return ""
topic_instruction = "" user_content = build_user_content(topic)
selected_topic = ""
secondary_topic_instruction = ""
# Unique list of recent prompts
recent_prompts = list(set(load_recent_prompts()))
if topic == "random":
topics = [t.strip() for t in config["comfyui"]["topics"].split(",") if t.strip()]
selected_topic = random.choice(topics) if topics else ""
elif topic != "":
selected_topic = topic
else:
# Decide on whether to include a topic (e.g., 30% chance to include)
topics = [t.strip() for t in config["comfyui"]["topics"].split(",") if t.strip()]
if random.random() < 0.3 and topics:
selected_topic = random.choice(topics)
if selected_topic != "":
topic_instruction = f" Incorporate the theme of '{selected_topic}' into the new prompt."
# Add secondary topic if configured and not empty
secondary_topic = config["comfyui"].get("secondary_topic", "").strip()
if secondary_topic:
secondary_topic_instruction = f" Additionally incorporate the theme of '{secondary_topic}' into the new prompt, in the style of."
user_content = (
"Can you generate me a really random image idea, Do not exceed 20 words. Use clear language, not poetic metaphors."
+ topic_instruction
+ secondary_topic_instruction
+ "Avoid prompts similar to the following:"
+ "\n".join(f"{i+1}. {p}" for i, p in enumerate(recent_prompts))
)
# Load configured models # Load configured models
configured_models = [m.strip() for m in user_config["openrouter"]["models"].split(",") if m.strip()] configured_models = [m.strip() for m in user_config["openrouter"]["models"].split(",") if m.strip()]

View File

@ -19,36 +19,7 @@ def create_prompt_on_openwebui(prompt: str, topic: str = "random", model: str =
"""Sends prompt to OpenWebui and returns the generated response.""" """Sends prompt to OpenWebui and returns the generated response."""
# Reload config to get latest values # Reload config to get latest values
config = load_config() config = load_config()
topic_instruction = "" user_content = build_user_content(topic)
selected_topic = ""
secondary_topic_instruction = ""
# Unique list of recent prompts
recent_prompts = list(set(load_recent_prompts()))
if topic == "random":
topics = [t.strip() for t in config["comfyui"]["topics"].split(",") if t.strip()]
selected_topic = random.choice(topics)
elif topic != "":
selected_topic = topic
else:
# Decide on whether to include a topic (e.g., 30% chance to include)
topics = [t.strip() for t in config["comfyui"]["topics"].split(",") if t.strip()]
if random.random() < 0.3 and topics:
selected_topic = random.choice(topics)
if selected_topic != "":
topic_instruction = f" Incorporate the theme of '{selected_topic}' into the new prompt."
# Add secondary topic if configured and not empty
secondary_topic = config["comfyui"].get("secondary_topic", "").strip()
if secondary_topic:
secondary_topic_instruction = f" Additionally incorporate the theme of '{secondary_topic}' into the new prompt, in the style of."
user_content = (
"Can you generate me a really random image idea, Do not exceed 20 words. Use clear language, not poetic metaphors."
+ topic_instruction
+ secondary_topic_instruction
+ "Avoid prompts similar to the following:"
+ "\n".join(f"{i+1}. {p}" for i, p in enumerate(recent_prompts))
)
if model: if model:
# Use the specified model # Use the specified model