mirror of https://github.com/karl0ss/ai_image_frame_server.git (synced 2025-06-20 15:29:12 +01:00)

commit 08f3a80169 (parent c7d71bfd03)
add selecting the topic when doing random prompt
@@ -19,6 +19,7 @@ from libs.generic import (
     get_details_from_png,
     get_current_version,
     load_models_from_config,
+    load_topics_from_config
 )
 from libs.comfyui import cancel_current_job, create_image, select_model
 from libs.ollama import create_prompt_on_openwebui
@@ -122,9 +123,10 @@ def create():
     if request.method == "POST":
         prompt = request.form.get("prompt")
         selected_workflow, model = select_model(request.form.get("model") or "Random")
+        topic = request.form.get("topic")

         if not prompt:
-            prompt = create_prompt_on_openwebui(user_config["comfyui"]["prompt"])
+            prompt = create_prompt_on_openwebui(user_config["comfyui"]["prompt"], topic)

         # Start generation in background
         threading.Thread(target=lambda: create_image(prompt, model)).start()
@@ -158,8 +160,9 @@ def create_image_endpoint() -> str:
     if user_config["frame"]["create_requires_auth"] == "True" and not session.get('authenticated'):
         return redirect(url_for("login"))
     models = load_models_from_config()
+    topics = load_topics_from_config()

-    return render_template("create_image.html", models=models)
+    return render_template("create_image.html", models=models, topics=topics)


 if user_config["frame"]["auto_regen"] == "True":
@@ -105,6 +105,11 @@ def load_models_from_config():
     all_models = flux_models + sdxl_models
     return all_models

+
+def load_topics_from_config():
+    topics = user_config["comfyui"]["topics"].split(", ")
+    return topics
+

 user_config = load_config()
 output_folder = user_config["comfyui"]["output_dir"]
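For context, a rough sketch of what the new helper returns, assuming the topics value in the comfyui section of the config is a string of comma-plus-space separated names (the example values here are made up, not taken from the repo):

    # Hypothetical config value; the real one lives in the user's config file.
    topics_value = "cyberpunk city, deep sea, retro futurism"

    # Same parsing as load_topics_from_config(): split on ", ".
    topics = topics_value.split(", ")
    print(topics)  # ['cyberpunk city', 'deep sea', 'retro futurism']

Note that create_prompt_on_openwebui (next hunk) parses the same config value with split(",") plus strip(), so the two only agree when the configured entries are separated by a comma followed by a space.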
@@ -12,15 +12,23 @@ LOG_FILE = "./prompts_log.jsonl"
 user_config = load_config()
 output_folder = user_config["comfyui"]["output_dir"]

-def create_prompt_on_openwebui(prompt: str) -> str:
+def create_prompt_on_openwebui(prompt: str, topic: str = "random") -> str:
     """Sends prompt to OpenWebui and returns the generated response."""
+    topic_instruction = ""
+    selected_topic = ""
     # Unique list of recent prompts
     recent_prompts = list(set(load_recent_prompts()))
-    # Decide on whether to include a topic (e.g., 30% chance to include)
-    topics = [t.strip() for t in user_config["comfyui"]["topics"].split(",") if t.strip()]
-    topic_instruction = ""
-    if random.random() < 0.3 and topics:
-        selected_topic = random.choice(topics)
-        topic_instruction = f" Incorporate the theme of '{selected_topic}' into the new prompt."
+    if topic == "random":
+        topics = [t.strip() for t in user_config["comfyui"]["topics"].split(",") if t.strip()]
+        selected_topic = random.choice(topics)
+    elif topic != "":
+        selected_topic = topic
+    else:
+        # Decide on whether to include a topic (e.g., 30% chance to include)
+        topics = [t.strip() for t in user_config["comfyui"]["topics"].split(",") if t.strip()]
+        if random.random() < 0.5 and topics:
+            selected_topic = random.choice(topics)
+    if selected_topic != "":
+        topic_instruction = f" Incorporate the theme of '{selected_topic}' into the new prompt."

     user_content = (
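Read in isolation, the new topic handling inside create_prompt_on_openwebui comes down to three cases. A minimal standalone sketch of that branch logic (the function name pick_topic and the example strings are illustrative, not from the repo):

    import random

    def pick_topic(topic: str, configured_topics: str) -> str:
        """Mirror of the branch logic in the hunk above."""
        topics = [t.strip() for t in configured_topics.split(",") if t.strip()]
        if topic == "random":
            # "Random" chosen in the UI: always use one of the configured topics.
            return random.choice(topics)
        if topic != "":
            # A concrete topic was chosen in the dropdown: use it as-is.
            return topic
        # "No Topic" chosen: still mix a topic in roughly half the time.
        if topics and random.random() < 0.5:
            return random.choice(topics)
        return ""

    print(pick_topic("random", "cyberpunk city, deep sea"))
    print(pick_topic("", "cyberpunk city, deep sea"))  # may print an empty line

Whatever comes out of this selection is then folded into the instruction sent to the model through the topic_instruction string shown above.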
@@ -144,6 +144,17 @@
         </optgroup>
       </select>

+      <select id="topic-select">
+        <option value="">No Topic</option>
+        <option value="random">Random</option>
+        <optgroup label="Topics">
+          {% for t in topics %}
+          <option value="{{ t }}">{{ t }}</option>
+          {% endfor %}
+        </optgroup>
+      </select>
+
+
     </div>

     <!-- waiting overlay -->
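The new dropdown is filled from the topics list that create_image_endpoint now passes to the template. A small sketch of how that loop expands, rendering just the fragment with jinja2 directly and made-up topic names (the real page goes through Flask's render_template):

    from jinja2 import Template

    # Only the inner loop of the new topic dropdown, rendered with example values.
    snippet = Template(
        '{% for t in topics %}<option value="{{ t }}">{{ t }}</option>\n{% endfor %}'
    )
    print(snippet.render(topics=["cyberpunk city", "deep sea"]))
    # <option value="cyberpunk city">cyberpunk city</option>
    # <option value="deep sea">deep sea</option>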
@@ -183,9 +194,11 @@
     function randomPrompt() {
       showSpinner();
       const model = document.getElementById('model-select').value;
+      const topic = document.getElementById('topic-select').value; // this line was missing

       const formData = new URLSearchParams();
       formData.append('model', model);
+      formData.append('topic', topic); // include topic in request

       fetch('/create', {
         method: 'POST',
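End to end, randomPrompt() now submits the same form-encoded body any plain HTTP client could. A hypothetical equivalent using the requests library (the host and port are assumptions for a local dev server; the /create path comes from the fetch call above):

    import requests

    # Same payload the updated randomPrompt() builds with URLSearchParams.
    resp = requests.post(
        "http://localhost:5000/create",                # assumed local address of the Flask app
        data={"model": "Random", "topic": "random"},   # form-encoded, like the fetch() body
    )
    print(resp.status_code)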
|
Loading…
x
Reference in New Issue
Block a user