From d97ef3f171321aa59afc5e82f6e5b5fd78696420 Mon Sep 17 00:00:00 2001
From: Karl Hudgell
Date: Sat, 29 Mar 2025 15:50:38 +0000
Subject: [PATCH] add support for multiple models and posting a prompt to /create

---
 ai_frame_image_server.py | 14 +++++++++-----
 lib.py                   | 25 +++++++++++++++++++++----
 user_config.cfg.sample   |  2 +-
 3 files changed, 31 insertions(+), 10 deletions(-)

diff --git a/ai_frame_image_server.py b/ai_frame_image_server.py
index 7499931..a2a83df 100644
--- a/ai_frame_image_server.py
+++ b/ai_frame_image_server.py
@@ -1,4 +1,4 @@
-from flask import Flask, render_template, send_from_directory, redirect, url_for
+from flask import Flask, render_template, send_from_directory, redirect, url_for, request, jsonify
 import os
 
 from lib import create_image, load_config
@@ -10,19 +10,23 @@ image_folder = "./output"
 
 @app.route('/')
 def index():
-    # latest_image = get_latest_image()
     return render_template("index.html", image="./image.png", reload_interval=user_config["frame"]["reload_interval"])
 
 @app.route('/images/<filename>')
 def images(filename):
     return send_from_directory(image_folder, filename)
 
-@app.route('/create')
+@app.route('/create', methods=["GET", "POST"])
 def create():
-    """Endpoint to create a new image."""
-    create_image()
+    """Endpoint to create a new image. Supports optional prompt via POST."""
+    prompt = request.form.get("prompt") if request.method == "POST" else None
+    create_image(prompt) # Pass prompt to create_image()
+
+    if request.method == "POST":
+        return jsonify({"message": "Image created", "prompt": prompt}), 200
     return redirect(url_for("index"))
 
+
 if __name__ == '__main__':
     os.makedirs(image_folder, exist_ok=True)  # Ensure the folder exists
     app.run(host="0.0.0.0", port=user_config["frame"]["port"], debug=True)
diff --git a/lib.py b/lib.py
index eb9f9f8..a57e52a 100644
--- a/lib.py
+++ b/lib.py
@@ -5,9 +5,19 @@ import sys
 import litellm
 import time
 import os
-
+import requests
 from comfy_api_simplified import ComfyApiWrapper, ComfyWorkflowWrapper
 
+def get_available_models():
+    url = user_config["comfyui"]["comfyui_url"] + "/object_info"
+    response = requests.get(url)
+    if response.status_code == 200:
+        data = response.json()
+        return data.get("CheckpointLoaderSimple", {}).get("input", {}).get("required", {}).get("ckpt_name", [])[0]
+    else:
+        print(f"Failed to fetch models: {response.status_code}")
+        return []
+
 def load_config():
     user_config = configparser.ConfigParser()
     try:
@@ -18,6 +28,7 @@ def load_config():
         logging.error(f"Missing configuration key: {e}")
         sys.exit(1)
 
+
 def rename_image():
     """Rename 'image.png' to a timestamped filename if it exists in the output folder."""
     old_path = os.path.join(user_config["comfyui"]["output_dir"], "image.png")
@@ -32,6 +43,7 @@
         print("No image.png found.")
         return None
 
+
 def send_prompt_to_openwebui(prompt):
     response = litellm.completion(
         api_base=user_config["openwebui"]["base_url"],
@@ -62,8 +74,12 @@
     wf.set_node_param("Save Image", "filename_prefix", file_name)
     wf.set_node_param("Empty Latent Image", "width", user_config["comfyui"]["width"])
     wf.set_node_param("Empty Latent Image", "height", user_config["comfyui"]["height"])
+    valid_models = list(set(get_available_models()) & set(user_config["comfyui"]["models"].split(",")))
+    if not valid_models:
+        raise Exception("No valid options available.")
+    model = random.choice(valid_models)
     wf.set_node_param(
-        "Load Checkpoint", "ckpt_name", user_config["comfyui"]["model"]
+        "Load Checkpoint", "ckpt_name", model
     )
     # Queue your workflow for completion
     logging.debug(f"Generating image: {file_name}")
@@ -79,9 +95,10 @@
     logging.error(f"Failed to generate image for UID: {file_name}. Error: {e}")
 
 
-def create_image():
+def create_image(prompt):
     """Main function for generating images."""
-    prompt = send_prompt_to_openwebui(user_config["comfyui"]["prompt"])
+    if prompt is None:
+        prompt = send_prompt_to_openwebui(user_config["comfyui"]["prompt"])
     print(f"Generated prompt: {prompt}")
     generate_image("image", prompt)
 
diff --git a/user_config.cfg.sample b/user_config.cfg.sample
index d57dd72..2827152 100644
--- a/user_config.cfg.sample
+++ b/user_config.cfg.sample
@@ -4,7 +4,7 @@ port = 5000
 
 [comfyui]
 comfyui_url = http://comfyui
-model = zavychromaxl_v100.safetensors
+models = zavychromaxl_v100.safetensors
 output_dir = ./output/
 prompt = "Be explicit, only return the prompt and no other text, Generate a random detailed prompt for stable diffusion."
 width = 1568