Use the OpenWebUI client rather than LiteLLM and Ollama

This commit is contained in:
Karl 2025-07-29 14:25:13 +01:00
parent aa75646d5f
commit e2acd2dcd6
5 changed files with 45 additions and 40 deletions

View File

@ -15,7 +15,7 @@ from tenacity import (
import nest_asyncio
from libs.generic import rename_image, load_config, save_prompt
from libs.create_thumbnail import generate_thumbnail
from libs.ollama import create_prompt_on_openwebui
from libs.openwebui import create_prompt_on_openwebui
nest_asyncio.apply()
logging.basicConfig(level=logging.INFO)

View File

@ -170,7 +170,7 @@ def create_prompt_with_random_model(base_prompt: str, topic: str = "random"):
if service == "openwebui":
# Import here to avoid circular imports
from libs.ollama import create_prompt_on_openwebui
from libs.openwebui import create_prompt_on_openwebui
return create_prompt_on_openwebui(base_prompt, topic)
elif service == "openrouter":
# Import here to avoid circular imports

View File

@ -1,9 +1,11 @@
import random
import logging
import litellm
import nest_asyncio
from libs.generic import load_recent_prompts, load_config
import re
from openwebui_chat_client import OpenWebUIClient
from datetime import datetime
nest_asyncio.apply()
logging.basicConfig(level=logging.INFO)
@ -33,23 +35,28 @@ def create_prompt_on_openwebui(prompt: str, topic: str = "random", model: str =
topic_instruction = f" Incorporate the theme of '{selected_topic}' into the new prompt."
user_content = (
"Can you generate me a really random image idea, Do not exceed 10 words. Use clear language, not poetic metaphors."
"Can you generate me a really random image idea, Do not exceed 10 words. Use clear language, not poetic metaphors."
+ topic_instruction
+ "Avoid prompts similar to the following:"
+ "\n".join(f"{i+1}. {p}" for i, p in enumerate(recent_prompts))
)
if model:
# Use the specified model
model = model
else:
# Select a random model
model = random.choice(user_config["openwebui"]["models"].split(","))
response = litellm.completion(
api_base=user_config["openwebui"]["base_url"],
model="openai/" + model,
messages=[
model = random.choice(user_config["openwebui"]["models"].split(",")).strip()
# Create OpenWebUI client
client = OpenWebUIClient(
base_url=user_config["openwebui"]["base_url"],
token=user_config["openwebui"]["api_key"],
default_model_id=model
)
# Prepare messages for the chat
messages = [
{
"role": "system",
"content": (
@ -63,23 +70,21 @@ def create_prompt_on_openwebui(prompt: str, topic: str = "random", model: str =
"role": "user",
"content": user_content,
},
],
api_key=user_config["openwebui"]["api_key"],
]
# Send the chat request
result = client.chat(
question=user_content,
chat_title=datetime.now().strftime("%Y-%m-%d %H:%M"),
folder_name="Ai Image Requests"
)
prompt = response["choices"][0]["message"]["content"].strip('"')
# response = litellm.completion(
# api_base=user_config["openwebui"]["base_url"],
# model="openai/brxce/stable-diffusion-prompt-generator:latest",
# messages=[
# {
# "role": "user",
# "content": prompt,
# },
# ],
# api_key=user_config["openwebui"]["api_key"],
# )
# prompt = response["choices"][0]["message"]["content"].strip('"')
if result:
prompt = result["response"].strip('"')
else:
# Fallback if the request fails
prompt = "A vibrant landscape"
match = re.search(r'"([^"]+)"', prompt)
if not match:
match = re.search(r":\s*\n*\s*(.+)", prompt)

Binary file not shown.

View File

@ -1,7 +1,7 @@
from flask import Blueprint, request, render_template, redirect, url_for, session
import threading
from libs.comfyui import create_image, select_model, get_available_models
from libs.ollama import create_prompt_on_openwebui
from libs.openwebui import create_prompt_on_openwebui
from libs.generic import load_models_from_config, load_topics_from_config, load_openrouter_models_from_config, load_openwebui_models_from_config, create_prompt_with_random_model
import os
@ -23,7 +23,7 @@ def create():
# Use the specified prompt model
service, service_model = prompt_model.split(":", 1) if ":" in prompt_model else (prompt_model, "")
if service == "openwebui":
from libs.ollama import create_prompt_on_openwebui
from libs.openwebui import create_prompt_on_openwebui
prompt = create_prompt_on_openwebui(user_config["comfyui"]["prompt"], topic, service_model)
elif service == "openrouter":
from libs.openrouter import create_prompt_on_openrouter