mirror of https://github.com/karl0ss/ai_image_frame_server.git
synced 2025-08-05 09:58:28 +01:00

Use the OpenWebUI client rather than litellm and Ollama

This commit is contained in:
parent aa75646d5f
commit e2acd2dcd6
@@ -15,7 +15,7 @@ from tenacity import (
 import nest_asyncio
 from libs.generic import rename_image, load_config, save_prompt
 from libs.create_thumbnail import generate_thumbnail
-from libs.ollama import create_prompt_on_openwebui
+from libs.openwebui import create_prompt_on_openwebui
 nest_asyncio.apply()

 logging.basicConfig(level=logging.INFO)
@@ -170,7 +170,7 @@ def create_prompt_with_random_model(base_prompt: str, topic: str = "random"):

     if service == "openwebui":
         # Import here to avoid circular imports
-        from libs.ollama import create_prompt_on_openwebui
+        from libs.openwebui import create_prompt_on_openwebui
         return create_prompt_on_openwebui(base_prompt, topic)
     elif service == "openrouter":
         # Import here to avoid circular imports
@@ -1,9 +1,11 @@
 import random
 import logging
-import litellm
 import nest_asyncio
 from libs.generic import load_recent_prompts, load_config
 import re
+from openwebui_chat_client import OpenWebUIClient
+from datetime import datetime

 nest_asyncio.apply()

 logging.basicConfig(level=logging.INFO)
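Note: the rewritten module keeps reading its settings from load_config(). A minimal sketch of the nested-dict shape that the user_config["openwebui"][...] lookups in this diff imply (key names from the diff; values are made-up placeholders):

# Hypothetical shape only, inferred from the lookups in this commit.
user_config = {
    "openwebui": {
        "base_url": "http://localhost:3000",   # OpenWebUI instance URL (placeholder)
        "api_key": "sk-xxxx",                  # API token (placeholder)
        "models": "llama3.1:8b,mistral:7b",    # comma-separated model ids (examples)
    }
}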
@@ -33,23 +35,28 @@ def create_prompt_on_openwebui(prompt: str, topic: str = "random", model: str =
     topic_instruction = f" Incorporate the theme of '{selected_topic}' into the new prompt."

     user_content = (
-        "Can you generate me a really random image idea, Do not exceed 10 words. Use clear language, not poetic metaphors.”"
+        "Can you generate me a really random image idea, Do not exceed 10 words. Use clear language, not poetic metaphors."
        + topic_instruction
        + "Avoid prompts similar to the following:"
        + "\n".join(f"{i+1}. {p}" for i, p in enumerate(recent_prompts))
     )

     if model:
         # Use the specified model
         model = model
     else:
         # Select a random model
-        model = random.choice(user_config["openwebui"]["models"].split(","))
-    response = litellm.completion(
-        api_base=user_config["openwebui"]["base_url"],
-        model="openai/" + model,
-        messages=[
+        model = random.choice(user_config["openwebui"]["models"].split(",")).strip()
+
+    # Create OpenWebUI client
+    client = OpenWebUIClient(
+        base_url=user_config["openwebui"]["base_url"],
+        token=user_config["openwebui"]["api_key"],
+        default_model_id=model
+    )
+
+    # Prepare messages for the chat
+    messages = [
         {
             "role": "system",
             "content": (
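Why the added .strip() matters: str.split(",") keeps any whitespace around the commas, so a config value written with spaces after the commas would yield a model id with a leading space. A quick illustration (model ids hypothetical):

models = "llama3.1:8b, mistral:7b".split(",")
print(repr(models[1]))          # ' mistral:7b'  (leading space breaks the model id)
print(repr(models[1].strip()))  # 'mistral:7b'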
@@ -63,23 +70,21 @@ def create_prompt_on_openwebui(prompt: str, topic: str = "random", model: str =
             "role": "user",
             "content": user_content,
         },
-        ],
-        api_key=user_config["openwebui"]["api_key"],
+    ]
+
+    # Send the chat request
+    result = client.chat(
+        question=user_content,
+        chat_title=datetime.now().strftime("%Y-%m-%d %H:%M"),
+        folder_name="Ai Image Requests"
     )

-    prompt = response["choices"][0]["message"]["content"].strip('"')
-    # response = litellm.completion(
-    #     api_base=user_config["openwebui"]["base_url"],
-    #     model="openai/brxce/stable-diffusion-prompt-generator:latest",
-    #     messages=[
-    #         {
-    #             "role": "user",
-    #             "content": prompt,
-    #         },
-    #     ],
-    #     api_key=user_config["openwebui"]["api_key"],
-    # )
-    # prompt = response["choices"][0]["message"]["content"].strip('"')
+    if result:
+        prompt = result["response"].strip('"')
+    else:
+        # Fallback if the request fails
+        prompt = "A vibrant landscape"
+
     match = re.search(r'"([^"]+)"', prompt)
     if not match:
         match = re.search(r":\s*\n*\s*(.+)", prompt)
BIN requirements.txt
Binary file not shown.
@@ -1,7 +1,7 @@
 from flask import Blueprint, request, render_template, redirect, url_for, session
 import threading
 from libs.comfyui import create_image, select_model, get_available_models
-from libs.ollama import create_prompt_on_openwebui
+from libs.openwebui import create_prompt_on_openwebui
 from libs.generic import load_models_from_config, load_topics_from_config, load_openrouter_models_from_config, load_openwebui_models_from_config, create_prompt_with_random_model
 import os

@@ -23,7 +23,7 @@ def create():
         # Use the specified prompt model
         service, service_model = prompt_model.split(":", 1) if ":" in prompt_model else (prompt_model, "")
         if service == "openwebui":
-            from libs.ollama import create_prompt_on_openwebui
+            from libs.openwebui import create_prompt_on_openwebui
             prompt = create_prompt_on_openwebui(user_config["comfyui"]["prompt"], topic, service_model)
         elif service == "openrouter":
             from libs.openrouter import create_prompt_on_openrouter
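Note on the dispatch above: prompt_model packs the service and the model id into one string, and the split is capped at one so that model ids containing colons survive intact. For example (model id hypothetical):

prompt_model = "openwebui:llama3.1:8b"  # placeholder value
service, service_model = (
    prompt_model.split(":", 1) if ":" in prompt_model else (prompt_model, "")
)
print(service)        # openwebui
print(service_model)  # llama3.1:8b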