Mirror of https://github.com/karl0ss/ai_image_frame_server.git (synced 2025-08-10 03:38:28 +01:00)

Compare commits

2 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 1468ac4bbe | |
| | 2e13ecfa2f | |
```diff
@@ -1,5 +1,5 @@
 [tool.bumpversion]
-current_version = "0.3.3"
+current_version = "0.3.4"
 parse = "(?P<major>\\d+)\\.(?P<minor>\\d+)\\.(?P<patch>\\d+)"
 serialize = ["{major}.{minor}.{patch}"]
 replace = "{new_version}"
```
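The version bump above is driven by the parse/serialize pair in this [tool.bumpversion] table. A minimal illustration of what a patch bump does with those patterns (not repository code; the PARSE and SERIALIZE names are introduced here only for the example):

```python
import re

# Regex and format string copied from the [tool.bumpversion] config above.
PARSE = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
SERIALIZE = "{major}.{minor}.{patch}"

parts = re.match(PARSE, "0.3.3").groupdict()
parts["patch"] = str(int(parts["patch"]) + 1)  # patch bump
print(SERIALIZE.format(**parts))  # -> 0.3.4
```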
```diff
@@ -4,7 +4,7 @@ FROM python:3.11-slim
 # Set the working directory in the container
 WORKDIR /app
 # Set version label
-ARG VERSION="0.3.3"
+ARG VERSION="0.3.4"
 LABEL version=$VERSION

 # Copy project files into the container
```
```diff
@@ -158,7 +158,10 @@ def load_prompt_models_from_config():


 def create_prompt_with_random_model(base_prompt: str, topic: str = "random"):
-    """Create a prompt using a randomly selected model from OpenWebUI or OpenRouter."""
+    """Create a prompt using a randomly selected model from OpenWebUI or OpenRouter.
+
+    If OpenWebUI fails, it will retry once. If it fails again, it will fallback to OpenRouter.
+    """
     prompt_models = load_prompt_models_from_config()

     if not prompt_models:
```
```diff
@@ -168,16 +171,59 @@ def create_prompt_with_random_model(base_prompt: str, topic: str = "random"):
     # Randomly select a model
     service, model = random.choice(prompt_models)

-    if service == "openwebui":
-        # Import here to avoid circular imports
-        from libs.openwebui import create_prompt_on_openwebui
-        return create_prompt_on_openwebui(base_prompt, topic)
-    elif service == "openrouter":
-        # Import here to avoid circular imports
-        from libs.openrouter import create_prompt_on_openrouter
-        return create_prompt_on_openrouter(base_prompt, topic)
-
-    return None
+    # Import here to avoid circular imports
+    from libs.openwebui import create_prompt_on_openwebui
+    from libs.openrouter import create_prompt_on_openrouter
+
+    if service == "openwebui":
+        try:
+            # First attempt with OpenWebUI
+            logging.info(f"Attempting to generate prompt with OpenWebUI using model: {model}")
+            result = create_prompt_on_openwebui(base_prompt, topic, model)
+            if result:
+                return result
+
+            # If first attempt returns None, try again
+            logging.warning("First OpenWebUI attempt failed. Retrying...")
+            result = create_prompt_on_openwebui(base_prompt, topic, model)
+            if result:
+                return result
+
+            # If second attempt fails, fallback to OpenRouter
+            logging.warning("Second OpenWebUI attempt failed. Falling back to OpenRouter...")
+            openrouter_models = [m for m in prompt_models if m[0] == "openrouter"]
+            if openrouter_models:
+                _, openrouter_model = random.choice(openrouter_models)
+                return create_prompt_on_openrouter(base_prompt, topic, openrouter_model)
+            else:
+                logging.error("No OpenRouter models configured for fallback.")
+                return "A colorful abstract composition"  # Default fallback prompt
+
+        except Exception as e:
+            logging.error(f"Error with OpenWebUI: {e}")
+            # Fallback to OpenRouter on exception
+            logging.warning("OpenWebUI exception. Falling back to OpenRouter...")
+            openrouter_models = [m for m in prompt_models if m[0] == "openrouter"]
+            if openrouter_models:
+                _, openrouter_model = random.choice(openrouter_models)
+                try:
+                    return create_prompt_on_openrouter(base_prompt, topic, openrouter_model)
+                except Exception as e2:
+                    logging.error(f"Error with OpenRouter fallback: {e2}")
+                    return "A colorful abstract composition"  # Default fallback prompt
+            else:
+                logging.error("No OpenRouter models configured for fallback.")
+                return "A colorful abstract composition"  # Default fallback prompt
+
+    elif service == "openrouter":
+        try:
+            # Use OpenRouter
+            return create_prompt_on_openrouter(base_prompt, topic, model)
+        except Exception as e:
+            logging.error(f"Error with OpenRouter: {e}")
+            return "A colorful abstract composition"  # Default fallback prompt
+
+    return "A colorful abstract composition"  # Default fallback prompt


 user_config = load_config()
 output_folder = user_config["comfyui"]["output_dir"]
```
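The rewritten branch above follows a try-twice-then-fall-back flow: two OpenWebUI attempts, then a randomly chosen OpenRouter model, and a hard-coded default prompt as the last resort. A compact, hypothetical sketch of that control flow (not code from this repository; the helper name and the callables it takes are illustrative only):

```python
import logging
import random


def generate_with_fallback(primary, fallback_callables, default="A colorful abstract composition"):
    """Hypothetical helper mirroring the flow above.

    `primary` is a zero-argument callable returning a prompt string or None;
    `fallback_callables` is a list of zero-argument callables for the fallback service.
    """
    for attempt in (1, 2):
        try:
            result = primary()
            if result:
                return result
            logging.warning("Primary attempt %d returned no result.", attempt)
        except Exception as exc:
            logging.error("Primary attempt %d raised: %s", attempt, exc)
            break  # an exception skips the retry and goes straight to the fallback
    if fallback_callables:
        try:
            return random.choice(fallback_callables)() or default
        except Exception as exc:
            logging.error("Fallback failed: %s", exc)
    else:
        logging.error("No fallback callables configured.")
    return default
```

Note that in the diff itself an exception from the first OpenWebUI call lands in the except block and falls back to OpenRouter immediately; only a None result triggers the second OpenWebUI attempt.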
```diff
@@ -73,17 +73,22 @@ def create_prompt_on_openwebui(prompt: str, topic: str = "random", model: str =
     ]

     # Send the chat request
-    result = client.chat(
-        question=user_content,
-        chat_title=datetime.now().strftime("%Y-%m-%d %H:%M"),
-        folder_name="ai-frame-image-server"
-    )
-
-    if result:
-        prompt = result["response"].strip('"')
-    else:
-        # Fallback if the request fails
-        prompt = "A vibrant landscape"
+    try:
+        result = client.chat(
+            question=user_content,
+            chat_title=datetime.now().strftime("%Y-%m-%d %H:%M"),
+            folder_name="ai-frame-image-server"
+        )
+
+        if result:
+            prompt = result["response"].strip('"')
+        else:
+            # Return None if the request fails
+            logging.warning(f"OpenWebUI request failed with model: {model}")
+            return None
+    except Exception as e:
+        logging.error(f"Error in OpenWebUI request with model {model}: {e}")
+        return None

     match = re.search(r'"([^"]+)"', prompt)
     if not match:
```
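With this change create_prompt_on_openwebui reports failure by returning None instead of substituting a canned prompt, which is what allows the retry and OpenRouter fallback in create_prompt_with_random_model (earlier in this compare) to react to it. A hedged caller-side sketch; the prompt text, topic, and model name below are placeholders, not values from the repository config:

```python
import logging

from libs.openwebui import create_prompt_on_openwebui

# "llama3" is a placeholder model name used only for illustration.
prompt = create_prompt_on_openwebui("Describe a scene for an image", topic="nature", model="llama3")
if prompt is None:
    # The caller owns the failure policy: retry, switch to OpenRouter, or use a default.
    logging.warning("OpenWebUI returned no prompt; falling back to a default.")
    prompt = "A colorful abstract composition"
```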