Mirror of https://github.com/karl0ss/ai_image_frame_server.git, synced 2025-09-07 15:13:15 +01:00.
Compare commits: 06d3a64bb9 ... 086695d898 (2 commits)
Author | SHA1 | Date
---|---|---
| 086695d898 |
| a63668cc93 |
@@ -1,5 +1,5 @@
 [tool.bumpversion]
-current_version = "0.3.11"
+current_version = "0.3.12"
 parse = "(?P<major>\\d+)\\.(?P<minor>\\d+)\\.(?P<patch>\\d+)"
 serialize = ["{major}.{minor}.{patch}"]
 replace = "{new_version}"
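The `parse` regex and `serialize` template above are what let the version bump turn `0.3.11` into `0.3.12`. A minimal Python sketch of that mechanic (illustration only, not bump-my-version's actual code; `bump_patch` is a made-up helper):

```python
import re

# Same parse/serialize patterns as in [tool.bumpversion], restated for illustration.
PARSE = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
SERIALIZE = "{major}.{minor}.{patch}"

def bump_patch(version: str) -> str:
    """Decompose a version with the parse regex, increment patch, re-serialize."""
    parts = {k: int(v) for k, v in re.match(PARSE, version).groupdict().items()}
    parts["patch"] += 1
    return SERIALIZE.format(**parts)

print(bump_patch("0.3.11"))  # -> 0.3.12, the value this commit writes back
```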
@@ -4,7 +4,7 @@ FROM python:3.11-slim
 # Set the working directory in the container
 WORKDIR /app
 # Set version label
-ARG VERSION="0.3.11"
+ARG VERSION="0.3.12"
 LABEL version=$VERSION

 # Copy project files into the container
@@ -240,7 +240,6 @@ def create_prompt_with_random_model(base_prompt: str, topic: str = "random"):
         logging.error(f"Error with OpenRouter: {e}")
         return "A colorful abstract composition"  # Default fallback prompt

-    return "A colorful abstract composition"  # Default fallback prompt

 user_config = load_config()
 output_folder = user_config["comfyui"]["output_dir"]
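This hunk deletes a second, unreachable copy of the fallback return that sat after the one inside the `except` block. A hypothetical sketch of the function's tail after the change (the surrounding try/except structure is assumed; only the quoted lines appear in the diff):

```python
import logging

def create_prompt_with_random_model(base_prompt: str, topic: str = "random"):
    try:
        return ...  # prompt built via OpenRouter / a randomly chosen model
    except Exception as e:
        logging.error(f"Error with OpenRouter: {e}")
        return "A colorful abstract composition"  # Default fallback prompt
    # The duplicated fallback return that used to follow the function is gone;
    # the error path now returns exactly once.
```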
@@ -1,8 +1,9 @@
 import random
 import logging
-from openai import OpenAI
+from openai import OpenAI, RateLimitError
 import nest_asyncio
 from libs.generic import load_recent_prompts, load_config
+from libs.openwebui import create_prompt_on_openwebui
 import re
 nest_asyncio.apply()

@@ -90,6 +91,20 @@ def create_prompt_on_openrouter(prompt: str, topic: str = "random", model: str =
         prompt = match.group(1)
         logging.debug(prompt)
         return prompt
+    except RateLimitError as e:
+        logging.warning(f"OpenRouter rate limit exceeded (429): {e}. Falling back to local OpenWebUI model.")
+        # Try to use OpenWebUI as fallback
+        openwebui_models = [m.strip() for m in user_config["openwebui"]["models"].split(",") if m.strip()] if "openwebui" in user_config and "models" in user_config["openwebui"] else []
+        if openwebui_models:
+            selected_model = random.choice(openwebui_models)
+            try:
+                return create_prompt_on_openwebui(user_content, topic, selected_model)
+            except Exception as e2:
+                logging.error(f"OpenWebUI fallback also failed: {e2}")
+                return "A colorful abstract composition"  # Final fallback
+        else:
+            logging.error("No OpenWebUI models configured for fallback.")
+            return "A colorful abstract composition"  # Final fallback
     except Exception as e:
         logging.error(f"Error generating prompt with OpenRouter: {e}")
         return ""
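Taken together with the import change above, this hunk adds a dedicated 429 path: a `RateLimitError` from the `openai` client triggers a fallback to a randomly chosen OpenWebUI model, with a canned prompt as the last resort. Below is a self-contained sketch of the same pattern under stated assumptions: the OpenRouter call, model id, and config shape are placeholders, and `create_prompt_on_openwebui` is stubbed, so this is not the repo's exact code.

```python
import logging
import random

from openai import OpenAI, RateLimitError

# Stand-ins for the repo's helpers/config; names and signatures are illustrative only.
def create_prompt_on_openwebui(content: str, topic: str, model: str) -> str: ...
user_config = {"openwebui": {"models": "llama3.1, qwen2.5"}}  # assumed config shape

def prompt_with_fallback(client: OpenAI, user_content: str, topic: str = "random") -> str:
    """Try OpenRouter first; on HTTP 429, fall back to a random OpenWebUI model."""
    try:
        response = client.chat.completions.create(
            model="openrouter/auto",  # placeholder model id
            messages=[{"role": "user", "content": user_content}],
        )
        return response.choices[0].message.content
    except RateLimitError as e:
        logging.warning(f"OpenRouter rate limit exceeded (429): {e}. Falling back to OpenWebUI.")
        models = [m.strip() for m in user_config.get("openwebui", {}).get("models", "").split(",") if m.strip()]
        if not models:
            logging.error("No OpenWebUI models configured for fallback.")
            return "A colorful abstract composition"  # Final fallback
        try:
            return create_prompt_on_openwebui(user_content, topic, random.choice(models))
        except Exception as e2:
            logging.error(f"OpenWebUI fallback also failed: {e2}")
            return "A colorful abstract composition"  # Final fallback
    except Exception as e:
        logging.error(f"Error generating prompt with OpenRouter: {e}")
        return ""
```

Keeping the generic `except Exception` handler after the new `RateLimitError` branch preserves the previous behaviour for every other failure mode; only rate-limit errors take the OpenWebUI detour.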