code updates for date, fix update flux guff, new prompt logic

Karl 2025-06-06 11:15:43 +01:00
parent 2a9a226dd1
commit 4b52e5c713
5 changed files with 11 additions and 9 deletions

View File

@@ -60,7 +60,8 @@ def image_details(filename):
details = get_details_from_png(path)
return {
"prompt": details["p"],
"model": details["m"]
"model": details["m"],
"date": details["d"]
}
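
For reference, a minimal sketch of the dict shape image_details() now returns; the filename and values are illustrative, and the new "date" key is what the lightbox script in the last file reads:

details = image_details("example.png")   # illustrative filename
# details == {"prompt": "...", "model": "...", "date": "06-06-2025"}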

View File

@@ -155,8 +155,8 @@ def create_image(prompt: str | None = None, model: str = "Random") -> None:
seed_param="seed",
save_node="CivitAI Image Saver",
save_param="filename",
model_node="CivitAI Image Saver",
model_param="modelname",
model_node="Unet Loader (GGUF)",
model_param="unet_name",
model=model
)
else: # SDXL
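
The node/param pairs above normally tell a ComfyUI workflow patcher which node input to overwrite before the job is queued. A rough sketch of that pattern, assuming an API-format workflow JSON; the patch_workflow helper, the workflow filename, and the GGUF checkpoint name are illustrative, not taken from this repo:

import json

def patch_workflow(workflow: dict, node_title: str, param: str, value) -> dict:
    # Match nodes by their "_meta" title and overwrite one input value.
    for node in workflow.values():
        if node.get("_meta", {}).get("title") == node_title:
            node["inputs"][param] = value
    return workflow

with open("FLUX.json") as f:   # hypothetical workflow file
    wf = json.load(f)
patch_workflow(wf, "Unet Loader (GGUF)", "unet_name", "flux1-dev-Q4_K_S.gguf")   # example value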

View File

@@ -66,6 +66,7 @@ def rename_image() -> str | None:
def get_details_from_png(path):
try:
date = datetime.fromtimestamp(os.path.getctime(path)).strftime("%d-%m-%Y")
with Image.open(path) as img:
try:
# Flux workflow
@@ -77,7 +78,7 @@ def get_details_from_png(path):
data = json.loads(img.info["prompt"])
prompt = data['6']['inputs']['text']
model = data['4']['inputs']['ckpt_name']
return {"p":prompt,"m":model} or {"p":"","m":""}
return {"p":prompt,"m":model,"d":date} or {"p":"","m":"","c":""}
except Exception as e:
print(f"Error reading metadata from {path}: {e}")
return ""

View File

@@ -24,13 +24,13 @@ def create_prompt_on_openwebui(prompt: str) -> str:
topic_instruction = f" Incorporate the theme of '{selected_topic}' into the new prompt."
user_content = (
"Here are the prompts from the last 7 days:\n\n"
+ "\n".join(f"{i+1}. {p}" for i, p in enumerate(recent_prompts))
+ "\n\nDo not repeat ideas, themes, or settings from the above. "
"Now generate a new, completely original Stable Diffusion prompt that hasn't been done yet."
"Can you generate me a really random image idea, Do not exceed 10 words. Use clear language, not poetic metaphors.”"
+ topic_instruction
+ "Avoid prompts similar to the following:"
+ "\n".join(f"{i+1}. {p}" for i, p in enumerate(recent_prompts))
)
model = random.choice(user_config["openwebui"]["models"].split(","))
response = litellm.completion(
api_base=user_config["openwebui"]["base_url"],
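
The completion call is cut off above; a hedged sketch of how the remainder typically looks with litellm's OpenAI-style chat interface. Everything up to api_base mirrors the diff; the system message, the "openai/" provider prefix, the api_key lookup, and the response parsing are assumptions:

import random
import litellm

# user_config and user_content come from the surrounding module, as in the diff above.
model = random.choice(user_config["openwebui"]["models"].split(","))
response = litellm.completion(
    api_base=user_config["openwebui"]["base_url"],
    model="openai/" + model,   # assumes an OpenAI-compatible endpoint
    api_key=user_config["openwebui"].get("api_key", ""),
    messages=[
        {"role": "system", "content": "You write short Stable Diffusion prompts."},
        {"role": "user", "content": user_content},
    ],
)
new_prompt = response.choices[0].message.content.strip()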

View File

@@ -263,7 +263,7 @@
if (detailsCache[filename]) {
document.getElementById("lightbox-prompt").textContent =
`Model: ${detailsCache[filename].model}\n\n${detailsCache[filename].prompt}`;
`Created On: ${detailsCache[filename].date}\n\nModel: ${detailsCache[filename].model}\n\n${detailsCache[filename].prompt}`;
} else {
document.getElementById("lightbox-prompt").textContent = "Loading…";