From 097a68ad1862e26cf7f423d19569c8a126e5a64d Mon Sep 17 00:00:00 2001 From: pixelpaws Date: Mon, 22 Sep 2025 13:25:21 +0800 Subject: [PATCH] refactor(recipes): introduce dedicated services for handlers --- py/routes/base_recipe_routes.py | 45 + py/routes/handlers/recipe_handlers.py | 905 ++++----------------- py/services/recipes/__init__.py | 23 + py/services/recipes/analysis_service.py | 289 +++++++ py/services/recipes/errors.py | 22 + py/services/recipes/persistence_service.py | 467 +++++++++++ py/services/recipes/sharing_service.py | 113 +++ tests/services/test_recipe_services.py | 146 ++++ 8 files changed, 1274 insertions(+), 736 deletions(-) create mode 100644 py/services/recipes/__init__.py create mode 100644 py/services/recipes/analysis_service.py create mode 100644 py/services/recipes/errors.py create mode 100644 py/services/recipes/persistence_service.py create mode 100644 py/services/recipes/sharing_service.py create mode 100644 tests/services/test_recipe_services.py diff --git a/py/routes/base_recipe_routes.py b/py/routes/base_recipe_routes.py index 59d4e7ec..4447bb7b 100644 --- a/py/routes/base_recipe_routes.py +++ b/py/routes/base_recipe_routes.py @@ -2,15 +2,25 @@ from __future__ import annotations import logging +import os from typing import Callable, Mapping import jinja2 from aiohttp import web from ..config import config +from ..recipes import RecipeParserFactory +from ..services.downloader import get_downloader +from ..services.recipes import ( + RecipeAnalysisService, + RecipePersistenceService, + RecipeSharingService, +) from ..services.server_i18n import server_i18n from ..services.service_registry import ServiceRegistry from ..services.settings_manager import settings +from ..utils.constants import CARD_PREVIEW_WIDTH +from ..utils.exif_utils import ExifUtils from .handlers.recipe_handlers import ( RecipeAnalysisHandler, RecipeHandlerSet, @@ -124,6 +134,37 @@ class BaseRecipeRoutes: recipe_scanner_getter = lambda: self.recipe_scanner 
civitai_client_getter = lambda: self.civitai_client + standalone_mode = os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0" + if not standalone_mode: + from ..metadata_collector import get_metadata # type: ignore[import-not-found] + from ..metadata_collector.metadata_processor import ( # type: ignore[import-not-found] + MetadataProcessor, + ) + from ..metadata_collector.metadata_registry import ( # type: ignore[import-not-found] + MetadataRegistry, + ) + else: # pragma: no cover - optional dependency path + get_metadata = None # type: ignore[assignment] + MetadataProcessor = None # type: ignore[assignment] + MetadataRegistry = None # type: ignore[assignment] + + analysis_service = RecipeAnalysisService( + exif_utils=ExifUtils, + recipe_parser_factory=RecipeParserFactory, + downloader_factory=get_downloader, + metadata_collector=get_metadata, + metadata_processor_cls=MetadataProcessor, + metadata_registry_cls=MetadataRegistry, + standalone_mode=standalone_mode, + logger=logger, + ) + persistence_service = RecipePersistenceService( + exif_utils=ExifUtils, + card_preview_width=CARD_PREVIEW_WIDTH, + logger=logger, + ) + sharing_service = RecipeSharingService(logger=logger) + page_view = RecipePageView( ensure_dependencies_ready=self.ensure_dependencies_ready, settings_service=self.settings, @@ -148,17 +189,21 @@ class BaseRecipeRoutes: ensure_dependencies_ready=self.ensure_dependencies_ready, recipe_scanner_getter=recipe_scanner_getter, logger=logger, + persistence_service=persistence_service, + analysis_service=analysis_service, ) analysis = RecipeAnalysisHandler( ensure_dependencies_ready=self.ensure_dependencies_ready, recipe_scanner_getter=recipe_scanner_getter, civitai_client_getter=civitai_client_getter, logger=logger, + analysis_service=analysis_service, ) sharing = RecipeSharingHandler( ensure_dependencies_ready=self.ensure_dependencies_ready, recipe_scanner_getter=recipe_scanner_getter, logger=logger, + sharing_service=sharing_service, ) return 
RecipeHandlerSet( diff --git a/py/routes/handlers/recipe_handlers.py b/py/routes/handlers/recipe_handlers.py index 8d8f96bf..35f4c088 100644 --- a/py/routes/handlers/recipe_handlers.py +++ b/py/routes/handlers/recipe_handlers.py @@ -1,40 +1,25 @@ """Dedicated handler objects for recipe-related routes.""" from __future__ import annotations -import asyncio -import base64 -import io import json import logging import os -import tempfile -import time from dataclasses import dataclass from typing import Any, Awaitable, Callable, Dict, Mapping, Optional -import numpy as np from aiohttp import web -from PIL import Image from ...config import config -from ...recipes import RecipeParserFactory -from ...services.downloader import get_downloader from ...services.server_i18n import server_i18n as default_server_i18n from ...services.settings_manager import SettingsManager -from ...utils.constants import CARD_PREVIEW_WIDTH -from ...utils.exif_utils import ExifUtils - -# Check if running in standalone mode -standalone_mode = os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0" - -if not standalone_mode: - from ...metadata_collector import get_metadata - from ...metadata_collector.metadata_processor import MetadataProcessor - from ...metadata_collector.metadata_registry import MetadataRegistry -else: # pragma: no cover - optional dependency path - get_metadata = None # type: ignore[assignment] - MetadataProcessor = None # type: ignore[assignment] - MetadataRegistry = None # type: ignore[assignment] +from ...services.recipes import ( + RecipeAnalysisService, + RecipeDownloadError, + RecipeNotFoundError, + RecipePersistenceService, + RecipeSharingService, + RecipeValidationError, +) Logger = logging.Logger EnsureDependenciesCallable = Callable[[], Awaitable[None]] @@ -457,22 +442,14 @@ class RecipeManagementHandler: ensure_dependencies_ready: EnsureDependenciesCallable, recipe_scanner_getter: RecipeScannerGetter, logger: Logger, - exif_utils=ExifUtils, - card_preview_width: int = 
CARD_PREVIEW_WIDTH, - metadata_collector: Optional[Callable[[], Any]] = get_metadata, - metadata_processor_cls: Optional[type] = MetadataProcessor, - metadata_registry_cls: Optional[type] = MetadataRegistry, - standalone_mode: bool = standalone_mode, + persistence_service: RecipePersistenceService, + analysis_service: RecipeAnalysisService, ) -> None: self._ensure_dependencies_ready = ensure_dependencies_ready self._recipe_scanner_getter = recipe_scanner_getter self._logger = logger - self._exif_utils = exif_utils - self._card_preview_width = card_preview_width - self._metadata_collector = metadata_collector - self._metadata_processor_cls = metadata_processor_cls - self._metadata_registry_cls = metadata_registry_cls - self._standalone_mode = standalone_mode + self._persistence_service = persistence_service + self._analysis_service = analysis_service async def save_recipe(self, request: web.Request) -> web.Response: try: @@ -482,171 +459,19 @@ class RecipeManagementHandler: raise RuntimeError("Recipe scanner unavailable") reader = await request.multipart() + payload = await self._parse_save_payload(reader) - image: Optional[bytes] = None - image_base64: Optional[str] = None - name: Optional[str] = None - tags: list[str] = [] - metadata: Dict[str, Any] | None = None - - while True: - field = await reader.next() - if field is None: - break - - if field.name == "image": - image_chunks = bytearray() - while True: - chunk = await field.read_chunk() - if not chunk: - break - image_chunks.extend(chunk) - image = bytes(image_chunks) - elif field.name == "image_base64": - image_base64 = await field.text() - elif field.name == "name": - name = await field.text() - elif field.name == "tags": - tags_text = await field.text() - try: - parsed_tags = json.loads(tags_text) - tags = parsed_tags if isinstance(parsed_tags, list) else [] - except Exception: - tags = [] - elif field.name == "metadata": - metadata_text = await field.text() - try: - metadata = json.loads(metadata_text) - 
except Exception: - metadata = {} - - missing_fields = [] - if not name: - missing_fields.append("name") - if not metadata: - missing_fields.append("metadata") - if missing_fields: - return web.json_response( - {"error": f"Missing required fields: {', '.join(missing_fields)}"}, - status=400, - ) - - if image is None: - if image_base64: - try: - if "," in image_base64: - image_base64 = image_base64.split(",", 1)[1] - image = base64.b64decode(image_base64) - except Exception as exc: - return web.json_response({"error": f"Invalid base64 image data: {exc}"}, status=400) - else: - return web.json_response({"error": "No image data provided"}, status=400) - - recipes_dir = recipe_scanner.recipes_dir - os.makedirs(recipes_dir, exist_ok=True) - - import uuid - - recipe_id = str(uuid.uuid4()) - optimized_image, extension = self._exif_utils.optimize_image( - image_data=image, - target_width=self._card_preview_width, - format="webp", - quality=85, - preserve_metadata=True, - ) - - image_filename = f"{recipe_id}{extension}" - image_path = os.path.join(recipes_dir, image_filename) - with open(image_path, "wb") as file_obj: - file_obj.write(optimized_image) - - current_time = time.time() - loras_data = [] - for lora in metadata.get("loras", []): - loras_data.append( - { - "file_name": lora.get("file_name", "") - or ( - os.path.splitext(os.path.basename(lora.get("localPath", "")))[0] - if lora.get("localPath") - else "" - ), - "hash": (lora.get("hash") or "").lower(), - "strength": float(lora.get("weight", 1.0)), - "modelVersionId": lora.get("id", 0), - "modelName": lora.get("name", ""), - "modelVersionName": lora.get("version", ""), - "isDeleted": lora.get("isDeleted", False), - "exclude": lora.get("exclude", False), - } - ) - - gen_params = metadata.get("gen_params", {}) - if not gen_params and "raw_metadata" in metadata: - raw_metadata = metadata.get("raw_metadata", {}) - gen_params = { - "prompt": raw_metadata.get("prompt", ""), - "negative_prompt": 
raw_metadata.get("negative_prompt", ""), - "checkpoint": raw_metadata.get("checkpoint", {}), - "steps": raw_metadata.get("steps", ""), - "sampler": raw_metadata.get("sampler", ""), - "cfg_scale": raw_metadata.get("cfg_scale", ""), - "seed": raw_metadata.get("seed", ""), - "size": raw_metadata.get("size", ""), - "clip_skip": raw_metadata.get("clip_skip", ""), - } - - from ...utils.utils import calculate_recipe_fingerprint - - fingerprint = calculate_recipe_fingerprint(loras_data) - - recipe_data = { - "id": recipe_id, - "file_path": image_path, - "title": name, - "modified": current_time, - "created_date": current_time, - "base_model": metadata.get("base_model", ""), - "loras": loras_data, - "gen_params": gen_params, - "fingerprint": fingerprint, - } - - if tags: - recipe_data["tags"] = tags - - if metadata.get("source_path"): - recipe_data["source_path"] = metadata.get("source_path") - - json_filename = f"{recipe_id}.recipe.json" - json_path = os.path.join(recipes_dir, json_filename) - with open(json_path, "w", encoding="utf-8") as file_obj: - json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False) - - self._exif_utils.append_recipe_metadata(image_path, recipe_data) - - matching_recipes = [] - if fingerprint: - matching_recipes = await recipe_scanner.find_recipes_by_fingerprint(fingerprint) - if recipe_id in matching_recipes: - matching_recipes.remove(recipe_id) - - cache = getattr(recipe_scanner, "_cache", None) - if cache is not None: - cache.raw_data.append(recipe_data) - asyncio.create_task(cache.resort()) - self._logger.info("Added recipe %s to cache", recipe_id) - - return web.json_response( - { - "success": True, - "recipe_id": recipe_id, - "image_path": image_path, - "json_path": json_path, - "matching_recipes": matching_recipes, - } + result = await self._persistence_service.save_recipe( + recipe_scanner=recipe_scanner, + image_bytes=payload["image_bytes"], + image_base64=payload["image_base64"], + name=payload["name"], + tags=payload["tags"], + 
metadata=payload["metadata"], ) + return web.json_response(result.payload, status=result.status) + except RecipeValidationError as exc: + return web.json_response({"error": str(exc)}, status=400) except Exception as exc: self._logger.error("Error saving recipe: %s", exc, exc_info=True) return web.json_response({"error": str(exc)}, status=500) @@ -659,34 +484,12 @@ class RecipeManagementHandler: raise RuntimeError("Recipe scanner unavailable") recipe_id = request.match_info["recipe_id"] - recipes_dir = recipe_scanner.recipes_dir - if not recipes_dir or not os.path.exists(recipes_dir): - return web.json_response({"error": "Recipes directory not found"}, status=404) - - recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json") - if not os.path.exists(recipe_json_path): - return web.json_response({"error": "Recipe not found"}, status=404) - - with open(recipe_json_path, "r", encoding="utf-8") as file_obj: - recipe_data = json.load(file_obj) - - image_path = recipe_data.get("file_path") - os.remove(recipe_json_path) - self._logger.info("Deleted recipe JSON file: %s", recipe_json_path) - - if image_path and os.path.exists(image_path): - os.remove(image_path) - self._logger.info("Deleted recipe image: %s", image_path) - - cache = getattr(recipe_scanner, "_cache", None) - if cache is not None: - cache.raw_data = [ - item for item in cache.raw_data if str(item.get("id", "")) != recipe_id - ] - asyncio.create_task(cache.resort()) - self._logger.info("Removed recipe %s from cache", recipe_id) - - return web.json_response({"success": True, "message": "Recipe deleted successfully"}) + result = await self._persistence_service.delete_recipe( + recipe_scanner=recipe_scanner, recipe_id=recipe_id + ) + return web.json_response(result.payload, status=result.status) + except RecipeNotFoundError as exc: + return web.json_response({"error": str(exc)}, status=404) except Exception as exc: self._logger.error("Error deleting recipe: %s", exc, exc_info=True) return 
web.json_response({"error": str(exc)}, status=500) @@ -700,25 +503,14 @@ class RecipeManagementHandler: recipe_id = request.match_info["recipe_id"] data = await request.json() - - if not any( - key in data for key in ("title", "tags", "source_path", "preview_nsfw_level") - ): - return web.json_response( - { - "error": ( - "At least one field to update must be provided (title or tags or " - "source_path or preview_nsfw_level)" - ) - }, - status=400, - ) - - success = await recipe_scanner.update_recipe_metadata(recipe_id, data) - if not success: - return web.json_response({"error": "Recipe not found or update failed"}, status=404) - - return web.json_response({"success": True, "recipe_id": recipe_id, "updates": data}) + result = await self._persistence_service.update_recipe( + recipe_scanner=recipe_scanner, recipe_id=recipe_id, updates=data + ) + return web.json_response(result.payload, status=result.status) + except RecipeValidationError as exc: + return web.json_response({"error": str(exc)}, status=400) + except RecipeNotFoundError as exc: + return web.json_response({"error": str(exc)}, status=404) except Exception as exc: self._logger.error("Error updating recipe: %s", exc, exc_info=True) return web.json_response({"error": str(exc)}, status=500) @@ -731,81 +523,21 @@ class RecipeManagementHandler: raise RuntimeError("Recipe scanner unavailable") data = await request.json() - required_fields = ["recipe_id", "lora_index", "target_name"] - for field in required_fields: + for field in ("recipe_id", "lora_index", "target_name"): if field not in data: - return web.json_response({"error": f"Missing required field: {field}"}, status=400) + raise RecipeValidationError(f"Missing required field: {field}") - recipe_id = data["recipe_id"] - lora_index = int(data["lora_index"]) - target_name = data["target_name"] - - recipe_path = os.path.join(recipe_scanner.recipes_dir, f"{recipe_id}.recipe.json") - if not os.path.exists(recipe_path): - return web.json_response({"error": 
"Recipe not found"}, status=404) - - lora_scanner = getattr(recipe_scanner, "_lora_scanner", None) - target_lora = None if lora_scanner is None else await lora_scanner.get_model_info_by_name(target_name) - if not target_lora: - return web.json_response({"error": f"Local LoRA not found with name: {target_name}"}, status=404) - - with open(recipe_path, "r", encoding="utf-8") as file_obj: - recipe_data = json.load(file_obj) - - loras = recipe_data.get("loras", []) - lora = loras[lora_index] if lora_index < len(loras) else None - if lora is None: - return web.json_response({"error": "LoRA index out of range in recipe"}, status=404) - - lora["isDeleted"] = False - lora["exclude"] = False - lora["file_name"] = target_name - if "sha256" in target_lora: - lora["hash"] = target_lora["sha256"].lower() - if target_lora.get("civitai"): - lora["modelName"] = target_lora["civitai"]["model"]["name"] - lora["modelVersionName"] = target_lora["civitai"]["name"] - lora["modelVersionId"] = target_lora["civitai"]["id"] - - from ...utils.utils import calculate_recipe_fingerprint - - recipe_data["fingerprint"] = calculate_recipe_fingerprint(recipe_data.get("loras", [])) - - with open(recipe_path, "w", encoding="utf-8") as file_obj: - json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False) - - updated_lora = dict(lora) - updated_lora["inLibrary"] = True - updated_lora["preview_url"] = config.get_preview_static_url(target_lora["preview_url"]) - updated_lora["localPath"] = target_lora["file_path"] - - cache = getattr(recipe_scanner, "_cache", None) - if cache is not None: - for cache_item in cache.raw_data: - if cache_item.get("id") == recipe_id: - cache_item["loras"] = recipe_data["loras"] - cache_item["fingerprint"] = recipe_data["fingerprint"] - asyncio.create_task(cache.resort()) - break - - image_path = recipe_data.get("file_path") - if image_path and os.path.exists(image_path): - self._exif_utils.append_recipe_metadata(image_path, recipe_data) - - matching_recipes = [] - if 
"fingerprint" in recipe_data: - matching_recipes = await recipe_scanner.find_recipes_by_fingerprint(recipe_data["fingerprint"]) - if recipe_id in matching_recipes: - matching_recipes.remove(recipe_id) - - return web.json_response( - { - "success": True, - "recipe_id": recipe_id, - "updated_lora": updated_lora, - "matching_recipes": matching_recipes, - } + result = await self._persistence_service.reconnect_lora( + recipe_scanner=recipe_scanner, + recipe_id=data["recipe_id"], + lora_index=int(data["lora_index"]), + target_name=data["target_name"], ) + return web.json_response(result.payload, status=result.status) + except RecipeValidationError as exc: + return web.json_response({"error": str(exc)}, status=400) + except RecipeNotFoundError as exc: + return web.json_response({"error": str(exc)}, status=404) except Exception as exc: self._logger.error("Error reconnecting LoRA: %s", exc, exc_info=True) return web.json_response({"error": str(exc)}, status=500) @@ -819,54 +551,14 @@ class RecipeManagementHandler: data = await request.json() recipe_ids = data.get("recipe_ids", []) - if not recipe_ids: - return web.json_response( - {"success": False, "error": "No recipe IDs provided"}, - status=400, - ) - - recipes_dir = recipe_scanner.recipes_dir - if not recipes_dir or not os.path.exists(recipes_dir): - return web.json_response( - {"success": False, "error": "Recipes directory not found"}, - status=404, - ) - - deleted_recipes: list[str] = [] - failed_recipes: list[Dict[str, Any]] = [] - - for recipe_id in recipe_ids: - recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json") - if not os.path.exists(recipe_json_path): - failed_recipes.append({"id": recipe_id, "reason": "Recipe not found"}) - continue - - try: - with open(recipe_json_path, "r", encoding="utf-8") as file_obj: - recipe_data = json.load(file_obj) - image_path = recipe_data.get("file_path") - os.remove(recipe_json_path) - if image_path and os.path.exists(image_path): - os.remove(image_path) - 
deleted_recipes.append(recipe_id) - except Exception as exc: - failed_recipes.append({"id": recipe_id, "reason": str(exc)}) - - cache = getattr(recipe_scanner, "_cache", None) - if deleted_recipes and cache is not None: - cache.raw_data = [item for item in cache.raw_data if item.get("id") not in deleted_recipes] - asyncio.create_task(cache.resort()) - self._logger.info("Removed %s recipes from cache", len(deleted_recipes)) - - return web.json_response( - { - "success": True, - "deleted": deleted_recipes, - "failed": failed_recipes, - "total_deleted": len(deleted_recipes), - "total_failed": len(failed_recipes), - } + result = await self._persistence_service.bulk_delete( + recipe_scanner=recipe_scanner, recipe_ids=recipe_ids ) + return web.json_response(result.payload, status=result.status) + except RecipeValidationError as exc: + return web.json_response({"success": False, "error": str(exc)}, status=400) + except RecipeNotFoundError as exc: + return web.json_response({"success": False, "error": str(exc)}, status=404) except Exception as exc: self._logger.error("Error performing bulk delete: %s", exc, exc_info=True) return web.json_response({"success": False, "error": str(exc)}, status=500) @@ -878,164 +570,71 @@ class RecipeManagementHandler: if recipe_scanner is None: raise RuntimeError("Recipe scanner unavailable") - if self._metadata_collector is None or self._metadata_processor_cls is None: - return web.json_response({"error": "Metadata collection not available"}, status=400) - - raw_metadata = self._metadata_collector() - metadata_dict = self._metadata_processor_cls.to_dict(raw_metadata) - if not metadata_dict: - return web.json_response({"error": "No generation metadata found"}, status=400) - - if not self._standalone_mode and self._metadata_registry_cls is not None: - metadata_registry = self._metadata_registry_cls() - latest_image = metadata_registry.get_first_decoded_image() - else: - latest_image = None - - if latest_image is None: - return 
web.json_response( - {"error": "No recent images found to use for recipe. Try generating an image first."}, - status=400, - ) - - self._logger.debug("Image type: %s", type(latest_image)) - - try: - if isinstance(latest_image, tuple): - tensor_image = latest_image[0] if latest_image else None - if tensor_image is None: - return web.json_response({"error": "Empty image tuple received"}, status=400) - else: - tensor_image = latest_image - - if hasattr(tensor_image, "shape"): - shape_info = tensor_image.shape - self._logger.debug("Tensor shape: %s, dtype: %s", shape_info, tensor_image.dtype) - - import torch # type: ignore[import-not-found] - - if isinstance(tensor_image, torch.Tensor): - image_np = tensor_image.cpu().numpy() - else: - image_np = np.array(tensor_image) - - while len(image_np.shape) > 3: - image_np = image_np[0] - - if image_np.dtype in (np.float32, np.float64) and image_np.max() <= 1.0: - image_np = (image_np * 255).astype(np.uint8) - - if len(image_np.shape) == 3 and image_np.shape[2] == 3: - pil_image = Image.fromarray(image_np) - img_byte_arr = io.BytesIO() - pil_image.save(img_byte_arr, format="PNG") - image_bytes = img_byte_arr.getvalue() - else: - return web.json_response( - {"error": f"Cannot handle this data shape: {image_np.shape}, {image_np.dtype}"}, - status=400, - ) - except Exception as exc: - self._logger.error("Error processing image data: %s", exc, exc_info=True) - return web.json_response({"error": f"Error processing image: {exc}"}, status=400) - - lora_stack = metadata_dict.get("loras", "") - import re - - lora_matches = re.findall(r"]+)>", lora_stack) - if not lora_matches: - return web.json_response({"error": "No LoRAs found in the generation metadata"}, status=400) - - loras_for_name = lora_matches[:3] - recipe_name_parts = [] - for name, strength in loras_for_name: - recipe_name_parts.append(f"{name.strip()}-{float(strength):.2f}") - recipe_name = "_".join(recipe_name_parts) - - recipe_name = recipe_name or "recipe" - - 
recipes_dir = recipe_scanner.recipes_dir - os.makedirs(recipes_dir, exist_ok=True) - - import uuid - - recipe_id = str(uuid.uuid4()) - image_filename = f"{recipe_id}.png" - image_path = os.path.join(recipes_dir, image_filename) - with open(image_path, "wb") as file_obj: - file_obj.write(image_bytes) - - loras_data = [] - lora_scanner = getattr(recipe_scanner, "_lora_scanner", None) - base_model_counts: Dict[str, int] = {} - - for name, strength in lora_matches: - lora_info = None - if lora_scanner is not None: - lora_info = await lora_scanner.get_model_info_by_name(name) - lora_data = { - "file_name": name, - "strength": float(strength), - "hash": (lora_info.get("sha256") or "").lower() if lora_info else "", - "modelVersionId": lora_info.get("civitai", {}).get("id") if lora_info else 0, - "modelName": lora_info.get("civitai", {}).get("model", {}).get("name") if lora_info else "", - "modelVersionName": lora_info.get("civitai", {}).get("name") if lora_info else "", - "isDeleted": False, - "exclude": False, - } - loras_data.append(lora_data) - - if lora_info and "base_model" in lora_info: - base_model = lora_info["base_model"] - base_model_counts[base_model] = base_model_counts.get(base_model, 0) + 1 - - most_common_base_model = "" - if base_model_counts: - most_common_base_model = max(base_model_counts.items(), key=lambda item: item[1])[0] - - recipe_data = { - "id": recipe_id, - "file_path": image_path, - "title": recipe_name, - "modified": time.time(), - "created_date": time.time(), - "base_model": most_common_base_model, - "loras": loras_data, - "checkpoint": metadata_dict.get("checkpoint", ""), - "gen_params": { - key: value - for key, value in metadata_dict.items() - if key not in ["checkpoint", "loras"] - }, - "loras_stack": lora_stack, - } - - json_filename = f"{recipe_id}.recipe.json" - json_path = os.path.join(recipes_dir, json_filename) - with open(json_path, "w", encoding="utf-8") as file_obj: - json.dump(recipe_data, file_obj, indent=4, 
ensure_ascii=False) - - self._exif_utils.append_recipe_metadata(image_path, recipe_data) - - cache = getattr(recipe_scanner, "_cache", None) - if cache is not None: - cache.raw_data.append(recipe_data) - asyncio.create_task(cache.resort()) - self._logger.info("Added recipe %s to cache", recipe_id) - - return web.json_response( - { - "success": True, - "recipe_id": recipe_id, - "image_path": image_path, - "json_path": json_path, - "recipe_name": recipe_name, - } + analysis = await self._analysis_service.analyze_widget_metadata( + recipe_scanner=recipe_scanner ) + metadata = analysis.payload.get("metadata") + image_bytes = analysis.payload.get("image_bytes") + if not metadata or image_bytes is None: + raise RecipeValidationError("Unable to extract metadata from widget") + + result = await self._persistence_service.save_recipe_from_widget( + recipe_scanner=recipe_scanner, + metadata=metadata, + image_bytes=image_bytes, + ) + return web.json_response(result.payload, status=result.status) + except RecipeValidationError as exc: + return web.json_response({"error": str(exc)}, status=400) except Exception as exc: self._logger.error("Error saving recipe from widget: %s", exc, exc_info=True) return web.json_response({"error": str(exc)}, status=500) + async def _parse_save_payload(self, reader) -> dict[str, Any]: + image_bytes: Optional[bytes] = None + image_base64: Optional[str] = None + name: Optional[str] = None + tags: list[str] = [] + metadata: Optional[Dict[str, Any]] = None + + while True: + field = await reader.next() + if field is None: + break + if field.name == "image": + image_chunks = bytearray() + while True: + chunk = await field.read_chunk() + if not chunk: + break + image_chunks.extend(chunk) + image_bytes = bytes(image_chunks) + elif field.name == "image_base64": + image_base64 = await field.text() + elif field.name == "name": + name = await field.text() + elif field.name == "tags": + tags_text = await field.text() + try: + parsed_tags = 
json.loads(tags_text) + tags = parsed_tags if isinstance(parsed_tags, list) else [] + except Exception: + tags = [] + elif field.name == "metadata": + metadata_text = await field.text() + try: + metadata = json.loads(metadata_text) + except Exception: + metadata = {} + + return { + "image_bytes": image_bytes, + "image_base64": image_base64, + "name": name, + "tags": tags, + "metadata": metadata, + } + class RecipeAnalysisHandler: """Analyze images to extract recipe metadata.""" @@ -1047,20 +646,15 @@ class RecipeAnalysisHandler: recipe_scanner_getter: RecipeScannerGetter, civitai_client_getter: CivitaiClientGetter, logger: Logger, - exif_utils=ExifUtils, - recipe_parser_factory=RecipeParserFactory, - downloader_factory=get_downloader, + analysis_service: RecipeAnalysisService, ) -> None: self._ensure_dependencies_ready = ensure_dependencies_ready self._recipe_scanner_getter = recipe_scanner_getter self._civitai_client_getter = civitai_client_getter self._logger = logger - self._exif_utils = exif_utils - self._recipe_parser_factory = recipe_parser_factory - self._downloader_factory = downloader_factory + self._analysis_service = analysis_service async def analyze_uploaded_image(self, request: web.Request) -> web.Response: - temp_path: Optional[str] = None try: await self._ensure_dependencies_ready() recipe_scanner = self._recipe_scanner_getter() @@ -1069,112 +663,42 @@ class RecipeAnalysisHandler: raise RuntimeError("Required services unavailable") content_type = request.headers.get("Content-Type", "") - is_url_mode = False - metadata: Optional[Dict[str, Any]] = None - if "multipart/form-data" in content_type: reader = await request.multipart() field = await reader.next() if field is None or field.name != "image": - return web.json_response({"error": "No image field found", "loras": []}, status=400) + raise RecipeValidationError("No image field found") + image_chunks = bytearray() + while True: + chunk = await field.read_chunk() + if not chunk: + break + 
image_chunks.extend(chunk) + result = await self._analysis_service.analyze_uploaded_image( + image_bytes=bytes(image_chunks), + recipe_scanner=recipe_scanner, + ) + return web.json_response(result.payload, status=result.status) - with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file: - while True: - chunk = await field.read_chunk() - if not chunk: - break - temp_file.write(chunk) - temp_path = temp_file.name - elif "application/json" in content_type: + if "application/json" in content_type: data = await request.json() - url = data.get("url") - is_url_mode = True - if not url: - return web.json_response({"error": "No URL provided", "loras": []}, status=400) + result = await self._analysis_service.analyze_remote_image( + url=data.get("url"), + recipe_scanner=recipe_scanner, + civitai_client=civitai_client, + ) + return web.json_response(result.payload, status=result.status) - import re - - civitai_image_match = re.match(r"https://civitai\.com/images/(\d+)", url) - if civitai_image_match: - image_id = civitai_image_match.group(1) - image_info = await civitai_client.get_image_info(image_id) - if not image_info: - return web.json_response( - {"error": "Failed to fetch image information from Civitai", "loras": []}, - status=400, - ) - image_url = image_info.get("url") - if not image_url: - return web.json_response( - {"error": "No image URL found in Civitai response", "loras": []}, - status=400, - ) - - downloader = await self._downloader_factory() - with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file: - temp_path = temp_file.name - - success, result = await downloader.download_file( - image_url, - temp_path, - use_auth=False, - ) - if not success: - return web.json_response( - {"error": f"Failed to download image from URL: {result}", "loras": []}, - status=400, - ) - metadata = image_info.get("meta") if "meta" in image_info else None - else: - return web.json_response({"error": "Unsupported content type", "loras": []}, 
status=400) - - if metadata is None and temp_path: - metadata = self._exif_utils.extract_image_metadata(temp_path) - - if not metadata: - response: Dict[str, Any] = {"error": "No metadata found in this image", "loras": []} - if is_url_mode and temp_path: - with open(temp_path, "rb") as image_file: - response["image_base64"] = base64.b64encode(image_file.read()).decode("utf-8") - return web.json_response(response, status=200) - - parser = self._recipe_parser_factory.create_parser(metadata) - if parser is None: - response = {"error": "No parser found for this image", "loras": []} - if is_url_mode and temp_path: - with open(temp_path, "rb") as image_file: - response["image_base64"] = base64.b64encode(image_file.read()).decode("utf-8") - return web.json_response(response, status=200) - - result = await parser.parse_metadata(metadata, recipe_scanner=recipe_scanner) - - if is_url_mode and temp_path: - with open(temp_path, "rb") as image_file: - result["image_base64"] = base64.b64encode(image_file.read()).decode("utf-8") - - if "error" in result and not result.get("loras"): - return web.json_response(result, status=200) - - from ...utils.utils import calculate_recipe_fingerprint - - fingerprint = calculate_recipe_fingerprint(result.get("loras", [])) - result["fingerprint"] = fingerprint - - matching_recipes = [] - if fingerprint: - matching_recipes = await recipe_scanner.find_recipes_by_fingerprint(fingerprint) - - result["matching_recipes"] = matching_recipes - return web.json_response(result) + raise RecipeValidationError("Unsupported content type") + except RecipeValidationError as exc: + return web.json_response({"error": str(exc), "loras": []}, status=400) + except RecipeDownloadError as exc: + return web.json_response({"error": str(exc), "loras": []}, status=400) + except RecipeNotFoundError as exc: + return web.json_response({"error": str(exc), "loras": []}, status=404) except Exception as exc: self._logger.error("Error analyzing recipe image: %s", exc, 
exc_info=True) return web.json_response({"error": str(exc), "loras": []}, status=500) - finally: - if temp_path and os.path.exists(temp_path): - try: - os.unlink(temp_path) - except Exception as cleanup_exc: # pragma: no cover - logging path - self._logger.error("Error deleting temporary file: %s", cleanup_exc) async def analyze_local_image(self, request: web.Request) -> web.Response: try: @@ -1184,50 +708,15 @@ class RecipeAnalysisHandler: raise RuntimeError("Recipe scanner unavailable") data = await request.json() - file_path = data.get("path") - if not file_path: - return web.json_response({"error": "No file path provided", "loras": []}, status=400) - - file_path = os.path.normpath(file_path.strip('"').strip("'")) - if not os.path.isfile(file_path): - return web.json_response({"error": "File not found", "loras": []}, status=404) - - metadata = self._exif_utils.extract_image_metadata(file_path) - if not metadata: - with open(file_path, "rb") as image_file: - image_base64 = base64.b64encode(image_file.read()).decode("utf-8") - return web.json_response( - {"error": "No metadata found in this image", "loras": [], "image_base64": image_base64}, - status=200, - ) - - parser = self._recipe_parser_factory.create_parser(metadata) - if parser is None: - with open(file_path, "rb") as image_file: - image_base64 = base64.b64encode(image_file.read()).decode("utf-8") - return web.json_response( - {"error": "No parser found for this image", "loras": [], "image_base64": image_base64}, - status=200, - ) - - result = await parser.parse_metadata(metadata, recipe_scanner=recipe_scanner) - with open(file_path, "rb") as image_file: - result["image_base64"] = base64.b64encode(image_file.read()).decode("utf-8") - - if "error" in result and not result.get("loras"): - return web.json_response(result, status=200) - - from ...utils.utils import calculate_recipe_fingerprint - - fingerprint = calculate_recipe_fingerprint(result.get("loras", [])) - result["fingerprint"] = fingerprint - - 
matching_recipes = [] - if fingerprint: - matching_recipes = await recipe_scanner.find_recipes_by_fingerprint(fingerprint) - result["matching_recipes"] = matching_recipes - - return web.json_response(result) + result = await self._analysis_service.analyze_local_image( + file_path=data.get("path"), + recipe_scanner=recipe_scanner, + ) + return web.json_response(result.payload, status=result.status) + except RecipeValidationError as exc: + return web.json_response({"error": str(exc), "loras": []}, status=400) + except RecipeNotFoundError as exc: + return web.json_response({"error": str(exc), "loras": []}, status=404) except Exception as exc: self._logger.error("Error analyzing local image: %s", exc, exc_info=True) return web.json_response({"error": str(exc), "loras": []}, status=500) @@ -1242,11 +731,12 @@ class RecipeSharingHandler: ensure_dependencies_ready: EnsureDependenciesCallable, recipe_scanner_getter: RecipeScannerGetter, logger: Logger, + sharing_service: RecipeSharingService, ) -> None: self._ensure_dependencies_ready = ensure_dependencies_ready self._recipe_scanner_getter = recipe_scanner_getter self._logger = logger - self._shared_recipes: Dict[str, Dict[str, Any]] = {} + self._sharing_service = sharing_service async def share_recipe(self, request: web.Request) -> web.Response: try: @@ -1256,42 +746,17 @@ class RecipeSharingHandler: raise RuntimeError("Recipe scanner unavailable") recipe_id = request.match_info["recipe_id"] - cache = await recipe_scanner.get_cached_data() - recipe = next( - (r for r in getattr(cache, "raw_data", []) if str(r.get("id", "")) == recipe_id), - None, + result = await self._sharing_service.share_recipe( + recipe_scanner=recipe_scanner, recipe_id=recipe_id ) - if not recipe: - return web.json_response({"error": "Recipe not found"}, status=404) - - image_path = recipe.get("file_path") - if not image_path or not os.path.exists(image_path): - return web.json_response({"error": "Recipe image not found"}, status=404) - - import 
shutil - - ext = os.path.splitext(image_path)[1] - with tempfile.NamedTemporaryFile(suffix=ext, delete=False) as temp_file: - temp_path = temp_file.name - shutil.copy2(image_path, temp_path) - processed_path = temp_path - - timestamp = int(time.time()) - url_path = f"/api/recipe/{recipe_id}/share/download?t={timestamp}" - self._shared_recipes[recipe_id] = { - "path": processed_path, - "timestamp": timestamp, - "expires": time.time() + 300, - } - self._cleanup_shared_recipes() - - filename = f"recipe_{recipe.get('title', '').replace(' ', '_').lower()}{ext}" - return web.json_response({"success": True, "download_url": url_path, "filename": filename}) + return web.json_response(result.payload, status=result.status) + except RecipeNotFoundError as exc: + return web.json_response({"error": str(exc)}, status=404) except Exception as exc: self._logger.error("Error sharing recipe: %s", exc, exc_info=True) return web.json_response({"error": str(exc)}, status=500) - async def download_shared_recipe(self, request: web.Request) -> web.Response: + async def download_shared_recipe(self, request: web.Request) -> web.StreamResponse: try: await self._ensure_dependencies_ready() recipe_scanner = self._recipe_scanner_getter() @@ -1299,49 +764,17 @@ class RecipeSharingHandler: raise RuntimeError("Recipe scanner unavailable") recipe_id = request.match_info["recipe_id"] - shared_info = self._shared_recipes.get(recipe_id) - if not shared_info: - return web.json_response({"error": "Shared recipe not found or expired"}, status=404) - - file_path = shared_info["path"] - if not os.path.exists(file_path): - return web.json_response({"error": "Shared recipe file not found"}, status=404) - - cache = await recipe_scanner.get_cached_data() - recipe = next( - (r for r in getattr(cache, "raw_data", []) if str(r.get("id", "")) == recipe_id), - None, + download_info = await self._sharing_service.prepare_download( + recipe_scanner=recipe_scanner, recipe_id=recipe_id ) - filename_base = ( - 
f"recipe_{recipe.get('title', '').replace(' ', '_').lower()}" - if recipe - else recipe_id - ) - ext = os.path.splitext(file_path)[1] - download_filename = f"{filename_base}{ext}" - return web.FileResponse( - file_path, - headers={"Content-Disposition": f'attachment; filename="{download_filename}"'}, + download_info.file_path, + headers={ + "Content-Disposition": f'attachment; filename="{download_info.download_filename}"' + }, ) + except RecipeNotFoundError as exc: + return web.json_response({"error": str(exc)}, status=404) except Exception as exc: self._logger.error("Error downloading shared recipe: %s", exc, exc_info=True) return web.json_response({"error": str(exc)}, status=500) - - def _cleanup_shared_recipes(self) -> None: - current_time = time.time() - expired_ids = [ - recipe_id - for recipe_id, info in self._shared_recipes.items() - if current_time > info.get("expires", 0) - ] - - for recipe_id in expired_ids: - try: - file_path = self._shared_recipes[recipe_id]["path"] - if os.path.exists(file_path): - os.unlink(file_path) - except Exception as exc: # pragma: no cover - logging path - self._logger.error("Error cleaning up shared recipe %s: %s", recipe_id, exc) - finally: - self._shared_recipes.pop(recipe_id, None) diff --git a/py/services/recipes/__init__.py b/py/services/recipes/__init__.py new file mode 100644 index 00000000..8009b7c3 --- /dev/null +++ b/py/services/recipes/__init__.py @@ -0,0 +1,23 @@ +"""Recipe service layer implementations.""" + +from .analysis_service import RecipeAnalysisService +from .persistence_service import RecipePersistenceService +from .sharing_service import RecipeSharingService +from .errors import ( + RecipeServiceError, + RecipeValidationError, + RecipeNotFoundError, + RecipeDownloadError, + RecipeConflictError, +) + +__all__ = [ + "RecipeAnalysisService", + "RecipePersistenceService", + "RecipeSharingService", + "RecipeServiceError", + "RecipeValidationError", + "RecipeNotFoundError", + "RecipeDownloadError", + 
"RecipeConflictError", +] diff --git a/py/services/recipes/analysis_service.py b/py/services/recipes/analysis_service.py new file mode 100644 index 00000000..77d80e34 --- /dev/null +++ b/py/services/recipes/analysis_service.py @@ -0,0 +1,289 @@ +"""Services responsible for recipe metadata analysis.""" +from __future__ import annotations + +import base64 +import io +import os +import re +import tempfile +from dataclasses import dataclass +from typing import Any, Callable, Optional + +import numpy as np +from PIL import Image + +from ...utils.utils import calculate_recipe_fingerprint +from .errors import ( + RecipeDownloadError, + RecipeNotFoundError, + RecipeServiceError, + RecipeValidationError, +) + + +@dataclass(frozen=True) +class AnalysisResult: + """Return payload from analysis operations.""" + + payload: dict[str, Any] + status: int = 200 + + +class RecipeAnalysisService: + """Extract recipe metadata from various image sources.""" + + def __init__( + self, + *, + exif_utils, + recipe_parser_factory, + downloader_factory: Callable[[], Any], + metadata_collector: Optional[Callable[[], Any]] = None, + metadata_processor_cls: Optional[type] = None, + metadata_registry_cls: Optional[type] = None, + standalone_mode: bool = False, + logger, + ) -> None: + self._exif_utils = exif_utils + self._recipe_parser_factory = recipe_parser_factory + self._downloader_factory = downloader_factory + self._metadata_collector = metadata_collector + self._metadata_processor_cls = metadata_processor_cls + self._metadata_registry_cls = metadata_registry_cls + self._standalone_mode = standalone_mode + self._logger = logger + + async def analyze_uploaded_image( + self, + *, + image_bytes: bytes | None, + recipe_scanner, + ) -> AnalysisResult: + """Analyze an uploaded image payload.""" + + if not image_bytes: + raise RecipeValidationError("No image data provided") + + temp_path = self._write_temp_file(image_bytes) + try: + metadata = self._exif_utils.extract_image_metadata(temp_path) + 
if not metadata: + return AnalysisResult({"error": "No metadata found in this image", "loras": []}) + + return await self._parse_metadata( + metadata, + recipe_scanner=recipe_scanner, + image_path=None, + include_image_base64=False, + ) + finally: + self._safe_cleanup(temp_path) + + async def analyze_remote_image( + self, + *, + url: str | None, + recipe_scanner, + civitai_client, + ) -> AnalysisResult: + """Analyze an image accessible via URL, including Civitai integration.""" + + if not url: + raise RecipeValidationError("No URL provided") + + if civitai_client is None: + raise RecipeServiceError("Civitai client unavailable") + + temp_path = self._create_temp_path() + metadata: Optional[dict[str, Any]] = None + try: + civitai_match = re.match(r"https://civitai\.com/images/(\d+)", url) + if civitai_match: + image_info = await civitai_client.get_image_info(civitai_match.group(1)) + if not image_info: + raise RecipeDownloadError("Failed to fetch image information from Civitai") + image_url = image_info.get("url") + if not image_url: + raise RecipeDownloadError("No image URL found in Civitai response") + await self._download_image(image_url, temp_path) + metadata = image_info.get("meta") if "meta" in image_info else None + else: + await self._download_image(url, temp_path) + + if metadata is None: + metadata = self._exif_utils.extract_image_metadata(temp_path) + + if not metadata: + return self._metadata_not_found_response(temp_path) + + return await self._parse_metadata( + metadata, + recipe_scanner=recipe_scanner, + image_path=temp_path, + include_image_base64=True, + ) + finally: + self._safe_cleanup(temp_path) + + async def analyze_local_image( + self, + *, + file_path: str | None, + recipe_scanner, + ) -> AnalysisResult: + """Analyze a file already present on disk.""" + + if not file_path: + raise RecipeValidationError("No file path provided") + + normalized_path = os.path.normpath(file_path.strip('"').strip("'")) + if not os.path.isfile(normalized_path): + 
raise RecipeNotFoundError("File not found") + + metadata = self._exif_utils.extract_image_metadata(normalized_path) + if not metadata: + return self._metadata_not_found_response(normalized_path) + + return await self._parse_metadata( + metadata, + recipe_scanner=recipe_scanner, + image_path=normalized_path, + include_image_base64=True, + ) + + async def analyze_widget_metadata(self, *, recipe_scanner) -> AnalysisResult: + """Analyse the most recent generation metadata for widget saves.""" + + if self._metadata_collector is None or self._metadata_processor_cls is None: + raise RecipeValidationError("Metadata collection not available") + + raw_metadata = self._metadata_collector() + metadata_dict = self._metadata_processor_cls.to_dict(raw_metadata) + if not metadata_dict: + raise RecipeValidationError("No generation metadata found") + + latest_image = None + if not self._standalone_mode and self._metadata_registry_cls is not None: + metadata_registry = self._metadata_registry_cls() + latest_image = metadata_registry.get_first_decoded_image() + + if latest_image is None: + raise RecipeValidationError( + "No recent images found to use for recipe. Try generating an image first." 
+ ) + + image_bytes = self._convert_tensor_to_png_bytes(latest_image) + if image_bytes is None: + raise RecipeValidationError("Cannot handle this data shape from metadata registry") + + return AnalysisResult( + { + "metadata": metadata_dict, + "image_bytes": image_bytes, + } + ) + + # Internal helpers ------------------------------------------------- + + async def _parse_metadata( + self, + metadata: dict[str, Any], + *, + recipe_scanner, + image_path: Optional[str], + include_image_base64: bool, + ) -> AnalysisResult: + parser = self._recipe_parser_factory.create_parser(metadata) + if parser is None: + payload = {"error": "No parser found for this image", "loras": []} + if include_image_base64 and image_path: + payload["image_base64"] = self._encode_file(image_path) + return AnalysisResult(payload) + + result = await parser.parse_metadata(metadata, recipe_scanner=recipe_scanner) + + if include_image_base64 and image_path: + result["image_base64"] = self._encode_file(image_path) + + if "error" in result and not result.get("loras"): + return AnalysisResult(result) + + fingerprint = calculate_recipe_fingerprint(result.get("loras", [])) + result["fingerprint"] = fingerprint + + matching_recipes: list[str] = [] + if fingerprint: + matching_recipes = await recipe_scanner.find_recipes_by_fingerprint(fingerprint) + result["matching_recipes"] = matching_recipes + + return AnalysisResult(result) + + async def _download_image(self, url: str, temp_path: str) -> None: + downloader = await self._downloader_factory() + success, result = await downloader.download_file(url, temp_path, use_auth=False) + if not success: + raise RecipeDownloadError(f"Failed to download image from URL: {result}") + + def _metadata_not_found_response(self, path: str) -> AnalysisResult: + payload: dict[str, Any] = {"error": "No metadata found in this image", "loras": []} + if os.path.exists(path): + payload["image_base64"] = self._encode_file(path) + return AnalysisResult(payload) + + def 
_write_temp_file(self, data: bytes) -> str: + with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file: + temp_file.write(data) + return temp_file.name + + def _create_temp_path(self) -> str: + with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file: + return temp_file.name + + def _safe_cleanup(self, path: Optional[str]) -> None: + if path and os.path.exists(path): + try: + os.unlink(path) + except Exception as exc: # pragma: no cover - defensive logging + self._logger.error("Error deleting temporary file: %s", exc) + + def _encode_file(self, path: str) -> str: + with open(path, "rb") as image_file: + return base64.b64encode(image_file.read()).decode("utf-8") + + def _convert_tensor_to_png_bytes(self, latest_image: Any) -> Optional[bytes]: + try: + if isinstance(latest_image, tuple): + tensor_image = latest_image[0] if latest_image else None + if tensor_image is None: + return None + else: + tensor_image = latest_image + + if hasattr(tensor_image, "shape"): + self._logger.debug( + "Tensor shape: %s, dtype: %s", tensor_image.shape, getattr(tensor_image, "dtype", None) + ) + + import torch # type: ignore[import-not-found] + + if isinstance(tensor_image, torch.Tensor): + image_np = tensor_image.cpu().numpy() + else: + image_np = np.array(tensor_image) + + while len(image_np.shape) > 3: + image_np = image_np[0] + + if image_np.dtype in (np.float32, np.float64) and image_np.max() <= 1.0: + image_np = (image_np * 255).astype(np.uint8) + + if len(image_np.shape) == 3 and image_np.shape[2] == 3: + pil_image = Image.fromarray(image_np) + img_byte_arr = io.BytesIO() + pil_image.save(img_byte_arr, format="PNG") + return img_byte_arr.getvalue() + except Exception as exc: # pragma: no cover - defensive logging path + self._logger.error("Error processing image data: %s", exc, exc_info=True) + return None + + return None diff --git a/py/services/recipes/errors.py b/py/services/recipes/errors.py new file mode 100644 index 00000000..9e5d9720 
--- /dev/null +++ b/py/services/recipes/errors.py @@ -0,0 +1,22 @@ +"""Shared exceptions for recipe services.""" +from __future__ import annotations + + +class RecipeServiceError(Exception): + """Base exception for recipe service failures.""" + + +class RecipeValidationError(RecipeServiceError): + """Raised when a request payload fails validation.""" + + +class RecipeNotFoundError(RecipeServiceError): + """Raised when a recipe resource cannot be located.""" + + +class RecipeDownloadError(RecipeServiceError): + """Raised when remote recipe assets cannot be downloaded.""" + + +class RecipeConflictError(RecipeServiceError): + """Raised when a conflicting recipe state is detected.""" diff --git a/py/services/recipes/persistence_service.py b/py/services/recipes/persistence_service.py new file mode 100644 index 00000000..945680df --- /dev/null +++ b/py/services/recipes/persistence_service.py @@ -0,0 +1,467 @@ +"""Services encapsulating recipe persistence workflows.""" +from __future__ import annotations + +import asyncio +import base64 +import json +import os +import re +import time +import uuid +from dataclasses import dataclass +from typing import Any, Dict, Iterable, Optional + +from ...config import config +from ...utils.utils import calculate_recipe_fingerprint +from .errors import RecipeNotFoundError, RecipeValidationError + + +@dataclass(frozen=True) +class PersistenceResult: + """Return payload from persistence operations.""" + + payload: dict[str, Any] + status: int = 200 + + +class RecipePersistenceService: + """Coordinate recipe persistence tasks across storage and caches.""" + + def __init__( + self, + *, + exif_utils, + card_preview_width: int, + logger, + ) -> None: + self._exif_utils = exif_utils + self._card_preview_width = card_preview_width + self._logger = logger + + async def save_recipe( + self, + *, + recipe_scanner, + image_bytes: bytes | None, + image_base64: str | None, + name: str | None, + tags: Iterable[str], + metadata: Optional[dict[str, 
Any]], + ) -> PersistenceResult: + """Persist a user uploaded recipe.""" + + missing_fields = [] + if not name: + missing_fields.append("name") + if metadata is None: + missing_fields.append("metadata") + if missing_fields: + raise RecipeValidationError( + f"Missing required fields: {', '.join(missing_fields)}" + ) + + resolved_image_bytes = self._resolve_image_bytes(image_bytes, image_base64) + recipes_dir = recipe_scanner.recipes_dir + os.makedirs(recipes_dir, exist_ok=True) + + recipe_id = str(uuid.uuid4()) + optimized_image, extension = self._exif_utils.optimize_image( + image_data=resolved_image_bytes, + target_width=self._card_preview_width, + format="webp", + quality=85, + preserve_metadata=True, + ) + image_filename = f"{recipe_id}{extension}" + image_path = os.path.join(recipes_dir, image_filename) + with open(image_path, "wb") as file_obj: + file_obj.write(optimized_image) + + current_time = time.time() + loras_data = [self._normalise_lora_entry(lora) for lora in metadata.get("loras", [])] + + gen_params = metadata.get("gen_params", {}) + if not gen_params and "raw_metadata" in metadata: + raw_metadata = metadata.get("raw_metadata", {}) + gen_params = { + "prompt": raw_metadata.get("prompt", ""), + "negative_prompt": raw_metadata.get("negative_prompt", ""), + "checkpoint": raw_metadata.get("checkpoint", {}), + "steps": raw_metadata.get("steps", ""), + "sampler": raw_metadata.get("sampler", ""), + "cfg_scale": raw_metadata.get("cfg_scale", ""), + "seed": raw_metadata.get("seed", ""), + "size": raw_metadata.get("size", ""), + "clip_skip": raw_metadata.get("clip_skip", ""), + } + + fingerprint = calculate_recipe_fingerprint(loras_data) + recipe_data: Dict[str, Any] = { + "id": recipe_id, + "file_path": image_path, + "title": name, + "modified": current_time, + "created_date": current_time, + "base_model": metadata.get("base_model", ""), + "loras": loras_data, + "gen_params": gen_params, + "fingerprint": fingerprint, + } + + tags_list = list(tags) + if 
tags_list: + recipe_data["tags"] = tags_list + + if metadata.get("source_path"): + recipe_data["source_path"] = metadata.get("source_path") + + json_filename = f"{recipe_id}.recipe.json" + json_path = os.path.join(recipes_dir, json_filename) + with open(json_path, "w", encoding="utf-8") as file_obj: + json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False) + + self._exif_utils.append_recipe_metadata(image_path, recipe_data) + + matching_recipes = await self._find_matching_recipes(recipe_scanner, fingerprint, exclude_id=recipe_id) + await self._update_cache(recipe_scanner, recipe_data) + + return PersistenceResult( + { + "success": True, + "recipe_id": recipe_id, + "image_path": image_path, + "json_path": json_path, + "matching_recipes": matching_recipes, + } + ) + + async def delete_recipe(self, *, recipe_scanner, recipe_id: str) -> PersistenceResult: + """Delete an existing recipe.""" + + recipes_dir = recipe_scanner.recipes_dir + if not recipes_dir or not os.path.exists(recipes_dir): + raise RecipeNotFoundError("Recipes directory not found") + + recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json") + if not os.path.exists(recipe_json_path): + raise RecipeNotFoundError("Recipe not found") + + with open(recipe_json_path, "r", encoding="utf-8") as file_obj: + recipe_data = json.load(file_obj) + + image_path = recipe_data.get("file_path") + os.remove(recipe_json_path) + if image_path and os.path.exists(image_path): + os.remove(image_path) + + await self._remove_from_cache(recipe_scanner, recipe_id) + return PersistenceResult({"success": True, "message": "Recipe deleted successfully"}) + + async def update_recipe(self, *, recipe_scanner, recipe_id: str, updates: dict[str, Any]) -> PersistenceResult: + """Update persisted metadata for a recipe.""" + + if not any(key in updates for key in ("title", "tags", "source_path", "preview_nsfw_level")): + raise RecipeValidationError( + "At least one field to update must be provided (title or tags or 
source_path or preview_nsfw_level)" + ) + + success = await recipe_scanner.update_recipe_metadata(recipe_id, updates) + if not success: + raise RecipeNotFoundError("Recipe not found or update failed") + + return PersistenceResult({"success": True, "recipe_id": recipe_id, "updates": updates}) + + async def reconnect_lora( + self, + *, + recipe_scanner, + recipe_id: str, + lora_index: int, + target_name: str, + ) -> PersistenceResult: + """Reconnect a LoRA entry within an existing recipe.""" + + recipe_path = os.path.join(recipe_scanner.recipes_dir, f"{recipe_id}.recipe.json") + if not os.path.exists(recipe_path): + raise RecipeNotFoundError("Recipe not found") + + lora_scanner = getattr(recipe_scanner, "_lora_scanner", None) + target_lora = None if lora_scanner is None else await lora_scanner.get_model_info_by_name(target_name) + if not target_lora: + raise RecipeNotFoundError(f"Local LoRA not found with name: {target_name}") + + with open(recipe_path, "r", encoding="utf-8") as file_obj: + recipe_data = json.load(file_obj) + + loras = recipe_data.get("loras", []) + if lora_index >= len(loras): + raise RecipeNotFoundError("LoRA index out of range in recipe") + + lora = loras[lora_index] + lora["isDeleted"] = False + lora["exclude"] = False + lora["file_name"] = target_name + if "sha256" in target_lora: + lora["hash"] = target_lora["sha256"].lower() + if target_lora.get("civitai"): + lora["modelName"] = target_lora["civitai"]["model"]["name"] + lora["modelVersionName"] = target_lora["civitai"]["name"] + lora["modelVersionId"] = target_lora["civitai"]["id"] + + recipe_data["fingerprint"] = calculate_recipe_fingerprint(recipe_data.get("loras", [])) + + with open(recipe_path, "w", encoding="utf-8") as file_obj: + json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False) + + updated_lora = dict(lora) + updated_lora["inLibrary"] = True + updated_lora["preview_url"] = config.get_preview_static_url(target_lora["preview_url"]) + updated_lora["localPath"] = 
target_lora["file_path"] + + await self._refresh_cache_after_update(recipe_scanner, recipe_id, recipe_data) + + image_path = recipe_data.get("file_path") + if image_path and os.path.exists(image_path): + self._exif_utils.append_recipe_metadata(image_path, recipe_data) + + matching_recipes = [] + if "fingerprint" in recipe_data: + matching_recipes = await recipe_scanner.find_recipes_by_fingerprint(recipe_data["fingerprint"]) + if recipe_id in matching_recipes: + matching_recipes.remove(recipe_id) + + return PersistenceResult( + { + "success": True, + "recipe_id": recipe_id, + "updated_lora": updated_lora, + "matching_recipes": matching_recipes, + } + ) + + async def bulk_delete( + self, + *, + recipe_scanner, + recipe_ids: Iterable[str], + ) -> PersistenceResult: + """Delete multiple recipes in a single request.""" + + recipe_ids = list(recipe_ids) + if not recipe_ids: + raise RecipeValidationError("No recipe IDs provided") + + recipes_dir = recipe_scanner.recipes_dir + if not recipes_dir or not os.path.exists(recipes_dir): + raise RecipeNotFoundError("Recipes directory not found") + + deleted_recipes: list[str] = [] + failed_recipes: list[dict[str, Any]] = [] + + for recipe_id in recipe_ids: + recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json") + if not os.path.exists(recipe_json_path): + failed_recipes.append({"id": recipe_id, "reason": "Recipe not found"}) + continue + + try: + with open(recipe_json_path, "r", encoding="utf-8") as file_obj: + recipe_data = json.load(file_obj) + image_path = recipe_data.get("file_path") + os.remove(recipe_json_path) + if image_path and os.path.exists(image_path): + os.remove(image_path) + deleted_recipes.append(recipe_id) + except Exception as exc: + failed_recipes.append({"id": recipe_id, "reason": str(exc)}) + + if deleted_recipes: + await self._bulk_remove_from_cache(recipe_scanner, deleted_recipes) + + return PersistenceResult( + { + "success": True, + "deleted": deleted_recipes, + "failed": 
 failed_recipes,
+                "total_deleted": len(deleted_recipes),
+                "total_failed": len(failed_recipes),
+            }
+        )
+
+    async def save_recipe_from_widget(
+        self,
+        *,
+        recipe_scanner,
+        metadata: dict[str, Any],
+        image_bytes: bytes,
+    ) -> PersistenceResult:
+        """Save a recipe constructed from widget metadata."""
+
+        if not metadata:
+            raise RecipeValidationError("No generation metadata found")
+
+        recipes_dir = recipe_scanner.recipes_dir
+        os.makedirs(recipes_dir, exist_ok=True)
+
+        recipe_id = str(uuid.uuid4())
+        image_filename = f"{recipe_id}.png"
+        image_path = os.path.join(recipes_dir, image_filename)
+        with open(image_path, "wb") as file_obj:
+            file_obj.write(image_bytes)
+
+        lora_stack = metadata.get("loras", "")
+        lora_matches = re.findall(r"<lora:([^:]+):([^>]+)>", lora_stack)
+        if not lora_matches:
+            raise RecipeValidationError("No LoRAs found in the generation metadata")
+
+        lora_scanner = getattr(recipe_scanner, "_lora_scanner", None)
+        loras_data = []
+        base_model_counts: Dict[str, int] = {}
+
+        for name, strength in lora_matches:
+            lora_info = None
+            if lora_scanner is not None:
+                lora_info = await lora_scanner.get_model_info_by_name(name)
+            lora_data = {
+                "file_name": name,
+                "strength": float(strength),
+                "hash": (lora_info.get("sha256") or "").lower() if lora_info else "",
+                "modelVersionId": lora_info.get("civitai", {}).get("id") if lora_info else 0,
+                "modelName": lora_info.get("civitai", {}).get("model", {}).get("name") if lora_info else "",
+                "modelVersionName": lora_info.get("civitai", {}).get("name") if lora_info else "",
+                "isDeleted": False,
+                "exclude": False,
+            }
+            loras_data.append(lora_data)
+
+            if lora_info and "base_model" in lora_info:
+                base_model = lora_info["base_model"]
+                base_model_counts[base_model] = base_model_counts.get(base_model, 0) + 1
+
+        recipe_name = self._derive_recipe_name(lora_matches)
+        most_common_base_model = (
+            max(base_model_counts.items(), key=lambda item: item[1])[0] if base_model_counts else ""
+        )
+
+        recipe_data = {
+            "id": 
recipe_id, + "file_path": image_path, + "title": recipe_name, + "modified": time.time(), + "created_date": time.time(), + "base_model": most_common_base_model, + "loras": loras_data, + "checkpoint": metadata.get("checkpoint", ""), + "gen_params": { + key: value + for key, value in metadata.items() + if key not in ["checkpoint", "loras"] + }, + "loras_stack": lora_stack, + } + + json_filename = f"{recipe_id}.recipe.json" + json_path = os.path.join(recipes_dir, json_filename) + with open(json_path, "w", encoding="utf-8") as file_obj: + json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False) + + self._exif_utils.append_recipe_metadata(image_path, recipe_data) + await self._update_cache(recipe_scanner, recipe_data) + + return PersistenceResult( + { + "success": True, + "recipe_id": recipe_id, + "image_path": image_path, + "json_path": json_path, + "recipe_name": recipe_name, + } + ) + + # Helper methods --------------------------------------------------- + + def _resolve_image_bytes(self, image_bytes: bytes | None, image_base64: str | None) -> bytes: + if image_bytes is not None: + return image_bytes + if image_base64: + try: + payload = image_base64.split(",", 1)[1] if "," in image_base64 else image_base64 + return base64.b64decode(payload) + except Exception as exc: # pragma: no cover - validation guard + raise RecipeValidationError(f"Invalid base64 image data: {exc}") from exc + raise RecipeValidationError("No image data provided") + + def _normalise_lora_entry(self, lora: dict[str, Any]) -> dict[str, Any]: + return { + "file_name": lora.get("file_name", "") + or ( + os.path.splitext(os.path.basename(lora.get("localPath", "")))[0] + if lora.get("localPath") + else "" + ), + "hash": (lora.get("hash") or "").lower(), + "strength": float(lora.get("weight", 1.0)), + "modelVersionId": lora.get("id", 0), + "modelName": lora.get("name", ""), + "modelVersionName": lora.get("version", ""), + "isDeleted": lora.get("isDeleted", False), + "exclude": lora.get("exclude", 
False), + } + + async def _find_matching_recipes( + self, + recipe_scanner, + fingerprint: str | None, + *, + exclude_id: Optional[str] = None, + ) -> list[str]: + if not fingerprint: + return [] + matches = await recipe_scanner.find_recipes_by_fingerprint(fingerprint) + if exclude_id and exclude_id in matches: + matches.remove(exclude_id) + return matches + + async def _update_cache(self, recipe_scanner, recipe_data: dict[str, Any]) -> None: + cache = getattr(recipe_scanner, "_cache", None) + if cache is not None: + cache.raw_data.append(recipe_data) + asyncio.create_task(cache.resort()) + self._logger.info("Added recipe %s to cache", recipe_data.get("id")) + + async def _remove_from_cache(self, recipe_scanner, recipe_id: str) -> None: + cache = getattr(recipe_scanner, "_cache", None) + if cache is not None: + cache.raw_data = [item for item in cache.raw_data if str(item.get("id", "")) != recipe_id] + asyncio.create_task(cache.resort()) + self._logger.info("Removed recipe %s from cache", recipe_id) + + async def _bulk_remove_from_cache(self, recipe_scanner, recipe_ids: Iterable[str]) -> None: + cache = getattr(recipe_scanner, "_cache", None) + if cache is not None: + recipe_ids_set = set(recipe_ids) + cache.raw_data = [item for item in cache.raw_data if item.get("id") not in recipe_ids_set] + asyncio.create_task(cache.resort()) + self._logger.info("Removed %s recipes from cache", len(recipe_ids_set)) + + async def _refresh_cache_after_update( + self, + recipe_scanner, + recipe_id: str, + recipe_data: dict[str, Any], + ) -> None: + cache = getattr(recipe_scanner, "_cache", None) + if cache is not None: + for cache_item in cache.raw_data: + if cache_item.get("id") == recipe_id: + cache_item.update({ + "loras": recipe_data.get("loras", []), + "fingerprint": recipe_data.get("fingerprint"), + }) + asyncio.create_task(cache.resort()) + break + + def _derive_recipe_name(self, lora_matches: list[tuple[str, str]]) -> str: + recipe_name_parts = 
[f"{name.strip()}-{float(strength):.2f}" for name, strength in lora_matches[:3]] + recipe_name = "_".join(recipe_name_parts) + return recipe_name or "recipe" diff --git a/py/services/recipes/sharing_service.py b/py/services/recipes/sharing_service.py new file mode 100644 index 00000000..7c365bba --- /dev/null +++ b/py/services/recipes/sharing_service.py @@ -0,0 +1,113 @@ +"""Services handling recipe sharing and downloads.""" +from __future__ import annotations + +import os +import shutil +import tempfile +import time +from dataclasses import dataclass +from typing import Any, Dict + +from .errors import RecipeNotFoundError + + +@dataclass(frozen=True) +class SharingResult: + """Return payload for share operations.""" + + payload: dict[str, Any] + status: int = 200 + + +@dataclass(frozen=True) +class DownloadInfo: + """Information required to stream a shared recipe file.""" + + file_path: str + download_filename: str + + +class RecipeSharingService: + """Prepare temporary recipe downloads with TTL cleanup.""" + + def __init__(self, *, ttl_seconds: int = 300, logger) -> None: + self._ttl_seconds = ttl_seconds + self._logger = logger + self._shared_recipes: Dict[str, Dict[str, Any]] = {} + + async def share_recipe(self, *, recipe_scanner, recipe_id: str) -> SharingResult: + """Prepare a temporary downloadable copy of a recipe image.""" + + cache = await recipe_scanner.get_cached_data() + recipe = next( + (r for r in getattr(cache, "raw_data", []) if str(r.get("id", "")) == recipe_id), + None, + ) + if not recipe: + raise RecipeNotFoundError("Recipe not found") + + image_path = recipe.get("file_path") + if not image_path or not os.path.exists(image_path): + raise RecipeNotFoundError("Recipe image not found") + + ext = os.path.splitext(image_path)[1] + with tempfile.NamedTemporaryFile(suffix=ext, delete=False) as temp_file: + temp_path = temp_file.name + + shutil.copy2(image_path, temp_path) + timestamp = int(time.time()) + self._shared_recipes[recipe_id] = { + "path": 
temp_path, + "timestamp": timestamp, + "expires": time.time() + self._ttl_seconds, + } + self._cleanup_shared_recipes() + + safe_title = recipe.get("title", "").replace(" ", "_").lower() + filename = f"recipe_{safe_title}{ext}" if safe_title else f"recipe_{recipe_id}{ext}" + url_path = f"/api/recipe/{recipe_id}/share/download?t={timestamp}" + return SharingResult({"success": True, "download_url": url_path, "filename": filename}) + + async def prepare_download(self, *, recipe_scanner, recipe_id: str) -> DownloadInfo: + """Return file path and filename for a prepared shared recipe.""" + + shared_info = self._shared_recipes.get(recipe_id) + if not shared_info or time.time() > shared_info.get("expires", 0): + self._cleanup_entry(recipe_id) + raise RecipeNotFoundError("Shared recipe not found or expired") + + file_path = shared_info["path"] + if not os.path.exists(file_path): + self._cleanup_entry(recipe_id) + raise RecipeNotFoundError("Shared recipe file not found") + + cache = await recipe_scanner.get_cached_data() + recipe = next( + (r for r in getattr(cache, "raw_data", []) if str(r.get("id", "")) == recipe_id), + None, + ) + filename_base = ( + f"recipe_{recipe.get('title', '').replace(' ', '_').lower()}" if recipe else recipe_id + ) + ext = os.path.splitext(file_path)[1] + download_filename = f"{filename_base}{ext}" + return DownloadInfo(file_path=file_path, download_filename=download_filename) + + def _cleanup_shared_recipes(self) -> None: + for recipe_id in list(self._shared_recipes.keys()): + shared = self._shared_recipes.get(recipe_id) + if not shared: + continue + if time.time() > shared.get("expires", 0): + self._cleanup_entry(recipe_id) + + def _cleanup_entry(self, recipe_id: str) -> None: + shared_info = self._shared_recipes.pop(recipe_id, None) + if not shared_info: + return + file_path = shared_info.get("path") + if file_path and os.path.exists(file_path): + try: + os.unlink(file_path) + except Exception as exc: # pragma: no cover - defensive logging + 
import logging
import os
from types import SimpleNamespace

import pytest

from py.services.recipes.analysis_service import RecipeAnalysisService
from py.services.recipes.errors import RecipeDownloadError, RecipeNotFoundError
from py.services.recipes.persistence_service import RecipePersistenceService


class DummyExifUtils:
    """Minimal stand-in for ExifUtils used by the recipe services."""

    def optimize_image(self, image_data, target_width, format, quality, preserve_metadata):
        # Pretend a webp conversion happened without touching the bytes.
        return image_data, ".webp"

    def append_recipe_metadata(self, image_path, recipe_data):
        # Record the call so assertions can inspect what was written.
        self.appended = (image_path, recipe_data)

    def extract_image_metadata(self, path):
        return {}


def _make_analysis_service(downloader_factory, parser_factory=None):
    """Build a RecipeAnalysisService wired entirely with test doubles."""
    if parser_factory is None:
        parser_factory = SimpleNamespace(create_parser=lambda metadata: None)
    return RecipeAnalysisService(
        exif_utils=DummyExifUtils(),
        recipe_parser_factory=parser_factory,
        downloader_factory=downloader_factory,
        metadata_collector=None,
        metadata_processor_cls=None,
        metadata_registry_cls=None,
        standalone_mode=False,
        logger=logging.getLogger("test"),
    )


@pytest.mark.asyncio
async def test_analyze_remote_image_download_failure_cleans_temp(tmp_path, monkeypatch):
    class _FailingDownloader:
        async def download_file(self, url, path, use_auth=False):
            return False, "failure"

    async def downloader_factory():
        return _FailingDownloader()

    service = _make_analysis_service(downloader_factory)

    temp_path = tmp_path / "temp.jpg"

    def create_temp_path():
        temp_path.write_bytes(b"")
        return str(temp_path)

    monkeypatch.setattr(service, "_create_temp_path", create_temp_path)

    with pytest.raises(RecipeDownloadError):
        await service.analyze_remote_image(
            url="https://example.com/image.jpg",
            recipe_scanner=SimpleNamespace(),
            civitai_client=SimpleNamespace(),
        )

    assert not temp_path.exists(), "temporary file should be cleaned after failure"


@pytest.mark.asyncio
async def test_analyze_local_image_missing_file(tmp_path):
    async def downloader_factory():
        return SimpleNamespace()

    service = _make_analysis_service(downloader_factory)

    with pytest.raises(RecipeNotFoundError):
        await service.analyze_local_image(
            file_path=str(tmp_path / "missing.png"),
            recipe_scanner=SimpleNamespace(),
        )


@pytest.mark.asyncio
async def test_save_recipe_reports_duplicates(tmp_path):
    class _CacheStub:
        def __init__(self):
            self.raw_data = []

        async def resort(self):
            pass

    class _ScannerStub:
        def __init__(self, root):
            self.recipes_dir = str(root)
            self._cache = _CacheStub()
            self.last_fingerprint = None

        async def find_recipes_by_fingerprint(self, fingerprint):
            self.last_fingerprint = fingerprint
            return ["existing"]

    scanner = _ScannerStub(tmp_path)
    service = RecipePersistenceService(
        exif_utils=DummyExifUtils(),
        card_preview_width=512,
        logger=logging.getLogger("test"),
    )

    lora_entry = {
        "file_name": "sample",
        "hash": "abc123",
        "weight": 0.5,
        "id": 1,
        "name": "Sample",
        "version": "v1",
        "isDeleted": False,
        "exclude": False,
    }

    result = await service.save_recipe(
        recipe_scanner=scanner,
        image_bytes=b"image-bytes",
        image_base64=None,
        name="My Recipe",
        tags=["tag"],
        metadata={"base_model": "sd", "loras": [lora_entry]},
    )

    # A fingerprint match must be surfaced to the caller, and the recipe must
    # still be persisted (JSON on disk) and registered in the scanner cache.
    assert result.payload["matching_recipes"] == ["existing"]
    assert scanner.last_fingerprint is not None
    assert os.path.exists(result.payload["json_path"])
    assert scanner._cache.raw_data