mirror of
https://github.com/willmiao/ComfyUI-Lora-Manager.git
synced 2026-03-21 21:22:11 -03:00
Merge pull request #456 from willmiao/codex/refactor-http-handlers-with-recipe-services
Refactor recipe handlers to use dedicated services
This commit is contained in:
@@ -2,15 +2,25 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import Callable, Mapping
|
||||
|
||||
import jinja2
|
||||
from aiohttp import web
|
||||
|
||||
from ..config import config
|
||||
from ..recipes import RecipeParserFactory
|
||||
from ..services.downloader import get_downloader
|
||||
from ..services.recipes import (
|
||||
RecipeAnalysisService,
|
||||
RecipePersistenceService,
|
||||
RecipeSharingService,
|
||||
)
|
||||
from ..services.server_i18n import server_i18n
|
||||
from ..services.service_registry import ServiceRegistry
|
||||
from ..services.settings_manager import settings
|
||||
from ..utils.constants import CARD_PREVIEW_WIDTH
|
||||
from ..utils.exif_utils import ExifUtils
|
||||
from .handlers.recipe_handlers import (
|
||||
RecipeAnalysisHandler,
|
||||
RecipeHandlerSet,
|
||||
@@ -124,6 +134,37 @@ class BaseRecipeRoutes:
|
||||
recipe_scanner_getter = lambda: self.recipe_scanner
|
||||
civitai_client_getter = lambda: self.civitai_client
|
||||
|
||||
standalone_mode = os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0"
|
||||
if not standalone_mode:
|
||||
from ..metadata_collector import get_metadata # type: ignore[import-not-found]
|
||||
from ..metadata_collector.metadata_processor import ( # type: ignore[import-not-found]
|
||||
MetadataProcessor,
|
||||
)
|
||||
from ..metadata_collector.metadata_registry import ( # type: ignore[import-not-found]
|
||||
MetadataRegistry,
|
||||
)
|
||||
else: # pragma: no cover - optional dependency path
|
||||
get_metadata = None # type: ignore[assignment]
|
||||
MetadataProcessor = None # type: ignore[assignment]
|
||||
MetadataRegistry = None # type: ignore[assignment]
|
||||
|
||||
analysis_service = RecipeAnalysisService(
|
||||
exif_utils=ExifUtils,
|
||||
recipe_parser_factory=RecipeParserFactory,
|
||||
downloader_factory=get_downloader,
|
||||
metadata_collector=get_metadata,
|
||||
metadata_processor_cls=MetadataProcessor,
|
||||
metadata_registry_cls=MetadataRegistry,
|
||||
standalone_mode=standalone_mode,
|
||||
logger=logger,
|
||||
)
|
||||
persistence_service = RecipePersistenceService(
|
||||
exif_utils=ExifUtils,
|
||||
card_preview_width=CARD_PREVIEW_WIDTH,
|
||||
logger=logger,
|
||||
)
|
||||
sharing_service = RecipeSharingService(logger=logger)
|
||||
|
||||
page_view = RecipePageView(
|
||||
ensure_dependencies_ready=self.ensure_dependencies_ready,
|
||||
settings_service=self.settings,
|
||||
@@ -148,17 +189,21 @@ class BaseRecipeRoutes:
|
||||
ensure_dependencies_ready=self.ensure_dependencies_ready,
|
||||
recipe_scanner_getter=recipe_scanner_getter,
|
||||
logger=logger,
|
||||
persistence_service=persistence_service,
|
||||
analysis_service=analysis_service,
|
||||
)
|
||||
analysis = RecipeAnalysisHandler(
|
||||
ensure_dependencies_ready=self.ensure_dependencies_ready,
|
||||
recipe_scanner_getter=recipe_scanner_getter,
|
||||
civitai_client_getter=civitai_client_getter,
|
||||
logger=logger,
|
||||
analysis_service=analysis_service,
|
||||
)
|
||||
sharing = RecipeSharingHandler(
|
||||
ensure_dependencies_ready=self.ensure_dependencies_ready,
|
||||
recipe_scanner_getter=recipe_scanner_getter,
|
||||
logger=logger,
|
||||
sharing_service=sharing_service,
|
||||
)
|
||||
|
||||
return RecipeHandlerSet(
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
23
py/services/recipes/__init__.py
Normal file
23
py/services/recipes/__init__.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""Recipe service layer implementations."""
|
||||
|
||||
from .analysis_service import RecipeAnalysisService
|
||||
from .persistence_service import RecipePersistenceService
|
||||
from .sharing_service import RecipeSharingService
|
||||
from .errors import (
|
||||
RecipeServiceError,
|
||||
RecipeValidationError,
|
||||
RecipeNotFoundError,
|
||||
RecipeDownloadError,
|
||||
RecipeConflictError,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"RecipeAnalysisService",
|
||||
"RecipePersistenceService",
|
||||
"RecipeSharingService",
|
||||
"RecipeServiceError",
|
||||
"RecipeValidationError",
|
||||
"RecipeNotFoundError",
|
||||
"RecipeDownloadError",
|
||||
"RecipeConflictError",
|
||||
]
|
||||
289
py/services/recipes/analysis_service.py
Normal file
289
py/services/recipes/analysis_service.py
Normal file
@@ -0,0 +1,289 @@
|
||||
"""Services responsible for recipe metadata analysis."""
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Callable, Optional
|
||||
|
||||
import numpy as np
|
||||
from PIL import Image
|
||||
|
||||
from ...utils.utils import calculate_recipe_fingerprint
|
||||
from .errors import (
|
||||
RecipeDownloadError,
|
||||
RecipeNotFoundError,
|
||||
RecipeServiceError,
|
||||
RecipeValidationError,
|
||||
)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class AnalysisResult:
|
||||
"""Return payload from analysis operations."""
|
||||
|
||||
payload: dict[str, Any]
|
||||
status: int = 200
|
||||
|
||||
|
||||
class RecipeAnalysisService:
|
||||
"""Extract recipe metadata from various image sources."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
exif_utils,
|
||||
recipe_parser_factory,
|
||||
downloader_factory: Callable[[], Any],
|
||||
metadata_collector: Optional[Callable[[], Any]] = None,
|
||||
metadata_processor_cls: Optional[type] = None,
|
||||
metadata_registry_cls: Optional[type] = None,
|
||||
standalone_mode: bool = False,
|
||||
logger,
|
||||
) -> None:
|
||||
self._exif_utils = exif_utils
|
||||
self._recipe_parser_factory = recipe_parser_factory
|
||||
self._downloader_factory = downloader_factory
|
||||
self._metadata_collector = metadata_collector
|
||||
self._metadata_processor_cls = metadata_processor_cls
|
||||
self._metadata_registry_cls = metadata_registry_cls
|
||||
self._standalone_mode = standalone_mode
|
||||
self._logger = logger
|
||||
|
||||
async def analyze_uploaded_image(
|
||||
self,
|
||||
*,
|
||||
image_bytes: bytes | None,
|
||||
recipe_scanner,
|
||||
) -> AnalysisResult:
|
||||
"""Analyze an uploaded image payload."""
|
||||
|
||||
if not image_bytes:
|
||||
raise RecipeValidationError("No image data provided")
|
||||
|
||||
temp_path = self._write_temp_file(image_bytes)
|
||||
try:
|
||||
metadata = self._exif_utils.extract_image_metadata(temp_path)
|
||||
if not metadata:
|
||||
return AnalysisResult({"error": "No metadata found in this image", "loras": []})
|
||||
|
||||
return await self._parse_metadata(
|
||||
metadata,
|
||||
recipe_scanner=recipe_scanner,
|
||||
image_path=None,
|
||||
include_image_base64=False,
|
||||
)
|
||||
finally:
|
||||
self._safe_cleanup(temp_path)
|
||||
|
||||
async def analyze_remote_image(
|
||||
self,
|
||||
*,
|
||||
url: str | None,
|
||||
recipe_scanner,
|
||||
civitai_client,
|
||||
) -> AnalysisResult:
|
||||
"""Analyze an image accessible via URL, including Civitai integration."""
|
||||
|
||||
if not url:
|
||||
raise RecipeValidationError("No URL provided")
|
||||
|
||||
if civitai_client is None:
|
||||
raise RecipeServiceError("Civitai client unavailable")
|
||||
|
||||
temp_path = self._create_temp_path()
|
||||
metadata: Optional[dict[str, Any]] = None
|
||||
try:
|
||||
civitai_match = re.match(r"https://civitai\.com/images/(\d+)", url)
|
||||
if civitai_match:
|
||||
image_info = await civitai_client.get_image_info(civitai_match.group(1))
|
||||
if not image_info:
|
||||
raise RecipeDownloadError("Failed to fetch image information from Civitai")
|
||||
image_url = image_info.get("url")
|
||||
if not image_url:
|
||||
raise RecipeDownloadError("No image URL found in Civitai response")
|
||||
await self._download_image(image_url, temp_path)
|
||||
metadata = image_info.get("meta") if "meta" in image_info else None
|
||||
else:
|
||||
await self._download_image(url, temp_path)
|
||||
|
||||
if metadata is None:
|
||||
metadata = self._exif_utils.extract_image_metadata(temp_path)
|
||||
|
||||
if not metadata:
|
||||
return self._metadata_not_found_response(temp_path)
|
||||
|
||||
return await self._parse_metadata(
|
||||
metadata,
|
||||
recipe_scanner=recipe_scanner,
|
||||
image_path=temp_path,
|
||||
include_image_base64=True,
|
||||
)
|
||||
finally:
|
||||
self._safe_cleanup(temp_path)
|
||||
|
||||
async def analyze_local_image(
|
||||
self,
|
||||
*,
|
||||
file_path: str | None,
|
||||
recipe_scanner,
|
||||
) -> AnalysisResult:
|
||||
"""Analyze a file already present on disk."""
|
||||
|
||||
if not file_path:
|
||||
raise RecipeValidationError("No file path provided")
|
||||
|
||||
normalized_path = os.path.normpath(file_path.strip('"').strip("'"))
|
||||
if not os.path.isfile(normalized_path):
|
||||
raise RecipeNotFoundError("File not found")
|
||||
|
||||
metadata = self._exif_utils.extract_image_metadata(normalized_path)
|
||||
if not metadata:
|
||||
return self._metadata_not_found_response(normalized_path)
|
||||
|
||||
return await self._parse_metadata(
|
||||
metadata,
|
||||
recipe_scanner=recipe_scanner,
|
||||
image_path=normalized_path,
|
||||
include_image_base64=True,
|
||||
)
|
||||
|
||||
async def analyze_widget_metadata(self, *, recipe_scanner) -> AnalysisResult:
|
||||
"""Analyse the most recent generation metadata for widget saves."""
|
||||
|
||||
if self._metadata_collector is None or self._metadata_processor_cls is None:
|
||||
raise RecipeValidationError("Metadata collection not available")
|
||||
|
||||
raw_metadata = self._metadata_collector()
|
||||
metadata_dict = self._metadata_processor_cls.to_dict(raw_metadata)
|
||||
if not metadata_dict:
|
||||
raise RecipeValidationError("No generation metadata found")
|
||||
|
||||
latest_image = None
|
||||
if not self._standalone_mode and self._metadata_registry_cls is not None:
|
||||
metadata_registry = self._metadata_registry_cls()
|
||||
latest_image = metadata_registry.get_first_decoded_image()
|
||||
|
||||
if latest_image is None:
|
||||
raise RecipeValidationError(
|
||||
"No recent images found to use for recipe. Try generating an image first."
|
||||
)
|
||||
|
||||
image_bytes = self._convert_tensor_to_png_bytes(latest_image)
|
||||
if image_bytes is None:
|
||||
raise RecipeValidationError("Cannot handle this data shape from metadata registry")
|
||||
|
||||
return AnalysisResult(
|
||||
{
|
||||
"metadata": metadata_dict,
|
||||
"image_bytes": image_bytes,
|
||||
}
|
||||
)
|
||||
|
||||
# Internal helpers -------------------------------------------------
|
||||
|
||||
async def _parse_metadata(
|
||||
self,
|
||||
metadata: dict[str, Any],
|
||||
*,
|
||||
recipe_scanner,
|
||||
image_path: Optional[str],
|
||||
include_image_base64: bool,
|
||||
) -> AnalysisResult:
|
||||
parser = self._recipe_parser_factory.create_parser(metadata)
|
||||
if parser is None:
|
||||
payload = {"error": "No parser found for this image", "loras": []}
|
||||
if include_image_base64 and image_path:
|
||||
payload["image_base64"] = self._encode_file(image_path)
|
||||
return AnalysisResult(payload)
|
||||
|
||||
result = await parser.parse_metadata(metadata, recipe_scanner=recipe_scanner)
|
||||
|
||||
if include_image_base64 and image_path:
|
||||
result["image_base64"] = self._encode_file(image_path)
|
||||
|
||||
if "error" in result and not result.get("loras"):
|
||||
return AnalysisResult(result)
|
||||
|
||||
fingerprint = calculate_recipe_fingerprint(result.get("loras", []))
|
||||
result["fingerprint"] = fingerprint
|
||||
|
||||
matching_recipes: list[str] = []
|
||||
if fingerprint:
|
||||
matching_recipes = await recipe_scanner.find_recipes_by_fingerprint(fingerprint)
|
||||
result["matching_recipes"] = matching_recipes
|
||||
|
||||
return AnalysisResult(result)
|
||||
|
||||
async def _download_image(self, url: str, temp_path: str) -> None:
|
||||
downloader = await self._downloader_factory()
|
||||
success, result = await downloader.download_file(url, temp_path, use_auth=False)
|
||||
if not success:
|
||||
raise RecipeDownloadError(f"Failed to download image from URL: {result}")
|
||||
|
||||
def _metadata_not_found_response(self, path: str) -> AnalysisResult:
|
||||
payload: dict[str, Any] = {"error": "No metadata found in this image", "loras": []}
|
||||
if os.path.exists(path):
|
||||
payload["image_base64"] = self._encode_file(path)
|
||||
return AnalysisResult(payload)
|
||||
|
||||
def _write_temp_file(self, data: bytes) -> str:
|
||||
with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file:
|
||||
temp_file.write(data)
|
||||
return temp_file.name
|
||||
|
||||
def _create_temp_path(self) -> str:
|
||||
with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file:
|
||||
return temp_file.name
|
||||
|
||||
def _safe_cleanup(self, path: Optional[str]) -> None:
|
||||
if path and os.path.exists(path):
|
||||
try:
|
||||
os.unlink(path)
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
self._logger.error("Error deleting temporary file: %s", exc)
|
||||
|
||||
def _encode_file(self, path: str) -> str:
|
||||
with open(path, "rb") as image_file:
|
||||
return base64.b64encode(image_file.read()).decode("utf-8")
|
||||
|
||||
def _convert_tensor_to_png_bytes(self, latest_image: Any) -> Optional[bytes]:
|
||||
try:
|
||||
if isinstance(latest_image, tuple):
|
||||
tensor_image = latest_image[0] if latest_image else None
|
||||
if tensor_image is None:
|
||||
return None
|
||||
else:
|
||||
tensor_image = latest_image
|
||||
|
||||
if hasattr(tensor_image, "shape"):
|
||||
self._logger.debug(
|
||||
"Tensor shape: %s, dtype: %s", tensor_image.shape, getattr(tensor_image, "dtype", None)
|
||||
)
|
||||
|
||||
import torch # type: ignore[import-not-found]
|
||||
|
||||
if isinstance(tensor_image, torch.Tensor):
|
||||
image_np = tensor_image.cpu().numpy()
|
||||
else:
|
||||
image_np = np.array(tensor_image)
|
||||
|
||||
while len(image_np.shape) > 3:
|
||||
image_np = image_np[0]
|
||||
|
||||
if image_np.dtype in (np.float32, np.float64) and image_np.max() <= 1.0:
|
||||
image_np = (image_np * 255).astype(np.uint8)
|
||||
|
||||
if len(image_np.shape) == 3 and image_np.shape[2] == 3:
|
||||
pil_image = Image.fromarray(image_np)
|
||||
img_byte_arr = io.BytesIO()
|
||||
pil_image.save(img_byte_arr, format="PNG")
|
||||
return img_byte_arr.getvalue()
|
||||
except Exception as exc: # pragma: no cover - defensive logging path
|
||||
self._logger.error("Error processing image data: %s", exc, exc_info=True)
|
||||
return None
|
||||
|
||||
return None
|
||||
22
py/services/recipes/errors.py
Normal file
22
py/services/recipes/errors.py
Normal file
@@ -0,0 +1,22 @@
|
||||
"""Shared exceptions for recipe services."""
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
class RecipeServiceError(Exception):
|
||||
"""Base exception for recipe service failures."""
|
||||
|
||||
|
||||
class RecipeValidationError(RecipeServiceError):
|
||||
"""Raised when a request payload fails validation."""
|
||||
|
||||
|
||||
class RecipeNotFoundError(RecipeServiceError):
|
||||
"""Raised when a recipe resource cannot be located."""
|
||||
|
||||
|
||||
class RecipeDownloadError(RecipeServiceError):
|
||||
"""Raised when remote recipe assets cannot be downloaded."""
|
||||
|
||||
|
||||
class RecipeConflictError(RecipeServiceError):
|
||||
"""Raised when a conflicting recipe state is detected."""
|
||||
467
py/services/recipes/persistence_service.py
Normal file
467
py/services/recipes/persistence_service.py
Normal file
@@ -0,0 +1,467 @@
|
||||
"""Services encapsulating recipe persistence workflows."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict, Iterable, Optional
|
||||
|
||||
from ...config import config
|
||||
from ...utils.utils import calculate_recipe_fingerprint
|
||||
from .errors import RecipeNotFoundError, RecipeValidationError
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class PersistenceResult:
|
||||
"""Return payload from persistence operations."""
|
||||
|
||||
payload: dict[str, Any]
|
||||
status: int = 200
|
||||
|
||||
|
||||
class RecipePersistenceService:
|
||||
"""Coordinate recipe persistence tasks across storage and caches."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
exif_utils,
|
||||
card_preview_width: int,
|
||||
logger,
|
||||
) -> None:
|
||||
self._exif_utils = exif_utils
|
||||
self._card_preview_width = card_preview_width
|
||||
self._logger = logger
|
||||
|
||||
async def save_recipe(
|
||||
self,
|
||||
*,
|
||||
recipe_scanner,
|
||||
image_bytes: bytes | None,
|
||||
image_base64: str | None,
|
||||
name: str | None,
|
||||
tags: Iterable[str],
|
||||
metadata: Optional[dict[str, Any]],
|
||||
) -> PersistenceResult:
|
||||
"""Persist a user uploaded recipe."""
|
||||
|
||||
missing_fields = []
|
||||
if not name:
|
||||
missing_fields.append("name")
|
||||
if metadata is None:
|
||||
missing_fields.append("metadata")
|
||||
if missing_fields:
|
||||
raise RecipeValidationError(
|
||||
f"Missing required fields: {', '.join(missing_fields)}"
|
||||
)
|
||||
|
||||
resolved_image_bytes = self._resolve_image_bytes(image_bytes, image_base64)
|
||||
recipes_dir = recipe_scanner.recipes_dir
|
||||
os.makedirs(recipes_dir, exist_ok=True)
|
||||
|
||||
recipe_id = str(uuid.uuid4())
|
||||
optimized_image, extension = self._exif_utils.optimize_image(
|
||||
image_data=resolved_image_bytes,
|
||||
target_width=self._card_preview_width,
|
||||
format="webp",
|
||||
quality=85,
|
||||
preserve_metadata=True,
|
||||
)
|
||||
image_filename = f"{recipe_id}{extension}"
|
||||
image_path = os.path.join(recipes_dir, image_filename)
|
||||
with open(image_path, "wb") as file_obj:
|
||||
file_obj.write(optimized_image)
|
||||
|
||||
current_time = time.time()
|
||||
loras_data = [self._normalise_lora_entry(lora) for lora in metadata.get("loras", [])]
|
||||
|
||||
gen_params = metadata.get("gen_params", {})
|
||||
if not gen_params and "raw_metadata" in metadata:
|
||||
raw_metadata = metadata.get("raw_metadata", {})
|
||||
gen_params = {
|
||||
"prompt": raw_metadata.get("prompt", ""),
|
||||
"negative_prompt": raw_metadata.get("negative_prompt", ""),
|
||||
"checkpoint": raw_metadata.get("checkpoint", {}),
|
||||
"steps": raw_metadata.get("steps", ""),
|
||||
"sampler": raw_metadata.get("sampler", ""),
|
||||
"cfg_scale": raw_metadata.get("cfg_scale", ""),
|
||||
"seed": raw_metadata.get("seed", ""),
|
||||
"size": raw_metadata.get("size", ""),
|
||||
"clip_skip": raw_metadata.get("clip_skip", ""),
|
||||
}
|
||||
|
||||
fingerprint = calculate_recipe_fingerprint(loras_data)
|
||||
recipe_data: Dict[str, Any] = {
|
||||
"id": recipe_id,
|
||||
"file_path": image_path,
|
||||
"title": name,
|
||||
"modified": current_time,
|
||||
"created_date": current_time,
|
||||
"base_model": metadata.get("base_model", ""),
|
||||
"loras": loras_data,
|
||||
"gen_params": gen_params,
|
||||
"fingerprint": fingerprint,
|
||||
}
|
||||
|
||||
tags_list = list(tags)
|
||||
if tags_list:
|
||||
recipe_data["tags"] = tags_list
|
||||
|
||||
if metadata.get("source_path"):
|
||||
recipe_data["source_path"] = metadata.get("source_path")
|
||||
|
||||
json_filename = f"{recipe_id}.recipe.json"
|
||||
json_path = os.path.join(recipes_dir, json_filename)
|
||||
with open(json_path, "w", encoding="utf-8") as file_obj:
|
||||
json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)
|
||||
|
||||
self._exif_utils.append_recipe_metadata(image_path, recipe_data)
|
||||
|
||||
matching_recipes = await self._find_matching_recipes(recipe_scanner, fingerprint, exclude_id=recipe_id)
|
||||
await self._update_cache(recipe_scanner, recipe_data)
|
||||
|
||||
return PersistenceResult(
|
||||
{
|
||||
"success": True,
|
||||
"recipe_id": recipe_id,
|
||||
"image_path": image_path,
|
||||
"json_path": json_path,
|
||||
"matching_recipes": matching_recipes,
|
||||
}
|
||||
)
|
||||
|
||||
async def delete_recipe(self, *, recipe_scanner, recipe_id: str) -> PersistenceResult:
|
||||
"""Delete an existing recipe."""
|
||||
|
||||
recipes_dir = recipe_scanner.recipes_dir
|
||||
if not recipes_dir or not os.path.exists(recipes_dir):
|
||||
raise RecipeNotFoundError("Recipes directory not found")
|
||||
|
||||
recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json")
|
||||
if not os.path.exists(recipe_json_path):
|
||||
raise RecipeNotFoundError("Recipe not found")
|
||||
|
||||
with open(recipe_json_path, "r", encoding="utf-8") as file_obj:
|
||||
recipe_data = json.load(file_obj)
|
||||
|
||||
image_path = recipe_data.get("file_path")
|
||||
os.remove(recipe_json_path)
|
||||
if image_path and os.path.exists(image_path):
|
||||
os.remove(image_path)
|
||||
|
||||
await self._remove_from_cache(recipe_scanner, recipe_id)
|
||||
return PersistenceResult({"success": True, "message": "Recipe deleted successfully"})
|
||||
|
||||
async def update_recipe(self, *, recipe_scanner, recipe_id: str, updates: dict[str, Any]) -> PersistenceResult:
|
||||
"""Update persisted metadata for a recipe."""
|
||||
|
||||
if not any(key in updates for key in ("title", "tags", "source_path", "preview_nsfw_level")):
|
||||
raise RecipeValidationError(
|
||||
"At least one field to update must be provided (title or tags or source_path or preview_nsfw_level)"
|
||||
)
|
||||
|
||||
success = await recipe_scanner.update_recipe_metadata(recipe_id, updates)
|
||||
if not success:
|
||||
raise RecipeNotFoundError("Recipe not found or update failed")
|
||||
|
||||
return PersistenceResult({"success": True, "recipe_id": recipe_id, "updates": updates})
|
||||
|
||||
async def reconnect_lora(
|
||||
self,
|
||||
*,
|
||||
recipe_scanner,
|
||||
recipe_id: str,
|
||||
lora_index: int,
|
||||
target_name: str,
|
||||
) -> PersistenceResult:
|
||||
"""Reconnect a LoRA entry within an existing recipe."""
|
||||
|
||||
recipe_path = os.path.join(recipe_scanner.recipes_dir, f"{recipe_id}.recipe.json")
|
||||
if not os.path.exists(recipe_path):
|
||||
raise RecipeNotFoundError("Recipe not found")
|
||||
|
||||
lora_scanner = getattr(recipe_scanner, "_lora_scanner", None)
|
||||
target_lora = None if lora_scanner is None else await lora_scanner.get_model_info_by_name(target_name)
|
||||
if not target_lora:
|
||||
raise RecipeNotFoundError(f"Local LoRA not found with name: {target_name}")
|
||||
|
||||
with open(recipe_path, "r", encoding="utf-8") as file_obj:
|
||||
recipe_data = json.load(file_obj)
|
||||
|
||||
loras = recipe_data.get("loras", [])
|
||||
if lora_index >= len(loras):
|
||||
raise RecipeNotFoundError("LoRA index out of range in recipe")
|
||||
|
||||
lora = loras[lora_index]
|
||||
lora["isDeleted"] = False
|
||||
lora["exclude"] = False
|
||||
lora["file_name"] = target_name
|
||||
if "sha256" in target_lora:
|
||||
lora["hash"] = target_lora["sha256"].lower()
|
||||
if target_lora.get("civitai"):
|
||||
lora["modelName"] = target_lora["civitai"]["model"]["name"]
|
||||
lora["modelVersionName"] = target_lora["civitai"]["name"]
|
||||
lora["modelVersionId"] = target_lora["civitai"]["id"]
|
||||
|
||||
recipe_data["fingerprint"] = calculate_recipe_fingerprint(recipe_data.get("loras", []))
|
||||
|
||||
with open(recipe_path, "w", encoding="utf-8") as file_obj:
|
||||
json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)
|
||||
|
||||
updated_lora = dict(lora)
|
||||
updated_lora["inLibrary"] = True
|
||||
updated_lora["preview_url"] = config.get_preview_static_url(target_lora["preview_url"])
|
||||
updated_lora["localPath"] = target_lora["file_path"]
|
||||
|
||||
await self._refresh_cache_after_update(recipe_scanner, recipe_id, recipe_data)
|
||||
|
||||
image_path = recipe_data.get("file_path")
|
||||
if image_path and os.path.exists(image_path):
|
||||
self._exif_utils.append_recipe_metadata(image_path, recipe_data)
|
||||
|
||||
matching_recipes = []
|
||||
if "fingerprint" in recipe_data:
|
||||
matching_recipes = await recipe_scanner.find_recipes_by_fingerprint(recipe_data["fingerprint"])
|
||||
if recipe_id in matching_recipes:
|
||||
matching_recipes.remove(recipe_id)
|
||||
|
||||
return PersistenceResult(
|
||||
{
|
||||
"success": True,
|
||||
"recipe_id": recipe_id,
|
||||
"updated_lora": updated_lora,
|
||||
"matching_recipes": matching_recipes,
|
||||
}
|
||||
)
|
||||
|
||||
async def bulk_delete(
|
||||
self,
|
||||
*,
|
||||
recipe_scanner,
|
||||
recipe_ids: Iterable[str],
|
||||
) -> PersistenceResult:
|
||||
"""Delete multiple recipes in a single request."""
|
||||
|
||||
recipe_ids = list(recipe_ids)
|
||||
if not recipe_ids:
|
||||
raise RecipeValidationError("No recipe IDs provided")
|
||||
|
||||
recipes_dir = recipe_scanner.recipes_dir
|
||||
if not recipes_dir or not os.path.exists(recipes_dir):
|
||||
raise RecipeNotFoundError("Recipes directory not found")
|
||||
|
||||
deleted_recipes: list[str] = []
|
||||
failed_recipes: list[dict[str, Any]] = []
|
||||
|
||||
for recipe_id in recipe_ids:
|
||||
recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json")
|
||||
if not os.path.exists(recipe_json_path):
|
||||
failed_recipes.append({"id": recipe_id, "reason": "Recipe not found"})
|
||||
continue
|
||||
|
||||
try:
|
||||
with open(recipe_json_path, "r", encoding="utf-8") as file_obj:
|
||||
recipe_data = json.load(file_obj)
|
||||
image_path = recipe_data.get("file_path")
|
||||
os.remove(recipe_json_path)
|
||||
if image_path and os.path.exists(image_path):
|
||||
os.remove(image_path)
|
||||
deleted_recipes.append(recipe_id)
|
||||
except Exception as exc:
|
||||
failed_recipes.append({"id": recipe_id, "reason": str(exc)})
|
||||
|
||||
if deleted_recipes:
|
||||
await self._bulk_remove_from_cache(recipe_scanner, deleted_recipes)
|
||||
|
||||
return PersistenceResult(
|
||||
{
|
||||
"success": True,
|
||||
"deleted": deleted_recipes,
|
||||
"failed": failed_recipes,
|
||||
"total_deleted": len(deleted_recipes),
|
||||
"total_failed": len(failed_recipes),
|
||||
}
|
||||
)
|
||||
|
||||
async def save_recipe_from_widget(
|
||||
self,
|
||||
*,
|
||||
recipe_scanner,
|
||||
metadata: dict[str, Any],
|
||||
image_bytes: bytes,
|
||||
) -> PersistenceResult:
|
||||
"""Save a recipe constructed from widget metadata."""
|
||||
|
||||
if not metadata:
|
||||
raise RecipeValidationError("No generation metadata found")
|
||||
|
||||
recipes_dir = recipe_scanner.recipes_dir
|
||||
os.makedirs(recipes_dir, exist_ok=True)
|
||||
|
||||
recipe_id = str(uuid.uuid4())
|
||||
image_filename = f"{recipe_id}.png"
|
||||
image_path = os.path.join(recipes_dir, image_filename)
|
||||
with open(image_path, "wb") as file_obj:
|
||||
file_obj.write(image_bytes)
|
||||
|
||||
lora_stack = metadata.get("loras", "")
|
||||
lora_matches = re.findall(r"<lora:([^:]+):([^>]+)>", lora_stack)
|
||||
if not lora_matches:
|
||||
raise RecipeValidationError("No LoRAs found in the generation metadata")
|
||||
|
||||
lora_scanner = getattr(recipe_scanner, "_lora_scanner", None)
|
||||
loras_data = []
|
||||
base_model_counts: Dict[str, int] = {}
|
||||
|
||||
for name, strength in lora_matches:
|
||||
lora_info = None
|
||||
if lora_scanner is not None:
|
||||
lora_info = await lora_scanner.get_model_info_by_name(name)
|
||||
lora_data = {
|
||||
"file_name": name,
|
||||
"strength": float(strength),
|
||||
"hash": (lora_info.get("sha256") or "").lower() if lora_info else "",
|
||||
"modelVersionId": lora_info.get("civitai", {}).get("id") if lora_info else 0,
|
||||
"modelName": lora_info.get("civitai", {}).get("model", {}).get("name") if lora_info else "",
|
||||
"modelVersionName": lora_info.get("civitai", {}).get("name") if lora_info else "",
|
||||
"isDeleted": False,
|
||||
"exclude": False,
|
||||
}
|
||||
loras_data.append(lora_data)
|
||||
|
||||
if lora_info and "base_model" in lora_info:
|
||||
base_model = lora_info["base_model"]
|
||||
base_model_counts[base_model] = base_model_counts.get(base_model, 0) + 1
|
||||
|
||||
recipe_name = self._derive_recipe_name(lora_matches)
|
||||
most_common_base_model = (
|
||||
max(base_model_counts.items(), key=lambda item: item[1])[0] if base_model_counts else ""
|
||||
)
|
||||
|
||||
recipe_data = {
|
||||
"id": recipe_id,
|
||||
"file_path": image_path,
|
||||
"title": recipe_name,
|
||||
"modified": time.time(),
|
||||
"created_date": time.time(),
|
||||
"base_model": most_common_base_model,
|
||||
"loras": loras_data,
|
||||
"checkpoint": metadata.get("checkpoint", ""),
|
||||
"gen_params": {
|
||||
key: value
|
||||
for key, value in metadata.items()
|
||||
if key not in ["checkpoint", "loras"]
|
||||
},
|
||||
"loras_stack": lora_stack,
|
||||
}
|
||||
|
||||
json_filename = f"{recipe_id}.recipe.json"
|
||||
json_path = os.path.join(recipes_dir, json_filename)
|
||||
with open(json_path, "w", encoding="utf-8") as file_obj:
|
||||
json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)
|
||||
|
||||
self._exif_utils.append_recipe_metadata(image_path, recipe_data)
|
||||
await self._update_cache(recipe_scanner, recipe_data)
|
||||
|
||||
return PersistenceResult(
|
||||
{
|
||||
"success": True,
|
||||
"recipe_id": recipe_id,
|
||||
"image_path": image_path,
|
||||
"json_path": json_path,
|
||||
"recipe_name": recipe_name,
|
||||
}
|
||||
)
|
||||
|
||||
# Helper methods ---------------------------------------------------
|
||||
|
||||
def _resolve_image_bytes(self, image_bytes: bytes | None, image_base64: str | None) -> bytes:
|
||||
if image_bytes is not None:
|
||||
return image_bytes
|
||||
if image_base64:
|
||||
try:
|
||||
payload = image_base64.split(",", 1)[1] if "," in image_base64 else image_base64
|
||||
return base64.b64decode(payload)
|
||||
except Exception as exc: # pragma: no cover - validation guard
|
||||
raise RecipeValidationError(f"Invalid base64 image data: {exc}") from exc
|
||||
raise RecipeValidationError("No image data provided")
|
||||
|
||||
def _normalise_lora_entry(self, lora: dict[str, Any]) -> dict[str, Any]:
|
||||
return {
|
||||
"file_name": lora.get("file_name", "")
|
||||
or (
|
||||
os.path.splitext(os.path.basename(lora.get("localPath", "")))[0]
|
||||
if lora.get("localPath")
|
||||
else ""
|
||||
),
|
||||
"hash": (lora.get("hash") or "").lower(),
|
||||
"strength": float(lora.get("weight", 1.0)),
|
||||
"modelVersionId": lora.get("id", 0),
|
||||
"modelName": lora.get("name", ""),
|
||||
"modelVersionName": lora.get("version", ""),
|
||||
"isDeleted": lora.get("isDeleted", False),
|
||||
"exclude": lora.get("exclude", False),
|
||||
}
|
||||
|
||||
async def _find_matching_recipes(
|
||||
self,
|
||||
recipe_scanner,
|
||||
fingerprint: str | None,
|
||||
*,
|
||||
exclude_id: Optional[str] = None,
|
||||
) -> list[str]:
|
||||
if not fingerprint:
|
||||
return []
|
||||
matches = await recipe_scanner.find_recipes_by_fingerprint(fingerprint)
|
||||
if exclude_id and exclude_id in matches:
|
||||
matches.remove(exclude_id)
|
||||
return matches
|
||||
|
||||
async def _update_cache(self, recipe_scanner, recipe_data: dict[str, Any]) -> None:
|
||||
cache = getattr(recipe_scanner, "_cache", None)
|
||||
if cache is not None:
|
||||
cache.raw_data.append(recipe_data)
|
||||
asyncio.create_task(cache.resort())
|
||||
self._logger.info("Added recipe %s to cache", recipe_data.get("id"))
|
||||
|
||||
async def _remove_from_cache(self, recipe_scanner, recipe_id: str) -> None:
|
||||
cache = getattr(recipe_scanner, "_cache", None)
|
||||
if cache is not None:
|
||||
cache.raw_data = [item for item in cache.raw_data if str(item.get("id", "")) != recipe_id]
|
||||
asyncio.create_task(cache.resort())
|
||||
self._logger.info("Removed recipe %s from cache", recipe_id)
|
||||
|
||||
async def _bulk_remove_from_cache(self, recipe_scanner, recipe_ids: Iterable[str]) -> None:
|
||||
cache = getattr(recipe_scanner, "_cache", None)
|
||||
if cache is not None:
|
||||
recipe_ids_set = set(recipe_ids)
|
||||
cache.raw_data = [item for item in cache.raw_data if item.get("id") not in recipe_ids_set]
|
||||
asyncio.create_task(cache.resort())
|
||||
self._logger.info("Removed %s recipes from cache", len(recipe_ids_set))
|
||||
|
||||
async def _refresh_cache_after_update(
|
||||
self,
|
||||
recipe_scanner,
|
||||
recipe_id: str,
|
||||
recipe_data: dict[str, Any],
|
||||
) -> None:
|
||||
cache = getattr(recipe_scanner, "_cache", None)
|
||||
if cache is not None:
|
||||
for cache_item in cache.raw_data:
|
||||
if cache_item.get("id") == recipe_id:
|
||||
cache_item.update({
|
||||
"loras": recipe_data.get("loras", []),
|
||||
"fingerprint": recipe_data.get("fingerprint"),
|
||||
})
|
||||
asyncio.create_task(cache.resort())
|
||||
break
|
||||
|
||||
def _derive_recipe_name(self, lora_matches: list[tuple[str, str]]) -> str:
|
||||
recipe_name_parts = [f"{name.strip()}-{float(strength):.2f}" for name, strength in lora_matches[:3]]
|
||||
recipe_name = "_".join(recipe_name_parts)
|
||||
return recipe_name or "recipe"
|
||||
113
py/services/recipes/sharing_service.py
Normal file
113
py/services/recipes/sharing_service.py
Normal file
@@ -0,0 +1,113 @@
|
||||
"""Services handling recipe sharing and downloads."""
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict
|
||||
|
||||
from .errors import RecipeNotFoundError
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SharingResult:
    """Return payload for share operations."""

    # Response body handed back to the HTTP handler (already JSON-shaped).
    payload: dict[str, Any]
    # HTTP status code to respond with; defaults to 200 (OK).
    status: int = 200
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class DownloadInfo:
    """Information required to stream a shared recipe file."""

    # Path of the temporary copy prepared by the sharing service.
    file_path: str
    # Filename to suggest to the downloading client.
    download_filename: str
|
||||
|
||||
|
||||
class RecipeSharingService:
    """Prepare temporary recipe downloads with TTL cleanup.

    ``share_recipe`` copies a recipe's image to a temp file and registers it
    with an expiry; ``prepare_download`` resolves that registration back to a
    streamable path.  Expired entries and their temp files are removed
    opportunistically on each share.
    """

    def __init__(self, *, ttl_seconds: int = 300, logger) -> None:
        self._ttl_seconds = ttl_seconds
        self._logger = logger
        # recipe_id -> {"path": temp file, "timestamp": share time, "expires": epoch}
        self._shared_recipes: Dict[str, Dict[str, Any]] = {}

    async def share_recipe(self, *, recipe_scanner, recipe_id: str) -> SharingResult:
        """Prepare a temporary downloadable copy of a recipe image.

        Raises:
            RecipeNotFoundError: if the recipe or its image file is missing.
        """

        cache = await recipe_scanner.get_cached_data()
        recipe = next(
            (r for r in getattr(cache, "raw_data", []) if str(r.get("id", "")) == recipe_id),
            None,
        )
        if not recipe:
            raise RecipeNotFoundError("Recipe not found")

        image_path = recipe.get("file_path")
        if not image_path or not os.path.exists(image_path):
            raise RecipeNotFoundError("Recipe image not found")

        # Bug fix: re-sharing the same recipe previously overwrote the dict
        # entry without deleting the earlier temp copy, leaking it forever
        # (the cleanup helpers only see paths still registered in the dict).
        self._cleanup_entry(recipe_id)

        ext = os.path.splitext(image_path)[1]
        with tempfile.NamedTemporaryFile(suffix=ext, delete=False) as temp_file:
            temp_path = temp_file.name

        shutil.copy2(image_path, temp_path)
        timestamp = int(time.time())
        self._shared_recipes[recipe_id] = {
            "path": temp_path,
            "timestamp": timestamp,
            "expires": time.time() + self._ttl_seconds,
        }
        self._cleanup_shared_recipes()

        safe_title = recipe.get("title", "").replace(" ", "_").lower()
        filename = f"recipe_{safe_title}{ext}" if safe_title else f"recipe_{recipe_id}{ext}"
        url_path = f"/api/recipe/{recipe_id}/share/download?t={timestamp}"
        return SharingResult({"success": True, "download_url": url_path, "filename": filename})

    async def prepare_download(self, *, recipe_scanner, recipe_id: str) -> DownloadInfo:
        """Return file path and filename for a prepared shared recipe.

        Raises:
            RecipeNotFoundError: if the share entry expired, was never made,
                or its temp file has disappeared.
        """

        shared_info = self._shared_recipes.get(recipe_id)
        if not shared_info or time.time() > shared_info.get("expires", 0):
            self._cleanup_entry(recipe_id)
            raise RecipeNotFoundError("Shared recipe not found or expired")

        file_path = shared_info["path"]
        if not os.path.exists(file_path):
            self._cleanup_entry(recipe_id)
            raise RecipeNotFoundError("Shared recipe file not found")

        cache = await recipe_scanner.get_cached_data()
        recipe = next(
            (r for r in getattr(cache, "raw_data", []) if str(r.get("id", "")) == recipe_id),
            None,
        )
        # Prefer a title-derived filename; fall back to the raw id if the
        # recipe has meanwhile vanished from the cache.
        filename_base = (
            f"recipe_{recipe.get('title', '').replace(' ', '_').lower()}" if recipe else recipe_id
        )
        ext = os.path.splitext(file_path)[1]
        download_filename = f"{filename_base}{ext}"
        return DownloadInfo(file_path=file_path, download_filename=download_filename)

    def _cleanup_shared_recipes(self) -> None:
        """Drop every shared entry whose TTL has elapsed."""
        now = time.time()
        for recipe_id in list(self._shared_recipes.keys()):
            shared = self._shared_recipes.get(recipe_id)
            if not shared:
                continue
            if now > shared.get("expires", 0):
                self._cleanup_entry(recipe_id)

    def _cleanup_entry(self, recipe_id: str) -> None:
        """Forget one shared entry and best-effort delete its temp file."""
        shared_info = self._shared_recipes.pop(recipe_id, None)
        if not shared_info:
            return
        file_path = shared_info.get("path")
        if file_path and os.path.exists(file_path):
            try:
                os.unlink(file_path)
            except Exception as exc:  # pragma: no cover - defensive logging
                self._logger.error("Error cleaning up shared recipe %s: %s", recipe_id, exc)
|
||||
146
tests/services/test_recipe_services.py
Normal file
146
tests/services/test_recipe_services.py
Normal file
@@ -0,0 +1,146 @@
|
||||
import logging
|
||||
import os
|
||||
from types import SimpleNamespace
|
||||
|
||||
import pytest
|
||||
|
||||
from py.services.recipes.analysis_service import RecipeAnalysisService
|
||||
from py.services.recipes.errors import RecipeDownloadError, RecipeNotFoundError
|
||||
from py.services.recipes.persistence_service import RecipePersistenceService
|
||||
|
||||
|
||||
class DummyExifUtils:
    """Minimal ExifUtils stand-in used by the recipe service tests."""

    def optimize_image(self, image_data, target_width, format, quality, preserve_metadata):
        # Pass the bytes through unchanged and report a ".webp" extension.
        return image_data, ".webp"

    def append_recipe_metadata(self, image_path, recipe_data):
        # Record the last call so tests can assert on what was written.
        self.appended = (image_path, recipe_data)

    def extract_image_metadata(self, path):
        # Simulate an image carrying no embedded metadata.
        return {}
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_analyze_remote_image_download_failure_cleans_temp(tmp_path, monkeypatch):
    """A failed remote download must raise and delete the temp file."""

    class _Parserless:
        def create_parser(self, metadata):
            return None

    class _FailingDownloader:
        async def download_file(self, url, path, use_auth=False):
            return False, "failure"

    async def _make_downloader():
        return _FailingDownloader()

    service = RecipeAnalysisService(
        exif_utils=DummyExifUtils(),
        recipe_parser_factory=_Parserless(),
        downloader_factory=_make_downloader,
        metadata_collector=None,
        metadata_processor_cls=None,
        metadata_registry_cls=None,
        standalone_mode=False,
        logger=logging.getLogger("test"),
    )

    temp_file = tmp_path / "temp.jpg"

    def _fake_temp_path():
        temp_file.write_bytes(b"")
        return str(temp_file)

    # Force the service to use a temp path we can observe afterwards.
    monkeypatch.setattr(service, "_create_temp_path", _fake_temp_path)

    with pytest.raises(RecipeDownloadError):
        await service.analyze_remote_image(
            url="https://example.com/image.jpg",
            recipe_scanner=SimpleNamespace(),
            civitai_client=SimpleNamespace(),
        )

    assert not temp_file.exists(), "temporary file should be cleaned after failure"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_analyze_local_image_missing_file(tmp_path):
    """Analyzing a nonexistent local path raises RecipeNotFoundError."""

    async def _make_downloader():
        return SimpleNamespace()

    service = RecipeAnalysisService(
        exif_utils=DummyExifUtils(),
        recipe_parser_factory=SimpleNamespace(create_parser=lambda metadata: None),
        downloader_factory=_make_downloader,
        metadata_collector=None,
        metadata_processor_cls=None,
        metadata_registry_cls=None,
        standalone_mode=False,
        logger=logging.getLogger("test"),
    )

    missing_path = tmp_path / "missing.png"
    with pytest.raises(RecipeNotFoundError):
        await service.analyze_local_image(
            file_path=str(missing_path),
            recipe_scanner=SimpleNamespace(),
        )
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_save_recipe_reports_duplicates(tmp_path):
    """Saving a recipe surfaces fingerprint duplicates and updates the cache."""
    exif_utils = DummyExifUtils()

    # Minimal cache double: save_recipe appends to raw_data and resorts.
    class DummyCache:
        def __init__(self):
            self.raw_data = []

        async def resort(self):
            pass

    # Scanner double that always reports one existing fingerprint match
    # and records the fingerprint it was queried with.
    class DummyScanner:
        def __init__(self, root):
            self.recipes_dir = str(root)
            self._cache = DummyCache()
            self.last_fingerprint = None

        async def find_recipes_by_fingerprint(self, fingerprint):
            self.last_fingerprint = fingerprint
            return ["existing"]

    scanner = DummyScanner(tmp_path)
    service = RecipePersistenceService(
        exif_utils=exif_utils,
        card_preview_width=512,
        logger=logging.getLogger("test"),
    )

    # One well-formed lora entry; the service derives the fingerprint from it.
    metadata = {
        "base_model": "sd",
        "loras": [
            {
                "file_name": "sample",
                "hash": "abc123",
                "weight": 0.5,
                "id": 1,
                "name": "Sample",
                "version": "v1",
                "isDeleted": False,
                "exclude": False,
            }
        ],
    }

    result = await service.save_recipe(
        recipe_scanner=scanner,
        image_bytes=b"image-bytes",
        image_base64=None,
        name="My Recipe",
        tags=["tag"],
        metadata=metadata,
    )

    # Duplicate detection reported, fingerprint actually queried, recipe
    # JSON written to disk, and the in-memory cache updated.
    assert result.payload["matching_recipes"] == ["existing"]
    assert scanner.last_fingerprint is not None
    assert os.path.exists(result.payload["json_path"])
    assert scanner._cache.raw_data
|
||||
Reference in New Issue
Block a user