Merge branch 'sort-by-usage-count' into main

This commit is contained in:
pixelpaws
2025-12-26 22:17:03 +08:00
committed by GitHub
85 changed files with 6030 additions and 1550 deletions

84
GEMINI.md Normal file
View File

@@ -0,0 +1,84 @@
# ComfyUI LoRA Manager
## Project Overview
ComfyUI LoRA Manager is a comprehensive extension for ComfyUI that streamlines the organization, downloading, and application of LoRA models. It functions as both a custom node within ComfyUI and a standalone application.
**Key Features:**
* **Model Management:** Browse, organize, and download LoRA models (and Checkpoints/Embeddings) from Civitai and CivArchive.
* **Visualization:** Preview images, videos, and trigger words.
* **Workflow Integration:** "One-click" integration into ComfyUI workflows, preserving generation parameters.
* **Recipe System:** Save and share LoRA combinations as "recipes".
* **Architecture:** Hybrid Python backend (API, file management) and JavaScript/HTML frontend (Web UI).
## Directory Structure
* `py/`: Core Python backend source code.
* `lora_manager.py`: Main entry point for the ComfyUI node.
* `routes/`: API route definitions (using `aiohttp` in standalone, or ComfyUI's server).
* `services/`: Business logic (downloading, metadata, scanning).
* `nodes/`: ComfyUI custom node implementations.
* `static/`: Frontend static assets (CSS, JS, Images).
* `templates/`: HTML templates (Jinja2).
* `locales/`: Internationalization JSON files.
* `web/comfyui/`: JavaScript extensions specifically for the ComfyUI interface.
* `standalone.py`: Entry point for running the manager as a standalone web app.
* `tests/`: Backend tests.
* `requirements.txt`: Python runtime dependencies.
* `package.json`: Frontend development dependencies and test scripts.
## Building and Running
### Prerequisites
* Python 3.8+
* Node.js (only for running frontend tests)
### Backend Setup
1. Install Python dependencies:
```bash
pip install -r requirements.txt
```
### Running in Standalone Mode
You can run the manager independently of ComfyUI for development or management purposes.
```bash
python standalone.py --port 8188
```
### Running in ComfyUI
Ensure the folder is located in `ComfyUI/custom_nodes/`. ComfyUI will automatically load it upon startup.
## Testing
### Backend Tests (Pytest)
1. Install development dependencies:
```bash
pip install -r requirements-dev.txt
```
2. Run tests:
```bash
pytest
```
* Coverage reports are generated in `coverage/backend/`.
### Frontend Tests (Vitest)
1. Install Node dependencies:
```bash
npm install
```
2. Run tests:
```bash
npm run test
```
3. Run coverage:
```bash
npm run test:coverage
```
## Development Conventions
* **Python Style:** Follow PEP 8. Use snake_case for files/functions and PascalCase for classes.
* **Frontend:** Standard ES modules. UI components often end in `_widget.js`.
* **Configuration:** User settings are stored in `settings.json`. Developers should reference `settings.json.example`.
* **Localization:** Update `locales/<lang>.json` and run `scripts/sync_translation_keys.py` when changing UI text.
* **Documentation:** Architecture details are in `docs/architecture/` and `IFLOW.md`.

View File

@@ -4,7 +4,7 @@ try: # pragma: no cover - import fallback for pytest collection
from .py.nodes.trigger_word_toggle import TriggerWordToggle
from .py.nodes.prompt import PromptLoraManager
from .py.nodes.lora_stacker import LoraStacker
from .py.nodes.save_image import SaveImage
from .py.nodes.save_image import SaveImageLM
from .py.nodes.debug_metadata import DebugMetadata
from .py.nodes.wanvideo_lora_select import WanVideoLoraSelect
from .py.nodes.wanvideo_lora_select_from_text import WanVideoLoraSelectFromText
@@ -24,7 +24,7 @@ except ImportError: # pragma: no cover - allows running under pytest without pa
LoraManagerTextLoader = importlib.import_module("py.nodes.lora_loader").LoraManagerTextLoader
TriggerWordToggle = importlib.import_module("py.nodes.trigger_word_toggle").TriggerWordToggle
LoraStacker = importlib.import_module("py.nodes.lora_stacker").LoraStacker
SaveImage = importlib.import_module("py.nodes.save_image").SaveImage
SaveImageLM = importlib.import_module("py.nodes.save_image").SaveImageLM
DebugMetadata = importlib.import_module("py.nodes.debug_metadata").DebugMetadata
WanVideoLoraSelect = importlib.import_module("py.nodes.wanvideo_lora_select").WanVideoLoraSelect
WanVideoLoraSelectFromText = importlib.import_module("py.nodes.wanvideo_lora_select_from_text").WanVideoLoraSelectFromText
@@ -36,7 +36,7 @@ NODE_CLASS_MAPPINGS = {
LoraManagerTextLoader.NAME: LoraManagerTextLoader,
TriggerWordToggle.NAME: TriggerWordToggle,
LoraStacker.NAME: LoraStacker,
SaveImage.NAME: SaveImage,
SaveImageLM.NAME: SaveImageLM,
DebugMetadata.NAME: DebugMetadata,
WanVideoLoraSelect.NAME: WanVideoLoraSelect,
WanVideoLoraSelectFromText.NAME: WanVideoLoraSelectFromText

View File

@@ -159,6 +159,12 @@
"success": "Updated license metadata for {count} {typePlural}",
"none": "All {typePlural} already have license metadata",
"error": "Failed to refresh license metadata for {typePlural}: {message}"
},
"repairRecipes": {
"label": "Recipe-Daten reparieren",
"loading": "Recipe-Daten werden repariert...",
"success": "{count} Rezepte erfolgreich repariert.",
"error": "Recipe-Reparatur fehlgeschlagen: {message}"
}
},
"header": {
@@ -188,7 +194,8 @@
"creator": "Ersteller",
"title": "Rezept-Titel",
"loraName": "LoRA-Dateiname",
"loraModel": "LoRA-Modellname"
"loraModel": "LoRA-Modellname",
"prompt": "Prompt"
}
},
"filter": {
@@ -199,6 +206,7 @@
"license": "Lizenz",
"noCreditRequired": "Kein Credit erforderlich",
"allowSellingGeneratedContent": "Verkauf erlaubt",
"noTags": "Keine Tags",
"clearAll": "Alle Filter löschen"
},
"theme": {
@@ -521,6 +529,7 @@
"replacePreview": "Vorschau ersetzen",
"setContentRating": "Inhaltsbewertung festlegen",
"moveToFolder": "In Ordner verschieben",
"repairMetadata": "[TODO: Translate] Repair metadata",
"excludeModel": "Modell ausschließen",
"deleteModel": "Modell löschen",
"shareRecipe": "Rezept teilen",
@@ -591,10 +600,26 @@
"selectLoraRoot": "Bitte wählen Sie ein LoRA-Stammverzeichnis aus"
}
},
"sort": {
"title": "Rezepte sortieren nach...",
"name": "Name",
"nameAsc": "A - Z",
"nameDesc": "Z - A",
"date": "Datum",
"dateDesc": "Neueste",
"dateAsc": "Älteste",
"lorasCount": "LoRA-Anzahl",
"lorasCountDesc": "Meiste",
"lorasCountAsc": "Wenigste"
},
"refresh": {
"title": "Rezeptliste aktualisieren"
},
"filteredByLora": "Gefiltert nach LoRA"
"filteredByLora": "Gefiltert nach LoRA",
"favorites": {
"title": "Nur Favoriten anzeigen",
"action": "Favoriten"
}
},
"duplicates": {
"found": "{count} Duplikat-Gruppen gefunden",
@@ -620,6 +645,13 @@
"noMissingLoras": "Keine fehlenden LoRAs zum Herunterladen",
"getInfoFailed": "Fehler beim Abrufen der Informationen für fehlende LoRAs",
"prepareError": "Fehler beim Vorbereiten der LoRAs für den Download: {message}"
},
"repair": {
"starting": "[TODO: Translate] Repairing recipe metadata...",
"success": "[TODO: Translate] Recipe metadata repaired successfully",
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
}
}
},
@@ -641,7 +673,8 @@
"recursiveUnavailable": "Rekursive Suche ist nur in der Baumansicht verfügbar",
"collapseAllDisabled": "Im Listenmodus nicht verfügbar",
"dragDrop": {
"unableToResolveRoot": "Zielpfad für das Verschieben konnte nicht ermittelt werden."
"unableToResolveRoot": "Zielpfad für das Verschieben konnte nicht ermittelt werden.",
"moveUnsupported": "Move is not supported for this item."
}
},
"statistics": {
@@ -1463,7 +1496,8 @@
"bulkMoveFailures": "Fehlgeschlagene Verschiebungen:\n{failures}",
"bulkMoveSuccess": "{successCount} {type}s erfolgreich verschoben",
"exampleImagesDownloadSuccess": "Beispielbilder erfolgreich heruntergeladen!",
"exampleImagesDownloadFailed": "Fehler beim Herunterladen der Beispielbilder: {message}"
"exampleImagesDownloadFailed": "Fehler beim Herunterladen der Beispielbilder: {message}",
"moveFailed": "Failed to move item: {message}"
}
},
"banners": {

View File

@@ -159,6 +159,12 @@
"success": "Updated license metadata for {count} {typePlural}",
"none": "All {typePlural} already have license metadata",
"error": "Failed to refresh license metadata for {typePlural}: {message}"
},
"repairRecipes": {
"label": "Repair recipes data",
"loading": "Repairing recipe data...",
"success": "Successfully repaired {count} recipes.",
"error": "Recipe repair failed: {message}"
}
},
"header": {
@@ -188,7 +194,8 @@
"creator": "Creator",
"title": "Recipe Title",
"loraName": "LoRA Filename",
"loraModel": "LoRA Model Name"
"loraModel": "LoRA Model Name",
"prompt": "Prompt"
}
},
"filter": {
@@ -199,6 +206,7 @@
"license": "License",
"noCreditRequired": "No Credit Required",
"allowSellingGeneratedContent": "Allow Selling",
"noTags": "No tags",
"clearAll": "Clear All Filters"
},
"theme": {
@@ -521,6 +529,7 @@
"replacePreview": "Replace Preview",
"setContentRating": "Set Content Rating",
"moveToFolder": "Move to Folder",
"repairMetadata": "Repair metadata",
"excludeModel": "Exclude Model",
"deleteModel": "Delete Model",
"shareRecipe": "Share Recipe",
@@ -591,10 +600,26 @@
"selectLoraRoot": "Please select a LoRA root directory"
}
},
"sort": {
"title": "Sort recipes by...",
"name": "Name",
"nameAsc": "A - Z",
"nameDesc": "Z - A",
"date": "Date",
"dateDesc": "Newest",
"dateAsc": "Oldest",
"lorasCount": "LoRA Count",
"lorasCountDesc": "Most",
"lorasCountAsc": "Least"
},
"refresh": {
"title": "Refresh recipe list"
},
"filteredByLora": "Filtered by LoRA"
"filteredByLora": "Filtered by LoRA",
"favorites": {
"title": "Show Favorites Only",
"action": "Favorites"
}
},
"duplicates": {
"found": "Found {count} duplicate groups",
@@ -620,6 +645,13 @@
"noMissingLoras": "No missing LoRAs to download",
"getInfoFailed": "Failed to get information for missing LoRAs",
"prepareError": "Error preparing LoRAs for download: {message}"
},
"repair": {
"starting": "Repairing recipe metadata...",
"success": "Recipe metadata repaired successfully",
"skipped": "Recipe already at latest version, no repair needed",
"failed": "Failed to repair recipe: {message}",
"missingId": "Cannot repair recipe: Missing recipe ID"
}
}
},
@@ -641,7 +673,8 @@
"recursiveUnavailable": "Recursive search is available in tree view only",
"collapseAllDisabled": "Not available in list view",
"dragDrop": {
"unableToResolveRoot": "Unable to determine destination path for move."
"unableToResolveRoot": "Unable to determine destination path for move.",
"moveUnsupported": "Move is not supported for this item."
}
},
"statistics": {
@@ -1463,7 +1496,8 @@
"bulkMoveFailures": "Failed moves:\n{failures}",
"bulkMoveSuccess": "Successfully moved {successCount} {type}s",
"exampleImagesDownloadSuccess": "Successfully downloaded example images!",
"exampleImagesDownloadFailed": "Failed to download example images: {message}"
"exampleImagesDownloadFailed": "Failed to download example images: {message}",
"moveFailed": "Failed to move item: {message}"
}
},
"banners": {

View File

@@ -159,6 +159,12 @@
"success": "Updated license metadata for {count} {typePlural}",
"none": "All {typePlural} already have license metadata",
"error": "Failed to refresh license metadata for {typePlural}: {message}"
},
"repairRecipes": {
"label": "Reparar datos de recetas",
"loading": "Reparando datos de recetas...",
"success": "Se repararon con éxito {count} recetas.",
"error": "Error al reparar recetas: {message}"
}
},
"header": {
@@ -188,7 +194,8 @@
"creator": "Creador",
"title": "Título de la receta",
"loraName": "Nombre de archivo LoRA",
"loraModel": "Nombre del modelo LoRA"
"loraModel": "Nombre del modelo LoRA",
"prompt": "Prompt"
}
},
"filter": {
@@ -199,6 +206,7 @@
"license": "Licencia",
"noCreditRequired": "Sin crédito requerido",
"allowSellingGeneratedContent": "Venta permitida",
"noTags": "Sin etiquetas",
"clearAll": "Limpiar todos los filtros"
},
"theme": {
@@ -521,6 +529,7 @@
"replacePreview": "Reemplazar vista previa",
"setContentRating": "Establecer clasificación de contenido",
"moveToFolder": "Mover a carpeta",
"repairMetadata": "[TODO: Translate] Repair metadata",
"excludeModel": "Excluir modelo",
"deleteModel": "Eliminar modelo",
"shareRecipe": "Compartir receta",
@@ -591,10 +600,26 @@
"selectLoraRoot": "Por favor selecciona un directorio raíz de LoRA"
}
},
"sort": {
"title": "Ordenar recetas por...",
"name": "Nombre",
"nameAsc": "A - Z",
"nameDesc": "Z - A",
"date": "Fecha",
"dateDesc": "Más reciente",
"dateAsc": "Más antiguo",
"lorasCount": "Cant. de LoRAs",
"lorasCountDesc": "Más",
"lorasCountAsc": "Menos"
},
"refresh": {
"title": "Actualizar lista de recetas"
},
"filteredByLora": "Filtrado por LoRA"
"filteredByLora": "Filtrado por LoRA",
"favorites": {
"title": "Mostrar solo favoritos",
"action": "Favoritos"
}
},
"duplicates": {
"found": "Se encontraron {count} grupos de duplicados",
@@ -620,6 +645,13 @@
"noMissingLoras": "No hay LoRAs faltantes para descargar",
"getInfoFailed": "Error al obtener información de LoRAs faltantes",
"prepareError": "Error preparando LoRAs para descarga: {message}"
},
"repair": {
"starting": "[TODO: Translate] Repairing recipe metadata...",
"success": "[TODO: Translate] Recipe metadata repaired successfully",
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
}
}
},
@@ -641,7 +673,8 @@
"recursiveUnavailable": "La búsqueda recursiva solo está disponible en la vista en árbol",
"collapseAllDisabled": "No disponible en vista de lista",
"dragDrop": {
"unableToResolveRoot": "No se puede determinar la ruta de destino para el movimiento."
"unableToResolveRoot": "No se puede determinar la ruta de destino para el movimiento.",
"moveUnsupported": "Move is not supported for this item."
}
},
"statistics": {
@@ -1463,7 +1496,8 @@
"bulkMoveFailures": "Movimientos fallidos:\n{failures}",
"bulkMoveSuccess": "Movidos exitosamente {successCount} {type}s",
"exampleImagesDownloadSuccess": "¡Imágenes de ejemplo descargadas exitosamente!",
"exampleImagesDownloadFailed": "Error al descargar imágenes de ejemplo: {message}"
"exampleImagesDownloadFailed": "Error al descargar imágenes de ejemplo: {message}",
"moveFailed": "Failed to move item: {message}"
}
},
"banners": {

View File

@@ -159,6 +159,12 @@
"success": "Updated license metadata for {count} {typePlural}",
"none": "All {typePlural} already have license metadata",
"error": "Failed to refresh license metadata for {typePlural}: {message}"
},
"repairRecipes": {
"label": "Réparer les données de recettes",
"loading": "Réparation des données de recettes...",
"success": "{count} recettes réparées avec succès.",
"error": "Échec de la réparation des recettes : {message}"
}
},
"header": {
@@ -188,7 +194,8 @@
"creator": "Créateur",
"title": "Titre de la recipe",
"loraName": "Nom de fichier LoRA",
"loraModel": "Nom du modèle LoRA"
"loraModel": "Nom du modèle LoRA",
"prompt": "Prompt"
}
},
"filter": {
@@ -199,6 +206,7 @@
"license": "Licence",
"noCreditRequired": "Crédit non requis",
"allowSellingGeneratedContent": "Vente autorisée",
"noTags": "Aucun tag",
"clearAll": "Effacer tous les filtres"
},
"theme": {
@@ -521,6 +529,7 @@
"replacePreview": "Remplacer l'aperçu",
"setContentRating": "Définir la classification du contenu",
"moveToFolder": "Déplacer vers un dossier",
"repairMetadata": "[TODO: Translate] Repair metadata",
"excludeModel": "Exclure le modèle",
"deleteModel": "Supprimer le modèle",
"shareRecipe": "Partager la recipe",
@@ -591,10 +600,26 @@
"selectLoraRoot": "Veuillez sélectionner un répertoire racine LoRA"
}
},
"sort": {
"title": "Trier les recettes par...",
"name": "Nom",
"nameAsc": "A - Z",
"nameDesc": "Z - A",
"date": "Date",
"dateDesc": "Plus récent",
"dateAsc": "Plus ancien",
"lorasCount": "Nombre de LoRAs",
"lorasCountDesc": "Plus",
"lorasCountAsc": "Moins"
},
"refresh": {
"title": "Actualiser la liste des recipes"
},
"filteredByLora": "Filtré par LoRA"
"filteredByLora": "Filtré par LoRA",
"favorites": {
"title": "Afficher uniquement les favoris",
"action": "Favoris"
}
},
"duplicates": {
"found": "Trouvé {count} groupes de doublons",
@@ -620,6 +645,13 @@
"noMissingLoras": "Aucun LoRA manquant à télécharger",
"getInfoFailed": "Échec de l'obtention des informations pour les LoRAs manquants",
"prepareError": "Erreur lors de la préparation des LoRAs pour le téléchargement : {message}"
},
"repair": {
"starting": "[TODO: Translate] Repairing recipe metadata...",
"success": "[TODO: Translate] Recipe metadata repaired successfully",
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
}
}
},
@@ -641,7 +673,8 @@
"recursiveUnavailable": "La recherche récursive n'est disponible qu'en vue arborescente",
"collapseAllDisabled": "Non disponible en vue liste",
"dragDrop": {
"unableToResolveRoot": "Impossible de déterminer le chemin de destination pour le déplacement."
"unableToResolveRoot": "Impossible de déterminer le chemin de destination pour le déplacement.",
"moveUnsupported": "Move is not supported for this item."
}
},
"statistics": {
@@ -1463,7 +1496,8 @@
"bulkMoveFailures": "Échecs de déplacement :\n{failures}",
"bulkMoveSuccess": "{successCount} {type}s déplacés avec succès",
"exampleImagesDownloadSuccess": "Images d'exemple téléchargées avec succès !",
"exampleImagesDownloadFailed": "Échec du téléchargement des images d'exemple : {message}"
"exampleImagesDownloadFailed": "Échec du téléchargement des images d'exemple : {message}",
"moveFailed": "Failed to move item: {message}"
}
},
"banners": {

View File

@@ -159,6 +159,12 @@
"success": "Updated license metadata for {count} {typePlural}",
"none": "All {typePlural} already have license metadata",
"error": "Failed to refresh license metadata for {typePlural}: {message}"
},
"repairRecipes": {
"label": "תיקון נתוני מתכונים",
"loading": "מתקן נתוני מתכונים...",
"success": "תוקנו בהצלחה {count} מתכונים.",
"error": "תיקון המתכונים נכשל: {message}"
}
},
"header": {
@@ -188,7 +194,8 @@
"creator": "יוצר",
"title": "כותרת מתכון",
"loraName": "שם קובץ LoRA",
"loraModel": "שם מודל LoRA"
"loraModel": "שם מודל LoRA",
"prompt": "הנחיה"
}
},
"filter": {
@@ -199,6 +206,7 @@
"license": "רישיון",
"noCreditRequired": "ללא קרדיט נדרש",
"allowSellingGeneratedContent": "אפשר מכירה",
"noTags": "ללא תגיות",
"clearAll": "נקה את כל המסננים"
},
"theme": {
@@ -228,6 +236,7 @@
"videoSettings": "הגדרות וידאו",
"layoutSettings": "הגדרות פריסה",
"folderSettings": "הגדרות תיקייה",
"priorityTags": "תגיות עדיפות",
"downloadPathTemplates": "תבניות נתיב הורדה",
"exampleImages": "תמונות דוגמה",
"updateFlags": "תגי עדכון",
@@ -235,8 +244,7 @@
"misc": "שונות",
"metadataArchive": "מסד נתונים של ארכיון מטא-דאטה",
"storageLocation": "מיקום ההגדרות",
"proxySettings": "הגדרות פרוקסי",
"priorityTags": "תגיות עדיפות"
"proxySettings": "הגדרות פרוקסי"
},
"storage": {
"locationLabel": "מצב נייד",
@@ -309,6 +317,26 @@
"defaultEmbeddingRootHelp": "הגדר את ספריית השורש המוגדרת כברירת מחדל של embedding להורדות, ייבוא והעברות",
"noDefault": "אין ברירת מחדל"
},
"priorityTags": {
"title": "תגיות עדיפות",
"description": "התאם את סדר העדיפות של התגיות עבור כל סוג מודל (לדוגמה: character, concept, style(toon|toon_style))",
"placeholder": "character, concept, style(toon|toon_style)",
"helpLinkLabel": "פתח עזרה בנושא תגיות עדיפות",
"modelTypes": {
"lora": "LoRA",
"checkpoint": "Checkpoint",
"embedding": "Embedding"
},
"saveSuccess": "תגיות העדיפות עודכנו.",
"saveError": "עדכון תגיות העדיפות נכשל.",
"loadingSuggestions": "טוען הצעות...",
"validation": {
"missingClosingParen": "לרשומה {index} חסר סוגר סוגריים.",
"missingCanonical": "על הרשומה {index} לכלול שם תגית קנונית.",
"duplicateCanonical": "התגית הקנונית \"{tag}\" מופיעה יותר מפעם אחת.",
"unknown": "תצורת תגיות העדיפות שגויה."
}
},
"downloadPathTemplates": {
"title": "תבניות נתיב הורדה",
"help": "הגדר מבני תיקיות לסוגי מודלים שונים בעת הורדה מ-Civitai.",
@@ -320,8 +348,8 @@
"byFirstTag": "לפי תגית ראשונה",
"baseModelFirstTag": "מודל בסיס + תגית ראשונה",
"baseModelAuthor": "מודל בסיס + יוצר",
"baseModelAuthorFirstTag": "מודל בסיס + יוצר + תגית ראשונה",
"authorFirstTag": "יוצר + תגית ראשונה",
"baseModelAuthorFirstTag": "מודל בסיס + יוצר + תגית ראשונה",
"customTemplate": "תבנית מותאמת אישית"
},
"customTemplatePlaceholder": "הזן תבנית מותאמת אישית (למשל, {base_model}/{author}/{first_tag})",
@@ -409,26 +437,6 @@
"proxyPassword": "סיסמה (אופציונלי)",
"proxyPasswordPlaceholder": "password",
"proxyPasswordHelp": "סיסמה לאימות מול הפרוקסי (אם נדרש)"
},
"priorityTags": {
"title": "תגיות עדיפות",
"description": "התאם את סדר העדיפות של התגיות עבור כל סוג מודל (לדוגמה: character, concept, style(toon|toon_style))",
"placeholder": "character, concept, style(toon|toon_style)",
"helpLinkLabel": "פתח עזרה בנושא תגיות עדיפות",
"modelTypes": {
"lora": "LoRA",
"checkpoint": "Checkpoint",
"embedding": "Embedding"
},
"saveSuccess": "תגיות העדיפות עודכנו.",
"saveError": "עדכון תגיות העדיפות נכשל.",
"loadingSuggestions": "טוען הצעות...",
"validation": {
"missingClosingParen": "לרשומה {index} חסר סוגר סוגריים.",
"missingCanonical": "על הרשומה {index} לכלול שם תגית קנונית.",
"duplicateCanonical": "התגית הקנונית \"{tag}\" מופיעה יותר מפעם אחת.",
"unknown": "תצורת תגיות העדיפות שגויה."
}
}
},
"loras": {
@@ -521,6 +529,7 @@
"replacePreview": "החלף תצוגה מקדימה",
"setContentRating": "הגדר דירוג תוכן",
"moveToFolder": "העבר לתיקייה",
"repairMetadata": "[TODO: Translate] Repair metadata",
"excludeModel": "החרג מודל",
"deleteModel": "מחק מודל",
"shareRecipe": "שתף מתכון",
@@ -591,10 +600,26 @@
"selectLoraRoot": "אנא בחר ספריית שורש של LoRA"
}
},
"sort": {
"title": "מיון מתכונים לפי...",
"name": "שם",
"nameAsc": "א - ת",
"nameDesc": "ת - א",
"date": "תאריך",
"dateDesc": "הכי חדש",
"dateAsc": "הכי ישן",
"lorasCount": "מספר LoRAs",
"lorasCountDesc": "הכי הרבה",
"lorasCountAsc": "הכי פחות"
},
"refresh": {
"title": "רענן רשימת מתכונים"
},
"filteredByLora": "מסונן לפי LoRA"
"filteredByLora": "מסונן לפי LoRA",
"favorites": {
"title": "הצג מועדפים בלבד",
"action": "מועדפים"
}
},
"duplicates": {
"found": "נמצאו {count} קבוצות כפולות",
@@ -620,6 +645,13 @@
"noMissingLoras": "אין LoRAs חסרים להורדה",
"getInfoFailed": "קבלת מידע עבור LoRAs חסרים נכשלה",
"prepareError": "שגיאה בהכנת LoRAs להורדה: {message}"
},
"repair": {
"starting": "[TODO: Translate] Repairing recipe metadata...",
"success": "[TODO: Translate] Recipe metadata repaired successfully",
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
}
}
},
@@ -641,7 +673,8 @@
"recursiveUnavailable": "חיפוש רקורסיבי זמין רק בתצוגת עץ",
"collapseAllDisabled": "לא זמין בתצוגת רשימה",
"dragDrop": {
"unableToResolveRoot": "לא ניתן לקבוע את נתיב היעד להעברה."
"unableToResolveRoot": "לא ניתן לקבוע את נתיב היעד להעברה.",
"moveUnsupported": "Move is not supported for this item."
}
},
"statistics": {
@@ -1463,7 +1496,8 @@
"bulkMoveFailures": "העברות שנכשלו:\n{failures}",
"bulkMoveSuccess": "הועברו בהצלחה {successCount} {type}s",
"exampleImagesDownloadSuccess": "תמונות הדוגמה הורדו בהצלחה!",
"exampleImagesDownloadFailed": "הורדת תמונות הדוגמה נכשלה: {message}"
"exampleImagesDownloadFailed": "הורדת תמונות הדוגמה נכשלה: {message}",
"moveFailed": "Failed to move item: {message}"
}
},
"banners": {

View File

@@ -159,6 +159,12 @@
"success": "Updated license metadata for {count} {typePlural}",
"none": "All {typePlural} already have license metadata",
"error": "Failed to refresh license metadata for {typePlural}: {message}"
},
"repairRecipes": {
"label": "レシピデータの修復",
"loading": "レシピデータを修復中...",
"success": "{count} 件のレシピを正常に修復しました。",
"error": "レシピの修復に失敗しました: {message}"
}
},
"header": {
@@ -188,7 +194,8 @@
"creator": "作成者",
"title": "レシピタイトル",
"loraName": "LoRAファイル名",
"loraModel": "LoRAモデル名"
"loraModel": "LoRAモデル名",
"prompt": "プロンプト"
}
},
"filter": {
@@ -199,6 +206,7 @@
"license": "ライセンス",
"noCreditRequired": "クレジット不要",
"allowSellingGeneratedContent": "販売許可",
"noTags": "タグなし",
"clearAll": "すべてのフィルタをクリア"
},
"theme": {
@@ -521,6 +529,7 @@
"replacePreview": "プレビューを置換",
"setContentRating": "コンテンツレーティングを設定",
"moveToFolder": "フォルダに移動",
"repairMetadata": "[TODO: Translate] Repair metadata",
"excludeModel": "モデルを除外",
"deleteModel": "モデルを削除",
"shareRecipe": "レシピを共有",
@@ -591,10 +600,26 @@
"selectLoraRoot": "LoRAルートディレクトリを選択してください"
}
},
"sort": {
"title": "レシピの並び替え...",
"name": "名前",
"nameAsc": "A - Z",
"nameDesc": "Z - A",
"date": "日付",
"dateDesc": "新しい順",
"dateAsc": "古い順",
"lorasCount": "LoRA数",
"lorasCountDesc": "多い順",
"lorasCountAsc": "少ない順"
},
"refresh": {
"title": "レシピリストを更新"
},
"filteredByLora": "LoRAでフィルタ済み"
"filteredByLora": "LoRAでフィルタ済み",
"favorites": {
"title": "お気に入りのみ表示",
"action": "お気に入り"
}
},
"duplicates": {
"found": "{count} 個の重複グループが見つかりました",
@@ -620,6 +645,13 @@
"noMissingLoras": "ダウンロードする不足LoRAがありません",
"getInfoFailed": "不足LoRAの情報取得に失敗しました",
"prepareError": "ダウンロード用LoRAの準備中にエラー{message}"
},
"repair": {
"starting": "[TODO: Translate] Repairing recipe metadata...",
"success": "[TODO: Translate] Recipe metadata repaired successfully",
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
}
}
},
@@ -641,7 +673,8 @@
"recursiveUnavailable": "再帰検索はツリービューでのみ利用できます",
"collapseAllDisabled": "リストビューでは利用できません",
"dragDrop": {
"unableToResolveRoot": "移動先のパスを特定できません。"
"unableToResolveRoot": "移動先のパスを特定できません。",
"moveUnsupported": "Move is not supported for this item."
}
},
"statistics": {
@@ -1463,7 +1496,8 @@
"bulkMoveFailures": "失敗した移動:\n{failures}",
"bulkMoveSuccess": "{successCount} {type}が正常に移動されました",
"exampleImagesDownloadSuccess": "例画像が正常にダウンロードされました!",
"exampleImagesDownloadFailed": "例画像のダウンロードに失敗しました:{message}"
"exampleImagesDownloadFailed": "例画像のダウンロードに失敗しました:{message}",
"moveFailed": "Failed to move item: {message}"
}
},
"banners": {

View File

@@ -159,6 +159,12 @@
"success": "Updated license metadata for {count} {typePlural}",
"none": "All {typePlural} already have license metadata",
"error": "Failed to refresh license metadata for {typePlural}: {message}"
},
"repairRecipes": {
"label": "레시피 데이터 복구",
"loading": "레시피 데이터 복구 중...",
"success": "{count}개의 레시피가 성공적으로 복구되었습니다.",
"error": "레시피 복구 실패: {message}"
}
},
"header": {
@@ -188,7 +194,8 @@
"creator": "제작자",
"title": "레시피 제목",
"loraName": "LoRA 파일명",
"loraModel": "LoRA 모델명"
"loraModel": "LoRA 모델명",
"prompt": "프롬프트"
}
},
"filter": {
@@ -199,6 +206,7 @@
"license": "라이선스",
"noCreditRequired": "크레딧 표기 없음",
"allowSellingGeneratedContent": "판매 허용",
"noTags": "태그 없음",
"clearAll": "모든 필터 지우기"
},
"theme": {
@@ -521,6 +529,7 @@
"replacePreview": "미리보기 교체",
"setContentRating": "콘텐츠 등급 설정",
"moveToFolder": "폴더로 이동",
"repairMetadata": "[TODO: Translate] Repair metadata",
"excludeModel": "모델 제외",
"deleteModel": "모델 삭제",
"shareRecipe": "레시피 공유",
@@ -591,10 +600,26 @@
"selectLoraRoot": "LoRA 루트 디렉토리를 선택해주세요"
}
},
"sort": {
"title": "레시피 정렬...",
"name": "이름",
"nameAsc": "A - Z",
"nameDesc": "Z - A",
"date": "날짜",
"dateDesc": "최신순",
"dateAsc": "오래된순",
"lorasCount": "LoRA 수",
"lorasCountDesc": "많은순",
"lorasCountAsc": "적은순"
},
"refresh": {
"title": "레시피 목록 새로고침"
},
"filteredByLora": "LoRA로 필터링됨"
"filteredByLora": "LoRA로 필터링됨",
"favorites": {
"title": "즐겨찾기만 표시",
"action": "즐겨찾기"
}
},
"duplicates": {
"found": "{count}개의 중복 그룹 발견",
@@ -620,6 +645,13 @@
"noMissingLoras": "다운로드할 누락된 LoRA가 없습니다",
"getInfoFailed": "누락된 LoRA 정보를 가져오는데 실패했습니다",
"prepareError": "LoRA 다운로드 준비 중 오류: {message}"
},
"repair": {
"starting": "[TODO: Translate] Repairing recipe metadata...",
"success": "[TODO: Translate] Recipe metadata repaired successfully",
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
}
}
},
@@ -641,7 +673,8 @@
"recursiveUnavailable": "재귀 검색은 트리 보기에서만 사용할 수 있습니다",
"collapseAllDisabled": "목록 보기에서는 사용할 수 없습니다",
"dragDrop": {
"unableToResolveRoot": "이동할 대상 경로를 확인할 수 없습니다."
"unableToResolveRoot": "이동할 대상 경로를 확인할 수 없습니다.",
"moveUnsupported": "Move is not supported for this item."
}
},
"statistics": {
@@ -1463,7 +1496,8 @@
"bulkMoveFailures": "실패한 이동:\n{failures}",
"bulkMoveSuccess": "{successCount}개 {type}이(가) 성공적으로 이동되었습니다",
"exampleImagesDownloadSuccess": "예시 이미지가 성공적으로 다운로드되었습니다!",
"exampleImagesDownloadFailed": "예시 이미지 다운로드 실패: {message}"
"exampleImagesDownloadFailed": "예시 이미지 다운로드 실패: {message}",
"moveFailed": "Failed to move item: {message}"
}
},
"banners": {

View File

@@ -159,6 +159,12 @@
"success": "Updated license metadata for {count} {typePlural}",
"none": "All {typePlural} already have license metadata",
"error": "Failed to refresh license metadata for {typePlural}: {message}"
},
"repairRecipes": {
"label": "Восстановить данные рецептов",
"loading": "Восстановление данных рецептов...",
"success": "Успешно восстановлено {count} рецептов.",
"error": "Ошибка восстановления рецептов: {message}"
}
},
"header": {
@@ -188,7 +194,8 @@
"creator": "Автор",
"title": "Название рецепта",
"loraName": "Имя файла LoRA",
"loraModel": "Название модели LoRA"
"loraModel": "Название модели LoRA",
"prompt": "Запрос"
}
},
"filter": {
@@ -199,6 +206,7 @@
"license": "Лицензия",
"noCreditRequired": "Без указания авторства",
"allowSellingGeneratedContent": "Продажа разрешена",
"noTags": "Без тегов",
"clearAll": "Очистить все фильтры"
},
"theme": {
@@ -521,6 +529,7 @@
"replacePreview": "Заменить превью",
"setContentRating": "Установить рейтинг контента",
"moveToFolder": "Переместить в папку",
"repairMetadata": "[TODO: Translate] Repair metadata",
"excludeModel": "Исключить модель",
"deleteModel": "Удалить модель",
"shareRecipe": "Поделиться рецептом",
@@ -591,10 +600,26 @@
"selectLoraRoot": "Пожалуйста, выберите корневую папку LoRA"
}
},
"sort": {
"title": "Сортировка рецептов...",
"name": "Имя",
"nameAsc": "А - Я",
"nameDesc": "Я - А",
"date": "Дата",
"dateDesc": "Сначала новые",
"dateAsc": "Сначала старые",
"lorasCount": "Кол-во LoRA",
"lorasCountDesc": "Больше всего",
"lorasCountAsc": "Меньше всего"
},
"refresh": {
"title": "Обновить список рецептов"
},
"filteredByLora": "Фильтр по LoRA"
"filteredByLora": "Фильтр по LoRA",
"favorites": {
"title": "Только избранные",
"action": "Избранное"
}
},
"duplicates": {
"found": "Найдено {count} групп дубликатов",
@@ -620,6 +645,13 @@
"noMissingLoras": "Нет отсутствующих LoRAs для загрузки",
"getInfoFailed": "Не удалось получить информацию для отсутствующих LoRAs",
"prepareError": "Ошибка подготовки LoRAs для загрузки: {message}"
},
"repair": {
"starting": "[TODO: Translate] Repairing recipe metadata...",
"success": "[TODO: Translate] Recipe metadata repaired successfully",
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
}
}
},
@@ -641,7 +673,8 @@
"recursiveUnavailable": "Рекурсивный поиск доступен только в режиме дерева",
"collapseAllDisabled": "Недоступно в виде списка",
"dragDrop": {
"unableToResolveRoot": "Не удалось определить путь назначения для перемещения."
"unableToResolveRoot": "Не удалось определить путь назначения для перемещения.",
"moveUnsupported": "Move is not supported for this item."
}
},
"statistics": {
@@ -1463,7 +1496,8 @@
"bulkMoveFailures": "Неудачные перемещения:\n{failures}",
"bulkMoveSuccess": "Успешно перемещено {successCount} {type}s",
"exampleImagesDownloadSuccess": "Примеры изображений успешно загружены!",
"exampleImagesDownloadFailed": "Не удалось загрузить примеры изображений: {message}"
"exampleImagesDownloadFailed": "Не удалось загрузить примеры изображений: {message}",
"moveFailed": "Failed to move item: {message}"
}
},
"banners": {

View File

@@ -159,6 +159,12 @@
"success": "Updated license metadata for {count} {typePlural}",
"none": "All {typePlural} already have license metadata",
"error": "Failed to refresh license metadata for {typePlural}: {message}"
},
"repairRecipes": {
"label": "修复配方数据",
"loading": "正在修复配方数据...",
"success": "成功修复了 {count} 个配方。",
"error": "配方修复失败:{message}"
}
},
"header": {
@@ -188,7 +194,8 @@
"creator": "创作者",
"title": "配方标题",
"loraName": "LoRA 文件名",
"loraModel": "LoRA 模型名称"
"loraModel": "LoRA 模型名称",
"prompt": "提示词"
}
},
"filter": {
@@ -199,6 +206,7 @@
"license": "许可证",
"noCreditRequired": "无需署名",
"allowSellingGeneratedContent": "允许销售",
"noTags": "无标签",
"clearAll": "清除所有筛选"
},
"theme": {
@@ -521,6 +529,7 @@
"replacePreview": "替换预览",
"setContentRating": "设置内容评级",
"moveToFolder": "移动到文件夹",
"repairMetadata": "[TODO: Translate] Repair metadata",
"excludeModel": "排除模型",
"deleteModel": "删除模型",
"shareRecipe": "分享配方",
@@ -591,10 +600,26 @@
"selectLoraRoot": "请选择 LoRA 根目录"
}
},
"sort": {
"title": "配方排序...",
"name": "名称",
"nameAsc": "A - Z",
"nameDesc": "Z - A",
"date": "时间",
"dateDesc": "最新",
"dateAsc": "最早",
"lorasCount": "LoRA 数量",
"lorasCountDesc": "最多",
"lorasCountAsc": "最少"
},
"refresh": {
"title": "刷新配方列表"
},
"filteredByLora": "按 LoRA 筛选"
"filteredByLora": "按 LoRA 筛选",
"favorites": {
"title": "仅显示收藏",
"action": "收藏"
}
},
"duplicates": {
"found": "发现 {count} 个重复组",
@@ -620,6 +645,13 @@
"noMissingLoras": "没有缺失的 LoRA 可下载",
"getInfoFailed": "获取缺失 LoRA 信息失败",
"prepareError": "准备下载 LoRA 时出错:{message}"
},
"repair": {
"starting": "[TODO: Translate] Repairing recipe metadata...",
"success": "[TODO: Translate] Recipe metadata repaired successfully",
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
}
}
},
@@ -641,7 +673,8 @@
"recursiveUnavailable": "仅在树形视图中可使用递归搜索",
"collapseAllDisabled": "列表视图下不可用",
"dragDrop": {
"unableToResolveRoot": "无法确定移动的目标路径。"
"unableToResolveRoot": "无法确定移动的目标路径。",
"moveUnsupported": "Move is not supported for this item."
}
},
"statistics": {
@@ -1463,7 +1496,8 @@
"bulkMoveFailures": "移动失败:\n{failures}",
"bulkMoveSuccess": "成功移动 {successCount} 个 {type}",
"exampleImagesDownloadSuccess": "示例图片下载成功!",
"exampleImagesDownloadFailed": "示例图片下载失败:{message}"
"exampleImagesDownloadFailed": "示例图片下载失败:{message}",
"moveFailed": "Failed to move item: {message}"
}
},
"banners": {

View File

@@ -154,11 +154,17 @@
"error": "清理範例圖片資料夾失敗:{message}"
},
"fetchMissingLicenses": {
"label": "Refresh license metadata",
"loading": "Refreshing license metadata for {typePlural}...",
"success": "Updated license metadata for {count} {typePlural}",
"none": "All {typePlural} already have license metadata",
"error": "Failed to refresh license metadata for {typePlural}: {message}"
"label": "重新整理授權中繼資料",
"loading": "正在重新整理 {typePlural} 的授權中繼資料...",
"success": "已更新 {count} {typePlural} 的授權中繼資料",
"none": "所有 {typePlural} 已具備授權中繼資料",
"error": "重新整理 {typePlural} 授權中繼資料失敗:{message}"
},
"repairRecipes": {
"label": "修復配方資料",
"loading": "正在修復配方資料...",
"success": "成功修復 {count} 個配方。",
"error": "配方修復失敗:{message}"
}
},
"header": {
@@ -188,7 +194,8 @@
"creator": "創作者",
"title": "配方標題",
"loraName": "LoRA 檔案名稱",
"loraModel": "LoRA 模型名稱"
"loraModel": "LoRA 模型名稱",
"prompt": "提示詞"
}
},
"filter": {
@@ -199,6 +206,7 @@
"license": "授權",
"noCreditRequired": "無需署名",
"allowSellingGeneratedContent": "允許銷售",
"noTags": "無標籤",
"clearAll": "清除所有篩選"
},
"theme": {
@@ -521,6 +529,7 @@
"replacePreview": "更換預覽圖",
"setContentRating": "設定內容分級",
"moveToFolder": "移動到資料夾",
"repairMetadata": "[TODO: Translate] Repair metadata",
"excludeModel": "排除模型",
"deleteModel": "刪除模型",
"shareRecipe": "分享配方",
@@ -591,10 +600,26 @@
"selectLoraRoot": "請選擇 LoRA 根目錄"
}
},
"sort": {
"title": "配方排序...",
"name": "名稱",
"nameAsc": "A - Z",
"nameDesc": "Z - A",
"date": "時間",
"dateDesc": "最新",
"dateAsc": "最舊",
"lorasCount": "LoRA 數量",
"lorasCountDesc": "最多",
"lorasCountAsc": "最少"
},
"refresh": {
"title": "重新整理配方列表"
},
"filteredByLora": "已依 LoRA 篩選"
"filteredByLora": "已依 LoRA 篩選",
"favorites": {
"title": "僅顯示收藏",
"action": "收藏"
}
},
"duplicates": {
"found": "發現 {count} 組重複項",
@@ -620,6 +645,13 @@
"noMissingLoras": "無缺少的 LoRA 可下載",
"getInfoFailed": "取得缺少 LoRA 資訊失敗",
"prepareError": "準備下載 LoRA 時發生錯誤:{message}"
},
"repair": {
"starting": "[TODO: Translate] Repairing recipe metadata...",
"success": "[TODO: Translate] Recipe metadata repaired successfully",
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
}
}
},
@@ -641,7 +673,8 @@
"recursiveUnavailable": "遞迴搜尋僅能在樹狀檢視中使用",
"collapseAllDisabled": "列表檢視下不可用",
"dragDrop": {
"unableToResolveRoot": "無法確定移動的目標路徑。"
"unableToResolveRoot": "無法確定移動的目標路徑。",
"moveUnsupported": "Move is not supported for this item."
}
},
"statistics": {
@@ -1463,7 +1496,8 @@
"bulkMoveFailures": "移動失敗:\n{failures}",
"bulkMoveSuccess": "已成功移動 {successCount} 個 {type}",
"exampleImagesDownloadSuccess": "範例圖片下載成功!",
"exampleImagesDownloadFailed": "下載範例圖片失敗:{message}"
"exampleImagesDownloadFailed": "下載範例圖片失敗:{message}",
"moveFailed": "Failed to move item: {message}"
}
},
"banners": {

View File

@@ -1,11 +1,13 @@
import os
import platform
import threading
from pathlib import Path
import folder_paths # type: ignore
from typing import Any, Dict, Iterable, List, Mapping, Optional, Set
from typing import Any, Dict, Iterable, List, Mapping, Optional, Set, Tuple
import logging
import json
import urllib.parse
import time
from .utils.settings_paths import ensure_settings_file, get_settings_dir, load_settings_template
@@ -80,6 +82,8 @@ class Config:
self._path_mappings: Dict[str, str] = {}
# Normalized preview root directories used to validate preview access
self._preview_root_paths: Set[Path] = set()
# Optional background rescan thread
self._rescan_thread: Optional[threading.Thread] = None
self.loras_roots = self._init_lora_paths()
self.checkpoints_roots = None
self.unet_roots = None
@@ -282,56 +286,23 @@ class Config:
def _load_symlink_cache(self) -> bool:
cache_path = self._get_symlink_cache_path()
if not cache_path.exists():
logger.info("Symlink cache not found at %s", cache_path)
return False
try:
with cache_path.open("r", encoding="utf-8") as handle:
payload = json.load(handle)
except Exception as exc:
logger.debug("Failed to load symlink cache %s: %s", cache_path, exc)
logger.info("Failed to load symlink cache %s: %s", cache_path, exc)
return False
if not isinstance(payload, dict):
logger.info("Symlink cache payload is not a dict: %s", type(payload))
return False
cached_fingerprint = payload.get("fingerprint")
cached_mappings = payload.get("path_mappings")
if not isinstance(cached_fingerprint, dict) or not isinstance(cached_mappings, Mapping):
return False
current_fingerprint = self._build_symlink_fingerprint()
cached_roots = cached_fingerprint.get("roots")
cached_stats = cached_fingerprint.get("stats")
if (
not isinstance(cached_roots, list)
or not isinstance(cached_stats, Mapping)
or sorted(cached_roots) != sorted(current_fingerprint["roots"]) # type: ignore[index]
):
return False
for root in current_fingerprint["roots"]: # type: ignore[assignment]
cached_stat = cached_stats.get(root) if isinstance(cached_stats, Mapping) else None
current_stat = current_fingerprint["stats"].get(root) # type: ignore[index]
if not isinstance(cached_stat, Mapping) or not current_stat:
return False
cached_mtime = cached_stat.get("mtime_ns")
cached_inode = cached_stat.get("inode")
current_mtime = current_stat.get("mtime_ns")
current_inode = current_stat.get("inode")
if cached_inode != current_inode:
return False
if cached_mtime != current_mtime:
cached_noise = cached_stat.get("noise_mtime_ns")
current_noise = current_stat.get("noise_mtime_ns")
if not (
cached_noise
and current_noise
and cached_mtime == cached_noise
and current_mtime == current_noise
):
if not isinstance(cached_mappings, Mapping):
logger.info("Symlink cache missing path mappings")
return False
normalized_mappings: Dict[str, str] = {}
@@ -341,6 +312,7 @@ class Config:
normalized_mappings[self._normalize_path(target)] = self._normalize_path(link)
self._path_mappings = normalized_mappings
logger.info("Symlink cache loaded with %d mappings", len(self._path_mappings))
return True
def _save_symlink_cache(self) -> None:
@@ -353,22 +325,75 @@ class Config:
try:
with cache_path.open("w", encoding="utf-8") as handle:
json.dump(payload, handle, ensure_ascii=False, indent=2)
logger.info("Symlink cache saved to %s with %d mappings", cache_path, len(self._path_mappings))
except Exception as exc:
logger.debug("Failed to write symlink cache %s: %s", cache_path, exc)
logger.info("Failed to write symlink cache %s: %s", cache_path, exc)
def _initialize_symlink_mappings(self) -> None:
if not self._load_symlink_cache():
start = time.perf_counter()
cache_loaded = self._load_symlink_cache()
if cache_loaded:
logger.info(
"Symlink mappings restored from cache in %.2f ms",
(time.perf_counter() - start) * 1000,
)
self._rebuild_preview_roots()
self._schedule_symlink_rescan()
return
self._scan_symbolic_links()
self._save_symlink_cache()
else:
logger.info("Loaded symlink mappings from cache")
self._rebuild_preview_roots()
logger.info(
"Symlink mappings rebuilt and cached in %.2f ms",
(time.perf_counter() - start) * 1000,
)
def _scan_symbolic_links(self):
"""Scan all symbolic links in LoRA, Checkpoint, and Embedding root directories"""
start = time.perf_counter()
# Reset mappings before rescanning to avoid stale entries
self._path_mappings.clear()
self._seed_root_symlink_mappings()
visited_dirs: Set[str] = set()
for root in self._symlink_roots():
self._scan_directory_links(root, visited_dirs)
logger.info(
"Symlink scan finished in %.2f ms with %d mappings",
(time.perf_counter() - start) * 1000,
len(self._path_mappings),
)
def _schedule_symlink_rescan(self) -> None:
"""Trigger a best-effort background rescan to refresh stale caches."""
if self._rescan_thread and self._rescan_thread.is_alive():
return
def worker():
try:
self._scan_symbolic_links()
self._save_symlink_cache()
self._rebuild_preview_roots()
logger.info("Background symlink rescan completed")
except Exception as exc: # pragma: no cover - defensive logging
logger.info("Background symlink rescan failed: %s", exc)
thread = threading.Thread(
target=worker,
name="lora-manager-symlink-rescan",
daemon=True,
)
self._rescan_thread = thread
thread.start()
def _wait_for_rescan(self, timeout: Optional[float] = None) -> None:
"""Block until the background rescan completes (testing convenience)."""
thread = self._rescan_thread
if thread:
thread.join(timeout=timeout)
def _scan_directory_links(self, root: str, visited_dirs: Set[str]):
"""Iteratively scan directory symlinks to avoid deep recursion."""
@@ -434,6 +459,22 @@ class Config:
self._preview_root_paths.update(self._expand_preview_root(normalized_target))
self._preview_root_paths.update(self._expand_preview_root(normalized_link))
def _seed_root_symlink_mappings(self) -> None:
"""Ensure symlinked root folders are recorded before deep scanning."""
for root in self._symlink_roots():
if not root:
continue
try:
if not self._is_link(root):
continue
target_path = os.path.realpath(root)
if not os.path.isdir(target_path):
continue
self.add_path_mapping(root, target_path)
except Exception as exc:
logger.debug("Skipping root symlink %s: %s", root, exc)
def _expand_preview_root(self, path: str) -> Set[Path]:
"""Return normalized ``Path`` objects representing a preview root."""

View File

@@ -40,7 +40,38 @@ class MetadataProcessor:
candidate_samplers[node_id] = metadata[SAMPLING][node_id]
# If we found candidate samplers, apply primary sampler logic to these candidates only
if candidate_samplers:
# PRE-PROCESS: Ensure all candidate samplers have their parameters populated
# This is especially important for SamplerCustomAdvanced which needs tracing
prompt = metadata.get("current_prompt")
for node_id in candidate_samplers:
# If a sampler is missing common parameters like steps or denoise,
# try to populate them using tracing before ranking
sampler_info = candidate_samplers[node_id]
params = sampler_info.get("parameters", {})
if prompt and (params.get("steps") is None or params.get("denoise") is None):
# Create a temporary params dict to use the handler
temp_params = {
"steps": params.get("steps"),
"denoise": params.get("denoise"),
"sampler": params.get("sampler_name"),
"scheduler": params.get("scheduler")
}
# Check if it's SamplerCustomAdvanced
if prompt.original_prompt and node_id in prompt.original_prompt:
if prompt.original_prompt[node_id].get("class_type") == "SamplerCustomAdvanced":
MetadataProcessor.handle_custom_advanced_sampler(metadata, prompt, node_id, temp_params)
# Update the actual parameters with found values
params["steps"] = temp_params.get("steps")
params["denoise"] = temp_params.get("denoise")
if temp_params.get("sampler"):
params["sampler_name"] = temp_params.get("sampler")
if temp_params.get("scheduler"):
params["scheduler"] = temp_params.get("scheduler")
# Collect potential primary samplers based on different criteria
custom_advanced_samplers = []
advanced_add_noise_samplers = []
@@ -49,7 +80,6 @@ class MetadataProcessor:
high_denoise_id = None
# First, check for SamplerCustomAdvanced among candidates
prompt = metadata.get("current_prompt")
if prompt and prompt.original_prompt:
for node_id in candidate_samplers:
node_info = prompt.original_prompt.get(node_id, {})
@@ -77,15 +107,16 @@ class MetadataProcessor:
# Combine all potential primary samplers
potential_samplers = custom_advanced_samplers + advanced_add_noise_samplers + high_denoise_samplers
# Find the most recent potential primary sampler (closest to downstream node)
for i in range(downstream_index - 1, -1, -1):
# Find the first potential primary sampler (prefer base sampler over refine)
# Use forward search to prioritize the first one in execution order
for i in range(downstream_index):
node_id = execution_order[i]
if node_id in potential_samplers:
return node_id, candidate_samplers[node_id]
# If no potential sampler found from our criteria, return the most recent sampler
# If no potential sampler found from our criteria, return the first sampler
if candidate_samplers:
for i in range(downstream_index - 1, -1, -1):
for i in range(downstream_index):
node_id = execution_order[i]
if node_id in candidate_samplers:
return node_id, candidate_samplers[node_id]
@@ -176,7 +207,10 @@ class MetadataProcessor:
found_node_id = input_value[0] # Connected node_id
# If we're looking for a specific node class
if target_class and prompt.original_prompt[found_node_id].get("class_type") == target_class:
if target_class:
if found_node_id not in prompt.original_prompt:
return None
if prompt.original_prompt[found_node_id].get("class_type") == target_class:
return found_node_id
# If we're not looking for a specific class, update the last valid node
@@ -185,11 +219,19 @@ class MetadataProcessor:
# Continue tracing through intermediate nodes
current_node_id = found_node_id
# For most conditioning nodes, the input we want to follow is named "conditioning"
if "conditioning" in prompt.original_prompt[current_node_id].get("inputs", {}):
# Check if current source node exists
if current_node_id not in prompt.original_prompt:
return found_node_id if not target_class else None
# Determine which input to follow next on the source node
source_node_inputs = prompt.original_prompt[current_node_id].get("inputs", {})
if input_name in source_node_inputs:
current_input = input_name
elif "conditioning" in source_node_inputs:
current_input = "conditioning"
else:
# If there's no "conditioning" input, return the current node
# If there's no suitable input to follow, return the current node
# if we're not looking for a specific target_class
return found_node_id if not target_class else None
else:
@@ -202,12 +244,89 @@ class MetadataProcessor:
return last_valid_node if not target_class else None
@staticmethod
def find_primary_checkpoint(metadata):
"""Find the primary checkpoint model in the workflow"""
def trace_model_path(metadata, prompt, start_node_id):
"""
Trace the model connection path upstream to find the checkpoint
"""
if not prompt or not prompt.original_prompt:
return None
current_node_id = start_node_id
depth = 0
max_depth = 50
while depth < max_depth:
# Check if current node is a registered checkpoint in our metadata
# This handles cached nodes correctly because metadata contains info for all nodes in the graph
if current_node_id in metadata.get(MODELS, {}):
if metadata[MODELS][current_node_id].get("type") == "checkpoint":
return current_node_id
if current_node_id not in prompt.original_prompt:
return None
node = prompt.original_prompt[current_node_id]
inputs = node.get("inputs", {})
class_type = node.get("class_type", "")
# Determine which input to follow next
next_input_name = "model"
# Special handling for initial node
if depth == 0:
if class_type == "SamplerCustomAdvanced":
next_input_name = "guider"
# If the specific input doesn't exist, try generic 'model'
if next_input_name not in inputs:
if "model" in inputs:
next_input_name = "model"
elif "basic_pipe" in inputs:
# Handle pipe nodes like FromBasicPipe by following the pipeline
next_input_name = "basic_pipe"
else:
# Dead end - no model input to follow
return None
# Get connected node
input_val = inputs[next_input_name]
if isinstance(input_val, list) and len(input_val) > 0:
current_node_id = input_val[0]
else:
return None
depth += 1
return None
@staticmethod
def find_primary_checkpoint(metadata, downstream_id=None, primary_sampler_id=None):
"""
Find the primary checkpoint model in the workflow
Parameters:
- metadata: The workflow metadata
- downstream_id: Optional ID of a downstream node to help identify the specific primary sampler
- primary_sampler_id: Optional ID of the primary sampler if already known
"""
if not metadata.get(MODELS):
return None
# In most workflows, there's only one checkpoint, so we can just take the first one
# Method 1: Topology-based tracing (More accurate for complex workflows)
# First, find the primary sampler if not provided
if not primary_sampler_id:
primary_sampler_id, _ = MetadataProcessor.find_primary_sampler(metadata, downstream_id)
if primary_sampler_id:
prompt = metadata.get("current_prompt")
if prompt:
# Trace back from the sampler to find the checkpoint
checkpoint_id = MetadataProcessor.trace_model_path(metadata, prompt, primary_sampler_id)
if checkpoint_id and checkpoint_id in metadata.get(MODELS, {}):
return metadata[MODELS][checkpoint_id].get("name")
# Method 2: Fallback to the first available checkpoint (Original behavior)
# In most simple workflows, there's only one checkpoint, so we can just take the first one
for node_id, model_info in metadata.get(MODELS, {}).items():
if model_info.get("type") == "checkpoint":
return model_info.get("name")
@@ -311,7 +430,8 @@ class MetadataProcessor:
primary_sampler_id, primary_sampler = MetadataProcessor.find_primary_sampler(metadata, id)
# Directly get checkpoint from metadata instead of tracing
checkpoint = MetadataProcessor.find_primary_checkpoint(metadata)
# Pass primary_sampler_id to avoid redundant calculation
checkpoint = MetadataProcessor.find_primary_checkpoint(metadata, id, primary_sampler_id)
if checkpoint:
params["checkpoint"] = checkpoint
@@ -445,6 +565,7 @@ class MetadataProcessor:
scheduler_params = metadata[SAMPLING][scheduler_node_id].get("parameters", {})
params["steps"] = scheduler_params.get("steps")
params["scheduler"] = scheduler_params.get("scheduler")
params["denoise"] = scheduler_params.get("denoise")
# 2. Trace sampler input to find KSamplerSelect (only if sampler input exists)
if "sampler" in sampler_inputs:

View File

@@ -9,7 +9,7 @@ from ..metadata_collector import get_metadata
from PIL import Image, PngImagePlugin
import piexif
class SaveImage:
class SaveImageLM:
NAME = "Save Image (LoraManager)"
CATEGORY = "Lora Manager/utils"
DESCRIPTION = "Save images with embedded generation metadata in compatible format"

View File

@@ -103,7 +103,7 @@ def nunchaku_load_lora(model, lora_name, lora_strength):
# Save the transformer temporarily
model_wrapper.model = None
ret_model = copy.deepcopy(model) # copy everything except the model
ret_model = model.clone()
ret_model_wrapper = ret_model.model.diffusion_model
# Restore the model and set it for the copy

View File

@@ -37,7 +37,8 @@ class RecipeMetadataParser(ABC):
"""
pass
async def populate_lora_from_civitai(self, lora_entry: Dict[str, Any], civitai_info_tuple: Tuple[Dict[str, Any], Optional[str]],
@staticmethod
async def populate_lora_from_civitai(lora_entry: Dict[str, Any], civitai_info_tuple: Tuple[Dict[str, Any], Optional[str]],
recipe_scanner=None, base_model_counts=None, hash_value=None) -> Optional[Dict[str, Any]]:
"""
Populate a lora entry with information from Civitai API response
@@ -149,7 +150,8 @@ class RecipeMetadataParser(ABC):
return lora_entry
async def populate_checkpoint_from_civitai(self, checkpoint: Dict[str, Any], civitai_info: Dict[str, Any]) -> Dict[str, Any]:
@staticmethod
async def populate_checkpoint_from_civitai(checkpoint: Dict[str, Any], civitai_info: Dict[str, Any]) -> Dict[str, Any]:
"""
Populate checkpoint information from Civitai API response
@@ -187,6 +189,7 @@ class RecipeMetadataParser(ABC):
checkpoint['downloadUrl'] = civitai_data.get('downloadUrl', '')
checkpoint['modelId'] = civitai_data.get('modelId', checkpoint.get('modelId', 0))
checkpoint['id'] = civitai_data.get('id', 0)
if 'files' in civitai_data:
model_file = next(

216
py/recipes/enrichment.py Normal file
View File

@@ -0,0 +1,216 @@
import logging
import json
import re
import os
from typing import Any, Dict, Optional
from .merger import GenParamsMerger
from .base import RecipeMetadataParser
from ..services.metadata_service import get_default_metadata_provider
logger = logging.getLogger(__name__)
class RecipeEnricher:
    """Service to enrich recipe metadata from multiple sources (Civitai, Embedded, User).

    A "recipe" dict carries generation parameters (``gen_params``), an optional
    ``checkpoint`` entry, and a ``source_url``/``source_path``. Enrichment
    layers data from three sources with fixed priority
    (request > Civitai > embedded) and tries to resolve the checkpoint model
    via the Civitai metadata provider.
    """

    @staticmethod
    async def enrich_recipe(
        recipe: Dict[str, Any],
        civitai_client: Any,
        request_params: Optional[Dict[str, Any]] = None
    ) -> bool:
        """
        Enrich a recipe dictionary in-place with metadata from Civitai and embedded params.

        Args:
            recipe: The recipe dictionary to enrich. Must have 'gen_params' initialized.
            civitai_client: Authenticated Civitai client instance.
            request_params: (Optional) Parameters from a user request (e.g. import).

        Returns:
            bool: True if the recipe was modified, False otherwise.
        """
        updated = False
        gen_params = recipe.get("gen_params", {})

        # 1. Fetch Civitai Info if available
        civitai_meta = None
        model_version_id = None
        source_url = recipe.get("source_url") or recipe.get("source_path", "")

        # Check if it's a Civitai image URL (e.g. https://civitai.com/images/12345)
        image_id_match = re.search(r'civitai\.com/images/(\d+)', str(source_url))
        if image_id_match:
            image_id = image_id_match.group(1)
            try:
                # NOTE(review): assumes get_image_info returns a dict exposing
                # "meta" and "modelVersionId" — confirm against the client API.
                image_info = await civitai_client.get_image_info(image_id)
                if image_info:
                    # Handle nested meta often found in Civitai API responses
                    # (some responses wrap the real meta under meta["meta"]).
                    raw_meta = image_info.get("meta")
                    if isinstance(raw_meta, dict):
                        if "meta" in raw_meta and isinstance(raw_meta["meta"], dict):
                            civitai_meta = raw_meta["meta"]
                        else:
                            civitai_meta = raw_meta

                    model_version_id = image_info.get("modelVersionId")

                    # If not at top level, check resources in meta for the
                    # checkpoint's model version id.
                    if not model_version_id and civitai_meta:
                        resources = civitai_meta.get("civitaiResources", [])
                        for res in resources:
                            if res.get("type") == "checkpoint":
                                model_version_id = res.get("modelVersionId")
                                break
            except Exception as e:
                # Best-effort: network/API failures must not abort enrichment.
                logger.warning(f"Failed to fetch Civitai image info: {e}")

        # 2. Merge Parameters
        # Priority: request_params > civitai_meta > embedded (existing gen_params)
        new_gen_params = GenParamsMerger.merge(
            request_params=request_params,
            civitai_meta=civitai_meta,
            embedded_metadata=gen_params
        )

        if new_gen_params != gen_params:
            recipe["gen_params"] = new_gen_params
            updated = True

        # 3. Checkpoint Enrichment
        # If we have a checkpoint entry, or we can find one
        # Use 'id' (from Civitai version) as a marker that it's been enriched
        checkpoint_entry = recipe.get("checkpoint")
        has_full_checkpoint = checkpoint_entry and checkpoint_entry.get("name") and checkpoint_entry.get("id")

        if not has_full_checkpoint:
            # Helper to look up values in priority order
            # (request > Civitai meta > embedded gen_params).
            def start_lookup(keys):
                for source in [request_params, civitai_meta, gen_params]:
                    if source:
                        if isinstance(keys, list):
                            for k in keys:
                                if k in source: return source[k]
                        else:
                            if keys in source: return source[keys]
                return None

            target_version_id = model_version_id or start_lookup("modelVersionId")

            # Also check existing checkpoint entry
            if not target_version_id and checkpoint_entry:
                target_version_id = checkpoint_entry.get("modelVersionId") or checkpoint_entry.get("id")

            # Check for version ID in resources (which might be a string in gen_params)
            if not target_version_id:
                # Look in all sources for "Civitai resources"
                resources_val = start_lookup(["Civitai resources", "civitai_resources", "resources"])
                if resources_val:
                    # Re-wrap under the key the extractor expects.
                    target_version_id = RecipeEnricher._extract_version_id_from_resources({"Civitai resources": resources_val})

            target_hash = start_lookup(["Model hash", "checkpoint_hash", "hashes"])
            if not target_hash and checkpoint_entry:
                target_hash = checkpoint_entry.get("hash") or checkpoint_entry.get("model_hash")

            # Look for 'Model' which sometimes is the hash or name
            model_val = start_lookup("Model")

            # Look for Checkpoint name fallback
            checkpoint_val = checkpoint_entry.get("name") if checkpoint_entry else None
            if not checkpoint_val:
                checkpoint_val = start_lookup(["Checkpoint", "checkpoint"])

            checkpoint_updated = await RecipeEnricher._resolve_and_populate_checkpoint(
                recipe, target_version_id, target_hash, model_val, checkpoint_val
            )
            if checkpoint_updated:
                updated = True
        else:
            # Checkpoint exists, no need to sync to gen_params anymore.
            pass

        # base_model resolution moved to _resolve_and_populate_checkpoint to support strict formatting

        return updated

    @staticmethod
    def _extract_version_id_from_resources(gen_params: Dict[str, Any]) -> Optional[Any]:
        """Try to find modelVersionId in Civitai resources parameter.

        The "Civitai resources" value may be either a JSON-encoded string or an
        already-parsed list of resource dicts; both are handled.
        """
        civitai_resources_raw = gen_params.get("Civitai resources")
        if not civitai_resources_raw:
            return None

        resources_list = None
        if isinstance(civitai_resources_raw, str):
            try:
                resources_list = json.loads(civitai_resources_raw)
            except Exception:
                # Malformed JSON — treat as "no resources".
                pass
        elif isinstance(civitai_resources_raw, list):
            resources_list = civitai_resources_raw

        if isinstance(resources_list, list):
            # NOTE(review): assumes every list entry is a dict — confirm;
            # a non-dict entry would raise AttributeError here.
            for res in resources_list:
                if res.get("type") == "checkpoint":
                    return res.get("modelVersionId")
        return None

    @staticmethod
    async def _resolve_and_populate_checkpoint(
        recipe: Dict[str, Any],
        target_version_id: Optional[Any],
        target_hash: Optional[str],
        model_val: Optional[str],
        checkpoint_val: Optional[str]
    ) -> bool:
        """Find checkpoint metadata and populate it in the recipe.

        Lookup order: explicit version id, then model hash, then the 'Model'
        value when it looks like a short hash. Returns True when the recipe's
        "checkpoint" (and possibly "base_model") entry was updated.
        """
        metadata_provider = await get_default_metadata_provider()
        civitai_info = None

        if target_version_id:
            civitai_info = await metadata_provider.get_model_version_info(str(target_version_id))
        elif target_hash:
            civitai_info = await metadata_provider.get_model_by_hash(target_hash)
        else:
            # Look for 'Model' which sometimes is the hash or name
            # NOTE(review): 10 chars presumably matches Civitai's AutoV2 short
            # hash format — confirm; also assumes model_val is a string.
            if model_val and len(model_val) == 10:  # Likely a short hash
                civitai_info = await metadata_provider.get_model_by_hash(model_val)

        # Provider may signal a miss as a (data, "Model not found") tuple.
        if civitai_info and not (isinstance(civitai_info, tuple) and civitai_info[1] == "Model not found"):
            # If we already have a partial checkpoint, use it as base
            existing_cp = recipe.get("checkpoint")
            if existing_cp is None:
                existing_cp = {}

            checkpoint_data = await RecipeMetadataParser.populate_checkpoint_from_civitai(existing_cp, civitai_info)

            # 1. First, resolve base_model using full data before we format it away
            current_base_model = recipe.get("base_model")
            resolved_base_model = checkpoint_data.get("baseModel")

            if resolved_base_model:
                # Update if empty OR if it matches our generic prefix but is less specific
                is_generic = not current_base_model or current_base_model.lower() in ["flux", "sdxl", "sd15"]
                if is_generic and resolved_base_model != current_base_model:
                    recipe["base_model"] = resolved_base_model

            # 2. Format according to requirements: type, modelId, modelVersionId, modelName, modelVersionName
            formatted_checkpoint = {
                "type": "checkpoint",
                "modelId": checkpoint_data.get("modelId"),
                "modelVersionId": checkpoint_data.get("id") or checkpoint_data.get("modelVersionId"),
                "modelName": checkpoint_data.get("name"),  # In base.py, 'name' is populated from civitai_data['model']['name']
                "modelVersionName": checkpoint_data.get("version")  # In base.py, 'version' is populated from civitai_data['name']
            }

            # Remove None values
            recipe["checkpoint"] = {k: v for k, v in formatted_checkpoint.items() if v is not None}
            return True
        else:
            # Fallback to name extraction if we don't already have one
            existing_cp = recipe.get("checkpoint")
            if not existing_cp or not existing_cp.get("modelName"):
                cp_name = checkpoint_val
                if cp_name:
                    recipe["checkpoint"] = {
                        "type": "checkpoint",
                        "modelName": cp_name
                    }
                    return True
        return False

98
py/recipes/merger.py Normal file
View File

@@ -0,0 +1,98 @@
from typing import Any, Dict, Optional
import logging
logger = logging.getLogger(__name__)
class GenParamsMerger:
    """Utility to merge generation parameters from multiple sources with priority.

    Sources are layered lowest-priority-first (embedded metadata, then Civitai
    meta, then explicit request parameters), so later layers overwrite earlier
    ones. Keys are normalized to the canonical snake_case spellings used
    internally, and bookkeeping keys that are not generation parameters are
    stripped from the result.

    Fix: ``_update_normalized`` previously wrote BOTH the original key and its
    normalized alias into the accumulator (plus a dead ``if ...: pass`` block),
    relying on ``merge`` to filter the aliases back out. It now stores only the
    canonical key; the final filter is kept as a safety net. The output of
    ``merge`` is unchanged.
    """

    # Bookkeeping/identity keys (IDs, hashes, resource lists, ...) that are
    # not generation parameters; they are removed from the merged result.
    BLACKLISTED_KEYS = {
        "id", "url", "userId", "username", "createdAt", "updatedAt", "hash", "meta",
        "draft", "extra", "width", "height", "process", "quantity", "workflow",
        "baseModel", "resources", "disablePoi", "aspectRatio", "Created Date",
        "experimental", "civitaiResources", "civitai_resources", "Civitai resources",
        "modelVersionId", "modelId", "hashes", "Model", "Model hash", "checkpoint_hash",
        "checkpoint", "checksum", "model_checksum"
    }

    # Source-specific spellings (Civitai camelCase, A1111-style capitalized
    # labels) mapped to the canonical snake_case parameter names.
    NORMALIZATION_MAPPING = {
        # Civitai specific
        "cfgScale": "cfg_scale",
        "clipSkip": "clip_skip",
        "negativePrompt": "negative_prompt",
        # Case variations
        "Sampler": "sampler",
        "Steps": "steps",
        "Seed": "seed",
        "Size": "size",
        "Prompt": "prompt",
        "Negative prompt": "negative_prompt",
        "Cfg scale": "cfg_scale",
        "Clip skip": "clip_skip",
        "Denoising strength": "denoising_strength",
    }

    @staticmethod
    def merge(
        request_params: Optional[Dict[str, Any]] = None,
        civitai_meta: Optional[Dict[str, Any]] = None,
        embedded_metadata: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """
        Merge generation parameters from three sources.

        Priority: request_params > civitai_meta > embedded_metadata

        Args:
            request_params: Params provided directly in the import request
            civitai_meta: Params from Civitai Image API 'meta' field
            embedded_metadata: Params extracted from image EXIF/embedded metadata

        Returns:
            Merged parameters dictionary with normalized keys and bookkeeping
            keys removed.
        """
        merged: Dict[str, Any] = {}

        # 1. Start with embedded metadata (lowest priority).
        if embedded_metadata:
            # A full recipe dict nests its parameters under "gen_params";
            # otherwise the dict itself is assumed to hold the parameters.
            nested = embedded_metadata.get("gen_params")
            if isinstance(nested, dict):
                GenParamsMerger._update_normalized(merged, nested)
            else:
                GenParamsMerger._update_normalized(merged, embedded_metadata)

        # 2. Layer Civitai meta (medium priority).
        if civitai_meta:
            GenParamsMerger._update_normalized(merged, civitai_meta)

        # 3. Layer request params (highest priority).
        if request_params:
            GenParamsMerger._update_normalized(merged, request_params)

        # Strip bookkeeping keys. Normalized aliases never appear in `merged`
        # anymore (see _update_normalized), but filter them too for safety.
        return {
            key: value
            for key, value in merged.items()
            if key not in GenParamsMerger.BLACKLISTED_KEYS
            and key not in GenParamsMerger.NORMALIZATION_MAPPING
        }

    @staticmethod
    def _update_normalized(target: Dict[str, Any], source: Dict[str, Any]) -> None:
        """Copy *source* into *target*, rewriting keys to their canonical names.

        Only the normalized key is stored; because callers layer sources in
        priority order, a later source simply overwrites the canonical entry.
        """
        for key, value in source.items():
            target[GenParamsMerger.NORMALIZATION_MAPPING.get(key, key)] = value

View File

@@ -36,9 +36,6 @@ class ComfyMetadataParser(RecipeMetadataParser):
# Find all LoraLoader nodes
lora_nodes = {k: v for k, v in data.items() if isinstance(v, dict) and v.get('class_type') == 'LoraLoader'}
if not lora_nodes:
return {"error": "No LoRA information found in this ComfyUI workflow", "loras": []}
# Process each LoraLoader node
for node_id, node in lora_nodes.items():
if 'inputs' not in node or 'lora_name' not in node['inputs']:

View File

@@ -79,26 +79,8 @@ class BaseRecipeRoutes:
return
app.on_startup.append(self.attach_dependencies)
app.on_startup.append(self.prewarm_cache)
self._startup_hooks_registered = True
async def prewarm_cache(self, app: web.Application | None = None) -> None:
"""Pre-load recipe and LoRA caches on startup."""
try:
await self.attach_dependencies(app)
if self.lora_scanner is not None:
await self.lora_scanner.get_cached_data()
hash_index = getattr(self.lora_scanner, "_hash_index", None)
if hash_index is not None and hasattr(hash_index, "_hash_to_path"):
_ = len(hash_index._hash_to_path)
if self.recipe_scanner is not None:
await self.recipe_scanner.get_cached_data(force_refresh=True)
except Exception as exc:
logger.error("Error pre-warming recipe cache: %s", exc, exc_info=True)
def to_route_mapping(self) -> Mapping[str, Callable]:
"""Return a mapping of handler name to coroutine for registrar binding."""

View File

@@ -5,6 +5,7 @@ import asyncio
import json
import logging
import os
import time
from dataclasses import dataclass
from typing import Any, Awaitable, Callable, Dict, Iterable, List, Mapping, Optional
@@ -61,6 +62,37 @@ class ModelPageView:
self._settings = settings_service
self._server_i18n = server_i18n
self._logger = logger
self._app_version = self._get_app_version()
def _get_app_version(self) -> str:
version = "1.0.0"
short_hash = "stable"
try:
import toml
current_file = os.path.abspath(__file__)
# Navigate up from py/routes/handlers/model_handlers.py to project root
root_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(current_file))))
pyproject_path = os.path.join(root_dir, 'pyproject.toml')
if os.path.exists(pyproject_path):
with open(pyproject_path, 'r', encoding='utf-8') as f:
data = toml.load(f)
version = data.get('project', {}).get('version', '1.0.0').replace('v', '')
# Try to get git info for granular cache busting
git_dir = os.path.join(root_dir, '.git')
if os.path.exists(git_dir):
try:
import git
repo = git.Repo(root_dir)
short_hash = repo.head.commit.hexsha[:7]
except Exception:
# Fallback if git is not available or not a repo
pass
except Exception as e:
self._logger.debug(f"Failed to read version info for cache busting: {e}")
return f"{version}-{short_hash}"
async def handle(self, request: web.Request) -> web.Response:
try:
@@ -96,6 +128,7 @@ class ModelPageView:
"request": request,
"folders": [],
"t": self._server_i18n.get_translation,
"version": self._app_version,
}
if not is_initializing:
@@ -128,9 +161,12 @@ class ModelListingHandler:
self._logger = logger
async def get_models(self, request: web.Request) -> web.Response:
start_time = time.perf_counter()
try:
params = self._parse_common_params(request)
result = await self._service.get_paginated_data(**params)
format_start = time.perf_counter()
formatted_result = {
"items": [await self._service.format_response(item) for item in result["items"]],
"total": result["total"],
@@ -138,6 +174,13 @@ class ModelListingHandler:
"page_size": result["page_size"],
"total_pages": result["total_pages"],
}
format_duration = time.perf_counter() - format_start
duration = time.perf_counter() - start_time
self._logger.info(
"Request for %s/list took %.3fs (formatting: %.3fs)",
self._service.model_type, duration, format_duration
)
return web.json_response(formatted_result)
except Exception as exc:
self._logger.error("Error retrieving %ss: %s", self._service.model_type, exc, exc_info=True)

View File

@@ -5,6 +5,7 @@ import json
import logging
import os
import re
import asyncio
import tempfile
from dataclasses import dataclass
from typing import Any, Awaitable, Callable, Dict, List, Mapping, Optional
@@ -23,6 +24,11 @@ from ...services.recipes import (
RecipeValidationError,
)
from ...services.metadata_service import get_default_metadata_provider
from ...utils.civitai_utils import rewrite_preview_url
from ...utils.exif_utils import ExifUtils
from ...recipes.merger import GenParamsMerger
from ...recipes.enrichment import RecipeEnricher
from ...services.websocket_manager import ws_manager as default_ws_manager
Logger = logging.Logger
EnsureDependenciesCallable = Callable[[], Awaitable[None]]
@@ -55,16 +61,25 @@ class RecipeHandlerSet:
"delete_recipe": self.management.delete_recipe,
"get_top_tags": self.query.get_top_tags,
"get_base_models": self.query.get_base_models,
"get_roots": self.query.get_roots,
"get_folders": self.query.get_folders,
"get_folder_tree": self.query.get_folder_tree,
"get_unified_folder_tree": self.query.get_unified_folder_tree,
"share_recipe": self.sharing.share_recipe,
"download_shared_recipe": self.sharing.download_shared_recipe,
"get_recipe_syntax": self.query.get_recipe_syntax,
"update_recipe": self.management.update_recipe,
"reconnect_lora": self.management.reconnect_lora,
"find_duplicates": self.query.find_duplicates,
"move_recipes_bulk": self.management.move_recipes_bulk,
"bulk_delete": self.management.bulk_delete,
"save_recipe_from_widget": self.management.save_recipe_from_widget,
"get_recipes_for_lora": self.query.get_recipes_for_lora,
"scan_recipes": self.query.scan_recipes,
"move_recipe": self.management.move_recipe,
"repair_recipes": self.management.repair_recipes,
"repair_recipe": self.management.repair_recipe,
"get_repair_progress": self.management.get_repair_progress,
}
@@ -148,12 +163,15 @@ class RecipeListingHandler:
page_size = int(request.query.get("page_size", "20"))
sort_by = request.query.get("sort_by", "date")
search = request.query.get("search")
folder = request.query.get("folder")
recursive = request.query.get("recursive", "true").lower() == "true"
search_options = {
"title": request.query.get("search_title", "true").lower() == "true",
"tags": request.query.get("search_tags", "true").lower() == "true",
"lora_name": request.query.get("search_lora_name", "true").lower() == "true",
"lora_model": request.query.get("search_lora_model", "true").lower() == "true",
"prompt": request.query.get("search_prompt", "true").lower() == "true",
}
filters: Dict[str, Any] = {}
@@ -161,6 +179,9 @@ class RecipeListingHandler:
if base_models:
filters["base_model"] = base_models.split(",")
if request.query.get("favorite", "false").lower() == "true":
filters["favorite"] = True
tag_filters: Dict[str, str] = {}
legacy_tags = request.query.get("tags")
if legacy_tags:
@@ -192,6 +213,8 @@ class RecipeListingHandler:
filters=filters,
search_options=search_options,
lora_hash=lora_hash,
folder=folder,
recursive=recursive,
)
for item in result.get("items", []):
@@ -298,6 +321,58 @@ class RecipeQueryHandler:
self._logger.error("Error retrieving base models: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_roots(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
roots = [recipe_scanner.recipes_dir] if recipe_scanner.recipes_dir else []
return web.json_response({"success": True, "roots": roots})
except Exception as exc:
self._logger.error("Error retrieving recipe roots: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_folders(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
folders = await recipe_scanner.get_folders()
return web.json_response({"success": True, "folders": folders})
except Exception as exc:
self._logger.error("Error retrieving recipe folders: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_folder_tree(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
folder_tree = await recipe_scanner.get_folder_tree()
return web.json_response({"success": True, "tree": folder_tree})
except Exception as exc:
self._logger.error("Error retrieving recipe folder tree: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_unified_folder_tree(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
folder_tree = await recipe_scanner.get_folder_tree()
return web.json_response({"success": True, "tree": folder_tree})
except Exception as exc:
self._logger.error("Error retrieving unified recipe folder tree: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_recipes_for_lora(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
@@ -410,6 +485,7 @@ class RecipeManagementHandler:
analysis_service: RecipeAnalysisService,
downloader_factory,
civitai_client_getter: CivitaiClientGetter,
ws_manager=default_ws_manager,
) -> None:
self._ensure_dependencies_ready = ensure_dependencies_ready
self._recipe_scanner_getter = recipe_scanner_getter
@@ -418,6 +494,7 @@ class RecipeManagementHandler:
self._analysis_service = analysis_service
self._downloader_factory = downloader_factory
self._civitai_client_getter = civitai_client_getter
self._ws_manager = ws_manager
async def save_recipe(self, request: web.Request) -> web.Response:
try:
@@ -436,6 +513,7 @@ class RecipeManagementHandler:
name=payload["name"],
tags=payload["tags"],
metadata=payload["metadata"],
extension=payload.get("extension"),
)
return web.json_response(result.payload, status=result.status)
except RecipeValidationError as exc:
@@ -444,6 +522,71 @@ class RecipeManagementHandler:
self._logger.error("Error saving recipe: %s", exc, exc_info=True)
return web.json_response({"error": str(exc)}, status=500)
async def repair_recipes(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
return web.json_response({"success": False, "error": "Recipe scanner unavailable"}, status=503)
# Check if already running
if self._ws_manager.get_recipe_repair_progress():
return web.json_response({"success": False, "error": "Recipe repair already in progress"}, status=409)
async def progress_callback(data):
await self._ws_manager.broadcast_recipe_repair_progress(data)
# Run in background to avoid timeout
async def run_repair():
try:
await recipe_scanner.repair_all_recipes(
progress_callback=progress_callback
)
except Exception as e:
self._logger.error(f"Error in recipe repair task: {e}", exc_info=True)
await self._ws_manager.broadcast_recipe_repair_progress({
"status": "error",
"error": str(e)
})
finally:
# Keep the final status for a while so the UI can see it
await asyncio.sleep(5)
self._ws_manager.cleanup_recipe_repair_progress()
asyncio.create_task(run_repair())
return web.json_response({"success": True, "message": "Recipe repair started"})
except Exception as exc:
self._logger.error("Error starting recipe repair: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def repair_recipe(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
return web.json_response({"success": False, "error": "Recipe scanner unavailable"}, status=503)
recipe_id = request.match_info["recipe_id"]
result = await recipe_scanner.repair_recipe_by_id(recipe_id)
return web.json_response(result)
except RecipeNotFoundError as exc:
return web.json_response({"success": False, "error": str(exc)}, status=404)
except Exception as exc:
self._logger.error("Error repairing single recipe: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_repair_progress(self, request: web.Request) -> web.Response:
try:
progress = self._ws_manager.get_recipe_repair_progress()
if progress:
return web.json_response({"success": True, "progress": progress})
return web.json_response({"success": False, "message": "No repair in progress"}, status=404)
except Exception as exc:
self._logger.error("Error getting repair progress: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def import_remote_recipe(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
@@ -451,10 +594,12 @@ class RecipeManagementHandler:
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
# 1. Parse Parameters
params = request.rel_url.query
image_url = params.get("image_url")
name = params.get("name")
resources_raw = params.get("resources")
if not image_url:
raise RecipeValidationError("Missing required field: image_url")
if not name:
@@ -463,27 +608,93 @@ class RecipeManagementHandler:
raise RecipeValidationError("Missing required field: resources")
checkpoint_entry, lora_entries = self._parse_resources_payload(resources_raw)
gen_params = self._parse_gen_params(params.get("gen_params"))
gen_params_request = self._parse_gen_params(params.get("gen_params"))
# 2. Initial Metadata Construction
metadata: Dict[str, Any] = {
"base_model": params.get("base_model", "") or "",
"loras": lora_entries,
"gen_params": gen_params_request or {},
"source_url": image_url
}
source_path = params.get("source_path")
if source_path:
metadata["source_path"] = source_path
if gen_params is not None:
metadata["gen_params"] = gen_params
# Checkpoint handling
if checkpoint_entry:
metadata["checkpoint"] = checkpoint_entry
gen_params_ref = metadata.setdefault("gen_params", {})
if "checkpoint" not in gen_params_ref:
gen_params_ref["checkpoint"] = checkpoint_entry
# Ensure checkpoint is also in gen_params for consistency if needed by enricher?
# Actually enricher looks at metadata['checkpoint'], so this is fine.
# Try to resolve base model from checkpoint if not explicitly provided
if not metadata["base_model"]:
base_model_from_metadata = await self._resolve_base_model_from_checkpoint(checkpoint_entry)
if base_model_from_metadata:
metadata["base_model"] = base_model_from_metadata
tags = self._parse_tags(params.get("tags"))
image_bytes = await self._download_image_bytes(image_url)
# 3. Download Image
image_bytes, extension, civitai_meta_from_download = await self._download_remote_media(image_url)
# 4. Extract Embedded Metadata
# Note: We still extract this here because Enricher currently expects 'gen_params' to already be populated
# with embedded data if we want it to merge it.
# However, logic in Enricher merges: request > civitai > embedded.
# So we should gather embedded params and put them into the recipe's gen_params (as initial state)
# OR pass them to enricher to handle?
# The interface of Enricher.enrich_recipe takes `recipe` (with gen_params) and `request_params`.
# So let's extract embedded and put it into recipe['gen_params'] but careful not to overwrite request params.
# Actually, `GenParamsMerger` which `Enricher` uses handles 3 layers.
# But `Enricher` interface is: recipe['gen_params'] (as embedded) + request_params + civitai (fetched internally).
# Wait, `Enricher` fetches Civitai info internally based on URL.
# `civitai_meta_from_download` is returned by `_download_remote_media` which might be useful if URL didn't have ID.
# Let's extract embedded metadata first
embedded_gen_params = {}
try:
with tempfile.NamedTemporaryFile(suffix=extension, delete=False) as temp_img:
temp_img.write(image_bytes)
temp_img_path = temp_img.name
try:
raw_embedded = ExifUtils.extract_image_metadata(temp_img_path)
if raw_embedded:
parser = self._analysis_service._recipe_parser_factory.create_parser(raw_embedded)
if parser:
parsed_embedded = await parser.parse_metadata(raw_embedded, recipe_scanner=recipe_scanner)
if parsed_embedded and "gen_params" in parsed_embedded:
embedded_gen_params = parsed_embedded["gen_params"]
else:
embedded_gen_params = {"raw_metadata": raw_embedded}
finally:
if os.path.exists(temp_img_path):
os.unlink(temp_img_path)
except Exception as exc:
self._logger.warning("Failed to extract embedded metadata during import: %s", exc)
# Pre-populate gen_params with embedded data so Enricher treats it as the "base" layer
if embedded_gen_params:
# Merge embedded into existing gen_params (which currently only has request params if any)
# But wait, we want request params to override everything.
# So we should set recipe['gen_params'] = embedded, and pass request params to enricher.
metadata["gen_params"] = embedded_gen_params
# 5. Enrich with unified logic
# This will fetch Civitai info (if URL matches) and merge: request > civitai > embedded
civitai_client = self._civitai_client_getter()
await RecipeEnricher.enrich_recipe(
recipe=metadata,
civitai_client=civitai_client,
request_params=gen_params_request # Pass explicit request params here to override
)
# If we got civitai_meta from download but Enricher didn't fetch it (e.g. not a civitai URL or failed),
# we might want to manually merge it?
# But usually `import_remote_recipe` is used with Civitai URLs.
# For now, relying on Enricher's internal fetch is consistent with repair.
result = await self._persistence_service.save_recipe(
recipe_scanner=recipe_scanner,
@@ -492,6 +703,7 @@ class RecipeManagementHandler:
name=name,
tags=tags,
metadata=metadata,
extension=extension,
)
return web.json_response(result.payload, status=result.status)
except RecipeValidationError as exc:
@@ -541,6 +753,64 @@ class RecipeManagementHandler:
self._logger.error("Error updating recipe: %s", exc, exc_info=True)
return web.json_response({"error": str(exc)}, status=500)
async def move_recipe(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
data = await request.json()
recipe_id = data.get("recipe_id")
target_path = data.get("target_path")
if not recipe_id or not target_path:
return web.json_response(
{"success": False, "error": "recipe_id and target_path are required"}, status=400
)
result = await self._persistence_service.move_recipe(
recipe_scanner=recipe_scanner,
recipe_id=str(recipe_id),
target_path=str(target_path),
)
return web.json_response(result.payload, status=result.status)
except RecipeValidationError as exc:
return web.json_response({"success": False, "error": str(exc)}, status=400)
except RecipeNotFoundError as exc:
return web.json_response({"success": False, "error": str(exc)}, status=404)
except Exception as exc:
self._logger.error("Error moving recipe: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def move_recipes_bulk(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
recipe_scanner = self._recipe_scanner_getter()
if recipe_scanner is None:
raise RuntimeError("Recipe scanner unavailable")
data = await request.json()
recipe_ids = data.get("recipe_ids") or []
target_path = data.get("target_path")
if not recipe_ids or not target_path:
return web.json_response(
{"success": False, "error": "recipe_ids and target_path are required"}, status=400
)
result = await self._persistence_service.move_recipes_bulk(
recipe_scanner=recipe_scanner,
recipe_ids=recipe_ids,
target_path=str(target_path),
)
return web.json_response(result.payload, status=result.status)
except RecipeValidationError as exc:
return web.json_response({"success": False, "error": str(exc)}, status=400)
except RecipeNotFoundError as exc:
return web.json_response({"success": False, "error": str(exc)}, status=404)
except Exception as exc:
self._logger.error("Error moving recipes in bulk: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def reconnect_lora(self, request: web.Request) -> web.Response:
try:
await self._ensure_dependencies_ready()
@@ -622,6 +892,7 @@ class RecipeManagementHandler:
name: Optional[str] = None
tags: list[str] = []
metadata: Optional[Dict[str, Any]] = None
extension: Optional[str] = None
while True:
field = await reader.next()
@@ -652,6 +923,8 @@ class RecipeManagementHandler:
metadata = json.loads(metadata_text)
except Exception:
metadata = {}
elif field.name == "extension":
extension = await field.text()
return {
"image_bytes": image_bytes,
@@ -659,6 +932,7 @@ class RecipeManagementHandler:
"name": name,
"tags": tags,
"metadata": metadata,
"extension": extension,
}
def _parse_tags(self, tag_text: Optional[str]) -> list[str]:
@@ -729,7 +1003,7 @@ class RecipeManagementHandler:
"exclude": False,
}
async def _download_image_bytes(self, image_url: str) -> bytes:
async def _download_remote_media(self, image_url: str) -> tuple[bytes, str]:
civitai_client = self._civitai_client_getter()
downloader = await self._downloader_factory()
temp_path = None
@@ -744,15 +1018,31 @@ class RecipeManagementHandler:
image_info = await civitai_client.get_image_info(civitai_match.group(1))
if not image_info:
raise RecipeDownloadError("Failed to fetch image information from Civitai")
download_url = image_info.get("url")
if not download_url:
media_url = image_info.get("url")
if not media_url:
raise RecipeDownloadError("No image URL found in Civitai response")
# Use optimized preview URLs if possible
media_type = image_info.get("type")
rewritten_url, _ = rewrite_preview_url(media_url, media_type=media_type)
if rewritten_url:
download_url = rewritten_url
else:
download_url = media_url
success, result = await downloader.download_file(download_url, temp_path, use_auth=False)
if not success:
raise RecipeDownloadError(f"Failed to download image: {result}")
# Extract extension from URL
url_path = download_url.split('?')[0].split('#')[0]
extension = os.path.splitext(url_path)[1].lower()
if not extension:
extension = ".webp" # Default to webp if unknown
with open(temp_path, "rb") as file_obj:
return file_obj.read()
return file_obj.read(), extension, image_info.get("meta") if civitai_match and image_info else None
except RecipeDownloadError:
raise
except RecipeValidationError:
@@ -766,6 +1056,7 @@ class RecipeManagementHandler:
except FileNotFoundError:
pass
def _safe_int(self, value: Any) -> int:
try:
return int(value)

View File

@@ -27,16 +27,25 @@ ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
RouteDefinition("DELETE", "/api/lm/recipe/{recipe_id}", "delete_recipe"),
RouteDefinition("GET", "/api/lm/recipes/top-tags", "get_top_tags"),
RouteDefinition("GET", "/api/lm/recipes/base-models", "get_base_models"),
RouteDefinition("GET", "/api/lm/recipes/roots", "get_roots"),
RouteDefinition("GET", "/api/lm/recipes/folders", "get_folders"),
RouteDefinition("GET", "/api/lm/recipes/folder-tree", "get_folder_tree"),
RouteDefinition("GET", "/api/lm/recipes/unified-folder-tree", "get_unified_folder_tree"),
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/share", "share_recipe"),
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/share/download", "download_shared_recipe"),
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/syntax", "get_recipe_syntax"),
RouteDefinition("GET", "/api/lm/recipes/syntax", "get_recipe_syntax"),
RouteDefinition("PUT", "/api/lm/recipe/{recipe_id}/update", "update_recipe"),
RouteDefinition("POST", "/api/lm/recipe/move", "move_recipe"),
RouteDefinition("POST", "/api/lm/recipes/move-bulk", "move_recipes_bulk"),
RouteDefinition("POST", "/api/lm/recipe/lora/reconnect", "reconnect_lora"),
RouteDefinition("GET", "/api/lm/recipes/find-duplicates", "find_duplicates"),
RouteDefinition("POST", "/api/lm/recipes/bulk-delete", "bulk_delete"),
RouteDefinition("POST", "/api/lm/recipes/save-from-widget", "save_recipe_from_widget"),
RouteDefinition("GET", "/api/lm/recipes/for-lora", "get_recipes_for_lora"),
RouteDefinition("GET", "/api/lm/recipes/scan", "scan_recipes"),
RouteDefinition("POST", "/api/lm/recipes/repair", "repair_recipes"),
RouteDefinition("POST", "/api/lm/recipe/{recipe_id}/repair", "repair_recipe"),
RouteDefinition("GET", "/api/lm/recipes/repair-progress", "get_repair_progress"),
)

View File

@@ -3,6 +3,7 @@ import asyncio
from typing import Any, Dict, List, Optional, Type, TYPE_CHECKING
import logging
import os
import time
from ..utils.constants import VALID_LORA_TYPES
from ..utils.models import BaseModelMetadata
@@ -80,6 +81,7 @@ class BaseModelService(ABC):
**kwargs,
) -> Dict:
"""Get paginated and filtered model data"""
overall_start = time.perf_counter()
sort_params = self.cache_repository.parse_sort(sort_by)
if sort_params.key == 'usage':
@@ -87,6 +89,12 @@ class BaseModelService(ABC):
else:
sorted_data = await self.cache_repository.fetch_sorted(sort_params)
t0 = time.perf_counter()
sorted_data = await self.cache_repository.fetch_sorted(sort_params)
fetch_duration = time.perf_counter() - t0
initial_count = len(sorted_data)
t1 = time.perf_counter()
if hash_filters:
filtered_data = await self._apply_hash_filters(sorted_data, hash_filters)
else:
@@ -116,17 +124,25 @@ class BaseModelService(ABC):
if allow_selling_generated_content is not None:
filtered_data = await self._apply_allow_selling_filter(filtered_data, allow_selling_generated_content)
filter_duration = time.perf_counter() - t1
post_filter_count = len(filtered_data)
annotated_for_filter: Optional[List[Dict]] = None
t2 = time.perf_counter()
if update_available_only:
annotated_for_filter = await self._annotate_update_flags(filtered_data)
filtered_data = [
item for item in annotated_for_filter
if item.get('update_available')
]
update_filter_duration = time.perf_counter() - t2
final_count = len(filtered_data)
t3 = time.perf_counter()
paginated = self._paginate(filtered_data, page, page_size)
pagination_duration = time.perf_counter() - t3
t4 = time.perf_counter()
if update_available_only:
# Items already include update flags thanks to the pre-filter annotation.
paginated['items'] = list(paginated['items'])
@@ -134,6 +150,16 @@ class BaseModelService(ABC):
paginated['items'] = await self._annotate_update_flags(
paginated['items'],
)
annotate_duration = time.perf_counter() - t4
overall_duration = time.perf_counter() - overall_start
logger.info(
"%s.get_paginated_data took %.3fs (fetch: %.3fs, filter: %.3fs, update_filter: %.3fs, pagination: %.3fs, annotate: %.3fs). "
"Counts: initial=%d, post_filter=%d, final=%d",
self.__class__.__name__, overall_duration, fetch_duration, filter_duration,
update_filter_duration, pagination_duration, annotate_duration,
initial_count, post_filter_count, final_count
)
return paginated
async def _fetch_with_usage_sort(self, sort_params):

View File

@@ -1,4 +1,8 @@
import asyncio
import time
import logging
logger = logging.getLogger(__name__)
from typing import Any, Dict, List, Optional, Tuple
from dataclasses import dataclass, field
from operator import itemgetter
@@ -215,24 +219,25 @@ class ModelCache:
def _sort_data(self, data: List[Dict], sort_key: str, order: str) -> List[Dict]:
"""Sort data by sort_key and order"""
start_time = time.perf_counter()
reverse = (order == 'desc')
if sort_key == 'name':
# Natural sort by configured display name, case-insensitive
return natsorted(
result = natsorted(
data,
key=lambda x: self._get_display_name(x).lower(),
reverse=reverse
)
elif sort_key == 'date':
# Sort by modified timestamp
return sorted(
result = sorted(
data,
key=itemgetter('modified'),
reverse=reverse
)
elif sort_key == 'size':
# Sort by file size
return sorted(
result = sorted(
data,
key=itemgetter('size'),
reverse=reverse
@@ -249,16 +254,28 @@ class ModelCache:
)
else:
# Fallback: no sort
return list(data)
result = list(data)
duration = time.perf_counter() - start_time
if duration > 0.05:
logger.info("ModelCache._sort_data(%s, %s) for %d items took %.3fs", sort_key, order, len(data), duration)
return result
async def get_sorted_data(self, sort_key: str = 'name', order: str = 'asc') -> List[Dict]:
"""Get sorted data by sort_key and order, using cache if possible"""
async with self._lock:
if (sort_key, order) == self._last_sort:
return self._last_sorted_data
start_time = time.perf_counter()
sorted_data = self._sort_data(self.raw_data, sort_key, order)
self._last_sort = (sort_key, order)
self._last_sorted_data = sorted_data
duration = time.perf_counter() - start_time
if duration > 0.1:
logger.debug("ModelCache.get_sorted_data(%s, %s) took %.3fs", sort_key, order, duration)
return sorted_data
async def update_name_display_mode(self, display_mode: str) -> None:

View File

@@ -5,6 +5,10 @@ from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple
from ..utils.constants import NSFW_LEVELS
from ..utils.utils import fuzzy_match as default_fuzzy_match
import time
import logging
logger = logging.getLogger(__name__)
DEFAULT_CIVITAI_MODEL_TYPE = "LORA"
@@ -115,22 +119,33 @@ class ModelFilterSet:
def apply(self, data: Iterable[Dict[str, Any]], criteria: FilterCriteria) -> List[Dict[str, Any]]:
"""Return items that satisfy the provided criteria."""
overall_start = time.perf_counter()
items = list(data)
initial_count = len(items)
if self._settings.get("show_only_sfw", False):
t0 = time.perf_counter()
threshold = self._nsfw_levels.get("R", 0)
items = [
item for item in items
if not item.get("preview_nsfw_level") or item.get("preview_nsfw_level") < threshold
]
sfw_duration = time.perf_counter() - t0
else:
sfw_duration = 0
favorites_duration = 0
if criteria.favorites_only:
t0 = time.perf_counter()
items = [item for item in items if item.get("favorite", False)]
favorites_duration = time.perf_counter() - t0
folder_duration = 0
folder = criteria.folder
options = criteria.search_options or {}
recursive = bool(options.get("recursive", True))
if folder is not None:
t0 = time.perf_counter()
if recursive:
if folder:
folder_with_sep = f"{folder}/"
@@ -140,13 +155,20 @@ class ModelFilterSet:
]
else:
items = [item for item in items if item.get("folder") == folder]
folder_duration = time.perf_counter() - t0
base_models_duration = 0
base_models = criteria.base_models or []
if base_models:
t0 = time.perf_counter()
base_model_set = set(base_models)
items = [item for item in items if item.get("base_model") in base_model_set]
base_models_duration = time.perf_counter() - t0
tags_duration = 0
tag_filters = criteria.tags or {}
if tag_filters:
t0 = time.perf_counter()
include_tags = set()
exclude_tags = set()
if isinstance(tag_filters, dict):
@@ -161,18 +183,32 @@ class ModelFilterSet:
include_tags = {tag for tag in tag_filters if tag}
if include_tags:
def matches_include(item_tags):
if not item_tags and "__no_tags__" in include_tags:
return True
return any(tag in include_tags for tag in (item_tags or []))
items = [
item for item in items
if any(tag in include_tags for tag in (item.get("tags", []) or []))
if matches_include(item.get("tags"))
]
if exclude_tags:
def matches_exclude(item_tags):
if not item_tags and "__no_tags__" in exclude_tags:
return True
return any(tag in exclude_tags for tag in (item_tags or []))
items = [
item for item in items
if not any(tag in exclude_tags for tag in (item.get("tags", []) or []))
if not matches_exclude(item.get("tags"))
]
tags_duration = time.perf_counter() - t0
model_types_duration = 0
model_types = criteria.model_types or []
if model_types:
t0 = time.perf_counter()
normalized_model_types = {
model_type for model_type in (
normalize_civitai_model_type(value) for value in model_types
@@ -184,7 +220,17 @@ class ModelFilterSet:
item for item in items
if normalize_civitai_model_type(resolve_civitai_model_type(item)) in normalized_model_types
]
model_types_duration = time.perf_counter() - t0
duration = time.perf_counter() - overall_start
if duration > 0.1: # Only log if it's potentially slow
logger.info(
"ModelFilterSet.apply took %.3fs (sfw: %.3fs, fav: %.3fs, folder: %.3fs, base: %.3fs, tags: %.3fs, types: %.3fs). "
"Count: %d -> %d",
duration, sfw_duration, favorites_duration, folder_duration,
base_models_duration, tags_duration, model_types_duration,
initial_count, len(items)
)
return items

View File

@@ -7,12 +7,18 @@ from natsort import natsorted
@dataclass
class RecipeCache:
"""Cache structure for Recipe data"""
raw_data: List[Dict]
sorted_by_name: List[Dict]
sorted_by_date: List[Dict]
folders: List[str] | None = None
folder_tree: Dict | None = None
def __post_init__(self):
self._lock = asyncio.Lock()
# Normalize optional metadata containers
self.folders = self.folders or []
self.folder_tree = self.folder_tree or {}
async def resort(self, name_only: bool = False):
"""Resort all cached data views"""

View File

@@ -1,7 +1,9 @@
import os
import logging
from __future__ import annotations
import asyncio
import json
import logging
import os
import time
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
from ..config import config
@@ -14,6 +16,9 @@ from .recipes.errors import RecipeNotFoundError
from ..utils.utils import calculate_recipe_fingerprint, fuzzy_match
from natsort import natsorted
import sys
import re
from ..recipes.merger import GenParamsMerger
from ..recipes.enrichment import RecipeEnricher
logger = logging.getLogger(__name__)
@@ -52,6 +57,8 @@ class RecipeScanner:
cls._instance._civitai_client = None # Will be lazily initialized
return cls._instance
REPAIR_VERSION = 3
def __init__(
self,
lora_scanner: Optional[LoraScanner] = None,
@@ -64,6 +71,7 @@ class RecipeScanner:
self._initialization_task: Optional[asyncio.Task] = None
self._is_initializing = False
self._mutation_lock = asyncio.Lock()
self._post_scan_task: Optional[asyncio.Task] = None
self._resort_tasks: Set[asyncio.Task] = set()
if lora_scanner:
self._lora_scanner = lora_scanner
@@ -84,6 +92,10 @@ class RecipeScanner:
task.cancel()
self._resort_tasks.clear()
if self._post_scan_task and not self._post_scan_task.done():
self._post_scan_task.cancel()
self._post_scan_task = None
self._cache = None
self._initialization_task = None
self._is_initializing = False
@@ -102,19 +114,223 @@ class RecipeScanner:
self._civitai_client = await ServiceRegistry.get_civitai_client()
return self._civitai_client
async def repair_all_recipes(
    self,
    progress_callback: Optional[Callable[[Dict], Any]] = None
) -> Dict[str, Any]:
    """Repair all cached recipes via Civitai/embedded-metadata enrichment.

    Iterates every recipe in the cache under the mutation lock and delegates
    the per-recipe work to ``_repair_single_recipe``.

    Args:
        progress_callback: Optional awaitable callable invoked with a progress
            dict once per recipe and a final ``"completed"`` dict at the end.

    Returns:
        Dict summary with ``repaired``/``skipped``/``errors``/``total`` counts.
    """
    # NOTE(review): ``Callable`` is not among the visible ``typing`` imports;
    # lazy annotations (``from __future__ import annotations``) hide any
    # NameError at runtime — confirm the import exists for type checkers.
    async with self._mutation_lock:
        cache = await self.get_cached_data()
        # Snapshot so concurrent cache mutations don't affect iteration.
        all_recipes = list(cache.raw_data)
        total = len(all_recipes)
        repaired_count = 0
        skipped_count = 0
        errors_count = 0
        civitai_client = await self._get_civitai_client()
        for i, recipe in enumerate(all_recipes):
            try:
                # Report progress before processing each recipe
                if progress_callback:
                    await progress_callback({
                        "status": "processing",
                        "current": i + 1,
                        "total": total,
                        "recipe_name": recipe.get("name", "Unknown")
                    })
                if await self._repair_single_recipe(recipe, civitai_client):
                    repaired_count += 1
                else:
                    skipped_count += 1
            except Exception as e:
                # A single bad recipe must not abort the whole pass.
                logger.error(f"Error repairing recipe {recipe.get('file_path')}: {e}")
                errors_count += 1
        # Final progress update
        if progress_callback:
            await progress_callback({
                "status": "completed",
                "repaired": repaired_count,
                "skipped": skipped_count,
                "errors": errors_count,
                "total": total
            })
        return {
            "success": True,
            "repaired": repaired_count,
            "skipped": skipped_count,
            "errors": errors_count,
            "total": total
        }
async def repair_recipe_by_id(self, recipe_id: str) -> Dict[str, Any]:
    """Repair a single recipe identified by ``recipe_id``.

    Args:
        recipe_id: ID of the recipe to repair.

    Returns:
        Dict summary of the repair result. ``recipe`` holds the formatted
        recipe when repaired, otherwise the raw cache entry.

    Raises:
        RecipeNotFoundError: If no cached recipe matches ``recipe_id``.
    """
    async with self._mutation_lock:
        # Get raw recipe from cache directly to avoid formatted fields
        cache = await self.get_cached_data()
        recipe = next((r for r in cache.raw_data if str(r.get('id', '')) == recipe_id), None)
        if not recipe:
            raise RecipeNotFoundError(f"Recipe {recipe_id} not found")
        civitai_client = await self._get_civitai_client()
        success = await self._repair_single_recipe(recipe, civitai_client)
        # If successfully repaired, we should return the formatted version for the UI
        return {
            "success": True,
            "repaired": 1 if success else 0,
            "skipped": 0 if success else 1,
            "recipe": await self.get_recipe_by_id(recipe_id) if success else recipe
        }
async def _repair_single_recipe(self, recipe: Dict[str, Any], civitai_client: Any) -> bool:
    """Repair a single recipe object in place.

    Args:
        recipe: The recipe dictionary to repair (modified in-place).
        civitai_client: Civitai client used for remote metadata lookups.

    Returns:
        bool: True when the recipe was processed (enriched and/or stamped
        with the current repair version), False when it was already at the
        latest ``REPAIR_VERSION`` and therefore skipped.
    """
    # 1. Skip if already at the latest repair version.
    if recipe.get("repair_version", 0) >= self.REPAIR_VERSION:
        return False
    # 2. Identify whether enrichment is actually needed.
    checkpoint = recipe.get("checkpoint")
    has_checkpoint = bool(checkpoint and checkpoint.get("name"))
    gen_params = recipe.get("gen_params", {})
    has_prompt = bool(gen_params.get("prompt"))
    if has_checkpoint and has_prompt:
        # Nothing to enrich: just stamp the current version so this recipe
        # is not re-examined until the version is bumped again.
        recipe["repair_version"] = self.REPAIR_VERSION
        await self._save_recipe_persistently(recipe)
        return True
    # 3. Delegate the actual repair/enrichment work. Failures are logged but
    # do not prevent version stamping below.
    try:
        await RecipeEnricher.enrich_recipe(recipe, civitai_client)
    except Exception as e:
        logger.error(f"Error enriching recipe {recipe.get('id')}: {e}")
    # 4. Stamp and persist unconditionally: the step-1 guard guarantees the
    # stored version is outdated at this point, so even a failed or no-op
    # enrichment is recorded to avoid repeated attempts on every pass.
    recipe["repair_version"] = self.REPAIR_VERSION
    await self._save_recipe_persistently(recipe)
    return True
async def _save_recipe_persistently(self, recipe: Dict[str, Any]) -> bool:
    """Persist a recipe to its JSON file and, when possible, image EXIF.

    The passed ``recipe`` dict is sanitized and then overwritten in place
    (``clear()`` + ``update()``) so the cached object matches exactly what
    was written to disk.

    Returns:
        bool: True on success; False when the recipe has no id, its JSON
        path cannot be located, or writing fails.
    """
    recipe_id = recipe.get("id")
    if not recipe_id:
        return False
    recipe_json_path = await self.get_recipe_json_path(recipe_id)
    if not recipe_json_path:
        return False
    try:
        # 1. Sanitize for storage (remove runtime convenience fields)
        clean_recipe = self._sanitize_recipe_for_storage(recipe)
        # 2. Update the original dictionary so that we persist the clean version
        # globally if needed, effectively overwriting it in-place.
        recipe.clear()
        recipe.update(clean_recipe)
        # 3. Save JSON
        with open(recipe_json_path, 'w', encoding='utf-8') as f:
            json.dump(recipe, f, indent=4, ensure_ascii=False)
        # 4. Update EXIF if image exists
        image_path = recipe.get('file_path')
        if image_path and os.path.exists(image_path):
            # Imported lazily to avoid a module-level dependency cycle.
            from ..utils.exif_utils import ExifUtils
            ExifUtils.append_recipe_metadata(image_path, recipe)
        return True
    except Exception as e:
        logger.error(f"Error persisting recipe {recipe_id}: {e}")
        return False
def _sanitize_recipe_for_storage(self, recipe: Dict[str, Any]) -> Dict[str, Any]:
"""Create a clean copy of the recipe without runtime convenience fields."""
import copy
clean = copy.deepcopy(recipe)
# 0. Clean top-level runtime fields
for key in ("file_url", "created_date_formatted", "modified_formatted"):
clean.pop(key, None)
# 1. Clean LORAs
if "loras" in clean and isinstance(clean["loras"], list):
for lora in clean["loras"]:
# Fields to remove (runtime only)
for key in ("inLibrary", "preview_url", "localPath"):
lora.pop(key, None)
# Normalize weight/strength if mapping is desired (standard in persistence_service)
if "weight" in lora and "strength" not in lora:
lora["strength"] = float(lora.pop("weight"))
# 2. Clean Checkpoint
if "checkpoint" in clean and isinstance(clean["checkpoint"], dict):
cp = clean["checkpoint"]
# Fields to remove (runtime only)
for key in ("inLibrary", "localPath", "preview_url", "thumbnailUrl", "size", "downloadUrl"):
cp.pop(key, None)
return clean
async def initialize_in_background(self) -> None:
"""Initialize cache in background using thread pool"""
try:
await self._wait_for_lora_scanner()
# Set initial empty cache to avoid None reference errors
if self._cache is None:
self._cache = RecipeCache(
raw_data=[],
sorted_by_name=[],
sorted_by_date=[]
sorted_by_date=[],
folders=[],
folder_tree={},
)
# Mark as initializing to prevent concurrent initializations
self._is_initializing = True
self._initialization_task = asyncio.current_task()
try:
# Start timer
@@ -126,11 +342,14 @@ class RecipeScanner:
None, # Use default thread pool
self._initialize_recipe_cache_sync # Run synchronous version in thread
)
if cache is not None:
self._cache = cache
# Calculate elapsed time and log it
elapsed_time = time.time() - start_time
recipe_count = len(cache.raw_data) if cache and hasattr(cache, 'raw_data') else 0
logger.info(f"Recipe cache initialized in {elapsed_time:.2f} seconds. Found {recipe_count} recipes")
self._schedule_post_scan_enrichment()
finally:
# Mark initialization as complete regardless of outcome
self._is_initializing = False
@@ -207,6 +426,7 @@ class RecipeScanner:
# Update cache with the collected data
self._cache.raw_data = recipes
self._update_folder_metadata(self._cache)
# Create a simplified resort function that doesn't use await
if hasattr(self._cache, "resort"):
@@ -237,12 +457,97 @@ class RecipeScanner:
# Clean up the event loop
loop.close()
async def _wait_for_lora_scanner(self) -> None:
    """Ensure the LoRA scanner has initialized before recipe enrichment.

    Resolution order: return immediately when the scanner cache already
    exists, otherwise await any in-flight initialization task, and finally
    fall back to requesting a fresh initialization. All failures are
    swallowed so recipe scanning proceeds on a best-effort basis.
    """
    if not getattr(self, "_lora_scanner", None):
        return
    lora_scanner = self._lora_scanner
    cache_ready = getattr(lora_scanner, "_cache", None) is not None
    # If cache is already available, we can proceed
    if cache_ready:
        return
    # Await an existing initialization task if present
    task = getattr(lora_scanner, "_initialization_task", None)
    if task and hasattr(task, "done") and not task.done():
        try:
            await task
        except Exception:  # pragma: no cover - defensive guard
            pass
    # Re-check: the task (or a completed one) may have populated the cache.
    if getattr(lora_scanner, "_cache", None) is not None:
        return
    # Otherwise, request initialization and proceed once it completes
    try:
        await lora_scanner.initialize_in_background()
    except Exception as exc:  # pragma: no cover - defensive guard
        logger.debug("Recipe Scanner: LoRA init request failed: %s", exc)
def _schedule_post_scan_enrichment(self) -> None:
    """Kick off a non-blocking enrichment pass to fill remote metadata.

    No-ops when there is no running event loop (e.g. when invoked from a
    worker thread) or when a previous enrichment task is still in flight.
    The task handle is kept in ``self._post_scan_task`` so it can be
    cancelled on teardown.
    """
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        # Not inside an event loop; enrichment will happen on a later scan.
        return
    if self._post_scan_task and not self._post_scan_task.done():
        return
    async def _run_enrichment():
        try:
            await self._enrich_cache_metadata()
        except asyncio.CancelledError:
            raise
        except Exception as exc:  # pragma: no cover - defensive guard
            logger.error("Recipe Scanner: error during post-scan enrichment: %s", exc, exc_info=True)
    self._post_scan_task = loop.create_task(_run_enrichment(), name="recipe_cache_enrichment")
async def _enrich_cache_metadata(self) -> None:
    """Perform remote metadata enrichment after the initial scan.

    Walks a snapshot of the cached recipes, refreshing LoRA information and
    best-effort persisting any updated recipe back to its JSON file. Yields
    to the event loop periodically so the pass stays non-blocking, then
    resorts the cache once at the end.
    """
    cache = self._cache
    if cache is None or not getattr(cache, "raw_data", None):
        return
    for index, recipe in enumerate(list(cache.raw_data)):
        try:
            metadata_updated = await self._update_lora_information(recipe)
            if metadata_updated:
                recipe_id = recipe.get("id")
                if recipe_id:
                    # Resolve the JSON path folder-aware: recipes may live in
                    # subfolders, so joining recipes_dir with the bare id
                    # would miss them and silently skip persistence.
                    recipe_path = await self.get_recipe_json_path(recipe_id)
                    if recipe_path:
                        try:
                            self._write_recipe_file(recipe_path, recipe)
                        except Exception as exc:  # pragma: no cover - best-effort persistence
                            logger.debug("Recipe Scanner: could not persist recipe %s: %s", recipe_id, exc)
        except asyncio.CancelledError:
            raise
        except Exception as exc:  # pragma: no cover - defensive logging
            logger.error("Recipe Scanner: error enriching recipe %s: %s", recipe.get("id"), exc, exc_info=True)
        # Yield control every 10 recipes so other tasks can run.
        if index % 10 == 0:
            await asyncio.sleep(0)
    try:
        await cache.resort()
    except Exception as exc:  # pragma: no cover - defensive logging
        logger.debug("Recipe Scanner: error resorting cache after enrichment: %s", exc)
def _schedule_resort(self, *, name_only: bool = False) -> None:
"""Schedule a background resort of the recipe cache."""
if not self._cache:
return
# Keep folder metadata up to date alongside sort order
self._update_folder_metadata()
async def _resort_wrapper() -> None:
try:
await self._cache.resort(name_only=name_only)
@@ -253,6 +558,75 @@ class RecipeScanner:
self._resort_tasks.add(task)
task.add_done_callback(lambda finished: self._resort_tasks.discard(finished))
def _calculate_folder(self, recipe_path: str) -> str:
"""Calculate a normalized folder path relative to ``recipes_dir``."""
recipes_dir = self.recipes_dir
if not recipes_dir:
return ""
try:
recipe_dir = os.path.dirname(os.path.normpath(recipe_path))
relative_dir = os.path.relpath(recipe_dir, recipes_dir)
if relative_dir in (".", ""):
return ""
return relative_dir.replace(os.path.sep, "/")
except Exception:
return ""
def _build_folder_tree(self, folders: list[str]) -> dict:
"""Build a nested folder tree structure from relative folder paths."""
tree: dict[str, dict] = {}
for folder in folders:
if not folder:
continue
parts = folder.split("/")
current_level = tree
for part in parts:
if part not in current_level:
current_level[part] = {}
current_level = current_level[part]
return tree
def _update_folder_metadata(self, cache: RecipeCache | None = None) -> None:
"""Ensure folder lists and tree metadata are synchronized with cache contents."""
cache = cache or self._cache
if cache is None:
return
folders: set[str] = set()
for item in cache.raw_data:
folder_value = item.get("folder", "")
if folder_value is None:
folder_value = ""
if folder_value == ".":
folder_value = ""
normalized = str(folder_value).replace("\\", "/")
item["folder"] = normalized
folders.add(normalized)
cache.folders = sorted(folders, key=lambda entry: entry.lower())
cache.folder_tree = self._build_folder_tree(cache.folders)
async def get_folders(self) -> list[str]:
    """Return a sorted list of recipe folders relative to the recipes root.

    Folder metadata is refreshed from the cache first, so the result
    reflects any recipes added or removed since the last scan.
    """
    cache = await self.get_cached_data()
    self._update_folder_metadata(cache)
    return cache.folders
async def get_folder_tree(self) -> dict:
    """Return a hierarchical tree of recipe folders for sidebar navigation.

    Folder metadata is refreshed from the cache first, so the tree reflects
    the current set of recipe folders.
    """
    cache = await self.get_cached_data()
    self._update_folder_metadata(cache)
    return cache.folder_tree
@property
def recipes_dir(self) -> str:
"""Get path to recipes directory"""
@@ -269,11 +643,14 @@ class RecipeScanner:
"""Get cached recipe data, refresh if needed"""
# If cache is already initialized and no refresh is needed, return it immediately
if self._cache is not None and not force_refresh:
self._update_folder_metadata()
return self._cache
# If another initialization is already in progress, wait for it to complete
if self._is_initializing and not force_refresh:
return self._cache or RecipeCache(raw_data=[], sorted_by_name=[], sorted_by_date=[])
return self._cache or RecipeCache(
raw_data=[], sorted_by_name=[], sorted_by_date=[], folders=[], folder_tree={}
)
# If force refresh is requested, initialize the cache directly
if force_refresh:
@@ -291,11 +668,14 @@ class RecipeScanner:
self._cache = RecipeCache(
raw_data=raw_data,
sorted_by_name=[],
sorted_by_date=[]
sorted_by_date=[],
folders=[],
folder_tree={},
)
# Resort cache
await self._cache.resort()
self._update_folder_metadata(self._cache)
return self._cache
@@ -305,7 +685,9 @@ class RecipeScanner:
self._cache = RecipeCache(
raw_data=[],
sorted_by_name=[],
sorted_by_date=[]
sorted_by_date=[],
folders=[],
folder_tree={},
)
return self._cache
finally:
@@ -316,7 +698,9 @@ class RecipeScanner:
logger.error(f"Unexpected error in get_cached_data: {e}")
# Return the cache (may be empty or partially initialized)
return self._cache or RecipeCache(raw_data=[], sorted_by_name=[], sorted_by_date=[])
return self._cache or RecipeCache(
raw_data=[], sorted_by_name=[], sorted_by_date=[], folders=[], folder_tree={}
)
async def refresh_cache(self, force: bool = False) -> RecipeCache:
"""Public helper to refresh or return the recipe cache."""
@@ -331,6 +715,7 @@ class RecipeScanner:
cache = await self.get_cached_data()
await cache.add_recipe(recipe_data, resort=False)
self._update_folder_metadata(cache)
self._schedule_resort()
async def remove_recipe(self, recipe_id: str) -> bool:
@@ -344,6 +729,7 @@ class RecipeScanner:
if removed is None:
return False
self._update_folder_metadata(cache)
self._schedule_resort()
return True
@@ -429,6 +815,9 @@ class RecipeScanner:
if path_updated:
self._write_recipe_file(recipe_path, recipe_data)
# Track folder placement relative to recipes directory
recipe_data['folder'] = recipe_data.get('folder') or self._calculate_folder(recipe_path)
# Ensure loras array exists
if 'loras' not in recipe_data:
recipe_data['loras'] = []
@@ -438,7 +827,7 @@ class RecipeScanner:
recipe_data['gen_params'] = {}
# Update lora information with local paths and availability
await self._update_lora_information(recipe_data)
lora_metadata_updated = await self._update_lora_information(recipe_data)
if recipe_data.get('checkpoint'):
checkpoint_entry = self._normalize_checkpoint_entry(recipe_data['checkpoint'])
@@ -459,6 +848,12 @@ class RecipeScanner:
logger.info(f"Added fingerprint to recipe: {recipe_path}")
except Exception as e:
logger.error(f"Error writing updated recipe with fingerprint: {e}")
elif lora_metadata_updated:
# Persist updates such as marking invalid entries as deleted
try:
self._write_recipe_file(recipe_path, recipe_data)
except Exception as e:
logger.error(f"Error writing updated recipe metadata: {e}")
return recipe_data
except Exception as e:
@@ -519,7 +914,13 @@ class RecipeScanner:
logger.warning(f"Marked lora with modelVersionId {model_version_id} as deleted")
metadata_updated = True
else:
logger.debug(f"Could not get hash for modelVersionId {model_version_id}")
# No hash returned; mark as deleted to avoid repeated lookups
lora['isDeleted'] = True
metadata_updated = True
logger.warning(
"Marked lora with modelVersionId %s as deleted after failed hash lookup",
model_version_id,
)
# If has hash but no file_name, look up in lora library
if 'hash' in lora and (not lora.get('file_name') or not lora['file_name']):
@@ -809,7 +1210,7 @@ class RecipeScanner:
return await self._lora_scanner.get_model_info_by_name(name)
async def get_paginated_data(self, page: int, page_size: int, sort_by: str = 'date', search: str = None, filters: dict = None, search_options: dict = None, lora_hash: str = None, bypass_filters: bool = True):
async def get_paginated_data(self, page: int, page_size: int, sort_by: str = 'date', search: str = None, filters: dict = None, search_options: dict = None, lora_hash: str = None, bypass_filters: bool = True, folder: str | None = None, recursive: bool = True):
"""Get paginated and filtered recipe data
Args:
@@ -821,11 +1222,20 @@ class RecipeScanner:
search_options: Dictionary of search options to apply
lora_hash: Optional SHA256 hash of a LoRA to filter recipes by
bypass_filters: If True, ignore other filters when a lora_hash is provided
folder: Optional folder filter relative to recipes directory
recursive: Whether to include recipes in subfolders of the selected folder
"""
cache = await self.get_cached_data()
# Get base dataset
filtered_data = cache.sorted_by_date if sort_by == 'date' else cache.sorted_by_name
sort_field = sort_by.split(':')[0] if ':' in sort_by else sort_by
if sort_field == 'date':
filtered_data = list(cache.sorted_by_date)
elif sort_field == 'name':
filtered_data = list(cache.sorted_by_name)
else:
filtered_data = list(cache.raw_data)
# Apply SFW filtering if enabled
from .settings_manager import get_settings_manager
@@ -856,6 +1266,22 @@ class RecipeScanner:
# Skip further filtering if we're only filtering by LoRA hash with bypass enabled
if not (lora_hash and bypass_filters):
# Apply folder filter before other criteria
if folder is not None:
normalized_folder = folder.strip("/")
def matches_folder(item_folder: str) -> bool:
item_path = (item_folder or "").strip("/")
if recursive:
if not normalized_folder:
return True
return item_path == normalized_folder or item_path.startswith(f"{normalized_folder}/")
return item_path == normalized_folder
filtered_data = [
item for item in filtered_data
if matches_folder(item.get('folder', ''))
]
# Apply search filter
if search:
# Default search options if none provided
@@ -892,6 +1318,14 @@ class RecipeScanner:
if fuzzy_match(str(lora.get('modelName', '')), search):
return True
# Search in prompt and negative_prompt if enabled
if search_options.get('prompt', True) and 'gen_params' in item:
gen_params = item['gen_params']
if fuzzy_match(str(gen_params.get('prompt', '')), search):
return True
if fuzzy_match(str(gen_params.get('negative_prompt', '')), search):
return True
# No match found
return False
@@ -907,6 +1341,13 @@ class RecipeScanner:
if item.get('base_model', '') in filters['base_model']
]
# Filter by favorite
if 'favorite' in filters and filters['favorite']:
filtered_data = [
item for item in filtered_data
if item.get('favorite') is True
]
# Filter by tags
if 'tags' in filters and filters['tags']:
tag_spec = filters['tags']
@@ -925,17 +1366,41 @@ class RecipeScanner:
include_tags = {tag for tag in tag_spec if tag}
if include_tags:
def matches_include(item_tags):
if not item_tags and "__no_tags__" in include_tags:
return True
return any(tag in include_tags for tag in (item_tags or []))
filtered_data = [
item for item in filtered_data
if any(tag in include_tags for tag in (item.get('tags', []) or []))
if matches_include(item.get('tags'))
]
if exclude_tags:
def matches_exclude(item_tags):
if not item_tags and "__no_tags__" in exclude_tags:
return True
return any(tag in exclude_tags for tag in (item_tags or []))
filtered_data = [
item for item in filtered_data
if not any(tag in exclude_tags for tag in (item.get('tags', []) or []))
if not matches_exclude(item.get('tags'))
]
# Apply sorting if not already handled by pre-sorted cache
if ':' in sort_by or sort_field == 'loras_count':
field, order = (sort_by.split(':') + ['desc'])[:2]
reverse = order.lower() == 'desc'
if field == 'name':
filtered_data = natsorted(filtered_data, key=lambda x: x.get('title', '').lower(), reverse=reverse)
elif field == 'date':
# Use modified if available, falling back to created_date
filtered_data.sort(key=lambda x: (x.get('modified', x.get('created_date', 0)), x.get('file_path', '')), reverse=reverse)
elif field == 'loras_count':
filtered_data.sort(key=lambda x: len(x.get('loras', [])), reverse=reverse)
# Calculate pagination
total_items = len(filtered_data)
start_idx = (page - 1) * page_size
@@ -1031,6 +1496,30 @@ class RecipeScanner:
from datetime import datetime
return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
async def get_recipe_json_path(self, recipe_id: str) -> Optional[str]:
    """Locate the recipe JSON file, accounting for folder placement.

    First tries the folder recorded in the cache for this recipe, then
    falls back to a recursive scan of the recipes directory.

    Returns:
        Absolute path to an existing ``<id>.recipe.json`` file, or None
        when the recipes directory is unset or no matching file exists.
    """
    recipes_dir = self.recipes_dir
    if not recipes_dir:
        return None
    cache = await self.get_cached_data()
    folder = ""
    for item in cache.raw_data:
        if str(item.get("id")) == str(recipe_id):
            folder = item.get("folder") or ""
            break
    candidate = os.path.normpath(os.path.join(recipes_dir, folder, f"{recipe_id}.recipe.json"))
    if os.path.exists(candidate):
        return candidate
    # Fallback: the cached folder may be stale — search the whole tree.
    for root, _, files in os.walk(recipes_dir):
        if f"{recipe_id}.recipe.json" in files:
            return os.path.join(root, f"{recipe_id}.recipe.json")
    return None
async def update_recipe_metadata(self, recipe_id: str, metadata: dict) -> bool:
"""Update recipe metadata (like title and tags) in both file system and cache
@@ -1041,13 +1530,9 @@ class RecipeScanner:
Returns:
bool: True if successful, False otherwise
"""
import os
import json
# First, find the recipe JSON file path
recipe_json_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json")
if not os.path.exists(recipe_json_path):
recipe_json_path = await self.get_recipe_json_path(recipe_id)
if not recipe_json_path or not os.path.exists(recipe_json_path):
return False
try:
@@ -1096,8 +1581,8 @@ class RecipeScanner:
if target_name is None:
raise ValueError("target_name must be provided")
recipe_json_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json")
if not os.path.exists(recipe_json_path):
recipe_json_path = await self.get_recipe_json_path(recipe_id)
if not recipe_json_path or not os.path.exists(recipe_json_path):
raise RecipeNotFoundError("Recipe not found")
async with self._mutation_lock:
@@ -1228,71 +1713,56 @@ class RecipeScanner:
# Always use lowercase hash for consistency
hash_value = hash_value.lower()
# Get recipes directory
recipes_dir = self.recipes_dir
if not recipes_dir or not os.path.exists(recipes_dir):
logger.warning(f"Recipes directory not found: {recipes_dir}")
# Get cache
cache = await self.get_cached_data()
if not cache or not cache.raw_data:
return 0, 0
# Check if cache is initialized
cache_initialized = self._cache is not None
cache_updated_count = 0
file_updated_count = 0
cache_updated_count = 0
# Get all recipe JSON files in the recipes directory
recipe_files = []
for root, _, files in os.walk(recipes_dir):
for file in files:
if file.lower().endswith('.recipe.json'):
recipe_files.append(os.path.join(root, file))
# Process each recipe file
for recipe_path in recipe_files:
try:
# Load the recipe data
with open(recipe_path, 'r', encoding='utf-8') as f:
recipe_data = json.load(f)
# Skip if no loras or invalid structure
if not recipe_data or not isinstance(recipe_data, dict) or 'loras' not in recipe_data:
# Find recipes that need updating from the cache
recipes_to_update = []
for recipe in cache.raw_data:
loras = recipe.get('loras', [])
if not isinstance(loras, list):
continue
# Check if any lora has matching hash
file_updated = False
for lora in recipe_data.get('loras', []):
if 'hash' in lora and lora['hash'].lower() == hash_value:
# Update file_name
old_file_name = lora.get('file_name', '')
has_match = False
for lora in loras:
if not isinstance(lora, dict):
continue
if (lora.get('hash') or '').lower() == hash_value:
if lora.get('file_name') != new_file_name:
lora['file_name'] = new_file_name
file_updated = True
logger.info(f"Updated file_name in recipe {recipe_path}: {old_file_name} -> {new_file_name}")
has_match = True
# If updated, save the file
if file_updated:
with open(recipe_path, 'w', encoding='utf-8') as f:
json.dump(recipe_data, f, indent=4, ensure_ascii=False)
file_updated_count += 1
# Also update in cache if it exists
if cache_initialized:
recipe_id = recipe_data.get('id')
if recipe_id:
for cache_item in self._cache.raw_data:
if cache_item.get('id') == recipe_id:
# Replace loras array with updated version
cache_item['loras'] = recipe_data['loras']
if has_match:
recipes_to_update.append(recipe)
cache_updated_count += 1
break
if not recipes_to_update:
return 0, 0
# Persist changes to disk
async with self._mutation_lock:
for recipe in recipes_to_update:
recipe_id = recipe.get('id')
if not recipe_id:
continue
recipe_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json")
try:
self._write_recipe_file(recipe_path, recipe)
file_updated_count += 1
logger.info(f"Updated file_name in recipe {recipe_path}: -> {new_file_name}")
except Exception as e:
logger.error(f"Error updating recipe file {recipe_path}: {e}")
import traceback
traceback.print_exc(file=sys.stderr)
# Resort cache if updates were made
if cache_initialized and cache_updated_count > 0:
await self._cache.resort()
logger.info(f"Resorted recipe cache after updating {cache_updated_count} items")
# We don't necessarily need to resort because LoRA file_name isn't a sort key,
# but we might want to schedule a resort if we're paranoid or if searching relies on sorted state.
# Given it's a rename of a dependency, search results might change if searching by LoRA name.
self._schedule_resort()
return file_updated_count, cache_updated_count

View File

@@ -13,6 +13,7 @@ import numpy as np
from PIL import Image
from ...utils.utils import calculate_recipe_fingerprint
from ...utils.civitai_utils import rewrite_preview_url
from .errors import (
RecipeDownloadError,
RecipeNotFoundError,
@@ -94,18 +95,39 @@ class RecipeAnalysisService:
if civitai_client is None:
raise RecipeServiceError("Civitai client unavailable")
temp_path = self._create_temp_path()
temp_path = None
metadata: Optional[dict[str, Any]] = None
is_video = False
extension = ".jpg" # Default
try:
civitai_match = re.match(r"https://civitai\.com/images/(\d+)", url)
if civitai_match:
image_info = await civitai_client.get_image_info(civitai_match.group(1))
if not image_info:
raise RecipeDownloadError("Failed to fetch image information from Civitai")
image_url = image_info.get("url")
if not image_url:
raise RecipeDownloadError("No image URL found in Civitai response")
is_video = image_info.get("type") == "video"
# Use optimized preview URLs if possible
rewritten_url, _ = rewrite_preview_url(image_url, media_type=image_info.get("type"))
if rewritten_url:
image_url = rewritten_url
if is_video:
# Extract extension from URL
url_path = image_url.split('?')[0].split('#')[0]
extension = os.path.splitext(url_path)[1].lower() or ".mp4"
else:
extension = ".jpg"
temp_path = self._create_temp_path(suffix=extension)
await self._download_image(image_url, temp_path)
metadata = image_info.get("meta") if "meta" in image_info else None
if (
isinstance(metadata, dict)
@@ -114,21 +136,30 @@ class RecipeAnalysisService:
):
metadata = metadata["meta"]
else:
# Basic extension detection for non-Civitai URLs
url_path = url.split('?')[0].split('#')[0]
extension = os.path.splitext(url_path)[1].lower()
if extension in [".mp4", ".webm"]:
is_video = True
else:
extension = ".jpg"
temp_path = self._create_temp_path(suffix=extension)
await self._download_image(url, temp_path)
if metadata is None:
if metadata is None and not is_video:
metadata = self._exif_utils.extract_image_metadata(temp_path)
if not metadata:
return self._metadata_not_found_response(temp_path)
return await self._parse_metadata(
metadata,
metadata or {},
recipe_scanner=recipe_scanner,
image_path=temp_path,
include_image_base64=True,
is_video=is_video,
extension=extension,
)
finally:
if temp_path:
self._safe_cleanup(temp_path)
async def analyze_local_image(
@@ -198,12 +229,16 @@ class RecipeAnalysisService:
recipe_scanner,
image_path: Optional[str],
include_image_base64: bool,
is_video: bool = False,
extension: str = ".jpg",
) -> AnalysisResult:
parser = self._recipe_parser_factory.create_parser(metadata)
if parser is None:
payload = {"error": "No parser found for this image", "loras": []}
if include_image_base64 and image_path:
payload["image_base64"] = self._encode_file(image_path)
payload["is_video"] = is_video
payload["extension"] = extension
return AnalysisResult(payload)
result = await parser.parse_metadata(metadata, recipe_scanner=recipe_scanner)
@@ -211,6 +246,9 @@ class RecipeAnalysisService:
if include_image_base64 and image_path:
result["image_base64"] = self._encode_file(image_path)
result["is_video"] = is_video
result["extension"] = extension
if "error" in result and not result.get("loras"):
return AnalysisResult(result)
@@ -241,8 +279,8 @@ class RecipeAnalysisService:
temp_file.write(data)
return temp_file.name
def _create_temp_path(self) -> str:
with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file:
def _create_temp_path(self, suffix: str = ".jpg") -> str:
with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as temp_file:
return temp_file.name
def _safe_cleanup(self, path: Optional[str]) -> None:

View File

@@ -5,6 +5,7 @@ import base64
import json
import os
import re
import shutil
import time
import uuid
from dataclasses import dataclass
@@ -46,6 +47,7 @@ class RecipePersistenceService:
name: str | None,
tags: Iterable[str],
metadata: Optional[dict[str, Any]],
extension: str | None = None,
) -> PersistenceResult:
"""Persist a user uploaded recipe."""
@@ -64,6 +66,13 @@ class RecipePersistenceService:
os.makedirs(recipes_dir, exist_ok=True)
recipe_id = str(uuid.uuid4())
# Handle video formats by bypassing optimization and metadata embedding
is_video = extension in [".mp4", ".webm"]
if is_video:
optimized_image = resolved_image_bytes
# extension is already set
else:
optimized_image, extension = self._exif_utils.optimize_image(
image_data=resolved_image_bytes,
target_width=self._card_preview_width,
@@ -71,6 +80,7 @@ class RecipePersistenceService:
quality=85,
preserve_metadata=True,
)
image_filename = f"{recipe_id}{extension}"
image_path = os.path.join(recipes_dir, image_filename)
normalized_image_path = os.path.normpath(image_path)
@@ -126,6 +136,7 @@ class RecipePersistenceService:
with open(json_path, "w", encoding="utf-8") as file_obj:
json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)
if not is_video:
self._exif_utils.append_recipe_metadata(normalized_image_path, recipe_data)
matching_recipes = await self._find_matching_recipes(recipe_scanner, fingerprint, exclude_id=recipe_id)
@@ -144,12 +155,8 @@ class RecipePersistenceService:
async def delete_recipe(self, *, recipe_scanner, recipe_id: str) -> PersistenceResult:
"""Delete an existing recipe."""
recipes_dir = recipe_scanner.recipes_dir
if not recipes_dir or not os.path.exists(recipes_dir):
raise RecipeNotFoundError("Recipes directory not found")
recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json")
if not os.path.exists(recipe_json_path):
recipe_json_path = await recipe_scanner.get_recipe_json_path(recipe_id)
if not recipe_json_path or not os.path.exists(recipe_json_path):
raise RecipeNotFoundError("Recipe not found")
with open(recipe_json_path, "r", encoding="utf-8") as file_obj:
@@ -166,9 +173,9 @@ class RecipePersistenceService:
async def update_recipe(self, *, recipe_scanner, recipe_id: str, updates: dict[str, Any]) -> PersistenceResult:
"""Update persisted metadata for a recipe."""
if not any(key in updates for key in ("title", "tags", "source_path", "preview_nsfw_level")):
if not any(key in updates for key in ("title", "tags", "source_path", "preview_nsfw_level", "favorite")):
raise RecipeValidationError(
"At least one field to update must be provided (title or tags or source_path or preview_nsfw_level)"
"At least one field to update must be provided (title or tags or source_path or preview_nsfw_level or favorite)"
)
success = await recipe_scanner.update_recipe_metadata(recipe_id, updates)
@@ -177,6 +184,163 @@ class RecipePersistenceService:
return PersistenceResult({"success": True, "recipe_id": recipe_id, "updates": updates})
def _normalize_target_path(self, recipe_scanner, target_path: str) -> tuple[str, str]:
"""Normalize and validate the target path for recipe moves."""
if not target_path:
raise RecipeValidationError("Target path is required")
recipes_root = recipe_scanner.recipes_dir
if not recipes_root:
raise RecipeNotFoundError("Recipes directory not found")
normalized_target = os.path.normpath(target_path)
recipes_root = os.path.normpath(recipes_root)
if not os.path.isabs(normalized_target):
normalized_target = os.path.normpath(os.path.join(recipes_root, normalized_target))
try:
common_root = os.path.commonpath([normalized_target, recipes_root])
except ValueError as exc:
raise RecipeValidationError("Invalid target path") from exc
if common_root != recipes_root:
raise RecipeValidationError("Target path must be inside the recipes directory")
return normalized_target, recipes_root
async def _move_recipe_files(
    self,
    *,
    recipe_scanner,
    recipe_id: str,
    normalized_target: str,
    recipes_root: str,
) -> dict[str, Any]:
    """Move the recipe's JSON and preview image into the normalized target.

    Args:
        recipe_scanner: Scanner providing recipe lookups and metadata updates.
        recipe_id: ID of the recipe to move.
        normalized_target: Normalized destination directory (already
            validated to lie inside ``recipes_root``).
        recipes_root: Normalized recipes root, used to compute the recipe's
            new relative ``folder`` value.

    Returns:
        A result dict with the old/new file paths and new folder; when the
        recipe already lives in the target directory, a short-circuit result
        with an explanatory message is returned and no files are touched.

    Raises:
        RecipeNotFoundError: if the recipe JSON cannot be located, its
            metadata cannot be loaded, or the metadata update after the
            move fails.
    """
    recipe_json_path = await recipe_scanner.get_recipe_json_path(recipe_id)
    if not recipe_json_path or not os.path.exists(recipe_json_path):
        raise RecipeNotFoundError("Recipe not found")
    recipe_data = await recipe_scanner.get_recipe_by_id(recipe_id)
    if not recipe_data:
        raise RecipeNotFoundError("Recipe not found")
    current_json_dir = os.path.dirname(recipe_json_path)
    # The preview image path may be absent from the metadata; only normalize
    # it when present so the image move below can be skipped cleanly.
    normalized_image_path = os.path.normpath(recipe_data.get("file_path") or "") if recipe_data.get("file_path") else None
    os.makedirs(normalized_target, exist_ok=True)
    # No-op move: the JSON already lives in the target directory.
    if os.path.normpath(current_json_dir) == normalized_target:
        return {
            "success": True,
            "message": "Recipe is already in the target folder",
            "recipe_id": recipe_id,
            "original_file_path": recipe_data.get("file_path"),
            "new_file_path": recipe_data.get("file_path"),
        }
    new_json_path = os.path.normpath(os.path.join(normalized_target, os.path.basename(recipe_json_path)))
    shutil.move(recipe_json_path, new_json_path)
    # Move the preview image alongside the JSON when it exists on disk and
    # is not already at the destination path.
    new_image_path = normalized_image_path
    if normalized_image_path:
        target_image_path = os.path.normpath(os.path.join(normalized_target, os.path.basename(normalized_image_path)))
        if os.path.exists(normalized_image_path) and normalized_image_path != target_image_path:
            shutil.move(normalized_image_path, target_image_path)
            new_image_path = target_image_path
    # Recompute the recipe's folder relative to the recipes root ("" for the
    # root itself) and persist the updated paths; folder uses "/" separators.
    relative_folder = os.path.relpath(normalized_target, recipes_root)
    if relative_folder in (".", ""):
        relative_folder = ""
    updates = {"file_path": new_image_path or recipe_data.get("file_path"), "folder": relative_folder.replace(os.path.sep, "/")}
    updated = await recipe_scanner.update_recipe_metadata(recipe_id, updates)
    if not updated:
        raise RecipeNotFoundError("Recipe not found after move")
    return {
        "success": True,
        "recipe_id": recipe_id,
        "original_file_path": recipe_data.get("file_path"),
        "new_file_path": updates["file_path"],
        "json_path": new_json_path,
        "folder": updates["folder"],
    }
async def move_recipe(self, *, recipe_scanner, recipe_id: str, target_path: str) -> PersistenceResult:
    """Move a recipe's assets into a new folder under the recipes root.

    The target is validated to lie inside the recipes root before any
    file is touched.

    Raises:
        RecipeValidationError: for an empty or out-of-root target path.
        RecipeNotFoundError: if the recipe or recipes root cannot be found.
    """
    target_dir, root_dir = self._normalize_target_path(recipe_scanner, target_path)
    payload = await self._move_recipe_files(
        recipe_scanner=recipe_scanner,
        recipe_id=recipe_id,
        normalized_target=target_dir,
        recipes_root=root_dir,
    )
    return PersistenceResult(payload)
async def move_recipes_bulk(
    self,
    *,
    recipe_scanner,
    recipe_ids: Iterable[str],
    target_path: str,
) -> PersistenceResult:
    """Move multiple recipes to a new folder.

    Each recipe is moved independently; a failure for one recipe is
    recorded in its per-item result and does not abort the remaining moves.

    Args:
        recipe_scanner: Scanner providing recipe lookups and metadata updates.
        recipe_ids: IDs of the recipes to move; must be non-empty.
        target_path: Destination folder, validated against the recipes root.

    Returns:
        A PersistenceResult containing per-recipe results plus aggregate
        success/failure counts. The top-level ``success`` flag is True even
        when individual moves fail.

    Raises:
        RecipeValidationError: if no recipe IDs are provided or the target
            path is invalid.
        RecipeNotFoundError: if the recipes root is not configured.
    """
    # Materialize so emptiness can be validated and a total reported.
    recipe_ids = list(recipe_ids)
    if not recipe_ids:
        raise RecipeValidationError("No recipe IDs provided")
    normalized_target, recipes_root = self._normalize_target_path(recipe_scanner, target_path)
    results: list[dict[str, Any]] = []
    success_count = 0
    failure_count = 0
    for recipe_id in recipe_ids:
        try:
            move_result = await self._move_recipe_files(
                recipe_scanner=recipe_scanner,
                recipe_id=str(recipe_id),
                normalized_target=normalized_target,
                recipes_root=recipes_root,
            )
            results.append(
                {
                    "recipe_id": recipe_id,
                    "original_file_path": move_result.get("original_file_path"),
                    "new_file_path": move_result.get("new_file_path"),
                    "success": True,
                    "message": move_result.get("message", ""),
                    "folder": move_result.get("folder", ""),
                }
            )
            success_count += 1
        except Exception as exc:  # pragma: no cover - per-item error handling
            # Record the failure and carry on with the remaining recipes.
            results.append(
                {
                    "recipe_id": recipe_id,
                    "original_file_path": None,
                    "new_file_path": None,
                    "success": False,
                    "message": str(exc),
                }
            )
            failure_count += 1
    return PersistenceResult(
        {
            "success": True,
            "message": f"Moved {success_count} of {len(recipe_ids)} recipes",
            "results": results,
            "success_count": success_count,
            "failure_count": failure_count,
        }
    )
async def reconnect_lora(
self,
*,
@@ -187,8 +351,8 @@ class RecipePersistenceService:
) -> PersistenceResult:
"""Reconnect a LoRA entry within an existing recipe."""
recipe_path = os.path.join(recipe_scanner.recipes_dir, f"{recipe_id}.recipe.json")
if not os.path.exists(recipe_path):
recipe_path = await recipe_scanner.get_recipe_json_path(recipe_id)
if not recipe_path or not os.path.exists(recipe_path):
raise RecipeNotFoundError("Recipe not found")
target_lora = await recipe_scanner.get_local_lora(target_name)
@@ -233,16 +397,12 @@ class RecipePersistenceService:
if not recipe_ids:
raise RecipeValidationError("No recipe IDs provided")
recipes_dir = recipe_scanner.recipes_dir
if not recipes_dir or not os.path.exists(recipes_dir):
raise RecipeNotFoundError("Recipes directory not found")
deleted_recipes: list[str] = []
failed_recipes: list[dict[str, Any]] = []
for recipe_id in recipe_ids:
recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json")
if not os.path.exists(recipe_json_path):
recipe_json_path = await recipe_scanner.get_recipe_json_path(recipe_id)
if not recipe_json_path or not os.path.exists(recipe_json_path):
failed_recipes.append({"id": recipe_id, "reason": "Recipe not found"})
continue

View File

@@ -20,6 +20,8 @@ class WebSocketManager:
self._last_init_progress: Dict[str, Dict] = {}
# Add auto-organize progress tracking
self._auto_organize_progress: Optional[Dict] = None
# Add recipe repair progress tracking
self._recipe_repair_progress: Optional[Dict] = None
self._auto_organize_lock = asyncio.Lock()
async def handle_connection(self, request: web.Request) -> web.WebSocketResponse:
@@ -189,6 +191,14 @@ class WebSocketManager:
# Broadcast via WebSocket
await self.broadcast(data)
async def broadcast_recipe_repair_progress(self, data: Dict):
    """Cache the latest recipe-repair progress payload and push it to all
    connected WebSocket clients.

    Args:
        data: Progress payload to store and broadcast.
    """
    # Keep the most recent payload so it can be served to polling clients.
    self._recipe_repair_progress = data
    await self.broadcast(data)
def get_auto_organize_progress(self) -> Optional[Dict]:
"""Get current auto-organize progress"""
return self._auto_organize_progress
@@ -197,6 +207,14 @@ class WebSocketManager:
"""Clear auto-organize progress data"""
self._auto_organize_progress = None
def get_recipe_repair_progress(self) -> Optional[Dict]:
    """Return the most recently cached recipe-repair progress payload,
    or ``None`` when no repair has reported progress."""
    return self._recipe_repair_progress
def cleanup_recipe_repair_progress(self):
    """Discard any cached recipe-repair progress payload."""
    self._recipe_repair_progress = None
def is_auto_organize_running(self) -> bool:
"""Check if auto-organize is currently running"""
if not self._auto_organize_progress:

View File

@@ -1,82 +1,33 @@
{
"id": "0448c06d-de1b-46ab-975c-c5aa60d90dbc",
"file_path": "D:/Workspace/ComfyUI/models/loras/recipes/0448c06d-de1b-46ab-975c-c5aa60d90dbc.jpg",
"title": "a mysterious, steampunk-inspired character standing in a dramatic pose",
"modified": 1741837612.3931093,
"created_date": 1741492786.5581934,
"base_model": "Flux.1 D",
"id": "42803a29-02dc-49e1-b798-27da70e8b408",
"file_path": "/home/miao/workspace/ComfyUI/models/loras/recipes/test/42803a29-02dc-49e1-b798-27da70e8b408.webp",
"title": "masterpiece, best quality, amazing quality, very aesthetic, detailed eyes, perfect",
"modified": 1754897325.0507245,
"created_date": 1754897325.0507245,
"base_model": "Illustrious",
"loras": [
{
"file_name": "ChronoDivinitiesFlux_r1",
"hash": "ddbc5abd00db46ad464f5e3ca85f8f7121bc14b594d6785f441d9b002fffe66a",
"strength": 0.8,
"modelVersionId": 1438879,
"modelName": "Chrono Divinities - By HailoKnight",
"modelVersionName": "Flux"
},
{
"file_name": "flux.1_lora_flyway_ink-dynamic",
"hash": "4b4f3b469a0d5d3a04a46886abfa33daa37a905db070ccfbd10b345c6fb00eff",
"strength": 0.2,
"modelVersionId": 914935,
"modelName": "Ink-style",
"modelVersionName": "ink-dynamic"
},
{
"file_name": "ck-painterly-fantasy-000017",
"hash": "48c67064e2936aec342580a2a729d91d75eb818e45ecf993b9650cc66c94c420",
"strength": 0.2,
"modelVersionId": 1189379,
"modelName": "Painterly Fantasy by ChronoKnight - [FLUX & IL]",
"modelVersionName": "FLUX"
},
{
"file_name": "RetroAnimeFluxV1",
"hash": "8f43c31b6c3238ac44195c970d511d759c5893bddd00f59f42b8fe51e8e76fa0",
"strength": 0.8,
"modelVersionId": 806265,
"modelName": "Retro Anime Flux - Style",
"modelVersionName": "v1.0"
},
{
"file_name": "Mezzotint_Artstyle_for_Flux_-_by_Ethanar",
"hash": "e6961502769123bf23a66c5c5298d76264fd6b9610f018319a0ccb091bfc308e",
"strength": 0.2,
"modelVersionId": 757030,
"modelName": "Mezzotint Artstyle for Flux - by Ethanar",
"modelVersionName": "V1"
},
{
"file_name": "FluxMythG0thicL1nes",
"hash": "ecb03595de62bd6183a0dd2b38bea35669fd4d509f4bbae5aa0572cfb7ef4279",
"strength": 0.4,
"modelVersionId": 1202162,
"modelName": "Velvet's Mythic Fantasy Styles | Flux + Pony + illustrious",
"modelVersionName": "Flux Gothic Lines"
},
{
"file_name": "Elden_Ring_-_Yoshitaka_Amano",
"hash": "c660c4c55320be7206cb6a917c59d8da3953cc07169fe10bda833a54ec0024f9",
"strength": 0.75,
"modelVersionId": 746484,
"modelName": "Elden Ring - Yoshitaka Amano",
"modelVersionName": "V1"
"file_name": "",
"hash": "1b5b763d83961bb5745f3af8271ba83f1d4fd69c16278dae6d5b4e194bdde97a",
"strength": 1.0,
"modelVersionId": 2007092,
"modelName": "Pony: People's Works +",
"modelVersionName": "v8_Illusv1.0",
"isDeleted": false,
"exclude": false
}
],
"gen_params": {
"prompt": "a mysterious, steampunk-inspired character standing in a dramatic pose. The character is dressed in a long, intricately detailed dark coat with ornate patterns, a wide-brimmed hat, and leather boots. The face is partially obscured by the hat's shadow, adding to the enigmatic aura. The background showcases a large, antique clock with Roman numerals, surrounded by dynamic lightning and ethereal white birds, enhancing the fantastical atmosphere. The color palette is dominated by dark tones with striking contrasts of white and blue lightning, creating a sense of tension and energy. The overall composition is vertical, with the character centrally positioned, exuding a sense of power and mystery. hkchrono",
"negative_prompt": "",
"checkpoint": {
"type": "checkpoint",
"modelVersionId": 691639,
"modelName": "FLUX",
"modelVersionName": "Dev"
},
"steps": "30",
"sampler": "Undefined",
"cfg_scale": "3.5",
"seed": "1472903449",
"prompt": "masterpiece, best quality, amazing quality, very aesthetic, detailed eyes, perfect eyes, realistic eyes,\n(flat colors:1.5), (anime:1.5), (lineart:1.5),\nclose-up, solo, tongue, 1girl, food, (saliva:0.1), open mouth, candy, simple background, blue background, large lollipop, tongue out, fade background, lips, hand up, holding, looking at viewer, licking, seductive, half-closed eyes,",
"negative_prompt": "shiny skin,",
"steps": 19,
"sampler": "Euler a",
"cfg_scale": 5,
"seed": 1765271748,
"size": "832x1216",
"clip_skip": "2"
}
"clip_skip": 2
},
"fingerprint": "1b5b763d83961bb5745f3af8271ba83f1d4fd69c16278dae6d5b4e194bdde97a:1.0",
"source_path": "https://civitai.com/images/92427432",
"folder": "test"
}

View File

@@ -34,7 +34,7 @@ class TranslationKeySynchronizer:
self.locales_dir = locales_dir
self.verbose = verbose
self.reference_locale = 'en'
self.target_locales = ['zh-CN', 'zh-TW', 'ja', 'ru', 'de', 'fr', 'es', 'ko']
self.target_locales = ['zh-CN', 'zh-TW', 'ja', 'ru', 'de', 'fr', 'es', 'ko', 'he']
def log(self, message: str, level: str = 'INFO'):
"""Log a message if verbose mode is enabled."""

View File

@@ -1,8 +1,10 @@
html, body {
html,
body {
margin: 0;
padding: 0;
height: 100%;
overflow: hidden; /* Disable default scrolling */
overflow: hidden;
/* Disable default scrolling */
}
/* 针对Firefox */
@@ -75,7 +77,8 @@ html, body {
--border-radius-sm: 8px;
--border-radius-xs: 4px;
--scrollbar-width: 8px; /* 添加滚动条宽度变量 */
--scrollbar-width: 8px;
/* 添加滚动条宽度变量 */
/* Shortcut styles */
--shortcut-bg: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.12);
@@ -104,7 +107,8 @@ html[data-theme="light"] {
--lora-surface: oklch(25% 0.02 256 / 0.98);
--lora-border: oklch(90% 0.02 256 / 0.15);
--lora-text: oklch(98% 0.02 256);
--lora-warning: oklch(75% 0.25 80); /* Modified to be used with oklch() */
--lora-warning: oklch(75% 0.25 80);
/* Modified to be used with oklch() */
--lora-error-bg: color-mix(in oklch, var(--lora-error) 15%, transparent);
--lora-error-border: color-mix(in oklch, var(--lora-error) 40%, transparent);
--badge-update-bg: oklch(62% 0.18 220);
@@ -118,5 +122,10 @@ body {
color: var(--text-color);
display: flex;
flex-direction: column;
padding-top: 0; /* Remove the padding-top */
padding-top: 0;
/* Remove the padding-top */
}
.hidden {
display: none !important;
}

View File

@@ -1,7 +1,8 @@
/* Import Modal Styles */
.import-step {
margin: var(--space-2) 0;
transition: none !important; /* Disable any transitions that might affect display */
transition: none !important;
/* Disable any transitions that might affect display */
}
/* Import Mode Toggle */
@@ -107,7 +108,8 @@
justify-content: center;
}
.recipe-image img {
.recipe-image img,
.recipe-preview-video {
max-width: 100%;
max-height: 100%;
object-fit: contain;
@@ -512,14 +514,17 @@
/* Prevent layout shift with scrollbar */
.modal-content {
overflow-y: scroll; /* Always show scrollbar */
scrollbar-gutter: stable; /* Reserve space for scrollbar */
overflow-y: scroll;
/* Always show scrollbar */
scrollbar-gutter: stable;
/* Reserve space for scrollbar */
}
/* For browsers that don't support scrollbar-gutter */
@supports not (scrollbar-gutter: stable) {
.modal-content {
padding-right: calc(var(--space-2) + var(--scrollbar-width)); /* Add extra padding for scrollbar */
padding-right: calc(var(--space-2) + var(--scrollbar-width));
/* Add extra padding for scrollbar */
}
}
@@ -586,7 +591,8 @@
/* Remove the old warning-message styles that were causing layout issues */
.warning-message {
display: none; /* Hide the old style */
display: none;
/* Hide the old style */
}
/* Update deleted badge to be more prominent */
@@ -613,7 +619,8 @@
color: var(--lora-error);
font-size: 0.9em;
margin-top: 8px;
min-height: 20px; /* Ensure there's always space for the error message */
min-height: 20px;
/* Ensure there's always space for the error message */
font-weight: 500;
}
@@ -662,8 +669,15 @@
}
@keyframes fadeIn {
from { opacity: 0; transform: translateY(-10px); }
to { opacity: 1; transform: translateY(0); }
from {
opacity: 0;
transform: translateY(-10px);
}
to {
opacity: 1;
transform: translateY(0);
}
}
.duplicate-warning {
@@ -779,6 +793,7 @@
text-overflow: ellipsis;
display: -webkit-box;
-webkit-line-clamp: 2;
line-clamp: 2;
-webkit-box-orient: vertical;
}

View File

@@ -20,7 +20,7 @@
}
.modal-header-row {
width: 85%;
width: 84%;
display: flex;
align-items: flex-start;
gap: var(--space-2);

View File

@@ -122,6 +122,7 @@ body.modal-open {
cursor: pointer;
opacity: 0.7;
transition: opacity 0.2s;
z-index: 10;
}
.close:hover {

View File

@@ -242,6 +242,20 @@
border-color: var(--lora-error-border);
}
/* Subtle styling for special system tags like "No tags" */
.filter-tag.special-tag {
border-style: dashed;
opacity: 0.8;
font-style: italic;
}
/* Ensure solid border and full opacity when active or excluded */
.filter-tag.special-tag.active,
.filter-tag.special-tag.exclude {
border-style: solid;
opacity: 1;
}
/* Tag filter styles */
.tag-filter {
display: flex;

View File

@@ -2,6 +2,35 @@ import { RecipeCard } from '../components/RecipeCard.js';
import { state, getCurrentPageState } from '../state/index.js';
import { showToast } from '../utils/uiHelpers.js';
const RECIPE_ENDPOINTS = {
list: '/api/lm/recipes',
detail: '/api/lm/recipe',
scan: '/api/lm/recipes/scan',
update: '/api/lm/recipe',
roots: '/api/lm/recipes/roots',
folders: '/api/lm/recipes/folders',
folderTree: '/api/lm/recipes/folder-tree',
unifiedFolderTree: '/api/lm/recipes/unified-folder-tree',
move: '/api/lm/recipe/move',
moveBulk: '/api/lm/recipes/move-bulk',
bulkDelete: '/api/lm/recipes/bulk-delete',
};
const RECIPE_SIDEBAR_CONFIG = {
config: {
displayName: 'Recipe',
supportsMove: true,
},
endpoints: RECIPE_ENDPOINTS,
};
/**
 * Derive a recipe ID from a preview/JSON file path.
 *
 * Takes the basename (handling both `/` and `\` separators) and strips the
 * final extension. A leading-dot name (e.g. `.hidden`) or an extensionless
 * basename is returned unchanged; falsy input yields `null`.
 *
 * @param {string|null|undefined} filePath
 * @returns {string|null}
 */
export function extractRecipeId(filePath) {
    if (!filePath) {
        return null;
    }
    const backslashParts = filePath.split('/').pop().split('\\');
    const basename = backslashParts[backslashParts.length - 1];
    const lastDot = basename.lastIndexOf('.');
    if (lastDot > 0) {
        return basename.slice(0, lastDot);
    }
    return basename;
}
/**
* Fetch recipes with pagination for virtual scrolling
* @param {number} page - Page number to fetch
@@ -18,10 +47,21 @@ export async function fetchRecipesPage(page = 1, pageSize = 100) {
sort_by: pageState.sortBy
});
if (pageState.showFavoritesOnly) {
params.append('favorite', 'true');
}
if (pageState.activeFolder !== null && pageState.activeFolder !== undefined) {
params.append('folder', pageState.activeFolder);
params.append('recursive', pageState.searchOptions?.recursive !== false);
} else if (pageState.searchOptions?.recursive !== undefined) {
params.append('recursive', pageState.searchOptions.recursive);
}
// If we have a specific recipe ID to load
if (pageState.customFilter?.active && pageState.customFilter?.recipeId) {
// Special case: load specific recipe
const response = await fetch(`/api/lm/recipe/${pageState.customFilter.recipeId}`);
const response = await fetch(`${RECIPE_ENDPOINTS.detail}/${pageState.customFilter.recipeId}`);
if (!response.ok) {
throw new Error(`Failed to load recipe: ${response.statusText}`);
@@ -56,6 +96,7 @@ export async function fetchRecipesPage(page = 1, pageSize = 100) {
params.append('search_tags', pageState.searchOptions.tags.toString());
params.append('search_lora_name', pageState.searchOptions.loraName.toString());
params.append('search_lora_model', pageState.searchOptions.loraModel.toString());
params.append('search_prompt', (pageState.searchOptions.prompt || false).toString());
params.append('fuzzy', 'true');
}
}
@@ -78,7 +119,7 @@ export async function fetchRecipesPage(page = 1, pageSize = 100) {
}
// Fetch recipes
const response = await fetch(`/api/lm/recipes?${params.toString()}`);
const response = await fetch(`${RECIPE_ENDPOINTS.list}?${params.toString()}`);
if (!response.ok) {
throw new Error(`Failed to load recipes: ${response.statusText}`);
@@ -213,7 +254,7 @@ export async function refreshRecipes() {
state.loadingManager.showSimpleLoading('Refreshing recipes...');
// Call the API endpoint to rebuild the recipe cache
const response = await fetch('/api/lm/recipes/scan');
const response = await fetch(RECIPE_ENDPOINTS.scan);
if (!response.ok) {
const data = await response.json();
@@ -277,10 +318,12 @@ export async function updateRecipeMetadata(filePath, updates) {
state.loadingManager.showSimpleLoading('Saving metadata...');
// Extract recipeId from filePath (basename without extension)
const basename = filePath.split('/').pop().split('\\').pop();
const recipeId = basename.substring(0, basename.lastIndexOf('.'));
const recipeId = extractRecipeId(filePath);
if (!recipeId) {
throw new Error('Unable to determine recipe ID');
}
const response = await fetch(`/api/lm/recipe/${recipeId}/update`, {
const response = await fetch(`${RECIPE_ENDPOINTS.update}/${recipeId}/update`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
@@ -306,3 +349,187 @@ export async function updateRecipeMetadata(filePath, updates) {
state.loadingManager.hide();
}
}
/**
 * API client used by the shared sidebar/folder UI for the recipes page.
 * Mirrors the model-page client surface (folder tree, roots, folders,
 * move/bulk-move/bulk-delete) but targets the recipe endpoints and derives
 * recipe IDs from preview file paths via `extractRecipeId`.
 */
export class RecipeSidebarApiClient {
    constructor() {
        // Static endpoint + capability configuration shared by all methods.
        this.apiConfig = RECIPE_SIDEBAR_CONFIG;
    }

    /** Fetch the unified folder tree for recipes. Throws on a non-OK response. */
    async fetchUnifiedFolderTree() {
        const response = await fetch(this.apiConfig.endpoints.unifiedFolderTree);
        if (!response.ok) {
            throw new Error('Failed to fetch recipe folder tree');
        }
        return response.json();
    }

    /** Fetch the configured recipe root directories. Throws on a non-OK response. */
    async fetchModelRoots() {
        const response = await fetch(this.apiConfig.endpoints.roots);
        if (!response.ok) {
            throw new Error('Failed to fetch recipe roots');
        }
        return response.json();
    }

    /** Fetch the flat list of recipe folders. Throws on a non-OK response. */
    async fetchModelFolders() {
        const response = await fetch(this.apiConfig.endpoints.folders);
        if (!response.ok) {
            throw new Error('Failed to fetch recipe folders');
        }
        return response.json();
    }

    /**
     * Move multiple recipes (identified by their preview file paths) to
     * `targetPath`. Shows per-outcome toasts and returns the per-item result
     * array from the server (empty array when nothing was attempted).
     *
     * @param {string[]} filePaths - Preview file paths of the recipes to move.
     * @param {string} targetPath - Destination folder.
     * @returns {Promise<Array>} Per-recipe move results.
     */
    async moveBulkModels(filePaths, targetPath) {
        if (!this.apiConfig.config.supportsMove) {
            showToast('toast.api.bulkMoveNotSupported', { type: this.apiConfig.config.displayName }, 'warning');
            return [];
        }

        // Translate file paths to recipe IDs; drop any that cannot be derived.
        const recipeIds = filePaths
            .map((path) => extractRecipeId(path))
            .filter((id) => !!id);

        if (recipeIds.length === 0) {
            showToast('toast.models.noModelsSelected', {}, 'warning');
            return [];
        }

        const response = await fetch(this.apiConfig.endpoints.moveBulk, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({
                recipe_ids: recipeIds,
                target_path: targetPath,
            }),
        });

        const result = await response.json();
        if (!response.ok || !result.success) {
            throw new Error(result.error || `Failed to move ${this.apiConfig.config.displayName}s`);
        }

        if (result.failure_count > 0) {
            // Partial success: summarize counts, then list up to 3 failure messages.
            showToast(
                'toast.api.bulkMovePartial',
                {
                    successCount: result.success_count,
                    type: this.apiConfig.config.displayName,
                    failureCount: result.failure_count,
                },
                'warning'
            );

            const failedFiles = (result.results || [])
                .filter((item) => !item.success)
                .map((item) => item.message || 'Unknown error');

            if (failedFiles.length > 0) {
                const failureMessage =
                    failedFiles.length <= 3
                        ? failedFiles.join('\n')
                        : `${failedFiles.slice(0, 3).join('\n')}\n(and ${failedFiles.length - 3} more)`;
                showToast('toast.api.bulkMoveFailures', { failures: failureMessage }, 'warning', 6000);
            }
        } else {
            showToast(
                'toast.api.bulkMoveSuccess',
                {
                    successCount: result.success_count,
                    type: this.apiConfig.config.displayName,
                },
                'success'
            );
        }

        return result.results || [];
    }

    /**
     * Move a single recipe to `targetPath`. Returns an object with the old and
     * new file paths and folder, or `null` when the move was not attempted.
     *
     * @param {string} filePath - Preview file path of the recipe.
     * @param {string} targetPath - Destination folder.
     * @returns {Promise<object|null>}
     */
    async moveSingleModel(filePath, targetPath) {
        if (!this.apiConfig.config.supportsMove) {
            showToast('toast.api.moveNotSupported', { type: this.apiConfig.config.displayName }, 'warning');
            return null;
        }

        const recipeId = extractRecipeId(filePath);
        if (!recipeId) {
            showToast('toast.api.moveFailed', { message: 'Recipe ID missing' }, 'error');
            return null;
        }

        const response = await fetch(this.apiConfig.endpoints.move, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({
                recipe_id: recipeId,
                target_path: targetPath,
            }),
        });

        const result = await response.json();
        if (!response.ok || !result.success) {
            throw new Error(result.error || `Failed to move ${this.apiConfig.config.displayName}`);
        }

        // A server-provided message (e.g. "already in target folder") is
        // surfaced as info; otherwise report a plain success.
        if (result.message) {
            showToast('toast.api.moveInfo', { message: result.message }, 'info');
        } else {
            showToast('toast.api.moveSuccess', { type: this.apiConfig.config.displayName }, 'success');
        }

        return {
            original_file_path: result.original_file_path || filePath,
            new_file_path: result.new_file_path || filePath,
            folder: result.folder || '',
            message: result.message,
        };
    }

    /**
     * Bulk-delete recipes identified by their preview file paths.
     * Shows a loading indicator for the duration of the request.
     *
     * @param {string[]} filePaths - Preview file paths of the recipes.
     * @returns {Promise<{success: boolean, deleted_count: number, failed_count: number, errors: Array}>}
     * @throws {Error} When no paths/IDs are available or the server reports failure.
     */
    async bulkDeleteModels(filePaths) {
        if (!filePaths || filePaths.length === 0) {
            throw new Error('No file paths provided');
        }

        const recipeIds = filePaths
            .map((path) => extractRecipeId(path))
            .filter((id) => !!id);

        if (recipeIds.length === 0) {
            throw new Error('No recipe IDs could be derived from file paths');
        }

        try {
            state.loadingManager?.showSimpleLoading('Deleting recipes...');
            const response = await fetch(this.apiConfig.endpoints.bulkDelete, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({
                    recipe_ids: recipeIds,
                }),
            });

            const result = await response.json();
            if (!response.ok || !result.success) {
                throw new Error(result.error || 'Failed to delete recipes');
            }

            return {
                success: true,
                deleted_count: result.total_deleted,
                failed_count: result.total_failed || 0,
                errors: result.failed || [],
            };
        } finally {
            // Always dismiss the loading indicator, even on failure.
            state.loadingManager?.hide();
        }
    }
}

View File

@@ -15,6 +15,29 @@ export class GlobalContextMenu extends BaseContextMenu {
/**
 * Show the global context menu, toggling item visibility by page type:
 * on the recipes page the model-maintenance actions are hidden and the
 * recipe-repair action is shown; elsewhere the inverse applies.
 *
 * @param {number} x - Horizontal position for the menu.
 * @param {number} y - Vertical position for the menu.
 * @param {object|null} origin - Optional origin descriptor; defaults to a global origin.
 */
showMenu(x, y, origin = null) {
    const contextOrigin = origin || { type: 'global' };

    // Conditional visibility for recipes page
    const isRecipesPage = state.currentPageType === 'recipes';
    const modelUpdateItem = this.menu.querySelector('[data-action="check-model-updates"]');
    const licenseRefreshItem = this.menu.querySelector('[data-action="fetch-missing-licenses"]');
    const downloadExamplesItem = this.menu.querySelector('[data-action="download-example-images"]');
    const cleanupExamplesItem = this.menu.querySelector('[data-action="cleanup-example-images-folders"]');
    const repairRecipesItem = this.menu.querySelector('[data-action="repair-recipes"]');

    if (isRecipesPage) {
        modelUpdateItem?.classList.add('hidden');
        licenseRefreshItem?.classList.add('hidden');
        downloadExamplesItem?.classList.add('hidden');
        cleanupExamplesItem?.classList.add('hidden');
        repairRecipesItem?.classList.remove('hidden');
    } else {
        modelUpdateItem?.classList.remove('hidden');
        licenseRefreshItem?.classList.remove('hidden');
        downloadExamplesItem?.classList.remove('hidden');
        cleanupExamplesItem?.classList.remove('hidden');
        repairRecipesItem?.classList.add('hidden');
    }

    super.showMenu(x, y, contextOrigin);
}
@@ -40,6 +63,11 @@ export class GlobalContextMenu extends BaseContextMenu {
console.error('Failed to refresh missing license metadata:', error);
});
break;
case 'repair-recipes':
this.repairRecipes(menuItem).catch((error) => {
console.error('Failed to repair recipes:', error);
});
break;
default:
console.warn(`Unhandled global context menu action: ${action}`);
break;
@@ -235,4 +263,78 @@ export class GlobalContextMenu extends BaseContextMenu {
return `${displayName}s`;
}
/**
 * Kick off a server-side bulk recipe repair and poll its progress endpoint
 * until completion, driving the enhanced-progress UI along the way.
 * Re-entrancy is guarded by `this._repairInProgress`; the triggering menu
 * item is disabled for the duration.
 *
 * @param {Element|null} menuItem - The menu item that triggered the repair.
 */
async repairRecipes(menuItem) {
    if (this._repairInProgress) {
        return;
    }
    this._repairInProgress = true;
    menuItem?.classList.add('disabled');

    const loadingMessage = translate(
        'globalContextMenu.repairRecipes.loading',
        {},
        'Repairing recipe data...'
    );
    const progressUI = state.loadingManager?.showEnhancedProgress(loadingMessage);

    try {
        // Start the repair job on the server.
        const response = await fetch('/api/lm/recipes/repair', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
        });
        const result = await response.json();
        if (!response.ok || !result.success) {
            throw new Error(result.error || 'Failed to start repair');
        }

        // Poll for progress (or wait for WebSocket if preferred, but polling is simpler for this implementation)
        let isComplete = false;
        while (!isComplete && this._repairInProgress) {
            const progressResponse = await fetch('/api/lm/recipes/repair-progress');
            if (progressResponse.ok) {
                const progressResult = await progressResponse.json();
                if (progressResult.success && progressResult.progress) {
                    const p = progressResult.progress;
                    if (p.status === 'processing') {
                        const percent = (p.current / p.total) * 100;
                        progressUI?.updateProgress(percent, p.recipe_name, `${loadingMessage} (${p.current}/${p.total})`);
                    } else if (p.status === 'completed') {
                        isComplete = true;
                        progressUI?.complete(translate(
                            'globalContextMenu.repairRecipes.success',
                            { count: p.repaired },
                            `Repaired ${p.repaired} recipes.`
                        ));
                        showToast('globalContextMenu.repairRecipes.success', { count: p.repaired }, 'success');
                        // Refresh recipes page if active
                        if (window.recipesPage) {
                            window.recipesPage.refresh();
                        }
                    } else if (p.status === 'error') {
                        throw new Error(p.error || 'Repair failed');
                    }
                } else if (progressResponse.status === 404) {
                    // Progress might have finished quickly and been cleaned up
                    // NOTE(review): this branch sits inside the `progressResponse.ok`
                    // block, where status can never be 404 — presumably intended as
                    // an `else` of the outer `ok` check; confirm before relying on it.
                    isComplete = true;
                    progressUI?.complete();
                }
            }
            if (!isComplete) {
                // Throttle polling to one request per second.
                await new Promise(resolve => setTimeout(resolve, 1000));
            }
        }
    } catch (error) {
        console.error('Recipe repair failed:', error);
        progressUI?.complete(translate('globalContextMenu.repairRecipes.error', { message: error.message }, 'Repair failed: {message}'));
        showToast('globalContextMenu.repairRecipes.error', { message: error.message }, 'error');
    } finally {
        // Always release the re-entrancy guard and re-enable the menu item.
        this._repairInProgress = false;
        menuItem?.classList.remove('disabled');
    }
}
}

View File

@@ -4,6 +4,7 @@ import { showToast, copyToClipboard, sendLoraToWorkflow } from '../../utils/uiHe
import { setSessionItem, removeSessionItem } from '../../utils/storageHelpers.js';
import { updateRecipeMetadata } from '../../api/recipeApi.js';
import { state } from '../../state/index.js';
import { moveManager } from '../../managers/MoveManager.js';
export class RecipeContextMenu extends BaseContextMenu {
constructor() {
@@ -77,6 +78,9 @@ export class RecipeContextMenu extends BaseContextMenu {
// Share recipe
this.currentCard.querySelector('.fa-share-alt')?.click();
break;
case 'move':
moveManager.showMoveModal(this.currentCard.dataset.filepath);
break;
case 'delete':
// Delete recipe
this.currentCard.querySelector('.fa-trash')?.click();
@@ -89,6 +93,10 @@ export class RecipeContextMenu extends BaseContextMenu {
// Download missing LoRAs
this.downloadMissingLoRAs(recipeId);
break;
case 'repair':
// Repair recipe metadata
this.repairRecipe(recipeId);
break;
}
}
@@ -279,6 +287,38 @@ export class RecipeContextMenu extends BaseContextMenu {
}
}
}
// Repair recipe metadata
async repairRecipe(recipeId) {
if (!recipeId) {
showToast('recipes.contextMenu.repair.missingId', {}, 'error');
return;
}
try {
showToast('recipes.contextMenu.repair.starting', {}, 'info');
const response = await fetch(`/api/lm/recipe/${recipeId}/repair`, {
method: 'POST'
});
const result = await response.json();
if (result.success) {
if (result.repaired > 0) {
showToast('recipes.contextMenu.repair.success', {}, 'success');
// Refresh the current card or reload
this.resetAndReload();
} else {
showToast('recipes.contextMenu.repair.skipped', {}, 'info');
}
} else {
throw new Error(result.error || 'Repair failed');
}
} catch (error) {
console.error('Error repairing recipe:', error);
showToast('recipes.contextMenu.repair.failed', { message: error.message }, 'error');
}
}
}
// Mix in shared methods from ModelContextMenuMixin

View File

@@ -1,8 +1,11 @@
// Recipe Card Component
import { showToast, copyToClipboard, sendLoraToWorkflow } from '../utils/uiHelpers.js';
import { updateRecipeMetadata } from '../api/recipeApi.js';
import { configureModelCardVideo } from './shared/ModelCard.js';
import { modalManager } from '../managers/ModalManager.js';
import { getCurrentPageState } from '../state/index.js';
import { state } from '../state/index.js';
import { bulkManager } from '../managers/BulkManager.js';
import { NSFW_LEVELS, getBaseModelAbbreviation } from '../utils/constants.js';
class RecipeCard {
@@ -38,13 +41,28 @@ class RecipeCard {
const allLorasAvailable = missingLorasCount === 0 && lorasCount > 0;
// Ensure file_url exists, fallback to file_path if needed
const imageUrl = this.recipe.file_url ||
const previewUrl = this.recipe.file_url ||
(this.recipe.file_path ? `/loras_static/root1/preview/${this.recipe.file_path.split('/').pop()}` :
'/loras_static/images/no-preview.png');
// Check if in duplicates mode
const pageState = getCurrentPageState();
const isDuplicatesMode = pageState.duplicatesMode;
const isDuplicatesMode = getCurrentPageState().duplicatesMode;
const autoplayOnHover = state?.global?.settings?.autoplay_on_hover === true;
const isFavorite = this.recipe.favorite === true;
// Video preview logic
const isVideo = previewUrl.endsWith('.mp4') || previewUrl.endsWith('.webm');
const videoAttrs = [
'controls',
'muted',
'loop',
'playsinline',
'preload="none"',
`data-src="${previewUrl}"`
];
if (!autoplayOnHover) {
videoAttrs.push('data-autoplay="true"');
}
// NSFW blur logic - similar to LoraCard
const nsfwLevel = this.recipe.preview_nsfw_level !== undefined ? this.recipe.preview_nsfw_level : 0;
@@ -66,7 +84,10 @@ class RecipeCard {
card.innerHTML = `
<div class="card-preview ${shouldBlur ? 'blurred' : ''}">
<img src="${imageUrl}" alt="${this.recipe.title}">
${isVideo ?
`<video ${videoAttrs.join(' ')} style="pointer-events: none;"></video>` :
`<img src="${previewUrl}" alt="${this.recipe.title}">`
}
${!isDuplicatesMode ? `
<div class="card-header">
${shouldBlur ?
@@ -75,6 +96,7 @@ class RecipeCard {
</button>` : ''}
<span class="base-model-label ${shouldBlur ? 'with-toggle' : ''}" title="${baseModelLabel}">${baseModelDisplay}</span>
<div class="card-actions">
<i class="${isFavorite ? 'fas fa-star favorite-active' : 'far fa-star'}" title="${isFavorite ? 'Remove from Favorites' : 'Add to Favorites'}"></i>
<i class="fas fa-share-alt" title="Share Recipe"></i>
<i class="fas fa-paper-plane" title="Send Recipe to Workflow (Click: Append, Shift+Click: Replace)"></i>
<i class="fas fa-trash" title="Delete Recipe"></i>
@@ -104,6 +126,13 @@ class RecipeCard {
`;
this.attachEventListeners(card, isDuplicatesMode, shouldBlur);
// Add video auto-play on hover functionality if needed
const videoElement = card.querySelector('video');
if (videoElement) {
configureModelCardVideo(videoElement, autoplayOnHover);
}
return card;
}
@@ -113,6 +142,67 @@ class RecipeCard {
return `${missingCount} of ${totalCount} LoRAs missing`;
}
    /**
     * Toggle this recipe's favorite flag with optimistic UI feedback.
     *
     * Flips `this.recipe.favorite` and the star icon immediately, then persists
     * via updateRecipeMetadata. On failure, both the local flag and the icon
     * are rolled back. The card element may be replaced mid-flight by the
     * virtual scroller (presumably triggered by the metadata update — TODO
     * confirm), so the error path re-resolves the card from the DOM by
     * data-filepath before touching the icon.
     *
     * @param {HTMLElement} card - The card element the user clicked in; may be stale after the API call.
     */
    async toggleFavorite(card) {
        // Find the latest star icon in case the card was re-rendered
        const getStarIcon = (c) => c.querySelector('.fa-star');
        let starIcon = getStarIcon(card);
        const isFavorite = this.recipe.favorite || false;
        const newFavoriteState = !isFavorite;

        // Update early to provide instant feedback and avoid race conditions with re-renders
        this.recipe.favorite = newFavoriteState;

        // Swap the icon between outline star (far) and solid highlighted star
        // (fas + favorite-active), keeping the tooltip in sync.
        const updateIconUI = (icon, state) => {
            if (!icon) return;
            if (state) {
                icon.classList.remove('far');
                icon.classList.add('fas', 'favorite-active');
                icon.title = 'Remove from Favorites';
            } else {
                icon.classList.remove('fas', 'favorite-active');
                icon.classList.add('far');
                icon.title = 'Add to Favorites';
            }
        };

        // Update current icon immediately
        updateIconUI(starIcon, newFavoriteState);

        try {
            await updateRecipeMetadata(this.recipe.file_path, {
                favorite: newFavoriteState
            });

            // Status already updated, just show toast
            if (newFavoriteState) {
                showToast('modelCard.favorites.added', {}, 'success');
            } else {
                showToast('modelCard.favorites.removed', {}, 'success');
            }

            // No icon re-sync needed here: if the virtual scroller re-rendered
            // the card during updateRecipeMetadata, it did so from the already
            // updated this.recipe.favorite, so the fresh DOM is correct.
        } catch (error) {
            console.error('Failed to update favorite status:', error);
            // Revert local state on error
            this.recipe.favorite = isFavorite;

            // Re-find the card via XPath on data-filepath in case the original
            // element was replaced during the failed request; fall back to the
            // stale reference if no match is found.
            const currentCard = card.ownerDocument.evaluate(
                `.//*[@data-filepath="${this.recipe.file_path}"]`,
                card.ownerDocument, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null
            ).singleNodeValue || card;
            updateIconUI(getStarIcon(currentCard), isFavorite);

            showToast('modelCard.favorites.updateFailed', {}, 'error');
        }
    }
attachEventListeners(card, isDuplicatesMode, shouldBlur) {
// Add blur toggle functionality if content should be blurred
if (shouldBlur) {
@@ -137,9 +227,19 @@ class RecipeCard {
// Recipe card click event - only attach if not in duplicates mode
if (!isDuplicatesMode) {
card.addEventListener('click', () => {
if (state.bulkMode) {
bulkManager.toggleCardSelection(card);
return;
}
this.clickHandler(this.recipe);
});
// Favorite button click event - prevent propagation to card
card.querySelector('.fa-star')?.addEventListener('click', (e) => {
e.stopPropagation();
this.toggleFavorite(card);
});
// Share button click event - prevent propagation to card
card.querySelector('.fa-share-alt')?.addEventListener('click', (e) => {
e.stopPropagation();
@@ -235,13 +335,19 @@ class RecipeCard {
}
// Create delete modal content
const previewUrl = this.recipe.file_url || '/loras_static/images/no-preview.png';
const isVideo = previewUrl.endsWith('.mp4') || previewUrl.endsWith('.webm');
const deleteModalContent = `
<div class="modal-content delete-modal-content">
<h2>Delete Recipe</h2>
<p class="delete-message">Are you sure you want to delete this recipe?</p>
<div class="delete-model-info">
<div class="delete-preview">
<img src="${this.recipe.file_url || '/loras_static/images/no-preview.png'}" alt="${this.recipe.title}">
${isVideo ?
`<video src="${previewUrl}" controls muted loop playsinline style="max-width: 100%;"></video>` :
`<img src="${previewUrl}" alt="${this.recipe.title}">`
}
</div>
<div class="delete-info">
<h3>${this.recipe.title}</h3>

View File

@@ -77,7 +77,9 @@ export class SidebarManager {
this.pageControls = pageControls;
this.pageType = pageControls.pageType;
this.lastPageControls = pageControls;
this.apiClient = getModelApiClient();
this.apiClient = pageControls?.getSidebarApiClient?.()
|| pageControls?.sidebarApiClient
|| getModelApiClient();
// Set initial sidebar state immediately (hidden by default)
this.setInitialSidebarState();
@@ -205,6 +207,10 @@ export class SidebarManager {
}
initializeDragAndDrop() {
if (this.apiClient?.apiConfig?.config?.supportsMove === false) {
return;
}
if (!this.dragHandlersInitialized) {
document.addEventListener('dragstart', this.handleCardDragStart);
document.addEventListener('dragend', this.handleCardDragEnd);
@@ -416,7 +422,14 @@ export class SidebarManager {
}
if (!this.apiClient) {
this.apiClient = getModelApiClient();
this.apiClient = this.pageControls?.getSidebarApiClient?.()
|| this.pageControls?.sidebarApiClient
|| getModelApiClient();
}
if (this.apiClient?.apiConfig?.config?.supportsMove === false) {
showToast('toast.models.moveFailed', { message: translate('sidebar.dragDrop.moveUnsupported', {}, 'Move not supported for this page') }, 'error');
return false;
}
const rootPath = this.draggedRootPath ? this.draggedRootPath.replace(/\\/g, '/') : '';
@@ -470,7 +483,9 @@ export class SidebarManager {
}
async init() {
this.apiClient = getModelApiClient();
this.apiClient = this.pageControls?.getSidebarApiClient?.()
|| this.pageControls?.sidebarApiClient
|| getModelApiClient();
// Set initial sidebar state immediately (hidden by default)
this.setInitialSidebarState();
@@ -1010,8 +1025,11 @@ export class SidebarManager {
}
async selectFolder(path) {
// Normalize path: null or undefined means root
const normalizedPath = (path === null || path === undefined) ? '' : path;
// Update selected path
this.selectedPath = path;
this.selectedPath = normalizedPath;
// Update UI
this.updateTreeSelection();
@@ -1019,8 +1037,8 @@ export class SidebarManager {
this.updateSidebarHeader();
// Update page state
this.pageControls.pageState.activeFolder = path;
setStorageItem(`${this.pageType}_activeFolder`, path);
this.pageControls.pageState.activeFolder = normalizedPath;
setStorageItem(`${this.pageType}_activeFolder`, normalizedPath);
// Reload models with new filter
await this.pageControls.resetAndReload();
@@ -1143,7 +1161,7 @@ export class SidebarManager {
});
// Add selection to current path
if (this.selectedPath !== null) {
if (this.selectedPath !== null && this.selectedPath !== undefined) {
const selectedItem = folderTree.querySelector(`[data-path="${this.selectedPath}"]`);
if (selectedItem) {
selectedItem.classList.add('selected');
@@ -1154,7 +1172,7 @@ export class SidebarManager {
node.classList.remove('selected');
});
if (this.selectedPath) {
if (this.selectedPath !== null && this.selectedPath !== undefined) {
const selectedNode = folderTree.querySelector(`[data-path="${this.selectedPath}"] .sidebar-tree-node-content`);
if (selectedNode) {
selectedNode.classList.add('selected');
@@ -1225,9 +1243,10 @@ export class SidebarManager {
// Start with root breadcrumb
const rootSiblings = Object.keys(this.treeData);
const isRootSelected = !this.selectedPath;
const breadcrumbs = [`
<div class="breadcrumb-dropdown">
<span class="sidebar-breadcrumb-item ${this.selectedPath == null ? 'active' : ''}" data-path="">
<span class="sidebar-breadcrumb-item ${isRootSelected ? 'active' : ''}" data-path="">
<i class="fas fa-home"></i> ${this.apiClient.apiConfig.config.displayName} root
</span>
</div>
@@ -1323,7 +1342,7 @@ export class SidebarManager {
const sidebarHeader = document.getElementById('sidebarHeader');
if (!sidebarHeader) return;
if (this.selectedPath == null) {
if (!this.selectedPath) {
sidebarHeader.classList.add('root-selected');
} else {
sidebarHeader.classList.remove('root-selected');

View File

@@ -506,7 +506,7 @@ export function createModelCard(model, modelType) {
// Check if autoplayOnHover is enabled for video previews
const autoplayOnHover = state.global?.settings?.autoplay_on_hover || false;
const isVideo = previewUrl.endsWith('.mp4');
const isVideo = previewUrl.endsWith('.mp4') || previewUrl.endsWith('.webm');
const videoAttrs = [
'controls',
'muted',

View File

@@ -313,7 +313,7 @@ export async function showModelModal(model, modelType) {
// Prepare LoRA specific data with complete civitai data
const escapedWords = (modelType === 'loras' || modelType === 'embeddings') && modelWithFullData.civitai?.trainedWords?.length ?
modelWithFullData.civitai.trainedWords.map(word => word.replace(/'/g, '\\\'')) : [];
modelWithFullData.civitai.trainedWords : [];
// Generate model type specific content
let typeSpecificContent;

View File

@@ -6,7 +6,7 @@
import { showToast, copyToClipboard } from '../../utils/uiHelpers.js';
import { translate } from '../../utils/i18nHelpers.js';
import { getModelApiClient } from '../../api/modelApiFactory.js';
import { escapeAttribute } from './utils.js';
import { escapeAttribute, escapeHtml } from './utils.js';
/**
* Fetch trained words for a model
@@ -80,8 +80,10 @@ function createSuggestionDropdown(trainedWords, classTokens, existingWords = [])
const tokenItem = document.createElement('div');
tokenItem.className = `metadata-suggestion-item class-token-item ${existingWords.includes(classTokens) ? 'already-added' : ''}`;
tokenItem.title = `${translate('modals.model.triggerWords.suggestions.classToken')}: ${classTokens}`;
const escapedToken = escapeHtml(classTokens);
tokenItem.innerHTML = `
<span class="metadata-suggestion-text">${classTokens}</span>
<span class="metadata-suggestion-text">${escapedToken}</span>
<div class="metadata-suggestion-meta">
<span class="token-badge">${translate('modals.model.triggerWords.suggestions.classToken')}</span>
${existingWords.includes(classTokens) ?
@@ -137,8 +139,10 @@ function createSuggestionDropdown(trainedWords, classTokens, existingWords = [])
const item = document.createElement('div');
item.className = `metadata-suggestion-item ${isAdded ? 'already-added' : ''}`;
item.title = word; // Show full word on hover if truncated
const escapedWord = escapeHtml(word);
item.innerHTML = `
<span class="metadata-suggestion-text">${word}</span>
<span class="metadata-suggestion-text">${escapedWord}</span>
<div class="metadata-suggestion-meta">
<span class="trained-word-freq">${frequency}</span>
${isAdded ? `<span class="added-indicator"><i class="fas fa-check"></i></span>` : ''}
@@ -215,9 +219,12 @@ export function renderTriggerWords(words, filePath) {
</div>
<div class="trigger-words-content">
<div class="trigger-words-tags">
${words.map(word => `
<div class="trigger-word-tag" data-word="${word}" onclick="copyTriggerWord('${word}')" title="${translate('modals.model.triggerWords.copyWord')}">
<span class="trigger-word-content">${word}</span>
${words.map(word => {
const escapedWord = escapeHtml(word);
const escapedAttr = escapeAttribute(word);
return `
<div class="trigger-word-tag" data-word="${escapedAttr}" onclick="copyTriggerWord(this.dataset.word)" title="${translate('modals.model.triggerWords.copyWord')}">
<span class="trigger-word-content">${escapedWord}</span>
<span class="trigger-word-copy">
<i class="fas fa-copy"></i>
</span>
@@ -225,7 +232,7 @@ export function renderTriggerWords(words, filePath) {
<i class="fas fa-times"></i>
</button>
</div>
`).join('')}
`}).join('')}
</div>
</div>
<div class="metadata-edit-controls" style="display:none;">
@@ -417,7 +424,7 @@ function resetTriggerWordsUIState(section) {
const deleteBtn = tag.querySelector('.metadata-delete-btn');
// Restore click-to-copy functionality
tag.onclick = () => copyTriggerWord(word);
tag.onclick = () => copyTriggerWord(tag.dataset.word);
// Show copy icon, hide delete button
if (copyIcon) copyIcon.style.display = '';
@@ -454,9 +461,11 @@ function restoreOriginalTriggerWords(section, originalWords) {
const tag = document.createElement('div');
tag.className = 'trigger-word-tag';
tag.dataset.word = word;
tag.onclick = () => copyTriggerWord(word);
tag.onclick = () => copyTriggerWord(tag.dataset.word);
const escapedWord = escapeHtml(word);
tag.innerHTML = `
<span class="trigger-word-content">${word}</span>
<span class="trigger-word-content">${escapedWord}</span>
<span class="trigger-word-copy">
<i class="fas fa-copy"></i>
</span>
@@ -524,8 +533,10 @@ function addNewTriggerWord(word) {
const newTag = document.createElement('div');
newTag.className = 'trigger-word-tag';
newTag.dataset.word = word;
const escapedWord = escapeHtml(word);
newTag.innerHTML = `
<span class="trigger-word-content">${word}</span>
<span class="trigger-word-content">${escapedWord}</span>
<span class="trigger-word-copy" style="display:none;">
<i class="fas fa-copy"></i>
</span>

View File

@@ -60,14 +60,12 @@ export class AppCore {
initTheme();
initBackToTop();
// Initialize the bulk manager and context menu only if not on recipes page
if (state.currentPageType !== 'recipes') {
// Initialize the bulk manager and context menu
bulkManager.initialize();
// Initialize bulk context menu
const bulkContextMenu = new BulkContextMenu();
bulkManager.setBulkContextMenu(bulkContextMenu);
}
// Initialize the example images manager
exampleImagesManager.initialize();
@@ -84,10 +82,7 @@ export class AppCore {
// Start onboarding if needed (after everything is initialized)
setTimeout(() => {
// Do not show onboarding if version-mismatch banner is visible
if (!bannerService.isBannerVisible('version-mismatch')) {
onboardingManager.start();
}
}, 1000); // Small delay to ensure all elements are rendered
// Return the core instance for chaining

View File

@@ -17,7 +17,7 @@ const AFDIAN_URL = 'https://afdian.com/a/pixelpawsai';
const BANNER_HISTORY_KEY = 'banner_history';
const BANNER_HISTORY_VIEWED_AT_KEY = 'banner_history_viewed_at';
const BANNER_HISTORY_LIMIT = 20;
const HISTORY_EXCLUDED_IDS = new Set(['version-mismatch']);
const HISTORY_EXCLUDED_IDS = new Set([]);
/**
* Banner Service for managing notification banners

View File

@@ -3,6 +3,7 @@ import { showToast, copyToClipboard, sendLoraToWorkflow, buildLoraSyntax, getNSF
import { updateCardsForBulkMode } from '../components/shared/ModelCard.js';
import { modalManager } from './ModalManager.js';
import { getModelApiClient, resetAndReload } from '../api/modelApiFactory.js';
import { RecipeSidebarApiClient } from '../api/recipeApi.js';
import { MODEL_TYPES, MODEL_CONFIG } from '../api/apiConfig.js';
import { BASE_MODEL_CATEGORIES } from '../utils/constants.js';
import { getPriorityTagSuggestions } from '../utils/priorityTagHelpers.js';
@@ -62,9 +63,22 @@ export class BulkManager {
autoOrganize: true,
deleteAll: true,
setContentRating: true
},
recipes: {
addTags: false,
sendToWorkflow: false,
copyAll: false,
refreshAll: false,
checkUpdates: false,
moveAll: true,
autoOrganize: false,
deleteAll: true,
setContentRating: false
}
};
this.recipeApiClient = null;
window.addEventListener('lm:priority-tags-updated', () => {
const container = document.querySelector('#bulkAddTagsModal .metadata-suggestions-container');
if (!container) {
@@ -87,9 +101,6 @@ export class BulkManager {
}
initialize() {
// Do not initialize on recipes page
if (state.currentPageType === 'recipes') return;
// Register with event manager for coordinated event handling
this.registerEventHandlers();
@@ -97,6 +108,23 @@ export class BulkManager {
eventManager.setState('bulkMode', state.bulkMode || false);
}
getActiveApiClient() {
if (state.currentPageType === 'recipes') {
if (!this.recipeApiClient) {
this.recipeApiClient = new RecipeSidebarApiClient();
}
return this.recipeApiClient;
}
return getModelApiClient();
}
getCurrentDisplayConfig() {
if (state.currentPageType === 'recipes') {
return { displayName: 'Recipe' };
}
return MODEL_CONFIG[state.currentPageType] || { displayName: 'Model' };
}
setBulkContextMenu(bulkContextMenu) {
this.bulkContextMenu = bulkContextMenu;
}
@@ -240,7 +268,9 @@ export class BulkManager {
// Update event manager state
eventManager.setState('bulkMode', state.bulkMode);
if (this.bulkBtn) {
this.bulkBtn.classList.toggle('active', state.bulkMode);
}
updateCardsForBulkMode(state.bulkMode);
@@ -504,13 +534,13 @@ export class BulkManager {
modalManager.closeModal('bulkDeleteModal');
try {
const apiClient = getModelApiClient();
const apiClient = this.getActiveApiClient();
const filePaths = Array.from(state.selectedModels);
const result = await apiClient.bulkDeleteModels(filePaths);
if (result.success) {
const currentConfig = MODEL_CONFIG[state.currentPageType];
const currentConfig = this.getCurrentDisplayConfig();
showToast('toast.models.deletedSuccessfully', {
count: result.deleted_count,
type: currentConfig.displayName.toLowerCase()
@@ -570,7 +600,7 @@ export class BulkManager {
this.applySelectionState();
const newlySelected = state.selectedModels.size - oldCount;
const currentConfig = MODEL_CONFIG[state.currentPageType];
const currentConfig = this.getCurrentDisplayConfig();
showToast('toast.models.selectedAdditional', {
count: newlySelected,
type: currentConfig.displayName.toLowerCase()
@@ -622,8 +652,7 @@ export class BulkManager {
return;
}
const currentType = state.currentPageType;
const currentConfig = MODEL_CONFIG[currentType] || MODEL_CONFIG[MODEL_TYPES.LORA];
const currentConfig = this.getCurrentDisplayConfig();
const typeLabel = (currentConfig?.displayName || 'Model').toLowerCase();
const { ids: modelIds, missingCount } = this.collectSelectedModelIds();
@@ -969,7 +998,7 @@ export class BulkManager {
modalManager.closeModal('bulkAddTagsModal');
if (successCount > 0) {
const currentConfig = MODEL_CONFIG[state.currentPageType];
const currentConfig = this.getCurrentDisplayConfig();
const toastKey = mode === 'replace' ? 'toast.models.tagsReplacedSuccessfully' : 'toast.models.tagsAddedSuccessfully';
showToast(toastKey, {
count: successCount,

View File

@@ -3,6 +3,7 @@ import { showToast, updatePanelPositions } from '../utils/uiHelpers.js';
import { getModelApiClient } from '../api/modelApiFactory.js';
import { removeStorageItem, setStorageItem, getStorageItem } from '../utils/storageHelpers.js';
import { MODEL_TYPE_DISPLAY_NAMES } from '../utils/constants.js';
import { translate } from '../utils/i18nHelpers.js';
export class FilterManager {
constructor(options = {}) {
@@ -131,6 +132,28 @@ export class FilterManager {
this.applyTagElementState(tagEl, (this.filters.tags && this.filters.tags[tagName]) || 'none');
tagsContainer.appendChild(tagEl);
});
// Add "No tags" as a special filter at the end
const noTagsEl = document.createElement('div');
noTagsEl.className = 'filter-tag tag-filter special-tag';
const noTagsLabel = translate('header.filter.noTags', {}, 'No tags');
const noTagsKey = '__no_tags__';
noTagsEl.dataset.tag = noTagsKey;
noTagsEl.innerHTML = noTagsLabel;
noTagsEl.addEventListener('click', async () => {
const currentState = (this.filters.tags && this.filters.tags[noTagsKey]) || 'none';
const newState = this.getNextTriStateState(currentState);
this.setTagFilterState(noTagsKey, newState);
this.applyTagElementState(noTagsEl, newState);
this.updateActiveFiltersCount();
await this.applyFilters(false);
});
this.applyTagElementState(noTagsEl, (this.filters.tags && this.filters.tags[noTagsKey]) || 'none');
tagsContainer.appendChild(noTagsEl);
}
initializeLicenseFilters() {

View File

@@ -3,6 +3,7 @@ import { state, getCurrentPageState } from '../state/index.js';
import { modalManager } from './ModalManager.js';
import { bulkManager } from './BulkManager.js';
import { getModelApiClient } from '../api/modelApiFactory.js';
import { RecipeSidebarApiClient } from '../api/recipeApi.js';
import { FolderTreeManager } from '../components/FolderTreeManager.js';
import { sidebarManager } from '../components/SidebarManager.js';
@@ -12,11 +13,22 @@ class MoveManager {
this.bulkFilePaths = null;
this.folderTreeManager = new FolderTreeManager();
this.initialized = false;
this.recipeApiClient = null;
// Bind methods
this.updateTargetPath = this.updateTargetPath.bind(this);
}
_getApiClient(modelType = null) {
if (state.currentPageType === 'recipes') {
if (!this.recipeApiClient) {
this.recipeApiClient = new RecipeSidebarApiClient();
}
return this.recipeApiClient;
}
return getModelApiClient(modelType);
}
initializeEventListeners() {
if (this.initialized) return;
@@ -36,7 +48,7 @@ class MoveManager {
this.currentFilePath = null;
this.bulkFilePaths = null;
const apiClient = getModelApiClient();
const apiClient = this._getApiClient(modelType);
const currentPageType = state.currentPageType;
const modelConfig = apiClient.apiConfig.config;
@@ -121,7 +133,7 @@ class MoveManager {
async initializeFolderTree() {
try {
const apiClient = getModelApiClient();
const apiClient = this._getApiClient();
// Fetch unified folder tree
const treeData = await apiClient.fetchUnifiedFolderTree();
@@ -141,7 +153,7 @@ class MoveManager {
updateTargetPath() {
const pathDisplay = document.getElementById('moveTargetPathDisplay');
const modelRoot = document.getElementById('moveModelRoot').value;
const apiClient = getModelApiClient();
const apiClient = this._getApiClient();
const config = apiClient.apiConfig.config;
let fullPath = modelRoot || `Select a ${config.displayName.toLowerCase()} root directory`;
@@ -158,7 +170,7 @@ class MoveManager {
async moveModel() {
const selectedRoot = document.getElementById('moveModelRoot').value;
const apiClient = getModelApiClient();
const apiClient = this._getApiClient();
const config = apiClient.apiConfig.config;
if (!selectedRoot) {

View File

@@ -4,8 +4,7 @@ import {
setStorageItem,
getStoredVersionInfo,
setStoredVersionInfo,
isVersionMatch,
resetDismissedBanner
isVersionMatch
} from '../utils/storageHelpers.js';
import { bannerService } from './BannerService.js';
import { translate } from '../utils/i18nHelpers.js';
@@ -753,94 +752,14 @@ export class UpdateService {
stored: getStoredVersionInfo()
});
// Reset dismissed status for version mismatch banner
resetDismissedBanner('version-mismatch');
// Register and show the version mismatch banner
this.registerVersionMismatchBanner();
// Silently update stored version info as cache busting handles the resource updates
setStoredVersionInfo(this.currentVersionInfo);
}
}
} catch (error) {
console.error('Failed to check version info:', error);
}
}
registerVersionMismatchBanner() {
// Get stored and current version for display
const storedVersion = getStoredVersionInfo() || translate('common.status.unknown');
const currentVersion = this.currentVersionInfo || translate('common.status.unknown');
bannerService.registerBanner('version-mismatch', {
id: 'version-mismatch',
title: translate('banners.versionMismatch.title', {}, 'Application Update Detected'),
content: translate('banners.versionMismatch.content', {
storedVersion,
currentVersion
}, `Your browser is running an outdated version of LoRA Manager (${storedVersion}). The server has been updated to version ${currentVersion}. Please refresh to ensure proper functionality.`),
actions: [
{
text: translate('banners.versionMismatch.refreshNow', {}, 'Refresh Now'),
icon: 'fas fa-sync',
action: 'hardRefresh',
type: 'primary'
}
],
dismissible: false,
priority: 10,
countdown: 15,
onRegister: (bannerElement) => {
// Add countdown element
const countdownEl = document.createElement('div');
countdownEl.className = 'banner-countdown';
countdownEl.innerHTML = `<span>${translate('banners.versionMismatch.refreshingIn', {}, 'Refreshing in')} <strong>15</strong> ${translate('banners.versionMismatch.seconds', {}, 'seconds')}...</span>`;
bannerElement.querySelector('.banner-content').appendChild(countdownEl);
// Start countdown
let seconds = 15;
const countdownInterval = setInterval(() => {
seconds--;
const strongEl = countdownEl.querySelector('strong');
if (strongEl) strongEl.textContent = seconds;
if (seconds <= 0) {
clearInterval(countdownInterval);
this.performHardRefresh();
}
}, 1000);
// Store interval ID for cleanup
bannerElement.dataset.countdownInterval = countdownInterval;
// Add action button event handler
const actionBtn = bannerElement.querySelector('.banner-action[data-action="hardRefresh"]');
if (actionBtn) {
actionBtn.addEventListener('click', (e) => {
e.preventDefault();
clearInterval(countdownInterval);
this.performHardRefresh();
});
}
},
onRemove: (bannerElement) => {
// Clear any existing interval
const intervalId = bannerElement.dataset.countdownInterval;
if (intervalId) {
clearInterval(parseInt(intervalId));
}
}
});
}
performHardRefresh() {
// Update stored version info before refreshing
setStoredVersionInfo(this.currentVersionInfo);
// Force a hard refresh by adding cache-busting parameter
const cacheBuster = new Date().getTime();
window.location.href = window.location.pathname +
(window.location.search ? window.location.search + '&' : '?') +
`cache=${cacheBuster}`;
}
}
// Create and export singleton instance

View File

@@ -66,6 +66,10 @@ export class DownloadManager {
completeMetadata.checkpoint = checkpointMetadata;
}
if (this.importManager.recipeData && this.importManager.recipeData.extension) {
formData.append('extension', this.importManager.recipeData.extension);
}
// Add source_path to metadata to track where the recipe was imported from
if (this.importManager.importMode === 'url') {
const urlInput = document.getElementById('imageUrlInput');

View File

@@ -67,16 +67,27 @@ export class RecipeDataManager {
};
reader.readAsDataURL(this.importManager.recipeImage);
} else if (this.importManager.recipeData && this.importManager.recipeData.image_base64) {
// For URL mode - use the base64 image data returned from the backend
// For URL mode - use the base64 data returned from the backend
if (this.importManager.recipeData.is_video) {
const mimeType = this.importManager.recipeData.extension === '.webm' ? 'video/webm' : 'video/mp4';
imagePreview.innerHTML = `<video src="data:${mimeType};base64,${this.importManager.recipeData.image_base64}" controls autoplay loop muted class="recipe-preview-video"></video>`;
} else {
imagePreview.innerHTML = `<img src="data:image/jpeg;base64,${this.importManager.recipeData.image_base64}" alt="${translate('recipes.controls.import.recipePreviewAlt', {}, 'Recipe preview')}">`;
}
} else if (this.importManager.importMode === 'url') {
// Fallback for URL mode if no base64 data
const urlInput = document.getElementById('imageUrlInput');
if (urlInput && urlInput.value) {
const url = urlInput.value.toLowerCase();
if (url.endsWith('.mp4') || url.endsWith('.webm')) {
const mimeType = url.endsWith('.webm') ? 'video/webm' : 'video/mp4';
imagePreview.innerHTML = `<video src="${urlInput.value}" controls autoplay loop muted class="recipe-preview-video"></video>`;
} else {
imagePreview.innerHTML = `<img src="${urlInput.value}" alt="${translate('recipes.controls.import.recipePreviewAlt', {}, 'Recipe preview')}" crossorigin="anonymous">`;
}
}
}
}
// Update LoRA count information
const totalLoras = this.importManager.recipeData.loras.length;

View File

@@ -2,18 +2,47 @@
import { appCore } from './core.js';
import { ImportManager } from './managers/ImportManager.js';
import { RecipeModal } from './components/RecipeModal.js';
import { getCurrentPageState } from './state/index.js';
import { state, getCurrentPageState } from './state/index.js';
import { getSessionItem, removeSessionItem } from './utils/storageHelpers.js';
import { RecipeContextMenu } from './components/ContextMenu/index.js';
import { DuplicatesManager } from './components/DuplicatesManager.js';
import { refreshVirtualScroll } from './utils/infiniteScroll.js';
import { refreshRecipes } from './api/recipeApi.js';
import { refreshRecipes, RecipeSidebarApiClient } from './api/recipeApi.js';
import { sidebarManager } from './components/SidebarManager.js';
class RecipePageControls {
constructor() {
this.pageType = 'recipes';
this.pageState = getCurrentPageState();
this.sidebarApiClient = new RecipeSidebarApiClient();
}
async resetAndReload() {
refreshVirtualScroll();
}
async refreshModels(fullRebuild = false) {
if (fullRebuild) {
await refreshRecipes();
return;
}
refreshVirtualScroll();
}
getSidebarApiClient() {
return this.sidebarApiClient;
}
}
class RecipeManager {
constructor() {
// Get page state
this.pageState = getCurrentPageState();
// Page controls for shared sidebar behaviors
this.pageControls = new RecipePageControls();
// Initialize ImportManager
this.importManager = new ImportManager();
@@ -52,10 +81,23 @@ class RecipeManager {
// Expose necessary functions to the page
this._exposeGlobalFunctions();
// Initialize sidebar navigation
await this._initSidebar();
// Initialize common page features
appCore.initializePageFeatures();
}
async _initSidebar() {
try {
sidebarManager.setHostPageControls(this.pageControls);
const shouldShowSidebar = state?.global?.settings?.show_folder_sidebar !== false;
await sidebarManager.setSidebarEnabled(shouldShowSidebar);
} catch (error) {
console.error('Failed to initialize recipe sidebar:', error);
}
}
_initSearchOptions() {
// Ensure recipes search options are properly initialized
if (!this.pageState.searchOptions) {
@@ -63,7 +105,9 @@ class RecipeManager {
title: true, // Recipe title
tags: true, // Recipe tags
loraName: true, // LoRA file name
loraModel: true // LoRA model name
loraModel: true, // LoRA model name
prompt: true, // Prompt search
recursive: true
};
}
}
@@ -172,11 +216,26 @@ class RecipeManager {
// Sort select
const sortSelect = document.getElementById('sortSelect');
if (sortSelect) {
sortSelect.value = this.pageState.sortBy || 'date:desc';
sortSelect.addEventListener('change', () => {
this.pageState.sortBy = sortSelect.value;
refreshVirtualScroll();
});
}
const bulkButton = document.querySelector('[data-action="bulk"]');
if (bulkButton) {
bulkButton.addEventListener('click', () => window.bulkManager?.toggleBulkMode());
}
const favoriteFilterBtn = document.getElementById('favoriteFilterBtn');
if (favoriteFilterBtn) {
favoriteFilterBtn.addEventListener('click', () => {
this.pageState.showFavoritesOnly = !this.pageState.showFavoritesOnly;
favoriteFilterBtn.classList.toggle('active', this.pageState.showFavoritesOnly);
refreshVirtualScroll();
});
}
}
// This method is kept for compatibility but now uses virtual scrolling

View File

@@ -95,13 +95,15 @@ export const state = {
currentPage: 1,
isLoading: false,
hasMore: true,
sortBy: 'date',
sortBy: 'date:desc',
activeFolder: getStorageItem('recipes_activeFolder'),
searchManager: null,
searchOptions: {
title: true,
tags: true,
loraName: true,
loraModel: true
loraModel: true,
recursive: getStorageItem('recipes_recursiveSearch', true),
},
filters: {
baseModel: [],

View File

@@ -783,12 +783,13 @@ export class VirtualScroller {
deepMerge(target, source) {
if (!source || !target) return target;
// Initialize result with a copy of target
const result = { ...target };
// Only iterate over keys that exist in target
Object.keys(target).forEach(key => {
// Check if source has this key
if (source.hasOwnProperty(key)) {
if (!source) return result;
// Iterate over all keys in the source object
Object.keys(source).forEach(key => {
const targetValue = target[key];
const sourceValue = source[key];
@@ -801,13 +802,11 @@ export class VirtualScroller {
typeof sourceValue === 'object' &&
!Array.isArray(sourceValue)
) {
result[key] = this.deepMerge(targetValue, sourceValue);
result[key] = this.deepMerge(targetValue || {}, sourceValue);
} else {
// For primitive types, arrays, or null, use the value from source
// Otherwise update with source value (includes primitives, arrays, and new keys)
result[key] = sourceValue;
}
}
// If source does not have this key, keep the original value from target
});
return result;

View File

@@ -4,8 +4,8 @@
<head>
<title>{% block title %}{{ t('header.appTitle') }}{% endblock %}</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" href="/loras_static/css/style.css">
<link rel="stylesheet" href="/loras_static/css/onboarding.css">
<link rel="stylesheet" href="/loras_static/css/style.css?v={{ version }}">
<link rel="stylesheet" href="/loras_static/css/onboarding.css?v={{ version }}">
<link rel="stylesheet" href="/loras_static/vendor/flag-icons/flag-icons.min.css">
{% block page_css %}{% endblock %}
<link rel="stylesheet" href="/loras_static/vendor/font-awesome/css/all.min.css"
@@ -98,7 +98,7 @@
{% if is_initializing %}
<!-- Load initialization JavaScript -->
<script type="module" src="/loras_static/js/components/initialization.js"></script>
<script type="module" src="/loras_static/js/components/initialization.js?v={{ version }}"></script>
{% else %}
{% block main_script %}{% endblock %}
{% endif %}

View File

@@ -40,5 +40,5 @@
{% endblock %}
{% block main_script %}
<script type="module" src="/loras_static/js/checkpoints.js"></script>
<script type="module" src="/loras_static/js/checkpoints.js?v={{ version }}"></script>
{% endblock %}

View File

@@ -102,6 +102,9 @@
<div class="context-menu-item" data-action="cleanup-example-images-folders">
<i class="fas fa-trash-restore"></i> <span>{{ t('globalContextMenu.cleanupExampleImages.label') }}</span>
</div>
<div class="context-menu-item" data-action="repair-recipes">
<i class="fas fa-tools"></i> <span>{{ t('globalContextMenu.repairRecipes.label') }}</span>
</div>
</div>
<div id="nsfwLevelSelector" class="nsfw-level-selector">
@@ -110,7 +113,8 @@
<button class="close-nsfw-selector"><i class="fas fa-times"></i></button>
</div>
<div class="nsfw-level-content">
<div class="current-level"><span>{{ t('modals.contentRating.current') }}:</span> <span id="currentNSFWLevel">{{ t('common.status.unknown') }}</span></div>
<div class="current-level"><span>{{ t('modals.contentRating.current') }}:</span> <span id="currentNSFWLevel">{{
t('common.status.unknown') }}</span></div>
<div class="nsfw-level-options">
<button class="nsfw-level-btn" data-level="1">{{ t('modals.contentRating.levels.pg') }}</button>
<button class="nsfw-level-btn" data-level="2">{{ t('modals.contentRating.levels.pg13') }}</button>

View File

@@ -19,22 +19,27 @@
{% set current_page = 'loras' %}
{% endif %}
{% set search_disabled = current_page == 'statistics' %}
{% set search_placeholder_key = 'header.search.notAvailable' if search_disabled else 'header.search.placeholders.' ~ current_page %}
{% set search_placeholder_key = 'header.search.notAvailable' if search_disabled else 'header.search.placeholders.' ~
current_page %}
{% set header_search_class = 'header-search disabled' if search_disabled else 'header-search' %}
<nav class="main-nav">
<a href="/loras" class="nav-item{% if current_path == '/loras' %} active{% endif %}" id="lorasNavItem">
<i class="fas fa-layer-group"></i> <span>{{ t('header.navigation.loras') }}</span>
</a>
<a href="/loras/recipes" class="nav-item{% if current_path.startswith('/loras/recipes') %} active{% endif %}" id="recipesNavItem">
<a href="/loras/recipes" class="nav-item{% if current_path.startswith('/loras/recipes') %} active{% endif %}"
id="recipesNavItem">
<i class="fas fa-book-open"></i> <span>{{ t('header.navigation.recipes') }}</span>
</a>
<a href="/checkpoints" class="nav-item{% if current_path.startswith('/checkpoints') %} active{% endif %}" id="checkpointsNavItem">
<a href="/checkpoints" class="nav-item{% if current_path.startswith('/checkpoints') %} active{% endif %}"
id="checkpointsNavItem">
<i class="fas fa-check-circle"></i> <span>{{ t('header.navigation.checkpoints') }}</span>
</a>
<a href="/embeddings" class="nav-item{% if current_path.startswith('/embeddings') %} active{% endif %}" id="embeddingsNavItem">
<a href="/embeddings" class="nav-item{% if current_path.startswith('/embeddings') %} active{% endif %}"
id="embeddingsNavItem">
<i class="fas fa-code"></i> <span>{{ t('header.navigation.embeddings') }}</span>
</a>
<a href="/statistics" class="nav-item{% if current_path.startswith('/statistics') %} active{% endif %}" id="statisticsNavItem">
<a href="/statistics" class="nav-item{% if current_path.startswith('/statistics') %} active{% endif %}"
id="statisticsNavItem">
<i class="fas fa-chart-bar"></i> <span>{{ t('header.navigation.statistics') }}</span>
</a>
</nav>
@@ -42,12 +47,15 @@
<!-- Context-aware search container -->
<div class="{{ header_search_class }}" id="headerSearch">
<div class="search-container">
<input type="text" id="searchInput" placeholder="{{ t(search_placeholder_key) }}"{% if search_disabled %} disabled{% endif %} />
<input type="text" id="searchInput" placeholder="{{ t(search_placeholder_key) }}" {% if search_disabled %}
disabled{% endif %} />
<i class="fas fa-search search-icon"></i>
<button class="search-options-toggle" id="searchOptionsToggle" title="{{ t('header.search.options') }}"{% if search_disabled %} disabled aria-disabled="true"{% endif %}>
<button class="search-options-toggle" id="searchOptionsToggle" title="{{ t('header.search.options') }}" {% if
search_disabled %} disabled aria-disabled="true" {% endif %}>
<i class="fas fa-sliders-h"></i>
</button>
<button class="search-filter-toggle" id="filterButton" title="{{ t('header.filter.title') }}"{% if search_disabled %} disabled aria-disabled="true"{% endif %}>
<button class="search-filter-toggle" id="filterButton" title="{{ t('header.filter.title') }}" {% if
search_disabled %} disabled aria-disabled="true" {% endif %}>
<i class="fas fa-filter"></i>
<span class="filter-badge" id="activeFiltersCount" style="display: none">0</span>
</button>
@@ -97,6 +105,7 @@
<div class="search-option-tag active" data-option="tags">{{ t('header.search.filters.tags') }}</div>
<div class="search-option-tag active" data-option="loraName">{{ t('header.search.filters.loraName') }}</div>
<div class="search-option-tag active" data-option="loraModel">{{ t('header.search.filters.loraModel') }}</div>
<div class="search-option-tag active" data-option="prompt">{{ t('header.search.filters.prompt') }}</div>
{% elif request.path == '/checkpoints' %}
<div class="search-option-tag active" data-option="filename">{{ t('header.search.filters.filename') }}</div>
<div class="search-option-tag active" data-option="modelname">{{ t('header.search.filters.modelname') }}</div>

View File

@@ -40,5 +40,5 @@
{% endblock %}
{% block main_script %}
<script type="module" src="/loras_static/js/embeddings.js"></script>
<script type="module" src="/loras_static/js/embeddings.js?v={{ version }}"></script>
{% endblock %}

View File

@@ -24,6 +24,6 @@
{% block main_script %}
{% if not is_initializing %}
<script type="module" src="/loras_static/js/loras.js"></script>
<script type="module" src="/loras_static/js/loras.js?v={{ version }}"></script>
{% endif %}
{% endblock %}

View File

@@ -4,9 +4,9 @@
{% block page_id %}recipes{% endblock %}
{% block page_css %}
<link rel="stylesheet" href="/loras_static/css/components/card.css">
<link rel="stylesheet" href="/loras_static/css/components/recipe-modal.css">
<link rel="stylesheet" href="/loras_static/css/components/import-modal.css">
<link rel="stylesheet" href="/loras_static/css/components/card.css?v={{ version }}">
<link rel="stylesheet" href="/loras_static/css/components/recipe-modal.css?v={{ version }}">
<link rel="stylesheet" href="/loras_static/css/components/import-modal.css?v={{ version }}">
{% endblock %}
{% block additional_components %}
@@ -15,17 +15,29 @@
<div id="recipeContextMenu" class="context-menu" style="display: none;">
<!-- <div class="context-menu-item" data-action="details"><i class="fas fa-info-circle"></i> View Details</div> -->
<div class="context-menu-item" data-action="share"><i class="fas fa-share-alt"></i> {{ t('loras.contextMenu.shareRecipe') }}</div>
<div class="context-menu-item" data-action="copy"><i class="fas fa-copy"></i> {{ t('loras.contextMenu.copyRecipeSyntax') }}</div>
<div class="context-menu-item" data-action="sendappend"><i class="fas fa-paper-plane"></i> {{ t('loras.contextMenu.sendToWorkflowAppend') }}</div>
<div class="context-menu-item" data-action="sendreplace"><i class="fas fa-exchange-alt"></i> {{ t('loras.contextMenu.sendToWorkflowReplace') }}</div>
<div class="context-menu-item" data-action="viewloras"><i class="fas fa-layer-group"></i> {{ t('loras.contextMenu.viewAllLoras') }}</div>
<div class="context-menu-item download-missing-item" data-action="download-missing"><i class="fas fa-download"></i> {{ t('loras.contextMenu.downloadMissingLoras') }}</div>
<div class="context-menu-item" data-action="share"><i class="fas fa-share-alt"></i> {{
t('loras.contextMenu.shareRecipe') }}</div>
<div class="context-menu-item" data-action="copy"><i class="fas fa-copy"></i> {{
t('loras.contextMenu.copyRecipeSyntax') }}</div>
<div class="context-menu-item" data-action="sendappend"><i class="fas fa-paper-plane"></i> {{
t('loras.contextMenu.sendToWorkflowAppend') }}</div>
<div class="context-menu-item" data-action="sendreplace"><i class="fas fa-exchange-alt"></i> {{
t('loras.contextMenu.sendToWorkflowReplace') }}</div>
<div class="context-menu-item" data-action="viewloras"><i class="fas fa-layer-group"></i> {{
t('loras.contextMenu.viewAllLoras') }}</div>
<div class="context-menu-item download-missing-item" data-action="download-missing"><i class="fas fa-download"></i>
{{ t('loras.contextMenu.downloadMissingLoras') }}</div>
<div class="context-menu-item" data-action="set-nsfw">
<i class="fas fa-exclamation-triangle"></i> {{ t('loras.contextMenu.setContentRating') }}
</div>
<div class="context-menu-item" data-action="repair">
<i class="fas fa-tools"></i> {{ t('loras.contextMenu.repairMetadata') }}
</div>
<div class="context-menu-separator"></div>
<div class="context-menu-item delete-item" data-action="delete"><i class="fas fa-trash"></i> {{ t('loras.contextMenu.deleteRecipe') }}</div>
<div class="context-menu-item" data-action="move"><i class="fas fa-folder-open"></i> {{
t('loras.contextMenu.moveToFolder') }}</div>
<div class="context-menu-item delete-item" data-action="delete"><i class="fas fa-trash"></i> {{
t('loras.contextMenu.deleteRecipe') }}</div>
</div>
{% endblock %}
@@ -36,25 +48,95 @@
{% block content %}
<!-- Recipe controls -->
<div class="controls">
<div class="actions">
<div class="action-buttons">
<div class="control-group">
<select id="sortSelect" title="{{ t('recipes.controls.sort.title') }}">
<optgroup label="{{ t('recipes.controls.sort.name') }}">
<option value="name:asc">{{ t('recipes.controls.sort.nameAsc') }}</option>
<option value="name:desc">{{ t('recipes.controls.sort.nameDesc') }}</option>
</optgroup>
<optgroup label="{{ t('recipes.controls.sort.date') }}">
<option value="date:desc">{{ t('recipes.controls.sort.dateDesc') }}</option>
<option value="date:asc">{{ t('recipes.controls.sort.dateAsc') }}</option>
</optgroup>
<optgroup label="{{ t('recipes.controls.sort.lorasCount') }}">
<option value="loras_count:desc">{{ t('recipes.controls.sort.lorasCountDesc') }}</option>
<option value="loras_count:asc">{{ t('recipes.controls.sort.lorasCountAsc') }}</option>
</optgroup>
</select>
</div>
<div title="{{ t('recipes.controls.refresh.title') }}" class="control-group">
<button onclick="recipeManager.refreshRecipes()"><i class="fas fa-sync"></i> {{ t('common.actions.refresh') }}</button>
<button onclick="recipeManager.refreshRecipes()"><i class="fas fa-sync"></i> {{
t('common.actions.refresh')
}}</button>
</div>
<div title="{{ t('recipes.controls.import.title') }}" class="control-group">
<button onclick="importManager.showImportModal()"><i class="fas fa-file-import"></i> {{ t('recipes.controls.import.action') }}</button>
<button onclick="importManager.showImportModal()"><i class="fas fa-file-import"></i> {{
t('recipes.controls.import.action') }}</button>
</div>
<div class="control-group" title="{{ t('loras.controls.bulk.title') }}">
<button id="bulkOperationsBtn" data-action="bulk" title="{{ t('loras.controls.bulk.title') }}">
<i class="fas fa-th-large"></i> <span><span>{{ t('loras.controls.bulk.action') }}</span>
<div class="shortcut-key">B</div>
</span>
</button>
</div>
<!-- Add duplicate detection button -->
<div title="{{ t('loras.controls.duplicates.title') }}" class="control-group">
<button onclick="recipeManager.findDuplicateRecipes()"><i class="fas fa-clone"></i> {{ t('loras.controls.duplicates.action') }}</button>
<button onclick="recipeManager.findDuplicateRecipes()"><i class="fas fa-clone"></i> {{
t('loras.controls.duplicates.action') }}</button>
</div>
<div class="control-group">
<button id="favoriteFilterBtn" data-action="toggle-favorites" class="favorite-filter"
title="{{ t('recipes.controls.favorites.title') }}">
<i class="fas fa-star"></i> <span>{{ t('recipes.controls.favorites.action') }}</span>
</button>
</div>
<!-- Custom filter indicator button (hidden by default) -->
<div id="customFilterIndicator" class="control-group hidden">
<div class="filter-active">
<i class="fas fa-filter"></i> <span id="customFilterText">{{ t('recipes.controls.filteredByLora') }}</span>
<i class="fas fa-filter"></i> <span id="customFilterText">{{ t('recipes.controls.filteredByLora')
}}</span>
<i class="fas fa-times-circle clear-filter"></i>
</div>
</div>
</div>
<div class="controls-right">
<div class="keyboard-nav-hint tooltip">
<i class="fas fa-keyboard"></i>
<span class="tooltiptext">
<span>{{ t('keyboard.navigation') }}</span>
<table class="keyboard-shortcuts">
<tr>
<td><span class="key">Page Up</span></td>
<td>{{ t('keyboard.shortcuts.pageUp') }}</td>
</tr>
<tr>
<td><span class="key">Page Down</span></td>
<td>{{ t('keyboard.shortcuts.pageDown') }}</td>
</tr>
<tr>
<td><span class="key">Home</span></td>
<td>{{ t('keyboard.shortcuts.home') }}</td>
</tr>
<tr>
<td><span class="key">End</span></td>
<td>{{ t('keyboard.shortcuts.end') }}</td>
</tr>
</table>
</span>
</div>
</div>
</div>
<!-- Breadcrumb Navigation -->
<div id="breadcrumbContainer" class="sidebar-breadcrumb-container">
<nav class="sidebar-breadcrumb-nav" id="sidebarBreadcrumbNav">
<!-- Breadcrumbs will be populated by JavaScript -->
</nav>
</div>
</div>
<!-- Duplicates banner (hidden by default) -->
@@ -76,6 +158,8 @@
</div>
</div>
{% include 'components/folder_sidebar.html' %}
<!-- Recipe grid -->
<div class="card-grid" id="recipeGrid">
<!-- Remove the server-side conditional rendering and placeholder -->
@@ -83,6 +167,10 @@
</div>
{% endblock %}
{% block main_script %}
<script type="module" src="/loras_static/js/recipes.js"></script>
{% block overlay %}
<div class="bulk-mode-overlay"></div>
{% endblock %}
{% block main_script %}
<script type="module" src="/loras_static/js/recipes.js?v={{ version }}"></script>
{% endblock %}

View File

@@ -192,6 +192,6 @@
{% block main_script %}
{% if not is_initializing %}
<script type="module" src="/loras_static/js/statistics.js"></script>
<script type="module" src="/loras_static/js/statistics.js?v={{ version }}"></script>
{% endif %}
{% endblock %}

View File

@@ -1,3 +1,4 @@
import json
import os
import pytest
@@ -62,6 +63,7 @@ def test_symlink_scan_skips_file_links(monkeypatch: pytest.MonkeyPatch, tmp_path
def test_symlink_cache_reuses_previous_scan(monkeypatch: pytest.MonkeyPatch, tmp_path):
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
monkeypatch.setattr(config_module.Config, "_schedule_symlink_rescan", lambda self: None)
target_dir = loras_dir / "target"
target_dir.mkdir()
@@ -85,6 +87,7 @@ def test_symlink_cache_reuses_previous_scan(monkeypatch: pytest.MonkeyPatch, tmp
def test_symlink_cache_survives_noise_mtime(monkeypatch: pytest.MonkeyPatch, tmp_path):
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
monkeypatch.setattr(config_module.Config, "_schedule_symlink_rescan", lambda self: None)
target_dir = loras_dir / "target"
target_dir.mkdir()
@@ -109,3 +112,72 @@ def test_symlink_cache_survives_noise_mtime(monkeypatch: pytest.MonkeyPatch, tmp
second_cfg = config_module.Config()
assert second_cfg.map_path_to_link(str(target_dir)) == _normalize(str(dir_link))
def test_background_rescan_refreshes_cache(monkeypatch: pytest.MonkeyPatch, tmp_path):
loras_dir, _ = _setup_paths(monkeypatch, tmp_path)
target_dir = loras_dir / "target"
target_dir.mkdir()
dir_link = loras_dir / "dir_link"
dir_link.symlink_to(target_dir, target_is_directory=True)
# Build initial cache pointing at the first target
first_cfg = config_module.Config()
old_real = _normalize(os.path.realpath(target_dir))
assert first_cfg.map_path_to_link(str(target_dir)) == _normalize(str(dir_link))
# Retarget the symlink to a new directory without touching the cache file
new_target = loras_dir / "target_v2"
new_target.mkdir()
dir_link.unlink()
dir_link.symlink_to(new_target, target_is_directory=True)
second_cfg = config_module.Config()
# Cache may still point at the old real path immediately after load
initial_mapping = second_cfg.map_path_to_link(str(new_target))
assert initial_mapping in {str(new_target), _normalize(str(dir_link))}
# Background rescan should refresh the mapping to the new target and update the cache file
second_cfg._wait_for_rescan(timeout=2.0)
new_real = _normalize(os.path.realpath(new_target))
assert second_cfg._path_mappings.get(new_real) == _normalize(str(dir_link))
assert second_cfg.map_path_to_link(str(new_target)) == _normalize(str(dir_link))
def test_symlink_roots_are_preserved(monkeypatch: pytest.MonkeyPatch, tmp_path):
settings_dir = tmp_path / "settings"
real_loras = tmp_path / "loras_real"
real_loras.mkdir()
loras_link = tmp_path / "loras_link"
loras_link.symlink_to(real_loras, target_is_directory=True)
checkpoints_dir = tmp_path / "checkpoints"
checkpoints_dir.mkdir()
embedding_dir = tmp_path / "embeddings"
embedding_dir.mkdir()
def fake_get_folder_paths(kind: str):
mapping = {
"loras": [str(loras_link)],
"checkpoints": [str(checkpoints_dir)],
"unet": [],
"embeddings": [str(embedding_dir)],
}
return mapping.get(kind, [])
monkeypatch.setattr(config_module.folder_paths, "get_folder_paths", fake_get_folder_paths)
monkeypatch.setattr(config_module, "standalone_mode", True)
monkeypatch.setattr(config_module, "get_settings_dir", lambda create=True: str(settings_dir))
monkeypatch.setattr(config_module.Config, "_schedule_symlink_rescan", lambda self: None)
cfg = config_module.Config()
normalized_real = _normalize(os.path.realpath(real_loras))
normalized_link = _normalize(str(loras_link))
assert cfg._path_mappings[normalized_real] == normalized_link
cache_path = settings_dir / "cache" / "symlink_map.json"
payload = json.loads(cache_path.read_text(encoding="utf-8"))
assert payload["path_mappings"][normalized_real] == normalized_link

View File

@@ -0,0 +1,114 @@
import { describe, it, beforeEach, afterEach, expect, vi } from 'vitest';
const showToastMock = vi.hoisted(() => vi.fn());
const loadingManagerMock = vi.hoisted(() => ({
showSimpleLoading: vi.fn(),
hide: vi.fn(),
}));
vi.mock('../../../static/js/utils/uiHelpers.js', () => {
return {
showToast: showToastMock,
};
});
vi.mock('../../../static/js/components/RecipeCard.js', () => ({
RecipeCard: vi.fn(() => ({ element: document.createElement('div') })),
}));
vi.mock('../../../static/js/state/index.js', () => {
return {
state: {
loadingManager: loadingManagerMock,
},
getCurrentPageState: vi.fn(),
};
});
import { RecipeSidebarApiClient } from '../../../static/js/api/recipeApi.js';
describe('RecipeSidebarApiClient bulk operations', () => {
beforeEach(() => {
vi.clearAllMocks();
global.fetch = vi.fn();
});
afterEach(() => {
delete global.fetch;
});
it('sends recipe IDs when moving in bulk', async () => {
const api = new RecipeSidebarApiClient();
global.fetch.mockResolvedValue({
ok: true,
json: async () => ({
success: true,
results: [
{
recipe_id: 'abc',
original_file_path: '/recipes/abc.webp',
new_file_path: '/recipes/target/abc.webp',
success: true,
},
],
success_count: 1,
failure_count: 0,
}),
});
const results = await api.moveBulkModels(['/recipes/abc.webp'], '/target/folder');
expect(global.fetch).toHaveBeenCalledWith(
'/api/lm/recipes/move-bulk',
expect.objectContaining({
method: 'POST',
headers: { 'Content-Type': 'application/json' },
})
);
const { body } = global.fetch.mock.calls[0][1];
expect(JSON.parse(body)).toEqual({
recipe_ids: ['abc'],
target_path: '/target/folder',
});
expect(showToastMock).toHaveBeenCalledWith(
'toast.api.bulkMoveSuccess',
{ successCount: 1, type: 'Recipe' },
'success'
);
expect(results[0].recipe_id).toBe('abc');
});
it('posts recipe IDs for bulk delete', async () => {
const api = new RecipeSidebarApiClient();
global.fetch.mockResolvedValue({
ok: true,
json: async () => ({
success: true,
total_deleted: 2,
total_failed: 0,
failed: [],
}),
});
const result = await api.bulkDeleteModels(['/recipes/a.webp', '/recipes/b.webp']);
expect(global.fetch).toHaveBeenCalledWith(
'/api/lm/recipes/bulk-delete',
expect.objectContaining({
method: 'POST',
headers: { 'Content-Type': 'application/json' },
})
);
const parsedBody = JSON.parse(global.fetch.mock.calls[0][1].body);
expect(parsedBody.recipe_ids).toEqual(['a', 'b']);
expect(result).toMatchObject({
success: true,
deleted_count: 2,
failed_count: 0,
});
expect(loadingManagerMock.hide).toHaveBeenCalled();
});
});

View File

@@ -0,0 +1,55 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
const {
TRIGGER_WORDS_MODULE,
UTILS_MODULE,
I18N_HELPERS_MODULE,
} = vi.hoisted(() => ({
TRIGGER_WORDS_MODULE: new URL('../../../static/js/components/shared/TriggerWords.js', import.meta.url).pathname,
UTILS_MODULE: new URL('../../../static/js/components/shared/utils.js', import.meta.url).pathname,
I18N_HELPERS_MODULE: new URL('../../../static/js/utils/i18nHelpers.js', import.meta.url).pathname,
}));
vi.mock(I18N_HELPERS_MODULE, () => ({
translate: vi.fn((key, params, fallback) => fallback || key),
}));
vi.mock('../../../static/js/utils/uiHelpers.js', () => ({
showToast: vi.fn(),
copyToClipboard: vi.fn(),
}));
vi.mock('../../../static/js/api/modelApiFactory.js', () => ({
getModelApiClient: vi.fn(),
}));
describe("TriggerWords HTML Escaping", () => {
let renderTriggerWords;
beforeEach(async () => {
document.body.innerHTML = '';
const module = await import(TRIGGER_WORDS_MODULE);
renderTriggerWords = module.renderTriggerWords;
});
it("escapes HTML tags in trigger words rendering", () => {
const words = ["<style>guangying</style>", "fym <artist>"];
const html = renderTriggerWords(words, "test.safetensors");
expect(html).toContain("&lt;style&gt;guangying&lt;/style&gt;");
expect(html).toContain("fym &lt;artist&gt;");
expect(html).not.toContain("<style>guangying</style>");
});
it("uses dataset for copyTriggerWord to safely handle special characters", () => {
const words = ["word'with'quotes", "<tag>"];
const html = renderTriggerWords(words, "test.safetensors");
// Check for dataset-word attribute
expect(html).toContain('data-word="word&#39;with&#39;quotes"');
expect(html).toContain('data-word="&lt;tag&gt;"');
// Check for the onclick handler
expect(html).toContain('onclick="copyTriggerWord(this.dataset.word)"');
});
});

View File

@@ -234,7 +234,6 @@ describe('AppCore initialization flow', () => {
await vi.runAllTimersAsync();
expect(onboardingManager.start).toHaveBeenCalledTimes(1);
expect(bannerService.isBannerVisible).toHaveBeenCalledWith('version-mismatch');
});
it('does not reinitialize once initialized', async () => {
@@ -253,22 +252,13 @@ describe('AppCore initialization flow', () => {
expect(onboardingManager.start).not.toHaveBeenCalled();
});
it('skips bulk setup when viewing recipes', async () => {
it('initializes bulk setup when viewing recipes', async () => {
state.currentPageType = 'recipes';
await appCore.initialize();
expect(bulkManager.initialize).not.toHaveBeenCalled();
expect(BulkContextMenu).not.toHaveBeenCalled();
expect(bulkManager.setBulkContextMenu).not.toHaveBeenCalled();
});
it('suppresses onboarding when version mismatch banner is visible', async () => {
bannerService.isBannerVisible.mockReturnValueOnce(true);
await appCore.initialize();
await vi.runAllTimersAsync();
expect(onboardingManager.start).not.toHaveBeenCalled();
expect(bulkManager.initialize).toHaveBeenCalledTimes(1);
expect(BulkContextMenu).toHaveBeenCalledTimes(1);
expect(bulkManager.setBulkContextMenu).toHaveBeenCalledTimes(1);
});
});

View File

@@ -9,6 +9,8 @@ const removeSessionItemMock = vi.fn();
const RecipeContextMenuMock = vi.fn();
const refreshVirtualScrollMock = vi.fn();
const refreshRecipesMock = vi.fn();
const fetchUnifiedFolderTreeMock = vi.fn();
const fetchModelFoldersMock = vi.fn();
let importManagerInstance;
let recipeModalInstance;
@@ -35,6 +37,15 @@ vi.mock('../../../static/js/components/RecipeModal.js', () => ({
vi.mock('../../../static/js/state/index.js', () => ({
getCurrentPageState: getCurrentPageStateMock,
state: {
currentPageType: 'recipes',
global: { settings: {} },
virtualScroller: {
removeItemByFilePath: vi.fn(),
updateSingleItem: vi.fn(),
refreshWithData: vi.fn(),
},
},
}));
vi.mock('../../../static/js/utils/storageHelpers.js', () => ({
@@ -56,6 +67,14 @@ vi.mock('../../../static/js/utils/infiniteScroll.js', () => ({
vi.mock('../../../static/js/api/recipeApi.js', () => ({
refreshRecipes: refreshRecipesMock,
RecipeSidebarApiClient: vi.fn(() => ({
apiConfig: { config: { displayName: 'Recipes', supportsMove: true } },
fetchUnifiedFolderTree: fetchUnifiedFolderTreeMock.mockResolvedValue({ success: true, tree: {} }),
fetchModelFolders: fetchModelFoldersMock.mockResolvedValue({ success: true, folders: [] }),
fetchModelRoots: vi.fn().mockResolvedValue({ roots: ['/recipes'] }),
moveBulkModels: vi.fn(),
moveSingleModel: vi.fn(),
})),
}));
describe('RecipeManager', () => {
@@ -81,7 +100,7 @@ describe('RecipeManager', () => {
};
pageState = {
sortBy: 'date',
sortBy: 'date:desc',
searchOptions: undefined,
customFilter: undefined,
duplicatesMode: false,
@@ -118,8 +137,8 @@ describe('RecipeManager', () => {
const sortSelectElement = document.createElement('select');
sortSelectElement.id = 'sortSelect';
sortSelectElement.innerHTML = `
<option value="date">Date</option>
<option value="name">Name</option>
<option value="date:desc">Newest</option>
<option value="name:asc">Name A-Z</option>
`;
document.body.appendChild(sortSelectElement);
@@ -139,6 +158,8 @@ describe('RecipeManager', () => {
tags: true,
loraName: true,
loraModel: true,
prompt: true,
recursive: true,
});
expect(pageState.customFilter).toEqual({
@@ -162,10 +183,10 @@ describe('RecipeManager', () => {
expect(refreshVirtualScrollMock).toHaveBeenCalledTimes(1);
const sortSelect = document.getElementById('sortSelect');
sortSelect.value = 'name';
sortSelect.value = 'name:asc';
sortSelect.dispatchEvent(new Event('change', { bubbles: true }));
expect(pageState.sortBy).toBe('name');
expect(pageState.sortBy).toBe('name:asc');
expect(refreshVirtualScrollMock).toHaveBeenCalledTimes(2);
expect(initializePageFeaturesMock).toHaveBeenCalledTimes(1);
});

View File

@@ -0,0 +1,98 @@
import pytest
from types import SimpleNamespace
from py.metadata_collector.metadata_processor import MetadataProcessor
from py.metadata_collector.constants import MODELS, SAMPLING, IS_SAMPLER
class TestPipeTracer:
    """Tests that MetadataProcessor can trace a model path through BasicPipe
    bundle/unbundle nodes (ToBasicPipe / FromBasicPipe) back to the checkpoint."""
    @pytest.fixture
    def pipe_workflow_metadata(self):
        """
        Creates a mock metadata structure for a pipe-based workflow.
        Structure:
        Load Checkpoint(28) -> Lora Loader(52) -> ToBasicPipe(69) -> FromBasicPipe(71) -> KSampler(32)
        """
        # Graph as ComfyUI serializes it: node-id -> {inputs, class_type};
        # link values are [source_node_id, output_index] pairs.
        original_prompt = {
            '28': {
                'inputs': {'ckpt_name': 'Illustrious\\bananaSplitzXL_vee5PointOh.safetensors'},
                'class_type': 'CheckpointLoaderSimple'
            },
            '52': {
                'inputs': {
                    'model': ['28', 0],
                    'clip': ['28', 1]
                },
                'class_type': 'Lora Loader (LoraManager)'
            },
            '69': {
                'inputs': {
                    'model': ['52', 0],
                    'clip': ['52', 1],
                    'vae': ['28', 2],
                    'positive': ['75', 0],
                    'negative': ['30', 0]
                },
                'class_type': 'ToBasicPipe'
            },
            '71': {
                'inputs': {'basic_pipe': ['69', 0]},
                'class_type': 'FromBasicPipe'
            },
            '32': {
                'inputs': {
                    'seed': 131755205602911,
                    'steps': 5,
                    'cfg': 8.0,
                    'sampler_name': 'euler_ancestral',
                    'scheduler': 'karras',
                    'denoise': 1.0,
                    'model': ['71', 0],
                    'positive': ['71', 3],
                    'negative': ['71', 4],
                    'latent_image': ['76', 0]
                },
                'class_type': 'KSampler'
            },
            '75': {'inputs': {'text': 'positive', 'clip': ['52', 1]}, 'class_type': 'CLIPTextEncode'},
            '30': {'inputs': {'text': 'negative', 'clip': ['52', 1]}, 'class_type': 'CLIPTextEncode'},
            '76': {'inputs': {'width': 832, 'height': 1216, 'batch_size': 1}, 'class_type': 'EmptyLatentImage'}
        }
        # Collected execution metadata: only the checkpoint (28) is registered
        # under MODELS and only node 32 is flagged as a sampler under SAMPLING.
        metadata = {
            "current_prompt": SimpleNamespace(original_prompt=original_prompt),
            MODELS: {
                "28": {
                    "type": "checkpoint",
                    "name": "bananaSplitzXL_vee5PointOh.safetensors"
                }
            },
            SAMPLING: {
                "32": {
                    IS_SAMPLER: True,
                    "parameters": {
                        "sampler_name": "euler_ancestral",
                        "scheduler": "karras"
                    }
                }
            }
        }
        return metadata
    def test_trace_model_path_through_pipe(self, pipe_workflow_metadata):
        """Verify trace_model_path can follow: KSampler -> FromBasicPipe -> ToBasicPipe -> Lora -> Checkpoint."""
        prompt = pipe_workflow_metadata["current_prompt"]
        # Start trace from KSampler (32)
        ckpt_id = MetadataProcessor.trace_model_path(pipe_workflow_metadata, prompt, "32")
        assert ckpt_id == "28"
    def test_find_primary_checkpoint_with_pipe(self, pipe_workflow_metadata):
        """Verify find_primary_checkpoint returns the correct name even with pipe nodes."""
        # Providing sampler_id to test the optimization as well
        name = MetadataProcessor.find_primary_checkpoint(pipe_workflow_metadata, primary_sampler_id="32")
        assert name == "bananaSplitzXL_vee5PointOh.safetensors"

View File

@@ -0,0 +1,172 @@
import pytest
from types import SimpleNamespace
from py.metadata_collector.metadata_processor import MetadataProcessor
from py.metadata_collector.constants import MODELS, SAMPLING, IS_SAMPLER
class TestMetadataTracer:
    """Tests for MetadataProcessor's topological tracing: finding the primary
    sampler and the checkpoint actually connected to it, ignoring decoys."""
    @pytest.fixture
    def mock_workflow_metadata(self):
        """
        Creates a mock metadata structure with a complex workflow graph.
        Structure:
        Sampler(246) -> Guider(241) -> LoraLoader(264) -> CheckpointLoader(238)
        Also includes a "Decoy" checkpoint (ID 999) that is NOT connected,
        to verify we found the *connected* one, not just *any* one.
        """
        # 1. Define the Graph (Original Prompt)
        # Using IDs as strings to match typical ComfyUI behavior in metadata
        original_prompt = {
            "246": {
                "class_type": "SamplerCustomAdvanced",
                "inputs": {
                    "guider": ["241", 0],
                    "noise": ["255", 0],
                    "sampler": ["247", 0],
                    "sigmas": ["248", 0],
                    "latent_image": ["153", 0]
                }
            },
            "241": {
                "class_type": "CFGGuider",
                "inputs": {
                    "model": ["264", 0],
                    "positive": ["239", 0],
                    "negative": ["240", 0]
                }
            },
            "264": {
                "class_type": "LoraLoader", # Simplified name
                "inputs": {
                    "model": ["238", 0],
                    "lora_name": "some_style_lora.safetensors"
                }
            },
            "238": {
                "class_type": "CheckpointLoaderSimple",
                "inputs": {
                    "ckpt_name": "Correct_Model.safetensors"
                }
            },
            # unconnected / decoy nodes
            "999": {
                "class_type": "CheckpointLoaderSimple",
                "inputs": {
                    "ckpt_name": "Decoy_Model.safetensors"
                }
            },
            "154": { # Downstream VAE Decode
                "class_type": "VAEDecode",
                "inputs": {
                    "samples": ["246", 0]
                }
            }
        }
        # 2. Define the Metadata (Collected execution data)
        metadata = {
            "current_prompt": SimpleNamespace(original_prompt=original_prompt),
            "execution_order": ["238", "264", "241", "246", "154", "999"], # 999 execs last or separately
            # Models Registry
            MODELS: {
                "238": {
                    "type": "checkpoint",
                    "name": "Correct_Model.safetensors"
                },
                "999": {
                    "type": "checkpoint",
                    "name": "Decoy_Model.safetensors"
                }
            },
            # Sampling Registry
            SAMPLING: {
                "246": {
                    IS_SAMPLER: True,
                    "parameters": {
                        "sampler_name": "euler",
                        "scheduler": "normal"
                    }
                }
            },
            "images": {
                "first_decode": {
                    "node_id": "154"
                }
            }
        }
        return metadata
    def test_find_primary_sampler_identifies_correct_node(self, mock_workflow_metadata):
        """Verify find_primary_sampler correctly identifies the sampler connected to the downstream decode."""
        sampler_id, sampler_info = MetadataProcessor.find_primary_sampler(mock_workflow_metadata, downstream_id="154")
        assert sampler_id == "246"
        assert sampler_info is not None
        assert sampler_info["parameters"]["sampler_name"] == "euler"
    def test_trace_model_path_follows_topology(self, mock_workflow_metadata):
        """Verify trace_model_path follows: Sampler -> Guider -> Lora -> Checkpoint."""
        prompt = mock_workflow_metadata["current_prompt"]
        # Start trace from Sampler (246)
        # Should find Checkpoint (238)
        ckpt_id = MetadataProcessor.trace_model_path(mock_workflow_metadata, prompt, "246")
        assert ckpt_id == "238" # Should be the ID of the connected checkpoint
    def test_find_primary_checkpoint_prioritizes_connected_model(self, mock_workflow_metadata):
        """Verify find_primary_checkpoint returns the NAME of the topologically connected checkpoint, honoring the graph."""
        name = MetadataProcessor.find_primary_checkpoint(mock_workflow_metadata, downstream_id="154")
        assert name == "Correct_Model.safetensors"
        assert name != "Decoy_Model.safetensors"
    def test_trace_model_path_simple_direct_connection(self):
        """Verify it works for a simple Sampler -> Checkpoint connection."""
        original_prompt = {
            "100": { # Sampler
                "class_type": "KSampler",
                "inputs": {
                    "model": ["101", 0]
                }
            },
            "101": { # Checkpoint
                "class_type": "CheckpointLoaderSimple",
                "inputs": {}
            }
        }
        metadata = {
            "current_prompt": SimpleNamespace(original_prompt=original_prompt),
            MODELS: {
                "101": {"type": "checkpoint", "name": "Simple_Model.safetensors"}
            }
        }
        ckpt_id = MetadataProcessor.trace_model_path(metadata, metadata["current_prompt"], "100")
        assert ckpt_id == "101"
    def test_trace_stops_at_max_depth(self):
        """Verify logic halts if graph is infinitely cyclic or too deep."""
        # Create a cycle: Node 1 -> Node 2 -> Node 1
        original_prompt = {
            "1": {"inputs": {"model": ["2", 0]}},
            "2": {"inputs": {"model": ["1", 0]}}
        }
        metadata = {
            "current_prompt": SimpleNamespace(original_prompt=original_prompt),
            MODELS: {}  # No checkpoints registered
        }
        # Should return None, not hang forever
        ckpt_id = MetadataProcessor.trace_model_path(metadata, metadata["current_prompt"], "1")
        assert ckpt_id is None

View File

@@ -1,4 +1,5 @@
import logging
import copy
from py.nodes.utils import nunchaku_load_lora
@@ -28,6 +29,9 @@ class _DummyModel:
def __init__(self):
self.model = _DummyModelWrapper()
def clone(self):
return copy.deepcopy(self)
def test_nunchaku_load_lora_skips_missing_lora(monkeypatch, caplog):
import folder_paths

View File

@@ -103,8 +103,7 @@ def test_register_startup_hooks_appends_once():
]
assert routes.attach_dependencies in startup_bound_to_routes
assert routes.prewarm_cache in startup_bound_to_routes
assert len(startup_bound_to_routes) == 2
assert len(startup_bound_to_routes) == 1
def test_to_route_mapping_uses_handler_set():
@@ -212,4 +211,4 @@ def test_recipe_routes_setup_routes_uses_registrar(monkeypatch: pytest.MonkeyPat
if isinstance(getattr(cb, "__self__", None), recipe_routes.RecipeRoutes)
}
assert {type(cb.__self__) for cb in recipe_callbacks} == {recipe_routes.RecipeRoutes}
assert {cb.__name__ for cb in recipe_callbacks} == {"attach_dependencies", "prewarm_cache"}
assert {cb.__name__ for cb in recipe_callbacks} == {"attach_dependencies"}

View File

@@ -29,6 +29,7 @@ class RecipeRouteHarness:
persistence: "StubPersistenceService"
sharing: "StubSharingService"
downloader: "StubDownloader"
civitai: "StubCivitaiClient"
tmp_dir: Path
@@ -68,6 +69,10 @@ class StubRecipeScanner:
async def get_recipe_by_id(self, recipe_id: str) -> Optional[Dict[str, Any]]:
return self.recipes.get(recipe_id)
async def get_recipe_json_path(self, recipe_id: str) -> Optional[str]:
candidate = Path(self.recipes_dir) / f"{recipe_id}.recipe.json"
return str(candidate) if candidate.exists() else None
async def remove_recipe(self, recipe_id: str) -> None:
self.removed.append(recipe_id)
self.recipes.pop(recipe_id, None)
@@ -86,6 +91,7 @@ class StubAnalysisService:
self.remote_calls: List[Optional[str]] = []
self.local_calls: List[Optional[str]] = []
self.result = SimpleNamespace(payload={"loras": []}, status=200)
self._recipe_parser_factory = None
StubAnalysisService.instances.append(self)
async def analyze_uploaded_image(self, *, image_bytes: bytes | None, recipe_scanner) -> SimpleNamespace: # noqa: D401 - mirrors real signature
@@ -118,11 +124,12 @@ class StubPersistenceService:
def __init__(self, **_: Any) -> None:
self.save_calls: List[Dict[str, Any]] = []
self.delete_calls: List[str] = []
self.move_calls: List[Dict[str, str]] = []
self.save_result = SimpleNamespace(payload={"success": True, "recipe_id": "stub-id"}, status=200)
self.delete_result = SimpleNamespace(payload={"success": True}, status=200)
StubPersistenceService.instances.append(self)
async def save_recipe(self, *, recipe_scanner, image_bytes, image_base64, name, tags, metadata) -> SimpleNamespace: # noqa: D401
async def save_recipe(self, *, recipe_scanner, image_bytes, image_base64, name, tags, metadata, extension=None) -> SimpleNamespace: # noqa: D401
self.save_calls.append(
{
"recipe_scanner": recipe_scanner,
@@ -131,6 +138,7 @@ class StubPersistenceService:
"name": name,
"tags": list(tags),
"metadata": metadata,
"extension": extension,
}
)
return self.save_result
@@ -140,6 +148,12 @@ class StubPersistenceService:
await recipe_scanner.remove_recipe(recipe_id)
return self.delete_result
async def move_recipe(self, *, recipe_scanner, recipe_id: str, target_path: str) -> SimpleNamespace: # noqa: D401
self.move_calls.append({"recipe_id": recipe_id, "target_path": target_path})
return SimpleNamespace(
payload={"success": True, "recipe_id": recipe_id, "new_file_path": target_path}, status=200
)
async def update_recipe(self, *, recipe_scanner, recipe_id: str, updates: Dict[str, Any]) -> SimpleNamespace: # pragma: no cover - unused by smoke tests
return SimpleNamespace(payload={"success": True, "recipe_id": recipe_id, "updates": updates}, status=200)
@@ -189,6 +203,16 @@ class StubDownloader:
return True, destination
class StubCivitaiClient:
"""Stub for Civitai API client."""
def __init__(self) -> None:
self.image_info: Dict[str, Any] = {}
async def get_image_info(self, image_id: str) -> Optional[Dict[str, Any]]:
return self.image_info.get(image_id)
@asynccontextmanager
async def recipe_harness(monkeypatch, tmp_path: Path) -> AsyncIterator[RecipeRouteHarness]:
"""Context manager that yields a fully wired recipe route harness."""
@@ -198,12 +222,13 @@ async def recipe_harness(monkeypatch, tmp_path: Path) -> AsyncIterator[RecipeRou
StubSharingService.instances.clear()
scanner = StubRecipeScanner(tmp_path)
civitai_client = StubCivitaiClient()
async def fake_get_recipe_scanner():
return scanner
async def fake_get_civitai_client():
return object()
return civitai_client
downloader = StubDownloader()
@@ -232,6 +257,7 @@ async def recipe_harness(monkeypatch, tmp_path: Path) -> AsyncIterator[RecipeRou
persistence=StubPersistenceService.instances[-1],
sharing=StubSharingService.instances[-1],
downloader=downloader,
civitai=civitai_client,
tmp_dir=tmp_path,
)
@@ -296,8 +322,23 @@ async def test_save_and_delete_recipe_round_trip(monkeypatch, tmp_path: Path) ->
assert harness.persistence.delete_calls == ["saved-id"]
async def test_move_recipe_invokes_persistence(monkeypatch, tmp_path: Path) -> None:
async with recipe_harness(monkeypatch, tmp_path) as harness:
response = await harness.client.post(
"/api/lm/recipe/move",
json={"recipe_id": "move-me", "target_path": str(tmp_path / "recipes" / "subdir")},
)
payload = await response.json()
assert response.status == 200
assert payload["recipe_id"] == "move-me"
assert harness.persistence.move_calls == [
{"recipe_id": "move-me", "target_path": str(tmp_path / "recipes" / "subdir")}
]
async def test_import_remote_recipe(monkeypatch, tmp_path: Path) -> None:
provider_calls: list[int] = []
provider_calls: list[str | int] = []
class Provider:
async def get_model_version_info(self, model_version_id):
@@ -307,7 +348,7 @@ async def test_import_remote_recipe(monkeypatch, tmp_path: Path) -> None:
async def fake_get_default_metadata_provider():
return Provider()
monkeypatch.setattr(recipe_handlers, "get_default_metadata_provider", fake_get_default_metadata_provider)
monkeypatch.setattr("py.recipes.enrichment.get_default_metadata_provider", fake_get_default_metadata_provider)
async with recipe_harness(monkeypatch, tmp_path) as harness:
resources = [
@@ -349,16 +390,15 @@ async def test_import_remote_recipe(monkeypatch, tmp_path: Path) -> None:
assert call["tags"] == ["foo", "bar"]
metadata = call["metadata"]
assert metadata["base_model"] == "Flux Provider"
assert provider_calls == [33]
assert provider_calls == ["33"]
assert metadata["checkpoint"]["modelVersionId"] == 33
assert metadata["loras"][0]["weight"] == 0.25
assert metadata["gen_params"]["prompt"] == "hello world"
assert metadata["gen_params"]["checkpoint"]["modelVersionId"] == 33
assert harness.downloader.urls == ["https://example.com/images/1"]
async def test_import_remote_recipe_falls_back_to_request_base_model(monkeypatch, tmp_path: Path) -> None:
provider_calls: list[int] = []
provider_calls: list[str | int] = []
class Provider:
async def get_model_version_info(self, model_version_id):
@@ -368,7 +408,7 @@ async def test_import_remote_recipe_falls_back_to_request_base_model(monkeypatch
async def fake_get_default_metadata_provider():
return Provider()
monkeypatch.setattr(recipe_handlers, "get_default_metadata_provider", fake_get_default_metadata_provider)
monkeypatch.setattr("py.recipes.enrichment.get_default_metadata_provider", fake_get_default_metadata_provider)
async with recipe_harness(monkeypatch, tmp_path) as harness:
resources = [
@@ -397,7 +437,42 @@ async def test_import_remote_recipe_falls_back_to_request_base_model(monkeypatch
metadata = harness.persistence.save_calls[-1]["metadata"]
assert metadata["base_model"] == "Flux"
assert provider_calls == [77]
assert provider_calls == ["77"]
async def test_import_remote_video_recipe(monkeypatch, tmp_path: Path) -> None:
async def fake_get_default_metadata_provider():
return SimpleNamespace(get_model_version_info=lambda id: ({}, None))
monkeypatch.setattr("py.recipes.enrichment.get_default_metadata_provider", fake_get_default_metadata_provider)
async with recipe_harness(monkeypatch, tmp_path) as harness:
harness.civitai.image_info["12345"] = {
"id": 12345,
"url": "https://image.civitai.com/x/y/original=true/video.mp4",
"type": "video"
}
response = await harness.client.get(
"/api/lm/recipes/import-remote",
params={
"image_url": "https://civitai.com/images/12345",
"name": "Video Recipe",
"resources": json.dumps([]),
"base_model": "Flux",
},
)
payload = await response.json()
assert response.status == 200
assert payload["success"] is True
# Verify downloader was called with rewritten URL
assert "transcode=true" in harness.downloader.urls[0]
# Verify persistence was called with correct extension
call = harness.persistence.save_calls[-1]
assert call["extension"] == ".mp4"
async def test_analyze_uploaded_image_error_path(monkeypatch, tmp_path: Path) -> None:
@@ -452,3 +527,69 @@ async def test_share_and_download_recipe(monkeypatch, tmp_path: Path) -> None:
assert body == b"stub"
download_path.unlink(missing_ok=True)
async def test_import_remote_recipe_merges_metadata(monkeypatch, tmp_path: Path) -> None:
# 1. Mock Metadata Provider
class Provider:
async def get_model_version_info(self, model_version_id):
return {"baseModel": "Flux Provider"}, None
async def fake_get_default_metadata_provider():
return Provider()
monkeypatch.setattr("py.recipes.enrichment.get_default_metadata_provider", fake_get_default_metadata_provider)
# 2. Mock ExifUtils to return some embedded metadata
class MockExifUtils:
@staticmethod
def extract_image_metadata(path):
return "Recipe metadata: " + json.dumps({
"gen_params": {"prompt": "from embedded", "seed": 123}
})
monkeypatch.setattr(recipe_handlers, "ExifUtils", MockExifUtils)
# 3. Mock Parser Factory for StubAnalysisService
class MockParser:
async def parse_metadata(self, raw, recipe_scanner=None):
return json.loads(raw[len("Recipe metadata: "):])
class MockFactory:
def create_parser(self, raw):
if raw.startswith("Recipe metadata: "):
return MockParser()
return None
# 4. Setup Harness and run test
async with recipe_harness(monkeypatch, tmp_path) as harness:
harness.analysis._recipe_parser_factory = MockFactory()
# Civitai meta via image_info
harness.civitai.image_info["1"] = {
"id": 1,
"url": "https://example.com/images/1.jpg",
"meta": {"prompt": "from civitai", "cfg": 7.0}
}
resources = []
response = await harness.client.get(
"/api/lm/recipes/import-remote",
params={
"image_url": "https://civitai.com/images/1",
"name": "Merged Recipe",
"resources": json.dumps(resources),
"gen_params": json.dumps({"prompt": "from request", "steps": 25}),
},
)
payload = await response.json()
assert response.status == 200
call = harness.persistence.save_calls[-1]
metadata = call["metadata"]
gen_params = metadata["gen_params"]
# Priority: request (prompt=request, steps=25) > civitai (prompt=civitai, cfg=7.0) > embedded (prompt=embedded, seed=123)
assert gen_params["prompt"] == "from request"
assert gen_params["steps"] == 25
assert gen_params["cfg"] == 7.0
assert gen_params["seed"] == 123

View File

@@ -0,0 +1,113 @@
import pytest
import json
from py.recipes.parsers.comfy import ComfyMetadataParser
@pytest.mark.asyncio
async def test_parse_metadata_without_loras(monkeypatch):
    """A Comfy workflow with only a checkpoint (AIR URN) and no LoRA nodes parses
    into an empty loras list, a resolved checkpoint, and gen_params taken from
    the embedded extraMetadata blob."""
    # Version info the stubbed metadata provider will return for the URN's version id.
    checkpoint_info = {
        "id": 2224012,
        "modelId": 1908679,
        "model": {"name": "SDXL Checkpoint", "type": "checkpoint"},
        "name": "v1.0",
        "images": [{"url": "https://image.civitai.com/checkpoints/original=true"}],
        "baseModel": "sdxl",
        "downloadUrl": "https://civitai.com/api/download/checkpoint",
    }
    async def fake_metadata_provider():
        class Provider:
            async def get_model_version_info(self, version_id):
                # The parser should look up the version id from the AIR URN as a string.
                assert version_id == "2224012"
                return checkpoint_info, None
        return Provider()
    monkeypatch.setattr(
        "py.recipes.parsers.comfy.get_default_metadata_provider",
        fake_metadata_provider,
    )
    parser = ComfyMetadataParser()
    # User provided metadata
    metadata_json = {
        "resource-stack": {
            "class_type": "CheckpointLoaderSimple",
            "inputs": {"ckpt_name": "urn:air:sdxl:checkpoint:civitai:1908679@2224012"}
        },
        "6": {
            "class_type": "smZ CLIPTextEncode",
            "inputs": {"text": "Positive prompt content"},
            "_meta": {"title": "Positive"}
        },
        "7": {
            "class_type": "smZ CLIPTextEncode",
            "inputs": {"text": "Negative prompt content"},
            "_meta": {"title": "Negative"}
        },
        "11": {
            "class_type": "KSampler",
            "inputs": {
                "sampler_name": "euler_ancestral",
                "scheduler": "normal",
                "seed": 904124997,
                "steps": 35,
                "cfg": 6,
                "denoise": 0.1,
                "model": ["resource-stack", 0],
                "positive": ["6", 0],
                "negative": ["7", 0],
                "latent_image": ["21", 0]
            },
            "_meta": {"title": "KSampler"}
        },
        "extraMetadata": json.dumps({
            "prompt": "One woman, (solo:1.3), ...",
            "negativePrompt": "lowres, worst quality, ...",
            "steps": 35,
            "cfgScale": 6,
            "sampler": "euler_ancestral",
            "seed": 904124997,
            "width": 1024,
            "height": 1024
        })
    }
    result = await parser.parse_metadata(json.dumps(metadata_json))
    assert "error" not in result
    assert result["loras"] == []
    assert result["checkpoint"] is not None
    assert int(result["checkpoint"]["modelId"]) == 1908679
    assert int(result["checkpoint"]["id"]) == 2224012
    # gen_params come from extraMetadata; width/height collapse into "size".
    assert result["gen_params"]["prompt"] == "One woman, (solo:1.3), ..."
    assert result["gen_params"]["steps"] == 35
    assert result["gen_params"]["size"] == "1024x1024"
    assert result["from_comfy_metadata"] is True
@pytest.mark.asyncio
async def test_parse_metadata_without_extra_metadata(monkeypatch):
    """A workflow lacking an extraMetadata blob still resolves the checkpoint AIR URN."""
    class _StubProvider:
        async def get_model_version_info(self, version_id):
            return {"model": {"name": "Test"}, "id": version_id}, None

    async def fake_metadata_provider():
        return _StubProvider()

    monkeypatch.setattr(
        "py.recipes.parsers.comfy.get_default_metadata_provider",
        fake_metadata_provider,
    )
    workflow = {
        "node_1": {
            "class_type": "CheckpointLoaderSimple",
            "inputs": {"ckpt_name": "urn:air:sdxl:checkpoint:civitai:123@456"}
        }
    }
    parsed = await ComfyMetadataParser().parse_metadata(json.dumps(workflow))
    assert "error" not in parsed
    assert parsed["loras"] == []
    assert parsed["checkpoint"]["id"] == "456"

View File

@@ -0,0 +1,95 @@
import pytest
from py.recipes.merger import GenParamsMerger
def test_merge_priority():
    """Request params beat Civitai meta, which beats embedded metadata."""
    merged = GenParamsMerger.merge(
        {"prompt": "from request", "steps": 20},
        {"prompt": "from civitai", "cfg": 7.0},
        {"gen_params": {"prompt": "from embedded", "seed": 123}},
    )
    # Overlapping key ("prompt") resolves to the highest-priority source;
    # non-overlapping keys from every source survive.
    expected = {"prompt": "from request", "steps": 20, "cfg": 7.0, "seed": 123}
    for key, value in expected.items():
        assert merged[key] == value
def test_merge_no_request_params():
    """Without request params the Civitai prompt wins over the embedded one."""
    civitai = {"prompt": "from civitai", "cfg": 7.0}
    embedded = {"gen_params": {"prompt": "from embedded", "seed": 123}}
    merged = GenParamsMerger.merge(None, civitai, embedded)
    assert (merged["prompt"], merged["cfg"], merged["seed"]) == ("from civitai", 7.0, 123)
def test_merge_only_embedded():
    """Embedded gen_params alone are enough to populate the merge result."""
    merged = GenParamsMerger.merge(None, None, {"gen_params": {"prompt": "from embedded", "seed": 123}})
    assert (merged["prompt"], merged["seed"]) == ("from embedded", 123)
def test_merge_raw_embedded():
    """Embedded metadata may be a bare gen_params dict rather than wrapped under 'gen_params'."""
    bare_params = {"prompt": "from raw embedded", "seed": 456}
    merged = GenParamsMerger.merge(None, None, bare_params)
    assert (merged["prompt"], merged["seed"]) == ("from raw embedded", 456)
def test_merge_none_values():
    """All-None inputs merge to an empty dict."""
    assert GenParamsMerger.merge(None, None, None) == {}
def test_merge_filters_blacklisted_keys():
    """Identifier/link keys (id, url, hash, checkpoint) are stripped from every source."""
    merged = GenParamsMerger.merge(
        {"prompt": "test", "id": "should-be-removed", "checkpoint": "should-not-be-here"},
        {"cfg": 7, "url": "remove-me"},
        {"seed": 123, "hash": "remove-also"},
    )
    for kept in ("prompt", "cfg", "seed"):
        assert kept in merged
    for dropped in ("id", "url", "hash", "checkpoint"):
        assert dropped not in merged
def test_merge_filters_meta_and_normalizes_keys():
    """camelCase/capitalized Civitai keys are normalized to snake_case, structural
    keys (meta, draft, workflow, civitaiResources) are dropped, and request_params
    still take priority after normalization."""
    civitai_meta = {
        "prompt": "masterpiece",
        "cfgScale": 5,
        "clipSkip": 2,
        "negativePrompt": "low quality",
        "meta": {"irrelevant": "data"},
        "Size": "1024x1024",
        "draft": False,
        "workflow": "txt2img",
        "civitaiResources": [{"type": "checkpoint"}]
    }
    request_params = {
        "cfg_scale": 5.0,
        "clip_skip": "2",
        "Steps": 30
    }
    merged = GenParamsMerger.merge(request_params, civitai_meta)
    # Original (un-normalized) key spellings must not leak through.
    assert "meta" not in merged
    assert "cfgScale" not in merged
    assert "clipSkip" not in merged
    assert "negativePrompt" not in merged
    assert "Size" not in merged
    assert "draft" not in merged
    assert "workflow" not in merged
    assert "civitaiResources" not in merged
    assert merged["cfg_scale"] == 5.0  # From request_params
    assert merged["clip_skip"] == "2"  # From request_params
    assert merged["negative_prompt"] == "low quality"  # Normalized from civitai_meta
    assert merged["size"] == "1024x1024"  # Normalized from civitai_meta
    assert merged["steps"] == 30  # Normalized from request_params

View File

@@ -0,0 +1,104 @@
import pytest
from py.services.model_query import ModelFilterSet, FilterCriteria
from py.services.recipe_scanner import RecipeScanner
from pathlib import Path
from py.config import config
import asyncio
from types import SimpleNamespace
class StubSettings:
    """Minimal settings stand-in: every lookup falls through to its default."""

    def get(self, key, default=None):
        # No stored configuration — behave like an empty settings store.
        return default
# --- Model Filtering Tests ---
def test_model_filter_set_no_tags_include():
    """Including the __no_tags__ pseudo-tag keeps only untagged models (empty or None tags)."""
    models = [
        {"name": "m1", "tags": ["tag1"]},
        {"name": "m2", "tags": []},
        {"name": "m3", "tags": None},
        {"name": "m4", "tags": ["tag2"]},
    ]
    criteria = FilterCriteria(tags={"__no_tags__": "include"})
    kept = ModelFilterSet(StubSettings()).apply(models, criteria)
    assert {item["name"] for item in kept} == {"m2", "m3"}
    assert len(kept) == 2
def test_model_filter_set_no_tags_exclude():
    """Excluding the __no_tags__ pseudo-tag drops models with empty or None tags."""
    models = [
        {"name": "m1", "tags": ["tag1"]},
        {"name": "m2", "tags": []},
        {"name": "m3", "tags": None},
        {"name": "m4", "tags": ["tag2"]},
    ]
    criteria = FilterCriteria(tags={"__no_tags__": "exclude"})
    kept = ModelFilterSet(StubSettings()).apply(models, criteria)
    assert {item["name"] for item in kept} == {"m1", "m4"}
    assert len(kept) == 2
def test_model_filter_set_no_tags_mixed():
    """Combining a real tag include with __no_tags__ unions the two include sets."""
    filter_set = ModelFilterSet(StubSettings())
    data = [
        {"name": "m1", "tags": ["tag1"]},
        {"name": "m2", "tags": []},
        {"name": "m3", "tags": None},
        {"name": "m4", "tags": ["tag1", "tag2"]},
    ]
    # Include tag1 AND __no_tags__
    criteria = FilterCriteria(tags={"tag1": "include", "__no_tags__": "include"})
    result = filter_set.apply(data, criteria)
    # m1 (tag1), m2 (no tags), m3 (no tags), m4 (tag1)
    assert len(result) == 4
# --- Recipe Filtering Tests ---
class StubLoraScanner:
    """In-memory LoRA scanner double exposing an always-empty cache."""

    def __init__(self):
        # Mirrors the real scanner's cache shape: raw entries plus a version lookup.
        self._cache = SimpleNamespace(raw_data=[], version_index={})

    async def get_cached_data(self):
        """Return the (empty) cache namespace."""
        return self._cache

    async def refresh_cache(self, force=False):
        """No-op: there is nothing to rescan for the stub."""
        return None
@pytest.fixture
def recipe_scanner(tmp_path, monkeypatch):
    """RecipeScanner wired to a temporary loras root and the stub LoRA scanner."""
    # Point the global config at tmp_path so no real library is touched.
    monkeypatch.setattr(config, "loras_roots", [str(tmp_path)])
    stub = StubLoraScanner()
    scanner = RecipeScanner(lora_scanner=stub)
    return scanner
@pytest.mark.asyncio
async def test_recipe_scanner_no_tags_filter(recipe_scanner):
    """get_paginated_data honors the __no_tags__ pseudo-tag for recipe filtering."""
    scanner = recipe_scanner
    # Mock some recipe data
    recipes = [
        {"id": "r1", "tags": ["tag1"], "title": "R1"},
        {"id": "r2", "tags": [], "title": "R2"},
        {"id": "r3", "tags": None, "title": "R3"},
    ]
    # We need to inject these into the scanner's cache
    # Since get_paginated_data calls get_cached_data() which we stubbed
    scanner._cache = SimpleNamespace(
        raw_data=recipes,
        sorted_by_date=recipes,
        sorted_by_name=recipes
    )
    # Test Include __no_tags__
    result = await scanner.get_paginated_data(page=1, page_size=10, filters={"tags": {"__no_tags__": "include"}})
    assert len(result["items"]) == 2
    assert {item["id"] for item in result["items"]} == {"r2", "r3"}
    # Test Exclude __no_tags__
    result = await scanner.get_paginated_data(page=1, page_size=10, filters={"tags": {"__no_tags__": "exclude"}})
    assert len(result["items"]) == 1
    assert result["items"][0]["id"] == "r1"

View File

@@ -0,0 +1,282 @@
import pytest
import asyncio
from unittest.mock import AsyncMock, MagicMock
from py.services.recipe_scanner import RecipeScanner
from types import SimpleNamespace
# We define these here to help with spec= if needed
class MockCivitaiClient:
    """Spec-only stand-in for the Civitai client, used via MagicMock(spec=...)."""
    async def get_image_info(self, image_id):
        # Signature only; AsyncMock supplies behavior in the fixtures below.
        pass
class MockPersistenceService:
    """Spec-only stand-in for the recipe persistence service."""
    async def save_recipe(self, recipe):
        # Signature only; mocked per-test if needed.
        pass
@pytest.fixture
def mock_civitai_client():
    """Async-capable mock of the Civitai client (get_image_info awaitable)."""
    client = MagicMock(spec=MockCivitaiClient)
    client.get_image_info = AsyncMock()
    return client
@pytest.fixture
def mock_metadata_provider():
    """Metadata provider mock whose lookups default to (None, None) = not found."""
    provider = MagicMock()
    provider.get_model_version_info = AsyncMock(return_value=(None, None))
    provider.get_model_by_hash = AsyncMock(return_value=(None, None))
    return provider
@pytest.fixture
def recipe_scanner():
    """RecipeScanner backed by a LoRA scanner mock with an empty cache."""
    lora_scanner = MagicMock()
    lora_scanner.get_cached_data = AsyncMock(return_value=SimpleNamespace(raw_data=[]))
    scanner = RecipeScanner(lora_scanner=lora_scanner)
    return scanner
@pytest.fixture
def setup_scanner(recipe_scanner, mock_civitai_client, mock_metadata_provider, monkeypatch):
    """Wire the scanner to mocked Civitai/metadata providers and neutralize all
    filesystem side effects; returns (scanner, civitai_client, metadata_provider)."""
    monkeypatch.setattr(recipe_scanner, "_get_civitai_client", AsyncMock(return_value=mock_civitai_client))
    # Wrap the real method with a mock so we can check calls but still execute it
    real_save = recipe_scanner._save_recipe_persistently
    mock_save = AsyncMock(side_effect=real_save)
    monkeypatch.setattr(recipe_scanner, "_save_recipe_persistently", mock_save)
    monkeypatch.setattr("py.recipes.enrichment.get_default_metadata_provider", AsyncMock(return_value=mock_metadata_provider))
    # Mock get_recipe_json_path to avoid file system issues in tests
    recipe_scanner.get_recipe_json_path = AsyncMock(return_value="/tmp/test_recipe.json")
    # Mock open to avoid actual file writing
    monkeypatch.setattr("builtins.open", MagicMock())
    monkeypatch.setattr("json.dump", MagicMock())
    monkeypatch.setattr("os.path.exists", MagicMock(return_value=False)) # avoid EXIF logic
    return recipe_scanner, mock_civitai_client, mock_metadata_provider
@pytest.mark.asyncio
async def test_repair_all_recipes_skip_up_to_date(setup_scanner):
    """Recipes already at REPAIR_VERSION are counted as skipped and never re-saved."""
    recipe_scanner, _, _ = setup_scanner
    recipe_scanner._cache = SimpleNamespace(raw_data=[
        {"id": "r1", "repair_version": RecipeScanner.REPAIR_VERSION, "title": "Up to date"}
    ])
    # Run
    results = await recipe_scanner.repair_all_recipes()
    # Verify
    assert results["repaired"] == 0
    assert results["skipped"] == 1
    recipe_scanner._save_recipe_persistently.assert_not_called()
@pytest.mark.asyncio
async def test_repair_all_recipes_with_enriched_checkpoint_id(setup_scanner):
    """Repair enriches a missing checkpoint via the image's modelVersionId and the
    metadata provider, writing normalized keys and dropping legacy ones."""
    recipe_scanner, mock_civitai_client, mock_metadata_provider = setup_scanner
    recipe = {
        "id": "r1",
        "title": "Old Recipe",
        "source_url": "https://civitai.com/images/12345",
        "checkpoint": None,
        "gen_params": {"prompt": ""}
    }
    recipe_scanner._cache = SimpleNamespace(raw_data=[recipe])
    # Mock image info returning modelVersionId
    mock_civitai_client.get_image_info.return_value = {
        "modelVersionId": 5678,
        "meta": {"prompt": "a beautiful forest", "Checkpoint": "basic_name.safetensors"}
    }
    # Mock metadata provider returning full info
    mock_metadata_provider.get_model_version_info.return_value = ({
        "id": 5678,
        "modelId": 1234,
        "name": "v1.0",
        "model": {"name": "Full Model Name"},
        "baseModel": "SDXL 1.0",
        "images": [{"url": "https://image.url/thumb.jpg"}],
        "files": [{"type": "Model", "hashes": {"SHA256": "ABCDEF"}, "name": "full_filename.safetensors"}]
    }, None)
    # Run
    results = await recipe_scanner.repair_all_recipes()
    # Verify
    assert results["repaired"] == 1
    # Version id is passed to the provider as a string.
    mock_metadata_provider.get_model_version_info.assert_called_with("5678")
    saved_recipe = recipe_scanner._save_recipe_persistently.call_args[0][0]
    checkpoint = saved_recipe["checkpoint"]
    assert checkpoint["modelName"] == "Full Model Name"
    assert checkpoint["modelVersionName"] == "v1.0"
    assert checkpoint["modelId"] == 1234
    assert checkpoint["modelVersionId"] == 5678
    assert checkpoint["type"] == "checkpoint"
    # Legacy key spellings must not survive the repair.
    assert "name" not in checkpoint
    assert "version" not in checkpoint
    assert "hash" not in checkpoint
    assert "file_name" not in checkpoint
@pytest.mark.asyncio
async def test_repair_all_recipes_with_enriched_checkpoint_hash(setup_scanner):
    """With no source image, repair falls back to a 'Model hash' gen_param and
    enriches the checkpoint via get_model_by_hash."""
    recipe_scanner, mock_civitai_client, mock_metadata_provider = setup_scanner
    recipe = {
        "id": "r1",
        "title": "Embedded Only",
        "checkpoint": None,
        "gen_params": {
            "prompt": "",
            "Model hash": "hash123"
        }
    }
    recipe_scanner._cache = SimpleNamespace(raw_data=[recipe])
    # Mock metadata provider lookup by hash
    mock_metadata_provider.get_model_by_hash.return_value = ({
        "id": 999,
        "modelId": 888,
        "name": "v2.0",
        "model": {"name": "Hashed Model"},
        "baseModel": "SD 1.5",
        "files": [{"type": "Model", "hashes": {"SHA256": "hash123"}, "name": "hashed.safetensors"}]
    }, None)
    # Run
    results = await recipe_scanner.repair_all_recipes()
    # Verify
    assert results["repaired"] == 1
    mock_metadata_provider.get_model_by_hash.assert_called_with("hash123")
    saved_recipe = recipe_scanner._save_recipe_persistently.call_args[0][0]
    checkpoint = saved_recipe["checkpoint"]
    assert checkpoint["modelName"] == "Hashed Model"
    assert checkpoint["modelVersionName"] == "v2.0"
    assert checkpoint["modelId"] == 888
    assert checkpoint["type"] == "checkpoint"
@pytest.mark.asyncio
async def test_repair_all_recipes_fallback_to_basic(setup_scanner):
    """When no provider lookup succeeds, repair stores just the bare checkpoint
    name from gen_params without any enriched model IDs."""
    recipe_scanner, mock_civitai_client, mock_metadata_provider = setup_scanner
    recipe = {
        "id": "r1",
        "title": "No Meta Lookup",
        "checkpoint": None,
        "gen_params": {
            "prompt": "",
            "Checkpoint": "just_a_name.safetensors"
        }
    }
    recipe_scanner._cache = SimpleNamespace(raw_data=[recipe])
    # Mock metadata provider returning nothing
    mock_metadata_provider.get_model_by_hash.return_value = (None, "Model not found")
    # Run
    results = await recipe_scanner.repair_all_recipes()
    # Verify
    assert results["repaired"] == 1
    saved_recipe = recipe_scanner._save_recipe_persistently.call_args[0][0]
    assert saved_recipe["checkpoint"]["modelName"] == "just_a_name.safetensors"
    assert saved_recipe["checkpoint"]["type"] == "checkpoint"
    assert "modelId" not in saved_recipe["checkpoint"]
@pytest.mark.asyncio
async def test_repair_all_recipes_progress_callback(setup_scanner):
    """The repair loop reports progress and ends with a completion summary."""
    scanner, _, _ = setup_scanner
    scanner._cache = SimpleNamespace(raw_data=[
        {"id": "r1", "title": "R1", "checkpoint": None},
        {"id": "r2", "title": "R2", "checkpoint": None},
    ])

    reported = []

    async def record_progress(payload):
        reported.append(payload)

    await scanner.repair_all_recipes(
        progress_callback=record_progress
    )

    # At least one update per recipe, and the final event summarizes the run.
    assert len(reported) >= 2
    final = reported[-1]
    assert final["status"] == "completed"
    assert final["total"] == 2
    assert final["repaired"] == 2
@pytest.mark.asyncio
async def test_repair_all_recipes_strips_runtime_fields(setup_scanner):
    """Repair sanitizes persisted recipes: runtime-only fields are stripped and weight becomes strength."""
    recipe_scanner, mock_civitai_client, mock_metadata_provider = setup_scanner
    # Recipe carrying runtime-only fields that must never be written back to disk.
    recipe = {
        "id": "r1",
        "title": "Cleanup Test",
        "checkpoint": {
            "name": "CP",
            "inLibrary": True,
            "localPath": "/path/to/cp",
            "thumbnailUrl": "thumb.jpg"
        },
        "loras": [
            {
                "name": "L1",
                "weight": 0.8,
                "inLibrary": True,
                "localPath": "/path/to/l1",
                "preview_url": "p.jpg"
            }
        ],
        "gen_params": {"prompt": ""}
    }
    recipe_scanner._cache = SimpleNamespace(raw_data=[recipe])
    # repair_version 0 marks the recipe as not-yet-repaired so the pass processes it.
    recipe["repair_version"] = 0
    # Run
    await recipe_scanner.repair_all_recipes()
    # Verify sanitation of what was handed to the persistence layer.
    assert recipe_scanner._save_recipe_persistently.called
    saved_recipe = recipe_scanner._save_recipe_persistently.call_args[0][0]
    # 1. Check LoRA: runtime fields gone, weight renamed.
    lora = saved_recipe["loras"][0]
    assert "inLibrary" not in lora
    assert "localPath" not in lora
    assert "preview_url" not in lora
    assert "strength" in lora  # weight renamed to strength
    assert lora["strength"] == 0.8
    # 2. Check checkpoint: runtime fields gone.
    cp = saved_recipe["checkpoint"]
    assert "inLibrary" not in cp
    assert "localPath" not in cp
    assert "thumbnailUrl" not in cp
@pytest.mark.asyncio
async def test_sanitize_recipe_for_storage(recipe_scanner):
    """The storage sanitizer strips runtime-only keys and renames weight -> strength."""
    recipe = {
        "loras": [{"name": "L1", "inLibrary": True, "weight": 0.5}],
        "checkpoint": {"name": "CP", "localPath": "/tmp/cp"}
    }
    clean = recipe_scanner._sanitize_recipe_for_storage(recipe)
    assert "inLibrary" not in clean["loras"][0]
    assert "strength" in clean["loras"][0]
    assert clean["loras"][0]["strength"] == 0.5
    assert "localPath" not in clean["checkpoint"]
    # NOTE(review): the sanitizer only removes runtime fields and performs the
    # weight -> strength rename; it does not rewrite identity fields, so the
    # original "name" key is expected to survive untouched.
    assert clean["checkpoint"]["name"] == "CP"

View File

@@ -349,3 +349,295 @@ def test_enrich_formats_absolute_preview_paths(recipe_scanner, tmp_path):
enriched = scanner._enrich_lora_entry(dict(lora))
assert enriched["preview_url"] == config.get_preview_static_url(str(preview_path))
@pytest.mark.asyncio
async def test_initialize_waits_for_lora_scanner(monkeypatch):
    """RecipeScanner init triggers the LoRA scanner's background init exactly once and waits for it."""
    ready_flag = asyncio.Event()
    invocations = 0

    class StubLoraScanner:
        def __init__(self):
            self._cache = None
            self._is_initializing = True

        async def initialize_in_background(self):
            nonlocal invocations
            invocations += 1
            # Yield control once to mimic real async initialization work.
            await asyncio.sleep(0)
            self._cache = SimpleNamespace(raw_data=[])
            self._is_initializing = False
            ready_flag.set()

    stub = StubLoraScanner()
    recipe_scanner = RecipeScanner(lora_scanner=stub)
    await recipe_scanner.initialize_in_background()

    assert ready_flag.is_set()
    assert invocations == 1
    assert recipe_scanner._cache is not None
@pytest.mark.asyncio
async def test_invalid_model_version_marked_deleted_and_not_retried(monkeypatch, recipe_scanner):
    """A LoRA whose Civitai version lookup fails is flagged isDeleted and never re-queried."""
    scanner, _ = recipe_scanner
    recipes_dir = Path(config.loras_roots[0]) / "recipes"
    recipes_dir.mkdir(parents=True, exist_ok=True)
    recipe = {
        "id": "invalid-version",
        "file_path": str(recipes_dir / "invalid-version.webp"),
        "title": "Invalid",
        "modified": 0.0,
        "created_date": 0.0,
        "loras": [{"modelVersionId": 999, "file_name": "", "hash": ""}],
    }
    await scanner.add_recipe(dict(recipe))
    call_count = 0
    async def fake_get_hash(model_version_id):
        # Simulate Civitai reporting the version as missing/deleted.
        nonlocal call_count
        call_count += 1
        return None
    monkeypatch.setattr(scanner, "_get_hash_from_civitai", fake_get_hash)
    metadata_updated = await scanner._update_lora_information(recipe)
    # First pass performs the (failing) remote lookup and records the deletion flag.
    assert metadata_updated is True
    assert recipe["loras"][0]["isDeleted"] is True
    assert call_count == 1
    # Subsequent calls should skip remote lookup once marked deleted
    metadata_updated_again = await scanner._update_lora_information(recipe)
    assert metadata_updated_again is False
    assert call_count == 1
@pytest.mark.asyncio
async def test_load_recipe_persists_deleted_flag_on_invalid_version(monkeypatch, recipe_scanner, tmp_path):
    """Loading a recipe whose LoRA version cannot be resolved writes isDeleted back to disk."""
    scanner, _ = recipe_scanner
    recipes_dir = Path(config.loras_roots[0]) / "recipes"
    recipes_dir.mkdir(parents=True, exist_ok=True)

    recipe_id = "persist-invalid"
    json_path = recipes_dir / f"{recipe_id}.recipe.json"
    json_path.write_text(json.dumps({
        "id": recipe_id,
        "file_path": str(recipes_dir / f"{recipe_id}.webp"),
        "title": "Invalid",
        "modified": 0.0,
        "created_date": 0.0,
        "loras": [{"modelVersionId": 1234, "file_name": "", "hash": ""}],
    }))

    async def always_missing(model_version_id):
        # Civitai never resolves this version.
        return None

    monkeypatch.setattr(scanner, "_get_hash_from_civitai", always_missing)

    loaded = await scanner._load_recipe_file(str(json_path))
    assert loaded["loras"][0]["isDeleted"] is True

    # The flag must also be persisted to the on-disk JSON, not just in memory.
    on_disk = json.loads(json_path.read_text())
    assert on_disk["loras"][0]["isDeleted"] is True
@pytest.mark.asyncio
async def test_update_lora_filename_by_hash_updates_affected_recipes(tmp_path: Path, recipe_scanner):
    """Renaming a LoRA by hash rewrites only the recipes that reference it, both on disk and in cache."""
    scanner, _ = recipe_scanner
    recipes_dir = Path(config.loras_roots[0]) / "recipes"
    recipes_dir.mkdir(parents=True, exist_ok=True)
    # Recipe 1: Contains the LoRA with hash "hash1"
    recipe1_id = "recipe1"
    recipe1_path = recipes_dir / f"{recipe1_id}.recipe.json"
    recipe1_data = {
        "id": recipe1_id,
        "file_path": str(tmp_path / "img1.png"),
        "title": "Recipe 1",
        "modified": 0.0,
        "created_date": 0.0,
        "loras": [
            {"file_name": "old_name", "hash": "hash1"},
            {"file_name": "other_lora", "hash": "hash2"}
        ],
    }
    recipe1_path.write_text(json.dumps(recipe1_data))
    await scanner.add_recipe(dict(recipe1_data))
    # Recipe 2: Does NOT contain the LoRA
    recipe2_id = "recipe2"
    recipe2_path = recipes_dir / f"{recipe2_id}.recipe.json"
    recipe2_data = {
        "id": recipe2_id,
        "file_path": str(tmp_path / "img2.png"),
        "title": "Recipe 2",
        "modified": 0.0,
        "created_date": 0.0,
        "loras": [
            {"file_name": "other_lora", "hash": "hash2"}
        ],
    }
    recipe2_path.write_text(json.dumps(recipe2_data))
    await scanner.add_recipe(dict(recipe2_data))
    # Update LoRA name for "hash1" (using different case to test normalization)
    new_name = "new_name"
    file_count, cache_count = await scanner.update_lora_filename_by_hash("HASH1", new_name)
    # Exactly one file and one cache entry should be touched.
    assert file_count == 1
    assert cache_count == 1
    # Check file on disk: only the matching hash entry is renamed.
    persisted1 = json.loads(recipe1_path.read_text())
    assert persisted1["loras"][0]["file_name"] == new_name
    assert persisted1["loras"][1]["file_name"] == "other_lora"
    # Verify Recipe 2 unchanged
    persisted2 = json.loads(recipe2_path.read_text())
    assert persisted2["loras"][0]["file_name"] == "other_lora"
    # The in-memory cache must mirror the on-disk rename.
    cache = await scanner.get_cached_data()
    cached1 = next(r for r in cache.raw_data if r["id"] == recipe1_id)
    assert cached1["loras"][0]["file_name"] == new_name
@pytest.mark.asyncio
async def test_get_paginated_data_filters_by_favorite(recipe_scanner):
    """The favorite filter returns only favorited recipes; favorite=False applies no filter."""
    scanner, _ = recipe_scanner

    await scanner.add_recipe({
        "id": "regular",
        "file_path": "path/regular.png",
        "title": "Regular Recipe",
        "modified": 1.0,
        "created_date": 1.0,
        "loras": [],
    })
    await scanner.add_recipe({
        "id": "favorite",
        "file_path": "path/favorite.png",
        "title": "Favorite Recipe",
        "modified": 2.0,
        "created_date": 2.0,
        "loras": [],
        "favorite": True,
    })
    # Yield once so any deferred cache bookkeeping settles.
    await asyncio.sleep(0)

    # No filter: both recipes come back.
    everything = await scanner.get_paginated_data(page=1, page_size=10)
    assert len(everything["items"]) == 2

    # favorite=True narrows to the single favorited recipe.
    favorites_only = await scanner.get_paginated_data(page=1, page_size=10, filters={"favorite": True})
    assert len(favorites_only["items"]) == 1
    assert favorites_only["items"][0]["id"] == "favorite"

    # favorite=False is treated as "no filter", not "non-favorites only".
    unfiltered = await scanner.get_paginated_data(page=1, page_size=10, filters={"favorite": False})
    assert len(unfiltered["items"]) == 2
@pytest.mark.asyncio
async def test_get_paginated_data_filters_by_prompt(recipe_scanner):
    """Prompt search matches both positive and negative prompts, gated by search_options['prompt']."""
    scanner, _ = recipe_scanner
    # Add a recipe with a specific prompt
    await scanner.add_recipe({
        "id": "prompt-recipe",
        "file_path": "path/prompt.png",
        "title": "Prompt Recipe",
        "modified": 1.0,
        "created_date": 1.0,
        "loras": [],
        "gen_params": {
            "prompt": "a beautiful forest landscape"
        }
    })
    # Add a recipe with a specific negative prompt
    await scanner.add_recipe({
        "id": "neg-prompt-recipe",
        "file_path": "path/neg.png",
        "title": "Negative Prompt Recipe",
        "modified": 2.0,
        "created_date": 2.0,
        "loras": [],
        "gen_params": {
            "negative_prompt": "ugly, blurry mountains"
        }
    })
    # Let any deferred cache updates settle before querying.
    await asyncio.sleep(0)
    # Test search in prompt
    result_prompt = await scanner.get_paginated_data(
        page=1, page_size=10, search="forest", search_options={"prompt": True}
    )
    assert len(result_prompt["items"]) == 1
    assert result_prompt["items"][0]["id"] == "prompt-recipe"
    # Test search in negative prompt
    result_neg = await scanner.get_paginated_data(
        page=1, page_size=10, search="mountains", search_options={"prompt": True}
    )
    assert len(result_neg["items"]) == 1
    assert result_neg["items"][0]["id"] == "neg-prompt-recipe"
    # Test search disabled (should not find by prompt)
    result_disabled = await scanner.get_paginated_data(
        page=1, page_size=10, search="forest", search_options={"prompt": False}
    )
    assert len(result_disabled["items"]) == 0
@pytest.mark.asyncio
async def test_get_paginated_data_sorting(recipe_scanner):
    """Recipes sort by name, by LoRA count, and by creation date in both directions."""
    scanner, _ = recipe_scanner

    # Alpha: date 10, 2 LoRAs | Beta: date 20, 1 LoRA | Gamma: date 5, 3 LoRAs.
    seed = [
        {"id": "A", "title": "Alpha", "created_date": 10.0,
         "loras": [{}, {}], "file_path": "a.png"},
        {"id": "B", "title": "Beta", "created_date": 20.0,
         "loras": [{}], "file_path": "b.png"},
        {"id": "C", "title": "Gamma", "created_date": 5.0,
         "loras": [{}, {}, {}], "file_path": "c.png"},
    ]
    for entry in seed:
        await scanner.add_recipe(entry)
    await asyncio.sleep(0)

    async def ids_sorted_by(sort_key):
        page = await scanner.get_paginated_data(page=1, page_size=10, sort_by=sort_key)
        return [item["id"] for item in page["items"]]

    # Name descending: Gamma, Beta, Alpha.
    assert await ids_sorted_by("name:desc") == ["C", "B", "A"]
    # LoRA count descending: 3, 2, 1.
    assert await ids_sorted_by("loras_count:desc") == ["C", "A", "B"]
    # LoRA count ascending: 1, 2, 3.
    assert await ids_sorted_by("loras_count:asc") == ["B", "A", "C"]
    # Date ascending: 5, 10, 20.
    assert await ids_sorted_by("date:asc") == ["C", "A", "B"]

View File

@@ -12,7 +12,12 @@ from py.services.recipes.persistence_service import RecipePersistenceService
class DummyExifUtils:
def __init__(self):
self.appended = None
self.optimized_calls = 0
def optimize_image(self, image_data, target_width, format, quality, preserve_metadata):
self.optimized_calls += 1
return image_data, ".webp"
def append_recipe_metadata(self, image_path, recipe_data):
@@ -22,6 +27,46 @@ class DummyExifUtils:
return {}
@pytest.mark.asyncio
async def test_save_recipe_video_bypasses_optimization(tmp_path):
    """Saving a video recipe stores the raw bytes and skips image optimization and EXIF embedding."""
    exif_utils = DummyExifUtils()
    class DummyScanner:
        # Minimal scanner surface required by save_recipe.
        def __init__(self, root):
            self.recipes_dir = str(root)
        async def find_recipes_by_fingerprint(self, fingerprint):
            return []
        async def add_recipe(self, recipe_data):
            return None
    scanner = DummyScanner(tmp_path)
    service = RecipePersistenceService(
        exif_utils=exif_utils,
        card_preview_width=512,
        logger=logging.getLogger("test"),
    )
    metadata = {"base_model": "Flux", "loras": []}
    video_bytes = b"mp4-content"
    result = await service.save_recipe(
        recipe_scanner=scanner,
        image_bytes=video_bytes,
        image_base64=None,
        name="Video Recipe",
        tags=[],
        metadata=metadata,
        extension=".mp4",
    )
    # Video payloads must be written verbatim, keeping their original extension.
    assert result.payload["image_path"].endswith(".mp4")
    assert Path(result.payload["image_path"]).read_bytes() == video_bytes
    assert exif_utils.optimized_calls == 0, "Optimization should be bypassed for video"
    assert exif_utils.appended is None, "Metadata embedding should be bypassed for video"
@pytest.mark.asyncio
async def test_analyze_remote_image_download_failure_cleans_temp(tmp_path, monkeypatch):
exif_utils = DummyExifUtils()
@@ -50,7 +95,7 @@ async def test_analyze_remote_image_download_failure_cleans_temp(tmp_path, monke
temp_path = tmp_path / "temp.jpg"
def create_temp_path():
def create_temp_path(suffix=".jpg"):
temp_path.write_bytes(b"")
return str(temp_path)
@@ -356,3 +401,138 @@ async def test_save_recipe_from_widget_allows_empty_lora(tmp_path):
assert stored["loras"] == []
assert stored["title"] == "recipe"
assert scanner.added and scanner.added[0]["loras"] == []
@pytest.mark.asyncio
async def test_move_recipe_updates_paths(tmp_path):
    """Moving a recipe relocates image and JSON files and updates the stored folder/file_path."""
    exif_utils = DummyExifUtils()
    recipes_dir = tmp_path / "recipes"
    recipes_dir.mkdir(parents=True, exist_ok=True)
    recipe_id = "move-me"
    image_path = recipes_dir / f"{recipe_id}.webp"
    json_path = recipes_dir / f"{recipe_id}.recipe.json"
    image_path.write_bytes(b"img")
    json_path.write_text(
        json.dumps(
            {
                "id": recipe_id,
                "file_path": str(image_path),
                "title": "Recipe",
                "loras": [],
                "gen_params": {},
                "created_date": 0,
                "modified": 0,
            }
        )
    )
    class MoveScanner:
        # In-memory scanner whose JSON lookup follows the file as it moves on disk.
        def __init__(self, root: Path):
            self.recipes_dir = str(root)
            self.recipe = {
                "id": recipe_id,
                "file_path": str(image_path),
                "title": "Recipe",
                "loras": [],
                "gen_params": {},
                "created_date": 0,
                "modified": 0,
                "folder": "",
            }
        async def get_recipe_by_id(self, target_id: str):
            return self.recipe if target_id == recipe_id else None
        async def get_recipe_json_path(self, target_id: str):
            # rglob so the JSON is found even after it moves into a subfolder.
            matches = list(Path(self.recipes_dir).rglob(f"{target_id}.recipe.json"))
            return str(matches[0]) if matches else None
        async def update_recipe_metadata(self, target_id: str, metadata: dict):
            # Mirror the metadata change both in memory and in the on-disk JSON.
            if target_id != recipe_id:
                return False
            self.recipe.update(metadata)
            target_path = await self.get_recipe_json_path(target_id)
            if not target_path:
                return False
            existing = json.loads(Path(target_path).read_text())
            existing.update(metadata)
            Path(target_path).write_text(json.dumps(existing))
            return True
        async def get_cached_data(self, force_refresh: bool = False):  # noqa: ARG002 - signature parity
            return SimpleNamespace(raw_data=[self.recipe])
    scanner = MoveScanner(recipes_dir)
    service = RecipePersistenceService(
        exif_utils=exif_utils,
        card_preview_width=512,
        logger=logging.getLogger("test"),
    )
    target_folder = recipes_dir / "nested"
    result = await service.move_recipe(
        recipe_scanner=scanner, recipe_id=recipe_id, target_path=str(target_folder)
    )
    # Both files now live under the nested target folder.
    assert result.payload["folder"] == "nested"
    assert Path(result.payload["json_path"]).parent == target_folder
    assert Path(result.payload["new_file_path"]).parent == target_folder
    # The original JSON location is gone, and the persisted copy reflects the move.
    assert not json_path.exists()
    stored = json.loads(Path(result.payload["json_path"]).read_text())
    assert stored["folder"] == "nested"
    assert stored["file_path"] == result.payload["new_file_path"]
@pytest.mark.asyncio
async def test_analyze_remote_video(tmp_path):
    """Analyzing a remote Civitai video resolves it as video content with an .mp4 extension."""
    exif_utils = DummyExifUtils()
    class DummyFactory:
        # Parser factory whose parser yields an empty LoRA list.
        def create_parser(self, metadata):
            async def parse_metadata(m, recipe_scanner):
                return {"loras": []}
            return SimpleNamespace(parse_metadata=parse_metadata)
    async def downloader_factory():
        class Downloader:
            # Fake download: writes placeholder bytes to the requested path.
            async def download_file(self, url, path, use_auth=False):
                Path(path).write_bytes(b"video-content")
                return True, "success"
        return Downloader()
    service = RecipeAnalysisService(
        exif_utils=exif_utils,
        recipe_parser_factory=DummyFactory(),
        downloader_factory=downloader_factory,
        metadata_collector=None,
        metadata_processor_cls=None,
        metadata_registry_cls=None,
        standalone_mode=False,
        logger=logging.getLogger("test"),
    )
    class DummyClient:
        # Civitai client stub reporting the asset as a video.
        async def get_image_info(self, image_id):
            return {
                "url": "https://civitai.com/video.mp4",
                "type": "video",
                "meta": {"prompt": "video prompt"},
            }
    class DummyScanner:
        async def find_recipes_by_fingerprint(self, fingerprint):
            return []
    result = await service.analyze_remote_image(
        url="https://civitai.com/images/123",
        recipe_scanner=DummyScanner(),
        civitai_client=DummyClient(),
    )
    assert result.payload["is_video"] is True
    assert result.payload["extension"] == ".mp4"
    assert result.payload["image_base64"] is not None

View File

@@ -1,3 +1,4 @@
import os
import pytest
from py.services.base_model_service import BaseModelService
@@ -42,8 +43,8 @@ async def test_search_relative_paths_supports_multiple_tokens():
matching = await service.search_relative_paths("flux detail")
assert matching == [
"flux/detail-model.safetensors",
"detail/flux-trained.safetensors",
f"flux{os.sep}detail-model.safetensors",
f"detail{os.sep}flux-trained.safetensors",
]
@@ -60,4 +61,4 @@ async def test_search_relative_paths_excludes_tokens():
matching = await service.search_relative_paths("flux -detail")
assert matching == ["flux/keep-me.safetensors"]
assert matching == [f"flux{os.sep}keep-me.safetensors"]

View File

@@ -0,0 +1,92 @@
import pytest
from py.services.model_query import ModelFilterSet, FilterCriteria
from py.services.recipe_scanner import RecipeScanner
from types import SimpleNamespace
# Mock settings
class MockSettings:
    """Minimal settings stand-in whose lookups always fall through to the default."""

    def get(self, key, default=None):
        return default
# --- Model Filtering Tests ---
def test_model_filter_set_root_recursive_true():
    """With recursion enabled, the root-folder filter also includes subfolder items."""
    filter_set = ModelFilterSet(MockSettings())
    catalog = [
        {"model_name": "root_item", "folder": ""},
        {"model_name": "sub_item", "folder": "sub"},
    ]
    criteria = FilterCriteria(folder="", search_options={"recursive": True})
    matched = filter_set.apply(catalog, criteria)

    assert len(matched) == 2
    matched_names = [entry["model_name"] for entry in matched]
    assert "root_item" in matched_names
    assert "sub_item" in matched_names
def test_model_filter_set_root_recursive_false():
    """Without recursion, the root-folder filter excludes items from subfolders."""
    filter_set = ModelFilterSet(MockSettings())
    catalog = [
        {"model_name": "root_item", "folder": ""},
        {"model_name": "sub_item", "folder": "sub"},
    ]
    criteria = FilterCriteria(folder="", search_options={"recursive": False})
    matched = filter_set.apply(catalog, criteria)

    assert len(matched) == 1
    assert matched[0]["model_name"] == "root_item"
# --- Recipe Filtering Tests ---
@pytest.mark.asyncio
async def test_recipe_scanner_root_recursive_true():
    """Root-folder pagination with recursive=True returns recipes from every folder."""

    class StubLoraScanner:
        async def get_cached_data(self):
            return SimpleNamespace(raw_data=[])

    scanner = RecipeScanner(lora_scanner=StubLoraScanner())
    root_recipe = {"id": "r1", "title": "root_recipe", "folder": "", "modified": 1.0, "created_date": 1.0, "loras": []}
    sub_recipe = {"id": "r2", "title": "sub_recipe", "folder": "sub", "modified": 2.0, "created_date": 2.0, "loras": []}
    # Seed the cache directly so get_paginated_data runs without a filesystem scan.
    scanner._cache = SimpleNamespace(
        raw_data=[root_recipe, sub_recipe],
        sorted_by_date=[dict(sub_recipe), dict(root_recipe)],
        sorted_by_name=[],
        version_index={},
    )

    page = await scanner.get_paginated_data(page=1, page_size=10, folder="", recursive=True)
    assert len(page["items"]) == 2
@pytest.mark.asyncio
async def test_recipe_scanner_root_recursive_false():
    """Root-folder pagination with recursive=False returns only root-level recipes."""

    class StubLoraScanner:
        async def get_cached_data(self):
            return SimpleNamespace(raw_data=[])

    scanner = RecipeScanner(lora_scanner=StubLoraScanner())
    root_recipe = {"id": "r1", "title": "root_recipe", "folder": "", "modified": 1.0, "created_date": 1.0, "loras": []}
    sub_recipe = {"id": "r2", "title": "sub_recipe", "folder": "sub", "modified": 2.0, "created_date": 2.0, "loras": []}
    # Seed the cache directly so get_paginated_data runs without a filesystem scan.
    scanner._cache = SimpleNamespace(
        raw_data=[root_recipe, sub_recipe],
        sorted_by_date=[dict(sub_recipe), dict(root_recipe)],
        sorted_by_name=[],
        version_index={},
    )

    page = await scanner.get_paginated_data(page=1, page_size=10, folder="", recursive=False)
    assert len(page["items"]) == 1
    assert page["items"][0]["id"] == "r1"

View File

@@ -311,7 +311,7 @@ app.registerExtension({
});
} else {
// If no ',,' delimiter, treat the entire message as one group
const existing = existingTagMap[message.trim()];
const existing = consumeExistingState(message.trim());
tagArray = [{
text: message.trim(),
// Use existing values if available, otherwise use defaults