mirror of
https://github.com/willmiao/ComfyUI-Lora-Manager.git
synced 2026-03-25 23:25:43 -03:00
Merge branch 'sort-by-usage-count' into main
This commit is contained in:
84
GEMINI.md
Normal file
84
GEMINI.md
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
# ComfyUI LoRA Manager
|
||||||
|
|
||||||
|
## Project Overview
|
||||||
|
|
||||||
|
ComfyUI LoRA Manager is a comprehensive extension for ComfyUI that streamlines the organization, downloading, and application of LoRA models. It functions as both a custom node within ComfyUI and a standalone application.
|
||||||
|
|
||||||
|
**Key Features:**
|
||||||
|
* **Model Management:** Browse, organize, and download LoRA models (and Checkpoints/Embeddings) from Civitai and CivArchive.
|
||||||
|
* **Visualization:** Preview images, videos, and trigger words.
|
||||||
|
* **Workflow Integration:** "One-click" integration into ComfyUI workflows, preserving generation parameters.
|
||||||
|
* **Recipe System:** Save and share LoRA combinations as "recipes".
|
||||||
|
* **Architecture:** Hybrid Python backend (API, file management) and JavaScript/HTML frontend (Web UI).
|
||||||
|
|
||||||
|
## Directory Structure
|
||||||
|
|
||||||
|
* `py/`: Core Python backend source code.
|
||||||
|
* `lora_manager.py`: Main entry point for the ComfyUI node.
|
||||||
|
* `routes/`: API route definitions (using `aiohttp` in standalone, or ComfyUI's server).
|
||||||
|
* `services/`: Business logic (downloading, metadata, scanning).
|
||||||
|
* `nodes/`: ComfyUI custom node implementations.
|
||||||
|
* `static/`: Frontend static assets (CSS, JS, Images).
|
||||||
|
* `templates/`: HTML templates (Jinja2).
|
||||||
|
* `locales/`: Internationalization JSON files.
|
||||||
|
* `web/comfyui/`: JavaScript extensions specifically for the ComfyUI interface.
|
||||||
|
* `standalone.py`: Entry point for running the manager as a standalone web app.
|
||||||
|
* `tests/`: Backend tests.
|
||||||
|
* `requirements.txt`: Python runtime dependencies.
|
||||||
|
* `package.json`: Frontend development dependencies and test scripts.
|
||||||
|
|
||||||
|
## Building and Running
|
||||||
|
|
||||||
|
### Prerequisites
|
||||||
|
* Python 3.8+
|
||||||
|
* Node.js (only for running frontend tests)
|
||||||
|
|
||||||
|
### Backend Setup
|
||||||
|
1. Install Python dependencies:
|
||||||
|
```bash
|
||||||
|
pip install -r requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
### Running in Standalone Mode
|
||||||
|
You can run the manager independently of ComfyUI for development or management purposes.
|
||||||
|
```bash
|
||||||
|
python standalone.py --port 8188
|
||||||
|
```
|
||||||
|
|
||||||
|
### Running in ComfyUI
|
||||||
|
Ensure the folder is located in `ComfyUI/custom_nodes/`. ComfyUI will automatically load it upon startup.
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
### Backend Tests (Pytest)
|
||||||
|
1. Install development dependencies:
|
||||||
|
```bash
|
||||||
|
pip install -r requirements-dev.txt
|
||||||
|
```
|
||||||
|
2. Run tests:
|
||||||
|
```bash
|
||||||
|
pytest
|
||||||
|
```
|
||||||
|
* Coverage reports are generated in `coverage/backend/`.
|
||||||
|
|
||||||
|
### Frontend Tests (Vitest)
|
||||||
|
1. Install Node dependencies:
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
```
|
||||||
|
2. Run tests:
|
||||||
|
```bash
|
||||||
|
npm run test
|
||||||
|
```
|
||||||
|
3. Run coverage:
|
||||||
|
```bash
|
||||||
|
npm run test:coverage
|
||||||
|
```
|
||||||
|
|
||||||
|
## Development Conventions
|
||||||
|
|
||||||
|
* **Python Style:** Follow PEP 8. Use snake_case for files/functions and PascalCase for classes.
|
||||||
|
* **Frontend:** Standard ES modules. UI components often end in `_widget.js`.
|
||||||
|
* **Configuration:** User settings are stored in `settings.json`. Developers should reference `settings.json.example`.
|
||||||
|
* **Localization:** Update `locales/<lang>.json` and run `scripts/sync_translation_keys.py` when changing UI text.
|
||||||
|
* **Documentation:** Architecture details are in `docs/architecture/` and `IFLOW.md`.
|
||||||
@@ -4,7 +4,7 @@ try: # pragma: no cover - import fallback for pytest collection
|
|||||||
from .py.nodes.trigger_word_toggle import TriggerWordToggle
|
from .py.nodes.trigger_word_toggle import TriggerWordToggle
|
||||||
from .py.nodes.prompt import PromptLoraManager
|
from .py.nodes.prompt import PromptLoraManager
|
||||||
from .py.nodes.lora_stacker import LoraStacker
|
from .py.nodes.lora_stacker import LoraStacker
|
||||||
from .py.nodes.save_image import SaveImage
|
from .py.nodes.save_image import SaveImageLM
|
||||||
from .py.nodes.debug_metadata import DebugMetadata
|
from .py.nodes.debug_metadata import DebugMetadata
|
||||||
from .py.nodes.wanvideo_lora_select import WanVideoLoraSelect
|
from .py.nodes.wanvideo_lora_select import WanVideoLoraSelect
|
||||||
from .py.nodes.wanvideo_lora_select_from_text import WanVideoLoraSelectFromText
|
from .py.nodes.wanvideo_lora_select_from_text import WanVideoLoraSelectFromText
|
||||||
@@ -24,7 +24,7 @@ except ImportError: # pragma: no cover - allows running under pytest without pa
|
|||||||
LoraManagerTextLoader = importlib.import_module("py.nodes.lora_loader").LoraManagerTextLoader
|
LoraManagerTextLoader = importlib.import_module("py.nodes.lora_loader").LoraManagerTextLoader
|
||||||
TriggerWordToggle = importlib.import_module("py.nodes.trigger_word_toggle").TriggerWordToggle
|
TriggerWordToggle = importlib.import_module("py.nodes.trigger_word_toggle").TriggerWordToggle
|
||||||
LoraStacker = importlib.import_module("py.nodes.lora_stacker").LoraStacker
|
LoraStacker = importlib.import_module("py.nodes.lora_stacker").LoraStacker
|
||||||
SaveImage = importlib.import_module("py.nodes.save_image").SaveImage
|
SaveImageLM = importlib.import_module("py.nodes.save_image").SaveImageLM
|
||||||
DebugMetadata = importlib.import_module("py.nodes.debug_metadata").DebugMetadata
|
DebugMetadata = importlib.import_module("py.nodes.debug_metadata").DebugMetadata
|
||||||
WanVideoLoraSelect = importlib.import_module("py.nodes.wanvideo_lora_select").WanVideoLoraSelect
|
WanVideoLoraSelect = importlib.import_module("py.nodes.wanvideo_lora_select").WanVideoLoraSelect
|
||||||
WanVideoLoraSelectFromText = importlib.import_module("py.nodes.wanvideo_lora_select_from_text").WanVideoLoraSelectFromText
|
WanVideoLoraSelectFromText = importlib.import_module("py.nodes.wanvideo_lora_select_from_text").WanVideoLoraSelectFromText
|
||||||
@@ -36,7 +36,7 @@ NODE_CLASS_MAPPINGS = {
|
|||||||
LoraManagerTextLoader.NAME: LoraManagerTextLoader,
|
LoraManagerTextLoader.NAME: LoraManagerTextLoader,
|
||||||
TriggerWordToggle.NAME: TriggerWordToggle,
|
TriggerWordToggle.NAME: TriggerWordToggle,
|
||||||
LoraStacker.NAME: LoraStacker,
|
LoraStacker.NAME: LoraStacker,
|
||||||
SaveImage.NAME: SaveImage,
|
SaveImageLM.NAME: SaveImageLM,
|
||||||
DebugMetadata.NAME: DebugMetadata,
|
DebugMetadata.NAME: DebugMetadata,
|
||||||
WanVideoLoraSelect.NAME: WanVideoLoraSelect,
|
WanVideoLoraSelect.NAME: WanVideoLoraSelect,
|
||||||
WanVideoLoraSelectFromText.NAME: WanVideoLoraSelectFromText
|
WanVideoLoraSelectFromText.NAME: WanVideoLoraSelectFromText
|
||||||
|
|||||||
@@ -159,6 +159,12 @@
|
|||||||
"success": "Updated license metadata for {count} {typePlural}",
|
"success": "Updated license metadata for {count} {typePlural}",
|
||||||
"none": "All {typePlural} already have license metadata",
|
"none": "All {typePlural} already have license metadata",
|
||||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||||
|
},
|
||||||
|
"repairRecipes": {
|
||||||
|
"label": "Recipe-Daten reparieren",
|
||||||
|
"loading": "Recipe-Daten werden repariert...",
|
||||||
|
"success": "{count} Rezepte erfolgreich repariert.",
|
||||||
|
"error": "Recipe-Reparatur fehlgeschlagen: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"header": {
|
"header": {
|
||||||
@@ -188,7 +194,8 @@
|
|||||||
"creator": "Ersteller",
|
"creator": "Ersteller",
|
||||||
"title": "Rezept-Titel",
|
"title": "Rezept-Titel",
|
||||||
"loraName": "LoRA-Dateiname",
|
"loraName": "LoRA-Dateiname",
|
||||||
"loraModel": "LoRA-Modellname"
|
"loraModel": "LoRA-Modellname",
|
||||||
|
"prompt": "Prompt"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"filter": {
|
"filter": {
|
||||||
@@ -199,6 +206,7 @@
|
|||||||
"license": "Lizenz",
|
"license": "Lizenz",
|
||||||
"noCreditRequired": "Kein Credit erforderlich",
|
"noCreditRequired": "Kein Credit erforderlich",
|
||||||
"allowSellingGeneratedContent": "Verkauf erlaubt",
|
"allowSellingGeneratedContent": "Verkauf erlaubt",
|
||||||
|
"noTags": "Keine Tags",
|
||||||
"clearAll": "Alle Filter löschen"
|
"clearAll": "Alle Filter löschen"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
@@ -521,6 +529,7 @@
|
|||||||
"replacePreview": "Vorschau ersetzen",
|
"replacePreview": "Vorschau ersetzen",
|
||||||
"setContentRating": "Inhaltsbewertung festlegen",
|
"setContentRating": "Inhaltsbewertung festlegen",
|
||||||
"moveToFolder": "In Ordner verschieben",
|
"moveToFolder": "In Ordner verschieben",
|
||||||
|
"repairMetadata": "[TODO: Translate] Repair metadata",
|
||||||
"excludeModel": "Modell ausschließen",
|
"excludeModel": "Modell ausschließen",
|
||||||
"deleteModel": "Modell löschen",
|
"deleteModel": "Modell löschen",
|
||||||
"shareRecipe": "Rezept teilen",
|
"shareRecipe": "Rezept teilen",
|
||||||
@@ -591,10 +600,26 @@
|
|||||||
"selectLoraRoot": "Bitte wählen Sie ein LoRA-Stammverzeichnis aus"
|
"selectLoraRoot": "Bitte wählen Sie ein LoRA-Stammverzeichnis aus"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"sort": {
|
||||||
|
"title": "Rezepte sortieren nach...",
|
||||||
|
"name": "Name",
|
||||||
|
"nameAsc": "A - Z",
|
||||||
|
"nameDesc": "Z - A",
|
||||||
|
"date": "Datum",
|
||||||
|
"dateDesc": "Neueste",
|
||||||
|
"dateAsc": "Älteste",
|
||||||
|
"lorasCount": "LoRA-Anzahl",
|
||||||
|
"lorasCountDesc": "Meiste",
|
||||||
|
"lorasCountAsc": "Wenigste"
|
||||||
|
},
|
||||||
"refresh": {
|
"refresh": {
|
||||||
"title": "Rezeptliste aktualisieren"
|
"title": "Rezeptliste aktualisieren"
|
||||||
},
|
},
|
||||||
"filteredByLora": "Gefiltert nach LoRA"
|
"filteredByLora": "Gefiltert nach LoRA",
|
||||||
|
"favorites": {
|
||||||
|
"title": "Nur Favoriten anzeigen",
|
||||||
|
"action": "Favoriten"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"duplicates": {
|
"duplicates": {
|
||||||
"found": "{count} Duplikat-Gruppen gefunden",
|
"found": "{count} Duplikat-Gruppen gefunden",
|
||||||
@@ -620,6 +645,13 @@
|
|||||||
"noMissingLoras": "Keine fehlenden LoRAs zum Herunterladen",
|
"noMissingLoras": "Keine fehlenden LoRAs zum Herunterladen",
|
||||||
"getInfoFailed": "Fehler beim Abrufen der Informationen für fehlende LoRAs",
|
"getInfoFailed": "Fehler beim Abrufen der Informationen für fehlende LoRAs",
|
||||||
"prepareError": "Fehler beim Vorbereiten der LoRAs für den Download: {message}"
|
"prepareError": "Fehler beim Vorbereiten der LoRAs für den Download: {message}"
|
||||||
|
},
|
||||||
|
"repair": {
|
||||||
|
"starting": "[TODO: Translate] Repairing recipe metadata...",
|
||||||
|
"success": "[TODO: Translate] Recipe metadata repaired successfully",
|
||||||
|
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
|
||||||
|
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
|
||||||
|
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -641,7 +673,8 @@
|
|||||||
"recursiveUnavailable": "Rekursive Suche ist nur in der Baumansicht verfügbar",
|
"recursiveUnavailable": "Rekursive Suche ist nur in der Baumansicht verfügbar",
|
||||||
"collapseAllDisabled": "Im Listenmodus nicht verfügbar",
|
"collapseAllDisabled": "Im Listenmodus nicht verfügbar",
|
||||||
"dragDrop": {
|
"dragDrop": {
|
||||||
"unableToResolveRoot": "Zielpfad für das Verschieben konnte nicht ermittelt werden."
|
"unableToResolveRoot": "Zielpfad für das Verschieben konnte nicht ermittelt werden.",
|
||||||
|
"moveUnsupported": "Move is not supported for this item."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"statistics": {
|
"statistics": {
|
||||||
@@ -1463,7 +1496,8 @@
|
|||||||
"bulkMoveFailures": "Fehlgeschlagene Verschiebungen:\n{failures}",
|
"bulkMoveFailures": "Fehlgeschlagene Verschiebungen:\n{failures}",
|
||||||
"bulkMoveSuccess": "{successCount} {type}s erfolgreich verschoben",
|
"bulkMoveSuccess": "{successCount} {type}s erfolgreich verschoben",
|
||||||
"exampleImagesDownloadSuccess": "Beispielbilder erfolgreich heruntergeladen!",
|
"exampleImagesDownloadSuccess": "Beispielbilder erfolgreich heruntergeladen!",
|
||||||
"exampleImagesDownloadFailed": "Fehler beim Herunterladen der Beispielbilder: {message}"
|
"exampleImagesDownloadFailed": "Fehler beim Herunterladen der Beispielbilder: {message}",
|
||||||
|
"moveFailed": "Failed to move item: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"banners": {
|
"banners": {
|
||||||
|
|||||||
@@ -159,6 +159,12 @@
|
|||||||
"success": "Updated license metadata for {count} {typePlural}",
|
"success": "Updated license metadata for {count} {typePlural}",
|
||||||
"none": "All {typePlural} already have license metadata",
|
"none": "All {typePlural} already have license metadata",
|
||||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||||
|
},
|
||||||
|
"repairRecipes": {
|
||||||
|
"label": "Repair recipes data",
|
||||||
|
"loading": "Repairing recipe data...",
|
||||||
|
"success": "Successfully repaired {count} recipes.",
|
||||||
|
"error": "Recipe repair failed: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"header": {
|
"header": {
|
||||||
@@ -188,7 +194,8 @@
|
|||||||
"creator": "Creator",
|
"creator": "Creator",
|
||||||
"title": "Recipe Title",
|
"title": "Recipe Title",
|
||||||
"loraName": "LoRA Filename",
|
"loraName": "LoRA Filename",
|
||||||
"loraModel": "LoRA Model Name"
|
"loraModel": "LoRA Model Name",
|
||||||
|
"prompt": "Prompt"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"filter": {
|
"filter": {
|
||||||
@@ -199,6 +206,7 @@
|
|||||||
"license": "License",
|
"license": "License",
|
||||||
"noCreditRequired": "No Credit Required",
|
"noCreditRequired": "No Credit Required",
|
||||||
"allowSellingGeneratedContent": "Allow Selling",
|
"allowSellingGeneratedContent": "Allow Selling",
|
||||||
|
"noTags": "No tags",
|
||||||
"clearAll": "Clear All Filters"
|
"clearAll": "Clear All Filters"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
@@ -521,6 +529,7 @@
|
|||||||
"replacePreview": "Replace Preview",
|
"replacePreview": "Replace Preview",
|
||||||
"setContentRating": "Set Content Rating",
|
"setContentRating": "Set Content Rating",
|
||||||
"moveToFolder": "Move to Folder",
|
"moveToFolder": "Move to Folder",
|
||||||
|
"repairMetadata": "Repair metadata",
|
||||||
"excludeModel": "Exclude Model",
|
"excludeModel": "Exclude Model",
|
||||||
"deleteModel": "Delete Model",
|
"deleteModel": "Delete Model",
|
||||||
"shareRecipe": "Share Recipe",
|
"shareRecipe": "Share Recipe",
|
||||||
@@ -591,10 +600,26 @@
|
|||||||
"selectLoraRoot": "Please select a LoRA root directory"
|
"selectLoraRoot": "Please select a LoRA root directory"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"sort": {
|
||||||
|
"title": "Sort recipes by...",
|
||||||
|
"name": "Name",
|
||||||
|
"nameAsc": "A - Z",
|
||||||
|
"nameDesc": "Z - A",
|
||||||
|
"date": "Date",
|
||||||
|
"dateDesc": "Newest",
|
||||||
|
"dateAsc": "Oldest",
|
||||||
|
"lorasCount": "LoRA Count",
|
||||||
|
"lorasCountDesc": "Most",
|
||||||
|
"lorasCountAsc": "Least"
|
||||||
|
},
|
||||||
"refresh": {
|
"refresh": {
|
||||||
"title": "Refresh recipe list"
|
"title": "Refresh recipe list"
|
||||||
},
|
},
|
||||||
"filteredByLora": "Filtered by LoRA"
|
"filteredByLora": "Filtered by LoRA",
|
||||||
|
"favorites": {
|
||||||
|
"title": "Show Favorites Only",
|
||||||
|
"action": "Favorites"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"duplicates": {
|
"duplicates": {
|
||||||
"found": "Found {count} duplicate groups",
|
"found": "Found {count} duplicate groups",
|
||||||
@@ -620,6 +645,13 @@
|
|||||||
"noMissingLoras": "No missing LoRAs to download",
|
"noMissingLoras": "No missing LoRAs to download",
|
||||||
"getInfoFailed": "Failed to get information for missing LoRAs",
|
"getInfoFailed": "Failed to get information for missing LoRAs",
|
||||||
"prepareError": "Error preparing LoRAs for download: {message}"
|
"prepareError": "Error preparing LoRAs for download: {message}"
|
||||||
|
},
|
||||||
|
"repair": {
|
||||||
|
"starting": "Repairing recipe metadata...",
|
||||||
|
"success": "Recipe metadata repaired successfully",
|
||||||
|
"skipped": "Recipe already at latest version, no repair needed",
|
||||||
|
"failed": "Failed to repair recipe: {message}",
|
||||||
|
"missingId": "Cannot repair recipe: Missing recipe ID"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -641,7 +673,8 @@
|
|||||||
"recursiveUnavailable": "Recursive search is available in tree view only",
|
"recursiveUnavailable": "Recursive search is available in tree view only",
|
||||||
"collapseAllDisabled": "Not available in list view",
|
"collapseAllDisabled": "Not available in list view",
|
||||||
"dragDrop": {
|
"dragDrop": {
|
||||||
"unableToResolveRoot": "Unable to determine destination path for move."
|
"unableToResolveRoot": "Unable to determine destination path for move.",
|
||||||
|
"moveUnsupported": "Move is not supported for this item."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"statistics": {
|
"statistics": {
|
||||||
@@ -1463,7 +1496,8 @@
|
|||||||
"bulkMoveFailures": "Failed moves:\n{failures}",
|
"bulkMoveFailures": "Failed moves:\n{failures}",
|
||||||
"bulkMoveSuccess": "Successfully moved {successCount} {type}s",
|
"bulkMoveSuccess": "Successfully moved {successCount} {type}s",
|
||||||
"exampleImagesDownloadSuccess": "Successfully downloaded example images!",
|
"exampleImagesDownloadSuccess": "Successfully downloaded example images!",
|
||||||
"exampleImagesDownloadFailed": "Failed to download example images: {message}"
|
"exampleImagesDownloadFailed": "Failed to download example images: {message}",
|
||||||
|
"moveFailed": "Failed to move item: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"banners": {
|
"banners": {
|
||||||
|
|||||||
@@ -159,6 +159,12 @@
|
|||||||
"success": "Updated license metadata for {count} {typePlural}",
|
"success": "Updated license metadata for {count} {typePlural}",
|
||||||
"none": "All {typePlural} already have license metadata",
|
"none": "All {typePlural} already have license metadata",
|
||||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||||
|
},
|
||||||
|
"repairRecipes": {
|
||||||
|
"label": "Reparar datos de recetas",
|
||||||
|
"loading": "Reparando datos de recetas...",
|
||||||
|
"success": "Se repararon con éxito {count} recetas.",
|
||||||
|
"error": "Error al reparar recetas: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"header": {
|
"header": {
|
||||||
@@ -188,7 +194,8 @@
|
|||||||
"creator": "Creador",
|
"creator": "Creador",
|
||||||
"title": "Título de la receta",
|
"title": "Título de la receta",
|
||||||
"loraName": "Nombre de archivo LoRA",
|
"loraName": "Nombre de archivo LoRA",
|
||||||
"loraModel": "Nombre del modelo LoRA"
|
"loraModel": "Nombre del modelo LoRA",
|
||||||
|
"prompt": "Prompt"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"filter": {
|
"filter": {
|
||||||
@@ -199,6 +206,7 @@
|
|||||||
"license": "Licencia",
|
"license": "Licencia",
|
||||||
"noCreditRequired": "Sin crédito requerido",
|
"noCreditRequired": "Sin crédito requerido",
|
||||||
"allowSellingGeneratedContent": "Venta permitida",
|
"allowSellingGeneratedContent": "Venta permitida",
|
||||||
|
"noTags": "Sin etiquetas",
|
||||||
"clearAll": "Limpiar todos los filtros"
|
"clearAll": "Limpiar todos los filtros"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
@@ -521,6 +529,7 @@
|
|||||||
"replacePreview": "Reemplazar vista previa",
|
"replacePreview": "Reemplazar vista previa",
|
||||||
"setContentRating": "Establecer clasificación de contenido",
|
"setContentRating": "Establecer clasificación de contenido",
|
||||||
"moveToFolder": "Mover a carpeta",
|
"moveToFolder": "Mover a carpeta",
|
||||||
|
"repairMetadata": "[TODO: Translate] Repair metadata",
|
||||||
"excludeModel": "Excluir modelo",
|
"excludeModel": "Excluir modelo",
|
||||||
"deleteModel": "Eliminar modelo",
|
"deleteModel": "Eliminar modelo",
|
||||||
"shareRecipe": "Compartir receta",
|
"shareRecipe": "Compartir receta",
|
||||||
@@ -591,10 +600,26 @@
|
|||||||
"selectLoraRoot": "Por favor selecciona un directorio raíz de LoRA"
|
"selectLoraRoot": "Por favor selecciona un directorio raíz de LoRA"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"sort": {
|
||||||
|
"title": "Ordenar recetas por...",
|
||||||
|
"name": "Nombre",
|
||||||
|
"nameAsc": "A - Z",
|
||||||
|
"nameDesc": "Z - A",
|
||||||
|
"date": "Fecha",
|
||||||
|
"dateDesc": "Más reciente",
|
||||||
|
"dateAsc": "Más antiguo",
|
||||||
|
"lorasCount": "Cant. de LoRAs",
|
||||||
|
"lorasCountDesc": "Más",
|
||||||
|
"lorasCountAsc": "Menos"
|
||||||
|
},
|
||||||
"refresh": {
|
"refresh": {
|
||||||
"title": "Actualizar lista de recetas"
|
"title": "Actualizar lista de recetas"
|
||||||
},
|
},
|
||||||
"filteredByLora": "Filtrado por LoRA"
|
"filteredByLora": "Filtrado por LoRA",
|
||||||
|
"favorites": {
|
||||||
|
"title": "Mostrar solo favoritos",
|
||||||
|
"action": "Favoritos"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"duplicates": {
|
"duplicates": {
|
||||||
"found": "Se encontraron {count} grupos de duplicados",
|
"found": "Se encontraron {count} grupos de duplicados",
|
||||||
@@ -620,6 +645,13 @@
|
|||||||
"noMissingLoras": "No hay LoRAs faltantes para descargar",
|
"noMissingLoras": "No hay LoRAs faltantes para descargar",
|
||||||
"getInfoFailed": "Error al obtener información de LoRAs faltantes",
|
"getInfoFailed": "Error al obtener información de LoRAs faltantes",
|
||||||
"prepareError": "Error preparando LoRAs para descarga: {message}"
|
"prepareError": "Error preparando LoRAs para descarga: {message}"
|
||||||
|
},
|
||||||
|
"repair": {
|
||||||
|
"starting": "[TODO: Translate] Repairing recipe metadata...",
|
||||||
|
"success": "[TODO: Translate] Recipe metadata repaired successfully",
|
||||||
|
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
|
||||||
|
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
|
||||||
|
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -641,7 +673,8 @@
|
|||||||
"recursiveUnavailable": "La búsqueda recursiva solo está disponible en la vista en árbol",
|
"recursiveUnavailable": "La búsqueda recursiva solo está disponible en la vista en árbol",
|
||||||
"collapseAllDisabled": "No disponible en vista de lista",
|
"collapseAllDisabled": "No disponible en vista de lista",
|
||||||
"dragDrop": {
|
"dragDrop": {
|
||||||
"unableToResolveRoot": "No se puede determinar la ruta de destino para el movimiento."
|
"unableToResolveRoot": "No se puede determinar la ruta de destino para el movimiento.",
|
||||||
|
"moveUnsupported": "Move is not supported for this item."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"statistics": {
|
"statistics": {
|
||||||
@@ -1463,7 +1496,8 @@
|
|||||||
"bulkMoveFailures": "Movimientos fallidos:\n{failures}",
|
"bulkMoveFailures": "Movimientos fallidos:\n{failures}",
|
||||||
"bulkMoveSuccess": "Movidos exitosamente {successCount} {type}s",
|
"bulkMoveSuccess": "Movidos exitosamente {successCount} {type}s",
|
||||||
"exampleImagesDownloadSuccess": "¡Imágenes de ejemplo descargadas exitosamente!",
|
"exampleImagesDownloadSuccess": "¡Imágenes de ejemplo descargadas exitosamente!",
|
||||||
"exampleImagesDownloadFailed": "Error al descargar imágenes de ejemplo: {message}"
|
"exampleImagesDownloadFailed": "Error al descargar imágenes de ejemplo: {message}",
|
||||||
|
"moveFailed": "Failed to move item: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"banners": {
|
"banners": {
|
||||||
|
|||||||
@@ -159,6 +159,12 @@
|
|||||||
"success": "Updated license metadata for {count} {typePlural}",
|
"success": "Updated license metadata for {count} {typePlural}",
|
||||||
"none": "All {typePlural} already have license metadata",
|
"none": "All {typePlural} already have license metadata",
|
||||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||||
|
},
|
||||||
|
"repairRecipes": {
|
||||||
|
"label": "Réparer les données de recettes",
|
||||||
|
"loading": "Réparation des données de recettes...",
|
||||||
|
"success": "{count} recettes réparées avec succès.",
|
||||||
|
"error": "Échec de la réparation des recettes : {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"header": {
|
"header": {
|
||||||
@@ -188,7 +194,8 @@
|
|||||||
"creator": "Créateur",
|
"creator": "Créateur",
|
||||||
"title": "Titre de la recipe",
|
"title": "Titre de la recipe",
|
||||||
"loraName": "Nom de fichier LoRA",
|
"loraName": "Nom de fichier LoRA",
|
||||||
"loraModel": "Nom du modèle LoRA"
|
"loraModel": "Nom du modèle LoRA",
|
||||||
|
"prompt": "Prompt"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"filter": {
|
"filter": {
|
||||||
@@ -199,6 +206,7 @@
|
|||||||
"license": "Licence",
|
"license": "Licence",
|
||||||
"noCreditRequired": "Crédit non requis",
|
"noCreditRequired": "Crédit non requis",
|
||||||
"allowSellingGeneratedContent": "Vente autorisée",
|
"allowSellingGeneratedContent": "Vente autorisée",
|
||||||
|
"noTags": "Aucun tag",
|
||||||
"clearAll": "Effacer tous les filtres"
|
"clearAll": "Effacer tous les filtres"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
@@ -521,6 +529,7 @@
|
|||||||
"replacePreview": "Remplacer l'aperçu",
|
"replacePreview": "Remplacer l'aperçu",
|
||||||
"setContentRating": "Définir la classification du contenu",
|
"setContentRating": "Définir la classification du contenu",
|
||||||
"moveToFolder": "Déplacer vers un dossier",
|
"moveToFolder": "Déplacer vers un dossier",
|
||||||
|
"repairMetadata": "[TODO: Translate] Repair metadata",
|
||||||
"excludeModel": "Exclure le modèle",
|
"excludeModel": "Exclure le modèle",
|
||||||
"deleteModel": "Supprimer le modèle",
|
"deleteModel": "Supprimer le modèle",
|
||||||
"shareRecipe": "Partager la recipe",
|
"shareRecipe": "Partager la recipe",
|
||||||
@@ -591,10 +600,26 @@
|
|||||||
"selectLoraRoot": "Veuillez sélectionner un répertoire racine LoRA"
|
"selectLoraRoot": "Veuillez sélectionner un répertoire racine LoRA"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"sort": {
|
||||||
|
"title": "Trier les recettes par...",
|
||||||
|
"name": "Nom",
|
||||||
|
"nameAsc": "A - Z",
|
||||||
|
"nameDesc": "Z - A",
|
||||||
|
"date": "Date",
|
||||||
|
"dateDesc": "Plus récent",
|
||||||
|
"dateAsc": "Plus ancien",
|
||||||
|
"lorasCount": "Nombre de LoRAs",
|
||||||
|
"lorasCountDesc": "Plus",
|
||||||
|
"lorasCountAsc": "Moins"
|
||||||
|
},
|
||||||
"refresh": {
|
"refresh": {
|
||||||
"title": "Actualiser la liste des recipes"
|
"title": "Actualiser la liste des recipes"
|
||||||
},
|
},
|
||||||
"filteredByLora": "Filtré par LoRA"
|
"filteredByLora": "Filtré par LoRA",
|
||||||
|
"favorites": {
|
||||||
|
"title": "Afficher uniquement les favoris",
|
||||||
|
"action": "Favoris"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"duplicates": {
|
"duplicates": {
|
||||||
"found": "Trouvé {count} groupes de doublons",
|
"found": "Trouvé {count} groupes de doublons",
|
||||||
@@ -620,6 +645,13 @@
|
|||||||
"noMissingLoras": "Aucun LoRA manquant à télécharger",
|
"noMissingLoras": "Aucun LoRA manquant à télécharger",
|
||||||
"getInfoFailed": "Échec de l'obtention des informations pour les LoRAs manquants",
|
"getInfoFailed": "Échec de l'obtention des informations pour les LoRAs manquants",
|
||||||
"prepareError": "Erreur lors de la préparation des LoRAs pour le téléchargement : {message}"
|
"prepareError": "Erreur lors de la préparation des LoRAs pour le téléchargement : {message}"
|
||||||
|
},
|
||||||
|
"repair": {
|
||||||
|
"starting": "[TODO: Translate] Repairing recipe metadata...",
|
||||||
|
"success": "[TODO: Translate] Recipe metadata repaired successfully",
|
||||||
|
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
|
||||||
|
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
|
||||||
|
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -641,7 +673,8 @@
|
|||||||
"recursiveUnavailable": "La recherche récursive n'est disponible qu'en vue arborescente",
|
"recursiveUnavailable": "La recherche récursive n'est disponible qu'en vue arborescente",
|
||||||
"collapseAllDisabled": "Non disponible en vue liste",
|
"collapseAllDisabled": "Non disponible en vue liste",
|
||||||
"dragDrop": {
|
"dragDrop": {
|
||||||
"unableToResolveRoot": "Impossible de déterminer le chemin de destination pour le déplacement."
|
"unableToResolveRoot": "Impossible de déterminer le chemin de destination pour le déplacement.",
|
||||||
|
"moveUnsupported": "Move is not supported for this item."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"statistics": {
|
"statistics": {
|
||||||
@@ -1463,7 +1496,8 @@
|
|||||||
"bulkMoveFailures": "Échecs de déplacement :\n{failures}",
|
"bulkMoveFailures": "Échecs de déplacement :\n{failures}",
|
||||||
"bulkMoveSuccess": "{successCount} {type}s déplacés avec succès",
|
"bulkMoveSuccess": "{successCount} {type}s déplacés avec succès",
|
||||||
"exampleImagesDownloadSuccess": "Images d'exemple téléchargées avec succès !",
|
"exampleImagesDownloadSuccess": "Images d'exemple téléchargées avec succès !",
|
||||||
"exampleImagesDownloadFailed": "Échec du téléchargement des images d'exemple : {message}"
|
"exampleImagesDownloadFailed": "Échec du téléchargement des images d'exemple : {message}",
|
||||||
|
"moveFailed": "Failed to move item: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"banners": {
|
"banners": {
|
||||||
|
|||||||
@@ -159,6 +159,12 @@
|
|||||||
"success": "Updated license metadata for {count} {typePlural}",
|
"success": "Updated license metadata for {count} {typePlural}",
|
||||||
"none": "All {typePlural} already have license metadata",
|
"none": "All {typePlural} already have license metadata",
|
||||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||||
|
},
|
||||||
|
"repairRecipes": {
|
||||||
|
"label": "תיקון נתוני מתכונים",
|
||||||
|
"loading": "מתקן נתוני מתכונים...",
|
||||||
|
"success": "תוקנו בהצלחה {count} מתכונים.",
|
||||||
|
"error": "תיקון המתכונים נכשל: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"header": {
|
"header": {
|
||||||
@@ -188,7 +194,8 @@
|
|||||||
"creator": "יוצר",
|
"creator": "יוצר",
|
||||||
"title": "כותרת מתכון",
|
"title": "כותרת מתכון",
|
||||||
"loraName": "שם קובץ LoRA",
|
"loraName": "שם קובץ LoRA",
|
||||||
"loraModel": "שם מודל LoRA"
|
"loraModel": "שם מודל LoRA",
|
||||||
|
"prompt": "הנחיה"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"filter": {
|
"filter": {
|
||||||
@@ -199,6 +206,7 @@
|
|||||||
"license": "רישיון",
|
"license": "רישיון",
|
||||||
"noCreditRequired": "ללא קרדיט נדרש",
|
"noCreditRequired": "ללא קרדיט נדרש",
|
||||||
"allowSellingGeneratedContent": "אפשר מכירה",
|
"allowSellingGeneratedContent": "אפשר מכירה",
|
||||||
|
"noTags": "ללא תגיות",
|
||||||
"clearAll": "נקה את כל המסננים"
|
"clearAll": "נקה את כל המסננים"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
@@ -228,6 +236,7 @@
|
|||||||
"videoSettings": "הגדרות וידאו",
|
"videoSettings": "הגדרות וידאו",
|
||||||
"layoutSettings": "הגדרות פריסה",
|
"layoutSettings": "הגדרות פריסה",
|
||||||
"folderSettings": "הגדרות תיקייה",
|
"folderSettings": "הגדרות תיקייה",
|
||||||
|
"priorityTags": "תגיות עדיפות",
|
||||||
"downloadPathTemplates": "תבניות נתיב הורדה",
|
"downloadPathTemplates": "תבניות נתיב הורדה",
|
||||||
"exampleImages": "תמונות דוגמה",
|
"exampleImages": "תמונות דוגמה",
|
||||||
"updateFlags": "תגי עדכון",
|
"updateFlags": "תגי עדכון",
|
||||||
@@ -235,8 +244,7 @@
|
|||||||
"misc": "שונות",
|
"misc": "שונות",
|
||||||
"metadataArchive": "מסד נתונים של ארכיון מטא-דאטה",
|
"metadataArchive": "מסד נתונים של ארכיון מטא-דאטה",
|
||||||
"storageLocation": "מיקום ההגדרות",
|
"storageLocation": "מיקום ההגדרות",
|
||||||
"proxySettings": "הגדרות פרוקסי",
|
"proxySettings": "הגדרות פרוקסי"
|
||||||
"priorityTags": "תגיות עדיפות"
|
|
||||||
},
|
},
|
||||||
"storage": {
|
"storage": {
|
||||||
"locationLabel": "מצב נייד",
|
"locationLabel": "מצב נייד",
|
||||||
@@ -309,6 +317,26 @@
|
|||||||
"defaultEmbeddingRootHelp": "הגדר את ספריית השורש המוגדרת כברירת מחדל של embedding להורדות, ייבוא והעברות",
|
"defaultEmbeddingRootHelp": "הגדר את ספריית השורש המוגדרת כברירת מחדל של embedding להורדות, ייבוא והעברות",
|
||||||
"noDefault": "אין ברירת מחדל"
|
"noDefault": "אין ברירת מחדל"
|
||||||
},
|
},
|
||||||
|
"priorityTags": {
|
||||||
|
"title": "תגיות עדיפות",
|
||||||
|
"description": "התאם את סדר העדיפות של התגיות עבור כל סוג מודל (לדוגמה: character, concept, style(toon|toon_style))",
|
||||||
|
"placeholder": "character, concept, style(toon|toon_style)",
|
||||||
|
"helpLinkLabel": "פתח עזרה בנושא תגיות עדיפות",
|
||||||
|
"modelTypes": {
|
||||||
|
"lora": "LoRA",
|
||||||
|
"checkpoint": "Checkpoint",
|
||||||
|
"embedding": "Embedding"
|
||||||
|
},
|
||||||
|
"saveSuccess": "תגיות העדיפות עודכנו.",
|
||||||
|
"saveError": "עדכון תגיות העדיפות נכשל.",
|
||||||
|
"loadingSuggestions": "טוען הצעות...",
|
||||||
|
"validation": {
|
||||||
|
"missingClosingParen": "לרשומה {index} חסר סוגר סוגריים.",
|
||||||
|
"missingCanonical": "על הרשומה {index} לכלול שם תגית קנונית.",
|
||||||
|
"duplicateCanonical": "התגית הקנונית \"{tag}\" מופיעה יותר מפעם אחת.",
|
||||||
|
"unknown": "תצורת תגיות העדיפות שגויה."
|
||||||
|
}
|
||||||
|
},
|
||||||
"downloadPathTemplates": {
|
"downloadPathTemplates": {
|
||||||
"title": "תבניות נתיב הורדה",
|
"title": "תבניות נתיב הורדה",
|
||||||
"help": "הגדר מבני תיקיות לסוגי מודלים שונים בעת הורדה מ-Civitai.",
|
"help": "הגדר מבני תיקיות לסוגי מודלים שונים בעת הורדה מ-Civitai.",
|
||||||
@@ -320,8 +348,8 @@
|
|||||||
"byFirstTag": "לפי תגית ראשונה",
|
"byFirstTag": "לפי תגית ראשונה",
|
||||||
"baseModelFirstTag": "מודל בסיס + תגית ראשונה",
|
"baseModelFirstTag": "מודל בסיס + תגית ראשונה",
|
||||||
"baseModelAuthor": "מודל בסיס + יוצר",
|
"baseModelAuthor": "מודל בסיס + יוצר",
|
||||||
"baseModelAuthorFirstTag": "מודל בסיס + יוצר + תגית ראשונה",
|
|
||||||
"authorFirstTag": "יוצר + תגית ראשונה",
|
"authorFirstTag": "יוצר + תגית ראשונה",
|
||||||
|
"baseModelAuthorFirstTag": "מודל בסיס + יוצר + תגית ראשונה",
|
||||||
"customTemplate": "תבנית מותאמת אישית"
|
"customTemplate": "תבנית מותאמת אישית"
|
||||||
},
|
},
|
||||||
"customTemplatePlaceholder": "הזן תבנית מותאמת אישית (למשל, {base_model}/{author}/{first_tag})",
|
"customTemplatePlaceholder": "הזן תבנית מותאמת אישית (למשל, {base_model}/{author}/{first_tag})",
|
||||||
@@ -409,26 +437,6 @@
|
|||||||
"proxyPassword": "סיסמה (אופציונלי)",
|
"proxyPassword": "סיסמה (אופציונלי)",
|
||||||
"proxyPasswordPlaceholder": "password",
|
"proxyPasswordPlaceholder": "password",
|
||||||
"proxyPasswordHelp": "סיסמה לאימות מול הפרוקסי (אם נדרש)"
|
"proxyPasswordHelp": "סיסמה לאימות מול הפרוקסי (אם נדרש)"
|
||||||
},
|
|
||||||
"priorityTags": {
|
|
||||||
"title": "תגיות עדיפות",
|
|
||||||
"description": "התאם את סדר העדיפות של התגיות עבור כל סוג מודל (לדוגמה: character, concept, style(toon|toon_style))",
|
|
||||||
"placeholder": "character, concept, style(toon|toon_style)",
|
|
||||||
"helpLinkLabel": "פתח עזרה בנושא תגיות עדיפות",
|
|
||||||
"modelTypes": {
|
|
||||||
"lora": "LoRA",
|
|
||||||
"checkpoint": "Checkpoint",
|
|
||||||
"embedding": "Embedding"
|
|
||||||
},
|
|
||||||
"saveSuccess": "תגיות העדיפות עודכנו.",
|
|
||||||
"saveError": "עדכון תגיות העדיפות נכשל.",
|
|
||||||
"loadingSuggestions": "טוען הצעות...",
|
|
||||||
"validation": {
|
|
||||||
"missingClosingParen": "לרשומה {index} חסר סוגר סוגריים.",
|
|
||||||
"missingCanonical": "על הרשומה {index} לכלול שם תגית קנונית.",
|
|
||||||
"duplicateCanonical": "התגית הקנונית \"{tag}\" מופיעה יותר מפעם אחת.",
|
|
||||||
"unknown": "תצורת תגיות העדיפות שגויה."
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"loras": {
|
"loras": {
|
||||||
@@ -521,6 +529,7 @@
|
|||||||
"replacePreview": "החלף תצוגה מקדימה",
|
"replacePreview": "החלף תצוגה מקדימה",
|
||||||
"setContentRating": "הגדר דירוג תוכן",
|
"setContentRating": "הגדר דירוג תוכן",
|
||||||
"moveToFolder": "העבר לתיקייה",
|
"moveToFolder": "העבר לתיקייה",
|
||||||
|
"repairMetadata": "[TODO: Translate] Repair metadata",
|
||||||
"excludeModel": "החרג מודל",
|
"excludeModel": "החרג מודל",
|
||||||
"deleteModel": "מחק מודל",
|
"deleteModel": "מחק מודל",
|
||||||
"shareRecipe": "שתף מתכון",
|
"shareRecipe": "שתף מתכון",
|
||||||
@@ -591,10 +600,26 @@
|
|||||||
"selectLoraRoot": "אנא בחר ספריית שורש של LoRA"
|
"selectLoraRoot": "אנא בחר ספריית שורש של LoRA"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"sort": {
|
||||||
|
"title": "מיון מתכונים לפי...",
|
||||||
|
"name": "שם",
|
||||||
|
"nameAsc": "א - ת",
|
||||||
|
"nameDesc": "ת - א",
|
||||||
|
"date": "תאריך",
|
||||||
|
"dateDesc": "הכי חדש",
|
||||||
|
"dateAsc": "הכי ישן",
|
||||||
|
"lorasCount": "מספר LoRAs",
|
||||||
|
"lorasCountDesc": "הכי הרבה",
|
||||||
|
"lorasCountAsc": "הכי פחות"
|
||||||
|
},
|
||||||
"refresh": {
|
"refresh": {
|
||||||
"title": "רענן רשימת מתכונים"
|
"title": "רענן רשימת מתכונים"
|
||||||
},
|
},
|
||||||
"filteredByLora": "מסונן לפי LoRA"
|
"filteredByLora": "מסונן לפי LoRA",
|
||||||
|
"favorites": {
|
||||||
|
"title": "הצג מועדפים בלבד",
|
||||||
|
"action": "מועדפים"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"duplicates": {
|
"duplicates": {
|
||||||
"found": "נמצאו {count} קבוצות כפולות",
|
"found": "נמצאו {count} קבוצות כפולות",
|
||||||
@@ -620,6 +645,13 @@
|
|||||||
"noMissingLoras": "אין LoRAs חסרים להורדה",
|
"noMissingLoras": "אין LoRAs חסרים להורדה",
|
||||||
"getInfoFailed": "קבלת מידע עבור LoRAs חסרים נכשלה",
|
"getInfoFailed": "קבלת מידע עבור LoRAs חסרים נכשלה",
|
||||||
"prepareError": "שגיאה בהכנת LoRAs להורדה: {message}"
|
"prepareError": "שגיאה בהכנת LoRAs להורדה: {message}"
|
||||||
|
},
|
||||||
|
"repair": {
|
||||||
|
"starting": "[TODO: Translate] Repairing recipe metadata...",
|
||||||
|
"success": "[TODO: Translate] Recipe metadata repaired successfully",
|
||||||
|
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
|
||||||
|
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
|
||||||
|
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -641,7 +673,8 @@
|
|||||||
"recursiveUnavailable": "חיפוש רקורסיבי זמין רק בתצוגת עץ",
|
"recursiveUnavailable": "חיפוש רקורסיבי זמין רק בתצוגת עץ",
|
||||||
"collapseAllDisabled": "לא זמין בתצוגת רשימה",
|
"collapseAllDisabled": "לא זמין בתצוגת רשימה",
|
||||||
"dragDrop": {
|
"dragDrop": {
|
||||||
"unableToResolveRoot": "לא ניתן לקבוע את נתיב היעד להעברה."
|
"unableToResolveRoot": "לא ניתן לקבוע את נתיב היעד להעברה.",
|
||||||
|
"moveUnsupported": "Move is not supported for this item."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"statistics": {
|
"statistics": {
|
||||||
@@ -1463,7 +1496,8 @@
|
|||||||
"bulkMoveFailures": "העברות שנכשלו:\n{failures}",
|
"bulkMoveFailures": "העברות שנכשלו:\n{failures}",
|
||||||
"bulkMoveSuccess": "הועברו בהצלחה {successCount} {type}s",
|
"bulkMoveSuccess": "הועברו בהצלחה {successCount} {type}s",
|
||||||
"exampleImagesDownloadSuccess": "תמונות הדוגמה הורדו בהצלחה!",
|
"exampleImagesDownloadSuccess": "תמונות הדוגמה הורדו בהצלחה!",
|
||||||
"exampleImagesDownloadFailed": "הורדת תמונות הדוגמה נכשלה: {message}"
|
"exampleImagesDownloadFailed": "הורדת תמונות הדוגמה נכשלה: {message}",
|
||||||
|
"moveFailed": "Failed to move item: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"banners": {
|
"banners": {
|
||||||
|
|||||||
@@ -159,6 +159,12 @@
|
|||||||
"success": "Updated license metadata for {count} {typePlural}",
|
"success": "Updated license metadata for {count} {typePlural}",
|
||||||
"none": "All {typePlural} already have license metadata",
|
"none": "All {typePlural} already have license metadata",
|
||||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||||
|
},
|
||||||
|
"repairRecipes": {
|
||||||
|
"label": "レシピデータの修復",
|
||||||
|
"loading": "レシピデータを修復中...",
|
||||||
|
"success": "{count} 件のレシピを正常に修復しました。",
|
||||||
|
"error": "レシピの修復に失敗しました: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"header": {
|
"header": {
|
||||||
@@ -188,7 +194,8 @@
|
|||||||
"creator": "作成者",
|
"creator": "作成者",
|
||||||
"title": "レシピタイトル",
|
"title": "レシピタイトル",
|
||||||
"loraName": "LoRAファイル名",
|
"loraName": "LoRAファイル名",
|
||||||
"loraModel": "LoRAモデル名"
|
"loraModel": "LoRAモデル名",
|
||||||
|
"prompt": "プロンプト"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"filter": {
|
"filter": {
|
||||||
@@ -199,6 +206,7 @@
|
|||||||
"license": "ライセンス",
|
"license": "ライセンス",
|
||||||
"noCreditRequired": "クレジット不要",
|
"noCreditRequired": "クレジット不要",
|
||||||
"allowSellingGeneratedContent": "販売許可",
|
"allowSellingGeneratedContent": "販売許可",
|
||||||
|
"noTags": "タグなし",
|
||||||
"clearAll": "すべてのフィルタをクリア"
|
"clearAll": "すべてのフィルタをクリア"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
@@ -521,6 +529,7 @@
|
|||||||
"replacePreview": "プレビューを置換",
|
"replacePreview": "プレビューを置換",
|
||||||
"setContentRating": "コンテンツレーティングを設定",
|
"setContentRating": "コンテンツレーティングを設定",
|
||||||
"moveToFolder": "フォルダに移動",
|
"moveToFolder": "フォルダに移動",
|
||||||
|
"repairMetadata": "[TODO: Translate] Repair metadata",
|
||||||
"excludeModel": "モデルを除外",
|
"excludeModel": "モデルを除外",
|
||||||
"deleteModel": "モデルを削除",
|
"deleteModel": "モデルを削除",
|
||||||
"shareRecipe": "レシピを共有",
|
"shareRecipe": "レシピを共有",
|
||||||
@@ -591,10 +600,26 @@
|
|||||||
"selectLoraRoot": "LoRAルートディレクトリを選択してください"
|
"selectLoraRoot": "LoRAルートディレクトリを選択してください"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"sort": {
|
||||||
|
"title": "レシピの並び替え...",
|
||||||
|
"name": "名前",
|
||||||
|
"nameAsc": "A - Z",
|
||||||
|
"nameDesc": "Z - A",
|
||||||
|
"date": "日付",
|
||||||
|
"dateDesc": "新しい順",
|
||||||
|
"dateAsc": "古い順",
|
||||||
|
"lorasCount": "LoRA数",
|
||||||
|
"lorasCountDesc": "多い順",
|
||||||
|
"lorasCountAsc": "少ない順"
|
||||||
|
},
|
||||||
"refresh": {
|
"refresh": {
|
||||||
"title": "レシピリストを更新"
|
"title": "レシピリストを更新"
|
||||||
},
|
},
|
||||||
"filteredByLora": "LoRAでフィルタ済み"
|
"filteredByLora": "LoRAでフィルタ済み",
|
||||||
|
"favorites": {
|
||||||
|
"title": "お気に入りのみ表示",
|
||||||
|
"action": "お気に入り"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"duplicates": {
|
"duplicates": {
|
||||||
"found": "{count} 個の重複グループが見つかりました",
|
"found": "{count} 個の重複グループが見つかりました",
|
||||||
@@ -620,6 +645,13 @@
|
|||||||
"noMissingLoras": "ダウンロードする不足LoRAがありません",
|
"noMissingLoras": "ダウンロードする不足LoRAがありません",
|
||||||
"getInfoFailed": "不足LoRAの情報取得に失敗しました",
|
"getInfoFailed": "不足LoRAの情報取得に失敗しました",
|
||||||
"prepareError": "ダウンロード用LoRAの準備中にエラー:{message}"
|
"prepareError": "ダウンロード用LoRAの準備中にエラー:{message}"
|
||||||
|
},
|
||||||
|
"repair": {
|
||||||
|
"starting": "[TODO: Translate] Repairing recipe metadata...",
|
||||||
|
"success": "[TODO: Translate] Recipe metadata repaired successfully",
|
||||||
|
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
|
||||||
|
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
|
||||||
|
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -641,7 +673,8 @@
|
|||||||
"recursiveUnavailable": "再帰検索はツリービューでのみ利用できます",
|
"recursiveUnavailable": "再帰検索はツリービューでのみ利用できます",
|
||||||
"collapseAllDisabled": "リストビューでは利用できません",
|
"collapseAllDisabled": "リストビューでは利用できません",
|
||||||
"dragDrop": {
|
"dragDrop": {
|
||||||
"unableToResolveRoot": "移動先のパスを特定できません。"
|
"unableToResolveRoot": "移動先のパスを特定できません。",
|
||||||
|
"moveUnsupported": "Move is not supported for this item."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"statistics": {
|
"statistics": {
|
||||||
@@ -1463,7 +1496,8 @@
|
|||||||
"bulkMoveFailures": "失敗した移動:\n{failures}",
|
"bulkMoveFailures": "失敗した移動:\n{failures}",
|
||||||
"bulkMoveSuccess": "{successCount} {type}が正常に移動されました",
|
"bulkMoveSuccess": "{successCount} {type}が正常に移動されました",
|
||||||
"exampleImagesDownloadSuccess": "例画像が正常にダウンロードされました!",
|
"exampleImagesDownloadSuccess": "例画像が正常にダウンロードされました!",
|
||||||
"exampleImagesDownloadFailed": "例画像のダウンロードに失敗しました:{message}"
|
"exampleImagesDownloadFailed": "例画像のダウンロードに失敗しました:{message}",
|
||||||
|
"moveFailed": "Failed to move item: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"banners": {
|
"banners": {
|
||||||
|
|||||||
@@ -159,6 +159,12 @@
|
|||||||
"success": "Updated license metadata for {count} {typePlural}",
|
"success": "Updated license metadata for {count} {typePlural}",
|
||||||
"none": "All {typePlural} already have license metadata",
|
"none": "All {typePlural} already have license metadata",
|
||||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||||
|
},
|
||||||
|
"repairRecipes": {
|
||||||
|
"label": "레시피 데이터 복구",
|
||||||
|
"loading": "레시피 데이터 복구 중...",
|
||||||
|
"success": "{count}개의 레시피가 성공적으로 복구되었습니다.",
|
||||||
|
"error": "레시피 복구 실패: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"header": {
|
"header": {
|
||||||
@@ -188,7 +194,8 @@
|
|||||||
"creator": "제작자",
|
"creator": "제작자",
|
||||||
"title": "레시피 제목",
|
"title": "레시피 제목",
|
||||||
"loraName": "LoRA 파일명",
|
"loraName": "LoRA 파일명",
|
||||||
"loraModel": "LoRA 모델명"
|
"loraModel": "LoRA 모델명",
|
||||||
|
"prompt": "프롬프트"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"filter": {
|
"filter": {
|
||||||
@@ -199,6 +206,7 @@
|
|||||||
"license": "라이선스",
|
"license": "라이선스",
|
||||||
"noCreditRequired": "크레딧 표기 없음",
|
"noCreditRequired": "크레딧 표기 없음",
|
||||||
"allowSellingGeneratedContent": "판매 허용",
|
"allowSellingGeneratedContent": "판매 허용",
|
||||||
|
"noTags": "태그 없음",
|
||||||
"clearAll": "모든 필터 지우기"
|
"clearAll": "모든 필터 지우기"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
@@ -521,6 +529,7 @@
|
|||||||
"replacePreview": "미리보기 교체",
|
"replacePreview": "미리보기 교체",
|
||||||
"setContentRating": "콘텐츠 등급 설정",
|
"setContentRating": "콘텐츠 등급 설정",
|
||||||
"moveToFolder": "폴더로 이동",
|
"moveToFolder": "폴더로 이동",
|
||||||
|
"repairMetadata": "[TODO: Translate] Repair metadata",
|
||||||
"excludeModel": "모델 제외",
|
"excludeModel": "모델 제외",
|
||||||
"deleteModel": "모델 삭제",
|
"deleteModel": "모델 삭제",
|
||||||
"shareRecipe": "레시피 공유",
|
"shareRecipe": "레시피 공유",
|
||||||
@@ -591,10 +600,26 @@
|
|||||||
"selectLoraRoot": "LoRA 루트 디렉토리를 선택해주세요"
|
"selectLoraRoot": "LoRA 루트 디렉토리를 선택해주세요"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"sort": {
|
||||||
|
"title": "레시피 정렬...",
|
||||||
|
"name": "이름",
|
||||||
|
"nameAsc": "A - Z",
|
||||||
|
"nameDesc": "Z - A",
|
||||||
|
"date": "날짜",
|
||||||
|
"dateDesc": "최신순",
|
||||||
|
"dateAsc": "오래된순",
|
||||||
|
"lorasCount": "LoRA 수",
|
||||||
|
"lorasCountDesc": "많은순",
|
||||||
|
"lorasCountAsc": "적은순"
|
||||||
|
},
|
||||||
"refresh": {
|
"refresh": {
|
||||||
"title": "레시피 목록 새로고침"
|
"title": "레시피 목록 새로고침"
|
||||||
},
|
},
|
||||||
"filteredByLora": "LoRA로 필터링됨"
|
"filteredByLora": "LoRA로 필터링됨",
|
||||||
|
"favorites": {
|
||||||
|
"title": "즐겨찾기만 표시",
|
||||||
|
"action": "즐겨찾기"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"duplicates": {
|
"duplicates": {
|
||||||
"found": "{count}개의 중복 그룹 발견",
|
"found": "{count}개의 중복 그룹 발견",
|
||||||
@@ -620,6 +645,13 @@
|
|||||||
"noMissingLoras": "다운로드할 누락된 LoRA가 없습니다",
|
"noMissingLoras": "다운로드할 누락된 LoRA가 없습니다",
|
||||||
"getInfoFailed": "누락된 LoRA 정보를 가져오는데 실패했습니다",
|
"getInfoFailed": "누락된 LoRA 정보를 가져오는데 실패했습니다",
|
||||||
"prepareError": "LoRA 다운로드 준비 중 오류: {message}"
|
"prepareError": "LoRA 다운로드 준비 중 오류: {message}"
|
||||||
|
},
|
||||||
|
"repair": {
|
||||||
|
"starting": "[TODO: Translate] Repairing recipe metadata...",
|
||||||
|
"success": "[TODO: Translate] Recipe metadata repaired successfully",
|
||||||
|
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
|
||||||
|
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
|
||||||
|
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -641,7 +673,8 @@
|
|||||||
"recursiveUnavailable": "재귀 검색은 트리 보기에서만 사용할 수 있습니다",
|
"recursiveUnavailable": "재귀 검색은 트리 보기에서만 사용할 수 있습니다",
|
||||||
"collapseAllDisabled": "목록 보기에서는 사용할 수 없습니다",
|
"collapseAllDisabled": "목록 보기에서는 사용할 수 없습니다",
|
||||||
"dragDrop": {
|
"dragDrop": {
|
||||||
"unableToResolveRoot": "이동할 대상 경로를 확인할 수 없습니다."
|
"unableToResolveRoot": "이동할 대상 경로를 확인할 수 없습니다.",
|
||||||
|
"moveUnsupported": "Move is not supported for this item."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"statistics": {
|
"statistics": {
|
||||||
@@ -1463,7 +1496,8 @@
|
|||||||
"bulkMoveFailures": "실패한 이동:\n{failures}",
|
"bulkMoveFailures": "실패한 이동:\n{failures}",
|
||||||
"bulkMoveSuccess": "{successCount}개 {type}이(가) 성공적으로 이동되었습니다",
|
"bulkMoveSuccess": "{successCount}개 {type}이(가) 성공적으로 이동되었습니다",
|
||||||
"exampleImagesDownloadSuccess": "예시 이미지가 성공적으로 다운로드되었습니다!",
|
"exampleImagesDownloadSuccess": "예시 이미지가 성공적으로 다운로드되었습니다!",
|
||||||
"exampleImagesDownloadFailed": "예시 이미지 다운로드 실패: {message}"
|
"exampleImagesDownloadFailed": "예시 이미지 다운로드 실패: {message}",
|
||||||
|
"moveFailed": "Failed to move item: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"banners": {
|
"banners": {
|
||||||
|
|||||||
@@ -159,6 +159,12 @@
|
|||||||
"success": "Updated license metadata for {count} {typePlural}",
|
"success": "Updated license metadata for {count} {typePlural}",
|
||||||
"none": "All {typePlural} already have license metadata",
|
"none": "All {typePlural} already have license metadata",
|
||||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||||
|
},
|
||||||
|
"repairRecipes": {
|
||||||
|
"label": "Восстановить данные рецептов",
|
||||||
|
"loading": "Восстановление данных рецептов...",
|
||||||
|
"success": "Успешно восстановлено {count} рецептов.",
|
||||||
|
"error": "Ошибка восстановления рецептов: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"header": {
|
"header": {
|
||||||
@@ -188,7 +194,8 @@
|
|||||||
"creator": "Автор",
|
"creator": "Автор",
|
||||||
"title": "Название рецепта",
|
"title": "Название рецепта",
|
||||||
"loraName": "Имя файла LoRA",
|
"loraName": "Имя файла LoRA",
|
||||||
"loraModel": "Название модели LoRA"
|
"loraModel": "Название модели LoRA",
|
||||||
|
"prompt": "Запрос"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"filter": {
|
"filter": {
|
||||||
@@ -199,6 +206,7 @@
|
|||||||
"license": "Лицензия",
|
"license": "Лицензия",
|
||||||
"noCreditRequired": "Без указания авторства",
|
"noCreditRequired": "Без указания авторства",
|
||||||
"allowSellingGeneratedContent": "Продажа разрешена",
|
"allowSellingGeneratedContent": "Продажа разрешена",
|
||||||
|
"noTags": "Без тегов",
|
||||||
"clearAll": "Очистить все фильтры"
|
"clearAll": "Очистить все фильтры"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
@@ -521,6 +529,7 @@
|
|||||||
"replacePreview": "Заменить превью",
|
"replacePreview": "Заменить превью",
|
||||||
"setContentRating": "Установить рейтинг контента",
|
"setContentRating": "Установить рейтинг контента",
|
||||||
"moveToFolder": "Переместить в папку",
|
"moveToFolder": "Переместить в папку",
|
||||||
|
"repairMetadata": "[TODO: Translate] Repair metadata",
|
||||||
"excludeModel": "Исключить модель",
|
"excludeModel": "Исключить модель",
|
||||||
"deleteModel": "Удалить модель",
|
"deleteModel": "Удалить модель",
|
||||||
"shareRecipe": "Поделиться рецептом",
|
"shareRecipe": "Поделиться рецептом",
|
||||||
@@ -591,10 +600,26 @@
|
|||||||
"selectLoraRoot": "Пожалуйста, выберите корневую папку LoRA"
|
"selectLoraRoot": "Пожалуйста, выберите корневую папку LoRA"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"sort": {
|
||||||
|
"title": "Сортировка рецептов...",
|
||||||
|
"name": "Имя",
|
||||||
|
"nameAsc": "А - Я",
|
||||||
|
"nameDesc": "Я - А",
|
||||||
|
"date": "Дата",
|
||||||
|
"dateDesc": "Сначала новые",
|
||||||
|
"dateAsc": "Сначала старые",
|
||||||
|
"lorasCount": "Кол-во LoRA",
|
||||||
|
"lorasCountDesc": "Больше всего",
|
||||||
|
"lorasCountAsc": "Меньше всего"
|
||||||
|
},
|
||||||
"refresh": {
|
"refresh": {
|
||||||
"title": "Обновить список рецептов"
|
"title": "Обновить список рецептов"
|
||||||
},
|
},
|
||||||
"filteredByLora": "Фильтр по LoRA"
|
"filteredByLora": "Фильтр по LoRA",
|
||||||
|
"favorites": {
|
||||||
|
"title": "Только избранные",
|
||||||
|
"action": "Избранное"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"duplicates": {
|
"duplicates": {
|
||||||
"found": "Найдено {count} групп дубликатов",
|
"found": "Найдено {count} групп дубликатов",
|
||||||
@@ -620,6 +645,13 @@
|
|||||||
"noMissingLoras": "Нет отсутствующих LoRAs для загрузки",
|
"noMissingLoras": "Нет отсутствующих LoRAs для загрузки",
|
||||||
"getInfoFailed": "Не удалось получить информацию для отсутствующих LoRAs",
|
"getInfoFailed": "Не удалось получить информацию для отсутствующих LoRAs",
|
||||||
"prepareError": "Ошибка подготовки LoRAs для загрузки: {message}"
|
"prepareError": "Ошибка подготовки LoRAs для загрузки: {message}"
|
||||||
|
},
|
||||||
|
"repair": {
|
||||||
|
"starting": "[TODO: Translate] Repairing recipe metadata...",
|
||||||
|
"success": "[TODO: Translate] Recipe metadata repaired successfully",
|
||||||
|
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
|
||||||
|
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
|
||||||
|
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -641,7 +673,8 @@
|
|||||||
"recursiveUnavailable": "Рекурсивный поиск доступен только в режиме дерева",
|
"recursiveUnavailable": "Рекурсивный поиск доступен только в режиме дерева",
|
||||||
"collapseAllDisabled": "Недоступно в виде списка",
|
"collapseAllDisabled": "Недоступно в виде списка",
|
||||||
"dragDrop": {
|
"dragDrop": {
|
||||||
"unableToResolveRoot": "Не удалось определить путь назначения для перемещения."
|
"unableToResolveRoot": "Не удалось определить путь назначения для перемещения.",
|
||||||
|
"moveUnsupported": "Move is not supported for this item."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"statistics": {
|
"statistics": {
|
||||||
@@ -1463,7 +1496,8 @@
|
|||||||
"bulkMoveFailures": "Неудачные перемещения:\n{failures}",
|
"bulkMoveFailures": "Неудачные перемещения:\n{failures}",
|
||||||
"bulkMoveSuccess": "Успешно перемещено {successCount} {type}s",
|
"bulkMoveSuccess": "Успешно перемещено {successCount} {type}s",
|
||||||
"exampleImagesDownloadSuccess": "Примеры изображений успешно загружены!",
|
"exampleImagesDownloadSuccess": "Примеры изображений успешно загружены!",
|
||||||
"exampleImagesDownloadFailed": "Не удалось загрузить примеры изображений: {message}"
|
"exampleImagesDownloadFailed": "Не удалось загрузить примеры изображений: {message}",
|
||||||
|
"moveFailed": "Failed to move item: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"banners": {
|
"banners": {
|
||||||
|
|||||||
@@ -159,6 +159,12 @@
|
|||||||
"success": "Updated license metadata for {count} {typePlural}",
|
"success": "Updated license metadata for {count} {typePlural}",
|
||||||
"none": "All {typePlural} already have license metadata",
|
"none": "All {typePlural} already have license metadata",
|
||||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||||
|
},
|
||||||
|
"repairRecipes": {
|
||||||
|
"label": "修复配方数据",
|
||||||
|
"loading": "正在修复配方数据...",
|
||||||
|
"success": "成功修复了 {count} 个配方。",
|
||||||
|
"error": "配方修复失败:{message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"header": {
|
"header": {
|
||||||
@@ -188,7 +194,8 @@
|
|||||||
"creator": "创作者",
|
"creator": "创作者",
|
||||||
"title": "配方标题",
|
"title": "配方标题",
|
||||||
"loraName": "LoRA 文件名",
|
"loraName": "LoRA 文件名",
|
||||||
"loraModel": "LoRA 模型名称"
|
"loraModel": "LoRA 模型名称",
|
||||||
|
"prompt": "提示词"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"filter": {
|
"filter": {
|
||||||
@@ -199,6 +206,7 @@
|
|||||||
"license": "许可证",
|
"license": "许可证",
|
||||||
"noCreditRequired": "无需署名",
|
"noCreditRequired": "无需署名",
|
||||||
"allowSellingGeneratedContent": "允许销售",
|
"allowSellingGeneratedContent": "允许销售",
|
||||||
|
"noTags": "无标签",
|
||||||
"clearAll": "清除所有筛选"
|
"clearAll": "清除所有筛选"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
@@ -521,6 +529,7 @@
|
|||||||
"replacePreview": "替换预览",
|
"replacePreview": "替换预览",
|
||||||
"setContentRating": "设置内容评级",
|
"setContentRating": "设置内容评级",
|
||||||
"moveToFolder": "移动到文件夹",
|
"moveToFolder": "移动到文件夹",
|
||||||
|
"repairMetadata": "[TODO: Translate] Repair metadata",
|
||||||
"excludeModel": "排除模型",
|
"excludeModel": "排除模型",
|
||||||
"deleteModel": "删除模型",
|
"deleteModel": "删除模型",
|
||||||
"shareRecipe": "分享配方",
|
"shareRecipe": "分享配方",
|
||||||
@@ -591,10 +600,26 @@
|
|||||||
"selectLoraRoot": "请选择 LoRA 根目录"
|
"selectLoraRoot": "请选择 LoRA 根目录"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"sort": {
|
||||||
|
"title": "配方排序...",
|
||||||
|
"name": "名称",
|
||||||
|
"nameAsc": "A - Z",
|
||||||
|
"nameDesc": "Z - A",
|
||||||
|
"date": "时间",
|
||||||
|
"dateDesc": "最新",
|
||||||
|
"dateAsc": "最早",
|
||||||
|
"lorasCount": "LoRA 数量",
|
||||||
|
"lorasCountDesc": "最多",
|
||||||
|
"lorasCountAsc": "最少"
|
||||||
|
},
|
||||||
"refresh": {
|
"refresh": {
|
||||||
"title": "刷新配方列表"
|
"title": "刷新配方列表"
|
||||||
},
|
},
|
||||||
"filteredByLora": "按 LoRA 筛选"
|
"filteredByLora": "按 LoRA 筛选",
|
||||||
|
"favorites": {
|
||||||
|
"title": "仅显示收藏",
|
||||||
|
"action": "收藏"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"duplicates": {
|
"duplicates": {
|
||||||
"found": "发现 {count} 个重复组",
|
"found": "发现 {count} 个重复组",
|
||||||
@@ -620,6 +645,13 @@
|
|||||||
"noMissingLoras": "没有缺失的 LoRA 可下载",
|
"noMissingLoras": "没有缺失的 LoRA 可下载",
|
||||||
"getInfoFailed": "获取缺失 LoRA 信息失败",
|
"getInfoFailed": "获取缺失 LoRA 信息失败",
|
||||||
"prepareError": "准备下载 LoRA 时出错:{message}"
|
"prepareError": "准备下载 LoRA 时出错:{message}"
|
||||||
|
},
|
||||||
|
"repair": {
|
||||||
|
"starting": "[TODO: Translate] Repairing recipe metadata...",
|
||||||
|
"success": "[TODO: Translate] Recipe metadata repaired successfully",
|
||||||
|
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
|
||||||
|
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
|
||||||
|
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -641,7 +673,8 @@
|
|||||||
"recursiveUnavailable": "仅在树形视图中可使用递归搜索",
|
"recursiveUnavailable": "仅在树形视图中可使用递归搜索",
|
||||||
"collapseAllDisabled": "列表视图下不可用",
|
"collapseAllDisabled": "列表视图下不可用",
|
||||||
"dragDrop": {
|
"dragDrop": {
|
||||||
"unableToResolveRoot": "无法确定移动的目标路径。"
|
"unableToResolveRoot": "无法确定移动的目标路径。",
|
||||||
|
"moveUnsupported": "Move is not supported for this item."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"statistics": {
|
"statistics": {
|
||||||
@@ -1463,7 +1496,8 @@
|
|||||||
"bulkMoveFailures": "移动失败:\n{failures}",
|
"bulkMoveFailures": "移动失败:\n{failures}",
|
||||||
"bulkMoveSuccess": "成功移动 {successCount} 个 {type}",
|
"bulkMoveSuccess": "成功移动 {successCount} 个 {type}",
|
||||||
"exampleImagesDownloadSuccess": "示例图片下载成功!",
|
"exampleImagesDownloadSuccess": "示例图片下载成功!",
|
||||||
"exampleImagesDownloadFailed": "示例图片下载失败:{message}"
|
"exampleImagesDownloadFailed": "示例图片下载失败:{message}",
|
||||||
|
"moveFailed": "Failed to move item: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"banners": {
|
"banners": {
|
||||||
|
|||||||
@@ -154,11 +154,17 @@
|
|||||||
"error": "清理範例圖片資料夾失敗:{message}"
|
"error": "清理範例圖片資料夾失敗:{message}"
|
||||||
},
|
},
|
||||||
"fetchMissingLicenses": {
|
"fetchMissingLicenses": {
|
||||||
"label": "Refresh license metadata",
|
"label": "重新整理授權中繼資料",
|
||||||
"loading": "Refreshing license metadata for {typePlural}...",
|
"loading": "正在重新整理 {typePlural} 的授權中繼資料...",
|
||||||
"success": "Updated license metadata for {count} {typePlural}",
|
"success": "已更新 {count} 個 {typePlural} 的授權中繼資料",
|
||||||
"none": "All {typePlural} already have license metadata",
|
"none": "所有 {typePlural} 已具備授權中繼資料",
|
||||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
"error": "重新整理 {typePlural} 授權中繼資料失敗:{message}"
|
||||||
|
},
|
||||||
|
"repairRecipes": {
|
||||||
|
"label": "修復配方資料",
|
||||||
|
"loading": "正在修復配方資料...",
|
||||||
|
"success": "成功修復 {count} 個配方。",
|
||||||
|
"error": "配方修復失敗:{message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"header": {
|
"header": {
|
||||||
@@ -188,7 +194,8 @@
|
|||||||
"creator": "創作者",
|
"creator": "創作者",
|
||||||
"title": "配方標題",
|
"title": "配方標題",
|
||||||
"loraName": "LoRA 檔案名稱",
|
"loraName": "LoRA 檔案名稱",
|
||||||
"loraModel": "LoRA 模型名稱"
|
"loraModel": "LoRA 模型名稱",
|
||||||
|
"prompt": "提示詞"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"filter": {
|
"filter": {
|
||||||
@@ -199,6 +206,7 @@
|
|||||||
"license": "授權",
|
"license": "授權",
|
||||||
"noCreditRequired": "無需署名",
|
"noCreditRequired": "無需署名",
|
||||||
"allowSellingGeneratedContent": "允許銷售",
|
"allowSellingGeneratedContent": "允許銷售",
|
||||||
|
"noTags": "無標籤",
|
||||||
"clearAll": "清除所有篩選"
|
"clearAll": "清除所有篩選"
|
||||||
},
|
},
|
||||||
"theme": {
|
"theme": {
|
||||||
@@ -521,6 +529,7 @@
|
|||||||
"replacePreview": "更換預覽圖",
|
"replacePreview": "更換預覽圖",
|
||||||
"setContentRating": "設定內容分級",
|
"setContentRating": "設定內容分級",
|
||||||
"moveToFolder": "移動到資料夾",
|
"moveToFolder": "移動到資料夾",
|
||||||
|
"repairMetadata": "[TODO: Translate] Repair metadata",
|
||||||
"excludeModel": "排除模型",
|
"excludeModel": "排除模型",
|
||||||
"deleteModel": "刪除模型",
|
"deleteModel": "刪除模型",
|
||||||
"shareRecipe": "分享配方",
|
"shareRecipe": "分享配方",
|
||||||
@@ -591,10 +600,26 @@
|
|||||||
"selectLoraRoot": "請選擇 LoRA 根目錄"
|
"selectLoraRoot": "請選擇 LoRA 根目錄"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"sort": {
|
||||||
|
"title": "配方排序...",
|
||||||
|
"name": "名稱",
|
||||||
|
"nameAsc": "A - Z",
|
||||||
|
"nameDesc": "Z - A",
|
||||||
|
"date": "時間",
|
||||||
|
"dateDesc": "最新",
|
||||||
|
"dateAsc": "最舊",
|
||||||
|
"lorasCount": "LoRA 數量",
|
||||||
|
"lorasCountDesc": "最多",
|
||||||
|
"lorasCountAsc": "最少"
|
||||||
|
},
|
||||||
"refresh": {
|
"refresh": {
|
||||||
"title": "重新整理配方列表"
|
"title": "重新整理配方列表"
|
||||||
},
|
},
|
||||||
"filteredByLora": "已依 LoRA 篩選"
|
"filteredByLora": "已依 LoRA 篩選",
|
||||||
|
"favorites": {
|
||||||
|
"title": "僅顯示收藏",
|
||||||
|
"action": "收藏"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"duplicates": {
|
"duplicates": {
|
||||||
"found": "發現 {count} 組重複項",
|
"found": "發現 {count} 組重複項",
|
||||||
@@ -620,6 +645,13 @@
|
|||||||
"noMissingLoras": "無缺少的 LoRA 可下載",
|
"noMissingLoras": "無缺少的 LoRA 可下載",
|
||||||
"getInfoFailed": "取得缺少 LoRA 資訊失敗",
|
"getInfoFailed": "取得缺少 LoRA 資訊失敗",
|
||||||
"prepareError": "準備下載 LoRA 時發生錯誤:{message}"
|
"prepareError": "準備下載 LoRA 時發生錯誤:{message}"
|
||||||
|
},
|
||||||
|
"repair": {
|
||||||
|
"starting": "[TODO: Translate] Repairing recipe metadata...",
|
||||||
|
"success": "[TODO: Translate] Recipe metadata repaired successfully",
|
||||||
|
"skipped": "[TODO: Translate] Recipe already at latest version, no repair needed",
|
||||||
|
"failed": "[TODO: Translate] Failed to repair recipe: {message}",
|
||||||
|
"missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -641,7 +673,8 @@
|
|||||||
"recursiveUnavailable": "遞迴搜尋僅能在樹狀檢視中使用",
|
"recursiveUnavailable": "遞迴搜尋僅能在樹狀檢視中使用",
|
||||||
"collapseAllDisabled": "列表檢視下不可用",
|
"collapseAllDisabled": "列表檢視下不可用",
|
||||||
"dragDrop": {
|
"dragDrop": {
|
||||||
"unableToResolveRoot": "無法確定移動的目標路徑。"
|
"unableToResolveRoot": "無法確定移動的目標路徑。",
|
||||||
|
"moveUnsupported": "Move is not supported for this item."
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"statistics": {
|
"statistics": {
|
||||||
@@ -1463,7 +1496,8 @@
|
|||||||
"bulkMoveFailures": "移動失敗:\n{failures}",
|
"bulkMoveFailures": "移動失敗:\n{failures}",
|
||||||
"bulkMoveSuccess": "已成功移動 {successCount} 個 {type}",
|
"bulkMoveSuccess": "已成功移動 {successCount} 個 {type}",
|
||||||
"exampleImagesDownloadSuccess": "範例圖片下載成功!",
|
"exampleImagesDownloadSuccess": "範例圖片下載成功!",
|
||||||
"exampleImagesDownloadFailed": "下載範例圖片失敗:{message}"
|
"exampleImagesDownloadFailed": "下載範例圖片失敗:{message}",
|
||||||
|
"moveFailed": "Failed to move item: {message}"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"banners": {
|
"banners": {
|
||||||
|
|||||||
127
py/config.py
127
py/config.py
@@ -1,11 +1,13 @@
|
|||||||
import os
|
import os
|
||||||
import platform
|
import platform
|
||||||
|
import threading
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import folder_paths # type: ignore
|
import folder_paths # type: ignore
|
||||||
from typing import Any, Dict, Iterable, List, Mapping, Optional, Set
|
from typing import Any, Dict, Iterable, List, Mapping, Optional, Set, Tuple
|
||||||
import logging
|
import logging
|
||||||
import json
|
import json
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
|
import time
|
||||||
|
|
||||||
from .utils.settings_paths import ensure_settings_file, get_settings_dir, load_settings_template
|
from .utils.settings_paths import ensure_settings_file, get_settings_dir, load_settings_template
|
||||||
|
|
||||||
@@ -80,6 +82,8 @@ class Config:
|
|||||||
self._path_mappings: Dict[str, str] = {}
|
self._path_mappings: Dict[str, str] = {}
|
||||||
# Normalized preview root directories used to validate preview access
|
# Normalized preview root directories used to validate preview access
|
||||||
self._preview_root_paths: Set[Path] = set()
|
self._preview_root_paths: Set[Path] = set()
|
||||||
|
# Optional background rescan thread
|
||||||
|
self._rescan_thread: Optional[threading.Thread] = None
|
||||||
self.loras_roots = self._init_lora_paths()
|
self.loras_roots = self._init_lora_paths()
|
||||||
self.checkpoints_roots = None
|
self.checkpoints_roots = None
|
||||||
self.unet_roots = None
|
self.unet_roots = None
|
||||||
@@ -282,56 +286,23 @@ class Config:
|
|||||||
def _load_symlink_cache(self) -> bool:
|
def _load_symlink_cache(self) -> bool:
|
||||||
cache_path = self._get_symlink_cache_path()
|
cache_path = self._get_symlink_cache_path()
|
||||||
if not cache_path.exists():
|
if not cache_path.exists():
|
||||||
|
logger.info("Symlink cache not found at %s", cache_path)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
with cache_path.open("r", encoding="utf-8") as handle:
|
with cache_path.open("r", encoding="utf-8") as handle:
|
||||||
payload = json.load(handle)
|
payload = json.load(handle)
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
logger.debug("Failed to load symlink cache %s: %s", cache_path, exc)
|
logger.info("Failed to load symlink cache %s: %s", cache_path, exc)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if not isinstance(payload, dict):
|
if not isinstance(payload, dict):
|
||||||
|
logger.info("Symlink cache payload is not a dict: %s", type(payload))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
cached_fingerprint = payload.get("fingerprint")
|
|
||||||
cached_mappings = payload.get("path_mappings")
|
cached_mappings = payload.get("path_mappings")
|
||||||
if not isinstance(cached_fingerprint, dict) or not isinstance(cached_mappings, Mapping):
|
if not isinstance(cached_mappings, Mapping):
|
||||||
return False
|
logger.info("Symlink cache missing path mappings")
|
||||||
|
|
||||||
current_fingerprint = self._build_symlink_fingerprint()
|
|
||||||
cached_roots = cached_fingerprint.get("roots")
|
|
||||||
cached_stats = cached_fingerprint.get("stats")
|
|
||||||
if (
|
|
||||||
not isinstance(cached_roots, list)
|
|
||||||
or not isinstance(cached_stats, Mapping)
|
|
||||||
or sorted(cached_roots) != sorted(current_fingerprint["roots"]) # type: ignore[index]
|
|
||||||
):
|
|
||||||
return False
|
|
||||||
|
|
||||||
for root in current_fingerprint["roots"]: # type: ignore[assignment]
|
|
||||||
cached_stat = cached_stats.get(root) if isinstance(cached_stats, Mapping) else None
|
|
||||||
current_stat = current_fingerprint["stats"].get(root) # type: ignore[index]
|
|
||||||
if not isinstance(cached_stat, Mapping) or not current_stat:
|
|
||||||
return False
|
|
||||||
|
|
||||||
cached_mtime = cached_stat.get("mtime_ns")
|
|
||||||
cached_inode = cached_stat.get("inode")
|
|
||||||
current_mtime = current_stat.get("mtime_ns")
|
|
||||||
current_inode = current_stat.get("inode")
|
|
||||||
|
|
||||||
if cached_inode != current_inode:
|
|
||||||
return False
|
|
||||||
|
|
||||||
if cached_mtime != current_mtime:
|
|
||||||
cached_noise = cached_stat.get("noise_mtime_ns")
|
|
||||||
current_noise = current_stat.get("noise_mtime_ns")
|
|
||||||
if not (
|
|
||||||
cached_noise
|
|
||||||
and current_noise
|
|
||||||
and cached_mtime == cached_noise
|
|
||||||
and current_mtime == current_noise
|
|
||||||
):
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
normalized_mappings: Dict[str, str] = {}
|
normalized_mappings: Dict[str, str] = {}
|
||||||
@@ -341,6 +312,7 @@ class Config:
|
|||||||
normalized_mappings[self._normalize_path(target)] = self._normalize_path(link)
|
normalized_mappings[self._normalize_path(target)] = self._normalize_path(link)
|
||||||
|
|
||||||
self._path_mappings = normalized_mappings
|
self._path_mappings = normalized_mappings
|
||||||
|
logger.info("Symlink cache loaded with %d mappings", len(self._path_mappings))
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _save_symlink_cache(self) -> None:
|
def _save_symlink_cache(self) -> None:
|
||||||
@@ -353,22 +325,75 @@ class Config:
|
|||||||
try:
|
try:
|
||||||
with cache_path.open("w", encoding="utf-8") as handle:
|
with cache_path.open("w", encoding="utf-8") as handle:
|
||||||
json.dump(payload, handle, ensure_ascii=False, indent=2)
|
json.dump(payload, handle, ensure_ascii=False, indent=2)
|
||||||
|
logger.info("Symlink cache saved to %s with %d mappings", cache_path, len(self._path_mappings))
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
logger.debug("Failed to write symlink cache %s: %s", cache_path, exc)
|
logger.info("Failed to write symlink cache %s: %s", cache_path, exc)
|
||||||
|
|
||||||
def _initialize_symlink_mappings(self) -> None:
|
def _initialize_symlink_mappings(self) -> None:
|
||||||
if not self._load_symlink_cache():
|
start = time.perf_counter()
|
||||||
|
cache_loaded = self._load_symlink_cache()
|
||||||
|
|
||||||
|
if cache_loaded:
|
||||||
|
logger.info(
|
||||||
|
"Symlink mappings restored from cache in %.2f ms",
|
||||||
|
(time.perf_counter() - start) * 1000,
|
||||||
|
)
|
||||||
|
self._rebuild_preview_roots()
|
||||||
|
self._schedule_symlink_rescan()
|
||||||
|
return
|
||||||
|
|
||||||
self._scan_symbolic_links()
|
self._scan_symbolic_links()
|
||||||
self._save_symlink_cache()
|
self._save_symlink_cache()
|
||||||
else:
|
|
||||||
logger.info("Loaded symlink mappings from cache")
|
|
||||||
self._rebuild_preview_roots()
|
self._rebuild_preview_roots()
|
||||||
|
logger.info(
|
||||||
|
"Symlink mappings rebuilt and cached in %.2f ms",
|
||||||
|
(time.perf_counter() - start) * 1000,
|
||||||
|
)
|
||||||
|
|
||||||
def _scan_symbolic_links(self):
|
def _scan_symbolic_links(self):
|
||||||
"""Scan all symbolic links in LoRA, Checkpoint, and Embedding root directories"""
|
"""Scan all symbolic links in LoRA, Checkpoint, and Embedding root directories"""
|
||||||
|
start = time.perf_counter()
|
||||||
|
# Reset mappings before rescanning to avoid stale entries
|
||||||
|
self._path_mappings.clear()
|
||||||
|
self._seed_root_symlink_mappings()
|
||||||
visited_dirs: Set[str] = set()
|
visited_dirs: Set[str] = set()
|
||||||
for root in self._symlink_roots():
|
for root in self._symlink_roots():
|
||||||
self._scan_directory_links(root, visited_dirs)
|
self._scan_directory_links(root, visited_dirs)
|
||||||
|
logger.info(
|
||||||
|
"Symlink scan finished in %.2f ms with %d mappings",
|
||||||
|
(time.perf_counter() - start) * 1000,
|
||||||
|
len(self._path_mappings),
|
||||||
|
)
|
||||||
|
|
||||||
|
def _schedule_symlink_rescan(self) -> None:
|
||||||
|
"""Trigger a best-effort background rescan to refresh stale caches."""
|
||||||
|
|
||||||
|
if self._rescan_thread and self._rescan_thread.is_alive():
|
||||||
|
return
|
||||||
|
|
||||||
|
def worker():
|
||||||
|
try:
|
||||||
|
self._scan_symbolic_links()
|
||||||
|
self._save_symlink_cache()
|
||||||
|
self._rebuild_preview_roots()
|
||||||
|
logger.info("Background symlink rescan completed")
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.info("Background symlink rescan failed: %s", exc)
|
||||||
|
|
||||||
|
thread = threading.Thread(
|
||||||
|
target=worker,
|
||||||
|
name="lora-manager-symlink-rescan",
|
||||||
|
daemon=True,
|
||||||
|
)
|
||||||
|
self._rescan_thread = thread
|
||||||
|
thread.start()
|
||||||
|
|
||||||
|
def _wait_for_rescan(self, timeout: Optional[float] = None) -> None:
|
||||||
|
"""Block until the background rescan completes (testing convenience)."""
|
||||||
|
|
||||||
|
thread = self._rescan_thread
|
||||||
|
if thread:
|
||||||
|
thread.join(timeout=timeout)
|
||||||
|
|
||||||
def _scan_directory_links(self, root: str, visited_dirs: Set[str]):
|
def _scan_directory_links(self, root: str, visited_dirs: Set[str]):
|
||||||
"""Iteratively scan directory symlinks to avoid deep recursion."""
|
"""Iteratively scan directory symlinks to avoid deep recursion."""
|
||||||
@@ -434,6 +459,22 @@ class Config:
|
|||||||
self._preview_root_paths.update(self._expand_preview_root(normalized_target))
|
self._preview_root_paths.update(self._expand_preview_root(normalized_target))
|
||||||
self._preview_root_paths.update(self._expand_preview_root(normalized_link))
|
self._preview_root_paths.update(self._expand_preview_root(normalized_link))
|
||||||
|
|
||||||
|
def _seed_root_symlink_mappings(self) -> None:
|
||||||
|
"""Ensure symlinked root folders are recorded before deep scanning."""
|
||||||
|
|
||||||
|
for root in self._symlink_roots():
|
||||||
|
if not root:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
if not self._is_link(root):
|
||||||
|
continue
|
||||||
|
target_path = os.path.realpath(root)
|
||||||
|
if not os.path.isdir(target_path):
|
||||||
|
continue
|
||||||
|
self.add_path_mapping(root, target_path)
|
||||||
|
except Exception as exc:
|
||||||
|
logger.debug("Skipping root symlink %s: %s", root, exc)
|
||||||
|
|
||||||
def _expand_preview_root(self, path: str) -> Set[Path]:
|
def _expand_preview_root(self, path: str) -> Set[Path]:
|
||||||
"""Return normalized ``Path`` objects representing a preview root."""
|
"""Return normalized ``Path`` objects representing a preview root."""
|
||||||
|
|
||||||
|
|||||||
@@ -40,7 +40,38 @@ class MetadataProcessor:
|
|||||||
candidate_samplers[node_id] = metadata[SAMPLING][node_id]
|
candidate_samplers[node_id] = metadata[SAMPLING][node_id]
|
||||||
|
|
||||||
# If we found candidate samplers, apply primary sampler logic to these candidates only
|
# If we found candidate samplers, apply primary sampler logic to these candidates only
|
||||||
if candidate_samplers:
|
|
||||||
|
# PRE-PROCESS: Ensure all candidate samplers have their parameters populated
|
||||||
|
# This is especially important for SamplerCustomAdvanced which needs tracing
|
||||||
|
prompt = metadata.get("current_prompt")
|
||||||
|
for node_id in candidate_samplers:
|
||||||
|
# If a sampler is missing common parameters like steps or denoise,
|
||||||
|
# try to populate them using tracing before ranking
|
||||||
|
sampler_info = candidate_samplers[node_id]
|
||||||
|
params = sampler_info.get("parameters", {})
|
||||||
|
|
||||||
|
if prompt and (params.get("steps") is None or params.get("denoise") is None):
|
||||||
|
# Create a temporary params dict to use the handler
|
||||||
|
temp_params = {
|
||||||
|
"steps": params.get("steps"),
|
||||||
|
"denoise": params.get("denoise"),
|
||||||
|
"sampler": params.get("sampler_name"),
|
||||||
|
"scheduler": params.get("scheduler")
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check if it's SamplerCustomAdvanced
|
||||||
|
if prompt.original_prompt and node_id in prompt.original_prompt:
|
||||||
|
if prompt.original_prompt[node_id].get("class_type") == "SamplerCustomAdvanced":
|
||||||
|
MetadataProcessor.handle_custom_advanced_sampler(metadata, prompt, node_id, temp_params)
|
||||||
|
|
||||||
|
# Update the actual parameters with found values
|
||||||
|
params["steps"] = temp_params.get("steps")
|
||||||
|
params["denoise"] = temp_params.get("denoise")
|
||||||
|
if temp_params.get("sampler"):
|
||||||
|
params["sampler_name"] = temp_params.get("sampler")
|
||||||
|
if temp_params.get("scheduler"):
|
||||||
|
params["scheduler"] = temp_params.get("scheduler")
|
||||||
|
|
||||||
# Collect potential primary samplers based on different criteria
|
# Collect potential primary samplers based on different criteria
|
||||||
custom_advanced_samplers = []
|
custom_advanced_samplers = []
|
||||||
advanced_add_noise_samplers = []
|
advanced_add_noise_samplers = []
|
||||||
@@ -49,7 +80,6 @@ class MetadataProcessor:
|
|||||||
high_denoise_id = None
|
high_denoise_id = None
|
||||||
|
|
||||||
# First, check for SamplerCustomAdvanced among candidates
|
# First, check for SamplerCustomAdvanced among candidates
|
||||||
prompt = metadata.get("current_prompt")
|
|
||||||
if prompt and prompt.original_prompt:
|
if prompt and prompt.original_prompt:
|
||||||
for node_id in candidate_samplers:
|
for node_id in candidate_samplers:
|
||||||
node_info = prompt.original_prompt.get(node_id, {})
|
node_info = prompt.original_prompt.get(node_id, {})
|
||||||
@@ -77,15 +107,16 @@ class MetadataProcessor:
|
|||||||
# Combine all potential primary samplers
|
# Combine all potential primary samplers
|
||||||
potential_samplers = custom_advanced_samplers + advanced_add_noise_samplers + high_denoise_samplers
|
potential_samplers = custom_advanced_samplers + advanced_add_noise_samplers + high_denoise_samplers
|
||||||
|
|
||||||
# Find the most recent potential primary sampler (closest to downstream node)
|
# Find the first potential primary sampler (prefer base sampler over refine)
|
||||||
for i in range(downstream_index - 1, -1, -1):
|
# Use forward search to prioritize the first one in execution order
|
||||||
|
for i in range(downstream_index):
|
||||||
node_id = execution_order[i]
|
node_id = execution_order[i]
|
||||||
if node_id in potential_samplers:
|
if node_id in potential_samplers:
|
||||||
return node_id, candidate_samplers[node_id]
|
return node_id, candidate_samplers[node_id]
|
||||||
|
|
||||||
# If no potential sampler found from our criteria, return the most recent sampler
|
# If no potential sampler found from our criteria, return the first sampler
|
||||||
if candidate_samplers:
|
if candidate_samplers:
|
||||||
for i in range(downstream_index - 1, -1, -1):
|
for i in range(downstream_index):
|
||||||
node_id = execution_order[i]
|
node_id = execution_order[i]
|
||||||
if node_id in candidate_samplers:
|
if node_id in candidate_samplers:
|
||||||
return node_id, candidate_samplers[node_id]
|
return node_id, candidate_samplers[node_id]
|
||||||
@@ -176,7 +207,10 @@ class MetadataProcessor:
|
|||||||
found_node_id = input_value[0] # Connected node_id
|
found_node_id = input_value[0] # Connected node_id
|
||||||
|
|
||||||
# If we're looking for a specific node class
|
# If we're looking for a specific node class
|
||||||
if target_class and prompt.original_prompt[found_node_id].get("class_type") == target_class:
|
if target_class:
|
||||||
|
if found_node_id not in prompt.original_prompt:
|
||||||
|
return None
|
||||||
|
if prompt.original_prompt[found_node_id].get("class_type") == target_class:
|
||||||
return found_node_id
|
return found_node_id
|
||||||
|
|
||||||
# If we're not looking for a specific class, update the last valid node
|
# If we're not looking for a specific class, update the last valid node
|
||||||
@@ -185,11 +219,19 @@ class MetadataProcessor:
|
|||||||
|
|
||||||
# Continue tracing through intermediate nodes
|
# Continue tracing through intermediate nodes
|
||||||
current_node_id = found_node_id
|
current_node_id = found_node_id
|
||||||
# For most conditioning nodes, the input we want to follow is named "conditioning"
|
|
||||||
if "conditioning" in prompt.original_prompt[current_node_id].get("inputs", {}):
|
# Check if current source node exists
|
||||||
|
if current_node_id not in prompt.original_prompt:
|
||||||
|
return found_node_id if not target_class else None
|
||||||
|
|
||||||
|
# Determine which input to follow next on the source node
|
||||||
|
source_node_inputs = prompt.original_prompt[current_node_id].get("inputs", {})
|
||||||
|
if input_name in source_node_inputs:
|
||||||
|
current_input = input_name
|
||||||
|
elif "conditioning" in source_node_inputs:
|
||||||
current_input = "conditioning"
|
current_input = "conditioning"
|
||||||
else:
|
else:
|
||||||
# If there's no "conditioning" input, return the current node
|
# If there's no suitable input to follow, return the current node
|
||||||
# if we're not looking for a specific target_class
|
# if we're not looking for a specific target_class
|
||||||
return found_node_id if not target_class else None
|
return found_node_id if not target_class else None
|
||||||
else:
|
else:
|
||||||
@@ -202,12 +244,89 @@ class MetadataProcessor:
|
|||||||
return last_valid_node if not target_class else None
|
return last_valid_node if not target_class else None
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def find_primary_checkpoint(metadata):
|
def trace_model_path(metadata, prompt, start_node_id):
|
||||||
"""Find the primary checkpoint model in the workflow"""
|
"""
|
||||||
|
Trace the model connection path upstream to find the checkpoint
|
||||||
|
"""
|
||||||
|
if not prompt or not prompt.original_prompt:
|
||||||
|
return None
|
||||||
|
|
||||||
|
current_node_id = start_node_id
|
||||||
|
depth = 0
|
||||||
|
max_depth = 50
|
||||||
|
|
||||||
|
while depth < max_depth:
|
||||||
|
# Check if current node is a registered checkpoint in our metadata
|
||||||
|
# This handles cached nodes correctly because metadata contains info for all nodes in the graph
|
||||||
|
if current_node_id in metadata.get(MODELS, {}):
|
||||||
|
if metadata[MODELS][current_node_id].get("type") == "checkpoint":
|
||||||
|
return current_node_id
|
||||||
|
|
||||||
|
if current_node_id not in prompt.original_prompt:
|
||||||
|
return None
|
||||||
|
|
||||||
|
node = prompt.original_prompt[current_node_id]
|
||||||
|
inputs = node.get("inputs", {})
|
||||||
|
class_type = node.get("class_type", "")
|
||||||
|
|
||||||
|
# Determine which input to follow next
|
||||||
|
next_input_name = "model"
|
||||||
|
|
||||||
|
# Special handling for initial node
|
||||||
|
if depth == 0:
|
||||||
|
if class_type == "SamplerCustomAdvanced":
|
||||||
|
next_input_name = "guider"
|
||||||
|
|
||||||
|
# If the specific input doesn't exist, try generic 'model'
|
||||||
|
if next_input_name not in inputs:
|
||||||
|
if "model" in inputs:
|
||||||
|
next_input_name = "model"
|
||||||
|
elif "basic_pipe" in inputs:
|
||||||
|
# Handle pipe nodes like FromBasicPipe by following the pipeline
|
||||||
|
next_input_name = "basic_pipe"
|
||||||
|
else:
|
||||||
|
# Dead end - no model input to follow
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Get connected node
|
||||||
|
input_val = inputs[next_input_name]
|
||||||
|
if isinstance(input_val, list) and len(input_val) > 0:
|
||||||
|
current_node_id = input_val[0]
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
depth += 1
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def find_primary_checkpoint(metadata, downstream_id=None, primary_sampler_id=None):
|
||||||
|
"""
|
||||||
|
Find the primary checkpoint model in the workflow
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
- metadata: The workflow metadata
|
||||||
|
- downstream_id: Optional ID of a downstream node to help identify the specific primary sampler
|
||||||
|
- primary_sampler_id: Optional ID of the primary sampler if already known
|
||||||
|
"""
|
||||||
if not metadata.get(MODELS):
|
if not metadata.get(MODELS):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# In most workflows, there's only one checkpoint, so we can just take the first one
|
# Method 1: Topology-based tracing (More accurate for complex workflows)
|
||||||
|
# First, find the primary sampler if not provided
|
||||||
|
if not primary_sampler_id:
|
||||||
|
primary_sampler_id, _ = MetadataProcessor.find_primary_sampler(metadata, downstream_id)
|
||||||
|
|
||||||
|
if primary_sampler_id:
|
||||||
|
prompt = metadata.get("current_prompt")
|
||||||
|
if prompt:
|
||||||
|
# Trace back from the sampler to find the checkpoint
|
||||||
|
checkpoint_id = MetadataProcessor.trace_model_path(metadata, prompt, primary_sampler_id)
|
||||||
|
if checkpoint_id and checkpoint_id in metadata.get(MODELS, {}):
|
||||||
|
return metadata[MODELS][checkpoint_id].get("name")
|
||||||
|
|
||||||
|
# Method 2: Fallback to the first available checkpoint (Original behavior)
|
||||||
|
# In most simple workflows, there's only one checkpoint, so we can just take the first one
|
||||||
for node_id, model_info in metadata.get(MODELS, {}).items():
|
for node_id, model_info in metadata.get(MODELS, {}).items():
|
||||||
if model_info.get("type") == "checkpoint":
|
if model_info.get("type") == "checkpoint":
|
||||||
return model_info.get("name")
|
return model_info.get("name")
|
||||||
@@ -311,7 +430,8 @@ class MetadataProcessor:
|
|||||||
primary_sampler_id, primary_sampler = MetadataProcessor.find_primary_sampler(metadata, id)
|
primary_sampler_id, primary_sampler = MetadataProcessor.find_primary_sampler(metadata, id)
|
||||||
|
|
||||||
# Directly get checkpoint from metadata instead of tracing
|
# Directly get checkpoint from metadata instead of tracing
|
||||||
checkpoint = MetadataProcessor.find_primary_checkpoint(metadata)
|
# Pass primary_sampler_id to avoid redundant calculation
|
||||||
|
checkpoint = MetadataProcessor.find_primary_checkpoint(metadata, id, primary_sampler_id)
|
||||||
if checkpoint:
|
if checkpoint:
|
||||||
params["checkpoint"] = checkpoint
|
params["checkpoint"] = checkpoint
|
||||||
|
|
||||||
@@ -445,6 +565,7 @@ class MetadataProcessor:
|
|||||||
scheduler_params = metadata[SAMPLING][scheduler_node_id].get("parameters", {})
|
scheduler_params = metadata[SAMPLING][scheduler_node_id].get("parameters", {})
|
||||||
params["steps"] = scheduler_params.get("steps")
|
params["steps"] = scheduler_params.get("steps")
|
||||||
params["scheduler"] = scheduler_params.get("scheduler")
|
params["scheduler"] = scheduler_params.get("scheduler")
|
||||||
|
params["denoise"] = scheduler_params.get("denoise")
|
||||||
|
|
||||||
# 2. Trace sampler input to find KSamplerSelect (only if sampler input exists)
|
# 2. Trace sampler input to find KSamplerSelect (only if sampler input exists)
|
||||||
if "sampler" in sampler_inputs:
|
if "sampler" in sampler_inputs:
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ from ..metadata_collector import get_metadata
|
|||||||
from PIL import Image, PngImagePlugin
|
from PIL import Image, PngImagePlugin
|
||||||
import piexif
|
import piexif
|
||||||
|
|
||||||
class SaveImage:
|
class SaveImageLM:
|
||||||
NAME = "Save Image (LoraManager)"
|
NAME = "Save Image (LoraManager)"
|
||||||
CATEGORY = "Lora Manager/utils"
|
CATEGORY = "Lora Manager/utils"
|
||||||
DESCRIPTION = "Save images with embedded generation metadata in compatible format"
|
DESCRIPTION = "Save images with embedded generation metadata in compatible format"
|
||||||
|
|||||||
@@ -103,7 +103,7 @@ def nunchaku_load_lora(model, lora_name, lora_strength):
|
|||||||
|
|
||||||
# Save the transformer temporarily
|
# Save the transformer temporarily
|
||||||
model_wrapper.model = None
|
model_wrapper.model = None
|
||||||
ret_model = copy.deepcopy(model) # copy everything except the model
|
ret_model = model.clone()
|
||||||
ret_model_wrapper = ret_model.model.diffusion_model
|
ret_model_wrapper = ret_model.model.diffusion_model
|
||||||
|
|
||||||
# Restore the model and set it for the copy
|
# Restore the model and set it for the copy
|
||||||
|
|||||||
@@ -37,7 +37,8 @@ class RecipeMetadataParser(ABC):
|
|||||||
"""
|
"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
async def populate_lora_from_civitai(self, lora_entry: Dict[str, Any], civitai_info_tuple: Tuple[Dict[str, Any], Optional[str]],
|
@staticmethod
|
||||||
|
async def populate_lora_from_civitai(lora_entry: Dict[str, Any], civitai_info_tuple: Tuple[Dict[str, Any], Optional[str]],
|
||||||
recipe_scanner=None, base_model_counts=None, hash_value=None) -> Optional[Dict[str, Any]]:
|
recipe_scanner=None, base_model_counts=None, hash_value=None) -> Optional[Dict[str, Any]]:
|
||||||
"""
|
"""
|
||||||
Populate a lora entry with information from Civitai API response
|
Populate a lora entry with information from Civitai API response
|
||||||
@@ -149,7 +150,8 @@ class RecipeMetadataParser(ABC):
|
|||||||
|
|
||||||
return lora_entry
|
return lora_entry
|
||||||
|
|
||||||
async def populate_checkpoint_from_civitai(self, checkpoint: Dict[str, Any], civitai_info: Dict[str, Any]) -> Dict[str, Any]:
|
@staticmethod
|
||||||
|
async def populate_checkpoint_from_civitai(checkpoint: Dict[str, Any], civitai_info: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
"""
|
"""
|
||||||
Populate checkpoint information from Civitai API response
|
Populate checkpoint information from Civitai API response
|
||||||
|
|
||||||
@@ -187,6 +189,7 @@ class RecipeMetadataParser(ABC):
|
|||||||
checkpoint['downloadUrl'] = civitai_data.get('downloadUrl', '')
|
checkpoint['downloadUrl'] = civitai_data.get('downloadUrl', '')
|
||||||
|
|
||||||
checkpoint['modelId'] = civitai_data.get('modelId', checkpoint.get('modelId', 0))
|
checkpoint['modelId'] = civitai_data.get('modelId', checkpoint.get('modelId', 0))
|
||||||
|
checkpoint['id'] = civitai_data.get('id', 0)
|
||||||
|
|
||||||
if 'files' in civitai_data:
|
if 'files' in civitai_data:
|
||||||
model_file = next(
|
model_file = next(
|
||||||
|
|||||||
216
py/recipes/enrichment.py
Normal file
216
py/recipes/enrichment.py
Normal file
@@ -0,0 +1,216 @@
|
|||||||
|
import logging
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import os
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
from .merger import GenParamsMerger
|
||||||
|
from .base import RecipeMetadataParser
|
||||||
|
from ..services.metadata_service import get_default_metadata_provider
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
class RecipeEnricher:
|
||||||
|
"""Service to enrich recipe metadata from multiple sources (Civitai, Embedded, User)."""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def enrich_recipe(
|
||||||
|
recipe: Dict[str, Any],
|
||||||
|
civitai_client: Any,
|
||||||
|
request_params: Optional[Dict[str, Any]] = None
|
||||||
|
) -> bool:
|
||||||
|
"""
|
||||||
|
Enrich a recipe dictionary in-place with metadata from Civitai and embedded params.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
recipe: The recipe dictionary to enrich. Must have 'gen_params' initialized.
|
||||||
|
civitai_client: Authenticated Civitai client instance.
|
||||||
|
request_params: (Optional) Parameters from a user request (e.g. import).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if the recipe was modified, False otherwise.
|
||||||
|
"""
|
||||||
|
updated = False
|
||||||
|
gen_params = recipe.get("gen_params", {})
|
||||||
|
|
||||||
|
# 1. Fetch Civitai Info if available
|
||||||
|
civitai_meta = None
|
||||||
|
model_version_id = None
|
||||||
|
|
||||||
|
source_url = recipe.get("source_url") or recipe.get("source_path", "")
|
||||||
|
|
||||||
|
# Check if it's a Civitai image URL
|
||||||
|
image_id_match = re.search(r'civitai\.com/images/(\d+)', str(source_url))
|
||||||
|
if image_id_match:
|
||||||
|
image_id = image_id_match.group(1)
|
||||||
|
try:
|
||||||
|
image_info = await civitai_client.get_image_info(image_id)
|
||||||
|
if image_info:
|
||||||
|
# Handle nested meta often found in Civitai API responses
|
||||||
|
raw_meta = image_info.get("meta")
|
||||||
|
if isinstance(raw_meta, dict):
|
||||||
|
if "meta" in raw_meta and isinstance(raw_meta["meta"], dict):
|
||||||
|
civitai_meta = raw_meta["meta"]
|
||||||
|
else:
|
||||||
|
civitai_meta = raw_meta
|
||||||
|
|
||||||
|
model_version_id = image_info.get("modelVersionId")
|
||||||
|
|
||||||
|
# If not at top level, check resources in meta
|
||||||
|
if not model_version_id and civitai_meta:
|
||||||
|
resources = civitai_meta.get("civitaiResources", [])
|
||||||
|
for res in resources:
|
||||||
|
if res.get("type") == "checkpoint":
|
||||||
|
model_version_id = res.get("modelVersionId")
|
||||||
|
break
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Failed to fetch Civitai image info: {e}")
|
||||||
|
|
||||||
|
# 2. Merge Parameters
|
||||||
|
# Priority: request_params > civitai_meta > embedded (existing gen_params)
|
||||||
|
new_gen_params = GenParamsMerger.merge(
|
||||||
|
request_params=request_params,
|
||||||
|
civitai_meta=civitai_meta,
|
||||||
|
embedded_metadata=gen_params
|
||||||
|
)
|
||||||
|
|
||||||
|
if new_gen_params != gen_params:
|
||||||
|
recipe["gen_params"] = new_gen_params
|
||||||
|
updated = True
|
||||||
|
|
||||||
|
# 3. Checkpoint Enrichment
|
||||||
|
# If we have a checkpoint entry, or we can find one
|
||||||
|
# Use 'id' (from Civitai version) as a marker that it's been enriched
|
||||||
|
checkpoint_entry = recipe.get("checkpoint")
|
||||||
|
has_full_checkpoint = checkpoint_entry and checkpoint_entry.get("name") and checkpoint_entry.get("id")
|
||||||
|
|
||||||
|
if not has_full_checkpoint:
|
||||||
|
# Helper to look up values in priority order
|
||||||
|
def start_lookup(keys):
|
||||||
|
for source in [request_params, civitai_meta, gen_params]:
|
||||||
|
if source:
|
||||||
|
if isinstance(keys, list):
|
||||||
|
for k in keys:
|
||||||
|
if k in source: return source[k]
|
||||||
|
else:
|
||||||
|
if keys in source: return source[keys]
|
||||||
|
return None
|
||||||
|
|
||||||
|
target_version_id = model_version_id or start_lookup("modelVersionId")
|
||||||
|
|
||||||
|
# Also check existing checkpoint entry
|
||||||
|
if not target_version_id and checkpoint_entry:
|
||||||
|
target_version_id = checkpoint_entry.get("modelVersionId") or checkpoint_entry.get("id")
|
||||||
|
|
||||||
|
# Check for version ID in resources (which might be a string in gen_params)
|
||||||
|
if not target_version_id:
|
||||||
|
# Look in all sources for "Civitai resources"
|
||||||
|
resources_val = start_lookup(["Civitai resources", "civitai_resources", "resources"])
|
||||||
|
if resources_val:
|
||||||
|
target_version_id = RecipeEnricher._extract_version_id_from_resources({"Civitai resources": resources_val})
|
||||||
|
|
||||||
|
target_hash = start_lookup(["Model hash", "checkpoint_hash", "hashes"])
|
||||||
|
if not target_hash and checkpoint_entry:
|
||||||
|
target_hash = checkpoint_entry.get("hash") or checkpoint_entry.get("model_hash")
|
||||||
|
|
||||||
|
# Look for 'Model' which sometimes is the hash or name
|
||||||
|
model_val = start_lookup("Model")
|
||||||
|
|
||||||
|
# Look for Checkpoint name fallback
|
||||||
|
checkpoint_val = checkpoint_entry.get("name") if checkpoint_entry else None
|
||||||
|
if not checkpoint_val:
|
||||||
|
checkpoint_val = start_lookup(["Checkpoint", "checkpoint"])
|
||||||
|
|
||||||
|
checkpoint_updated = await RecipeEnricher._resolve_and_populate_checkpoint(
|
||||||
|
recipe, target_version_id, target_hash, model_val, checkpoint_val
|
||||||
|
)
|
||||||
|
if checkpoint_updated:
|
||||||
|
updated = True
|
||||||
|
else:
|
||||||
|
# Checkpoint exists, no need to sync to gen_params anymore.
|
||||||
|
pass
|
||||||
|
# base_model resolution moved to _resolve_and_populate_checkpoint to support strict formatting
|
||||||
|
return updated
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _extract_version_id_from_resources(gen_params: Dict[str, Any]) -> Optional[Any]:
|
||||||
|
"""Try to find modelVersionId in Civitai resources parameter."""
|
||||||
|
civitai_resources_raw = gen_params.get("Civitai resources")
|
||||||
|
if not civitai_resources_raw:
|
||||||
|
return None
|
||||||
|
|
||||||
|
resources_list = None
|
||||||
|
if isinstance(civitai_resources_raw, str):
|
||||||
|
try:
|
||||||
|
resources_list = json.loads(civitai_resources_raw)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
elif isinstance(civitai_resources_raw, list):
|
||||||
|
resources_list = civitai_resources_raw
|
||||||
|
|
||||||
|
if isinstance(resources_list, list):
|
||||||
|
for res in resources_list:
|
||||||
|
if res.get("type") == "checkpoint":
|
||||||
|
return res.get("modelVersionId")
|
||||||
|
return None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def _resolve_and_populate_checkpoint(
|
||||||
|
recipe: Dict[str, Any],
|
||||||
|
target_version_id: Optional[Any],
|
||||||
|
target_hash: Optional[str],
|
||||||
|
model_val: Optional[str],
|
||||||
|
checkpoint_val: Optional[str]
|
||||||
|
) -> bool:
|
||||||
|
"""Find checkpoint metadata and populate it in the recipe."""
|
||||||
|
metadata_provider = await get_default_metadata_provider()
|
||||||
|
civitai_info = None
|
||||||
|
|
||||||
|
if target_version_id:
|
||||||
|
civitai_info = await metadata_provider.get_model_version_info(str(target_version_id))
|
||||||
|
elif target_hash:
|
||||||
|
civitai_info = await metadata_provider.get_model_by_hash(target_hash)
|
||||||
|
else:
|
||||||
|
# Look for 'Model' which sometimes is the hash or name
|
||||||
|
if model_val and len(model_val) == 10: # Likely a short hash
|
||||||
|
civitai_info = await metadata_provider.get_model_by_hash(model_val)
|
||||||
|
|
||||||
|
if civitai_info and not (isinstance(civitai_info, tuple) and civitai_info[1] == "Model not found"):
|
||||||
|
# If we already have a partial checkpoint, use it as base
|
||||||
|
existing_cp = recipe.get("checkpoint")
|
||||||
|
if existing_cp is None:
|
||||||
|
existing_cp = {}
|
||||||
|
checkpoint_data = await RecipeMetadataParser.populate_checkpoint_from_civitai(existing_cp, civitai_info)
|
||||||
|
# 1. First, resolve base_model using full data before we format it away
|
||||||
|
current_base_model = recipe.get("base_model")
|
||||||
|
resolved_base_model = checkpoint_data.get("baseModel")
|
||||||
|
if resolved_base_model:
|
||||||
|
# Update if empty OR if it matches our generic prefix but is less specific
|
||||||
|
is_generic = not current_base_model or current_base_model.lower() in ["flux", "sdxl", "sd15"]
|
||||||
|
if is_generic and resolved_base_model != current_base_model:
|
||||||
|
recipe["base_model"] = resolved_base_model
|
||||||
|
|
||||||
|
# 2. Format according to requirements: type, modelId, modelVersionId, modelName, modelVersionName
|
||||||
|
formatted_checkpoint = {
|
||||||
|
"type": "checkpoint",
|
||||||
|
"modelId": checkpoint_data.get("modelId"),
|
||||||
|
"modelVersionId": checkpoint_data.get("id") or checkpoint_data.get("modelVersionId"),
|
||||||
|
"modelName": checkpoint_data.get("name"), # In base.py, 'name' is populated from civitai_data['model']['name']
|
||||||
|
"modelVersionName": checkpoint_data.get("version") # In base.py, 'version' is populated from civitai_data['name']
|
||||||
|
}
|
||||||
|
# Remove None values
|
||||||
|
recipe["checkpoint"] = {k: v for k, v in formatted_checkpoint.items() if v is not None}
|
||||||
|
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
# Fallback to name extraction if we don't already have one
|
||||||
|
existing_cp = recipe.get("checkpoint")
|
||||||
|
if not existing_cp or not existing_cp.get("modelName"):
|
||||||
|
cp_name = checkpoint_val
|
||||||
|
if cp_name:
|
||||||
|
recipe["checkpoint"] = {
|
||||||
|
"type": "checkpoint",
|
||||||
|
"modelName": cp_name
|
||||||
|
}
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
98
py/recipes/merger.py
Normal file
98
py/recipes/merger.py
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
from typing import Any, Dict, Optional
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
class GenParamsMerger:
|
||||||
|
"""Utility to merge generation parameters from multiple sources with priority."""
|
||||||
|
|
||||||
|
BLACKLISTED_KEYS = {
|
||||||
|
"id", "url", "userId", "username", "createdAt", "updatedAt", "hash", "meta",
|
||||||
|
"draft", "extra", "width", "height", "process", "quantity", "workflow",
|
||||||
|
"baseModel", "resources", "disablePoi", "aspectRatio", "Created Date",
|
||||||
|
"experimental", "civitaiResources", "civitai_resources", "Civitai resources",
|
||||||
|
"modelVersionId", "modelId", "hashes", "Model", "Model hash", "checkpoint_hash",
|
||||||
|
"checkpoint", "checksum", "model_checksum"
|
||||||
|
}
|
||||||
|
|
||||||
|
NORMALIZATION_MAPPING = {
|
||||||
|
# Civitai specific
|
||||||
|
"cfgScale": "cfg_scale",
|
||||||
|
"clipSkip": "clip_skip",
|
||||||
|
"negativePrompt": "negative_prompt",
|
||||||
|
# Case variations
|
||||||
|
"Sampler": "sampler",
|
||||||
|
"Steps": "steps",
|
||||||
|
"Seed": "seed",
|
||||||
|
"Size": "size",
|
||||||
|
"Prompt": "prompt",
|
||||||
|
"Negative prompt": "negative_prompt",
|
||||||
|
"Cfg scale": "cfg_scale",
|
||||||
|
"Clip skip": "clip_skip",
|
||||||
|
"Denoising strength": "denoising_strength",
|
||||||
|
}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def merge(
|
||||||
|
request_params: Optional[Dict[str, Any]] = None,
|
||||||
|
civitai_meta: Optional[Dict[str, Any]] = None,
|
||||||
|
embedded_metadata: Optional[Dict[str, Any]] = None
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Merge generation parameters from three sources.
|
||||||
|
|
||||||
|
Priority: request_params > civitai_meta > embedded_metadata
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request_params: Params provided directly in the import request
|
||||||
|
civitai_meta: Params from Civitai Image API 'meta' field
|
||||||
|
embedded_metadata: Params extracted from image EXIF/embedded metadata
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Merged parameters dictionary
|
||||||
|
"""
|
||||||
|
result = {}
|
||||||
|
|
||||||
|
# 1. Start with embedded metadata (lowest priority)
|
||||||
|
if embedded_metadata:
|
||||||
|
# If it's a full recipe metadata, we use its gen_params
|
||||||
|
if "gen_params" in embedded_metadata and isinstance(embedded_metadata["gen_params"], dict):
|
||||||
|
GenParamsMerger._update_normalized(result, embedded_metadata["gen_params"])
|
||||||
|
else:
|
||||||
|
# Otherwise assume the dict itself contains gen_params
|
||||||
|
GenParamsMerger._update_normalized(result, embedded_metadata)
|
||||||
|
|
||||||
|
# 2. Layer Civitai meta (medium priority)
|
||||||
|
if civitai_meta:
|
||||||
|
GenParamsMerger._update_normalized(result, civitai_meta)
|
||||||
|
|
||||||
|
# 3. Layer request params (highest priority)
|
||||||
|
if request_params:
|
||||||
|
GenParamsMerger._update_normalized(result, request_params)
|
||||||
|
|
||||||
|
# Filter out blacklisted keys and also the original camelCase keys if they were normalized
|
||||||
|
final_result = {}
|
||||||
|
for k, v in result.items():
|
||||||
|
if k in GenParamsMerger.BLACKLISTED_KEYS:
|
||||||
|
continue
|
||||||
|
if k in GenParamsMerger.NORMALIZATION_MAPPING:
|
||||||
|
continue
|
||||||
|
final_result[k] = v
|
||||||
|
|
||||||
|
return final_result
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _update_normalized(target: Dict[str, Any], source: Dict[str, Any]) -> None:
|
||||||
|
"""Update target dict with normalized keys from source."""
|
||||||
|
for k, v in source.items():
|
||||||
|
normalized_key = GenParamsMerger.NORMALIZATION_MAPPING.get(k, k)
|
||||||
|
target[normalized_key] = v
|
||||||
|
# Also keep the original key for now if it's not the same,
|
||||||
|
# so we can filter at the end or avoid losing it if it wasn't supposed to be renamed?
|
||||||
|
# Actually, if we rename it, we should probably NOT keep both in 'target'
|
||||||
|
# because we want to filter them out at the end anyway.
|
||||||
|
if normalized_key != k:
|
||||||
|
# If we are overwriting an existing snake_case key with a camelCase one's value,
|
||||||
|
# that's fine because of the priority order of calls to _update_normalized.
|
||||||
|
pass
|
||||||
|
target[k] = v
|
||||||
@@ -36,9 +36,6 @@ class ComfyMetadataParser(RecipeMetadataParser):
|
|||||||
# Find all LoraLoader nodes
|
# Find all LoraLoader nodes
|
||||||
lora_nodes = {k: v for k, v in data.items() if isinstance(v, dict) and v.get('class_type') == 'LoraLoader'}
|
lora_nodes = {k: v for k, v in data.items() if isinstance(v, dict) and v.get('class_type') == 'LoraLoader'}
|
||||||
|
|
||||||
if not lora_nodes:
|
|
||||||
return {"error": "No LoRA information found in this ComfyUI workflow", "loras": []}
|
|
||||||
|
|
||||||
# Process each LoraLoader node
|
# Process each LoraLoader node
|
||||||
for node_id, node in lora_nodes.items():
|
for node_id, node in lora_nodes.items():
|
||||||
if 'inputs' not in node or 'lora_name' not in node['inputs']:
|
if 'inputs' not in node or 'lora_name' not in node['inputs']:
|
||||||
|
|||||||
@@ -79,26 +79,8 @@ class BaseRecipeRoutes:
|
|||||||
return
|
return
|
||||||
|
|
||||||
app.on_startup.append(self.attach_dependencies)
|
app.on_startup.append(self.attach_dependencies)
|
||||||
app.on_startup.append(self.prewarm_cache)
|
|
||||||
self._startup_hooks_registered = True
|
self._startup_hooks_registered = True
|
||||||
|
|
||||||
async def prewarm_cache(self, app: web.Application | None = None) -> None:
|
|
||||||
"""Pre-load recipe and LoRA caches on startup."""
|
|
||||||
|
|
||||||
try:
|
|
||||||
await self.attach_dependencies(app)
|
|
||||||
|
|
||||||
if self.lora_scanner is not None:
|
|
||||||
await self.lora_scanner.get_cached_data()
|
|
||||||
hash_index = getattr(self.lora_scanner, "_hash_index", None)
|
|
||||||
if hash_index is not None and hasattr(hash_index, "_hash_to_path"):
|
|
||||||
_ = len(hash_index._hash_to_path)
|
|
||||||
|
|
||||||
if self.recipe_scanner is not None:
|
|
||||||
await self.recipe_scanner.get_cached_data(force_refresh=True)
|
|
||||||
except Exception as exc:
|
|
||||||
logger.error("Error pre-warming recipe cache: %s", exc, exc_info=True)
|
|
||||||
|
|
||||||
def to_route_mapping(self) -> Mapping[str, Callable]:
|
def to_route_mapping(self) -> Mapping[str, Callable]:
|
||||||
"""Return a mapping of handler name to coroutine for registrar binding."""
|
"""Return a mapping of handler name to coroutine for registrar binding."""
|
||||||
|
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import asyncio
|
|||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import time
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import Any, Awaitable, Callable, Dict, Iterable, List, Mapping, Optional
|
from typing import Any, Awaitable, Callable, Dict, Iterable, List, Mapping, Optional
|
||||||
|
|
||||||
@@ -61,6 +62,37 @@ class ModelPageView:
|
|||||||
self._settings = settings_service
|
self._settings = settings_service
|
||||||
self._server_i18n = server_i18n
|
self._server_i18n = server_i18n
|
||||||
self._logger = logger
|
self._logger = logger
|
||||||
|
self._app_version = self._get_app_version()
|
||||||
|
|
||||||
|
def _get_app_version(self) -> str:
|
||||||
|
version = "1.0.0"
|
||||||
|
short_hash = "stable"
|
||||||
|
try:
|
||||||
|
import toml
|
||||||
|
current_file = os.path.abspath(__file__)
|
||||||
|
# Navigate up from py/routes/handlers/model_handlers.py to project root
|
||||||
|
root_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(current_file))))
|
||||||
|
pyproject_path = os.path.join(root_dir, 'pyproject.toml')
|
||||||
|
|
||||||
|
if os.path.exists(pyproject_path):
|
||||||
|
with open(pyproject_path, 'r', encoding='utf-8') as f:
|
||||||
|
data = toml.load(f)
|
||||||
|
version = data.get('project', {}).get('version', '1.0.0').replace('v', '')
|
||||||
|
|
||||||
|
# Try to get git info for granular cache busting
|
||||||
|
git_dir = os.path.join(root_dir, '.git')
|
||||||
|
if os.path.exists(git_dir):
|
||||||
|
try:
|
||||||
|
import git
|
||||||
|
repo = git.Repo(root_dir)
|
||||||
|
short_hash = repo.head.commit.hexsha[:7]
|
||||||
|
except Exception:
|
||||||
|
# Fallback if git is not available or not a repo
|
||||||
|
pass
|
||||||
|
except Exception as e:
|
||||||
|
self._logger.debug(f"Failed to read version info for cache busting: {e}")
|
||||||
|
|
||||||
|
return f"{version}-{short_hash}"
|
||||||
|
|
||||||
async def handle(self, request: web.Request) -> web.Response:
|
async def handle(self, request: web.Request) -> web.Response:
|
||||||
try:
|
try:
|
||||||
@@ -96,6 +128,7 @@ class ModelPageView:
|
|||||||
"request": request,
|
"request": request,
|
||||||
"folders": [],
|
"folders": [],
|
||||||
"t": self._server_i18n.get_translation,
|
"t": self._server_i18n.get_translation,
|
||||||
|
"version": self._app_version,
|
||||||
}
|
}
|
||||||
|
|
||||||
if not is_initializing:
|
if not is_initializing:
|
||||||
@@ -128,9 +161,12 @@ class ModelListingHandler:
|
|||||||
self._logger = logger
|
self._logger = logger
|
||||||
|
|
||||||
async def get_models(self, request: web.Request) -> web.Response:
|
async def get_models(self, request: web.Request) -> web.Response:
|
||||||
|
start_time = time.perf_counter()
|
||||||
try:
|
try:
|
||||||
params = self._parse_common_params(request)
|
params = self._parse_common_params(request)
|
||||||
result = await self._service.get_paginated_data(**params)
|
result = await self._service.get_paginated_data(**params)
|
||||||
|
|
||||||
|
format_start = time.perf_counter()
|
||||||
formatted_result = {
|
formatted_result = {
|
||||||
"items": [await self._service.format_response(item) for item in result["items"]],
|
"items": [await self._service.format_response(item) for item in result["items"]],
|
||||||
"total": result["total"],
|
"total": result["total"],
|
||||||
@@ -138,6 +174,13 @@ class ModelListingHandler:
|
|||||||
"page_size": result["page_size"],
|
"page_size": result["page_size"],
|
||||||
"total_pages": result["total_pages"],
|
"total_pages": result["total_pages"],
|
||||||
}
|
}
|
||||||
|
format_duration = time.perf_counter() - format_start
|
||||||
|
|
||||||
|
duration = time.perf_counter() - start_time
|
||||||
|
self._logger.info(
|
||||||
|
"Request for %s/list took %.3fs (formatting: %.3fs)",
|
||||||
|
self._service.model_type, duration, format_duration
|
||||||
|
)
|
||||||
return web.json_response(formatted_result)
|
return web.json_response(formatted_result)
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
self._logger.error("Error retrieving %ss: %s", self._service.model_type, exc, exc_info=True)
|
self._logger.error("Error retrieving %ss: %s", self._service.model_type, exc, exc_info=True)
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import json
|
|||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
import asyncio
|
||||||
import tempfile
|
import tempfile
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import Any, Awaitable, Callable, Dict, List, Mapping, Optional
|
from typing import Any, Awaitable, Callable, Dict, List, Mapping, Optional
|
||||||
@@ -23,6 +24,11 @@ from ...services.recipes import (
|
|||||||
RecipeValidationError,
|
RecipeValidationError,
|
||||||
)
|
)
|
||||||
from ...services.metadata_service import get_default_metadata_provider
|
from ...services.metadata_service import get_default_metadata_provider
|
||||||
|
from ...utils.civitai_utils import rewrite_preview_url
|
||||||
|
from ...utils.exif_utils import ExifUtils
|
||||||
|
from ...recipes.merger import GenParamsMerger
|
||||||
|
from ...recipes.enrichment import RecipeEnricher
|
||||||
|
from ...services.websocket_manager import ws_manager as default_ws_manager
|
||||||
|
|
||||||
Logger = logging.Logger
|
Logger = logging.Logger
|
||||||
EnsureDependenciesCallable = Callable[[], Awaitable[None]]
|
EnsureDependenciesCallable = Callable[[], Awaitable[None]]
|
||||||
@@ -55,16 +61,25 @@ class RecipeHandlerSet:
|
|||||||
"delete_recipe": self.management.delete_recipe,
|
"delete_recipe": self.management.delete_recipe,
|
||||||
"get_top_tags": self.query.get_top_tags,
|
"get_top_tags": self.query.get_top_tags,
|
||||||
"get_base_models": self.query.get_base_models,
|
"get_base_models": self.query.get_base_models,
|
||||||
|
"get_roots": self.query.get_roots,
|
||||||
|
"get_folders": self.query.get_folders,
|
||||||
|
"get_folder_tree": self.query.get_folder_tree,
|
||||||
|
"get_unified_folder_tree": self.query.get_unified_folder_tree,
|
||||||
"share_recipe": self.sharing.share_recipe,
|
"share_recipe": self.sharing.share_recipe,
|
||||||
"download_shared_recipe": self.sharing.download_shared_recipe,
|
"download_shared_recipe": self.sharing.download_shared_recipe,
|
||||||
"get_recipe_syntax": self.query.get_recipe_syntax,
|
"get_recipe_syntax": self.query.get_recipe_syntax,
|
||||||
"update_recipe": self.management.update_recipe,
|
"update_recipe": self.management.update_recipe,
|
||||||
"reconnect_lora": self.management.reconnect_lora,
|
"reconnect_lora": self.management.reconnect_lora,
|
||||||
"find_duplicates": self.query.find_duplicates,
|
"find_duplicates": self.query.find_duplicates,
|
||||||
|
"move_recipes_bulk": self.management.move_recipes_bulk,
|
||||||
"bulk_delete": self.management.bulk_delete,
|
"bulk_delete": self.management.bulk_delete,
|
||||||
"save_recipe_from_widget": self.management.save_recipe_from_widget,
|
"save_recipe_from_widget": self.management.save_recipe_from_widget,
|
||||||
"get_recipes_for_lora": self.query.get_recipes_for_lora,
|
"get_recipes_for_lora": self.query.get_recipes_for_lora,
|
||||||
"scan_recipes": self.query.scan_recipes,
|
"scan_recipes": self.query.scan_recipes,
|
||||||
|
"move_recipe": self.management.move_recipe,
|
||||||
|
"repair_recipes": self.management.repair_recipes,
|
||||||
|
"repair_recipe": self.management.repair_recipe,
|
||||||
|
"get_repair_progress": self.management.get_repair_progress,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -148,12 +163,15 @@ class RecipeListingHandler:
|
|||||||
page_size = int(request.query.get("page_size", "20"))
|
page_size = int(request.query.get("page_size", "20"))
|
||||||
sort_by = request.query.get("sort_by", "date")
|
sort_by = request.query.get("sort_by", "date")
|
||||||
search = request.query.get("search")
|
search = request.query.get("search")
|
||||||
|
folder = request.query.get("folder")
|
||||||
|
recursive = request.query.get("recursive", "true").lower() == "true"
|
||||||
|
|
||||||
search_options = {
|
search_options = {
|
||||||
"title": request.query.get("search_title", "true").lower() == "true",
|
"title": request.query.get("search_title", "true").lower() == "true",
|
||||||
"tags": request.query.get("search_tags", "true").lower() == "true",
|
"tags": request.query.get("search_tags", "true").lower() == "true",
|
||||||
"lora_name": request.query.get("search_lora_name", "true").lower() == "true",
|
"lora_name": request.query.get("search_lora_name", "true").lower() == "true",
|
||||||
"lora_model": request.query.get("search_lora_model", "true").lower() == "true",
|
"lora_model": request.query.get("search_lora_model", "true").lower() == "true",
|
||||||
|
"prompt": request.query.get("search_prompt", "true").lower() == "true",
|
||||||
}
|
}
|
||||||
|
|
||||||
filters: Dict[str, Any] = {}
|
filters: Dict[str, Any] = {}
|
||||||
@@ -161,6 +179,9 @@ class RecipeListingHandler:
|
|||||||
if base_models:
|
if base_models:
|
||||||
filters["base_model"] = base_models.split(",")
|
filters["base_model"] = base_models.split(",")
|
||||||
|
|
||||||
|
if request.query.get("favorite", "false").lower() == "true":
|
||||||
|
filters["favorite"] = True
|
||||||
|
|
||||||
tag_filters: Dict[str, str] = {}
|
tag_filters: Dict[str, str] = {}
|
||||||
legacy_tags = request.query.get("tags")
|
legacy_tags = request.query.get("tags")
|
||||||
if legacy_tags:
|
if legacy_tags:
|
||||||
@@ -192,6 +213,8 @@ class RecipeListingHandler:
|
|||||||
filters=filters,
|
filters=filters,
|
||||||
search_options=search_options,
|
search_options=search_options,
|
||||||
lora_hash=lora_hash,
|
lora_hash=lora_hash,
|
||||||
|
folder=folder,
|
||||||
|
recursive=recursive,
|
||||||
)
|
)
|
||||||
|
|
||||||
for item in result.get("items", []):
|
for item in result.get("items", []):
|
||||||
@@ -298,6 +321,58 @@ class RecipeQueryHandler:
|
|||||||
self._logger.error("Error retrieving base models: %s", exc, exc_info=True)
|
self._logger.error("Error retrieving base models: %s", exc, exc_info=True)
|
||||||
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def get_roots(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
await self._ensure_dependencies_ready()
|
||||||
|
recipe_scanner = self._recipe_scanner_getter()
|
||||||
|
if recipe_scanner is None:
|
||||||
|
raise RuntimeError("Recipe scanner unavailable")
|
||||||
|
|
||||||
|
roots = [recipe_scanner.recipes_dir] if recipe_scanner.recipes_dir else []
|
||||||
|
return web.json_response({"success": True, "roots": roots})
|
||||||
|
except Exception as exc:
|
||||||
|
self._logger.error("Error retrieving recipe roots: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def get_folders(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
await self._ensure_dependencies_ready()
|
||||||
|
recipe_scanner = self._recipe_scanner_getter()
|
||||||
|
if recipe_scanner is None:
|
||||||
|
raise RuntimeError("Recipe scanner unavailable")
|
||||||
|
|
||||||
|
folders = await recipe_scanner.get_folders()
|
||||||
|
return web.json_response({"success": True, "folders": folders})
|
||||||
|
except Exception as exc:
|
||||||
|
self._logger.error("Error retrieving recipe folders: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def get_folder_tree(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
await self._ensure_dependencies_ready()
|
||||||
|
recipe_scanner = self._recipe_scanner_getter()
|
||||||
|
if recipe_scanner is None:
|
||||||
|
raise RuntimeError("Recipe scanner unavailable")
|
||||||
|
|
||||||
|
folder_tree = await recipe_scanner.get_folder_tree()
|
||||||
|
return web.json_response({"success": True, "tree": folder_tree})
|
||||||
|
except Exception as exc:
|
||||||
|
self._logger.error("Error retrieving recipe folder tree: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def get_unified_folder_tree(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
await self._ensure_dependencies_ready()
|
||||||
|
recipe_scanner = self._recipe_scanner_getter()
|
||||||
|
if recipe_scanner is None:
|
||||||
|
raise RuntimeError("Recipe scanner unavailable")
|
||||||
|
|
||||||
|
folder_tree = await recipe_scanner.get_folder_tree()
|
||||||
|
return web.json_response({"success": True, "tree": folder_tree})
|
||||||
|
except Exception as exc:
|
||||||
|
self._logger.error("Error retrieving unified recipe folder tree: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
async def get_recipes_for_lora(self, request: web.Request) -> web.Response:
|
async def get_recipes_for_lora(self, request: web.Request) -> web.Response:
|
||||||
try:
|
try:
|
||||||
await self._ensure_dependencies_ready()
|
await self._ensure_dependencies_ready()
|
||||||
@@ -410,6 +485,7 @@ class RecipeManagementHandler:
|
|||||||
analysis_service: RecipeAnalysisService,
|
analysis_service: RecipeAnalysisService,
|
||||||
downloader_factory,
|
downloader_factory,
|
||||||
civitai_client_getter: CivitaiClientGetter,
|
civitai_client_getter: CivitaiClientGetter,
|
||||||
|
ws_manager=default_ws_manager,
|
||||||
) -> None:
|
) -> None:
|
||||||
self._ensure_dependencies_ready = ensure_dependencies_ready
|
self._ensure_dependencies_ready = ensure_dependencies_ready
|
||||||
self._recipe_scanner_getter = recipe_scanner_getter
|
self._recipe_scanner_getter = recipe_scanner_getter
|
||||||
@@ -418,6 +494,7 @@ class RecipeManagementHandler:
|
|||||||
self._analysis_service = analysis_service
|
self._analysis_service = analysis_service
|
||||||
self._downloader_factory = downloader_factory
|
self._downloader_factory = downloader_factory
|
||||||
self._civitai_client_getter = civitai_client_getter
|
self._civitai_client_getter = civitai_client_getter
|
||||||
|
self._ws_manager = ws_manager
|
||||||
|
|
||||||
async def save_recipe(self, request: web.Request) -> web.Response:
|
async def save_recipe(self, request: web.Request) -> web.Response:
|
||||||
try:
|
try:
|
||||||
@@ -436,6 +513,7 @@ class RecipeManagementHandler:
|
|||||||
name=payload["name"],
|
name=payload["name"],
|
||||||
tags=payload["tags"],
|
tags=payload["tags"],
|
||||||
metadata=payload["metadata"],
|
metadata=payload["metadata"],
|
||||||
|
extension=payload.get("extension"),
|
||||||
)
|
)
|
||||||
return web.json_response(result.payload, status=result.status)
|
return web.json_response(result.payload, status=result.status)
|
||||||
except RecipeValidationError as exc:
|
except RecipeValidationError as exc:
|
||||||
@@ -444,6 +522,71 @@ class RecipeManagementHandler:
|
|||||||
self._logger.error("Error saving recipe: %s", exc, exc_info=True)
|
self._logger.error("Error saving recipe: %s", exc, exc_info=True)
|
||||||
return web.json_response({"error": str(exc)}, status=500)
|
return web.json_response({"error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def repair_recipes(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
await self._ensure_dependencies_ready()
|
||||||
|
recipe_scanner = self._recipe_scanner_getter()
|
||||||
|
if recipe_scanner is None:
|
||||||
|
return web.json_response({"success": False, "error": "Recipe scanner unavailable"}, status=503)
|
||||||
|
|
||||||
|
# Check if already running
|
||||||
|
if self._ws_manager.get_recipe_repair_progress():
|
||||||
|
return web.json_response({"success": False, "error": "Recipe repair already in progress"}, status=409)
|
||||||
|
|
||||||
|
async def progress_callback(data):
|
||||||
|
await self._ws_manager.broadcast_recipe_repair_progress(data)
|
||||||
|
|
||||||
|
# Run in background to avoid timeout
|
||||||
|
async def run_repair():
|
||||||
|
try:
|
||||||
|
await recipe_scanner.repair_all_recipes(
|
||||||
|
progress_callback=progress_callback
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
self._logger.error(f"Error in recipe repair task: {e}", exc_info=True)
|
||||||
|
await self._ws_manager.broadcast_recipe_repair_progress({
|
||||||
|
"status": "error",
|
||||||
|
"error": str(e)
|
||||||
|
})
|
||||||
|
finally:
|
||||||
|
# Keep the final status for a while so the UI can see it
|
||||||
|
await asyncio.sleep(5)
|
||||||
|
self._ws_manager.cleanup_recipe_repair_progress()
|
||||||
|
|
||||||
|
asyncio.create_task(run_repair())
|
||||||
|
|
||||||
|
return web.json_response({"success": True, "message": "Recipe repair started"})
|
||||||
|
except Exception as exc:
|
||||||
|
self._logger.error("Error starting recipe repair: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def repair_recipe(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
await self._ensure_dependencies_ready()
|
||||||
|
recipe_scanner = self._recipe_scanner_getter()
|
||||||
|
if recipe_scanner is None:
|
||||||
|
return web.json_response({"success": False, "error": "Recipe scanner unavailable"}, status=503)
|
||||||
|
|
||||||
|
recipe_id = request.match_info["recipe_id"]
|
||||||
|
result = await recipe_scanner.repair_recipe_by_id(recipe_id)
|
||||||
|
return web.json_response(result)
|
||||||
|
except RecipeNotFoundError as exc:
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=404)
|
||||||
|
except Exception as exc:
|
||||||
|
self._logger.error("Error repairing single recipe: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def get_repair_progress(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
progress = self._ws_manager.get_recipe_repair_progress()
|
||||||
|
if progress:
|
||||||
|
return web.json_response({"success": True, "progress": progress})
|
||||||
|
return web.json_response({"success": False, "message": "No repair in progress"}, status=404)
|
||||||
|
except Exception as exc:
|
||||||
|
self._logger.error("Error getting repair progress: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
|
||||||
async def import_remote_recipe(self, request: web.Request) -> web.Response:
|
async def import_remote_recipe(self, request: web.Request) -> web.Response:
|
||||||
try:
|
try:
|
||||||
await self._ensure_dependencies_ready()
|
await self._ensure_dependencies_ready()
|
||||||
@@ -451,10 +594,12 @@ class RecipeManagementHandler:
|
|||||||
if recipe_scanner is None:
|
if recipe_scanner is None:
|
||||||
raise RuntimeError("Recipe scanner unavailable")
|
raise RuntimeError("Recipe scanner unavailable")
|
||||||
|
|
||||||
|
# 1. Parse Parameters
|
||||||
params = request.rel_url.query
|
params = request.rel_url.query
|
||||||
image_url = params.get("image_url")
|
image_url = params.get("image_url")
|
||||||
name = params.get("name")
|
name = params.get("name")
|
||||||
resources_raw = params.get("resources")
|
resources_raw = params.get("resources")
|
||||||
|
|
||||||
if not image_url:
|
if not image_url:
|
||||||
raise RecipeValidationError("Missing required field: image_url")
|
raise RecipeValidationError("Missing required field: image_url")
|
||||||
if not name:
|
if not name:
|
||||||
@@ -463,27 +608,93 @@ class RecipeManagementHandler:
|
|||||||
raise RecipeValidationError("Missing required field: resources")
|
raise RecipeValidationError("Missing required field: resources")
|
||||||
|
|
||||||
checkpoint_entry, lora_entries = self._parse_resources_payload(resources_raw)
|
checkpoint_entry, lora_entries = self._parse_resources_payload(resources_raw)
|
||||||
gen_params = self._parse_gen_params(params.get("gen_params"))
|
gen_params_request = self._parse_gen_params(params.get("gen_params"))
|
||||||
|
|
||||||
|
# 2. Initial Metadata Construction
|
||||||
metadata: Dict[str, Any] = {
|
metadata: Dict[str, Any] = {
|
||||||
"base_model": params.get("base_model", "") or "",
|
"base_model": params.get("base_model", "") or "",
|
||||||
"loras": lora_entries,
|
"loras": lora_entries,
|
||||||
|
"gen_params": gen_params_request or {},
|
||||||
|
"source_url": image_url
|
||||||
}
|
}
|
||||||
|
|
||||||
source_path = params.get("source_path")
|
source_path = params.get("source_path")
|
||||||
if source_path:
|
if source_path:
|
||||||
metadata["source_path"] = source_path
|
metadata["source_path"] = source_path
|
||||||
if gen_params is not None:
|
|
||||||
metadata["gen_params"] = gen_params
|
# Checkpoint handling
|
||||||
if checkpoint_entry:
|
if checkpoint_entry:
|
||||||
metadata["checkpoint"] = checkpoint_entry
|
metadata["checkpoint"] = checkpoint_entry
|
||||||
gen_params_ref = metadata.setdefault("gen_params", {})
|
# Ensure checkpoint is also in gen_params for consistency if needed by enricher?
|
||||||
if "checkpoint" not in gen_params_ref:
|
# Actually enricher looks at metadata['checkpoint'], so this is fine.
|
||||||
gen_params_ref["checkpoint"] = checkpoint_entry
|
|
||||||
|
# Try to resolve base model from checkpoint if not explicitly provided
|
||||||
|
if not metadata["base_model"]:
|
||||||
base_model_from_metadata = await self._resolve_base_model_from_checkpoint(checkpoint_entry)
|
base_model_from_metadata = await self._resolve_base_model_from_checkpoint(checkpoint_entry)
|
||||||
if base_model_from_metadata:
|
if base_model_from_metadata:
|
||||||
metadata["base_model"] = base_model_from_metadata
|
metadata["base_model"] = base_model_from_metadata
|
||||||
|
|
||||||
tags = self._parse_tags(params.get("tags"))
|
tags = self._parse_tags(params.get("tags"))
|
||||||
image_bytes = await self._download_image_bytes(image_url)
|
|
||||||
|
# 3. Download Image
|
||||||
|
image_bytes, extension, civitai_meta_from_download = await self._download_remote_media(image_url)
|
||||||
|
|
||||||
|
# 4. Extract Embedded Metadata
|
||||||
|
# Note: We still extract this here because Enricher currently expects 'gen_params' to already be populated
|
||||||
|
# with embedded data if we want it to merge it.
|
||||||
|
# However, logic in Enricher merges: request > civitai > embedded.
|
||||||
|
# So we should gather embedded params and put them into the recipe's gen_params (as initial state)
|
||||||
|
# OR pass them to enricher to handle?
|
||||||
|
# The interface of Enricher.enrich_recipe takes `recipe` (with gen_params) and `request_params`.
|
||||||
|
# So let's extract embedded and put it into recipe['gen_params'] but careful not to overwrite request params.
|
||||||
|
# Actually, `GenParamsMerger` which `Enricher` uses handles 3 layers.
|
||||||
|
# But `Enricher` interface is: recipe['gen_params'] (as embedded) + request_params + civitai (fetched internally).
|
||||||
|
# Wait, `Enricher` fetches Civitai info internally based on URL.
|
||||||
|
# `civitai_meta_from_download` is returned by `_download_remote_media` which might be useful if URL didn't have ID.
|
||||||
|
|
||||||
|
# Let's extract embedded metadata first
|
||||||
|
embedded_gen_params = {}
|
||||||
|
try:
|
||||||
|
with tempfile.NamedTemporaryFile(suffix=extension, delete=False) as temp_img:
|
||||||
|
temp_img.write(image_bytes)
|
||||||
|
temp_img_path = temp_img.name
|
||||||
|
|
||||||
|
try:
|
||||||
|
raw_embedded = ExifUtils.extract_image_metadata(temp_img_path)
|
||||||
|
if raw_embedded:
|
||||||
|
parser = self._analysis_service._recipe_parser_factory.create_parser(raw_embedded)
|
||||||
|
if parser:
|
||||||
|
parsed_embedded = await parser.parse_metadata(raw_embedded, recipe_scanner=recipe_scanner)
|
||||||
|
if parsed_embedded and "gen_params" in parsed_embedded:
|
||||||
|
embedded_gen_params = parsed_embedded["gen_params"]
|
||||||
|
else:
|
||||||
|
embedded_gen_params = {"raw_metadata": raw_embedded}
|
||||||
|
finally:
|
||||||
|
if os.path.exists(temp_img_path):
|
||||||
|
os.unlink(temp_img_path)
|
||||||
|
except Exception as exc:
|
||||||
|
self._logger.warning("Failed to extract embedded metadata during import: %s", exc)
|
||||||
|
|
||||||
|
# Pre-populate gen_params with embedded data so Enricher treats it as the "base" layer
|
||||||
|
if embedded_gen_params:
|
||||||
|
# Merge embedded into existing gen_params (which currently only has request params if any)
|
||||||
|
# But wait, we want request params to override everything.
|
||||||
|
# So we should set recipe['gen_params'] = embedded, and pass request params to enricher.
|
||||||
|
metadata["gen_params"] = embedded_gen_params
|
||||||
|
|
||||||
|
# 5. Enrich with unified logic
|
||||||
|
# This will fetch Civitai info (if URL matches) and merge: request > civitai > embedded
|
||||||
|
civitai_client = self._civitai_client_getter()
|
||||||
|
await RecipeEnricher.enrich_recipe(
|
||||||
|
recipe=metadata,
|
||||||
|
civitai_client=civitai_client,
|
||||||
|
request_params=gen_params_request # Pass explicit request params here to override
|
||||||
|
)
|
||||||
|
|
||||||
|
# If we got civitai_meta from download but Enricher didn't fetch it (e.g. not a civitai URL or failed),
|
||||||
|
# we might want to manually merge it?
|
||||||
|
# But usually `import_remote_recipe` is used with Civitai URLs.
|
||||||
|
# For now, relying on Enricher's internal fetch is consistent with repair.
|
||||||
|
|
||||||
result = await self._persistence_service.save_recipe(
|
result = await self._persistence_service.save_recipe(
|
||||||
recipe_scanner=recipe_scanner,
|
recipe_scanner=recipe_scanner,
|
||||||
@@ -492,6 +703,7 @@ class RecipeManagementHandler:
|
|||||||
name=name,
|
name=name,
|
||||||
tags=tags,
|
tags=tags,
|
||||||
metadata=metadata,
|
metadata=metadata,
|
||||||
|
extension=extension,
|
||||||
)
|
)
|
||||||
return web.json_response(result.payload, status=result.status)
|
return web.json_response(result.payload, status=result.status)
|
||||||
except RecipeValidationError as exc:
|
except RecipeValidationError as exc:
|
||||||
@@ -541,6 +753,64 @@ class RecipeManagementHandler:
|
|||||||
self._logger.error("Error updating recipe: %s", exc, exc_info=True)
|
self._logger.error("Error updating recipe: %s", exc, exc_info=True)
|
||||||
return web.json_response({"error": str(exc)}, status=500)
|
return web.json_response({"error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def move_recipe(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
await self._ensure_dependencies_ready()
|
||||||
|
recipe_scanner = self._recipe_scanner_getter()
|
||||||
|
if recipe_scanner is None:
|
||||||
|
raise RuntimeError("Recipe scanner unavailable")
|
||||||
|
|
||||||
|
data = await request.json()
|
||||||
|
recipe_id = data.get("recipe_id")
|
||||||
|
target_path = data.get("target_path")
|
||||||
|
if not recipe_id or not target_path:
|
||||||
|
return web.json_response(
|
||||||
|
{"success": False, "error": "recipe_id and target_path are required"}, status=400
|
||||||
|
)
|
||||||
|
|
||||||
|
result = await self._persistence_service.move_recipe(
|
||||||
|
recipe_scanner=recipe_scanner,
|
||||||
|
recipe_id=str(recipe_id),
|
||||||
|
target_path=str(target_path),
|
||||||
|
)
|
||||||
|
return web.json_response(result.payload, status=result.status)
|
||||||
|
except RecipeValidationError as exc:
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=400)
|
||||||
|
except RecipeNotFoundError as exc:
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=404)
|
||||||
|
except Exception as exc:
|
||||||
|
self._logger.error("Error moving recipe: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
|
async def move_recipes_bulk(self, request: web.Request) -> web.Response:
|
||||||
|
try:
|
||||||
|
await self._ensure_dependencies_ready()
|
||||||
|
recipe_scanner = self._recipe_scanner_getter()
|
||||||
|
if recipe_scanner is None:
|
||||||
|
raise RuntimeError("Recipe scanner unavailable")
|
||||||
|
|
||||||
|
data = await request.json()
|
||||||
|
recipe_ids = data.get("recipe_ids") or []
|
||||||
|
target_path = data.get("target_path")
|
||||||
|
if not recipe_ids or not target_path:
|
||||||
|
return web.json_response(
|
||||||
|
{"success": False, "error": "recipe_ids and target_path are required"}, status=400
|
||||||
|
)
|
||||||
|
|
||||||
|
result = await self._persistence_service.move_recipes_bulk(
|
||||||
|
recipe_scanner=recipe_scanner,
|
||||||
|
recipe_ids=recipe_ids,
|
||||||
|
target_path=str(target_path),
|
||||||
|
)
|
||||||
|
return web.json_response(result.payload, status=result.status)
|
||||||
|
except RecipeValidationError as exc:
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=400)
|
||||||
|
except RecipeNotFoundError as exc:
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=404)
|
||||||
|
except Exception as exc:
|
||||||
|
self._logger.error("Error moving recipes in bulk: %s", exc, exc_info=True)
|
||||||
|
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||||
|
|
||||||
async def reconnect_lora(self, request: web.Request) -> web.Response:
|
async def reconnect_lora(self, request: web.Request) -> web.Response:
|
||||||
try:
|
try:
|
||||||
await self._ensure_dependencies_ready()
|
await self._ensure_dependencies_ready()
|
||||||
@@ -622,6 +892,7 @@ class RecipeManagementHandler:
|
|||||||
name: Optional[str] = None
|
name: Optional[str] = None
|
||||||
tags: list[str] = []
|
tags: list[str] = []
|
||||||
metadata: Optional[Dict[str, Any]] = None
|
metadata: Optional[Dict[str, Any]] = None
|
||||||
|
extension: Optional[str] = None
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
field = await reader.next()
|
field = await reader.next()
|
||||||
@@ -652,6 +923,8 @@ class RecipeManagementHandler:
|
|||||||
metadata = json.loads(metadata_text)
|
metadata = json.loads(metadata_text)
|
||||||
except Exception:
|
except Exception:
|
||||||
metadata = {}
|
metadata = {}
|
||||||
|
elif field.name == "extension":
|
||||||
|
extension = await field.text()
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"image_bytes": image_bytes,
|
"image_bytes": image_bytes,
|
||||||
@@ -659,6 +932,7 @@ class RecipeManagementHandler:
|
|||||||
"name": name,
|
"name": name,
|
||||||
"tags": tags,
|
"tags": tags,
|
||||||
"metadata": metadata,
|
"metadata": metadata,
|
||||||
|
"extension": extension,
|
||||||
}
|
}
|
||||||
|
|
||||||
def _parse_tags(self, tag_text: Optional[str]) -> list[str]:
|
def _parse_tags(self, tag_text: Optional[str]) -> list[str]:
|
||||||
@@ -729,7 +1003,7 @@ class RecipeManagementHandler:
|
|||||||
"exclude": False,
|
"exclude": False,
|
||||||
}
|
}
|
||||||
|
|
||||||
async def _download_image_bytes(self, image_url: str) -> bytes:
|
async def _download_remote_media(self, image_url: str) -> tuple[bytes, str]:
|
||||||
civitai_client = self._civitai_client_getter()
|
civitai_client = self._civitai_client_getter()
|
||||||
downloader = await self._downloader_factory()
|
downloader = await self._downloader_factory()
|
||||||
temp_path = None
|
temp_path = None
|
||||||
@@ -744,15 +1018,31 @@ class RecipeManagementHandler:
|
|||||||
image_info = await civitai_client.get_image_info(civitai_match.group(1))
|
image_info = await civitai_client.get_image_info(civitai_match.group(1))
|
||||||
if not image_info:
|
if not image_info:
|
||||||
raise RecipeDownloadError("Failed to fetch image information from Civitai")
|
raise RecipeDownloadError("Failed to fetch image information from Civitai")
|
||||||
download_url = image_info.get("url")
|
|
||||||
if not download_url:
|
media_url = image_info.get("url")
|
||||||
|
if not media_url:
|
||||||
raise RecipeDownloadError("No image URL found in Civitai response")
|
raise RecipeDownloadError("No image URL found in Civitai response")
|
||||||
|
|
||||||
|
# Use optimized preview URLs if possible
|
||||||
|
media_type = image_info.get("type")
|
||||||
|
rewritten_url, _ = rewrite_preview_url(media_url, media_type=media_type)
|
||||||
|
if rewritten_url:
|
||||||
|
download_url = rewritten_url
|
||||||
|
else:
|
||||||
|
download_url = media_url
|
||||||
|
|
||||||
success, result = await downloader.download_file(download_url, temp_path, use_auth=False)
|
success, result = await downloader.download_file(download_url, temp_path, use_auth=False)
|
||||||
if not success:
|
if not success:
|
||||||
raise RecipeDownloadError(f"Failed to download image: {result}")
|
raise RecipeDownloadError(f"Failed to download image: {result}")
|
||||||
|
|
||||||
|
# Extract extension from URL
|
||||||
|
url_path = download_url.split('?')[0].split('#')[0]
|
||||||
|
extension = os.path.splitext(url_path)[1].lower()
|
||||||
|
if not extension:
|
||||||
|
extension = ".webp" # Default to webp if unknown
|
||||||
|
|
||||||
with open(temp_path, "rb") as file_obj:
|
with open(temp_path, "rb") as file_obj:
|
||||||
return file_obj.read()
|
return file_obj.read(), extension, image_info.get("meta") if civitai_match and image_info else None
|
||||||
except RecipeDownloadError:
|
except RecipeDownloadError:
|
||||||
raise
|
raise
|
||||||
except RecipeValidationError:
|
except RecipeValidationError:
|
||||||
@@ -766,6 +1056,7 @@ class RecipeManagementHandler:
|
|||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
def _safe_int(self, value: Any) -> int:
|
def _safe_int(self, value: Any) -> int:
|
||||||
try:
|
try:
|
||||||
return int(value)
|
return int(value)
|
||||||
|
|||||||
@@ -27,16 +27,25 @@ ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
|
|||||||
RouteDefinition("DELETE", "/api/lm/recipe/{recipe_id}", "delete_recipe"),
|
RouteDefinition("DELETE", "/api/lm/recipe/{recipe_id}", "delete_recipe"),
|
||||||
RouteDefinition("GET", "/api/lm/recipes/top-tags", "get_top_tags"),
|
RouteDefinition("GET", "/api/lm/recipes/top-tags", "get_top_tags"),
|
||||||
RouteDefinition("GET", "/api/lm/recipes/base-models", "get_base_models"),
|
RouteDefinition("GET", "/api/lm/recipes/base-models", "get_base_models"),
|
||||||
|
RouteDefinition("GET", "/api/lm/recipes/roots", "get_roots"),
|
||||||
|
RouteDefinition("GET", "/api/lm/recipes/folders", "get_folders"),
|
||||||
|
RouteDefinition("GET", "/api/lm/recipes/folder-tree", "get_folder_tree"),
|
||||||
|
RouteDefinition("GET", "/api/lm/recipes/unified-folder-tree", "get_unified_folder_tree"),
|
||||||
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/share", "share_recipe"),
|
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/share", "share_recipe"),
|
||||||
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/share/download", "download_shared_recipe"),
|
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/share/download", "download_shared_recipe"),
|
||||||
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/syntax", "get_recipe_syntax"),
|
RouteDefinition("GET", "/api/lm/recipes/syntax", "get_recipe_syntax"),
|
||||||
RouteDefinition("PUT", "/api/lm/recipe/{recipe_id}/update", "update_recipe"),
|
RouteDefinition("PUT", "/api/lm/recipe/{recipe_id}/update", "update_recipe"),
|
||||||
|
RouteDefinition("POST", "/api/lm/recipe/move", "move_recipe"),
|
||||||
|
RouteDefinition("POST", "/api/lm/recipes/move-bulk", "move_recipes_bulk"),
|
||||||
RouteDefinition("POST", "/api/lm/recipe/lora/reconnect", "reconnect_lora"),
|
RouteDefinition("POST", "/api/lm/recipe/lora/reconnect", "reconnect_lora"),
|
||||||
RouteDefinition("GET", "/api/lm/recipes/find-duplicates", "find_duplicates"),
|
RouteDefinition("GET", "/api/lm/recipes/find-duplicates", "find_duplicates"),
|
||||||
RouteDefinition("POST", "/api/lm/recipes/bulk-delete", "bulk_delete"),
|
RouteDefinition("POST", "/api/lm/recipes/bulk-delete", "bulk_delete"),
|
||||||
RouteDefinition("POST", "/api/lm/recipes/save-from-widget", "save_recipe_from_widget"),
|
RouteDefinition("POST", "/api/lm/recipes/save-from-widget", "save_recipe_from_widget"),
|
||||||
RouteDefinition("GET", "/api/lm/recipes/for-lora", "get_recipes_for_lora"),
|
RouteDefinition("GET", "/api/lm/recipes/for-lora", "get_recipes_for_lora"),
|
||||||
RouteDefinition("GET", "/api/lm/recipes/scan", "scan_recipes"),
|
RouteDefinition("GET", "/api/lm/recipes/scan", "scan_recipes"),
|
||||||
|
RouteDefinition("POST", "/api/lm/recipes/repair", "repair_recipes"),
|
||||||
|
RouteDefinition("POST", "/api/lm/recipe/{recipe_id}/repair", "repair_recipe"),
|
||||||
|
RouteDefinition("GET", "/api/lm/recipes/repair-progress", "get_repair_progress"),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import asyncio
|
|||||||
from typing import Any, Dict, List, Optional, Type, TYPE_CHECKING
|
from typing import Any, Dict, List, Optional, Type, TYPE_CHECKING
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import time
|
||||||
|
|
||||||
from ..utils.constants import VALID_LORA_TYPES
|
from ..utils.constants import VALID_LORA_TYPES
|
||||||
from ..utils.models import BaseModelMetadata
|
from ..utils.models import BaseModelMetadata
|
||||||
@@ -80,6 +81,7 @@ class BaseModelService(ABC):
|
|||||||
**kwargs,
|
**kwargs,
|
||||||
) -> Dict:
|
) -> Dict:
|
||||||
"""Get paginated and filtered model data"""
|
"""Get paginated and filtered model data"""
|
||||||
|
overall_start = time.perf_counter()
|
||||||
|
|
||||||
sort_params = self.cache_repository.parse_sort(sort_by)
|
sort_params = self.cache_repository.parse_sort(sort_by)
|
||||||
if sort_params.key == 'usage':
|
if sort_params.key == 'usage':
|
||||||
@@ -87,6 +89,12 @@ class BaseModelService(ABC):
|
|||||||
else:
|
else:
|
||||||
sorted_data = await self.cache_repository.fetch_sorted(sort_params)
|
sorted_data = await self.cache_repository.fetch_sorted(sort_params)
|
||||||
|
|
||||||
|
t0 = time.perf_counter()
|
||||||
|
sorted_data = await self.cache_repository.fetch_sorted(sort_params)
|
||||||
|
fetch_duration = time.perf_counter() - t0
|
||||||
|
initial_count = len(sorted_data)
|
||||||
|
|
||||||
|
t1 = time.perf_counter()
|
||||||
if hash_filters:
|
if hash_filters:
|
||||||
filtered_data = await self._apply_hash_filters(sorted_data, hash_filters)
|
filtered_data = await self._apply_hash_filters(sorted_data, hash_filters)
|
||||||
else:
|
else:
|
||||||
@@ -116,17 +124,25 @@ class BaseModelService(ABC):
|
|||||||
|
|
||||||
if allow_selling_generated_content is not None:
|
if allow_selling_generated_content is not None:
|
||||||
filtered_data = await self._apply_allow_selling_filter(filtered_data, allow_selling_generated_content)
|
filtered_data = await self._apply_allow_selling_filter(filtered_data, allow_selling_generated_content)
|
||||||
|
filter_duration = time.perf_counter() - t1
|
||||||
|
post_filter_count = len(filtered_data)
|
||||||
|
|
||||||
annotated_for_filter: Optional[List[Dict]] = None
|
annotated_for_filter: Optional[List[Dict]] = None
|
||||||
|
t2 = time.perf_counter()
|
||||||
if update_available_only:
|
if update_available_only:
|
||||||
annotated_for_filter = await self._annotate_update_flags(filtered_data)
|
annotated_for_filter = await self._annotate_update_flags(filtered_data)
|
||||||
filtered_data = [
|
filtered_data = [
|
||||||
item for item in annotated_for_filter
|
item for item in annotated_for_filter
|
||||||
if item.get('update_available')
|
if item.get('update_available')
|
||||||
]
|
]
|
||||||
|
update_filter_duration = time.perf_counter() - t2
|
||||||
|
final_count = len(filtered_data)
|
||||||
|
|
||||||
|
t3 = time.perf_counter()
|
||||||
paginated = self._paginate(filtered_data, page, page_size)
|
paginated = self._paginate(filtered_data, page, page_size)
|
||||||
|
pagination_duration = time.perf_counter() - t3
|
||||||
|
|
||||||
|
t4 = time.perf_counter()
|
||||||
if update_available_only:
|
if update_available_only:
|
||||||
# Items already include update flags thanks to the pre-filter annotation.
|
# Items already include update flags thanks to the pre-filter annotation.
|
||||||
paginated['items'] = list(paginated['items'])
|
paginated['items'] = list(paginated['items'])
|
||||||
@@ -134,6 +150,16 @@ class BaseModelService(ABC):
|
|||||||
paginated['items'] = await self._annotate_update_flags(
|
paginated['items'] = await self._annotate_update_flags(
|
||||||
paginated['items'],
|
paginated['items'],
|
||||||
)
|
)
|
||||||
|
annotate_duration = time.perf_counter() - t4
|
||||||
|
|
||||||
|
overall_duration = time.perf_counter() - overall_start
|
||||||
|
logger.info(
|
||||||
|
"%s.get_paginated_data took %.3fs (fetch: %.3fs, filter: %.3fs, update_filter: %.3fs, pagination: %.3fs, annotate: %.3fs). "
|
||||||
|
"Counts: initial=%d, post_filter=%d, final=%d",
|
||||||
|
self.__class__.__name__, overall_duration, fetch_duration, filter_duration,
|
||||||
|
update_filter_duration, pagination_duration, annotate_duration,
|
||||||
|
initial_count, post_filter_count, final_count
|
||||||
|
)
|
||||||
return paginated
|
return paginated
|
||||||
|
|
||||||
async def _fetch_with_usage_sort(self, sort_params):
|
async def _fetch_with_usage_sort(self, sort_params):
|
||||||
|
|||||||
@@ -1,4 +1,8 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
|
import time
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
from typing import Any, Dict, List, Optional, Tuple
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import dataclass, field
|
||||||
from operator import itemgetter
|
from operator import itemgetter
|
||||||
@@ -215,24 +219,25 @@ class ModelCache:
|
|||||||
|
|
||||||
def _sort_data(self, data: List[Dict], sort_key: str, order: str) -> List[Dict]:
|
def _sort_data(self, data: List[Dict], sort_key: str, order: str) -> List[Dict]:
|
||||||
"""Sort data by sort_key and order"""
|
"""Sort data by sort_key and order"""
|
||||||
|
start_time = time.perf_counter()
|
||||||
reverse = (order == 'desc')
|
reverse = (order == 'desc')
|
||||||
if sort_key == 'name':
|
if sort_key == 'name':
|
||||||
# Natural sort by configured display name, case-insensitive
|
# Natural sort by configured display name, case-insensitive
|
||||||
return natsorted(
|
result = natsorted(
|
||||||
data,
|
data,
|
||||||
key=lambda x: self._get_display_name(x).lower(),
|
key=lambda x: self._get_display_name(x).lower(),
|
||||||
reverse=reverse
|
reverse=reverse
|
||||||
)
|
)
|
||||||
elif sort_key == 'date':
|
elif sort_key == 'date':
|
||||||
# Sort by modified timestamp
|
# Sort by modified timestamp
|
||||||
return sorted(
|
result = sorted(
|
||||||
data,
|
data,
|
||||||
key=itemgetter('modified'),
|
key=itemgetter('modified'),
|
||||||
reverse=reverse
|
reverse=reverse
|
||||||
)
|
)
|
||||||
elif sort_key == 'size':
|
elif sort_key == 'size':
|
||||||
# Sort by file size
|
# Sort by file size
|
||||||
return sorted(
|
result = sorted(
|
||||||
data,
|
data,
|
||||||
key=itemgetter('size'),
|
key=itemgetter('size'),
|
||||||
reverse=reverse
|
reverse=reverse
|
||||||
@@ -249,16 +254,28 @@ class ModelCache:
|
|||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
# Fallback: no sort
|
# Fallback: no sort
|
||||||
return list(data)
|
result = list(data)
|
||||||
|
|
||||||
|
duration = time.perf_counter() - start_time
|
||||||
|
if duration > 0.05:
|
||||||
|
logger.info("ModelCache._sort_data(%s, %s) for %d items took %.3fs", sort_key, order, len(data), duration)
|
||||||
|
return result
|
||||||
|
|
||||||
async def get_sorted_data(self, sort_key: str = 'name', order: str = 'asc') -> List[Dict]:
|
async def get_sorted_data(self, sort_key: str = 'name', order: str = 'asc') -> List[Dict]:
|
||||||
"""Get sorted data by sort_key and order, using cache if possible"""
|
"""Get sorted data by sort_key and order, using cache if possible"""
|
||||||
async with self._lock:
|
async with self._lock:
|
||||||
if (sort_key, order) == self._last_sort:
|
if (sort_key, order) == self._last_sort:
|
||||||
return self._last_sorted_data
|
return self._last_sorted_data
|
||||||
|
|
||||||
|
start_time = time.perf_counter()
|
||||||
sorted_data = self._sort_data(self.raw_data, sort_key, order)
|
sorted_data = self._sort_data(self.raw_data, sort_key, order)
|
||||||
self._last_sort = (sort_key, order)
|
self._last_sort = (sort_key, order)
|
||||||
self._last_sorted_data = sorted_data
|
self._last_sorted_data = sorted_data
|
||||||
|
|
||||||
|
duration = time.perf_counter() - start_time
|
||||||
|
if duration > 0.1:
|
||||||
|
logger.debug("ModelCache.get_sorted_data(%s, %s) took %.3fs", sort_key, order, duration)
|
||||||
|
|
||||||
return sorted_data
|
return sorted_data
|
||||||
|
|
||||||
async def update_name_display_mode(self, display_mode: str) -> None:
|
async def update_name_display_mode(self, display_mode: str) -> None:
|
||||||
|
|||||||
@@ -5,6 +5,10 @@ from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple
|
|||||||
|
|
||||||
from ..utils.constants import NSFW_LEVELS
|
from ..utils.constants import NSFW_LEVELS
|
||||||
from ..utils.utils import fuzzy_match as default_fuzzy_match
|
from ..utils.utils import fuzzy_match as default_fuzzy_match
|
||||||
|
import time
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_CIVITAI_MODEL_TYPE = "LORA"
|
DEFAULT_CIVITAI_MODEL_TYPE = "LORA"
|
||||||
@@ -115,22 +119,33 @@ class ModelFilterSet:
|
|||||||
|
|
||||||
def apply(self, data: Iterable[Dict[str, Any]], criteria: FilterCriteria) -> List[Dict[str, Any]]:
|
def apply(self, data: Iterable[Dict[str, Any]], criteria: FilterCriteria) -> List[Dict[str, Any]]:
|
||||||
"""Return items that satisfy the provided criteria."""
|
"""Return items that satisfy the provided criteria."""
|
||||||
|
overall_start = time.perf_counter()
|
||||||
items = list(data)
|
items = list(data)
|
||||||
|
initial_count = len(items)
|
||||||
|
|
||||||
if self._settings.get("show_only_sfw", False):
|
if self._settings.get("show_only_sfw", False):
|
||||||
|
t0 = time.perf_counter()
|
||||||
threshold = self._nsfw_levels.get("R", 0)
|
threshold = self._nsfw_levels.get("R", 0)
|
||||||
items = [
|
items = [
|
||||||
item for item in items
|
item for item in items
|
||||||
if not item.get("preview_nsfw_level") or item.get("preview_nsfw_level") < threshold
|
if not item.get("preview_nsfw_level") or item.get("preview_nsfw_level") < threshold
|
||||||
]
|
]
|
||||||
|
sfw_duration = time.perf_counter() - t0
|
||||||
|
else:
|
||||||
|
sfw_duration = 0
|
||||||
|
|
||||||
|
favorites_duration = 0
|
||||||
if criteria.favorites_only:
|
if criteria.favorites_only:
|
||||||
|
t0 = time.perf_counter()
|
||||||
items = [item for item in items if item.get("favorite", False)]
|
items = [item for item in items if item.get("favorite", False)]
|
||||||
|
favorites_duration = time.perf_counter() - t0
|
||||||
|
|
||||||
|
folder_duration = 0
|
||||||
folder = criteria.folder
|
folder = criteria.folder
|
||||||
options = criteria.search_options or {}
|
options = criteria.search_options or {}
|
||||||
recursive = bool(options.get("recursive", True))
|
recursive = bool(options.get("recursive", True))
|
||||||
if folder is not None:
|
if folder is not None:
|
||||||
|
t0 = time.perf_counter()
|
||||||
if recursive:
|
if recursive:
|
||||||
if folder:
|
if folder:
|
||||||
folder_with_sep = f"{folder}/"
|
folder_with_sep = f"{folder}/"
|
||||||
@@ -140,13 +155,20 @@ class ModelFilterSet:
|
|||||||
]
|
]
|
||||||
else:
|
else:
|
||||||
items = [item for item in items if item.get("folder") == folder]
|
items = [item for item in items if item.get("folder") == folder]
|
||||||
|
folder_duration = time.perf_counter() - t0
|
||||||
|
|
||||||
|
base_models_duration = 0
|
||||||
base_models = criteria.base_models or []
|
base_models = criteria.base_models or []
|
||||||
if base_models:
|
if base_models:
|
||||||
|
t0 = time.perf_counter()
|
||||||
base_model_set = set(base_models)
|
base_model_set = set(base_models)
|
||||||
items = [item for item in items if item.get("base_model") in base_model_set]
|
items = [item for item in items if item.get("base_model") in base_model_set]
|
||||||
|
base_models_duration = time.perf_counter() - t0
|
||||||
|
|
||||||
|
tags_duration = 0
|
||||||
tag_filters = criteria.tags or {}
|
tag_filters = criteria.tags or {}
|
||||||
|
if tag_filters:
|
||||||
|
t0 = time.perf_counter()
|
||||||
include_tags = set()
|
include_tags = set()
|
||||||
exclude_tags = set()
|
exclude_tags = set()
|
||||||
if isinstance(tag_filters, dict):
|
if isinstance(tag_filters, dict):
|
||||||
@@ -161,18 +183,32 @@ class ModelFilterSet:
|
|||||||
include_tags = {tag for tag in tag_filters if tag}
|
include_tags = {tag for tag in tag_filters if tag}
|
||||||
|
|
||||||
if include_tags:
|
if include_tags:
|
||||||
|
def matches_include(item_tags):
|
||||||
|
if not item_tags and "__no_tags__" in include_tags:
|
||||||
|
return True
|
||||||
|
return any(tag in include_tags for tag in (item_tags or []))
|
||||||
|
|
||||||
items = [
|
items = [
|
||||||
item for item in items
|
item for item in items
|
||||||
if any(tag in include_tags for tag in (item.get("tags", []) or []))
|
if matches_include(item.get("tags"))
|
||||||
]
|
]
|
||||||
|
|
||||||
if exclude_tags:
|
if exclude_tags:
|
||||||
|
def matches_exclude(item_tags):
|
||||||
|
if not item_tags and "__no_tags__" in exclude_tags:
|
||||||
|
return True
|
||||||
|
return any(tag in exclude_tags for tag in (item_tags or []))
|
||||||
|
|
||||||
items = [
|
items = [
|
||||||
item for item in items
|
item for item in items
|
||||||
if not any(tag in exclude_tags for tag in (item.get("tags", []) or []))
|
if not matches_exclude(item.get("tags"))
|
||||||
]
|
]
|
||||||
|
tags_duration = time.perf_counter() - t0
|
||||||
|
|
||||||
|
model_types_duration = 0
|
||||||
model_types = criteria.model_types or []
|
model_types = criteria.model_types or []
|
||||||
|
if model_types:
|
||||||
|
t0 = time.perf_counter()
|
||||||
normalized_model_types = {
|
normalized_model_types = {
|
||||||
model_type for model_type in (
|
model_type for model_type in (
|
||||||
normalize_civitai_model_type(value) for value in model_types
|
normalize_civitai_model_type(value) for value in model_types
|
||||||
@@ -184,7 +220,17 @@ class ModelFilterSet:
|
|||||||
item for item in items
|
item for item in items
|
||||||
if normalize_civitai_model_type(resolve_civitai_model_type(item)) in normalized_model_types
|
if normalize_civitai_model_type(resolve_civitai_model_type(item)) in normalized_model_types
|
||||||
]
|
]
|
||||||
|
model_types_duration = time.perf_counter() - t0
|
||||||
|
|
||||||
|
duration = time.perf_counter() - overall_start
|
||||||
|
if duration > 0.1: # Only log if it's potentially slow
|
||||||
|
logger.info(
|
||||||
|
"ModelFilterSet.apply took %.3fs (sfw: %.3fs, fav: %.3fs, folder: %.3fs, base: %.3fs, tags: %.3fs, types: %.3fs). "
|
||||||
|
"Count: %d -> %d",
|
||||||
|
duration, sfw_duration, favorites_duration, folder_duration,
|
||||||
|
base_models_duration, tags_duration, model_types_duration,
|
||||||
|
initial_count, len(items)
|
||||||
|
)
|
||||||
return items
|
return items
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -7,12 +7,18 @@ from natsort import natsorted
|
|||||||
@dataclass
|
@dataclass
|
||||||
class RecipeCache:
|
class RecipeCache:
|
||||||
"""Cache structure for Recipe data"""
|
"""Cache structure for Recipe data"""
|
||||||
|
|
||||||
raw_data: List[Dict]
|
raw_data: List[Dict]
|
||||||
sorted_by_name: List[Dict]
|
sorted_by_name: List[Dict]
|
||||||
sorted_by_date: List[Dict]
|
sorted_by_date: List[Dict]
|
||||||
|
folders: List[str] | None = None
|
||||||
|
folder_tree: Dict | None = None
|
||||||
|
|
||||||
def __post_init__(self):
|
def __post_init__(self):
|
||||||
self._lock = asyncio.Lock()
|
self._lock = asyncio.Lock()
|
||||||
|
# Normalize optional metadata containers
|
||||||
|
self.folders = self.folders or []
|
||||||
|
self.folder_tree = self.folder_tree or {}
|
||||||
|
|
||||||
async def resort(self, name_only: bool = False):
|
async def resort(self, name_only: bool = False):
|
||||||
"""Resort all cached data views"""
|
"""Resort all cached data views"""
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
import os
|
from __future__ import annotations
|
||||||
import logging
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import json
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
import time
|
import time
|
||||||
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
|
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
|
||||||
from ..config import config
|
from ..config import config
|
||||||
@@ -14,6 +16,9 @@ from .recipes.errors import RecipeNotFoundError
|
|||||||
from ..utils.utils import calculate_recipe_fingerprint, fuzzy_match
|
from ..utils.utils import calculate_recipe_fingerprint, fuzzy_match
|
||||||
from natsort import natsorted
|
from natsort import natsorted
|
||||||
import sys
|
import sys
|
||||||
|
import re
|
||||||
|
from ..recipes.merger import GenParamsMerger
|
||||||
|
from ..recipes.enrichment import RecipeEnricher
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -52,6 +57,8 @@ class RecipeScanner:
|
|||||||
cls._instance._civitai_client = None # Will be lazily initialized
|
cls._instance._civitai_client = None # Will be lazily initialized
|
||||||
return cls._instance
|
return cls._instance
|
||||||
|
|
||||||
|
REPAIR_VERSION = 3
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
lora_scanner: Optional[LoraScanner] = None,
|
lora_scanner: Optional[LoraScanner] = None,
|
||||||
@@ -64,6 +71,7 @@ class RecipeScanner:
|
|||||||
self._initialization_task: Optional[asyncio.Task] = None
|
self._initialization_task: Optional[asyncio.Task] = None
|
||||||
self._is_initializing = False
|
self._is_initializing = False
|
||||||
self._mutation_lock = asyncio.Lock()
|
self._mutation_lock = asyncio.Lock()
|
||||||
|
self._post_scan_task: Optional[asyncio.Task] = None
|
||||||
self._resort_tasks: Set[asyncio.Task] = set()
|
self._resort_tasks: Set[asyncio.Task] = set()
|
||||||
if lora_scanner:
|
if lora_scanner:
|
||||||
self._lora_scanner = lora_scanner
|
self._lora_scanner = lora_scanner
|
||||||
@@ -84,6 +92,10 @@ class RecipeScanner:
|
|||||||
task.cancel()
|
task.cancel()
|
||||||
self._resort_tasks.clear()
|
self._resort_tasks.clear()
|
||||||
|
|
||||||
|
if self._post_scan_task and not self._post_scan_task.done():
|
||||||
|
self._post_scan_task.cancel()
|
||||||
|
self._post_scan_task = None
|
||||||
|
|
||||||
self._cache = None
|
self._cache = None
|
||||||
self._initialization_task = None
|
self._initialization_task = None
|
||||||
self._is_initializing = False
|
self._is_initializing = False
|
||||||
@@ -102,19 +114,223 @@ class RecipeScanner:
|
|||||||
self._civitai_client = await ServiceRegistry.get_civitai_client()
|
self._civitai_client = await ServiceRegistry.get_civitai_client()
|
||||||
return self._civitai_client
|
return self._civitai_client
|
||||||
|
|
||||||
|
async def repair_all_recipes(
|
||||||
|
self,
|
||||||
|
progress_callback: Optional[Callable[[Dict], Any]] = None
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Repair all recipes by enrichment with Civitai and embedded metadata.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
persistence_service: Service for saving updated recipes
|
||||||
|
progress_callback: Optional callback for progress updates
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict summary of repair results
|
||||||
|
"""
|
||||||
|
async with self._mutation_lock:
|
||||||
|
cache = await self.get_cached_data()
|
||||||
|
all_recipes = list(cache.raw_data)
|
||||||
|
total = len(all_recipes)
|
||||||
|
repaired_count = 0
|
||||||
|
skipped_count = 0
|
||||||
|
errors_count = 0
|
||||||
|
|
||||||
|
civitai_client = await self._get_civitai_client()
|
||||||
|
|
||||||
|
for i, recipe in enumerate(all_recipes):
|
||||||
|
try:
|
||||||
|
# Report progress
|
||||||
|
if progress_callback:
|
||||||
|
await progress_callback({
|
||||||
|
"status": "processing",
|
||||||
|
"current": i + 1,
|
||||||
|
"total": total,
|
||||||
|
"recipe_name": recipe.get("name", "Unknown")
|
||||||
|
})
|
||||||
|
|
||||||
|
if await self._repair_single_recipe(recipe, civitai_client):
|
||||||
|
repaired_count += 1
|
||||||
|
else:
|
||||||
|
skipped_count += 1
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error repairing recipe {recipe.get('file_path')}: {e}")
|
||||||
|
errors_count += 1
|
||||||
|
|
||||||
|
# Final progress update
|
||||||
|
if progress_callback:
|
||||||
|
await progress_callback({
|
||||||
|
"status": "completed",
|
||||||
|
"repaired": repaired_count,
|
||||||
|
"skipped": skipped_count,
|
||||||
|
"errors": errors_count,
|
||||||
|
"total": total
|
||||||
|
})
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"repaired": repaired_count,
|
||||||
|
"skipped": skipped_count,
|
||||||
|
"errors": errors_count,
|
||||||
|
"total": total
|
||||||
|
}
|
||||||
|
|
||||||
|
async def repair_recipe_by_id(self, recipe_id: str) -> Dict[str, Any]:
|
||||||
|
"""Repair a single recipe by its ID.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
recipe_id: ID of the recipe to repair
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict summary of repair result
|
||||||
|
"""
|
||||||
|
async with self._mutation_lock:
|
||||||
|
# Get raw recipe from cache directly to avoid formatted fields
|
||||||
|
cache = await self.get_cached_data()
|
||||||
|
recipe = next((r for r in cache.raw_data if str(r.get('id', '')) == recipe_id), None)
|
||||||
|
|
||||||
|
if not recipe:
|
||||||
|
raise RecipeNotFoundError(f"Recipe {recipe_id} not found")
|
||||||
|
|
||||||
|
civitai_client = await self._get_civitai_client()
|
||||||
|
success = await self._repair_single_recipe(recipe, civitai_client)
|
||||||
|
|
||||||
|
# If successfully repaired, we should return the formatted version for the UI
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"repaired": 1 if success else 0,
|
||||||
|
"skipped": 0 if success else 1,
|
||||||
|
"recipe": await self.get_recipe_by_id(recipe_id) if success else recipe
|
||||||
|
}
|
||||||
|
|
||||||
|
async def _repair_single_recipe(self, recipe: Dict[str, Any], civitai_client: Any) -> bool:
|
||||||
|
"""Internal helper to repair a single recipe object.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
recipe: The recipe dictionary to repair (modified in-place)
|
||||||
|
civitai_client: Authenticated Civitai client
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if recipe was repaired or updated, False if skipped
|
||||||
|
"""
|
||||||
|
# 1. Skip if already at latest repair version
|
||||||
|
if recipe.get("repair_version", 0) >= self.REPAIR_VERSION:
|
||||||
|
return False
|
||||||
|
|
||||||
|
# 2. Identification: Is repair needed?
|
||||||
|
has_checkpoint = "checkpoint" in recipe and recipe["checkpoint"] and recipe["checkpoint"].get("name")
|
||||||
|
gen_params = recipe.get("gen_params", {})
|
||||||
|
has_prompt = bool(gen_params.get("prompt"))
|
||||||
|
|
||||||
|
needs_repair = not has_checkpoint or not has_prompt
|
||||||
|
|
||||||
|
if not needs_repair:
|
||||||
|
# Even if no repair needed, we mark it with version if it was processed
|
||||||
|
# Always update and save because if we are here, the version is old (checked in step 1)
|
||||||
|
recipe["repair_version"] = self.REPAIR_VERSION
|
||||||
|
await self._save_recipe_persistently(recipe)
|
||||||
|
return True
|
||||||
|
|
||||||
|
# 3. Use Enricher to repair/enrich
|
||||||
|
try:
|
||||||
|
updated = await RecipeEnricher.enrich_recipe(recipe, civitai_client)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error enriching recipe {recipe.get('id')}: {e}")
|
||||||
|
updated = False
|
||||||
|
|
||||||
|
# 4. Mark version and save if updated or just marking version
|
||||||
|
# If we updated it, OR if the version is old (which we know it is if we are here), save it.
|
||||||
|
# Actually, if we are here and updated is False, it means we tried to repair but couldn't/didn't need to.
|
||||||
|
# But we still want to mark it as processed so we don't try again until version bump.
|
||||||
|
if updated or recipe.get("repair_version", 0) < self.REPAIR_VERSION:
|
||||||
|
recipe["repair_version"] = self.REPAIR_VERSION
|
||||||
|
await self._save_recipe_persistently(recipe)
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def _save_recipe_persistently(self, recipe: Dict[str, Any]) -> bool:
|
||||||
|
"""Helper to save a recipe to both JSON and EXIF metadata."""
|
||||||
|
recipe_id = recipe.get("id")
|
||||||
|
if not recipe_id:
|
||||||
|
return False
|
||||||
|
|
||||||
|
recipe_json_path = await self.get_recipe_json_path(recipe_id)
|
||||||
|
if not recipe_json_path:
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
# 1. Sanitize for storage (remove runtime convenience fields)
|
||||||
|
clean_recipe = self._sanitize_recipe_for_storage(recipe)
|
||||||
|
|
||||||
|
# 2. Update the original dictionary so that we persist the clean version
|
||||||
|
# globally if needed, effectively overwriting it in-place.
|
||||||
|
recipe.clear()
|
||||||
|
recipe.update(clean_recipe)
|
||||||
|
|
||||||
|
# 3. Save JSON
|
||||||
|
with open(recipe_json_path, 'w', encoding='utf-8') as f:
|
||||||
|
json.dump(recipe, f, indent=4, ensure_ascii=False)
|
||||||
|
|
||||||
|
# 4. Update EXIF if image exists
|
||||||
|
image_path = recipe.get('file_path')
|
||||||
|
if image_path and os.path.exists(image_path):
|
||||||
|
from ..utils.exif_utils import ExifUtils
|
||||||
|
ExifUtils.append_recipe_metadata(image_path, recipe)
|
||||||
|
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error persisting recipe {recipe_id}: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _sanitize_recipe_for_storage(self, recipe: Dict[str, Any]) -> Dict[str, Any]:
|
||||||
|
"""Create a clean copy of the recipe without runtime convenience fields."""
|
||||||
|
import copy
|
||||||
|
clean = copy.deepcopy(recipe)
|
||||||
|
|
||||||
|
# 0. Clean top-level runtime fields
|
||||||
|
for key in ("file_url", "created_date_formatted", "modified_formatted"):
|
||||||
|
clean.pop(key, None)
|
||||||
|
|
||||||
|
# 1. Clean LORAs
|
||||||
|
if "loras" in clean and isinstance(clean["loras"], list):
|
||||||
|
for lora in clean["loras"]:
|
||||||
|
# Fields to remove (runtime only)
|
||||||
|
for key in ("inLibrary", "preview_url", "localPath"):
|
||||||
|
lora.pop(key, None)
|
||||||
|
|
||||||
|
# Normalize weight/strength if mapping is desired (standard in persistence_service)
|
||||||
|
if "weight" in lora and "strength" not in lora:
|
||||||
|
lora["strength"] = float(lora.pop("weight"))
|
||||||
|
|
||||||
|
# 2. Clean Checkpoint
|
||||||
|
if "checkpoint" in clean and isinstance(clean["checkpoint"], dict):
|
||||||
|
cp = clean["checkpoint"]
|
||||||
|
# Fields to remove (runtime only)
|
||||||
|
for key in ("inLibrary", "localPath", "preview_url", "thumbnailUrl", "size", "downloadUrl"):
|
||||||
|
cp.pop(key, None)
|
||||||
|
|
||||||
|
return clean
|
||||||
|
|
||||||
async def initialize_in_background(self) -> None:
|
async def initialize_in_background(self) -> None:
|
||||||
"""Initialize cache in background using thread pool"""
|
"""Initialize cache in background using thread pool"""
|
||||||
try:
|
try:
|
||||||
|
await self._wait_for_lora_scanner()
|
||||||
|
|
||||||
# Set initial empty cache to avoid None reference errors
|
# Set initial empty cache to avoid None reference errors
|
||||||
if self._cache is None:
|
if self._cache is None:
|
||||||
self._cache = RecipeCache(
|
self._cache = RecipeCache(
|
||||||
raw_data=[],
|
raw_data=[],
|
||||||
sorted_by_name=[],
|
sorted_by_name=[],
|
||||||
sorted_by_date=[]
|
sorted_by_date=[],
|
||||||
|
folders=[],
|
||||||
|
folder_tree={},
|
||||||
)
|
)
|
||||||
|
|
||||||
# Mark as initializing to prevent concurrent initializations
|
# Mark as initializing to prevent concurrent initializations
|
||||||
self._is_initializing = True
|
self._is_initializing = True
|
||||||
|
self._initialization_task = asyncio.current_task()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Start timer
|
# Start timer
|
||||||
@@ -126,11 +342,14 @@ class RecipeScanner:
|
|||||||
None, # Use default thread pool
|
None, # Use default thread pool
|
||||||
self._initialize_recipe_cache_sync # Run synchronous version in thread
|
self._initialize_recipe_cache_sync # Run synchronous version in thread
|
||||||
)
|
)
|
||||||
|
if cache is not None:
|
||||||
|
self._cache = cache
|
||||||
|
|
||||||
# Calculate elapsed time and log it
|
# Calculate elapsed time and log it
|
||||||
elapsed_time = time.time() - start_time
|
elapsed_time = time.time() - start_time
|
||||||
recipe_count = len(cache.raw_data) if cache and hasattr(cache, 'raw_data') else 0
|
recipe_count = len(cache.raw_data) if cache and hasattr(cache, 'raw_data') else 0
|
||||||
logger.info(f"Recipe cache initialized in {elapsed_time:.2f} seconds. Found {recipe_count} recipes")
|
logger.info(f"Recipe cache initialized in {elapsed_time:.2f} seconds. Found {recipe_count} recipes")
|
||||||
|
self._schedule_post_scan_enrichment()
|
||||||
finally:
|
finally:
|
||||||
# Mark initialization as complete regardless of outcome
|
# Mark initialization as complete regardless of outcome
|
||||||
self._is_initializing = False
|
self._is_initializing = False
|
||||||
@@ -207,6 +426,7 @@ class RecipeScanner:
|
|||||||
|
|
||||||
# Update cache with the collected data
|
# Update cache with the collected data
|
||||||
self._cache.raw_data = recipes
|
self._cache.raw_data = recipes
|
||||||
|
self._update_folder_metadata(self._cache)
|
||||||
|
|
||||||
# Create a simplified resort function that doesn't use await
|
# Create a simplified resort function that doesn't use await
|
||||||
if hasattr(self._cache, "resort"):
|
if hasattr(self._cache, "resort"):
|
||||||
@@ -237,12 +457,97 @@ class RecipeScanner:
|
|||||||
# Clean up the event loop
|
# Clean up the event loop
|
||||||
loop.close()
|
loop.close()
|
||||||
|
|
||||||
|
async def _wait_for_lora_scanner(self) -> None:
|
||||||
|
"""Ensure the LoRA scanner has initialized before recipe enrichment."""
|
||||||
|
|
||||||
|
if not getattr(self, "_lora_scanner", None):
|
||||||
|
return
|
||||||
|
|
||||||
|
lora_scanner = self._lora_scanner
|
||||||
|
cache_ready = getattr(lora_scanner, "_cache", None) is not None
|
||||||
|
|
||||||
|
# If cache is already available, we can proceed
|
||||||
|
if cache_ready:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Await an existing initialization task if present
|
||||||
|
task = getattr(lora_scanner, "_initialization_task", None)
|
||||||
|
if task and hasattr(task, "done") and not task.done():
|
||||||
|
try:
|
||||||
|
await task
|
||||||
|
except Exception: # pragma: no cover - defensive guard
|
||||||
|
pass
|
||||||
|
if getattr(lora_scanner, "_cache", None) is not None:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Otherwise, request initialization and proceed once it completes
|
||||||
|
try:
|
||||||
|
await lora_scanner.initialize_in_background()
|
||||||
|
except Exception as exc: # pragma: no cover - defensive guard
|
||||||
|
logger.debug("Recipe Scanner: LoRA init request failed: %s", exc)
|
||||||
|
|
||||||
|
def _schedule_post_scan_enrichment(self) -> None:
|
||||||
|
"""Kick off a non-blocking enrichment pass to fill remote metadata."""
|
||||||
|
|
||||||
|
try:
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
except RuntimeError:
|
||||||
|
return
|
||||||
|
|
||||||
|
if self._post_scan_task and not self._post_scan_task.done():
|
||||||
|
return
|
||||||
|
|
||||||
|
async def _run_enrichment():
|
||||||
|
try:
|
||||||
|
await self._enrich_cache_metadata()
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
raise
|
||||||
|
except Exception as exc: # pragma: no cover - defensive guard
|
||||||
|
logger.error("Recipe Scanner: error during post-scan enrichment: %s", exc, exc_info=True)
|
||||||
|
|
||||||
|
self._post_scan_task = loop.create_task(_run_enrichment(), name="recipe_cache_enrichment")
|
||||||
|
|
||||||
|
async def _enrich_cache_metadata(self) -> None:
|
||||||
|
"""Perform remote metadata enrichment after the initial scan."""
|
||||||
|
|
||||||
|
cache = self._cache
|
||||||
|
if cache is None or not getattr(cache, "raw_data", None):
|
||||||
|
return
|
||||||
|
|
||||||
|
for index, recipe in enumerate(list(cache.raw_data)):
|
||||||
|
try:
|
||||||
|
metadata_updated = await self._update_lora_information(recipe)
|
||||||
|
if metadata_updated:
|
||||||
|
recipe_id = recipe.get("id")
|
||||||
|
if recipe_id:
|
||||||
|
recipe_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json")
|
||||||
|
if os.path.exists(recipe_path):
|
||||||
|
try:
|
||||||
|
self._write_recipe_file(recipe_path, recipe)
|
||||||
|
except Exception as exc: # pragma: no cover - best-effort persistence
|
||||||
|
logger.debug("Recipe Scanner: could not persist recipe %s: %s", recipe_id, exc)
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
raise
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.error("Recipe Scanner: error enriching recipe %s: %s", recipe.get("id"), exc, exc_info=True)
|
||||||
|
|
||||||
|
if index % 10 == 0:
|
||||||
|
await asyncio.sleep(0)
|
||||||
|
|
||||||
|
try:
|
||||||
|
await cache.resort()
|
||||||
|
except Exception as exc: # pragma: no cover - defensive logging
|
||||||
|
logger.debug("Recipe Scanner: error resorting cache after enrichment: %s", exc)
|
||||||
|
|
||||||
def _schedule_resort(self, *, name_only: bool = False) -> None:
|
def _schedule_resort(self, *, name_only: bool = False) -> None:
|
||||||
"""Schedule a background resort of the recipe cache."""
|
"""Schedule a background resort of the recipe cache."""
|
||||||
|
|
||||||
if not self._cache:
|
if not self._cache:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
# Keep folder metadata up to date alongside sort order
|
||||||
|
self._update_folder_metadata()
|
||||||
|
|
||||||
async def _resort_wrapper() -> None:
|
async def _resort_wrapper() -> None:
|
||||||
try:
|
try:
|
||||||
await self._cache.resort(name_only=name_only)
|
await self._cache.resort(name_only=name_only)
|
||||||
@@ -253,6 +558,75 @@ class RecipeScanner:
|
|||||||
self._resort_tasks.add(task)
|
self._resort_tasks.add(task)
|
||||||
task.add_done_callback(lambda finished: self._resort_tasks.discard(finished))
|
task.add_done_callback(lambda finished: self._resort_tasks.discard(finished))
|
||||||
|
|
||||||
|
def _calculate_folder(self, recipe_path: str) -> str:
|
||||||
|
"""Calculate a normalized folder path relative to ``recipes_dir``."""
|
||||||
|
|
||||||
|
recipes_dir = self.recipes_dir
|
||||||
|
if not recipes_dir:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
try:
|
||||||
|
recipe_dir = os.path.dirname(os.path.normpath(recipe_path))
|
||||||
|
relative_dir = os.path.relpath(recipe_dir, recipes_dir)
|
||||||
|
if relative_dir in (".", ""):
|
||||||
|
return ""
|
||||||
|
return relative_dir.replace(os.path.sep, "/")
|
||||||
|
except Exception:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
def _build_folder_tree(self, folders: list[str]) -> dict:
|
||||||
|
"""Build a nested folder tree structure from relative folder paths."""
|
||||||
|
|
||||||
|
tree: dict[str, dict] = {}
|
||||||
|
for folder in folders:
|
||||||
|
if not folder:
|
||||||
|
continue
|
||||||
|
|
||||||
|
parts = folder.split("/")
|
||||||
|
current_level = tree
|
||||||
|
|
||||||
|
for part in parts:
|
||||||
|
if part not in current_level:
|
||||||
|
current_level[part] = {}
|
||||||
|
current_level = current_level[part]
|
||||||
|
|
||||||
|
return tree
|
||||||
|
|
||||||
|
def _update_folder_metadata(self, cache: RecipeCache | None = None) -> None:
|
||||||
|
"""Ensure folder lists and tree metadata are synchronized with cache contents."""
|
||||||
|
|
||||||
|
cache = cache or self._cache
|
||||||
|
if cache is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
folders: set[str] = set()
|
||||||
|
for item in cache.raw_data:
|
||||||
|
folder_value = item.get("folder", "")
|
||||||
|
if folder_value is None:
|
||||||
|
folder_value = ""
|
||||||
|
if folder_value == ".":
|
||||||
|
folder_value = ""
|
||||||
|
normalized = str(folder_value).replace("\\", "/")
|
||||||
|
item["folder"] = normalized
|
||||||
|
folders.add(normalized)
|
||||||
|
|
||||||
|
cache.folders = sorted(folders, key=lambda entry: entry.lower())
|
||||||
|
cache.folder_tree = self._build_folder_tree(cache.folders)
|
||||||
|
|
||||||
|
async def get_folders(self) -> list[str]:
|
||||||
|
"""Return a sorted list of recipe folders relative to the recipes root."""
|
||||||
|
|
||||||
|
cache = await self.get_cached_data()
|
||||||
|
self._update_folder_metadata(cache)
|
||||||
|
return cache.folders
|
||||||
|
|
||||||
|
async def get_folder_tree(self) -> dict:
|
||||||
|
"""Return a hierarchical tree of recipe folders for sidebar navigation."""
|
||||||
|
|
||||||
|
cache = await self.get_cached_data()
|
||||||
|
self._update_folder_metadata(cache)
|
||||||
|
return cache.folder_tree
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def recipes_dir(self) -> str:
|
def recipes_dir(self) -> str:
|
||||||
"""Get path to recipes directory"""
|
"""Get path to recipes directory"""
|
||||||
@@ -269,11 +643,14 @@ class RecipeScanner:
|
|||||||
"""Get cached recipe data, refresh if needed"""
|
"""Get cached recipe data, refresh if needed"""
|
||||||
# If cache is already initialized and no refresh is needed, return it immediately
|
# If cache is already initialized and no refresh is needed, return it immediately
|
||||||
if self._cache is not None and not force_refresh:
|
if self._cache is not None and not force_refresh:
|
||||||
|
self._update_folder_metadata()
|
||||||
return self._cache
|
return self._cache
|
||||||
|
|
||||||
# If another initialization is already in progress, wait for it to complete
|
# If another initialization is already in progress, wait for it to complete
|
||||||
if self._is_initializing and not force_refresh:
|
if self._is_initializing and not force_refresh:
|
||||||
return self._cache or RecipeCache(raw_data=[], sorted_by_name=[], sorted_by_date=[])
|
return self._cache or RecipeCache(
|
||||||
|
raw_data=[], sorted_by_name=[], sorted_by_date=[], folders=[], folder_tree={}
|
||||||
|
)
|
||||||
|
|
||||||
# If force refresh is requested, initialize the cache directly
|
# If force refresh is requested, initialize the cache directly
|
||||||
if force_refresh:
|
if force_refresh:
|
||||||
@@ -291,11 +668,14 @@ class RecipeScanner:
|
|||||||
self._cache = RecipeCache(
|
self._cache = RecipeCache(
|
||||||
raw_data=raw_data,
|
raw_data=raw_data,
|
||||||
sorted_by_name=[],
|
sorted_by_name=[],
|
||||||
sorted_by_date=[]
|
sorted_by_date=[],
|
||||||
|
folders=[],
|
||||||
|
folder_tree={},
|
||||||
)
|
)
|
||||||
|
|
||||||
# Resort cache
|
# Resort cache
|
||||||
await self._cache.resort()
|
await self._cache.resort()
|
||||||
|
self._update_folder_metadata(self._cache)
|
||||||
|
|
||||||
return self._cache
|
return self._cache
|
||||||
|
|
||||||
@@ -305,7 +685,9 @@ class RecipeScanner:
|
|||||||
self._cache = RecipeCache(
|
self._cache = RecipeCache(
|
||||||
raw_data=[],
|
raw_data=[],
|
||||||
sorted_by_name=[],
|
sorted_by_name=[],
|
||||||
sorted_by_date=[]
|
sorted_by_date=[],
|
||||||
|
folders=[],
|
||||||
|
folder_tree={},
|
||||||
)
|
)
|
||||||
return self._cache
|
return self._cache
|
||||||
finally:
|
finally:
|
||||||
@@ -316,7 +698,9 @@ class RecipeScanner:
|
|||||||
logger.error(f"Unexpected error in get_cached_data: {e}")
|
logger.error(f"Unexpected error in get_cached_data: {e}")
|
||||||
|
|
||||||
# Return the cache (may be empty or partially initialized)
|
# Return the cache (may be empty or partially initialized)
|
||||||
return self._cache or RecipeCache(raw_data=[], sorted_by_name=[], sorted_by_date=[])
|
return self._cache or RecipeCache(
|
||||||
|
raw_data=[], sorted_by_name=[], sorted_by_date=[], folders=[], folder_tree={}
|
||||||
|
)
|
||||||
|
|
||||||
async def refresh_cache(self, force: bool = False) -> RecipeCache:
|
async def refresh_cache(self, force: bool = False) -> RecipeCache:
|
||||||
"""Public helper to refresh or return the recipe cache."""
|
"""Public helper to refresh or return the recipe cache."""
|
||||||
@@ -331,6 +715,7 @@ class RecipeScanner:
|
|||||||
|
|
||||||
cache = await self.get_cached_data()
|
cache = await self.get_cached_data()
|
||||||
await cache.add_recipe(recipe_data, resort=False)
|
await cache.add_recipe(recipe_data, resort=False)
|
||||||
|
self._update_folder_metadata(cache)
|
||||||
self._schedule_resort()
|
self._schedule_resort()
|
||||||
|
|
||||||
async def remove_recipe(self, recipe_id: str) -> bool:
|
async def remove_recipe(self, recipe_id: str) -> bool:
|
||||||
@@ -344,6 +729,7 @@ class RecipeScanner:
|
|||||||
if removed is None:
|
if removed is None:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
self._update_folder_metadata(cache)
|
||||||
self._schedule_resort()
|
self._schedule_resort()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -429,6 +815,9 @@ class RecipeScanner:
|
|||||||
if path_updated:
|
if path_updated:
|
||||||
self._write_recipe_file(recipe_path, recipe_data)
|
self._write_recipe_file(recipe_path, recipe_data)
|
||||||
|
|
||||||
|
# Track folder placement relative to recipes directory
|
||||||
|
recipe_data['folder'] = recipe_data.get('folder') or self._calculate_folder(recipe_path)
|
||||||
|
|
||||||
# Ensure loras array exists
|
# Ensure loras array exists
|
||||||
if 'loras' not in recipe_data:
|
if 'loras' not in recipe_data:
|
||||||
recipe_data['loras'] = []
|
recipe_data['loras'] = []
|
||||||
@@ -438,7 +827,7 @@ class RecipeScanner:
|
|||||||
recipe_data['gen_params'] = {}
|
recipe_data['gen_params'] = {}
|
||||||
|
|
||||||
# Update lora information with local paths and availability
|
# Update lora information with local paths and availability
|
||||||
await self._update_lora_information(recipe_data)
|
lora_metadata_updated = await self._update_lora_information(recipe_data)
|
||||||
|
|
||||||
if recipe_data.get('checkpoint'):
|
if recipe_data.get('checkpoint'):
|
||||||
checkpoint_entry = self._normalize_checkpoint_entry(recipe_data['checkpoint'])
|
checkpoint_entry = self._normalize_checkpoint_entry(recipe_data['checkpoint'])
|
||||||
@@ -459,6 +848,12 @@ class RecipeScanner:
|
|||||||
logger.info(f"Added fingerprint to recipe: {recipe_path}")
|
logger.info(f"Added fingerprint to recipe: {recipe_path}")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error writing updated recipe with fingerprint: {e}")
|
logger.error(f"Error writing updated recipe with fingerprint: {e}")
|
||||||
|
elif lora_metadata_updated:
|
||||||
|
# Persist updates such as marking invalid entries as deleted
|
||||||
|
try:
|
||||||
|
self._write_recipe_file(recipe_path, recipe_data)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error writing updated recipe metadata: {e}")
|
||||||
|
|
||||||
return recipe_data
|
return recipe_data
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -519,7 +914,13 @@ class RecipeScanner:
|
|||||||
logger.warning(f"Marked lora with modelVersionId {model_version_id} as deleted")
|
logger.warning(f"Marked lora with modelVersionId {model_version_id} as deleted")
|
||||||
metadata_updated = True
|
metadata_updated = True
|
||||||
else:
|
else:
|
||||||
logger.debug(f"Could not get hash for modelVersionId {model_version_id}")
|
# No hash returned; mark as deleted to avoid repeated lookups
|
||||||
|
lora['isDeleted'] = True
|
||||||
|
metadata_updated = True
|
||||||
|
logger.warning(
|
||||||
|
"Marked lora with modelVersionId %s as deleted after failed hash lookup",
|
||||||
|
model_version_id,
|
||||||
|
)
|
||||||
|
|
||||||
# If has hash but no file_name, look up in lora library
|
# If has hash but no file_name, look up in lora library
|
||||||
if 'hash' in lora and (not lora.get('file_name') or not lora['file_name']):
|
if 'hash' in lora and (not lora.get('file_name') or not lora['file_name']):
|
||||||
@@ -809,7 +1210,7 @@ class RecipeScanner:
|
|||||||
|
|
||||||
return await self._lora_scanner.get_model_info_by_name(name)
|
return await self._lora_scanner.get_model_info_by_name(name)
|
||||||
|
|
||||||
async def get_paginated_data(self, page: int, page_size: int, sort_by: str = 'date', search: str = None, filters: dict = None, search_options: dict = None, lora_hash: str = None, bypass_filters: bool = True):
|
async def get_paginated_data(self, page: int, page_size: int, sort_by: str = 'date', search: str = None, filters: dict = None, search_options: dict = None, lora_hash: str = None, bypass_filters: bool = True, folder: str | None = None, recursive: bool = True):
|
||||||
"""Get paginated and filtered recipe data
|
"""Get paginated and filtered recipe data
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -821,11 +1222,20 @@ class RecipeScanner:
|
|||||||
search_options: Dictionary of search options to apply
|
search_options: Dictionary of search options to apply
|
||||||
lora_hash: Optional SHA256 hash of a LoRA to filter recipes by
|
lora_hash: Optional SHA256 hash of a LoRA to filter recipes by
|
||||||
bypass_filters: If True, ignore other filters when a lora_hash is provided
|
bypass_filters: If True, ignore other filters when a lora_hash is provided
|
||||||
|
folder: Optional folder filter relative to recipes directory
|
||||||
|
recursive: Whether to include recipes in subfolders of the selected folder
|
||||||
"""
|
"""
|
||||||
cache = await self.get_cached_data()
|
cache = await self.get_cached_data()
|
||||||
|
|
||||||
# Get base dataset
|
# Get base dataset
|
||||||
filtered_data = cache.sorted_by_date if sort_by == 'date' else cache.sorted_by_name
|
sort_field = sort_by.split(':')[0] if ':' in sort_by else sort_by
|
||||||
|
|
||||||
|
if sort_field == 'date':
|
||||||
|
filtered_data = list(cache.sorted_by_date)
|
||||||
|
elif sort_field == 'name':
|
||||||
|
filtered_data = list(cache.sorted_by_name)
|
||||||
|
else:
|
||||||
|
filtered_data = list(cache.raw_data)
|
||||||
|
|
||||||
# Apply SFW filtering if enabled
|
# Apply SFW filtering if enabled
|
||||||
from .settings_manager import get_settings_manager
|
from .settings_manager import get_settings_manager
|
||||||
@@ -856,6 +1266,22 @@ class RecipeScanner:
|
|||||||
|
|
||||||
# Skip further filtering if we're only filtering by LoRA hash with bypass enabled
|
# Skip further filtering if we're only filtering by LoRA hash with bypass enabled
|
||||||
if not (lora_hash and bypass_filters):
|
if not (lora_hash and bypass_filters):
|
||||||
|
# Apply folder filter before other criteria
|
||||||
|
if folder is not None:
|
||||||
|
normalized_folder = folder.strip("/")
|
||||||
|
def matches_folder(item_folder: str) -> bool:
|
||||||
|
item_path = (item_folder or "").strip("/")
|
||||||
|
if recursive:
|
||||||
|
if not normalized_folder:
|
||||||
|
return True
|
||||||
|
return item_path == normalized_folder or item_path.startswith(f"{normalized_folder}/")
|
||||||
|
return item_path == normalized_folder
|
||||||
|
|
||||||
|
filtered_data = [
|
||||||
|
item for item in filtered_data
|
||||||
|
if matches_folder(item.get('folder', ''))
|
||||||
|
]
|
||||||
|
|
||||||
# Apply search filter
|
# Apply search filter
|
||||||
if search:
|
if search:
|
||||||
# Default search options if none provided
|
# Default search options if none provided
|
||||||
@@ -892,6 +1318,14 @@ class RecipeScanner:
|
|||||||
if fuzzy_match(str(lora.get('modelName', '')), search):
|
if fuzzy_match(str(lora.get('modelName', '')), search):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
# Search in prompt and negative_prompt if enabled
|
||||||
|
if search_options.get('prompt', True) and 'gen_params' in item:
|
||||||
|
gen_params = item['gen_params']
|
||||||
|
if fuzzy_match(str(gen_params.get('prompt', '')), search):
|
||||||
|
return True
|
||||||
|
if fuzzy_match(str(gen_params.get('negative_prompt', '')), search):
|
||||||
|
return True
|
||||||
|
|
||||||
# No match found
|
# No match found
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@@ -907,6 +1341,13 @@ class RecipeScanner:
|
|||||||
if item.get('base_model', '') in filters['base_model']
|
if item.get('base_model', '') in filters['base_model']
|
||||||
]
|
]
|
||||||
|
|
||||||
|
# Filter by favorite
|
||||||
|
if 'favorite' in filters and filters['favorite']:
|
||||||
|
filtered_data = [
|
||||||
|
item for item in filtered_data
|
||||||
|
if item.get('favorite') is True
|
||||||
|
]
|
||||||
|
|
||||||
# Filter by tags
|
# Filter by tags
|
||||||
if 'tags' in filters and filters['tags']:
|
if 'tags' in filters and filters['tags']:
|
||||||
tag_spec = filters['tags']
|
tag_spec = filters['tags']
|
||||||
@@ -925,17 +1366,41 @@ class RecipeScanner:
|
|||||||
include_tags = {tag for tag in tag_spec if tag}
|
include_tags = {tag for tag in tag_spec if tag}
|
||||||
|
|
||||||
if include_tags:
|
if include_tags:
|
||||||
|
def matches_include(item_tags):
|
||||||
|
if not item_tags and "__no_tags__" in include_tags:
|
||||||
|
return True
|
||||||
|
return any(tag in include_tags for tag in (item_tags or []))
|
||||||
|
|
||||||
filtered_data = [
|
filtered_data = [
|
||||||
item for item in filtered_data
|
item for item in filtered_data
|
||||||
if any(tag in include_tags for tag in (item.get('tags', []) or []))
|
if matches_include(item.get('tags'))
|
||||||
]
|
]
|
||||||
|
|
||||||
if exclude_tags:
|
if exclude_tags:
|
||||||
|
def matches_exclude(item_tags):
|
||||||
|
if not item_tags and "__no_tags__" in exclude_tags:
|
||||||
|
return True
|
||||||
|
return any(tag in exclude_tags for tag in (item_tags or []))
|
||||||
|
|
||||||
filtered_data = [
|
filtered_data = [
|
||||||
item for item in filtered_data
|
item for item in filtered_data
|
||||||
if not any(tag in exclude_tags for tag in (item.get('tags', []) or []))
|
if not matches_exclude(item.get('tags'))
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# Apply sorting if not already handled by pre-sorted cache
|
||||||
|
if ':' in sort_by or sort_field == 'loras_count':
|
||||||
|
field, order = (sort_by.split(':') + ['desc'])[:2]
|
||||||
|
reverse = order.lower() == 'desc'
|
||||||
|
|
||||||
|
if field == 'name':
|
||||||
|
filtered_data = natsorted(filtered_data, key=lambda x: x.get('title', '').lower(), reverse=reverse)
|
||||||
|
elif field == 'date':
|
||||||
|
# Use modified if available, falling back to created_date
|
||||||
|
filtered_data.sort(key=lambda x: (x.get('modified', x.get('created_date', 0)), x.get('file_path', '')), reverse=reverse)
|
||||||
|
elif field == 'loras_count':
|
||||||
|
filtered_data.sort(key=lambda x: len(x.get('loras', [])), reverse=reverse)
|
||||||
|
|
||||||
# Calculate pagination
|
# Calculate pagination
|
||||||
total_items = len(filtered_data)
|
total_items = len(filtered_data)
|
||||||
start_idx = (page - 1) * page_size
|
start_idx = (page - 1) * page_size
|
||||||
@@ -1031,6 +1496,30 @@ class RecipeScanner:
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
|
return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
|
||||||
|
|
||||||
|
async def get_recipe_json_path(self, recipe_id: str) -> Optional[str]:
|
||||||
|
"""Locate the recipe JSON file, accounting for folder placement."""
|
||||||
|
|
||||||
|
recipes_dir = self.recipes_dir
|
||||||
|
if not recipes_dir:
|
||||||
|
return None
|
||||||
|
|
||||||
|
cache = await self.get_cached_data()
|
||||||
|
folder = ""
|
||||||
|
for item in cache.raw_data:
|
||||||
|
if str(item.get("id")) == str(recipe_id):
|
||||||
|
folder = item.get("folder") or ""
|
||||||
|
break
|
||||||
|
|
||||||
|
candidate = os.path.normpath(os.path.join(recipes_dir, folder, f"{recipe_id}.recipe.json"))
|
||||||
|
if os.path.exists(candidate):
|
||||||
|
return candidate
|
||||||
|
|
||||||
|
for root, _, files in os.walk(recipes_dir):
|
||||||
|
if f"{recipe_id}.recipe.json" in files:
|
||||||
|
return os.path.join(root, f"{recipe_id}.recipe.json")
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
async def update_recipe_metadata(self, recipe_id: str, metadata: dict) -> bool:
|
async def update_recipe_metadata(self, recipe_id: str, metadata: dict) -> bool:
|
||||||
"""Update recipe metadata (like title and tags) in both file system and cache
|
"""Update recipe metadata (like title and tags) in both file system and cache
|
||||||
|
|
||||||
@@ -1041,13 +1530,9 @@ class RecipeScanner:
|
|||||||
Returns:
|
Returns:
|
||||||
bool: True if successful, False otherwise
|
bool: True if successful, False otherwise
|
||||||
"""
|
"""
|
||||||
import os
|
|
||||||
import json
|
|
||||||
|
|
||||||
# First, find the recipe JSON file path
|
# First, find the recipe JSON file path
|
||||||
recipe_json_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json")
|
recipe_json_path = await self.get_recipe_json_path(recipe_id)
|
||||||
|
if not recipe_json_path or not os.path.exists(recipe_json_path):
|
||||||
if not os.path.exists(recipe_json_path):
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -1096,8 +1581,8 @@ class RecipeScanner:
|
|||||||
if target_name is None:
|
if target_name is None:
|
||||||
raise ValueError("target_name must be provided")
|
raise ValueError("target_name must be provided")
|
||||||
|
|
||||||
recipe_json_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json")
|
recipe_json_path = await self.get_recipe_json_path(recipe_id)
|
||||||
if not os.path.exists(recipe_json_path):
|
if not recipe_json_path or not os.path.exists(recipe_json_path):
|
||||||
raise RecipeNotFoundError("Recipe not found")
|
raise RecipeNotFoundError("Recipe not found")
|
||||||
|
|
||||||
async with self._mutation_lock:
|
async with self._mutation_lock:
|
||||||
@@ -1228,71 +1713,56 @@ class RecipeScanner:
|
|||||||
# Always use lowercase hash for consistency
|
# Always use lowercase hash for consistency
|
||||||
hash_value = hash_value.lower()
|
hash_value = hash_value.lower()
|
||||||
|
|
||||||
# Get recipes directory
|
# Get cache
|
||||||
recipes_dir = self.recipes_dir
|
cache = await self.get_cached_data()
|
||||||
if not recipes_dir or not os.path.exists(recipes_dir):
|
if not cache or not cache.raw_data:
|
||||||
logger.warning(f"Recipes directory not found: {recipes_dir}")
|
|
||||||
return 0, 0
|
return 0, 0
|
||||||
|
|
||||||
# Check if cache is initialized
|
|
||||||
cache_initialized = self._cache is not None
|
|
||||||
cache_updated_count = 0
|
|
||||||
file_updated_count = 0
|
file_updated_count = 0
|
||||||
|
cache_updated_count = 0
|
||||||
|
|
||||||
# Get all recipe JSON files in the recipes directory
|
# Find recipes that need updating from the cache
|
||||||
recipe_files = []
|
recipes_to_update = []
|
||||||
for root, _, files in os.walk(recipes_dir):
|
for recipe in cache.raw_data:
|
||||||
for file in files:
|
loras = recipe.get('loras', [])
|
||||||
if file.lower().endswith('.recipe.json'):
|
if not isinstance(loras, list):
|
||||||
recipe_files.append(os.path.join(root, file))
|
|
||||||
|
|
||||||
# Process each recipe file
|
|
||||||
for recipe_path in recipe_files:
|
|
||||||
try:
|
|
||||||
# Load the recipe data
|
|
||||||
with open(recipe_path, 'r', encoding='utf-8') as f:
|
|
||||||
recipe_data = json.load(f)
|
|
||||||
|
|
||||||
# Skip if no loras or invalid structure
|
|
||||||
if not recipe_data or not isinstance(recipe_data, dict) or 'loras' not in recipe_data:
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Check if any lora has matching hash
|
has_match = False
|
||||||
file_updated = False
|
for lora in loras:
|
||||||
for lora in recipe_data.get('loras', []):
|
if not isinstance(lora, dict):
|
||||||
if 'hash' in lora and lora['hash'].lower() == hash_value:
|
continue
|
||||||
# Update file_name
|
if (lora.get('hash') or '').lower() == hash_value:
|
||||||
old_file_name = lora.get('file_name', '')
|
if lora.get('file_name') != new_file_name:
|
||||||
lora['file_name'] = new_file_name
|
lora['file_name'] = new_file_name
|
||||||
file_updated = True
|
has_match = True
|
||||||
logger.info(f"Updated file_name in recipe {recipe_path}: {old_file_name} -> {new_file_name}")
|
|
||||||
|
|
||||||
# If updated, save the file
|
if has_match:
|
||||||
if file_updated:
|
recipes_to_update.append(recipe)
|
||||||
with open(recipe_path, 'w', encoding='utf-8') as f:
|
|
||||||
json.dump(recipe_data, f, indent=4, ensure_ascii=False)
|
|
||||||
file_updated_count += 1
|
|
||||||
|
|
||||||
# Also update in cache if it exists
|
|
||||||
if cache_initialized:
|
|
||||||
recipe_id = recipe_data.get('id')
|
|
||||||
if recipe_id:
|
|
||||||
for cache_item in self._cache.raw_data:
|
|
||||||
if cache_item.get('id') == recipe_id:
|
|
||||||
# Replace loras array with updated version
|
|
||||||
cache_item['loras'] = recipe_data['loras']
|
|
||||||
cache_updated_count += 1
|
cache_updated_count += 1
|
||||||
break
|
|
||||||
|
|
||||||
|
if not recipes_to_update:
|
||||||
|
return 0, 0
|
||||||
|
|
||||||
|
# Persist changes to disk
|
||||||
|
async with self._mutation_lock:
|
||||||
|
for recipe in recipes_to_update:
|
||||||
|
recipe_id = recipe.get('id')
|
||||||
|
if not recipe_id:
|
||||||
|
continue
|
||||||
|
|
||||||
|
recipe_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json")
|
||||||
|
try:
|
||||||
|
self._write_recipe_file(recipe_path, recipe)
|
||||||
|
file_updated_count += 1
|
||||||
|
logger.info(f"Updated file_name in recipe {recipe_path}: -> {new_file_name}")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error updating recipe file {recipe_path}: {e}")
|
logger.error(f"Error updating recipe file {recipe_path}: {e}")
|
||||||
import traceback
|
|
||||||
traceback.print_exc(file=sys.stderr)
|
|
||||||
|
|
||||||
# Resort cache if updates were made
|
# We don't necessarily need to resort because LoRA file_name isn't a sort key,
|
||||||
if cache_initialized and cache_updated_count > 0:
|
# but we might want to schedule a resort if we're paranoid or if searching relies on sorted state.
|
||||||
await self._cache.resort()
|
# Given it's a rename of a dependency, search results might change if searching by LoRA name.
|
||||||
logger.info(f"Resorted recipe cache after updating {cache_updated_count} items")
|
self._schedule_resort()
|
||||||
|
|
||||||
return file_updated_count, cache_updated_count
|
return file_updated_count, cache_updated_count
|
||||||
|
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ import numpy as np
|
|||||||
from PIL import Image
|
from PIL import Image
|
||||||
|
|
||||||
from ...utils.utils import calculate_recipe_fingerprint
|
from ...utils.utils import calculate_recipe_fingerprint
|
||||||
|
from ...utils.civitai_utils import rewrite_preview_url
|
||||||
from .errors import (
|
from .errors import (
|
||||||
RecipeDownloadError,
|
RecipeDownloadError,
|
||||||
RecipeNotFoundError,
|
RecipeNotFoundError,
|
||||||
@@ -94,18 +95,39 @@ class RecipeAnalysisService:
|
|||||||
if civitai_client is None:
|
if civitai_client is None:
|
||||||
raise RecipeServiceError("Civitai client unavailable")
|
raise RecipeServiceError("Civitai client unavailable")
|
||||||
|
|
||||||
temp_path = self._create_temp_path()
|
temp_path = None
|
||||||
metadata: Optional[dict[str, Any]] = None
|
metadata: Optional[dict[str, Any]] = None
|
||||||
|
is_video = False
|
||||||
|
extension = ".jpg" # Default
|
||||||
|
|
||||||
try:
|
try:
|
||||||
civitai_match = re.match(r"https://civitai\.com/images/(\d+)", url)
|
civitai_match = re.match(r"https://civitai\.com/images/(\d+)", url)
|
||||||
if civitai_match:
|
if civitai_match:
|
||||||
image_info = await civitai_client.get_image_info(civitai_match.group(1))
|
image_info = await civitai_client.get_image_info(civitai_match.group(1))
|
||||||
if not image_info:
|
if not image_info:
|
||||||
raise RecipeDownloadError("Failed to fetch image information from Civitai")
|
raise RecipeDownloadError("Failed to fetch image information from Civitai")
|
||||||
|
|
||||||
image_url = image_info.get("url")
|
image_url = image_info.get("url")
|
||||||
if not image_url:
|
if not image_url:
|
||||||
raise RecipeDownloadError("No image URL found in Civitai response")
|
raise RecipeDownloadError("No image URL found in Civitai response")
|
||||||
|
|
||||||
|
is_video = image_info.get("type") == "video"
|
||||||
|
|
||||||
|
# Use optimized preview URLs if possible
|
||||||
|
rewritten_url, _ = rewrite_preview_url(image_url, media_type=image_info.get("type"))
|
||||||
|
if rewritten_url:
|
||||||
|
image_url = rewritten_url
|
||||||
|
|
||||||
|
if is_video:
|
||||||
|
# Extract extension from URL
|
||||||
|
url_path = image_url.split('?')[0].split('#')[0]
|
||||||
|
extension = os.path.splitext(url_path)[1].lower() or ".mp4"
|
||||||
|
else:
|
||||||
|
extension = ".jpg"
|
||||||
|
|
||||||
|
temp_path = self._create_temp_path(suffix=extension)
|
||||||
await self._download_image(image_url, temp_path)
|
await self._download_image(image_url, temp_path)
|
||||||
|
|
||||||
metadata = image_info.get("meta") if "meta" in image_info else None
|
metadata = image_info.get("meta") if "meta" in image_info else None
|
||||||
if (
|
if (
|
||||||
isinstance(metadata, dict)
|
isinstance(metadata, dict)
|
||||||
@@ -114,21 +136,30 @@ class RecipeAnalysisService:
|
|||||||
):
|
):
|
||||||
metadata = metadata["meta"]
|
metadata = metadata["meta"]
|
||||||
else:
|
else:
|
||||||
|
# Basic extension detection for non-Civitai URLs
|
||||||
|
url_path = url.split('?')[0].split('#')[0]
|
||||||
|
extension = os.path.splitext(url_path)[1].lower()
|
||||||
|
if extension in [".mp4", ".webm"]:
|
||||||
|
is_video = True
|
||||||
|
else:
|
||||||
|
extension = ".jpg"
|
||||||
|
|
||||||
|
temp_path = self._create_temp_path(suffix=extension)
|
||||||
await self._download_image(url, temp_path)
|
await self._download_image(url, temp_path)
|
||||||
|
|
||||||
if metadata is None:
|
if metadata is None and not is_video:
|
||||||
metadata = self._exif_utils.extract_image_metadata(temp_path)
|
metadata = self._exif_utils.extract_image_metadata(temp_path)
|
||||||
|
|
||||||
if not metadata:
|
|
||||||
return self._metadata_not_found_response(temp_path)
|
|
||||||
|
|
||||||
return await self._parse_metadata(
|
return await self._parse_metadata(
|
||||||
metadata,
|
metadata or {},
|
||||||
recipe_scanner=recipe_scanner,
|
recipe_scanner=recipe_scanner,
|
||||||
image_path=temp_path,
|
image_path=temp_path,
|
||||||
include_image_base64=True,
|
include_image_base64=True,
|
||||||
|
is_video=is_video,
|
||||||
|
extension=extension,
|
||||||
)
|
)
|
||||||
finally:
|
finally:
|
||||||
|
if temp_path:
|
||||||
self._safe_cleanup(temp_path)
|
self._safe_cleanup(temp_path)
|
||||||
|
|
||||||
async def analyze_local_image(
|
async def analyze_local_image(
|
||||||
@@ -198,12 +229,16 @@ class RecipeAnalysisService:
|
|||||||
recipe_scanner,
|
recipe_scanner,
|
||||||
image_path: Optional[str],
|
image_path: Optional[str],
|
||||||
include_image_base64: bool,
|
include_image_base64: bool,
|
||||||
|
is_video: bool = False,
|
||||||
|
extension: str = ".jpg",
|
||||||
) -> AnalysisResult:
|
) -> AnalysisResult:
|
||||||
parser = self._recipe_parser_factory.create_parser(metadata)
|
parser = self._recipe_parser_factory.create_parser(metadata)
|
||||||
if parser is None:
|
if parser is None:
|
||||||
payload = {"error": "No parser found for this image", "loras": []}
|
payload = {"error": "No parser found for this image", "loras": []}
|
||||||
if include_image_base64 and image_path:
|
if include_image_base64 and image_path:
|
||||||
payload["image_base64"] = self._encode_file(image_path)
|
payload["image_base64"] = self._encode_file(image_path)
|
||||||
|
payload["is_video"] = is_video
|
||||||
|
payload["extension"] = extension
|
||||||
return AnalysisResult(payload)
|
return AnalysisResult(payload)
|
||||||
|
|
||||||
result = await parser.parse_metadata(metadata, recipe_scanner=recipe_scanner)
|
result = await parser.parse_metadata(metadata, recipe_scanner=recipe_scanner)
|
||||||
@@ -211,6 +246,9 @@ class RecipeAnalysisService:
|
|||||||
if include_image_base64 and image_path:
|
if include_image_base64 and image_path:
|
||||||
result["image_base64"] = self._encode_file(image_path)
|
result["image_base64"] = self._encode_file(image_path)
|
||||||
|
|
||||||
|
result["is_video"] = is_video
|
||||||
|
result["extension"] = extension
|
||||||
|
|
||||||
if "error" in result and not result.get("loras"):
|
if "error" in result and not result.get("loras"):
|
||||||
return AnalysisResult(result)
|
return AnalysisResult(result)
|
||||||
|
|
||||||
@@ -241,8 +279,8 @@ class RecipeAnalysisService:
|
|||||||
temp_file.write(data)
|
temp_file.write(data)
|
||||||
return temp_file.name
|
return temp_file.name
|
||||||
|
|
||||||
def _create_temp_path(self) -> str:
|
def _create_temp_path(self, suffix: str = ".jpg") -> str:
|
||||||
with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file:
|
with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as temp_file:
|
||||||
return temp_file.name
|
return temp_file.name
|
||||||
|
|
||||||
def _safe_cleanup(self, path: Optional[str]) -> None:
|
def _safe_cleanup(self, path: Optional[str]) -> None:
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import base64
|
|||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
import shutil
|
||||||
import time
|
import time
|
||||||
import uuid
|
import uuid
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
@@ -46,6 +47,7 @@ class RecipePersistenceService:
|
|||||||
name: str | None,
|
name: str | None,
|
||||||
tags: Iterable[str],
|
tags: Iterable[str],
|
||||||
metadata: Optional[dict[str, Any]],
|
metadata: Optional[dict[str, Any]],
|
||||||
|
extension: str | None = None,
|
||||||
) -> PersistenceResult:
|
) -> PersistenceResult:
|
||||||
"""Persist a user uploaded recipe."""
|
"""Persist a user uploaded recipe."""
|
||||||
|
|
||||||
@@ -64,6 +66,13 @@ class RecipePersistenceService:
|
|||||||
os.makedirs(recipes_dir, exist_ok=True)
|
os.makedirs(recipes_dir, exist_ok=True)
|
||||||
|
|
||||||
recipe_id = str(uuid.uuid4())
|
recipe_id = str(uuid.uuid4())
|
||||||
|
|
||||||
|
# Handle video formats by bypassing optimization and metadata embedding
|
||||||
|
is_video = extension in [".mp4", ".webm"]
|
||||||
|
if is_video:
|
||||||
|
optimized_image = resolved_image_bytes
|
||||||
|
# extension is already set
|
||||||
|
else:
|
||||||
optimized_image, extension = self._exif_utils.optimize_image(
|
optimized_image, extension = self._exif_utils.optimize_image(
|
||||||
image_data=resolved_image_bytes,
|
image_data=resolved_image_bytes,
|
||||||
target_width=self._card_preview_width,
|
target_width=self._card_preview_width,
|
||||||
@@ -71,6 +80,7 @@ class RecipePersistenceService:
|
|||||||
quality=85,
|
quality=85,
|
||||||
preserve_metadata=True,
|
preserve_metadata=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
image_filename = f"{recipe_id}{extension}"
|
image_filename = f"{recipe_id}{extension}"
|
||||||
image_path = os.path.join(recipes_dir, image_filename)
|
image_path = os.path.join(recipes_dir, image_filename)
|
||||||
normalized_image_path = os.path.normpath(image_path)
|
normalized_image_path = os.path.normpath(image_path)
|
||||||
@@ -126,6 +136,7 @@ class RecipePersistenceService:
|
|||||||
with open(json_path, "w", encoding="utf-8") as file_obj:
|
with open(json_path, "w", encoding="utf-8") as file_obj:
|
||||||
json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)
|
json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)
|
||||||
|
|
||||||
|
if not is_video:
|
||||||
self._exif_utils.append_recipe_metadata(normalized_image_path, recipe_data)
|
self._exif_utils.append_recipe_metadata(normalized_image_path, recipe_data)
|
||||||
|
|
||||||
matching_recipes = await self._find_matching_recipes(recipe_scanner, fingerprint, exclude_id=recipe_id)
|
matching_recipes = await self._find_matching_recipes(recipe_scanner, fingerprint, exclude_id=recipe_id)
|
||||||
@@ -144,12 +155,8 @@ class RecipePersistenceService:
|
|||||||
async def delete_recipe(self, *, recipe_scanner, recipe_id: str) -> PersistenceResult:
|
async def delete_recipe(self, *, recipe_scanner, recipe_id: str) -> PersistenceResult:
|
||||||
"""Delete an existing recipe."""
|
"""Delete an existing recipe."""
|
||||||
|
|
||||||
recipes_dir = recipe_scanner.recipes_dir
|
recipe_json_path = await recipe_scanner.get_recipe_json_path(recipe_id)
|
||||||
if not recipes_dir or not os.path.exists(recipes_dir):
|
if not recipe_json_path or not os.path.exists(recipe_json_path):
|
||||||
raise RecipeNotFoundError("Recipes directory not found")
|
|
||||||
|
|
||||||
recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json")
|
|
||||||
if not os.path.exists(recipe_json_path):
|
|
||||||
raise RecipeNotFoundError("Recipe not found")
|
raise RecipeNotFoundError("Recipe not found")
|
||||||
|
|
||||||
with open(recipe_json_path, "r", encoding="utf-8") as file_obj:
|
with open(recipe_json_path, "r", encoding="utf-8") as file_obj:
|
||||||
@@ -166,9 +173,9 @@ class RecipePersistenceService:
|
|||||||
async def update_recipe(self, *, recipe_scanner, recipe_id: str, updates: dict[str, Any]) -> PersistenceResult:
|
async def update_recipe(self, *, recipe_scanner, recipe_id: str, updates: dict[str, Any]) -> PersistenceResult:
|
||||||
"""Update persisted metadata for a recipe."""
|
"""Update persisted metadata for a recipe."""
|
||||||
|
|
||||||
if not any(key in updates for key in ("title", "tags", "source_path", "preview_nsfw_level")):
|
if not any(key in updates for key in ("title", "tags", "source_path", "preview_nsfw_level", "favorite")):
|
||||||
raise RecipeValidationError(
|
raise RecipeValidationError(
|
||||||
"At least one field to update must be provided (title or tags or source_path or preview_nsfw_level)"
|
"At least one field to update must be provided (title or tags or source_path or preview_nsfw_level or favorite)"
|
||||||
)
|
)
|
||||||
|
|
||||||
success = await recipe_scanner.update_recipe_metadata(recipe_id, updates)
|
success = await recipe_scanner.update_recipe_metadata(recipe_id, updates)
|
||||||
@@ -177,6 +184,163 @@ class RecipePersistenceService:
|
|||||||
|
|
||||||
return PersistenceResult({"success": True, "recipe_id": recipe_id, "updates": updates})
|
return PersistenceResult({"success": True, "recipe_id": recipe_id, "updates": updates})
|
||||||
|
|
||||||
|
def _normalize_target_path(self, recipe_scanner, target_path: str) -> tuple[str, str]:
|
||||||
|
"""Normalize and validate the target path for recipe moves."""
|
||||||
|
|
||||||
|
if not target_path:
|
||||||
|
raise RecipeValidationError("Target path is required")
|
||||||
|
|
||||||
|
recipes_root = recipe_scanner.recipes_dir
|
||||||
|
if not recipes_root:
|
||||||
|
raise RecipeNotFoundError("Recipes directory not found")
|
||||||
|
|
||||||
|
normalized_target = os.path.normpath(target_path)
|
||||||
|
recipes_root = os.path.normpath(recipes_root)
|
||||||
|
if not os.path.isabs(normalized_target):
|
||||||
|
normalized_target = os.path.normpath(os.path.join(recipes_root, normalized_target))
|
||||||
|
|
||||||
|
try:
|
||||||
|
common_root = os.path.commonpath([normalized_target, recipes_root])
|
||||||
|
except ValueError as exc:
|
||||||
|
raise RecipeValidationError("Invalid target path") from exc
|
||||||
|
|
||||||
|
if common_root != recipes_root:
|
||||||
|
raise RecipeValidationError("Target path must be inside the recipes directory")
|
||||||
|
|
||||||
|
return normalized_target, recipes_root
|
||||||
|
|
||||||
|
async def _move_recipe_files(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
recipe_scanner,
|
||||||
|
recipe_id: str,
|
||||||
|
normalized_target: str,
|
||||||
|
recipes_root: str,
|
||||||
|
) -> dict[str, Any]:
|
||||||
|
"""Move the recipe's JSON and preview image into the normalized target."""
|
||||||
|
|
||||||
|
recipe_json_path = await recipe_scanner.get_recipe_json_path(recipe_id)
|
||||||
|
if not recipe_json_path or not os.path.exists(recipe_json_path):
|
||||||
|
raise RecipeNotFoundError("Recipe not found")
|
||||||
|
|
||||||
|
recipe_data = await recipe_scanner.get_recipe_by_id(recipe_id)
|
||||||
|
if not recipe_data:
|
||||||
|
raise RecipeNotFoundError("Recipe not found")
|
||||||
|
|
||||||
|
current_json_dir = os.path.dirname(recipe_json_path)
|
||||||
|
normalized_image_path = os.path.normpath(recipe_data.get("file_path") or "") if recipe_data.get("file_path") else None
|
||||||
|
|
||||||
|
os.makedirs(normalized_target, exist_ok=True)
|
||||||
|
|
||||||
|
if os.path.normpath(current_json_dir) == normalized_target:
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"message": "Recipe is already in the target folder",
|
||||||
|
"recipe_id": recipe_id,
|
||||||
|
"original_file_path": recipe_data.get("file_path"),
|
||||||
|
"new_file_path": recipe_data.get("file_path"),
|
||||||
|
}
|
||||||
|
|
||||||
|
new_json_path = os.path.normpath(os.path.join(normalized_target, os.path.basename(recipe_json_path)))
|
||||||
|
shutil.move(recipe_json_path, new_json_path)
|
||||||
|
|
||||||
|
new_image_path = normalized_image_path
|
||||||
|
if normalized_image_path:
|
||||||
|
target_image_path = os.path.normpath(os.path.join(normalized_target, os.path.basename(normalized_image_path)))
|
||||||
|
if os.path.exists(normalized_image_path) and normalized_image_path != target_image_path:
|
||||||
|
shutil.move(normalized_image_path, target_image_path)
|
||||||
|
new_image_path = target_image_path
|
||||||
|
|
||||||
|
relative_folder = os.path.relpath(normalized_target, recipes_root)
|
||||||
|
if relative_folder in (".", ""):
|
||||||
|
relative_folder = ""
|
||||||
|
updates = {"file_path": new_image_path or recipe_data.get("file_path"), "folder": relative_folder.replace(os.path.sep, "/")}
|
||||||
|
|
||||||
|
updated = await recipe_scanner.update_recipe_metadata(recipe_id, updates)
|
||||||
|
if not updated:
|
||||||
|
raise RecipeNotFoundError("Recipe not found after move")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"recipe_id": recipe_id,
|
||||||
|
"original_file_path": recipe_data.get("file_path"),
|
||||||
|
"new_file_path": updates["file_path"],
|
||||||
|
"json_path": new_json_path,
|
||||||
|
"folder": updates["folder"],
|
||||||
|
}
|
||||||
|
|
||||||
|
async def move_recipe(self, *, recipe_scanner, recipe_id: str, target_path: str) -> PersistenceResult:
|
||||||
|
"""Move a recipe's assets into a new folder under the recipes root."""
|
||||||
|
|
||||||
|
normalized_target, recipes_root = self._normalize_target_path(recipe_scanner, target_path)
|
||||||
|
result = await self._move_recipe_files(
|
||||||
|
recipe_scanner=recipe_scanner,
|
||||||
|
recipe_id=recipe_id,
|
||||||
|
normalized_target=normalized_target,
|
||||||
|
recipes_root=recipes_root,
|
||||||
|
)
|
||||||
|
return PersistenceResult(result)
|
||||||
|
|
||||||
|
async def move_recipes_bulk(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
recipe_scanner,
|
||||||
|
recipe_ids: Iterable[str],
|
||||||
|
target_path: str,
|
||||||
|
) -> PersistenceResult:
|
||||||
|
"""Move multiple recipes to a new folder."""
|
||||||
|
|
||||||
|
recipe_ids = list(recipe_ids)
|
||||||
|
if not recipe_ids:
|
||||||
|
raise RecipeValidationError("No recipe IDs provided")
|
||||||
|
|
||||||
|
normalized_target, recipes_root = self._normalize_target_path(recipe_scanner, target_path)
|
||||||
|
|
||||||
|
results: list[dict[str, Any]] = []
|
||||||
|
success_count = 0
|
||||||
|
failure_count = 0
|
||||||
|
|
||||||
|
for recipe_id in recipe_ids:
|
||||||
|
try:
|
||||||
|
move_result = await self._move_recipe_files(
|
||||||
|
recipe_scanner=recipe_scanner,
|
||||||
|
recipe_id=str(recipe_id),
|
||||||
|
normalized_target=normalized_target,
|
||||||
|
recipes_root=recipes_root,
|
||||||
|
)
|
||||||
|
results.append(
|
||||||
|
{
|
||||||
|
"recipe_id": recipe_id,
|
||||||
|
"original_file_path": move_result.get("original_file_path"),
|
||||||
|
"new_file_path": move_result.get("new_file_path"),
|
||||||
|
"success": True,
|
||||||
|
"message": move_result.get("message", ""),
|
||||||
|
"folder": move_result.get("folder", ""),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
success_count += 1
|
||||||
|
except Exception as exc: # pragma: no cover - per-item error handling
|
||||||
|
results.append(
|
||||||
|
{
|
||||||
|
"recipe_id": recipe_id,
|
||||||
|
"original_file_path": None,
|
||||||
|
"new_file_path": None,
|
||||||
|
"success": False,
|
||||||
|
"message": str(exc),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
failure_count += 1
|
||||||
|
|
||||||
|
return PersistenceResult(
|
||||||
|
{
|
||||||
|
"success": True,
|
||||||
|
"message": f"Moved {success_count} of {len(recipe_ids)} recipes",
|
||||||
|
"results": results,
|
||||||
|
"success_count": success_count,
|
||||||
|
"failure_count": failure_count,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
async def reconnect_lora(
|
async def reconnect_lora(
|
||||||
self,
|
self,
|
||||||
*,
|
*,
|
||||||
@@ -187,8 +351,8 @@ class RecipePersistenceService:
|
|||||||
) -> PersistenceResult:
|
) -> PersistenceResult:
|
||||||
"""Reconnect a LoRA entry within an existing recipe."""
|
"""Reconnect a LoRA entry within an existing recipe."""
|
||||||
|
|
||||||
recipe_path = os.path.join(recipe_scanner.recipes_dir, f"{recipe_id}.recipe.json")
|
recipe_path = await recipe_scanner.get_recipe_json_path(recipe_id)
|
||||||
if not os.path.exists(recipe_path):
|
if not recipe_path or not os.path.exists(recipe_path):
|
||||||
raise RecipeNotFoundError("Recipe not found")
|
raise RecipeNotFoundError("Recipe not found")
|
||||||
|
|
||||||
target_lora = await recipe_scanner.get_local_lora(target_name)
|
target_lora = await recipe_scanner.get_local_lora(target_name)
|
||||||
@@ -233,16 +397,12 @@ class RecipePersistenceService:
|
|||||||
if not recipe_ids:
|
if not recipe_ids:
|
||||||
raise RecipeValidationError("No recipe IDs provided")
|
raise RecipeValidationError("No recipe IDs provided")
|
||||||
|
|
||||||
recipes_dir = recipe_scanner.recipes_dir
|
|
||||||
if not recipes_dir or not os.path.exists(recipes_dir):
|
|
||||||
raise RecipeNotFoundError("Recipes directory not found")
|
|
||||||
|
|
||||||
deleted_recipes: list[str] = []
|
deleted_recipes: list[str] = []
|
||||||
failed_recipes: list[dict[str, Any]] = []
|
failed_recipes: list[dict[str, Any]] = []
|
||||||
|
|
||||||
for recipe_id in recipe_ids:
|
for recipe_id in recipe_ids:
|
||||||
recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json")
|
recipe_json_path = await recipe_scanner.get_recipe_json_path(recipe_id)
|
||||||
if not os.path.exists(recipe_json_path):
|
if not recipe_json_path or not os.path.exists(recipe_json_path):
|
||||||
failed_recipes.append({"id": recipe_id, "reason": "Recipe not found"})
|
failed_recipes.append({"id": recipe_id, "reason": "Recipe not found"})
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|||||||
@@ -20,6 +20,8 @@ class WebSocketManager:
|
|||||||
self._last_init_progress: Dict[str, Dict] = {}
|
self._last_init_progress: Dict[str, Dict] = {}
|
||||||
# Add auto-organize progress tracking
|
# Add auto-organize progress tracking
|
||||||
self._auto_organize_progress: Optional[Dict] = None
|
self._auto_organize_progress: Optional[Dict] = None
|
||||||
|
# Add recipe repair progress tracking
|
||||||
|
self._recipe_repair_progress: Optional[Dict] = None
|
||||||
self._auto_organize_lock = asyncio.Lock()
|
self._auto_organize_lock = asyncio.Lock()
|
||||||
|
|
||||||
async def handle_connection(self, request: web.Request) -> web.WebSocketResponse:
|
async def handle_connection(self, request: web.Request) -> web.WebSocketResponse:
|
||||||
@@ -189,6 +191,14 @@ class WebSocketManager:
|
|||||||
# Broadcast via WebSocket
|
# Broadcast via WebSocket
|
||||||
await self.broadcast(data)
|
await self.broadcast(data)
|
||||||
|
|
||||||
|
async def broadcast_recipe_repair_progress(self, data: Dict):
|
||||||
|
"""Broadcast recipe repair progress to connected clients"""
|
||||||
|
# Store progress data in memory
|
||||||
|
self._recipe_repair_progress = data
|
||||||
|
|
||||||
|
# Broadcast via WebSocket
|
||||||
|
await self.broadcast(data)
|
||||||
|
|
||||||
def get_auto_organize_progress(self) -> Optional[Dict]:
|
def get_auto_organize_progress(self) -> Optional[Dict]:
|
||||||
"""Get current auto-organize progress"""
|
"""Get current auto-organize progress"""
|
||||||
return self._auto_organize_progress
|
return self._auto_organize_progress
|
||||||
@@ -197,6 +207,14 @@ class WebSocketManager:
|
|||||||
"""Clear auto-organize progress data"""
|
"""Clear auto-organize progress data"""
|
||||||
self._auto_organize_progress = None
|
self._auto_organize_progress = None
|
||||||
|
|
||||||
|
def get_recipe_repair_progress(self) -> Optional[Dict]:
|
||||||
|
"""Get current recipe repair progress"""
|
||||||
|
return self._recipe_repair_progress
|
||||||
|
|
||||||
|
def cleanup_recipe_repair_progress(self):
|
||||||
|
"""Clear recipe repair progress data"""
|
||||||
|
self._recipe_repair_progress = None
|
||||||
|
|
||||||
def is_auto_organize_running(self) -> bool:
|
def is_auto_organize_running(self) -> bool:
|
||||||
"""Check if auto-organize is currently running"""
|
"""Check if auto-organize is currently running"""
|
||||||
if not self._auto_organize_progress:
|
if not self._auto_organize_progress:
|
||||||
|
|||||||
@@ -1,82 +1,33 @@
|
|||||||
{
|
{
|
||||||
"id": "0448c06d-de1b-46ab-975c-c5aa60d90dbc",
|
"id": "42803a29-02dc-49e1-b798-27da70e8b408",
|
||||||
"file_path": "D:/Workspace/ComfyUI/models/loras/recipes/0448c06d-de1b-46ab-975c-c5aa60d90dbc.jpg",
|
"file_path": "/home/miao/workspace/ComfyUI/models/loras/recipes/test/42803a29-02dc-49e1-b798-27da70e8b408.webp",
|
||||||
"title": "a mysterious, steampunk-inspired character standing in a dramatic pose",
|
"title": "masterpiece, best quality, amazing quality, very aesthetic, detailed eyes, perfect",
|
||||||
"modified": 1741837612.3931093,
|
"modified": 1754897325.0507245,
|
||||||
"created_date": 1741492786.5581934,
|
"created_date": 1754897325.0507245,
|
||||||
"base_model": "Flux.1 D",
|
"base_model": "Illustrious",
|
||||||
"loras": [
|
"loras": [
|
||||||
{
|
{
|
||||||
"file_name": "ChronoDivinitiesFlux_r1",
|
"file_name": "",
|
||||||
"hash": "ddbc5abd00db46ad464f5e3ca85f8f7121bc14b594d6785f441d9b002fffe66a",
|
"hash": "1b5b763d83961bb5745f3af8271ba83f1d4fd69c16278dae6d5b4e194bdde97a",
|
||||||
"strength": 0.8,
|
"strength": 1.0,
|
||||||
"modelVersionId": 1438879,
|
"modelVersionId": 2007092,
|
||||||
"modelName": "Chrono Divinities - By HailoKnight",
|
"modelName": "Pony: People's Works +",
|
||||||
"modelVersionName": "Flux"
|
"modelVersionName": "v8_Illusv1.0",
|
||||||
},
|
"isDeleted": false,
|
||||||
{
|
"exclude": false
|
||||||
"file_name": "flux.1_lora_flyway_ink-dynamic",
|
|
||||||
"hash": "4b4f3b469a0d5d3a04a46886abfa33daa37a905db070ccfbd10b345c6fb00eff",
|
|
||||||
"strength": 0.2,
|
|
||||||
"modelVersionId": 914935,
|
|
||||||
"modelName": "Ink-style",
|
|
||||||
"modelVersionName": "ink-dynamic"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"file_name": "ck-painterly-fantasy-000017",
|
|
||||||
"hash": "48c67064e2936aec342580a2a729d91d75eb818e45ecf993b9650cc66c94c420",
|
|
||||||
"strength": 0.2,
|
|
||||||
"modelVersionId": 1189379,
|
|
||||||
"modelName": "Painterly Fantasy by ChronoKnight - [FLUX & IL]",
|
|
||||||
"modelVersionName": "FLUX"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"file_name": "RetroAnimeFluxV1",
|
|
||||||
"hash": "8f43c31b6c3238ac44195c970d511d759c5893bddd00f59f42b8fe51e8e76fa0",
|
|
||||||
"strength": 0.8,
|
|
||||||
"modelVersionId": 806265,
|
|
||||||
"modelName": "Retro Anime Flux - Style",
|
|
||||||
"modelVersionName": "v1.0"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"file_name": "Mezzotint_Artstyle_for_Flux_-_by_Ethanar",
|
|
||||||
"hash": "e6961502769123bf23a66c5c5298d76264fd6b9610f018319a0ccb091bfc308e",
|
|
||||||
"strength": 0.2,
|
|
||||||
"modelVersionId": 757030,
|
|
||||||
"modelName": "Mezzotint Artstyle for Flux - by Ethanar",
|
|
||||||
"modelVersionName": "V1"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"file_name": "FluxMythG0thicL1nes",
|
|
||||||
"hash": "ecb03595de62bd6183a0dd2b38bea35669fd4d509f4bbae5aa0572cfb7ef4279",
|
|
||||||
"strength": 0.4,
|
|
||||||
"modelVersionId": 1202162,
|
|
||||||
"modelName": "Velvet's Mythic Fantasy Styles | Flux + Pony + illustrious",
|
|
||||||
"modelVersionName": "Flux Gothic Lines"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"file_name": "Elden_Ring_-_Yoshitaka_Amano",
|
|
||||||
"hash": "c660c4c55320be7206cb6a917c59d8da3953cc07169fe10bda833a54ec0024f9",
|
|
||||||
"strength": 0.75,
|
|
||||||
"modelVersionId": 746484,
|
|
||||||
"modelName": "Elden Ring - Yoshitaka Amano",
|
|
||||||
"modelVersionName": "V1"
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"gen_params": {
|
"gen_params": {
|
||||||
"prompt": "a mysterious, steampunk-inspired character standing in a dramatic pose. The character is dressed in a long, intricately detailed dark coat with ornate patterns, a wide-brimmed hat, and leather boots. The face is partially obscured by the hat's shadow, adding to the enigmatic aura. The background showcases a large, antique clock with Roman numerals, surrounded by dynamic lightning and ethereal white birds, enhancing the fantastical atmosphere. The color palette is dominated by dark tones with striking contrasts of white and blue lightning, creating a sense of tension and energy. The overall composition is vertical, with the character centrally positioned, exuding a sense of power and mystery. hkchrono",
|
"prompt": "masterpiece, best quality, amazing quality, very aesthetic, detailed eyes, perfect eyes, realistic eyes,\n(flat colors:1.5), (anime:1.5), (lineart:1.5),\nclose-up, solo, tongue, 1girl, food, (saliva:0.1), open mouth, candy, simple background, blue background, large lollipop, tongue out, fade background, lips, hand up, holding, looking at viewer, licking, seductive, half-closed eyes,",
|
||||||
"negative_prompt": "",
|
"negative_prompt": "shiny skin,",
|
||||||
"checkpoint": {
|
"steps": 19,
|
||||||
"type": "checkpoint",
|
"sampler": "Euler a",
|
||||||
"modelVersionId": 691639,
|
"cfg_scale": 5,
|
||||||
"modelName": "FLUX",
|
"seed": 1765271748,
|
||||||
"modelVersionName": "Dev"
|
|
||||||
},
|
|
||||||
"steps": "30",
|
|
||||||
"sampler": "Undefined",
|
|
||||||
"cfg_scale": "3.5",
|
|
||||||
"seed": "1472903449",
|
|
||||||
"size": "832x1216",
|
"size": "832x1216",
|
||||||
"clip_skip": "2"
|
"clip_skip": 2
|
||||||
}
|
},
|
||||||
|
"fingerprint": "1b5b763d83961bb5745f3af8271ba83f1d4fd69c16278dae6d5b4e194bdde97a:1.0",
|
||||||
|
"source_path": "https://civitai.com/images/92427432",
|
||||||
|
"folder": "test"
|
||||||
}
|
}
|
||||||
@@ -34,7 +34,7 @@ class TranslationKeySynchronizer:
|
|||||||
self.locales_dir = locales_dir
|
self.locales_dir = locales_dir
|
||||||
self.verbose = verbose
|
self.verbose = verbose
|
||||||
self.reference_locale = 'en'
|
self.reference_locale = 'en'
|
||||||
self.target_locales = ['zh-CN', 'zh-TW', 'ja', 'ru', 'de', 'fr', 'es', 'ko']
|
self.target_locales = ['zh-CN', 'zh-TW', 'ja', 'ru', 'de', 'fr', 'es', 'ko', 'he']
|
||||||
|
|
||||||
def log(self, message: str, level: str = 'INFO'):
|
def log(self, message: str, level: str = 'INFO'):
|
||||||
"""Log a message if verbose mode is enabled."""
|
"""Log a message if verbose mode is enabled."""
|
||||||
|
|||||||
@@ -1,8 +1,10 @@
|
|||||||
html, body {
|
html,
|
||||||
|
body {
|
||||||
margin: 0;
|
margin: 0;
|
||||||
padding: 0;
|
padding: 0;
|
||||||
height: 100%;
|
height: 100%;
|
||||||
overflow: hidden; /* Disable default scrolling */
|
overflow: hidden;
|
||||||
|
/* Disable default scrolling */
|
||||||
}
|
}
|
||||||
|
|
||||||
/* 针对Firefox */
|
/* 针对Firefox */
|
||||||
@@ -75,7 +77,8 @@ html, body {
|
|||||||
--border-radius-sm: 8px;
|
--border-radius-sm: 8px;
|
||||||
--border-radius-xs: 4px;
|
--border-radius-xs: 4px;
|
||||||
|
|
||||||
--scrollbar-width: 8px; /* 添加滚动条宽度变量 */
|
--scrollbar-width: 8px;
|
||||||
|
/* 添加滚动条宽度变量 */
|
||||||
|
|
||||||
/* Shortcut styles */
|
/* Shortcut styles */
|
||||||
--shortcut-bg: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.12);
|
--shortcut-bg: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.12);
|
||||||
@@ -104,7 +107,8 @@ html[data-theme="light"] {
|
|||||||
--lora-surface: oklch(25% 0.02 256 / 0.98);
|
--lora-surface: oklch(25% 0.02 256 / 0.98);
|
||||||
--lora-border: oklch(90% 0.02 256 / 0.15);
|
--lora-border: oklch(90% 0.02 256 / 0.15);
|
||||||
--lora-text: oklch(98% 0.02 256);
|
--lora-text: oklch(98% 0.02 256);
|
||||||
--lora-warning: oklch(75% 0.25 80); /* Modified to be used with oklch() */
|
--lora-warning: oklch(75% 0.25 80);
|
||||||
|
/* Modified to be used with oklch() */
|
||||||
--lora-error-bg: color-mix(in oklch, var(--lora-error) 15%, transparent);
|
--lora-error-bg: color-mix(in oklch, var(--lora-error) 15%, transparent);
|
||||||
--lora-error-border: color-mix(in oklch, var(--lora-error) 40%, transparent);
|
--lora-error-border: color-mix(in oklch, var(--lora-error) 40%, transparent);
|
||||||
--badge-update-bg: oklch(62% 0.18 220);
|
--badge-update-bg: oklch(62% 0.18 220);
|
||||||
@@ -118,5 +122,10 @@ body {
|
|||||||
color: var(--text-color);
|
color: var(--text-color);
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-direction: column;
|
flex-direction: column;
|
||||||
padding-top: 0; /* Remove the padding-top */
|
padding-top: 0;
|
||||||
|
/* Remove the padding-top */
|
||||||
|
}
|
||||||
|
|
||||||
|
.hidden {
|
||||||
|
display: none !important;
|
||||||
}
|
}
|
||||||
@@ -1,7 +1,8 @@
|
|||||||
/* Import Modal Styles */
|
/* Import Modal Styles */
|
||||||
.import-step {
|
.import-step {
|
||||||
margin: var(--space-2) 0;
|
margin: var(--space-2) 0;
|
||||||
transition: none !important; /* Disable any transitions that might affect display */
|
transition: none !important;
|
||||||
|
/* Disable any transitions that might affect display */
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Import Mode Toggle */
|
/* Import Mode Toggle */
|
||||||
@@ -107,7 +108,8 @@
|
|||||||
justify-content: center;
|
justify-content: center;
|
||||||
}
|
}
|
||||||
|
|
||||||
.recipe-image img {
|
.recipe-image img,
|
||||||
|
.recipe-preview-video {
|
||||||
max-width: 100%;
|
max-width: 100%;
|
||||||
max-height: 100%;
|
max-height: 100%;
|
||||||
object-fit: contain;
|
object-fit: contain;
|
||||||
@@ -512,14 +514,17 @@
|
|||||||
|
|
||||||
/* Prevent layout shift with scrollbar */
|
/* Prevent layout shift with scrollbar */
|
||||||
.modal-content {
|
.modal-content {
|
||||||
overflow-y: scroll; /* Always show scrollbar */
|
overflow-y: scroll;
|
||||||
scrollbar-gutter: stable; /* Reserve space for scrollbar */
|
/* Always show scrollbar */
|
||||||
|
scrollbar-gutter: stable;
|
||||||
|
/* Reserve space for scrollbar */
|
||||||
}
|
}
|
||||||
|
|
||||||
/* For browsers that don't support scrollbar-gutter */
|
/* For browsers that don't support scrollbar-gutter */
|
||||||
@supports not (scrollbar-gutter: stable) {
|
@supports not (scrollbar-gutter: stable) {
|
||||||
.modal-content {
|
.modal-content {
|
||||||
padding-right: calc(var(--space-2) + var(--scrollbar-width)); /* Add extra padding for scrollbar */
|
padding-right: calc(var(--space-2) + var(--scrollbar-width));
|
||||||
|
/* Add extra padding for scrollbar */
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -586,7 +591,8 @@
|
|||||||
|
|
||||||
/* Remove the old warning-message styles that were causing layout issues */
|
/* Remove the old warning-message styles that were causing layout issues */
|
||||||
.warning-message {
|
.warning-message {
|
||||||
display: none; /* Hide the old style */
|
display: none;
|
||||||
|
/* Hide the old style */
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Update deleted badge to be more prominent */
|
/* Update deleted badge to be more prominent */
|
||||||
@@ -613,7 +619,8 @@
|
|||||||
color: var(--lora-error);
|
color: var(--lora-error);
|
||||||
font-size: 0.9em;
|
font-size: 0.9em;
|
||||||
margin-top: 8px;
|
margin-top: 8px;
|
||||||
min-height: 20px; /* Ensure there's always space for the error message */
|
min-height: 20px;
|
||||||
|
/* Ensure there's always space for the error message */
|
||||||
font-weight: 500;
|
font-weight: 500;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -662,8 +669,15 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
@keyframes fadeIn {
|
@keyframes fadeIn {
|
||||||
from { opacity: 0; transform: translateY(-10px); }
|
from {
|
||||||
to { opacity: 1; transform: translateY(0); }
|
opacity: 0;
|
||||||
|
transform: translateY(-10px);
|
||||||
|
}
|
||||||
|
|
||||||
|
to {
|
||||||
|
opacity: 1;
|
||||||
|
transform: translateY(0);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
.duplicate-warning {
|
.duplicate-warning {
|
||||||
@@ -779,6 +793,7 @@
|
|||||||
text-overflow: ellipsis;
|
text-overflow: ellipsis;
|
||||||
display: -webkit-box;
|
display: -webkit-box;
|
||||||
-webkit-line-clamp: 2;
|
-webkit-line-clamp: 2;
|
||||||
|
line-clamp: 2;
|
||||||
-webkit-box-orient: vertical;
|
-webkit-box-orient: vertical;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -20,7 +20,7 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
.modal-header-row {
|
.modal-header-row {
|
||||||
width: 85%;
|
width: 84%;
|
||||||
display: flex;
|
display: flex;
|
||||||
align-items: flex-start;
|
align-items: flex-start;
|
||||||
gap: var(--space-2);
|
gap: var(--space-2);
|
||||||
|
|||||||
@@ -122,6 +122,7 @@ body.modal-open {
|
|||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
opacity: 0.7;
|
opacity: 0.7;
|
||||||
transition: opacity 0.2s;
|
transition: opacity 0.2s;
|
||||||
|
z-index: 10;
|
||||||
}
|
}
|
||||||
|
|
||||||
.close:hover {
|
.close:hover {
|
||||||
|
|||||||
@@ -242,6 +242,20 @@
|
|||||||
border-color: var(--lora-error-border);
|
border-color: var(--lora-error-border);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* Subtle styling for special system tags like "No tags" */
|
||||||
|
.filter-tag.special-tag {
|
||||||
|
border-style: dashed;
|
||||||
|
opacity: 0.8;
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Ensure solid border and full opacity when active or excluded */
|
||||||
|
.filter-tag.special-tag.active,
|
||||||
|
.filter-tag.special-tag.exclude {
|
||||||
|
border-style: solid;
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
/* Tag filter styles */
|
/* Tag filter styles */
|
||||||
.tag-filter {
|
.tag-filter {
|
||||||
display: flex;
|
display: flex;
|
||||||
|
|||||||
@@ -2,6 +2,35 @@ import { RecipeCard } from '../components/RecipeCard.js';
|
|||||||
import { state, getCurrentPageState } from '../state/index.js';
|
import { state, getCurrentPageState } from '../state/index.js';
|
||||||
import { showToast } from '../utils/uiHelpers.js';
|
import { showToast } from '../utils/uiHelpers.js';
|
||||||
|
|
||||||
|
const RECIPE_ENDPOINTS = {
|
||||||
|
list: '/api/lm/recipes',
|
||||||
|
detail: '/api/lm/recipe',
|
||||||
|
scan: '/api/lm/recipes/scan',
|
||||||
|
update: '/api/lm/recipe',
|
||||||
|
roots: '/api/lm/recipes/roots',
|
||||||
|
folders: '/api/lm/recipes/folders',
|
||||||
|
folderTree: '/api/lm/recipes/folder-tree',
|
||||||
|
unifiedFolderTree: '/api/lm/recipes/unified-folder-tree',
|
||||||
|
move: '/api/lm/recipe/move',
|
||||||
|
moveBulk: '/api/lm/recipes/move-bulk',
|
||||||
|
bulkDelete: '/api/lm/recipes/bulk-delete',
|
||||||
|
};
|
||||||
|
|
||||||
|
const RECIPE_SIDEBAR_CONFIG = {
|
||||||
|
config: {
|
||||||
|
displayName: 'Recipe',
|
||||||
|
supportsMove: true,
|
||||||
|
},
|
||||||
|
endpoints: RECIPE_ENDPOINTS,
|
||||||
|
};
|
||||||
|
|
||||||
|
export function extractRecipeId(filePath) {
|
||||||
|
if (!filePath) return null;
|
||||||
|
const basename = filePath.split('/').pop().split('\\').pop();
|
||||||
|
const dotIndex = basename.lastIndexOf('.');
|
||||||
|
return dotIndex > 0 ? basename.substring(0, dotIndex) : basename;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Fetch recipes with pagination for virtual scrolling
|
* Fetch recipes with pagination for virtual scrolling
|
||||||
* @param {number} page - Page number to fetch
|
* @param {number} page - Page number to fetch
|
||||||
@@ -18,10 +47,21 @@ export async function fetchRecipesPage(page = 1, pageSize = 100) {
|
|||||||
sort_by: pageState.sortBy
|
sort_by: pageState.sortBy
|
||||||
});
|
});
|
||||||
|
|
||||||
|
if (pageState.showFavoritesOnly) {
|
||||||
|
params.append('favorite', 'true');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (pageState.activeFolder !== null && pageState.activeFolder !== undefined) {
|
||||||
|
params.append('folder', pageState.activeFolder);
|
||||||
|
params.append('recursive', pageState.searchOptions?.recursive !== false);
|
||||||
|
} else if (pageState.searchOptions?.recursive !== undefined) {
|
||||||
|
params.append('recursive', pageState.searchOptions.recursive);
|
||||||
|
}
|
||||||
|
|
||||||
// If we have a specific recipe ID to load
|
// If we have a specific recipe ID to load
|
||||||
if (pageState.customFilter?.active && pageState.customFilter?.recipeId) {
|
if (pageState.customFilter?.active && pageState.customFilter?.recipeId) {
|
||||||
// Special case: load specific recipe
|
// Special case: load specific recipe
|
||||||
const response = await fetch(`/api/lm/recipe/${pageState.customFilter.recipeId}`);
|
const response = await fetch(`${RECIPE_ENDPOINTS.detail}/${pageState.customFilter.recipeId}`);
|
||||||
|
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
throw new Error(`Failed to load recipe: ${response.statusText}`);
|
throw new Error(`Failed to load recipe: ${response.statusText}`);
|
||||||
@@ -56,6 +96,7 @@ export async function fetchRecipesPage(page = 1, pageSize = 100) {
|
|||||||
params.append('search_tags', pageState.searchOptions.tags.toString());
|
params.append('search_tags', pageState.searchOptions.tags.toString());
|
||||||
params.append('search_lora_name', pageState.searchOptions.loraName.toString());
|
params.append('search_lora_name', pageState.searchOptions.loraName.toString());
|
||||||
params.append('search_lora_model', pageState.searchOptions.loraModel.toString());
|
params.append('search_lora_model', pageState.searchOptions.loraModel.toString());
|
||||||
|
params.append('search_prompt', (pageState.searchOptions.prompt || false).toString());
|
||||||
params.append('fuzzy', 'true');
|
params.append('fuzzy', 'true');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -78,7 +119,7 @@ export async function fetchRecipesPage(page = 1, pageSize = 100) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Fetch recipes
|
// Fetch recipes
|
||||||
const response = await fetch(`/api/lm/recipes?${params.toString()}`);
|
const response = await fetch(`${RECIPE_ENDPOINTS.list}?${params.toString()}`);
|
||||||
|
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
throw new Error(`Failed to load recipes: ${response.statusText}`);
|
throw new Error(`Failed to load recipes: ${response.statusText}`);
|
||||||
@@ -213,7 +254,7 @@ export async function refreshRecipes() {
|
|||||||
state.loadingManager.showSimpleLoading('Refreshing recipes...');
|
state.loadingManager.showSimpleLoading('Refreshing recipes...');
|
||||||
|
|
||||||
// Call the API endpoint to rebuild the recipe cache
|
// Call the API endpoint to rebuild the recipe cache
|
||||||
const response = await fetch('/api/lm/recipes/scan');
|
const response = await fetch(RECIPE_ENDPOINTS.scan);
|
||||||
|
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
const data = await response.json();
|
const data = await response.json();
|
||||||
@@ -277,10 +318,12 @@ export async function updateRecipeMetadata(filePath, updates) {
|
|||||||
state.loadingManager.showSimpleLoading('Saving metadata...');
|
state.loadingManager.showSimpleLoading('Saving metadata...');
|
||||||
|
|
||||||
// Extract recipeId from filePath (basename without extension)
|
// Extract recipeId from filePath (basename without extension)
|
||||||
const basename = filePath.split('/').pop().split('\\').pop();
|
const recipeId = extractRecipeId(filePath);
|
||||||
const recipeId = basename.substring(0, basename.lastIndexOf('.'));
|
if (!recipeId) {
|
||||||
|
throw new Error('Unable to determine recipe ID');
|
||||||
|
}
|
||||||
|
|
||||||
const response = await fetch(`/api/lm/recipe/${recipeId}/update`, {
|
const response = await fetch(`${RECIPE_ENDPOINTS.update}/${recipeId}/update`, {
|
||||||
method: 'PUT',
|
method: 'PUT',
|
||||||
headers: {
|
headers: {
|
||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
@@ -306,3 +349,187 @@ export async function updateRecipeMetadata(filePath, updates) {
|
|||||||
state.loadingManager.hide();
|
state.loadingManager.hide();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export class RecipeSidebarApiClient {
|
||||||
|
constructor() {
|
||||||
|
this.apiConfig = RECIPE_SIDEBAR_CONFIG;
|
||||||
|
}
|
||||||
|
|
||||||
|
async fetchUnifiedFolderTree() {
|
||||||
|
const response = await fetch(this.apiConfig.endpoints.unifiedFolderTree);
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error('Failed to fetch recipe folder tree');
|
||||||
|
}
|
||||||
|
return response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
async fetchModelRoots() {
|
||||||
|
const response = await fetch(this.apiConfig.endpoints.roots);
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error('Failed to fetch recipe roots');
|
||||||
|
}
|
||||||
|
return response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
async fetchModelFolders() {
|
||||||
|
const response = await fetch(this.apiConfig.endpoints.folders);
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error('Failed to fetch recipe folders');
|
||||||
|
}
|
||||||
|
return response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
async moveBulkModels(filePaths, targetPath) {
|
||||||
|
if (!this.apiConfig.config.supportsMove) {
|
||||||
|
showToast('toast.api.bulkMoveNotSupported', { type: this.apiConfig.config.displayName }, 'warning');
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const recipeIds = filePaths
|
||||||
|
.map((path) => extractRecipeId(path))
|
||||||
|
.filter((id) => !!id);
|
||||||
|
|
||||||
|
if (recipeIds.length === 0) {
|
||||||
|
showToast('toast.models.noModelsSelected', {}, 'warning');
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(this.apiConfig.endpoints.moveBulk, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
recipe_ids: recipeIds,
|
||||||
|
target_path: targetPath,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await response.json();
|
||||||
|
|
||||||
|
if (!response.ok || !result.success) {
|
||||||
|
throw new Error(result.error || `Failed to move ${this.apiConfig.config.displayName}s`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.failure_count > 0) {
|
||||||
|
showToast(
|
||||||
|
'toast.api.bulkMovePartial',
|
||||||
|
{
|
||||||
|
successCount: result.success_count,
|
||||||
|
type: this.apiConfig.config.displayName,
|
||||||
|
failureCount: result.failure_count,
|
||||||
|
},
|
||||||
|
'warning'
|
||||||
|
);
|
||||||
|
|
||||||
|
const failedFiles = (result.results || [])
|
||||||
|
.filter((item) => !item.success)
|
||||||
|
.map((item) => item.message || 'Unknown error');
|
||||||
|
|
||||||
|
if (failedFiles.length > 0) {
|
||||||
|
const failureMessage =
|
||||||
|
failedFiles.length <= 3
|
||||||
|
? failedFiles.join('\n')
|
||||||
|
: `${failedFiles.slice(0, 3).join('\n')}\n(and ${failedFiles.length - 3} more)`;
|
||||||
|
showToast('toast.api.bulkMoveFailures', { failures: failureMessage }, 'warning', 6000);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
showToast(
|
||||||
|
'toast.api.bulkMoveSuccess',
|
||||||
|
{
|
||||||
|
successCount: result.success_count,
|
||||||
|
type: this.apiConfig.config.displayName,
|
||||||
|
},
|
||||||
|
'success'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result.results || [];
|
||||||
|
}
|
||||||
|
|
||||||
|
async moveSingleModel(filePath, targetPath) {
|
||||||
|
if (!this.apiConfig.config.supportsMove) {
|
||||||
|
showToast('toast.api.moveNotSupported', { type: this.apiConfig.config.displayName }, 'warning');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const recipeId = extractRecipeId(filePath);
|
||||||
|
if (!recipeId) {
|
||||||
|
showToast('toast.api.moveFailed', { message: 'Recipe ID missing' }, 'error');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(this.apiConfig.endpoints.move, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
recipe_id: recipeId,
|
||||||
|
target_path: targetPath,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await response.json();
|
||||||
|
|
||||||
|
if (!response.ok || !result.success) {
|
||||||
|
throw new Error(result.error || `Failed to move ${this.apiConfig.config.displayName}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.message) {
|
||||||
|
showToast('toast.api.moveInfo', { message: result.message }, 'info');
|
||||||
|
} else {
|
||||||
|
showToast('toast.api.moveSuccess', { type: this.apiConfig.config.displayName }, 'success');
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
original_file_path: result.original_file_path || filePath,
|
||||||
|
new_file_path: result.new_file_path || filePath,
|
||||||
|
folder: result.folder || '',
|
||||||
|
message: result.message,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async bulkDeleteModels(filePaths) {
|
||||||
|
if (!filePaths || filePaths.length === 0) {
|
||||||
|
throw new Error('No file paths provided');
|
||||||
|
}
|
||||||
|
|
||||||
|
const recipeIds = filePaths
|
||||||
|
.map((path) => extractRecipeId(path))
|
||||||
|
.filter((id) => !!id);
|
||||||
|
|
||||||
|
if (recipeIds.length === 0) {
|
||||||
|
throw new Error('No recipe IDs could be derived from file paths');
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
state.loadingManager?.showSimpleLoading('Deleting recipes...');
|
||||||
|
|
||||||
|
const response = await fetch(this.apiConfig.endpoints.bulkDelete, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
recipe_ids: recipeIds,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await response.json();
|
||||||
|
|
||||||
|
if (!response.ok || !result.success) {
|
||||||
|
throw new Error(result.error || 'Failed to delete recipes');
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
deleted_count: result.total_deleted,
|
||||||
|
failed_count: result.total_failed || 0,
|
||||||
|
errors: result.failed || [],
|
||||||
|
};
|
||||||
|
} finally {
|
||||||
|
state.loadingManager?.hide();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -15,6 +15,29 @@ export class GlobalContextMenu extends BaseContextMenu {
|
|||||||
|
|
||||||
showMenu(x, y, origin = null) {
|
showMenu(x, y, origin = null) {
|
||||||
const contextOrigin = origin || { type: 'global' };
|
const contextOrigin = origin || { type: 'global' };
|
||||||
|
|
||||||
|
// Conditional visibility for recipes page
|
||||||
|
const isRecipesPage = state.currentPageType === 'recipes';
|
||||||
|
const modelUpdateItem = this.menu.querySelector('[data-action="check-model-updates"]');
|
||||||
|
const licenseRefreshItem = this.menu.querySelector('[data-action="fetch-missing-licenses"]');
|
||||||
|
const downloadExamplesItem = this.menu.querySelector('[data-action="download-example-images"]');
|
||||||
|
const cleanupExamplesItem = this.menu.querySelector('[data-action="cleanup-example-images-folders"]');
|
||||||
|
const repairRecipesItem = this.menu.querySelector('[data-action="repair-recipes"]');
|
||||||
|
|
||||||
|
if (isRecipesPage) {
|
||||||
|
modelUpdateItem?.classList.add('hidden');
|
||||||
|
licenseRefreshItem?.classList.add('hidden');
|
||||||
|
downloadExamplesItem?.classList.add('hidden');
|
||||||
|
cleanupExamplesItem?.classList.add('hidden');
|
||||||
|
repairRecipesItem?.classList.remove('hidden');
|
||||||
|
} else {
|
||||||
|
modelUpdateItem?.classList.remove('hidden');
|
||||||
|
licenseRefreshItem?.classList.remove('hidden');
|
||||||
|
downloadExamplesItem?.classList.remove('hidden');
|
||||||
|
cleanupExamplesItem?.classList.remove('hidden');
|
||||||
|
repairRecipesItem?.classList.add('hidden');
|
||||||
|
}
|
||||||
|
|
||||||
super.showMenu(x, y, contextOrigin);
|
super.showMenu(x, y, contextOrigin);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -40,6 +63,11 @@ export class GlobalContextMenu extends BaseContextMenu {
|
|||||||
console.error('Failed to refresh missing license metadata:', error);
|
console.error('Failed to refresh missing license metadata:', error);
|
||||||
});
|
});
|
||||||
break;
|
break;
|
||||||
|
case 'repair-recipes':
|
||||||
|
this.repairRecipes(menuItem).catch((error) => {
|
||||||
|
console.error('Failed to repair recipes:', error);
|
||||||
|
});
|
||||||
|
break;
|
||||||
default:
|
default:
|
||||||
console.warn(`Unhandled global context menu action: ${action}`);
|
console.warn(`Unhandled global context menu action: ${action}`);
|
||||||
break;
|
break;
|
||||||
@@ -235,4 +263,78 @@ export class GlobalContextMenu extends BaseContextMenu {
|
|||||||
|
|
||||||
return `${displayName}s`;
|
return `${displayName}s`;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async repairRecipes(menuItem) {
|
||||||
|
if (this._repairInProgress) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this._repairInProgress = true;
|
||||||
|
menuItem?.classList.add('disabled');
|
||||||
|
|
||||||
|
const loadingMessage = translate(
|
||||||
|
'globalContextMenu.repairRecipes.loading',
|
||||||
|
{},
|
||||||
|
'Repairing recipe data...'
|
||||||
|
);
|
||||||
|
|
||||||
|
const progressUI = state.loadingManager?.showEnhancedProgress(loadingMessage);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/lm/recipes/repair', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await response.json();
|
||||||
|
if (!response.ok || !result.success) {
|
||||||
|
throw new Error(result.error || 'Failed to start repair');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Poll for progress (or wait for WebSocket if preferred, but polling is simpler for this implementation)
|
||||||
|
let isComplete = false;
|
||||||
|
while (!isComplete && this._repairInProgress) {
|
||||||
|
const progressResponse = await fetch('/api/lm/recipes/repair-progress');
|
||||||
|
if (progressResponse.ok) {
|
||||||
|
const progressResult = await progressResponse.json();
|
||||||
|
if (progressResult.success && progressResult.progress) {
|
||||||
|
const p = progressResult.progress;
|
||||||
|
if (p.status === 'processing') {
|
||||||
|
const percent = (p.current / p.total) * 100;
|
||||||
|
progressUI?.updateProgress(percent, p.recipe_name, `${loadingMessage} (${p.current}/${p.total})`);
|
||||||
|
} else if (p.status === 'completed') {
|
||||||
|
isComplete = true;
|
||||||
|
progressUI?.complete(translate(
|
||||||
|
'globalContextMenu.repairRecipes.success',
|
||||||
|
{ count: p.repaired },
|
||||||
|
`Repaired ${p.repaired} recipes.`
|
||||||
|
));
|
||||||
|
showToast('globalContextMenu.repairRecipes.success', { count: p.repaired }, 'success');
|
||||||
|
// Refresh recipes page if active
|
||||||
|
if (window.recipesPage) {
|
||||||
|
window.recipesPage.refresh();
|
||||||
|
}
|
||||||
|
} else if (p.status === 'error') {
|
||||||
|
throw new Error(p.error || 'Repair failed');
|
||||||
|
}
|
||||||
|
} else if (progressResponse.status === 404) {
|
||||||
|
// Progress might have finished quickly and been cleaned up
|
||||||
|
isComplete = true;
|
||||||
|
progressUI?.complete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!isComplete) {
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Recipe repair failed:', error);
|
||||||
|
progressUI?.complete(translate('globalContextMenu.repairRecipes.error', { message: error.message }, 'Repair failed: {message}'));
|
||||||
|
showToast('globalContextMenu.repairRecipes.error', { message: error.message }, 'error');
|
||||||
|
} finally {
|
||||||
|
this._repairInProgress = false;
|
||||||
|
menuItem?.classList.remove('disabled');
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { showToast, copyToClipboard, sendLoraToWorkflow } from '../../utils/uiHe
|
|||||||
import { setSessionItem, removeSessionItem } from '../../utils/storageHelpers.js';
|
import { setSessionItem, removeSessionItem } from '../../utils/storageHelpers.js';
|
||||||
import { updateRecipeMetadata } from '../../api/recipeApi.js';
|
import { updateRecipeMetadata } from '../../api/recipeApi.js';
|
||||||
import { state } from '../../state/index.js';
|
import { state } from '../../state/index.js';
|
||||||
|
import { moveManager } from '../../managers/MoveManager.js';
|
||||||
|
|
||||||
export class RecipeContextMenu extends BaseContextMenu {
|
export class RecipeContextMenu extends BaseContextMenu {
|
||||||
constructor() {
|
constructor() {
|
||||||
@@ -56,7 +57,7 @@ export class RecipeContextMenu extends BaseContextMenu {
|
|||||||
// Handle recipe-specific actions
|
// Handle recipe-specific actions
|
||||||
const recipeId = this.currentCard.dataset.id;
|
const recipeId = this.currentCard.dataset.id;
|
||||||
|
|
||||||
switch(action) {
|
switch (action) {
|
||||||
case 'details':
|
case 'details':
|
||||||
// Show recipe details
|
// Show recipe details
|
||||||
this.currentCard.click();
|
this.currentCard.click();
|
||||||
@@ -77,6 +78,9 @@ export class RecipeContextMenu extends BaseContextMenu {
|
|||||||
// Share recipe
|
// Share recipe
|
||||||
this.currentCard.querySelector('.fa-share-alt')?.click();
|
this.currentCard.querySelector('.fa-share-alt')?.click();
|
||||||
break;
|
break;
|
||||||
|
case 'move':
|
||||||
|
moveManager.showMoveModal(this.currentCard.dataset.filepath);
|
||||||
|
break;
|
||||||
case 'delete':
|
case 'delete':
|
||||||
// Delete recipe
|
// Delete recipe
|
||||||
this.currentCard.querySelector('.fa-trash')?.click();
|
this.currentCard.querySelector('.fa-trash')?.click();
|
||||||
@@ -89,6 +93,10 @@ export class RecipeContextMenu extends BaseContextMenu {
|
|||||||
// Download missing LoRAs
|
// Download missing LoRAs
|
||||||
this.downloadMissingLoRAs(recipeId);
|
this.downloadMissingLoRAs(recipeId);
|
||||||
break;
|
break;
|
||||||
|
case 'repair':
|
||||||
|
// Repair recipe metadata
|
||||||
|
this.repairRecipe(recipeId);
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -279,6 +287,38 @@ export class RecipeContextMenu extends BaseContextMenu {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Repair recipe metadata
|
||||||
|
async repairRecipe(recipeId) {
|
||||||
|
if (!recipeId) {
|
||||||
|
showToast('recipes.contextMenu.repair.missingId', {}, 'error');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
showToast('recipes.contextMenu.repair.starting', {}, 'info');
|
||||||
|
|
||||||
|
const response = await fetch(`/api/lm/recipe/${recipeId}/repair`, {
|
||||||
|
method: 'POST'
|
||||||
|
});
|
||||||
|
const result = await response.json();
|
||||||
|
|
||||||
|
if (result.success) {
|
||||||
|
if (result.repaired > 0) {
|
||||||
|
showToast('recipes.contextMenu.repair.success', {}, 'success');
|
||||||
|
// Refresh the current card or reload
|
||||||
|
this.resetAndReload();
|
||||||
|
} else {
|
||||||
|
showToast('recipes.contextMenu.repair.skipped', {}, 'info');
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
throw new Error(result.error || 'Repair failed');
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error repairing recipe:', error);
|
||||||
|
showToast('recipes.contextMenu.repair.failed', { message: error.message }, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Mix in shared methods from ModelContextMenuMixin
|
// Mix in shared methods from ModelContextMenuMixin
|
||||||
|
|||||||
@@ -1,8 +1,11 @@
|
|||||||
// Recipe Card Component
|
// Recipe Card Component
|
||||||
import { showToast, copyToClipboard, sendLoraToWorkflow } from '../utils/uiHelpers.js';
|
import { showToast, copyToClipboard, sendLoraToWorkflow } from '../utils/uiHelpers.js';
|
||||||
|
import { updateRecipeMetadata } from '../api/recipeApi.js';
|
||||||
|
import { configureModelCardVideo } from './shared/ModelCard.js';
|
||||||
import { modalManager } from '../managers/ModalManager.js';
|
import { modalManager } from '../managers/ModalManager.js';
|
||||||
import { getCurrentPageState } from '../state/index.js';
|
import { getCurrentPageState } from '../state/index.js';
|
||||||
import { state } from '../state/index.js';
|
import { state } from '../state/index.js';
|
||||||
|
import { bulkManager } from '../managers/BulkManager.js';
|
||||||
import { NSFW_LEVELS, getBaseModelAbbreviation } from '../utils/constants.js';
|
import { NSFW_LEVELS, getBaseModelAbbreviation } from '../utils/constants.js';
|
||||||
|
|
||||||
class RecipeCard {
|
class RecipeCard {
|
||||||
@@ -38,13 +41,28 @@ class RecipeCard {
|
|||||||
const allLorasAvailable = missingLorasCount === 0 && lorasCount > 0;
|
const allLorasAvailable = missingLorasCount === 0 && lorasCount > 0;
|
||||||
|
|
||||||
// Ensure file_url exists, fallback to file_path if needed
|
// Ensure file_url exists, fallback to file_path if needed
|
||||||
const imageUrl = this.recipe.file_url ||
|
const previewUrl = this.recipe.file_url ||
|
||||||
(this.recipe.file_path ? `/loras_static/root1/preview/${this.recipe.file_path.split('/').pop()}` :
|
(this.recipe.file_path ? `/loras_static/root1/preview/${this.recipe.file_path.split('/').pop()}` :
|
||||||
'/loras_static/images/no-preview.png');
|
'/loras_static/images/no-preview.png');
|
||||||
|
|
||||||
// Check if in duplicates mode
|
const isDuplicatesMode = getCurrentPageState().duplicatesMode;
|
||||||
const pageState = getCurrentPageState();
|
const autoplayOnHover = state?.global?.settings?.autoplay_on_hover === true;
|
||||||
const isDuplicatesMode = pageState.duplicatesMode;
|
const isFavorite = this.recipe.favorite === true;
|
||||||
|
|
||||||
|
// Video preview logic
|
||||||
|
const isVideo = previewUrl.endsWith('.mp4') || previewUrl.endsWith('.webm');
|
||||||
|
const videoAttrs = [
|
||||||
|
'controls',
|
||||||
|
'muted',
|
||||||
|
'loop',
|
||||||
|
'playsinline',
|
||||||
|
'preload="none"',
|
||||||
|
`data-src="${previewUrl}"`
|
||||||
|
];
|
||||||
|
|
||||||
|
if (!autoplayOnHover) {
|
||||||
|
videoAttrs.push('data-autoplay="true"');
|
||||||
|
}
|
||||||
|
|
||||||
// NSFW blur logic - similar to LoraCard
|
// NSFW blur logic - similar to LoraCard
|
||||||
const nsfwLevel = this.recipe.preview_nsfw_level !== undefined ? this.recipe.preview_nsfw_level : 0;
|
const nsfwLevel = this.recipe.preview_nsfw_level !== undefined ? this.recipe.preview_nsfw_level : 0;
|
||||||
@@ -66,7 +84,10 @@ class RecipeCard {
|
|||||||
|
|
||||||
card.innerHTML = `
|
card.innerHTML = `
|
||||||
<div class="card-preview ${shouldBlur ? 'blurred' : ''}">
|
<div class="card-preview ${shouldBlur ? 'blurred' : ''}">
|
||||||
<img src="${imageUrl}" alt="${this.recipe.title}">
|
${isVideo ?
|
||||||
|
`<video ${videoAttrs.join(' ')} style="pointer-events: none;"></video>` :
|
||||||
|
`<img src="${previewUrl}" alt="${this.recipe.title}">`
|
||||||
|
}
|
||||||
${!isDuplicatesMode ? `
|
${!isDuplicatesMode ? `
|
||||||
<div class="card-header">
|
<div class="card-header">
|
||||||
${shouldBlur ?
|
${shouldBlur ?
|
||||||
@@ -75,6 +96,7 @@ class RecipeCard {
|
|||||||
</button>` : ''}
|
</button>` : ''}
|
||||||
<span class="base-model-label ${shouldBlur ? 'with-toggle' : ''}" title="${baseModelLabel}">${baseModelDisplay}</span>
|
<span class="base-model-label ${shouldBlur ? 'with-toggle' : ''}" title="${baseModelLabel}">${baseModelDisplay}</span>
|
||||||
<div class="card-actions">
|
<div class="card-actions">
|
||||||
|
<i class="${isFavorite ? 'fas fa-star favorite-active' : 'far fa-star'}" title="${isFavorite ? 'Remove from Favorites' : 'Add to Favorites'}"></i>
|
||||||
<i class="fas fa-share-alt" title="Share Recipe"></i>
|
<i class="fas fa-share-alt" title="Share Recipe"></i>
|
||||||
<i class="fas fa-paper-plane" title="Send Recipe to Workflow (Click: Append, Shift+Click: Replace)"></i>
|
<i class="fas fa-paper-plane" title="Send Recipe to Workflow (Click: Append, Shift+Click: Replace)"></i>
|
||||||
<i class="fas fa-trash" title="Delete Recipe"></i>
|
<i class="fas fa-trash" title="Delete Recipe"></i>
|
||||||
@@ -104,6 +126,13 @@ class RecipeCard {
|
|||||||
`;
|
`;
|
||||||
|
|
||||||
this.attachEventListeners(card, isDuplicatesMode, shouldBlur);
|
this.attachEventListeners(card, isDuplicatesMode, shouldBlur);
|
||||||
|
|
||||||
|
// Add video auto-play on hover functionality if needed
|
||||||
|
const videoElement = card.querySelector('video');
|
||||||
|
if (videoElement) {
|
||||||
|
configureModelCardVideo(videoElement, autoplayOnHover);
|
||||||
|
}
|
||||||
|
|
||||||
return card;
|
return card;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -113,6 +142,67 @@ class RecipeCard {
|
|||||||
return `${missingCount} of ${totalCount} LoRAs missing`;
|
return `${missingCount} of ${totalCount} LoRAs missing`;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async toggleFavorite(card) {
|
||||||
|
// Find the latest star icon in case the card was re-rendered
|
||||||
|
const getStarIcon = (c) => c.querySelector('.fa-star');
|
||||||
|
let starIcon = getStarIcon(card);
|
||||||
|
|
||||||
|
const isFavorite = this.recipe.favorite || false;
|
||||||
|
const newFavoriteState = !isFavorite;
|
||||||
|
|
||||||
|
// Update early to provide instant feedback and avoid race conditions with re-renders
|
||||||
|
this.recipe.favorite = newFavoriteState;
|
||||||
|
|
||||||
|
// Function to update icon state
|
||||||
|
const updateIconUI = (icon, state) => {
|
||||||
|
if (!icon) return;
|
||||||
|
if (state) {
|
||||||
|
icon.classList.remove('far');
|
||||||
|
icon.classList.add('fas', 'favorite-active');
|
||||||
|
icon.title = 'Remove from Favorites';
|
||||||
|
} else {
|
||||||
|
icon.classList.remove('fas', 'favorite-active');
|
||||||
|
icon.classList.add('far');
|
||||||
|
icon.title = 'Add to Favorites';
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Update current icon immediately
|
||||||
|
updateIconUI(starIcon, newFavoriteState);
|
||||||
|
|
||||||
|
try {
|
||||||
|
await updateRecipeMetadata(this.recipe.file_path, {
|
||||||
|
favorite: newFavoriteState
|
||||||
|
});
|
||||||
|
|
||||||
|
// Status already updated, just show toast
|
||||||
|
if (newFavoriteState) {
|
||||||
|
showToast('modelCard.favorites.added', {}, 'success');
|
||||||
|
} else {
|
||||||
|
showToast('modelCard.favorites.removed', {}, 'success');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Re-find star icon after API call as VirtualScroller might have replaced the element
|
||||||
|
// During updateRecipeMetadata, VirtualScroller.updateSingleItem might have re-rendered the card
|
||||||
|
// We need to find the NEW element in the DOM to ensure we don't have a stale reference
|
||||||
|
// Though typically VirtualScroller handles the re-render with the NEW this.recipe.favorite
|
||||||
|
// we will check the DOM just to be sure if this instance's internal card is still what's in DOM
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to update favorite status:', error);
|
||||||
|
// Revert local state on error
|
||||||
|
this.recipe.favorite = isFavorite;
|
||||||
|
|
||||||
|
// Re-find star icon in case of re-render during fault
|
||||||
|
const currentCard = card.ownerDocument.evaluate(
|
||||||
|
`.//*[@data-filepath="${this.recipe.file_path}"]`,
|
||||||
|
card.ownerDocument, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null
|
||||||
|
).singleNodeValue || card;
|
||||||
|
|
||||||
|
updateIconUI(getStarIcon(currentCard), isFavorite);
|
||||||
|
showToast('modelCard.favorites.updateFailed', {}, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
attachEventListeners(card, isDuplicatesMode, shouldBlur) {
|
attachEventListeners(card, isDuplicatesMode, shouldBlur) {
|
||||||
// Add blur toggle functionality if content should be blurred
|
// Add blur toggle functionality if content should be blurred
|
||||||
if (shouldBlur) {
|
if (shouldBlur) {
|
||||||
@@ -137,9 +227,19 @@ class RecipeCard {
|
|||||||
// Recipe card click event - only attach if not in duplicates mode
|
// Recipe card click event - only attach if not in duplicates mode
|
||||||
if (!isDuplicatesMode) {
|
if (!isDuplicatesMode) {
|
||||||
card.addEventListener('click', () => {
|
card.addEventListener('click', () => {
|
||||||
|
if (state.bulkMode) {
|
||||||
|
bulkManager.toggleCardSelection(card);
|
||||||
|
return;
|
||||||
|
}
|
||||||
this.clickHandler(this.recipe);
|
this.clickHandler(this.recipe);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Favorite button click event - prevent propagation to card
|
||||||
|
card.querySelector('.fa-star')?.addEventListener('click', (e) => {
|
||||||
|
e.stopPropagation();
|
||||||
|
this.toggleFavorite(card);
|
||||||
|
});
|
||||||
|
|
||||||
// Share button click event - prevent propagation to card
|
// Share button click event - prevent propagation to card
|
||||||
card.querySelector('.fa-share-alt')?.addEventListener('click', (e) => {
|
card.querySelector('.fa-share-alt')?.addEventListener('click', (e) => {
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
@@ -235,13 +335,19 @@ class RecipeCard {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Create delete modal content
|
// Create delete modal content
|
||||||
|
const previewUrl = this.recipe.file_url || '/loras_static/images/no-preview.png';
|
||||||
|
const isVideo = previewUrl.endsWith('.mp4') || previewUrl.endsWith('.webm');
|
||||||
|
|
||||||
const deleteModalContent = `
|
const deleteModalContent = `
|
||||||
<div class="modal-content delete-modal-content">
|
<div class="modal-content delete-modal-content">
|
||||||
<h2>Delete Recipe</h2>
|
<h2>Delete Recipe</h2>
|
||||||
<p class="delete-message">Are you sure you want to delete this recipe?</p>
|
<p class="delete-message">Are you sure you want to delete this recipe?</p>
|
||||||
<div class="delete-model-info">
|
<div class="delete-model-info">
|
||||||
<div class="delete-preview">
|
<div class="delete-preview">
|
||||||
<img src="${this.recipe.file_url || '/loras_static/images/no-preview.png'}" alt="${this.recipe.title}">
|
${isVideo ?
|
||||||
|
`<video src="${previewUrl}" controls muted loop playsinline style="max-width: 100%;"></video>` :
|
||||||
|
`<img src="${previewUrl}" alt="${this.recipe.title}">`
|
||||||
|
}
|
||||||
</div>
|
</div>
|
||||||
<div class="delete-info">
|
<div class="delete-info">
|
||||||
<h3>${this.recipe.title}</h3>
|
<h3>${this.recipe.title}</h3>
|
||||||
|
|||||||
@@ -77,7 +77,9 @@ export class SidebarManager {
|
|||||||
this.pageControls = pageControls;
|
this.pageControls = pageControls;
|
||||||
this.pageType = pageControls.pageType;
|
this.pageType = pageControls.pageType;
|
||||||
this.lastPageControls = pageControls;
|
this.lastPageControls = pageControls;
|
||||||
this.apiClient = getModelApiClient();
|
this.apiClient = pageControls?.getSidebarApiClient?.()
|
||||||
|
|| pageControls?.sidebarApiClient
|
||||||
|
|| getModelApiClient();
|
||||||
|
|
||||||
// Set initial sidebar state immediately (hidden by default)
|
// Set initial sidebar state immediately (hidden by default)
|
||||||
this.setInitialSidebarState();
|
this.setInitialSidebarState();
|
||||||
@@ -205,6 +207,10 @@ export class SidebarManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
initializeDragAndDrop() {
|
initializeDragAndDrop() {
|
||||||
|
if (this.apiClient?.apiConfig?.config?.supportsMove === false) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
if (!this.dragHandlersInitialized) {
|
if (!this.dragHandlersInitialized) {
|
||||||
document.addEventListener('dragstart', this.handleCardDragStart);
|
document.addEventListener('dragstart', this.handleCardDragStart);
|
||||||
document.addEventListener('dragend', this.handleCardDragEnd);
|
document.addEventListener('dragend', this.handleCardDragEnd);
|
||||||
@@ -416,7 +422,14 @@ export class SidebarManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (!this.apiClient) {
|
if (!this.apiClient) {
|
||||||
this.apiClient = getModelApiClient();
|
this.apiClient = this.pageControls?.getSidebarApiClient?.()
|
||||||
|
|| this.pageControls?.sidebarApiClient
|
||||||
|
|| getModelApiClient();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.apiClient?.apiConfig?.config?.supportsMove === false) {
|
||||||
|
showToast('toast.models.moveFailed', { message: translate('sidebar.dragDrop.moveUnsupported', {}, 'Move not supported for this page') }, 'error');
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
const rootPath = this.draggedRootPath ? this.draggedRootPath.replace(/\\/g, '/') : '';
|
const rootPath = this.draggedRootPath ? this.draggedRootPath.replace(/\\/g, '/') : '';
|
||||||
@@ -470,7 +483,9 @@ export class SidebarManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async init() {
|
async init() {
|
||||||
this.apiClient = getModelApiClient();
|
this.apiClient = this.pageControls?.getSidebarApiClient?.()
|
||||||
|
|| this.pageControls?.sidebarApiClient
|
||||||
|
|| getModelApiClient();
|
||||||
|
|
||||||
// Set initial sidebar state immediately (hidden by default)
|
// Set initial sidebar state immediately (hidden by default)
|
||||||
this.setInitialSidebarState();
|
this.setInitialSidebarState();
|
||||||
@@ -1010,8 +1025,11 @@ export class SidebarManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async selectFolder(path) {
|
async selectFolder(path) {
|
||||||
|
// Normalize path: null or undefined means root
|
||||||
|
const normalizedPath = (path === null || path === undefined) ? '' : path;
|
||||||
|
|
||||||
// Update selected path
|
// Update selected path
|
||||||
this.selectedPath = path;
|
this.selectedPath = normalizedPath;
|
||||||
|
|
||||||
// Update UI
|
// Update UI
|
||||||
this.updateTreeSelection();
|
this.updateTreeSelection();
|
||||||
@@ -1019,8 +1037,8 @@ export class SidebarManager {
|
|||||||
this.updateSidebarHeader();
|
this.updateSidebarHeader();
|
||||||
|
|
||||||
// Update page state
|
// Update page state
|
||||||
this.pageControls.pageState.activeFolder = path;
|
this.pageControls.pageState.activeFolder = normalizedPath;
|
||||||
setStorageItem(`${this.pageType}_activeFolder`, path);
|
setStorageItem(`${this.pageType}_activeFolder`, normalizedPath);
|
||||||
|
|
||||||
// Reload models with new filter
|
// Reload models with new filter
|
||||||
await this.pageControls.resetAndReload();
|
await this.pageControls.resetAndReload();
|
||||||
@@ -1143,7 +1161,7 @@ export class SidebarManager {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Add selection to current path
|
// Add selection to current path
|
||||||
if (this.selectedPath !== null) {
|
if (this.selectedPath !== null && this.selectedPath !== undefined) {
|
||||||
const selectedItem = folderTree.querySelector(`[data-path="${this.selectedPath}"]`);
|
const selectedItem = folderTree.querySelector(`[data-path="${this.selectedPath}"]`);
|
||||||
if (selectedItem) {
|
if (selectedItem) {
|
||||||
selectedItem.classList.add('selected');
|
selectedItem.classList.add('selected');
|
||||||
@@ -1154,7 +1172,7 @@ export class SidebarManager {
|
|||||||
node.classList.remove('selected');
|
node.classList.remove('selected');
|
||||||
});
|
});
|
||||||
|
|
||||||
if (this.selectedPath) {
|
if (this.selectedPath !== null && this.selectedPath !== undefined) {
|
||||||
const selectedNode = folderTree.querySelector(`[data-path="${this.selectedPath}"] .sidebar-tree-node-content`);
|
const selectedNode = folderTree.querySelector(`[data-path="${this.selectedPath}"] .sidebar-tree-node-content`);
|
||||||
if (selectedNode) {
|
if (selectedNode) {
|
||||||
selectedNode.classList.add('selected');
|
selectedNode.classList.add('selected');
|
||||||
@@ -1225,9 +1243,10 @@ export class SidebarManager {
|
|||||||
|
|
||||||
// Start with root breadcrumb
|
// Start with root breadcrumb
|
||||||
const rootSiblings = Object.keys(this.treeData);
|
const rootSiblings = Object.keys(this.treeData);
|
||||||
|
const isRootSelected = !this.selectedPath;
|
||||||
const breadcrumbs = [`
|
const breadcrumbs = [`
|
||||||
<div class="breadcrumb-dropdown">
|
<div class="breadcrumb-dropdown">
|
||||||
<span class="sidebar-breadcrumb-item ${this.selectedPath == null ? 'active' : ''}" data-path="">
|
<span class="sidebar-breadcrumb-item ${isRootSelected ? 'active' : ''}" data-path="">
|
||||||
<i class="fas fa-home"></i> ${this.apiClient.apiConfig.config.displayName} root
|
<i class="fas fa-home"></i> ${this.apiClient.apiConfig.config.displayName} root
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
@@ -1323,7 +1342,7 @@ export class SidebarManager {
|
|||||||
const sidebarHeader = document.getElementById('sidebarHeader');
|
const sidebarHeader = document.getElementById('sidebarHeader');
|
||||||
if (!sidebarHeader) return;
|
if (!sidebarHeader) return;
|
||||||
|
|
||||||
if (this.selectedPath == null) {
|
if (!this.selectedPath) {
|
||||||
sidebarHeader.classList.add('root-selected');
|
sidebarHeader.classList.add('root-selected');
|
||||||
} else {
|
} else {
|
||||||
sidebarHeader.classList.remove('root-selected');
|
sidebarHeader.classList.remove('root-selected');
|
||||||
|
|||||||
@@ -506,7 +506,7 @@ export function createModelCard(model, modelType) {
|
|||||||
|
|
||||||
// Check if autoplayOnHover is enabled for video previews
|
// Check if autoplayOnHover is enabled for video previews
|
||||||
const autoplayOnHover = state.global?.settings?.autoplay_on_hover || false;
|
const autoplayOnHover = state.global?.settings?.autoplay_on_hover || false;
|
||||||
const isVideo = previewUrl.endsWith('.mp4');
|
const isVideo = previewUrl.endsWith('.mp4') || previewUrl.endsWith('.webm');
|
||||||
const videoAttrs = [
|
const videoAttrs = [
|
||||||
'controls',
|
'controls',
|
||||||
'muted',
|
'muted',
|
||||||
@@ -765,7 +765,7 @@ function cleanupHoverHandlers(videoElement) {
|
|||||||
function requestSafePlay(videoElement) {
|
function requestSafePlay(videoElement) {
|
||||||
const playPromise = videoElement.play();
|
const playPromise = videoElement.play();
|
||||||
if (playPromise && typeof playPromise.catch === 'function') {
|
if (playPromise && typeof playPromise.catch === 'function') {
|
||||||
playPromise.catch(() => {});
|
playPromise.catch(() => { });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -313,7 +313,7 @@ export async function showModelModal(model, modelType) {
|
|||||||
|
|
||||||
// Prepare LoRA specific data with complete civitai data
|
// Prepare LoRA specific data with complete civitai data
|
||||||
const escapedWords = (modelType === 'loras' || modelType === 'embeddings') && modelWithFullData.civitai?.trainedWords?.length ?
|
const escapedWords = (modelType === 'loras' || modelType === 'embeddings') && modelWithFullData.civitai?.trainedWords?.length ?
|
||||||
modelWithFullData.civitai.trainedWords.map(word => word.replace(/'/g, '\\\'')) : [];
|
modelWithFullData.civitai.trainedWords : [];
|
||||||
|
|
||||||
// Generate model type specific content
|
// Generate model type specific content
|
||||||
let typeSpecificContent;
|
let typeSpecificContent;
|
||||||
@@ -597,7 +597,7 @@ export async function showModelModal(model, modelType) {
|
|||||||
|
|
||||||
let showcaseCleanup;
|
let showcaseCleanup;
|
||||||
|
|
||||||
const onCloseCallback = function() {
|
const onCloseCallback = function () {
|
||||||
// Clean up all handlers when modal closes for LoRA
|
// Clean up all handlers when modal closes for LoRA
|
||||||
const modalElement = document.getElementById(modalId);
|
const modalElement = document.getElementById(modalId);
|
||||||
if (modalElement && modalElement._clickHandler) {
|
if (modalElement && modalElement._clickHandler) {
|
||||||
@@ -765,13 +765,13 @@ function setupEditableFields(filePath, modelType) {
|
|||||||
const editableFields = document.querySelectorAll('.editable-field [contenteditable]');
|
const editableFields = document.querySelectorAll('.editable-field [contenteditable]');
|
||||||
|
|
||||||
editableFields.forEach(field => {
|
editableFields.forEach(field => {
|
||||||
field.addEventListener('focus', function() {
|
field.addEventListener('focus', function () {
|
||||||
if (this.textContent === 'Add your notes here...') {
|
if (this.textContent === 'Add your notes here...') {
|
||||||
this.textContent = '';
|
this.textContent = '';
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
field.addEventListener('blur', function() {
|
field.addEventListener('blur', function () {
|
||||||
if (this.textContent.trim() === '') {
|
if (this.textContent.trim() === '') {
|
||||||
if (this.classList.contains('notes-content')) {
|
if (this.classList.contains('notes-content')) {
|
||||||
this.textContent = 'Add your notes here...';
|
this.textContent = 'Add your notes here...';
|
||||||
@@ -783,7 +783,7 @@ function setupEditableFields(filePath, modelType) {
|
|||||||
// Add keydown event listeners for notes
|
// Add keydown event listeners for notes
|
||||||
const notesContent = document.querySelector('.notes-content');
|
const notesContent = document.querySelector('.notes-content');
|
||||||
if (notesContent) {
|
if (notesContent) {
|
||||||
notesContent.addEventListener('keydown', async function(e) {
|
notesContent.addEventListener('keydown', async function (e) {
|
||||||
if (e.key === 'Enter') {
|
if (e.key === 'Enter') {
|
||||||
if (e.shiftKey) {
|
if (e.shiftKey) {
|
||||||
// Allow shift+enter for new line
|
// Allow shift+enter for new line
|
||||||
@@ -810,7 +810,7 @@ function setupLoraSpecificFields(filePath) {
|
|||||||
|
|
||||||
if (!presetSelector || !presetValue || !addPresetBtn || !presetTags) return;
|
if (!presetSelector || !presetValue || !addPresetBtn || !presetTags) return;
|
||||||
|
|
||||||
presetSelector.addEventListener('change', function() {
|
presetSelector.addEventListener('change', function () {
|
||||||
const selected = this.value;
|
const selected = this.value;
|
||||||
if (selected) {
|
if (selected) {
|
||||||
presetValue.style.display = 'inline-block';
|
presetValue.style.display = 'inline-block';
|
||||||
@@ -828,7 +828,7 @@ function setupLoraSpecificFields(filePath) {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
addPresetBtn.addEventListener('click', async function() {
|
addPresetBtn.addEventListener('click', async function () {
|
||||||
const key = presetSelector.value;
|
const key = presetSelector.value;
|
||||||
const value = presetValue.value;
|
const value = presetValue.value;
|
||||||
|
|
||||||
@@ -853,7 +853,7 @@ function setupLoraSpecificFields(filePath) {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Add keydown event for preset value
|
// Add keydown event for preset value
|
||||||
presetValue.addEventListener('keydown', function(e) {
|
presetValue.addEventListener('keydown', function (e) {
|
||||||
if (e.key === 'Enter') {
|
if (e.key === 'Enter') {
|
||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
addPresetBtn.click();
|
addPresetBtn.click();
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
import { showToast, copyToClipboard } from '../../utils/uiHelpers.js';
|
import { showToast, copyToClipboard } from '../../utils/uiHelpers.js';
|
||||||
import { translate } from '../../utils/i18nHelpers.js';
|
import { translate } from '../../utils/i18nHelpers.js';
|
||||||
import { getModelApiClient } from '../../api/modelApiFactory.js';
|
import { getModelApiClient } from '../../api/modelApiFactory.js';
|
||||||
import { escapeAttribute } from './utils.js';
|
import { escapeAttribute, escapeHtml } from './utils.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Fetch trained words for a model
|
* Fetch trained words for a model
|
||||||
@@ -80,8 +80,10 @@ function createSuggestionDropdown(trainedWords, classTokens, existingWords = [])
|
|||||||
const tokenItem = document.createElement('div');
|
const tokenItem = document.createElement('div');
|
||||||
tokenItem.className = `metadata-suggestion-item class-token-item ${existingWords.includes(classTokens) ? 'already-added' : ''}`;
|
tokenItem.className = `metadata-suggestion-item class-token-item ${existingWords.includes(classTokens) ? 'already-added' : ''}`;
|
||||||
tokenItem.title = `${translate('modals.model.triggerWords.suggestions.classToken')}: ${classTokens}`;
|
tokenItem.title = `${translate('modals.model.triggerWords.suggestions.classToken')}: ${classTokens}`;
|
||||||
|
|
||||||
|
const escapedToken = escapeHtml(classTokens);
|
||||||
tokenItem.innerHTML = `
|
tokenItem.innerHTML = `
|
||||||
<span class="metadata-suggestion-text">${classTokens}</span>
|
<span class="metadata-suggestion-text">${escapedToken}</span>
|
||||||
<div class="metadata-suggestion-meta">
|
<div class="metadata-suggestion-meta">
|
||||||
<span class="token-badge">${translate('modals.model.triggerWords.suggestions.classToken')}</span>
|
<span class="token-badge">${translate('modals.model.triggerWords.suggestions.classToken')}</span>
|
||||||
${existingWords.includes(classTokens) ?
|
${existingWords.includes(classTokens) ?
|
||||||
@@ -137,8 +139,10 @@ function createSuggestionDropdown(trainedWords, classTokens, existingWords = [])
|
|||||||
const item = document.createElement('div');
|
const item = document.createElement('div');
|
||||||
item.className = `metadata-suggestion-item ${isAdded ? 'already-added' : ''}`;
|
item.className = `metadata-suggestion-item ${isAdded ? 'already-added' : ''}`;
|
||||||
item.title = word; // Show full word on hover if truncated
|
item.title = word; // Show full word on hover if truncated
|
||||||
|
|
||||||
|
const escapedWord = escapeHtml(word);
|
||||||
item.innerHTML = `
|
item.innerHTML = `
|
||||||
<span class="metadata-suggestion-text">${word}</span>
|
<span class="metadata-suggestion-text">${escapedWord}</span>
|
||||||
<div class="metadata-suggestion-meta">
|
<div class="metadata-suggestion-meta">
|
||||||
<span class="trained-word-freq">${frequency}</span>
|
<span class="trained-word-freq">${frequency}</span>
|
||||||
${isAdded ? `<span class="added-indicator"><i class="fas fa-check"></i></span>` : ''}
|
${isAdded ? `<span class="added-indicator"><i class="fas fa-check"></i></span>` : ''}
|
||||||
@@ -215,9 +219,12 @@ export function renderTriggerWords(words, filePath) {
|
|||||||
</div>
|
</div>
|
||||||
<div class="trigger-words-content">
|
<div class="trigger-words-content">
|
||||||
<div class="trigger-words-tags">
|
<div class="trigger-words-tags">
|
||||||
${words.map(word => `
|
${words.map(word => {
|
||||||
<div class="trigger-word-tag" data-word="${word}" onclick="copyTriggerWord('${word}')" title="${translate('modals.model.triggerWords.copyWord')}">
|
const escapedWord = escapeHtml(word);
|
||||||
<span class="trigger-word-content">${word}</span>
|
const escapedAttr = escapeAttribute(word);
|
||||||
|
return `
|
||||||
|
<div class="trigger-word-tag" data-word="${escapedAttr}" onclick="copyTriggerWord(this.dataset.word)" title="${translate('modals.model.triggerWords.copyWord')}">
|
||||||
|
<span class="trigger-word-content">${escapedWord}</span>
|
||||||
<span class="trigger-word-copy">
|
<span class="trigger-word-copy">
|
||||||
<i class="fas fa-copy"></i>
|
<i class="fas fa-copy"></i>
|
||||||
</span>
|
</span>
|
||||||
@@ -225,7 +232,7 @@ export function renderTriggerWords(words, filePath) {
|
|||||||
<i class="fas fa-times"></i>
|
<i class="fas fa-times"></i>
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
`).join('')}
|
`}).join('')}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div class="metadata-edit-controls" style="display:none;">
|
<div class="metadata-edit-controls" style="display:none;">
|
||||||
@@ -254,7 +261,7 @@ export function setupTriggerWordsEditMode() {
|
|||||||
const editBtn = document.querySelector('.edit-trigger-words-btn');
|
const editBtn = document.querySelector('.edit-trigger-words-btn');
|
||||||
if (!editBtn) return;
|
if (!editBtn) return;
|
||||||
|
|
||||||
editBtn.addEventListener('click', async function() {
|
editBtn.addEventListener('click', async function () {
|
||||||
const triggerWordsSection = this.closest('.trigger-words');
|
const triggerWordsSection = this.closest('.trigger-words');
|
||||||
const isEditMode = triggerWordsSection.classList.toggle('edit-mode');
|
const isEditMode = triggerWordsSection.classList.toggle('edit-mode');
|
||||||
const filePath = this.dataset.filePath;
|
const filePath = this.dataset.filePath;
|
||||||
@@ -368,7 +375,7 @@ export function setupTriggerWordsEditMode() {
|
|||||||
|
|
||||||
if (triggerWordInput) {
|
if (triggerWordInput) {
|
||||||
// Add keydown event to input
|
// Add keydown event to input
|
||||||
triggerWordInput.addEventListener('keydown', function(e) {
|
triggerWordInput.addEventListener('keydown', function (e) {
|
||||||
if (e.key === 'Enter') {
|
if (e.key === 'Enter') {
|
||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
addNewTriggerWord(this.value);
|
addNewTriggerWord(this.value);
|
||||||
@@ -417,7 +424,7 @@ function resetTriggerWordsUIState(section) {
|
|||||||
const deleteBtn = tag.querySelector('.metadata-delete-btn');
|
const deleteBtn = tag.querySelector('.metadata-delete-btn');
|
||||||
|
|
||||||
// Restore click-to-copy functionality
|
// Restore click-to-copy functionality
|
||||||
tag.onclick = () => copyTriggerWord(word);
|
tag.onclick = () => copyTriggerWord(tag.dataset.word);
|
||||||
|
|
||||||
// Show copy icon, hide delete button
|
// Show copy icon, hide delete button
|
||||||
if (copyIcon) copyIcon.style.display = '';
|
if (copyIcon) copyIcon.style.display = '';
|
||||||
@@ -454,9 +461,11 @@ function restoreOriginalTriggerWords(section, originalWords) {
|
|||||||
const tag = document.createElement('div');
|
const tag = document.createElement('div');
|
||||||
tag.className = 'trigger-word-tag';
|
tag.className = 'trigger-word-tag';
|
||||||
tag.dataset.word = word;
|
tag.dataset.word = word;
|
||||||
tag.onclick = () => copyTriggerWord(word);
|
tag.onclick = () => copyTriggerWord(tag.dataset.word);
|
||||||
|
|
||||||
|
const escapedWord = escapeHtml(word);
|
||||||
tag.innerHTML = `
|
tag.innerHTML = `
|
||||||
<span class="trigger-word-content">${word}</span>
|
<span class="trigger-word-content">${escapedWord}</span>
|
||||||
<span class="trigger-word-copy">
|
<span class="trigger-word-copy">
|
||||||
<i class="fas fa-copy"></i>
|
<i class="fas fa-copy"></i>
|
||||||
</span>
|
</span>
|
||||||
@@ -524,8 +533,10 @@ function addNewTriggerWord(word) {
|
|||||||
const newTag = document.createElement('div');
|
const newTag = document.createElement('div');
|
||||||
newTag.className = 'trigger-word-tag';
|
newTag.className = 'trigger-word-tag';
|
||||||
newTag.dataset.word = word;
|
newTag.dataset.word = word;
|
||||||
|
|
||||||
|
const escapedWord = escapeHtml(word);
|
||||||
newTag.innerHTML = `
|
newTag.innerHTML = `
|
||||||
<span class="trigger-word-content">${word}</span>
|
<span class="trigger-word-content">${escapedWord}</span>
|
||||||
<span class="trigger-word-copy" style="display:none;">
|
<span class="trigger-word-copy" style="display:none;">
|
||||||
<i class="fas fa-copy"></i>
|
<i class="fas fa-copy"></i>
|
||||||
</span>
|
</span>
|
||||||
@@ -642,7 +653,7 @@ async function saveTriggerWords() {
|
|||||||
* Copy a trigger word to clipboard
|
* Copy a trigger word to clipboard
|
||||||
* @param {string} word - Word to copy
|
* @param {string} word - Word to copy
|
||||||
*/
|
*/
|
||||||
window.copyTriggerWord = async function(word) {
|
window.copyTriggerWord = async function (word) {
|
||||||
try {
|
try {
|
||||||
await copyToClipboard(word, 'Trigger word copied');
|
await copyToClipboard(word, 'Trigger word copied');
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
|
|||||||
@@ -60,14 +60,12 @@ export class AppCore {
|
|||||||
initTheme();
|
initTheme();
|
||||||
initBackToTop();
|
initBackToTop();
|
||||||
|
|
||||||
// Initialize the bulk manager and context menu only if not on recipes page
|
// Initialize the bulk manager and context menu
|
||||||
if (state.currentPageType !== 'recipes') {
|
|
||||||
bulkManager.initialize();
|
bulkManager.initialize();
|
||||||
|
|
||||||
// Initialize bulk context menu
|
// Initialize bulk context menu
|
||||||
const bulkContextMenu = new BulkContextMenu();
|
const bulkContextMenu = new BulkContextMenu();
|
||||||
bulkManager.setBulkContextMenu(bulkContextMenu);
|
bulkManager.setBulkContextMenu(bulkContextMenu);
|
||||||
}
|
|
||||||
|
|
||||||
// Initialize the example images manager
|
// Initialize the example images manager
|
||||||
exampleImagesManager.initialize();
|
exampleImagesManager.initialize();
|
||||||
@@ -84,10 +82,7 @@ export class AppCore {
|
|||||||
|
|
||||||
// Start onboarding if needed (after everything is initialized)
|
// Start onboarding if needed (after everything is initialized)
|
||||||
setTimeout(() => {
|
setTimeout(() => {
|
||||||
// Do not show onboarding if version-mismatch banner is visible
|
|
||||||
if (!bannerService.isBannerVisible('version-mismatch')) {
|
|
||||||
onboardingManager.start();
|
onboardingManager.start();
|
||||||
}
|
|
||||||
}, 1000); // Small delay to ensure all elements are rendered
|
}, 1000); // Small delay to ensure all elements are rendered
|
||||||
|
|
||||||
// Return the core instance for chaining
|
// Return the core instance for chaining
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ const AFDIAN_URL = 'https://afdian.com/a/pixelpawsai';
|
|||||||
const BANNER_HISTORY_KEY = 'banner_history';
|
const BANNER_HISTORY_KEY = 'banner_history';
|
||||||
const BANNER_HISTORY_VIEWED_AT_KEY = 'banner_history_viewed_at';
|
const BANNER_HISTORY_VIEWED_AT_KEY = 'banner_history_viewed_at';
|
||||||
const BANNER_HISTORY_LIMIT = 20;
|
const BANNER_HISTORY_LIMIT = 20;
|
||||||
const HISTORY_EXCLUDED_IDS = new Set(['version-mismatch']);
|
const HISTORY_EXCLUDED_IDS = new Set([]);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Banner Service for managing notification banners
|
* Banner Service for managing notification banners
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { showToast, copyToClipboard, sendLoraToWorkflow, buildLoraSyntax, getNSF
|
|||||||
import { updateCardsForBulkMode } from '../components/shared/ModelCard.js';
|
import { updateCardsForBulkMode } from '../components/shared/ModelCard.js';
|
||||||
import { modalManager } from './ModalManager.js';
|
import { modalManager } from './ModalManager.js';
|
||||||
import { getModelApiClient, resetAndReload } from '../api/modelApiFactory.js';
|
import { getModelApiClient, resetAndReload } from '../api/modelApiFactory.js';
|
||||||
|
import { RecipeSidebarApiClient } from '../api/recipeApi.js';
|
||||||
import { MODEL_TYPES, MODEL_CONFIG } from '../api/apiConfig.js';
|
import { MODEL_TYPES, MODEL_CONFIG } from '../api/apiConfig.js';
|
||||||
import { BASE_MODEL_CATEGORIES } from '../utils/constants.js';
|
import { BASE_MODEL_CATEGORIES } from '../utils/constants.js';
|
||||||
import { getPriorityTagSuggestions } from '../utils/priorityTagHelpers.js';
|
import { getPriorityTagSuggestions } from '../utils/priorityTagHelpers.js';
|
||||||
@@ -62,9 +63,22 @@ export class BulkManager {
|
|||||||
autoOrganize: true,
|
autoOrganize: true,
|
||||||
deleteAll: true,
|
deleteAll: true,
|
||||||
setContentRating: true
|
setContentRating: true
|
||||||
|
},
|
||||||
|
recipes: {
|
||||||
|
addTags: false,
|
||||||
|
sendToWorkflow: false,
|
||||||
|
copyAll: false,
|
||||||
|
refreshAll: false,
|
||||||
|
checkUpdates: false,
|
||||||
|
moveAll: true,
|
||||||
|
autoOrganize: false,
|
||||||
|
deleteAll: true,
|
||||||
|
setContentRating: false
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
this.recipeApiClient = null;
|
||||||
|
|
||||||
window.addEventListener('lm:priority-tags-updated', () => {
|
window.addEventListener('lm:priority-tags-updated', () => {
|
||||||
const container = document.querySelector('#bulkAddTagsModal .metadata-suggestions-container');
|
const container = document.querySelector('#bulkAddTagsModal .metadata-suggestions-container');
|
||||||
if (!container) {
|
if (!container) {
|
||||||
@@ -87,9 +101,6 @@ export class BulkManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
initialize() {
|
initialize() {
|
||||||
// Do not initialize on recipes page
|
|
||||||
if (state.currentPageType === 'recipes') return;
|
|
||||||
|
|
||||||
// Register with event manager for coordinated event handling
|
// Register with event manager for coordinated event handling
|
||||||
this.registerEventHandlers();
|
this.registerEventHandlers();
|
||||||
|
|
||||||
@@ -97,6 +108,23 @@ export class BulkManager {
|
|||||||
eventManager.setState('bulkMode', state.bulkMode || false);
|
eventManager.setState('bulkMode', state.bulkMode || false);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
getActiveApiClient() {
|
||||||
|
if (state.currentPageType === 'recipes') {
|
||||||
|
if (!this.recipeApiClient) {
|
||||||
|
this.recipeApiClient = new RecipeSidebarApiClient();
|
||||||
|
}
|
||||||
|
return this.recipeApiClient;
|
||||||
|
}
|
||||||
|
return getModelApiClient();
|
||||||
|
}
|
||||||
|
|
||||||
|
getCurrentDisplayConfig() {
|
||||||
|
if (state.currentPageType === 'recipes') {
|
||||||
|
return { displayName: 'Recipe' };
|
||||||
|
}
|
||||||
|
return MODEL_CONFIG[state.currentPageType] || { displayName: 'Model' };
|
||||||
|
}
|
||||||
|
|
||||||
setBulkContextMenu(bulkContextMenu) {
|
setBulkContextMenu(bulkContextMenu) {
|
||||||
this.bulkContextMenu = bulkContextMenu;
|
this.bulkContextMenu = bulkContextMenu;
|
||||||
}
|
}
|
||||||
@@ -240,7 +268,9 @@ export class BulkManager {
|
|||||||
// Update event manager state
|
// Update event manager state
|
||||||
eventManager.setState('bulkMode', state.bulkMode);
|
eventManager.setState('bulkMode', state.bulkMode);
|
||||||
|
|
||||||
|
if (this.bulkBtn) {
|
||||||
this.bulkBtn.classList.toggle('active', state.bulkMode);
|
this.bulkBtn.classList.toggle('active', state.bulkMode);
|
||||||
|
}
|
||||||
|
|
||||||
updateCardsForBulkMode(state.bulkMode);
|
updateCardsForBulkMode(state.bulkMode);
|
||||||
|
|
||||||
@@ -504,13 +534,13 @@ export class BulkManager {
|
|||||||
modalManager.closeModal('bulkDeleteModal');
|
modalManager.closeModal('bulkDeleteModal');
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const apiClient = getModelApiClient();
|
const apiClient = this.getActiveApiClient();
|
||||||
const filePaths = Array.from(state.selectedModels);
|
const filePaths = Array.from(state.selectedModels);
|
||||||
|
|
||||||
const result = await apiClient.bulkDeleteModels(filePaths);
|
const result = await apiClient.bulkDeleteModels(filePaths);
|
||||||
|
|
||||||
if (result.success) {
|
if (result.success) {
|
||||||
const currentConfig = MODEL_CONFIG[state.currentPageType];
|
const currentConfig = this.getCurrentDisplayConfig();
|
||||||
showToast('toast.models.deletedSuccessfully', {
|
showToast('toast.models.deletedSuccessfully', {
|
||||||
count: result.deleted_count,
|
count: result.deleted_count,
|
||||||
type: currentConfig.displayName.toLowerCase()
|
type: currentConfig.displayName.toLowerCase()
|
||||||
@@ -570,7 +600,7 @@ export class BulkManager {
|
|||||||
this.applySelectionState();
|
this.applySelectionState();
|
||||||
|
|
||||||
const newlySelected = state.selectedModels.size - oldCount;
|
const newlySelected = state.selectedModels.size - oldCount;
|
||||||
const currentConfig = MODEL_CONFIG[state.currentPageType];
|
const currentConfig = this.getCurrentDisplayConfig();
|
||||||
showToast('toast.models.selectedAdditional', {
|
showToast('toast.models.selectedAdditional', {
|
||||||
count: newlySelected,
|
count: newlySelected,
|
||||||
type: currentConfig.displayName.toLowerCase()
|
type: currentConfig.displayName.toLowerCase()
|
||||||
@@ -622,8 +652,7 @@ export class BulkManager {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const currentType = state.currentPageType;
|
const currentConfig = this.getCurrentDisplayConfig();
|
||||||
const currentConfig = MODEL_CONFIG[currentType] || MODEL_CONFIG[MODEL_TYPES.LORA];
|
|
||||||
const typeLabel = (currentConfig?.displayName || 'Model').toLowerCase();
|
const typeLabel = (currentConfig?.displayName || 'Model').toLowerCase();
|
||||||
|
|
||||||
const { ids: modelIds, missingCount } = this.collectSelectedModelIds();
|
const { ids: modelIds, missingCount } = this.collectSelectedModelIds();
|
||||||
@@ -969,7 +998,7 @@ export class BulkManager {
|
|||||||
modalManager.closeModal('bulkAddTagsModal');
|
modalManager.closeModal('bulkAddTagsModal');
|
||||||
|
|
||||||
if (successCount > 0) {
|
if (successCount > 0) {
|
||||||
const currentConfig = MODEL_CONFIG[state.currentPageType];
|
const currentConfig = this.getCurrentDisplayConfig();
|
||||||
const toastKey = mode === 'replace' ? 'toast.models.tagsReplacedSuccessfully' : 'toast.models.tagsAddedSuccessfully';
|
const toastKey = mode === 'replace' ? 'toast.models.tagsReplacedSuccessfully' : 'toast.models.tagsAddedSuccessfully';
|
||||||
showToast(toastKey, {
|
showToast(toastKey, {
|
||||||
count: successCount,
|
count: successCount,
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { showToast, updatePanelPositions } from '../utils/uiHelpers.js';
|
|||||||
import { getModelApiClient } from '../api/modelApiFactory.js';
|
import { getModelApiClient } from '../api/modelApiFactory.js';
|
||||||
import { removeStorageItem, setStorageItem, getStorageItem } from '../utils/storageHelpers.js';
|
import { removeStorageItem, setStorageItem, getStorageItem } from '../utils/storageHelpers.js';
|
||||||
import { MODEL_TYPE_DISPLAY_NAMES } from '../utils/constants.js';
|
import { MODEL_TYPE_DISPLAY_NAMES } from '../utils/constants.js';
|
||||||
|
import { translate } from '../utils/i18nHelpers.js';
|
||||||
|
|
||||||
export class FilterManager {
|
export class FilterManager {
|
||||||
constructor(options = {}) {
|
constructor(options = {}) {
|
||||||
@@ -131,6 +132,28 @@ export class FilterManager {
|
|||||||
this.applyTagElementState(tagEl, (this.filters.tags && this.filters.tags[tagName]) || 'none');
|
this.applyTagElementState(tagEl, (this.filters.tags && this.filters.tags[tagName]) || 'none');
|
||||||
tagsContainer.appendChild(tagEl);
|
tagsContainer.appendChild(tagEl);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Add "No tags" as a special filter at the end
|
||||||
|
const noTagsEl = document.createElement('div');
|
||||||
|
noTagsEl.className = 'filter-tag tag-filter special-tag';
|
||||||
|
const noTagsLabel = translate('header.filter.noTags', {}, 'No tags');
|
||||||
|
const noTagsKey = '__no_tags__';
|
||||||
|
noTagsEl.dataset.tag = noTagsKey;
|
||||||
|
noTagsEl.innerHTML = noTagsLabel;
|
||||||
|
|
||||||
|
noTagsEl.addEventListener('click', async () => {
|
||||||
|
const currentState = (this.filters.tags && this.filters.tags[noTagsKey]) || 'none';
|
||||||
|
const newState = this.getNextTriStateState(currentState);
|
||||||
|
this.setTagFilterState(noTagsKey, newState);
|
||||||
|
this.applyTagElementState(noTagsEl, newState);
|
||||||
|
|
||||||
|
this.updateActiveFiltersCount();
|
||||||
|
|
||||||
|
await this.applyFilters(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
this.applyTagElementState(noTagsEl, (this.filters.tags && this.filters.tags[noTagsKey]) || 'none');
|
||||||
|
tagsContainer.appendChild(noTagsEl);
|
||||||
}
|
}
|
||||||
|
|
||||||
initializeLicenseFilters() {
|
initializeLicenseFilters() {
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { state, getCurrentPageState } from '../state/index.js';
|
|||||||
import { modalManager } from './ModalManager.js';
|
import { modalManager } from './ModalManager.js';
|
||||||
import { bulkManager } from './BulkManager.js';
|
import { bulkManager } from './BulkManager.js';
|
||||||
import { getModelApiClient } from '../api/modelApiFactory.js';
|
import { getModelApiClient } from '../api/modelApiFactory.js';
|
||||||
|
import { RecipeSidebarApiClient } from '../api/recipeApi.js';
|
||||||
import { FolderTreeManager } from '../components/FolderTreeManager.js';
|
import { FolderTreeManager } from '../components/FolderTreeManager.js';
|
||||||
import { sidebarManager } from '../components/SidebarManager.js';
|
import { sidebarManager } from '../components/SidebarManager.js';
|
||||||
|
|
||||||
@@ -12,11 +13,22 @@ class MoveManager {
|
|||||||
this.bulkFilePaths = null;
|
this.bulkFilePaths = null;
|
||||||
this.folderTreeManager = new FolderTreeManager();
|
this.folderTreeManager = new FolderTreeManager();
|
||||||
this.initialized = false;
|
this.initialized = false;
|
||||||
|
this.recipeApiClient = null;
|
||||||
|
|
||||||
// Bind methods
|
// Bind methods
|
||||||
this.updateTargetPath = this.updateTargetPath.bind(this);
|
this.updateTargetPath = this.updateTargetPath.bind(this);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
_getApiClient(modelType = null) {
|
||||||
|
if (state.currentPageType === 'recipes') {
|
||||||
|
if (!this.recipeApiClient) {
|
||||||
|
this.recipeApiClient = new RecipeSidebarApiClient();
|
||||||
|
}
|
||||||
|
return this.recipeApiClient;
|
||||||
|
}
|
||||||
|
return getModelApiClient(modelType);
|
||||||
|
}
|
||||||
|
|
||||||
initializeEventListeners() {
|
initializeEventListeners() {
|
||||||
if (this.initialized) return;
|
if (this.initialized) return;
|
||||||
|
|
||||||
@@ -36,7 +48,7 @@ class MoveManager {
|
|||||||
this.currentFilePath = null;
|
this.currentFilePath = null;
|
||||||
this.bulkFilePaths = null;
|
this.bulkFilePaths = null;
|
||||||
|
|
||||||
const apiClient = getModelApiClient();
|
const apiClient = this._getApiClient(modelType);
|
||||||
const currentPageType = state.currentPageType;
|
const currentPageType = state.currentPageType;
|
||||||
const modelConfig = apiClient.apiConfig.config;
|
const modelConfig = apiClient.apiConfig.config;
|
||||||
|
|
||||||
@@ -121,7 +133,7 @@ class MoveManager {
|
|||||||
|
|
||||||
async initializeFolderTree() {
|
async initializeFolderTree() {
|
||||||
try {
|
try {
|
||||||
const apiClient = getModelApiClient();
|
const apiClient = this._getApiClient();
|
||||||
// Fetch unified folder tree
|
// Fetch unified folder tree
|
||||||
const treeData = await apiClient.fetchUnifiedFolderTree();
|
const treeData = await apiClient.fetchUnifiedFolderTree();
|
||||||
|
|
||||||
@@ -141,7 +153,7 @@ class MoveManager {
|
|||||||
updateTargetPath() {
|
updateTargetPath() {
|
||||||
const pathDisplay = document.getElementById('moveTargetPathDisplay');
|
const pathDisplay = document.getElementById('moveTargetPathDisplay');
|
||||||
const modelRoot = document.getElementById('moveModelRoot').value;
|
const modelRoot = document.getElementById('moveModelRoot').value;
|
||||||
const apiClient = getModelApiClient();
|
const apiClient = this._getApiClient();
|
||||||
const config = apiClient.apiConfig.config;
|
const config = apiClient.apiConfig.config;
|
||||||
|
|
||||||
let fullPath = modelRoot || `Select a ${config.displayName.toLowerCase()} root directory`;
|
let fullPath = modelRoot || `Select a ${config.displayName.toLowerCase()} root directory`;
|
||||||
@@ -158,7 +170,7 @@ class MoveManager {
|
|||||||
|
|
||||||
async moveModel() {
|
async moveModel() {
|
||||||
const selectedRoot = document.getElementById('moveModelRoot').value;
|
const selectedRoot = document.getElementById('moveModelRoot').value;
|
||||||
const apiClient = getModelApiClient();
|
const apiClient = this._getApiClient();
|
||||||
const config = apiClient.apiConfig.config;
|
const config = apiClient.apiConfig.config;
|
||||||
|
|
||||||
if (!selectedRoot) {
|
if (!selectedRoot) {
|
||||||
|
|||||||
@@ -4,8 +4,7 @@ import {
|
|||||||
setStorageItem,
|
setStorageItem,
|
||||||
getStoredVersionInfo,
|
getStoredVersionInfo,
|
||||||
setStoredVersionInfo,
|
setStoredVersionInfo,
|
||||||
isVersionMatch,
|
isVersionMatch
|
||||||
resetDismissedBanner
|
|
||||||
} from '../utils/storageHelpers.js';
|
} from '../utils/storageHelpers.js';
|
||||||
import { bannerService } from './BannerService.js';
|
import { bannerService } from './BannerService.js';
|
||||||
import { translate } from '../utils/i18nHelpers.js';
|
import { translate } from '../utils/i18nHelpers.js';
|
||||||
@@ -753,94 +752,14 @@ export class UpdateService {
|
|||||||
stored: getStoredVersionInfo()
|
stored: getStoredVersionInfo()
|
||||||
});
|
});
|
||||||
|
|
||||||
// Reset dismissed status for version mismatch banner
|
// Silently update stored version info as cache busting handles the resource updates
|
||||||
resetDismissedBanner('version-mismatch');
|
setStoredVersionInfo(this.currentVersionInfo);
|
||||||
|
|
||||||
// Register and show the version mismatch banner
|
|
||||||
this.registerVersionMismatchBanner();
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('Failed to check version info:', error);
|
console.error('Failed to check version info:', error);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
registerVersionMismatchBanner() {
|
|
||||||
// Get stored and current version for display
|
|
||||||
const storedVersion = getStoredVersionInfo() || translate('common.status.unknown');
|
|
||||||
const currentVersion = this.currentVersionInfo || translate('common.status.unknown');
|
|
||||||
|
|
||||||
bannerService.registerBanner('version-mismatch', {
|
|
||||||
id: 'version-mismatch',
|
|
||||||
title: translate('banners.versionMismatch.title', {}, 'Application Update Detected'),
|
|
||||||
content: translate('banners.versionMismatch.content', {
|
|
||||||
storedVersion,
|
|
||||||
currentVersion
|
|
||||||
}, `Your browser is running an outdated version of LoRA Manager (${storedVersion}). The server has been updated to version ${currentVersion}. Please refresh to ensure proper functionality.`),
|
|
||||||
actions: [
|
|
||||||
{
|
|
||||||
text: translate('banners.versionMismatch.refreshNow', {}, 'Refresh Now'),
|
|
||||||
icon: 'fas fa-sync',
|
|
||||||
action: 'hardRefresh',
|
|
||||||
type: 'primary'
|
|
||||||
}
|
|
||||||
],
|
|
||||||
dismissible: false,
|
|
||||||
priority: 10,
|
|
||||||
countdown: 15,
|
|
||||||
onRegister: (bannerElement) => {
|
|
||||||
// Add countdown element
|
|
||||||
const countdownEl = document.createElement('div');
|
|
||||||
countdownEl.className = 'banner-countdown';
|
|
||||||
countdownEl.innerHTML = `<span>${translate('banners.versionMismatch.refreshingIn', {}, 'Refreshing in')} <strong>15</strong> ${translate('banners.versionMismatch.seconds', {}, 'seconds')}...</span>`;
|
|
||||||
bannerElement.querySelector('.banner-content').appendChild(countdownEl);
|
|
||||||
|
|
||||||
// Start countdown
|
|
||||||
let seconds = 15;
|
|
||||||
const countdownInterval = setInterval(() => {
|
|
||||||
seconds--;
|
|
||||||
const strongEl = countdownEl.querySelector('strong');
|
|
||||||
if (strongEl) strongEl.textContent = seconds;
|
|
||||||
|
|
||||||
if (seconds <= 0) {
|
|
||||||
clearInterval(countdownInterval);
|
|
||||||
this.performHardRefresh();
|
|
||||||
}
|
|
||||||
}, 1000);
|
|
||||||
|
|
||||||
// Store interval ID for cleanup
|
|
||||||
bannerElement.dataset.countdownInterval = countdownInterval;
|
|
||||||
|
|
||||||
// Add action button event handler
|
|
||||||
const actionBtn = bannerElement.querySelector('.banner-action[data-action="hardRefresh"]');
|
|
||||||
if (actionBtn) {
|
|
||||||
actionBtn.addEventListener('click', (e) => {
|
|
||||||
e.preventDefault();
|
|
||||||
clearInterval(countdownInterval);
|
|
||||||
this.performHardRefresh();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
},
|
|
||||||
onRemove: (bannerElement) => {
|
|
||||||
// Clear any existing interval
|
|
||||||
const intervalId = bannerElement.dataset.countdownInterval;
|
|
||||||
if (intervalId) {
|
|
||||||
clearInterval(parseInt(intervalId));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
performHardRefresh() {
|
|
||||||
// Update stored version info before refreshing
|
|
||||||
setStoredVersionInfo(this.currentVersionInfo);
|
|
||||||
|
|
||||||
// Force a hard refresh by adding cache-busting parameter
|
|
||||||
const cacheBuster = new Date().getTime();
|
|
||||||
window.location.href = window.location.pathname +
|
|
||||||
(window.location.search ? window.location.search + '&' : '?') +
|
|
||||||
`cache=${cacheBuster}`;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create and export singleton instance
|
// Create and export singleton instance
|
||||||
|
|||||||
@@ -66,6 +66,10 @@ export class DownloadManager {
|
|||||||
completeMetadata.checkpoint = checkpointMetadata;
|
completeMetadata.checkpoint = checkpointMetadata;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (this.importManager.recipeData && this.importManager.recipeData.extension) {
|
||||||
|
formData.append('extension', this.importManager.recipeData.extension);
|
||||||
|
}
|
||||||
|
|
||||||
// Add source_path to metadata to track where the recipe was imported from
|
// Add source_path to metadata to track where the recipe was imported from
|
||||||
if (this.importManager.importMode === 'url') {
|
if (this.importManager.importMode === 'url') {
|
||||||
const urlInput = document.getElementById('imageUrlInput');
|
const urlInput = document.getElementById('imageUrlInput');
|
||||||
@@ -211,7 +215,7 @@ export class DownloadManager {
|
|||||||
currentLoraProgress = 0;
|
currentLoraProgress = 0;
|
||||||
|
|
||||||
// Initial status update for new LoRA
|
// Initial status update for new LoRA
|
||||||
this.importManager.loadingManager.setStatus(translate('recipes.controls.import.startingDownload', { current: i+1, total: this.importManager.downloadableLoRAs.length }, `Starting download for LoRA ${i+1}/${this.importManager.downloadableLoRAs.length}`));
|
this.importManager.loadingManager.setStatus(translate('recipes.controls.import.startingDownload', { current: i + 1, total: this.importManager.downloadableLoRAs.length }, `Starting download for LoRA ${i + 1}/${this.importManager.downloadableLoRAs.length}`));
|
||||||
updateProgress(0, completedDownloads, lora.name);
|
updateProgress(0, completedDownloads, lora.name);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
|||||||
@@ -36,7 +36,7 @@ export class RecipeDataManager {
|
|||||||
|
|
||||||
// Set up click handler to select all text for easy editing
|
// Set up click handler to select all text for easy editing
|
||||||
if (!recipeName.hasSelectAllHandler) {
|
if (!recipeName.hasSelectAllHandler) {
|
||||||
recipeName.addEventListener('click', function() {
|
recipeName.addEventListener('click', function () {
|
||||||
this.select();
|
this.select();
|
||||||
});
|
});
|
||||||
recipeName.hasSelectAllHandler = true;
|
recipeName.hasSelectAllHandler = true;
|
||||||
@@ -50,7 +50,7 @@ export class RecipeDataManager {
|
|||||||
|
|
||||||
// Always set up click handler for easy editing if not already set
|
// Always set up click handler for easy editing if not already set
|
||||||
if (!recipeName.hasSelectAllHandler) {
|
if (!recipeName.hasSelectAllHandler) {
|
||||||
recipeName.addEventListener('click', function() {
|
recipeName.addEventListener('click', function () {
|
||||||
this.select();
|
this.select();
|
||||||
});
|
});
|
||||||
recipeName.hasSelectAllHandler = true;
|
recipeName.hasSelectAllHandler = true;
|
||||||
@@ -67,16 +67,27 @@ export class RecipeDataManager {
|
|||||||
};
|
};
|
||||||
reader.readAsDataURL(this.importManager.recipeImage);
|
reader.readAsDataURL(this.importManager.recipeImage);
|
||||||
} else if (this.importManager.recipeData && this.importManager.recipeData.image_base64) {
|
} else if (this.importManager.recipeData && this.importManager.recipeData.image_base64) {
|
||||||
// For URL mode - use the base64 image data returned from the backend
|
// For URL mode - use the base64 data returned from the backend
|
||||||
|
if (this.importManager.recipeData.is_video) {
|
||||||
|
const mimeType = this.importManager.recipeData.extension === '.webm' ? 'video/webm' : 'video/mp4';
|
||||||
|
imagePreview.innerHTML = `<video src="data:${mimeType};base64,${this.importManager.recipeData.image_base64}" controls autoplay loop muted class="recipe-preview-video"></video>`;
|
||||||
|
} else {
|
||||||
imagePreview.innerHTML = `<img src="data:image/jpeg;base64,${this.importManager.recipeData.image_base64}" alt="${translate('recipes.controls.import.recipePreviewAlt', {}, 'Recipe preview')}">`;
|
imagePreview.innerHTML = `<img src="data:image/jpeg;base64,${this.importManager.recipeData.image_base64}" alt="${translate('recipes.controls.import.recipePreviewAlt', {}, 'Recipe preview')}">`;
|
||||||
|
}
|
||||||
} else if (this.importManager.importMode === 'url') {
|
} else if (this.importManager.importMode === 'url') {
|
||||||
// Fallback for URL mode if no base64 data
|
// Fallback for URL mode if no base64 data
|
||||||
const urlInput = document.getElementById('imageUrlInput');
|
const urlInput = document.getElementById('imageUrlInput');
|
||||||
if (urlInput && urlInput.value) {
|
if (urlInput && urlInput.value) {
|
||||||
|
const url = urlInput.value.toLowerCase();
|
||||||
|
if (url.endsWith('.mp4') || url.endsWith('.webm')) {
|
||||||
|
const mimeType = url.endsWith('.webm') ? 'video/webm' : 'video/mp4';
|
||||||
|
imagePreview.innerHTML = `<video src="${urlInput.value}" controls autoplay loop muted class="recipe-preview-video"></video>`;
|
||||||
|
} else {
|
||||||
imagePreview.innerHTML = `<img src="${urlInput.value}" alt="${translate('recipes.controls.import.recipePreviewAlt', {}, 'Recipe preview')}" crossorigin="anonymous">`;
|
imagePreview.innerHTML = `<img src="${urlInput.value}" alt="${translate('recipes.controls.import.recipePreviewAlt', {}, 'Recipe preview')}" crossorigin="anonymous">`;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Update LoRA count information
|
// Update LoRA count information
|
||||||
const totalLoras = this.importManager.recipeData.loras.length;
|
const totalLoras = this.importManager.recipeData.loras.length;
|
||||||
|
|||||||
@@ -2,18 +2,47 @@
|
|||||||
import { appCore } from './core.js';
|
import { appCore } from './core.js';
|
||||||
import { ImportManager } from './managers/ImportManager.js';
|
import { ImportManager } from './managers/ImportManager.js';
|
||||||
import { RecipeModal } from './components/RecipeModal.js';
|
import { RecipeModal } from './components/RecipeModal.js';
|
||||||
import { getCurrentPageState } from './state/index.js';
|
import { state, getCurrentPageState } from './state/index.js';
|
||||||
import { getSessionItem, removeSessionItem } from './utils/storageHelpers.js';
|
import { getSessionItem, removeSessionItem } from './utils/storageHelpers.js';
|
||||||
import { RecipeContextMenu } from './components/ContextMenu/index.js';
|
import { RecipeContextMenu } from './components/ContextMenu/index.js';
|
||||||
import { DuplicatesManager } from './components/DuplicatesManager.js';
|
import { DuplicatesManager } from './components/DuplicatesManager.js';
|
||||||
import { refreshVirtualScroll } from './utils/infiniteScroll.js';
|
import { refreshVirtualScroll } from './utils/infiniteScroll.js';
|
||||||
import { refreshRecipes } from './api/recipeApi.js';
|
import { refreshRecipes, RecipeSidebarApiClient } from './api/recipeApi.js';
|
||||||
|
import { sidebarManager } from './components/SidebarManager.js';
|
||||||
|
|
||||||
|
class RecipePageControls {
|
||||||
|
constructor() {
|
||||||
|
this.pageType = 'recipes';
|
||||||
|
this.pageState = getCurrentPageState();
|
||||||
|
this.sidebarApiClient = new RecipeSidebarApiClient();
|
||||||
|
}
|
||||||
|
|
||||||
|
async resetAndReload() {
|
||||||
|
refreshVirtualScroll();
|
||||||
|
}
|
||||||
|
|
||||||
|
async refreshModels(fullRebuild = false) {
|
||||||
|
if (fullRebuild) {
|
||||||
|
await refreshRecipes();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
refreshVirtualScroll();
|
||||||
|
}
|
||||||
|
|
||||||
|
getSidebarApiClient() {
|
||||||
|
return this.sidebarApiClient;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
class RecipeManager {
|
class RecipeManager {
|
||||||
constructor() {
|
constructor() {
|
||||||
// Get page state
|
// Get page state
|
||||||
this.pageState = getCurrentPageState();
|
this.pageState = getCurrentPageState();
|
||||||
|
|
||||||
|
// Page controls for shared sidebar behaviors
|
||||||
|
this.pageControls = new RecipePageControls();
|
||||||
|
|
||||||
// Initialize ImportManager
|
// Initialize ImportManager
|
||||||
this.importManager = new ImportManager();
|
this.importManager = new ImportManager();
|
||||||
|
|
||||||
@@ -52,10 +81,23 @@ class RecipeManager {
|
|||||||
// Expose necessary functions to the page
|
// Expose necessary functions to the page
|
||||||
this._exposeGlobalFunctions();
|
this._exposeGlobalFunctions();
|
||||||
|
|
||||||
|
// Initialize sidebar navigation
|
||||||
|
await this._initSidebar();
|
||||||
|
|
||||||
// Initialize common page features
|
// Initialize common page features
|
||||||
appCore.initializePageFeatures();
|
appCore.initializePageFeatures();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async _initSidebar() {
|
||||||
|
try {
|
||||||
|
sidebarManager.setHostPageControls(this.pageControls);
|
||||||
|
const shouldShowSidebar = state?.global?.settings?.show_folder_sidebar !== false;
|
||||||
|
await sidebarManager.setSidebarEnabled(shouldShowSidebar);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to initialize recipe sidebar:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
_initSearchOptions() {
|
_initSearchOptions() {
|
||||||
// Ensure recipes search options are properly initialized
|
// Ensure recipes search options are properly initialized
|
||||||
if (!this.pageState.searchOptions) {
|
if (!this.pageState.searchOptions) {
|
||||||
@@ -63,7 +105,9 @@ class RecipeManager {
|
|||||||
title: true, // Recipe title
|
title: true, // Recipe title
|
||||||
tags: true, // Recipe tags
|
tags: true, // Recipe tags
|
||||||
loraName: true, // LoRA file name
|
loraName: true, // LoRA file name
|
||||||
loraModel: true // LoRA model name
|
loraModel: true, // LoRA model name
|
||||||
|
prompt: true, // Prompt search
|
||||||
|
recursive: true
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -172,11 +216,26 @@ class RecipeManager {
|
|||||||
// Sort select
|
// Sort select
|
||||||
const sortSelect = document.getElementById('sortSelect');
|
const sortSelect = document.getElementById('sortSelect');
|
||||||
if (sortSelect) {
|
if (sortSelect) {
|
||||||
|
sortSelect.value = this.pageState.sortBy || 'date:desc';
|
||||||
sortSelect.addEventListener('change', () => {
|
sortSelect.addEventListener('change', () => {
|
||||||
this.pageState.sortBy = sortSelect.value;
|
this.pageState.sortBy = sortSelect.value;
|
||||||
refreshVirtualScroll();
|
refreshVirtualScroll();
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const bulkButton = document.querySelector('[data-action="bulk"]');
|
||||||
|
if (bulkButton) {
|
||||||
|
bulkButton.addEventListener('click', () => window.bulkManager?.toggleBulkMode());
|
||||||
|
}
|
||||||
|
|
||||||
|
const favoriteFilterBtn = document.getElementById('favoriteFilterBtn');
|
||||||
|
if (favoriteFilterBtn) {
|
||||||
|
favoriteFilterBtn.addEventListener('click', () => {
|
||||||
|
this.pageState.showFavoritesOnly = !this.pageState.showFavoritesOnly;
|
||||||
|
favoriteFilterBtn.classList.toggle('active', this.pageState.showFavoritesOnly);
|
||||||
|
refreshVirtualScroll();
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// This method is kept for compatibility but now uses virtual scrolling
|
// This method is kept for compatibility but now uses virtual scrolling
|
||||||
|
|||||||
@@ -95,13 +95,15 @@ export const state = {
|
|||||||
currentPage: 1,
|
currentPage: 1,
|
||||||
isLoading: false,
|
isLoading: false,
|
||||||
hasMore: true,
|
hasMore: true,
|
||||||
sortBy: 'date',
|
sortBy: 'date:desc',
|
||||||
|
activeFolder: getStorageItem('recipes_activeFolder'),
|
||||||
searchManager: null,
|
searchManager: null,
|
||||||
searchOptions: {
|
searchOptions: {
|
||||||
title: true,
|
title: true,
|
||||||
tags: true,
|
tags: true,
|
||||||
loraName: true,
|
loraName: true,
|
||||||
loraModel: true
|
loraModel: true,
|
||||||
|
recursive: getStorageItem('recipes_recursiveSearch', true),
|
||||||
},
|
},
|
||||||
filters: {
|
filters: {
|
||||||
baseModel: [],
|
baseModel: [],
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ export class VirtualScroller {
|
|||||||
this.scrollContainer = options.scrollContainer || this.containerElement;
|
this.scrollContainer = options.scrollContainer || this.containerElement;
|
||||||
this.batchSize = options.batchSize || 50;
|
this.batchSize = options.batchSize || 50;
|
||||||
this.pageSize = options.pageSize || 100;
|
this.pageSize = options.pageSize || 100;
|
||||||
this.itemAspectRatio = 896/1152; // Aspect ratio of cards
|
this.itemAspectRatio = 896 / 1152; // Aspect ratio of cards
|
||||||
this.rowGap = options.rowGap || 20; // Add vertical gap between rows (default 20px)
|
this.rowGap = options.rowGap || 20; // Add vertical gap between rows (default 20px)
|
||||||
|
|
||||||
// Add container padding properties
|
// Add container padding properties
|
||||||
@@ -716,7 +716,7 @@ export class VirtualScroller {
|
|||||||
// Utility method for debouncing
|
// Utility method for debouncing
|
||||||
debounce(func, wait) {
|
debounce(func, wait) {
|
||||||
let timeout;
|
let timeout;
|
||||||
return function(...args) {
|
return function (...args) {
|
||||||
const context = this;
|
const context = this;
|
||||||
clearTimeout(timeout);
|
clearTimeout(timeout);
|
||||||
timeout = setTimeout(() => func.apply(context, args), wait);
|
timeout = setTimeout(() => func.apply(context, args), wait);
|
||||||
@@ -783,12 +783,13 @@ export class VirtualScroller {
|
|||||||
deepMerge(target, source) {
|
deepMerge(target, source) {
|
||||||
if (!source || !target) return target;
|
if (!source || !target) return target;
|
||||||
|
|
||||||
|
// Initialize result with a copy of target
|
||||||
const result = { ...target };
|
const result = { ...target };
|
||||||
|
|
||||||
// Only iterate over keys that exist in target
|
if (!source) return result;
|
||||||
Object.keys(target).forEach(key => {
|
|
||||||
// Check if source has this key
|
// Iterate over all keys in the source object
|
||||||
if (source.hasOwnProperty(key)) {
|
Object.keys(source).forEach(key => {
|
||||||
const targetValue = target[key];
|
const targetValue = target[key];
|
||||||
const sourceValue = source[key];
|
const sourceValue = source[key];
|
||||||
|
|
||||||
@@ -801,13 +802,11 @@ export class VirtualScroller {
|
|||||||
typeof sourceValue === 'object' &&
|
typeof sourceValue === 'object' &&
|
||||||
!Array.isArray(sourceValue)
|
!Array.isArray(sourceValue)
|
||||||
) {
|
) {
|
||||||
result[key] = this.deepMerge(targetValue, sourceValue);
|
result[key] = this.deepMerge(targetValue || {}, sourceValue);
|
||||||
} else {
|
} else {
|
||||||
// For primitive types, arrays, or null, use the value from source
|
// Otherwise update with source value (includes primitives, arrays, and new keys)
|
||||||
result[key] = sourceValue;
|
result[key] = sourceValue;
|
||||||
}
|
}
|
||||||
}
|
|
||||||
// If source does not have this key, keep the original value from target
|
|
||||||
});
|
});
|
||||||
|
|
||||||
return result;
|
return result;
|
||||||
|
|||||||
@@ -4,8 +4,8 @@
|
|||||||
<head>
|
<head>
|
||||||
<title>{% block title %}{{ t('header.appTitle') }}{% endblock %}</title>
|
<title>{% block title %}{{ t('header.appTitle') }}{% endblock %}</title>
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||||
<link rel="stylesheet" href="/loras_static/css/style.css">
|
<link rel="stylesheet" href="/loras_static/css/style.css?v={{ version }}">
|
||||||
<link rel="stylesheet" href="/loras_static/css/onboarding.css">
|
<link rel="stylesheet" href="/loras_static/css/onboarding.css?v={{ version }}">
|
||||||
<link rel="stylesheet" href="/loras_static/vendor/flag-icons/flag-icons.min.css">
|
<link rel="stylesheet" href="/loras_static/vendor/flag-icons/flag-icons.min.css">
|
||||||
{% block page_css %}{% endblock %}
|
{% block page_css %}{% endblock %}
|
||||||
<link rel="stylesheet" href="/loras_static/vendor/font-awesome/css/all.min.css"
|
<link rel="stylesheet" href="/loras_static/vendor/font-awesome/css/all.min.css"
|
||||||
@@ -98,7 +98,7 @@
|
|||||||
|
|
||||||
{% if is_initializing %}
|
{% if is_initializing %}
|
||||||
<!-- Load initialization JavaScript -->
|
<!-- Load initialization JavaScript -->
|
||||||
<script type="module" src="/loras_static/js/components/initialization.js"></script>
|
<script type="module" src="/loras_static/js/components/initialization.js?v={{ version }}"></script>
|
||||||
{% else %}
|
{% else %}
|
||||||
{% block main_script %}{% endblock %}
|
{% block main_script %}{% endblock %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|||||||
@@ -40,5 +40,5 @@
|
|||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block main_script %}
|
{% block main_script %}
|
||||||
<script type="module" src="/loras_static/js/checkpoints.js"></script>
|
<script type="module" src="/loras_static/js/checkpoints.js?v={{ version }}"></script>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|||||||
@@ -102,6 +102,9 @@
|
|||||||
<div class="context-menu-item" data-action="cleanup-example-images-folders">
|
<div class="context-menu-item" data-action="cleanup-example-images-folders">
|
||||||
<i class="fas fa-trash-restore"></i> <span>{{ t('globalContextMenu.cleanupExampleImages.label') }}</span>
|
<i class="fas fa-trash-restore"></i> <span>{{ t('globalContextMenu.cleanupExampleImages.label') }}</span>
|
||||||
</div>
|
</div>
|
||||||
|
<div class="context-menu-item" data-action="repair-recipes">
|
||||||
|
<i class="fas fa-tools"></i> <span>{{ t('globalContextMenu.repairRecipes.label') }}</span>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div id="nsfwLevelSelector" class="nsfw-level-selector">
|
<div id="nsfwLevelSelector" class="nsfw-level-selector">
|
||||||
@@ -110,7 +113,8 @@
|
|||||||
<button class="close-nsfw-selector"><i class="fas fa-times"></i></button>
|
<button class="close-nsfw-selector"><i class="fas fa-times"></i></button>
|
||||||
</div>
|
</div>
|
||||||
<div class="nsfw-level-content">
|
<div class="nsfw-level-content">
|
||||||
<div class="current-level"><span>{{ t('modals.contentRating.current') }}:</span> <span id="currentNSFWLevel">{{ t('common.status.unknown') }}</span></div>
|
<div class="current-level"><span>{{ t('modals.contentRating.current') }}:</span> <span id="currentNSFWLevel">{{
|
||||||
|
t('common.status.unknown') }}</span></div>
|
||||||
<div class="nsfw-level-options">
|
<div class="nsfw-level-options">
|
||||||
<button class="nsfw-level-btn" data-level="1">{{ t('modals.contentRating.levels.pg') }}</button>
|
<button class="nsfw-level-btn" data-level="1">{{ t('modals.contentRating.levels.pg') }}</button>
|
||||||
<button class="nsfw-level-btn" data-level="2">{{ t('modals.contentRating.levels.pg13') }}</button>
|
<button class="nsfw-level-btn" data-level="2">{{ t('modals.contentRating.levels.pg13') }}</button>
|
||||||
|
|||||||
@@ -19,22 +19,27 @@
|
|||||||
{% set current_page = 'loras' %}
|
{% set current_page = 'loras' %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% set search_disabled = current_page == 'statistics' %}
|
{% set search_disabled = current_page == 'statistics' %}
|
||||||
{% set search_placeholder_key = 'header.search.notAvailable' if search_disabled else 'header.search.placeholders.' ~ current_page %}
|
{% set search_placeholder_key = 'header.search.notAvailable' if search_disabled else 'header.search.placeholders.' ~
|
||||||
|
current_page %}
|
||||||
{% set header_search_class = 'header-search disabled' if search_disabled else 'header-search' %}
|
{% set header_search_class = 'header-search disabled' if search_disabled else 'header-search' %}
|
||||||
<nav class="main-nav">
|
<nav class="main-nav">
|
||||||
<a href="/loras" class="nav-item{% if current_path == '/loras' %} active{% endif %}" id="lorasNavItem">
|
<a href="/loras" class="nav-item{% if current_path == '/loras' %} active{% endif %}" id="lorasNavItem">
|
||||||
<i class="fas fa-layer-group"></i> <span>{{ t('header.navigation.loras') }}</span>
|
<i class="fas fa-layer-group"></i> <span>{{ t('header.navigation.loras') }}</span>
|
||||||
</a>
|
</a>
|
||||||
<a href="/loras/recipes" class="nav-item{% if current_path.startswith('/loras/recipes') %} active{% endif %}" id="recipesNavItem">
|
<a href="/loras/recipes" class="nav-item{% if current_path.startswith('/loras/recipes') %} active{% endif %}"
|
||||||
|
id="recipesNavItem">
|
||||||
<i class="fas fa-book-open"></i> <span>{{ t('header.navigation.recipes') }}</span>
|
<i class="fas fa-book-open"></i> <span>{{ t('header.navigation.recipes') }}</span>
|
||||||
</a>
|
</a>
|
||||||
<a href="/checkpoints" class="nav-item{% if current_path.startswith('/checkpoints') %} active{% endif %}" id="checkpointsNavItem">
|
<a href="/checkpoints" class="nav-item{% if current_path.startswith('/checkpoints') %} active{% endif %}"
|
||||||
|
id="checkpointsNavItem">
|
||||||
<i class="fas fa-check-circle"></i> <span>{{ t('header.navigation.checkpoints') }}</span>
|
<i class="fas fa-check-circle"></i> <span>{{ t('header.navigation.checkpoints') }}</span>
|
||||||
</a>
|
</a>
|
||||||
<a href="/embeddings" class="nav-item{% if current_path.startswith('/embeddings') %} active{% endif %}" id="embeddingsNavItem">
|
<a href="/embeddings" class="nav-item{% if current_path.startswith('/embeddings') %} active{% endif %}"
|
||||||
|
id="embeddingsNavItem">
|
||||||
<i class="fas fa-code"></i> <span>{{ t('header.navigation.embeddings') }}</span>
|
<i class="fas fa-code"></i> <span>{{ t('header.navigation.embeddings') }}</span>
|
||||||
</a>
|
</a>
|
||||||
<a href="/statistics" class="nav-item{% if current_path.startswith('/statistics') %} active{% endif %}" id="statisticsNavItem">
|
<a href="/statistics" class="nav-item{% if current_path.startswith('/statistics') %} active{% endif %}"
|
||||||
|
id="statisticsNavItem">
|
||||||
<i class="fas fa-chart-bar"></i> <span>{{ t('header.navigation.statistics') }}</span>
|
<i class="fas fa-chart-bar"></i> <span>{{ t('header.navigation.statistics') }}</span>
|
||||||
</a>
|
</a>
|
||||||
</nav>
|
</nav>
|
||||||
@@ -42,12 +47,15 @@
|
|||||||
<!-- Context-aware search container -->
|
<!-- Context-aware search container -->
|
||||||
<div class="{{ header_search_class }}" id="headerSearch">
|
<div class="{{ header_search_class }}" id="headerSearch">
|
||||||
<div class="search-container">
|
<div class="search-container">
|
||||||
<input type="text" id="searchInput" placeholder="{{ t(search_placeholder_key) }}"{% if search_disabled %} disabled{% endif %} />
|
<input type="text" id="searchInput" placeholder="{{ t(search_placeholder_key) }}" {% if search_disabled %}
|
||||||
|
disabled{% endif %} />
|
||||||
<i class="fas fa-search search-icon"></i>
|
<i class="fas fa-search search-icon"></i>
|
||||||
<button class="search-options-toggle" id="searchOptionsToggle" title="{{ t('header.search.options') }}"{% if search_disabled %} disabled aria-disabled="true"{% endif %}>
|
<button class="search-options-toggle" id="searchOptionsToggle" title="{{ t('header.search.options') }}" {% if
|
||||||
|
search_disabled %} disabled aria-disabled="true" {% endif %}>
|
||||||
<i class="fas fa-sliders-h"></i>
|
<i class="fas fa-sliders-h"></i>
|
||||||
</button>
|
</button>
|
||||||
<button class="search-filter-toggle" id="filterButton" title="{{ t('header.filter.title') }}"{% if search_disabled %} disabled aria-disabled="true"{% endif %}>
|
<button class="search-filter-toggle" id="filterButton" title="{{ t('header.filter.title') }}" {% if
|
||||||
|
search_disabled %} disabled aria-disabled="true" {% endif %}>
|
||||||
<i class="fas fa-filter"></i>
|
<i class="fas fa-filter"></i>
|
||||||
<span class="filter-badge" id="activeFiltersCount" style="display: none">0</span>
|
<span class="filter-badge" id="activeFiltersCount" style="display: none">0</span>
|
||||||
</button>
|
</button>
|
||||||
@@ -97,6 +105,7 @@
|
|||||||
<div class="search-option-tag active" data-option="tags">{{ t('header.search.filters.tags') }}</div>
|
<div class="search-option-tag active" data-option="tags">{{ t('header.search.filters.tags') }}</div>
|
||||||
<div class="search-option-tag active" data-option="loraName">{{ t('header.search.filters.loraName') }}</div>
|
<div class="search-option-tag active" data-option="loraName">{{ t('header.search.filters.loraName') }}</div>
|
||||||
<div class="search-option-tag active" data-option="loraModel">{{ t('header.search.filters.loraModel') }}</div>
|
<div class="search-option-tag active" data-option="loraModel">{{ t('header.search.filters.loraModel') }}</div>
|
||||||
|
<div class="search-option-tag active" data-option="prompt">{{ t('header.search.filters.prompt') }}</div>
|
||||||
{% elif request.path == '/checkpoints' %}
|
{% elif request.path == '/checkpoints' %}
|
||||||
<div class="search-option-tag active" data-option="filename">{{ t('header.search.filters.filename') }}</div>
|
<div class="search-option-tag active" data-option="filename">{{ t('header.search.filters.filename') }}</div>
|
||||||
<div class="search-option-tag active" data-option="modelname">{{ t('header.search.filters.modelname') }}</div>
|
<div class="search-option-tag active" data-option="modelname">{{ t('header.search.filters.modelname') }}</div>
|
||||||
|
|||||||
@@ -40,5 +40,5 @@
|
|||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block main_script %}
|
{% block main_script %}
|
||||||
<script type="module" src="/loras_static/js/embeddings.js"></script>
|
<script type="module" src="/loras_static/js/embeddings.js?v={{ version }}"></script>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|||||||
@@ -24,6 +24,6 @@
|
|||||||
|
|
||||||
{% block main_script %}
|
{% block main_script %}
|
||||||
{% if not is_initializing %}
|
{% if not is_initializing %}
|
||||||
<script type="module" src="/loras_static/js/loras.js"></script>
|
<script type="module" src="/loras_static/js/loras.js?v={{ version }}"></script>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
@@ -4,9 +4,9 @@
|
|||||||
{% block page_id %}recipes{% endblock %}
|
{% block page_id %}recipes{% endblock %}
|
||||||
|
|
||||||
{% block page_css %}
|
{% block page_css %}
|
||||||
<link rel="stylesheet" href="/loras_static/css/components/card.css">
|
<link rel="stylesheet" href="/loras_static/css/components/card.css?v={{ version }}">
|
||||||
<link rel="stylesheet" href="/loras_static/css/components/recipe-modal.css">
|
<link rel="stylesheet" href="/loras_static/css/components/recipe-modal.css?v={{ version }}">
|
||||||
<link rel="stylesheet" href="/loras_static/css/components/import-modal.css">
|
<link rel="stylesheet" href="/loras_static/css/components/import-modal.css?v={{ version }}">
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block additional_components %}
|
{% block additional_components %}
|
||||||
@@ -15,17 +15,29 @@
|
|||||||
|
|
||||||
<div id="recipeContextMenu" class="context-menu" style="display: none;">
|
<div id="recipeContextMenu" class="context-menu" style="display: none;">
|
||||||
<!-- <div class="context-menu-item" data-action="details"><i class="fas fa-info-circle"></i> View Details</div> -->
|
<!-- <div class="context-menu-item" data-action="details"><i class="fas fa-info-circle"></i> View Details</div> -->
|
||||||
<div class="context-menu-item" data-action="share"><i class="fas fa-share-alt"></i> {{ t('loras.contextMenu.shareRecipe') }}</div>
|
<div class="context-menu-item" data-action="share"><i class="fas fa-share-alt"></i> {{
|
||||||
<div class="context-menu-item" data-action="copy"><i class="fas fa-copy"></i> {{ t('loras.contextMenu.copyRecipeSyntax') }}</div>
|
t('loras.contextMenu.shareRecipe') }}</div>
|
||||||
<div class="context-menu-item" data-action="sendappend"><i class="fas fa-paper-plane"></i> {{ t('loras.contextMenu.sendToWorkflowAppend') }}</div>
|
<div class="context-menu-item" data-action="copy"><i class="fas fa-copy"></i> {{
|
||||||
<div class="context-menu-item" data-action="sendreplace"><i class="fas fa-exchange-alt"></i> {{ t('loras.contextMenu.sendToWorkflowReplace') }}</div>
|
t('loras.contextMenu.copyRecipeSyntax') }}</div>
|
||||||
<div class="context-menu-item" data-action="viewloras"><i class="fas fa-layer-group"></i> {{ t('loras.contextMenu.viewAllLoras') }}</div>
|
<div class="context-menu-item" data-action="sendappend"><i class="fas fa-paper-plane"></i> {{
|
||||||
<div class="context-menu-item download-missing-item" data-action="download-missing"><i class="fas fa-download"></i> {{ t('loras.contextMenu.downloadMissingLoras') }}</div>
|
t('loras.contextMenu.sendToWorkflowAppend') }}</div>
|
||||||
|
<div class="context-menu-item" data-action="sendreplace"><i class="fas fa-exchange-alt"></i> {{
|
||||||
|
t('loras.contextMenu.sendToWorkflowReplace') }}</div>
|
||||||
|
<div class="context-menu-item" data-action="viewloras"><i class="fas fa-layer-group"></i> {{
|
||||||
|
t('loras.contextMenu.viewAllLoras') }}</div>
|
||||||
|
<div class="context-menu-item download-missing-item" data-action="download-missing"><i class="fas fa-download"></i>
|
||||||
|
{{ t('loras.contextMenu.downloadMissingLoras') }}</div>
|
||||||
<div class="context-menu-item" data-action="set-nsfw">
|
<div class="context-menu-item" data-action="set-nsfw">
|
||||||
<i class="fas fa-exclamation-triangle"></i> {{ t('loras.contextMenu.setContentRating') }}
|
<i class="fas fa-exclamation-triangle"></i> {{ t('loras.contextMenu.setContentRating') }}
|
||||||
</div>
|
</div>
|
||||||
|
<div class="context-menu-item" data-action="repair">
|
||||||
|
<i class="fas fa-tools"></i> {{ t('loras.contextMenu.repairMetadata') }}
|
||||||
|
</div>
|
||||||
<div class="context-menu-separator"></div>
|
<div class="context-menu-separator"></div>
|
||||||
<div class="context-menu-item delete-item" data-action="delete"><i class="fas fa-trash"></i> {{ t('loras.contextMenu.deleteRecipe') }}</div>
|
<div class="context-menu-item" data-action="move"><i class="fas fa-folder-open"></i> {{
|
||||||
|
t('loras.contextMenu.moveToFolder') }}</div>
|
||||||
|
<div class="context-menu-item delete-item" data-action="delete"><i class="fas fa-trash"></i> {{
|
||||||
|
t('loras.contextMenu.deleteRecipe') }}</div>
|
||||||
</div>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
@@ -34,31 +46,101 @@
|
|||||||
{% block init_check_url %}/api/recipes?page=1&page_size=1{% endblock %}
|
{% block init_check_url %}/api/recipes?page=1&page_size=1{% endblock %}
|
||||||
|
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<!-- Recipe controls -->
|
<!-- Recipe controls -->
|
||||||
<div class="controls">
|
<div class="controls">
|
||||||
|
<div class="actions">
|
||||||
<div class="action-buttons">
|
<div class="action-buttons">
|
||||||
|
<div class="control-group">
|
||||||
|
<select id="sortSelect" title="{{ t('recipes.controls.sort.title') }}">
|
||||||
|
<optgroup label="{{ t('recipes.controls.sort.name') }}">
|
||||||
|
<option value="name:asc">{{ t('recipes.controls.sort.nameAsc') }}</option>
|
||||||
|
<option value="name:desc">{{ t('recipes.controls.sort.nameDesc') }}</option>
|
||||||
|
</optgroup>
|
||||||
|
<optgroup label="{{ t('recipes.controls.sort.date') }}">
|
||||||
|
<option value="date:desc">{{ t('recipes.controls.sort.dateDesc') }}</option>
|
||||||
|
<option value="date:asc">{{ t('recipes.controls.sort.dateAsc') }}</option>
|
||||||
|
</optgroup>
|
||||||
|
<optgroup label="{{ t('recipes.controls.sort.lorasCount') }}">
|
||||||
|
<option value="loras_count:desc">{{ t('recipes.controls.sort.lorasCountDesc') }}</option>
|
||||||
|
<option value="loras_count:asc">{{ t('recipes.controls.sort.lorasCountAsc') }}</option>
|
||||||
|
</optgroup>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
<div title="{{ t('recipes.controls.refresh.title') }}" class="control-group">
|
<div title="{{ t('recipes.controls.refresh.title') }}" class="control-group">
|
||||||
<button onclick="recipeManager.refreshRecipes()"><i class="fas fa-sync"></i> {{ t('common.actions.refresh') }}</button>
|
<button onclick="recipeManager.refreshRecipes()"><i class="fas fa-sync"></i> {{
|
||||||
|
t('common.actions.refresh')
|
||||||
|
}}</button>
|
||||||
</div>
|
</div>
|
||||||
<div title="{{ t('recipes.controls.import.title') }}" class="control-group">
|
<div title="{{ t('recipes.controls.import.title') }}" class="control-group">
|
||||||
<button onclick="importManager.showImportModal()"><i class="fas fa-file-import"></i> {{ t('recipes.controls.import.action') }}</button>
|
<button onclick="importManager.showImportModal()"><i class="fas fa-file-import"></i> {{
|
||||||
|
t('recipes.controls.import.action') }}</button>
|
||||||
|
</div>
|
||||||
|
<div class="control-group" title="{{ t('loras.controls.bulk.title') }}">
|
||||||
|
<button id="bulkOperationsBtn" data-action="bulk" title="{{ t('loras.controls.bulk.title') }}">
|
||||||
|
<i class="fas fa-th-large"></i> <span><span>{{ t('loras.controls.bulk.action') }}</span>
|
||||||
|
<div class="shortcut-key">B</div>
|
||||||
|
</span>
|
||||||
|
</button>
|
||||||
</div>
|
</div>
|
||||||
<!-- Add duplicate detection button -->
|
<!-- Add duplicate detection button -->
|
||||||
<div title="{{ t('loras.controls.duplicates.title') }}" class="control-group">
|
<div title="{{ t('loras.controls.duplicates.title') }}" class="control-group">
|
||||||
<button onclick="recipeManager.findDuplicateRecipes()"><i class="fas fa-clone"></i> {{ t('loras.controls.duplicates.action') }}</button>
|
<button onclick="recipeManager.findDuplicateRecipes()"><i class="fas fa-clone"></i> {{
|
||||||
|
t('loras.controls.duplicates.action') }}</button>
|
||||||
|
</div>
|
||||||
|
<div class="control-group">
|
||||||
|
<button id="favoriteFilterBtn" data-action="toggle-favorites" class="favorite-filter"
|
||||||
|
title="{{ t('recipes.controls.favorites.title') }}">
|
||||||
|
<i class="fas fa-star"></i> <span>{{ t('recipes.controls.favorites.action') }}</span>
|
||||||
|
</button>
|
||||||
</div>
|
</div>
|
||||||
<!-- Custom filter indicator button (hidden by default) -->
|
<!-- Custom filter indicator button (hidden by default) -->
|
||||||
<div id="customFilterIndicator" class="control-group hidden">
|
<div id="customFilterIndicator" class="control-group hidden">
|
||||||
<div class="filter-active">
|
<div class="filter-active">
|
||||||
<i class="fas fa-filter"></i> <span id="customFilterText">{{ t('recipes.controls.filteredByLora') }}</span>
|
<i class="fas fa-filter"></i> <span id="customFilterText">{{ t('recipes.controls.filteredByLora')
|
||||||
|
}}</span>
|
||||||
<i class="fas fa-times-circle clear-filter"></i>
|
<i class="fas fa-times-circle clear-filter"></i>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div class="controls-right">
|
||||||
|
<div class="keyboard-nav-hint tooltip">
|
||||||
|
<i class="fas fa-keyboard"></i>
|
||||||
|
<span class="tooltiptext">
|
||||||
|
<span>{{ t('keyboard.navigation') }}</span>
|
||||||
|
<table class="keyboard-shortcuts">
|
||||||
|
<tr>
|
||||||
|
<td><span class="key">Page Up</span></td>
|
||||||
|
<td>{{ t('keyboard.shortcuts.pageUp') }}</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><span class="key">Page Down</span></td>
|
||||||
|
<td>{{ t('keyboard.shortcuts.pageDown') }}</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><span class="key">Home</span></td>
|
||||||
|
<td>{{ t('keyboard.shortcuts.home') }}</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td><span class="key">End</span></td>
|
||||||
|
<td>{{ t('keyboard.shortcuts.end') }}</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Duplicates banner (hidden by default) -->
|
<!-- Breadcrumb Navigation -->
|
||||||
<div id="duplicatesBanner" class="duplicates-banner" style="display: none;">
|
<div id="breadcrumbContainer" class="sidebar-breadcrumb-container">
|
||||||
|
<nav class="sidebar-breadcrumb-nav" id="sidebarBreadcrumbNav">
|
||||||
|
<!-- Breadcrumbs will be populated by JavaScript -->
|
||||||
|
</nav>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Duplicates banner (hidden by default) -->
|
||||||
|
<div id="duplicatesBanner" class="duplicates-banner" style="display: none;">
|
||||||
<div class="banner-content">
|
<div class="banner-content">
|
||||||
<i class="fas fa-exclamation-triangle"></i>
|
<i class="fas fa-exclamation-triangle"></i>
|
||||||
<span id="duplicatesCount">{{ t('recipes.duplicates.found', count=0) }}</span>
|
<span id="duplicatesCount">{{ t('recipes.duplicates.found', count=0) }}</span>
|
||||||
@@ -74,15 +156,21 @@
|
|||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Recipe grid -->
|
{% include 'components/folder_sidebar.html' %}
|
||||||
<div class="card-grid" id="recipeGrid">
|
|
||||||
|
<!-- Recipe grid -->
|
||||||
|
<div class="card-grid" id="recipeGrid">
|
||||||
<!-- Remove the server-side conditional rendering and placeholder -->
|
<!-- Remove the server-side conditional rendering and placeholder -->
|
||||||
<!-- Virtual scrolling will handle the display logic on the client side -->
|
<!-- Virtual scrolling will handle the display logic on the client side -->
|
||||||
</div>
|
</div>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block overlay %}
|
||||||
|
<div class="bulk-mode-overlay"></div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block main_script %}
|
{% block main_script %}
|
||||||
<script type="module" src="/loras_static/js/recipes.js"></script>
|
<script type="module" src="/loras_static/js/recipes.js?v={{ version }}"></script>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
@@ -192,6 +192,6 @@
|
|||||||
|
|
||||||
{% block main_script %}
|
{% block main_script %}
|
||||||
{% if not is_initializing %}
|
{% if not is_initializing %}
|
||||||
<script type="module" src="/loras_static/js/statistics.js"></script>
|
<script type="module" src="/loras_static/js/statistics.js?v={{ version }}"></script>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
@@ -1,3 +1,4 @@
|
|||||||
|
import json
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
@@ -62,6 +63,7 @@ def test_symlink_scan_skips_file_links(monkeypatch: pytest.MonkeyPatch, tmp_path
|
|||||||
|
|
||||||
def test_symlink_cache_reuses_previous_scan(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
def test_symlink_cache_reuses_previous_scan(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
||||||
|
monkeypatch.setattr(config_module.Config, "_schedule_symlink_rescan", lambda self: None)
|
||||||
|
|
||||||
target_dir = loras_dir / "target"
|
target_dir = loras_dir / "target"
|
||||||
target_dir.mkdir()
|
target_dir.mkdir()
|
||||||
@@ -85,6 +87,7 @@ def test_symlink_cache_reuses_previous_scan(monkeypatch: pytest.MonkeyPatch, tmp
|
|||||||
|
|
||||||
def test_symlink_cache_survives_noise_mtime(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
def test_symlink_cache_survives_noise_mtime(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
loras_dir, settings_dir = _setup_paths(monkeypatch, tmp_path)
|
||||||
|
monkeypatch.setattr(config_module.Config, "_schedule_symlink_rescan", lambda self: None)
|
||||||
|
|
||||||
target_dir = loras_dir / "target"
|
target_dir = loras_dir / "target"
|
||||||
target_dir.mkdir()
|
target_dir.mkdir()
|
||||||
@@ -109,3 +112,72 @@ def test_symlink_cache_survives_noise_mtime(monkeypatch: pytest.MonkeyPatch, tmp
|
|||||||
|
|
||||||
second_cfg = config_module.Config()
|
second_cfg = config_module.Config()
|
||||||
assert second_cfg.map_path_to_link(str(target_dir)) == _normalize(str(dir_link))
|
assert second_cfg.map_path_to_link(str(target_dir)) == _normalize(str(dir_link))
|
||||||
|
|
||||||
|
|
||||||
|
def test_background_rescan_refreshes_cache(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
|
loras_dir, _ = _setup_paths(monkeypatch, tmp_path)
|
||||||
|
|
||||||
|
target_dir = loras_dir / "target"
|
||||||
|
target_dir.mkdir()
|
||||||
|
dir_link = loras_dir / "dir_link"
|
||||||
|
dir_link.symlink_to(target_dir, target_is_directory=True)
|
||||||
|
|
||||||
|
# Build initial cache pointing at the first target
|
||||||
|
first_cfg = config_module.Config()
|
||||||
|
old_real = _normalize(os.path.realpath(target_dir))
|
||||||
|
assert first_cfg.map_path_to_link(str(target_dir)) == _normalize(str(dir_link))
|
||||||
|
|
||||||
|
# Retarget the symlink to a new directory without touching the cache file
|
||||||
|
new_target = loras_dir / "target_v2"
|
||||||
|
new_target.mkdir()
|
||||||
|
dir_link.unlink()
|
||||||
|
dir_link.symlink_to(new_target, target_is_directory=True)
|
||||||
|
|
||||||
|
second_cfg = config_module.Config()
|
||||||
|
|
||||||
|
# Cache may still point at the old real path immediately after load
|
||||||
|
initial_mapping = second_cfg.map_path_to_link(str(new_target))
|
||||||
|
assert initial_mapping in {str(new_target), _normalize(str(dir_link))}
|
||||||
|
|
||||||
|
# Background rescan should refresh the mapping to the new target and update the cache file
|
||||||
|
second_cfg._wait_for_rescan(timeout=2.0)
|
||||||
|
new_real = _normalize(os.path.realpath(new_target))
|
||||||
|
assert second_cfg._path_mappings.get(new_real) == _normalize(str(dir_link))
|
||||||
|
assert second_cfg.map_path_to_link(str(new_target)) == _normalize(str(dir_link))
|
||||||
|
|
||||||
|
|
||||||
|
def test_symlink_roots_are_preserved(monkeypatch: pytest.MonkeyPatch, tmp_path):
|
||||||
|
settings_dir = tmp_path / "settings"
|
||||||
|
real_loras = tmp_path / "loras_real"
|
||||||
|
real_loras.mkdir()
|
||||||
|
loras_link = tmp_path / "loras_link"
|
||||||
|
loras_link.symlink_to(real_loras, target_is_directory=True)
|
||||||
|
|
||||||
|
checkpoints_dir = tmp_path / "checkpoints"
|
||||||
|
checkpoints_dir.mkdir()
|
||||||
|
embedding_dir = tmp_path / "embeddings"
|
||||||
|
embedding_dir.mkdir()
|
||||||
|
|
||||||
|
def fake_get_folder_paths(kind: str):
|
||||||
|
mapping = {
|
||||||
|
"loras": [str(loras_link)],
|
||||||
|
"checkpoints": [str(checkpoints_dir)],
|
||||||
|
"unet": [],
|
||||||
|
"embeddings": [str(embedding_dir)],
|
||||||
|
}
|
||||||
|
return mapping.get(kind, [])
|
||||||
|
|
||||||
|
monkeypatch.setattr(config_module.folder_paths, "get_folder_paths", fake_get_folder_paths)
|
||||||
|
monkeypatch.setattr(config_module, "standalone_mode", True)
|
||||||
|
monkeypatch.setattr(config_module, "get_settings_dir", lambda create=True: str(settings_dir))
|
||||||
|
monkeypatch.setattr(config_module.Config, "_schedule_symlink_rescan", lambda self: None)
|
||||||
|
|
||||||
|
cfg = config_module.Config()
|
||||||
|
|
||||||
|
normalized_real = _normalize(os.path.realpath(real_loras))
|
||||||
|
normalized_link = _normalize(str(loras_link))
|
||||||
|
assert cfg._path_mappings[normalized_real] == normalized_link
|
||||||
|
|
||||||
|
cache_path = settings_dir / "cache" / "symlink_map.json"
|
||||||
|
payload = json.loads(cache_path.read_text(encoding="utf-8"))
|
||||||
|
assert payload["path_mappings"][normalized_real] == normalized_link
|
||||||
|
|||||||
114
tests/frontend/api/recipeApi.bulk.test.js
Normal file
114
tests/frontend/api/recipeApi.bulk.test.js
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
import { describe, it, beforeEach, afterEach, expect, vi } from 'vitest';
|
||||||
|
|
||||||
|
const showToastMock = vi.hoisted(() => vi.fn());
|
||||||
|
const loadingManagerMock = vi.hoisted(() => ({
|
||||||
|
showSimpleLoading: vi.fn(),
|
||||||
|
hide: vi.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('../../../static/js/utils/uiHelpers.js', () => {
|
||||||
|
return {
|
||||||
|
showToast: showToastMock,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
vi.mock('../../../static/js/components/RecipeCard.js', () => ({
|
||||||
|
RecipeCard: vi.fn(() => ({ element: document.createElement('div') })),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('../../../static/js/state/index.js', () => {
|
||||||
|
return {
|
||||||
|
state: {
|
||||||
|
loadingManager: loadingManagerMock,
|
||||||
|
},
|
||||||
|
getCurrentPageState: vi.fn(),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
import { RecipeSidebarApiClient } from '../../../static/js/api/recipeApi.js';
|
||||||
|
|
||||||
|
describe('RecipeSidebarApiClient bulk operations', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
global.fetch = vi.fn();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
delete global.fetch;
|
||||||
|
});
|
||||||
|
|
||||||
|
it('sends recipe IDs when moving in bulk', async () => {
|
||||||
|
const api = new RecipeSidebarApiClient();
|
||||||
|
global.fetch.mockResolvedValue({
|
||||||
|
ok: true,
|
||||||
|
json: async () => ({
|
||||||
|
success: true,
|
||||||
|
results: [
|
||||||
|
{
|
||||||
|
recipe_id: 'abc',
|
||||||
|
original_file_path: '/recipes/abc.webp',
|
||||||
|
new_file_path: '/recipes/target/abc.webp',
|
||||||
|
success: true,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
success_count: 1,
|
||||||
|
failure_count: 0,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
const results = await api.moveBulkModels(['/recipes/abc.webp'], '/target/folder');
|
||||||
|
|
||||||
|
expect(global.fetch).toHaveBeenCalledWith(
|
||||||
|
'/api/lm/recipes/move-bulk',
|
||||||
|
expect.objectContaining({
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
const { body } = global.fetch.mock.calls[0][1];
|
||||||
|
expect(JSON.parse(body)).toEqual({
|
||||||
|
recipe_ids: ['abc'],
|
||||||
|
target_path: '/target/folder',
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(showToastMock).toHaveBeenCalledWith(
|
||||||
|
'toast.api.bulkMoveSuccess',
|
||||||
|
{ successCount: 1, type: 'Recipe' },
|
||||||
|
'success'
|
||||||
|
);
|
||||||
|
expect(results[0].recipe_id).toBe('abc');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('posts recipe IDs for bulk delete', async () => {
|
||||||
|
const api = new RecipeSidebarApiClient();
|
||||||
|
global.fetch.mockResolvedValue({
|
||||||
|
ok: true,
|
||||||
|
json: async () => ({
|
||||||
|
success: true,
|
||||||
|
total_deleted: 2,
|
||||||
|
total_failed: 0,
|
||||||
|
failed: [],
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await api.bulkDeleteModels(['/recipes/a.webp', '/recipes/b.webp']);
|
||||||
|
|
||||||
|
expect(global.fetch).toHaveBeenCalledWith(
|
||||||
|
'/api/lm/recipes/bulk-delete',
|
||||||
|
expect.objectContaining({
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
const parsedBody = JSON.parse(global.fetch.mock.calls[0][1].body);
|
||||||
|
expect(parsedBody.recipe_ids).toEqual(['a', 'b']);
|
||||||
|
expect(result).toMatchObject({
|
||||||
|
success: true,
|
||||||
|
deleted_count: 2,
|
||||||
|
failed_count: 0,
|
||||||
|
});
|
||||||
|
expect(loadingManagerMock.hide).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
55
tests/frontend/components/triggerWords.escaping.test.js
Normal file
55
tests/frontend/components/triggerWords.escaping.test.js
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
|
||||||
|
const {
|
||||||
|
TRIGGER_WORDS_MODULE,
|
||||||
|
UTILS_MODULE,
|
||||||
|
I18N_HELPERS_MODULE,
|
||||||
|
} = vi.hoisted(() => ({
|
||||||
|
TRIGGER_WORDS_MODULE: new URL('../../../static/js/components/shared/TriggerWords.js', import.meta.url).pathname,
|
||||||
|
UTILS_MODULE: new URL('../../../static/js/components/shared/utils.js', import.meta.url).pathname,
|
||||||
|
I18N_HELPERS_MODULE: new URL('../../../static/js/utils/i18nHelpers.js', import.meta.url).pathname,
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock(I18N_HELPERS_MODULE, () => ({
|
||||||
|
translate: vi.fn((key, params, fallback) => fallback || key),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('../../../static/js/utils/uiHelpers.js', () => ({
|
||||||
|
showToast: vi.fn(),
|
||||||
|
copyToClipboard: vi.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('../../../static/js/api/modelApiFactory.js', () => ({
|
||||||
|
getModelApiClient: vi.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
describe("TriggerWords HTML Escaping", () => {
|
||||||
|
let renderTriggerWords;
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
document.body.innerHTML = '';
|
||||||
|
const module = await import(TRIGGER_WORDS_MODULE);
|
||||||
|
renderTriggerWords = module.renderTriggerWords;
|
||||||
|
});
|
||||||
|
|
||||||
|
it("escapes HTML tags in trigger words rendering", () => {
|
||||||
|
const words = ["<style>guangying</style>", "fym <artist>"];
|
||||||
|
const html = renderTriggerWords(words, "test.safetensors");
|
||||||
|
|
||||||
|
expect(html).toContain("<style>guangying</style>");
|
||||||
|
expect(html).toContain("fym <artist>");
|
||||||
|
expect(html).not.toContain("<style>guangying</style>");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("uses dataset for copyTriggerWord to safely handle special characters", () => {
|
||||||
|
const words = ["word'with'quotes", "<tag>"];
|
||||||
|
const html = renderTriggerWords(words, "test.safetensors");
|
||||||
|
|
||||||
|
// Check for dataset-word attribute
|
||||||
|
expect(html).toContain('data-word="word'with'quotes"');
|
||||||
|
expect(html).toContain('data-word="<tag>"');
|
||||||
|
|
||||||
|
// Check for the onclick handler
|
||||||
|
expect(html).toContain('onclick="copyTriggerWord(this.dataset.word)"');
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -234,7 +234,6 @@ describe('AppCore initialization flow', () => {
|
|||||||
await vi.runAllTimersAsync();
|
await vi.runAllTimersAsync();
|
||||||
|
|
||||||
expect(onboardingManager.start).toHaveBeenCalledTimes(1);
|
expect(onboardingManager.start).toHaveBeenCalledTimes(1);
|
||||||
expect(bannerService.isBannerVisible).toHaveBeenCalledWith('version-mismatch');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('does not reinitialize once initialized', async () => {
|
it('does not reinitialize once initialized', async () => {
|
||||||
@@ -253,22 +252,13 @@ describe('AppCore initialization flow', () => {
|
|||||||
expect(onboardingManager.start).not.toHaveBeenCalled();
|
expect(onboardingManager.start).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('skips bulk setup when viewing recipes', async () => {
|
it('initializes bulk setup when viewing recipes', async () => {
|
||||||
state.currentPageType = 'recipes';
|
state.currentPageType = 'recipes';
|
||||||
|
|
||||||
await appCore.initialize();
|
await appCore.initialize();
|
||||||
|
|
||||||
expect(bulkManager.initialize).not.toHaveBeenCalled();
|
expect(bulkManager.initialize).toHaveBeenCalledTimes(1);
|
||||||
expect(BulkContextMenu).not.toHaveBeenCalled();
|
expect(BulkContextMenu).toHaveBeenCalledTimes(1);
|
||||||
expect(bulkManager.setBulkContextMenu).not.toHaveBeenCalled();
|
expect(bulkManager.setBulkContextMenu).toHaveBeenCalledTimes(1);
|
||||||
});
|
|
||||||
|
|
||||||
it('suppresses onboarding when version mismatch banner is visible', async () => {
|
|
||||||
bannerService.isBannerVisible.mockReturnValueOnce(true);
|
|
||||||
|
|
||||||
await appCore.initialize();
|
|
||||||
await vi.runAllTimersAsync();
|
|
||||||
|
|
||||||
expect(onboardingManager.start).not.toHaveBeenCalled();
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -9,6 +9,8 @@ const removeSessionItemMock = vi.fn();
|
|||||||
const RecipeContextMenuMock = vi.fn();
|
const RecipeContextMenuMock = vi.fn();
|
||||||
const refreshVirtualScrollMock = vi.fn();
|
const refreshVirtualScrollMock = vi.fn();
|
||||||
const refreshRecipesMock = vi.fn();
|
const refreshRecipesMock = vi.fn();
|
||||||
|
const fetchUnifiedFolderTreeMock = vi.fn();
|
||||||
|
const fetchModelFoldersMock = vi.fn();
|
||||||
|
|
||||||
let importManagerInstance;
|
let importManagerInstance;
|
||||||
let recipeModalInstance;
|
let recipeModalInstance;
|
||||||
@@ -35,6 +37,15 @@ vi.mock('../../../static/js/components/RecipeModal.js', () => ({
|
|||||||
|
|
||||||
vi.mock('../../../static/js/state/index.js', () => ({
|
vi.mock('../../../static/js/state/index.js', () => ({
|
||||||
getCurrentPageState: getCurrentPageStateMock,
|
getCurrentPageState: getCurrentPageStateMock,
|
||||||
|
state: {
|
||||||
|
currentPageType: 'recipes',
|
||||||
|
global: { settings: {} },
|
||||||
|
virtualScroller: {
|
||||||
|
removeItemByFilePath: vi.fn(),
|
||||||
|
updateSingleItem: vi.fn(),
|
||||||
|
refreshWithData: vi.fn(),
|
||||||
|
},
|
||||||
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
vi.mock('../../../static/js/utils/storageHelpers.js', () => ({
|
vi.mock('../../../static/js/utils/storageHelpers.js', () => ({
|
||||||
@@ -56,6 +67,14 @@ vi.mock('../../../static/js/utils/infiniteScroll.js', () => ({
|
|||||||
|
|
||||||
vi.mock('../../../static/js/api/recipeApi.js', () => ({
|
vi.mock('../../../static/js/api/recipeApi.js', () => ({
|
||||||
refreshRecipes: refreshRecipesMock,
|
refreshRecipes: refreshRecipesMock,
|
||||||
|
RecipeSidebarApiClient: vi.fn(() => ({
|
||||||
|
apiConfig: { config: { displayName: 'Recipes', supportsMove: true } },
|
||||||
|
fetchUnifiedFolderTree: fetchUnifiedFolderTreeMock.mockResolvedValue({ success: true, tree: {} }),
|
||||||
|
fetchModelFolders: fetchModelFoldersMock.mockResolvedValue({ success: true, folders: [] }),
|
||||||
|
fetchModelRoots: vi.fn().mockResolvedValue({ roots: ['/recipes'] }),
|
||||||
|
moveBulkModels: vi.fn(),
|
||||||
|
moveSingleModel: vi.fn(),
|
||||||
|
})),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
describe('RecipeManager', () => {
|
describe('RecipeManager', () => {
|
||||||
@@ -81,7 +100,7 @@ describe('RecipeManager', () => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
pageState = {
|
pageState = {
|
||||||
sortBy: 'date',
|
sortBy: 'date:desc',
|
||||||
searchOptions: undefined,
|
searchOptions: undefined,
|
||||||
customFilter: undefined,
|
customFilter: undefined,
|
||||||
duplicatesMode: false,
|
duplicatesMode: false,
|
||||||
@@ -91,7 +110,7 @@ describe('RecipeManager', () => {
|
|||||||
initializeAppMock.mockResolvedValue(undefined);
|
initializeAppMock.mockResolvedValue(undefined);
|
||||||
initializePageFeaturesMock.mockResolvedValue(undefined);
|
initializePageFeaturesMock.mockResolvedValue(undefined);
|
||||||
refreshVirtualScrollMock.mockReset();
|
refreshVirtualScrollMock.mockReset();
|
||||||
refreshVirtualScrollMock.mockImplementation(() => {});
|
refreshVirtualScrollMock.mockImplementation(() => { });
|
||||||
refreshRecipesMock.mockResolvedValue('refreshed');
|
refreshRecipesMock.mockResolvedValue('refreshed');
|
||||||
|
|
||||||
getSessionItemMock.mockImplementation((key) => {
|
getSessionItemMock.mockImplementation((key) => {
|
||||||
@@ -102,7 +121,7 @@ describe('RecipeManager', () => {
|
|||||||
};
|
};
|
||||||
return map[key] ?? null;
|
return map[key] ?? null;
|
||||||
});
|
});
|
||||||
removeSessionItemMock.mockImplementation(() => {});
|
removeSessionItemMock.mockImplementation(() => { });
|
||||||
|
|
||||||
renderRecipesPage();
|
renderRecipesPage();
|
||||||
|
|
||||||
@@ -118,8 +137,8 @@ describe('RecipeManager', () => {
|
|||||||
const sortSelectElement = document.createElement('select');
|
const sortSelectElement = document.createElement('select');
|
||||||
sortSelectElement.id = 'sortSelect';
|
sortSelectElement.id = 'sortSelect';
|
||||||
sortSelectElement.innerHTML = `
|
sortSelectElement.innerHTML = `
|
||||||
<option value="date">Date</option>
|
<option value="date:desc">Newest</option>
|
||||||
<option value="name">Name</option>
|
<option value="name:asc">Name A-Z</option>
|
||||||
`;
|
`;
|
||||||
document.body.appendChild(sortSelectElement);
|
document.body.appendChild(sortSelectElement);
|
||||||
|
|
||||||
@@ -139,6 +158,8 @@ describe('RecipeManager', () => {
|
|||||||
tags: true,
|
tags: true,
|
||||||
loraName: true,
|
loraName: true,
|
||||||
loraModel: true,
|
loraModel: true,
|
||||||
|
prompt: true,
|
||||||
|
recursive: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
expect(pageState.customFilter).toEqual({
|
expect(pageState.customFilter).toEqual({
|
||||||
@@ -162,10 +183,10 @@ describe('RecipeManager', () => {
|
|||||||
expect(refreshVirtualScrollMock).toHaveBeenCalledTimes(1);
|
expect(refreshVirtualScrollMock).toHaveBeenCalledTimes(1);
|
||||||
|
|
||||||
const sortSelect = document.getElementById('sortSelect');
|
const sortSelect = document.getElementById('sortSelect');
|
||||||
sortSelect.value = 'name';
|
sortSelect.value = 'name:asc';
|
||||||
sortSelect.dispatchEvent(new Event('change', { bubbles: true }));
|
sortSelect.dispatchEvent(new Event('change', { bubbles: true }));
|
||||||
|
|
||||||
expect(pageState.sortBy).toBe('name');
|
expect(pageState.sortBy).toBe('name:asc');
|
||||||
expect(refreshVirtualScrollMock).toHaveBeenCalledTimes(2);
|
expect(refreshVirtualScrollMock).toHaveBeenCalledTimes(2);
|
||||||
expect(initializePageFeaturesMock).toHaveBeenCalledTimes(1);
|
expect(initializePageFeaturesMock).toHaveBeenCalledTimes(1);
|
||||||
});
|
});
|
||||||
|
|||||||
98
tests/metadata_collector/test_pipe_tracer.py
Normal file
98
tests/metadata_collector/test_pipe_tracer.py
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
|
||||||
|
import pytest
|
||||||
|
from types import SimpleNamespace
|
||||||
|
from py.metadata_collector.metadata_processor import MetadataProcessor
|
||||||
|
from py.metadata_collector.constants import MODELS, SAMPLING, IS_SAMPLER
|
||||||
|
|
||||||
|
class TestPipeTracer:
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def pipe_workflow_metadata(self):
|
||||||
|
"""
|
||||||
|
Creates a mock metadata structure matching the one provided in refs/tmp.
|
||||||
|
Structure:
|
||||||
|
Load Checkpoint(28) -> Lora Loader(52) -> ToBasicPipe(69) -> FromBasicPipe(71) -> KSampler(32)
|
||||||
|
"""
|
||||||
|
|
||||||
|
original_prompt = {
|
||||||
|
'28': {
|
||||||
|
'inputs': {'ckpt_name': 'Illustrious\\bananaSplitzXL_vee5PointOh.safetensors'},
|
||||||
|
'class_type': 'CheckpointLoaderSimple'
|
||||||
|
},
|
||||||
|
'52': {
|
||||||
|
'inputs': {
|
||||||
|
'model': ['28', 0],
|
||||||
|
'clip': ['28', 1]
|
||||||
|
},
|
||||||
|
'class_type': 'Lora Loader (LoraManager)'
|
||||||
|
},
|
||||||
|
'69': {
|
||||||
|
'inputs': {
|
||||||
|
'model': ['52', 0],
|
||||||
|
'clip': ['52', 1],
|
||||||
|
'vae': ['28', 2],
|
||||||
|
'positive': ['75', 0],
|
||||||
|
'negative': ['30', 0]
|
||||||
|
},
|
||||||
|
'class_type': 'ToBasicPipe'
|
||||||
|
},
|
||||||
|
'71': {
|
||||||
|
'inputs': {'basic_pipe': ['69', 0]},
|
||||||
|
'class_type': 'FromBasicPipe'
|
||||||
|
},
|
||||||
|
'32': {
|
||||||
|
'inputs': {
|
||||||
|
'seed': 131755205602911,
|
||||||
|
'steps': 5,
|
||||||
|
'cfg': 8.0,
|
||||||
|
'sampler_name': 'euler_ancestral',
|
||||||
|
'scheduler': 'karras',
|
||||||
|
'denoise': 1.0,
|
||||||
|
'model': ['71', 0],
|
||||||
|
'positive': ['71', 3],
|
||||||
|
'negative': ['71', 4],
|
||||||
|
'latent_image': ['76', 0]
|
||||||
|
},
|
||||||
|
'class_type': 'KSampler'
|
||||||
|
},
|
||||||
|
'75': {'inputs': {'text': 'positive', 'clip': ['52', 1]}, 'class_type': 'CLIPTextEncode'},
|
||||||
|
'30': {'inputs': {'text': 'negative', 'clip': ['52', 1]}, 'class_type': 'CLIPTextEncode'},
|
||||||
|
'76': {'inputs': {'width': 832, 'height': 1216, 'batch_size': 1}, 'class_type': 'EmptyLatentImage'}
|
||||||
|
}
|
||||||
|
|
||||||
|
metadata = {
|
||||||
|
"current_prompt": SimpleNamespace(original_prompt=original_prompt),
|
||||||
|
MODELS: {
|
||||||
|
"28": {
|
||||||
|
"type": "checkpoint",
|
||||||
|
"name": "bananaSplitzXL_vee5PointOh.safetensors"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
SAMPLING: {
|
||||||
|
"32": {
|
||||||
|
IS_SAMPLER: True,
|
||||||
|
"parameters": {
|
||||||
|
"sampler_name": "euler_ancestral",
|
||||||
|
"scheduler": "karras"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return metadata
|
||||||
|
|
||||||
|
def test_trace_model_path_through_pipe(self, pipe_workflow_metadata):
|
||||||
|
"""Verify trace_model_path can follow: KSampler -> FromBasicPipe -> ToBasicPipe -> Lora -> Checkpoint."""
|
||||||
|
prompt = pipe_workflow_metadata["current_prompt"]
|
||||||
|
|
||||||
|
# Start trace from KSampler (32)
|
||||||
|
ckpt_id = MetadataProcessor.trace_model_path(pipe_workflow_metadata, prompt, "32")
|
||||||
|
|
||||||
|
assert ckpt_id == "28"
|
||||||
|
|
||||||
|
def test_find_primary_checkpoint_with_pipe(self, pipe_workflow_metadata):
|
||||||
|
"""Verify find_primary_checkpoint returns the correct name even with pipe nodes."""
|
||||||
|
# Providing sampler_id to test the optimization as well
|
||||||
|
name = MetadataProcessor.find_primary_checkpoint(pipe_workflow_metadata, primary_sampler_id="32")
|
||||||
|
|
||||||
|
assert name == "bananaSplitzXL_vee5PointOh.safetensors"
|
||||||
172
tests/metadata_collector/test_tracer.py
Normal file
172
tests/metadata_collector/test_tracer.py
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
|
||||||
|
import pytest
|
||||||
|
from types import SimpleNamespace
|
||||||
|
from py.metadata_collector.metadata_processor import MetadataProcessor
|
||||||
|
from py.metadata_collector.constants import MODELS, SAMPLING, IS_SAMPLER
|
||||||
|
|
||||||
|
class TestMetadataTracer:
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_workflow_metadata(self):
|
||||||
|
"""
|
||||||
|
Creates a mock metadata structure with a complex workflow graph.
|
||||||
|
Structure:
|
||||||
|
Sampler(246) -> Guider(241) -> LoraLoader(264) -> CheckpointLoader(238)
|
||||||
|
|
||||||
|
Also includes a "Decoy" checkpoint (ID 999) that is NOT connected,
|
||||||
|
to verify we found the *connected* one, not just *any* one.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# 1. Define the Graph (Original Prompt)
|
||||||
|
# Using IDs as strings to match typical ComfyUI behavior in metadata
|
||||||
|
original_prompt = {
|
||||||
|
"246": {
|
||||||
|
"class_type": "SamplerCustomAdvanced",
|
||||||
|
"inputs": {
|
||||||
|
"guider": ["241", 0],
|
||||||
|
"noise": ["255", 0],
|
||||||
|
"sampler": ["247", 0],
|
||||||
|
"sigmas": ["248", 0],
|
||||||
|
"latent_image": ["153", 0]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"241": {
|
||||||
|
"class_type": "CFGGuider",
|
||||||
|
"inputs": {
|
||||||
|
"model": ["264", 0],
|
||||||
|
"positive": ["239", 0],
|
||||||
|
"negative": ["240", 0]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"264": {
|
||||||
|
"class_type": "LoraLoader", # Simplified name
|
||||||
|
"inputs": {
|
||||||
|
"model": ["238", 0],
|
||||||
|
"lora_name": "some_style_lora.safetensors"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"238": {
|
||||||
|
"class_type": "CheckpointLoaderSimple",
|
||||||
|
"inputs": {
|
||||||
|
"ckpt_name": "Correct_Model.safetensors"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
# unconnected / decoy nodes
|
||||||
|
"999": {
|
||||||
|
"class_type": "CheckpointLoaderSimple",
|
||||||
|
"inputs": {
|
||||||
|
"ckpt_name": "Decoy_Model.safetensors"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"154": { # Downstream VAE Decode
|
||||||
|
"class_type": "VAEDecode",
|
||||||
|
"inputs": {
|
||||||
|
"samples": ["246", 0]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# 2. Define the Metadata (Collected execution data)
|
||||||
|
metadata = {
|
||||||
|
"current_prompt": SimpleNamespace(original_prompt=original_prompt),
|
||||||
|
"execution_order": ["238", "264", "241", "246", "154", "999"], # 999 execs last or separately
|
||||||
|
|
||||||
|
# Models Registry
|
||||||
|
MODELS: {
|
||||||
|
"238": {
|
||||||
|
"type": "checkpoint",
|
||||||
|
"name": "Correct_Model.safetensors"
|
||||||
|
},
|
||||||
|
"999": {
|
||||||
|
"type": "checkpoint",
|
||||||
|
"name": "Decoy_Model.safetensors"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
# Sampling Registry
|
||||||
|
SAMPLING: {
|
||||||
|
"246": {
|
||||||
|
IS_SAMPLER: True,
|
||||||
|
"parameters": {
|
||||||
|
"sampler_name": "euler",
|
||||||
|
"scheduler": "normal"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"images": {
|
||||||
|
"first_decode": {
|
||||||
|
"node_id": "154"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return metadata
|
||||||
|
|
||||||
|
def test_find_primary_sampler_identifies_correct_node(self, mock_workflow_metadata):
|
||||||
|
"""Verify find_primary_sampler correctly identifies the sampler connected to the downstream decode."""
|
||||||
|
sampler_id, sampler_info = MetadataProcessor.find_primary_sampler(mock_workflow_metadata, downstream_id="154")
|
||||||
|
|
||||||
|
assert sampler_id == "246"
|
||||||
|
assert sampler_info is not None
|
||||||
|
assert sampler_info["parameters"]["sampler_name"] == "euler"
|
||||||
|
|
||||||
|
def test_trace_model_path_follows_topology(self, mock_workflow_metadata):
|
||||||
|
"""Verify trace_model_path follows: Sampler -> Guider -> Lora -> Checkpoint."""
|
||||||
|
prompt = mock_workflow_metadata["current_prompt"]
|
||||||
|
|
||||||
|
# Start trace from Sampler (246)
|
||||||
|
# Should find Checkpoint (238)
|
||||||
|
ckpt_id = MetadataProcessor.trace_model_path(mock_workflow_metadata, prompt, "246")
|
||||||
|
|
||||||
|
assert ckpt_id == "238" # Should be the ID of the connected checkpoint
|
||||||
|
|
||||||
|
def test_find_primary_checkpoint_prioritizes_connected_model(self, mock_workflow_metadata):
|
||||||
|
"""Verify find_primary_checkpoint returns the NAME of the topologically connected checkpoint, honoring the graph."""
|
||||||
|
name = MetadataProcessor.find_primary_checkpoint(mock_workflow_metadata, downstream_id="154")
|
||||||
|
|
||||||
|
assert name == "Correct_Model.safetensors"
|
||||||
|
assert name != "Decoy_Model.safetensors"
|
||||||
|
|
||||||
|
def test_trace_model_path_simple_direct_connection(self):
|
||||||
|
"""Verify it works for a simple Sampler -> Checkpoint connection."""
|
||||||
|
original_prompt = {
|
||||||
|
"100": { # Sampler
|
||||||
|
"class_type": "KSampler",
|
||||||
|
"inputs": {
|
||||||
|
"model": ["101", 0]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"101": { # Checkpoint
|
||||||
|
"class_type": "CheckpointLoaderSimple",
|
||||||
|
"inputs": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
metadata = {
|
||||||
|
"current_prompt": SimpleNamespace(original_prompt=original_prompt),
|
||||||
|
MODELS: {
|
||||||
|
"101": {"type": "checkpoint", "name": "Simple_Model.safetensors"}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ckpt_id = MetadataProcessor.trace_model_path(metadata, metadata["current_prompt"], "100")
|
||||||
|
assert ckpt_id == "101"
|
||||||
|
|
||||||
|
def test_trace_stops_at_max_depth(self):
|
||||||
|
"""Verify logic halts if graph is infinitely cyclic or too deep."""
|
||||||
|
# Create a cycle: Node 1 -> Node 2 -> Node 1
|
||||||
|
original_prompt = {
|
||||||
|
"1": {"inputs": {"model": ["2", 0]}},
|
||||||
|
"2": {"inputs": {"model": ["1", 0]}}
|
||||||
|
}
|
||||||
|
|
||||||
|
metadata = {
|
||||||
|
"current_prompt": SimpleNamespace(original_prompt=original_prompt),
|
||||||
|
MODELS: {} # No checkpoints registered
|
||||||
|
}
|
||||||
|
|
||||||
|
# Should return None, not hang forever
|
||||||
|
ckpt_id = MetadataProcessor.trace_model_path(metadata, metadata["current_prompt"], "1")
|
||||||
|
assert ckpt_id is None
|
||||||
|
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
import logging
|
import logging
|
||||||
|
import copy
|
||||||
|
|
||||||
from py.nodes.utils import nunchaku_load_lora
|
from py.nodes.utils import nunchaku_load_lora
|
||||||
|
|
||||||
@@ -28,6 +29,9 @@ class _DummyModel:
|
|||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.model = _DummyModelWrapper()
|
self.model = _DummyModelWrapper()
|
||||||
|
|
||||||
|
def clone(self):
|
||||||
|
return copy.deepcopy(self)
|
||||||
|
|
||||||
|
|
||||||
def test_nunchaku_load_lora_skips_missing_lora(monkeypatch, caplog):
|
def test_nunchaku_load_lora_skips_missing_lora(monkeypatch, caplog):
|
||||||
import folder_paths
|
import folder_paths
|
||||||
|
|||||||
@@ -103,8 +103,7 @@ def test_register_startup_hooks_appends_once():
|
|||||||
]
|
]
|
||||||
|
|
||||||
assert routes.attach_dependencies in startup_bound_to_routes
|
assert routes.attach_dependencies in startup_bound_to_routes
|
||||||
assert routes.prewarm_cache in startup_bound_to_routes
|
assert len(startup_bound_to_routes) == 1
|
||||||
assert len(startup_bound_to_routes) == 2
|
|
||||||
|
|
||||||
|
|
||||||
def test_to_route_mapping_uses_handler_set():
|
def test_to_route_mapping_uses_handler_set():
|
||||||
@@ -212,4 +211,4 @@ def test_recipe_routes_setup_routes_uses_registrar(monkeypatch: pytest.MonkeyPat
|
|||||||
if isinstance(getattr(cb, "__self__", None), recipe_routes.RecipeRoutes)
|
if isinstance(getattr(cb, "__self__", None), recipe_routes.RecipeRoutes)
|
||||||
}
|
}
|
||||||
assert {type(cb.__self__) for cb in recipe_callbacks} == {recipe_routes.RecipeRoutes}
|
assert {type(cb.__self__) for cb in recipe_callbacks} == {recipe_routes.RecipeRoutes}
|
||||||
assert {cb.__name__ for cb in recipe_callbacks} == {"attach_dependencies", "prewarm_cache"}
|
assert {cb.__name__ for cb in recipe_callbacks} == {"attach_dependencies"}
|
||||||
|
|||||||
@@ -29,6 +29,7 @@ class RecipeRouteHarness:
|
|||||||
persistence: "StubPersistenceService"
|
persistence: "StubPersistenceService"
|
||||||
sharing: "StubSharingService"
|
sharing: "StubSharingService"
|
||||||
downloader: "StubDownloader"
|
downloader: "StubDownloader"
|
||||||
|
civitai: "StubCivitaiClient"
|
||||||
tmp_dir: Path
|
tmp_dir: Path
|
||||||
|
|
||||||
|
|
||||||
@@ -68,6 +69,10 @@ class StubRecipeScanner:
|
|||||||
async def get_recipe_by_id(self, recipe_id: str) -> Optional[Dict[str, Any]]:
|
async def get_recipe_by_id(self, recipe_id: str) -> Optional[Dict[str, Any]]:
|
||||||
return self.recipes.get(recipe_id)
|
return self.recipes.get(recipe_id)
|
||||||
|
|
||||||
|
async def get_recipe_json_path(self, recipe_id: str) -> Optional[str]:
|
||||||
|
candidate = Path(self.recipes_dir) / f"{recipe_id}.recipe.json"
|
||||||
|
return str(candidate) if candidate.exists() else None
|
||||||
|
|
||||||
async def remove_recipe(self, recipe_id: str) -> None:
|
async def remove_recipe(self, recipe_id: str) -> None:
|
||||||
self.removed.append(recipe_id)
|
self.removed.append(recipe_id)
|
||||||
self.recipes.pop(recipe_id, None)
|
self.recipes.pop(recipe_id, None)
|
||||||
@@ -86,6 +91,7 @@ class StubAnalysisService:
|
|||||||
self.remote_calls: List[Optional[str]] = []
|
self.remote_calls: List[Optional[str]] = []
|
||||||
self.local_calls: List[Optional[str]] = []
|
self.local_calls: List[Optional[str]] = []
|
||||||
self.result = SimpleNamespace(payload={"loras": []}, status=200)
|
self.result = SimpleNamespace(payload={"loras": []}, status=200)
|
||||||
|
self._recipe_parser_factory = None
|
||||||
StubAnalysisService.instances.append(self)
|
StubAnalysisService.instances.append(self)
|
||||||
|
|
||||||
async def analyze_uploaded_image(self, *, image_bytes: bytes | None, recipe_scanner) -> SimpleNamespace: # noqa: D401 - mirrors real signature
|
async def analyze_uploaded_image(self, *, image_bytes: bytes | None, recipe_scanner) -> SimpleNamespace: # noqa: D401 - mirrors real signature
|
||||||
@@ -118,11 +124,12 @@ class StubPersistenceService:
|
|||||||
def __init__(self, **_: Any) -> None:
|
def __init__(self, **_: Any) -> None:
|
||||||
self.save_calls: List[Dict[str, Any]] = []
|
self.save_calls: List[Dict[str, Any]] = []
|
||||||
self.delete_calls: List[str] = []
|
self.delete_calls: List[str] = []
|
||||||
|
self.move_calls: List[Dict[str, str]] = []
|
||||||
self.save_result = SimpleNamespace(payload={"success": True, "recipe_id": "stub-id"}, status=200)
|
self.save_result = SimpleNamespace(payload={"success": True, "recipe_id": "stub-id"}, status=200)
|
||||||
self.delete_result = SimpleNamespace(payload={"success": True}, status=200)
|
self.delete_result = SimpleNamespace(payload={"success": True}, status=200)
|
||||||
StubPersistenceService.instances.append(self)
|
StubPersistenceService.instances.append(self)
|
||||||
|
|
||||||
async def save_recipe(self, *, recipe_scanner, image_bytes, image_base64, name, tags, metadata) -> SimpleNamespace: # noqa: D401
|
async def save_recipe(self, *, recipe_scanner, image_bytes, image_base64, name, tags, metadata, extension=None) -> SimpleNamespace: # noqa: D401
|
||||||
self.save_calls.append(
|
self.save_calls.append(
|
||||||
{
|
{
|
||||||
"recipe_scanner": recipe_scanner,
|
"recipe_scanner": recipe_scanner,
|
||||||
@@ -131,6 +138,7 @@ class StubPersistenceService:
|
|||||||
"name": name,
|
"name": name,
|
||||||
"tags": list(tags),
|
"tags": list(tags),
|
||||||
"metadata": metadata,
|
"metadata": metadata,
|
||||||
|
"extension": extension,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
return self.save_result
|
return self.save_result
|
||||||
@@ -140,6 +148,12 @@ class StubPersistenceService:
|
|||||||
await recipe_scanner.remove_recipe(recipe_id)
|
await recipe_scanner.remove_recipe(recipe_id)
|
||||||
return self.delete_result
|
return self.delete_result
|
||||||
|
|
||||||
|
async def move_recipe(self, *, recipe_scanner, recipe_id: str, target_path: str) -> SimpleNamespace: # noqa: D401
|
||||||
|
self.move_calls.append({"recipe_id": recipe_id, "target_path": target_path})
|
||||||
|
return SimpleNamespace(
|
||||||
|
payload={"success": True, "recipe_id": recipe_id, "new_file_path": target_path}, status=200
|
||||||
|
)
|
||||||
|
|
||||||
async def update_recipe(self, *, recipe_scanner, recipe_id: str, updates: Dict[str, Any]) -> SimpleNamespace: # pragma: no cover - unused by smoke tests
|
async def update_recipe(self, *, recipe_scanner, recipe_id: str, updates: Dict[str, Any]) -> SimpleNamespace: # pragma: no cover - unused by smoke tests
|
||||||
return SimpleNamespace(payload={"success": True, "recipe_id": recipe_id, "updates": updates}, status=200)
|
return SimpleNamespace(payload={"success": True, "recipe_id": recipe_id, "updates": updates}, status=200)
|
||||||
|
|
||||||
@@ -189,6 +203,16 @@ class StubDownloader:
|
|||||||
return True, destination
|
return True, destination
|
||||||
|
|
||||||
|
|
||||||
|
class StubCivitaiClient:
|
||||||
|
"""Stub for Civitai API client."""
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self.image_info: Dict[str, Any] = {}
|
||||||
|
|
||||||
|
async def get_image_info(self, image_id: str) -> Optional[Dict[str, Any]]:
|
||||||
|
return self.image_info.get(image_id)
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
@asynccontextmanager
|
||||||
async def recipe_harness(monkeypatch, tmp_path: Path) -> AsyncIterator[RecipeRouteHarness]:
|
async def recipe_harness(monkeypatch, tmp_path: Path) -> AsyncIterator[RecipeRouteHarness]:
|
||||||
"""Context manager that yields a fully wired recipe route harness."""
|
"""Context manager that yields a fully wired recipe route harness."""
|
||||||
@@ -198,12 +222,13 @@ async def recipe_harness(monkeypatch, tmp_path: Path) -> AsyncIterator[RecipeRou
|
|||||||
StubSharingService.instances.clear()
|
StubSharingService.instances.clear()
|
||||||
|
|
||||||
scanner = StubRecipeScanner(tmp_path)
|
scanner = StubRecipeScanner(tmp_path)
|
||||||
|
civitai_client = StubCivitaiClient()
|
||||||
|
|
||||||
async def fake_get_recipe_scanner():
|
async def fake_get_recipe_scanner():
|
||||||
return scanner
|
return scanner
|
||||||
|
|
||||||
async def fake_get_civitai_client():
|
async def fake_get_civitai_client():
|
||||||
return object()
|
return civitai_client
|
||||||
|
|
||||||
downloader = StubDownloader()
|
downloader = StubDownloader()
|
||||||
|
|
||||||
@@ -232,6 +257,7 @@ async def recipe_harness(monkeypatch, tmp_path: Path) -> AsyncIterator[RecipeRou
|
|||||||
persistence=StubPersistenceService.instances[-1],
|
persistence=StubPersistenceService.instances[-1],
|
||||||
sharing=StubSharingService.instances[-1],
|
sharing=StubSharingService.instances[-1],
|
||||||
downloader=downloader,
|
downloader=downloader,
|
||||||
|
civitai=civitai_client,
|
||||||
tmp_dir=tmp_path,
|
tmp_dir=tmp_path,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -296,8 +322,23 @@ async def test_save_and_delete_recipe_round_trip(monkeypatch, tmp_path: Path) ->
|
|||||||
assert harness.persistence.delete_calls == ["saved-id"]
|
assert harness.persistence.delete_calls == ["saved-id"]
|
||||||
|
|
||||||
|
|
||||||
|
async def test_move_recipe_invokes_persistence(monkeypatch, tmp_path: Path) -> None:
|
||||||
|
async with recipe_harness(monkeypatch, tmp_path) as harness:
|
||||||
|
response = await harness.client.post(
|
||||||
|
"/api/lm/recipe/move",
|
||||||
|
json={"recipe_id": "move-me", "target_path": str(tmp_path / "recipes" / "subdir")},
|
||||||
|
)
|
||||||
|
|
||||||
|
payload = await response.json()
|
||||||
|
assert response.status == 200
|
||||||
|
assert payload["recipe_id"] == "move-me"
|
||||||
|
assert harness.persistence.move_calls == [
|
||||||
|
{"recipe_id": "move-me", "target_path": str(tmp_path / "recipes" / "subdir")}
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
async def test_import_remote_recipe(monkeypatch, tmp_path: Path) -> None:
|
async def test_import_remote_recipe(monkeypatch, tmp_path: Path) -> None:
|
||||||
provider_calls: list[int] = []
|
provider_calls: list[str | int] = []
|
||||||
|
|
||||||
class Provider:
|
class Provider:
|
||||||
async def get_model_version_info(self, model_version_id):
|
async def get_model_version_info(self, model_version_id):
|
||||||
@@ -307,7 +348,7 @@ async def test_import_remote_recipe(monkeypatch, tmp_path: Path) -> None:
|
|||||||
async def fake_get_default_metadata_provider():
|
async def fake_get_default_metadata_provider():
|
||||||
return Provider()
|
return Provider()
|
||||||
|
|
||||||
monkeypatch.setattr(recipe_handlers, "get_default_metadata_provider", fake_get_default_metadata_provider)
|
monkeypatch.setattr("py.recipes.enrichment.get_default_metadata_provider", fake_get_default_metadata_provider)
|
||||||
|
|
||||||
async with recipe_harness(monkeypatch, tmp_path) as harness:
|
async with recipe_harness(monkeypatch, tmp_path) as harness:
|
||||||
resources = [
|
resources = [
|
||||||
@@ -349,16 +390,15 @@ async def test_import_remote_recipe(monkeypatch, tmp_path: Path) -> None:
|
|||||||
assert call["tags"] == ["foo", "bar"]
|
assert call["tags"] == ["foo", "bar"]
|
||||||
metadata = call["metadata"]
|
metadata = call["metadata"]
|
||||||
assert metadata["base_model"] == "Flux Provider"
|
assert metadata["base_model"] == "Flux Provider"
|
||||||
assert provider_calls == [33]
|
assert provider_calls == ["33"]
|
||||||
assert metadata["checkpoint"]["modelVersionId"] == 33
|
assert metadata["checkpoint"]["modelVersionId"] == 33
|
||||||
assert metadata["loras"][0]["weight"] == 0.25
|
assert metadata["loras"][0]["weight"] == 0.25
|
||||||
assert metadata["gen_params"]["prompt"] == "hello world"
|
assert metadata["gen_params"]["prompt"] == "hello world"
|
||||||
assert metadata["gen_params"]["checkpoint"]["modelVersionId"] == 33
|
|
||||||
assert harness.downloader.urls == ["https://example.com/images/1"]
|
assert harness.downloader.urls == ["https://example.com/images/1"]
|
||||||
|
|
||||||
|
|
||||||
async def test_import_remote_recipe_falls_back_to_request_base_model(monkeypatch, tmp_path: Path) -> None:
|
async def test_import_remote_recipe_falls_back_to_request_base_model(monkeypatch, tmp_path: Path) -> None:
|
||||||
provider_calls: list[int] = []
|
provider_calls: list[str | int] = []
|
||||||
|
|
||||||
class Provider:
|
class Provider:
|
||||||
async def get_model_version_info(self, model_version_id):
|
async def get_model_version_info(self, model_version_id):
|
||||||
@@ -368,7 +408,7 @@ async def test_import_remote_recipe_falls_back_to_request_base_model(monkeypatch
|
|||||||
async def fake_get_default_metadata_provider():
|
async def fake_get_default_metadata_provider():
|
||||||
return Provider()
|
return Provider()
|
||||||
|
|
||||||
monkeypatch.setattr(recipe_handlers, "get_default_metadata_provider", fake_get_default_metadata_provider)
|
monkeypatch.setattr("py.recipes.enrichment.get_default_metadata_provider", fake_get_default_metadata_provider)
|
||||||
|
|
||||||
async with recipe_harness(monkeypatch, tmp_path) as harness:
|
async with recipe_harness(monkeypatch, tmp_path) as harness:
|
||||||
resources = [
|
resources = [
|
||||||
@@ -397,7 +437,42 @@ async def test_import_remote_recipe_falls_back_to_request_base_model(monkeypatch
|
|||||||
|
|
||||||
metadata = harness.persistence.save_calls[-1]["metadata"]
|
metadata = harness.persistence.save_calls[-1]["metadata"]
|
||||||
assert metadata["base_model"] == "Flux"
|
assert metadata["base_model"] == "Flux"
|
||||||
assert provider_calls == [77]
|
assert provider_calls == ["77"]
|
||||||
|
|
||||||
|
|
||||||
|
async def test_import_remote_video_recipe(monkeypatch, tmp_path: Path) -> None:
|
||||||
|
async def fake_get_default_metadata_provider():
|
||||||
|
return SimpleNamespace(get_model_version_info=lambda id: ({}, None))
|
||||||
|
|
||||||
|
monkeypatch.setattr("py.recipes.enrichment.get_default_metadata_provider", fake_get_default_metadata_provider)
|
||||||
|
|
||||||
|
async with recipe_harness(monkeypatch, tmp_path) as harness:
|
||||||
|
harness.civitai.image_info["12345"] = {
|
||||||
|
"id": 12345,
|
||||||
|
"url": "https://image.civitai.com/x/y/original=true/video.mp4",
|
||||||
|
"type": "video"
|
||||||
|
}
|
||||||
|
|
||||||
|
response = await harness.client.get(
|
||||||
|
"/api/lm/recipes/import-remote",
|
||||||
|
params={
|
||||||
|
"image_url": "https://civitai.com/images/12345",
|
||||||
|
"name": "Video Recipe",
|
||||||
|
"resources": json.dumps([]),
|
||||||
|
"base_model": "Flux",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
payload = await response.json()
|
||||||
|
assert response.status == 200
|
||||||
|
assert payload["success"] is True
|
||||||
|
|
||||||
|
# Verify downloader was called with rewritten URL
|
||||||
|
assert "transcode=true" in harness.downloader.urls[0]
|
||||||
|
|
||||||
|
# Verify persistence was called with correct extension
|
||||||
|
call = harness.persistence.save_calls[-1]
|
||||||
|
assert call["extension"] == ".mp4"
|
||||||
|
|
||||||
|
|
||||||
async def test_analyze_uploaded_image_error_path(monkeypatch, tmp_path: Path) -> None:
|
async def test_analyze_uploaded_image_error_path(monkeypatch, tmp_path: Path) -> None:
|
||||||
@@ -452,3 +527,69 @@ async def test_share_and_download_recipe(monkeypatch, tmp_path: Path) -> None:
|
|||||||
assert body == b"stub"
|
assert body == b"stub"
|
||||||
|
|
||||||
download_path.unlink(missing_ok=True)
|
download_path.unlink(missing_ok=True)
|
||||||
|
async def test_import_remote_recipe_merges_metadata(monkeypatch, tmp_path: Path) -> None:
|
||||||
|
# 1. Mock Metadata Provider
|
||||||
|
class Provider:
|
||||||
|
async def get_model_version_info(self, model_version_id):
|
||||||
|
return {"baseModel": "Flux Provider"}, None
|
||||||
|
|
||||||
|
async def fake_get_default_metadata_provider():
|
||||||
|
return Provider()
|
||||||
|
|
||||||
|
monkeypatch.setattr("py.recipes.enrichment.get_default_metadata_provider", fake_get_default_metadata_provider)
|
||||||
|
|
||||||
|
# 2. Mock ExifUtils to return some embedded metadata
|
||||||
|
class MockExifUtils:
|
||||||
|
@staticmethod
|
||||||
|
def extract_image_metadata(path):
|
||||||
|
return "Recipe metadata: " + json.dumps({
|
||||||
|
"gen_params": {"prompt": "from embedded", "seed": 123}
|
||||||
|
})
|
||||||
|
|
||||||
|
monkeypatch.setattr(recipe_handlers, "ExifUtils", MockExifUtils)
|
||||||
|
|
||||||
|
# 3. Mock Parser Factory for StubAnalysisService
|
||||||
|
class MockParser:
|
||||||
|
async def parse_metadata(self, raw, recipe_scanner=None):
|
||||||
|
return json.loads(raw[len("Recipe metadata: "):])
|
||||||
|
|
||||||
|
class MockFactory:
|
||||||
|
def create_parser(self, raw):
|
||||||
|
if raw.startswith("Recipe metadata: "):
|
||||||
|
return MockParser()
|
||||||
|
return None
|
||||||
|
|
||||||
|
# 4. Setup Harness and run test
|
||||||
|
async with recipe_harness(monkeypatch, tmp_path) as harness:
|
||||||
|
harness.analysis._recipe_parser_factory = MockFactory()
|
||||||
|
|
||||||
|
# Civitai meta via image_info
|
||||||
|
harness.civitai.image_info["1"] = {
|
||||||
|
"id": 1,
|
||||||
|
"url": "https://example.com/images/1.jpg",
|
||||||
|
"meta": {"prompt": "from civitai", "cfg": 7.0}
|
||||||
|
}
|
||||||
|
|
||||||
|
resources = []
|
||||||
|
response = await harness.client.get(
|
||||||
|
"/api/lm/recipes/import-remote",
|
||||||
|
params={
|
||||||
|
"image_url": "https://civitai.com/images/1",
|
||||||
|
"name": "Merged Recipe",
|
||||||
|
"resources": json.dumps(resources),
|
||||||
|
"gen_params": json.dumps({"prompt": "from request", "steps": 25}),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
payload = await response.json()
|
||||||
|
assert response.status == 200
|
||||||
|
|
||||||
|
call = harness.persistence.save_calls[-1]
|
||||||
|
metadata = call["metadata"]
|
||||||
|
gen_params = metadata["gen_params"]
|
||||||
|
|
||||||
|
# Priority: request (prompt=request, steps=25) > civitai (prompt=civitai, cfg=7.0) > embedded (prompt=embedded, seed=123)
|
||||||
|
assert gen_params["prompt"] == "from request"
|
||||||
|
assert gen_params["steps"] == 25
|
||||||
|
assert gen_params["cfg"] == 7.0
|
||||||
|
assert gen_params["seed"] == 123
|
||||||
|
|||||||
113
tests/services/test_comfy_metadata_parser.py
Normal file
113
tests/services/test_comfy_metadata_parser.py
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
import pytest
|
||||||
|
import json
|
||||||
|
from py.recipes.parsers.comfy import ComfyMetadataParser
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_parse_metadata_without_loras(monkeypatch):
|
||||||
|
checkpoint_info = {
|
||||||
|
"id": 2224012,
|
||||||
|
"modelId": 1908679,
|
||||||
|
"model": {"name": "SDXL Checkpoint", "type": "checkpoint"},
|
||||||
|
"name": "v1.0",
|
||||||
|
"images": [{"url": "https://image.civitai.com/checkpoints/original=true"}],
|
||||||
|
"baseModel": "sdxl",
|
||||||
|
"downloadUrl": "https://civitai.com/api/download/checkpoint",
|
||||||
|
}
|
||||||
|
|
||||||
|
async def fake_metadata_provider():
|
||||||
|
class Provider:
|
||||||
|
async def get_model_version_info(self, version_id):
|
||||||
|
assert version_id == "2224012"
|
||||||
|
return checkpoint_info, None
|
||||||
|
return Provider()
|
||||||
|
|
||||||
|
monkeypatch.setattr(
|
||||||
|
"py.recipes.parsers.comfy.get_default_metadata_provider",
|
||||||
|
fake_metadata_provider,
|
||||||
|
)
|
||||||
|
|
||||||
|
parser = ComfyMetadataParser()
|
||||||
|
|
||||||
|
# User provided metadata
|
||||||
|
metadata_json = {
|
||||||
|
"resource-stack": {
|
||||||
|
"class_type": "CheckpointLoaderSimple",
|
||||||
|
"inputs": {"ckpt_name": "urn:air:sdxl:checkpoint:civitai:1908679@2224012"}
|
||||||
|
},
|
||||||
|
"6": {
|
||||||
|
"class_type": "smZ CLIPTextEncode",
|
||||||
|
"inputs": {"text": "Positive prompt content"},
|
||||||
|
"_meta": {"title": "Positive"}
|
||||||
|
},
|
||||||
|
"7": {
|
||||||
|
"class_type": "smZ CLIPTextEncode",
|
||||||
|
"inputs": {"text": "Negative prompt content"},
|
||||||
|
"_meta": {"title": "Negative"}
|
||||||
|
},
|
||||||
|
"11": {
|
||||||
|
"class_type": "KSampler",
|
||||||
|
"inputs": {
|
||||||
|
"sampler_name": "euler_ancestral",
|
||||||
|
"scheduler": "normal",
|
||||||
|
"seed": 904124997,
|
||||||
|
"steps": 35,
|
||||||
|
"cfg": 6,
|
||||||
|
"denoise": 0.1,
|
||||||
|
"model": ["resource-stack", 0],
|
||||||
|
"positive": ["6", 0],
|
||||||
|
"negative": ["7", 0],
|
||||||
|
"latent_image": ["21", 0]
|
||||||
|
},
|
||||||
|
"_meta": {"title": "KSampler"}
|
||||||
|
},
|
||||||
|
"extraMetadata": json.dumps({
|
||||||
|
"prompt": "One woman, (solo:1.3), ...",
|
||||||
|
"negativePrompt": "lowres, worst quality, ...",
|
||||||
|
"steps": 35,
|
||||||
|
"cfgScale": 6,
|
||||||
|
"sampler": "euler_ancestral",
|
||||||
|
"seed": 904124997,
|
||||||
|
"width": 1024,
|
||||||
|
"height": 1024
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
result = await parser.parse_metadata(json.dumps(metadata_json))
|
||||||
|
|
||||||
|
assert "error" not in result
|
||||||
|
assert result["loras"] == []
|
||||||
|
assert result["checkpoint"] is not None
|
||||||
|
assert int(result["checkpoint"]["modelId"]) == 1908679
|
||||||
|
assert int(result["checkpoint"]["id"]) == 2224012
|
||||||
|
assert result["gen_params"]["prompt"] == "One woman, (solo:1.3), ..."
|
||||||
|
assert result["gen_params"]["steps"] == 35
|
||||||
|
assert result["gen_params"]["size"] == "1024x1024"
|
||||||
|
assert result["from_comfy_metadata"] is True
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_parse_metadata_without_extra_metadata(monkeypatch):
|
||||||
|
async def fake_metadata_provider():
|
||||||
|
class Provider:
|
||||||
|
async def get_model_version_info(self, version_id):
|
||||||
|
return {"model": {"name": "Test"}, "id": version_id}, None
|
||||||
|
return Provider()
|
||||||
|
|
||||||
|
monkeypatch.setattr(
|
||||||
|
"py.recipes.parsers.comfy.get_default_metadata_provider",
|
||||||
|
fake_metadata_provider,
|
||||||
|
)
|
||||||
|
|
||||||
|
parser = ComfyMetadataParser()
|
||||||
|
|
||||||
|
metadata_json = {
|
||||||
|
"node_1": {
|
||||||
|
"class_type": "CheckpointLoaderSimple",
|
||||||
|
"inputs": {"ckpt_name": "urn:air:sdxl:checkpoint:civitai:123@456"}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result = await parser.parse_metadata(json.dumps(metadata_json))
|
||||||
|
|
||||||
|
assert "error" not in result
|
||||||
|
assert result["loras"] == []
|
||||||
|
assert result["checkpoint"]["id"] == "456"
|
||||||
95
tests/services/test_gen_params_merger.py
Normal file
95
tests/services/test_gen_params_merger.py
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
import pytest
|
||||||
|
from py.recipes.merger import GenParamsMerger
|
||||||
|
|
||||||
|
def test_merge_priority():
|
||||||
|
request_params = {"prompt": "from request", "steps": 20}
|
||||||
|
civitai_meta = {"prompt": "from civitai", "cfg": 7.0}
|
||||||
|
embedded_metadata = {"gen_params": {"prompt": "from embedded", "seed": 123}}
|
||||||
|
|
||||||
|
merged = GenParamsMerger.merge(request_params, civitai_meta, embedded_metadata)
|
||||||
|
|
||||||
|
assert merged["prompt"] == "from request"
|
||||||
|
assert merged["steps"] == 20
|
||||||
|
assert merged["cfg"] == 7.0
|
||||||
|
assert merged["seed"] == 123
|
||||||
|
|
||||||
|
def test_merge_no_request_params():
|
||||||
|
civitai_meta = {"prompt": "from civitai", "cfg": 7.0}
|
||||||
|
embedded_metadata = {"gen_params": {"prompt": "from embedded", "seed": 123}}
|
||||||
|
|
||||||
|
merged = GenParamsMerger.merge(None, civitai_meta, embedded_metadata)
|
||||||
|
|
||||||
|
assert merged["prompt"] == "from civitai"
|
||||||
|
assert merged["cfg"] == 7.0
|
||||||
|
assert merged["seed"] == 123
|
||||||
|
|
||||||
|
def test_merge_only_embedded():
|
||||||
|
embedded_metadata = {"gen_params": {"prompt": "from embedded", "seed": 123}}
|
||||||
|
|
||||||
|
merged = GenParamsMerger.merge(None, None, embedded_metadata)
|
||||||
|
|
||||||
|
assert merged["prompt"] == "from embedded"
|
||||||
|
assert merged["seed"] == 123
|
||||||
|
|
||||||
|
def test_merge_raw_embedded():
|
||||||
|
# Test when embedded metadata is just the gen_params themselves
|
||||||
|
embedded_metadata = {"prompt": "from raw embedded", "seed": 456}
|
||||||
|
|
||||||
|
merged = GenParamsMerger.merge(None, None, embedded_metadata)
|
||||||
|
|
||||||
|
assert merged["prompt"] == "from raw embedded"
|
||||||
|
assert merged["seed"] == 456
|
||||||
|
|
||||||
|
def test_merge_none_values():
|
||||||
|
merged = GenParamsMerger.merge(None, None, None)
|
||||||
|
assert merged == {}
|
||||||
|
|
||||||
|
def test_merge_filters_blacklisted_keys():
|
||||||
|
request_params = {"prompt": "test", "id": "should-be-removed", "checkpoint": "should-not-be-here"}
|
||||||
|
civitai_meta = {"cfg": 7, "url": "remove-me"}
|
||||||
|
embedded_metadata = {"seed": 123, "hash": "remove-also"}
|
||||||
|
|
||||||
|
merged = GenParamsMerger.merge(request_params, civitai_meta, embedded_metadata)
|
||||||
|
|
||||||
|
assert "prompt" in merged
|
||||||
|
assert "cfg" in merged
|
||||||
|
assert "seed" in merged
|
||||||
|
assert "id" not in merged
|
||||||
|
assert "url" not in merged
|
||||||
|
assert "hash" not in merged
|
||||||
|
assert "checkpoint" not in merged
|
||||||
|
|
||||||
|
def test_merge_filters_meta_and_normalizes_keys():
|
||||||
|
civitai_meta = {
|
||||||
|
"prompt": "masterpiece",
|
||||||
|
"cfgScale": 5,
|
||||||
|
"clipSkip": 2,
|
||||||
|
"negativePrompt": "low quality",
|
||||||
|
"meta": {"irrelevant": "data"},
|
||||||
|
"Size": "1024x1024",
|
||||||
|
"draft": False,
|
||||||
|
"workflow": "txt2img",
|
||||||
|
"civitaiResources": [{"type": "checkpoint"}]
|
||||||
|
}
|
||||||
|
request_params = {
|
||||||
|
"cfg_scale": 5.0,
|
||||||
|
"clip_skip": "2",
|
||||||
|
"Steps": 30
|
||||||
|
}
|
||||||
|
|
||||||
|
merged = GenParamsMerger.merge(request_params, civitai_meta)
|
||||||
|
|
||||||
|
assert "meta" not in merged
|
||||||
|
assert "cfgScale" not in merged
|
||||||
|
assert "clipSkip" not in merged
|
||||||
|
assert "negativePrompt" not in merged
|
||||||
|
assert "Size" not in merged
|
||||||
|
assert "draft" not in merged
|
||||||
|
assert "workflow" not in merged
|
||||||
|
assert "civitaiResources" not in merged
|
||||||
|
|
||||||
|
assert merged["cfg_scale"] == 5.0 # From request_params
|
||||||
|
assert merged["clip_skip"] == "2" # From request_params
|
||||||
|
assert merged["negative_prompt"] == "low quality" # Normalized from civitai_meta
|
||||||
|
assert merged["size"] == "1024x1024" # Normalized from civitai_meta
|
||||||
|
assert merged["steps"] == 30 # Normalized from request_params
|
||||||
104
tests/services/test_no_tags_filter.py
Normal file
104
tests/services/test_no_tags_filter.py
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
import pytest
|
||||||
|
from py.services.model_query import ModelFilterSet, FilterCriteria
|
||||||
|
from py.services.recipe_scanner import RecipeScanner
|
||||||
|
from pathlib import Path
|
||||||
|
from py.config import config
|
||||||
|
import asyncio
|
||||||
|
from types import SimpleNamespace
|
||||||
|
|
||||||
|
class StubSettings:
|
||||||
|
def get(self, key, default=None):
|
||||||
|
return default
|
||||||
|
|
||||||
|
# --- Model Filtering Tests ---
|
||||||
|
|
||||||
|
def test_model_filter_set_no_tags_include():
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["tag1"]},
|
||||||
|
{"name": "m2", "tags": []},
|
||||||
|
{"name": "m3", "tags": None},
|
||||||
|
{"name": "m4", "tags": ["tag2"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Include __no_tags__
|
||||||
|
criteria = FilterCriteria(tags={"__no_tags__": "include"})
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
assert len(result) == 2
|
||||||
|
assert {item["name"] for item in result} == {"m2", "m3"}
|
||||||
|
|
||||||
|
def test_model_filter_set_no_tags_exclude():
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["tag1"]},
|
||||||
|
{"name": "m2", "tags": []},
|
||||||
|
{"name": "m3", "tags": None},
|
||||||
|
{"name": "m4", "tags": ["tag2"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Exclude __no_tags__
|
||||||
|
criteria = FilterCriteria(tags={"__no_tags__": "exclude"})
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
assert len(result) == 2
|
||||||
|
assert {item["name"] for item in result} == {"m1", "m4"}
|
||||||
|
|
||||||
|
def test_model_filter_set_no_tags_mixed():
|
||||||
|
filter_set = ModelFilterSet(StubSettings())
|
||||||
|
data = [
|
||||||
|
{"name": "m1", "tags": ["tag1"]},
|
||||||
|
{"name": "m2", "tags": []},
|
||||||
|
{"name": "m3", "tags": None},
|
||||||
|
{"name": "m4", "tags": ["tag1", "tag2"]},
|
||||||
|
]
|
||||||
|
|
||||||
|
# Include tag1 AND __no_tags__
|
||||||
|
criteria = FilterCriteria(tags={"tag1": "include", "__no_tags__": "include"})
|
||||||
|
result = filter_set.apply(data, criteria)
|
||||||
|
# m1 (tag1), m2 (no tags), m3 (no tags), m4 (tag1)
|
||||||
|
assert len(result) == 4
|
||||||
|
|
||||||
|
# --- Recipe Filtering Tests ---
|
||||||
|
|
||||||
|
class StubLoraScanner:
|
||||||
|
def __init__(self):
|
||||||
|
self._cache = SimpleNamespace(raw_data=[], version_index={})
|
||||||
|
async def get_cached_data(self):
|
||||||
|
return self._cache
|
||||||
|
async def refresh_cache(self, force=False):
|
||||||
|
pass
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def recipe_scanner(tmp_path, monkeypatch):
|
||||||
|
monkeypatch.setattr(config, "loras_roots", [str(tmp_path)])
|
||||||
|
stub = StubLoraScanner()
|
||||||
|
scanner = RecipeScanner(lora_scanner=stub)
|
||||||
|
return scanner
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_recipe_scanner_no_tags_filter(recipe_scanner):
|
||||||
|
scanner = recipe_scanner
|
||||||
|
|
||||||
|
# Mock some recipe data
|
||||||
|
recipes = [
|
||||||
|
{"id": "r1", "tags": ["tag1"], "title": "R1"},
|
||||||
|
{"id": "r2", "tags": [], "title": "R2"},
|
||||||
|
{"id": "r3", "tags": None, "title": "R3"},
|
||||||
|
]
|
||||||
|
|
||||||
|
# We need to inject these into the scanner's cache
|
||||||
|
# Since get_paginated_data calls get_cached_data() which we stubbed
|
||||||
|
scanner._cache = SimpleNamespace(
|
||||||
|
raw_data=recipes,
|
||||||
|
sorted_by_date=recipes,
|
||||||
|
sorted_by_name=recipes
|
||||||
|
)
|
||||||
|
|
||||||
|
# Test Include __no_tags__
|
||||||
|
result = await scanner.get_paginated_data(page=1, page_size=10, filters={"tags": {"__no_tags__": "include"}})
|
||||||
|
assert len(result["items"]) == 2
|
||||||
|
assert {item["id"] for item in result["items"]} == {"r2", "r3"}
|
||||||
|
|
||||||
|
# Test Exclude __no_tags__
|
||||||
|
result = await scanner.get_paginated_data(page=1, page_size=10, filters={"tags": {"__no_tags__": "exclude"}})
|
||||||
|
assert len(result["items"]) == 1
|
||||||
|
assert result["items"][0]["id"] == "r1"
|
||||||
282
tests/services/test_recipe_repair.py
Normal file
282
tests/services/test_recipe_repair.py
Normal file
@@ -0,0 +1,282 @@
|
|||||||
|
import pytest
|
||||||
|
import asyncio
|
||||||
|
from unittest.mock import AsyncMock, MagicMock
|
||||||
|
from py.services.recipe_scanner import RecipeScanner
|
||||||
|
from types import SimpleNamespace
|
||||||
|
|
||||||
|
# We define these here to help with spec= if needed
|
||||||
|
class MockCivitaiClient:
|
||||||
|
async def get_image_info(self, image_id):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class MockPersistenceService:
|
||||||
|
async def save_recipe(self, recipe):
|
||||||
|
pass
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_civitai_client():
|
||||||
|
client = MagicMock(spec=MockCivitaiClient)
|
||||||
|
client.get_image_info = AsyncMock()
|
||||||
|
return client
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_metadata_provider():
|
||||||
|
provider = MagicMock()
|
||||||
|
provider.get_model_version_info = AsyncMock(return_value=(None, None))
|
||||||
|
provider.get_model_by_hash = AsyncMock(return_value=(None, None))
|
||||||
|
return provider
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def recipe_scanner():
|
||||||
|
lora_scanner = MagicMock()
|
||||||
|
lora_scanner.get_cached_data = AsyncMock(return_value=SimpleNamespace(raw_data=[]))
|
||||||
|
|
||||||
|
scanner = RecipeScanner(lora_scanner=lora_scanner)
|
||||||
|
return scanner
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def setup_scanner(recipe_scanner, mock_civitai_client, mock_metadata_provider, monkeypatch):
|
||||||
|
monkeypatch.setattr(recipe_scanner, "_get_civitai_client", AsyncMock(return_value=mock_civitai_client))
|
||||||
|
|
||||||
|
# Wrap the real method with a mock so we can check calls but still execute it
|
||||||
|
real_save = recipe_scanner._save_recipe_persistently
|
||||||
|
mock_save = AsyncMock(side_effect=real_save)
|
||||||
|
monkeypatch.setattr(recipe_scanner, "_save_recipe_persistently", mock_save)
|
||||||
|
|
||||||
|
monkeypatch.setattr("py.recipes.enrichment.get_default_metadata_provider", AsyncMock(return_value=mock_metadata_provider))
|
||||||
|
|
||||||
|
# Mock get_recipe_json_path to avoid file system issues in tests
|
||||||
|
recipe_scanner.get_recipe_json_path = AsyncMock(return_value="/tmp/test_recipe.json")
|
||||||
|
# Mock open to avoid actual file writing
|
||||||
|
monkeypatch.setattr("builtins.open", MagicMock())
|
||||||
|
monkeypatch.setattr("json.dump", MagicMock())
|
||||||
|
monkeypatch.setattr("os.path.exists", MagicMock(return_value=False)) # avoid EXIF logic
|
||||||
|
|
||||||
|
return recipe_scanner, mock_civitai_client, mock_metadata_provider
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_repair_all_recipes_skip_up_to_date(setup_scanner):
    """A recipe already at the current REPAIR_VERSION is counted as skipped and never re-saved."""
    scanner, _, _ = setup_scanner

    current_recipe = {
        "id": "r1",
        "repair_version": RecipeScanner.REPAIR_VERSION,
        "title": "Up to date",
    }
    scanner._cache = SimpleNamespace(raw_data=[current_recipe])

    summary = await scanner.repair_all_recipes()

    assert summary["skipped"] == 1
    assert summary["repaired"] == 0
    scanner._save_recipe_persistently.assert_not_called()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_repair_all_recipes_with_enriched_checkpoint_id(setup_scanner):
    """A recipe with a Civitai image source is enriched via modelVersionId lookup.

    The image info supplies a modelVersionId; the metadata provider then returns
    the full version record, and the saved checkpoint must carry the enriched
    camelCase fields with the legacy keys (name/version/hash/file_name) removed.
    """
    recipe_scanner, mock_civitai_client, mock_metadata_provider = setup_scanner

    recipe = {
        "id": "r1",
        "title": "Old Recipe",
        "source_url": "https://civitai.com/images/12345",
        "checkpoint": None,
        "gen_params": {"prompt": ""}
    }
    recipe_scanner._cache = SimpleNamespace(raw_data=[recipe])

    # Mock image info returning modelVersionId
    mock_civitai_client.get_image_info.return_value = {
        "modelVersionId": 5678,
        "meta": {"prompt": "a beautiful forest", "Checkpoint": "basic_name.safetensors"}
    }

    # Mock metadata provider returning full info
    mock_metadata_provider.get_model_version_info.return_value = ({
        "id": 5678,
        "modelId": 1234,
        "name": "v1.0",
        "model": {"name": "Full Model Name"},
        "baseModel": "SDXL 1.0",
        "images": [{"url": "https://image.url/thumb.jpg"}],
        "files": [{"type": "Model", "hashes": {"SHA256": "ABCDEF"}, "name": "full_filename.safetensors"}]
    }, None)

    # Run
    results = await recipe_scanner.repair_all_recipes()

    # Verify: the version id is passed as a string to the provider.
    assert results["repaired"] == 1
    mock_metadata_provider.get_model_version_info.assert_called_with("5678")

    saved_recipe = recipe_scanner._save_recipe_persistently.call_args[0][0]
    checkpoint = saved_recipe["checkpoint"]
    assert checkpoint["modelName"] == "Full Model Name"
    assert checkpoint["modelVersionName"] == "v1.0"
    assert checkpoint["modelId"] == 1234
    assert checkpoint["modelVersionId"] == 5678
    assert checkpoint["type"] == "checkpoint"
    # Legacy/raw keys must be stripped by the enrichment step.
    assert "name" not in checkpoint
    assert "version" not in checkpoint
    assert "hash" not in checkpoint
    assert "file_name" not in checkpoint
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_repair_all_recipes_with_enriched_checkpoint_hash(setup_scanner):
    """Without a source URL, the repair falls back to the gen_params "Model hash".

    The metadata provider is queried by hash and the returned version record
    populates the enriched checkpoint fields.
    """
    recipe_scanner, mock_civitai_client, mock_metadata_provider = setup_scanner

    recipe = {
        "id": "r1",
        "title": "Embedded Only",
        "checkpoint": None,
        "gen_params": {
            "prompt": "",
            "Model hash": "hash123"
        }
    }
    recipe_scanner._cache = SimpleNamespace(raw_data=[recipe])

    # Mock metadata provider lookup by hash
    mock_metadata_provider.get_model_by_hash.return_value = ({
        "id": 999,
        "modelId": 888,
        "name": "v2.0",
        "model": {"name": "Hashed Model"},
        "baseModel": "SD 1.5",
        "files": [{"type": "Model", "hashes": {"SHA256": "hash123"}, "name": "hashed.safetensors"}]
    }, None)

    # Run
    results = await recipe_scanner.repair_all_recipes()

    # Verify
    assert results["repaired"] == 1
    mock_metadata_provider.get_model_by_hash.assert_called_with("hash123")

    saved_recipe = recipe_scanner._save_recipe_persistently.call_args[0][0]
    checkpoint = saved_recipe["checkpoint"]
    assert checkpoint["modelName"] == "Hashed Model"
    assert checkpoint["modelVersionName"] == "v2.0"
    assert checkpoint["modelId"] == 888
    assert checkpoint["type"] == "checkpoint"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_repair_all_recipes_fallback_to_basic(setup_scanner):
    """When no remote metadata can be found, repair keeps the raw checkpoint name.

    The recipe still counts as repaired, but the checkpoint entry is a basic
    record (name + type) with no enriched model identifiers.
    """
    recipe_scanner, mock_civitai_client, mock_metadata_provider = setup_scanner

    recipe = {
        "id": "r1",
        "title": "No Meta Lookup",
        "checkpoint": None,
        "gen_params": {
            "prompt": "",
            "Checkpoint": "just_a_name.safetensors"
        }
    }
    recipe_scanner._cache = SimpleNamespace(raw_data=[recipe])

    # Mock metadata provider returning nothing
    mock_metadata_provider.get_model_by_hash.return_value = (None, "Model not found")

    # Run
    results = await recipe_scanner.repair_all_recipes()

    # Verify: fallback still marks the recipe repaired, without model ids.
    assert results["repaired"] == 1
    saved_recipe = recipe_scanner._save_recipe_persistently.call_args[0][0]
    assert saved_recipe["checkpoint"]["modelName"] == "just_a_name.safetensors"
    assert saved_recipe["checkpoint"]["type"] == "checkpoint"
    assert "modelId" not in saved_recipe["checkpoint"]
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_repair_all_recipes_progress_callback(setup_scanner):
    """repair_all_recipes emits per-recipe progress events and a final 'completed' event."""
    scanner, _, _ = setup_scanner

    scanner._cache = SimpleNamespace(raw_data=[
        {"id": "r1", "title": "R1", "checkpoint": None},
        {"id": "r2", "title": "R2", "checkpoint": None},
    ])

    events = []

    async def record_progress(data):
        events.append(data)

    await scanner.repair_all_recipes(progress_callback=record_progress)

    # At least one event per recipe, and the last one summarizes the run.
    assert len(events) >= 2
    final_event = events[-1]
    assert final_event["status"] == "completed"
    assert final_event["total"] == 2
    assert final_event["repaired"] == 2
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_repair_all_recipes_strips_runtime_fields(setup_scanner):
    """Repair sanitizes persisted recipes: runtime-only fields are removed.

    UI/runtime keys (inLibrary, localPath, preview_url, thumbnailUrl) must not
    be written back to disk, and a lora's `weight` is renamed to `strength`.
    """
    recipe_scanner, mock_civitai_client, mock_metadata_provider = setup_scanner

    # Recipe with runtime fields
    recipe = {
        "id": "r1",
        "title": "Cleanup Test",
        "checkpoint": {
            "name": "CP",
            "inLibrary": True,
            "localPath": "/path/to/cp",
            "thumbnailUrl": "thumb.jpg"
        },
        "loras": [
            {
                "name": "L1",
                "weight": 0.8,
                "inLibrary": True,
                "localPath": "/path/to/l1",
                "preview_url": "p.jpg"
            }
        ],
        "gen_params": {"prompt": ""}
    }
    recipe_scanner._cache = SimpleNamespace(raw_data=[recipe])
    # Outdated repair_version (0 < REPAIR_VERSION) guarantees the repair pass
    # actually processes this recipe instead of skipping it.
    recipe["repair_version"] = 0

    # Run
    await recipe_scanner.repair_all_recipes()

    # Verify sanitation
    assert recipe_scanner._save_recipe_persistently.called
    saved_recipe = recipe_scanner._save_recipe_persistently.call_args[0][0]

    # 1. Check LORA
    lora = saved_recipe["loras"][0]
    assert "inLibrary" not in lora
    assert "localPath" not in lora
    assert "preview_url" not in lora
    assert "strength" in lora  # weight renamed to strength
    assert lora["strength"] == 0.8

    # 2. Check Checkpoint
    cp = saved_recipe["checkpoint"]
    assert "inLibrary" not in cp
    assert "localPath" not in cp
    assert "thumbnailUrl" not in cp
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_sanitize_recipe_for_storage(recipe_scanner):
    """_sanitize_recipe_for_storage drops runtime keys and renames weight -> strength.

    Note: the sanitizer only removes runtime fields and normalizes existing
    ones — it does not perform enrichment, so the pre-existing checkpoint
    `name` is kept as-is.
    """
    recipe = {
        "loras": [{"name": "L1", "inLibrary": True, "weight": 0.5}],
        "checkpoint": {"name": "CP", "localPath": "/tmp/cp"}
    }

    clean = recipe_scanner._sanitize_recipe_for_storage(recipe)

    assert "inLibrary" not in clean["loras"][0]
    assert "strength" in clean["loras"][0]
    assert clean["loras"][0]["strength"] == 0.5
    assert "localPath" not in clean["checkpoint"]
    # The sanitizer does not rename checkpoint fields; 'name' survives.
    assert clean["checkpoint"]["name"] == "CP"
|
||||||
@@ -349,3 +349,295 @@ def test_enrich_formats_absolute_preview_paths(recipe_scanner, tmp_path):
|
|||||||
enriched = scanner._enrich_lora_entry(dict(lora))
|
enriched = scanner._enrich_lora_entry(dict(lora))
|
||||||
|
|
||||||
assert enriched["preview_url"] == config.get_preview_static_url(str(preview_path))
|
assert enriched["preview_url"] == config.get_preview_static_url(str(preview_path))
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_initialize_waits_for_lora_scanner(monkeypatch):
    """RecipeScanner.initialize_in_background drives the lora scanner's init.

    The stub flips its own ready state when initialized; afterwards the recipe
    scanner must have initialized exactly once and built its own cache.
    """
    ready_flag = asyncio.Event()
    call_count = 0

    class StubLoraScanner:
        def __init__(self):
            self._cache = None
            self._is_initializing = True

        async def initialize_in_background(self):
            nonlocal call_count
            call_count += 1
            # Yield once to mimic asynchronous startup work.
            await asyncio.sleep(0)
            self._cache = SimpleNamespace(raw_data=[])
            self._is_initializing = False
            ready_flag.set()

    lora_scanner = StubLoraScanner()
    scanner = RecipeScanner(lora_scanner=lora_scanner)

    await scanner.initialize_in_background()

    assert ready_flag.is_set()
    assert call_count == 1
    assert scanner._cache is not None
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_invalid_model_version_marked_deleted_and_not_retried(monkeypatch, recipe_scanner):
    """A lora whose Civitai version lookup fails is flagged isDeleted once.

    The first _update_lora_information call performs the (failing) remote hash
    lookup and marks the entry deleted; subsequent calls must not retry.
    """
    scanner, _ = recipe_scanner
    recipes_dir = Path(config.loras_roots[0]) / "recipes"
    recipes_dir.mkdir(parents=True, exist_ok=True)

    recipe = {
        "id": "invalid-version",
        "file_path": str(recipes_dir / "invalid-version.webp"),
        "title": "Invalid",
        "modified": 0.0,
        "created_date": 0.0,
        "loras": [{"modelVersionId": 999, "file_name": "", "hash": ""}],
    }
    await scanner.add_recipe(dict(recipe))

    call_count = 0

    async def fake_get_hash(model_version_id):
        # Simulate Civitai reporting the version as gone.
        nonlocal call_count
        call_count += 1
        return None

    monkeypatch.setattr(scanner, "_get_hash_from_civitai", fake_get_hash)

    metadata_updated = await scanner._update_lora_information(recipe)

    assert metadata_updated is True
    assert recipe["loras"][0]["isDeleted"] is True
    assert call_count == 1

    # Subsequent calls should skip remote lookup once marked deleted
    metadata_updated_again = await scanner._update_lora_information(recipe)
    assert metadata_updated_again is False
    assert call_count == 1
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_load_recipe_persists_deleted_flag_on_invalid_version(monkeypatch, recipe_scanner, tmp_path):
    """Loading a recipe with an unresolvable lora version writes isDeleted to disk.

    _load_recipe_file must both return the flagged entry and persist the flag
    back into the .recipe.json file so the lookup is not repeated later.
    """
    scanner, _ = recipe_scanner
    recipes_dir = Path(config.loras_roots[0]) / "recipes"
    recipes_dir.mkdir(parents=True, exist_ok=True)

    recipe_id = "persist-invalid"
    recipe_path = recipes_dir / f"{recipe_id}.recipe.json"
    recipe_data = {
        "id": recipe_id,
        "file_path": str(recipes_dir / f"{recipe_id}.webp"),
        "title": "Invalid",
        "modified": 0.0,
        "created_date": 0.0,
        "loras": [{"modelVersionId": 1234, "file_name": "", "hash": ""}],
    }
    recipe_path.write_text(json.dumps(recipe_data))

    async def fake_get_hash(model_version_id):
        # Remote lookup always misses for this test.
        return None

    monkeypatch.setattr(scanner, "_get_hash_from_civitai", fake_get_hash)

    loaded = await scanner._load_recipe_file(str(recipe_path))

    assert loaded["loras"][0]["isDeleted"] is True

    # The flag must also have been written back to the JSON on disk.
    persisted = json.loads(recipe_path.read_text())
    assert persisted["loras"][0]["isDeleted"] is True
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_update_lora_filename_by_hash_updates_affected_recipes(tmp_path: Path, recipe_scanner):
    """Renaming a lora by hash updates only recipes that reference that hash.

    Covers: case-insensitive hash matching, on-disk JSON update, untouched
    sibling entries/recipes, and in-memory cache consistency.
    """
    scanner, _ = recipe_scanner
    recipes_dir = Path(config.loras_roots[0]) / "recipes"
    recipes_dir.mkdir(parents=True, exist_ok=True)

    # Recipe 1: Contains the LoRA with hash "hash1"
    recipe1_id = "recipe1"
    recipe1_path = recipes_dir / f"{recipe1_id}.recipe.json"
    recipe1_data = {
        "id": recipe1_id,
        "file_path": str(tmp_path / "img1.png"),
        "title": "Recipe 1",
        "modified": 0.0,
        "created_date": 0.0,
        "loras": [
            {"file_name": "old_name", "hash": "hash1"},
            {"file_name": "other_lora", "hash": "hash2"}
        ],
    }
    recipe1_path.write_text(json.dumps(recipe1_data))
    await scanner.add_recipe(dict(recipe1_data))

    # Recipe 2: Does NOT contain the LoRA
    recipe2_id = "recipe2"
    recipe2_path = recipes_dir / f"{recipe2_id}.recipe.json"
    recipe2_data = {
        "id": recipe2_id,
        "file_path": str(tmp_path / "img2.png"),
        "title": "Recipe 2",
        "modified": 0.0,
        "created_date": 0.0,
        "loras": [
            {"file_name": "other_lora", "hash": "hash2"}
        ],
    }
    recipe2_path.write_text(json.dumps(recipe2_data))
    await scanner.add_recipe(dict(recipe2_data))

    # Update LoRA name for "hash1" (using different case to test normalization)
    new_name = "new_name"
    file_count, cache_count = await scanner.update_lora_filename_by_hash("HASH1", new_name)

    assert file_count == 1
    assert cache_count == 1

    # Check file on disk
    persisted1 = json.loads(recipe1_path.read_text())
    assert persisted1["loras"][0]["file_name"] == new_name
    assert persisted1["loras"][1]["file_name"] == "other_lora"

    # Verify Recipe 2 unchanged
    persisted2 = json.loads(recipe2_path.read_text())
    assert persisted2["loras"][0]["file_name"] == "other_lora"

    # The in-memory cache must reflect the rename as well.
    cache = await scanner.get_cached_data()
    cached1 = next(r for r in cache.raw_data if r["id"] == recipe1_id)
    assert cached1["loras"][0]["file_name"] == new_name
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_get_paginated_data_filters_by_favorite(recipe_scanner):
    """The 'favorite' filter is applied only when truthy; False means no filtering."""
    scanner, _ = recipe_scanner

    # Add a normal recipe
    await scanner.add_recipe({
        "id": "regular",
        "file_path": "path/regular.png",
        "title": "Regular Recipe",
        "modified": 1.0,
        "created_date": 1.0,
        "loras": [],
    })

    # Add a favorite recipe
    await scanner.add_recipe({
        "id": "favorite",
        "file_path": "path/favorite.png",
        "title": "Favorite Recipe",
        "modified": 2.0,
        "created_date": 2.0,
        "loras": [],
        "favorite": True
    })

    # Yield once so any pending cache update can settle.
    await asyncio.sleep(0)

    # Test without filter (should return both)
    result_all = await scanner.get_paginated_data(page=1, page_size=10)
    assert len(result_all["items"]) == 2

    # Test with favorite filter
    result_fav = await scanner.get_paginated_data(page=1, page_size=10, filters={"favorite": True})
    assert len(result_fav["items"]) == 1
    assert result_fav["items"][0]["id"] == "favorite"

    # favorite=False is treated as "no favorite filter" (the implementation
    # only filters when the value is truthy), so both recipes come back.
    result_fav_false = await scanner.get_paginated_data(page=1, page_size=10, filters={"favorite": False})
    assert len(result_fav_false["items"]) == 2
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_get_paginated_data_filters_by_prompt(recipe_scanner):
    """Search with search_options['prompt']=True matches both prompt and negative_prompt."""
    scanner, _ = recipe_scanner

    # Add a recipe with a specific prompt
    await scanner.add_recipe({
        "id": "prompt-recipe",
        "file_path": "path/prompt.png",
        "title": "Prompt Recipe",
        "modified": 1.0,
        "created_date": 1.0,
        "loras": [],
        "gen_params": {
            "prompt": "a beautiful forest landscape"
        }
    })

    # Add a recipe with a specific negative prompt
    await scanner.add_recipe({
        "id": "neg-prompt-recipe",
        "file_path": "path/neg.png",
        "title": "Negative Prompt Recipe",
        "modified": 2.0,
        "created_date": 2.0,
        "loras": [],
        "gen_params": {
            "negative_prompt": "ugly, blurry mountains"
        }
    })

    await asyncio.sleep(0)

    # Test search in prompt
    result_prompt = await scanner.get_paginated_data(
        page=1, page_size=10, search="forest", search_options={"prompt": True}
    )
    assert len(result_prompt["items"]) == 1
    assert result_prompt["items"][0]["id"] == "prompt-recipe"

    # Test search in negative prompt
    result_neg = await scanner.get_paginated_data(
        page=1, page_size=10, search="mountains", search_options={"prompt": True}
    )
    assert len(result_neg["items"]) == 1
    assert result_neg["items"][0]["id"] == "neg-prompt-recipe"

    # With prompt search disabled, the term only matches titles — so no hits.
    result_disabled = await scanner.get_paginated_data(
        page=1, page_size=10, search="forest", search_options={"prompt": False}
    )
    assert len(result_disabled["items"]) == 0
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_get_paginated_data_sorting(recipe_scanner):
    """sort_by supports name, loras_count, and date keys in both directions."""
    scanner, _ = recipe_scanner

    # Add test recipes
    # Recipe A: Name "Alpha", Date 10, LoRAs 2
    await scanner.add_recipe({
        "id": "A", "title": "Alpha", "created_date": 10.0,
        "loras": [{}, {}], "file_path": "a.png"
    })
    # Recipe B: Name "Beta", Date 20, LoRAs 1
    await scanner.add_recipe({
        "id": "B", "title": "Beta", "created_date": 20.0,
        "loras": [{}], "file_path": "b.png"
    })
    # Recipe C: Name "Gamma", Date 5, LoRAs 3
    await scanner.add_recipe({
        "id": "C", "title": "Gamma", "created_date": 5.0,
        "loras": [{}, {}, {}], "file_path": "c.png"
    })

    await asyncio.sleep(0)

    # Test Name DESC: Gamma, Beta, Alpha
    res = await scanner.get_paginated_data(page=1, page_size=10, sort_by="name:desc")
    assert [i["id"] for i in res["items"]] == ["C", "B", "A"]

    # Test LoRA Count DESC: Gamma (3), Alpha (2), Beta (1)
    res = await scanner.get_paginated_data(page=1, page_size=10, sort_by="loras_count:desc")
    assert [i["id"] for i in res["items"]] == ["C", "A", "B"]

    # Test LoRA Count ASC: Beta (1), Alpha (2), Gamma (3)
    res = await scanner.get_paginated_data(page=1, page_size=10, sort_by="loras_count:asc")
    assert [i["id"] for i in res["items"]] == ["B", "A", "C"]

    # Test Date ASC: Gamma (5), Alpha (10), Beta (20)
    res = await scanner.get_paginated_data(page=1, page_size=10, sort_by="date:asc")
    assert [i["id"] for i in res["items"]] == ["C", "A", "B"]
|
||||||
|
|||||||
@@ -12,7 +12,12 @@ from py.services.recipes.persistence_service import RecipePersistenceService
|
|||||||
|
|
||||||
|
|
||||||
class DummyExifUtils:
|
class DummyExifUtils:
|
||||||
|
def __init__(self):
|
||||||
|
self.appended = None
|
||||||
|
self.optimized_calls = 0
|
||||||
|
|
||||||
def optimize_image(self, image_data, target_width, format, quality, preserve_metadata):
|
def optimize_image(self, image_data, target_width, format, quality, preserve_metadata):
|
||||||
|
self.optimized_calls += 1
|
||||||
return image_data, ".webp"
|
return image_data, ".webp"
|
||||||
|
|
||||||
def append_recipe_metadata(self, image_path, recipe_data):
|
def append_recipe_metadata(self, image_path, recipe_data):
|
||||||
@@ -22,6 +27,46 @@ class DummyExifUtils:
|
|||||||
return {}
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_save_recipe_video_bypasses_optimization(tmp_path):
    """Saving an .mp4 recipe writes raw bytes and skips image-only processing.

    Neither image optimization nor EXIF metadata embedding applies to video,
    so the DummyExifUtils counters must remain untouched.
    """
    exif_utils = DummyExifUtils()

    class DummyScanner:
        # Minimal scanner surface needed by RecipePersistenceService.save_recipe.
        def __init__(self, root):
            self.recipes_dir = str(root)

        async def find_recipes_by_fingerprint(self, fingerprint):
            return []

        async def add_recipe(self, recipe_data):
            return None

    scanner = DummyScanner(tmp_path)
    service = RecipePersistenceService(
        exif_utils=exif_utils,
        card_preview_width=512,
        logger=logging.getLogger("test"),
    )

    metadata = {"base_model": "Flux", "loras": []}
    video_bytes = b"mp4-content"

    result = await service.save_recipe(
        recipe_scanner=scanner,
        image_bytes=video_bytes,
        image_base64=None,
        name="Video Recipe",
        tags=[],
        metadata=metadata,
        extension=".mp4",
    )

    assert result.payload["image_path"].endswith(".mp4")
    assert Path(result.payload["image_path"]).read_bytes() == video_bytes
    assert exif_utils.optimized_calls == 0, "Optimization should be bypassed for video"
    assert exif_utils.appended is None, "Metadata embedding should be bypassed for video"
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_analyze_remote_image_download_failure_cleans_temp(tmp_path, monkeypatch):
|
async def test_analyze_remote_image_download_failure_cleans_temp(tmp_path, monkeypatch):
|
||||||
exif_utils = DummyExifUtils()
|
exif_utils = DummyExifUtils()
|
||||||
@@ -50,7 +95,7 @@ async def test_analyze_remote_image_download_failure_cleans_temp(tmp_path, monke
|
|||||||
|
|
||||||
temp_path = tmp_path / "temp.jpg"
|
temp_path = tmp_path / "temp.jpg"
|
||||||
|
|
||||||
def create_temp_path():
|
def create_temp_path(suffix=".jpg"):
|
||||||
temp_path.write_bytes(b"")
|
temp_path.write_bytes(b"")
|
||||||
return str(temp_path)
|
return str(temp_path)
|
||||||
|
|
||||||
@@ -356,3 +401,138 @@ async def test_save_recipe_from_widget_allows_empty_lora(tmp_path):
|
|||||||
assert stored["loras"] == []
|
assert stored["loras"] == []
|
||||||
assert stored["title"] == "recipe"
|
assert stored["title"] == "recipe"
|
||||||
assert scanner.added and scanner.added[0]["loras"] == []
|
assert scanner.added and scanner.added[0]["loras"] == []
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_move_recipe_updates_paths(tmp_path):
    """move_recipe relocates the image + JSON pair and rewrites stored paths.

    After moving into a nested folder, the payload must report the new folder
    and paths, the old JSON must be gone, and the persisted JSON must carry
    the updated folder and file_path.
    """
    exif_utils = DummyExifUtils()
    recipes_dir = tmp_path / "recipes"
    recipes_dir.mkdir(parents=True, exist_ok=True)

    recipe_id = "move-me"
    image_path = recipes_dir / f"{recipe_id}.webp"
    json_path = recipes_dir / f"{recipe_id}.recipe.json"

    image_path.write_bytes(b"img")
    json_path.write_text(
        json.dumps(
            {
                "id": recipe_id,
                "file_path": str(image_path),
                "title": "Recipe",
                "loras": [],
                "gen_params": {},
                "created_date": 0,
                "modified": 0,
            }
        )
    )

    class MoveScanner:
        # In-memory scanner stub mirroring the subset of RecipeScanner that
        # move_recipe relies on; keeps a single recipe record.
        def __init__(self, root: Path):
            self.recipes_dir = str(root)
            self.recipe = {
                "id": recipe_id,
                "file_path": str(image_path),
                "title": "Recipe",
                "loras": [],
                "gen_params": {},
                "created_date": 0,
                "modified": 0,
                "folder": "",
            }

        async def get_recipe_by_id(self, target_id: str):
            return self.recipe if target_id == recipe_id else None

        async def get_recipe_json_path(self, target_id: str):
            # Search recursively: the JSON moves into a subfolder during the test.
            matches = list(Path(self.recipes_dir).rglob(f"{target_id}.recipe.json"))
            return str(matches[0]) if matches else None

        async def update_recipe_metadata(self, target_id: str, metadata: dict):
            if target_id != recipe_id:
                return False
            self.recipe.update(metadata)
            target_path = await self.get_recipe_json_path(target_id)
            if not target_path:
                return False
            existing = json.loads(Path(target_path).read_text())
            existing.update(metadata)
            Path(target_path).write_text(json.dumps(existing))
            return True

        async def get_cached_data(self, force_refresh: bool = False):  # noqa: ARG002 - signature parity
            return SimpleNamespace(raw_data=[self.recipe])

    scanner = MoveScanner(recipes_dir)
    service = RecipePersistenceService(
        exif_utils=exif_utils,
        card_preview_width=512,
        logger=logging.getLogger("test"),
    )

    target_folder = recipes_dir / "nested"
    result = await service.move_recipe(
        recipe_scanner=scanner, recipe_id=recipe_id, target_path=str(target_folder)
    )

    assert result.payload["folder"] == "nested"
    assert Path(result.payload["json_path"]).parent == target_folder
    assert Path(result.payload["new_file_path"]).parent == target_folder
    assert not json_path.exists()

    stored = json.loads(Path(result.payload["json_path"]).read_text())
    assert stored["folder"] == "nested"
    assert stored["file_path"] == result.payload["new_file_path"]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_analyze_remote_video(tmp_path):
    """analyze_remote_image handles a Civitai video URL.

    The dummy client reports type 'video'; the service must flag is_video,
    derive the .mp4 extension, and still produce a base64 payload from the
    downloaded bytes.
    """
    exif_utils = DummyExifUtils()

    class DummyFactory:
        # Parser factory stub: every parse yields an empty lora list.
        def create_parser(self, metadata):
            async def parse_metadata(m, recipe_scanner):
                return {"loras": []}
            return SimpleNamespace(parse_metadata=parse_metadata)

    async def downloader_factory():
        class Downloader:
            async def download_file(self, url, path, use_auth=False):
                Path(path).write_bytes(b"video-content")
                return True, "success"

        return Downloader()

    service = RecipeAnalysisService(
        exif_utils=exif_utils,
        recipe_parser_factory=DummyFactory(),
        downloader_factory=downloader_factory,
        metadata_collector=None,
        metadata_processor_cls=None,
        metadata_registry_cls=None,
        standalone_mode=False,
        logger=logging.getLogger("test"),
    )

    class DummyClient:
        async def get_image_info(self, image_id):
            # Civitai image-info response for a video post.
            return {
                "url": "https://civitai.com/video.mp4",
                "type": "video",
                "meta": {"prompt": "video prompt"},
            }

    class DummyScanner:
        async def find_recipes_by_fingerprint(self, fingerprint):
            return []

    result = await service.analyze_remote_image(
        url="https://civitai.com/images/123",
        recipe_scanner=DummyScanner(),
        civitai_client=DummyClient(),
    )

    assert result.payload["is_video"] is True
    assert result.payload["extension"] == ".mp4"
    assert result.payload["image_base64"] is not None
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
import os
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from py.services.base_model_service import BaseModelService
|
from py.services.base_model_service import BaseModelService
|
||||||
@@ -42,8 +43,8 @@ async def test_search_relative_paths_supports_multiple_tokens():
|
|||||||
matching = await service.search_relative_paths("flux detail")
|
matching = await service.search_relative_paths("flux detail")
|
||||||
|
|
||||||
assert matching == [
|
assert matching == [
|
||||||
"flux/detail-model.safetensors",
|
f"flux{os.sep}detail-model.safetensors",
|
||||||
"detail/flux-trained.safetensors",
|
f"detail{os.sep}flux-trained.safetensors",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@@ -60,4 +61,4 @@ async def test_search_relative_paths_excludes_tokens():
|
|||||||
|
|
||||||
matching = await service.search_relative_paths("flux -detail")
|
matching = await service.search_relative_paths("flux -detail")
|
||||||
|
|
||||||
assert matching == ["flux/keep-me.safetensors"]
|
assert matching == [f"flux{os.sep}keep-me.safetensors"]
|
||||||
|
|||||||
92
tests/services/test_root_folder_recursive.py
Normal file
92
tests/services/test_root_folder_recursive.py
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
import pytest
|
||||||
|
from py.services.model_query import ModelFilterSet, FilterCriteria
|
||||||
|
from py.services.recipe_scanner import RecipeScanner
|
||||||
|
from types import SimpleNamespace
|
||||||
|
|
||||||
|
# Mock settings
|
||||||
|
class MockSettings:
    """Stand-in for the settings service: no keys are stored, so every
    lookup falls through to the caller-supplied default."""

    def get(self, key, default=None):
        # Nothing is ever configured in tests; the default always wins.
        return default
|
||||||
|
|
||||||
|
# --- Model Filtering Tests ---
|
||||||
|
|
||||||
|
def test_model_filter_set_root_recursive_true():
    """With recursive=True at the root folder, subfolder items are included."""
    filter_set = ModelFilterSet(MockSettings())
    items = [
        {"model_name": "root_item", "folder": ""},
        {"model_name": "sub_item", "folder": "sub"},
    ]
    criteria = FilterCriteria(folder="", search_options={"recursive": True})

    result = filter_set.apply(items, criteria)

    assert len(result) == 2
    assert any(i["model_name"] == "root_item" for i in result)
    assert any(i["model_name"] == "sub_item" for i in result)
|
||||||
|
|
||||||
|
def test_model_filter_set_root_recursive_false():
|
||||||
|
filter_set = ModelFilterSet(MockSettings())
|
||||||
|
items = [
|
||||||
|
{"model_name": "root_item", "folder": ""},
|
||||||
|
{"model_name": "sub_item", "folder": "sub"},
|
||||||
|
]
|
||||||
|
criteria = FilterCriteria(folder="", search_options={"recursive": False})
|
||||||
|
|
||||||
|
result = filter_set.apply(items, criteria)
|
||||||
|
|
||||||
|
assert len(result) == 1
|
||||||
|
assert result[0]["model_name"] == "root_item"
|
||||||
|
|
||||||
|
# --- Recipe Filtering Tests ---
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_recipe_scanner_root_recursive_true():
|
||||||
|
# Mock LoraScanner
|
||||||
|
class StubLoraScanner:
|
||||||
|
async def get_cached_data(self):
|
||||||
|
return SimpleNamespace(raw_data=[])
|
||||||
|
|
||||||
|
scanner = RecipeScanner(lora_scanner=StubLoraScanner())
|
||||||
|
# Manually populate cache for testing get_paginated_data logic
|
||||||
|
scanner._cache = SimpleNamespace(
|
||||||
|
raw_data=[
|
||||||
|
{"id": "r1", "title": "root_recipe", "folder": "", "modified": 1.0, "created_date": 1.0, "loras": []},
|
||||||
|
{"id": "r2", "title": "sub_recipe", "folder": "sub", "modified": 2.0, "created_date": 2.0, "loras": []},
|
||||||
|
],
|
||||||
|
sorted_by_date=[
|
||||||
|
{"id": "r2", "title": "sub_recipe", "folder": "sub", "modified": 2.0, "created_date": 2.0, "loras": []},
|
||||||
|
{"id": "r1", "title": "root_recipe", "folder": "", "modified": 1.0, "created_date": 1.0, "loras": []},
|
||||||
|
],
|
||||||
|
sorted_by_name=[],
|
||||||
|
version_index={}
|
||||||
|
)
|
||||||
|
|
||||||
|
result = await scanner.get_paginated_data(page=1, page_size=10, folder="", recursive=True)
|
||||||
|
|
||||||
|
assert len(result["items"]) == 2
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_recipe_scanner_root_recursive_false():
|
||||||
|
# Mock LoraScanner
|
||||||
|
class StubLoraScanner:
|
||||||
|
async def get_cached_data(self):
|
||||||
|
return SimpleNamespace(raw_data=[])
|
||||||
|
|
||||||
|
scanner = RecipeScanner(lora_scanner=StubLoraScanner())
|
||||||
|
scanner._cache = SimpleNamespace(
|
||||||
|
raw_data=[
|
||||||
|
{"id": "r1", "title": "root_recipe", "folder": "", "modified": 1.0, "created_date": 1.0, "loras": []},
|
||||||
|
{"id": "r2", "title": "sub_recipe", "folder": "sub", "modified": 2.0, "created_date": 2.0, "loras": []},
|
||||||
|
],
|
||||||
|
sorted_by_date=[
|
||||||
|
{"id": "r2", "title": "sub_recipe", "folder": "sub", "modified": 2.0, "created_date": 2.0, "loras": []},
|
||||||
|
{"id": "r1", "title": "root_recipe", "folder": "", "modified": 1.0, "created_date": 1.0, "loras": []},
|
||||||
|
],
|
||||||
|
sorted_by_name=[],
|
||||||
|
version_index={}
|
||||||
|
)
|
||||||
|
|
||||||
|
result = await scanner.get_paginated_data(page=1, page_size=10, folder="", recursive=False)
|
||||||
|
|
||||||
|
assert len(result["items"]) == 1
|
||||||
|
assert result["items"][0]["id"] == "r1"
|
||||||
@@ -311,7 +311,7 @@ app.registerExtension({
|
|||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
// If no ',,' delimiter, treat the entire message as one group
|
// If no ',,' delimiter, treat the entire message as one group
|
||||||
const existing = existingTagMap[message.trim()];
|
const existing = consumeExistingState(message.trim());
|
||||||
tagArray = [{
|
tagArray = [{
|
||||||
text: message.trim(),
|
text: message.trim(),
|
||||||
// Use existing values if available, otherwise use defaults
|
// Use existing values if available, otherwise use defaults
|
||||||
|
|||||||
Reference in New Issue
Block a user