diff --git a/GEMINI.md b/GEMINI.md new file mode 100644 index 00000000..c0239352 --- /dev/null +++ b/GEMINI.md @@ -0,0 +1,84 @@ +# ComfyUI LoRA Manager + +## Project Overview + +ComfyUI LoRA Manager is a comprehensive extension for ComfyUI that streamlines the organization, downloading, and application of LoRA models. It functions as both a custom node within ComfyUI and a standalone application. + +**Key Features:** +* **Model Management:** Browse, organize, and download LoRA models (and Checkpoints/Embeddings) from Civitai and CivArchive. +* **Visualization:** Preview images, videos, and trigger words. +* **Workflow Integration:** "One-click" integration into ComfyUI workflows, preserving generation parameters. +* **Recipe System:** Save and share LoRA combinations as "recipes". +* **Architecture:** Hybrid Python backend (API, file management) and JavaScript/HTML frontend (Web UI). + +## Directory Structure + +* `py/`: Core Python backend source code. + * `lora_manager.py`: Main entry point for the ComfyUI node. + * `routes/`: API route definitions (using `aiohttp` in standalone, or ComfyUI's server). + * `services/`: Business logic (downloading, metadata, scanning). + * `nodes/`: ComfyUI custom node implementations. +* `static/`: Frontend static assets (CSS, JS, Images). +* `templates/`: HTML templates (Jinja2). +* `locales/`: Internationalization JSON files. +* `web/comfyui/`: JavaScript extensions specifically for the ComfyUI interface. +* `standalone.py`: Entry point for running the manager as a standalone web app. +* `tests/`: Backend tests. +* `requirements.txt`: Python runtime dependencies. +* `package.json`: Frontend development dependencies and test scripts. + +## Building and Running + +### Prerequisites +* Python 3.8+ +* Node.js (only for running frontend tests) + +### Backend Setup +1. 
Install Python dependencies: + ```bash + pip install -r requirements.txt + ``` + +### Running in Standalone Mode +You can run the manager independently of ComfyUI for development or management purposes. +```bash +python standalone.py --port 8188 +``` + +### Running in ComfyUI +Ensure the folder is located in `ComfyUI/custom_nodes/`. ComfyUI will automatically load it upon startup. + +## Testing + +### Backend Tests (Pytest) +1. Install development dependencies: + ```bash + pip install -r requirements-dev.txt + ``` +2. Run tests: + ```bash + pytest + ``` + * Coverage reports are generated in `coverage/backend/`. + +### Frontend Tests (Vitest) +1. Install Node dependencies: + ```bash + npm install + ``` +2. Run tests: + ```bash + npm run test + ``` +3. Run coverage: + ```bash + npm run test:coverage + ``` + +## Development Conventions + +* **Python Style:** Follow PEP 8. Use snake_case for files/functions and PascalCase for classes. +* **Frontend:** Standard ES modules. UI components often end in `_widget.js`. +* **Configuration:** User settings are stored in `settings.json`. Developers should reference `settings.json.example`. +* **Localization:** Update `locales/*.json` and run `scripts/sync_translation_keys.py` when changing UI text. +* **Documentation:** Architecture details are in `docs/architecture/` and `IFLOW.md`. 
diff --git a/__init__.py b/__init__.py index 8e8fbd26..d41a18b4 100644 --- a/__init__.py +++ b/__init__.py @@ -4,7 +4,7 @@ try: # pragma: no cover - import fallback for pytest collection from .py.nodes.trigger_word_toggle import TriggerWordToggle from .py.nodes.prompt import PromptLoraManager from .py.nodes.lora_stacker import LoraStacker - from .py.nodes.save_image import SaveImage + from .py.nodes.save_image import SaveImageLM from .py.nodes.debug_metadata import DebugMetadata from .py.nodes.wanvideo_lora_select import WanVideoLoraSelect from .py.nodes.wanvideo_lora_select_from_text import WanVideoLoraSelectFromText @@ -24,7 +24,7 @@ except ImportError: # pragma: no cover - allows running under pytest without pa LoraManagerTextLoader = importlib.import_module("py.nodes.lora_loader").LoraManagerTextLoader TriggerWordToggle = importlib.import_module("py.nodes.trigger_word_toggle").TriggerWordToggle LoraStacker = importlib.import_module("py.nodes.lora_stacker").LoraStacker - SaveImage = importlib.import_module("py.nodes.save_image").SaveImage + SaveImageLM = importlib.import_module("py.nodes.save_image").SaveImageLM DebugMetadata = importlib.import_module("py.nodes.debug_metadata").DebugMetadata WanVideoLoraSelect = importlib.import_module("py.nodes.wanvideo_lora_select").WanVideoLoraSelect WanVideoLoraSelectFromText = importlib.import_module("py.nodes.wanvideo_lora_select_from_text").WanVideoLoraSelectFromText @@ -36,7 +36,7 @@ NODE_CLASS_MAPPINGS = { LoraManagerTextLoader.NAME: LoraManagerTextLoader, TriggerWordToggle.NAME: TriggerWordToggle, LoraStacker.NAME: LoraStacker, - SaveImage.NAME: SaveImage, + SaveImageLM.NAME: SaveImageLM, DebugMetadata.NAME: DebugMetadata, WanVideoLoraSelect.NAME: WanVideoLoraSelect, WanVideoLoraSelectFromText.NAME: WanVideoLoraSelectFromText diff --git a/locales/de.json b/locales/de.json index 6f484efc..037390c6 100644 --- a/locales/de.json +++ b/locales/de.json @@ -159,6 +159,12 @@ "success": "Updated license metadata for {count} 
{typePlural}", "none": "All {typePlural} already have license metadata", "error": "Failed to refresh license metadata for {typePlural}: {message}" + }, + "repairRecipes": { + "label": "Recipe-Daten reparieren", + "loading": "Recipe-Daten werden repariert...", + "success": "{count} Rezepte erfolgreich repariert.", + "error": "Recipe-Reparatur fehlgeschlagen: {message}" } }, "header": { @@ -188,7 +194,8 @@ "creator": "Ersteller", "title": "Rezept-Titel", "loraName": "LoRA-Dateiname", - "loraModel": "LoRA-Modellname" + "loraModel": "LoRA-Modellname", + "prompt": "Prompt" } }, "filter": { @@ -199,6 +206,7 @@ "license": "Lizenz", "noCreditRequired": "Kein Credit erforderlich", "allowSellingGeneratedContent": "Verkauf erlaubt", + "noTags": "Keine Tags", "clearAll": "Alle Filter löschen" }, "theme": { @@ -521,6 +529,7 @@ "replacePreview": "Vorschau ersetzen", "setContentRating": "Inhaltsbewertung festlegen", "moveToFolder": "In Ordner verschieben", + "repairMetadata": "[TODO: Translate] Repair metadata", "excludeModel": "Modell ausschließen", "deleteModel": "Modell löschen", "shareRecipe": "Rezept teilen", @@ -591,10 +600,26 @@ "selectLoraRoot": "Bitte wählen Sie ein LoRA-Stammverzeichnis aus" } }, + "sort": { + "title": "Rezepte sortieren nach...", + "name": "Name", + "nameAsc": "A - Z", + "nameDesc": "Z - A", + "date": "Datum", + "dateDesc": "Neueste", + "dateAsc": "Älteste", + "lorasCount": "LoRA-Anzahl", + "lorasCountDesc": "Meiste", + "lorasCountAsc": "Wenigste" + }, "refresh": { "title": "Rezeptliste aktualisieren" }, - "filteredByLora": "Gefiltert nach LoRA" + "filteredByLora": "Gefiltert nach LoRA", + "favorites": { + "title": "Nur Favoriten anzeigen", + "action": "Favoriten" + } }, "duplicates": { "found": "{count} Duplikat-Gruppen gefunden", @@ -620,6 +645,13 @@ "noMissingLoras": "Keine fehlenden LoRAs zum Herunterladen", "getInfoFailed": "Fehler beim Abrufen der Informationen für fehlende LoRAs", "prepareError": "Fehler beim Vorbereiten der LoRAs für den 
Download: {message}" + }, + "repair": { + "starting": "[TODO: Translate] Repairing recipe metadata...", + "success": "[TODO: Translate] Recipe metadata repaired successfully", + "skipped": "[TODO: Translate] Recipe already at latest version, no repair needed", + "failed": "[TODO: Translate] Failed to repair recipe: {message}", + "missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID" } } }, @@ -641,7 +673,8 @@ "recursiveUnavailable": "Rekursive Suche ist nur in der Baumansicht verfügbar", "collapseAllDisabled": "Im Listenmodus nicht verfügbar", "dragDrop": { - "unableToResolveRoot": "Zielpfad für das Verschieben konnte nicht ermittelt werden." + "unableToResolveRoot": "Zielpfad für das Verschieben konnte nicht ermittelt werden.", + "moveUnsupported": "Move is not supported for this item." } }, "statistics": { @@ -1463,7 +1496,8 @@ "bulkMoveFailures": "Fehlgeschlagene Verschiebungen:\n{failures}", "bulkMoveSuccess": "{successCount} {type}s erfolgreich verschoben", "exampleImagesDownloadSuccess": "Beispielbilder erfolgreich heruntergeladen!", - "exampleImagesDownloadFailed": "Fehler beim Herunterladen der Beispielbilder: {message}" + "exampleImagesDownloadFailed": "Fehler beim Herunterladen der Beispielbilder: {message}", + "moveFailed": "Failed to move item: {message}" } }, "banners": { diff --git a/locales/en.json b/locales/en.json index 852234ff..fbe2c07f 100644 --- a/locales/en.json +++ b/locales/en.json @@ -32,7 +32,7 @@ "korean": "한국어", "french": "Français", "spanish": "Español", - "Hebrew": "עברית" + "Hebrew": "עברית" }, "fileSize": { "zero": "0 Bytes", @@ -159,6 +159,12 @@ "success": "Updated license metadata for {count} {typePlural}", "none": "All {typePlural} already have license metadata", "error": "Failed to refresh license metadata for {typePlural}: {message}" + }, + "repairRecipes": { + "label": "Repair recipes data", + "loading": "Repairing recipe data...", + "success": "Successfully repaired {count} recipes.", + "error": "Recipe 
repair failed: {message}" } }, "header": { @@ -188,7 +194,8 @@ "creator": "Creator", "title": "Recipe Title", "loraName": "LoRA Filename", - "loraModel": "LoRA Model Name" + "loraModel": "LoRA Model Name", + "prompt": "Prompt" } }, "filter": { @@ -199,6 +206,7 @@ "license": "License", "noCreditRequired": "No Credit Required", "allowSellingGeneratedContent": "Allow Selling", + "noTags": "No tags", "clearAll": "Clear All Filters" }, "theme": { @@ -336,7 +344,7 @@ "templateOptions": { "flatStructure": "Flat Structure", "byBaseModel": "By Base Model", - "byAuthor": "By Author", + "byAuthor": "By Author", "byFirstTag": "By First Tag", "baseModelFirstTag": "Base Model + First Tag", "baseModelAuthor": "Base Model + Author", @@ -347,7 +355,7 @@ "customTemplatePlaceholder": "Enter custom template (e.g., {base_model}/{author}/{first_tag})", "modelTypes": { "lora": "LoRA", - "checkpoint": "Checkpoint", + "checkpoint": "Checkpoint", "embedding": "Embedding" }, "baseModelPathMappings": "Base Model Path Mappings", @@ -420,11 +428,11 @@ "proxyHost": "Proxy Host", "proxyHostPlaceholder": "proxy.example.com", "proxyHostHelp": "The hostname or IP address of your proxy server", - "proxyPort": "Proxy Port", + "proxyPort": "Proxy Port", "proxyPortPlaceholder": "8080", "proxyPortHelp": "The port number of your proxy server", "proxyUsername": "Username (Optional)", - "proxyUsernamePlaceholder": "username", + "proxyUsernamePlaceholder": "username", "proxyUsernameHelp": "Username for proxy authentication (if required)", "proxyPassword": "Password (Optional)", "proxyPasswordPlaceholder": "password", @@ -521,6 +529,7 @@ "replacePreview": "Replace Preview", "setContentRating": "Set Content Rating", "moveToFolder": "Move to Folder", + "repairMetadata": "Repair metadata", "excludeModel": "Exclude Model", "deleteModel": "Delete Model", "shareRecipe": "Share Recipe", @@ -591,10 +600,26 @@ "selectLoraRoot": "Please select a LoRA root directory" } }, + "sort": { + "title": "Sort recipes by...", + 
"name": "Name", + "nameAsc": "A - Z", + "nameDesc": "Z - A", + "date": "Date", + "dateDesc": "Newest", + "dateAsc": "Oldest", + "lorasCount": "LoRA Count", + "lorasCountDesc": "Most", + "lorasCountAsc": "Least" + }, "refresh": { "title": "Refresh recipe list" }, - "filteredByLora": "Filtered by LoRA" + "filteredByLora": "Filtered by LoRA", + "favorites": { + "title": "Show Favorites Only", + "action": "Favorites" + } }, "duplicates": { "found": "Found {count} duplicate groups", @@ -620,6 +645,13 @@ "noMissingLoras": "No missing LoRAs to download", "getInfoFailed": "Failed to get information for missing LoRAs", "prepareError": "Error preparing LoRAs for download: {message}" + }, + "repair": { + "starting": "Repairing recipe metadata...", + "success": "Recipe metadata repaired successfully", + "skipped": "Recipe already at latest version, no repair needed", + "failed": "Failed to repair recipe: {message}", + "missingId": "Cannot repair recipe: Missing recipe ID" } } }, @@ -641,7 +673,8 @@ "recursiveUnavailable": "Recursive search is available in tree view only", "collapseAllDisabled": "Not available in list view", "dragDrop": { - "unableToResolveRoot": "Unable to determine destination path for move." + "unableToResolveRoot": "Unable to determine destination path for move.", + "moveUnsupported": "Move is not supported for this item." 
} }, "statistics": { @@ -1463,7 +1496,8 @@ "bulkMoveFailures": "Failed moves:\n{failures}", "bulkMoveSuccess": "Successfully moved {successCount} {type}s", "exampleImagesDownloadSuccess": "Successfully downloaded example images!", - "exampleImagesDownloadFailed": "Failed to download example images: {message}" + "exampleImagesDownloadFailed": "Failed to download example images: {message}", + "moveFailed": "Failed to move item: {message}" } }, "banners": { @@ -1481,4 +1515,4 @@ "learnMore": "LM Civitai Extension Tutorial" } } -} +} \ No newline at end of file diff --git a/locales/es.json b/locales/es.json index ff700c05..09e13994 100644 --- a/locales/es.json +++ b/locales/es.json @@ -159,6 +159,12 @@ "success": "Updated license metadata for {count} {typePlural}", "none": "All {typePlural} already have license metadata", "error": "Failed to refresh license metadata for {typePlural}: {message}" + }, + "repairRecipes": { + "label": "Reparar datos de recetas", + "loading": "Reparando datos de recetas...", + "success": "Se repararon con éxito {count} recetas.", + "error": "Error al reparar recetas: {message}" } }, "header": { @@ -188,7 +194,8 @@ "creator": "Creador", "title": "Título de la receta", "loraName": "Nombre de archivo LoRA", - "loraModel": "Nombre del modelo LoRA" + "loraModel": "Nombre del modelo LoRA", + "prompt": "Prompt" } }, "filter": { @@ -199,6 +206,7 @@ "license": "Licencia", "noCreditRequired": "Sin crédito requerido", "allowSellingGeneratedContent": "Venta permitida", + "noTags": "Sin etiquetas", "clearAll": "Limpiar todos los filtros" }, "theme": { @@ -521,6 +529,7 @@ "replacePreview": "Reemplazar vista previa", "setContentRating": "Establecer clasificación de contenido", "moveToFolder": "Mover a carpeta", + "repairMetadata": "[TODO: Translate] Repair metadata", "excludeModel": "Excluir modelo", "deleteModel": "Eliminar modelo", "shareRecipe": "Compartir receta", @@ -591,10 +600,26 @@ "selectLoraRoot": "Por favor selecciona un directorio raíz de 
LoRA" } }, + "sort": { + "title": "Ordenar recetas por...", + "name": "Nombre", + "nameAsc": "A - Z", + "nameDesc": "Z - A", + "date": "Fecha", + "dateDesc": "Más reciente", + "dateAsc": "Más antiguo", + "lorasCount": "Cant. de LoRAs", + "lorasCountDesc": "Más", + "lorasCountAsc": "Menos" + }, "refresh": { "title": "Actualizar lista de recetas" }, - "filteredByLora": "Filtrado por LoRA" + "filteredByLora": "Filtrado por LoRA", + "favorites": { + "title": "Mostrar solo favoritos", + "action": "Favoritos" + } }, "duplicates": { "found": "Se encontraron {count} grupos de duplicados", @@ -620,6 +645,13 @@ "noMissingLoras": "No hay LoRAs faltantes para descargar", "getInfoFailed": "Error al obtener información de LoRAs faltantes", "prepareError": "Error preparando LoRAs para descarga: {message}" + }, + "repair": { + "starting": "[TODO: Translate] Repairing recipe metadata...", + "success": "[TODO: Translate] Recipe metadata repaired successfully", + "skipped": "[TODO: Translate] Recipe already at latest version, no repair needed", + "failed": "[TODO: Translate] Failed to repair recipe: {message}", + "missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID" } } }, @@ -641,7 +673,8 @@ "recursiveUnavailable": "La búsqueda recursiva solo está disponible en la vista en árbol", "collapseAllDisabled": "No disponible en vista de lista", "dragDrop": { - "unableToResolveRoot": "No se puede determinar la ruta de destino para el movimiento." + "unableToResolveRoot": "No se puede determinar la ruta de destino para el movimiento.", + "moveUnsupported": "Move is not supported for this item." 
} }, "statistics": { @@ -1463,7 +1496,8 @@ "bulkMoveFailures": "Movimientos fallidos:\n{failures}", "bulkMoveSuccess": "Movidos exitosamente {successCount} {type}s", "exampleImagesDownloadSuccess": "¡Imágenes de ejemplo descargadas exitosamente!", - "exampleImagesDownloadFailed": "Error al descargar imágenes de ejemplo: {message}" + "exampleImagesDownloadFailed": "Error al descargar imágenes de ejemplo: {message}", + "moveFailed": "Failed to move item: {message}" } }, "banners": { diff --git a/locales/fr.json b/locales/fr.json index 60d23a2a..bf66274d 100644 --- a/locales/fr.json +++ b/locales/fr.json @@ -159,6 +159,12 @@ "success": "Updated license metadata for {count} {typePlural}", "none": "All {typePlural} already have license metadata", "error": "Failed to refresh license metadata for {typePlural}: {message}" + }, + "repairRecipes": { + "label": "Réparer les données de recettes", + "loading": "Réparation des données de recettes...", + "success": "{count} recettes réparées avec succès.", + "error": "Échec de la réparation des recettes : {message}" } }, "header": { @@ -188,7 +194,8 @@ "creator": "Créateur", "title": "Titre de la recipe", "loraName": "Nom de fichier LoRA", - "loraModel": "Nom du modèle LoRA" + "loraModel": "Nom du modèle LoRA", + "prompt": "Prompt" } }, "filter": { @@ -199,6 +206,7 @@ "license": "Licence", "noCreditRequired": "Crédit non requis", "allowSellingGeneratedContent": "Vente autorisée", + "noTags": "Aucun tag", "clearAll": "Effacer tous les filtres" }, "theme": { @@ -521,6 +529,7 @@ "replacePreview": "Remplacer l'aperçu", "setContentRating": "Définir la classification du contenu", "moveToFolder": "Déplacer vers un dossier", + "repairMetadata": "[TODO: Translate] Repair metadata", "excludeModel": "Exclure le modèle", "deleteModel": "Supprimer le modèle", "shareRecipe": "Partager la recipe", @@ -591,10 +600,26 @@ "selectLoraRoot": "Veuillez sélectionner un répertoire racine LoRA" } }, + "sort": { + "title": "Trier les recettes par...", + 
"name": "Nom", + "nameAsc": "A - Z", + "nameDesc": "Z - A", + "date": "Date", + "dateDesc": "Plus récent", + "dateAsc": "Plus ancien", + "lorasCount": "Nombre de LoRAs", + "lorasCountDesc": "Plus", + "lorasCountAsc": "Moins" + }, "refresh": { "title": "Actualiser la liste des recipes" }, - "filteredByLora": "Filtré par LoRA" + "filteredByLora": "Filtré par LoRA", + "favorites": { + "title": "Afficher uniquement les favoris", + "action": "Favoris" + } }, "duplicates": { "found": "Trouvé {count} groupes de doublons", @@ -620,6 +645,13 @@ "noMissingLoras": "Aucun LoRA manquant à télécharger", "getInfoFailed": "Échec de l'obtention des informations pour les LoRAs manquants", "prepareError": "Erreur lors de la préparation des LoRAs pour le téléchargement : {message}" + }, + "repair": { + "starting": "[TODO: Translate] Repairing recipe metadata...", + "success": "[TODO: Translate] Recipe metadata repaired successfully", + "skipped": "[TODO: Translate] Recipe already at latest version, no repair needed", + "failed": "[TODO: Translate] Failed to repair recipe: {message}", + "missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID" } } }, @@ -641,7 +673,8 @@ "recursiveUnavailable": "La recherche récursive n'est disponible qu'en vue arborescente", "collapseAllDisabled": "Non disponible en vue liste", "dragDrop": { - "unableToResolveRoot": "Impossible de déterminer le chemin de destination pour le déplacement." + "unableToResolveRoot": "Impossible de déterminer le chemin de destination pour le déplacement.", + "moveUnsupported": "Move is not supported for this item." 
} }, "statistics": { @@ -1463,7 +1496,8 @@ "bulkMoveFailures": "Échecs de déplacement :\n{failures}", "bulkMoveSuccess": "{successCount} {type}s déplacés avec succès", "exampleImagesDownloadSuccess": "Images d'exemple téléchargées avec succès !", - "exampleImagesDownloadFailed": "Échec du téléchargement des images d'exemple : {message}" + "exampleImagesDownloadFailed": "Échec du téléchargement des images d'exemple : {message}", + "moveFailed": "Failed to move item: {message}" } }, "banners": { diff --git a/locales/he.json b/locales/he.json index ae2d9cfc..2ffd3953 100644 --- a/locales/he.json +++ b/locales/he.json @@ -159,6 +159,12 @@ "success": "Updated license metadata for {count} {typePlural}", "none": "All {typePlural} already have license metadata", "error": "Failed to refresh license metadata for {typePlural}: {message}" + }, + "repairRecipes": { + "label": "תיקון נתוני מתכונים", + "loading": "מתקן נתוני מתכונים...", + "success": "תוקנו בהצלחה {count} מתכונים.", + "error": "תיקון המתכונים נכשל: {message}" } }, "header": { @@ -188,7 +194,8 @@ "creator": "יוצר", "title": "כותרת מתכון", "loraName": "שם קובץ LoRA", - "loraModel": "שם מודל LoRA" + "loraModel": "שם מודל LoRA", + "prompt": "הנחיה" } }, "filter": { @@ -199,6 +206,7 @@ "license": "רישיון", "noCreditRequired": "ללא קרדיט נדרש", "allowSellingGeneratedContent": "אפשר מכירה", + "noTags": "ללא תגיות", "clearAll": "נקה את כל המסננים" }, "theme": { @@ -228,6 +236,7 @@ "videoSettings": "הגדרות וידאו", "layoutSettings": "הגדרות פריסה", "folderSettings": "הגדרות תיקייה", + "priorityTags": "תגיות עדיפות", "downloadPathTemplates": "תבניות נתיב הורדה", "exampleImages": "תמונות דוגמה", "updateFlags": "תגי עדכון", @@ -235,8 +244,7 @@ "misc": "שונות", "metadataArchive": "מסד נתונים של ארכיון מטא-דאטה", "storageLocation": "מיקום ההגדרות", - "proxySettings": "הגדרות פרוקסי", - "priorityTags": "תגיות עדיפות" + "proxySettings": "הגדרות פרוקסי" }, "storage": { "locationLabel": "מצב נייד", @@ -309,6 +317,26 @@ 
"defaultEmbeddingRootHelp": "הגדר את ספריית השורש המוגדרת כברירת מחדל של embedding להורדות, ייבוא והעברות", "noDefault": "אין ברירת מחדל" }, + "priorityTags": { + "title": "תגיות עדיפות", + "description": "התאם את סדר העדיפות של התגיות עבור כל סוג מודל (לדוגמה: character, concept, style(toon|toon_style))", + "placeholder": "character, concept, style(toon|toon_style)", + "helpLinkLabel": "פתח עזרה בנושא תגיות עדיפות", + "modelTypes": { + "lora": "LoRA", + "checkpoint": "Checkpoint", + "embedding": "Embedding" + }, + "saveSuccess": "תגיות העדיפות עודכנו.", + "saveError": "עדכון תגיות העדיפות נכשל.", + "loadingSuggestions": "טוען הצעות...", + "validation": { + "missingClosingParen": "לרשומה {index} חסר סוגר סוגריים.", + "missingCanonical": "על הרשומה {index} לכלול שם תגית קנונית.", + "duplicateCanonical": "התגית הקנונית \"{tag}\" מופיעה יותר מפעם אחת.", + "unknown": "תצורת תגיות העדיפות שגויה." + } + }, "downloadPathTemplates": { "title": "תבניות נתיב הורדה", "help": "הגדר מבני תיקיות לסוגי מודלים שונים בעת הורדה מ-Civitai.", @@ -320,8 +348,8 @@ "byFirstTag": "לפי תגית ראשונה", "baseModelFirstTag": "מודל בסיס + תגית ראשונה", "baseModelAuthor": "מודל בסיס + יוצר", - "baseModelAuthorFirstTag": "מודל בסיס + יוצר + תגית ראשונה", "authorFirstTag": "יוצר + תגית ראשונה", + "baseModelAuthorFirstTag": "מודל בסיס + יוצר + תגית ראשונה", "customTemplate": "תבנית מותאמת אישית" }, "customTemplatePlaceholder": "הזן תבנית מותאמת אישית (למשל, {base_model}/{author}/{first_tag})", @@ -409,26 +437,6 @@ "proxyPassword": "סיסמה (אופציונלי)", "proxyPasswordPlaceholder": "password", "proxyPasswordHelp": "סיסמה לאימות מול הפרוקסי (אם נדרש)" - }, - "priorityTags": { - "title": "תגיות עדיפות", - "description": "התאם את סדר העדיפות של התגיות עבור כל סוג מודל (לדוגמה: character, concept, style(toon|toon_style))", - "placeholder": "character, concept, style(toon|toon_style)", - "helpLinkLabel": "פתח עזרה בנושא תגיות עדיפות", - "modelTypes": { - "lora": "LoRA", - "checkpoint": "Checkpoint", - 
"embedding": "Embedding" - }, - "saveSuccess": "תגיות העדיפות עודכנו.", - "saveError": "עדכון תגיות העדיפות נכשל.", - "loadingSuggestions": "טוען הצעות...", - "validation": { - "missingClosingParen": "לרשומה {index} חסר סוגר סוגריים.", - "missingCanonical": "על הרשומה {index} לכלול שם תגית קנונית.", - "duplicateCanonical": "התגית הקנונית \"{tag}\" מופיעה יותר מפעם אחת.", - "unknown": "תצורת תגיות העדיפות שגויה." - } } }, "loras": { @@ -521,6 +529,7 @@ "replacePreview": "החלף תצוגה מקדימה", "setContentRating": "הגדר דירוג תוכן", "moveToFolder": "העבר לתיקייה", + "repairMetadata": "[TODO: Translate] Repair metadata", "excludeModel": "החרג מודל", "deleteModel": "מחק מודל", "shareRecipe": "שתף מתכון", @@ -591,10 +600,26 @@ "selectLoraRoot": "אנא בחר ספריית שורש של LoRA" } }, + "sort": { + "title": "מיון מתכונים לפי...", + "name": "שם", + "nameAsc": "א - ת", + "nameDesc": "ת - א", + "date": "תאריך", + "dateDesc": "הכי חדש", + "dateAsc": "הכי ישן", + "lorasCount": "מספר LoRAs", + "lorasCountDesc": "הכי הרבה", + "lorasCountAsc": "הכי פחות" + }, "refresh": { "title": "רענן רשימת מתכונים" }, - "filteredByLora": "מסונן לפי LoRA" + "filteredByLora": "מסונן לפי LoRA", + "favorites": { + "title": "הצג מועדפים בלבד", + "action": "מועדפים" + } }, "duplicates": { "found": "נמצאו {count} קבוצות כפולות", @@ -620,6 +645,13 @@ "noMissingLoras": "אין LoRAs חסרים להורדה", "getInfoFailed": "קבלת מידע עבור LoRAs חסרים נכשלה", "prepareError": "שגיאה בהכנת LoRAs להורדה: {message}" + }, + "repair": { + "starting": "[TODO: Translate] Repairing recipe metadata...", + "success": "[TODO: Translate] Recipe metadata repaired successfully", + "skipped": "[TODO: Translate] Recipe already at latest version, no repair needed", + "failed": "[TODO: Translate] Failed to repair recipe: {message}", + "missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID" } } }, @@ -641,7 +673,8 @@ "recursiveUnavailable": "חיפוש רקורסיבי זמין רק בתצוגת עץ", "collapseAllDisabled": "לא זמין בתצוגת רשימה", 
"dragDrop": { - "unableToResolveRoot": "לא ניתן לקבוע את נתיב היעד להעברה." + "unableToResolveRoot": "לא ניתן לקבוע את נתיב היעד להעברה.", + "moveUnsupported": "Move is not supported for this item." } }, "statistics": { @@ -1463,7 +1496,8 @@ "bulkMoveFailures": "העברות שנכשלו:\n{failures}", "bulkMoveSuccess": "הועברו בהצלחה {successCount} {type}s", "exampleImagesDownloadSuccess": "תמונות הדוגמה הורדו בהצלחה!", - "exampleImagesDownloadFailed": "הורדת תמונות הדוגמה נכשלה: {message}" + "exampleImagesDownloadFailed": "הורדת תמונות הדוגמה נכשלה: {message}", + "moveFailed": "Failed to move item: {message}" } }, "banners": { diff --git a/locales/ja.json b/locales/ja.json index 89c53dcd..592fff2b 100644 --- a/locales/ja.json +++ b/locales/ja.json @@ -159,6 +159,12 @@ "success": "Updated license metadata for {count} {typePlural}", "none": "All {typePlural} already have license metadata", "error": "Failed to refresh license metadata for {typePlural}: {message}" + }, + "repairRecipes": { + "label": "レシピデータの修復", + "loading": "レシピデータを修復中...", + "success": "{count} 件のレシピを正常に修復しました。", + "error": "レシピの修復に失敗しました: {message}" } }, "header": { @@ -188,7 +194,8 @@ "creator": "作成者", "title": "レシピタイトル", "loraName": "LoRAファイル名", - "loraModel": "LoRAモデル名" + "loraModel": "LoRAモデル名", + "prompt": "プロンプト" } }, "filter": { @@ -199,6 +206,7 @@ "license": "ライセンス", "noCreditRequired": "クレジット不要", "allowSellingGeneratedContent": "販売許可", + "noTags": "タグなし", "clearAll": "すべてのフィルタをクリア" }, "theme": { @@ -521,6 +529,7 @@ "replacePreview": "プレビューを置換", "setContentRating": "コンテンツレーティングを設定", "moveToFolder": "フォルダに移動", + "repairMetadata": "[TODO: Translate] Repair metadata", "excludeModel": "モデルを除外", "deleteModel": "モデルを削除", "shareRecipe": "レシピを共有", @@ -591,10 +600,26 @@ "selectLoraRoot": "LoRAルートディレクトリを選択してください" } }, + "sort": { + "title": "レシピの並び替え...", + "name": "名前", + "nameAsc": "A - Z", + "nameDesc": "Z - A", + "date": "日付", + "dateDesc": "新しい順", + "dateAsc": "古い順", + "lorasCount": "LoRA数", + 
"lorasCountDesc": "多い順", + "lorasCountAsc": "少ない順" + }, "refresh": { "title": "レシピリストを更新" }, - "filteredByLora": "LoRAでフィルタ済み" + "filteredByLora": "LoRAでフィルタ済み", + "favorites": { + "title": "お気に入りのみ表示", + "action": "お気に入り" + } }, "duplicates": { "found": "{count} 個の重複グループが見つかりました", @@ -620,6 +645,13 @@ "noMissingLoras": "ダウンロードする不足LoRAがありません", "getInfoFailed": "不足LoRAの情報取得に失敗しました", "prepareError": "ダウンロード用LoRAの準備中にエラー:{message}" + }, + "repair": { + "starting": "[TODO: Translate] Repairing recipe metadata...", + "success": "[TODO: Translate] Recipe metadata repaired successfully", + "skipped": "[TODO: Translate] Recipe already at latest version, no repair needed", + "failed": "[TODO: Translate] Failed to repair recipe: {message}", + "missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID" } } }, @@ -641,7 +673,8 @@ "recursiveUnavailable": "再帰検索はツリービューでのみ利用できます", "collapseAllDisabled": "リストビューでは利用できません", "dragDrop": { - "unableToResolveRoot": "移動先のパスを特定できません。" + "unableToResolveRoot": "移動先のパスを特定できません。", + "moveUnsupported": "Move is not supported for this item." 
} }, "statistics": { @@ -1463,7 +1496,8 @@ "bulkMoveFailures": "失敗した移動:\n{failures}", "bulkMoveSuccess": "{successCount} {type}が正常に移動されました", "exampleImagesDownloadSuccess": "例画像が正常にダウンロードされました!", - "exampleImagesDownloadFailed": "例画像のダウンロードに失敗しました:{message}" + "exampleImagesDownloadFailed": "例画像のダウンロードに失敗しました:{message}", + "moveFailed": "Failed to move item: {message}" } }, "banners": { diff --git a/locales/ko.json b/locales/ko.json index d3f56dd9..57d9d432 100644 --- a/locales/ko.json +++ b/locales/ko.json @@ -159,6 +159,12 @@ "success": "Updated license metadata for {count} {typePlural}", "none": "All {typePlural} already have license metadata", "error": "Failed to refresh license metadata for {typePlural}: {message}" + }, + "repairRecipes": { + "label": "레시피 데이터 복구", + "loading": "레시피 데이터 복구 중...", + "success": "{count}개의 레시피가 성공적으로 복구되었습니다.", + "error": "레시피 복구 실패: {message}" } }, "header": { @@ -188,7 +194,8 @@ "creator": "제작자", "title": "레시피 제목", "loraName": "LoRA 파일명", - "loraModel": "LoRA 모델명" + "loraModel": "LoRA 모델명", + "prompt": "프롬프트" } }, "filter": { @@ -199,6 +206,7 @@ "license": "라이선스", "noCreditRequired": "크레딧 표기 없음", "allowSellingGeneratedContent": "판매 허용", + "noTags": "태그 없음", "clearAll": "모든 필터 지우기" }, "theme": { @@ -521,6 +529,7 @@ "replacePreview": "미리보기 교체", "setContentRating": "콘텐츠 등급 설정", "moveToFolder": "폴더로 이동", + "repairMetadata": "[TODO: Translate] Repair metadata", "excludeModel": "모델 제외", "deleteModel": "모델 삭제", "shareRecipe": "레시피 공유", @@ -591,10 +600,26 @@ "selectLoraRoot": "LoRA 루트 디렉토리를 선택해주세요" } }, + "sort": { + "title": "레시피 정렬...", + "name": "이름", + "nameAsc": "A - Z", + "nameDesc": "Z - A", + "date": "날짜", + "dateDesc": "최신순", + "dateAsc": "오래된순", + "lorasCount": "LoRA 수", + "lorasCountDesc": "많은순", + "lorasCountAsc": "적은순" + }, "refresh": { "title": "레시피 목록 새로고침" }, - "filteredByLora": "LoRA로 필터링됨" + "filteredByLora": "LoRA로 필터링됨", + "favorites": { + "title": "즐겨찾기만 표시", + "action": "즐겨찾기" + } }, "duplicates": { "found": 
"{count}개의 중복 그룹 발견", @@ -620,6 +645,13 @@ "noMissingLoras": "다운로드할 누락된 LoRA가 없습니다", "getInfoFailed": "누락된 LoRA 정보를 가져오는데 실패했습니다", "prepareError": "LoRA 다운로드 준비 중 오류: {message}" + }, + "repair": { + "starting": "[TODO: Translate] Repairing recipe metadata...", + "success": "[TODO: Translate] Recipe metadata repaired successfully", + "skipped": "[TODO: Translate] Recipe already at latest version, no repair needed", + "failed": "[TODO: Translate] Failed to repair recipe: {message}", + "missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID" } } }, @@ -641,7 +673,8 @@ "recursiveUnavailable": "재귀 검색은 트리 보기에서만 사용할 수 있습니다", "collapseAllDisabled": "목록 보기에서는 사용할 수 없습니다", "dragDrop": { - "unableToResolveRoot": "이동할 대상 경로를 확인할 수 없습니다." + "unableToResolveRoot": "이동할 대상 경로를 확인할 수 없습니다.", + "moveUnsupported": "Move is not supported for this item." } }, "statistics": { @@ -1463,7 +1496,8 @@ "bulkMoveFailures": "실패한 이동:\n{failures}", "bulkMoveSuccess": "{successCount}개 {type}이(가) 성공적으로 이동되었습니다", "exampleImagesDownloadSuccess": "예시 이미지가 성공적으로 다운로드되었습니다!", - "exampleImagesDownloadFailed": "예시 이미지 다운로드 실패: {message}" + "exampleImagesDownloadFailed": "예시 이미지 다운로드 실패: {message}", + "moveFailed": "Failed to move item: {message}" } }, "banners": { diff --git a/locales/ru.json b/locales/ru.json index 8c660982..bd8dda16 100644 --- a/locales/ru.json +++ b/locales/ru.json @@ -159,6 +159,12 @@ "success": "Updated license metadata for {count} {typePlural}", "none": "All {typePlural} already have license metadata", "error": "Failed to refresh license metadata for {typePlural}: {message}" + }, + "repairRecipes": { + "label": "Восстановить данные рецептов", + "loading": "Восстановление данных рецептов...", + "success": "Успешно восстановлено {count} рецептов.", + "error": "Ошибка восстановления рецептов: {message}" } }, "header": { @@ -188,7 +194,8 @@ "creator": "Автор", "title": "Название рецепта", "loraName": "Имя файла LoRA", - "loraModel": "Название модели LoRA" + 
"loraModel": "Название модели LoRA", + "prompt": "Запрос" } }, "filter": { @@ -199,6 +206,7 @@ "license": "Лицензия", "noCreditRequired": "Без указания авторства", "allowSellingGeneratedContent": "Продажа разрешена", + "noTags": "Без тегов", "clearAll": "Очистить все фильтры" }, "theme": { @@ -521,6 +529,7 @@ "replacePreview": "Заменить превью", "setContentRating": "Установить рейтинг контента", "moveToFolder": "Переместить в папку", + "repairMetadata": "[TODO: Translate] Repair metadata", "excludeModel": "Исключить модель", "deleteModel": "Удалить модель", "shareRecipe": "Поделиться рецептом", @@ -591,10 +600,26 @@ "selectLoraRoot": "Пожалуйста, выберите корневую папку LoRA" } }, + "sort": { + "title": "Сортировка рецептов...", + "name": "Имя", + "nameAsc": "А - Я", + "nameDesc": "Я - А", + "date": "Дата", + "dateDesc": "Сначала новые", + "dateAsc": "Сначала старые", + "lorasCount": "Кол-во LoRA", + "lorasCountDesc": "Больше всего", + "lorasCountAsc": "Меньше всего" + }, "refresh": { "title": "Обновить список рецептов" }, - "filteredByLora": "Фильтр по LoRA" + "filteredByLora": "Фильтр по LoRA", + "favorites": { + "title": "Только избранные", + "action": "Избранное" + } }, "duplicates": { "found": "Найдено {count} групп дубликатов", @@ -620,6 +645,13 @@ "noMissingLoras": "Нет отсутствующих LoRAs для загрузки", "getInfoFailed": "Не удалось получить информацию для отсутствующих LoRAs", "prepareError": "Ошибка подготовки LoRAs для загрузки: {message}" + }, + "repair": { + "starting": "[TODO: Translate] Repairing recipe metadata...", + "success": "[TODO: Translate] Recipe metadata repaired successfully", + "skipped": "[TODO: Translate] Recipe already at latest version, no repair needed", + "failed": "[TODO: Translate] Failed to repair recipe: {message}", + "missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID" } } }, @@ -641,7 +673,8 @@ "recursiveUnavailable": "Рекурсивный поиск доступен только в режиме дерева", "collapseAllDisabled": "Недоступно в 
виде списка", "dragDrop": { - "unableToResolveRoot": "Не удалось определить путь назначения для перемещения." + "unableToResolveRoot": "Не удалось определить путь назначения для перемещения.", + "moveUnsupported": "Move is not supported for this item." } }, "statistics": { @@ -1463,7 +1496,8 @@ "bulkMoveFailures": "Неудачные перемещения:\n{failures}", "bulkMoveSuccess": "Успешно перемещено {successCount} {type}s", "exampleImagesDownloadSuccess": "Примеры изображений успешно загружены!", - "exampleImagesDownloadFailed": "Не удалось загрузить примеры изображений: {message}" + "exampleImagesDownloadFailed": "Не удалось загрузить примеры изображений: {message}", + "moveFailed": "Failed to move item: {message}" } }, "banners": { diff --git a/locales/zh-CN.json b/locales/zh-CN.json index 98660f01..2afe244a 100644 --- a/locales/zh-CN.json +++ b/locales/zh-CN.json @@ -159,6 +159,12 @@ "success": "Updated license metadata for {count} {typePlural}", "none": "All {typePlural} already have license metadata", "error": "Failed to refresh license metadata for {typePlural}: {message}" + }, + "repairRecipes": { + "label": "修复配方数据", + "loading": "正在修复配方数据...", + "success": "成功修复了 {count} 个配方。", + "error": "配方修复失败:{message}" } }, "header": { @@ -188,7 +194,8 @@ "creator": "创作者", "title": "配方标题", "loraName": "LoRA 文件名", - "loraModel": "LoRA 模型名称" + "loraModel": "LoRA 模型名称", + "prompt": "提示词" } }, "filter": { @@ -199,6 +206,7 @@ "license": "许可证", "noCreditRequired": "无需署名", "allowSellingGeneratedContent": "允许销售", + "noTags": "无标签", "clearAll": "清除所有筛选" }, "theme": { @@ -521,6 +529,7 @@ "replacePreview": "替换预览", "setContentRating": "设置内容评级", "moveToFolder": "移动到文件夹", + "repairMetadata": "[TODO: Translate] Repair metadata", "excludeModel": "排除模型", "deleteModel": "删除模型", "shareRecipe": "分享配方", @@ -591,10 +600,26 @@ "selectLoraRoot": "请选择 LoRA 根目录" } }, + "sort": { + "title": "配方排序...", + "name": "名称", + "nameAsc": "A - Z", + "nameDesc": "Z - A", + "date": "时间", + "dateDesc": "最新", + 
"dateAsc": "最早", + "lorasCount": "LoRA 数量", + "lorasCountDesc": "最多", + "lorasCountAsc": "最少" + }, "refresh": { "title": "刷新配方列表" }, - "filteredByLora": "按 LoRA 筛选" + "filteredByLora": "按 LoRA 筛选", + "favorites": { + "title": "仅显示收藏", + "action": "收藏" + } }, "duplicates": { "found": "发现 {count} 个重复组", @@ -620,6 +645,13 @@ "noMissingLoras": "没有缺失的 LoRA 可下载", "getInfoFailed": "获取缺失 LoRA 信息失败", "prepareError": "准备下载 LoRA 时出错:{message}" + }, + "repair": { + "starting": "[TODO: Translate] Repairing recipe metadata...", + "success": "[TODO: Translate] Recipe metadata repaired successfully", + "skipped": "[TODO: Translate] Recipe already at latest version, no repair needed", + "failed": "[TODO: Translate] Failed to repair recipe: {message}", + "missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID" } } }, @@ -641,7 +673,8 @@ "recursiveUnavailable": "仅在树形视图中可使用递归搜索", "collapseAllDisabled": "列表视图下不可用", "dragDrop": { - "unableToResolveRoot": "无法确定移动的目标路径。" + "unableToResolveRoot": "无法确定移动的目标路径。", + "moveUnsupported": "Move is not supported for this item." 
} }, "statistics": { @@ -1463,7 +1496,8 @@ "bulkMoveFailures": "移动失败:\n{failures}", "bulkMoveSuccess": "成功移动 {successCount} 个 {type}", "exampleImagesDownloadSuccess": "示例图片下载成功!", - "exampleImagesDownloadFailed": "示例图片下载失败:{message}" + "exampleImagesDownloadFailed": "示例图片下载失败:{message}", + "moveFailed": "Failed to move item: {message}" } }, "banners": { diff --git a/locales/zh-TW.json b/locales/zh-TW.json index c4f4df0f..0675aa65 100644 --- a/locales/zh-TW.json +++ b/locales/zh-TW.json @@ -154,11 +154,17 @@ "error": "清理範例圖片資料夾失敗:{message}" }, "fetchMissingLicenses": { - "label": "Refresh license metadata", - "loading": "Refreshing license metadata for {typePlural}...", - "success": "Updated license metadata for {count} {typePlural}", - "none": "All {typePlural} already have license metadata", - "error": "Failed to refresh license metadata for {typePlural}: {message}" + "label": "重新整理授權中繼資料", + "loading": "正在重新整理 {typePlural} 的授權中繼資料...", + "success": "已更新 {count} 個 {typePlural} 的授權中繼資料", + "none": "所有 {typePlural} 已具備授權中繼資料", + "error": "重新整理 {typePlural} 授權中繼資料失敗:{message}" + }, + "repairRecipes": { + "label": "修復配方資料", + "loading": "正在修復配方資料...", + "success": "成功修復 {count} 個配方。", + "error": "配方修復失敗:{message}" } }, "header": { @@ -188,7 +194,8 @@ "creator": "創作者", "title": "配方標題", "loraName": "LoRA 檔案名稱", - "loraModel": "LoRA 模型名稱" + "loraModel": "LoRA 模型名稱", + "prompt": "提示詞" } }, "filter": { @@ -199,6 +206,7 @@ "license": "授權", "noCreditRequired": "無需署名", "allowSellingGeneratedContent": "允許銷售", + "noTags": "無標籤", "clearAll": "清除所有篩選" }, "theme": { @@ -521,6 +529,7 @@ "replacePreview": "更換預覽圖", "setContentRating": "設定內容分級", "moveToFolder": "移動到資料夾", + "repairMetadata": "[TODO: Translate] Repair metadata", "excludeModel": "排除模型", "deleteModel": "刪除模型", "shareRecipe": "分享配方", @@ -591,10 +600,26 @@ "selectLoraRoot": "請選擇 LoRA 根目錄" } }, + "sort": { + "title": "配方排序...", + "name": "名稱", + "nameAsc": "A - Z", + "nameDesc": "Z - A", + "date": "時間", + "dateDesc": "最新", + 
"dateAsc": "最舊", + "lorasCount": "LoRA 數量", + "lorasCountDesc": "最多", + "lorasCountAsc": "最少" + }, "refresh": { "title": "重新整理配方列表" }, - "filteredByLora": "已依 LoRA 篩選" + "filteredByLora": "已依 LoRA 篩選", + "favorites": { + "title": "僅顯示收藏", + "action": "收藏" + } }, "duplicates": { "found": "發現 {count} 組重複項", @@ -620,6 +645,13 @@ "noMissingLoras": "無缺少的 LoRA 可下載", "getInfoFailed": "取得缺少 LoRA 資訊失敗", "prepareError": "準備下載 LoRA 時發生錯誤:{message}" + }, + "repair": { + "starting": "[TODO: Translate] Repairing recipe metadata...", + "success": "[TODO: Translate] Recipe metadata repaired successfully", + "skipped": "[TODO: Translate] Recipe already at latest version, no repair needed", + "failed": "[TODO: Translate] Failed to repair recipe: {message}", + "missingId": "[TODO: Translate] Cannot repair recipe: Missing recipe ID" } } }, @@ -641,7 +673,8 @@ "recursiveUnavailable": "遞迴搜尋僅能在樹狀檢視中使用", "collapseAllDisabled": "列表檢視下不可用", "dragDrop": { - "unableToResolveRoot": "無法確定移動的目標路徑。" + "unableToResolveRoot": "無法確定移動的目標路徑。", + "moveUnsupported": "Move is not supported for this item." 
} }, "statistics": { @@ -1463,7 +1496,8 @@ "bulkMoveFailures": "移動失敗:\n{failures}", "bulkMoveSuccess": "已成功移動 {successCount} 個 {type}", "exampleImagesDownloadSuccess": "範例圖片下載成功!", - "exampleImagesDownloadFailed": "下載範例圖片失敗:{message}" + "exampleImagesDownloadFailed": "下載範例圖片失敗:{message}", + "moveFailed": "Failed to move item: {message}" } }, "banners": { diff --git a/py/config.py b/py/config.py index 6212c3b9..c34952d5 100644 --- a/py/config.py +++ b/py/config.py @@ -1,11 +1,13 @@ import os import platform +import threading from pathlib import Path import folder_paths # type: ignore -from typing import Any, Dict, Iterable, List, Mapping, Optional, Set +from typing import Any, Dict, Iterable, List, Mapping, Optional, Set, Tuple import logging import json import urllib.parse +import time from .utils.settings_paths import ensure_settings_file, get_settings_dir, load_settings_template @@ -80,6 +82,8 @@ class Config: self._path_mappings: Dict[str, str] = {} # Normalized preview root directories used to validate preview access self._preview_root_paths: Set[Path] = set() + # Optional background rescan thread + self._rescan_thread: Optional[threading.Thread] = None self.loras_roots = self._init_lora_paths() self.checkpoints_roots = None self.unet_roots = None @@ -282,58 +286,25 @@ class Config: def _load_symlink_cache(self) -> bool: cache_path = self._get_symlink_cache_path() if not cache_path.exists(): + logger.info("Symlink cache not found at %s", cache_path) return False try: with cache_path.open("r", encoding="utf-8") as handle: payload = json.load(handle) except Exception as exc: - logger.debug("Failed to load symlink cache %s: %s", cache_path, exc) + logger.info("Failed to load symlink cache %s: %s", cache_path, exc) return False if not isinstance(payload, dict): + logger.info("Symlink cache payload is not a dict: %s", type(payload)) return False - cached_fingerprint = payload.get("fingerprint") cached_mappings = payload.get("path_mappings") - if not 
isinstance(cached_fingerprint, dict) or not isinstance(cached_mappings, Mapping): + if not isinstance(cached_mappings, Mapping): + logger.info("Symlink cache missing path mappings") return False - current_fingerprint = self._build_symlink_fingerprint() - cached_roots = cached_fingerprint.get("roots") - cached_stats = cached_fingerprint.get("stats") - if ( - not isinstance(cached_roots, list) - or not isinstance(cached_stats, Mapping) - or sorted(cached_roots) != sorted(current_fingerprint["roots"]) # type: ignore[index] - ): - return False - - for root in current_fingerprint["roots"]: # type: ignore[assignment] - cached_stat = cached_stats.get(root) if isinstance(cached_stats, Mapping) else None - current_stat = current_fingerprint["stats"].get(root) # type: ignore[index] - if not isinstance(cached_stat, Mapping) or not current_stat: - return False - - cached_mtime = cached_stat.get("mtime_ns") - cached_inode = cached_stat.get("inode") - current_mtime = current_stat.get("mtime_ns") - current_inode = current_stat.get("inode") - - if cached_inode != current_inode: - return False - - if cached_mtime != current_mtime: - cached_noise = cached_stat.get("noise_mtime_ns") - current_noise = current_stat.get("noise_mtime_ns") - if not ( - cached_noise - and current_noise - and cached_mtime == cached_noise - and current_mtime == current_noise - ): - return False - normalized_mappings: Dict[str, str] = {} for target, link in cached_mappings.items(): if not isinstance(target, str) or not isinstance(link, str): @@ -341,6 +312,7 @@ class Config: normalized_mappings[self._normalize_path(target)] = self._normalize_path(link) self._path_mappings = normalized_mappings + logger.info("Symlink cache loaded with %d mappings", len(self._path_mappings)) return True def _save_symlink_cache(self) -> None: @@ -353,22 +325,75 @@ class Config: try: with cache_path.open("w", encoding="utf-8") as handle: json.dump(payload, handle, ensure_ascii=False, indent=2) + logger.info("Symlink cache saved 
to %s with %d mappings", cache_path, len(self._path_mappings)) except Exception as exc: - logger.debug("Failed to write symlink cache %s: %s", cache_path, exc) + logger.info("Failed to write symlink cache %s: %s", cache_path, exc) def _initialize_symlink_mappings(self) -> None: - if not self._load_symlink_cache(): - self._scan_symbolic_links() - self._save_symlink_cache() - else: - logger.info("Loaded symlink mappings from cache") + start = time.perf_counter() + cache_loaded = self._load_symlink_cache() + + if cache_loaded: + logger.info( + "Symlink mappings restored from cache in %.2f ms", + (time.perf_counter() - start) * 1000, + ) + self._rebuild_preview_roots() + self._schedule_symlink_rescan() + return + + self._scan_symbolic_links() + self._save_symlink_cache() self._rebuild_preview_roots() + logger.info( + "Symlink mappings rebuilt and cached in %.2f ms", + (time.perf_counter() - start) * 1000, + ) def _scan_symbolic_links(self): """Scan all symbolic links in LoRA, Checkpoint, and Embedding root directories""" + start = time.perf_counter() + # Reset mappings before rescanning to avoid stale entries + self._path_mappings.clear() + self._seed_root_symlink_mappings() visited_dirs: Set[str] = set() for root in self._symlink_roots(): self._scan_directory_links(root, visited_dirs) + logger.info( + "Symlink scan finished in %.2f ms with %d mappings", + (time.perf_counter() - start) * 1000, + len(self._path_mappings), + ) + + def _schedule_symlink_rescan(self) -> None: + """Trigger a best-effort background rescan to refresh stale caches.""" + + if self._rescan_thread and self._rescan_thread.is_alive(): + return + + def worker(): + try: + self._scan_symbolic_links() + self._save_symlink_cache() + self._rebuild_preview_roots() + logger.info("Background symlink rescan completed") + except Exception as exc: # pragma: no cover - defensive logging + logger.info("Background symlink rescan failed: %s", exc) + + thread = threading.Thread( + target=worker, + 
name="lora-manager-symlink-rescan", + daemon=True, + ) + self._rescan_thread = thread + thread.start() + + def _wait_for_rescan(self, timeout: Optional[float] = None) -> None: + """Block until the background rescan completes (testing convenience).""" + + thread = self._rescan_thread + if thread: + thread.join(timeout=timeout) def _scan_directory_links(self, root: str, visited_dirs: Set[str]): """Iteratively scan directory symlinks to avoid deep recursion.""" @@ -434,6 +459,22 @@ class Config: self._preview_root_paths.update(self._expand_preview_root(normalized_target)) self._preview_root_paths.update(self._expand_preview_root(normalized_link)) + def _seed_root_symlink_mappings(self) -> None: + """Ensure symlinked root folders are recorded before deep scanning.""" + + for root in self._symlink_roots(): + if not root: + continue + try: + if not self._is_link(root): + continue + target_path = os.path.realpath(root) + if not os.path.isdir(target_path): + continue + self.add_path_mapping(root, target_path) + except Exception as exc: + logger.debug("Skipping root symlink %s: %s", root, exc) + def _expand_preview_root(self, path: str) -> Set[Path]: """Return normalized ``Path`` objects representing a preview root.""" diff --git a/py/metadata_collector/metadata_processor.py b/py/metadata_collector/metadata_processor.py index 587bcf12..2d39f2ba 100644 --- a/py/metadata_collector/metadata_processor.py +++ b/py/metadata_collector/metadata_processor.py @@ -39,8 +39,39 @@ class MetadataProcessor: if node_id in metadata.get(SAMPLING, {}) and metadata[SAMPLING][node_id].get(IS_SAMPLER, False): candidate_samplers[node_id] = metadata[SAMPLING][node_id] - # If we found candidate samplers, apply primary sampler logic to these candidates only - if candidate_samplers: + # If we found candidate samplers, apply primary sampler logic to these candidates only + + # PRE-PROCESS: Ensure all candidate samplers have their parameters populated + # This is especially important for 
SamplerCustomAdvanced which needs tracing + prompt = metadata.get("current_prompt") + for node_id in candidate_samplers: + # If a sampler is missing common parameters like steps or denoise, + # try to populate them using tracing before ranking + sampler_info = candidate_samplers[node_id] + params = sampler_info.get("parameters", {}) + + if prompt and (params.get("steps") is None or params.get("denoise") is None): + # Create a temporary params dict to use the handler + temp_params = { + "steps": params.get("steps"), + "denoise": params.get("denoise"), + "sampler": params.get("sampler_name"), + "scheduler": params.get("scheduler") + } + + # Check if it's SamplerCustomAdvanced + if prompt.original_prompt and node_id in prompt.original_prompt: + if prompt.original_prompt[node_id].get("class_type") == "SamplerCustomAdvanced": + MetadataProcessor.handle_custom_advanced_sampler(metadata, prompt, node_id, temp_params) + + # Update the actual parameters with found values + params["steps"] = temp_params.get("steps") + params["denoise"] = temp_params.get("denoise") + if temp_params.get("sampler"): + params["sampler_name"] = temp_params.get("sampler") + if temp_params.get("scheduler"): + params["scheduler"] = temp_params.get("scheduler") + # Collect potential primary samplers based on different criteria custom_advanced_samplers = [] advanced_add_noise_samplers = [] @@ -49,7 +80,6 @@ class MetadataProcessor: high_denoise_id = None # First, check for SamplerCustomAdvanced among candidates - prompt = metadata.get("current_prompt") if prompt and prompt.original_prompt: for node_id in candidate_samplers: node_info = prompt.original_prompt.get(node_id, {}) @@ -77,15 +107,16 @@ class MetadataProcessor: # Combine all potential primary samplers potential_samplers = custom_advanced_samplers + advanced_add_noise_samplers + high_denoise_samplers - # Find the most recent potential primary sampler (closest to downstream node) - for i in range(downstream_index - 1, -1, -1): + # Find the 
first potential primary sampler (prefer base sampler over refine) + # Use forward search to prioritize the first one in execution order + for i in range(downstream_index): node_id = execution_order[i] if node_id in potential_samplers: return node_id, candidate_samplers[node_id] - # If no potential sampler found from our criteria, return the most recent sampler + # If no potential sampler found from our criteria, return the first sampler if candidate_samplers: - for i in range(downstream_index - 1, -1, -1): + for i in range(downstream_index): node_id = execution_order[i] if node_id in candidate_samplers: return node_id, candidate_samplers[node_id] @@ -176,8 +207,11 @@ class MetadataProcessor: found_node_id = input_value[0] # Connected node_id # If we're looking for a specific node class - if target_class and prompt.original_prompt[found_node_id].get("class_type") == target_class: - return found_node_id + if target_class: + if found_node_id not in prompt.original_prompt: + return None + if prompt.original_prompt[found_node_id].get("class_type") == target_class: + return found_node_id # If we're not looking for a specific class, update the last valid node if not target_class: @@ -185,11 +219,19 @@ class MetadataProcessor: # Continue tracing through intermediate nodes current_node_id = found_node_id - # For most conditioning nodes, the input we want to follow is named "conditioning" - if "conditioning" in prompt.original_prompt[current_node_id].get("inputs", {}): + + # Check if current source node exists + if current_node_id not in prompt.original_prompt: + return found_node_id if not target_class else None + + # Determine which input to follow next on the source node + source_node_inputs = prompt.original_prompt[current_node_id].get("inputs", {}) + if input_name in source_node_inputs: + current_input = input_name + elif "conditioning" in source_node_inputs: current_input = "conditioning" else: - # If there's no "conditioning" input, return the current node + # If 
there's no suitable input to follow, return the current node # if we're not looking for a specific target_class return found_node_id if not target_class else None else: @@ -202,12 +244,89 @@ class MetadataProcessor: return last_valid_node if not target_class else None @staticmethod - def find_primary_checkpoint(metadata): - """Find the primary checkpoint model in the workflow""" - if not metadata.get(MODELS): + def trace_model_path(metadata, prompt, start_node_id): + """ + Trace the model connection path upstream to find the checkpoint + """ + if not prompt or not prompt.original_prompt: return None - # In most workflows, there's only one checkpoint, so we can just take the first one + current_node_id = start_node_id + depth = 0 + max_depth = 50 + + while depth < max_depth: + # Check if current node is a registered checkpoint in our metadata + # This handles cached nodes correctly because metadata contains info for all nodes in the graph + if current_node_id in metadata.get(MODELS, {}): + if metadata[MODELS][current_node_id].get("type") == "checkpoint": + return current_node_id + + if current_node_id not in prompt.original_prompt: + return None + + node = prompt.original_prompt[current_node_id] + inputs = node.get("inputs", {}) + class_type = node.get("class_type", "") + + # Determine which input to follow next + next_input_name = "model" + + # Special handling for initial node + if depth == 0: + if class_type == "SamplerCustomAdvanced": + next_input_name = "guider" + + # If the specific input doesn't exist, try generic 'model' + if next_input_name not in inputs: + if "model" in inputs: + next_input_name = "model" + elif "basic_pipe" in inputs: + # Handle pipe nodes like FromBasicPipe by following the pipeline + next_input_name = "basic_pipe" + else: + # Dead end - no model input to follow + return None + + # Get connected node + input_val = inputs[next_input_name] + if isinstance(input_val, list) and len(input_val) > 0: + current_node_id = input_val[0] + else: + 
return None + + depth += 1 + + return None + + @staticmethod + def find_primary_checkpoint(metadata, downstream_id=None, primary_sampler_id=None): + """ + Find the primary checkpoint model in the workflow + + Parameters: + - metadata: The workflow metadata + - downstream_id: Optional ID of a downstream node to help identify the specific primary sampler + - primary_sampler_id: Optional ID of the primary sampler if already known + """ + if not metadata.get(MODELS): + return None + + # Method 1: Topology-based tracing (More accurate for complex workflows) + # First, find the primary sampler if not provided + if not primary_sampler_id: + primary_sampler_id, _ = MetadataProcessor.find_primary_sampler(metadata, downstream_id) + + if primary_sampler_id: + prompt = metadata.get("current_prompt") + if prompt: + # Trace back from the sampler to find the checkpoint + checkpoint_id = MetadataProcessor.trace_model_path(metadata, prompt, primary_sampler_id) + if checkpoint_id and checkpoint_id in metadata.get(MODELS, {}): + return metadata[MODELS][checkpoint_id].get("name") + + # Method 2: Fallback to the first available checkpoint (Original behavior) + # In most simple workflows, there's only one checkpoint, so we can just take the first one for node_id, model_info in metadata.get(MODELS, {}).items(): if model_info.get("type") == "checkpoint": return model_info.get("name") @@ -311,7 +430,8 @@ class MetadataProcessor: primary_sampler_id, primary_sampler = MetadataProcessor.find_primary_sampler(metadata, id) # Directly get checkpoint from metadata instead of tracing - checkpoint = MetadataProcessor.find_primary_checkpoint(metadata) + # Pass primary_sampler_id to avoid redundant calculation + checkpoint = MetadataProcessor.find_primary_checkpoint(metadata, id, primary_sampler_id) if checkpoint: params["checkpoint"] = checkpoint @@ -445,6 +565,7 @@ class MetadataProcessor: scheduler_params = metadata[SAMPLING][scheduler_node_id].get("parameters", {}) params["steps"] = 
scheduler_params.get("steps") params["scheduler"] = scheduler_params.get("scheduler") + params["denoise"] = scheduler_params.get("denoise") # 2. Trace sampler input to find KSamplerSelect (only if sampler input exists) if "sampler" in sampler_inputs: diff --git a/py/nodes/save_image.py b/py/nodes/save_image.py index dbf44d07..e11f031a 100644 --- a/py/nodes/save_image.py +++ b/py/nodes/save_image.py @@ -9,7 +9,7 @@ from ..metadata_collector import get_metadata from PIL import Image, PngImagePlugin import piexif -class SaveImage: +class SaveImageLM: NAME = "Save Image (LoraManager)" CATEGORY = "Lora Manager/utils" DESCRIPTION = "Save images with embedded generation metadata in compatible format" diff --git a/py/nodes/utils.py b/py/nodes/utils.py index 5ea94265..402025e1 100644 --- a/py/nodes/utils.py +++ b/py/nodes/utils.py @@ -103,7 +103,7 @@ def nunchaku_load_lora(model, lora_name, lora_strength): # Save the transformer temporarily model_wrapper.model = None - ret_model = copy.deepcopy(model) # copy everything except the model + ret_model = model.clone() ret_model_wrapper = ret_model.model.diffusion_model # Restore the model and set it for the copy diff --git a/py/recipes/base.py b/py/recipes/base.py index 43534348..fccd1214 100644 --- a/py/recipes/base.py +++ b/py/recipes/base.py @@ -37,7 +37,8 @@ class RecipeMetadataParser(ABC): """ pass - async def populate_lora_from_civitai(self, lora_entry: Dict[str, Any], civitai_info_tuple: Tuple[Dict[str, Any], Optional[str]], + @staticmethod + async def populate_lora_from_civitai(lora_entry: Dict[str, Any], civitai_info_tuple: Tuple[Dict[str, Any], Optional[str]], recipe_scanner=None, base_model_counts=None, hash_value=None) -> Optional[Dict[str, Any]]: """ Populate a lora entry with information from Civitai API response @@ -148,8 +149,9 @@ class RecipeMetadataParser(ABC): logger.error(f"Error populating lora from Civitai info: {e}") return lora_entry - - async def populate_checkpoint_from_civitai(self, checkpoint: 
Dict[str, Any], civitai_info: Dict[str, Any]) -> Dict[str, Any]: + + @staticmethod + async def populate_checkpoint_from_civitai(checkpoint: Dict[str, Any], civitai_info: Dict[str, Any]) -> Dict[str, Any]: """ Populate checkpoint information from Civitai API response @@ -187,6 +189,7 @@ class RecipeMetadataParser(ABC): checkpoint['downloadUrl'] = civitai_data.get('downloadUrl', '') checkpoint['modelId'] = civitai_data.get('modelId', checkpoint.get('modelId', 0)) + checkpoint['id'] = civitai_data.get('id', 0) if 'files' in civitai_data: model_file = next( diff --git a/py/recipes/enrichment.py b/py/recipes/enrichment.py new file mode 100644 index 00000000..34acdfcd --- /dev/null +++ b/py/recipes/enrichment.py @@ -0,0 +1,216 @@ +import logging +import json +import re +import os +from typing import Any, Dict, Optional +from .merger import GenParamsMerger +from .base import RecipeMetadataParser +from ..services.metadata_service import get_default_metadata_provider + +logger = logging.getLogger(__name__) + +class RecipeEnricher: + """Service to enrich recipe metadata from multiple sources (Civitai, Embedded, User).""" + + @staticmethod + async def enrich_recipe( + recipe: Dict[str, Any], + civitai_client: Any, + request_params: Optional[Dict[str, Any]] = None + ) -> bool: + """ + Enrich a recipe dictionary in-place with metadata from Civitai and embedded params. + + Args: + recipe: The recipe dictionary to enrich. Must have 'gen_params' initialized. + civitai_client: Authenticated Civitai client instance. + request_params: (Optional) Parameters from a user request (e.g. import). + + Returns: + bool: True if the recipe was modified, False otherwise. + """ + updated = False + gen_params = recipe.get("gen_params", {}) + + # 1. 
Fetch Civitai Info if available + civitai_meta = None + model_version_id = None + + source_url = recipe.get("source_url") or recipe.get("source_path", "") + + # Check if it's a Civitai image URL + image_id_match = re.search(r'civitai\.com/images/(\d+)', str(source_url)) + if image_id_match: + image_id = image_id_match.group(1) + try: + image_info = await civitai_client.get_image_info(image_id) + if image_info: + # Handle nested meta often found in Civitai API responses + raw_meta = image_info.get("meta") + if isinstance(raw_meta, dict): + if "meta" in raw_meta and isinstance(raw_meta["meta"], dict): + civitai_meta = raw_meta["meta"] + else: + civitai_meta = raw_meta + + model_version_id = image_info.get("modelVersionId") + + # If not at top level, check resources in meta + if not model_version_id and civitai_meta: + resources = civitai_meta.get("civitaiResources", []) + for res in resources: + if res.get("type") == "checkpoint": + model_version_id = res.get("modelVersionId") + break + except Exception as e: + logger.warning(f"Failed to fetch Civitai image info: {e}") + + # 2. Merge Parameters + # Priority: request_params > civitai_meta > embedded (existing gen_params) + new_gen_params = GenParamsMerger.merge( + request_params=request_params, + civitai_meta=civitai_meta, + embedded_metadata=gen_params + ) + + if new_gen_params != gen_params: + recipe["gen_params"] = new_gen_params + updated = True + + # 3. 
Checkpoint Enrichment + # If we have a checkpoint entry, or we can find one + # Use 'id' (from Civitai version) as a marker that it's been enriched + checkpoint_entry = recipe.get("checkpoint") + has_full_checkpoint = checkpoint_entry and checkpoint_entry.get("name") and checkpoint_entry.get("id") + + if not has_full_checkpoint: + # Helper to look up values in priority order + def start_lookup(keys): + for source in [request_params, civitai_meta, gen_params]: + if source: + if isinstance(keys, list): + for k in keys: + if k in source: return source[k] + else: + if keys in source: return source[keys] + return None + + target_version_id = model_version_id or start_lookup("modelVersionId") + + # Also check existing checkpoint entry + if not target_version_id and checkpoint_entry: + target_version_id = checkpoint_entry.get("modelVersionId") or checkpoint_entry.get("id") + + # Check for version ID in resources (which might be a string in gen_params) + if not target_version_id: + # Look in all sources for "Civitai resources" + resources_val = start_lookup(["Civitai resources", "civitai_resources", "resources"]) + if resources_val: + target_version_id = RecipeEnricher._extract_version_id_from_resources({"Civitai resources": resources_val}) + + target_hash = start_lookup(["Model hash", "checkpoint_hash", "hashes"]) + if not target_hash and checkpoint_entry: + target_hash = checkpoint_entry.get("hash") or checkpoint_entry.get("model_hash") + + # Look for 'Model' which sometimes is the hash or name + model_val = start_lookup("Model") + + # Look for Checkpoint name fallback + checkpoint_val = checkpoint_entry.get("name") if checkpoint_entry else None + if not checkpoint_val: + checkpoint_val = start_lookup(["Checkpoint", "checkpoint"]) + + checkpoint_updated = await RecipeEnricher._resolve_and_populate_checkpoint( + recipe, target_version_id, target_hash, model_val, checkpoint_val + ) + if checkpoint_updated: + updated = True + else: + # Checkpoint exists, no need to sync to 
gen_params anymore. + pass + # base_model resolution moved to _resolve_and_populate_checkpoint to support strict formatting + return updated + + @staticmethod + def _extract_version_id_from_resources(gen_params: Dict[str, Any]) -> Optional[Any]: + """Try to find modelVersionId in Civitai resources parameter.""" + civitai_resources_raw = gen_params.get("Civitai resources") + if not civitai_resources_raw: + return None + + resources_list = None + if isinstance(civitai_resources_raw, str): + try: + resources_list = json.loads(civitai_resources_raw) + except Exception: + pass + elif isinstance(civitai_resources_raw, list): + resources_list = civitai_resources_raw + + if isinstance(resources_list, list): + for res in resources_list: + if res.get("type") == "checkpoint": + return res.get("modelVersionId") + return None + + @staticmethod + async def _resolve_and_populate_checkpoint( + recipe: Dict[str, Any], + target_version_id: Optional[Any], + target_hash: Optional[str], + model_val: Optional[str], + checkpoint_val: Optional[str] + ) -> bool: + """Find checkpoint metadata and populate it in the recipe.""" + metadata_provider = await get_default_metadata_provider() + civitai_info = None + + if target_version_id: + civitai_info = await metadata_provider.get_model_version_info(str(target_version_id)) + elif target_hash: + civitai_info = await metadata_provider.get_model_by_hash(target_hash) + else: + # Look for 'Model' which sometimes is the hash or name + if model_val and len(model_val) == 10: # Likely a short hash + civitai_info = await metadata_provider.get_model_by_hash(model_val) + + if civitai_info and not (isinstance(civitai_info, tuple) and civitai_info[1] == "Model not found"): + # If we already have a partial checkpoint, use it as base + existing_cp = recipe.get("checkpoint") + if existing_cp is None: + existing_cp = {} + checkpoint_data = await RecipeMetadataParser.populate_checkpoint_from_civitai(existing_cp, civitai_info) + # 1. 
First, resolve base_model using full data before we format it away + current_base_model = recipe.get("base_model") + resolved_base_model = checkpoint_data.get("baseModel") + if resolved_base_model: + # Update if empty OR if it matches our generic prefix but is less specific + is_generic = not current_base_model or current_base_model.lower() in ["flux", "sdxl", "sd15"] + if is_generic and resolved_base_model != current_base_model: + recipe["base_model"] = resolved_base_model + + # 2. Format according to requirements: type, modelId, modelVersionId, modelName, modelVersionName + formatted_checkpoint = { + "type": "checkpoint", + "modelId": checkpoint_data.get("modelId"), + "modelVersionId": checkpoint_data.get("id") or checkpoint_data.get("modelVersionId"), + "modelName": checkpoint_data.get("name"), # In base.py, 'name' is populated from civitai_data['model']['name'] + "modelVersionName": checkpoint_data.get("version") # In base.py, 'version' is populated from civitai_data['name'] + } + # Remove None values + recipe["checkpoint"] = {k: v for k, v in formatted_checkpoint.items() if v is not None} + + return True + else: + # Fallback to name extraction if we don't already have one + existing_cp = recipe.get("checkpoint") + if not existing_cp or not existing_cp.get("modelName"): + cp_name = checkpoint_val + if cp_name: + recipe["checkpoint"] = { + "type": "checkpoint", + "modelName": cp_name + } + return True + + return False diff --git a/py/recipes/merger.py b/py/recipes/merger.py new file mode 100644 index 00000000..93d19857 --- /dev/null +++ b/py/recipes/merger.py @@ -0,0 +1,98 @@ +from typing import Any, Dict, Optional +import logging + +logger = logging.getLogger(__name__) + +class GenParamsMerger: + """Utility to merge generation parameters from multiple sources with priority.""" + + BLACKLISTED_KEYS = { + "id", "url", "userId", "username", "createdAt", "updatedAt", "hash", "meta", + "draft", "extra", "width", "height", "process", "quantity", "workflow", + 
"baseModel", "resources", "disablePoi", "aspectRatio", "Created Date", + "experimental", "civitaiResources", "civitai_resources", "Civitai resources", + "modelVersionId", "modelId", "hashes", "Model", "Model hash", "checkpoint_hash", + "checkpoint", "checksum", "model_checksum" + } + + NORMALIZATION_MAPPING = { + # Civitai specific + "cfgScale": "cfg_scale", + "clipSkip": "clip_skip", + "negativePrompt": "negative_prompt", + # Case variations + "Sampler": "sampler", + "Steps": "steps", + "Seed": "seed", + "Size": "size", + "Prompt": "prompt", + "Negative prompt": "negative_prompt", + "Cfg scale": "cfg_scale", + "Clip skip": "clip_skip", + "Denoising strength": "denoising_strength", + } + + @staticmethod + def merge( + request_params: Optional[Dict[str, Any]] = None, + civitai_meta: Optional[Dict[str, Any]] = None, + embedded_metadata: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + """ + Merge generation parameters from three sources. + + Priority: request_params > civitai_meta > embedded_metadata + + Args: + request_params: Params provided directly in the import request + civitai_meta: Params from Civitai Image API 'meta' field + embedded_metadata: Params extracted from image EXIF/embedded metadata + + Returns: + Merged parameters dictionary + """ + result = {} + + # 1. Start with embedded metadata (lowest priority) + if embedded_metadata: + # If it's a full recipe metadata, we use its gen_params + if "gen_params" in embedded_metadata and isinstance(embedded_metadata["gen_params"], dict): + GenParamsMerger._update_normalized(result, embedded_metadata["gen_params"]) + else: + # Otherwise assume the dict itself contains gen_params + GenParamsMerger._update_normalized(result, embedded_metadata) + + # 2. Layer Civitai meta (medium priority) + if civitai_meta: + GenParamsMerger._update_normalized(result, civitai_meta) + + # 3. 
Layer request params (highest priority) + if request_params: + GenParamsMerger._update_normalized(result, request_params) + + # Filter out blacklisted keys and also the original camelCase keys if they were normalized + final_result = {} + for k, v in result.items(): + if k in GenParamsMerger.BLACKLISTED_KEYS: + continue + if k in GenParamsMerger.NORMALIZATION_MAPPING: + continue + final_result[k] = v + + return final_result + + @staticmethod + def _update_normalized(target: Dict[str, Any], source: Dict[str, Any]) -> None: + """Update target dict with normalized keys from source.""" + for k, v in source.items(): + normalized_key = GenParamsMerger.NORMALIZATION_MAPPING.get(k, k) + target[normalized_key] = v + # Also keep the original key for now if it's not the same, + # so we can filter at the end or avoid losing it if it wasn't supposed to be renamed? + # Actually, if we rename it, we should probably NOT keep both in 'target' + # because we want to filter them out at the end anyway. + if normalized_key != k: + # If we are overwriting an existing snake_case key with a camelCase one's value, + # that's fine because of the priority order of calls to _update_normalized. 
+ pass + target[k] = v diff --git a/py/recipes/parsers/comfy.py b/py/recipes/parsers/comfy.py index f81a15ad..e1d7251e 100644 --- a/py/recipes/parsers/comfy.py +++ b/py/recipes/parsers/comfy.py @@ -36,9 +36,6 @@ class ComfyMetadataParser(RecipeMetadataParser): # Find all LoraLoader nodes lora_nodes = {k: v for k, v in data.items() if isinstance(v, dict) and v.get('class_type') == 'LoraLoader'} - if not lora_nodes: - return {"error": "No LoRA information found in this ComfyUI workflow", "loras": []} - # Process each LoraLoader node for node_id, node in lora_nodes.items(): if 'inputs' not in node or 'lora_name' not in node['inputs']: diff --git a/py/routes/base_recipe_routes.py b/py/routes/base_recipe_routes.py index c598a6d2..162f3491 100644 --- a/py/routes/base_recipe_routes.py +++ b/py/routes/base_recipe_routes.py @@ -79,26 +79,8 @@ class BaseRecipeRoutes: return app.on_startup.append(self.attach_dependencies) - app.on_startup.append(self.prewarm_cache) self._startup_hooks_registered = True - async def prewarm_cache(self, app: web.Application | None = None) -> None: - """Pre-load recipe and LoRA caches on startup.""" - - try: - await self.attach_dependencies(app) - - if self.lora_scanner is not None: - await self.lora_scanner.get_cached_data() - hash_index = getattr(self.lora_scanner, "_hash_index", None) - if hash_index is not None and hasattr(hash_index, "_hash_to_path"): - _ = len(hash_index._hash_to_path) - - if self.recipe_scanner is not None: - await self.recipe_scanner.get_cached_data(force_refresh=True) - except Exception as exc: - logger.error("Error pre-warming recipe cache: %s", exc, exc_info=True) - def to_route_mapping(self) -> Mapping[str, Callable]: """Return a mapping of handler name to coroutine for registrar binding.""" diff --git a/py/routes/handlers/model_handlers.py b/py/routes/handlers/model_handlers.py index b07bac01..9d8100c8 100644 --- a/py/routes/handlers/model_handlers.py +++ b/py/routes/handlers/model_handlers.py @@ -5,6 +5,7 @@ import 
asyncio import json import logging import os +import time from dataclasses import dataclass from typing import Any, Awaitable, Callable, Dict, Iterable, List, Mapping, Optional @@ -61,6 +62,37 @@ class ModelPageView: self._settings = settings_service self._server_i18n = server_i18n self._logger = logger + self._app_version = self._get_app_version() + + def _get_app_version(self) -> str: + version = "1.0.0" + short_hash = "stable" + try: + import toml + current_file = os.path.abspath(__file__) + # Navigate up from py/routes/handlers/model_handlers.py to project root + root_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(current_file)))) + pyproject_path = os.path.join(root_dir, 'pyproject.toml') + + if os.path.exists(pyproject_path): + with open(pyproject_path, 'r', encoding='utf-8') as f: + data = toml.load(f) + version = data.get('project', {}).get('version', '1.0.0').replace('v', '') + + # Try to get git info for granular cache busting + git_dir = os.path.join(root_dir, '.git') + if os.path.exists(git_dir): + try: + import git + repo = git.Repo(root_dir) + short_hash = repo.head.commit.hexsha[:7] + except Exception: + # Fallback if git is not available or not a repo + pass + except Exception as e: + self._logger.debug(f"Failed to read version info for cache busting: {e}") + + return f"{version}-{short_hash}" async def handle(self, request: web.Request) -> web.Response: try: @@ -96,6 +128,7 @@ class ModelPageView: "request": request, "folders": [], "t": self._server_i18n.get_translation, + "version": self._app_version, } if not is_initializing: @@ -128,9 +161,12 @@ class ModelListingHandler: self._logger = logger async def get_models(self, request: web.Request) -> web.Response: + start_time = time.perf_counter() try: params = self._parse_common_params(request) result = await self._service.get_paginated_data(**params) + + format_start = time.perf_counter() formatted_result = { "items": [await self._service.format_response(item) for item in 
result["items"]], "total": result["total"], @@ -138,6 +174,13 @@ class ModelListingHandler: "page_size": result["page_size"], "total_pages": result["total_pages"], } + format_duration = time.perf_counter() - format_start + + duration = time.perf_counter() - start_time + self._logger.info( + "Request for %s/list took %.3fs (formatting: %.3fs)", + self._service.model_type, duration, format_duration + ) return web.json_response(formatted_result) except Exception as exc: self._logger.error("Error retrieving %ss: %s", self._service.model_type, exc, exc_info=True) diff --git a/py/routes/handlers/recipe_handlers.py b/py/routes/handlers/recipe_handlers.py index cee3ad0c..cb7e6f2c 100644 --- a/py/routes/handlers/recipe_handlers.py +++ b/py/routes/handlers/recipe_handlers.py @@ -5,6 +5,7 @@ import json import logging import os import re +import asyncio import tempfile from dataclasses import dataclass from typing import Any, Awaitable, Callable, Dict, List, Mapping, Optional @@ -23,6 +24,11 @@ from ...services.recipes import ( RecipeValidationError, ) from ...services.metadata_service import get_default_metadata_provider +from ...utils.civitai_utils import rewrite_preview_url +from ...utils.exif_utils import ExifUtils +from ...recipes.merger import GenParamsMerger +from ...recipes.enrichment import RecipeEnricher +from ...services.websocket_manager import ws_manager as default_ws_manager Logger = logging.Logger EnsureDependenciesCallable = Callable[[], Awaitable[None]] @@ -55,16 +61,25 @@ class RecipeHandlerSet: "delete_recipe": self.management.delete_recipe, "get_top_tags": self.query.get_top_tags, "get_base_models": self.query.get_base_models, + "get_roots": self.query.get_roots, + "get_folders": self.query.get_folders, + "get_folder_tree": self.query.get_folder_tree, + "get_unified_folder_tree": self.query.get_unified_folder_tree, "share_recipe": self.sharing.share_recipe, "download_shared_recipe": self.sharing.download_shared_recipe, "get_recipe_syntax": 
self.query.get_recipe_syntax, "update_recipe": self.management.update_recipe, "reconnect_lora": self.management.reconnect_lora, "find_duplicates": self.query.find_duplicates, + "move_recipes_bulk": self.management.move_recipes_bulk, "bulk_delete": self.management.bulk_delete, "save_recipe_from_widget": self.management.save_recipe_from_widget, "get_recipes_for_lora": self.query.get_recipes_for_lora, "scan_recipes": self.query.scan_recipes, + "move_recipe": self.management.move_recipe, + "repair_recipes": self.management.repair_recipes, + "repair_recipe": self.management.repair_recipe, + "get_repair_progress": self.management.get_repair_progress, } @@ -148,12 +163,15 @@ class RecipeListingHandler: page_size = int(request.query.get("page_size", "20")) sort_by = request.query.get("sort_by", "date") search = request.query.get("search") + folder = request.query.get("folder") + recursive = request.query.get("recursive", "true").lower() == "true" search_options = { "title": request.query.get("search_title", "true").lower() == "true", "tags": request.query.get("search_tags", "true").lower() == "true", "lora_name": request.query.get("search_lora_name", "true").lower() == "true", "lora_model": request.query.get("search_lora_model", "true").lower() == "true", + "prompt": request.query.get("search_prompt", "true").lower() == "true", } filters: Dict[str, Any] = {} @@ -161,6 +179,9 @@ class RecipeListingHandler: if base_models: filters["base_model"] = base_models.split(",") + if request.query.get("favorite", "false").lower() == "true": + filters["favorite"] = True + tag_filters: Dict[str, str] = {} legacy_tags = request.query.get("tags") if legacy_tags: @@ -192,6 +213,8 @@ class RecipeListingHandler: filters=filters, search_options=search_options, lora_hash=lora_hash, + folder=folder, + recursive=recursive, ) for item in result.get("items", []): @@ -298,6 +321,58 @@ class RecipeQueryHandler: self._logger.error("Error retrieving base models: %s", exc, exc_info=True) return 
web.json_response({"success": False, "error": str(exc)}, status=500) + async def get_roots(self, request: web.Request) -> web.Response: + try: + await self._ensure_dependencies_ready() + recipe_scanner = self._recipe_scanner_getter() + if recipe_scanner is None: + raise RuntimeError("Recipe scanner unavailable") + + roots = [recipe_scanner.recipes_dir] if recipe_scanner.recipes_dir else [] + return web.json_response({"success": True, "roots": roots}) + except Exception as exc: + self._logger.error("Error retrieving recipe roots: %s", exc, exc_info=True) + return web.json_response({"success": False, "error": str(exc)}, status=500) + + async def get_folders(self, request: web.Request) -> web.Response: + try: + await self._ensure_dependencies_ready() + recipe_scanner = self._recipe_scanner_getter() + if recipe_scanner is None: + raise RuntimeError("Recipe scanner unavailable") + + folders = await recipe_scanner.get_folders() + return web.json_response({"success": True, "folders": folders}) + except Exception as exc: + self._logger.error("Error retrieving recipe folders: %s", exc, exc_info=True) + return web.json_response({"success": False, "error": str(exc)}, status=500) + + async def get_folder_tree(self, request: web.Request) -> web.Response: + try: + await self._ensure_dependencies_ready() + recipe_scanner = self._recipe_scanner_getter() + if recipe_scanner is None: + raise RuntimeError("Recipe scanner unavailable") + + folder_tree = await recipe_scanner.get_folder_tree() + return web.json_response({"success": True, "tree": folder_tree}) + except Exception as exc: + self._logger.error("Error retrieving recipe folder tree: %s", exc, exc_info=True) + return web.json_response({"success": False, "error": str(exc)}, status=500) + + async def get_unified_folder_tree(self, request: web.Request) -> web.Response: + try: + await self._ensure_dependencies_ready() + recipe_scanner = self._recipe_scanner_getter() + if recipe_scanner is None: + raise RuntimeError("Recipe scanner 
unavailable") + + folder_tree = await recipe_scanner.get_folder_tree() + return web.json_response({"success": True, "tree": folder_tree}) + except Exception as exc: + self._logger.error("Error retrieving unified recipe folder tree: %s", exc, exc_info=True) + return web.json_response({"success": False, "error": str(exc)}, status=500) + async def get_recipes_for_lora(self, request: web.Request) -> web.Response: try: await self._ensure_dependencies_ready() @@ -410,6 +485,7 @@ class RecipeManagementHandler: analysis_service: RecipeAnalysisService, downloader_factory, civitai_client_getter: CivitaiClientGetter, + ws_manager=default_ws_manager, ) -> None: self._ensure_dependencies_ready = ensure_dependencies_ready self._recipe_scanner_getter = recipe_scanner_getter @@ -418,6 +494,7 @@ class RecipeManagementHandler: self._analysis_service = analysis_service self._downloader_factory = downloader_factory self._civitai_client_getter = civitai_client_getter + self._ws_manager = ws_manager async def save_recipe(self, request: web.Request) -> web.Response: try: @@ -436,6 +513,7 @@ class RecipeManagementHandler: name=payload["name"], tags=payload["tags"], metadata=payload["metadata"], + extension=payload.get("extension"), ) return web.json_response(result.payload, status=result.status) except RecipeValidationError as exc: @@ -444,17 +522,84 @@ class RecipeManagementHandler: self._logger.error("Error saving recipe: %s", exc, exc_info=True) return web.json_response({"error": str(exc)}, status=500) + async def repair_recipes(self, request: web.Request) -> web.Response: + try: + await self._ensure_dependencies_ready() + recipe_scanner = self._recipe_scanner_getter() + if recipe_scanner is None: + return web.json_response({"success": False, "error": "Recipe scanner unavailable"}, status=503) + + # Check if already running + if self._ws_manager.get_recipe_repair_progress(): + return web.json_response({"success": False, "error": "Recipe repair already in progress"}, status=409) + + 
async def progress_callback(data): + await self._ws_manager.broadcast_recipe_repair_progress(data) + + # Run in background to avoid timeout + async def run_repair(): + try: + await recipe_scanner.repair_all_recipes( + progress_callback=progress_callback + ) + except Exception as e: + self._logger.error(f"Error in recipe repair task: {e}", exc_info=True) + await self._ws_manager.broadcast_recipe_repair_progress({ + "status": "error", + "error": str(e) + }) + finally: + # Keep the final status for a while so the UI can see it + await asyncio.sleep(5) + self._ws_manager.cleanup_recipe_repair_progress() + + asyncio.create_task(run_repair()) + + return web.json_response({"success": True, "message": "Recipe repair started"}) + except Exception as exc: + self._logger.error("Error starting recipe repair: %s", exc, exc_info=True) + return web.json_response({"success": False, "error": str(exc)}, status=500) + + async def repair_recipe(self, request: web.Request) -> web.Response: + try: + await self._ensure_dependencies_ready() + recipe_scanner = self._recipe_scanner_getter() + if recipe_scanner is None: + return web.json_response({"success": False, "error": "Recipe scanner unavailable"}, status=503) + + recipe_id = request.match_info["recipe_id"] + result = await recipe_scanner.repair_recipe_by_id(recipe_id) + return web.json_response(result) + except RecipeNotFoundError as exc: + return web.json_response({"success": False, "error": str(exc)}, status=404) + except Exception as exc: + self._logger.error("Error repairing single recipe: %s", exc, exc_info=True) + return web.json_response({"success": False, "error": str(exc)}, status=500) + + async def get_repair_progress(self, request: web.Request) -> web.Response: + try: + progress = self._ws_manager.get_recipe_repair_progress() + if progress: + return web.json_response({"success": True, "progress": progress}) + return web.json_response({"success": False, "message": "No repair in progress"}, status=404) + except Exception as 
exc: + self._logger.error("Error getting repair progress: %s", exc, exc_info=True) + return web.json_response({"success": False, "error": str(exc)}, status=500) + + async def import_remote_recipe(self, request: web.Request) -> web.Response: try: await self._ensure_dependencies_ready() recipe_scanner = self._recipe_scanner_getter() if recipe_scanner is None: raise RuntimeError("Recipe scanner unavailable") - + + # 1. Parse Parameters params = request.rel_url.query image_url = params.get("image_url") name = params.get("name") resources_raw = params.get("resources") + if not image_url: raise RecipeValidationError("Missing required field: image_url") if not name: @@ -463,27 +608,93 @@ class RecipeManagementHandler: raise RecipeValidationError("Missing required field: resources") checkpoint_entry, lora_entries = self._parse_resources_payload(resources_raw) - gen_params = self._parse_gen_params(params.get("gen_params")) + gen_params_request = self._parse_gen_params(params.get("gen_params")) + + # 2. Initial Metadata Construction metadata: Dict[str, Any] = { "base_model": params.get("base_model", "") or "", "loras": lora_entries, + "gen_params": gen_params_request or {}, + "source_url": image_url } + source_path = params.get("source_path") if source_path: metadata["source_path"] = source_path - if gen_params is not None: - metadata["gen_params"] = gen_params + + # Checkpoint handling if checkpoint_entry: metadata["checkpoint"] = checkpoint_entry - gen_params_ref = metadata.setdefault("gen_params", {}) - if "checkpoint" not in gen_params_ref: - gen_params_ref["checkpoint"] = checkpoint_entry - base_model_from_metadata = await self._resolve_base_model_from_checkpoint(checkpoint_entry) - if base_model_from_metadata: - metadata["base_model"] = base_model_from_metadata + # Ensure checkpoint is also in gen_params for consistency if needed by enricher? + # Actually enricher looks at metadata['checkpoint'], so this is fine. 
+ + # Try to resolve base model from checkpoint if not explicitly provided + if not metadata["base_model"]: + base_model_from_metadata = await self._resolve_base_model_from_checkpoint(checkpoint_entry) + if base_model_from_metadata: + metadata["base_model"] = base_model_from_metadata tags = self._parse_tags(params.get("tags")) - image_bytes = await self._download_image_bytes(image_url) + + # 3. Download Image + image_bytes, extension, civitai_meta_from_download = await self._download_remote_media(image_url) + + # 4. Extract Embedded Metadata + # Note: We still extract this here because Enricher currently expects 'gen_params' to already be populated + # with embedded data if we want it to merge it. + # However, logic in Enricher merges: request > civitai > embedded. + # So we should gather embedded params and put them into the recipe's gen_params (as initial state) + # OR pass them to enricher to handle? + # The interface of Enricher.enrich_recipe takes `recipe` (with gen_params) and `request_params`. + # So let's extract embedded and put it into recipe['gen_params'] but careful not to overwrite request params. + # Actually, `GenParamsMerger` which `Enricher` uses handles 3 layers. + # But `Enricher` interface is: recipe['gen_params'] (as embedded) + request_params + civitai (fetched internally). + # Wait, `Enricher` fetches Civitai info internally based on URL. + # `civitai_meta_from_download` is returned by `_download_remote_media` which might be useful if URL didn't have ID. 
+ + # Let's extract embedded metadata first + embedded_gen_params = {} + try: + with tempfile.NamedTemporaryFile(suffix=extension, delete=False) as temp_img: + temp_img.write(image_bytes) + temp_img_path = temp_img.name + + try: + raw_embedded = ExifUtils.extract_image_metadata(temp_img_path) + if raw_embedded: + parser = self._analysis_service._recipe_parser_factory.create_parser(raw_embedded) + if parser: + parsed_embedded = await parser.parse_metadata(raw_embedded, recipe_scanner=recipe_scanner) + if parsed_embedded and "gen_params" in parsed_embedded: + embedded_gen_params = parsed_embedded["gen_params"] + else: + embedded_gen_params = {"raw_metadata": raw_embedded} + finally: + if os.path.exists(temp_img_path): + os.unlink(temp_img_path) + except Exception as exc: + self._logger.warning("Failed to extract embedded metadata during import: %s", exc) + + # Pre-populate gen_params with embedded data so Enricher treats it as the "base" layer + if embedded_gen_params: + # Merge embedded into existing gen_params (which currently only has request params if any) + # But wait, we want request params to override everything. + # So we should set recipe['gen_params'] = embedded, and pass request params to enricher. + metadata["gen_params"] = embedded_gen_params + + # 5. Enrich with unified logic + # This will fetch Civitai info (if URL matches) and merge: request > civitai > embedded + civitai_client = self._civitai_client_getter() + await RecipeEnricher.enrich_recipe( + recipe=metadata, + civitai_client=civitai_client, + request_params=gen_params_request # Pass explicit request params here to override + ) + + # If we got civitai_meta from download but Enricher didn't fetch it (e.g. not a civitai URL or failed), + # we might want to manually merge it? + # But usually `import_remote_recipe` is used with Civitai URLs. + # For now, relying on Enricher's internal fetch is consistent with repair. 
result = await self._persistence_service.save_recipe( recipe_scanner=recipe_scanner, @@ -492,6 +703,7 @@ class RecipeManagementHandler: name=name, tags=tags, metadata=metadata, + extension=extension, ) return web.json_response(result.payload, status=result.status) except RecipeValidationError as exc: @@ -541,6 +753,64 @@ class RecipeManagementHandler: self._logger.error("Error updating recipe: %s", exc, exc_info=True) return web.json_response({"error": str(exc)}, status=500) + async def move_recipe(self, request: web.Request) -> web.Response: + try: + await self._ensure_dependencies_ready() + recipe_scanner = self._recipe_scanner_getter() + if recipe_scanner is None: + raise RuntimeError("Recipe scanner unavailable") + + data = await request.json() + recipe_id = data.get("recipe_id") + target_path = data.get("target_path") + if not recipe_id or not target_path: + return web.json_response( + {"success": False, "error": "recipe_id and target_path are required"}, status=400 + ) + + result = await self._persistence_service.move_recipe( + recipe_scanner=recipe_scanner, + recipe_id=str(recipe_id), + target_path=str(target_path), + ) + return web.json_response(result.payload, status=result.status) + except RecipeValidationError as exc: + return web.json_response({"success": False, "error": str(exc)}, status=400) + except RecipeNotFoundError as exc: + return web.json_response({"success": False, "error": str(exc)}, status=404) + except Exception as exc: + self._logger.error("Error moving recipe: %s", exc, exc_info=True) + return web.json_response({"success": False, "error": str(exc)}, status=500) + + async def move_recipes_bulk(self, request: web.Request) -> web.Response: + try: + await self._ensure_dependencies_ready() + recipe_scanner = self._recipe_scanner_getter() + if recipe_scanner is None: + raise RuntimeError("Recipe scanner unavailable") + + data = await request.json() + recipe_ids = data.get("recipe_ids") or [] + target_path = data.get("target_path") + if not 
recipe_ids or not target_path: + return web.json_response( + {"success": False, "error": "recipe_ids and target_path are required"}, status=400 + ) + + result = await self._persistence_service.move_recipes_bulk( + recipe_scanner=recipe_scanner, + recipe_ids=recipe_ids, + target_path=str(target_path), + ) + return web.json_response(result.payload, status=result.status) + except RecipeValidationError as exc: + return web.json_response({"success": False, "error": str(exc)}, status=400) + except RecipeNotFoundError as exc: + return web.json_response({"success": False, "error": str(exc)}, status=404) + except Exception as exc: + self._logger.error("Error moving recipes in bulk: %s", exc, exc_info=True) + return web.json_response({"success": False, "error": str(exc)}, status=500) + async def reconnect_lora(self, request: web.Request) -> web.Response: try: await self._ensure_dependencies_ready() @@ -622,6 +892,7 @@ class RecipeManagementHandler: name: Optional[str] = None tags: list[str] = [] metadata: Optional[Dict[str, Any]] = None + extension: Optional[str] = None while True: field = await reader.next() @@ -652,6 +923,8 @@ class RecipeManagementHandler: metadata = json.loads(metadata_text) except Exception: metadata = {} + elif field.name == "extension": + extension = await field.text() return { "image_bytes": image_bytes, @@ -659,6 +932,7 @@ class RecipeManagementHandler: "name": name, "tags": tags, "metadata": metadata, + "extension": extension, } def _parse_tags(self, tag_text: Optional[str]) -> list[str]: @@ -729,7 +1003,7 @@ class RecipeManagementHandler: "exclude": False, } - async def _download_image_bytes(self, image_url: str) -> bytes: + async def _download_remote_media(self, image_url: str) -> tuple[bytes, str]: civitai_client = self._civitai_client_getter() downloader = await self._downloader_factory() temp_path = None @@ -744,15 +1018,31 @@ class RecipeManagementHandler: image_info = await civitai_client.get_image_info(civitai_match.group(1)) if not 
image_info: raise RecipeDownloadError("Failed to fetch image information from Civitai") - download_url = image_info.get("url") - if not download_url: + + media_url = image_info.get("url") + if not media_url: raise RecipeDownloadError("No image URL found in Civitai response") + + # Use optimized preview URLs if possible + media_type = image_info.get("type") + rewritten_url, _ = rewrite_preview_url(media_url, media_type=media_type) + if rewritten_url: + download_url = rewritten_url + else: + download_url = media_url success, result = await downloader.download_file(download_url, temp_path, use_auth=False) if not success: raise RecipeDownloadError(f"Failed to download image: {result}") + + # Extract extension from URL + url_path = download_url.split('?')[0].split('#')[0] + extension = os.path.splitext(url_path)[1].lower() + if not extension: + extension = ".webp" # Default to webp if unknown + with open(temp_path, "rb") as file_obj: - return file_obj.read() + return file_obj.read(), extension, image_info.get("meta") if civitai_match and image_info else None except RecipeDownloadError: raise except RecipeValidationError: @@ -766,6 +1056,7 @@ class RecipeManagementHandler: except FileNotFoundError: pass + def _safe_int(self, value: Any) -> int: try: return int(value) diff --git a/py/routes/recipe_route_registrar.py b/py/routes/recipe_route_registrar.py index 18bf4cba..b07327b4 100644 --- a/py/routes/recipe_route_registrar.py +++ b/py/routes/recipe_route_registrar.py @@ -27,16 +27,25 @@ ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] 
= ( RouteDefinition("DELETE", "/api/lm/recipe/{recipe_id}", "delete_recipe"), RouteDefinition("GET", "/api/lm/recipes/top-tags", "get_top_tags"), RouteDefinition("GET", "/api/lm/recipes/base-models", "get_base_models"), + RouteDefinition("GET", "/api/lm/recipes/roots", "get_roots"), + RouteDefinition("GET", "/api/lm/recipes/folders", "get_folders"), + RouteDefinition("GET", "/api/lm/recipes/folder-tree", "get_folder_tree"), + RouteDefinition("GET", "/api/lm/recipes/unified-folder-tree", "get_unified_folder_tree"), RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/share", "share_recipe"), RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/share/download", "download_shared_recipe"), - RouteDefinition("GET", "/api/lm/recipe/{recipe_id}/syntax", "get_recipe_syntax"), + RouteDefinition("GET", "/api/lm/recipes/syntax", "get_recipe_syntax"), RouteDefinition("PUT", "/api/lm/recipe/{recipe_id}/update", "update_recipe"), + RouteDefinition("POST", "/api/lm/recipe/move", "move_recipe"), + RouteDefinition("POST", "/api/lm/recipes/move-bulk", "move_recipes_bulk"), RouteDefinition("POST", "/api/lm/recipe/lora/reconnect", "reconnect_lora"), RouteDefinition("GET", "/api/lm/recipes/find-duplicates", "find_duplicates"), RouteDefinition("POST", "/api/lm/recipes/bulk-delete", "bulk_delete"), RouteDefinition("POST", "/api/lm/recipes/save-from-widget", "save_recipe_from_widget"), RouteDefinition("GET", "/api/lm/recipes/for-lora", "get_recipes_for_lora"), RouteDefinition("GET", "/api/lm/recipes/scan", "scan_recipes"), + RouteDefinition("POST", "/api/lm/recipes/repair", "repair_recipes"), + RouteDefinition("POST", "/api/lm/recipe/{recipe_id}/repair", "repair_recipe"), + RouteDefinition("GET", "/api/lm/recipes/repair-progress", "get_repair_progress"), ) diff --git a/py/services/base_model_service.py b/py/services/base_model_service.py index 12333dca..8f3f306a 100644 --- a/py/services/base_model_service.py +++ b/py/services/base_model_service.py @@ -3,6 +3,7 @@ import asyncio from typing 
import Any, Dict, List, Optional, Type, TYPE_CHECKING import logging import os +import time from ..utils.constants import VALID_LORA_TYPES from ..utils.models import BaseModelMetadata @@ -80,13 +81,20 @@ class BaseModelService(ABC): **kwargs, ) -> Dict: """Get paginated and filtered model data""" + overall_start = time.perf_counter() sort_params = self.cache_repository.parse_sort(sort_by) if sort_params.key == 'usage': sorted_data = await self._fetch_with_usage_sort(sort_params) else: sorted_data = await self.cache_repository.fetch_sorted(sort_params) + + t0 = time.perf_counter() + sorted_data = await self.cache_repository.fetch_sorted(sort_params) + fetch_duration = time.perf_counter() - t0 + initial_count = len(sorted_data) + t1 = time.perf_counter() if hash_filters: filtered_data = await self._apply_hash_filters(sorted_data, hash_filters) else: @@ -116,17 +124,25 @@ class BaseModelService(ABC): if allow_selling_generated_content is not None: filtered_data = await self._apply_allow_selling_filter(filtered_data, allow_selling_generated_content) + filter_duration = time.perf_counter() - t1 + post_filter_count = len(filtered_data) annotated_for_filter: Optional[List[Dict]] = None + t2 = time.perf_counter() if update_available_only: annotated_for_filter = await self._annotate_update_flags(filtered_data) filtered_data = [ item for item in annotated_for_filter if item.get('update_available') ] + update_filter_duration = time.perf_counter() - t2 + final_count = len(filtered_data) + t3 = time.perf_counter() paginated = self._paginate(filtered_data, page, page_size) + pagination_duration = time.perf_counter() - t3 + t4 = time.perf_counter() if update_available_only: # Items already include update flags thanks to the pre-filter annotation. 
paginated['items'] = list(paginated['items']) @@ -134,6 +150,16 @@ class BaseModelService(ABC): paginated['items'] = await self._annotate_update_flags( paginated['items'], ) + annotate_duration = time.perf_counter() - t4 + + overall_duration = time.perf_counter() - overall_start + logger.info( + "%s.get_paginated_data took %.3fs (fetch: %.3fs, filter: %.3fs, update_filter: %.3fs, pagination: %.3fs, annotate: %.3fs). " + "Counts: initial=%d, post_filter=%d, final=%d", + self.__class__.__name__, overall_duration, fetch_duration, filter_duration, + update_filter_duration, pagination_duration, annotate_duration, + initial_count, post_filter_count, final_count + ) return paginated async def _fetch_with_usage_sort(self, sort_params): diff --git a/py/services/model_cache.py b/py/services/model_cache.py index 13417778..e29f6548 100644 --- a/py/services/model_cache.py +++ b/py/services/model_cache.py @@ -1,4 +1,8 @@ import asyncio +import time +import logging + +logger = logging.getLogger(__name__) from typing import Any, Dict, List, Optional, Tuple from dataclasses import dataclass, field from operator import itemgetter @@ -215,24 +219,25 @@ class ModelCache: def _sort_data(self, data: List[Dict], sort_key: str, order: str) -> List[Dict]: """Sort data by sort_key and order""" + start_time = time.perf_counter() reverse = (order == 'desc') if sort_key == 'name': # Natural sort by configured display name, case-insensitive - return natsorted( + result = natsorted( data, key=lambda x: self._get_display_name(x).lower(), reverse=reverse ) elif sort_key == 'date': # Sort by modified timestamp - return sorted( + result = sorted( data, key=itemgetter('modified'), reverse=reverse ) elif sort_key == 'size': # Sort by file size - return sorted( + result = sorted( data, key=itemgetter('size'), reverse=reverse @@ -249,16 +254,28 @@ class ModelCache: ) else: # Fallback: no sort - return list(data) + result = list(data) + + duration = time.perf_counter() - start_time + if duration > 0.05: 
+ logger.info("ModelCache._sort_data(%s, %s) for %d items took %.3fs", sort_key, order, len(data), duration) + return result async def get_sorted_data(self, sort_key: str = 'name', order: str = 'asc') -> List[Dict]: """Get sorted data by sort_key and order, using cache if possible""" async with self._lock: if (sort_key, order) == self._last_sort: return self._last_sorted_data + + start_time = time.perf_counter() sorted_data = self._sort_data(self.raw_data, sort_key, order) self._last_sort = (sort_key, order) self._last_sorted_data = sorted_data + + duration = time.perf_counter() - start_time + if duration > 0.1: + logger.debug("ModelCache.get_sorted_data(%s, %s) took %.3fs", sort_key, order, duration) + return sorted_data async def update_name_display_mode(self, display_mode: str) -> None: diff --git a/py/services/model_query.py b/py/services/model_query.py index 5b370138..c6287dc0 100644 --- a/py/services/model_query.py +++ b/py/services/model_query.py @@ -5,6 +5,10 @@ from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple from ..utils.constants import NSFW_LEVELS from ..utils.utils import fuzzy_match as default_fuzzy_match +import time +import logging + +logger = logging.getLogger(__name__) DEFAULT_CIVITAI_MODEL_TYPE = "LORA" @@ -115,22 +119,33 @@ class ModelFilterSet: def apply(self, data: Iterable[Dict[str, Any]], criteria: FilterCriteria) -> List[Dict[str, Any]]: """Return items that satisfy the provided criteria.""" + overall_start = time.perf_counter() items = list(data) + initial_count = len(items) if self._settings.get("show_only_sfw", False): + t0 = time.perf_counter() threshold = self._nsfw_levels.get("R", 0) items = [ item for item in items if not item.get("preview_nsfw_level") or item.get("preview_nsfw_level") < threshold ] + sfw_duration = time.perf_counter() - t0 + else: + sfw_duration = 0 + favorites_duration = 0 if criteria.favorites_only: + t0 = time.perf_counter() items = [item for item in items if item.get("favorite", 
False)] + favorites_duration = time.perf_counter() - t0 + folder_duration = 0 folder = criteria.folder options = criteria.search_options or {} recursive = bool(options.get("recursive", True)) if folder is not None: + t0 = time.perf_counter() if recursive: if folder: folder_with_sep = f"{folder}/" @@ -140,51 +155,82 @@ class ModelFilterSet: ] else: items = [item for item in items if item.get("folder") == folder] + folder_duration = time.perf_counter() - t0 + base_models_duration = 0 base_models = criteria.base_models or [] if base_models: + t0 = time.perf_counter() base_model_set = set(base_models) items = [item for item in items if item.get("base_model") in base_model_set] + base_models_duration = time.perf_counter() - t0 + tags_duration = 0 tag_filters = criteria.tags or {} - include_tags = set() - exclude_tags = set() - if isinstance(tag_filters, dict): - for tag, state in tag_filters.items(): - if not tag: - continue - if state == "exclude": - exclude_tags.add(tag) - else: - include_tags.add(tag) - else: - include_tags = {tag for tag in tag_filters if tag} + if tag_filters: + t0 = time.perf_counter() + include_tags = set() + exclude_tags = set() + if isinstance(tag_filters, dict): + for tag, state in tag_filters.items(): + if not tag: + continue + if state == "exclude": + exclude_tags.add(tag) + else: + include_tags.add(tag) + else: + include_tags = {tag for tag in tag_filters if tag} - if include_tags: - items = [ - item for item in items - if any(tag in include_tags for tag in (item.get("tags", []) or [])) - ] + if include_tags: + def matches_include(item_tags): + if not item_tags and "__no_tags__" in include_tags: + return True + return any(tag in include_tags for tag in (item_tags or [])) - if exclude_tags: - items = [ - item for item in items - if not any(tag in exclude_tags for tag in (item.get("tags", []) or [])) - ] + items = [ + item for item in items + if matches_include(item.get("tags")) + ] + if exclude_tags: + def matches_exclude(item_tags): + if 
not item_tags and "__no_tags__" in exclude_tags: + return True + return any(tag in exclude_tags for tag in (item_tags or [])) + + items = [ + item for item in items + if not matches_exclude(item.get("tags")) + ] + tags_duration = time.perf_counter() - t0 + + model_types_duration = 0 model_types = criteria.model_types or [] - normalized_model_types = { - model_type for model_type in ( - normalize_civitai_model_type(value) for value in model_types - ) - if model_type - } - if normalized_model_types: - items = [ - item for item in items - if normalize_civitai_model_type(resolve_civitai_model_type(item)) in normalized_model_types - ] + if model_types: + t0 = time.perf_counter() + normalized_model_types = { + model_type for model_type in ( + normalize_civitai_model_type(value) for value in model_types + ) + if model_type + } + if normalized_model_types: + items = [ + item for item in items + if normalize_civitai_model_type(resolve_civitai_model_type(item)) in normalized_model_types + ] + model_types_duration = time.perf_counter() - t0 + duration = time.perf_counter() - overall_start + if duration > 0.1: # Only log if it's potentially slow + logger.info( + "ModelFilterSet.apply took %.3fs (sfw: %.3fs, fav: %.3fs, folder: %.3fs, base: %.3fs, tags: %.3fs, types: %.3fs). 
" + "Count: %d -> %d", + duration, sfw_duration, favorites_duration, folder_duration, + base_models_duration, tags_duration, model_types_duration, + initial_count, len(items) + ) return items diff --git a/py/services/recipe_cache.py b/py/services/recipe_cache.py index ac28b3aa..279c9e37 100644 --- a/py/services/recipe_cache.py +++ b/py/services/recipe_cache.py @@ -7,12 +7,18 @@ from natsort import natsorted @dataclass class RecipeCache: """Cache structure for Recipe data""" + raw_data: List[Dict] sorted_by_name: List[Dict] sorted_by_date: List[Dict] + folders: List[str] | None = None + folder_tree: Dict | None = None def __post_init__(self): self._lock = asyncio.Lock() + # Normalize optional metadata containers + self.folders = self.folders or [] + self.folder_tree = self.folder_tree or {} async def resort(self, name_only: bool = False): """Resort all cached data views""" diff --git a/py/services/recipe_scanner.py b/py/services/recipe_scanner.py index ccaf2395..da96696e 100644 --- a/py/services/recipe_scanner.py +++ b/py/services/recipe_scanner.py @@ -1,7 +1,9 @@ -import os -import logging +from __future__ import annotations + import asyncio import json +import logging +import os import time from typing import Any, Dict, Iterable, List, Optional, Set, Tuple from ..config import config @@ -14,6 +16,9 @@ from .recipes.errors import RecipeNotFoundError from ..utils.utils import calculate_recipe_fingerprint, fuzzy_match from natsort import natsorted import sys +import re +from ..recipes.merger import GenParamsMerger +from ..recipes.enrichment import RecipeEnricher logger = logging.getLogger(__name__) @@ -52,6 +57,8 @@ class RecipeScanner: cls._instance._civitai_client = None # Will be lazily initialized return cls._instance + REPAIR_VERSION = 3 + def __init__( self, lora_scanner: Optional[LoraScanner] = None, @@ -64,6 +71,7 @@ class RecipeScanner: self._initialization_task: Optional[asyncio.Task] = None self._is_initializing = False self._mutation_lock = asyncio.Lock() 
+ self._post_scan_task: Optional[asyncio.Task] = None self._resort_tasks: Set[asyncio.Task] = set() if lora_scanner: self._lora_scanner = lora_scanner @@ -84,6 +92,10 @@ class RecipeScanner: task.cancel() self._resort_tasks.clear() + if self._post_scan_task and not self._post_scan_task.done(): + self._post_scan_task.cancel() + self._post_scan_task = None + self._cache = None self._initialization_task = None self._is_initializing = False @@ -102,19 +114,223 @@ class RecipeScanner: self._civitai_client = await ServiceRegistry.get_civitai_client() return self._civitai_client + async def repair_all_recipes( + self, + progress_callback: Optional[Callable[[Dict], Any]] = None + ) -> Dict[str, Any]: + """Repair all recipes by enrichment with Civitai and embedded metadata. + + Args: + persistence_service: Service for saving updated recipes + progress_callback: Optional callback for progress updates + + Returns: + Dict summary of repair results + """ + async with self._mutation_lock: + cache = await self.get_cached_data() + all_recipes = list(cache.raw_data) + total = len(all_recipes) + repaired_count = 0 + skipped_count = 0 + errors_count = 0 + + civitai_client = await self._get_civitai_client() + + for i, recipe in enumerate(all_recipes): + try: + # Report progress + if progress_callback: + await progress_callback({ + "status": "processing", + "current": i + 1, + "total": total, + "recipe_name": recipe.get("name", "Unknown") + }) + + if await self._repair_single_recipe(recipe, civitai_client): + repaired_count += 1 + else: + skipped_count += 1 + + except Exception as e: + logger.error(f"Error repairing recipe {recipe.get('file_path')}: {e}") + errors_count += 1 + + # Final progress update + if progress_callback: + await progress_callback({ + "status": "completed", + "repaired": repaired_count, + "skipped": skipped_count, + "errors": errors_count, + "total": total + }) + + return { + "success": True, + "repaired": repaired_count, + "skipped": skipped_count, + "errors": 
errors_count, + "total": total + } + + async def repair_recipe_by_id(self, recipe_id: str) -> Dict[str, Any]: + """Repair a single recipe by its ID. + + Args: + recipe_id: ID of the recipe to repair + + Returns: + Dict summary of repair result + """ + async with self._mutation_lock: + # Get raw recipe from cache directly to avoid formatted fields + cache = await self.get_cached_data() + recipe = next((r for r in cache.raw_data if str(r.get('id', '')) == recipe_id), None) + + if not recipe: + raise RecipeNotFoundError(f"Recipe {recipe_id} not found") + + civitai_client = await self._get_civitai_client() + success = await self._repair_single_recipe(recipe, civitai_client) + + # If successfully repaired, we should return the formatted version for the UI + return { + "success": True, + "repaired": 1 if success else 0, + "skipped": 0 if success else 1, + "recipe": await self.get_recipe_by_id(recipe_id) if success else recipe + } + + async def _repair_single_recipe(self, recipe: Dict[str, Any], civitai_client: Any) -> bool: + """Internal helper to repair a single recipe object. + + Args: + recipe: The recipe dictionary to repair (modified in-place) + civitai_client: Authenticated Civitai client + + Returns: + bool: True if recipe was repaired or updated, False if skipped + """ + # 1. Skip if already at latest repair version + if recipe.get("repair_version", 0) >= self.REPAIR_VERSION: + return False + + # 2. Identification: Is repair needed? 
+ has_checkpoint = "checkpoint" in recipe and recipe["checkpoint"] and recipe["checkpoint"].get("name") + gen_params = recipe.get("gen_params", {}) + has_prompt = bool(gen_params.get("prompt")) + + needs_repair = not has_checkpoint or not has_prompt + + if not needs_repair: + # Even if no repair needed, we mark it with version if it was processed + # Always update and save because if we are here, the version is old (checked in step 1) + recipe["repair_version"] = self.REPAIR_VERSION + await self._save_recipe_persistently(recipe) + return True + + # 3. Use Enricher to repair/enrich + try: + updated = await RecipeEnricher.enrich_recipe(recipe, civitai_client) + except Exception as e: + logger.error(f"Error enriching recipe {recipe.get('id')}: {e}") + updated = False + + # 4. Mark version and save if updated or just marking version + # If we updated it, OR if the version is old (which we know it is if we are here), save it. + # Actually, if we are here and updated is False, it means we tried to repair but couldn't/didn't need to. + # But we still want to mark it as processed so we don't try again until version bump. + if updated or recipe.get("repair_version", 0) < self.REPAIR_VERSION: + recipe["repair_version"] = self.REPAIR_VERSION + await self._save_recipe_persistently(recipe) + return True + + return False + + async def _save_recipe_persistently(self, recipe: Dict[str, Any]) -> bool: + """Helper to save a recipe to both JSON and EXIF metadata.""" + recipe_id = recipe.get("id") + if not recipe_id: + return False + + recipe_json_path = await self.get_recipe_json_path(recipe_id) + if not recipe_json_path: + return False + + try: + # 1. Sanitize for storage (remove runtime convenience fields) + clean_recipe = self._sanitize_recipe_for_storage(recipe) + + # 2. Update the original dictionary so that we persist the clean version + # globally if needed, effectively overwriting it in-place. + recipe.clear() + recipe.update(clean_recipe) + + # 3. 
Save JSON + with open(recipe_json_path, 'w', encoding='utf-8') as f: + json.dump(recipe, f, indent=4, ensure_ascii=False) + + # 4. Update EXIF if image exists + image_path = recipe.get('file_path') + if image_path and os.path.exists(image_path): + from ..utils.exif_utils import ExifUtils + ExifUtils.append_recipe_metadata(image_path, recipe) + + return True + except Exception as e: + logger.error(f"Error persisting recipe {recipe_id}: {e}") + return False + + + def _sanitize_recipe_for_storage(self, recipe: Dict[str, Any]) -> Dict[str, Any]: + """Create a clean copy of the recipe without runtime convenience fields.""" + import copy + clean = copy.deepcopy(recipe) + + # 0. Clean top-level runtime fields + for key in ("file_url", "created_date_formatted", "modified_formatted"): + clean.pop(key, None) + + # 1. Clean LORAs + if "loras" in clean and isinstance(clean["loras"], list): + for lora in clean["loras"]: + # Fields to remove (runtime only) + for key in ("inLibrary", "preview_url", "localPath"): + lora.pop(key, None) + + # Normalize weight/strength if mapping is desired (standard in persistence_service) + if "weight" in lora and "strength" not in lora: + lora["strength"] = float(lora.pop("weight")) + + # 2. 
Clean Checkpoint + if "checkpoint" in clean and isinstance(clean["checkpoint"], dict): + cp = clean["checkpoint"] + # Fields to remove (runtime only) + for key in ("inLibrary", "localPath", "preview_url", "thumbnailUrl", "size", "downloadUrl"): + cp.pop(key, None) + + return clean + async def initialize_in_background(self) -> None: """Initialize cache in background using thread pool""" try: + await self._wait_for_lora_scanner() + # Set initial empty cache to avoid None reference errors if self._cache is None: self._cache = RecipeCache( raw_data=[], sorted_by_name=[], - sorted_by_date=[] + sorted_by_date=[], + folders=[], + folder_tree={}, ) # Mark as initializing to prevent concurrent initializations self._is_initializing = True + self._initialization_task = asyncio.current_task() try: # Start timer @@ -126,11 +342,14 @@ class RecipeScanner: None, # Use default thread pool self._initialize_recipe_cache_sync # Run synchronous version in thread ) + if cache is not None: + self._cache = cache # Calculate elapsed time and log it elapsed_time = time.time() - start_time recipe_count = len(cache.raw_data) if cache and hasattr(cache, 'raw_data') else 0 logger.info(f"Recipe cache initialized in {elapsed_time:.2f} seconds. 
Found {recipe_count} recipes") + self._schedule_post_scan_enrichment() finally: # Mark initialization as complete regardless of outcome self._is_initializing = False @@ -207,6 +426,7 @@ class RecipeScanner: # Update cache with the collected data self._cache.raw_data = recipes + self._update_folder_metadata(self._cache) # Create a simplified resort function that doesn't use await if hasattr(self._cache, "resort"): @@ -237,12 +457,97 @@ class RecipeScanner: # Clean up the event loop loop.close() + async def _wait_for_lora_scanner(self) -> None: + """Ensure the LoRA scanner has initialized before recipe enrichment.""" + + if not getattr(self, "_lora_scanner", None): + return + + lora_scanner = self._lora_scanner + cache_ready = getattr(lora_scanner, "_cache", None) is not None + + # If cache is already available, we can proceed + if cache_ready: + return + + # Await an existing initialization task if present + task = getattr(lora_scanner, "_initialization_task", None) + if task and hasattr(task, "done") and not task.done(): + try: + await task + except Exception: # pragma: no cover - defensive guard + pass + if getattr(lora_scanner, "_cache", None) is not None: + return + + # Otherwise, request initialization and proceed once it completes + try: + await lora_scanner.initialize_in_background() + except Exception as exc: # pragma: no cover - defensive guard + logger.debug("Recipe Scanner: LoRA init request failed: %s", exc) + + def _schedule_post_scan_enrichment(self) -> None: + """Kick off a non-blocking enrichment pass to fill remote metadata.""" + + try: + loop = asyncio.get_running_loop() + except RuntimeError: + return + + if self._post_scan_task and not self._post_scan_task.done(): + return + + async def _run_enrichment(): + try: + await self._enrich_cache_metadata() + except asyncio.CancelledError: + raise + except Exception as exc: # pragma: no cover - defensive guard + logger.error("Recipe Scanner: error during post-scan enrichment: %s", exc, exc_info=True) + + 
self._post_scan_task = loop.create_task(_run_enrichment(), name="recipe_cache_enrichment") + + async def _enrich_cache_metadata(self) -> None: + """Perform remote metadata enrichment after the initial scan.""" + + cache = self._cache + if cache is None or not getattr(cache, "raw_data", None): + return + + for index, recipe in enumerate(list(cache.raw_data)): + try: + metadata_updated = await self._update_lora_information(recipe) + if metadata_updated: + recipe_id = recipe.get("id") + if recipe_id: + recipe_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json") + if os.path.exists(recipe_path): + try: + self._write_recipe_file(recipe_path, recipe) + except Exception as exc: # pragma: no cover - best-effort persistence + logger.debug("Recipe Scanner: could not persist recipe %s: %s", recipe_id, exc) + except asyncio.CancelledError: + raise + except Exception as exc: # pragma: no cover - defensive logging + logger.error("Recipe Scanner: error enriching recipe %s: %s", recipe.get("id"), exc, exc_info=True) + + if index % 10 == 0: + await asyncio.sleep(0) + + try: + await cache.resort() + except Exception as exc: # pragma: no cover - defensive logging + logger.debug("Recipe Scanner: error resorting cache after enrichment: %s", exc) + def _schedule_resort(self, *, name_only: bool = False) -> None: """Schedule a background resort of the recipe cache.""" if not self._cache: return + # Keep folder metadata up to date alongside sort order + self._update_folder_metadata() + async def _resort_wrapper() -> None: try: await self._cache.resort(name_only=name_only) @@ -253,6 +558,75 @@ class RecipeScanner: self._resort_tasks.add(task) task.add_done_callback(lambda finished: self._resort_tasks.discard(finished)) + def _calculate_folder(self, recipe_path: str) -> str: + """Calculate a normalized folder path relative to ``recipes_dir``.""" + + recipes_dir = self.recipes_dir + if not recipes_dir: + return "" + + try: + recipe_dir = 
os.path.dirname(os.path.normpath(recipe_path)) + relative_dir = os.path.relpath(recipe_dir, recipes_dir) + if relative_dir in (".", ""): + return "" + return relative_dir.replace(os.path.sep, "/") + except Exception: + return "" + + def _build_folder_tree(self, folders: list[str]) -> dict: + """Build a nested folder tree structure from relative folder paths.""" + + tree: dict[str, dict] = {} + for folder in folders: + if not folder: + continue + + parts = folder.split("/") + current_level = tree + + for part in parts: + if part not in current_level: + current_level[part] = {} + current_level = current_level[part] + + return tree + + def _update_folder_metadata(self, cache: RecipeCache | None = None) -> None: + """Ensure folder lists and tree metadata are synchronized with cache contents.""" + + cache = cache or self._cache + if cache is None: + return + + folders: set[str] = set() + for item in cache.raw_data: + folder_value = item.get("folder", "") + if folder_value is None: + folder_value = "" + if folder_value == ".": + folder_value = "" + normalized = str(folder_value).replace("\\", "/") + item["folder"] = normalized + folders.add(normalized) + + cache.folders = sorted(folders, key=lambda entry: entry.lower()) + cache.folder_tree = self._build_folder_tree(cache.folders) + + async def get_folders(self) -> list[str]: + """Return a sorted list of recipe folders relative to the recipes root.""" + + cache = await self.get_cached_data() + self._update_folder_metadata(cache) + return cache.folders + + async def get_folder_tree(self) -> dict: + """Return a hierarchical tree of recipe folders for sidebar navigation.""" + + cache = await self.get_cached_data() + self._update_folder_metadata(cache) + return cache.folder_tree + @property def recipes_dir(self) -> str: """Get path to recipes directory""" @@ -269,11 +643,14 @@ class RecipeScanner: """Get cached recipe data, refresh if needed""" # If cache is already initialized and no refresh is needed, return it immediately 
if self._cache is not None and not force_refresh: + self._update_folder_metadata() return self._cache # If another initialization is already in progress, wait for it to complete if self._is_initializing and not force_refresh: - return self._cache or RecipeCache(raw_data=[], sorted_by_name=[], sorted_by_date=[]) + return self._cache or RecipeCache( + raw_data=[], sorted_by_name=[], sorted_by_date=[], folders=[], folder_tree={} + ) # If force refresh is requested, initialize the cache directly if force_refresh: @@ -291,11 +668,14 @@ class RecipeScanner: self._cache = RecipeCache( raw_data=raw_data, sorted_by_name=[], - sorted_by_date=[] + sorted_by_date=[], + folders=[], + folder_tree={}, ) - + # Resort cache await self._cache.resort() + self._update_folder_metadata(self._cache) return self._cache @@ -305,7 +685,9 @@ class RecipeScanner: self._cache = RecipeCache( raw_data=[], sorted_by_name=[], - sorted_by_date=[] + sorted_by_date=[], + folders=[], + folder_tree={}, ) return self._cache finally: @@ -316,7 +698,9 @@ class RecipeScanner: logger.error(f"Unexpected error in get_cached_data: {e}") # Return the cache (may be empty or partially initialized) - return self._cache or RecipeCache(raw_data=[], sorted_by_name=[], sorted_by_date=[]) + return self._cache or RecipeCache( + raw_data=[], sorted_by_name=[], sorted_by_date=[], folders=[], folder_tree={} + ) async def refresh_cache(self, force: bool = False) -> RecipeCache: """Public helper to refresh or return the recipe cache.""" @@ -331,6 +715,7 @@ class RecipeScanner: cache = await self.get_cached_data() await cache.add_recipe(recipe_data, resort=False) + self._update_folder_metadata(cache) self._schedule_resort() async def remove_recipe(self, recipe_id: str) -> bool: @@ -344,6 +729,7 @@ class RecipeScanner: if removed is None: return False + self._update_folder_metadata(cache) self._schedule_resort() return True @@ -428,6 +814,9 @@ class RecipeScanner: if path_updated: self._write_recipe_file(recipe_path, 
recipe_data) + + # Track folder placement relative to recipes directory + recipe_data['folder'] = recipe_data.get('folder') or self._calculate_folder(recipe_path) # Ensure loras array exists if 'loras' not in recipe_data: @@ -438,7 +827,7 @@ class RecipeScanner: recipe_data['gen_params'] = {} # Update lora information with local paths and availability - await self._update_lora_information(recipe_data) + lora_metadata_updated = await self._update_lora_information(recipe_data) if recipe_data.get('checkpoint'): checkpoint_entry = self._normalize_checkpoint_entry(recipe_data['checkpoint']) @@ -459,6 +848,12 @@ class RecipeScanner: logger.info(f"Added fingerprint to recipe: {recipe_path}") except Exception as e: logger.error(f"Error writing updated recipe with fingerprint: {e}") + elif lora_metadata_updated: + # Persist updates such as marking invalid entries as deleted + try: + self._write_recipe_file(recipe_path, recipe_data) + except Exception as e: + logger.error(f"Error writing updated recipe metadata: {e}") return recipe_data except Exception as e: @@ -519,7 +914,13 @@ class RecipeScanner: logger.warning(f"Marked lora with modelVersionId {model_version_id} as deleted") metadata_updated = True else: - logger.debug(f"Could not get hash for modelVersionId {model_version_id}") + # No hash returned; mark as deleted to avoid repeated lookups + lora['isDeleted'] = True + metadata_updated = True + logger.warning( + "Marked lora with modelVersionId %s as deleted after failed hash lookup", + model_version_id, + ) # If has hash but no file_name, look up in lora library if 'hash' in lora and (not lora.get('file_name') or not lora['file_name']): @@ -809,7 +1210,7 @@ class RecipeScanner: return await self._lora_scanner.get_model_info_by_name(name) - async def get_paginated_data(self, page: int, page_size: int, sort_by: str = 'date', search: str = None, filters: dict = None, search_options: dict = None, lora_hash: str = None, bypass_filters: bool = True): + async def 
get_paginated_data(self, page: int, page_size: int, sort_by: str = 'date', search: str = None, filters: dict = None, search_options: dict = None, lora_hash: str = None, bypass_filters: bool = True, folder: str | None = None, recursive: bool = True): """Get paginated and filtered recipe data Args: @@ -821,11 +1222,20 @@ class RecipeScanner: search_options: Dictionary of search options to apply lora_hash: Optional SHA256 hash of a LoRA to filter recipes by bypass_filters: If True, ignore other filters when a lora_hash is provided + folder: Optional folder filter relative to recipes directory + recursive: Whether to include recipes in subfolders of the selected folder """ cache = await self.get_cached_data() # Get base dataset - filtered_data = cache.sorted_by_date if sort_by == 'date' else cache.sorted_by_name + sort_field = sort_by.split(':')[0] if ':' in sort_by else sort_by + + if sort_field == 'date': + filtered_data = list(cache.sorted_by_date) + elif sort_field == 'name': + filtered_data = list(cache.sorted_by_name) + else: + filtered_data = list(cache.raw_data) # Apply SFW filtering if enabled from .settings_manager import get_settings_manager @@ -856,6 +1266,22 @@ class RecipeScanner: # Skip further filtering if we're only filtering by LoRA hash with bypass enabled if not (lora_hash and bypass_filters): + # Apply folder filter before other criteria + if folder is not None: + normalized_folder = folder.strip("/") + def matches_folder(item_folder: str) -> bool: + item_path = (item_folder or "").strip("/") + if recursive: + if not normalized_folder: + return True + return item_path == normalized_folder or item_path.startswith(f"{normalized_folder}/") + return item_path == normalized_folder + + filtered_data = [ + item for item in filtered_data + if matches_folder(item.get('folder', '')) + ] + # Apply search filter if search: # Default search options if none provided @@ -892,6 +1318,14 @@ class RecipeScanner: if fuzzy_match(str(lora.get('modelName', '')), 
search): return True + # Search in prompt and negative_prompt if enabled + if search_options.get('prompt', True) and 'gen_params' in item: + gen_params = item['gen_params'] + if fuzzy_match(str(gen_params.get('prompt', '')), search): + return True + if fuzzy_match(str(gen_params.get('negative_prompt', '')), search): + return True + # No match found return False @@ -907,6 +1341,13 @@ class RecipeScanner: if item.get('base_model', '') in filters['base_model'] ] + # Filter by favorite + if 'favorite' in filters and filters['favorite']: + filtered_data = [ + item for item in filtered_data + if item.get('favorite') is True + ] + # Filter by tags if 'tags' in filters and filters['tags']: tag_spec = filters['tags'] @@ -925,17 +1366,41 @@ class RecipeScanner: include_tags = {tag for tag in tag_spec if tag} if include_tags: + def matches_include(item_tags): + if not item_tags and "__no_tags__" in include_tags: + return True + return any(tag in include_tags for tag in (item_tags or [])) + filtered_data = [ item for item in filtered_data - if any(tag in include_tags for tag in (item.get('tags', []) or [])) + if matches_include(item.get('tags')) ] if exclude_tags: + def matches_exclude(item_tags): + if not item_tags and "__no_tags__" in exclude_tags: + return True + return any(tag in exclude_tags for tag in (item_tags or [])) + filtered_data = [ item for item in filtered_data - if not any(tag in exclude_tags for tag in (item.get('tags', []) or [])) + if not matches_exclude(item.get('tags')) ] + + # Apply sorting if not already handled by pre-sorted cache + if ':' in sort_by or sort_field == 'loras_count': + field, order = (sort_by.split(':') + ['desc'])[:2] + reverse = order.lower() == 'desc' + + if field == 'name': + filtered_data = natsorted(filtered_data, key=lambda x: x.get('title', '').lower(), reverse=reverse) + elif field == 'date': + # Use modified if available, falling back to created_date + filtered_data.sort(key=lambda x: (x.get('modified', x.get('created_date', 
0)), x.get('file_path', '')), reverse=reverse) + elif field == 'loras_count': + filtered_data.sort(key=lambda x: len(x.get('loras', [])), reverse=reverse) + # Calculate pagination total_items = len(filtered_data) start_idx = (page - 1) * page_size @@ -1031,6 +1496,30 @@ class RecipeScanner: from datetime import datetime return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S') + async def get_recipe_json_path(self, recipe_id: str) -> Optional[str]: + """Locate the recipe JSON file, accounting for folder placement.""" + + recipes_dir = self.recipes_dir + if not recipes_dir: + return None + + cache = await self.get_cached_data() + folder = "" + for item in cache.raw_data: + if str(item.get("id")) == str(recipe_id): + folder = item.get("folder") or "" + break + + candidate = os.path.normpath(os.path.join(recipes_dir, folder, f"{recipe_id}.recipe.json")) + if os.path.exists(candidate): + return candidate + + for root, _, files in os.walk(recipes_dir): + if f"{recipe_id}.recipe.json" in files: + return os.path.join(root, f"{recipe_id}.recipe.json") + + return None + async def update_recipe_metadata(self, recipe_id: str, metadata: dict) -> bool: """Update recipe metadata (like title and tags) in both file system and cache @@ -1041,13 +1530,9 @@ class RecipeScanner: Returns: bool: True if successful, False otherwise """ - import os - import json - # First, find the recipe JSON file path - recipe_json_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json") - - if not os.path.exists(recipe_json_path): + recipe_json_path = await self.get_recipe_json_path(recipe_id) + if not recipe_json_path or not os.path.exists(recipe_json_path): return False try: @@ -1096,8 +1581,8 @@ class RecipeScanner: if target_name is None: raise ValueError("target_name must be provided") - recipe_json_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json") - if not os.path.exists(recipe_json_path): + recipe_json_path = await self.get_recipe_json_path(recipe_id) + if 
not recipe_json_path or not os.path.exists(recipe_json_path): raise RecipeNotFoundError("Recipe not found") async with self._mutation_lock: @@ -1228,71 +1713,56 @@ class RecipeScanner: # Always use lowercase hash for consistency hash_value = hash_value.lower() - # Get recipes directory - recipes_dir = self.recipes_dir - if not recipes_dir or not os.path.exists(recipes_dir): - logger.warning(f"Recipes directory not found: {recipes_dir}") + # Get cache + cache = await self.get_cached_data() + if not cache or not cache.raw_data: + return 0, 0 + + file_updated_count = 0 + cache_updated_count = 0 + + # Find recipes that need updating from the cache + recipes_to_update = [] + for recipe in cache.raw_data: + loras = recipe.get('loras', []) + if not isinstance(loras, list): + continue + + has_match = False + for lora in loras: + if not isinstance(lora, dict): + continue + if (lora.get('hash') or '').lower() == hash_value: + if lora.get('file_name') != new_file_name: + lora['file_name'] = new_file_name + has_match = True + + if has_match: + recipes_to_update.append(recipe) + cache_updated_count += 1 + + if not recipes_to_update: return 0, 0 - # Check if cache is initialized - cache_initialized = self._cache is not None - cache_updated_count = 0 - file_updated_count = 0 - - # Get all recipe JSON files in the recipes directory - recipe_files = [] - for root, _, files in os.walk(recipes_dir): - for file in files: - if file.lower().endswith('.recipe.json'): - recipe_files.append(os.path.join(root, file)) - - # Process each recipe file - for recipe_path in recipe_files: - try: - # Load the recipe data - with open(recipe_path, 'r', encoding='utf-8') as f: - recipe_data = json.load(f) - - # Skip if no loras or invalid structure - if not recipe_data or not isinstance(recipe_data, dict) or 'loras' not in recipe_data: + # Persist changes to disk + async with self._mutation_lock: + for recipe in recipes_to_update: + recipe_id = recipe.get('id') + if not recipe_id: continue - - # Check 
if any lora has matching hash - file_updated = False - for lora in recipe_data.get('loras', []): - if 'hash' in lora and lora['hash'].lower() == hash_value: - # Update file_name - old_file_name = lora.get('file_name', '') - lora['file_name'] = new_file_name - file_updated = True - logger.info(f"Updated file_name in recipe {recipe_path}: {old_file_name} -> {new_file_name}") - - # If updated, save the file - if file_updated: - with open(recipe_path, 'w', encoding='utf-8') as f: - json.dump(recipe_data, f, indent=4, ensure_ascii=False) - file_updated_count += 1 - # Also update in cache if it exists - if cache_initialized: - recipe_id = recipe_data.get('id') - if recipe_id: - for cache_item in self._cache.raw_data: - if cache_item.get('id') == recipe_id: - # Replace loras array with updated version - cache_item['loras'] = recipe_data['loras'] - cache_updated_count += 1 - break - - except Exception as e: - logger.error(f"Error updating recipe file {recipe_path}: {e}") - import traceback - traceback.print_exc(file=sys.stderr) + recipe_path = os.path.join(self.recipes_dir, f"{recipe_id}.recipe.json") + try: + self._write_recipe_file(recipe_path, recipe) + file_updated_count += 1 + logger.info(f"Updated file_name in recipe {recipe_path}: -> {new_file_name}") + except Exception as e: + logger.error(f"Error updating recipe file {recipe_path}: {e}") - # Resort cache if updates were made - if cache_initialized and cache_updated_count > 0: - await self._cache.resort() - logger.info(f"Resorted recipe cache after updating {cache_updated_count} items") + # We don't necessarily need to resort because LoRA file_name isn't a sort key, + # but we might want to schedule a resort if we're paranoid or if searching relies on sorted state. + # Given it's a rename of a dependency, search results might change if searching by LoRA name. 
+ self._schedule_resort() return file_updated_count, cache_updated_count diff --git a/py/services/recipes/analysis_service.py b/py/services/recipes/analysis_service.py index b7c76afd..cf709743 100644 --- a/py/services/recipes/analysis_service.py +++ b/py/services/recipes/analysis_service.py @@ -13,6 +13,7 @@ import numpy as np from PIL import Image from ...utils.utils import calculate_recipe_fingerprint +from ...utils.civitai_utils import rewrite_preview_url from .errors import ( RecipeDownloadError, RecipeNotFoundError, @@ -94,18 +95,39 @@ class RecipeAnalysisService: if civitai_client is None: raise RecipeServiceError("Civitai client unavailable") - temp_path = self._create_temp_path() + temp_path = None metadata: Optional[dict[str, Any]] = None + is_video = False + extension = ".jpg" # Default + try: civitai_match = re.match(r"https://civitai\.com/images/(\d+)", url) if civitai_match: image_info = await civitai_client.get_image_info(civitai_match.group(1)) if not image_info: raise RecipeDownloadError("Failed to fetch image information from Civitai") + image_url = image_info.get("url") if not image_url: raise RecipeDownloadError("No image URL found in Civitai response") + + is_video = image_info.get("type") == "video" + + # Use optimized preview URLs if possible + rewritten_url, _ = rewrite_preview_url(image_url, media_type=image_info.get("type")) + if rewritten_url: + image_url = rewritten_url + + if is_video: + # Extract extension from URL + url_path = image_url.split('?')[0].split('#')[0] + extension = os.path.splitext(url_path)[1].lower() or ".mp4" + else: + extension = ".jpg" + + temp_path = self._create_temp_path(suffix=extension) await self._download_image(image_url, temp_path) + metadata = image_info.get("meta") if "meta" in image_info else None if ( isinstance(metadata, dict) @@ -114,22 +136,31 @@ class RecipeAnalysisService: ): metadata = metadata["meta"] else: + # Basic extension detection for non-Civitai URLs + url_path = 
url.split('?')[0].split('#')[0] + extension = os.path.splitext(url_path)[1].lower() + if extension in [".mp4", ".webm"]: + is_video = True + else: + extension = ".jpg" + + temp_path = self._create_temp_path(suffix=extension) await self._download_image(url, temp_path) - if metadata is None: + if metadata is None and not is_video: metadata = self._exif_utils.extract_image_metadata(temp_path) - if not metadata: - return self._metadata_not_found_response(temp_path) - return await self._parse_metadata( - metadata, + metadata or {}, recipe_scanner=recipe_scanner, image_path=temp_path, include_image_base64=True, + is_video=is_video, + extension=extension, ) finally: - self._safe_cleanup(temp_path) + if temp_path: + self._safe_cleanup(temp_path) async def analyze_local_image( self, @@ -198,12 +229,16 @@ class RecipeAnalysisService: recipe_scanner, image_path: Optional[str], include_image_base64: bool, + is_video: bool = False, + extension: str = ".jpg", ) -> AnalysisResult: parser = self._recipe_parser_factory.create_parser(metadata) if parser is None: payload = {"error": "No parser found for this image", "loras": []} if include_image_base64 and image_path: payload["image_base64"] = self._encode_file(image_path) + payload["is_video"] = is_video + payload["extension"] = extension return AnalysisResult(payload) result = await parser.parse_metadata(metadata, recipe_scanner=recipe_scanner) @@ -211,6 +246,9 @@ class RecipeAnalysisService: if include_image_base64 and image_path: result["image_base64"] = self._encode_file(image_path) + result["is_video"] = is_video + result["extension"] = extension + if "error" in result and not result.get("loras"): return AnalysisResult(result) @@ -241,8 +279,8 @@ class RecipeAnalysisService: temp_file.write(data) return temp_file.name - def _create_temp_path(self) -> str: - with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as temp_file: + def _create_temp_path(self, suffix: str = ".jpg") -> str: + with 
tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as temp_file: return temp_file.name def _safe_cleanup(self, path: Optional[str]) -> None: diff --git a/py/services/recipes/persistence_service.py b/py/services/recipes/persistence_service.py index 2640035e..e1c7ae15 100644 --- a/py/services/recipes/persistence_service.py +++ b/py/services/recipes/persistence_service.py @@ -5,6 +5,7 @@ import base64 import json import os import re +import shutil import time import uuid from dataclasses import dataclass @@ -46,6 +47,7 @@ class RecipePersistenceService: name: str | None, tags: Iterable[str], metadata: Optional[dict[str, Any]], + extension: str | None = None, ) -> PersistenceResult: """Persist a user uploaded recipe.""" @@ -64,13 +66,21 @@ class RecipePersistenceService: os.makedirs(recipes_dir, exist_ok=True) recipe_id = str(uuid.uuid4()) - optimized_image, extension = self._exif_utils.optimize_image( - image_data=resolved_image_bytes, - target_width=self._card_preview_width, - format="webp", - quality=85, - preserve_metadata=True, - ) + + # Handle video formats by bypassing optimization and metadata embedding + is_video = extension in [".mp4", ".webm"] + if is_video: + optimized_image = resolved_image_bytes + # extension is already set + else: + optimized_image, extension = self._exif_utils.optimize_image( + image_data=resolved_image_bytes, + target_width=self._card_preview_width, + format="webp", + quality=85, + preserve_metadata=True, + ) + image_filename = f"{recipe_id}{extension}" image_path = os.path.join(recipes_dir, image_filename) normalized_image_path = os.path.normpath(image_path) @@ -126,7 +136,8 @@ class RecipePersistenceService: with open(json_path, "w", encoding="utf-8") as file_obj: json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False) - self._exif_utils.append_recipe_metadata(normalized_image_path, recipe_data) + if not is_video: + self._exif_utils.append_recipe_metadata(normalized_image_path, recipe_data) matching_recipes = await 
self._find_matching_recipes(recipe_scanner, fingerprint, exclude_id=recipe_id) await recipe_scanner.add_recipe(recipe_data) @@ -144,12 +155,8 @@ class RecipePersistenceService: async def delete_recipe(self, *, recipe_scanner, recipe_id: str) -> PersistenceResult: """Delete an existing recipe.""" - recipes_dir = recipe_scanner.recipes_dir - if not recipes_dir or not os.path.exists(recipes_dir): - raise RecipeNotFoundError("Recipes directory not found") - - recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json") - if not os.path.exists(recipe_json_path): + recipe_json_path = await recipe_scanner.get_recipe_json_path(recipe_id) + if not recipe_json_path or not os.path.exists(recipe_json_path): raise RecipeNotFoundError("Recipe not found") with open(recipe_json_path, "r", encoding="utf-8") as file_obj: @@ -166,9 +173,9 @@ class RecipePersistenceService: async def update_recipe(self, *, recipe_scanner, recipe_id: str, updates: dict[str, Any]) -> PersistenceResult: """Update persisted metadata for a recipe.""" - if not any(key in updates for key in ("title", "tags", "source_path", "preview_nsfw_level")): + if not any(key in updates for key in ("title", "tags", "source_path", "preview_nsfw_level", "favorite")): raise RecipeValidationError( - "At least one field to update must be provided (title or tags or source_path or preview_nsfw_level)" + "At least one field to update must be provided (title or tags or source_path or preview_nsfw_level or favorite)" ) success = await recipe_scanner.update_recipe_metadata(recipe_id, updates) @@ -177,6 +184,163 @@ class RecipePersistenceService: return PersistenceResult({"success": True, "recipe_id": recipe_id, "updates": updates}) + def _normalize_target_path(self, recipe_scanner, target_path: str) -> tuple[str, str]: + """Normalize and validate the target path for recipe moves.""" + + if not target_path: + raise RecipeValidationError("Target path is required") + + recipes_root = recipe_scanner.recipes_dir + if not 
recipes_root: + raise RecipeNotFoundError("Recipes directory not found") + + normalized_target = os.path.normpath(target_path) + recipes_root = os.path.normpath(recipes_root) + if not os.path.isabs(normalized_target): + normalized_target = os.path.normpath(os.path.join(recipes_root, normalized_target)) + + try: + common_root = os.path.commonpath([normalized_target, recipes_root]) + except ValueError as exc: + raise RecipeValidationError("Invalid target path") from exc + + if common_root != recipes_root: + raise RecipeValidationError("Target path must be inside the recipes directory") + + return normalized_target, recipes_root + + async def _move_recipe_files( + self, + *, + recipe_scanner, + recipe_id: str, + normalized_target: str, + recipes_root: str, + ) -> dict[str, Any]: + """Move the recipe's JSON and preview image into the normalized target.""" + + recipe_json_path = await recipe_scanner.get_recipe_json_path(recipe_id) + if not recipe_json_path or not os.path.exists(recipe_json_path): + raise RecipeNotFoundError("Recipe not found") + + recipe_data = await recipe_scanner.get_recipe_by_id(recipe_id) + if not recipe_data: + raise RecipeNotFoundError("Recipe not found") + + current_json_dir = os.path.dirname(recipe_json_path) + normalized_image_path = os.path.normpath(recipe_data.get("file_path") or "") if recipe_data.get("file_path") else None + + os.makedirs(normalized_target, exist_ok=True) + + if os.path.normpath(current_json_dir) == normalized_target: + return { + "success": True, + "message": "Recipe is already in the target folder", + "recipe_id": recipe_id, + "original_file_path": recipe_data.get("file_path"), + "new_file_path": recipe_data.get("file_path"), + } + + new_json_path = os.path.normpath(os.path.join(normalized_target, os.path.basename(recipe_json_path))) + shutil.move(recipe_json_path, new_json_path) + + new_image_path = normalized_image_path + if normalized_image_path: + target_image_path = os.path.normpath(os.path.join(normalized_target, 
os.path.basename(normalized_image_path))) + if os.path.exists(normalized_image_path) and normalized_image_path != target_image_path: + shutil.move(normalized_image_path, target_image_path) + new_image_path = target_image_path + + relative_folder = os.path.relpath(normalized_target, recipes_root) + if relative_folder in (".", ""): + relative_folder = "" + updates = {"file_path": new_image_path or recipe_data.get("file_path"), "folder": relative_folder.replace(os.path.sep, "/")} + + updated = await recipe_scanner.update_recipe_metadata(recipe_id, updates) + if not updated: + raise RecipeNotFoundError("Recipe not found after move") + + return { + "success": True, + "recipe_id": recipe_id, + "original_file_path": recipe_data.get("file_path"), + "new_file_path": updates["file_path"], + "json_path": new_json_path, + "folder": updates["folder"], + } + + async def move_recipe(self, *, recipe_scanner, recipe_id: str, target_path: str) -> PersistenceResult: + """Move a recipe's assets into a new folder under the recipes root.""" + + normalized_target, recipes_root = self._normalize_target_path(recipe_scanner, target_path) + result = await self._move_recipe_files( + recipe_scanner=recipe_scanner, + recipe_id=recipe_id, + normalized_target=normalized_target, + recipes_root=recipes_root, + ) + return PersistenceResult(result) + + async def move_recipes_bulk( + self, + *, + recipe_scanner, + recipe_ids: Iterable[str], + target_path: str, + ) -> PersistenceResult: + """Move multiple recipes to a new folder.""" + + recipe_ids = list(recipe_ids) + if not recipe_ids: + raise RecipeValidationError("No recipe IDs provided") + + normalized_target, recipes_root = self._normalize_target_path(recipe_scanner, target_path) + + results: list[dict[str, Any]] = [] + success_count = 0 + failure_count = 0 + + for recipe_id in recipe_ids: + try: + move_result = await self._move_recipe_files( + recipe_scanner=recipe_scanner, + recipe_id=str(recipe_id), + normalized_target=normalized_target, + 
recipes_root=recipes_root, + ) + results.append( + { + "recipe_id": recipe_id, + "original_file_path": move_result.get("original_file_path"), + "new_file_path": move_result.get("new_file_path"), + "success": True, + "message": move_result.get("message", ""), + "folder": move_result.get("folder", ""), + } + ) + success_count += 1 + except Exception as exc: # pragma: no cover - per-item error handling + results.append( + { + "recipe_id": recipe_id, + "original_file_path": None, + "new_file_path": None, + "success": False, + "message": str(exc), + } + ) + failure_count += 1 + + return PersistenceResult( + { + "success": True, + "message": f"Moved {success_count} of {len(recipe_ids)} recipes", + "results": results, + "success_count": success_count, + "failure_count": failure_count, + } + ) + async def reconnect_lora( self, *, @@ -187,8 +351,8 @@ class RecipePersistenceService: ) -> PersistenceResult: """Reconnect a LoRA entry within an existing recipe.""" - recipe_path = os.path.join(recipe_scanner.recipes_dir, f"{recipe_id}.recipe.json") - if not os.path.exists(recipe_path): + recipe_path = await recipe_scanner.get_recipe_json_path(recipe_id) + if not recipe_path or not os.path.exists(recipe_path): raise RecipeNotFoundError("Recipe not found") target_lora = await recipe_scanner.get_local_lora(target_name) @@ -233,16 +397,12 @@ class RecipePersistenceService: if not recipe_ids: raise RecipeValidationError("No recipe IDs provided") - recipes_dir = recipe_scanner.recipes_dir - if not recipes_dir or not os.path.exists(recipes_dir): - raise RecipeNotFoundError("Recipes directory not found") - deleted_recipes: list[str] = [] failed_recipes: list[dict[str, Any]] = [] for recipe_id in recipe_ids: - recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json") - if not os.path.exists(recipe_json_path): + recipe_json_path = await recipe_scanner.get_recipe_json_path(recipe_id) + if not recipe_json_path or not os.path.exists(recipe_json_path): 
failed_recipes.append({"id": recipe_id, "reason": "Recipe not found"}) continue diff --git a/py/services/websocket_manager.py b/py/services/websocket_manager.py index 4c786853..1108f72c 100644 --- a/py/services/websocket_manager.py +++ b/py/services/websocket_manager.py @@ -20,6 +20,8 @@ class WebSocketManager: self._last_init_progress: Dict[str, Dict] = {} # Add auto-organize progress tracking self._auto_organize_progress: Optional[Dict] = None + # Add recipe repair progress tracking + self._recipe_repair_progress: Optional[Dict] = None self._auto_organize_lock = asyncio.Lock() async def handle_connection(self, request: web.Request) -> web.WebSocketResponse: @@ -189,6 +191,14 @@ class WebSocketManager: # Broadcast via WebSocket await self.broadcast(data) + async def broadcast_recipe_repair_progress(self, data: Dict): + """Broadcast recipe repair progress to connected clients""" + # Store progress data in memory + self._recipe_repair_progress = data + + # Broadcast via WebSocket + await self.broadcast(data) + def get_auto_organize_progress(self) -> Optional[Dict]: """Get current auto-organize progress""" return self._auto_organize_progress @@ -197,6 +207,14 @@ class WebSocketManager: """Clear auto-organize progress data""" self._auto_organize_progress = None + def get_recipe_repair_progress(self) -> Optional[Dict]: + """Get current recipe repair progress""" + return self._recipe_repair_progress + + def cleanup_recipe_repair_progress(self): + """Clear recipe repair progress data""" + self._recipe_repair_progress = None + def is_auto_organize_running(self) -> bool: """Check if auto-organize is currently running""" if not self._auto_organize_progress: diff --git a/refs/recipe.json b/refs/recipe.json index bcf1d6b8..12630d33 100644 --- a/refs/recipe.json +++ b/refs/recipe.json @@ -1,82 +1,33 @@ { - "id": "0448c06d-de1b-46ab-975c-c5aa60d90dbc", - "file_path": "D:/Workspace/ComfyUI/models/loras/recipes/0448c06d-de1b-46ab-975c-c5aa60d90dbc.jpg", - "title": "a mysterious, 
steampunk-inspired character standing in a dramatic pose", - "modified": 1741837612.3931093, - "created_date": 1741492786.5581934, - "base_model": "Flux.1 D", + "id": "42803a29-02dc-49e1-b798-27da70e8b408", + "file_path": "/home/miao/workspace/ComfyUI/models/loras/recipes/test/42803a29-02dc-49e1-b798-27da70e8b408.webp", + "title": "masterpiece, best quality, amazing quality, very aesthetic, detailed eyes, perfect", + "modified": 1754897325.0507245, + "created_date": 1754897325.0507245, + "base_model": "Illustrious", "loras": [ { - "file_name": "ChronoDivinitiesFlux_r1", - "hash": "ddbc5abd00db46ad464f5e3ca85f8f7121bc14b594d6785f441d9b002fffe66a", - "strength": 0.8, - "modelVersionId": 1438879, - "modelName": "Chrono Divinities - By HailoKnight", - "modelVersionName": "Flux" - }, - { - "file_name": "flux.1_lora_flyway_ink-dynamic", - "hash": "4b4f3b469a0d5d3a04a46886abfa33daa37a905db070ccfbd10b345c6fb00eff", - "strength": 0.2, - "modelVersionId": 914935, - "modelName": "Ink-style", - "modelVersionName": "ink-dynamic" - }, - { - "file_name": "ck-painterly-fantasy-000017", - "hash": "48c67064e2936aec342580a2a729d91d75eb818e45ecf993b9650cc66c94c420", - "strength": 0.2, - "modelVersionId": 1189379, - "modelName": "Painterly Fantasy by ChronoKnight - [FLUX & IL]", - "modelVersionName": "FLUX" - }, - { - "file_name": "RetroAnimeFluxV1", - "hash": "8f43c31b6c3238ac44195c970d511d759c5893bddd00f59f42b8fe51e8e76fa0", - "strength": 0.8, - "modelVersionId": 806265, - "modelName": "Retro Anime Flux - Style", - "modelVersionName": "v1.0" - }, - { - "file_name": "Mezzotint_Artstyle_for_Flux_-_by_Ethanar", - "hash": "e6961502769123bf23a66c5c5298d76264fd6b9610f018319a0ccb091bfc308e", - "strength": 0.2, - "modelVersionId": 757030, - "modelName": "Mezzotint Artstyle for Flux - by Ethanar", - "modelVersionName": "V1" - }, - { - "file_name": "FluxMythG0thicL1nes", - "hash": "ecb03595de62bd6183a0dd2b38bea35669fd4d509f4bbae5aa0572cfb7ef4279", - "strength": 0.4, - "modelVersionId": 
1202162, - "modelName": "Velvet's Mythic Fantasy Styles | Flux + Pony + illustrious", - "modelVersionName": "Flux Gothic Lines" - }, - { - "file_name": "Elden_Ring_-_Yoshitaka_Amano", - "hash": "c660c4c55320be7206cb6a917c59d8da3953cc07169fe10bda833a54ec0024f9", - "strength": 0.75, - "modelVersionId": 746484, - "modelName": "Elden Ring - Yoshitaka Amano", - "modelVersionName": "V1" + "file_name": "", + "hash": "1b5b763d83961bb5745f3af8271ba83f1d4fd69c16278dae6d5b4e194bdde97a", + "strength": 1.0, + "modelVersionId": 2007092, + "modelName": "Pony: People's Works +", + "modelVersionName": "v8_Illusv1.0", + "isDeleted": false, + "exclude": false } ], "gen_params": { - "prompt": "a mysterious, steampunk-inspired character standing in a dramatic pose. The character is dressed in a long, intricately detailed dark coat with ornate patterns, a wide-brimmed hat, and leather boots. The face is partially obscured by the hat's shadow, adding to the enigmatic aura. The background showcases a large, antique clock with Roman numerals, surrounded by dynamic lightning and ethereal white birds, enhancing the fantastical atmosphere. The color palette is dominated by dark tones with striking contrasts of white and blue lightning, creating a sense of tension and energy. The overall composition is vertical, with the character centrally positioned, exuding a sense of power and mystery. 
hkchrono", - "negative_prompt": "", - "checkpoint": { - "type": "checkpoint", - "modelVersionId": 691639, - "modelName": "FLUX", - "modelVersionName": "Dev" - }, - "steps": "30", - "sampler": "Undefined", - "cfg_scale": "3.5", - "seed": "1472903449", + "prompt": "masterpiece, best quality, amazing quality, very aesthetic, detailed eyes, perfect eyes, realistic eyes,\n(flat colors:1.5), (anime:1.5), (lineart:1.5),\nclose-up, solo, tongue, 1girl, food, (saliva:0.1), open mouth, candy, simple background, blue background, large lollipop, tongue out, fade background, lips, hand up, holding, looking at viewer, licking, seductive, half-closed eyes,", + "negative_prompt": "shiny skin,", + "steps": 19, + "sampler": "Euler a", + "cfg_scale": 5, + "seed": 1765271748, "size": "832x1216", - "clip_skip": "2" - } + "clip_skip": 2 + }, + "fingerprint": "1b5b763d83961bb5745f3af8271ba83f1d4fd69c16278dae6d5b4e194bdde97a:1.0", + "source_path": "https://civitai.com/images/92427432", + "folder": "test" } \ No newline at end of file diff --git a/scripts/sync_translation_keys.py b/scripts/sync_translation_keys.py index 8a0edb66..5ae1b71d 100644 --- a/scripts/sync_translation_keys.py +++ b/scripts/sync_translation_keys.py @@ -34,7 +34,7 @@ class TranslationKeySynchronizer: self.locales_dir = locales_dir self.verbose = verbose self.reference_locale = 'en' - self.target_locales = ['zh-CN', 'zh-TW', 'ja', 'ru', 'de', 'fr', 'es', 'ko'] + self.target_locales = ['zh-CN', 'zh-TW', 'ja', 'ru', 'de', 'fr', 'es', 'ko', 'he'] def log(self, message: str, level: str = 'INFO'): """Log a message if verbose mode is enabled.""" diff --git a/static/css/base.css b/static/css/base.css index 25be44ce..8d6130a9 100644 --- a/static/css/base.css +++ b/static/css/base.css @@ -1,8 +1,10 @@ -html, body { +html, +body { margin: 0; padding: 0; height: 100%; - overflow: hidden; /* Disable default scrolling */ + overflow: hidden; + /* Disable default scrolling */ } /* 针对Firefox */ @@ -58,12 +60,12 @@ html, body { 
--badge-update-bg: oklch(72% 0.2 220); --badge-update-text: oklch(28% 0.03 220); --badge-update-glow: oklch(72% 0.2 220 / 0.28); - + /* Spacing Scale */ --space-1: calc(8px * 1); --space-2: calc(8px * 2); --space-3: calc(8px * 3); - + /* Z-index Scale */ --z-base: 10; --z-header: 100; @@ -75,8 +77,9 @@ html, body { --border-radius-sm: 8px; --border-radius-xs: 4px; - --scrollbar-width: 8px; /* 添加滚动条宽度变量 */ - + --scrollbar-width: 8px; + /* 添加滚动条宽度变量 */ + /* Shortcut styles */ --shortcut-bg: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.12); --shortcut-border: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.25); @@ -104,7 +107,8 @@ html[data-theme="light"] { --lora-surface: oklch(25% 0.02 256 / 0.98); --lora-border: oklch(90% 0.02 256 / 0.15); --lora-text: oklch(98% 0.02 256); - --lora-warning: oklch(75% 0.25 80); /* Modified to be used with oklch() */ + --lora-warning: oklch(75% 0.25 80); + /* Modified to be used with oklch() */ --lora-error-bg: color-mix(in oklch, var(--lora-error) 15%, transparent); --lora-error-border: color-mix(in oklch, var(--lora-error) 40%, transparent); --badge-update-bg: oklch(62% 0.18 220); @@ -118,5 +122,10 @@ body { color: var(--text-color); display: flex; flex-direction: column; - padding-top: 0; /* Remove the padding-top */ + padding-top: 0; + /* Remove the padding-top */ } + +.hidden { + display: none !important; +} \ No newline at end of file diff --git a/static/css/components/import-modal.css b/static/css/components/import-modal.css index 82c34672..89be93ad 100644 --- a/static/css/components/import-modal.css +++ b/static/css/components/import-modal.css @@ -1,7 +1,8 @@ /* Import Modal Styles */ .import-step { margin: var(--space-2) 0; - transition: none !important; /* Disable any transitions that might affect display */ + transition: none !important; + /* Disable any transitions that might affect display */ } /* Import Mode Toggle */ @@ -107,7 +108,8 @@ justify-content: center; } 
-.recipe-image img { +.recipe-image img, +.recipe-preview-video { max-width: 100%; max-height: 100%; object-fit: contain; @@ -379,7 +381,7 @@ .recipe-details-layout { grid-template-columns: 1fr; } - + .recipe-image-container { height: 150px; } @@ -512,14 +514,17 @@ /* Prevent layout shift with scrollbar */ .modal-content { - overflow-y: scroll; /* Always show scrollbar */ - scrollbar-gutter: stable; /* Reserve space for scrollbar */ + overflow-y: scroll; + /* Always show scrollbar */ + scrollbar-gutter: stable; + /* Reserve space for scrollbar */ } /* For browsers that don't support scrollbar-gutter */ @supports not (scrollbar-gutter: stable) { .modal-content { - padding-right: calc(var(--space-2) + var(--scrollbar-width)); /* Add extra padding for scrollbar */ + padding-right: calc(var(--space-2) + var(--scrollbar-width)); + /* Add extra padding for scrollbar */ } } @@ -586,7 +591,8 @@ /* Remove the old warning-message styles that were causing layout issues */ .warning-message { - display: none; /* Hide the old style */ + display: none; + /* Hide the old style */ } /* Update deleted badge to be more prominent */ @@ -613,7 +619,8 @@ color: var(--lora-error); font-size: 0.9em; margin-top: 8px; - min-height: 20px; /* Ensure there's always space for the error message */ + min-height: 20px; + /* Ensure there's always space for the error message */ font-weight: 500; } @@ -662,8 +669,15 @@ } @keyframes fadeIn { - from { opacity: 0; transform: translateY(-10px); } - to { opacity: 1; transform: translateY(0); } + from { + opacity: 0; + transform: translateY(-10px); + } + + to { + opacity: 1; + transform: translateY(0); + } } .duplicate-warning { @@ -779,6 +793,7 @@ text-overflow: ellipsis; display: -webkit-box; -webkit-line-clamp: 2; + line-clamp: 2; -webkit-box-orient: vertical; } @@ -793,9 +808,9 @@ opacity: 0.8; } -.duplicate-recipe-date, +.duplicate-recipe-date, .duplicate-recipe-lora-count { display: flex; align-items: center; gap: 4px; -} +} \ No newline at end of 
file diff --git a/static/css/components/lora-modal/lora-modal.css b/static/css/components/lora-modal/lora-modal.css index cdfe5aaf..71667045 100644 --- a/static/css/components/lora-modal/lora-modal.css +++ b/static/css/components/lora-modal/lora-modal.css @@ -20,7 +20,7 @@ } .modal-header-row { - width: 85%; + width: 84%; display: flex; align-items: flex-start; gap: var(--space-2); diff --git a/static/css/components/modal/_base.css b/static/css/components/modal/_base.css index c869e493..eeadb450 100644 --- a/static/css/components/modal/_base.css +++ b/static/css/components/modal/_base.css @@ -122,6 +122,7 @@ body.modal-open { cursor: pointer; opacity: 0.7; transition: opacity 0.2s; + z-index: 10; } .close:hover { diff --git a/static/css/components/search-filter.css b/static/css/components/search-filter.css index 8feb9709..920a868e 100644 --- a/static/css/components/search-filter.css +++ b/static/css/components/search-filter.css @@ -242,6 +242,20 @@ border-color: var(--lora-error-border); } +/* Subtle styling for special system tags like "No tags" */ +.filter-tag.special-tag { + border-style: dashed; + opacity: 0.8; + font-style: italic; +} + +/* Ensure solid border and full opacity when active or excluded */ +.filter-tag.special-tag.active, +.filter-tag.special-tag.exclude { + border-style: solid; + opacity: 1; +} + /* Tag filter styles */ .tag-filter { display: flex; diff --git a/static/js/api/recipeApi.js b/static/js/api/recipeApi.js index ece9938f..5d373c66 100644 --- a/static/js/api/recipeApi.js +++ b/static/js/api/recipeApi.js @@ -2,6 +2,35 @@ import { RecipeCard } from '../components/RecipeCard.js'; import { state, getCurrentPageState } from '../state/index.js'; import { showToast } from '../utils/uiHelpers.js'; +const RECIPE_ENDPOINTS = { + list: '/api/lm/recipes', + detail: '/api/lm/recipe', + scan: '/api/lm/recipes/scan', + update: '/api/lm/recipe', + roots: '/api/lm/recipes/roots', + folders: '/api/lm/recipes/folders', + folderTree: 
'/api/lm/recipes/folder-tree', + unifiedFolderTree: '/api/lm/recipes/unified-folder-tree', + move: '/api/lm/recipe/move', + moveBulk: '/api/lm/recipes/move-bulk', + bulkDelete: '/api/lm/recipes/bulk-delete', +}; + +const RECIPE_SIDEBAR_CONFIG = { + config: { + displayName: 'Recipe', + supportsMove: true, + }, + endpoints: RECIPE_ENDPOINTS, +}; + +export function extractRecipeId(filePath) { + if (!filePath) return null; + const basename = filePath.split('/').pop().split('\\').pop(); + const dotIndex = basename.lastIndexOf('.'); + return dotIndex > 0 ? basename.substring(0, dotIndex) : basename; +} + /** * Fetch recipes with pagination for virtual scrolling * @param {number} page - Page number to fetch @@ -10,25 +39,36 @@ import { showToast } from '../utils/uiHelpers.js'; */ export async function fetchRecipesPage(page = 1, pageSize = 100) { const pageState = getCurrentPageState(); - + try { const params = new URLSearchParams({ page: page, page_size: pageSize || pageState.pageSize || 20, sort_by: pageState.sortBy }); - + + if (pageState.showFavoritesOnly) { + params.append('favorite', 'true'); + } + + if (pageState.activeFolder !== null && pageState.activeFolder !== undefined) { + params.append('folder', pageState.activeFolder); + params.append('recursive', pageState.searchOptions?.recursive !== false); + } else if (pageState.searchOptions?.recursive !== undefined) { + params.append('recursive', pageState.searchOptions.recursive); + } + // If we have a specific recipe ID to load if (pageState.customFilter?.active && pageState.customFilter?.recipeId) { // Special case: load specific recipe - const response = await fetch(`/api/lm/recipe/${pageState.customFilter.recipeId}`); - + const response = await fetch(`${RECIPE_ENDPOINTS.detail}/${pageState.customFilter.recipeId}`); + if (!response.ok) { throw new Error(`Failed to load recipe: ${response.statusText}`); } - + const recipe = await response.json(); - + // Return in expected format return { items: [recipe], @@ -38,33 
+78,34 @@ export async function fetchRecipesPage(page = 1, pageSize = 100) { hasMore: false }; } - + // Add custom filter for Lora if present if (pageState.customFilter?.active && pageState.customFilter?.loraHash) { params.append('lora_hash', pageState.customFilter.loraHash); params.append('bypass_filters', 'true'); } else { // Normal filtering logic - + // Add search filter if present if (pageState.filters?.search) { params.append('search', pageState.filters.search); - + // Add search option parameters if (pageState.searchOptions) { params.append('search_title', pageState.searchOptions.title.toString()); params.append('search_tags', pageState.searchOptions.tags.toString()); params.append('search_lora_name', pageState.searchOptions.loraName.toString()); params.append('search_lora_model', pageState.searchOptions.loraModel.toString()); + params.append('search_prompt', (pageState.searchOptions.prompt || false).toString()); params.append('fuzzy', 'true'); } } - + // Add base model filters if (pageState.filters?.baseModel && pageState.filters.baseModel.length) { params.append('base_models', pageState.filters.baseModel.join(',')); } - + // Add tag filters if (pageState.filters?.tags && Object.keys(pageState.filters.tags).length) { Object.entries(pageState.filters.tags).forEach(([tag, state]) => { @@ -78,14 +119,14 @@ export async function fetchRecipesPage(page = 1, pageSize = 100) { } // Fetch recipes - const response = await fetch(`/api/lm/recipes?${params.toString()}`); - + const response = await fetch(`${RECIPE_ENDPOINTS.list}?${params.toString()}`); + if (!response.ok) { throw new Error(`Failed to load recipes: ${response.statusText}`); } - + const data = await response.json(); - + return { items: data.items, totalItems: data.total, @@ -111,29 +152,29 @@ export async function resetAndReloadWithVirtualScroll(options = {}) { updateFolders = false, fetchPageFunction } = options; - + const pageState = getCurrentPageState(); - + try { pageState.isLoading = true; - + // 
Reset page counter pageState.currentPage = 1; - + // Fetch the first page const result = await fetchPageFunction(1, pageState.pageSize || 50); - + // Update the virtual scroller state.virtualScroller.refreshWithData( result.items, result.totalItems, result.hasMore ); - + // Update state pageState.hasMore = result.hasMore; pageState.currentPage = 2; // Next page will be 2 - + return result; } catch (error) { console.error(`Error reloading ${modelType}s:`, error); @@ -156,32 +197,32 @@ export async function loadMoreWithVirtualScroll(options = {}) { updateFolders = false, fetchPageFunction } = options; - + const pageState = getCurrentPageState(); - + try { // Start loading state pageState.isLoading = true; - + // Reset to first page if requested if (resetPage) { pageState.currentPage = 1; } - + // Fetch the first page of data const result = await fetchPageFunction(pageState.currentPage, pageState.pageSize || 50); - + // Update virtual scroller with the new data state.virtualScroller.refreshWithData( result.items, result.totalItems, result.hasMore ); - + // Update state pageState.hasMore = result.hasMore; pageState.currentPage = 2; // Next page to load would be 2 - + return result; } catch (error) { console.error(`Error loading ${modelType}s:`, error); @@ -211,18 +252,18 @@ export async function resetAndReload(updateFolders = false) { export async function refreshRecipes() { try { state.loadingManager.showSimpleLoading('Refreshing recipes...'); - + // Call the API endpoint to rebuild the recipe cache - const response = await fetch('/api/lm/recipes/scan'); - + const response = await fetch(RECIPE_ENDPOINTS.scan); + if (!response.ok) { const data = await response.json(); throw new Error(data.error || 'Failed to refresh recipe cache'); } - + // After successful cache rebuild, reload the recipes await resetAndReload(); - + showToast('toast.recipes.refreshComplete', {}, 'success'); } catch (error) { console.error('Error refreshing recipes:', error); @@ -240,7 +281,7 @@ 
export async function refreshRecipes() { */ export async function loadMoreRecipes(resetPage = false) { const pageState = getCurrentPageState(); - + // Use virtual scroller if available if (state.virtualScroller) { return loadMoreWithVirtualScroll({ @@ -277,10 +318,12 @@ export async function updateRecipeMetadata(filePath, updates) { state.loadingManager.showSimpleLoading('Saving metadata...'); // Extract recipeId from filePath (basename without extension) - const basename = filePath.split('/').pop().split('\\').pop(); - const recipeId = basename.substring(0, basename.lastIndexOf('.')); - - const response = await fetch(`/api/lm/recipe/${recipeId}/update`, { + const recipeId = extractRecipeId(filePath); + if (!recipeId) { + throw new Error('Unable to determine recipe ID'); + } + + const response = await fetch(`${RECIPE_ENDPOINTS.update}/${recipeId}/update`, { method: 'PUT', headers: { 'Content-Type': 'application/json', @@ -296,7 +339,7 @@ export async function updateRecipeMetadata(filePath, updates) { } state.virtualScroller.updateSingleItem(filePath, updates); - + return data; } catch (error) { console.error('Error updating recipe:', error); @@ -306,3 +349,187 @@ export async function updateRecipeMetadata(filePath, updates) { state.loadingManager.hide(); } } + +export class RecipeSidebarApiClient { + constructor() { + this.apiConfig = RECIPE_SIDEBAR_CONFIG; + } + + async fetchUnifiedFolderTree() { + const response = await fetch(this.apiConfig.endpoints.unifiedFolderTree); + if (!response.ok) { + throw new Error('Failed to fetch recipe folder tree'); + } + return response.json(); + } + + async fetchModelRoots() { + const response = await fetch(this.apiConfig.endpoints.roots); + if (!response.ok) { + throw new Error('Failed to fetch recipe roots'); + } + return response.json(); + } + + async fetchModelFolders() { + const response = await fetch(this.apiConfig.endpoints.folders); + if (!response.ok) { + throw new Error('Failed to fetch recipe folders'); + } + return 
response.json(); + } + + async moveBulkModels(filePaths, targetPath) { + if (!this.apiConfig.config.supportsMove) { + showToast('toast.api.bulkMoveNotSupported', { type: this.apiConfig.config.displayName }, 'warning'); + return []; + } + + const recipeIds = filePaths + .map((path) => extractRecipeId(path)) + .filter((id) => !!id); + + if (recipeIds.length === 0) { + showToast('toast.models.noModelsSelected', {}, 'warning'); + return []; + } + + const response = await fetch(this.apiConfig.endpoints.moveBulk, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + recipe_ids: recipeIds, + target_path: targetPath, + }), + }); + + const result = await response.json(); + + if (!response.ok || !result.success) { + throw new Error(result.error || `Failed to move ${this.apiConfig.config.displayName}s`); + } + + if (result.failure_count > 0) { + showToast( + 'toast.api.bulkMovePartial', + { + successCount: result.success_count, + type: this.apiConfig.config.displayName, + failureCount: result.failure_count, + }, + 'warning' + ); + + const failedFiles = (result.results || []) + .filter((item) => !item.success) + .map((item) => item.message || 'Unknown error'); + + if (failedFiles.length > 0) { + const failureMessage = + failedFiles.length <= 3 + ? 
failedFiles.join('\n') + : `${failedFiles.slice(0, 3).join('\n')}\n(and ${failedFiles.length - 3} more)`; + showToast('toast.api.bulkMoveFailures', { failures: failureMessage }, 'warning', 6000); + } + } else { + showToast( + 'toast.api.bulkMoveSuccess', + { + successCount: result.success_count, + type: this.apiConfig.config.displayName, + }, + 'success' + ); + } + + return result.results || []; + } + + async moveSingleModel(filePath, targetPath) { + if (!this.apiConfig.config.supportsMove) { + showToast('toast.api.moveNotSupported', { type: this.apiConfig.config.displayName }, 'warning'); + return null; + } + + const recipeId = extractRecipeId(filePath); + if (!recipeId) { + showToast('toast.api.moveFailed', { message: 'Recipe ID missing' }, 'error'); + return null; + } + + const response = await fetch(this.apiConfig.endpoints.move, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + recipe_id: recipeId, + target_path: targetPath, + }), + }); + + const result = await response.json(); + + if (!response.ok || !result.success) { + throw new Error(result.error || `Failed to move ${this.apiConfig.config.displayName}`); + } + + if (result.message) { + showToast('toast.api.moveInfo', { message: result.message }, 'info'); + } else { + showToast('toast.api.moveSuccess', { type: this.apiConfig.config.displayName }, 'success'); + } + + return { + original_file_path: result.original_file_path || filePath, + new_file_path: result.new_file_path || filePath, + folder: result.folder || '', + message: result.message, + }; + } + + async bulkDeleteModels(filePaths) { + if (!filePaths || filePaths.length === 0) { + throw new Error('No file paths provided'); + } + + const recipeIds = filePaths + .map((path) => extractRecipeId(path)) + .filter((id) => !!id); + + if (recipeIds.length === 0) { + throw new Error('No recipe IDs could be derived from file paths'); + } + + try { + state.loadingManager?.showSimpleLoading('Deleting 
recipes...'); + + const response = await fetch(this.apiConfig.endpoints.bulkDelete, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + recipe_ids: recipeIds, + }), + }); + + const result = await response.json(); + + if (!response.ok || !result.success) { + throw new Error(result.error || 'Failed to delete recipes'); + } + + return { + success: true, + deleted_count: result.total_deleted, + failed_count: result.total_failed || 0, + errors: result.failed || [], + }; + } finally { + state.loadingManager?.hide(); + } + } +} diff --git a/static/js/components/ContextMenu/GlobalContextMenu.js b/static/js/components/ContextMenu/GlobalContextMenu.js index ec4f794a..1102ea83 100644 --- a/static/js/components/ContextMenu/GlobalContextMenu.js +++ b/static/js/components/ContextMenu/GlobalContextMenu.js @@ -15,6 +15,29 @@ export class GlobalContextMenu extends BaseContextMenu { showMenu(x, y, origin = null) { const contextOrigin = origin || { type: 'global' }; + + // Conditional visibility for recipes page + const isRecipesPage = state.currentPageType === 'recipes'; + const modelUpdateItem = this.menu.querySelector('[data-action="check-model-updates"]'); + const licenseRefreshItem = this.menu.querySelector('[data-action="fetch-missing-licenses"]'); + const downloadExamplesItem = this.menu.querySelector('[data-action="download-example-images"]'); + const cleanupExamplesItem = this.menu.querySelector('[data-action="cleanup-example-images-folders"]'); + const repairRecipesItem = this.menu.querySelector('[data-action="repair-recipes"]'); + + if (isRecipesPage) { + modelUpdateItem?.classList.add('hidden'); + licenseRefreshItem?.classList.add('hidden'); + downloadExamplesItem?.classList.add('hidden'); + cleanupExamplesItem?.classList.add('hidden'); + repairRecipesItem?.classList.remove('hidden'); + } else { + modelUpdateItem?.classList.remove('hidden'); + licenseRefreshItem?.classList.remove('hidden'); + 
downloadExamplesItem?.classList.remove('hidden'); + cleanupExamplesItem?.classList.remove('hidden'); + repairRecipesItem?.classList.add('hidden'); + } + super.showMenu(x, y, contextOrigin); } @@ -40,6 +63,11 @@ export class GlobalContextMenu extends BaseContextMenu { console.error('Failed to refresh missing license metadata:', error); }); break; + case 'repair-recipes': + this.repairRecipes(menuItem).catch((error) => { + console.error('Failed to repair recipes:', error); + }); + break; default: console.warn(`Unhandled global context menu action: ${action}`); break; @@ -235,4 +263,78 @@ export class GlobalContextMenu extends BaseContextMenu { return `${displayName}s`; } + + async repairRecipes(menuItem) { + if (this._repairInProgress) { + return; + } + + this._repairInProgress = true; + menuItem?.classList.add('disabled'); + + const loadingMessage = translate( + 'globalContextMenu.repairRecipes.loading', + {}, + 'Repairing recipe data...' + ); + + const progressUI = state.loadingManager?.showEnhancedProgress(loadingMessage); + + try { + const response = await fetch('/api/lm/recipes/repair', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + }); + + const result = await response.json(); + if (!response.ok || !result.success) { + throw new Error(result.error || 'Failed to start repair'); + } + + // Poll for progress (or wait for WebSocket if preferred, but polling is simpler for this implementation) + let isComplete = false; + while (!isComplete && this._repairInProgress) { + const progressResponse = await fetch('/api/lm/recipes/repair-progress'); + if (progressResponse.ok) { + const progressResult = await progressResponse.json(); + if (progressResult.success && progressResult.progress) { + const p = progressResult.progress; + if (p.status === 'processing') { + const percent = (p.current / p.total) * 100; + progressUI?.updateProgress(percent, p.recipe_name, `${loadingMessage} (${p.current}/${p.total})`); + } else if (p.status === 'completed') { + 
isComplete = true; + progressUI?.complete(translate( + 'globalContextMenu.repairRecipes.success', + { count: p.repaired }, + `Repaired ${p.repaired} recipes.` + )); + showToast('globalContextMenu.repairRecipes.success', { count: p.repaired }, 'success'); + // Refresh recipes page if active + if (window.recipesPage) { + window.recipesPage.refresh(); + } + } else if (p.status === 'error') { + throw new Error(p.error || 'Repair failed'); + } + } else if (progressResponse.status === 404) { + // Progress might have finished quickly and been cleaned up + isComplete = true; + progressUI?.complete(); + } + } + + if (!isComplete) { + await new Promise(resolve => setTimeout(resolve, 1000)); + } + } + } catch (error) { + console.error('Recipe repair failed:', error); + progressUI?.complete(translate('globalContextMenu.repairRecipes.error', { message: error.message }, 'Repair failed: {message}')); + showToast('globalContextMenu.repairRecipes.error', { message: error.message }, 'error'); + } finally { + this._repairInProgress = false; + menuItem?.classList.remove('disabled'); + } + } } diff --git a/static/js/components/ContextMenu/RecipeContextMenu.js b/static/js/components/ContextMenu/RecipeContextMenu.js index f9cb9719..a3a2eb32 100644 --- a/static/js/components/ContextMenu/RecipeContextMenu.js +++ b/static/js/components/ContextMenu/RecipeContextMenu.js @@ -4,13 +4,14 @@ import { showToast, copyToClipboard, sendLoraToWorkflow } from '../../utils/uiHe import { setSessionItem, removeSessionItem } from '../../utils/storageHelpers.js'; import { updateRecipeMetadata } from '../../api/recipeApi.js'; import { state } from '../../state/index.js'; +import { moveManager } from '../../managers/MoveManager.js'; export class RecipeContextMenu extends BaseContextMenu { constructor() { super('recipeContextMenu', '.model-card'); this.nsfwSelector = document.getElementById('nsfwLevelSelector'); this.modelType = 'recipe'; - + this.initNSFWSelector(); } @@ -24,20 +25,20 @@ export class 
RecipeContextMenu extends BaseContextMenu { const { resetAndReload } = await import('../../api/recipeApi.js'); return resetAndReload(); } - + showMenu(x, y, card) { // Call the parent method first to handle basic positioning super.showMenu(x, y, card); - + // Get recipe data to check for missing LoRAs const recipeId = card.dataset.id; const missingLorasItem = this.menu.querySelector('.download-missing-item'); - + if (recipeId && missingLorasItem) { // Check if this card has missing LoRAs const loraCountElement = card.querySelector('.lora-count'); const hasMissingLoras = loraCountElement && loraCountElement.classList.contains('missing'); - + // Show/hide the download missing LoRAs option based on missing status if (hasMissingLoras) { missingLorasItem.style.display = 'flex'; @@ -46,7 +47,7 @@ export class RecipeContextMenu extends BaseContextMenu { } } } - + handleMenuAction(action) { // First try to handle with common actions from ModelContextMenuMixin if (ModelContextMenuMixin.handleCommonMenuActions.call(this, action)) { @@ -55,8 +56,8 @@ export class RecipeContextMenu extends BaseContextMenu { // Handle recipe-specific actions const recipeId = this.currentCard.dataset.id; - - switch(action) { + + switch (action) { case 'details': // Show recipe details this.currentCard.click(); @@ -77,6 +78,9 @@ export class RecipeContextMenu extends BaseContextMenu { // Share recipe this.currentCard.querySelector('.fa-share-alt')?.click(); break; + case 'move': + moveManager.showMoveModal(this.currentCard.dataset.filepath); + break; case 'delete': // Delete recipe this.currentCard.querySelector('.fa-trash')?.click(); @@ -89,9 +93,13 @@ export class RecipeContextMenu extends BaseContextMenu { // Download missing LoRAs this.downloadMissingLoRAs(recipeId); break; + case 'repair': + // Repair recipe metadata + this.repairRecipe(recipeId); + break; } } - + // New method to copy recipe syntax to clipboard copyRecipeSyntax() { const recipeId = this.currentCard.dataset.id; @@ -114,7 
+122,7 @@ export class RecipeContextMenu extends BaseContextMenu { showToast('recipes.contextMenu.copyRecipe.failed', {}, 'error'); }); } - + // New method to send recipe to workflow sendRecipeToWorkflow(replaceMode) { const recipeId = this.currentCard.dataset.id; @@ -137,14 +145,14 @@ export class RecipeContextMenu extends BaseContextMenu { showToast('recipes.contextMenu.sendRecipe.failed', {}, 'error'); }); } - + // View all LoRAs in the recipe viewRecipeLoRAs(recipeId) { if (!recipeId) { showToast('recipes.contextMenu.viewLoras.missingId', {}, 'error'); return; } - + // First get the recipe details to access its LoRAs fetch(`/api/lm/recipe/${recipeId}`) .then(response => response.json()) @@ -154,17 +162,17 @@ export class RecipeContextMenu extends BaseContextMenu { removeSessionItem('recipe_to_lora_filterLoraHashes'); removeSessionItem('filterRecipeName'); removeSessionItem('viewLoraDetail'); - + // Collect all hashes from the recipe's LoRAs const loraHashes = recipe.loras .filter(lora => lora.hash) .map(lora => lora.hash.toLowerCase()); - + if (loraHashes.length > 0) { // Store the LoRA hashes and recipe name in session storage setSessionItem('recipe_to_lora_filterLoraHashes', JSON.stringify(loraHashes)); setSessionItem('filterRecipeName', recipe.title); - + // Navigate to the LoRAs page window.location.href = '/loras'; } else { @@ -176,34 +184,34 @@ export class RecipeContextMenu extends BaseContextMenu { showToast('recipes.contextMenu.viewLoras.loadError', { message: error.message }, 'error'); }); } - + // Download missing LoRAs async downloadMissingLoRAs(recipeId) { if (!recipeId) { showToast('recipes.contextMenu.downloadMissing.missingId', {}, 'error'); return; } - + try { // First get the recipe details const response = await fetch(`/api/lm/recipe/${recipeId}`); const recipe = await response.json(); - + // Get missing LoRAs const missingLoras = recipe.loras.filter(lora => !lora.inLibrary && !lora.isDeleted); - + if (missingLoras.length === 0) { 
showToast('recipes.contextMenu.downloadMissing.noMissingLoras', {}, 'info'); return; } - + // Show loading toast state.loadingManager.showSimpleLoading('Getting version info for missing LoRAs...'); - + // Get version info for each missing LoRA const missingLorasWithVersionInfoPromises = missingLoras.map(async lora => { let endpoint; - + // Determine which endpoint to use based on available data if (lora.modelVersionId) { endpoint = `/api/lm/loras/civitai/model/version/${lora.modelVersionId}`; @@ -213,52 +221,52 @@ export class RecipeContextMenu extends BaseContextMenu { console.error("Missing both hash and modelVersionId for lora:", lora); return null; } - + const versionResponse = await fetch(endpoint); const versionInfo = await versionResponse.json(); - + // Return original lora data combined with version info return { ...lora, civitaiInfo: versionInfo }; }); - + // Wait for all API calls to complete const lorasWithVersionInfo = await Promise.all(missingLorasWithVersionInfoPromises); - + // Filter out null values (failed requests) const validLoras = lorasWithVersionInfo.filter(lora => lora !== null); - + if (validLoras.length === 0) { showToast('recipes.contextMenu.downloadMissing.getInfoFailed', {}, 'error'); return; } - + // Prepare data for import manager using the retrieved information const recipeData = { loras: validLoras.map(lora => { const civitaiInfo = lora.civitaiInfo; - const modelFile = civitaiInfo.files ? + const modelFile = civitaiInfo.files ? civitaiInfo.files.find(file => file.type === 'Model') : null; - + return { // Basic lora info name: civitaiInfo.model?.name || lora.name, version: civitaiInfo.name || '', strength: lora.strength || 1.0, - + // Model identifiers hash: modelFile?.hashes?.SHA256?.toLowerCase() || lora.hash, modelVersionId: civitaiInfo.id || lora.modelVersionId, - + // Metadata thumbnailUrl: civitaiInfo.images?.[0]?.url || '', baseModel: civitaiInfo.baseModel || '', downloadUrl: civitaiInfo.downloadUrl || '', size: modelFile ? 
(modelFile.sizeKB * 1024) : 0, file_name: modelFile ? modelFile.name.split('.')[0] : '', - + // Status flags existsLocally: false, isDeleted: civitaiInfo.error === "Model not found", @@ -267,7 +275,7 @@ export class RecipeContextMenu extends BaseContextMenu { }; }) }; - + // Call ImportManager's download missing LoRAs method window.importManager.downloadMissingLoras(recipeData, recipeId); } catch (error) { @@ -279,6 +287,38 @@ export class RecipeContextMenu extends BaseContextMenu { } } } + + // Repair recipe metadata + async repairRecipe(recipeId) { + if (!recipeId) { + showToast('recipes.contextMenu.repair.missingId', {}, 'error'); + return; + } + + try { + showToast('recipes.contextMenu.repair.starting', {}, 'info'); + + const response = await fetch(`/api/lm/recipe/${recipeId}/repair`, { + method: 'POST' + }); + const result = await response.json(); + + if (result.success) { + if (result.repaired > 0) { + showToast('recipes.contextMenu.repair.success', {}, 'success'); + // Refresh the current card or reload + this.resetAndReload(); + } else { + showToast('recipes.contextMenu.repair.skipped', {}, 'info'); + } + } else { + throw new Error(result.error || 'Repair failed'); + } + } catch (error) { + console.error('Error repairing recipe:', error); + showToast('recipes.contextMenu.repair.failed', { message: error.message }, 'error'); + } + } } // Mix in shared methods from ModelContextMenuMixin diff --git a/static/js/components/RecipeCard.js b/static/js/components/RecipeCard.js index dec61fa0..f89a2425 100644 --- a/static/js/components/RecipeCard.js +++ b/static/js/components/RecipeCard.js @@ -1,8 +1,11 @@ // Recipe Card Component import { showToast, copyToClipboard, sendLoraToWorkflow } from '../utils/uiHelpers.js'; +import { updateRecipeMetadata } from '../api/recipeApi.js'; +import { configureModelCardVideo } from './shared/ModelCard.js'; import { modalManager } from '../managers/ModalManager.js'; import { getCurrentPageState } from '../state/index.js'; import { 
state } from '../state/index.js'; +import { bulkManager } from '../managers/BulkManager.js'; import { NSFW_LEVELS, getBaseModelAbbreviation } from '../utils/constants.js'; class RecipeCard { @@ -10,11 +13,11 @@ class RecipeCard { this.recipe = recipe; this.clickHandler = clickHandler; this.element = this.createCardElement(); - + // Store reference to this instance on the DOM element for updates this.element._recipeCardInstance = this; } - + createCardElement() { const card = document.createElement('div'); card.className = 'model-card'; @@ -23,33 +26,48 @@ class RecipeCard { card.dataset.nsfwLevel = this.recipe.preview_nsfw_level || 0; card.dataset.created = this.recipe.created_date; card.dataset.id = this.recipe.id || ''; - + // Get base model with fallback const baseModelLabel = (this.recipe.base_model || '').trim() || 'Unknown'; const baseModelAbbreviation = getBaseModelAbbreviation(baseModelLabel); const baseModelDisplay = baseModelLabel === 'Unknown' ? 'Unknown' : baseModelAbbreviation; - + // Ensure loras array exists const loras = this.recipe.loras || []; const lorasCount = loras.length; - + // Check if all LoRAs are available in the library const missingLorasCount = loras.filter(lora => !lora.inLibrary && !lora.isDeleted).length; const allLorasAvailable = missingLorasCount === 0 && lorasCount > 0; - - // Ensure file_url exists, fallback to file_path if needed - const imageUrl = this.recipe.file_url || - (this.recipe.file_path ? `/loras_static/root1/preview/${this.recipe.file_path.split('/').pop()}` : - '/loras_static/images/no-preview.png'); - // Check if in duplicates mode - const pageState = getCurrentPageState(); - const isDuplicatesMode = pageState.duplicatesMode; + // Ensure file_url exists, fallback to file_path if needed + const previewUrl = this.recipe.file_url || + (this.recipe.file_path ? 
`/loras_static/root1/preview/${this.recipe.file_path.split('/').pop()}` : + '/loras_static/images/no-preview.png'); + + const isDuplicatesMode = getCurrentPageState().duplicatesMode; + const autoplayOnHover = state?.global?.settings?.autoplay_on_hover === true; + const isFavorite = this.recipe.favorite === true; + + // Video preview logic + const isVideo = previewUrl.endsWith('.mp4') || previewUrl.endsWith('.webm'); + const videoAttrs = [ + 'controls', + 'muted', + 'loop', + 'playsinline', + 'preload="none"', + `data-src="${previewUrl}"` + ]; + + if (!autoplayOnHover) { + videoAttrs.push('data-autoplay="true"'); + } // NSFW blur logic - similar to LoraCard const nsfwLevel = this.recipe.preview_nsfw_level !== undefined ? this.recipe.preview_nsfw_level : 0; const shouldBlur = state.settings.blur_mature_content && nsfwLevel > NSFW_LEVELS.PG13; - + if (shouldBlur) { card.classList.add('nsfw-content'); } @@ -66,15 +84,19 @@ class RecipeCard { card.innerHTML = `
- ${this.recipe.title} + ${isVideo ? + `` : + `${this.recipe.title}` + } ${!isDuplicatesMode ? `
- ${shouldBlur ? - `` : ''} ${baseModelDisplay}
+ @@ -102,30 +124,98 @@ class RecipeCard {
`; - + this.attachEventListeners(card, isDuplicatesMode, shouldBlur); + + // Add video auto-play on hover functionality if needed + const videoElement = card.querySelector('video'); + if (videoElement) { + configureModelCardVideo(videoElement, autoplayOnHover); + } + return card; } - + getLoraStatusTitle(totalCount, missingCount) { if (totalCount === 0) return "No LoRAs in this recipe"; if (missingCount === 0) return "All LoRAs available - Ready to use"; return `${missingCount} of ${totalCount} LoRAs missing`; } - + + async toggleFavorite(card) { + // Find the latest star icon in case the card was re-rendered + const getStarIcon = (c) => c.querySelector('.fa-star'); + let starIcon = getStarIcon(card); + + const isFavorite = this.recipe.favorite || false; + const newFavoriteState = !isFavorite; + + // Update early to provide instant feedback and avoid race conditions with re-renders + this.recipe.favorite = newFavoriteState; + + // Function to update icon state + const updateIconUI = (icon, state) => { + if (!icon) return; + if (state) { + icon.classList.remove('far'); + icon.classList.add('fas', 'favorite-active'); + icon.title = 'Remove from Favorites'; + } else { + icon.classList.remove('fas', 'favorite-active'); + icon.classList.add('far'); + icon.title = 'Add to Favorites'; + } + }; + + // Update current icon immediately + updateIconUI(starIcon, newFavoriteState); + + try { + await updateRecipeMetadata(this.recipe.file_path, { + favorite: newFavoriteState + }); + + // Status already updated, just show toast + if (newFavoriteState) { + showToast('modelCard.favorites.added', {}, 'success'); + } else { + showToast('modelCard.favorites.removed', {}, 'success'); + } + + // Re-find star icon after API call as VirtualScroller might have replaced the element + // During updateRecipeMetadata, VirtualScroller.updateSingleItem might have re-rendered the card + // We need to find the NEW element in the DOM to ensure we don't have a stale reference + // Though typically 
VirtualScroller handles the re-render with the NEW this.recipe.favorite + // we will check the DOM just to be sure if this instance's internal card is still what's in DOM + } catch (error) { + console.error('Failed to update favorite status:', error); + // Revert local state on error + this.recipe.favorite = isFavorite; + + // Re-find star icon in case of re-render during fault + const currentCard = card.ownerDocument.evaluate( + `.//*[@data-filepath="${this.recipe.file_path}"]`, + card.ownerDocument, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null + ).singleNodeValue || card; + + updateIconUI(getStarIcon(currentCard), isFavorite); + showToast('modelCard.favorites.updateFailed', {}, 'error'); + } + } + attachEventListeners(card, isDuplicatesMode, shouldBlur) { // Add blur toggle functionality if content should be blurred if (shouldBlur) { const toggleBtn = card.querySelector('.toggle-blur-btn'); const showBtn = card.querySelector('.show-content-btn'); - + if (toggleBtn) { toggleBtn.addEventListener('click', (e) => { e.stopPropagation(); this.toggleBlurContent(card); }); } - + if (showBtn) { showBtn.addEventListener('click', (e) => { e.stopPropagation(); @@ -137,21 +227,31 @@ class RecipeCard { // Recipe card click event - only attach if not in duplicates mode if (!isDuplicatesMode) { card.addEventListener('click', () => { + if (state.bulkMode) { + bulkManager.toggleCardSelection(card); + return; + } this.clickHandler(this.recipe); }); - + + // Favorite button click event - prevent propagation to card + card.querySelector('.fa-star')?.addEventListener('click', (e) => { + e.stopPropagation(); + this.toggleFavorite(card); + }); + // Share button click event - prevent propagation to card card.querySelector('.fa-share-alt')?.addEventListener('click', (e) => { e.stopPropagation(); this.shareRecipe(); }); - + // Send button click event - prevent propagation to card card.querySelector('.fa-paper-plane')?.addEventListener('click', (e) => { e.stopPropagation(); 
this.sendRecipeToWorkflow(e.shiftKey); }); - + // Delete button click event - prevent propagation to card card.querySelector('.fa-trash')?.addEventListener('click', (e) => { e.stopPropagation(); @@ -159,19 +259,19 @@ class RecipeCard { }); } } - + toggleBlurContent(card) { const preview = card.querySelector('.card-preview'); const isBlurred = preview.classList.toggle('blurred'); const icon = card.querySelector('.toggle-blur-btn i'); - + // Update the icon based on blur state if (isBlurred) { icon.className = 'fas fa-eye'; } else { icon.className = 'fas fa-eye-slash'; } - + // Toggle the overlay visibility const overlay = card.querySelector('.nsfw-overlay'); if (overlay) { @@ -182,13 +282,13 @@ class RecipeCard { showBlurredContent(card) { const preview = card.querySelector('.card-preview'); preview.classList.remove('blurred'); - + // Update the toggle button icon const toggleBtn = card.querySelector('.toggle-blur-btn'); if (toggleBtn) { toggleBtn.querySelector('i').className = 'fas fa-eye-slash'; } - + // Hide the overlay const overlay = card.querySelector('.nsfw-overlay'); if (overlay) { @@ -223,7 +323,7 @@ class RecipeCard { showToast('toast.recipes.sendError', {}, 'error'); } } - + showDeleteConfirmation() { try { // Get recipe ID @@ -233,15 +333,21 @@ class RecipeCard { showToast('toast.recipes.cannotDelete', {}, 'error'); return; } - + // Create delete modal content + const previewUrl = this.recipe.file_url || '/loras_static/images/no-preview.png'; + const isVideo = previewUrl.endsWith('.mp4') || previewUrl.endsWith('.webm'); + const deleteModalContent = `
`); } } }); - + sidebarBreadcrumbNav.innerHTML = breadcrumbs.join(''); } updateSidebarHeader() { const sidebarHeader = document.getElementById('sidebarHeader'); if (!sidebarHeader) return; - - if (this.selectedPath == null) { + + if (!this.selectedPath) { sidebarHeader.classList.add('root-selected'); } else { sidebarHeader.classList.remove('root-selected'); @@ -1333,11 +1352,11 @@ export class SidebarManager { toggleSidebar() { const sidebar = document.getElementById('folderSidebar'); const toggleBtn = document.querySelector('.sidebar-toggle-btn'); - + if (!sidebar) return; - + this.isVisible = !this.isVisible; - + if (this.isVisible) { sidebar.classList.remove('collapsed'); sidebar.classList.add('visible'); @@ -1345,28 +1364,28 @@ export class SidebarManager { sidebar.classList.remove('visible'); sidebar.classList.add('collapsed'); } - + if (toggleBtn) { toggleBtn.classList.toggle('active', this.isVisible); } - + this.saveSidebarState(); } closeSidebar() { const sidebar = document.getElementById('folderSidebar'); const toggleBtn = document.querySelector('.sidebar-toggle-btn'); - + if (!sidebar) return; - + this.isVisible = false; sidebar.classList.remove('visible'); sidebar.classList.add('collapsed'); - + if (toggleBtn) { toggleBtn.classList.remove('active'); } - + this.saveSidebarState(); } @@ -1375,12 +1394,12 @@ export class SidebarManager { const expandedPaths = getStorageItem(`${this.pageType}_expandedNodes`, []); const displayMode = getStorageItem(`${this.pageType}_displayMode`, 'tree'); // 'tree' or 'list', default to 'tree' const recursiveSearchEnabled = getStorageItem(`${this.pageType}_recursiveSearch`, true); - + this.isPinned = isPinned; this.expandedNodes = new Set(expandedPaths); this.displayMode = displayMode; this.recursiveSearchEnabled = recursiveSearchEnabled; - + this.updatePinButton(); this.updateDisplayModeButton(); this.updateCollapseAllButton(); diff --git a/static/js/components/shared/ModelCard.js b/static/js/components/shared/ModelCard.js 
index b23b6a43..4c158b3c 100644 --- a/static/js/components/shared/ModelCard.js +++ b/static/js/components/shared/ModelCard.js @@ -14,11 +14,11 @@ import { eventManager } from '../../utils/EventManager.js'; // Helper function to get display name based on settings function getDisplayName(model) { const displayNameSetting = state.global.settings.model_name_display || 'model_name'; - + if (displayNameSetting === 'file_name') { return model.file_name || model.model_name || 'Unknown Model'; } - + return model.model_name || model.file_name || 'Unknown Model'; } @@ -26,7 +26,7 @@ function getDisplayName(model) { export function setupModelCardEventDelegation(modelType) { // Remove any existing handler first eventManager.removeHandler('click', 'modelCard-delegation'); - + // Register model card event delegation with event manager eventManager.addHandler('click', 'modelCard-delegation', (event) => { return handleModelCardEvent_internal(event, modelType); @@ -42,26 +42,26 @@ function handleModelCardEvent_internal(event, modelType) { // Find the closest card element const card = event.target.closest('.model-card'); if (!card) return false; // Continue with other handlers - + // Handle specific elements within the card if (event.target.closest('.toggle-blur-btn')) { event.stopPropagation(); toggleBlurContent(card); return true; // Stop propagation } - + if (event.target.closest('.show-content-btn')) { event.stopPropagation(); showBlurredContent(card); return true; // Stop propagation } - + if (event.target.closest('.fa-star')) { event.stopPropagation(); toggleFavorite(card); return true; // Stop propagation } - + if (event.target.closest('.fa-globe')) { event.stopPropagation(); if (card.dataset.from_civitai === 'true') { @@ -69,37 +69,37 @@ function handleModelCardEvent_internal(event, modelType) { } return true; // Stop propagation } - + if (event.target.closest('.fa-paper-plane')) { event.stopPropagation(); handleSendToWorkflow(card, event.shiftKey, modelType); return true; // 
Stop propagation } - + if (event.target.closest('.fa-copy')) { event.stopPropagation(); handleCopyAction(card, modelType); return true; // Stop propagation } - + if (event.target.closest('.fa-trash')) { event.stopPropagation(); showDeleteModal(card.dataset.filepath); return true; // Stop propagation } - + if (event.target.closest('.fa-image')) { event.stopPropagation(); getModelApiClient().replaceModelPreview(card.dataset.filepath); return true; // Stop propagation } - + if (event.target.closest('.fa-folder-open')) { event.stopPropagation(); handleExampleImagesAccess(card, modelType); return true; // Stop propagation } - + // If no specific element was clicked, handle the card click (show modal or toggle selection) handleCardClick(card, modelType); return false; // Continue with other handlers (e.g., bulk selection) @@ -110,14 +110,14 @@ function toggleBlurContent(card) { const preview = card.querySelector('.card-preview'); const isBlurred = preview.classList.toggle('blurred'); const icon = card.querySelector('.toggle-blur-btn i'); - + // Update the icon based on blur state if (isBlurred) { icon.className = 'fas fa-eye'; } else { icon.className = 'fas fa-eye-slash'; } - + // Toggle the overlay visibility const overlay = card.querySelector('.nsfw-overlay'); if (overlay) { @@ -128,13 +128,13 @@ function toggleBlurContent(card) { function showBlurredContent(card) { const preview = card.querySelector('.card-preview'); preview.classList.remove('blurred'); - + // Update the toggle button icon const toggleBtn = card.querySelector('.toggle-blur-btn'); if (toggleBtn) { toggleBtn.querySelector('i').className = 'fas fa-eye-slash'; } - + // Hide the overlay const overlay = card.querySelector('.nsfw-overlay'); if (overlay) { @@ -146,10 +146,10 @@ async function toggleFavorite(card) { const starIcon = card.querySelector('.fa-star'); const isFavorite = starIcon.classList.contains('fas'); const newFavoriteState = !isFavorite; - + try { - await 
getModelApiClient().saveModelMetadata(card.dataset.filepath, { - favorite: newFavoriteState + await getModelApiClient().saveModelMetadata(card.dataset.filepath, { + favorite: newFavoriteState }); if (newFavoriteState) { @@ -239,11 +239,11 @@ function handleReplacePreview(filePath, modelType) { async function handleExampleImagesAccess(card, modelType) { const modelHash = card.dataset.sha256; - + try { const response = await fetch(`/api/lm/has-example-images?model_hash=${modelHash}`); const data = await response.json(); - + if (data.has_images) { openExampleImagesFolder(modelHash); } else { @@ -257,7 +257,7 @@ async function handleExampleImagesAccess(card, modelType) { function handleCardClick(card, modelType) { const pageState = getCurrentPageState(); - + if (state.bulkMode) { // Toggle selection using the bulk manager bulkManager.toggleCardSelection(card); @@ -294,7 +294,7 @@ async function showModelModalFromCard(card, modelType) { usage_tips: card.dataset.usage_tips, }) }; - + await showModelModal(modelMeta, modelType); } @@ -310,9 +310,9 @@ function showExampleAccessModal(card, modelType) { try { const metaData = JSON.parse(card.dataset.meta || '{}'); hasRemoteExamples = metaData.images && - Array.isArray(metaData.images) && - metaData.images.length > 0 && - metaData.images[0].url; + Array.isArray(metaData.images) && + metaData.images.length > 0 && + metaData.images[0].url; } catch (e) { console.error('Error parsing meta data:', e); } @@ -329,10 +329,10 @@ function showExampleAccessModal(card, modelType) { showToast('modelCard.exampleImages.missingHash', {}, 'error'); return; } - + // Close the modal modalManager.closeModal('exampleAccessModal'); - + try { // Use the appropriate model API client to download examples const apiClient = getModelApiClient(modelType); @@ -462,7 +462,7 @@ export function createModelCard(model, modelType) { if (model.civitai) { card.dataset.meta = JSON.stringify(model.civitai || {}); } - + // Store tags if available if (model.tags && 
Array.isArray(model.tags)) { card.dataset.tags = JSON.stringify(model.tags); @@ -475,7 +475,7 @@ export function createModelCard(model, modelType) { // Store NSFW level if available const nsfwLevel = model.preview_nsfw_level !== undefined ? model.preview_nsfw_level : 0; card.dataset.nsfwLevel = nsfwLevel; - + // Determine if the preview should be blurred based on NSFW level and user settings const shouldBlur = state.settings.blur_mature_content && nsfwLevel > NSFW_LEVELS.PG13; if (shouldBlur) { @@ -506,7 +506,7 @@ export function createModelCard(model, modelType) { // Check if autoplayOnHover is enabled for video previews const autoplayOnHover = state.global?.settings?.autoplay_on_hover || false; - const isVideo = previewUrl.endsWith('.mp4'); + const isVideo = previewUrl.endsWith('.mp4') || previewUrl.endsWith('.webm'); const videoAttrs = [ 'controls', 'muted', @@ -527,10 +527,10 @@ export function createModelCard(model, modelType) { } // Generate action icons based on model type with i18n support - const favoriteTitle = isFavorite ? + const favoriteTitle = isFavorite ? translate('modelCard.actions.removeFromFavorites', {}, 'Remove from favorites') : translate('modelCard.actions.addToFavorites', {}, 'Add to favorites'); - const globeTitle = model.from_civitai ? + const globeTitle = model.from_civitai ? translate('modelCard.actions.viewOnCivitai', {}, 'View on Civitai') : translate('modelCard.actions.notAvailableFromCivitai', {}, 'Not available from Civitai'); let sendTitle; @@ -582,13 +582,13 @@ export function createModelCard(model, modelType) { card.innerHTML = `
- ${isVideo ? - `` : - `${model.model_name}` - } + ${isVideo ? + `` : + `${model.model_name}` + }
- ${shouldBlur ? - `` : ''}
@@ -629,7 +629,7 @@ export function createModelCard(model, modelType) {
`; - + // Add video auto-play on hover functionality if needed const videoElement = card.querySelector('video'); if (videoElement) { @@ -765,7 +765,7 @@ function cleanupHoverHandlers(videoElement) { function requestSafePlay(videoElement) { const playPromise = videoElement.play(); if (playPromise && typeof playPromise.catch === 'function') { - playPromise.catch(() => {}); + playPromise.catch(() => { }); } } @@ -887,16 +887,16 @@ export function configureModelCardVideo(videoElement, autoplayOnHover) { export function updateCardsForBulkMode(isBulkMode) { // Update the state state.bulkMode = isBulkMode; - + document.body.classList.toggle('bulk-mode', isBulkMode); - + // Get all lora cards - this can now be from the DOM or through the virtual scroller const loraCards = document.querySelectorAll('.model-card'); - + loraCards.forEach(card => { // Get all action containers for this card const actions = card.querySelectorAll('.card-actions'); - + // Handle display property based on mode if (isBulkMode) { // Hide actions when entering bulk mode @@ -911,12 +911,12 @@ export function updateCardsForBulkMode(isBulkMode) { }); } }); - + // If using virtual scroller, we need to rerender after toggling bulk mode if (state.virtualScroller && typeof state.virtualScroller.scheduleRender === 'function') { state.virtualScroller.scheduleRender(); } - + // Apply selection state to cards if entering bulk mode if (isBulkMode) { bulkManager.applySelectionState(); diff --git a/static/js/components/shared/ModelModal.js b/static/js/components/shared/ModelModal.js index 84e1dbb3..7e8261b1 100644 --- a/static/js/components/shared/ModelModal.js +++ b/static/js/components/shared/ModelModal.js @@ -1,15 +1,15 @@ import { showToast, openCivitai } from '../../utils/uiHelpers.js'; import { modalManager } from '../../managers/ModalManager.js'; -import { +import { toggleShowcase, - setupShowcaseScroll, + setupShowcaseScroll, scrollToTop, loadExampleImages } from './showcase/ShowcaseView.js'; import { 
setupTabSwitching } from './ModelDescription.js'; -import { - setupModelNameEditing, - setupBaseModelEditing, +import { + setupModelNameEditing, + setupBaseModelEditing, setupFileNameEditing } from './ModelMetadata.js'; import { setupTagEditMode } from './ModelTags.js'; @@ -242,7 +242,7 @@ export async function showModelModal(model, modelType) { const modalTitle = model.model_name; cleanupNavigationShortcuts(); detachModalHandlers(modalId); - + // Fetch complete civitai metadata let completeCivitaiData = model.civitai || {}; if (model.file_path) { @@ -254,7 +254,7 @@ export async function showModelModal(model, modelType) { // Continue with existing data if fetch fails } } - + // Update model with complete civitai data const modelWithFullData = { ...model, @@ -269,14 +269,14 @@ export async function showModelModal(model, modelType) {
`.trim() : ''; const creatorInfoAction = modelWithFullData.civitai?.creator ? `
- ${modelWithFullData.civitai.creator.image ? - `
+ ${modelWithFullData.civitai.creator.image ? + `
${modelWithFullData.civitai.creator.username} -
` : - `
+
` : + `
` - } + } ${modelWithFullData.civitai.creator.username}
`.trim() : ''; const creatorActionItems = []; @@ -310,10 +310,10 @@ export async function showModelModal(model, modelType) { const hasUpdateAvailable = Boolean(modelWithFullData.update_available); const updateAvailabilityState = { hasUpdateAvailable }; const updateBadgeTooltip = translate('modelCard.badges.updateAvailable', {}, 'Update available'); - + // Prepare LoRA specific data with complete civitai data - const escapedWords = (modelType === 'loras' || modelType === 'embeddings') && modelWithFullData.civitai?.trainedWords?.length ? - modelWithFullData.civitai.trainedWords.map(word => word.replace(/'/g, '\\\'')) : []; + const escapedWords = (modelType === 'loras' || modelType === 'embeddings') && modelWithFullData.civitai?.trainedWords?.length ? + modelWithFullData.civitai.trainedWords : []; // Generate model type specific content let typeSpecificContent; @@ -343,7 +343,7 @@ export async function showModelModal(model, modelType) { ${versionsTabBadge} `.trim(); - const tabsContent = modelType === 'loras' ? + const tabsContent = modelType === 'loras' ? ` ${versionsTabButton} @@ -351,12 +351,12 @@ export async function showModelModal(model, modelType) { ` ${versionsTabButton}`; - + const loadingExampleImagesText = translate('modals.model.loading.exampleImages', {}, 'Loading example images...'); const loadingDescriptionText = translate('modals.model.loading.description', {}, 'Loading model description...'); const loadingRecipesText = translate('modals.model.loading.recipes', {}, 'Loading recipes...'); const loadingExamplesText = translate('modals.model.loading.examples', {}, 'Loading examples...'); - + const loadingVersionsText = translate('modals.model.loading.versions', {}, 'Loading versions...'); const civitaiModelId = modelWithFullData.civitai?.modelId || ''; const civitaiVersionId = modelWithFullData.civitai?.id || ''; @@ -373,7 +373,7 @@ export async function showModelModal(model, modelType) {
`.trim(); - const tabPanesContent = modelType === 'loras' ? + const tabPanesContent = modelType === 'loras' ? `
${loadingExampleImagesText} @@ -518,7 +518,7 @@ export async function showModelModal(model, modelType) {
`; - + function updateVersionsTabBadge(hasUpdate) { const modalElement = document.getElementById(modalId); if (!modalElement) return; @@ -594,10 +594,10 @@ export async function showModelModal(model, modelType) { updateVersionsTabBadge(hasUpdate); updateCardUpdateAvailability(hasUpdate); } - + let showcaseCleanup; - const onCloseCallback = function() { + const onCloseCallback = function () { // Clean up all handlers when modal closes for LoRA const modalElement = document.getElementById(modalId); if (modalElement && modalElement._clickHandler) { @@ -610,7 +610,7 @@ export async function showModelModal(model, modelType) { } cleanupNavigationShortcuts(); }; - + modalManager.showModal(modalId, content, null, onCloseCallback); const activeModalElement = document.getElementById(modalId); if (activeModalElement) { @@ -643,17 +643,17 @@ export async function showModelModal(model, modelType) { setupEventHandlers(modelWithFullData.file_path, modelType); setupNavigationShortcuts(modelType); updateNavigationControls(); - + // LoRA specific setup if (modelType === 'loras' || modelType === 'embeddings') { setupTriggerWordsEditMode(); - + if (modelType == 'loras') { // Load recipes for this LoRA loadRecipesForLora(modelWithFullData.model_name, modelWithFullData.sha256); } } - + // Load example images asynchronously - merge regular and custom images const regularImages = modelWithFullData.civitai?.images || []; const customImages = modelWithFullData.civitai?.customImages || []; @@ -707,17 +707,17 @@ function detachModalHandlers(modalId) { */ function setupEventHandlers(filePath, modelType) { const modalElement = document.getElementById('modelModal'); - + // Remove existing event listeners first modalElement.removeEventListener('click', handleModalClick); - + // Create and store the handler function function handleModalClick(event) { const target = event.target.closest('[data-action]'); if (!target) return; - + const action = target.dataset.action; - + switch (action) { case 
'close-modal': modalManager.closeModal('modelModal'); @@ -748,10 +748,10 @@ function setupEventHandlers(filePath, modelType) { break; } } - + // Add the event listener with the named function modalElement.addEventListener('click', handleModalClick); - + // Store reference to the handler on the element for potential cleanup modalElement._clickHandler = handleModalClick; } @@ -763,15 +763,15 @@ function setupEventHandlers(filePath, modelType) { */ function setupEditableFields(filePath, modelType) { const editableFields = document.querySelectorAll('.editable-field [contenteditable]'); - + editableFields.forEach(field => { - field.addEventListener('focus', function() { + field.addEventListener('focus', function () { if (this.textContent === 'Add your notes here...') { this.textContent = ''; } }); - field.addEventListener('blur', function() { + field.addEventListener('blur', function () { if (this.textContent.trim() === '') { if (this.classList.contains('notes-content')) { this.textContent = 'Add your notes here...'; @@ -783,7 +783,7 @@ function setupEditableFields(filePath, modelType) { // Add keydown event listeners for notes const notesContent = document.querySelector('.notes-content'); if (notesContent) { - notesContent.addEventListener('keydown', async function(e) { + notesContent.addEventListener('keydown', async function (e) { if (e.key === 'Enter') { if (e.shiftKey) { // Allow shift+enter for new line @@ -810,7 +810,7 @@ function setupLoraSpecificFields(filePath) { if (!presetSelector || !presetValue || !addPresetBtn || !presetTags) return; - presetSelector.addEventListener('change', function() { + presetSelector.addEventListener('change', function () { const selected = this.value; if (selected) { presetValue.style.display = 'inline-block'; @@ -828,10 +828,10 @@ function setupLoraSpecificFields(filePath) { } }); - addPresetBtn.addEventListener('click', async function() { + addPresetBtn.addEventListener('click', async function () { const key = 
presetSelector.value; const value = presetValue.value; - + if (!key || !value) return; const currentPath = resolveFilePath(); @@ -839,21 +839,21 @@ function setupLoraSpecificFields(filePath) { const loraCard = document.querySelector(`.model-card[data-filepath="${currentPath}"]`) || document.querySelector(`.model-card[data-filepath="${filePath}"]`); const currentPresets = parsePresets(loraCard?.dataset.usage_tips); - + currentPresets[key] = parseFloat(value); const newPresetsJson = JSON.stringify(currentPresets); await getModelApiClient().saveModelMetadata(currentPath, { usage_tips: newPresetsJson }); presetTags.innerHTML = renderPresetTags(currentPresets); - + presetSelector.value = ''; presetValue.value = ''; presetValue.style.display = 'none'; }); // Add keydown event for preset value - presetValue.addEventListener('keydown', function(e) { + presetValue.addEventListener('keydown', function (e) { if (e.key === 'Enter') { e.preventDefault(); addPresetBtn.click(); diff --git a/static/js/components/shared/TriggerWords.js b/static/js/components/shared/TriggerWords.js index dfc1cb17..ea9d929a 100644 --- a/static/js/components/shared/TriggerWords.js +++ b/static/js/components/shared/TriggerWords.js @@ -6,7 +6,7 @@ import { showToast, copyToClipboard } from '../../utils/uiHelpers.js'; import { translate } from '../../utils/i18nHelpers.js'; import { getModelApiClient } from '../../api/modelApiFactory.js'; -import { escapeAttribute } from './utils.js'; +import { escapeAttribute, escapeHtml } from './utils.js'; /** * Fetch trained words for a model @@ -17,7 +17,7 @@ async function fetchTrainedWords(filePath) { try { const response = await fetch(`/api/lm/trained-words?file_path=${encodeURIComponent(filePath)}`); const data = await response.json(); - + if (data.success) { return { trainedWords: data.trained_words || [], // Returns array of [word, frequency] pairs @@ -43,11 +43,11 @@ async function fetchTrainedWords(filePath) { function createSuggestionDropdown(trainedWords, 
classTokens, existingWords = []) { const dropdown = document.createElement('div'); dropdown.className = 'metadata-suggestions-dropdown'; - + // Create header const header = document.createElement('div'); header.className = 'metadata-suggestions-header'; - + // No suggestions case if ((!trainedWords || trainedWords.length === 0) && !classTokens) { header.innerHTML = `${translate('modals.model.triggerWords.suggestions.noSuggestions')}`; @@ -55,12 +55,12 @@ function createSuggestionDropdown(trainedWords, classTokens, existingWords = []) dropdown.innerHTML += `
${translate('modals.model.triggerWords.suggestions.noTrainedWords')}
`; return dropdown; } - + // Sort trained words by frequency (highest first) if available if (trainedWords && trainedWords.length > 0) { trainedWords.sort((a, b) => b[1] - a[1]); } - + // Add class tokens section if available if (classTokens) { // Add class tokens header @@ -71,45 +71,47 @@ function createSuggestionDropdown(trainedWords, classTokens, existingWords = []) ${translate('modals.model.triggerWords.suggestions.classTokenDescription')} `; dropdown.appendChild(classTokensHeader); - + // Add class tokens container const classTokensContainer = document.createElement('div'); classTokensContainer.className = 'class-tokens-container'; - + // Create a special item for the class token const tokenItem = document.createElement('div'); tokenItem.className = `metadata-suggestion-item class-token-item ${existingWords.includes(classTokens) ? 'already-added' : ''}`; tokenItem.title = `${translate('modals.model.triggerWords.suggestions.classToken')}: ${classTokens}`; + + const escapedToken = escapeHtml(classTokens); tokenItem.innerHTML = ` - ${classTokens} + ${escapedToken}
${translate('modals.model.triggerWords.suggestions.classToken')} - ${existingWords.includes(classTokens) ? - `` : ''} + ${existingWords.includes(classTokens) ? + `` : ''}
`; - + // Add click handler if not already added if (!existingWords.includes(classTokens)) { tokenItem.addEventListener('click', () => { // Automatically add this word addNewTriggerWord(classTokens); - + // Also populate the input field for potential editing const input = document.querySelector('.metadata-input'); if (input) input.value = classTokens; - + // Focus on the input if (input) input.focus(); - + // Update dropdown without removing it updateTrainedWordsDropdown(); }); } - + classTokensContainer.appendChild(tokenItem); dropdown.appendChild(classTokensContainer); - + // Add separator if we also have trained words if (trainedWords && trainedWords.length > 0) { const separator = document.createElement('div'); @@ -117,7 +119,7 @@ function createSuggestionDropdown(trainedWords, classTokens, existingWords = []) dropdown.appendChild(separator); } } - + // Add trained words header if we have any if (trainedWords && trainedWords.length > 0) { header.innerHTML = ` @@ -125,52 +127,54 @@ function createSuggestionDropdown(trainedWords, classTokens, existingWords = []) ${translate('modals.model.triggerWords.suggestions.wordsFound', { count: trainedWords.length })} `; dropdown.appendChild(header); - + // Create tag container for trained words const container = document.createElement('div'); container.className = 'metadata-suggestions-container'; - + // Add each trained word as a tag trainedWords.forEach(([word, frequency]) => { const isAdded = existingWords.includes(word); - + const item = document.createElement('div'); item.className = `metadata-suggestion-item ${isAdded ? 'already-added' : ''}`; item.title = word; // Show full word on hover if truncated + + const escapedWord = escapeHtml(word); item.innerHTML = ` - ${word} + ${escapedWord}
${frequency} ${isAdded ? `` : ''}
`; - + if (!isAdded) { item.addEventListener('click', () => { // Automatically add this word addNewTriggerWord(word); - + // Also populate the input field for potential editing const input = document.querySelector('.metadata-input'); if (input) input.value = word; - + // Focus on the input if (input) input.focus(); - + // Update dropdown without removing it updateTrainedWordsDropdown(); }); } - + container.appendChild(item); }); - + dropdown.appendChild(container); } else if (!classTokens) { // If we have neither class tokens nor trained words dropdown.innerHTML += `
${translate('modals.model.triggerWords.suggestions.noTrainedWords')}
`; } - + return dropdown; } @@ -204,7 +208,7 @@ export function renderTriggerWords(words, filePath) { `; - + return `
@@ -215,9 +219,12 @@ export function renderTriggerWords(words, filePath) {
- ${words.map(word => ` -
- ${word} + ${words.map(word => { + const escapedWord = escapeHtml(word); + const escapedAttr = escapeAttribute(word); + return ` +
+ ${escapedWord} @@ -225,7 +232,7 @@ export function renderTriggerWords(words, filePath) {
- `).join('')} + `}).join('')}
`; } else { - statusBadge = existsLocally ? + statusBadge = existsLocally ? `
${translate('recipes.controls.import.inLibrary', {}, 'In Library')}
${localPath}
@@ -126,7 +137,7 @@ export class RecipeDataManager { console.warn('Failed to format early access date', e); } } - + earlyAccessBadge = `
${translate('recipes.controls.import.earlyAccess', {}, 'Early Access')}
${earlyAccessInfo} ${translate('recipes.controls.import.verifyEarlyAccess', {}, 'Verify that you have purchased early access before downloading.')}
@@ -134,7 +145,7 @@ export class RecipeDataManager { } // Format size if available - const sizeDisplay = lora.size ? + const sizeDisplay = lora.size ? `
${this.importManager.formatFileSize(lora.size)}
` : ''; return ` @@ -161,9 +172,9 @@ export class RecipeDataManager { `; }).join(''); } - + // Check for early access loras and show warning if any exist - const earlyAccessLoras = this.importManager.recipeData.loras.filter(lora => + const earlyAccessLoras = this.importManager.recipeData.loras.filter(lora => lora.isEarlyAccess && !lora.existsLocally && !lora.isDeleted); if (earlyAccessLoras.length > 0) { // Show a warning about early access loras @@ -179,7 +190,7 @@ export class RecipeDataManager {
`; - + // Show the warning message const buttonsContainer = document.querySelector('#detailsStep .modal-actions'); if (buttonsContainer) { @@ -188,7 +199,7 @@ export class RecipeDataManager { if (existingWarning) { existingWarning.remove(); } - + // Add new warning const warningContainer = document.createElement('div'); warningContainer.id = 'earlyAccessWarning'; @@ -196,27 +207,27 @@ export class RecipeDataManager { buttonsContainer.parentNode.insertBefore(warningContainer, buttonsContainer); } } - + // Check for duplicate recipes and display warning if found this.checkAndDisplayDuplicates(); - + // Update Next button state based on missing LoRAs and duplicates this.updateNextButtonState(); } - + checkAndDisplayDuplicates() { // Check if we have duplicate recipes - if (this.importManager.recipeData && - this.importManager.recipeData.matching_recipes && + if (this.importManager.recipeData && + this.importManager.recipeData.matching_recipes && this.importManager.recipeData.matching_recipes.length > 0) { - + // Store duplicates in the importManager for later use this.importManager.duplicateRecipes = this.importManager.recipeData.matching_recipes; - + // Create duplicate warning container - const duplicateContainer = document.getElementById('duplicateRecipesContainer') || + const duplicateContainer = document.getElementById('duplicateRecipesContainer') || this.createDuplicateContainer(); - + // Format date helper function const formatDate = (timestamp) => { try { @@ -226,7 +237,7 @@ export class RecipeDataManager { return 'Unknown date'; } }; - + // Generate the HTML for duplicate recipes warning duplicateContainer.innerHTML = `
@@ -262,10 +273,10 @@ export class RecipeDataManager { `).join('')}
`; - + // Show the duplicate container duplicateContainer.style.display = 'block'; - + // Add click event for the toggle button const toggleButton = document.getElementById('toggleDuplicatesList'); if (toggleButton) { @@ -290,49 +301,49 @@ export class RecipeDataManager { if (duplicateContainer) { duplicateContainer.style.display = 'none'; } - + // Reset duplicate tracking this.importManager.duplicateRecipes = []; } } - + createDuplicateContainer() { // Find where to insert the duplicate container const lorasListContainer = document.querySelector('.input-group:has(#lorasList)'); - + if (!lorasListContainer) return null; - + // Create container const duplicateContainer = document.createElement('div'); duplicateContainer.id = 'duplicateRecipesContainer'; duplicateContainer.className = 'duplicate-recipes-container'; - + // Insert before the LoRA list lorasListContainer.parentNode.insertBefore(duplicateContainer, lorasListContainer); - + return duplicateContainer; } - + updateNextButtonState() { const nextButton = document.querySelector('#detailsStep .primary-btn'); const actionsContainer = document.querySelector('#detailsStep .modal-actions'); if (!nextButton || !actionsContainer) return; - + // Always clean up previous warnings and buttons first const existingWarning = document.getElementById('deletedLorasWarning'); if (existingWarning) { existingWarning.remove(); } - + // Remove any existing "import anyway" button const importAnywayBtn = document.getElementById('importAnywayBtn'); if (importAnywayBtn) { importAnywayBtn.remove(); } - + // Count deleted LoRAs const deletedLoras = this.importManager.recipeData.loras.filter(lora => lora.isDeleted).length; - + // If we have deleted LoRAs, show a warning if (deletedLoras > 0) { // Create a new warning container above the buttons @@ -340,7 +351,7 @@ export class RecipeDataManager { const warningContainer = document.createElement('div'); warningContainer.id = 'deletedLorasWarning'; warningContainer.className = 
'deleted-loras-warning'; - + // Create warning message warningContainer.innerHTML = `
@@ -349,19 +360,19 @@ export class RecipeDataManager {
These LoRAs cannot be downloaded. If you continue, they will remain in the recipe but won't be included when used.
`; - + // Insert before the buttons container buttonsContainer.parentNode.insertBefore(warningContainer, buttonsContainer); } - + // Check for duplicates but don't change button actions const missingNotDeleted = this.importManager.recipeData.loras.filter( lora => !lora.existsLocally && !lora.isDeleted ).length; - + // Standard button behavior regardless of duplicates nextButton.classList.remove('warning-btn'); - + if (missingNotDeleted > 0) { nextButton.textContent = translate('recipes.controls.import.downloadMissingLoras', {}, 'Download Missing LoRAs'); } else { @@ -372,30 +383,30 @@ export class RecipeDataManager { addTag() { const tagInput = document.getElementById('tagInput'); const tag = tagInput.value.trim(); - + if (!tag) return; - + if (!this.importManager.recipeTags.includes(tag)) { this.importManager.recipeTags.push(tag); this.updateTagsDisplay(); } - + tagInput.value = ''; } - + removeTag(tag) { this.importManager.recipeTags = this.importManager.recipeTags.filter(t => t !== tag); this.updateTagsDisplay(); } - + updateTagsDisplay() { const tagsContainer = document.getElementById('tagsContainer'); - + if (this.importManager.recipeTags.length === 0) { tagsContainer.innerHTML = `
${translate('recipes.controls.import.noTagsAdded', {}, 'No tags added')}
`; return; } - + tagsContainer.innerHTML = this.importManager.recipeTags.map(tag => `
${tag} @@ -410,7 +421,7 @@ export class RecipeDataManager { showToast('toast.recipes.enterRecipeName', {}, 'error'); return; } - + // Automatically mark all deleted LoRAs as excluded if (this.importManager.recipeData && this.importManager.recipeData.loras) { this.importManager.recipeData.loras.forEach(lora => { @@ -419,11 +430,11 @@ export class RecipeDataManager { } }); } - + // Update missing LoRAs list to exclude deleted LoRAs - this.importManager.missingLoras = this.importManager.recipeData.loras.filter(lora => + this.importManager.missingLoras = this.importManager.recipeData.loras.filter(lora => !lora.existsLocally && !lora.isDeleted); - + // If we have downloadable missing LoRAs, go to location step if (this.importManager.missingLoras.length > 0) { // Store only downloadable LoRAs for the download step diff --git a/static/js/recipes.js b/static/js/recipes.js index 293bb8d9..94f158f7 100644 --- a/static/js/recipes.js +++ b/static/js/recipes.js @@ -2,31 +2,60 @@ import { appCore } from './core.js'; import { ImportManager } from './managers/ImportManager.js'; import { RecipeModal } from './components/RecipeModal.js'; -import { getCurrentPageState } from './state/index.js'; +import { state, getCurrentPageState } from './state/index.js'; import { getSessionItem, removeSessionItem } from './utils/storageHelpers.js'; import { RecipeContextMenu } from './components/ContextMenu/index.js'; import { DuplicatesManager } from './components/DuplicatesManager.js'; import { refreshVirtualScroll } from './utils/infiniteScroll.js'; -import { refreshRecipes } from './api/recipeApi.js'; +import { refreshRecipes, RecipeSidebarApiClient } from './api/recipeApi.js'; +import { sidebarManager } from './components/SidebarManager.js'; + +class RecipePageControls { + constructor() { + this.pageType = 'recipes'; + this.pageState = getCurrentPageState(); + this.sidebarApiClient = new RecipeSidebarApiClient(); + } + + async resetAndReload() { + refreshVirtualScroll(); + } + + async 
refreshModels(fullRebuild = false) { + if (fullRebuild) { + await refreshRecipes(); + return; + } + + refreshVirtualScroll(); + } + + getSidebarApiClient() { + return this.sidebarApiClient; + } +} class RecipeManager { constructor() { // Get page state this.pageState = getCurrentPageState(); - + + // Page controls for shared sidebar behaviors + this.pageControls = new RecipePageControls(); + // Initialize ImportManager this.importManager = new ImportManager(); - + // Initialize RecipeModal this.recipeModal = new RecipeModal(); - + // Initialize DuplicatesManager this.duplicatesManager = new DuplicatesManager(this); - + // Add state tracking for infinite scroll this.pageState.isLoading = false; this.pageState.hasMore = true; - + // Custom filter state - move to pageState for compatibility with virtual scrolling this.pageState.customFilter = { active: false, @@ -35,27 +64,40 @@ class RecipeManager { recipeId: null }; } - + async initialize() { // Initialize event listeners this.initEventListeners(); - + // Set default search options if not already defined this._initSearchOptions(); - + // Initialize context menu new RecipeContextMenu(); - + // Check for custom filter parameters in session storage this._checkCustomFilter(); - + // Expose necessary functions to the page this._exposeGlobalFunctions(); - + + // Initialize sidebar navigation + await this._initSidebar(); + // Initialize common page features appCore.initializePageFeatures(); } - + + async _initSidebar() { + try { + sidebarManager.setHostPageControls(this.pageControls); + const shouldShowSidebar = state?.global?.settings?.show_folder_sidebar !== false; + await sidebarManager.setSidebarEnabled(shouldShowSidebar); + } catch (error) { + console.error('Failed to initialize recipe sidebar:', error); + } + } + _initSearchOptions() { // Ensure recipes search options are properly initialized if (!this.pageState.searchOptions) { @@ -63,25 +105,27 @@ class RecipeManager { title: true, // Recipe title tags: true, // 
Recipe tags loraName: true, // LoRA file name - loraModel: true // LoRA model name + loraModel: true, // LoRA model name + prompt: true, // Prompt search + recursive: true }; } } - + _exposeGlobalFunctions() { // Only expose what's needed for the page window.recipeManager = this; window.importManager = this.importManager; } - + _checkCustomFilter() { // Check for Lora filter const filterLoraName = getSessionItem('lora_to_recipe_filterLoraName'); const filterLoraHash = getSessionItem('lora_to_recipe_filterLoraHash'); - + // Check for specific recipe ID const viewRecipeId = getSessionItem('viewRecipeId'); - + // Set custom filter if any parameter is present if (filterLoraName || filterLoraHash || viewRecipeId) { this.pageState.customFilter = { @@ -90,35 +134,35 @@ class RecipeManager { loraHash: filterLoraHash, recipeId: viewRecipeId }; - + // Show custom filter indicator this._showCustomFilterIndicator(); } } - + _showCustomFilterIndicator() { const indicator = document.getElementById('customFilterIndicator'); const textElement = document.getElementById('customFilterText'); - + if (!indicator || !textElement) return; - + // Update text based on filter type let filterText = ''; - + if (this.pageState.customFilter.recipeId) { filterText = 'Viewing specific recipe'; } else if (this.pageState.customFilter.loraName) { // Format with Lora name const loraName = this.pageState.customFilter.loraName; - const displayName = loraName.length > 25 ? - loraName.substring(0, 22) + '...' : + const displayName = loraName.length > 25 ? + loraName.substring(0, 22) + '...' 
: loraName; - + filterText = `Recipes using: ${displayName}`; } else { filterText = 'Filtered recipes'; } - + // Update indicator text and show it textElement.innerHTML = filterText; // Add title attribute to show the lora name as a tooltip @@ -126,14 +170,14 @@ class RecipeManager { textElement.setAttribute('title', this.pageState.customFilter.loraName); } indicator.classList.remove('hidden'); - + // Add pulse animation const filterElement = indicator.querySelector('.filter-active'); if (filterElement) { filterElement.classList.add('animate'); setTimeout(() => filterElement.classList.remove('animate'), 600); } - + // Add click handler for clear filter button const clearFilterBtn = indicator.querySelector('.clear-filter'); if (clearFilterBtn) { @@ -143,7 +187,7 @@ class RecipeManager { }); } } - + _clearCustomFilter() { // Reset custom filter this.pageState.customFilter = { @@ -152,33 +196,48 @@ class RecipeManager { loraHash: null, recipeId: null }; - + // Hide indicator const indicator = document.getElementById('customFilterIndicator'); if (indicator) { indicator.classList.add('hidden'); } - + // Clear any session storage items removeSessionItem('lora_to_recipe_filterLoraName'); removeSessionItem('lora_to_recipe_filterLoraHash'); removeSessionItem('viewRecipeId'); - + // Reset and refresh the virtual scroller refreshVirtualScroll(); } - + initEventListeners() { // Sort select const sortSelect = document.getElementById('sortSelect'); if (sortSelect) { + sortSelect.value = this.pageState.sortBy || 'date:desc'; sortSelect.addEventListener('change', () => { this.pageState.sortBy = sortSelect.value; refreshVirtualScroll(); }); } + + const bulkButton = document.querySelector('[data-action="bulk"]'); + if (bulkButton) { + bulkButton.addEventListener('click', () => window.bulkManager?.toggleBulkMode()); + } + + const favoriteFilterBtn = document.getElementById('favoriteFilterBtn'); + if (favoriteFilterBtn) { + favoriteFilterBtn.addEventListener('click', () => { + 
this.pageState.showFavoritesOnly = !this.pageState.showFavoritesOnly; + favoriteFilterBtn.classList.toggle('active', this.pageState.showFavoritesOnly); + refreshVirtualScroll(); + }); + } } - + // This method is kept for compatibility but now uses virtual scrolling async loadRecipes(resetPage = true) { // Skip loading if in duplicates mode @@ -186,32 +245,32 @@ class RecipeManager { if (pageState.duplicatesMode) { return; } - + if (resetPage) { refreshVirtualScroll(); } } - + /** * Refreshes the recipe list by first rebuilding the cache and then loading recipes */ async refreshRecipes() { return refreshRecipes(); } - + showRecipeDetails(recipe) { this.recipeModal.showRecipeDetails(recipe); } - + // Duplicate detection and management methods async findDuplicateRecipes() { return await this.duplicatesManager.findDuplicates(); } - + selectLatestDuplicates() { this.duplicatesManager.selectLatestDuplicates(); } - + deleteSelectedDuplicates() { this.duplicatesManager.deleteSelectedDuplicates(); } @@ -219,14 +278,14 @@ class RecipeManager { confirmDeleteDuplicates() { this.duplicatesManager.confirmDeleteDuplicates(); } - + exitDuplicateMode() { // Clear the grid first to prevent showing old content temporarily const recipeGrid = document.getElementById('recipeGrid'); if (recipeGrid) { recipeGrid.innerHTML = ''; } - + this.duplicatesManager.exitDuplicateMode(); } } @@ -235,11 +294,11 @@ class RecipeManager { document.addEventListener('DOMContentLoaded', async () => { // Initialize core application await appCore.initialize(); - + // Initialize recipe manager const recipeManager = new RecipeManager(); await recipeManager.initialize(); }); // Export for use in other modules -export { RecipeManager }; \ No newline at end of file +export { RecipeManager }; diff --git a/static/js/state/index.js b/static/js/state/index.js index 4ee59b55..1dfbf568 100644 --- a/static/js/state/index.js +++ b/static/js/state/index.js @@ -58,7 +58,7 @@ export const state = { loadingManager: null, 
observer: null, }, - + // Page-specific states pages: { [MODEL_TYPES.LORA]: { @@ -69,20 +69,20 @@ export const state = { activeFolder: getStorageItem(`${MODEL_TYPES.LORA}_activeFolder`), activeLetterFilter: null, previewVersions: loraPreviewVersions, - searchManager: null, - searchOptions: { - filename: true, - modelname: true, - tags: false, - creator: false, - recursive: getStorageItem(`${MODEL_TYPES.LORA}_recursiveSearch`, true), - }, - filters: { - baseModel: [], - tags: {}, - license: {}, - modelTypes: [] - }, + searchManager: null, + searchOptions: { + filename: true, + modelname: true, + tags: false, + creator: false, + recursive: getStorageItem(`${MODEL_TYPES.LORA}_recursiveSearch`, true), + }, + filters: { + baseModel: [], + tags: {}, + license: {}, + modelTypes: [] + }, bulkMode: false, selectedLoras: new Set(), loraMetadataCache: new Map(), @@ -90,33 +90,35 @@ export const state = { showUpdateAvailableOnly: false, duplicatesMode: false, }, - + recipes: { currentPage: 1, isLoading: false, hasMore: true, - sortBy: 'date', - searchManager: null, - searchOptions: { - title: true, - tags: true, - loraName: true, - loraModel: true - }, - filters: { - baseModel: [], - tags: {}, - license: {}, - modelTypes: [], - search: '' - }, + sortBy: 'date:desc', + activeFolder: getStorageItem('recipes_activeFolder'), + searchManager: null, + searchOptions: { + title: true, + tags: true, + loraName: true, + loraModel: true, + recursive: getStorageItem('recipes_recursiveSearch', true), + }, + filters: { + baseModel: [], + tags: {}, + license: {}, + modelTypes: [], + search: '' + }, pageSize: 20, showFavoritesOnly: false, duplicatesMode: false, bulkMode: false, selectedModels: new Set(), }, - + [MODEL_TYPES.CHECKPOINT]: { currentPage: 1, isLoading: false, @@ -124,19 +126,19 @@ export const state = { sortBy: 'name', activeFolder: getStorageItem(`${MODEL_TYPES.CHECKPOINT}_activeFolder`), previewVersions: checkpointPreviewVersions, - searchManager: null, - searchOptions: { - 
filename: true, - modelname: true, - creator: false, - recursive: getStorageItem(`${MODEL_TYPES.CHECKPOINT}_recursiveSearch`, true), - }, - filters: { - baseModel: [], - tags: {}, - license: {}, - modelTypes: [] - }, + searchManager: null, + searchOptions: { + filename: true, + modelname: true, + creator: false, + recursive: getStorageItem(`${MODEL_TYPES.CHECKPOINT}_recursiveSearch`, true), + }, + filters: { + baseModel: [], + tags: {}, + license: {}, + modelTypes: [] + }, modelType: 'checkpoint', // 'checkpoint' or 'diffusion_model' bulkMode: false, selectedModels: new Set(), @@ -145,7 +147,7 @@ export const state = { showUpdateAvailableOnly: false, duplicatesMode: false, }, - + [MODEL_TYPES.EMBEDDING]: { currentPage: 1, isLoading: false, @@ -154,20 +156,20 @@ export const state = { activeFolder: getStorageItem(`${MODEL_TYPES.EMBEDDING}_activeFolder`), activeLetterFilter: null, previewVersions: embeddingPreviewVersions, - searchManager: null, - searchOptions: { - filename: true, - modelname: true, - tags: false, - creator: false, - recursive: getStorageItem(`${MODEL_TYPES.EMBEDDING}_recursiveSearch`, true), - }, - filters: { - baseModel: [], - tags: {}, - license: {}, - modelTypes: [] - }, + searchManager: null, + searchOptions: { + filename: true, + modelname: true, + tags: false, + creator: false, + recursive: getStorageItem(`${MODEL_TYPES.EMBEDDING}_recursiveSearch`, true), + }, + filters: { + baseModel: [], + tags: {}, + license: {}, + modelTypes: [] + }, bulkMode: false, selectedModels: new Set(), metadataCache: new Map(), @@ -176,45 +178,45 @@ export const state = { duplicatesMode: false, } }, - + // Current active page - use MODEL_TYPES constants currentPageType: MODEL_TYPES.LORA, - + // Backward compatibility - proxy properties get currentPage() { return this.pages[this.currentPageType].currentPage; }, set currentPage(value) { this.pages[this.currentPageType].currentPage = value; }, - + get isLoading() { return this.pages[this.currentPageType].isLoading; 
}, set isLoading(value) { this.pages[this.currentPageType].isLoading = value; }, - + get hasMore() { return this.pages[this.currentPageType].hasMore; }, set hasMore(value) { this.pages[this.currentPageType].hasMore = value; }, - + get sortBy() { return this.pages[this.currentPageType].sortBy; }, set sortBy(value) { this.pages[this.currentPageType].sortBy = value; }, - + get activeFolder() { return this.pages[this.currentPageType].activeFolder; }, set activeFolder(value) { this.pages[this.currentPageType].activeFolder = value; }, - + get loadingManager() { return this.global.loadingManager; }, set loadingManager(value) { this.global.loadingManager = value; }, - + get observer() { return this.global.observer; }, set observer(value) { this.global.observer = value; }, - + get previewVersions() { return this.pages.loras.previewVersions; }, set previewVersions(value) { this.pages.loras.previewVersions = value; }, - + get searchManager() { return this.pages[this.currentPageType].searchManager; }, set searchManager(value) { this.pages[this.currentPageType].searchManager = value; }, - + get searchOptions() { return this.pages[this.currentPageType].searchOptions; }, set searchOptions(value) { this.pages[this.currentPageType].searchOptions = value; }, - + get filters() { return this.pages[this.currentPageType].filters; }, set filters(value) { this.pages[this.currentPageType].filters = value; }, - - get bulkMode() { + + get bulkMode() { const currentType = this.currentPageType; if (currentType === MODEL_TYPES.LORA) { return this.pages.loras.bulkMode; @@ -222,7 +224,7 @@ export const state = { return this.pages[currentType].bulkMode; } }, - set bulkMode(value) { + set bulkMode(value) { const currentType = this.currentPageType; if (currentType === MODEL_TYPES.LORA) { this.pages.loras.bulkMode = value; @@ -230,11 +232,11 @@ export const state = { this.pages[currentType].bulkMode = value; } }, - + get selectedLoras() { return this.pages.loras.selectedLoras; }, set 
selectedLoras(value) { this.pages.loras.selectedLoras = value; }, - - get selectedModels() { + + get selectedModels() { const currentType = this.currentPageType; if (currentType === MODEL_TYPES.LORA) { return this.pages.loras.selectedLoras; @@ -242,7 +244,7 @@ export const state = { return this.pages[currentType].selectedModels; } }, - set selectedModels(value) { + set selectedModels(value) { const currentType = this.currentPageType; if (currentType === MODEL_TYPES.LORA) { this.pages.loras.selectedLoras = value; @@ -250,10 +252,10 @@ export const state = { this.pages[currentType].selectedModels = value; } }, - + get loraMetadataCache() { return this.pages.loras.loraMetadataCache; }, set loraMetadataCache(value) { this.pages.loras.loraMetadataCache = value; }, - + get settings() { return this.global.settings; }, set settings(value) { this.global.settings = value; } }; diff --git a/static/js/utils/VirtualScroller.js b/static/js/utils/VirtualScroller.js index 9fd37bfe..59630f57 100644 --- a/static/js/utils/VirtualScroller.js +++ b/static/js/utils/VirtualScroller.js @@ -12,13 +12,13 @@ export class VirtualScroller { this.scrollContainer = options.scrollContainer || this.containerElement; this.batchSize = options.batchSize || 50; this.pageSize = options.pageSize || 100; - this.itemAspectRatio = 896/1152; // Aspect ratio of cards + this.itemAspectRatio = 896 / 1152; // Aspect ratio of cards this.rowGap = options.rowGap || 20; // Add vertical gap between rows (default 20px) - + // Add container padding properties this.containerPaddingTop = options.containerPaddingTop || 4; // Default top padding from CSS this.containerPaddingBottom = options.containerPaddingBottom || 4; // Default bottom padding from CSS - + // Add data windowing enable/disable flag this.enableDataWindowing = options.enableDataWindowing !== undefined ? 
options.enableDataWindowing : false; @@ -73,15 +73,15 @@ export class VirtualScroller { this.spacerElement.style.width = '100%'; this.spacerElement.style.height = '0px'; // Will be updated as items are loaded this.spacerElement.style.pointerEvents = 'none'; - + // The grid will be used for the actual visible items this.gridElement.style.position = 'relative'; this.gridElement.style.minHeight = '0'; - + // Apply padding directly to ensure consistency this.gridElement.style.paddingTop = `${this.containerPaddingTop}px`; this.gridElement.style.paddingBottom = `${this.containerPaddingBottom}px`; - + // Place the spacer inside the grid container this.gridElement.appendChild(this.spacerElement); } @@ -97,16 +97,16 @@ export class VirtualScroller { const containerStyle = getComputedStyle(this.containerElement); const paddingLeft = parseInt(containerStyle.paddingLeft, 10) || 0; const paddingRight = parseInt(containerStyle.paddingRight, 10) || 0; - + // Calculate available content width (excluding padding) const availableContentWidth = containerWidth - paddingLeft - paddingRight; - + // Get display density setting const displayDensity = state.global.settings?.display_density || 'default'; - + // Set exact column counts and grid widths to match CSS container widths let maxColumns, maxGridWidth; - + // Match exact column counts and CSS container width values based on density if (window.innerWidth >= 3000) { // 4K if (displayDensity === 'default') { @@ -137,17 +137,17 @@ export class VirtualScroller { } maxGridWidth = 1400; // Match exact CSS container width for 1080p } - + // Calculate baseCardWidth based on desired column count and available space // Formula: (maxGridWidth - (columns-1)*gap) / columns const baseCardWidth = (maxGridWidth - ((maxColumns - 1) * this.columnGap)) / maxColumns; - + // Use the smaller of available content width or max grid width const actualGridWidth = Math.min(availableContentWidth, maxGridWidth); - + // Set exact column count based on screen size 
and mode this.columnsCount = maxColumns; - + // When available width is smaller than maxGridWidth, recalculate columns if (availableContentWidth < maxGridWidth) { // Calculate how many columns can fit in the available space @@ -155,30 +155,30 @@ export class VirtualScroller { (availableContentWidth + this.columnGap) / (baseCardWidth + this.columnGap) )); } - + // Calculate actual item width this.itemWidth = (actualGridWidth - (this.columnsCount - 1) * this.columnGap) / this.columnsCount; - + // Calculate height based on aspect ratio this.itemHeight = this.itemWidth / this.itemAspectRatio; - + // Calculate the left offset to center the grid within the content area this.leftOffset = Math.max(0, (availableContentWidth - actualGridWidth) / 2); // Update grid element max-width to match available width this.gridElement.style.maxWidth = `${actualGridWidth}px`; - + // Add or remove density classes for style adjustments this.gridElement.classList.remove('default-density', 'medium-density', 'compact-density'); this.gridElement.classList.add(`${displayDensity}-density`); - + // Update spacer height this.updateSpacerHeight(); - + // Re-render with new layout this.clearRenderedItems(); this.scheduleRender(); - + return true; } @@ -186,20 +186,20 @@ export class VirtualScroller { // Debounced scroll handler this.scrollHandler = this.debounce(() => this.handleScroll(), 10); this.scrollContainer.addEventListener('scroll', this.scrollHandler); - + // Window resize handler for layout recalculation this.resizeHandler = this.debounce(() => { this.calculateLayout(); }, 150); - + window.addEventListener('resize', this.resizeHandler); - + // Use ResizeObserver for more accurate container size detection if (typeof ResizeObserver !== 'undefined') { this.resizeObserver = new ResizeObserver(this.debounce(() => { this.calculateLayout(); }, 150)); - + this.resizeObserver.observe(this.containerElement); } } @@ -217,35 +217,35 @@ export class VirtualScroller { async loadInitialBatch() { const 
pageState = getCurrentPageState(); if (this.isLoading) return; - + this.isLoading = true; this.setLoadingTimeout(); // Add loading timeout safety - + try { const { items, totalItems, hasMore } = await this.fetchItemsFn(1, this.pageSize); - + // Initialize the data window with the first batch of items this.items = items || []; this.totalItems = totalItems || 0; this.hasMore = hasMore; this.dataWindow = { start: 0, end: this.items.length }; this.absoluteWindowStart = 0; - + // Update the spacer height based on the total number of items this.updateSpacerHeight(); - + // Check if there are no items and show placeholder if needed if (this.items.length === 0) { this.showNoItemsPlaceholder(); } else { this.removeNoItemsPlaceholder(); } - + // Reset page state to sync with our virtual scroller pageState.currentPage = 2; // Next page to load would be 2 pageState.hasMore = this.hasMore; pageState.isLoading = false; - + return { items, totalItems, hasMore }; } catch (err) { console.error('Failed to load initial batch:', err); @@ -260,36 +260,36 @@ export class VirtualScroller { async loadMoreItems() { const pageState = getCurrentPageState(); if (this.isLoading || !this.hasMore) return; - + this.isLoading = true; pageState.isLoading = true; this.setLoadingTimeout(); // Add loading timeout safety - + try { console.log('Loading more items, page:', pageState.currentPage); const { items, hasMore } = await this.fetchItemsFn(pageState.currentPage, this.pageSize); - + if (items && items.length > 0) { this.items = [...this.items, ...items]; this.hasMore = hasMore; pageState.hasMore = hasMore; - + // Update page for next request pageState.currentPage++; - + // Update the spacer height this.updateSpacerHeight(); - + // Render the newly loaded items if they're in view this.scheduleRender(); - + console.log(`Loaded ${items.length} more items, total now: ${this.items.length}`); } else { this.hasMore = false; pageState.hasMore = false; console.log('No more items to load'); } - + return 
items; } catch (err) { console.error('Failed to load more items:', err); @@ -305,7 +305,7 @@ export class VirtualScroller { setLoadingTimeout() { // Clear any existing timeout first this.clearLoadingTimeout(); - + // Set a new timeout to prevent loading state from getting stuck this.loadingTimeout = setTimeout(() => { if (this.isLoading) { @@ -326,15 +326,15 @@ export class VirtualScroller { updateSpacerHeight() { if (this.columnsCount === 0) return; - + // Calculate total rows needed based on total items and columns const totalRows = Math.ceil(this.totalItems / this.columnsCount); // Add row gaps to the total height calculation const totalHeight = totalRows * this.itemHeight + (totalRows - 1) * this.rowGap; - + // Include container padding in the total height const spacerHeight = totalHeight + this.containerPaddingTop + this.containerPaddingBottom; - + // Update spacer height to represent all items this.spacerElement.style.height = `${spacerHeight}px`; } @@ -342,28 +342,28 @@ export class VirtualScroller { getVisibleRange() { const scrollTop = this.scrollContainer.scrollTop; const viewportHeight = this.scrollContainer.clientHeight; - + // Calculate the visible row range, accounting for row gaps const rowHeight = this.itemHeight + this.rowGap; const startRow = Math.floor(scrollTop / rowHeight); const endRow = Math.ceil((scrollTop + viewportHeight) / rowHeight); - + // Add overscan for smoother scrolling const overscanRows = this.overscan; const firstRow = Math.max(0, startRow - overscanRows); const lastRow = Math.min(Math.ceil(this.totalItems / this.columnsCount), endRow + overscanRows); - + // Calculate item indices const firstIndex = firstRow * this.columnsCount; const lastIndex = Math.min(this.totalItems, lastRow * this.columnsCount); - + return { start: firstIndex, end: lastIndex }; } // Update the scheduleRender method to check for disabled state scheduleRender() { if (this.disabled || this.renderScheduled) return; - + this.renderScheduled = true; 
requestAnimationFrame(() => { this.renderItems(); @@ -374,25 +374,25 @@ export class VirtualScroller { // Update the renderItems method to check for disabled state renderItems() { if (this.disabled || this.items.length === 0 || this.columnsCount === 0) return; - + const { start, end } = this.getVisibleRange(); - + // Check if render range has significantly changed - const isSameRange = - start >= this.lastRenderRange.start && + const isSameRange = + start >= this.lastRenderRange.start && end <= this.lastRenderRange.end && Math.abs(start - this.lastRenderRange.start) < 10; - + if (isSameRange) return; - + this.lastRenderRange = { start, end }; - + // Determine which items need to be added and removed const currentIndices = new Set(); for (let i = start; i < end && i < this.items.length; i++) { currentIndices.add(i); } - + // Remove items that are no longer visible for (const [index, element] of this.renderedItems.entries()) { if (!currentIndices.has(index)) { @@ -400,10 +400,10 @@ export class VirtualScroller { this.renderedItems.delete(index); } } - + // Use DocumentFragment for batch DOM operations const fragment = document.createDocumentFragment(); - + // Add new visible items to the fragment for (let i = start; i < end && i < this.items.length; i++) { if (!this.renderedItems.has(i)) { @@ -413,17 +413,17 @@ export class VirtualScroller { this.renderedItems.set(i, element); } } - + // Add the fragment to the grid (single DOM operation) if (fragment.childNodes.length > 0) { this.gridElement.appendChild(fragment); } - + // If we're close to the end and have more items to load, fetch them if (end > this.items.length - (this.columnsCount * 2) && this.hasMore && !this.isLoading) { this.loadMoreItems(); } - + // Check if we need to slide the data window this.slideDataWindow(); } @@ -439,14 +439,14 @@ export class VirtualScroller { this.totalItems = totalItems || 0; this.hasMore = hasMore; this.updateSpacerHeight(); - + // Check if there are no items and show placeholder 
if needed if (this.items.length === 0) { this.showNoItemsPlaceholder(); } else { this.removeNoItemsPlaceholder(); } - + // Clear all rendered items and redraw this.clearRenderedItems(); this.scheduleRender(); @@ -455,29 +455,29 @@ export class VirtualScroller { createItemElement(item, index) { // Create the DOM element const element = this.createItemFn(item); - + // Add virtual scroll item class element.classList.add('virtual-scroll-item'); - + // Calculate the position const row = Math.floor(index / this.columnsCount); const col = index % this.columnsCount; - + // Calculate precise positions with row gap included // Add the top padding to account for container padding const topPos = this.containerPaddingTop + (row * (this.itemHeight + this.rowGap)); - + // Position correctly with leftOffset (no need to add padding as absolute // positioning is already relative to the padding edge of the container) const leftPos = this.leftOffset + (col * (this.itemWidth + this.columnGap)); - + // Position the element with absolute positioning element.style.position = 'absolute'; element.style.left = `${leftPos}px`; element.style.top = `${topPos}px`; element.style.width = `${this.itemWidth}px`; element.style.height = `${this.itemHeight}px`; - + return element; } @@ -486,17 +486,17 @@ export class VirtualScroller { const scrollTop = this.scrollContainer.scrollTop; this.scrollDirection = scrollTop > this.lastScrollTop ? 
'down' : 'up'; this.lastScrollTop = scrollTop; - + // Handle large jumps in scroll position - check if we need to fetch a new window const { scrollHeight } = this.scrollContainer; const scrollRatio = scrollTop / scrollHeight; - + // Only perform data windowing if the feature is enabled if (this.enableDataWindowing && this.totalItems > this.windowSize) { const estimatedIndex = Math.floor(scrollRatio * this.totalItems); const currentWindowStart = this.absoluteWindowStart; const currentWindowEnd = currentWindowStart + this.items.length; - + // If the estimated position is outside our current window by a significant amount if (estimatedIndex < currentWindowStart || estimatedIndex > currentWindowEnd) { // Fetch a new data window centered on the estimated position @@ -504,14 +504,14 @@ export class VirtualScroller { return; // Skip normal rendering until new data is loaded } } - + // Render visible items this.scheduleRender(); - + // If we're near the bottom and have more items, load them const { clientHeight } = this.scrollContainer; const scrollBottom = scrollTop + clientHeight; - + // Fix the threshold calculation - use percentage of remaining height instead // We'll trigger loading when within 20% of the bottom of rendered content const remainingScroll = scrollHeight - scrollBottom; @@ -521,9 +521,9 @@ export class VirtualScroller { // Or when within 2 rows of content from the bottom, whichever is larger (this.itemHeight + this.rowGap) * 2 ); - + const shouldLoadMore = remainingScroll <= scrollThreshold; - + if (shouldLoadMore && this.hasMore && !this.isLoading) { this.loadMoreItems(); } @@ -533,40 +533,40 @@ export class VirtualScroller { async fetchDataWindow(targetIndex) { // Skip if data windowing is disabled or already fetching if (!this.enableDataWindowing || this.fetchingWindow) return; - + this.fetchingWindow = true; - + try { // Calculate which page we need to fetch based on target index const targetPage = Math.floor(targetIndex / this.pageSize) + 1; 
console.log(`Fetching data window for index ${targetIndex}, page ${targetPage}`); - + const { items, totalItems, hasMore } = await this.fetchItemsFn(targetPage, this.pageSize); - + if (items && items.length > 0) { // Calculate new absolute window start this.absoluteWindowStart = (targetPage - 1) * this.pageSize; - + // Replace the entire data window with new items this.items = items; - this.dataWindow = { + this.dataWindow = { start: 0, end: items.length }; - + this.totalItems = totalItems || 0; this.hasMore = hasMore; - + // Update the current page for future fetches const pageState = getCurrentPageState(); pageState.currentPage = targetPage + 1; pageState.hasMore = hasMore; - + // Update the spacer height and clear current rendered items this.updateSpacerHeight(); this.clearRenderedItems(); this.scheduleRender(); - + console.log(`Loaded ${items.length} items for window at absolute index ${this.absoluteWindowStart}`); } } catch (err) { @@ -581,37 +581,37 @@ export class VirtualScroller { async slideDataWindow() { // Skip if data windowing is disabled if (!this.enableDataWindowing) return; - + const { start, end } = this.getVisibleRange(); const windowStart = this.dataWindow.start; const windowEnd = this.dataWindow.end; const absoluteIndex = this.absoluteWindowStart + windowStart; - + // Calculate the midpoint of the visible range const visibleMidpoint = Math.floor((start + end) / 2); const absoluteMidpoint = this.absoluteWindowStart + visibleMidpoint; - + // Check if we're too close to the window edges const closeToStart = start - windowStart < this.windowPadding; const closeToEnd = windowEnd - end < this.windowPadding; - + // If we're close to either edge and have total items > window size if ((closeToStart || closeToEnd) && this.totalItems > this.windowSize) { // Calculate a new target index centered around the current viewport const halfWindow = Math.floor(this.windowSize / 2); const targetIndex = Math.max(0, absoluteMidpoint - halfWindow); - + // Don't fetch a 
new window if we're already showing items near the beginning if (targetIndex === 0 && this.absoluteWindowStart === 0) { return; } - + // Don't fetch if we're showing the end of the list and are near the end - if (this.absoluteWindowStart + this.items.length >= this.totalItems && + if (this.absoluteWindowStart + this.items.length >= this.totalItems && this.totalItems - end < halfWindow) { return; } - + // Fetch the new data window await this.fetchDataWindow(targetIndex); } @@ -620,18 +620,18 @@ export class VirtualScroller { reset() { // Remove all rendered items this.clearRenderedItems(); - + // Reset state this.items = []; this.totalItems = 0; this.hasMore = true; - + // Reset spacer height this.spacerElement.style.height = '0px'; - + // Remove any placeholder this.removeNoItemsPlaceholder(); - + // Schedule a re-render this.scheduleRender(); } @@ -640,21 +640,21 @@ export class VirtualScroller { // Remove event listeners this.scrollContainer.removeEventListener('scroll', this.scrollHandler); window.removeEventListener('resize', this.resizeHandler); - + // Clean up the resize observer if present if (this.resizeObserver) { this.resizeObserver.disconnect(); } - + // Remove rendered elements this.clearRenderedItems(); - + // Remove spacer this.spacerElement.remove(); - + // Remove virtual scroll class this.gridElement.classList.remove('virtual-scroll'); - + // Clear any pending timeout this.clearLoadingTimeout(); } @@ -663,19 +663,19 @@ export class VirtualScroller { showNoItemsPlaceholder(message) { // Remove any existing placeholder first this.removeNoItemsPlaceholder(); - + // Create placeholder message const placeholder = document.createElement('div'); placeholder.className = 'placeholder-message'; - + // Determine appropriate message based on page type let placeholderText = ''; - + if (message) { placeholderText = message; } else { const pageType = state.currentPageType; - + if (pageType === 'recipes') { placeholderText = `

No recipes found

@@ -698,10 +698,10 @@ export class VirtualScroller { `; } } - + placeholder.innerHTML = placeholderText; placeholder.id = 'virtualScrollPlaceholder'; - + // Append placeholder to the grid this.gridElement.appendChild(placeholder); } @@ -716,7 +716,7 @@ export class VirtualScroller { // Utility method for debouncing debounce(func, wait) { let timeout; - return function(...args) { + return function (...args) { const context = this; clearTimeout(timeout); timeout = setTimeout(() => func.apply(context, args), wait); @@ -727,55 +727,55 @@ export class VirtualScroller { disable() { // Detach scroll event listener this.scrollContainer.removeEventListener('scroll', this.scrollHandler); - + // Clear all rendered items from the DOM this.clearRenderedItems(); - + // Hide the spacer element if (this.spacerElement) { this.spacerElement.style.display = 'none'; } - + // Flag as disabled this.disabled = true; - + console.log('Virtual scroller disabled'); } // Add enable method to resume rendering and events enable() { if (!this.disabled) return; - + // Reattach scroll event listener this.scrollContainer.addEventListener('scroll', this.scrollHandler); - + // Check if spacer element exists in the DOM, if not, recreate it if (!this.spacerElement || !this.gridElement.contains(this.spacerElement)) { console.log('Spacer element not found in DOM, recreating it'); - + // Create a new spacer element this.spacerElement = document.createElement('div'); this.spacerElement.className = 'virtual-scroll-spacer'; this.spacerElement.style.width = '100%'; this.spacerElement.style.height = '0px'; this.spacerElement.style.pointerEvents = 'none'; - + // Append it to the grid this.gridElement.appendChild(this.spacerElement); - + // Update the spacer height this.updateSpacerHeight(); } else { // Show the spacer element if it exists this.spacerElement.style.display = 'block'; } - + // Flag as enabled this.disabled = false; - + // Re-render items this.scheduleRender(); - + console.log('Virtual scroller 
enabled'); } @@ -783,31 +783,30 @@ export class VirtualScroller { deepMerge(target, source) { if (!source || !target) return target; + // Initialize result with a copy of target const result = { ...target }; - // Only iterate over keys that exist in target - Object.keys(target).forEach(key => { - // Check if source has this key - if (source.hasOwnProperty(key)) { - const targetValue = target[key]; - const sourceValue = source[key]; + if (!source) return result; - // If both values are non-null objects and not arrays, merge recursively - if ( - targetValue !== null && - typeof targetValue === 'object' && - !Array.isArray(targetValue) && - sourceValue !== null && - typeof sourceValue === 'object' && - !Array.isArray(sourceValue) - ) { - result[key] = this.deepMerge(targetValue, sourceValue); - } else { - // For primitive types, arrays, or null, use the value from source - result[key] = sourceValue; - } + // Iterate over all keys in the source object + Object.keys(source).forEach(key => { + const targetValue = target[key]; + const sourceValue = source[key]; + + // If both values are non-null objects and not arrays, merge recursively + if ( + targetValue !== null && + typeof targetValue === 'object' && + !Array.isArray(targetValue) && + sourceValue !== null && + typeof sourceValue === 'object' && + !Array.isArray(sourceValue) + ) { + result[key] = this.deepMerge(targetValue || {}, sourceValue); + } else { + // Otherwise update with source value (includes primitives, arrays, and new keys) + result[key] = sourceValue; } - // If source does not have this key, keep the original value from target }); return result; @@ -828,43 +827,43 @@ export class VirtualScroller { // Update the item data using deep merge this.items[index] = this.deepMerge(this.items[index], updatedItem); - + // If the item is currently rendered, update its DOM representation if (this.renderedItems.has(index)) { const element = this.renderedItems.get(index); - + // Remove the old element element.remove(); 
this.renderedItems.delete(index); - + // Create and render the updated element const updatedElement = this.createItemElement(this.items[index], index); - + // Add update indicator visual effects updatedElement.classList.add('updated'); - + // Add temporary update tag const updateIndicator = document.createElement('div'); updateIndicator.className = 'update-indicator'; updateIndicator.textContent = 'Updated'; updatedElement.querySelector('.card-preview').appendChild(updateIndicator); - + // Automatically remove the updated class after animation completes setTimeout(() => { updatedElement.classList.remove('updated'); }, 1500); - + // Automatically remove the indicator after animation completes setTimeout(() => { if (updateIndicator && updateIndicator.parentNode) { updateIndicator.remove(); } }, 2000); - + this.renderedItems.set(index, updatedElement); this.gridElement.appendChild(updatedElement); } - + return true; } @@ -882,26 +881,26 @@ export class VirtualScroller { // Remove the item from the data array this.items.splice(index, 1); - + // Decrement total count this.totalItems = Math.max(0, this.totalItems - 1); - + // Remove the item from rendered items if it exists if (this.renderedItems.has(index)) { this.renderedItems.get(index).remove(); this.renderedItems.delete(index); } - + // Shift all rendered items with higher indices down by 1 const indicesToUpdate = []; - + // Collect all indices that need to be updated for (const [idx, element] of this.renderedItems.entries()) { if (idx > index) { indicesToUpdate.push(idx); } } - + // Update the elements and map entries for (const idx of indicesToUpdate) { const element = this.renderedItems.get(idx); @@ -909,14 +908,14 @@ export class VirtualScroller { // The item is now at the previous index this.renderedItems.set(idx - 1, element); } - + // Update the spacer height to reflect the new total this.updateSpacerHeight(); - + // Re-render to ensure proper layout this.clearRenderedItems(); this.scheduleRender(); - + 
console.log(`Removed item with file path ${filePath} from virtual scroller data`); return true; } @@ -929,28 +928,28 @@ export class VirtualScroller { return; // Ignore rapid repeated triggers } this.lastPageNavTime = now; - + const scrollContainer = this.scrollContainer; const viewportHeight = scrollContainer.clientHeight; - + // Calculate scroll distance (one viewport minus 10% overlap for context) const scrollDistance = viewportHeight * 0.9; - + // Determine the new scroll position const newScrollTop = scrollContainer.scrollTop + (direction === 'down' ? scrollDistance : -scrollDistance); - + // Remove any existing transition indicators this.removeExistingTransitionIndicator(); - + // Scroll to the new position with smooth animation scrollContainer.scrollTo({ top: newScrollTop, behavior: 'smooth' }); - + // Page transition indicator removed // this.showTransitionIndicator(); - + // Force render after scrolling setTimeout(() => this.renderItems(), 100); setTimeout(() => this.renderItems(), 300); @@ -966,25 +965,25 @@ export class VirtualScroller { scrollToTop() { this.removeExistingTransitionIndicator(); - + // Page transition indicator removed // this.showTransitionIndicator(); - + this.scrollContainer.scrollTo({ top: 0, behavior: 'smooth' }); - + // Force render after scrolling setTimeout(() => this.renderItems(), 100); } scrollToBottom() { this.removeExistingTransitionIndicator(); - + // Page transition indicator removed // this.showTransitionIndicator(); - + // Start loading all remaining pages to ensure content is available this.loadRemainingPages().then(() => { // After loading all content, scroll to the very bottom @@ -995,27 +994,27 @@ export class VirtualScroller { }); }); } - + // New method to load all remaining pages async loadRemainingPages() { // If we're already at the end or loading, don't proceed if (!this.hasMore || this.isLoading) return; - + console.log('Loading all remaining pages for End key navigation...'); - + // Keep loading pages until we 
reach the end while (this.hasMore && !this.isLoading) { await this.loadMoreItems(); - + // Force render after each page load this.renderItems(); - + // Small delay to prevent overwhelming the browser await new Promise(resolve => setTimeout(resolve, 50)); } - + console.log('Finished loading all pages'); - + // Final render to ensure all content is displayed this.renderItems(); } diff --git a/templates/base.html b/templates/base.html index 7086419c..8c9e0c7a 100644 --- a/templates/base.html +++ b/templates/base.html @@ -4,8 +4,8 @@ {% block title %}{{ t('header.appTitle') }}{% endblock %} - - + + {% block page_css %}{% endblock %} - + {% else %} {% block main_script %}{% endblock %} {% endif %} diff --git a/templates/checkpoints.html b/templates/checkpoints.html index ffa30aff..fd3498da 100644 --- a/templates/checkpoints.html +++ b/templates/checkpoints.html @@ -40,5 +40,5 @@ {% endblock %} {% block main_script %} - + {% endblock %} diff --git a/templates/components/context_menu.html b/templates/components/context_menu.html index 525f0008..b020ea12 100644 --- a/templates/components/context_menu.html +++ b/templates/components/context_menu.html @@ -102,6 +102,9 @@
{{ t('globalContextMenu.cleanupExampleImages.label') }}
+
+ {{ t('globalContextMenu.repairRecipes.label') }} +
@@ -110,7 +113,8 @@
-
{{ t('modals.contentRating.current') }}: {{ t('common.status.unknown') }}
+
{{ t('modals.contentRating.current') }}: {{ + t('common.status.unknown') }}
@@ -123,4 +127,4 @@
-
+
\ No newline at end of file diff --git a/templates/components/header.html b/templates/components/header.html index e2942c39..95c7c01e 100644 --- a/templates/components/header.html +++ b/templates/components/header.html @@ -8,52 +8,60 @@
{% set current_path = request.path %} {% if current_path.startswith('/loras/recipes') %} - {% set current_page = 'recipes' %} + {% set current_page = 'recipes' %} {% elif current_path.startswith('/checkpoints') %} - {% set current_page = 'checkpoints' %} + {% set current_page = 'checkpoints' %} {% elif current_path.startswith('/embeddings') %} - {% set current_page = 'embeddings' %} + {% set current_page = 'embeddings' %} {% elif current_path.startswith('/statistics') %} - {% set current_page = 'statistics' %} + {% set current_page = 'statistics' %} {% else %} - {% set current_page = 'loras' %} + {% set current_page = 'loras' %} {% endif %} {% set search_disabled = current_page == 'statistics' %} - {% set search_placeholder_key = 'header.search.notAvailable' if search_disabled else 'header.search.placeholders.' ~ current_page %} + {% set search_placeholder_key = 'header.search.notAvailable' if search_disabled else 'header.search.placeholders.' ~ + current_page %} {% set header_search_class = 'header-search disabled' if search_disabled else 'header-search' %} - +
- + - -
- +
@@ -97,6 +105,7 @@
{{ t('header.search.filters.tags') }}
{{ t('header.search.filters.loraName') }}
{{ t('header.search.filters.loraModel') }}
+
{{ t('header.search.filters.prompt') }}
{% elif request.path == '/checkpoints' %}
{{ t('header.search.filters.filename') }}
{{ t('header.search.filters.modelname') }}
@@ -165,4 +174,4 @@ {{ t('header.filter.clearAll') }}
-
+
\ No newline at end of file diff --git a/templates/embeddings.html b/templates/embeddings.html index badf12e3..de8b807a 100644 --- a/templates/embeddings.html +++ b/templates/embeddings.html @@ -40,5 +40,5 @@ {% endblock %} {% block main_script %} - + {% endblock %} diff --git a/templates/loras.html b/templates/loras.html index 3ece6a10..5ede68d9 100644 --- a/templates/loras.html +++ b/templates/loras.html @@ -24,6 +24,6 @@ {% block main_script %} {% if not is_initializing %} - + {% endif %} {% endblock %} \ No newline at end of file diff --git a/templates/recipes.html b/templates/recipes.html index 58003607..7f930874 100644 --- a/templates/recipes.html +++ b/templates/recipes.html @@ -4,9 +4,9 @@ {% block page_id %}recipes{% endblock %} {% block page_css %} - - - + + + {% endblock %} {% block additional_components %} @@ -15,17 +15,29 @@ {% endblock %} @@ -34,55 +46,131 @@ {% block init_check_url %}/api/recipes?page=1&page_size=1{% endblock %} {% block content %} - -
+ +
+
+
+ +
- +
- + +
+
+
- + +
+
+
-
- -