diff --git a/py/routes/api_routes.py b/py/routes/api_routes.py index 6b4bc05c..2f21ade2 100644 --- a/py/routes/api_routes.py +++ b/py/routes/api_routes.py @@ -84,6 +84,9 @@ class ApiRoutes: app.router.add_get('/api/loras/find-duplicates', routes.find_duplicate_loras) app.router.add_get('/api/loras/find-filename-conflicts', routes.find_filename_conflicts) + # Add new endpoint for bulk deleting loras + app.router.add_post('/api/loras/bulk-delete', routes.bulk_delete_loras) + async def delete_model(self, request: web.Request) -> web.Response: """Handle model deletion request""" if self.scanner is None: @@ -1267,3 +1270,18 @@ class ApiRoutes: "success": False, "error": str(e) }, status=500) + + async def bulk_delete_loras(self, request: web.Request) -> web.Response: + """Handle bulk deletion of lora models""" + try: + if self.scanner is None: + self.scanner = await ServiceRegistry.get_lora_scanner() + + return await ModelRouteUtils.handle_bulk_delete_models(request, self.scanner) + + except Exception as e: + logger.error(f"Error in bulk delete loras: {e}", exc_info=True) + return web.json_response({ + 'success': False, + 'error': str(e) + }, status=500) diff --git a/py/services/model_hash_index.py b/py/services/model_hash_index.py index da3b4060..d62dc73a 100644 --- a/py/services/model_hash_index.py +++ b/py/services/model_hash_index.py @@ -2,7 +2,7 @@ from typing import Dict, Optional, Set, List import os class ModelHashIndex: - """Index for looking up models by hash or path""" + """Index for looking up models by hash or filename""" def __init__(self): self._hash_to_path: Dict[str, str] = {} @@ -66,36 +66,123 @@ class ModelHashIndex: def remove_by_path(self, file_path: str) -> None: """Remove entry by file path""" filename = self._get_filename_from_path(file_path) - if filename in self._filename_to_hash: - hash_val = self._filename_to_hash[filename] - if hash_val in self._hash_to_path: - del self._hash_to_path[hash_val] - del self._filename_to_hash[filename] + 
hash_val = None + + # Find the hash for this file path + for h, p in self._hash_to_path.items(): + if p == file_path: + hash_val = h + break + + # If we didn't find a hash, nothing to do + if not hash_val: + return + + # Update duplicates tracking for hash + if hash_val in self._duplicate_hashes: + # Remove the current path from duplicates + self._duplicate_hashes[hash_val] = [p for p in self._duplicate_hashes[hash_val] if p != file_path] - # Also clean up from duplicates tracking - if filename in self._duplicate_filenames: - self._duplicate_filenames[filename] = [p for p in self._duplicate_filenames[filename] if p != file_path] - if not self._duplicate_filenames[filename]: - del self._duplicate_filenames[filename] - - if hash_val in self._duplicate_hashes: - self._duplicate_hashes[hash_val] = [p for p in self._duplicate_hashes[hash_val] if p != file_path] - if not self._duplicate_hashes[hash_val]: + # Update or remove hash mapping based on remaining duplicates + if len(self._duplicate_hashes[hash_val]) > 0: + # Replace with one of the remaining paths + new_path = self._duplicate_hashes[hash_val][0] + new_filename = self._get_filename_from_path(new_path) + + # Update hash-to-path mapping + self._hash_to_path[hash_val] = new_path + + # IMPORTANT: Update filename-to-hash mapping for consistency + # Remove old filename mapping if it points to this hash + if filename in self._filename_to_hash and self._filename_to_hash[filename] == hash_val: + del self._filename_to_hash[filename] + + # Add new filename mapping + self._filename_to_hash[new_filename] = hash_val + + # If only one duplicate left, remove from duplicates tracking + if len(self._duplicate_hashes[hash_val]) == 1: del self._duplicate_hashes[hash_val] + else: + # No duplicates left, remove hash entry completely + del self._duplicate_hashes[hash_val] + del self._hash_to_path[hash_val] + + # Remove corresponding filename entry if it points to this hash + if filename in self._filename_to_hash and 
self._filename_to_hash[filename] == hash_val: + del self._filename_to_hash[filename] + else: + # No duplicates, simply remove the hash entry + del self._hash_to_path[hash_val] + + # Remove corresponding filename entry if it points to this hash + if filename in self._filename_to_hash and self._filename_to_hash[filename] == hash_val: + del self._filename_to_hash[filename] + + # Update duplicates tracking for filename + if filename in self._duplicate_filenames: + # Remove the current path from duplicates + self._duplicate_filenames[filename] = [p for p in self._duplicate_filenames[filename] if p != file_path] + + # Update or remove filename mapping based on remaining duplicates + if len(self._duplicate_filenames[filename]) > 0: + # Get the hash for the first remaining duplicate path + first_dup_path = self._duplicate_filenames[filename][0] + first_dup_hash = None + for h, p in self._hash_to_path.items(): + if p == first_dup_path: + first_dup_hash = h + break + + # Update the filename to hash mapping if we found a hash + if first_dup_hash: + self._filename_to_hash[filename] = first_dup_hash + + # If only one duplicate left, remove from duplicates tracking + if len(self._duplicate_filenames[filename]) == 1: + del self._duplicate_filenames[filename] + else: + # No duplicates left, remove filename entry completely + del self._duplicate_filenames[filename] + if filename in self._filename_to_hash: + del self._filename_to_hash[filename] def remove_by_hash(self, sha256: str) -> None: """Remove entry by hash""" sha256 = sha256.lower() - if sha256 in self._hash_to_path: - path = self._hash_to_path[sha256] - filename = self._get_filename_from_path(path) - if filename in self._filename_to_hash: - del self._filename_to_hash[filename] - del self._hash_to_path[sha256] + if sha256 not in self._hash_to_path: + return + + # Get the path and filename + path = self._hash_to_path[sha256] + filename = self._get_filename_from_path(path) + + # Get all paths for this hash (including 
duplicates) + paths_to_remove = [path] + if sha256 in self._duplicate_hashes: + paths_to_remove.extend(self._duplicate_hashes[sha256]) + del self._duplicate_hashes[sha256] + + # Remove hash-to-path mapping + del self._hash_to_path[sha256] + + # Update filename-to-hash and duplicate filenames for all paths + for path_to_remove in paths_to_remove: + fname = self._get_filename_from_path(path_to_remove) - # Clean up from duplicates tracking - if sha256 in self._duplicate_hashes: - del self._duplicate_hashes[sha256] + # If this filename maps to the hash we're removing, remove it + if fname in self._filename_to_hash and self._filename_to_hash[fname] == sha256: + del self._filename_to_hash[fname] + + # Update duplicate filenames tracking + if fname in self._duplicate_filenames: + self._duplicate_filenames[fname] = [p for p in self._duplicate_filenames[fname] if p != path_to_remove] + + if not self._duplicate_filenames[fname]: + del self._duplicate_filenames[fname] + elif len(self._duplicate_filenames[fname]) == 1: + # If only one entry remains, it's no longer a duplicate + del self._duplicate_filenames[fname] def has_hash(self, sha256: str) -> bool: """Check if hash exists in index""" diff --git a/py/services/model_scanner.py b/py/services/model_scanner.py index d2cc992e..29058136 100644 --- a/py/services/model_scanner.py +++ b/py/services/model_scanner.py @@ -133,6 +133,7 @@ class ModelScanner: os.rename(temp_path, cache_path) logger.info(f"Saved {self.model_type} cache with {len(self._cache.raw_data)} models to {cache_path}") + logger.info(f"Hash index stats - hash_to_path: {len(self._hash_index._hash_to_path)}, filename_to_hash: {len(self._hash_index._filename_to_hash)}, duplicate_hashes: {len(self._hash_index._duplicate_hashes)}, duplicate_filenames: {len(self._hash_index._duplicate_filenames)}") return True except Exception as e: logger.error(f"Error saving {self.model_type} cache to disk: {e}") @@ -1219,3 +1220,166 @@ class ModelScanner: # Save updated cache to disk 
await self._save_cache_to_disk() return updated + + async def bulk_delete_models(self, file_paths: List[str]) -> Dict: + """Delete multiple models and update cache in a batch operation + + Args: + file_paths: List of file paths to delete + + Returns: + Dict containing results of the operation + """ + try: + if not file_paths: + return { + 'success': False, + 'error': 'No file paths provided for deletion', + 'results': [] + } + + # Get the file monitor + file_monitor = getattr(self, 'file_monitor', None) + + # Keep track of success and failures + results = [] + total_deleted = 0 + cache_updated = False + + # Get cache data + cache = await self.get_cached_data() + + # Track deleted models to update cache once + deleted_models = [] + + for file_path in file_paths: + try: + target_dir = os.path.dirname(file_path) + file_name = os.path.splitext(os.path.basename(file_path))[0] + + # Delete all associated files for the model + from ..utils.routes_common import ModelRouteUtils + deleted_files = await ModelRouteUtils.delete_model_files( + target_dir, + file_name, + file_monitor + ) + + if deleted_files: + deleted_models.append(file_path) + results.append({ + 'file_path': file_path, + 'success': True, + 'deleted_files': deleted_files + }) + total_deleted += 1 + else: + results.append({ + 'file_path': file_path, + 'success': False, + 'error': 'No files deleted' + }) + except Exception as e: + logger.error(f"Error deleting file {file_path}: {e}") + results.append({ + 'file_path': file_path, + 'success': False, + 'error': str(e) + }) + + # Batch update cache if any models were deleted + if deleted_models: + # Update the cache in a batch operation + cache_updated = await self._batch_update_cache_for_deleted_models(deleted_models) + + return { + 'success': True, + 'total_deleted': total_deleted, + 'total_attempted': len(file_paths), + 'cache_updated': cache_updated, + 'results': results + } + + except Exception as e: + logger.error(f"Error in bulk delete: {e}", exc_info=True) + 
return { + 'success': False, + 'error': str(e), + 'results': [] + } + + async def _batch_update_cache_for_deleted_models(self, file_paths: List[str]) -> bool: + """Update cache after multiple models have been deleted + + Args: + file_paths: List of file paths that were deleted + + Returns: + bool: True if cache was updated and saved successfully + """ + if not file_paths or self._cache is None: + return False + + try: + # Get all models that need to be removed from cache + models_to_remove = [item for item in self._cache.raw_data if item['file_path'] in file_paths] + + if not models_to_remove: + return False + + # Update tag counts + for model in models_to_remove: + for tag in model.get('tags', []): + if tag in self._tags_count: + self._tags_count[tag] = max(0, self._tags_count[tag] - 1) + if self._tags_count[tag] == 0: + del self._tags_count[tag] + + # Update hash index + for model in models_to_remove: + file_path = model['file_path'] + if hasattr(self, '_hash_index') and self._hash_index: + # Get the hash and filename before removal for duplicate checking + file_name = os.path.splitext(os.path.basename(file_path))[0] + hash_val = model.get('sha256', '').lower() + + # Remove from hash index + self._hash_index.remove_by_path(file_path) + + # Check and clean up duplicates + self._cleanup_duplicates_after_removal(hash_val, file_name) + + # Update cache data + self._cache.raw_data = [item for item in self._cache.raw_data if item['file_path'] not in file_paths] + + # Resort cache + await self._cache.resort() + + # Save updated cache to disk + await self._save_cache_to_disk() + + return True + + except Exception as e: + logger.error(f"Error updating cache after bulk delete: {e}", exc_info=True) + return False + + def _cleanup_duplicates_after_removal(self, hash_val: str, file_name: str) -> None: + """Clean up duplicate entries in hash index after removing a model + + Args: + hash_val: SHA256 hash of the removed model + file_name: File name of the removed model without 
extension + """ + if not hash_val or not file_name or not hasattr(self, '_hash_index'): + return + + # Clean up hash duplicates if only 0 or 1 entries remain + if hash_val in self._hash_index._duplicate_hashes: + if len(self._hash_index._duplicate_hashes[hash_val]) <= 1: + del self._hash_index._duplicate_hashes[hash_val] + + # Clean up filename duplicates if only 0 or 1 entries remain + if file_name in self._hash_index._duplicate_filenames: + if len(self._hash_index._duplicate_filenames[file_name]) <= 1: + del self._hash_index._duplicate_filenames[file_name] diff --git a/py/utils/routes_common.py b/py/utils/routes_common.py index 6d5c56f2..2e96bf50 100644 --- a/py/utils/routes_common.py +++ b/py/utils/routes_common.py @@ -62,7 +62,7 @@ class ModelRouteUtils: # Update preview if needed if not local_metadata.get('preview_url') or not os.path.exists(local_metadata['preview_url']): first_preview = next((img for img in civitai_metadata.get('images', [])), None) - if first_preview: + if (first_preview): # Determine if content is video or image is_video = first_preview['type'] == 'video' @@ -571,3 +571,42 @@ class ModelRouteUtils: logger.error(f"Error downloading {model_type}: {error_message}") return web.Response(status=500, text=error_message) + + @staticmethod + async def handle_bulk_delete_models(request: web.Request, scanner) -> web.Response: + """Handle bulk deletion of models + + Args: + request: The aiohttp request + scanner: The model scanner instance with cache management methods + + Returns: + web.Response: The HTTP response + """ + try: + data = await request.json() + file_paths = data.get('file_paths', []) + + if not file_paths: + return web.json_response({ + 'success': False, + 'error': 'No file paths provided for deletion' + }, status=400) + + # Use the scanner's bulk delete method to handle all cache and file operations + result = await scanner.bulk_delete_models(file_paths) + + return web.json_response({ + 'success': result.get('success', False), + 
'total_deleted': result.get('total_deleted', 0), + 'total_attempted': result.get('total_attempted', len(file_paths)), + 'results': result.get('results', []) + }) + + except Exception as e: + logger.error(f"Error in bulk delete: {e}", exc_info=True) + return web.json_response({ + 'success': False, + 'error': str(e) + }, status=500) + \ No newline at end of file diff --git a/static/css/base.css b/static/css/base.css index 5dbf15f0..0f0f1a8c 100644 --- a/static/css/base.css +++ b/static/css/base.css @@ -32,13 +32,21 @@ html, body { --card-bg: #ffffff; --border-color: #e0e0e0; - /* Color System */ - --lora-accent: oklch(68% 0.28 256); + /* Color Components */ + --lora-accent-l: 68%; + --lora-accent-c: 0.28; + --lora-accent-h: 256; + --lora-warning-l: 75%; + --lora-warning-c: 0.25; + --lora-warning-h: 80; + + /* Composed Colors */ + --lora-accent: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h)); --lora-surface: oklch(100% 0 0 / 0.98); --lora-border: oklch(90% 0.02 256 / 0.15); --lora-text: oklch(95% 0.02 256); --lora-error: oklch(75% 0.32 29); - --lora-warning: oklch(75% 0.25 80); /* Modified to be used with oklch() */ + --lora-warning: oklch(var(--lora-warning-l) var(--lora-warning-c) var(--lora-warning-h)); /* Modified to be used with oklch() */ /* Spacing Scale */ --space-1: calc(8px * 1); diff --git a/static/css/components/duplicates.css b/static/css/components/duplicates.css index 79792c24..6085db4a 100644 --- a/static/css/components/duplicates.css +++ b/static/css/components/duplicates.css @@ -2,16 +2,18 @@ /* Duplicates banner */ .duplicates-banner { - position: relative; /* Changed from sticky to relative */ + position: sticky; /* Keep the sticky position */ + top: var(--space-1); width: 100%; - background-color: var(--card-bg); + background-color: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.1); /* Use accent color with low opacity */ color: var(--text-color); - border-bottom: 1px solid var(--border-color); + 
border-top: 1px solid oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.3); /* Add top border with accent color */ + border-bottom: 1px solid oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.4); /* Make bottom border stronger */ z-index: var(--z-overlay); - padding: 12px 0; /* Removed horizontal padding */ - box-shadow: 0 2px 8px rgba(0, 0, 0, 0.15); + padding: 12px 0; + box-shadow: 0 3px 10px rgba(0, 0, 0, 0.2); /* Stronger shadow */ transition: all 0.3s ease; - margin-bottom: 20px; /* Add margin to create space below the banner */ + margin-bottom: 20px; } .duplicates-banner .banner-content { @@ -38,7 +40,7 @@ .duplicates-banner i.fa-exclamation-triangle { font-size: 18px; - color: oklch(var(--lora-warning)); + color: oklch(var(--lora-warning-l) var(--lora-warning-c) var(--lora-warning-h)); } .duplicates-banner .banner-actions { @@ -66,7 +68,7 @@ } .duplicates-banner button:hover { - border-color: var(--lora-accent); + border-color: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h)); background: var(--bg-color); transform: translateY(-1px); box-shadow: 0 3px 5px rgba(0, 0, 0, 0.08); } @@ -91,23 +93,25 @@ /* Duplicate groups */ .duplicate-group { position: relative; - border: 2px solid oklch(var(--lora-warning)); + border: 2px solid oklch(var(--lora-warning-l) var(--lora-warning-c) var(--lora-warning-h)); border-radius: var(--border-radius-base); padding: 16px; margin-bottom: 24px; background: var(--card-bg); + box-shadow: 0 2px 6px rgba(0, 0, 0, 0.12); /* Add subtle shadow to groups */ } .duplicate-group-header { background-color: var(--bg-color); color: var(--text-color); border: 1px solid var(--border-color); - padding: 8px 16px; + padding: 10px 16px; /* Slightly increased padding */ border-radius: var(--border-radius-xs); margin-bottom: 16px; display: flex; justify-content: space-between; align-items: center; + border-left: 4px solid oklch(var(--lora-warning-l) var(--lora-warning-c) 
var(--lora-warning-h)); /* Add accent border on the left */ } .duplicate-group-header span:last-child { @@ -135,7 +139,7 @@ } .duplicate-group-header button:hover { - border-color: var(--lora-accent); + border-color: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h)); background: var(--bg-color); transform: translateY(-1px); box-shadow: 0 3px 5px rgba(0, 0, 0, 0.08); } @@ -190,7 +194,7 @@ } .group-toggle-btn:hover { - border-color: var(--lora-accent); + border-color: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h)); transform: translateY(-1px); box-shadow: 0 3px 5px rgba(0, 0, 0, 0.08); } @@ -202,16 +206,16 @@ } .lora-card.duplicate:hover { - border-color: var(--lora-accent); + border-color: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h)); } .lora-card.duplicate.latest { border-style: solid; - border-color: oklch(var(--lora-warning)); + border-color: oklch(var(--lora-warning-l) var(--lora-warning-c) var(--lora-warning-h)); } .lora-card.duplicate-selected { - border: 2px solid oklch(var(--lora-accent)); + border: 2px solid oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h)); box-shadow: 0 0 8px rgba(0, 0, 0, 0.2); } @@ -231,7 +235,7 @@ position: absolute; top: 10px; left: 10px; - background: oklch(var(--lora-accent)); + background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h)); color: white; font-size: 12px; padding: 2px 6px; @@ -239,6 +243,44 @@ z-index: 5; } +/* Model tooltip for duplicates mode */ +.model-tooltip { + position: absolute; + background-color: var(--card-bg); + border: 1px solid var(--border-color); + border-radius: var(--border-radius-sm); + box-shadow: 0 2px 10px rgba(0,0,0,0.2); + padding: 10px; + z-index: 1000; + max-width: 350px; + min-width: 250px; + color: var(--text-color); + font-size: 0.9em; + pointer-events: none; /* Don't block mouse events */ +} + +.model-tooltip .tooltip-header { + font-weight: bold; + font-size: 1.1em; + margin-bottom: 8px; + 
padding-bottom: 5px; + border-bottom: 1px solid var(--border-color); + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} + +.model-tooltip .tooltip-info div { + margin-bottom: 4px; + display: flex; + flex-wrap: wrap; +} + +.model-tooltip .tooltip-info div strong { + margin-right: 5px; + min-width: 70px; +} + /* Responsive adjustments */ @media (max-width: 768px) { .duplicates-banner .banner-content { @@ -270,3 +312,13 @@ flex: 1; } } + +/* In dark mode, add additional distinction */ +html[data-theme="dark"] .duplicates-banner { + box-shadow: 0 3px 12px rgba(0, 0, 0, 0.4); /* Stronger shadow in dark mode */ + background-color: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.15); /* Slightly stronger background in dark mode */ +} + +html[data-theme="dark"] .duplicate-group { + box-shadow: 0 2px 8px rgba(0, 0, 0, 0.25); /* Stronger shadow in dark mode */ +} diff --git a/static/js/checkpoints.js b/static/js/checkpoints.js index 7d0ebd6b..f2ccd20e 100644 --- a/static/js/checkpoints.js +++ b/static/js/checkpoints.js @@ -4,6 +4,7 @@ import { createPageControls } from './components/controls/index.js'; import { loadMoreCheckpoints } from './api/checkpointApi.js'; import { CheckpointDownloadManager } from './managers/CheckpointDownloadManager.js'; import { CheckpointContextMenu } from './components/ContextMenu/index.js'; +import { ModelDuplicatesManager } from './components/ModelDuplicatesManager.js'; // Initialize the Checkpoints page class CheckpointsPageManager { @@ -14,6 +15,9 @@ class CheckpointsPageManager { // Initialize checkpoint download manager window.checkpointDownloadManager = new CheckpointDownloadManager(); + // Initialize the ModelDuplicatesManager + this.duplicatesManager = new ModelDuplicatesManager(this); + // Expose only necessary functions to global scope this._exposeRequiredGlobalFunctions(); } @@ -29,6 +33,9 @@ class CheckpointsPageManager { window.checkpointManager = { loadCheckpoints: (reset) => 
loadMoreCheckpoints(reset) }; + + // Expose duplicates manager + window.modelDuplicatesManager = this.duplicatesManager; } async initialize() { diff --git a/static/js/components/DuplicatesManager.js b/static/js/components/DuplicatesManager.js index 7387b026..668ed713 100644 --- a/static/js/components/DuplicatesManager.js +++ b/static/js/components/DuplicatesManager.js @@ -14,8 +14,6 @@ export class DuplicatesManager { async findDuplicates() { try { - document.body.classList.add('loading'); - const response = await fetch('/api/recipes/find-duplicates'); if (!response.ok) { throw new Error('Failed to find duplicates'); @@ -39,8 +37,6 @@ export class DuplicatesManager { console.error('Error finding duplicates:', error); showToast('Failed to find duplicates: ' + error.message, 'error'); return false; - } finally { - document.body.classList.remove('loading'); } } @@ -234,7 +230,7 @@ export class DuplicatesManager { } updateSelectedCount() { - const selectedCountEl = document.getElementById('selectedCount'); + const selectedCountEl = document.getElementById('duplicatesSelectedCount'); if (selectedCountEl) { selectedCountEl.textContent = this.selectedForDeletion.size; } @@ -358,9 +354,7 @@ export class DuplicatesManager { // Add new method to execute deletion after confirmation async confirmDeleteDuplicates() { - try { - document.body.classList.add('loading'); - + try { // Close the modal modalManager.closeModal('duplicateDeleteModal'); @@ -395,8 +389,6 @@ export class DuplicatesManager { } catch (error) { console.error('Error deleting recipes:', error); showToast('Failed to delete recipes: ' + error.message, 'error'); - } finally { - document.body.classList.remove('loading'); } } } diff --git a/static/js/components/LoraCard.js b/static/js/components/LoraCard.js index e1c53375..ab01ee8e 100644 --- a/static/js/components/LoraCard.js +++ b/static/js/components/LoraCard.js @@ -1,5 +1,5 @@ import { showToast, openCivitai, copyToClipboard, sendLoraToWorkflow, 
openExampleImagesFolder } from '../utils/uiHelpers.js'; -import { state } from '../state/index.js'; +import { state, getCurrentPageState } from '../state/index.js'; import { showLoraModal } from './loraModal/index.js'; import { bulkManager } from '../managers/BulkManager.js'; import { NSFW_LEVELS } from '../utils/constants.js'; @@ -76,9 +76,13 @@ function handleLoraCardEvent(event) { } // If no specific element was clicked, handle the card click (show modal or toggle selection) + const pageState = getCurrentPageState(); if (state.bulkMode) { // Toggle selection using the bulk manager bulkManager.toggleCardSelection(card); + } else if (pageState && pageState.duplicatesMode) { + // In duplicates mode, don't open modal when clicking cards + return; } else { // Normal behavior - show modal const loraMeta = { diff --git a/static/js/components/ModelDuplicatesManager.js b/static/js/components/ModelDuplicatesManager.js new file mode 100644 index 00000000..8b0dc549 --- /dev/null +++ b/static/js/components/ModelDuplicatesManager.js @@ -0,0 +1,414 @@ +// Model Duplicates Manager Component for LoRAs and Checkpoints +import { showToast } from '../utils/uiHelpers.js'; +import { state, getCurrentPageState } from '../state/index.js'; +import { initializeInfiniteScroll } from '../utils/infiniteScroll.js'; +import { formatDate } from '../utils/formatters.js'; + +export class ModelDuplicatesManager { + constructor(pageManager) { + this.pageManager = pageManager; + this.duplicateGroups = []; + this.inDuplicateMode = false; + this.selectedForDeletion = new Set(); + this.modelType = 'loras'; // Default to loras, could be 'checkpoints' + + // Bind methods + this.renderModelCard = this.renderModelCard.bind(this); + this.renderTooltip = this.renderTooltip.bind(this); + } + + async findDuplicates() { + try { + // Determine API endpoint based on model type + const endpoint = `/api/${this.modelType}/find-duplicates`; + + const response = await fetch(endpoint); + if (!response.ok) { + throw 
new Error(`Failed to find duplicates: ${response.statusText}`); + } + + const data = await response.json(); + if (!data.success) { + throw new Error(data.error || 'Unknown error finding duplicates'); + } + + this.duplicateGroups = data.duplicates || []; + + if (this.duplicateGroups.length === 0) { + showToast('No duplicate models found', 'info'); + return false; + } + + this.enterDuplicateMode(); + return true; + } catch (error) { + console.error('Error finding duplicates:', error); + showToast('Failed to find duplicates: ' + error.message, 'error'); + return false; + } + } + + enterDuplicateMode() { + this.inDuplicateMode = true; + this.selectedForDeletion.clear(); + + // Update state + const pageState = getCurrentPageState(); + pageState.duplicatesMode = true; + + // Show duplicates banner + const banner = document.getElementById('duplicatesBanner'); + const countSpan = document.getElementById('duplicatesCount'); + + if (banner && countSpan) { + countSpan.textContent = `Found ${this.duplicateGroups.length} duplicate group${this.duplicateGroups.length !== 1 ? 's' : ''}`; + banner.style.display = 'block'; + } + + // Disable virtual scrolling if active + if (state.virtualScroller) { + state.virtualScroller.disable(); + } + + // Add duplicate-mode class to the body + document.body.classList.add('duplicate-mode'); + + // Render duplicate groups + this.renderDuplicateGroups(); + + // Update selected count + this.updateSelectedCount(); + } + + exitDuplicateMode() { + this.inDuplicateMode = false; + this.selectedForDeletion.clear(); + + // Update state + const pageState = getCurrentPageState(); + pageState.duplicatesMode = false; + + // Instead of trying to restore the virtual scroller, + // simply redirect to reload the page + // TODO: While this is a workaround rather than a deep fix, it's a pragmatic solution that will immediately resolve the issue for users. We can investigate the underlying cause more thoroughly later when there's time for more extensive debugging. 
+ window.location.href = '/loras'; + } + + renderDuplicateGroups() { + const modelGrid = document.getElementById('loraGrid'); + if (!modelGrid) return; + + // Clear existing content + modelGrid.innerHTML = ''; + + // Render each duplicate group + this.duplicateGroups.forEach((group, groupIndex) => { + const groupDiv = document.createElement('div'); + groupDiv.className = 'duplicate-group'; + groupDiv.dataset.hash = group.hash; + + // Create group header + const header = document.createElement('div'); + header.className = 'duplicate-group-header'; + header.innerHTML = ` + Duplicate Group #${groupIndex + 1} (${group.models.length} models with same hash: ${group.hash}) + + + + `; + groupDiv.appendChild(header); + + // Create cards container + const cardsDiv = document.createElement('div'); + cardsDiv.className = 'card-group-container'; + + // Add scrollable class if there are many models in the group + if (group.models.length > 6) { + cardsDiv.classList.add('scrollable'); + + // Add expand/collapse toggle button + const toggleBtn = document.createElement('button'); + toggleBtn.className = 'group-toggle-btn'; + toggleBtn.innerHTML = ''; + toggleBtn.title = "Expand/Collapse"; + toggleBtn.onclick = function() { + cardsDiv.classList.toggle('scrollable'); + this.innerHTML = cardsDiv.classList.contains('scrollable') ? 
+ '' : + ''; + }; + groupDiv.appendChild(toggleBtn); + } + + // Add all model cards in this group + group.models.forEach(model => { + const card = this.renderModelCard(model, group.hash); + cardsDiv.appendChild(card); + }); + + groupDiv.appendChild(cardsDiv); + modelGrid.appendChild(groupDiv); + }); + } + + renderModelCard(model, groupHash) { + // Create basic card structure + const card = document.createElement('div'); + card.className = 'lora-card duplicate'; + card.dataset.hash = model.sha256; + card.dataset.filePath = model.file_path; + + // Create card content using structure similar to createLoraCard in LoraCard.js + const previewContainer = document.createElement('div'); + previewContainer.className = 'card-preview'; + + // Determine if preview is a video + const isVideo = model.preview_url && model.preview_url.endsWith('.mp4'); + let preview; + + if (isVideo) { + // Create video element for MP4 previews + preview = document.createElement('video'); + preview.loading = 'lazy'; + preview.controls = true; + preview.muted = true; + preview.loop = true; + + const source = document.createElement('source'); + source.src = model.preview_url; + source.type = 'video/mp4'; + preview.appendChild(source); + } else { + // Create image element for standard previews + preview = document.createElement('img'); + preview.loading = 'lazy'; + preview.alt = model.model_name; + + if (model.preview_url) { + preview.src = model.preview_url; + } else { + // Use placeholder + preview.src = '/loras_static/images/no-preview.png'; + } + } + + // Add NSFW blur if needed + if (model.preview_nsfw_level > 0) { + preview.classList.add('nsfw'); + } + + previewContainer.appendChild(preview); + + // Move tooltip listeners to the preview container for consistent behavior + // regardless of whether the preview is an image or video + previewContainer.addEventListener('mouseover', () => this.renderTooltip(card, model)); + previewContainer.addEventListener('mouseout', () => { + const tooltip = 
document.querySelector('.model-tooltip'); + if (tooltip) tooltip.remove(); + }); + + // Add card footer with just model name + const footer = document.createElement('div'); + footer.className = 'card-footer'; + + const modelInfo = document.createElement('div'); + modelInfo.className = 'model-info'; + + const modelName = document.createElement('span'); + modelName.className = 'model-name'; + modelName.textContent = model.model_name; + modelInfo.appendChild(modelName); + + footer.appendChild(modelInfo); + previewContainer.appendChild(footer); + card.appendChild(previewContainer); + + // Add selection checkbox + const checkbox = document.createElement('input'); + checkbox.type = 'checkbox'; + checkbox.className = 'selector-checkbox'; + checkbox.dataset.filePath = model.file_path; + checkbox.dataset.groupHash = groupHash; + + // Check if already selected + if (this.selectedForDeletion.has(model.file_path)) { + checkbox.checked = true; + card.classList.add('duplicate-selected'); + } + + // Add change event to checkbox + checkbox.addEventListener('change', (e) => { + e.stopPropagation(); + this.toggleCardSelection(model.file_path, card, checkbox); + }); + + // Make the entire card clickable for selection + card.addEventListener('click', (e) => { + // Don't toggle if clicking on the checkbox directly or card actions + if (e.target === checkbox || e.target.closest('.card-actions')) { + return; + } + + // Toggle checkbox state + checkbox.checked = !checkbox.checked; + this.toggleCardSelection(model.file_path, card, checkbox); + }); + + card.appendChild(checkbox); + return card; + } + + renderTooltip(card, model) { + // Remove any existing tooltips + const existingTooltip = document.querySelector('.model-tooltip'); + if (existingTooltip) existingTooltip.remove(); + + // Create tooltip + const tooltip = document.createElement('div'); + tooltip.className = 'model-tooltip'; + + // Add model information to tooltip + tooltip.innerHTML = ` +