feat: implement task cancellation for model scanning and bulk operations

This commit is contained in:
Will Miao
2026-01-02 18:48:28 +08:00
parent 953117efa1
commit 837c32c42f
24 changed files with 505 additions and 219 deletions

View File

@@ -1370,6 +1370,7 @@
"verificationCompleteSuccess": "Verifikation abgeschlossen. Alle Dateien sind bestätigte Duplikate.",
"verificationFailed": "Fehler beim Verifizieren der Hashes: {message}",
"noTagsToAdd": "Keine Tags zum Hinzufügen",
"bulkTagsUpdating": "Tags für {count} Modell(e) werden aktualisiert...",
"tagsAddedSuccessfully": "Erfolgreich {tagCount} Tag(s) zu {count} {type}(s) hinzugefügt",
"tagsReplacedSuccessfully": "Tags für {count} {type}(s) erfolgreich durch {tagCount} Tag(s) ersetzt",
"tagsAddFailed": "Fehler beim Hinzufügen von Tags zu {count} Modell(en)",
@@ -1490,6 +1491,8 @@
"metadataRefreshed": "Metadaten erfolgreich aktualisiert",
"metadataRefreshFailed": "Fehler beim Aktualisieren der Metadaten: {message}",
"metadataUpdateComplete": "Metadaten-Update abgeschlossen",
"operationCancelled": "Vorgang vom Benutzer abgebrochen",
"operationCancelledPartial": "Vorgang abgebrochen. {success} Elemente verarbeitet.",
"metadataFetchFailed": "Fehler beim Abrufen der Metadaten: {message}",
"bulkMetadataCompleteAll": "Alle {count} {type}s erfolgreich aktualisiert",
"bulkMetadataCompletePartial": "{success} von {total} {type}s aktualisiert",

View File

@@ -1370,6 +1370,7 @@
"verificationCompleteSuccess": "Verification complete. All files are confirmed duplicates.",
"verificationFailed": "Failed to verify hashes: {message}",
"noTagsToAdd": "No tags to add",
"bulkTagsUpdating": "Updating tags for {count} model(s)...",
"tagsAddedSuccessfully": "Successfully added {tagCount} tag(s) to {count} {type}(s)",
"tagsReplacedSuccessfully": "Successfully replaced tags for {count} {type}(s) with {tagCount} tag(s)",
"tagsAddFailed": "Failed to add tags to {count} model(s)",
@@ -1490,6 +1491,8 @@
"metadataRefreshed": "Metadata refreshed successfully",
"metadataRefreshFailed": "Failed to refresh metadata: {message}",
"metadataUpdateComplete": "Metadata update complete",
"operationCancelled": "Operation cancelled by user",
"operationCancelledPartial": "Operation cancelled. {success} items processed.",
"metadataFetchFailed": "Failed to fetch metadata: {message}",
"bulkMetadataCompleteAll": "Successfully refreshed all {count} {type}s",
"bulkMetadataCompletePartial": "Refreshed {success} of {total} {type}s",

View File

@@ -1370,6 +1370,7 @@
"verificationCompleteSuccess": "Verificación completa. Todos los archivos son confirmados duplicados.",
"verificationFailed": "Error al verificar hashes: {message}",
"noTagsToAdd": "No hay etiquetas para añadir",
"bulkTagsUpdating": "Actualizando etiquetas para {count} modelo(s)...",
"tagsAddedSuccessfully": "Se añadieron exitosamente {tagCount} etiqueta(s) a {count} {type}(s)",
"tagsReplacedSuccessfully": "Se reemplazaron exitosamente las etiquetas de {count} {type}(s) con {tagCount} etiqueta(s)",
"tagsAddFailed": "Error al añadir etiquetas a {count} modelo(s)",
@@ -1490,6 +1491,8 @@
"metadataRefreshed": "Metadatos actualizados exitosamente",
"metadataRefreshFailed": "Error al actualizar metadatos: {message}",
"metadataUpdateComplete": "Actualización de metadatos completada",
"operationCancelled": "Operación cancelada por el usuario",
"operationCancelledPartial": "Operación cancelada. {success} elementos procesados.",
"metadataFetchFailed": "Error al obtener metadatos: {message}",
"bulkMetadataCompleteAll": "Actualizados exitosamente todos los {count} {type}s",
"bulkMetadataCompletePartial": "Actualizados {success} de {total} {type}s",

View File

@@ -1370,6 +1370,7 @@
"verificationCompleteSuccess": "Vérification terminée. Tous les fichiers sont confirmés comme doublons.",
"verificationFailed": "Échec de la vérification des hash : {message}",
"noTagsToAdd": "Aucun tag à ajouter",
"bulkTagsUpdating": "Mise à jour des tags pour {count} modèle(s)...",
"tagsAddedSuccessfully": "{tagCount} tag(s) ajouté(s) avec succès à {count} {type}(s)",
"tagsReplacedSuccessfully": "Tags remplacés avec succès pour {count} {type}(s) avec {tagCount} tag(s)",
"tagsAddFailed": "Échec de l'ajout des tags à {count} modèle(s)",
@@ -1490,6 +1491,8 @@
"metadataRefreshed": "Métadonnées actualisées avec succès",
"metadataRefreshFailed": "Échec de l'actualisation des métadonnées : {message}",
"metadataUpdateComplete": "Mise à jour des métadonnées terminée",
"operationCancelled": "Opération annulée par l'utilisateur",
"operationCancelledPartial": "Opération annulée. {success} éléments traités.",
"metadataFetchFailed": "Échec de la récupération des métadonnées : {message}",
"bulkMetadataCompleteAll": "Actualisation réussie de tous les {count} {type}s",
"bulkMetadataCompletePartial": "{success} sur {total} {type}s actualisés",

View File

@@ -1370,6 +1370,7 @@
"verificationCompleteSuccess": "האימות הושלם. כל הקבצים אושרו ככפולים.",
"verificationFailed": "אימות ה-hashes נכשל: {message}",
"noTagsToAdd": "אין תגיות להוספה",
"bulkTagsUpdating": "מעדכן תגיות עבור {count} מודלים...",
"tagsAddedSuccessfully": "נוספו בהצלחה {tagCount} תגית(ות) ל-{count} {type}(ים)",
"tagsReplacedSuccessfully": "הוחלפו בהצלחה תגיות עבור {count} {type}(ים) ב-{tagCount} תגית(ות)",
"tagsAddFailed": "הוספת תגיות ל-{count} מודל(ים) נכשלה",
@@ -1490,6 +1491,8 @@
"metadataRefreshed": "המטא-דאטה רועננה בהצלחה",
"metadataRefreshFailed": "רענון המטא-דאטה נכשל: {message}",
"metadataUpdateComplete": "עדכון המטא-דאטה הושלם",
"operationCancelled": "הפעולה בוטלה על ידי המשתמש",
"operationCancelledPartial": "הפעולה בוטלה. {success} פריטים עובדו.",
"metadataFetchFailed": "אחזור המטא-דאטה נכשל: {message}",
"bulkMetadataCompleteAll": "רועננו בהצלחה כל {count} ה-{type}s",
"bulkMetadataCompletePartial": "רועננו {success} מתוך {total} {type}s",

View File

@@ -1370,6 +1370,7 @@
"verificationCompleteSuccess": "検証完了。すべてのファイルが重複であることが確認されました。",
"verificationFailed": "ハッシュの検証に失敗しました:{message}",
"noTagsToAdd": "追加するタグがありません",
"bulkTagsUpdating": "{count} 個のモデルのタグを更新しています...",
"tagsAddedSuccessfully": "{count} {type} に {tagCount} 個のタグを追加しました",
"tagsReplacedSuccessfully": "{count} {type} のタグを {tagCount} 個に置換しました",
"tagsAddFailed": "{count} モデルへのタグ追加に失敗しました",
@@ -1490,6 +1491,8 @@
"metadataRefreshed": "メタデータが正常に更新されました",
"metadataRefreshFailed": "メタデータの更新に失敗しました:{message}",
"metadataUpdateComplete": "メタデータ更新完了",
"operationCancelled": "ユーザーによって操作がキャンセルされました",
"operationCancelledPartial": "操作がキャンセルされました。{success} 個の項目が処理されました。",
"metadataFetchFailed": "メタデータの取得に失敗しました:{message}",
"bulkMetadataCompleteAll": "{count} {type}すべてが正常に更新されました",
"bulkMetadataCompletePartial": "{total} {type}のうち {success} が更新されました",

View File

@@ -1370,6 +1370,7 @@
"verificationCompleteSuccess": "검증 완료. 모든 파일이 중복임을 확인했습니다.",
"verificationFailed": "해시 검증 실패: {message}",
"noTagsToAdd": "추가할 태그가 없습니다",
"bulkTagsUpdating": "{count}개 모델의 태그를 업데이트 중입니다...",
"tagsAddedSuccessfully": "{count}개의 {type}에 {tagCount}개의 태그가 성공적으로 추가되었습니다",
"tagsReplacedSuccessfully": "{count}개의 {type}의 태그가 {tagCount}개의 태그로 성공적으로 교체되었습니다",
"tagsAddFailed": "{count}개의 모델에 태그 추가에 실패했습니다",
@@ -1490,6 +1491,8 @@
"metadataRefreshed": "메타데이터가 성공적으로 새로고침되었습니다",
"metadataRefreshFailed": "메타데이터 새로고침 실패: {message}",
"metadataUpdateComplete": "메타데이터 업데이트 완료",
"operationCancelled": "사용자에 의해 작업이 취소되었습니다",
"operationCancelledPartial": "작업이 취소되었습니다. {success}개 항목이 처리되었습니다.",
"metadataFetchFailed": "메타데이터 가져오기 실패: {message}",
"bulkMetadataCompleteAll": "모든 {count}개 {type}이(가) 성공적으로 새로고침되었습니다",
"bulkMetadataCompletePartial": "{total}개 중 {success}개 {type}이(가) 새로고침되었습니다",

View File

@@ -1370,6 +1370,7 @@
"verificationCompleteSuccess": "Проверка завершена. Все файлы подтверждены как дубликаты.",
"verificationFailed": "Не удалось проверить хеши: {message}",
"noTagsToAdd": "Нет тегов для добавления",
"bulkTagsUpdating": "Обновление тегов для {count} модел(ей)...",
"tagsAddedSuccessfully": "Успешно добавлено {tagCount} тег(ов) к {count} {type}(ам)",
"tagsReplacedSuccessfully": "Успешно заменены теги для {count} {type}(ов) на {tagCount} тег(ов)",
"tagsAddFailed": "Не удалось добавить теги к {count} модель(ям)",
@@ -1490,6 +1491,8 @@
"metadataRefreshed": "Метаданные успешно обновлены",
"metadataRefreshFailed": "Не удалось обновить метаданные: {message}",
"metadataUpdateComplete": "Обновление метаданных завершено",
"operationCancelled": "Операция отменена пользователем",
"operationCancelledPartial": "Операция отменена. Обработано {success} элементов.",
"metadataFetchFailed": "Не удалось получить метаданные: {message}",
"bulkMetadataCompleteAll": "Успешно обновлены все {count} {type}s",
"bulkMetadataCompletePartial": "Обновлено {success} из {total} {type}s",

View File

@@ -1370,6 +1370,7 @@
"verificationCompleteSuccess": "验证完成。所有文件均为重复项。",
"verificationFailed": "验证哈希失败:{message}",
"noTagsToAdd": "没有可添加的标签",
"bulkTagsUpdating": "正在更新 {count} 个模型的标签...",
"tagsAddedSuccessfully": "已成功为 {count} 个 {type} 添加 {tagCount} 个标签",
"tagsReplacedSuccessfully": "已成功为 {count} 个 {type} 替换为 {tagCount} 个标签",
"tagsAddFailed": "为 {count} 个模型添加标签失败",
@@ -1490,6 +1491,8 @@
"metadataRefreshed": "元数据刷新成功",
"metadataRefreshFailed": "刷新元数据失败:{message}",
"metadataUpdateComplete": "元数据更新完成",
"operationCancelled": "操作已由用户取消",
"operationCancelledPartial": "操作已取消。已处理 {success} 个项目。",
"metadataFetchFailed": "获取元数据失败:{message}",
"bulkMetadataCompleteAll": "全部 {count} 个 {type} 元数据刷新成功",
"bulkMetadataCompletePartial": "已刷新 {success}/{total} 个 {type} 元数据",

View File

@@ -1370,6 +1370,7 @@
"verificationCompleteSuccess": "驗證完成。所有檔案均確認為重複項。",
"verificationFailed": "驗證雜湊失敗:{message}",
"noTagsToAdd": "沒有可新增的標籤",
"bulkTagsUpdating": "正在更新 {count} 個模型的標籤...",
"tagsAddedSuccessfully": "已成功將 {tagCount} 個標籤新增到 {count} 個 {type}",
"tagsReplacedSuccessfully": "已成功以 {tagCount} 個標籤取代 {count} 個 {type} 的標籤",
"tagsAddFailed": "新增標籤到 {count} 個模型失敗",
@@ -1490,6 +1491,8 @@
"metadataRefreshed": "metadata 已成功刷新",
"metadataRefreshFailed": "刷新 metadata 失敗:{message}",
"metadataUpdateComplete": "metadata 更新完成",
"operationCancelled": "操作已由用戶取消",
"operationCancelledPartial": "操作已取消。已處理 {success} 個項目。",
"metadataFetchFailed": "取得 metadata 失敗:{message}",
"bulkMetadataCompleteAll": "已成功刷新全部 {count} 個 {type}",
"bulkMetadataCompletePartial": "已刷新 {success} / {total} 個 {type}",

View File

@@ -618,6 +618,8 @@ class ModelQueryHandler:
try:
full_rebuild = request.query.get("full_rebuild", "false").lower() == "true"
await self._service.scan_models(force_refresh=True, rebuild_cache=full_rebuild)
if self._service.scanner.is_cancelled():
return web.json_response({"status": "cancelled", "message": f"{self._service.model_type.capitalize()} scan cancelled"})
return web.json_response({"status": "success", "message": f"{self._service.model_type.capitalize()} scan completed"})
except Exception as exc:
self._logger.error("Error scanning %ss: %s", self._service.model_type, exc, exc_info=True)
@@ -639,6 +641,14 @@ class ModelQueryHandler:
self._logger.error("Error getting folders: %s", exc)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def cancel_task(self, request: web.Request) -> web.Response:
    """Request cancellation of the current long-running scanner task.

    Sets the scanner's cancellation flag and returns immediately; the
    in-flight operation (scan, bulk refresh, etc.) polls the flag and
    stops at its next checkpoint, so cancellation is cooperative and
    asynchronous.

    Args:
        request: Incoming HTTP request (unused; endpoint takes no parameters).

    Returns:
        JSON response with ``{"status": "success"}`` on acceptance, or a
        500 response with ``{"success": False, "error": ...}`` on failure.
    """
    try:
        self._service.scanner.cancel_task()
        return web.json_response({"status": "success", "message": "Cancellation requested"})
    except Exception as exc:
        self._logger.error("Error cancelling task for %s: %s", self._service.model_type, exc)
        return web.json_response({"success": False, "error": str(exc)}, status=500)
async def get_folder_tree(self, request: web.Request) -> web.Response:
try:
model_root = request.query.get("model_root")
@@ -1262,6 +1272,8 @@ class ModelUpdateHandler:
force_refresh=force_refresh,
target_model_ids=target_model_ids or None,
)
if self._service.scanner.is_cancelled():
return web.json_response({"success": False, "status": "cancelled", "message": "Update refresh cancelled"})
except RateLimitError as exc:
return web.json_response(
{"success": False, "error": str(exc) or "Rate limited"}, status=429
@@ -1678,4 +1690,5 @@ class ModelHandlerSet:
"set_version_update_ignore": self.updates.set_version_update_ignore,
"get_model_update_status": self.updates.get_model_update_status,
"get_model_versions": self.updates.get_model_versions,
"cancel_task": self.query.cancel_task,
}

View File

@@ -68,6 +68,7 @@ COMMON_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
RouteDefinition("GET", "/api/lm/pause-download", "pause_download_get"),
RouteDefinition("GET", "/api/lm/resume-download", "resume_download_get"),
RouteDefinition("GET", "/api/lm/download-progress/{download_id}", "get_download_progress"),
RouteDefinition("POST", "/api/lm/{prefix}/cancel-task", "cancel_task"),
RouteDefinition("GET", "/{prefix}", "handle_models_page"),
)

View File

@@ -36,11 +36,13 @@ class AutoOrganizeResult:
self.results_truncated: bool = False
self.sample_results: List[Dict[str, Any]] = []
self.is_flat_structure: bool = False
self.status: str = 'success'
def to_dict(self) -> Dict[str, Any]:
"""Convert result to dictionary"""
result = {
'success': True,
'success': self.status != 'error',
'status': self.status,
'message': f'Auto-organize {self.operation_type} completed: {self.success_count} moved, {self.skipped_count} skipped, {self.failure_count} failed out of {self.total} total',
'summary': {
'total': self.total,
@@ -98,6 +100,8 @@ class ModelFileService:
result = AutoOrganizeResult()
source_directories: Set[str] = set()
self.scanner.reset_cancellation()
try:
# Get all models from cache
cache = await self.scanner.get_cached_data()
@@ -186,6 +190,21 @@ class ModelFileService:
progress_callback,
source_directories # Pass the set to track source directories
)
if self.scanner.is_cancelled():
result.status = 'cancelled'
if progress_callback:
await progress_callback.on_progress({
'type': 'auto_organize_progress',
'status': 'cancelled',
'total': result.total,
'processed': result.processed,
'success': result.success_count,
'failures': result.failure_count,
'skipped': result.skipped_count,
'operation_type': result.operation_type
})
return result
# Send cleanup progress
if progress_callback:
@@ -246,9 +265,15 @@ class ModelFileService:
"""Process models in batches to avoid overwhelming the system"""
for i in range(0, result.total, AUTO_ORGANIZE_BATCH_SIZE):
if self.scanner.is_cancelled():
logger.info(f"{self.model_type.capitalize()} File Service: Auto-organize cancelled by user")
break
batch = all_models[i:i + AUTO_ORGANIZE_BATCH_SIZE]
for model in batch:
if self.scanner.is_cancelled():
break
await self._process_single_model(model, model_roots, result, source_directories)
result.processed += 1
@@ -535,8 +560,12 @@ class ModelMoveService:
"""
try:
results = []
self.scanner.reset_cancellation()
for file_path in file_paths:
if self.scanner.is_cancelled():
logger.info(f"{self.model_type.capitalize()} Move Service: Bulk move cancelled by user")
break
result = await self.move_model(file_path, target_path, use_default_paths=use_default_paths)
results.append({
"original_file_path": file_path,

View File

@@ -84,6 +84,7 @@ class ModelScanner:
self._excluded_models = [] # List to track excluded models
self._persistent_cache = get_persistent_cache()
self._name_display_mode = self._resolve_name_display_mode()
self._cancel_requested = False # Flag for cancellation
try:
loop = asyncio.get_running_loop()
except RuntimeError:
@@ -678,6 +679,7 @@ class ModelScanner:
async def _reconcile_cache(self) -> None:
"""Fast cache reconciliation - only process differences between cache and filesystem"""
self.reset_cancellation()
self._is_initializing = True # Set flag for reconciliation duration
try:
start_time = time.time()
@@ -737,6 +739,9 @@ class ModelScanner:
# Yield control periodically
await asyncio.sleep(0)
if self.is_cancelled():
logger.info(f"{self.model_type.capitalize()} Scanner: Reconcile scan cancelled")
return
# Process new files in batches
total_added = 0
@@ -784,6 +789,10 @@ class ModelScanner:
logger.error(f"Could not determine root path for {path}")
except Exception as e:
logger.error(f"Error adding {path} to cache: {e}")
if self.is_cancelled():
logger.info(f"{self.model_type.capitalize()} Scanner: Reconcile processing cancelled")
return
# Find missing files (in cache but not in filesystem)
missing_files = cached_paths - found_paths
@@ -838,6 +847,19 @@ class ModelScanner:
"""Check if the scanner is currently initializing"""
return self._is_initializing
def cancel_task(self) -> None:
    """Request cancellation of the current long-running task.

    Only sets a flag — long-running loops poll ``is_cancelled()`` and
    stop at their next checkpoint, so cancellation is cooperative and
    may not take effect immediately.
    """
    self._cancel_requested = True
    logger.info(f"{self.model_type.capitalize()} Scanner: Cancellation requested")
def reset_cancellation(self) -> None:
    """Reset the cancellation flag.

    Called at the start of each new cancellable operation so a stale
    cancel request from a previous task does not abort the new one.
    """
    self._cancel_requested = False
def is_cancelled(self) -> bool:
    """Check if cancellation has been requested for the current task."""
    return self._cancel_requested
def get_model_roots(self) -> List[str]:
    """Get model root directories.

    Abstract hook: subclasses must override this to return the
    filesystem roots containing this model type's files.

    Raises:
        NotImplementedError: Always, in this base implementation.
    """
    raise NotImplementedError("Subclasses must implement get_model_roots")
@@ -1030,6 +1052,8 @@ class ModelScanner:
except Exception as exc: # pragma: no cover - defensive logging
logger.error(f"Error reporting progress for {self.model_type}: {exc}")
self.reset_cancellation()
async def scan_recursive(current_path: str, root_path: str, visited_paths: Set[str]) -> None:
nonlocal processed_files
@@ -1073,6 +1097,8 @@ class ModelScanner:
await handle_progress()
await asyncio.sleep(0)
if self.is_cancelled():
return
elif entry.is_dir(follow_symlinks=True):
await scan_recursive(entry.path, root_path, visited_paths)
except Exception as entry_error:
@@ -1080,6 +1106,9 @@ class ModelScanner:
except Exception as scan_error:
logger.error(f"Error scanning {current_path}: {scan_error}")
if self.is_cancelled():
return
for model_root in self.get_model_roots():
if not os.path.exists(model_root):
continue
@@ -1448,6 +1477,10 @@ class ModelScanner:
deleted_models = []
for file_path in file_paths:
if self.is_cancelled():
logger.info(f"{self.model_type.capitalize()} Scanner: Bulk delete cancelled by user")
break
try:
target_dir = os.path.dirname(file_path)
base_name = os.path.basename(file_path)
@@ -1488,6 +1521,7 @@ class ModelScanner:
return {
'success': True,
'status': 'cancelled' if self.is_cancelled() else 'success',
'total_deleted': total_deleted,
'total_attempted': len(file_paths),
'cache_updated': cache_updated,

View File

@@ -466,6 +466,7 @@ class ModelUpdateService:
target_model_ids: Optional[Sequence[int]] = None,
) -> Dict[int, ModelUpdateRecord]:
"""Refresh update information for every model present in the cache."""
scanner.reset_cancellation()
normalized_targets = (
self._normalize_sequence(target_model_ids)
@@ -542,6 +543,9 @@ class ModelUpdateService:
force_refresh=force_refresh,
prefetched_response=prefetched.get(model_id),
)
if scanner.is_cancelled():
logger.info(f"{model_type.capitalize()} Update Service: Refresh cancelled by user")
return results
if record:
results[model_id] = record
if index % progress_interval == 0 or index == total_models:

View File

@@ -59,6 +59,8 @@ class BulkMetadataRefreshUseCase:
success = 0
needs_resort = False
self._service.scanner.reset_cancellation()
async def emit(status: str, **extra: Any) -> None:
if progress_callback is None:
return
@@ -69,6 +71,10 @@ class BulkMetadataRefreshUseCase:
await emit("started")
for model in to_process:
if self._service.scanner.is_cancelled():
self._logger.info("Bulk metadata refresh cancelled by user")
await emit("cancelled", processed=processed, success=success)
return {"success": False, "message": "Operation cancelled", "processed": processed, "updated": success, "total": total_models}
try:
original_name = model.get("model_name")
await MetadataManager.hydrate_model_data(model)

View File

@@ -52,7 +52,7 @@ export function getApiEndpoints(modelType) {
if (!Object.values(MODEL_TYPES).includes(modelType)) {
throw new Error(`Invalid model type: ${modelType}`);
}
return {
// Base CRUD operations
list: `/api/lm/${modelType}/list`,
@@ -60,17 +60,18 @@ export function getApiEndpoints(modelType) {
exclude: `/api/lm/${modelType}/exclude`,
rename: `/api/lm/${modelType}/rename`,
save: `/api/lm/${modelType}/save-metadata`,
cancelTask: `/api/lm/${modelType}/cancel-task`,
// Bulk operations
bulkDelete: `/api/lm/${modelType}/bulk-delete`,
// Tag operations
addTags: `/api/lm/${modelType}/add-tags`,
// Move operations (now common for all model types that support move)
moveModel: `/api/lm/${modelType}/move_model`,
moveBulk: `/api/lm/${modelType}/move_models_bulk`,
// CivitAI integration
fetchCivitai: `/api/lm/${modelType}/fetch-civitai`,
fetchAllCivitai: `/api/lm/${modelType}/fetch-all-civitai`,
@@ -82,10 +83,10 @@ export function getApiEndpoints(modelType) {
modelUpdateVersions: `/api/lm/${modelType}/updates/versions`,
ignoreModelUpdate: `/api/lm/${modelType}/updates/ignore`,
ignoreVersionUpdate: `/api/lm/${modelType}/updates/ignore-version`,
// Preview management
replacePreview: `/api/lm/${modelType}/replace-preview`,
// Query operations
scan: `/api/lm/${modelType}/scan`,
topTags: `/api/lm/${modelType}/top-tags`,
@@ -99,11 +100,11 @@ export function getApiEndpoints(modelType) {
verify: `/api/lm/${modelType}/verify-duplicates`,
metadata: `/api/lm/${modelType}/metadata`,
modelDescription: `/api/lm/${modelType}/model-description`,
// Auto-organize operations
autoOrganize: `/api/lm/${modelType}/auto-organize`,
autoOrganizeProgress: `/api/lm/${modelType}/auto-organize-progress`,
// Model-specific endpoints (will be merged with specific configs)
specific: {}
};
@@ -144,7 +145,7 @@ export function getCompleteApiConfig(modelType) {
const baseEndpoints = getApiEndpoints(modelType);
const specificEndpoints = MODEL_SPECIFIC_ENDPOINTS[modelType] || {};
const config = MODEL_CONFIG[modelType];
return {
modelType,
config,

View File

@@ -82,6 +82,19 @@ export class BaseModelApiClient {
}
}
/**
 * Request cancellation of the current long-running backend task
 * (scan, bulk metadata refresh, auto-organize, ...) for this model type.
 *
 * Fire-and-forget from the caller's perspective: the backend only sets
 * a cancellation flag, and the running operation stops at its next
 * checkpoint.
 *
 * @returns {Promise<Object>} Parsed JSON response from the backend, or
 *   `{ success: false, error }` if the HTTP request itself failed.
 */
async cancelTask() {
    try {
        const endpoint = this.apiConfig.endpoints.cancelTask;
        const response = await fetch(endpoint, {
            method: 'POST'
        });
        return await response.json();
    } catch (error) {
        console.error(`Error cancelling task for ${this.modelType}:`, error);
        return { success: false, error: error.message };
    }
}
async loadMoreWithVirtualScroll(resetPage = false, updateFolders = false) {
const pageState = this.getPageState();
@@ -336,9 +349,11 @@ export class BaseModelApiClient {
async refreshModels(fullRebuild = false) {
try {
state.loadingManager.showSimpleLoading(
`${fullRebuild ? 'Full rebuild' : 'Refreshing'} ${this.apiConfig.config.displayName}s...`
state.loadingManager.show(
`${fullRebuild ? 'Full rebuild' : 'Refreshing'} ${this.apiConfig.config.displayName}s...`,
0
);
state.loadingManager.showCancelButton(() => this.cancelTask());
const url = new URL(this.apiConfig.endpoints.scan, window.location.origin);
url.searchParams.append('full_rebuild', fullRebuild);
@@ -349,6 +364,12 @@ export class BaseModelApiClient {
throw new Error(`Failed to refresh ${this.apiConfig.config.displayName}s: ${response.status} ${response.statusText}`);
}
const data = await response.json();
if (data.status === 'cancelled') {
showToast('toast.api.operationCancelled', {}, 'info');
return;
}
resetAndReload(true);
showToast('toast.api.refreshComplete', { action: fullRebuild ? 'Full rebuild' : 'Refresh' }, 'success');
@@ -402,6 +423,7 @@ export class BaseModelApiClient {
await state.loadingManager.showWithProgress(async (loading) => {
try {
loading.showCancelButton(() => this.cancelTask());
const wsProtocol = window.location.protocol === 'https:' ? 'wss://' : 'ws://';
ws = new WebSocket(`${wsProtocol}${window.location.host}${WS_ENDPOINTS.fetchProgress}`);
@@ -427,7 +449,12 @@ export class BaseModelApiClient {
loading.setStatus(
`Completed: Updated ${data.success} of ${data.processed} ${this.apiConfig.config.displayName}s`
);
resolve();
resolve(data);
break;
case 'cancelled':
loading.setStatus('Operation cancelled by user');
resolve(data); // Consider it complete but marked as cancelled
break;
case 'error':
@@ -458,10 +485,14 @@ export class BaseModelApiClient {
}
// Wait for the operation to complete via WebSocket
await operationComplete;
const finalData = await operationComplete;
resetAndReload(false);
showToast('toast.api.metadataUpdateComplete', {}, 'success');
if (finalData && finalData.status === 'cancelled') {
showToast('toast.api.operationCancelledPartial', { success: finalData.success, total: finalData.total }, 'info');
} else {
showToast('toast.api.metadataUpdateComplete', {}, 'success');
}
} catch (error) {
console.error('Error fetching metadata:', error);
showToast('toast.api.metadataFetchFailed', { message: error.message }, 'error');
@@ -487,9 +518,17 @@ export class BaseModelApiClient {
let failedItems = [];
const progressController = state.loadingManager.showEnhancedProgress('Starting metadata refresh...');
let cancelled = false;
progressController.showCancelButton(() => {
cancelled = true;
this.cancelTask();
});
try {
for (let i = 0; i < filePaths.length; i++) {
if (cancelled) {
break;
}
const filePath = filePaths[i];
const fileName = filePath.split('/').pop();
@@ -531,20 +570,15 @@ export class BaseModelApiClient {
}
let completionMessage;
if (successCount === totalItems) {
if (cancelled) {
completionMessage = translate('toast.api.operationCancelledPartial', { success: successCount, total: totalItems }, `Operation cancelled. ${successCount} items processed.`);
showToast('toast.api.operationCancelledPartial', { success: successCount, total: totalItems }, 'info');
} else if (successCount === totalItems) {
completionMessage = translate('toast.api.bulkMetadataCompleteAll', { count: successCount, type: this.apiConfig.config.displayName }, `Successfully refreshed all ${successCount} ${this.apiConfig.config.displayName}s`);
showToast('toast.api.bulkMetadataCompleteAll', { count: successCount, type: this.apiConfig.config.displayName }, 'success');
} else if (successCount > 0) {
completionMessage = translate('toast.api.bulkMetadataCompletePartial', { success: successCount, total: totalItems, type: this.apiConfig.config.displayName }, `Refreshed ${successCount} of ${totalItems} ${this.apiConfig.config.displayName}s`);
showToast('toast.api.bulkMetadataCompletePartial', { success: successCount, total: totalItems, type: this.apiConfig.config.displayName }, 'warning');
// if (failedItems.length > 0) {
// const failureMessage = failedItems.length <= 3
// ? failedItems.map(item => `${item.fileName}: ${item.error}`).join('\n')
// : failedItems.slice(0, 3).map(item => `${item.fileName}: ${item.error}`).join('\n') +
// `\n(and ${failedItems.length - 3} more)`;
// showToast('toast.api.bulkMetadataFailureDetails', { failures: failureMessage }, 'warning', 6000);
// }
} else {
completionMessage = translate('toast.api.bulkMetadataCompleteNone', { type: this.apiConfig.config.displayName }, `Failed to refresh metadata for any ${this.apiConfig.config.displayName}s`);
showToast('toast.api.bulkMetadataCompleteNone', { type: this.apiConfig.config.displayName }, 'error');
@@ -574,28 +608,42 @@ export class BaseModelApiClient {
throw new Error('No model IDs provided');
}
const response = await fetch(this.apiConfig.endpoints.refreshUpdates, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
model_ids: modelIds,
force
})
});
let payload = {};
try {
payload = await response.json();
state.loadingManager.show('Checking for updates...', 0);
state.loadingManager.showCancelButton(() => this.cancelTask());
const response = await fetch(this.apiConfig.endpoints.refreshUpdates, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
model_ids: modelIds,
force
})
});
let payload = {};
try {
payload = await response.json();
} catch (error) {
console.warn('Unable to parse refresh updates response as JSON', error);
}
if (!response.ok || payload?.success !== true) {
if (payload?.status === 'cancelled') {
showToast('toast.api.operationCancelled', {}, 'info');
return null;
}
const message = payload?.error || response.statusText || 'Failed to refresh updates';
throw new Error(message);
}
return payload;
} catch (error) {
console.warn('Unable to parse refresh updates response as JSON', error);
console.error('Error refreshing updates for models:', error);
throw error;
} finally {
state.loadingManager.hide();
}
if (!response.ok || payload?.success !== true) {
const message = payload?.error || response.statusText || 'Failed to refresh updates';
throw new Error(message);
}
return payload;
}
async fetchCivitaiVersions(modelId, source = null) {
@@ -1016,6 +1064,7 @@ export class BaseModelApiClient {
try {
state.loadingManager.showSimpleLoading(`Deleting ${this.apiConfig.config.displayName.toLowerCase()}s...`);
state.loadingManager.showCancelButton(() => this.cancelTask());
const response = await fetch(this.apiConfig.endpoints.bulkDelete, {
method: 'POST',
@@ -1055,6 +1104,7 @@ export class BaseModelApiClient {
let ws = null;
await state.loadingManager.showWithProgress(async (loading) => {
loading.showCancelButton(() => this.stopExampleImages());
try {
// Connect to WebSocket for progress updates
const wsProtocol = window.location.protocol === 'https:' ? 'wss://' : 'ws://';
@@ -1202,6 +1252,7 @@ export class BaseModelApiClient {
let ws = null;
await state.loadingManager.showWithProgress(async (loading) => {
loading.showCancelButton(() => this.cancelTask());
try {
// Connect to WebSocket for progress updates
const wsProtocol = window.location.protocol === 'https:' ? 'wss://' : 'ws://';
@@ -1255,6 +1306,11 @@ export class BaseModelApiClient {
}, 1500);
break;
case 'cancelled':
loading.setStatus(translate('toast.api.operationCancelled', {}, 'Operation cancelled by user'));
resolve(data);
break;
case 'error':
loading.setStatus(translate('loras.bulkOperations.autoOrganizeProgress.error', { error: data.error }, `Error: ${data.error}`));
reject(new Error(data.error));
@@ -1299,7 +1355,9 @@ export class BaseModelApiClient {
const result = await operationComplete;
// Show appropriate success message based on results
if (result.failures === 0) {
if (result.status === 'cancelled') {
showToast('toast.api.operationCancelledPartial', { success: result.success, total: result.total }, 'info');
} else if (result.failures === 0) {
showToast('toast.loras.autoOrganizeSuccess', {
count: result.success,
type: result.operation_type === 'bulk' ? 'selected models' : 'all models'
@@ -1326,4 +1384,17 @@ export class BaseModelApiClient {
completionMessage: translate('loras.bulkOperations.autoOrganizeProgress.complete', {}, 'Auto-organize complete')
});
}
/**
 * Ask the backend to stop an in-progress example-images download.
 *
 * Uses a dedicated endpoint (not the per-model-type cancel-task route),
 * since example-image downloads are a global operation.
 *
 * @returns {Promise<boolean>} true if the stop request was accepted
 *   (HTTP ok), false on any error.
 */
async stopExampleImages() {
    try {
        const response = await fetch('/api/lm/stop-example-images', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' }
        });
        return response.ok;
    } catch (error) {
        console.error('Error stopping example images:', error);
        return false;
    }
}
}

View File

@@ -16,19 +16,19 @@ export class BulkManager {
this.bulkBtn = document.getElementById('bulkOperationsBtn');
// Remove bulk panel references since we're using context menu now
this.bulkContextMenu = null; // Will be set by core initialization
// Marquee selection properties
this.isMarqueeActive = false;
this.isDragging = false;
this.marqueeStart = { x: 0, y: 0 };
this.marqueeElement = null;
this.initialSelectedModels = new Set();
// Drag detection properties
this.dragThreshold = 5; // Pixels to move before considering it a drag
this.mouseDownTime = 0;
this.mouseDownPosition = { x: 0, y: 0 };
// Model type specific action configurations
this.actionConfig = {
[MODEL_TYPES.LORA]: {
@@ -103,7 +103,7 @@ export class BulkManager {
initialize() {
// Register with event manager for coordinated event handling
this.registerEventHandlers();
// Initialize bulk mode state in event manager
eventManager.setState('bulkMode', state.bulkMode || false);
}
@@ -160,7 +160,7 @@ export class BulkManager {
const dx = e.clientX - this.mouseDownPosition.x;
const dy = e.clientY - this.mouseDownPosition.y;
const distance = Math.sqrt(dx * dx + dy * dy);
if (distance >= this.dragThreshold) {
this.isDragging = true;
this.startMarqueeSelection(e, true);
@@ -176,7 +176,7 @@ export class BulkManager {
this.endMarqueeSelection(e);
return true; // Stop propagation
}
// Reset drag detection if we had a mousedown but didn't drag
if (this.mouseDownTime) {
this.mouseDownTime = 0;
@@ -258,25 +258,25 @@ export class BulkManager {
this.toggleBulkMode();
return true; // Stop propagation
}
return false; // Continue with other handlers
}
toggleBulkMode() {
state.bulkMode = !state.bulkMode;
// Update event manager state
eventManager.setState('bulkMode', state.bulkMode);
if (this.bulkBtn) {
this.bulkBtn.classList.toggle('active', state.bulkMode);
}
updateCardsForBulkMode(state.bulkMode);
if (!state.bulkMode) {
this.clearSelection();
// Hide context menu when exiting bulk mode
if (this.bulkContextMenu) {
this.bulkContextMenu.hideMenu();
@@ -289,7 +289,7 @@ export class BulkManager {
card.classList.remove('selected');
});
state.selectedModels.clear();
// Update context menu header if visible
if (this.bulkContextMenu) {
this.bulkContextMenu.updateSelectedCountHeader();
@@ -298,7 +298,7 @@ export class BulkManager {
toggleCardSelection(card) {
const filepath = card.dataset.filepath;
if (card.classList.contains('selected')) {
card.classList.remove('selected');
state.selectedModels.delete(filepath);
@@ -309,7 +309,7 @@ export class BulkManager {
// Cache the metadata for this model
this.updateMetadataCacheFromCard(filepath, card);
}
// Update context menu header if visible
if (this.bulkContextMenu) {
this.bulkContextMenu.updateSelectedCountHeader();
@@ -419,7 +419,7 @@ export class BulkManager {
applySelectionState() {
if (!state.bulkMode) return;
document.querySelectorAll('.model-card').forEach(card => {
const filepath = card.dataset.filepath;
if (state.selectedModels.has(filepath)) {
@@ -437,19 +437,19 @@ export class BulkManager {
showToast('toast.loras.copyOnlyForLoras', {}, 'warning');
return;
}
if (state.selectedModels.size === 0) {
showToast('toast.loras.noLorasSelected', {}, 'warning');
return;
}
const loraSyntaxes = [];
const missingLoras = [];
const metadataCache = this.getMetadataCache();
for (const filepath of state.selectedModels) {
const metadata = metadataCache.get(filepath);
if (metadata) {
const usageTips = JSON.parse(metadata.usageTips || '{}');
loraSyntaxes.push(buildLoraSyntax(metadata.fileName, usageTips));
@@ -457,38 +457,38 @@ export class BulkManager {
missingLoras.push(filepath);
}
}
if (missingLoras.length > 0) {
console.warn('Missing metadata for some selected loras:', missingLoras);
showToast('toast.loras.missingDataForLoras', { count: missingLoras.length }, 'warning');
}
if (loraSyntaxes.length === 0) {
showToast('toast.loras.noValidLorasToCopy', {}, 'error');
return;
}
await copyToClipboard(loraSyntaxes.join(', '), `Copied ${loraSyntaxes.length} LoRA syntaxes to clipboard`);
}
async sendAllModelsToWorkflow(replaceMode = false) {
if (state.currentPageType !== MODEL_TYPES.LORA) {
showToast('toast.loras.sendOnlyForLoras', {}, 'warning');
return;
}
if (state.selectedModels.size === 0) {
showToast('toast.loras.noLorasSelected', {}, 'warning');
return;
}
const loraSyntaxes = [];
const missingLoras = [];
const metadataCache = this.getMetadataCache();
for (const filepath of state.selectedModels) {
const metadata = metadataCache.get(filepath);
if (metadata) {
const usageTips = JSON.parse(metadata.usageTips || '{}');
loraSyntaxes.push(buildLoraSyntax(metadata.fileName, usageTips));
@@ -496,56 +496,56 @@ export class BulkManager {
missingLoras.push(filepath);
}
}
if (missingLoras.length > 0) {
console.warn('Missing metadata for some selected loras:', missingLoras);
showToast('toast.loras.missingDataForLoras', { count: missingLoras.length }, 'warning');
}
if (loraSyntaxes.length === 0) {
showToast('toast.loras.noValidLorasToSend', {}, 'error');
return;
}
await sendLoraToWorkflow(loraSyntaxes.join(', '), replaceMode, 'lora');
}
showBulkDeleteModal() {
if (state.selectedModels.size === 0) {
showToast('toast.models.noModelsSelected', {}, 'warning');
return;
}
const countElement = document.getElementById('bulkDeleteCount');
if (countElement) {
countElement.textContent = state.selectedModels.size;
}
modalManager.showModal('bulkDeleteModal');
}
async confirmBulkDelete() {
if (state.selectedModels.size === 0) {
showToast('toast.models.noModelsSelected', {}, 'warning');
modalManager.closeModal('bulkDeleteModal');
return;
}
modalManager.closeModal('bulkDeleteModal');
try {
const apiClient = this.getActiveApiClient();
const filePaths = Array.from(state.selectedModels);
const result = await apiClient.bulkDeleteModels(filePaths);
if (result.success) {
const currentConfig = this.getCurrentDisplayConfig();
showToast('toast.models.deletedSuccessfully', {
count: result.deleted_count,
type: currentConfig.displayName.toLowerCase()
showToast('toast.models.deletedSuccessfully', {
count: result.deleted_count,
type: currentConfig.displayName.toLowerCase()
}, 'success');
filePaths.forEach(path => {
state.virtualScroller.removeItemByFilePath(path);
});
@@ -562,13 +562,13 @@ export class BulkManager {
showToast('toast.models.deleteFailedGeneral', {}, 'error');
}
}
deselectItem(filepath) {
const card = document.querySelector(`.model-card[data-filepath="${filepath}"]`);
if (card) {
card.classList.remove('selected');
}
state.selectedModels.delete(filepath);
}
@@ -577,10 +577,10 @@ export class BulkManager {
showToast('toast.bulk.unableToSelectAll', {}, 'error');
return;
}
const oldCount = state.selectedModels.size;
const metadataCache = this.getMetadataCache();
state.virtualScroller.items.forEach(item => {
if (item && item.file_path) {
state.selectedModels.add(item.file_path);
@@ -596,16 +596,16 @@ export class BulkManager {
}
}
});
this.applySelectionState();
const newlySelected = state.selectedModels.size - oldCount;
const currentConfig = this.getCurrentDisplayConfig();
showToast('toast.models.selectedAdditional', {
count: newlySelected,
type: currentConfig.displayName.toLowerCase()
showToast('toast.models.selectedAdditional', {
count: newlySelected,
type: currentConfig.displayName.toLowerCase()
}, 'success');
if (this.isStripVisible) {
this.updateThumbnailStrip();
}
@@ -616,13 +616,13 @@ export class BulkManager {
showToast('toast.models.noModelsSelected', {}, 'warning');
return;
}
try {
const apiClient = getModelApiClient();
const filePaths = Array.from(state.selectedModels);
const result = await apiClient.refreshBulkModelMetadata(filePaths);
if (result.success) {
const metadataCache = this.getMetadataCache();
for (const filepath of state.selectedModels) {
@@ -634,12 +634,12 @@ export class BulkManager {
}
}
}
if (this.isStripVisible) {
this.updateThumbnailStrip();
}
}
} catch (error) {
console.error('Error during bulk metadata refresh:', error);
showToast('toast.models.refreshMetadataFailed', {}, 'error');
@@ -714,27 +714,27 @@ export class BulkManager {
showToast('toast.models.noModelsSelected', {}, 'warning');
return;
}
const countElement = document.getElementById('bulkAddTagsCount');
if (countElement) {
countElement.textContent = state.selectedModels.size;
}
// Clear any existing tags in the modal
const tagsContainer = document.getElementById('bulkTagsItems');
if (tagsContainer) {
tagsContainer.innerHTML = '';
}
modalManager.showModal('bulkAddTagsModal', null, null, () => {
// Cleanup when modal is closed
this.cleanupBulkAddTagsModal();
});
// Initialize the bulk tags editing interface
this.initializeBulkTagsInterface();
}
initializeBulkTagsInterface() {
// Setup tag input behavior
const tagInput = document.querySelector('.bulk-metadata-input');
@@ -749,31 +749,31 @@ export class BulkManager {
}
});
}
// Create suggestions dropdown
const tagForm = document.querySelector('#bulkAddTagsModal .metadata-add-form');
if (tagForm) {
const suggestionsDropdown = this.createBulkSuggestionsDropdown();
tagForm.appendChild(suggestionsDropdown);
}
// Setup save button
const appendBtn = document.querySelector('.bulk-append-tags-btn');
const replaceBtn = document.querySelector('.bulk-replace-tags-btn');
if (appendBtn) {
appendBtn.addEventListener('click', () => {
this.saveBulkTags('append');
});
}
if (replaceBtn) {
replaceBtn.addEventListener('click', () => {
this.saveBulkTags('replace');
});
}
}
createBulkSuggestionsDropdown() {
const dropdown = document.createElement('div');
dropdown.className = 'metadata-suggestions-dropdown';
@@ -841,34 +841,34 @@ export class BulkManager {
container.appendChild(item);
});
}
addBulkTag(tag) {
tag = tag.trim().toLowerCase();
if (!tag) return;
const tagsContainer = document.getElementById('bulkTagsItems');
if (!tagsContainer) return;
// Validation: Check length
if (tag.length > 30) {
showToast('modelTags.validation.maxLength', {}, 'error');
return;
}
// Validation: Check total number
const currentTags = tagsContainer.querySelectorAll('.metadata-item');
if (currentTags.length >= 30) {
showToast('modelTags.validation.maxCount', {}, 'error');
return;
}
// Validation: Check for duplicates
const existingTags = Array.from(currentTags).map(tagEl => tagEl.dataset.tag);
if (existingTags.includes(tag)) {
showToast('modelTags.validation.duplicate', {}, 'error');
return;
}
// Create new tag
const newTag = document.createElement('div');
newTag.className = 'metadata-item';
@@ -879,7 +879,7 @@ export class BulkManager {
<i class="fas fa-times"></i>
</button>
`;
// Add delete button event listener
const deleteBtn = newTag.querySelector('.metadata-delete-btn');
deleteBtn.addEventListener('click', (e) => {
@@ -888,10 +888,10 @@ export class BulkManager {
// Update dropdown to show/hide added indicator
this.updateBulkSuggestionsDropdown();
});
tagsContainer.appendChild(newTag);
}
/**
* Get existing tags in the bulk tags container
* @returns {Array} Array of existing tag strings
@@ -899,29 +899,29 @@ export class BulkManager {
getBulkExistingTags() {
const tagsContainer = document.getElementById('bulkTagsItems');
if (!tagsContainer) return [];
const currentTags = tagsContainer.querySelectorAll('.metadata-item');
return Array.from(currentTags).map(tag => tag.dataset.tag);
}
/**
* Update status of items in the bulk suggestions dropdown
*/
updateBulkSuggestionsDropdown() {
const dropdown = document.querySelector('.metadata-suggestions-dropdown');
if (!dropdown) return;
// Get all current tags
const existingTags = this.getBulkExistingTags();
// Update status of each item in dropdown
dropdown.querySelectorAll('.metadata-suggestion-item').forEach(item => {
const tagText = item.querySelector('.metadata-suggestion-text').textContent;
const isAdded = existingTags.includes(tagText);
if (isAdded) {
item.classList.add('already-added');
// Add indicator if it doesn't exist
let indicator = item.querySelector('.added-indicator');
if (!indicator) {
@@ -930,18 +930,18 @@ export class BulkManager {
indicator.innerHTML = '<i class="fas fa-check"></i>';
item.appendChild(indicator);
}
// Remove click event
item.onclick = null;
item.removeEventListener('click', item._clickHandler);
} else {
// Re-enable items that are no longer in the list
item.classList.remove('already-added');
// Remove indicator if it exists
const indicator = item.querySelector('.added-indicator');
if (indicator) indicator.remove();
// Restore click event if not already set
if (!item._clickHandler) {
item._clickHandler = () => {
@@ -959,29 +959,39 @@ export class BulkManager {
}
});
}
async saveBulkTags(mode = 'append') {
const tagElements = document.querySelectorAll('#bulkTagsItems .metadata-item');
const tags = Array.from(tagElements).map(tag => tag.dataset.tag);
if (tags.length === 0) {
showToast('toast.models.noTagsToAdd', {}, 'warning');
return;
}
if (state.selectedModels.size === 0) {
showToast('toast.models.noModelsSelected', {}, 'warning');
return;
}
try {
const apiClient = getModelApiClient();
const filePaths = Array.from(state.selectedModels);
let successCount = 0;
let failCount = 0;
let cancelled = false;
state.loadingManager.showSimpleLoading(translate('toast.models.bulkTagsUpdating', { count: filePaths.length }));
state.loadingManager.showCancelButton(() => {
cancelled = true;
});
// Add or replace tags for each selected model based on mode
for (const filePath of filePaths) {
if (cancelled) {
showToast('toast.api.operationCancelled', {}, 'info');
break;
}
try {
if (mode === 'replace') {
await apiClient.saveModelMetadata(filePath, { tags: tags });
@@ -994,50 +1004,50 @@ export class BulkManager {
failCount++;
}
}
modalManager.closeModal('bulkAddTagsModal');
if (successCount > 0) {
const currentConfig = this.getCurrentDisplayConfig();
const toastKey = mode === 'replace' ? 'toast.models.tagsReplacedSuccessfully' : 'toast.models.tagsAddedSuccessfully';
showToast(toastKey, {
count: successCount,
showToast(toastKey, {
count: successCount,
tagCount: tags.length,
type: currentConfig.displayName.toLowerCase()
type: currentConfig.displayName.toLowerCase()
}, 'success');
}
if (failCount > 0) {
const toastKey = mode === 'replace' ? 'toast.models.tagsReplaceFailed' : 'toast.models.tagsAddFailed';
showToast(toastKey, { count: failCount }, 'warning');
}
} catch (error) {
console.error('Error during bulk tag operation:', error);
const toastKey = mode === 'replace' ? 'toast.models.bulkTagsReplaceFailed' : 'toast.models.bulkTagsAddFailed';
showToast(toastKey, {}, 'error');
}
}
cleanupBulkAddTagsModal() {
// Clear tags container
const tagsContainer = document.getElementById('bulkTagsItems');
if (tagsContainer) {
tagsContainer.innerHTML = '';
}
// Clear input
const input = document.querySelector('.bulk-metadata-input');
if (input) {
input.value = '';
}
// Remove event listeners (they will be re-added when modal opens again)
const appendBtn = document.querySelector('.bulk-append-tags-btn');
if (appendBtn) {
appendBtn.replaceWith(appendBtn.cloneNode(true));
}
const replaceBtn = document.querySelector('.bulk-replace-tags-btn');
if (replaceBtn) {
replaceBtn.replaceWith(replaceBtn.cloneNode(true));
@@ -1140,6 +1150,10 @@ export class BulkManager {
const levelName = getNSFWLevelName(level);
state.loadingManager.showSimpleLoading(translate('toast.models.bulkContentRatingUpdating', { count: totalCount }));
let cancelled = false;
state.loadingManager.showCancelButton(() => {
cancelled = true;
});
let successCount = 0;
let failureCount = 0;
@@ -1147,6 +1161,10 @@ export class BulkManager {
try {
const apiClient = getModelApiClient();
for (const filePath of targets) {
if (cancelled) {
showToast('toast.api.operationCancelled', {}, 'info');
break;
}
try {
await apiClient.saveModelMetadata(filePath, { preview_nsfw_level: level });
successCount++;
@@ -1180,10 +1198,10 @@ export class BulkManager {
initializeBulkBaseModelInterface() {
const select = document.getElementById('bulkBaseModelSelect');
if (!select) return;
// Clear existing options
select.innerHTML = '';
// Add placeholder option
const placeholderOption = document.createElement('option');
placeholderOption.value = '';
@@ -1191,23 +1209,23 @@ export class BulkManager {
placeholderOption.disabled = true;
placeholderOption.selected = true;
select.appendChild(placeholderOption);
// Create option groups for better organization
Object.entries(BASE_MODEL_CATEGORIES).forEach(([category, models]) => {
const optgroup = document.createElement('optgroup');
optgroup.label = category;
models.forEach(model => {
const option = document.createElement('option');
option.value = model;
option.textContent = model;
optgroup.appendChild(option);
});
select.appendChild(optgroup);
});
}
/**
* Save bulk base model changes
*/
@@ -1217,25 +1235,33 @@ export class BulkManager {
showToast('toast.models.baseModelNotSelected', {}, 'warning');
return;
}
const newBaseModel = select.value;
const selectedCount = state.selectedModels.size;
if (selectedCount === 0) {
showToast('toast.models.noModelsSelected', {}, 'warning');
return;
}
modalManager.closeModal('bulkBaseModelModal');
try {
let successCount = 0;
let errorCount = 0;
const errors = [];
let cancelled = false;
state.loadingManager.showSimpleLoading(translate('toast.models.bulkBaseModelUpdating'));
state.loadingManager.showCancelButton(() => {
cancelled = true;
});
for (const filepath of state.selectedModels) {
if (cancelled) {
showToast('toast.api.operationCancelled', {}, 'info');
break;
}
try {
await getModelApiClient().saveModelMetadata(filepath, { base_model: newBaseModel });
successCount++;
@@ -1245,19 +1271,19 @@ export class BulkManager {
console.error(`Failed to update base model for ${filepath}:`, error);
}
}
// Show results
if (errorCount === 0) {
showToast('toast.models.bulkBaseModelUpdateSuccess', { count: successCount }, 'success');
} else if (successCount > 0) {
showToast('toast.models.bulkBaseModelUpdatePartial', {
success: successCount,
failed: errorCount
showToast('toast.models.bulkBaseModelUpdatePartial', {
success: successCount,
failed: errorCount
}, 'warning');
} else {
showToast('toast.models.bulkBaseModelUpdateFailed', {}, 'error');
}
} catch (error) {
console.error('Error during bulk base model operation:', error);
showToast('toast.models.bulkBaseModelUpdateFailed', {}, 'error');
@@ -1265,7 +1291,7 @@ export class BulkManager {
state.loadingManager?.hide?.();
}
}
/**
* Cleanup bulk base model modal
*/
@@ -1288,13 +1314,13 @@ export class BulkManager {
try {
// Get selected file paths
const filePaths = Array.from(state.selectedModels);
// Get the API client for the current model type
const apiClient = getModelApiClient();
// Call the auto-organize method with selected file paths
await apiClient.autoOrganizeModels(filePaths);
resetAndReload(true);
} catch (error) {
console.error('Error during bulk auto-organize:', error);
@@ -1310,7 +1336,7 @@ export class BulkManager {
this.mouseDownTime = Date.now();
this.mouseDownPosition = { x: e.clientX, y: e.clientY };
this.isDragging = false;
// Don't start marquee yet - wait to see if user is dragging
return false;
}
@@ -1324,23 +1350,23 @@ export class BulkManager {
// Store initial mouse position
this.marqueeStart.x = this.mouseDownPosition.x;
this.marqueeStart.y = this.mouseDownPosition.y;
// Store initial selection state
this.initialSelectedModels = new Set(state.selectedModels);
// Enter bulk mode if not already active and we're actually dragging
if (isDragging && !state.bulkMode) {
this.toggleBulkMode();
}
// Create marquee element
this.createMarqueeElement();
this.isMarqueeActive = true;
// Update event manager state
eventManager.setState('marqueeActive', true);
// Add visual feedback class to body
document.body.classList.add('marquee-selecting');
}
@@ -1370,22 +1396,22 @@ export class BulkManager {
*/
updateMarqueeSelection(e) {
if (!this.marqueeElement) return;
const currentX = e.clientX;
const currentY = e.clientY;
// Calculate rectangle bounds
const left = Math.min(this.marqueeStart.x, currentX);
const top = Math.min(this.marqueeStart.y, currentY);
const width = Math.abs(currentX - this.marqueeStart.x);
const height = Math.abs(currentY - this.marqueeStart.y);
// Update marquee element position and size
this.marqueeElement.style.left = left + 'px';
this.marqueeElement.style.top = top + 'px';
this.marqueeElement.style.width = width + 'px';
this.marqueeElement.style.height = height + 'px';
// Check which cards intersect with marquee
this.updateCardSelection(left, top, left + width, top + height);
}
@@ -1396,18 +1422,18 @@ export class BulkManager {
updateCardSelection(left, top, right, bottom) {
const cards = document.querySelectorAll('.model-card');
const newSelection = new Set(this.initialSelectedModels);
cards.forEach(card => {
const rect = card.getBoundingClientRect();
// Check if card intersects with marquee rectangle
const intersects = !(rect.right < left ||
rect.left > right ||
rect.bottom < top ||
rect.top > bottom);
const intersects = !(rect.right < left ||
rect.left > right ||
rect.bottom < top ||
rect.top > bottom);
const filepath = card.dataset.filepath;
if (intersects) {
// Add to selection if intersecting
newSelection.add(filepath);
@@ -1424,10 +1450,10 @@ export class BulkManager {
card.classList.remove('selected');
}
});
// Update global selection state
state.selectedModels = newSelection;
// Update context menu header if visible
if (this.bulkContextMenu) {
this.bulkContextMenu.updateSelectedCountHeader();
@@ -1442,29 +1468,29 @@ export class BulkManager {
this.isMarqueeActive = false;
this.isDragging = false;
this.mouseDownTime = 0;
// Update event manager state
eventManager.setState('marqueeActive', false);
// Remove marquee element
if (this.marqueeElement) {
this.marqueeElement.remove();
this.marqueeElement = null;
}
// Remove visual feedback class
document.body.classList.remove('marquee-selecting');
// Get selection count
const selectionCount = state.selectedModels.size;
// If no models were selected, exit bulk mode
if (selectionCount === 0) {
if (state.bulkMode) {
this.toggleBulkMode();
}
}
// Clear initial selection state
this.initialSelectedModels.clear();
}

View File

@@ -39,6 +39,25 @@ export class LoadingManager {
this.loadingContent.appendChild(this.statusText);
}
this.cancelButton = this.loadingContent.querySelector('.loading-cancel');
if (!this.cancelButton) {
this.cancelButton = document.createElement('button');
this.cancelButton.className = 'loading-cancel secondary-btn';
this.cancelButton.style.display = 'none';
this.cancelButton.style.margin = 'var(--space-2) auto 0';
this.cancelButton.textContent = translate('common.actions.cancel', {}, 'Cancel');
this.loadingContent.appendChild(this.cancelButton);
}
this.onCancelCallback = null;
this.cancelButton.onclick = () => {
if (this.onCancelCallback) {
this.onCancelCallback();
this.cancelButton.disabled = true;
this.cancelButton.textContent = translate('common.status.loading', {}, 'Loading...');
}
};
this.detailsContainer = null; // Will be created when needed
}
@@ -46,7 +65,7 @@ export class LoadingManager {
this.overlay.style.display = 'flex';
this.setProgress(progress);
this.setStatus(message);
// Remove any existing details container
this.removeDetailsContainer();
}
@@ -70,26 +89,43 @@ export class LoadingManager {
this.setProgress(0);
this.setStatus('');
this.removeDetailsContainer();
this.hideCancelButton();
this.progressBar.style.display = 'block';
}
showCancelButton(onCancel) {
if (this.cancelButton) {
this.onCancelCallback = onCancel;
this.cancelButton.style.display = 'flex';
this.cancelButton.disabled = false;
this.cancelButton.textContent = translate('common.actions.cancel', {}, 'Cancel');
}
}
hideCancelButton() {
if (this.cancelButton) {
this.cancelButton.style.display = 'none';
this.onCancelCallback = null;
}
}
// Create a details container for enhanced progress display
createDetailsContainer() {
// Remove existing container if any
this.removeDetailsContainer();
// Create new container
this.detailsContainer = document.createElement('div');
this.detailsContainer.className = 'progress-details-container';
// Insert after the main progress bar
if (this.loadingContent) {
this.loadingContent.appendChild(this.detailsContainer);
}
return this.detailsContainer;
}
// Remove details container
removeDetailsContainer() {
if (this.detailsContainer) {
@@ -97,39 +133,39 @@ export class LoadingManager {
this.detailsContainer = null;
}
}
// Show enhanced progress for downloads
showDownloadProgress(totalItems = 1) {
this.show(translate('modals.download.status.preparing', {}, 'Preparing download...'), 0);
this.progressBar.style.display = 'none';
// Create details container
const detailsContainer = this.createDetailsContainer();
// Create current item progress
const currentItemContainer = document.createElement('div');
currentItemContainer.className = 'current-item-progress';
const currentItemLabel = document.createElement('div');
currentItemLabel.className = 'current-item-label';
currentItemLabel.textContent = translate('modals.download.progress.currentFile', {}, 'Current file:');
const currentItemBar = document.createElement('div');
currentItemBar.className = 'current-item-bar-container';
const currentItemProgress = document.createElement('div');
currentItemProgress.className = 'current-item-bar';
currentItemProgress.style.width = '0%';
const currentItemPercent = document.createElement('span');
currentItemPercent.className = 'current-item-percent';
currentItemPercent.textContent = '0%';
currentItemBar.appendChild(currentItemProgress);
currentItemContainer.appendChild(currentItemLabel);
currentItemContainer.appendChild(currentItemBar);
currentItemContainer.appendChild(currentItemPercent);
// Create overall progress elements if multiple items
let overallLabel = null;
if (totalItems > 1) {
@@ -138,7 +174,7 @@ export class LoadingManager {
overallLabel.textContent = `Overall progress (0/${totalItems} complete):`;
detailsContainer.appendChild(overallLabel);
}
// Add current item progress to container
detailsContainer.appendChild(currentItemContainer);
@@ -217,13 +253,13 @@ export class LoadingManager {
// Initialize transfer stats with empty data
updateTransferStats();
// Return update function
return (currentProgress, currentIndex = 0, currentName = '', metrics = {}) => {
// Update current item progress
currentItemProgress.style.width = `${currentProgress}%`;
currentItemPercent.textContent = `${Math.floor(currentProgress)}%`;
// Update current item label if name provided
if (currentName) {
currentItemLabel.textContent = translate(
@@ -232,13 +268,13 @@ export class LoadingManager {
`Downloading: ${currentName}`
);
}
// Update overall label if multiple items
if (totalItems > 1 && overallLabel) {
overallLabel.textContent = `Overall progress (${currentIndex}/${totalItems} complete):`;
// Calculate and update overall progress
const overallProgress = Math.floor((currentIndex + currentProgress/100) / totalItems * 100);
const overallProgress = Math.floor((currentIndex + currentProgress / 100) / totalItems * 100);
this.setProgress(overallProgress);
} else {
// Single item, just update main progress
@@ -251,7 +287,7 @@ export class LoadingManager {
async showWithProgress(callback, options = {}) {
const { initialMessage = 'Processing...', completionMessage = 'Complete' } = options;
try {
this.show(initialMessage);
await callback(this);
@@ -266,16 +302,20 @@ export class LoadingManager {
// Enhanced progress display without callback pattern
showEnhancedProgress(message = 'Processing...') {
this.show(message, 0);
// Return update functions
return {
updateProgress: (percent, currentItem = '', statusMessage = '') => {
this.setProgress(percent);
this.setProgress(percent);
if (statusMessage) {
this.setStatus(statusMessage);
}
},
showCancelButton: (onCancel) => {
this.showCancelButton(onCancel);
},
complete: async (completionMessage = 'Complete') => {
this.setProgress(100);
this.setStatus(completionMessage);

View File

@@ -2,7 +2,7 @@ import { state } from '../state/index.js';
import { translate } from './i18nHelpers.js';
import { showToast } from './uiHelpers.js';
import { getCompleteApiConfig, getCurrentModelType } from '../api/apiConfig.js';
import { resetAndReload } from '../api/modelApiFactory.js';
import { resetAndReload, getModelApiClient } from '../api/modelApiFactory.js';
import { getStorageItem, setStorageItem } from './storageHelpers.js';
import { modalManager } from '../managers/ModalManager.js';
@@ -18,6 +18,7 @@ const CHECK_UPDATES_CONFIRMATION_KEY = 'ack_check_updates_for_all_models';
export async function performModelUpdateCheck({ onStart, onComplete } = {}) {
const modelType = getCurrentModelType();
const apiConfig = getCompleteApiConfig(modelType);
const apiClient = getModelApiClient(modelType);
const displayName = apiConfig?.config?.displayName ?? 'Model';
if (!apiConfig?.endpoints?.refreshUpdates) {
@@ -41,6 +42,7 @@ export async function performModelUpdateCheck({ onStart, onComplete } = {}) {
onStart?.({ displayName, loadingMessage });
state.loadingManager?.showSimpleLoading?.(loadingMessage);
state.loadingManager?.showCancelButton?.(() => apiClient.cancelTask());
let status = 'success';
let records = [];
@@ -61,6 +63,10 @@ export async function performModelUpdateCheck({ onStart, onComplete } = {}) {
}
if (!response.ok || payload.success !== true) {
if (payload?.status === 'cancelled') {
showToast('toast.api.operationCancelled', {}, 'info');
return { status: 'cancelled', displayName, records: [], error: null };
}
const errorMessage = payload?.error || response.statusText || 'Unknown error';
throw new Error(errorMessage);
}

View File

@@ -168,6 +168,17 @@ class MockScanner:
self.updated_models: List[Dict[str, Any]] = []
self.preview_updates: List[Dict[str, Any]] = []
self.bulk_deleted: List[Sequence[str]] = []
self._cancelled = False
self.model_type = "test-model"
def is_cancelled(self) -> bool:
    """Return True once :meth:`cancel_task` has been called."""
    return self._cancelled
def cancel_task(self) -> None:
    """Set the cancellation flag, mirroring the real scanner's cancel API."""
    self._cancelled = True
def reset_cancellation(self) -> None:
    """Clear the cancellation flag so the mock can be reused between tests."""
    self._cancelled = False
async def get_cached_data(self, force_refresh: bool = False):
    """Return the pre-seeded cache; *force_refresh* is accepted but ignored."""
    return self._cache

View File

@@ -14,6 +14,13 @@ from py.services.model_update_service import ModelUpdateRecord, ModelVersionReco
class DummyScanner:
def __init__(self, cache):
    # Pre-seeded cache object handed back verbatim by get_cached_data().
    self._cache = cache
    # Cancellation flag queried via is_cancelled(); starts unset.
    self._cancelled = False
def is_cancelled(self) -> bool:
    """Return the current cancellation flag (always False unless set by a test)."""
    return self._cancelled
def reset_cancellation(self) -> None:
    """Clear the cancellation flag, matching the real scanner interface."""
    self._cancelled = False
async def get_cached_data(self):
    """Return the cache object supplied at construction time."""
    return self._cache

View File

@@ -15,6 +15,13 @@ from py.services.model_update_service import (
class DummyScanner:
def __init__(self, raw_data):
    # Wrap raw_data in a namespace shaped like the real scanner cache
    # (raw_data plus an empty version index).
    self._cache = SimpleNamespace(raw_data=raw_data, version_index={})
    # Cancellation flag queried via is_cancelled(); starts unset.
    self._cancelled = False
def is_cancelled(self) -> bool:
    """Return the current cancellation flag (always False unless set by a test)."""
    return self._cancelled
def reset_cancellation(self) -> None:
    """Clear the cancellation flag, matching the real scanner interface."""
    self._cancelled = False
async def get_cached_data(self, *args, **kwargs):
    """Return the pre-built cache; extra arguments are accepted but ignored."""
    return self._cache