fix(backup): add user-state backup UI and storage

This commit is contained in:
Will Miao
2026-04-10 20:49:30 +08:00
parent 85b6c91192
commit 72f8e0d1be
25 changed files with 1825 additions and 9 deletions

View File

@@ -263,6 +263,7 @@
"videoSettings": "Video-Einstellungen",
"layoutSettings": "Layout-Einstellungen",
"misc": "Verschiedenes",
"backup": "Backups",
"folderSettings": "Standard-Roots",
"recipeSettings": "Rezepte",
"extraFolderPaths": "Zusätzliche Ordnerpfade",
@@ -324,6 +325,31 @@
"saveFailed": "Übersprungene Pfade konnten nicht gespeichert werden: {message}"
}
},
"backup": {
"autoEnabled": "Automatische Backups",
"autoEnabledHelp": "Erstellt einmal täglich einen lokalen Schnappschuss und behält die neuesten Schnappschüsse gemäß der Aufbewahrungsrichtlinie.",
"retention": "Aufbewahrungsanzahl",
"retentionHelp": "Wie viele automatische Schnappschüsse behalten werden, bevor ältere entfernt werden.",
"management": "Backup-Verwaltung",
"managementHelp": "Exportiere deinen aktuellen Benutzerstatus oder stelle ihn aus einem Backup-Archiv wieder her.",
"locationSummary": "Aktueller Backup-Speicherort",
"openFolderButton": "Backup-Ordner öffnen",
"openFolderSuccess": "Backup-Ordner geöffnet",
"openFolderFailed": "Backup-Ordner konnte nicht geöffnet werden",
"locationCopied": "Backup-Pfad in die Zwischenablage kopiert: {{path}}",
"locationClipboardFallback": "Backup-Pfad: {{path}}",
"exportButton": "Backup exportieren",
"exportSuccess": "Backup erfolgreich exportiert.",
"exportFailed": "Backup konnte nicht exportiert werden: {message}",
"importButton": "Backup importieren",
"importConfirm": "Dieses Backup importieren und den lokalen Benutzerstatus überschreiben?",
"importSuccess": "Backup erfolgreich importiert.",
"importFailed": "Backup konnte nicht importiert werden: {message}",
"latestSnapshot": "Neuester Schnappschuss",
"latestAutoSnapshot": "Neuester automatischer Schnappschuss",
"snapshotCount": "Gespeicherte Schnappschüsse",
"noneAvailable": "Noch keine Schnappschüsse vorhanden"
},
"downloadSkipBaseModels": {
"label": "Downloads für Basismodelle überspringen",
"help": "Gilt für alle Download-Abläufe. Hier können nur unterstützte Basismodelle ausgewählt werden.",

View File

@@ -263,6 +263,7 @@
"videoSettings": "Video Settings",
"layoutSettings": "Layout Settings",
"misc": "Miscellaneous",
"backup": "Backups",
"folderSettings": "Default Roots",
"recipeSettings": "Recipes",
"extraFolderPaths": "Extra Folder Paths",
@@ -324,6 +325,31 @@
"saveFailed": "Unable to save skip paths: {message}"
}
},
"backup": {
"autoEnabled": "Automatic backups",
"autoEnabledHelp": "Create a local snapshot once per day and keep the latest snapshots according to the retention policy.",
"retention": "Retention count",
"retentionHelp": "How many automatic snapshots to keep before older ones are pruned.",
"management": "Backup management",
"managementHelp": "Export your current user state or restore it from a backup archive.",
"locationSummary": "Current backup location",
"openFolderButton": "Open backup folder",
"openFolderSuccess": "Opened backup folder",
"openFolderFailed": "Failed to open backup folder",
"locationCopied": "Backup path copied to clipboard: {{path}}",
"locationClipboardFallback": "Backup path: {{path}}",
"exportButton": "Export backup",
"exportSuccess": "Backup exported successfully.",
"exportFailed": "Failed to export backup: {message}",
"importButton": "Import backup",
"importConfirm": "Import this backup and overwrite local user state?",
"importSuccess": "Backup imported successfully.",
"importFailed": "Failed to import backup: {message}",
"latestSnapshot": "Latest snapshot",
"latestAutoSnapshot": "Latest automatic snapshot",
"snapshotCount": "Saved snapshots",
"noneAvailable": "No snapshots yet"
},
"downloadSkipBaseModels": {
"label": "Skip downloads for base models",
"help": "When enabled, versions using the selected base models will be skipped.",

View File

@@ -263,6 +263,7 @@
"videoSettings": "Configuración de video",
"layoutSettings": "Configuración de diseño",
"misc": "Varios",
"backup": "Copias de seguridad",
"folderSettings": "Raíces predeterminadas",
"recipeSettings": "Recetas",
"extraFolderPaths": "Rutas de carpetas adicionales",
@@ -324,6 +325,31 @@
"saveFailed": "No se pudieron guardar las rutas a omitir: {message}"
}
},
"backup": {
"autoEnabled": "Copias de seguridad automáticas",
"autoEnabledHelp": "Crea una instantánea local una vez al día y conserva las más recientes según la política de retención.",
"retention": "Cantidad de retención",
"retentionHelp": "Cuántas instantáneas automáticas conservar antes de eliminar las antiguas.",
"management": "Gestión de copias",
"managementHelp": "Exporta tu estado de usuario actual o restáuralo desde un archivo de copia de seguridad.",
"locationSummary": "Ubicación actual de la copia",
"openFolderButton": "Abrir carpeta de copias",
"openFolderSuccess": "Carpeta de copias abierta",
"openFolderFailed": "No se pudo abrir la carpeta de copias",
"locationCopied": "Ruta de la copia copiada al portapapeles: {{path}}",
"locationClipboardFallback": "Ruta de la copia: {{path}}",
"exportButton": "Exportar copia",
"exportSuccess": "Copia exportada correctamente.",
"exportFailed": "No se pudo exportar la copia: {message}",
"importButton": "Importar copia",
"importConfirm": "¿Importar esta copia y sobrescribir el estado local del usuario?",
"importSuccess": "Copia importada correctamente.",
"importFailed": "No se pudo importar la copia: {message}",
"latestSnapshot": "Última instantánea",
"latestAutoSnapshot": "Última instantánea automática",
"snapshotCount": "Instantáneas guardadas",
"noneAvailable": "Aún no hay instantáneas"
},
"downloadSkipBaseModels": {
"label": "Omitir descargas para modelos base",
"help": "Se aplica a todos los flujos de descarga. Aquí solo se pueden seleccionar modelos base compatibles.",

View File

@@ -263,6 +263,7 @@
"videoSettings": "Paramètres vidéo",
"layoutSettings": "Paramètres d'affichage",
"misc": "Divers",
"backup": "Sauvegardes",
"folderSettings": "Racines par défaut",
"recipeSettings": "Recipes",
"extraFolderPaths": "Chemins de dossiers supplémentaires",
@@ -324,6 +325,31 @@
"saveFailed": "Impossible d'enregistrer les chemins à ignorer : {message}"
}
},
"backup": {
"autoEnabled": "Sauvegardes automatiques",
"autoEnabledHelp": "Crée un instantané local une fois par jour et conserve les plus récents selon la politique de rétention.",
"retention": "Nombre de rétention",
"retentionHelp": "Combien d'instantanés automatiques conserver avant de supprimer les plus anciens.",
"management": "Gestion des sauvegardes",
"managementHelp": "Exporte l'état actuel de l'utilisateur ou restaure-le depuis une archive de sauvegarde.",
"locationSummary": "Emplacement actuel des sauvegardes",
"openFolderButton": "Ouvrir le dossier de sauvegarde",
"openFolderSuccess": "Dossier de sauvegarde ouvert",
"openFolderFailed": "Impossible d'ouvrir le dossier de sauvegarde",
"locationCopied": "Chemin de sauvegarde copié dans le presse-papiers : {{path}}",
"locationClipboardFallback": "Chemin de sauvegarde : {{path}}",
"exportButton": "Exporter la sauvegarde",
"exportSuccess": "Sauvegarde exportée avec succès.",
"exportFailed": "Échec de l'export de la sauvegarde : {message}",
"importButton": "Importer la sauvegarde",
"importConfirm": "Importer cette sauvegarde et écraser l'état local de l'utilisateur ?",
"importSuccess": "Sauvegarde importée avec succès.",
"importFailed": "Échec de l'import de la sauvegarde : {message}",
"latestSnapshot": "Dernier instantané",
"latestAutoSnapshot": "Dernier instantané automatique",
"snapshotCount": "Instantanés enregistrés",
"noneAvailable": "Aucun instantané pour le moment"
},
"downloadSkipBaseModels": {
"label": "Ignorer les téléchargements pour certains modèles de base",
"help": "Sapplique à tous les flux de téléchargement. Seuls les modèles de base pris en charge peuvent être sélectionnés ici.",

View File

@@ -263,6 +263,7 @@
"videoSettings": "הגדרות וידאו",
"layoutSettings": "הגדרות פריסה",
"misc": "שונות",
"backup": "גיבויים",
"folderSettings": "תיקיות ברירת מחדל",
"recipeSettings": "מתכונים",
"extraFolderPaths": "נתיבי תיקיות נוספים",
@@ -324,6 +325,31 @@
"saveFailed": "לא ניתן לשמור נתיבי דילוג: {message}"
}
},
"backup": {
"autoEnabled": "גיבויים אוטומטיים",
"autoEnabledHelp": "יוצר צילום מצב מקומי פעם ביום ושומר את הצילומים האחרונים לפי מדיניות השמירה.",
"retention": "כמות שמירה",
"retentionHelp": "כמה צילומי מצב אוטומטיים לשמור לפני שמסירים ישנים.",
"management": "ניהול גיבויים",
"managementHelp": "ייצא את מצב המשתמש הנוכחי או שחזר אותו מארכיון גיבוי.",
"locationSummary": "מיקום הגיבוי הנוכחי",
"openFolderButton": "פתח את תיקיית הגיבויים",
"openFolderSuccess": "תיקיית הגיבויים נפתחה",
"openFolderFailed": "לא ניתן היה לפתוח את תיקיית הגיבויים",
"locationCopied": "נתיב הגיבוי הועתק ללוח: {{path}}",
"locationClipboardFallback": "נתיב הגיבוי: {{path}}",
"exportButton": "ייצא גיבוי",
"exportSuccess": "הגיבוי יוצא בהצלחה.",
"exportFailed": "נכשל ייצוא הגיבוי: {message}",
"importButton": "ייבא גיבוי",
"importConfirm": "לייבא את הגיבוי הזה ולדרוס את מצב המשתמש המקומי?",
"importSuccess": "הגיבוי יובא בהצלחה.",
"importFailed": "נכשל ייבוא הגיבוי: {message}",
"latestSnapshot": "צילום המצב האחרון",
"latestAutoSnapshot": "צילום המצב האוטומטי האחרון",
"snapshotCount": "צילומי מצב שמורים",
"noneAvailable": "עדיין אין צילומי מצב"
},
"downloadSkipBaseModels": {
"label": "דלג על הורדות עבור מודלי בסיס",
"help": "חל על כל תהליכי ההורדה. ניתן לבחור כאן רק מודלי בסיס נתמכים.",

View File

@@ -263,6 +263,7 @@
"videoSettings": "動画設定",
"layoutSettings": "レイアウト設定",
"misc": "その他",
"backup": "バックアップ",
"folderSettings": "デフォルトルート",
"recipeSettings": "レシピ",
"extraFolderPaths": "追加フォルダーパス",
@@ -324,6 +325,31 @@
"saveFailed": "スキップパスの保存に失敗しました:{message}"
}
},
"backup": {
"autoEnabled": "自動バックアップ",
"autoEnabledHelp": "1日1回ローカルのスナップショットを作成し、保持ポリシーに従って最新のものを残します。",
"retention": "保持数",
"retentionHelp": "古いものを削除する前に、何件の自動スナップショットを保持するかを指定します。",
"management": "バックアップ管理",
"managementHelp": "現在のユーザー状態をエクスポートするか、バックアップアーカイブから復元します。",
"locationSummary": "現在のバックアップ場所",
"openFolderButton": "バックアップフォルダを開く",
"openFolderSuccess": "バックアップフォルダを開きました",
"openFolderFailed": "バックアップフォルダを開けませんでした",
"locationCopied": "バックアップパスをクリップボードにコピーしました: {{path}}",
"locationClipboardFallback": "バックアップパス: {{path}}",
"exportButton": "バックアップをエクスポート",
"exportSuccess": "バックアップを正常にエクスポートしました。",
"exportFailed": "バックアップのエクスポートに失敗しました: {message}",
"importButton": "バックアップをインポート",
"importConfirm": "このバックアップをインポートして、ローカルのユーザー状態を上書きしますか?",
"importSuccess": "バックアップを正常にインポートしました。",
"importFailed": "バックアップのインポートに失敗しました: {message}",
"latestSnapshot": "最新のスナップショット",
"latestAutoSnapshot": "最新の自動スナップショット",
"snapshotCount": "保存済みスナップショット",
"noneAvailable": "まだスナップショットはありません"
},
"downloadSkipBaseModels": {
"label": "ベースモデルのダウンロードをスキップ",
"help": "すべてのダウンロードフローに適用されます。ここでは対応しているベースモデルのみ選択できます。",

View File

@@ -263,6 +263,7 @@
"videoSettings": "비디오 설정",
"layoutSettings": "레이아웃 설정",
"misc": "기타",
"backup": "백업",
"folderSettings": "기본 루트",
"recipeSettings": "레시피",
"extraFolderPaths": "추가 폴다 경로",
@@ -324,6 +325,31 @@
"saveFailed": "건너뛰기 경로를 저장할 수 없습니다: {message}"
}
},
"backup": {
"autoEnabled": "자동 백업",
"autoEnabledHelp": "하루에 한 번 로컬 스냅샷을 만들고 보존 정책에 따라 최신 스냅샷을 유지합니다.",
"retention": "보존 개수",
"retentionHelp": "오래된 자동 스냅샷을 삭제하기 전에 몇 개를 유지할지 지정합니다.",
"management": "백업 관리",
"managementHelp": "현재 사용자 상태를 내보내거나 백업 아카이브에서 복원합니다.",
"locationSummary": "현재 백업 위치",
"openFolderButton": "백업 폴더 열기",
"openFolderSuccess": "백업 폴더를 열었습니다",
"openFolderFailed": "백업 폴더를 열지 못했습니다",
"locationCopied": "백업 경로를 클립보드에 복사했습니다: {{path}}",
"locationClipboardFallback": "백업 경로: {{path}}",
"exportButton": "백업 내보내기",
"exportSuccess": "백업을 성공적으로 내보냈습니다.",
"exportFailed": "백업 내보내기에 실패했습니다: {message}",
"importButton": "백업 가져오기",
"importConfirm": "이 백업을 가져와서 로컬 사용자 상태를 덮어쓰시겠습니까?",
"importSuccess": "백업을 성공적으로 가져왔습니다.",
"importFailed": "백업 가져오기에 실패했습니다: {message}",
"latestSnapshot": "최근 스냅샷",
"latestAutoSnapshot": "최근 자동 스냅샷",
"snapshotCount": "저장된 스냅샷",
"noneAvailable": "아직 스냅샷이 없습니다"
},
"downloadSkipBaseModels": {
"label": "기본 모델 다운로드 건너뛰기",
"help": "모든 다운로드 흐름에 적용됩니다. 여기서는 지원되는 기본 모델만 선택할 수 있습니다.",

View File

@@ -263,6 +263,7 @@
"videoSettings": "Настройки видео",
"layoutSettings": "Настройки макета",
"misc": "Разное",
"backup": "Резервные копии",
"folderSettings": "Корневые папки",
"recipeSettings": "Рецепты",
"extraFolderPaths": "Дополнительные пути к папкам",
@@ -324,6 +325,31 @@
"saveFailed": "Не удалось сохранить пути для пропуска: {message}"
}
},
"backup": {
"autoEnabled": "Автоматические резервные копии",
"autoEnabledHelp": "Создаёт локальный снимок раз в день и хранит последние снимки согласно политике хранения.",
"retention": "Количество хранения",
"retentionHelp": "Сколько автоматических снимков сохранять перед удалением старых.",
"management": "Управление резервными копиями",
"managementHelp": "Экспортируйте текущее состояние пользователя или восстановите его из архива резервной копии.",
"locationSummary": "Текущее расположение резервных копий",
"openFolderButton": "Открыть папку резервных копий",
"openFolderSuccess": "Папка резервных копий открыта",
"openFolderFailed": "Не удалось открыть папку резервных копий",
"locationCopied": "Путь к резервной копии скопирован в буфер обмена: {{path}}",
"locationClipboardFallback": "Путь к резервной копии: {{path}}",
"exportButton": "Экспортировать резервную копию",
"exportSuccess": "Резервная копия успешно экспортирована.",
"exportFailed": "Не удалось экспортировать резервную копию: {message}",
"importButton": "Импортировать резервную копию",
"importConfirm": "Импортировать эту резервную копию и перезаписать локальное состояние пользователя?",
"importSuccess": "Резервная копия успешно импортирована.",
"importFailed": "Не удалось импортировать резервную копию: {message}",
"latestSnapshot": "Последний снимок",
"latestAutoSnapshot": "Последний автоматический снимок",
"snapshotCount": "Сохранённые снимки",
"noneAvailable": "Снимков пока нет"
},
"downloadSkipBaseModels": {
"label": "Пропускать загрузки для базовых моделей",
"help": "Применяется ко всем сценариям загрузки. Здесь можно выбрать только поддерживаемые базовые модели.",

View File

@@ -263,6 +263,7 @@
"videoSettings": "视频设置",
"layoutSettings": "布局设置",
"misc": "其他",
"backup": "备份",
"folderSettings": "默认根目录",
"recipeSettings": "配方",
"extraFolderPaths": "额外文件夹路径",
@@ -324,6 +325,31 @@
"saveFailed": "无法保存跳过路径:{message}"
}
},
"backup": {
"autoEnabled": "自动备份",
"autoEnabledHelp": "每天创建一次本地快照,并按保留策略保留最新快照。",
"retention": "保留数量",
"retentionHelp": "在删除旧快照之前,要保留多少个自动快照。",
"management": "备份管理",
"managementHelp": "导出当前用户状态,或从备份归档中恢复。",
"locationSummary": "当前备份位置",
"openFolderButton": "打开备份文件夹",
"openFolderSuccess": "已打开备份文件夹",
"openFolderFailed": "无法打开备份文件夹",
"locationCopied": "备份路径已复制到剪贴板:{{path}}",
"locationClipboardFallback": "备份路径:{{path}}",
"exportButton": "导出备份",
"exportSuccess": "备份导出成功。",
"exportFailed": "备份导出失败:{message}",
"importButton": "导入备份",
"importConfirm": "导入此备份并覆盖本地用户状态吗?",
"importSuccess": "备份导入成功。",
"importFailed": "备份导入失败:{message}",
"latestSnapshot": "最近快照",
"latestAutoSnapshot": "最近自动快照",
"snapshotCount": "已保存快照",
"noneAvailable": "还没有快照"
},
"downloadSkipBaseModels": {
"label": "跳过这些基础模型的下载",
"help": "适用于所有下载流程。这里只能选择受支持的基础模型。",

View File

@@ -263,6 +263,7 @@
"videoSettings": "影片設定",
"layoutSettings": "版面設定",
"misc": "其他",
"backup": "備份",
"folderSettings": "預設根目錄",
"recipeSettings": "配方",
"extraFolderPaths": "額外資料夾路徑",
@@ -324,6 +325,31 @@
"saveFailed": "無法儲存跳過路徑:{message}"
}
},
"backup": {
"autoEnabled": "自動備份",
"autoEnabledHelp": "每天建立一次本地快照,並依保留政策保留最新快照。",
"retention": "保留數量",
"retentionHelp": "在刪除舊快照之前,要保留多少自動快照。",
"management": "備份管理",
"managementHelp": "匯出目前的使用者狀態,或從備份封存中還原。",
"locationSummary": "目前備份位置",
"openFolderButton": "開啟備份資料夾",
"openFolderSuccess": "已開啟備份資料夾",
"openFolderFailed": "無法開啟備份資料夾",
"locationCopied": "備份路徑已複製到剪貼簿:{{path}}",
"locationClipboardFallback": "備份路徑:{{path}}",
"exportButton": "匯出備份",
"exportSuccess": "備份匯出成功。",
"exportFailed": "備份匯出失敗:{message}",
"importButton": "匯入備份",
"importConfirm": "要匯入此備份並覆寫本機使用者狀態嗎?",
"importSuccess": "備份匯入成功。",
"importFailed": "備份匯入失敗:{message}",
"latestSnapshot": "最近快照",
"latestAutoSnapshot": "最近自動快照",
"snapshotCount": "已儲存快照",
"noneAvailable": "目前還沒有快照"
},
"downloadSkipBaseModels": {
"label": "跳過這些基礎模型的下載",
"help": "適用於所有下載流程。這裡只能選擇受支援的基礎模型。",

View File

@@ -222,6 +222,7 @@ class LoraManager:
# Register DownloadManager with ServiceRegistry
await ServiceRegistry.get_download_manager()
await ServiceRegistry.get_backup_service()
from .services.metadata_service import initialize_metadata_providers

View File

@@ -9,11 +9,14 @@ objects that can be composed by the route controller.
from __future__ import annotations
import asyncio
import contextlib
import json
import logging
import os
import subprocess
import sys
import tempfile
import zipfile
from dataclasses import dataclass
from typing import Awaitable, Callable, Dict, Mapping, Protocol
@@ -130,6 +133,22 @@ class MetadataArchiveManagerProtocol(Protocol):
...
class BackupServiceProtocol(Protocol):
    """Structural interface the route handlers expect from the backup service.

    Matches the public surface of ``BackupService`` (create/restore snapshots
    plus status queries) without importing it, so handlers stay decoupled.
    """

    async def create_snapshot(
        self, *, snapshot_type: str = "manual", persist: bool = False
    ) -> dict:  # pragma: no cover - protocol
        ...

    async def restore_snapshot(self, archive_path: str) -> dict:  # pragma: no cover - protocol
        ...

    def get_status(self) -> dict:  # pragma: no cover - protocol
        ...

    def get_available_snapshots(self) -> list[dict]:  # pragma: no cover - protocol
        ...
class NodeRegistry:
"""Thread-safe registry for tracking LoRA nodes in active workflows."""
@@ -746,12 +765,17 @@ class ModelExampleFilesHandler:
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def _noop_backup_service() -> None:
return None
@dataclass
class ServiceRegistryAdapter:
    """Bundle of awaitable service factories handed to the route handlers."""

    get_lora_scanner: Callable[[], Awaitable]
    get_checkpoint_scanner: Callable[[], Awaitable]
    get_embedding_scanner: Callable[[], Awaitable]
    get_downloaded_version_history_service: Callable[[], Awaitable]
    # Defaults to a no-op factory so adapters built before the backup feature
    # existed keep working unchanged.
    get_backup_service: Callable[[], Awaitable] = _noop_backup_service
class ModelLibraryHandler:
@@ -1418,10 +1442,150 @@ class MetadataArchiveHandler:
return web.json_response({"success": False, "error": str(exc)}, status=500)
class BackupHandler:
    """Handler for user-state backup export/import."""

    def __init__(
        self,
        *,
        backup_service_factory: Callable[[], Awaitable[BackupServiceProtocol]] = ServiceRegistry.get_backup_service,
    ) -> None:
        # The factory is awaited on each request so the backup service can be
        # created lazily by the service registry.
        self._backup_service_factory = backup_service_factory

    async def get_backup_status(self, request: web.Request) -> web.Response:
        """Return backup configuration status plus the list of stored snapshots."""
        try:
            service = await self._backup_service_factory()
            return web.json_response(
                {
                    "success": True,
                    "status": service.get_status(),
                    "snapshots": service.get_available_snapshots(),
                }
            )
        except Exception as exc:  # pragma: no cover - defensive logging
            logger.error("Error getting backup status: %s", exc, exc_info=True)
            return web.json_response({"success": False, "error": str(exc)}, status=500)

    async def export_backup(self, request: web.Request) -> web.Response:
        """Create a manual, non-persisted snapshot and stream it as a ZIP download."""
        try:
            service = await self._backup_service_factory()
            # persist=False keeps the archive out of the retention-managed
            # folder; the bytes are returned directly to the client instead.
            result = await service.create_snapshot(snapshot_type="manual", persist=False)
            headers = {
                "Content-Type": "application/zip",
                "Content-Disposition": f'attachment; filename="{result["archive_name"]}"',
            }
            return web.Response(body=result["archive_bytes"], headers=headers)
        except Exception as exc:  # pragma: no cover - defensive logging
            logger.error("Error exporting backup: %s", exc, exc_info=True)
            return web.json_response({"success": False, "error": str(exc)}, status=500)

    async def import_backup(self, request: web.Request) -> web.Response:
        """Accept an uploaded backup archive and restore user state from it.

        Supports both multipart uploads (first part with a filename wins) and a
        raw request body. The upload is spooled to a temp file, which is always
        removed in ``finally``. Invalid/missing archives yield HTTP 400;
        unexpected failures yield HTTP 500.
        """
        temp_path: str | None = None
        try:
            fd, temp_path = tempfile.mkstemp(
                suffix=".zip", prefix="lora-manager-backup-"
            )
            # Only the path is needed; the data is written via open() below.
            os.close(fd)

            if request.content_type.startswith("multipart/"):
                reader = await request.multipart()
                field = await reader.next()
                uploaded = False
                # Scan parts until one carries a filename — that is the archive.
                while field is not None:
                    if getattr(field, "filename", None):
                        with open(temp_path, "wb") as handle:
                            while True:
                                chunk = await field.read_chunk()
                                if not chunk:
                                    break
                                handle.write(chunk)
                        uploaded = True
                        break
                    field = await reader.next()
                if not uploaded:
                    return web.json_response(
                        {"success": False, "error": "Missing backup archive"},
                        status=400,
                    )
            else:
                # Non-multipart request: the raw body is the archive bytes.
                body = await request.read()
                if not body:
                    return web.json_response(
                        {"success": False, "error": "Missing backup archive"},
                        status=400,
                    )
                with open(temp_path, "wb") as handle:
                    handle.write(body)

            # Guard against zero-byte uploads from either branch.
            if not temp_path or not os.path.exists(temp_path) or os.path.getsize(temp_path) == 0:
                return web.json_response(
                    {"success": False, "error": "Missing backup archive"},
                    status=400,
                )

            service = await self._backup_service_factory()
            result = await service.restore_snapshot(temp_path)
            return web.json_response({"success": True, **result})
        except (ValueError, zipfile.BadZipFile) as exc:
            # A corrupt or invalid archive is a client error, not a server fault.
            logger.error("Error importing backup: %s", exc, exc_info=True)
            return web.json_response({"success": False, "error": str(exc)}, status=400)
        except Exception as exc:  # pragma: no cover - defensive logging
            logger.error("Error importing backup: %s", exc, exc_info=True)
            return web.json_response({"success": False, "error": str(exc)}, status=500)
        finally:
            # Best-effort cleanup of the spooled upload.
            if temp_path and os.path.exists(temp_path):
                with contextlib.suppress(OSError):
                    os.remove(temp_path)
class FileSystemHandler:
def __init__(self, settings_service=None) -> None:
self._settings = settings_service or get_settings_manager()
    async def _open_path(self, path: str) -> web.Response:
        """Open *path* in the platform's file manager and report the outcome.

        Inside Docker there is no GUI, so the path is returned for clipboard
        use instead; under WSL the path is translated and handed to
        ``explorer.exe``. Returns 404 when the folder does not exist.
        """
        path = os.path.abspath(path)
        if not os.path.isdir(path):
            return web.json_response(
                {"success": False, "error": "Folder does not exist"},
                status=404,
            )
        if os.name == "nt":
            # Native Windows: Explorer opens the folder directly.
            subprocess.Popen(["explorer", path])
        elif os.name == "posix":
            if _is_docker():
                # Headless container: hand the path back to the UI instead.
                return web.json_response(
                    {
                        "success": True,
                        "message": "Running in Docker: Path available for copying",
                        "path": path,
                        "mode": "clipboard",
                    }
                )
            if _is_wsl():
                # WSL can launch Windows Explorer but needs a Windows-style path.
                windows_path = _wsl_to_windows_path(path)
                if windows_path:
                    subprocess.Popen(["explorer.exe", windows_path])
                else:
                    logger.error(
                        "Failed to convert WSL path to Windows path: %s", path
                    )
                    return web.json_response(
                        {
                            "success": False,
                            "error": "Failed to open folder location: path conversion error",
                        },
                        status=500,
                    )
            elif sys.platform == "darwin":
                subprocess.Popen(["open", path])
            else:
                # Generic Linux desktop.
                subprocess.Popen(["xdg-open", path])
        return web.json_response(
            {"success": True, "message": f"Opened folder: {path}", "path": path}
        )
async def open_file_location(self, request: web.Request) -> web.Response:
try:
data = await request.json()
@@ -1536,6 +1700,20 @@ class FileSystemHandler:
logger.error("Failed to open settings location: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
async def open_backup_location(self, request: web.Request) -> web.Response:
try:
settings_file = getattr(self._settings, "settings_file", None)
if not settings_file:
return web.json_response(
{"success": False, "error": "Settings file not found"}, status=404
)
backup_dir = os.path.join(os.path.dirname(os.path.abspath(settings_file)), "backups")
return await self._open_path(backup_dir)
except Exception as exc: # pragma: no cover - defensive logging
logger.error("Failed to open backup location: %s", exc, exc_info=True)
return web.json_response({"success": False, "error": str(exc)}, status=500)
class CustomWordsHandler:
"""Handler for autocomplete via TagFTSIndex."""
@@ -1840,6 +2018,7 @@ class MiscHandlerSet:
node_registry: NodeRegistryHandler,
model_library: ModelLibraryHandler,
metadata_archive: MetadataArchiveHandler,
backup: BackupHandler,
filesystem: FileSystemHandler,
custom_words: CustomWordsHandler,
supporters: SupportersHandler,
@@ -1855,6 +2034,7 @@ class MiscHandlerSet:
self.node_registry = node_registry
self.model_library = model_library
self.metadata_archive = metadata_archive
self.backup = backup
self.filesystem = filesystem
self.custom_words = custom_words
self.supporters = supporters
@@ -1886,9 +2066,13 @@ class MiscHandlerSet:
"download_metadata_archive": self.metadata_archive.download_metadata_archive,
"remove_metadata_archive": self.metadata_archive.remove_metadata_archive,
"get_metadata_archive_status": self.metadata_archive.get_metadata_archive_status,
"get_backup_status": self.backup.get_backup_status,
"export_backup": self.backup.export_backup,
"import_backup": self.backup.import_backup,
"get_model_versions_status": self.model_library.get_model_versions_status,
"open_file_location": self.filesystem.open_file_location,
"open_settings_location": self.filesystem.open_settings_location,
"open_backup_location": self.filesystem.open_backup_location,
"search_custom_words": self.custom_words.search_custom_words,
"get_supporters": self.supporters.get_supporters,
"get_example_workflows": self.example_workflows.get_example_workflows,
@@ -1907,4 +2091,5 @@ def build_service_registry_adapter() -> ServiceRegistryAdapter:
get_checkpoint_scanner=ServiceRegistry.get_checkpoint_scanner,
get_embedding_scanner=ServiceRegistry.get_embedding_scanner,
get_downloaded_version_history_service=ServiceRegistry.get_downloaded_version_history_service,
get_backup_service=ServiceRegistry.get_backup_service,
)

View File

@@ -62,6 +62,10 @@ MISC_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
RouteDefinition(
"GET", "/api/lm/metadata-archive-status", "get_metadata_archive_status"
),
RouteDefinition("GET", "/api/lm/backup/status", "get_backup_status"),
RouteDefinition("POST", "/api/lm/backup/export", "export_backup"),
RouteDefinition("POST", "/api/lm/backup/import", "import_backup"),
RouteDefinition("POST", "/api/lm/backup/open-location", "open_backup_location"),
RouteDefinition(
"GET", "/api/lm/model-versions-status", "get_model_versions_status"
),

View File

@@ -23,6 +23,7 @@ from .handlers.misc_handlers import (
FileSystemHandler,
HealthCheckHandler,
LoraCodeHandler,
BackupHandler,
MetadataArchiveHandler,
MiscHandlerSet,
ModelExampleFilesHandler,
@@ -116,6 +117,7 @@ class MiscRoutes:
settings_service=self._settings,
metadata_provider_updater=self._metadata_provider_updater,
)
backup = BackupHandler()
filesystem = FileSystemHandler(settings_service=self._settings)
node_registry_handler = NodeRegistryHandler(
node_registry=self._node_registry,
@@ -141,6 +143,7 @@ class MiscRoutes:
node_registry=node_registry_handler,
model_library=model_library,
metadata_archive=metadata_archive,
backup=backup,
filesystem=filesystem,
custom_words=custom_words,
supporters=supporters,

View File

@@ -0,0 +1,411 @@
from __future__ import annotations
import asyncio
import contextlib
import hashlib
import json
import logging
import os
import shutil
import tempfile
import time
import zipfile
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Iterable, Optional
from ..utils.cache_paths import CacheType, get_cache_base_dir, get_cache_file_path
from ..utils.settings_paths import get_settings_dir
from .settings_manager import get_settings_manager
logger = logging.getLogger(__name__)
BACKUP_MANIFEST_VERSION = 1
DEFAULT_BACKUP_RETENTION_COUNT = 5
DEFAULT_BACKUP_INTERVAL_SECONDS = 24 * 60 * 60
@dataclass(frozen=True)
class BackupEntry:
    """Immutable record describing one file captured in a backup archive."""

    # Category of the file, e.g. "settings", "download_history", "model_update".
    kind: str
    # Relative path of the file inside the ZIP archive.
    archive_path: str
    # Absolute path of the source file on disk.
    target_path: str
    # SHA-256 hex digest of the file contents at capture time.
    sha256: str
    # File size in bytes.
    size: int
    # Source file modification time (epoch seconds).
    mtime: float
class BackupService:
"""Create and restore user-state backup archives."""
_instance: "BackupService | None" = None
_instance_lock = asyncio.Lock()
    def __init__(self, *, settings_manager=None, backup_dir: str | None = None) -> None:
        """Initialize the service and ensure the backup directory exists.

        Args:
            settings_manager: Optional settings accessor; defaults to the
                global settings manager.
            backup_dir: Optional override for where archives are stored.
        """
        self._settings = settings_manager or get_settings_manager()
        self._backup_dir = Path(backup_dir or self._resolve_backup_dir())
        self._backup_dir.mkdir(parents=True, exist_ok=True)
        # Serializes snapshot creation/restoration across concurrent requests.
        self._lock = asyncio.Lock()
        # Background task driving periodic automatic snapshots (started lazily).
        self._auto_task: asyncio.Task[None] | None = None
    @classmethod
    async def get_instance(cls) -> "BackupService":
        """Return the process-wide singleton, creating it on first use."""
        async with cls._instance_lock:
            if cls._instance is None:
                cls._instance = cls()
                # Start the auto-snapshot loop once per process.
                cls._instance._ensure_auto_snapshot_task()
            return cls._instance
@staticmethod
def _resolve_backup_dir() -> str:
return os.path.join(get_settings_dir(create=True), "backups")
def get_backup_dir(self) -> str:
return str(self._backup_dir)
    def _ensure_auto_snapshot_task(self) -> None:
        """Start the automatic-snapshot loop unless one is already running."""
        if self._auto_task is not None and not self._auto_task.done():
            return
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            # No running event loop (called from sync context); a later call
            # from async code will start the task instead.
            return
        self._auto_task = loop.create_task(self._auto_backup_loop())
def _get_setting_bool(self, key: str, default: bool) -> bool:
try:
return bool(self._settings.get(key, default))
except Exception:
return default
def _get_setting_int(self, key: str, default: int) -> int:
try:
value = self._settings.get(key, default)
return max(1, int(value))
except Exception:
return default
def _settings_file_path(self) -> str:
settings_file = getattr(self._settings, "settings_file", None)
if settings_file:
return str(settings_file)
return os.path.join(get_settings_dir(create=True), "settings.json")
    def _download_history_path(self) -> str:
        """Return the downloaded-versions SQLite path, creating its directory."""
        base_dir = get_cache_base_dir(create=True)
        history_dir = os.path.join(base_dir, "download_history")
        # Ensure the directory exists even before any history has been written.
        os.makedirs(history_dir, exist_ok=True)
        return os.path.join(history_dir, "downloaded_versions.sqlite")
    def _model_update_dir(self) -> str:
        """Return the directory holding the model-update cache databases."""
        # get_cache_file_path returns a file path; its parent is the cache dir.
        return str(Path(get_cache_file_path(CacheType.MODEL_UPDATE, create_dir=True)).parent)
    def _model_update_targets(self) -> list[tuple[str, str, str]]:
        """Return (kind, archive_path, target_path) tuples for backup.

        Covers the settings file, the download-history database, the symlink
        map, and every ``*.sqlite`` file in the model-update cache directory.
        """
        targets: list[tuple[str, str, str]] = []

        # Main settings file.
        settings_path = self._settings_file_path()
        targets.append(("settings", "settings/settings.json", settings_path))

        # Download-history SQLite database.
        history_path = self._download_history_path()
        targets.append(
            (
                "download_history",
                "cache/download_history/downloaded_versions.sqlite",
                history_path,
            )
        )

        # Symlink map cache file.
        symlink_path = get_cache_file_path(CacheType.SYMLINK, create_dir=True)
        targets.append(
            (
                "symlink_map",
                "cache/symlink/symlink_map.json",
                symlink_path,
            )
        )

        # Model-update databases, in deterministic (sorted) order.
        model_update_dir = Path(self._model_update_dir())
        if model_update_dir.exists():
            for sqlite_file in sorted(model_update_dir.glob("*.sqlite")):
                targets.append(
                    (
                        "model_update",
                        f"cache/model_update/{sqlite_file.name}",
                        str(sqlite_file),
                    )
                )
        return targets
@staticmethod
def _hash_file(path: str) -> tuple[str, int, float]:
digest = hashlib.sha256()
total = 0
with open(path, "rb") as handle:
for chunk in iter(lambda: handle.read(1024 * 1024), b""):
total += len(chunk)
digest.update(chunk)
mtime = os.path.getmtime(path)
return digest.hexdigest(), total, mtime
def _build_manifest(self, entries: Iterable[BackupEntry], *, snapshot_type: str) -> dict[str, Any]:
created_at = datetime.now(timezone.utc).isoformat()
active_library = None
try:
active_library = self._settings.get_active_library_name()
except Exception:
active_library = None
return {
"manifest_version": BACKUP_MANIFEST_VERSION,
"created_at": created_at,
"snapshot_type": snapshot_type,
"active_library": active_library,
"files": [
{
"kind": entry.kind,
"archive_path": entry.archive_path,
"target_path": entry.target_path,
"sha256": entry.sha256,
"size": entry.size,
"mtime": entry.mtime,
}
for entry in entries
],
}
def _write_archive(self, archive_path: str, entries: list[BackupEntry], manifest: dict[str, Any]) -> None:
with zipfile.ZipFile(
archive_path,
mode="w",
compression=zipfile.ZIP_DEFLATED,
compresslevel=6,
) as zf:
zf.writestr(
"manifest.json",
json.dumps(manifest, indent=2, ensure_ascii=False).encode("utf-8"),
)
for entry in entries:
zf.write(entry.target_path, arcname=entry.archive_path)
    async def create_snapshot(self, *, snapshot_type: str = "manual", persist: bool = False) -> dict[str, Any]:
        """Create a backup archive.

        If ``persist`` is true, the archive is stored in the backup directory
        and retained according to the configured retention policy.

        Args:
            snapshot_type: Label recorded in the manifest and archive name
                (e.g. "manual" or "auto").
            persist: Store the archive on disk (True) or return its raw bytes
                (False).

        Returns:
            Dict with ``archive_name`` and ``manifest``, plus ``archive_path``
            when persisted or ``archive_bytes`` when not.

        Raises:
            FileNotFoundError: If none of the candidate files exist yet.
        """
        async with self._lock:
            raw_targets = self._model_update_targets()
            entries: list[BackupEntry] = []
            for kind, archive_path, target_path in raw_targets:
                # Skip files that don't exist yet (e.g. fresh installs).
                if not os.path.exists(target_path):
                    continue
                sha256, size, mtime = self._hash_file(target_path)
                entries.append(
                    BackupEntry(
                        kind=kind,
                        archive_path=archive_path,
                        target_path=target_path,
                        sha256=sha256,
                        size=size,
                        mtime=mtime,
                    )
                )
            if not entries:
                raise FileNotFoundError("No backupable files were found")
            manifest = self._build_manifest(entries, snapshot_type=snapshot_type)
            archive_name = self._build_archive_name(snapshot_type=snapshot_type)
            # Build in a temp file inside the backup dir so os.replace stays
            # on the same filesystem (atomic rename).
            fd, temp_path = tempfile.mkstemp(suffix=".zip", dir=str(self._backup_dir))
            os.close(fd)
            try:
                self._write_archive(temp_path, entries, manifest)
                if persist:
                    final_path = self._backup_dir / archive_name
                    os.replace(temp_path, final_path)
                    # Enforce the retention policy after each persisted snapshot.
                    self._prune_snapshots()
                    return {
                        "archive_path": str(final_path),
                        "archive_name": final_path.name,
                        "manifest": manifest,
                    }
                with open(temp_path, "rb") as handle:
                    data = handle.read()
                return {
                    "archive_name": archive_name,
                    "archive_bytes": data,
                    "manifest": manifest,
                }
            finally:
                # After os.replace the temp path no longer exists; suppress that.
                with contextlib.suppress(FileNotFoundError):
                    os.remove(temp_path)
def _build_archive_name(self, *, snapshot_type: str) -> str:
    """Build a timestamped archive filename.

    Example: ``lora-manager-backup-20250101T000000Z-auto.zip``. The
    ``-{type}.zip`` suffix is what retention pruning and the snapshot
    listing key off.
    """
    timestamp = datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
    return f"lora-manager-backup-{timestamp}-{snapshot_type}.zip"
def _prune_snapshots(self) -> None:
    """Delete the oldest automatic snapshots beyond the retention count.

    Only ``*-auto.zip`` archives are pruned; manual exports are never
    removed. The retention value is clamped to be non-negative: a
    misconfigured negative count would otherwise become a negative
    slice (``archives[-n:]``) that deletes from the *old* end of the
    newest-first list in a surprising way instead of keeping the newest
    N archives.
    """
    retention = max(
        0,
        self._get_setting_int(
            "backup_retention_count", DEFAULT_BACKUP_RETENTION_COUNT
        ),
    )
    # Newest first, so everything past index `retention` is stale.
    archives = sorted(
        self._backup_dir.glob("lora-manager-backup-*-auto.zip"),
        key=lambda path: path.stat().st_mtime,
        reverse=True,
    )
    for stale in archives[retention:]:
        # Best effort: a locked/vanished file must not abort pruning.
        with contextlib.suppress(OSError):
            stale.unlink()
async def restore_snapshot(self, archive_path: str) -> dict[str, Any]:
    """Restore backup contents from a ZIP archive.

    Validates the manifest, extracts every listed member to a scratch
    directory (verifying checksums), and only then moves the files over
    the live targets — so a corrupt archive cannot leave the user state
    half-overwritten.

    Raises:
        ValueError: on a malformed archive, manifest, member path, or
            checksum mismatch.
    """
    async with self._lock:
        try:
            zf = zipfile.ZipFile(archive_path, mode="r")
        except zipfile.BadZipFile as exc:
            raise ValueError("Backup archive is not a valid ZIP file") from exc
        with zf:
            try:
                manifest = json.loads(zf.read("manifest.json").decode("utf-8"))
            except KeyError as exc:
                # ZipFile.read raises KeyError for a missing member.
                raise ValueError("Backup archive is missing manifest.json") from exc
            if not isinstance(manifest, dict):
                raise ValueError("Backup manifest is invalid")
            if manifest.get("manifest_version") != BACKUP_MANIFEST_VERSION:
                raise ValueError("Backup manifest version is not supported")
            files = manifest.get("files", [])
            if not isinstance(files, list):
                raise ValueError("Backup manifest file list is invalid")
            extracted_paths: list[tuple[str, str]] = []
            temp_dir = Path(tempfile.mkdtemp(prefix="lora-manager-restore-"))
            try:
                for item in files:
                    if not isinstance(item, dict):
                        continue
                    archive_member = item.get("archive_path")
                    if not isinstance(archive_member, str) or not archive_member:
                        continue
                    # Zip-slip guard: reject absolute members and any
                    # ".." traversal before extracting.
                    archive_member_path = Path(archive_member)
                    if archive_member_path.is_absolute() or ".." in archive_member_path.parts:
                        raise ValueError(f"Invalid archive member path: {archive_member}")
                    kind = item.get("kind")
                    # Unknown kinds map to None and are skipped, so newer
                    # archives degrade gracefully on older installs.
                    target_path = self._resolve_restore_target(kind, archive_member)
                    if target_path is None:
                        continue
                    extracted_path = temp_dir / archive_member_path
                    extracted_path.parent.mkdir(parents=True, exist_ok=True)
                    with zf.open(archive_member) as source, open(
                        extracted_path, "wb"
                    ) as destination:
                        shutil.copyfileobj(source, destination)
                    # Verify integrity against the manifest checksum when
                    # one was recorded.
                    expected_hash = item.get("sha256")
                    if isinstance(expected_hash, str) and expected_hash:
                        actual_hash, _, _ = self._hash_file(str(extracted_path))
                        if actual_hash != expected_hash:
                            raise ValueError(
                                f"Checksum mismatch for {archive_member}"
                            )
                    extracted_paths.append((str(extracted_path), target_path))
                # Phase two: everything extracted and verified — now
                # swap the files into place.
                for extracted_path, target_path in extracted_paths:
                    os.makedirs(os.path.dirname(target_path), exist_ok=True)
                    os.replace(extracted_path, target_path)
            finally:
                shutil.rmtree(temp_dir, ignore_errors=True)
        return {
            "success": True,
            "restored_files": len(extracted_paths),
            "snapshot_type": manifest.get("snapshot_type"),
        }
def _resolve_restore_target(self, kind: Any, archive_member: str) -> str | None:
    """Map a manifest ``kind`` to the live path it should be restored over.

    Returns ``None`` for unrecognized kinds so callers can skip entries
    produced by newer versions of the backup format.
    """
    if kind == "model_update":
        cache_dir = Path(get_cache_file_path(CacheType.MODEL_UPDATE, create_dir=True)).parent
        return str(cache_dir / os.path.basename(archive_member))
    if kind == "symlink_map":
        return get_cache_file_path(CacheType.SYMLINK, create_dir=True)
    if kind == "download_history":
        return self._download_history_path()
    if kind == "settings":
        return self._settings_file_path()
    return None
async def create_auto_snapshot_if_due(self) -> Optional[dict[str, Any]]:
    """Create a persisted auto snapshot when enabled and the interval elapsed.

    Returns the new snapshot info dict, or ``None`` when auto backups are
    disabled or the latest auto snapshot is still fresh.
    """
    if not self._get_setting_bool("backup_auto_enabled", True):
        return None
    previous = self.get_latest_auto_snapshot()
    if previous is not None:
        age = time.time() - previous["mtime"]
        if age < DEFAULT_BACKUP_INTERVAL_SECONDS:
            return None
    return await self.create_snapshot(snapshot_type="auto", persist=True)
async def _auto_backup_loop(self) -> None:
    """Background task: periodically create an auto snapshot when due.

    Runs forever; cancellation propagates, any other failure is logged
    and retried after a short backoff so one bad cycle never kills the
    loop.
    """
    while True:
        try:
            await self.create_auto_snapshot_if_due()
            await asyncio.sleep(DEFAULT_BACKUP_INTERVAL_SECONDS)
        except asyncio.CancelledError:
            # Let task cancellation terminate the loop normally.
            raise
        except Exception as exc:  # pragma: no cover - defensive guard
            logger.warning("Automatic backup snapshot failed: %s", exc, exc_info=True)
            # Short backoff before retrying after a failure.
            await asyncio.sleep(60)
def get_available_snapshots(self) -> list[dict[str, Any]]:
    """List snapshot archives in the backup directory, newest first.

    Each record carries name, absolute path, size, mtime, and whether
    the archive was produced by the automatic scheduler.
    """
    results: list[dict[str, Any]] = []
    for archive in sorted(self._backup_dir.glob("lora-manager-backup-*.zip")):
        try:
            info = archive.stat()
        except OSError:
            # The file vanished between glob and stat; skip it.
            continue
        results.append(
            {
                "name": archive.name,
                "path": str(archive),
                "size": info.st_size,
                "mtime": info.st_mtime,
                "is_auto": archive.name.endswith("-auto.zip"),
            }
        )
    return sorted(results, key=lambda item: item["mtime"], reverse=True)
def get_latest_auto_snapshot(self) -> Optional[dict[str, Any]]:
    """Return the newest automatic snapshot record, or ``None`` if absent."""
    # get_available_snapshots() is sorted newest-first, so the first
    # auto entry encountered is the latest one.
    for snapshot in self.get_available_snapshots():
        if snapshot["is_auto"]:
            return snapshot
    return None
def get_status(self) -> dict[str, Any]:
    """Summarize backup configuration and snapshot inventory for the UI."""
    snapshots = self.get_available_snapshots()
    newest = snapshots[0] if snapshots else None
    return {
        "backupDir": self.get_backup_dir(),
        "enabled": self._get_setting_bool("backup_auto_enabled", True),
        "retentionCount": self._get_setting_int(
            "backup_retention_count", DEFAULT_BACKUP_RETENTION_COUNT
        ),
        "snapshotCount": len(snapshots),
        "latestSnapshot": newest,
        "latestAutoSnapshot": self.get_latest_auto_snapshot(),
    }

View File

@@ -12,6 +12,7 @@ from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence
from .errors import RateLimitError, ResourceNotFoundError
from .settings_manager import get_settings_manager
from ..utils.cache_paths import CacheType, resolve_cache_path_with_migration
from ..utils.civitai_utils import rewrite_preview_url
from ..utils.preview_selection import resolve_mature_threshold, select_preview_media
@@ -234,12 +235,52 @@ class ModelUpdateService:
ON model_update_versions(model_id);
"""
def __init__(self, db_path: str, *, ttl_seconds: int = 24 * 60 * 60, settings_manager=None) -> None:
self._db_path = db_path
def __init__(
self,
db_path: str | None = None,
*,
ttl_seconds: int = 24 * 60 * 60,
settings_manager=None,
) -> None:
self._settings = settings_manager or get_settings_manager()
self._library_name = self._get_active_library_name()
self._db_path = db_path or self._resolve_default_path(self._library_name)
self._ttl_seconds = ttl_seconds
self._lock = asyncio.Lock()
self._schema_initialized = False
self._settings = settings_manager or get_settings_manager()
self._custom_db_path = db_path is not None
self._ensure_directory()
self._initialize_schema()
def _get_active_library_name(self) -> str:
    """Return the active library name, falling back to ``"default"``.

    Settings failures are swallowed deliberately so service construction
    never fails just because the settings manager is unavailable.
    """
    try:
        value = self._settings.get_active_library_name()
    except Exception:
        value = None
    return value or "default"
def _resolve_default_path(self, library_name: str) -> str:
    """Resolve the per-library model-update DB path.

    The LORA_MANAGER_MODEL_UPDATE_DB environment variable, when set,
    overrides the computed cache location.
    """
    env_override = os.environ.get("LORA_MANAGER_MODEL_UPDATE_DB")
    return resolve_cache_path_with_migration(
        CacheType.MODEL_UPDATE,
        library_name=library_name,
        env_override=env_override,
    )
def on_library_changed(self) -> None:
    """Switch to the database for the active library.

    No-op when the service was constructed with an explicit db_path
    (tests / custom deployments) or when the resolved path is unchanged.
    Otherwise repoints the service and re-runs schema initialization for
    the new database file.
    """
    if self._custom_db_path:
        return
    library_name = self._get_active_library_name()
    new_path = self._resolve_default_path(library_name)
    if new_path == self._db_path:
        return
    self._library_name = library_name
    self._db_path = new_path
    # Force schema setup to run again against the new file.
    self._schema_initialized = False
    self._ensure_directory()
    self._initialize_schema()
@@ -262,11 +303,114 @@ class ModelUpdateService:
conn.execute("PRAGMA foreign_keys = ON")
conn.executescript(self._SCHEMA)
self._apply_migrations(conn)
self._migrate_from_legacy_snapshot(conn)
self._schema_initialized = True
except Exception as exc: # pragma: no cover - defensive guard
logger.error("Failed to initialize update schema: %s", exc, exc_info=True)
raise
def _migrate_from_legacy_snapshot(self, conn: sqlite3.Connection) -> None:
"""Copy update tracking data out of the legacy model snapshot database."""
if self._custom_db_path:
return
try:
from .persistent_model_cache import get_persistent_cache
legacy_path = get_persistent_cache(self._library_name).get_database_path()
except Exception:
return
if not legacy_path or os.path.abspath(legacy_path) == os.path.abspath(self._db_path):
return
if not os.path.exists(legacy_path):
return
try:
existing_row = conn.execute(
"SELECT 1 FROM model_update_status LIMIT 1"
).fetchone()
if existing_row:
return
except Exception:
return
try:
with sqlite3.connect(legacy_path, check_same_thread=False) as legacy_conn:
legacy_conn.row_factory = sqlite3.Row
status_rows = legacy_conn.execute(
"""
SELECT model_id, model_type, last_checked_at, should_ignore_model
FROM model_update_status
"""
).fetchall()
if not status_rows:
return
version_rows = legacy_conn.execute(
"""
SELECT model_id, version_id, sort_index, name, base_model, released_at,
size_bytes, preview_url, is_in_library, should_ignore,
early_access_ends_at, is_early_access
FROM model_update_versions
ORDER BY model_id ASC, sort_index ASC, version_id ASC
"""
).fetchall()
conn.execute("BEGIN")
conn.executemany(
"""
INSERT OR REPLACE INTO model_update_status (
model_id, model_type, last_checked_at, should_ignore_model
) VALUES (?, ?, ?, ?)
""",
[
(
int(row["model_id"]),
row["model_type"],
row["last_checked_at"],
int(row["should_ignore_model"] or 0),
)
for row in status_rows
],
)
conn.executemany(
"""
INSERT OR REPLACE INTO model_update_versions (
model_id, version_id, sort_index, name, base_model, released_at,
size_bytes, preview_url, is_in_library, should_ignore,
early_access_ends_at, is_early_access
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""",
[
(
int(row["model_id"]),
int(row["version_id"]),
int(row["sort_index"] or 0),
row["name"],
row["base_model"],
row["released_at"],
row["size_bytes"],
row["preview_url"],
int(row["is_in_library"] or 0),
int(row["should_ignore"] or 0),
row["early_access_ends_at"],
int(row["is_early_access"] or 0),
)
for row in version_rows
],
)
conn.commit()
logger.info(
"Migrated model update tracking data from legacy snapshot DB for %s",
self._library_name,
)
except sqlite3.OperationalError as exc:
logger.debug("Legacy model update migration skipped: %s", exc)
except Exception as exc: # pragma: no cover - defensive guard
logger.warning("Failed to migrate model update data: %s", exc, exc_info=True)
def _apply_migrations(self, conn: sqlite3.Connection) -> None:
"""Ensure legacy databases match the current schema without dropping data."""

View File

@@ -159,10 +159,9 @@ class ServiceRegistry:
return cls._services[service_name]
from .model_update_service import ModelUpdateService
from .persistent_model_cache import get_persistent_cache
from .settings_manager import get_settings_manager
cache = get_persistent_cache()
service = ModelUpdateService(cache.get_database_path())
service = ModelUpdateService(settings_manager=get_settings_manager())
cls._services[service_name] = service
logger.debug(f"Created and registered {service_name}")
return service
@@ -189,6 +188,26 @@ class ServiceRegistry:
logger.debug(f"Created and registered {service_name}")
return service
@classmethod
async def get_backup_service(cls):
    """Get or create the backup service."""
    service_name = "backup_service"
    # Fast path: service already registered.
    if service_name in cls._services:
        return cls._services[service_name]
    async with cls._get_lock(service_name):
        # Re-check after acquiring the lock: another task may have
        # created the service while we were waiting.
        if service_name in cls._services:
            return cls._services[service_name]
        # Imported lazily to avoid a circular import at module load.
        from .backup_service import BackupService
        service = await BackupService.get_instance()
        cls._services[service_name] = service
        logger.debug(f"Created and registered {service_name}")
        return service
@classmethod
async def get_civarchive_client(cls):
"""Get or create CivArchive client instance"""

View File

@@ -95,6 +95,8 @@ DEFAULT_SETTINGS: Dict[str, Any] = {
"metadata_refresh_skip_paths": [],
"skip_previously_downloaded_model_versions": False,
"download_skip_base_models": [],
"backup_auto_enabled": True,
"backup_retention_count": 5,
}
@@ -1983,6 +1985,7 @@ class SettingsManager:
"checkpoint_scanner",
"embedding_scanner",
"recipe_scanner",
"model_update_service",
):
service = ServiceRegistry.get_service_sync(service_name)
if service and hasattr(service, "on_library_changed"):

View File

@@ -11,6 +11,8 @@ Target structure:
│ └── symlink_map.json
├── model/
│ └── {library_name}.sqlite
├── model_update/
│ └── {library_name}.sqlite
├── recipe/
│ └── {library_name}.sqlite
└── fts/
@@ -36,6 +38,7 @@ class CacheType(Enum):
"""Types of cache files managed by the cache path resolver."""
MODEL = "model"
MODEL_UPDATE = "model_update"
RECIPE = "recipe"
RECIPE_FTS = "recipe_fts"
TAG_FTS = "tag_fts"
@@ -45,6 +48,7 @@ class CacheType(Enum):
# Subdirectory structure for each cache type
_CACHE_SUBDIRS = {
CacheType.MODEL: "model",
CacheType.MODEL_UPDATE: "model_update",
CacheType.RECIPE: "recipe",
CacheType.RECIPE_FTS: "fts",
CacheType.TAG_FTS: "fts",
@@ -54,6 +58,7 @@ _CACHE_SUBDIRS = {
# Filename patterns for each cache type
_CACHE_FILENAMES = {
CacheType.MODEL: "{library_name}.sqlite",
CacheType.MODEL_UPDATE: "{library_name}.sqlite",
CacheType.RECIPE: "{library_name}.sqlite",
CacheType.RECIPE_FTS: "recipe_fts.sqlite",
CacheType.TAG_FTS: "tag_fts.sqlite",

View File

@@ -311,6 +311,161 @@ button:disabled,
color: var(--lora-error, #ef4444);
}
.backup-status {
background: rgba(0, 0, 0, 0.03);
border: 1px solid rgba(0, 0, 0, 0.1);
border-radius: var(--border-radius-sm);
padding: var(--space-3);
}
[data-theme="dark"] .backup-status {
background: rgba(255, 255, 255, 0.03);
border: 1px solid var(--lora-border);
}
.backup-summary-grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(160px, 1fr));
gap: var(--space-2);
margin-bottom: var(--space-3);
}
.backup-summary-card {
background: rgba(255, 255, 255, 0.5);
border: 1px solid rgba(0, 0, 0, 0.06);
border-radius: var(--border-radius-sm);
padding: var(--space-2);
}
[data-theme="dark"] .backup-summary-card {
background: rgba(255, 255, 255, 0.02);
border-color: rgba(255, 255, 255, 0.05);
}
.backup-summary-label {
color: var(--text-color);
font-size: 0.85rem;
opacity: 0.7;
margin-bottom: 6px;
}
.backup-summary-value {
color: var(--text-color);
font-size: 1.1rem;
font-weight: 600;
line-height: 1.3;
word-break: break-word;
}
.backup-summary-value.status-enabled {
color: var(--lora-success, #10b981);
}
.backup-summary-value.status-disabled {
color: var(--lora-error, #ef4444);
}
.backup-status-list {
display: flex;
flex-direction: column;
gap: var(--space-2);
}
.backup-status-row {
display: grid;
grid-template-columns: minmax(140px, 180px) 1fr;
gap: var(--space-2);
align-items: start;
}
.backup-status-label {
color: var(--text-color);
font-weight: 500;
opacity: 0.8;
}
.backup-status-content {
min-width: 0;
}
.backup-status-primary {
color: var(--text-color);
font-weight: 600;
line-height: 1.4;
}
.backup-status-secondary {
color: var(--text-color);
opacity: 0.72;
font-size: 0.88rem;
line-height: 1.4;
word-break: break-word;
margin-top: 2px;
}
.backup-location-details {
border: 1px solid rgba(0, 0, 0, 0.1);
border-radius: var(--border-radius-sm);
background: rgba(0, 0, 0, 0.02);
}
[data-theme="dark"] .backup-location-details {
border-color: var(--lora-border);
background: rgba(255, 255, 255, 0.02);
}
.backup-location-details summary {
cursor: pointer;
padding: var(--space-2) var(--space-3);
color: var(--text-color);
font-weight: 500;
}
.backup-location-panel {
display: grid;
grid-template-columns: minmax(0, 1fr) auto;
gap: var(--space-2);
align-items: center;
width: 100%;
max-width: 100%;
box-sizing: border-box;
padding: 0 var(--space-3) var(--space-3);
}
.backup-location-panel .text-btn {
justify-self: end;
}
.backup-location-path {
display: block;
min-width: 0;
max-width: 100%;
padding: 6px 8px;
border-radius: var(--border-radius-sm);
background: rgba(0, 0, 0, 0.05);
color: var(--text-color);
overflow-wrap: anywhere;
word-break: break-word;
}
[data-theme="dark"] .backup-location-path {
background: rgba(255, 255, 255, 0.05);
}
@media (max-width: 768px) {
.backup-status-row {
grid-template-columns: 1fr;
}
.backup-location-panel {
grid-template-columns: 1fr;
}
.backup-location-panel .text-btn {
justify-self: start;
}
}
/* Add styles for delete preview image */
.delete-preview {
max-width: 150px;

View File

@@ -361,6 +361,13 @@ export class SettingsManager {
});
}
const openBackupLocationButton = document.getElementById('backupOpenLocationBtn');
if (openBackupLocationButton) {
openBackupLocationButton.addEventListener('click', () => {
this.openBackupLocation();
});
}
['lora', 'checkpoint', 'embedding'].forEach(modelType => {
const customInput = document.getElementById(`${modelType}CustomTemplate`);
if (customInput) {
@@ -742,6 +749,35 @@ export class SettingsManager {
}
}
/**
 * Ask the backend to open the backup folder in the OS file browser.
 * When the server cannot open a window (e.g. headless/Docker) it replies
 * with mode 'clipboard' and the path, which we copy for the user instead.
 */
async openBackupLocation() {
    try {
        const response = await fetch('/api/lm/backup/open-location', {
            method: 'POST'
        });
        if (!response.ok) {
            throw new Error(`Request failed with status ${response.status}`);
        }
        const data = await response.json();
        if (data.mode === 'clipboard' && data.path) {
            try {
                await navigator.clipboard.writeText(data.path);
                showToast('settings.backup.locationCopied', { path: data.path }, 'success');
            } catch (clipboardErr) {
                // Clipboard access can be denied (insecure context); just show the path.
                console.warn('Clipboard API not available:', clipboardErr);
                showToast('settings.backup.locationClipboardFallback', { path: data.path }, 'info');
            }
        } else {
            showToast('settings.backup.openFolderSuccess', {}, 'success');
        }
    } catch (error) {
        console.error('Failed to open backup folder:', error);
        showToast('settings.backup.openFolderFailed', {}, 'error');
    }
}
async loadSettingsToUI() {
// Set frontend settings from state
const blurMatureContentCheckbox = document.getElementById('blurMatureContent');
@@ -878,6 +914,9 @@ export class SettingsManager {
// Load metadata archive settings
await this.loadMetadataArchiveSettings();
// Load backup settings
await this.loadBackupSettings();
// Load base model path mappings
this.loadBaseModelMappings();
@@ -1857,6 +1896,10 @@ export class SettingsManager {
await this.updateMetadataArchiveStatus();
}
if (settingKey === 'backup_auto_enabled') {
await this.updateBackupStatus();
}
showToast('toast.settings.settingsUpdated', { setting: settingKey.replace(/_/g, ' ') }, 'success');
// Apply frontend settings immediately
@@ -1945,6 +1988,163 @@ export class SettingsManager {
}
}
/**
 * Populate the backup controls from global settings state and refresh
 * the live status panel. Defaults mirror the backend settings defaults
 * (auto-enabled, retention 5).
 */
async loadBackupSettings() {
    const backupAutoEnabledCheckbox = document.getElementById('backupAutoEnabled');
    if (backupAutoEnabledCheckbox) {
        backupAutoEnabledCheckbox.checked = state.global.settings.backup_auto_enabled ?? true;
    }
    const backupRetentionCountInput = document.getElementById('backupRetentionCount');
    if (backupRetentionCountInput) {
        backupRetentionCountInput.value = state.global.settings.backup_retention_count ?? 5;
    }
    await this.updateBackupStatus();
}
async updateBackupStatus() {
try {
const response = await fetch('/api/lm/backup/status');
const data = await response.json();
const statusContainer = document.getElementById('backupStatus');
if (!statusContainer || !data.success) {
return;
}
const status = data.status || {};
const latestAutoSnapshot = status.latestAutoSnapshot;
const retentionCount = status.retentionCount ?? state.global.settings.backup_retention_count ?? 5;
const enabled = status.enabled ?? state.global.settings.backup_auto_enabled ?? true;
const backupDir = status.backupDir || '';
const backupLocationPath = document.getElementById('backupLocationPath');
if (backupLocationPath) {
backupLocationPath.textContent = backupDir;
backupLocationPath.title = backupDir;
}
const formatTimestamp = (timestamp) => {
if (!timestamp) {
return translate('common.status.unknown', {}, 'Unknown');
}
return new Date(timestamp * 1000).toLocaleString();
};
const renderSnapshotDetail = (snapshot) => {
if (!snapshot) {
return translate('settings.backup.noneAvailable', {}, 'No snapshots yet');
}
const size = typeof snapshot.size === 'number' ? ` (${this.formatFileSize(snapshot.size)})` : '';
return `${snapshot.name}${size}`;
};
statusContainer.innerHTML = `
<div class="backup-summary-grid">
<div class="backup-summary-card">
<div class="backup-summary-label">${translate('settings.backup.autoEnabled', {}, 'Automatic snapshots')}</div>
<div class="backup-summary-value status-${enabled ? 'enabled' : 'disabled'}">
${enabled ? translate('common.status.enabled') : translate('common.status.disabled')}
</div>
</div>
<div class="backup-summary-card">
<div class="backup-summary-label">${translate('settings.backup.retention', {}, 'Retention')}</div>
<div class="backup-summary-value">${retentionCount}</div>
</div>
<div class="backup-summary-card">
<div class="backup-summary-label">${translate('settings.backup.snapshotCount', {}, 'Saved snapshots')}</div>
<div class="backup-summary-value">${status.snapshotCount ?? 0}</div>
</div>
</div>
<div class="backup-status-list">
<div class="backup-status-row">
<div class="backup-status-label">${translate('settings.backup.latestAutoSnapshot', {}, 'Latest auto snapshot')}</div>
<div class="backup-status-content">
<div class="backup-status-primary">${formatTimestamp(latestAutoSnapshot?.mtime)}</div>
<div class="backup-status-secondary">${renderSnapshotDetail(latestAutoSnapshot)}</div>
</div>
</div>
</div>
`;
} catch (error) {
console.error('Error updating backup status:', error);
}
}
/**
 * Download the current user state as a ZIP via the export endpoint.
 * The filename is taken from the Content-Disposition header when present,
 * otherwise a timestamped fallback name is used.
 */
async exportBackup() {
    try {
        const response = await fetch('/api/lm/backup/export', {
            method: 'POST',
        });
        if (!response.ok) {
            throw new Error(`Request failed with status ${response.status}`);
        }
        const blob = await response.blob();
        const contentDisposition = response.headers.get('Content-Disposition') || '';
        const match = contentDisposition.match(/filename="([^"]+)"/);
        const filename = match?.[1] || `lora-manager-backup-${Date.now()}.zip`;
        // Trigger a browser download via a transient anchor element.
        const url = window.URL.createObjectURL(blob);
        const link = document.createElement('a');
        link.href = url;
        link.download = filename;
        document.body.appendChild(link);
        link.click();
        link.remove();
        // Release the object URL to avoid leaking the blob.
        window.URL.revokeObjectURL(url);
        showToast('settings.backup.exportSuccess', {}, 'success');
    } catch (error) {
        console.error('Failed to export backup:', error);
        showToast('settings.backup.exportFailed', { message: error.message }, 'error');
    }
}
/**
 * Open the (hidden) file picker for backup import; the selected file is
 * handled by handleBackupImportFile via the input's change handler.
 */
triggerBackupImport() {
    const input = document.getElementById('backupImportInput');
    input?.click();
}
/**
 * Upload a selected backup archive to the import endpoint after user
 * confirmation, then refresh status and reload the page so the restored
 * state takes effect everywhere.
 * @param {HTMLInputElement} input - the file input that fired the change event.
 */
async handleBackupImportFile(input) {
    if (!(input instanceof HTMLInputElement)) {
        return;
    }
    const file = input.files?.[0];
    // Clear immediately so re-selecting the same file fires change again.
    input.value = '';
    if (!file) {
        return;
    }
    // Import overwrites local user state — require explicit confirmation.
    if (!confirm(translate('settings.backup.importConfirm', {}, 'Import this backup and overwrite local user state?'))) {
        return;
    }
    try {
        const formData = new FormData();
        formData.append('archive', file);
        const response = await fetch('/api/lm/backup/import', {
            method: 'POST',
            body: formData,
        });
        const data = await response.json();
        if (!response.ok || data.success === false) {
            throw new Error(data.error || `Request failed with status ${response.status}`);
        }
        showToast('settings.backup.importSuccess', {}, 'success');
        await this.updateBackupStatus();
        // Reload so every view picks up the restored settings/state.
        window.location.reload();
    } catch (error) {
        console.error('Failed to import backup:', error);
        showToast('settings.backup.importFailed', { message: error.message }, 'error');
    }
}
async updateMetadataArchiveStatus() {
try {
const response = await fetch('/api/lm/metadata-archive-status');
@@ -2473,8 +2673,11 @@ export class SettingsManager {
try {
// Check if value has changed from existing value
const currentValue = state.global.settings[settingKey] || '';
if (value === currentValue) {
const currentValue = state.global.settings[settingKey];
const normalizedCurrentValue = currentValue === undefined || currentValue === null
? ''
: String(currentValue).trim();
if (value === normalizedCurrentValue) {
return; // No change, exit early
}
@@ -2515,6 +2718,9 @@ export class SettingsManager {
if (settingKey === 'recipes_path') {
showToast('toast.settings.recipesPathUpdated', {}, 'success');
} else if (settingKey === 'backup_retention_count') {
await this.updateBackupStatus();
showToast('toast.settings.settingsUpdated', { setting: settingKey.replace(/_/g, ' ') }, 'success');
} else {
showToast('toast.settings.settingsUpdated', { setting: settingKey.replace(/_/g, ' ') }, 'success');
}

View File

@@ -41,6 +41,8 @@ const DEFAULT_SETTINGS_BASE = Object.freeze({
metadata_refresh_skip_paths: [],
skip_previously_downloaded_model_versions: false,
download_skip_base_models: [],
backup_auto_enabled: true,
backup_retention_count: 5,
});
export function createDefaultSettings() {

View File

@@ -114,6 +114,93 @@
</div>
</div>
<!-- Backup -->
<div class="settings-subsection">
<div class="settings-subsection-header">
<h4>{{ t('settings.sections.backup') }}</h4>
</div>
<div class="setting-item">
<div class="setting-row">
<div class="setting-info">
<label for="backupAutoEnabled">
{{ t('settings.backup.autoEnabled') }}
<i class="fas fa-info-circle info-icon" data-tooltip="{{ t('settings.backup.autoEnabledHelp') }}"></i>
</label>
</div>
<div class="setting-control">
<label class="toggle-switch">
<input type="checkbox" id="backupAutoEnabled"
onchange="settingsManager.saveToggleSetting('backupAutoEnabled', 'backup_auto_enabled')">
<span class="toggle-slider"></span>
</label>
</div>
</div>
</div>
<div class="setting-item">
<div class="setting-row">
<div class="setting-info">
<label for="backupRetentionCount">
{{ t('settings.backup.retention') }}
<i class="fas fa-info-circle info-icon" data-tooltip="{{ t('settings.backup.retentionHelp') }}"></i>
</label>
</div>
<div class="setting-control">
<div class="text-input-wrapper">
<input
type="number"
id="backupRetentionCount"
min="1"
step="1"
onblur="settingsManager.saveInputSetting('backupRetentionCount', 'backup_retention_count')"
onkeydown="if(event.key === 'Enter') { this.blur(); }"
/>
</div>
</div>
</div>
</div>
<div class="setting-item">
<div class="setting-row">
<div class="setting-info">
<label>
{{ t('settings.backup.management') }}
<i class="fas fa-info-circle info-icon" data-tooltip="{{ t('settings.backup.managementHelp') }}"></i>
</label>
</div>
<div class="setting-control">
<button type="button" class="secondary-btn" onclick="settingsManager.exportBackup()">
{{ t('settings.backup.exportButton') }}
</button>
<button type="button" class="secondary-btn" onclick="settingsManager.triggerBackupImport()" style="margin-left: 10px;">
{{ t('settings.backup.importButton') }}
</button>
<input
type="file"
id="backupImportInput"
accept=".zip,application/zip"
style="display: none;"
onchange="settingsManager.handleBackupImportFile(this)"
/>
</div>
</div>
</div>
<div class="setting-item">
<details class="backup-location-details">
<summary>{{ t('settings.backup.locationSummary') }}</summary>
<div class="backup-location-panel">
<code id="backupLocationPath" class="backup-location-path"></code>
<button type="button" class="secondary-btn" id="backupOpenLocationBtn">
{{ t('settings.backup.openFolderButton') }}
</button>
</div>
</details>
</div>
<div class="setting-item">
<div class="backup-status" id="backupStatus">
<!-- Status will be populated by JavaScript -->
</div>
</div>
</div>
<!-- Proxy Settings -->
<div class="settings-subsection">
<div class="settings-subsection-header">
@@ -450,6 +537,7 @@
</div>
</div>
</div>
</div>
<!-- Section 3: Library -->

View File

@@ -8,6 +8,8 @@ import pytest
from aiohttp import web
from py.routes.handlers.misc_handlers import (
BackupHandler,
FileSystemHandler,
LoraCodeHandler,
ModelLibraryHandler,
NodeRegistry,
@@ -111,6 +113,106 @@ async def test_update_settings_rejects_missing_example_path(tmp_path):
assert "Path does not exist" in payload["error"]
class DummyBackupService:
    """Canned stand-in for BackupService used by the handler tests.

    Returns fixed payloads and records every restore request so tests
    can assert on the calls the handler makes.
    """

    def __init__(self):
        # Every archive path handed to restore_snapshot lands here.
        self.restore_calls = []

    async def create_snapshot(self, *, snapshot_type="manual", persist=False):
        """Return a fixed export payload; *persist* is accepted but ignored."""
        return dict(
            archive_name="backup.zip",
            archive_bytes=b"zip-bytes",
            manifest={"snapshot_type": snapshot_type},
        )

    async def restore_snapshot(self, archive_path):
        """Record the call and pretend three files were restored."""
        self.restore_calls.append(archive_path)
        return dict(success=True, restored_files=3, snapshot_type="manual")

    def get_status(self):
        """Static status payload matching the real service's shape."""
        return {
            "backupDir": "/tmp/backups",
            "enabled": True,
            "retentionCount": 5,
            "snapshotCount": 1,
        }

    def get_available_snapshots(self):
        """Single fake snapshot entry."""
        return [
            {
                "name": "backup.zip",
                "path": "/tmp/backup.zip",
                "size": 8,
                "mtime": 1.0,
                "is_auto": False,
            }
        ]
@pytest.mark.asyncio
async def test_backup_handler_returns_status_and_exports(monkeypatch):
service = DummyBackupService()
async def factory():
return service
handler = BackupHandler(backup_service_factory=factory)
status_response = await handler.get_backup_status(FakeRequest())
status_payload = json.loads(status_response.text)
assert status_payload["success"] is True
assert status_payload["status"]["backupDir"] == "/tmp/backups"
assert status_payload["status"]["enabled"] is True
assert status_payload["snapshots"][0]["name"] == "backup.zip"
export_response = await handler.export_backup(FakeRequest())
assert export_response.status == 200
assert export_response.body == b"zip-bytes"
@pytest.mark.asyncio
async def test_backup_handler_rejects_missing_import_archive():
service = DummyBackupService()
async def factory():
return service
handler = BackupHandler(backup_service_factory=factory)
class EmptyRequest:
content_type = "application/octet-stream"
async def read(self):
return b""
response = await handler.import_backup(EmptyRequest())
payload = json.loads(response.text)
assert response.status == 400
assert payload["success"] is False
@pytest.mark.asyncio
async def test_open_backup_location_uses_settings_directory(tmp_path, monkeypatch):
settings_dir = tmp_path / "settings"
settings_dir.mkdir(parents=True, exist_ok=True)
settings_file = settings_dir / "settings.json"
settings_file.write_text("{}", encoding="utf-8")
backup_dir = settings_dir / "backups"
backup_dir.mkdir(parents=True, exist_ok=True)
handler = FileSystemHandler(settings_service=SimpleNamespace(settings_file=str(settings_file)))
calls = []
def fake_popen(args):
calls.append(args)
return MagicMock()
monkeypatch.setattr(subprocess, "Popen", fake_popen)
monkeypatch.setattr("py.routes.handlers.misc_handlers._is_docker", lambda: False)
monkeypatch.setattr("py.routes.handlers.misc_handlers._is_wsl", lambda: False)
response = await handler.open_backup_location(FakeRequest())
payload = json.loads(response.text)
assert response.status == 200
assert payload["success"] is True
assert payload["path"] == str(backup_dir)
assert calls == [["xdg-open", str(backup_dir)]]
class RecordingRouter:
def __init__(self):
self.calls = []

View File

@@ -0,0 +1,228 @@
import json
import os
import sqlite3
from pathlib import Path
import pytest
import py.services.backup_service as backup_service
from py.services.model_update_service import ModelUpdateService
from py.utils.cache_paths import CacheType
class DummySettings:
    """Minimal settings stub exposing only what BackupService touches."""

    def __init__(self, settings_file: Path, *, library_name: str = "main", values=None):
        # BackupService derives its backup directory from this file path.
        self._library_name = library_name
        self._values = values or {}
        self.settings_file = str(settings_file)

    def get(self, key, default=None):
        """Look up a stored setting value, mirroring SettingsManager.get."""
        return self._values.get(key, default)

    def get_active_library_name(self):
        """Report the configured library name."""
        return self._library_name
def _configure_backup_paths(monkeypatch, root: Path):
    """Redirect the backup_service path helpers into a sandbox under *root*.

    Returns ``(settings_dir, cache_dir)`` so tests can seed fixture files
    in exactly the locations the service will scan.
    """
    settings_dir = root / "settings"
    cache_dir = settings_dir / "cache"

    def fake_get_settings_dir(create: bool = True):
        if create:
            settings_dir.mkdir(parents=True, exist_ok=True)
        return str(settings_dir)

    def fake_get_cache_base_dir(create: bool = True):
        if create:
            cache_dir.mkdir(parents=True, exist_ok=True)
        return str(cache_dir)

    def fake_get_cache_file_path(cache_type, library_name=None, create_dir=True):
        # Only the two cache types the backup targets use are supported;
        # anything else is a test bug.
        if cache_type == CacheType.SYMLINK:
            path = cache_dir / "symlink" / "symlink_map.json"
        elif cache_type == CacheType.MODEL_UPDATE:
            name = library_name or "default"
            path = cache_dir / "model_update" / f"{name}.sqlite"
        else:  # pragma: no cover - the test only covers the backup targets
            raise AssertionError(f"Unexpected cache type: {cache_type}")
        if create_dir:
            path.parent.mkdir(parents=True, exist_ok=True)
        return str(path)

    monkeypatch.setattr(backup_service, "get_settings_dir", fake_get_settings_dir)
    monkeypatch.setattr(backup_service, "get_cache_base_dir", fake_get_cache_base_dir)
    monkeypatch.setattr(backup_service, "get_cache_file_path", fake_get_cache_file_path)
    return settings_dir, cache_dir
@pytest.mark.asyncio
async def test_backup_round_trip_restores_user_state(tmp_path, monkeypatch):
    """Exporting a snapshot and restoring it should roll back later edits."""
    settings_dir, cache_dir = _configure_backup_paths(monkeypatch, tmp_path)
    settings_file = settings_dir / "settings.json"
    download_history = cache_dir / "download_history" / "downloaded_versions.sqlite"
    symlink_map = cache_dir / "symlink" / "symlink_map.json"
    model_update_db = cache_dir / "model_update" / "main.sqlite"
    for target in (settings_file, download_history, symlink_map, model_update_db):
        target.parent.mkdir(parents=True, exist_ok=True)

    original_settings = json.dumps({"backup_auto_enabled": True})
    original_symlinks = json.dumps({"a": "/tmp/a"})
    settings_file.write_text(original_settings, encoding="utf-8")
    download_history.write_bytes(b"download-history-v1")
    symlink_map.write_text(original_symlinks, encoding="utf-8")
    model_update_db.write_bytes(b"model-update-v1")

    service = backup_service.BackupService(
        settings_manager=DummySettings(settings_file),
        backup_dir=str(tmp_path / "backups"),
    )
    snapshot = await service.create_snapshot(snapshot_type="manual", persist=False)
    archive_path = tmp_path / snapshot["archive_name"]
    archive_path.write_bytes(snapshot["archive_bytes"])

    # Mutate every backed-up file so the restore has something to undo.
    settings_file.write_text(json.dumps({"backup_auto_enabled": False}), encoding="utf-8")
    download_history.write_bytes(b"download-history-v2")
    symlink_map.write_text(json.dumps({"a": "/tmp/b"}), encoding="utf-8")
    model_update_db.write_bytes(b"model-update-v2")

    result = await service.restore_snapshot(str(archive_path))

    assert result["success"] is True
    assert settings_file.read_text(encoding="utf-8") == original_settings
    assert download_history.read_bytes() == b"download-history-v1"
    assert symlink_map.read_text(encoding="utf-8") == original_symlinks
    assert model_update_db.read_bytes() == b"model-update-v1"
def test_prune_snapshots_keeps_latest_auto_only(tmp_path, monkeypatch):
    """Pruning trims old auto snapshots to the retention count; manual ones survive."""
    settings_dir, _ = _configure_backup_paths(monkeypatch, tmp_path)
    settings_file = settings_dir / "settings.json"
    settings_file.parent.mkdir(parents=True, exist_ok=True)
    settings_file.write_text(json.dumps({"backup_retention_count": 2}), encoding="utf-8")

    service = backup_service.BackupService(
        settings_manager=DummySettings(settings_file, values={"backup_retention_count": 2}),
        backup_dir=str(tmp_path / "backups"),
    )
    backup_dir = Path(service.get_backup_dir())
    backup_dir.mkdir(parents=True, exist_ok=True)

    snapshot_names = [
        "lora-manager-backup-20240101T000000Z-auto.zip",
        "lora-manager-backup-20240102T000000Z-auto.zip",
        "lora-manager-backup-20240103T000000Z-auto.zip",
        "lora-manager-backup-20240104T000000Z-manual.zip",
    ]
    # Give each archive a strictly increasing mtime so "newest" is well defined.
    for offset, name in enumerate(snapshot_names):
        archive = backup_dir / name
        archive.write_bytes(b"zip")
        os.utime(archive, (1000 + offset, 1000 + offset))

    service._prune_snapshots()

    remaining = sorted(entry.name for entry in backup_dir.glob("*.zip"))
    assert remaining == [
        "lora-manager-backup-20240102T000000Z-auto.zip",
        "lora-manager-backup-20240103T000000Z-auto.zip",
        "lora-manager-backup-20240104T000000Z-manual.zip",
    ]
def test_backup_status_includes_backup_dir(tmp_path, monkeypatch):
    """get_status() should report the directory the service was configured with."""
    settings_dir, _ = _configure_backup_paths(monkeypatch, tmp_path)
    settings_file = settings_dir / "settings.json"
    settings_file.parent.mkdir(parents=True, exist_ok=True)
    settings_file.write_text("{}", encoding="utf-8")

    expected_dir = tmp_path / "backups"
    service = backup_service.BackupService(
        settings_manager=DummySettings(settings_file),
        backup_dir=str(expected_dir),
    )

    assert service.get_status()["backupDir"] == str(expected_dir)
@pytest.mark.asyncio
async def test_model_update_service_migrates_legacy_snapshot_db(tmp_path, monkeypatch):
    """Constructing the service should copy legacy update rows into the new cache DB."""
    legacy_db = tmp_path / "legacy" / "main.sqlite"
    migrated_db = tmp_path / "cache" / "model_update" / "main.sqlite"
    legacy_db.parent.mkdir(parents=True, exist_ok=True)
    migrated_db.parent.mkdir(parents=True, exist_ok=True)

    # Seed a legacy database with one status row and one version row.
    with sqlite3.connect(legacy_db) as conn:
        conn.executescript(
            """
            CREATE TABLE model_update_status (
                model_id INTEGER PRIMARY KEY,
                model_type TEXT NOT NULL,
                last_checked_at REAL,
                should_ignore_model INTEGER NOT NULL DEFAULT 0
            );
            CREATE TABLE model_update_versions (
                model_id INTEGER NOT NULL,
                version_id INTEGER NOT NULL,
                sort_index INTEGER NOT NULL DEFAULT 0,
                name TEXT,
                base_model TEXT,
                released_at TEXT,
                size_bytes INTEGER,
                preview_url TEXT,
                is_in_library INTEGER NOT NULL DEFAULT 0,
                should_ignore INTEGER NOT NULL DEFAULT 0,
                early_access_ends_at TEXT,
                is_early_access INTEGER NOT NULL DEFAULT 0,
                PRIMARY KEY (model_id, version_id)
            );
            INSERT INTO model_update_status (
                model_id, model_type, last_checked_at, should_ignore_model
            ) VALUES (1, 'lora', 123.0, 1);
            INSERT INTO model_update_versions (
                model_id, version_id, sort_index, name, base_model, released_at,
                size_bytes, preview_url, is_in_library, should_ignore,
                early_access_ends_at, is_early_access
            ) VALUES (
                1, 11, 0, 'v1', 'SD15', '2024-01-01T00:00:00Z',
                1024, 'https://example.com/v1.png', 1, 0, NULL, 0
            );
            """
        )
        conn.commit()

    class StubSettingsManager:
        def get_active_library_name(self):
            return "main"

    class StubLegacyCache:
        def get_database_path(self):
            return str(legacy_db)

    monkeypatch.setattr(
        "py.services.model_update_service.resolve_cache_path_with_migration",
        lambda *_args, **_kwargs: str(migrated_db),
    )
    monkeypatch.setattr(
        "py.services.persistent_model_cache.get_persistent_cache",
        lambda *_args, **_kwargs: StubLegacyCache(),
    )

    # Instantiating the service triggers the one-time migration.
    service = ModelUpdateService(settings_manager=StubSettingsManager())

    with sqlite3.connect(migrated_db) as conn:
        status_row = conn.execute(
            "SELECT model_id, model_type, last_checked_at, should_ignore_model FROM model_update_status"
        ).fetchone()
        version_row = conn.execute(
            "SELECT model_id, version_id, name, base_model, is_in_library FROM model_update_versions"
        ).fetchone()

    assert status_row == (1, "lora", 123.0, 1)
    assert version_row == (1, 11, "v1", "SD15", 1)
    assert service._db_path == str(migrated_db)