mirror of
https://github.com/willmiao/ComfyUI-Lora-Manager.git
synced 2026-04-10 12:52:15 -03:00
fix(backup): add user-state backup UI and storage
This commit is contained in:
@@ -8,6 +8,8 @@ import pytest
|
||||
from aiohttp import web
|
||||
|
||||
from py.routes.handlers.misc_handlers import (
|
||||
BackupHandler,
|
||||
FileSystemHandler,
|
||||
LoraCodeHandler,
|
||||
ModelLibraryHandler,
|
||||
NodeRegistry,
|
||||
@@ -111,6 +113,106 @@ async def test_update_settings_rejects_missing_example_path(tmp_path):
|
||||
assert "Path does not exist" in payload["error"]
|
||||
|
||||
|
||||
class DummyBackupService:
    """In-memory stand-in for the real backup service used by handler tests."""

    def __init__(self):
        # Every archive path handed to restore_snapshot is recorded here.
        self.restore_calls = []

    async def create_snapshot(self, *, snapshot_type="manual", persist=False):
        """Return a canned snapshot payload; *persist* is accepted but ignored."""
        manifest = {"snapshot_type": snapshot_type}
        return {
            "archive_name": "backup.zip",
            "archive_bytes": b"zip-bytes",
            "manifest": manifest,
        }

    async def restore_snapshot(self, archive_path):
        """Record the requested archive and report a successful restore."""
        self.restore_calls.append(archive_path)
        return {"success": True, "restored_files": 3, "snapshot_type": "manual"}

    def get_status(self):
        """Return a fixed status dict mirroring the real service's shape."""
        status = {}
        status["backupDir"] = "/tmp/backups"
        status["enabled"] = True
        status["retentionCount"] = 5
        status["snapshotCount"] = 1
        return status

    def get_available_snapshots(self):
        """Expose exactly one fake snapshot entry."""
        entry = {
            "name": "backup.zip",
            "path": "/tmp/backup.zip",
            "size": 8,
            "mtime": 1.0,
            "is_auto": False,
        }
        return [entry]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_backup_handler_returns_status_and_exports(monkeypatch):
    """Status endpoint mirrors the service payload; export streams the archive bytes."""
    backup_service = DummyBackupService()

    async def make_service():
        return backup_service

    handler = BackupHandler(backup_service_factory=make_service)

    # Status endpoint wraps the service's status/snapshot data in a success envelope.
    payload = json.loads((await handler.get_backup_status(FakeRequest())).text)
    assert payload["success"] is True
    assert payload["status"]["backupDir"] == "/tmp/backups"
    assert payload["status"]["enabled"] is True
    assert payload["snapshots"][0]["name"] == "backup.zip"

    # Export endpoint returns the raw archive bytes from create_snapshot.
    export = await handler.export_backup(FakeRequest())
    assert export.status == 200
    assert export.body == b"zip-bytes"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_backup_handler_rejects_missing_import_archive():
    """Importing a request with an empty body must fail with HTTP 400."""
    backup_service = DummyBackupService()

    async def make_service():
        return backup_service

    handler = BackupHandler(backup_service_factory=make_service)

    class EmptyRequest:
        # A raw binary upload that carries no bytes at all.
        content_type = "application/octet-stream"

        async def read(self):
            return b""

    response = await handler.import_backup(EmptyRequest())
    body = json.loads(response.text)

    assert response.status == 400
    assert body["success"] is False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_open_backup_location_uses_settings_directory(tmp_path, monkeypatch):
    """The handler opens <settings dir>/backups with the platform file browser."""
    settings_dir = tmp_path / "settings"
    backup_dir = settings_dir / "backups"
    backup_dir.mkdir(parents=True, exist_ok=True)
    settings_file = settings_dir / "settings.json"
    settings_file.write_text("{}", encoding="utf-8")

    handler = FileSystemHandler(settings_service=SimpleNamespace(settings_file=str(settings_file)))

    popen_args = []

    def record_popen(args):
        popen_args.append(args)
        return MagicMock()

    # Force the plain-Linux code path so xdg-open is the chosen launcher.
    monkeypatch.setattr(subprocess, "Popen", record_popen)
    monkeypatch.setattr("py.routes.handlers.misc_handlers._is_docker", lambda: False)
    monkeypatch.setattr("py.routes.handlers.misc_handlers._is_wsl", lambda: False)

    response = await handler.open_backup_location(FakeRequest())
    payload = json.loads(response.text)

    assert response.status == 200
    assert payload["success"] is True
    assert payload["path"] == str(backup_dir)
    assert popen_args == [["xdg-open", str(backup_dir)]]
|
||||
|
||||
|
||||
class RecordingRouter:
|
||||
def __init__(self):
|
||||
self.calls = []
|
||||
|
||||
228
tests/services/test_backup_service.py
Normal file
228
tests/services/test_backup_service.py
Normal file
@@ -0,0 +1,228 @@
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
import py.services.backup_service as backup_service
|
||||
from py.services.model_update_service import ModelUpdateService
|
||||
from py.utils.cache_paths import CacheType
|
||||
|
||||
|
||||
class DummySettings:
    """Minimal settings-manager stub exposing only what BackupService reads."""

    def __init__(self, settings_file: Path, *, library_name: str = "main", values=None):
        # The service expects a plain string path, not a Path object.
        self.settings_file = str(settings_file)
        self._library_name = library_name
        self._values = {} if not values else values

    def get(self, key, default=None):
        """Look up a configuration value, mirroring the real manager's get()."""
        return self._values.get(key, default)

    def get_active_library_name(self):
        """Name of the library whose caches are subject to backup."""
        return self._library_name
|
||||
|
||||
|
||||
def _configure_backup_paths(monkeypatch, root: Path):
    """Redirect the backup service's path helpers into directories under *root*.

    Returns the ``(settings_dir, cache_dir)`` pair the fakes resolve to.
    """
    settings_dir = root / "settings"
    cache_dir = settings_dir / "cache"

    def _dir_fake(directory):
        # Build a stand-in for get_settings_dir / get_cache_base_dir that
        # optionally creates *directory* and always returns its string path.
        def fake(create: bool = True):
            if create:
                directory.mkdir(parents=True, exist_ok=True)
            return str(directory)

        return fake

    def fake_get_cache_file_path(cache_type, library_name=None, create_dir=True):
        # Only the two cache types the backup service touches are mapped;
        # anything else is a test bug.
        if cache_type == CacheType.SYMLINK:
            target = cache_dir / "symlink" / "symlink_map.json"
        elif cache_type == CacheType.MODEL_UPDATE:
            target = cache_dir / "model_update" / f"{library_name or 'default'}.sqlite"
        else:  # pragma: no cover - the test only covers the backup targets
            raise AssertionError(f"Unexpected cache type: {cache_type}")

        if create_dir:
            target.parent.mkdir(parents=True, exist_ok=True)
        return str(target)

    monkeypatch.setattr(backup_service, "get_settings_dir", _dir_fake(settings_dir))
    monkeypatch.setattr(backup_service, "get_cache_base_dir", _dir_fake(cache_dir))
    monkeypatch.setattr(backup_service, "get_cache_file_path", fake_get_cache_file_path)

    return settings_dir, cache_dir
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_backup_round_trip_restores_user_state(tmp_path, monkeypatch):
    """A snapshot taken before edits restores every tracked file verbatim."""
    settings_dir, cache_dir = _configure_backup_paths(monkeypatch, tmp_path)

    settings_file = settings_dir / "settings.json"
    download_history = cache_dir / "download_history" / "downloaded_versions.sqlite"
    symlink_map = cache_dir / "symlink" / "symlink_map.json"
    model_update_db = cache_dir / "model_update" / "main.sqlite"

    for tracked in (settings_file, download_history, symlink_map, model_update_db):
        tracked.parent.mkdir(parents=True, exist_ok=True)

    # Seed the "v1" state that the snapshot should capture.
    settings_file.write_text(json.dumps({"backup_auto_enabled": True}), encoding="utf-8")
    download_history.write_bytes(b"download-history-v1")
    symlink_map.write_text(json.dumps({"a": "/tmp/a"}), encoding="utf-8")
    model_update_db.write_bytes(b"model-update-v1")

    service = backup_service.BackupService(
        settings_manager=DummySettings(settings_file),
        backup_dir=str(tmp_path / "backups"),
    )

    snapshot = await service.create_snapshot(snapshot_type="manual", persist=False)
    archive_path = tmp_path / snapshot["archive_name"]
    archive_path.write_bytes(snapshot["archive_bytes"])

    # Clobber everything with "v2" content so the restore has work to do.
    settings_file.write_text(json.dumps({"backup_auto_enabled": False}), encoding="utf-8")
    download_history.write_bytes(b"download-history-v2")
    symlink_map.write_text(json.dumps({"a": "/tmp/b"}), encoding="utf-8")
    model_update_db.write_bytes(b"model-update-v2")

    result = await service.restore_snapshot(str(archive_path))

    # Every tracked file must be back at its snapshotted "v1" content.
    assert result["success"] is True
    assert settings_file.read_text(encoding="utf-8") == json.dumps({"backup_auto_enabled": True})
    assert download_history.read_bytes() == b"download-history-v1"
    assert symlink_map.read_text(encoding="utf-8") == json.dumps({"a": "/tmp/a"})
    assert model_update_db.read_bytes() == b"model-update-v1"
|
||||
|
||||
|
||||
def test_prune_snapshots_keeps_latest_auto_only(tmp_path, monkeypatch):
    """Pruning trims auto snapshots to the retention count but spares manual ones."""
    settings_dir, _ = _configure_backup_paths(monkeypatch, tmp_path)
    settings_file = settings_dir / "settings.json"
    settings_file.parent.mkdir(parents=True, exist_ok=True)
    settings_file.write_text(json.dumps({"backup_retention_count": 2}), encoding="utf-8")

    service = backup_service.BackupService(
        settings_manager=DummySettings(settings_file, values={"backup_retention_count": 2}),
        backup_dir=str(tmp_path / "backups"),
    )

    backup_dir = Path(service.get_backup_dir())
    backup_dir.mkdir(parents=True, exist_ok=True)

    archive_names = [
        "lora-manager-backup-20240101T000000Z-auto.zip",
        "lora-manager-backup-20240102T000000Z-auto.zip",
        "lora-manager-backup-20240103T000000Z-auto.zip",
        "lora-manager-backup-20240104T000000Z-manual.zip",
    ]
    # Give each archive a distinct, increasing mtime so "latest" is well defined.
    for offset, name in enumerate(archive_names):
        archive = backup_dir / name
        archive.write_bytes(b"zip")
        os.utime(archive, (1000 + offset, 1000 + offset))

    service._prune_snapshots()

    # Oldest auto snapshot is dropped; the manual snapshot is never pruned.
    remaining = sorted(entry.name for entry in backup_dir.glob("*.zip"))
    assert remaining == [
        "lora-manager-backup-20240102T000000Z-auto.zip",
        "lora-manager-backup-20240103T000000Z-auto.zip",
        "lora-manager-backup-20240104T000000Z-manual.zip",
    ]
|
||||
|
||||
|
||||
def test_backup_status_includes_backup_dir(tmp_path, monkeypatch):
    """get_status reports the directory the service was constructed with."""
    settings_dir, _ = _configure_backup_paths(monkeypatch, tmp_path)
    settings_file = settings_dir / "settings.json"
    settings_file.parent.mkdir(parents=True, exist_ok=True)
    settings_file.write_text("{}", encoding="utf-8")

    expected_dir = tmp_path / "backups"
    service = backup_service.BackupService(
        settings_manager=DummySettings(settings_file),
        backup_dir=str(expected_dir),
    )

    assert service.get_status()["backupDir"] == str(expected_dir)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_model_update_service_migrates_legacy_snapshot_db(tmp_path, monkeypatch):
    """Constructing ModelUpdateService copies rows from a legacy sqlite DB into the new cache path.

    Seeds a legacy database with one status row and one version row, points the
    service's path helpers at a fresh target, then verifies both rows appear in
    the new database and the service tracks the new path.
    """
    legacy_db = tmp_path / "legacy" / "main.sqlite"
    new_db = tmp_path / "cache" / "model_update" / "main.sqlite"
    legacy_db.parent.mkdir(parents=True, exist_ok=True)
    new_db.parent.mkdir(parents=True, exist_ok=True)

    # Build the legacy schema and seed one model (id=1) with one version (id=11).
    with sqlite3.connect(legacy_db) as conn:
        conn.executescript(
            """
            CREATE TABLE model_update_status (
                model_id INTEGER PRIMARY KEY,
                model_type TEXT NOT NULL,
                last_checked_at REAL,
                should_ignore_model INTEGER NOT NULL DEFAULT 0
            );
            CREATE TABLE model_update_versions (
                model_id INTEGER NOT NULL,
                version_id INTEGER NOT NULL,
                sort_index INTEGER NOT NULL DEFAULT 0,
                name TEXT,
                base_model TEXT,
                released_at TEXT,
                size_bytes INTEGER,
                preview_url TEXT,
                is_in_library INTEGER NOT NULL DEFAULT 0,
                should_ignore INTEGER NOT NULL DEFAULT 0,
                early_access_ends_at TEXT,
                is_early_access INTEGER NOT NULL DEFAULT 0,
                PRIMARY KEY (model_id, version_id)
            );
            INSERT INTO model_update_status (
                model_id, model_type, last_checked_at, should_ignore_model
            ) VALUES (1, 'lora', 123.0, 1);
            INSERT INTO model_update_versions (
                model_id, version_id, sort_index, name, base_model, released_at,
                size_bytes, preview_url, is_in_library, should_ignore,
                early_access_ends_at, is_early_access
            ) VALUES (
                1, 11, 0, 'v1', 'SD15', '2024-01-01T00:00:00Z',
                1024, 'https://example.com/v1.png', 1, 0, NULL, 0
            );
            """
        )
        conn.commit()

    class DummySettingsManager:
        # Only the active library name is consulted by the service here.
        def get_active_library_name(self):
            return "main"

    # Resolve the service's cache path straight to our target database.
    monkeypatch.setattr(
        "py.services.model_update_service.resolve_cache_path_with_migration",
        lambda *args, **kwargs: str(new_db),
    )

    class LegacyCache:
        # Stand-in persistent cache that reports where the legacy DB lives.
        def get_database_path(self):
            return str(legacy_db)

    monkeypatch.setattr(
        "py.services.persistent_model_cache.get_persistent_cache",
        lambda *_args, **_kwargs: LegacyCache(),
    )

    # Migration is expected to happen as a side effect of construction.
    service = ModelUpdateService(settings_manager=DummySettingsManager())

    with sqlite3.connect(new_db) as conn:
        row = conn.execute(
            "SELECT model_id, model_type, last_checked_at, should_ignore_model FROM model_update_status"
        ).fetchone()
        version_row = conn.execute(
            "SELECT model_id, version_id, name, base_model, is_in_library FROM model_update_versions"
        ).fetchone()

    # Both seeded rows survive the migration unchanged.
    assert row == (1, "lora", 123.0, 1)
    assert version_row == (1, 11, "v1", "SD15", 1)
    # NOTE(review): relies on the private _db_path attribute — confirm it stays stable.
    assert service._db_path == str(new_db)
|
||||
Reference in New Issue
Block a user