feat(persistent-cache): implement SQLite-based persistent model cache with loading and saving functionality

Will Miao
2025-10-03 11:00:51 +08:00
parent 3b1990e97a
commit 77bbf85b52
7 changed files with 809 additions and 79 deletions
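
Before the diffs, a quick orientation: the tests below exercise a small persistence surface, PersistentModelCache(db_path=...) with save_cache(scanner, raw_data, hash_index, excluded) and load_cache(scanner), gated by the LORA_MANAGER_DISABLE_PERSISTENT_CACHE environment variable. Here is a minimal sketch of that surface reconstructed from the tests alone; the schema, the PersistedCache fields, and the enabled() helper are assumptions, not the committed implementation in py/services/persistent_model_cache.py.

# Illustrative sketch only: every internal below is reconstructed from
# the tests, not copied from the real module.
import json
import os
import sqlite3
from dataclasses import dataclass, field


@dataclass
class PersistedCache:
    raw_data: list = field(default_factory=list)
    hash_rows: list = field(default_factory=list)       # (sha256, file_path) pairs
    excluded_models: list = field(default_factory=list)


class PersistentModelCache:
    def __init__(self, db_path: str) -> None:
        self._conn = sqlite3.connect(db_path)
        self._conn.executescript("""
            CREATE TABLE IF NOT EXISTS models (
                scanner TEXT, file_path TEXT, payload TEXT,
                PRIMARY KEY (scanner, file_path));
            CREATE TABLE IF NOT EXISTS hashes (
                scanner TEXT, sha256 TEXT, file_path TEXT);
            CREATE TABLE IF NOT EXISTS excluded (
                scanner TEXT, file_path TEXT);
        """)

    @staticmethod
    def enabled() -> bool:
        # The autouse fixture sets this variable to '1' so ordinary test
        # runs never touch the database; individual tests flip it to '0'.
        return os.environ.get('LORA_MANAGER_DISABLE_PERSISTENT_CACHE') != '1'

    def save_cache(self, scanner, raw_data, hash_index, excluded) -> None:
        with self._conn:  # one transaction per save
            for table in ('models', 'hashes', 'excluded'):
                self._conn.execute(f'DELETE FROM {table} WHERE scanner = ?', (scanner,))
            self._conn.executemany(
                'INSERT INTO models VALUES (?, ?, ?)',
                [(scanner, m['file_path'], json.dumps(m)) for m in raw_data])
            self._conn.executemany(
                'INSERT INTO hashes VALUES (?, ?, ?)',
                [(scanner, sha, p) for sha, paths in hash_index.items() for p in paths])
            self._conn.executemany(
                'INSERT INTO excluded VALUES (?, ?)',
                [(scanner, p) for p in excluded])

    def load_cache(self, scanner):
        rows = self._conn.execute(
            'SELECT payload FROM models WHERE scanner = ?', (scanner,)).fetchall()
        if not rows:
            return None  # nothing persisted yet for this scanner
        hash_rows = self._conn.execute(
            'SELECT sha256, file_path FROM hashes WHERE scanner = ?', (scanner,)).fetchall()
        excluded = [r[0] for r in self._conn.execute(
            'SELECT file_path FROM excluded WHERE scanner = ?', (scanner,)).fetchall()]
        return PersistedCache(
            raw_data=[json.loads(r[0]) for r in rows],
            hash_rows=hash_rows,
            excluded_models=excluded)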


@@ -9,6 +9,7 @@ from py.services import model_scanner
from py.services.model_cache import ModelCache
from py.services.model_hash_index import ModelHashIndex
from py.services.model_scanner import CacheBuildResult, ModelScanner
from py.services.persistent_model_cache import PersistentModelCache
from py.utils.models import BaseModelMetadata
@@ -78,6 +79,11 @@ def reset_model_scanner_singletons():
    ModelScanner._locks.clear()


@pytest.fixture(autouse=True)
def disable_persistent_cache_env(monkeypatch):
    monkeypatch.setenv('LORA_MANAGER_DISABLE_PERSISTENT_CACHE', '1')


@pytest.fixture(autouse=True)
def stub_register_service(monkeypatch):
    async def noop(*_args, **_kwargs):
@@ -175,3 +181,60 @@ async def test_initialize_in_background_applies_scan_result(tmp_path: Path, monk
    assert scanner._tags_count == {"alpha": 1, "beta": 1}
    assert scanner._excluded_models == [_normalize_path(tmp_path / "skip-file.txt")]
    assert ws_stub.payloads[-1]["progress"] == 100


@pytest.mark.asyncio
async def test_load_persisted_cache_populates_cache(tmp_path: Path, monkeypatch):
    # Enable persistence for this specific test and back it with a temp database
    monkeypatch.setenv('LORA_MANAGER_DISABLE_PERSISTENT_CACHE', '0')
    db_path = tmp_path / 'cache.sqlite'
    store = PersistentModelCache(db_path=str(db_path))

    file_path = tmp_path / 'one.txt'
    file_path.write_text('one', encoding='utf-8')
    normalized = _normalize_path(file_path)

    raw_model = {
        'file_path': normalized,
        'file_name': 'one',
        'model_name': 'one',
        'folder': '',
        'size': 3,
        'modified': 123.0,
        'sha256': 'hash-one',
        'base_model': 'test',
        'preview_url': '',
        'preview_nsfw_level': 0,
        'from_civitai': True,
        'favorite': False,
        'notes': '',
        'usage_tips': '',
        'exclude': False,
        'db_checked': False,
        'last_checked_at': 0.0,
        'tags': ['alpha'],
        'civitai': {'id': 11, 'modelId': 22, 'name': 'ver', 'trainedWords': ['abc']},
    }

    store.save_cache('dummy', [raw_model], {'hash-one': [normalized]}, [])
    monkeypatch.setattr(model_scanner, 'get_persistent_cache', lambda: store)

    scanner = DummyScanner(tmp_path)
    ws_stub = RecordingWebSocketManager()
    monkeypatch.setattr(model_scanner, 'ws_manager', ws_stub)

    loaded = await scanner._load_persisted_cache('dummy')

    assert loaded is True
    cache = await scanner.get_cached_data()
    assert len(cache.raw_data) == 1
    entry = cache.raw_data[0]
    assert entry['file_path'] == normalized
    assert entry['tags'] == ['alpha']
    assert entry['civitai']['trainedWords'] == ['abc']
    assert scanner._hash_index.get_path('hash-one') == normalized
    assert scanner._tags_count == {'alpha': 1}
    assert ws_stub.payloads[-1]['stage'] == 'loading_cache'
    assert ws_stub.payloads[-1]['progress'] == 1
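
The last two assertions pin down the websocket notification emitted while the persisted cache is applied. A hypothetical sketch of that emission, reconstructed only from the asserted keys: the 'loading_cache' stage label comes straight from the test, broadcast_json is an assumed method name, and whether 'progress' is a model count or a percentage cannot be told from a single-model run.

# Hypothetical: the real emission lives inside ModelScanner._load_persisted_cache.
async def announce_cache_load(ws_manager, loaded_count: int) -> None:
    await ws_manager.broadcast_json({   # assumed method name
        'stage': 'loading_cache',       # stage label asserted above
        'progress': loaded_count,       # equals 1 for the single model here
    })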


@@ -0,0 +1,92 @@
from pathlib import Path

import pytest

from py.services.persistent_model_cache import PersistentModelCache


def test_persistent_cache_roundtrip(tmp_path: Path, monkeypatch) -> None:
    monkeypatch.setenv('LORA_MANAGER_DISABLE_PERSISTENT_CACHE', '0')
    db_path = tmp_path / 'cache.sqlite'
    store = PersistentModelCache(db_path=str(db_path))

    file_a = (tmp_path / 'a.txt').as_posix()
    file_b = (tmp_path / 'b.txt').as_posix()
    duplicate_path = f"{file_b}.copy"

    raw_data = [
        {
            'file_path': file_a,
            'file_name': 'a',
            'model_name': 'Model A',
            'folder': '',
            'size': 10,
            'modified': 100.0,
            'sha256': 'hash-a',
            'base_model': 'base',
            'preview_url': 'preview/a.png',
            'preview_nsfw_level': 1,
            'from_civitai': True,
            'favorite': True,
            'notes': 'note',
            'usage_tips': '{}',
            'exclude': False,
            'db_checked': True,
            'last_checked_at': 200.0,
            'tags': ['alpha', 'beta'],
            'civitai': {'id': 1, 'modelId': 2, 'name': 'verA', 'trainedWords': ['word1']},
        },
        {
            'file_path': file_b,
            'file_name': 'b',
            'model_name': 'Model B',
            'folder': 'folder',
            'size': 20,
            'modified': 120.0,
            'sha256': 'hash-b',
            'base_model': '',
            'preview_url': '',
            'preview_nsfw_level': 0,
            'from_civitai': False,
            'favorite': False,
            'notes': '',
            'usage_tips': '',
            'exclude': True,
            'db_checked': False,
            'last_checked_at': 0.0,
            'tags': [],
            'civitai': None,
        },
    ]
    hash_index = {
        'hash-a': [file_a],
        'hash-b': [file_b, duplicate_path],
    }
    excluded = [duplicate_path]

    store.save_cache('dummy', raw_data, hash_index, excluded)
    persisted = store.load_cache('dummy')

    assert persisted is not None
    assert len(persisted.raw_data) == 2

    items = {item['file_path']: item for item in persisted.raw_data}
    assert set(items.keys()) == {file_a, file_b}

    first = items[file_a]
    assert first['favorite'] is True
    assert first['civitai']['id'] == 1
    assert first['civitai']['trainedWords'] == ['word1']
    assert first['tags'] == ['alpha', 'beta']

    second = items[file_b]
    assert second['exclude'] is True
    assert second['civitai'] is None

    expected_hash_pairs = {
        ('hash-a', file_a),
        ('hash-b', file_b),
        ('hash-b', duplicate_path),
    }
    assert set((sha, path) for sha, path in persisted.hash_rows) == expected_hash_pairs
    assert persisted.excluded_models == excluded
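
The roundtrip above persists the hash index as flat (sha256, file_path) rows, which lets one hash own several paths: 'hash-b' keeps both file_b and its .copy. As a reading aid, a hypothetical helper showing how such rows fold back into a one-to-many mapping; the real scanner rebuilds its ModelHashIndex instead, so rebuild_hash_index here is purely illustrative.

from collections import defaultdict

def rebuild_hash_index(hash_rows):
    # Fold (sha256, file_path) pairs back into sha256 -> [paths],
    # preserving duplicates and insertion order.
    index = defaultdict(list)
    for sha, path in hash_rows:
        index[sha].append(path)
    return dict(index)

# With the rows persisted above this yields:
# {'hash-a': [file_a], 'hash-b': [file_b, duplicate_path]}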