Refactor model caching logic to streamline adding models and ensure disk persistence

This commit is contained in:
Will Miao
2025-05-30 07:34:39 +08:00
parent ec36524c35
commit 68568073ec
2 changed files with 43 additions and 12 deletions

View File

@@ -2,8 +2,7 @@ import logging
import os
import json
import asyncio
from typing import Optional, Dict, Any
from .civitai_client import CivitaiClient
from typing import Dict
from ..utils.models import LoraMetadata, CheckpointMetadata
from ..utils.constants import CARD_PREVIEW_WIDTH
from ..utils.exif_utils import ExifUtils
@@ -281,17 +280,11 @@ class DownloadManager:
scanner = await self._get_lora_scanner()
logger.info(f"Updating lora cache for {save_path}")
cache = await scanner.get_cached_data()
# Convert metadata to dictionary
metadata_dict = metadata.to_dict()
metadata_dict['folder'] = relative_path
cache.raw_data.append(metadata_dict)
await cache.resort()
all_folders = set(cache.folders)
all_folders.add(relative_path)
cache.folders = sorted(list(all_folders), key=lambda x: x.lower())
# Update the hash index with the new model entry
scanner._hash_index.add_entry(metadata_dict['sha256'], metadata_dict['file_path'])
# Add model to cache and save to disk in a single operation
await scanner.add_model_to_cache(metadata_dict, relative_path)
# Report 100% completion
if progress_callback:

View File

@@ -894,6 +894,44 @@ class ModelScanner:
models_list.append(result)
except Exception as e:
logger.error(f"Error processing {file_path}: {e}")
async def add_model_to_cache(self, metadata_dict: Dict, folder: str = '') -> bool:
    """Add a model to the in-memory cache and persist the cache to disk.

    Appends the model's metadata to the cached model list, re-sorts the
    cache, updates the folder list and the hash index, and then saves the
    cache so the new entry survives a restart.

    Args:
        metadata_dict: The model metadata dictionary. Must contain the
            'sha256' and 'file_path' keys (used to update the hash index);
            its 'folder' key is overwritten with *folder*.
        folder: The relative folder path for the model.

    Returns:
        bool: True if the model was added and saved, False otherwise.
    """
    try:
        # Lazily initialize the cache on first use.
        if self._cache is None:
            await self.get_cached_data()
            if self._cache is None:
                # Cache could not be populated; bail out before mutating
                # metadata_dict or the hash index.
                logger.error("Error adding model to cache: cache is unavailable")
                return False

        # Record the model's folder, append it, and restore sort order.
        metadata_dict['folder'] = folder
        self._cache.raw_data.append(metadata_dict)
        await self._cache.resort()

        # Keep the folder list unique and case-insensitively sorted.
        all_folders = set(self._cache.folders)
        all_folders.add(folder)
        self._cache.folders = sorted(all_folders, key=lambda x: x.lower())

        # Index the new model by content hash so lookups/duplicate checks
        # see it immediately.
        self._hash_index.add_entry(metadata_dict['sha256'], metadata_dict['file_path'])

        # Persist the updated cache to disk in the same operation.
        await self._save_cache_to_disk()
        return True
    except Exception as e:
        logger.error(f"Error adding model to cache: {e}")
        return False
async def move_model(self, source_path: str, target_path: str) -> bool:
"""Move a model and its associated files to a new location"""