Mirror of https://github.com/willmiao/ComfyUI-Lora-Manager.git (synced 2026-03-24 14:42:11 -03:00).
feat: enhance model move functionality with cache entry updates

- Return cache entry data from model move operations for immediate UI updates
- Add recalculate_type parameter to update_single_model_cache for proper type adjustment
- Propagate cache entry through API layer to frontend MoveManager
- Enable virtual scroller to update moved items with new cache data
This commit is contained in:
@@ -496,12 +496,15 @@ class ModelMoveService:
|
||||
'new_file_path': file_path
|
||||
}
|
||||
|
||||
new_file_path = await self.scanner.move_model(file_path, target_path)
|
||||
if new_file_path:
|
||||
move_result = await self.scanner.move_model(file_path, target_path)
|
||||
if move_result:
|
||||
new_file_path = move_result.get("new_path")
|
||||
cache_entry = move_result.get("cache_entry")
|
||||
return {
|
||||
'success': True,
|
||||
'original_file_path': file_path,
|
||||
'new_file_path': new_file_path
|
||||
'new_file_path': new_file_path,
|
||||
'cache_entry': cache_entry
|
||||
}
|
||||
else:
|
||||
return {
|
||||
@@ -539,7 +542,8 @@ class ModelMoveService:
|
||||
"original_file_path": file_path,
|
||||
"new_file_path": result.get('new_file_path'),
|
||||
"success": result['success'],
|
||||
"message": result.get('message', result.get('error', 'Unknown'))
|
||||
"message": result.get('message', result.get('error', 'Unknown')),
|
||||
"cache_entry": result.get('cache_entry')
|
||||
})
|
||||
|
||||
success_count = sum(1 for r in results if r["success"])
|
||||
|
||||
@@ -1216,9 +1216,12 @@ class ModelScanner:
|
||||
except Exception as e:
|
||||
logger.error(f"Error moving metadata file: {e}")
|
||||
|
||||
await self.update_single_model_cache(source_path, target_file, metadata)
|
||||
update_result = await self.update_single_model_cache(source_path, target_file, metadata, recalculate_type=True)
|
||||
|
||||
return target_file
|
||||
return {
|
||||
"new_path": target_file,
|
||||
"cache_entry": update_result if isinstance(update_result, dict) else None
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error moving model: {e}", exc_info=True)
|
||||
@@ -1250,7 +1253,7 @@ class ModelScanner:
|
||||
logger.error(f"Error updating metadata paths: {e}", exc_info=True)
|
||||
return None
|
||||
|
||||
async def update_single_model_cache(self, original_path: str, new_path: str, metadata: Dict) -> bool:
|
||||
async def update_single_model_cache(self, original_path: str, new_path: str, metadata: Dict, recalculate_type: bool = False) -> Union[bool, Dict]:
|
||||
"""Update cache after a model has been moved or modified"""
|
||||
cache = await self.get_cached_data()
|
||||
|
||||
@@ -1287,6 +1290,9 @@ class ModelScanner:
|
||||
file_path_override=normalized_new_path,
|
||||
)
|
||||
|
||||
if recalculate_type:
|
||||
cache_entry = self.adjust_cached_entry(cache_entry)
|
||||
|
||||
cache.raw_data.append(cache_entry)
|
||||
cache.add_to_version_index(cache_entry)
|
||||
|
||||
@@ -1307,7 +1313,7 @@ class ModelScanner:
|
||||
if cache_modified:
|
||||
await self._persist_current_cache()
|
||||
|
||||
return True
|
||||
return cache_entry if metadata else True
|
||||
|
||||
def has_hash(self, sha256: str) -> bool:
|
||||
"""Check if a model with given hash exists"""
|
||||
|
||||
Reference in New Issue
Block a user