Mirror of https://github.com/willmiao/ComfyUI-Lora-Manager.git
feat(doctor): add duplicate filename conflict detection and one-click resolution
Detects when multiple model files share the same basename (causing ambiguity in LoRA resolution), logs warnings during scanning, and provides a "Resolve Conflicts" button in the Doctor panel. Resolution renames duplicates with hash-prefixed unique filenames, migrates all sidecar and preview files, and updates the cache and frontend scroller in-place so the model modal immediately reflects the new filename.

Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
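The resolution flow is exposed through the Doctor HTTP API wired up below (`GET /api/lm/doctor/diagnostics`, `POST /api/lm/doctor/resolve-filename-conflicts`). A minimal client sketch, assuming a default local ComfyUI address of `http://127.0.0.1:8188` (adjust for your setup); the route paths and the `success`/`renamed`/`count` response fields come from this commit, the rest is illustrative:

```python
import json
import urllib.request

BASE = "http://127.0.0.1:8188"  # assumed local ComfyUI address; adjust as needed

# Inspect diagnostics; the "filename_conflicts" entry reports any conflicts.
# (The exact wrapper shape of this response is an assumption.)
with urllib.request.urlopen(f"{BASE}/api/lm/doctor/diagnostics") as resp:
    print(json.load(resp))

# Trigger one-click resolution (POST with an empty body).
req = urllib.request.Request(
    f"{BASE}/api/lm/doctor/resolve-filename-conflicts", data=b"", method="POST"
)
with urllib.request.urlopen(req) as resp:
    result = json.load(resp)

# Response fields come from the handler below.
print(f"Renamed {result['count']} file(s)")
for item in result["renamed"]:
    print(f"  [{item['label']}] {item['old_path']} -> {item['new_path']}")
```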
@@ -38,10 +38,12 @@ from ...services.websocket_manager import ws_manager
 from ...services.downloader import get_downloader
 from ...services.errors import ResourceNotFoundError
 from ...services.cache_health_monitor import CacheHealthMonitor, CacheHealthStatus
+from ...utils.models import BaseModelMetadata
 from ...utils.constants import (
     CIVITAI_USER_MODEL_TYPES,
     DEFAULT_NODE_COLOR,
     NODE_TYPES,
+    PREVIEW_EXTENSIONS,
     SUPPORTED_MEDIA_EXTENSIONS,
     VALID_LORA_TYPES,
 )
@@ -617,6 +619,7 @@ class DoctorHandler:
         diagnostics = [
             await self._check_civitai_api_key(),
             await self._check_cache_health(),
+            await self._check_filename_conflicts(),
             self._check_ui_version(client_version, app_version),
         ]

@@ -681,6 +684,145 @@ class DoctorHandler:
             status=status,
         )

+    async def resolve_filename_conflicts(self, request: web.Request) -> web.Response:
+        renamed: list[dict[str, Any]] = []
+
+        try:
+            for model_type, label, factory in self._scanner_factories:
+                try:
+                    scanner = await factory()
+                    hash_index = getattr(scanner, "_hash_index", None)
+                    if hash_index is None:
+                        continue
+                    duplicates = {
+                        filename: list(paths)
+                        for filename, paths in hash_index.get_duplicate_filenames().items()
+                    }
+                    if not duplicates:
+                        continue
+
+                    cache = await scanner.get_cached_data()
+                    path_to_model = {m["file_path"]: m for m in cache.raw_data}
+
+                    used_basenames: set[str] = set()
+                    for paths in duplicates.values():
+                        if paths:
+                            used_basenames.add(
+                                os.path.splitext(os.path.basename(paths[0]))[0]
+                            )
+
+                    for filename, paths in duplicates.items():
+                        for idx, path in enumerate(paths):
+                            if idx == 0:
+                                continue
+                            dirname = os.path.dirname(path)
+                            base_name = os.path.splitext(os.path.basename(path))[0]
+                            ext = os.path.splitext(path)[1]
+                            if not ext:
+                                continue
+
+                            model_data = path_to_model.get(path)
+                            sha256 = (
+                                model_data.get("sha256", "") if model_data else ""
+                            )
+                            hash_provider = (
+                                lambda s=sha256: s if s else "0000"
+                            )
+
+                            new_filename = (
+                                BaseModelMetadata.generate_unique_filename(
+                                    dirname,
+                                    base_name,
+                                    ext,
+                                    hash_provider=hash_provider,
+                                )
+                            )
+
+                            candidate_base = os.path.splitext(new_filename)[0]
+                            counter = 1
+                            original_base = candidate_base
+                            while candidate_base in used_basenames:
+                                candidate_base = f"{original_base}-{counter}"
+                                new_filename = f"{candidate_base}{ext}"
+                                counter += 1
+                            used_basenames.add(candidate_base)
+
+                            new_path = os.path.join(dirname, new_filename)
+
+                            if new_filename == os.path.basename(path):
+                                continue
+
+                            if not os.path.exists(path):
+                                continue
+
+                            old_base_no_ext = os.path.splitext(path)[0]
+                            new_base_no_ext = (
+                                os.path.splitext(new_path)[0]
+                            )
+
+                            os.rename(path, new_path)
+
+                            for suffix in (".metadata.json", ".civitai.info"):
+                                old_sidecar = old_base_no_ext + suffix
+                                new_sidecar = new_base_no_ext + suffix
+                                if os.path.exists(old_sidecar):
+                                    os.rename(old_sidecar, new_sidecar)
+
+                            for preview_ext in PREVIEW_EXTENSIONS:
+                                old_preview = old_base_no_ext + preview_ext
+                                new_preview = new_base_no_ext + preview_ext
+                                if os.path.exists(old_preview):
+                                    os.rename(old_preview, new_preview)
+
+                            entry = path_to_model.get(path)
+                            if entry:
+                                entry = dict(entry)
+                                entry["file_name"] = os.path.splitext(new_filename)[0]
+                                if entry.get("preview_url"):
+                                    old_preview_url = entry["preview_url"].replace("\\", "/")
+                                    preview_ext = os.path.splitext(old_preview_url)[1]
+                                    if preview_ext:
+                                        entry["preview_url"] = (new_base_no_ext + preview_ext).replace(os.sep, "/")
+                                await scanner.update_single_model_cache(
+                                    path, new_path, entry
+                                )
+
+                            logger.info(
+                                "Resolved duplicate filename '%s': "
+                                "renamed '%s' to '%s'",
+                                filename,
+                                path,
+                                new_path,
+                            )
+                            renamed.append({
+                                "model_type": model_type,
+                                "label": label,
+                                "filename": filename,
+                                "old_path": path,
+                                "new_path": new_path,
+                                "new_filename": new_filename,
+                            })
+                except Exception as exc:  # pragma: no cover - defensive
+                    logger.error(
+                        "Failed to resolve filename conflicts for %s: %s",
+                        model_type,
+                        exc,
+                        exc_info=True,
+                    )
+
+            return web.json_response({
+                "success": True,
+                "renamed": renamed,
+                "count": len(renamed),
+            })
+        except Exception as exc:
+            logger.error(
+                "Error resolving filename conflicts: %s", exc, exc_info=True
+            )
+            return web.json_response(
+                {"success": False, "error": str(exc)}, status=500
+            )
+
     async def export_doctor_bundle(self, request: web.Request) -> web.Response:
         try:
             payload = await request.json()
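Two layers guard filename uniqueness in the handler above: `BaseModelMetadata.generate_unique_filename` produces a hash-prefixed candidate on disk, and the `used_basenames` set plus the `-1`, `-2`, ... suffix loop prevents two renames in the same batch from colliding with each other. A standalone restatement of that suffix loop for illustration (`dedupe_basename` is a hypothetical name, not part of the codebase):

```python
import os

def dedupe_basename(new_filename: str, used_basenames: set[str]) -> str:
    """Append -1, -2, ... until the basename is unique within this batch."""
    candidate_base, ext = os.path.splitext(new_filename)
    original_base = candidate_base
    counter = 1
    while candidate_base in used_basenames:
        candidate_base = f"{original_base}-{counter}"
        counter += 1
    used_basenames.add(candidate_base)
    return f"{candidate_base}{ext}"

used: set[str] = {"style_a1b2c3d4"}
print(dedupe_basename("style_a1b2c3d4.safetensors", used))  # style_a1b2c3d4-1.safetensors
print(dedupe_basename("style_a1b2c3d4.safetensors", used))  # style_a1b2c3d4-2.safetensors
```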
@@ -846,6 +988,79 @@ class DoctorHandler:
             "actions": [{"id": "repair-cache", "label": "Rebuild Cache"}],
         }

+    async def _check_filename_conflicts(self) -> dict[str, Any]:
+        all_conflicts: list[dict[str, Any]] = []
+        total_conflict_groups = 0
+        total_conflict_files = 0
+
+        for model_type, label, factory in self._scanner_factories:
+            try:
+                scanner = await factory()
+                hash_index = getattr(scanner, "_hash_index", None)
+                if hash_index is None:
+                    continue
+                duplicates = hash_index.get_duplicate_filenames()
+                if not duplicates:
+                    continue
+
+                total_conflict_groups += len(duplicates)
+                for filename, paths in duplicates.items():
+                    total_conflict_files += len(paths)
+                    all_conflicts.append({
+                        "model_type": model_type,
+                        "label": label,
+                        "filename": filename,
+                        "paths": paths,
+                    })
+            except Exception as exc:  # pragma: no cover - defensive
+                logger.error(
+                    "Doctor filename conflict check failed for %s: %s",
+                    model_type,
+                    exc,
+                    exc_info=True,
+                )
+
+        if not all_conflicts:
+            return {
+                "id": "filename_conflicts",
+                "title": "Duplicate Filename Conflicts",
+                "status": "ok",
+                "summary": "No duplicate filenames found across model directories.",
+                "details": [],
+                "actions": [],
+            }
+
+        summary = (
+            f"{total_conflict_groups} filename(s) shared by "
+            f"{total_conflict_files} files across your library. "
+            f"This causes ambiguity when loading LoRAs by name."
+        )
+        details: list[str | dict[str, Any]] = [
+            {
+                "conflict_groups": total_conflict_groups,
+                "total_conflict_files": total_conflict_files,
+            }
+        ]
+        for conflict in all_conflicts:
+            details.append(
+                f"[{conflict['label']}] '{conflict['filename']}' "
+                f"found in {len(conflict['paths'])} locations"
+            )
+
+        return {
+            "id": "filename_conflicts",
+            "title": "Duplicate Filename Conflicts",
+            "status": "warning",
+            "summary": summary,
+            "details": details,
+            "actions": [
+                {
+                    "id": "resolve-filename-conflicts",
+                    "label": "Resolve Conflicts",
+                }
+            ],
+        }
+
     def _check_ui_version(self, client_version: str, app_version: str) -> dict[str, Any]:
         if client_version and client_version != app_version:
             return {
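When conflicts exist, `_check_filename_conflicts` returns a `warning` diagnostic whose first `details` entry holds machine-readable counts and whose remaining entries are human-readable lines. An illustrative payload (the field names and action id come from the code above; the concrete values and label text are made up):

```python
{
    "id": "filename_conflicts",
    "title": "Duplicate Filename Conflicts",
    "status": "warning",
    "summary": (
        "1 filename(s) shared by 2 files across your library. "
        "This causes ambiguity when loading LoRAs by name."
    ),
    "details": [
        {"conflict_groups": 1, "total_conflict_files": 2},
        "[LoRAs] 'style' found in 2 locations",
    ],
    "actions": [
        {"id": "resolve-filename-conflicts", "label": "Resolve Conflicts"}
    ],
}
```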
@@ -2796,6 +3011,7 @@ class MiscHandlerSet:
             "update_settings": self.settings.update_settings,
             "get_doctor_diagnostics": self.doctor.get_doctor_diagnostics,
             "repair_doctor_cache": self.doctor.repair_doctor_cache,
+            "resolve_doctor_filename_conflicts": self.doctor.resolve_filename_conflicts,
             "export_doctor_bundle": self.doctor.export_doctor_bundle,
             "get_priority_tags": self.settings.get_priority_tags,
             "get_settings_libraries": self.settings.get_libraries,
@@ -24,6 +24,7 @@ MISC_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
     RouteDefinition("POST", "/api/lm/settings", "update_settings"),
     RouteDefinition("GET", "/api/lm/doctor/diagnostics", "get_doctor_diagnostics"),
     RouteDefinition("POST", "/api/lm/doctor/repair-cache", "repair_doctor_cache"),
+    RouteDefinition("POST", "/api/lm/doctor/resolve-filename-conflicts", "resolve_doctor_filename_conflicts"),
     RouteDefinition("POST", "/api/lm/doctor/export-bundle", "export_doctor_bundle"),
     RouteDefinition("GET", "/api/lm/priority-tags", "get_priority_tags"),
     RouteDefinition("GET", "/api/lm/settings/libraries", "get_settings_libraries"),
@@ -79,6 +79,12 @@ class ModelHashIndex:
                 hash_val = h
                 break

+        if hash_val is None:
+            for h, paths in self._duplicate_hashes.items():
+                if file_path in paths:
+                    hash_val = h
+                    break
+
         # If we didn't find a hash, nothing to do
         if not hash_val:
             return
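Both the Doctor check and the resolver reach conflicts through `get_duplicate_filenames()` on the scanner's private `_hash_index`. The index internals are outside this diff; below is a minimal sketch of the contract those callers rely on, a mapping from a shared basename to every path that carries it (everything except the `get_duplicate_filenames` name is an assumption):

```python
import os
from collections import defaultdict

class FilenameIndexSketch:
    """Hypothetical stand-in for the parts of ModelHashIndex used above."""

    def __init__(self) -> None:
        self._by_basename: dict[str, list[str]] = defaultdict(list)

    def add_path(self, file_path: str) -> None:
        basename = os.path.splitext(os.path.basename(file_path))[0]
        self._by_basename[basename].append(file_path)

    def get_duplicate_filenames(self) -> dict[str, list[str]]:
        # Only basenames carried by two or more files are conflicts.
        return {
            name: paths
            for name, paths in self._by_basename.items()
            if len(paths) > 1
        }

index = FilenameIndexSketch()
index.add_path("/loras/anime/style.safetensors")
index.add_path("/loras/photo/style.safetensors")
print(index.get_duplicate_filenames())
# {'style': ['/loras/anime/style.safetensors', '/loras/photo/style.safetensors']}
```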
@@ -1072,14 +1072,6 @@ class ModelScanner:
                 excluded_models.append(model_data['file_path'])
                 return None

-        # Check for duplicate filename before adding to hash index
-        # filename = os.path.splitext(os.path.basename(file_path))[0]
-        # existing_hash = hash_index.get_hash_by_filename(filename)
-        # if existing_hash and existing_hash != model_data.get('sha256', '').lower():
-        #     existing_path = hash_index.get_path(existing_hash)
-        #     if existing_path and existing_path != file_path:
-        #         logger.warning(f"Duplicate filename detected: '{filename}' - files: '{existing_path}' and '{file_path}'")
-
         return model_data

     async def _apply_scan_result(self, scan_result: CacheBuildResult) -> None:
@@ -1105,6 +1097,31 @@ class ModelScanner:

         await self._cache.resort()

+        self._log_duplicate_filename_summary()
+
+    def _log_duplicate_filename_summary(self) -> None:
+        """Log a batched summary of duplicate filename conflicts once per scan."""
+        if self._hash_index is None:
+            return
+
+        duplicates = self._hash_index.get_duplicate_filenames()
+        if not duplicates:
+            return
+
+        total_files = sum(len(paths) for paths in duplicates.values())
+        conflict_count = len(duplicates)
+        model_type_label = self.model_type or "model"
+
+        logger.warning(
+            "Duplicate filename conflict detected: %d %s filename(s) "
+            "are shared by %d files total, causing ambiguity in %s resolution. "
+            "Open the Doctor panel to resolve with one click.",
+            conflict_count,
+            model_type_label,
+            total_files,
+            model_type_label.capitalize(),
+        )
+
     async def _sync_download_history(
         self,
         raw_data: List[Mapping[str, Any]],
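For a scan that finds, say, 3 conflicting lora basenames across 7 files, the batched warning renders roughly like this (the message template is from the code above; the values are made up):

```python
import logging

logging.basicConfig(level=logging.WARNING)
logging.warning(
    "Duplicate filename conflict detected: %d %s filename(s) "
    "are shared by %d files total, causing ambiguity in %s resolution. "
    "Open the Doctor panel to resolve with one click.",
    3, "lora", 7, "Lora",
)
# WARNING:root:Duplicate filename conflict detected: 3 lora filename(s) are
# shared by 7 files total, causing ambiguity in Lora resolution. Open the ...
```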