feat(doctor): add duplicate filename conflict detection and one-click resolution

Detects when multiple model files share the same basename (causing
ambiguity in LoRA resolution), logs warnings during scanning, and
provides a "Resolve Conflicts" button in the Doctor panel. Resolution
renames duplicates with hash-prefixed unique filenames, migrates all
sidecar and preview files, and updates the cache and frontend scroller
in place so the model modal immediately reflects the new filename.
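
A minimal sketch of the new endpoint's contract (route and field names are
taken from the handler and route definitions below; the concrete paths and
the unique-name format produced by
BaseModelMetadata.generate_unique_filename are illustrative only):

    POST /api/lm/doctor/resolve-filename-conflicts
    →
    {
      "success": true,
      "count": 1,
      "renamed": [
        {
          "model_type": "lora",
          "label": "LoRAs",
          "filename": "detail",
          "old_path": "/loras/b/detail.safetensors",
          "new_path": "/loras/b/detail-ab12cd34.safetensors",
          "new_filename": "detail-ab12cd34.safetensors"
        }
      ]
    }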

Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
Will Miao
2026-04-30 15:21:26 +08:00
parent 1d035361a4
commit 5dcfde36ea
18 changed files with 601 additions and 19 deletions

View File

@@ -1906,7 +1906,9 @@
"repairSuccess": "Cache-Neuaufbau abgeschlossen.", "repairSuccess": "Cache-Neuaufbau abgeschlossen.",
"repairFailed": "Cache-Neuaufbau fehlgeschlagen: {message}", "repairFailed": "Cache-Neuaufbau fehlgeschlagen: {message}",
"exportSuccess": "Diagnosepaket exportiert.", "exportSuccess": "Diagnosepaket exportiert.",
"exportFailed": "Export des Diagnosepakets fehlgeschlagen: {message}" "exportFailed": "Export des Diagnosepakets fehlgeschlagen: {message}",
"conflictsResolved": "{count} Dateinamenskonflikt(e) gelöst.",
"conflictsResolveFailed": "Auflösung der Dateinamenskonflikte fehlgeschlagen: {message}"
} }
}, },
"banners": { "banners": {

View File

@@ -1906,7 +1906,9 @@
"repairSuccess": "Cache rebuild completed.", "repairSuccess": "Cache rebuild completed.",
"repairFailed": "Cache rebuild failed: {message}", "repairFailed": "Cache rebuild failed: {message}",
"exportSuccess": "Diagnostics bundle exported.", "exportSuccess": "Diagnostics bundle exported.",
"exportFailed": "Failed to export diagnostics bundle: {message}" "exportFailed": "Failed to export diagnostics bundle: {message}",
"conflictsResolved": "{count} filename conflict(s) resolved.",
"conflictsResolveFailed": "Failed to resolve filename conflicts: {message}"
} }
}, },
"banners": { "banners": {

View File

@@ -1906,7 +1906,9 @@
"repairSuccess": "Reconstrucción de caché completada.", "repairSuccess": "Reconstrucción de caché completada.",
"repairFailed": "Error al reconstruir la caché: {message}", "repairFailed": "Error al reconstruir la caché: {message}",
"exportSuccess": "Paquete de diagnósticos exportado.", "exportSuccess": "Paquete de diagnósticos exportado.",
"exportFailed": "Error al exportar el paquete de diagnósticos: {message}" "exportFailed": "Error al exportar el paquete de diagnósticos: {message}",
"conflictsResolved": "{count} conflicto(s) de nombre de archivo resuelto(s).",
"conflictsResolveFailed": "Error al resolver conflictos de nombre de archivo: {message}"
} }
}, },
"banners": { "banners": {

View File

@@ -1906,7 +1906,9 @@
"repairSuccess": "Reconstruction du cache terminée.", "repairSuccess": "Reconstruction du cache terminée.",
"repairFailed": "Échec de la reconstruction du cache : {message}", "repairFailed": "Échec de la reconstruction du cache : {message}",
"exportSuccess": "Lot de diagnostics exporté.", "exportSuccess": "Lot de diagnostics exporté.",
"exportFailed": "Échec de l'export du lot de diagnostics : {message}" "exportFailed": "Échec de l'export du lot de diagnostics : {message}",
"conflictsResolved": "{count} conflit(s) de nom de fichier résolu(s).",
"conflictsResolveFailed": "Échec de la résolution des conflits de nom de fichier : {message}"
} }
}, },
"banners": { "banners": {

View File

@@ -1906,7 +1906,9 @@
"repairSuccess": "בניית המטמון מחדש הושלמה.", "repairSuccess": "בניית המטמון מחדש הושלמה.",
"repairFailed": "בניית המטמון מחדש נכשלה: {message}", "repairFailed": "בניית המטמון מחדש נכשלה: {message}",
"exportSuccess": "חבילת האבחון יוצאה.", "exportSuccess": "חבילת האבחון יוצאה.",
"exportFailed": "ייצוא חבילת האבחון נכשל: {message}" "exportFailed": "ייצוא חבילת האבחון נכשל: {message}",
"conflictsResolved": "נפתרו {count} התנגשויות בשמות קבצים.",
"conflictsResolveFailed": "פתרון התנגשויות שמות קבצים נכשל: {message}"
} }
}, },
"banners": { "banners": {

View File

@@ -1906,7 +1906,9 @@
"repairSuccess": "キャッシュの再構築が完了しました。", "repairSuccess": "キャッシュの再構築が完了しました。",
"repairFailed": "キャッシュの再構築に失敗しました: {message}", "repairFailed": "キャッシュの再構築に失敗しました: {message}",
"exportSuccess": "診断パッケージをエクスポートしました。", "exportSuccess": "診断パッケージをエクスポートしました。",
"exportFailed": "診断パッケージのエクスポートに失敗しました: {message}" "exportFailed": "診断パッケージのエクスポートに失敗しました: {message}",
"conflictsResolved": "{count} 件のファイル名競合が解決されました。",
"conflictsResolveFailed": "ファイル名競合の解決に失敗しました: {message}"
} }
}, },
"banners": { "banners": {

View File

@@ -1906,7 +1906,9 @@
"repairSuccess": "캐시 재구성이 완료되었습니다.", "repairSuccess": "캐시 재구성이 완료되었습니다.",
"repairFailed": "캐시 재구성 실패: {message}", "repairFailed": "캐시 재구성 실패: {message}",
"exportSuccess": "진단 번들이 내보내졌습니다.", "exportSuccess": "진단 번들이 내보내졌습니다.",
"exportFailed": "진단 번들 내보내기 실패: {message}" "exportFailed": "진단 번들 내보내기 실패: {message}",
"conflictsResolved": "{count}개 파일명 충돌이 해결되었습니다.",
"conflictsResolveFailed": "파일명 충돌 해결 실패: {message}"
} }
}, },
"banners": { "banners": {

View File

@@ -1906,7 +1906,9 @@
"repairSuccess": "Перестройка кэша завершена.", "repairSuccess": "Перестройка кэша завершена.",
"repairFailed": "Не удалось перестроить кэш: {message}", "repairFailed": "Не удалось перестроить кэш: {message}",
"exportSuccess": "Диагностический пакет экспортирован.", "exportSuccess": "Диагностический пакет экспортирован.",
"exportFailed": "Не удалось экспортировать диагностический пакет: {message}" "exportFailed": "Не удалось экспортировать диагностический пакет: {message}",
"conflictsResolved": "Разрешено конфликтов имён файлов: {count}.",
"conflictsResolveFailed": "Не удалось разрешить конфликты имён файлов: {message}"
} }
}, },
"banners": { "banners": {

View File

@@ -1906,7 +1906,9 @@
"repairSuccess": "缓存重建完成。", "repairSuccess": "缓存重建完成。",
"repairFailed": "缓存重建失败:{message}", "repairFailed": "缓存重建失败:{message}",
"exportSuccess": "诊断包已导出。", "exportSuccess": "诊断包已导出。",
"exportFailed": "导出诊断包失败:{message}" "exportFailed": "导出诊断包失败:{message}",
"conflictsResolved": "已解决 {count} 个文件名冲突。",
"conflictsResolveFailed": "解决文件名冲突失败:{message}"
} }
}, },
"banners": { "banners": {

View File

@@ -1906,7 +1906,9 @@
"repairSuccess": "快取重建完成。", "repairSuccess": "快取重建完成。",
"repairFailed": "快取重建失敗:{message}", "repairFailed": "快取重建失敗:{message}",
"exportSuccess": "診斷套件已匯出。", "exportSuccess": "診斷套件已匯出。",
"exportFailed": "匯出診斷套件失敗:{message}" "exportFailed": "匯出診斷套件失敗:{message}",
"conflictsResolved": "已解決 {count} 個檔案名稱衝突。",
"conflictsResolveFailed": "解決檔案名稱衝突失敗:{message}"
} }
}, },
"banners": { "banners": {

View File

@@ -38,10 +38,12 @@ from ...services.websocket_manager import ws_manager
 from ...services.downloader import get_downloader
 from ...services.errors import ResourceNotFoundError
 from ...services.cache_health_monitor import CacheHealthMonitor, CacheHealthStatus
+from ...utils.models import BaseModelMetadata
 from ...utils.constants import (
     CIVITAI_USER_MODEL_TYPES,
     DEFAULT_NODE_COLOR,
     NODE_TYPES,
+    PREVIEW_EXTENSIONS,
     SUPPORTED_MEDIA_EXTENSIONS,
     VALID_LORA_TYPES,
 )
@@ -617,6 +619,7 @@ class DoctorHandler:
         diagnostics = [
             await self._check_civitai_api_key(),
             await self._check_cache_health(),
+            await self._check_filename_conflicts(),
             self._check_ui_version(client_version, app_version),
         ]
@@ -681,6 +684,145 @@ class DoctorHandler:
             status=status,
         )

+    async def resolve_filename_conflicts(self, request: web.Request) -> web.Response:
+        renamed: list[dict[str, Any]] = []
+        try:
+            for model_type, label, factory in self._scanner_factories:
+                try:
+                    scanner = await factory()
+                    hash_index = getattr(scanner, "_hash_index", None)
+                    if hash_index is None:
+                        continue
+                    duplicates = {
+                        filename: list(paths)
+                        for filename, paths in hash_index.get_duplicate_filenames().items()
+                    }
+                    if not duplicates:
+                        continue
+                    cache = await scanner.get_cached_data()
+                    path_to_model = {m["file_path"]: m for m in cache.raw_data}
+                    used_basenames: set[str] = set()
+                    for paths in duplicates.values():
+                        if paths:
+                            used_basenames.add(
+                                os.path.splitext(os.path.basename(paths[0]))[0]
+                            )
+                    for filename, paths in duplicates.items():
+                        for idx, path in enumerate(paths):
+                            if idx == 0:
+                                continue
+                            dirname = os.path.dirname(path)
+                            base_name = os.path.splitext(os.path.basename(path))[0]
+                            ext = os.path.splitext(path)[1]
+                            if not ext:
+                                continue
+                            model_data = path_to_model.get(path)
+                            sha256 = model_data.get("sha256", "") if model_data else ""
+                            hash_provider = lambda s=sha256: s if s else "0000"
+                            new_filename = BaseModelMetadata.generate_unique_filename(
+                                dirname,
+                                base_name,
+                                ext,
+                                hash_provider=hash_provider,
+                            )
+                            candidate_base = os.path.splitext(new_filename)[0]
+                            counter = 1
+                            original_base = candidate_base
+                            while candidate_base in used_basenames:
+                                candidate_base = f"{original_base}-{counter}"
+                                new_filename = f"{candidate_base}{ext}"
+                                counter += 1
+                            used_basenames.add(candidate_base)
+                            new_path = os.path.join(dirname, new_filename)
+                            if new_filename == os.path.basename(path):
+                                continue
+                            if not os.path.exists(path):
+                                continue
+                            old_base_no_ext = os.path.splitext(path)[0]
+                            new_base_no_ext = os.path.splitext(new_path)[0]
+                            os.rename(path, new_path)
+                            for suffix in (".metadata.json", ".civitai.info"):
+                                old_sidecar = old_base_no_ext + suffix
+                                new_sidecar = new_base_no_ext + suffix
+                                if os.path.exists(old_sidecar):
+                                    os.rename(old_sidecar, new_sidecar)
+                            for preview_ext in PREVIEW_EXTENSIONS:
+                                old_preview = old_base_no_ext + preview_ext
+                                new_preview = new_base_no_ext + preview_ext
+                                if os.path.exists(old_preview):
+                                    os.rename(old_preview, new_preview)
+                            entry = path_to_model.get(path)
+                            if entry:
+                                entry = dict(entry)
+                                entry["file_name"] = os.path.splitext(new_filename)[0]
+                                if entry.get("preview_url"):
+                                    old_preview_url = entry["preview_url"].replace("\\", "/")
+                                    preview_ext = os.path.splitext(old_preview_url)[1]
+                                    if preview_ext:
+                                        entry["preview_url"] = (
+                                            new_base_no_ext + preview_ext
+                                        ).replace(os.sep, "/")
+                                await scanner.update_single_model_cache(
+                                    path, new_path, entry
+                                )
+                            logger.info(
+                                "Resolved duplicate filename '%s': "
+                                "renamed '%s' to '%s'",
+                                filename,
+                                path,
+                                new_path,
+                            )
+                            renamed.append({
+                                "model_type": model_type,
+                                "label": label,
+                                "filename": filename,
+                                "old_path": path,
+                                "new_path": new_path,
+                                "new_filename": new_filename,
+                            })
+                except Exception as exc:  # pragma: no cover - defensive
+                    logger.error(
+                        "Failed to resolve filename conflicts for %s: %s",
+                        model_type,
+                        exc,
+                        exc_info=True,
+                    )
+            return web.json_response({
+                "success": True,
+                "renamed": renamed,
+                "count": len(renamed),
+            })
+        except Exception as exc:
+            logger.error(
+                "Error resolving filename conflicts: %s", exc, exc_info=True
+            )
+            return web.json_response(
+                {"success": False, "error": str(exc)}, status=500
+            )
     async def export_doctor_bundle(self, request: web.Request) -> web.Response:
         try:
             payload = await request.json()
@@ -846,6 +988,79 @@ class DoctorHandler:
"actions": [{"id": "repair-cache", "label": "Rebuild Cache"}], "actions": [{"id": "repair-cache", "label": "Rebuild Cache"}],
} }
+    async def _check_filename_conflicts(self) -> dict[str, Any]:
+        all_conflicts: list[dict[str, Any]] = []
+        total_conflict_groups = 0
+        total_conflict_files = 0
+        for model_type, label, factory in self._scanner_factories:
+            try:
+                scanner = await factory()
+                hash_index = getattr(scanner, "_hash_index", None)
+                if hash_index is None:
+                    continue
+                duplicates = hash_index.get_duplicate_filenames()
+                if not duplicates:
+                    continue
+                total_conflict_groups += len(duplicates)
+                for filename, paths in duplicates.items():
+                    total_conflict_files += len(paths)
+                    all_conflicts.append({
+                        "model_type": model_type,
+                        "label": label,
+                        "filename": filename,
+                        "paths": paths,
+                    })
+            except Exception as exc:  # pragma: no cover - defensive
+                logger.error(
+                    "Doctor filename conflict check failed for %s: %s",
+                    model_type,
+                    exc,
+                    exc_info=True,
+                )
+        if not all_conflicts:
+            return {
+                "id": "filename_conflicts",
+                "title": "Duplicate Filename Conflicts",
+                "status": "ok",
+                "summary": "No duplicate filenames found across model directories.",
+                "details": [],
+                "actions": [],
+            }
+        summary = (
+            f"{total_conflict_groups} filename(s) shared by "
+            f"{total_conflict_files} files across your library. "
+            f"This causes ambiguity when loading LoRAs by name."
+        )
+        details: list[str | dict[str, Any]] = [
+            {
+                "conflict_groups": total_conflict_groups,
+                "total_conflict_files": total_conflict_files,
+            }
+        ]
+        for conflict in all_conflicts:
+            details.append(
+                f"[{conflict['label']}] '{conflict['filename']}' "
+                f"found in {len(conflict['paths'])} locations"
+            )
+        return {
+            "id": "filename_conflicts",
+            "title": "Duplicate Filename Conflicts",
+            "status": "warning",
+            "summary": summary,
+            "details": details,
+            "actions": [
+                {
+                    "id": "resolve-filename-conflicts",
+                    "label": "Resolve Conflicts",
+                }
+            ],
+        }
     def _check_ui_version(self, client_version: str, app_version: str) -> dict[str, Any]:
         if client_version and client_version != app_version:
             return {
@@ -2796,6 +3011,7 @@ class MiscHandlerSet:
"update_settings": self.settings.update_settings, "update_settings": self.settings.update_settings,
"get_doctor_diagnostics": self.doctor.get_doctor_diagnostics, "get_doctor_diagnostics": self.doctor.get_doctor_diagnostics,
"repair_doctor_cache": self.doctor.repair_doctor_cache, "repair_doctor_cache": self.doctor.repair_doctor_cache,
"resolve_doctor_filename_conflicts": self.doctor.resolve_filename_conflicts,
"export_doctor_bundle": self.doctor.export_doctor_bundle, "export_doctor_bundle": self.doctor.export_doctor_bundle,
"get_priority_tags": self.settings.get_priority_tags, "get_priority_tags": self.settings.get_priority_tags,
"get_settings_libraries": self.settings.get_libraries, "get_settings_libraries": self.settings.get_libraries,

View File

@@ -24,6 +24,7 @@ MISC_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
RouteDefinition("POST", "/api/lm/settings", "update_settings"), RouteDefinition("POST", "/api/lm/settings", "update_settings"),
RouteDefinition("GET", "/api/lm/doctor/diagnostics", "get_doctor_diagnostics"), RouteDefinition("GET", "/api/lm/doctor/diagnostics", "get_doctor_diagnostics"),
RouteDefinition("POST", "/api/lm/doctor/repair-cache", "repair_doctor_cache"), RouteDefinition("POST", "/api/lm/doctor/repair-cache", "repair_doctor_cache"),
RouteDefinition("POST", "/api/lm/doctor/resolve-filename-conflicts", "resolve_doctor_filename_conflicts"),
RouteDefinition("POST", "/api/lm/doctor/export-bundle", "export_doctor_bundle"), RouteDefinition("POST", "/api/lm/doctor/export-bundle", "export_doctor_bundle"),
RouteDefinition("GET", "/api/lm/priority-tags", "get_priority_tags"), RouteDefinition("GET", "/api/lm/priority-tags", "get_priority_tags"),
RouteDefinition("GET", "/api/lm/settings/libraries", "get_settings_libraries"), RouteDefinition("GET", "/api/lm/settings/libraries", "get_settings_libraries"),

View File

@@ -79,6 +79,12 @@ class ModelHashIndex:
                 hash_val = h
                 break
+        if hash_val is None:
+            for h, paths in self._duplicate_hashes.items():
+                if file_path in paths:
+                    hash_val = h
+                    break
         # If we didn't find a hash, nothing to do
         if not hash_val:
             return

View File

@@ -1072,14 +1072,6 @@ class ModelScanner:
             excluded_models.append(model_data['file_path'])
             return None

-        # Check for duplicate filename before adding to hash index
-        # filename = os.path.splitext(os.path.basename(file_path))[0]
-        # existing_hash = hash_index.get_hash_by_filename(filename)
-        # if existing_hash and existing_hash != model_data.get('sha256', '').lower():
-        #     existing_path = hash_index.get_path(existing_hash)
-        #     if existing_path and existing_path != file_path:
-        #         logger.warning(f"Duplicate filename detected: '{filename}' - files: '{existing_path}' and '{file_path}'")
         return model_data

     async def _apply_scan_result(self, scan_result: CacheBuildResult) -> None:
@@ -1105,6 +1097,31 @@ class ModelScanner:
         await self._cache.resort()

+        self._log_duplicate_filename_summary()
+
+    def _log_duplicate_filename_summary(self) -> None:
+        """Log a batched summary of duplicate filename conflicts once per scan."""
+        if self._hash_index is None:
+            return
+        duplicates = self._hash_index.get_duplicate_filenames()
+        if not duplicates:
+            return
+        total_files = sum(len(paths) for paths in duplicates.values())
+        conflict_count = len(duplicates)
+        model_type_label = self.model_type or "model"
+        logger.warning(
+            "Duplicate filename conflict detected: %d %s filename(s) "
+            "are shared by %d files total, causing ambiguity in %s resolution. "
+            "Open the Doctor panel to resolve them in one click.",
+            conflict_count,
+            model_type_label,
+            total_files,
+            model_type_label.capitalize(),
+        )

     async def _sync_download_history(
         self,
         raw_data: List[Mapping[str, Any]],

View File

@@ -2,6 +2,7 @@ import { modalManager } from './ModalManager.js';
 import { showToast } from '../utils/uiHelpers.js';
 import { translate } from '../utils/i18nHelpers.js';
 import { escapeHtml } from '../components/shared/utils.js';
+import { state } from '../state/index.js';

 const MAX_CONSOLE_ENTRIES = 200;
@@ -258,6 +259,15 @@ export class DoctorManager {
     }

     renderInlineDetail(detail) {
+        if (detail.conflict_groups || detail.total_conflict_files) {
+            return `
+                <div class="doctor-inline-detail">
+                    <strong>${escapeHtml(translate('doctor.status.warning', {}, 'Conflicts'))}</strong>
+                    <div>${escapeHtml(`${detail.conflict_groups || 0} filenames, ${detail.total_conflict_files || 0} files`)}</div>
+                </div>
+            `;
+        }
         if (detail.client_version || detail.server_version) {
             return `
                 <div class="doctor-inline-detail">
@@ -317,6 +327,9 @@ export class DoctorManager {
             case 'repair-cache':
                 await this.repairCache();
                 break;
+            case 'resolve-filename-conflicts':
+                await this.resolveFilenameConflicts();
+                break;
             case 'reload-page':
                 this.reloadUi();
                 break;
@@ -345,6 +358,47 @@ export class DoctorManager {
         }
     }

+    async resolveFilenameConflicts() {
+        try {
+            this.setLoading(true);
+            const response = await fetch('/api/lm/doctor/resolve-filename-conflicts', { method: 'POST' });
+            const payload = await response.json();
+            if (!response.ok || payload.success === false) {
+                throw new Error(payload.error || 'Failed to resolve filename conflicts.');
+            }
+            const renamedCount = payload.count || 0;
+            showToast(
+                'doctor.toast.conflictsResolved',
+                { count: renamedCount },
+                'success'
+            );
+            // Update scroller items so model cards reflect new filenames immediately
+            if (state.virtualScroller && payload.renamed) {
+                for (const renamed of payload.renamed) {
+                    const baseName = renamed.new_filename.replace(/\.[^.]+$/, '');
+                    state.virtualScroller.updateSingleItem(renamed.old_path, {
+                        file_name: baseName,
+                        file_path: renamed.new_path,
+                    });
+                }
+            }
+            await this.refreshDiagnostics({ silent: true });
+        } catch (error) {
+            console.error('Doctor filename conflict resolution failed:', error);
+            showToast(
+                'doctor.toast.conflictsResolveFailed',
+                { message: error.message },
+                'error'
+            );
+        } finally {
+            this.setLoading(false);
+        }
+    }

     async exportBundle() {
         try {
             this.setLoading(true);

View File

@@ -10,6 +10,7 @@ from unittest.mock import patch, MagicMock
 import pytest
 from aiohttp import web

+from py.services.model_hash_index import ModelHashIndex
 from py.routes.handlers.misc_handlers import (
     BackupHandler,
     DoctorHandler,
@@ -78,10 +79,11 @@ async def dummy_downloader_factory():
 class DummyDoctorScanner:
-    def __init__(self, *, model_type='lora', raw_data=None, rebuild_error=None):
+    def __init__(self, *, model_type='lora', raw_data=None, rebuild_error=None, hash_index=None):
         self.model_type = model_type
         self._raw_data = list(raw_data or [])
         self._rebuild_error = rebuild_error
+        self._hash_index = hash_index
         self._persistent_cache = SimpleNamespace(
             load_cache=lambda _model_type: SimpleNamespace(raw_data=list(self._raw_data))
         )
@@ -91,6 +93,16 @@ class DummyDoctorScanner:
             raise self._rebuild_error
         return SimpleNamespace(raw_data=list(self._raw_data))

+    async def update_single_model_cache(self, original_path, new_path, metadata):
+        for item in self._raw_data:
+            if item.get("file_path") == original_path:
+                item["file_path"] = new_path
+                item["file_name"] = metadata.get("file_name", item.get("file_name", ""))
+                if metadata.get("preview_url"):
+                    item["preview_url"] = metadata["preview_url"]
+                break
+        return True

 class DummyCivitaiClient:
     def __init__(self, *, success=True, result=None):
class DummyCivitaiClient: class DummyCivitaiClient:
def __init__(self, *, success=True, result=None): def __init__(self, *, success=True, result=None):
@@ -1582,3 +1594,107 @@ def test_wsl_to_windows_path_returns_none_on_subprocess_error(tmp_path):
     ):
         result = _wsl_to_windows_path("/mnt/c/test")
         assert result is None
+
+
+# ── DoctorHandler filename conflict tests ──────────────────────────────
+
+@pytest.mark.asyncio
+async def test_check_filename_conflicts_returns_ok_when_no_duplicates():
+    hash_index = ModelHashIndex()
+
+    async def scanner_factory():
+        return DummyDoctorScanner(
+            model_type="lora", raw_data=[], hash_index=hash_index
+        )
+
+    handler = DoctorHandler(
+        settings_service=DummySettings({"civitai_api_key": "token"}),
+        scanner_factories=(("lora", "LoRAs", scanner_factory),),
+    )
+    response = await handler.get_doctor_diagnostics(FakeRequest(method="GET"))
+    payload = json.loads(response.text)
+    diagnostic_map = {item["id"]: item for item in payload["diagnostics"]}
+    assert diagnostic_map["filename_conflicts"]["status"] == "ok"
+    assert "No duplicate filenames" in diagnostic_map["filename_conflicts"]["summary"]
+
+
+@pytest.mark.asyncio
+async def test_check_filename_conflicts_detects_duplicates():
+    hash_index = ModelHashIndex()
+    hash_index.add_entry("abc123", "/a/lora.safetensors")
+    hash_index.add_entry("def456", "/b/lora.safetensors")
+
+    async def scanner_factory():
+        return DummyDoctorScanner(
+            model_type="lora",
+            raw_data=[
+                {"file_path": "/a/lora.safetensors"},
+                {"file_path": "/b/lora.safetensors"},
+            ],
+            hash_index=hash_index,
+        )
+
+    handler = DoctorHandler(
+        settings_service=DummySettings({"civitai_api_key": "token"}),
+        scanner_factories=(("lora", "LoRAs", scanner_factory),),
+    )
+    response = await handler.get_doctor_diagnostics(FakeRequest(method="GET"))
+    payload = json.loads(response.text)
+    diagnostic_map = {item["id"]: item for item in payload["diagnostics"]}
+    conflict_diag = diagnostic_map["filename_conflicts"]
+    assert conflict_diag["status"] == "warning"
+    assert "1 filename(s)" in conflict_diag["summary"]
+    assert any("resolve-filename-conflicts" in str(a) for a in conflict_diag.get("actions", []))
+
+
+@pytest.mark.asyncio
+async def test_resolve_filename_conflicts_returns_renamed_list():
+    hash_index = ModelHashIndex()
+    hash_index.add_entry("abc123", "lora.safetensors")
+    hash_index.add_entry("def456", "lora.safetensors")
+
+    async def scanner_factory():
+        return DummyDoctorScanner(
+            model_type="lora",
+            raw_data=[],
+            hash_index=hash_index,
+        )
+
+    handler = DoctorHandler(
+        settings_service=DummySettings({"civitai_api_key": "token"}),
+        scanner_factories=(("lora", "LoRAs", scanner_factory),),
+    )
+    response = await handler.resolve_filename_conflicts(FakeRequest(method="POST"))
+    payload = json.loads(response.text)
+    assert payload["success"] is True
+    # Files don't exist on disk, so nothing gets renamed
+    assert payload["count"] == 0
+
+
+@pytest.mark.asyncio
+async def test_resolve_filename_conflicts_handles_scanner_error_gracefully():
+    class ErrorScanner:
+        model_type = "lora"
+
+        async def get_cached_data(self):
+            raise RuntimeError("scanner unavailable")
+
+    async def scanner_factory():
+        return ErrorScanner()
+
+    handler = DoctorHandler(
+        settings_service=DummySettings({"civitai_api_key": "token"}),
+        scanner_factories=(("lora", "LoRAs", scanner_factory),),
+    )
+    response = await handler.resolve_filename_conflicts(FakeRequest(method="POST"))
+    payload = json.loads(response.text)
+    assert payload["success"] is True
+    assert payload["count"] == 0

View File

@@ -0,0 +1,113 @@
+import pytest
+
+from py.services.model_hash_index import ModelHashIndex
+
+
+class TestModelHashIndexRemoveByPath:
+    def test_remove_by_path_finds_hash_in_hash_to_path(self):
+        index = ModelHashIndex()
+        index.add_entry("abc123", "/models/lora.safetensors")
+        index.remove_by_path("/models/lora.safetensors")
+        assert len(index) == 0
+        assert not index.get_duplicate_filenames()
+
+    def test_remove_by_path_falls_back_to_duplicate_hashes(self):
+        """When a path is only tracked in _duplicate_hashes, remove_by_path
+        should still find and remove it."""
+        index = ModelHashIndex()
+        index.add_entry("abc123", "/models/lora_v1.safetensors")
+        index.add_entry("abc123", "/models/lora_v2.safetensors")
+        # lora_v1 is the primary (_hash_to_path), lora_v2 is in _duplicate_hashes
+        index.remove_by_path("/models/lora_v2.safetensors")
+        assert len(index) == 1
+        assert index._hash_to_path.get("abc123") == "/models/lora_v1.safetensors"
+        assert "abc123" not in index._duplicate_hashes
+
+    def test_remove_by_path_cleans_up_duplicate_filenames(self):
+        """After removing a path, _duplicate_filenames should be updated."""
+        index = ModelHashIndex()
+        index.add_entry("abc123", "/models/mylora.safetensors")
+        index.add_entry("def456", "/other/mylora.safetensors")
+        assert "mylora" in index.get_duplicate_filenames()
+        assert len(index.get_duplicate_filenames()["mylora"]) == 2
+        index.remove_by_path("/other/mylora.safetensors")
+        # After removing one duplicate, only one path remains — no longer a duplicate
+        assert "mylora" not in index.get_duplicate_filenames()
+
+    def test_remove_by_path_keeps_duplicate_filenames_with_three_entries(self):
+        """With 3 entries for the same filename, removing one should leave 2."""
+        index = ModelHashIndex()
+        index.add_entry("abc123", "/models/mylora.safetensors")
+        index.add_entry("def456", "/other/mylora.safetensors")
+        index.add_entry("ghi789", "/third/mylora.safetensors")
+        index.remove_by_path("/other/mylora.safetensors")
+        assert "mylora" in index.get_duplicate_filenames()
+        paths = index.get_duplicate_filenames()["mylora"]
+        assert len(paths) == 2
+        assert "/other/mylora.safetensors" not in paths
+
+    def test_remove_by_path_noop_on_unknown_path(self):
+        index = ModelHashIndex()
+        index.add_entry("abc123", "/models/lora.safetensors")
+        # Should not raise
+        index.remove_by_path("/nonexistent/lora.safetensors")
+        assert len(index) == 1
+
+    def test_remove_by_path_handles_hash_from_duplicate_hashes_only(self):
+        """Remove a path whose hash exists ONLY in _duplicate_hashes,
+        not in _hash_to_path (edge case from index rebuilds)."""
+        index = ModelHashIndex()
+        index.add_entry("abc123", "/a/model.safetensors")
+        index.add_entry("abc123", "/b/model.safetensors")
+        # Manually remove the primary entry to simulate edge case
+        del index._hash_to_path["abc123"]
+        # Now the path is only referenced in _duplicate_hashes
+        assert "abc123" in index._duplicate_hashes
+        index.remove_by_path("/b/model.safetensors")
+        # The remaining path is promoted to _hash_to_path, duplicates cleared
+        assert "abc123" not in index._duplicate_hashes
+        assert index._hash_to_path.get("abc123") == "/a/model.safetensors"
+
+
+class TestModelHashIndexGetDuplicateFilenames:
+    def test_empty_index_returns_empty_dict(self):
+        index = ModelHashIndex()
+        assert index.get_duplicate_filenames() == {}
+
+    def test_no_duplicates_returns_empty_dict(self):
+        index = ModelHashIndex()
+        index.add_entry("abc123", "/models/lora.safetensors")
+        index.add_entry("def456", "/models/other.safetensors")
+        assert index.get_duplicate_filenames() == {}
+
+    def test_duplicate_filenames_detected(self):
+        index = ModelHashIndex()
+        index.add_entry("abc123", "/a/mylora.safetensors")
+        index.add_entry("def456", "/b/mylora.safetensors")
+        dupes = index.get_duplicate_filenames()
+        assert "mylora" in dupes
+        assert len(dupes["mylora"]) == 2
+
+    def test_same_hash_same_name_not_a_filename_duplicate(self):
+        """Same hash with same filename = hash duplicate, not filename conflict."""
+        index = ModelHashIndex()
+        index.add_entry("abc123", "/a/lora.safetensors")
+        # Same hash, same filename — this is a true duplicate (hash collision)
+        # but the filename index only tracks different files with same name
+        # Currently add_entry for same hash+path would update, not create duplicate
+        # This is correct behavior — filename dupes are for different files
+
+    def test_add_entry_idempotent_for_same_path_and_hash(self):
+        index = ModelHashIndex()
+        index.add_entry("abc123", "/a/lora.safetensors")
+        index.add_entry("abc123", "/a/lora.safetensors")
+        assert len(index) == 1
+        assert index.get_duplicate_filenames() == {}

View File

@@ -624,3 +624,42 @@ async def test_reconcile_cache_removes_duplicate_alias_when_same_real_file_seen_
     cache = await scanner.get_cached_data()
     cached_paths = {item["file_path"] for item in cache.raw_data}
     assert cached_paths == {_normalize_path(loras_root / "link" / "one.txt")}
+
+
+@pytest.mark.asyncio
+async def test_log_duplicate_filename_summary_logs_warning(tmp_path: Path, caplog):
+    """When duplicate filenames exist, _log_duplicate_filename_summary should emit
+    a single warning log with the conflict count and total file count."""
+    import logging
+
+    caplog.set_level(logging.WARNING)
+    root = tmp_path / "loras"
+    root.mkdir()
+    scanner = DummyScanner(root)
+    # Simulate duplicate filenames in the hash index
+    scanner._hash_index.add_entry("aaa111", str(root / "model.safetensors"))
+    scanner._hash_index.add_entry("bbb222", str(root / "dir" / "model.safetensors"))
+
+    scanner._log_duplicate_filename_summary()
+
+    assert len(caplog.records) >= 1
+    log_msg = caplog.records[-1].message
+    assert "Duplicate filename conflict detected" in log_msg
+    assert "1 dummy filename(s)" in log_msg
+    assert "2 files total" in log_msg
+
+
+@pytest.mark.asyncio
+async def test_log_duplicate_filename_summary_silent_when_no_duplicates(tmp_path: Path, caplog):
+    import logging
+
+    caplog.set_level(logging.WARNING)
+    root = tmp_path / "loras"
+    root.mkdir()
+    scanner = DummyScanner(root)
+
+    scanner._log_duplicate_filename_summary()
+
+    # No warning should be logged when there are no duplicates
+    for record in caplog.records:
+        assert "Duplicate filename conflict detected" not in record.message