mirror of
https://github.com/willmiao/ComfyUI-Lora-Manager.git
synced 2026-03-25 15:15:44 -03:00
feat(cache): add cache health monitoring and validation system, see #730
- Add cache entry validator service for data integrity checks - Add cache health monitor service for periodic health checks - Enhance model cache and scanner with validation support - Update websocket manager for health status broadcasting - Add initialization banner service for cache health alerts - Add comprehensive test coverage for new services - Update translations across all locales - Refactor sync translation keys script
This commit is contained in:
@@ -9,9 +9,9 @@
|
|||||||
"back": "Zurück",
|
"back": "Zurück",
|
||||||
"next": "Weiter",
|
"next": "Weiter",
|
||||||
"backToTop": "Nach oben",
|
"backToTop": "Nach oben",
|
||||||
"add": "Hinzufügen",
|
|
||||||
"settings": "Einstellungen",
|
"settings": "Einstellungen",
|
||||||
"help": "Hilfe"
|
"help": "Hilfe",
|
||||||
|
"add": "Hinzufügen"
|
||||||
},
|
},
|
||||||
"status": {
|
"status": {
|
||||||
"loading": "Wird geladen...",
|
"loading": "Wird geladen...",
|
||||||
@@ -1572,6 +1572,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "Cache-Korruption erkannt"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "Cache-Probleme erkannt"
|
||||||
|
},
|
||||||
|
"content": "{invalid} von {total} Cache-Einträgen sind ungültig ({rate}). Dies kann zu fehlenden Modellen oder Fehlern führen. Ein Neuaufbau des Caches wird empfohlen.",
|
||||||
|
"rebuildCache": "Cache neu aufbauen",
|
||||||
|
"dismiss": "Verwerfen",
|
||||||
|
"rebuilding": "Cache wird neu aufgebaut...",
|
||||||
|
"rebuildFailed": "Fehler beim Neuaufbau des Caches: {error}",
|
||||||
|
"retry": "Wiederholen"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1572,6 +1572,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "Cache Corruption Detected"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "Cache Issues Detected"
|
||||||
|
},
|
||||||
|
"content": "{invalid} of {total} cache entries are invalid ({rate}). This may cause missing models or errors. Rebuilding the cache is recommended.",
|
||||||
|
"rebuildCache": "Rebuild Cache",
|
||||||
|
"dismiss": "Dismiss",
|
||||||
|
"rebuilding": "Rebuilding cache...",
|
||||||
|
"rebuildFailed": "Failed to rebuild cache: {error}",
|
||||||
|
"retry": "Retry"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1572,6 +1572,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "Corrupción de caché detectada"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "Problemas de caché detectados"
|
||||||
|
},
|
||||||
|
"content": "{invalid} de {total} entradas de caché son inválidas ({rate}). Esto puede causar modelos faltantes o errores. Se recomienda reconstruir la caché.",
|
||||||
|
"rebuildCache": "Reconstruir caché",
|
||||||
|
"dismiss": "Descartar",
|
||||||
|
"rebuilding": "Reconstruyendo caché...",
|
||||||
|
"rebuildFailed": "Error al reconstruir la caché: {error}",
|
||||||
|
"retry": "Reintentar"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1572,6 +1572,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "Corruption du cache détectée"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "Problèmes de cache détectés"
|
||||||
|
},
|
||||||
|
"content": "{invalid} des {total} entrées de cache sont invalides ({rate}). Cela peut provoquer des modèles manquants ou des erreurs. Il est recommandé de reconstruire le cache.",
|
||||||
|
"rebuildCache": "Reconstruire le cache",
|
||||||
|
"dismiss": "Ignorer",
|
||||||
|
"rebuilding": "Reconstruction du cache...",
|
||||||
|
"rebuildFailed": "Échec de la reconstruction du cache : {error}",
|
||||||
|
"retry": "Réessayer"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,9 +9,9 @@
|
|||||||
"back": "חזור",
|
"back": "חזור",
|
||||||
"next": "הבא",
|
"next": "הבא",
|
||||||
"backToTop": "חזור למעלה",
|
"backToTop": "חזור למעלה",
|
||||||
"add": "הוסף",
|
|
||||||
"settings": "הגדרות",
|
"settings": "הגדרות",
|
||||||
"help": "עזרה"
|
"help": "עזרה",
|
||||||
|
"add": "הוסף"
|
||||||
},
|
},
|
||||||
"status": {
|
"status": {
|
||||||
"loading": "טוען...",
|
"loading": "טוען...",
|
||||||
@@ -1572,6 +1572,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "זוהתה שחיתות במטמון"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "זוהו בעיות במטמון"
|
||||||
|
},
|
||||||
|
"content": "{invalid} מתוך {total} רשומות מטמון אינן תקינות ({rate}). זה עלול לגרום לדגמים חסרים או לשגיאות. מומלץ לבנות מחדש את המטמון.",
|
||||||
|
"rebuildCache": "בניית מטמון מחדש",
|
||||||
|
"dismiss": "ביטול",
|
||||||
|
"rebuilding": "בונה מחדש את המטמון...",
|
||||||
|
"rebuildFailed": "נכשלה בניית המטמון מחדש: {error}",
|
||||||
|
"retry": "נסה שוב"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1572,6 +1572,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "キャッシュの破損が検出されました"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "キャッシュの問題が検出されました"
|
||||||
|
},
|
||||||
|
"content": "{total}個のキャッシュエントリのうち{invalid}個が無効です({rate})。モデルが見つからない原因になったり、エラーが発生する可能性があります。キャッシュの再構築を推奨します。",
|
||||||
|
"rebuildCache": "キャッシュを再構築",
|
||||||
|
"dismiss": "閉じる",
|
||||||
|
"rebuilding": "キャッシュを再構築中...",
|
||||||
|
"rebuildFailed": "キャッシュの再構築に失敗しました: {error}",
|
||||||
|
"retry": "再試行"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1572,6 +1572,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "캐시 손상이 감지되었습니다"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "캐시 문제가 감지되었습니다"
|
||||||
|
},
|
||||||
|
"content": "{total}개의 캐시 항목 중 {invalid}개가 유효하지 않습니다 ({rate}). 모델 누락이나 오류가 발생할 수 있습니다. 캐시를 재구축하는 것이 좋습니다.",
|
||||||
|
"rebuildCache": "캐시 재구축",
|
||||||
|
"dismiss": "무시",
|
||||||
|
"rebuilding": "캐시 재구축 중...",
|
||||||
|
"rebuildFailed": "캐시 재구축 실패: {error}",
|
||||||
|
"retry": "다시 시도"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1572,6 +1572,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "Обнаружено повреждение кэша"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "Обнаружены проблемы с кэшем"
|
||||||
|
},
|
||||||
|
"content": "{invalid} из {total} записей кэша недействительны ({rate}). Это может привести к отсутствию моделей или ошибкам. Рекомендуется перестроить кэш.",
|
||||||
|
"rebuildCache": "Перестроить кэш",
|
||||||
|
"dismiss": "Отклонить",
|
||||||
|
"rebuilding": "Перестроение кэша...",
|
||||||
|
"rebuildFailed": "Не удалось перестроить кэш: {error}",
|
||||||
|
"retry": "Повторить"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1572,6 +1572,20 @@
|
|||||||
"content": "来爱发电为Lora Manager项目发电,支持项目持续开发的同时,获取浏览器插件验证码,按季支付更优惠!支付宝/微信方便支付。感谢支持!🚀",
|
"content": "来爱发电为Lora Manager项目发电,支持项目持续开发的同时,获取浏览器插件验证码,按季支付更优惠!支付宝/微信方便支付。感谢支持!🚀",
|
||||||
"supportCta": "为LM发电",
|
"supportCta": "为LM发电",
|
||||||
"learnMore": "浏览器插件教程"
|
"learnMore": "浏览器插件教程"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "检测到缓存损坏"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "检测到缓存问题"
|
||||||
|
},
|
||||||
|
"content": "{total} 个缓存条目中有 {invalid} 个无效({rate})。这可能导致模型丢失或错误。建议重建缓存。",
|
||||||
|
"rebuildCache": "重建缓存",
|
||||||
|
"dismiss": "忽略",
|
||||||
|
"rebuilding": "正在重建缓存...",
|
||||||
|
"rebuildFailed": "重建缓存失败:{error}",
|
||||||
|
"retry": "重试"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1572,6 +1572,20 @@
|
|||||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||||
"supportCta": "Support on Ko-fi",
|
"supportCta": "Support on Ko-fi",
|
||||||
"learnMore": "LM Civitai Extension Tutorial"
|
"learnMore": "LM Civitai Extension Tutorial"
|
||||||
|
},
|
||||||
|
"cacheHealth": {
|
||||||
|
"corrupted": {
|
||||||
|
"title": "檢測到快取損壞"
|
||||||
|
},
|
||||||
|
"degraded": {
|
||||||
|
"title": "檢測到快取問題"
|
||||||
|
},
|
||||||
|
"content": "{total} 個快取項目中有 {invalid} 個無效({rate})。這可能會導致模型遺失或錯誤。建議重建快取。",
|
||||||
|
"rebuildCache": "重建快取",
|
||||||
|
"dismiss": "關閉",
|
||||||
|
"rebuilding": "重建快取中...",
|
||||||
|
"rebuildFailed": "重建快取失敗:{error}",
|
||||||
|
"retry": "重試"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
259
py/services/cache_entry_validator.py
Normal file
259
py/services/cache_entry_validator.py
Normal file
@@ -0,0 +1,259 @@
|
|||||||
|
"""
|
||||||
|
Cache Entry Validator
|
||||||
|
|
||||||
|
Validates and repairs cache entries to prevent runtime errors from
|
||||||
|
missing or invalid critical fields.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ValidationResult:
|
||||||
|
"""Result of validating a single cache entry."""
|
||||||
|
is_valid: bool
|
||||||
|
repaired: bool
|
||||||
|
errors: List[str] = field(default_factory=list)
|
||||||
|
entry: Optional[Dict[str, Any]] = None
|
||||||
|
|
||||||
|
|
||||||
|
class CacheEntryValidator:
|
||||||
|
"""
|
||||||
|
Validates and repairs cache entry core fields.
|
||||||
|
|
||||||
|
Critical fields that cause runtime errors when missing:
|
||||||
|
- file_path: KeyError in multiple locations
|
||||||
|
- sha256: KeyError/AttributeError in hash operations
|
||||||
|
|
||||||
|
Medium severity fields that may cause sorting/display issues:
|
||||||
|
- size: KeyError during sorting
|
||||||
|
- modified: KeyError during sorting
|
||||||
|
- model_name: AttributeError on .lower() calls
|
||||||
|
|
||||||
|
Low severity fields:
|
||||||
|
- tags: KeyError/TypeError in recipe operations
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Field definitions: (default_value, is_required)
|
||||||
|
CORE_FIELDS: Dict[str, Tuple[Any, bool]] = {
|
||||||
|
'file_path': ('', True),
|
||||||
|
'sha256': ('', True),
|
||||||
|
'file_name': ('', False),
|
||||||
|
'model_name': ('', False),
|
||||||
|
'folder': ('', False),
|
||||||
|
'size': (0, False),
|
||||||
|
'modified': (0.0, False),
|
||||||
|
'tags': ([], False),
|
||||||
|
'preview_url': ('', False),
|
||||||
|
'base_model': ('', False),
|
||||||
|
'from_civitai': (True, False),
|
||||||
|
'favorite': (False, False),
|
||||||
|
'exclude': (False, False),
|
||||||
|
'db_checked': (False, False),
|
||||||
|
'preview_nsfw_level': (0, False),
|
||||||
|
'notes': ('', False),
|
||||||
|
'usage_tips': ('', False),
|
||||||
|
}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def validate(cls, entry: Dict[str, Any], *, auto_repair: bool = True) -> ValidationResult:
|
||||||
|
"""
|
||||||
|
Validate a single cache entry.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
entry: The cache entry dictionary to validate
|
||||||
|
auto_repair: If True, attempt to repair missing/invalid fields
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
ValidationResult with validation status and optionally repaired entry
|
||||||
|
"""
|
||||||
|
if entry is None:
|
||||||
|
return ValidationResult(
|
||||||
|
is_valid=False,
|
||||||
|
repaired=False,
|
||||||
|
errors=['Entry is None'],
|
||||||
|
entry=None
|
||||||
|
)
|
||||||
|
|
||||||
|
if not isinstance(entry, dict):
|
||||||
|
return ValidationResult(
|
||||||
|
is_valid=False,
|
||||||
|
repaired=False,
|
||||||
|
errors=[f'Entry is not a dict: {type(entry).__name__}'],
|
||||||
|
entry=None
|
||||||
|
)
|
||||||
|
|
||||||
|
errors: List[str] = []
|
||||||
|
repaired = False
|
||||||
|
working_entry = dict(entry) if auto_repair else entry
|
||||||
|
|
||||||
|
for field_name, (default_value, is_required) in cls.CORE_FIELDS.items():
|
||||||
|
value = working_entry.get(field_name)
|
||||||
|
|
||||||
|
# Check if field is missing or None
|
||||||
|
if value is None:
|
||||||
|
if is_required:
|
||||||
|
errors.append(f"Required field '{field_name}' is missing or None")
|
||||||
|
if auto_repair:
|
||||||
|
working_entry[field_name] = cls._get_default_copy(default_value)
|
||||||
|
repaired = True
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Validate field type and value
|
||||||
|
field_error = cls._validate_field(field_name, value, default_value)
|
||||||
|
if field_error:
|
||||||
|
errors.append(field_error)
|
||||||
|
if auto_repair:
|
||||||
|
working_entry[field_name] = cls._get_default_copy(default_value)
|
||||||
|
repaired = True
|
||||||
|
|
||||||
|
# Special validation: file_path must not be empty for required field
|
||||||
|
file_path = working_entry.get('file_path', '')
|
||||||
|
if not file_path or (isinstance(file_path, str) and not file_path.strip()):
|
||||||
|
errors.append("Required field 'file_path' is empty")
|
||||||
|
# Cannot repair empty file_path - entry is invalid
|
||||||
|
return ValidationResult(
|
||||||
|
is_valid=False,
|
||||||
|
repaired=repaired,
|
||||||
|
errors=errors,
|
||||||
|
entry=working_entry if auto_repair else None
|
||||||
|
)
|
||||||
|
|
||||||
|
# Special validation: sha256 must not be empty for required field
|
||||||
|
sha256 = working_entry.get('sha256', '')
|
||||||
|
if not sha256 or (isinstance(sha256, str) and not sha256.strip()):
|
||||||
|
errors.append("Required field 'sha256' is empty")
|
||||||
|
# Cannot repair empty sha256 - entry is invalid
|
||||||
|
return ValidationResult(
|
||||||
|
is_valid=False,
|
||||||
|
repaired=repaired,
|
||||||
|
errors=errors,
|
||||||
|
entry=working_entry if auto_repair else None
|
||||||
|
)
|
||||||
|
|
||||||
|
# Normalize sha256 to lowercase if needed
|
||||||
|
if isinstance(sha256, str):
|
||||||
|
normalized_sha = sha256.lower().strip()
|
||||||
|
if normalized_sha != sha256:
|
||||||
|
working_entry['sha256'] = normalized_sha
|
||||||
|
repaired = True
|
||||||
|
|
||||||
|
# Determine if entry is valid
|
||||||
|
# Entry is valid if no critical required field errors remain after repair
|
||||||
|
# Critical fields are file_path and sha256
|
||||||
|
CRITICAL_REQUIRED_FIELDS = {'file_path', 'sha256'}
|
||||||
|
has_critical_errors = any(
|
||||||
|
"Required field" in error and
|
||||||
|
any(f"'{field}'" in error for field in CRITICAL_REQUIRED_FIELDS)
|
||||||
|
for error in errors
|
||||||
|
)
|
||||||
|
|
||||||
|
is_valid = not has_critical_errors
|
||||||
|
|
||||||
|
return ValidationResult(
|
||||||
|
is_valid=is_valid,
|
||||||
|
repaired=repaired,
|
||||||
|
errors=errors,
|
||||||
|
entry=working_entry if auto_repair else entry
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def validate_batch(
|
||||||
|
cls,
|
||||||
|
entries: List[Dict[str, Any]],
|
||||||
|
*,
|
||||||
|
auto_repair: bool = True
|
||||||
|
) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:
|
||||||
|
"""
|
||||||
|
Validate a batch of cache entries.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
entries: List of cache entry dictionaries to validate
|
||||||
|
auto_repair: If True, attempt to repair missing/invalid fields
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple of (valid_entries, invalid_entries)
|
||||||
|
"""
|
||||||
|
if not entries:
|
||||||
|
return [], []
|
||||||
|
|
||||||
|
valid_entries: List[Dict[str, Any]] = []
|
||||||
|
invalid_entries: List[Dict[str, Any]] = []
|
||||||
|
|
||||||
|
for entry in entries:
|
||||||
|
result = cls.validate(entry, auto_repair=auto_repair)
|
||||||
|
|
||||||
|
if result.is_valid:
|
||||||
|
# Use repaired entry if available, otherwise original
|
||||||
|
valid_entries.append(result.entry if result.entry else entry)
|
||||||
|
else:
|
||||||
|
invalid_entries.append(entry)
|
||||||
|
# Log invalid entries for debugging
|
||||||
|
file_path = entry.get('file_path', '<unknown>') if isinstance(entry, dict) else '<not a dict>'
|
||||||
|
logger.warning(
|
||||||
|
f"Invalid cache entry for '{file_path}': {', '.join(result.errors)}"
|
||||||
|
)
|
||||||
|
|
||||||
|
return valid_entries, invalid_entries
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _validate_field(cls, field_name: str, value: Any, default_value: Any) -> Optional[str]:
|
||||||
|
"""
|
||||||
|
Validate a specific field value.
|
||||||
|
|
||||||
|
Returns an error message if invalid, None if valid.
|
||||||
|
"""
|
||||||
|
expected_type = type(default_value)
|
||||||
|
|
||||||
|
# Special handling for numeric types
|
||||||
|
if expected_type == int:
|
||||||
|
if not isinstance(value, (int, float)):
|
||||||
|
return f"Field '{field_name}' should be numeric, got {type(value).__name__}"
|
||||||
|
elif expected_type == float:
|
||||||
|
if not isinstance(value, (int, float)):
|
||||||
|
return f"Field '{field_name}' should be numeric, got {type(value).__name__}"
|
||||||
|
elif expected_type == bool:
|
||||||
|
# Be lenient with boolean fields - accept truthy/falsy values
|
||||||
|
pass
|
||||||
|
elif expected_type == str:
|
||||||
|
if not isinstance(value, str):
|
||||||
|
return f"Field '{field_name}' should be string, got {type(value).__name__}"
|
||||||
|
elif expected_type == list:
|
||||||
|
if not isinstance(value, (list, tuple)):
|
||||||
|
return f"Field '{field_name}' should be list, got {type(value).__name__}"
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _get_default_copy(cls, default_value: Any) -> Any:
|
||||||
|
"""Get a copy of the default value to avoid shared mutable state."""
|
||||||
|
if isinstance(default_value, list):
|
||||||
|
return list(default_value)
|
||||||
|
if isinstance(default_value, dict):
|
||||||
|
return dict(default_value)
|
||||||
|
return default_value
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_file_path_safe(cls, entry: Dict[str, Any], default: str = '') -> str:
|
||||||
|
"""Safely get file_path from an entry."""
|
||||||
|
if not isinstance(entry, dict):
|
||||||
|
return default
|
||||||
|
value = entry.get('file_path')
|
||||||
|
if isinstance(value, str):
|
||||||
|
return value
|
||||||
|
return default
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_sha256_safe(cls, entry: Dict[str, Any], default: str = '') -> str:
|
||||||
|
"""Safely get sha256 from an entry."""
|
||||||
|
if not isinstance(entry, dict):
|
||||||
|
return default
|
||||||
|
value = entry.get('sha256')
|
||||||
|
if isinstance(value, str):
|
||||||
|
return value.lower()
|
||||||
|
return default
|
||||||
201
py/services/cache_health_monitor.py
Normal file
201
py/services/cache_health_monitor.py
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
"""
|
||||||
|
Cache Health Monitor
|
||||||
|
|
||||||
|
Monitors cache health status and determines when user intervention is needed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from .cache_entry_validator import CacheEntryValidator, ValidationResult
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class CacheHealthStatus(Enum):
|
||||||
|
"""Health status of the cache."""
|
||||||
|
HEALTHY = "healthy"
|
||||||
|
DEGRADED = "degraded"
|
||||||
|
CORRUPTED = "corrupted"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class HealthReport:
|
||||||
|
"""Report of cache health check."""
|
||||||
|
status: CacheHealthStatus
|
||||||
|
total_entries: int
|
||||||
|
valid_entries: int
|
||||||
|
invalid_entries: int
|
||||||
|
repaired_entries: int
|
||||||
|
invalid_paths: List[str] = field(default_factory=list)
|
||||||
|
message: str = ""
|
||||||
|
|
||||||
|
@property
|
||||||
|
def corruption_rate(self) -> float:
|
||||||
|
"""Calculate the percentage of invalid entries."""
|
||||||
|
if self.total_entries <= 0:
|
||||||
|
return 0.0
|
||||||
|
return self.invalid_entries / self.total_entries
|
||||||
|
|
||||||
|
def to_dict(self) -> Dict[str, Any]:
|
||||||
|
"""Convert to dictionary for JSON serialization."""
|
||||||
|
return {
|
||||||
|
'status': self.status.value,
|
||||||
|
'total_entries': self.total_entries,
|
||||||
|
'valid_entries': self.valid_entries,
|
||||||
|
'invalid_entries': self.invalid_entries,
|
||||||
|
'repaired_entries': self.repaired_entries,
|
||||||
|
'corruption_rate': f"{self.corruption_rate:.1%}",
|
||||||
|
'invalid_paths': self.invalid_paths[:10], # Limit to first 10
|
||||||
|
'message': self.message,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class CacheHealthMonitor:
|
||||||
|
"""
|
||||||
|
Monitors cache health and determines appropriate status.
|
||||||
|
|
||||||
|
Thresholds:
|
||||||
|
- HEALTHY: 0% invalid entries
|
||||||
|
- DEGRADED: 0-5% invalid entries (auto-repaired, user should rebuild)
|
||||||
|
- CORRUPTED: >5% invalid entries (significant data loss likely)
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Threshold percentages
|
||||||
|
DEGRADED_THRESHOLD = 0.01 # 1% - show warning
|
||||||
|
CORRUPTED_THRESHOLD = 0.05 # 5% - critical warning
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
degraded_threshold: float = DEGRADED_THRESHOLD,
|
||||||
|
corrupted_threshold: float = CORRUPTED_THRESHOLD
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Initialize the health monitor.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
degraded_threshold: Corruption rate threshold for DEGRADED status
|
||||||
|
corrupted_threshold: Corruption rate threshold for CORRUPTED status
|
||||||
|
"""
|
||||||
|
self.degraded_threshold = degraded_threshold
|
||||||
|
self.corrupted_threshold = corrupted_threshold
|
||||||
|
|
||||||
|
def check_health(
|
||||||
|
self,
|
||||||
|
entries: List[Dict[str, Any]],
|
||||||
|
*,
|
||||||
|
auto_repair: bool = True
|
||||||
|
) -> HealthReport:
|
||||||
|
"""
|
||||||
|
Check the health of cache entries.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
entries: List of cache entry dictionaries to check
|
||||||
|
auto_repair: If True, attempt to repair entries during validation
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
HealthReport with status and statistics
|
||||||
|
"""
|
||||||
|
if not entries:
|
||||||
|
return HealthReport(
|
||||||
|
status=CacheHealthStatus.HEALTHY,
|
||||||
|
total_entries=0,
|
||||||
|
valid_entries=0,
|
||||||
|
invalid_entries=0,
|
||||||
|
repaired_entries=0,
|
||||||
|
message="Cache is empty"
|
||||||
|
)
|
||||||
|
|
||||||
|
total_entries = len(entries)
|
||||||
|
valid_entries: List[Dict[str, Any]] = []
|
||||||
|
invalid_entries: List[Dict[str, Any]] = []
|
||||||
|
repaired_count = 0
|
||||||
|
invalid_paths: List[str] = []
|
||||||
|
|
||||||
|
for entry in entries:
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=auto_repair)
|
||||||
|
|
||||||
|
if result.is_valid:
|
||||||
|
valid_entries.append(result.entry if result.entry else entry)
|
||||||
|
if result.repaired:
|
||||||
|
repaired_count += 1
|
||||||
|
else:
|
||||||
|
invalid_entries.append(entry)
|
||||||
|
# Extract file path for reporting
|
||||||
|
file_path = CacheEntryValidator.get_file_path_safe(entry, '<unknown>')
|
||||||
|
invalid_paths.append(file_path)
|
||||||
|
|
||||||
|
invalid_count = len(invalid_entries)
|
||||||
|
valid_count = len(valid_entries)
|
||||||
|
|
||||||
|
# Determine status based on corruption rate
|
||||||
|
corruption_rate = invalid_count / total_entries if total_entries > 0 else 0.0
|
||||||
|
|
||||||
|
if invalid_count == 0:
|
||||||
|
status = CacheHealthStatus.HEALTHY
|
||||||
|
message = "Cache is healthy"
|
||||||
|
elif corruption_rate >= self.corrupted_threshold:
|
||||||
|
status = CacheHealthStatus.CORRUPTED
|
||||||
|
message = (
|
||||||
|
f"Cache is corrupted: {invalid_count} invalid entries "
|
||||||
|
f"({corruption_rate:.1%}). Rebuild recommended."
|
||||||
|
)
|
||||||
|
elif corruption_rate >= self.degraded_threshold or invalid_count > 0:
|
||||||
|
status = CacheHealthStatus.DEGRADED
|
||||||
|
message = (
|
||||||
|
f"Cache has {invalid_count} invalid entries "
|
||||||
|
f"({corruption_rate:.1%}). Consider rebuilding cache."
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# This shouldn't happen, but handle gracefully
|
||||||
|
status = CacheHealthStatus.HEALTHY
|
||||||
|
message = "Cache is healthy"
|
||||||
|
|
||||||
|
# Log the health check result
|
||||||
|
if status != CacheHealthStatus.HEALTHY:
|
||||||
|
logger.warning(
|
||||||
|
f"Cache health check: {status.value} - "
|
||||||
|
f"{invalid_count}/{total_entries} invalid, "
|
||||||
|
f"{repaired_count} repaired"
|
||||||
|
)
|
||||||
|
if invalid_paths:
|
||||||
|
logger.debug(f"Invalid entry paths: {invalid_paths[:5]}")
|
||||||
|
|
||||||
|
return HealthReport(
|
||||||
|
status=status,
|
||||||
|
total_entries=total_entries,
|
||||||
|
valid_entries=valid_count,
|
||||||
|
invalid_entries=invalid_count,
|
||||||
|
repaired_entries=repaired_count,
|
||||||
|
invalid_paths=invalid_paths,
|
||||||
|
message=message
|
||||||
|
)
|
||||||
|
|
||||||
|
def should_notify_user(self, report: HealthReport) -> bool:
|
||||||
|
"""
|
||||||
|
Determine if the user should be notified about cache health.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
report: The health report to evaluate
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if user should be notified
|
||||||
|
"""
|
||||||
|
return report.status != CacheHealthStatus.HEALTHY
|
||||||
|
|
||||||
|
def get_notification_severity(self, report: HealthReport) -> str:
|
||||||
|
"""
|
||||||
|
Get the severity level for user notification.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
report: The health report to evaluate
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Severity string: 'warning' or 'error'
|
||||||
|
"""
|
||||||
|
if report.status == CacheHealthStatus.CORRUPTED:
|
||||||
|
return 'error'
|
||||||
|
return 'warning'
|
||||||
@@ -5,7 +5,6 @@ import logging
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
from typing import Any, Dict, List, Optional, Tuple
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import dataclass, field
|
||||||
from operator import itemgetter
|
|
||||||
from natsort import natsorted
|
from natsort import natsorted
|
||||||
|
|
||||||
# Supported sort modes: (sort_key, order)
|
# Supported sort modes: (sort_key, order)
|
||||||
@@ -229,17 +228,17 @@ class ModelCache:
|
|||||||
reverse=reverse
|
reverse=reverse
|
||||||
)
|
)
|
||||||
elif sort_key == 'date':
|
elif sort_key == 'date':
|
||||||
# Sort by modified timestamp
|
# Sort by modified timestamp (use .get() with default to handle missing fields)
|
||||||
result = sorted(
|
result = sorted(
|
||||||
data,
|
data,
|
||||||
key=itemgetter('modified'),
|
key=lambda x: x.get('modified', 0.0),
|
||||||
reverse=reverse
|
reverse=reverse
|
||||||
)
|
)
|
||||||
elif sort_key == 'size':
|
elif sort_key == 'size':
|
||||||
# Sort by file size
|
# Sort by file size (use .get() with default to handle missing fields)
|
||||||
result = sorted(
|
result = sorted(
|
||||||
data,
|
data,
|
||||||
key=itemgetter('size'),
|
key=lambda x: x.get('size', 0),
|
||||||
reverse=reverse
|
reverse=reverse
|
||||||
)
|
)
|
||||||
elif sort_key == 'usage':
|
elif sort_key == 'usage':
|
||||||
|
|||||||
@@ -20,6 +20,8 @@ from .service_registry import ServiceRegistry
|
|||||||
from .websocket_manager import ws_manager
|
from .websocket_manager import ws_manager
|
||||||
from .persistent_model_cache import get_persistent_cache
|
from .persistent_model_cache import get_persistent_cache
|
||||||
from .settings_manager import get_settings_manager
|
from .settings_manager import get_settings_manager
|
||||||
|
from .cache_entry_validator import CacheEntryValidator
|
||||||
|
from .cache_health_monitor import CacheHealthMonitor, CacheHealthStatus
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -468,6 +470,39 @@ class ModelScanner:
|
|||||||
for tag in adjusted_item.get('tags') or []:
|
for tag in adjusted_item.get('tags') or []:
|
||||||
tags_count[tag] = tags_count.get(tag, 0) + 1
|
tags_count[tag] = tags_count.get(tag, 0) + 1
|
||||||
|
|
||||||
|
# Validate cache entries and check health
|
||||||
|
valid_entries, invalid_entries = CacheEntryValidator.validate_batch(
|
||||||
|
adjusted_raw_data, auto_repair=True
|
||||||
|
)
|
||||||
|
|
||||||
|
if invalid_entries:
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
report = monitor.check_health(adjusted_raw_data, auto_repair=True)
|
||||||
|
|
||||||
|
if report.status != CacheHealthStatus.HEALTHY:
|
||||||
|
# Broadcast health warning to frontend
|
||||||
|
await ws_manager.broadcast_cache_health_warning(report, page_type)
|
||||||
|
logger.warning(
|
||||||
|
f"{self.model_type.capitalize()} Scanner: Cache health issue detected - "
|
||||||
|
f"{report.invalid_entries} invalid entries, {report.repaired_entries} repaired"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Use only valid entries
|
||||||
|
adjusted_raw_data = valid_entries
|
||||||
|
|
||||||
|
# Rebuild tags count from valid entries only
|
||||||
|
tags_count = {}
|
||||||
|
for item in adjusted_raw_data:
|
||||||
|
for tag in item.get('tags') or []:
|
||||||
|
tags_count[tag] = tags_count.get(tag, 0) + 1
|
||||||
|
|
||||||
|
# Remove invalid entries from hash index
|
||||||
|
for invalid_entry in invalid_entries:
|
||||||
|
file_path = CacheEntryValidator.get_file_path_safe(invalid_entry)
|
||||||
|
sha256 = CacheEntryValidator.get_sha256_safe(invalid_entry)
|
||||||
|
if file_path:
|
||||||
|
hash_index.remove_by_path(file_path, sha256)
|
||||||
|
|
||||||
scan_result = CacheBuildResult(
|
scan_result = CacheBuildResult(
|
||||||
raw_data=adjusted_raw_data,
|
raw_data=adjusted_raw_data,
|
||||||
hash_index=hash_index,
|
hash_index=hash_index,
|
||||||
@@ -776,6 +811,18 @@ class ModelScanner:
|
|||||||
model_data = self.adjust_cached_entry(dict(model_data))
|
model_data = self.adjust_cached_entry(dict(model_data))
|
||||||
if not model_data:
|
if not model_data:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
# Validate the new entry before adding
|
||||||
|
validation_result = CacheEntryValidator.validate(
|
||||||
|
model_data, auto_repair=True
|
||||||
|
)
|
||||||
|
if not validation_result.is_valid:
|
||||||
|
logger.warning(
|
||||||
|
f"Skipping invalid entry during reconcile: {path}"
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
model_data = validation_result.entry
|
||||||
|
|
||||||
self._ensure_license_flags(model_data)
|
self._ensure_license_flags(model_data)
|
||||||
# Add to cache
|
# Add to cache
|
||||||
self._cache.raw_data.append(model_data)
|
self._cache.raw_data.append(model_data)
|
||||||
@@ -1090,6 +1137,17 @@ class ModelScanner:
|
|||||||
processed_files += 1
|
processed_files += 1
|
||||||
|
|
||||||
if result:
|
if result:
|
||||||
|
# Validate the entry before adding
|
||||||
|
validation_result = CacheEntryValidator.validate(
|
||||||
|
result, auto_repair=True
|
||||||
|
)
|
||||||
|
if not validation_result.is_valid:
|
||||||
|
logger.warning(
|
||||||
|
f"Skipping invalid scan result: {file_path}"
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
result = validation_result.entry
|
||||||
|
|
||||||
self._ensure_license_flags(result)
|
self._ensure_license_flags(result)
|
||||||
raw_data.append(result)
|
raw_data.append(result)
|
||||||
|
|
||||||
|
|||||||
@@ -255,6 +255,42 @@ class WebSocketManager:
|
|||||||
self._download_progress.pop(download_id, None)
|
self._download_progress.pop(download_id, None)
|
||||||
logger.debug(f"Cleaned up old download progress for {download_id}")
|
logger.debug(f"Cleaned up old download progress for {download_id}")
|
||||||
|
|
||||||
|
async def broadcast_cache_health_warning(self, report: 'HealthReport', page_type: str = None):
|
||||||
|
"""
|
||||||
|
Broadcast cache health warning to frontend.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
report: HealthReport instance from CacheHealthMonitor
|
||||||
|
page_type: The page type (loras, checkpoints, embeddings)
|
||||||
|
"""
|
||||||
|
from .cache_health_monitor import CacheHealthStatus
|
||||||
|
|
||||||
|
# Only broadcast if there are issues
|
||||||
|
if report.status == CacheHealthStatus.HEALTHY:
|
||||||
|
return
|
||||||
|
|
||||||
|
payload = {
|
||||||
|
'type': 'cache_health_warning',
|
||||||
|
'status': report.status.value,
|
||||||
|
'message': report.message,
|
||||||
|
'pageType': page_type,
|
||||||
|
'details': {
|
||||||
|
'total': report.total_entries,
|
||||||
|
'valid': report.valid_entries,
|
||||||
|
'invalid': report.invalid_entries,
|
||||||
|
'repaired': report.repaired_entries,
|
||||||
|
'corruption_rate': f"{report.corruption_rate:.1%}",
|
||||||
|
'invalid_paths': report.invalid_paths[:5], # Limit to first 5
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Broadcasting cache health warning: {report.status.value} "
|
||||||
|
f"({report.invalid_entries} invalid entries)"
|
||||||
|
)
|
||||||
|
|
||||||
|
await self.broadcast(payload)
|
||||||
|
|
||||||
def get_connected_clients_count(self) -> int:
|
def get_connected_clients_count(self) -> int:
|
||||||
"""Get number of connected clients"""
|
"""Get number of connected clients"""
|
||||||
return len(self._websockets)
|
return len(self._websockets)
|
||||||
|
|||||||
0
scripts/sync_translation_keys.py
Normal file → Executable file
0
scripts/sync_translation_keys.py
Normal file → Executable file
@@ -199,6 +199,12 @@ class InitializationManager {
|
|||||||
if (!data) return;
|
if (!data) return;
|
||||||
console.log('Received progress update:', data);
|
console.log('Received progress update:', data);
|
||||||
|
|
||||||
|
// Handle cache health warning messages
|
||||||
|
if (data.type === 'cache_health_warning') {
|
||||||
|
this.handleCacheHealthWarning(data);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
// Check if this update is for our page type
|
// Check if this update is for our page type
|
||||||
if (data.pageType && data.pageType !== this.pageType) {
|
if (data.pageType && data.pageType !== this.pageType) {
|
||||||
console.log(`Ignoring update for ${data.pageType}, we're on ${this.pageType}`);
|
console.log(`Ignoring update for ${data.pageType}, we're on ${this.pageType}`);
|
||||||
@@ -466,6 +472,29 @@ class InitializationManager {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle cache health warning messages from WebSocket
|
||||||
|
*/
|
||||||
|
handleCacheHealthWarning(data) {
|
||||||
|
console.log('Cache health warning received:', data);
|
||||||
|
|
||||||
|
// Import bannerService dynamically to avoid circular dependencies
|
||||||
|
import('../managers/BannerService.js').then(({ bannerService }) => {
|
||||||
|
// Initialize banner service if not already done
|
||||||
|
if (!bannerService.initialized) {
|
||||||
|
bannerService.initialize().then(() => {
|
||||||
|
bannerService.registerCacheHealthBanner(data);
|
||||||
|
}).catch(err => {
|
||||||
|
console.error('Failed to initialize banner service:', err);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
bannerService.registerCacheHealthBanner(data);
|
||||||
|
}
|
||||||
|
}).catch(err => {
|
||||||
|
console.error('Failed to load banner service:', err);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Clean up resources when the component is destroyed
|
* Clean up resources when the component is destroyed
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -4,9 +4,11 @@ import {
|
|||||||
removeStorageItem
|
removeStorageItem
|
||||||
} from '../utils/storageHelpers.js';
|
} from '../utils/storageHelpers.js';
|
||||||
import { translate } from '../utils/i18nHelpers.js';
|
import { translate } from '../utils/i18nHelpers.js';
|
||||||
import { state } from '../state/index.js'
|
import { state } from '../state/index.js';
|
||||||
|
import { getModelApiClient } from '../api/modelApiFactory.js';
|
||||||
|
|
||||||
const COMMUNITY_SUPPORT_BANNER_ID = 'community-support';
|
const COMMUNITY_SUPPORT_BANNER_ID = 'community-support';
|
||||||
|
const CACHE_HEALTH_BANNER_ID = 'cache-health-warning';
|
||||||
const COMMUNITY_SUPPORT_BANNER_DELAY_MS = 5 * 24 * 60 * 60 * 1000; // 5 days
|
const COMMUNITY_SUPPORT_BANNER_DELAY_MS = 5 * 24 * 60 * 60 * 1000; // 5 days
|
||||||
const COMMUNITY_SUPPORT_FIRST_SEEN_AT_KEY = 'community_support_banner_first_seen_at';
|
const COMMUNITY_SUPPORT_FIRST_SEEN_AT_KEY = 'community_support_banner_first_seen_at';
|
||||||
const COMMUNITY_SUPPORT_VERSION_KEY = 'community_support_banner_state_version';
|
const COMMUNITY_SUPPORT_VERSION_KEY = 'community_support_banner_state_version';
|
||||||
@@ -293,6 +295,177 @@ class BannerService {
|
|||||||
location.reload();
|
location.reload();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a cache health warning banner
|
||||||
|
* @param {Object} healthData - Health data from WebSocket
|
||||||
|
*/
|
||||||
|
registerCacheHealthBanner(healthData) {
|
||||||
|
if (!healthData || healthData.status === 'healthy') {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove existing cache health banner if any
|
||||||
|
this.removeBannerElement(CACHE_HEALTH_BANNER_ID);
|
||||||
|
|
||||||
|
const isCorrupted = healthData.status === 'corrupted';
|
||||||
|
const titleKey = isCorrupted
|
||||||
|
? 'banners.cacheHealth.corrupted.title'
|
||||||
|
: 'banners.cacheHealth.degraded.title';
|
||||||
|
const defaultTitle = isCorrupted
|
||||||
|
? 'Cache Corruption Detected'
|
||||||
|
: 'Cache Issues Detected';
|
||||||
|
|
||||||
|
const title = translate(titleKey, {}, defaultTitle);
|
||||||
|
|
||||||
|
const contentKey = 'banners.cacheHealth.content';
|
||||||
|
const defaultContent = 'Found {invalid} of {total} cache entries are invalid ({rate}). This may cause missing models or errors. Rebuilding the cache is recommended.';
|
||||||
|
const content = translate(contentKey, {
|
||||||
|
invalid: healthData.details?.invalid || 0,
|
||||||
|
total: healthData.details?.total || 0,
|
||||||
|
rate: healthData.details?.corruption_rate || '0%'
|
||||||
|
}, defaultContent);
|
||||||
|
|
||||||
|
this.registerBanner(CACHE_HEALTH_BANNER_ID, {
|
||||||
|
id: CACHE_HEALTH_BANNER_ID,
|
||||||
|
title: title,
|
||||||
|
content: content,
|
||||||
|
pageType: healthData.pageType,
|
||||||
|
actions: [
|
||||||
|
{
|
||||||
|
text: translate('banners.cacheHealth.rebuildCache', {}, 'Rebuild Cache'),
|
||||||
|
icon: 'fas fa-sync-alt',
|
||||||
|
action: 'rebuild-cache',
|
||||||
|
type: 'primary'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
text: translate('banners.cacheHealth.dismiss', {}, 'Dismiss'),
|
||||||
|
icon: 'fas fa-times',
|
||||||
|
action: 'dismiss',
|
||||||
|
type: 'secondary'
|
||||||
|
}
|
||||||
|
],
|
||||||
|
dismissible: true,
|
||||||
|
priority: 10, // High priority
|
||||||
|
onRegister: (bannerElement) => {
|
||||||
|
// Attach click handlers for actions
|
||||||
|
const rebuildBtn = bannerElement.querySelector('[data-action="rebuild-cache"]');
|
||||||
|
const dismissBtn = bannerElement.querySelector('[data-action="dismiss"]');
|
||||||
|
|
||||||
|
if (rebuildBtn) {
|
||||||
|
rebuildBtn.addEventListener('click', (e) => {
|
||||||
|
e.preventDefault();
|
||||||
|
this.handleRebuildCache(bannerElement, healthData.pageType);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dismissBtn) {
|
||||||
|
dismissBtn.addEventListener('click', (e) => {
|
||||||
|
e.preventDefault();
|
||||||
|
this.dismissBanner(CACHE_HEALTH_BANNER_ID);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle rebuild cache action from banner
|
||||||
|
* @param {HTMLElement} bannerElement - The banner element
|
||||||
|
* @param {string} pageType - The page type (loras, checkpoints, embeddings)
|
||||||
|
*/
|
||||||
|
async handleRebuildCache(bannerElement, pageType) {
|
||||||
|
const currentPageType = pageType || this.getCurrentPageType();
|
||||||
|
|
||||||
|
try {
|
||||||
|
const apiClient = getModelApiClient(currentPageType);
|
||||||
|
|
||||||
|
// Update banner to show rebuilding status
|
||||||
|
const actionsContainer = bannerElement.querySelector('.banner-actions');
|
||||||
|
if (actionsContainer) {
|
||||||
|
actionsContainer.innerHTML = `
|
||||||
|
<span class="banner-loading">
|
||||||
|
<i class="fas fa-spinner fa-spin"></i>
|
||||||
|
<span>${translate('banners.cacheHealth.rebuilding', {}, 'Rebuilding cache...')}</span>
|
||||||
|
</span>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
await apiClient.refreshModels(true);
|
||||||
|
|
||||||
|
// Remove banner on success without marking as dismissed
|
||||||
|
this.removeBannerElement(CACHE_HEALTH_BANNER_ID);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Cache rebuild failed:', error);
|
||||||
|
|
||||||
|
const actionsContainer = bannerElement.querySelector('.banner-actions');
|
||||||
|
if (actionsContainer) {
|
||||||
|
actionsContainer.innerHTML = `
|
||||||
|
<span class="banner-error">
|
||||||
|
<i class="fas fa-exclamation-triangle"></i>
|
||||||
|
<span>${translate('banners.cacheHealth.rebuildFailed', {}, 'Rebuild failed. Please try again.')}</span>
|
||||||
|
</span>
|
||||||
|
<a href="#" class="banner-action banner-action-primary" data-action="rebuild-cache">
|
||||||
|
<i class="fas fa-sync-alt"></i>
|
||||||
|
<span>${translate('banners.cacheHealth.retry', {}, 'Retry')}</span>
|
||||||
|
</a>
|
||||||
|
`;
|
||||||
|
|
||||||
|
// Re-attach click handler
|
||||||
|
const retryBtn = actionsContainer.querySelector('[data-action="rebuild-cache"]');
|
||||||
|
if (retryBtn) {
|
||||||
|
retryBtn.addEventListener('click', (e) => {
|
||||||
|
e.preventDefault();
|
||||||
|
this.handleRebuildCache(bannerElement, pageType);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the current page type from the URL
|
||||||
|
* @returns {string} Page type (loras, checkpoints, embeddings, recipes)
|
||||||
|
*/
|
||||||
|
getCurrentPageType() {
|
||||||
|
const path = window.location.pathname;
|
||||||
|
if (path.includes('/checkpoints')) return 'checkpoints';
|
||||||
|
if (path.includes('/embeddings')) return 'embeddings';
|
||||||
|
if (path.includes('/recipes')) return 'recipes';
|
||||||
|
return 'loras';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the rebuild cache endpoint for the given page type
|
||||||
|
* @param {string} pageType - The page type
|
||||||
|
* @returns {string} The API endpoint URL
|
||||||
|
*/
|
||||||
|
getRebuildEndpoint(pageType) {
|
||||||
|
const endpoints = {
|
||||||
|
'loras': '/api/lm/loras/reload?rebuild=true',
|
||||||
|
'checkpoints': '/api/lm/checkpoints/reload?rebuild=true',
|
||||||
|
'embeddings': '/api/lm/embeddings/reload?rebuild=true'
|
||||||
|
};
|
||||||
|
return endpoints[pageType] || endpoints['loras'];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a banner element from DOM without marking as dismissed
|
||||||
|
* @param {string} bannerId - Banner ID to remove
|
||||||
|
*/
|
||||||
|
removeBannerElement(bannerId) {
|
||||||
|
const bannerElement = document.querySelector(`[data-banner-id="${bannerId}"]`);
|
||||||
|
if (bannerElement) {
|
||||||
|
bannerElement.style.animation = 'banner-slide-up 0.3s ease-in-out forwards';
|
||||||
|
setTimeout(() => {
|
||||||
|
bannerElement.remove();
|
||||||
|
this.updateContainerVisibility();
|
||||||
|
}, 300);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Also remove from banners map
|
||||||
|
this.banners.delete(bannerId);
|
||||||
|
}
|
||||||
|
|
||||||
prepareCommunitySupportBanner() {
|
prepareCommunitySupportBanner() {
|
||||||
if (this.isBannerDismissed(COMMUNITY_SUPPORT_BANNER_ID)) {
|
if (this.isBannerDismissed(COMMUNITY_SUPPORT_BANNER_ID)) {
|
||||||
return;
|
return;
|
||||||
|
|||||||
283
tests/services/test_cache_entry_validator.py
Normal file
283
tests/services/test_cache_entry_validator.py
Normal file
@@ -0,0 +1,283 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for CacheEntryValidator
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from py.services.cache_entry_validator import (
|
||||||
|
CacheEntryValidator,
|
||||||
|
ValidationResult,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestCacheEntryValidator:
|
||||||
|
"""Tests for CacheEntryValidator class"""
|
||||||
|
|
||||||
|
def test_validate_valid_entry(self):
|
||||||
|
"""Test validation of a valid cache entry"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
'file_name': 'test.safetensors',
|
||||||
|
'model_name': 'Test Model',
|
||||||
|
'size': 1024,
|
||||||
|
'modified': 1234567890.0,
|
||||||
|
'tags': ['tag1', 'tag2'],
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.repaired is False
|
||||||
|
assert len(result.errors) == 0
|
||||||
|
assert result.entry == entry
|
||||||
|
|
||||||
|
def test_validate_missing_required_field_sha256(self):
|
||||||
|
"""Test validation fails when required sha256 field is missing"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
# sha256 missing
|
||||||
|
'file_name': 'test.safetensors',
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('sha256' in error for error in result.errors)
|
||||||
|
|
||||||
|
def test_validate_missing_required_field_file_path(self):
|
||||||
|
"""Test validation fails when required file_path field is missing"""
|
||||||
|
entry = {
|
||||||
|
# file_path missing
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
'file_name': 'test.safetensors',
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('file_path' in error for error in result.errors)
|
||||||
|
|
||||||
|
def test_validate_empty_required_field_sha256(self):
|
||||||
|
"""Test validation fails when sha256 is empty string"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': '', # Empty string
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('sha256' in error for error in result.errors)
|
||||||
|
|
||||||
|
def test_validate_empty_required_field_file_path(self):
|
||||||
|
"""Test validation fails when file_path is empty string"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '', # Empty string
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('file_path' in error for error in result.errors)
|
||||||
|
|
||||||
|
def test_validate_none_required_field(self):
|
||||||
|
"""Test validation fails when required field is None"""
|
||||||
|
entry = {
|
||||||
|
'file_path': None,
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('file_path' in error for error in result.errors)
|
||||||
|
|
||||||
|
def test_validate_none_entry(self):
|
||||||
|
"""Test validation handles None entry"""
|
||||||
|
result = CacheEntryValidator.validate(None, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('None' in error for error in result.errors)
|
||||||
|
assert result.entry is None
|
||||||
|
|
||||||
|
def test_validate_non_dict_entry(self):
|
||||||
|
"""Test validation handles non-dict entry"""
|
||||||
|
result = CacheEntryValidator.validate("not a dict", auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is False
|
||||||
|
assert result.repaired is False
|
||||||
|
assert any('not a dict' in error for error in result.errors)
|
||||||
|
assert result.entry is None
|
||||||
|
|
||||||
|
def test_auto_repair_missing_non_required_field(self):
|
||||||
|
"""Test auto-repair adds missing non-required fields"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
# file_name, model_name, tags missing
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=True)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.repaired is True
|
||||||
|
assert result.entry['file_name'] == ''
|
||||||
|
assert result.entry['model_name'] == ''
|
||||||
|
assert result.entry['tags'] == []
|
||||||
|
|
||||||
|
def test_auto_repair_wrong_type_field(self):
|
||||||
|
"""Test auto-repair fixes fields with wrong type"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'abc123def456',
|
||||||
|
'size': 'not a number', # Should be int
|
||||||
|
'tags': 'not a list', # Should be list
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=True)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.repaired is True
|
||||||
|
assert result.entry['size'] == 0 # Default value
|
||||||
|
assert result.entry['tags'] == [] # Default value
|
||||||
|
|
||||||
|
def test_normalize_sha256_lowercase(self):
|
||||||
|
"""Test sha256 is normalized to lowercase"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'ABC123DEF456', # Uppercase
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=True)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.entry['sha256'] == 'abc123def456'
|
||||||
|
|
||||||
|
def test_validate_batch_all_valid(self):
|
||||||
|
"""Test batch validation with all valid entries"""
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': '/models/test1.safetensors',
|
||||||
|
'sha256': 'abc123',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'file_path': '/models/test2.safetensors',
|
||||||
|
'sha256': 'def456',
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
valid, invalid = CacheEntryValidator.validate_batch(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert len(valid) == 2
|
||||||
|
assert len(invalid) == 0
|
||||||
|
|
||||||
|
def test_validate_batch_mixed_validity(self):
|
||||||
|
"""Test batch validation with mixed valid/invalid entries"""
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': '/models/test1.safetensors',
|
||||||
|
'sha256': 'abc123',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'file_path': '/models/test2.safetensors',
|
||||||
|
# sha256 missing - invalid
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'file_path': '/models/test3.safetensors',
|
||||||
|
'sha256': 'def456',
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
valid, invalid = CacheEntryValidator.validate_batch(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert len(valid) == 2
|
||||||
|
assert len(invalid) == 1
|
||||||
|
# invalid list contains the actual invalid entries (not by index)
|
||||||
|
assert invalid[0]['file_path'] == '/models/test2.safetensors'
|
||||||
|
|
||||||
|
def test_validate_batch_empty_list(self):
|
||||||
|
"""Test batch validation with empty list"""
|
||||||
|
valid, invalid = CacheEntryValidator.validate_batch([], auto_repair=False)
|
||||||
|
|
||||||
|
assert len(valid) == 0
|
||||||
|
assert len(invalid) == 0
|
||||||
|
|
||||||
|
def test_get_file_path_safe(self):
|
||||||
|
"""Test safe file_path extraction"""
|
||||||
|
entry = {'file_path': '/models/test.safetensors', 'sha256': 'abc123'}
|
||||||
|
assert CacheEntryValidator.get_file_path_safe(entry) == '/models/test.safetensors'
|
||||||
|
|
||||||
|
def test_get_file_path_safe_missing(self):
|
||||||
|
"""Test safe file_path extraction when missing"""
|
||||||
|
entry = {'sha256': 'abc123'}
|
||||||
|
assert CacheEntryValidator.get_file_path_safe(entry) == ''
|
||||||
|
|
||||||
|
def test_get_file_path_safe_not_dict(self):
|
||||||
|
"""Test safe file_path extraction from non-dict"""
|
||||||
|
assert CacheEntryValidator.get_file_path_safe(None) == ''
|
||||||
|
assert CacheEntryValidator.get_file_path_safe('string') == ''
|
||||||
|
|
||||||
|
def test_get_sha256_safe(self):
|
||||||
|
"""Test safe sha256 extraction"""
|
||||||
|
entry = {'file_path': '/models/test.safetensors', 'sha256': 'ABC123'}
|
||||||
|
assert CacheEntryValidator.get_sha256_safe(entry) == 'abc123'
|
||||||
|
|
||||||
|
def test_get_sha256_safe_missing(self):
|
||||||
|
"""Test safe sha256 extraction when missing"""
|
||||||
|
entry = {'file_path': '/models/test.safetensors'}
|
||||||
|
assert CacheEntryValidator.get_sha256_safe(entry) == ''
|
||||||
|
|
||||||
|
def test_get_sha256_safe_not_dict(self):
|
||||||
|
"""Test safe sha256 extraction from non-dict"""
|
||||||
|
assert CacheEntryValidator.get_sha256_safe(None) == ''
|
||||||
|
assert CacheEntryValidator.get_sha256_safe('string') == ''
|
||||||
|
|
||||||
|
def test_validate_with_all_optional_fields(self):
|
||||||
|
"""Test validation with all optional fields present"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'abc123',
|
||||||
|
'file_name': 'test.safetensors',
|
||||||
|
'model_name': 'Test Model',
|
||||||
|
'folder': 'test_folder',
|
||||||
|
'size': 1024,
|
||||||
|
'modified': 1234567890.0,
|
||||||
|
'tags': ['tag1', 'tag2'],
|
||||||
|
'preview_url': 'http://example.com/preview.jpg',
|
||||||
|
'base_model': 'SD1.5',
|
||||||
|
'from_civitai': True,
|
||||||
|
'favorite': True,
|
||||||
|
'exclude': False,
|
||||||
|
'db_checked': True,
|
||||||
|
'preview_nsfw_level': 1,
|
||||||
|
'notes': 'Test notes',
|
||||||
|
'usage_tips': 'Test tips',
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.repaired is False
|
||||||
|
assert result.entry == entry
|
||||||
|
|
||||||
|
def test_validate_numeric_field_accepts_float_for_int(self):
|
||||||
|
"""Test that numeric fields accept float for int type"""
|
||||||
|
entry = {
|
||||||
|
'file_path': '/models/test.safetensors',
|
||||||
|
'sha256': 'abc123',
|
||||||
|
'size': 1024.5, # Float for int field
|
||||||
|
'modified': 1234567890.0,
|
||||||
|
}
|
||||||
|
|
||||||
|
result = CacheEntryValidator.validate(entry, auto_repair=False)
|
||||||
|
|
||||||
|
assert result.is_valid is True
|
||||||
|
assert result.repaired is False
|
||||||
364
tests/services/test_cache_health_monitor.py
Normal file
364
tests/services/test_cache_health_monitor.py
Normal file
@@ -0,0 +1,364 @@
|
|||||||
|
"""
|
||||||
|
Unit tests for CacheHealthMonitor
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from py.services.cache_health_monitor import (
|
||||||
|
CacheHealthMonitor,
|
||||||
|
CacheHealthStatus,
|
||||||
|
HealthReport,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestCacheHealthMonitor:
|
||||||
|
"""Tests for CacheHealthMonitor class"""
|
||||||
|
|
||||||
|
def test_check_health_all_valid_entries(self):
|
||||||
|
"""Test health check with 100% valid entries"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': f'/models/test{i}.safetensors',
|
||||||
|
'sha256': f'hash{i}',
|
||||||
|
}
|
||||||
|
for i in range(100)
|
||||||
|
]
|
||||||
|
|
||||||
|
report = monitor.check_health(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.HEALTHY
|
||||||
|
assert report.total_entries == 100
|
||||||
|
assert report.valid_entries == 100
|
||||||
|
assert report.invalid_entries == 0
|
||||||
|
assert report.repaired_entries == 0
|
||||||
|
assert report.corruption_rate == 0.0
|
||||||
|
assert report.message == "Cache is healthy"
|
||||||
|
|
||||||
|
def test_check_health_degraded_cache(self):
|
||||||
|
"""Test health check with 1-5% invalid entries (degraded)"""
|
||||||
|
monitor = CacheHealthMonitor()
|
||||||
|
|
||||||
|
# Create 100 entries, 2 invalid (2%)
|
||||||
|
entries = [
|
||||||
|
{
|
||||||
|
'file_path': f'/models/test{i}.safetensors',
|
||||||
|
'sha256': f'hash{i}',
|
||||||
|
}
|
||||||
|
for i in range(98)
|
||||||
|
]
|
||||||
|
# Add 2 invalid entries
|
||||||
|
entries.append({'file_path': '/models/invalid1.safetensors'}) # Missing sha256
|
||||||
|
entries.append({'file_path': '/models/invalid2.safetensors'}) # Missing sha256
|
||||||
|
|
||||||
|
report = monitor.check_health(entries, auto_repair=False)
|
||||||
|
|
||||||
|
assert report.status == CacheHealthStatus.DEGRADED
|
||||||
|
assert report.total_entries == 100
|
||||||
|
assert report.valid_entries == 98
|
||||||
|
assert report.invalid_entries == 2
|
||||||
|
assert report.corruption_rate == 0.02
|
||||||
|
# Message describes the issue without necessarily containing the word "degraded"
|
||||||
|
assert 'invalid entries' in report.message.lower()
|
||||||
|
|
||||||
|
def test_check_health_corrupted_cache(self):
    """More than 5% invalid entries should be classified as CORRUPTED."""
    health_monitor = CacheHealthMonitor()

    # 90 well-formed entries plus 10 lacking sha256 -> 10% corruption.
    cache_entries = [
        {'file_path': f'/models/test{idx}.safetensors', 'sha256': f'hash{idx}'}
        for idx in range(90)
    ] + [
        {'file_path': f'/models/invalid{idx}.safetensors'}
        for idx in range(10)
    ]

    result = health_monitor.check_health(cache_entries, auto_repair=False)

    assert result.status == CacheHealthStatus.CORRUPTED
    assert result.total_entries == 100
    assert result.valid_entries == 90
    assert result.invalid_entries == 10
    assert result.corruption_rate == 0.10
    assert 'corrupted' in result.message.lower()
||||||
|
def test_check_health_empty_cache(self):
    """An empty cache is reported healthy with a dedicated message."""
    result = CacheHealthMonitor().check_health([], auto_repair=False)

    assert result.status == CacheHealthStatus.HEALTHY
    assert result.total_entries == 0
    assert result.valid_entries == 0
    assert result.invalid_entries == 0
    assert result.corruption_rate == 0.0
    assert result.message == "Cache is empty"
||||||
|
def test_check_health_single_invalid_entry(self):
    """A lone invalid entry means 100% corruption -> CORRUPTED."""
    health_monitor = CacheHealthMonitor()

    result = health_monitor.check_health(
        [{'file_path': '/models/invalid.safetensors'}], auto_repair=False
    )

    assert result.status == CacheHealthStatus.CORRUPTED
    assert result.total_entries == 1
    assert result.valid_entries == 0
    assert result.invalid_entries == 1
    assert result.corruption_rate == 1.0
||||||
|
def test_check_health_boundary_degraded_threshold(self):
    """Exactly 1% invalid entries should land on the DEGRADED boundary."""
    health_monitor = CacheHealthMonitor(degraded_threshold=0.01)

    # 99 valid entries plus one missing sha256 -> exactly 1% corruption.
    cache_entries = [
        {'file_path': f'/models/test{idx}.safetensors', 'sha256': f'hash{idx}'}
        for idx in range(99)
    ] + [{'file_path': '/models/invalid.safetensors'}]

    result = health_monitor.check_health(cache_entries, auto_repair=False)

    assert result.status == CacheHealthStatus.DEGRADED
    assert result.corruption_rate == 0.01
||||||
|
def test_check_health_boundary_corrupted_threshold(self):
    """Exactly 5% invalid entries should land on the CORRUPTED boundary."""
    health_monitor = CacheHealthMonitor(corrupted_threshold=0.05)

    # 95 valid entries plus 5 missing sha256 -> exactly 5% corruption.
    cache_entries = [
        {'file_path': f'/models/test{idx}.safetensors', 'sha256': f'hash{idx}'}
        for idx in range(95)
    ] + [
        {'file_path': f'/models/invalid{idx}.safetensors'}
        for idx in range(5)
    ]

    result = health_monitor.check_health(cache_entries, auto_repair=False)

    assert result.status == CacheHealthStatus.CORRUPTED
    assert result.corruption_rate == 0.05
||||||
|
def test_check_health_below_degraded_threshold(self):
    """Zero invalid entries stays below every threshold -> HEALTHY."""
    health_monitor = CacheHealthMonitor(degraded_threshold=0.01)

    cache_entries = [
        {'file_path': f'/models/test{idx}.safetensors', 'sha256': f'hash{idx}'}
        for idx in range(100)
    ]

    result = health_monitor.check_health(cache_entries, auto_repair=False)

    assert result.status == CacheHealthStatus.HEALTHY
    assert result.corruption_rate == 0.0
||||||
|
def test_check_health_auto_repair(self):
    """With auto_repair on, entries missing optional fields get repaired."""
    health_monitor = CacheHealthMonitor()

    # First entry carries every field and needs no repair; the second has
    # only the required fields, so the repair pass fills in the rest.
    complete_entry = dict(
        file_path='/models/test1.safetensors',
        sha256='hash1',
        file_name='test1.safetensors',
        model_name='Model 1',
        folder='',
        size=0,
        modified=0.0,
        tags=['tag1'],
        preview_url='',
        base_model='',
        from_civitai=True,
        favorite=False,
        exclude=False,
        db_checked=False,
        preview_nsfw_level=0,
        notes='',
        usage_tips='',
    )
    incomplete_entry = dict(
        file_path='/models/test2.safetensors',
        sha256='hash2',
    )

    result = health_monitor.check_health(
        [complete_entry, incomplete_entry], auto_repair=True
    )

    assert result.status == CacheHealthStatus.HEALTHY
    assert result.total_entries == 2
    assert result.valid_entries == 2
    assert result.invalid_entries == 0
    assert result.repaired_entries == 1
||||||
|
def test_should_notify_user_healthy(self):
    """A healthy cache should not trigger a user notification."""
    health_monitor = CacheHealthMonitor()

    report_kwargs = dict(
        status=CacheHealthStatus.HEALTHY,
        total_entries=100,
        valid_entries=100,
        invalid_entries=0,
        repaired_entries=0,
        message="Cache is healthy",
    )

    assert health_monitor.should_notify_user(HealthReport(**report_kwargs)) is False
||||||
|
def test_should_notify_user_degraded(self):
    """A degraded cache should trigger a user notification."""
    health_monitor = CacheHealthMonitor()

    report_kwargs = dict(
        status=CacheHealthStatus.DEGRADED,
        total_entries=100,
        valid_entries=98,
        invalid_entries=2,
        repaired_entries=0,
        message="Cache is degraded",
    )

    assert health_monitor.should_notify_user(HealthReport(**report_kwargs)) is True
||||||
|
def test_should_notify_user_corrupted(self):
    """A corrupted cache should trigger a user notification."""
    health_monitor = CacheHealthMonitor()

    report_kwargs = dict(
        status=CacheHealthStatus.CORRUPTED,
        total_entries=100,
        valid_entries=90,
        invalid_entries=10,
        repaired_entries=0,
        message="Cache is corrupted",
    )

    assert health_monitor.should_notify_user(HealthReport(**report_kwargs)) is True
||||||
|
def test_get_notification_severity_degraded(self):
    """Degraded status maps to the 'warning' notification severity."""
    health_monitor = CacheHealthMonitor()

    report_kwargs = dict(
        status=CacheHealthStatus.DEGRADED,
        total_entries=100,
        valid_entries=98,
        invalid_entries=2,
        repaired_entries=0,
        message="Cache is degraded",
    )

    assert health_monitor.get_notification_severity(HealthReport(**report_kwargs)) == 'warning'
||||||
|
def test_get_notification_severity_corrupted(self):
    """Corrupted status maps to the 'error' notification severity."""
    health_monitor = CacheHealthMonitor()

    report_kwargs = dict(
        status=CacheHealthStatus.CORRUPTED,
        total_entries=100,
        valid_entries=90,
        invalid_entries=10,
        repaired_entries=0,
        message="Cache is corrupted",
    )

    assert health_monitor.get_notification_severity(HealthReport(**report_kwargs)) == 'error'
||||||
|
def test_report_to_dict(self):
    """HealthReport.to_dict should expose every field in serialized form."""
    report = HealthReport(
        status=CacheHealthStatus.DEGRADED,
        total_entries=100,
        valid_entries=98,
        invalid_entries=2,
        repaired_entries=1,
        invalid_paths=['/path1', '/path2'],
        message="Cache issues detected",
    )

    serialized = report.to_dict()

    expected = {
        'status': 'degraded',
        'total_entries': 100,
        'valid_entries': 98,
        'invalid_entries': 2,
        'repaired_entries': 1,
        'corruption_rate': '2.0%',
        'message': "Cache issues detected",
    }
    for key, value in expected.items():
        assert serialized[key] == value
    assert len(serialized['invalid_paths']) == 2
|
||||||
|
def test_report_corruption_rate_zero_division(self):
    """corruption_rate must be 0.0 (not an error) when there are no entries."""
    empty_report = HealthReport(
        status=CacheHealthStatus.HEALTHY,
        total_entries=0,
        valid_entries=0,
        invalid_entries=0,
        repaired_entries=0,
        message="Cache is empty",
    )

    assert empty_report.corruption_rate == 0.0
||||||
|
def test_check_health_collects_invalid_paths(self):
    """The report should record the file paths of invalid entries."""
    health_monitor = CacheHealthMonitor()

    cache_entries = [
        {'file_path': '/models/valid.safetensors', 'sha256': 'hash1'},
        {'file_path': '/models/invalid1.safetensors'},
        {'file_path': '/models/invalid2.safetensors'},
    ]

    result = health_monitor.check_health(cache_entries, auto_repair=False)

    assert len(result.invalid_paths) == 2
    for path in ('/models/invalid1.safetensors', '/models/invalid2.safetensors'):
        assert path in result.invalid_paths
||||||
|
def test_report_to_dict_limits_invalid_paths(self):
    """to_dict should truncate invalid_paths to the first ten entries."""
    report = HealthReport(
        status=CacheHealthStatus.CORRUPTED,
        total_entries=15,
        valid_entries=0,
        invalid_entries=15,
        repaired_entries=0,
        invalid_paths=[f'/path{idx}' for idx in range(15)],
        message="Cache corrupted",
    )

    serialized = report.to_dict()

    assert len(serialized['invalid_paths']) == 10
    assert serialized['invalid_paths'][0] == '/path0'
    assert serialized['invalid_paths'][-1] == '/path9'
|
||||||
167
tests/services/test_model_scanner_cache_validation.py
Normal file
167
tests/services/test_model_scanner_cache_validation.py
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
"""
|
||||||
|
Integration tests for cache validation in ModelScanner
|
||||||
|
"""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
from py.services.model_scanner import ModelScanner
|
||||||
|
from py.services.cache_entry_validator import CacheEntryValidator
|
||||||
|
from py.services.cache_health_monitor import CacheHealthMonitor, CacheHealthStatus
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_model_scanner_validates_cache_entries(tmp_path_factory):
    """Validation should split cache entries into valid and invalid sets."""
    # Work inside a throwaway directory with one real file on disk.
    lora_dir = tmp_path_factory.mktemp("test_loras")
    model_file = lora_dir / "test_model.safetensors"
    model_file.write_bytes(b"fake model data" * 100)

    # A full ModelScanner is hard to stand up in tests, so exercise the
    # validation logic it relies on directly.
    cache_entries = [
        {
            'file_path': str(model_file),
            'sha256': 'abc123def456',
            'file_name': 'test_model.safetensors',
        },
        # This entry lacks sha256 and is therefore invalid.
        {'file_path': str(lora_dir / 'invalid.safetensors')},
    ]

    valid, invalid = CacheEntryValidator.validate_batch(cache_entries, auto_repair=True)

    assert (len(valid), len(invalid)) == (1, 1)
    assert valid[0]['sha256'] == 'abc123def456'
||||||
|
@pytest.mark.asyncio
async def test_model_scanner_detects_degraded_cache():
    """A 2% corruption rate should be reported as DEGRADED."""
    # 98 valid entries plus 2 missing sha256 -> 2% corruption.
    cache_entries = [
        {'file_path': f'/models/test{idx}.safetensors', 'sha256': f'hash{idx}'}
        for idx in range(98)
    ] + [
        {'file_path': '/models/invalid1.safetensors'},
        {'file_path': '/models/invalid2.safetensors'},
    ]

    report = CacheHealthMonitor().check_health(cache_entries, auto_repair=True)

    assert report.status == CacheHealthStatus.DEGRADED
    assert (report.valid_entries, report.invalid_entries) == (98, 2)
||||||
|
@pytest.mark.asyncio
async def test_model_scanner_detects_corrupted_cache():
    """A 10% corruption rate should be reported as CORRUPTED."""
    # 90 valid entries plus 10 missing sha256 -> 10% corruption.
    cache_entries = [
        {'file_path': f'/models/test{idx}.safetensors', 'sha256': f'hash{idx}'}
        for idx in range(90)
    ] + [
        {'file_path': f'/models/invalid{idx}.safetensors'}
        for idx in range(10)
    ]

    report = CacheHealthMonitor().check_health(cache_entries, auto_repair=True)

    assert report.status == CacheHealthStatus.CORRUPTED
    assert (report.valid_entries, report.invalid_entries) == (90, 10)
||||||
|
@pytest.mark.asyncio
async def test_model_scanner_removes_invalid_from_hash_index():
    """Removing an invalid entry from the hash index must be safe."""
    from py.services.model_hash_index import ModelHashIndex

    hash_index = ModelHashIndex()
    good = {'file_path': '/models/valid.safetensors', 'sha256': 'abc123'}
    bad = {'file_path': '/models/invalid.safetensors', 'sha256': ''}  # empty sha256

    hash_index.add_entry(good['sha256'], good['file_path'])
    hash_index.add_entry(bad['sha256'], bad['file_path'])

    # The valid entry is indexed; the empty-hash entry is not added.
    assert hash_index.get_hash(good['file_path']) == good['sha256']
    assert hash_index.get_hash(bad['file_path']) is None

    # Removing the (absent) invalid entry must be a no-op, not an error.
    hash_index.remove_by_path(
        CacheEntryValidator.get_file_path_safe(bad),
        CacheEntryValidator.get_sha256_safe(bad),
    )

    # The valid entry survives the removal attempt.
    assert hash_index.get_hash(good['file_path']) == good['sha256']
||||||
|
def test_cache_entry_validator_handles_various_field_types():
    """Validator should accept entries whose fields span several types."""
    entry = {
        'file_path': '/models/test.safetensors',
        'sha256': 'abc123',
        'size': 1024,                # int
        'modified': 1234567890.0,    # float
        'favorite': True,            # bool
        'tags': ['tag1', 'tag2'],    # list
        'exclude': False,            # bool
    }

    outcome = CacheEntryValidator.validate(entry, auto_repair=False)

    assert outcome.is_valid is True
    assert outcome.repaired is False
||||||
|
def test_cache_health_report_serialization():
    """HealthReport should serialize to a plain dict via to_dict."""
    from py.services.cache_health_monitor import HealthReport

    report = HealthReport(
        status=CacheHealthStatus.DEGRADED,
        total_entries=100,
        valid_entries=98,
        invalid_entries=2,
        repaired_entries=1,
        invalid_paths=['/path1', '/path2'],
        message="Cache issues detected",
    )

    serialized = report.to_dict()

    expected = {
        'status': 'degraded',
        'total_entries': 100,
        'valid_entries': 98,
        'invalid_entries': 2,
        'repaired_entries': 1,
        'corruption_rate': '2.0%',
        'message': "Cache issues detected",
    }
    for key, value in expected.items():
        assert serialized[key] == value
    assert len(serialized['invalid_paths']) == 2
|
||||||
Reference in New Issue
Block a user