diff --git a/locales/de.json b/locales/de.json index 873d2284..6f484efc 100644 --- a/locales/de.json +++ b/locales/de.json @@ -443,7 +443,10 @@ "dateAsc": "Älteste", "size": "Dateigröße", "sizeDesc": "Größte", - "sizeAsc": "Kleinste" + "sizeAsc": "Kleinste", + "usage": "Anzahl Nutzung", + "usageDesc": "Meiste", + "usageAsc": "Wenigste" }, "refresh": { "title": "Modelliste aktualisieren", diff --git a/locales/en.json b/locales/en.json index df25d8e9..852234ff 100644 --- a/locales/en.json +++ b/locales/en.json @@ -443,7 +443,10 @@ "dateAsc": "Oldest", "size": "File Size", "sizeDesc": "Largest", - "sizeAsc": "Smallest" + "sizeAsc": "Smallest", + "usage": "Use Count", + "usageDesc": "Most", + "usageAsc": "Least" }, "refresh": { "title": "Refresh model list", diff --git a/locales/es.json b/locales/es.json index ff0e9e16..ff700c05 100644 --- a/locales/es.json +++ b/locales/es.json @@ -443,7 +443,10 @@ "dateAsc": "Más antiguo", "size": "Tamaño de archivo", "sizeDesc": "Mayor", - "sizeAsc": "Menor" + "sizeAsc": "Menor", + "usage": "Número de usos", + "usageDesc": "Más", + "usageAsc": "Menos" }, "refresh": { "title": "Actualizar lista de modelos", diff --git a/locales/fr.json b/locales/fr.json index d7c82004..60d23a2a 100644 --- a/locales/fr.json +++ b/locales/fr.json @@ -443,7 +443,10 @@ "dateAsc": "Plus ancien", "size": "Taille du fichier", "sizeDesc": "Plus grand", - "sizeAsc": "Plus petit" + "sizeAsc": "Plus petit", + "usage": "Nombre d'utilisations", + "usageDesc": "Plus", + "usageAsc": "Moins" }, "refresh": { "title": "Actualiser la liste des modèles", diff --git a/locales/he.json b/locales/he.json index 4afa4aa4..ae2d9cfc 100644 --- a/locales/he.json +++ b/locales/he.json @@ -443,7 +443,10 @@ "dateAsc": "הישן ביותר", "size": "גודל קובץ", "sizeDesc": "הגדול ביותר", - "sizeAsc": "הקטן ביותר" + "sizeAsc": "הקטן ביותר", + "usage": "מספר שימושים", + "usageDesc": "הכי הרבה", + "usageAsc": "הכי פחות" }, "refresh": { "title": "רענן רשימת מודלים", diff --git a/locales/ja.json 
b/locales/ja.json index 7b83ec8f..89c53dcd 100644 --- a/locales/ja.json +++ b/locales/ja.json @@ -443,7 +443,10 @@ "dateAsc": "古い順", "size": "ファイルサイズ", "sizeDesc": "大きい順", - "sizeAsc": "小さい順" + "sizeAsc": "小さい順", + "usage": "使用回数", + "usageDesc": "多い", + "usageAsc": "少ない" }, "refresh": { "title": "モデルリストを更新", diff --git a/locales/ko.json b/locales/ko.json index 9750f070..d3f56dd9 100644 --- a/locales/ko.json +++ b/locales/ko.json @@ -443,7 +443,10 @@ "dateAsc": "오래된순", "size": "파일 크기", "sizeDesc": "큰 순서", - "sizeAsc": "작은 순서" + "sizeAsc": "작은 순서", + "usage": "사용 횟수", + "usageDesc": "많은 순", + "usageAsc": "적은 순" }, "refresh": { "title": "모델 목록 새로고침", diff --git a/locales/ru.json b/locales/ru.json index 9c22651a..8c660982 100644 --- a/locales/ru.json +++ b/locales/ru.json @@ -443,7 +443,10 @@ "dateAsc": "Старейшим", "size": "Размеру файла", "sizeDesc": "Наибольшим", - "sizeAsc": "Наименьшим" + "sizeAsc": "Наименьшим", + "usage": "Число использований", + "usageDesc": "Больше", + "usageAsc": "Меньше" }, "refresh": { "title": "Обновить список моделей", diff --git a/locales/zh-CN.json b/locales/zh-CN.json index df02db1d..98660f01 100644 --- a/locales/zh-CN.json +++ b/locales/zh-CN.json @@ -443,7 +443,10 @@ "dateAsc": "最旧", "size": "文件大小", "sizeDesc": "最大", - "sizeAsc": "最小" + "sizeAsc": "最小", + "usage": "使用次数", + "usageDesc": "最多", + "usageAsc": "最少" }, "refresh": { "title": "刷新模型列表", diff --git a/locales/zh-TW.json b/locales/zh-TW.json index 0d5a8dae..c4f4df0f 100644 --- a/locales/zh-TW.json +++ b/locales/zh-TW.json @@ -443,7 +443,10 @@ "dateAsc": "最舊", "size": "檔案大小", "sizeDesc": "最大", - "sizeAsc": "最小" + "sizeAsc": "最小", + "usage": "使用次數", + "usageDesc": "最多", + "usageAsc": "最少" }, "refresh": { "title": "重新整理模型列表", diff --git a/py/services/base_model_service.py b/py/services/base_model_service.py index 84db592b..12333dca 100644 --- a/py/services/base_model_service.py +++ b/py/services/base_model_service.py @@ -7,6 +7,7 @@ import os from ..utils.constants import 
VALID_LORA_TYPES from ..utils.models import BaseModelMetadata from ..utils.metadata_manager import MetadataManager +from ..utils.usage_stats import UsageStats from .model_query import ( FilterCriteria, ModelCacheRepository, @@ -81,7 +82,10 @@ class BaseModelService(ABC): """Get paginated and filtered model data""" sort_params = self.cache_repository.parse_sort(sort_by) - sorted_data = await self.cache_repository.fetch_sorted(sort_params) + if sort_params.key == 'usage': + sorted_data = await self._fetch_with_usage_sort(sort_params) + else: + sorted_data = await self.cache_repository.fetch_sorted(sort_params) if hash_filters: filtered_data = await self._apply_hash_filters(sorted_data, hash_filters) @@ -132,6 +136,37 @@ class BaseModelService(ABC): ) return paginated + async def _fetch_with_usage_sort(self, sort_params): + """Fetch data sorted by usage count (desc/asc).""" + cache = await self.cache_repository.get_cache() + raw_items = cache.raw_data or [] + + # Map model type to usage stats bucket + bucket_map = { + 'lora': 'loras', + 'checkpoint': 'checkpoints', + # 'embedding': 'embeddings', # TODO: Enable when embedding usage tracking is implemented + } + bucket_key = bucket_map.get(self.model_type, '') + + usage_stats = UsageStats() + stats = await usage_stats.get_stats() + usage_bucket = stats.get(bucket_key, {}) if bucket_key else {} + + annotated = [] + for item in raw_items: + sha = (item.get('sha256') or '').lower() + usage_info = usage_bucket.get(sha, {}) if isinstance(usage_bucket, dict) else {} + usage_count = usage_info.get('total', 0) if isinstance(usage_info, dict) else 0 + annotated.append({**item, 'usage_count': usage_count}) + + reverse = sort_params.order == 'desc' + annotated.sort( + key=lambda x: (x.get('usage_count', 0), x.get('model_name', '').lower()), + reverse=reverse + ) + return annotated + async def _apply_hash_filters(self, data: List[Dict], hash_filters: Dict) -> List[Dict]: """Apply hash-based filtering""" diff --git 
a/py/services/checkpoint_service.py b/py/services/checkpoint_service.py index ef1d763f..924f250a 100644 --- a/py/services/checkpoint_service.py +++ b/py/services/checkpoint_service.py @@ -35,6 +35,7 @@ class CheckpointService(BaseModelService): "modified": checkpoint_data.get("modified", ""), "tags": checkpoint_data.get("tags", []), "from_civitai": checkpoint_data.get("from_civitai", True), + "usage_count": checkpoint_data.get("usage_count", 0), "notes": checkpoint_data.get("notes", ""), "model_type": checkpoint_data.get("model_type", "checkpoint"), "favorite": checkpoint_data.get("favorite", False), diff --git a/py/services/embedding_service.py b/py/services/embedding_service.py index 3275552b..bfa51d15 100644 --- a/py/services/embedding_service.py +++ b/py/services/embedding_service.py @@ -35,6 +35,7 @@ class EmbeddingService(BaseModelService): "modified": embedding_data.get("modified", ""), "tags": embedding_data.get("tags", []), "from_civitai": embedding_data.get("from_civitai", True), + # "usage_count": embedding_data.get("usage_count", 0), # TODO: Enable when embedding usage tracking is implemented "notes": embedding_data.get("notes", ""), "model_type": embedding_data.get("model_type", "embedding"), "favorite": embedding_data.get("favorite", False), diff --git a/py/services/lora_service.py b/py/services/lora_service.py index 2de2cf96..f2743cc8 100644 --- a/py/services/lora_service.py +++ b/py/services/lora_service.py @@ -35,6 +35,7 @@ class LoraService(BaseModelService): "modified": lora_data.get("modified", ""), "tags": lora_data.get("tags", []), "from_civitai": lora_data.get("from_civitai", True), + "usage_count": lora_data.get("usage_count", 0), "usage_tips": lora_data.get("usage_tips", ""), "notes": lora_data.get("notes", ""), "favorite": lora_data.get("favorite", False), diff --git a/py/services/model_cache.py b/py/services/model_cache.py index abd8b68b..13417778 100644 --- a/py/services/model_cache.py +++ b/py/services/model_cache.py @@ -13,7 +13,10 @@ 
SUPPORTED_SORT_MODES = [ ('date', 'desc'), ('size', 'asc'), ('size', 'desc'), + ('usage', 'asc'), + ('usage', 'desc'), ] +# TODO(review): verify DISPLAY_NAME_MODES is still referenced before removing. DISPLAY_NAME_MODES = {"model_name", "file_name"} @@ -234,6 +237,16 @@ class ModelCache: key=itemgetter('size'), reverse=reverse ) + elif sort_key == 'usage': + # Sort by usage count, fallback to 0, then name for stability + return sorted( + data, + key=lambda x: ( + x.get('usage_count', 0), + self._get_display_name(x).lower() + ), + reverse=reverse + ) else: # Fallback: no sort return list(data) diff --git a/static/js/components/shared/ModelCard.js b/static/js/components/shared/ModelCard.js index d2c9c96a..b23b6a43 100644 --- a/static/js/components/shared/ModelCard.js +++ b/static/js/components/shared/ModelCard.js @@ -430,12 +430,18 @@ export function createModelCard(model, modelType) { card.dataset.modified = model.modified; card.dataset.file_size = model.file_size; card.dataset.from_civitai = model.from_civitai; + card.dataset.usage_count = String(model.usage_count); card.dataset.notes = model.notes || ''; card.dataset.base_model = model.base_model || 'Unknown'; card.dataset.favorite = model.favorite ? 'true' : 'false'; const hasUpdateAvailable = Boolean(model.update_available); card.dataset.update_available = hasUpdateAvailable ? 'true' : 'false'; + // Only show usage_count when sorting by usage. + const pageState = getCurrentPageState(); + const isUsageSort = pageState?.sortBy?.startsWith('usage'); + const hasUsageCount = isUsageSort && typeof model.usage_count === 'number'; + const civitaiData = model.civitai || {}; const modelId = civitaiData?.modelId ?? civitaiData?.model_id; if (modelId !== undefined && modelId !== null && modelId !== '') { @@ -610,7 +616,10 @@ export function createModelCard(model, modelType) {