Add versioning and history tracking to usage statistics. Implement backup and conversion for old stats format, enhancing data structure for checkpoints and loras.

This commit is contained in:
Will Miao
2025-05-31 16:38:18 +08:00
parent 355c73512d
commit aa1ee96bc9
2 changed files with 134 additions and 27 deletions

View File

@@ -182,10 +182,14 @@ class MiscRoutes:
usage_stats = UsageStats() usage_stats = UsageStats()
stats = await usage_stats.get_stats() stats = await usage_stats.get_stats()
return web.json_response({ # Add version information to help clients handle format changes
stats_response = {
'success': True, 'success': True,
'data': stats 'data': stats,
}) 'format_version': 2 # Indicate this is the new format with history
}
return web.json_response(stats_response)
except Exception as e: except Exception as e:
logger.error(f"Failed to get usage stats: {e}", exc_info=True) logger.error(f"Failed to get usage stats: {e}", exc_info=True)

View File

@@ -4,6 +4,8 @@ import sys
import time import time
import asyncio import asyncio
import logging import logging
import datetime
import shutil
from typing import Dict, Set from typing import Dict, Set
from ..config import config from ..config import config
@@ -26,6 +28,7 @@ class UsageStats:
# Default stats file name # Default stats file name
STATS_FILENAME = "lora_manager_stats.json" STATS_FILENAME = "lora_manager_stats.json"
BACKUP_SUFFIX = ".backup"
def __new__(cls): def __new__(cls):
if cls._instance is None: if cls._instance is None:
@@ -39,8 +42,8 @@ class UsageStats:
# Initialize stats storage # Initialize stats storage
self.stats = { self.stats = {
"checkpoints": {}, # sha256 -> count "checkpoints": {}, # sha256 -> { total: count, history: { date: count } }
"loras": {}, # sha256 -> count "loras": {}, # sha256 -> { total: count, history: { date: count } }
"total_executions": 0, "total_executions": 0,
"last_save_time": 0 "last_save_time": 0
} }
@@ -70,6 +73,68 @@ class UsageStats:
# Use the first lora root # Use the first lora root
return os.path.join(config.loras_roots[0], self.STATS_FILENAME) return os.path.join(config.loras_roots[0], self.STATS_FILENAME)
def _backup_old_stats(self):
"""Backup the old stats file before conversion"""
if os.path.exists(self._stats_file_path):
backup_path = f"{self._stats_file_path}{self.BACKUP_SUFFIX}"
try:
shutil.copy2(self._stats_file_path, backup_path)
logger.info(f"Backed up old stats file to {backup_path}")
return True
except Exception as e:
logger.error(f"Failed to backup stats file: {e}")
return False
def _convert_old_format(self, old_stats):
    """Translate a legacy stats dict (hash -> bare count) into the versioned
    layout (hash -> {"total": count, "history": {date: count}}).

    The old format kept no per-day breakdown, so every legacy count is
    attributed to today's date in the new history map.

    Args:
        old_stats: Parsed legacy stats dictionary.

    Returns:
        A new stats dictionary in the history-aware format.
    """
    # Today's date in YYYY-MM-DD form anchors all migrated counts.
    today = datetime.datetime.now().strftime("%Y-%m-%d")

    def _wrap_section(section_name):
        # Wrap each bare count of a section into the new per-entry shape;
        # missing or malformed sections become an empty mapping.
        entries = old_stats.get(section_name)
        if not isinstance(entries, dict):
            return {}
        return {
            hash_id: {"total": count, "history": {today: count}}
            for hash_id, count in entries.items()
        }

    converted = {
        "checkpoints": _wrap_section("checkpoints"),
        "loras": _wrap_section("loras"),
        "total_executions": old_stats.get("total_executions", 0),
        "last_save_time": old_stats.get("last_save_time", time.time()),
    }
    logger.info("Successfully converted stats from old format to new format with history")
    return converted
def _is_old_format(self, stats):
"""Check if the stats are in the old format (direct count values)"""
# Check if any lora or checkpoint entry is a direct number instead of an object
if "loras" in stats and isinstance(stats["loras"], dict):
for hash_id, data in stats["loras"].items():
if isinstance(data, (int, float)):
return True
if "checkpoints" in stats and isinstance(stats["checkpoints"], dict):
for hash_id, data in stats["checkpoints"].items():
if isinstance(data, (int, float)):
return True
return False
def _load_stats(self): def _load_stats(self):
"""Load existing statistics from file""" """Load existing statistics from file"""
try: try:
@@ -77,17 +142,26 @@ class UsageStats:
with open(self._stats_file_path, 'r', encoding='utf-8') as f: with open(self._stats_file_path, 'r', encoding='utf-8') as f:
loaded_stats = json.load(f) loaded_stats = json.load(f)
# Update our stats with loaded data # Check if old format and needs conversion
if isinstance(loaded_stats, dict): if self._is_old_format(loaded_stats):
# Update individual sections to maintain structure logger.info("Detected old stats format, performing conversion")
if "checkpoints" in loaded_stats and isinstance(loaded_stats["checkpoints"], dict): self._backup_old_stats()
self.stats["checkpoints"] = loaded_stats["checkpoints"] self.stats = self._convert_old_format(loaded_stats)
else:
# Update our stats with loaded data (already in new format)
if isinstance(loaded_stats, dict):
# Update individual sections to maintain structure
if "checkpoints" in loaded_stats and isinstance(loaded_stats["checkpoints"], dict):
self.stats["checkpoints"] = loaded_stats["checkpoints"]
if "loras" in loaded_stats and isinstance(loaded_stats["loras"], dict): if "loras" in loaded_stats and isinstance(loaded_stats["loras"], dict):
self.stats["loras"] = loaded_stats["loras"] self.stats["loras"] = loaded_stats["loras"]
if "total_executions" in loaded_stats: if "total_executions" in loaded_stats:
self.stats["total_executions"] = loaded_stats["total_executions"] self.stats["total_executions"] = loaded_stats["total_executions"]
if "last_save_time" in loaded_stats:
self.stats["last_save_time"] = loaded_stats["last_save_time"]
logger.info(f"Loaded usage statistics from {self._stats_file_path}") logger.info(f"Loaded usage statistics from {self._stats_file_path}")
except Exception as e: except Exception as e:
@@ -174,15 +248,18 @@ class UsageStats:
# Increment total executions count # Increment total executions count
self.stats["total_executions"] += 1 self.stats["total_executions"] += 1
# Get today's date in YYYY-MM-DD format
today = datetime.datetime.now().strftime("%Y-%m-%d")
# Process checkpoints # Process checkpoints
if MODELS in metadata and isinstance(metadata[MODELS], dict): if MODELS in metadata and isinstance(metadata[MODELS], dict):
await self._process_checkpoints(metadata[MODELS]) await self._process_checkpoints(metadata[MODELS], today)
# Process loras # Process loras
if LORAS in metadata and isinstance(metadata[LORAS], dict): if LORAS in metadata and isinstance(metadata[LORAS], dict):
await self._process_loras(metadata[LORAS]) await self._process_loras(metadata[LORAS], today)
async def _process_checkpoints(self, models_data): async def _process_checkpoints(self, models_data, today_date):
"""Process checkpoint models from metadata""" """Process checkpoint models from metadata"""
try: try:
# Get checkpoint scanner service # Get checkpoint scanner service
@@ -208,12 +285,24 @@ class UsageStats:
# Get hash for this checkpoint # Get hash for this checkpoint
model_hash = checkpoint_scanner.get_hash_by_filename(model_filename) model_hash = checkpoint_scanner.get_hash_by_filename(model_filename)
if model_hash: if model_hash:
# Update stats for this checkpoint # Update stats for this checkpoint with date tracking
self.stats["checkpoints"][model_hash] = self.stats["checkpoints"].get(model_hash, 0) + 1 if model_hash not in self.stats["checkpoints"]:
self.stats["checkpoints"][model_hash] = {
"total": 0,
"history": {}
}
# Increment total count
self.stats["checkpoints"][model_hash]["total"] += 1
# Increment today's count
if today_date not in self.stats["checkpoints"][model_hash]["history"]:
self.stats["checkpoints"][model_hash]["history"][today_date] = 0
self.stats["checkpoints"][model_hash]["history"][today_date] += 1
except Exception as e: except Exception as e:
logger.error(f"Error processing checkpoint usage: {e}", exc_info=True) logger.error(f"Error processing checkpoint usage: {e}", exc_info=True)
async def _process_loras(self, loras_data): async def _process_loras(self, loras_data, today_date):
"""Process LoRA models from metadata""" """Process LoRA models from metadata"""
try: try:
# Get LoRA scanner service # Get LoRA scanner service
@@ -239,8 +328,20 @@ class UsageStats:
# Get hash for this LoRA # Get hash for this LoRA
lora_hash = lora_scanner.get_hash_by_filename(lora_name) lora_hash = lora_scanner.get_hash_by_filename(lora_name)
if lora_hash: if lora_hash:
# Update stats for this LoRA # Update stats for this LoRA with date tracking
self.stats["loras"][lora_hash] = self.stats["loras"].get(lora_hash, 0) + 1 if lora_hash not in self.stats["loras"]:
self.stats["loras"][lora_hash] = {
"total": 0,
"history": {}
}
# Increment total count
self.stats["loras"][lora_hash]["total"] += 1
# Increment today's count
if today_date not in self.stats["loras"][lora_hash]["history"]:
self.stats["loras"][lora_hash]["history"][today_date] = 0
self.stats["loras"][lora_hash]["history"][today_date] += 1
except Exception as e: except Exception as e:
logger.error(f"Error processing LoRA usage: {e}", exc_info=True) logger.error(f"Error processing LoRA usage: {e}", exc_info=True)
@@ -251,9 +352,11 @@ class UsageStats:
async def get_model_usage_count(self, model_type, sha256): async def get_model_usage_count(self, model_type, sha256):
"""Get usage count for a specific model by hash""" """Get usage count for a specific model by hash"""
if model_type == "checkpoint": if model_type == "checkpoint":
return self.stats["checkpoints"].get(sha256, 0) if sha256 in self.stats["checkpoints"]:
return self.stats["checkpoints"][sha256]["total"]
elif model_type == "lora": elif model_type == "lora":
return self.stats["loras"].get(sha256, 0) if sha256 in self.stats["loras"]:
return self.stats["loras"][sha256]["total"]
return 0 return 0
async def process_execution(self, prompt_id): async def process_execution(self, prompt_id):