feat(usage-stats): add extractors for rgthree Power LoRA Loader and TensorRT Loader

Fixes #394 — LoRAs loaded via the rgthree Power LoRA Loader were not
tracked in usage statistics because no extractor existed for that node.

New extractors:
- RgthreePowerLoraLoaderExtractor: parses LORA_* kwargs, respects
  the per-LoRA 'on' toggle
- TensorRTLoaderExtractor: parses engine filename (strips _$profile
  suffix) as best-effort for vanilla TRT. If the output MODEL has
  attachments["source_model"] (set by NubeBuster fork), overrides
  with the real checkpoint name.

TensorRTRefitLoader and TensorRTLoaderAuto take a MODEL input whose
upstream checkpoint loader is already tracked — no extractor needed.

Also adds a name:<filename> fallback and warning log in both
_process_checkpoints and _process_loras when hash lookup fails.
This commit is contained in:
NubeBuster
2026-04-05 16:39:42 +02:00
parent 727d0ef043
commit b711ac468a
2 changed files with 107 additions and 32 deletions

View File

@@ -317,21 +317,23 @@ class UsageStats:
# Get hash for this checkpoint
model_hash = checkpoint_scanner.get_hash_by_filename(model_filename)
if model_hash:
# Update stats for this checkpoint with date tracking
if model_hash not in self.stats["checkpoints"]:
self.stats["checkpoints"][model_hash] = {
"total": 0,
"history": {}
}
# Increment total count
self.stats["checkpoints"][model_hash]["total"] += 1
# Increment today's count
if today_date not in self.stats["checkpoints"][model_hash]["history"]:
self.stats["checkpoints"][model_hash]["history"][today_date] = 0
self.stats["checkpoints"][model_hash]["history"][today_date] += 1
if not model_hash:
logger.warning(f"No hash found for checkpoint '{model_filename}', tracking by name")
stat_key = model_hash or f"name:{model_filename}"
# Update stats for this checkpoint with date tracking
if stat_key not in self.stats["checkpoints"]:
self.stats["checkpoints"][stat_key] = {
"total": 0,
"history": {}
}
# Increment total count
self.stats["checkpoints"][stat_key]["total"] += 1
# Increment today's count
if today_date not in self.stats["checkpoints"][stat_key]["history"]:
self.stats["checkpoints"][stat_key]["history"][today_date] = 0
self.stats["checkpoints"][stat_key]["history"][today_date] += 1
except Exception as e:
logger.error(f"Error processing checkpoint usage: {e}", exc_info=True)
@@ -360,21 +362,23 @@ class UsageStats:
# Get hash for this LoRA
lora_hash = lora_scanner.get_hash_by_filename(lora_name)
if lora_hash:
# Update stats for this LoRA with date tracking
if lora_hash not in self.stats["loras"]:
self.stats["loras"][lora_hash] = {
"total": 0,
"history": {}
}
# Increment total count
self.stats["loras"][lora_hash]["total"] += 1
# Increment today's count
if today_date not in self.stats["loras"][lora_hash]["history"]:
self.stats["loras"][lora_hash]["history"][today_date] = 0
self.stats["loras"][lora_hash]["history"][today_date] += 1
if not lora_hash:
logger.warning(f"No hash found for LoRA '{lora_name}', tracking by name")
stat_key = lora_hash or f"name:{lora_name}"
# Update stats for this LoRA with date tracking
if stat_key not in self.stats["loras"]:
self.stats["loras"][stat_key] = {
"total": 0,
"history": {}
}
# Increment total count
self.stats["loras"][stat_key]["total"] += 1
# Increment today's count
if today_date not in self.stats["loras"][stat_key]["history"]:
self.stats["loras"][stat_key]["history"][today_date] = 0
self.stats["loras"][stat_key]["history"][today_date] += 1
except Exception as e:
logger.error(f"Error processing LoRA usage: {e}", exc_info=True)