mirror of https://github.com/willmiao/ComfyUI-Lora-Manager.git
feat: Standardize LoRA extraction format and enhance input handling in node extractors
@@ -139,12 +139,16 @@ class MetadataProcessor:
             else:
                 break  # Can't follow further
 
-        # Extract LoRAs
+        # Extract LoRAs using the standardized format
         lora_parts = []
         for node_id, lora_info in metadata.get("loras", {}).items():
-            name = lora_info.get("name", "unknown")
-            strength = lora_info.get("strength_model", 1.0)
-            lora_parts.append(f"<lora:{name}:{strength}>")
+            # Access the lora_list from the standardized format
+            lora_list = lora_info.get("lora_list", [])
+            for lora in lora_list:
+                name = lora.get("name", "unknown")
+                strength = lora.get("strength", 1.0)
+                lora_parts.append(f"<lora:{name}:{strength}>")
 
         params["loras"] = " ".join(lora_parts)
 
         # Set default clip_skip value
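For reference, a minimal sketch of the standardized structure this loop now consumes. The node id, LoRA names, and strengths below are hypothetical example values, not taken from the repository:

```python
# Hypothetical example of the standardized metadata["loras"] structure:
# each node entry carries a lora_list, so one node can contribute
# several LoRAs to the final prompt string.
metadata = {
    "loras": {
        "12": {
            "lora_list": [
                {"name": "detail_tweaker", "strength": 0.8},
                {"name": "film_grain", "strength": 0.45},
            ],
            "node_id": "12",
        }
    }
}

lora_parts = []
for node_id, lora_info in metadata.get("loras", {}).items():
    for lora in lora_info.get("lora_list", []):
        name = lora.get("name", "unknown")
        strength = lora.get("strength", 1.0)
        lora_parts.append(f"<lora:{name}:{strength}>")

print(" ".join(lora_parts))
# <lora:detail_tweaker:0.8> <lora:film_grain:0.45>
```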
@@ -1,3 +1,6 @@
+import os
+
+
 class NodeMetadataExtractor:
     """Base class for node-specific metadata extraction"""
 
@@ -87,13 +90,16 @@ class LoraLoaderExtractor(NodeMetadataExtractor):
             return
 
         lora_name = inputs.get("lora_name")
-        strength_model = inputs.get("strength_model", 1.0)
-        strength_clip = inputs.get("strength_clip", 1.0)
+        strength_model = round(float(inputs.get("strength_model", 1.0)), 2)
 
+        # Use the standardized format with lora_list
         metadata["loras"][node_id] = {
-            "name": lora_name,
-            "strength_model": strength_model,
-            "strength_clip": strength_clip,
+            "lora_list": [
+                {
+                    "name": lora_name,
+                    "strength": strength_model
+                }
+            ],
             "node_id": node_id
         }
 
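A standalone sketch of what the rewritten LoraLoaderExtractor now records; the node id "5", the name "epic_style", and the strength values are made-up inputs for illustration:

```python
inputs = {"lora_name": "epic_style", "strength_model": 0.756, "strength_clip": 1.0}
metadata = {"loras": {}}
node_id = "5"

lora_name = inputs.get("lora_name")
# Model strength is now coerced to float and rounded to two decimals.
strength_model = round(float(inputs.get("strength_model", 1.0)), 2)  # 0.76

metadata["loras"][node_id] = {
    "lora_list": [{"name": lora_name, "strength": strength_model}],
    "node_id": node_id,
}
# Note: strength_clip is no longer stored; only the rounded model
# strength survives in the standardized lora_list entry.
```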
@@ -120,34 +126,48 @@ class LoraLoaderManagerExtractor(NodeMetadataExtractor):
     def extract(node_id, inputs, outputs, metadata):
         if not inputs:
             return
 
-        # Handle LoraManager nodes which might store loras differently
-        if "loras" in inputs:
-            loras = inputs.get("loras", [])
-            if isinstance(loras, list):
-                active_loras = []
-                # Filter for active loras (may be a list of dicts with 'active' flag)
-                for lora in loras:
-                    if isinstance(lora, dict) and lora.get("active", True) and not lora.get("_isDummy", False):
-                        active_loras.append({
-                            "name": lora.get("name", ""),
-                            "strength": lora.get("strength", 1.0)
-                        })
-
-                if active_loras:
-                    metadata["loras"][node_id] = {
-                        "lora_list": active_loras,
-                        "node_id": node_id
-                    }
-
-        # If there's a direct text field with lora definitions
-        if "text" in inputs:
-            text = inputs.get("text", "")
-            if text and "<lora:" in text:
-                metadata["loras"][node_id] = {
-                    "raw_text": text,
-                    "node_id": node_id
-                }
+        active_loras = []
+
+        # Process lora_stack if available
+        if "lora_stack" in inputs:
+            lora_stack = inputs.get("lora_stack", [])
+            for lora_path, model_strength, clip_strength in lora_stack:
+                # Extract lora name from path (following the format in lora_loader.py)
+                lora_name = os.path.splitext(os.path.basename(lora_path))[0]
+                active_loras.append({
+                    "name": lora_name,
+                    "strength": model_strength
+                })
+
+        # Process loras from inputs
+        if "loras" in inputs:
+            loras_data = inputs.get("loras", [])
+
+            # Handle new format: {'loras': {'__value__': [...]}}
+            if isinstance(loras_data, dict) and '__value__' in loras_data:
+                loras_list = loras_data['__value__']
+            # Handle old format: {'loras': [...]}
+            elif isinstance(loras_data, list):
+                loras_list = loras_data
+            else:
+                loras_list = []
+
+            # Filter for active loras
+            for lora in loras_list:
+                if isinstance(lora, dict) and lora.get("active", True) and not lora.get("_isDummy", False):
+                    active_loras.append({
+                        "name": lora.get("name", ""),
+                        "strength": float(lora.get("strength", 1.0))
+                    })
+
+        if active_loras:
+            metadata["loras"][node_id] = {
+                "lora_list": active_loras,
+                "node_id": node_id
+            }
+
+        print(f"Active LoRAs for node {node_id}: {active_loras}")
 
 # Registry of node-specific extractors
 NODE_EXTRACTORS = {
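The two input shapes the new LoraLoaderManagerExtractor handles can be exercised with hypothetical inputs; the file path, names, and strengths below are invented for illustration:

```python
import os

inputs = {
    # lora_stack holds (path, model_strength, clip_strength) tuples;
    # the name is derived from the file stem, as in the diff above.
    "lora_stack": [("loras/style/ink_wash.safetensors", 0.7, 1.0)],
    # The new widget format wraps the list in {'__value__': [...]};
    # inactive or dummy entries are filtered out.
    "loras": {"__value__": [
        {"name": "line_art", "strength": 0.5, "active": True},
        {"name": "disabled_style", "strength": 1.0, "active": False},
    ]},
}

path = inputs["lora_stack"][0][0]
print(os.path.splitext(os.path.basename(path))[0])  # ink_wash

# After extract() runs on these inputs, the expected result is:
# metadata["loras"][node_id]["lora_list"] ==
#     [{"name": "ink_wash", "strength": 0.7},
#      {"name": "line_art", "strength": 0.5}]
```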
@@ -156,7 +176,7 @@ NODE_EXTRACTORS = {
     "KSampler": SamplerExtractor,
     "LoraLoader": LoraLoaderExtractor,
     "EmptyLatentImage": ImageSizeExtractor,
-    "Lora Loader (LoraManager)": LoraLoaderManagerExtractor,
+    "LoraManagerLoader": LoraLoaderManagerExtractor,
     "SamplerCustomAdvanced": SamplerExtractor,  # Add SamplerCustomAdvanced
     "UNETLoader": CheckpointLoaderExtractor,  # Add UNETLoader
     # Add other nodes as needed
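For context, a hedged sketch of how a registry like NODE_EXTRACTORS is typically consulted. The extract signature matches the diff, but this dispatch loop and the workflow dict shape are assumptions, not code from this repository:

```python
def collect_metadata(workflow_nodes):
    # Assumed shape: workflow_nodes maps node_id -> {"class_type", "inputs", ...}
    metadata = {"loras": {}}
    for node_id, node in workflow_nodes.items():
        extractor = NODE_EXTRACTORS.get(node.get("class_type"))
        if extractor:
            extractor.extract(node_id, node.get("inputs", {}), node.get("outputs"), metadata)
    return metadata
```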