ComfyUI-Lora-Manager/py/nodes/wanvideo_lora_select_from_text.py
Will Miao 2ac0eb0f9d fix(wanvideo): correct LoRA path resolution and name truncation for extra folder paths
- Use get_lora_info_absolute to obtain correct absolute paths for loras
  in LM extra folder paths, instead of folder_paths.get_full_path which
  only searches ComfyUI's standard loras directories (returned None)
- Fix name field truncation: str.split('.')[0] stopped at the first dot,
  replaced with os.path.splitext to only strip the file extension
- Add _relpath_within_loras helper to preserve subdirectory info in the
  name field, matching WanVideoWrapper's os.path.splitext(lora)[0] format
2026-05-02 14:55:12 +08:00
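
The name-truncation fix in the second bullet is easy to see in isolation. A
minimal sketch (the dotted filename is hypothetical):

    import os

    fname = "wan.motion.v2.safetensors"
    print(fname.split('.')[0])         # "wan" -- old code cut at the first dot
    print(os.path.splitext(fname)[0])  # "wan.motion.v2" -- only the extension is stripped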

import os
import logging

from ..utils.utils import get_lora_info_absolute
from ..config import config
from .utils import any_type

# Initialize the module-level logger
logger = logging.getLogger(__name__)


def _relpath_within_loras(abs_path):
    """Return abs_path relative to the first lora root that contains it,
    falling back to the basename when no root matches."""
    all_roots = list(config.loras_roots or []) + list(config.extra_loras_roots or [])
    for root in all_roots:
        try:
            rel = os.path.relpath(abs_path, root)
        except ValueError:
            # On Windows, relpath raises ValueError for paths on different drives.
            continue
        # Only accept roots that actually contain the path; for any other
        # root, relpath climbs out of it via "..".
        if rel != os.pardir and not rel.startswith(os.pardir + os.sep):
            return rel
    return os.path.basename(abs_path)
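
# Example (hypothetical paths), assuming config.loras_roots == ["/data/loras"]:
#   _relpath_within_loras("/data/loras/wan/motion_v1.safetensors")
#   returns "wan/motion_v1.safetensors", so the node's "name" field keeps the
#   "wan/" subdirectory (matching WanVideoWrapper's os.path.splitext(lora)[0]).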


# Define the node class
class WanVideoLoraTextSelectLM:
    # Display name of the node in the UI
    NAME = "WanVideo Lora Select From Text (LoraManager)"
    # Category the node belongs to
    CATEGORY = "Lora Manager/stackers"

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "low_mem_load": ("BOOLEAN", {"default": False, "tooltip": "Load LoRA models with less VRAM usage, slower loading. This affects ALL LoRAs, not just the current ones. No effect if merge_lora is False"}),
                "merge_lora": ("BOOLEAN", {"default": True, "tooltip": "Merge LoRAs into the model, otherwise they are loaded on the fly. Always disabled for GGUF and scaled fp8 models. This affects ALL LoRAs, not just the current one"}),
                "lora_syntax": ("STRING", {
                    "multiline": True,
                    "forceInput": True,
                    "tooltip": "Connect a TEXT output for LoRA syntax: <lora:name:strength>"
                }),
            },
            "optional": {
                "prev_lora": ("WANVIDLORA",),
                "blocks": ("BLOCKS",)
            }
        }
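
    # Accepted token forms in lora_syntax (names here are hypothetical):
    #   <lora:wan/motion:0.8>      -> model and clip strength both 0.8
    #   <lora:wan/motion:0.8:1.0>  -> model strength 0.8, clip strength 1.0
    # Tokens with a missing '>' or a non-numeric strength are skipped.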
    RETURN_TYPES = ("WANVIDLORA", "STRING", "STRING")
    RETURN_NAMES = ("lora", "trigger_words", "active_loras")
    FUNCTION = "process_loras_from_syntax"

    def process_loras_from_syntax(self, lora_syntax, low_mem_load=False, merge_lora=True, **kwargs):
        text_to_process = lora_syntax

        blocks = kwargs.get('blocks', {})
        selected_blocks = blocks.get("selected_blocks", {})
        layer_filter = blocks.get("layer_filter", "")

        loras_list = []
        all_trigger_words = []
        active_loras = []

        # Chain onto any previously stacked LoRAs.
        prev_lora = kwargs.get('prev_lora', None)
        if prev_lora is not None:
            loras_list.extend(prev_lora)

        # low_mem_load only applies when LoRAs are merged into the model.
        if not merge_lora:
            low_mem_load = False

        # Parse each <lora:name:strength[:clip_strength]> token.
        parts = text_to_process.split('<lora:')
        for part in parts[1:]:
            end_index = part.find('>')
            if end_index == -1:
                continue
            content = part[:end_index]
            lora_parts = content.split(':')

            lora_name_raw = ""
            model_strength = 1.0
            clip_strength = 1.0

            if len(lora_parts) == 2:
                # <lora:name:strength> -- one strength for both model and clip
                lora_name_raw = lora_parts[0].strip()
                try:
                    model_strength = float(lora_parts[1])
                    clip_strength = model_strength
                except (ValueError, IndexError):
                    logger.warning(f"Invalid strength for LoRA '{lora_name_raw}'. Skipping.")
                    continue
            elif len(lora_parts) >= 3:
                # <lora:name:model_strength:clip_strength>
                lora_name_raw = lora_parts[0].strip()
                try:
                    model_strength = float(lora_parts[1])
                    clip_strength = float(lora_parts[2])
                except (ValueError, IndexError):
                    logger.warning(f"Invalid strengths for LoRA '{lora_name_raw}'. Skipping.")
                    continue
            else:
                continue

            # Resolve the absolute path (searches LM extra folder paths as
            # well as the standard loras directories) and collect triggers.
            lora_path, trigger_words = get_lora_info_absolute(lora_name_raw)
            if not lora_path:
                # Defensive guard: skip entries whose file could not be resolved.
                logger.warning(f"Could not resolve path for LoRA '{lora_name_raw}'. Skipping.")
                continue

            lora_item = {
                "path": lora_path,
                "strength": model_strength,
                # Keep subdirectory info, strip only the file extension.
                "name": os.path.splitext(_relpath_within_loras(lora_path))[0],
                "blocks": selected_blocks,
                "layer_filter": layer_filter,
                "low_mem_load": low_mem_load,
                "merge_loras": merge_lora,
            }
            loras_list.append(lora_item)
            active_loras.append((lora_name_raw, model_strength, clip_strength))
            all_trigger_words.extend(trigger_words)

        trigger_words_text = ",, ".join(all_trigger_words) if all_trigger_words else ""

        # Rebuild the syntax string for the active_loras output; include the
        # clip strength only when it differs from the model strength.
        formatted_loras = []
        for name, model_strength, clip_strength in active_loras:
            if abs(model_strength - clip_strength) > 0.001:
                formatted_loras.append(f"<lora:{name}:{model_strength}:{clip_strength}>")
            else:
                formatted_loras.append(f"<lora:{name}:{model_strength}>")
        active_loras_text = " ".join(formatted_loras)

        return (loras_list, trigger_words_text, active_loras_text)
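
# Example (hypothetical input): lora_syntax = "<lora:wan/motion:0.8> <lora:detail:0.5:0.7>"
# yields active_loras == "<lora:wan/motion:0.8> <lora:detail:0.5:0.7>"; the clip strength
# is echoed only when it differs from the model strength by more than 0.001.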