refactor(nodes): standardize node class names with LM suffix

Rename all node classes to use consistent 'LM' suffix pattern:
- LoraCyclerNode → LoraCyclerLM
- LoraManagerLoader → LoraLoaderLM
- LoraManagerTextLoader → LoraTextLoaderLM
- LoraStacker → LoraStackerLM
- LoraRandomizerNode → LoraRandomizerLM
- LoraPoolNode → LoraPoolLM
- WanVideoLoraSelectFromText → WanVideoLoraTextSelectLM
- DebugMetadata → DebugMetadataLM
- TriggerWordToggle → TriggerWordToggleLM
- PromptLoraManager → PromptLM

Updated:
- Core node class definitions (9 files)
- NODE_CLASS_MAPPINGS in __init__.py
- Node type mappings in node_extractors.py
- All related test imports and references
- Logger prefixes for consistency

Frontend extension names remain unchanged (LoraManager.LoraStacker, etc.)
This commit is contained in:
Will Miao
2026-01-25 10:38:10 +08:00
parent b71b3f99dc
commit 41101ad5c6
14 changed files with 74 additions and 74 deletions

View File

@@ -1,16 +1,16 @@
try: # pragma: no cover - import fallback for pytest collection
from .py.lora_manager import LoraManager
from .py.nodes.lora_loader import LoraManagerLoader, LoraManagerTextLoader
from .py.nodes.trigger_word_toggle import TriggerWordToggle
from .py.nodes.prompt import PromptLoraManager
from .py.nodes.lora_stacker import LoraStacker
from .py.nodes.lora_loader import LoraLoaderLM, LoraTextLoaderLM
from .py.nodes.trigger_word_toggle import TriggerWordToggleLM
from .py.nodes.prompt import PromptLM
from .py.nodes.lora_stacker import LoraStackerLM
from .py.nodes.save_image import SaveImageLM
from .py.nodes.debug_metadata import DebugMetadata
from .py.nodes.debug_metadata import DebugMetadataLM
from .py.nodes.wanvideo_lora_select import WanVideoLoraSelectLM
from .py.nodes.wanvideo_lora_select_from_text import WanVideoLoraSelectFromText
from .py.nodes.lora_pool import LoraPoolNode
from .py.nodes.lora_randomizer import LoraRandomizerNode
from .py.nodes.lora_cycler import LoraCyclerNode
from .py.nodes.wanvideo_lora_select_from_text import WanVideoLoraTextSelectLM
from .py.nodes.lora_pool import LoraPoolLM
from .py.nodes.lora_randomizer import LoraRandomizerLM
from .py.nodes.lora_cycler import LoraCyclerLM
from .py.metadata_collector import init as init_metadata_collector
except (
ImportError
@@ -23,48 +23,48 @@ except (
if str(package_root) not in sys.path:
sys.path.append(str(package_root))
PromptLoraManager = importlib.import_module("py.nodes.prompt").PromptLoraManager
PromptLM = importlib.import_module("py.nodes.prompt").PromptLM
LoraManager = importlib.import_module("py.lora_manager").LoraManager
LoraManagerLoader = importlib.import_module(
LoraLoaderLM = importlib.import_module(
"py.nodes.lora_loader"
).LoraManagerLoader
LoraManagerTextLoader = importlib.import_module(
).LoraLoaderLM
LoraTextLoaderLM = importlib.import_module(
"py.nodes.lora_loader"
).LoraManagerTextLoader
TriggerWordToggle = importlib.import_module(
).LoraTextLoaderLM
TriggerWordToggleLM = importlib.import_module(
"py.nodes.trigger_word_toggle"
).TriggerWordToggle
LoraStacker = importlib.import_module("py.nodes.lora_stacker").LoraStacker
).TriggerWordToggleLM
LoraStackerLM = importlib.import_module("py.nodes.lora_stacker").LoraStackerLM
SaveImageLM = importlib.import_module("py.nodes.save_image").SaveImageLM
DebugMetadata = importlib.import_module("py.nodes.debug_metadata").DebugMetadata
DebugMetadataLM = importlib.import_module("py.nodes.debug_metadata").DebugMetadataLM
WanVideoLoraSelectLM = importlib.import_module(
"py.nodes.wanvideo_lora_select"
).WanVideoLoraSelectLM
WanVideoLoraSelectFromText = importlib.import_module(
WanVideoLoraTextSelectLM = importlib.import_module(
"py.nodes.wanvideo_lora_select_from_text"
).WanVideoLoraSelectFromText
LoraPoolNode = importlib.import_module("py.nodes.lora_pool").LoraPoolNode
LoraRandomizerNode = importlib.import_module(
).WanVideoLoraTextSelectLM
LoraPoolLM = importlib.import_module("py.nodes.lora_pool").LoraPoolLM
LoraRandomizerLM = importlib.import_module(
"py.nodes.lora_randomizer"
).LoraRandomizerNode
LoraCyclerNode = importlib.import_module(
).LoraRandomizerLM
LoraCyclerLM = importlib.import_module(
"py.nodes.lora_cycler"
).LoraCyclerNode
).LoraCyclerLM
init_metadata_collector = importlib.import_module("py.metadata_collector").init
NODE_CLASS_MAPPINGS = {
PromptLoraManager.NAME: PromptLoraManager,
LoraManagerLoader.NAME: LoraManagerLoader,
LoraManagerTextLoader.NAME: LoraManagerTextLoader,
TriggerWordToggle.NAME: TriggerWordToggle,
LoraStacker.NAME: LoraStacker,
PromptLM.NAME: PromptLM,
LoraLoaderLM.NAME: LoraLoaderLM,
LoraTextLoaderLM.NAME: LoraTextLoaderLM,
TriggerWordToggleLM.NAME: TriggerWordToggleLM,
LoraStackerLM.NAME: LoraStackerLM,
SaveImageLM.NAME: SaveImageLM,
DebugMetadata.NAME: DebugMetadata,
DebugMetadataLM.NAME: DebugMetadataLM,
WanVideoLoraSelectLM.NAME: WanVideoLoraSelectLM,
WanVideoLoraSelectFromText.NAME: WanVideoLoraSelectFromText,
LoraPoolNode.NAME: LoraPoolNode,
LoraRandomizerNode.NAME: LoraRandomizerNode,
LoraCyclerNode.NAME: LoraCyclerNode,
WanVideoLoraTextSelectLM.NAME: WanVideoLoraTextSelectLM,
LoraPoolLM.NAME: LoraPoolLM,
LoraRandomizerLM.NAME: LoraRandomizerLM,
LoraCyclerLM.NAME: LoraCyclerLM,
}
WEB_DIRECTORY = "./web/comfyui"

View File

@@ -714,10 +714,10 @@ NODE_EXTRACTORS = {
"UNETLoader": UNETLoaderExtractor, # Updated to use dedicated extractor
"UnetLoaderGGUF": UNETLoaderExtractor, # Updated to use dedicated extractor
"LoraLoader": LoraLoaderExtractor,
"LoraManagerLoader": LoraLoaderManagerExtractor,
"LoraLoaderLM": LoraLoaderManagerExtractor,
# Conditioning
"CLIPTextEncode": CLIPTextEncodeExtractor,
"PromptLoraManager": CLIPTextEncodeExtractor,
"PromptLM": CLIPTextEncodeExtractor,
"CLIPTextEncodeFlux": CLIPTextEncodeFluxExtractor, # Add CLIPTextEncodeFlux
"WAS_Text_to_Conditioning": CLIPTextEncodeExtractor,
"AdvancedCLIPTextEncode": CLIPTextEncodeExtractor, # From https://github.com/BlenderNeko/ComfyUI_ADV_CLIP_emb

View File

@@ -4,7 +4,7 @@ from ..metadata_collector.metadata_processor import MetadataProcessor
logger = logging.getLogger(__name__)
class DebugMetadata:
class DebugMetadataLM:
NAME = "Debug Metadata (LoraManager)"
CATEGORY = "Lora Manager/utils"
DESCRIPTION = "Debug node to verify metadata_processor functionality"

View File

@@ -13,7 +13,7 @@ from ..utils.utils import get_lora_info
logger = logging.getLogger(__name__)
class LoraCyclerNode:
class LoraCyclerLM:
"""Node that sequentially cycles through LoRAs from a pool"""
NAME = "Lora Cycler (LoraManager)"
@@ -72,7 +72,7 @@ class LoraCyclerNode:
total_count = len(lora_list)
if total_count == 0:
logger.warning("[LoraCyclerNode] No LoRAs available in pool")
logger.warning("[LoraCyclerLM] No LoRAs available in pool")
return {
"result": ([],),
"ui": {
@@ -103,7 +103,7 @@ class LoraCyclerNode:
lora_path, _ = get_lora_info(current_lora["file_name"])
if not lora_path:
logger.warning(
f"[LoraCyclerNode] Could not find path for LoRA: {current_lora['file_name']}"
f"[LoraCyclerLM] Could not find path for LoRA: {current_lora['file_name']}"
)
lora_stack = []
else:

View File

@@ -6,7 +6,7 @@ from .utils import FlexibleOptionalInputType, any_type, extract_lora_name, get_l
logger = logging.getLogger(__name__)
class LoraManagerLoader:
class LoraLoaderLM:
NAME = "Lora Loader (LoraManager)"
CATEGORY = "Lora Manager/loaders"
@@ -128,7 +128,7 @@ class LoraManagerLoader:
return (model, clip, trigger_words_text, formatted_loras_text)
class LoraManagerTextLoader:
class LoraTextLoaderLM:
NAME = "LoRA Text Loader (LoraManager)"
CATEGORY = "Lora Manager/loaders"

View File

@@ -10,7 +10,7 @@ import logging
logger = logging.getLogger(__name__)
class LoraPoolNode:
class LoraPoolLM:
"""
A node that defines LoRA filter criteria through a Vue-based widget.
@@ -67,7 +67,7 @@ class LoraPoolNode:
filters = pool_config.get("filters", self._default_config()["filters"])
# Log for debugging
logger.debug(f"[LoraPoolNode] Processing filters: {filters}")
logger.debug(f"[LoraPoolLM] Processing filters: {filters}")
return (filters,)

View File

@@ -15,7 +15,7 @@ from .utils import extract_lora_name
logger = logging.getLogger(__name__)
class LoraRandomizerNode:
class LoraRandomizerLM:
"""Node that randomly selects LoRAs from a pool"""
NAME = "Lora Randomizer (LoraManager)"
@@ -72,7 +72,7 @@ class LoraRandomizerNode:
loras = self._preprocess_loras_input(loras)
roll_mode = randomizer_config.get("roll_mode", "always")
logger.debug(f"[LoraRandomizerNode] roll_mode: {roll_mode}")
logger.debug(f"[LoraRandomizerLM] roll_mode: {roll_mode}")
# Dual seed mechanism for batch queue synchronization
# execution_seed: seed for generating execution_stack (= previous next_seed)
@@ -127,7 +127,7 @@ class LoraRandomizerNode:
lora_path, trigger_words = get_lora_info(lora["name"])
if not lora_path:
logger.warning(
f"[LoraRandomizerNode] Could not find path for LoRA: {lora['name']}"
f"[LoraRandomizerLM] Could not find path for LoRA: {lora['name']}"
)
continue

View File

@@ -6,7 +6,7 @@ import logging
logger = logging.getLogger(__name__)
class LoraStacker:
class LoraStackerLM:
NAME = "Lora Stacker (LoraManager)"
CATEGORY = "Lora Manager/stackers"

View File

@@ -1,6 +1,6 @@
from typing import Any, Optional
class PromptLoraManager:
class PromptLM:
"""Encodes text (and optional trigger words) into CLIP conditioning."""
NAME = "Prompt (LoraManager)"

View File

@@ -6,7 +6,7 @@ import logging
logger = logging.getLogger(__name__)
class TriggerWordToggle:
class TriggerWordToggleLM:
NAME = "TriggerWord Toggle (LoraManager)"
CATEGORY = "Lora Manager/utils"
DESCRIPTION = "Toggle trigger words on/off"

View File

@@ -7,7 +7,7 @@ import logging
logger = logging.getLogger(__name__)
# Define the new node class
class WanVideoLoraSelectFromText:
class WanVideoLoraTextSelectLM:
# Display name of the node in the UI
NAME = "WanVideo Lora Select From Text (LoraManager)"
# Category the node belongs to
@@ -117,9 +117,9 @@ class WanVideoLoraSelectFromText:
return (loras_list, trigger_words_text, active_loras_text)
NODE_CLASS_MAPPINGS = {
"WanVideoLoraSelectFromText": WanVideoLoraSelectFromText
"WanVideoLoraTextSelectLM": WanVideoLoraTextSelectLM
}
NODE_DISPLAY_NAME_MAPPINGS = {
"WanVideoLoraSelectFromText": "WanVideo Lora Select From Text (LoraManager)"
"WanVideoLoraTextSelectLM": "WanVideo Lora Select From Text (LoraManager)"
}

View File

@@ -129,22 +129,22 @@ def test_metadata_registry_caches_and_rehydrates(populated_registry):
def test_lora_manager_cache_updates_when_loras_removed(metadata_registry):
import nodes
class LoraManagerLoader: # type: ignore[too-many-ancestors]
__name__ = "LoraManagerLoader"
class LoraLoaderLM: # type: ignore[too-many-ancestors]
__name__ = "LoraLoaderLM"
nodes.NODE_CLASS_MAPPINGS["LoraManagerLoader"] = LoraManagerLoader
nodes.NODE_CLASS_MAPPINGS["LoraLoaderLM"] = LoraLoaderLM
prompt_graph = {
"lora_node": {"class_type": "LoraManagerLoader", "inputs": {}},
"lora_node": {"class_type": "LoraLoaderLM", "inputs": {}},
}
prompt = SimpleNamespace(original_prompt=prompt_graph)
cache_key = "lora_node:LoraManagerLoader"
cache_key = "lora_node:LoraLoaderLM"
metadata_registry.start_collection("prompt1")
metadata_registry.set_current_prompt(prompt)
metadata_registry.record_node_execution(
"lora_node",
"LoraManagerLoader",
"LoraLoaderLM",
{"loras": [[{"name": "foo", "strength": 0.8, "active": True}]]},
None,
)
@@ -152,7 +152,7 @@ def test_lora_manager_cache_updates_when_loras_removed(metadata_registry):
metadata_registry.start_collection("prompt2")
metadata_registry.set_current_prompt(prompt)
metadata_registry.record_node_execution("lora_node", "LoraManagerLoader", {"loras": [[]]}, None)
metadata_registry.record_node_execution("lora_node", "LoraLoaderLM", {"loras": [[]]}, None)
assert cache_key not in metadata_registry.node_cache

View File

@@ -1,17 +1,17 @@
"""Tests for LoraRandomizerNode roll_mode functionality"""
"""Tests for LoraRandomizerLM roll_mode functionality"""
from unittest.mock import AsyncMock
import pytest
from py.nodes.lora_randomizer import LoraRandomizerNode
from py.nodes.lora_randomizer import LoraRandomizerLM
from py.services import service_registry
@pytest.fixture
def randomizer_node():
"""Create a LoraRandomizerNode instance for testing"""
return LoraRandomizerNode()
"""Create a LoraRandomizerLM instance for testing"""
return LoraRandomizerLM()
@pytest.fixture

View File

@@ -1,8 +1,8 @@
from py.nodes.trigger_word_toggle import TriggerWordToggle
from py.nodes.trigger_word_toggle import TriggerWordToggleLM
def test_group_mode_preserves_parenthesized_groups():
node = TriggerWordToggle()
node = TriggerWordToggleLM()
trigger_data = [
{
"text": "flat color, dark theme",
@@ -42,7 +42,7 @@ def test_group_mode_preserves_parenthesized_groups():
def test_duplicate_words_keep_individual_active_states():
node = TriggerWordToggle()
node = TriggerWordToggleLM()
trigger_data = [
{"text": "A", "active": True, "strength": None, "highlighted": False},
{"text": "A", "active": False, "strength": None, "highlighted": False},
@@ -61,7 +61,7 @@ def test_duplicate_words_keep_individual_active_states():
def test_duplicate_words_preserve_strength_per_instance():
node = TriggerWordToggle()
node = TriggerWordToggleLM()
trigger_data = [
{"text": "(A:0.50)", "active": False, "strength": 0.50, "highlighted": False},
{"text": "A", "active": True, "strength": 1.2, "highlighted": False},
@@ -81,7 +81,7 @@ def test_duplicate_words_preserve_strength_per_instance():
def test_duplicate_groups_respect_active_state():
node = TriggerWordToggle()
node = TriggerWordToggleLM()
trigger_data = [
{"text": "A, B", "active": False, "strength": None, "highlighted": False},
{"text": "A, B", "active": True, "strength": None, "highlighted": False},
@@ -100,7 +100,7 @@ def test_duplicate_groups_respect_active_state():
def test_trigger_words_override_different_from_original():
node = TriggerWordToggle()
node = TriggerWordToggleLM()
trigger_data = [
{
"text": "69yottea_style_illu",
@@ -127,7 +127,7 @@ def test_trigger_words_override_different_from_original():
def test_trigger_words_override_with_new_format():
node = TriggerWordToggle()
node = TriggerWordToggleLM()
(filtered,) = node.process_trigger_words(
id="node",
@@ -142,7 +142,7 @@ def test_trigger_words_override_with_new_format():
def test_trigger_words_same_as_original_processes_toggle():
node = TriggerWordToggle()
node = TriggerWordToggleLM()
trigger_data = [
{"text": "word1", "active": True, "strength": None, "highlighted": False},
{"text": "word2", "active": False, "strength": None, "highlighted": False},
@@ -162,7 +162,7 @@ def test_trigger_words_same_as_original_processes_toggle():
def test_trigger_words_override_empty_toggle_data():
node = TriggerWordToggle()
node = TriggerWordToggleLM()
(filtered,) = node.process_trigger_words(
id="node",