feat: add lora stack combiner node

This commit is contained in:
Will Miao
2026-03-29 08:28:00 +08:00
parent 12bbb0572d
commit 2626dbab8e
3 changed files with 82 additions and 0 deletions

View File

@@ -7,6 +7,7 @@ try: # pragma: no cover - import fallback for pytest collection
from .py.nodes.prompt import PromptLM
from .py.nodes.text import TextLM
from .py.nodes.lora_stacker import LoraStackerLM
from .py.nodes.lora_stack_combiner import LoraStackCombinerLM
from .py.nodes.save_image import SaveImageLM
from .py.nodes.debug_metadata import DebugMetadataLM
from .py.nodes.wanvideo_lora_select import WanVideoLoraSelectLM
@@ -39,6 +40,9 @@ except (
"py.nodes.trigger_word_toggle" "py.nodes.trigger_word_toggle"
).TriggerWordToggleLM ).TriggerWordToggleLM
LoraStackerLM = importlib.import_module("py.nodes.lora_stacker").LoraStackerLM LoraStackerLM = importlib.import_module("py.nodes.lora_stacker").LoraStackerLM
LoraStackCombinerLM = importlib.import_module(
"py.nodes.lora_stack_combiner"
).LoraStackCombinerLM
SaveImageLM = importlib.import_module("py.nodes.save_image").SaveImageLM SaveImageLM = importlib.import_module("py.nodes.save_image").SaveImageLM
DebugMetadataLM = importlib.import_module("py.nodes.debug_metadata").DebugMetadataLM DebugMetadataLM = importlib.import_module("py.nodes.debug_metadata").DebugMetadataLM
WanVideoLoraSelectLM = importlib.import_module( WanVideoLoraSelectLM = importlib.import_module(
@@ -63,6 +67,7 @@ NODE_CLASS_MAPPINGS = {
UNETLoaderLM.NAME: UNETLoaderLM,
TriggerWordToggleLM.NAME: TriggerWordToggleLM,
LoraStackerLM.NAME: LoraStackerLM,
LoraStackCombinerLM.NAME: LoraStackCombinerLM,
SaveImageLM.NAME: SaveImageLM,
DebugMetadataLM.NAME: DebugMetadataLM,
WanVideoLoraSelectLM.NAME: WanVideoLoraSelectLM,

View File

@@ -0,0 +1,26 @@
class LoraStackCombinerLM:
    """Node that merges two LORA_STACK inputs into one combined stack.

    Entries from ``lora_stack_a`` come first, followed by entries from
    ``lora_stack_b``; a falsy input (``None`` or empty) contributes nothing.
    """

    # Display name and menu category used by the node registry.
    NAME = "Lora Stack Combiner (LoraManager)"
    CATEGORY = "Lora Manager/stackers"

    @classmethod
    def INPUT_TYPES(cls):
        """Declare the two required LORA_STACK inputs for the node UI."""
        return {
            "required": {
                "lora_stack_a": ("LORA_STACK",),
                "lora_stack_b": ("LORA_STACK",),
            },
        }

    RETURN_TYPES = ("LORA_STACK",)
    RETURN_NAMES = ("LORA_STACK",)
    FUNCTION = "combine_stacks"

    def combine_stacks(self, lora_stack_a, lora_stack_b):
        """Return a one-tuple holding the concatenation of both stacks.

        Order is preserved (all of A, then all of B); the inputs are not
        mutated — a fresh list is always returned.
        """
        merged = list(lora_stack_a or [])
        merged.extend(lora_stack_b or [])
        return (merged,)

View File

@@ -0,0 +1,51 @@
from py.nodes.lora_stack_combiner import LoraStackCombinerLM
def test_combine_stacks_preserves_order():
    """All of stack A must appear before all of stack B in the output."""
    first = [
        ("folder/a.safetensors", 0.7, 0.6),
        ("folder/b.safetensors", 0.8, 0.8),
    ]
    second = [
        ("folder/c.safetensors", 1.0, 0.9),
    ]
    (result,) = LoraStackCombinerLM().combine_stacks(first, second)
    assert result == first + second
def test_combine_stacks_returns_second_when_first_empty():
    """An empty stack A yields exactly the contents of stack B."""
    second = [("folder/c.safetensors", 1.0, 0.9)]
    (result,) = LoraStackCombinerLM().combine_stacks([], second)
    assert result == second
def test_combine_stacks_returns_first_when_second_empty():
    """An empty stack B yields exactly the contents of stack A."""
    first = [("folder/a.safetensors", 0.7, 0.6)]
    (result,) = LoraStackCombinerLM().combine_stacks(first, [])
    assert result == first
def test_combine_stacks_returns_empty_when_both_empty():
    """Two empty inputs combine into an empty stack."""
    (result,) = LoraStackCombinerLM().combine_stacks([], [])
    assert result == []
def test_combine_stacks_allows_duplicate_entries():
    """The combiner does not deduplicate: the same entry may appear twice."""
    shared = ("folder/shared.safetensors", 0.9, 0.5)
    (result,) = LoraStackCombinerLM().combine_stacks([shared], [shared])
    assert result == [shared, shared]