Update prompt configuration and enhance Lora management functionality

- Expanded the prompt.json file with new configurations for KSampler, CheckpointLoaderSimple, and various CLIPTextEncode nodes.
- Introduced additional Lora management features, including a new Lora Stacker and improved trigger word handling.
- Enhanced the loras_widget.js to log the generated prompt when saving recipes directly, aiding in debugging and user feedback.
- Improved overall structure and organization of the prompt configurations for better maintainability.
This commit is contained in:
Will Miao
2025-03-21 16:35:52 +08:00
parent d4f300645d
commit 4bff17aa1a
27 changed files with 1990 additions and 1 deletions

View File

@@ -0,0 +1,6 @@
# This module contains processors for different node types in a ComfyUI workflow
from .base_processor import NodeProcessor, NODE_PROCESSORS, register_processor
from . import load_processors
__all__ = ["NodeProcessor", "NODE_PROCESSORS", "register_processor"]

View File

@@ -0,0 +1,77 @@
from abc import ABC, abstractmethod
from typing import Dict, Any, List, Optional, Set, Callable, Type
# Registry mapping a workflow node's class_type string to the processor class
# that knows how to extract information from that node type.
NODE_PROCESSORS: Dict[str, Type['NodeProcessor']] = {}
class NodeProcessor(ABC):
    """Base class for node processors that extract information from workflow nodes"""
    # Class-level attributes to define which node type this processor handles
    # and which fields should be extracted.
    # Fixed: NODE_CLASS_TYPE defaults to None, so the annotation must be
    # Optional[str] rather than plain str.
    NODE_CLASS_TYPE: Optional[str] = None
    REQUIRED_FIELDS: Set[str] = set()
    def __init__(self, node_id: str, node_data: Dict[str, Any], workflow: Dict[str, Any]):
        """
        Initialize a node processor
        Args:
            node_id: The ID of the node in the workflow
            node_data: The node data from the workflow
            workflow: The complete workflow data
        """
        self.node_id = node_id
        self.node_data = node_data
        self.workflow = workflow
        # Cache the node's inputs dict; missing key yields an empty dict so
        # subclasses can always do membership tests on self.inputs.
        self.inputs = node_data.get('inputs', {})
    @classmethod
    def register(cls):
        """Register this processor in the global registry (no-op when the
        subclass has not set NODE_CLASS_TYPE)."""
        if cls.NODE_CLASS_TYPE:
            NODE_PROCESSORS[cls.NODE_CLASS_TYPE] = cls
    @abstractmethod
    def process(self, workflow_parser) -> Dict[str, Any]:
        """
        Process the node and extract relevant information
        Args:
            workflow_parser: The workflow parser instance for resolving node references
        Returns:
            Dict containing extracted information from the node
        """
        pass
    def resolve_input(self, input_key: str, workflow_parser) -> Any:
        """
        Resolve an input value which might be a reference to another node
        Args:
            input_key: The input key to resolve
            workflow_parser: The workflow parser instance
        Returns:
            The resolved value, or None when the input is absent
        """
        input_value = self.inputs.get(input_key)
        # If not found, return None
        if input_value is None:
            return None
        # A two-element list is treated as a node reference [node_id, slot_index].
        # NOTE(review): the slot index is intentionally ignored here — the whole
        # processed result of the referenced node is returned, not one output slot.
        if isinstance(input_value, list) and len(input_value) == 2:
            ref_node_id, _slot_index = input_value
            return workflow_parser.process_node(ref_node_id)
        # Otherwise return the direct value
        return input_value
def register_processor(cls):
    """Class decorator: register *cls* in the processor registry and return it.

    Delegates to the class's own ``register()`` classmethod so the class stays
    usable undecorated as well.
    """
    cls.register()
    return cls

View File

@@ -0,0 +1,21 @@
from typing import Dict, Any
from .base_processor import NodeProcessor, register_processor
@register_processor
class CLIPSetLastLayerProcessor(NodeProcessor):
    """Processor for CLIPSetLastLayer nodes"""
    NODE_CLASS_TYPE = "CLIPSetLastLayer"
    REQUIRED_FIELDS = {"stop_at_clip_layer", "clip"}
    def process(self, workflow_parser) -> Dict[str, Any]:
        """Extract the clip-skip value, or return None when not derivable."""
        layer = self.inputs.get("stop_at_clip_layer")
        # The node stores clip skip as a negative layer index; metadata
        # reports it as a positive count, so only negative numbers qualify.
        if isinstance(layer, (int, float)) and layer < 0:
            return {"clip_skip": str(abs(layer))}
        return None

View File

@@ -0,0 +1,18 @@
from typing import Dict, Any
from .base_processor import NodeProcessor, register_processor
@register_processor
class CLIPTextEncodeProcessor(NodeProcessor):
    """Processor for CLIPTextEncode nodes"""
    NODE_CLASS_TYPE = "CLIPTextEncode"
    REQUIRED_FIELDS = {"text", "clip"}
    def process(self, workflow_parser) -> Dict[str, Any]:
        """Extract the prompt text; None when the node has no text input."""
        if "text" not in self.inputs:
            return None
        # "text" may be a literal string or a [node_id, slot] reference to
        # another node (e.g. a string-constant or join node).
        return self.resolve_input("text", workflow_parser)

View File

@@ -0,0 +1,21 @@
from typing import Dict, Any
from .base_processor import NodeProcessor, register_processor
@register_processor
class EmptyLatentImageProcessor(NodeProcessor):
    """Processor for EmptyLatentImage nodes"""
    NODE_CLASS_TYPE = "EmptyLatentImage"
    REQUIRED_FIELDS = {"width", "height", "batch_size"}
    def process(self, workflow_parser) -> Dict[str, Any]:
        """Extract image dimensions; empty dict when either one is missing."""
        # Only report a size when both dimensions are present.
        # NOTE(review): values are taken verbatim — presumably plain ints, not
        # node references; confirm against real workflows.
        if all(key in self.inputs for key in ("width", "height")):
            return {"width": self.inputs["width"], "height": self.inputs["height"]}
        return {}

View File

@@ -0,0 +1,27 @@
from typing import Dict, Any
from .base_processor import NodeProcessor, register_processor
@register_processor
class JoinStringsProcessor(NodeProcessor):
    """Processor for JoinStrings nodes"""
    NODE_CLASS_TYPE = "JoinStrings"
    REQUIRED_FIELDS = {"string1", "string2", "delimiter"}
    def process(self, workflow_parser) -> Dict[str, Any]:
        """Combine the two (possibly referenced) strings with the delimiter."""
        left = self.resolve_input("string1", workflow_parser)
        right = self.resolve_input("string2", workflow_parser)
        sep = self.inputs.get("delimiter", ", ")
        # With only one side resolved, pass it through unjoined; when both are
        # missing this correctly yields None.
        if left is None:
            return right
        if right is None:
            return left
        return f"{left}{sep}{right}"

View File

@@ -0,0 +1,46 @@
from typing import Dict, Any, Set
from .base_processor import NodeProcessor, register_processor
@register_processor
class KSamplerProcessor(NodeProcessor):
    """Processor for KSampler nodes"""
    NODE_CLASS_TYPE = "KSampler"
    REQUIRED_FIELDS = {"seed", "steps", "cfg", "sampler_name", "scheduler", "denoise",
                       "positive", "negative", "latent_image"}
    def process(self, workflow_parser) -> Dict[str, Any]:
        """Collect generation parameters (seed/steps/cfg/sampler, prompts, size)."""
        params = {}
        inputs = self.inputs
        # Numeric settings are copied stringified under their metadata names.
        for src, dest in (("seed", "seed"), ("steps", "steps"), ("cfg", "cfg_scale")):
            if src in inputs:
                params[dest] = str(inputs[src])
        if "sampler_name" in inputs:
            params["sampler"] = inputs["sampler_name"]
        # Prompt texts usually come from upstream CLIPTextEncode nodes.
        if "positive" in inputs:
            text = self.resolve_input("positive", workflow_parser)
            if text:
                params["prompt"] = text
        if "negative" in inputs:
            text = self.resolve_input("negative", workflow_parser)
            if text:
                params["negative_prompt"] = text
        # Image size comes from the upstream EmptyLatentImage node.
        if "latent_image" in inputs:
            latent = self.resolve_input("latent_image", workflow_parser)
            if latent and "width" in latent and "height" in latent:
                params["size"] = f"{latent['width']}x{latent['height']}"
        return params

View File

@@ -0,0 +1,15 @@
"""Module to load all node processors"""
# Import all processor types to register them
from .ksampler_processor import KSamplerProcessor
from .clip_text_encode_processor import CLIPTextEncodeProcessor
from .empty_latent_image_processor import EmptyLatentImageProcessor
from .join_strings_processor import JoinStringsProcessor
from .string_constant_processor import StringConstantProcessor
from .clip_set_last_layer_processor import CLIPSetLastLayerProcessor
from .trigger_word_toggle_processor import TriggerWordToggleProcessor
from .lora_loader_processor import LoraLoaderProcessor
from .lora_stacker_processor import LoraStackerProcessor
# Importing these modules runs their @register_processor decorators, so simply
# importing this package populates the NODE_PROCESSORS registry.

View File

@@ -0,0 +1,50 @@
from typing import Dict, Any, List
from .base_processor import NodeProcessor, register_processor
@register_processor
class LoraLoaderProcessor(NodeProcessor):
    """Processor for Lora Loader (LoraManager) nodes"""
    NODE_CLASS_TYPE = "Lora Loader (LoraManager)"
    REQUIRED_FIELDS = {"loras", "text", "lora_stack"}
    @staticmethod
    def _format_active_loras(loras) -> str:
        """Render active, non-dummy entries as space-separated <lora:name:strength> tags."""
        if not isinstance(loras, list):
            return ""
        tags = [
            f"<lora:{entry['name']}:{entry['strength']}>"
            for entry in loras
            if isinstance(entry, dict)
            and entry.get("active", False)
            and not entry.get("_isDummy", False)
            and "name" in entry
            and "strength" in entry
        ]
        return " ".join(tags)
    def process(self, workflow_parser) -> Dict[str, Any]:
        """Extract lora text, this node's active loras, and the merged stack."""
        result = {}
        # Pass the raw lora text through unchanged.
        if "text" in self.inputs:
            result["lora_text"] = self.inputs.get("text", "")
        # This node's own active loras.
        if "loras" in self.inputs:
            rendered = self._format_active_loras(self.inputs["loras"])
            if rendered:
                result["active_loras"] = rendered
        # Merge in an upstream stack, if connected: upstream entries go first.
        if "lora_stack" in self.inputs:
            upstream = self.resolve_input("lora_stack", workflow_parser)
            if isinstance(upstream, dict) and "lora_stack" in upstream:
                if "active_loras" in result:
                    result["active_loras"] = f"{upstream['lora_stack']} {result['active_loras']}"
                else:
                    result["active_loras"] = upstream["lora_stack"]
        # The combined list doubles as this node's outgoing stack.
        if "active_loras" in result:
            result["lora_stack"] = result["active_loras"]
        return result

View File

@@ -0,0 +1,52 @@
from typing import Dict, Any, List
from .base_processor import NodeProcessor, register_processor
@register_processor
class LoraStackerProcessor(NodeProcessor):
    """Processor for Lora Stacker (LoraManager) nodes"""
    NODE_CLASS_TYPE = "Lora Stacker (LoraManager)"
    REQUIRED_FIELDS = {"loras", "text", "lora_stack"}
    @staticmethod
    def _format_active_loras(loras) -> str:
        """Render active, non-dummy entries as space-separated <lora:name:strength> tags."""
        if not isinstance(loras, list):
            return ""
        tags = [
            f"<lora:{entry['name']}:{entry['strength']}>"
            for entry in loras
            if isinstance(entry, dict)
            and entry.get("active", False)
            and not entry.get("_isDummy", False)
            and "name" in entry
            and "strength" in entry
        ]
        return " ".join(tags)
    def process(self, workflow_parser) -> Dict[str, Any]:
        """Extract lora text and build the outgoing lora stack."""
        result = {}
        # Pass the raw lora text through unchanged.
        if "text" in self.inputs:
            result["lora_text"] = self.inputs.get("text", "")
        # This node's own active loras.
        if "loras" in self.inputs:
            rendered = self._format_active_loras(self.inputs["loras"])
            if rendered:
                result["active_loras"] = rendered
        if "lora_stack" in self.inputs:
            upstream = self.resolve_input("lora_stack", workflow_parser)
            if isinstance(upstream, dict) and "lora_stack" in upstream:
                # Unlike the loader, this node's own loras precede the
                # upstream stack in the combined output.
                own = result.get("active_loras")
                result["lora_stack"] = (
                    f"{own} {upstream['lora_stack']}" if own else upstream["lora_stack"]
                )
            elif "active_loras" in result:
                # Upstream gave no usable stack; fall back to our own loras.
                result["lora_stack"] = result["active_loras"]
        elif "active_loras" in result:
            # No stack input at all; our own loras form the stack.
            result["lora_stack"] = result["active_loras"]
        return result

View File

@@ -0,0 +1,22 @@
from typing import Dict, Any
from .base_processor import NodeProcessor, register_processor
@register_processor
class StringConstantProcessor(NodeProcessor):
    """Processor for StringConstantMultiline nodes"""
    NODE_CLASS_TYPE = "StringConstantMultiline"
    REQUIRED_FIELDS = {"string", "strip_newlines"}
    def process(self, workflow_parser) -> Dict[str, Any]:
        """Return the node's string content; None when no string input exists."""
        if "string" not in self.inputs:
            return None
        value = self.inputs["string"]
        # Optionally flatten the multiline text onto a single line.
        if self.inputs.get("strip_newlines", False) and isinstance(value, str):
            value = value.replace("\n", " ")
        return value

View File

@@ -0,0 +1,33 @@
from typing import Dict, Any, List
from .base_processor import NodeProcessor, register_processor
@register_processor
class TriggerWordToggleProcessor(NodeProcessor):
    """Processor for TriggerWord Toggle (LoraManager) nodes"""
    NODE_CLASS_TYPE = "TriggerWord Toggle (LoraManager)"
    REQUIRED_FIELDS = {"toggle_trigger_words", "group_mode"}
    def process(self, workflow_parser) -> Dict[str, Any]:
        """Return active trigger words joined by commas, or None when none apply."""
        entries = self.inputs.get("toggle_trigger_words")
        if not isinstance(entries, list):
            return None
        # Keep only active, non-dummy entries that carry text.
        words = [
            entry["text"]
            for entry in entries
            if isinstance(entry, dict)
            and entry.get("active", False)
            and not entry.get("_isDummy", False)
            and "text" in entry
        ]
        return ", ".join(words) if words else None