mirror of
https://github.com/willmiao/ComfyUI-Lora-Manager.git
synced 2026-03-21 13:12:12 -03:00
feat: add dynamic trigger_words inputs to PromptLM node
- Backend: Add _AllContainer for dynamic input validation bypass
- Backend: Modify INPUT_TYPES to support trigger_words1, trigger_words2, etc.
- Backend: Update encode() to collect all trigger_words* from kwargs
- Frontend: Create prompt_dynamic_inputs.js extension
- Frontend: Implement onConnectionsChange to auto-add/remove input slots
- Frontend: Renumber inputs sequentially on connect/disconnect

Based on Impact Pack's Switch (Any) node dynamic input pattern.
This commit is contained in:
@@ -1,4 +1,16 @@
|
||||
from typing import Any, Optional
|
||||
from typing import Any
|
||||
import inspect
|
||||
|
||||
|
||||
class _AllContainer:
|
||||
"""Container that accepts any key for dynamic input validation."""
|
||||
|
||||
def __contains__(self, item):
|
||||
return True
|
||||
|
||||
def __getitem__(self, key):
|
||||
return ("STRING", {"forceInput": True})
|
||||
|
||||
|
||||
class PromptLM:
|
||||
"""Encodes text (and optional trigger words) into CLIP conditioning."""
|
||||
@@ -7,11 +19,27 @@ class PromptLM:
|
||||
CATEGORY = "Lora Manager/conditioning"
|
||||
DESCRIPTION = (
|
||||
"Encodes a text prompt using a CLIP model into an embedding that can be used "
|
||||
"to guide the diffusion model towards generating specific images."
|
||||
"to guide the diffusion model towards generating specific images. "
|
||||
"Supports dynamic trigger words inputs."
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def INPUT_TYPES(cls):
|
||||
dyn_inputs = {
|
||||
"trigger_words1": (
|
||||
"STRING",
|
||||
{
|
||||
"forceInput": True,
|
||||
"tooltip": "Trigger words to prepend. Connect to add more inputs.",
|
||||
},
|
||||
),
|
||||
}
|
||||
|
||||
# Bypass validation for dynamic inputs during graph execution
|
||||
stack = inspect.stack()
|
||||
if len(stack) > 2 and stack[2].function == "get_input_info":
|
||||
dyn_inputs = _AllContainer()
|
||||
|
||||
return {
|
||||
"required": {
|
||||
"text": (
|
||||
@@ -23,36 +51,34 @@ class PromptLM:
|
||||
},
|
||||
),
|
||||
"clip": (
|
||||
'CLIP',
|
||||
"CLIP",
|
||||
{"tooltip": "The CLIP model used for encoding the text."},
|
||||
),
|
||||
},
|
||||
"optional": {
|
||||
"trigger_words": (
|
||||
'STRING',
|
||||
{
|
||||
"forceInput": True,
|
||||
"tooltip": (
|
||||
"Optional trigger words to prepend to the text before "
|
||||
"encoding."
|
||||
)
|
||||
},
|
||||
)
|
||||
},
|
||||
"optional": dyn_inputs,
|
||||
}
|
||||
|
||||
RETURN_TYPES = ('CONDITIONING', 'STRING',)
|
||||
RETURN_NAMES = ('CONDITIONING', 'PROMPT',)
|
||||
RETURN_TYPES = ("CONDITIONING", "STRING")
|
||||
RETURN_NAMES = ("CONDITIONING", "PROMPT")
|
||||
OUTPUT_TOOLTIPS = (
|
||||
"A conditioning containing the embedded text used to guide the diffusion model.",
|
||||
)
|
||||
FUNCTION = "encode"
|
||||
|
||||
def encode(self, text: str, clip: Any, trigger_words: Optional[str] = None):
|
||||
prompt = text
|
||||
def encode(self, text: str, clip: Any, **kwargs):
|
||||
# Collect all trigger words from dynamic inputs
|
||||
trigger_words = []
|
||||
for key, value in kwargs.items():
|
||||
if key.startswith("trigger_words") and value:
|
||||
trigger_words.append(value)
|
||||
|
||||
# Build final prompt
|
||||
if trigger_words:
|
||||
prompt = ", ".join([trigger_words, text])
|
||||
prompt = ", ".join(trigger_words + [text])
|
||||
else:
|
||||
prompt = text
|
||||
|
||||
from nodes import CLIPTextEncode # type: ignore
|
||||
|
||||
conditioning = CLIPTextEncode().encode(clip, prompt)[0]
|
||||
return (conditioning, prompt,)
|
||||
return (conditioning, prompt)
|
||||
112
web/comfyui/prompt_dynamic_inputs.js
Normal file
112
web/comfyui/prompt_dynamic_inputs.js
Normal file
@@ -0,0 +1,112 @@
|
||||
import { app } from "../../scripts/app.js";
|
||||
|
||||
/**
|
||||
* Extension for PromptLM node to support dynamic trigger_words inputs.
|
||||
* Based on the dynamic input pattern from Impact Pack's Switch (Any) node.
|
||||
*/
|
||||
app.registerExtension({
|
||||
name: "Comfy.LoraManager.PromptLM",
|
||||
|
||||
async beforeRegisterNodeDef(nodeType, nodeData, app) {
|
||||
if (nodeData.name !== "Prompt (LoraManager)") {
|
||||
return;
|
||||
}
|
||||
|
||||
const onConnectionsChange = nodeType.prototype.onConnectionsChange;
|
||||
|
||||
nodeType.prototype.onConnectionsChange = function(type, index, connected, link_info) {
|
||||
const stackTrace = new Error().stack;
|
||||
|
||||
// Skip during graph loading/pasting to avoid interference
|
||||
if (stackTrace.includes('loadGraphData') || stackTrace.includes('pasteFromClipboard')) {
|
||||
return onConnectionsChange?.apply?.(this, arguments);
|
||||
}
|
||||
|
||||
// Skip subgraph operations
|
||||
if (stackTrace.includes('convertToSubgraph') || stackTrace.includes('Subgraph.configure')) {
|
||||
return onConnectionsChange?.apply?.(this, arguments);
|
||||
}
|
||||
|
||||
if (!link_info) {
|
||||
return onConnectionsChange?.apply?.(this, arguments);
|
||||
}
|
||||
|
||||
// Handle input connections (type === 1)
|
||||
if (type === 1) {
|
||||
const input = this.inputs[index];
|
||||
|
||||
// Only process trigger_words inputs
|
||||
if (!input || !input.name.startsWith('trigger_words')) {
|
||||
return onConnectionsChange?.apply?.(this, arguments);
|
||||
}
|
||||
|
||||
// Count existing trigger_words inputs
|
||||
let triggerWordCount = 0;
|
||||
for (const inp of this.inputs) {
|
||||
if (inp.name.startsWith('trigger_words')) {
|
||||
triggerWordCount++;
|
||||
}
|
||||
}
|
||||
|
||||
// Renumber all trigger_words inputs sequentially
|
||||
let slotIndex = 1;
|
||||
for (const inp of this.inputs) {
|
||||
if (inp.name.startsWith('trigger_words')) {
|
||||
inp.name = `trigger_words${slotIndex}`;
|
||||
slotIndex++;
|
||||
}
|
||||
}
|
||||
|
||||
// Add new input slot if connected and this was the last one
|
||||
if (connected) {
|
||||
const lastTriggerIndex = triggerWordCount;
|
||||
if (index === lastTriggerIndex || index === this.inputs.findIndex(i => i.name === `trigger_words${lastTriggerIndex}`)) {
|
||||
this.addInput(`trigger_words${slotIndex}`, "STRING", {
|
||||
forceInput: true,
|
||||
tooltip: "Trigger words to prepend. Connect to add more inputs."
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Remove disconnected empty input slots (but keep at least one)
|
||||
if (!connected && triggerWordCount > 1) {
|
||||
// Check if this input is now empty and can be removed
|
||||
const disconnectedInput = this.inputs[index];
|
||||
if (disconnectedInput && disconnectedInput.name.startsWith('trigger_words')) {
|
||||
// Only remove if it has no link and is not the last trigger_words input
|
||||
const isLastTriggerSlot = index === this.inputs.findLastIndex(i => i.name.startsWith('trigger_words'));
|
||||
if (!isLastTriggerSlot && !disconnectedInput.link) {
|
||||
this.removeInput(index);
|
||||
|
||||
// Renumber again after removal
|
||||
let newSlotIndex = 1;
|
||||
for (const inp of this.inputs) {
|
||||
if (inp.name.startsWith('trigger_words')) {
|
||||
inp.name = `trigger_words${newSlotIndex}`;
|
||||
newSlotIndex++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return onConnectionsChange?.apply?.(this, arguments);
|
||||
};
|
||||
},
|
||||
|
||||
nodeCreated(node, app) {
|
||||
if (node.comfyClass !== "Prompt (LoraManager)") {
|
||||
return;
|
||||
}
|
||||
|
||||
// Ensure at least one trigger_words input exists on creation
|
||||
const hasTriggerWords = node.inputs.some(inp => inp.name.startsWith('trigger_words'));
|
||||
if (!hasTriggerWords) {
|
||||
node.addInput("trigger_words1", "STRING", {
|
||||
forceInput: true,
|
||||
tooltip: "Trigger words to prepend. Connect to add more inputs."
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
Reference in New Issue
Block a user