Mirror of https://github.com/willmiao/ComfyUI-Lora-Manager.git (synced 2026-05-06 08:26:45 -03:00).
Commit: fix(metadata): add MyOriginalWaifu prompt extractors.
This commit is contained in:
@@ -164,6 +164,64 @@ class CLIPTextEncodeExtractor(NodeMetadataExtractor):
|
||||
metadata[PROMPTS][node_id]["conditioning"] = conditioning
|
||||
|
||||
|
||||
class MyOriginalWaifuTextExtractor(NodeMetadataExtractor):
    """Extractor for ComfyUI-MyOriginalWaifu TextProvider nodes.

    ``extract`` captures the raw ``positive``/``negative`` input strings;
    ``update`` overwrites them with the node's processed output tuple.
    """

    @staticmethod
    def extract(node_id, inputs, outputs, metadata):
        # Guard clause: nothing to record for a node with no inputs.
        if not inputs:
            return

        pos = inputs.get("positive", "")
        neg = inputs.get("negative", "")

        # Only register an entry when at least one prompt is non-empty.
        if not (pos or neg):
            return
        metadata[PROMPTS][node_id] = {
            "positive_text": pos,
            "negative_text": neg,
            "node_id": node_id,
        }

    @staticmethod
    def update(node_id, outputs, metadata):
        # The processed prompts arrive as the node's first output tuple,
        # expected shape: (positive, negative).
        result = _first_output_tuple(outputs)
        if not result or len(result) < 2:
            return

        entry = _ensure_prompt_metadata(metadata, node_id)
        entry["positive_text"] = result[0]
        entry["negative_text"] = result[1]
|
||||
|
||||
|
||||
class MyOriginalWaifuClipExtractor(NodeMetadataExtractor):
    """Extractor for ComfyUI-MyOriginalWaifu ClipProvider nodes.

    ``extract`` captures the raw ``positive``/``negative`` prompt strings;
    ``update`` stores the encoded conditioning pair from the node output.
    """

    @staticmethod
    def extract(node_id, inputs, outputs, metadata):
        # Guard clause: nothing to record for a node with no inputs.
        if not inputs:
            return

        pos = inputs.get("positive", "")
        neg = inputs.get("negative", "")

        # Only register an entry when at least one prompt is non-empty.
        if not (pos or neg):
            return
        metadata[PROMPTS][node_id] = {
            "positive_text": pos,
            "negative_text": neg,
            "node_id": node_id,
        }

    @staticmethod
    def update(node_id, outputs, metadata):
        # The encoded conditionings arrive as the node's first output tuple,
        # expected shape: (positive_encoded, negative_encoded).
        result = _first_output_tuple(outputs)
        if not result or len(result) < 2:
            return

        entry = _ensure_prompt_metadata(metadata, node_id)
        entry["positive_encoded"] = result[0]
        entry["negative_encoded"] = result[1]
|
||||
|
||||
|
||||
def _ensure_prompt_metadata(metadata, node_id):
    """Return the prompt-metadata dict for *node_id*, creating it if absent.

    The extractor ``update`` hooks assign into the returned dict
    (``prompt_metadata["positive_text"] = ...``), so this must return the
    entry. Previously the function returned ``None``, which made every
    caller fail with ``TypeError`` on item assignment.
    """
    if node_id not in metadata[PROMPTS]:
        metadata[PROMPTS][node_id] = {"node_id": node_id}
    return metadata[PROMPTS][node_id]
|
||||
@@ -985,6 +1043,8 @@ NODE_EXTRACTORS = {
|
||||
"smZ_CLIPTextEncode": CLIPTextEncodeExtractor, # From https://github.com/shiimizu/ComfyUI_smZNodes
|
||||
"CR_ApplyControlNetStack": CR_ApplyControlNetStackExtractor, # Add CR_ApplyControlNetStack
|
||||
"PCTextEncode": CLIPTextEncodeExtractor, # From https://github.com/asagi4/comfyui-prompt-control
|
||||
"TextProvider": MyOriginalWaifuTextExtractor, # ComfyUI-MyOriginalWaifu
|
||||
"ClipProvider": MyOriginalWaifuClipExtractor, # ComfyUI-MyOriginalWaifu
|
||||
"ControlNetApplyAdvanced": ControlNetApplyAdvancedExtractor,
|
||||
"ConditioningCombine": ConditioningCombineExtractor,
|
||||
"SetNode": SetNodeExtractor,
|
||||
|
||||
@@ -177,6 +177,169 @@ def test_attention_bias_clip_text_encode_prompts_are_collected(metadata_registry
|
||||
assert prompt_results["negative_prompt"] == "low quality"
|
||||
|
||||
|
||||
def test_myoriginalwaifu_text_provider_uses_processed_prompt_outputs(
    metadata_registry, monkeypatch
):
    """Processed TextProvider outputs must replace the raw input prompts.

    Graph: TextProvider -> two CLIPTextEncode nodes -> KSampler. After the
    update hook sees the node's output tuple, the collected prompt text must
    be the *processed* strings, not the raw ``positive``/``negative`` inputs.
    """
    # Workflow graph as ComfyUI would submit it; list values like
    # ["text_provider", 0] are (node_id, output_index) links.
    prompt_graph = {
        "text_provider": {
            "class_type": "TextProvider",
            "inputs": {
                "positive": "raw positive",
                "negative": "raw negative",
            },
        },
        "encode_pos": {
            "class_type": "CLIPTextEncode",
            "inputs": {"text": ["text_provider", 0], "clip": ["clip", 0]},
        },
        "encode_neg": {
            "class_type": "CLIPTextEncode",
            "inputs": {"text": ["text_provider", 1], "clip": ["clip", 0]},
        },
        "sampler": {
            "class_type": "KSampler",
            "inputs": {
                "seed": 123,
                "steps": 20,
                "cfg": 7.0,
                "sampler_name": "Euler",
                "scheduler": "karras",
                "denoise": 1.0,
                "positive": ["encode_pos", 0],
                "negative": ["encode_neg", 0],
                "latent_image": {"samples": types.SimpleNamespace(shape=(1, 4, 16, 16))},
            },
        },
    }
    prompt = SimpleNamespace(original_prompt=prompt_graph)

    # Sentinel conditioning objects — identity is all that matters here.
    pos_conditioning = object()
    neg_conditioning = object()

    monkeypatch.setattr(metadata_processor, "standalone_mode", False)

    metadata_registry.start_collection("prompt-myoriginalwaifu-text")
    metadata_registry.set_current_prompt(prompt)

    # Record the node with its *raw* inputs first...
    metadata_registry.record_node_execution(
        "text_provider",
        "TextProvider",
        {"positive": "raw positive", "negative": "raw negative"},
        None,
    )
    # ...then simulate execution finishing with processed prompt outputs.
    metadata_registry.update_node_execution(
        "text_provider",
        "TextProvider",
        [("processed positive", "processed negative")],
    )
    metadata_registry.record_node_execution(
        "encode_pos", "CLIPTextEncode", {"text": "processed positive"}, None
    )
    metadata_registry.update_node_execution(
        "encode_pos", "CLIPTextEncode", [(pos_conditioning,)]
    )
    metadata_registry.record_node_execution(
        "encode_neg", "CLIPTextEncode", {"text": "processed negative"}, None
    )
    metadata_registry.update_node_execution(
        "encode_neg", "CLIPTextEncode", [(neg_conditioning,)]
    )
    # Sampler receives the resolved conditioning objects so provenance can
    # link prompts back through the encode nodes.
    metadata_registry.record_node_execution(
        "sampler",
        "KSampler",
        {
            "seed": 123,
            "steps": 20,
            "cfg": 7.0,
            "sampler_name": "Euler",
            "scheduler": "karras",
            "denoise": 1.0,
            "positive": pos_conditioning,
            "negative": neg_conditioning,
            "latent_image": {"samples": types.SimpleNamespace(shape=(1, 4, 16, 16))},
        },
        None,
    )

    metadata = metadata_registry.get_metadata("prompt-myoriginalwaifu-text")
    params = MetadataProcessor.extract_generation_params(metadata)

    # The update hook must have overwritten the raw inputs with the
    # processed outputs, both in raw metadata and in the final params.
    assert metadata[PROMPTS]["text_provider"]["positive_text"] == "processed positive"
    assert metadata[PROMPTS]["text_provider"]["negative_text"] == "processed negative"
    assert params["prompt"] == "processed positive"
    assert params["negative_prompt"] == "processed negative"
|
||||
|
||||
|
||||
def test_myoriginalwaifu_clip_provider_prompts_are_collected_without_clip_text_encode(
    metadata_registry, monkeypatch
):
    """ClipProvider prompts must be collected with no CLIPTextEncode present.

    Graph: ClipProvider -> KSampler directly. The node both holds the prompt
    text and emits the conditioning pair, so prompt extraction must work
    without any intermediate CLIPTextEncode node.
    """
    # Workflow graph; ["clip_provider", 0]/[..., 1] are the node's two
    # conditioning outputs wired straight into the sampler.
    prompt_graph = {
        "clip_provider": {
            "class_type": "ClipProvider",
            "inputs": {
                "positive": "direct positive",
                "negative": "direct negative",
                "clip": ["clip", 0],
            },
        },
        "sampler": {
            "class_type": "KSampler",
            "inputs": {
                "seed": 123,
                "steps": 20,
                "cfg": 7.0,
                "sampler_name": "Euler",
                "scheduler": "karras",
                "denoise": 1.0,
                "positive": ["clip_provider", 0],
                "negative": ["clip_provider", 1],
                "latent_image": {"samples": types.SimpleNamespace(shape=(1, 4, 16, 16))},
            },
        },
    }
    prompt = SimpleNamespace(original_prompt=prompt_graph)

    # Sentinel conditioning objects — identity is all that matters here.
    pos_conditioning = object()
    neg_conditioning = object()

    monkeypatch.setattr(metadata_processor, "standalone_mode", False)

    metadata_registry.start_collection("prompt-myoriginalwaifu-clip")
    metadata_registry.set_current_prompt(prompt)

    # Record the provider with its text inputs, then simulate it emitting
    # the (positive, negative) conditioning tuple.
    metadata_registry.record_node_execution(
        "clip_provider",
        "ClipProvider",
        {"positive": "direct positive", "negative": "direct negative"},
        None,
    )
    metadata_registry.update_node_execution(
        "clip_provider", "ClipProvider", [(pos_conditioning, neg_conditioning)]
    )
    # Sampler consumes the provider's conditionings directly.
    metadata_registry.record_node_execution(
        "sampler",
        "KSampler",
        {
            "seed": 123,
            "steps": 20,
            "cfg": 7.0,
            "sampler_name": "Euler",
            "scheduler": "karras",
            "denoise": 1.0,
            "positive": pos_conditioning,
            "negative": neg_conditioning,
            "latent_image": {"samples": types.SimpleNamespace(shape=(1, 4, 16, 16))},
        },
        None,
    )

    metadata = metadata_registry.get_metadata("prompt-myoriginalwaifu-clip")
    params = MetadataProcessor.extract_generation_params(metadata)

    # Prompts must resolve even though no CLIPTextEncode node ran.
    assert params["prompt"] == "direct positive"
    assert params["negative_prompt"] == "direct negative"
|
||||
|
||||
|
||||
def test_conditioning_provenance_recovers_combined_controlnet_prompts(
|
||||
metadata_registry, monkeypatch
|
||||
):
|
||||
|
||||
Reference in New Issue
Block a user