Allow an empty LoRA (a "no LoRAs" option) in the LoRA Pool

This commit is contained in:
dogatech
2026-03-11 16:15:18 -07:00
committed by pixelpaws
parent 03e1fa75c5
commit 8dd849892d
15 changed files with 170 additions and 59 deletions

View File

@@ -100,11 +100,16 @@ class LoraCyclerLM:
current_lora = lora_list[clamped_index - 1]
# Build LORA_STACK with single LoRA
lora_path, _ = get_lora_info(current_lora["file_name"])
if current_lora["file_name"] == "None":
lora_path = None
else:
lora_path, _ = get_lora_info(current_lora["file_name"])
if not lora_path:
logger.warning(
f"[LoraCyclerLM] Could not find path for LoRA: {current_lora['file_name']}"
)
if current_lora["file_name"] != "None":
logger.warning(
f"[LoraCyclerLM] Could not find path for LoRA: {current_lora['file_name']}"
)
lora_stack = []
else:
# Normalize path separators

View File

@@ -53,6 +53,8 @@ class LoraLoaderLM:
# First process lora_stack if available
if lora_stack:
for lora_path, model_strength, clip_strength in lora_stack:
if lora_path == "None" or not lora_path:
continue
# Extract lora name and convert to absolute path
# lora_stack stores relative paths, but load_torch_file needs absolute paths
lora_name = extract_lora_name(lora_path)
@@ -78,7 +80,7 @@ class LoraLoaderLM:
# Then process loras from kwargs with support for both old and new formats
loras_list = get_loras_list(kwargs)
for lora in loras_list:
if not lora.get('active', False):
if not lora.get('active', False) or lora.get('name') == "None":
continue
lora_name = lora['name']
@@ -197,6 +199,8 @@ class LoraTextLoaderLM:
# First process lora_stack if available
if lora_stack:
for lora_path, model_strength, clip_strength in lora_stack:
if lora_path == "None" or not lora_path:
continue
# Extract lora name and convert to absolute path
# lora_stack stores relative paths, but load_torch_file needs absolute paths
lora_name = extract_lora_name(lora_path)
@@ -223,6 +227,8 @@ class LoraTextLoaderLM:
parsed_loras = self.parse_lora_syntax(lora_syntax)
for lora in parsed_loras:
lora_name = lora['name']
if lora_name == "None":
continue
model_strength = lora['model_strength']
clip_strength = lora['clip_strength']

View File

@@ -82,6 +82,7 @@ class LoraPoolLM:
"folders": {"include": [], "exclude": []},
"favoritesOnly": False,
"license": {"noCreditRequired": False, "allowSelling": False},
"includeEmptyLora": False,
},
"preview": {"matchCount": 0, "lastUpdated": 0},
}

View File

@@ -120,7 +120,7 @@ class LoraRandomizerLM:
"""
lora_stack = []
for lora in loras:
if not lora.get("active", False):
if not lora.get("active", False) or lora.get("name") == "None":
continue
# Get file path

View File

@@ -38,6 +38,8 @@ class LoraStackerLM:
stack.extend(lora_stack)
# Get trigger words from existing stack entries
for lora_path, _, _ in lora_stack:
if lora_path == "None" or not lora_path:
continue
lora_name = extract_lora_name(lora_path)
_, trigger_words = get_lora_info(lora_name)
all_trigger_words.extend(trigger_words)
@@ -45,7 +47,7 @@ class LoraStackerLM:
# Process loras from kwargs with support for both old and new formats
loras_list = get_loras_list(kwargs)
for lora in loras_list:
if not lora.get('active', False):
if not lora.get('active', False) or lora.get('name') == "None":
continue
lora_name = lora['name']