Mirror of https://github.com/willmiao/ComfyUI-Lora-Manager.git
Synced 2026-03-24 22:52:12 -03:00
refactor: move No LoRA feature from LoRA Pool to Lora Cycler widget
Move the 'empty/no LoRA' cycling functionality from the LoRA Pool node to the Lora Cycler widget for cleaner architecture.

Frontend changes:
- Add include_no_lora field to CyclerConfig interface
- Add includeNoLora state and logic to useLoraCyclerState composable
- Add toggle UI in LoraCyclerSettingsView with special styling
- Show 'No LoRA' entry in LoraListModal when enabled
- Update LoraCyclerWidget to integrate new logic

Backend changes:
- lora_cycler.py reads include_no_lora from config
- Calculate effective_total_count (actual count + 1 when enabled)
- Return empty lora_stack when on No LoRA position
- Return actual LoRA count in total_count (not effective count)

Reverted files to pre-PR state:
- lora_loader.py, lora_pool.py, lora_randomizer.py, lora_stacker.py
- lora_routes.py, lora_service.py
- LoraPoolWidget.vue and related files

Related to PR #861

Co-authored-by: dogatech <dogatech@dogatech.home>
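For orientation, here is a minimal standalone sketch of the cycling scheme the message describes; the function name cycle_position and its arguments are illustrative, not the node's actual API. When the option is enabled, the "No LoRA" slot occupies the last 1-based position of the effective range:

def cycle_position(lora_names, current_index, include_no_lora):
    # Effective range grows by one when the "No LoRA" slot is enabled
    total_count = len(lora_names)
    effective_total_count = total_count + 1 if include_no_lora else total_count
    # Clamp to the 1-based effective range, then test for the extra slot
    clamped_index = max(1, min(current_index, effective_total_count))
    is_no_lora = include_no_lora and clamped_index == effective_total_count
    name = "No LoRA" if is_no_lora else lora_names[clamped_index - 1]
    # Wrap the next index once it passes the effective end
    next_index = clamped_index + 1
    if next_index > effective_total_count:
        next_index = 1
    return name, next_index

# With two LoRAs the cycle visits a, b, No LoRA, then wraps back to index 1
assert cycle_position(["a", "b"], 3, True) == ("No LoRA", 1)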
@@ -56,6 +56,9 @@ class LoraCyclerLM:
         clip_strength = float(cycler_config.get("clip_strength", 1.0))
         sort_by = "filename"
 
+        # Include "no lora" option
+        include_no_lora = cycler_config.get("include_no_lora", False)
+
         # Dual-index mechanism for batch queue synchronization
         execution_index = cycler_config.get("execution_index")  # Can be None
         # next_index_from_config = cycler_config.get("next_index")  # Not used on backend
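One practical consequence of the .get default above: configs saved before this change simply lack the key, so they keep the old behavior. A quick illustration (the config dict is hypothetical):

# Pre-update config without the new key: the feature stays off
cycler_config = {"model_strength": 1.0}
include_no_lora = cycler_config.get("include_no_lora", False)
assert include_no_lora is False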
@@ -71,7 +74,10 @@
 
         total_count = len(lora_list)
 
-        if total_count == 0:
+        # Calculate effective total count (includes no lora option if enabled)
+        effective_total_count = total_count + 1 if include_no_lora else total_count
+
+        if total_count == 0 and not include_no_lora:
             logger.warning("[LoraCyclerLM] No LoRAs available in pool")
             return {
                 "result": ([],),
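Note the guard's effect: an empty pool no longer short-circuits when the option is on, because the one effective position is the "No LoRA" slot itself. A small illustration of the count arithmetic:

# With an empty pool and the option enabled there is still one position
total_count = 0
include_no_lora = True
effective_total_count = total_count + 1 if include_no_lora else total_count
assert effective_total_count == 1  # that position is the "No LoRA" slot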
@@ -93,47 +99,66 @@
         else:
             actual_index = current_index
 
-        # Clamp index to valid range (1-based)
-        clamped_index = max(1, min(actual_index, total_count))
+        # Clamp index to valid range (1-based, includes no lora if enabled)
+        clamped_index = max(1, min(actual_index, effective_total_count))
 
-        # Get LoRA at current index (convert to 0-based for list access)
-        current_lora = lora_list[clamped_index - 1]
+        # Check if current index is the "no lora" option (last position when include_no_lora is True)
+        is_no_lora = include_no_lora and clamped_index == effective_total_count
 
-        # Build LORA_STACK with single LoRA
-        if current_lora["file_name"] == "None":
-            lora_path = None
-        else:
-            lora_path, _ = get_lora_info(current_lora["file_name"])
-
-        if not lora_path:
-            if current_lora["file_name"] != "None":
-                logger.warning(
-                    f"[LoraCyclerLM] Could not find path for LoRA: {current_lora['file_name']}"
-                )
+        if is_no_lora:
+            # "No LoRA" option - return empty stack
             lora_stack = []
+            current_lora_name = "No LoRA"
+            current_lora_filename = "No LoRA"
         else:
-            # Normalize path separators
-            lora_path = lora_path.replace("/", os.sep)
-            lora_stack = [(lora_path, model_strength, clip_strength)]
+            # Get LoRA at current index (convert to 0-based for list access)
+            current_lora = lora_list[clamped_index - 1]
+            current_lora_name = current_lora["file_name"]
+            current_lora_filename = current_lora["file_name"]
+
+            # Build LORA_STACK with single LoRA
+            if current_lora["file_name"] == "None":
+                lora_path = None
+            else:
+                lora_path, _ = get_lora_info(current_lora["file_name"])
+
+            if not lora_path:
+                if current_lora["file_name"] != "None":
+                    logger.warning(
+                        f"[LoraCyclerLM] Could not find path for LoRA: {current_lora['file_name']}"
+                    )
+                lora_stack = []
+            else:
+                # Normalize path separators
+                lora_path = lora_path.replace("/", os.sep)
+                lora_stack = [(lora_path, model_strength, clip_strength)]
 
         # Calculate next index (wrap to 1 if at end)
         next_index = clamped_index + 1
-        if next_index > total_count:
+        if next_index > effective_total_count:
             next_index = 1
 
         # Get next LoRA for UI display (what will be used next generation)
-        next_lora = lora_list[next_index - 1]
-        next_display_name = next_lora["file_name"]
+        is_next_no_lora = include_no_lora and next_index == effective_total_count
+        if is_next_no_lora:
+            next_display_name = "No LoRA"
+            next_lora_filename = "No LoRA"
+        else:
+            next_lora = lora_list[next_index - 1]
+            next_display_name = next_lora["file_name"]
+            next_lora_filename = next_lora["file_name"]
 
         return {
             "result": (lora_stack,),
             "ui": {
                 "current_index": [clamped_index],
                 "next_index": [next_index],
-                "total_count": [total_count],
-                "current_lora_name": [current_lora["file_name"]],
-                "current_lora_filename": [current_lora["file_name"]],
+                "total_count": [
+                    total_count
+                ],  # Return actual LoRA count, not effective_total_count
+                "current_lora_name": [current_lora_name],
+                "current_lora_filename": [current_lora_filename],
                 "next_lora_name": [next_display_name],
-                "next_lora_filename": [next_lora["file_name"]],
+                "next_lora_filename": [next_lora_filename],
             },
         }
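The inline comment on total_count is the contract with the widget: the backend reports only real LoRAs, and the frontend re-derives the extra slot from its own include_no_lora setting. A hypothetical ui payload (values invented for illustration) for a two-LoRA pool currently on the "No LoRA" position:

# Effective positions are 1, 2, 3; position 3 is the "No LoRA" slot
ui = {
    "current_index": [3],
    "next_index": [1],                  # wrapped past the effective count
    "total_count": [2],                 # actual LoRAs only
    "current_lora_name": ["No LoRA"],
    "current_lora_filename": ["No LoRA"],
    "next_lora_name": ["a.safetensors"],
    "next_lora_filename": ["a.safetensors"],
}
assert ui["total_count"][0] + 1 == ui["current_index"][0]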
@@ -53,8 +53,6 @@ class LoraLoaderLM:
         # First process lora_stack if available
         if lora_stack:
             for lora_path, model_strength, clip_strength in lora_stack:
-                if lora_path == "None" or not lora_path:
-                    continue
                 # Extract lora name and convert to absolute path
                 # lora_stack stores relative paths, but load_torch_file needs absolute paths
                 lora_name = extract_lora_name(lora_path)
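Dropping this guard is safe under the new scheme: for the "No LoRA" position the cycler emits an empty LORA_STACK, so the loop body never runs and there are no "None" placeholder paths left to filter; the same reasoning applies to the LoraTextLoaderLM and LoraStackerLM hunks below. A sketch:

# What LoraCyclerLM now returns for the "No LoRA" slot: an empty stack
lora_stack = []
for lora_path, model_strength, clip_strength in lora_stack:
    raise AssertionError("unreachable: the loop never runs on an empty stack")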
@@ -80,7 +78,7 @@ class LoraLoaderLM:
         # Then process loras from kwargs with support for both old and new formats
         loras_list = get_loras_list(kwargs)
         for lora in loras_list:
-            if not lora.get('active', False) or lora.get('name') == "None":
+            if not lora.get('active', False):
                 continue
 
             lora_name = lora['name']
@@ -199,8 +197,6 @@ class LoraTextLoaderLM:
         # First process lora_stack if available
         if lora_stack:
             for lora_path, model_strength, clip_strength in lora_stack:
-                if lora_path == "None" or not lora_path:
-                    continue
                 # Extract lora name and convert to absolute path
                 # lora_stack stores relative paths, but load_torch_file needs absolute paths
                 lora_name = extract_lora_name(lora_path)
@@ -227,8 +223,6 @@ class LoraTextLoaderLM:
         parsed_loras = self.parse_lora_syntax(lora_syntax)
         for lora in parsed_loras:
             lora_name = lora['name']
-            if lora_name == "None":
-                continue
             model_strength = lora['model_strength']
             clip_strength = lora['clip_strength']
 
@@ -82,7 +82,6 @@ class LoraPoolLM:
                 "folders": {"include": [], "exclude": []},
                 "favoritesOnly": False,
                 "license": {"noCreditRequired": False, "allowSelling": False},
-                "includeEmptyLora": False,
             },
             "preview": {"matchCount": 0, "lastUpdated": 0},
         }
@@ -120,7 +120,7 @@ class LoraRandomizerLM:
         """
         lora_stack = []
         for lora in loras:
-            if not lora.get("active", False) or lora.get("name") == "None":
+            if not lora.get("active", False):
                 continue
 
             # Get file path
@@ -38,8 +38,6 @@ class LoraStackerLM:
             stack.extend(lora_stack)
             # Get trigger words from existing stack entries
             for lora_path, _, _ in lora_stack:
-                if lora_path == "None" or not lora_path:
-                    continue
                 lora_name = extract_lora_name(lora_path)
                 _, trigger_words = get_lora_info(lora_name)
                 all_trigger_words.extend(trigger_words)
@@ -47,7 +45,7 @@ class LoraStackerLM:
         # Process loras from kwargs with support for both old and new formats
         loras_list = get_loras_list(kwargs)
         for lora in loras_list:
-            if not lora.get('active', False) or lora.get('name') == "None":
+            if not lora.get('active', False):
                 continue
 
             lora_name = lora['name']