mirror of
https://github.com/willmiao/ComfyUI-Lora-Manager.git
synced 2026-03-21 21:22:11 -03:00
Compare commits
32 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fa08c9c3e4 | ||
|
|
d0d5eb956a | ||
|
|
969f949330 | ||
|
|
9169bbd04d | ||
|
|
99463ad01c | ||
|
|
f1d6b0feda | ||
|
|
e33da50278 | ||
|
|
4034eb3221 | ||
|
|
75a95f0109 | ||
|
|
92fdc16fe6 | ||
|
|
23fa2995c8 | ||
|
|
59aefdff77 | ||
|
|
e92ab9e3cc | ||
|
|
e3bf1f763c | ||
|
|
1c6e9d0b69 | ||
|
|
bfd4eb3e11 | ||
|
|
c9f902a8af | ||
|
|
0b67510ec9 | ||
|
|
b5cd320e8b | ||
|
|
deb25b4987 | ||
|
|
4612da264a | ||
|
|
59b67e1e10 | ||
|
|
5fad936b27 | ||
|
|
e376a45dea | ||
|
|
fd593bb61d | ||
|
|
71b97d5974 | ||
|
|
2b405ae164 | ||
|
|
2fe4736b69 | ||
|
|
184f8ca6cf | ||
|
|
1ff2019dde | ||
|
|
a3d8261686 | ||
|
|
7d0600976e |
@@ -20,6 +20,13 @@ Watch this quick tutorial to learn how to use the new one-click LoRA integration
|
||||
|
||||
## Release Notes
|
||||
|
||||
### v0.8.13
|
||||
* **Enhanced Recipe Management** - Added "Find duplicates" feature to identify and batch delete duplicate recipes with duplicate detection notifications during imports
|
||||
* **Improved Source Tracking** - Source URLs are now saved with recipes imported via URL, allowing users to view original content with one click or manually edit links
|
||||
* **Advanced LoRA Control** - Double-click LoRAs in Loader/Stacker nodes to access expanded CLIP strength controls for more precise adjustments of model and CLIP strength separately
|
||||
* **Lycoris Model Support** - Added compatibility with Lycoris models for expanded creative options
|
||||
* **Bug Fixes & UX Improvements** - Resolved various issues and enhanced overall user experience with numerous optimizations
|
||||
|
||||
### v0.8.12
|
||||
* **Enhanced Model Discovery** - Added alphabetical navigation bar to LoRAs page for faster browsing through large collections
|
||||
* **Optimized Example Images** - Improved download logic to automatically refresh stale metadata before fetching example images
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
import logging
|
||||
from nodes import LoraLoader
|
||||
from comfy.comfy_types import IO # type: ignore
|
||||
from ..services.lora_scanner import LoraScanner
|
||||
from ..config import config
|
||||
import asyncio
|
||||
import os
|
||||
from .utils import FlexibleOptionalInputType, any_type, get_lora_info, extract_lora_name, get_loras_list
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -51,7 +48,11 @@ class LoraManagerLoader:
|
||||
_, trigger_words = asyncio.run(get_lora_info(lora_name))
|
||||
|
||||
all_trigger_words.extend(trigger_words)
|
||||
loaded_loras.append(f"{lora_name}: {model_strength}")
|
||||
# Add clip strength to output if different from model strength
|
||||
if abs(model_strength - clip_strength) > 0.001:
|
||||
loaded_loras.append(f"{lora_name}: {model_strength},{clip_strength}")
|
||||
else:
|
||||
loaded_loras.append(f"{lora_name}: {model_strength}")
|
||||
|
||||
# Then process loras from kwargs with support for both old and new formats
|
||||
loras_list = get_loras_list(kwargs)
|
||||
@@ -60,14 +61,21 @@ class LoraManagerLoader:
|
||||
continue
|
||||
|
||||
lora_name = lora['name']
|
||||
strength = float(lora['strength'])
|
||||
model_strength = float(lora['strength'])
|
||||
# Get clip strength - use model strength as default if not specified
|
||||
clip_strength = float(lora.get('clipStrength', model_strength))
|
||||
|
||||
# Get lora path and trigger words
|
||||
lora_path, trigger_words = asyncio.run(get_lora_info(lora_name))
|
||||
|
||||
# Apply the LoRA using the resolved path
|
||||
model, clip = LoraLoader().load_lora(model, clip, lora_path, strength, strength)
|
||||
loaded_loras.append(f"{lora_name}: {strength}")
|
||||
# Apply the LoRA using the resolved path with separate strengths
|
||||
model, clip = LoraLoader().load_lora(model, clip, lora_path, model_strength, clip_strength)
|
||||
|
||||
# Include clip strength in output if different from model strength
|
||||
if abs(model_strength - clip_strength) > 0.001:
|
||||
loaded_loras.append(f"{lora_name}: {model_strength},{clip_strength}")
|
||||
else:
|
||||
loaded_loras.append(f"{lora_name}: {model_strength}")
|
||||
|
||||
# Add trigger words to collection
|
||||
all_trigger_words.extend(trigger_words)
|
||||
@@ -75,8 +83,23 @@ class LoraManagerLoader:
|
||||
# use ',, ' to separate trigger words for group mode
|
||||
trigger_words_text = ",, ".join(all_trigger_words) if all_trigger_words else ""
|
||||
|
||||
# Format loaded_loras as <lora:lora_name:strength> separated by spaces
|
||||
formatted_loras = " ".join([f"<lora:{name.split(':')[0].strip()}:{str(strength).strip()}>"
|
||||
for name, strength in [item.split(':') for item in loaded_loras]])
|
||||
# Format loaded_loras with support for both formats
|
||||
formatted_loras = []
|
||||
for item in loaded_loras:
|
||||
parts = item.split(":")
|
||||
lora_name = parts[0].strip()
|
||||
strength_parts = parts[1].strip().split(",")
|
||||
|
||||
if len(strength_parts) > 1:
|
||||
# Different model and clip strengths
|
||||
model_str = strength_parts[0].strip()
|
||||
clip_str = strength_parts[1].strip()
|
||||
formatted_loras.append(f"<lora:{lora_name}:{model_str}:{clip_str}>")
|
||||
else:
|
||||
# Same strength for both
|
||||
model_str = strength_parts[0].strip()
|
||||
formatted_loras.append(f"<lora:{lora_name}:{model_str}>")
|
||||
|
||||
formatted_loras_text = " ".join(formatted_loras)
|
||||
|
||||
return (model, clip, trigger_words_text, formatted_loras)
|
||||
return (model, clip, trigger_words_text, formatted_loras_text)
|
||||
@@ -38,7 +38,7 @@ class LoraStacker:
|
||||
|
||||
# Process existing lora_stack if available
|
||||
lora_stack = kwargs.get('lora_stack', None)
|
||||
if lora_stack:
|
||||
if (lora_stack):
|
||||
stack.extend(lora_stack)
|
||||
# Get trigger words from existing stack entries
|
||||
for lora_path, _, _ in lora_stack:
|
||||
@@ -54,7 +54,8 @@ class LoraStacker:
|
||||
|
||||
lora_name = lora['name']
|
||||
model_strength = float(lora['strength'])
|
||||
clip_strength = model_strength # Using same strength for both as in the original loader
|
||||
# Get clip strength - use model strength as default if not specified
|
||||
clip_strength = float(lora.get('clipStrength', model_strength))
|
||||
|
||||
# Get lora path and trigger words
|
||||
lora_path, trigger_words = asyncio.run(get_lora_info(lora_name))
|
||||
@@ -62,15 +63,24 @@ class LoraStacker:
|
||||
# Add to stack without loading
|
||||
# replace '/' with os.sep to avoid different OS path format
|
||||
stack.append((lora_path.replace('/', os.sep), model_strength, clip_strength))
|
||||
active_loras.append((lora_name, model_strength))
|
||||
active_loras.append((lora_name, model_strength, clip_strength))
|
||||
|
||||
# Add trigger words to collection
|
||||
all_trigger_words.extend(trigger_words)
|
||||
|
||||
# use ',, ' to separate trigger words for group mode
|
||||
trigger_words_text = ",, ".join(all_trigger_words) if all_trigger_words else ""
|
||||
# Format active_loras as <lora:lora_name:strength> separated by spaces
|
||||
active_loras_text = " ".join([f"<lora:{name}:{str(strength).strip()}>"
|
||||
for name, strength in active_loras])
|
||||
|
||||
# Format active_loras with support for both formats
|
||||
formatted_loras = []
|
||||
for name, model_strength, clip_strength in active_loras:
|
||||
if abs(model_strength - clip_strength) > 0.001:
|
||||
# Different model and clip strengths
|
||||
formatted_loras.append(f"<lora:{name}:{str(model_strength).strip()}:{str(clip_strength).strip()}>")
|
||||
else:
|
||||
# Same strength for both
|
||||
formatted_loras.append(f"<lora:{name}:{str(model_strength).strip()}>")
|
||||
|
||||
active_loras_text = " ".join(formatted_loras)
|
||||
|
||||
return (stack, trigger_words_text, active_loras_text)
|
||||
|
||||
22
py/recipes/__init__.py
Normal file
22
py/recipes/__init__.py
Normal file
@@ -0,0 +1,22 @@
|
||||
"""Recipe metadata parser package for ComfyUI-Lora-Manager."""
|
||||
|
||||
from .base import RecipeMetadataParser
|
||||
from .factory import RecipeParserFactory
|
||||
from .constants import GEN_PARAM_KEYS, VALID_LORA_TYPES
|
||||
from .parsers import (
|
||||
RecipeFormatParser,
|
||||
ComfyMetadataParser,
|
||||
MetaFormatParser,
|
||||
AutomaticMetadataParser
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'RecipeMetadataParser',
|
||||
'RecipeParserFactory',
|
||||
'GEN_PARAM_KEYS',
|
||||
'VALID_LORA_TYPES',
|
||||
'RecipeFormatParser',
|
||||
'ComfyMetadataParser',
|
||||
'MetaFormatParser',
|
||||
'AutomaticMetadataParser'
|
||||
]
|
||||
181
py/recipes/base.py
Normal file
181
py/recipes/base.py
Normal file
@@ -0,0 +1,181 @@
|
||||
"""Base classes for recipe parsers."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from typing import Dict, List, Any, Optional, Tuple
|
||||
from abc import ABC, abstractmethod
|
||||
from ..config import config
|
||||
from .constants import VALID_LORA_TYPES
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class RecipeMetadataParser(ABC):
    """Interface for parsing recipe metadata from image user comments"""

    # Subclasses override this with a marker (regex or token) identifying
    # their metadata format; the base class leaves it unset.
    METADATA_MARKER = None

    @abstractmethod
    def is_metadata_matching(self, user_comment: str) -> bool:
        """Check if the user comment matches the metadata format"""
        pass

    @abstractmethod
    async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]:
        """
        Parse metadata from user comment and return structured recipe data

        Args:
            user_comment: The EXIF UserComment string from the image
            recipe_scanner: Optional recipe scanner instance for local LoRA lookup
            civitai_client: Optional Civitai client for fetching model information

        Returns:
            Dict containing parsed recipe data with standardized format
        """
        pass

    async def populate_lora_from_civitai(self, lora_entry: Dict[str, Any], civitai_info_tuple: Tuple[Dict[str, Any], Optional[str]],
                                         recipe_scanner=None, base_model_counts=None, hash_value=None) -> Optional[Dict[str, Any]]:
        """
        Populate a lora entry with information from Civitai API response

        Args:
            lora_entry: The lora entry to populate (mutated in place)
            civitai_info_tuple: The response tuple from Civitai API (data, error_msg)
            recipe_scanner: Optional recipe scanner for local file lookup
            base_model_counts: Optional dict to track base model counts (mutated in place)
            hash_value: Optional hash value to use if not available in civitai_info

        Returns:
            The populated lora_entry dict if type is valid, None otherwise
        """
        try:
            # Unpack the tuple to get the actual data; tolerate callers that
            # pass the raw data dict instead of a (data, error) tuple.
            civitai_info, error_msg = civitai_info_tuple if isinstance(civitai_info_tuple, tuple) else (civitai_info_tuple, None)

            if not civitai_info or civitai_info.get("error") == "Model not found":
                # Model not found or deleted — still return the entry so the
                # caller keeps a placeholder with a fallback thumbnail.
                lora_entry['isDeleted'] = True
                lora_entry['thumbnailUrl'] = '/loras_static/images/no-preview.png'
                return lora_entry

            # Get model type and validate; non-LoRA types are filtered out by
            # returning None so the caller can skip the entry.
            model_type = civitai_info.get('model', {}).get('type', '').lower()
            lora_entry['type'] = model_type
            if model_type not in VALID_LORA_TYPES:
                logger.debug(f"Skipping non-LoRA model type: {model_type}")
                return None

            # Check if this is an early access lora
            if civitai_info.get('earlyAccessEndsAt'):
                # Convert earlyAccessEndsAt to a human-readable date
                early_access_date = civitai_info.get('earlyAccessEndsAt', '')
                lora_entry['isEarlyAccess'] = True
                lora_entry['earlyAccessEndsAt'] = early_access_date

            # Update model name if available
            if 'model' in civitai_info and 'name' in civitai_info['model']:
                lora_entry['name'] = civitai_info['model']['name']

            # Update version if available (top-level 'name' in the Civitai
            # version payload is the version label, not the model name)
            if 'name' in civitai_info:
                lora_entry['version'] = civitai_info.get('name', '')

            # Get thumbnail URL from first image
            if 'images' in civitai_info and civitai_info['images']:
                lora_entry['thumbnailUrl'] = civitai_info['images'][0].get('url', '')

            # Get base model
            current_base_model = civitai_info.get('baseModel', '')
            lora_entry['baseModel'] = current_base_model

            # Update base model counts if tracking them
            if base_model_counts is not None and current_base_model:
                base_model_counts[current_base_model] = base_model_counts.get(current_base_model, 0) + 1

            # Get download URL
            lora_entry['downloadUrl'] = civitai_info.get('downloadUrl', '')

            # Process file information if available
            if 'files' in civitai_info:
                # Find the primary model file (type="Model" and primary=true) in the files list
                model_file = next((file for file in civitai_info.get('files', [])
                                  if file.get('type') == 'Model' and file.get('primary') == True), None)

                if model_file:
                    # Get size (Civitai reports sizeKB; stored here in bytes)
                    lora_entry['size'] = model_file.get('sizeKB', 0) * 1024

                    # Get SHA256 hash, falling back to the caller-supplied hash
                    sha256 = model_file.get('hashes', {}).get('SHA256', hash_value)
                    if sha256:
                        lora_entry['hash'] = sha256.lower()

                    # Check if exists locally
                    # NOTE(review): reaches into recipe_scanner._lora_scanner,
                    # a private attribute — assumes the scanner exposes it.
                    if recipe_scanner and lora_entry['hash']:
                        lora_scanner = recipe_scanner._lora_scanner
                        exists_locally = lora_scanner.has_lora_hash(lora_entry['hash'])
                        if exists_locally:
                            try:
                                local_path = lora_scanner.get_lora_path_by_hash(lora_entry['hash'])
                                lora_entry['existsLocally'] = True
                                lora_entry['localPath'] = local_path
                                lora_entry['file_name'] = os.path.splitext(os.path.basename(local_path))[0]

                                # Get thumbnail from local preview if available
                                lora_cache = await lora_scanner.get_cached_data()
                                lora_item = next((item for item in lora_cache.raw_data
                                                 if item['sha256'].lower() == lora_entry['hash'].lower()), None)
                                if lora_item and 'preview_url' in lora_item:
                                    lora_entry['thumbnailUrl'] = config.get_preview_static_url(lora_item['preview_url'])
                            except Exception as e:
                                logger.error(f"Error getting local lora path: {e}")
                        else:
                            # For missing LoRAs, get file_name from model_file.name
                            file_name = model_file.get('name', '')
                            lora_entry['file_name'] = os.path.splitext(file_name)[0] if file_name else ''

        except Exception as e:
            # Best-effort population: log and return whatever was filled in.
            logger.error(f"Error populating lora from Civitai info: {e}")

        return lora_entry

    async def populate_checkpoint_from_civitai(self, checkpoint: Dict[str, Any], civitai_info: Dict[str, Any]) -> Dict[str, Any]:
        """
        Populate checkpoint information from Civitai API response

        Args:
            checkpoint: The checkpoint entry to populate (mutated in place)
            civitai_info: The response from Civitai API

        Returns:
            The populated checkpoint dict
        """
        try:
            if civitai_info and civitai_info.get("error") != "Model not found":
                # Update model name if available
                if 'model' in civitai_info and 'name' in civitai_info['model']:
                    checkpoint['name'] = civitai_info['model']['name']

                # Update version if available
                if 'name' in civitai_info:
                    checkpoint['version'] = civitai_info.get('name', '')

                # Get thumbnail URL from first image
                if 'images' in civitai_info and civitai_info['images']:
                    checkpoint['thumbnailUrl'] = civitai_info['images'][0].get('url', '')

                # Get base model
                checkpoint['baseModel'] = civitai_info.get('baseModel', '')

                # Get download URL
                checkpoint['downloadUrl'] = civitai_info.get('downloadUrl', '')
            else:
                # Model not found or deleted
                checkpoint['isDeleted'] = True
        except Exception as e:
            logger.error(f"Error populating checkpoint from Civitai info: {e}")

        return checkpoint
|
||||
16
py/recipes/constants.py
Normal file
16
py/recipes/constants.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""Constants used across recipe parsers."""

# Generation-parameter keys that parsers are allowed to surface in recipe
# data; downstream code iterates this list to filter parsed values, so the
# ordering is preserved.
GEN_PARAM_KEYS = [
    'prompt', 'negative_prompt',
    'steps', 'sampler', 'cfg_scale',
    'seed', 'size', 'clip_skip',
]

# Civitai model types that count as LoRA-like for recipe purposes.
VALID_LORA_TYPES = ['lora', 'locon']
|
||||
43
py/recipes/factory.py
Normal file
43
py/recipes/factory.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""Factory for creating recipe metadata parsers."""
|
||||
|
||||
import logging
|
||||
from .parsers import (
|
||||
RecipeFormatParser,
|
||||
ComfyMetadataParser,
|
||||
MetaFormatParser,
|
||||
AutomaticMetadataParser
|
||||
)
|
||||
from .base import RecipeMetadataParser
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class RecipeParserFactory:
    """Factory for creating recipe metadata parsers"""

    @staticmethod
    def create_parser(user_comment: str) -> "RecipeMetadataParser | None":
        """
        Create appropriate parser based on the user comment content

        Args:
            user_comment: The EXIF UserComment string from the image

        Returns:
            The first RecipeMetadataParser implementation whose format
            matches, or None if no parser recognizes the comment.
            (Fixed: the previous annotation claimed a parser was always
            returned, but the fall-through branch returns None.)
        """
        # Try ComfyMetadataParser first since it requires valid JSON; a JSON
        # parse failure simply means the comment is in some other format.
        # Reuse the instance instead of constructing it twice as before.
        try:
            comfy_parser = ComfyMetadataParser()
            if comfy_parser.is_metadata_matching(user_comment):
                return comfy_parser
        except Exception:
            # If JSON parsing fails, move on to other parsers
            pass

        # Remaining parsers are cheap string/regex checks; order matters —
        # most specific format first, most permissive last.
        for parser_cls in (RecipeFormatParser, AutomaticMetadataParser, MetaFormatParser):
            parser = parser_cls()
            if parser.is_metadata_matching(user_comment):
                return parser
        return None
|
||||
13
py/recipes/parsers/__init__.py
Normal file
13
py/recipes/parsers/__init__.py
Normal file
@@ -0,0 +1,13 @@
|
||||
"""Recipe parsers package."""
|
||||
|
||||
from .recipe_format import RecipeFormatParser
|
||||
from .comfy import ComfyMetadataParser
|
||||
from .meta_format import MetaFormatParser
|
||||
from .automatic import AutomaticMetadataParser
|
||||
|
||||
__all__ = [
|
||||
'RecipeFormatParser',
|
||||
'ComfyMetadataParser',
|
||||
'MetaFormatParser',
|
||||
'AutomaticMetadataParser',
|
||||
]
|
||||
304
py/recipes/parsers/automatic.py
Normal file
304
py/recipes/parsers/automatic.py
Normal file
@@ -0,0 +1,304 @@
|
||||
"""Parser for Automatic1111 metadata format."""
|
||||
|
||||
import re
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
from ..base import RecipeMetadataParser
|
||||
from ..constants import GEN_PARAM_KEYS
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class AutomaticMetadataParser(RecipeMetadataParser):
    """Parser for Automatic1111 metadata format"""

    # A1111 infotext always carries a "Steps: N" parameter; used as the
    # format marker.
    METADATA_MARKER = r"Steps: \d+"

    # Regular expressions for extracting specific metadata
    HASHES_REGEX = r', Hashes:\s*({[^}]+})'
    LORA_HASHES_REGEX = r', Lora hashes:\s*"([^"]+)"'
    CIVITAI_RESOURCES_REGEX = r', Civitai resources:\s*(\[\{.*?\}\])'
    CIVITAI_METADATA_REGEX = r', Civitai metadata:\s*(\{.*?\})'
    EXTRANETS_REGEX = r'<(lora|hypernet):([a-zA-Z0-9_\.\-]+):([0-9.]+)>'
    MODEL_HASH_PATTERN = r'Model hash: ([a-zA-Z0-9]+)'
    VAE_HASH_PATTERN = r'VAE hash: ([a-zA-Z0-9]+)'

    def is_metadata_matching(self, user_comment: str) -> bool:
        """Check if the user comment matches the Automatic1111 format"""
        return re.search(self.METADATA_MARKER, user_comment) is not None

    async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]:
        """Parse metadata from Automatic1111 format

        Splits the infotext into prompt / negative prompt / parameter
        sections, extracts generation parameters and LoRA references
        (preferring the embedded "Civitai resources" JSON, then hash maps),
        optionally enriching each LoRA via the Civitai client.

        Returns:
            Dict with 'base_model', 'loras', 'gen_params' and
            'from_automatic_metadata' keys, or {'error', 'loras'} on failure.
        """
        try:
            # Split on Negative prompt if it exists
            if "Negative prompt:" in user_comment:
                parts = user_comment.split('Negative prompt:', 1)
                prompt = parts[0].strip()
                negative_and_params = parts[1] if len(parts) > 1 else ""
            else:
                # No negative prompt section — everything before "Steps:" is
                # the positive prompt.
                param_start = re.search(self.METADATA_MARKER, user_comment)
                if param_start:
                    prompt = user_comment[:param_start.start()].strip()
                    negative_and_params = user_comment[param_start.start():]
                else:
                    prompt = user_comment.strip()
                    negative_and_params = ""

            # Initialize metadata
            metadata = {
                "prompt": prompt,
                "loras": []
            }

            # Extract negative prompt and parameters
            if negative_and_params:
                # If we split on "Negative prompt:", check for params section
                if "Negative prompt:" in user_comment:
                    param_start = re.search(r'Steps: ', negative_and_params)
                    if param_start:
                        neg_prompt = negative_and_params[:param_start.start()].strip()
                        metadata["negative_prompt"] = neg_prompt
                        params_section = negative_and_params[param_start.start():]
                    else:
                        metadata["negative_prompt"] = negative_and_params.strip()
                        params_section = ""
                else:
                    # No negative prompt, entire section is params
                    params_section = negative_and_params

                # Extract generation parameters
                if params_section:
                    # Extract Civitai resources (JSON list appended by Civitai)
                    civitai_resources_match = re.search(self.CIVITAI_RESOURCES_REGEX, params_section)
                    if civitai_resources_match:
                        try:
                            civitai_resources = json.loads(civitai_resources_match.group(1))
                            metadata["civitai_resources"] = civitai_resources
                            params_section = params_section.replace(civitai_resources_match.group(0), '')
                        except json.JSONDecodeError:
                            logger.error("Error parsing Civitai resources JSON")

                    # Extract Hashes
                    hashes_match = re.search(self.HASHES_REGEX, params_section)
                    if hashes_match:
                        try:
                            hashes = json.loads(hashes_match.group(1))
                            # Process hash keys into a normalized "type:name" form
                            processed_hashes = {}
                            for key, value in hashes.items():
                                # Convert Model: or LORA: prefix to lowercase if present
                                if ':' in key:
                                    prefix, name = key.split(':', 1)
                                    prefix = prefix.lower()
                                else:
                                    prefix = ''
                                    name = key

                                # Clean up the name part
                                if '/' in name:
                                    name = name.split('/')[-1]  # Get last part after /
                                if '.safetensors' in name:
                                    name = name.split('.safetensors')[0]  # Remove .safetensors

                                # Reconstruct the key
                                new_key = f"{prefix}:{name}" if prefix else name
                                processed_hashes[new_key] = value

                            metadata["hashes"] = processed_hashes
                            # Remove hashes from params section to not interfere with other parsing
                            params_section = params_section.replace(hashes_match.group(0), '')
                        except json.JSONDecodeError:
                            logger.error("Error parsing hashes JSON")

                    # Extract Lora hashes in alternative format
                    # (only when the JSON "Hashes" block was absent)
                    lora_hashes_match = re.search(self.LORA_HASHES_REGEX, params_section)
                    if not hashes_match and lora_hashes_match:
                        try:
                            lora_hashes_str = lora_hashes_match.group(1)
                            lora_hash_entries = lora_hashes_str.split(', ')

                            # Initialize hashes dict if it doesn't exist
                            if "hashes" not in metadata:
                                metadata["hashes"] = {}

                            # Parse each lora hash entry (format: "name: hash")
                            for entry in lora_hash_entries:
                                if ': ' in entry:
                                    lora_name, lora_hash = entry.split(': ', 1)
                                    # Add as lora type in the same format as regular hashes
                                    metadata["hashes"][f"lora:{lora_name}"] = lora_hash.strip()

                            # Remove lora hashes from params section
                            params_section = params_section.replace(lora_hashes_match.group(0), '')
                        except Exception as e:
                            logger.error(f"Error parsing Lora hashes: {e}")

                    # Extract basic "Key: value" parameters from what remains
                    param_pattern = r'([A-Za-z\s]+): ([^,]+)'
                    params = re.findall(param_pattern, params_section)
                    gen_params = {}

                    for key, value in params:
                        clean_key = key.strip().lower().replace(' ', '_')

                        # Skip if not in recognized gen param keys
                        if clean_key not in GEN_PARAM_KEYS:
                            continue

                        # Convert numeric values, keeping the raw string on failure
                        if clean_key in ['steps', 'seed']:
                            try:
                                gen_params[clean_key] = int(value.strip())
                            except ValueError:
                                gen_params[clean_key] = value.strip()
                        elif clean_key in ['cfg_scale']:
                            try:
                                gen_params[clean_key] = float(value.strip())
                            except ValueError:
                                gen_params[clean_key] = value.strip()
                        else:
                            gen_params[clean_key] = value.strip()

                    # Extract size if available and add to gen_params if a recognized key
                    size_match = re.search(r'Size: (\d+)x(\d+)', params_section)
                    if size_match and 'size' in GEN_PARAM_KEYS:
                        width, height = size_match.groups()
                        gen_params['size'] = f"{width}x{height}"

                    # Add prompt and negative_prompt to gen_params if they're in GEN_PARAM_KEYS
                    if 'prompt' in GEN_PARAM_KEYS and 'prompt' in metadata:
                        gen_params['prompt'] = metadata['prompt']
                    if 'negative_prompt' in GEN_PARAM_KEYS and 'negative_prompt' in metadata:
                        gen_params['negative_prompt'] = metadata['negative_prompt']

                    metadata["gen_params"] = gen_params

            # Extract LoRA information
            loras = []
            base_model_counts = {}

            # First use Civitai resources if available (more reliable source)
            if metadata.get("civitai_resources"):
                for resource in metadata.get("civitai_resources", []):
                    if resource.get("type") in ["lora", "lycoris", "hypernet"] and resource.get("modelVersionId"):
                        # Initialize lora entry
                        lora_entry = {
                            'id': str(resource.get("modelVersionId")),
                            'modelId': str(resource.get("modelId")) if resource.get("modelId") else None,
                            'name': resource.get("modelName", "Unknown LoRA"),
                            'version': resource.get("modelVersionName", ""),
                            'type': resource.get("type", "lora"),
                            'weight': round(float(resource.get("weight", 1.0)), 2),
                            'existsLocally': False,
                            'thumbnailUrl': '/loras_static/images/no-preview.png',
                            'baseModel': '',
                            'size': 0,
                            'downloadUrl': '',
                            'isDeleted': False
                        }

                        # Get additional info from Civitai
                        if civitai_client:
                            try:
                                civitai_info = await civitai_client.get_model_version_info(resource.get("modelVersionId"))
                                populated_entry = await self.populate_lora_from_civitai(
                                    lora_entry,
                                    civitai_info,
                                    recipe_scanner,
                                    base_model_counts
                                )
                                if populated_entry is None:
                                    continue  # Skip invalid LoRA types
                                lora_entry = populated_entry
                            except Exception as e:
                                logger.error(f"Error fetching Civitai info for LoRA {lora_entry['name']}: {e}")

                        loras.append(lora_entry)

            # If no LoRAs from Civitai resources or to supplement, extract from metadata["hashes"]
            if not loras or len(loras) == 0:
                # Extract lora weights from extranet tags in prompt (for later use)
                lora_weights = {}
                lora_matches = re.findall(self.EXTRANETS_REGEX, prompt)
                for lora_type, lora_name, lora_weight in lora_matches:
                    key = f"{lora_type}:{lora_name}"
                    lora_weights[key] = round(float(lora_weight), 2)

                # Use hashes from metadata as the primary source
                if metadata.get("hashes"):
                    for hash_key, lora_hash in metadata.get("hashes", {}).items():
                        # Only process lora or hypernet types
                        if not hash_key.startswith(("lora:", "hypernet:")):
                            continue

                        lora_type, lora_name = hash_key.split(':', 1)

                        # Get weight from extranet tags if available, else default to 1.0
                        weight = lora_weights.get(hash_key, 1.0)

                        # Initialize lora entry
                        lora_entry = {
                            'name': lora_name,
                            'type': lora_type,  # 'lora' or 'hypernet'
                            'weight': weight,
                            'hash': lora_hash,
                            'existsLocally': False,
                            'localPath': None,
                            'file_name': lora_name,
                            'thumbnailUrl': '/loras_static/images/no-preview.png',
                            'baseModel': '',
                            'size': 0,
                            'downloadUrl': '',
                            'isDeleted': False
                        }

                        # Try to get info from Civitai
                        if civitai_client:
                            try:
                                if lora_hash:
                                    # If we have hash, use it for lookup
                                    civitai_info = await civitai_client.get_model_by_hash(lora_hash)
                                else:
                                    civitai_info = None

                                populated_entry = await self.populate_lora_from_civitai(
                                    lora_entry,
                                    civitai_info,
                                    recipe_scanner,
                                    base_model_counts,
                                    lora_hash
                                )
                                if populated_entry is None:
                                    continue  # Skip invalid LoRA types
                                lora_entry = populated_entry
                            except Exception as e:
                                logger.error(f"Error fetching Civitai info for LoRA {lora_name}: {e}")

                        loras.append(lora_entry)

            # Try to get base model from resources or make educated guess
            base_model = None
            if base_model_counts:
                # Use the most common base model from the loras
                base_model = max(base_model_counts.items(), key=lambda x: x[1])[0]

            # Prepare final result structure
            # Make sure gen_params only contains recognized keys
            filtered_gen_params = {}
            for key in GEN_PARAM_KEYS:
                if key in metadata.get("gen_params", {}):
                    filtered_gen_params[key] = metadata["gen_params"][key]

            result = {
                'base_model': base_model,
                'loras': loras,
                'gen_params': filtered_gen_params,
                'from_automatic_metadata': True
            }

            return result

        except Exception as e:
            logger.error(f"Error parsing Automatic1111 metadata: {e}", exc_info=True)
            return {"error": str(e), "loras": []}
|
||||
216
py/recipes/parsers/comfy.py
Normal file
216
py/recipes/parsers/comfy.py
Normal file
@@ -0,0 +1,216 @@
|
||||
"""Parser for ComfyUI metadata format."""
|
||||
|
||||
import re
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
from ..base import RecipeMetadataParser
|
||||
from ..constants import GEN_PARAM_KEYS
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class ComfyMetadataParser(RecipeMetadataParser):
    """Parser for Civitai ComfyUI metadata JSON format.

    Recognizes a serialized ComfyUI workflow (a dict of node-id -> node
    descriptions, each carrying a ``class_type``) and extracts LoRA usage,
    the checkpoint, and generation parameters from it.
    """

    # Substring that identifies a ComfyUI workflow payload.
    METADATA_MARKER = r"class_type"

    def is_metadata_matching(self, user_comment: str) -> bool:
        """Check if the user comment matches the ComfyUI metadata format.

        Returns True only when the comment is valid JSON whose top level is a
        dict containing at least one node dict with a 'class_type' key.
        """
        try:
            data = json.loads(user_comment)
            # Check if it contains class_type nodes typical of ComfyUI workflow
            return isinstance(data, dict) and any(isinstance(v, dict) and 'class_type' in v for v in data.values())
        except (json.JSONDecodeError, TypeError):
            return False

    async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]:
        """Parse metadata from Civitai ComfyUI metadata format.

        Args:
            user_comment: JSON string containing the ComfyUI workflow graph.
            recipe_scanner: optional scanner used to resolve LoRAs locally
                (forwarded to ``populate_lora_from_civitai`` on the base class).
            civitai_client: optional async client used to enrich LoRA and
                checkpoint entries with remote Civitai information.

        Returns:
            Dict with 'base_model', 'loras', 'checkpoint', 'gen_params' and
            the 'from_comfy_metadata' flag; on failure a dict with 'error'
            and an empty 'loras' list.
        """
        try:
            data = json.loads(user_comment)
            loras = []

            # Find all LoraLoader nodes
            lora_nodes = {k: v for k, v in data.items() if isinstance(v, dict) and v.get('class_type') == 'LoraLoader'}

            if not lora_nodes:
                return {"error": "No LoRA information found in this ComfyUI workflow", "loras": []}

            # Process each LoraLoader node
            for node_id, node in lora_nodes.items():
                if 'inputs' not in node or 'lora_name' not in node['inputs']:
                    continue

                lora_name = node['inputs'].get('lora_name', '')

                # Parse the URN to extract model ID and version ID
                # Format: "urn:air:sdxl:lora:civitai:1107767@1253442"
                lora_id_match = re.search(r'civitai:(\d+)@(\d+)', lora_name)
                if not lora_id_match:
                    # Not a Civitai URN (e.g. a plain local filename) - skip it
                    continue

                model_id = lora_id_match.group(1)
                model_version_id = lora_id_match.group(2)

                # Get strength from node inputs
                weight = node['inputs'].get('strength_model', 1.0)

                # Initialize lora entry with default values
                lora_entry = {
                    'id': model_version_id,
                    'modelId': model_id,
                    'name': f"Lora {model_id}",  # Default name
                    'version': '',
                    'type': 'lora',
                    'weight': weight,
                    'existsLocally': False,
                    'localPath': None,
                    'file_name': '',
                    'hash': '',
                    'thumbnailUrl': '/loras_static/images/no-preview.png',
                    'baseModel': '',
                    'size': 0,
                    'downloadUrl': '',
                    'isDeleted': False
                }

                # Get additional info from Civitai if client is available
                if civitai_client:
                    try:
                        civitai_info_tuple = await civitai_client.get_model_version_info(model_version_id)
                        # Populate lora entry with Civitai info
                        # (populate_lora_from_civitai comes from the base class)
                        populated_entry = await self.populate_lora_from_civitai(
                            lora_entry,
                            civitai_info_tuple,
                            recipe_scanner
                        )
                        if populated_entry is None:
                            continue  # Skip invalid LoRA types
                        lora_entry = populated_entry
                    except Exception as e:
                        # Best-effort enrichment: keep the default entry on failure
                        logger.error(f"Error fetching Civitai info for LoRA: {e}")

                loras.append(lora_entry)

            # Find checkpoint info
            checkpoint_nodes = {k: v for k, v in data.items() if isinstance(v, dict) and v.get('class_type') == 'CheckpointLoaderSimple'}
            checkpoint = None
            checkpoint_id = None
            checkpoint_version_id = None

            if checkpoint_nodes:
                # Get the first checkpoint node
                checkpoint_node = next(iter(checkpoint_nodes.values()))
                if 'inputs' in checkpoint_node and 'ckpt_name' in checkpoint_node['inputs']:
                    checkpoint_name = checkpoint_node['inputs']['ckpt_name']
                    # Parse checkpoint URN (same civitai:<modelId>@<versionId> form as LoRAs)
                    checkpoint_match = re.search(r'civitai:(\d+)@(\d+)', checkpoint_name)
                    if checkpoint_match:
                        checkpoint_id = checkpoint_match.group(1)
                        checkpoint_version_id = checkpoint_match.group(2)
                        checkpoint = {
                            'id': checkpoint_version_id,
                            'modelId': checkpoint_id,
                            'name': f"Checkpoint {checkpoint_id}",
                            'version': '',
                            'type': 'checkpoint'
                        }

                        # Get additional checkpoint info from Civitai
                        if civitai_client:
                            try:
                                civitai_info_tuple = await civitai_client.get_model_version_info(checkpoint_version_id)
                                # Client may return (info, error) tuple or bare info - unwrap either way
                                civitai_info, _ = civitai_info_tuple if isinstance(civitai_info_tuple, tuple) else (civitai_info_tuple, None)
                                # Populate checkpoint with Civitai info
                                checkpoint = await self.populate_checkpoint_from_civitai(checkpoint, civitai_info)
                            except Exception as e:
                                logger.error(f"Error fetching Civitai info for checkpoint: {e}")

            # Extract generation parameters
            gen_params = {}

            # First try to get from extraMetadata
            if 'extraMetadata' in data:
                try:
                    # extraMetadata is a JSON string that needs to be parsed
                    extra_metadata = json.loads(data['extraMetadata'])

                    # Map fields from extraMetadata to our standard format
                    mapping = {
                        'prompt': 'prompt',
                        'negativePrompt': 'negative_prompt',
                        'steps': 'steps',
                        'sampler': 'sampler',
                        'cfgScale': 'cfg_scale',
                        'seed': 'seed'
                    }

                    for src_key, dest_key in mapping.items():
                        if src_key in extra_metadata:
                            gen_params[dest_key] = extra_metadata[src_key]

                    # If size info is available, format as "width x height"
                    if 'width' in extra_metadata and 'height' in extra_metadata:
                        gen_params['size'] = f"{extra_metadata['width']}x{extra_metadata['height']}"

                except Exception as e:
                    logger.error(f"Error parsing extraMetadata: {e}")

            # If extraMetadata doesn't have all the info, try to get from nodes
            if not gen_params or len(gen_params) < 3:  # At least we want prompt, negative_prompt, and steps
                # Find positive prompt node
                # NOTE(review): relies on the node's _meta title being exactly
                # 'Positive'/'Negative' - workflows with renamed nodes won't match.
                positive_nodes = {k: v for k, v in data.items() if isinstance(v, dict) and
                                  v.get('class_type', '').endswith('CLIPTextEncode') and
                                  v.get('_meta', {}).get('title') == 'Positive'}

                if positive_nodes:
                    positive_node = next(iter(positive_nodes.values()))
                    if 'inputs' in positive_node and 'text' in positive_node['inputs']:
                        gen_params['prompt'] = positive_node['inputs']['text']

                # Find negative prompt node
                negative_nodes = {k: v for k, v in data.items() if isinstance(v, dict) and
                                  v.get('class_type', '').endswith('CLIPTextEncode') and
                                  v.get('_meta', {}).get('title') == 'Negative'}

                if negative_nodes:
                    negative_node = next(iter(negative_nodes.values()))
                    if 'inputs' in negative_node and 'text' in negative_node['inputs']:
                        gen_params['negative_prompt'] = negative_node['inputs']['text']

                # Find KSampler node for other parameters
                ksampler_nodes = {k: v for k, v in data.items() if isinstance(v, dict) and v.get('class_type') == 'KSampler'}

                if ksampler_nodes:
                    ksampler_node = next(iter(ksampler_nodes.values()))
                    if 'inputs' in ksampler_node:
                        inputs = ksampler_node['inputs']
                        if 'sampler_name' in inputs:
                            gen_params['sampler'] = inputs['sampler_name']
                        if 'steps' in inputs:
                            gen_params['steps'] = inputs['steps']
                        if 'cfg' in inputs:
                            gen_params['cfg_scale'] = inputs['cfg']
                        if 'seed' in inputs:
                            gen_params['seed'] = inputs['seed']

            # Determine base model from loras info
            base_model = None
            if loras:
                # Use the most common base model from loras
                base_models = [lora['baseModel'] for lora in loras if lora.get('baseModel')]
                if base_models:
                    from collections import Counter
                    base_model_counts = Counter(base_models)
                    base_model = base_model_counts.most_common(1)[0][0]

            return {
                'base_model': base_model,
                'loras': loras,
                'checkpoint': checkpoint,
                'gen_params': gen_params,
                'from_comfy_metadata': True
            }

        except Exception as e:
            logger.error(f"Error parsing ComfyUI metadata: {e}", exc_info=True)
            return {"error": str(e), "loras": []}
|
||||
174
py/recipes/parsers/meta_format.py
Normal file
174
py/recipes/parsers/meta_format.py
Normal file
@@ -0,0 +1,174 @@
|
||||
"""Parser for meta format (Lora_N Model hash) metadata."""
|
||||
|
||||
import re
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
from ..base import RecipeMetadataParser
|
||||
from ..constants import GEN_PARAM_KEYS
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class MetaFormatParser(RecipeMetadataParser):
    """Parser for images with meta format metadata (Lora_N Model hash format).

    Handles the flat key/value EXIF comment style where each LoRA is written
    as ``Lora_<n> Model name: ..., Lora_<n> Model hash: ..., Lora_<n>
    Strength model: ..., Lora_<n> Strength clip: ...`` alongside the prompt,
    negative prompt, and generation parameters.
    """

    # A single "Lora_<n> Model hash:" occurrence is enough to identify this format.
    METADATA_MARKER = r'Lora_\d+ Model hash:'

    def is_metadata_matching(self, user_comment: str) -> bool:
        """Check if the user comment matches the metadata format"""
        return re.search(self.METADATA_MARKER, user_comment, re.IGNORECASE | re.DOTALL) is not None

    async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]:
        """Parse metadata from images with meta format metadata.

        Args:
            user_comment: raw EXIF user-comment text in the Lora_N format.
            recipe_scanner: optional scanner forwarded to the base-class
                ``populate_lora_from_civitai`` for local resolution.
            civitai_client: optional async client used to look up each LoRA
                by its model hash.

        Returns:
            Dict with 'base_model', 'loras', 'gen_params', 'raw_metadata' and
            the 'from_meta_format' flag; on failure a dict with 'error' and
            an empty 'loras' list.
        """
        try:
            # Extract prompt and negative prompt
            parts = user_comment.split('Negative prompt:', 1)
            prompt = parts[0].strip()

            # Initialize metadata
            metadata = {"prompt": prompt, "loras": []}

            # Extract negative prompt and parameters if available
            if len(parts) > 1:
                negative_and_params = parts[1]

                # Extract negative prompt - everything until the first parameter (usually "Steps:")
                param_start = re.search(r'([A-Za-z]+): ', negative_and_params)
                if param_start:
                    neg_prompt = negative_and_params[:param_start.start()].strip()
                    metadata["negative_prompt"] = neg_prompt
                    params_section = negative_and_params[param_start.start():]
                else:
                    params_section = negative_and_params

                # Extract key-value parameters (Steps, Sampler, Seed, etc.)
                # Keys are lowercased and space-separated words joined with '_'
                # so they line up with GEN_PARAM_KEYS below.
                param_pattern = r'([A-Za-z_0-9 ]+): ([^,]+)'
                params = re.findall(param_pattern, params_section)
                for key, value in params:
                    clean_key = key.strip().lower().replace(' ', '_')
                    metadata[clean_key] = value.strip()

            # Extract LoRA information
            # Pattern to match lora entries: Lora_0 Model name: ArtVador I.safetensors, Lora_0 Model hash: 08f7133a58, etc.
            lora_pattern = r'Lora_(\d+) Model name: ([^,]+), Lora_\1 Model hash: ([^,]+), Lora_\1 Strength model: ([^,]+), Lora_\1 Strength clip: ([^,]+)'
            lora_matches = re.findall(lora_pattern, user_comment)

            # If the regular pattern doesn't match, try a more flexible approach
            # (fields present but not in the strict comma-separated order above)
            if not lora_matches:
                # First find all Lora indices
                lora_indices = set(re.findall(r'Lora_(\d+)', user_comment))

                # For each index, extract the information
                for idx in lora_indices:
                    lora_info = {}

                    # Extract model name
                    name_match = re.search(f'Lora_{idx} Model name: ([^,]+)', user_comment)
                    if name_match:
                        lora_info['name'] = name_match.group(1).strip()

                    # Extract model hash
                    hash_match = re.search(f'Lora_{idx} Model hash: ([^,]+)', user_comment)
                    if hash_match:
                        lora_info['hash'] = hash_match.group(1).strip()

                    # Extract strength model
                    strength_model_match = re.search(f'Lora_{idx} Strength model: ([^,]+)', user_comment)
                    if strength_model_match:
                        lora_info['strength_model'] = float(strength_model_match.group(1).strip())

                    # Extract strength clip
                    strength_clip_match = re.search(f'Lora_{idx} Strength clip: ([^,]+)', user_comment)
                    if strength_clip_match:
                        lora_info['strength_clip'] = float(strength_clip_match.group(1).strip())

                    # Only add if we have at least name and hash
                    # Strengths default to 1.0; stringified so both code paths
                    # yield the same 5-tuple shape as the strict regex above.
                    if 'name' in lora_info and 'hash' in lora_info:
                        lora_matches.append((idx, lora_info['name'], lora_info['hash'],
                                            str(lora_info.get('strength_model', 1.0)),
                                            str(lora_info.get('strength_clip', 1.0))))

            # Process LoRAs
            base_model_counts = {}
            loras = []

            for match in lora_matches:
                if len(match) == 5:  # Regular pattern match
                    idx, name, hash_value, strength_model, strength_clip = match
                else:  # Flexible approach match
                    continue  # Should not happen now

                # Clean up the values
                name = name.strip()
                if name.endswith('.safetensors'):
                    name = name[:-12]  # Remove .safetensors extension (12 chars incl. dot)

                hash_value = hash_value.strip()
                weight = float(strength_model)  # Use model strength as weight

                # Initialize lora entry with default values
                lora_entry = {
                    'name': name,
                    'type': 'lora',
                    'weight': weight,
                    'existsLocally': False,
                    'localPath': None,
                    'file_name': name,
                    'hash': hash_value,
                    'thumbnailUrl': '/loras_static/images/no-preview.png',
                    'baseModel': '',
                    'size': 0,
                    'downloadUrl': '',
                    'isDeleted': False
                }

                # Get info from Civitai by hash if available
                if civitai_client and hash_value:
                    try:
                        civitai_info = await civitai_client.get_model_by_hash(hash_value)
                        # Populate lora entry with Civitai info; this also tallies
                        # base models into base_model_counts for the vote below.
                        populated_entry = await self.populate_lora_from_civitai(
                            lora_entry,
                            civitai_info,
                            recipe_scanner,
                            base_model_counts,
                            hash_value
                        )
                        if populated_entry is None:
                            continue  # Skip invalid LoRA types
                        lora_entry = populated_entry
                    except Exception as e:
                        logger.error(f"Error fetching Civitai info for LoRA hash {hash_value}: {e}")

                loras.append(lora_entry)

            # Extract model information
            model = None
            if 'model' in metadata:
                model = metadata['model']

            # Set base_model to the most common one from civitai_info
            base_model = None
            if base_model_counts:
                base_model = max(base_model_counts.items(), key=lambda x: x[1])[0]

            # Extract generation parameters for recipe metadata
            gen_params = {}
            for key in GEN_PARAM_KEYS:
                if key in metadata:
                    gen_params[key] = metadata.get(key, '')

            # Try to extract size information if available
            if 'width' in metadata and 'height' in metadata:
                gen_params['size'] = f"{metadata['width']}x{metadata['height']}"

            return {
                'base_model': base_model,
                'loras': loras,
                'gen_params': gen_params,
                'raw_metadata': metadata,
                'from_meta_format': True
            }

        except Exception as e:
            logger.error(f"Error parsing meta format metadata: {e}", exc_info=True)
            return {"error": str(e), "loras": []}
|
||||
114
py/recipes/parsers/recipe_format.py
Normal file
114
py/recipes/parsers/recipe_format.py
Normal file
@@ -0,0 +1,114 @@
|
||||
"""Parser for dedicated recipe metadata format."""
|
||||
|
||||
import re
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
from ...config import config
|
||||
from ..base import RecipeMetadataParser
|
||||
from ..constants import GEN_PARAM_KEYS
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class RecipeFormatParser(RecipeMetadataParser):
    """Parser for images with dedicated recipe metadata format.

    These images carry a JSON recipe payload embedded in the EXIF user
    comment after a ``Recipe metadata:`` marker.
    """

    # Regular expression pattern for extracting recipe metadata
    METADATA_MARKER = r'Recipe metadata: (\{.*\})'

    def is_metadata_matching(self, user_comment: str) -> bool:
        """Return True when the user comment carries the recipe-metadata marker."""
        return bool(re.search(self.METADATA_MARKER, user_comment, re.IGNORECASE | re.DOTALL))

    def _extract_recipe_json(self, user_comment: str):
        """Locate and decode the embedded recipe JSON; None when absent or invalid."""
        try:
            marker_hit = re.search(self.METADATA_MARKER, user_comment, re.IGNORECASE | re.DOTALL)
            if not marker_hit:
                return None
            return json.loads(marker_hit.group(1))
        except Exception as e:
            logger.error(f"Error extracting recipe metadata: {e}")
            return None

    async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]:
        """Parse metadata from images with dedicated recipe metadata format.

        Args:
            user_comment: EXIF user-comment text containing the recipe JSON.
            recipe_scanner: optional scanner used to resolve LoRAs locally by hash.
            civitai_client: optional async client used to enrich entries remotely.

        Returns:
            Dict with 'base_model', 'loras', 'gen_params', 'tags', 'title' and
            'from_recipe_metadata'; on failure a dict with 'error' and empty 'loras'.
        """
        try:
            recipe_metadata = self._extract_recipe_json(user_comment)
            if not recipe_metadata:
                return {"error": "No recipe metadata found", "loras": []}

            collected = []
            for lora in recipe_metadata.get('loras', []):
                # Convert recipe lora format to the frontend entry shape.
                entry = {
                    'id': lora.get('modelVersionId', ''),
                    'name': lora.get('modelName', ''),
                    'version': lora.get('modelVersionName', ''),
                    'type': 'lora',
                    'weight': lora.get('strength', 1.0),
                    'file_name': lora.get('file_name', ''),
                    'hash': lora.get('hash', '')
                }

                # Resolve local availability through the scanner's SHA256 index.
                if lora.get('hash') and recipe_scanner:
                    scanner = recipe_scanner._lora_scanner
                    if scanner.has_lora_hash(lora['hash']):
                        cache = await scanner.get_cached_data()
                        wanted = lora['hash'].lower()
                        item = next((c for c in cache.raw_data if c['sha256'].lower() == wanted), None)
                        if item:
                            entry['existsLocally'] = True
                            entry['localPath'] = item['file_path']
                            entry['file_name'] = item['file_name']
                            entry['size'] = item['size']
                            entry['thumbnailUrl'] = config.get_preview_static_url(item['preview_url'])
                    else:
                        entry['existsLocally'] = False
                        entry['localPath'] = None

                # Enrich with Civitai details when a version id and client exist.
                if lora.get('modelVersionId') and civitai_client:
                    try:
                        info_tuple = await civitai_client.get_model_version_info(lora['modelVersionId'])
                        populated = await self.populate_lora_from_civitai(
                            entry,
                            info_tuple,
                            recipe_scanner,
                            None,  # No need to track base model counts
                            lora['hash']
                        )
                        if populated is None:
                            continue  # Skip invalid LoRA types
                        entry = populated
                    except Exception as e:
                        logger.error(f"Error fetching Civitai info for LoRA: {e}")
                        entry['thumbnailUrl'] = '/loras_static/images/no-preview.png'

                collected.append(entry)

            logger.info(f"Found {len(collected)} loras in recipe metadata")

            # Keep only the generation parameters this project recognizes.
            filtered_gen_params = {
                key: value
                for key, value in recipe_metadata.get('gen_params', {}).items()
                if key in GEN_PARAM_KEYS
            }

            return {
                'base_model': recipe_metadata.get('base_model', ''),
                'loras': collected,
                'gen_params': filtered_gen_params,
                'tags': recipe_metadata.get('tags', []),
                'title': recipe_metadata.get('title', ''),
                'from_recipe_metadata': True
            }

        except Exception as e:
            logger.error(f"Error parsing recipe format metadata: {e}", exc_info=True)
            return {"error": str(e), "loras": []}
|
||||
@@ -386,10 +386,10 @@ class ApiRoutes:
|
||||
versions = response.get('modelVersions', [])
|
||||
model_type = response.get('type', '')
|
||||
|
||||
# Check model type - should be LORA
|
||||
if model_type.lower() != 'lora':
|
||||
# Check model type - should be LORA or LoCon
|
||||
if model_type.lower() not in ['lora', 'locon']:
|
||||
return web.json_response({
|
||||
'error': f"Model type mismatch. Expected LORA, got {model_type}"
|
||||
'error': f"Model type mismatch. Expected LORA or LoCon, got {model_type}"
|
||||
}, status=400)
|
||||
|
||||
# Check local availability for each version
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import os
|
||||
import time
|
||||
import base64
|
||||
import numpy as np
|
||||
from PIL import Image
|
||||
import torch
|
||||
@@ -12,7 +13,7 @@ import json
|
||||
import asyncio
|
||||
import sys
|
||||
from ..utils.exif_utils import ExifUtils
|
||||
from ..utils.recipe_parsers import RecipeParserFactory
|
||||
from ..recipes import RecipeParserFactory
|
||||
from ..utils.constants import CARD_PREVIEW_WIDTH
|
||||
|
||||
from ..config import config
|
||||
@@ -56,6 +57,7 @@ class RecipeRoutes:
|
||||
app.router.add_get('/api/recipes', routes.get_recipes)
|
||||
app.router.add_get('/api/recipe/{recipe_id}', routes.get_recipe_detail)
|
||||
app.router.add_post('/api/recipes/analyze-image', routes.analyze_recipe_image)
|
||||
app.router.add_post('/api/recipes/analyze-local-image', routes.analyze_local_image)
|
||||
app.router.add_post('/api/recipes/save', routes.save_recipe)
|
||||
app.router.add_delete('/api/recipe/{recipe_id}', routes.delete_recipe)
|
||||
|
||||
@@ -70,12 +72,18 @@ class RecipeRoutes:
|
||||
# Add new endpoint for getting recipe syntax
|
||||
app.router.add_get('/api/recipe/{recipe_id}/syntax', routes.get_recipe_syntax)
|
||||
|
||||
# Add new endpoint for updating recipe metadata (name and tags)
|
||||
# Add new endpoint for updating recipe metadata (name, tags and source_path)
|
||||
app.router.add_put('/api/recipe/{recipe_id}/update', routes.update_recipe)
|
||||
|
||||
# Add new endpoint for reconnecting deleted LoRAs
|
||||
app.router.add_post('/api/recipe/lora/reconnect', routes.reconnect_lora)
|
||||
|
||||
# Add new endpoint for finding duplicate recipes
|
||||
app.router.add_get('/api/recipes/find-duplicates', routes.find_duplicates)
|
||||
|
||||
# Add new endpoint for bulk deletion of recipes
|
||||
app.router.add_post('/api/recipes/bulk-delete', routes.bulk_delete)
|
||||
|
||||
# Start cache initialization
|
||||
app.on_startup.append(routes._init_cache)
|
||||
|
||||
@@ -300,7 +308,6 @@ class RecipeRoutes:
|
||||
|
||||
# For URL mode, include the image data as base64
|
||||
if is_url_mode and temp_path:
|
||||
import base64
|
||||
with open(temp_path, "rb") as image_file:
|
||||
result["image_base64"] = base64.b64encode(image_file.read()).decode('utf-8')
|
||||
|
||||
@@ -317,7 +324,6 @@ class RecipeRoutes:
|
||||
|
||||
# For URL mode, include the image data as base64
|
||||
if is_url_mode and temp_path:
|
||||
import base64
|
||||
with open(temp_path, "rb") as image_file:
|
||||
result["image_base64"] = base64.b64encode(image_file.read()).decode('utf-8')
|
||||
|
||||
@@ -332,7 +338,6 @@ class RecipeRoutes:
|
||||
|
||||
# For URL mode, include the image data as base64
|
||||
if is_url_mode and temp_path:
|
||||
import base64
|
||||
with open(temp_path, "rb") as image_file:
|
||||
result["image_base64"] = base64.b64encode(image_file.read()).decode('utf-8')
|
||||
|
||||
@@ -340,6 +345,21 @@ class RecipeRoutes:
|
||||
if "error" in result and not result.get("loras"):
|
||||
return web.json_response(result, status=200)
|
||||
|
||||
# Calculate fingerprint from parsed loras
|
||||
from ..utils.utils import calculate_recipe_fingerprint
|
||||
fingerprint = calculate_recipe_fingerprint(result.get("loras", []))
|
||||
|
||||
# Add fingerprint to result
|
||||
result["fingerprint"] = fingerprint
|
||||
|
||||
# Find matching recipes with the same fingerprint
|
||||
matching_recipes = []
|
||||
if fingerprint:
|
||||
matching_recipes = await self.recipe_scanner.find_recipes_by_fingerprint(fingerprint)
|
||||
|
||||
# Add matching recipes to result
|
||||
result["matching_recipes"] = matching_recipes
|
||||
|
||||
return web.json_response(result)
|
||||
|
||||
except Exception as e:
|
||||
@@ -355,7 +375,100 @@ class RecipeRoutes:
|
||||
os.unlink(temp_path)
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting temporary file: {e}")
|
||||
|
||||
    async def analyze_local_image(self, request: web.Request) -> web.Response:
        """Analyze a local image file for recipe metadata.

        Expects a JSON body with a 'path' key pointing at an image on the
        server's filesystem. Extracts embedded metadata, dispatches it to the
        matching recipe parser, and returns the parsed recipe along with the
        image encoded as base64 so the client can preview it.

        Returns:
            JSON response containing the parser result plus 'image_base64',
            'fingerprint', and 'matching_recipes'. Error payloads always
            include an empty 'loras' list so the client UI does not break.
        """
        try:
            # Ensure services are initialized
            await self.init_services()

            # Get JSON data from request
            data = await request.json()
            file_path = data.get('path')

            if not file_path:
                return web.json_response({
                    'error': 'No file path provided',
                    'loras': []
                }, status=400)

            # Normalize file path for cross-platform compatibility
            # (also strips surrounding quotes pasted from shells/file managers)
            file_path = os.path.normpath(file_path.strip('"').strip("'"))

            # Validate that the file exists
            if not os.path.isfile(file_path):
                return web.json_response({
                    'error': 'File not found',
                    'loras': []
                }, status=404)

            # Extract metadata from the image using ExifUtils
            metadata = ExifUtils.extract_image_metadata(file_path)

            # If no metadata found, return error
            # (status 200 rather than 4xx so the client can still show the image)
            if not metadata:
                # Get base64 image data
                with open(file_path, "rb") as image_file:
                    image_base64 = base64.b64encode(image_file.read()).decode('utf-8')

                return web.json_response({
                    "error": "No metadata found in this image",
                    "loras": [],  # Return empty loras array to prevent client-side errors
                    "image_base64": image_base64
                }, status=200)

            # Use the parser factory to get the appropriate parser
            parser = RecipeParserFactory.create_parser(metadata)

            if parser is None:
                # Get base64 image data
                with open(file_path, "rb") as image_file:
                    image_base64 = base64.b64encode(image_file.read()).decode('utf-8')

                return web.json_response({
                    "error": "No parser found for this image",
                    "loras": [],  # Return empty loras array to prevent client-side errors
                    "image_base64": image_base64
                }, status=200)

            # Parse the metadata
            result = await parser.parse_metadata(
                metadata,
                recipe_scanner=self.recipe_scanner,
                civitai_client=self.civitai_client
            )

            # Add base64 image data to result
            with open(file_path, "rb") as image_file:
                result["image_base64"] = base64.b64encode(image_file.read()).decode('utf-8')

            # Check for errors
            if "error" in result and not result.get("loras"):
                return web.json_response(result, status=200)

            # Calculate fingerprint from parsed loras
            from ..utils.utils import calculate_recipe_fingerprint
            fingerprint = calculate_recipe_fingerprint(result.get("loras", []))

            # Add fingerprint to result
            result["fingerprint"] = fingerprint

            # Find matching recipes with the same fingerprint (duplicate detection)
            matching_recipes = []
            if fingerprint:
                matching_recipes = await self.recipe_scanner.find_recipes_by_fingerprint(fingerprint)

            # Add matching recipes to result
            result["matching_recipes"] = matching_recipes

            return web.json_response(result)

        except Exception as e:
            logger.error(f"Error analyzing local image: {e}", exc_info=True)
            return web.json_response({
                'error': str(e),
                'loras': []  # Return empty loras array to prevent client-side errors
            }, status=500)
|
||||
|
||||
async def save_recipe(self, request: web.Request) -> web.Response:
|
||||
"""Save a recipe to the recipes folder"""
|
||||
@@ -425,7 +538,6 @@ class RecipeRoutes:
|
||||
if not image:
|
||||
if image_base64:
|
||||
# Convert base64 to binary
|
||||
import base64
|
||||
try:
|
||||
# Remove potential data URL prefix
|
||||
if ',' in image_base64:
|
||||
@@ -474,7 +586,7 @@ class RecipeRoutes:
|
||||
with open(image_path, 'wb') as f:
|
||||
f.write(optimized_image)
|
||||
|
||||
# Create the recipe JSON
|
||||
# Create the recipe data structure
|
||||
current_time = time.time()
|
||||
|
||||
# Format loras data according to the recipe.json format
|
||||
@@ -514,6 +626,10 @@ class RecipeRoutes:
|
||||
"clip_skip": raw_metadata.get("clip_skip", "")
|
||||
}
|
||||
|
||||
# Calculate recipe fingerprint
|
||||
from ..utils.utils import calculate_recipe_fingerprint
|
||||
fingerprint = calculate_recipe_fingerprint(loras_data)
|
||||
|
||||
# Create the recipe data structure
|
||||
recipe_data = {
|
||||
"id": recipe_id,
|
||||
@@ -523,13 +639,18 @@ class RecipeRoutes:
|
||||
"created_date": current_time,
|
||||
"base_model": metadata.get("base_model", ""),
|
||||
"loras": loras_data,
|
||||
"gen_params": gen_params
|
||||
"gen_params": gen_params,
|
||||
"fingerprint": fingerprint
|
||||
}
|
||||
|
||||
# Add tags if provided
|
||||
if tags:
|
||||
recipe_data["tags"] = tags
|
||||
|
||||
# Add source_path if provided in metadata
|
||||
if metadata.get("source_path"):
|
||||
recipe_data["source_path"] = metadata.get("source_path")
|
||||
|
||||
# Save the recipe JSON
|
||||
json_filename = f"{recipe_id}.recipe.json"
|
||||
json_path = os.path.join(recipes_dir, json_filename)
|
||||
@@ -539,6 +660,14 @@ class RecipeRoutes:
|
||||
# Add recipe metadata to the image
|
||||
ExifUtils.append_recipe_metadata(image_path, recipe_data)
|
||||
|
||||
# Check for duplicates
|
||||
matching_recipes = []
|
||||
if fingerprint:
|
||||
matching_recipes = await self.recipe_scanner.find_recipes_by_fingerprint(fingerprint)
|
||||
# Remove current recipe from matches
|
||||
if recipe_id in matching_recipes:
|
||||
matching_recipes.remove(recipe_id)
|
||||
|
||||
# Simplified cache update approach
|
||||
# Instead of trying to update the cache directly, just set it to None
|
||||
# to force a refresh on the next get_cached_data call
|
||||
@@ -554,7 +683,8 @@ class RecipeRoutes:
|
||||
'success': True,
|
||||
'recipe_id': recipe_id,
|
||||
'image_path': image_path,
|
||||
'json_path': json_path
|
||||
'json_path': json_path,
|
||||
'matching_recipes': matching_recipes
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
@@ -1089,9 +1219,9 @@ class RecipeRoutes:
|
||||
data = await request.json()
|
||||
|
||||
# Validate required fields
|
||||
if 'title' not in data and 'tags' not in data:
|
||||
if 'title' not in data and 'tags' not in data and 'source_path' not in data:
|
||||
return web.json_response({
|
||||
"error": "At least one field to update must be provided (title or tags)"
|
||||
"error": "At least one field to update must be provided (title or tags or source_path)"
|
||||
}, status=400)
|
||||
|
||||
# Use the recipe scanner's update method
|
||||
@@ -1186,6 +1316,10 @@ class RecipeRoutes:
|
||||
|
||||
if not found:
|
||||
return web.json_response({"error": "Could not find matching deleted LoRA in recipe"}, status=404)
|
||||
|
||||
# Recalculate recipe fingerprint after updating LoRA
|
||||
from ..utils.utils import calculate_recipe_fingerprint
|
||||
recipe_data['fingerprint'] = calculate_recipe_fingerprint(recipe_data.get('loras', []))
|
||||
|
||||
# Save updated recipe
|
||||
with open(recipe_path, 'w', encoding='utf-8') as f:
|
||||
@@ -1201,6 +1335,8 @@ class RecipeRoutes:
|
||||
if cache_item.get('id') == recipe_id:
|
||||
# Replace loras array with updated version
|
||||
cache_item['loras'] = recipe_data['loras']
|
||||
# Update fingerprint in cache
|
||||
cache_item['fingerprint'] = recipe_data['fingerprint']
|
||||
|
||||
# Resort the cache
|
||||
asyncio.create_task(scanner._cache.resort())
|
||||
@@ -1211,11 +1347,20 @@ class RecipeRoutes:
|
||||
if image_path and os.path.exists(image_path):
|
||||
from ..utils.exif_utils import ExifUtils
|
||||
ExifUtils.append_recipe_metadata(image_path, recipe_data)
|
||||
|
||||
# Find other recipes with the same fingerprint
|
||||
matching_recipes = []
|
||||
if 'fingerprint' in recipe_data:
|
||||
matching_recipes = await scanner.find_recipes_by_fingerprint(recipe_data['fingerprint'])
|
||||
# Remove current recipe from matches
|
||||
if recipe_id in matching_recipes:
|
||||
matching_recipes.remove(recipe_id)
|
||||
|
||||
return web.json_response({
|
||||
"success": True,
|
||||
"recipe_id": recipe_id,
|
||||
"updated_lora": updated_lora
|
||||
"updated_lora": updated_lora,
|
||||
"matching_recipes": matching_recipes
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
@@ -1291,3 +1436,150 @@ class RecipeRoutes:
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}, status=500)
|
||||
|
||||
async def find_duplicates(self, request: web.Request) -> web.Response:
|
||||
"""Find all duplicate recipes based on fingerprints"""
|
||||
try:
|
||||
# Ensure services are initialized
|
||||
await self.init_services()
|
||||
|
||||
# Get all duplicate recipes
|
||||
duplicate_groups = await self.recipe_scanner.find_all_duplicate_recipes()
|
||||
|
||||
# Create response data with additional recipe information
|
||||
response_data = []
|
||||
|
||||
for fingerprint, recipe_ids in duplicate_groups.items():
|
||||
# Skip groups with only one recipe (not duplicates)
|
||||
if len(recipe_ids) <= 1:
|
||||
continue
|
||||
|
||||
# Get recipe details for each recipe in the group
|
||||
recipes = []
|
||||
for recipe_id in recipe_ids:
|
||||
recipe = await self.recipe_scanner.get_recipe_by_id(recipe_id)
|
||||
if recipe:
|
||||
# Add only needed fields to keep response size manageable
|
||||
recipes.append({
|
||||
'id': recipe.get('id'),
|
||||
'title': recipe.get('title'),
|
||||
'file_url': recipe.get('file_url') or self._format_recipe_file_url(recipe.get('file_path', '')),
|
||||
'modified': recipe.get('modified'),
|
||||
'created_date': recipe.get('created_date'),
|
||||
'lora_count': len(recipe.get('loras', [])),
|
||||
})
|
||||
|
||||
# Only include groups with at least 2 valid recipes
|
||||
if len(recipes) >= 2:
|
||||
# Sort recipes by modified date (newest first)
|
||||
recipes.sort(key=lambda x: x.get('modified', 0), reverse=True)
|
||||
|
||||
response_data.append({
|
||||
'fingerprint': fingerprint,
|
||||
'count': len(recipes),
|
||||
'recipes': recipes
|
||||
})
|
||||
|
||||
# Sort groups by count (highest first)
|
||||
response_data.sort(key=lambda x: x['count'], reverse=True)
|
||||
|
||||
return web.json_response({
|
||||
'success': True,
|
||||
'duplicate_groups': response_data
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error finding duplicate recipes: {e}", exc_info=True)
|
||||
return web.json_response({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}, status=500)
|
||||
|
||||
async def bulk_delete(self, request: web.Request) -> web.Response:
|
||||
"""Delete multiple recipes by ID"""
|
||||
try:
|
||||
# Ensure services are initialized
|
||||
await self.init_services()
|
||||
|
||||
# Parse request data
|
||||
data = await request.json()
|
||||
recipe_ids = data.get('recipe_ids', [])
|
||||
|
||||
if not recipe_ids:
|
||||
return web.json_response({
|
||||
'success': False,
|
||||
'error': 'No recipe IDs provided'
|
||||
}, status=400)
|
||||
|
||||
# Get recipes directory
|
||||
recipes_dir = self.recipe_scanner.recipes_dir
|
||||
if not recipes_dir or not os.path.exists(recipes_dir):
|
||||
return web.json_response({
|
||||
'success': False,
|
||||
'error': 'Recipes directory not found'
|
||||
}, status=404)
|
||||
|
||||
# Track deleted and failed recipes
|
||||
deleted_recipes = []
|
||||
failed_recipes = []
|
||||
|
||||
# Process each recipe ID
|
||||
for recipe_id in recipe_ids:
|
||||
# Find recipe JSON file
|
||||
recipe_json_path = os.path.join(recipes_dir, f"{recipe_id}.recipe.json")
|
||||
|
||||
if not os.path.exists(recipe_json_path):
|
||||
failed_recipes.append({
|
||||
'id': recipe_id,
|
||||
'reason': 'Recipe not found'
|
||||
})
|
||||
continue
|
||||
|
||||
try:
|
||||
# Load recipe data to get image path
|
||||
with open(recipe_json_path, 'r', encoding='utf-8') as f:
|
||||
recipe_data = json.load(f)
|
||||
|
||||
# Get image path
|
||||
image_path = recipe_data.get('file_path')
|
||||
|
||||
# Delete recipe JSON file
|
||||
os.remove(recipe_json_path)
|
||||
|
||||
# Delete recipe image if it exists
|
||||
if image_path and os.path.exists(image_path):
|
||||
os.remove(image_path)
|
||||
|
||||
deleted_recipes.append(recipe_id)
|
||||
|
||||
except Exception as e:
|
||||
failed_recipes.append({
|
||||
'id': recipe_id,
|
||||
'reason': str(e)
|
||||
})
|
||||
|
||||
# Update cache if any recipes were deleted
|
||||
if deleted_recipes and self.recipe_scanner._cache is not None:
|
||||
# Remove deleted recipes from raw_data
|
||||
self.recipe_scanner._cache.raw_data = [
|
||||
r for r in self.recipe_scanner._cache.raw_data
|
||||
if r.get('id') not in deleted_recipes
|
||||
]
|
||||
# Resort the cache
|
||||
asyncio.create_task(self.recipe_scanner._cache.resort())
|
||||
logger.info(f"Removed {len(deleted_recipes)} recipes from cache")
|
||||
|
||||
return web.json_response({
|
||||
'success': True,
|
||||
'deleted': deleted_recipes,
|
||||
'failed': failed_recipes,
|
||||
'total_deleted': len(deleted_recipes),
|
||||
'total_failed': len(failed_recipes)
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error performing bulk delete: {e}", exc_info=True)
|
||||
return web.json_response({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}, status=500)
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
from aiohttp import web
|
||||
from server import PromptServer
|
||||
from .nodes.utils import get_lora_info
|
||||
|
||||
@PromptServer.instance.routes.post("/loramanager/get_trigger_words")
|
||||
async def get_trigger_words(request):
|
||||
json_data = await request.json()
|
||||
lora_names = json_data.get("lora_names", [])
|
||||
node_ids = json_data.get("node_ids", [])
|
||||
|
||||
all_trigger_words = []
|
||||
for lora_name in lora_names:
|
||||
_, trigger_words = await get_lora_info(lora_name)
|
||||
all_trigger_words.extend(trigger_words)
|
||||
|
||||
# Format the trigger words
|
||||
trigger_words_text = ",, ".join(all_trigger_words) if all_trigger_words else ""
|
||||
|
||||
# Send update to all connected trigger word toggle nodes
|
||||
for node_id in node_ids:
|
||||
PromptServer.instance.send_sync("trigger_word_update", {
|
||||
"id": node_id,
|
||||
"message": trigger_words_text
|
||||
})
|
||||
|
||||
return web.json_response({"success": True})
|
||||
@@ -136,15 +136,9 @@ class DownloadManager:
|
||||
# 3. Prepare download
|
||||
file_name = file_info['name']
|
||||
save_path = os.path.join(save_dir, file_name)
|
||||
file_size = file_info.get('sizeKB', 0) * 1024
|
||||
|
||||
# 4. Notify file monitor - use normalized path and file size
|
||||
file_monitor = await self._get_lora_monitor() if model_type == "lora" else await self._get_checkpoint_monitor()
|
||||
if file_monitor and file_monitor.handler:
|
||||
file_monitor.handler.add_ignore_path(
|
||||
save_path.replace(os.sep, '/'),
|
||||
file_size
|
||||
)
|
||||
# file monitor is despreted, so we don't need to use it
|
||||
|
||||
# 5. Prepare metadata based on model type
|
||||
if model_type == "checkpoint":
|
||||
|
||||
@@ -2,6 +2,7 @@ import asyncio
|
||||
from typing import List, Dict
|
||||
from dataclasses import dataclass
|
||||
from operator import itemgetter
|
||||
from natsort import natsorted
|
||||
|
||||
@dataclass
|
||||
class LoraCache:
|
||||
@@ -17,7 +18,7 @@ class LoraCache:
|
||||
async def resort(self, name_only: bool = False):
|
||||
"""Resort all cached data views"""
|
||||
async with self._lock:
|
||||
self.sorted_by_name = sorted(
|
||||
self.sorted_by_name = natsorted(
|
||||
self.raw_data,
|
||||
key=lambda x: x['model_name'].lower() # Case-insensitive sort
|
||||
)
|
||||
|
||||
@@ -2,6 +2,7 @@ import asyncio
|
||||
from typing import List, Dict
|
||||
from dataclasses import dataclass
|
||||
from operator import itemgetter
|
||||
from natsort import natsorted
|
||||
|
||||
@dataclass
|
||||
class ModelCache:
|
||||
@@ -17,7 +18,7 @@ class ModelCache:
|
||||
async def resort(self, name_only: bool = False):
|
||||
"""Resort all cached data views"""
|
||||
async with self._lock:
|
||||
self.sorted_by_name = sorted(
|
||||
self.sorted_by_name = natsorted(
|
||||
self.raw_data,
|
||||
key=lambda x: x['model_name'].lower() # Case-insensitive sort
|
||||
)
|
||||
|
||||
@@ -2,6 +2,7 @@ import asyncio
|
||||
from typing import List, Dict
|
||||
from dataclasses import dataclass
|
||||
from operator import itemgetter
|
||||
from natsort import natsorted
|
||||
|
||||
@dataclass
|
||||
class RecipeCache:
|
||||
@@ -16,7 +17,7 @@ class RecipeCache:
|
||||
async def resort(self, name_only: bool = False):
|
||||
"""Resort all cached data views"""
|
||||
async with self._lock:
|
||||
self.sorted_by_name = sorted(
|
||||
self.sorted_by_name = natsorted(
|
||||
self.raw_data,
|
||||
key=lambda x: x.get('title', '').lower() # Case-insensitive sort
|
||||
)
|
||||
|
||||
@@ -9,6 +9,7 @@ from .recipe_cache import RecipeCache
|
||||
from .service_registry import ServiceRegistry
|
||||
from .lora_scanner import LoraScanner
|
||||
from ..utils.utils import fuzzy_match
|
||||
from natsort import natsorted
|
||||
import sys
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -164,7 +165,7 @@ class RecipeScanner:
|
||||
if hasattr(self._cache, "resort"):
|
||||
try:
|
||||
# Sort by name
|
||||
self._cache.sorted_by_name = sorted(
|
||||
self._cache.sorted_by_name = natsorted(
|
||||
self._cache.raw_data,
|
||||
key=lambda x: x.get('title', '').lower()
|
||||
)
|
||||
@@ -321,6 +322,20 @@ class RecipeScanner:
|
||||
|
||||
# Update lora information with local paths and availability
|
||||
await self._update_lora_information(recipe_data)
|
||||
|
||||
# Calculate and update fingerprint if missing
|
||||
if 'loras' in recipe_data and 'fingerprint' not in recipe_data:
|
||||
from ..utils.utils import calculate_recipe_fingerprint
|
||||
fingerprint = calculate_recipe_fingerprint(recipe_data['loras'])
|
||||
recipe_data['fingerprint'] = fingerprint
|
||||
|
||||
# Write updated recipe data back to file
|
||||
try:
|
||||
with open(recipe_path, 'w', encoding='utf-8') as f:
|
||||
json.dump(recipe_data, f, indent=4, ensure_ascii=False)
|
||||
logger.info(f"Added fingerprint to recipe: {recipe_path}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error writing updated recipe with fingerprint: {e}")
|
||||
|
||||
return recipe_data
|
||||
except Exception as e:
|
||||
@@ -801,3 +816,60 @@ class RecipeScanner:
|
||||
logger.info(f"Resorted recipe cache after updating {cache_updated_count} items")
|
||||
|
||||
return file_updated_count, cache_updated_count
|
||||
|
||||
async def find_recipes_by_fingerprint(self, fingerprint: str) -> list:
|
||||
"""Find recipes with a matching fingerprint
|
||||
|
||||
Args:
|
||||
fingerprint: The recipe fingerprint to search for
|
||||
|
||||
Returns:
|
||||
List of recipe details that match the fingerprint
|
||||
"""
|
||||
if not fingerprint:
|
||||
return []
|
||||
|
||||
# Get all recipes from cache
|
||||
cache = await self.get_cached_data()
|
||||
|
||||
# Find recipes with matching fingerprint
|
||||
matching_recipes = []
|
||||
for recipe in cache.raw_data:
|
||||
if recipe.get('fingerprint') == fingerprint:
|
||||
recipe_details = {
|
||||
'id': recipe.get('id'),
|
||||
'title': recipe.get('title'),
|
||||
'file_url': self._format_file_url(recipe.get('file_path')),
|
||||
'modified': recipe.get('modified'),
|
||||
'created_date': recipe.get('created_date'),
|
||||
'lora_count': len(recipe.get('loras', []))
|
||||
}
|
||||
matching_recipes.append(recipe_details)
|
||||
|
||||
return matching_recipes
|
||||
|
||||
async def find_all_duplicate_recipes(self) -> dict:
|
||||
"""Find all recipe duplicates based on fingerprints
|
||||
|
||||
Returns:
|
||||
Dictionary where keys are fingerprints and values are lists of recipe IDs
|
||||
"""
|
||||
# Get all recipes from cache
|
||||
cache = await self.get_cached_data()
|
||||
|
||||
# Group recipes by fingerprint
|
||||
fingerprint_groups = {}
|
||||
for recipe in cache.raw_data:
|
||||
fingerprint = recipe.get('fingerprint')
|
||||
if not fingerprint:
|
||||
continue
|
||||
|
||||
if fingerprint not in fingerprint_groups:
|
||||
fingerprint_groups[fingerprint] = []
|
||||
|
||||
fingerprint_groups[fingerprint].append(recipe.get('id'))
|
||||
|
||||
# Filter to only include groups with more than one recipe
|
||||
duplicate_groups = {k: v for k, v in fingerprint_groups.items() if len(v) > 1}
|
||||
|
||||
return duplicate_groups
|
||||
|
||||
@@ -233,6 +233,17 @@ async def load_metadata(file_path: str, model_class: Type[BaseModelMetadata] = L
|
||||
data['usage_tips'] = "{}"
|
||||
needs_update = True
|
||||
|
||||
# Update preview_nsfw_level if needed
|
||||
civitai_data = data.get('civitai', {})
|
||||
civitai_images = civitai_data.get('images', []) if civitai_data else []
|
||||
if (data.get('preview_url') and
|
||||
data.get('preview_nsfw_level', 0) == 0 and
|
||||
civitai_images and
|
||||
civitai_images[0].get('nsfwLevel', 0) != 0):
|
||||
data['preview_nsfw_level'] = civitai_images[0]['nsfwLevel']
|
||||
# TODO: write to metadata file
|
||||
# needs_update = True
|
||||
|
||||
if needs_update:
|
||||
with open(metadata_path, 'w', encoding='utf-8') as f:
|
||||
json.dump(data, f, indent=2, ensure_ascii=False)
|
||||
|
||||
@@ -2,6 +2,9 @@ from safetensors import safe_open
|
||||
from typing import Dict
|
||||
from .model_utils import determine_base_model
|
||||
import os
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
async def extract_lora_metadata(file_path: str) -> Dict:
|
||||
"""Extract essential metadata from safetensors file"""
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -114,3 +114,49 @@ def fuzzy_match(text: str, pattern: str, threshold: float = 0.7) -> bool:
|
||||
|
||||
# All words found either as substrings or fuzzy matches
|
||||
return True
|
||||
|
||||
def calculate_recipe_fingerprint(loras):
|
||||
"""
|
||||
Calculate a unique fingerprint for a recipe based on its LoRAs.
|
||||
|
||||
The fingerprint is created by sorting LoRA hashes, filtering invalid entries,
|
||||
normalizing strength values to 2 decimal places, and joining in format:
|
||||
hash1:strength1|hash2:strength2|...
|
||||
|
||||
Args:
|
||||
loras (list): List of LoRA dictionaries with hash and strength values
|
||||
|
||||
Returns:
|
||||
str: The calculated fingerprint
|
||||
"""
|
||||
if not loras:
|
||||
return ""
|
||||
|
||||
# Filter valid entries and extract hash and strength
|
||||
valid_loras = []
|
||||
for lora in loras:
|
||||
# Skip excluded loras
|
||||
if lora.get("exclude", False):
|
||||
continue
|
||||
|
||||
# Get the hash - use modelVersionId as fallback if hash is empty
|
||||
hash_value = lora.get("hash", "").lower()
|
||||
if not hash_value and lora.get("isDeleted", False) and lora.get("modelVersionId"):
|
||||
hash_value = lora.get("modelVersionId")
|
||||
|
||||
# Skip entries without a valid hash
|
||||
if not hash_value:
|
||||
continue
|
||||
|
||||
# Normalize strength to 2 decimal places (check both strength and weight fields)
|
||||
strength = round(float(lora.get("strength", lora.get("weight", 1.0))), 2)
|
||||
|
||||
valid_loras.append((hash_value, strength))
|
||||
|
||||
# Sort by hash
|
||||
valid_loras.sort()
|
||||
|
||||
# Join in format hash1:strength1|hash2:strength2|...
|
||||
fingerprint = "|".join([f"{hash_value}:{strength}" for hash_value, strength in valid_loras])
|
||||
|
||||
return fingerprint
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
"""
|
||||
ComfyUI workflow parsing module to extract generation parameters
|
||||
"""
|
||||
@@ -1,58 +0,0 @@
|
||||
"""
|
||||
Command-line interface for the ComfyUI workflow parser
|
||||
"""
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import logging
|
||||
import sys
|
||||
from .parser import parse_workflow
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
|
||||
handlers=[logging.StreamHandler()]
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def main():
|
||||
"""Entry point for the CLI"""
|
||||
parser = argparse.ArgumentParser(description='Parse ComfyUI workflow files')
|
||||
parser.add_argument('input', help='Input workflow JSON file path')
|
||||
parser.add_argument('-o', '--output', help='Output JSON file path')
|
||||
parser.add_argument('-p', '--pretty', action='store_true', help='Pretty print JSON output')
|
||||
parser.add_argument('--debug', action='store_true', help='Enable debug logging')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# Set logging level
|
||||
if args.debug:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
|
||||
# Validate input file
|
||||
if not os.path.isfile(args.input):
|
||||
logger.error(f"Input file not found: {args.input}")
|
||||
sys.exit(1)
|
||||
|
||||
# Parse workflow
|
||||
try:
|
||||
result = parse_workflow(args.input, args.output)
|
||||
|
||||
# Print result to console if output file not specified
|
||||
if not args.output:
|
||||
if args.pretty:
|
||||
print(json.dumps(result, indent=4))
|
||||
else:
|
||||
print(json.dumps(result))
|
||||
else:
|
||||
logger.info(f"Output saved to: {args.output}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error parsing workflow: {e}")
|
||||
if args.debug:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1,3 +0,0 @@
|
||||
"""
|
||||
Extension directory for custom node mappers
|
||||
"""
|
||||
@@ -1,285 +0,0 @@
|
||||
"""
|
||||
ComfyUI Core nodes mappers extension for workflow parsing
|
||||
"""
|
||||
import logging
|
||||
from typing import Dict, Any, List
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# =============================================================================
|
||||
# Transform Functions
|
||||
# =============================================================================
|
||||
|
||||
def transform_random_noise(inputs: Dict) -> Dict:
|
||||
"""Transform function for RandomNoise node"""
|
||||
return {"seed": str(inputs.get("noise_seed", ""))}
|
||||
|
||||
def transform_ksampler_select(inputs: Dict) -> Dict:
|
||||
"""Transform function for KSamplerSelect node"""
|
||||
return {"sampler": inputs.get("sampler_name", "")}
|
||||
|
||||
def transform_basic_scheduler(inputs: Dict) -> Dict:
|
||||
"""Transform function for BasicScheduler node"""
|
||||
result = {
|
||||
"scheduler": inputs.get("scheduler", ""),
|
||||
"denoise": str(inputs.get("denoise", "1.0"))
|
||||
}
|
||||
|
||||
# Get steps from inputs or steps input
|
||||
if "steps" in inputs:
|
||||
if isinstance(inputs["steps"], str):
|
||||
result["steps"] = inputs["steps"]
|
||||
elif isinstance(inputs["steps"], dict) and "value" in inputs["steps"]:
|
||||
result["steps"] = str(inputs["steps"]["value"])
|
||||
else:
|
||||
result["steps"] = str(inputs["steps"])
|
||||
|
||||
return result
|
||||
|
||||
def transform_basic_guider(inputs: Dict) -> Dict:
|
||||
"""Transform function for BasicGuider node"""
|
||||
result = {}
|
||||
|
||||
# Process conditioning
|
||||
if "conditioning" in inputs:
|
||||
if isinstance(inputs["conditioning"], str):
|
||||
result["prompt"] = inputs["conditioning"]
|
||||
elif isinstance(inputs["conditioning"], dict):
|
||||
result["conditioning"] = inputs["conditioning"]
|
||||
|
||||
# Get model information if needed
|
||||
if "model" in inputs and isinstance(inputs["model"], dict):
|
||||
result["model"] = inputs["model"]
|
||||
|
||||
return result
|
||||
|
||||
def transform_model_sampling_flux(inputs: Dict) -> Dict:
|
||||
"""Transform function for ModelSamplingFlux - mostly a pass-through node"""
|
||||
# This node is primarily used for routing, so we mostly pass through values
|
||||
|
||||
return inputs["model"]
|
||||
|
||||
def transform_sampler_custom_advanced(inputs: Dict) -> Dict:
|
||||
"""Transform function for SamplerCustomAdvanced node"""
|
||||
result = {}
|
||||
|
||||
# Extract seed from noise
|
||||
if "noise" in inputs and isinstance(inputs["noise"], dict):
|
||||
result["seed"] = str(inputs["noise"].get("seed", ""))
|
||||
|
||||
# Extract sampler info
|
||||
if "sampler" in inputs and isinstance(inputs["sampler"], dict):
|
||||
sampler = inputs["sampler"].get("sampler", "")
|
||||
if sampler:
|
||||
result["sampler"] = sampler
|
||||
|
||||
# Extract scheduler, steps, denoise from sigmas
|
||||
if "sigmas" in inputs and isinstance(inputs["sigmas"], dict):
|
||||
sigmas = inputs["sigmas"]
|
||||
result["scheduler"] = sigmas.get("scheduler", "")
|
||||
result["steps"] = str(sigmas.get("steps", ""))
|
||||
result["denoise"] = str(sigmas.get("denoise", "1.0"))
|
||||
|
||||
# Extract prompt and guidance from guider
|
||||
if "guider" in inputs and isinstance(inputs["guider"], dict):
|
||||
guider = inputs["guider"]
|
||||
|
||||
# Get prompt from conditioning
|
||||
if "conditioning" in guider and isinstance(guider["conditioning"], str):
|
||||
result["prompt"] = guider["conditioning"]
|
||||
elif "conditioning" in guider and isinstance(guider["conditioning"], dict):
|
||||
result["guidance"] = guider["conditioning"].get("guidance", "")
|
||||
result["prompt"] = guider["conditioning"].get("prompt", "")
|
||||
|
||||
if "model" in guider and isinstance(guider["model"], dict):
|
||||
result["checkpoint"] = guider["model"].get("checkpoint", "")
|
||||
result["loras"] = guider["model"].get("loras", "")
|
||||
result["clip_skip"] = str(int(guider["model"].get("clip_skip", "-1")) * -1)
|
||||
|
||||
# Extract dimensions from latent_image
|
||||
if "latent_image" in inputs and isinstance(inputs["latent_image"], dict):
|
||||
latent = inputs["latent_image"]
|
||||
width = latent.get("width", 0)
|
||||
height = latent.get("height", 0)
|
||||
if width and height:
|
||||
result["width"] = width
|
||||
result["height"] = height
|
||||
result["size"] = f"{width}x{height}"
|
||||
|
||||
return result
|
||||
|
||||
def transform_ksampler(inputs: Dict) -> Dict:
|
||||
"""Transform function for KSampler nodes"""
|
||||
result = {
|
||||
"seed": str(inputs.get("seed", "")),
|
||||
"steps": str(inputs.get("steps", "")),
|
||||
"cfg": str(inputs.get("cfg", "")),
|
||||
"sampler": inputs.get("sampler_name", ""),
|
||||
"scheduler": inputs.get("scheduler", ""),
|
||||
}
|
||||
|
||||
# Process positive prompt
|
||||
if "positive" in inputs:
|
||||
result["prompt"] = inputs["positive"]
|
||||
|
||||
# Process negative prompt
|
||||
if "negative" in inputs:
|
||||
result["negative_prompt"] = inputs["negative"]
|
||||
|
||||
# Get dimensions from latent image
|
||||
if "latent_image" in inputs and isinstance(inputs["latent_image"], dict):
|
||||
width = inputs["latent_image"].get("width", 0)
|
||||
height = inputs["latent_image"].get("height", 0)
|
||||
if width and height:
|
||||
result["size"] = f"{width}x{height}"
|
||||
|
||||
# Add clip_skip if present
|
||||
if "clip_skip" in inputs:
|
||||
result["clip_skip"] = str(inputs.get("clip_skip", ""))
|
||||
|
||||
# Add guidance if present
|
||||
if "guidance" in inputs:
|
||||
result["guidance"] = str(inputs.get("guidance", ""))
|
||||
|
||||
# Add model if present
|
||||
if "model" in inputs:
|
||||
result["checkpoint"] = inputs.get("model", {}).get("checkpoint", "")
|
||||
result["loras"] = inputs.get("model", {}).get("loras", "")
|
||||
result["clip_skip"] = str(inputs.get("model", {}).get("clip_skip", -1) * -1)
|
||||
|
||||
return result
|
||||
|
||||
def transform_empty_latent(inputs: Dict) -> Dict:
|
||||
"""Transform function for EmptyLatentImage nodes"""
|
||||
width = inputs.get("width", 0)
|
||||
height = inputs.get("height", 0)
|
||||
return {"width": width, "height": height, "size": f"{width}x{height}"}
|
||||
|
||||
def transform_clip_text(inputs: Dict) -> Any:
|
||||
"""Transform function for CLIPTextEncode nodes"""
|
||||
return inputs.get("text", "")
|
||||
|
||||
def transform_flux_guidance(inputs: Dict) -> Dict:
|
||||
"""Transform function for FluxGuidance nodes"""
|
||||
result = {}
|
||||
|
||||
if "guidance" in inputs:
|
||||
result["guidance"] = inputs["guidance"]
|
||||
|
||||
if "conditioning" in inputs:
|
||||
conditioning = inputs["conditioning"]
|
||||
if isinstance(conditioning, str):
|
||||
result["prompt"] = conditioning
|
||||
else:
|
||||
result["prompt"] = "Unknown prompt"
|
||||
|
||||
return result
|
||||
|
||||
def transform_unet_loader(inputs: Dict) -> Dict:
|
||||
"""Transform function for UNETLoader node"""
|
||||
unet_name = inputs.get("unet_name", "")
|
||||
return {"checkpoint": unet_name} if unet_name else {}
|
||||
|
||||
def transform_checkpoint_loader(inputs: Dict) -> Dict:
|
||||
"""Transform function for CheckpointLoaderSimple node"""
|
||||
ckpt_name = inputs.get("ckpt_name", "")
|
||||
return {"checkpoint": ckpt_name} if ckpt_name else {}
|
||||
|
||||
def transform_latent_upscale_by(inputs: Dict) -> Dict:
|
||||
"""Transform function for LatentUpscaleBy node"""
|
||||
result = {}
|
||||
|
||||
width = inputs["samples"].get("width", 0) * inputs["scale_by"]
|
||||
height = inputs["samples"].get("height", 0) * inputs["scale_by"]
|
||||
result["width"] = width
|
||||
result["height"] = height
|
||||
result["size"] = f"{width}x{height}"
|
||||
|
||||
return result
|
||||
|
||||
def transform_clip_set_last_layer(inputs: Dict) -> Dict:
|
||||
"""Transform function for CLIPSetLastLayer node"""
|
||||
result = {}
|
||||
|
||||
if "stop_at_clip_layer" in inputs:
|
||||
result["clip_skip"] = inputs["stop_at_clip_layer"]
|
||||
|
||||
return result
|
||||
|
||||
# =============================================================================
|
||||
# Node Mapper Definitions
|
||||
# =============================================================================
|
||||
|
||||
# Define the mappers for ComfyUI core nodes not in main mapper
|
||||
NODE_MAPPERS_EXT = {
|
||||
# KSamplers
|
||||
"SamplerCustomAdvanced": {
|
||||
"inputs_to_track": ["noise", "guider", "sampler", "sigmas", "latent_image"],
|
||||
"transform_func": transform_sampler_custom_advanced
|
||||
},
|
||||
"KSampler": {
|
||||
"inputs_to_track": [
|
||||
"seed", "steps", "cfg", "sampler_name", "scheduler",
|
||||
"denoise", "positive", "negative", "latent_image",
|
||||
"model", "clip_skip"
|
||||
],
|
||||
"transform_func": transform_ksampler
|
||||
},
|
||||
# ComfyUI core nodes
|
||||
"EmptyLatentImage": {
|
||||
"inputs_to_track": ["width", "height", "batch_size"],
|
||||
"transform_func": transform_empty_latent
|
||||
},
|
||||
"EmptySD3LatentImage": {
|
||||
"inputs_to_track": ["width", "height", "batch_size"],
|
||||
"transform_func": transform_empty_latent
|
||||
},
|
||||
"CLIPTextEncode": {
|
||||
"inputs_to_track": ["text", "clip"],
|
||||
"transform_func": transform_clip_text
|
||||
},
|
||||
"FluxGuidance": {
|
||||
"inputs_to_track": ["guidance", "conditioning"],
|
||||
"transform_func": transform_flux_guidance
|
||||
},
|
||||
"RandomNoise": {
|
||||
"inputs_to_track": ["noise_seed"],
|
||||
"transform_func": transform_random_noise
|
||||
},
|
||||
"KSamplerSelect": {
|
||||
"inputs_to_track": ["sampler_name"],
|
||||
"transform_func": transform_ksampler_select
|
||||
},
|
||||
"BasicScheduler": {
|
||||
"inputs_to_track": ["scheduler", "steps", "denoise", "model"],
|
||||
"transform_func": transform_basic_scheduler
|
||||
},
|
||||
"BasicGuider": {
|
||||
"inputs_to_track": ["model", "conditioning"],
|
||||
"transform_func": transform_basic_guider
|
||||
},
|
||||
"ModelSamplingFlux": {
|
||||
"inputs_to_track": ["max_shift", "base_shift", "width", "height", "model"],
|
||||
"transform_func": transform_model_sampling_flux
|
||||
},
|
||||
"UNETLoader": {
|
||||
"inputs_to_track": ["unet_name"],
|
||||
"transform_func": transform_unet_loader
|
||||
},
|
||||
"CheckpointLoaderSimple": {
|
||||
"inputs_to_track": ["ckpt_name"],
|
||||
"transform_func": transform_checkpoint_loader
|
||||
},
|
||||
"LatentUpscale": {
|
||||
"inputs_to_track": ["width", "height"],
|
||||
"transform_func": transform_empty_latent
|
||||
},
|
||||
"LatentUpscaleBy": {
|
||||
"inputs_to_track": ["samples", "scale_by"],
|
||||
"transform_func": transform_latent_upscale_by
|
||||
},
|
||||
"CLIPSetLastLayer": {
|
||||
"inputs_to_track": ["clip", "stop_at_clip_layer"],
|
||||
"transform_func": transform_clip_set_last_layer
|
||||
}
|
||||
}
|
||||
@@ -1,74 +0,0 @@
|
||||
"""
|
||||
KJNodes mappers extension for ComfyUI workflow parsing
|
||||
"""
|
||||
import logging
|
||||
import re
|
||||
from typing import Dict, Any
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# =============================================================================
|
||||
# Transform Functions
|
||||
# =============================================================================
|
||||
|
||||
def transform_join_strings(inputs: Dict) -> str:
|
||||
"""Transform function for JoinStrings nodes"""
|
||||
string1 = inputs.get("string1", "")
|
||||
string2 = inputs.get("string2", "")
|
||||
delimiter = inputs.get("delimiter", "")
|
||||
return f"{string1}{delimiter}{string2}"
|
||||
|
||||
def transform_string_constant(inputs: Dict) -> str:
|
||||
"""Transform function for StringConstant nodes"""
|
||||
return inputs.get("string", "")
|
||||
|
||||
def transform_empty_latent_presets(inputs: Dict) -> Dict:
|
||||
"""Transform function for EmptyLatentImagePresets nodes"""
|
||||
dimensions = inputs.get("dimensions", "")
|
||||
invert = inputs.get("invert", False)
|
||||
|
||||
# Extract width and height from dimensions string
|
||||
# Expected format: "width x height (ratio)" or similar
|
||||
width = 0
|
||||
height = 0
|
||||
|
||||
if dimensions:
|
||||
# Try to extract dimensions using regex
|
||||
match = re.search(r'(\d+)\s*x\s*(\d+)', dimensions)
|
||||
if match:
|
||||
width = int(match.group(1))
|
||||
height = int(match.group(2))
|
||||
|
||||
# If invert is True, swap width and height
|
||||
if invert and width and height:
|
||||
width, height = height, width
|
||||
|
||||
return {"width": width, "height": height, "size": f"{width}x{height}"}
|
||||
|
||||
def transform_int_constant(inputs: Dict) -> int:
|
||||
"""Transform function for INTConstant nodes"""
|
||||
return inputs.get("value", 0)
|
||||
|
||||
# =============================================================================
|
||||
# Node Mapper Definitions
|
||||
# =============================================================================
|
||||
|
||||
# Define the mappers for KJNodes
|
||||
NODE_MAPPERS_EXT = {
|
||||
"JoinStrings": {
|
||||
"inputs_to_track": ["string1", "string2", "delimiter"],
|
||||
"transform_func": transform_join_strings
|
||||
},
|
||||
"StringConstantMultiline": {
|
||||
"inputs_to_track": ["string"],
|
||||
"transform_func": transform_string_constant
|
||||
},
|
||||
"EmptyLatentImagePresets": {
|
||||
"inputs_to_track": ["dimensions", "invert", "batch_size"],
|
||||
"transform_func": transform_empty_latent_presets
|
||||
},
|
||||
"INTConstant": {
|
||||
"inputs_to_track": ["value"],
|
||||
"transform_func": transform_int_constant
|
||||
}
|
||||
}
|
||||
@@ -1,37 +0,0 @@
|
||||
"""
|
||||
Main entry point for the workflow parser module
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
from typing import Dict, Optional, Union
|
||||
|
||||
# Add the parent directory to sys.path to enable imports
|
||||
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
ROOT_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, '..', '..'))
|
||||
sys.path.insert(0, os.path.dirname(SCRIPT_DIR))
|
||||
|
||||
from .parser import parse_workflow
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def parse_comfyui_workflow(
|
||||
workflow_path: str,
|
||||
output_path: Optional[str] = None
|
||||
) -> Dict:
|
||||
"""
|
||||
Parse a ComfyUI workflow file and extract generation parameters
|
||||
|
||||
Args:
|
||||
workflow_path: Path to the workflow JSON file
|
||||
output_path: Optional path to save the output JSON
|
||||
|
||||
Returns:
|
||||
Dictionary containing extracted parameters
|
||||
"""
|
||||
return parse_workflow(workflow_path, output_path)
|
||||
|
||||
if __name__ == "__main__":
|
||||
# If run directly, use the CLI
|
||||
from .cli import main
|
||||
main()
|
||||
@@ -1,282 +0,0 @@
|
||||
"""
|
||||
Node mappers for ComfyUI workflow parsing
|
||||
"""
|
||||
import logging
|
||||
import os
|
||||
import importlib.util
|
||||
import inspect
|
||||
from typing import Dict, List, Any, Optional, Union, Type, Callable, Tuple
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Global mapper registry
|
||||
_MAPPER_REGISTRY: Dict[str, Dict] = {}
|
||||
|
||||
# =============================================================================
|
||||
# Mapper Definition Functions
|
||||
# =============================================================================
|
||||
|
||||
def create_mapper(
    node_type: str,
    inputs_to_track: List[str],
    transform_func: Callable[[Dict], Any] = None
) -> Dict:
    """Build a mapper definition dict for one node type.

    Args:
        node_type: The ComfyUI node class_type this mapper handles.
        inputs_to_track: Names of the node inputs whose values are extracted.
        transform_func: Optional callable applied to the extracted inputs;
            falls back to the identity transform when omitted.

    Returns:
        A dict with "node_type", "inputs_to_track" and "transform" keys.
    """
    if not transform_func:
        # Identity transform: hand the extracted inputs through unchanged.
        transform_func = lambda inputs: inputs
    return {
        "node_type": node_type,
        "inputs_to_track": inputs_to_track,
        "transform": transform_func,
    }
|
||||
|
||||
def register_mapper(mapper: Dict) -> None:
    """Install *mapper* into the global registry, keyed by its node type."""
    node_type = mapper["node_type"]
    _MAPPER_REGISTRY[node_type] = mapper
    logger.debug(f"Registered mapper for node type: {node_type}")
|
||||
|
||||
def get_mapper(node_type: str) -> Optional[Dict]:
    """Return the registered mapper for *node_type*, or None when unknown."""
    mapper = _MAPPER_REGISTRY.get(node_type)
    return mapper
|
||||
|
||||
def get_all_mappers() -> Dict[str, Dict]:
    """Return a shallow copy of every registered mapper, keyed by node type."""
    return dict(_MAPPER_REGISTRY)
|
||||
|
||||
# =============================================================================
|
||||
# Node Processing Function
|
||||
# =============================================================================
|
||||
|
||||
def process_node(node_id: str, node_data: Dict, workflow: Dict, parser: 'WorkflowParser') -> Any: # type: ignore
    """Extract a node's tracked inputs and apply its mapper's transform.

    Link-style inputs (two-element [node_id, output_slot] lists) are resolved
    recursively through *parser* (which handles caching and cycle detection);
    direct widget values are copied through as-is.

    Returns None when no mapper is registered for the node's class_type; if
    the transform itself raises, the untransformed extracted inputs are
    returned instead so callers still get partial data.
    """
    node_type = node_data.get("class_type")
    mapper = get_mapper(node_type)

    if not mapper:
        logger.warning(f"No mapper found for node type: {node_type}")
        return None

    result = {}

    # Extract inputs based on the mapper's tracked inputs
    for input_name in mapper["inputs_to_track"]:
        if input_name in node_data.get("inputs", {}):
            input_value = node_data["inputs"][input_name]

            # Check if input is a reference to another node's output.
            # NOTE(review): any two-element list is treated as a link, so a
            # literal two-element widget value would be misclassified —
            # confirm no tracked input can legitimately hold one.
            if isinstance(input_value, list) and len(input_value) == 2:
                try:
                    # Format is [node_id, output_slot]
                    ref_node_id, output_slot = input_value
                    # Convert node_id to string if it's an integer
                    if isinstance(ref_node_id, int):
                        ref_node_id = str(ref_node_id)

                    # Recursively process the referenced node
                    ref_value = parser.process_node(ref_node_id, workflow)

                    if ref_value is not None:
                        result[input_name] = ref_value
                    else:
                        # If we couldn't get a value from the reference, store the raw value
                        result[input_name] = input_value
                except Exception as e:
                    logger.error(f"Error processing reference in node {node_id}, input {input_name}: {e}")
                    result[input_name] = input_value
            else:
                # Direct value
                result[input_name] = input_value

    # Apply the transform function
    try:
        return mapper["transform"](result)
    except Exception as e:
        logger.error(f"Error in transform function for node {node_id} of type {node_type}: {e}")
        return result
|
||||
|
||||
# =============================================================================
|
||||
# Transform Functions
|
||||
# =============================================================================
|
||||
|
||||
|
||||
|
||||
def transform_lora_loader(inputs: Dict) -> Dict:
    """Transform function for "Lora Loader (LoraManager)" nodes.

    Builds a prompt-style string of "<lora:name:strength>" fragments from the
    node's active loras plus any upstream lora_stack, and carries the
    checkpoint name (and clip_skip, when a CLIP dict is present) through.

    Args:
        inputs: Extracted node inputs ("loras", "lora_stack", "model", "clip").

    Returns:
        Dict with "checkpoint" and "loras" keys, plus "clip_skip" when known.
    """
    loras_data = inputs.get("loras", [])

    # lora_stack may resolve to {"lora_stack": [...]} (output of a stacker
    # node), to a plain list of entries, or to a raw [node_id, slot] link when
    # the upstream reference could not be resolved.  The previous
    # implementation called .get() on it unconditionally, which raised
    # AttributeError for the list forms and aborted the whole transform.
    stack_input = inputs.get("lora_stack", [])
    if isinstance(stack_input, dict):
        lora_stack = stack_input.get("lora_stack", [])
    elif isinstance(stack_input, list):
        lora_stack = stack_input
    else:
        lora_stack = []

    lora_texts = []

    # Widget values may be wrapped as {"__value__": [...]} or be a bare list.
    if isinstance(loras_data, dict) and "__value__" in loras_data:
        loras_list = loras_data["__value__"]
    elif isinstance(loras_data, list):
        loras_list = loras_data
    else:
        loras_list = []

    # Process each active lora entry.
    for lora in loras_list:
        if isinstance(lora, dict) and lora.get("active", False):
            lora_name = lora.get("name", "")
            strength = lora.get("strength", 1.0)
            lora_texts.append(f"<lora:{lora_name}:{strength}>")

    # Process lora_stack if valid.
    if lora_stack and isinstance(lora_stack, list):
        # Skip raw [node_id, output_slot] links that slipped through unresolved.
        if not (len(lora_stack) == 2 and isinstance(lora_stack[0], (str, int)) and isinstance(lora_stack[1], int)):
            for stack_entry in lora_stack:
                lora_texts.append(f"<lora:{stack_entry[0]}:{stack_entry[1]}>")

    result = {
        "checkpoint": inputs.get("model", {}).get("checkpoint", ""),
        "loras": " ".join(lora_texts),
    }

    if "clip" in inputs and isinstance(inputs["clip"], dict):
        result["clip_skip"] = inputs["clip"].get("clip_skip", "-1")

    return result
|
||||
|
||||
def transform_lora_stacker(inputs: Dict) -> Dict:
    """Transform function for "Lora Stacker (LoraManager)" nodes.

    Merges any upstream lora_stack with this node's active loras and returns
    the combined stack as {"lora_stack": [(name, strength), ...]}.
    """
    result_stack = []

    # Upstream stack may arrive wrapped ({"lora_stack": [...]}) or as a bare
    # list; a bare two-element [node_id, slot] link is an unresolved
    # reference and is ignored.
    upstream = inputs.get("lora_stack", [])
    if isinstance(upstream, dict) and "lora_stack" in upstream:
        result_stack.extend(upstream["lora_stack"])
    elif isinstance(upstream, list):
        looks_like_link = (
            len(upstream) == 2
            and isinstance(upstream[0], (str, int))
            and isinstance(upstream[1], int)
        )
        if not looks_like_link:
            result_stack.extend(upstream)

    # Widget values may be wrapped as {"__value__": [...]} or be a bare list.
    loras_data = inputs.get("loras", [])
    if isinstance(loras_data, dict) and "__value__" in loras_data:
        candidates = loras_data["__value__"]
    elif isinstance(loras_data, list):
        candidates = loras_data
    else:
        candidates = []

    # Append each active lora as a (name, strength) tuple.
    for entry in candidates:
        if isinstance(entry, dict) and entry.get("active", False):
            result_stack.append((entry.get("name", ""), float(entry.get("strength", 1.0))))

    return {"lora_stack": result_stack}
|
||||
|
||||
def transform_trigger_word_toggle(inputs: Dict) -> str:
    """Transform function for "TriggerWord Toggle (LoraManager)" nodes.

    Returns a comma-separated string of the active, non-placeholder trigger
    words.
    """
    # Widget values may be wrapped as {"__value__": [...]} or be a bare list.
    data = inputs.get("toggle_trigger_words", [])
    if isinstance(data, dict) and "__value__" in data:
        entries = data["__value__"]
    elif isinstance(data, list):
        entries = data
    else:
        entries = []

    texts = [
        entry.get("text", "")
        for entry in entries
        if isinstance(entry, dict) and entry.get("active", False)
    ]
    # "__dummy" entries are UI placeholders, not real trigger words.
    return ", ".join(t for t in texts if t and not t.startswith("__dummy"))
|
||||
|
||||
# =============================================================================
|
||||
# Node Mapper Definitions
|
||||
# =============================================================================
|
||||
|
||||
# Central definition of all supported node types and their configurations.
# Keys are ComfyUI class_type strings; values give the inputs to extract and
# the transform applied to them.  register_all_mappers() turns each entry
# into a registry mapper, and load_extensions() may add entries at runtime.
NODE_MAPPERS = {

    # LoraManager nodes
    "Lora Loader (LoraManager)": {
        "inputs_to_track": ["model", "clip", "loras", "lora_stack"],
        "transform_func": transform_lora_loader
    },
    "Lora Stacker (LoraManager)": {
        "inputs_to_track": ["loras", "lora_stack"],
        "transform_func": transform_lora_stacker
    },
    "TriggerWord Toggle (LoraManager)": {
        "inputs_to_track": ["toggle_trigger_words"],
        "transform_func": transform_trigger_word_toggle
    }
}
|
||||
|
||||
def register_all_mappers() -> None:
    """Build and register a mapper for every entry in NODE_MAPPERS."""
    for node_type, config in NODE_MAPPERS.items():
        register_mapper(
            create_mapper(
                node_type=node_type,
                inputs_to_track=config["inputs_to_track"],
                transform_func=config["transform_func"],
            )
        )
    logger.info(f"Registered {len(NODE_MAPPERS)} node mappers")
|
||||
|
||||
# =============================================================================
|
||||
# Extension Loading
|
||||
# =============================================================================
|
||||
|
||||
def load_extensions(ext_dir: str = None) -> None:
    """Load mapper extensions from *ext_dir* (defaults to ./ext beside this file).

    Extension files should define a NODE_MAPPERS_EXT dictionary containing
    mapper configurations.  These are merged into the global NODE_MAPPERS
    dictionary and registered automatically.

    Args:
        ext_dir: Directory to scan for *.py extension files; created (and the
            scan skipped) when it does not exist yet.
    """
    if ext_dir is None:
        # Default to an "ext" folder next to this module.
        current_dir = os.path.dirname(os.path.abspath(__file__))
        ext_dir = os.path.join(current_dir, 'ext')

    # Nothing to load on first run; create the directory for future use.
    if not os.path.exists(ext_dir):
        os.makedirs(ext_dir, exist_ok=True)
        logger.info(f"Created extension directory: {ext_dir}")
        return

    # Load each Python file in the extension directory (skip _private files).
    for filename in os.listdir(ext_dir):
        if filename.endswith('.py') and not filename.startswith('_'):
            module_path = os.path.join(ext_dir, filename)
            module_name = f"workflow.ext.{filename[:-3]}"  # Remove .py

            try:
                spec = importlib.util.spec_from_file_location(module_name, module_path)
                if spec and spec.loader:
                    module = importlib.util.module_from_spec(spec)
                    spec.loader.exec_module(module)

                    if hasattr(module, 'NODE_MAPPERS_EXT'):
                        # Merge the extension mappers into the global dict.
                        NODE_MAPPERS.update(module.NODE_MAPPERS_EXT)
                        # Fixed: log the actual extension filename instead of a
                        # hard-coded "(unknown)" placeholder.
                        logger.info(f"Added {len(module.NODE_MAPPERS_EXT)} mappers from extension: {filename}")
                    else:
                        logger.warning(f"Extension {filename} does not define NODE_MAPPERS_EXT dictionary")
            except Exception as e:
                logger.warning(f"Error loading extension {filename}: {e}")

    # Re-register all mappers so extension entries take effect.
    register_all_mappers()
|
||||
|
||||
# Initialize the registry with default mappers
|
||||
# register_default_mappers()
|
||||
@@ -1,181 +0,0 @@
|
||||
"""
|
||||
Main workflow parser implementation for ComfyUI
|
||||
"""
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict, List, Any, Optional, Union, Set
|
||||
from .mappers import get_mapper, get_all_mappers, load_extensions, process_node
|
||||
from .utils import (
|
||||
load_workflow, save_output, find_node_by_type,
|
||||
trace_model_path
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class WorkflowParser:
    """Parser for ComfyUI workflows.

    Walks a prompt-format workflow (dict of node_id -> node data) backward
    from its primary sampler node, resolving linked inputs through the
    registered node mappers, and produces a flat dict of generation
    parameters.
    """

    def __init__(self):
        """Initialize the parser with mappers"""
        # Nodes currently on the traversal path; used to detect cycles.
        self.processed_nodes: Set[str] = set()  # Track processed nodes to avoid cycles
        # Memoized per-node results, keyed by node id.
        self.node_results_cache: Dict[str, Any] = {}  # Cache for processed node results

        # Load extensions (merges NODE_MAPPERS_EXT files and registers all
        # mappers as a side effect).
        load_extensions()

    def process_node(self, node_id: str, workflow: Dict) -> Any:
        """Process a single node and extract relevant information.

        Returns the cached/mapped result, or None for unknown node ids,
        cycles, and node types without a registered mapper.
        """
        # Return cached result if available
        if node_id in self.node_results_cache:
            return self.node_results_cache[node_id]

        # Check if we're in a cycle
        if node_id in self.processed_nodes:
            return None

        # Mark this node as being processed (to detect cycles)
        self.processed_nodes.add(node_id)

        if node_id not in workflow:
            self.processed_nodes.remove(node_id)
            return None

        node_data = workflow[node_id]
        node_type = node_data.get("class_type")

        result = None
        if get_mapper(node_type):
            try:
                # Delegate to the module-level mapper machinery, which may
                # recurse back into this method for linked inputs.
                result = process_node(node_id, node_data, workflow, self)
                # Cache the result
                self.node_results_cache[node_id] = result
            except Exception as e:
                logger.error(f"Error processing node {node_id} of type {node_type}: {e}", exc_info=True)
                # Return a partial result or None depending on how we want to handle errors
                result = {}

        # Remove node from processed set to allow it to be processed again in a different context
        self.processed_nodes.remove(node_id)
        return result

    def find_primary_sampler_node(self, workflow: Dict) -> Optional[str]:
        """
        Find the primary sampler node in the workflow.

        Priority:
        1. First try to find a SamplerCustomAdvanced node
        2. If not found, look for KSampler nodes with denoise=1.0
        3. If still not found, use the first KSampler node

        Args:
            workflow: The workflow data as a dictionary

        Returns:
            The node ID of the primary sampler node, or None if not found
        """
        # First check for SamplerCustomAdvanced nodes
        sampler_advanced_nodes = []
        ksampler_nodes = []

        # Scan workflow for sampler nodes
        for node_id, node_data in workflow.items():
            node_type = node_data.get("class_type")

            if node_type == "SamplerCustomAdvanced":
                sampler_advanced_nodes.append(node_id)
            elif node_type == "KSampler":
                ksampler_nodes.append(node_id)

        # If we found SamplerCustomAdvanced nodes, return the first one
        if sampler_advanced_nodes:
            logger.debug(f"Found SamplerCustomAdvanced node: {sampler_advanced_nodes[0]}")
            return sampler_advanced_nodes[0]

        # If we have KSampler nodes, look for one with denoise=1.0
        if ksampler_nodes:
            for node_id in ksampler_nodes:
                node_data = workflow[node_id]
                inputs = node_data.get("inputs", {})
                denoise = inputs.get("denoise", 0)

                # Check if denoise is 1.0 (allowing for small floating point differences)
                if abs(float(denoise) - 1.0) < 0.001:
                    logger.debug(f"Found KSampler node with denoise=1.0: {node_id}")
                    return node_id

            # If no KSampler with denoise=1.0 found, use the first one
            logger.debug(f"No KSampler with denoise=1.0 found, using first KSampler: {ksampler_nodes[0]}")
            return ksampler_nodes[0]

        # No sampler nodes found
        logger.warning("No sampler nodes found in workflow")
        return None

    def parse_workflow(self, workflow_data: Union[str, Dict], output_path: Optional[str] = None) -> Dict:
        """
        Parse the workflow and extract generation parameters

        Args:
            workflow_data: The workflow data as a dictionary or a file path
            output_path: Optional path to save the output JSON

        Returns:
            Dictionary containing extracted parameters
        """
        # Load workflow from file if needed
        if isinstance(workflow_data, str):
            workflow = load_workflow(workflow_data)
        else:
            workflow = workflow_data

        # Reset the processed nodes tracker and cache
        self.processed_nodes = set()
        self.node_results_cache = {}

        # Find the primary sampler node
        sampler_node_id = self.find_primary_sampler_node(workflow)
        if not sampler_node_id:
            logger.warning("No suitable sampler node found in workflow")
            return {}

        # Process sampler node to extract parameters
        sampler_result = self.process_node(sampler_node_id, workflow)
        if not sampler_result:
            return {}

        # Return the sampler result directly - it's already in the format we need
        # This simplifies the structure and makes it easier to use in recipe_routes.py

        # Handle standard ComfyUI names vs our output format
        if "cfg" in sampler_result:
            sampler_result["cfg_scale"] = sampler_result.pop("cfg")

        # Add clip_skip = 1 to match reference output if not already present
        if "clip_skip" not in sampler_result:
            sampler_result["clip_skip"] = "1"

        # Ensure the prompt is a string and not a nested dictionary
        if "prompt" in sampler_result and isinstance(sampler_result["prompt"], dict):
            if "prompt" in sampler_result["prompt"]:
                sampler_result["prompt"] = sampler_result["prompt"]["prompt"]

        # Save the result if requested
        if output_path:
            save_output(sampler_result, output_path)

        return sampler_result
|
||||
|
||||
|
||||
def parse_workflow(workflow_path: str, output_path: Optional[str] = None) -> Dict:
    """Parse a ComfyUI workflow file and extract generation parameters.

    Convenience wrapper that constructs a fresh WorkflowParser per call.

    Args:
        workflow_path: Path to the workflow JSON file.
        output_path: Optional path to save the output JSON.

    Returns:
        Dictionary containing extracted parameters.
    """
    return WorkflowParser().parse_workflow(workflow_path, output_path)
|
||||
@@ -1,63 +0,0 @@
|
||||
"""
|
||||
Test script for the ComfyUI workflow parser
|
||||
"""
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
from .parser import parse_workflow
|
||||
|
||||
# Console-only logging setup so the manual test run prints its summary.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[logging.StreamHandler()]
)
logger = logging.getLogger(__name__)

# Configure paths
# NOTE(review): assumes refs/ and output/ directories live two levels above
# this file — confirm against the repository layout.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, '..', '..'))
REFS_DIR = os.path.join(ROOT_DIR, 'refs')
OUTPUT_DIR = os.path.join(ROOT_DIR, 'output')
|
||||
|
||||
def test_parse_flux_workflow():
    """Parse the flux example workflow, log a summary, and compare it
    against the checked-in reference output.

    This is a manual smoke test: it reads refs/flux_prompt.json, writes the
    parsed result to output/, and reports (but does not assert) whether the
    loras/prompt fields match refs/flux_output.json.
    """
    # Ensure output directory exists
    os.makedirs(OUTPUT_DIR, exist_ok=True)

    # Define input and output paths
    input_path = os.path.join(REFS_DIR, 'flux_prompt.json')
    output_path = os.path.join(OUTPUT_DIR, 'parsed_flux_output.json')

    # Parse workflow
    logger.info(f"Parsing workflow: {input_path}")
    result = parse_workflow(input_path, output_path)

    # Print result summary
    logger.info(f"Output saved to: {output_path}")
    logger.info(f"Parsing completed. Result summary:")
    logger.info(f"  LoRAs: {result.get('loras', '')}")

    gen_params = result.get('gen_params', {})
    logger.info(f"  Prompt: {gen_params.get('prompt', '')[:50]}...")
    logger.info(f"  Steps: {gen_params.get('steps', '')}")
    logger.info(f"  Sampler: {gen_params.get('sampler', '')}")
    logger.info(f"  Size: {gen_params.get('size', '')}")

    # Compare with reference output (best-effort: a missing or unreadable
    # reference file is logged, not raised)
    ref_output_path = os.path.join(REFS_DIR, 'flux_output.json')
    try:
        with open(ref_output_path, 'r') as f:
            ref_output = json.load(f)

        # Simple validation
        loras_match = result.get('loras', '') == ref_output.get('loras', '')
        prompt_match = gen_params.get('prompt', '') == ref_output.get('gen_params', {}).get('prompt', '')

        logger.info(f"Validation against reference:")
        logger.info(f"  LoRAs match: {loras_match}")
        logger.info(f"  Prompt match: {prompt_match}")
    except Exception as e:
        logger.warning(f"Failed to compare with reference output: {e}")


if __name__ == "__main__":
    test_parse_flux_workflow()
|
||||
@@ -1,120 +0,0 @@
|
||||
"""
|
||||
Utility functions for ComfyUI workflow parsing
|
||||
"""
|
||||
import json
|
||||
import os
|
||||
import logging
|
||||
from typing import Dict, List, Any, Optional, Union, Set, Tuple
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def load_workflow(workflow_path: str) -> Dict:
|
||||
"""Load a workflow from a JSON file"""
|
||||
try:
|
||||
with open(workflow_path, 'r', encoding='utf-8') as f:
|
||||
return json.load(f)
|
||||
except Exception as e:
|
||||
logger.error(f"Error loading workflow from {workflow_path}: {e}")
|
||||
raise
|
||||
|
||||
def save_output(output: Dict, output_path: str) -> None:
|
||||
"""Save the parsed output to a JSON file"""
|
||||
os.makedirs(os.path.dirname(os.path.abspath(output_path)), exist_ok=True)
|
||||
try:
|
||||
with open(output_path, 'w', encoding='utf-8') as f:
|
||||
json.dump(output, f, indent=4)
|
||||
except Exception as e:
|
||||
logger.error(f"Error saving output to {output_path}: {e}")
|
||||
raise
|
||||
|
||||
def find_node_by_type(workflow: Dict, node_type: str) -> Optional[str]:
    """Return the id of the first node whose class_type matches, else None."""
    return next(
        (nid for nid, data in workflow.items() if data.get("class_type") == node_type),
        None,
    )
|
||||
|
||||
def find_nodes_by_type(workflow: Dict, node_type: str) -> List[str]:
    """Return the ids of every node whose class_type matches *node_type*."""
    matches = []
    for node_id, node_data in workflow.items():
        if node_data.get("class_type") == node_type:
            matches.append(node_id)
    return matches
|
||||
|
||||
def get_input_node_ids(workflow: Dict, node_id: str) -> Dict[str, Tuple[str, int]]:
    """Map each linked input of *node_id* to its (source_node_id, output_slot).

    Inputs holding direct widget values (anything other than a two-element
    [node_id, output_slot] list) are omitted.  Returns an empty dict for an
    unknown node id.
    """
    if node_id not in workflow:
        return {}

    links = {}
    for name, value in workflow[node_id].get("inputs", {}).items():
        # A two-element list is ComfyUI's [node_id, output_slot] link format.
        if isinstance(value, list) and len(value) == 2:
            links[name] = (str(value[0]), value[1])
    return links
|
||||
|
||||
def trace_model_path(workflow: Dict, start_node_id: str) -> List[str]:
    """Trace the model chain backward from *start_node_id*, collecting LoRA nodes.

    Performs a depth-first walk along "model" and "lora_stack" input links
    (with cycle protection) and returns the ids of every visited node whose
    class_type contains "Lora".

    Args:
        workflow: The workflow data.
        start_node_id: The starting node ID (usually the KSampler).

    Returns:
        List of LoRA node IDs found on the model path.
    """
    lora_nodes: List[str] = []
    if start_node_id not in workflow:
        return lora_nodes

    visited = set()
    stack = []

    # Seed the walk from the sampler's "model" link, when present.
    model_link = workflow[start_node_id].get("inputs", {}).get("model")
    if isinstance(model_link, list):
        stack.append(str(model_link[0]))

    while stack:
        current = stack.pop()
        if current in visited:
            continue
        visited.add(current)
        if current not in workflow:
            continue

        node = workflow[current]
        if "Lora" in node.get("class_type", ""):
            lora_nodes.append(current)

        # Continue upstream only along model/lora_stack links.
        for name, value in node.get("inputs", {}).items():
            if name in ("model", "lora_stack") and isinstance(value, list) and len(value) == 2:
                stack.append(str(value[0]))

    return lora_nodes
|
||||
@@ -1,7 +1,7 @@
|
||||
[project]
|
||||
name = "comfyui-lora-manager"
|
||||
description = "LoRA Manager for ComfyUI - Access it at http://localhost:8188/loras for managing LoRA models with previews and metadata integration."
|
||||
version = "0.8.12"
|
||||
version = "0.8.13"
|
||||
license = {file = "LICENSE"}
|
||||
dependencies = [
|
||||
"aiohttp",
|
||||
@@ -13,7 +13,8 @@ dependencies = [
|
||||
"Pillow",
|
||||
"olefile", # for getting rid of warning message
|
||||
"requests",
|
||||
"toml"
|
||||
"toml",
|
||||
"natsort"
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
|
||||
@@ -9,4 +9,5 @@ olefile
|
||||
requests
|
||||
toml
|
||||
numpy
|
||||
torch
|
||||
torch
|
||||
natsort
|
||||
@@ -38,7 +38,7 @@ html, body {
|
||||
--lora-border: oklch(90% 0.02 256 / 0.15);
|
||||
--lora-text: oklch(95% 0.02 256);
|
||||
--lora-error: oklch(75% 0.32 29);
|
||||
--lora-warning: oklch(75% 0.25 80); /* Add warning color for deleted LoRAs */
|
||||
--lora-warning: oklch(75% 0.25 80); /* Modified to be used with oklch() */
|
||||
|
||||
/* Spacing Scale */
|
||||
--space-1: calc(8px * 1);
|
||||
@@ -79,7 +79,7 @@ html[data-theme="light"] {
|
||||
--lora-surface: oklch(25% 0.02 256 / 0.98);
|
||||
--lora-border: oklch(90% 0.02 256 / 0.15);
|
||||
--lora-text: oklch(98% 0.02 256);
|
||||
--lora-warning: oklch(75% 0.25 80); /* Add warning color for dark theme too */
|
||||
--lora-warning: oklch(75% 0.25 80); /* Modified to be used with oklch() */
|
||||
}
|
||||
|
||||
body {
|
||||
|
||||
259
static/css/components/duplicates.css
Normal file
259
static/css/components/duplicates.css
Normal file
@@ -0,0 +1,259 @@
|
||||
/* Duplicates Management Styles */
|
||||
|
||||
/* Duplicates banner */
|
||||
.duplicates-banner {
|
||||
position: sticky;
|
||||
top: 48px; /* Match header height */
|
||||
left: 0;
|
||||
width: 100%;
|
||||
background-color: var(--card-bg);
|
||||
color: var(--text-color);
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
z-index: var(--z-overlay);
|
||||
padding: 12px 16px;
|
||||
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.15);
|
||||
transition: all 0.3s ease;
|
||||
}
|
||||
|
||||
.duplicates-banner .banner-content {
|
||||
max-width: 1400px;
|
||||
margin: 0 auto;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
.duplicates-banner i.fa-exclamation-triangle {
|
||||
font-size: 18px;
|
||||
color: oklch(var(--lora-warning));
|
||||
}
|
||||
|
||||
.duplicates-banner .banner-actions {
|
||||
margin-left: auto;
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.duplicates-banner button {
|
||||
min-width: 100px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 4px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
padding: 4px 10px;
|
||||
border: 1px solid var(--border-color);
|
||||
background: var(--card-bg);
|
||||
color: var(--text-color);
|
||||
font-size: 0.85em;
|
||||
transition: all 0.2s ease;
|
||||
cursor: pointer;
|
||||
box-shadow: 0 1px 2px rgba(0, 0, 0, 0.05);
|
||||
}
|
||||
|
||||
.duplicates-banner button:hover {
|
||||
border-color: var(--lora-accent);
|
||||
background: var(--bg-color);
|
||||
transform: translateY(-1px);
|
||||
box-shadow: 0 3px 5px rgba(0, 0, 0, 0.08);
|
||||
}
|
||||
|
||||
.duplicates-banner button.btn-exit {
|
||||
min-width: unset;
|
||||
width: 28px;
|
||||
height: 28px;
|
||||
padding: 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
border-radius: 50%;
|
||||
}
|
||||
|
||||
.duplicates-banner button.disabled {
|
||||
opacity: 0.5;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
/* Duplicate groups */
|
||||
.duplicate-group {
|
||||
position: relative;
|
||||
border: 2px solid oklch(var(--lora-warning));
|
||||
border-radius: var(--border-radius-base);
|
||||
padding: 16px;
|
||||
margin-bottom: 24px;
|
||||
background: var(--card-bg);
|
||||
}
|
||||
|
||||
.duplicate-group-header {
|
||||
background-color: var(--bg-color);
|
||||
color: var(--text-color);
|
||||
border: 1px solid var(--border-color);
|
||||
padding: 8px 16px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
margin-bottom: 16px;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.duplicate-group-header span:last-child {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.duplicate-group-header button {
|
||||
min-width: 80px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 4px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
padding: 4px 8px;
|
||||
border: 1px solid var(--border-color);
|
||||
background: var(--card-bg);
|
||||
color: var(--text-color);
|
||||
font-size: 0.85em;
|
||||
transition: all 0.2s ease;
|
||||
cursor: pointer;
|
||||
box-shadow: 0 1px 2px rgba(0, 0, 0, 0.05);
|
||||
margin-left: 8px;
|
||||
}
|
||||
|
||||
.duplicate-group-header button:hover {
|
||||
border-color: var(--lora-accent);
|
||||
background: var(--bg-color);
|
||||
transform: translateY(-1px);
|
||||
box-shadow: 0 3px 5px rgba(0, 0, 0, 0.08);
|
||||
}
|
||||
|
||||
.card-group-container {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 16px;
|
||||
justify-content: flex-start;
|
||||
align-items: flex-start;
|
||||
}
|
||||
|
||||
/* Make cards in duplicate groups have consistent width */
|
||||
.card-group-container .lora-card {
|
||||
flex: 0 0 auto;
|
||||
width: 240px;
|
||||
margin: 0;
|
||||
cursor: pointer; /* Indicate the card is clickable */
|
||||
}
|
||||
|
||||
/* Ensure the grid layout is only applied to the main recipe grid, not duplicate groups */
|
||||
.duplicate-mode .card-grid {
|
||||
display: block;
|
||||
}
|
||||
|
||||
/* Scrollable container for large duplicate groups */
|
||||
.card-group-container.scrollable {
|
||||
max-height: 450px;
|
||||
overflow-y: auto;
|
||||
padding-right: 8px;
|
||||
}
|
||||
|
||||
/* Add a toggle button to expand/collapse large duplicate groups */
|
||||
.group-toggle-btn {
|
||||
position: absolute;
|
||||
right: 16px;
|
||||
bottom: -12px;
|
||||
background: var(--card-bg);
|
||||
color: var(--text-color);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: 50%;
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
cursor: pointer;
|
||||
z-index: 1;
|
||||
box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
.group-toggle-btn:hover {
|
||||
border-color: var(--lora-accent);
|
||||
transform: translateY(-1px);
|
||||
box-shadow: 0 3px 5px rgba(0, 0, 0, 0.08);
|
||||
}
|
||||
|
||||
/* Duplicate card styling */
|
||||
.lora-card.duplicate {
|
||||
position: relative;
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
.lora-card.duplicate:hover {
|
||||
border-color: var(--lora-accent);
|
||||
}
|
||||
|
||||
.lora-card.duplicate.latest {
|
||||
border-style: solid;
|
||||
border-color: oklch(var(--lora-warning));
|
||||
}
|
||||
|
||||
.lora-card.duplicate-selected {
|
||||
border: 2px solid oklch(var(--lora-accent));
|
||||
box-shadow: 0 0 8px rgba(0, 0, 0, 0.2);
|
||||
}
|
||||
|
||||
.lora-card .selector-checkbox {
|
||||
position: absolute;
|
||||
top: 10px;
|
||||
right: 10px;
|
||||
z-index: 10;
|
||||
width: 20px;
|
||||
height: 20px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
/* Latest indicator */
|
||||
.lora-card.duplicate.latest::after {
|
||||
content: "Latest";
|
||||
position: absolute;
|
||||
top: 10px;
|
||||
left: 10px;
|
||||
background: oklch(var(--lora-accent));
|
||||
color: white;
|
||||
font-size: 12px;
|
||||
padding: 2px 6px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
z-index: 5;
|
||||
}
|
||||
|
||||
/* Responsive adjustments */
|
||||
@media (max-width: 768px) {
|
||||
.duplicates-banner .banner-content {
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.duplicates-banner .banner-actions {
|
||||
width: 100%;
|
||||
margin-left: 0;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.duplicate-group-header {
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
align-items: flex-start;
|
||||
}
|
||||
|
||||
.duplicate-group-header span:last-child {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.duplicate-group-header button {
|
||||
margin-left: 0;
|
||||
flex: 1;
|
||||
}
|
||||
}
|
||||
@@ -291,7 +291,7 @@
|
||||
gap: 8px;
|
||||
padding: var(--space-1);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: var(--border-radius-sm);
|
||||
border-radius: var (--border-radius-sm);
|
||||
background: var(--lora-surface);
|
||||
}
|
||||
|
||||
@@ -733,3 +733,150 @@
|
||||
font-size: 0.9em;
|
||||
line-height: 1.4;
|
||||
}
|
||||
|
||||
/* Duplicate Recipes Styles */
|
||||
.duplicate-recipes-container {
|
||||
margin-bottom: var(--space-3);
|
||||
border-radius: var(--border-radius-sm);
|
||||
overflow: hidden;
|
||||
animation: fadeIn 0.3s ease-in-out;
|
||||
}
|
||||
|
||||
@keyframes fadeIn {
|
||||
from { opacity: 0; transform: translateY(-10px); }
|
||||
to { opacity: 1; transform: translateY(0); }
|
||||
}
|
||||
|
||||
.duplicate-warning {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
gap: 12px;
|
||||
padding: 12px 16px;
|
||||
background: oklch(var(--lora-warning) / 0.1);
|
||||
border: 1px solid var(--lora-warning);
|
||||
border-radius: var(--border-radius-sm) var(--border-radius-sm) 0 0;
|
||||
color: var(--text-color);
|
||||
}
|
||||
|
||||
.duplicate-warning .warning-icon {
|
||||
color: var(--lora-warning);
|
||||
font-size: 1.2em;
|
||||
padding-top: 2px;
|
||||
}
|
||||
|
||||
.duplicate-warning .warning-content {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.duplicate-warning .warning-title {
|
||||
font-weight: 600;
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
|
||||
.duplicate-warning .warning-text {
|
||||
font-size: 0.9em;
|
||||
line-height: 1.4;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
flex-wrap: wrap;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.toggle-duplicates-btn {
|
||||
background: none;
|
||||
border: none;
|
||||
color: var(--lora-warning);
|
||||
cursor: pointer;
|
||||
font-size: 0.9em;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
padding: 4px 8px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
}
|
||||
|
||||
.toggle-duplicates-btn:hover {
|
||||
background: oklch(var(--lora-warning) / 0.1);
|
||||
}
|
||||
|
||||
.duplicate-recipes-list {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fill, minmax(150px, 1fr));
|
||||
gap: 12px;
|
||||
padding: 16px;
|
||||
border: 1px solid var(--border-color);
|
||||
border-top: none;
|
||||
border-radius: 0 0 var(--border-radius-sm) var(--border-radius-sm);
|
||||
background: var(--bg-color);
|
||||
max-height: 300px;
|
||||
overflow-y: auto;
|
||||
transition: max-height 0.3s ease, padding 0.3s ease;
|
||||
}
|
||||
|
||||
.duplicate-recipes-list.collapsed {
|
||||
max-height: 0;
|
||||
padding: 0 16px;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.duplicate-recipe-card {
|
||||
position: relative;
|
||||
border-radius: var(--border-radius-sm);
|
||||
overflow: hidden;
|
||||
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
|
||||
transition: transform 0.2s ease;
|
||||
}
|
||||
|
||||
.duplicate-recipe-card:hover {
|
||||
transform: translateY(-2px);
|
||||
}
|
||||
|
||||
.duplicate-recipe-preview {
|
||||
width: 100%;
|
||||
position: relative;
|
||||
aspect-ratio: 2/3;
|
||||
background: var(--bg-color);
|
||||
}
|
||||
|
||||
.duplicate-recipe-preview img {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
object-fit: cover;
|
||||
}
|
||||
|
||||
.duplicate-recipe-title {
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
padding: 8px;
|
||||
background: rgba(0, 0, 0, 0.7);
|
||||
color: white;
|
||||
font-size: 0.85em;
|
||||
line-height: 1.3;
|
||||
max-height: 50%;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
display: -webkit-box;
|
||||
-webkit-line-clamp: 2;
|
||||
-webkit-box-orient: vertical;
|
||||
}
|
||||
|
||||
.duplicate-recipe-details {
|
||||
padding: 8px;
|
||||
background: var(--bg-color);
|
||||
font-size: 0.75em;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
color: var(--text-color);
|
||||
opacity: 0.8;
|
||||
}
|
||||
|
||||
.duplicate-recipe-date,
|
||||
.duplicate-recipe-lora-count {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
@@ -12,11 +12,13 @@
|
||||
transition: transform 0.3s ease, opacity 0.3s ease;
|
||||
opacity: 0;
|
||||
transform: translateY(20px);
|
||||
pointer-events: none; /* Ignore mouse events when invisible */
|
||||
}
|
||||
|
||||
.progress-panel.visible {
|
||||
opacity: 1;
|
||||
transform: translateY(0);
|
||||
pointer-events: auto; /* Capture mouse events when visible */
|
||||
}
|
||||
|
||||
.progress-panel.collapsed .progress-panel-content {
|
||||
|
||||
@@ -229,8 +229,10 @@
|
||||
background: var(--lora-surface);
|
||||
border: 1px solid var(--border-color);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.recipe-preview-container img,
|
||||
@@ -246,6 +248,133 @@
|
||||
object-fit: contain;
|
||||
}
|
||||
|
||||
/* Source URL container */
|
||||
.source-url-container {
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
background: rgba(0, 0, 0, 0.5);
|
||||
padding: 8px 12px;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
transition: transform 0.3s ease;
|
||||
transform: translateY(100%);
|
||||
}
|
||||
|
||||
.recipe-preview-container:hover .source-url-container {
|
||||
transform: translateY(0);
|
||||
}
|
||||
|
||||
.source-url-container.active {
|
||||
transform: translateY(0);
|
||||
}
|
||||
|
||||
.source-url-content {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
color: #fff;
|
||||
flex: 1;
|
||||
overflow: hidden;
|
||||
font-size: 0.85em;
|
||||
}
|
||||
|
||||
.source-url-icon {
|
||||
margin-right: 8px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.source-url-text {
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
cursor: pointer;
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.source-url-edit-btn {
|
||||
background: none;
|
||||
border: none;
|
||||
color: #fff;
|
||||
cursor: pointer;
|
||||
padding: 4px;
|
||||
margin-left: 8px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
opacity: 0.7;
|
||||
transition: opacity 0.2s ease;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.source-url-edit-btn:hover {
|
||||
opacity: 1;
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
|
||||
/* Source URL editor */
|
||||
.source-url-editor {
|
||||
display: none;
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
background: var(--bg-color);
|
||||
border-top: 1px solid var(--border-color);
|
||||
padding: 12px;
|
||||
flex-direction: column;
|
||||
gap: 10px;
|
||||
z-index: 5;
|
||||
}
|
||||
|
||||
.source-url-editor.active {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.source-url-input {
|
||||
width: 100%;
|
||||
padding: 8px 10px;
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: var(--border-radius-xs);
|
||||
background: var(--bg-color);
|
||||
color: var(--text-color);
|
||||
font-size: 0.9em;
|
||||
}
|
||||
|
||||
.source-url-actions {
|
||||
display: flex;
|
||||
justify-content: flex-end;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.source-url-cancel-btn,
|
||||
.source-url-save-btn {
|
||||
padding: 6px 12px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
font-size: 0.85em;
|
||||
cursor: pointer;
|
||||
border: none;
|
||||
transition: all 0.2s;
|
||||
}
|
||||
|
||||
.source-url-cancel-btn {
|
||||
background: var(--bg-color);
|
||||
color: var(--text-color);
|
||||
border: 1px solid var(--border-color);
|
||||
}
|
||||
|
||||
.source-url-save-btn {
|
||||
background: var(--lora-accent);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.source-url-cancel-btn:hover {
|
||||
background: var(--lora-surface);
|
||||
}
|
||||
|
||||
.source-url-save-btn:hover {
|
||||
background: color-mix(in oklch, var(--lora-accent), black 10%);
|
||||
}
|
||||
|
||||
/* Generation Parameters */
|
||||
.recipe-gen-params {
|
||||
height: 360px;
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
@import 'components/initialization.css';
|
||||
@import 'components/progress-panel.css';
|
||||
@import 'components/alphabet-bar.css'; /* Add alphabet bar component */
|
||||
@import 'components/duplicates.css'; /* Add duplicates component */
|
||||
|
||||
.initialization-notice {
|
||||
display: flex;
|
||||
|
||||
395
static/js/components/DuplicatesManager.js
Normal file
395
static/js/components/DuplicatesManager.js
Normal file
@@ -0,0 +1,395 @@
|
||||
// Duplicates Manager Component
|
||||
import { showToast } from '../utils/uiHelpers.js';
|
||||
import { RecipeCard } from './RecipeCard.js';
|
||||
import { getCurrentPageState } from '../state/index.js';
|
||||
import { initializeInfiniteScroll } from '../utils/infiniteScroll.js';
|
||||
|
||||
export class DuplicatesManager {
|
||||
constructor(recipeManager) {
|
||||
this.recipeManager = recipeManager;
|
||||
this.duplicateGroups = [];
|
||||
this.inDuplicateMode = false;
|
||||
this.selectedForDeletion = new Set();
|
||||
}
|
||||
|
||||
async findDuplicates() {
|
||||
try {
|
||||
document.body.classList.add('loading');
|
||||
|
||||
const response = await fetch('/api/recipes/find-duplicates');
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to find duplicates');
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
if (!data.success) {
|
||||
throw new Error(data.error || 'Unknown error finding duplicates');
|
||||
}
|
||||
|
||||
this.duplicateGroups = data.duplicate_groups || [];
|
||||
|
||||
if (this.duplicateGroups.length === 0) {
|
||||
showToast('No duplicate recipes found', 'info');
|
||||
return false;
|
||||
}
|
||||
|
||||
this.enterDuplicateMode();
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error('Error finding duplicates:', error);
|
||||
showToast('Failed to find duplicates: ' + error.message, 'error');
|
||||
return false;
|
||||
} finally {
|
||||
document.body.classList.remove('loading');
|
||||
}
|
||||
}
|
||||
|
||||
enterDuplicateMode() {
|
||||
this.inDuplicateMode = true;
|
||||
this.selectedForDeletion.clear();
|
||||
|
||||
// Update state
|
||||
const pageState = getCurrentPageState();
|
||||
pageState.duplicatesMode = true;
|
||||
|
||||
// Show duplicates banner
|
||||
const banner = document.getElementById('duplicatesBanner');
|
||||
const countSpan = document.getElementById('duplicatesCount');
|
||||
|
||||
if (banner && countSpan) {
|
||||
countSpan.textContent = `Found ${this.duplicateGroups.length} duplicate group${this.duplicateGroups.length !== 1 ? 's' : ''}`;
|
||||
banner.style.display = 'block';
|
||||
}
|
||||
|
||||
// Disable infinite scroll
|
||||
if (this.recipeManager.observer) {
|
||||
this.recipeManager.observer.disconnect();
|
||||
this.recipeManager.observer = null;
|
||||
}
|
||||
|
||||
// Add duplicate-mode class to the body
|
||||
document.body.classList.add('duplicate-mode');
|
||||
|
||||
// Render duplicate groups
|
||||
this.renderDuplicateGroups();
|
||||
|
||||
// Update selected count
|
||||
this.updateSelectedCount();
|
||||
}
|
||||
|
||||
exitDuplicateMode() {
|
||||
this.inDuplicateMode = false;
|
||||
this.selectedForDeletion.clear();
|
||||
|
||||
// Update state
|
||||
const pageState = getCurrentPageState();
|
||||
pageState.duplicatesMode = false;
|
||||
|
||||
// Hide duplicates banner
|
||||
const banner = document.getElementById('duplicatesBanner');
|
||||
if (banner) {
|
||||
banner.style.display = 'none';
|
||||
}
|
||||
|
||||
// Remove duplicate-mode class from the body
|
||||
document.body.classList.remove('duplicate-mode');
|
||||
|
||||
// Reload normal recipes view
|
||||
this.recipeManager.loadRecipes();
|
||||
|
||||
// Reinitialize infinite scroll
|
||||
setTimeout(() => {
|
||||
initializeInfiniteScroll('recipes');
|
||||
}, 500);
|
||||
}
|
||||
|
||||
renderDuplicateGroups() {
|
||||
const recipeGrid = document.getElementById('recipeGrid');
|
||||
if (!recipeGrid) return;
|
||||
|
||||
// Clear existing content
|
||||
recipeGrid.innerHTML = '';
|
||||
|
||||
// Render each duplicate group
|
||||
this.duplicateGroups.forEach((group, groupIndex) => {
|
||||
const groupDiv = document.createElement('div');
|
||||
groupDiv.className = 'duplicate-group';
|
||||
groupDiv.dataset.fingerprint = group.fingerprint;
|
||||
|
||||
// Create group header
|
||||
const header = document.createElement('div');
|
||||
header.className = 'duplicate-group-header';
|
||||
header.innerHTML = `
|
||||
<span>Duplicate Group #${groupIndex + 1} (${group.recipes.length} recipes)</span>
|
||||
<span>
|
||||
<button class="btn-select-all" onclick="recipeManager.duplicatesManager.toggleSelectAllInGroup('${group.fingerprint}')">
|
||||
Select All
|
||||
</button>
|
||||
<button class="btn-select-latest" onclick="recipeManager.duplicatesManager.selectLatestInGroup('${group.fingerprint}')">
|
||||
Keep Latest
|
||||
</button>
|
||||
</span>
|
||||
`;
|
||||
groupDiv.appendChild(header);
|
||||
|
||||
// Create cards container
|
||||
const cardsDiv = document.createElement('div');
|
||||
cardsDiv.className = 'card-group-container';
|
||||
|
||||
// Add scrollable class if there are many recipes in the group
|
||||
if (group.recipes.length > 6) {
|
||||
cardsDiv.classList.add('scrollable');
|
||||
|
||||
// Add expand/collapse toggle button
|
||||
const toggleBtn = document.createElement('button');
|
||||
toggleBtn.className = 'group-toggle-btn';
|
||||
toggleBtn.innerHTML = '<i class="fas fa-chevron-down"></i>';
|
||||
toggleBtn.title = "Expand/Collapse";
|
||||
toggleBtn.onclick = function() {
|
||||
cardsDiv.classList.toggle('scrollable');
|
||||
this.innerHTML = cardsDiv.classList.contains('scrollable') ?
|
||||
'<i class="fas fa-chevron-down"></i>' :
|
||||
'<i class="fas fa-chevron-up"></i>';
|
||||
};
|
||||
groupDiv.appendChild(toggleBtn);
|
||||
}
|
||||
|
||||
// Sort recipes by date (newest first)
|
||||
const sortedRecipes = [...group.recipes].sort((a, b) => b.modified - a.modified);
|
||||
|
||||
// Add all recipe cards in this group
|
||||
sortedRecipes.forEach((recipe, index) => {
|
||||
// Create recipe card
|
||||
const recipeCard = new RecipeCard(recipe, (recipe) => {
|
||||
this.recipeManager.showRecipeDetails(recipe);
|
||||
});
|
||||
const card = recipeCard.element;
|
||||
|
||||
// Add duplicate class
|
||||
card.classList.add('duplicate');
|
||||
|
||||
// Mark the latest one
|
||||
if (index === 0) {
|
||||
card.classList.add('latest');
|
||||
}
|
||||
|
||||
// Add selection checkbox
|
||||
const checkbox = document.createElement('input');
|
||||
checkbox.type = 'checkbox';
|
||||
checkbox.className = 'selector-checkbox';
|
||||
checkbox.dataset.recipeId = recipe.id;
|
||||
checkbox.dataset.groupFingerprint = group.fingerprint;
|
||||
|
||||
// Check if already selected
|
||||
if (this.selectedForDeletion.has(recipe.id)) {
|
||||
checkbox.checked = true;
|
||||
card.classList.add('duplicate-selected');
|
||||
}
|
||||
|
||||
// Add change event to checkbox
|
||||
checkbox.addEventListener('change', (e) => {
|
||||
e.stopPropagation();
|
||||
this.toggleCardSelection(recipe.id, card, checkbox);
|
||||
});
|
||||
|
||||
// Make the entire card clickable for selection
|
||||
card.addEventListener('click', (e) => {
|
||||
// Don't toggle if clicking on the checkbox directly or card actions
|
||||
if (e.target === checkbox || e.target.closest('.card-actions')) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Toggle checkbox state
|
||||
checkbox.checked = !checkbox.checked;
|
||||
this.toggleCardSelection(recipe.id, card, checkbox);
|
||||
});
|
||||
|
||||
card.appendChild(checkbox);
|
||||
cardsDiv.appendChild(card);
|
||||
});
|
||||
|
||||
groupDiv.appendChild(cardsDiv);
|
||||
recipeGrid.appendChild(groupDiv);
|
||||
});
|
||||
}
|
||||
|
||||
// Helper method to toggle card selection state
|
||||
toggleCardSelection(recipeId, card, checkbox) {
|
||||
if (checkbox.checked) {
|
||||
this.selectedForDeletion.add(recipeId);
|
||||
card.classList.add('duplicate-selected');
|
||||
} else {
|
||||
this.selectedForDeletion.delete(recipeId);
|
||||
card.classList.remove('duplicate-selected');
|
||||
}
|
||||
|
||||
this.updateSelectedCount();
|
||||
}
|
||||
|
||||
updateSelectedCount() {
|
||||
const selectedCountEl = document.getElementById('selectedCount');
|
||||
if (selectedCountEl) {
|
||||
selectedCountEl.textContent = this.selectedForDeletion.size;
|
||||
}
|
||||
|
||||
// Update delete button state
|
||||
const deleteBtn = document.querySelector('.btn-delete-selected');
|
||||
if (deleteBtn) {
|
||||
deleteBtn.disabled = this.selectedForDeletion.size === 0;
|
||||
deleteBtn.classList.toggle('disabled', this.selectedForDeletion.size === 0);
|
||||
}
|
||||
}
|
||||
|
||||
toggleSelectAllInGroup(fingerprint) {
|
||||
const checkboxes = document.querySelectorAll(`.selector-checkbox[data-group-fingerprint="${fingerprint}"]`);
|
||||
const allSelected = Array.from(checkboxes).every(checkbox => checkbox.checked);
|
||||
|
||||
// If all are selected, deselect all; otherwise select all
|
||||
checkboxes.forEach(checkbox => {
|
||||
checkbox.checked = !allSelected;
|
||||
const recipeId = checkbox.dataset.recipeId;
|
||||
const card = checkbox.closest('.lora-card');
|
||||
|
||||
if (!allSelected) {
|
||||
this.selectedForDeletion.add(recipeId);
|
||||
card.classList.add('duplicate-selected');
|
||||
} else {
|
||||
this.selectedForDeletion.delete(recipeId);
|
||||
card.classList.remove('duplicate-selected');
|
||||
}
|
||||
});
|
||||
|
||||
// Update the button text
|
||||
const button = document.querySelector(`.duplicate-group[data-fingerprint="${fingerprint}"] .btn-select-all`);
|
||||
if (button) {
|
||||
button.textContent = !allSelected ? "Deselect All" : "Select All";
|
||||
}
|
||||
|
||||
this.updateSelectedCount();
|
||||
}
|
||||
|
||||
selectAllInGroup(fingerprint) {
|
||||
const checkboxes = document.querySelectorAll(`.selector-checkbox[data-group-fingerprint="${fingerprint}"]`);
|
||||
checkboxes.forEach(checkbox => {
|
||||
checkbox.checked = true;
|
||||
this.selectedForDeletion.add(checkbox.dataset.recipeId);
|
||||
checkbox.closest('.lora-card').classList.add('duplicate-selected');
|
||||
});
|
||||
|
||||
// Update the button text
|
||||
const button = document.querySelector(`.duplicate-group[data-fingerprint="${fingerprint}"] .btn-select-all`);
|
||||
if (button) {
|
||||
button.textContent = "Deselect All";
|
||||
}
|
||||
|
||||
this.updateSelectedCount();
|
||||
}
|
||||
|
||||
selectLatestInGroup(fingerprint) {
|
||||
// Find all checkboxes in this group
|
||||
const checkboxes = document.querySelectorAll(`.selector-checkbox[data-group-fingerprint="${fingerprint}"]`);
|
||||
|
||||
// Get all the recipes in this group
|
||||
const group = this.duplicateGroups.find(g => g.fingerprint === fingerprint);
|
||||
if (!group) return;
|
||||
|
||||
// Sort recipes by date (newest first)
|
||||
const sortedRecipes = [...group.recipes].sort((a, b) => b.modified - a.modified);
|
||||
|
||||
// Skip the first (latest) one and select the rest for deletion
|
||||
for (let i = 1; i < sortedRecipes.length; i++) {
|
||||
const recipeId = sortedRecipes[i].id;
|
||||
const checkbox = document.querySelector(`.selector-checkbox[data-recipe-id="${recipeId}"]`);
|
||||
|
||||
if (checkbox) {
|
||||
checkbox.checked = true;
|
||||
this.selectedForDeletion.add(recipeId);
|
||||
checkbox.closest('.lora-card').classList.add('duplicate-selected');
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure the latest one is not selected
|
||||
const latestId = sortedRecipes[0].id;
|
||||
const latestCheckbox = document.querySelector(`.selector-checkbox[data-recipe-id="${latestId}"]`);
|
||||
|
||||
if (latestCheckbox) {
|
||||
latestCheckbox.checked = false;
|
||||
this.selectedForDeletion.delete(latestId);
|
||||
latestCheckbox.closest('.lora-card').classList.remove('duplicate-selected');
|
||||
}
|
||||
|
||||
this.updateSelectedCount();
|
||||
}
|
||||
|
||||
selectLatestDuplicates() {
|
||||
// For each duplicate group, select all but the latest recipe
|
||||
this.duplicateGroups.forEach(group => {
|
||||
this.selectLatestInGroup(group.fingerprint);
|
||||
});
|
||||
}
|
||||
|
||||
async deleteSelectedDuplicates() {
|
||||
if (this.selectedForDeletion.size === 0) {
|
||||
showToast('No recipes selected for deletion', 'info');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Show the delete confirmation modal instead of a simple confirm
|
||||
const duplicateDeleteCount = document.getElementById('duplicateDeleteCount');
|
||||
if (duplicateDeleteCount) {
|
||||
duplicateDeleteCount.textContent = this.selectedForDeletion.size;
|
||||
}
|
||||
|
||||
// Use the modal manager to show the confirmation modal
|
||||
modalManager.showModal('duplicateDeleteModal');
|
||||
} catch (error) {
|
||||
console.error('Error preparing delete:', error);
|
||||
showToast('Error: ' + error.message, 'error');
|
||||
}
|
||||
}
|
||||
|
||||
// Add new method to execute deletion after confirmation
|
||||
async confirmDeleteDuplicates() {
|
||||
try {
|
||||
document.body.classList.add('loading');
|
||||
|
||||
// Close the modal
|
||||
modalManager.closeModal('duplicateDeleteModal');
|
||||
|
||||
// Prepare recipe IDs for deletion
|
||||
const recipeIds = Array.from(this.selectedForDeletion);
|
||||
|
||||
// Call API to bulk delete
|
||||
const response = await fetch('/api/recipes/bulk-delete', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({ recipe_ids: recipeIds })
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to delete selected recipes');
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
if (!data.success) {
|
||||
throw new Error(data.error || 'Unknown error deleting recipes');
|
||||
}
|
||||
|
||||
showToast(`Successfully deleted ${data.total_deleted} recipes`, 'success');
|
||||
|
||||
// Exit duplicate mode if deletions were successful
|
||||
if (data.total_deleted > 0) {
|
||||
this.exitDuplicateMode();
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error deleting recipes:', error);
|
||||
showToast('Failed to delete recipes: ' + error.message, 'error');
|
||||
} finally {
|
||||
document.body.classList.remove('loading');
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,16 @@
|
||||
// Recipe Card Component
|
||||
import { showToast, copyToClipboard } from '../utils/uiHelpers.js';
|
||||
import { modalManager } from '../managers/ModalManager.js';
|
||||
import { getCurrentPageState } from '../state/index.js';
|
||||
|
||||
class RecipeCard {
|
||||
constructor(recipe, clickHandler) {
|
||||
this.recipe = recipe;
|
||||
this.clickHandler = clickHandler;
|
||||
this.element = this.createCardElement();
|
||||
|
||||
// Store reference to this instance on the DOM element for updates
|
||||
this.element._recipeCardInstance = this;
|
||||
}
|
||||
|
||||
createCardElement() {
|
||||
@@ -33,10 +37,15 @@ class RecipeCard {
|
||||
(this.recipe.file_path ? `/loras_static/root1/preview/${this.recipe.file_path.split('/').pop()}` :
|
||||
'/loras_static/images/no-preview.png');
|
||||
|
||||
// Check if in duplicates mode
|
||||
const pageState = getCurrentPageState();
|
||||
const isDuplicatesMode = pageState.duplicatesMode;
|
||||
|
||||
card.innerHTML = `
|
||||
<div class="recipe-indicator" title="Recipe">R</div>
|
||||
${!isDuplicatesMode ? `<div class="recipe-indicator" title="Recipe">R</div>` : ''}
|
||||
<div class="card-preview">
|
||||
<img src="${imageUrl}" alt="${this.recipe.title}">
|
||||
${!isDuplicatesMode ? `
|
||||
<div class="card-header">
|
||||
<div class="base-model-wrapper">
|
||||
${baseModel ? `<span class="base-model-label" title="${baseModel}">${baseModel}</span>` : ''}
|
||||
@@ -47,19 +56,22 @@ class RecipeCard {
|
||||
<i class="fas fa-trash" title="Delete Recipe"></i>
|
||||
</div>
|
||||
</div>
|
||||
` : ''}
|
||||
<div class="card-footer">
|
||||
<div class="model-info">
|
||||
<span class="model-name">${this.recipe.title}</span>
|
||||
</div>
|
||||
${!isDuplicatesMode ? `
|
||||
<div class="lora-count ${allLorasAvailable ? 'ready' : (lorasCount > 0 ? 'missing' : '')}"
|
||||
title="${this.getLoraStatusTitle(lorasCount, missingLorasCount)}">
|
||||
<i class="fas fa-layer-group"></i> ${lorasCount}
|
||||
</div>
|
||||
` : ''}
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
|
||||
this.attachEventListeners(card);
|
||||
this.attachEventListeners(card, isDuplicatesMode);
|
||||
return card;
|
||||
}
|
||||
|
||||
@@ -69,29 +81,31 @@ class RecipeCard {
|
||||
return `${missingCount} of ${totalCount} LoRAs missing`;
|
||||
}
|
||||
|
||||
attachEventListeners(card) {
|
||||
// Recipe card click event
|
||||
card.addEventListener('click', () => {
|
||||
this.clickHandler(this.recipe);
|
||||
});
|
||||
|
||||
// Share button click event - prevent propagation to card
|
||||
card.querySelector('.fa-share-alt')?.addEventListener('click', (e) => {
|
||||
e.stopPropagation();
|
||||
this.shareRecipe();
|
||||
});
|
||||
|
||||
// Copy button click event - prevent propagation to card
|
||||
card.querySelector('.fa-copy')?.addEventListener('click', (e) => {
|
||||
e.stopPropagation();
|
||||
this.copyRecipeSyntax();
|
||||
});
|
||||
|
||||
// Delete button click event - prevent propagation to card
|
||||
card.querySelector('.fa-trash')?.addEventListener('click', (e) => {
|
||||
e.stopPropagation();
|
||||
this.showDeleteConfirmation();
|
||||
});
|
||||
attachEventListeners(card, isDuplicatesMode) {
|
||||
// Recipe card click event - only attach if not in duplicates mode
|
||||
if (!isDuplicatesMode) {
|
||||
card.addEventListener('click', () => {
|
||||
this.clickHandler(this.recipe);
|
||||
});
|
||||
|
||||
// Share button click event - prevent propagation to card
|
||||
card.querySelector('.fa-share-alt')?.addEventListener('click', (e) => {
|
||||
e.stopPropagation();
|
||||
this.shareRecipe();
|
||||
});
|
||||
|
||||
// Copy button click event - prevent propagation to card
|
||||
card.querySelector('.fa-copy')?.addEventListener('click', (e) => {
|
||||
e.stopPropagation();
|
||||
this.copyRecipeSyntax();
|
||||
});
|
||||
|
||||
// Delete button click event - prevent propagation to card
|
||||
card.querySelector('.fa-trash')?.addEventListener('click', (e) => {
|
||||
e.stopPropagation();
|
||||
this.showDeleteConfirmation();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
copyRecipeSyntax() {
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
import { showToast, copyToClipboard } from '../utils/uiHelpers.js';
|
||||
import { state } from '../state/index.js';
|
||||
import { setSessionItem, removeSessionItem } from '../utils/storageHelpers.js';
|
||||
import { updateRecipeCard } from '../utils/cardUpdater.js';
|
||||
|
||||
class RecipeModal {
|
||||
constructor() {
|
||||
@@ -82,7 +83,7 @@ class RecipeModal {
|
||||
|
||||
showRecipeDetails(recipe) {
|
||||
// Store the full recipe for editing
|
||||
this.currentRecipe = JSON.parse(JSON.stringify(recipe)); // 深拷贝以避免对原始对象的修改
|
||||
this.currentRecipe = recipe;
|
||||
|
||||
// Set modal title with edit icon
|
||||
const modalTitle = document.getElementById('recipeModalTitle');
|
||||
@@ -245,6 +246,45 @@ class RecipeModal {
|
||||
imgElement.alt = recipe.title || 'Recipe Preview';
|
||||
mediaContainer.appendChild(imgElement);
|
||||
}
|
||||
|
||||
// Add source URL container if the recipe has a source_path
|
||||
const sourceUrlContainer = document.createElement('div');
|
||||
sourceUrlContainer.className = 'source-url-container';
|
||||
const hasSourceUrl = recipe.source_path && recipe.source_path.trim().length > 0;
|
||||
const sourceUrl = hasSourceUrl ? recipe.source_path : '';
|
||||
const isValidUrl = hasSourceUrl && (sourceUrl.startsWith('http://') || sourceUrl.startsWith('https://'));
|
||||
|
||||
sourceUrlContainer.innerHTML = `
|
||||
<div class="source-url-content">
|
||||
<span class="source-url-icon"><i class="fas fa-link"></i></span>
|
||||
<span class="source-url-text" title="${isValidUrl ? 'Click to open source URL' : 'No valid URL'}">${
|
||||
hasSourceUrl ? sourceUrl : 'No source URL'
|
||||
}</span>
|
||||
</div>
|
||||
<button class="source-url-edit-btn" title="Edit source URL">
|
||||
<i class="fas fa-pencil-alt"></i>
|
||||
</button>
|
||||
`;
|
||||
|
||||
// Add source URL editor
|
||||
const sourceUrlEditor = document.createElement('div');
|
||||
sourceUrlEditor.className = 'source-url-editor';
|
||||
sourceUrlEditor.innerHTML = `
|
||||
<input type="text" class="source-url-input" placeholder="Enter source URL (e.g., https://civitai.com/...)" value="${sourceUrl}">
|
||||
<div class="source-url-actions">
|
||||
<button class="source-url-cancel-btn">Cancel</button>
|
||||
<button class="source-url-save-btn">Save</button>
|
||||
</div>
|
||||
`;
|
||||
|
||||
// Append both containers to the media container
|
||||
mediaContainer.appendChild(sourceUrlContainer);
|
||||
mediaContainer.appendChild(sourceUrlEditor);
|
||||
|
||||
// Set up event listeners for source URL functionality
|
||||
setTimeout(() => {
|
||||
this.setupSourceUrlHandlers();
|
||||
}, 50);
|
||||
}
|
||||
|
||||
// Set generation parameters
|
||||
@@ -451,8 +491,6 @@ class RecipeModal {
|
||||
lorasListElement.innerHTML = '<div class="no-loras">No LoRAs associated with this recipe</div>';
|
||||
this.recipeLorasSyntax = '';
|
||||
}
|
||||
|
||||
console.log(this.currentRecipe.loras);
|
||||
|
||||
// Show the modal
|
||||
modalManager.showModal('recipeModal');
|
||||
@@ -648,50 +686,8 @@ class RecipeModal {
|
||||
// 更新当前recipe对象的属性
|
||||
Object.assign(this.currentRecipe, updates);
|
||||
|
||||
// 确保这个更新也传播到卡片视图
|
||||
// 尝试找到可能显示这个recipe的卡片并更新它
|
||||
try {
|
||||
const recipeCards = document.querySelectorAll('.recipe-card');
|
||||
recipeCards.forEach(card => {
|
||||
if (card.dataset.recipeId === this.recipeId) {
|
||||
// 更新卡片标题
|
||||
if (updates.title) {
|
||||
const titleElement = card.querySelector('.recipe-title');
|
||||
if (titleElement) {
|
||||
titleElement.textContent = updates.title;
|
||||
}
|
||||
}
|
||||
|
||||
// 更新卡片标签
|
||||
if (updates.tags) {
|
||||
const tagsElement = card.querySelector('.recipe-tags');
|
||||
if (tagsElement) {
|
||||
if (updates.tags.length > 0) {
|
||||
tagsElement.innerHTML = updates.tags.map(
|
||||
tag => `<div class="recipe-tag">${tag}</div>`
|
||||
).join('');
|
||||
} else {
|
||||
tagsElement.innerHTML = '';
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
console.log("Non-critical error updating recipe cards:", err);
|
||||
}
|
||||
|
||||
// 重要:强制刷新recipes列表,确保从服务器获取最新数据
|
||||
try {
|
||||
if (window.recipeManager && typeof window.recipeManager.loadRecipes === 'function') {
|
||||
// 异步刷新recipes列表,不阻塞用户界面
|
||||
setTimeout(() => {
|
||||
window.recipeManager.loadRecipes(true);
|
||||
}, 500);
|
||||
}
|
||||
} catch (err) {
|
||||
console.log("Error refreshing recipes list:", err);
|
||||
}
|
||||
// Update the recipe card in the UI
|
||||
updateRecipeCard(this.recipeId, updates);
|
||||
} else {
|
||||
showToast(`Failed to update recipe: ${data.error}`, 'error');
|
||||
}
|
||||
@@ -951,8 +947,8 @@ class RecipeModal {
|
||||
let loraSyntaxMatch = inputValue.match(/<lora:([^:>]+)(?::[^>]+)?>/);
|
||||
let fileName = loraSyntaxMatch ? loraSyntaxMatch[1] : inputValue.trim();
|
||||
|
||||
// Remove any file extension if present
|
||||
fileName = fileName.replace(/\.\w+$/, '');
|
||||
// Remove .safetensors extension if present
|
||||
fileName = fileName.replace(/\.safetensors$/, '');
|
||||
|
||||
// Get the deleted lora data
|
||||
const deletedLora = this.currentRecipe.loras[loraIndex];
|
||||
@@ -1069,6 +1065,56 @@ class RecipeModal {
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// New method to set up source URL handlers
|
||||
setupSourceUrlHandlers() {
|
||||
const sourceUrlContainer = document.querySelector('.source-url-container');
|
||||
const sourceUrlEditor = document.querySelector('.source-url-editor');
|
||||
const sourceUrlText = sourceUrlContainer.querySelector('.source-url-text');
|
||||
const sourceUrlEditBtn = sourceUrlContainer.querySelector('.source-url-edit-btn');
|
||||
const sourceUrlCancelBtn = sourceUrlEditor.querySelector('.source-url-cancel-btn');
|
||||
const sourceUrlSaveBtn = sourceUrlEditor.querySelector('.source-url-save-btn');
|
||||
const sourceUrlInput = sourceUrlEditor.querySelector('.source-url-input');
|
||||
|
||||
// Show editor on edit button click
|
||||
sourceUrlEditBtn.addEventListener('click', () => {
|
||||
sourceUrlContainer.classList.add('hide');
|
||||
sourceUrlEditor.classList.add('active');
|
||||
sourceUrlInput.focus();
|
||||
});
|
||||
|
||||
// Cancel editing
|
||||
sourceUrlCancelBtn.addEventListener('click', () => {
|
||||
sourceUrlEditor.classList.remove('active');
|
||||
sourceUrlContainer.classList.remove('hide');
|
||||
sourceUrlInput.value = this.currentRecipe.source_path || '';
|
||||
});
|
||||
|
||||
// Save new source URL
|
||||
sourceUrlSaveBtn.addEventListener('click', () => {
|
||||
const newSourceUrl = sourceUrlInput.value.trim();
|
||||
if (newSourceUrl && newSourceUrl !== this.currentRecipe.source_path) {
|
||||
// Update source URL in the UI
|
||||
sourceUrlText.textContent = newSourceUrl;
|
||||
sourceUrlText.title = newSourceUrl.startsWith('http://') || newSourceUrl.startsWith('https://') ? 'Click to open source URL' : 'No valid URL';
|
||||
|
||||
// Update the recipe on the server
|
||||
this.updateRecipeMetadata({ source_path: newSourceUrl });
|
||||
}
|
||||
|
||||
// Hide editor
|
||||
sourceUrlEditor.classList.remove('active');
|
||||
sourceUrlContainer.classList.remove('hide');
|
||||
});
|
||||
|
||||
// Open source URL in a new tab if it's valid
|
||||
sourceUrlText.addEventListener('click', () => {
|
||||
const url = sourceUrlText.textContent.trim();
|
||||
if (url.startsWith('http://') || url.startsWith('https://')) {
|
||||
window.open(url, '_blank');
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export { RecipeModal };
|
||||
File diff suppressed because it is too large
Load Diff
@@ -158,6 +158,18 @@ export class ModalManager {
|
||||
});
|
||||
}
|
||||
|
||||
// Add duplicateDeleteModal registration
|
||||
const duplicateDeleteModal = document.getElementById('duplicateDeleteModal');
|
||||
if (duplicateDeleteModal) {
|
||||
this.registerModal('duplicateDeleteModal', {
|
||||
element: duplicateDeleteModal,
|
||||
onClose: () => {
|
||||
this.getModal('duplicateDeleteModal').element.classList.remove('show');
|
||||
document.body.classList.remove('modal-open');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Set up event listeners for modal toggles
|
||||
const supportToggle = document.getElementById('supportToggleBtn');
|
||||
if (supportToggle) {
|
||||
@@ -221,7 +233,7 @@ export class ModalManager {
|
||||
// Store current scroll position before showing modal
|
||||
this.scrollPosition = window.scrollY;
|
||||
|
||||
if (id === 'deleteModal' || id === 'excludeModal') {
|
||||
if (id === 'deleteModal' || id === 'excludeModal' || id === 'duplicateDeleteModal') {
|
||||
modal.element.classList.add('show');
|
||||
} else {
|
||||
modal.element.style.display = 'block';
|
||||
|
||||
256
static/js/managers/import/DownloadManager.js
Normal file
256
static/js/managers/import/DownloadManager.js
Normal file
@@ -0,0 +1,256 @@
|
||||
import { showToast } from '../../utils/uiHelpers.js';
|
||||
|
||||
export class DownloadManager {
    /**
     * Saves a recipe and downloads its missing LoRA files.
     *
     * @param {object} importManager - Owning import manager; supplies recipe
     *     state (recipeId, recipeName, recipeTags, recipeData, importMode,
     *     recipeImage, downloadableLoRAs, selectedFolder) and the shared
     *     loadingManager.
     */
    constructor(importManager) {
        this.importManager = importManager;
    }

    /**
     * Save the current recipe (unless in download-only mode for an existing
     * recipe) and then download any missing LoRAs.
     *
     * Side effects: POSTs to /api/recipes/save, shows toasts, closes the
     * import modal, and refreshes the recipe list.
     */
    async saveRecipe() {
        // Download-only mode: an existing recipe id means we only fetch LoRAs.
        const isDownloadOnly = !!this.importManager.recipeId;

        if (!isDownloadOnly && !this.importManager.recipeName) {
            showToast('Please enter a recipe name', 'error');
            return;
        }

        try {
            // Show progress indicator
            this.importManager.loadingManager.showSimpleLoading(isDownloadOnly ? 'Downloading LoRAs...' : 'Saving recipe...');

            // Only send the complete recipe to save if not in download-only mode
            if (!isDownloadOnly) {
                const formData = new FormData();

                // Image data source depends on how the recipe was imported.
                if (this.importManager.recipeImage) {
                    // Direct upload
                    formData.append('image', this.importManager.recipeImage);
                } else if (this.importManager.recipeData && this.importManager.recipeData.image_base64) {
                    // URL mode with base64 payload already fetched
                    formData.append('image_base64', this.importManager.recipeData.image_base64);
                } else if (this.importManager.importMode === 'url') {
                    // Fallback for URL mode - tell backend to fetch the image again
                    const urlInput = document.getElementById('imageUrlInput');
                    if (urlInput && urlInput.value) {
                        formData.append('image_url', urlInput.value);
                    } else {
                        throw new Error('No image data available');
                    }
                } else {
                    throw new Error('No image data available');
                }

                formData.append('name', this.importManager.recipeName);
                formData.append('tags', JSON.stringify(this.importManager.recipeTags));

                // Prepare complete metadata including generation parameters
                const completeMetadata = {
                    base_model: this.importManager.recipeData.base_model || "",
                    loras: this.importManager.recipeData.loras || [],
                    gen_params: this.importManager.recipeData.gen_params || {},
                    raw_metadata: this.importManager.recipeData.raw_metadata || {}
                };

                // Track where the recipe was imported from.
                if (this.importManager.importMode === 'url') {
                    const urlInput = document.getElementById('imageUrlInput');
                    if (urlInput && urlInput.value) {
                        completeMetadata.source_path = urlInput.value;
                    }
                }

                formData.append('metadata', JSON.stringify(completeMetadata));

                // Send save request
                const response = await fetch('/api/recipes/save', {
                    method: 'POST',
                    body: formData
                });

                const result = await response.json();

                if (!result.success) {
                    console.error("Failed to save recipe:", result.error);
                    showToast(result.error, 'error');
                    // NOTE(review): modalManager is not imported in this module —
                    // presumably a global or missing import; verify.
                    modalManager.closeModal('importModal');
                    return;
                }
            }

            // Download missing LoRAs, if any.
            // FIX: capture the failure count returned by downloadMissingLoras()
            // (previously the return value was discarded, so the success toast
            // below was shown even when downloads failed).
            let failedDownloads = 0;
            if (this.importManager.downloadableLoRAs && this.importManager.downloadableLoRAs.length > 0) {
                failedDownloads = await this.downloadMissingLoras();
            }

            // Show success message
            if (isDownloadOnly) {
                if (failedDownloads === 0) {
                    showToast('LoRAs downloaded successfully', 'success');
                }
            } else {
                showToast(`Recipe "${this.importManager.recipeName}" saved successfully`, 'success');
            }

            // Close modal
            modalManager.closeModal('importModal');

            // Refresh the recipe list so the new/updated recipe appears.
            window.recipeManager.loadRecipes();

        } catch (error) {
            console.error('Error:', error);
            showToast(error.message, 'error');
        } finally {
            this.importManager.loadingManager.hide();
        }
    }

    /**
     * Download every LoRA in importManager.downloadableLoRAs into the selected
     * target folder, streaming progress over a WebSocket.
     *
     * @returns {Promise<number>} Number of downloads that failed.
     * @throws {Error} If no LoRA root directory is selected.
     */
    async downloadMissingLoras() {
        // For download, we need to validate the target path
        const loraRoot = document.getElementById('importLoraRoot')?.value;
        if (!loraRoot) {
            throw new Error('Please select a LoRA root directory');
        }

        // Build target path: root + selected folder + optional new folder.
        let targetPath = loraRoot;
        if (this.importManager.selectedFolder) {
            targetPath += '/' + this.importManager.selectedFolder;
        }

        const newFolder = document.getElementById('importNewFolder')?.value?.trim();
        if (newFolder) {
            targetPath += '/' + newFolder;
        }

        // Set up WebSocket for progress updates
        const wsProtocol = window.location.protocol === 'https:' ? 'wss://' : 'ws://';
        const ws = new WebSocket(`${wsProtocol}${window.location.host}/ws/fetch-progress`);

        // Show enhanced loading with progress details for multiple items
        const updateProgress = this.importManager.loadingManager.showDownloadProgress(
            this.importManager.downloadableLoRAs.length
        );

        let completedDownloads = 0;
        let failedDownloads = 0;
        let accessFailures = 0;
        let currentLoraProgress = 0;

        // Progress tracking for the LoRA currently being downloaded.
        ws.onmessage = (event) => {
            const data = JSON.parse(event.data);
            if (data.status === 'progress') {
                currentLoraProgress = data.progress;

                // Get current LoRA name
                const currentLora = this.importManager.downloadableLoRAs[completedDownloads + failedDownloads];
                const loraName = currentLora ? currentLora.name : '';

                updateProgress(currentLoraProgress, completedDownloads, loraName);

                // Progress value doubles as a phase indicator:
                // <3 preparing, ==3 preview fetched, 3..100 downloading, >=100 finalizing.
                const position = `${completedDownloads + failedDownloads + 1}/${this.importManager.downloadableLoRAs.length}`;
                if (currentLoraProgress < 3) {
                    this.importManager.loadingManager.setStatus(
                        `Preparing download for LoRA ${position}`
                    );
                } else if (currentLoraProgress === 3) {
                    this.importManager.loadingManager.setStatus(
                        `Downloaded preview for LoRA ${position}`
                    );
                } else if (currentLoraProgress > 3 && currentLoraProgress < 100) {
                    this.importManager.loadingManager.setStatus(
                        `Downloading LoRA ${position}`
                    );
                } else {
                    this.importManager.loadingManager.setStatus(
                        `Finalizing LoRA ${position}`
                    );
                }
            }
        };

        for (let i = 0; i < this.importManager.downloadableLoRAs.length; i++) {
            const lora = this.importManager.downloadableLoRAs[i];

            // Reset current LoRA progress for new download
            currentLoraProgress = 0;

            this.importManager.loadingManager.setStatus(`Starting download for LoRA ${i+1}/${this.importManager.downloadableLoRAs.length}`);
            updateProgress(0, completedDownloads, lora.name);

            try {
                // Download the LoRA
                const response = await fetch('/api/download-lora', {
                    method: 'POST',
                    headers: { 'Content-Type': 'application/json' },
                    body: JSON.stringify({
                        download_url: lora.downloadUrl,
                        model_version_id: lora.modelVersionId,
                        model_hash: lora.hash,
                        lora_root: loraRoot,
                        relative_path: targetPath.replace(loraRoot + '/', '')
                    })
                });

                if (!response.ok) {
                    const errorText = await response.text();
                    console.error(`Failed to download LoRA ${lora.name}: ${errorText}`);

                    // Status 401 indicates an early-access / permission restriction.
                    if (response.status === 401) {
                        accessFailures++;
                        this.importManager.loadingManager.setStatus(
                            `Failed to download ${lora.name}: Access restricted`
                        );
                    }

                    failedDownloads++;
                    // Continue with next download
                } else {
                    completedDownloads++;

                    // Update progress to show completion of current LoRA
                    updateProgress(100, completedDownloads, '');

                    if (completedDownloads + failedDownloads < this.importManager.downloadableLoRAs.length) {
                        this.importManager.loadingManager.setStatus(
                            `Completed ${completedDownloads}/${this.importManager.downloadableLoRAs.length} LoRAs. Starting next download...`
                        );
                    }
                }
            } catch (downloadError) {
                console.error(`Error downloading LoRA ${lora.name}:`, downloadError);
                failedDownloads++;
                // Continue with next download
            }
        }

        // Close WebSocket
        ws.close();

        // Completion toast reflects the overall outcome.
        if (failedDownloads === 0) {
            showToast(`All ${completedDownloads} LoRAs downloaded successfully`, 'success');
        } else {
            if (accessFailures > 0) {
                showToast(
                    `Downloaded ${completedDownloads} of ${this.importManager.downloadableLoRAs.length} LoRAs. ${accessFailures} failed due to access restrictions. Check your API key in settings or early access status.`,
                    'error'
                );
            } else {
                showToast(`Downloaded ${completedDownloads} of ${this.importManager.downloadableLoRAs.length} LoRAs`, 'error');
            }
        }

        return failedDownloads;
    }
}
|
||||
220
static/js/managers/import/FolderBrowser.js
Normal file
220
static/js/managers/import/FolderBrowser.js
Normal file
@@ -0,0 +1,220 @@
|
||||
import { showToast } from '../../utils/uiHelpers.js';
|
||||
import { getStorageItem } from '../../utils/storageHelpers.js';
|
||||
|
||||
export class FolderBrowser {
    /**
     * Folder/location selection UI for the recipe import flow.
     *
     * @param {object} importManager - Owning import manager; provides
     *     downloadableLoRAs, selectedFolder, formatFileSize() and stepManager.
     */
    constructor(importManager) {
        this.importManager = importManager;
        this.folderClickHandler = null;
        // Bound once so the exact same reference can be removed in cleanup().
        this.updateTargetPath = this.updateTargetPath.bind(this);
    }

    /**
     * Show the location step, render the missing-LoRA summary, and populate
     * the LoRA root and folder pickers from the backend.
     */
    async proceedToLocation() {
        // Show the location step with special handling
        this.importManager.stepManager.showStep('locationStep');

        // Double-check after a short delay: the step's visibility has been
        // observed to be clobbered, so force display (and inject !important
        // CSS as a last resort).
        setTimeout(() => {
            const locationStep = document.getElementById('locationStep');
            if (locationStep.style.display !== 'block' ||
                window.getComputedStyle(locationStep).display !== 'block') {
                // Force display again
                locationStep.style.display = 'block';

                // If still not visible, try with injected style
                if (window.getComputedStyle(locationStep).display !== 'block') {
                    this.importManager.stepManager.injectedStyles = document.createElement('style');
                    this.importManager.stepManager.injectedStyles.innerHTML = `
                        #locationStep {
                            display: block !important;
                            opacity: 1 !important;
                            visibility: visible !important;
                        }
                    `;
                    document.head.appendChild(this.importManager.stepManager.injectedStyles);
                }
            }
        }, 100);

        try {
            // Display missing LoRAs that will be downloaded
            const missingLorasList = document.getElementById('missingLorasList');
            if (missingLorasList && this.importManager.downloadableLoRAs.length > 0) {
                // Total download size in bytes; unknown sizes count as 0.
                const totalSize = this.importManager.downloadableLoRAs.reduce((sum, lora) => {
                    return sum + (lora.size ? parseInt(lora.size) : 0);
                }, 0);

                // Update total size display
                const totalSizeDisplay = document.getElementById('totalDownloadSize');
                if (totalSizeDisplay) {
                    totalSizeDisplay.textContent = this.importManager.formatFileSize(totalSize);
                }

                // Header shows count and total size of missing LoRAs.
                const missingLorasHeader = document.querySelector('.summary-header h3');
                if (missingLorasHeader) {
                    missingLorasHeader.innerHTML = `Missing LoRAs <span class="lora-count-badge">(${this.importManager.downloadableLoRAs.length})</span> <span id="totalDownloadSize" class="total-size-badge">${this.importManager.formatFileSize(totalSize)}</span>`;
                }

                // Generate missing LoRAs list
                missingLorasList.innerHTML = this.importManager.downloadableLoRAs.map(lora => {
                    const sizeDisplay = lora.size ?
                        this.importManager.formatFileSize(lora.size) : 'Unknown size';
                    const baseModel = lora.baseModel ?
                        `<span class="lora-base-model">${lora.baseModel}</span>` : '';
                    const isEarlyAccess = lora.isEarlyAccess;

                    // Early access badge
                    let earlyAccessBadge = '';
                    if (isEarlyAccess) {
                        earlyAccessBadge = `<span class="early-access-badge">
                            <i class="fas fa-clock"></i> Early Access
                        </span>`;
                    }

                    return `
                        <div class="missing-lora-item ${isEarlyAccess ? 'is-early-access' : ''}">
                            <div class="missing-lora-info">
                                <div class="missing-lora-name">${lora.name}</div>
                                ${baseModel}
                                ${earlyAccessBadge}
                            </div>
                            <div class="missing-lora-size">${sizeDisplay}</div>
                        </div>
                    `;
                }).join('');

                // Collapse/expand toggle for the missing-LoRAs list.
                // FIX: assign via onclick instead of addEventListener so that
                // repeated calls to proceedToLocation() do not stack duplicate
                // listeners (two stacked listeners toggle the class twice per
                // click, which visually cancels out).
                const toggleBtn = document.getElementById('toggleMissingLorasList');
                if (toggleBtn) {
                    toggleBtn.onclick = () => {
                        missingLorasList.classList.toggle('collapsed');
                        const icon = toggleBtn.querySelector('i');
                        if (icon) {
                            icon.classList.toggle('fa-chevron-down');
                            icon.classList.toggle('fa-chevron-up');
                        }
                    };
                }
            }

            // Fetch LoRA roots
            const rootsResponse = await fetch('/api/lora-roots');
            if (!rootsResponse.ok) {
                throw new Error(`Failed to fetch LoRA roots: ${rootsResponse.status}`);
            }

            const rootsData = await rootsResponse.json();
            const loraRoot = document.getElementById('importLoraRoot');
            if (loraRoot) {
                loraRoot.innerHTML = rootsData.roots.map(root =>
                    `<option value="${root}">${root}</option>`
                ).join('');

                // Pre-select the configured default root when available.
                const defaultRoot = getStorageItem('settings', {}).default_loras_root;
                if (defaultRoot && rootsData.roots.includes(defaultRoot)) {
                    loraRoot.value = defaultRoot;
                }
            }

            // Fetch folders
            const foldersResponse = await fetch('/api/folders');
            if (!foldersResponse.ok) {
                throw new Error(`Failed to fetch folders: ${foldersResponse.status}`);
            }

            const foldersData = await foldersResponse.json();
            const folderBrowser = document.getElementById('importFolderBrowser');
            if (folderBrowser) {
                folderBrowser.innerHTML = foldersData.folders.map(folder =>
                    folder ? `<div class="folder-item" data-folder="${folder}">${folder}</div>` : ''
                ).join('');
            }

            // Initialize folder browser after loading data
            this.initializeFolderBrowser();
        } catch (error) {
            console.error('Error in API calls:', error);
            showToast(error.message, 'error');
        }
    }

    /**
     * Wire up click handling for folder selection and input listeners that
     * keep the target-path display current. Safe to call repeatedly: any
     * previously registered handlers are removed first.
     */
    initializeFolderBrowser() {
        const folderBrowser = document.getElementById('importFolderBrowser');
        if (!folderBrowser) return;

        // Cleanup existing handlers if any
        this.cleanup();

        this.folderClickHandler = (event) => {
            const folderItem = event.target.closest('.folder-item');
            if (!folderItem) return;

            if (folderItem.classList.contains('selected')) {
                // Clicking the selected folder deselects it.
                folderItem.classList.remove('selected');
                this.importManager.selectedFolder = '';
            } else {
                folderBrowser.querySelectorAll('.folder-item').forEach(f =>
                    f.classList.remove('selected'));
                folderItem.classList.add('selected');
                this.importManager.selectedFolder = folderItem.dataset.folder;
            }

            // Update path display after folder selection
            this.updateTargetPath();
        };

        folderBrowser.addEventListener('click', this.folderClickHandler);

        // Listeners that keep the target path display in sync with inputs.
        const loraRoot = document.getElementById('importLoraRoot');
        const newFolder = document.getElementById('importNewFolder');

        if (loraRoot) loraRoot.addEventListener('change', this.updateTargetPath);
        if (newFolder) newFolder.addEventListener('input', this.updateTargetPath);

        // Update initial path
        this.updateTargetPath();
    }

    /** Detach all listeners registered by initializeFolderBrowser(). */
    cleanup() {
        if (this.folderClickHandler) {
            const folderBrowser = document.getElementById('importFolderBrowser');
            if (folderBrowser) {
                folderBrowser.removeEventListener('click', this.folderClickHandler);
                this.folderClickHandler = null;
            }
        }

        // Remove path update listeners
        const loraRoot = document.getElementById('importLoraRoot');
        const newFolder = document.getElementById('importNewFolder');

        if (loraRoot) loraRoot.removeEventListener('change', this.updateTargetPath);
        if (newFolder) newFolder.removeEventListener('input', this.updateTargetPath);
    }

    /**
     * Render "<root>/<selected folder>/<new folder>" into the target-path
     * display, or a prompt when no root is selected yet.
     */
    updateTargetPath() {
        const pathDisplay = document.getElementById('importTargetPathDisplay');
        if (!pathDisplay) return;

        const loraRoot = document.getElementById('importLoraRoot')?.value || '';
        const newFolder = document.getElementById('importNewFolder')?.value?.trim() || '';

        let fullPath = loraRoot || 'Select a LoRA root directory';

        if (loraRoot) {
            if (this.importManager.selectedFolder) {
                fullPath += '/' + this.importManager.selectedFolder;
            }
            if (newFolder) {
                fullPath += '/' + newFolder;
            }
        }

        pathDisplay.innerHTML = `<span class="path-text">${fullPath}</span>`;
    }
}
|
||||
208
static/js/managers/import/ImageProcessor.js
Normal file
208
static/js/managers/import/ImageProcessor.js
Normal file
@@ -0,0 +1,208 @@
|
||||
import { showToast } from '../../utils/uiHelpers.js';
|
||||
|
||||
export class ImageProcessor {
    /**
     * Analyzes recipe images (uploaded file, remote URL, or local path) and
     * feeds the parsed recipe data back into the import flow.
     *
     * @param {object} importManager - Owning import manager; receives the
     *     parsed recipe data and drives step transitions.
     */
    constructor(importManager) {
        this.importManager = importManager;
    }

    /**
     * Handle a file chosen in the upload input: validate that it is an image
     * and kick off analysis automatically.
     */
    handleFileUpload(event) {
        const file = event.target.files[0];
        const errorElement = document.getElementById('uploadError');

        if (!file) return;

        // Validate file type
        if (!file.type.match('image.*')) {
            errorElement.textContent = 'Please select an image file';
            return;
        }

        // Reset error
        errorElement.textContent = '';
        this.importManager.recipeImage = file;

        // Auto-proceed to next step if file is selected
        this.importManager.uploadAndAnalyzeImage();
    }

    /**
     * Handle the URL/path input: dispatch to remote-URL or local-path
     * analysis depending on the input's form.
     */
    async handleUrlInput() {
        const urlInput = document.getElementById('imageUrlInput');
        const errorElement = document.getElementById('urlError');
        const input = urlInput.value.trim();

        // Validate input
        if (!input) {
            errorElement.textContent = 'Please enter a URL or file path';
            return;
        }

        // Reset error
        errorElement.textContent = '';

        this.importManager.loadingManager.showSimpleLoading('Processing input...');

        try {
            if (input.startsWith('http://') || input.startsWith('https://')) {
                // Handle as URL
                await this.analyzeImageFromUrl(input);
            } else {
                // Handle as local file path
                await this.analyzeImageFromLocalPath(input);
            }
        } catch (error) {
            errorElement.textContent = error.message || 'Failed to process input';
        } finally {
            this.importManager.loadingManager.hide();
        }
    }

    /**
     * Validate an analysis response, store the recipe data, compute missing
     * LoRAs, and advance to the details step. Shared by all three analysis
     * entry points (previously triplicated).
     *
     * @param {Response} response - fetch() response from an analyze endpoint.
     * @param {string} fallbackError - Message used when the server reports none.
     * @throws {Error} On HTTP errors, server-reported errors, or when the
     *     image contains no LoRA information.
     */
    async _processAnalysisResponse(response, fallbackError) {
        if (!response.ok) {
            const errorData = await response.json();
            throw new Error(errorData.error || fallbackError);
        }

        // Get recipe data from response
        this.importManager.recipeData = await response.json();

        // Check if we have an error message
        if (this.importManager.recipeData.error) {
            throw new Error(this.importManager.recipeData.error);
        }

        // Check if we have valid recipe data
        if (!this.importManager.recipeData ||
            !this.importManager.recipeData.loras ||
            this.importManager.recipeData.loras.length === 0) {
            throw new Error('No LoRA information found in this image');
        }

        // LoRAs not present locally will be offered for download.
        this.importManager.missingLoras = this.importManager.recipeData.loras.filter(
            lora => !lora.existsLocally
        );

        // Reset import as new flag
        this.importManager.importAsNew = false;

        // Proceed to recipe details step
        this.importManager.showRecipeDetailsStep();
    }

    /** Analyze an image the backend fetches from a remote URL. */
    async analyzeImageFromUrl(url) {
        try {
            const response = await fetch('/api/recipes/analyze-image', {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json'
                },
                body: JSON.stringify({ url: url })
            });

            await this._processAnalysisResponse(response, 'Failed to analyze image from URL');
        } catch (error) {
            console.error('Error analyzing URL:', error);
            throw error;
        }
    }

    /** Analyze an image the backend reads from a local file path. */
    async analyzeImageFromLocalPath(path) {
        try {
            const response = await fetch('/api/recipes/analyze-local-image', {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json'
                },
                body: JSON.stringify({ path: path })
            });

            await this._processAnalysisResponse(response, 'Failed to load image from local path');
        } catch (error) {
            console.error('Error analyzing local path:', error);
            throw error;
        }
    }

    /** Upload the selected image file and analyze its metadata. */
    async uploadAndAnalyzeImage() {
        if (!this.importManager.recipeImage) {
            showToast('Please select an image first', 'error');
            return;
        }

        try {
            this.importManager.loadingManager.showSimpleLoading('Analyzing image metadata...');

            // Create form data for upload
            const formData = new FormData();
            formData.append('image', this.importManager.recipeImage);

            // Upload image for analysis
            const response = await fetch('/api/recipes/analyze-image', {
                method: 'POST',
                body: formData
            });

            // FIX: the upload path previously skipped the response.ok check
            // that both other analysis paths perform, so an HTTP error could
            // surface as a confusing parse failure instead of the server's
            // error message.
            await this._processAnalysisResponse(response, 'Failed to analyze image');
        } catch (error) {
            document.getElementById('uploadError').textContent = error.message;
        } finally {
            this.importManager.loadingManager.hide();
        }
    }
}
|
||||
57
static/js/managers/import/ImportStepManager.js
Normal file
57
static/js/managers/import/ImportStepManager.js
Normal file
@@ -0,0 +1,57 @@
|
||||
export class ImportStepManager {
    constructor() {
        // The <style> element injected for the location step, or null.
        this.injectedStyles = null;
    }

    /**
     * Drop any previously injected <style> override and clear inline styles
     * on every import step so the next showStep() starts from a clean slate.
     */
    removeInjectedStyles() {
        const styleEl = this.injectedStyles;
        if (styleEl && styleEl.parentNode) {
            styleEl.parentNode.removeChild(styleEl);
            this.injectedStyles = null;
        }

        // Reset inline styles on all steps.
        for (const step of document.querySelectorAll('.import-step')) {
            step.style.cssText = '';
        }
    }

    /**
     * Show a single import step by element id, hiding all others.
     *
     * The location step additionally gets a persistent !important style
     * override, because its visibility can be clobbered by conflicting rules.
     */
    showStep(stepId) {
        // Clear leftovers from a previous step change.
        this.removeInjectedStyles();

        // Hide every step, then reveal the requested one.
        for (const step of document.querySelectorAll('.import-step')) {
            step.style.display = 'none';
        }

        const target = document.getElementById(stepId);
        if (!target) {
            return;
        }

        target.style.display = 'block';

        if (stepId === 'locationStep') {
            // Persistent override so this step wins against any conflicting CSS.
            const override = document.createElement('style');
            override.innerHTML = `
                #locationStep {
                    display: block !important;
                    opacity: 1 !important;
                    visibility: visible !important;
                }
            `;
            document.head.appendChild(override);
            this.injectedStyles = override;

            // Force layout recalculation.
            target.offsetHeight;
        }

        // Scroll the modal content back to the top.
        const modalContent = document.querySelector('#importModal .modal-content');
        if (modalContent) {
            modalContent.scrollTop = 0;
        }
    }
}
|
||||
436
static/js/managers/import/RecipeDataManager.js
Normal file
436
static/js/managers/import/RecipeDataManager.js
Normal file
@@ -0,0 +1,436 @@
|
||||
import { showToast } from '../../utils/uiHelpers.js';
|
||||
|
||||
/**
 * Manages the "recipe details" step of the recipe import modal: populates the
 * recipe name, preview image, LoRA list, early-access / deleted-LoRA warnings,
 * duplicate-recipe detection UI, and the tag editor.
 *
 * All DOM ids referenced here (#recipeName, #recipeImagePreview, #lorasList,
 * #detailsStep, #tagInput, #tagsContainer, #duplicateRecipesContainer) live in
 * the import-modal template; this class assumes that modal markup is present.
 */
export class RecipeDataManager {
    /**
     * @param {Object} importManager - Owning import manager; this class reads and
     *   writes its `recipeData`, `recipeName`, `recipeTags`, `recipeImage`,
     *   `missingLoras`, `downloadableLoRAs` and `duplicateRecipes` fields.
     */
    constructor(importManager) {
        this.importManager = importManager;
    }

    /**
     * Show the details step and populate every section of it from
     * `importManager.recipeData`.
     *
     * Name precedence: shared-recipe metadata title > first 10 words of the
     * generation prompt > uploaded image filename.
     *
     * NOTE(review): the code below assumes `recipeData.loras` is set by the time
     * this step is shown (the count/list sections dereference it unguarded) —
     * confirm the upload/url flows always populate it before navigating here.
     */
    showRecipeDetailsStep() {
        this.importManager.stepManager.showStep('detailsStep');

        // Set default recipe name from prompt or image filename
        const recipeName = document.getElementById('recipeName');

        // Check if we have recipe metadata from a shared recipe
        if (this.importManager.recipeData && this.importManager.recipeData.from_recipe_metadata) {
            // Use title from recipe metadata
            if (this.importManager.recipeData.title) {
                recipeName.value = this.importManager.recipeData.title;
                this.importManager.recipeName = this.importManager.recipeData.title;
            }

            // Use tags from recipe metadata
            if (this.importManager.recipeData.tags && Array.isArray(this.importManager.recipeData.tags)) {
                this.importManager.recipeTags = [...this.importManager.recipeData.tags];
                this.updateTagsDisplay();
            }
        } else if (this.importManager.recipeData &&
                   this.importManager.recipeData.gen_params &&
                   this.importManager.recipeData.gen_params.prompt) {
            // Use the first 10 words from the prompt as the default recipe name
            const promptWords = this.importManager.recipeData.gen_params.prompt.split(' ');
            const truncatedPrompt = promptWords.slice(0, 10).join(' ');
            recipeName.value = truncatedPrompt;
            this.importManager.recipeName = truncatedPrompt;

            // Set up click handler to select all text for easy editing.
            // NOTE(review): redundant with the unconditional block below, which
            // performs the same guarded registration for every branch.
            if (!recipeName.hasSelectAllHandler) {
                recipeName.addEventListener('click', function() {
                    this.select();
                });
                recipeName.hasSelectAllHandler = true;
            }
        } else if (this.importManager.recipeImage && !recipeName.value) {
            // Fallback to image filename (without extension) if no prompt is available
            const fileName = this.importManager.recipeImage.name.split('.')[0];
            recipeName.value = fileName;
            this.importManager.recipeName = fileName;
        }

        // Always set up click handler for easy editing if not already set.
        // `hasSelectAllHandler` is an ad-hoc expando flag on the input element
        // used to keep the listener from being registered twice.
        if (!recipeName.hasSelectAllHandler) {
            recipeName.addEventListener('click', function() {
                this.select();
            });
            recipeName.hasSelectAllHandler = true;
        }

        // Display the uploaded image in the preview
        const imagePreview = document.getElementById('recipeImagePreview');
        if (imagePreview) {
            if (this.importManager.recipeImage) {
                // For file upload mode: render the File object via a data URL
                const reader = new FileReader();
                reader.onload = (e) => {
                    imagePreview.innerHTML = `<img src="${e.target.result}" alt="Recipe preview">`;
                };
                reader.readAsDataURL(this.importManager.recipeImage);
            } else if (this.importManager.recipeData && this.importManager.recipeData.image_base64) {
                // For URL mode - use the base64 image data returned from the backend
                imagePreview.innerHTML = `<img src="data:image/jpeg;base64,${this.importManager.recipeData.image_base64}" alt="Recipe preview">`;
            } else if (this.importManager.importMode === 'url') {
                // Fallback for URL mode if no base64 data: hotlink the source URL
                const urlInput = document.getElementById('imageUrlInput');
                if (urlInput && urlInput.value) {
                    imagePreview.innerHTML = `<img src="${urlInput.value}" alt="Recipe preview" crossorigin="anonymous">`;
                }
            }
        }

        // Update LoRA count information ("x/y in library")
        const totalLoras = this.importManager.recipeData.loras.length;
        const existingLoras = this.importManager.recipeData.loras.filter(lora => lora.existsLocally).length;
        const loraCountInfo = document.getElementById('loraCountInfo');
        if (loraCountInfo) {
            loraCountInfo.textContent = `(${existingLoras}/${totalLoras} in library)`;
        }

        // Display LoRAs list: one card per LoRA with status / early-access badges
        const lorasList = document.getElementById('lorasList');
        if (lorasList) {
            lorasList.innerHTML = this.importManager.recipeData.loras.map(lora => {
                const existsLocally = lora.existsLocally;
                const isDeleted = lora.isDeleted;
                const isEarlyAccess = lora.isEarlyAccess;
                const localPath = lora.localPath || '';

                // Create status badge based on LoRA status:
                // deleted-from-Civitai > in-library > missing
                let statusBadge;
                if (isDeleted) {
                    statusBadge = `<div class="deleted-badge">
                        <i class="fas fa-exclamation-circle"></i> Deleted from Civitai
                    </div>`;
                } else {
                    statusBadge = existsLocally ?
                        `<div class="local-badge">
                            <i class="fas fa-check"></i> In Library
                            <div class="local-path">${localPath}</div>
                        </div>` :
                        `<div class="missing-badge">
                            <i class="fas fa-exclamation-triangle"></i> Not in Library
                        </div>`;
                }

                // Early access badge (shown additionally with other badges)
                let earlyAccessBadge = '';
                if (isEarlyAccess) {
                    // Format the early access end date if available
                    let earlyAccessInfo = 'This LoRA requires early access payment to download.';
                    if (lora.earlyAccessEndsAt) {
                        try {
                            const endDate = new Date(lora.earlyAccessEndsAt);
                            const formattedDate = endDate.toLocaleDateString();
                            earlyAccessInfo += ` Early access ends on ${formattedDate}.`;
                        } catch (e) {
                            console.warn('Failed to format early access date', e);
                        }
                    }

                    earlyAccessBadge = `<div class="early-access-badge">
                        <i class="fas fa-clock"></i> Early Access
                        <div class="early-access-info">${earlyAccessInfo} Verify that you have purchased early access before downloading.</div>
                    </div>`;
                }

                // Format size if available
                const sizeDisplay = lora.size ?
                    `<div class="size-badge">${this.importManager.formatFileSize(lora.size)}</div>` : '';

                return `
                    <div class="lora-item ${existsLocally ? 'exists-locally' : isDeleted ? 'is-deleted' : 'missing-locally'} ${isEarlyAccess ? 'is-early-access' : ''}">
                        <div class="lora-thumbnail">
                            <img src="${lora.thumbnailUrl || '/loras_static/images/no-preview.png'}" alt="LoRA preview">
                        </div>
                        <div class="lora-content">
                            <div class="lora-header">
                                <h3>${lora.name}</h3>
                                <div class="badge-container">
                                    ${statusBadge}
                                    ${earlyAccessBadge}
                                </div>
                            </div>
                            ${lora.version ? `<div class="lora-version">${lora.version}</div>` : ''}
                            <div class="lora-info">
                                ${lora.baseModel ? `<div class="base-model">${lora.baseModel}</div>` : ''}
                                ${sizeDisplay}
                                <div class="weight-badge">Weight: ${lora.weight || 1.0}</div>
                            </div>
                        </div>
                    </div>
                `;
            }).join('');
        }

        // Check for early access loras (that are downloadable, i.e. not already
        // local and not deleted) and show a warning block above the buttons.
        const earlyAccessLoras = this.importManager.recipeData.loras.filter(lora =>
            lora.isEarlyAccess && !lora.existsLocally && !lora.isDeleted);
        if (earlyAccessLoras.length > 0) {
            // Show a warning about early access loras
            const warningMessage = `
                <div class="early-access-warning">
                    <div class="warning-icon"><i class="fas fa-clock"></i></div>
                    <div class="warning-content">
                        <div class="warning-title">${earlyAccessLoras.length} LoRA(s) require Early Access</div>
                        <div class="warning-text">
                            These LoRAs require a payment to access. Download will fail if you haven't purchased access.
                            You may need to log in to your Civitai account in browser settings.
                        </div>
                    </div>
                </div>
            `;

            // Show the warning message just above the modal action buttons
            const buttonsContainer = document.querySelector('#detailsStep .modal-actions');
            if (buttonsContainer) {
                // Remove existing warning if any (step can be re-entered)
                const existingWarning = document.getElementById('earlyAccessWarning');
                if (existingWarning) {
                    existingWarning.remove();
                }

                // Add new warning
                const warningContainer = document.createElement('div');
                warningContainer.id = 'earlyAccessWarning';
                warningContainer.innerHTML = warningMessage;
                buttonsContainer.parentNode.insertBefore(warningContainer, buttonsContainer);
            }
        }

        // Check for duplicate recipes and display warning if found
        this.checkAndDisplayDuplicates();

        // Update Next button state based on missing LoRAs and duplicates
        this.updateNextButtonState();
    }

    /**
     * Render (or hide) the duplicate-recipes warning based on
     * `recipeData.matching_recipes` supplied by the backend, and cache the
     * matches on `importManager.duplicateRecipes`.
     *
     * NOTE(review): if the container is missing from the DOM and
     * `createDuplicateContainer()` returns null (anchor not found), the
     * `innerHTML` assignment below would throw — confirm the details-step
     * markup always contains `.input-group:has(#lorasList)` or the container.
     */
    checkAndDisplayDuplicates() {
        // Check if we have duplicate recipes
        if (this.importManager.recipeData &&
            this.importManager.recipeData.matching_recipes &&
            this.importManager.recipeData.matching_recipes.length > 0) {

            // Store duplicates in the importManager for later use
            this.importManager.duplicateRecipes = this.importManager.recipeData.matching_recipes;

            // Reuse the existing container or create one on demand
            const duplicateContainer = document.getElementById('duplicateRecipesContainer') ||
                this.createDuplicateContainer();

            // Format date helper: `modified` is a Unix timestamp in seconds
            const formatDate = (timestamp) => {
                try {
                    const date = new Date(timestamp * 1000);
                    return date.toLocaleDateString() + ' ' + date.toLocaleTimeString();
                } catch (e) {
                    return 'Unknown date';
                }
            };

            // Generate the HTML for duplicate recipes warning + collapsible list
            duplicateContainer.innerHTML = `
                <div class="duplicate-warning">
                    <div class="warning-icon"><i class="fas fa-clone"></i></div>
                    <div class="warning-content">
                        <div class="warning-title">
                            ${this.importManager.duplicateRecipes.length} identical ${this.importManager.duplicateRecipes.length === 1 ? 'recipe' : 'recipes'} found in your library
                        </div>
                        <div class="warning-text">
                            These recipes contain the same LoRAs with identical weights.
                            <button id="toggleDuplicatesList" class="toggle-duplicates-btn">
                                Show duplicates <i class="fas fa-chevron-down"></i>
                            </button>
                        </div>
                    </div>
                </div>
                <div class="duplicate-recipes-list collapsed">
                    ${this.importManager.duplicateRecipes.map((recipe) => `
                        <div class="duplicate-recipe-card">
                            <div class="duplicate-recipe-preview">
                                <img src="${recipe.file_url}" alt="Recipe preview">
                                <div class="duplicate-recipe-title">${recipe.title}</div>
                            </div>
                            <div class="duplicate-recipe-details">
                                <div class="duplicate-recipe-date">
                                    <i class="fas fa-calendar-alt"></i> ${formatDate(recipe.modified)}
                                </div>
                                <div class="duplicate-recipe-lora-count">
                                    <i class="fas fa-layer-group"></i> ${recipe.lora_count} LoRAs
                                </div>
                            </div>
                        </div>
                    `).join('')}
                </div>
            `;

            // Show the duplicate container
            duplicateContainer.style.display = 'block';

            // Add click event for the show/hide toggle (listener is re-added on
            // each render; innerHTML above replaces the button so no stacking)
            const toggleButton = document.getElementById('toggleDuplicatesList');
            if (toggleButton) {
                toggleButton.addEventListener('click', () => {
                    const list = duplicateContainer.querySelector('.duplicate-recipes-list');
                    if (list) {
                        list.classList.toggle('collapsed');
                        const icon = toggleButton.querySelector('i');
                        if (icon) {
                            if (list.classList.contains('collapsed')) {
                                toggleButton.innerHTML = `Show duplicates <i class="fas fa-chevron-down"></i>`;
                            } else {
                                toggleButton.innerHTML = `Hide duplicates <i class="fas fa-chevron-up"></i>`;
                            }
                        }
                    }
                });
            }
        } else {
            // No duplicates, hide the container if it exists
            const duplicateContainer = document.getElementById('duplicateRecipesContainer');
            if (duplicateContainer) {
                duplicateContainer.style.display = 'none';
            }

            // Reset duplicate tracking
            this.importManager.duplicateRecipes = [];
        }
    }

    /**
     * Create the duplicate-recipes container just above the LoRA list.
     * @returns {HTMLElement|null} The new container, or null when the
     *   anchor element cannot be found.
     */
    createDuplicateContainer() {
        // Find where to insert the duplicate container (the input group that
        // wraps #lorasList; uses the CSS `:has()` selector)
        const lorasListContainer = document.querySelector('.input-group:has(#lorasList)');

        if (!lorasListContainer) return null;

        // Create container
        const duplicateContainer = document.createElement('div');
        duplicateContainer.id = 'duplicateRecipesContainer';
        duplicateContainer.className = 'duplicate-recipes-container';

        // Insert before the LoRA list
        lorasListContainer.parentNode.insertBefore(duplicateContainer, lorasListContainer);

        return duplicateContainer;
    }

    /**
     * Refresh the primary button label and the deleted-LoRAs warning for the
     * details step. Label is "Download Missing LoRAs" when any LoRA is neither
     * local nor deleted, otherwise "Save Recipe".
     */
    updateNextButtonState() {
        const nextButton = document.querySelector('#detailsStep .primary-btn');
        const actionsContainer = document.querySelector('#detailsStep .modal-actions');
        if (!nextButton || !actionsContainer) return;

        // Always clean up previous warnings and buttons first
        const existingWarning = document.getElementById('deletedLorasWarning');
        if (existingWarning) {
            existingWarning.remove();
        }

        // Remove any existing "import anyway" button
        const importAnywayBtn = document.getElementById('importAnywayBtn');
        if (importAnywayBtn) {
            importAnywayBtn.remove();
        }

        // Count deleted LoRAs
        const deletedLoras = this.importManager.recipeData.loras.filter(lora => lora.isDeleted).length;

        // If we have deleted LoRAs, show a warning
        if (deletedLoras > 0) {
            // Create a new warning container above the buttons
            const buttonsContainer = document.querySelector('#detailsStep .modal-actions') || nextButton.parentNode;
            const warningContainer = document.createElement('div');
            warningContainer.id = 'deletedLorasWarning';
            warningContainer.className = 'deleted-loras-warning';

            // Create warning message
            warningContainer.innerHTML = `
                <div class="warning-icon"><i class="fas fa-exclamation-triangle"></i></div>
                <div class="warning-content">
                    <div class="warning-title">${deletedLoras} LoRA(s) have been deleted from Civitai</div>
                    <div class="warning-text">These LoRAs cannot be downloaded. If you continue, they will remain in the recipe but won't be included when used.</div>
                </div>
            `;

            // Insert before the buttons container
            buttonsContainer.parentNode.insertBefore(warningContainer, buttonsContainer);
        }

        // Check for missing-but-downloadable LoRAs; duplicates do not change
        // the button action, only the warnings rendered elsewhere
        const missingNotDeleted = this.importManager.recipeData.loras.filter(
            lora => !lora.existsLocally && !lora.isDeleted
        ).length;

        // Standard button behavior regardless of duplicates
        nextButton.classList.remove('warning-btn');

        if (missingNotDeleted > 0) {
            nextButton.textContent = 'Download Missing LoRAs';
        } else {
            nextButton.textContent = 'Save Recipe';
        }
    }

    /**
     * Add the trimmed value of #tagInput to the recipe tags (deduplicated)
     * and clear the input.
     */
    addTag() {
        const tagInput = document.getElementById('tagInput');
        const tag = tagInput.value.trim();

        if (!tag) return;

        if (!this.importManager.recipeTags.includes(tag)) {
            this.importManager.recipeTags.push(tag);
            this.updateTagsDisplay();
        }

        tagInput.value = '';
    }

    /**
     * Remove a tag from the recipe tags and re-render the tag list.
     * @param {string} tag - Exact tag text to remove.
     */
    removeTag(tag) {
        this.importManager.recipeTags = this.importManager.recipeTags.filter(t => t !== tag);
        this.updateTagsDisplay();
    }

    /**
     * Re-render #tagsContainer from `importManager.recipeTags`.
     *
     * NOTE(review): tags are interpolated raw into HTML and into the inline
     * `onclick` string — a tag containing `'`, `<` or `"` would break the
     * markup/handler. Consider escaping or building DOM nodes instead.
     */
    updateTagsDisplay() {
        const tagsContainer = document.getElementById('tagsContainer');

        if (this.importManager.recipeTags.length === 0) {
            tagsContainer.innerHTML = '<div class="empty-tags">No tags added</div>';
            return;
        }

        tagsContainer.innerHTML = this.importManager.recipeTags.map(tag => `
            <div class="recipe-tag">
                ${tag}
                <i class="fas fa-times" onclick="importManager.removeTag('${tag}')"></i>
            </div>
        `).join('');
    }

    /**
     * Validate the details step and advance: deleted LoRAs are auto-excluded;
     * if any downloadable LoRAs remain missing go to the location/download
     * step, otherwise save the recipe directly.
     */
    proceedFromDetails() {
        // Validate recipe name
        if (!this.importManager.recipeName) {
            showToast('Please enter a recipe name', 'error');
            return;
        }

        // Automatically mark all deleted LoRAs as excluded
        if (this.importManager.recipeData && this.importManager.recipeData.loras) {
            this.importManager.recipeData.loras.forEach(lora => {
                if (lora.isDeleted) {
                    lora.exclude = true;
                }
            });
        }

        // Update missing LoRAs list to exclude deleted LoRAs
        this.importManager.missingLoras = this.importManager.recipeData.loras.filter(lora =>
            !lora.existsLocally && !lora.isDeleted);

        // If we have downloadable missing LoRAs, go to location step
        if (this.importManager.missingLoras.length > 0) {
            // Store only downloadable LoRAs for the download step
            this.importManager.downloadableLoRAs = this.importManager.missingLoras;
            this.importManager.proceedToLocation();
        } else {
            // Otherwise, save the recipe directly
            this.importManager.saveRecipe();
        }
    }
}
|
||||
@@ -6,6 +6,8 @@ import { RecipeModal } from './components/RecipeModal.js';
|
||||
import { getCurrentPageState } from './state/index.js';
|
||||
import { getSessionItem, removeSessionItem } from './utils/storageHelpers.js';
|
||||
import { RecipeContextMenu } from './components/ContextMenu/index.js';
|
||||
import { DuplicatesManager } from './components/DuplicatesManager.js';
|
||||
import { initializeInfiniteScroll } from './utils/infiniteScroll.js';
|
||||
|
||||
class RecipeManager {
|
||||
constructor() {
|
||||
@@ -18,6 +20,9 @@ class RecipeManager {
|
||||
// Initialize RecipeModal
|
||||
this.recipeModal = new RecipeModal();
|
||||
|
||||
// Initialize DuplicatesManager
|
||||
this.duplicatesManager = new DuplicatesManager(this);
|
||||
|
||||
// Add state tracking for infinite scroll
|
||||
this.pageState.isLoading = false;
|
||||
this.pageState.hasMore = true;
|
||||
@@ -179,6 +184,12 @@ class RecipeManager {
|
||||
|
||||
async loadRecipes(resetPage = true) {
|
||||
try {
|
||||
// Skip loading if in duplicates mode
|
||||
const pageState = getCurrentPageState();
|
||||
if (pageState.duplicatesMode) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Show loading indicator
|
||||
document.body.classList.add('loading');
|
||||
this.pageState.isLoading = true;
|
||||
@@ -366,6 +377,28 @@ class RecipeManager {
|
||||
showRecipeDetails(recipe) {
|
||||
this.recipeModal.showRecipeDetails(recipe);
|
||||
}
|
||||
|
||||
// Duplicate detection and management methods
|
||||
async findDuplicateRecipes() {
|
||||
return await this.duplicatesManager.findDuplicates();
|
||||
}
|
||||
|
||||
selectLatestDuplicates() {
|
||||
this.duplicatesManager.selectLatestDuplicates();
|
||||
}
|
||||
|
||||
deleteSelectedDuplicates() {
|
||||
this.duplicatesManager.deleteSelectedDuplicates();
|
||||
}
|
||||
|
||||
confirmDeleteDuplicates() {
|
||||
this.duplicatesManager.confirmDeleteDuplicates();
|
||||
}
|
||||
|
||||
exitDuplicateMode() {
|
||||
this.duplicatesManager.exitDuplicateMode();
|
||||
initializeInfiniteScroll();
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize components
|
||||
|
||||
@@ -65,6 +65,7 @@ export const state = {
|
||||
},
|
||||
pageSize: 20,
|
||||
showFavoritesOnly: false,
|
||||
duplicatesMode: false, // Add flag for duplicates mode
|
||||
},
|
||||
|
||||
checkpoints: {
|
||||
|
||||
@@ -125,4 +125,65 @@ export function updateLoraCard(filePath, updates, newFilePath) {
|
||||
});
|
||||
|
||||
return loraCard; // Return the updated card element for chaining
|
||||
}
|
||||
|
||||
/**
 * Update the recipe card after metadata edits in the modal.
 *
 * Looks up the card by `data-id`, syncs both the DOM (dataset + visible title)
 * and the RecipeCard component instance attached to the element as
 * `_recipeCardInstance`, so the card stays consistent without a full reload.
 *
 * @param {string} recipeId - ID of the recipe to update
 * @param {Object} updates - Object containing the updates; recognized keys are
 *   `title`, `tags` and `source_path` (other keys only update the dataset)
 * @returns {HTMLElement|undefined} The updated card element for chaining, or
 *   undefined when no matching card exists in the DOM.
 */
export function updateRecipeCard(recipeId, updates) {
    // Find the card with matching recipe ID
    // (recipe cards reuse the .lora-card class)
    const recipeCard = document.querySelector(`.lora-card[data-id="${recipeId}"]`);
    if (!recipeCard) return;

    // Get the recipe card component instance (expando set by the card component;
    // may be undefined, so every use below is guarded)
    const recipeCardInstance = recipeCard._recipeCardInstance;

    // Update card dataset and visual elements based on the updates object
    Object.entries(updates).forEach(([key, value]) => {
        // Update dataset (non-string values are coerced to strings here;
        // the 'tags' case below re-writes the entry as proper JSON)
        recipeCard.dataset[key] = value;

        // Update visual elements based on the property
        switch(key) {
            case 'title':
                // Update the title in the recipe object
                if (recipeCardInstance && recipeCardInstance.recipe) {
                    recipeCardInstance.recipe.title = value;
                }

                // Update the title shown in the card
                const modelNameElement = recipeCard.querySelector('.model-name');
                if (modelNameElement) modelNameElement.textContent = value;
                break;

            case 'tags':
                // Update tags in the recipe object (not displayed on card UI)
                if (recipeCardInstance && recipeCardInstance.recipe) {
                    recipeCardInstance.recipe.tags = value;
                }

                // Store in dataset as JSON string (JSON.stringify can throw on
                // circular structures, hence the try/catch)
                try {
                    if (typeof value === 'string') {
                        recipeCard.dataset.tags = value;
                    } else {
                        recipeCard.dataset.tags = JSON.stringify(value);
                    }
                } catch (e) {
                    console.error('Failed to update recipe tags:', e);
                }
                break;

            case 'source_path':
                // Update source_path in the recipe object (not displayed on card UI)
                if (recipeCardInstance && recipeCardInstance.recipe) {
                    recipeCardInstance.recipe.source_path = value;
                }
                break;
        }
    });

    return recipeCard; // Return the updated card element for chaining
}
|
||||
12
static/js/utils/formatters.js
Normal file
12
static/js/utils/formatters.js
Normal file
@@ -0,0 +1,12 @@
|
||||
/**
|
||||
* Format a file size in bytes to a human-readable string
|
||||
* @param {number} bytes - The size in bytes
|
||||
* @returns {string} Formatted size string (e.g., "1.5 MB")
|
||||
*/
|
||||
export function formatFileSize(bytes) {
|
||||
if (!bytes || isNaN(bytes)) return '';
|
||||
|
||||
const sizes = ['B', 'KB', 'MB', 'GB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(1024));
|
||||
return (bytes / Math.pow(1024, i)).toFixed(2) + ' ' + sizes[i];
|
||||
}
|
||||
@@ -14,6 +14,11 @@ export function initializeInfiniteScroll(pageType = 'loras') {
|
||||
|
||||
// Get the current page state
|
||||
const pageState = getCurrentPageState();
|
||||
|
||||
// Skip initializing if in duplicates mode (for recipes page)
|
||||
if (pageType === 'recipes' && pageState.duplicatesMode) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Determine the load more function and grid ID based on page type
|
||||
let loadMoreFunction;
|
||||
|
||||
@@ -6,12 +6,27 @@
|
||||
<!-- Step 1: Upload Image or Input URL -->
|
||||
<div class="import-step" id="uploadStep">
|
||||
<div class="import-mode-toggle">
|
||||
<button class="toggle-btn active" data-mode="upload" onclick="importManager.toggleImportMode('upload')">
|
||||
<button class="toggle-btn active" data-mode="url" onclick="importManager.toggleImportMode('url')">
|
||||
<i class="fas fa-link"></i> URL / Local Path
|
||||
</button>
|
||||
<button class="toggle-btn" data-mode="upload" onclick="importManager.toggleImportMode('upload')">
|
||||
<i class="fas fa-upload"></i> Upload Image
|
||||
</button>
|
||||
<button class="toggle-btn" data-mode="url" onclick="importManager.toggleImportMode('url')">
|
||||
<i class="fas fa-link"></i> Input URL
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<!-- Input URL/Path Section -->
|
||||
<div class="import-section" id="urlSection">
|
||||
<p>Input a Civitai image URL or local file path to import as a recipe.</p>
|
||||
<div class="input-group">
|
||||
<label for="imageUrlInput">Image URL or File Path:</label>
|
||||
<div class="input-with-button">
|
||||
<input type="text" id="imageUrlInput" placeholder="https://civitai.com/images/... or C:/path/to/image.png">
|
||||
<button class="primary-btn" onclick="importManager.handleUrlInput()">
|
||||
<i class="fas fa-download"></i> Fetch Image
|
||||
</button>
|
||||
</div>
|
||||
<div class="error-message" id="urlError"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Upload Image Section -->
|
||||
@@ -29,21 +44,6 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Input URL Section -->
|
||||
<div class="import-section" id="urlSection" style="display: none;">
|
||||
<p>Input a Civitai image URL to import as a recipe.</p>
|
||||
<div class="input-group">
|
||||
<label for="imageUrlInput">Image URL:</label>
|
||||
<div class="input-with-button">
|
||||
<input type="text" id="imageUrlInput" placeholder="https://civitai.com/images/...">
|
||||
<button class="primary-btn" onclick="importManager.handleUrlInput()">
|
||||
<i class="fas fa-download"></i> Fetch Image
|
||||
</button>
|
||||
</div>
|
||||
<div class="error-message" id="urlError"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="modal-actions">
|
||||
<button class="secondary-btn" onclick="modalManager.closeModal('importModal')">Cancel</button>
|
||||
</div>
|
||||
@@ -84,6 +84,11 @@
|
||||
<!-- LoRAs will be populated here -->
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Container for duplicate recipes warning -->
|
||||
<div id="duplicateRecipesContainer" class="duplicate-recipes-container" style="display: none;">
|
||||
<!-- Duplicate recipes will be populated here -->
|
||||
</div>
|
||||
|
||||
<div class="modal-actions">
|
||||
<button class="secondary-btn" onclick="importManager.backToUpload()">Back</button>
|
||||
|
||||
@@ -24,6 +24,21 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Duplicate Delete Confirmation Modal -->
|
||||
<div id="duplicateDeleteModal" class="modal delete-modal">
|
||||
<div class="modal-content delete-modal-content">
|
||||
<h2>Delete Duplicate Recipes</h2>
|
||||
<p class="delete-message">Are you sure you want to delete the selected duplicate recipes?</p>
|
||||
<div class="delete-model-info">
|
||||
<p><span id="duplicateDeleteCount">0</span> recipes will be permanently deleted.</p>
|
||||
</div>
|
||||
<div class="modal-actions">
|
||||
<button class="cancel-btn" onclick="modalManager.closeModal('duplicateDeleteModal')">Cancel</button>
|
||||
<button class="delete-btn" onclick="recipeManager.confirmDeleteDuplicates()">Delete</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Settings Modal -->
|
||||
<div id="settingsModal" class="modal">
|
||||
<div class="modal-content settings-modal">
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
<div class="recipe-top-section">
|
||||
<div class="recipe-preview-container" id="recipePreviewContainer">
|
||||
<img id="recipeModalImage" src="" alt="Recipe Preview" class="recipe-preview-media">
|
||||
<!-- Source URL elements are now added dynamically in RecipeModal.js -->
|
||||
</div>
|
||||
|
||||
<div class="info-section recipe-gen-params">
|
||||
|
||||
@@ -42,6 +42,10 @@
|
||||
<div title="Import recipes" class="control-group">
|
||||
<button onclick="importManager.showImportModal()"><i class="fas fa-file-import"></i> Import</button>
|
||||
</div>
|
||||
<!-- Add duplicate detection button -->
|
||||
<div title="Find duplicate recipes" class="control-group">
|
||||
<button onclick="recipeManager.findDuplicateRecipes()"><i class="fas fa-clone"></i> Find Duplicates</button>
|
||||
</div>
|
||||
<!-- Custom filter indicator button (hidden by default) -->
|
||||
<div id="customFilterIndicator" class="control-group hidden">
|
||||
<div class="filter-active">
|
||||
@@ -51,6 +55,25 @@
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Duplicates banner (hidden by default) -->
|
||||
<div id="duplicatesBanner" class="duplicates-banner" style="display: none;">
|
||||
<div class="banner-content">
|
||||
<i class="fas fa-exclamation-triangle"></i>
|
||||
<span id="duplicatesCount">Found 0 duplicate groups</span>
|
||||
<div class="banner-actions">
|
||||
<button class="btn-select-latest" onclick="recipeManager.selectLatestDuplicates()">
|
||||
Keep Latest Versions
|
||||
</button>
|
||||
<button class="btn-delete-selected disabled" onclick="recipeManager.deleteSelectedDuplicates()">
|
||||
Delete Selected (<span id="selectedCount">0</span>)
|
||||
</button>
|
||||
<button class="btn-exit" onclick="recipeManager.exitDuplicateMode()">
|
||||
<i class="fas fa-times"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Recipe grid -->
|
||||
<div class="card-grid" id="recipeGrid">
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { app } from "../../scripts/app.js";
|
||||
import { dynamicImportByVersion } from "./utils.js";
|
||||
|
||||
// Extract pattern into a constant for consistent use
|
||||
const LORA_PATTERN = /<lora:([^:]+):([-\d\.]+)>/g;
|
||||
// Update pattern to match both formats: <lora:name:model_strength> or <lora:name:model_strength:clip_strength>
|
||||
const LORA_PATTERN = /<lora:([^:]+):([-\d\.]+)(?::([-\d\.]+))?>/g;
|
||||
|
||||
// Function to get the appropriate loras widget based on ComfyUI version
|
||||
async function getLorasWidgetModule() {
|
||||
@@ -61,10 +61,15 @@ function mergeLoras(lorasText, lorasArr) {
|
||||
const result = [];
|
||||
let match;
|
||||
|
||||
// Reset pattern index before using
|
||||
LORA_PATTERN.lastIndex = 0;
|
||||
|
||||
// Parse text input and create initial entries
|
||||
while ((match = LORA_PATTERN.exec(lorasText)) !== null) {
|
||||
const name = match[1];
|
||||
const inputStrength = Number(match[2]);
|
||||
const modelStrength = Number(match[2]);
|
||||
// Extract clip strength if provided, otherwise use model strength
|
||||
const clipStrength = match[3] ? Number(match[3]) : modelStrength;
|
||||
|
||||
// Find if this lora exists in the array data
|
||||
const existingLora = lorasArr.find(l => l.name === name);
|
||||
@@ -72,8 +77,9 @@ function mergeLoras(lorasText, lorasArr) {
|
||||
result.push({
|
||||
name: name,
|
||||
// Use existing strength if available, otherwise use input strength
|
||||
strength: existingLora ? existingLora.strength : inputStrength,
|
||||
active: existingLora ? existingLora.active : true
|
||||
strength: existingLora ? existingLora.strength : modelStrength,
|
||||
active: existingLora ? existingLora.active : true,
|
||||
clipStrength: existingLora ? existingLora.clipStrength : clipStrength,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -102,18 +108,7 @@ app.registerExtension({
|
||||
let existingLoras = [];
|
||||
if (node.widgets_values && node.widgets_values.length > 0) {
|
||||
const savedValue = node.widgets_values[1];
|
||||
// TODO: clean up this code
|
||||
try {
|
||||
// Check if the value is already an array/object
|
||||
if (typeof savedValue === 'object' && savedValue !== null) {
|
||||
existingLoras = savedValue;
|
||||
} else if (typeof savedValue === 'string') {
|
||||
existingLoras = JSON.parse(savedValue);
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn("Failed to parse loras data:", e);
|
||||
existingLoras = [];
|
||||
}
|
||||
existingLoras = savedValue || [];
|
||||
}
|
||||
// Merge the loras data
|
||||
const mergedLoras = mergeLoras(node.widgets[0].value, existingLoras);
|
||||
@@ -139,7 +134,7 @@ app.registerExtension({
|
||||
const currentLoras = value.map(l => l.name);
|
||||
|
||||
// Use the constant pattern here as well
|
||||
let newText = inputWidget.value.replace(LORA_PATTERN, (match, name, strength) => {
|
||||
let newText = inputWidget.value.replace(LORA_PATTERN, (match, name, strength, clipStrength) => {
|
||||
return currentLoras.includes(name) ? match : '';
|
||||
});
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { app } from "../../scripts/app.js";
|
||||
import { dynamicImportByVersion } from "./utils.js";
|
||||
|
||||
// Extract pattern into a constant for consistent use
|
||||
const LORA_PATTERN = /<lora:([^:]+):([-\d\.]+)>/g;
|
||||
// Update pattern to match both formats: <lora:name:model_strength> or <lora:name:model_strength:clip_strength>
|
||||
const LORA_PATTERN = /<lora:([^:]+):([-\d\.]+)(?::([-\d\.]+))?>/g;
|
||||
|
||||
// Function to get the appropriate loras widget based on ComfyUI version
|
||||
async function getLorasWidgetModule() {
|
||||
@@ -57,10 +57,15 @@ function mergeLoras(lorasText, lorasArr) {
|
||||
const result = [];
|
||||
let match;
|
||||
|
||||
// Reset pattern index before using
|
||||
LORA_PATTERN.lastIndex = 0;
|
||||
|
||||
// Parse text input and create initial entries
|
||||
while ((match = LORA_PATTERN.exec(lorasText)) !== null) {
|
||||
const name = match[1];
|
||||
const inputStrength = Number(match[2]);
|
||||
const modelStrength = Number(match[2]);
|
||||
// Extract clip strength if provided, otherwise use model strength
|
||||
const clipStrength = match[3] ? Number(match[3]) : modelStrength;
|
||||
|
||||
// Find if this lora exists in the array data
|
||||
const existingLora = lorasArr.find(l => l.name === name);
|
||||
@@ -68,8 +73,9 @@ function mergeLoras(lorasText, lorasArr) {
|
||||
result.push({
|
||||
name: name,
|
||||
// Use existing strength if available, otherwise use input strength
|
||||
strength: existingLora ? existingLora.strength : inputStrength,
|
||||
active: existingLora ? existingLora.active : true
|
||||
strength: existingLora ? existingLora.strength : modelStrength,
|
||||
active: existingLora ? existingLora.active : true,
|
||||
clipStrength: existingLora ? existingLora.clipStrength : clipStrength,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -29,10 +29,13 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
|
||||
// Fixed sizes for component calculations
|
||||
const LORA_ENTRY_HEIGHT = 40; // Height of a single lora entry
|
||||
const CLIP_ENTRY_HEIGHT = 40; // Height of a clip entry
|
||||
const HEADER_HEIGHT = 40; // Height of the header section
|
||||
const CONTAINER_PADDING = 12; // Top and bottom padding
|
||||
const EMPTY_CONTAINER_HEIGHT = 100; // Height when no loras are present
|
||||
|
||||
// Remove expandedClipEntries Set since we'll determine expansion based on strength values
|
||||
|
||||
// Parse LoRA entries from value
|
||||
const parseLoraValue = (value) => {
|
||||
if (!value) return [];
|
||||
@@ -367,7 +370,7 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
};
|
||||
|
||||
// Function to handle strength adjustment via dragging
|
||||
const handleStrengthDrag = (name, initialStrength, initialX, event, widget) => {
|
||||
const handleStrengthDrag = (name, initialStrength, initialX, event, widget, isClipStrength = false) => {
|
||||
// Calculate drag sensitivity (how much the strength changes per pixel)
|
||||
// Using 0.01 per 10 pixels of movement
|
||||
const sensitivity = 0.001;
|
||||
@@ -391,7 +394,12 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
const loraIndex = lorasData.findIndex(l => l.name === name);
|
||||
|
||||
if (loraIndex >= 0) {
|
||||
lorasData[loraIndex].strength = newStrength;
|
||||
// Update the appropriate strength property based on isClipStrength flag
|
||||
if (isClipStrength) {
|
||||
lorasData[loraIndex].clipStrength = newStrength;
|
||||
} else {
|
||||
lorasData[loraIndex].strength = newStrength;
|
||||
}
|
||||
|
||||
// Update the widget value
|
||||
widget.value = formatLoraValue(lorasData);
|
||||
@@ -402,7 +410,7 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
};
|
||||
|
||||
// Function to initialize drag operation
|
||||
const initDrag = (loraEl, nameEl, name, widget) => {
|
||||
const initDrag = (dragEl, name, widget, isClipStrength = false) => {
|
||||
let isDragging = false;
|
||||
let initialX = 0;
|
||||
let initialStrength = 0;
|
||||
@@ -420,9 +428,8 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
document.head.appendChild(styleEl);
|
||||
}
|
||||
|
||||
// Create a drag handler that's applied to the entire lora entry
|
||||
// except toggle and strength controls
|
||||
loraEl.addEventListener('mousedown', (e) => {
|
||||
// Create a drag handler
|
||||
dragEl.addEventListener('mousedown', (e) => {
|
||||
// Skip if clicking on toggle or strength control areas
|
||||
if (e.target.closest('.comfy-lora-toggle') ||
|
||||
e.target.closest('input') ||
|
||||
@@ -437,7 +444,7 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
if (!loraData) return;
|
||||
|
||||
initialX = e.clientX;
|
||||
initialStrength = loraData.strength;
|
||||
initialStrength = isClipStrength ? loraData.clipStrength : loraData.strength;
|
||||
isDragging = true;
|
||||
|
||||
// Add class to body to enforce cursor style globally
|
||||
@@ -453,7 +460,7 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
if (!isDragging) return;
|
||||
|
||||
// Call the strength adjustment function
|
||||
handleStrengthDrag(name, initialStrength, initialX, e, widget);
|
||||
handleStrengthDrag(name, initialStrength, initialX, e, widget, isClipStrength);
|
||||
|
||||
// Prevent showing the preview tooltip during drag
|
||||
previewTooltip.hide();
|
||||
@@ -691,10 +698,17 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
header.appendChild(strengthLabel);
|
||||
container.appendChild(header);
|
||||
|
||||
// Track the total visible entries for height calculation
|
||||
let totalVisibleEntries = lorasData.length;
|
||||
|
||||
// Render each lora entry
|
||||
lorasData.forEach((loraData) => {
|
||||
const { name, strength, active } = loraData;
|
||||
const { name, strength, clipStrength, active } = loraData;
|
||||
|
||||
// Determine expansion state using our helper function
|
||||
const isExpanded = shouldShowClipEntry(loraData);
|
||||
|
||||
// Create the main LoRA entry
|
||||
const loraEl = document.createElement("div");
|
||||
loraEl.className = "comfy-lora-entry";
|
||||
Object.assign(loraEl.style, {
|
||||
@@ -708,6 +722,41 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
marginBottom: "4px",
|
||||
});
|
||||
|
||||
// Add double-click handler to toggle clip entry
|
||||
loraEl.addEventListener('dblclick', (e) => {
|
||||
// Skip if clicking on toggle or strength control areas
|
||||
if (e.target.closest('.comfy-lora-toggle') ||
|
||||
e.target.closest('input') ||
|
||||
e.target.closest('.comfy-lora-arrow')) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Prevent default behavior
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
|
||||
// Toggle the clip entry expanded state
|
||||
const lorasData = parseLoraValue(widget.value);
|
||||
const loraIndex = lorasData.findIndex(l => l.name === name);
|
||||
|
||||
if (loraIndex >= 0) {
|
||||
// Explicitly toggle the expansion state
|
||||
const currentExpanded = shouldShowClipEntry(lorasData[loraIndex]);
|
||||
lorasData[loraIndex].expanded = !currentExpanded;
|
||||
|
||||
// If collapsing, set clipStrength = strength
|
||||
if (!lorasData[loraIndex].expanded) {
|
||||
lorasData[loraIndex].clipStrength = lorasData[loraIndex].strength;
|
||||
}
|
||||
|
||||
// Update the widget value
|
||||
widget.value = formatLoraValue(lorasData);
|
||||
|
||||
// Re-render to show/hide clip entry
|
||||
renderLoras(widget.value, widget);
|
||||
}
|
||||
});
|
||||
|
||||
// Create toggle for this lora
|
||||
const toggle = createToggle(active, (newActive) => {
|
||||
// Update this lora's active state
|
||||
@@ -740,6 +789,14 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
msUserSelect: "none",
|
||||
});
|
||||
|
||||
// Add expand indicator to name element
|
||||
const expandIndicator = document.createElement("span");
|
||||
expandIndicator.textContent = isExpanded ? " ▼" : " ▶";
|
||||
expandIndicator.style.opacity = "0.7";
|
||||
expandIndicator.style.fontSize = "9px";
|
||||
expandIndicator.style.marginLeft = "4px";
|
||||
nameEl.appendChild(expandIndicator);
|
||||
|
||||
// Move preview tooltip events to nameEl instead of loraEl
|
||||
nameEl.addEventListener('mouseenter', async (e) => {
|
||||
e.stopPropagation();
|
||||
@@ -753,7 +810,7 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
});
|
||||
|
||||
// Initialize drag functionality for strength adjustment
|
||||
initDrag(loraEl, nameEl, name, widget);
|
||||
initDrag(loraEl, name, widget, false);
|
||||
|
||||
// Remove the preview tooltip events from loraEl
|
||||
loraEl.onmouseenter = () => {
|
||||
@@ -897,10 +954,185 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
loraEl.appendChild(strengthControl);
|
||||
|
||||
container.appendChild(loraEl);
|
||||
|
||||
// If expanded, show the clip entry
|
||||
if (isExpanded) {
|
||||
totalVisibleEntries++;
|
||||
const clipEl = document.createElement("div");
|
||||
clipEl.className = "comfy-lora-clip-entry";
|
||||
Object.assign(clipEl.style, {
|
||||
display: "flex",
|
||||
justifyContent: "space-between",
|
||||
alignItems: "center",
|
||||
padding: "6px",
|
||||
paddingLeft: "20px", // Indent to align with parent name
|
||||
borderRadius: "6px",
|
||||
backgroundColor: active ? "rgba(65, 70, 90, 0.6)" : "rgba(50, 55, 65, 0.5)",
|
||||
borderLeft: "2px solid rgba(72, 118, 255, 0.6)",
|
||||
transition: "all 0.2s ease",
|
||||
marginBottom: "4px",
|
||||
marginLeft: "10px",
|
||||
marginTop: "-2px"
|
||||
});
|
||||
|
||||
// Create clip name display
|
||||
const clipNameEl = document.createElement("div");
|
||||
clipNameEl.textContent = "[clip] " + name;
|
||||
Object.assign(clipNameEl.style, {
|
||||
flex: "1",
|
||||
overflow: "hidden",
|
||||
textOverflow: "ellipsis",
|
||||
whiteSpace: "nowrap",
|
||||
color: active ? "rgba(200, 215, 240, 0.9)" : "rgba(200, 215, 240, 0.6)",
|
||||
fontSize: "13px",
|
||||
userSelect: "none",
|
||||
WebkitUserSelect: "none",
|
||||
MozUserSelect: "none",
|
||||
msUserSelect: "none",
|
||||
});
|
||||
|
||||
// Create clip strength control
|
||||
const clipStrengthControl = document.createElement("div");
|
||||
Object.assign(clipStrengthControl.style, {
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
gap: "8px",
|
||||
});
|
||||
|
||||
// Left arrow for clip
|
||||
const clipLeftArrow = createArrowButton("left", () => {
|
||||
// Decrease clip strength
|
||||
const lorasData = parseLoraValue(widget.value);
|
||||
const loraIndex = lorasData.findIndex(l => l.name === name);
|
||||
|
||||
if (loraIndex >= 0) {
|
||||
lorasData[loraIndex].clipStrength = (parseFloat(lorasData[loraIndex].clipStrength) - 0.05).toFixed(2);
|
||||
|
||||
const newValue = formatLoraValue(lorasData);
|
||||
widget.value = newValue;
|
||||
}
|
||||
});
|
||||
|
||||
// Clip strength display
|
||||
const clipStrengthEl = document.createElement("input");
|
||||
clipStrengthEl.type = "text";
|
||||
clipStrengthEl.value = typeof clipStrength === 'number' ? clipStrength.toFixed(2) : Number(clipStrength).toFixed(2);
|
||||
Object.assign(clipStrengthEl.style, {
|
||||
minWidth: "50px",
|
||||
width: "50px",
|
||||
textAlign: "center",
|
||||
color: active ? "rgba(200, 215, 240, 0.9)" : "rgba(200, 215, 240, 0.6)",
|
||||
fontSize: "13px",
|
||||
background: "none",
|
||||
border: "1px solid transparent",
|
||||
padding: "2px 4px",
|
||||
borderRadius: "3px",
|
||||
outline: "none",
|
||||
});
|
||||
|
||||
// Add hover effect
|
||||
clipStrengthEl.addEventListener('mouseenter', () => {
|
||||
clipStrengthEl.style.border = "1px solid rgba(226, 232, 240, 0.2)";
|
||||
});
|
||||
|
||||
clipStrengthEl.addEventListener('mouseleave', () => {
|
||||
if (document.activeElement !== clipStrengthEl) {
|
||||
clipStrengthEl.style.border = "1px solid transparent";
|
||||
}
|
||||
});
|
||||
|
||||
// Handle focus
|
||||
clipStrengthEl.addEventListener('focus', () => {
|
||||
clipStrengthEl.style.border = "1px solid rgba(72, 118, 255, 0.6)";
|
||||
clipStrengthEl.style.background = "rgba(0, 0, 0, 0.2)";
|
||||
// Auto-select all content
|
||||
clipStrengthEl.select();
|
||||
});
|
||||
|
||||
clipStrengthEl.addEventListener('blur', () => {
|
||||
clipStrengthEl.style.border = "1px solid transparent";
|
||||
clipStrengthEl.style.background = "none";
|
||||
});
|
||||
|
||||
// Handle input changes
|
||||
clipStrengthEl.addEventListener('change', () => {
|
||||
let newValue = parseFloat(clipStrengthEl.value);
|
||||
|
||||
// Validate input
|
||||
if (isNaN(newValue)) {
|
||||
newValue = 1.0;
|
||||
}
|
||||
|
||||
// Update value
|
||||
const lorasData = parseLoraValue(widget.value);
|
||||
const loraIndex = lorasData.findIndex(l => l.name === name);
|
||||
|
||||
if (loraIndex >= 0) {
|
||||
lorasData[loraIndex].clipStrength = newValue.toFixed(2);
|
||||
|
||||
// Update value and trigger callback
|
||||
const newLorasValue = formatLoraValue(lorasData);
|
||||
widget.value = newLorasValue;
|
||||
}
|
||||
});
|
||||
|
||||
// Handle key events
|
||||
clipStrengthEl.addEventListener('keydown', (e) => {
|
||||
if (e.key === 'Enter') {
|
||||
clipStrengthEl.blur();
|
||||
}
|
||||
});
|
||||
|
||||
// Right arrow for clip
|
||||
const clipRightArrow = createArrowButton("right", () => {
|
||||
// Increase clip strength
|
||||
const lorasData = parseLoraValue(widget.value);
|
||||
const loraIndex = lorasData.findIndex(l => l.name === name);
|
||||
|
||||
if (loraIndex >= 0) {
|
||||
lorasData[loraIndex].clipStrength = (parseFloat(lorasData[loraIndex].clipStrength) + 0.05).toFixed(2);
|
||||
|
||||
const newValue = formatLoraValue(lorasData);
|
||||
widget.value = newValue;
|
||||
}
|
||||
});
|
||||
|
||||
clipStrengthControl.appendChild(clipLeftArrow);
|
||||
clipStrengthControl.appendChild(clipStrengthEl);
|
||||
clipStrengthControl.appendChild(clipRightArrow);
|
||||
|
||||
// Assemble clip entry
|
||||
const clipLeftSection = document.createElement("div");
|
||||
Object.assign(clipLeftSection.style, {
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
flex: "1",
|
||||
minWidth: "0", // Allow shrinking
|
||||
});
|
||||
|
||||
clipLeftSection.appendChild(clipNameEl);
|
||||
|
||||
clipEl.appendChild(clipLeftSection);
|
||||
clipEl.appendChild(clipStrengthControl);
|
||||
|
||||
// Hover effects for clip entry
|
||||
clipEl.onmouseenter = () => {
|
||||
clipEl.style.backgroundColor = active ? "rgba(70, 75, 95, 0.7)" : "rgba(55, 60, 70, 0.6)";
|
||||
};
|
||||
|
||||
clipEl.onmouseleave = () => {
|
||||
clipEl.style.backgroundColor = active ? "rgba(65, 70, 90, 0.6)" : "rgba(50, 55, 65, 0.5)";
|
||||
};
|
||||
|
||||
// Add drag functionality to clip entry
|
||||
initDrag(clipEl, name, widget, true);
|
||||
|
||||
container.appendChild(clipEl);
|
||||
}
|
||||
});
|
||||
|
||||
// Calculate height based on number of loras and fixed sizes
|
||||
const calculatedHeight = CONTAINER_PADDING + HEADER_HEIGHT + (Math.min(lorasData.length, 5) * LORA_ENTRY_HEIGHT);
|
||||
const calculatedHeight = CONTAINER_PADDING + HEADER_HEIGHT + (Math.min(totalVisibleEntries, 8) * LORA_ENTRY_HEIGHT);
|
||||
updateWidgetHeight(calculatedHeight);
|
||||
};
|
||||
|
||||
@@ -921,7 +1153,37 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
return [...filtered, lora];
|
||||
}, []);
|
||||
|
||||
widgetValue = uniqueValue;
|
||||
// Preserve clip strengths and expanded state when updating the value
|
||||
const oldLoras = parseLoraValue(widgetValue);
|
||||
|
||||
// Apply existing clip strength values and transfer them to the new value
|
||||
const updatedValue = uniqueValue.map(lora => {
|
||||
const existingLora = oldLoras.find(oldLora => oldLora.name === lora.name);
|
||||
|
||||
// If there's an existing lora with the same name, preserve its clip strength and expanded state
|
||||
if (existingLora) {
|
||||
return {
|
||||
...lora,
|
||||
clipStrength: existingLora.clipStrength || lora.strength,
|
||||
expanded: existingLora.hasOwnProperty('expanded') ?
|
||||
existingLora.expanded :
|
||||
Number(existingLora.clipStrength || lora.strength) !== Number(lora.strength)
|
||||
};
|
||||
}
|
||||
|
||||
// For new loras, default clip strength to model strength and expanded to false
|
||||
// unless clipStrength is already different from strength
|
||||
const clipStrength = lora.clipStrength || lora.strength;
|
||||
return {
|
||||
...lora,
|
||||
clipStrength: clipStrength,
|
||||
expanded: lora.hasOwnProperty('expanded') ?
|
||||
lora.expanded :
|
||||
Number(clipStrength) !== Number(lora.strength)
|
||||
};
|
||||
});
|
||||
|
||||
widgetValue = updatedValue;
|
||||
renderLoras(widgetValue, widget);
|
||||
},
|
||||
getMinHeight: function() {
|
||||
@@ -948,11 +1210,7 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
widget.callback = callback;
|
||||
|
||||
widget.serializeValue = () => {
|
||||
// Add dummy items to avoid the 2-element serialization issue, a bug in comfyui
|
||||
return [...widgetValue,
|
||||
{ name: "__dummy_item1__", strength: 0, active: false, _isDummy: true },
|
||||
{ name: "__dummy_item2__", strength: 0, active: false, _isDummy: true }
|
||||
];
|
||||
return widgetValue;
|
||||
}
|
||||
|
||||
widget.onRemove = () => {
|
||||
@@ -966,6 +1224,8 @@ export function addLorasWidget(node, name, opts, callback) {
|
||||
// Function to directly save the recipe without dialog
|
||||
async function saveRecipeDirectly(widget) {
|
||||
try {
|
||||
const prompt = await app.graphToPrompt();
|
||||
console.log(prompt);
|
||||
// Show loading toast
|
||||
if (app && app.extensionManager && app.extensionManager.toast) {
|
||||
app.extensionManager.toast.add({
|
||||
@@ -1014,4 +1274,14 @@ async function saveRecipeDirectly(widget) {
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Determine if clip entry should be shown - now based on expanded property or initial diff values
|
||||
const shouldShowClipEntry = (loraData) => {
|
||||
// If expanded property exists, use that
|
||||
if (loraData.hasOwnProperty('expanded')) {
|
||||
return loraData.expanded;
|
||||
}
|
||||
// Otherwise use the legacy logic - if values differ, it should be expanded
|
||||
return Number(loraData.strength) !== Number(loraData.clipStrength);
|
||||
}
|
||||
@@ -220,13 +220,8 @@ export function addTagsWidget(node, name, opts, callback) {
|
||||
// Set callback
|
||||
widget.callback = callback;
|
||||
|
||||
// Add serialization method to avoid ComfyUI serialization issues
|
||||
widget.serializeValue = () => {
|
||||
// Add dummy items to avoid the 2-element serialization issue
|
||||
return [...widgetValue,
|
||||
{ text: "__dummy_item__", active: false, _isDummy: true },
|
||||
{ text: "__dummy_item__", active: false, _isDummy: true }
|
||||
];
|
||||
return widgetValue
|
||||
};
|
||||
|
||||
return { minWidth: 300, minHeight: defaultHeight, widget };
|
||||
|
||||
Reference in New Issue
Block a user