Compare commits

...

10 Commits
v1.0.2 ... main

Author SHA1 Message Date
Will Miao
85b6c91192 fix(download): add ZImageBase to diffusion model routing (#892) 2026-04-10 08:55:28 +08:00
Will Miao
908016cbd6 fix(recipe modal): compact layout on short viewports (#891) 2026-04-09 22:46:25 +08:00
Will Miao
a5ac9cf81b Revert "fix(recipes): make recipe modal viewport-safe (#891)"
This reverts commit 51fe7aa07e.
2026-04-09 22:28:29 +08:00
Will Miao
32875042bd feat(metadata): support PromptAttention CLIP encoder 2026-04-09 19:21:25 +08:00
Will Miao
51fe7aa07e fix(recipes): make recipe modal viewport-safe (#891) 2026-04-09 19:14:12 +08:00
Will Miao
db4726a961 feat(recipes): add configurable storage path migration 2026-04-09 15:57:37 +08:00
Will Miao
e13d70248a fix(usage-stats): resolve pending checkpoint hashes 2026-04-08 09:40:20 +08:00
pixelpaws
1c4919a3e8 Merge pull request #887 from NubeBuster/feat/usage-extractors
feat(usage-stats): add extractors for rgthree Power LoRA Loader and TensorRT loaders
2026-04-08 09:32:08 +08:00
Will Miao
18ddadc9ec feat(autocomplete): auto-format textarea on blur (#884) 2026-04-08 07:57:28 +08:00
NubeBuster
b711ac468a feat(usage-stats): add extractors for rgthree Power LoRA Loader and TensorRT Loader
Fixes #394 — LoRAs loaded via rgthree Power Lora Loader were not
tracked in usage statistics because no extractor existed for that node.

New extractors:
- RgthreePowerLoraLoaderExtractor: parses LORA_* kwargs, respects
  the per-LoRA 'on' toggle
- TensorRTLoaderExtractor: parses engine filename (strips _$profile
  suffix) as best-effort for vanilla TRT. If the output MODEL has
  attachments["source_model"] (set by NubeBuster fork), overrides
  with the real checkpoint name.

TensorRTRefitLoader and TensorRTLoaderAuto take a MODEL input whose
upstream checkpoint loader is already tracked — no extractor needed.

Also adds a name:<filename> fallback and warning log in both
_process_checkpoints and _process_loras when hash lookup fails.
2026-04-05 16:45:21 +02:00
29 changed files with 1227 additions and 41 deletions

View File

@@ -264,6 +264,7 @@
"layoutSettings": "Layout-Einstellungen",
"misc": "Verschiedenes",
"folderSettings": "Standard-Roots",
"recipeSettings": "Rezepte",
"extraFolderPaths": "Zusätzliche Ordnerpfade",
"downloadPathTemplates": "Download-Pfad-Vorlagen",
"priorityTags": "Prioritäts-Tags",
@@ -393,6 +394,10 @@
"defaultUnetRootHelp": "Legen Sie den Standard-Diffusion-Modell-(UNET)-Stammordner für Downloads, Importe und Verschiebungen fest",
"defaultEmbeddingRoot": "Embedding-Stammordner",
"defaultEmbeddingRootHelp": "Legen Sie den Standard-Embedding-Stammordner für Downloads, Importe und Verschiebungen fest",
"recipesPath": "Rezepte-Speicherpfad",
"recipesPathHelp": "Optionales benutzerdefiniertes Verzeichnis für gespeicherte Rezepte. Leer lassen, um den recipes-Ordner im ersten LoRA-Stammverzeichnis zu verwenden.",
"recipesPathPlaceholder": "/path/to/recipes",
"recipesPathMigrating": "Rezepte-Speicher wird verschoben...",
"noDefault": "Kein Standard"
},
"extraFolderPaths": {
@@ -1629,6 +1634,8 @@
"mappingSaveFailed": "Fehler beim Speichern der Basis-Modell-Zuordnungen: {message}",
"downloadTemplatesUpdated": "Download-Pfad-Vorlagen aktualisiert",
"downloadTemplatesFailed": "Fehler beim Speichern der Download-Pfad-Vorlagen: {message}",
"recipesPathUpdated": "Rezepte-Speicherpfad aktualisiert",
"recipesPathSaveFailed": "Fehler beim Aktualisieren des Rezepte-Speicherpfads: {message}",
"settingsUpdated": "Einstellungen aktualisiert: {setting}",
"compactModeToggled": "Kompakt-Modus {state}",
"settingSaveFailed": "Fehler beim Speichern der Einstellung: {message}",

View File

@@ -264,6 +264,7 @@
"layoutSettings": "Layout Settings",
"misc": "Miscellaneous",
"folderSettings": "Default Roots",
"recipeSettings": "Recipes",
"extraFolderPaths": "Extra Folder Paths",
"downloadPathTemplates": "Download Path Templates",
"priorityTags": "Priority Tags",
@@ -393,6 +394,10 @@
"defaultUnetRootHelp": "Set default diffusion model (UNET) root directory for downloads, imports and moves",
"defaultEmbeddingRoot": "Embedding Root",
"defaultEmbeddingRootHelp": "Set default embedding root directory for downloads, imports and moves",
"recipesPath": "Recipes Storage Path",
"recipesPathHelp": "Optional custom directory for stored recipes. Leave empty to use the first LoRA root's recipes folder.",
"recipesPathPlaceholder": "/path/to/recipes",
"recipesPathMigrating": "Migrating recipes storage...",
"noDefault": "No Default"
},
"extraFolderPaths": {
@@ -1629,6 +1634,8 @@
"mappingSaveFailed": "Failed to save base model mappings: {message}",
"downloadTemplatesUpdated": "Download path templates updated",
"downloadTemplatesFailed": "Failed to save download path templates: {message}",
"recipesPathUpdated": "Recipes storage path updated",
"recipesPathSaveFailed": "Failed to update recipes storage path: {message}",
"settingsUpdated": "Settings updated: {setting}",
"compactModeToggled": "Compact Mode {state}",
"settingSaveFailed": "Failed to save setting: {message}",

View File

@@ -264,6 +264,7 @@
"layoutSettings": "Configuración de diseño",
"misc": "Varios",
"folderSettings": "Raíces predeterminadas",
"recipeSettings": "Recetas",
"extraFolderPaths": "Rutas de carpetas adicionales",
"downloadPathTemplates": "Plantillas de rutas de descarga",
"priorityTags": "Etiquetas prioritarias",
@@ -393,6 +394,10 @@
"defaultUnetRootHelp": "Establecer el directorio raíz predeterminado de Diffusion Model (UNET) para descargas, importaciones y movimientos",
"defaultEmbeddingRoot": "Raíz de embedding",
"defaultEmbeddingRootHelp": "Establecer el directorio raíz predeterminado de embedding para descargas, importaciones y movimientos",
"recipesPath": "Ruta de almacenamiento de recetas",
"recipesPathHelp": "Directorio personalizado opcional para las recetas guardadas. Déjalo vacío para usar la carpeta recipes del primer directorio raíz de LoRA.",
"recipesPathPlaceholder": "/path/to/recipes",
"recipesPathMigrating": "Migrando el almacenamiento de recetas...",
"noDefault": "Sin predeterminado"
},
"extraFolderPaths": {
@@ -1629,6 +1634,8 @@
"mappingSaveFailed": "Error al guardar mapeos de modelo base: {message}",
"downloadTemplatesUpdated": "Plantillas de rutas de descarga actualizadas",
"downloadTemplatesFailed": "Error al guardar plantillas de rutas de descarga: {message}",
"recipesPathUpdated": "Ruta de almacenamiento de recetas actualizada",
"recipesPathSaveFailed": "Error al actualizar la ruta de almacenamiento de recetas: {message}",
"settingsUpdated": "Configuración actualizada: {setting}",
"compactModeToggled": "Modo compacto {state}",
"settingSaveFailed": "Error al guardar configuración: {message}",

View File

@@ -264,6 +264,7 @@
"layoutSettings": "Paramètres d'affichage",
"misc": "Divers",
"folderSettings": "Racines par défaut",
"recipeSettings": "Recipes",
"extraFolderPaths": "Chemins de dossiers supplémentaires",
"downloadPathTemplates": "Modèles de chemin de téléchargement",
"priorityTags": "Étiquettes prioritaires",
@@ -393,6 +394,10 @@
"defaultUnetRootHelp": "Définir le répertoire racine Diffusion Model (UNET) par défaut pour les téléchargements, imports et déplacements",
"defaultEmbeddingRoot": "Racine Embedding",
"defaultEmbeddingRootHelp": "Définir le répertoire racine embedding par défaut pour les téléchargements, imports et déplacements",
"recipesPath": "Recipes Storage Path",
"recipesPathHelp": "Optional custom directory for stored recipes. Leave empty to use the first LoRA root's recipes folder.",
"recipesPathPlaceholder": "/path/to/recipes",
"recipesPathMigrating": "Migrating recipes storage...",
"noDefault": "Aucun par défaut"
},
"extraFolderPaths": {
@@ -1629,6 +1634,8 @@
"mappingSaveFailed": "Échec de la sauvegarde des mappages de modèle de base : {message}",
"downloadTemplatesUpdated": "Modèles de chemin de téléchargement mis à jour",
"downloadTemplatesFailed": "Échec de la sauvegarde des modèles de chemin de téléchargement : {message}",
"recipesPathUpdated": "Recipes storage path updated",
"recipesPathSaveFailed": "Failed to update recipes storage path: {message}",
"settingsUpdated": "Paramètres mis à jour : {setting}",
"compactModeToggled": "Mode compact {state}",
"settingSaveFailed": "Échec de la sauvegarde du paramètre : {message}",

View File

@@ -264,6 +264,7 @@
"layoutSettings": "הגדרות פריסה",
"misc": "שונות",
"folderSettings": "תיקיות ברירת מחדל",
"recipeSettings": "מתכונים",
"extraFolderPaths": "נתיבי תיקיות נוספים",
"downloadPathTemplates": "תבניות נתיב הורדה",
"priorityTags": "תגיות עדיפות",
@@ -393,6 +394,10 @@
"defaultUnetRootHelp": "הגדר את ספריית השורש המוגדרת כברירת מחדל של Diffusion Model (UNET) להורדות, ייבוא והעברות",
"defaultEmbeddingRoot": "תיקיית שורש Embedding",
"defaultEmbeddingRootHelp": "הגדר את ספריית השורש המוגדרת כברירת מחדל של embedding להורדות, ייבוא והעברות",
"recipesPath": "נתיב אחסון מתכונים",
"recipesPathHelp": "ספרייה מותאמת אישית אופציונלית למתכונים שנשמרו. השאר ריק כדי להשתמש בתיקיית recipes של שורש LoRA הראשון.",
"recipesPathPlaceholder": "/path/to/recipes",
"recipesPathMigrating": "מעביר את אחסון המתכונים...",
"noDefault": "אין ברירת מחדל"
},
"extraFolderPaths": {
@@ -1629,6 +1634,8 @@
"mappingSaveFailed": "שמירת מיפויי מודל בסיס נכשלה: {message}",
"downloadTemplatesUpdated": "תבניות נתיב הורדה עודכנו",
"downloadTemplatesFailed": "שמירת תבניות נתיב הורדה נכשלה: {message}",
"recipesPathUpdated": "נתיב אחסון המתכונים עודכן",
"recipesPathSaveFailed": "עדכון נתיב אחסון המתכונים נכשל: {message}",
"settingsUpdated": "הגדרות עודכנו: {setting}",
"compactModeToggled": "מצב קומפקטי {state}",
"settingSaveFailed": "שמירת ההגדרה נכשלה: {message}",

View File

@@ -264,6 +264,7 @@
"layoutSettings": "レイアウト設定",
"misc": "その他",
"folderSettings": "デフォルトルート",
"recipeSettings": "レシピ",
"extraFolderPaths": "追加フォルダーパス",
"downloadPathTemplates": "ダウンロードパステンプレート",
"priorityTags": "優先タグ",
@@ -393,6 +394,10 @@
"defaultUnetRootHelp": "ダウンロード、インポート、移動用のデフォルトDiffusion Model (UNET)ルートディレクトリを設定",
"defaultEmbeddingRoot": "Embeddingルート",
"defaultEmbeddingRootHelp": "ダウンロード、インポート、移動用のデフォルトembeddingルートディレクトリを設定",
"recipesPath": "レシピ保存先",
"recipesPathHelp": "保存済みレシピ用の任意のカスタムディレクトリです。空欄にすると最初のLoRAルートのrecipesフォルダーを使用します。",
"recipesPathPlaceholder": "/path/to/recipes",
"recipesPathMigrating": "レシピ保存先を移動中...",
"noDefault": "デフォルトなし"
},
"extraFolderPaths": {
@@ -1629,6 +1634,8 @@
"mappingSaveFailed": "ベースモデルマッピングの保存に失敗しました:{message}",
"downloadTemplatesUpdated": "ダウンロードパステンプレートが更新されました",
"downloadTemplatesFailed": "ダウンロードパステンプレートの保存に失敗しました:{message}",
"recipesPathUpdated": "レシピ保存先を更新しました",
"recipesPathSaveFailed": "レシピ保存先の更新に失敗しました: {message}",
"settingsUpdated": "設定が更新されました:{setting}",
"compactModeToggled": "コンパクトモード {state}",
"settingSaveFailed": "設定の保存に失敗しました:{message}",

View File

@@ -264,6 +264,7 @@
"layoutSettings": "레이아웃 설정",
"misc": "기타",
"folderSettings": "기본 루트",
"recipeSettings": "레시피",
"extraFolderPaths": "추가 폴다 경로",
"downloadPathTemplates": "다운로드 경로 템플릿",
"priorityTags": "우선순위 태그",
@@ -393,6 +394,10 @@
"defaultUnetRootHelp": "다운로드, 가져오기 및 이동을 위한 기본 Diffusion Model (UNET) 루트 디렉토리를 설정합니다",
"defaultEmbeddingRoot": "Embedding 루트",
"defaultEmbeddingRootHelp": "다운로드, 가져오기 및 이동을 위한 기본 Embedding 루트 디렉토리를 설정합니다",
"recipesPath": "레시피 저장 경로",
"recipesPathHelp": "저장된 레시피를 위한 선택적 사용자 지정 디렉터리입니다. 비워 두면 첫 번째 LoRA 루트의 recipes 폴더를 사용합니다.",
"recipesPathPlaceholder": "/path/to/recipes",
"recipesPathMigrating": "레시피 저장 경로를 이동 중...",
"noDefault": "기본값 없음"
},
"extraFolderPaths": {
@@ -1629,6 +1634,8 @@
"mappingSaveFailed": "베이스 모델 매핑 저장 실패: {message}",
"downloadTemplatesUpdated": "다운로드 경로 템플릿이 업데이트되었습니다",
"downloadTemplatesFailed": "다운로드 경로 템플릿 저장 실패: {message}",
"recipesPathUpdated": "레시피 저장 경로가 업데이트되었습니다",
"recipesPathSaveFailed": "레시피 저장 경로 업데이트 실패: {message}",
"settingsUpdated": "설정 업데이트됨: {setting}",
"compactModeToggled": "컴팩트 모드 {state}",
"settingSaveFailed": "설정 저장 실패: {message}",

View File

@@ -264,6 +264,7 @@
"layoutSettings": "Настройки макета",
"misc": "Разное",
"folderSettings": "Корневые папки",
"recipeSettings": "Рецепты",
"extraFolderPaths": "Дополнительные пути к папкам",
"downloadPathTemplates": "Шаблоны путей загрузки",
"priorityTags": "Приоритетные теги",
@@ -393,6 +394,10 @@
"defaultUnetRootHelp": "Установить корневую папку Diffusion Model (UNET) по умолчанию для загрузок, импорта и перемещений",
"defaultEmbeddingRoot": "Корневая папка Embedding",
"defaultEmbeddingRootHelp": "Установить корневую папку embedding по умолчанию для загрузок, импорта и перемещений",
"recipesPath": "Путь хранения рецептов",
"recipesPathHelp": "Дополнительный пользовательский каталог для сохранённых рецептов. Оставьте пустым, чтобы использовать папку recipes в первом корне LoRA.",
"recipesPathPlaceholder": "/path/to/recipes",
"recipesPathMigrating": "Перенос хранилища рецептов...",
"noDefault": "Не задано"
},
"extraFolderPaths": {
@@ -1629,6 +1634,8 @@
"mappingSaveFailed": "Не удалось сохранить сопоставления базовых моделей: {message}",
"downloadTemplatesUpdated": "Шаблоны путей загрузки обновлены",
"downloadTemplatesFailed": "Не удалось сохранить шаблоны путей загрузки: {message}",
"recipesPathUpdated": "Путь хранения рецептов обновлён",
"recipesPathSaveFailed": "Не удалось обновить путь хранения рецептов: {message}",
"settingsUpdated": "Настройки обновлены: {setting}",
"compactModeToggled": "Компактный режим {state}",
"settingSaveFailed": "Не удалось сохранить настройку: {message}",

View File

@@ -264,6 +264,7 @@
"layoutSettings": "布局设置",
"misc": "其他",
"folderSettings": "默认根目录",
"recipeSettings": "配方",
"extraFolderPaths": "额外文件夹路径",
"downloadPathTemplates": "下载路径模板",
"priorityTags": "优先标签",
@@ -393,6 +394,10 @@
"defaultUnetRootHelp": "设置下载、导入和移动时的默认 Diffusion Model (UNET) 根目录",
"defaultEmbeddingRoot": "Embedding 根目录",
"defaultEmbeddingRootHelp": "设置下载、导入和移动时的默认 Embedding 根目录",
"recipesPath": "配方存储路径",
"recipesPathHelp": "已保存配方的可选自定义目录。留空则使用第一个 LoRA 根目录下的 recipes 文件夹。",
"recipesPathPlaceholder": "/path/to/recipes",
"recipesPathMigrating": "正在迁移配方存储...",
"noDefault": "无默认"
},
"extraFolderPaths": {
@@ -1629,6 +1634,8 @@
"mappingSaveFailed": "保存基础模型映射失败:{message}",
"downloadTemplatesUpdated": "下载路径模板已更新",
"downloadTemplatesFailed": "保存下载路径模板失败:{message}",
"recipesPathUpdated": "配方存储路径已更新",
"recipesPathSaveFailed": "更新配方存储路径失败:{message}",
"settingsUpdated": "设置已更新:{setting}",
"compactModeToggled": "紧凑模式 {state}",
"settingSaveFailed": "保存设置失败:{message}",

View File

@@ -264,6 +264,7 @@
"layoutSettings": "版面設定",
"misc": "其他",
"folderSettings": "預設根目錄",
"recipeSettings": "配方",
"extraFolderPaths": "額外資料夾路徑",
"downloadPathTemplates": "下載路徑範本",
"priorityTags": "優先標籤",
@@ -393,6 +394,10 @@
"defaultUnetRootHelp": "設定下載、匯入和移動時的預設 Diffusion Model (UNET) 根目錄",
"defaultEmbeddingRoot": "Embedding 根目錄",
"defaultEmbeddingRootHelp": "設定下載、匯入和移動時的預設 Embedding 根目錄",
"recipesPath": "配方儲存路徑",
"recipesPathHelp": "已儲存配方的可選自訂目錄。留空則使用第一個 LoRA 根目錄下的 recipes 資料夾。",
"recipesPathPlaceholder": "/path/to/recipes",
"recipesPathMigrating": "正在遷移配方儲存...",
"noDefault": "未設定預設"
},
"extraFolderPaths": {
@@ -1629,6 +1634,8 @@
"mappingSaveFailed": "儲存基礎模型對應失敗:{message}",
"downloadTemplatesUpdated": "下載路徑範本已更新",
"downloadTemplatesFailed": "儲存下載路徑範本失敗:{message}",
"recipesPathUpdated": "配方儲存路徑已更新",
"recipesPathSaveFailed": "更新配方儲存路徑失敗:{message}",
"settingsUpdated": "設定已更新:{setting}",
"compactModeToggled": "緊湊模式已{state}",
"settingSaveFailed": "儲存設定失敗:{message}",

View File

@@ -134,6 +134,7 @@ class Config:
self.extra_checkpoints_roots: List[str] = []
self.extra_unet_roots: List[str] = []
self.extra_embeddings_roots: List[str] = []
self.recipes_path: str = ""
# Scan symbolic links during initialization
self._initialize_symlink_mappings()
@@ -652,6 +653,8 @@ class Config:
preview_roots.update(self._expand_preview_root(root))
for root in self.extra_embeddings_roots or []:
preview_roots.update(self._expand_preview_root(root))
if self.recipes_path:
preview_roots.update(self._expand_preview_root(self.recipes_path))
for target, link in self._path_mappings.items():
preview_roots.update(self._expand_preview_root(target))
@@ -911,9 +914,11 @@ class Config:
self,
folder_paths: Mapping[str, Iterable[str]],
extra_folder_paths: Optional[Mapping[str, Iterable[str]]] = None,
recipes_path: str = "",
) -> None:
self._path_mappings.clear()
self._preview_root_paths = set()
self.recipes_path = recipes_path if isinstance(recipes_path, str) else ""
lora_paths = folder_paths.get("loras", []) or []
checkpoint_paths = folder_paths.get("checkpoints", []) or []
@@ -1169,7 +1174,12 @@ class Config:
if not isinstance(extra_folder_paths, Mapping):
extra_folder_paths = None
self._apply_library_paths(folder_paths, extra_folder_paths)
recipes_path = (
str(library_config.get("recipes_path", ""))
if isinstance(library_config, Mapping)
else ""
)
self._apply_library_paths(folder_paths, extra_folder_paths, recipes_path)
logger.info(
"Applied library settings with %d lora roots (%d extra), %d checkpoint roots (%d extra), and %d embedding roots (%d extra)",

View File

@@ -1,4 +1,6 @@
import json
import os
import re
from .constants import MODELS, PROMPTS, SAMPLING, LORAS, SIZE, IMAGES, IS_SAMPLER
@@ -427,6 +429,75 @@ class ImageSizeExtractor(NodeMetadataExtractor):
"node_id": node_id
}
class RgthreePowerLoraLoaderExtractor(NodeMetadataExtractor):
    """Extract LoRA metadata from rgthree Power Lora Loader.

    The node passes LoRAs as dynamic kwargs: LORA_1, LORA_2, ... each holding
    {'on': bool, 'lora': filename, 'strength': float, 'strengthTwo': float}.
    """

    @staticmethod
    def extract(node_id, inputs, outputs, metadata):
        if not inputs:
            return
        collected = []
        for slot_key, slot in inputs.items():
            # Only consider the dynamic LORA_* kwargs carrying a dict payload.
            if not slot_key.upper().startswith('LORA_') or not isinstance(slot, dict):
                continue
            # Respect the per-LoRA 'on' toggle; skip entries without a filename.
            if not (slot.get('on') and slot.get('lora')):
                continue
            base_name = os.path.splitext(os.path.basename(slot['lora']))[0]
            collected.append({
                "name": base_name,
                "strength": round(float(slot.get('strength', 1.0)), 2),
            })
        if collected:
            metadata[LORAS][node_id] = {
                "lora_list": collected,
                "node_id": node_id,
            }
class TensorRTLoaderExtractor(NodeMetadataExtractor):
    """Extract checkpoint metadata from TensorRT Loader.

    extract() parses the engine filename from 'unet_name' as a best-effort
    fallback (strips the profile suffix after '_$' and any trailing counter
    suffix such as '_00001_').

    update() checks if the output MODEL has attachments["source_model"]
    set by the node (NubeBuster fork) and overrides with the real name.
    Vanilla TRT doesn't set this — the filename parse stands.
    """

    @staticmethod
    def extract(node_id, inputs, outputs, metadata):
        if not inputs or "unet_name" not in inputs:
            return
        unet_name = inputs.get("unet_name")
        # Defensive: a malformed prompt could carry a non-string value.
        if not isinstance(unet_name, str) or not unet_name:
            return
        # Strip path and extension, then drop the '_$profile' suffix
        model_name = os.path.splitext(os.path.basename(unet_name))[0]
        if "_$" in model_name:
            model_name = model_name[:model_name.index("_$")]
        # Strip counter suffix (e.g. _00001_) left by ComfyUI's save path
        model_name = re.sub(r'_\d+_?$', '', model_name)
        _store_checkpoint_metadata(metadata, node_id, model_name)

    @staticmethod
    def update(node_id, outputs, metadata):
        if not outputs or not isinstance(outputs, list) or len(outputs) == 0:
            return
        first_output = outputs[0]
        if not isinstance(first_output, tuple) or len(first_output) < 1:
            return
        model = first_output[0]
        # NubeBuster fork sets attachments["source_model"] on the ModelPatcher.
        # Note: getattr's default only applies when the attribute is MISSING;
        # guard against attachments being present but None / not a mapping.
        attachments = getattr(model, 'attachments', None)
        if not isinstance(attachments, dict):
            return
        source_model = attachments.get("source_model")
        if source_model:
            _store_checkpoint_metadata(metadata, node_id, source_model)
class LoraLoaderManagerExtractor(NodeMetadataExtractor):
@staticmethod
def extract(node_id, inputs, outputs, metadata):
@@ -577,8 +648,6 @@ class SamplerCustomAdvancedExtractor(BaseSamplerExtractor):
# Extract latent dimensions
BaseSamplerExtractor.extract_latent_dimensions(node_id, inputs, metadata)
import json
class CLIPTextEncodeFluxExtractor(NodeMetadataExtractor):
@staticmethod
def extract(node_id, inputs, outputs, metadata):
@@ -715,8 +784,11 @@ NODE_EXTRACTORS = {
"UnetLoaderGGUF": UNETLoaderExtractor, # Updated to use dedicated extractor
"LoraLoader": LoraLoaderExtractor,
"LoraLoaderLM": LoraLoaderManagerExtractor,
"RgthreePowerLoraLoader": RgthreePowerLoraLoaderExtractor,
"TensorRTLoader": TensorRTLoaderExtractor,
# Conditioning
"CLIPTextEncode": CLIPTextEncodeExtractor,
"CLIPTextEncodeAttentionBias": CLIPTextEncodeExtractor, # From https://github.com/silveroxides/ComfyUI_PromptAttention
"PromptLM": CLIPTextEncodeExtractor,
"CLIPTextEncodeFlux": CLIPTextEncodeFluxExtractor, # Add CLIPTextEncodeFlux
"WAS_Text_to_Conditioning": CLIPTextEncodeExtractor,

View File

@@ -18,6 +18,7 @@ from .service_registry import ServiceRegistry
from .lora_scanner import LoraScanner
from .metadata_service import get_default_metadata_provider
from .checkpoint_scanner import CheckpointScanner
from .settings_manager import get_settings_manager
from .recipes.errors import RecipeNotFoundError
from ..utils.utils import calculate_recipe_fingerprint, fuzzy_match
from natsort import natsorted
@@ -1090,6 +1091,14 @@ class RecipeScanner:
@property
def recipes_dir(self) -> str:
"""Get path to recipes directory"""
custom_recipes_dir = get_settings_manager().get("recipes_path", "")
if isinstance(custom_recipes_dir, str) and custom_recipes_dir.strip():
recipes_dir = os.path.abspath(
os.path.normpath(os.path.expanduser(custom_recipes_dir.strip()))
)
os.makedirs(recipes_dir, exist_ok=True)
return recipes_dir
if not config.loras_roots:
return ""

View File

@@ -3,6 +3,7 @@ import copy
import json
import os
import shutil
import tempfile
import logging
from pathlib import Path
from datetime import datetime, timezone
@@ -70,6 +71,7 @@ DEFAULT_SETTINGS: Dict[str, Any] = {
"default_checkpoint_root": "",
"default_unet_root": "",
"default_embedding_root": "",
"recipes_path": "",
"base_model_path_mappings": {},
"download_path_templates": {},
"folder_paths": {},
@@ -254,6 +256,7 @@ class SettingsManager:
default_checkpoint_root=merged.get("default_checkpoint_root"),
default_unet_root=merged.get("default_unet_root"),
default_embedding_root=merged.get("default_embedding_root"),
recipes_path=merged.get("recipes_path"),
)
}
merged["active_library"] = library_name
@@ -382,6 +385,7 @@ class SettingsManager:
),
default_unet_root=self.settings.get("default_unet_root", ""),
default_embedding_root=self.settings.get("default_embedding_root", ""),
recipes_path=self.settings.get("recipes_path", ""),
)
libraries = {library_name: library_payload}
self.settings["libraries"] = libraries
@@ -429,6 +433,7 @@ class SettingsManager:
default_checkpoint_root=data.get("default_checkpoint_root"),
default_unet_root=data.get("default_unet_root"),
default_embedding_root=data.get("default_embedding_root"),
recipes_path=data.get("recipes_path"),
metadata=data.get("metadata"),
base=data,
)
@@ -475,6 +480,7 @@ class SettingsManager:
self.settings["default_embedding_root"] = active_library.get(
"default_embedding_root", ""
)
self.settings["recipes_path"] = active_library.get("recipes_path", "")
if save:
self._save_settings()
@@ -491,6 +497,7 @@ class SettingsManager:
default_checkpoint_root: Optional[str] = None,
default_unet_root: Optional[str] = None,
default_embedding_root: Optional[str] = None,
recipes_path: Optional[str] = None,
metadata: Optional[Mapping[str, Any]] = None,
base: Optional[Mapping[str, Any]] = None,
) -> Dict[str, Any]:
@@ -529,6 +536,11 @@ class SettingsManager:
else:
payload.setdefault("default_embedding_root", "")
if recipes_path is not None:
payload["recipes_path"] = recipes_path
else:
payload.setdefault("recipes_path", "")
if metadata:
merged_meta = dict(payload.get("metadata", {}))
merged_meta.update(metadata)
@@ -630,6 +642,7 @@ class SettingsManager:
default_checkpoint_root: Optional[str] = None,
default_unet_root: Optional[str] = None,
default_embedding_root: Optional[str] = None,
recipes_path: Optional[str] = None,
) -> bool:
libraries = self.settings.get("libraries", {})
active_name = self.settings.get("active_library")
@@ -679,6 +692,10 @@ class SettingsManager:
library["default_embedding_root"] = default_embedding_root
changed = True
if recipes_path is not None and library.get("recipes_path") != recipes_path:
library["recipes_path"] = recipes_path
changed = True
if changed:
library.setdefault("created_at", self._current_timestamp())
library["updated_at"] = self._current_timestamp()
@@ -942,7 +959,9 @@ class SettingsManager:
extra_folder_paths=defaults.get("extra_folder_paths", {}),
default_lora_root=defaults.get("default_lora_root"),
default_checkpoint_root=defaults.get("default_checkpoint_root"),
default_unet_root=defaults.get("default_unet_root"),
default_embedding_root=defaults.get("default_embedding_root"),
recipes_path=defaults.get("recipes_path"),
)
defaults["libraries"] = {library_name: default_library}
defaults["active_library"] = library_name
@@ -1236,6 +1255,193 @@ class SettingsManager:
"""Get setting value"""
return self.settings.get(key, default)
def _normalize_recipes_path_value(self, value: Any) -> str:
"""Return a normalized absolute recipes path or an empty string."""
if not isinstance(value, str):
value = "" if value is None else str(value)
stripped = value.strip()
if not stripped:
return ""
return os.path.abspath(os.path.normpath(os.path.expanduser(stripped)))
def _get_effective_recipes_dir(self, recipes_path: Optional[str] = None) -> str:
    """Resolve the effective recipes directory for the active library.

    Resolution order: explicit/stored custom path, then the first configured
    LoRA root (case-insensitive sort), then the first root known to the
    global config. Returns "" when nothing is configured.
    """
    candidate = (
        self.settings.get("recipes_path", "") if recipes_path is None else recipes_path
    )
    custom_dir = self._normalize_recipes_path_value(candidate)
    if custom_dir:
        return custom_dir

    def _usable_roots(values):
        # Keep only non-blank string entries.
        return [v for v in values if isinstance(v, str) and v.strip()]

    library_roots = []
    folder_paths = self.settings.get("folder_paths", {})
    if isinstance(folder_paths, Mapping):
        raw_roots = folder_paths.get("loras", [])
        if isinstance(raw_roots, Sequence) and not isinstance(raw_roots, (str, bytes)):
            library_roots = _usable_roots(raw_roots)
    if library_roots:
        first_root = sorted(library_roots, key=str.casefold)[0]
        return os.path.abspath(os.path.join(first_root, "recipes"))

    fallback_roots = _usable_roots(getattr(config, "loras_roots", []) or [])
    if not fallback_roots:
        return ""
    first_root = sorted(fallback_roots, key=str.casefold)[0]
    return os.path.abspath(os.path.join(first_root, "recipes"))
def _validate_recipes_storage_path(self, normalized_path: str) -> None:
"""Ensure the recipes storage target is usable before saving it."""
if not normalized_path:
return
if os.path.exists(normalized_path) and not os.path.isdir(normalized_path):
raise ValueError("Recipes path must point to a directory")
try:
os.makedirs(normalized_path, exist_ok=True)
except Exception as exc:
raise ValueError(f"Unable to create recipes directory: {exc}") from exc
try:
fd, probe_path = tempfile.mkstemp(
prefix=".lora-manager-recipes-", dir=normalized_path
)
os.close(fd)
os.remove(probe_path)
except Exception as exc:
raise ValueError(f"Recipes path is not writable: {exc}") from exc
def _migrate_recipes_directory(self, source_dir: str, target_dir: str) -> None:
"""Move existing recipe files to a new recipes root and rewrite JSON paths."""
source = os.path.abspath(os.path.normpath(source_dir)) if source_dir else ""
target = os.path.abspath(os.path.normpath(target_dir)) if target_dir else ""
if not source or not target or source == target:
return
if not os.path.exists(source):
os.makedirs(target, exist_ok=True)
return
if os.path.exists(target) and not os.path.isdir(target):
raise ValueError("Recipes path must point to a directory")
try:
common_root = os.path.commonpath([source, target])
except ValueError as exc:
raise ValueError("Invalid recipes path change") from exc
if common_root == source:
raise ValueError("Recipes path cannot be moved into a nested directory")
planned_recipe_updates: Dict[str, Dict[str, Any]] = {}
file_pairs: List[Tuple[str, str]] = []
for root, _, files in os.walk(source):
for filename in files:
source_path = os.path.normpath(os.path.join(root, filename))
relative_path = os.path.relpath(source_path, source)
target_path = os.path.normpath(os.path.join(target, relative_path))
file_pairs.append((source_path, target_path))
if not filename.endswith(".recipe.json"):
continue
try:
with open(source_path, "r", encoding="utf-8") as handle:
payload = json.load(handle)
except Exception as exc:
raise ValueError(
f"Unable to read recipe metadata during migration: {source_path}: {exc}"
) from exc
if not isinstance(payload, dict):
continue
file_path = payload.get("file_path")
if isinstance(file_path, str) and file_path.strip():
normalized_file_path = os.path.abspath(
os.path.normpath(os.path.expanduser(file_path))
)
source_candidates = [source]
real_source = os.path.abspath(
os.path.normpath(os.path.realpath(source_dir))
)
if real_source not in source_candidates:
source_candidates.append(real_source)
rewritten = False
for source_candidate in source_candidates:
try:
file_common_root = os.path.commonpath(
[normalized_file_path, source_candidate]
)
except ValueError:
continue
if file_common_root != source_candidate:
continue
image_relative_path = os.path.relpath(
normalized_file_path, source_candidate
)
payload["file_path"] = os.path.normpath(
os.path.join(target, image_relative_path)
)
rewritten = True
break
if not rewritten and source_candidates:
logger.debug(
"Skipping recipe file_path rewrite during migration for %s",
normalized_file_path,
)
planned_recipe_updates[target_path] = payload
for _, target_path in file_pairs:
if os.path.exists(target_path):
raise ValueError(
f"Recipes path already contains conflicting file: {target_path}"
)
os.makedirs(target, exist_ok=True)
for source_path, target_path in file_pairs:
os.makedirs(os.path.dirname(target_path), exist_ok=True)
shutil.move(source_path, target_path)
for target_path, payload in planned_recipe_updates.items():
with open(target_path, "w", encoding="utf-8") as handle:
json.dump(payload, handle, indent=4, ensure_ascii=False)
for root, dirs, files in os.walk(source, topdown=False):
if dirs or files:
continue
try:
os.rmdir(root)
except OSError:
pass
def set(self, key: str, value: Any) -> None:
"""Set setting value and save"""
if key == "auto_organize_exclusions":
@@ -1246,6 +1452,12 @@ class SettingsManager:
value = self.normalize_download_skip_base_models(value)
elif key == "mature_blur_level":
value = self.normalize_mature_blur_level(value)
elif key == "recipes_path":
current_recipes_dir = self._get_effective_recipes_dir()
value = self._normalize_recipes_path_value(value)
target_recipes_dir = self._get_effective_recipes_dir(value)
self._validate_recipes_storage_path(target_recipes_dir)
self._migrate_recipes_directory(current_recipes_dir, target_recipes_dir)
self.settings[key] = value
portable_switch_pending = False
if key == "use_portable_settings" and isinstance(value, bool):
@@ -1263,9 +1475,13 @@ class SettingsManager:
self._update_active_library_entry(default_unet_root=str(value))
elif key == "default_embedding_root":
self._update_active_library_entry(default_embedding_root=str(value))
elif key == "recipes_path":
self._update_active_library_entry(recipes_path=str(value))
elif key == "model_name_display":
self._notify_model_name_display_change(value)
self._save_settings()
if key == "recipes_path":
self._notify_library_change(self.get_active_library_name())
if portable_switch_pending:
self._finalize_portable_switch()
@@ -1575,6 +1791,7 @@ class SettingsManager:
default_checkpoint_root: Optional[str] = None,
default_unet_root: Optional[str] = None,
default_embedding_root: Optional[str] = None,
recipes_path: Optional[str] = None,
metadata: Optional[Mapping[str, Any]] = None,
activate: bool = False,
) -> Dict[str, Any]:
@@ -1618,6 +1835,11 @@ class SettingsManager:
if default_embedding_root is not None
else existing.get("default_embedding_root")
),
recipes_path=(
recipes_path
if recipes_path is not None
else existing.get("recipes_path")
),
metadata=metadata if metadata is not None else existing.get("metadata"),
base=existing,
)
@@ -1645,6 +1867,7 @@ class SettingsManager:
default_checkpoint_root: str = "",
default_unet_root: str = "",
default_embedding_root: str = "",
recipes_path: str = "",
metadata: Optional[Mapping[str, Any]] = None,
activate: bool = False,
) -> Dict[str, Any]:
@@ -1662,6 +1885,7 @@ class SettingsManager:
default_checkpoint_root=default_checkpoint_root,
default_unet_root=default_unet_root,
default_embedding_root=default_embedding_root,
recipes_path=recipes_path,
metadata=metadata,
activate=activate,
)
@@ -1721,6 +1945,7 @@ class SettingsManager:
default_checkpoint_root: Optional[str] = None,
default_unet_root: Optional[str] = None,
default_embedding_root: Optional[str] = None,
recipes_path: Optional[str] = None,
) -> None:
"""Update folder paths for the active library."""
@@ -1733,6 +1958,7 @@ class SettingsManager:
default_checkpoint_root=default_checkpoint_root,
default_unet_root=default_unet_root,
default_embedding_root=default_embedding_root,
recipes_path=recipes_path,
activate=True,
)

View File

@@ -101,6 +101,7 @@ DEFAULT_PRIORITY_TAG_CONFIG = {
DIFFUSION_MODEL_BASE_MODELS = frozenset(
[
"ZImageTurbo",
"ZImageBase",
"Wan Video 1.3B t2v",
"Wan Video 14B t2v",
"Wan Video 14B i2v 480p",

View File

@@ -292,6 +292,80 @@ class UsageStats:
if LORAS in metadata and isinstance(metadata[LORAS], dict):
await self._process_loras(metadata[LORAS], today)
def _increment_usage_counter(self, category: str, stat_key: str, today_date: str) -> None:
"""Increment usage counters for a resolved stats key."""
if stat_key not in self.stats[category]:
self.stats[category][stat_key] = {
"total": 0,
"history": {}
}
self.stats[category][stat_key]["total"] += 1
if today_date not in self.stats[category][stat_key]["history"]:
self.stats[category][stat_key]["history"][today_date] = 0
self.stats[category][stat_key]["history"][today_date] += 1
def _normalize_model_lookup_name(self, model_name: str) -> str:
"""Normalize a model reference to its base filename without extension."""
return os.path.splitext(os.path.basename(model_name))[0]
async def _find_cached_checkpoint_entry(self, checkpoint_scanner, model_name: str):
"""Best-effort lookup for a checkpoint cache entry by filename/model name."""
get_cached_data = getattr(checkpoint_scanner, "get_cached_data", None)
if not callable(get_cached_data):
return None
cache = await get_cached_data()
raw_data = getattr(cache, "raw_data", None)
if not isinstance(raw_data, list):
return None
normalized_name = self._normalize_model_lookup_name(model_name)
for entry in raw_data:
if not isinstance(entry, dict):
continue
for candidate_key in ("file_name", "model_name", "file_path"):
candidate_value = entry.get(candidate_key)
if not candidate_value or not isinstance(candidate_value, str):
continue
if self._normalize_model_lookup_name(candidate_value) == normalized_name:
return entry
return None
async def _resolve_checkpoint_hash(self, checkpoint_scanner, model_name: str):
"""Resolve a checkpoint hash, calculating pending hashes on demand when needed."""
model_filename = self._normalize_model_lookup_name(model_name)
model_hash = checkpoint_scanner.get_hash_by_filename(model_filename)
if model_hash:
return model_hash
cached_entry = await self._find_cached_checkpoint_entry(checkpoint_scanner, model_name)
if not cached_entry:
logger.warning(f"No hash found for checkpoint '{model_filename}', skipping usage tracking")
return None
cached_hash = cached_entry.get("sha256")
if cached_hash:
return cached_hash
if cached_entry.get("hash_status") == "pending":
calculate_hash = getattr(checkpoint_scanner, "calculate_hash_for_model", None)
file_path = cached_entry.get("file_path")
if callable(calculate_hash) and file_path:
calculated_hash = await calculate_hash(file_path)
if calculated_hash:
return calculated_hash
logger.warning(
f"Failed to calculate pending hash for checkpoint '{model_filename}', skipping usage tracking"
)
return None
logger.warning(f"No hash found for checkpoint '{model_filename}', skipping usage tracking")
return None
async def _process_checkpoints(self, models_data, today_date):
"""Process checkpoint models from metadata"""
try:
@@ -312,26 +386,11 @@ class UsageStats:
if not model_name:
continue
# Clean up filename (remove extension if present)
model_filename = os.path.splitext(os.path.basename(model_name))[0]
model_hash = await self._resolve_checkpoint_hash(checkpoint_scanner, model_name)
if not model_hash:
continue
# Get hash for this checkpoint
model_hash = checkpoint_scanner.get_hash_by_filename(model_filename)
if model_hash:
# Update stats for this checkpoint with date tracking
if model_hash not in self.stats["checkpoints"]:
self.stats["checkpoints"][model_hash] = {
"total": 0,
"history": {}
}
# Increment total count
self.stats["checkpoints"][model_hash]["total"] += 1
# Increment today's count
if today_date not in self.stats["checkpoints"][model_hash]["history"]:
self.stats["checkpoints"][model_hash]["history"][today_date] = 0
self.stats["checkpoints"][model_hash]["history"][today_date] += 1
self._increment_usage_counter("checkpoints", model_hash, today_date)
except Exception as e:
logger.error(f"Error processing checkpoint usage: {e}", exc_info=True)
@@ -360,21 +419,11 @@ class UsageStats:
# Get hash for this LoRA
lora_hash = lora_scanner.get_hash_by_filename(lora_name)
if lora_hash:
# Update stats for this LoRA with date tracking
if lora_hash not in self.stats["loras"]:
self.stats["loras"][lora_hash] = {
"total": 0,
"history": {}
}
if not lora_hash:
logger.warning(f"No hash found for LoRA '{lora_name}', skipping usage tracking")
continue
# Increment total count
self.stats["loras"][lora_hash]["total"] += 1
# Increment today's count
if today_date not in self.stats["loras"][lora_hash]["history"]:
self.stats["loras"][lora_hash]["history"][today_date] = 0
self.stats["loras"][lora_hash]["history"][today_date] += 1
self._increment_usage_counter("loras", lora_hash, today_date)
except Exception as e:
logger.error(f"Error processing LoRA usage: {e}", exc_info=True)

View File

@@ -1036,6 +1036,73 @@
}
}
/* Compact layout for short viewports: shrink the recipe modal's vertical
   footprint so the whole dialog fits on screen without scrolling. */
@media (max-height: 860px) {
/* Tighter vertical padding on the modal shell itself. */
#recipeModal .modal-content {
padding-top: var(--space-2);
padding-bottom: var(--space-2);
}
.recipe-modal-header {
padding-bottom: 6px;
margin-bottom: 8px;
}
/* Smaller title, capped to roughly two lines of text. */
.recipe-modal-header h2 {
font-size: 1.25em;
max-height: 2.5em;
}
.recipe-tags-container {
margin-bottom: 6px;
}
/* Collapse the top area to a single column with minimal gaps. */
.recipe-top-section {
grid-template-columns: 1fr;
gap: var(--space-1);
margin-bottom: var(--space-1);
}
/* The preview image is dropped entirely to reclaim vertical space. */
.recipe-preview-container {
display: none;
}
/* Generation-parameter panel shrinks to content, capped in height. */
.recipe-gen-params {
height: auto;
max-height: 210px;
}
.recipe-gen-params h3 {
margin-bottom: var(--space-1);
font-size: 1.05em;
}
.gen-params-container {
gap: var(--space-1);
}
.param-content {
max-height: 90px;
padding: 10px;
}
.param-textarea {
min-height: 100px;
}
.other-params {
margin-top: 0;
gap: 6px;
}
.recipe-bottom-section {
padding-top: var(--space-1);
}
.recipe-section-header {
margin-bottom: var(--space-1);
}
}
.badge-container {
position: relative;
display: flex;

View File

@@ -766,6 +766,11 @@ export class SettingsManager {
usePortableCheckbox.checked = !!state.global.settings.use_portable_settings;
}
const recipesPathInput = document.getElementById('recipesPath');
if (recipesPathInput) {
recipesPathInput.value = state.global.settings.recipes_path || '';
}
const autoOrganizeExclusionsInput = document.getElementById('autoOrganizeExclusions');
if (autoOrganizeExclusionsInput) {
const patterns = this.normalizePatternList(state.global.settings.auto_organize_exclusions);
@@ -2464,6 +2469,7 @@ export class SettingsManager {
if (!element) return;
const value = element.value.trim(); // Trim whitespace
const shouldShowLoading = settingKey === 'recipes_path';
try {
// Check if value has changed from existing value
@@ -2472,6 +2478,12 @@ export class SettingsManager {
return; // No change, exit early
}
if (shouldShowLoading) {
state.loadingManager?.showSimpleLoading(
translate('settings.folderSettings.recipesPathMigrating', {}, 'Migrating recipes...')
);
}
// For username and password, handle empty values specially
if ((settingKey === 'proxy_username' || settingKey === 'proxy_password') && value === '') {
// Remove from state instead of setting to empty string
@@ -2497,12 +2509,27 @@ export class SettingsManager {
await this.saveSetting(settingKey, value);
}
if (shouldShowLoading) {
state.loadingManager?.hide();
}
if (settingKey === 'recipes_path') {
showToast('toast.settings.recipesPathUpdated', {}, 'success');
} else {
showToast('toast.settings.settingsUpdated', { setting: settingKey.replace(/_/g, ' ') }, 'success');
}
} catch (error) {
if (shouldShowLoading) {
state.loadingManager?.hide();
}
if (settingKey === 'recipes_path') {
showToast('toast.settings.recipesPathSaveFailed', { message: error.message }, 'error');
} else {
showToast('toast.settings.settingSaveFailed', { message: error.message }, 'error');
}
}
}
async saveLanguageSetting() {
const element = document.getElementById('languageSelect');

View File

@@ -18,6 +18,7 @@ const DEFAULT_SETTINGS_BASE = Object.freeze({
default_lora_root: '',
default_checkpoint_root: '',
default_embedding_root: '',
recipes_path: '',
base_model_path_mappings: {},
download_path_templates: {},
example_images_path: '',

View File

@@ -530,6 +530,32 @@
</div>
</div>
</div>
</div>
<!-- Recipe Settings -->
<div class="settings-subsection">
<div class="settings-subsection-header">
<h4>{{ t('settings.sections.recipeSettings') }}</h4>
</div>
<div class="setting-item">
<div class="setting-row">
<div class="setting-info">
<label for="recipesPath">
{{ t('settings.folderSettings.recipesPath') }}
<i class="fas fa-info-circle info-icon" data-tooltip="{{ t('settings.folderSettings.recipesPathHelp') }}"></i>
</label>
</div>
<div class="setting-control">
<div class="text-input-wrapper">
<input type="text" id="recipesPath"
placeholder="{{ t('settings.folderSettings.recipesPathPlaceholder') }}"
onblur="settingsManager.saveInputSetting('recipesPath', 'recipes_path')"
onkeydown="if(event.key === 'Enter') { this.blur(); }" />
</div>
</div>
</div>
</div>
</div>
<!-- Extra Folder Paths -->

View File

@@ -69,6 +69,9 @@ describe('AutoComplete widget interactions', () => {
if (key === 'loramanager.autocomplete_append_comma') {
return true;
}
if (key === 'loramanager.autocomplete_auto_format') {
return true;
}
if (key === 'loramanager.autocomplete_accept_key') {
return 'both';
}
@@ -188,6 +191,59 @@ describe('AutoComplete widget interactions', () => {
expect(insertSelectionSpy).toHaveBeenCalledWith('example_completion');
});
it('formats duplicate commas and extra spaces when the textarea loses focus', async () => {
const input = document.createElement('textarea');
input.value = 'foo bar, , baz ,, qux';
document.body.append(input);
const inputListener = vi.fn();
input.addEventListener('input', inputListener);
const { AutoComplete } = await import(AUTOCOMPLETE_MODULE);
new AutoComplete(input,'prompt', { showPreview: false });
input.dispatchEvent(new Event('blur', { bubbles: true }));
expect(input.value).toBe('foo bar, baz, qux');
expect(inputListener).toHaveBeenCalledTimes(1);
});
it('skips blur formatting when autocomplete auto format is disabled', async () => {
settingGetMock.mockImplementation((key) => {
if (key === 'loramanager.autocomplete_append_comma') {
return true;
}
if (key === 'loramanager.autocomplete_auto_format') {
return false;
}
if (key === 'loramanager.autocomplete_accept_key') {
return 'both';
}
if (key === 'loramanager.prompt_tag_autocomplete') {
return true;
}
if (key === 'loramanager.tag_space_replacement') {
return false;
}
return undefined;
});
const input = document.createElement('textarea');
input.value = 'foo bar, , baz ,, qux';
document.body.append(input);
const inputListener = vi.fn();
input.addEventListener('input', inputListener);
const { AutoComplete } = await import(AUTOCOMPLETE_MODULE);
new AutoComplete(input,'prompt', { showPreview: false });
input.dispatchEvent(new Event('blur', { bubbles: true }));
expect(input.value).toBe('foo bar, , baz ,, qux');
expect(inputListener).not.toHaveBeenCalled();
});
it('accepts the selected suggestion with Enter', async () => {
caretHelperInstance.getBeforeCursor.mockReturnValue('example');
@@ -275,6 +331,9 @@ describe('AutoComplete widget interactions', () => {
if (key === 'loramanager.autocomplete_append_comma') {
return true;
}
if (key === 'loramanager.autocomplete_auto_format') {
return true;
}
if (key === 'loramanager.autocomplete_accept_key') {
return 'tab_only';
}
@@ -322,6 +381,9 @@ describe('AutoComplete widget interactions', () => {
if (key === 'loramanager.autocomplete_append_comma') {
return true;
}
if (key === 'loramanager.autocomplete_auto_format') {
return true;
}
if (key === 'loramanager.autocomplete_accept_key') {
return 'enter_only';
}

View File

@@ -205,4 +205,58 @@ describe('SettingsManager library controls', () => {
expect(select.value).toBe('alpha');
expect(activateSpy).not.toHaveBeenCalled();
});
it('loads recipes_path into the settings input', async () => {
const manager = createManager();
const input = document.createElement('input');
input.id = 'recipesPath';
document.body.appendChild(input);
global.fetch = vi.fn().mockResolvedValue({
ok: true,
json: async () => ({
success: true,
isAvailable: false,
isEnabled: false,
databaseSize: 0,
}),
});
state.global.settings = {
recipes_path: '/custom/recipes',
};
await manager.loadSettingsToUI();
expect(input.value).toBe('/custom/recipes');
});
it('shows loading while saving recipes_path', async () => {
const manager = createManager();
const input = document.createElement('input');
input.id = 'recipesPath';
input.value = '/custom/recipes';
document.body.appendChild(input);
state.global.settings = {
recipes_path: '',
};
global.fetch = vi.fn().mockResolvedValue({
ok: true,
json: async () => ({ success: true }),
});
await manager.saveInputSetting('recipesPath', 'recipes_path');
expect(state.loadingManager.showSimpleLoading).toHaveBeenCalledWith(
'Migrating recipes...'
);
expect(state.loadingManager.hide).toHaveBeenCalledTimes(1);
expect(showToast).toHaveBeenCalledWith(
'toast.settings.recipesPathUpdated',
{},
'success',
);
});
});

View File

@@ -98,6 +98,85 @@ def test_metadata_processor_extracts_generation_params(populated_registry, monke
assert isinstance(value, str)
def test_attention_bias_clip_text_encode_prompts_are_collected(metadata_registry, monkeypatch):
import types
prompt_graph = {
"encode_pos": {
"class_type": "CLIPTextEncodeAttentionBias",
"inputs": {"text": "A <big dog=1.25> on a hill", "clip": ["clip", 0]},
},
"encode_neg": {
"class_type": "CLIPTextEncodeAttentionBias",
"inputs": {"text": "low quality", "clip": ["clip", 0]},
},
"sampler": {
"class_type": "KSampler",
"inputs": {
"seed": types.SimpleNamespace(seed=123),
"steps": 20,
"cfg": 7.0,
"sampler_name": "Euler",
"scheduler": "karras",
"denoise": 1.0,
"positive": ["encode_pos", 0],
"negative": ["encode_neg", 0],
"latent_image": {"samples": types.SimpleNamespace(shape=(1, 4, 16, 16))},
},
},
}
prompt = SimpleNamespace(original_prompt=prompt_graph)
pos_conditioning = object()
neg_conditioning = object()
monkeypatch.setattr(metadata_processor, "standalone_mode", False)
metadata_registry.start_collection("prompt-attention")
metadata_registry.set_current_prompt(prompt)
metadata_registry.record_node_execution(
"encode_pos",
"CLIPTextEncodeAttentionBias",
{"text": "A <big dog=1.25> on a hill"},
None,
)
metadata_registry.update_node_execution(
"encode_pos", "CLIPTextEncodeAttentionBias", [(pos_conditioning,)]
)
metadata_registry.record_node_execution(
"encode_neg",
"CLIPTextEncodeAttentionBias",
{"text": "low quality"},
None,
)
metadata_registry.update_node_execution(
"encode_neg", "CLIPTextEncodeAttentionBias", [(neg_conditioning,)]
)
metadata_registry.record_node_execution(
"sampler",
"KSampler",
{
"seed": types.SimpleNamespace(seed=123),
"positive": pos_conditioning,
"negative": neg_conditioning,
"latent_image": {"samples": types.SimpleNamespace(shape=(1, 4, 16, 16))},
},
None,
)
metadata = metadata_registry.get_metadata("prompt-attention")
sampler_data = metadata[SAMPLING]["sampler"]
prompt_results = MetadataProcessor.match_conditioning_to_prompts(metadata, "sampler")
assert metadata[PROMPTS]["encode_pos"]["text"] == "A <big dog=1.25> on a hill"
assert metadata[PROMPTS]["encode_neg"]["text"] == "low quality"
assert sampler_data["node_id"] == "sampler"
assert sampler_data["is_sampler"] is True
assert prompt_results["prompt"] == "A <big dog=1.25> on a hill"
assert prompt_results["negative_prompt"] == "low quality"
def test_metadata_registry_caches_and_rehydrates(populated_registry):
registry = populated_registry["registry"]
prompt = populated_registry["prompt"]

View File

@@ -113,6 +113,78 @@ async def test_config_updates_preview_roots_after_switch(tmp_path):
assert decoded.replace("\\", "/").endswith("model.webp")
async def test_preview_handler_allows_custom_recipes_path(tmp_path):
lora_root = tmp_path / "library"
lora_root.mkdir()
recipes_root = tmp_path / "recipes_storage"
recipes_root.mkdir()
preview_file = recipes_root / "recipe.webp"
preview_file.write_bytes(b"preview")
config = Config()
config.apply_library_settings(
{
"folder_paths": {
"loras": [str(lora_root)],
"checkpoints": [],
"unet": [],
"embeddings": [],
},
"recipes_path": str(recipes_root),
}
)
assert config.is_preview_path_allowed(str(preview_file))
handler = PreviewHandler(config=config)
encoded_path = urllib.parse.quote(str(preview_file), safe="")
request = make_mocked_request("GET", f"/api/lm/previews?path={encoded_path}")
response = await handler.serve_preview(request)
assert isinstance(response, web.FileResponse)
assert response.status == 200
assert Path(response._path) == preview_file
async def test_preview_handler_allows_symlinked_recipes_path(tmp_path):
lora_root = tmp_path / "library"
lora_root.mkdir()
real_recipes_root = tmp_path / "real_recipes"
real_recipes_root.mkdir()
symlink_recipes_root = tmp_path / "linked_recipes"
symlink_recipes_root.symlink_to(real_recipes_root, target_is_directory=True)
preview_file = real_recipes_root / "recipe.webp"
preview_file.write_bytes(b"preview")
config = Config()
config.apply_library_settings(
{
"folder_paths": {
"loras": [str(lora_root)],
"checkpoints": [],
"unet": [],
"embeddings": [],
},
"recipes_path": str(symlink_recipes_root),
}
)
symlink_preview_path = symlink_recipes_root / "recipe.webp"
assert config.is_preview_path_allowed(str(symlink_preview_path))
handler = PreviewHandler(config=config)
encoded_path = urllib.parse.quote(str(symlink_preview_path), safe="")
request = make_mocked_request("GET", f"/api/lm/previews?path={encoded_path}")
response = await handler.serve_preview(request)
assert isinstance(response, web.FileResponse)
assert response.status == 200
assert Path(response._path) == preview_file.resolve()
def test_is_preview_path_allowed_case_insensitive_on_windows(tmp_path):
"""Test that preview path validation is case-insensitive on Windows.

View File

@@ -8,6 +8,7 @@ import pytest
from py.config import config
from py.services.recipe_scanner import RecipeScanner
from py.services import settings_manager as settings_manager_module
from py.utils.utils import calculate_recipe_fingerprint
@@ -72,12 +73,56 @@ class StubLoraScanner:
@pytest.fixture
def recipe_scanner(tmp_path: Path, monkeypatch):
RecipeScanner._instance = None
settings_manager_module.reset_settings_manager()
monkeypatch.setattr(config, "loras_roots", [str(tmp_path)])
stub = StubLoraScanner()
scanner = RecipeScanner(lora_scanner=stub)
asyncio.run(scanner.refresh_cache(force=True))
yield scanner, stub
RecipeScanner._instance = None
settings_manager_module.reset_settings_manager()
def test_recipes_dir_uses_custom_settings_path(tmp_path: Path, monkeypatch):
RecipeScanner._instance = None
settings_manager_module.reset_settings_manager()
settings_path = tmp_path / "settings.json"
custom_recipes = tmp_path / "custom" / ".." / "custom_recipes"
monkeypatch.setattr(
"py.services.settings_manager.ensure_settings_file",
lambda logger=None: str(settings_path),
)
monkeypatch.setattr(config, "loras_roots", [str(tmp_path / "loras-root")])
manager = settings_manager_module.get_settings_manager()
manager.set("recipes_path", str(custom_recipes))
scanner = RecipeScanner(lora_scanner=StubLoraScanner())
resolved = scanner.recipes_dir
assert resolved == str((tmp_path / "custom_recipes").resolve())
assert Path(resolved).is_dir()
RecipeScanner._instance = None
settings_manager_module.reset_settings_manager()
def test_recipes_dir_falls_back_to_first_lora_root(tmp_path: Path, monkeypatch):
RecipeScanner._instance = None
settings_manager_module.reset_settings_manager()
monkeypatch.setattr(config, "loras_roots", [str(tmp_path / "alpha")])
scanner = RecipeScanner(lora_scanner=StubLoraScanner())
resolved = scanner.recipes_dir
assert resolved == str(tmp_path / "alpha" / "recipes")
assert Path(resolved).is_dir()
RecipeScanner._instance = None
settings_manager_module.reset_settings_manager()
async def test_add_recipe_during_concurrent_reads(recipe_scanner):

View File

@@ -496,6 +496,7 @@ def test_migrate_sanitizes_legacy_libraries(tmp_path, monkeypatch):
assert payload["default_lora_root"] == ""
assert payload["default_checkpoint_root"] == ""
assert payload["default_embedding_root"] == ""
assert payload["recipes_path"] == ""
assert manager.get_active_library_name() == "legacy"
@@ -507,12 +508,14 @@ def test_active_library_syncs_top_level_settings(tmp_path, monkeypatch):
"default_lora_root": "/loras",
"default_checkpoint_root": "/ckpt",
"default_embedding_root": "/embed",
"recipes_path": "/loras/recipes",
},
"studio": {
"folder_paths": {"loras": ["/studio"]},
"default_lora_root": "/studio",
"default_checkpoint_root": "/studio_ckpt",
"default_embedding_root": "/studio_embed",
"recipes_path": "/studio/custom-recipes",
},
},
"active_library": "studio",
@@ -521,6 +524,7 @@ def test_active_library_syncs_top_level_settings(tmp_path, monkeypatch):
"default_lora_root": "/loras",
"default_checkpoint_root": "/ckpt",
"default_embedding_root": "/embed",
"recipes_path": "/loras/recipes",
}
manager = _create_manager_with_settings(tmp_path, monkeypatch, initial)
@@ -530,14 +534,17 @@ def test_active_library_syncs_top_level_settings(tmp_path, monkeypatch):
assert manager.get("default_lora_root") == "/studio"
assert manager.get("default_checkpoint_root") == "/studio_ckpt"
assert manager.get("default_embedding_root") == "/studio_embed"
assert manager.get("recipes_path") == "/studio/custom-recipes"
# Drift the top-level values again and ensure activate_library repairs them
manager.settings["folder_paths"] = {"loras": ["/loras"]}
manager.settings["default_lora_root"] = "/loras"
manager.settings["recipes_path"] = "/loras/recipes"
manager.activate_library("studio")
assert manager.get("folder_paths")["loras"] == ["/studio"]
assert manager.get("default_lora_root") == "/studio"
assert manager.get("recipes_path") == "/studio/custom-recipes"
def test_refresh_environment_variables_updates_stored_value(tmp_path, monkeypatch):
@@ -554,6 +561,7 @@ def test_refresh_environment_variables_updates_stored_value(tmp_path, monkeypatc
"default_lora_root": "",
"default_checkpoint_root": "",
"default_embedding_root": "",
"recipes_path": "",
}
},
"active_library": "default",
@@ -589,6 +597,177 @@ def test_upsert_library_creates_entry_and_activates(manager, tmp_path):
assert str(lora_dir).replace(os.sep, "/") in normalized_stored_paths
def test_set_recipes_path_updates_active_library_entry(manager, tmp_path):
recipes_dir = tmp_path / "custom" / "recipes"
manager.set("recipes_path", str(recipes_dir))
assert manager.get("recipes_path") == str(recipes_dir.resolve())
assert (
manager.get_libraries()["default"]["recipes_path"]
== str(recipes_dir.resolve())
)
def test_set_recipes_path_migrates_existing_recipe_files(manager, tmp_path):
lora_root = tmp_path / "loras"
old_recipes_dir = lora_root / "recipes" / "nested"
old_recipes_dir.mkdir(parents=True)
manager.set("folder_paths", {"loras": [str(lora_root)]})
recipe_id = "recipe-1"
old_image_path = old_recipes_dir / f"{recipe_id}.webp"
old_json_path = old_recipes_dir / f"{recipe_id}.recipe.json"
old_image_path.write_bytes(b"image-bytes")
old_json_path.write_text(
json.dumps(
{
"id": recipe_id,
"file_path": str(old_image_path),
"title": "Recipe 1",
}
),
encoding="utf-8",
)
new_recipes_dir = tmp_path / "custom_recipes"
manager.set("recipes_path", str(new_recipes_dir))
migrated_image_path = new_recipes_dir / "nested" / f"{recipe_id}.webp"
migrated_json_path = new_recipes_dir / "nested" / f"{recipe_id}.recipe.json"
assert manager.get("recipes_path") == str(new_recipes_dir.resolve())
assert migrated_image_path.read_bytes() == b"image-bytes"
migrated_payload = json.loads(migrated_json_path.read_text(encoding="utf-8"))
assert migrated_payload["file_path"] == str(migrated_image_path)
assert not old_image_path.exists()
assert not old_json_path.exists()
def test_clearing_recipes_path_migrates_files_to_default_location(manager, tmp_path):
lora_root = tmp_path / "loras"
custom_recipes_dir = tmp_path / "custom_recipes"
old_recipes_dir = custom_recipes_dir / "nested"
old_recipes_dir.mkdir(parents=True)
manager.set("folder_paths", {"loras": [str(lora_root)]})
manager.settings["recipes_path"] = str(custom_recipes_dir)
recipe_id = "recipe-2"
old_image_path = old_recipes_dir / f"{recipe_id}.webp"
old_json_path = old_recipes_dir / f"{recipe_id}.recipe.json"
old_image_path.write_bytes(b"image-bytes")
old_json_path.write_text(
json.dumps(
{
"id": recipe_id,
"file_path": str(old_image_path),
"title": "Recipe 2",
}
),
encoding="utf-8",
)
manager.set("recipes_path", "")
fallback_recipes_dir = lora_root / "recipes"
migrated_image_path = fallback_recipes_dir / "nested" / f"{recipe_id}.webp"
migrated_json_path = fallback_recipes_dir / "nested" / f"{recipe_id}.recipe.json"
assert manager.get("recipes_path") == ""
assert migrated_image_path.read_bytes() == b"image-bytes"
migrated_payload = json.loads(migrated_json_path.read_text(encoding="utf-8"))
assert migrated_payload["file_path"] == str(migrated_image_path)
assert not old_image_path.exists()
assert not old_json_path.exists()
def test_moving_recipes_path_back_to_parent_directory_is_allowed(manager, tmp_path):
lora_root = tmp_path / "loras"
manager.set("folder_paths", {"loras": [str(lora_root)]})
source_recipes_dir = lora_root / "recipes" / "custom"
source_recipes_dir.mkdir(parents=True)
recipe_id = "recipe-parent"
old_image_path = source_recipes_dir / f"{recipe_id}.webp"
old_json_path = source_recipes_dir / f"{recipe_id}.recipe.json"
old_image_path.write_bytes(b"parent-bytes")
old_json_path.write_text(
json.dumps(
{
"id": recipe_id,
"file_path": str(old_image_path),
"title": "Recipe Parent",
}
),
encoding="utf-8",
)
manager.settings["recipes_path"] = str(source_recipes_dir)
manager.set("recipes_path", str(lora_root / "recipes"))
migrated_image_path = lora_root / "recipes" / f"{recipe_id}.webp"
migrated_json_path = lora_root / "recipes" / f"{recipe_id}.recipe.json"
assert manager.get("recipes_path") == str((lora_root / "recipes").resolve())
assert migrated_image_path.read_bytes() == b"parent-bytes"
migrated_payload = json.loads(migrated_json_path.read_text(encoding="utf-8"))
assert migrated_payload["file_path"] == str(migrated_image_path)
assert not old_image_path.exists()
assert not old_json_path.exists()
def test_set_recipes_path_rewrites_symlinked_recipe_metadata(manager, tmp_path):
real_recipes_dir = tmp_path / "real_recipes"
real_recipes_dir.mkdir()
symlink_recipes_dir = tmp_path / "linked_recipes"
symlink_recipes_dir.symlink_to(real_recipes_dir, target_is_directory=True)
manager.settings["recipes_path"] = str(symlink_recipes_dir)
manager.set("folder_paths", {"loras": [str(tmp_path / "loras")]})
recipe_id = "recipe-symlink"
old_image_path = real_recipes_dir / f"{recipe_id}.webp"
old_json_path = real_recipes_dir / f"{recipe_id}.recipe.json"
old_image_path.write_bytes(b"symlink-bytes")
old_json_path.write_text(
json.dumps(
{
"id": recipe_id,
"file_path": str(old_image_path),
"title": "Recipe Symlink",
}
),
encoding="utf-8",
)
new_recipes_dir = tmp_path / "migrated_recipes"
manager.set("recipes_path", str(new_recipes_dir))
migrated_image_path = new_recipes_dir / f"{recipe_id}.webp"
migrated_json_path = new_recipes_dir / f"{recipe_id}.recipe.json"
assert migrated_image_path.read_bytes() == b"symlink-bytes"
migrated_payload = json.loads(migrated_json_path.read_text(encoding="utf-8"))
assert migrated_payload["file_path"] == str(migrated_image_path)
assert not old_image_path.exists()
assert not old_json_path.exists()
def test_set_recipes_path_rejects_file_target(manager, tmp_path):
lora_root = tmp_path / "loras"
lora_root.mkdir()
manager.set("folder_paths", {"loras": [str(lora_root)]})
target_file = tmp_path / "not_a_directory"
target_file.write_text("blocked", encoding="utf-8")
with pytest.raises(ValueError, match="directory"):
manager.set("recipes_path", str(target_file))
assert manager.get("recipes_path") == ""
def test_extra_folder_paths_stored_separately(manager, tmp_path):
lora_dir = tmp_path / "loras"
extra_dir = tmp_path / "extra_loras"

View File

@@ -152,3 +152,67 @@ async def test_usage_stats_background_processor_handles_pending_prompts(tmp_path
assert stats.stats["loras"]["lora-hash"]["history"][today] == 1
await _finalize_usage_stats(tasks)
async def test_usage_stats_calculates_pending_checkpoint_hash_on_demand(tmp_path, monkeypatch):
stats, tasks, _ = _prepare_usage_stats(tmp_path, monkeypatch)
metadata_payload = {
"models": {
"1": {"type": "checkpoint", "name": "pending_model.safetensors"},
},
"loras": {},
}
checkpoint_cache = SimpleNamespace(
raw_data=[
{
"file_name": "pending_model",
"model_name": "pending_model",
"file_path": "/models/pending_model.safetensors",
"sha256": "",
"hash_status": "pending",
}
]
)
checkpoint_scanner = SimpleNamespace(
get_hash_by_filename=lambda name: None,
get_cached_data=AsyncMock(return_value=checkpoint_cache),
calculate_hash_for_model=AsyncMock(return_value="resolved-hash"),
)
lora_scanner = SimpleNamespace(get_hash_by_filename=lambda name: None)
monkeypatch.setattr(ServiceRegistry, "get_checkpoint_scanner", AsyncMock(return_value=checkpoint_scanner))
monkeypatch.setattr(ServiceRegistry, "get_lora_scanner", AsyncMock(return_value=lora_scanner))
await stats._process_metadata(metadata_payload)
today = datetime.now().strftime("%Y-%m-%d")
checkpoint_scanner.calculate_hash_for_model.assert_awaited_once_with("/models/pending_model.safetensors")
assert stats.stats["checkpoints"]["resolved-hash"]["history"][today] == 1
await _finalize_usage_stats(tasks)
async def test_usage_stats_skips_name_fallback_for_missing_lora_hash(tmp_path, monkeypatch):
stats, tasks, _ = _prepare_usage_stats(tmp_path, monkeypatch)
metadata_payload = {
"models": {},
"loras": {
"2": {"lora_list": [{"name": "missing_lora"}]},
},
}
checkpoint_scanner = SimpleNamespace(get_hash_by_filename=lambda name: None)
lora_scanner = SimpleNamespace(get_hash_by_filename=lambda name: None)
monkeypatch.setattr(ServiceRegistry, "get_checkpoint_scanner", AsyncMock(return_value=checkpoint_scanner))
monkeypatch.setattr(ServiceRegistry, "get_lora_scanner", AsyncMock(return_value=lora_scanner))
await stats._process_metadata(metadata_payload)
assert stats.stats["loras"] == {}
assert not any(key.startswith("name:") for key in stats.stats["loras"])
await _finalize_usage_stats(tasks)

View File

@@ -3,6 +3,7 @@ import { app } from "../../scripts/app.js";
import { TextAreaCaretHelper } from "./textarea_caret_helper.js";
import {
getAutocompleteAppendCommaPreference,
getAutocompleteAutoFormatPreference,
getAutocompleteAcceptKeyPreference,
getPromptTagAutocompletePreference,
getTagSpaceReplacementPreference,
@@ -122,6 +123,32 @@ function formatAutocompleteInsertion(text = '') {
return getAutocompleteAppendCommaPreference() ? `${trimmed},` : `${trimmed} `;
}
function normalizeAutocompleteSegment(segment = '') {
  // Strip leading/trailing whitespace, then collapse internal runs to one space.
  return segment.trim().replace(/\s+/g, ' ');
}
/**
 * Tidy prompt text when an autocomplete textarea loses focus.
 *
 * Per line: whitespace-only lines become ''; otherwise the line is split
 * on commas, each piece has its whitespace collapsed, pieces left empty by
 * duplicate/trailing commas are dropped, and the survivors are re-joined
 * with ', '. The line structure itself is preserved.
 *
 * @param {string} [text=''] - Raw textarea content; non-strings yield ''.
 * @returns {string} The normalized text.
 */
export function formatAutocompleteTextOnBlur(text = '') {
    if (typeof text !== 'string') {
        return '';
    }
    const formattedLines = [];
    for (const rawLine of text.split('\n')) {
        if (rawLine.trim() === '') {
            formattedLines.push('');
            continue;
        }
        const pieces = [];
        for (const piece of rawLine.split(',')) {
            // Collapse internal whitespace runs to single spaces; skip
            // pieces emptied by duplicate or trailing commas.
            const cleaned = piece.split(/\s+/).filter(Boolean).join(' ');
            if (cleaned !== '') {
                pieces.push(cleaned);
            }
        }
        formattedLines.push(pieces.join(', '));
    }
    return formattedLines.join('\n');
}
function shouldAcceptAutocompleteKey(key) {
const mode = getAutocompleteAcceptKeyPreference();
@@ -481,6 +508,14 @@ class AutoComplete {
// Handle focus out to hide dropdown
this.onBlur = () => {
if (getAutocompleteAutoFormatPreference()) {
const formattedValue = formatAutocompleteTextOnBlur(this.inputElement.value);
if (formattedValue !== this.inputElement.value) {
this.inputElement.value = formattedValue;
this.inputElement.dispatchEvent(new Event('input', { bubbles: true }));
}
}
// Delay hiding to allow for clicks on dropdown items
setTimeout(() => {
this.hide();

View File

@@ -16,6 +16,9 @@ const PROMPT_TAG_AUTOCOMPLETE_DEFAULT = true;
const AUTOCOMPLETE_APPEND_COMMA_SETTING_ID = "loramanager.autocomplete_append_comma";
const AUTOCOMPLETE_APPEND_COMMA_DEFAULT = true;
const AUTOCOMPLETE_AUTO_FORMAT_SETTING_ID = "loramanager.autocomplete_auto_format";
const AUTOCOMPLETE_AUTO_FORMAT_DEFAULT = true;
const AUTOCOMPLETE_ACCEPT_KEY_SETTING_ID = "loramanager.autocomplete_accept_key";
const AUTOCOMPLETE_ACCEPT_KEY_DEFAULT = "both";
const AUTOCOMPLETE_ACCEPT_KEY_OPTION_BOTH = "Tab or Enter";
@@ -192,6 +195,32 @@ const getAutocompleteAppendCommaPreference = (() => {
};
})();
/**
 * Read the "auto format on blur" preference from the ComfyUI settings API.
 * Falls back to AUTOCOMPLETE_AUTO_FORMAT_DEFAULT when the settings API is
 * missing or throws; each failure mode is logged at most once.
 * @returns {boolean} Whether autocomplete text should be formatted on blur.
 */
const getAutocompleteAutoFormatPreference = (() => {
    let warned = false;
    // Emit a warning only the first time reading the setting fails.
    const warnOnce = (...args) => {
        if (!warned) {
            console.warn(...args);
            warned = true;
        }
    };
    return () => {
        const settingManager = app?.extensionManager?.setting;
        if (typeof settingManager?.get !== "function") {
            warnOnce("LoRA Manager: settings API unavailable, using default autocomplete auto format setting.");
            return AUTOCOMPLETE_AUTO_FORMAT_DEFAULT;
        }
        try {
            return settingManager.get(AUTOCOMPLETE_AUTO_FORMAT_SETTING_ID) ?? AUTOCOMPLETE_AUTO_FORMAT_DEFAULT;
        } catch (error) {
            warnOnce("LoRA Manager: unable to read autocomplete auto format setting, using default.", error);
            return AUTOCOMPLETE_AUTO_FORMAT_DEFAULT;
        }
    };
})();
const getAutocompleteAcceptKeyPreference = (() => {
let settingsUnavailableLogged = false;
@@ -375,6 +404,14 @@ app.registerExtension({
tooltip: "When enabled, accepted autocomplete suggestions append ', ' to the inserted text.",
category: ["LoRA Manager", "Autocomplete", "Append comma"],
},
{
id: AUTOCOMPLETE_AUTO_FORMAT_SETTING_ID,
name: "Auto format autocomplete text on blur",
type: "boolean",
defaultValue: AUTOCOMPLETE_AUTO_FORMAT_DEFAULT,
tooltip: "When enabled, leaving an autocomplete textarea removes duplicate commas and collapses unnecessary spaces.",
category: ["LoRA Manager", "Autocomplete", "Auto Format"],
},
{
id: AUTOCOMPLETE_ACCEPT_KEY_SETTING_ID,
name: "Autocomplete accept key",
@@ -505,6 +542,7 @@ export {
getWheelSensitivity,
getAutoPathCorrectionPreference,
getAutocompleteAppendCommaPreference,
getAutocompleteAutoFormatPreference,
getAutocompleteAcceptKeyPreference,
getPromptTagAutocompletePreference,
getTagSpaceReplacementPreference,