mirror of
https://github.com/willmiao/ComfyUI-Lora-Manager.git
synced 2026-05-14 00:57:37 -03:00
fix(recipes): persist source_path in SQLite cache and eliminate source_url redundancy
- Add source_path column to PersistentRecipeCache SQLite schema with migration for existing databases (ALTER TABLE ADD COLUMN)
- Backfill source_path from recipe JSON files on first startup after migration to avoid requiring manual cache rebuild
- Remove all source_url recipe field references (import_remote_recipe, import_from_url, check_image_exists, enrichment, batch_import) and consolidate on source_path as the single source of truth
- Add civitai.green to supported Civitai page hosts
- Register check-image-exists and import-from-url recipe endpoints
This commit is contained in:
@@ -224,7 +224,7 @@ class BatchImportService:
|
||||
return False
|
||||
|
||||
for recipe in getattr(cache, "raw_data", []):
|
||||
source_path = recipe.get("source_path") or recipe.get("source_url")
|
||||
source_path = recipe.get("source_path")
|
||||
if source_path and source_path == source:
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -38,6 +38,7 @@ class PersistentRecipeCache:
|
||||
"json_path",
|
||||
"title",
|
||||
"folder",
|
||||
"source_path",
|
||||
"base_model",
|
||||
"fingerprint",
|
||||
"created_date",
|
||||
@@ -334,6 +335,7 @@ class PersistentRecipeCache:
|
||||
json_path TEXT,
|
||||
title TEXT,
|
||||
folder TEXT,
|
||||
source_path TEXT,
|
||||
base_model TEXT,
|
||||
fingerprint TEXT,
|
||||
created_date REAL,
|
||||
@@ -358,6 +360,13 @@ class PersistentRecipeCache:
|
||||
);
|
||||
"""
|
||||
)
|
||||
# Migration: add source_path column to existing databases
|
||||
try:
|
||||
conn.execute(
|
||||
"ALTER TABLE recipes ADD COLUMN source_path TEXT"
|
||||
)
|
||||
except Exception:
|
||||
pass # column already exists
|
||||
conn.commit()
|
||||
self._schema_initialized = True
|
||||
except Exception as exc:
|
||||
@@ -406,6 +415,7 @@ class PersistentRecipeCache:
|
||||
json_path,
|
||||
recipe.get("title"),
|
||||
recipe.get("folder"),
|
||||
recipe.get("source_path"),
|
||||
recipe.get("base_model"),
|
||||
recipe.get("fingerprint"),
|
||||
float(recipe.get("created_date") or 0.0),
|
||||
@@ -456,6 +466,7 @@ class PersistentRecipeCache:
|
||||
"file_path": row["file_path"] or "",
|
||||
"title": row["title"] or "",
|
||||
"folder": row["folder"] or "",
|
||||
"source_path": row["source_path"] or "",
|
||||
"base_model": row["base_model"] or "",
|
||||
"fingerprint": row["fingerprint"] or "",
|
||||
"created_date": row["created_date"] or 0.0,
|
||||
|
||||
@@ -504,6 +504,9 @@ class RecipeScanner:
|
||||
self._cache.raw_data = recipes
|
||||
self._update_folder_metadata(self._cache)
|
||||
self._sort_cache_sync()
|
||||
# Backfill source_path from JSON files if missing (schema migration)
|
||||
if self._backfill_source_path_if_needed(recipes, json_paths):
|
||||
self._persistent_cache.save_cache(recipes, json_paths)
|
||||
return self._cache
|
||||
else:
|
||||
# Partial update: some files changed
|
||||
@@ -514,6 +517,8 @@ class RecipeScanner:
|
||||
self._cache.raw_data = recipes
|
||||
self._update_folder_metadata(self._cache)
|
||||
self._sort_cache_sync()
|
||||
# Backfill source_path from JSON files if missing (schema migration)
|
||||
self._backfill_source_path_if_needed(recipes, json_paths)
|
||||
# Persist updated cache
|
||||
self._persistent_cache.save_cache(recipes, json_paths)
|
||||
return self._cache
|
||||
@@ -642,6 +647,34 @@ class RecipeScanner:
|
||||
|
||||
return recipes, changed, json_paths
|
||||
|
||||
def _backfill_source_path_if_needed(
|
||||
self,
|
||||
recipes: List[Dict],
|
||||
json_paths: Dict[str, str],
|
||||
) -> bool:
|
||||
"""Backfill source_path from recipe JSON files if missing from cache.
|
||||
|
||||
Returns True if any recipes were updated (caller should persist cache).
|
||||
"""
|
||||
updated = False
|
||||
for recipe in recipes:
|
||||
if recipe.get("source_path"):
|
||||
continue
|
||||
recipe_id = str(recipe.get("id", ""))
|
||||
json_path = json_paths.get(recipe_id)
|
||||
if not json_path or not os.path.exists(json_path):
|
||||
continue
|
||||
try:
|
||||
with open(json_path, "r", encoding="utf-8") as f:
|
||||
json_data = json.load(f)
|
||||
file_source_path = json_data.get("source_path")
|
||||
if file_source_path:
|
||||
recipe["source_path"] = file_source_path
|
||||
updated = True
|
||||
except Exception:
|
||||
pass
|
||||
return updated
|
||||
|
||||
def _full_directory_scan_sync(
|
||||
self, recipes_dir: str
|
||||
) -> Tuple[List[Dict], Dict[str, str]]:
|
||||
|
||||
Reference in New Issue
Block a user