Merge pull request #422 from willmiao/ca

Civitai metadata archive db
This commit is contained in:
pixelpaws
2025-09-10 20:35:16 +08:00
committed by GitHub
41 changed files with 2479 additions and 749 deletions

1
.gitignore vendored
View File

@@ -5,3 +5,4 @@ output/*
py/run_test.py py/run_test.py
.vscode/ .vscode/
cache/ cache/
civitai/

View File

@@ -16,7 +16,9 @@
"loading": "Wird geladen...", "loading": "Wird geladen...",
"unknown": "Unbekannt", "unknown": "Unbekannt",
"date": "Datum", "date": "Datum",
"version": "Version" "version": "Version",
"enabled": "Aktiviert",
"disabled": "Deaktiviert"
}, },
"language": { "language": {
"select": "Sprache", "select": "Sprache",
@@ -178,7 +180,8 @@
"folderSettings": "Ordner-Einstellungen", "folderSettings": "Ordner-Einstellungen",
"downloadPathTemplates": "Download-Pfad-Vorlagen", "downloadPathTemplates": "Download-Pfad-Vorlagen",
"exampleImages": "Beispielbilder", "exampleImages": "Beispielbilder",
"misc": "Verschiedenes" "misc": "Verschiedenes",
"metadataArchive": "Metadaten-Archiv-Datenbank"
}, },
"contentFiltering": { "contentFiltering": {
"blurNsfwContent": "NSFW-Inhalte unscharf stellen", "blurNsfwContent": "NSFW-Inhalte unscharf stellen",
@@ -273,6 +276,30 @@
"misc": { "misc": {
"includeTriggerWords": "Trigger Words in LoRA-Syntax einschließen", "includeTriggerWords": "Trigger Words in LoRA-Syntax einschließen",
"includeTriggerWordsHelp": "Trainierte Trigger Words beim Kopieren der LoRA-Syntax in die Zwischenablage einschließen" "includeTriggerWordsHelp": "Trainierte Trigger Words beim Kopieren der LoRA-Syntax in die Zwischenablage einschließen"
},
"metadataArchive": {
"enableArchiveDb": "Metadaten-Archiv-Datenbank aktivieren",
"enableArchiveDbHelp": "Verwenden Sie eine lokale Datenbank, um auf Metadaten von Modellen zuzugreifen, die von Civitai gelöscht wurden.",
"status": "Status",
"statusAvailable": "Verfügbar",
"statusUnavailable": "Nicht verfügbar",
"enabled": "Aktiviert",
"management": "Datenbankverwaltung",
"managementHelp": "Laden Sie die Metadaten-Archiv-Datenbank herunter oder entfernen Sie sie",
"downloadButton": "Datenbank herunterladen",
"downloadingButton": "Wird heruntergeladen...",
"downloadedButton": "Heruntergeladen",
"removeButton": "Datenbank entfernen",
"removingButton": "Wird entfernt...",
"downloadSuccess": "Metadaten-Archiv-Datenbank erfolgreich heruntergeladen",
"downloadError": "Fehler beim Herunterladen der Metadaten-Archiv-Datenbank",
"removeSuccess": "Metadaten-Archiv-Datenbank erfolgreich entfernt",
"removeError": "Fehler beim Entfernen der Metadaten-Archiv-Datenbank",
"removeConfirm": "Sind Sie sicher, dass Sie die Metadaten-Archiv-Datenbank entfernen möchten? Dadurch wird die lokale Datenbankdatei gelöscht und Sie müssen sie erneut herunterladen, um diese Funktion zu nutzen.",
"preparing": "Download wird vorbereitet...",
"connecting": "Verbindung zum Download-Server wird hergestellt...",
"completed": "Abgeschlossen",
"downloadComplete": "Download erfolgreich abgeschlossen"
} }
}, },
"loras": { "loras": {

View File

@@ -16,7 +16,9 @@
"loading": "Loading...", "loading": "Loading...",
"unknown": "Unknown", "unknown": "Unknown",
"date": "Date", "date": "Date",
"version": "Version" "version": "Version",
"enabled": "Enabled",
"disabled": "Disabled"
}, },
"language": { "language": {
"select": "Language", "select": "Language",
@@ -178,7 +180,8 @@
"folderSettings": "Folder Settings", "folderSettings": "Folder Settings",
"downloadPathTemplates": "Download Path Templates", "downloadPathTemplates": "Download Path Templates",
"exampleImages": "Example Images", "exampleImages": "Example Images",
"misc": "Misc." "misc": "Misc.",
"metadataArchive": "Metadata Archive Database"
}, },
"contentFiltering": { "contentFiltering": {
"blurNsfwContent": "Blur NSFW Content", "blurNsfwContent": "Blur NSFW Content",
@@ -273,6 +276,30 @@
"misc": { "misc": {
"includeTriggerWords": "Include Trigger Words in LoRA Syntax", "includeTriggerWords": "Include Trigger Words in LoRA Syntax",
"includeTriggerWordsHelp": "Include trained trigger words when copying LoRA syntax to clipboard" "includeTriggerWordsHelp": "Include trained trigger words when copying LoRA syntax to clipboard"
},
"metadataArchive": {
"enableArchiveDb": "Enable Metadata Archive Database",
"enableArchiveDbHelp": "Use a local database to access metadata for models that have been deleted from Civitai.",
"status": "Status",
"statusAvailable": "Available",
"statusUnavailable": "Not Available",
"enabled": "Enabled",
"management": "Database Management",
"managementHelp": "Download or remove the metadata archive database",
"downloadButton": "Download Database",
"downloadingButton": "Downloading...",
"downloadedButton": "Downloaded",
"removeButton": "Remove Database",
"removingButton": "Removing...",
"downloadSuccess": "Metadata archive database downloaded successfully",
"downloadError": "Failed to download metadata archive database",
"removeSuccess": "Metadata archive database removed successfully",
"removeError": "Failed to remove metadata archive database",
"removeConfirm": "Are you sure you want to remove the metadata archive database? This will delete the local database file and you'll need to download it again to use this feature.",
"preparing": "Preparing download...",
"connecting": "Connecting to download server...",
"completed": "Completed",
"downloadComplete": "Download completed successfully"
} }
}, },
"loras": { "loras": {

View File

@@ -16,7 +16,9 @@
"loading": "Cargando...", "loading": "Cargando...",
"unknown": "Desconocido", "unknown": "Desconocido",
"date": "Fecha", "date": "Fecha",
"version": "Versión" "version": "Versión",
"enabled": "Habilitado",
"disabled": "Deshabilitado"
}, },
"language": { "language": {
"select": "Idioma", "select": "Idioma",
@@ -178,7 +180,8 @@
"folderSettings": "Configuración de carpetas", "folderSettings": "Configuración de carpetas",
"downloadPathTemplates": "Plantillas de rutas de descarga", "downloadPathTemplates": "Plantillas de rutas de descarga",
"exampleImages": "Imágenes de ejemplo", "exampleImages": "Imágenes de ejemplo",
"misc": "Varios" "misc": "Varios",
"metadataArchive": "Base de datos de archivo de metadatos"
}, },
"contentFiltering": { "contentFiltering": {
"blurNsfwContent": "Difuminar contenido NSFW", "blurNsfwContent": "Difuminar contenido NSFW",
@@ -273,6 +276,30 @@
"misc": { "misc": {
"includeTriggerWords": "Incluir palabras clave en la sintaxis de LoRA", "includeTriggerWords": "Incluir palabras clave en la sintaxis de LoRA",
"includeTriggerWordsHelp": "Incluir palabras clave entrenadas al copiar la sintaxis de LoRA al portapapeles" "includeTriggerWordsHelp": "Incluir palabras clave entrenadas al copiar la sintaxis de LoRA al portapapeles"
},
"metadataArchive": {
"enableArchiveDb": "Habilitar base de datos de archivo de metadatos",
"enableArchiveDbHelp": "Utiliza una base de datos local para acceder a metadatos de modelos que han sido eliminados de Civitai.",
"status": "Estado",
"statusAvailable": "Disponible",
"statusUnavailable": "No disponible",
"enabled": "Habilitado",
"management": "Gestión de base de datos",
"managementHelp": "Descargar o eliminar la base de datos de archivo de metadatos",
"downloadButton": "Descargar base de datos",
"downloadingButton": "Descargando...",
"downloadedButton": "Descargado",
"removeButton": "Eliminar base de datos",
"removingButton": "Eliminando...",
"downloadSuccess": "Base de datos de archivo de metadatos descargada exitosamente",
"downloadError": "Error al descargar la base de datos de archivo de metadatos",
"removeSuccess": "Base de datos de archivo de metadatos eliminada exitosamente",
"removeError": "Error al eliminar la base de datos de archivo de metadatos",
"removeConfirm": "¿Estás seguro de que quieres eliminar la base de datos de archivo de metadatos? Esto eliminará el archivo de base de datos local y tendrás que descargarlo de nuevo para usar esta función.",
"preparing": "Preparando descarga...",
"connecting": "Conectando al servidor de descarga...",
"completed": "Completado",
"downloadComplete": "Descarga completada exitosamente"
} }
}, },
"loras": { "loras": {

View File

@@ -16,7 +16,9 @@
"loading": "Chargement...", "loading": "Chargement...",
"unknown": "Inconnu", "unknown": "Inconnu",
"date": "Date", "date": "Date",
"version": "Version" "version": "Version",
"enabled": "Activé",
"disabled": "Désactivé"
}, },
"language": { "language": {
"select": "Langue", "select": "Langue",
@@ -178,7 +180,8 @@
"folderSettings": "Paramètres des dossiers", "folderSettings": "Paramètres des dossiers",
"downloadPathTemplates": "Modèles de chemin de téléchargement", "downloadPathTemplates": "Modèles de chemin de téléchargement",
"exampleImages": "Images d'exemple", "exampleImages": "Images d'exemple",
"misc": "Divers" "misc": "Divers",
"metadataArchive": "Base de données d'archive des métadonnées"
}, },
"contentFiltering": { "contentFiltering": {
"blurNsfwContent": "Flouter le contenu NSFW", "blurNsfwContent": "Flouter le contenu NSFW",
@@ -273,6 +276,30 @@
"misc": { "misc": {
"includeTriggerWords": "Inclure les mots-clés dans la syntaxe LoRA", "includeTriggerWords": "Inclure les mots-clés dans la syntaxe LoRA",
"includeTriggerWordsHelp": "Inclure les mots-clés d'entraînement lors de la copie de la syntaxe LoRA dans le presse-papiers" "includeTriggerWordsHelp": "Inclure les mots-clés d'entraînement lors de la copie de la syntaxe LoRA dans le presse-papiers"
},
"metadataArchive": {
"enableArchiveDb": "Activer la base de données d'archive des métadonnées",
"enableArchiveDbHelp": "Utiliser une base de données locale pour accéder aux métadonnées des modèles supprimés de Civitai.",
"status": "Statut",
"statusAvailable": "Disponible",
"statusUnavailable": "Non disponible",
"enabled": "Activé",
"management": "Gestion de la base de données",
"managementHelp": "Télécharger ou supprimer la base de données d'archive des métadonnées",
"downloadButton": "Télécharger la base de données",
"downloadingButton": "Téléchargement...",
"downloadedButton": "Téléchargé",
"removeButton": "Supprimer la base de données",
"removingButton": "Suppression...",
"downloadSuccess": "Base de données d'archive des métadonnées téléchargée avec succès",
"downloadError": "Échec du téléchargement de la base de données d'archive des métadonnées",
"removeSuccess": "Base de données d'archive des métadonnées supprimée avec succès",
"removeError": "Échec de la suppression de la base de données d'archive des métadonnées",
"removeConfirm": "Êtes-vous sûr de vouloir supprimer la base de données d'archive des métadonnées ? Cela supprimera le fichier local et vous devrez la télécharger à nouveau pour utiliser cette fonctionnalité.",
"preparing": "Préparation du téléchargement...",
"connecting": "Connexion au serveur de téléchargement...",
"completed": "Terminé",
"downloadComplete": "Téléchargement terminé avec succès"
} }
}, },
"loras": { "loras": {

View File

@@ -16,7 +16,9 @@
"loading": "読み込み中...", "loading": "読み込み中...",
"unknown": "不明", "unknown": "不明",
"date": "日付", "date": "日付",
"version": "バージョン" "version": "バージョン",
"enabled": "有効",
"disabled": "無効"
}, },
"language": { "language": {
"select": "言語", "select": "言語",
@@ -178,7 +180,8 @@
"folderSettings": "フォルダ設定", "folderSettings": "フォルダ設定",
"downloadPathTemplates": "ダウンロードパステンプレート", "downloadPathTemplates": "ダウンロードパステンプレート",
"exampleImages": "例画像", "exampleImages": "例画像",
"misc": "その他" "misc": "その他",
"metadataArchive": "メタデータアーカイブデータベース"
}, },
"contentFiltering": { "contentFiltering": {
"blurNsfwContent": "NSFWコンテンツをぼかす", "blurNsfwContent": "NSFWコンテンツをぼかす",
@@ -273,6 +276,30 @@
"misc": { "misc": {
"includeTriggerWords": "LoRA構文にトリガーワードを含める", "includeTriggerWords": "LoRA構文にトリガーワードを含める",
"includeTriggerWordsHelp": "LoRA構文をクリップボードにコピーする際、学習済みトリガーワードを含めます" "includeTriggerWordsHelp": "LoRA構文をクリップボードにコピーする際、学習済みトリガーワードを含めます"
},
"metadataArchive": {
"enableArchiveDb": "メタデータアーカイブデータベースを有効化",
"enableArchiveDbHelp": "Civitaiから削除されたモデルのメタデータにアクセスするためにローカルデータベースを使用します。",
"status": "ステータス",
"statusAvailable": "利用可能",
"statusUnavailable": "利用不可",
"enabled": "有効",
"management": "データベース管理",
"managementHelp": "メタデータアーカイブデータベースのダウンロードまたは削除",
"downloadButton": "データベースをダウンロード",
"downloadingButton": "ダウンロード中...",
"downloadedButton": "ダウンロード済み",
"removeButton": "データベースを削除",
"removingButton": "削除中...",
"downloadSuccess": "メタデータアーカイブデータベースのダウンロードが完了しました",
"downloadError": "メタデータアーカイブデータベースのダウンロードに失敗しました",
"removeSuccess": "メタデータアーカイブデータベースが削除されました",
"removeError": "メタデータアーカイブデータベースの削除に失敗しました",
"removeConfirm": "本当にメタデータアーカイブデータベースを削除しますか?ローカルのデータベースファイルが削除され、この機能を再度利用するには再ダウンロードが必要です。",
"preparing": "ダウンロードを準備中...",
"connecting": "ダウンロードサーバーに接続中...",
"completed": "完了",
"downloadComplete": "ダウンロードが正常に完了しました"
} }
}, },
"loras": { "loras": {

View File

@@ -16,7 +16,9 @@
"loading": "로딩 중...", "loading": "로딩 중...",
"unknown": "알 수 없음", "unknown": "알 수 없음",
"date": "날짜", "date": "날짜",
"version": "버전" "version": "버전",
"enabled": "활성화됨",
"disabled": "비활성화됨"
}, },
"language": { "language": {
"select": "언어", "select": "언어",
@@ -178,7 +180,8 @@
"folderSettings": "폴더 설정", "folderSettings": "폴더 설정",
"downloadPathTemplates": "다운로드 경로 템플릿", "downloadPathTemplates": "다운로드 경로 템플릿",
"exampleImages": "예시 이미지", "exampleImages": "예시 이미지",
"misc": "기타" "misc": "기타",
"metadataArchive": "메타데이터 아카이브 데이터베이스"
}, },
"contentFiltering": { "contentFiltering": {
"blurNsfwContent": "NSFW 콘텐츠 블러 처리", "blurNsfwContent": "NSFW 콘텐츠 블러 처리",
@@ -273,6 +276,30 @@
"misc": { "misc": {
"includeTriggerWords": "LoRA 문법에 트리거 단어 포함", "includeTriggerWords": "LoRA 문법에 트리거 단어 포함",
"includeTriggerWordsHelp": "LoRA 문법을 클립보드에 복사할 때 학습된 트리거 단어를 포함합니다" "includeTriggerWordsHelp": "LoRA 문법을 클립보드에 복사할 때 학습된 트리거 단어를 포함합니다"
},
"metadataArchive": {
"enableArchiveDb": "메타데이터 아카이브 데이터베이스 활성화",
"enableArchiveDbHelp": "Civitai에서 삭제된 모델의 메타데이터에 접근하기 위해 로컬 데이터베이스를 사용합니다.",
"status": "상태",
"statusAvailable": "사용 가능",
"statusUnavailable": "사용 불가",
"enabled": "활성화됨",
"management": "데이터베이스 관리",
"managementHelp": "메타데이터 아카이브 데이터베이스를 다운로드하거나 제거합니다",
"downloadButton": "데이터베이스 다운로드",
"downloadingButton": "다운로드 중...",
"downloadedButton": "다운로드 완료",
"removeButton": "데이터베이스 제거",
"removingButton": "제거 중...",
"downloadSuccess": "메타데이터 아카이브 데이터베이스가 성공적으로 다운로드되었습니다",
"downloadError": "메타데이터 아카이브 데이터베이스 다운로드 실패",
"removeSuccess": "메타데이터 아카이브 데이터베이스가 성공적으로 제거되었습니다",
"removeError": "메타데이터 아카이브 데이터베이스 제거 실패",
"removeConfirm": "메타데이터 아카이브 데이터베이스를 제거하시겠습니까? 이 작업은 로컬 데이터베이스 파일을 삭제하며, 이 기능을 사용하려면 다시 다운로드해야 합니다.",
"preparing": "다운로드 준비 중...",
"connecting": "다운로드 서버에 연결 중...",
"completed": "완료됨",
"downloadComplete": "다운로드가 성공적으로 완료되었습니다"
} }
}, },
"loras": { "loras": {

View File

@@ -16,7 +16,9 @@
"loading": "Загрузка...", "loading": "Загрузка...",
"unknown": "Неизвестно", "unknown": "Неизвестно",
"date": "Дата", "date": "Дата",
"version": "Версия" "version": "Версия",
"enabled": "Включено",
"disabled": "Отключено"
}, },
"language": { "language": {
"select": "Язык", "select": "Язык",
@@ -178,7 +180,8 @@
"folderSettings": "Настройки папок", "folderSettings": "Настройки папок",
"downloadPathTemplates": "Шаблоны путей загрузки", "downloadPathTemplates": "Шаблоны путей загрузки",
"exampleImages": "Примеры изображений", "exampleImages": "Примеры изображений",
"misc": "Разное" "misc": "Разное",
"metadataArchive": "Архив метаданных"
}, },
"contentFiltering": { "contentFiltering": {
"blurNsfwContent": "Размывать NSFW контент", "blurNsfwContent": "Размывать NSFW контент",
@@ -273,6 +276,30 @@
"misc": { "misc": {
"includeTriggerWords": "Включать триггерные слова в синтаксис LoRA", "includeTriggerWords": "Включать триггерные слова в синтаксис LoRA",
"includeTriggerWordsHelp": "Включать обученные триггерные слова при копировании синтаксиса LoRA в буфер обмена" "includeTriggerWordsHelp": "Включать обученные триггерные слова при копировании синтаксиса LoRA в буфер обмена"
},
"metadataArchive": {
"enableArchiveDb": "Включить архив метаданных",
"enableArchiveDbHelp": "Использовать локальную базу данных для доступа к метаданным моделей, удалённых с Civitai.",
"status": "Статус",
"statusAvailable": "Доступно",
"statusUnavailable": "Недоступно",
"enabled": "Включено",
"management": "Управление базой данных",
"managementHelp": "Скачать или удалить базу данных архива метаданных",
"downloadButton": "Скачать базу данных",
"downloadingButton": "Скачивание...",
"downloadedButton": "Скачано",
"removeButton": "Удалить базу данных",
"removingButton": "Удаление...",
"downloadSuccess": "База данных архива метаданных успешно загружена",
"downloadError": "Не удалось загрузить базу данных архива метаданных",
"removeSuccess": "База данных архива метаданных успешно удалена",
"removeError": "Не удалось удалить базу данных архива метаданных",
"removeConfirm": "Вы уверены, что хотите удалить базу данных архива метаданных? Это удалит локальный файл базы данных, и для использования этой функции потребуется повторная загрузка.",
"preparing": "Подготовка к загрузке...",
"connecting": "Подключение к серверу загрузки...",
"completed": "Завершено",
"downloadComplete": "Загрузка успешно завершена"
} }
}, },
"loras": { "loras": {

View File

@@ -16,7 +16,9 @@
"loading": "加载中...", "loading": "加载中...",
"unknown": "未知", "unknown": "未知",
"date": "日期", "date": "日期",
"version": "版本" "version": "版本",
"enabled": "已启用",
"disabled": "已禁用"
}, },
"language": { "language": {
"select": "语言", "select": "语言",
@@ -178,7 +180,8 @@
"folderSettings": "文件夹设置", "folderSettings": "文件夹设置",
"downloadPathTemplates": "下载路径模板", "downloadPathTemplates": "下载路径模板",
"exampleImages": "示例图片", "exampleImages": "示例图片",
"misc": "其他" "misc": "其他",
"metadataArchive": "元数据归档数据库"
}, },
"contentFiltering": { "contentFiltering": {
"blurNsfwContent": "模糊 NSFW 内容", "blurNsfwContent": "模糊 NSFW 内容",
@@ -273,6 +276,30 @@
"misc": { "misc": {
"includeTriggerWords": "复制 LoRA 语法时包含触发词", "includeTriggerWords": "复制 LoRA 语法时包含触发词",
"includeTriggerWordsHelp": "复制 LoRA 语法到剪贴板时包含训练触发词" "includeTriggerWordsHelp": "复制 LoRA 语法到剪贴板时包含训练触发词"
},
"metadataArchive": {
"enableArchiveDb": "启用元数据归档数据库",
"enableArchiveDbHelp": "使用本地数据库访问已从 Civitai 删除的模型元数据。",
"status": "状态",
"statusAvailable": "可用",
"statusUnavailable": "不可用",
"enabled": "已启用",
"management": "数据库管理",
"managementHelp": "下载或移除元数据归档数据库",
"downloadButton": "下载数据库",
"downloadingButton": "正在下载...",
"downloadedButton": "已下载",
"removeButton": "移除数据库",
"removingButton": "正在移除...",
"downloadSuccess": "元数据归档数据库下载成功",
"downloadError": "元数据归档数据库下载失败",
"removeSuccess": "元数据归档数据库移除成功",
"removeError": "元数据归档数据库移除失败",
"removeConfirm": "你确定要移除元数据归档数据库吗?这将删除本地数据库文件,如需使用此功能需重新下载。",
"preparing": "正在准备下载...",
"connecting": "正在连接下载服务器...",
"completed": "已完成",
"downloadComplete": "下载成功完成"
} }
}, },
"loras": { "loras": {

View File

@@ -16,7 +16,9 @@
"loading": "載入中...", "loading": "載入中...",
"unknown": "未知", "unknown": "未知",
"date": "日期", "date": "日期",
"version": "版本" "version": "版本",
"enabled": "已啟用",
"disabled": "已停用"
}, },
"language": { "language": {
"select": "語言", "select": "語言",
@@ -178,7 +180,8 @@
"folderSettings": "資料夾設定", "folderSettings": "資料夾設定",
"downloadPathTemplates": "下載路徑範本", "downloadPathTemplates": "下載路徑範本",
"exampleImages": "範例圖片", "exampleImages": "範例圖片",
"misc": "其他" "misc": "其他",
"metadataArchive": "中繼資料封存資料庫"
}, },
"contentFiltering": { "contentFiltering": {
"blurNsfwContent": "模糊 NSFW 內容", "blurNsfwContent": "模糊 NSFW 內容",
@@ -273,6 +276,30 @@
"misc": { "misc": {
"includeTriggerWords": "在 LoRA 語法中包含觸發詞", "includeTriggerWords": "在 LoRA 語法中包含觸發詞",
"includeTriggerWordsHelp": "複製 LoRA 語法到剪貼簿時包含訓練觸發詞" "includeTriggerWordsHelp": "複製 LoRA 語法到剪貼簿時包含訓練觸發詞"
},
"metadataArchive": {
"enableArchiveDb": "啟用中繼資料封存資料庫",
"enableArchiveDbHelp": "使用本機資料庫以存取已從 Civitai 刪除模型的中繼資料。",
"status": "狀態",
"statusAvailable": "可用",
"statusUnavailable": "不可用",
"enabled": "已啟用",
"management": "資料庫管理",
"managementHelp": "下載或移除中繼資料封存資料庫",
"downloadButton": "下載資料庫",
"downloadingButton": "下載中...",
"downloadedButton": "已下載",
"removeButton": "移除資料庫",
"removingButton": "移除中...",
"downloadSuccess": "中繼資料封存資料庫下載成功",
"downloadError": "下載中繼資料封存資料庫失敗",
"removeSuccess": "中繼資料封存資料庫移除成功",
"removeError": "移除中繼資料封存資料庫失敗",
"removeConfirm": "您確定要移除中繼資料封存資料庫嗎?這將刪除本機資料庫檔案,若要再次使用此功能需重新下載。",
"preparing": "準備下載中...",
"connecting": "正在連接下載伺服器...",
"completed": "已完成",
"downloadComplete": "下載成功完成"
} }
}, },
"loras": { "loras": {

View File

@@ -191,6 +191,9 @@ class LoraManager:
# Register DownloadManager with ServiceRegistry # Register DownloadManager with ServiceRegistry
await ServiceRegistry.get_download_manager() await ServiceRegistry.get_download_manager()
from .services.metadata_service import initialize_metadata_providers
await initialize_metadata_providers()
# Initialize WebSocket manager # Initialize WebSocket manager
await ServiceRegistry.get_websocket_manager() await ServiceRegistry.get_websocket_manager()

View File

@@ -6,6 +6,7 @@ import logging
from typing import Dict, Any from typing import Dict, Any
from ..base import RecipeMetadataParser from ..base import RecipeMetadataParser
from ..constants import GEN_PARAM_KEYS from ..constants import GEN_PARAM_KEYS
from ...services.metadata_service import get_default_metadata_provider
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -30,6 +31,9 @@ class AutomaticMetadataParser(RecipeMetadataParser):
async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]: async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]:
"""Parse metadata from Automatic1111 format""" """Parse metadata from Automatic1111 format"""
try: try:
# Get metadata provider instead of using civitai_client directly
metadata_provider = await get_default_metadata_provider()
# Split on Negative prompt if it exists # Split on Negative prompt if it exists
if "Negative prompt:" in user_comment: if "Negative prompt:" in user_comment:
parts = user_comment.split('Negative prompt:', 1) parts = user_comment.split('Negative prompt:', 1)
@@ -216,9 +220,9 @@ class AutomaticMetadataParser(RecipeMetadataParser):
} }
# Get additional info from Civitai # Get additional info from Civitai
if civitai_client: if metadata_provider:
try: try:
civitai_info = await civitai_client.get_model_version_info(resource.get("modelVersionId")) civitai_info = await metadata_provider.get_model_version_info(resource.get("modelVersionId"))
populated_entry = await self.populate_lora_from_civitai( populated_entry = await self.populate_lora_from_civitai(
lora_entry, lora_entry,
civitai_info, civitai_info,
@@ -271,11 +275,11 @@ class AutomaticMetadataParser(RecipeMetadataParser):
} }
# Try to get info from Civitai # Try to get info from Civitai
if civitai_client: if metadata_provider:
try: try:
if lora_hash: if lora_hash:
# If we have hash, use it for lookup # If we have hash, use it for lookup
civitai_info = await civitai_client.get_model_by_hash(lora_hash) civitai_info = await metadata_provider.get_model_by_hash(lora_hash)
else: else:
civitai_info = None civitai_info = None

View File

@@ -5,6 +5,7 @@ import logging
from typing import Dict, Any, Union from typing import Dict, Any, Union
from ..base import RecipeMetadataParser from ..base import RecipeMetadataParser
from ..constants import GEN_PARAM_KEYS from ..constants import GEN_PARAM_KEYS
from ...services.metadata_service import get_default_metadata_provider
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -36,12 +37,15 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
Args: Args:
metadata: The metadata from the image (dict) metadata: The metadata from the image (dict)
recipe_scanner: Optional recipe scanner service recipe_scanner: Optional recipe scanner service
civitai_client: Optional Civitai API client civitai_client: Optional Civitai API client (deprecated, use metadata_provider instead)
Returns: Returns:
Dict containing parsed recipe data Dict containing parsed recipe data
""" """
try: try:
# Get metadata provider instead of using civitai_client directly
metadata_provider = await get_default_metadata_provider()
# Initialize result structure # Initialize result structure
result = { result = {
'base_model': None, 'base_model': None,
@@ -85,9 +89,9 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
# Extract base model information - directly if available # Extract base model information - directly if available
if "baseModel" in metadata: if "baseModel" in metadata:
result["base_model"] = metadata["baseModel"] result["base_model"] = metadata["baseModel"]
elif "Model hash" in metadata and civitai_client: elif "Model hash" in metadata and metadata_provider:
model_hash = metadata["Model hash"] model_hash = metadata["Model hash"]
model_info = await civitai_client.get_model_by_hash(model_hash) model_info = await metadata_provider.get_model_by_hash(model_hash)
if model_info: if model_info:
result["base_model"] = model_info.get("baseModel", "") result["base_model"] = model_info.get("baseModel", "")
elif "Model" in metadata and isinstance(metadata.get("resources"), list): elif "Model" in metadata and isinstance(metadata.get("resources"), list):
@@ -95,8 +99,8 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
for resource in metadata.get("resources", []): for resource in metadata.get("resources", []):
if resource.get("type") == "model" and resource.get("name") == metadata.get("Model"): if resource.get("type") == "model" and resource.get("name") == metadata.get("Model"):
# This is likely the checkpoint model # This is likely the checkpoint model
if civitai_client and resource.get("hash"): if metadata_provider and resource.get("hash"):
model_info = await civitai_client.get_model_by_hash(resource.get("hash")) model_info = await metadata_provider.get_model_by_hash(resource.get("hash"))
if model_info: if model_info:
result["base_model"] = model_info.get("baseModel", "") result["base_model"] = model_info.get("baseModel", "")
@@ -138,9 +142,9 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
} }
# Try to get info from Civitai if hash is available # Try to get info from Civitai if hash is available
if lora_entry['hash'] and civitai_client: if lora_entry['hash'] and metadata_provider:
try: try:
civitai_info = await civitai_client.get_model_by_hash(lora_hash) civitai_info = await metadata_provider.get_model_by_hash(lora_hash)
populated_entry = await self.populate_lora_from_civitai( populated_entry = await self.populate_lora_from_civitai(
lora_entry, lora_entry,
@@ -194,10 +198,10 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
} }
# Try to get info from Civitai if modelVersionId is available # Try to get info from Civitai if modelVersionId is available
if version_id and civitai_client: if version_id and metadata_provider:
try: try:
# Use get_model_version_info instead of get_model_version # Use get_model_version_info instead of get_model_version
civitai_info, error = await civitai_client.get_model_version_info(version_id) civitai_info, error = await metadata_provider.get_model_version_info(version_id)
if error: if error:
logger.warning(f"Error getting model version info: {error}") logger.warning(f"Error getting model version info: {error}")
@@ -259,11 +263,11 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
'isDeleted': False 'isDeleted': False
} }
# If we have a version ID and civitai client, try to get more info # If we have a version ID and metadata provider, try to get more info
if version_id and civitai_client: if version_id and metadata_provider:
try: try:
# Use get_model_version_info with the version ID # Use get_model_version_info with the version ID
civitai_info, error = await civitai_client.get_model_version_info(version_id) civitai_info, error = await metadata_provider.get_model_version_info(version_id)
if error: if error:
logger.warning(f"Error getting model version info: {error}") logger.warning(f"Error getting model version info: {error}")
@@ -316,9 +320,9 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
} }
# Try to get info from Civitai if hash is available # Try to get info from Civitai if hash is available
if lora_entry['hash'] and civitai_client: if lora_entry['hash'] and metadata_provider:
try: try:
civitai_info = await civitai_client.get_model_by_hash(lora_hash) civitai_info = await metadata_provider.get_model_by_hash(lora_hash)
populated_entry = await self.populate_lora_from_civitai( populated_entry = await self.populate_lora_from_civitai(
lora_entry, lora_entry,

View File

@@ -6,6 +6,7 @@ import logging
from typing import Dict, Any from typing import Dict, Any
from ..base import RecipeMetadataParser from ..base import RecipeMetadataParser
from ..constants import GEN_PARAM_KEYS from ..constants import GEN_PARAM_KEYS
from ...services.metadata_service import get_default_metadata_provider
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -26,6 +27,9 @@ class ComfyMetadataParser(RecipeMetadataParser):
async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]: async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]:
"""Parse metadata from Civitai ComfyUI metadata format""" """Parse metadata from Civitai ComfyUI metadata format"""
try: try:
# Get metadata provider instead of using civitai_client directly
metadata_provider = await get_default_metadata_provider()
data = json.loads(user_comment) data = json.loads(user_comment)
loras = [] loras = []
@@ -73,10 +77,10 @@ class ComfyMetadataParser(RecipeMetadataParser):
'isDeleted': False 'isDeleted': False
} }
# Get additional info from Civitai if client is available # Get additional info from Civitai if metadata provider is available
if civitai_client: if metadata_provider:
try: try:
civitai_info_tuple = await civitai_client.get_model_version_info(model_version_id) civitai_info_tuple = await metadata_provider.get_model_version_info(model_version_id)
# Populate lora entry with Civitai info # Populate lora entry with Civitai info
populated_entry = await self.populate_lora_from_civitai( populated_entry = await self.populate_lora_from_civitai(
lora_entry, lora_entry,
@@ -116,9 +120,9 @@ class ComfyMetadataParser(RecipeMetadataParser):
} }
# Get additional checkpoint info from Civitai # Get additional checkpoint info from Civitai
if civitai_client: if metadata_provider:
try: try:
civitai_info_tuple = await civitai_client.get_model_version_info(checkpoint_version_id) civitai_info_tuple = await metadata_provider.get_model_version_info(checkpoint_version_id)
civitai_info, _ = civitai_info_tuple if isinstance(civitai_info_tuple, tuple) else (civitai_info_tuple, None) civitai_info, _ = civitai_info_tuple if isinstance(civitai_info_tuple, tuple) else (civitai_info_tuple, None)
# Populate checkpoint with Civitai info # Populate checkpoint with Civitai info
checkpoint = await self.populate_checkpoint_from_civitai(checkpoint, civitai_info) checkpoint = await self.populate_checkpoint_from_civitai(checkpoint, civitai_info)

View File

@@ -5,6 +5,7 @@ import logging
from typing import Dict, Any from typing import Dict, Any
from ..base import RecipeMetadataParser from ..base import RecipeMetadataParser
from ..constants import GEN_PARAM_KEYS from ..constants import GEN_PARAM_KEYS
from ...services.metadata_service import get_default_metadata_provider
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -18,8 +19,11 @@ class MetaFormatParser(RecipeMetadataParser):
return re.search(self.METADATA_MARKER, user_comment, re.IGNORECASE | re.DOTALL) is not None return re.search(self.METADATA_MARKER, user_comment, re.IGNORECASE | re.DOTALL) is not None
async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]: async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]:
"""Parse metadata from images with meta format metadata""" """Parse metadata from images with meta format metadata (Lora_N Model hash format)"""
try: try:
# Get metadata provider instead of using civitai_client directly
metadata_provider = await get_default_metadata_provider()
# Extract prompt and negative prompt # Extract prompt and negative prompt
parts = user_comment.split('Negative prompt:', 1) parts = user_comment.split('Negative prompt:', 1)
prompt = parts[0].strip() prompt = parts[0].strip()
@@ -122,9 +126,9 @@ class MetaFormatParser(RecipeMetadataParser):
} }
# Get info from Civitai by hash if available # Get info from Civitai by hash if available
if civitai_client and hash_value: if metadata_provider and hash_value:
try: try:
civitai_info = await civitai_client.get_model_by_hash(hash_value) civitai_info = await metadata_provider.get_model_by_hash(hash_value)
# Populate lora entry with Civitai info # Populate lora entry with Civitai info
populated_entry = await self.populate_lora_from_civitai( populated_entry = await self.populate_lora_from_civitai(
lora_entry, lora_entry,

View File

@@ -7,6 +7,7 @@ from typing import Dict, Any
from ...config import config from ...config import config
from ..base import RecipeMetadataParser from ..base import RecipeMetadataParser
from ..constants import GEN_PARAM_KEYS from ..constants import GEN_PARAM_KEYS
from ...services.metadata_service import get_default_metadata_provider
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -23,6 +24,9 @@ class RecipeFormatParser(RecipeMetadataParser):
async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]: async def parse_metadata(self, user_comment: str, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]:
"""Parse metadata from images with dedicated recipe metadata format""" """Parse metadata from images with dedicated recipe metadata format"""
try: try:
# Get metadata provider instead of using civitai_client directly
metadata_provider = await get_default_metadata_provider()
# Extract recipe metadata from user comment # Extract recipe metadata from user comment
try: try:
# Look for recipe metadata section # Look for recipe metadata section
@@ -71,9 +75,9 @@ class RecipeFormatParser(RecipeMetadataParser):
lora_entry['localPath'] = None lora_entry['localPath'] = None
# Try to get additional info from Civitai if we have a model version ID # Try to get additional info from Civitai if we have a model version ID
if lora.get('modelVersionId') and civitai_client: if lora.get('modelVersionId') and metadata_provider:
try: try:
civitai_info_tuple = await civitai_client.get_model_version_info(lora['modelVersionId']) civitai_info_tuple = await metadata_provider.get_model_version_info(lora['modelVersionId'])
# Populate lora entry with Civitai info # Populate lora entry with Civitai info
populated_entry = await self.populate_lora_from_civitai( populated_entry = await self.populate_lora_from_civitai(
lora_entry, lora_entry,

View File

@@ -611,10 +611,10 @@ class BaseModelRoutes(ABC):
success = 0 success = 0
needs_resort = False needs_resort = False
# Prepare models to process # Prepare models to process, only those without CivitAI data
to_process = [ to_process = [
model for model in cache.raw_data model for model in cache.raw_data
if model.get('sha256') and (not model.get('civitai') or 'id' not in model.get('civitai')) and model.get('from_civitai', True) if model.get('sha256') and (not model.get('civitai') or 'id' not in model.get('civitai'))
] ]
total_to_process = len(to_process) total_to_process = len(to_process)

View File

@@ -4,6 +4,7 @@ from aiohttp import web
from .base_model_routes import BaseModelRoutes from .base_model_routes import BaseModelRoutes
from ..services.checkpoint_service import CheckpointService from ..services.checkpoint_service import CheckpointService
from ..services.service_registry import ServiceRegistry from ..services.service_registry import ServiceRegistry
from ..services.metadata_service import get_default_metadata_provider
from ..config import config from ..config import config
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -15,14 +16,12 @@ class CheckpointRoutes(BaseModelRoutes):
"""Initialize Checkpoint routes with Checkpoint service""" """Initialize Checkpoint routes with Checkpoint service"""
# Service will be initialized later via setup_routes # Service will be initialized later via setup_routes
self.service = None self.service = None
self.civitai_client = None
self.template_name = "checkpoints.html" self.template_name = "checkpoints.html"
async def initialize_services(self): async def initialize_services(self):
"""Initialize services from ServiceRegistry""" """Initialize services from ServiceRegistry"""
checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner() checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
self.service = CheckpointService(checkpoint_scanner) self.service = CheckpointService(checkpoint_scanner)
self.civitai_client = await ServiceRegistry.get_civitai_client()
# Initialize parent with the service # Initialize parent with the service
super().__init__(self.service) super().__init__(self.service)
@@ -66,7 +65,8 @@ class CheckpointRoutes(BaseModelRoutes):
"""Get available versions for a Civitai checkpoint model with local availability info""" """Get available versions for a Civitai checkpoint model with local availability info"""
try: try:
model_id = request.match_info['model_id'] model_id = request.match_info['model_id']
response = await self.civitai_client.get_model_versions(model_id) metadata_provider = await get_default_metadata_provider()
response = await metadata_provider.get_model_versions(model_id)
if not response or not response.get('modelVersions'): if not response or not response.get('modelVersions'):
return web.Response(status=404, text="Model not found") return web.Response(status=404, text="Model not found")

View File

@@ -4,6 +4,7 @@ from aiohttp import web
from .base_model_routes import BaseModelRoutes from .base_model_routes import BaseModelRoutes
from ..services.embedding_service import EmbeddingService from ..services.embedding_service import EmbeddingService
from ..services.service_registry import ServiceRegistry from ..services.service_registry import ServiceRegistry
from ..services.metadata_service import get_default_metadata_provider
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -14,14 +15,12 @@ class EmbeddingRoutes(BaseModelRoutes):
"""Initialize Embedding routes with Embedding service""" """Initialize Embedding routes with Embedding service"""
# Service will be initialized later via setup_routes # Service will be initialized later via setup_routes
self.service = None self.service = None
self.civitai_client = None
self.template_name = "embeddings.html" self.template_name = "embeddings.html"
async def initialize_services(self): async def initialize_services(self):
"""Initialize services from ServiceRegistry""" """Initialize services from ServiceRegistry"""
embedding_scanner = await ServiceRegistry.get_embedding_scanner() embedding_scanner = await ServiceRegistry.get_embedding_scanner()
self.service = EmbeddingService(embedding_scanner) self.service = EmbeddingService(embedding_scanner)
self.civitai_client = await ServiceRegistry.get_civitai_client()
# Initialize parent with the service # Initialize parent with the service
super().__init__(self.service) super().__init__(self.service)
@@ -61,7 +60,8 @@ class EmbeddingRoutes(BaseModelRoutes):
"""Get available versions for a Civitai embedding model with local availability info""" """Get available versions for a Civitai embedding model with local availability info"""
try: try:
model_id = request.match_info['model_id'] model_id = request.match_info['model_id']
response = await self.civitai_client.get_model_versions(model_id) metadata_provider = await get_default_metadata_provider()
response = await metadata_provider.get_model_versions(model_id)
if not response or not response.get('modelVersions'): if not response or not response.get('modelVersions'):
return web.Response(status=404, text="Model not found") return web.Response(status=404, text="Model not found")

View File

@@ -7,7 +7,7 @@ from server import PromptServer # type: ignore
from .base_model_routes import BaseModelRoutes from .base_model_routes import BaseModelRoutes
from ..services.lora_service import LoraService from ..services.lora_service import LoraService
from ..services.service_registry import ServiceRegistry from ..services.service_registry import ServiceRegistry
from ..utils.routes_common import ModelRouteUtils from ..services.metadata_service import get_default_metadata_provider
from ..utils.utils import get_lora_info from ..utils.utils import get_lora_info
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -19,14 +19,12 @@ class LoraRoutes(BaseModelRoutes):
"""Initialize LoRA routes with LoRA service""" """Initialize LoRA routes with LoRA service"""
# Service will be initialized later via setup_routes # Service will be initialized later via setup_routes
self.service = None self.service = None
self.civitai_client = None
self.template_name = "loras.html" self.template_name = "loras.html"
async def initialize_services(self): async def initialize_services(self):
"""Initialize services from ServiceRegistry""" """Initialize services from ServiceRegistry"""
lora_scanner = await ServiceRegistry.get_lora_scanner() lora_scanner = await ServiceRegistry.get_lora_scanner()
self.service = LoraService(lora_scanner) self.service = LoraService(lora_scanner)
self.civitai_client = await ServiceRegistry.get_civitai_client()
# Initialize parent with the service # Initialize parent with the service
super().__init__(self.service) super().__init__(self.service)
@@ -217,7 +215,8 @@ class LoraRoutes(BaseModelRoutes):
"""Get available versions for a Civitai LoRA model with local availability info""" """Get available versions for a Civitai LoRA model with local availability info"""
try: try:
model_id = request.match_info['model_id'] model_id = request.match_info['model_id']
response = await self.civitai_client.get_model_versions(model_id) metadata_provider = await get_default_metadata_provider()
response = await metadata_provider.get_model_versions(model_id)
if not response or not response.get('modelVersions'): if not response or not response.get('modelVersions'):
return web.Response(status=404, text="Model not found") return web.Response(status=404, text="Model not found")
@@ -261,8 +260,9 @@ class LoraRoutes(BaseModelRoutes):
try: try:
model_version_id = request.match_info.get('modelVersionId') model_version_id = request.match_info.get('modelVersionId')
# Get model details from Civitai API # Get model details from metadata provider
model, error_msg = await self.civitai_client.get_model_version_info(model_version_id) metadata_provider = await get_default_metadata_provider()
model, error_msg = await metadata_provider.get_model_version_info(model_version_id)
if not model: if not model:
# Log warning for failed model retrieval # Log warning for failed model retrieval
@@ -288,7 +288,8 @@ class LoraRoutes(BaseModelRoutes):
"""Get CivitAI model details by hash""" """Get CivitAI model details by hash"""
try: try:
hash = request.match_info.get('hash') hash = request.match_info.get('hash')
model = await self.civitai_client.get_model_by_hash(hash) metadata_provider = await get_default_metadata_provider()
model = await metadata_provider.get_model_by_hash(hash)
return web.json_response(model) return web.json_response(model)
except Exception as e: except Exception as e:
logger.error(f"Error fetching model details by hash: {e}") logger.error(f"Error fetching model details by hash: {e}")

View File

@@ -11,6 +11,8 @@ from ..utils.lora_metadata import extract_trained_words
from ..config import config from ..config import config
from ..utils.constants import SUPPORTED_MEDIA_EXTENSIONS, NODE_TYPES, DEFAULT_NODE_COLOR from ..utils.constants import SUPPORTED_MEDIA_EXTENSIONS, NODE_TYPES, DEFAULT_NODE_COLOR
from ..services.service_registry import ServiceRegistry from ..services.service_registry import ServiceRegistry
from ..services.metadata_service import get_metadata_archive_manager, update_metadata_providers
from ..services.websocket_manager import ws_manager
import re import re
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -113,6 +115,11 @@ class MiscRoutes:
# Add new route for checking if a model exists in the library # Add new route for checking if a model exists in the library
app.router.add_get('/api/check-model-exists', MiscRoutes.check_model_exists) app.router.add_get('/api/check-model-exists', MiscRoutes.check_model_exists)
# Add routes for metadata archive database management
app.router.add_post('/api/download-metadata-archive', MiscRoutes.download_metadata_archive)
app.router.add_post('/api/remove-metadata-archive', MiscRoutes.remove_metadata_archive)
app.router.add_get('/api/metadata-archive-status', MiscRoutes.get_metadata_archive_status)
@staticmethod @staticmethod
async def clear_cache(request): async def clear_cache(request):
"""Clear all cache files from the cache folder""" """Clear all cache files from the cache folder"""
@@ -185,6 +192,9 @@ class MiscRoutes:
# Save to settings # Save to settings
settings.set(key, value) settings.set(key, value)
if key == 'enable_metadata_archive_db':
await update_metadata_providers()
return web.json_response({'success': True}) return web.json_response({'success': True})
except Exception as e: except Exception as e:
logger.error(f"Error updating settings: {e}", exc_info=True) logger.error(f"Error updating settings: {e}", exc_info=True)
@@ -697,3 +707,116 @@ class MiscRoutes:
'success': False, 'success': False,
'error': str(e) 'error': str(e)
}, status=500) }, status=500)
@staticmethod
async def download_metadata_archive(request):
"""Download and extract the metadata archive database"""
try:
archive_manager = await get_metadata_archive_manager()
# Get the download_id from query parameters if provided
download_id = request.query.get('download_id')
# Progress callback to send updates via WebSocket
def progress_callback(stage, message):
data = {
'stage': stage,
'message': message,
'type': 'metadata_archive_download'
}
if download_id:
# Send to specific download WebSocket if download_id is provided
asyncio.create_task(ws_manager.broadcast_download_progress(download_id, data))
else:
# Fallback to general broadcast
asyncio.create_task(ws_manager.broadcast(data))
# Download and extract in background
success = await archive_manager.download_and_extract_database(progress_callback)
if success:
# Update settings to enable metadata archive
settings.set('enable_metadata_archive_db', True)
# Update metadata providers
await update_metadata_providers()
return web.json_response({
'success': True,
'message': 'Metadata archive database downloaded and extracted successfully'
})
else:
return web.json_response({
'success': False,
'error': 'Failed to download and extract metadata archive database'
}, status=500)
except Exception as e:
logger.error(f"Error downloading metadata archive: {e}", exc_info=True)
return web.json_response({
'success': False,
'error': str(e)
}, status=500)
@staticmethod
async def remove_metadata_archive(request):
"""Remove the metadata archive database"""
try:
archive_manager = await get_metadata_archive_manager()
success = await archive_manager.remove_database()
if success:
# Update settings to disable metadata archive
settings.set('enable_metadata_archive_db', False)
# Update metadata providers
await update_metadata_providers()
return web.json_response({
'success': True,
'message': 'Metadata archive database removed successfully'
})
else:
return web.json_response({
'success': False,
'error': 'Failed to remove metadata archive database'
}, status=500)
except Exception as e:
logger.error(f"Error removing metadata archive: {e}", exc_info=True)
return web.json_response({
'success': False,
'error': str(e)
}, status=500)
@staticmethod
async def get_metadata_archive_status(request):
"""Get the status of metadata archive database"""
try:
archive_manager = await get_metadata_archive_manager()
is_available = archive_manager.is_database_available()
is_enabled = settings.get('enable_metadata_archive_db', False)
db_size = 0
if is_available:
db_path = archive_manager.get_database_path()
if db_path and os.path.exists(db_path):
db_size = os.path.getsize(db_path)
return web.json_response({
'success': True,
'isAvailable': is_available,
'isEnabled': is_enabled,
'databaseSize': db_size,
'databasePath': archive_manager.get_database_path() if is_available else None
})
except Exception as e:
logger.error(f"Error getting metadata archive status: {e}", exc_info=True)
return web.json_response({
'success': False,
'error': str(e)
}, status=500)

View File

@@ -24,6 +24,7 @@ from ..config import config
standalone_mode = 'nodes' not in sys.modules standalone_mode = 'nodes' not in sys.modules
from ..services.service_registry import ServiceRegistry # Add ServiceRegistry import from ..services.service_registry import ServiceRegistry # Add ServiceRegistry import
from ..services.downloader import get_downloader
# Only import MetadataRegistry in non-standalone mode # Only import MetadataRegistry in non-standalone mode
if not standalone_mode: if not standalone_mode:
@@ -372,21 +373,23 @@ class RecipeRoutes:
"loras": [] "loras": []
}, status=400) }, status=400)
# Download image directly from URL # Download image using unified downloader
session = await self.civitai_client.session downloader = await get_downloader()
# Create a temporary file to save the downloaded image # Create a temporary file to save the downloaded image
with tempfile.NamedTemporaryFile(delete=False, suffix='.jpg') as temp_file: with tempfile.NamedTemporaryFile(delete=False, suffix='.jpg') as temp_file:
temp_path = temp_file.name temp_path = temp_file.name
async with session.get(image_url) as response: success, result = await downloader.download_file(
if response.status != 200: image_url,
return web.json_response({ temp_path,
"error": f"Failed to download image from URL: HTTP {response.status}", use_auth=False # Image downloads typically don't need auth
"loras": [] )
}, status=400)
with open(temp_path, 'wb') as f: if not success:
f.write(await response.read()) return web.json_response({
"error": f"Failed to download image from URL: {result}",
"loras": []
}, status=400)
# Use meta field from image_info as metadata # Use meta field from image_info as metadata
if 'meta' in image_info: if 'meta' in image_info:
@@ -430,8 +433,7 @@ class RecipeRoutes:
# Parse the metadata # Parse the metadata
result = await parser.parse_metadata( result = await parser.parse_metadata(
metadata, metadata,
recipe_scanner=self.recipe_scanner, recipe_scanner=self.recipe_scanner
civitai_client=self.civitai_client
) )
# For URL mode, include the image data as base64 # For URL mode, include the image data as base64
@@ -532,8 +534,7 @@ class RecipeRoutes:
# Parse the metadata # Parse the metadata
result = await parser.parse_metadata( result = await parser.parse_metadata(
metadata, metadata,
recipe_scanner=self.recipe_scanner, recipe_scanner=self.recipe_scanner
civitai_client=self.civitai_client
) )
# Add base64 image data to result # Add base64 image data to result

View File

@@ -1,5 +1,4 @@
import os import os
import aiohttp
import logging import logging
import toml import toml
import git import git
@@ -8,7 +7,7 @@ import shutil
import tempfile import tempfile
from aiohttp import web from aiohttp import web
from typing import Dict, List from typing import Dict, List
from ..services.downloader import get_downloader, Downloader
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -162,28 +161,42 @@ class UpdateRoutes:
github_api = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases/latest" github_api = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases/latest"
try: try:
async with aiohttp.ClientSession() as session: downloader = await get_downloader()
async with session.get(github_api) as resp:
if resp.status != 200:
logger.error(f"Failed to fetch release info: {resp.status}")
return False, ""
data = await resp.json()
zip_url = data.get("zipball_url")
version = data.get("tag_name", "unknown")
# Download ZIP # Get release info
async with session.get(zip_url) as zip_resp: success, data = await downloader.make_request(
if zip_resp.status != 200: 'GET',
logger.error(f"Failed to download ZIP: {zip_resp.status}") github_api,
return False, "" use_auth=False
with tempfile.NamedTemporaryFile(delete=False, suffix=".zip") as tmp_zip: )
tmp_zip.write(await zip_resp.read()) if not success:
zip_path = tmp_zip.name logger.error(f"Failed to fetch release info: {data}")
return False, ""
UpdateRoutes._clean_plugin_folder(plugin_root, skip_files=['settings.json']) zip_url = data.get("zipball_url")
version = data.get("tag_name", "unknown")
# Extract ZIP to temp dir # Download ZIP to temporary file
with tempfile.TemporaryDirectory() as tmp_dir: with tempfile.NamedTemporaryFile(delete=False, suffix=".zip") as tmp_zip:
tmp_zip_path = tmp_zip.name
success, result = await downloader.download_file(
url=zip_url,
save_path=tmp_zip_path,
use_auth=False,
allow_resume=False
)
if not success:
logger.error(f"Failed to download ZIP: {result}")
return False, ""
zip_path = tmp_zip_path
UpdateRoutes._clean_plugin_folder(plugin_root, skip_files=['settings.json'])
# Extract ZIP to temp dir
with tempfile.TemporaryDirectory() as tmp_dir:
with zipfile.ZipFile(zip_path, 'r') as zip_ref: with zipfile.ZipFile(zip_path, 'r') as zip_ref:
zip_ref.extractall(tmp_dir) zip_ref.extractall(tmp_dir)
# Find extracted folder (GitHub ZIP contains a root folder) # Find extracted folder (GitHub ZIP contains a root folder)
@@ -213,9 +226,9 @@ class UpdateRoutes:
with open(tracking_info_file, "w", encoding='utf-8') as file: with open(tracking_info_file, "w", encoding='utf-8') as file:
file.write('\n'.join(tracking_files)) file.write('\n'.join(tracking_files))
os.remove(zip_path) os.remove(zip_path)
logger.info(f"Updated plugin via ZIP to {version}") logger.info(f"Updated plugin via ZIP to {version}")
return True, version return True, version
except Exception as e: except Exception as e:
logger.error(f"ZIP update failed: {e}", exc_info=True) logger.error(f"ZIP update failed: {e}", exc_info=True)
@@ -244,23 +257,23 @@ class UpdateRoutes:
github_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/commits/main" github_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/commits/main"
try: try:
async with aiohttp.ClientSession() as session: downloader = await Downloader.get_instance()
async with session.get(github_url, headers={'Accept': 'application/vnd.github+json'}) as response: success, data = await downloader.make_request('GET', github_url, custom_headers={'Accept': 'application/vnd.github+json'})
if response.status != 200:
logger.warning(f"Failed to fetch GitHub commit: {response.status}")
return "main", []
data = await response.json() if not success:
commit_sha = data.get('sha', '')[:7] # Short hash logger.warning(f"Failed to fetch GitHub commit: {data}")
commit_message = data.get('commit', {}).get('message', '') return "main", []
# Format as "main-{short_hash}" commit_sha = data.get('sha', '')[:7] # Short hash
version = f"main-{commit_sha}" commit_message = data.get('commit', {}).get('message', '')
# Use commit message as changelog # Format as "main-{short_hash}"
changelog = [commit_message] if commit_message else [] version = f"main-{commit_sha}"
return version, changelog # Use commit message as changelog
changelog = [commit_message] if commit_message else []
return version, changelog
except Exception as e: except Exception as e:
logger.error(f"Error fetching nightly version: {e}", exc_info=True) logger.error(f"Error fetching nightly version: {e}", exc_info=True)
@@ -410,22 +423,22 @@ class UpdateRoutes:
github_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases/latest" github_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases/latest"
try: try:
async with aiohttp.ClientSession() as session: downloader = await Downloader.get_instance()
async with session.get(github_url, headers={'Accept': 'application/vnd.github+json'}) as response: success, data = await downloader.make_request('GET', github_url, custom_headers={'Accept': 'application/vnd.github+json'})
if response.status != 200:
logger.warning(f"Failed to fetch GitHub release: {response.status}")
return "v0.0.0", []
data = await response.json() if not success:
version = data.get('tag_name', '') logger.warning(f"Failed to fetch GitHub release: {data}")
if not version.startswith('v'): return "v0.0.0", []
version = f"v{version}"
# Extract changelog from release notes version = data.get('tag_name', '')
body = data.get('body', '') if not version.startswith('v'):
changelog = UpdateRoutes._parse_changelog(body) version = f"v{version}"
return version, changelog # Extract changelog from release notes
body = data.get('body', '')
changelog = UpdateRoutes._parse_changelog(body)
return version, changelog
except Exception as e: except Exception as e:
logger.error(f"Error fetching remote version: {e}", exc_info=True) logger.error(f"Error fetching remote version: {e}", exc_info=True)

View File

@@ -1,11 +1,10 @@
from datetime import datetime from datetime import datetime
import aiohttp
import os import os
import logging import logging
import asyncio import asyncio
from email.parser import Parser
from typing import Optional, Dict, Tuple, List from typing import Optional, Dict, Tuple, List
from urllib.parse import unquote from .model_metadata_provider import CivitaiModelMetadataProvider, ModelMetadataProviderManager
from .downloader import get_downloader
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -19,6 +18,11 @@ class CivitaiClient:
async with cls._lock: async with cls._lock:
if cls._instance is None: if cls._instance is None:
cls._instance = cls() cls._instance = cls()
# Register this client as a metadata provider
provider_manager = await ModelMetadataProviderManager.get_instance()
provider_manager.register_provider('civitai', CivitaiModelMetadataProvider(cls._instance), True)
return cls._instance return cls._instance
def __init__(self): def __init__(self):
@@ -28,80 +32,8 @@ class CivitaiClient:
self._initialized = True self._initialized = True
self.base_url = "https://civitai.com/api/v1" self.base_url = "https://civitai.com/api/v1"
self.headers = {
'User-Agent': 'ComfyUI-LoRA-Manager/1.0'
}
self._session = None
self._session_created_at = None
# Adjust chunk size based on storage type - consider making this configurable
self.chunk_size = 4 * 1024 * 1024 # 4MB chunks for better HDD throughput
@property async def download_file(self, url: str, save_dir: str, default_filename: str, progress_callback=None) -> Tuple[bool, str]:
async def session(self) -> aiohttp.ClientSession:
"""Lazy initialize the session"""
if self._session is None:
# Optimize TCP connection parameters
connector = aiohttp.TCPConnector(
ssl=True,
limit=8, # Increase from 3 to 8 for better parallelism
ttl_dns_cache=300, # Enable DNS caching with reasonable timeout
force_close=False, # Keep connections for reuse
enable_cleanup_closed=True
)
trust_env = True # Allow using system environment proxy settings
# Configure timeout parameters - increase read timeout for large files and remove sock_read timeout
timeout = aiohttp.ClientTimeout(total=None, connect=60, sock_read=None)
self._session = aiohttp.ClientSession(
connector=connector,
trust_env=trust_env,
timeout=timeout
)
self._session_created_at = datetime.now()
return self._session
async def _ensure_fresh_session(self):
"""Refresh session if it's been open too long"""
if self._session is not None:
if not hasattr(self, '_session_created_at') or \
(datetime.now() - self._session_created_at).total_seconds() > 300: # 5 minutes
await self.close()
self._session = None
return await self.session
def _parse_content_disposition(self, header: str) -> str:
"""Parse filename from content-disposition header"""
if not header:
return None
# Handle quoted filenames
if 'filename="' in header:
start = header.index('filename="') + 10
end = header.index('"', start)
return unquote(header[start:end])
# Fallback to original parsing
disposition = Parser().parsestr(f'Content-Disposition: {header}')
filename = disposition.get_param('filename')
if filename:
return unquote(filename)
return None
def _get_request_headers(self) -> dict:
"""Get request headers with optional API key"""
headers = {
'User-Agent': 'ComfyUI-LoRA-Manager/1.0',
'Content-Type': 'application/json'
}
from .settings_manager import settings
api_key = settings.get('civitai_api_key')
if (api_key):
headers['Authorization'] = f'Bearer {api_key}'
return headers
async def _download_file(self, url: str, save_dir: str, default_filename: str, progress_callback=None) -> Tuple[bool, str]:
"""Download file with resumable downloads and retry mechanism """Download file with resumable downloads and retry mechanism
Args: Args:
@@ -113,215 +45,69 @@ class CivitaiClient:
Returns: Returns:
Tuple[bool, str]: (success, save_path or error message) Tuple[bool, str]: (success, save_path or error message)
""" """
max_retries = 5 downloader = await get_downloader()
retry_count = 0
base_delay = 2.0 # Base delay for exponential backoff
# Initial setup
session = await self._ensure_fresh_session()
save_path = os.path.join(save_dir, default_filename) save_path = os.path.join(save_dir, default_filename)
part_path = save_path + '.part'
# Get existing file size for resume # Use unified downloader with CivitAI authentication
resume_offset = 0 success, result = await downloader.download_file(
if os.path.exists(part_path): url=url,
resume_offset = os.path.getsize(part_path) save_path=save_path,
logger.info(f"Resuming download from offset {resume_offset} bytes") progress_callback=progress_callback,
use_auth=True, # Enable CivitAI authentication
allow_resume=True
)
total_size = 0 return success, result
filename = default_filename
while retry_count <= max_retries:
try:
headers = self._get_request_headers()
# Add Range header for resume if we have partial data
if resume_offset > 0:
headers['Range'] = f'bytes={resume_offset}-'
# Add Range header to allow resumable downloads
headers['Accept-Encoding'] = 'identity' # Disable compression for better chunked downloads
logger.debug(f"Download attempt {retry_count + 1}/{max_retries + 1} from: {url}")
if resume_offset > 0:
logger.debug(f"Requesting range from byte {resume_offset}")
async with session.get(url, headers=headers, allow_redirects=True) as response:
# Handle different response codes
if response.status == 200:
# Full content response
if resume_offset > 0:
# Server doesn't support ranges, restart from beginning
logger.warning("Server doesn't support range requests, restarting download")
resume_offset = 0
if os.path.exists(part_path):
os.remove(part_path)
elif response.status == 206:
# Partial content response (resume successful)
content_range = response.headers.get('Content-Range')
if content_range:
# Parse total size from Content-Range header (e.g., "bytes 1024-2047/2048")
range_parts = content_range.split('/')
if len(range_parts) == 2:
total_size = int(range_parts[1])
logger.info(f"Successfully resumed download from byte {resume_offset}")
elif response.status == 416:
# Range not satisfiable - file might be complete or corrupted
if os.path.exists(part_path):
part_size = os.path.getsize(part_path)
logger.warning(f"Range not satisfiable. Part file size: {part_size}")
# Try to get actual file size
head_response = await session.head(url, headers=self._get_request_headers())
if head_response.status == 200:
actual_size = int(head_response.headers.get('content-length', 0))
if part_size == actual_size:
# File is complete, just rename it
os.rename(part_path, save_path)
if progress_callback:
await progress_callback(100)
return True, save_path
# Remove corrupted part file and restart
os.remove(part_path)
resume_offset = 0
continue
elif response.status == 401:
logger.warning(f"Unauthorized access to resource: {url} (Status 401)")
return False, "Invalid or missing CivitAI API key, or early access restriction."
elif response.status == 403:
logger.warning(f"Forbidden access to resource: {url} (Status 403)")
return False, "Access forbidden: You don't have permission to download this file."
else:
logger.error(f"Download failed for {url} with status {response.status}")
return False, f"Download failed with status {response.status}"
# Get total file size for progress calculation (if not set from Content-Range)
if total_size == 0:
total_size = int(response.headers.get('content-length', 0))
if response.status == 206:
# For partial content, add the offset to get total file size
total_size += resume_offset
current_size = resume_offset
last_progress_report_time = datetime.now()
# Stream download to file with progress updates using larger buffer
loop = asyncio.get_running_loop()
mode = 'ab' if resume_offset > 0 else 'wb'
with open(part_path, mode) as f:
async for chunk in response.content.iter_chunked(self.chunk_size):
if chunk:
# Run blocking file write in executor
await loop.run_in_executor(None, f.write, chunk)
current_size += len(chunk)
# Limit progress update frequency to reduce overhead
now = datetime.now()
time_diff = (now - last_progress_report_time).total_seconds()
if progress_callback and total_size and time_diff >= 1.0:
progress = (current_size / total_size) * 100
await progress_callback(progress)
last_progress_report_time = now
# Download completed successfully
# Verify file size if total_size was provided
final_size = os.path.getsize(part_path)
if total_size > 0 and final_size != total_size:
logger.warning(f"File size mismatch. Expected: {total_size}, Got: {final_size}")
# Don't treat this as fatal error, rename anyway
# Atomically rename .part to final file with retries
max_rename_attempts = 5
rename_attempt = 0
rename_success = False
while rename_attempt < max_rename_attempts and not rename_success:
try:
os.rename(part_path, save_path)
rename_success = True
except PermissionError as e:
rename_attempt += 1
if rename_attempt < max_rename_attempts:
logger.info(f"File still in use, retrying rename in 2 seconds (attempt {rename_attempt}/{max_rename_attempts})")
await asyncio.sleep(2) # Wait before retrying
else:
logger.error(f"Failed to rename file after {max_rename_attempts} attempts: {e}")
return False, f"Failed to finalize download: {str(e)}"
# Ensure 100% progress is reported
if progress_callback:
await progress_callback(100)
return True, save_path
except (aiohttp.ClientError, aiohttp.ClientPayloadError,
aiohttp.ServerDisconnectedError, asyncio.TimeoutError) as e:
retry_count += 1
logger.warning(f"Network error during download (attempt {retry_count}/{max_retries + 1}): {e}")
if retry_count <= max_retries:
# Calculate delay with exponential backoff
delay = base_delay * (2 ** (retry_count - 1))
logger.info(f"Retrying in {delay} seconds...")
await asyncio.sleep(delay)
# Update resume offset for next attempt
if os.path.exists(part_path):
resume_offset = os.path.getsize(part_path)
logger.info(f"Will resume from byte {resume_offset}")
# Refresh session to get new connection
await self.close()
session = await self._ensure_fresh_session()
continue
else:
logger.error(f"Max retries exceeded for download: {e}")
return False, f"Network error after {max_retries + 1} attempts: {str(e)}"
except Exception as e:
logger.error(f"Unexpected download error: {e}")
return False, str(e)
return False, f"Download failed after {max_retries + 1} attempts"
async def get_model_by_hash(self, model_hash: str) -> Optional[Dict]: async def get_model_by_hash(self, model_hash: str) -> Optional[Dict]:
try: try:
session = await self._ensure_fresh_session() downloader = await get_downloader()
async with session.get(f"{self.base_url}/model-versions/by-hash/{model_hash}") as response: success, result = await downloader.make_request(
if response.status == 200: 'GET',
return await response.json() f"{self.base_url}/model-versions/by-hash/{model_hash}",
return None use_auth=True
)
if success:
return result
return None
except Exception as e: except Exception as e:
logger.error(f"API Error: {str(e)}") logger.error(f"API Error: {str(e)}")
return None return None
async def download_preview_image(self, image_url: str, save_path: str): async def download_preview_image(self, image_url: str, save_path: str):
try: try:
session = await self._ensure_fresh_session() downloader = await get_downloader()
async with session.get(image_url) as response: success, content = await downloader.download_to_memory(
if response.status == 200: image_url,
content = await response.read() use_auth=False # Preview images don't need auth
with open(save_path, 'wb') as f: )
f.write(content) if success:
return True # Ensure directory exists
return False os.makedirs(os.path.dirname(save_path), exist_ok=True)
with open(save_path, 'wb') as f:
f.write(content)
return True
return False
except Exception as e: except Exception as e:
print(f"Download Error: {str(e)}") logger.error(f"Download Error: {str(e)}")
return False return False
async def get_model_versions(self, model_id: str) -> List[Dict]: async def get_model_versions(self, model_id: str) -> List[Dict]:
"""Get all versions of a model with local availability info""" """Get all versions of a model with local availability info"""
try: try:
session = await self._ensure_fresh_session() # Use fresh session downloader = await get_downloader()
async with session.get(f"{self.base_url}/models/{model_id}") as response: success, result = await downloader.make_request(
if response.status != 200: 'GET',
return None f"{self.base_url}/models/{model_id}",
data = await response.json() use_auth=True
)
if success:
# Also return model type along with versions # Also return model type along with versions
return { return {
'modelVersions': data.get('modelVersions', []), 'modelVersions': result.get('modelVersions', []),
'type': data.get('type', '') 'type': result.get('type', '')
} }
return None
except Exception as e: except Exception as e:
logger.error(f"Error fetching model versions: {e}") logger.error(f"Error fetching model versions: {e}")
return None return None
@@ -337,68 +123,74 @@ class CivitaiClient:
Optional[Dict]: The model version data with additional fields or None if not found Optional[Dict]: The model version data with additional fields or None if not found
""" """
try: try:
session = await self._ensure_fresh_session() downloader = await get_downloader()
headers = self._get_request_headers()
# Case 1: Only version_id is provided # Case 1: Only version_id is provided
if model_id is None and version_id is not None: if model_id is None and version_id is not None:
# First get the version info to extract model_id # First get the version info to extract model_id
async with session.get(f"{self.base_url}/model-versions/{version_id}", headers=headers) as response: success, version = await downloader.make_request(
if response.status != 200: 'GET',
return None f"{self.base_url}/model-versions/{version_id}",
use_auth=True
)
if not success:
return None
version = await response.json() model_id = version.get('modelId')
model_id = version.get('modelId') if not model_id:
logger.error(f"No modelId found in version {version_id}")
if not model_id: return None
logger.error(f"No modelId found in version {version_id}")
return None
# Now get the model data for additional metadata # Now get the model data for additional metadata
async with session.get(f"{self.base_url}/models/{model_id}") as response: success, model_data = await downloader.make_request(
if response.status != 200: 'GET',
return version # Return version without additional metadata f"{self.base_url}/models/{model_id}",
use_auth=True
model_data = await response.json() )
if success:
# Enrich version with model data # Enrich version with model data
version['model']['description'] = model_data.get("description") version['model']['description'] = model_data.get("description")
version['model']['tags'] = model_data.get("tags", []) version['model']['tags'] = model_data.get("tags", [])
version['creator'] = model_data.get("creator") version['creator'] = model_data.get("creator")
return version return version
# Case 2: model_id is provided (with or without version_id) # Case 2: model_id is provided (with or without version_id)
elif model_id is not None: elif model_id is not None:
# Step 1: Get model data to find version_id if not provided and get additional metadata # Step 1: Get model data to find version_id if not provided and get additional metadata
async with session.get(f"{self.base_url}/models/{model_id}") as response: success, data = await downloader.make_request(
if response.status != 200: 'GET',
return None f"{self.base_url}/models/{model_id}",
use_auth=True
)
if not success:
return None
data = await response.json() model_versions = data.get('modelVersions', [])
model_versions = data.get('modelVersions', [])
# Step 2: Determine the version_id to use # Step 2: Determine the version_id to use
target_version_id = version_id target_version_id = version_id
if target_version_id is None: if target_version_id is None:
target_version_id = model_versions[0].get('id') target_version_id = model_versions[0].get('id')
# Step 3: Get detailed version info using the version_id # Step 3: Get detailed version info using the version_id
async with session.get(f"{self.base_url}/model-versions/{target_version_id}", headers=headers) as response: success, version = await downloader.make_request(
if response.status != 200: 'GET',
return None f"{self.base_url}/model-versions/{target_version_id}",
use_auth=True
)
if not success:
return None
version = await response.json() # Step 4: Enrich version_info with model data
# Add description and tags from model data
version['model']['description'] = data.get("description")
version['model']['tags'] = data.get("tags", [])
# Step 4: Enrich version_info with model data # Add creator from model data
# Add description and tags from model data version['creator'] = data.get("creator")
version['model']['description'] = data.get("description")
version['model']['tags'] = data.get("tags", [])
# Add creator from model data return version
version['creator'] = data.get("creator")
return version
# Case 3: Neither model_id nor version_id provided # Case 3: Neither model_id nor version_id provided
else: else:
@@ -421,30 +213,29 @@ class CivitaiClient:
- An error message if there was an error, or None on success - An error message if there was an error, or None on success
""" """
try: try:
session = await self._ensure_fresh_session() downloader = await get_downloader()
url = f"{self.base_url}/model-versions/{version_id}" url = f"{self.base_url}/model-versions/{version_id}"
headers = self._get_request_headers()
logger.debug(f"Resolving DNS for model version info: {url}") logger.debug(f"Resolving DNS for model version info: {url}")
async with session.get(url, headers=headers) as response: success, result = await downloader.make_request(
if response.status == 200: 'GET',
logger.debug(f"Successfully fetched model version info for: {version_id}") url,
return await response.json(), None use_auth=True
)
# Handle specific error cases if success:
if response.status == 404: logger.debug(f"Successfully fetched model version info for: {version_id}")
# Try to parse the error message return result, None
try:
error_data = await response.json()
error_msg = error_data.get('error', f"Model not found (status 404)")
logger.warning(f"Model version not found: {version_id} - {error_msg}")
return None, error_msg
except:
return None, "Model not found (status 404)"
# Other error cases # Handle specific error cases
logger.error(f"Failed to fetch model info for {version_id} (status {response.status})") if "404" in str(result):
return None, f"Failed to fetch model info (status {response.status})" error_msg = f"Model not found (status 404)"
logger.warning(f"Model version not found: {version_id} - {error_msg}")
return None, error_msg
# Other error cases
logger.error(f"Failed to fetch model info for {version_id}: {result}")
return None, str(result)
except Exception as e: except Exception as e:
error_msg = f"Error fetching model version info: {e}" error_msg = f"Error fetching model version info: {e}"
logger.error(error_msg) logger.error(error_msg)
@@ -459,78 +250,50 @@ class CivitaiClient:
Returns: Returns:
Tuple[Optional[Dict], int]: A tuple containing: Tuple[Optional[Dict], int]: A tuple containing:
- A dictionary with model metadata or None if not found - A dictionary with model metadata or None if not found
- The HTTP status code from the request - The HTTP status code from the request (0 for exceptions)
""" """
try: try:
session = await self._ensure_fresh_session() downloader = await get_downloader()
headers = self._get_request_headers()
url = f"{self.base_url}/models/{model_id}" url = f"{self.base_url}/models/{model_id}"
async with session.get(url, headers=headers) as response: success, result = await downloader.make_request(
status_code = response.status 'GET',
url,
use_auth=True
)
if status_code != 200: if not success:
logger.warning(f"Failed to fetch model metadata: Status {status_code}") # Try to extract status code from error message
return None, status_code status_code = 0
if "404" in str(result):
status_code = 404
elif "401" in str(result):
status_code = 401
elif "403" in str(result):
status_code = 403
logger.warning(f"Failed to fetch model metadata: {result}")
return None, status_code
data = await response.json() # Extract relevant metadata
metadata = {
# Extract relevant metadata "description": result.get("description") or "No model description available",
metadata = { "tags": result.get("tags", []),
"description": data.get("description") or "No model description available", "creator": {
"tags": data.get("tags", []), "username": result.get("creator", {}).get("username"),
"creator": { "image": result.get("creator", {}).get("image")
"username": data.get("creator", {}).get("username"),
"image": data.get("creator", {}).get("image")
}
} }
}
if metadata["description"] or metadata["tags"] or metadata["creator"]["username"]: if metadata["description"] or metadata["tags"] or metadata["creator"]["username"]:
return metadata, status_code return metadata, 200
else: else:
logger.warning(f"No metadata found for model {model_id}") logger.warning(f"No metadata found for model {model_id}")
return None, status_code return None, 200
except Exception as e: except Exception as e:
logger.error(f"Error fetching model metadata: {e}", exc_info=True) logger.error(f"Error fetching model metadata: {e}", exc_info=True)
return None, 0 return None, 0
# Keep old method for backward compatibility, delegating to the new one
async def get_model_description(self, model_id: str) -> Optional[str]:
"""Fetch the model description from Civitai API (Legacy method)"""
metadata, _ = await self.get_model_metadata(model_id)
return metadata.get("description") if metadata else None
async def close(self):
"""Close the session if it exists"""
if self._session is not None:
await self._session.close()
self._session = None
async def _get_hash_from_civitai(self, model_version_id: str) -> Optional[str]:
"""Get hash from Civitai API"""
try:
session = await self._ensure_fresh_session()
if not session:
return None
version_info = await session.get(f"{self.base_url}/model-versions/{model_version_id}")
if not version_info or not version_info.json().get('files'):
return None
# Get hash from the first file
for file_info in version_info.json().get('files', []):
if file_info.get('hashes', {}).get('SHA256'):
# Convert hash to lowercase to standardize
hash_value = file_info['hashes']['SHA256'].lower()
return hash_value
return None
except Exception as e:
logger.error(f"Error getting hash from Civitai: {e}")
return None
async def get_image_info(self, image_id: str) -> Optional[Dict]: async def get_image_info(self, image_id: str) -> Optional[Dict]:
"""Fetch image information from Civitai API """Fetch image information from Civitai API
@@ -541,22 +304,25 @@ class CivitaiClient:
Optional[Dict]: The image data or None if not found Optional[Dict]: The image data or None if not found
""" """
try: try:
session = await self._ensure_fresh_session() downloader = await get_downloader()
headers = self._get_request_headers()
url = f"{self.base_url}/images?imageId={image_id}&nsfw=X" url = f"{self.base_url}/images?imageId={image_id}&nsfw=X"
logger.debug(f"Fetching image info for ID: {image_id}") logger.debug(f"Fetching image info for ID: {image_id}")
async with session.get(url, headers=headers) as response: success, result = await downloader.make_request(
if response.status == 200: 'GET',
data = await response.json() url,
if data and "items" in data and len(data["items"]) > 0: use_auth=True
logger.debug(f"Successfully fetched image info for ID: {image_id}") )
return data["items"][0]
logger.warning(f"No image found with ID: {image_id}")
return None
logger.error(f"Failed to fetch image info for ID: {image_id} (status {response.status})") if success:
if result and "items" in result and len(result["items"]) > 0:
logger.debug(f"Successfully fetched image info for ID: {image_id}")
return result["items"][0]
logger.warning(f"No image found with ID: {image_id}")
return None return None
logger.error(f"Failed to fetch image info for ID: {image_id}: {result}")
return None
except Exception as e: except Exception as e:
error_msg = f"Error fetching image info: {e}" error_msg = f"Error fetching image info: {e}"
logger.error(error_msg) logger.error(error_msg)

View File

@@ -10,6 +10,8 @@ from ..utils.exif_utils import ExifUtils
from ..utils.metadata_manager import MetadataManager from ..utils.metadata_manager import MetadataManager
from .service_registry import ServiceRegistry from .service_registry import ServiceRegistry
from .settings_manager import settings from .settings_manager import settings
from .metadata_service import get_default_metadata_provider
from .downloader import get_downloader
# Download to temporary file first # Download to temporary file first
import tempfile import tempfile
@@ -199,11 +201,11 @@ class DownloadManager:
if await embedding_scanner.check_model_version_exists(model_version_id): if await embedding_scanner.check_model_version_exists(model_version_id):
return {'success': False, 'error': 'Model version already exists in embedding library'} return {'success': False, 'error': 'Model version already exists in embedding library'}
# Get civitai client # Get metadata provider instead of civitai client directly
civitai_client = await self._get_civitai_client() metadata_provider = await get_default_metadata_provider()
# Get version info based on the provided identifier # Get version info based on the provided identifier
version_info = await civitai_client.get_model_version(model_id, model_version_id) version_info = await metadata_provider.get_model_version(model_id, model_version_id)
if not version_info: if not version_info:
return {'success': False, 'error': 'Failed to fetch model metadata'} return {'success': False, 'error': 'Failed to fetch model metadata'}
@@ -445,8 +447,14 @@ class DownloadManager:
preview_ext = '.mp4' preview_ext = '.mp4'
preview_path = os.path.splitext(save_path)[0] + preview_ext preview_path = os.path.splitext(save_path)[0] + preview_ext
# Download video directly # Download video directly using downloader
if await civitai_client.download_preview_image(images[0]['url'], preview_path): downloader = await get_downloader()
success, result = await downloader.download_file(
images[0]['url'],
preview_path,
use_auth=False # Preview images typically don't need auth
)
if success:
metadata.preview_url = preview_path.replace(os.sep, '/') metadata.preview_url = preview_path.replace(os.sep, '/')
metadata.preview_nsfw_level = images[0].get('nsfwLevel', 0) metadata.preview_nsfw_level = images[0].get('nsfwLevel', 0)
else: else:
@@ -454,8 +462,16 @@ class DownloadManager:
with tempfile.NamedTemporaryFile(suffix='.png', delete=False) as temp_file: with tempfile.NamedTemporaryFile(suffix='.png', delete=False) as temp_file:
temp_path = temp_file.name temp_path = temp_file.name
# Download the original image to temp path # Download the original image to temp path using downloader
if await civitai_client.download_preview_image(images[0]['url'], temp_path): downloader = await get_downloader()
success, content = await downloader.download_to_memory(
images[0]['url'],
use_auth=False
)
if success:
# Save to temp file
with open(temp_path, 'wb') as f:
f.write(content)
# Optimize and convert to WebP # Optimize and convert to WebP
preview_path = os.path.splitext(save_path)[0] + '.webp' preview_path = os.path.splitext(save_path)[0] + '.webp'
@@ -486,12 +502,13 @@ class DownloadManager:
if progress_callback: if progress_callback:
await progress_callback(3) # 3% progress after preview download await progress_callback(3) # 3% progress after preview download
# Download model file with progress tracking # Download model file with progress tracking using downloader
success, result = await civitai_client._download_file( downloader = await get_downloader()
success, result = await downloader.download_file(
download_url, download_url,
save_dir, save_path, # Use full path instead of separate dir and filename
os.path.basename(save_path), progress_callback=lambda p: self._handle_download_progress(p, progress_callback),
progress_callback=lambda p: self._handle_download_progress(p, progress_callback) use_auth=True # Model downloads need authentication
) )
if not success: if not success:

469
py/services/downloader.py Normal file
View File

@@ -0,0 +1,469 @@
"""
Unified download manager for all HTTP/HTTPS downloads in the application.
This module provides a centralized download service with:
- Singleton pattern for global session management
- Support for authenticated downloads (e.g., CivitAI API key)
- Resumable downloads with automatic retry
- Progress tracking and callbacks
- Optimized connection pooling and timeouts
- Unified error handling and logging
"""
import os
import logging
import asyncio
import aiohttp
from datetime import datetime
from typing import Optional, Dict, Tuple, Callable, Union
from ..services.settings_manager import settings
logger = logging.getLogger(__name__)
class Downloader:
    """Unified downloader for all HTTP/HTTPS downloads in the application."""
    # Process-wide singleton instance; created lazily by get_instance().
    _instance = None
    # Guards first-time singleton creation across concurrent awaiters.
    # NOTE(review): created at import time — on Python < 3.10, asyncio.Lock()
    # binds to the loop/policy current at creation; confirm target version.
    _lock = asyncio.Lock()
@classmethod
async def get_instance(cls):
"""Get singleton instance of Downloader"""
async with cls._lock:
if cls._instance is None:
cls._instance = cls()
return cls._instance
def __init__(self):
"""Initialize the downloader with optimal settings"""
# Check if already initialized for singleton pattern
if hasattr(self, '_initialized'):
return
self._initialized = True
# Session management
self._session = None
self._session_created_at = None
# Configuration
self.chunk_size = 4 * 1024 * 1024 # 4MB chunks for better throughput
self.max_retries = 5
self.base_delay = 2.0 # Base delay for exponential backoff
self.session_timeout = 300 # 5 minutes
# Default headers
self.default_headers = {
'User-Agent': 'ComfyUI-LoRA-Manager/1.0'
}
@property
async def session(self) -> aiohttp.ClientSession:
"""Get or create the global aiohttp session with optimized settings"""
if self._session is None or self._should_refresh_session():
await self._create_session()
return self._session
def _should_refresh_session(self) -> bool:
"""Check if session should be refreshed"""
if self._session is None:
return True
if not hasattr(self, '_session_created_at') or self._session_created_at is None:
return True
# Refresh if session is older than timeout
if (datetime.now() - self._session_created_at).total_seconds() > self.session_timeout:
return True
return False
async def _create_session(self):
"""Create a new aiohttp session with optimized settings"""
# Close existing session if any
if self._session is not None:
await self._session.close()
# Optimize TCP connection parameters
connector = aiohttp.TCPConnector(
ssl=True,
limit=8, # Concurrent connections
ttl_dns_cache=300, # DNS cache timeout
force_close=False, # Keep connections for reuse
enable_cleanup_closed=True
)
# Configure timeout parameters
timeout = aiohttp.ClientTimeout(
total=None, # No total timeout for large downloads
connect=60, # Connection timeout
sock_read=300 # 5 minute socket read timeout
)
self._session = aiohttp.ClientSession(
connector=connector,
trust_env=True, # Use system proxy settings
timeout=timeout
)
self._session_created_at = datetime.now()
logger.debug("Created new HTTP session")
def _get_auth_headers(self, use_auth: bool = False) -> Dict[str, str]:
"""Get headers with optional authentication"""
headers = self.default_headers.copy()
if use_auth:
# Add CivitAI API key if available
api_key = settings.get('civitai_api_key')
if api_key:
headers['Authorization'] = f'Bearer {api_key}'
headers['Content-Type'] = 'application/json'
return headers
async def download_file(
self,
url: str,
save_path: str,
progress_callback: Optional[Callable[[float], None]] = None,
use_auth: bool = False,
custom_headers: Optional[Dict[str, str]] = None,
allow_resume: bool = True
) -> Tuple[bool, str]:
"""
Download a file with resumable downloads and retry mechanism
Args:
url: Download URL
save_path: Full path where the file should be saved
progress_callback: Optional callback for progress updates (0-100)
use_auth: Whether to include authentication headers (e.g., CivitAI API key)
custom_headers: Additional headers to include in request
allow_resume: Whether to support resumable downloads
Returns:
Tuple[bool, str]: (success, save_path or error message)
"""
retry_count = 0
part_path = save_path + '.part' if allow_resume else save_path
# Prepare headers
headers = self._get_auth_headers(use_auth)
if custom_headers:
headers.update(custom_headers)
# Get existing file size for resume
resume_offset = 0
if allow_resume and os.path.exists(part_path):
resume_offset = os.path.getsize(part_path)
logger.info(f"Resuming download from offset {resume_offset} bytes")
total_size = 0
while retry_count <= self.max_retries:
try:
session = await self.session
# Add Range header for resume if we have partial data
request_headers = headers.copy()
if allow_resume and resume_offset > 0:
request_headers['Range'] = f'bytes={resume_offset}-'
# Disable compression for better chunked downloads
request_headers['Accept-Encoding'] = 'identity'
logger.debug(f"Download attempt {retry_count + 1}/{self.max_retries + 1} from: {url}")
if resume_offset > 0:
logger.debug(f"Requesting range from byte {resume_offset}")
async with session.get(url, headers=request_headers, allow_redirects=True) as response:
# Handle different response codes
if response.status == 200:
# Full content response
if resume_offset > 0:
# Server doesn't support ranges, restart from beginning
logger.warning("Server doesn't support range requests, restarting download")
resume_offset = 0
if os.path.exists(part_path):
os.remove(part_path)
elif response.status == 206:
# Partial content response (resume successful)
content_range = response.headers.get('Content-Range')
if content_range:
# Parse total size from Content-Range header (e.g., "bytes 1024-2047/2048")
range_parts = content_range.split('/')
if len(range_parts) == 2:
total_size = int(range_parts[1])
logger.info(f"Successfully resumed download from byte {resume_offset}")
elif response.status == 416:
# Range not satisfiable - file might be complete or corrupted
if allow_resume and os.path.exists(part_path):
part_size = os.path.getsize(part_path)
logger.warning(f"Range not satisfiable. Part file size: {part_size}")
# Try to get actual file size
head_response = await session.head(url, headers=headers)
if head_response.status == 200:
actual_size = int(head_response.headers.get('content-length', 0))
if part_size == actual_size:
# File is complete, just rename it
if allow_resume:
os.rename(part_path, save_path)
if progress_callback:
await progress_callback(100)
return True, save_path
# Remove corrupted part file and restart
os.remove(part_path)
resume_offset = 0
continue
elif response.status == 401:
logger.warning(f"Unauthorized access to resource: {url} (Status 401)")
return False, "Invalid or missing API key, or early access restriction."
elif response.status == 403:
logger.warning(f"Forbidden access to resource: {url} (Status 403)")
return False, "Access forbidden: You don't have permission to download this file."
elif response.status == 404:
logger.warning(f"Resource not found: {url} (Status 404)")
return False, "File not found - the download link may be invalid or expired."
else:
logger.error(f"Download failed for {url} with status {response.status}")
return False, f"Download failed with status {response.status}"
# Get total file size for progress calculation (if not set from Content-Range)
if total_size == 0:
total_size = int(response.headers.get('content-length', 0))
if response.status == 206:
# For partial content, add the offset to get total file size
total_size += resume_offset
current_size = resume_offset
last_progress_report_time = datetime.now()
# Ensure directory exists
os.makedirs(os.path.dirname(save_path), exist_ok=True)
# Stream download to file with progress updates
loop = asyncio.get_running_loop()
mode = 'ab' if (allow_resume and resume_offset > 0) else 'wb'
with open(part_path, mode) as f:
async for chunk in response.content.iter_chunked(self.chunk_size):
if chunk:
# Run blocking file write in executor
await loop.run_in_executor(None, f.write, chunk)
current_size += len(chunk)
# Limit progress update frequency to reduce overhead
now = datetime.now()
time_diff = (now - last_progress_report_time).total_seconds()
if progress_callback and total_size and time_diff >= 1.0:
progress = (current_size / total_size) * 100
await progress_callback(progress)
last_progress_report_time = now
# Download completed successfully
# Verify file size if total_size was provided
final_size = os.path.getsize(part_path)
if total_size > 0 and final_size != total_size:
logger.warning(f"File size mismatch. Expected: {total_size}, Got: {final_size}")
# Don't treat this as fatal error, continue anyway
# Atomically rename .part to final file (only if using resume)
if allow_resume and part_path != save_path:
max_rename_attempts = 5
rename_attempt = 0
rename_success = False
while rename_attempt < max_rename_attempts and not rename_success:
try:
# If the destination file exists, remove it first (Windows safe)
if os.path.exists(save_path):
os.remove(save_path)
os.rename(part_path, save_path)
rename_success = True
except PermissionError as e:
rename_attempt += 1
if rename_attempt < max_rename_attempts:
logger.info(f"File still in use, retrying rename in 2 seconds (attempt {rename_attempt}/{max_rename_attempts})")
await asyncio.sleep(2)
else:
logger.error(f"Failed to rename file after {max_rename_attempts} attempts: {e}")
return False, f"Failed to finalize download: {str(e)}"
# Ensure 100% progress is reported
if progress_callback:
await progress_callback(100)
return True, save_path
except (aiohttp.ClientError, aiohttp.ClientPayloadError,
aiohttp.ServerDisconnectedError, asyncio.TimeoutError) as e:
retry_count += 1
logger.warning(f"Network error during download (attempt {retry_count}/{self.max_retries + 1}): {e}")
if retry_count <= self.max_retries:
# Calculate delay with exponential backoff
delay = self.base_delay * (2 ** (retry_count - 1))
logger.info(f"Retrying in {delay} seconds...")
await asyncio.sleep(delay)
# Update resume offset for next attempt
if allow_resume and os.path.exists(part_path):
resume_offset = os.path.getsize(part_path)
logger.info(f"Will resume from byte {resume_offset}")
# Refresh session to get new connection
await self._create_session()
continue
else:
logger.error(f"Max retries exceeded for download: {e}")
return False, f"Network error after {self.max_retries + 1} attempts: {str(e)}"
except Exception as e:
logger.error(f"Unexpected download error: {e}")
return False, str(e)
return False, f"Download failed after {self.max_retries + 1} attempts"
async def download_to_memory(
    self,
    url: str,
    use_auth: bool = False,
    custom_headers: Optional[Dict[str, str]] = None
) -> Tuple[bool, Union[bytes, str]]:
    """
    Fetch a URL entirely into memory (intended for small payloads such
    as preview images).

    Args:
        url: Download URL
        use_auth: Whether to include authentication headers
        custom_headers: Extra headers merged over the auth headers

    Returns:
        Tuple[bool, Union[bytes, str]]: (success, content bytes or error message)
    """
    # Fixed error strings for the well-known failure statuses
    error_by_status = {
        401: "Unauthorized access - invalid or missing API key",
        403: "Access forbidden",
        404: "File not found",
    }
    try:
        session = await self.session
        request_headers = self._get_auth_headers(use_auth)
        if custom_headers:
            request_headers.update(custom_headers)
        async with session.get(url, headers=request_headers) as response:
            status = response.status
            if status == 200:
                return True, await response.read()
            if status in error_by_status:
                return False, error_by_status[status]
            return False, f"Download failed with status {status}"
    except Exception as e:
        logger.error(f"Error downloading to memory from {url}: {e}")
        return False, str(e)
async def get_response_headers(
    self,
    url: str,
    use_auth: bool = False,
    custom_headers: Optional[Dict[str, str]] = None
) -> Tuple[bool, Union[Dict, str]]:
    """
    Issue a HEAD request and return the response headers without
    downloading any body content.

    Args:
        url: URL to check
        use_auth: Whether to include authentication headers
        custom_headers: Extra headers merged over the auth headers

    Returns:
        Tuple[bool, Union[Dict, str]]: (success, headers dict or error message)
    """
    try:
        session = await self.session
        request_headers = self._get_auth_headers(use_auth)
        if custom_headers:
            request_headers.update(custom_headers)
        async with session.head(url, headers=request_headers) as response:
            if response.status != 200:
                return False, f"Head request failed with status {response.status}"
            # Materialize the CIMultiDict into a plain dict for callers
            return True, dict(response.headers)
    except Exception as e:
        logger.error(f"Error getting headers from {url}: {e}")
        return False, str(e)
async def make_request(
    self,
    method: str,
    url: str,
    use_auth: bool = False,
    custom_headers: Optional[Dict[str, str]] = None,
    **kwargs
) -> Tuple[bool, Union[Dict, str]]:
    """
    Make a generic HTTP request and return the parsed response.

    Args:
        method: HTTP method (GET, POST, etc.)
        url: Request URL
        use_auth: Whether to include authentication headers
        custom_headers: Additional headers to include in request
        **kwargs: Additional arguments forwarded to the aiohttp request

    Returns:
        Tuple[bool, Union[Dict, str]]: (success, response data or error message).
        On success the data is the decoded JSON body when possible,
        otherwise the raw response text.
    """
    try:
        session = await self.session
        # Prepare headers
        headers = self._get_auth_headers(use_auth)
        if custom_headers:
            headers.update(custom_headers)
        async with session.request(method, url, headers=headers, **kwargs) as response:
            if response.status == 200:
                # Try to parse as JSON, fall back to text.
                # FIX: use `except Exception` instead of a bare `except:` so
                # asyncio.CancelledError / KeyboardInterrupt are not swallowed
                # (a bare except here would turn task cancellation into a
                # bogus "successful" text response).
                try:
                    data = await response.json()
                    return True, data
                except Exception:
                    text = await response.text()
                    return True, text
            elif response.status == 401:
                return False, "Unauthorized access - invalid or missing API key"
            elif response.status == 403:
                return False, "Access forbidden"
            elif response.status == 404:
                return False, "Resource not found"
            else:
                return False, f"Request failed with status {response.status}"
    except Exception as e:
        logger.error(f"Error making {method} request to {url}: {e}")
        return False, str(e)
async def close(self):
    """Close the HTTP session"""
    # Only tear down when a session actually exists; clearing both the
    # session and its creation timestamp lets a later access lazily
    # recreate a fresh session.
    if self._session is not None:
        await self._session.close()
        self._session = None
        self._session_created_at = None
        logger.debug("Closed HTTP session")
# Global instance accessor
async def get_downloader() -> Downloader:
    """Module-level accessor for the process-wide shared Downloader singleton."""
    instance = await Downloader.get_instance()
    return instance

View File

@@ -0,0 +1,151 @@
import asyncio
import logging
import shutil
import zipfile
from pathlib import Path
from typing import Optional

from .downloader import get_downloader
logger = logging.getLogger(__name__)
class MetadataArchiveManager:
    """Manages downloading and extracting the Civitai metadata archive database.

    Layout under ``base_path``:
      - ``civitai.zip``        temporary downloaded archive
      - ``civitai/civitai.sqlite``  the extracted SQLite database
    """

    # Mirrors tried in order.
    # FIX: the Hugging Face URL must use /resolve/ — a /blob/ URL returns the
    # HTML file-preview page rather than the raw zip archive.
    DOWNLOAD_URLS = [
        "https://github.com/willmiao/civitai-metadata-archive-db/releases/download/db-2025-08-08/civitai.zip",
        "https://huggingface.co/datasets/willmiao/civitai-metadata-archive-db/resolve/main/civitai.zip"
    ]

    def __init__(self, base_path: str):
        """Initialize with base path where files will be stored"""
        self.base_path = Path(base_path)
        self.civitai_folder = self.base_path / "civitai"
        self.archive_path = self.base_path / "civitai.zip"
        self.db_path = self.civitai_folder / "civitai.sqlite"

    def is_database_available(self) -> bool:
        """Check if the SQLite database exists and is non-empty"""
        return self.db_path.exists() and self.db_path.stat().st_size > 0

    def get_database_path(self) -> Optional[str]:
        """Get the path to the SQLite database if available, else None"""
        if self.is_database_available():
            return str(self.db_path)
        return None

    async def download_and_extract_database(self, progress_callback=None) -> bool:
        """Download and extract the metadata archive database.

        Args:
            progress_callback: Optional callable ``(phase, message)`` used to
                report progress (phases: "download", "extract").

        Returns:
            bool: True if successful, False otherwise
        """
        try:
            # Create directories if they don't exist
            self.base_path.mkdir(parents=True, exist_ok=True)
            self.civitai_folder.mkdir(parents=True, exist_ok=True)

            # Download the archive
            if not await self._download_archive(progress_callback):
                return False

            # Extract the archive
            if not await self._extract_archive(progress_callback):
                return False

            # Clean up the downloaded archive once extracted
            if self.archive_path.exists():
                self.archive_path.unlink()

            logger.info(f"Successfully downloaded and extracted metadata database to {self.db_path}")
            return True
        except Exception as e:
            logger.error(f"Error downloading and extracting metadata database: {e}", exc_info=True)
            return False

    async def _download_archive(self, progress_callback=None) -> bool:
        """Download the zip archive, trying each mirror URL in order"""
        downloader = await get_downloader()
        for url in self.DOWNLOAD_URLS:
            try:
                logger.info(f"Attempting to download from {url}")
                if progress_callback:
                    progress_callback("download", f"Downloading from {url}")

                # Adapter: forward the downloader's percentage progress to the
                # caller's (phase, message) callback.
                async def download_progress(progress):
                    if progress_callback:
                        progress_callback("download", f"Downloading archive... {progress:.1f}%")

                success, result = await downloader.download_file(
                    url=url,
                    save_path=str(self.archive_path),
                    progress_callback=download_progress,
                    use_auth=False,  # Public download, no auth needed
                    allow_resume=True
                )
                if success:
                    logger.info(f"Successfully downloaded archive from {url}")
                    return True
                logger.warning(f"Failed to download from {url}: {result}")
            except Exception as e:
                logger.warning(f"Error downloading from {url}: {e}")
        logger.error("Failed to download archive from any URL")
        return False

    async def _extract_archive(self, progress_callback=None) -> bool:
        """Extract the zip archive to the base folder (yields civitai/...)"""
        try:
            if progress_callback:
                progress_callback("extract", "Extracting archive...")
            # Run extraction in a thread pool to avoid blocking the event loop.
            # FIX: get_running_loop() — get_event_loop() inside a coroutine is
            # deprecated since Python 3.10 (and matches usage elsewhere in
            # this package).
            loop = asyncio.get_running_loop()
            await loop.run_in_executor(None, self._extract_zip_sync)
            if progress_callback:
                progress_callback("extract", "Extraction completed")
            return True
        except Exception as e:
            logger.error(f"Error extracting archive: {e}", exc_info=True)
            return False

    def _extract_zip_sync(self):
        """Synchronous zip extraction (runs in a worker thread)"""
        # NOTE(review): extractall trusts the archive's member paths; the
        # archive comes from the project's own release mirrors, but a
        # member-path sanity check would harden this against zip-slip.
        with zipfile.ZipFile(self.archive_path, 'r') as archive:
            archive.extractall(path=self.base_path)

    async def remove_database(self) -> bool:
        """Remove the metadata database folder and any leftover archive"""
        try:
            if self.civitai_folder.exists():
                # FIX: rmtree also handles nested directories; the previous
                # file-by-file unlink + rmdir failed if the extracted archive
                # ever contained a subdirectory.
                shutil.rmtree(self.civitai_folder)
            # Also remove a partially-downloaded archive file if present
            if self.archive_path.exists():
                self.archive_path.unlink()
            logger.info("Successfully removed metadata database")
            return True
        except Exception as e:
            logger.error(f"Error removing metadata database: {e}", exc_info=True)
            return False

View File

@@ -0,0 +1,108 @@
import os
import logging
from .model_metadata_provider import (
ModelMetadataProviderManager,
SQLiteModelMetadataProvider,
CivitaiModelMetadataProvider,
FallbackMetadataProvider
)
from .settings_manager import settings
from .metadata_archive_manager import MetadataArchiveManager
from .service_registry import ServiceRegistry
logger = logging.getLogger(__name__)
async def initialize_metadata_providers():
    """Initialize and configure all metadata providers based on settings"""
    manager = await ModelMetadataProviderManager.get_instance()

    # Drop any previously registered providers so this can run repeatedly
    manager.providers.clear()
    manager.default_provider = None

    archive_db_enabled = settings.get('enable_metadata_archive_db', False)
    registered = []

    # Optional SQLite provider backed by the downloaded archive database
    if archive_db_enabled:
        try:
            root_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
            archive_manager = MetadataArchiveManager(root_dir)
            db_path = archive_manager.get_database_path()
            if db_path and os.path.exists(db_path):
                sqlite_provider = SQLiteModelMetadataProvider(db_path)
                manager.register_provider('sqlite', sqlite_provider)
                registered.append(('sqlite', sqlite_provider))
                logger.info(f"SQLite metadata provider registered with database: {db_path}")
            else:
                logger.warning("Metadata archive database is enabled but database file not found")
        except Exception as e:
            logger.error(f"Failed to initialize SQLite metadata provider: {e}")

    # Civitai API provider (always attempted; primary metadata source)
    try:
        civitai_client = await ServiceRegistry.get_civitai_client()
        api_provider = CivitaiModelMetadataProvider(civitai_client)
        manager.register_provider('civitai_api', api_provider)
        registered.append(('civitai_api', api_provider))
        logger.info("Civitai API metadata provider registered")
    except Exception as e:
        logger.error(f"Failed to initialize Civitai API metadata provider: {e}")

    if len(registered) > 1:
        # Compose a fallback chain: Civitai API first, archive DB second
        chain = [p for name, p in registered if name == 'civitai_api']
        chain += [p for name, p in registered if name == 'sqlite']
        if chain:
            fallback = FallbackMetadataProvider(chain)
            manager.register_provider('fallback', fallback, is_default=True)
            logger.info(f"Fallback metadata provider registered with {len(chain)} providers, Civitai API first")
    elif len(registered) == 1:
        # Single provider available: promote it to default
        name, provider = registered[0]
        manager.register_provider(name, provider, is_default=True)
        logger.info(f"Single metadata provider registered as default: {name}")
    else:
        logger.warning("No metadata providers available - this may cause metadata lookup failures")

    return manager
async def update_metadata_providers():
    """Update metadata providers based on current settings"""
    try:
        archive_db_enabled = settings.get('enable_metadata_archive_db', False)
        # Rebuild every provider from scratch with the current settings
        manager = await initialize_metadata_providers()
        logger.info(f"Updated metadata providers, archive_db enabled: {archive_db_enabled}")
        return manager
    except Exception as e:
        # On failure, fall back to whatever manager instance already exists
        logger.error(f"Failed to update metadata providers: {e}")
        return await ModelMetadataProviderManager.get_instance()
async def get_metadata_archive_manager():
    """Get a metadata archive manager instance rooted at the project directory"""
    # Project root: three levels up from this module's directory
    root_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
    return MetadataArchiveManager(root_dir)
async def get_metadata_provider(provider_name: str = None):
    """Get a specific metadata provider, or the default one when no name is given"""
    manager = await ModelMetadataProviderManager.get_instance()
    # _get_provider resolves the default when called without a name
    return manager._get_provider(provider_name) if provider_name else manager._get_provider()
async def get_default_metadata_provider():
    """Get the default metadata provider (fallback chain or single provider)"""
    provider = await get_metadata_provider()
    return provider

View File

@@ -0,0 +1,395 @@
from abc import ABC, abstractmethod
import json
import aiosqlite
import logging
from typing import Optional, Dict, List, Tuple, Any
logger = logging.getLogger(__name__)
class ModelMetadataProvider(ABC):
    """Abstract interface every model metadata backend must implement.

    All methods are async lookups returning Civitai-API-shaped dicts (or
    (result, error/status) tuples) so backends are interchangeable.
    """

    @abstractmethod
    async def get_model_by_hash(self, model_hash: str) -> Optional[Dict]:
        """Find a model by file hash value"""
        ...

    @abstractmethod
    async def get_model_versions(self, model_id: str) -> Optional[Dict]:
        """Get all versions of a model with their details"""
        ...

    @abstractmethod
    async def get_model_version(self, model_id: int = None, version_id: int = None) -> Optional[Dict]:
        """Get a specific model version with additional metadata"""
        ...

    @abstractmethod
    async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
        """Fetch model version metadata; returns (data, error_message)"""
        ...

    @abstractmethod
    async def get_model_metadata(self, model_id: str) -> Tuple[Optional[Dict], int]:
        """Fetch model metadata (description, tags, creator); returns (data, status_code)"""
        ...
class CivitaiModelMetadataProvider(ModelMetadataProvider):
    """Provider that delegates every lookup to the Civitai web API client."""

    def __init__(self, civitai_client):
        # Underlying API client; all calls are forwarded unchanged
        self.client = civitai_client

    async def get_model_by_hash(self, model_hash: str) -> Optional[Dict]:
        """Look up a model by file hash via the Civitai API"""
        return await self.client.get_model_by_hash(model_hash)

    async def get_model_versions(self, model_id: str) -> Optional[Dict]:
        """List a model's versions via the Civitai API"""
        return await self.client.get_model_versions(model_id)

    async def get_model_version(self, model_id: int = None, version_id: int = None) -> Optional[Dict]:
        """Fetch one model version via the Civitai API"""
        return await self.client.get_model_version(model_id, version_id)

    async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
        """Fetch version info via the Civitai API; returns (data, error_message)"""
        return await self.client.get_model_version_info(version_id)

    async def get_model_metadata(self, model_id: str) -> Tuple[Optional[Dict], int]:
        """Fetch model metadata via the Civitai API; returns (data, status_code)"""
        return await self.client.get_model_metadata(model_id)
class SQLiteModelMetadataProvider(ModelMetadataProvider):
    """Provider that serves metadata from the local Civitai archive SQLite DB.

    Schema used below (as exercised by the queries): a ``models`` table keyed
    by ``id`` with ``name``, ``type``, ``username`` and a JSON ``data``
    column; a ``model_versions`` table keyed by ``id`` with ``model_id``,
    ``name``, ``base_model``, ``position``, ``published_at`` and JSON
    ``data``; a ``model_files`` table mapping ``sha256`` hashes to
    ``model_id``/``version_id``. Responses mimic the Civitai API shape and
    carry ``"source": "archive_db"`` so callers can tell them apart.
    """

    def __init__(self, db_path: str):
        # Path to the extracted civitai.sqlite database file
        self.db_path = db_path

    async def get_model_by_hash(self, model_hash: str) -> Optional[Dict]:
        """Find model by hash value from the SQLite database"""
        async with aiosqlite.connect(self.db_path) as db:
            db.row_factory = aiosqlite.Row
            # Look up in model_files to resolve model_id and version_id.
            # Hashes appear to be stored upper-case, hence the normalization.
            query = """
                SELECT model_id, version_id
                FROM model_files
                WHERE sha256 = ?
                LIMIT 1
            """
            cursor = await db.execute(query, (model_hash.upper(),))
            file_row = await cursor.fetchone()
            if not file_row:
                return None
            # Build a response in the same format as the Civitai API
            return await self._get_version_with_model_data(
                db, file_row['model_id'], file_row['version_id'])

    async def get_model_versions(self, model_id: str) -> Optional[Dict]:
        """Get all versions of a model from the SQLite database"""
        async with aiosqlite.connect(self.db_path) as db:
            db.row_factory = aiosqlite.Row
            # First check that the model exists at all
            cursor = await db.execute("SELECT * FROM models WHERE id = ?", (model_id,))
            model_row = await cursor.fetchone()
            if not model_row:
                return None
            model_type = model_row['type']
            # FIX: removed an unused `model_data = json.loads(model_row['data'])`
            # that parsed a potentially large JSON blob for nothing.

            # All versions for this model, in stored display order
            versions_query = """
                SELECT id, name, base_model, data, position, published_at
                FROM model_versions
                WHERE model_id = ?
                ORDER BY position ASC
            """
            cursor = await db.execute(versions_query, (model_id,))
            version_rows = await cursor.fetchall()
            if not version_rows:
                return {'modelVersions': [], 'type': model_type}

            # Format versions similar to the Civitai API
            model_versions = []
            for row in version_rows:
                version_data = json.loads(row['data'])
                # Base fields from the row; the stored JSON may add/override
                version_entry = {
                    'id': row['id'],
                    'modelId': int(model_id),
                    'name': row['name'],
                    'baseModel': row['base_model'],
                    'model': {
                        'name': model_row['name'],
                        'type': model_type,
                    },
                    'source': 'archive_db'
                }
                version_entry.update(version_data)
                model_versions.append(version_entry)
            return {
                'modelVersions': model_versions,
                'type': model_type
            }

    async def get_model_version(self, model_id: int = None, version_id: int = None) -> Optional[Dict]:
        """Get a specific model version; either id may be omitted.

        With only version_id, the owning model is resolved from the DB; with
        only model_id, the first version by position is used (presumably the
        latest/primary one — ordering semantics of `position` are assumed).
        """
        if not model_id and not version_id:
            return None
        async with aiosqlite.connect(self.db_path) as db:
            db.row_factory = aiosqlite.Row
            # Case 1: only version_id given — resolve the owning model_id
            if model_id is None and version_id is not None:
                cursor = await db.execute(
                    "SELECT model_id FROM model_versions WHERE id = ?", (version_id,))
                version_row = await cursor.fetchone()
                if not version_row:
                    return None
                model_id = version_row['model_id']
            # Case 2: only model_id given — pick the first version by position
            elif model_id is not None and version_id is None:
                version_query = """
                    SELECT id FROM model_versions
                    WHERE model_id = ?
                    ORDER BY position ASC
                    LIMIT 1
                """
                cursor = await db.execute(version_query, (model_id,))
                version_row = await cursor.fetchone()
                if not version_row:
                    return None
                version_id = version_row['id']
            # Both ids known: build the full combined payload
            return await self._get_version_with_model_data(db, model_id, version_id)

    async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
        """Fetch model version metadata; returns (data, error_message)"""
        async with aiosqlite.connect(self.db_path) as db:
            db.row_factory = aiosqlite.Row
            # Resolve the owning model for this version
            cursor = await db.execute(
                "SELECT model_id FROM model_versions WHERE id = ?", (version_id,))
            version_row = await cursor.fetchone()
            if not version_row:
                return None, "Model version not found"
            model_id = version_row['model_id']
            # Build complete version data with model info
            version_data = await self._get_version_with_model_data(db, model_id, version_id)
            return version_data, None

    async def get_model_metadata(self, model_id: str) -> Tuple[Optional[Dict], int]:
        """Fetch model metadata; returns (metadata, HTTP-like status code)"""
        async with aiosqlite.connect(self.db_path) as db:
            db.row_factory = aiosqlite.Row
            cursor = await db.execute(
                "SELECT name, type, data, username FROM models WHERE id = ?", (model_id,))
            model_row = await cursor.fetchone()
            if not model_row:
                return None, 404
            try:
                model_data = json.loads(model_row['data'])
                # Extract the fields callers use; prefer the dedicated
                # username column, falling back to the embedded creator blob
                metadata = {
                    "description": model_data.get("description", "No model description available"),
                    "tags": model_data.get("tags", []),
                    "creator": {
                        "username": model_row['username'] or model_data.get("creator", {}).get("username"),
                        "image": model_data.get("creator", {}).get("image")
                    }
                }
                return metadata, 200
            except json.JSONDecodeError:
                # Corrupt JSON in the archive row — report as a server error
                return None, 500

    async def _get_version_with_model_data(self, db, model_id, version_id) -> Optional[Dict]:
        """Build a Civitai-API-shaped version payload joined with model info.

        Returns None when either row is missing or its JSON is corrupt.
        """
        cursor = await db.execute(
            "SELECT name, base_model, data FROM model_versions WHERE id = ? AND model_id = ?",
            (version_id, model_id))
        version_row = await cursor.fetchone()
        if not version_row:
            return None
        cursor = await db.execute(
            "SELECT name, type, data, username FROM models WHERE id = ?", (model_id,))
        model_row = await cursor.fetchone()
        if not model_row:
            return None
        try:
            version_data = json.loads(version_row['data'])
            model_data = json.loads(model_row['data'])
            # Base shape mirrors the Civitai API version endpoint
            result = {
                "id": int(version_id),
                "modelId": int(model_id),
                "name": version_row['name'],
                "baseModel": version_row['base_model'],
                "model": {
                    "name": model_row['name'],
                    "description": model_data.get("description"),
                    "type": model_row['type'],
                    "tags": model_data.get("tags", [])
                },
                "creator": {
                    "username": model_row['username'] or model_data.get("creator", {}).get("username"),
                    "image": model_data.get("creator", {}).get("image")
                },
                "source": "archive_db"
            }
            # Stored version JSON may add/override fields (files, images, ...)
            result.update(version_data)
            return result
        except json.JSONDecodeError:
            return None
class FallbackMetadataProvider(ModelMetadataProvider):
    """Try providers in order, return the first successful result.

    A provider "fails" when it raises or returns a falsy result; failures
    are logged at debug level and the next provider is tried.
    """

    def __init__(self, providers: list):
        # Providers are consulted in list order
        self.providers = providers

    async def get_model_by_hash(self, model_hash: str) -> Optional[Dict]:
        for provider in self.providers:
            try:
                result = await provider.get_model_by_hash(model_hash)
                if result:
                    return result
            except Exception as e:
                # FIX: log the failure like every other method here instead of
                # silently swallowing the exception
                logger.debug(f"Provider failed for get_model_by_hash: {e}")
                continue
        return None

    async def get_model_versions(self, model_id: str) -> Optional[Dict]:
        for provider in self.providers:
            try:
                result = await provider.get_model_versions(model_id)
                if result:
                    return result
            except Exception as e:
                logger.debug(f"Provider failed for get_model_versions: {e}")
                continue
        return None

    async def get_model_version(self, model_id: int = None, version_id: int = None) -> Optional[Dict]:
        for provider in self.providers:
            try:
                result = await provider.get_model_version(model_id, version_id)
                if result:
                    return result
            except Exception as e:
                logger.debug(f"Provider failed for get_model_version: {e}")
                continue
        return None

    async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
        for provider in self.providers:
            try:
                result, error = await provider.get_model_version_info(version_id)
                if result:
                    return result, error
            except Exception as e:
                logger.debug(f"Provider failed for get_model_version_info: {e}")
                continue
        return None, "No provider could retrieve the data"

    async def get_model_metadata(self, model_id: str) -> Tuple[Optional[Dict], int]:
        for provider in self.providers:
            try:
                result, status = await provider.get_model_metadata(model_id)
                if result:
                    return result, status
            except Exception as e:
                logger.debug(f"Provider failed for get_model_metadata: {e}")
                continue
        # No provider produced data — report "not found"
        return None, 404
class ModelMetadataProviderManager:
    """Registry that selects among named model metadata providers."""

    _instance = None  # process-wide singleton

    @classmethod
    async def get_instance(cls):
        """Get singleton instance of ModelMetadataProviderManager"""
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance

    def __init__(self):
        # name -> provider mapping, plus the name of the default provider
        self.providers = {}
        self.default_provider = None

    def register_provider(self, name: str, provider: ModelMetadataProvider, is_default: bool = False):
        """Register a metadata provider under a name.

        The very first registration becomes the default automatically.
        """
        self.providers[name] = provider
        if is_default or self.default_provider is None:
            self.default_provider = name

    async def get_model_by_hash(self, model_hash: str, provider_name: str = None) -> Optional[Dict]:
        """Find model by hash using the specified or default provider"""
        return await self._get_provider(provider_name).get_model_by_hash(model_hash)

    async def get_model_versions(self, model_id: str, provider_name: str = None) -> Optional[Dict]:
        """Get model versions using the specified or default provider"""
        return await self._get_provider(provider_name).get_model_versions(model_id)

    async def get_model_version(self, model_id: int = None, version_id: int = None, provider_name: str = None) -> Optional[Dict]:
        """Get a specific model version using the specified or default provider"""
        return await self._get_provider(provider_name).get_model_version(model_id, version_id)

    async def get_model_version_info(self, version_id: str, provider_name: str = None) -> Tuple[Optional[Dict], Optional[str]]:
        """Fetch model version info using the specified or default provider"""
        return await self._get_provider(provider_name).get_model_version_info(version_id)

    async def get_model_metadata(self, model_id: str, provider_name: str = None) -> Tuple[Optional[Dict], int]:
        """Fetch model metadata using the specified or default provider"""
        return await self._get_provider(provider_name).get_model_metadata(model_id)

    def _get_provider(self, provider_name: str = None) -> ModelMetadataProvider:
        """Resolve a provider by name, falling back to the default.

        Raises ValueError when no usable provider can be resolved.
        """
        if provider_name and provider_name in self.providers:
            return self.providers[provider_name]
        if self.default_provider is None:
            raise ValueError("No default provider set and no valid provider specified")
        return self.providers[self.default_provider]

View File

@@ -730,11 +730,10 @@ class ModelScanner:
if needs_metadata_update and model_id: if needs_metadata_update and model_id:
logger.debug(f"Fetching missing metadata for {file_path} with model ID {model_id}") logger.debug(f"Fetching missing metadata for {file_path} with model ID {model_id}")
from ..services.civitai_client import CivitaiClient from ..services.metadata_service import get_default_metadata_provider
client = CivitaiClient() metadata_provider = await get_default_metadata_provider()
model_metadata, status_code = await client.get_model_metadata(model_id) model_metadata, status_code = await metadata_provider.get_model_metadata(model_id)
await client.close()
if status_code == 404: if status_code == 404:
logger.warning(f"Model {model_id} appears to be deleted from Civitai (404 response)") logger.warning(f"Model {model_id} appears to be deleted from Civitai (404 response)")

View File

@@ -8,6 +8,7 @@ from ..config import config
from .recipe_cache import RecipeCache from .recipe_cache import RecipeCache
from .service_registry import ServiceRegistry from .service_registry import ServiceRegistry
from .lora_scanner import LoraScanner from .lora_scanner import LoraScanner
from .metadata_service import get_default_metadata_provider
from ..utils.utils import fuzzy_match from ..utils.utils import fuzzy_match
from natsort import natsorted from natsort import natsorted
import sys import sys
@@ -431,13 +432,13 @@ class RecipeScanner:
async def _get_hash_from_civitai(self, model_version_id: str) -> Optional[str]: async def _get_hash_from_civitai(self, model_version_id: str) -> Optional[str]:
"""Get hash from Civitai API""" """Get hash from Civitai API"""
try: try:
# Get CivitaiClient from ServiceRegistry # Get metadata provider instead of civitai client directly
civitai_client = await self._get_civitai_client() metadata_provider = await get_default_metadata_provider()
if not civitai_client: if not metadata_provider:
logger.error("Failed to get CivitaiClient from ServiceRegistry") logger.error("Failed to get metadata provider")
return None return None
version_info, error_msg = await civitai_client.get_model_version_info(model_version_id) version_info, error_msg = await metadata_provider.get_model_version_info(model_version_id)
if not version_info: if not version_info:
if error_msg and "model not found" in error_msg.lower(): if error_msg and "model not found" in error_msg.lower():

View File

@@ -81,7 +81,8 @@ class SettingsManager:
return { return {
"civitai_api_key": "", "civitai_api_key": "",
"show_only_sfw": False, "show_only_sfw": False,
"language": "en" # 添加默认语言设置 "language": "en",
"enable_metadata_archive_db": False # Enable metadata archive database
} }
def get(self, key: str, default: Any = None) -> Any: def get(self, key: str, default: Any = None) -> Any:

View File

@@ -3,13 +3,13 @@ import os
import asyncio import asyncio
import json import json
import time import time
import aiohttp
from aiohttp import web from aiohttp import web
from ..services.service_registry import ServiceRegistry from ..services.service_registry import ServiceRegistry
from ..utils.metadata_manager import MetadataManager from ..utils.metadata_manager import MetadataManager
from .example_images_processor import ExampleImagesProcessor from .example_images_processor import ExampleImagesProcessor
from .example_images_metadata import MetadataUpdater from .example_images_metadata import MetadataUpdater
from ..services.websocket_manager import ws_manager # Add this import at the top from ..services.websocket_manager import ws_manager # Add this import at the top
from ..services.downloader import get_downloader
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -199,19 +199,8 @@ class DownloadManager:
"""Download example images for all models""" """Download example images for all models"""
global is_downloading, download_progress global is_downloading, download_progress
# Create independent download session # Get unified downloader
connector = aiohttp.TCPConnector( downloader = await get_downloader()
ssl=True,
limit=3,
force_close=False,
enable_cleanup_closed=True
)
timeout = aiohttp.ClientTimeout(total=None, connect=60, sock_read=60)
independent_session = aiohttp.ClientSession(
connector=connector,
trust_env=True,
timeout=timeout
)
try: try:
# Get scanners # Get scanners
@@ -246,7 +235,7 @@ class DownloadManager:
# Main logic for processing model is here, but actual operations are delegated to other classes # Main logic for processing model is here, but actual operations are delegated to other classes
was_remote_download = await DownloadManager._process_model( was_remote_download = await DownloadManager._process_model(
scanner_type, model, scanner, scanner_type, model, scanner,
output_dir, optimize, independent_session output_dir, optimize, downloader
) )
# Update progress # Update progress
@@ -270,12 +259,6 @@ class DownloadManager:
download_progress['end_time'] = time.time() download_progress['end_time'] = time.time()
finally: finally:
# Close the independent session
try:
await independent_session.close()
except Exception as e:
logger.error(f"Error closing download session: {e}")
# Save final progress to file # Save final progress to file
try: try:
DownloadManager._save_progress(output_dir) DownloadManager._save_progress(output_dir)
@@ -286,7 +269,7 @@ class DownloadManager:
is_downloading = False is_downloading = False
@staticmethod @staticmethod
async def _process_model(scanner_type, model, scanner, output_dir, optimize, independent_session): async def _process_model(scanner_type, model, scanner, output_dir, optimize, downloader):
"""Process a single model download""" """Process a single model download"""
global download_progress global download_progress
@@ -347,7 +330,7 @@ class DownloadManager:
images = model.get('civitai', {}).get('images', []) images = model.get('civitai', {}).get('images', [])
success, is_stale = await ExampleImagesProcessor.download_model_images( success, is_stale = await ExampleImagesProcessor.download_model_images(
model_hash, model_name, images, model_dir, optimize, independent_session model_hash, model_name, images, model_dir, optimize, downloader
) )
# If metadata is stale, try to refresh it # If metadata is stale, try to refresh it
@@ -365,7 +348,7 @@ class DownloadManager:
# Retry download with updated metadata # Retry download with updated metadata
updated_images = updated_model.get('civitai', {}).get('images', []) updated_images = updated_model.get('civitai', {}).get('images', [])
success, _ = await ExampleImagesProcessor.download_model_images( success, _ = await ExampleImagesProcessor.download_model_images(
model_hash, model_name, updated_images, model_dir, optimize, independent_session model_hash, model_name, updated_images, model_dir, optimize, downloader
) )
download_progress['refreshed_models'].add(model_hash) download_progress['refreshed_models'].add(model_hash)
@@ -529,19 +512,8 @@ class DownloadManager:
"""Download example images for specific models only - synchronous version""" """Download example images for specific models only - synchronous version"""
global download_progress global download_progress
# Create independent download session # Get unified downloader
connector = aiohttp.TCPConnector( downloader = await get_downloader()
ssl=True,
limit=3,
force_close=False,
enable_cleanup_closed=True
)
timeout = aiohttp.ClientTimeout(total=None, connect=60, sock_read=60)
independent_session = aiohttp.ClientSession(
connector=connector,
trust_env=True,
timeout=timeout
)
try: try:
# Get scanners # Get scanners
@@ -586,7 +558,7 @@ class DownloadManager:
# Force process this model regardless of previous status # Force process this model regardless of previous status
was_successful = await DownloadManager._process_specific_model( was_successful = await DownloadManager._process_specific_model(
scanner_type, model, scanner, scanner_type, model, scanner,
output_dir, optimize, independent_session output_dir, optimize, downloader
) )
if was_successful: if was_successful:
@@ -650,14 +622,11 @@ class DownloadManager:
raise raise
finally: finally:
# Close the independent session # No need to close any sessions since we use the global downloader
try: pass
await independent_session.close()
except Exception as e:
logger.error(f"Error closing download session: {e}")
@staticmethod @staticmethod
async def _process_specific_model(scanner_type, model, scanner, output_dir, optimize, independent_session): async def _process_specific_model(scanner_type, model, scanner, output_dir, optimize, downloader):
"""Process a specific model for forced download, ignoring previous download status""" """Process a specific model for forced download, ignoring previous download status"""
global download_progress global download_progress
@@ -701,7 +670,7 @@ class DownloadManager:
images = model.get('civitai', {}).get('images', []) images = model.get('civitai', {}).get('images', [])
success, is_stale, failed_images = await ExampleImagesProcessor.download_model_images_with_tracking( success, is_stale, failed_images = await ExampleImagesProcessor.download_model_images_with_tracking(
model_hash, model_name, images, model_dir, optimize, independent_session model_hash, model_name, images, model_dir, optimize, downloader
) )
# If metadata is stale, try to refresh it # If metadata is stale, try to refresh it
@@ -719,7 +688,7 @@ class DownloadManager:
# Retry download with updated metadata # Retry download with updated metadata
updated_images = updated_model.get('civitai', {}).get('images', []) updated_images = updated_model.get('civitai', {}).get('images', [])
success, _, additional_failed_images = await ExampleImagesProcessor.download_model_images_with_tracking( success, _, additional_failed_images = await ExampleImagesProcessor.download_model_images_with_tracking(
model_hash, model_name, updated_images, model_dir, optimize, independent_session model_hash, model_name, updated_images, model_dir, optimize, downloader
) )
# Combine failed images from both attempts # Combine failed images from both attempts

View File

@@ -35,7 +35,7 @@ class ExampleImagesProcessor:
return image_url return image_url
@staticmethod @staticmethod
async def download_model_images(model_hash, model_name, model_images, model_dir, optimize, independent_session): async def download_model_images(model_hash, model_name, model_images, model_dir, optimize, downloader):
"""Download images for a single model """Download images for a single model
Returns: Returns:
@@ -78,23 +78,25 @@ class ExampleImagesProcessor:
try: try:
logger.debug(f"Downloading {save_filename} for {model_name}") logger.debug(f"Downloading {save_filename} for {model_name}")
# Download directly using the independent session # Download using the unified downloader
async with independent_session.get(image_url, timeout=60) as response: success, content = await downloader.download_to_memory(
if response.status == 200: image_url,
with open(save_path, 'wb') as f: use_auth=False # Example images don't need auth
async for chunk in response.content.iter_chunked(8192): )
if chunk:
f.write(chunk) if success:
elif response.status == 404: with open(save_path, 'wb') as f:
error_msg = f"Failed to download file: {image_url}, status code: 404 - Model metadata might be stale" f.write(content)
logger.warning(error_msg) elif "404" in str(content):
model_success = False # Mark the model as failed due to 404 error error_msg = f"Failed to download file: {image_url}, status code: 404 - Model metadata might be stale"
# Return early to trigger metadata refresh attempt logger.warning(error_msg)
return False, True # (success, is_metadata_stale) model_success = False # Mark the model as failed due to 404 error
else: # Return early to trigger metadata refresh attempt
error_msg = f"Failed to download file: {image_url}, status code: {response.status}" return False, True # (success, is_metadata_stale)
logger.warning(error_msg) else:
model_success = False # Mark the model as failed error_msg = f"Failed to download file: {image_url}, error: {content}"
logger.warning(error_msg)
model_success = False # Mark the model as failed
except Exception as e: except Exception as e:
error_msg = f"Error downloading file {image_url}: {str(e)}" error_msg = f"Error downloading file {image_url}: {str(e)}"
logger.error(error_msg) logger.error(error_msg)
@@ -103,7 +105,7 @@ class ExampleImagesProcessor:
return model_success, False # (success, is_metadata_stale) return model_success, False # (success, is_metadata_stale)
@staticmethod @staticmethod
async def download_model_images_with_tracking(model_hash, model_name, model_images, model_dir, optimize, independent_session): async def download_model_images_with_tracking(model_hash, model_name, model_images, model_dir, optimize, downloader):
"""Download images for a single model with tracking of failed image URLs """Download images for a single model with tracking of failed image URLs
Returns: Returns:
@@ -147,25 +149,27 @@ class ExampleImagesProcessor:
try: try:
logger.debug(f"Downloading {save_filename} for {model_name}") logger.debug(f"Downloading {save_filename} for {model_name}")
# Download directly using the independent session # Download using the unified downloader
async with independent_session.get(image_url, timeout=60) as response: success, content = await downloader.download_to_memory(
if response.status == 200: image_url,
with open(save_path, 'wb') as f: use_auth=False # Example images don't need auth
async for chunk in response.content.iter_chunked(8192): )
if chunk:
f.write(chunk) if success:
elif response.status == 404: with open(save_path, 'wb') as f:
error_msg = f"Failed to download file: {image_url}, status code: 404 - Model metadata might be stale" f.write(content)
logger.warning(error_msg) elif "404" in str(content):
model_success = False # Mark the model as failed due to 404 error error_msg = f"Failed to download file: {image_url}, status code: 404 - Model metadata might be stale"
failed_images.append(image_url) # Track failed URL logger.warning(error_msg)
# Return early to trigger metadata refresh attempt model_success = False # Mark the model as failed due to 404 error
return False, True, failed_images # (success, is_metadata_stale, failed_images) failed_images.append(image_url) # Track failed URL
else: # Return early to trigger metadata refresh attempt
error_msg = f"Failed to download file: {image_url}, status code: {response.status}" return False, True, failed_images # (success, is_metadata_stale, failed_images)
logger.warning(error_msg) else:
model_success = False # Mark the model as failed error_msg = f"Failed to download file: {image_url}, error: {content}"
failed_images.append(image_url) # Track failed URL logger.warning(error_msg)
model_success = False # Mark the model as failed
failed_images.append(image_url) # Track failed URL
except Exception as e: except Exception as e:
error_msg = f"Error downloading file {image_url}: {str(e)}" error_msg = f"Error downloading file {image_url}: {str(e)}"
logger.error(error_msg) logger.error(error_msg)

View File

@@ -7,12 +7,12 @@ from aiohttp import web
from .model_utils import determine_base_model from .model_utils import determine_base_model
from .constants import PREVIEW_EXTENSIONS, CARD_PREVIEW_WIDTH from .constants import PREVIEW_EXTENSIONS, CARD_PREVIEW_WIDTH
from ..config import config from ..config import config
from ..services.civitai_client import CivitaiClient
from ..services.service_registry import ServiceRegistry from ..services.service_registry import ServiceRegistry
from ..services.downloader import get_downloader
from ..utils.exif_utils import ExifUtils from ..utils.exif_utils import ExifUtils
from ..utils.metadata_manager import MetadataManager from ..utils.metadata_manager import MetadataManager
from ..services.download_manager import DownloadManager
from ..services.websocket_manager import ws_manager from ..services.websocket_manager import ws_manager
from ..services.metadata_service import get_default_metadata_provider, get_metadata_provider
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -37,28 +37,51 @@ class ModelRouteUtils:
local_metadata['from_civitai'] = False local_metadata['from_civitai'] = False
await MetadataManager.save_metadata(metadata_path, local_metadata) await MetadataManager.save_metadata(metadata_path, local_metadata)
@staticmethod
def is_civitai_api_metadata(meta: dict) -> bool:
"""
Determine if the given civitai metadata is from the civitai API.
Returns True if both 'files' and 'images' exist and are non-empty,
and the 'source' is not 'archive_db'.
"""
if not isinstance(meta, dict):
return False
files = meta.get('files')
images = meta.get('images')
source = meta.get('source')
return bool(files) and bool(images) and source != 'archive_db'
@staticmethod @staticmethod
async def update_model_metadata(metadata_path: str, local_metadata: Dict, async def update_model_metadata(metadata_path: str, local_metadata: Dict,
civitai_metadata: Dict, client: CivitaiClient) -> None: civitai_metadata: Dict, metadata_provider=None) -> None:
"""Update local metadata with CivitAI data""" """Update local metadata with CivitAI data"""
# Save existing trainedWords and customImages if they exist # Save existing trainedWords and customImages if they exist
existing_civitai = local_metadata.get('civitai') or {} # Use empty dict if None existing_civitai = local_metadata.get('civitai') or {} # Use empty dict if None
# Create a new civitai metadata by updating existing with new # Check if we should skip the update to avoid overwriting richer data
merged_civitai = existing_civitai.copy() if civitai_metadata.get('source') == 'archive_db' and ModelRouteUtils.is_civitai_api_metadata(existing_civitai):
merged_civitai.update(civitai_metadata) logger.info(f"Skip civitai update for {local_metadata.get('model_name', '')} ({existing_civitai.get('name', '')})")
else:
# Create a new civitai metadata by updating existing with new
merged_civitai = existing_civitai.copy()
merged_civitai.update(civitai_metadata)
# Special handling for trainedWords - ensure we don't lose any existing trained words if civitai_metadata.get('source') == 'archive_db':
if 'trainedWords' in existing_civitai: model_name = civitai_metadata.get('model', {}).get('name', '')
existing_trained_words = existing_civitai.get('trainedWords', []) version_name = civitai_metadata.get('name', '')
new_trained_words = civitai_metadata.get('trainedWords', []) logger.info(f"Recovered metadata from archive_db for deleted model: {model_name} ({version_name})")
# Use a set to combine words without duplicates, then convert back to list
merged_trained_words = list(set(existing_trained_words + new_trained_words))
merged_civitai['trainedWords'] = merged_trained_words
# Update local metadata with merged civitai data # Special handling for trainedWords - ensure we don't lose any existing trained words
local_metadata['civitai'] = merged_civitai if 'trainedWords' in existing_civitai:
local_metadata['from_civitai'] = True existing_trained_words = existing_civitai.get('trainedWords', [])
new_trained_words = civitai_metadata.get('trainedWords', [])
# Use a set to combine words without duplicates, then convert back to list
merged_trained_words = list(set(existing_trained_words + new_trained_words))
merged_civitai['trainedWords'] = merged_trained_words
# Update local metadata with merged civitai data
local_metadata['civitai'] = merged_civitai
local_metadata['from_civitai'] = True
# Update model name if available # Update model name if available
if 'model' in civitai_metadata: if 'model' in civitai_metadata:
@@ -79,15 +102,17 @@ class ModelRouteUtils:
# If we have modelId and don't have enough metadata, fetch additional data # If we have modelId and don't have enough metadata, fetch additional data
if not model_metadata or not model_metadata.get('description'): if not model_metadata or not model_metadata.get('description'):
model_id = civitai_metadata.get('modelId') model_id = civitai_metadata.get('modelId')
if model_id: if model_id and metadata_provider:
fetched_metadata, _ = await client.get_model_metadata(str(model_id)) fetched_metadata, _ = await metadata_provider.get_model_metadata(str(model_id))
if fetched_metadata: if fetched_metadata:
model_metadata = fetched_metadata model_metadata = fetched_metadata
# Update local metadata with the model information # Update local metadata with the model information
if model_metadata: if model_metadata:
local_metadata['modelDescription'] = model_metadata.get('description', '') local_metadata['modelDescription'] = model_metadata.get('description', '')
local_metadata['tags'] = model_metadata.get('tags', []) # Only set tags if local_metadata['tags'] is empty
if not local_metadata.get('tags'):
local_metadata['tags'] = model_metadata.get('tags', [])
if 'creator' in model_metadata and model_metadata['creator']: if 'creator' in model_metadata and model_metadata['creator']:
local_metadata['civitai']['creator'] = model_metadata['creator'] local_metadata['civitai']['creator'] = model_metadata['creator']
@@ -113,22 +138,28 @@ class ModelRouteUtils:
preview_path = os.path.join(os.path.dirname(metadata_path), preview_filename) preview_path = os.path.join(os.path.dirname(metadata_path), preview_filename)
if is_video: if is_video:
# Download video as is # Download video as is using downloader
if await client.download_preview_image(first_preview['url'], preview_path): downloader = await get_downloader()
success, result = await downloader.download_file(
first_preview['url'],
preview_path,
use_auth=False
)
if success:
local_metadata['preview_url'] = preview_path.replace(os.sep, '/') local_metadata['preview_url'] = preview_path.replace(os.sep, '/')
local_metadata['preview_nsfw_level'] = first_preview.get('nsfwLevel', 0) local_metadata['preview_nsfw_level'] = first_preview.get('nsfwLevel', 0)
else: else:
# For images, download and then optimize to WebP # For images, download and then optimize to WebP using downloader
temp_path = preview_path + ".temp" downloader = await get_downloader()
if await client.download_preview_image(first_preview['url'], temp_path): success, content = await downloader.download_to_memory(
first_preview['url'],
use_auth=False
)
if success:
try: try:
# Read the downloaded image
with open(temp_path, 'rb') as f:
image_data = f.read()
# Optimize and convert to WebP # Optimize and convert to WebP
optimized_data, _ = ExifUtils.optimize_image( optimized_data, _ = ExifUtils.optimize_image(
image_data=image_data, image_data=content, # Use downloaded content directly
target_width=CARD_PREVIEW_WIDTH, target_width=CARD_PREVIEW_WIDTH,
format='webp', format='webp',
quality=85, quality=85,
@@ -143,17 +174,16 @@ class ModelRouteUtils:
local_metadata['preview_url'] = preview_path.replace(os.sep, '/') local_metadata['preview_url'] = preview_path.replace(os.sep, '/')
local_metadata['preview_nsfw_level'] = first_preview.get('nsfwLevel', 0) local_metadata['preview_nsfw_level'] = first_preview.get('nsfwLevel', 0)
# Remove the temporary file
if os.path.exists(temp_path):
os.remove(temp_path)
except Exception as e: except Exception as e:
logger.error(f"Error optimizing preview image: {e}") logger.error(f"Error optimizing preview image: {e}")
# If optimization fails, try to use the downloaded image directly # If optimization fails, save the original content
if os.path.exists(temp_path): try:
os.rename(temp_path, preview_path) with open(preview_path, 'wb') as f:
f.write(content)
local_metadata['preview_url'] = preview_path.replace(os.sep, '/') local_metadata['preview_url'] = preview_path.replace(os.sep, '/')
local_metadata['preview_nsfw_level'] = first_preview.get('nsfwLevel', 0) local_metadata['preview_nsfw_level'] = first_preview.get('nsfwLevel', 0)
except Exception as save_error:
logger.error(f"Error saving preview image: {save_error}")
# Save updated metadata # Save updated metadata
await MetadataManager.save_metadata(metadata_path, local_metadata) await MetadataManager.save_metadata(metadata_path, local_metadata)
@@ -176,7 +206,6 @@ class ModelRouteUtils:
Returns: Returns:
bool: True if successful, False otherwise bool: True if successful, False otherwise
""" """
client = CivitaiClient()
try: try:
# Validate input parameters # Validate input parameters
if not isinstance(model_data, dict): if not isinstance(model_data, dict):
@@ -188,8 +217,13 @@ class ModelRouteUtils:
# Check if model metadata exists # Check if model metadata exists
local_metadata = await ModelRouteUtils.load_local_metadata(metadata_path) local_metadata = await ModelRouteUtils.load_local_metadata(metadata_path)
# Fetch metadata from Civitai if model_data.get('from_civitai') is False:
civitai_metadata = await client.get_model_by_hash(sha256) # Likely deleted from CivitAI, use archive_db if available
metadata_provider = await get_metadata_provider('sqlite')
else:
metadata_provider = await get_default_metadata_provider()
civitai_metadata = await metadata_provider.get_model_by_hash(sha256)
if not civitai_metadata: if not civitai_metadata:
# Mark as not from CivitAI if not found # Mark as not from CivitAI if not found
local_metadata['from_civitai'] = False local_metadata['from_civitai'] = False
@@ -202,7 +236,7 @@ class ModelRouteUtils:
metadata_path, metadata_path,
local_metadata, local_metadata,
civitai_metadata, civitai_metadata,
client metadata_provider
) )
# Update cache object directly using safe .get() method # Update cache object directly using safe .get() method
@@ -225,8 +259,6 @@ class ModelRouteUtils:
except Exception as e: except Exception as e:
logger.error(f"Error fetching CivitAI data: {str(e)}", exc_info=True) # Include stack trace logger.error(f"Error fetching CivitAI data: {str(e)}", exc_info=True) # Include stack trace
return False return False
finally:
await client.close()
@staticmethod @staticmethod
def filter_civitai_data(data: Dict, minimal: bool = False) -> Dict: def filter_civitai_data(data: Dict, minimal: bool = False) -> Dict:
@@ -359,24 +391,22 @@ class ModelRouteUtils:
if not local_metadata or not local_metadata.get('sha256'): if not local_metadata or not local_metadata.get('sha256'):
return web.json_response({"success": False, "error": "No SHA256 hash found"}, status=400) return web.json_response({"success": False, "error": "No SHA256 hash found"}, status=400)
# Create a client for fetching from Civitai # Get metadata provider and fetch from unified provider
client = CivitaiClient() metadata_provider = await get_default_metadata_provider()
try:
# Fetch and update metadata
civitai_metadata = await client.get_model_by_hash(local_metadata["sha256"])
if not civitai_metadata:
await ModelRouteUtils.handle_not_found_on_civitai(metadata_path, local_metadata)
return web.json_response({"success": False, "error": "Not found on CivitAI"}, status=404)
await ModelRouteUtils.update_model_metadata(metadata_path, local_metadata, civitai_metadata, client) # Fetch and update metadata
civitai_metadata = await metadata_provider.get_model_by_hash(local_metadata["sha256"])
if not civitai_metadata:
await ModelRouteUtils.handle_not_found_on_civitai(metadata_path, local_metadata)
return web.json_response({"success": False, "error": "Not found on CivitAI"}, status=404)
# Update the cache await ModelRouteUtils.update_model_metadata(metadata_path, local_metadata, civitai_metadata, metadata_provider)
await scanner.update_single_model_cache(data['file_path'], data['file_path'], local_metadata)
# Return the updated metadata along with success status # Update the cache
return web.json_response({"success": True, "metadata": local_metadata}) await scanner.update_single_model_cache(data['file_path'], data['file_path'], local_metadata)
finally:
await client.close() # Return the updated metadata along with success status
return web.json_response({"success": True, "metadata": local_metadata})
except Exception as e: except Exception as e:
logger.error(f"Error fetching from CivitAI: {e}", exc_info=True) logger.error(f"Error fetching from CivitAI: {e}", exc_info=True)
@@ -777,43 +807,38 @@ class ModelRouteUtils:
# Check if model metadata exists # Check if model metadata exists
local_metadata = await ModelRouteUtils.load_local_metadata(metadata_path) local_metadata = await ModelRouteUtils.load_local_metadata(metadata_path)
# Create a client for fetching from Civitai # Get metadata provider and fetch metadata using get_model_version which includes more comprehensive data
client = await CivitaiClient.get_instance() metadata_provider = await get_default_metadata_provider()
try: civitai_metadata = await metadata_provider.get_model_version(model_id, model_version_id)
# Fetch metadata using get_model_version which includes more comprehensive data if not civitai_metadata:
civitai_metadata = await client.get_model_version(model_id, model_version_id) error_msg = f"Model version not found on CivitAI for ID: {model_id}"
if not civitai_metadata: if model_version_id:
error_msg = f"Model version not found on CivitAI for ID: {model_id}" error_msg += f" with version: {model_version_id}"
if model_version_id: return web.json_response({"success": False, "error": error_msg}, status=404)
error_msg += f" with version: {model_version_id}"
return web.json_response({"success": False, "error": error_msg}, status=404)
# Try to find the primary model file to get the SHA256 hash # Try to find the primary model file to get the SHA256 hash
primary_model_file = None primary_model_file = None
for file in civitai_metadata.get('files', []): for file in civitai_metadata.get('files', []):
if file.get('primary', False) and file.get('type') == 'Model': if file.get('primary', False) and file.get('type') == 'Model':
primary_model_file = file primary_model_file = file
break break
# Update the SHA256 hash in local metadata if available # Update the SHA256 hash in local metadata if available
if primary_model_file and primary_model_file.get('hashes', {}).get('SHA256'): if primary_model_file and primary_model_file.get('hashes', {}).get('SHA256'):
local_metadata['sha256'] = primary_model_file['hashes']['SHA256'].lower() local_metadata['sha256'] = primary_model_file['hashes']['SHA256'].lower()
# Update metadata with CivitAI information # Update metadata with CivitAI information
await ModelRouteUtils.update_model_metadata(metadata_path, local_metadata, civitai_metadata, client) await ModelRouteUtils.update_model_metadata(metadata_path, local_metadata, civitai_metadata, metadata_provider)
# Update the cache # Update the cache
await scanner.update_single_model_cache(file_path, file_path, local_metadata) await scanner.update_single_model_cache(file_path, file_path, local_metadata)
return web.json_response({ return web.json_response({
"success": True, "success": True,
"message": f"Model successfully re-linked to Civitai model {model_id}" + "message": f"Model successfully re-linked to Civitai model {model_id}" +
(f" version {model_version_id}" if model_version_id else ""), (f" version {model_version_id}" if model_version_id else ""),
"hash": local_metadata.get('sha256', '') "hash": local_metadata.get('sha256', '')
}) })
finally:
await client.close()
except Exception as e: except Exception as e:
logger.error(f"Error re-linking to CivitAI: {e}", exc_info=True) logger.error(f"Error re-linking to CivitAI: {e}", exc_info=True)

View File

@@ -12,7 +12,8 @@ dependencies = [
"olefile", # for getting rid of warning message "olefile", # for getting rid of warning message
"toml", "toml",
"natsort", "natsort",
"GitPython" "GitPython",
"aiosqlite"
] ]
[project.urls] [project.urls]

38
refs/civitai.sql Normal file
View File

@@ -0,0 +1,38 @@
CREATE TABLE models (
id INTEGER PRIMARY KEY,
name TEXT NOT NULL,
type TEXT NOT NULL,
username TEXT,
data TEXT NOT NULL,
created_at INTEGER NOT NULL,
updated_at INTEGER NOT NULL
) STRICT;
CREATE TABLE model_versions (
id INTEGER PRIMARY KEY,
model_id INTEGER NOT NULL,
position INTEGER NOT NULL,
name TEXT NOT NULL,
base_model TEXT NOT NULL,
published_at INTEGER,
data TEXT NOT NULL,
created_at INTEGER NOT NULL,
updated_at INTEGER NOT NULL
) STRICT;
CREATE INDEX model_versions_model_id_idx ON model_versions (model_id);
CREATE TABLE model_files (
id INTEGER PRIMARY KEY,
model_id INTEGER NOT NULL,
version_id INTEGER NOT NULL,
type TEXT NOT NULL,
sha256 TEXT,
data TEXT NOT NULL,
created_at INTEGER NOT NULL,
updated_at INTEGER NOT NULL
) STRICT;
CREATE INDEX model_files_model_id_idx ON model_files (model_id);
CREATE INDEX model_files_version_id_idx ON model_files (version_id);
CREATE TABLE archived_model_files (
file_id INTEGER PRIMARY KEY,
model_id INTEGER NOT NULL,
version_id INTEGER NOT NULL
) STRICT;

View File

@@ -8,3 +8,4 @@ toml
numpy numpy
natsort natsort
GitPython GitPython
aiosqlite

View File

@@ -208,6 +208,14 @@ body.modal-open {
pointer-events: none; pointer-events: none;
} }
button:disabled,
.primary-btn:disabled,
.danger-btn:disabled {
opacity: 0.5;
cursor: not-allowed;
pointer-events: none;
}
.restart-required-icon { .restart-required-icon {
color: var(--lora-warning); color: var(--lora-warning);
margin-left: 5px; margin-left: 5px;
@@ -228,14 +236,76 @@ body.modal-open {
background-color: oklch(35% 0.02 256 / 0.98); background-color: oklch(35% 0.02 256 / 0.98);
} }
.primary-btn.disabled { /* Danger button styles */
opacity: 0.5; .danger-btn {
cursor: not-allowed; display: flex;
align-items: center;
gap: 8px;
padding: 8px 16px;
background-color: var(--lora-error);
color: white;
border: none;
border-radius: var(--border-radius-sm);
cursor: pointer;
transition: background-color 0.2s;
font-size: 0.95em;
} }
.primary-btn.disabled { .danger-btn:hover {
opacity: 0.5; background-color: oklch(from var(--lora-error) l c h / 85%);
cursor: not-allowed; color: white;
}
/* Metadata archive status styles */
.metadata-archive-status {
background: rgba(0, 0, 0, 0.03);
border: 1px solid rgba(0, 0, 0, 0.1);
border-radius: var(--border-radius-sm);
padding: var(--space-2);
margin-bottom: var(--space-2);
}
[data-theme="dark"] .metadata-archive-status {
background: rgba(255, 255, 255, 0.03);
border: 1px solid var(--lora-border);
}
.archive-status-item {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 8px;
font-size: 0.95em;
}
.archive-status-item:last-child {
margin-bottom: 0;
}
.archive-status-label {
font-weight: 500;
color: var(--text-color);
opacity: 0.8;
}
.archive-status-value {
color: var(--text-color);
}
.archive-status-value.status-available {
color: var(--lora-success, #10b981);
}
.archive-status-value.status-unavailable {
color: var(--lora-warning, #f59e0b);
}
.archive-status-value.status-enabled {
color: var(--lora-success, #10b981);
}
.archive-status-value.status-disabled {
color: var(--lora-error, #ef4444);
} }
/* Add styles for delete preview image */ /* Add styles for delete preview image */

View File

@@ -260,6 +260,9 @@ export class SettingsManager {
includeTriggerWordsCheckbox.checked = state.global.settings.includeTriggerWords || false; includeTriggerWordsCheckbox.checked = state.global.settings.includeTriggerWords || false;
} }
// Load metadata archive settings
await this.loadMetadataArchiveSettings();
// Load base model path mappings // Load base model path mappings
this.loadBaseModelMappings(); this.loadBaseModelMappings();
@@ -786,6 +789,8 @@ export class SettingsManager {
state.global.settings.compactMode = value; state.global.settings.compactMode = value;
} else if (settingKey === 'include_trigger_words') { } else if (settingKey === 'include_trigger_words') {
state.global.settings.includeTriggerWords = value; state.global.settings.includeTriggerWords = value;
} else if (settingKey === 'enable_metadata_archive_db') {
state.global.settings.enable_metadata_archive_db = value;
} else { } else {
// For any other settings that might be added in the future // For any other settings that might be added in the future
state.global.settings[settingKey] = value; state.global.settings[settingKey] = value;
@@ -796,7 +801,7 @@ export class SettingsManager {
try { try {
// For backend settings, make API call // For backend settings, make API call
if (['show_only_sfw'].includes(settingKey)) { if (['show_only_sfw', 'enable_metadata_archive_db'].includes(settingKey)) {
const payload = {}; const payload = {};
payload[settingKey] = value; payload[settingKey] = value;
@@ -811,6 +816,11 @@ export class SettingsManager {
if (!response.ok) { if (!response.ok) {
throw new Error('Failed to save setting'); throw new Error('Failed to save setting');
} }
// Refresh metadata archive status when enable setting changes
if (settingKey === 'enable_metadata_archive_db') {
await this.updateMetadataArchiveStatus();
}
} }
showToast('toast.settings.settingsUpdated', { setting: settingKey.replace(/_/g, ' ') }, 'success'); showToast('toast.settings.settingsUpdated', { setting: settingKey.replace(/_/g, ' ') }, 'success');
@@ -839,6 +849,11 @@ export class SettingsManager {
}, 'success'); }, 'success');
} }
// Special handling for metadata archive settings
if (settingKey === 'enable_metadata_archive_db') {
await this.updateMetadataArchiveStatus();
}
} catch (error) { } catch (error) {
showToast('toast.settings.settingSaveFailed', { message: error.message }, 'error'); showToast('toast.settings.settingSaveFailed', { message: error.message }, 'error');
} }
@@ -916,6 +931,254 @@ export class SettingsManager {
} }
} }
async loadMetadataArchiveSettings() {
try {
// Load current settings from state
const enableMetadataArchiveCheckbox = document.getElementById('enableMetadataArchive');
if (enableMetadataArchiveCheckbox) {
enableMetadataArchiveCheckbox.checked = state.global.settings.enable_metadata_archive_db || false;
}
// Load status
await this.updateMetadataArchiveStatus();
} catch (error) {
console.error('Error loading metadata archive settings:', error);
}
}
/**
 * Refresh the metadata-archive status panel from the backend.
 * Calls GET /api/metadata-archive-status, renders availability and
 * enabled/disabled state into #metadataArchiveStatus, and toggles the
 * download/remove buttons to match. Errors are logged, not rethrown.
 */
async updateMetadataArchiveStatus() {
    try {
        const response = await fetch('/api/metadata-archive-status');
        const data = await response.json();

        const statusContainer = document.getElementById('metadataArchiveStatus');
        // Render only when the container is present and the API reported success.
        if (statusContainer && data.success) {
            const status = data;
            // NOTE(review): assumes databaseSize is a raw byte count — confirm with the API.
            const sizeText = status.databaseSize > 0 ? ` (${this.formatFileSize(status.databaseSize)})` : '';

            statusContainer.innerHTML = `
                <div class="archive-status-item">
                    <span class="archive-status-label">${translate('settings.metadataArchive.status')}:</span>
                    <span class="archive-status-value status-${status.isAvailable ? 'available' : 'unavailable'}">
                        ${status.isAvailable ? translate('settings.metadataArchive.statusAvailable') : translate('settings.metadataArchive.statusUnavailable')}
                        ${sizeText}
                    </span>
                </div>
                <div class="archive-status-item">
                    <span class="archive-status-label">${translate('settings.metadataArchive.enabled')}:</span>
                    <span class="archive-status-value status-${status.isEnabled ? 'enabled' : 'disabled'}">
                        ${status.isEnabled ? translate('common.status.enabled') : translate('common.status.disabled')}
                    </span>
                </div>
            `;

            // Keep the action buttons consistent with availability:
            // can't download what is already present; can't remove what isn't.
            const downloadBtn = document.getElementById('downloadMetadataArchiveBtn');
            const removeBtn = document.getElementById('removeMetadataArchiveBtn');

            if (downloadBtn) {
                downloadBtn.disabled = status.isAvailable;
                downloadBtn.textContent = status.isAvailable ?
                    translate('settings.metadataArchive.downloadedButton') :
                    translate('settings.metadataArchive.downloadButton');
            }

            if (removeBtn) {
                removeBtn.disabled = !status.isAvailable;
            }
        }
    } catch (error) {
        console.error('Error updating metadata archive status:', error);
    }
}
formatFileSize(bytes) {
if (bytes === 0) return '0 Bytes';
const k = 1024;
const sizes = ['Bytes', 'KB', 'MB', 'GB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
/**
 * Download the Civitai metadata archive database from the backend.
 * Flow: disable the download button, show a progress overlay, open a
 * WebSocket for live progress, wait for the server to confirm a download id
 * (or time out after 5 s), then POST /api/download-metadata-archive with
 * that id. On success the enable setting is persisted, the toggle checked,
 * and the status panel refreshed; on failure a toast is shown. The button
 * is restored in `finally` in all cases.
 */
async downloadMetadataArchive() {
    try {
        const downloadBtn = document.getElementById('downloadMetadataArchiveBtn');
        if (downloadBtn) {
            downloadBtn.disabled = true;
            downloadBtn.textContent = translate('settings.metadataArchive.downloadingButton');
        }

        // Show loading with enhanced progress
        const progressUpdater = state.loadingManager.showEnhancedProgress(translate('settings.metadataArchive.preparing'));

        // Set up WebSocket for progress updates; scheme mirrors the page protocol.
        const wsProtocol = window.location.protocol === 'https:' ? 'wss://' : 'ws://';
        const downloadId = `metadata_archive_${Date.now()}`;
        const ws = new WebSocket(`${wsProtocol}${window.location.host}/ws/download-progress?id=${downloadId}`);

        // NOTE(review): wsConnected is written but never read — candidate for removal.
        let wsConnected = false;
        let actualDownloadId = downloadId; // Will be updated when WebSocket confirms the ID

        // Promise to wait for WebSocket connection and ID confirmation.
        // Resolves on id confirmation, on socket error (fallback id), or
        // via the 5-second timeout below — whichever happens first.
        const wsReady = new Promise((resolve) => {
            ws.onopen = () => {
                wsConnected = true;
                console.log('Connected to metadata archive download progress WebSocket');
            };

            ws.onmessage = (event) => {
                const data = JSON.parse(event.data);

                // Handle download ID confirmation
                if (data.type === 'download_id') {
                    actualDownloadId = data.download_id;
                    console.log(`Connected to metadata archive download progress with ID: ${data.download_id}`);
                    resolve(data.download_id);
                    return;
                }

                // Handle metadata archive download progress
                if (data.type === 'metadata_archive_download') {
                    const message = data.message || '';

                    // Update progress bar based on stage
                    let progressPercent = 0;
                    if (data.stage === 'download') {
                        // Extract percentage from message if available
                        const percentMatch = data.message.match(/(\d+\.?\d*)%/);
                        if (percentMatch) {
                            progressPercent = Math.min(parseFloat(percentMatch[1]), 90); // Cap at 90% for download
                        } else {
                            progressPercent = 0; // Default download progress
                        }
                    } else if (data.stage === 'extract') {
                        progressPercent = 95; // Near completion for extraction
                    }

                    // Update loading manager progress
                    progressUpdater.updateProgress(progressPercent, '', `${message}`);
                }
            };

            ws.onerror = (error) => {
                console.error('WebSocket error:', error);
                resolve(downloadId); // Fallback to original ID
            };

            // Timeout fallback
            setTimeout(() => resolve(downloadId), 5000);
        });

        ws.onclose = () => {
            console.log('WebSocket connection closed');
        };

        // Wait for WebSocket to be ready
        await wsReady;

        const response = await fetch(`/api/download-metadata-archive?download_id=${encodeURIComponent(actualDownloadId)}`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json'
            }
        });

        const data = await response.json();

        // Close WebSocket
        if (ws.readyState === WebSocket.OPEN) {
            ws.close();
        }

        if (data.success) {
            // Complete progress
            await progressUpdater.complete(translate('settings.metadataArchive.downloadComplete'));

            showToast('settings.metadataArchive.downloadSuccess', 'success');

            // Update settings in state
            state.global.settings.enable_metadata_archive_db = true;
            setStorageItem('settings', state.global.settings);

            // Update UI
            const enableCheckbox = document.getElementById('enableMetadataArchive');
            if (enableCheckbox) {
                enableCheckbox.checked = true;
            }

            await this.updateMetadataArchiveStatus();
        } else {
            // Hide loading on error
            state.loadingManager.hide();
            // NOTE(review): concatenating the error onto the translation key likely
            // bypasses lookup in showToast — verify this renders the intended message.
            showToast('settings.metadataArchive.downloadError' + ': ' + data.error, 'error');
        }
    } catch (error) {
        console.error('Error downloading metadata archive:', error);
        // Hide loading on error
        state.loadingManager.hide();
        showToast('settings.metadataArchive.downloadError' + ': ' + error.message, 'error');
    } finally {
        const downloadBtn = document.getElementById('downloadMetadataArchiveBtn');
        if (downloadBtn) {
            downloadBtn.disabled = false;
            downloadBtn.textContent = translate('settings.metadataArchive.downloadButton');
        }
    }
}
async removeMetadataArchive() {
if (!confirm(translate('settings.metadataArchive.removeConfirm'))) {
return;
}
try {
const removeBtn = document.getElementById('removeMetadataArchiveBtn');
if (removeBtn) {
removeBtn.disabled = true;
removeBtn.textContent = translate('settings.metadataArchive.removingButton');
}
const response = await fetch('/api/remove-metadata-archive', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
}
});
const data = await response.json();
if (data.success) {
showToast('settings.metadataArchive.removeSuccess', 'success');
// Update settings in state
state.global.settings.enable_metadata_archive_db = false;
setStorageItem('settings', state.global.settings);
// Update UI
const enableCheckbox = document.getElementById('enableMetadataArchive');
if (enableCheckbox) {
enableCheckbox.checked = false;
}
await this.updateMetadataArchiveStatus();
} else {
showToast('settings.metadataArchive.removeError' + ': ' + data.error, 'error');
}
} catch (error) {
console.error('Error removing metadata archive:', error);
showToast('settings.metadataArchive.removeError' + ': ' + error.message, 'error');
} finally {
const removeBtn = document.getElementById('removeMetadataArchiveBtn');
if (removeBtn) {
removeBtn.disabled = false;
removeBtn.textContent = translate('settings.metadataArchive.removeButton');
}
}
}
async saveInputSetting(elementId, settingKey) { async saveInputSetting(elementId, settingKey) {
const element = document.getElementById(elementId); const element = document.getElementById(elementId);
if (!element) return; if (!element) return;

View File

@@ -398,6 +398,53 @@
</div> </div>
</div> </div>
<!-- Metadata Archive Section -->
<div class="settings-section">
    <h3>{{ t('settings.sections.metadataArchive') }}</h3>
    <!-- Toggle that persists the enable_metadata_archive_db backend setting -->
    <div class="setting-item">
        <div class="setting-row">
            <div class="setting-info">
                <label for="enableMetadataArchive">{{ t('settings.metadataArchive.enableArchiveDb') }}:</label>
            </div>
            <div class="setting-control">
                <label class="toggle-switch">
                    <input type="checkbox" id="enableMetadataArchive" onchange="settingsManager.saveToggleSetting('enableMetadataArchive', 'enable_metadata_archive_db')">
                    <span class="toggle-slider"></span>
                </label>
            </div>
        </div>
        <div class="input-help">
            {{ t('settings.metadataArchive.enableArchiveDbHelp') }}
        </div>
    </div>
    <!-- Live status panel, filled in by settingsManager.updateMetadataArchiveStatus() -->
    <div class="setting-item">
        <div class="metadata-archive-status" id="metadataArchiveStatus">
            <!-- Status will be populated by JavaScript -->
        </div>
    </div>
    <!-- Download / remove actions for the archive database -->
    <div class="setting-item">
        <div class="setting-row">
            <div class="setting-info">
                <label>{{ t('settings.metadataArchive.management') }}:</label>
            </div>
            <div class="setting-control">
                <button type="button" id="downloadMetadataArchiveBtn" class="primary-btn" onclick="settingsManager.downloadMetadataArchive()">
                    {{ t('settings.metadataArchive.downloadButton') }}
                </button>
                <button type="button" id="removeMetadataArchiveBtn" class="danger-btn" onclick="settingsManager.removeMetadataArchive()" style="margin-left: 10px;">
                    {{ t('settings.metadataArchive.removeButton') }}
                </button>
            </div>
        </div>
        <div class="input-help">
            {{ t('settings.metadataArchive.managementHelp') }}
        </div>
    </div>
</div>
<!-- Misc. Section --> <!-- Misc. Section -->
<div class="settings-section"> <div class="settings-section">
<h3>{{ t('settings.sections.misc') }}</h3> <h3>{{ t('settings.sections.misc') }}</h3>