mirror of
https://github.com/willmiao/ComfyUI-Lora-Manager.git
synced 2026-03-22 05:32:12 -03:00
Compare commits
238 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f09224152a | ||
|
|
df93670598 | ||
|
|
073fb3a94a | ||
|
|
53c4165d82 | ||
|
|
8cd4550189 | ||
|
|
2b2e4fefab | ||
|
|
5f93648297 | ||
|
|
8a628f0bd0 | ||
|
|
b67c8598d6 | ||
|
|
0254c9d0e9 | ||
|
|
ecb512995c | ||
|
|
f8b9fa9b20 | ||
|
|
5d4917c8d9 | ||
|
|
a50309c22e | ||
|
|
f5020e081f | ||
|
|
3c0bfcb226 | ||
|
|
9198a23ba9 | ||
|
|
02bac7edfb | ||
|
|
ea1d1a49c9 | ||
|
|
9a789f8f08 | ||
|
|
1971881537 | ||
|
|
4eb46a8d3e | ||
|
|
36f28b3c65 | ||
|
|
2452cc4df1 | ||
|
|
eda1ce9743 | ||
|
|
e24621a0af | ||
|
|
7173a2b9d6 | ||
|
|
d540b21aac | ||
|
|
9952721e76 | ||
|
|
26e4895807 | ||
|
|
c533a8e7bf | ||
|
|
dc820a456f | ||
|
|
07721af87c | ||
|
|
5093c30c06 | ||
|
|
8c77080ae6 | ||
|
|
bcf72c6bcc | ||
|
|
3849f7eef9 | ||
|
|
7eced1e3e9 | ||
|
|
51b5261f40 | ||
|
|
963f6b1383 | ||
|
|
b75baa1d1a | ||
|
|
6d95e93378 | ||
|
|
7117e0c33e | ||
|
|
d261474f3a | ||
|
|
c09d67d2e4 | ||
|
|
1427dc8e38 | ||
|
|
77a7b90dc7 | ||
|
|
e9d55fe146 | ||
|
|
57f369a6de | ||
|
|
059ebeead7 | ||
|
|
831a9da9d7 | ||
|
|
6000e08640 | ||
|
|
3edc65c106 | ||
|
|
655157434e | ||
|
|
3661b11b70 | ||
|
|
0e73db0669 | ||
|
|
8158441a92 | ||
|
|
5600471093 | ||
|
|
354cf03bbc | ||
|
|
645b7c247d | ||
|
|
5f25a29303 | ||
|
|
906d00106d | ||
|
|
7850131969 | ||
|
|
3d5ec4a9f1 | ||
|
|
1cdbb9a851 | ||
|
|
e224be4b88 | ||
|
|
b9d3a4afce | ||
|
|
aa4aa1a613 | ||
|
|
cc8e1c5049 | ||
|
|
41e649415a | ||
|
|
c8f770a86b | ||
|
|
29bb85359e | ||
|
|
4557da8b63 | ||
|
|
09b75de25b | ||
|
|
415fc5720c | ||
|
|
4dd8ce778e | ||
|
|
f81ff2efe9 | ||
|
|
837bb17b08 | ||
|
|
5ee93a27ee | ||
|
|
2e6aa5fe9f | ||
|
|
c14e066f8f | ||
|
|
c09100c22e | ||
|
|
839ed3bda3 | ||
|
|
1f627774c1 | ||
|
|
3b842355c2 | ||
|
|
dd27411ebf | ||
|
|
388ff7f5b4 | ||
|
|
f76343f389 | ||
|
|
ce5a1ae3d0 | ||
|
|
1d40d7400f | ||
|
|
1bb5d0b072 | ||
|
|
c3932538e1 | ||
|
|
a68141adf4 | ||
|
|
fb8ba4c076 | ||
|
|
4ed3bd9039 | ||
|
|
ba6e2eadba | ||
|
|
1c16392367 | ||
|
|
035ad4b473 | ||
|
|
a7ee883227 | ||
|
|
ddf9e33961 | ||
|
|
4301b3455f | ||
|
|
3d6bb432c4 | ||
|
|
6c03aa1430 | ||
|
|
5376fd8724 | ||
|
|
6dea9a76bc | ||
|
|
d73903e82e | ||
|
|
4862419b61 | ||
|
|
e6e7df7454 | ||
|
|
30f9e3e2ec | ||
|
|
707d0cb8a4 | ||
|
|
56ea7594ce | ||
|
|
389e46c251 | ||
|
|
6db17e682a | ||
|
|
94e0308a12 | ||
|
|
1f9f821576 | ||
|
|
57933dfba6 | ||
|
|
c50bee7757 | ||
|
|
4e3ee843f9 | ||
|
|
7e40f6fcb9 | ||
|
|
7976956b6b | ||
|
|
adce5293d5 | ||
|
|
c2db5eb6df | ||
|
|
f958ecdf18 | ||
|
|
ef0bcc6cf1 | ||
|
|
285428ad3a | ||
|
|
ee18cff3d9 | ||
|
|
1be3235564 | ||
|
|
a92883509a | ||
|
|
ce42d83ce9 | ||
|
|
077cf7b574 | ||
|
|
b99d78bda6 | ||
|
|
39586f4a20 | ||
|
|
4ef750b206 | ||
|
|
9d3d93823d | ||
|
|
45c1113b72 | ||
|
|
e10717dcda | ||
|
|
315ab6f70b | ||
|
|
cf4d654c4b | ||
|
|
569c829709 | ||
|
|
de05b59f29 | ||
|
|
70a282a6c0 | ||
|
|
b10bcf7e78 | ||
|
|
5fb10263f3 | ||
|
|
9e76c9783e | ||
|
|
7770976513 | ||
|
|
dc1f7ab6fe | ||
|
|
32b1d6c561 | ||
|
|
5264e49f2a | ||
|
|
ce3adaf831 | ||
|
|
e2f3e57f5c | ||
|
|
5c2349ff42 | ||
|
|
50eee8c373 | ||
|
|
f89b792535 | ||
|
|
6d0ea2841c | ||
|
|
98678a8698 | ||
|
|
5326fa2970 | ||
|
|
90547670a2 | ||
|
|
4753206c52 | ||
|
|
613aa3b1c3 | ||
|
|
a6b704d4b4 | ||
|
|
227d06c736 | ||
|
|
8508763831 | ||
|
|
136d3153fa | ||
|
|
49bdf77040 | ||
|
|
f4dcd89835 | ||
|
|
139e915711 | ||
|
|
22eda58074 | ||
|
|
fb91cf4df2 | ||
|
|
e0332571da | ||
|
|
2d4bc47746 | ||
|
|
38e766484e | ||
|
|
b5ee4a6408 | ||
|
|
7892df21ec | ||
|
|
188fe407b6 | ||
|
|
600afdcd92 | ||
|
|
994fa4bd43 | ||
|
|
51098f2829 | ||
|
|
795b9e8418 | ||
|
|
9ca2b9dd56 | ||
|
|
d77b6d78b7 | ||
|
|
427e7a36d5 | ||
|
|
c90306cc9b | ||
|
|
5fe0660c64 | ||
|
|
2abb5bf122 | ||
|
|
bb65527469 | ||
|
|
d9a6db3359 | ||
|
|
58cafdb713 | ||
|
|
0594e278b6 | ||
|
|
807425f12a | ||
|
|
aa4b1ccc25 | ||
|
|
58255ec28b | ||
|
|
d62b84693d | ||
|
|
df75c7e68d | ||
|
|
c5c7fdf54f | ||
|
|
49e0deeff3 | ||
|
|
0c20701bef | ||
|
|
faa26651dd | ||
|
|
2eae8a7729 | ||
|
|
dde2b2a960 | ||
|
|
4a9089d3dd | ||
|
|
3244a5f1a1 | ||
|
|
449c1e9d10 | ||
|
|
d0aa916683 | ||
|
|
13433f8cd2 | ||
|
|
8d336320c0 | ||
|
|
d945c58d51 | ||
|
|
acaf122346 | ||
|
|
713759b411 | ||
|
|
c5175bb870 | ||
|
|
e63ef8d031 | ||
|
|
e043537241 | ||
|
|
46126f9950 | ||
|
|
f4eb916914 | ||
|
|
49b9b7a5ea | ||
|
|
9b1a9ee071 | ||
|
|
0b8f137a1b | ||
|
|
6148a12301 | ||
|
|
fadbf21b4f | ||
|
|
c38a06937d | ||
|
|
1a34403b0e | ||
|
|
e4d58d0f60 | ||
|
|
4e4ea85cc3 | ||
|
|
f7a856349a | ||
|
|
15edd7a42c | ||
|
|
46243a236d | ||
|
|
6f382e587a | ||
|
|
bf3d706bf4 | ||
|
|
cdf21e813c | ||
|
|
10f5588e4a | ||
|
|
0ecbdf6f39 | ||
|
|
61101a7ad0 | ||
|
|
6d9be814a5 | ||
|
|
52bf93e430 | ||
|
|
00fade756c | ||
|
|
3c0feb23ba | ||
|
|
3627840fe9 | ||
|
|
bbdc1bba87 | ||
|
|
21a1bc1a01 |
4
.github/FUNDING.yml
vendored
4
.github/FUNDING.yml
vendored
@@ -1,5 +1,5 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
patreon: PixelPawsAI
|
||||
ko_fi: pixelpawsai
|
||||
custom: ['paypal.me/pixelpawsai']
|
||||
patreon: PixelPawsAI
|
||||
custom: ['paypal.me/pixelpawsai', 'https://afdian.com/a/pixelpawsai']
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -9,3 +9,4 @@ civitai/
|
||||
node_modules/
|
||||
coverage/
|
||||
.coverage
|
||||
model_cache/
|
||||
|
||||
55
README.md
55
README.md
@@ -34,6 +34,23 @@ Enhance your Civitai browsing experience with our companion browser extension! S
|
||||
|
||||
## Release Notes
|
||||
|
||||
### v0.9.10
|
||||
* **Smarter Update Matching** - Users can now choose to check and group updates by matching base model only or with no base-model constraint; version lists also support toggling between same-base versions or all versions.
|
||||
* **Flexible Tag Filtering** - The filter panel now supports tag exclusion: click a tag to include, click again to exclude, and click a third time to clear, enabling stronger and more flexible tag filters.
|
||||
* **License Visibility & Controls** - Model detail headers and ComfyUI preview popups now show Civitai license icons. The filter panel gains license include/exclude options, and a new global context menu action, "Refresh license metadata," fetches missing license data.
|
||||
* **Recipe Improvements** - Recipes now allow importing with zero LoRAs, and recipe detail pages show the related checkpoint for easier reference.
|
||||
* **Better ZIP Downloads** - When downloading models packaged in ZIPs, model files are extracted into the target model folder; ZIPs containing multiple model files (e.g., WanVideo high/low LoRA pairs) are added as separate models.
|
||||
* **Template Workflow Update** - Refreshed the "Illustrious Pony Example" template workflow with usage guidance for each LoRA Manager node.
|
||||
* **Bug Fixes & Stability** - General fixes and stability improvements.
|
||||
|
||||
### v0.9.9
|
||||
* **Check for Updates Feature** - Users can now check for updates for all models or selected models in bulk mode. Models with available updates will display an "update available" badge on their model card, and users can filter to show only models with updates.
|
||||
* **Model Versions Management** - Added a new Versions tab in the model modal that centralizes all versions of a model, providing download, delete, and ignore update functions.
|
||||
* **Send Checkpoint to ComfyUI** - Users can now click the send button on a checkpoint card to send the checkpoint directly to the current workflow's checkpoint or diffusion model loader node in ComfyUI.
|
||||
* **Customizable Model Card Display** - Added a new setting that allows users to choose whether to display the model name or filename on model cards.
|
||||
* **New Path Template Placeholders** - Added new path template placeholders: `{model_name}` and `{version_name}` for more flexible organization.
|
||||
* **ComfyUI Auto Path Correction Setting** - Added a new setting within ComfyUI to enable or disable the auto path correction feature.
|
||||
|
||||
### v0.9.8
|
||||
* **Full CivArchive API Support** - Added complete support for the CivArchive API as a fallback metadata source beyond Civitai API. Models deleted from Civitai can now still retrieve metadata through the CivArchive API.
|
||||
* **Download Models from CivArchive** - Added support for downloading models directly from CivArchive, similar to downloading from Civitai. Simply click the Download button and paste the model URL to download the corresponding model.
|
||||
@@ -63,34 +80,6 @@ Enhance your Civitai browsing experience with our companion browser extension! S
|
||||
* **Automatic Filename Conflict Resolution** - Implemented automatic file renaming (`original name + short hash`) to prevent conflicts when downloading or moving models.
|
||||
* **Performance Optimizations & Bug Fixes** - Various performance improvements and bug fixes for a more stable and responsive experience.
|
||||
|
||||
### v0.8.30
|
||||
* **Automatic Model Path Correction** - Added auto-correction for model paths in built-in nodes such as Load Checkpoint, Load Diffusion Model, Load LoRA, and other custom nodes with similar functionality. Workflows containing outdated or incorrect model paths will now be automatically updated to reflect the current location of your models.
|
||||
* **Node UI Enhancements** - Improved node interface for a smoother and more intuitive user experience.
|
||||
* **Bug Fixes** - Addressed various bugs to enhance stability and reliability.
|
||||
|
||||
### v0.8.29
|
||||
* **Enhanced Recipe Imports** - Improved recipe importing with new target folder selection, featuring path input autocomplete and interactive folder tree navigation. Added a "Use Default Path" option when downloading missing LoRAs.
|
||||
* **WanVideo Lora Select Node Update** - Updated the WanVideo Lora Select node with a 'merge_loras' option to match the counterpart node in the WanVideoWrapper node package.
|
||||
* **Autocomplete Conflict Resolution** - Resolved an autocomplete feature conflict in LoRA nodes with pysssss autocomplete.
|
||||
* **Improved Download Functionality** - Enhanced download functionality with resumable downloads and improved error handling.
|
||||
* **Bug Fixes** - Addressed several bugs for improved stability and performance.
|
||||
|
||||
### v0.8.28
|
||||
* **Autocomplete for Node Inputs** - Instantly find and add LoRAs by filename directly in Lora Loader, Lora Stacker, and WanVideo Lora Select nodes. Autocomplete suggestions include preview tooltips and preset weights, allowing you to quickly select LoRAs without opening the LoRA Manager UI.
|
||||
* **Duplicate Notification Control** - Added a switch to duplicates mode, enabling users to turn off duplicate model notifications for a more streamlined experience.
|
||||
* **Download Example Images from Context Menu** - Introduced a new context menu option to download example images for individual models.
|
||||
|
||||
### v0.8.27
|
||||
* **User Experience Enhancements** - Improved the model download target folder selection with path input autocomplete and interactive folder tree navigation, making it easier and faster to choose where models are saved.
|
||||
* **Default Path Option for Downloads** - Added a "Use Default Path" option when downloading models. When enabled, models are automatically organized and stored according to your configured path template settings.
|
||||
* **Advanced Download Path Templates** - Expanded path template settings, allowing users to set individual templates for LoRA, checkpoint, and embedding models for greater flexibility. Introduced the `{author}` placeholder, enabling automatic organization of model files by creator name.
|
||||
* **Bug Fixes & Stability Improvements** - Addressed various bugs and improved overall stability for a smoother experience.
|
||||
|
||||
### v0.8.26
|
||||
* **Creator Search Option** - Added ability to search models by creator name, making it easier to find models from specific authors.
|
||||
* **Enhanced Node Usability** - Improved user experience for Lora Loader, Lora Stacker, and WanVideo Lora Select nodes by fixing the maximum height of the text input area. Users can now freely and conveniently adjust the LoRA region within these nodes.
|
||||
* **Compatibility Fixes** - Resolved compatibility issues with ComfyUI and certain custom nodes, including ComfyUI-Custom-Scripts, ensuring smoother integration and operation.
|
||||
|
||||
[View Update History](./update_logs.md)
|
||||
|
||||
---
|
||||
@@ -148,9 +137,10 @@ Enhance your Civitai browsing experience with our companion browser extension! S
|
||||
|
||||
### Option 2: **Portable Standalone Edition** (No ComfyUI required)
|
||||
|
||||
1. Download the [Portable Package](https://github.com/willmiao/ComfyUI-Lora-Manager/releases/download/v0.9.2/lora_manager_portable.7z)
|
||||
2. Copy the provided `settings.json.example` file to create a new file named `settings.json` in `comfyui-lora-manager` folder
|
||||
1. Download the [Portable Package](https://github.com/willmiao/ComfyUI-Lora-Manager/releases/download/v0.9.8/lora_manager_portable.7z)
|
||||
2. Copy the provided `settings.json.example` file to create a new file named `settings.json` in `comfyui-lora-manager` folder.
|
||||
3. Edit the new `settings.json` to include your correct model folder paths and CivitAI API key
|
||||
- Set `"use_portable_settings": true` if you want the configuration to remain inside the repository folder instead of your user settings directory.
|
||||
4. Run run.bat
|
||||
- To change the startup port, edit `run.bat` and modify the parameter (e.g. `--port 9001`)
|
||||
|
||||
@@ -231,8 +221,9 @@ You can now run LoRA Manager independently from ComfyUI:
|
||||
```
|
||||
|
||||
2. **For non-ComfyUI users**:
|
||||
- Copy the provided `settings.json.example` file to create a new file named `settings.json`
|
||||
- Edit `settings.json` to include your correct model folder paths and CivitAI API key
|
||||
- Copy the provided `settings.json.example` file to create a new file named `settings.json`. Update the API key, optional language, and folder paths only—the library registry is created automatically when LoRA Manager starts.
|
||||
- Edit `settings.json` to include your correct model folder paths and CivitAI API key (you can leave the defaults until ready to configure them)
|
||||
- Enable portable mode by setting `"use_portable_settings": true` if you prefer LoRA Manager to read and write the `settings.json` located in the project directory.
|
||||
- Install required dependencies: `pip install -r requirements.txt`
|
||||
- Run standalone mode:
|
||||
```bash
|
||||
|
||||
@@ -21,7 +21,7 @@ This matrix captures the scenarios that Phase 3 frontend tests should cover for
|
||||
| ID | Feature | Scenario | LoRAs Expectations | Checkpoints Expectations | Notes |
|
||||
| --- | --- | --- | --- | --- | --- |
|
||||
| F-01 | Search filter | Typing a query updates `pageState.filters.search`, persists to session, and triggers `resetAndReload` on submit | Validate `SearchManager` writes query and reloads via API stub; confirm LoRA cards pass query downstream | Same as LoRAs | Cover `enter` press and clicking search icon |
|
||||
| F-02 | Tag filter | Selecting a tag chip adds it to filters, applies active styling, and reloads results | Tag stored under `filters.tags`; `FilterManager.applyFilters` persists and triggers `resetAndReload(true)` | Same; ensure base model tag set is scoped to checkpoints dataset | Include removal path |
|
||||
| F-02 | Tag filter | Selecting a tag chip cycles include ➜ exclude ➜ clear, updates storage, and reloads results | Tag state stored under `filters.tags[tagName] = 'include'|'exclude'`; `FilterManager.applyFilters` persists and triggers `resetAndReload(true)` | Same; ensure base model tag set is scoped to checkpoints dataset | Include removal path |
|
||||
| F-03 | Base model filter | Toggling base model checkboxes updates `filters.baseModel`, persists, and reloads | Ensure only LoRA-supported models show; toggle multi-select | Ensure SDXL/Flux base models appear as expected | Capture UI state restored from storage on next init |
|
||||
| F-04 | Favorites-only | Clicking favorites toggle updates session flag and calls `resetAndReload(true)` | Button gains `.active` class and API called | Same | Verify duplicates badge refresh when active |
|
||||
| F-05 | Sort selection | Changing sort select saves preference (legacy + new format) and reloads | Confirm `PageControls.saveSortPreference` invoked with option and API called | Same with checkpoints-specific defaults | Cover `convertLegacySortFormat` branch |
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 669 KiB |
File diff suppressed because one or more lines are too long
Binary file not shown.
|
Before Width: | Height: | Size: 669 KiB After Width: | Height: | Size: 668 KiB |
File diff suppressed because one or more lines are too long
194
locales/de.json
194
locales/de.json
@@ -101,7 +101,12 @@
|
||||
"checkpointNameCopied": "Checkpoint-Name kopiert",
|
||||
"toggleBlur": "Unschärfe umschalten",
|
||||
"show": "Anzeigen",
|
||||
"openExampleImages": "Beispielbilder-Ordner öffnen"
|
||||
"openExampleImages": "Beispielbilder-Ordner öffnen",
|
||||
"replacePreview": "Vorschau ersetzen",
|
||||
"copyCheckpointName": "Checkpoint-Name kopieren",
|
||||
"copyEmbeddingName": "Embedding-Name kopieren",
|
||||
"sendCheckpointToWorkflow": "An ComfyUI senden",
|
||||
"sendEmbeddingToWorkflow": "An ComfyUI senden"
|
||||
},
|
||||
"nsfw": {
|
||||
"matureContent": "Nicht jugendfreie Inhalte",
|
||||
@@ -115,12 +120,17 @@
|
||||
"updateFailed": "Fehler beim Aktualisieren des Favoriten-Status"
|
||||
},
|
||||
"sendToWorkflow": {
|
||||
"checkpointNotImplemented": "Checkpoint an Workflow senden - Funktion wird implementiert"
|
||||
"checkpointNotImplemented": "Checkpoint an Workflow senden - Funktion wird implementiert",
|
||||
"missingPath": "Modellpfad für diese Karte konnte nicht ermittelt werden"
|
||||
},
|
||||
"exampleImages": {
|
||||
"checkError": "Fehler beim Überprüfen der Beispielbilder",
|
||||
"missingHash": "Fehlende Modell-Hash-Informationen.",
|
||||
"noRemoteImagesAvailable": "Keine Remote-Beispielbilder für dieses Modell auf Civitai verfügbar"
|
||||
},
|
||||
"badges": {
|
||||
"update": "Update",
|
||||
"updateAvailable": "Update verfügbar"
|
||||
}
|
||||
},
|
||||
"globalContextMenu": {
|
||||
@@ -129,12 +139,26 @@
|
||||
"missingPath": "Bitte legen Sie einen Speicherort fest, bevor Sie Beispielbilder herunterladen.",
|
||||
"unavailable": "Beispielbild-Downloads sind noch nicht verfügbar. Versuchen Sie es erneut, nachdem die Seite vollständig geladen ist."
|
||||
},
|
||||
"checkModelUpdates": {
|
||||
"label": "Auf Updates prüfen",
|
||||
"loading": "Prüfe auf {type}-Updates...",
|
||||
"success": "{count} Update(s) für {type} gefunden",
|
||||
"none": "Alle {type} sind auf dem neuesten Stand",
|
||||
"error": "Fehler beim Prüfen auf {type}-Updates: {message}"
|
||||
},
|
||||
"cleanupExampleImages": {
|
||||
"label": "Beispielbild-Ordner bereinigen",
|
||||
"success": "{count} Ordner wurden in den Papierkorb verschoben",
|
||||
"none": "Keine Beispielbild-Ordner mussten bereinigt werden",
|
||||
"partial": "Bereinigung abgeschlossen, {failures} Ordner übersprungen",
|
||||
"error": "Fehler beim Bereinigen der Beispielbild-Ordner: {message}"
|
||||
},
|
||||
"fetchMissingLicenses": {
|
||||
"label": "Refresh license metadata",
|
||||
"loading": "Refreshing license metadata for {typePlural}...",
|
||||
"success": "Updated license metadata for {count} {typePlural}",
|
||||
"none": "All {typePlural} already have license metadata",
|
||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||
}
|
||||
},
|
||||
"header": {
|
||||
@@ -171,6 +195,10 @@
|
||||
"title": "Modelle filtern",
|
||||
"baseModel": "Basis-Modell",
|
||||
"modelTags": "Tags (Top 20)",
|
||||
"modelTypes": "Model Types",
|
||||
"license": "Lizenz",
|
||||
"noCreditRequired": "Kein Credit erforderlich",
|
||||
"allowSellingGeneratedContent": "Verkauf erlaubt",
|
||||
"clearAll": "Alle Filter löschen"
|
||||
},
|
||||
"theme": {
|
||||
@@ -181,6 +209,7 @@
|
||||
},
|
||||
"actions": {
|
||||
"checkUpdates": "Updates prüfen",
|
||||
"notifications": "Benachrichtigungen",
|
||||
"support": "Unterstützung"
|
||||
}
|
||||
},
|
||||
@@ -202,10 +231,17 @@
|
||||
"priorityTags": "Prioritäts-Tags",
|
||||
"downloadPathTemplates": "Download-Pfad-Vorlagen",
|
||||
"exampleImages": "Beispielbilder",
|
||||
"updateFlags": "Update-Markierungen",
|
||||
"autoOrganize": "Auto-organize",
|
||||
"misc": "Verschiedenes",
|
||||
"metadataArchive": "Metadaten-Archiv-Datenbank",
|
||||
"storageLocation": "Einstellungsort",
|
||||
"proxySettings": "Proxy-Einstellungen"
|
||||
},
|
||||
"storage": {
|
||||
"locationLabel": "Portabler Modus",
|
||||
"locationHelp": "Aktiviere, um settings.json im Repository zu belassen; deaktiviere, um es im Benutzerkonfigurationsordner zu speichern."
|
||||
},
|
||||
"contentFiltering": {
|
||||
"blurNsfwContent": "NSFW-Inhalte unscharf stellen",
|
||||
"blurNsfwContentHelp": "Nicht jugendfreie (NSFW) Vorschaubilder unscharf stellen",
|
||||
@@ -216,6 +252,15 @@
|
||||
"autoplayOnHover": "Videos bei Hover automatisch abspielen",
|
||||
"autoplayOnHoverHelp": "Video-Vorschauen nur beim Darüberfahren mit der Maus abspielen"
|
||||
},
|
||||
"autoOrganizeExclusions": {
|
||||
"label": "Auto-Organisierungs-Ausnahmen",
|
||||
"placeholder": "Beispiel: curated/*, */backups/*; *_temp.safetensors",
|
||||
"help": "Dateien überspringen, die mit diesen Wildcard-Mustern übereinstimmen. Mehrere Muster mit Kommas oder Semikolons trennen.",
|
||||
"validation": {
|
||||
"noPatterns": "Geben Sie mindestens ein Muster ein, getrennt durch Kommas oder Semikolons.",
|
||||
"saveFailed": "Fehler beim Speichern der Ausschlüsse: {message}"
|
||||
}
|
||||
},
|
||||
"layoutSettings": {
|
||||
"displayDensity": "Anzeige-Dichte",
|
||||
"displayDensityOptions": {
|
||||
@@ -230,26 +275,26 @@
|
||||
"compact": "7 (1080p), 8 (2K), 10 (4K)"
|
||||
},
|
||||
"displayDensityWarning": "Warnung: Höhere Dichten können bei Systemen mit begrenzten Ressourcen zu Performance-Problemen führen.",
|
||||
"showFolderSidebar": "Ordner-Seitenleiste anzeigen",
|
||||
"showFolderSidebarHelp": "Blenden Sie die Ordner-Navigationsleiste auf den Modellseiten ein oder aus. Wenn deaktiviert, bleiben Seitenleiste und Hoverbereich verborgen.",
|
||||
"cardInfoDisplay": "Karten-Info-Anzeige",
|
||||
"cardInfoDisplayOptions": {
|
||||
"always": "Immer sichtbar",
|
||||
"hover": "Bei Hover anzeigen"
|
||||
},
|
||||
"cardInfoDisplayHelp": "Wählen Sie, wann Modellinformationen und Aktionsschaltflächen angezeigt werden sollen:",
|
||||
"cardInfoDisplayDetails": {
|
||||
"always": "Kopf- und Fußzeilen sind immer sichtbar",
|
||||
"hover": "Kopf- und Fußzeilen erscheinen nur beim Darüberfahren mit der Maus"
|
||||
"cardInfoDisplayHelp": "Wählen Sie, wann Modellinformationen und Aktionsschaltflächen angezeigt werden sollen",
|
||||
"modelCardFooterAction": "Aktion der Modellkarten-Schaltfläche",
|
||||
"modelCardFooterActionOptions": {
|
||||
"exampleImages": "Beispielbilder öffnen",
|
||||
"replacePreview": "Vorschau ersetzen"
|
||||
},
|
||||
"modelCardFooterActionHelp": "Wähle aus, was die Schaltfläche unten rechts auf der Karte ausführt",
|
||||
"modelNameDisplay": "Anzeige des Modellnamens",
|
||||
"modelNameDisplayOptions": {
|
||||
"modelName": "Modellname",
|
||||
"fileName": "Dateiname"
|
||||
},
|
||||
"modelNameDisplayHelp": "Wählen Sie aus, was in der Fußzeile der Modellkarte angezeigt werden soll:",
|
||||
"modelNameDisplayDetails": {
|
||||
"modelName": "Den beschreibenden Namen des Modells anzeigen",
|
||||
"fileName": "Den tatsächlichen Dateinamen auf der Festplatte anzeigen"
|
||||
}
|
||||
"modelNameDisplayHelp": "Wählen Sie aus, was in der Fußzeile der Modellkarte angezeigt werden soll"
|
||||
},
|
||||
"folderSettings": {
|
||||
"activeLibrary": "Aktive Bibliothek",
|
||||
@@ -331,6 +376,14 @@
|
||||
"download": "Herunterladen",
|
||||
"restartRequired": "Neustart erforderlich"
|
||||
},
|
||||
"updateFlagStrategy": {
|
||||
"label": "Strategie für Update-Markierungen",
|
||||
"help": "Entscheide, ob Update-Badges nur dann erscheinen, wenn eine neue Version dasselbe Basismodell wie deine lokalen Dateien verwendet, oder sobald es irgendein neueres Release für dieses Modell gibt.",
|
||||
"options": {
|
||||
"sameBase": "Updates nach Basismodell abgleichen",
|
||||
"any": "Jede verfügbare Aktualisierung markieren"
|
||||
}
|
||||
},
|
||||
"misc": {
|
||||
"includeTriggerWords": "Trigger Words in LoRA-Syntax einschließen",
|
||||
"includeTriggerWordsHelp": "Trainierte Trigger Words beim Kopieren der LoRA-Syntax in die Zwischenablage einschließen"
|
||||
@@ -394,8 +447,10 @@
|
||||
},
|
||||
"refresh": {
|
||||
"title": "Modelliste aktualisieren",
|
||||
"quick": "Schnelle Aktualisierung (inkrementell)",
|
||||
"full": "Vollständiger Neuaufbau (komplett)"
|
||||
"quick": "Änderungen synchronisieren",
|
||||
"quickTooltip": "Nach neuen oder fehlenden Modelldateien suchen, damit die Liste aktuell bleibt.",
|
||||
"full": "Cache neu aufbauen",
|
||||
"fullTooltip": "Alle Modelldetails aus Metadatendateien neu laden – nutzen, wenn die Bibliothek veraltet wirkt oder nach manuellen Änderungen."
|
||||
},
|
||||
"fetch": {
|
||||
"title": "Metadaten von Civitai abrufen",
|
||||
@@ -416,6 +471,13 @@
|
||||
"favorites": {
|
||||
"title": "Nur Favoriten anzeigen",
|
||||
"action": "Favoriten"
|
||||
},
|
||||
"updates": {
|
||||
"title": "Nur Modelle mit verfügbaren Updates anzeigen",
|
||||
"action": "Updates",
|
||||
"menuLabel": "Weitere Update-Optionen anzeigen",
|
||||
"check": "Updates prüfen",
|
||||
"checkTooltip": "Die Aktualisierungssuche kann einige Zeit dauern."
|
||||
}
|
||||
},
|
||||
"bulkOperations": {
|
||||
@@ -427,6 +489,7 @@
|
||||
"setContentRating": "Inhaltsbewertung für alle festlegen",
|
||||
"copyAll": "Alle Syntax kopieren",
|
||||
"refreshAll": "Alle Metadaten aktualisieren",
|
||||
"checkUpdates": "Auswahl auf Updates prüfen",
|
||||
"moveAll": "Alle in Ordner verschieben",
|
||||
"autoOrganize": "Automatisch organisieren",
|
||||
"deleteAll": "Alle Modelle löschen",
|
||||
@@ -443,6 +506,7 @@
|
||||
},
|
||||
"contextMenu": {
|
||||
"refreshMetadata": "Civitai-Daten aktualisieren",
|
||||
"checkUpdates": "Updates prüfen",
|
||||
"relinkCivitai": "Mit Civitai neu verknüpfen",
|
||||
"copySyntax": "LoRA-Syntax kopieren",
|
||||
"copyFilename": "Modell-Dateiname kopieren",
|
||||
@@ -464,6 +528,9 @@
|
||||
},
|
||||
"recipes": {
|
||||
"title": "LoRA-Rezepte",
|
||||
"actions": {
|
||||
"sendCheckpoint": "Send to ComfyUI"
|
||||
},
|
||||
"controls": {
|
||||
"import": {
|
||||
"action": "Importieren",
|
||||
@@ -702,6 +769,12 @@
|
||||
"countMessage": "Modelle werden dauerhaft gelöscht.",
|
||||
"action": "Alle löschen"
|
||||
},
|
||||
"checkUpdates": {
|
||||
"title": "Alle {typePlural} auf Updates prüfen?",
|
||||
"message": "Damit werden alle {typePlural} in deiner Bibliothek auf Updates geprüft. Bei großen Sammlungen kann das etwas länger dauern.",
|
||||
"tip": "Du möchtest in Etappen prüfen? Wechsle in den Sammelmodus, wähle die benötigten Modelle aus und nutze anschließend \"Auswahl auf Updates prüfen\".",
|
||||
"action": "Alles prüfen"
|
||||
},
|
||||
"bulkAddTags": {
|
||||
"title": "Tags zu mehreren Modellen hinzufügen",
|
||||
"description": "Tags hinzufügen zu",
|
||||
@@ -838,13 +911,77 @@
|
||||
"tabs": {
|
||||
"examples": "Beispiele",
|
||||
"description": "Modellbeschreibung",
|
||||
"recipes": "Rezepte"
|
||||
"recipes": "Rezepte",
|
||||
"versions": "Versionen"
|
||||
},
|
||||
"license": {
|
||||
"noImageSell": "No selling generated content",
|
||||
"noRentCivit": "No Civitai generation",
|
||||
"noRent": "No generation services",
|
||||
"noSell": "No selling models",
|
||||
"creditRequired": "Ersteller-Angabe erforderlich",
|
||||
"noDerivatives": "Keine gemeinsamen Zusammenführungen",
|
||||
"noReLicense": "Gleiche Berechtigungen erforderlich",
|
||||
"restrictionsLabel": "Lizenzbeschränkungen"
|
||||
},
|
||||
"loading": {
|
||||
"exampleImages": "Beispielbilder werden geladen...",
|
||||
"description": "Modellbeschreibung wird geladen...",
|
||||
"recipes": "Rezepte werden geladen...",
|
||||
"examples": "Beispiele werden geladen..."
|
||||
"examples": "Beispiele werden geladen...",
|
||||
"versions": "Versionen werden geladen..."
|
||||
},
|
||||
"versions": {
|
||||
"heading": "Modellversionen",
|
||||
"copy": "Verwalten Sie alle Versionen dieses Modells an einem Ort.",
|
||||
"media": {
|
||||
"placeholder": "Keine Vorschau"
|
||||
},
|
||||
"labels": {
|
||||
"unnamed": "Unbenannte Version",
|
||||
"noDetails": "Keine zusätzlichen Details"
|
||||
},
|
||||
"badges": {
|
||||
"current": "Aktuelle Version",
|
||||
"inLibrary": "In der Bibliothek",
|
||||
"newer": "Neuere Version",
|
||||
"ignored": "Ignoriert"
|
||||
},
|
||||
"actions": {
|
||||
"download": "Herunterladen",
|
||||
"delete": "Löschen",
|
||||
"ignore": "Ignorieren",
|
||||
"unignore": "Ignorierung aufheben",
|
||||
"resumeModelUpdates": "Aktualisierungen für dieses Modell fortsetzen",
|
||||
"ignoreModelUpdates": "Aktualisierungen für dieses Modell ignorieren",
|
||||
"viewLocalVersions": "Alle lokalen Versionen anzeigen",
|
||||
"viewLocalTooltip": "Demnächst verfügbar"
|
||||
},
|
||||
"filters": {
|
||||
"label": "Basisfilter",
|
||||
"state": {
|
||||
"showAll": "Alle Versionen",
|
||||
"showSameBase": "Gleiches Basismodell"
|
||||
},
|
||||
"tooltip": {
|
||||
"showAllVersions": "Wechseln, um alle Versionen anzuzeigen",
|
||||
"showSameBaseVersions": "Wechseln, um nur Versionen mit demselben Basismodell anzuzeigen"
|
||||
},
|
||||
"empty": "Keine Versionen entsprechen dem Filter für das aktuelle Basismodell."
|
||||
},
|
||||
"empty": "Noch keine Versionshistorie für dieses Modell vorhanden.",
|
||||
"error": "Versionen konnten nicht geladen werden.",
|
||||
"missingModelId": "Für dieses Modell ist keine Civitai-Model-ID vorhanden.",
|
||||
"confirm": {
|
||||
"delete": "Diese Version aus Ihrer Bibliothek löschen?"
|
||||
},
|
||||
"toast": {
|
||||
"modelIgnored": "Aktualisierungen für dieses Modell werden ignoriert",
|
||||
"modelResumed": "Aktualisierungen für dieses Modell werden wieder geprüft",
|
||||
"versionIgnored": "Aktualisierungen für diese Version werden ignoriert",
|
||||
"versionUnignored": "Version wurde wieder aktiviert",
|
||||
"versionDeleted": "Version gelöscht"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -951,7 +1088,9 @@
|
||||
"loraFailedToSend": "Fehler beim Senden der LoRA an den Workflow",
|
||||
"recipeAdded": "Rezept zum Workflow hinzugefügt",
|
||||
"recipeReplaced": "Rezept im Workflow ersetzt",
|
||||
"recipeFailedToSend": "Fehler beim Senden des Rezepts an den Workflow"
|
||||
"recipeFailedToSend": "Fehler beim Senden des Rezepts an den Workflow",
|
||||
"noMatchingNodes": "Keine kompatiblen Knoten im aktuellen Workflow verfügbar",
|
||||
"noTargetNodeSelected": "Kein Zielknoten ausgewählt"
|
||||
},
|
||||
"nodeSelector": {
|
||||
"recipe": "Rezept",
|
||||
@@ -996,6 +1135,11 @@
|
||||
},
|
||||
"update": {
|
||||
"title": "Nach Updates suchen",
|
||||
"notificationsTitle": "Benachrichtigungszentrum",
|
||||
"tabs": {
|
||||
"updates": "Aktualisierungen",
|
||||
"messages": "Mitteilungen"
|
||||
},
|
||||
"updateAvailable": "Update verfügbar",
|
||||
"noChangelogAvailable": "Kein detailliertes Changelog verfügbar. Weitere Informationen auf GitHub.",
|
||||
"currentVersion": "Aktuelle Version",
|
||||
@@ -1027,6 +1171,13 @@
|
||||
"nightly": {
|
||||
"warning": "Warnung: Nightly Builds können experimentelle Funktionen enthalten und könnten instabil sein.",
|
||||
"enable": "Nightly Updates aktivieren"
|
||||
},
|
||||
"banners": {
|
||||
"recent": "Neueste Mitteilungen",
|
||||
"empty": "Keine aktuellen Banner verfügbar.",
|
||||
"shown": "{time} angezeigt",
|
||||
"dismissed": "{time} geschlossen",
|
||||
"active": "Aktiv"
|
||||
}
|
||||
},
|
||||
"support": {
|
||||
@@ -1106,6 +1257,9 @@
|
||||
"cannotSend": "Kann Rezept nicht senden: Fehlende Rezept-ID",
|
||||
"sendFailed": "Fehler beim Senden des Rezepts an Workflow",
|
||||
"sendError": "Fehler beim Senden des Rezepts an Workflow",
|
||||
"missingCheckpointPath": "Checkpoint-Pfad nicht verfügbar",
|
||||
"missingCheckpointInfo": "Checkpoint-Informationen fehlen",
|
||||
"downloadCheckpointFailed": "Checkpoint-Download fehlgeschlagen: {message}",
|
||||
"cannotDelete": "Kann Rezept nicht löschen: Fehlende Rezept-ID",
|
||||
"deleteConfirmationError": "Fehler beim Anzeigen der Löschbestätigung",
|
||||
"deletedSuccessfully": "Rezept erfolgreich gelöscht",
|
||||
@@ -1146,6 +1300,12 @@
|
||||
"bulkContentRatingSet": "Inhaltsbewertung auf {level} für {count} Modell(e) gesetzt",
|
||||
"bulkContentRatingPartial": "Inhaltsbewertung auf {level} für {success} Modell(e) gesetzt, {failed} fehlgeschlagen",
|
||||
"bulkContentRatingFailed": "Inhaltsbewertung für ausgewählte Modelle konnte nicht aktualisiert werden",
|
||||
"bulkUpdatesChecking": "Ausgewählte {type}-Modelle werden auf Updates geprüft...",
|
||||
"bulkUpdatesSuccess": "Updates für {count} ausgewählte {type}-Modelle verfügbar",
|
||||
"bulkUpdatesNone": "Keine Updates für ausgewählte {type}-Modelle gefunden",
|
||||
"bulkUpdatesMissing": "Ausgewählte {type}-Modelle sind nicht mit Civitai-Updates verknüpft",
|
||||
"bulkUpdatesPartialMissing": "{missing} ausgewählte {type}-Modelle ohne Civitai-Verknüpfung übersprungen",
|
||||
"bulkUpdatesFailed": "Updates für ausgewählte {type}-Modelle konnten nicht geprüft werden: {message}",
|
||||
"invalidCharactersRemoved": "Ungültige Zeichen aus Dateiname entfernt",
|
||||
"filenameCannotBeEmpty": "Dateiname darf nicht leer sein",
|
||||
"renameFailed": "Fehler beim Umbenennen der Datei: {message}",
|
||||
@@ -1206,7 +1366,7 @@
|
||||
},
|
||||
"triggerWords": {
|
||||
"loadFailed": "Konnte trainierte Wörter nicht laden",
|
||||
"tooLong": "Trigger Word sollte 30 Wörter nicht überschreiten",
|
||||
"tooLong": "Trigger Word sollte 100 Wörter nicht überschreiten",
|
||||
"tooMany": "Maximal 30 Trigger Words erlaubt",
|
||||
"alreadyExists": "Dieses Trigger Word existiert bereits",
|
||||
"updateSuccess": "Trigger Words erfolgreich aktualisiert",
|
||||
|
||||
196
locales/en.json
196
locales/en.json
@@ -101,7 +101,12 @@
|
||||
"checkpointNameCopied": "Checkpoint name copied",
|
||||
"toggleBlur": "Toggle blur",
|
||||
"show": "Show",
|
||||
"openExampleImages": "Open Example Images Folder"
|
||||
"openExampleImages": "Open Example Images Folder",
|
||||
"replacePreview": "Replace Preview",
|
||||
"copyCheckpointName": "Copy checkpoint name",
|
||||
"copyEmbeddingName": "Copy embedding name",
|
||||
"sendCheckpointToWorkflow": "Send to ComfyUI",
|
||||
"sendEmbeddingToWorkflow": "Send to ComfyUI"
|
||||
},
|
||||
"nsfw": {
|
||||
"matureContent": "Mature Content",
|
||||
@@ -115,12 +120,17 @@
|
||||
"updateFailed": "Failed to update favorite status"
|
||||
},
|
||||
"sendToWorkflow": {
|
||||
"checkpointNotImplemented": "Send checkpoint to workflow - feature to be implemented"
|
||||
"checkpointNotImplemented": "Send checkpoint to workflow - feature to be implemented",
|
||||
"missingPath": "Unable to determine model path for this card"
|
||||
},
|
||||
"exampleImages": {
|
||||
"checkError": "Error checking for example images",
|
||||
"missingHash": "Missing model hash information.",
|
||||
"noRemoteImagesAvailable": "No remote example images available for this model on Civitai"
|
||||
},
|
||||
"badges": {
|
||||
"update": "Update",
|
||||
"updateAvailable": "Update available"
|
||||
}
|
||||
},
|
||||
"globalContextMenu": {
|
||||
@@ -129,12 +139,26 @@
|
||||
"missingPath": "Set a download location before downloading example images.",
|
||||
"unavailable": "Example image downloads aren't available yet. Try again after the page finishes loading."
|
||||
},
|
||||
"checkModelUpdates": {
|
||||
"label": "Check for updates",
|
||||
"loading": "Checking for {type} updates...",
|
||||
"success": "Found {count} update(s) for {type}s",
|
||||
"none": "All {type}s are up to date",
|
||||
"error": "Failed to check for {type} updates: {message}"
|
||||
},
|
||||
"cleanupExampleImages": {
|
||||
"label": "Clean up example image folders",
|
||||
"success": "Moved {count} folder(s) to the deleted folder",
|
||||
"none": "No example image folders needed cleanup",
|
||||
"partial": "Cleanup completed with {failures} folder(s) skipped",
|
||||
"error": "Failed to clean example image folders: {message}"
|
||||
},
|
||||
"fetchMissingLicenses": {
|
||||
"label": "Refresh license metadata",
|
||||
"loading": "Refreshing license metadata for {typePlural}...",
|
||||
"success": "Updated license metadata for {count} {typePlural}",
|
||||
"none": "All {typePlural} already have license metadata",
|
||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||
}
|
||||
},
|
||||
"header": {
|
||||
@@ -171,6 +195,10 @@
|
||||
"title": "Filter Models",
|
||||
"baseModel": "Base Model",
|
||||
"modelTags": "Tags (Top 20)",
|
||||
"modelTypes": "Model Types",
|
||||
"license": "License",
|
||||
"noCreditRequired": "No Credit Required",
|
||||
"allowSellingGeneratedContent": "Allow Selling",
|
||||
"clearAll": "Clear All Filters"
|
||||
},
|
||||
"theme": {
|
||||
@@ -181,6 +209,7 @@
|
||||
},
|
||||
"actions": {
|
||||
"checkUpdates": "Check Updates",
|
||||
"notifications": "Notifications",
|
||||
"support": "Support"
|
||||
}
|
||||
},
|
||||
@@ -202,10 +231,17 @@
|
||||
"priorityTags": "Priority Tags",
|
||||
"downloadPathTemplates": "Download Path Templates",
|
||||
"exampleImages": "Example Images",
|
||||
"updateFlags": "Update Flags",
|
||||
"autoOrganize": "Auto-organize",
|
||||
"misc": "Misc.",
|
||||
"metadataArchive": "Metadata Archive Database",
|
||||
"storageLocation": "Settings Location",
|
||||
"proxySettings": "Proxy Settings"
|
||||
},
|
||||
"storage": {
|
||||
"locationLabel": "Portable mode",
|
||||
"locationHelp": "Enable to keep settings.json inside the repository; disable to store it in your user config directory."
|
||||
},
|
||||
"contentFiltering": {
|
||||
"blurNsfwContent": "Blur NSFW Content",
|
||||
"blurNsfwContentHelp": "Blur mature (NSFW) content preview images",
|
||||
@@ -216,11 +252,20 @@
|
||||
"autoplayOnHover": "Autoplay Videos on Hover",
|
||||
"autoplayOnHoverHelp": "Only play video previews when hovering over them"
|
||||
},
|
||||
"autoOrganizeExclusions": {
|
||||
"label": "Auto-organize exclusions",
|
||||
"placeholder": "Example: curated/*, */backups/*; *_temp.safetensors",
|
||||
"help": "Skip moving files that match these wildcard patterns. Separate multiple patterns with commas or semicolons.",
|
||||
"validation": {
|
||||
"noPatterns": "Enter at least one pattern separated by commas or semicolons.",
|
||||
"saveFailed": "Unable to save exclusions: {message}"
|
||||
}
|
||||
},
|
||||
"layoutSettings": {
|
||||
"displayDensity": "Display Density",
|
||||
"displayDensityOptions": {
|
||||
"default": "Default",
|
||||
"medium": "Medium",
|
||||
"medium": "Medium",
|
||||
"compact": "Compact"
|
||||
},
|
||||
"displayDensityHelp": "Choose how many cards to display per row:",
|
||||
@@ -230,26 +275,26 @@
|
||||
"compact": "7 (1080p), 8 (2K), 10 (4K)"
|
||||
},
|
||||
"displayDensityWarning": "Warning: Higher densities may cause performance issues on systems with limited resources.",
|
||||
"showFolderSidebar": "Show Folder Sidebar",
|
||||
"showFolderSidebarHelp": "Toggle the folder navigation sidebar on model pages. When disabled, the sidebar and hover area stay hidden.",
|
||||
"cardInfoDisplay": "Card Info Display",
|
||||
"cardInfoDisplayOptions": {
|
||||
"always": "Always Visible",
|
||||
"hover": "Reveal on Hover"
|
||||
},
|
||||
"cardInfoDisplayHelp": "Choose when to display model information and action buttons:",
|
||||
"cardInfoDisplayDetails": {
|
||||
"always": "Headers and footers are always visible",
|
||||
"hover": "Headers and footers only appear when hovering over a card"
|
||||
"cardInfoDisplayHelp": "Choose when to display model information and action buttons",
|
||||
"modelCardFooterAction": "Model Card Button Action",
|
||||
"modelCardFooterActionOptions": {
|
||||
"exampleImages": "Open Example Images",
|
||||
"replacePreview": "Replace Preview"
|
||||
},
|
||||
"modelCardFooterActionHelp": "Choose what the bottom-right card button does",
|
||||
"modelNameDisplay": "Model Name Display",
|
||||
"modelNameDisplayOptions": {
|
||||
"modelName": "Model Name",
|
||||
"fileName": "File Name"
|
||||
},
|
||||
"modelNameDisplayHelp": "Choose what to display in the model card footer:",
|
||||
"modelNameDisplayDetails": {
|
||||
"modelName": "Display the model's descriptive name",
|
||||
"fileName": "Display the actual file name on disk"
|
||||
}
|
||||
"modelNameDisplayHelp": "Choose what to display in the model card footer"
|
||||
},
|
||||
"folderSettings": {
|
||||
"activeLibrary": "Active Library",
|
||||
@@ -331,6 +376,14 @@
|
||||
"download": "Download",
|
||||
"restartRequired": "Requires restart"
|
||||
},
|
||||
"updateFlagStrategy": {
|
||||
"label": "Update Flag Strategy",
|
||||
"help": "Decide whether update badges should only appear when a new release shares the same base model as your local files or whenever any newer version exists for that model.",
|
||||
"options": {
|
||||
"sameBase": "Match updates by base model",
|
||||
"any": "Flag any available update"
|
||||
}
|
||||
},
|
||||
"misc": {
|
||||
"includeTriggerWords": "Include Trigger Words in LoRA Syntax",
|
||||
"includeTriggerWordsHelp": "Include trained trigger words when copying LoRA syntax to clipboard"
|
||||
@@ -394,8 +447,10 @@
|
||||
},
|
||||
"refresh": {
|
||||
"title": "Refresh model list",
|
||||
"quick": "Quick Refresh (incremental)",
|
||||
"full": "Full Rebuild (complete)"
|
||||
"quick": "Sync Changes",
|
||||
"quickTooltip": "Scan for new or missing model files so the list stays current.",
|
||||
"full": "Rebuild Cache",
|
||||
"fullTooltip": "Reload all model details from metadata files—use if the library looks out of date or after manual edits."
|
||||
},
|
||||
"fetch": {
|
||||
"title": "Fetch metadata from Civitai",
|
||||
@@ -416,6 +471,13 @@
|
||||
"favorites": {
|
||||
"title": "Show Favorites Only",
|
||||
"action": "Favorites"
|
||||
},
|
||||
"updates": {
|
||||
"title": "Show models with updates available",
|
||||
"action": "Updates",
|
||||
"menuLabel": "Show update options",
|
||||
"check": "Check updates",
|
||||
"checkTooltip": "Checking updates may take a while."
|
||||
}
|
||||
},
|
||||
"bulkOperations": {
|
||||
@@ -427,6 +489,7 @@
|
||||
"setContentRating": "Set Content Rating for Selected",
|
||||
"copyAll": "Copy Selected Syntax",
|
||||
"refreshAll": "Refresh Selected Metadata",
|
||||
"checkUpdates": "Check Updates for Selected",
|
||||
"moveAll": "Move Selected to Folder",
|
||||
"autoOrganize": "Auto-Organize Selected",
|
||||
"deleteAll": "Delete Selected Models",
|
||||
@@ -443,6 +506,7 @@
|
||||
},
|
||||
"contextMenu": {
|
||||
"refreshMetadata": "Refresh Civitai Data",
|
||||
"checkUpdates": "Check Updates",
|
||||
"relinkCivitai": "Re-link to Civitai",
|
||||
"copySyntax": "Copy LoRA Syntax",
|
||||
"copyFilename": "Copy Model Filename",
|
||||
@@ -464,6 +528,9 @@
|
||||
},
|
||||
"recipes": {
|
||||
"title": "LoRA Recipes",
|
||||
"actions": {
|
||||
"sendCheckpoint": "Send to ComfyUI"
|
||||
},
|
||||
"controls": {
|
||||
"import": {
|
||||
"action": "Import",
|
||||
@@ -702,6 +769,12 @@
|
||||
"countMessage": "models will be permanently deleted.",
|
||||
"action": "Delete All"
|
||||
},
|
||||
"checkUpdates": {
|
||||
"title": "Check updates for all {typePlural}?",
|
||||
"message": "This checks every {typePlural} in your library for updates. Large collections may take a little longer.",
|
||||
"tip": "To work in smaller batches, switch to bulk mode, choose the ones you need, then use \"Check Updates for Selected\".",
|
||||
"action": "Check All"
|
||||
},
|
||||
"bulkAddTags": {
|
||||
"title": "Add Tags to Multiple Models",
|
||||
"description": "Add tags to",
|
||||
@@ -838,13 +911,77 @@
|
||||
"tabs": {
|
||||
"examples": "Examples",
|
||||
"description": "Model Description",
|
||||
"recipes": "Recipes"
|
||||
"recipes": "Recipes",
|
||||
"versions": "Versions"
|
||||
},
|
||||
"license": {
|
||||
"noImageSell": "No selling generated content",
|
||||
"noRentCivit": "No Civitai generation",
|
||||
"noRent": "No generation services",
|
||||
"noSell": "No selling models",
|
||||
"creditRequired": "Creator credit required",
|
||||
"noDerivatives": "No sharing merges",
|
||||
"noReLicense": "Same permissions required",
|
||||
"restrictionsLabel": "License restrictions"
|
||||
},
|
||||
"loading": {
|
||||
"exampleImages": "Loading example images...",
|
||||
"description": "Loading model description...",
|
||||
"recipes": "Loading recipes...",
|
||||
"examples": "Loading examples..."
|
||||
"examples": "Loading examples...",
|
||||
"versions": "Loading versions..."
|
||||
},
|
||||
"versions": {
|
||||
"heading": "Model versions",
|
||||
"copy": "Track and manage every version of this model in one place.",
|
||||
"media": {
|
||||
"placeholder": "No preview"
|
||||
},
|
||||
"labels": {
|
||||
"unnamed": "Untitled Version",
|
||||
"noDetails": "No additional details"
|
||||
},
|
||||
"badges": {
|
||||
"current": "Current Version",
|
||||
"inLibrary": "In Library",
|
||||
"newer": "Newer Version",
|
||||
"ignored": "Ignored"
|
||||
},
|
||||
"actions": {
|
||||
"download": "Download",
|
||||
"delete": "Delete",
|
||||
"ignore": "Ignore",
|
||||
"unignore": "Unignore",
|
||||
"resumeModelUpdates": "Resume updates for this model",
|
||||
"ignoreModelUpdates": "Ignore updates for this model",
|
||||
"viewLocalVersions": "View all local versions",
|
||||
"viewLocalTooltip": "Coming soon"
|
||||
},
|
||||
"filters": {
|
||||
"label": "Base filter",
|
||||
"state": {
|
||||
"showAll": "All versions",
|
||||
"showSameBase": "Same base"
|
||||
},
|
||||
"tooltip": {
|
||||
"showAllVersions": "Switch to showing all versions",
|
||||
"showSameBaseVersions": "Switch to showing only versions that match the current base model"
|
||||
},
|
||||
"empty": "No versions match the current base model filter."
|
||||
},
|
||||
"empty": "No version history available for this model yet.",
|
||||
"error": "Failed to load versions.",
|
||||
"missingModelId": "This model is missing a Civitai model id.",
|
||||
"confirm": {
|
||||
"delete": "Delete this version from your library?"
|
||||
},
|
||||
"toast": {
|
||||
"modelIgnored": "Updates ignored for this model",
|
||||
"modelResumed": "Update tracking resumed",
|
||||
"versionIgnored": "Updates ignored for this version",
|
||||
"versionUnignored": "Version re-enabled",
|
||||
"versionDeleted": "Version deleted"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -951,7 +1088,9 @@
|
||||
"loraFailedToSend": "Failed to send LoRA to workflow",
|
||||
"recipeAdded": "Recipe appended to workflow",
|
||||
"recipeReplaced": "Recipe replaced in workflow",
|
||||
"recipeFailedToSend": "Failed to send recipe to workflow"
|
||||
"recipeFailedToSend": "Failed to send recipe to workflow",
|
||||
"noMatchingNodes": "No compatible nodes available in the current workflow",
|
||||
"noTargetNodeSelected": "No target node selected"
|
||||
},
|
||||
"nodeSelector": {
|
||||
"recipe": "Recipe",
|
||||
@@ -996,6 +1135,11 @@
|
||||
},
|
||||
"update": {
|
||||
"title": "Check for Updates",
|
||||
"notificationsTitle": "Notifications",
|
||||
"tabs": {
|
||||
"updates": "Updates",
|
||||
"messages": "Messages"
|
||||
},
|
||||
"updateAvailable": "Update Available",
|
||||
"noChangelogAvailable": "No detailed changelog available. Check GitHub for more information.",
|
||||
"currentVersion": "Current Version",
|
||||
@@ -1027,6 +1171,13 @@
|
||||
"nightly": {
|
||||
"warning": "Warning: Nightly builds may contain experimental features and could be unstable.",
|
||||
"enable": "Enable Nightly Updates"
|
||||
},
|
||||
"banners": {
|
||||
"recent": "Recent messages",
|
||||
"empty": "No recent banners yet.",
|
||||
"shown": "Shown {time}",
|
||||
"dismissed": "Dismissed {time}",
|
||||
"active": "Active"
|
||||
}
|
||||
},
|
||||
"support": {
|
||||
@@ -1106,6 +1257,9 @@
|
||||
"cannotSend": "Cannot send recipe: Missing recipe ID",
|
||||
"sendFailed": "Failed to send recipe to workflow",
|
||||
"sendError": "Error sending recipe to workflow",
|
||||
"missingCheckpointPath": "Checkpoint path not available",
|
||||
"missingCheckpointInfo": "Missing checkpoint information",
|
||||
"downloadCheckpointFailed": "Failed to download checkpoint: {message}",
|
||||
"cannotDelete": "Cannot delete recipe: Missing recipe ID",
|
||||
"deleteConfirmationError": "Error showing delete confirmation",
|
||||
"deletedSuccessfully": "Recipe deleted successfully",
|
||||
@@ -1146,6 +1300,12 @@
|
||||
"bulkContentRatingSet": "Set content rating to {level} for {count} model(s)",
|
||||
"bulkContentRatingPartial": "Set content rating to {level} for {success} model(s), {failed} failed",
|
||||
"bulkContentRatingFailed": "Failed to update content rating for selected models",
|
||||
"bulkUpdatesChecking": "Checking selected {type}(s) for updates...",
|
||||
"bulkUpdatesSuccess": "Updates available for {count} selected {type}(s)",
|
||||
"bulkUpdatesNone": "No updates found for selected {type}(s)",
|
||||
"bulkUpdatesMissing": "Selected {type}(s) are not linked to Civitai updates",
|
||||
"bulkUpdatesPartialMissing": "Skipped {missing} selected {type}(s) without Civitai links",
|
||||
"bulkUpdatesFailed": "Failed to check updates for selected {type}(s): {message}",
|
||||
"invalidCharactersRemoved": "Invalid characters removed from filename",
|
||||
"filenameCannotBeEmpty": "File name cannot be empty",
|
||||
"renameFailed": "Failed to rename file: {message}",
|
||||
@@ -1206,7 +1366,7 @@
|
||||
},
|
||||
"triggerWords": {
|
||||
"loadFailed": "Could not load trained words",
|
||||
"tooLong": "Trigger word should not exceed 30 words",
|
||||
"tooLong": "Trigger word should not exceed 100 words",
|
||||
"tooMany": "Maximum 30 trigger words allowed",
|
||||
"alreadyExists": "This trigger word already exists",
|
||||
"updateSuccess": "Trigger words updated successfully",
|
||||
|
||||
194
locales/es.json
194
locales/es.json
@@ -101,7 +101,12 @@
|
||||
"checkpointNameCopied": "Nombre del checkpoint copiado",
|
||||
"toggleBlur": "Alternar difuminado",
|
||||
"show": "Mostrar",
|
||||
"openExampleImages": "Abrir carpeta de imágenes de ejemplo"
|
||||
"openExampleImages": "Abrir carpeta de imágenes de ejemplo",
|
||||
"replacePreview": "Reemplazar vista previa",
|
||||
"copyCheckpointName": "Copiar nombre del checkpoint",
|
||||
"copyEmbeddingName": "Copiar nombre del embedding",
|
||||
"sendCheckpointToWorkflow": "Enviar a ComfyUI",
|
||||
"sendEmbeddingToWorkflow": "Enviar a ComfyUI"
|
||||
},
|
||||
"nsfw": {
|
||||
"matureContent": "Contenido para adultos",
|
||||
@@ -115,12 +120,17 @@
|
||||
"updateFailed": "Error al actualizar estado de favoritos"
|
||||
},
|
||||
"sendToWorkflow": {
|
||||
"checkpointNotImplemented": "Enviar checkpoint al flujo de trabajo - función por implementar"
|
||||
"checkpointNotImplemented": "Enviar checkpoint al flujo de trabajo - función por implementar",
|
||||
"missingPath": "No se puede determinar la ruta del modelo para esta tarjeta"
|
||||
},
|
||||
"exampleImages": {
|
||||
"checkError": "Error al verificar imágenes de ejemplo",
|
||||
"missingHash": "Falta información del hash del modelo.",
|
||||
"noRemoteImagesAvailable": "No hay imágenes de ejemplo remotas disponibles para este modelo en Civitai"
|
||||
},
|
||||
"badges": {
|
||||
"update": "Actualización",
|
||||
"updateAvailable": "Actualización disponible"
|
||||
}
|
||||
},
|
||||
"globalContextMenu": {
|
||||
@@ -129,12 +139,26 @@
|
||||
"missingPath": "Establece una ubicación de descarga antes de descargar imágenes de ejemplo.",
|
||||
"unavailable": "Las descargas de imágenes de ejemplo aún no están disponibles. Intenta de nuevo después de que la página termine de cargar."
|
||||
},
|
||||
"checkModelUpdates": {
|
||||
"label": "Buscar actualizaciones",
|
||||
"loading": "Buscando actualizaciones de {type}...",
|
||||
"success": "Se encontraron {count} actualización(es) para {type}",
|
||||
"none": "Todos los {type} están actualizados",
|
||||
"error": "Error al buscar actualizaciones de {type}: {message}"
|
||||
},
|
||||
"cleanupExampleImages": {
|
||||
"label": "Limpiar carpetas de imágenes de ejemplo",
|
||||
"success": "Se movieron {count} carpeta(s) a la carpeta de eliminados",
|
||||
"none": "No hay carpetas de imágenes de ejemplo que necesiten limpieza",
|
||||
"partial": "Limpieza completada con {failures} carpeta(s) omitidas",
|
||||
"error": "No se pudieron limpiar las carpetas de imágenes de ejemplo: {message}"
|
||||
},
|
||||
"fetchMissingLicenses": {
|
||||
"label": "Refresh license metadata",
|
||||
"loading": "Refreshing license metadata for {typePlural}...",
|
||||
"success": "Updated license metadata for {count} {typePlural}",
|
||||
"none": "All {typePlural} already have license metadata",
|
||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||
}
|
||||
},
|
||||
"header": {
|
||||
@@ -171,6 +195,10 @@
|
||||
"title": "Filtrar modelos",
|
||||
"baseModel": "Modelo base",
|
||||
"modelTags": "Etiquetas (Top 20)",
|
||||
"modelTypes": "Model Types",
|
||||
"license": "Licencia",
|
||||
"noCreditRequired": "Sin crédito requerido",
|
||||
"allowSellingGeneratedContent": "Venta permitida",
|
||||
"clearAll": "Limpiar todos los filtros"
|
||||
},
|
||||
"theme": {
|
||||
@@ -181,6 +209,7 @@
|
||||
},
|
||||
"actions": {
|
||||
"checkUpdates": "Comprobar actualizaciones",
|
||||
"notifications": "Notificaciones",
|
||||
"support": "Soporte"
|
||||
}
|
||||
},
|
||||
@@ -202,10 +231,17 @@
|
||||
"priorityTags": "Etiquetas prioritarias",
|
||||
"downloadPathTemplates": "Plantillas de rutas de descarga",
|
||||
"exampleImages": "Imágenes de ejemplo",
|
||||
"updateFlags": "Indicadores de actualización",
|
||||
"autoOrganize": "Auto-organize",
|
||||
"misc": "Varios",
|
||||
"metadataArchive": "Base de datos de archivo de metadatos",
|
||||
"storageLocation": "Ubicación de ajustes",
|
||||
"proxySettings": "Configuración de proxy"
|
||||
},
|
||||
"storage": {
|
||||
"locationLabel": "Modo portátil",
|
||||
"locationHelp": "Activa para mantener settings.json dentro del repositorio; desactívalo para guardarlo en tu directorio de configuración de usuario."
|
||||
},
|
||||
"contentFiltering": {
|
||||
"blurNsfwContent": "Difuminar contenido NSFW",
|
||||
"blurNsfwContentHelp": "Difuminar imágenes de vista previa de contenido para adultos (NSFW)",
|
||||
@@ -216,6 +252,15 @@
|
||||
"autoplayOnHover": "Reproducir videos automáticamente al pasar el ratón",
|
||||
"autoplayOnHoverHelp": "Solo reproducir vistas previas de video al pasar el ratón sobre ellas"
|
||||
},
|
||||
"autoOrganizeExclusions": {
|
||||
"label": "Exclusiones de auto-organización",
|
||||
"placeholder": "Ejemplo: curated/*, */backups/*; *_temp.safetensors",
|
||||
"help": "Omitir archivos que coincidan con estos patrones comodín. Separe múltiples patrones con comas o puntos y comas.",
|
||||
"validation": {
|
||||
"noPatterns": "Ingrese al menos un patrón separado por comas o puntos y comas.",
|
||||
"saveFailed": "No se pudieron guardar las exclusiones: {message}"
|
||||
}
|
||||
},
|
||||
"layoutSettings": {
|
||||
"displayDensity": "Densidad de visualización",
|
||||
"displayDensityOptions": {
|
||||
@@ -230,26 +275,26 @@
|
||||
"compact": "7 (1080p), 8 (2K), 10 (4K)"
|
||||
},
|
||||
"displayDensityWarning": "Advertencia: Densidades más altas pueden causar problemas de rendimiento en sistemas con recursos limitados.",
|
||||
"showFolderSidebar": "Mostrar barra lateral de carpetas",
|
||||
"showFolderSidebarHelp": "Activa o desactiva la barra lateral de navegación de carpetas en las páginas de modelos. Cuando está desactivada, la barra lateral y el área de desplazamiento permanecen ocultas.",
|
||||
"cardInfoDisplay": "Visualización de información de tarjeta",
|
||||
"cardInfoDisplayOptions": {
|
||||
"always": "Siempre visible",
|
||||
"hover": "Mostrar al pasar el ratón"
|
||||
},
|
||||
"cardInfoDisplayHelp": "Elige cuándo mostrar información del modelo y botones de acción:",
|
||||
"cardInfoDisplayDetails": {
|
||||
"always": "Los encabezados y pies de página siempre son visibles",
|
||||
"hover": "Los encabezados y pies de página solo aparecen al pasar el ratón sobre una tarjeta"
|
||||
"cardInfoDisplayHelp": "Elige cuándo mostrar información del modelo y botones de acción",
|
||||
"modelCardFooterAction": "Acción del botón de tarjeta de modelo",
|
||||
"modelCardFooterActionOptions": {
|
||||
"exampleImages": "Abrir imágenes de ejemplo",
|
||||
"replacePreview": "Reemplazar vista previa"
|
||||
},
|
||||
"modelCardFooterActionHelp": "Elige qué hace el botón en la esquina inferior derecha de la tarjeta",
|
||||
"modelNameDisplay": "Visualización del nombre del modelo",
|
||||
"modelNameDisplayOptions": {
|
||||
"modelName": "Nombre del modelo",
|
||||
"fileName": "Nombre del archivo"
|
||||
},
|
||||
"modelNameDisplayHelp": "Elige qué mostrar en el pie de la tarjeta del modelo:",
|
||||
"modelNameDisplayDetails": {
|
||||
"modelName": "Mostrar el nombre descriptivo del modelo",
|
||||
"fileName": "Mostrar el nombre real del archivo en el disco"
|
||||
}
|
||||
"modelNameDisplayHelp": "Elige qué mostrar en el pie de la tarjeta del modelo"
|
||||
},
|
||||
"folderSettings": {
|
||||
"activeLibrary": "Biblioteca activa",
|
||||
@@ -331,6 +376,14 @@
|
||||
"download": "Descargar",
|
||||
"restartRequired": "Requiere reinicio"
|
||||
},
|
||||
"updateFlagStrategy": {
|
||||
"label": "Estrategia de indicadores de actualización",
|
||||
"help": "Decide si las insignias de actualización deben mostrarse solo cuando una nueva versión comparte el mismo modelo base que tus archivos locales o siempre que exista cualquier versión más reciente de ese modelo.",
|
||||
"options": {
|
||||
"sameBase": "Coincidir actualizaciones por modelo base",
|
||||
"any": "Marcar cualquier actualización disponible"
|
||||
}
|
||||
},
|
||||
"misc": {
|
||||
"includeTriggerWords": "Incluir palabras clave en la sintaxis de LoRA",
|
||||
"includeTriggerWordsHelp": "Incluir palabras clave entrenadas al copiar la sintaxis de LoRA al portapapeles"
|
||||
@@ -394,8 +447,10 @@
|
||||
},
|
||||
"refresh": {
|
||||
"title": "Actualizar lista de modelos",
|
||||
"quick": "Actualización rápida (incremental)",
|
||||
"full": "Reconstrucción completa"
|
||||
"quick": "Sincronizar cambios",
|
||||
"quickTooltip": "Busca archivos de modelo nuevos o faltantes para mantener la lista al día.",
|
||||
"full": "Reconstruir caché",
|
||||
"fullTooltip": "Vuelve a cargar todos los detalles desde los archivos de metadatos; úsalo si la biblioteca parece desactualizada o tras ediciones manuales."
|
||||
},
|
||||
"fetch": {
|
||||
"title": "Obtener metadatos de Civitai",
|
||||
@@ -416,6 +471,13 @@
|
||||
"favorites": {
|
||||
"title": "Mostrar solo favoritos",
|
||||
"action": "Favoritos"
|
||||
},
|
||||
"updates": {
|
||||
"title": "Mostrar solo modelos con actualizaciones disponibles",
|
||||
"action": "Actualizaciones",
|
||||
"menuLabel": "Mostrar opciones de actualización",
|
||||
"check": "Buscar actualizaciones",
|
||||
"checkTooltip": "Comprobar actualizaciones puede tardar."
|
||||
}
|
||||
},
|
||||
"bulkOperations": {
|
||||
@@ -427,6 +489,7 @@
|
||||
"setContentRating": "Establecer clasificación de contenido para todos",
|
||||
"copyAll": "Copiar toda la sintaxis",
|
||||
"refreshAll": "Actualizar todos los metadatos",
|
||||
"checkUpdates": "Comprobar actualizaciones para la selección",
|
||||
"moveAll": "Mover todos a carpeta",
|
||||
"autoOrganize": "Auto-organizar seleccionados",
|
||||
"deleteAll": "Eliminar todos los modelos",
|
||||
@@ -443,6 +506,7 @@
|
||||
},
|
||||
"contextMenu": {
|
||||
"refreshMetadata": "Actualizar datos de Civitai",
|
||||
"checkUpdates": "Comprobar actualizaciones",
|
||||
"relinkCivitai": "Re-vincular a Civitai",
|
||||
"copySyntax": "Copiar sintaxis de LoRA",
|
||||
"copyFilename": "Copiar nombre de archivo del modelo",
|
||||
@@ -464,6 +528,9 @@
|
||||
},
|
||||
"recipes": {
|
||||
"title": "Recetas de LoRA",
|
||||
"actions": {
|
||||
"sendCheckpoint": "Enviar a ComfyUI"
|
||||
},
|
||||
"controls": {
|
||||
"import": {
|
||||
"action": "Importar",
|
||||
@@ -702,6 +769,12 @@
|
||||
"countMessage": "modelos serán eliminados permanentemente.",
|
||||
"action": "Eliminar todo"
|
||||
},
|
||||
"checkUpdates": {
|
||||
"title": "¿Comprobar actualizaciones para todos los {typePlural}?",
|
||||
"message": "Esto comprobará las actualizaciones de todos los {typePlural} de tu biblioteca. En colecciones grandes puede tardar un poco más.",
|
||||
"tip": "¿Quieres hacerlo por partes? Activa el modo por lotes, selecciona los modelos que necesites y usa \"Comprobar actualizaciones para la selección\".",
|
||||
"action": "Comprobar todo"
|
||||
},
|
||||
"bulkAddTags": {
|
||||
"title": "Añadir etiquetas a múltiples modelos",
|
||||
"description": "Añadir etiquetas a",
|
||||
@@ -838,13 +911,77 @@
|
||||
"tabs": {
|
||||
"examples": "Ejemplos",
|
||||
"description": "Descripción del modelo",
|
||||
"recipes": "Recetas"
|
||||
"recipes": "Recetas",
|
||||
"versions": "Versiones"
|
||||
},
|
||||
"license": {
|
||||
"noImageSell": "No selling generated content",
|
||||
"noRentCivit": "No Civitai generation",
|
||||
"noRent": "No generation services",
|
||||
"noSell": "No selling models",
|
||||
"creditRequired": "Crédito del creador requerido",
|
||||
"noDerivatives": "No se permiten fusiones",
|
||||
"noReLicense": "Se requieren mismos permisos",
|
||||
"restrictionsLabel": "Restricciones de licencia"
|
||||
},
|
||||
"loading": {
|
||||
"exampleImages": "Cargando imágenes de ejemplo...",
|
||||
"description": "Cargando descripción del modelo...",
|
||||
"recipes": "Cargando recetas...",
|
||||
"examples": "Cargando ejemplos..."
|
||||
"examples": "Cargando ejemplos...",
|
||||
"versions": "Cargando versiones..."
|
||||
},
|
||||
"versions": {
|
||||
"heading": "Versiones del modelo",
|
||||
"copy": "Administra todas las versiones de este modelo en un solo lugar.",
|
||||
"media": {
|
||||
"placeholder": "Sin vista previa"
|
||||
},
|
||||
"labels": {
|
||||
"unnamed": "Versión sin nombre",
|
||||
"noDetails": "Sin detalles adicionales"
|
||||
},
|
||||
"badges": {
|
||||
"current": "Versión actual",
|
||||
"inLibrary": "En la biblioteca",
|
||||
"newer": "Versión más reciente",
|
||||
"ignored": "Ignorada"
|
||||
},
|
||||
"actions": {
|
||||
"download": "Descargar",
|
||||
"delete": "Eliminar",
|
||||
"ignore": "Ignorar",
|
||||
"unignore": "Dejar de ignorar",
|
||||
"resumeModelUpdates": "Reanudar actualizaciones para este modelo",
|
||||
"ignoreModelUpdates": "Ignorar actualizaciones para este modelo",
|
||||
"viewLocalVersions": "Ver todas las versiones locales",
|
||||
"viewLocalTooltip": "Disponible pronto"
|
||||
},
|
||||
"filters": {
|
||||
"label": "Filtro base",
|
||||
"state": {
|
||||
"showAll": "Todas las versiones",
|
||||
"showSameBase": "Mismo modelo base"
|
||||
},
|
||||
"tooltip": {
|
||||
"showAllVersions": "Cambiar para mostrar todas las versiones",
|
||||
"showSameBaseVersions": "Cambiar para mostrar solo versiones del mismo modelo base"
|
||||
},
|
||||
"empty": "Ninguna versión coincide con el filtro del modelo base actual."
|
||||
},
|
||||
"empty": "Aún no hay historial de versiones para este modelo.",
|
||||
"error": "No se pudieron cargar las versiones.",
|
||||
"missingModelId": "Este modelo no tiene un ID de modelo de Civitai.",
|
||||
"confirm": {
|
||||
"delete": "¿Eliminar esta versión de tu biblioteca?"
|
||||
},
|
||||
"toast": {
|
||||
"modelIgnored": "Se ignoran las actualizaciones de este modelo",
|
||||
"modelResumed": "Seguimiento de actualizaciones reanudado",
|
||||
"versionIgnored": "Se ignoran las actualizaciones de esta versión",
|
||||
"versionUnignored": "Versión habilitada nuevamente",
|
||||
"versionDeleted": "Versión eliminada"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -951,7 +1088,9 @@
|
||||
"loraFailedToSend": "Error al enviar LoRA al flujo de trabajo",
|
||||
"recipeAdded": "Receta añadida al flujo de trabajo",
|
||||
"recipeReplaced": "Receta reemplazada en el flujo de trabajo",
|
||||
"recipeFailedToSend": "Error al enviar receta al flujo de trabajo"
|
||||
"recipeFailedToSend": "Error al enviar receta al flujo de trabajo",
|
||||
"noMatchingNodes": "No hay nodos compatibles disponibles en el flujo de trabajo actual",
|
||||
"noTargetNodeSelected": "No se ha seleccionado ningún nodo de destino"
|
||||
},
|
||||
"nodeSelector": {
|
||||
"recipe": "Receta",
|
||||
@@ -996,6 +1135,11 @@
|
||||
},
|
||||
"update": {
|
||||
"title": "Comprobar actualizaciones",
|
||||
"notificationsTitle": "Centro de notificaciones",
|
||||
"tabs": {
|
||||
"updates": "Actualizaciones",
|
||||
"messages": "Mensajes"
|
||||
},
|
||||
"updateAvailable": "Actualización disponible",
|
||||
"noChangelogAvailable": "No hay registro de cambios detallado disponible. Revisa GitHub para más información.",
|
||||
"currentVersion": "Versión actual",
|
||||
@@ -1027,6 +1171,13 @@
|
||||
"nightly": {
|
||||
"warning": "Advertencia: Las compilaciones nocturnas pueden contener características experimentales y podrían ser inestables.",
|
||||
"enable": "Habilitar actualizaciones nocturnas"
|
||||
},
|
||||
"banners": {
|
||||
"recent": "Notificaciones recientes",
|
||||
"empty": "No hay banners recientes.",
|
||||
"shown": "Mostrado {time}",
|
||||
"dismissed": "Descartado {time}",
|
||||
"active": "Activo"
|
||||
}
|
||||
},
|
||||
"support": {
|
||||
@@ -1106,6 +1257,9 @@
|
||||
"cannotSend": "No se puede enviar receta: Falta ID de receta",
|
||||
"sendFailed": "Error al enviar receta al flujo de trabajo",
|
||||
"sendError": "Error enviando receta al flujo de trabajo",
|
||||
"missingCheckpointPath": "Ruta del checkpoint no disponible",
|
||||
"missingCheckpointInfo": "Falta información del checkpoint",
|
||||
"downloadCheckpointFailed": "Error al descargar el checkpoint: {message}",
|
||||
"cannotDelete": "No se puede eliminar receta: Falta ID de receta",
|
||||
"deleteConfirmationError": "Error mostrando confirmación de eliminación",
|
||||
"deletedSuccessfully": "Receta eliminada exitosamente",
|
||||
@@ -1146,6 +1300,12 @@
|
||||
"bulkContentRatingSet": "Clasificación de contenido establecida en {level} para {count} modelo(s)",
|
||||
"bulkContentRatingPartial": "Clasificación de contenido establecida en {level} para {success} modelo(s), {failed} fallaron",
|
||||
"bulkContentRatingFailed": "No se pudo actualizar la clasificación de contenido para los modelos seleccionados",
|
||||
"bulkUpdatesChecking": "Comprobando actualizaciones para {type} seleccionados...",
|
||||
"bulkUpdatesSuccess": "Actualizaciones disponibles para {count} {type} seleccionados",
|
||||
"bulkUpdatesNone": "No se encontraron actualizaciones para los {type} seleccionados",
|
||||
"bulkUpdatesMissing": "Los {type} seleccionados no están vinculados a actualizaciones de Civitai",
|
||||
"bulkUpdatesPartialMissing": "Se omitieron {missing} {type} seleccionados sin enlace de Civitai",
|
||||
"bulkUpdatesFailed": "Error al comprobar actualizaciones para los {type} seleccionados: {message}",
|
||||
"invalidCharactersRemoved": "Caracteres inválidos eliminados del nombre de archivo",
|
||||
"filenameCannotBeEmpty": "El nombre de archivo no puede estar vacío",
|
||||
"renameFailed": "Error al renombrar archivo: {message}",
|
||||
@@ -1206,7 +1366,7 @@
|
||||
},
|
||||
"triggerWords": {
|
||||
"loadFailed": "No se pudieron cargar palabras entrenadas",
|
||||
"tooLong": "La palabra clave no debe exceder 30 palabras",
|
||||
"tooLong": "La palabra clave no debe exceder 100 palabras",
|
||||
"tooMany": "Máximo 30 palabras clave permitidas",
|
||||
"alreadyExists": "Esta palabra clave ya existe",
|
||||
"updateSuccess": "Palabras clave actualizadas exitosamente",
|
||||
|
||||
240
locales/fr.json
240
locales/fr.json
@@ -101,7 +101,12 @@
|
||||
"checkpointNameCopied": "Nom du checkpoint copié",
|
||||
"toggleBlur": "Basculer le flou",
|
||||
"show": "Afficher",
|
||||
"openExampleImages": "Ouvrir le dossier d'images d'exemple"
|
||||
"openExampleImages": "Ouvrir le dossier d'images d'exemple",
|
||||
"replacePreview": "Remplacer l'aperçu",
|
||||
"copyCheckpointName": "Copier le nom du checkpoint",
|
||||
"copyEmbeddingName": "Copier le nom de l'embedding",
|
||||
"sendCheckpointToWorkflow": "Envoyer vers ComfyUI",
|
||||
"sendEmbeddingToWorkflow": "Envoyer vers ComfyUI"
|
||||
},
|
||||
"nsfw": {
|
||||
"matureContent": "Contenu pour adultes",
|
||||
@@ -115,12 +120,17 @@
|
||||
"updateFailed": "Échec de la mise à jour du statut des favoris"
|
||||
},
|
||||
"sendToWorkflow": {
|
||||
"checkpointNotImplemented": "Envoyer le checkpoint vers le workflow - fonctionnalité à implémenter"
|
||||
"checkpointNotImplemented": "Envoyer le checkpoint vers le workflow - fonctionnalité à implémenter",
|
||||
"missingPath": "Impossible de déterminer le chemin du modèle pour cette carte"
|
||||
},
|
||||
"exampleImages": {
|
||||
"checkError": "Erreur lors de la vérification des images d'exemple",
|
||||
"missingHash": "Informations de hachage du modèle manquantes.",
|
||||
"noRemoteImagesAvailable": "Aucune image d'exemple distante disponible pour ce modèle sur Civitai"
|
||||
},
|
||||
"badges": {
|
||||
"update": "Mise à jour",
|
||||
"updateAvailable": "Mise à jour disponible"
|
||||
}
|
||||
},
|
||||
"globalContextMenu": {
|
||||
@@ -129,12 +139,26 @@
|
||||
"missingPath": "Définissez un emplacement de téléchargement avant de télécharger les images d'exemple.",
|
||||
"unavailable": "Le téléchargement des images d'exemple n'est pas encore disponible. Réessayez après le chargement complet de la page."
|
||||
},
|
||||
"checkModelUpdates": {
|
||||
"label": "Vérifier les mises à jour",
|
||||
"loading": "Recherche de mises à jour pour {type}...",
|
||||
"success": "{count} mise(s) à jour trouvée(s) pour {type}",
|
||||
"none": "Tous les {type} sont à jour",
|
||||
"error": "Échec de la vérification des mises à jour pour {type} : {message}"
|
||||
},
|
||||
"cleanupExampleImages": {
|
||||
"label": "Nettoyer les dossiers d'images d'exemple",
|
||||
"label": "Supprimer les dossiers d'exemples orphelins",
|
||||
"success": "{count} dossier(s) déplacé(s) vers le dossier supprimé",
|
||||
"none": "Aucun dossier d'images d'exemple à nettoyer",
|
||||
"partial": "Nettoyage terminé avec {failures} dossier(s) ignoré(s)",
|
||||
"error": "Échec du nettoyage des dossiers d'images d'exemple : {message}"
|
||||
},
|
||||
"fetchMissingLicenses": {
|
||||
"label": "Refresh license metadata",
|
||||
"loading": "Refreshing license metadata for {typePlural}...",
|
||||
"success": "Updated license metadata for {count} {typePlural}",
|
||||
"none": "All {typePlural} already have license metadata",
|
||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||
}
|
||||
},
|
||||
"header": {
|
||||
@@ -171,6 +195,10 @@
|
||||
"title": "Filtrer les modèles",
|
||||
"baseModel": "Modèle de base",
|
||||
"modelTags": "Tags (Top 20)",
|
||||
"modelTypes": "Model Types",
|
||||
"license": "Licence",
|
||||
"noCreditRequired": "Crédit non requis",
|
||||
"allowSellingGeneratedContent": "Vente autorisée",
|
||||
"clearAll": "Effacer tous les filtres"
|
||||
},
|
||||
"theme": {
|
||||
@@ -181,6 +209,7 @@
|
||||
},
|
||||
"actions": {
|
||||
"checkUpdates": "Vérifier les mises à jour",
|
||||
"notifications": "Notifications",
|
||||
"support": "Support"
|
||||
}
|
||||
},
|
||||
@@ -199,12 +228,19 @@
|
||||
"videoSettings": "Paramètres vidéo",
|
||||
"layoutSettings": "Paramètres d'affichage",
|
||||
"folderSettings": "Paramètres des dossiers",
|
||||
"priorityTags": "Étiquettes prioritaires",
|
||||
"downloadPathTemplates": "Modèles de chemin de téléchargement",
|
||||
"exampleImages": "Images d'exemple",
|
||||
"updateFlags": "Indicateurs de mise à jour",
|
||||
"autoOrganize": "Auto-organize",
|
||||
"misc": "Divers",
|
||||
"metadataArchive": "Base de données d'archive des métadonnées",
|
||||
"proxySettings": "Paramètres du proxy",
|
||||
"priorityTags": "Étiquettes prioritaires"
|
||||
"storageLocation": "Emplacement des paramètres",
|
||||
"proxySettings": "Paramètres du proxy"
|
||||
},
|
||||
"storage": {
|
||||
"locationLabel": "Mode portable",
|
||||
"locationHelp": "Activez pour garder settings.json dans le dépôt ; désactivez pour le placer dans votre dossier de configuration utilisateur."
|
||||
},
|
||||
"contentFiltering": {
|
||||
"blurNsfwContent": "Flouter le contenu NSFW",
|
||||
@@ -216,6 +252,15 @@
|
||||
"autoplayOnHover": "Lecture automatique vidéo au survol",
|
||||
"autoplayOnHoverHelp": "Lire les aperçus vidéo uniquement lors du survol"
|
||||
},
|
||||
"autoOrganizeExclusions": {
|
||||
"label": "Exclusions de l'auto-organisation",
|
||||
"placeholder": "Exemple : curated/*, */backups/*; *_temp.safetensors",
|
||||
"help": "Ignorer les fichiers correspondant à ces motifs génériques. Séparez plusieurs motifs par des virgules ou des points-virgules.",
|
||||
"validation": {
|
||||
"noPatterns": "Entrez au moins un motif séparé par des virgules ou des points-virgules.",
|
||||
"saveFailed": "Impossible d'enregistrer les exclusions : {message}"
|
||||
}
|
||||
},
|
||||
"layoutSettings": {
|
||||
"displayDensity": "Densité d'affichage",
|
||||
"displayDensityOptions": {
|
||||
@@ -230,26 +275,26 @@
|
||||
"compact": "7 (1080p), 8 (2K), 10 (4K)"
|
||||
},
|
||||
"displayDensityWarning": "Attention : Des densités plus élevées peuvent causer des problèmes de performance sur les systèmes avec des ressources limitées.",
|
||||
"showFolderSidebar": "Afficher la barre latérale des dossiers",
|
||||
"showFolderSidebarHelp": "Activez ou désactivez la barre latérale de navigation des dossiers sur les pages de modèles. Lorsqu'elle est désactivée, la barre latérale et la zone de survol restent masquées.",
|
||||
"cardInfoDisplay": "Affichage des informations de carte",
|
||||
"cardInfoDisplayOptions": {
|
||||
"always": "Toujours visible",
|
||||
"hover": "Révéler au survol"
|
||||
},
|
||||
"cardInfoDisplayHelp": "Choisissez quand afficher les informations du modèle et les boutons d'action :",
|
||||
"cardInfoDisplayDetails": {
|
||||
"always": "Les en-têtes et pieds de page sont toujours visibles",
|
||||
"hover": "Les en-têtes et pieds de page n'apparaissent qu'au survol d'une carte"
|
||||
"cardInfoDisplayHelp": "Choisissez quand afficher les informations du modèle et les boutons d'action",
|
||||
"modelCardFooterAction": "Action du bouton de carte de modèle",
|
||||
"modelCardFooterActionOptions": {
|
||||
"exampleImages": "Ouvrir les images d'exemple",
|
||||
"replacePreview": "Remplacer l'aperçu"
|
||||
},
|
||||
"modelCardFooterActionHelp": "Choisissez ce que fait le bouton en bas à droite de la carte",
|
||||
"modelNameDisplay": "Affichage du nom du modèle",
|
||||
"modelNameDisplayOptions": {
|
||||
"modelName": "Nom du modèle",
|
||||
"fileName": "Nom du fichier"
|
||||
},
|
||||
"modelNameDisplayHelp": "Choisissez ce qui doit être affiché dans le pied de page de la carte du modèle :",
|
||||
"modelNameDisplayDetails": {
|
||||
"modelName": "Afficher le nom descriptif du modèle",
|
||||
"fileName": "Afficher le nom réel du fichier sur le disque"
|
||||
}
|
||||
"modelNameDisplayHelp": "Choisissez ce qui doit être affiché dans le pied de page de la carte du modèle"
|
||||
},
|
||||
"folderSettings": {
|
||||
"activeLibrary": "Bibliothèque active",
|
||||
@@ -264,6 +309,26 @@
|
||||
"defaultEmbeddingRootHelp": "Définir le répertoire racine embedding par défaut pour les téléchargements, imports et déplacements",
|
||||
"noDefault": "Aucun par défaut"
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "Étiquettes prioritaires",
|
||||
"description": "Personnalisez l'ordre de priorité des étiquettes pour chaque type de modèle (par ex. : character, concept, style(toon|toon_style))",
|
||||
"placeholder": "character, concept, style(toon|toon_style)",
|
||||
"helpLinkLabel": "Ouvrir l'aide sur les étiquettes prioritaires",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA",
|
||||
"checkpoint": "Checkpoint",
|
||||
"embedding": "Embedding"
|
||||
},
|
||||
"saveSuccess": "Étiquettes prioritaires mises à jour.",
|
||||
"saveError": "Échec de la mise à jour des étiquettes prioritaires.",
|
||||
"loadingSuggestions": "Chargement des suggestions...",
|
||||
"validation": {
|
||||
"missingClosingParen": "L'entrée {index} n'a pas de parenthèse fermante.",
|
||||
"missingCanonical": "L'entrée {index} doit inclure un nom d'étiquette canonique.",
|
||||
"duplicateCanonical": "L'étiquette canonique \"{tag}\" apparaît plusieurs fois.",
|
||||
"unknown": "Configuration d'étiquettes prioritaires invalide."
|
||||
}
|
||||
},
|
||||
"downloadPathTemplates": {
|
||||
"title": "Modèles de chemin de téléchargement",
|
||||
"help": "Configurer les structures de dossiers pour différents types de modèles lors du téléchargement depuis Civitai.",
|
||||
@@ -311,6 +376,14 @@
|
||||
"download": "Télécharger",
|
||||
"restartRequired": "Redémarrage requis"
|
||||
},
|
||||
"updateFlagStrategy": {
|
||||
"label": "Stratégie des indicateurs de mise à jour",
|
||||
"help": "Choisissez si les badges de mise à jour doivent apparaître uniquement lorsqu’une nouvelle version partage le même modèle de base que vos fichiers locaux, ou dès qu’il existe une version plus récente pour ce modèle.",
|
||||
"options": {
|
||||
"sameBase": "Faire correspondre les mises à jour par modèle de base",
|
||||
"any": "Signaler n’importe quelle mise à jour disponible"
|
||||
}
|
||||
},
|
||||
"misc": {
|
||||
"includeTriggerWords": "Inclure les mots-clés dans la syntaxe LoRA",
|
||||
"includeTriggerWordsHelp": "Inclure les mots-clés d'entraînement lors de la copie de la syntaxe LoRA dans le presse-papiers"
|
||||
@@ -356,26 +429,6 @@
|
||||
"proxyPassword": "Mot de passe (optionnel)",
|
||||
"proxyPasswordPlaceholder": "mot_de_passe",
|
||||
"proxyPasswordHelp": "Mot de passe pour l'authentification proxy (si nécessaire)"
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "Étiquettes prioritaires",
|
||||
"description": "Personnalisez l'ordre de priorité des étiquettes pour chaque type de modèle (par ex. : character, concept, style(toon|toon_style))",
|
||||
"placeholder": "character, concept, style(toon|toon_style)",
|
||||
"helpLinkLabel": "Ouvrir l'aide sur les étiquettes prioritaires",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA",
|
||||
"checkpoint": "Checkpoint",
|
||||
"embedding": "Embedding"
|
||||
},
|
||||
"saveSuccess": "Étiquettes prioritaires mises à jour.",
|
||||
"saveError": "Échec de la mise à jour des étiquettes prioritaires.",
|
||||
"loadingSuggestions": "Chargement des suggestions...",
|
||||
"validation": {
|
||||
"missingClosingParen": "L'entrée {index} n'a pas de parenthèse fermante.",
|
||||
"missingCanonical": "L'entrée {index} doit inclure un nom d'étiquette canonique.",
|
||||
"duplicateCanonical": "L'étiquette canonique \"{tag}\" apparaît plusieurs fois.",
|
||||
"unknown": "Configuration d'étiquettes prioritaires invalide."
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -394,8 +447,10 @@
|
||||
},
|
||||
"refresh": {
|
||||
"title": "Actualiser la liste des modèles",
|
||||
"quick": "Actualisation rapide (incrémentale)",
|
||||
"full": "Reconstruction complète"
|
||||
"quick": "Synchroniser les changements",
|
||||
"quickTooltip": "Analyse les nouveaux fichiers de modèle ou les fichiers manquants pour garder la liste à jour.",
|
||||
"full": "Reconstruire le cache",
|
||||
"fullTooltip": "Recharge tous les détails des modèles depuis les fichiers metadata — à utiliser si la bibliothèque paraît obsolète ou après des modifications manuelles."
|
||||
},
|
||||
"fetch": {
|
||||
"title": "Récupérer les métadonnées depuis Civitai",
|
||||
@@ -416,6 +471,13 @@
|
||||
"favorites": {
|
||||
"title": "Afficher uniquement les favoris",
|
||||
"action": "Favoris"
|
||||
},
|
||||
"updates": {
|
||||
"title": "Afficher uniquement les modèles avec des mises à jour disponibles",
|
||||
"action": "Mises à jour",
|
||||
"menuLabel": "Afficher les options de mise à jour",
|
||||
"check": "Rechercher des mises à jour",
|
||||
"checkTooltip": "La vérification peut prendre du temps."
|
||||
}
|
||||
},
|
||||
"bulkOperations": {
|
||||
@@ -427,6 +489,7 @@
|
||||
"setContentRating": "Définir la classification du contenu pour tous",
|
||||
"copyAll": "Copier toute la syntaxe",
|
||||
"refreshAll": "Actualiser toutes les métadonnées",
|
||||
"checkUpdates": "Vérifier les mises à jour pour la sélection",
|
||||
"moveAll": "Déplacer tout vers un dossier",
|
||||
"autoOrganize": "Auto-organiser la sélection",
|
||||
"deleteAll": "Supprimer tous les modèles",
|
||||
@@ -443,6 +506,7 @@
|
||||
},
|
||||
"contextMenu": {
|
||||
"refreshMetadata": "Actualiser les données Civitai",
|
||||
"checkUpdates": "Vérifier les mises à jour",
|
||||
"relinkCivitai": "Relier à nouveau à Civitai",
|
||||
"copySyntax": "Copier la syntaxe LoRA",
|
||||
"copyFilename": "Copier le nom de fichier du modèle",
|
||||
@@ -464,6 +528,9 @@
|
||||
},
|
||||
"recipes": {
|
||||
"title": "LoRA Recipes",
|
||||
"actions": {
|
||||
"sendCheckpoint": "Envoyer vers ComfyUI"
|
||||
},
|
||||
"controls": {
|
||||
"import": {
|
||||
"action": "Importer",
|
||||
@@ -702,6 +769,12 @@
|
||||
"countMessage": "modèles seront définitivement supprimés.",
|
||||
"action": "Tout supprimer"
|
||||
},
|
||||
"checkUpdates": {
|
||||
"title": "Vérifier les mises à jour pour tous les {typePlural} ?",
|
||||
"message": "Cette action vérifie les mises à jour pour tous les {typePlural} de votre bibliothèque. Les grandes collections peuvent prendre un peu plus de temps.",
|
||||
"tip": "Besoin de procéder par étapes ? Passez en mode lot, sélectionnez les modèles souhaités puis utilisez \"Vérifier les mises à jour pour la sélection\".",
|
||||
"action": "Tout vérifier"
|
||||
},
|
||||
"bulkAddTags": {
|
||||
"title": "Ajouter des tags à plusieurs modèles",
|
||||
"description": "Ajouter des tags à",
|
||||
@@ -838,13 +911,77 @@
|
||||
"tabs": {
|
||||
"examples": "Exemples",
|
||||
"description": "Description du modèle",
|
||||
"recipes": "Recipes"
|
||||
"recipes": "Recipes",
|
||||
"versions": "Versions"
|
||||
},
|
||||
"license": {
|
||||
"noImageSell": "No selling generated content",
|
||||
"noRentCivit": "No Civitai generation",
|
||||
"noRent": "No generation services",
|
||||
"noSell": "No selling models",
|
||||
"creditRequired": "Crédit du créateur requis",
|
||||
"noDerivatives": "Pas de fusion de partage",
|
||||
"noReLicense": "Mêmes autorisations requises",
|
||||
"restrictionsLabel": "Restrictions de licence"
|
||||
},
|
||||
"loading": {
|
||||
"exampleImages": "Chargement des images d'exemple...",
|
||||
"description": "Chargement de la description du modèle...",
|
||||
"recipes": "Chargement des recipes...",
|
||||
"examples": "Chargement des exemples..."
|
||||
"examples": "Chargement des exemples...",
|
||||
"versions": "Chargement des versions..."
|
||||
},
|
||||
"versions": {
|
||||
"heading": "Versions du modèle",
|
||||
"copy": "Gérez toutes les versions de ce modèle en un seul endroit.",
|
||||
"media": {
|
||||
"placeholder": "Aucune prévisualisation"
|
||||
},
|
||||
"labels": {
|
||||
"unnamed": "Version sans nom",
|
||||
"noDetails": "Aucun détail supplémentaire"
|
||||
},
|
||||
"badges": {
|
||||
"current": "Version actuelle",
|
||||
"inLibrary": "Dans la bibliothèque",
|
||||
"newer": "Version plus récente",
|
||||
"ignored": "Ignorée"
|
||||
},
|
||||
"actions": {
|
||||
"download": "Télécharger",
|
||||
"delete": "Supprimer",
|
||||
"ignore": "Ignorer",
|
||||
"unignore": "Ne plus ignorer",
|
||||
"resumeModelUpdates": "Reprendre les mises à jour pour ce modèle",
|
||||
"ignoreModelUpdates": "Ignorer les mises à jour pour ce modèle",
|
||||
"viewLocalVersions": "Voir toutes les versions locales",
|
||||
"viewLocalTooltip": "Bientôt disponible"
|
||||
},
|
||||
"filters": {
|
||||
"label": "Filtre de base",
|
||||
"state": {
|
||||
"showAll": "Toutes les versions",
|
||||
"showSameBase": "Même modèle de base"
|
||||
},
|
||||
"tooltip": {
|
||||
"showAllVersions": "Passer à l'affichage de toutes les versions",
|
||||
"showSameBaseVersions": "Passer à l'affichage des versions du même modèle de base"
|
||||
},
|
||||
"empty": "Aucune version ne correspond au filtre du modèle de base actuel."
|
||||
},
|
||||
"empty": "Aucun historique de versions n'est disponible pour ce modèle pour le moment.",
|
||||
"error": "Échec du chargement des versions.",
|
||||
"missingModelId": "Ce modèle ne possède pas d'identifiant de modèle Civitai.",
|
||||
"confirm": {
|
||||
"delete": "Supprimer cette version de votre bibliothèque ?"
|
||||
},
|
||||
"toast": {
|
||||
"modelIgnored": "Les mises à jour de ce modèle sont ignorées",
|
||||
"modelResumed": "Suivi des mises à jour repris",
|
||||
"versionIgnored": "Les mises à jour de cette version sont ignorées",
|
||||
"versionUnignored": "Version réactivée",
|
||||
"versionDeleted": "Version supprimée"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -951,7 +1088,9 @@
|
||||
"loraFailedToSend": "Échec de l'envoi du LoRA au workflow",
|
||||
"recipeAdded": "Recipe ajoutée au workflow",
|
||||
"recipeReplaced": "Recipe remplacée dans le workflow",
|
||||
"recipeFailedToSend": "Échec de l'envoi de la recipe au workflow"
|
||||
"recipeFailedToSend": "Échec de l'envoi de la recipe au workflow",
|
||||
"noMatchingNodes": "Aucun nœud compatible disponible dans le workflow actuel",
|
||||
"noTargetNodeSelected": "Aucun nœud cible sélectionné"
|
||||
},
|
||||
"nodeSelector": {
|
||||
"recipe": "Recipe",
|
||||
@@ -996,6 +1135,11 @@
|
||||
},
|
||||
"update": {
|
||||
"title": "Vérifier les mises à jour",
|
||||
"notificationsTitle": "Notifications",
|
||||
"tabs": {
|
||||
"updates": "Mises à jour",
|
||||
"messages": "Messages"
|
||||
},
|
||||
"updateAvailable": "Mise à jour disponible",
|
||||
"noChangelogAvailable": "Aucun journal des modifications détaillé disponible. Consultez GitHub pour plus d'informations.",
|
||||
"currentVersion": "Version actuelle",
|
||||
@@ -1027,6 +1171,13 @@
|
||||
"nightly": {
|
||||
"warning": "Attention : Les versions nightly peuvent contenir des fonctionnalités expérimentales et être instables.",
|
||||
"enable": "Activer les mises à jour nightly"
|
||||
},
|
||||
"banners": {
|
||||
"recent": "Messages récents",
|
||||
"empty": "Aucune bannière récente.",
|
||||
"shown": "Affiché {time}",
|
||||
"dismissed": "Ignoré {time}",
|
||||
"active": "Actif"
|
||||
}
|
||||
},
|
||||
"support": {
|
||||
@@ -1106,6 +1257,9 @@
|
||||
"cannotSend": "Impossible d'envoyer la recipe : ID de recipe manquant",
|
||||
"sendFailed": "Échec de l'envoi de la recipe vers le workflow",
|
||||
"sendError": "Erreur lors de l'envoi de la recipe vers le workflow",
|
||||
"missingCheckpointPath": "Chemin du checkpoint indisponible",
|
||||
"missingCheckpointInfo": "Informations sur le checkpoint manquantes",
|
||||
"downloadCheckpointFailed": "Échec du téléchargement du checkpoint : {message}",
|
||||
"cannotDelete": "Impossible de supprimer la recipe : ID de recipe manquant",
|
||||
"deleteConfirmationError": "Erreur lors de l'affichage de la confirmation de suppression",
|
||||
"deletedSuccessfully": "Recipe supprimée avec succès",
|
||||
@@ -1146,6 +1300,12 @@
|
||||
"bulkContentRatingSet": "Classification du contenu définie sur {level} pour {count} modèle(s)",
|
||||
"bulkContentRatingPartial": "Classification du contenu définie sur {level} pour {success} modèle(s), {failed} échec(s)",
|
||||
"bulkContentRatingFailed": "Impossible de mettre à jour la classification du contenu pour les modèles sélectionnés",
|
||||
"bulkUpdatesChecking": "Vérification des mises à jour pour les {type} sélectionnés...",
|
||||
"bulkUpdatesSuccess": "Mises à jour disponibles pour {count} {type} sélectionnés",
|
||||
"bulkUpdatesNone": "Aucune mise à jour trouvée pour les {type} sélectionnés",
|
||||
"bulkUpdatesMissing": "Les {type} sélectionnés ne sont pas liés aux mises à jour Civitai",
|
||||
"bulkUpdatesPartialMissing": "{missing} {type} sélectionnés sans lien Civitai ignorés",
|
||||
"bulkUpdatesFailed": "Échec de la vérification des mises à jour pour les {type} sélectionnés : {message}",
|
||||
"invalidCharactersRemoved": "Caractères invalides supprimés du nom de fichier",
|
||||
"filenameCannotBeEmpty": "Le nom de fichier ne peut pas être vide",
|
||||
"renameFailed": "Échec du renommage du fichier : {message}",
|
||||
@@ -1206,7 +1366,7 @@
|
||||
},
|
||||
"triggerWords": {
|
||||
"loadFailed": "Impossible de charger les mots entraînés",
|
||||
"tooLong": "Le mot-clé ne doit pas dépasser 30 mots",
|
||||
"tooLong": "Le mot-clé ne doit pas dépasser 100 mots",
|
||||
"tooMany": "Maximum 30 mots-clés autorisés",
|
||||
"alreadyExists": "Ce mot-clé existe déjà",
|
||||
"updateSuccess": "Mots-clés mis à jour avec succès",
|
||||
|
||||
194
locales/he.json
194
locales/he.json
@@ -101,7 +101,12 @@
|
||||
"checkpointNameCopied": "שם Checkpoint הועתק",
|
||||
"toggleBlur": "הפעל/כבה טשטוש",
|
||||
"show": "הצג",
|
||||
"openExampleImages": "פתח תיקיית תמונות דוגמה"
|
||||
"openExampleImages": "פתח תיקיית תמונות דוגמה",
|
||||
"replacePreview": "החלף תצוגה מקדימה",
|
||||
"copyCheckpointName": "העתק שם Checkpoint",
|
||||
"copyEmbeddingName": "העתק שם Embedding",
|
||||
"sendCheckpointToWorkflow": "שלח ל-ComfyUI",
|
||||
"sendEmbeddingToWorkflow": "שלח ל-ComfyUI"
|
||||
},
|
||||
"nsfw": {
|
||||
"matureContent": "תוכן למבוגרים",
|
||||
@@ -115,12 +120,17 @@
|
||||
"updateFailed": "עדכון סטטוס מועדפים נכשל"
|
||||
},
|
||||
"sendToWorkflow": {
|
||||
"checkpointNotImplemented": "שליחת checkpoint ל-workflow - תכונה שתיושם בעתיד"
|
||||
"checkpointNotImplemented": "שליחת checkpoint ל-workflow - תכונה שתיושם בעתיד",
|
||||
"missingPath": "לא ניתן לקבוע את נתיב המודל לכרטיס זה"
|
||||
},
|
||||
"exampleImages": {
|
||||
"checkError": "שגיאה בבדיקת תמונות דוגמה",
|
||||
"missingHash": "חסר מידע hash של המודל.",
|
||||
"noRemoteImagesAvailable": "אין תמונות דוגמה מרוחקות זמינות למודל זה ב-Civitai"
|
||||
},
|
||||
"badges": {
|
||||
"update": "עדכון",
|
||||
"updateAvailable": "עדכון זמין"
|
||||
}
|
||||
},
|
||||
"globalContextMenu": {
|
||||
@@ -129,12 +139,26 @@
|
||||
"missingPath": "הגדר מיקום הורדה לפני הורדת תמונות דוגמה.",
|
||||
"unavailable": "הורדות תמונות דוגמה אינן זמינות עדיין. נסה שוב לאחר שהדף מסיים להיטען."
|
||||
},
|
||||
"checkModelUpdates": {
|
||||
"label": "בדוק עדכונים",
|
||||
"loading": "בודק עדכונים עבור {type}...",
|
||||
"success": "נמצאו {count} עדכונים עבור {type}",
|
||||
"none": "כל ה-{type} מעודכנים",
|
||||
"error": "נכשל בבדיקת העדכונים עבור {type}: {message}"
|
||||
},
|
||||
"cleanupExampleImages": {
|
||||
"label": "נקה תיקיות תמונות דוגמה",
|
||||
"success": "הועברו {count} תיקיות לתיקיית המחוקים",
|
||||
"none": "אין תיקיות תמונות דוגמה שזקוקות לניקוי",
|
||||
"partial": "הניקוי הושלם עם דילוג על {failures} תיקיות",
|
||||
"error": "ניקוי תיקיות תמונות הדוגמה נכשל: {message}"
|
||||
},
|
||||
"fetchMissingLicenses": {
|
||||
"label": "Refresh license metadata",
|
||||
"loading": "Refreshing license metadata for {typePlural}...",
|
||||
"success": "Updated license metadata for {count} {typePlural}",
|
||||
"none": "All {typePlural} already have license metadata",
|
||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||
}
|
||||
},
|
||||
"header": {
|
||||
@@ -171,6 +195,10 @@
|
||||
"title": "סנן מודלים",
|
||||
"baseModel": "מודל בסיס",
|
||||
"modelTags": "תגיות (20 המובילות)",
|
||||
"modelTypes": "Model Types",
|
||||
"license": "רישיון",
|
||||
"noCreditRequired": "ללא קרדיט נדרש",
|
||||
"allowSellingGeneratedContent": "אפשר מכירה",
|
||||
"clearAll": "נקה את כל המסננים"
|
||||
},
|
||||
"theme": {
|
||||
@@ -181,6 +209,7 @@
|
||||
},
|
||||
"actions": {
|
||||
"checkUpdates": "בדוק עדכונים",
|
||||
"notifications": "התראות",
|
||||
"support": "תמיכה"
|
||||
}
|
||||
},
|
||||
@@ -201,11 +230,18 @@
|
||||
"folderSettings": "הגדרות תיקייה",
|
||||
"downloadPathTemplates": "תבניות נתיב הורדה",
|
||||
"exampleImages": "תמונות דוגמה",
|
||||
"updateFlags": "תגי עדכון",
|
||||
"autoOrganize": "Auto-organize",
|
||||
"misc": "שונות",
|
||||
"metadataArchive": "מסד נתונים של ארכיון מטא-דאטה",
|
||||
"storageLocation": "מיקום ההגדרות",
|
||||
"proxySettings": "הגדרות פרוקסי",
|
||||
"priorityTags": "תגיות עדיפות"
|
||||
},
|
||||
"storage": {
|
||||
"locationLabel": "מצב נייד",
|
||||
"locationHelp": "הפעל כדי לשמור את settings.json בתוך המאגר; בטל כדי לשמור אותו בתיקיית ההגדרות של המשתמש."
|
||||
},
|
||||
"contentFiltering": {
|
||||
"blurNsfwContent": "טשטש תוכן NSFW",
|
||||
"blurNsfwContentHelp": "טשטש תמונות תצוגה מקדימה של תוכן למבוגרים (NSFW)",
|
||||
@@ -216,6 +252,15 @@
|
||||
"autoplayOnHover": "נגן וידאו אוטומטית בריחוף",
|
||||
"autoplayOnHoverHelp": "נגן תצוגות מקדימות של וידאו רק בעת ריחוף מעליהן"
|
||||
},
|
||||
"autoOrganizeExclusions": {
|
||||
"label": "יוצא דופן של ארגון אוטומטי",
|
||||
"placeholder": "דוגמה: curated/*, */backups/*; *_temp.safetensors",
|
||||
"help": "דלג על העברת קבצים התואמים לתבניות אלו. הפרד תבניות מרובות בפסיקים או בנקודותיים.",
|
||||
"validation": {
|
||||
"noPatterns": "הזן לפחות תבנית אחת מופרדת בפסיקים או בנקודותיים.",
|
||||
"saveFailed": "לא ניתן לשמור את ההוצאות: {message}"
|
||||
}
|
||||
},
|
||||
"layoutSettings": {
|
||||
"displayDensity": "צפיפות תצוגה",
|
||||
"displayDensityOptions": {
|
||||
@@ -230,26 +275,26 @@
|
||||
"compact": "7 (1080p), 8 (2K), 10 (4K)"
|
||||
},
|
||||
"displayDensityWarning": "אזהרה: צפיפויות גבוהות יותר עלולות לגרום לבעיות ביצועים במערכות עם משאבים מוגבלים.",
|
||||
"showFolderSidebar": "הצג סרגל צד תיקיות",
|
||||
"showFolderSidebarHelp": "הפעל או כבה את סרגל הצד לניווט תיקיות בדפי המודל. כאשר הוא כבוי, סרגל הצד ואזור הריחוף נשארים מוסתרים.",
|
||||
"cardInfoDisplay": "תצוגת מידע בכרטיס",
|
||||
"cardInfoDisplayOptions": {
|
||||
"always": "תמיד גלוי",
|
||||
"hover": "חשוף בריחוף"
|
||||
},
|
||||
"cardInfoDisplayHelp": "בחר מתי להציג מידע על המודל וכפתורי פעולה:",
|
||||
"cardInfoDisplayDetails": {
|
||||
"always": "כותרות עליונות ותחתונות תמיד גלויות",
|
||||
"hover": "כותרות עליונות ותחתונות מופיעות רק בעת ריחוף מעל כרטיס"
|
||||
"cardInfoDisplayHelp": "בחר מתי להציג מידע על המודל וכפתורי פעולה",
|
||||
"modelCardFooterAction": "פעולת כפתור כרטיס מודל",
|
||||
"modelCardFooterActionOptions": {
|
||||
"exampleImages": "פתח תמונות דוגמה",
|
||||
"replacePreview": "החלף תצוגה מקדימה"
|
||||
},
|
||||
"modelCardFooterActionHelp": "בחר מה עושה הכפתור בפינה הימנית התחתונה של הכרטיס",
|
||||
"modelNameDisplay": "תצוגת שם מודל",
|
||||
"modelNameDisplayOptions": {
|
||||
"modelName": "שם מודל",
|
||||
"fileName": "שם קובץ"
|
||||
},
|
||||
"modelNameDisplayHelp": "בחר מה להציג בכותרת התחתונה של כרטיס המודל:",
|
||||
"modelNameDisplayDetails": {
|
||||
"modelName": "הצג את השם התיאורי של המודל",
|
||||
"fileName": "הצג את שם הקובץ בפועל בדיסק"
|
||||
}
|
||||
"modelNameDisplayHelp": "בחר מה להציג בכותרת התחתונה של כרטיס המודל"
|
||||
},
|
||||
"folderSettings": {
|
||||
"activeLibrary": "ספרייה פעילה",
|
||||
@@ -311,6 +356,14 @@
|
||||
"download": "הורד",
|
||||
"restartRequired": "דורש הפעלה מחדש"
|
||||
},
|
||||
"updateFlagStrategy": {
|
||||
"label": "אסטרטגיית תגי עדכון",
|
||||
"help": "בחרו אם תוויות העדכון יוצגו רק כאשר גרסה חדשה חולקת את אותו דגם בסיס כמו הקבצים המקומיים שלכם או בכל מקרה שבו קיימת גרסה חדשה עבור אותו דגם.",
|
||||
"options": {
|
||||
"sameBase": "התאמת עדכונים לפי דגם בסיס",
|
||||
"any": "תוויות לכל עדכון זמין"
|
||||
}
|
||||
},
|
||||
"misc": {
|
||||
"includeTriggerWords": "כלול מילות טריגר בתחביר LoRA",
|
||||
"includeTriggerWordsHelp": "כלול מילות טריגר מאומנות בעת העתקת תחביר LoRA ללוח"
|
||||
@@ -394,8 +447,10 @@
|
||||
},
|
||||
"refresh": {
|
||||
"title": "רענן רשימת מודלים",
|
||||
"quick": "רענון מהיר (מצטבר)",
|
||||
"full": "בנייה מחדש מלאה (שלם)"
|
||||
"quick": "סנכרון שינויים",
|
||||
"quickTooltip": "סריקה לאיתור קבצי מודל חדשים או חסרים כדי לשמור את הרשימה מעודכנת.",
|
||||
"full": "בניית מטמון מחדש",
|
||||
"fullTooltip": "טוען מחדש את כל פרטי המודלים מקבצי המטא-דאטה – לשימוש אם הספרייה נראית לא מעודכנת או לאחר עריכות ידניות."
|
||||
},
|
||||
"fetch": {
|
||||
"title": "אחזר מטא-דאטה מ-Civitai",
|
||||
@@ -416,6 +471,13 @@
|
||||
"favorites": {
|
||||
"title": "הצג מועדפים בלבד",
|
||||
"action": "מועדפים"
|
||||
},
|
||||
"updates": {
|
||||
"title": "הצג רק דגמים עם עדכונים זמינים",
|
||||
"action": "עדכונים",
|
||||
"menuLabel": "הצגת אפשרויות עדכון",
|
||||
"check": "בדוק עדכונים",
|
||||
"checkTooltip": "בדיקת עדכונים עלולה לקחת זמן."
|
||||
}
|
||||
},
|
||||
"bulkOperations": {
|
||||
@@ -427,6 +489,7 @@
|
||||
"setContentRating": "הגדר דירוג תוכן לכל המודלים",
|
||||
"copyAll": "העתק את כל התחבירים",
|
||||
"refreshAll": "רענן את כל המטא-דאטה",
|
||||
"checkUpdates": "בדוק עדכונים לבחירה",
|
||||
"moveAll": "העבר הכל לתיקייה",
|
||||
"autoOrganize": "ארגן אוטומטית נבחרים",
|
||||
"deleteAll": "מחק את כל המודלים",
|
||||
@@ -443,6 +506,7 @@
|
||||
},
|
||||
"contextMenu": {
|
||||
"refreshMetadata": "רענן נתוני Civitai",
|
||||
"checkUpdates": "בדוק עדכונים",
|
||||
"relinkCivitai": "קשר מחדש ל-Civitai",
|
||||
"copySyntax": "העתק תחביר LoRA",
|
||||
"copyFilename": "העתק שם קובץ מודל",
|
||||
@@ -464,6 +528,9 @@
|
||||
},
|
||||
"recipes": {
|
||||
"title": "מתכוני LoRA",
|
||||
"actions": {
|
||||
"sendCheckpoint": "שלח ל-ComfyUI"
|
||||
},
|
||||
"controls": {
|
||||
"import": {
|
||||
"action": "ייבא",
|
||||
@@ -702,6 +769,12 @@
|
||||
"countMessage": "מודלים יימחקו לצמיתות.",
|
||||
"action": "מחק הכל"
|
||||
},
|
||||
"checkUpdates": {
|
||||
"title": "לבדוק עדכונים לכל ה-{typePlural}?",
|
||||
"message": "הפעולה תבדוק עדכונים עבור כל ה-{typePlural} בספרייה שלך. באוספים גדולים זה עלול לקחת מעט יותר זמן.",
|
||||
"tip": "רוצים לחלק למנות קטנות? עברו למצב קבוצתי, בחרו את המודלים הדרושים ואז השתמשו ב\"בדוק עדכונים לנבחרים\".",
|
||||
"action": "בדוק הכל"
|
||||
},
|
||||
"bulkAddTags": {
|
||||
"title": "הוסף תגיות למספר מודלים",
|
||||
"description": "הוסף תגיות ל-",
|
||||
@@ -838,13 +911,77 @@
|
||||
"tabs": {
|
||||
"examples": "דוגמאות",
|
||||
"description": "תיאור המודל",
|
||||
"recipes": "מתכונים"
|
||||
"recipes": "מתכונים",
|
||||
"versions": "גרסאות"
|
||||
},
|
||||
"license": {
|
||||
"noImageSell": "No selling generated content",
|
||||
"noRentCivit": "No Civitai generation",
|
||||
"noRent": "No generation services",
|
||||
"noSell": "No selling models",
|
||||
"creditRequired": "נדרש ייחוס ליוצר",
|
||||
"noDerivatives": "אין שיתוף מיזוגים",
|
||||
"noReLicense": "נדרשות אותן הרשאות",
|
||||
"restrictionsLabel": "הגבלות רישיון"
|
||||
},
|
||||
"loading": {
|
||||
"exampleImages": "טוען תמונות דוגמה...",
|
||||
"description": "טוען תיאור מודל...",
|
||||
"recipes": "טוען מתכונים...",
|
||||
"examples": "טוען דוגמאות..."
|
||||
"examples": "טוען דוגמאות...",
|
||||
"versions": "טוען גרסאות..."
|
||||
},
|
||||
"versions": {
|
||||
"heading": "גרסאות המודל",
|
||||
"copy": "נהל את כל הגרסאות של המודל הזה במקום אחד.",
|
||||
"media": {
|
||||
"placeholder": "אין תצוגה מקדימה"
|
||||
},
|
||||
"labels": {
|
||||
"unnamed": "גרסה ללא שם",
|
||||
"noDetails": "אין פרטים נוספים"
|
||||
},
|
||||
"badges": {
|
||||
"current": "גרסה נוכחית",
|
||||
"inLibrary": "בספרייה",
|
||||
"newer": "גרסה חדשה יותר",
|
||||
"ignored": "התעלם"
|
||||
},
|
||||
"actions": {
|
||||
"download": "הורדה",
|
||||
"delete": "מחיקה",
|
||||
"ignore": "התעלם",
|
||||
"unignore": "בטל התעלמות",
|
||||
"resumeModelUpdates": "המשך עדכונים עבור מודל זה",
|
||||
"ignoreModelUpdates": "התעלם מעדכונים עבור מודל זה",
|
||||
"viewLocalVersions": "הצג את כל הגרסאות המקומיות",
|
||||
"viewLocalTooltip": "יגיע בקרוב"
|
||||
},
|
||||
"filters": {
|
||||
"label": "מסנן בסיס",
|
||||
"state": {
|
||||
"showAll": "כל הגרסאות",
|
||||
"showSameBase": "אותו מודל בסיס"
|
||||
},
|
||||
"tooltip": {
|
||||
"showAllVersions": "החלף להצגת כל הגרסאות",
|
||||
"showSameBaseVersions": "החלף להצגת גרסאות עם אותו מודל בסיס"
|
||||
},
|
||||
"empty": "אין גרסאות התואמות את המסנן של מודל הבסיס הנוכחי."
|
||||
},
|
||||
"empty": "אין עדיין היסטוריית גרסאות למודל זה.",
|
||||
"error": "טעינת הגרסאות נכשלה.",
|
||||
"missingModelId": "למודל זה אין מזהה מודל של Civitai.",
|
||||
"confirm": {
|
||||
"delete": "למחוק גרסה זו מהספרייה שלך?"
|
||||
},
|
||||
"toast": {
|
||||
"modelIgnored": "העדכונים עבור מודל זה נוגבו",
|
||||
"modelResumed": "מעקב העדכונים חודש",
|
||||
"versionIgnored": "העדכונים עבור גרסה זו נוגבו",
|
||||
"versionUnignored": "הגרסה הופעלה מחדש",
|
||||
"versionDeleted": "הגרסה נמחקה"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -951,7 +1088,9 @@
|
||||
"loraFailedToSend": "שליחת LoRA ל-workflow נכשלה",
|
||||
"recipeAdded": "מתכון נוסף ל-workflow",
|
||||
"recipeReplaced": "מתכון הוחלף ב-workflow",
|
||||
"recipeFailedToSend": "שליחת מתכון ל-workflow נכשלה"
|
||||
"recipeFailedToSend": "שליחת מתכון ל-workflow נכשלה",
|
||||
"noMatchingNodes": "אין צמתים תואמים זמינים ב-workflow הנוכחי",
|
||||
"noTargetNodeSelected": "לא נבחר צומת יעד"
|
||||
},
|
||||
"nodeSelector": {
|
||||
"recipe": "מתכון",
|
||||
@@ -996,6 +1135,11 @@
|
||||
},
|
||||
"update": {
|
||||
"title": "בדוק עדכונים",
|
||||
"notificationsTitle": "מרכז התראות",
|
||||
"tabs": {
|
||||
"updates": "עדכונים",
|
||||
"messages": "הודעות"
|
||||
},
|
||||
"updateAvailable": "עדכון זמין",
|
||||
"noChangelogAvailable": "אין יומן שינויים מפורט זמין. בדוק ב-GitHub למידע נוסף.",
|
||||
"currentVersion": "גרסה נוכחית",
|
||||
@@ -1027,6 +1171,13 @@
|
||||
"nightly": {
|
||||
"warning": "אזהרה: גרסאות ליליות עשויות להכיל תכונות ניסיוניות ועלולות להיות לא יציבות.",
|
||||
"enable": "הפעל עדכונים ליליים"
|
||||
},
|
||||
"banners": {
|
||||
"recent": "הודעות אחרונות",
|
||||
"empty": "אין כרגע באנרים אחרונים.",
|
||||
"shown": "הוצג {time}",
|
||||
"dismissed": "הוסר {time}",
|
||||
"active": "פעיל"
|
||||
}
|
||||
},
|
||||
"support": {
|
||||
@@ -1106,6 +1257,9 @@
|
||||
"cannotSend": "לא ניתן לשלוח מתכון: חסר מזהה מתכון",
|
||||
"sendFailed": "שליחת המתכון ל-workflow נכשלה",
|
||||
"sendError": "שגיאה בשליחת המתכון ל-workflow",
|
||||
"missingCheckpointPath": "נתיב ה-checkpoint אינו זמין",
|
||||
"missingCheckpointInfo": "חסרים פרטי checkpoint",
|
||||
"downloadCheckpointFailed": "הורדת checkpoint נכשלה: {message}",
|
||||
"cannotDelete": "לא ניתן למחוק מתכון: חסר מזהה מתכון",
|
||||
"deleteConfirmationError": "שגיאה בהצגת אישור המחיקה",
|
||||
"deletedSuccessfully": "המתכון נמחק בהצלחה",
|
||||
@@ -1146,6 +1300,12 @@
|
||||
"bulkContentRatingSet": "דירוג התוכן הוגדר ל-{level} עבור {count} מודלים",
|
||||
"bulkContentRatingPartial": "דירוג התוכן הוגדר ל-{level} עבור {success} מודלים, {failed} נכשלו",
|
||||
"bulkContentRatingFailed": "עדכון דירוג התוכן עבור המודלים שנבחרו נכשל",
|
||||
"bulkUpdatesChecking": "בודק עדכונים עבור {type} שנבחרו...",
|
||||
"bulkUpdatesSuccess": "יש עדכונים עבור {count} {type} שנבחרו",
|
||||
"bulkUpdatesNone": "לא נמצאו עדכונים עבור {type} שנבחרו",
|
||||
"bulkUpdatesMissing": "ה-{type} שנבחרו אינם מקושרים לעדכוני Civitai",
|
||||
"bulkUpdatesPartialMissing": "דילג על {missing} {type} שנבחרו ללא קישור Civitai",
|
||||
"bulkUpdatesFailed": "בדיקת העדכונים עבור {type} שנבחרו נכשלה: {message}",
|
||||
"invalidCharactersRemoved": "תווים לא חוקיים הוסרו משם הקובץ",
|
||||
"filenameCannotBeEmpty": "שם הקובץ אינו יכול להיות ריק",
|
||||
"renameFailed": "שינוי שם הקובץ נכשל: {message}",
|
||||
@@ -1206,7 +1366,7 @@
|
||||
},
|
||||
"triggerWords": {
|
||||
"loadFailed": "לא ניתן היה לטעון מילים מאומנות",
|
||||
"tooLong": "מילת טריגר לא תעלה על 30 מילים",
|
||||
"tooLong": "מילת טריגר לא תעלה על 100 מילים",
|
||||
"tooMany": "מותרות עד 30 מילות טריגר",
|
||||
"alreadyExists": "מילת טריגר זו כבר קיימת",
|
||||
"updateSuccess": "מילות הטריגר עודכנו בהצלחה",
|
||||
|
||||
238
locales/ja.json
238
locales/ja.json
@@ -101,7 +101,12 @@
|
||||
"checkpointNameCopied": "checkpointの名前をコピーしました",
|
||||
"toggleBlur": "ぼかしの切り替え",
|
||||
"show": "表示",
|
||||
"openExampleImages": "例画像フォルダを開く"
|
||||
"openExampleImages": "例画像フォルダを開く",
|
||||
"replacePreview": "プレビューを置換",
|
||||
"copyCheckpointName": "checkpoint名をコピー",
|
||||
"copyEmbeddingName": "embedding名をコピー",
|
||||
"sendCheckpointToWorkflow": "ComfyUIに送信",
|
||||
"sendEmbeddingToWorkflow": "ComfyUIに送信"
|
||||
},
|
||||
"nsfw": {
|
||||
"matureContent": "成人向けコンテンツ",
|
||||
@@ -115,12 +120,17 @@
|
||||
"updateFailed": "お気に入り状態の更新に失敗しました"
|
||||
},
|
||||
"sendToWorkflow": {
|
||||
"checkpointNotImplemented": "checkpointをワークフローに送信 - 実装予定の機能"
|
||||
"checkpointNotImplemented": "checkpointをワークフローに送信 - 実装予定の機能",
|
||||
"missingPath": "このカードのモデルパスを特定できません"
|
||||
},
|
||||
"exampleImages": {
|
||||
"checkError": "例画像の確認中にエラーが発生しました",
|
||||
"missingHash": "モデルハッシュ情報がありません。",
|
||||
"noRemoteImagesAvailable": "このモデルのCivitaiでのリモート例画像は利用できません"
|
||||
},
|
||||
"badges": {
|
||||
"update": "アップデート",
|
||||
"updateAvailable": "アップデートがあります"
|
||||
}
|
||||
},
|
||||
"globalContextMenu": {
|
||||
@@ -129,12 +139,26 @@
|
||||
"missingPath": "例画像をダウンロードする前にダウンロード場所を設定してください。",
|
||||
"unavailable": "例画像のダウンロードはまだ利用できません。ページの読み込みが完了してから再度お試しください。"
|
||||
},
|
||||
"checkModelUpdates": {
|
||||
"label": "アップデートを確認",
|
||||
"loading": "{type} のアップデートを確認中…",
|
||||
"success": "{type} のアップデートが {count} 件見つかりました",
|
||||
"none": "すべての {type} は最新です",
|
||||
"error": "{type} のアップデート確認に失敗しました: {message}"
|
||||
},
|
||||
"cleanupExampleImages": {
|
||||
"label": "例画像フォルダをクリーンアップ",
|
||||
"success": "{count} 個のフォルダを削除フォルダに移動しました",
|
||||
"none": "クリーンアップが必要な例画像フォルダはありません",
|
||||
"partial": "クリーンアップが完了しましたが、{failures} 個のフォルダはスキップされました",
|
||||
"error": "例画像フォルダのクリーンアップに失敗しました:{message}"
|
||||
},
|
||||
"fetchMissingLicenses": {
|
||||
"label": "Refresh license metadata",
|
||||
"loading": "Refreshing license metadata for {typePlural}...",
|
||||
"success": "Updated license metadata for {count} {typePlural}",
|
||||
"none": "All {typePlural} already have license metadata",
|
||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||
}
|
||||
},
|
||||
"header": {
|
||||
@@ -171,6 +195,10 @@
|
||||
"title": "モデルをフィルタ",
|
||||
"baseModel": "ベースモデル",
|
||||
"modelTags": "タグ(上位20)",
|
||||
"modelTypes": "Model Types",
|
||||
"license": "ライセンス",
|
||||
"noCreditRequired": "クレジット不要",
|
||||
"allowSellingGeneratedContent": "販売許可",
|
||||
"clearAll": "すべてのフィルタをクリア"
|
||||
},
|
||||
"theme": {
|
||||
@@ -181,6 +209,7 @@
|
||||
},
|
||||
"actions": {
|
||||
"checkUpdates": "更新確認",
|
||||
"notifications": "通知",
|
||||
"support": "サポート"
|
||||
}
|
||||
},
|
||||
@@ -199,12 +228,19 @@
|
||||
"videoSettings": "動画設定",
|
||||
"layoutSettings": "レイアウト設定",
|
||||
"folderSettings": "フォルダ設定",
|
||||
"priorityTags": "優先タグ",
|
||||
"downloadPathTemplates": "ダウンロードパステンプレート",
|
||||
"exampleImages": "例画像",
|
||||
"updateFlags": "アップデートフラグ",
|
||||
"autoOrganize": "Auto-organize",
|
||||
"misc": "その他",
|
||||
"metadataArchive": "メタデータアーカイブデータベース",
|
||||
"proxySettings": "プロキシ設定",
|
||||
"priorityTags": "優先タグ"
|
||||
"storageLocation": "設定の場所",
|
||||
"proxySettings": "プロキシ設定"
|
||||
},
|
||||
"storage": {
|
||||
"locationLabel": "ポータブルモード",
|
||||
"locationHelp": "有効にすると settings.json をリポジトリ内に保持し、無効にするとユーザー設定ディレクトリに格納します。"
|
||||
},
|
||||
"contentFiltering": {
|
||||
"blurNsfwContent": "NSFWコンテンツをぼかす",
|
||||
@@ -216,6 +252,15 @@
|
||||
"autoplayOnHover": "ホバー時に動画を自動再生",
|
||||
"autoplayOnHoverHelp": "動画プレビューはホバー時にのみ再生されます"
|
||||
},
|
||||
"autoOrganizeExclusions": {
|
||||
"label": "自動整理除外設定",
|
||||
"placeholder": "例: curated/*, */backups/*; *_temp.safetensors",
|
||||
"help": "これらのワイルドカードパターンに一致するファイルの移動をスキップします。複数のパターンはカンマまたはセミコロンで区切ってください。",
|
||||
"validation": {
|
||||
"noPatterns": "カンマまたはセミコロンで区切られた少なくとも1つのパターンを入力してください。",
|
||||
"saveFailed": "除外設定を保存できませんでした: {message}"
|
||||
}
|
||||
},
|
||||
"layoutSettings": {
|
||||
"displayDensity": "表示密度",
|
||||
"displayDensityOptions": {
|
||||
@@ -230,26 +275,26 @@
|
||||
"compact": "7(1080p)、8(2K)、10(4K)"
|
||||
},
|
||||
"displayDensityWarning": "警告:高密度設定は、リソースが限られたシステムでパフォーマンスの問題を引き起こす可能性があります。",
|
||||
"showFolderSidebar": "フォルダサイドバーを表示",
|
||||
"showFolderSidebarHelp": "モデルページのフォルダナビゲーションサイドバーを表示/非表示にします。無効にするとサイドバーとホバーエリアは表示されません。",
|
||||
"cardInfoDisplay": "カード情報表示",
|
||||
"cardInfoDisplayOptions": {
|
||||
"always": "常に表示",
|
||||
"hover": "ホバー時に表示"
|
||||
},
|
||||
"cardInfoDisplayHelp": "モデル情報とアクションボタンの表示タイミングを選択:",
|
||||
"cardInfoDisplayDetails": {
|
||||
"always": "ヘッダーとフッターが常に表示されます",
|
||||
"hover": "カードにホバーしたときのみヘッダーとフッターが表示されます"
|
||||
"cardInfoDisplayHelp": "モデル情報とアクションボタンの表示タイミングを選択",
|
||||
"modelCardFooterAction": "モデルカードボタンのアクション",
|
||||
"modelCardFooterActionOptions": {
|
||||
"exampleImages": "例画像を開く",
|
||||
"replacePreview": "プレビューを置換"
|
||||
},
|
||||
"modelCardFooterActionHelp": "カード右下のボタンが何をするかを選択します",
|
||||
"modelNameDisplay": "モデル名表示",
|
||||
"modelNameDisplayOptions": {
|
||||
"modelName": "モデル名",
|
||||
"fileName": "ファイル名"
|
||||
},
|
||||
"modelNameDisplayHelp": "モデルカードのフッターに表示する内容を選択:",
|
||||
"modelNameDisplayDetails": {
|
||||
"modelName": "モデルの説明的な名前を表示",
|
||||
"fileName": "ディスク上の実際のファイル名を表示"
|
||||
}
|
||||
"modelNameDisplayHelp": "モデルカードのフッターに表示する内容を選択"
|
||||
},
|
||||
"folderSettings": {
|
||||
"activeLibrary": "アクティブライブラリ",
|
||||
@@ -264,6 +309,26 @@
|
||||
"defaultEmbeddingRootHelp": "ダウンロード、インポート、移動用のデフォルトembeddingルートディレクトリを設定",
|
||||
"noDefault": "デフォルトなし"
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "優先タグ",
|
||||
"description": "各モデルタイプのタグ優先順位をカスタマイズします (例: character, concept, style(toon|toon_style))",
|
||||
"placeholder": "character, concept, style(toon|toon_style)",
|
||||
"helpLinkLabel": "優先タグのヘルプを開く",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA",
|
||||
"checkpoint": "チェックポイント",
|
||||
"embedding": "埋め込み"
|
||||
},
|
||||
"saveSuccess": "優先タグを更新しました。",
|
||||
"saveError": "優先タグの更新に失敗しました。",
|
||||
"loadingSuggestions": "候補を読み込み中...",
|
||||
"validation": {
|
||||
"missingClosingParen": "エントリ {index} に閉じ括弧がありません。",
|
||||
"missingCanonical": "エントリ {index} には正規タグ名を含める必要があります。",
|
||||
"duplicateCanonical": "正規タグ \"{tag}\" が複数回登場しています。",
|
||||
"unknown": "無効な優先タグ設定です。"
|
||||
}
|
||||
},
|
||||
"downloadPathTemplates": {
|
||||
"title": "ダウンロードパステンプレート",
|
||||
"help": "Civitaiからダウンロードする際の異なるモデルタイプのフォルダ構造を設定します。",
|
||||
@@ -311,6 +376,14 @@
|
||||
"download": "ダウンロード",
|
||||
"restartRequired": "再起動が必要"
|
||||
},
|
||||
"updateFlagStrategy": {
|
||||
"label": "アップデートフラグの表示戦略",
|
||||
"help": "新リリースがローカルファイルと同じベースモデルを共有する場合にのみ更新バッジを表示するか、そのモデルに新しいバージョンがあれば常に表示するかを決めます。",
|
||||
"options": {
|
||||
"sameBase": "ベースモデルで更新をマッチ",
|
||||
"any": "利用可能な更新すべてを表示"
|
||||
}
|
||||
},
|
||||
"misc": {
|
||||
"includeTriggerWords": "LoRA構文にトリガーワードを含める",
|
||||
"includeTriggerWordsHelp": "LoRA構文をクリップボードにコピーする際、学習済みトリガーワードを含めます"
|
||||
@@ -356,26 +429,6 @@
|
||||
"proxyPassword": "パスワード(任意)",
|
||||
"proxyPasswordPlaceholder": "パスワード",
|
||||
"proxyPasswordHelp": "プロキシ認証用のパスワード(必要な場合)"
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "優先タグ",
|
||||
"description": "各モデルタイプのタグ優先順位をカスタマイズします (例: character, concept, style(toon|toon_style))",
|
||||
"placeholder": "character, concept, style(toon|toon_style)",
|
||||
"helpLinkLabel": "優先タグのヘルプを開く",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA",
|
||||
"checkpoint": "チェックポイント",
|
||||
"embedding": "埋め込み"
|
||||
},
|
||||
"saveSuccess": "優先タグを更新しました。",
|
||||
"saveError": "優先タグの更新に失敗しました。",
|
||||
"loadingSuggestions": "候補を読み込み中...",
|
||||
"validation": {
|
||||
"missingClosingParen": "エントリ {index} に閉じ括弧がありません。",
|
||||
"missingCanonical": "エントリ {index} には正規タグ名を含める必要があります。",
|
||||
"duplicateCanonical": "正規タグ \"{tag}\" が複数回登場しています。",
|
||||
"unknown": "無効な優先タグ設定です。"
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -394,8 +447,10 @@
|
||||
},
|
||||
"refresh": {
|
||||
"title": "モデルリストを更新",
|
||||
"quick": "クイック更新(増分)",
|
||||
"full": "完全再構築(完全)"
|
||||
"quick": "変更を同期",
|
||||
"quickTooltip": "新しいモデルファイルや欠けているファイルをスキャンして一覧を最新に保ちます。",
|
||||
"full": "キャッシュを再構築",
|
||||
"fullTooltip": "メタデータファイルから全モデル情報を再読み込みします。リストが古いと感じるときや手動編集後に使用してください。"
|
||||
},
|
||||
"fetch": {
|
||||
"title": "Civitaiからメタデータを取得",
|
||||
@@ -416,6 +471,13 @@
|
||||
"favorites": {
|
||||
"title": "お気に入りのみ表示",
|
||||
"action": "お気に入り"
|
||||
},
|
||||
"updates": {
|
||||
"title": "アップデート可能なモデルのみ表示",
|
||||
"action": "アップデート",
|
||||
"menuLabel": "更新オプションを表示",
|
||||
"check": "アップデートを確認",
|
||||
"checkTooltip": "確認には時間がかかる場合があります。"
|
||||
}
|
||||
},
|
||||
"bulkOperations": {
|
||||
@@ -427,6 +489,7 @@
|
||||
"setContentRating": "すべてのモデルのコンテンツレーティングを設定",
|
||||
"copyAll": "すべての構文をコピー",
|
||||
"refreshAll": "すべてのメタデータを更新",
|
||||
"checkUpdates": "選択項目の更新を確認",
|
||||
"moveAll": "すべてをフォルダに移動",
|
||||
"autoOrganize": "自動整理を実行",
|
||||
"deleteAll": "すべてのモデルを削除",
|
||||
@@ -443,6 +506,7 @@
|
||||
},
|
||||
"contextMenu": {
|
||||
"refreshMetadata": "Civitaiデータを更新",
|
||||
"checkUpdates": "更新確認",
|
||||
"relinkCivitai": "Civitaiに再リンク",
|
||||
"copySyntax": "LoRA構文をコピー",
|
||||
"copyFilename": "モデルファイル名をコピー",
|
||||
@@ -464,6 +528,9 @@
|
||||
},
|
||||
"recipes": {
|
||||
"title": "LoRAレシピ",
|
||||
"actions": {
|
||||
"sendCheckpoint": "ComfyUIへ送信"
|
||||
},
|
||||
"controls": {
|
||||
"import": {
|
||||
"action": "インポート",
|
||||
@@ -702,6 +769,12 @@
|
||||
"countMessage": "モデルが完全に削除されます。",
|
||||
"action": "すべて削除"
|
||||
},
|
||||
"checkUpdates": {
|
||||
"title": "すべての{type}の更新を確認しますか?",
|
||||
"message": "ライブラリ内のすべての{type}で更新を確認します。コレクションが大きい場合は時間がかかることがあります。",
|
||||
"tip": "少しずつ確認したい場合はバルクモードに切り替え、必要なモデルを選んで「選択項目の更新を確認」を使ってください。",
|
||||
"action": "すべて確認"
|
||||
},
|
||||
"bulkAddTags": {
|
||||
"title": "複数モデルにタグを追加",
|
||||
"description": "タグを追加するモデル:",
|
||||
@@ -838,13 +911,77 @@
|
||||
"tabs": {
|
||||
"examples": "例",
|
||||
"description": "モデル説明",
|
||||
"recipes": "レシピ"
|
||||
"recipes": "レシピ",
|
||||
"versions": "バージョン"
|
||||
},
|
||||
"license": {
|
||||
"noImageSell": "No selling generated content",
|
||||
"noRentCivit": "No Civitai generation",
|
||||
"noRent": "No generation services",
|
||||
"noSell": "No selling models",
|
||||
"creditRequired": "作成者のクレジットが必要",
|
||||
"noDerivatives": "共有マージ不可",
|
||||
"noReLicense": "同じ権限が必要",
|
||||
"restrictionsLabel": "ライセンス制限"
|
||||
},
|
||||
"loading": {
|
||||
"exampleImages": "例画像を読み込み中...",
|
||||
"description": "モデル説明を読み込み中...",
|
||||
"recipes": "レシピを読み込み中...",
|
||||
"examples": "例を読み込み中..."
|
||||
"examples": "例を読み込み中...",
|
||||
"versions": "バージョンを読み込み中..."
|
||||
},
|
||||
"versions": {
|
||||
"heading": "モデルバージョン",
|
||||
"copy": "このモデルのすべてのバージョンを一か所で管理します。",
|
||||
"media": {
|
||||
"placeholder": "プレビューなし"
|
||||
},
|
||||
"labels": {
|
||||
"unnamed": "名前のないバージョン",
|
||||
"noDetails": "追加情報なし"
|
||||
},
|
||||
"badges": {
|
||||
"current": "現在のバージョン",
|
||||
"inLibrary": "ライブラリにあります",
|
||||
"newer": "新しいバージョン",
|
||||
"ignored": "無視中"
|
||||
},
|
||||
"actions": {
|
||||
"download": "ダウンロード",
|
||||
"delete": "削除",
|
||||
"ignore": "無視",
|
||||
"unignore": "無視を解除",
|
||||
"resumeModelUpdates": "このモデルの更新を再開",
|
||||
"ignoreModelUpdates": "このモデルの更新を無視",
|
||||
"viewLocalVersions": "ローカルの全バージョンを表示",
|
||||
"viewLocalTooltip": "近日対応予定"
|
||||
},
|
||||
"filters": {
|
||||
"label": "ベースフィルター",
|
||||
"state": {
|
||||
"showAll": "すべてのバージョン",
|
||||
"showSameBase": "同じベース"
|
||||
},
|
||||
"tooltip": {
|
||||
"showAllVersions": "すべてのバージョンを表示する",
|
||||
"showSameBaseVersions": "同じベースモデルのバージョンのみ表示する"
|
||||
},
|
||||
"empty": "現在のベースモデルフィルターに一致するバージョンがありません。"
|
||||
},
|
||||
"empty": "このモデルにはまだバージョン履歴がありません。",
|
||||
"error": "バージョンの読み込みに失敗しました。",
|
||||
"missingModelId": "このモデルにはCivitaiのモデルIDがありません。",
|
||||
"confirm": {
|
||||
"delete": "このバージョンをライブラリから削除しますか?"
|
||||
},
|
||||
"toast": {
|
||||
"modelIgnored": "このモデルの更新は無視されます",
|
||||
"modelResumed": "更新の監視を再開しました",
|
||||
"versionIgnored": "このバージョンの更新は無視されます",
|
||||
"versionUnignored": "バージョンを再度有効にしました",
|
||||
"versionDeleted": "バージョンを削除しました"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -951,7 +1088,9 @@
|
||||
"loraFailedToSend": "LoRAをワークフローに送信できませんでした",
|
||||
"recipeAdded": "レシピがワークフローに追加されました",
|
||||
"recipeReplaced": "レシピがワークフローで置換されました",
|
||||
"recipeFailedToSend": "レシピをワークフローに送信できませんでした"
|
||||
"recipeFailedToSend": "レシピをワークフローに送信できませんでした",
|
||||
"noMatchingNodes": "現在のワークフローには互換性のあるノードがありません",
|
||||
"noTargetNodeSelected": "ターゲットノードが選択されていません"
|
||||
},
|
||||
"nodeSelector": {
|
||||
"recipe": "レシピ",
|
||||
@@ -996,6 +1135,11 @@
|
||||
},
|
||||
"update": {
|
||||
"title": "更新確認",
|
||||
"notificationsTitle": "通知センター",
|
||||
"tabs": {
|
||||
"updates": "更新",
|
||||
"messages": "メッセージ"
|
||||
},
|
||||
"updateAvailable": "更新が利用可能",
|
||||
"noChangelogAvailable": "詳細な変更ログは利用できません。詳細はGitHubでご確認ください。",
|
||||
"currentVersion": "現在のバージョン",
|
||||
@@ -1027,6 +1171,13 @@
|
||||
"nightly": {
|
||||
"warning": "警告:ナイトリービルドには実験的機能が含まれており、不安定な場合があります。",
|
||||
"enable": "ナイトリー更新を有効にする"
|
||||
},
|
||||
"banners": {
|
||||
"recent": "最近の通知",
|
||||
"empty": "最近のバナーはありません。",
|
||||
"shown": "{time} に表示",
|
||||
"dismissed": "{time} に非表示",
|
||||
"active": "アクティブ"
|
||||
}
|
||||
},
|
||||
"support": {
|
||||
@@ -1106,6 +1257,9 @@
|
||||
"cannotSend": "レシピを送信できません:レシピIDがありません",
|
||||
"sendFailed": "レシピのワークフローへの送信に失敗しました",
|
||||
"sendError": "レシピのワークフロー送信エラー",
|
||||
"missingCheckpointPath": "チェックポイントのパスがありません",
|
||||
"missingCheckpointInfo": "チェックポイント情報が不足しています",
|
||||
"downloadCheckpointFailed": "チェックポイントのダウンロードに失敗しました: {message}",
|
||||
"cannotDelete": "レシピを削除できません:レシピIDがありません",
|
||||
"deleteConfirmationError": "削除確認の表示中にエラーが発生しました",
|
||||
"deletedSuccessfully": "レシピが正常に削除されました",
|
||||
@@ -1146,6 +1300,12 @@
|
||||
"bulkContentRatingSet": "{count} 件のモデルのコンテンツレーティングを {level} に設定しました",
|
||||
"bulkContentRatingPartial": "{success} 件のモデルのコンテンツレーティングを {level} に設定、{failed} 件は失敗しました",
|
||||
"bulkContentRatingFailed": "選択したモデルのコンテンツレーティングを更新できませんでした",
|
||||
"bulkUpdatesChecking": "選択された{type}の更新を確認しています...",
|
||||
"bulkUpdatesSuccess": "{count} 件の選択された{type}に利用可能な更新があります",
|
||||
"bulkUpdatesNone": "選択された{type}には更新が見つかりませんでした",
|
||||
"bulkUpdatesMissing": "選択された{type}はCivitaiの更新にリンクされていません",
|
||||
"bulkUpdatesPartialMissing": "Civitaiリンクがない{missing} 件の{type}をスキップしました",
|
||||
"bulkUpdatesFailed": "選択された{type}の更新確認に失敗しました: {message}",
|
||||
"invalidCharactersRemoved": "ファイル名から無効な文字が削除されました",
|
||||
"filenameCannotBeEmpty": "ファイル名を空にすることはできません",
|
||||
"renameFailed": "ファイル名の変更に失敗しました:{message}",
|
||||
@@ -1206,7 +1366,7 @@
|
||||
},
|
||||
"triggerWords": {
|
||||
"loadFailed": "学習済みワードを読み込めませんでした",
|
||||
"tooLong": "トリガーワードは30ワードを超えてはいけません",
|
||||
"tooLong": "トリガーワードは100ワードを超えてはいけません",
|
||||
"tooMany": "最大30トリガーワードまで許可されています",
|
||||
"alreadyExists": "このトリガーワードは既に存在します",
|
||||
"updateSuccess": "トリガーワードが正常に更新されました",
|
||||
|
||||
238
locales/ko.json
238
locales/ko.json
@@ -101,7 +101,12 @@
|
||||
"checkpointNameCopied": "Checkpoint 이름 복사됨",
|
||||
"toggleBlur": "블러 토글",
|
||||
"show": "보기",
|
||||
"openExampleImages": "예시 이미지 폴더 열기"
|
||||
"openExampleImages": "예시 이미지 폴더 열기",
|
||||
"replacePreview": "미리보기 교체",
|
||||
"copyCheckpointName": "Checkpoint 이름 복사",
|
||||
"copyEmbeddingName": "Embedding 이름 복사",
|
||||
"sendCheckpointToWorkflow": "ComfyUI로 전송",
|
||||
"sendEmbeddingToWorkflow": "ComfyUI로 전송"
|
||||
},
|
||||
"nsfw": {
|
||||
"matureContent": "성인 콘텐츠",
|
||||
@@ -115,12 +120,17 @@
|
||||
"updateFailed": "즐겨찾기 상태 업데이트 실패"
|
||||
},
|
||||
"sendToWorkflow": {
|
||||
"checkpointNotImplemented": "Checkpoint을 워크플로로 전송 - 구현 예정 기능"
|
||||
"checkpointNotImplemented": "Checkpoint을 워크플로로 전송 - 구현 예정 기능",
|
||||
"missingPath": "이 카드의 모델 경로를 확인할 수 없습니다"
|
||||
},
|
||||
"exampleImages": {
|
||||
"checkError": "예시 이미지 확인 중 오류",
|
||||
"missingHash": "모델 해시 정보가 없습니다.",
|
||||
"noRemoteImagesAvailable": "Civitai에서 이 모델의 원격 예시 이미지를 사용할 수 없습니다"
|
||||
},
|
||||
"badges": {
|
||||
"update": "업데이트",
|
||||
"updateAvailable": "업데이트 가능"
|
||||
}
|
||||
},
|
||||
"globalContextMenu": {
|
||||
@@ -129,12 +139,26 @@
|
||||
"missingPath": "예시 이미지를 다운로드하기 전에 다운로드 위치를 설정하세요.",
|
||||
"unavailable": "예시 이미지 다운로드는 아직 사용할 수 없습니다. 페이지 로딩이 완료된 후 다시 시도하세요."
|
||||
},
|
||||
"checkModelUpdates": {
|
||||
"label": "업데이트 확인",
|
||||
"loading": "{type} 업데이트를 확인 중...",
|
||||
"success": "{type} 업데이트 {count}개를 찾았습니다",
|
||||
"none": "모든 {type}가 최신 상태입니다",
|
||||
"error": "{type} 업데이트 확인 실패: {message}"
|
||||
},
|
||||
"cleanupExampleImages": {
|
||||
"label": "예시 이미지 폴더 정리",
|
||||
"success": "{count}개의 폴더가 삭제 폴더로 이동되었습니다",
|
||||
"none": "정리가 필요한 예시 이미지 폴더가 없습니다",
|
||||
"partial": "정리가 완료되었으나 {failures}개의 폴더가 건너뛰어졌습니다",
|
||||
"error": "예시 이미지 폴더 정리에 실패했습니다: {message}"
|
||||
},
|
||||
"fetchMissingLicenses": {
|
||||
"label": "Refresh license metadata",
|
||||
"loading": "Refreshing license metadata for {typePlural}...",
|
||||
"success": "Updated license metadata for {count} {typePlural}",
|
||||
"none": "All {typePlural} already have license metadata",
|
||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||
}
|
||||
},
|
||||
"header": {
|
||||
@@ -171,6 +195,10 @@
|
||||
"title": "모델 필터",
|
||||
"baseModel": "베이스 모델",
|
||||
"modelTags": "태그 (상위 20개)",
|
||||
"modelTypes": "Model Types",
|
||||
"license": "라이선스",
|
||||
"noCreditRequired": "크레딧 표기 없음",
|
||||
"allowSellingGeneratedContent": "판매 허용",
|
||||
"clearAll": "모든 필터 지우기"
|
||||
},
|
||||
"theme": {
|
||||
@@ -181,6 +209,7 @@
|
||||
},
|
||||
"actions": {
|
||||
"checkUpdates": "업데이트 확인",
|
||||
"notifications": "알림",
|
||||
"support": "지원"
|
||||
}
|
||||
},
|
||||
@@ -199,12 +228,19 @@
|
||||
"videoSettings": "비디오 설정",
|
||||
"layoutSettings": "레이아웃 설정",
|
||||
"folderSettings": "폴더 설정",
|
||||
"priorityTags": "우선순위 태그",
|
||||
"downloadPathTemplates": "다운로드 경로 템플릿",
|
||||
"exampleImages": "예시 이미지",
|
||||
"updateFlags": "업데이트 표시",
|
||||
"autoOrganize": "Auto-organize",
|
||||
"misc": "기타",
|
||||
"metadataArchive": "메타데이터 아카이브 데이터베이스",
|
||||
"proxySettings": "프록시 설정",
|
||||
"priorityTags": "우선순위 태그"
|
||||
"storageLocation": "설정 위치",
|
||||
"proxySettings": "프록시 설정"
|
||||
},
|
||||
"storage": {
|
||||
"locationLabel": "휴대용 모드",
|
||||
"locationHelp": "활성화하면 settings.json을 리포지토리에 유지하고, 비활성화하면 사용자 구성 디렉터리에 저장합니다."
|
||||
},
|
||||
"contentFiltering": {
|
||||
"blurNsfwContent": "NSFW 콘텐츠 블러 처리",
|
||||
@@ -216,6 +252,15 @@
|
||||
"autoplayOnHover": "호버 시 비디오 자동 재생",
|
||||
"autoplayOnHoverHelp": "마우스를 올렸을 때만 비디오 미리보기를 재생합니다"
|
||||
},
|
||||
"autoOrganizeExclusions": {
|
||||
"label": "자동 정리 제외 항목",
|
||||
"placeholder": "예: curated/*, */backups/*; *_temp.safetensors",
|
||||
"help": "이 와일드카드 패턴과 일치하는 파일 이동을 건너뜁니다. 여러 패턴은 쉼표 또는 세미콜론으로 구분하십시오.",
|
||||
"validation": {
|
||||
"noPatterns": "쉼표 또는 세미콜론으로 구분된 최소한 하나의 패턴을 입력하십시오.",
|
||||
"saveFailed": "제외 항목을 저장할 수 없습니다: {message}"
|
||||
}
|
||||
},
|
||||
"layoutSettings": {
|
||||
"displayDensity": "표시 밀도",
|
||||
"displayDensityOptions": {
|
||||
@@ -230,26 +275,26 @@
|
||||
"compact": "7개 (1080p), 8개 (2K), 10개 (4K)"
|
||||
},
|
||||
"displayDensityWarning": "경고: 높은 밀도는 리소스가 제한된 시스템에서 성능 문제를 일으킬 수 있습니다.",
|
||||
"showFolderSidebar": "폴더 사이드바 표시",
|
||||
"showFolderSidebarHelp": "모델 페이지에서 폴더 탐색 사이드바를 켜거나 끕니다. 비활성화하면 사이드바와 호버 영역이 표시되지 않습니다.",
|
||||
"cardInfoDisplay": "카드 정보 표시",
|
||||
"cardInfoDisplayOptions": {
|
||||
"always": "항상 표시",
|
||||
"hover": "호버 시 표시"
|
||||
},
|
||||
"cardInfoDisplayHelp": "모델 정보 및 액션 버튼을 언제 표시할지 선택하세요:",
|
||||
"cardInfoDisplayDetails": {
|
||||
"always": "헤더와 푸터가 항상 보입니다",
|
||||
"hover": "카드에 마우스를 올렸을 때만 헤더와 푸터가 나타납니다"
|
||||
"cardInfoDisplayHelp": "모델 정보 및 액션 버튼을 언제 표시할지 선택하세요",
|
||||
"modelCardFooterAction": "모델 카드 버튼 동작",
|
||||
"modelCardFooterActionOptions": {
|
||||
"exampleImages": "예시 이미지 열기",
|
||||
"replacePreview": "미리보기 교체"
|
||||
},
|
||||
"modelCardFooterActionHelp": "카드 우측 하단 버튼이 수행할 작업을 선택하세요",
|
||||
"modelNameDisplay": "모델명 표시",
|
||||
"modelNameDisplayOptions": {
|
||||
"modelName": "모델명",
|
||||
"fileName": "파일명"
|
||||
},
|
||||
"modelNameDisplayHelp": "모델 카드 하단에 표시할 내용을 선택하세요:",
|
||||
"modelNameDisplayDetails": {
|
||||
"modelName": "모델의 설명적 이름 표시",
|
||||
"fileName": "디스크의 실제 파일명 표시"
|
||||
}
|
||||
"modelNameDisplayHelp": "모델 카드 하단에 표시할 내용을 선택하세요"
|
||||
},
|
||||
"folderSettings": {
|
||||
"activeLibrary": "활성 라이브러리",
|
||||
@@ -264,6 +309,26 @@
|
||||
"defaultEmbeddingRootHelp": "다운로드, 가져오기 및 이동을 위한 기본 Embedding 루트 디렉토리를 설정합니다",
|
||||
"noDefault": "기본값 없음"
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "우선순위 태그",
|
||||
"description": "모델 유형별 태그 우선순위를 사용자 지정합니다(예: character, concept, style(toon|toon_style)).",
|
||||
"placeholder": "character, concept, style(toon|toon_style)",
|
||||
"helpLinkLabel": "우선순위 태그 도움말 열기",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA",
|
||||
"checkpoint": "체크포인트",
|
||||
"embedding": "임베딩"
|
||||
},
|
||||
"saveSuccess": "우선순위 태그가 업데이트되었습니다.",
|
||||
"saveError": "우선순위 태그를 업데이트하지 못했습니다.",
|
||||
"loadingSuggestions": "추천을 불러오는 중...",
|
||||
"validation": {
|
||||
"missingClosingParen": "{index}번째 항목에 닫는 괄호가 없습니다.",
|
||||
"missingCanonical": "{index}번째 항목에는 정식 태그 이름이 포함되어야 합니다.",
|
||||
"duplicateCanonical": "정식 태그 \"{tag}\"가 여러 번 나타납니다.",
|
||||
"unknown": "잘못된 우선순위 태그 구성입니다."
|
||||
}
|
||||
},
|
||||
"downloadPathTemplates": {
|
||||
"title": "다운로드 경로 템플릿",
|
||||
"help": "Civitai에서 다운로드할 때 다양한 모델 유형의 폴더 구조를 구성합니다.",
|
||||
@@ -311,6 +376,14 @@
|
||||
"download": "다운로드",
|
||||
"restartRequired": "재시작 필요"
|
||||
},
|
||||
"updateFlagStrategy": {
|
||||
"label": "업데이트 표시 전략",
|
||||
"help": "새 릴리스가 로컬 파일과 동일한 베이스 모델을 공유할 때만 업데이트 배지를 표시할지, 또는 해당 모델에 사용 가능한 새 버전이 있으면 항상 표시할지 결정합니다.",
|
||||
"options": {
|
||||
"sameBase": "베이스 모델로 업데이트 일치",
|
||||
"any": "사용 가능한 모든 업데이트 표시"
|
||||
}
|
||||
},
|
||||
"misc": {
|
||||
"includeTriggerWords": "LoRA 문법에 트리거 단어 포함",
|
||||
"includeTriggerWordsHelp": "LoRA 문법을 클립보드에 복사할 때 학습된 트리거 단어를 포함합니다"
|
||||
@@ -356,26 +429,6 @@
|
||||
"proxyPassword": "비밀번호 (선택사항)",
|
||||
"proxyPasswordPlaceholder": "password",
|
||||
"proxyPasswordHelp": "프록시 인증에 필요한 비밀번호 (필요한 경우)"
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "우선순위 태그",
|
||||
"description": "모델 유형별 태그 우선순위를 사용자 지정합니다(예: character, concept, style(toon|toon_style)).",
|
||||
"placeholder": "character, concept, style(toon|toon_style)",
|
||||
"helpLinkLabel": "우선순위 태그 도움말 열기",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA",
|
||||
"checkpoint": "체크포인트",
|
||||
"embedding": "임베딩"
|
||||
},
|
||||
"saveSuccess": "우선순위 태그가 업데이트되었습니다.",
|
||||
"saveError": "우선순위 태그를 업데이트하지 못했습니다.",
|
||||
"loadingSuggestions": "추천을 불러오는 중...",
|
||||
"validation": {
|
||||
"missingClosingParen": "{index}번째 항목에 닫는 괄호가 없습니다.",
|
||||
"missingCanonical": "{index}번째 항목에는 정식 태그 이름이 포함되어야 합니다.",
|
||||
"duplicateCanonical": "정식 태그 \"{tag}\"가 여러 번 나타납니다.",
|
||||
"unknown": "잘못된 우선순위 태그 구성입니다."
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -394,8 +447,10 @@
|
||||
},
|
||||
"refresh": {
|
||||
"title": "모델 목록 새로고침",
|
||||
"quick": "빠른 새로고침 (증분)",
|
||||
"full": "전체 재구성 (완전)"
|
||||
"quick": "변경 사항 동기화",
|
||||
"quickTooltip": "새로운 모델 파일이나 누락된 파일을 찾아 목록을 최신 상태로 유지합니다.",
|
||||
"full": "캐시 재구성",
|
||||
"fullTooltip": "메타데이터 파일에서 모든 모델 정보를 다시 불러옵니다. 라이브러리가 오래되어 보이거나 수동 수정 후에 사용하세요."
|
||||
},
|
||||
"fetch": {
|
||||
"title": "Civitai에서 메타데이터 가져오기",
|
||||
@@ -416,6 +471,13 @@
|
||||
"favorites": {
|
||||
"title": "즐겨찾기만 보기",
|
||||
"action": "즐겨찾기"
|
||||
},
|
||||
"updates": {
|
||||
"title": "업데이트 가능한 모델만 표시",
|
||||
"action": "업데이트",
|
||||
"menuLabel": "업데이트 옵션 표시",
|
||||
"check": "업데이트 확인",
|
||||
"checkTooltip": "업데이트 확인에는 시간이 걸릴 수 있습니다."
|
||||
}
|
||||
},
|
||||
"bulkOperations": {
|
||||
@@ -427,6 +489,7 @@
|
||||
"setContentRating": "모든 모델에 콘텐츠 등급 설정",
|
||||
"copyAll": "모든 문법 복사",
|
||||
"refreshAll": "모든 메타데이터 새로고침",
|
||||
"checkUpdates": "선택 항목 업데이트 확인",
|
||||
"moveAll": "모두 폴더로 이동",
|
||||
"autoOrganize": "자동 정리 선택",
|
||||
"deleteAll": "모든 모델 삭제",
|
||||
@@ -443,6 +506,7 @@
|
||||
},
|
||||
"contextMenu": {
|
||||
"refreshMetadata": "Civitai 데이터 새로고침",
|
||||
"checkUpdates": "업데이트 확인",
|
||||
"relinkCivitai": "Civitai에 다시 연결",
|
||||
"copySyntax": "LoRA 문법 복사",
|
||||
"copyFilename": "모델 파일명 복사",
|
||||
@@ -464,6 +528,9 @@
|
||||
},
|
||||
"recipes": {
|
||||
"title": "LoRA 레시피",
|
||||
"actions": {
|
||||
"sendCheckpoint": "ComfyUI로 보내기"
|
||||
},
|
||||
"controls": {
|
||||
"import": {
|
||||
"action": "가져오기",
|
||||
@@ -702,6 +769,12 @@
|
||||
"countMessage": "개의 모델이 영구적으로 삭제됩니다.",
|
||||
"action": "모두 삭제"
|
||||
},
|
||||
"checkUpdates": {
|
||||
"title": "{type} 전체 업데이트를 확인할까요?",
|
||||
"message": "라이브러리에 있는 모든 {type}의 업데이트를 확인합니다. 컬렉션이 클수록 시간이 조금 더 걸릴 수 있습니다.",
|
||||
"tip": "나눠서 진행하고 싶다면 벌크 모드로 전환해 필요한 모델만 선택한 뒤 \"선택 항목 업데이트 확인\"을 사용하세요.",
|
||||
"action": "전체 확인"
|
||||
},
|
||||
"bulkAddTags": {
|
||||
"title": "여러 모델에 태그 추가",
|
||||
"description": "다음에 태그를 추가합니다:",
|
||||
@@ -838,13 +911,77 @@
|
||||
"tabs": {
|
||||
"examples": "예시",
|
||||
"description": "모델 설명",
|
||||
"recipes": "레시피"
|
||||
"recipes": "레시피",
|
||||
"versions": "버전"
|
||||
},
|
||||
"license": {
|
||||
"noImageSell": "No selling generated content",
|
||||
"noRentCivit": "No Civitai generation",
|
||||
"noRent": "No generation services",
|
||||
"noSell": "No selling models",
|
||||
"creditRequired": "제작자 크레딧 필요",
|
||||
"noDerivatives": "공유 병합 불가",
|
||||
"noReLicense": "동일한 권한 필요",
|
||||
"restrictionsLabel": "라이선스 제한"
|
||||
},
|
||||
"loading": {
|
||||
"exampleImages": "예시 이미지 로딩 중...",
|
||||
"description": "모델 설명 로딩 중...",
|
||||
"recipes": "레시피 로딩 중...",
|
||||
"examples": "예시 로딩 중..."
|
||||
"examples": "예시 로딩 중...",
|
||||
"versions": "버전 로딩 중..."
|
||||
},
|
||||
"versions": {
|
||||
"heading": "모델 버전",
|
||||
"copy": "이 모델의 모든 버전을 한 곳에서 관리하세요.",
|
||||
"media": {
|
||||
"placeholder": "미리보기 없음"
|
||||
},
|
||||
"labels": {
|
||||
"unnamed": "이름 없는 버전",
|
||||
"noDetails": "추가 정보 없음"
|
||||
},
|
||||
"badges": {
|
||||
"current": "현재 버전",
|
||||
"inLibrary": "라이브러리에 있음",
|
||||
"newer": "최신 버전",
|
||||
"ignored": "무시됨"
|
||||
},
|
||||
"actions": {
|
||||
"download": "다운로드",
|
||||
"delete": "삭제",
|
||||
"ignore": "무시",
|
||||
"unignore": "무시 해제",
|
||||
"resumeModelUpdates": "이 모델 업데이트 재개",
|
||||
"ignoreModelUpdates": "이 모델 업데이트 무시",
|
||||
"viewLocalVersions": "로컬 버전 모두 보기",
|
||||
"viewLocalTooltip": "곧 제공 예정"
|
||||
},
|
||||
"filters": {
|
||||
"label": "기본 필터",
|
||||
"state": {
|
||||
"showAll": "모든 버전",
|
||||
"showSameBase": "같은 베이스"
|
||||
},
|
||||
"tooltip": {
|
||||
"showAllVersions": "모든 버전을 표시하도록 전환",
|
||||
"showSameBaseVersions": "같은 베이스 모델 버전만 표시하도록 전환"
|
||||
},
|
||||
"empty": "현재 베이스 모델 필터와 일치하는 버전이 없습니다."
|
||||
},
|
||||
"empty": "이 모델에는 아직 버전 기록이 없습니다.",
|
||||
"error": "버전을 불러오지 못했습니다.",
|
||||
"missingModelId": "이 모델에는 Civitai 모델 ID가 없습니다.",
|
||||
"confirm": {
|
||||
"delete": "이 버전을 라이브러리에서 삭제하시겠습니까?"
|
||||
},
|
||||
"toast": {
|
||||
"modelIgnored": "이 모델의 업데이트가 무시됩니다",
|
||||
"modelResumed": "업데이트 추적이 재개되었습니다",
|
||||
"versionIgnored": "이 버전의 업데이트가 무시됩니다",
|
||||
"versionUnignored": "버전이 다시 활성화되었습니다",
|
||||
"versionDeleted": "버전이 삭제되었습니다"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -951,7 +1088,9 @@
|
||||
"loraFailedToSend": "LoRA를 워크플로로 전송하지 못했습니다",
|
||||
"recipeAdded": "레시피가 워크플로에 추가되었습니다",
|
||||
"recipeReplaced": "레시피가 워크플로에서 교체되었습니다",
|
||||
"recipeFailedToSend": "레시피를 워크플로로 전송하지 못했습니다"
|
||||
"recipeFailedToSend": "레시피를 워크플로로 전송하지 못했습니다",
|
||||
"noMatchingNodes": "현재 워크플로에서 호환되는 노드가 없습니다",
|
||||
"noTargetNodeSelected": "대상 노드가 선택되지 않았습니다"
|
||||
},
|
||||
"nodeSelector": {
|
||||
"recipe": "레시피",
|
||||
@@ -996,6 +1135,11 @@
|
||||
},
|
||||
"update": {
|
||||
"title": "업데이트 확인",
|
||||
"notificationsTitle": "알림 센터",
|
||||
"tabs": {
|
||||
"updates": "업데이트",
|
||||
"messages": "메시지"
|
||||
},
|
||||
"updateAvailable": "업데이트 사용 가능",
|
||||
"noChangelogAvailable": "상세한 변경 로그가 없습니다. 더 많은 정보는 GitHub를 확인하세요.",
|
||||
"currentVersion": "현재 버전",
|
||||
@@ -1027,6 +1171,13 @@
|
||||
"nightly": {
|
||||
"warning": "경고: 나이틀리 빌드는 실험적 기능을 포함할 수 있으며 불안정할 수 있습니다.",
|
||||
"enable": "나이틀리 업데이트 활성화"
|
||||
},
|
||||
"banners": {
|
||||
"recent": "최근 알림",
|
||||
"empty": "최근 배너가 없습니다.",
|
||||
"shown": "{time}에 표시",
|
||||
"dismissed": "{time}에 닫힘",
|
||||
"active": "활성"
|
||||
}
|
||||
},
|
||||
"support": {
|
||||
@@ -1106,6 +1257,9 @@
|
||||
"cannotSend": "레시피를 전송할 수 없습니다: 레시피 ID 누락",
|
||||
"sendFailed": "레시피를 워크플로로 전송하는데 실패했습니다",
|
||||
"sendError": "레시피를 워크플로로 전송하는 중 오류",
|
||||
"missingCheckpointPath": "체크포인트 경로를 사용할 수 없습니다",
|
||||
"missingCheckpointInfo": "체크포인트 정보가 부족합니다",
|
||||
"downloadCheckpointFailed": "체크포인트 다운로드 실패: {message}",
|
||||
"cannotDelete": "레시피를 삭제할 수 없습니다: 레시피 ID 누락",
|
||||
"deleteConfirmationError": "삭제 확인 표시 오류",
|
||||
"deletedSuccessfully": "레시피가 성공적으로 삭제되었습니다",
|
||||
@@ -1146,6 +1300,12 @@
|
||||
"bulkContentRatingSet": "{count}개 모델의 콘텐츠 등급을 {level}(으)로 설정했습니다",
|
||||
"bulkContentRatingPartial": "{success}개 모델의 콘텐츠 등급을 {level}(으)로 설정했고, {failed}개는 실패했습니다",
|
||||
"bulkContentRatingFailed": "선택한 모델의 콘텐츠 등급을 업데이트하지 못했습니다",
|
||||
"bulkUpdatesChecking": "선택한 {type}의 업데이트를 확인하는 중...",
|
||||
"bulkUpdatesSuccess": "선택한 {count}개의 {type}에 사용할 수 있는 업데이트가 있습니다",
|
||||
"bulkUpdatesNone": "선택한 {type}에 대한 업데이트가 없습니다",
|
||||
"bulkUpdatesMissing": "선택한 {type}이 Civitai 업데이트에 연결되어 있지 않습니다",
|
||||
"bulkUpdatesPartialMissing": "Civitai 링크가 없는 {missing}개의 {type}을 건너뛰었습니다",
|
||||
"bulkUpdatesFailed": "선택한 {type}의 업데이트 확인에 실패했습니다: {message}",
|
||||
"invalidCharactersRemoved": "파일명에서 잘못된 문자가 제거되었습니다",
|
||||
"filenameCannotBeEmpty": "파일 이름은 비어있을 수 없습니다",
|
||||
"renameFailed": "파일 이름 변경 실패: {message}",
|
||||
@@ -1206,7 +1366,7 @@
|
||||
},
|
||||
"triggerWords": {
|
||||
"loadFailed": "학습된 단어를 로딩할 수 없습니다",
|
||||
"tooLong": "트리거 단어는 30단어를 초과할 수 없습니다",
|
||||
"tooLong": "트리거 단어는 100단어를 초과할 수 없습니다",
|
||||
"tooMany": "최대 30개의 트리거 단어만 허용됩니다",
|
||||
"alreadyExists": "이 트리거 단어는 이미 존재합니다",
|
||||
"updateSuccess": "트리거 단어가 성공적으로 업데이트되었습니다",
|
||||
|
||||
238
locales/ru.json
238
locales/ru.json
@@ -101,7 +101,12 @@
|
||||
"checkpointNameCopied": "Имя checkpoint скопировано",
|
||||
"toggleBlur": "Переключить размытие",
|
||||
"show": "Показать",
|
||||
"openExampleImages": "Открыть папку с примерами"
|
||||
"openExampleImages": "Открыть папку с примерами",
|
||||
"replacePreview": "Заменить превью",
|
||||
"copyCheckpointName": "Копировать имя checkpoint",
|
||||
"copyEmbeddingName": "Копировать имя embedding",
|
||||
"sendCheckpointToWorkflow": "Отправить в ComfyUI",
|
||||
"sendEmbeddingToWorkflow": "Отправить в ComfyUI"
|
||||
},
|
||||
"nsfw": {
|
||||
"matureContent": "Контент для взрослых",
|
||||
@@ -115,12 +120,17 @@
|
||||
"updateFailed": "Не удалось обновить статус избранного"
|
||||
},
|
||||
"sendToWorkflow": {
|
||||
"checkpointNotImplemented": "Отправка checkpoint в workflow - функция будет реализована"
|
||||
"checkpointNotImplemented": "Отправка checkpoint в workflow - функция будет реализована",
|
||||
"missingPath": "Невозможно определить путь модели для этой карточки"
|
||||
},
|
||||
"exampleImages": {
|
||||
"checkError": "Ошибка проверки примеров изображений",
|
||||
"missingHash": "Отсутствует хеш модели.",
|
||||
"noRemoteImagesAvailable": "Нет удаленных примеров изображений для этой модели на Civitai"
|
||||
},
|
||||
"badges": {
|
||||
"update": "Обновление",
|
||||
"updateAvailable": "Доступно обновление"
|
||||
}
|
||||
},
|
||||
"globalContextMenu": {
|
||||
@@ -129,12 +139,26 @@
|
||||
"missingPath": "Укажите место загрузки перед загрузкой примеров изображений.",
|
||||
"unavailable": "Загрузка примеров изображений пока недоступна. Попробуйте снова после полной загрузки страницы."
|
||||
},
|
||||
"checkModelUpdates": {
|
||||
"label": "Проверить обновления",
|
||||
"loading": "Проверка обновлений для {type}...",
|
||||
"success": "Найдено {count} обновлений для {type}",
|
||||
"none": "Все {type} актуальны",
|
||||
"error": "Не удалось проверить обновления для {type}: {message}"
|
||||
},
|
||||
"cleanupExampleImages": {
|
||||
"label": "Очистить папки с примерами изображений",
|
||||
"success": "Перемещено {count} папок в папку удалённых",
|
||||
"none": "Нет папок с примерами изображений, требующих очистки",
|
||||
"partial": "Очистка завершена, пропущено {failures} папок",
|
||||
"error": "Не удалось очистить папки с примерами изображений: {message}"
|
||||
},
|
||||
"fetchMissingLicenses": {
|
||||
"label": "Refresh license metadata",
|
||||
"loading": "Refreshing license metadata for {typePlural}...",
|
||||
"success": "Updated license metadata for {count} {typePlural}",
|
||||
"none": "All {typePlural} already have license metadata",
|
||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||
}
|
||||
},
|
||||
"header": {
|
||||
@@ -171,6 +195,10 @@
|
||||
"title": "Фильтр моделей",
|
||||
"baseModel": "Базовая модель",
|
||||
"modelTags": "Теги (Топ 20)",
|
||||
"modelTypes": "Model Types",
|
||||
"license": "Лицензия",
|
||||
"noCreditRequired": "Без указания авторства",
|
||||
"allowSellingGeneratedContent": "Продажа разрешена",
|
||||
"clearAll": "Очистить все фильтры"
|
||||
},
|
||||
"theme": {
|
||||
@@ -181,6 +209,7 @@
|
||||
},
|
||||
"actions": {
|
||||
"checkUpdates": "Проверить обновления",
|
||||
"notifications": "Уведомления",
|
||||
"support": "Поддержка"
|
||||
}
|
||||
},
|
||||
@@ -199,12 +228,19 @@
|
||||
"videoSettings": "Настройки видео",
|
||||
"layoutSettings": "Настройки макета",
|
||||
"folderSettings": "Настройки папок",
|
||||
"priorityTags": "Приоритетные теги",
|
||||
"downloadPathTemplates": "Шаблоны путей загрузки",
|
||||
"exampleImages": "Примеры изображений",
|
||||
"updateFlags": "Метки обновлений",
|
||||
"autoOrganize": "Auto-organize",
|
||||
"misc": "Разное",
|
||||
"metadataArchive": "Архив метаданных",
|
||||
"proxySettings": "Настройки прокси",
|
||||
"priorityTags": "Приоритетные теги"
|
||||
"storageLocation": "Расположение настроек",
|
||||
"proxySettings": "Настройки прокси"
|
||||
},
|
||||
"storage": {
|
||||
"locationLabel": "Портативный режим",
|
||||
"locationHelp": "Включите, чтобы хранить settings.json в репозитории; выключите, чтобы сохранить его в папке конфигурации пользователя."
|
||||
},
|
||||
"contentFiltering": {
|
||||
"blurNsfwContent": "Размывать NSFW контент",
|
||||
@@ -216,6 +252,15 @@
|
||||
"autoplayOnHover": "Автовоспроизведение видео при наведении",
|
||||
"autoplayOnHoverHelp": "Воспроизводить превью видео только при наведении курсора"
|
||||
},
|
||||
"autoOrganizeExclusions": {
|
||||
"label": "Исключения автосортировки",
|
||||
"placeholder": "Пример: curated/*, */backups/*; *_temp.safetensors",
|
||||
"help": "Пропускать перемещение файлов, соответствующих этим шаблонам. Разделяйте несколько шаблонов запятыми или точками с запятой.",
|
||||
"validation": {
|
||||
"noPatterns": "Введите хотя бы один шаблон, разделенный запятыми или точками с запятой.",
|
||||
"saveFailed": "Не удалось сохранить исключения: {message}"
|
||||
}
|
||||
},
|
||||
"layoutSettings": {
|
||||
"displayDensity": "Плотность отображения",
|
||||
"displayDensityOptions": {
|
||||
@@ -230,26 +275,26 @@
|
||||
"compact": "7 (1080p), 8 (2K), 10 (4K)"
|
||||
},
|
||||
"displayDensityWarning": "Предупреждение: Высокая плотность может вызвать проблемы с производительностью на системах с ограниченными ресурсами.",
|
||||
"showFolderSidebar": "Показывать боковую панель папок",
|
||||
"showFolderSidebarHelp": "Включает или выключает боковую панель навигации по папкам на страницах моделей. При отключении панель и область наведения скрыты.",
|
||||
"cardInfoDisplay": "Отображение информации карточки",
|
||||
"cardInfoDisplayOptions": {
|
||||
"always": "Всегда видимо",
|
||||
"hover": "Показать при наведении"
|
||||
},
|
||||
"cardInfoDisplayHelp": "Выберите когда отображать информацию о модели и кнопки действий:",
|
||||
"cardInfoDisplayDetails": {
|
||||
"always": "Заголовки и подписи всегда видны",
|
||||
"hover": "Заголовки и подписи появляются только при наведении на карточку"
|
||||
"cardInfoDisplayHelp": "Выберите когда отображать информацию о модели и кнопки действий",
|
||||
"modelCardFooterAction": "Действие кнопки карточки модели",
|
||||
"modelCardFooterActionOptions": {
|
||||
"exampleImages": "Открыть примеры изображений",
|
||||
"replacePreview": "Заменить превью"
|
||||
},
|
||||
"modelCardFooterActionHelp": "Выберите, что делает кнопка в правом нижнем углу карточки",
|
||||
"modelNameDisplay": "Отображение названия модели",
|
||||
"modelNameDisplayOptions": {
|
||||
"modelName": "Название модели",
|
||||
"fileName": "Имя файла"
|
||||
},
|
||||
"modelNameDisplayHelp": "Выберите, что отображать в нижней части карточки модели:",
|
||||
"modelNameDisplayDetails": {
|
||||
"modelName": "Отображать описательное название модели",
|
||||
"fileName": "Отображать фактическое имя файла на диске"
|
||||
}
|
||||
"modelNameDisplayHelp": "Выберите, что отображать в нижней части карточки модели"
|
||||
},
|
||||
"folderSettings": {
|
||||
"activeLibrary": "Активная библиотека",
|
||||
@@ -264,6 +309,26 @@
|
||||
"defaultEmbeddingRootHelp": "Установить корневую папку embedding по умолчанию для загрузок, импорта и перемещений",
|
||||
"noDefault": "Не задано"
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "Приоритетные теги",
|
||||
"description": "Настройте порядок приоритетов тегов для каждого типа моделей (например, character, concept, style(toon|toon_style)).",
|
||||
"placeholder": "character, concept, style(toon|toon_style)",
|
||||
"helpLinkLabel": "Открыть справку по приоритетным тегам",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA",
|
||||
"checkpoint": "Чекпойнт",
|
||||
"embedding": "Эмбеддинг"
|
||||
},
|
||||
"saveSuccess": "Приоритетные теги обновлены.",
|
||||
"saveError": "Не удалось обновить приоритетные теги.",
|
||||
"loadingSuggestions": "Загрузка подсказок...",
|
||||
"validation": {
|
||||
"missingClosingParen": "В записи {index} отсутствует закрывающая скобка.",
|
||||
"missingCanonical": "Запись {index} должна содержать каноническое имя тега.",
|
||||
"duplicateCanonical": "Канонический тег \"{tag}\" встречается более одного раза.",
|
||||
"unknown": "Недопустимая конфигурация приоритетных тегов."
|
||||
}
|
||||
},
|
||||
"downloadPathTemplates": {
|
||||
"title": "Шаблоны путей загрузки",
|
||||
"help": "Настройте структуру папок для разных типов моделей при загрузке с Civitai.",
|
||||
@@ -311,6 +376,14 @@
|
||||
"download": "Загрузить",
|
||||
"restartRequired": "Требует перезапуска"
|
||||
},
|
||||
"updateFlagStrategy": {
|
||||
"label": "Стратегия меток обновлений",
|
||||
"help": "Выберите, отображать ли значки обновления только когда новая версия имеет тот же базовый модель, что и локальные файлы, или всегда при наличии любого нового релиза для этой модели.",
|
||||
"options": {
|
||||
"sameBase": "Совпадение обновлений по базовой модели",
|
||||
"any": "Отмечать любые доступные обновления"
|
||||
}
|
||||
},
|
||||
"misc": {
|
||||
"includeTriggerWords": "Включать триггерные слова в синтаксис LoRA",
|
||||
"includeTriggerWordsHelp": "Включать обученные триггерные слова при копировании синтаксиса LoRA в буфер обмена"
|
||||
@@ -356,26 +429,6 @@
|
||||
"proxyPassword": "Пароль (необязательно)",
|
||||
"proxyPasswordPlaceholder": "пароль",
|
||||
"proxyPasswordHelp": "Пароль для аутентификации на прокси (если требуется)"
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "Приоритетные теги",
|
||||
"description": "Настройте порядок приоритетов тегов для каждого типа моделей (например, character, concept, style(toon|toon_style)).",
|
||||
"placeholder": "character, concept, style(toon|toon_style)",
|
||||
"helpLinkLabel": "Открыть справку по приоритетным тегам",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA",
|
||||
"checkpoint": "Чекпойнт",
|
||||
"embedding": "Эмбеддинг"
|
||||
},
|
||||
"saveSuccess": "Приоритетные теги обновлены.",
|
||||
"saveError": "Не удалось обновить приоритетные теги.",
|
||||
"loadingSuggestions": "Загрузка подсказок...",
|
||||
"validation": {
|
||||
"missingClosingParen": "В записи {index} отсутствует закрывающая скобка.",
|
||||
"missingCanonical": "Запись {index} должна содержать каноническое имя тега.",
|
||||
"duplicateCanonical": "Канонический тег \"{tag}\" встречается более одного раза.",
|
||||
"unknown": "Недопустимая конфигурация приоритетных тегов."
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -394,8 +447,10 @@
|
||||
},
|
||||
"refresh": {
|
||||
"title": "Обновить список моделей",
|
||||
"quick": "Быстрое обновление (инкрементальное)",
|
||||
"full": "Полная перестройка (полное)"
|
||||
"quick": "Синхронизировать изменения",
|
||||
"quickTooltip": "Находит новые или отсутствующие файлы моделей, чтобы список оставался актуальным.",
|
||||
"full": "Перестроить кэш",
|
||||
"fullTooltip": "Перечитывает все данные моделей из файлов метаданных — используйте, если библиотека выглядит устаревшей или после ручных правок."
|
||||
},
|
||||
"fetch": {
|
||||
"title": "Получить метаданные с Civitai",
|
||||
@@ -416,6 +471,13 @@
|
||||
"favorites": {
|
||||
"title": "Показать только избранное",
|
||||
"action": "Избранное"
|
||||
},
|
||||
"updates": {
|
||||
"title": "Показывать только модели с доступными обновлениями",
|
||||
"action": "Обновления",
|
||||
"menuLabel": "Показать параметры обновления",
|
||||
"check": "Проверить обновления",
|
||||
"checkTooltip": "Проверка может занять время."
|
||||
}
|
||||
},
|
||||
"bulkOperations": {
|
||||
@@ -427,6 +489,7 @@
|
||||
"setContentRating": "Установить рейтинг контента для всех",
|
||||
"copyAll": "Копировать весь синтаксис",
|
||||
"refreshAll": "Обновить все метаданные",
|
||||
"checkUpdates": "Проверить обновления для выбранных",
|
||||
"moveAll": "Переместить все в папку",
|
||||
"autoOrganize": "Автоматически организовать выбранные",
|
||||
"deleteAll": "Удалить все модели",
|
||||
@@ -443,6 +506,7 @@
|
||||
},
|
||||
"contextMenu": {
|
||||
"refreshMetadata": "Обновить данные Civitai",
|
||||
"checkUpdates": "Проверить обновления",
|
||||
"relinkCivitai": "Пересвязать с Civitai",
|
||||
"copySyntax": "Копировать синтаксис LoRA",
|
||||
"copyFilename": "Копировать имя файла модели",
|
||||
@@ -464,6 +528,9 @@
|
||||
},
|
||||
"recipes": {
|
||||
"title": "Рецепты LoRA",
|
||||
"actions": {
|
||||
"sendCheckpoint": "Отправить в ComfyUI"
|
||||
},
|
||||
"controls": {
|
||||
"import": {
|
||||
"action": "Импортировать",
|
||||
@@ -702,6 +769,12 @@
|
||||
"countMessage": "моделей будут удалены навсегда.",
|
||||
"action": "Удалить все"
|
||||
},
|
||||
"checkUpdates": {
|
||||
"title": "Проверить обновления для всех {typePlural}?",
|
||||
"message": "Будут проверены обновления для всех {typePlural} в вашей библиотеке. Для больших коллекций это может занять немного больше времени.",
|
||||
"tip": "Хотите проверять по частям? Переключитесь в массовый режим, выберите нужные модели и используйте \"Проверить обновления для выбранных\".",
|
||||
"action": "Проверить всё"
|
||||
},
|
||||
"bulkAddTags": {
|
||||
"title": "Добавить теги к нескольким моделям",
|
||||
"description": "Добавить теги к",
|
||||
@@ -838,13 +911,77 @@
|
||||
"tabs": {
|
||||
"examples": "Примеры",
|
||||
"description": "Описание модели",
|
||||
"recipes": "Рецепты"
|
||||
"recipes": "Рецепты",
|
||||
"versions": "Версии"
|
||||
},
|
||||
"license": {
|
||||
"noImageSell": "No selling generated content",
|
||||
"noRentCivit": "No Civitai generation",
|
||||
"noRent": "No generation services",
|
||||
"noSell": "No selling models",
|
||||
"creditRequired": "Требуется указание авторства",
|
||||
"noDerivatives": "Запрет на совместное использование производных работ",
|
||||
"noReLicense": "Требуются те же права",
|
||||
"restrictionsLabel": "Лицензионные ограничения"
|
||||
},
|
||||
"loading": {
|
||||
"exampleImages": "Загрузка примеров изображений...",
|
||||
"description": "Загрузка описания модели...",
|
||||
"recipes": "Загрузка рецептов...",
|
||||
"examples": "Загрузка примеров..."
|
||||
"examples": "Загрузка примеров...",
|
||||
"versions": "Загрузка версий..."
|
||||
},
|
||||
"versions": {
|
||||
"heading": "Версии модели",
|
||||
"copy": "Управляйте всеми версиями этой модели в одном месте.",
|
||||
"media": {
|
||||
"placeholder": "Нет превью"
|
||||
},
|
||||
"labels": {
|
||||
"unnamed": "Версия без названия",
|
||||
"noDetails": "Дополнительная информация отсутствует"
|
||||
},
|
||||
"badges": {
|
||||
"current": "Текущая версия",
|
||||
"inLibrary": "В библиотеке",
|
||||
"newer": "Более новая версия",
|
||||
"ignored": "Игнорируется"
|
||||
},
|
||||
"actions": {
|
||||
"download": "Скачать",
|
||||
"delete": "Удалить",
|
||||
"ignore": "Игнорировать",
|
||||
"unignore": "Перестать игнорировать",
|
||||
"resumeModelUpdates": "Возобновить обновления для этой модели",
|
||||
"ignoreModelUpdates": "Игнорировать обновления для этой модели",
|
||||
"viewLocalVersions": "Показать все локальные версии",
|
||||
"viewLocalTooltip": "Скоро появится"
|
||||
},
|
||||
"filters": {
|
||||
"label": "Фильтр по базе",
|
||||
"state": {
|
||||
"showAll": "Все версии",
|
||||
"showSameBase": "Тот же базовый"
|
||||
},
|
||||
"tooltip": {
|
||||
"showAllVersions": "Переключиться на отображение всех версий",
|
||||
"showSameBaseVersions": "Переключиться на отображение только версий с тем же базовым"
|
||||
},
|
||||
"empty": "Нет версий, соответствующих текущему фильтру базовой модели."
|
||||
},
|
||||
"empty": "Для этой модели пока нет истории версий.",
|
||||
"error": "Не удалось загрузить версии.",
|
||||
"missingModelId": "У этой модели отсутствует идентификатор модели Civitai.",
|
||||
"confirm": {
|
||||
"delete": "Удалить эту версию из библиотеки?"
|
||||
},
|
||||
"toast": {
|
||||
"modelIgnored": "Обновления для этой модели игнорируются",
|
||||
"modelResumed": "Отслеживание обновлений возобновлено",
|
||||
"versionIgnored": "Обновления для этой версии игнорируются",
|
||||
"versionUnignored": "Версия снова активна",
|
||||
"versionDeleted": "Версия удалена"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -951,7 +1088,9 @@
|
||||
"loraFailedToSend": "Не удалось отправить LoRA в workflow",
|
||||
"recipeAdded": "Рецепт добавлен в workflow",
|
||||
"recipeReplaced": "Рецепт заменён в workflow",
|
||||
"recipeFailedToSend": "Не удалось отправить рецепт в workflow"
|
||||
"recipeFailedToSend": "Не удалось отправить рецепт в workflow",
|
||||
"noMatchingNodes": "В текущем workflow нет совместимых узлов",
|
||||
"noTargetNodeSelected": "Целевой узел не выбран"
|
||||
},
|
||||
"nodeSelector": {
|
||||
"recipe": "Рецепт",
|
||||
@@ -996,6 +1135,11 @@
|
||||
},
|
||||
"update": {
|
||||
"title": "Проверить обновления",
|
||||
"notificationsTitle": "Центр уведомлений",
|
||||
"tabs": {
|
||||
"updates": "Обновления",
|
||||
"messages": "Сообщения"
|
||||
},
|
||||
"updateAvailable": "Доступно обновление",
|
||||
"noChangelogAvailable": "Подробный список изменений недоступен. Проверьте GitHub для получения дополнительной информации.",
|
||||
"currentVersion": "Текущая версия",
|
||||
@@ -1027,6 +1171,13 @@
|
||||
"nightly": {
|
||||
"warning": "Предупреждение: Ночные сборки могут содержать экспериментальные функции и могут быть нестабильными.",
|
||||
"enable": "Включить ночные обновления"
|
||||
},
|
||||
"banners": {
|
||||
"recent": "Недавние уведомления",
|
||||
"empty": "Недавних баннеров нет.",
|
||||
"shown": "Показано {time}",
|
||||
"dismissed": "Закрыто {time}",
|
||||
"active": "Активно"
|
||||
}
|
||||
},
|
||||
"support": {
|
||||
@@ -1106,6 +1257,9 @@
|
||||
"cannotSend": "Невозможно отправить рецепт: отсутствует ID рецепта",
|
||||
"sendFailed": "Не удалось отправить рецепт в workflow",
|
||||
"sendError": "Ошибка отправки рецепта в workflow",
|
||||
"missingCheckpointPath": "Путь к чекпойнту недоступен",
|
||||
"missingCheckpointInfo": "Отсутствуют данные о чекпойнте",
|
||||
"downloadCheckpointFailed": "Не удалось скачать чекпойнт: {message}",
|
||||
"cannotDelete": "Невозможно удалить рецепт: отсутствует ID рецепта",
|
||||
"deleteConfirmationError": "Ошибка отображения подтверждения удаления",
|
||||
"deletedSuccessfully": "Рецепт успешно удален",
|
||||
@@ -1146,6 +1300,12 @@
|
||||
"bulkContentRatingSet": "Рейтинг контента установлен на {level} для {count} модель(ей)",
|
||||
"bulkContentRatingPartial": "Рейтинг контента {level} установлен для {success} модель(ей), {failed} не удалось",
|
||||
"bulkContentRatingFailed": "Не удалось обновить рейтинг контента для выбранных моделей",
|
||||
"bulkUpdatesChecking": "Проверка обновлений для выбранных {type}...",
|
||||
"bulkUpdatesSuccess": "Доступны обновления для {count} выбранных {type}",
|
||||
"bulkUpdatesNone": "Обновления для выбранных {type} не найдены",
|
||||
"bulkUpdatesMissing": "Выбранные {type} не привязаны к обновлениям Civitai",
|
||||
"bulkUpdatesPartialMissing": "Пропущено {missing} выбранных {type} без привязки Civitai",
|
||||
"bulkUpdatesFailed": "Не удалось проверить обновления для выбранных {type}: {message}",
|
||||
"invalidCharactersRemoved": "Недопустимые символы удалены из имени файла",
|
||||
"filenameCannotBeEmpty": "Имя файла не может быть пустым",
|
||||
"renameFailed": "Не удалось переименовать файл: {message}",
|
||||
@@ -1206,7 +1366,7 @@
|
||||
},
|
||||
"triggerWords": {
|
||||
"loadFailed": "Не удалось загрузить обученные слова",
|
||||
"tooLong": "Триггерное слово не должно превышать 30 слов",
|
||||
"tooLong": "Триггерное слово не должно превышать 100 слов",
|
||||
"tooMany": "Максимум 30 триггерных слов разрешено",
|
||||
"alreadyExists": "Это триггерное слово уже существует",
|
||||
"updateSuccess": "Триггерные слова успешно обновлены",
|
||||
|
||||
@@ -101,7 +101,12 @@
|
||||
"checkpointNameCopied": "检查点名称已复制",
|
||||
"toggleBlur": "切换模糊",
|
||||
"show": "显示",
|
||||
"openExampleImages": "打开示例图片文件夹"
|
||||
"openExampleImages": "打开示例图片文件夹",
|
||||
"replacePreview": "替换预览",
|
||||
"copyCheckpointName": "复制 Checkpoint 名称",
|
||||
"copyEmbeddingName": "复制 Embedding 名称",
|
||||
"sendCheckpointToWorkflow": "发送到 ComfyUI",
|
||||
"sendEmbeddingToWorkflow": "发送到 ComfyUI"
|
||||
},
|
||||
"nsfw": {
|
||||
"matureContent": "成熟内容",
|
||||
@@ -115,12 +120,17 @@
|
||||
"updateFailed": "收藏状态更新失败"
|
||||
},
|
||||
"sendToWorkflow": {
|
||||
"checkpointNotImplemented": "发送检查点到工作流 - 功能待实现"
|
||||
"checkpointNotImplemented": "发送检查点到工作流 - 功能待实现",
|
||||
"missingPath": "无法确定此卡片的模型路径"
|
||||
},
|
||||
"exampleImages": {
|
||||
"checkError": "检查示例图片时出错",
|
||||
"missingHash": "缺少模型哈希信息。",
|
||||
"noRemoteImagesAvailable": "此模型在 Civitai 上没有远程示例图片"
|
||||
},
|
||||
"badges": {
|
||||
"update": "更新",
|
||||
"updateAvailable": "有可用更新"
|
||||
}
|
||||
},
|
||||
"globalContextMenu": {
|
||||
@@ -129,12 +139,26 @@
|
||||
"missingPath": "请先设置下载位置后再下载示例图片。",
|
||||
"unavailable": "示例图片下载当前不可用。请在页面加载完成后重试。"
|
||||
},
|
||||
"checkModelUpdates": {
|
||||
"label": "检查更新",
|
||||
"loading": "正在检查 {type} 更新...",
|
||||
"success": "找到 {count} 条 {type} 更新",
|
||||
"none": "所有 {type} 均已是最新版本",
|
||||
"error": "检查 {type} 更新失败:{message}"
|
||||
},
|
||||
"cleanupExampleImages": {
|
||||
"label": "清理示例图片文件夹",
|
||||
"success": "已将 {count} 个文件夹移动到已删除文件夹",
|
||||
"none": "没有需要清理的示例图片文件夹",
|
||||
"partial": "清理完成,有 {failures} 个文件夹跳过",
|
||||
"error": "清理示例图片文件夹失败:{message}"
|
||||
},
|
||||
"fetchMissingLicenses": {
|
||||
"label": "Refresh license metadata",
|
||||
"loading": "Refreshing license metadata for {typePlural}...",
|
||||
"success": "Updated license metadata for {count} {typePlural}",
|
||||
"none": "All {typePlural} already have license metadata",
|
||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||
}
|
||||
},
|
||||
"header": {
|
||||
@@ -171,6 +195,10 @@
|
||||
"title": "筛选模型",
|
||||
"baseModel": "基础模型",
|
||||
"modelTags": "标签(前20)",
|
||||
"modelTypes": "Model Types",
|
||||
"license": "许可证",
|
||||
"noCreditRequired": "无需署名",
|
||||
"allowSellingGeneratedContent": "允许销售",
|
||||
"clearAll": "清除所有筛选"
|
||||
},
|
||||
"theme": {
|
||||
@@ -181,6 +209,7 @@
|
||||
},
|
||||
"actions": {
|
||||
"checkUpdates": "检查更新",
|
||||
"notifications": "通知",
|
||||
"support": "支持"
|
||||
}
|
||||
},
|
||||
@@ -199,12 +228,19 @@
|
||||
"videoSettings": "视频设置",
|
||||
"layoutSettings": "布局设置",
|
||||
"folderSettings": "文件夹设置",
|
||||
"priorityTags": "优先标签",
|
||||
"downloadPathTemplates": "下载路径模板",
|
||||
"exampleImages": "示例图片",
|
||||
"updateFlags": "更新标记",
|
||||
"autoOrganize": "Auto-organize",
|
||||
"misc": "其他",
|
||||
"metadataArchive": "元数据归档数据库",
|
||||
"proxySettings": "代理设置",
|
||||
"priorityTags": "优先标签"
|
||||
"storageLocation": "设置位置",
|
||||
"proxySettings": "代理设置"
|
||||
},
|
||||
"storage": {
|
||||
"locationLabel": "便携模式",
|
||||
"locationHelp": "开启可将 settings.json 保存在仓库中;关闭则保存在用户配置目录。"
|
||||
},
|
||||
"contentFiltering": {
|
||||
"blurNsfwContent": "模糊 NSFW 内容",
|
||||
@@ -216,6 +252,15 @@
|
||||
"autoplayOnHover": "悬停时自动播放视频",
|
||||
"autoplayOnHoverHelp": "仅在悬停时播放视频预览"
|
||||
},
|
||||
"autoOrganizeExclusions": {
|
||||
"label": "自动整理排除项",
|
||||
"placeholder": "示例: curated/*, */backups/*; *_temp.safetensors",
|
||||
"help": "跳过与这些通配符模式匹配的文件。多个模式用逗号或分号分隔。",
|
||||
"validation": {
|
||||
"noPatterns": "请输入至少一个用逗号或分号分隔的模式。",
|
||||
"saveFailed": "无法保存排除项:{message}"
|
||||
}
|
||||
},
|
||||
"layoutSettings": {
|
||||
"displayDensity": "显示密度",
|
||||
"displayDensityOptions": {
|
||||
@@ -230,26 +275,26 @@
|
||||
"compact": "7(1080p),8(2K),10(4K)"
|
||||
},
|
||||
"displayDensityWarning": "警告:高密度可能导致资源有限的系统性能下降。",
|
||||
"showFolderSidebar": "显示文件夹侧边栏",
|
||||
"showFolderSidebarHelp": "在模型页面启用或禁用文件夹导航侧边栏。关闭后,侧边栏和悬停区域将保持隐藏。",
|
||||
"cardInfoDisplay": "卡片信息显示",
|
||||
"cardInfoDisplayOptions": {
|
||||
"always": "始终可见",
|
||||
"hover": "悬停时显示"
|
||||
},
|
||||
"cardInfoDisplayHelp": "选择何时显示模型信息和操作按钮:",
|
||||
"cardInfoDisplayDetails": {
|
||||
"always": "标题和底部始终显示",
|
||||
"hover": "仅在悬停卡片时显示标题和底部"
|
||||
"cardInfoDisplayHelp": "选择何时显示模型信息和操作按钮",
|
||||
"modelCardFooterAction": "模型卡片按钮操作",
|
||||
"modelCardFooterActionOptions": {
|
||||
"exampleImages": "打开示例图片",
|
||||
"replacePreview": "替换预览"
|
||||
},
|
||||
"modelCardFooterActionHelp": "选择右下角卡片按钮的功能",
|
||||
"modelNameDisplay": "模型名称显示",
|
||||
"modelNameDisplayOptions": {
|
||||
"modelName": "模型名称",
|
||||
"fileName": "文件名"
|
||||
},
|
||||
"modelNameDisplayHelp": "选择在模型卡片底部显示的内容:",
|
||||
"modelNameDisplayDetails": {
|
||||
"modelName": "显示模型的描述性名称",
|
||||
"fileName": "显示磁盘上的实际文件名"
|
||||
}
|
||||
"modelNameDisplayHelp": "选择在模型卡片底部显示的内容"
|
||||
},
|
||||
"folderSettings": {
|
||||
"activeLibrary": "活动库",
|
||||
@@ -264,6 +309,26 @@
|
||||
"defaultEmbeddingRootHelp": "设置下载、导入和移动时的默认 Embedding 根目录",
|
||||
"noDefault": "无默认"
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "优先标签",
|
||||
"description": "为每种模型类型自定义标签优先级顺序 (例如: character, concept, style(toon|toon_style))",
|
||||
"placeholder": "character, concept, style(toon|toon_style)",
|
||||
"helpLinkLabel": "打开优先标签帮助",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA",
|
||||
"checkpoint": "Checkpoint",
|
||||
"embedding": "Embedding"
|
||||
},
|
||||
"saveSuccess": "优先标签已更新。",
|
||||
"saveError": "优先标签更新失败。",
|
||||
"loadingSuggestions": "正在加载建议...",
|
||||
"validation": {
|
||||
"missingClosingParen": "条目 {index} 缺少右括号。",
|
||||
"missingCanonical": "条目 {index} 必须包含规范标签名称。",
|
||||
"duplicateCanonical": "规范标签 \"{tag}\" 出现多次。",
|
||||
"unknown": "优先标签配置无效。"
|
||||
}
|
||||
},
|
||||
"downloadPathTemplates": {
|
||||
"title": "下载路径模板",
|
||||
"help": "配置从 Civitai 下载不同模型类型的文件夹结构。",
|
||||
@@ -311,6 +376,14 @@
|
||||
"download": "下载",
|
||||
"restartRequired": "需要重启"
|
||||
},
|
||||
"updateFlagStrategy": {
|
||||
"label": "更新标记策略",
|
||||
"help": "决定更新徽章是否仅在新版本与本地文件共享相同基础模型时显示,或只要该模型有任何更新版本就显示。",
|
||||
"options": {
|
||||
"sameBase": "按基础模型匹配更新",
|
||||
"any": "显示任何可用更新"
|
||||
}
|
||||
},
|
||||
"misc": {
|
||||
"includeTriggerWords": "复制 LoRA 语法时包含触发词",
|
||||
"includeTriggerWordsHelp": "复制 LoRA 语法到剪贴板时包含训练触发词"
|
||||
@@ -356,26 +429,6 @@
|
||||
"proxyPassword": "密码 (可选)",
|
||||
"proxyPasswordPlaceholder": "密码",
|
||||
"proxyPasswordHelp": "代理认证的密码 (如果需要)"
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "优先标签",
|
||||
"description": "为每种模型类型自定义标签优先级顺序 (例如: character, concept, style(toon|toon_style))",
|
||||
"placeholder": "character, concept, style(toon|toon_style)",
|
||||
"helpLinkLabel": "打开优先标签帮助",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA",
|
||||
"checkpoint": "Checkpoint",
|
||||
"embedding": "Embedding"
|
||||
},
|
||||
"saveSuccess": "优先标签已更新。",
|
||||
"saveError": "优先标签更新失败。",
|
||||
"loadingSuggestions": "正在加载建议...",
|
||||
"validation": {
|
||||
"missingClosingParen": "条目 {index} 缺少右括号。",
|
||||
"missingCanonical": "条目 {index} 必须包含规范标签名称。",
|
||||
"duplicateCanonical": "规范标签 \"{tag}\" 出现多次。",
|
||||
"unknown": "优先标签配置无效。"
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -394,8 +447,10 @@
|
||||
},
|
||||
"refresh": {
|
||||
"title": "刷新模型列表",
|
||||
"quick": "快速刷新(增量)",
|
||||
"full": "完全重建(完整)"
|
||||
"quick": "同步变更",
|
||||
"quickTooltip": "扫描新的或缺失的模型文件,保持列表最新。",
|
||||
"full": "重建缓存",
|
||||
"fullTooltip": "从元数据文件重新加载所有模型信息;用于列表过时或手动编辑后。"
|
||||
},
|
||||
"fetch": {
|
||||
"title": "从 Civitai 获取元数据",
|
||||
@@ -416,6 +471,13 @@
|
||||
"favorites": {
|
||||
"title": "仅显示收藏",
|
||||
"action": "收藏"
|
||||
},
|
||||
"updates": {
|
||||
"title": "仅显示可用更新的模型",
|
||||
"action": "更新",
|
||||
"menuLabel": "显示更新选项",
|
||||
"check": "检查更新",
|
||||
"checkTooltip": "检查更新可能耗时。"
|
||||
}
|
||||
},
|
||||
"bulkOperations": {
|
||||
@@ -427,6 +489,7 @@
|
||||
"setContentRating": "为所选中设置内容评级",
|
||||
"copyAll": "复制所选中语法",
|
||||
"refreshAll": "刷新所选中元数据",
|
||||
"checkUpdates": "检查所选更新",
|
||||
"moveAll": "移动所选中到文件夹",
|
||||
"autoOrganize": "自动整理所选模型",
|
||||
"deleteAll": "删除选中模型",
|
||||
@@ -443,6 +506,7 @@
|
||||
},
|
||||
"contextMenu": {
|
||||
"refreshMetadata": "刷新 Civitai 数据",
|
||||
"checkUpdates": "检查更新",
|
||||
"relinkCivitai": "重新关联到 Civitai",
|
||||
"copySyntax": "复制 LoRA 语法",
|
||||
"copyFilename": "复制模型文件名",
|
||||
@@ -464,6 +528,9 @@
|
||||
},
|
||||
"recipes": {
|
||||
"title": "LoRA 配方",
|
||||
"actions": {
|
||||
"sendCheckpoint": "发送到 ComfyUI"
|
||||
},
|
||||
"controls": {
|
||||
"import": {
|
||||
"action": "导入",
|
||||
@@ -702,6 +769,12 @@
|
||||
"countMessage": "模型将被永久删除。",
|
||||
"action": "全部删除"
|
||||
},
|
||||
"checkUpdates": {
|
||||
"title": "检查所有 {type} 的更新?",
|
||||
"message": "这会为库中的每个 {type} 检查更新,大型集合可能需要一些时间。",
|
||||
"tip": "想分批进行?切换到批量模式,选中需要的模型,然后使用“检查所选更新”。",
|
||||
"action": "检查全部"
|
||||
},
|
||||
"bulkAddTags": {
|
||||
"title": "批量添加标签",
|
||||
"description": "为多个模型添加标签",
|
||||
@@ -838,13 +911,77 @@
|
||||
"tabs": {
|
||||
"examples": "示例",
|
||||
"description": "模型描述",
|
||||
"recipes": "配方"
|
||||
"recipes": "配方",
|
||||
"versions": "版本"
|
||||
},
|
||||
"license": {
|
||||
"noImageSell": "No selling generated content",
|
||||
"noRentCivit": "No Civitai generation",
|
||||
"noRent": "No generation services",
|
||||
"noSell": "No selling models",
|
||||
"creditRequired": "需要创作者署名",
|
||||
"noDerivatives": "禁止分享合并作品",
|
||||
"noReLicense": "需要相同权限",
|
||||
"restrictionsLabel": "许可证限制"
|
||||
},
|
||||
"loading": {
|
||||
"exampleImages": "正在加载示例图片...",
|
||||
"description": "正在加载模型描述...",
|
||||
"recipes": "正在加载配方...",
|
||||
"examples": "正在加载示例..."
|
||||
"examples": "正在加载示例...",
|
||||
"versions": "正在加载版本..."
|
||||
},
|
||||
"versions": {
|
||||
"heading": "模型版本",
|
||||
"copy": "在一个位置管理该模型的所有版本。",
|
||||
"media": {
|
||||
"placeholder": "无预览"
|
||||
},
|
||||
"labels": {
|
||||
"unnamed": "未命名版本",
|
||||
"noDetails": "暂无更多信息"
|
||||
},
|
||||
"badges": {
|
||||
"current": "当前版本",
|
||||
"inLibrary": "已在库中",
|
||||
"newer": "较新的版本",
|
||||
"ignored": "已忽略"
|
||||
},
|
||||
"actions": {
|
||||
"download": "下载",
|
||||
"delete": "删除",
|
||||
"ignore": "忽略",
|
||||
"unignore": "取消忽略",
|
||||
"resumeModelUpdates": "继续跟踪该模型的更新",
|
||||
"ignoreModelUpdates": "忽略该模型的更新",
|
||||
"viewLocalVersions": "查看所有本地版本",
|
||||
"viewLocalTooltip": "敬请期待"
|
||||
},
|
||||
"filters": {
|
||||
"label": "基础筛选",
|
||||
"state": {
|
||||
"showAll": "全部版本",
|
||||
"showSameBase": "相同基模型"
|
||||
},
|
||||
"tooltip": {
|
||||
"showAllVersions": "切换为显示所有版本",
|
||||
"showSameBaseVersions": "仅显示与当前基模型匹配的版本"
|
||||
},
|
||||
"empty": "没有与当前基模型筛选匹配的版本。"
|
||||
},
|
||||
"empty": "该模型还没有版本历史。",
|
||||
"error": "加载版本失败。",
|
||||
"missingModelId": "该模型缺少 Civitai 模型 ID。",
|
||||
"confirm": {
|
||||
"delete": "从库中删除此版本?"
|
||||
},
|
||||
"toast": {
|
||||
"modelIgnored": "已忽略该模型的更新",
|
||||
"modelResumed": "已恢复更新跟踪",
|
||||
"versionIgnored": "已忽略该版本的更新",
|
||||
"versionUnignored": "已重新启用该版本",
|
||||
"versionDeleted": "版本已删除"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -951,7 +1088,9 @@
|
||||
"loraFailedToSend": "发送 LoRA 到工作流失败",
|
||||
"recipeAdded": "配方已追加到工作流",
|
||||
"recipeReplaced": "配方已替换到工作流",
|
||||
"recipeFailedToSend": "发送配方到工作流失败"
|
||||
"recipeFailedToSend": "发送配方到工作流失败",
|
||||
"noMatchingNodes": "当前工作流中没有兼容的节点",
|
||||
"noTargetNodeSelected": "未选择目标节点"
|
||||
},
|
||||
"nodeSelector": {
|
||||
"recipe": "配方",
|
||||
@@ -996,6 +1135,11 @@
|
||||
},
|
||||
"update": {
|
||||
"title": "检查更新",
|
||||
"notificationsTitle": "通知中心",
|
||||
"tabs": {
|
||||
"updates": "更新",
|
||||
"messages": "消息"
|
||||
},
|
||||
"updateAvailable": "更新可用",
|
||||
"noChangelogAvailable": "没有详细的更新日志可用。请查看 GitHub 以获取更多信息。",
|
||||
"currentVersion": "当前版本",
|
||||
@@ -1027,6 +1171,13 @@
|
||||
"nightly": {
|
||||
"warning": "警告:Nightly 版本可能包含实验性功能,可能不稳定。",
|
||||
"enable": "启用 Nightly 更新"
|
||||
},
|
||||
"banners": {
|
||||
"recent": "最近的通知",
|
||||
"empty": "暂无最近的横幅通知。",
|
||||
"shown": "{time} 显示",
|
||||
"dismissed": "{time} 关闭",
|
||||
"active": "仍在显示"
|
||||
}
|
||||
},
|
||||
"support": {
|
||||
@@ -1106,6 +1257,9 @@
|
||||
"cannotSend": "无法发送配方:缺少配方 ID",
|
||||
"sendFailed": "发送配方到工作流失败",
|
||||
"sendError": "发送配方到工作流出错",
|
||||
"missingCheckpointPath": "缺少检查点路径",
|
||||
"missingCheckpointInfo": "缺少检查点信息",
|
||||
"downloadCheckpointFailed": "下载检查点失败:{message}",
|
||||
"cannotDelete": "无法删除配方:缺少配方 ID",
|
||||
"deleteConfirmationError": "显示删除确认出错",
|
||||
"deletedSuccessfully": "配方删除成功",
|
||||
@@ -1146,6 +1300,12 @@
|
||||
"bulkContentRatingSet": "已将 {count} 个模型的内容评级设置为 {level}",
|
||||
"bulkContentRatingPartial": "已将 {success} 个模型的内容评级设置为 {level},{failed} 个失败",
|
||||
"bulkContentRatingFailed": "未能更新所选模型的内容评级",
|
||||
"bulkUpdatesChecking": "正在检查所选 {type} 的更新...",
|
||||
"bulkUpdatesSuccess": "{count} 个所选 {type} 有可用更新",
|
||||
"bulkUpdatesNone": "所选 {type} 未发现更新",
|
||||
"bulkUpdatesMissing": "所选 {type} 未关联 Civitai 更新",
|
||||
"bulkUpdatesPartialMissing": "已跳过 {missing} 个未关联 Civitai 的所选 {type}",
|
||||
"bulkUpdatesFailed": "检查所选 {type} 的更新失败:{message}",
|
||||
"invalidCharactersRemoved": "文件名中的无效字符已移除",
|
||||
"filenameCannotBeEmpty": "文件名不能为空",
|
||||
"renameFailed": "重命名文件失败:{message}",
|
||||
@@ -1206,7 +1366,7 @@
|
||||
},
|
||||
"triggerWords": {
|
||||
"loadFailed": "无法加载训练词",
|
||||
"tooLong": "触发词不能超过30个词",
|
||||
"tooLong": "触发词不能超过100个词",
|
||||
"tooMany": "最多允许30个触发词",
|
||||
"alreadyExists": "该触发词已存在",
|
||||
"updateSuccess": "触发词更新成功",
|
||||
@@ -1305,10 +1465,10 @@
|
||||
"seconds": "秒后刷新"
|
||||
},
|
||||
"communitySupport": {
|
||||
"title": "Keep LoRA Manager Thriving with Your Support ❤️",
|
||||
"content": "LoRA Manager is a passion project maintained full-time by a solo developer. Your support on Ko-fi helps cover development costs, keeps new updates coming, and unlocks a license key for the LM Civitai Extension as a thank-you gift. Every contribution truly makes a difference.",
|
||||
"supportCta": "Support on Ko-fi",
|
||||
"learnMore": "LM Civitai Extension Tutorial"
|
||||
"title": "LM 浏览器插件限时优惠 ⚡",
|
||||
"content": "来爱发电为Lora Manager项目发电,支持项目持续开发的同时,获取浏览器插件验证码,按季支付更优惠!支付宝/微信方便支付。感谢支持!🚀",
|
||||
"supportCta": "为LM发电",
|
||||
"learnMore": "浏览器插件教程"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -101,7 +101,12 @@
|
||||
"checkpointNameCopied": "Checkpoint 名稱已複製",
|
||||
"toggleBlur": "切換模糊",
|
||||
"show": "顯示",
|
||||
"openExampleImages": "開啟範例圖片資料夾"
|
||||
"openExampleImages": "開啟範例圖片資料夾",
|
||||
"replacePreview": "更換預覽圖",
|
||||
"copyCheckpointName": "複製檢查點名稱",
|
||||
"copyEmbeddingName": "複製嵌入名稱",
|
||||
"sendCheckpointToWorkflow": "傳送到 ComfyUI",
|
||||
"sendEmbeddingToWorkflow": "傳送到 ComfyUI"
|
||||
},
|
||||
"nsfw": {
|
||||
"matureContent": "成熟內容",
|
||||
@@ -115,12 +120,17 @@
|
||||
"updateFailed": "更新收藏狀態失敗"
|
||||
},
|
||||
"sendToWorkflow": {
|
||||
"checkpointNotImplemented": "傳送 checkpoint 到工作流 - 功能尚未實現"
|
||||
"checkpointNotImplemented": "傳送 checkpoint 到工作流 - 功能尚未實現",
|
||||
"missingPath": "無法確定此卡片的模型路徑"
|
||||
},
|
||||
"exampleImages": {
|
||||
"checkError": "檢查範例圖片時發生錯誤",
|
||||
"missingHash": "缺少模型雜湊資訊。",
|
||||
"noRemoteImagesAvailable": "此模型在 Civitai 上無遠端範例圖片"
|
||||
},
|
||||
"badges": {
|
||||
"update": "更新",
|
||||
"updateAvailable": "有可用更新"
|
||||
}
|
||||
},
|
||||
"globalContextMenu": {
|
||||
@@ -129,12 +139,26 @@
|
||||
"missingPath": "請先設定下載位置再下載範例圖片。",
|
||||
"unavailable": "範例圖片下載目前尚不可用。請在頁面載入完成後再試一次。"
|
||||
},
|
||||
"checkModelUpdates": {
|
||||
"label": "檢查更新",
|
||||
"loading": "正在檢查 {type} 更新...",
|
||||
"success": "找到 {count} 個 {type} 更新",
|
||||
"none": "所有 {type} 都是最新版本",
|
||||
"error": "檢查 {type} 更新失敗:{message}"
|
||||
},
|
||||
"cleanupExampleImages": {
|
||||
"label": "清理範例圖片資料夾",
|
||||
"success": "已將 {count} 個資料夾移至已刪除資料夾",
|
||||
"none": "沒有需要清理的範例圖片資料夾",
|
||||
"partial": "清理完成,有 {failures} 個資料夾略過",
|
||||
"error": "清理範例圖片資料夾失敗:{message}"
|
||||
},
|
||||
"fetchMissingLicenses": {
|
||||
"label": "Refresh license metadata",
|
||||
"loading": "Refreshing license metadata for {typePlural}...",
|
||||
"success": "Updated license metadata for {count} {typePlural}",
|
||||
"none": "All {typePlural} already have license metadata",
|
||||
"error": "Failed to refresh license metadata for {typePlural}: {message}"
|
||||
}
|
||||
},
|
||||
"header": {
|
||||
@@ -171,6 +195,10 @@
|
||||
"title": "篩選模型",
|
||||
"baseModel": "基礎模型",
|
||||
"modelTags": "標籤(前 20)",
|
||||
"modelTypes": "Model Types",
|
||||
"license": "授權",
|
||||
"noCreditRequired": "無需署名",
|
||||
"allowSellingGeneratedContent": "允許銷售",
|
||||
"clearAll": "清除所有篩選"
|
||||
},
|
||||
"theme": {
|
||||
@@ -181,6 +209,7 @@
|
||||
},
|
||||
"actions": {
|
||||
"checkUpdates": "檢查更新",
|
||||
"notifications": "通知",
|
||||
"support": "支援"
|
||||
}
|
||||
},
|
||||
@@ -199,12 +228,19 @@
|
||||
"videoSettings": "影片設定",
|
||||
"layoutSettings": "版面設定",
|
||||
"folderSettings": "資料夾設定",
|
||||
"priorityTags": "優先標籤",
|
||||
"downloadPathTemplates": "下載路徑範本",
|
||||
"exampleImages": "範例圖片",
|
||||
"updateFlags": "更新標記",
|
||||
"autoOrganize": "Auto-organize",
|
||||
"misc": "其他",
|
||||
"metadataArchive": "中繼資料封存資料庫",
|
||||
"proxySettings": "代理設定",
|
||||
"priorityTags": "優先標籤"
|
||||
"storageLocation": "設定位置",
|
||||
"proxySettings": "代理設定"
|
||||
},
|
||||
"storage": {
|
||||
"locationLabel": "可攜式模式",
|
||||
"locationHelp": "啟用可將 settings.json 保存在儲存庫中;停用則保存在使用者設定目錄。"
|
||||
},
|
||||
"contentFiltering": {
|
||||
"blurNsfwContent": "模糊 NSFW 內容",
|
||||
@@ -216,6 +252,15 @@
|
||||
"autoplayOnHover": "滑鼠懸停自動播放影片",
|
||||
"autoplayOnHoverHelp": "僅在滑鼠懸停時播放影片預覽"
|
||||
},
|
||||
"autoOrganizeExclusions": {
|
||||
"label": "自動整理排除項目",
|
||||
"placeholder": "範例: curated/*, */backups/*; *_temp.safetensors",
|
||||
"help": "跳過符合這些萬用字元模式的檔案。多個模式請用逗號或分號分隔。",
|
||||
"validation": {
|
||||
"noPatterns": "請輸入至少一個以逗號或分號分隔的模式。",
|
||||
"saveFailed": "無法儲存排除項目:{message}"
|
||||
}
|
||||
},
|
||||
"layoutSettings": {
|
||||
"displayDensity": "顯示密度",
|
||||
"displayDensityOptions": {
|
||||
@@ -230,26 +275,26 @@
|
||||
"compact": "7(1080p)、8(2K)、10(4K)"
|
||||
},
|
||||
"displayDensityWarning": "警告:較高密度可能導致資源有限的系統效能下降。",
|
||||
"showFolderSidebar": "顯示資料夾側邊欄",
|
||||
"showFolderSidebarHelp": "在模型頁面啟用或停用資料夾導覽側邊欄。停用後,側邊欄與滑鼠懸停區域將保持隱藏。",
|
||||
"cardInfoDisplay": "卡片資訊顯示",
|
||||
"cardInfoDisplayOptions": {
|
||||
"always": "永遠顯示",
|
||||
"hover": "滑鼠懸停顯示"
|
||||
},
|
||||
"cardInfoDisplayHelp": "選擇何時顯示模型資訊與操作按鈕:",
|
||||
"cardInfoDisplayDetails": {
|
||||
"always": "標題與頁腳始終可見",
|
||||
"hover": "標題與頁腳僅在滑鼠懸停時顯示"
|
||||
"cardInfoDisplayHelp": "選擇何時顯示模型資訊與操作按鈕",
|
||||
"modelCardFooterAction": "模型卡片按鈕操作",
|
||||
"modelCardFooterActionOptions": {
|
||||
"exampleImages": "開啟範例圖片",
|
||||
"replacePreview": "更換預覽圖"
|
||||
},
|
||||
"modelCardFooterActionHelp": "選擇右下角卡片按鈕的功能",
|
||||
"modelNameDisplay": "模型名稱顯示",
|
||||
"modelNameDisplayOptions": {
|
||||
"modelName": "模型名稱",
|
||||
"fileName": "檔案名稱"
|
||||
},
|
||||
"modelNameDisplayHelp": "選擇在模型卡片底部顯示的內容:",
|
||||
"modelNameDisplayDetails": {
|
||||
"modelName": "顯示模型的描述性名稱",
|
||||
"fileName": "顯示磁碟上的實際檔案名稱"
|
||||
}
|
||||
"modelNameDisplayHelp": "選擇在模型卡片底部顯示的內容"
|
||||
},
|
||||
"folderSettings": {
|
||||
"activeLibrary": "使用中的資料庫",
|
||||
@@ -264,6 +309,26 @@
|
||||
"defaultEmbeddingRootHelp": "設定下載、匯入和移動時的預設 Embedding 根目錄",
|
||||
"noDefault": "未設定預設"
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "優先標籤",
|
||||
"description": "為每種模型類型自訂標籤的優先順序 (例如: character, concept, style(toon|toon_style))",
|
||||
"placeholder": "character, concept, style(toon|toon_style)",
|
||||
"helpLinkLabel": "開啟優先標籤說明",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA",
|
||||
"checkpoint": "Checkpoint",
|
||||
"embedding": "Embedding"
|
||||
},
|
||||
"saveSuccess": "優先標籤已更新。",
|
||||
"saveError": "更新優先標籤失敗。",
|
||||
"loadingSuggestions": "正在載入建議...",
|
||||
"validation": {
|
||||
"missingClosingParen": "項目 {index} 缺少右括號。",
|
||||
"missingCanonical": "項目 {index} 必須包含正規標籤名稱。",
|
||||
"duplicateCanonical": "正規標籤 \"{tag}\" 出現多於一次。",
|
||||
"unknown": "優先標籤設定無效。"
|
||||
}
|
||||
},
|
||||
"downloadPathTemplates": {
|
||||
"title": "下載路徑範本",
|
||||
"help": "設定從 Civitai 下載時不同模型類型的資料夾結構。",
|
||||
@@ -311,6 +376,14 @@
|
||||
"download": "下載",
|
||||
"restartRequired": "需要重新啟動"
|
||||
},
|
||||
"updateFlagStrategy": {
|
||||
"label": "更新標記策略",
|
||||
"help": "決定更新徽章是否僅在新版本與本地檔案共享相同基礎模型時顯示,或只要該模型有任何更新版本就顯示。",
|
||||
"options": {
|
||||
"sameBase": "依基礎模型匹配更新",
|
||||
"any": "顯示任何可用更新"
|
||||
}
|
||||
},
|
||||
"misc": {
|
||||
"includeTriggerWords": "在 LoRA 語法中包含觸發詞",
|
||||
"includeTriggerWordsHelp": "複製 LoRA 語法到剪貼簿時包含訓練觸發詞"
|
||||
@@ -356,26 +429,6 @@
|
||||
"proxyPassword": "密碼(選填)",
|
||||
"proxyPasswordPlaceholder": "password",
|
||||
"proxyPasswordHelp": "代理驗證所需的密碼(如有需要)"
|
||||
},
|
||||
"priorityTags": {
|
||||
"title": "優先標籤",
|
||||
"description": "為每種模型類型自訂標籤的優先順序 (例如: character, concept, style(toon|toon_style))",
|
||||
"placeholder": "character, concept, style(toon|toon_style)",
|
||||
"helpLinkLabel": "開啟優先標籤說明",
|
||||
"modelTypes": {
|
||||
"lora": "LoRA",
|
||||
"checkpoint": "Checkpoint",
|
||||
"embedding": "Embedding"
|
||||
},
|
||||
"saveSuccess": "優先標籤已更新。",
|
||||
"saveError": "更新優先標籤失敗。",
|
||||
"loadingSuggestions": "正在載入建議...",
|
||||
"validation": {
|
||||
"missingClosingParen": "項目 {index} 缺少右括號。",
|
||||
"missingCanonical": "項目 {index} 必須包含正規標籤名稱。",
|
||||
"duplicateCanonical": "正規標籤 \"{tag}\" 出現多於一次。",
|
||||
"unknown": "優先標籤設定無效。"
|
||||
}
|
||||
}
|
||||
},
|
||||
"loras": {
|
||||
@@ -394,8 +447,10 @@
|
||||
},
|
||||
"refresh": {
|
||||
"title": "重新整理模型列表",
|
||||
"quick": "快速刷新(增量)",
|
||||
"full": "完整重建(全部)"
|
||||
"quick": "同步變更",
|
||||
"quickTooltip": "掃描新的或缺少的模型檔案,讓清單保持最新。",
|
||||
"full": "重建快取",
|
||||
"fullTooltip": "從中繼資料檔重新載入所有模型資訊;適用於清單過時或手動編輯後。"
|
||||
},
|
||||
"fetch": {
|
||||
"title": "從 Civitai 取得 metadata",
|
||||
@@ -416,6 +471,13 @@
|
||||
"favorites": {
|
||||
"title": "僅顯示收藏",
|
||||
"action": "收藏"
|
||||
},
|
||||
"updates": {
|
||||
"title": "僅顯示可用更新的模型",
|
||||
"action": "更新",
|
||||
"menuLabel": "顯示更新選項",
|
||||
"check": "檢查更新",
|
||||
"checkTooltip": "檢查更新可能耗時。"
|
||||
}
|
||||
},
|
||||
"bulkOperations": {
|
||||
@@ -427,6 +489,7 @@
|
||||
"setContentRating": "為全部設定內容分級",
|
||||
"copyAll": "複製全部語法",
|
||||
"refreshAll": "刷新全部 metadata",
|
||||
"checkUpdates": "檢查所選更新",
|
||||
"moveAll": "全部移動到資料夾",
|
||||
"autoOrganize": "自動整理所選模型",
|
||||
"deleteAll": "刪除全部模型",
|
||||
@@ -443,6 +506,7 @@
|
||||
},
|
||||
"contextMenu": {
|
||||
"refreshMetadata": "刷新 Civitai 資料",
|
||||
"checkUpdates": "檢查更新",
|
||||
"relinkCivitai": "重新連結 Civitai",
|
||||
"copySyntax": "複製 LoRA 語法",
|
||||
"copyFilename": "複製模型檔名",
|
||||
@@ -464,6 +528,9 @@
|
||||
},
|
||||
"recipes": {
|
||||
"title": "LoRA 配方",
|
||||
"actions": {
|
||||
"sendCheckpoint": "傳送到 ComfyUI"
|
||||
},
|
||||
"controls": {
|
||||
"import": {
|
||||
"action": "匯入",
|
||||
@@ -702,6 +769,12 @@
|
||||
"countMessage": "模型將被永久刪除。",
|
||||
"action": "全部刪除"
|
||||
},
|
||||
"checkUpdates": {
|
||||
"title": "要檢查所有 {type} 的更新嗎?",
|
||||
"message": "這會為資料庫中的每個 {type} 檢查更新,大型收藏可能會花上一些時間。",
|
||||
"tip": "想分批處理?切換到批次模式,選擇需要的模型,然後使用「檢查所選更新」。",
|
||||
"action": "全部檢查"
|
||||
},
|
||||
"bulkAddTags": {
|
||||
"title": "新增標籤到多個模型",
|
||||
"description": "新增標籤到",
|
||||
@@ -838,13 +911,77 @@
|
||||
"tabs": {
|
||||
"examples": "範例圖片",
|
||||
"description": "模型描述",
|
||||
"recipes": "配方"
|
||||
"recipes": "配方",
|
||||
"versions": "版本"
|
||||
},
|
||||
"license": {
|
||||
"noImageSell": "No selling generated content",
|
||||
"noRentCivit": "No Civitai generation",
|
||||
"noRent": "No generation services",
|
||||
"noSell": "No selling models",
|
||||
"creditRequired": "需要創作者標示",
|
||||
"noDerivatives": "禁止分享合併作品",
|
||||
"noReLicense": "需要相同授權",
|
||||
"restrictionsLabel": "授權限制"
|
||||
},
|
||||
"loading": {
|
||||
"exampleImages": "載入範例圖片中...",
|
||||
"description": "載入模型描述中...",
|
||||
"recipes": "載入配方中...",
|
||||
"examples": "載入範例中..."
|
||||
"examples": "載入範例中...",
|
||||
"versions": "載入版本中..."
|
||||
},
|
||||
"versions": {
|
||||
"heading": "模型版本",
|
||||
"copy": "在同一位置追蹤並管理此模型的所有版本。",
|
||||
"media": {
|
||||
"placeholder": "無預覽"
|
||||
},
|
||||
"labels": {
|
||||
"unnamed": "未命名版本",
|
||||
"noDetails": "沒有其他資訊"
|
||||
},
|
||||
"badges": {
|
||||
"current": "目前版本",
|
||||
"inLibrary": "已在庫中",
|
||||
"newer": "較新版本",
|
||||
"ignored": "已忽略"
|
||||
},
|
||||
"actions": {
|
||||
"download": "下載",
|
||||
"delete": "刪除",
|
||||
"ignore": "忽略",
|
||||
"unignore": "取消忽略",
|
||||
"resumeModelUpdates": "恢復追蹤此模型的更新",
|
||||
"ignoreModelUpdates": "忽略此模型的更新",
|
||||
"viewLocalVersions": "檢視所有本地版本",
|
||||
"viewLocalTooltip": "敬請期待"
|
||||
},
|
||||
"filters": {
|
||||
"label": "基礎篩選",
|
||||
"state": {
|
||||
"showAll": "所有版本",
|
||||
"showSameBase": "相同基礎模型"
|
||||
},
|
||||
"tooltip": {
|
||||
"showAllVersions": "切換為顯示所有版本",
|
||||
"showSameBaseVersions": "僅顯示與目前基礎模型相符的版本"
|
||||
},
|
||||
"empty": "沒有符合目前基礎模型篩選的版本。"
|
||||
},
|
||||
"empty": "此模型尚無版本歷史。",
|
||||
"error": "載入版本失敗。",
|
||||
"missingModelId": "此模型缺少 Civitai 模型 ID。",
|
||||
"confirm": {
|
||||
"delete": "要從庫中刪除此版本嗎?"
|
||||
},
|
||||
"toast": {
|
||||
"modelIgnored": "已忽略此模型的更新",
|
||||
"modelResumed": "已恢復更新追蹤",
|
||||
"versionIgnored": "已忽略此版本的更新",
|
||||
"versionUnignored": "已重新啟用此版本",
|
||||
"versionDeleted": "已刪除此版本"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -951,7 +1088,9 @@
|
||||
"loraFailedToSend": "傳送 LoRA 到工作流失敗",
|
||||
"recipeAdded": "配方已附加到工作流",
|
||||
"recipeReplaced": "配方已取代於工作流",
|
||||
"recipeFailedToSend": "傳送配方到工作流失敗"
|
||||
"recipeFailedToSend": "傳送配方到工作流失敗",
|
||||
"noMatchingNodes": "目前工作流程中沒有相容的節點",
|
||||
"noTargetNodeSelected": "未選擇目標節點"
|
||||
},
|
||||
"nodeSelector": {
|
||||
"recipe": "配方",
|
||||
@@ -996,6 +1135,11 @@
|
||||
},
|
||||
"update": {
|
||||
"title": "檢查更新",
|
||||
"notificationsTitle": "通知中心",
|
||||
"tabs": {
|
||||
"updates": "更新",
|
||||
"messages": "訊息"
|
||||
},
|
||||
"updateAvailable": "有新版本可用",
|
||||
"noChangelogAvailable": "無詳細更新日誌。請至 GitHub 查看更多資訊。",
|
||||
"currentVersion": "目前版本",
|
||||
@@ -1027,6 +1171,13 @@
|
||||
"nightly": {
|
||||
"warning": "警告:Nightly 版本可能包含實驗性功能且可能不穩定。",
|
||||
"enable": "啟用 Nightly 更新"
|
||||
},
|
||||
"banners": {
|
||||
"recent": "最新通知",
|
||||
"empty": "目前沒有最近的橫幅通知。",
|
||||
"shown": "{time} 顯示",
|
||||
"dismissed": "{time} 關閉",
|
||||
"active": "仍在顯示"
|
||||
}
|
||||
},
|
||||
"support": {
|
||||
@@ -1106,6 +1257,9 @@
|
||||
"cannotSend": "無法傳送配方:缺少配方 ID",
|
||||
"sendFailed": "傳送配方到工作流失敗",
|
||||
"sendError": "傳送配方到工作流錯誤",
|
||||
"missingCheckpointPath": "缺少檢查點路徑",
|
||||
"missingCheckpointInfo": "缺少檢查點資訊",
|
||||
"downloadCheckpointFailed": "下載檢查點失敗:{message}",
|
||||
"cannotDelete": "無法刪除配方:缺少配方 ID",
|
||||
"deleteConfirmationError": "顯示刪除確認時發生錯誤",
|
||||
"deletedSuccessfully": "配方已成功刪除",
|
||||
@@ -1146,6 +1300,12 @@
|
||||
"bulkContentRatingSet": "已將 {count} 個模型的內容分級設定為 {level}",
|
||||
"bulkContentRatingPartial": "已將 {success} 個模型的內容分級設定為 {level},{failed} 個失敗",
|
||||
"bulkContentRatingFailed": "無法更新所選模型的內容分級",
|
||||
"bulkUpdatesChecking": "正在檢查所選 {type} 的更新...",
|
||||
"bulkUpdatesSuccess": "{count} 個所選 {type} 有可用更新",
|
||||
"bulkUpdatesNone": "所選 {type} 未找到更新",
|
||||
"bulkUpdatesMissing": "所選 {type} 未連結 Civitai 更新",
|
||||
"bulkUpdatesPartialMissing": "已略過 {missing} 個未連結 Civitai 的所選 {type}",
|
||||
"bulkUpdatesFailed": "檢查所選 {type} 更新失敗:{message}",
|
||||
"invalidCharactersRemoved": "已移除檔名中的無效字元",
|
||||
"filenameCannotBeEmpty": "檔案名稱不可為空",
|
||||
"renameFailed": "重新命名檔案失敗:{message}",
|
||||
@@ -1206,7 +1366,7 @@
|
||||
},
|
||||
"triggerWords": {
|
||||
"loadFailed": "無法載入訓練詞",
|
||||
"tooLong": "觸發詞不可超過 30 個字",
|
||||
"tooLong": "觸發詞不可超過 100 個字",
|
||||
"tooMany": "最多允許 30 個觸發詞",
|
||||
"alreadyExists": "此觸發詞已存在",
|
||||
"updateSuccess": "觸發詞已更新",
|
||||
|
||||
3
package-lock.json
generated
3
package-lock.json
generated
@@ -114,6 +114,7 @@
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
@@ -137,6 +138,7 @@
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
@@ -1611,6 +1613,7 @@
|
||||
"integrity": "sha512-MyL55p3Ut3cXbeBEG7Hcv0mVM8pp8PBNWxRqchZnSfAiES1v1mRnMeFfaHWIPULpwsYfvO+ZmMZz5tGCnjzDUQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"cssstyle": "^4.0.1",
|
||||
"data-urls": "^5.0.0",
|
||||
|
||||
78
py/config.py
78
py/config.py
@@ -2,12 +2,12 @@ import os
|
||||
import platform
|
||||
from pathlib import Path
|
||||
import folder_paths # type: ignore
|
||||
from typing import Dict, Iterable, List, Mapping, Set
|
||||
from typing import Any, Dict, Iterable, List, Mapping, Optional, Set
|
||||
import logging
|
||||
import json
|
||||
import urllib.parse
|
||||
|
||||
from .utils.settings_paths import ensure_settings_file
|
||||
from .utils.settings_paths import ensure_settings_file, load_settings_template
|
||||
|
||||
# Use an environment variable to control standalone mode
|
||||
standalone_mode = os.environ.get("LORA_MANAGER_STANDALONE", "0") == "1" or os.environ.get("HF_HUB_DISABLE_TELEMETRY", "0") == "0"
|
||||
@@ -45,6 +45,30 @@ def _normalize_folder_paths_for_comparison(
|
||||
return normalized
|
||||
|
||||
|
||||
def _normalize_library_folder_paths(
|
||||
library_payload: Mapping[str, Any]
|
||||
) -> Dict[str, Set[str]]:
|
||||
"""Return normalized folder paths extracted from a library payload."""
|
||||
|
||||
folder_paths = library_payload.get("folder_paths")
|
||||
if isinstance(folder_paths, Mapping):
|
||||
return _normalize_folder_paths_for_comparison(folder_paths)
|
||||
return {}
|
||||
|
||||
|
||||
def _get_template_folder_paths() -> Dict[str, Set[str]]:
|
||||
"""Return normalized folder paths defined in the bundled template."""
|
||||
|
||||
template_payload = load_settings_template()
|
||||
if not template_payload:
|
||||
return {}
|
||||
|
||||
folder_paths = template_payload.get("folder_paths")
|
||||
if isinstance(folder_paths, Mapping):
|
||||
return _normalize_folder_paths_for_comparison(folder_paths)
|
||||
return {}
|
||||
|
||||
|
||||
class Config:
|
||||
"""Global configuration for LoRA Manager"""
|
||||
|
||||
@@ -81,6 +105,43 @@ class Config:
|
||||
comfy_library = libraries.get("comfyui", {})
|
||||
default_library = libraries.get("default", {})
|
||||
|
||||
template_folder_paths = _get_template_folder_paths()
|
||||
default_library_paths: Dict[str, Set[str]] = {}
|
||||
if isinstance(default_library, Mapping):
|
||||
default_library_paths = _normalize_library_folder_paths(default_library)
|
||||
|
||||
libraries_changed = False
|
||||
if (
|
||||
isinstance(default_library, Mapping)
|
||||
and template_folder_paths
|
||||
and default_library_paths == template_folder_paths
|
||||
):
|
||||
if "comfyui" in libraries:
|
||||
try:
|
||||
settings_service.delete_library("default")
|
||||
libraries_changed = True
|
||||
logger.info("Removed template 'default' library entry")
|
||||
except Exception as delete_error:
|
||||
logger.debug(
|
||||
"Failed to delete template 'default' library: %s",
|
||||
delete_error,
|
||||
)
|
||||
else:
|
||||
try:
|
||||
settings_service.rename_library("default", "comfyui")
|
||||
libraries_changed = True
|
||||
logger.info("Renamed template 'default' library to 'comfyui'")
|
||||
except Exception as rename_error:
|
||||
logger.debug(
|
||||
"Failed to rename template 'default' library: %s",
|
||||
rename_error,
|
||||
)
|
||||
|
||||
if libraries_changed:
|
||||
libraries = settings_service.get_libraries()
|
||||
comfy_library = libraries.get("comfyui", {})
|
||||
default_library = libraries.get("default", {})
|
||||
|
||||
target_folder_paths = {
|
||||
'loras': list(self.loras_roots),
|
||||
'checkpoints': list(self.checkpoints_roots or []),
|
||||
@@ -90,9 +151,16 @@ class Config:
|
||||
|
||||
normalized_target_paths = _normalize_folder_paths_for_comparison(target_folder_paths)
|
||||
|
||||
if (not comfy_library and default_library and normalized_target_paths and
|
||||
_normalize_folder_paths_for_comparison(default_library.get("folder_paths", {})) ==
|
||||
normalized_target_paths):
|
||||
normalized_default_paths: Optional[Dict[str, Set[str]]] = None
|
||||
if isinstance(default_library, Mapping):
|
||||
normalized_default_paths = _normalize_library_folder_paths(default_library)
|
||||
|
||||
if (
|
||||
not comfy_library
|
||||
and default_library
|
||||
and normalized_target_paths
|
||||
and normalized_default_paths == normalized_target_paths
|
||||
):
|
||||
try:
|
||||
settings_service.rename_library("default", "comfyui")
|
||||
logger.info("Renamed legacy 'default' library to 'comfyui'")
|
||||
|
||||
@@ -23,6 +23,18 @@ logger = logging.getLogger(__name__)
|
||||
# Check if we're in standalone mode
|
||||
STANDALONE_MODE = 'nodes' not in sys.modules
|
||||
|
||||
HEADER_SIZE_LIMIT = 16384
|
||||
|
||||
|
||||
def _sanitize_size_limit(value):
|
||||
"""Return a non-negative integer size for ``handler_args`` comparisons."""
|
||||
|
||||
try:
|
||||
coerced = int(value)
|
||||
except (TypeError, ValueError):
|
||||
return 0
|
||||
return coerced if coerced >= 0 else 0
|
||||
|
||||
|
||||
class _SettingsProxy:
|
||||
def __init__(self):
|
||||
@@ -50,6 +62,24 @@ class LoraManager:
|
||||
"""Initialize and register all routes using the new refactored architecture"""
|
||||
app = PromptServer.instance.app
|
||||
|
||||
# Increase allowed header sizes so browsers with large localhost cookie
|
||||
# jars (multiple UIs on 127.0.0.1) don't trip aiohttp's 8KB default
|
||||
# limits. Cookies for unrelated apps are still sent to the plugin and
|
||||
# may otherwise raise LineTooLong errors when the request parser reads
|
||||
# them. Preserve any previously configured handler arguments while
|
||||
# ensuring our minimum sizes are applied.
|
||||
handler_args = getattr(app, "_handler_args", {}) or {}
|
||||
updated_handler_args = dict(handler_args)
|
||||
updated_handler_args["max_field_size"] = max(
|
||||
_sanitize_size_limit(handler_args.get("max_field_size", 0)),
|
||||
HEADER_SIZE_LIMIT,
|
||||
)
|
||||
updated_handler_args["max_line_size"] = max(
|
||||
_sanitize_size_limit(handler_args.get("max_line_size", 0)),
|
||||
HEADER_SIZE_LIMIT,
|
||||
)
|
||||
app._handler_args = updated_handler_args
|
||||
|
||||
# Configure aiohttp access logger to be less verbose
|
||||
logging.getLogger('aiohttp.access').setLevel(logging.WARNING)
|
||||
|
||||
|
||||
@@ -196,9 +196,11 @@ class MetadataRegistry:
|
||||
node_metadata[category] = {}
|
||||
node_metadata[category][node_id] = current_metadata[category][node_id]
|
||||
|
||||
# Save to cache if we have any metadata for this node
|
||||
# Save new metadata or clear stale cache entries when metadata is empty
|
||||
if any(node_metadata.values()):
|
||||
self.node_cache[cache_key] = node_metadata
|
||||
else:
|
||||
self.node_cache.pop(cache_key, None)
|
||||
|
||||
def clear_unused_cache(self):
|
||||
"""Clean up node_cache entries that are no longer in use"""
|
||||
|
||||
@@ -3,6 +3,18 @@ import os
|
||||
from .constants import MODELS, PROMPTS, SAMPLING, LORAS, SIZE, IMAGES, IS_SAMPLER
|
||||
|
||||
|
||||
def _store_checkpoint_metadata(metadata, node_id, model_name):
|
||||
"""Store checkpoint model information when available."""
|
||||
if not model_name:
|
||||
return
|
||||
metadata.setdefault(MODELS, {})
|
||||
metadata[MODELS][node_id] = {
|
||||
"name": model_name,
|
||||
"type": "checkpoint",
|
||||
"node_id": node_id
|
||||
}
|
||||
|
||||
|
||||
class NodeMetadataExtractor:
|
||||
"""Base class for node-specific metadata extraction"""
|
||||
|
||||
@@ -29,12 +41,48 @@ class CheckpointLoaderExtractor(NodeMetadataExtractor):
|
||||
return
|
||||
|
||||
model_name = inputs.get("ckpt_name")
|
||||
if model_name:
|
||||
metadata[MODELS][node_id] = {
|
||||
"name": model_name,
|
||||
"type": "checkpoint",
|
||||
"node_id": node_id
|
||||
}
|
||||
_store_checkpoint_metadata(metadata, node_id, model_name)
|
||||
|
||||
|
||||
class NunchakuFluxDiTLoaderExtractor(NodeMetadataExtractor):
|
||||
@staticmethod
|
||||
def extract(node_id, inputs, outputs, metadata):
|
||||
if not inputs or "model_path" not in inputs:
|
||||
return
|
||||
|
||||
model_name = inputs.get("model_path")
|
||||
_store_checkpoint_metadata(metadata, node_id, model_name)
|
||||
|
||||
|
||||
class NunchakuQwenImageDiTLoaderExtractor(NodeMetadataExtractor):
|
||||
@staticmethod
|
||||
def extract(node_id, inputs, outputs, metadata):
|
||||
if not inputs or "model_name" not in inputs:
|
||||
return
|
||||
|
||||
model_name = inputs.get("model_name")
|
||||
_store_checkpoint_metadata(metadata, node_id, model_name)
|
||||
|
||||
class GGUFLoaderExtractor(NodeMetadataExtractor):
|
||||
@staticmethod
|
||||
def extract(node_id, inputs, outputs, metadata):
|
||||
if not inputs or "gguf_name" not in inputs:
|
||||
return
|
||||
|
||||
model_name = inputs.get("gguf_name")
|
||||
_store_checkpoint_metadata(metadata, node_id, model_name)
|
||||
|
||||
|
||||
class KJNodesModelLoaderExtractor(NodeMetadataExtractor):
|
||||
"""Extract metadata from KJNodes loaders that expose `model_name`."""
|
||||
|
||||
@staticmethod
|
||||
def extract(node_id, inputs, outputs, metadata):
|
||||
if not inputs or "model_name" not in inputs:
|
||||
return
|
||||
|
||||
model_name = inputs.get("model_name")
|
||||
_store_checkpoint_metadata(metadata, node_id, model_name)
|
||||
|
||||
class TSCCheckpointLoaderExtractor(NodeMetadataExtractor):
|
||||
@staticmethod
|
||||
@@ -43,12 +91,7 @@ class TSCCheckpointLoaderExtractor(NodeMetadataExtractor):
|
||||
return
|
||||
|
||||
model_name = inputs.get("ckpt_name")
|
||||
if model_name:
|
||||
metadata[MODELS][node_id] = {
|
||||
"name": model_name,
|
||||
"type": "checkpoint",
|
||||
"node_id": node_id
|
||||
}
|
||||
_store_checkpoint_metadata(metadata, node_id, model_name)
|
||||
|
||||
# For loader node has lora_stack input, like Efficient Loader from Efficient Nodes
|
||||
active_loras = []
|
||||
@@ -651,6 +694,7 @@ NODE_EXTRACTORS = {
|
||||
"KSamplerAdvancedBasicPipe": KSamplerAdvancedBasicPipeExtractor, # comfyui-impact-pack
|
||||
"KSampler_inspire_pipe": KSamplerBasicPipeExtractor, # comfyui-inspire-pack
|
||||
"KSamplerAdvanced_inspire_pipe": KSamplerAdvancedBasicPipeExtractor, # comfyui-inspire-pack
|
||||
"KSampler_inspire": SamplerExtractor, # comfyui-inspire-pack
|
||||
# Sampling Selectors
|
||||
"KSamplerSelect": KSamplerSelectExtractor, # Add KSamplerSelect
|
||||
"BasicScheduler": BasicSchedulerExtractor, # Add BasicScheduler
|
||||
@@ -660,6 +704,13 @@ NODE_EXTRACTORS = {
|
||||
"comfyLoader": CheckpointLoaderExtractor, # easy comfyLoader
|
||||
"CheckpointLoaderSimpleWithImages": CheckpointLoaderExtractor, # CheckpointLoader|pysssss
|
||||
"TSC_EfficientLoader": TSCCheckpointLoaderExtractor, # Efficient Nodes
|
||||
"NunchakuFluxDiTLoader": NunchakuFluxDiTLoaderExtractor, # ComfyUI-Nunchaku
|
||||
"NunchakuQwenImageDiTLoader": NunchakuQwenImageDiTLoaderExtractor, # ComfyUI-Nunchaku
|
||||
"LoaderGGUF": GGUFLoaderExtractor, # calcuis gguf
|
||||
"LoaderGGUFAdvanced": GGUFLoaderExtractor, # calcuis gguf
|
||||
"GGUFLoaderKJ": KJNodesModelLoaderExtractor, # KJNodes
|
||||
"DiffusionModelLoaderKJ": KJNodesModelLoaderExtractor, # KJNodes
|
||||
"CheckpointLoaderKJ": CheckpointLoaderExtractor, # KJNodes
|
||||
"UNETLoader": UNETLoaderExtractor, # Updated to use dedicated extractor
|
||||
"UnetLoaderGGUF": UNETLoaderExtractor, # Updated to use dedicated extractor
|
||||
"LoraLoader": LoraLoaderExtractor,
|
||||
|
||||
@@ -141,7 +141,6 @@ class LoraManagerTextLoader:
|
||||
"required": {
|
||||
"model": ("MODEL",),
|
||||
"lora_syntax": ("STRING", {
|
||||
"defaultInput": True,
|
||||
"forceInput": True,
|
||||
"tooltip": "Format: <lora:lora_name:strength> separated by spaces or punctuation"
|
||||
}),
|
||||
|
||||
@@ -273,9 +273,15 @@ class SaveImage:
|
||||
length = int(parts[1])
|
||||
prompt = prompt[:length]
|
||||
filename = filename.replace(segment, prompt.strip())
|
||||
elif key == "model" and 'checkpoint' in metadata_dict:
|
||||
model = metadata_dict.get('checkpoint', '')
|
||||
model = os.path.splitext(os.path.basename(model))[0]
|
||||
elif key == "model":
|
||||
model_value = metadata_dict.get('checkpoint')
|
||||
if isinstance(model_value, (bytes, os.PathLike)):
|
||||
model_value = str(model_value)
|
||||
|
||||
if not isinstance(model_value, str) or not model_value:
|
||||
model = "model_unavailable"
|
||||
else:
|
||||
model = os.path.splitext(os.path.basename(model_value))[0]
|
||||
if len(parts) >= 2:
|
||||
length = int(parts[1])
|
||||
model = model[:length]
|
||||
@@ -442,4 +448,4 @@ class SaveImage:
|
||||
add_counter_to_filename
|
||||
)
|
||||
|
||||
return (images,)
|
||||
return (images,)
|
||||
|
||||
@@ -23,6 +23,10 @@ class TriggerWordToggle:
|
||||
"default": True,
|
||||
"tooltip": "Sets the default initial state (active or inactive) when trigger words are added."
|
||||
}),
|
||||
"allow_strength_adjustment": ("BOOLEAN", {
|
||||
"default": False,
|
||||
"tooltip": "Enable mouse wheel adjustment of each trigger word's strength."
|
||||
}),
|
||||
},
|
||||
"optional": FlexibleOptionalInputType(any_type),
|
||||
"hidden": {
|
||||
@@ -47,7 +51,14 @@ class TriggerWordToggle:
|
||||
else:
|
||||
return data
|
||||
|
||||
def process_trigger_words(self, id, group_mode, default_active, **kwargs):
|
||||
def process_trigger_words(
|
||||
self,
|
||||
id,
|
||||
group_mode,
|
||||
default_active,
|
||||
allow_strength_adjustment=False,
|
||||
**kwargs,
|
||||
):
|
||||
# Handle both old and new formats for trigger_words
|
||||
trigger_words_data = self._get_toggle_data(kwargs, 'orinalMessage')
|
||||
trigger_words = trigger_words_data if isinstance(trigger_words_data, str) else ""
|
||||
@@ -63,27 +74,89 @@ class TriggerWordToggle:
|
||||
trigger_data = json.loads(trigger_data)
|
||||
|
||||
# Create dictionaries to track active state of words or groups
|
||||
active_state = {item['text']: item.get('active', False) for item in trigger_data}
|
||||
# Also track strength values for each trigger word
|
||||
active_state = {}
|
||||
strength_map = {}
|
||||
|
||||
if group_mode:
|
||||
# Split by two or more consecutive commas to get groups
|
||||
groups = re.split(r',{2,}', trigger_words)
|
||||
# Remove leading/trailing whitespace from each group
|
||||
groups = [group.strip() for group in groups]
|
||||
|
||||
# Filter groups: keep those not in toggle_trigger_words or those that are active
|
||||
filtered_groups = [group for group in groups if group not in active_state or active_state[group]]
|
||||
|
||||
if filtered_groups:
|
||||
filtered_triggers = ', '.join(filtered_groups)
|
||||
for item in trigger_data:
|
||||
text = item['text']
|
||||
active = item.get('active', False)
|
||||
# Extract strength if it's in the format "(word:strength)"
|
||||
strength_match = re.match(r'\((.+):([\d.]+)\)', text)
|
||||
if strength_match:
|
||||
original_word = strength_match.group(1).strip()
|
||||
strength = float(strength_match.group(2))
|
||||
active_state[original_word] = active
|
||||
if allow_strength_adjustment:
|
||||
strength_map[original_word] = strength
|
||||
else:
|
||||
filtered_triggers = ""
|
||||
active_state[text.strip()] = active
|
||||
|
||||
if group_mode:
|
||||
if isinstance(trigger_data, list):
|
||||
filtered_groups = []
|
||||
for item in trigger_data:
|
||||
text = (item.get('text') or "").strip()
|
||||
if not text:
|
||||
continue
|
||||
if item.get('active', False):
|
||||
filtered_groups.append(text)
|
||||
|
||||
if filtered_groups:
|
||||
filtered_triggers = ', '.join(filtered_groups)
|
||||
else:
|
||||
filtered_triggers = ""
|
||||
else:
|
||||
# Split by two or more consecutive commas to get groups
|
||||
groups = re.split(r',{2,}', trigger_words)
|
||||
# Remove leading/trailing whitespace from each group
|
||||
groups = [group.strip() for group in groups]
|
||||
|
||||
# Process groups: keep those not in toggle_trigger_words or those that are active
|
||||
filtered_groups = []
|
||||
for group in groups:
|
||||
# Check if this group contains any words that are in the active_state
|
||||
group_words = [word.strip() for word in group.split(',')]
|
||||
active_group_words = []
|
||||
|
||||
for word in group_words:
|
||||
word_comparison = re.sub(r'\((.+):([\d.]+)\)', r'\1', word).strip()
|
||||
|
||||
if word_comparison not in active_state or active_state[word_comparison]:
|
||||
active_group_words.append(
|
||||
self._format_word_output(
|
||||
word_comparison,
|
||||
strength_map,
|
||||
allow_strength_adjustment,
|
||||
)
|
||||
)
|
||||
|
||||
if active_group_words:
|
||||
filtered_groups.append(', '.join(active_group_words))
|
||||
|
||||
if filtered_groups:
|
||||
filtered_triggers = ', '.join(filtered_groups)
|
||||
else:
|
||||
filtered_triggers = ""
|
||||
else:
|
||||
# Original behavior for individual words mode
|
||||
# Normal mode: split by commas and treat each word as a separate tag
|
||||
original_words = [word.strip() for word in trigger_words.split(',')]
|
||||
# Filter out empty strings
|
||||
original_words = [word for word in original_words if word]
|
||||
filtered_words = [word for word in original_words if word not in active_state or active_state[word]]
|
||||
|
||||
filtered_words = []
|
||||
for word in original_words:
|
||||
# Remove any existing strength formatting for comparison
|
||||
word_comparison = re.sub(r'\((.+):([\d.]+)\)', r'\1', word).strip()
|
||||
|
||||
if word_comparison not in active_state or active_state[word_comparison]:
|
||||
filtered_words.append(
|
||||
self._format_word_output(
|
||||
word_comparison,
|
||||
strength_map,
|
||||
allow_strength_adjustment,
|
||||
)
|
||||
)
|
||||
|
||||
if filtered_words:
|
||||
filtered_triggers = ', '.join(filtered_words)
|
||||
@@ -93,4 +166,9 @@ class TriggerWordToggle:
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing trigger words: {e}")
|
||||
|
||||
return (filtered_triggers,)
|
||||
return (filtered_triggers,)
|
||||
|
||||
def _format_word_output(self, base_word, strength_map, allow_strength_adjustment):
|
||||
if allow_strength_adjustment and base_word in strength_map:
|
||||
return f"({base_word}:{strength_map[base_word]:.2f})"
|
||||
return base_word
|
||||
|
||||
@@ -110,10 +110,14 @@ def nunchaku_load_lora(model, lora_name, lora_strength):
|
||||
model_wrapper.model = transformer
|
||||
ret_model_wrapper.model = transformer
|
||||
|
||||
# Get full path to the LoRA file
|
||||
lora_path = folder_paths.get_full_path("loras", lora_name)
|
||||
# Get full path to the LoRA file. Allow both direct paths and registered LoRA names.
|
||||
lora_path = lora_name if os.path.isfile(lora_name) else folder_paths.get_full_path("loras", lora_name)
|
||||
if not lora_path or not os.path.isfile(lora_path):
|
||||
logger.warning("Skipping LoRA '%s' because it could not be found", lora_name)
|
||||
return model
|
||||
|
||||
ret_model_wrapper.loras.append((lora_path, lora_strength))
|
||||
|
||||
|
||||
# Convert the LoRA to diffusers format
|
||||
sd = to_diffusers(lora_path)
|
||||
|
||||
|
||||
@@ -21,7 +21,6 @@ class WanVideoLoraSelectFromText:
|
||||
"merge_lora": ("BOOLEAN", {"default": True, "tooltip": "Merge LoRAs into the model, otherwise they are loaded on the fly. Always disabled for GGUF and scaled fp8 models. This affects ALL LoRAs, not just the current one"}),
|
||||
"lora_syntax": ("STRING", {
|
||||
"multiline": True,
|
||||
"defaultInput": True,
|
||||
"forceInput": True,
|
||||
"tooltip": "Connect a TEXT output for LoRA syntax: <lora:name:strength>"
|
||||
}),
|
||||
|
||||
@@ -8,6 +8,7 @@ from typing import Dict, List, Any, Optional, Tuple
|
||||
from abc import ABC, abstractmethod
|
||||
from ..config import config
|
||||
from ..utils.constants import VALID_LORA_TYPES
|
||||
from ..utils.civitai_utils import rewrite_preview_url
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -78,7 +79,7 @@ class RecipeMetadataParser(ABC):
|
||||
# Update model name if available
|
||||
if 'model' in civitai_info and 'name' in civitai_info['model']:
|
||||
lora_entry['name'] = civitai_info['model']['name']
|
||||
|
||||
|
||||
lora_entry['id'] = civitai_info.get('id')
|
||||
lora_entry['modelId'] = civitai_info.get('modelId')
|
||||
|
||||
@@ -88,7 +89,10 @@ class RecipeMetadataParser(ABC):
|
||||
|
||||
# Get thumbnail URL from first image
|
||||
if 'images' in civitai_info and civitai_info['images']:
|
||||
lora_entry['thumbnailUrl'] = civitai_info['images'][0].get('url', '')
|
||||
image_url = civitai_info['images'][0].get('url')
|
||||
if image_url:
|
||||
rewritten_image_url, _ = rewrite_preview_url(image_url, media_type='image')
|
||||
lora_entry['thumbnailUrl'] = rewritten_image_url or image_url
|
||||
|
||||
# Get base model
|
||||
current_base_model = civitai_info.get('baseModel', '')
|
||||
@@ -151,33 +155,59 @@ class RecipeMetadataParser(ABC):
|
||||
|
||||
Args:
|
||||
checkpoint: The checkpoint entry to populate
|
||||
civitai_info: The response from Civitai API
|
||||
civitai_info: The response from Civitai API or a (data, error_msg) tuple
|
||||
|
||||
Returns:
|
||||
The populated checkpoint dict
|
||||
"""
|
||||
try:
|
||||
if civitai_info and civitai_info.get("error") != "Model not found":
|
||||
# Update model name if available
|
||||
if 'model' in civitai_info and 'name' in civitai_info['model']:
|
||||
checkpoint['name'] = civitai_info['model']['name']
|
||||
|
||||
# Update version if available
|
||||
if 'name' in civitai_info:
|
||||
checkpoint['version'] = civitai_info.get('name', '')
|
||||
|
||||
# Get thumbnail URL from first image
|
||||
if 'images' in civitai_info and civitai_info['images']:
|
||||
checkpoint['thumbnailUrl'] = civitai_info['images'][0].get('url', '')
|
||||
|
||||
# Get base model
|
||||
checkpoint['baseModel'] = civitai_info.get('baseModel', '')
|
||||
|
||||
# Get download URL
|
||||
checkpoint['downloadUrl'] = civitai_info.get('downloadUrl', '')
|
||||
else:
|
||||
# Model not found or deleted
|
||||
civitai_data, error_msg = (
|
||||
(civitai_info, None)
|
||||
if not isinstance(civitai_info, tuple)
|
||||
else civitai_info
|
||||
)
|
||||
|
||||
if not civitai_data or error_msg == "Model not found":
|
||||
checkpoint['isDeleted'] = True
|
||||
return checkpoint
|
||||
|
||||
if 'model' in civitai_data and 'name' in civitai_data['model']:
|
||||
checkpoint['name'] = civitai_data['model']['name']
|
||||
|
||||
if 'name' in civitai_data:
|
||||
checkpoint['version'] = civitai_data.get('name', '')
|
||||
|
||||
if 'images' in civitai_data and civitai_data['images']:
|
||||
image_url = civitai_data['images'][0].get('url')
|
||||
if image_url:
|
||||
rewritten_image_url, _ = rewrite_preview_url(image_url, media_type='image')
|
||||
checkpoint['thumbnailUrl'] = rewritten_image_url or image_url
|
||||
|
||||
checkpoint['baseModel'] = civitai_data.get('baseModel', '')
|
||||
checkpoint['downloadUrl'] = civitai_data.get('downloadUrl', '')
|
||||
|
||||
checkpoint['modelId'] = civitai_data.get('modelId', checkpoint.get('modelId', 0))
|
||||
|
||||
if 'files' in civitai_data:
|
||||
model_file = next(
|
||||
(
|
||||
file
|
||||
for file in civitai_data.get('files', [])
|
||||
if file.get('type') == 'Model'
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
if model_file:
|
||||
checkpoint['size'] = model_file.get('sizeKB', 0) * 1024
|
||||
|
||||
sha256 = model_file.get('hashes', {}).get('SHA256')
|
||||
if sha256:
|
||||
checkpoint['hash'] = sha256.lower()
|
||||
|
||||
file_name = model_file.get('name', '')
|
||||
if file_name:
|
||||
checkpoint['file_name'] = os.path.splitext(file_name)[0]
|
||||
except Exception as e:
|
||||
logger.error(f"Error populating checkpoint from Civitai info: {e}")
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Parser for Automatic1111 metadata format."""
|
||||
|
||||
import re
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
@@ -22,6 +23,7 @@ class AutomaticMetadataParser(RecipeMetadataParser):
|
||||
CIVITAI_METADATA_REGEX = r', Civitai metadata:\s*(\{.*?\})'
|
||||
EXTRANETS_REGEX = r'<(lora|hypernet):([^:]+):(-?[0-9.]+)>'
|
||||
MODEL_HASH_PATTERN = r'Model hash: ([a-zA-Z0-9]+)'
|
||||
MODEL_NAME_PATTERN = r'Model: ([^,]+)'
|
||||
VAE_HASH_PATTERN = r'VAE hash: ([a-zA-Z0-9]+)'
|
||||
|
||||
def is_metadata_matching(self, user_comment: str) -> bool:
|
||||
@@ -115,6 +117,12 @@ class AutomaticMetadataParser(RecipeMetadataParser):
|
||||
except json.JSONDecodeError:
|
||||
logger.error("Error parsing hashes JSON")
|
||||
|
||||
# Pick up model hash from parsed hashes if available
|
||||
if "hashes" in metadata and not metadata.get("model_hash"):
|
||||
model_hash_from_hashes = metadata["hashes"].get("model")
|
||||
if model_hash_from_hashes:
|
||||
metadata["model_hash"] = model_hash_from_hashes
|
||||
|
||||
# Extract Lora hashes in alternative format
|
||||
lora_hashes_match = re.search(self.LORA_HASHES_REGEX, params_section)
|
||||
if not hashes_match and lora_hashes_match:
|
||||
@@ -137,6 +145,17 @@ class AutomaticMetadataParser(RecipeMetadataParser):
|
||||
params_section = params_section.replace(lora_hashes_match.group(0), '')
|
||||
except Exception as e:
|
||||
logger.error(f"Error parsing Lora hashes: {e}")
|
||||
|
||||
# Extract checkpoint model hash/name when provided outside Civitai resources
|
||||
model_hash_match = re.search(self.MODEL_HASH_PATTERN, params_section)
|
||||
if model_hash_match:
|
||||
metadata["model_hash"] = model_hash_match.group(1).strip()
|
||||
params_section = params_section.replace(model_hash_match.group(0), '')
|
||||
|
||||
model_name_match = re.search(self.MODEL_NAME_PATTERN, params_section)
|
||||
if model_name_match:
|
||||
metadata["model_name"] = model_name_match.group(1).strip()
|
||||
params_section = params_section.replace(model_name_match.group(0), '')
|
||||
|
||||
# Extract basic parameters
|
||||
param_pattern = r'([A-Za-z\s]+): ([^,]+)'
|
||||
@@ -178,9 +197,10 @@ class AutomaticMetadataParser(RecipeMetadataParser):
|
||||
|
||||
metadata["gen_params"] = gen_params
|
||||
|
||||
# Extract LoRA information
|
||||
# Extract LoRA and checkpoint information
|
||||
loras = []
|
||||
base_model_counts = {}
|
||||
checkpoint = None
|
||||
|
||||
# First use Civitai resources if available (more reliable source)
|
||||
if metadata.get("civitai_resources"):
|
||||
@@ -202,6 +222,50 @@ class AutomaticMetadataParser(RecipeMetadataParser):
|
||||
resource["modelVersionId"] = air_modelVersionId
|
||||
# --- End added ---
|
||||
|
||||
if resource.get("type") == "checkpoint" and resource.get("modelVersionId"):
|
||||
version_id = resource.get("modelVersionId")
|
||||
version_id_str = str(version_id)
|
||||
checkpoint_entry = {
|
||||
'id': version_id,
|
||||
'modelId': resource.get("modelId", 0),
|
||||
'name': resource.get("modelName", "Unknown Checkpoint"),
|
||||
'version': resource.get("modelVersionName", resource.get("versionName", "")),
|
||||
'type': resource.get("type", "checkpoint"),
|
||||
'existsLocally': False,
|
||||
'localPath': None,
|
||||
'file_name': resource.get("modelName", ""),
|
||||
'hash': resource.get("hash", "") or "",
|
||||
'thumbnailUrl': '/loras_static/images/no-preview.png',
|
||||
'baseModel': '',
|
||||
'size': 0,
|
||||
'downloadUrl': '',
|
||||
'isDeleted': False
|
||||
}
|
||||
|
||||
if metadata_provider:
|
||||
try:
|
||||
civitai_info = await metadata_provider.get_model_version_info(version_id_str)
|
||||
checkpoint_entry = await self.populate_checkpoint_from_civitai(
|
||||
checkpoint_entry,
|
||||
civitai_info
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Error fetching Civitai info for checkpoint version %s: %s",
|
||||
version_id,
|
||||
e,
|
||||
)
|
||||
|
||||
# Prefer the first checkpoint found
|
||||
if checkpoint_entry.get("baseModel"):
|
||||
base_model_value = checkpoint_entry["baseModel"]
|
||||
base_model_counts[base_model_value] = base_model_counts.get(base_model_value, 0) + 1
|
||||
|
||||
if checkpoint is None:
|
||||
checkpoint = checkpoint_entry
|
||||
|
||||
continue
|
||||
|
||||
if resource.get("type") in ["lora", "lycoris", "hypernet"] and resource.get("modelVersionId"):
|
||||
# Initialize lora entry
|
||||
lora_entry = {
|
||||
@@ -237,6 +301,52 @@ class AutomaticMetadataParser(RecipeMetadataParser):
|
||||
|
||||
loras.append(lora_entry)
|
||||
|
||||
# Fallback checkpoint parsing from generic "Model" and "Model hash" fields
|
||||
if checkpoint is None:
|
||||
model_hash = metadata.get("model_hash")
|
||||
if not model_hash and metadata.get("hashes"):
|
||||
model_hash = metadata["hashes"].get("model")
|
||||
|
||||
model_name = metadata.get("model_name")
|
||||
file_name = ""
|
||||
if model_name:
|
||||
cleaned_name = re.split(r"[\\\\/]", model_name)[-1]
|
||||
file_name = os.path.splitext(cleaned_name)[0]
|
||||
|
||||
if model_hash or model_name:
|
||||
checkpoint_entry = {
|
||||
'id': 0,
|
||||
'modelId': 0,
|
||||
'name': model_name or "Unknown Checkpoint",
|
||||
'version': '',
|
||||
'type': 'checkpoint',
|
||||
'hash': model_hash or "",
|
||||
'existsLocally': False,
|
||||
'localPath': None,
|
||||
'file_name': file_name,
|
||||
'thumbnailUrl': '/loras_static/images/no-preview.png',
|
||||
'baseModel': '',
|
||||
'size': 0,
|
||||
'downloadUrl': '',
|
||||
'isDeleted': False
|
||||
}
|
||||
|
||||
if metadata_provider and model_hash:
|
||||
try:
|
||||
civitai_info = await metadata_provider.get_model_by_hash(model_hash)
|
||||
checkpoint_entry = await self.populate_checkpoint_from_civitai(
|
||||
checkpoint_entry,
|
||||
civitai_info
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for checkpoint hash {model_hash}: {e}")
|
||||
|
||||
if checkpoint_entry.get("baseModel"):
|
||||
base_model_value = checkpoint_entry["baseModel"]
|
||||
base_model_counts[base_model_value] = base_model_counts.get(base_model_value, 0) + 1
|
||||
|
||||
checkpoint = checkpoint_entry
|
||||
|
||||
# If no LoRAs from Civitai resources or to supplement, extract from metadata["hashes"]
|
||||
if not loras or len(loras) == 0:
|
||||
# Extract lora weights from extranet tags in prompt (for later use)
|
||||
@@ -300,7 +410,9 @@ class AutomaticMetadataParser(RecipeMetadataParser):
|
||||
|
||||
# Try to get base model from resources or make educated guess
|
||||
base_model = None
|
||||
if base_model_counts:
|
||||
if checkpoint and checkpoint.get("baseModel"):
|
||||
base_model = checkpoint.get("baseModel")
|
||||
elif base_model_counts:
|
||||
# Use the most common base model from the loras
|
||||
base_model = max(base_model_counts.items(), key=lambda x: x[1])[0]
|
||||
|
||||
@@ -317,6 +429,10 @@ class AutomaticMetadataParser(RecipeMetadataParser):
|
||||
'gen_params': filtered_gen_params,
|
||||
'from_automatic_metadata': True
|
||||
}
|
||||
|
||||
if checkpoint:
|
||||
result['checkpoint'] = checkpoint
|
||||
result['model'] = checkpoint
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@@ -23,13 +23,48 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
"""
|
||||
if not metadata or not isinstance(metadata, dict):
|
||||
return False
|
||||
|
||||
# Check for key markers specific to Civitai image metadata
|
||||
return any([
|
||||
"resources" in metadata,
|
||||
"civitaiResources" in metadata,
|
||||
"additionalResources" in metadata
|
||||
])
|
||||
|
||||
def has_markers(payload: Dict[str, Any]) -> bool:
|
||||
# Check for common CivitAI image metadata fields
|
||||
civitai_image_fields = (
|
||||
"resources",
|
||||
"civitaiResources",
|
||||
"additionalResources",
|
||||
"hashes",
|
||||
"prompt",
|
||||
"negativePrompt",
|
||||
"steps",
|
||||
"sampler",
|
||||
"cfgScale",
|
||||
"seed",
|
||||
"width",
|
||||
"height",
|
||||
"Model",
|
||||
"Model hash"
|
||||
)
|
||||
return any(key in payload for key in civitai_image_fields)
|
||||
|
||||
# Check the main metadata object
|
||||
if has_markers(metadata):
|
||||
return True
|
||||
|
||||
# Check for LoRA hash patterns
|
||||
hashes = metadata.get("hashes")
|
||||
if isinstance(hashes, dict) and any(str(key).lower().startswith("lora:") for key in hashes):
|
||||
return True
|
||||
|
||||
# Check nested meta object (common in CivitAI image responses)
|
||||
nested_meta = metadata.get("meta")
|
||||
if isinstance(nested_meta, dict):
|
||||
if has_markers(nested_meta):
|
||||
return True
|
||||
|
||||
# Also check for LoRA hash patterns in nested meta
|
||||
hashes = nested_meta.get("hashes")
|
||||
if isinstance(hashes, dict) and any(str(key).lower().startswith("lora:") for key in hashes):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
async def parse_metadata(self, metadata, recipe_scanner=None, civitai_client=None) -> Dict[str, Any]:
|
||||
"""Parse metadata from Civitai image format
|
||||
@@ -45,11 +80,32 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
try:
|
||||
# Get metadata provider instead of using civitai_client directly
|
||||
metadata_provider = await get_default_metadata_provider()
|
||||
|
||||
# Civitai image responses may wrap the actual metadata inside a "meta" key
|
||||
if (
|
||||
isinstance(metadata, dict)
|
||||
and "meta" in metadata
|
||||
and isinstance(metadata["meta"], dict)
|
||||
):
|
||||
inner_meta = metadata["meta"]
|
||||
if any(
|
||||
key in inner_meta
|
||||
for key in (
|
||||
"resources",
|
||||
"civitaiResources",
|
||||
"additionalResources",
|
||||
"hashes",
|
||||
"prompt",
|
||||
"negativePrompt",
|
||||
)
|
||||
):
|
||||
metadata = inner_meta
|
||||
|
||||
# Initialize result structure
|
||||
result = {
|
||||
'base_model': None,
|
||||
'loras': [],
|
||||
'model': None,
|
||||
'gen_params': {},
|
||||
'from_civitai_image': True
|
||||
}
|
||||
@@ -61,8 +117,9 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
lora_hashes = {}
|
||||
if "hashes" in metadata and isinstance(metadata["hashes"], dict):
|
||||
for key, hash_value in metadata["hashes"].items():
|
||||
if key.startswith("LORA:"):
|
||||
lora_name = key.replace("LORA:", "")
|
||||
key_str = str(key)
|
||||
if key_str.lower().startswith("lora:"):
|
||||
lora_name = key_str.split(":", 1)[1]
|
||||
lora_hashes[lora_name] = hash_value
|
||||
|
||||
# Extract prompt and negative prompt
|
||||
@@ -174,13 +231,48 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
# Process civitaiResources array
|
||||
if "civitaiResources" in metadata and isinstance(metadata["civitaiResources"], list):
|
||||
for resource in metadata["civitaiResources"]:
|
||||
# Get unique identifier for deduplication
|
||||
# Get resource type and identifier
|
||||
resource_type = str(resource.get("type") or "").lower()
|
||||
version_id = str(resource.get("modelVersionId", ""))
|
||||
|
||||
|
||||
if resource_type == "checkpoint":
|
||||
checkpoint_entry = {
|
||||
'id': resource.get("modelVersionId", 0),
|
||||
'modelId': resource.get("modelId", 0),
|
||||
'name': resource.get("modelName", "Unknown Checkpoint"),
|
||||
'version': resource.get("modelVersionName", ""),
|
||||
'type': resource.get("type", "checkpoint"),
|
||||
'existsLocally': False,
|
||||
'localPath': None,
|
||||
'file_name': resource.get("modelName", ""),
|
||||
'hash': resource.get("hash", "") or "",
|
||||
'thumbnailUrl': '/loras_static/images/no-preview.png',
|
||||
'baseModel': '',
|
||||
'size': 0,
|
||||
'downloadUrl': '',
|
||||
'isDeleted': False
|
||||
}
|
||||
|
||||
if version_id and metadata_provider:
|
||||
try:
|
||||
civitai_info = await metadata_provider.get_model_version_info(version_id)
|
||||
|
||||
checkpoint_entry = await self.populate_checkpoint_from_civitai(
|
||||
checkpoint_entry,
|
||||
civitai_info
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for checkpoint version {version_id}: {e}")
|
||||
|
||||
if result["model"] is None:
|
||||
result["model"] = checkpoint_entry
|
||||
|
||||
continue
|
||||
|
||||
# Skip if we've already added this LoRA
|
||||
if version_id and version_id in added_loras:
|
||||
continue
|
||||
|
||||
|
||||
# Initialize lora entry
|
||||
lora_entry = {
|
||||
'id': resource.get("modelVersionId", 0),
|
||||
@@ -196,31 +288,31 @@ class CivitaiApiMetadataParser(RecipeMetadataParser):
|
||||
'downloadUrl': '',
|
||||
'isDeleted': False
|
||||
}
|
||||
|
||||
|
||||
# Try to get info from Civitai if modelVersionId is available
|
||||
if version_id and metadata_provider:
|
||||
try:
|
||||
# Use get_model_version_info instead of get_model_version
|
||||
civitai_info = await metadata_provider.get_model_version_info(version_id)
|
||||
|
||||
|
||||
populated_entry = await self.populate_lora_from_civitai(
|
||||
lora_entry,
|
||||
civitai_info,
|
||||
recipe_scanner,
|
||||
base_model_counts
|
||||
)
|
||||
|
||||
|
||||
if populated_entry is None:
|
||||
continue # Skip invalid LoRA types
|
||||
|
||||
|
||||
lora_entry = populated_entry
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for model version {version_id}: {e}")
|
||||
|
||||
|
||||
# Track this LoRA in our deduplication dict
|
||||
if version_id:
|
||||
added_loras[version_id] = len(result["loras"])
|
||||
|
||||
|
||||
result["loras"].append(lora_entry)
|
||||
|
||||
# Process additionalResources array
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Parser for meta format (Lora_N Model hash) metadata."""
|
||||
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
@@ -145,14 +146,53 @@ class MetaFormatParser(RecipeMetadataParser):
|
||||
|
||||
loras.append(lora_entry)
|
||||
|
||||
# Extract model information
|
||||
model = None
|
||||
if 'model' in metadata:
|
||||
model = metadata['model']
|
||||
# Extract checkpoint information from generic Model/Model hash fields
|
||||
checkpoint = None
|
||||
model_hash = metadata.get("model_hash")
|
||||
model_name = metadata.get("model")
|
||||
|
||||
if model_hash or model_name:
|
||||
cleaned_name = None
|
||||
if model_name:
|
||||
cleaned_name = re.split(r"[\\\\/]", model_name)[-1]
|
||||
cleaned_name = os.path.splitext(cleaned_name)[0]
|
||||
|
||||
checkpoint_entry = {
|
||||
'id': 0,
|
||||
'modelId': 0,
|
||||
'name': model_name or "Unknown Checkpoint",
|
||||
'version': '',
|
||||
'type': 'checkpoint',
|
||||
'hash': model_hash or "",
|
||||
'existsLocally': False,
|
||||
'localPath': None,
|
||||
'file_name': cleaned_name or (model_name or ""),
|
||||
'thumbnailUrl': '/loras_static/images/no-preview.png',
|
||||
'baseModel': '',
|
||||
'size': 0,
|
||||
'downloadUrl': '',
|
||||
'isDeleted': False
|
||||
}
|
||||
|
||||
if metadata_provider and model_hash:
|
||||
try:
|
||||
civitai_info = await metadata_provider.get_model_by_hash(model_hash)
|
||||
checkpoint_entry = await self.populate_checkpoint_from_civitai(
|
||||
checkpoint_entry,
|
||||
civitai_info
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for checkpoint hash {model_hash}: {e}")
|
||||
|
||||
if checkpoint_entry.get("baseModel"):
|
||||
base_model_value = checkpoint_entry["baseModel"]
|
||||
base_model_counts[base_model_value] = base_model_counts.get(base_model_value, 0) + 1
|
||||
|
||||
checkpoint = checkpoint_entry
|
||||
|
||||
# Set base_model to the most common one from civitai_info
|
||||
base_model = None
|
||||
if base_model_counts:
|
||||
# Set base_model to the most common one from civitai_info or checkpoint
|
||||
base_model = checkpoint["baseModel"] if checkpoint and checkpoint.get("baseModel") else None
|
||||
if not base_model and base_model_counts:
|
||||
base_model = max(base_model_counts.items(), key=lambda x: x[1])[0]
|
||||
|
||||
# Extract generation parameters for recipe metadata
|
||||
@@ -170,7 +210,8 @@ class MetaFormatParser(RecipeMetadataParser):
|
||||
'loras': loras,
|
||||
'gen_params': gen_params,
|
||||
'raw_metadata': metadata,
|
||||
'from_meta_format': True
|
||||
'from_meta_format': True,
|
||||
**({'checkpoint': checkpoint, 'model': checkpoint} if checkpoint else {})
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import re
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict, Any
|
||||
from typing import Dict, Any, Optional
|
||||
from ...config import config
|
||||
from ..base import RecipeMetadataParser
|
||||
from ..constants import GEN_PARAM_KEYS
|
||||
@@ -16,6 +16,28 @@ class RecipeFormatParser(RecipeMetadataParser):
|
||||
|
||||
# Regular expression pattern for extracting recipe metadata
|
||||
METADATA_MARKER = r'Recipe metadata: (\{.*\})'
|
||||
|
||||
async def _get_lora_from_version_index(self, recipe_scanner, model_version_id: Any) -> Optional[Dict[str, Any]]:
|
||||
"""Return a cached LoRA entry by modelVersionId if available."""
|
||||
|
||||
if not recipe_scanner or not getattr(recipe_scanner, "_lora_scanner", None):
|
||||
return None
|
||||
|
||||
try:
|
||||
normalized_id = int(model_version_id)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
try:
|
||||
cache = await recipe_scanner._lora_scanner.get_cached_data()
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
logger.debug("Unable to load lora cache for version lookup: %s", exc)
|
||||
return None
|
||||
|
||||
if not cache or not getattr(cache, "version_index", None):
|
||||
return None
|
||||
|
||||
return cache.version_index.get(normalized_id)
|
||||
|
||||
def is_metadata_matching(self, user_comment: str) -> bool:
|
||||
"""Check if the user comment matches the metadata format"""
|
||||
@@ -53,49 +75,110 @@ class RecipeFormatParser(RecipeMetadataParser):
|
||||
'type': 'lora',
|
||||
'weight': lora.get('strength', 1.0),
|
||||
'file_name': lora.get('file_name', ''),
|
||||
'hash': lora.get('hash', '')
|
||||
'hash': lora.get('hash', ''),
|
||||
'existsLocally': False,
|
||||
'inLibrary': False,
|
||||
'localPath': None,
|
||||
'thumbnailUrl': '/loras_static/images/no-preview.png',
|
||||
'size': 0
|
||||
}
|
||||
|
||||
# Check if this LoRA exists locally by SHA256 hash
|
||||
if lora.get('hash') and recipe_scanner:
|
||||
if recipe_scanner:
|
||||
lora_scanner = recipe_scanner._lora_scanner
|
||||
exists_locally = lora_scanner.has_hash(lora['hash'])
|
||||
if exists_locally:
|
||||
lora_cache = await lora_scanner.get_cached_data()
|
||||
lora_item = next((item for item in lora_cache.raw_data if item['sha256'].lower() == lora['hash'].lower()), None)
|
||||
if lora_item:
|
||||
|
||||
if lora.get('hash'):
|
||||
exists_locally = lora_scanner.has_hash(lora['hash'])
|
||||
if exists_locally:
|
||||
lora_cache = await lora_scanner.get_cached_data()
|
||||
lora_item = next((item for item in lora_cache.raw_data if item['sha256'].lower() == lora['hash'].lower()), None)
|
||||
if lora_item:
|
||||
lora_entry['existsLocally'] = True
|
||||
lora_entry['inLibrary'] = True
|
||||
lora_entry['localPath'] = lora_item['file_path']
|
||||
lora_entry['file_name'] = lora_item['file_name']
|
||||
lora_entry['size'] = lora_item['size']
|
||||
lora_entry['thumbnailUrl'] = config.get_preview_static_url(lora_item['preview_url'])
|
||||
|
||||
else:
|
||||
lora_entry['existsLocally'] = False
|
||||
lora_entry['inLibrary'] = False
|
||||
lora_entry['localPath'] = None
|
||||
|
||||
# If we still don't have a local match, try matching by modelVersionId
|
||||
if not lora_entry['existsLocally'] and lora.get('modelVersionId') is not None:
|
||||
cached_lora = await self._get_lora_from_version_index(recipe_scanner, lora.get('modelVersionId'))
|
||||
if cached_lora:
|
||||
lora_entry['existsLocally'] = True
|
||||
lora_entry['localPath'] = lora_item['file_path']
|
||||
lora_entry['file_name'] = lora_item['file_name']
|
||||
lora_entry['size'] = lora_item['size']
|
||||
lora_entry['thumbnailUrl'] = config.get_preview_static_url(lora_item['preview_url'])
|
||||
|
||||
else:
|
||||
lora_entry['existsLocally'] = False
|
||||
lora_entry['localPath'] = None
|
||||
|
||||
# Try to get additional info from Civitai if we have a model version ID
|
||||
if lora.get('modelVersionId') and metadata_provider:
|
||||
try:
|
||||
civitai_info_tuple = await metadata_provider.get_model_version_info(lora['modelVersionId'])
|
||||
# Populate lora entry with Civitai info
|
||||
populated_entry = await self.populate_lora_from_civitai(
|
||||
lora_entry,
|
||||
civitai_info_tuple,
|
||||
recipe_scanner,
|
||||
None, # No need to track base model counts
|
||||
lora['hash']
|
||||
)
|
||||
if populated_entry is None:
|
||||
continue # Skip invalid LoRA types
|
||||
lora_entry = populated_entry
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for LoRA: {e}")
|
||||
lora_entry['thumbnailUrl'] = '/loras_static/images/no-preview.png'
|
||||
lora_entry['inLibrary'] = True
|
||||
lora_entry['localPath'] = cached_lora.get('file_path')
|
||||
lora_entry['file_name'] = cached_lora.get('file_name') or lora_entry['file_name']
|
||||
lora_entry['size'] = cached_lora.get('size', lora_entry['size'])
|
||||
if cached_lora.get('sha256'):
|
||||
lora_entry['hash'] = cached_lora['sha256']
|
||||
preview_url = cached_lora.get('preview_url')
|
||||
if preview_url:
|
||||
lora_entry['thumbnailUrl'] = config.get_preview_static_url(preview_url)
|
||||
|
||||
# Try to get additional info from Civitai if we have a model version ID and still missing locally
|
||||
if not lora_entry['existsLocally'] and lora.get('modelVersionId') and metadata_provider:
|
||||
try:
|
||||
civitai_info_tuple = await metadata_provider.get_model_version_info(lora['modelVersionId'])
|
||||
# Populate lora entry with Civitai info
|
||||
populated_entry = await self.populate_lora_from_civitai(
|
||||
lora_entry,
|
||||
civitai_info_tuple,
|
||||
recipe_scanner,
|
||||
None, # No need to track base model counts
|
||||
lora_entry.get('hash', '')
|
||||
)
|
||||
if populated_entry is None:
|
||||
continue # Skip invalid LoRA types
|
||||
lora_entry = populated_entry
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for LoRA: {e}")
|
||||
lora_entry['thumbnailUrl'] = '/loras_static/images/no-preview.png'
|
||||
|
||||
loras.append(lora_entry)
|
||||
|
||||
|
||||
logger.info(f"Found {len(loras)} loras in recipe metadata")
|
||||
|
||||
# Process checkpoint information if present
|
||||
checkpoint = None
|
||||
checkpoint_data = recipe_metadata.get('checkpoint') or {}
|
||||
if isinstance(checkpoint_data, dict) and checkpoint_data:
|
||||
version_id = checkpoint_data.get('modelVersionId') or checkpoint_data.get('id')
|
||||
checkpoint_entry = {
|
||||
'id': version_id or 0,
|
||||
'modelId': checkpoint_data.get('modelId', 0),
|
||||
'name': checkpoint_data.get('name', 'Unknown Checkpoint'),
|
||||
'version': checkpoint_data.get('version', ''),
|
||||
'type': checkpoint_data.get('type', 'checkpoint'),
|
||||
'hash': checkpoint_data.get('hash', ''),
|
||||
'existsLocally': False,
|
||||
'localPath': None,
|
||||
'file_name': checkpoint_data.get('file_name', ''),
|
||||
'thumbnailUrl': '/loras_static/images/no-preview.png',
|
||||
'baseModel': '',
|
||||
'size': 0,
|
||||
'downloadUrl': '',
|
||||
'isDeleted': False
|
||||
}
|
||||
|
||||
if metadata_provider:
|
||||
try:
|
||||
civitai_info = None
|
||||
if version_id:
|
||||
civitai_info = await metadata_provider.get_model_version_info(str(version_id))
|
||||
elif checkpoint_entry.get('hash'):
|
||||
civitai_info = await metadata_provider.get_model_by_hash(checkpoint_entry['hash'])
|
||||
|
||||
if civitai_info:
|
||||
checkpoint_entry = await self.populate_checkpoint_from_civitai(checkpoint_entry, civitai_info)
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching Civitai info for checkpoint in recipe metadata: {e}")
|
||||
|
||||
checkpoint = checkpoint_entry
|
||||
|
||||
# Filter gen_params to only include recognized keys
|
||||
filtered_gen_params = {}
|
||||
@@ -105,12 +188,13 @@ class RecipeFormatParser(RecipeMetadataParser):
|
||||
filtered_gen_params[key] = value
|
||||
|
||||
return {
|
||||
'base_model': recipe_metadata.get('base_model', ''),
|
||||
'base_model': checkpoint['baseModel'] if checkpoint and checkpoint.get('baseModel') else recipe_metadata.get('base_model', ''),
|
||||
'loras': loras,
|
||||
'gen_params': filtered_gen_params,
|
||||
'tags': recipe_metadata.get('tags', []),
|
||||
'title': recipe_metadata.get('title', ''),
|
||||
'from_recipe_metadata': True
|
||||
'from_recipe_metadata': True,
|
||||
**({'checkpoint': checkpoint, 'model': checkpoint} if checkpoint else {})
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
|
||||
@@ -126,6 +126,7 @@ class BaseModelRoutes(ABC):
|
||||
metadata_manager=MetadataManager,
|
||||
metadata_loader=self._metadata_sync_service.load_local_metadata,
|
||||
recipe_scanner_factory=ServiceRegistry.get_recipe_scanner,
|
||||
update_service=self._model_update_service,
|
||||
)
|
||||
self._handler_set = None
|
||||
self._handler_mapping = None
|
||||
@@ -297,4 +298,3 @@ class BaseModelRoutes(ABC):
|
||||
if self._model_update_service is None:
|
||||
raise RuntimeError("Model update service has not been attached")
|
||||
return self._model_update_service
|
||||
|
||||
|
||||
@@ -191,6 +191,8 @@ class BaseRecipeRoutes:
|
||||
logger=logger,
|
||||
persistence_service=persistence_service,
|
||||
analysis_service=analysis_service,
|
||||
downloader_factory=get_downloader,
|
||||
civitai_client_getter=civitai_client_getter,
|
||||
)
|
||||
analysis = RecipeAnalysisHandler(
|
||||
ensure_dependencies_ready=self.ensure_dependencies_ready,
|
||||
@@ -214,4 +216,3 @@ class BaseRecipeRoutes:
|
||||
analysis=analysis,
|
||||
sharing=sharing,
|
||||
)
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import logging
|
||||
from typing import Dict
|
||||
from aiohttp import web
|
||||
|
||||
from .base_model_routes import BaseModelRoutes
|
||||
@@ -51,6 +52,19 @@ class CheckpointRoutes(BaseModelRoutes):
|
||||
def _get_expected_model_types(self) -> str:
|
||||
"""Get expected model types string for error messages"""
|
||||
return "Checkpoint"
|
||||
|
||||
def _parse_specific_params(self, request: web.Request) -> Dict:
|
||||
"""Parse Checkpoint-specific parameters"""
|
||||
params: Dict = {}
|
||||
|
||||
if 'checkpoint_hash' in request.query:
|
||||
params['hash_filters'] = {'single_hash': request.query['checkpoint_hash'].lower()}
|
||||
elif 'checkpoint_hashes' in request.query:
|
||||
params['hash_filters'] = {
|
||||
'multiple_hashes': [h.lower() for h in request.query['checkpoint_hashes'].split(',')]
|
||||
}
|
||||
|
||||
return params
|
||||
|
||||
async def get_checkpoint_info(self, request: web.Request) -> web.Response:
|
||||
"""Get detailed information for a specific checkpoint by name"""
|
||||
|
||||
@@ -27,6 +27,7 @@ from ...services.service_registry import ServiceRegistry
|
||||
from ...services.settings_manager import get_settings_manager
|
||||
from ...services.websocket_manager import ws_manager
|
||||
from ...services.downloader import get_downloader
|
||||
from ...services.errors import ResourceNotFoundError
|
||||
from ...utils.constants import (
|
||||
CIVITAI_USER_MODEL_TYPES,
|
||||
DEFAULT_NODE_COLOR,
|
||||
@@ -100,6 +101,36 @@ class NodeRegistry:
|
||||
node_type = node.get("type", "")
|
||||
type_id = NODE_TYPES.get(node_type, 0)
|
||||
bgcolor = node.get("bgcolor") or DEFAULT_NODE_COLOR
|
||||
raw_capabilities = node.get("capabilities")
|
||||
capabilities: dict = {}
|
||||
if isinstance(raw_capabilities, dict):
|
||||
capabilities = dict(raw_capabilities)
|
||||
|
||||
raw_widget_names: list | None = node.get("widget_names")
|
||||
if not isinstance(raw_widget_names, list):
|
||||
capability_widget_names = capabilities.get("widget_names")
|
||||
raw_widget_names = capability_widget_names if isinstance(capability_widget_names, list) else None
|
||||
|
||||
widget_names: list[str] = []
|
||||
if isinstance(raw_widget_names, list):
|
||||
widget_names = [
|
||||
str(widget_name)
|
||||
for widget_name in raw_widget_names
|
||||
if isinstance(widget_name, str) and widget_name
|
||||
]
|
||||
|
||||
if widget_names:
|
||||
capabilities["widget_names"] = widget_names
|
||||
else:
|
||||
capabilities.pop("widget_names", None)
|
||||
|
||||
if "supports_lora" in capabilities:
|
||||
capabilities["supports_lora"] = bool(capabilities["supports_lora"])
|
||||
|
||||
comfy_class = node.get("comfy_class")
|
||||
if not isinstance(comfy_class, str) or not comfy_class:
|
||||
comfy_class = node_type if isinstance(node_type, str) else None
|
||||
|
||||
self._nodes[unique_id] = {
|
||||
"id": node_id,
|
||||
"graph_id": graph_id,
|
||||
@@ -109,6 +140,9 @@ class NodeRegistry:
|
||||
"title": node.get("title"),
|
||||
"type": type_id,
|
||||
"type_name": node_type,
|
||||
"comfy_class": comfy_class,
|
||||
"capabilities": capabilities,
|
||||
"widget_names": widget_names,
|
||||
}
|
||||
logger.debug("Registered %s nodes in registry", len(nodes))
|
||||
self._registry_updated.set()
|
||||
@@ -146,6 +180,7 @@ class SettingsHandler:
|
||||
"download_path_templates",
|
||||
"enable_metadata_archive_db",
|
||||
"language",
|
||||
"use_portable_settings",
|
||||
"proxy_enabled",
|
||||
"proxy_type",
|
||||
"proxy_host",
|
||||
@@ -159,11 +194,15 @@ class SettingsHandler:
|
||||
"autoplay_on_hover",
|
||||
"display_density",
|
||||
"card_info_display",
|
||||
"show_folder_sidebar",
|
||||
"include_trigger_words",
|
||||
"show_only_sfw",
|
||||
"compact_mode",
|
||||
"priority_tags",
|
||||
"model_card_footer_action",
|
||||
"model_name_display",
|
||||
"update_flag_strategy",
|
||||
"auto_organize_exclusions",
|
||||
)
|
||||
|
||||
_PROXY_KEYS = {"proxy_enabled", "proxy_host", "proxy_port", "proxy_username", "proxy_password", "proxy_type"}
|
||||
@@ -204,7 +243,16 @@ class SettingsHandler:
|
||||
value = self._settings.get(key)
|
||||
if value is not None:
|
||||
response_data[key] = value
|
||||
return web.json_response({"success": True, "settings": response_data})
|
||||
settings_file = getattr(self._settings, "settings_file", None)
|
||||
if settings_file:
|
||||
response_data["settings_file"] = settings_file
|
||||
messages_getter = getattr(self._settings, "get_startup_messages", None)
|
||||
messages = list(messages_getter()) if callable(messages_getter) else []
|
||||
return web.json_response({
|
||||
"success": True,
|
||||
"settings": response_data,
|
||||
"messages": messages,
|
||||
})
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
logger.error("Error getting settings: %s", exc, exc_info=True)
|
||||
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||
@@ -575,7 +623,10 @@ class ModelLibraryHandler:
|
||||
if not metadata_provider:
|
||||
return web.json_response({"success": False, "error": "Metadata provider not available"}, status=503)
|
||||
|
||||
response = await metadata_provider.get_model_versions(model_id)
|
||||
try:
|
||||
response = await metadata_provider.get_model_versions(model_id)
|
||||
except ResourceNotFoundError:
|
||||
return web.json_response({"success": False, "error": "Model not found"}, status=404)
|
||||
if not response or not response.get("modelVersions"):
|
||||
return web.json_response({"success": False, "error": "Model not found"}, status=404)
|
||||
|
||||
@@ -918,6 +969,88 @@ class NodeRegistryHandler:
|
||||
logger.error("Failed to get registry: %s", exc, exc_info=True)
|
||||
return web.json_response({"success": False, "error": "Internal Error", "message": str(exc)}, status=500)
|
||||
|
||||
async def update_node_widget(self, request: web.Request) -> web.Response:
|
||||
try:
|
||||
data = await request.json()
|
||||
widget_name = data.get("widget_name")
|
||||
value = data.get("value")
|
||||
node_ids = data.get("node_ids")
|
||||
|
||||
if not isinstance(widget_name, str) or not widget_name:
|
||||
return web.json_response({"success": False, "error": "Missing widget_name parameter"}, status=400)
|
||||
|
||||
if not isinstance(value, str) or not value:
|
||||
return web.json_response({"success": False, "error": "Missing value parameter"}, status=400)
|
||||
|
||||
if not isinstance(node_ids, list) or not node_ids:
|
||||
return web.json_response(
|
||||
{"success": False, "error": "node_ids must be a non-empty list"},
|
||||
status=400,
|
||||
)
|
||||
|
||||
results = []
|
||||
for entry in node_ids:
|
||||
node_identifier = entry
|
||||
graph_identifier = None
|
||||
if isinstance(entry, dict):
|
||||
node_identifier = entry.get("node_id")
|
||||
graph_identifier = entry.get("graph_id")
|
||||
|
||||
if node_identifier is None:
|
||||
results.append(
|
||||
{
|
||||
"node_id": node_identifier,
|
||||
"graph_id": graph_identifier,
|
||||
"success": False,
|
||||
"error": "Missing node_id parameter",
|
||||
}
|
||||
)
|
||||
continue
|
||||
|
||||
try:
|
||||
parsed_node_id = int(node_identifier)
|
||||
except (TypeError, ValueError):
|
||||
parsed_node_id = node_identifier
|
||||
|
||||
payload = {
|
||||
"id": parsed_node_id,
|
||||
"widget_name": widget_name,
|
||||
"value": value,
|
||||
}
|
||||
|
||||
if graph_identifier is not None:
|
||||
payload["graph_id"] = str(graph_identifier)
|
||||
|
||||
try:
|
||||
self._prompt_server.instance.send_sync("lm_widget_update", payload)
|
||||
results.append(
|
||||
{
|
||||
"node_id": parsed_node_id,
|
||||
"graph_id": payload.get("graph_id"),
|
||||
"success": True,
|
||||
}
|
||||
)
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
logger.error(
|
||||
"Error sending widget update to node %s (graph %s): %s",
|
||||
parsed_node_id,
|
||||
graph_identifier,
|
||||
exc,
|
||||
)
|
||||
results.append(
|
||||
{
|
||||
"node_id": parsed_node_id,
|
||||
"graph_id": payload.get("graph_id"),
|
||||
"success": False,
|
||||
"error": str(exc),
|
||||
}
|
||||
)
|
||||
|
||||
return web.json_response({"success": True, "results": results})
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
logger.error("Failed to update node widget: %s", exc, exc_info=True)
|
||||
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||
|
||||
|
||||
class MiscHandlerSet:
|
||||
"""Aggregate handlers into a lookup compatible with the registrar."""
|
||||
@@ -961,6 +1094,7 @@ class MiscHandlerSet:
|
||||
"get_trained_words": self.trained_words.get_trained_words,
|
||||
"get_model_example_files": self.model_examples.get_model_example_files,
|
||||
"register_nodes": self.node_registry.register_nodes,
|
||||
"update_node_widget": self.node_registry.update_node_widget,
|
||||
"get_registry": self.node_registry.get_registry,
|
||||
"check_model_exists": self.model_library.check_model_exists,
|
||||
"get_civitai_user_models": self.model_library.get_civitai_user_models,
|
||||
|
||||
@@ -6,7 +6,7 @@ import json
|
||||
import logging
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from typing import Awaitable, Callable, Dict, Iterable, List, Mapping, Optional
|
||||
from typing import Any, Awaitable, Callable, Dict, Iterable, List, Mapping, Optional
|
||||
|
||||
from aiohttp import web
|
||||
import jinja2
|
||||
@@ -16,7 +16,7 @@ from ...services.download_coordinator import DownloadCoordinator
|
||||
from ...services.metadata_sync_service import MetadataSyncService
|
||||
from ...services.model_file_service import ModelMoveService
|
||||
from ...services.preview_asset_service import PreviewAssetService
|
||||
from ...services.settings_manager import SettingsManager
|
||||
from ...services.settings_manager import SettingsManager, get_settings_manager
|
||||
from ...services.tag_update_service import TagUpdateService
|
||||
from ...services.use_cases import (
|
||||
AutoOrganizeInProgressError,
|
||||
@@ -29,10 +29,18 @@ from ...services.use_cases import (
|
||||
)
|
||||
from ...services.websocket_manager import WebSocketManager
|
||||
from ...services.websocket_progress_callback import WebSocketProgressCallback
|
||||
from ...services.errors import RateLimitError
|
||||
from ...services.errors import RateLimitError, ResourceNotFoundError
|
||||
from ...utils.civitai_utils import resolve_license_payload
|
||||
from ...utils.file_utils import calculate_sha256
|
||||
from ...utils.metadata_manager import MetadataManager
|
||||
|
||||
LICENSE_FIELDS = (
|
||||
"allowNoCredit",
|
||||
"allowCommercialUse",
|
||||
"allowDerivatives",
|
||||
"allowDifferentLicense",
|
||||
)
|
||||
|
||||
|
||||
class ModelPageView:
|
||||
"""Render the HTML view for model listings."""
|
||||
@@ -144,7 +152,30 @@ class ModelListingHandler:
|
||||
fuzzy_search = request.query.get("fuzzy_search", "false").lower() == "true"
|
||||
|
||||
base_models = request.query.getall("base_model", [])
|
||||
tags = request.query.getall("tag", [])
|
||||
model_types = list(request.query.getall("model_type", []))
|
||||
model_types.extend(request.query.getall("civitai_model_type", []))
|
||||
# Support legacy ?tag=foo plus new ?tag_include/foo & ?tag_exclude parameters
|
||||
legacy_tags = request.query.getall("tag", [])
|
||||
if not legacy_tags:
|
||||
legacy_csv = request.query.get("tags")
|
||||
if legacy_csv:
|
||||
legacy_tags = [tag.strip() for tag in legacy_csv.split(",") if tag.strip()]
|
||||
|
||||
include_tags = request.query.getall("tag_include", [])
|
||||
exclude_tags = request.query.getall("tag_exclude", [])
|
||||
|
||||
tag_filters: Dict[str, str] = {}
|
||||
for tag in legacy_tags:
|
||||
if tag:
|
||||
tag_filters[tag] = "include"
|
||||
|
||||
for tag in include_tags:
|
||||
if tag:
|
||||
tag_filters[tag] = "include"
|
||||
|
||||
for tag in exclude_tags:
|
||||
if tag:
|
||||
tag_filters[tag] = "exclude"
|
||||
favorites_only = request.query.get("favorites_only", "false").lower() == "true"
|
||||
|
||||
search_options = {
|
||||
@@ -166,10 +197,20 @@ class ModelListingHandler:
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
pass
|
||||
|
||||
has_update = request.query.get("has_update", "false")
|
||||
has_update_filter = (
|
||||
has_update.lower() in {"1", "true", "yes"} if isinstance(has_update, str) else False
|
||||
)
|
||||
update_available_only = request.query.get("update_available_only", "false").lower() == "true"
|
||||
|
||||
# New license-based query filters
|
||||
credit_required = request.query.get("credit_required")
|
||||
if credit_required is not None:
|
||||
credit_required = credit_required.lower() not in ("false", "0", "")
|
||||
else:
|
||||
credit_required = None # None means no filter applied
|
||||
|
||||
allow_selling_generated_content = request.query.get("allow_selling_generated_content")
|
||||
if allow_selling_generated_content is not None:
|
||||
allow_selling_generated_content = allow_selling_generated_content.lower() not in ("false", "0", "")
|
||||
else:
|
||||
allow_selling_generated_content = None # None means no filter applied
|
||||
|
||||
return {
|
||||
"page": page,
|
||||
@@ -179,11 +220,14 @@ class ModelListingHandler:
|
||||
"search": search,
|
||||
"fuzzy_search": fuzzy_search,
|
||||
"base_models": base_models,
|
||||
"tags": tags,
|
||||
"tags": tag_filters,
|
||||
"search_options": search_options,
|
||||
"hash_filters": hash_filters,
|
||||
"favorites_only": favorites_only,
|
||||
"has_update": has_update_filter,
|
||||
"update_available_only": update_available_only,
|
||||
"credit_required": credit_required,
|
||||
"allow_selling_generated_content": allow_selling_generated_content,
|
||||
"model_types": model_types,
|
||||
**self._parse_specific_params(request),
|
||||
}
|
||||
|
||||
@@ -516,6 +560,17 @@ class ModelQueryHandler:
|
||||
self._logger.error("Error retrieving base models: %s", exc)
|
||||
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||
|
||||
async def get_model_types(self, request: web.Request) -> web.Response:
|
||||
try:
|
||||
limit = int(request.query.get("limit", "20"))
|
||||
if limit < 1 or limit > 100:
|
||||
limit = 20
|
||||
model_types = await self._service.get_model_types(limit)
|
||||
return web.json_response({"success": True, "model_types": model_types})
|
||||
except Exception as exc:
|
||||
self._logger.error("Error retrieving model types: %s", exc)
|
||||
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||
|
||||
async def scan_models(self, request: web.Request) -> web.Response:
|
||||
try:
|
||||
full_rebuild = request.query.get("full_rebuild", "false").lower() == "true"
|
||||
@@ -626,9 +681,16 @@ class ModelQueryHandler:
|
||||
model_name = request.query.get("name")
|
||||
if not model_name:
|
||||
return web.Response(text=f"{self._service.model_type.capitalize()} file name is required", status=400)
|
||||
include_license_flags = (request.query.get("license_flags", "").strip().lower() in {"1", "true", "yes", "on"})
|
||||
preview_url = await self._service.get_model_preview_url(model_name)
|
||||
if preview_url:
|
||||
return web.json_response({"success": True, "preview_url": preview_url})
|
||||
response_payload: dict[str, object] = {"success": True, "preview_url": preview_url}
|
||||
if include_license_flags:
|
||||
model_data = await self._service.get_model_info_by_name(model_name)
|
||||
license_flags = (model_data or {}).get("license_flags")
|
||||
if license_flags is not None:
|
||||
response_payload["license_flags"] = int(license_flags)
|
||||
return web.json_response(response_payload)
|
||||
return web.json_response({"success": False, "error": f"No preview URL found for the specified {self._service.model_type}"}, status=404)
|
||||
except Exception as exc:
|
||||
self._logger.error("Error getting %s preview URL: %s", self._service.model_type, exc, exc_info=True)
|
||||
@@ -863,7 +925,10 @@ class ModelCivitaiHandler:
|
||||
try:
|
||||
model_id = request.match_info["model_id"]
|
||||
metadata_provider = await self._metadata_provider_factory()
|
||||
response = await metadata_provider.get_model_versions(model_id)
|
||||
try:
|
||||
response = await metadata_provider.get_model_versions(model_id)
|
||||
except ResourceNotFoundError:
|
||||
return web.Response(status=404, text="Model not found")
|
||||
if not response or not response.get("modelVersions"):
|
||||
return web.Response(status=404, text="Model not found")
|
||||
|
||||
@@ -986,16 +1051,23 @@ class ModelAutoOrganizeHandler:
|
||||
async def auto_organize_models(self, request: web.Request) -> web.Response:
|
||||
try:
|
||||
file_paths = None
|
||||
exclusion_patterns = None
|
||||
settings_manager = get_settings_manager()
|
||||
if request.method == "POST":
|
||||
try:
|
||||
data = await request.json()
|
||||
file_paths = data.get("file_paths")
|
||||
if "exclusion_patterns" in data:
|
||||
exclusion_patterns = settings_manager.normalize_auto_organize_exclusions(
|
||||
data.get("exclusion_patterns")
|
||||
)
|
||||
except Exception: # pragma: no cover - permissive path
|
||||
pass
|
||||
|
||||
result = await self._use_case.execute(
|
||||
file_paths=file_paths,
|
||||
progress_callback=self._progress_callback,
|
||||
exclusion_patterns=exclusion_patterns,
|
||||
)
|
||||
return web.json_response(result.to_dict())
|
||||
except AutoOrganizeInProgressError:
|
||||
@@ -1040,11 +1112,97 @@ class ModelUpdateHandler:
|
||||
self._metadata_provider_selector = metadata_provider_selector
|
||||
self._logger = logger
|
||||
|
||||
async def fetch_missing_civitai_license_data(self, request: web.Request) -> web.Response:
|
||||
payload = await self._read_json(request)
|
||||
target_model_ids = self._extract_target_model_ids(payload)
|
||||
|
||||
provider = await self._get_civitai_provider()
|
||||
if provider is None:
|
||||
return web.json_response(
|
||||
{"success": False, "error": "Civitai provider not available"},
|
||||
status=503,
|
||||
)
|
||||
|
||||
try:
|
||||
cache = await self._service.scanner.get_cached_data()
|
||||
except Exception as exc:
|
||||
self._logger.error("Failed to load cache for license refresh: %s", exc, exc_info=True)
|
||||
cache = None
|
||||
|
||||
target_set = set(target_model_ids) if target_model_ids is not None else None
|
||||
candidates = await self._collect_models_missing_license(cache, target_set)
|
||||
if not candidates:
|
||||
return web.json_response({"success": True, "updated": []})
|
||||
|
||||
model_ids = sorted(candidates.keys())
|
||||
try:
|
||||
license_map = await self._fetch_license_info(provider, model_ids)
|
||||
except RateLimitError as exc:
|
||||
return web.json_response(
|
||||
{"success": False, "error": str(exc) or "Rate limited"},
|
||||
status=429,
|
||||
)
|
||||
except Exception as exc: # pragma: no cover - defensive log
|
||||
self._logger.error("Failed to fetch license info: %s", exc, exc_info=True)
|
||||
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||
|
||||
updated: List[Dict[str, str]] = []
|
||||
errors: List[Dict[str, str]] = []
|
||||
for model_id in model_ids:
|
||||
license_payload = license_map.get(model_id)
|
||||
if not license_payload:
|
||||
continue
|
||||
resolved_payload = resolve_license_payload(license_payload)
|
||||
for context in candidates.get(model_id, []):
|
||||
metadata_path = context["file_path"]
|
||||
metadata_payload = context["metadata"]
|
||||
civitai_section = metadata_payload.setdefault("civitai", {})
|
||||
model_section = civitai_section.get("model")
|
||||
if not isinstance(model_section, Mapping):
|
||||
model_section = {}
|
||||
model_section.update(resolved_payload)
|
||||
civitai_section["model"] = model_section
|
||||
metadata_payload["civitai"] = civitai_section
|
||||
try:
|
||||
await MetadataManager.save_metadata(metadata_path, metadata_payload)
|
||||
updated.append({"modelId": model_id, "filePath": metadata_path})
|
||||
except Exception as exc:
|
||||
self._logger.error(
|
||||
"Failed to save metadata for %s: %s",
|
||||
metadata_path,
|
||||
exc,
|
||||
exc_info=True,
|
||||
)
|
||||
errors.append({"filePath": metadata_path, "error": str(exc)})
|
||||
|
||||
response_payload = {"success": True, "updated": updated}
|
||||
missing_model_ids = [mid for mid in model_ids if mid not in license_map]
|
||||
if missing_model_ids:
|
||||
response_payload["missingModelIds"] = missing_model_ids
|
||||
if errors:
|
||||
response_payload["errors"] = errors
|
||||
return web.json_response(response_payload)
|
||||
|
||||
async def refresh_model_updates(self, request: web.Request) -> web.Response:
|
||||
payload = await self._read_json(request)
|
||||
force_refresh = self._parse_bool(request.query.get("force")) or self._parse_bool(
|
||||
payload.get("force")
|
||||
)
|
||||
|
||||
raw_model_ids = payload.get("modelIds")
|
||||
if raw_model_ids is None:
|
||||
raw_model_ids = payload.get("model_ids")
|
||||
|
||||
target_model_ids: list[int] = []
|
||||
if isinstance(raw_model_ids, (list, tuple, set)):
|
||||
for value in raw_model_ids:
|
||||
normalized = self._normalize_model_id(value)
|
||||
if normalized is not None:
|
||||
target_model_ids.append(normalized)
|
||||
|
||||
if target_model_ids:
|
||||
target_model_ids = sorted(set(target_model_ids))
|
||||
|
||||
provider = await self._get_civitai_provider()
|
||||
if provider is None:
|
||||
return web.json_response(
|
||||
@@ -1057,6 +1215,7 @@ class ModelUpdateHandler:
|
||||
self._service.scanner,
|
||||
provider,
|
||||
force_refresh=force_refresh,
|
||||
target_model_ids=target_model_ids or None,
|
||||
)
|
||||
except RateLimitError as exc:
|
||||
return web.json_response(
|
||||
@@ -1066,10 +1225,16 @@ class ModelUpdateHandler:
|
||||
self._logger.error("Failed to refresh model updates: %s", exc, exc_info=True)
|
||||
return web.json_response({"success": False, "error": str(exc)}, status=500)
|
||||
|
||||
serialized_records = []
|
||||
for record in records.values():
|
||||
has_update_fn = getattr(record, "has_update", None)
|
||||
if callable(has_update_fn) and has_update_fn():
|
||||
serialized_records.append(self._serialize_record(record))
|
||||
|
||||
return web.json_response(
|
||||
{
|
||||
"success": True,
|
||||
"records": [self._serialize_record(record) for record in records.values()],
|
||||
"records": serialized_records,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -1085,6 +1250,28 @@ class ModelUpdateHandler:
|
||||
)
|
||||
return web.json_response({"success": True, "record": self._serialize_record(record)})
|
||||
|
||||
async def set_version_update_ignore(self, request: web.Request) -> web.Response:
|
||||
payload = await self._read_json(request)
|
||||
model_id = self._normalize_model_id(payload.get("modelId"))
|
||||
version_id = self._normalize_model_id(payload.get("versionId"))
|
||||
if model_id is None or version_id is None:
|
||||
return web.json_response(
|
||||
{"success": False, "error": "modelId and versionId are required"},
|
||||
status=400,
|
||||
)
|
||||
|
||||
should_ignore = self._parse_bool(payload.get("shouldIgnore"))
|
||||
record = await self._update_service.set_version_should_ignore(
|
||||
self._service.model_type,
|
||||
model_id,
|
||||
version_id,
|
||||
should_ignore,
|
||||
)
|
||||
overrides = await self._build_version_context(record)
|
||||
return web.json_response(
|
||||
{"success": True, "record": self._serialize_record(record, version_context=overrides)}
|
||||
)
|
||||
|
||||
async def get_model_update_status(self, request: web.Request) -> web.Response:
|
||||
model_id = self._normalize_model_id(request.match_info.get("model_id"))
|
||||
if model_id is None:
|
||||
@@ -1107,6 +1294,33 @@ class ModelUpdateHandler:
|
||||
|
||||
return web.json_response({"success": True, "record": self._serialize_record(record)})
|
||||
|
||||
async def get_model_versions(self, request: web.Request) -> web.Response:
|
||||
model_id = self._normalize_model_id(request.match_info.get("model_id"))
|
||||
if model_id is None:
|
||||
return web.json_response(
|
||||
{"success": False, "error": "model_id must be an integer"}, status=400
|
||||
)
|
||||
|
||||
refresh = self._parse_bool(request.query.get("refresh"))
|
||||
force = self._parse_bool(request.query.get("force"))
|
||||
|
||||
try:
|
||||
record = await self._get_or_refresh_record(model_id, refresh=refresh, force=force)
|
||||
except RateLimitError as exc:
|
||||
return web.json_response(
|
||||
{"success": False, "error": str(exc) or "Rate limited"}, status=429
|
||||
)
|
||||
|
||||
if record is None:
|
||||
return web.json_response(
|
||||
{"success": False, "error": "Model not tracked"}, status=404
|
||||
)
|
||||
|
||||
overrides = await self._build_version_context(record)
|
||||
return web.json_response(
|
||||
{"success": True, "record": self._serialize_record(record, version_context=overrides)}
|
||||
)
|
||||
|
||||
async def _get_or_refresh_record(
|
||||
self, model_id: int, *, refresh: bool, force: bool
|
||||
) -> Optional[object]:
|
||||
@@ -1133,6 +1347,132 @@ class ModelUpdateHandler:
|
||||
self._logger.error("Failed to acquire civitai provider: %s", exc, exc_info=True)
|
||||
return None
|
||||
|
||||
async def _collect_models_missing_license(
|
||||
self,
|
||||
cache,
|
||||
target_model_ids: Optional[set[int]],
|
||||
) -> Dict[int, List[Dict[str, Any]]]:
|
||||
entries: Dict[int, List[Dict[str, Any]]] = {}
|
||||
if cache is None:
|
||||
return entries
|
||||
|
||||
raw_data = getattr(cache, "raw_data", None) or []
|
||||
seen_paths: set[str] = set()
|
||||
target_set = target_model_ids
|
||||
|
||||
for item in raw_data:
|
||||
if not isinstance(item, Mapping):
|
||||
continue
|
||||
file_path = item.get("file_path")
|
||||
if not isinstance(file_path, str) or not file_path or file_path in seen_paths:
|
||||
continue
|
||||
seen_paths.add(file_path)
|
||||
|
||||
civitai_entry = item.get("civitai")
|
||||
if not isinstance(civitai_entry, Mapping):
|
||||
continue
|
||||
|
||||
model_id = self._normalize_model_id(civitai_entry.get("modelId"))
|
||||
if model_id is None:
|
||||
continue
|
||||
if target_set is not None and model_id not in target_set:
|
||||
continue
|
||||
|
||||
try:
|
||||
metadata_obj, should_skip = await MetadataManager.load_metadata(file_path)
|
||||
except Exception as exc:
|
||||
self._logger.debug("Failed to load metadata for %s: %s", file_path, exc)
|
||||
continue
|
||||
if metadata_obj is None or should_skip:
|
||||
continue
|
||||
|
||||
metadata_payload = self._convert_metadata_to_dict(metadata_obj)
|
||||
civitai_payload = metadata_payload.get("civitai")
|
||||
if not isinstance(civitai_payload, Mapping):
|
||||
civitai_payload = {}
|
||||
|
||||
model_payload = civitai_payload.get("model")
|
||||
if not isinstance(model_payload, Mapping):
|
||||
model_payload = {}
|
||||
|
||||
missing = [key for key in LICENSE_FIELDS if key not in model_payload]
|
||||
if not missing:
|
||||
continue
|
||||
|
||||
civitai_payload["model"] = model_payload
|
||||
metadata_payload["civitai"] = civitai_payload
|
||||
entries.setdefault(model_id, []).append(
|
||||
{"file_path": file_path, "metadata": metadata_payload}
|
||||
)
|
||||
|
||||
return entries
|
||||
|
||||
async def _fetch_license_info(
|
||||
self,
|
||||
provider,
|
||||
model_ids: List[int],
|
||||
) -> Dict[int, Dict[str, Any]]:
|
||||
if not model_ids:
|
||||
return {}
|
||||
|
||||
BATCH_SIZE = 100
|
||||
aggregated: Dict[int, Dict[str, Any]] = {}
|
||||
for start in range(0, len(model_ids), BATCH_SIZE):
|
||||
chunk = model_ids[start : start + BATCH_SIZE]
|
||||
response = await provider.get_model_versions_bulk(chunk)
|
||||
if not isinstance(response, Mapping):
|
||||
continue
|
||||
|
||||
for raw_id, payload in response.items():
|
||||
normalized_id = self._normalize_model_id(raw_id)
|
||||
if normalized_id is None or not isinstance(payload, Mapping):
|
||||
continue
|
||||
license_data: Dict[str, Any] = {}
|
||||
for field in LICENSE_FIELDS:
|
||||
license_data[field] = payload.get(field)
|
||||
aggregated[normalized_id] = license_data
|
||||
|
||||
return aggregated
|
||||
|
||||
def _extract_target_model_ids(self, payload: Dict) -> Optional[List[int]]:
|
||||
if not isinstance(payload, Mapping):
|
||||
return None
|
||||
|
||||
raw_ids = payload.get("modelIds")
|
||||
if raw_ids is None:
|
||||
raw_ids = payload.get("model_ids")
|
||||
|
||||
if not isinstance(raw_ids, (list, tuple, set)):
|
||||
return None
|
||||
|
||||
normalized: List[int] = []
|
||||
for candidate in raw_ids:
|
||||
model_id = self._normalize_model_id(candidate)
|
||||
if model_id is not None:
|
||||
normalized.append(model_id)
|
||||
|
||||
if not normalized:
|
||||
return None
|
||||
|
||||
return sorted(set(normalized))
|
||||
|
||||
@staticmethod
|
||||
def _convert_metadata_to_dict(metadata: Any) -> Dict[str, Any]:
|
||||
if metadata is None:
|
||||
return {}
|
||||
|
||||
to_dict = getattr(metadata, "to_dict", None)
|
||||
if callable(to_dict):
|
||||
try:
|
||||
return to_dict()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if isinstance(metadata, Mapping):
|
||||
return dict(metadata)
|
||||
|
||||
return {}
|
||||
|
||||
async def _read_json(self, request: web.Request) -> Dict:
|
||||
if not request.can_read_body:
|
||||
return {}
|
||||
@@ -1160,8 +1500,13 @@ class ModelUpdateHandler:
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _serialize_record(record) -> Dict:
|
||||
def _serialize_record(
|
||||
self,
|
||||
record,
|
||||
*,
|
||||
version_context: Optional[Dict[int, Dict[str, Optional[str]]]] = None,
|
||||
) -> Dict:
|
||||
context = version_context or {}
|
||||
return {
|
||||
"modelType": record.model_type,
|
||||
"modelId": record.model_id,
|
||||
@@ -1169,10 +1514,60 @@ class ModelUpdateHandler:
|
||||
"versionIds": record.version_ids,
|
||||
"inLibraryVersionIds": record.in_library_version_ids,
|
||||
"lastCheckedAt": record.last_checked_at,
|
||||
"shouldIgnore": record.should_ignore,
|
||||
"shouldIgnore": record.should_ignore_model,
|
||||
"hasUpdate": record.has_update(),
|
||||
"versions": [
|
||||
self._serialize_version(version, context.get(version.version_id))
|
||||
for version in record.versions
|
||||
],
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _serialize_version(version, context: Optional[Dict[str, Optional[str]]]) -> Dict:
|
||||
context = context or {}
|
||||
preview_override = context.get("preview_override")
|
||||
preview_url = preview_override if preview_override is not None else version.preview_url
|
||||
return {
|
||||
"versionId": version.version_id,
|
||||
"name": version.name,
|
||||
"baseModel": version.base_model,
|
||||
"releasedAt": version.released_at,
|
||||
"sizeBytes": version.size_bytes,
|
||||
"previewUrl": preview_url,
|
||||
"isInLibrary": version.is_in_library,
|
||||
"shouldIgnore": version.should_ignore,
|
||||
"filePath": context.get("file_path"),
|
||||
"fileName": context.get("file_name"),
|
||||
}
|
||||
|
||||
async def _build_version_context(self, record) -> Dict[int, Dict[str, Optional[str]]]:
|
||||
context: Dict[int, Dict[str, Optional[str]]] = {}
|
||||
try:
|
||||
cache = await self._service.scanner.get_cached_data()
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
self._logger.debug("Failed to load cache while building preview overrides: %s", exc)
|
||||
return context
|
||||
|
||||
version_index = getattr(cache, "version_index", None)
|
||||
if not version_index:
|
||||
return context
|
||||
|
||||
for version in record.versions:
|
||||
if not version.is_in_library:
|
||||
continue
|
||||
cache_entry = version_index.get(version.version_id)
|
||||
if isinstance(cache_entry, Mapping):
|
||||
preview = cache_entry.get("preview_url")
|
||||
context_entry: Dict[str, Optional[str]] = {
|
||||
"file_path": cache_entry.get("file_path"),
|
||||
"file_name": cache_entry.get("file_name"),
|
||||
"preview_override": None,
|
||||
}
|
||||
if isinstance(preview, str) and preview:
|
||||
context_entry["preview_override"] = config.get_preview_static_url(preview)
|
||||
context[version.version_id] = context_entry
|
||||
return context
|
||||
|
||||
|
||||
@dataclass
|
||||
class ModelHandlerSet:
|
||||
@@ -1205,6 +1600,7 @@ class ModelHandlerSet:
|
||||
"verify_duplicates": self.management.verify_duplicates,
|
||||
"get_top_tags": self.query.get_top_tags,
|
||||
"get_base_models": self.query.get_base_models,
|
||||
"get_model_types": self.query.get_model_types,
|
||||
"scan_models": self.query.scan_models,
|
||||
"get_model_roots": self.query.get_model_roots,
|
||||
"get_folders": self.query.get_folders,
|
||||
@@ -1232,7 +1628,9 @@ class ModelHandlerSet:
|
||||
"get_model_description": self.query.get_model_description,
|
||||
"get_relative_paths": self.query.get_relative_paths,
|
||||
"refresh_model_updates": self.updates.refresh_model_updates,
|
||||
"fetch_missing_civitai_license_data": self.updates.fetch_missing_civitai_license_data,
|
||||
"set_model_update_ignore": self.updates.set_model_update_ignore,
|
||||
"set_version_update_ignore": self.updates.set_version_update_ignore,
|
||||
"get_model_update_status": self.updates.get_model_update_status,
|
||||
"get_model_versions": self.updates.get_model_versions,
|
||||
}
|
||||
|
||||
|
||||
@@ -4,8 +4,10 @@ from __future__ import annotations
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Awaitable, Callable, Dict, Mapping, Optional
|
||||
from typing import Any, Awaitable, Callable, Dict, List, Mapping, Optional
|
||||
|
||||
from aiohttp import web
|
||||
|
||||
@@ -20,6 +22,7 @@ from ...services.recipes import (
|
||||
RecipeSharingService,
|
||||
RecipeValidationError,
|
||||
)
|
||||
from ...services.metadata_service import get_default_metadata_provider
|
||||
|
||||
Logger = logging.Logger
|
||||
EnsureDependenciesCallable = Callable[[], Awaitable[None]]
|
||||
@@ -45,6 +48,7 @@ class RecipeHandlerSet:
|
||||
"render_page": self.page_view.render_page,
|
||||
"list_recipes": self.listing.list_recipes,
|
||||
"get_recipe": self.listing.get_recipe,
|
||||
"import_remote_recipe": self.management.import_remote_recipe,
|
||||
"analyze_uploaded_image": self.analysis.analyze_uploaded_image,
|
||||
"analyze_local_image": self.analysis.analyze_local_image,
|
||||
"save_recipe": self.management.save_recipe,
|
||||
@@ -152,14 +156,31 @@ class RecipeListingHandler:
|
||||
"lora_model": request.query.get("search_lora_model", "true").lower() == "true",
|
||||
}
|
||||
|
||||
filters: Dict[str, list[str]] = {}
|
||||
filters: Dict[str, Any] = {}
|
||||
base_models = request.query.get("base_models")
|
||||
if base_models:
|
||||
filters["base_model"] = base_models.split(",")
|
||||
|
||||
tags = request.query.get("tags")
|
||||
if tags:
|
||||
filters["tags"] = tags.split(",")
|
||||
tag_filters: Dict[str, str] = {}
|
||||
legacy_tags = request.query.get("tags")
|
||||
if legacy_tags:
|
||||
for tag in legacy_tags.split(","):
|
||||
tag = tag.strip()
|
||||
if tag:
|
||||
tag_filters[tag] = "include"
|
||||
|
||||
include_tags = request.query.getall("tag_include", [])
|
||||
for tag in include_tags:
|
||||
if tag:
|
||||
tag_filters[tag] = "include"
|
||||
|
||||
exclude_tags = request.query.getall("tag_exclude", [])
|
||||
for tag in exclude_tags:
|
||||
if tag:
|
||||
tag_filters[tag] = "exclude"
|
||||
|
||||
if tag_filters:
|
||||
filters["tags"] = tag_filters
|
||||
|
||||
lora_hash = request.query.get("lora_hash")
|
||||
|
||||
@@ -387,12 +408,16 @@ class RecipeManagementHandler:
|
||||
logger: Logger,
|
||||
persistence_service: RecipePersistenceService,
|
||||
analysis_service: RecipeAnalysisService,
|
||||
downloader_factory,
|
||||
civitai_client_getter: CivitaiClientGetter,
|
||||
) -> None:
|
||||
self._ensure_dependencies_ready = ensure_dependencies_ready
|
||||
self._recipe_scanner_getter = recipe_scanner_getter
|
||||
self._logger = logger
|
||||
self._persistence_service = persistence_service
|
||||
self._analysis_service = analysis_service
|
||||
self._downloader_factory = downloader_factory
|
||||
self._civitai_client_getter = civitai_client_getter
|
||||
|
||||
async def save_recipe(self, request: web.Request) -> web.Response:
|
||||
try:
|
||||
@@ -419,6 +444,64 @@ class RecipeManagementHandler:
|
||||
self._logger.error("Error saving recipe: %s", exc, exc_info=True)
|
||||
return web.json_response({"error": str(exc)}, status=500)
|
||||
|
||||
async def import_remote_recipe(self, request: web.Request) -> web.Response:
|
||||
try:
|
||||
await self._ensure_dependencies_ready()
|
||||
recipe_scanner = self._recipe_scanner_getter()
|
||||
if recipe_scanner is None:
|
||||
raise RuntimeError("Recipe scanner unavailable")
|
||||
|
||||
params = request.rel_url.query
|
||||
image_url = params.get("image_url")
|
||||
name = params.get("name")
|
||||
resources_raw = params.get("resources")
|
||||
if not image_url:
|
||||
raise RecipeValidationError("Missing required field: image_url")
|
||||
if not name:
|
||||
raise RecipeValidationError("Missing required field: name")
|
||||
if not resources_raw:
|
||||
raise RecipeValidationError("Missing required field: resources")
|
||||
|
||||
checkpoint_entry, lora_entries = self._parse_resources_payload(resources_raw)
|
||||
gen_params = self._parse_gen_params(params.get("gen_params"))
|
||||
metadata: Dict[str, Any] = {
|
||||
"base_model": params.get("base_model", "") or "",
|
||||
"loras": lora_entries,
|
||||
}
|
||||
source_path = params.get("source_path")
|
||||
if source_path:
|
||||
metadata["source_path"] = source_path
|
||||
if gen_params is not None:
|
||||
metadata["gen_params"] = gen_params
|
||||
if checkpoint_entry:
|
||||
metadata["checkpoint"] = checkpoint_entry
|
||||
gen_params_ref = metadata.setdefault("gen_params", {})
|
||||
if "checkpoint" not in gen_params_ref:
|
||||
gen_params_ref["checkpoint"] = checkpoint_entry
|
||||
base_model_from_metadata = await self._resolve_base_model_from_checkpoint(checkpoint_entry)
|
||||
if base_model_from_metadata:
|
||||
metadata["base_model"] = base_model_from_metadata
|
||||
|
||||
tags = self._parse_tags(params.get("tags"))
|
||||
image_bytes = await self._download_image_bytes(image_url)
|
||||
|
||||
result = await self._persistence_service.save_recipe(
|
||||
recipe_scanner=recipe_scanner,
|
||||
image_bytes=image_bytes,
|
||||
image_base64=None,
|
||||
name=name,
|
||||
tags=tags,
|
||||
metadata=metadata,
|
||||
)
|
||||
return web.json_response(result.payload, status=result.status)
|
||||
except RecipeValidationError as exc:
|
||||
return web.json_response({"error": str(exc)}, status=400)
|
||||
except RecipeDownloadError as exc:
|
||||
return web.json_response({"error": str(exc)}, status=400)
|
||||
except Exception as exc:
|
||||
self._logger.error("Error importing recipe from remote source: %s", exc, exc_info=True)
|
||||
return web.json_response({"error": str(exc)}, status=500)
|
||||
|
||||
async def delete_recipe(self, request: web.Request) -> web.Response:
|
||||
try:
|
||||
await self._ensure_dependencies_ready()
|
||||
@@ -578,6 +661,140 @@ class RecipeManagementHandler:
|
||||
"metadata": metadata,
|
||||
}
|
||||
|
||||
def _parse_tags(self, tag_text: Optional[str]) -> list[str]:
|
||||
if not tag_text:
|
||||
return []
|
||||
return [tag.strip() for tag in tag_text.split(",") if tag.strip()]
|
||||
|
||||
def _parse_gen_params(self, payload: Optional[str]) -> Optional[Dict[str, Any]]:
|
||||
if payload is None:
|
||||
return None
|
||||
if payload == "":
|
||||
return {}
|
||||
try:
|
||||
parsed = json.loads(payload)
|
||||
except json.JSONDecodeError as exc:
|
||||
raise RecipeValidationError(f"Invalid gen_params payload: {exc}") from exc
|
||||
if parsed is None:
|
||||
return {}
|
||||
if not isinstance(parsed, dict):
|
||||
raise RecipeValidationError("gen_params payload must be an object")
|
||||
return parsed
|
||||
|
||||
def _parse_resources_payload(self, payload_raw: str) -> tuple[Optional[Dict[str, Any]], List[Dict[str, Any]]]:
|
||||
try:
|
||||
payload = json.loads(payload_raw)
|
||||
except json.JSONDecodeError as exc:
|
||||
raise RecipeValidationError(f"Invalid resources payload: {exc}") from exc
|
||||
|
||||
if not isinstance(payload, list):
|
||||
raise RecipeValidationError("Resources payload must be a list")
|
||||
|
||||
checkpoint_entry: Optional[Dict[str, Any]] = None
|
||||
lora_entries: List[Dict[str, Any]] = []
|
||||
|
||||
for resource in payload:
|
||||
if not isinstance(resource, dict):
|
||||
continue
|
||||
resource_type = str(resource.get("type") or "").lower()
|
||||
if resource_type == "checkpoint":
|
||||
checkpoint_entry = self._build_checkpoint_entry(resource)
|
||||
elif resource_type in {"lora", "lycoris"}:
|
||||
lora_entries.append(self._build_lora_entry(resource))
|
||||
|
||||
return checkpoint_entry, lora_entries
|
||||
|
||||
def _build_checkpoint_entry(self, resource: Dict[str, Any]) -> Dict[str, Any]:
|
||||
return {
|
||||
"type": resource.get("type", "checkpoint"),
|
||||
"modelId": self._safe_int(resource.get("modelId")),
|
||||
"modelVersionId": self._safe_int(resource.get("modelVersionId")),
|
||||
"modelName": resource.get("modelName", ""),
|
||||
"modelVersionName": resource.get("modelVersionName", ""),
|
||||
}
|
||||
|
||||
def _build_lora_entry(self, resource: Dict[str, Any]) -> Dict[str, Any]:
|
||||
weight_raw = resource.get("weight", 1.0)
|
||||
try:
|
||||
weight = float(weight_raw)
|
||||
except (TypeError, ValueError):
|
||||
weight = 1.0
|
||||
return {
|
||||
"file_name": resource.get("modelName", ""),
|
||||
"weight": weight,
|
||||
"id": self._safe_int(resource.get("modelVersionId")),
|
||||
"name": resource.get("modelName", ""),
|
||||
"version": resource.get("modelVersionName", ""),
|
||||
"isDeleted": False,
|
||||
"exclude": False,
|
||||
}
|
||||
|
||||
async def _download_image_bytes(self, image_url: str) -> bytes:
|
||||
civitai_client = self._civitai_client_getter()
|
||||
downloader = await self._downloader_factory()
|
||||
temp_path = None
|
||||
try:
|
||||
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
|
||||
temp_path = temp_file.name
|
||||
download_url = image_url
|
||||
civitai_match = re.match(r"https://civitai\.com/images/(\d+)", image_url)
|
||||
if civitai_match:
|
||||
if civitai_client is None:
|
||||
raise RecipeDownloadError("Civitai client unavailable for image download")
|
||||
image_info = await civitai_client.get_image_info(civitai_match.group(1))
|
||||
if not image_info:
|
||||
raise RecipeDownloadError("Failed to fetch image information from Civitai")
|
||||
download_url = image_info.get("url")
|
||||
if not download_url:
|
||||
raise RecipeDownloadError("No image URL found in Civitai response")
|
||||
|
||||
success, result = await downloader.download_file(download_url, temp_path, use_auth=False)
|
||||
if not success:
|
||||
raise RecipeDownloadError(f"Failed to download image: {result}")
|
||||
with open(temp_path, "rb") as file_obj:
|
||||
return file_obj.read()
|
||||
except RecipeDownloadError:
|
||||
raise
|
||||
except RecipeValidationError:
|
||||
raise
|
||||
except Exception as exc: # pragma: no cover - defensive guard
|
||||
raise RecipeValidationError(f"Unable to download image: {exc}") from exc
|
||||
finally:
|
||||
if temp_path:
|
||||
try:
|
||||
os.unlink(temp_path)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
def _safe_int(self, value: Any) -> int:
|
||||
try:
|
||||
return int(value)
|
||||
except (TypeError, ValueError):
|
||||
return 0
|
||||
|
||||
async def _resolve_base_model_from_checkpoint(self, checkpoint_entry: Dict[str, Any]) -> str:
|
||||
version_id = self._safe_int(checkpoint_entry.get("modelVersionId"))
|
||||
|
||||
if not version_id:
|
||||
return ""
|
||||
|
||||
try:
|
||||
provider = await get_default_metadata_provider()
|
||||
if not provider:
|
||||
return ""
|
||||
|
||||
version_info = await provider.get_model_version_info(version_id)
|
||||
if isinstance(version_info, tuple):
|
||||
version_info = version_info[0]
|
||||
|
||||
if isinstance(version_info, dict):
|
||||
base_model = version_info.get("baseModel") or ""
|
||||
return str(base_model) if base_model is not None else ""
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
self._logger.warning("Failed to resolve base model from checkpoint metadata: %s", exc)
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
class RecipeAnalysisHandler:
|
||||
"""Analyze images to extract recipe metadata."""
|
||||
|
||||
@@ -33,6 +33,7 @@ MISC_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
|
||||
RouteDefinition("GET", "/api/lm/trained-words", "get_trained_words"),
|
||||
RouteDefinition("GET", "/api/lm/model-example-files", "get_model_example_files"),
|
||||
RouteDefinition("POST", "/api/lm/register-nodes", "register_nodes"),
|
||||
RouteDefinition("POST", "/api/lm/update-node-widget", "update_node_widget"),
|
||||
RouteDefinition("GET", "/api/lm/get-registry", "get_registry"),
|
||||
RouteDefinition("GET", "/api/lm/check-model-exists", "check_model_exists"),
|
||||
RouteDefinition("GET", "/api/lm/civitai/user-models", "get_civitai_user_models"),
|
||||
|
||||
@@ -39,6 +39,7 @@ COMMON_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/auto-organize-progress", "get_auto_organize_progress"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/top-tags", "get_top_tags"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/base-models", "get_base_models"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/model-types", "get_model_types"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/scan", "scan_models"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/roots", "get_model_roots"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/folders", "get_folders"),
|
||||
@@ -56,8 +57,11 @@ COMMON_ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/civitai/model/version/{modelVersionId}", "get_civitai_model_by_version"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/civitai/model/hash/{hash}", "get_civitai_model_by_hash"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/updates/refresh", "refresh_model_updates"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/updates/fetch-missing-license", "fetch_missing_civitai_license_data"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/updates/ignore", "set_model_update_ignore"),
|
||||
RouteDefinition("POST", "/api/lm/{prefix}/updates/ignore-version", "set_version_update_ignore"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/updates/status/{model_id}", "get_model_update_status"),
|
||||
RouteDefinition("GET", "/api/lm/{prefix}/updates/versions/{model_id}", "get_model_versions"),
|
||||
RouteDefinition("POST", "/api/lm/download-model", "download_model"),
|
||||
RouteDefinition("GET", "/api/lm/download-model-get", "download_model_get"),
|
||||
RouteDefinition("GET", "/api/lm/cancel-download-get", "cancel_download_get"),
|
||||
@@ -101,4 +105,3 @@ class ModelRouteRegistrar:
|
||||
add_method_name = self._METHOD_MAP[method.upper()]
|
||||
add_method = getattr(self._app.router, add_method_name)
|
||||
add_method(path, handler)
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ ROUTE_DEFINITIONS: tuple[RouteDefinition, ...] = (
|
||||
RouteDefinition("GET", "/loras/recipes", "render_page"),
|
||||
RouteDefinition("GET", "/api/lm/recipes", "list_recipes"),
|
||||
RouteDefinition("GET", "/api/lm/recipe/{recipe_id}", "get_recipe"),
|
||||
RouteDefinition("GET", "/api/lm/recipes/import-remote", "import_remote_recipe"),
|
||||
RouteDefinition("POST", "/api/lm/recipes/analyze-image", "analyze_uploaded_image"),
|
||||
RouteDefinition("POST", "/api/lm/recipes/analyze-local-image", "analyze_local_image"),
|
||||
RouteDefinition("POST", "/api/lm/recipes/save", "save_recipe"),
|
||||
@@ -61,4 +62,3 @@ class RecipeRouteRegistrar:
|
||||
add_method_name = self._METHOD_MAP[method.upper()]
|
||||
add_method = getattr(self._app.router, add_method_name)
|
||||
add_method(path, handler)
|
||||
|
||||
|
||||
@@ -205,8 +205,8 @@ class UpdateRoutes:
|
||||
|
||||
zip_path = tmp_zip_path
|
||||
|
||||
# Skip both settings.json and civitai folder
|
||||
UpdateRoutes._clean_plugin_folder(plugin_root, skip_files=['settings.json', 'civitai'])
|
||||
# Skip both settings.json, civitai and model cache folder
|
||||
UpdateRoutes._clean_plugin_folder(plugin_root, skip_files=['settings.json', 'civitai', 'model_cache'])
|
||||
|
||||
# Extract ZIP to temp dir
|
||||
with tempfile.TemporaryDirectory() as tmp_dir:
|
||||
@@ -344,6 +344,11 @@ class UpdateRoutes:
|
||||
origin.fetch()
|
||||
|
||||
if nightly:
|
||||
# Reset to discard any local changes
|
||||
repo.git.reset('--hard')
|
||||
# Clean untracked files
|
||||
repo.git.clean('-fd')
|
||||
|
||||
# Switch to main branch and pull latest
|
||||
main_branch = 'main'
|
||||
if main_branch not in [branch.name for branch in repo.branches]:
|
||||
@@ -357,6 +362,11 @@ class UpdateRoutes:
|
||||
new_version = f"main-{repo.head.commit.hexsha[:7]}"
|
||||
|
||||
else:
|
||||
# Reset to discard any local changes
|
||||
repo.git.reset('--hard')
|
||||
# Clean untracked files
|
||||
repo.git.clean('-fd')
|
||||
|
||||
# Get latest release tag
|
||||
tags = sorted(repo.tags, key=lambda t: t.commit.committed_datetime, reverse=True)
|
||||
if not tags:
|
||||
|
||||
@@ -1,12 +1,21 @@
|
||||
from abc import ABC, abstractmethod
|
||||
import asyncio
|
||||
from typing import Dict, List, Optional, Type, TYPE_CHECKING
|
||||
from typing import Any, Dict, List, Optional, Type, TYPE_CHECKING
|
||||
import logging
|
||||
import os
|
||||
|
||||
from ..utils.constants import VALID_LORA_TYPES
|
||||
from ..utils.models import BaseModelMetadata
|
||||
from ..utils.metadata_manager import MetadataManager
|
||||
from .model_query import FilterCriteria, ModelCacheRepository, ModelFilterSet, SearchStrategy, SettingsProvider
|
||||
from .model_query import (
|
||||
FilterCriteria,
|
||||
ModelCacheRepository,
|
||||
ModelFilterSet,
|
||||
SearchStrategy,
|
||||
SettingsProvider,
|
||||
normalize_civitai_model_type,
|
||||
resolve_civitai_model_type,
|
||||
)
|
||||
from .settings_manager import get_settings_manager
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -59,44 +68,69 @@ class BaseModelService(ABC):
|
||||
search: str = None,
|
||||
fuzzy_search: bool = False,
|
||||
base_models: list = None,
|
||||
tags: list = None,
|
||||
model_types: list = None,
|
||||
tags: Optional[Dict[str, str]] = None,
|
||||
search_options: dict = None,
|
||||
hash_filters: dict = None,
|
||||
favorites_only: bool = False,
|
||||
has_update: bool = False,
|
||||
update_available_only: bool = False,
|
||||
credit_required: Optional[bool] = None,
|
||||
allow_selling_generated_content: Optional[bool] = None,
|
||||
**kwargs,
|
||||
) -> Dict:
|
||||
"""Get paginated and filtered model data"""
|
||||
|
||||
sort_params = self.cache_repository.parse_sort(sort_by)
|
||||
sorted_data = await self.cache_repository.fetch_sorted(sort_params)
|
||||
|
||||
if hash_filters:
|
||||
filtered_data = await self._apply_hash_filters(sorted_data, hash_filters)
|
||||
return self._paginate(filtered_data, page, page_size)
|
||||
|
||||
filtered_data = await self._apply_common_filters(
|
||||
sorted_data,
|
||||
folder=folder,
|
||||
base_models=base_models,
|
||||
tags=tags,
|
||||
favorites_only=favorites_only,
|
||||
search_options=search_options,
|
||||
)
|
||||
|
||||
if search:
|
||||
filtered_data = await self._apply_search_filters(
|
||||
filtered_data,
|
||||
search,
|
||||
fuzzy_search,
|
||||
search_options,
|
||||
else:
|
||||
filtered_data = await self._apply_common_filters(
|
||||
sorted_data,
|
||||
folder=folder,
|
||||
base_models=base_models,
|
||||
model_types=model_types,
|
||||
tags=tags,
|
||||
favorites_only=favorites_only,
|
||||
search_options=search_options,
|
||||
)
|
||||
|
||||
filtered_data = await self._apply_specific_filters(filtered_data, **kwargs)
|
||||
if search:
|
||||
filtered_data = await self._apply_search_filters(
|
||||
filtered_data,
|
||||
search,
|
||||
fuzzy_search,
|
||||
search_options,
|
||||
)
|
||||
|
||||
if has_update:
|
||||
filtered_data = await self._apply_update_filter(filtered_data)
|
||||
filtered_data = await self._apply_specific_filters(filtered_data, **kwargs)
|
||||
|
||||
return self._paginate(filtered_data, page, page_size)
|
||||
# Apply license-based filters
|
||||
if credit_required is not None:
|
||||
filtered_data = await self._apply_credit_required_filter(filtered_data, credit_required)
|
||||
|
||||
if allow_selling_generated_content is not None:
|
||||
filtered_data = await self._apply_allow_selling_filter(filtered_data, allow_selling_generated_content)
|
||||
|
||||
annotated_for_filter: Optional[List[Dict]] = None
|
||||
if update_available_only:
|
||||
annotated_for_filter = await self._annotate_update_flags(filtered_data)
|
||||
filtered_data = [
|
||||
item for item in annotated_for_filter
|
||||
if item.get('update_available')
|
||||
]
|
||||
|
||||
paginated = self._paginate(filtered_data, page, page_size)
|
||||
|
||||
if update_available_only:
|
||||
# Items already include update flags thanks to the pre-filter annotation.
|
||||
paginated['items'] = list(paginated['items'])
|
||||
else:
|
||||
paginated['items'] = await self._annotate_update_flags(
|
||||
paginated['items'],
|
||||
)
|
||||
return paginated
|
||||
|
||||
|
||||
async def _apply_hash_filters(self, data: List[Dict], hash_filters: Dict) -> List[Dict]:
|
||||
@@ -126,7 +160,8 @@ class BaseModelService(ABC):
|
||||
data: List[Dict],
|
||||
folder: str = None,
|
||||
base_models: list = None,
|
||||
tags: list = None,
|
||||
model_types: list = None,
|
||||
tags: Optional[Dict[str, str]] = None,
|
||||
favorites_only: bool = False,
|
||||
search_options: dict = None,
|
||||
) -> List[Dict]:
|
||||
@@ -135,6 +170,7 @@ class BaseModelService(ABC):
|
||||
criteria = FilterCriteria(
|
||||
folder=folder,
|
||||
base_models=base_models,
|
||||
model_types=model_types,
|
||||
tags=tags,
|
||||
favorites_only=favorites_only,
|
||||
search_options=normalized_options,
|
||||
@@ -156,45 +192,178 @@ class BaseModelService(ABC):
|
||||
"""Apply model-specific filters - to be overridden by subclasses if needed"""
|
||||
return data
|
||||
|
||||
async def _apply_update_filter(self, data: List[Dict]) -> List[Dict]:
|
||||
"""Filter models to those with remote updates available when requested."""
|
||||
if not data:
|
||||
return []
|
||||
if self.update_service is None:
|
||||
logger.warning(
|
||||
"Requested has_update filter for %s models but update service is unavailable",
|
||||
self.model_type,
|
||||
)
|
||||
return []
|
||||
|
||||
candidates: List[tuple[Dict, int]] = []
|
||||
async def _apply_credit_required_filter(self, data: List[Dict], credit_required: bool) -> List[Dict]:
|
||||
"""Apply credit required filtering based on license_flags.
|
||||
|
||||
Args:
|
||||
data: List of model data items
|
||||
credit_required:
|
||||
- True: Return items where credit is required (allowNoCredit=False)
|
||||
- False: Return items where credit is not required (allowNoCredit=True)
|
||||
"""
|
||||
filtered_data = []
|
||||
for item in data:
|
||||
model_id = self._extract_model_id(item)
|
||||
if model_id is not None:
|
||||
candidates.append((item, model_id))
|
||||
license_flags = item.get("license_flags", 127) # Default to all permissions enabled
|
||||
|
||||
# Bit 0 represents allowNoCredit (1 = no credit required, 0 = credit required)
|
||||
allow_no_credit = bool(license_flags & (1 << 0))
|
||||
|
||||
# If credit_required is True, we want items where allowNoCredit is False (credit required)
|
||||
# If credit_required is False, we want items where allowNoCredit is True (no credit required)
|
||||
if credit_required:
|
||||
if not allow_no_credit: # Credit is required
|
||||
filtered_data.append(item)
|
||||
else:
|
||||
if allow_no_credit: # Credit is not required
|
||||
filtered_data.append(item)
|
||||
|
||||
return filtered_data
|
||||
|
||||
if not candidates:
|
||||
async def _apply_allow_selling_filter(self, data: List[Dict], allow_selling: bool) -> List[Dict]:
|
||||
"""Apply allow selling generated content filtering based on license_flags.
|
||||
|
||||
Args:
|
||||
data: List of model data items
|
||||
allow_selling:
|
||||
- True: Return items where selling generated content is allowed (allowCommercialUse contains Image)
|
||||
- False: Return items where selling generated content is not allowed (allowCommercialUse does not contain Image)
|
||||
"""
|
||||
filtered_data = []
|
||||
for item in data:
|
||||
license_flags = item.get("license_flags", 127) # Default to all permissions enabled
|
||||
|
||||
# Bits 1-4 represent commercial use permissions
|
||||
# Bit 1 specifically represents Image permission (allowCommercialUse contains Image)
|
||||
has_image_permission = bool(license_flags & (1 << 1))
|
||||
|
||||
# If allow_selling is True, we want items where Image permission is granted
|
||||
# If allow_selling is False, we want items where Image permission is not granted
|
||||
if allow_selling:
|
||||
if has_image_permission: # Selling generated content is allowed
|
||||
filtered_data.append(item)
|
||||
else:
|
||||
if not has_image_permission: # Selling generated content is not allowed
|
||||
filtered_data.append(item)
|
||||
|
||||
return filtered_data
|
||||
|
||||
async def _annotate_update_flags(
|
||||
self,
|
||||
items: List[Dict],
|
||||
) -> List[Dict]:
|
||||
"""Attach an update_available flag to each response item.
|
||||
|
||||
Items without a civitai model id default to False.
|
||||
"""
|
||||
if not items:
|
||||
return []
|
||||
|
||||
tasks = [
|
||||
self.update_service.has_update(self.model_type, model_id)
|
||||
for _, model_id in candidates
|
||||
]
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
annotated = [dict(item) for item in items]
|
||||
|
||||
filtered: List[Dict] = []
|
||||
for (item, model_id), result in zip(candidates, results):
|
||||
if isinstance(result, Exception):
|
||||
logger.error(
|
||||
"Failed to resolve update status for model %s (%s): %s",
|
||||
model_id,
|
||||
self.model_type,
|
||||
result,
|
||||
)
|
||||
if self.update_service is None:
|
||||
for item in annotated:
|
||||
item['update_available'] = False
|
||||
return annotated
|
||||
|
||||
id_to_items: Dict[int, List[Dict]] = {}
|
||||
ordered_ids: List[int] = []
|
||||
for item in annotated:
|
||||
model_id = self._extract_model_id(item)
|
||||
if model_id is None:
|
||||
item['update_available'] = False
|
||||
continue
|
||||
if result:
|
||||
filtered.append(item)
|
||||
return filtered
|
||||
if model_id not in id_to_items:
|
||||
id_to_items[model_id] = []
|
||||
ordered_ids.append(model_id)
|
||||
id_to_items[model_id].append(item)
|
||||
|
||||
if not ordered_ids:
|
||||
return annotated
|
||||
|
||||
strategy_value = self.settings.get("update_flag_strategy")
|
||||
if isinstance(strategy_value, str) and strategy_value.strip():
|
||||
strategy = strategy_value.strip().lower()
|
||||
else:
|
||||
strategy = "same_base"
|
||||
same_base_mode = strategy == "same_base"
|
||||
|
||||
records = None
|
||||
resolved: Optional[Dict[int, bool]] = None
|
||||
if same_base_mode:
|
||||
record_method = getattr(self.update_service, "get_records_bulk", None)
|
||||
if callable(record_method):
|
||||
try:
|
||||
records = await record_method(self.model_type, ordered_ids)
|
||||
resolved = {
|
||||
model_id: record.has_update()
|
||||
for model_id, record in records.items()
|
||||
}
|
||||
except Exception as exc:
|
||||
logger.error(
|
||||
"Failed to resolve update records in bulk for %s models (%s): %s",
|
||||
self.model_type,
|
||||
ordered_ids,
|
||||
exc,
|
||||
exc_info=True,
|
||||
)
|
||||
records = None
|
||||
resolved = None
|
||||
|
||||
if resolved is None:
|
||||
bulk_method = getattr(self.update_service, "has_updates_bulk", None)
|
||||
if callable(bulk_method):
|
||||
try:
|
||||
resolved = await bulk_method(self.model_type, ordered_ids)
|
||||
except Exception as exc:
|
||||
logger.error(
|
||||
"Failed to resolve update status in bulk for %s models (%s): %s",
|
||||
self.model_type,
|
||||
ordered_ids,
|
||||
exc,
|
||||
exc_info=True,
|
||||
)
|
||||
resolved = None
|
||||
|
||||
if resolved is None:
|
||||
tasks = [
|
||||
self.update_service.has_update(self.model_type, model_id)
|
||||
for model_id in ordered_ids
|
||||
]
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
resolved = {}
|
||||
for model_id, result in zip(ordered_ids, results):
|
||||
if isinstance(result, Exception):
|
||||
logger.error(
|
||||
"Failed to resolve update status for model %s (%s): %s",
|
||||
model_id,
|
||||
self.model_type,
|
||||
result,
|
||||
)
|
||||
continue
|
||||
resolved[model_id] = bool(result)
|
||||
|
||||
for model_id, items_for_id in id_to_items.items():
|
||||
default_flag = bool(resolved.get(model_id, False)) if resolved else False
|
||||
record = records.get(model_id) if records else None
|
||||
base_highest_versions = (
|
||||
self._build_highest_local_versions_by_base(record) if same_base_mode and record else {}
|
||||
)
|
||||
for item in items_for_id:
|
||||
if same_base_mode and record is not None:
|
||||
base_model = self._extract_base_model(item)
|
||||
normalized_base = self._normalize_base_model_name(base_model)
|
||||
threshold_version = base_highest_versions.get(normalized_base) if normalized_base else None
|
||||
if threshold_version is None:
|
||||
threshold_version = self._extract_version_id(item)
|
||||
flag = record.has_update_for_base(
|
||||
threshold_version,
|
||||
base_model,
|
||||
)
|
||||
else:
|
||||
flag = default_flag
|
||||
item['update_available'] = flag
|
||||
|
||||
return annotated
|
||||
|
||||
@staticmethod
|
||||
def _extract_model_id(item: Dict) -> Optional[int]:
|
||||
@@ -208,7 +377,71 @@ class BaseModelService(ABC):
|
||||
return int(value)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
@staticmethod
|
||||
def _extract_version_id(item: Dict) -> Optional[int]:
|
||||
civitai = item.get('civitai') if isinstance(item, dict) else None
|
||||
if not isinstance(civitai, dict):
|
||||
return None
|
||||
value = civitai.get('id')
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
return int(value)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _extract_base_model(item: Dict) -> Optional[str]:
|
||||
value = item.get('base_model')
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, str):
|
||||
candidate = value.strip()
|
||||
else:
|
||||
try:
|
||||
candidate = str(value).strip()
|
||||
except Exception:
|
||||
return None
|
||||
return candidate if candidate else None
|
||||
|
||||
@staticmethod
|
||||
def _normalize_base_model_name(value: Optional[str]) -> Optional[str]:
|
||||
"""Return a lowercased, trimmed base model name for comparison."""
|
||||
|
||||
if value is None:
|
||||
return None
|
||||
if isinstance(value, str):
|
||||
candidate = value.strip()
|
||||
else:
|
||||
try:
|
||||
candidate = str(value).strip()
|
||||
except Exception:
|
||||
return None
|
||||
return candidate.lower() if candidate else None
|
||||
|
||||
def _build_highest_local_versions_by_base(self, record) -> Dict[str, int]:
|
||||
"""Return the highest local version id known for each normalized base model."""
|
||||
|
||||
if record is None:
|
||||
return {}
|
||||
|
||||
highest_by_base: Dict[str, int] = {}
|
||||
for version in getattr(record, "versions", []):
|
||||
if not getattr(version, "is_in_library", False):
|
||||
continue
|
||||
normalized_base = self._normalize_base_model_name(getattr(version, "base_model", None))
|
||||
if normalized_base is None:
|
||||
continue
|
||||
version_id = getattr(version, "version_id", None)
|
||||
if version_id is None:
|
||||
continue
|
||||
current_max = highest_by_base.get(normalized_base)
|
||||
if current_max is None or version_id > current_max:
|
||||
highest_by_base[normalized_base] = version_id
|
||||
|
||||
return highest_by_base
|
||||
|
||||
def _paginate(self, data: List[Dict], page: int, page_size: int) -> Dict:
|
||||
"""Apply pagination to filtered data"""
|
||||
total_items = len(data)
|
||||
@@ -236,6 +469,25 @@ class BaseModelService(ABC):
|
||||
async def get_base_models(self, limit: int = 20) -> List[Dict]:
|
||||
"""Get base models sorted by frequency"""
|
||||
return await self.scanner.get_base_models(limit)
|
||||
|
||||
async def get_model_types(self, limit: int = 20) -> List[Dict[str, Any]]:
|
||||
"""Get counts of normalized CivitAI model types present in the cache."""
|
||||
cache = await self.scanner.get_cached_data()
|
||||
|
||||
type_counts: Dict[str, int] = {}
|
||||
for entry in cache.raw_data:
|
||||
normalized_type = normalize_civitai_model_type(resolve_civitai_model_type(entry))
|
||||
if not normalized_type or normalized_type not in VALID_LORA_TYPES:
|
||||
continue
|
||||
type_counts[normalized_type] = type_counts.get(normalized_type, 0) + 1
|
||||
|
||||
sorted_types = sorted(
|
||||
[{"type": model_type, "count": count} for model_type, count in type_counts.items()],
|
||||
key=lambda value: value["count"],
|
||||
reverse=True,
|
||||
)
|
||||
|
||||
return sorted_types[:limit]
|
||||
|
||||
def has_hash(self, sha256: str) -> bool:
|
||||
"""Check if a model with given hash exists"""
|
||||
@@ -396,13 +648,55 @@ class BaseModelService(ABC):
|
||||
return None
|
||||
return metadata.modelDescription or ''
|
||||
|
||||
@staticmethod
|
||||
def _parse_search_tokens(search_term: str) -> tuple[List[str], List[str]]:
|
||||
"""Split a search string into include and exclude tokens."""
|
||||
include_terms: List[str] = []
|
||||
exclude_terms: List[str] = []
|
||||
|
||||
for raw_term in search_term.split():
|
||||
term = raw_term.strip()
|
||||
if not term:
|
||||
continue
|
||||
|
||||
if term.startswith("-") and len(term) > 1:
|
||||
exclude_terms.append(term[1:].lower())
|
||||
else:
|
||||
include_terms.append(term.lower())
|
||||
|
||||
return include_terms, exclude_terms
|
||||
|
||||
@staticmethod
|
||||
def _relative_path_matches_tokens(
|
||||
path_lower: str, include_terms: List[str], exclude_terms: List[str]
|
||||
) -> bool:
|
||||
"""Determine whether a relative path string satisfies include/exclude tokens."""
|
||||
if any(term and term in path_lower for term in exclude_terms):
|
||||
return False
|
||||
|
||||
for term in include_terms:
|
||||
if term and term not in path_lower:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _relative_path_sort_key(relative_path: str, include_terms: List[str]) -> tuple:
|
||||
"""Sort paths by how well they satisfy the include tokens."""
|
||||
path_lower = relative_path.lower()
|
||||
prefix_hits = sum(1 for term in include_terms if term and path_lower.startswith(term))
|
||||
match_positions = [path_lower.find(term) for term in include_terms if term and term in path_lower]
|
||||
first_match_index = min(match_positions) if match_positions else 0
|
||||
|
||||
return (-prefix_hits, first_match_index, len(relative_path), path_lower)
|
||||
|
||||
|
||||
async def search_relative_paths(self, search_term: str, limit: int = 15) -> List[str]:
|
||||
"""Search model relative file paths for autocomplete functionality"""
|
||||
cache = await self.scanner.get_cached_data()
|
||||
include_terms, exclude_terms = self._parse_search_tokens(search_term)
|
||||
|
||||
matching_paths = []
|
||||
search_lower = search_term.lower()
|
||||
|
||||
# Get model roots for path calculation
|
||||
model_roots = self.scanner.get_model_roots()
|
||||
@@ -424,17 +718,19 @@ class BaseModelService(ABC):
|
||||
relative_path = normalized_file[len(normalized_root):].lstrip(os.sep)
|
||||
break
|
||||
|
||||
if relative_path and search_lower in relative_path.lower():
|
||||
if not relative_path:
|
||||
continue
|
||||
|
||||
relative_lower = relative_path.lower()
|
||||
if self._relative_path_matches_tokens(relative_lower, include_terms, exclude_terms):
|
||||
matching_paths.append(relative_path)
|
||||
|
||||
if len(matching_paths) >= limit * 2: # Get more for better sorting
|
||||
break
|
||||
|
||||
# Sort by relevance (exact matches first, then by length)
|
||||
matching_paths.sort(key=lambda x: (
|
||||
not x.lower().startswith(search_lower), # Exact prefix matches first
|
||||
len(x), # Then by length (shorter first)
|
||||
x.lower() # Then alphabetically
|
||||
))
|
||||
# Sort by relevance (prefix and earliest hits first, then by length and alphabetically)
|
||||
matching_paths.sort(
|
||||
key=lambda relative: self._relative_path_sort_key(relative, include_terms)
|
||||
)
|
||||
|
||||
return matching_paths[:limit]
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import logging
|
||||
from typing import List
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from ..utils.models import CheckpointMetadata
|
||||
from ..config import config
|
||||
@@ -21,14 +21,33 @@ class CheckpointScanner(ModelScanner):
|
||||
hash_index=ModelHashIndex()
|
||||
)
|
||||
|
||||
def _resolve_model_type(self, root_path: Optional[str]) -> Optional[str]:
|
||||
if not root_path:
|
||||
return None
|
||||
|
||||
if config.checkpoints_roots and root_path in config.checkpoints_roots:
|
||||
return "checkpoint"
|
||||
|
||||
if config.unet_roots and root_path in config.unet_roots:
|
||||
return "diffusion_model"
|
||||
|
||||
return None
|
||||
|
||||
def adjust_metadata(self, metadata, file_path, root_path):
|
||||
if hasattr(metadata, "model_type"):
|
||||
if root_path in config.checkpoints_roots:
|
||||
metadata.model_type = "checkpoint"
|
||||
elif root_path in config.unet_roots:
|
||||
metadata.model_type = "diffusion_model"
|
||||
model_type = self._resolve_model_type(root_path)
|
||||
if model_type:
|
||||
metadata.model_type = model_type
|
||||
return metadata
|
||||
|
||||
def adjust_cached_entry(self, entry: Dict[str, Any]) -> Dict[str, Any]:
|
||||
model_type = self._resolve_model_type(
|
||||
self._find_root_for_file(entry.get("file_path"))
|
||||
)
|
||||
if model_type:
|
||||
entry["model_type"] = model_type
|
||||
return entry
|
||||
|
||||
def get_model_roots(self) -> List[str]:
|
||||
"""Get checkpoint root directories"""
|
||||
return config.base_models_roots
|
||||
return config.base_models_roots
|
||||
|
||||
@@ -38,6 +38,7 @@ class CheckpointService(BaseModelService):
|
||||
"notes": checkpoint_data.get("notes", ""),
|
||||
"model_type": checkpoint_data.get("model_type", "checkpoint"),
|
||||
"favorite": checkpoint_data.get("favorite", False),
|
||||
"update_available": bool(checkpoint_data.get("update_available", False)),
|
||||
"civitai": self.filter_civitai_data(checkpoint_data.get("civitai", {}), minimal=True)
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
import asyncio
|
||||
@@ -8,22 +7,6 @@ from .model_metadata_provider import CivArchiveModelMetadataProvider, ModelMetad
|
||||
from .downloader import get_downloader
|
||||
from .errors import RateLimitError
|
||||
|
||||
try:
|
||||
from bs4 import BeautifulSoup
|
||||
except ImportError as exc:
|
||||
BeautifulSoup = None # type: ignore[assignment]
|
||||
_BS4_IMPORT_ERROR = exc
|
||||
else:
|
||||
_BS4_IMPORT_ERROR = None
|
||||
|
||||
def _require_beautifulsoup():
|
||||
if BeautifulSoup is None:
|
||||
raise RuntimeError(
|
||||
"BeautifulSoup (bs4) is required for CivArchive client. "
|
||||
"Install it with 'pip install beautifulsoup4'."
|
||||
) from _BS4_IMPORT_ERROR
|
||||
return BeautifulSoup
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class CivArchiveClient:
|
||||
@@ -446,109 +429,3 @@ class CivArchiveClient:
|
||||
if version is None:
|
||||
return None, "Model not found"
|
||||
return version, None
|
||||
|
||||
async def get_model_by_url(self, url) -> Optional[Dict]:
|
||||
"""Get specific model version by parsing CivArchive HTML page (legacy method)
|
||||
|
||||
This is the original HTML scraping implementation, kept for reference and new sites added not in api.
|
||||
The primary get_model_version() now uses the API instead.
|
||||
"""
|
||||
|
||||
try:
|
||||
# Construct CivArchive URL
|
||||
url = f"https://civarchive.com/{url}"
|
||||
downloader = await get_downloader()
|
||||
session = await downloader.session
|
||||
async with session.get(url) as response:
|
||||
if response.status != 200:
|
||||
return None
|
||||
|
||||
html_content = await response.text()
|
||||
|
||||
# Parse HTML to extract JSON data
|
||||
soup_parser = _require_beautifulsoup()
|
||||
soup = soup_parser(html_content, 'html.parser')
|
||||
script_tag = soup.find('script', {'id': '__NEXT_DATA__', 'type': 'application/json'})
|
||||
|
||||
if not script_tag:
|
||||
return None
|
||||
|
||||
# Parse JSON content
|
||||
json_data = json.loads(script_tag.string)
|
||||
model_data = json_data.get('props', {}).get('pageProps', {}).get('model')
|
||||
|
||||
if not model_data or 'version' not in model_data:
|
||||
return None
|
||||
|
||||
# Extract version data as base
|
||||
version = model_data['version'].copy()
|
||||
|
||||
# Restructure stats
|
||||
if 'downloadCount' in version and 'ratingCount' in version and 'rating' in version:
|
||||
version['stats'] = {
|
||||
'downloadCount': version.pop('downloadCount'),
|
||||
'ratingCount': version.pop('ratingCount'),
|
||||
'rating': version.pop('rating')
|
||||
}
|
||||
|
||||
# Rename trigger to trainedWords
|
||||
if 'trigger' in version:
|
||||
version['trainedWords'] = version.pop('trigger')
|
||||
|
||||
# Transform files data to expected format
|
||||
if 'files' in version:
|
||||
transformed_files = []
|
||||
for file_data in version['files']:
|
||||
# Find first available mirror (deletedAt is null)
|
||||
available_mirror = None
|
||||
for mirror in file_data.get('mirrors', []):
|
||||
if mirror.get('deletedAt') is None:
|
||||
available_mirror = mirror
|
||||
break
|
||||
|
||||
# Create transformed file entry
|
||||
transformed_file = {
|
||||
'id': file_data.get('id'),
|
||||
'sizeKB': file_data.get('sizeKB'),
|
||||
'name': available_mirror.get('filename', file_data.get('name')) if available_mirror else file_data.get('name'),
|
||||
'type': file_data.get('type'),
|
||||
'downloadUrl': available_mirror.get('url') if available_mirror else None,
|
||||
'primary': file_data.get('is_primary', False),
|
||||
'mirrors': file_data.get('mirrors', [])
|
||||
}
|
||||
|
||||
# Transform hash format
|
||||
if 'sha256' in file_data:
|
||||
transformed_file['hashes'] = {
|
||||
'SHA256': file_data['sha256'].upper()
|
||||
}
|
||||
|
||||
transformed_files.append(transformed_file)
|
||||
|
||||
version['files'] = transformed_files
|
||||
|
||||
# Add model information
|
||||
version['model'] = {
|
||||
'name': model_data.get('name'),
|
||||
'type': model_data.get('type'),
|
||||
'nsfw': model_data.get('is_nsfw', False),
|
||||
'description': model_data.get('description'),
|
||||
'tags': model_data.get('tags', [])
|
||||
}
|
||||
|
||||
version['creator'] = {
|
||||
'username': model_data.get('username'),
|
||||
'image': ''
|
||||
}
|
||||
|
||||
# Add source identifier
|
||||
version['source'] = 'civarchive'
|
||||
version['is_deleted'] = json_data.get('query', {}).get('is_deleted', False)
|
||||
|
||||
return version
|
||||
|
||||
except RateLimitError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching CivArchive model version (scraping) {url}: {e}")
|
||||
return None
|
||||
|
||||
@@ -2,10 +2,11 @@ import asyncio
|
||||
import copy
|
||||
import logging
|
||||
import os
|
||||
from typing import Optional, Dict, Tuple, List
|
||||
from typing import Any, Optional, Dict, Tuple, List, Sequence
|
||||
from .model_metadata_provider import CivitaiModelMetadataProvider, ModelMetadataProviderManager
|
||||
from .downloader import get_downloader
|
||||
from .errors import RateLimitError
|
||||
from .errors import RateLimitError, ResourceNotFoundError
|
||||
from ..utils.civitai_utils import resolve_license_payload
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -103,44 +104,32 @@ class CivitaiClient:
|
||||
|
||||
async def get_model_by_hash(self, model_hash: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||
try:
|
||||
success, result = await self._make_request(
|
||||
success, version = await self._make_request(
|
||||
'GET',
|
||||
f"{self.base_url}/model-versions/by-hash/{model_hash}",
|
||||
use_auth=True
|
||||
)
|
||||
if success:
|
||||
# Get model ID from version data
|
||||
model_id = result.get('modelId')
|
||||
if model_id:
|
||||
# Fetch additional model metadata
|
||||
success_model, data = await self._make_request(
|
||||
'GET',
|
||||
f"{self.base_url}/models/{model_id}",
|
||||
use_auth=True
|
||||
)
|
||||
if success_model:
|
||||
# Enrich version_info with model data
|
||||
result['model']['description'] = data.get("description")
|
||||
result['model']['tags'] = data.get("tags", [])
|
||||
if not success:
|
||||
message = str(version)
|
||||
if "not found" in message.lower():
|
||||
return None, "Model not found"
|
||||
|
||||
# Add creator from model data
|
||||
result['creator'] = data.get("creator")
|
||||
logger.error("Failed to fetch model info for %s: %s", model_hash[:10], message)
|
||||
return None, message
|
||||
|
||||
self._remove_comfy_metadata(result)
|
||||
return result, None
|
||||
|
||||
# Handle specific error cases
|
||||
if "not found" in str(result):
|
||||
return None, "Model not found"
|
||||
|
||||
# Other error cases
|
||||
logger.error(f"Failed to fetch model info for {model_hash[:10]}: {result}")
|
||||
return None, str(result)
|
||||
model_id = version.get('modelId')
|
||||
if model_id:
|
||||
model_data = await self._fetch_model_data(model_id)
|
||||
if model_data:
|
||||
self._enrich_version_with_model_data(version, model_data)
|
||||
|
||||
self._remove_comfy_metadata(version)
|
||||
return version, None
|
||||
except RateLimitError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"API Error: {str(e)}")
|
||||
return None, str(e)
|
||||
except Exception as exc:
|
||||
logger.error("API Error: %s", exc)
|
||||
return None, str(exc)
|
||||
|
||||
async def download_preview_image(self, image_url: str, save_path: str):
|
||||
try:
|
||||
@@ -160,7 +149,29 @@ class CivitaiClient:
|
||||
logger.error(f"Download Error: {str(e)}")
|
||||
return False
|
||||
|
||||
async def get_model_versions(self, model_id: str) -> List[Dict]:
|
||||
@staticmethod
|
||||
def _extract_error_message(payload: Any) -> str:
|
||||
"""Return a human-readable error message from an API payload."""
|
||||
|
||||
def _from_value(value: Any) -> str:
|
||||
if isinstance(value, str):
|
||||
return value
|
||||
if isinstance(value, dict):
|
||||
for key in ("message", "error", "detail", "details"):
|
||||
if key in value:
|
||||
candidate = _from_value(value[key])
|
||||
if candidate:
|
||||
return candidate
|
||||
if isinstance(value, list):
|
||||
for item in value:
|
||||
candidate = _from_value(item)
|
||||
if candidate:
|
||||
return candidate
|
||||
return ""
|
||||
|
||||
return _from_value(payload)
|
||||
|
||||
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
|
||||
"""Get all versions of a model with local availability info"""
|
||||
try:
|
||||
success, result = await self._make_request(
|
||||
@@ -175,11 +186,76 @@ class CivitaiClient:
|
||||
'type': result.get('type', ''),
|
||||
'name': result.get('name', '')
|
||||
}
|
||||
message = self._extract_error_message(result)
|
||||
if message and 'not found' in message.lower():
|
||||
raise ResourceNotFoundError(f"Resource not found for model {model_id}")
|
||||
if message:
|
||||
raise RuntimeError(message)
|
||||
return None
|
||||
except RateLimitError:
|
||||
raise
|
||||
except ResourceNotFoundError as exc:
|
||||
logger.info("Model %s is no longer available on Civitai: %s", model_id, exc)
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching model versions: {e}")
|
||||
logger.error("Error fetching model versions: %s", e, exc_info=True)
|
||||
raise
|
||||
|
||||
async def get_model_versions_bulk(
|
||||
self, model_ids: Sequence[int]
|
||||
) -> Optional[Dict[int, Dict]]:
|
||||
"""Fetch model metadata for multiple ids using the batch API."""
|
||||
|
||||
deduped: Dict[int, None] = {}
|
||||
for raw_id in model_ids:
|
||||
try:
|
||||
normalized = int(raw_id)
|
||||
except (TypeError, ValueError):
|
||||
continue
|
||||
deduped.setdefault(normalized, None)
|
||||
|
||||
normalized_ids = [str(model_id) for model_id in deduped.keys()]
|
||||
if not normalized_ids:
|
||||
return {}
|
||||
|
||||
try:
|
||||
query = ",".join(normalized_ids)
|
||||
success, result = await self._make_request(
|
||||
'GET',
|
||||
f"{self.base_url}/models",
|
||||
use_auth=True,
|
||||
params={'ids': query},
|
||||
)
|
||||
if not success:
|
||||
return None
|
||||
|
||||
items = result.get('items') if isinstance(result, dict) else None
|
||||
if not isinstance(items, list):
|
||||
return {}
|
||||
|
||||
payload: Dict[int, Dict] = {}
|
||||
for item in items:
|
||||
if not isinstance(item, dict):
|
||||
continue
|
||||
model_id = item.get('id')
|
||||
try:
|
||||
normalized_id = int(model_id)
|
||||
except (TypeError, ValueError):
|
||||
continue
|
||||
payload[normalized_id] = {
|
||||
'modelVersions': item.get('modelVersions', []),
|
||||
'type': item.get('type', ''),
|
||||
'name': item.get('name', ''),
|
||||
'allowNoCredit': item.get('allowNoCredit'),
|
||||
'allowCommercialUse': item.get('allowCommercialUse'),
|
||||
'allowDerivatives': item.get('allowDerivatives'),
|
||||
'allowDifferentLicense': item.get('allowDifferentLicense'),
|
||||
}
|
||||
return payload
|
||||
except RateLimitError:
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error fetching model versions in bulk: {exc}")
|
||||
return None
|
||||
|
||||
async def get_model_version(self, model_id: int = None, version_id: int = None) -> Optional[Dict]:
|
||||
@@ -337,6 +413,10 @@ class CivitaiClient:
|
||||
model_info['tags'] = model_data.get("tags", [])
|
||||
version['creator'] = model_data.get("creator")
|
||||
|
||||
license_payload = resolve_license_payload(model_data)
|
||||
for field, value in license_payload.items():
|
||||
model_info[field] = value
|
||||
|
||||
async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||
"""Fetch model version metadata from Civitai
|
||||
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
import copy
|
||||
import logging
|
||||
import os
|
||||
import asyncio
|
||||
import inspect
|
||||
import shutil
|
||||
import zipfile
|
||||
from collections import OrderedDict
|
||||
import uuid
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
@@ -9,12 +12,15 @@ from urllib.parse import urlparse
|
||||
from ..utils.models import LoraMetadata, CheckpointMetadata, EmbeddingMetadata
|
||||
from ..utils.constants import CARD_PREVIEW_WIDTH, VALID_LORA_TYPES
|
||||
from ..utils.civitai_utils import rewrite_preview_url
|
||||
from ..utils.preview_selection import select_preview_media
|
||||
from ..utils.utils import sanitize_folder_name
|
||||
from ..utils.exif_utils import ExifUtils
|
||||
from ..utils.file_utils import calculate_sha256
|
||||
from ..utils.metadata_manager import MetadataManager
|
||||
from .service_registry import ServiceRegistry
|
||||
from .settings_manager import get_settings_manager
|
||||
from .metadata_service import get_default_metadata_provider
|
||||
from .downloader import get_downloader, DownloadProgress
|
||||
from .downloader import get_downloader, DownloadProgress, DownloadStreamControl
|
||||
|
||||
# Download to temporary file first
|
||||
import tempfile
|
||||
@@ -43,7 +49,7 @@ class DownloadManager:
|
||||
self._active_downloads = OrderedDict() # download_id -> download_info
|
||||
self._download_semaphore = asyncio.Semaphore(5) # Limit concurrent downloads
|
||||
self._download_tasks = {} # download_id -> asyncio.Task
|
||||
self._pause_events: Dict[str, asyncio.Event] = {}
|
||||
self._pause_events: Dict[str, DownloadStreamControl] = {}
|
||||
|
||||
async def _get_lora_scanner(self):
|
||||
"""Get the lora scanner from registry"""
|
||||
@@ -88,11 +94,11 @@ class DownloadManager:
|
||||
'bytes_downloaded': 0,
|
||||
'total_bytes': None,
|
||||
'bytes_per_second': 0.0,
|
||||
'last_progress_timestamp': None,
|
||||
}
|
||||
|
||||
pause_event = asyncio.Event()
|
||||
pause_event.set()
|
||||
self._pause_events[task_id] = pause_event
|
||||
pause_control = DownloadStreamControl()
|
||||
self._pause_events[task_id] = pause_control
|
||||
|
||||
# Create tracking task
|
||||
download_task = asyncio.create_task(
|
||||
@@ -139,19 +145,23 @@ class DownloadManager:
|
||||
info['bytes_downloaded'] = snapshot.bytes_downloaded
|
||||
info['total_bytes'] = snapshot.total_bytes
|
||||
info['bytes_per_second'] = snapshot.bytes_per_second
|
||||
pause_control = self._pause_events.get(task_id)
|
||||
if isinstance(pause_control, DownloadStreamControl):
|
||||
pause_control.mark_progress(snapshot.timestamp)
|
||||
info['last_progress_timestamp'] = pause_control.last_progress_timestamp
|
||||
|
||||
if original_callback:
|
||||
await self._dispatch_progress(original_callback, snapshot, progress_value)
|
||||
|
||||
|
||||
# Acquire semaphore to limit concurrent downloads
|
||||
try:
|
||||
async with self._download_semaphore:
|
||||
pause_event = self._pause_events.get(task_id)
|
||||
if pause_event is not None and not pause_event.is_set():
|
||||
pause_control = self._pause_events.get(task_id)
|
||||
if pause_control is not None and pause_control.is_paused():
|
||||
if task_id in self._active_downloads:
|
||||
self._active_downloads[task_id]['status'] = 'paused'
|
||||
self._active_downloads[task_id]['bytes_per_second'] = 0.0
|
||||
await pause_event.wait()
|
||||
await pause_control.wait()
|
||||
|
||||
# Update status to downloading
|
||||
if task_id in self._active_downloads:
|
||||
@@ -325,7 +335,7 @@ class DownloadManager:
|
||||
await progress_callback(0)
|
||||
|
||||
# 2. Get file information
|
||||
file_info = next((f for f in version_info.get('files', []) if f.get('primary') and f.get('type') == 'Model'), None)
|
||||
file_info = next((f for f in version_info.get('files', []) if f.get('primary') and f.get('type') in ('Model', 'Negative')), None)
|
||||
if not file_info:
|
||||
return {'success': False, 'error': 'No primary file found in metadata'}
|
||||
mirrors = file_info.get('mirrors') or []
|
||||
@@ -369,6 +379,19 @@ class DownloadManager:
|
||||
download_id=download_id,
|
||||
)
|
||||
|
||||
if result.get('success', False):
|
||||
resolved_model_id = (
|
||||
model_id
|
||||
or version_info.get('modelId')
|
||||
or (version_info.get('model') or {}).get('id')
|
||||
)
|
||||
await self._sync_downloaded_version(
|
||||
model_type,
|
||||
resolved_model_id,
|
||||
version_info,
|
||||
model_version_id,
|
||||
)
|
||||
|
||||
# If early_access_msg exists and download failed, replace error message
|
||||
if 'early_access_msg' in locals() and not result.get('success', False):
|
||||
result['error'] = early_access_msg
|
||||
@@ -383,6 +406,96 @@ class DownloadManager:
|
||||
return {'success': False, 'error': f"Early access restriction: {str(e)}. Please ensure you have purchased early access and are logged in to Civitai."}
|
||||
return {'success': False, 'error': str(e)}
|
||||
|
||||
async def _sync_downloaded_version(
|
||||
self,
|
||||
model_type: str,
|
||||
model_id_value,
|
||||
version_info: Dict,
|
||||
fallback_version_id=None,
|
||||
) -> None:
|
||||
"""Ensure update tracking reflects a newly downloaded version."""
|
||||
|
||||
try:
|
||||
update_service = await ServiceRegistry.get_model_update_service()
|
||||
except Exception as exc:
|
||||
logger.debug("Skipping update sync; failed to acquire update service: %s", exc)
|
||||
return
|
||||
|
||||
if update_service is None:
|
||||
return
|
||||
|
||||
resolved_model_id = model_id_value
|
||||
if resolved_model_id is None:
|
||||
resolved_model_id = version_info.get('modelId')
|
||||
if resolved_model_id is None:
|
||||
model_info = version_info.get('model')
|
||||
if isinstance(model_info, dict):
|
||||
resolved_model_id = model_info.get('id')
|
||||
try:
|
||||
resolved_model_id = int(resolved_model_id)
|
||||
except (TypeError, ValueError):
|
||||
logger.debug("Skipping update sync; invalid model id: %s", resolved_model_id)
|
||||
return
|
||||
|
||||
version_id = version_info.get('id')
|
||||
if version_id is None:
|
||||
version_id = fallback_version_id
|
||||
try:
|
||||
version_id = int(version_id)
|
||||
except (TypeError, ValueError):
|
||||
logger.debug(
|
||||
"Skipping update sync; invalid version id for model %s: %s",
|
||||
resolved_model_id,
|
||||
version_id,
|
||||
)
|
||||
return
|
||||
|
||||
version_ids = set()
|
||||
scanner = None
|
||||
try:
|
||||
if model_type == 'lora':
|
||||
scanner = await self._get_lora_scanner()
|
||||
elif model_type == 'checkpoint':
|
||||
scanner = await self._get_checkpoint_scanner()
|
||||
elif model_type == 'embedding':
|
||||
scanner = await ServiceRegistry.get_embedding_scanner()
|
||||
except Exception as exc:
|
||||
logger.debug("Failed to acquire scanner for %s models: %s", model_type, exc)
|
||||
|
||||
if scanner is not None:
|
||||
try:
|
||||
local_versions = await scanner.get_model_versions_by_id(resolved_model_id)
|
||||
except Exception as exc:
|
||||
logger.debug(
|
||||
"Failed to collect local versions for %s model %s: %s",
|
||||
model_type,
|
||||
resolved_model_id,
|
||||
exc,
|
||||
)
|
||||
else:
|
||||
for entry in local_versions or []:
|
||||
vid = entry.get('versionId')
|
||||
try:
|
||||
version_ids.add(int(vid))
|
||||
except (TypeError, ValueError):
|
||||
continue
|
||||
|
||||
version_ids.add(version_id)
|
||||
|
||||
try:
|
||||
await update_service.update_in_library_versions(
|
||||
model_type,
|
||||
resolved_model_id,
|
||||
sorted(version_ids),
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.debug(
|
||||
"Failed to update in-library versions for %s model %s: %s",
|
||||
model_type,
|
||||
resolved_model_id,
|
||||
exc,
|
||||
)
|
||||
|
||||
def _calculate_relative_path(self, version_info: Dict, model_type: str = 'lora') -> str:
|
||||
"""Calculate relative path using template from settings
|
||||
|
||||
@@ -415,8 +528,10 @@ class DownloadManager:
|
||||
base_model_mappings = settings_manager.get('base_model_path_mappings', {})
|
||||
mapped_base_model = base_model_mappings.get(base_model, base_model)
|
||||
|
||||
model_info = version_info.get('model') or {}
|
||||
|
||||
# Get model tags
|
||||
model_tags = version_info.get('model', {}).get('tags', [])
|
||||
model_tags = model_info.get('tags', [])
|
||||
|
||||
first_tag = settings_manager.resolve_priority_tag_for_model(model_tags, model_type)
|
||||
|
||||
@@ -425,6 +540,8 @@ class DownloadManager:
|
||||
formatted_path = formatted_path.replace('{base_model}', mapped_base_model)
|
||||
formatted_path = formatted_path.replace('{first_tag}', first_tag)
|
||||
formatted_path = formatted_path.replace('{author}', author)
|
||||
formatted_path = formatted_path.replace('{model_name}', sanitize_folder_name(model_info.get('name', '')))
|
||||
formatted_path = formatted_path.replace('{version_name}', sanitize_folder_name(version_info.get('name', '')))
|
||||
|
||||
if model_type == 'embedding':
|
||||
formatted_path = formatted_path.replace(' ', '_')
|
||||
@@ -443,6 +560,13 @@ class DownloadManager:
|
||||
download_id: str = None,
|
||||
) -> Dict:
|
||||
"""Execute the actual download process including preview images and model files"""
|
||||
metadata_entries: List = []
|
||||
metadata_files_for_cleanup: List[str] = []
|
||||
extracted_paths: List[str] = []
|
||||
metadata_path = ""
|
||||
preview_targets: List[str] = []
|
||||
preview_path: str | None = None
|
||||
preview_nsfw_level = 0
|
||||
try:
|
||||
# Extract original filename details
|
||||
original_filename = os.path.basename(metadata.file_path)
|
||||
@@ -473,8 +597,8 @@ class DownloadManager:
|
||||
part_path = save_path + '.part'
|
||||
metadata_path = os.path.splitext(save_path)[0] + '.metadata.json'
|
||||
|
||||
pause_event = self._pause_events.get(download_id) if download_id else None
|
||||
|
||||
pause_control = self._pause_events.get(download_id) if download_id else None
|
||||
|
||||
# Store file paths in active_downloads for potential cleanup
|
||||
if download_id and download_id in self._active_downloads:
|
||||
self._active_downloads[download_id]['file_path'] = save_path
|
||||
@@ -486,10 +610,21 @@ class DownloadManager:
|
||||
if progress_callback:
|
||||
await progress_callback(1) # 1% progress for starting preview download
|
||||
|
||||
first_image = images[0] if isinstance(images[0], dict) else None
|
||||
preview_url = first_image.get('url') if first_image else None
|
||||
media_type = (first_image.get('type') or '').lower() if first_image else ''
|
||||
nsfw_level = first_image.get('nsfwLevel', 0) if first_image else 0
|
||||
settings_manager = get_settings_manager()
|
||||
blur_mature_content = bool(
|
||||
settings_manager.get('blur_mature_content', True)
|
||||
)
|
||||
selected_image, nsfw_level = select_preview_media(
|
||||
images,
|
||||
blur_mature_content=blur_mature_content,
|
||||
)
|
||||
|
||||
preview_url = selected_image.get('url') if selected_image else None
|
||||
media_type = (
|
||||
(selected_image.get('type') or '').lower()
|
||||
if selected_image
|
||||
else ''
|
||||
)
|
||||
|
||||
def _extension_from_url(url: str, fallback: str) -> str:
|
||||
try:
|
||||
@@ -575,16 +710,17 @@ class DownloadManager:
|
||||
logger.warning(f"Failed to delete temp file: {e}")
|
||||
|
||||
if preview_downloaded and preview_path:
|
||||
preview_nsfw_level = nsfw_level
|
||||
metadata.preview_url = preview_path.replace(os.sep, '/')
|
||||
metadata.preview_nsfw_level = nsfw_level
|
||||
if download_id and download_id in self._active_downloads:
|
||||
self._active_downloads[download_id]['preview_path'] = preview_path
|
||||
|
||||
if progress_callback:
|
||||
await progress_callback(3) # 3% progress after preview download
|
||||
|
||||
# Download model file with progress tracking using downloader
|
||||
downloader = await get_downloader()
|
||||
if pause_control is not None:
|
||||
pause_control.update_stall_timeout(downloader.stall_timeout)
|
||||
last_error = None
|
||||
for download_url in download_urls:
|
||||
use_auth = download_url.startswith("https://civitai.com/api/download/")
|
||||
@@ -597,8 +733,8 @@ class DownloadManager:
|
||||
"use_auth": use_auth, # Only use authentication for Civitai downloads
|
||||
}
|
||||
|
||||
if pause_event is not None:
|
||||
download_kwargs["pause_event"] = pause_event
|
||||
if pause_control is not None:
|
||||
download_kwargs["pause_event"] = pause_control
|
||||
|
||||
success, result = await downloader.download_file(
|
||||
download_url,
|
||||
@@ -635,53 +771,189 @@ class DownloadManager:
|
||||
|
||||
return {'success': False, 'error': last_error or 'Failed to download file'}
|
||||
|
||||
# 4. Update file information (size and modified time)
|
||||
metadata.update_file_info(save_path)
|
||||
# 4. Handle archive extraction and prepare per-file metadata
|
||||
actual_file_paths = [save_path]
|
||||
if zipfile.is_zipfile(save_path):
|
||||
extracted_paths = await self._extract_safetensors_from_archive(save_path)
|
||||
if not extracted_paths:
|
||||
return {'success': False, 'error': 'Zip archive does not contain any safetensors files'}
|
||||
actual_file_paths = extracted_paths
|
||||
try:
|
||||
os.remove(save_path)
|
||||
except OSError as exc:
|
||||
logger.warning(f"Unable to delete temporary archive {save_path}: {exc}")
|
||||
if download_id and download_id in self._active_downloads:
|
||||
self._active_downloads[download_id]['file_path'] = extracted_paths[0]
|
||||
self._active_downloads[download_id]['extracted_paths'] = extracted_paths
|
||||
|
||||
# 5. Final metadata update
|
||||
await MetadataManager.save_metadata(save_path, metadata)
|
||||
metadata_entries = await self._build_metadata_entries(metadata, actual_file_paths)
|
||||
if preview_path:
|
||||
preview_targets = self._distribute_preview_to_entries(preview_path, metadata_entries)
|
||||
for entry, target in zip(metadata_entries, preview_targets):
|
||||
entry.preview_url = target.replace(os.sep, "/")
|
||||
entry.preview_nsfw_level = preview_nsfw_level
|
||||
if download_id and download_id in self._active_downloads and preview_targets:
|
||||
self._active_downloads[download_id]["preview_path"] = preview_targets[0]
|
||||
|
||||
# 6. Update cache based on model type
|
||||
scanner = None
|
||||
if model_type == "checkpoint":
|
||||
scanner = await self._get_checkpoint_scanner()
|
||||
logger.info(f"Updating checkpoint cache for {save_path}")
|
||||
logger.info(f"Updating checkpoint cache for {actual_file_paths[0]}")
|
||||
elif model_type == "lora":
|
||||
scanner = await self._get_lora_scanner()
|
||||
logger.info(f"Updating lora cache for {save_path}")
|
||||
logger.info(f"Updating lora cache for {actual_file_paths[0]}")
|
||||
elif model_type == "embedding":
|
||||
scanner = await ServiceRegistry.get_embedding_scanner()
|
||||
logger.info(f"Updating embedding cache for {save_path}")
|
||||
|
||||
# Convert metadata to dictionary
|
||||
metadata_dict = metadata.to_dict()
|
||||
logger.info(f"Updating embedding cache for {actual_file_paths[0]}")
|
||||
|
||||
# Add model to cache and save to disk in a single operation
|
||||
await scanner.add_model_to_cache(metadata_dict, relative_path)
|
||||
adjust_cached_entry = (
|
||||
getattr(scanner, "adjust_cached_entry", None) if scanner is not None else None
|
||||
)
|
||||
|
||||
for index, entry in enumerate(metadata_entries):
|
||||
file_path_for_adjust = getattr(entry, "file_path", actual_file_paths[index])
|
||||
normalized_file_path = (
|
||||
file_path_for_adjust.replace(os.sep, "/")
|
||||
if isinstance(file_path_for_adjust, str)
|
||||
else str(file_path_for_adjust)
|
||||
)
|
||||
|
||||
if scanner is not None:
|
||||
find_root = getattr(scanner, "_find_root_for_file", None)
|
||||
adjust_root = None
|
||||
if callable(find_root):
|
||||
try:
|
||||
adjust_root = find_root(normalized_file_path)
|
||||
except TypeError:
|
||||
adjust_root = None
|
||||
|
||||
adjust_metadata = getattr(scanner, "adjust_metadata", None)
|
||||
if callable(adjust_metadata):
|
||||
adjusted_entry = adjust_metadata(entry, normalized_file_path, adjust_root)
|
||||
if adjusted_entry is not None:
|
||||
entry = adjusted_entry
|
||||
metadata_entries[index] = entry
|
||||
|
||||
metadata_file_path = os.path.splitext(entry.file_path)[0] + '.metadata.json'
|
||||
metadata_files_for_cleanup.append(metadata_file_path)
|
||||
|
||||
await MetadataManager.save_metadata(entry.file_path, entry)
|
||||
|
||||
metadata_dict = entry.to_dict()
|
||||
if callable(adjust_cached_entry):
|
||||
metadata_dict = adjust_cached_entry(metadata_dict)
|
||||
|
||||
if scanner is not None:
|
||||
await scanner.add_model_to_cache(metadata_dict, relative_path)
|
||||
|
||||
# Report 100% completion
|
||||
if progress_callback:
|
||||
await progress_callback(100)
|
||||
|
||||
return {
|
||||
'success': True
|
||||
}
|
||||
return {'success': True}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in _execute_download: {e}", exc_info=True)
|
||||
# Clean up partial downloads except .part file
|
||||
cleanup_files = [metadata_path]
|
||||
if hasattr(metadata, 'preview_url') and metadata.preview_url and os.path.exists(metadata.preview_url):
|
||||
cleanup_files.append(metadata.preview_url)
|
||||
|
||||
for path in cleanup_files:
|
||||
cleanup_targets = {
|
||||
path
|
||||
for path in [save_path, metadata_path, *metadata_files_for_cleanup, *extracted_paths]
|
||||
if path
|
||||
}
|
||||
preview_candidate = (
|
||||
metadata_entries[0].preview_url
|
||||
if metadata_entries
|
||||
else getattr(metadata, "preview_url", None)
|
||||
)
|
||||
if preview_candidate:
|
||||
cleanup_targets.add(preview_candidate)
|
||||
|
||||
cleanup_targets.update(preview_targets)
|
||||
for path in cleanup_targets:
|
||||
if path and os.path.exists(path):
|
||||
try:
|
||||
os.remove(path)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to cleanup file {path}: {e}")
|
||||
|
||||
except Exception as exc:
|
||||
logger.warning(f"Failed to cleanup file {path}: {exc}")
|
||||
|
||||
return {'success': False, 'error': str(e)}
|
||||
|
||||
async def _extract_safetensors_from_archive(self, archive_path: str) -> List[str]:
|
||||
if not zipfile.is_zipfile(archive_path):
|
||||
return []
|
||||
|
||||
target_dir = os.path.dirname(archive_path)
|
||||
|
||||
def _extract_sync() -> List[str]:
|
||||
extracted_files: List[str] = []
|
||||
with zipfile.ZipFile(archive_path, "r") as archive:
|
||||
for info in archive.infolist():
|
||||
if info.is_dir():
|
||||
continue
|
||||
if not info.filename.lower().endswith(".safetensors"):
|
||||
continue
|
||||
file_name = os.path.basename(info.filename)
|
||||
if not file_name:
|
||||
continue
|
||||
dest_path = self._resolve_extracted_destination(target_dir, file_name)
|
||||
with archive.open(info) as source, open(dest_path, "wb") as target:
|
||||
shutil.copyfileobj(source, target)
|
||||
extracted_files.append(dest_path)
|
||||
return extracted_files
|
||||
|
||||
return await asyncio.to_thread(_extract_sync)
|
||||
|
||||
async def _build_metadata_entries(self, base_metadata, file_paths: List[str]) -> List:
|
||||
if not file_paths:
|
||||
return []
|
||||
|
||||
entries: List = []
|
||||
for index, file_path in enumerate(file_paths):
|
||||
entry = base_metadata if index == 0 else copy.deepcopy(base_metadata)
|
||||
entry.update_file_info(file_path)
|
||||
entry.sha256 = await calculate_sha256(file_path)
|
||||
entries.append(entry)
|
||||
|
||||
return entries
|
||||
|
||||
def _resolve_extracted_destination(self, target_dir: str, filename: str) -> str:
|
||||
base_name, extension = os.path.splitext(filename)
|
||||
candidate = filename
|
||||
destination = os.path.join(target_dir, candidate)
|
||||
counter = 1
|
||||
|
||||
while os.path.exists(destination):
|
||||
candidate = f"{base_name}-{counter}{extension}"
|
||||
destination = os.path.join(target_dir, candidate)
|
||||
counter += 1
|
||||
|
||||
return destination
|
||||
|
||||
def _distribute_preview_to_entries(self, preview_path: str, entries: List) -> List[str]:
|
||||
if not preview_path or not entries:
|
||||
return []
|
||||
|
||||
if not os.path.exists(preview_path):
|
||||
return []
|
||||
|
||||
extension = os.path.splitext(preview_path)[1] or ".webp"
|
||||
|
||||
targets = [
|
||||
os.path.splitext(entry.file_path)[0] + extension for entry in entries
|
||||
]
|
||||
|
||||
if not targets:
|
||||
return []
|
||||
|
||||
first_target = targets[0]
|
||||
if preview_path != first_target:
|
||||
os.replace(preview_path, first_target)
|
||||
source_path = first_target
|
||||
|
||||
for target in targets[1:]:
|
||||
shutil.copyfile(source_path, target)
|
||||
|
||||
return targets
|
||||
|
||||
async def _handle_download_progress(
|
||||
self,
|
||||
progress_update,
|
||||
@@ -727,9 +999,9 @@ class DownloadManager:
|
||||
task = self._download_tasks[download_id]
|
||||
task.cancel()
|
||||
|
||||
pause_event = self._pause_events.get(download_id)
|
||||
if pause_event is not None:
|
||||
pause_event.set()
|
||||
pause_control = self._pause_events.get(download_id)
|
||||
if pause_control is not None:
|
||||
pause_control.resume()
|
||||
|
||||
# Update status in active downloads
|
||||
if download_id in self._active_downloads:
|
||||
@@ -745,16 +1017,23 @@ class DownloadManager:
|
||||
# Clean up ALL files including .part when user cancels
|
||||
download_info = self._active_downloads.get(download_id)
|
||||
if download_info:
|
||||
# Delete the main file
|
||||
if 'file_path' in download_info:
|
||||
file_path = download_info['file_path']
|
||||
target_files = set()
|
||||
primary_path = download_info.get('file_path')
|
||||
if primary_path:
|
||||
target_files.add(primary_path)
|
||||
|
||||
for extra_path in download_info.get('extracted_paths', []):
|
||||
if extra_path:
|
||||
target_files.add(extra_path)
|
||||
|
||||
for file_path in target_files:
|
||||
if os.path.exists(file_path):
|
||||
try:
|
||||
os.unlink(file_path)
|
||||
logger.debug(f"Deleted cancelled download: {file_path}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting file: {e}")
|
||||
|
||||
|
||||
# Delete the .part file (only on user cancellation)
|
||||
if 'part_path' in download_info:
|
||||
part_path = download_info['part_path']
|
||||
@@ -764,10 +1043,9 @@ class DownloadManager:
|
||||
logger.debug(f"Deleted partial download: {part_path}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting part file: {e}")
|
||||
|
||||
# Delete metadata file if exists
|
||||
if 'file_path' in download_info:
|
||||
file_path = download_info['file_path']
|
||||
|
||||
# Delete metadata files for each resolved path
|
||||
for file_path in target_files:
|
||||
metadata_path = os.path.splitext(file_path)[0] + '.metadata.json'
|
||||
if os.path.exists(metadata_path):
|
||||
try:
|
||||
@@ -775,15 +1053,16 @@ class DownloadManager:
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting metadata file: {e}")
|
||||
|
||||
preview_path_value = download_info.get('preview_path')
|
||||
if preview_path_value and os.path.exists(preview_path_value):
|
||||
try:
|
||||
os.unlink(preview_path_value)
|
||||
logger.debug(f"Deleted preview file: {preview_path_value}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting preview file: {e}")
|
||||
preview_path_value = download_info.get('preview_path')
|
||||
if preview_path_value and os.path.exists(preview_path_value):
|
||||
try:
|
||||
os.unlink(preview_path_value)
|
||||
logger.debug(f"Deleted preview file: {preview_path_value}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting preview file: {preview_path_value}")
|
||||
|
||||
# Delete preview file if exists (.webp or .mp4) for legacy paths
|
||||
# Delete preview file if exists (.webp or .mp4) for legacy paths
|
||||
for file_path in target_files:
|
||||
for preview_ext in ['.webp', '.mp4']:
|
||||
preview_path = os.path.splitext(file_path)[0] + preview_ext
|
||||
if os.path.exists(preview_path):
|
||||
@@ -791,8 +1070,7 @@ class DownloadManager:
|
||||
os.unlink(preview_path)
|
||||
logger.debug(f"Deleted preview file: {preview_path}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting preview file: {e}")
|
||||
|
||||
logger.error(f"Error deleting preview file: {preview_path}")
|
||||
return {'success': True, 'message': 'Download cancelled successfully'}
|
||||
except Exception as e:
|
||||
logger.error(f"Error cancelling download: {e}", exc_info=True)
|
||||
@@ -806,16 +1084,14 @@ class DownloadManager:
|
||||
if download_id not in self._download_tasks:
|
||||
return {'success': False, 'error': 'Download task not found'}
|
||||
|
||||
pause_event = self._pause_events.get(download_id)
|
||||
if pause_event is None:
|
||||
pause_event = asyncio.Event()
|
||||
pause_event.set()
|
||||
self._pause_events[download_id] = pause_event
|
||||
pause_control = self._pause_events.get(download_id)
|
||||
if pause_control is None:
|
||||
return {'success': False, 'error': 'Download task not found'}
|
||||
|
||||
if not pause_event.is_set():
|
||||
if pause_control.is_paused():
|
||||
return {'success': False, 'error': 'Download is already paused'}
|
||||
|
||||
pause_event.clear()
|
||||
pause_control.pause()
|
||||
|
||||
download_info = self._active_downloads.get(download_id)
|
||||
if download_info is not None:
|
||||
@@ -827,16 +1103,28 @@ class DownloadManager:
|
||||
async def resume_download(self, download_id: str) -> Dict:
|
||||
"""Resume a previously paused download."""
|
||||
|
||||
pause_event = self._pause_events.get(download_id)
|
||||
if pause_event is None:
|
||||
pause_control = self._pause_events.get(download_id)
|
||||
if pause_control is None:
|
||||
return {'success': False, 'error': 'Download task not found'}
|
||||
|
||||
if pause_event.is_set():
|
||||
if pause_control.is_set():
|
||||
return {'success': False, 'error': 'Download is not paused'}
|
||||
|
||||
pause_event.set()
|
||||
|
||||
download_info = self._active_downloads.get(download_id)
|
||||
force_reconnect = False
|
||||
if pause_control is not None:
|
||||
elapsed = pause_control.time_since_last_progress()
|
||||
threshold = max(30.0, pause_control.stall_timeout / 2.0)
|
||||
if elapsed is not None and elapsed >= threshold:
|
||||
force_reconnect = True
|
||||
logger.info(
|
||||
"Forcing reconnect for download %s after %.1f seconds without progress",
|
||||
download_id,
|
||||
elapsed,
|
||||
)
|
||||
|
||||
pause_control.resume(force_reconnect=force_reconnect)
|
||||
|
||||
if download_info is not None:
|
||||
if download_info.get('status') == 'paused':
|
||||
download_info['status'] = 'downloading'
|
||||
|
||||
@@ -36,6 +36,73 @@ class DownloadProgress:
|
||||
timestamp: float
|
||||
|
||||
|
||||
class DownloadStreamControl:
|
||||
"""Synchronize pause/resume requests and reconnect hints for a download."""
|
||||
|
||||
def __init__(self, *, stall_timeout: Optional[float] = None) -> None:
|
||||
self._event = asyncio.Event()
|
||||
self._event.set()
|
||||
self._reconnect_requested = False
|
||||
self.last_progress_timestamp: Optional[float] = None
|
||||
self.stall_timeout: float = float(stall_timeout) if stall_timeout is not None else 120.0
|
||||
|
||||
def is_set(self) -> bool:
|
||||
return self._event.is_set()
|
||||
|
||||
def is_paused(self) -> bool:
|
||||
return not self._event.is_set()
|
||||
|
||||
def set(self) -> None:
|
||||
self._event.set()
|
||||
|
||||
def clear(self) -> None:
|
||||
self._event.clear()
|
||||
|
||||
async def wait(self) -> None:
|
||||
await self._event.wait()
|
||||
|
||||
def pause(self) -> None:
|
||||
self.clear()
|
||||
|
||||
def resume(self, *, force_reconnect: bool = False) -> None:
|
||||
if force_reconnect:
|
||||
self._reconnect_requested = True
|
||||
self.set()
|
||||
|
||||
def request_reconnect(self) -> None:
|
||||
self._reconnect_requested = True
|
||||
self.set()
|
||||
|
||||
def has_reconnect_request(self) -> bool:
|
||||
return self._reconnect_requested
|
||||
|
||||
def consume_reconnect_request(self) -> bool:
|
||||
reconnect = self._reconnect_requested
|
||||
self._reconnect_requested = False
|
||||
return reconnect
|
||||
|
||||
def mark_progress(self, timestamp: Optional[float] = None) -> None:
|
||||
self.last_progress_timestamp = timestamp or datetime.now().timestamp()
|
||||
self._reconnect_requested = False
|
||||
|
||||
def time_since_last_progress(self, *, now: Optional[float] = None) -> Optional[float]:
|
||||
if self.last_progress_timestamp is None:
|
||||
return None
|
||||
reference = now if now is not None else datetime.now().timestamp()
|
||||
return max(0.0, reference - self.last_progress_timestamp)
|
||||
|
||||
def update_stall_timeout(self, stall_timeout: float) -> None:
|
||||
self.stall_timeout = float(stall_timeout)
|
||||
|
||||
|
||||
class DownloadRestartRequested(Exception):
|
||||
"""Raised when a caller explicitly requests a fresh HTTP stream."""
|
||||
|
||||
|
||||
class DownloadStalledError(Exception):
|
||||
"""Raised when download progress stalls beyond the configured timeout."""
|
||||
|
||||
|
||||
class Downloader:
|
||||
"""Unified downloader for all HTTP/HTTPS downloads in the application."""
|
||||
|
||||
@@ -67,10 +134,14 @@ class Downloader:
|
||||
self.max_retries = 5
|
||||
self.base_delay = 2.0 # Base delay for exponential backoff
|
||||
self.session_timeout = 300 # 5 minutes
|
||||
self.stall_timeout = self._resolve_stall_timeout()
|
||||
|
||||
# Default headers
|
||||
self.default_headers = {
|
||||
'User-Agent': 'ComfyUI-LoRA-Manager/1.0'
|
||||
'User-Agent': 'ComfyUI-LoRA-Manager/1.0',
|
||||
# Explicitly request uncompressed payloads so aiohttp doesn't need optional
|
||||
# decoders (e.g. zstandard) that may be missing in runtime environments.
|
||||
'Accept-Encoding': 'identity',
|
||||
}
|
||||
|
||||
@property
|
||||
@@ -79,14 +150,38 @@ class Downloader:
|
||||
if self._session is None or self._should_refresh_session():
|
||||
await self._create_session()
|
||||
return self._session
|
||||
|
||||
|
||||
@property
|
||||
def proxy_url(self) -> Optional[str]:
|
||||
"""Get the current proxy URL (initialize if needed)"""
|
||||
if not hasattr(self, '_proxy_url'):
|
||||
self._proxy_url = None
|
||||
return self._proxy_url
|
||||
|
||||
|
||||
def _resolve_stall_timeout(self) -> float:
|
||||
"""Determine the stall timeout from settings or environment."""
|
||||
default_timeout = 120.0
|
||||
settings_timeout = None
|
||||
|
||||
try:
|
||||
settings_manager = get_settings_manager()
|
||||
settings_timeout = settings_manager.get('download_stall_timeout_seconds')
|
||||
except Exception as exc: # pragma: no cover - defensive guard
|
||||
logger.debug("Failed to read stall timeout from settings: %s", exc)
|
||||
|
||||
raw_value = (
|
||||
settings_timeout
|
||||
if settings_timeout not in (None, "")
|
||||
else os.environ.get('COMFYUI_DOWNLOAD_STALL_TIMEOUT')
|
||||
)
|
||||
|
||||
try:
|
||||
timeout_value = float(raw_value)
|
||||
except (TypeError, ValueError):
|
||||
timeout_value = default_timeout
|
||||
|
||||
return max(30.0, timeout_value)
|
||||
|
||||
def _should_refresh_session(self) -> bool:
|
||||
"""Check if session should be refreshed"""
|
||||
if self._session is None:
|
||||
@@ -178,7 +273,7 @@ class Downloader:
|
||||
use_auth: bool = False,
|
||||
custom_headers: Optional[Dict[str, str]] = None,
|
||||
allow_resume: bool = True,
|
||||
pause_event: Optional[asyncio.Event] = None,
|
||||
pause_event: Optional[DownloadStreamControl] = None,
|
||||
) -> Tuple[bool, str]:
|
||||
"""
|
||||
Download a file with resumable downloads and retry mechanism
|
||||
@@ -190,7 +285,7 @@ class Downloader:
|
||||
use_auth: Whether to include authentication headers (e.g., CivitAI API key)
|
||||
custom_headers: Additional headers to include in request
|
||||
allow_resume: Whether to support resumable downloads
|
||||
pause_event: Optional event that, when cleared, will pause streaming until set again
|
||||
pause_event: Optional stream control used to pause/resume and request reconnects
|
||||
|
||||
Returns:
|
||||
Tuple[bool, str]: (success, save_path or error message)
|
||||
@@ -304,59 +399,144 @@ class Downloader:
|
||||
last_progress_report_time = datetime.now()
|
||||
progress_samples: deque[tuple[datetime, int]] = deque()
|
||||
progress_samples.append((last_progress_report_time, current_size))
|
||||
|
||||
|
||||
# Ensure directory exists
|
||||
os.makedirs(os.path.dirname(save_path), exist_ok=True)
|
||||
|
||||
|
||||
# Stream download to file with progress updates
|
||||
loop = asyncio.get_running_loop()
|
||||
mode = 'ab' if (allow_resume and resume_offset > 0) else 'wb'
|
||||
control = pause_event
|
||||
|
||||
if control is not None:
|
||||
control.update_stall_timeout(self.stall_timeout)
|
||||
|
||||
with open(part_path, mode) as f:
|
||||
async for chunk in response.content.iter_chunked(self.chunk_size):
|
||||
if pause_event is not None and not pause_event.is_set():
|
||||
await pause_event.wait()
|
||||
if chunk:
|
||||
# Run blocking file write in executor
|
||||
await loop.run_in_executor(None, f.write, chunk)
|
||||
current_size += len(chunk)
|
||||
while True:
|
||||
active_stall_timeout = control.stall_timeout if control else self.stall_timeout
|
||||
|
||||
# Limit progress update frequency to reduce overhead
|
||||
now = datetime.now()
|
||||
time_diff = (now - last_progress_report_time).total_seconds()
|
||||
if control is not None:
|
||||
if control.is_paused():
|
||||
await control.wait()
|
||||
resume_time = datetime.now()
|
||||
last_progress_report_time = resume_time
|
||||
if control.consume_reconnect_request():
|
||||
raise DownloadRestartRequested(
|
||||
"Reconnect requested after resume"
|
||||
)
|
||||
elif control.consume_reconnect_request():
|
||||
raise DownloadRestartRequested("Reconnect requested")
|
||||
|
||||
if progress_callback and time_diff >= 1.0:
|
||||
progress_samples.append((now, current_size))
|
||||
cutoff = now - timedelta(seconds=5)
|
||||
while progress_samples and progress_samples[0][0] < cutoff:
|
||||
progress_samples.popleft()
|
||||
try:
|
||||
chunk = await asyncio.wait_for(
|
||||
response.content.read(self.chunk_size),
|
||||
timeout=active_stall_timeout,
|
||||
)
|
||||
except asyncio.TimeoutError as exc:
|
||||
logger.warning(
|
||||
"Download stalled for %.1f seconds without progress from %s",
|
||||
active_stall_timeout,
|
||||
url,
|
||||
)
|
||||
raise DownloadStalledError(
|
||||
f"No data received for {active_stall_timeout:.1f} seconds"
|
||||
) from exc
|
||||
|
||||
percent = (current_size / total_size) * 100 if total_size else 0.0
|
||||
bytes_per_second = 0.0
|
||||
if len(progress_samples) >= 2:
|
||||
first_time, first_bytes = progress_samples[0]
|
||||
last_time, last_bytes = progress_samples[-1]
|
||||
elapsed = (last_time - first_time).total_seconds()
|
||||
if elapsed > 0:
|
||||
bytes_per_second = (last_bytes - first_bytes) / elapsed
|
||||
if not chunk:
|
||||
break
|
||||
|
||||
progress_snapshot = DownloadProgress(
|
||||
percent_complete=percent,
|
||||
bytes_downloaded=current_size,
|
||||
total_bytes=total_size or None,
|
||||
bytes_per_second=bytes_per_second,
|
||||
timestamp=now.timestamp(),
|
||||
)
|
||||
# Run blocking file write in executor
|
||||
await loop.run_in_executor(None, f.write, chunk)
|
||||
current_size += len(chunk)
|
||||
|
||||
await self._dispatch_progress_callback(progress_callback, progress_snapshot)
|
||||
last_progress_report_time = now
|
||||
now = datetime.now()
|
||||
if control is not None:
|
||||
control.mark_progress(timestamp=now.timestamp())
|
||||
|
||||
# Limit progress update frequency to reduce overhead
|
||||
time_diff = (now - last_progress_report_time).total_seconds()
|
||||
|
||||
if progress_callback and time_diff >= 1.0:
|
||||
progress_samples.append((now, current_size))
|
||||
cutoff = now - timedelta(seconds=5)
|
||||
while progress_samples and progress_samples[0][0] < cutoff:
|
||||
progress_samples.popleft()
|
||||
|
||||
percent = (current_size / total_size) * 100 if total_size else 0.0
|
||||
bytes_per_second = 0.0
|
||||
if len(progress_samples) >= 2:
|
||||
first_time, first_bytes = progress_samples[0]
|
||||
last_time, last_bytes = progress_samples[-1]
|
||||
elapsed = (last_time - first_time).total_seconds()
|
||||
if elapsed > 0:
|
||||
bytes_per_second = (last_bytes - first_bytes) / elapsed
|
||||
|
||||
progress_snapshot = DownloadProgress(
|
||||
percent_complete=percent,
|
||||
bytes_downloaded=current_size,
|
||||
total_bytes=total_size or None,
|
||||
bytes_per_second=bytes_per_second,
|
||||
timestamp=now.timestamp(),
|
||||
)
|
||||
|
||||
await self._dispatch_progress_callback(progress_callback, progress_snapshot)
|
||||
last_progress_report_time = now
|
||||
|
||||
# Download completed successfully
|
||||
# Verify file size if total_size was provided
|
||||
final_size = os.path.getsize(part_path)
|
||||
if total_size > 0 and final_size != total_size:
|
||||
logger.warning(f"File size mismatch. Expected: {total_size}, Got: {final_size}")
|
||||
# Don't treat this as fatal error, continue anyway
|
||||
|
||||
# Verify file size integrity before finalizing
|
||||
final_size = os.path.getsize(part_path) if os.path.exists(part_path) else 0
|
||||
expected_size = total_size if total_size > 0 else None
|
||||
|
||||
integrity_error: Optional[str] = None
|
||||
if final_size <= 0:
|
||||
integrity_error = "Downloaded file is empty"
|
||||
elif expected_size is not None and final_size != expected_size:
|
||||
integrity_error = (
|
||||
f"File size mismatch. Expected: {expected_size}, Got: {final_size}"
|
||||
)
|
||||
|
||||
if integrity_error is not None:
|
||||
logger.error(
|
||||
"Download integrity check failed for %s: %s",
|
||||
save_path,
|
||||
integrity_error,
|
||||
)
|
||||
|
||||
# Remove the corrupted payload so future attempts start fresh
|
||||
if os.path.exists(part_path):
|
||||
try:
|
||||
os.remove(part_path)
|
||||
except OSError as remove_error:
|
||||
logger.warning(
|
||||
"Failed to delete corrupted download %s: %s",
|
||||
part_path,
|
||||
remove_error,
|
||||
)
|
||||
if part_path != save_path and os.path.exists(save_path):
|
||||
try:
|
||||
os.remove(save_path)
|
||||
except OSError as remove_error:
|
||||
logger.warning(
|
||||
"Failed to delete target file %s after integrity error: %s",
|
||||
save_path,
|
||||
remove_error,
|
||||
)
|
||||
|
||||
retry_count += 1
|
||||
if retry_count <= self.max_retries:
|
||||
delay = self.base_delay * (2 ** (retry_count - 1))
|
||||
logger.info(
|
||||
"Retrying download in %s seconds due to integrity check failure",
|
||||
delay,
|
||||
)
|
||||
await asyncio.sleep(delay)
|
||||
resume_offset = 0
|
||||
total_size = 0
|
||||
await self._create_session()
|
||||
continue
|
||||
|
||||
return False, integrity_error
|
||||
|
||||
# Atomically rename .part to final file (only if using resume)
|
||||
if allow_resume and part_path != save_path:
|
||||
max_rename_attempts = 5
|
||||
@@ -379,7 +559,9 @@ class Downloader:
|
||||
else:
|
||||
logger.error(f"Failed to rename file after {max_rename_attempts} attempts: {e}")
|
||||
return False, f"Failed to finalize download: {str(e)}"
|
||||
|
||||
|
||||
final_size = os.path.getsize(save_path)
|
||||
|
||||
# Ensure 100% progress is reported
|
||||
if progress_callback:
|
||||
final_snapshot = DownloadProgress(
|
||||
@@ -394,11 +576,17 @@ class Downloader:
|
||||
|
||||
return True, save_path
|
||||
|
||||
except (aiohttp.ClientError, aiohttp.ClientPayloadError,
|
||||
aiohttp.ServerDisconnectedError, asyncio.TimeoutError) as e:
|
||||
except (
|
||||
aiohttp.ClientError,
|
||||
aiohttp.ClientPayloadError,
|
||||
aiohttp.ServerDisconnectedError,
|
||||
asyncio.TimeoutError,
|
||||
DownloadStalledError,
|
||||
DownloadRestartRequested,
|
||||
) as e:
|
||||
retry_count += 1
|
||||
logger.warning(f"Network error during download (attempt {retry_count}/{self.max_retries + 1}): {e}")
|
||||
|
||||
|
||||
if retry_count <= self.max_retries:
|
||||
# Calculate delay with exponential backoff
|
||||
delay = self.base_delay * (2 ** (retry_count - 1))
|
||||
|
||||
@@ -38,6 +38,7 @@ class EmbeddingService(BaseModelService):
|
||||
"notes": embedding_data.get("notes", ""),
|
||||
"model_type": embedding_data.get("model_type", "embedding"),
|
||||
"favorite": embedding_data.get("favorite", False),
|
||||
"update_available": bool(embedding_data.get("update_available", False)),
|
||||
"civitai": self.filter_civitai_data(embedding_data.get("civitai", {}), minimal=True)
|
||||
}
|
||||
|
||||
|
||||
@@ -19,3 +19,9 @@ class RateLimitError(RuntimeError):
|
||||
self.retry_after = retry_after
|
||||
self.provider = provider
|
||||
|
||||
|
||||
class ResourceNotFoundError(RuntimeError):
|
||||
"""Raised when a remote resource is permanently missing."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
@@ -38,6 +38,7 @@ class LoraService(BaseModelService):
|
||||
"usage_tips": lora_data.get("usage_tips", ""),
|
||||
"notes": lora_data.get("notes", ""),
|
||||
"favorite": lora_data.get("favorite", False),
|
||||
"update_available": bool(lora_data.get("update_available", False)),
|
||||
"civitai": self.filter_civitai_data(lora_data.get("civitai", {}), minimal=True)
|
||||
}
|
||||
|
||||
|
||||
@@ -2,11 +2,12 @@ import os
|
||||
import logging
|
||||
from .model_metadata_provider import (
|
||||
ModelMetadataProvider,
|
||||
ModelMetadataProviderManager,
|
||||
ModelMetadataProviderManager,
|
||||
SQLiteModelMetadataProvider,
|
||||
CivitaiModelMetadataProvider,
|
||||
CivArchiveModelMetadataProvider,
|
||||
FallbackMetadataProvider
|
||||
FallbackMetadataProvider,
|
||||
RateLimitRetryingProvider,
|
||||
)
|
||||
from .settings_manager import get_settings_manager
|
||||
from .metadata_archive_manager import MetadataArchiveManager
|
||||
@@ -108,14 +109,24 @@ async def get_metadata_archive_manager():
|
||||
base_path = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
|
||||
return MetadataArchiveManager(base_path)
|
||||
|
||||
def _wrap_provider_with_rate_limit(provider_name: str | None, provider: ModelMetadataProvider) -> ModelMetadataProvider:
    """Wrap ``provider`` so that its calls retry after rate limiting.

    Providers that already handle rate limiting themselves (fallback
    chains and previously wrapped adapters) are returned unchanged.
    """
    already_handles_rate_limit = isinstance(
        provider, (FallbackMetadataProvider, RateLimitRetryingProvider)
    )
    if already_handles_rate_limit:
        return provider
    return RateLimitRetryingProvider(provider, label=provider_name)
|
||||
|
||||
|
||||
async def get_metadata_provider(provider_name: str = None):
|
||||
"""Get a specific metadata provider or default provider"""
|
||||
"""Get a specific metadata provider or default provider with rate-limit handling."""
|
||||
|
||||
provider_manager = await ModelMetadataProviderManager.get_instance()
|
||||
|
||||
if provider_name:
|
||||
return provider_manager._get_provider(provider_name)
|
||||
|
||||
return provider_manager._get_provider()
|
||||
|
||||
provider = (
|
||||
provider_manager._get_provider(provider_name)
|
||||
if provider_name
|
||||
else provider_manager._get_provider()
|
||||
)
|
||||
|
||||
return _wrap_provider_with_rate_limit(provider_name, provider)
|
||||
|
||||
async def get_default_metadata_provider():
|
||||
"""Get the default metadata provider (fallback or single provider)"""
|
||||
|
||||
@@ -9,6 +9,7 @@ from datetime import datetime
|
||||
from typing import Any, Awaitable, Callable, Dict, Iterable, Optional
|
||||
|
||||
from ..services.settings_manager import SettingsManager
|
||||
from ..utils.civitai_utils import resolve_license_payload
|
||||
from ..utils.model_utils import determine_base_model
|
||||
from .errors import RateLimitError
|
||||
|
||||
@@ -135,6 +136,17 @@ class MetadataSyncService:
|
||||
):
|
||||
local_metadata.setdefault("civitai", {})["creator"] = model_data["creator"]
|
||||
|
||||
merged_civitai = local_metadata.get("civitai") or {}
|
||||
civitai_model = merged_civitai.get("model")
|
||||
if not isinstance(civitai_model, dict):
|
||||
civitai_model = {}
|
||||
|
||||
license_payload = resolve_license_payload(model_data)
|
||||
civitai_model.update(license_payload)
|
||||
|
||||
merged_civitai["model"] = civitai_model
|
||||
local_metadata["civitai"] = merged_civitai
|
||||
|
||||
local_metadata["base_model"] = determine_base_model(
|
||||
civitai_metadata.get("baseModel")
|
||||
)
|
||||
@@ -202,6 +214,7 @@ class MetadataSyncService:
|
||||
metadata_provider: Optional[MetadataProviderProtocol] = None
|
||||
provider_used: Optional[str] = None
|
||||
last_error: Optional[str] = None
|
||||
civitai_api_not_found = False
|
||||
|
||||
for provider_name, provider in provider_attempts:
|
||||
try:
|
||||
@@ -216,19 +229,24 @@ class MetadataSyncService:
|
||||
if provider_name == "sqlite":
|
||||
sqlite_attempted = True
|
||||
|
||||
is_default_provider = provider_name is None
|
||||
|
||||
if civitai_metadata_candidate:
|
||||
civitai_metadata = civitai_metadata_candidate
|
||||
metadata_provider = provider
|
||||
provider_used = provider_name
|
||||
break
|
||||
|
||||
if is_default_provider and error == "Model not found":
|
||||
civitai_api_not_found = True
|
||||
|
||||
last_error = error or last_error
|
||||
|
||||
if civitai_metadata is None or metadata_provider is None:
|
||||
if sqlite_attempted:
|
||||
model_data["db_checked"] = True
|
||||
|
||||
if last_error == "Model not found":
|
||||
if civitai_api_not_found:
|
||||
model_data["from_civitai"] = False
|
||||
model_data["civitai_deleted"] = True
|
||||
model_data["db_checked"] = sqlite_attempted or (enable_archive and model_data.get("db_checked", False))
|
||||
@@ -254,7 +272,10 @@ class MetadataSyncService:
|
||||
return False, error_msg
|
||||
|
||||
model_data["from_civitai"] = True
|
||||
model_data["civitai_deleted"] = civitai_metadata.get("source") == "archive_db" or civitai_metadata.get("source") == "civarchive"
|
||||
if provider_used is None:
|
||||
model_data["civitai_deleted"] = False
|
||||
elif civitai_api_not_found:
|
||||
model_data["civitai_deleted"] = True
|
||||
model_data["db_checked"] = enable_archive and (
|
||||
civitai_metadata.get("source") == "archive_db" or sqlite_attempted
|
||||
)
|
||||
@@ -295,6 +316,7 @@ class MetadataSyncService:
|
||||
"preview_url": local_metadata.get("preview_url"),
|
||||
"civitai": local_metadata.get("civitai"),
|
||||
}
|
||||
|
||||
model_data.update(update_payload)
|
||||
|
||||
await update_cache_func(file_path, file_path, local_metadata)
|
||||
@@ -344,15 +366,6 @@ class MetadataSyncService:
|
||||
+ (f" with version: {model_version_id}" if model_version_id else "")
|
||||
)
|
||||
|
||||
primary_model_file: Optional[Dict[str, Any]] = None
|
||||
for file_info in civitai_metadata.get("files", []):
|
||||
if file_info.get("primary", False) and file_info.get("type") == "Model":
|
||||
primary_model_file = file_info
|
||||
break
|
||||
|
||||
if primary_model_file and primary_model_file.get("hashes", {}).get("SHA256"):
|
||||
metadata["sha256"] = primary_model_file["hashes"]["SHA256"].lower()
|
||||
|
||||
metadata_path = os.path.splitext(file_path)[0] + ".metadata.json"
|
||||
await self.update_model_metadata(
|
||||
metadata_path,
|
||||
@@ -445,4 +458,3 @@ class MetadataSyncService:
|
||||
results["verified_as_duplicates"] = False
|
||||
|
||||
return results
|
||||
|
||||
|
||||
@@ -15,6 +15,9 @@ SUPPORTED_SORT_MODES = [
|
||||
('size', 'desc'),
|
||||
]
|
||||
|
||||
DISPLAY_NAME_MODES = {"model_name", "file_name"}
|
||||
|
||||
|
||||
@dataclass
|
||||
class ModelCache:
|
||||
"""Cache structure for model data with extensible sorting."""
|
||||
@@ -22,16 +25,65 @@ class ModelCache:
|
||||
raw_data: List[Dict]
|
||||
folders: List[str]
|
||||
version_index: Dict[int, Dict] = field(default_factory=dict)
|
||||
model_id_index: Dict[int, List[Dict[str, Any]]] = field(default_factory=dict)
|
||||
name_display_mode: str = "model_name"
|
||||
|
||||
def __post_init__(self):
|
||||
self._lock = asyncio.Lock()
|
||||
# Cache for last sort: (sort_key, order) -> sorted list
|
||||
self._last_sort: Tuple[str, str] = (None, None)
|
||||
self._last_sorted_data: List[Dict] = []
|
||||
self._normalize_raw_data()
|
||||
self.name_display_mode = self._normalize_display_mode(self.name_display_mode)
|
||||
# Default sort on init
|
||||
asyncio.create_task(self.resort())
|
||||
self.rebuild_version_index()
|
||||
|
||||
@staticmethod
def _normalize_display_mode(value: Optional[str]) -> str:
    """Coerce ``value`` to a supported display mode.

    Any value that is not one of the known modes falls back to the
    default ``"model_name"``.
    """
    is_valid_mode = isinstance(value, str) and value in DISPLAY_NAME_MODES
    return value if is_valid_mode else "model_name"
|
||||
|
||||
@staticmethod
|
||||
def _ensure_string(value: Any) -> str:
|
||||
"""Return a safe string representation for metadata fields."""
|
||||
|
||||
if isinstance(value, str):
|
||||
return value
|
||||
if value is None:
|
||||
return ""
|
||||
return str(value)
|
||||
|
||||
def _normalize_item(self, item: Dict) -> None:
|
||||
"""Ensure core metadata fields are present and string typed."""
|
||||
|
||||
if not isinstance(item, dict):
|
||||
return
|
||||
|
||||
for field in ("model_name", "file_name", "folder"):
|
||||
if field in item:
|
||||
item[field] = self._ensure_string(item.get(field))
|
||||
|
||||
def _normalize_raw_data(self) -> None:
|
||||
"""Normalize every cached entry before it is consumed."""
|
||||
|
||||
for item in self.raw_data:
|
||||
self._normalize_item(item)
|
||||
|
||||
def _get_display_name(self, item: Dict) -> str:
|
||||
"""Return the value used for name-based sorting based on display settings."""
|
||||
|
||||
if self.name_display_mode == "file_name":
|
||||
primary = self._ensure_string(item.get("file_name"))
|
||||
fallback = self._ensure_string(item.get("model_name"))
|
||||
else:
|
||||
primary = self._ensure_string(item.get("model_name"))
|
||||
fallback = self._ensure_string(item.get("file_name"))
|
||||
|
||||
candidate = primary or fallback
|
||||
return candidate or ""
|
||||
|
||||
@staticmethod
|
||||
def _normalize_version_id(value: Any) -> Optional[int]:
|
||||
"""Normalize a potential version identifier into an integer."""
|
||||
@@ -46,14 +98,15 @@ class ModelCache:
|
||||
return None
|
||||
|
||||
def rebuild_version_index(self) -> None:
|
||||
"""Rebuild the version index from the current raw data."""
|
||||
"""Rebuild the version and model indexes from the current raw data."""
|
||||
|
||||
self.version_index = {}
|
||||
self.model_id_index = {}
|
||||
for item in self.raw_data:
|
||||
self.add_to_version_index(item)
|
||||
|
||||
def add_to_version_index(self, item: Dict) -> None:
|
||||
"""Register a cache item in the version index if possible."""
|
||||
"""Register a cache item in the version/model indexes if possible."""
|
||||
|
||||
civitai_data = item.get('civitai') if isinstance(item, dict) else None
|
||||
if not isinstance(civitai_data, dict):
|
||||
@@ -65,8 +118,24 @@ class ModelCache:
|
||||
|
||||
self.version_index[version_id] = item
|
||||
|
||||
model_id = self._normalize_version_id(civitai_data.get('modelId'))
|
||||
if model_id is None:
|
||||
return
|
||||
|
||||
descriptor = self._build_version_descriptor(item, civitai_data, version_id)
|
||||
if descriptor is None:
|
||||
return
|
||||
|
||||
versions = self.model_id_index.setdefault(model_id, [])
|
||||
for index, existing in enumerate(versions):
|
||||
if existing.get('versionId') == descriptor['versionId']:
|
||||
versions[index] = descriptor
|
||||
break
|
||||
else:
|
||||
versions.append(descriptor)
|
||||
|
||||
def remove_from_version_index(self, item: Dict) -> None:
|
||||
"""Remove a cache item from the version index if present."""
|
||||
"""Remove a cache item from the version/model indexes if present."""
|
||||
|
||||
civitai_data = item.get('civitai') if isinstance(item, dict) else None
|
||||
if not isinstance(civitai_data, dict):
|
||||
@@ -83,6 +152,46 @@ class ModelCache:
|
||||
):
|
||||
self.version_index.pop(version_id, None)
|
||||
|
||||
model_id = self._normalize_version_id(civitai_data.get('modelId'))
|
||||
if model_id is None:
|
||||
return
|
||||
|
||||
versions = self.model_id_index.get(model_id)
|
||||
if not versions:
|
||||
return
|
||||
|
||||
filtered = [v for v in versions if v.get('versionId') != version_id]
|
||||
if filtered:
|
||||
self.model_id_index[model_id] = filtered
|
||||
else:
|
||||
self.model_id_index.pop(model_id, None)
|
||||
|
||||
def _build_version_descriptor(
|
||||
self,
|
||||
item: Dict,
|
||||
civitai_data: Dict[str, Any],
|
||||
version_id: int,
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Create a lightweight descriptor for a version entry."""
|
||||
|
||||
model_name = self._ensure_string(civitai_data.get('name'))
|
||||
file_name = self._ensure_string(item.get('file_name'))
|
||||
return {
|
||||
'versionId': version_id,
|
||||
'name': model_name,
|
||||
'fileName': file_name,
|
||||
}
|
||||
|
||||
def get_versions_by_model_id(self, model_id: Any) -> List[Dict[str, Any]]:
|
||||
"""Return cached version descriptors for a given model ID."""
|
||||
|
||||
normalized_id = self._normalize_version_id(model_id)
|
||||
if normalized_id is None:
|
||||
return []
|
||||
|
||||
versions = self.model_id_index.get(normalized_id, [])
|
||||
return [dict(version) for version in versions]
|
||||
|
||||
async def resort(self):
|
||||
"""Resort cached data according to last sort mode if set"""
|
||||
async with self._lock:
|
||||
@@ -93,7 +202,11 @@ class ModelCache:
|
||||
# Update folder list
|
||||
# else: do nothing
|
||||
|
||||
all_folders = set(l['folder'] for l in self.raw_data)
|
||||
all_folders = {
|
||||
self._ensure_string(item.get('folder'))
|
||||
for item in self.raw_data
|
||||
if isinstance(item, dict)
|
||||
}
|
||||
self.folders = sorted(list(all_folders), key=lambda x: x.lower())
|
||||
self.rebuild_version_index()
|
||||
|
||||
@@ -101,10 +214,10 @@ class ModelCache:
|
||||
"""Sort data by sort_key and order"""
|
||||
reverse = (order == 'desc')
|
||||
if sort_key == 'name':
|
||||
# Natural sort by model_name, case-insensitive
|
||||
# Natural sort by configured display name, case-insensitive
|
||||
return natsorted(
|
||||
data,
|
||||
key=lambda x: x['model_name'].lower(),
|
||||
key=lambda x: self._get_display_name(x).lower(),
|
||||
reverse=reverse
|
||||
)
|
||||
elif sort_key == 'date':
|
||||
@@ -135,6 +248,20 @@ class ModelCache:
|
||||
self._last_sorted_data = sorted_data
|
||||
return sorted_data
|
||||
|
||||
async def update_name_display_mode(self, display_mode: str) -> None:
|
||||
"""Update the display mode used for name sorting and refresh cached results."""
|
||||
|
||||
normalized = self._normalize_display_mode(display_mode)
|
||||
async with self._lock:
|
||||
if self.name_display_mode == normalized:
|
||||
return
|
||||
|
||||
self.name_display_mode = normalized
|
||||
|
||||
if self._last_sort[0] == 'name':
|
||||
sort_key, order = self._last_sort
|
||||
self._last_sorted_data = self._sort_data(self.raw_data, sort_key, order)
|
||||
|
||||
async def update_preview_url(self, file_path: str, preview_url: str, preview_nsfw_level: int) -> bool:
|
||||
"""Update preview_url for a specific model in all cached data
|
||||
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import asyncio
|
||||
import fnmatch
|
||||
import os
|
||||
import logging
|
||||
from typing import List, Dict, Optional, Any, Set
|
||||
from typing import Any, Dict, List, Optional, Sequence, Set
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
from ..utils.utils import calculate_relative_path_for_model, remove_empty_dirs
|
||||
@@ -79,9 +80,10 @@ class ModelFileService:
|
||||
return self.scanner.get_model_roots()
|
||||
|
||||
async def auto_organize_models(
|
||||
self,
|
||||
self,
|
||||
file_paths: Optional[List[str]] = None,
|
||||
progress_callback: Optional[ProgressCallback] = None
|
||||
progress_callback: Optional[ProgressCallback] = None,
|
||||
exclusion_patterns: Optional[Sequence[str]] = None,
|
||||
) -> AutoOrganizeResult:
|
||||
"""Auto-organize models based on current settings
|
||||
|
||||
@@ -100,6 +102,13 @@ class ModelFileService:
|
||||
# Get all models from cache
|
||||
cache = await self.scanner.get_cached_data()
|
||||
all_models = cache.raw_data
|
||||
|
||||
settings_manager = get_settings_manager()
|
||||
normalized_exclusions = settings_manager.normalize_auto_organize_exclusions(
|
||||
exclusion_patterns
|
||||
if exclusion_patterns is not None
|
||||
else settings_manager.get_auto_organize_exclusions()
|
||||
)
|
||||
|
||||
# Filter models if specific file paths are provided
|
||||
if file_paths:
|
||||
@@ -107,11 +116,19 @@ class ModelFileService:
|
||||
result.operation_type = 'bulk'
|
||||
else:
|
||||
result.operation_type = 'all'
|
||||
|
||||
# Get model roots for this scanner
|
||||
|
||||
model_roots = self.get_model_roots()
|
||||
if not model_roots:
|
||||
raise ValueError('No model roots configured')
|
||||
|
||||
if normalized_exclusions:
|
||||
all_models = [
|
||||
model
|
||||
for model in all_models
|
||||
if not self._should_exclude_model(
|
||||
model.get('file_path'), normalized_exclusions, model_roots
|
||||
)
|
||||
]
|
||||
|
||||
# Check if flat structure is configured for this model type
|
||||
settings_manager = get_settings_manager()
|
||||
@@ -133,7 +150,34 @@ class ModelFileService:
|
||||
'skipped': 0,
|
||||
'operation_type': result.operation_type
|
||||
})
|
||||
|
||||
|
||||
if result.total == 0:
|
||||
if progress_callback:
|
||||
await asyncio.sleep(0.1)
|
||||
payload = {
|
||||
'type': 'auto_organize_progress',
|
||||
'total': 0,
|
||||
'processed': 0,
|
||||
'success': 0,
|
||||
'failures': 0,
|
||||
'skipped': 0,
|
||||
'operation_type': result.operation_type
|
||||
}
|
||||
await progress_callback.on_progress({**payload, 'status': 'processing'})
|
||||
await progress_callback.on_progress({
|
||||
**payload,
|
||||
'status': 'cleaning',
|
||||
'message': 'Cleaning up empty directories...'
|
||||
})
|
||||
result.cleanup_counts = {}
|
||||
await progress_callback.on_progress({
|
||||
**payload,
|
||||
'status': 'completed',
|
||||
'cleanup': result.cleanup_counts
|
||||
})
|
||||
|
||||
return result
|
||||
|
||||
# Process models in batches
|
||||
await self._process_models_in_batches(
|
||||
all_models,
|
||||
@@ -301,10 +345,43 @@ class ModelFileService:
|
||||
# Normalize paths for comparison
|
||||
normalized_root = os.path.normpath(root).replace(os.sep, '/')
|
||||
normalized_file = os.path.normpath(file_path).replace(os.sep, '/')
|
||||
|
||||
|
||||
if normalized_file.startswith(normalized_root):
|
||||
return root
|
||||
return None
|
||||
|
||||
def _should_exclude_model(
|
||||
self,
|
||||
file_path: Optional[str],
|
||||
patterns: Sequence[str],
|
||||
model_roots: Sequence[str],
|
||||
) -> bool:
|
||||
if not file_path or not patterns:
|
||||
return False
|
||||
|
||||
normalized_path = os.path.normpath(file_path).replace(os.sep, '/')
|
||||
filename = os.path.basename(normalized_path)
|
||||
relative_path = None
|
||||
|
||||
if model_roots:
|
||||
root = self._find_model_root(file_path, list(model_roots))
|
||||
if root:
|
||||
normalized_root = os.path.normpath(root)
|
||||
try:
|
||||
relative = os.path.relpath(file_path, normalized_root)
|
||||
except ValueError:
|
||||
relative = None
|
||||
if relative is not None:
|
||||
relative_path = relative.replace(os.sep, '/')
|
||||
|
||||
for pattern in patterns:
|
||||
if fnmatch.fnmatch(filename, pattern):
|
||||
return True
|
||||
if relative_path and fnmatch.fnmatch(relative_path, pattern):
|
||||
return True
|
||||
if fnmatch.fnmatch(normalized_path, pattern):
|
||||
return True
|
||||
return False
|
||||
|
||||
async def _calculate_target_directory(
|
||||
self,
|
||||
@@ -461,4 +538,4 @@ class ModelMoveService:
|
||||
'results': [],
|
||||
'success_count': 0,
|
||||
'failure_count': len(file_paths)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,26 +4,29 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import Awaitable, Callable, Dict, Iterable, List, Optional
|
||||
from typing import Any, Awaitable, Callable, Dict, Iterable, List, Mapping, Optional, TYPE_CHECKING
|
||||
|
||||
from ..services.service_registry import ServiceRegistry
|
||||
from ..utils.constants import PREVIEW_EXTENSIONS
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..services.model_update_service import ModelUpdateService
|
||||
|
||||
async def delete_model_artifacts(target_dir: str, file_name: str) -> List[str]:
|
||||
|
||||
async def delete_model_artifacts(
|
||||
target_dir: str, file_name: str, main_extension: str | None = None
|
||||
) -> List[str]:
|
||||
"""Delete the primary model artefacts within ``target_dir``."""
|
||||
|
||||
patterns = [
|
||||
f"{file_name}.safetensors",
|
||||
f"{file_name}.metadata.json",
|
||||
]
|
||||
main_extension = ".safetensors" if main_extension is None else main_extension
|
||||
main_file = f"{file_name}{main_extension}" if main_extension else file_name
|
||||
patterns = [main_file, f"{file_name}.metadata.json"]
|
||||
for ext in PREVIEW_EXTENSIONS:
|
||||
patterns.append(f"{file_name}{ext}")
|
||||
|
||||
deleted: List[str] = []
|
||||
main_file = patterns[0]
|
||||
main_path = os.path.join(target_dir, main_file).replace(os.sep, "/")
|
||||
|
||||
if os.path.exists(main_path):
|
||||
@@ -54,6 +57,7 @@ class ModelLifecycleService:
|
||||
metadata_manager,
|
||||
metadata_loader: Callable[[str], Awaitable[Dict[str, object]]],
|
||||
recipe_scanner_factory: Callable[[], Awaitable] | None = None,
|
||||
update_service: "ModelUpdateService" | None = None,
|
||||
) -> None:
|
||||
self._scanner = scanner
|
||||
self._metadata_manager = metadata_manager
|
||||
@@ -61,6 +65,7 @@ class ModelLifecycleService:
|
||||
self._recipe_scanner_factory = (
|
||||
recipe_scanner_factory or ServiceRegistry.get_recipe_scanner
|
||||
)
|
||||
self._update_service = update_service
|
||||
|
||||
async def delete_model(self, file_path: str) -> Dict[str, object]:
|
||||
"""Delete a model file and associated artefacts."""
|
||||
@@ -68,20 +73,103 @@ class ModelLifecycleService:
|
||||
if not file_path:
|
||||
raise ValueError("Model path is required")
|
||||
|
||||
target_dir = os.path.dirname(file_path)
|
||||
file_name = os.path.splitext(os.path.basename(file_path))[0]
|
||||
|
||||
deleted_files = await delete_model_artifacts(target_dir, file_name)
|
||||
|
||||
cache = await self._scanner.get_cached_data()
|
||||
cache.raw_data = [item for item in cache.raw_data if item["file_path"] != file_path]
|
||||
await cache.resort()
|
||||
|
||||
cached_entry = None
|
||||
if cache and hasattr(cache, "raw_data"):
|
||||
cached_entry = next(
|
||||
(item for item in cache.raw_data if item.get("file_path") == file_path),
|
||||
None,
|
||||
)
|
||||
|
||||
metadata_payload = {}
|
||||
try:
|
||||
metadata_payload = await self._metadata_manager.load_metadata_payload(file_path)
|
||||
except Exception as exc: # pragma: no cover - defensive guard
|
||||
logger.debug("Failed to load metadata payload for %s: %s", file_path, exc)
|
||||
|
||||
model_id = (
|
||||
self._extract_model_id_from_payload(metadata_payload)
|
||||
or self._extract_model_id_from_payload(cached_entry)
|
||||
)
|
||||
|
||||
target_dir = os.path.dirname(file_path)
|
||||
base_name = os.path.basename(file_path)
|
||||
file_name, main_extension = os.path.splitext(base_name)
|
||||
deleted_files = await delete_model_artifacts(
|
||||
target_dir, file_name, main_extension=main_extension
|
||||
)
|
||||
|
||||
if cache:
|
||||
cache.raw_data = [
|
||||
item for item in cache.raw_data if item.get("file_path") != file_path
|
||||
]
|
||||
await cache.resort()
|
||||
|
||||
if hasattr(self._scanner, "_hash_index") and self._scanner._hash_index:
|
||||
self._scanner._hash_index.remove_by_path(file_path)
|
||||
|
||||
await self._sync_update_for_model(model_id)
|
||||
return {"success": True, "deleted_files": deleted_files}
|
||||
|
||||
@staticmethod
def _extract_model_id_from_payload(payload: Any) -> Optional[int]:
    """Extract a Civitai model id from a metadata payload.

    Lookup order: ``civitai.modelId`` / ``civitai.model_id``, then the
    nested ``civitai.model.id``, and finally the top-level ``model_id``
    or ``civitai_model_id`` fields. Returns ``None`` when nothing
    coercible to ``int`` is found or the payload is not a mapping.
    """
    if not isinstance(payload, Mapping):
        return None

    civitai_section = payload.get("civitai")
    if isinstance(civitai_section, Mapping):
        candidate = civitai_section.get("modelId") or civitai_section.get("model_id")
        if candidate is None:
            nested_model = civitai_section.get("model")
            if isinstance(nested_model, Mapping):
                candidate = nested_model.get("id")
        coerced = ModelLifecycleService._coerce_int(candidate)
        if coerced is not None:
            return coerced

    top_level = payload.get("model_id") or payload.get("civitai_model_id")
    return ModelLifecycleService._coerce_int(top_level)
|
||||
|
||||
@staticmethod
|
||||
def _coerce_int(value: Any) -> Optional[int]:
|
||||
try:
|
||||
return int(value)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
async def _sync_update_for_model(self, model_id: Optional[int]) -> None:
|
||||
if self._update_service is None or model_id is None:
|
||||
return
|
||||
|
||||
try:
|
||||
versions = await self._scanner.get_model_versions_by_id(model_id)
|
||||
except Exception as exc: # pragma: no cover - defensive log
|
||||
logger.debug(
|
||||
"Failed to collect local versions for model %s: %s", model_id, exc
|
||||
)
|
||||
versions = []
|
||||
|
||||
version_ids = set()
|
||||
for version in versions or []:
|
||||
candidate = (
|
||||
version.get("versionId")
|
||||
or version.get("id")
|
||||
or version.get("version_id")
|
||||
)
|
||||
normalized = ModelLifecycleService._coerce_int(candidate)
|
||||
if normalized is not None:
|
||||
version_ids.add(normalized)
|
||||
|
||||
try:
|
||||
await self._update_service.update_in_library_versions(
|
||||
self._scanner.model_type,
|
||||
model_id,
|
||||
sorted(version_ids),
|
||||
)
|
||||
except Exception as exc: # pragma: no cover - defensive log
|
||||
logger.debug(
|
||||
"Failed to sync update record for model %s: %s", model_id, exc
|
||||
)
|
||||
|
||||
async def exclude_model(self, file_path: str) -> Dict[str, object]:
|
||||
"""Mark a model as excluded and prune cache references."""
|
||||
|
||||
@@ -146,16 +234,19 @@ class ModelLifecycleService:
|
||||
raise ValueError("Invalid characters in file name")
|
||||
|
||||
target_dir = os.path.dirname(file_path)
|
||||
old_file_name = os.path.splitext(os.path.basename(file_path))[0]
|
||||
new_file_path = os.path.join(target_dir, f"{new_file_name}.safetensors").replace(
|
||||
os.sep, "/"
|
||||
)
|
||||
base_name = os.path.basename(file_path)
|
||||
old_file_name, old_extension = os.path.splitext(base_name)
|
||||
if not old_extension:
|
||||
old_extension = ".safetensors"
|
||||
new_file_path = os.path.join(
|
||||
target_dir, f"{new_file_name}{old_extension}"
|
||||
).replace(os.sep, "/")
|
||||
|
||||
if os.path.exists(new_file_path):
|
||||
raise ValueError("A file with this name already exists")
|
||||
|
||||
patterns = [
|
||||
f"{old_file_name}.safetensors",
|
||||
f"{old_file_name}{old_extension}",
|
||||
f"{old_file_name}.metadata.json",
|
||||
f"{old_file_name}.metadata.json.bak",
|
||||
]
|
||||
@@ -236,10 +327,20 @@ class ModelLifecycleService:
|
||||
def _get_multipart_ext(filename: str) -> str:
|
||||
"""Return the extension for files with compound suffixes."""
|
||||
|
||||
parts = filename.split(".")
|
||||
if len(parts) == 3:
|
||||
return "." + ".".join(parts[-2:])
|
||||
if len(parts) >= 4:
|
||||
return "." + ".".join(parts[-3:])
|
||||
return os.path.splitext(filename)[1]
|
||||
known_suffixes = [
|
||||
".metadata.json.bak",
|
||||
".metadata.json",
|
||||
".safetensors",
|
||||
*PREVIEW_EXTENSIONS,
|
||||
]
|
||||
|
||||
for suffix in sorted(known_suffixes, key=len, reverse=True):
|
||||
if filename.endswith(suffix):
|
||||
return suffix
|
||||
|
||||
basename = os.path.basename(filename)
|
||||
dot_index = basename.rfind(".")
|
||||
if dot_index != -1:
|
||||
return basename[dot_index:]
|
||||
|
||||
return os.path.splitext(basename)[1]
|
||||
|
||||
@@ -41,6 +41,55 @@ def _require_aiosqlite() -> Any:
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class _RateLimitRetryHelper:
|
||||
"""Coordinate exponential backoff retries after rate limiting."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
retry_limit: int = 3,
|
||||
base_delay: float = 1.5,
|
||||
max_delay: float = 30.0,
|
||||
jitter_ratio: float = 0.2,
|
||||
) -> None:
|
||||
self._retry_limit = max(1, retry_limit)
|
||||
self._base_delay = base_delay
|
||||
self._max_delay = max_delay
|
||||
self._jitter_ratio = max(0.0, jitter_ratio)
|
||||
|
||||
async def run(self, label: str, func, *args, **kwargs):
|
||||
attempt = 0
|
||||
while True:
|
||||
try:
|
||||
return await func(*args, **kwargs)
|
||||
except RateLimitError as exc:
|
||||
attempt += 1
|
||||
if attempt >= self._retry_limit:
|
||||
exc.provider = exc.provider or label
|
||||
raise
|
||||
|
||||
delay = self._calculate_delay(exc.retry_after, attempt)
|
||||
logger.warning(
|
||||
"Provider %s rate limited request; retrying in %.2fs (attempt %s/%s)",
|
||||
label,
|
||||
delay,
|
||||
attempt,
|
||||
self._retry_limit,
|
||||
)
|
||||
await asyncio.sleep(delay)
|
||||
|
||||
def _calculate_delay(self, retry_after: Optional[float], attempt: int) -> float:
|
||||
if retry_after is not None:
|
||||
return min(self._max_delay, max(0.0, retry_after))
|
||||
|
||||
base_delay = self._base_delay * (2 ** max(0, attempt - 1))
|
||||
jitter_span = base_delay * self._jitter_ratio
|
||||
if jitter_span > 0:
|
||||
base_delay += random.uniform(-jitter_span, jitter_span)
|
||||
|
||||
return min(self._max_delay, max(0.0, base_delay))
|
||||
|
||||
class ModelMetadataProvider(ABC):
|
||||
"""Base abstract class for all model metadata providers"""
|
||||
|
||||
@@ -53,6 +102,12 @@ class ModelMetadataProvider(ABC):
|
||||
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
|
||||
"""Get all versions of a model with their details"""
|
||||
pass
|
||||
|
||||
async def get_model_versions_bulk(
|
||||
self, model_ids: Sequence[int]
|
||||
) -> Optional[Dict[int, Dict]]:
|
||||
"""Fetch model versions for multiple model ids when supported."""
|
||||
raise NotImplementedError
|
||||
|
||||
@abstractmethod
|
||||
async def get_model_version(self, model_id: int = None, version_id: int = None) -> Optional[Dict]:
|
||||
@@ -80,6 +135,11 @@ class CivitaiModelMetadataProvider(ModelMetadataProvider):
|
||||
|
||||
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
|
||||
return await self.client.get_model_versions(model_id)
|
||||
|
||||
async def get_model_versions_bulk(
|
||||
self, model_ids: Sequence[int]
|
||||
) -> Optional[Dict[int, Dict]]:
|
||||
return await self.client.get_model_versions_bulk(model_ids)
|
||||
|
||||
async def get_model_version(self, model_id: int = None, version_id: int = None) -> Optional[Dict]:
|
||||
return await self.client.get_model_version(model_id, version_id)
|
||||
@@ -379,6 +439,12 @@ class FallbackMetadataProvider(ModelMetadataProvider):
|
||||
self._rate_limit_base_delay = rate_limit_base_delay
|
||||
self._rate_limit_max_delay = rate_limit_max_delay
|
||||
self._rate_limit_jitter_ratio = max(0.0, rate_limit_jitter_ratio)
|
||||
self._rate_limit_helper = _RateLimitRetryHelper(
|
||||
retry_limit=self._rate_limit_retry_limit,
|
||||
base_delay=self._rate_limit_base_delay,
|
||||
max_delay=self._rate_limit_max_delay,
|
||||
jitter_ratio=self._rate_limit_jitter_ratio,
|
||||
)
|
||||
|
||||
async def get_model_by_hash(self, model_hash: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||
for provider, label in self._iter_providers():
|
||||
@@ -474,44 +540,80 @@ class FallbackMetadataProvider(ModelMetadataProvider):
|
||||
def _iter_providers(self):
|
||||
return zip(self.providers, self._provider_labels)
|
||||
|
||||
async def _call_with_rate_limit(
|
||||
async def _call_with_rate_limit(self, label: str, func, *args, **kwargs):
|
||||
return await self._rate_limit_helper.run(label, func, *args, **kwargs)
|
||||
|
||||
|
||||
class RateLimitRetryingProvider(ModelMetadataProvider):
|
||||
"""Adapter that retries individual provider calls after rate limiting."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
label: str,
|
||||
func,
|
||||
*args,
|
||||
**kwargs,
|
||||
):
|
||||
attempt = 0
|
||||
while True:
|
||||
try:
|
||||
return await func(*args, **kwargs)
|
||||
except RateLimitError as exc:
|
||||
attempt += 1
|
||||
if attempt >= self._rate_limit_retry_limit:
|
||||
exc.provider = exc.provider or label
|
||||
raise exc
|
||||
delay = self._calculate_rate_limit_delay(exc.retry_after, attempt)
|
||||
logger.warning(
|
||||
"Provider %s rate limited request; retrying in %.2fs (attempt %s/%s)",
|
||||
label,
|
||||
delay,
|
||||
attempt,
|
||||
self._rate_limit_retry_limit,
|
||||
)
|
||||
await asyncio.sleep(delay)
|
||||
except Exception:
|
||||
raise
|
||||
provider: ModelMetadataProvider,
|
||||
label: Optional[str] = None,
|
||||
*,
|
||||
rate_limit_retry_limit: int = 3,
|
||||
rate_limit_base_delay: float = 1.5,
|
||||
rate_limit_max_delay: float = 30.0,
|
||||
rate_limit_jitter_ratio: float = 0.2,
|
||||
) -> None:
|
||||
self._provider = provider
|
||||
self._label = label or provider.__class__.__name__
|
||||
self._rate_limit_helper = _RateLimitRetryHelper(
|
||||
retry_limit=rate_limit_retry_limit,
|
||||
base_delay=rate_limit_base_delay,
|
||||
max_delay=rate_limit_max_delay,
|
||||
jitter_ratio=rate_limit_jitter_ratio,
|
||||
)
|
||||
|
||||
def _calculate_rate_limit_delay(self, retry_after: Optional[float], attempt: int) -> float:
|
||||
if retry_after is not None:
|
||||
return min(self._rate_limit_max_delay, max(0.0, retry_after))
|
||||
def __getattr__(self, item):
|
||||
return getattr(self._provider, item)
|
||||
|
||||
base_delay = self._rate_limit_base_delay * (2 ** max(0, attempt - 1))
|
||||
jitter_span = base_delay * self._rate_limit_jitter_ratio
|
||||
if jitter_span > 0:
|
||||
base_delay += random.uniform(-jitter_span, jitter_span)
|
||||
async def get_model_by_hash(self, model_hash: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||
return await self._rate_limit_helper.run(
|
||||
self._label,
|
||||
self._provider.get_model_by_hash,
|
||||
model_hash,
|
||||
)
|
||||
|
||||
return min(self._rate_limit_max_delay, max(0.0, base_delay))
|
||||
async def get_model_versions(self, model_id: str) -> Optional[Dict]:
|
||||
return await self._rate_limit_helper.run(
|
||||
self._label,
|
||||
self._provider.get_model_versions,
|
||||
model_id,
|
||||
)
|
||||
|
||||
async def get_model_versions_bulk(
|
||||
self,
|
||||
model_ids: Sequence[int],
|
||||
) -> Optional[Dict[int, Dict]]:
|
||||
return await self._rate_limit_helper.run(
|
||||
self._label,
|
||||
self._provider.get_model_versions_bulk,
|
||||
model_ids,
|
||||
)
|
||||
|
||||
async def get_model_version(self, model_id: int = None, version_id: int = None) -> Optional[Dict]:
|
||||
return await self._rate_limit_helper.run(
|
||||
self._label,
|
||||
self._provider.get_model_version,
|
||||
model_id,
|
||||
version_id,
|
||||
)
|
||||
|
||||
async def get_model_version_info(self, version_id: str) -> Tuple[Optional[Dict], Optional[str]]:
|
||||
return await self._rate_limit_helper.run(
|
||||
self._label,
|
||||
self._provider.get_model_version_info,
|
||||
version_id,
|
||||
)
|
||||
|
||||
async def get_user_models(self, username: str) -> Optional[List[Dict]]:
|
||||
return await self._rate_limit_helper.run(
|
||||
self._label,
|
||||
self._provider.get_user_models,
|
||||
username,
|
||||
)
|
||||
|
||||
class ModelMetadataProviderManager:
|
||||
"""Manager for selecting and using model metadata providers"""
|
||||
@@ -544,7 +646,19 @@ class ModelMetadataProviderManager:
|
||||
"""Get model versions using specified or default provider"""
|
||||
provider = self._get_provider(provider_name)
|
||||
return await provider.get_model_versions(model_id)
|
||||
|
||||
|
||||
async def get_model_versions_bulk(
|
||||
self,
|
||||
model_ids: Sequence[int],
|
||||
provider_name: str = None,
|
||||
) -> Optional[Dict[int, Dict]]:
|
||||
"""Fetch model versions for multiple model ids when supported by provider."""
|
||||
provider = self._get_provider(provider_name)
|
||||
try:
|
||||
return await provider.get_model_versions_bulk(model_ids)
|
||||
except NotImplementedError:
|
||||
return None
|
||||
|
||||
async def get_model_version(self, model_id: int = None, version_id: int = None, provider_name: str = None) -> Optional[Dict]:
|
||||
"""Get specific model version using specified or default provider"""
|
||||
provider = self._get_provider(provider_name)
|
||||
|
||||
@@ -1,12 +1,49 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Protocol, Callable
|
||||
from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple, Protocol, Callable
|
||||
|
||||
from ..utils.constants import NSFW_LEVELS
|
||||
from ..utils.utils import fuzzy_match as default_fuzzy_match
|
||||
|
||||
|
||||
DEFAULT_CIVITAI_MODEL_TYPE = "LORA"
|
||||
|
||||
|
||||
def _coerce_to_str(value: Any) -> Optional[str]:
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
candidate = str(value).strip()
|
||||
return candidate if candidate else None
|
||||
|
||||
|
||||
def normalize_civitai_model_type(value: Any) -> Optional[str]:
|
||||
"""Return a lowercase string suitable for comparisons."""
|
||||
candidate = _coerce_to_str(value)
|
||||
return candidate.lower() if candidate else None
|
||||
|
||||
|
||||
def resolve_civitai_model_type(entry: Mapping[str, Any]) -> str:
|
||||
"""Extract the model type from CivitAI metadata, defaulting to LORA."""
|
||||
if not isinstance(entry, Mapping):
|
||||
return DEFAULT_CIVITAI_MODEL_TYPE
|
||||
|
||||
civitai = entry.get("civitai")
|
||||
if isinstance(civitai, Mapping):
|
||||
civitai_model = civitai.get("model")
|
||||
if isinstance(civitai_model, Mapping):
|
||||
model_type = _coerce_to_str(civitai_model.get("type"))
|
||||
if model_type:
|
||||
return model_type
|
||||
|
||||
model_type = _coerce_to_str(entry.get("model_type"))
|
||||
if model_type:
|
||||
return model_type
|
||||
|
||||
return DEFAULT_CIVITAI_MODEL_TYPE
|
||||
|
||||
|
||||
class SettingsProvider(Protocol):
|
||||
"""Protocol describing the SettingsManager contract used by query helpers."""
|
||||
|
||||
@@ -28,9 +65,10 @@ class FilterCriteria:
|
||||
|
||||
folder: Optional[str] = None
|
||||
base_models: Optional[Sequence[str]] = None
|
||||
tags: Optional[Sequence[str]] = None
|
||||
tags: Optional[Dict[str, str]] = None
|
||||
favorites_only: bool = False
|
||||
search_options: Optional[Dict[str, Any]] = None
|
||||
model_types: Optional[Sequence[str]] = None
|
||||
|
||||
|
||||
class ModelCacheRepository:
|
||||
@@ -108,12 +146,43 @@ class ModelFilterSet:
|
||||
base_model_set = set(base_models)
|
||||
items = [item for item in items if item.get("base_model") in base_model_set]
|
||||
|
||||
tags = criteria.tags or []
|
||||
if tags:
|
||||
tag_set = set(tags)
|
||||
tag_filters = criteria.tags or {}
|
||||
include_tags = set()
|
||||
exclude_tags = set()
|
||||
if isinstance(tag_filters, dict):
|
||||
for tag, state in tag_filters.items():
|
||||
if not tag:
|
||||
continue
|
||||
if state == "exclude":
|
||||
exclude_tags.add(tag)
|
||||
else:
|
||||
include_tags.add(tag)
|
||||
else:
|
||||
include_tags = {tag for tag in tag_filters if tag}
|
||||
|
||||
if include_tags:
|
||||
items = [
|
||||
item for item in items
|
||||
if any(tag in tag_set for tag in item.get("tags", []))
|
||||
if any(tag in include_tags for tag in (item.get("tags", []) or []))
|
||||
]
|
||||
|
||||
if exclude_tags:
|
||||
items = [
|
||||
item for item in items
|
||||
if not any(tag in exclude_tags for tag in (item.get("tags", []) or []))
|
||||
]
|
||||
|
||||
model_types = criteria.model_types or []
|
||||
normalized_model_types = {
|
||||
model_type for model_type in (
|
||||
normalize_civitai_model_type(value) for value in model_types
|
||||
)
|
||||
if model_type
|
||||
}
|
||||
if normalized_model_types:
|
||||
items = [
|
||||
item for item in items
|
||||
if normalize_civitai_model_type(resolve_civitai_model_type(item)) in normalized_model_types
|
||||
]
|
||||
|
||||
return items
|
||||
@@ -187,6 +256,9 @@ class SearchStrategy:
|
||||
return results
|
||||
|
||||
def _matches(self, candidate: str, search_term: str, search_lower: str, fuzzy: bool) -> bool:
|
||||
if not isinstance(candidate, str):
|
||||
candidate = "" if candidate is None else str(candidate)
|
||||
|
||||
if not candidate:
|
||||
return False
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ from ..utils.models import BaseModelMetadata
|
||||
from ..config import config
|
||||
from ..utils.file_utils import find_preview_file, get_preview_extension
|
||||
from ..utils.metadata_manager import MetadataManager
|
||||
from ..utils.civitai_utils import resolve_license_info
|
||||
from .model_cache import ModelCache
|
||||
from .model_hash_index import ModelHashIndex
|
||||
from ..utils.constants import PREVIEW_EXTENSIONS
|
||||
@@ -18,6 +19,7 @@ from .model_lifecycle_service import delete_model_artifacts
|
||||
from .service_registry import ServiceRegistry
|
||||
from .websocket_manager import ws_manager
|
||||
from .persistent_model_cache import get_persistent_cache
|
||||
from .settings_manager import get_settings_manager
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -81,6 +83,13 @@ class ModelScanner:
|
||||
self._is_initializing = False # Flag to track initialization state
|
||||
self._excluded_models = [] # List to track excluded models
|
||||
self._persistent_cache = get_persistent_cache()
|
||||
self._name_display_mode = self._resolve_name_display_mode()
|
||||
try:
|
||||
loop = asyncio.get_running_loop()
|
||||
except RuntimeError:
|
||||
loop = None
|
||||
self._loop = loop
|
||||
self.loop = loop
|
||||
self._initialized = True
|
||||
|
||||
# Register this service
|
||||
@@ -94,6 +103,7 @@ class ModelScanner:
|
||||
self._tags_count = {}
|
||||
self._excluded_models = []
|
||||
self._is_initializing = False
|
||||
self._name_display_mode = self._resolve_name_display_mode()
|
||||
|
||||
try:
|
||||
loop = asyncio.get_running_loop()
|
||||
@@ -101,8 +111,30 @@ class ModelScanner:
|
||||
loop = None
|
||||
|
||||
if loop and not loop.is_closed():
|
||||
self._loop = loop
|
||||
self.loop = loop
|
||||
loop.create_task(self.initialize_in_background())
|
||||
|
||||
|
||||
def _resolve_name_display_mode(self) -> str:
|
||||
"""Return the configured display mode for name sorting."""
|
||||
|
||||
try:
|
||||
manager = get_settings_manager()
|
||||
except Exception: # pragma: no cover - fallback to defaults
|
||||
return "model_name"
|
||||
|
||||
value = manager.get("model_name_display", "model_name")
|
||||
return ModelCache._normalize_display_mode(value)
|
||||
|
||||
async def on_model_name_display_changed(self, display_mode: str) -> None:
|
||||
"""Handle updates to the model name display preference."""
|
||||
|
||||
normalized = ModelCache._normalize_display_mode(display_mode)
|
||||
self._name_display_mode = normalized
|
||||
|
||||
if self._cache is not None:
|
||||
await self._cache.update_name_display_mode(normalized)
|
||||
|
||||
async def _register_service(self):
|
||||
"""Register this instance with the ServiceRegistry"""
|
||||
service_name = f"{self.model_type}_scanner"
|
||||
@@ -129,6 +161,12 @@ class ModelScanner:
|
||||
if trained_words:
|
||||
slim['trainedWords'] = list(trained_words) if isinstance(trained_words, list) else trained_words
|
||||
|
||||
civitai_model = civitai.get('model')
|
||||
if isinstance(civitai_model, Mapping):
|
||||
model_type_value = civitai_model.get('type')
|
||||
if model_type_value not in (None, '', []):
|
||||
slim['model'] = {'type': model_type_value}
|
||||
|
||||
return slim or None
|
||||
|
||||
def _build_cache_entry(
|
||||
@@ -144,7 +182,17 @@ class ModelScanner:
|
||||
def get_value(key: str, default: Any = None) -> Any:
|
||||
if is_mapping:
|
||||
return source.get(key, default)
|
||||
return getattr(source, key, default)
|
||||
|
||||
sentinel = object()
|
||||
value = getattr(source, key, sentinel)
|
||||
if value is not sentinel:
|
||||
return value
|
||||
|
||||
unknown = getattr(source, "_unknown_fields", None)
|
||||
if isinstance(unknown, dict) and key in unknown:
|
||||
return unknown[key]
|
||||
|
||||
return default
|
||||
|
||||
file_path = file_path_override or get_value('file_path', '') or ''
|
||||
normalized_path = file_path.replace('\\', '/')
|
||||
@@ -166,7 +214,8 @@ class ModelScanner:
|
||||
else:
|
||||
preview_url = ''
|
||||
|
||||
civitai_slim = self._slim_civitai_payload(get_value('civitai'))
|
||||
civitai_full = get_value('civitai')
|
||||
civitai_slim = self._slim_civitai_payload(civitai_full)
|
||||
usage_tips = get_value('usage_tips', '') or ''
|
||||
if not isinstance(usage_tips, str):
|
||||
usage_tips = str(usage_tips)
|
||||
@@ -198,12 +247,76 @@ class ModelScanner:
|
||||
'civitai_deleted': bool(get_value('civitai_deleted', False)),
|
||||
}
|
||||
|
||||
license_source: Dict[str, Any] = {}
|
||||
if isinstance(civitai_full, Mapping):
|
||||
civitai_model = civitai_full.get('model')
|
||||
if isinstance(civitai_model, Mapping):
|
||||
for key in (
|
||||
'allowNoCredit',
|
||||
'allowCommercialUse',
|
||||
'allowDerivatives',
|
||||
'allowDifferentLicense',
|
||||
):
|
||||
if key in civitai_model:
|
||||
license_source[key] = civitai_model.get(key)
|
||||
|
||||
for key in (
|
||||
'allowNoCredit',
|
||||
'allowCommercialUse',
|
||||
'allowDerivatives',
|
||||
'allowDifferentLicense',
|
||||
):
|
||||
if key not in license_source:
|
||||
value = get_value(key)
|
||||
if value is not None:
|
||||
license_source[key] = value
|
||||
|
||||
_, license_flags = resolve_license_info(license_source or {})
|
||||
entry['license_flags'] = license_flags
|
||||
|
||||
model_type = get_value('model_type', None)
|
||||
if model_type:
|
||||
entry['model_type'] = model_type
|
||||
|
||||
return entry
|
||||
|
||||
def _ensure_license_flags(self, entry: Dict[str, Any]) -> None:
|
||||
"""Ensure cached entries include an integer license flag bitset."""
|
||||
|
||||
if not isinstance(entry, dict):
|
||||
return
|
||||
|
||||
license_value = entry.get('license_flags')
|
||||
if license_value is not None:
|
||||
try:
|
||||
entry['license_flags'] = int(license_value)
|
||||
except (TypeError, ValueError):
|
||||
_, fallback_flags = resolve_license_info({})
|
||||
entry['license_flags'] = fallback_flags
|
||||
return
|
||||
|
||||
license_source = {
|
||||
'allowNoCredit': entry.get('allowNoCredit'),
|
||||
'allowCommercialUse': entry.get('allowCommercialUse'),
|
||||
'allowDerivatives': entry.get('allowDerivatives'),
|
||||
'allowDifferentLicense': entry.get('allowDifferentLicense'),
|
||||
}
|
||||
civitai_full = entry.get('civitai')
|
||||
if isinstance(civitai_full, Mapping):
|
||||
civitai_model = civitai_full.get('model')
|
||||
if isinstance(civitai_model, Mapping):
|
||||
for key in (
|
||||
'allowNoCredit',
|
||||
'allowCommercialUse',
|
||||
'allowDerivatives',
|
||||
'allowDifferentLicense',
|
||||
):
|
||||
if key in civitai_model:
|
||||
license_source[key] = civitai_model.get(key)
|
||||
|
||||
_, license_flags = resolve_license_info(license_source)
|
||||
entry['license_flags'] = license_flags
|
||||
|
||||
async def initialize_in_background(self) -> None:
|
||||
"""Initialize cache in background using thread pool"""
|
||||
try:
|
||||
@@ -211,7 +324,8 @@ class ModelScanner:
|
||||
if self._cache is None:
|
||||
self._cache = ModelCache(
|
||||
raw_data=[],
|
||||
folders=[]
|
||||
folders=[],
|
||||
name_display_mode=self._name_display_mode,
|
||||
)
|
||||
|
||||
# Set initializing flag to true
|
||||
@@ -344,12 +458,16 @@ class ModelScanner:
|
||||
hash_index.add_entry(sha_value.lower(), path)
|
||||
|
||||
tags_count: Dict[str, int] = {}
|
||||
adjusted_raw_data: List[Dict[str, Any]] = []
|
||||
for item in persisted.raw_data:
|
||||
for tag in item.get('tags') or []:
|
||||
adjusted_item = self.adjust_cached_entry(dict(item))
|
||||
adjusted_raw_data.append(adjusted_item)
|
||||
|
||||
for tag in adjusted_item.get('tags') or []:
|
||||
tags_count[tag] = tags_count.get(tag, 0) + 1
|
||||
|
||||
scan_result = CacheBuildResult(
|
||||
raw_data=list(persisted.raw_data),
|
||||
raw_data=adjusted_raw_data,
|
||||
hash_index=hash_index,
|
||||
tags_count=tags_count,
|
||||
excluded_models=list(persisted.excluded_models)
|
||||
@@ -516,7 +634,8 @@ class ModelScanner:
|
||||
if self._cache is None and not force_refresh:
|
||||
return ModelCache(
|
||||
raw_data=[],
|
||||
folders=[]
|
||||
folders=[],
|
||||
name_display_mode=self._name_display_mode,
|
||||
)
|
||||
|
||||
# If force refresh is requested, initialize the cache directly
|
||||
@@ -530,6 +649,7 @@ class ModelScanner:
|
||||
|
||||
async def _initialize_cache(self) -> None:
|
||||
"""Initialize or refresh the cache"""
|
||||
print("init start", flush=True)
|
||||
self._is_initializing = True # Set flag
|
||||
try:
|
||||
start_time = time.time()
|
||||
@@ -538,6 +658,7 @@ class ModelScanner:
|
||||
scan_result = await self._gather_model_data()
|
||||
await self._apply_scan_result(scan_result)
|
||||
await self._save_persistent_cache(scan_result)
|
||||
print("init end", flush=True)
|
||||
|
||||
logger.info(
|
||||
f"{self.model_type.capitalize()} Scanner: Cache initialization completed in {time.time() - start_time:.2f} seconds, "
|
||||
@@ -549,7 +670,8 @@ class ModelScanner:
|
||||
if self._cache is None:
|
||||
self._cache = ModelCache(
|
||||
raw_data=[],
|
||||
folders=[]
|
||||
folders=[],
|
||||
name_display_mode=self._name_display_mode,
|
||||
)
|
||||
finally:
|
||||
self._is_initializing = False # Unset flag
|
||||
@@ -640,6 +762,10 @@ class ModelScanner:
|
||||
if root_path:
|
||||
model_data = await self._process_model_file(path, root_path)
|
||||
if model_data:
|
||||
model_data = self.adjust_cached_entry(dict(model_data))
|
||||
if not model_data:
|
||||
continue
|
||||
self._ensure_license_flags(model_data)
|
||||
# Add to cache
|
||||
self._cache.raw_data.append(model_data)
|
||||
self._cache.add_to_version_index(model_data)
|
||||
@@ -732,6 +858,41 @@ class ModelScanner:
|
||||
"""Hook for subclasses: adjust metadata during scanning"""
|
||||
return metadata
|
||||
|
||||
def adjust_cached_entry(self, entry: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Hook for subclasses: adjust entries loaded from the persisted cache."""
|
||||
return entry
|
||||
|
||||
@staticmethod
|
||||
def _normalize_path_value(path: Optional[str]) -> str:
|
||||
if not path:
|
||||
return ''
|
||||
|
||||
normalized = os.path.normpath(path)
|
||||
if normalized == '.':
|
||||
return ''
|
||||
|
||||
return normalized.replace('\\', '/')
|
||||
|
||||
def _find_root_for_file(self, file_path: Optional[str]) -> Optional[str]:
|
||||
"""Return the configured root directory that contains ``file_path``."""
|
||||
|
||||
normalized_path = self._normalize_path_value(file_path)
|
||||
if not normalized_path:
|
||||
return None
|
||||
|
||||
for root in self.get_model_roots() or []:
|
||||
normalized_root = self._normalize_path_value(root)
|
||||
if not normalized_root:
|
||||
continue
|
||||
|
||||
if (
|
||||
normalized_path == normalized_root
|
||||
or normalized_path.startswith(f"{normalized_root}/")
|
||||
):
|
||||
return root
|
||||
|
||||
return None
|
||||
|
||||
async def _process_model_file(
|
||||
self,
|
||||
file_path: str,
|
||||
@@ -837,7 +998,8 @@ class ModelScanner:
|
||||
if self._cache is None:
|
||||
self._cache = ModelCache(
|
||||
raw_data=list(scan_result.raw_data),
|
||||
folders=[]
|
||||
folders=[],
|
||||
name_display_mode=self._name_display_mode,
|
||||
)
|
||||
else:
|
||||
self._cache.raw_data = list(scan_result.raw_data)
|
||||
@@ -898,6 +1060,7 @@ class ModelScanner:
|
||||
processed_files += 1
|
||||
|
||||
if result:
|
||||
self._ensure_license_flags(result)
|
||||
raw_data.append(result)
|
||||
|
||||
sha_value = result.get('sha256')
|
||||
@@ -1281,11 +1444,13 @@ class ModelScanner:
|
||||
for file_path in file_paths:
|
||||
try:
|
||||
target_dir = os.path.dirname(file_path)
|
||||
file_name = os.path.splitext(os.path.basename(file_path))[0]
|
||||
|
||||
base_name = os.path.basename(file_path)
|
||||
file_name, main_extension = os.path.splitext(base_name)
|
||||
|
||||
deleted_files = await delete_model_artifacts(
|
||||
target_dir,
|
||||
file_name
|
||||
file_name,
|
||||
main_extension=main_extension,
|
||||
)
|
||||
|
||||
if deleted_files:
|
||||
@@ -1443,21 +1608,10 @@ class ModelScanner:
|
||||
"""
|
||||
try:
|
||||
cache = await self.get_cached_data()
|
||||
if not cache or not cache.raw_data:
|
||||
if not cache:
|
||||
return []
|
||||
|
||||
versions = []
|
||||
for item in cache.raw_data:
|
||||
if (item.get('civitai') and
|
||||
item['civitai'].get('modelId') == model_id and
|
||||
item['civitai'].get('id')):
|
||||
versions.append({
|
||||
'versionId': item['civitai'].get('id'),
|
||||
'name': item['civitai'].get('name'),
|
||||
'fileName': item.get('file_name', '')
|
||||
})
|
||||
|
||||
return versions
|
||||
|
||||
return cache.get_versions_by_model_id(model_id)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting model versions: {e}")
|
||||
return []
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -5,9 +5,9 @@ import re
|
||||
import sqlite3
|
||||
import threading
|
||||
from dataclasses import dataclass
|
||||
from typing import Dict, List, Optional, Sequence, Tuple
|
||||
from typing import Dict, List, Mapping, Optional, Sequence, Tuple
|
||||
|
||||
from ..utils.settings_paths import get_settings_dir
|
||||
from ..utils.settings_paths import get_project_root, get_settings_dir
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -21,6 +21,9 @@ class PersistedCacheData:
|
||||
excluded_models: List[str]
|
||||
|
||||
|
||||
DEFAULT_LICENSE_FLAGS = 127 # 127 (0b1111111) encodes default CivitAI permissions with all commercial modes enabled.
|
||||
|
||||
|
||||
class PersistentModelCache:
|
||||
"""Persist core model metadata and hash index data in SQLite."""
|
||||
|
||||
@@ -44,9 +47,11 @@ class PersistentModelCache:
|
||||
"metadata_source",
|
||||
"civitai_id",
|
||||
"civitai_model_id",
|
||||
"civitai_model_type",
|
||||
"civitai_name",
|
||||
"civitai_creator_username",
|
||||
"trained_words",
|
||||
"license_flags",
|
||||
"civitai_deleted",
|
||||
"exclude",
|
||||
"db_checked",
|
||||
@@ -134,7 +139,8 @@ class PersistentModelCache:
|
||||
creator_username = row["civitai_creator_username"]
|
||||
civitai: Optional[Dict] = None
|
||||
civitai_has_data = any(
|
||||
row[col] is not None for col in ("civitai_id", "civitai_model_id", "civitai_name")
|
||||
row[col] is not None
|
||||
for col in ("civitai_id", "civitai_model_id", "civitai_model_type", "civitai_name")
|
||||
) or trained_words or creator_username
|
||||
if civitai_has_data:
|
||||
civitai = {}
|
||||
@@ -148,6 +154,13 @@ class PersistentModelCache:
|
||||
civitai["trainedWords"] = trained_words
|
||||
if creator_username:
|
||||
civitai.setdefault("creator", {})["username"] = creator_username
|
||||
model_type_value = row["civitai_model_type"]
|
||||
if model_type_value:
|
||||
civitai.setdefault("model", {})["type"] = model_type_value
|
||||
|
||||
license_value = row["license_flags"]
|
||||
if license_value is None:
|
||||
license_value = DEFAULT_LICENSE_FLAGS
|
||||
|
||||
item = {
|
||||
"file_path": file_path,
|
||||
@@ -171,6 +184,7 @@ class PersistentModelCache:
|
||||
"tags": tags.get(file_path, []),
|
||||
"civitai": civitai,
|
||||
"civitai_deleted": bool(row["civitai_deleted"]),
|
||||
"license_flags": int(license_value),
|
||||
}
|
||||
raw_data.append(item)
|
||||
|
||||
@@ -397,7 +411,7 @@ class PersistentModelCache:
|
||||
settings_dir = get_settings_dir(create=True)
|
||||
except Exception as exc: # pragma: no cover - defensive guard
|
||||
logger.warning("Falling back to project directory for cache: %s", exc)
|
||||
settings_dir = os.path.dirname(os.path.dirname(self._db_path)) if hasattr(self, "_db_path") else os.getcwd()
|
||||
settings_dir = get_project_root()
|
||||
safe_name = re.sub(r"[^A-Za-z0-9_.-]", "_", library_name or "default")
|
||||
if safe_name.lower() in ("default", ""):
|
||||
legacy_path = os.path.join(settings_dir, self._DEFAULT_FILENAME)
|
||||
@@ -434,6 +448,7 @@ class PersistentModelCache:
|
||||
metadata_source TEXT,
|
||||
civitai_id INTEGER,
|
||||
civitai_model_id INTEGER,
|
||||
civitai_model_type TEXT,
|
||||
civitai_name TEXT,
|
||||
civitai_creator_username TEXT,
|
||||
trained_words TEXT,
|
||||
@@ -483,7 +498,10 @@ class PersistentModelCache:
|
||||
required_columns = {
|
||||
"metadata_source": "TEXT",
|
||||
"civitai_creator_username": "TEXT",
|
||||
"civitai_model_type": "TEXT",
|
||||
"civitai_deleted": "INTEGER DEFAULT 0",
|
||||
# Persisting without explicit flags should assume CivitAI's documented defaults (0b111001 == 57).
|
||||
"license_flags": f"INTEGER DEFAULT {DEFAULT_LICENSE_FLAGS}",
|
||||
}
|
||||
|
||||
for column, definition in required_columns.items():
|
||||
@@ -517,6 +535,17 @@ class PersistentModelCache:
|
||||
creator_data = civitai.get("creator") if isinstance(civitai, dict) else None
|
||||
if isinstance(creator_data, dict):
|
||||
creator_username = creator_data.get("username") or None
|
||||
model_type_value = None
|
||||
if isinstance(civitai, Mapping):
|
||||
civitai_model_info = civitai.get("model")
|
||||
if isinstance(civitai_model_info, Mapping):
|
||||
candidate_type = civitai_model_info.get("type")
|
||||
if candidate_type not in (None, "", []):
|
||||
model_type_value = candidate_type
|
||||
|
||||
license_flags = item.get("license_flags")
|
||||
if license_flags is None:
|
||||
license_flags = DEFAULT_LICENSE_FLAGS
|
||||
|
||||
return (
|
||||
model_type,
|
||||
@@ -537,9 +566,11 @@ class PersistentModelCache:
|
||||
metadata_source,
|
||||
civitai.get("id"),
|
||||
civitai.get("modelId"),
|
||||
model_type_value,
|
||||
civitai.get("name"),
|
||||
creator_username,
|
||||
trained_words_json,
|
||||
int(license_flags),
|
||||
1 if item.get("civitai_deleted") else 0,
|
||||
1 if item.get("exclude") else 0,
|
||||
1 if item.get("db_checked") else 0,
|
||||
|
||||
@@ -9,6 +9,8 @@ from urllib.parse import urlparse
|
||||
|
||||
from ..utils.constants import CARD_PREVIEW_WIDTH, PREVIEW_EXTENSIONS
|
||||
from ..utils.civitai_utils import rewrite_preview_url
|
||||
from ..utils.preview_selection import select_preview_media
|
||||
from .settings_manager import get_settings_manager
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -43,7 +45,18 @@ class PreviewAssetService:
|
||||
if not images:
|
||||
return
|
||||
|
||||
first_preview = images[0]
|
||||
settings_manager = get_settings_manager()
|
||||
blur_mature_content = bool(
|
||||
settings_manager.get("blur_mature_content", True)
|
||||
)
|
||||
first_preview, nsfw_level = select_preview_media(
|
||||
images,
|
||||
blur_mature_content=blur_mature_content,
|
||||
)
|
||||
|
||||
if not first_preview:
|
||||
return
|
||||
|
||||
base_name = os.path.splitext(os.path.splitext(os.path.basename(metadata_path))[0])[0]
|
||||
preview_dir = os.path.dirname(metadata_path)
|
||||
is_video = first_preview.get("type") == "video"
|
||||
@@ -81,7 +94,7 @@ class PreviewAssetService:
|
||||
success, _ = await downloader.download_file(candidate, preview_path, use_auth=False)
|
||||
if success:
|
||||
local_metadata["preview_url"] = preview_path.replace(os.sep, "/")
|
||||
local_metadata["preview_nsfw_level"] = first_preview.get("nsfwLevel", 0)
|
||||
local_metadata["preview_nsfw_level"] = nsfw_level
|
||||
return
|
||||
else:
|
||||
rewritten_url, rewritten = rewrite_preview_url(preview_url, media_type="image")
|
||||
@@ -93,7 +106,7 @@ class PreviewAssetService:
|
||||
)
|
||||
if success:
|
||||
local_metadata["preview_url"] = preview_path.replace(os.sep, "/")
|
||||
local_metadata["preview_nsfw_level"] = first_preview.get("nsfwLevel", 0)
|
||||
local_metadata["preview_nsfw_level"] = nsfw_level
|
||||
return
|
||||
|
||||
extension = ".webp"
|
||||
@@ -124,7 +137,7 @@ class PreviewAssetService:
|
||||
return
|
||||
|
||||
local_metadata["preview_url"] = preview_path.replace(os.sep, "/")
|
||||
local_metadata["preview_nsfw_level"] = first_preview.get("nsfwLevel", 0)
|
||||
local_metadata["preview_nsfw_level"] = nsfw_level
|
||||
|
||||
async def replace_preview(
|
||||
self,
|
||||
|
||||
@@ -9,6 +9,7 @@ from .recipe_cache import RecipeCache
|
||||
from .service_registry import ServiceRegistry
|
||||
from .lora_scanner import LoraScanner
|
||||
from .metadata_service import get_default_metadata_provider
|
||||
from .checkpoint_scanner import CheckpointScanner
|
||||
from .recipes.errors import RecipeNotFoundError
|
||||
from ..utils.utils import calculate_recipe_fingerprint, fuzzy_match
|
||||
from natsort import natsorted
|
||||
@@ -23,24 +24,39 @@ class RecipeScanner:
|
||||
_lock = asyncio.Lock()
|
||||
|
||||
@classmethod
|
||||
async def get_instance(cls, lora_scanner: Optional[LoraScanner] = None):
|
||||
async def get_instance(
|
||||
cls,
|
||||
lora_scanner: Optional[LoraScanner] = None,
|
||||
checkpoint_scanner: Optional[CheckpointScanner] = None,
|
||||
):
|
||||
"""Get singleton instance of RecipeScanner"""
|
||||
async with cls._lock:
|
||||
if cls._instance is None:
|
||||
if not lora_scanner:
|
||||
# Get lora scanner from service registry if not provided
|
||||
lora_scanner = await ServiceRegistry.get_lora_scanner()
|
||||
cls._instance = cls(lora_scanner)
|
||||
if not checkpoint_scanner:
|
||||
checkpoint_scanner = await ServiceRegistry.get_checkpoint_scanner()
|
||||
cls._instance = cls(lora_scanner, checkpoint_scanner)
|
||||
return cls._instance
|
||||
|
||||
def __new__(cls, lora_scanner: Optional[LoraScanner] = None):
|
||||
def __new__(
|
||||
cls,
|
||||
lora_scanner: Optional[LoraScanner] = None,
|
||||
checkpoint_scanner: Optional[CheckpointScanner] = None,
|
||||
):
|
||||
if cls._instance is None:
|
||||
cls._instance = super().__new__(cls)
|
||||
cls._instance._lora_scanner = lora_scanner
|
||||
cls._instance._checkpoint_scanner = checkpoint_scanner
|
||||
cls._instance._civitai_client = None # Will be lazily initialized
|
||||
return cls._instance
|
||||
|
||||
def __init__(self, lora_scanner: Optional[LoraScanner] = None):
|
||||
def __init__(
|
||||
self,
|
||||
lora_scanner: Optional[LoraScanner] = None,
|
||||
checkpoint_scanner: Optional[CheckpointScanner] = None,
|
||||
):
|
||||
# Ensure initialization only happens once
|
||||
if not hasattr(self, '_initialized'):
|
||||
self._cache: Optional[RecipeCache] = None
|
||||
@@ -51,6 +67,8 @@ class RecipeScanner:
|
||||
self._resort_tasks: Set[asyncio.Task] = set()
|
||||
if lora_scanner:
|
||||
self._lora_scanner = lora_scanner
|
||||
if checkpoint_scanner:
|
||||
self._checkpoint_scanner = checkpoint_scanner
|
||||
self._initialized = True
|
||||
|
||||
def on_library_changed(self) -> None:
|
||||
@@ -384,16 +402,32 @@ class RecipeScanner:
|
||||
|
||||
# Ensure the image file exists
|
||||
image_path = recipe_data.get('file_path')
|
||||
if not os.path.exists(image_path):
|
||||
normalized_image_path = os.path.normpath(image_path) if image_path else image_path
|
||||
path_updated = False
|
||||
if image_path and normalized_image_path != image_path:
|
||||
recipe_data['file_path'] = normalized_image_path
|
||||
image_path = normalized_image_path
|
||||
path_updated = True
|
||||
|
||||
if image_path and not os.path.exists(image_path):
|
||||
logger.warning(f"Recipe image not found: {image_path}")
|
||||
# Try to find the image in the same directory as the recipe
|
||||
recipe_dir = os.path.dirname(recipe_path)
|
||||
image_filename = os.path.basename(image_path)
|
||||
alternative_path = os.path.join(recipe_dir, image_filename)
|
||||
if os.path.exists(alternative_path):
|
||||
recipe_data['file_path'] = alternative_path
|
||||
normalized_alternative = os.path.normpath(alternative_path)
|
||||
recipe_data['file_path'] = normalized_alternative
|
||||
image_path = normalized_alternative
|
||||
path_updated = True
|
||||
logger.info(
|
||||
"Updated recipe image path to %s after relocating asset", normalized_alternative
|
||||
)
|
||||
else:
|
||||
logger.warning(f"Could not find alternative image path for {image_path}")
|
||||
|
||||
if path_updated:
|
||||
self._write_recipe_file(recipe_path, recipe_data)
|
||||
|
||||
# Ensure loras array exists
|
||||
if 'loras' not in recipe_data:
|
||||
@@ -406,6 +440,14 @@ class RecipeScanner:
|
||||
# Update lora information with local paths and availability
|
||||
await self._update_lora_information(recipe_data)
|
||||
|
||||
if recipe_data.get('checkpoint'):
|
||||
checkpoint_entry = self._normalize_checkpoint_entry(recipe_data['checkpoint'])
|
||||
if checkpoint_entry:
|
||||
recipe_data['checkpoint'] = self._enrich_checkpoint_entry(checkpoint_entry)
|
||||
else:
|
||||
logger.warning("Dropping invalid checkpoint entry in %s", recipe_path)
|
||||
recipe_data.pop('checkpoint', None)
|
||||
|
||||
# Calculate and update fingerprint if missing
|
||||
if 'loras' in recipe_data and 'fingerprint' not in recipe_data:
|
||||
fingerprint = calculate_recipe_fingerprint(recipe_data['loras'])
|
||||
@@ -413,18 +455,24 @@ class RecipeScanner:
|
||||
|
||||
# Write updated recipe data back to file
|
||||
try:
|
||||
with open(recipe_path, 'w', encoding='utf-8') as f:
|
||||
json.dump(recipe_data, f, indent=4, ensure_ascii=False)
|
||||
self._write_recipe_file(recipe_path, recipe_data)
|
||||
logger.info(f"Added fingerprint to recipe: {recipe_path}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error writing updated recipe with fingerprint: {e}")
|
||||
|
||||
|
||||
return recipe_data
|
||||
except Exception as e:
|
||||
logger.error(f"Error loading recipe file {recipe_path}: {e}")
|
||||
import traceback
|
||||
traceback.print_exc(file=sys.stderr)
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _write_recipe_file(recipe_path: str, recipe_data: Dict[str, Any]) -> None:
|
||||
"""Persist ``recipe_data`` back to ``recipe_path`` with standard formatting."""
|
||||
|
||||
with open(recipe_path, 'w', encoding='utf-8') as file_obj:
|
||||
json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)
|
||||
|
||||
async def _update_lora_information(self, recipe_data: Dict) -> bool:
|
||||
"""Update LoRA information with hash and file_name
|
||||
@@ -542,6 +590,48 @@ class RecipeScanner:
|
||||
logger.error(f"Error getting hash from Civitai: {e}")
|
||||
return None, False
|
||||
|
||||
def _get_lora_from_version_index(self, model_version_id: Any) -> Optional[Dict[str, Any]]:
|
||||
"""Quickly fetch a cached LoRA entry by modelVersionId using the version index."""
|
||||
|
||||
if not self._lora_scanner:
|
||||
return None
|
||||
|
||||
cache = getattr(self._lora_scanner, "_cache", None)
|
||||
if cache is None:
|
||||
return None
|
||||
|
||||
version_index = getattr(cache, "version_index", None)
|
||||
if not version_index:
|
||||
return None
|
||||
|
||||
try:
|
||||
normalized_id = int(model_version_id)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
return version_index.get(normalized_id)
|
||||
|
||||
def _get_checkpoint_from_version_index(self, model_version_id: Any) -> Optional[Dict[str, Any]]:
|
||||
"""Fetch a cached checkpoint entry by version id."""
|
||||
|
||||
if not self._checkpoint_scanner:
|
||||
return None
|
||||
|
||||
cache = getattr(self._checkpoint_scanner, "_cache", None)
|
||||
if cache is None:
|
||||
return None
|
||||
|
||||
version_index = getattr(cache, "version_index", None)
|
||||
if not version_index:
|
||||
return None
|
||||
|
||||
try:
|
||||
normalized_id = int(model_version_id)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
return version_index.get(normalized_id)
|
||||
|
||||
async def _determine_base_model(self, loras: List[Dict]) -> Optional[str]:
|
||||
"""Determine the most common base model among LoRAs"""
|
||||
base_models = {}
|
||||
@@ -580,6 +670,80 @@ class RecipeScanner:
|
||||
logger.error(f"Error getting base model for lora: {e}")
|
||||
return None
|
||||
|
||||
def _normalize_checkpoint_entry(self, checkpoint_raw: Any) -> Optional[Dict[str, Any]]:
|
||||
"""Coerce legacy or malformed checkpoint entries into a dict."""
|
||||
|
||||
if isinstance(checkpoint_raw, dict):
|
||||
return dict(checkpoint_raw)
|
||||
|
||||
if isinstance(checkpoint_raw, (list, tuple)) and len(checkpoint_raw) == 1:
|
||||
return self._normalize_checkpoint_entry(checkpoint_raw[0])
|
||||
|
||||
if isinstance(checkpoint_raw, str):
|
||||
name = checkpoint_raw.strip()
|
||||
if not name:
|
||||
return None
|
||||
|
||||
file_name = os.path.splitext(os.path.basename(name))[0]
|
||||
return {
|
||||
"name": name,
|
||||
"file_name": file_name,
|
||||
}
|
||||
|
||||
logger.warning("Unexpected checkpoint payload type %s", type(checkpoint_raw).__name__)
|
||||
return None
|
||||
|
||||
def _enrich_checkpoint_entry(self, checkpoint: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Populate convenience fields for a checkpoint entry."""
|
||||
|
||||
if not checkpoint or not isinstance(checkpoint, dict) or not self._checkpoint_scanner:
|
||||
return checkpoint
|
||||
|
||||
hash_value = (checkpoint.get('hash') or '').lower()
|
||||
version_entry = None
|
||||
model_version_id = checkpoint.get('id') or checkpoint.get('modelVersionId')
|
||||
if not hash_value and model_version_id is not None:
|
||||
version_entry = self._get_checkpoint_from_version_index(model_version_id)
|
||||
|
||||
try:
|
||||
preview_url = checkpoint.get('preview_url') or checkpoint.get('thumbnailUrl')
|
||||
if preview_url:
|
||||
checkpoint['preview_url'] = self._normalize_preview_url(preview_url)
|
||||
|
||||
if hash_value:
|
||||
checkpoint['inLibrary'] = self._checkpoint_scanner.has_hash(hash_value)
|
||||
checkpoint['preview_url'] = self._normalize_preview_url(
|
||||
checkpoint.get('preview_url')
|
||||
or self._checkpoint_scanner.get_preview_url_by_hash(hash_value)
|
||||
)
|
||||
checkpoint['localPath'] = self._checkpoint_scanner.get_path_by_hash(hash_value)
|
||||
elif version_entry:
|
||||
checkpoint['inLibrary'] = True
|
||||
cached_path = version_entry.get('file_path') or version_entry.get('path')
|
||||
if cached_path:
|
||||
checkpoint.setdefault('localPath', cached_path)
|
||||
if not checkpoint.get('file_name'):
|
||||
checkpoint['file_name'] = os.path.splitext(os.path.basename(cached_path))[0]
|
||||
|
||||
if version_entry.get('sha256') and not checkpoint.get('hash'):
|
||||
checkpoint['hash'] = version_entry.get('sha256')
|
||||
|
||||
preview_url = self._normalize_preview_url(version_entry.get('preview_url'))
|
||||
if preview_url:
|
||||
checkpoint.setdefault('preview_url', preview_url)
|
||||
|
||||
if version_entry.get('model_type'):
|
||||
checkpoint.setdefault('model_type', version_entry.get('model_type'))
|
||||
else:
|
||||
checkpoint.setdefault('inLibrary', False)
|
||||
|
||||
if checkpoint.get('preview_url'):
|
||||
checkpoint['preview_url'] = self._normalize_preview_url(checkpoint['preview_url'])
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
logger.debug("Error enriching checkpoint entry %s: %s", hash_value or model_version_id, exc)
|
||||
|
||||
return checkpoint
|
||||
|
||||
def _enrich_lora_entry(self, lora: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Populate convenience fields for a LoRA entry."""
|
||||
|
||||
@@ -587,18 +751,56 @@ class RecipeScanner:
|
||||
return lora
|
||||
|
||||
hash_value = (lora.get('hash') or '').lower()
|
||||
if not hash_value:
|
||||
return lora
|
||||
version_entry = None
|
||||
if not hash_value and lora.get('modelVersionId') is not None:
|
||||
version_entry = self._get_lora_from_version_index(lora.get('modelVersionId'))
|
||||
|
||||
try:
|
||||
lora['inLibrary'] = self._lora_scanner.has_hash(hash_value)
|
||||
lora['preview_url'] = self._lora_scanner.get_preview_url_by_hash(hash_value)
|
||||
lora['localPath'] = self._lora_scanner.get_path_by_hash(hash_value)
|
||||
if hash_value:
|
||||
lora['inLibrary'] = self._lora_scanner.has_hash(hash_value)
|
||||
lora['preview_url'] = self._normalize_preview_url(
|
||||
self._lora_scanner.get_preview_url_by_hash(hash_value)
|
||||
)
|
||||
lora['localPath'] = self._lora_scanner.get_path_by_hash(hash_value)
|
||||
elif version_entry:
|
||||
lora['inLibrary'] = True
|
||||
cached_path = version_entry.get('file_path') or version_entry.get('path')
|
||||
if cached_path:
|
||||
lora.setdefault('localPath', cached_path)
|
||||
if not lora.get('file_name'):
|
||||
lora['file_name'] = os.path.splitext(os.path.basename(cached_path))[0]
|
||||
|
||||
if version_entry.get('sha256') and not lora.get('hash'):
|
||||
lora['hash'] = version_entry.get('sha256')
|
||||
|
||||
preview_url = self._normalize_preview_url(version_entry.get('preview_url'))
|
||||
if preview_url:
|
||||
lora.setdefault('preview_url', preview_url)
|
||||
else:
|
||||
lora.setdefault('inLibrary', False)
|
||||
|
||||
if lora.get('preview_url'):
|
||||
lora['preview_url'] = self._normalize_preview_url(lora['preview_url'])
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
logger.debug("Error enriching lora entry %s: %s", hash_value, exc)
|
||||
|
||||
return lora
|
||||
|
||||
def _normalize_preview_url(self, preview_url: Optional[str]) -> Optional[str]:
|
||||
"""Return a preview URL that is reachable from the browser."""
|
||||
|
||||
if not preview_url or not isinstance(preview_url, str):
|
||||
return preview_url
|
||||
|
||||
normalized = preview_url.strip()
|
||||
if normalized.startswith("/api/lm/previews?path="):
|
||||
return normalized
|
||||
|
||||
if os.path.isabs(normalized):
|
||||
return config.get_preview_static_url(normalized)
|
||||
|
||||
return normalized
|
||||
|
||||
async def get_local_lora(self, name: str) -> Optional[Dict[str, Any]]:
|
||||
"""Lookup a local LoRA model by name."""
|
||||
|
||||
@@ -625,6 +827,17 @@ class RecipeScanner:
|
||||
# Get base dataset
|
||||
filtered_data = cache.sorted_by_date if sort_by == 'date' else cache.sorted_by_name
|
||||
|
||||
# Apply SFW filtering if enabled
|
||||
from .settings_manager import get_settings_manager
|
||||
settings = get_settings_manager()
|
||||
if settings.get("show_only_sfw", False):
|
||||
from ..utils.constants import NSFW_LEVELS
|
||||
threshold = NSFW_LEVELS.get("R", 4) # Default to R level (4) if not found
|
||||
filtered_data = [
|
||||
item for item in filtered_data
|
||||
if not item.get("preview_nsfw_level") or item.get("preview_nsfw_level") < threshold
|
||||
]
|
||||
|
||||
# Special case: Filter by LoRA hash (takes precedence if bypass_filters is True)
|
||||
if lora_hash:
|
||||
# Filter recipes that contain this LoRA hash
|
||||
@@ -696,10 +909,32 @@ class RecipeScanner:
|
||||
|
||||
# Filter by tags
|
||||
if 'tags' in filters and filters['tags']:
|
||||
filtered_data = [
|
||||
item for item in filtered_data
|
||||
if any(tag in item.get('tags', []) for tag in filters['tags'])
|
||||
]
|
||||
tag_spec = filters['tags']
|
||||
include_tags = set()
|
||||
exclude_tags = set()
|
||||
|
||||
if isinstance(tag_spec, dict):
|
||||
for tag, state in tag_spec.items():
|
||||
if not tag:
|
||||
continue
|
||||
if state == 'exclude':
|
||||
exclude_tags.add(tag)
|
||||
else:
|
||||
include_tags.add(tag)
|
||||
else:
|
||||
include_tags = {tag for tag in tag_spec if tag}
|
||||
|
||||
if include_tags:
|
||||
filtered_data = [
|
||||
item for item in filtered_data
|
||||
if any(tag in include_tags for tag in (item.get('tags', []) or []))
|
||||
]
|
||||
|
||||
if exclude_tags:
|
||||
filtered_data = [
|
||||
item for item in filtered_data
|
||||
if not any(tag in exclude_tags for tag in (item.get('tags', []) or []))
|
||||
]
|
||||
|
||||
# Calculate pagination
|
||||
total_items = len(filtered_data)
|
||||
@@ -713,6 +948,12 @@ class RecipeScanner:
|
||||
for item in paginated_items:
|
||||
if 'loras' in item:
|
||||
item['loras'] = [self._enrich_lora_entry(dict(lora)) for lora in item['loras']]
|
||||
if item.get('checkpoint'):
|
||||
checkpoint_entry = self._normalize_checkpoint_entry(item['checkpoint'])
|
||||
if checkpoint_entry:
|
||||
item['checkpoint'] = self._enrich_checkpoint_entry(checkpoint_entry)
|
||||
else:
|
||||
item.pop('checkpoint', None)
|
||||
|
||||
result = {
|
||||
'items': paginated_items,
|
||||
@@ -760,6 +1001,12 @@ class RecipeScanner:
|
||||
# Add lora metadata
|
||||
if 'loras' in formatted_recipe:
|
||||
formatted_recipe['loras'] = [self._enrich_lora_entry(dict(lora)) for lora in formatted_recipe['loras']]
|
||||
if formatted_recipe.get('checkpoint'):
|
||||
checkpoint_entry = self._normalize_checkpoint_entry(formatted_recipe['checkpoint'])
|
||||
if checkpoint_entry:
|
||||
formatted_recipe['checkpoint'] = self._enrich_checkpoint_entry(checkpoint_entry)
|
||||
else:
|
||||
formatted_recipe.pop('checkpoint', None)
|
||||
|
||||
return formatted_recipe
|
||||
|
||||
|
||||
@@ -107,6 +107,12 @@ class RecipeAnalysisService:
|
||||
raise RecipeDownloadError("No image URL found in Civitai response")
|
||||
await self._download_image(image_url, temp_path)
|
||||
metadata = image_info.get("meta") if "meta" in image_info else None
|
||||
if (
|
||||
isinstance(metadata, dict)
|
||||
and "meta" in metadata
|
||||
and isinstance(metadata["meta"], dict)
|
||||
):
|
||||
metadata = metadata["meta"]
|
||||
else:
|
||||
await self._download_image(url, temp_path)
|
||||
|
||||
|
||||
@@ -73,19 +73,20 @@ class RecipePersistenceService:
|
||||
)
|
||||
image_filename = f"{recipe_id}{extension}"
|
||||
image_path = os.path.join(recipes_dir, image_filename)
|
||||
with open(image_path, "wb") as file_obj:
|
||||
normalized_image_path = os.path.normpath(image_path)
|
||||
with open(normalized_image_path, "wb") as file_obj:
|
||||
file_obj.write(optimized_image)
|
||||
|
||||
current_time = time.time()
|
||||
loras_data = [self._normalise_lora_entry(lora) for lora in metadata.get("loras", [])]
|
||||
loras_data = [self._normalise_lora_entry(lora) for lora in (metadata.get("loras") or [])]
|
||||
checkpoint_entry = self._sanitize_checkpoint_entry(self._extract_checkpoint_entry(metadata))
|
||||
|
||||
gen_params = metadata.get("gen_params", {})
|
||||
gen_params = metadata.get("gen_params") or {}
|
||||
if not gen_params and "raw_metadata" in metadata:
|
||||
raw_metadata = metadata.get("raw_metadata", {})
|
||||
gen_params = {
|
||||
"prompt": raw_metadata.get("prompt", ""),
|
||||
"negative_prompt": raw_metadata.get("negative_prompt", ""),
|
||||
"checkpoint": raw_metadata.get("checkpoint", {}),
|
||||
"steps": raw_metadata.get("steps", ""),
|
||||
"sampler": raw_metadata.get("sampler", ""),
|
||||
"cfg_scale": raw_metadata.get("cfg_scale", ""),
|
||||
@@ -94,10 +95,13 @@ class RecipePersistenceService:
|
||||
"clip_skip": raw_metadata.get("clip_skip", ""),
|
||||
}
|
||||
|
||||
# Drop checkpoint duplication from generation parameters to store it only at top level
|
||||
gen_params.pop("checkpoint", None)
|
||||
|
||||
fingerprint = calculate_recipe_fingerprint(loras_data)
|
||||
recipe_data: Dict[str, Any] = {
|
||||
"id": recipe_id,
|
||||
"file_path": image_path,
|
||||
"file_path": normalized_image_path,
|
||||
"title": name,
|
||||
"modified": current_time,
|
||||
"created_date": current_time,
|
||||
@@ -106,6 +110,8 @@ class RecipePersistenceService:
|
||||
"gen_params": gen_params,
|
||||
"fingerprint": fingerprint,
|
||||
}
|
||||
if checkpoint_entry:
|
||||
recipe_data["checkpoint"] = checkpoint_entry
|
||||
|
||||
tags_list = list(tags)
|
||||
if tags_list:
|
||||
@@ -116,10 +122,11 @@ class RecipePersistenceService:
|
||||
|
||||
json_filename = f"{recipe_id}.recipe.json"
|
||||
json_path = os.path.join(recipes_dir, json_filename)
|
||||
json_path = os.path.normpath(json_path)
|
||||
with open(json_path, "w", encoding="utf-8") as file_obj:
|
||||
json.dump(recipe_data, file_obj, indent=4, ensure_ascii=False)
|
||||
|
||||
self._exif_utils.append_recipe_metadata(image_path, recipe_data)
|
||||
self._exif_utils.append_recipe_metadata(normalized_image_path, recipe_data)
|
||||
|
||||
matching_recipes = await self._find_matching_recipes(recipe_scanner, fingerprint, exclude_id=recipe_id)
|
||||
await recipe_scanner.add_recipe(recipe_data)
|
||||
@@ -128,7 +135,7 @@ class RecipePersistenceService:
|
||||
{
|
||||
"success": True,
|
||||
"recipe_id": recipe_id,
|
||||
"image_path": image_path,
|
||||
"image_path": normalized_image_path,
|
||||
"json_path": json_path,
|
||||
"matching_recipes": matching_recipes,
|
||||
}
|
||||
@@ -293,8 +300,6 @@ class RecipePersistenceService:
|
||||
|
||||
lora_stack = metadata.get("loras", "")
|
||||
lora_matches = re.findall(r"<lora:([^:]+):([^>]+)>", lora_stack)
|
||||
if not lora_matches:
|
||||
raise RecipeValidationError("No LoRAs found in the generation metadata")
|
||||
|
||||
loras_data = []
|
||||
base_model_counts: Dict[str, int] = {}
|
||||
@@ -330,7 +335,7 @@ class RecipePersistenceService:
|
||||
"created_date": time.time(),
|
||||
"base_model": most_common_base_model,
|
||||
"loras": loras_data,
|
||||
"checkpoint": metadata.get("checkpoint", ""),
|
||||
"checkpoint": self._sanitize_checkpoint_entry(metadata.get("checkpoint", "")),
|
||||
"gen_params": {
|
||||
key: value
|
||||
for key, value in metadata.items()
|
||||
@@ -359,6 +364,30 @@ class RecipePersistenceService:
|
||||
|
||||
# Helper methods ---------------------------------------------------
|
||||
|
||||
def _extract_checkpoint_entry(self, metadata: dict[str, Any]) -> Optional[dict[str, Any]]:
|
||||
"""Pull a checkpoint entry from various metadata locations."""
|
||||
|
||||
checkpoint_entry = metadata.get("checkpoint") or metadata.get("model")
|
||||
if not checkpoint_entry:
|
||||
gen_params = metadata.get("gen_params") or {}
|
||||
checkpoint_entry = gen_params.get("checkpoint")
|
||||
|
||||
return checkpoint_entry if isinstance(checkpoint_entry, dict) else None
|
||||
|
||||
def _sanitize_checkpoint_entry(self, checkpoint_entry: Optional[dict[str, Any]]) -> Optional[dict[str, Any]]:
|
||||
"""Remove transient/local-only fields from checkpoint metadata."""
|
||||
|
||||
if not checkpoint_entry:
|
||||
return None
|
||||
|
||||
if not isinstance(checkpoint_entry, dict):
|
||||
return checkpoint_entry
|
||||
|
||||
pruned = dict(checkpoint_entry)
|
||||
for key in ("existsLocally", "localPath", "thumbnailUrl", "size", "downloadUrl"):
|
||||
pruned.pop(key, None)
|
||||
return pruned
|
||||
|
||||
def _resolve_image_bytes(self, image_bytes: bytes | None, image_base64: str | None) -> bytes:
|
||||
if image_bytes is not None:
|
||||
return image_bytes
|
||||
|
||||
@@ -2,9 +2,11 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import tempfile
|
||||
import time
|
||||
import unicodedata
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict
|
||||
|
||||
@@ -59,8 +61,9 @@ class RecipeSharingService:
|
||||
}
|
||||
self._cleanup_shared_recipes()
|
||||
|
||||
safe_title = recipe.get("title", "").replace(" ", "_").lower()
|
||||
filename = f"recipe_{safe_title}{ext}" if safe_title else f"recipe_{recipe_id}{ext}"
|
||||
filename = self._build_download_filename(
|
||||
title=recipe.get("title", ""), recipe_id=recipe_id, ext=ext
|
||||
)
|
||||
url_path = f"/api/lm/recipe/{recipe_id}/share/download?t={timestamp}"
|
||||
return SharingResult({"success": True, "download_url": url_path, "filename": filename})
|
||||
|
||||
@@ -78,13 +81,38 @@ class RecipeSharingService:
|
||||
raise RecipeNotFoundError("Shared recipe file not found")
|
||||
|
||||
recipe = await recipe_scanner.get_recipe_by_id(recipe_id)
|
||||
filename_base = (
|
||||
f"recipe_{recipe.get('title', '').replace(' ', '_').lower()}" if recipe else recipe_id
|
||||
)
|
||||
ext = os.path.splitext(file_path)[1]
|
||||
download_filename = f"{filename_base}{ext}"
|
||||
download_filename = self._build_download_filename(
|
||||
title=recipe.get("title", "") if recipe else "",
|
||||
recipe_id=recipe_id,
|
||||
ext=ext,
|
||||
)
|
||||
return DownloadInfo(file_path=file_path, download_filename=download_filename)
|
||||
|
||||
@staticmethod
|
||||
def _build_download_filename(*, title: str, recipe_id: str, ext: str) -> str:
|
||||
"""Generate a sanitized filename safe for HTTP headers and filesystems."""
|
||||
|
||||
ext = ext or ""
|
||||
safe_title = RecipeSharingService._slugify(title)
|
||||
fallback = RecipeSharingService._slugify(recipe_id)
|
||||
identifier = safe_title or fallback or "recipe"
|
||||
return f"recipe_{identifier}{ext}"
|
||||
|
||||
@staticmethod
|
||||
def _slugify(value: str) -> str:
|
||||
"""Convert arbitrary input into a lowercase, header-safe slug."""
|
||||
|
||||
if not value:
|
||||
return ""
|
||||
|
||||
normalized = unicodedata.normalize("NFKD", value)
|
||||
ascii_value = normalized.encode("ascii", "ignore").decode("ascii")
|
||||
ascii_value = ascii_value.replace("\n", " ").replace("\r", " ")
|
||||
sanitized = re.sub(r"[^A-Za-z0-9._-]+", "_", ascii_value)
|
||||
sanitized = re.sub(r"_+", "_", sanitized).strip("._-")
|
||||
return sanitized.lower()
|
||||
|
||||
def _cleanup_shared_recipes(self) -> None:
|
||||
for recipe_id in list(self._shared_recipes.keys()):
|
||||
shared = self._shared_recipes.get(recipe_id)
|
||||
|
||||
@@ -1,13 +1,18 @@
|
||||
import asyncio
|
||||
import copy
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timezone
|
||||
from threading import Lock
|
||||
from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence
|
||||
from typing import Any, Awaitable, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple
|
||||
|
||||
from ..utils.constants import DEFAULT_PRIORITY_TAG_CONFIG
|
||||
from ..utils.settings_paths import ensure_settings_file
|
||||
from platformdirs import user_config_dir
|
||||
|
||||
from ..utils.constants import DEFAULT_HASH_CHUNK_SIZE_MB, DEFAULT_PRIORITY_TAG_CONFIG
|
||||
from ..utils.settings_paths import APP_NAME, ensure_settings_file, get_legacy_settings_path
|
||||
from ..utils.tag_priorities import (
|
||||
PriorityTagEntry,
|
||||
collect_canonical_tags,
|
||||
@@ -18,8 +23,16 @@ from ..utils.tag_priorities import (
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
CORE_USER_SETTING_KEYS: Tuple[str, ...] = (
|
||||
"civitai_api_key",
|
||||
"folder_paths",
|
||||
)
|
||||
|
||||
|
||||
DEFAULT_SETTINGS: Dict[str, Any] = {
|
||||
"civitai_api_key": "",
|
||||
"use_portable_settings": False,
|
||||
"hash_chunk_size_mb": DEFAULT_HASH_CHUNK_SIZE_MB,
|
||||
"language": "en",
|
||||
"show_only_sfw": False,
|
||||
"enable_metadata_archive_db": False,
|
||||
@@ -34,6 +47,7 @@ DEFAULT_SETTINGS: Dict[str, Any] = {
|
||||
"default_embedding_root": "",
|
||||
"base_model_path_mappings": {},
|
||||
"download_path_templates": {},
|
||||
"folder_paths": {},
|
||||
"example_images_path": "",
|
||||
"optimize_example_images": True,
|
||||
"auto_download_example_images": False,
|
||||
@@ -41,16 +55,31 @@ DEFAULT_SETTINGS: Dict[str, Any] = {
|
||||
"autoplay_on_hover": False,
|
||||
"display_density": "default",
|
||||
"card_info_display": "always",
|
||||
"show_folder_sidebar": True,
|
||||
"include_trigger_words": False,
|
||||
"compact_mode": False,
|
||||
"priority_tags": DEFAULT_PRIORITY_TAG_CONFIG.copy(),
|
||||
"model_name_display": "model_name",
|
||||
"model_card_footer_action": "example_images",
|
||||
"update_flag_strategy": "same_base",
|
||||
"auto_organize_exclusions": [],
|
||||
}
|
||||
|
||||
|
||||
class SettingsManager:
|
||||
def __init__(self):
|
||||
self.settings_file = ensure_settings_file(logger)
|
||||
self._pending_portable_switch: Optional[Dict[str, str]] = None
|
||||
self._standalone_mode = self._detect_standalone_mode()
|
||||
self._startup_messages: List[Dict[str, Any]] = []
|
||||
self._needs_initial_save = False
|
||||
self._bootstrap_reason: Optional[str] = None
|
||||
self._seed_template: Optional[Dict[str, Any]] = None
|
||||
self._template_payload_cache: Optional[Dict[str, Any]] = None
|
||||
self._template_payload_cache_loaded = False
|
||||
self._original_disk_payload: Optional[Dict[str, Any]] = None
|
||||
self._preserve_disk_template = False
|
||||
self._template_path = Path(__file__).resolve().parents[2] / "settings.json.example"
|
||||
self.settings = self._load_settings()
|
||||
self._migrate_setting_keys()
|
||||
self._ensure_default_settings()
|
||||
@@ -58,45 +87,217 @@ class SettingsManager:
|
||||
self._migrate_download_path_template()
|
||||
self._auto_set_default_roots()
|
||||
self._check_environment_variables()
|
||||
self._collect_configuration_warnings()
|
||||
|
||||
if self._needs_initial_save:
|
||||
self._save_settings()
|
||||
self._needs_initial_save = False
|
||||
|
||||
def _detect_standalone_mode(self) -> bool:
|
||||
"""Return ``True`` when running in standalone mode."""
|
||||
|
||||
return os.environ.get("LORA_MANAGER_STANDALONE") == "1"
|
||||
|
||||
def _load_settings(self) -> Dict[str, Any]:
|
||||
"""Load settings from file"""
|
||||
if os.path.exists(self.settings_file):
|
||||
try:
|
||||
with open(self.settings_file, 'r', encoding='utf-8') as f:
|
||||
return json.load(f)
|
||||
except Exception as e:
|
||||
logger.error(f"Error loading settings: {e}")
|
||||
data = json.load(f)
|
||||
if isinstance(data, dict):
|
||||
self._original_disk_payload = copy.deepcopy(data)
|
||||
if self._matches_template_payload(data):
|
||||
self._preserve_disk_template = True
|
||||
return data
|
||||
except json.JSONDecodeError as exc:
|
||||
logger.error("Failed to parse settings.json: %s", exc)
|
||||
self._add_startup_message(
|
||||
code="settings-json-invalid",
|
||||
title="Settings file could not be parsed",
|
||||
message=(
|
||||
"LoRA Manager could not parse settings.json. Default settings "
|
||||
"will be used for this session."
|
||||
),
|
||||
severity="error",
|
||||
actions=self._default_settings_actions(),
|
||||
details=str(exc),
|
||||
dismissible=False,
|
||||
)
|
||||
self._needs_initial_save = True
|
||||
self._bootstrap_reason = "invalid"
|
||||
except Exception as exc: # pragma: no cover - defensive guard
|
||||
logger.error("Unexpected error loading settings: %s", exc)
|
||||
self._add_startup_message(
|
||||
code="settings-json-unreadable",
|
||||
title="Settings file could not be read",
|
||||
message="LoRA Manager could not read settings.json. Default settings will be used for this session.",
|
||||
severity="error",
|
||||
actions=self._default_settings_actions(),
|
||||
details=str(exc),
|
||||
dismissible=False,
|
||||
)
|
||||
self._needs_initial_save = True
|
||||
self._bootstrap_reason = "unreadable"
|
||||
|
||||
if not os.path.exists(self.settings_file):
|
||||
self._needs_initial_save = True
|
||||
self._bootstrap_reason = "missing"
|
||||
seeded = self._load_settings_template()
|
||||
if seeded is not None:
|
||||
defaults = self._get_default_settings()
|
||||
merged = self._merge_template_with_defaults(defaults, seeded)
|
||||
return merged
|
||||
return self._get_default_settings()
|
||||
|
||||
def _load_settings_template(self) -> Optional[Dict[str, Any]]:
|
||||
"""Load the bundled template when no user settings are found."""
|
||||
|
||||
payload = self._read_template_payload()
|
||||
if payload is None:
|
||||
return None
|
||||
|
||||
self._seed_template = copy.deepcopy(payload)
|
||||
return copy.deepcopy(payload)
|
||||
|
||||
def _read_template_payload(self) -> Optional[Dict[str, Any]]:
|
||||
"""Return the cached contents of ``settings.json.example`` when available."""
|
||||
|
||||
if self._template_payload_cache_loaded:
|
||||
if self._template_payload_cache is None:
|
||||
return None
|
||||
return copy.deepcopy(self._template_payload_cache)
|
||||
|
||||
self._template_payload_cache_loaded = True
|
||||
|
||||
try:
|
||||
with self._template_path.open("r", encoding="utf-8") as handle:
|
||||
data = json.load(handle)
|
||||
except FileNotFoundError:
|
||||
logger.debug("settings.json.example not found at %s", self._template_path)
|
||||
return None
|
||||
except json.JSONDecodeError as exc:
|
||||
logger.warning("Failed to parse settings.json.example: %s", exc)
|
||||
return None
|
||||
|
||||
if not isinstance(data, dict):
|
||||
logger.debug("settings.json.example is not a JSON object; ignoring template")
|
||||
return None
|
||||
|
||||
self._template_payload_cache = copy.deepcopy(data)
|
||||
return copy.deepcopy(self._template_payload_cache)
|
||||
|
||||
def _matches_template_payload(self, payload: Mapping[str, Any]) -> bool:
|
||||
"""Return ``True`` when ``payload`` matches the bundled template."""
|
||||
|
||||
template = self._read_template_payload()
|
||||
if template is None:
|
||||
return False
|
||||
|
||||
return payload == template
|
||||
|
||||
def _merge_template_with_defaults(
|
||||
self, defaults: Dict[str, Any], template: Mapping[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
"""Merge template values into the in-memory defaults."""
|
||||
|
||||
merged = copy.deepcopy(defaults)
|
||||
for key, value in template.items():
|
||||
if key == "folder_paths" and isinstance(value, Mapping):
|
||||
merged[key] = self._normalize_folder_paths(value)
|
||||
else:
|
||||
merged[key] = copy.deepcopy(value)
|
||||
|
||||
merged.setdefault("language", "en")
|
||||
merged.setdefault("folder_paths", {})
|
||||
library_name = merged.get("active_library") or "default"
|
||||
merged["libraries"] = {
|
||||
library_name: self._build_library_payload(
|
||||
folder_paths=merged.get("folder_paths", {}),
|
||||
default_lora_root=merged.get("default_lora_root"),
|
||||
default_checkpoint_root=merged.get("default_checkpoint_root"),
|
||||
default_embedding_root=merged.get("default_embedding_root"),
|
||||
)
|
||||
}
|
||||
merged["active_library"] = library_name
|
||||
return merged
|
||||
|
||||
def _ensure_default_settings(self) -> None:
|
||||
"""Ensure all default settings keys exist"""
|
||||
updated = False
|
||||
normalized_priority = self._normalize_priority_tag_config(
|
||||
self.settings.get("priority_tags")
|
||||
)
|
||||
if normalized_priority != self.settings.get("priority_tags"):
|
||||
self.settings["priority_tags"] = normalized_priority
|
||||
updated = True
|
||||
for key, value in self._get_default_settings().items():
|
||||
defaults = self._get_default_settings()
|
||||
updated_existing = False
|
||||
inserted_defaults = False
|
||||
|
||||
if "priority_tags" in self.settings:
|
||||
normalized_priority = self._normalize_priority_tag_config(
|
||||
self.settings.get("priority_tags")
|
||||
)
|
||||
if normalized_priority != self.settings.get("priority_tags"):
|
||||
self.settings["priority_tags"] = normalized_priority
|
||||
updated_existing = True
|
||||
else:
|
||||
self.settings["priority_tags"] = copy.deepcopy(
|
||||
defaults.get("priority_tags", DEFAULT_PRIORITY_TAG_CONFIG)
|
||||
)
|
||||
inserted_defaults = True
|
||||
|
||||
if "auto_organize_exclusions" in self.settings:
|
||||
normalized_exclusions = self.normalize_auto_organize_exclusions(
|
||||
self.settings.get("auto_organize_exclusions")
|
||||
)
|
||||
if normalized_exclusions != self.settings.get("auto_organize_exclusions"):
|
||||
self.settings["auto_organize_exclusions"] = normalized_exclusions
|
||||
updated_existing = True
|
||||
else:
|
||||
self.settings["auto_organize_exclusions"] = []
|
||||
inserted_defaults = True
|
||||
|
||||
for key, value in defaults.items():
|
||||
if key == "priority_tags":
|
||||
continue
|
||||
if key not in self.settings:
|
||||
if isinstance(value, dict):
|
||||
self.settings[key] = value.copy()
|
||||
self.settings[key] = copy.deepcopy(value)
|
||||
else:
|
||||
self.settings[key] = value
|
||||
updated = True
|
||||
if updated:
|
||||
inserted_defaults = True
|
||||
|
||||
if updated_existing or (
|
||||
inserted_defaults and self._bootstrap_reason in {"invalid", "unreadable"}
|
||||
):
|
||||
self._save_settings()
|
||||
|
||||
def _migrate_to_library_registry(self) -> None:
|
||||
"""Ensure settings include the multi-library registry structure."""
|
||||
libraries = self.settings.get("libraries")
|
||||
active_name = self.settings.get("active_library")
|
||||
initial_bootstrap = self._bootstrap_reason == "missing"
|
||||
|
||||
if not isinstance(libraries, dict) or not libraries:
|
||||
raw_top_level_paths = self.settings.get("folder_paths", {})
|
||||
normalized_top_level_paths: Dict[str, List[str]] = {}
|
||||
if isinstance(raw_top_level_paths, Mapping):
|
||||
normalized_top_level_paths = self._normalize_folder_paths(raw_top_level_paths)
|
||||
if normalized_top_level_paths != raw_top_level_paths:
|
||||
self.settings["folder_paths"] = copy.deepcopy(normalized_top_level_paths)
|
||||
|
||||
top_level_has_paths = self._has_configured_paths(normalized_top_level_paths)
|
||||
|
||||
needs_library_bootstrap = not isinstance(libraries, dict) or not libraries
|
||||
|
||||
if (
|
||||
not needs_library_bootstrap
|
||||
and top_level_has_paths
|
||||
and len(libraries) == 1
|
||||
):
|
||||
only_library_payload = next(iter(libraries.values()))
|
||||
if isinstance(only_library_payload, Mapping):
|
||||
folder_payload = only_library_payload.get("folder_paths")
|
||||
if not self._has_configured_paths(folder_payload):
|
||||
needs_library_bootstrap = True
|
||||
|
||||
if needs_library_bootstrap:
|
||||
library_name = active_name or "default"
|
||||
library_payload = self._build_library_payload(
|
||||
folder_paths=self.settings.get("folder_paths", {}),
|
||||
folder_paths=normalized_top_level_paths,
|
||||
default_lora_root=self.settings.get("default_lora_root", ""),
|
||||
default_checkpoint_root=self.settings.get("default_checkpoint_root", ""),
|
||||
default_embedding_root=self.settings.get("default_embedding_root", ""),
|
||||
@@ -105,17 +306,40 @@ class SettingsManager:
|
||||
self.settings["libraries"] = libraries
|
||||
self.settings["active_library"] = library_name
|
||||
self._sync_active_library_to_root(save=False)
|
||||
self._save_settings()
|
||||
if not initial_bootstrap and not self._preserve_disk_template:
|
||||
self._save_settings()
|
||||
return
|
||||
|
||||
seed_library_name: Optional[str] = None
|
||||
if top_level_has_paths and isinstance(libraries, dict):
|
||||
target_name: Optional[str] = None
|
||||
if active_name and active_name in libraries:
|
||||
target_name = active_name
|
||||
elif len(libraries) == 1:
|
||||
target_name = next(iter(libraries.keys()))
|
||||
|
||||
if target_name:
|
||||
candidate_payload = libraries.get(target_name)
|
||||
if isinstance(candidate_payload, Mapping) and not self._has_configured_paths(candidate_payload.get("folder_paths")):
|
||||
seed_library_name = target_name
|
||||
|
||||
sanitized_libraries: Dict[str, Dict[str, Any]] = {}
|
||||
changed = False
|
||||
for name, data in libraries.items():
|
||||
if not isinstance(data, dict):
|
||||
data = {}
|
||||
changed = True
|
||||
|
||||
candidate_folder_paths = data.get("folder_paths")
|
||||
if (
|
||||
seed_library_name == name
|
||||
and not self._has_configured_paths(candidate_folder_paths)
|
||||
and top_level_has_paths
|
||||
):
|
||||
candidate_folder_paths = normalized_top_level_paths
|
||||
|
||||
payload = self._build_library_payload(
|
||||
folder_paths=data.get("folder_paths"),
|
||||
folder_paths=candidate_folder_paths,
|
||||
default_lora_root=data.get("default_lora_root"),
|
||||
default_checkpoint_root=data.get("default_checkpoint_root"),
|
||||
default_embedding_root=data.get("default_embedding_root"),
|
||||
@@ -130,12 +354,15 @@ class SettingsManager:
|
||||
self.settings["libraries"] = sanitized_libraries
|
||||
|
||||
if not active_name or active_name not in sanitized_libraries:
|
||||
changed = True
|
||||
if sanitized_libraries:
|
||||
self.settings["active_library"] = next(iter(sanitized_libraries.keys()))
|
||||
else:
|
||||
self.settings["active_library"] = "default"
|
||||
|
||||
self._sync_active_library_to_root(save=changed)
|
||||
self._sync_active_library_to_root(save=changed and not initial_bootstrap)
|
||||
if changed and initial_bootstrap:
|
||||
self._needs_initial_save = True
|
||||
|
||||
def _sync_active_library_to_root(self, *, save: bool = False) -> None:
|
||||
"""Update top-level folder path settings to mirror the active library."""
|
||||
@@ -224,6 +451,25 @@ class SettingsManager:
|
||||
normalized[key] = cleaned
|
||||
return normalized
|
||||
|
||||
def _has_configured_paths(self, folder_paths: Any) -> bool:
|
||||
if not isinstance(folder_paths, Mapping):
|
||||
return False
|
||||
|
||||
for values in folder_paths.values():
|
||||
if isinstance(values, str):
|
||||
candidate_values = [values]
|
||||
else:
|
||||
try:
|
||||
candidate_values = list(values) # type: ignore[arg-type]
|
||||
except TypeError:
|
||||
continue
|
||||
|
||||
for path in candidate_values:
|
||||
if isinstance(path, str) and path.strip():
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def _validate_folder_paths(
|
||||
self,
|
||||
library_name: str,
|
||||
@@ -316,6 +562,7 @@ class SettingsManager:
|
||||
'cardInfoDisplay': 'card_info_display',
|
||||
'includeTriggerWords': 'include_trigger_words',
|
||||
'compactMode': 'compact_mode',
|
||||
'modelCardFooterAction': 'model_card_footer_action',
|
||||
}
|
||||
|
||||
updated = False
|
||||
@@ -379,7 +626,10 @@ class SettingsManager:
|
||||
default_checkpoint_root=self.settings.get('default_checkpoint_root'),
|
||||
default_embedding_root=self.settings.get('default_embedding_root'),
|
||||
)
|
||||
self._save_settings()
|
||||
if self._bootstrap_reason == "missing":
|
||||
self._needs_initial_save = True
|
||||
else:
|
||||
self._save_settings()
|
||||
|
||||
def _check_environment_variables(self) -> None:
|
||||
"""Check for environment variables and update settings if needed"""
|
||||
@@ -390,17 +640,108 @@ class SettingsManager:
|
||||
self.settings['civitai_api_key'] = env_api_key
|
||||
self._save_settings()
|
||||
|
||||
def _default_settings_actions(self) -> List[Dict[str, Any]]:
|
||||
return [
|
||||
{
|
||||
"action": "open-settings-location",
|
||||
"label": "Open settings folder",
|
||||
"type": "primary",
|
||||
"icon": "fas fa-folder-open",
|
||||
}
|
||||
]
|
||||
|
||||
def _add_startup_message(
|
||||
self,
|
||||
*,
|
||||
code: str,
|
||||
title: str,
|
||||
message: str,
|
||||
severity: str = "info",
|
||||
actions: Optional[List[Dict[str, Any]]] = None,
|
||||
details: Optional[str] = None,
|
||||
dismissible: bool = False,
|
||||
) -> None:
|
||||
if any(existing.get("code") == code for existing in self._startup_messages):
|
||||
return
|
||||
|
||||
payload: Dict[str, Any] = {
|
||||
"code": code,
|
||||
"title": title,
|
||||
"message": message,
|
||||
"severity": severity.lower(),
|
||||
"dismissible": bool(dismissible),
|
||||
}
|
||||
|
||||
if actions:
|
||||
payload["actions"] = [dict(action) for action in actions]
|
||||
if details:
|
||||
payload["details"] = details
|
||||
payload["settings_file"] = self.settings_file
|
||||
|
||||
self._startup_messages.append(payload)
|
||||
|
||||
def _collect_configuration_warnings(self) -> None:
|
||||
if not self._standalone_mode:
|
||||
return
|
||||
|
||||
folder_paths = self.settings.get('folder_paths', {}) or {}
|
||||
monitored_keys = ('loras', 'checkpoints', 'embeddings')
|
||||
|
||||
has_valid_paths = False
|
||||
for key in monitored_keys:
|
||||
raw_paths = folder_paths.get(key) or []
|
||||
if isinstance(raw_paths, str):
|
||||
raw_paths = [raw_paths]
|
||||
try:
|
||||
iterator = list(raw_paths)
|
||||
except TypeError:
|
||||
continue
|
||||
if any(isinstance(path, str) and path and os.path.exists(path) for path in iterator):
|
||||
has_valid_paths = True
|
||||
break
|
||||
|
||||
if not has_valid_paths:
|
||||
if self._bootstrap_reason == "missing":
|
||||
message = (
|
||||
"LoRA Manager created a default settings.json because no configuration was found. "
|
||||
"Edit settings.json to add your model directories so library scanning can run."
|
||||
)
|
||||
else:
|
||||
message = (
|
||||
"LoRA Manager could not locate any configured model directories. "
|
||||
"Edit settings.json to add your model folders so library scanning can run."
|
||||
)
|
||||
self._add_startup_message(
|
||||
code="missing-model-paths",
|
||||
title="Model folders need setup",
|
||||
message=message,
|
||||
severity="warning",
|
||||
actions=self._default_settings_actions(),
|
||||
dismissible=False,
|
||||
)
|
||||
|
||||
def refresh_environment_variables(self) -> None:
|
||||
"""Refresh settings from environment variables"""
|
||||
self._check_environment_variables()
|
||||
|
||||
def _get_default_settings(self) -> Dict[str, Any]:
|
||||
"""Return default settings"""
|
||||
defaults = DEFAULT_SETTINGS.copy()
|
||||
# Ensure nested dicts are independent copies
|
||||
defaults = copy.deepcopy(DEFAULT_SETTINGS)
|
||||
defaults['base_model_path_mappings'] = {}
|
||||
defaults['download_path_templates'] = {}
|
||||
defaults['priority_tags'] = DEFAULT_PRIORITY_TAG_CONFIG.copy()
|
||||
defaults.setdefault('folder_paths', {})
|
||||
defaults['auto_organize_exclusions'] = []
|
||||
|
||||
library_name = defaults.get("active_library") or "default"
|
||||
default_library = self._build_library_payload(
|
||||
folder_paths=defaults.get("folder_paths", {}),
|
||||
default_lora_root=defaults.get("default_lora_root"),
|
||||
default_checkpoint_root=defaults.get("default_checkpoint_root"),
|
||||
default_embedding_root=defaults.get("default_embedding_root"),
|
||||
)
|
||||
defaults['libraries'] = {library_name: default_library}
|
||||
defaults['active_library'] = library_name
|
||||
return defaults
|
||||
|
||||
def _normalize_priority_tag_config(self, value: Any) -> Dict[str, str]:
|
||||
@@ -416,6 +757,35 @@ class SettingsManager:
|
||||
|
||||
return normalized
|
||||
|
||||
def normalize_auto_organize_exclusions(self, value: Any) -> List[str]:
|
||||
if value is None:
|
||||
return []
|
||||
|
||||
if isinstance(value, str):
|
||||
candidates: Iterable[str] = (
|
||||
value.replace("\n", ",").replace(";", ",").split(",")
|
||||
)
|
||||
elif isinstance(value, Sequence) and not isinstance(value, (bytes, bytearray, str)):
|
||||
candidates = value
|
||||
else:
|
||||
return []
|
||||
|
||||
patterns: List[str] = []
|
||||
for raw in candidates:
|
||||
if isinstance(raw, str):
|
||||
token = raw.strip()
|
||||
if token:
|
||||
patterns.append(token)
|
||||
|
||||
unique_patterns: List[str] = []
|
||||
seen = set()
|
||||
for pattern in patterns:
|
||||
if pattern not in seen:
|
||||
seen.add(pattern)
|
||||
unique_patterns.append(pattern)
|
||||
|
||||
return unique_patterns
|
||||
|
||||
def get_priority_tag_config(self) -> Dict[str, str]:
|
||||
stored_value = self.settings.get("priority_tags")
|
||||
normalized = self._normalize_priority_tag_config(stored_value)
|
||||
@@ -424,6 +794,18 @@ class SettingsManager:
|
||||
self._save_settings()
|
||||
return normalized.copy()
|
||||
|
||||
def get_auto_organize_exclusions(self) -> List[str]:
|
||||
exclusions = self.normalize_auto_organize_exclusions(
|
||||
self.settings.get("auto_organize_exclusions")
|
||||
)
|
||||
if exclusions != self.settings.get("auto_organize_exclusions"):
|
||||
self.settings["auto_organize_exclusions"] = exclusions
|
||||
self._save_settings()
|
||||
return exclusions
|
||||
|
||||
def get_startup_messages(self) -> List[Dict[str, Any]]:
|
||||
return [message.copy() for message in self._startup_messages]
|
||||
|
||||
def get_priority_tag_entries(self, model_type: str) -> List[PriorityTagEntry]:
|
||||
config = self.get_priority_tag_config()
|
||||
raw_config = config.get(model_type, "")
|
||||
@@ -456,7 +838,13 @@ class SettingsManager:
|
||||
|
||||
def set(self, key: str, value: Any) -> None:
|
||||
"""Set setting value and save"""
|
||||
if key == "auto_organize_exclusions":
|
||||
value = self.normalize_auto_organize_exclusions(value)
|
||||
self.settings[key] = value
|
||||
portable_switch_pending = False
|
||||
if key == "use_portable_settings" and isinstance(value, bool):
|
||||
portable_switch_pending = True
|
||||
self._prepare_portable_switch(value)
|
||||
if key == 'folder_paths' and isinstance(value, Mapping):
|
||||
self._update_active_library_entry(folder_paths=value) # type: ignore[arg-type]
|
||||
elif key == 'default_lora_root':
|
||||
@@ -465,7 +853,11 @@ class SettingsManager:
|
||||
self._update_active_library_entry(default_checkpoint_root=str(value))
|
||||
elif key == 'default_embedding_root':
|
||||
self._update_active_library_entry(default_embedding_root=str(value))
|
||||
elif key == 'model_name_display':
|
||||
self._notify_model_name_display_change(value)
|
||||
self._save_settings()
|
||||
if portable_switch_pending:
|
||||
self._finalize_portable_switch()
|
||||
|
||||
def delete(self, key: str) -> None:
|
||||
"""Delete setting key and save"""
|
||||
@@ -474,13 +866,217 @@ class SettingsManager:
|
||||
self._save_settings()
|
||||
logger.info(f"Deleted setting: {key}")
|
||||
|
||||
def _prepare_portable_switch(self, use_portable: bool) -> None:
|
||||
"""Prepare switching the settings storage location."""
|
||||
|
||||
legacy_path = get_legacy_settings_path()
|
||||
user_dir = self._get_user_config_directory()
|
||||
user_settings_path = os.path.join(user_dir, "settings.json")
|
||||
|
||||
target_path = legacy_path if use_portable else user_settings_path
|
||||
other_path = user_settings_path if use_portable else legacy_path
|
||||
target_dir = os.path.dirname(target_path)
|
||||
os.makedirs(target_dir, exist_ok=True)
|
||||
|
||||
previous_path = self.settings_file or target_path
|
||||
previous_dir = os.path.dirname(previous_path) or target_dir
|
||||
|
||||
if os.path.abspath(previous_path) != os.path.abspath(target_path):
|
||||
self._copy_model_cache_directory(previous_dir, target_dir)
|
||||
|
||||
self._pending_portable_switch = {"other_path": other_path}
|
||||
self.settings_file = target_path
|
||||
|
||||
def _finalize_portable_switch(self) -> None:
|
||||
"""Mirror the latest settings file to the secondary location."""
|
||||
|
||||
info = self._pending_portable_switch
|
||||
if not info:
|
||||
return
|
||||
|
||||
other_path = info.get("other_path")
|
||||
current_path = self.settings_file
|
||||
|
||||
if not other_path or not current_path:
|
||||
self._pending_portable_switch = None
|
||||
return
|
||||
|
||||
if os.path.abspath(other_path) == os.path.abspath(current_path):
|
||||
self._pending_portable_switch = None
|
||||
return
|
||||
|
||||
other_dir = os.path.dirname(other_path) or os.path.dirname(current_path)
|
||||
if other_dir:
|
||||
os.makedirs(other_dir, exist_ok=True)
|
||||
|
||||
try:
|
||||
shutil.copy2(current_path, other_path)
|
||||
except Exception as exc:
|
||||
logger.warning("Failed to mirror settings.json to %s: %s", other_path, exc)
|
||||
finally:
|
||||
self._pending_portable_switch = None
|
||||
|
||||
def _copy_model_cache_directory(self, source_dir: str, target_dir: str) -> None:
|
||||
"""Copy model_cache artifacts when switching storage locations."""
|
||||
|
||||
if not source_dir or not target_dir:
|
||||
return
|
||||
|
||||
source_cache_dir = os.path.join(source_dir, "model_cache")
|
||||
target_cache_dir = os.path.join(target_dir, "model_cache")
|
||||
if (
|
||||
os.path.isdir(source_cache_dir)
|
||||
and os.path.abspath(source_cache_dir) != os.path.abspath(target_cache_dir)
|
||||
):
|
||||
try:
|
||||
shutil.copytree(source_cache_dir, target_cache_dir, dirs_exist_ok=True)
|
||||
except Exception as exc:
|
||||
logger.warning(
|
||||
"Failed to copy model_cache directory from %s to %s: %s",
|
||||
source_cache_dir,
|
||||
target_cache_dir,
|
||||
exc,
|
||||
)
|
||||
|
||||
source_cache_file = os.path.join(source_dir, "model_cache.sqlite")
|
||||
target_cache_file = os.path.join(target_dir, "model_cache.sqlite")
|
||||
if (
|
||||
os.path.isfile(source_cache_file)
|
||||
and os.path.abspath(source_cache_file) != os.path.abspath(target_cache_file)
|
||||
):
|
||||
try:
|
||||
shutil.copy2(source_cache_file, target_cache_file)
|
||||
except Exception as exc:
|
||||
logger.warning(
|
||||
"Failed to copy model_cache.sqlite from %s to %s: %s",
|
||||
source_cache_file,
|
||||
target_cache_file,
|
||||
exc,
|
||||
)
|
||||
|
||||
def _get_user_config_directory(self) -> str:
|
||||
"""Return the user configuration directory, falling back to ~/.config."""
|
||||
|
||||
try:
|
||||
config_dir = user_config_dir(APP_NAME, appauthor=False) or ""
|
||||
except Exception as exc: # pragma: no cover - defensive fallback
|
||||
logger.warning("Failed to determine user config directory: %s", exc)
|
||||
config_dir = ""
|
||||
|
||||
if not config_dir:
|
||||
config_dir = os.path.join(os.path.expanduser("~"), f".config/{APP_NAME}")
|
||||
|
||||
try:
|
||||
os.makedirs(config_dir, exist_ok=True)
|
||||
except Exception as exc:
|
||||
logger.warning("Failed to create user config directory %s: %s", config_dir, exc)
|
||||
|
||||
return config_dir
|
||||
|
||||
def _notify_model_name_display_change(self, value: Any) -> None:
|
||||
"""Trigger cache resorting when the model name display preference updates."""
|
||||
|
||||
try:
|
||||
from .service_registry import ServiceRegistry # type: ignore
|
||||
except Exception: # pragma: no cover - registry optional in some contexts
|
||||
return
|
||||
|
||||
display_mode = value if isinstance(value, str) else "model_name"
|
||||
pending: List[Tuple[Optional[asyncio.AbstractEventLoop], Awaitable[Any]]] = []
|
||||
|
||||
def _resolve_service_loop(service: Any) -> Optional[asyncio.AbstractEventLoop]:
|
||||
loop = getattr(service, "loop", None)
|
||||
if loop is None:
|
||||
loop = getattr(service, "_loop", None)
|
||||
return loop if isinstance(loop, asyncio.AbstractEventLoop) else None
|
||||
|
||||
for service_name in (
|
||||
"lora_scanner",
|
||||
"checkpoint_scanner",
|
||||
"embedding_scanner",
|
||||
"recipe_scanner",
|
||||
):
|
||||
service = ServiceRegistry.get_service_sync(service_name)
|
||||
if not service or not hasattr(service, "on_model_name_display_changed"):
|
||||
continue
|
||||
|
||||
try:
|
||||
result = service.on_model_name_display_changed(display_mode)
|
||||
except Exception as exc: # pragma: no cover - defensive guard
|
||||
logger.debug(
|
||||
"Service %s failed to schedule name display update: %s",
|
||||
service_name,
|
||||
exc,
|
||||
)
|
||||
continue
|
||||
|
||||
if asyncio.iscoroutine(result):
|
||||
service_loop = _resolve_service_loop(service)
|
||||
pending.append((service_loop, result))
|
||||
|
||||
if not pending:
|
||||
return
|
||||
|
||||
try:
|
||||
loop = asyncio.get_running_loop()
|
||||
except RuntimeError:
|
||||
loop = None
|
||||
|
||||
for service_loop, coroutine in pending:
|
||||
target_loop = service_loop or loop
|
||||
|
||||
if target_loop is None:
|
||||
try:
|
||||
asyncio.run(coroutine)
|
||||
except RuntimeError:
|
||||
logger.debug("Skipping name display update due to missing event loop")
|
||||
continue
|
||||
|
||||
if loop is not None and target_loop is loop:
|
||||
target_loop.create_task(coroutine)
|
||||
continue
|
||||
|
||||
if target_loop.is_running():
|
||||
try:
|
||||
asyncio.run_coroutine_threadsafe(coroutine, target_loop)
|
||||
except Exception as exc: # pragma: no cover - defensive guard
|
||||
logger.debug("Failed to dispatch name display update: %s", exc)
|
||||
continue
|
||||
|
||||
try:
|
||||
asyncio.run(coroutine)
|
||||
except RuntimeError:
|
||||
logger.debug("Skipping name display update due to closed loop")
|
||||
|
||||
def _save_settings(self) -> None:
|
||||
"""Save settings to file"""
|
||||
try:
|
||||
payload = self._serialize_settings_for_disk()
|
||||
with open(self.settings_file, 'w', encoding='utf-8') as f:
|
||||
json.dump(self.settings, f, indent=2)
|
||||
json.dump(payload, f, indent=2)
|
||||
except Exception as e:
|
||||
logger.error(f"Error saving settings: {e}")
|
||||
else:
|
||||
if self._bootstrap_reason == "missing":
|
||||
self._bootstrap_reason = None
|
||||
self._seed_template = None
|
||||
|
||||
def _serialize_settings_for_disk(self) -> Dict[str, Any]:
|
||||
"""Return the settings payload that should be persisted to disk."""
|
||||
|
||||
if self._bootstrap_reason == "missing":
|
||||
minimal: Dict[str, Any] = {}
|
||||
for key in CORE_USER_SETTING_KEYS:
|
||||
if key in self.settings:
|
||||
minimal[key] = copy.deepcopy(self.settings[key])
|
||||
|
||||
if self._seed_template:
|
||||
for key, value in self._seed_template.items():
|
||||
minimal.setdefault(key, copy.deepcopy(value))
|
||||
|
||||
return minimal
|
||||
|
||||
return copy.deepcopy(self.settings)
|
||||
|
||||
def get_libraries(self) -> Dict[str, Dict[str, Any]]:
|
||||
"""Return a copy of the registered libraries."""
|
||||
|
||||
@@ -33,7 +33,8 @@ class TagUpdateService:
|
||||
tags_added: List[str] = []
|
||||
for tag in new_tags:
|
||||
if isinstance(tag, str) and tag.strip():
|
||||
normalized = tag.strip()
|
||||
# Convert all tags to lowercase to avoid case sensitivity issues on Windows
|
||||
normalized = tag.strip().lower()
|
||||
if normalized.lower() not in existing_lower:
|
||||
existing_tags.append(normalized)
|
||||
existing_lower.append(normalized.lower())
|
||||
|
||||
@@ -39,6 +39,7 @@ class AutoOrganizeUseCase:
|
||||
*,
|
||||
file_paths: Optional[Sequence[str]] = None,
|
||||
progress_callback: Optional[ProgressCallback] = None,
|
||||
exclusion_patterns: Optional[Sequence[str]] = None,
|
||||
) -> AutoOrganizeResult:
|
||||
"""Run the auto-organize routine guarded by a shared lock."""
|
||||
|
||||
@@ -53,4 +54,5 @@ class AutoOrganizeUseCase:
|
||||
return await self._file_service.auto_organize_models(
|
||||
file_paths=list(file_paths) if file_paths is not None else None,
|
||||
progress_callback=progress_callback,
|
||||
exclusion_patterns=exclusion_patterns,
|
||||
)
|
||||
|
||||
@@ -2,9 +2,138 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, Iterable, Mapping, Sequence
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
|
||||
|
||||
_DEFAULT_ALLOW_COMMERCIAL_USE: Sequence[str] = ("Sell",)
|
||||
_LICENSE_DEFAULTS: Dict[str, Any] = {
|
||||
"allowNoCredit": True,
|
||||
"allowCommercialUse": _DEFAULT_ALLOW_COMMERCIAL_USE,
|
||||
"allowDerivatives": True,
|
||||
"allowDifferentLicense": True,
|
||||
}
|
||||
_COMMERCIAL_ALLOWED_VALUES = {"sell", "rent", "rentcivit", "image"}
|
||||
_COMMERCIAL_SHIFT = 1
|
||||
|
||||
|
||||
def _normalize_commercial_values(value: Any) -> Sequence[str]:
|
||||
"""Return a normalized list of commercial permissions preserving source values."""
|
||||
|
||||
if value is None:
|
||||
return list(_DEFAULT_ALLOW_COMMERCIAL_USE)
|
||||
|
||||
if isinstance(value, str):
|
||||
return [value]
|
||||
|
||||
if isinstance(value, Iterable):
|
||||
result = []
|
||||
for item in value:
|
||||
if item is None:
|
||||
continue
|
||||
if isinstance(item, str):
|
||||
result.append(item)
|
||||
continue
|
||||
result.append(str(item))
|
||||
if result:
|
||||
return result
|
||||
try:
|
||||
if len(value) == 0: # type: ignore[arg-type]
|
||||
return []
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
return list(_DEFAULT_ALLOW_COMMERCIAL_USE)
|
||||
|
||||
|
||||
def _to_bool(value: Any, fallback: bool) -> bool:
|
||||
if value is None:
|
||||
return fallback
|
||||
return bool(value)
|
||||
|
||||
|
||||
def resolve_license_payload(model_data: Mapping[str, Any] | None) -> Dict[str, Any]:
|
||||
"""Extract license fields from model metadata applying documented defaults."""
|
||||
|
||||
payload: Dict[str, Any] = {}
|
||||
|
||||
allow_no_credit = payload["allowNoCredit"] = _to_bool(
|
||||
(model_data or {}).get("allowNoCredit"),
|
||||
_LICENSE_DEFAULTS["allowNoCredit"],
|
||||
)
|
||||
|
||||
commercial = _normalize_commercial_values(
|
||||
(model_data or {}).get("allowCommercialUse"),
|
||||
)
|
||||
payload["allowCommercialUse"] = list(commercial)
|
||||
|
||||
allow_derivatives = payload["allowDerivatives"] = _to_bool(
|
||||
(model_data or {}).get("allowDerivatives"),
|
||||
_LICENSE_DEFAULTS["allowDerivatives"],
|
||||
)
|
||||
|
||||
allow_different_license = payload["allowDifferentLicense"] = _to_bool(
|
||||
(model_data or {}).get("allowDifferentLicense"),
|
||||
_LICENSE_DEFAULTS["allowDifferentLicense"],
|
||||
)
|
||||
|
||||
# Ensure booleans are plain bool instances
|
||||
payload["allowNoCredit"] = bool(allow_no_credit)
|
||||
payload["allowDerivatives"] = bool(allow_derivatives)
|
||||
payload["allowDifferentLicense"] = bool(allow_different_license)
|
||||
|
||||
return payload
|
||||
|
||||
|
||||
def _resolve_commercial_bits(values: Sequence[str]) -> int:
|
||||
normalized_values = set()
|
||||
for value in values:
|
||||
normalized = str(value).strip().lower().replace("_", "").replace("-", "")
|
||||
if normalized in _COMMERCIAL_ALLOWED_VALUES:
|
||||
normalized_values.add(normalized)
|
||||
|
||||
has_sell = "sell" in normalized_values
|
||||
has_rent = has_sell or "rent" in normalized_values
|
||||
has_rentcivit = has_rent or "rentcivit" in normalized_values
|
||||
has_image = has_sell or "image" in normalized_values
|
||||
|
||||
commercial_bits = (
|
||||
(1 if has_sell else 0) << 3
|
||||
| (1 if has_rent else 0) << 2
|
||||
| (1 if has_rentcivit else 0) << 1
|
||||
| (1 if has_image else 0)
|
||||
)
|
||||
return commercial_bits << _COMMERCIAL_SHIFT
|
||||
|
||||
|
||||
def build_license_flags(payload: Mapping[str, Any] | None) -> int:
|
||||
"""Encode license payload into a compact bitset for cache storage."""
|
||||
|
||||
resolved = resolve_license_payload(payload or {})
|
||||
|
||||
flags = 0
|
||||
if resolved.get("allowNoCredit", True):
|
||||
flags |= 1 << 0
|
||||
|
||||
commercial_bits = _resolve_commercial_bits(resolved.get("allowCommercialUse", ()))
|
||||
flags |= commercial_bits
|
||||
|
||||
if resolved.get("allowDerivatives", True):
|
||||
flags |= 1 << 5
|
||||
|
||||
if resolved.get("allowDifferentLicense", True):
|
||||
flags |= 1 << 6
|
||||
|
||||
return flags
|
||||
|
||||
|
||||
def resolve_license_info(model_data: Mapping[str, Any] | None) -> tuple[Dict[str, Any], int]:
|
||||
"""Return normalized license payload and its encoded bitset."""
|
||||
|
||||
payload = resolve_license_payload(model_data)
|
||||
return payload, build_license_flags(payload)
|
||||
|
||||
|
||||
def rewrite_preview_url(source_url: str | None, media_type: str | None = None) -> tuple[str | None, bool]:
|
||||
"""Rewrite Civitai preview URLs to use optimized renditions.
|
||||
|
||||
@@ -43,5 +172,9 @@ def rewrite_preview_url(source_url: str | None, media_type: str | None = None) -
|
||||
return rewritten, True
|
||||
|
||||
|
||||
__all__ = ["rewrite_preview_url"]
|
||||
|
||||
__all__ = [
|
||||
"build_license_flags",
|
||||
"resolve_license_payload",
|
||||
"resolve_license_info",
|
||||
"rewrite_preview_url",
|
||||
]
|
||||
|
||||
@@ -55,6 +55,9 @@ CIVITAI_USER_MODEL_TYPES = [
|
||||
'checkpoint',
|
||||
]
|
||||
|
||||
# Default chunk size in megabytes used for hashing large files.
|
||||
DEFAULT_HASH_CHUNK_SIZE_MB = 4
|
||||
|
||||
# Auto-organize settings
|
||||
AUTO_ORGANIZE_BATCH_SIZE = 50 # Process models in batches to avoid overwhelming the system
|
||||
|
||||
|
||||
@@ -5,8 +5,10 @@ import json
|
||||
import time
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
from typing import Any, Dict
|
||||
import uuid
|
||||
from typing import Any, Dict, Iterable, List, Set, Tuple
|
||||
|
||||
from ..services.service_registry import ServiceRegistry
|
||||
from ..utils.example_images_paths import (
|
||||
@@ -516,10 +518,12 @@ class DownloadManager:
|
||||
if civitai_payload.get('images'):
|
||||
images = civitai_payload.get('images', [])
|
||||
|
||||
success, is_stale = await ExampleImagesProcessor.download_model_images(
|
||||
success, is_stale, failed_images = await ExampleImagesProcessor.download_model_images_with_tracking(
|
||||
model_hash, model_name, images, model_dir, optimize, downloader
|
||||
)
|
||||
|
||||
failed_urls: Set[str] = set(failed_images)
|
||||
|
||||
# If metadata is stale, try to refresh it
|
||||
if is_stale and model_hash not in self._progress['refreshed_models']:
|
||||
await MetadataUpdater.refresh_model_metadata(
|
||||
@@ -536,20 +540,36 @@ class DownloadManager:
|
||||
if updated_civitai.get('images'):
|
||||
# Retry download with updated metadata
|
||||
updated_images = updated_civitai.get('images', [])
|
||||
success, _ = await ExampleImagesProcessor.download_model_images(
|
||||
success, _, additional_failed = await ExampleImagesProcessor.download_model_images_with_tracking(
|
||||
model_hash, model_name, updated_images, model_dir, optimize, downloader
|
||||
)
|
||||
|
||||
failed_urls.update(additional_failed)
|
||||
|
||||
self._progress['refreshed_models'].add(model_hash)
|
||||
|
||||
# Mark as processed if successful, or as failed if unsuccessful after refresh
|
||||
if success:
|
||||
if failed_urls:
|
||||
await self._remove_failed_images_from_metadata(
|
||||
model_hash,
|
||||
model_name,
|
||||
model_dir,
|
||||
failed_urls,
|
||||
scanner,
|
||||
)
|
||||
|
||||
if failed_urls:
|
||||
self._progress['failed_models'].add(model_hash)
|
||||
self._progress['processed_models'].add(model_hash)
|
||||
logger.info(
|
||||
"Removed %s failed example images for %s", len(failed_urls), model_name
|
||||
)
|
||||
elif success:
|
||||
self._progress['processed_models'].add(model_hash)
|
||||
else:
|
||||
# If we refreshed metadata and still failed, mark as permanently failed
|
||||
if model_hash in self._progress['refreshed_models']:
|
||||
self._progress['failed_models'].add(model_hash)
|
||||
logger.info(f"Marking model {model_name} as failed after metadata refresh")
|
||||
self._progress['failed_models'].add(model_hash)
|
||||
logger.info(
|
||||
"Example images download failed for %s despite metadata refresh", model_name
|
||||
)
|
||||
|
||||
return True # Return True to indicate a remote download happened
|
||||
else:
|
||||
@@ -888,6 +908,8 @@ class DownloadManager:
|
||||
model_hash, model_name, images, model_dir, optimize, downloader
|
||||
)
|
||||
|
||||
failed_urls: Set[str] = set(failed_images)
|
||||
|
||||
# If metadata is stale, try to refresh it
|
||||
if is_stale and model_hash not in self._progress['refreshed_models']:
|
||||
await MetadataUpdater.refresh_model_metadata(
|
||||
@@ -909,19 +931,18 @@ class DownloadManager:
|
||||
)
|
||||
|
||||
# Combine failed images from both attempts
|
||||
failed_images.extend(additional_failed_images)
|
||||
failed_urls.update(additional_failed_images)
|
||||
|
||||
self._progress['refreshed_models'].add(model_hash)
|
||||
|
||||
# For forced downloads, remove failed images from metadata
|
||||
if failed_images:
|
||||
# Create a copy of images excluding failed ones
|
||||
if failed_urls:
|
||||
await self._remove_failed_images_from_metadata(
|
||||
model_hash, model_name, failed_images, scanner
|
||||
model_hash, model_name, model_dir, failed_urls, scanner
|
||||
)
|
||||
|
||||
# Mark as processed
|
||||
if success or failed_images: # Mark as processed if we successfully downloaded some images or removed failed ones
|
||||
if success or failed_urls: # Mark as processed if we successfully downloaded some images or removed failed ones
|
||||
self._progress['processed_models'].add(model_hash)
|
||||
|
||||
return True # Return True to indicate a remote download happened
|
||||
@@ -938,49 +959,112 @@ class DownloadManager:
|
||||
self._progress['last_error'] = error_msg
|
||||
return False # Return False on exception
|
||||
|
||||
async def _remove_failed_images_from_metadata(self, model_hash, model_name, failed_images, scanner):
|
||||
"""Remove failed images from model metadata"""
|
||||
async def _remove_failed_images_from_metadata(
|
||||
self,
|
||||
model_hash: str,
|
||||
model_name: str,
|
||||
model_dir: str,
|
||||
failed_images: Iterable[str],
|
||||
scanner,
|
||||
) -> None:
|
||||
"""Mark failed images in model metadata so they won't be retried."""
|
||||
|
||||
failed_set: Set[str] = {url for url in failed_images if url}
|
||||
if not failed_set:
|
||||
return
|
||||
|
||||
try:
|
||||
# Get current model data
|
||||
model_data = await MetadataUpdater.get_updated_model(model_hash, scanner)
|
||||
if not model_data:
|
||||
logger.warning(f"Could not find model data for {model_name} to remove failed images")
|
||||
return
|
||||
|
||||
if not model_data.get('civitai', {}).get('images'):
|
||||
|
||||
civitai_payload = model_data.get('civitai') or {}
|
||||
current_images = civitai_payload.get('images') or []
|
||||
if not current_images:
|
||||
logger.warning(f"No images in metadata for {model_name}")
|
||||
return
|
||||
|
||||
# Get current images
|
||||
current_images = model_data['civitai']['images']
|
||||
|
||||
# Filter out failed images
|
||||
updated_images = [img for img in current_images if img.get('url') not in failed_images]
|
||||
|
||||
# If images were removed, update metadata
|
||||
if len(updated_images) < len(current_images):
|
||||
removed_count = len(current_images) - len(updated_images)
|
||||
logger.info(f"Removing {removed_count} failed images from metadata for {model_name}")
|
||||
|
||||
# Update the images list
|
||||
model_data['civitai']['images'] = updated_images
|
||||
|
||||
# Save metadata to file
|
||||
file_path = model_data.get('file_path')
|
||||
if file_path:
|
||||
# Create a copy of model data without 'folder' field
|
||||
model_copy = model_data.copy()
|
||||
model_copy.pop('folder', None)
|
||||
|
||||
# Write metadata to file
|
||||
await MetadataManager.save_metadata(file_path, model_copy)
|
||||
logger.info(f"Saved updated metadata for {model_name} after removing failed images")
|
||||
|
||||
# Update the scanner cache
|
||||
|
||||
updated = False
|
||||
|
||||
for image in current_images:
|
||||
image_url = image.get('url')
|
||||
optimized_url = (
|
||||
ExampleImagesProcessor.get_civitai_optimized_url(image_url)
|
||||
if image_url and 'civitai.com' in image_url
|
||||
else None
|
||||
)
|
||||
|
||||
if image_url not in failed_set and optimized_url not in failed_set:
|
||||
continue
|
||||
|
||||
if image.get('downloadFailed'):
|
||||
continue
|
||||
|
||||
image['downloadFailed'] = True
|
||||
image.setdefault('downloadError', 'not_found')
|
||||
logger.debug(
|
||||
"Marked example image %s for %s as failed due to missing remote asset",
|
||||
image_url,
|
||||
model_name,
|
||||
)
|
||||
updated = True
|
||||
|
||||
if not updated:
|
||||
return
|
||||
|
||||
file_path = model_data.get('file_path')
|
||||
if file_path:
|
||||
model_copy = model_data.copy()
|
||||
model_copy.pop('folder', None)
|
||||
await MetadataManager.save_metadata(file_path, model_copy)
|
||||
|
||||
try:
|
||||
await scanner.update_single_model_cache(file_path, file_path, model_data)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error removing failed images from metadata for {model_name}: {e}", exc_info=True)
|
||||
except AttributeError:
|
||||
logger.debug("Scanner does not expose cache update for %s", model_name)
|
||||
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
logger.error(
|
||||
"Error removing failed images from metadata for %s: %s", model_name, exc, exc_info=True
|
||||
)
|
||||
|
||||
def _renumber_example_image_files(self, model_dir: str) -> None:
|
||||
if not model_dir or not os.path.isdir(model_dir):
|
||||
return
|
||||
|
||||
pattern = re.compile(r'^image_(\d+)(\.[^.]+)$', re.IGNORECASE)
|
||||
matches: List[Tuple[int, str, str]] = []
|
||||
|
||||
for entry in os.listdir(model_dir):
|
||||
match = pattern.match(entry)
|
||||
if match:
|
||||
matches.append((int(match.group(1)), entry, match.group(2)))
|
||||
|
||||
if not matches:
|
||||
return
|
||||
|
||||
matches.sort(key=lambda item: item[0])
|
||||
staged_paths: List[Tuple[str, str]] = []
|
||||
|
||||
for _, original_name, extension in matches:
|
||||
source_path = os.path.join(model_dir, original_name)
|
||||
temp_name = f"tmp_{uuid.uuid4().hex}_{original_name}"
|
||||
temp_path = os.path.join(model_dir, temp_name)
|
||||
try:
|
||||
os.rename(source_path, temp_path)
|
||||
staged_paths.append((temp_path, extension))
|
||||
except OSError as exc:
|
||||
logger.warning("Failed to stage rename for %s: %s", source_path, exc)
|
||||
|
||||
for new_index, (temp_path, extension) in enumerate(staged_paths):
|
||||
final_name = f"image_{new_index}{extension}"
|
||||
final_path = os.path.join(model_dir, final_name)
|
||||
try:
|
||||
os.rename(temp_path, final_path)
|
||||
except OSError as exc:
|
||||
logger.warning("Failed to finalise rename for %s: %s", final_path, exc)
|
||||
|
||||
async def _broadcast_progress(
|
||||
self,
|
||||
|
||||
@@ -199,10 +199,13 @@ def is_valid_example_images_root(folder_path: str) -> bool:
|
||||
if item == "_deleted":
|
||||
# Allow cleanup staging folders
|
||||
continue
|
||||
# When multi-library mode is active we expect nested hash folders
|
||||
if uses_library_scoped_folders():
|
||||
if _library_folder_has_only_hash_dirs(item_path):
|
||||
continue
|
||||
# Accept legacy library folders even when current settings do not
|
||||
# explicitly enable multi-library mode. This allows users to reuse a
|
||||
# previously configured example images directory after settings are
|
||||
# reset, as long as the nested structure still looks like dedicated
|
||||
# hash folders.
|
||||
if _library_folder_has_only_hash_dirs(item_path):
|
||||
continue
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@@ -85,6 +85,16 @@ class ExampleImagesProcessor:
|
||||
# Default fallback
|
||||
return '.jpg'
|
||||
|
||||
@staticmethod
|
||||
def _is_not_found_error(error) -> bool:
|
||||
"""Return True when the downloader response represents a 404/Not Found."""
|
||||
|
||||
if not error:
|
||||
return False
|
||||
|
||||
message = str(error).lower()
|
||||
return '404' in message or 'file not found' in message
|
||||
|
||||
@staticmethod
|
||||
async def download_model_images(model_hash, model_name, model_images, model_dir, optimize, downloader):
|
||||
"""Download images for a single model
|
||||
@@ -98,7 +108,15 @@ class ExampleImagesProcessor:
|
||||
image_url = image.get('url')
|
||||
if not image_url:
|
||||
continue
|
||||
|
||||
|
||||
if image.get('downloadFailed'):
|
||||
logger.debug(
|
||||
"Skipping example image %s for %s because it previously failed to download",
|
||||
image_url,
|
||||
model_name,
|
||||
)
|
||||
continue
|
||||
|
||||
# Apply optimization for Civitai URLs if enabled
|
||||
original_url = image_url
|
||||
if optimize and 'civitai.com' in image_url:
|
||||
@@ -142,7 +160,7 @@ class ExampleImagesProcessor:
|
||||
with open(save_path, 'wb') as f:
|
||||
f.write(content)
|
||||
|
||||
elif "404" in str(content):
|
||||
elif ExampleImagesProcessor._is_not_found_error(content):
|
||||
error_msg = f"Failed to download file: {image_url}, status code: 404 - Model metadata might be stale"
|
||||
logger.warning(error_msg)
|
||||
model_success = False # Mark the model as failed due to 404 error
|
||||
@@ -173,7 +191,15 @@ class ExampleImagesProcessor:
|
||||
image_url = image.get('url')
|
||||
if not image_url:
|
||||
continue
|
||||
|
||||
|
||||
if image.get('downloadFailed'):
|
||||
logger.debug(
|
||||
"Skipping example image %s for %s because it previously failed to download",
|
||||
image_url,
|
||||
model_name,
|
||||
)
|
||||
continue
|
||||
|
||||
# Apply optimization for Civitai URLs if enabled
|
||||
original_url = image_url
|
||||
if optimize and 'civitai.com' in image_url:
|
||||
@@ -217,7 +243,7 @@ class ExampleImagesProcessor:
|
||||
with open(save_path, 'wb') as f:
|
||||
f.write(content)
|
||||
|
||||
elif "404" in str(content):
|
||||
elif ExampleImagesProcessor._is_not_found_error(content):
|
||||
error_msg = f"Failed to download file: {image_url}, status code: 404 - Model metadata might be stale"
|
||||
logger.warning(error_msg)
|
||||
model_success = False # Mark the model as failed due to 404 error
|
||||
|
||||
@@ -140,6 +140,28 @@ class ExifUtils:
|
||||
if metadata:
|
||||
# Remove any existing recipe metadata
|
||||
metadata = ExifUtils.remove_recipe_metadata(metadata)
|
||||
|
||||
# Prepare checkpoint data
|
||||
checkpoint_data = recipe_data.get("checkpoint") or {}
|
||||
simplified_checkpoint = None
|
||||
if isinstance(checkpoint_data, dict) and checkpoint_data:
|
||||
simplified_checkpoint = {
|
||||
"type": checkpoint_data.get("type", "checkpoint"),
|
||||
"modelId": checkpoint_data.get("modelId", 0),
|
||||
"modelVersionId": checkpoint_data.get("modelVersionId")
|
||||
or checkpoint_data.get("id", 0),
|
||||
"modelName": checkpoint_data.get(
|
||||
"modelName", checkpoint_data.get("name", "")
|
||||
),
|
||||
"modelVersionName": checkpoint_data.get(
|
||||
"modelVersionName", checkpoint_data.get("version", "")
|
||||
),
|
||||
"hash": checkpoint_data.get("hash", "").lower()
|
||||
if checkpoint_data.get("hash")
|
||||
else "",
|
||||
"file_name": checkpoint_data.get("file_name", ""),
|
||||
"baseModel": checkpoint_data.get("baseModel", ""),
|
||||
}
|
||||
|
||||
# Prepare simplified loras data
|
||||
simplified_loras = []
|
||||
@@ -160,7 +182,8 @@ class ExifUtils:
|
||||
'base_model': recipe_data.get('base_model', ''),
|
||||
'loras': simplified_loras,
|
||||
'gen_params': recipe_data.get('gen_params', {}),
|
||||
'tags': recipe_data.get('tags', [])
|
||||
'tags': recipe_data.get('tags', []),
|
||||
**({'checkpoint': simplified_checkpoint} if simplified_checkpoint else {})
|
||||
}
|
||||
|
||||
# Convert to JSON string
|
||||
@@ -359,4 +382,4 @@ class ExifUtils:
|
||||
return f.read(), os.path.splitext(image_data)[1]
|
||||
except Exception:
|
||||
return image_data, '.jpg' # Last resort fallback
|
||||
return image_data, '.jpg'
|
||||
return image_data, '.jpg'
|
||||
|
||||
@@ -1,17 +1,41 @@
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import hashlib
|
||||
|
||||
from .constants import PREVIEW_EXTENSIONS, CARD_PREVIEW_WIDTH
|
||||
from .constants import (
|
||||
CARD_PREVIEW_WIDTH,
|
||||
DEFAULT_HASH_CHUNK_SIZE_MB,
|
||||
PREVIEW_EXTENSIONS,
|
||||
)
|
||||
from .exif_utils import ExifUtils
|
||||
from ..services.settings_manager import get_settings_manager
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _get_hash_chunk_size_bytes() -> int:
|
||||
"""Return the chunk size used for hashing, in bytes."""
|
||||
|
||||
settings_manager = get_settings_manager()
|
||||
chunk_size_mb = settings_manager.get("hash_chunk_size_mb", DEFAULT_HASH_CHUNK_SIZE_MB)
|
||||
try:
|
||||
chunk_size_value = float(chunk_size_mb)
|
||||
except (TypeError, ValueError):
|
||||
chunk_size_value = float(DEFAULT_HASH_CHUNK_SIZE_MB)
|
||||
|
||||
if chunk_size_value <= 0:
|
||||
chunk_size_value = float(DEFAULT_HASH_CHUNK_SIZE_MB)
|
||||
|
||||
return max(1, int(chunk_size_value * 1024 * 1024))
|
||||
|
||||
|
||||
async def calculate_sha256(file_path: str) -> str:
|
||||
"""Calculate SHA256 hash of a file"""
|
||||
sha256_hash = hashlib.sha256()
|
||||
chunk_size = _get_hash_chunk_size_bytes()
|
||||
with open(file_path, "rb") as f:
|
||||
for byte_block in iter(lambda: f.read(128 * 1024), b""):
|
||||
for byte_block in iter(lambda: f.read(chunk_size), b""):
|
||||
sha256_hash.update(byte_block)
|
||||
return sha256_hash.hexdigest()
|
||||
|
||||
@@ -81,4 +105,4 @@ def get_preview_extension(preview_path: str) -> str:
|
||||
|
||||
def normalize_path(path: str) -> str:
|
||||
"""Normalize file path to use forward slashes"""
|
||||
return path.replace(os.sep, "/") if path else path
|
||||
return path.replace(os.sep, "/") if path else path
|
||||
|
||||
@@ -22,7 +22,7 @@ class MetadataManager:
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
async def load_metadata(file_path: str, model_class: Type[BaseModelMetadata] = LoraMetadata) -> Optional[BaseModelMetadata]:
|
||||
async def load_metadata(file_path: str, model_class: Type[BaseModelMetadata] = LoraMetadata) -> tuple[Optional[BaseModelMetadata], bool]:
|
||||
"""
|
||||
Load metadata safely.
|
||||
|
||||
|
||||
63
py/utils/preview_selection.py
Normal file
63
py/utils/preview_selection.py
Normal file
@@ -0,0 +1,63 @@
|
||||
"""Utilities for selecting preview media from Civitai image metadata."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Mapping, Optional, Sequence, Tuple
|
||||
|
||||
from .constants import NSFW_LEVELS
|
||||
|
||||
PreviewMedia = Mapping[str, object]
|
||||
|
||||
|
||||
def _extract_nsfw_level(entry: Mapping[str, object]) -> int:
|
||||
"""Return a normalized NSFW level value for the supplied media entry."""
|
||||
|
||||
value = entry.get("nsfwLevel", 0)
|
||||
try:
|
||||
return int(value) # type: ignore[return-value]
|
||||
except (TypeError, ValueError):
|
||||
return 0
|
||||
|
||||
|
||||
def select_preview_media(
|
||||
images: Sequence[Mapping[str, object]] | None,
|
||||
*,
|
||||
blur_mature_content: bool,
|
||||
) -> Tuple[Optional[PreviewMedia], int]:
|
||||
"""Select the most appropriate preview media entry.
|
||||
|
||||
When ``blur_mature_content`` is enabled we first try to return the first media
|
||||
item with an ``nsfwLevel`` lower than :pydata:`NSFW_LEVELS["R"]`. If none are
|
||||
available we return the media entry with the lowest NSFW level. When the
|
||||
setting is disabled we simply return the first entry.
|
||||
"""
|
||||
|
||||
if not images:
|
||||
return None, 0
|
||||
|
||||
candidates = [item for item in images if isinstance(item, Mapping)]
|
||||
if not candidates:
|
||||
return None, 0
|
||||
|
||||
selected = candidates[0]
|
||||
selected_level = _extract_nsfw_level(selected)
|
||||
|
||||
if not blur_mature_content:
|
||||
return selected, selected_level
|
||||
|
||||
safe_threshold = NSFW_LEVELS.get("R", 4)
|
||||
for candidate in candidates:
|
||||
level = _extract_nsfw_level(candidate)
|
||||
if level < safe_threshold:
|
||||
return candidate, level
|
||||
|
||||
for candidate in candidates[1:]:
|
||||
level = _extract_nsfw_level(candidate)
|
||||
if level < selected_level:
|
||||
selected = candidate
|
||||
selected_level = level
|
||||
|
||||
return selected, selected_level
|
||||
|
||||
|
||||
__all__ = ["select_preview_media"]
|
||||
@@ -2,10 +2,11 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
from typing import Optional
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from platformdirs import user_config_dir
|
||||
|
||||
@@ -36,8 +37,13 @@ def get_settings_dir(create: bool = True) -> str:
|
||||
The absolute path to the user configuration directory.
|
||||
"""
|
||||
|
||||
config_dir = user_config_dir(APP_NAME, appauthor=False)
|
||||
if create:
|
||||
legacy_path = get_legacy_settings_path()
|
||||
if _should_use_portable_settings(legacy_path, _LOGGER):
|
||||
config_dir = os.path.dirname(legacy_path)
|
||||
else:
|
||||
config_dir = user_config_dir(APP_NAME, appauthor=False)
|
||||
|
||||
if create and config_dir:
|
||||
os.makedirs(config_dir, exist_ok=True)
|
||||
return config_dir
|
||||
|
||||
@@ -64,6 +70,11 @@ def ensure_settings_file(logger: Optional[logging.Logger] = None) -> str:
|
||||
"""
|
||||
|
||||
logger = logger or _LOGGER
|
||||
legacy_path = get_legacy_settings_path()
|
||||
|
||||
if _should_use_portable_settings(legacy_path, logger):
|
||||
return legacy_path
|
||||
|
||||
target_path = get_settings_file_path(create_dir=True)
|
||||
preferred_dir = user_config_dir(APP_NAME, appauthor=False)
|
||||
preferred_path = os.path.join(preferred_dir, "settings.json")
|
||||
@@ -71,7 +82,6 @@ def ensure_settings_file(logger: Optional[logging.Logger] = None) -> str:
|
||||
if os.path.abspath(target_path) != os.path.abspath(preferred_path):
|
||||
os.makedirs(preferred_dir, exist_ok=True)
|
||||
target_path = preferred_path
|
||||
legacy_path = get_legacy_settings_path()
|
||||
|
||||
if os.path.exists(legacy_path) and not os.path.exists(target_path):
|
||||
try:
|
||||
@@ -88,3 +98,63 @@ def ensure_settings_file(logger: Optional[logging.Logger] = None) -> str:
|
||||
|
||||
return target_path
|
||||
|
||||
|
||||
def _should_use_portable_settings(path: str, logger: logging.Logger) -> bool:
|
||||
"""Return ``True`` when the repository settings file enables portable mode."""
|
||||
|
||||
if not os.path.exists(path):
|
||||
return False
|
||||
|
||||
try:
|
||||
with open(path, "r", encoding="utf-8") as handle:
|
||||
payload = json.load(handle)
|
||||
except json.JSONDecodeError as exc:
|
||||
logger.warning("Failed to parse %s for portable mode flag: %s", path, exc)
|
||||
return False
|
||||
except OSError as exc:
|
||||
logger.warning("Could not read %s to determine portable mode: %s", path, exc)
|
||||
return False
|
||||
|
||||
if not isinstance(payload, dict):
|
||||
logger.debug("Portable settings file %s does not contain a JSON object", path)
|
||||
return False
|
||||
|
||||
flag = payload.get("use_portable_settings")
|
||||
if isinstance(flag, bool):
|
||||
return flag
|
||||
|
||||
if flag is not None:
|
||||
logger.warning(
|
||||
"Ignoring non-boolean use_portable_settings value in %s", path
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
def load_settings_template() -> Optional[Dict[str, Any]]:
|
||||
"""Return the parsed contents of ``settings.json.example`` when available."""
|
||||
|
||||
template_path = os.path.join(get_project_root(), "settings.json.example")
|
||||
|
||||
try:
|
||||
with open(template_path, "r", encoding="utf-8") as handle:
|
||||
payload = json.load(handle)
|
||||
except FileNotFoundError:
|
||||
_LOGGER.debug("settings.json.example not found at %s", template_path)
|
||||
return None
|
||||
except json.JSONDecodeError as exc:
|
||||
_LOGGER.warning("Failed to parse settings.json.example: %s", exc)
|
||||
return None
|
||||
except OSError as exc:
|
||||
_LOGGER.warning(
|
||||
"Could not read settings.json.example at %s: %s", template_path, exc
|
||||
)
|
||||
return None
|
||||
|
||||
if not isinstance(payload, dict):
|
||||
_LOGGER.debug(
|
||||
"settings.json.example at %s does not contain a JSON object", template_path
|
||||
)
|
||||
return None
|
||||
|
||||
return payload
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from difflib import SequenceMatcher
|
||||
import os
|
||||
import re
|
||||
from typing import Dict
|
||||
from ..services.service_registry import ServiceRegistry
|
||||
from ..config import config
|
||||
@@ -85,6 +86,41 @@ def fuzzy_match(text: str, pattern: str, threshold: float = 0.85) -> bool:
|
||||
# All words found either as substrings or fuzzy matches
|
||||
return True
|
||||
|
||||
def sanitize_folder_name(name: str, replacement: str = "_") -> str:
|
||||
"""Sanitize a folder name by removing or replacing invalid characters.
|
||||
|
||||
Args:
|
||||
name: The original folder name.
|
||||
replacement: The character to use when replacing invalid characters.
|
||||
|
||||
Returns:
|
||||
A sanitized folder name safe to use across common filesystems.
|
||||
"""
|
||||
|
||||
if not name:
|
||||
return ""
|
||||
|
||||
# Replace invalid characters commonly restricted on Windows and POSIX
|
||||
invalid_chars_pattern = r'[<>:"/\\|?*\x00-\x1f]'
|
||||
sanitized = re.sub(invalid_chars_pattern, replacement, name)
|
||||
|
||||
# Trim whitespace introduced during sanitization
|
||||
sanitized = sanitized.strip()
|
||||
|
||||
# Collapse repeated replacement characters to a single instance
|
||||
if replacement:
|
||||
sanitized = re.sub(f"{re.escape(replacement)}+", replacement, sanitized)
|
||||
sanitized = sanitized.strip(replacement)
|
||||
|
||||
# Remove trailing spaces or periods which are invalid on Windows
|
||||
sanitized = sanitized.rstrip(" .")
|
||||
|
||||
if not sanitized:
|
||||
return "unnamed"
|
||||
|
||||
return sanitized
|
||||
|
||||
|
||||
def calculate_recipe_fingerprint(loras):
|
||||
"""
|
||||
Calculate a unique fingerprint for a recipe based on its LoRAs.
|
||||
@@ -169,16 +205,26 @@ def calculate_relative_path_for_model(model_data: Dict, model_type: str = 'lora'
|
||||
base_model_mappings = settings_manager.get('base_model_path_mappings', {})
|
||||
mapped_base_model = base_model_mappings.get(base_model, base_model)
|
||||
|
||||
first_tag = settings_manager.resolve_priority_tag_for_model(model_tags, model_type)
|
||||
# Convert all tags to lowercase to avoid case sensitivity issues on Windows
|
||||
lowercase_tags = [tag.lower() for tag in model_tags if isinstance(tag, str)]
|
||||
first_tag = settings_manager.resolve_priority_tag_for_model(lowercase_tags, model_type)
|
||||
|
||||
if not first_tag:
|
||||
first_tag = 'no tags' # Default if no tags available
|
||||
|
||||
# Format the template with available data
|
||||
model_name = sanitize_folder_name(model_data.get('model_name', ''))
|
||||
version_name = ''
|
||||
|
||||
if isinstance(civitai_data, dict):
|
||||
version_name = sanitize_folder_name(civitai_data.get('name') or '')
|
||||
|
||||
formatted_path = path_template
|
||||
formatted_path = formatted_path.replace('{base_model}', mapped_base_model)
|
||||
formatted_path = formatted_path.replace('{first_tag}', first_tag)
|
||||
formatted_path = formatted_path.replace('{author}', author)
|
||||
formatted_path = formatted_path.replace('{model_name}', model_name)
|
||||
formatted_path = formatted_path.replace('{version_name}', version_name)
|
||||
|
||||
if model_type == 'embedding':
|
||||
formatted_path = formatted_path.replace(' ', '_')
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
[project]
|
||||
name = "comfyui-lora-manager"
|
||||
description = "Revolutionize your workflow with the ultimate LoRA companion for ComfyUI!"
|
||||
version = "0.9.8"
|
||||
version = "0.9.11"
|
||||
license = {file = "LICENSE"}
|
||||
dependencies = [
|
||||
"aiohttp",
|
||||
|
||||
@@ -7,5 +7,6 @@ python_functions = test_*
|
||||
# Register async marker for coroutine-style tests
|
||||
markers =
|
||||
asyncio: execute test within asyncio event loop
|
||||
no_settings_dir_isolation: allow tests to use real settings paths
|
||||
# Skip problematic directories to avoid import conflicts
|
||||
norecursedirs = .git .tox dist build *.egg __pycache__ py
|
||||
@@ -11,7 +11,11 @@
|
||||
"type": "LORA",
|
||||
"nsfw": false,
|
||||
"description": "description",
|
||||
"tags": ["style"]
|
||||
"tags": ["style"],
|
||||
"allowNoCredit": true,
|
||||
"allowCommercialUse": ["Sell"],
|
||||
"allowDerivatives": true,
|
||||
"allowDifferentLicense": true
|
||||
},
|
||||
"files": [
|
||||
{
|
||||
|
||||
@@ -1,9 +1,17 @@
|
||||
const settingsStore = new Map();
|
||||
|
||||
export const app = {
|
||||
canvas: { ds: { scale: 1 } },
|
||||
extensionManager: {
|
||||
toast: {
|
||||
add: () => {},
|
||||
},
|
||||
setting: {
|
||||
get: (id) => (settingsStore.has(id) ? settingsStore.get(id) : undefined),
|
||||
set: async (id, value) => {
|
||||
settingsStore.set(id, value);
|
||||
},
|
||||
},
|
||||
},
|
||||
registerExtension: () => {},
|
||||
graphToPrompt: async () => ({ workflow: { nodes: new Map() } }),
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
{
|
||||
"use_portable_settings": false,
|
||||
"civitai_api_key": "your_civitai_api_key_here",
|
||||
"folder_paths": {
|
||||
"loras": [
|
||||
@@ -13,5 +14,6 @@
|
||||
"C:/path/to/your/embeddings_folder",
|
||||
"C:/path/to/another/embeddings_folder"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"auto_organize_exclusions": []
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ import os
|
||||
import sys
|
||||
import json
|
||||
from py.middleware.cache_middleware import cache_control
|
||||
from py.utils.settings_paths import ensure_settings_file, get_settings_dir
|
||||
from py.utils.settings_paths import ensure_settings_file
|
||||
|
||||
# Set environment variable to indicate standalone mode
|
||||
os.environ["LORA_MANAGER_STANDALONE"] = "1"
|
||||
@@ -102,8 +102,11 @@ import asyncio
|
||||
import logging
|
||||
from aiohttp import web
|
||||
|
||||
# Increase allowable header size to align with in-ComfyUI configuration.
|
||||
HEADER_SIZE_LIMIT = 16384
|
||||
|
||||
# Setup logging
|
||||
logging.basicConfig(level=logging.INFO,
|
||||
logging.basicConfig(level=logging.INFO,
|
||||
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||
logger = logging.getLogger("lora-manager-standalone")
|
||||
|
||||
@@ -133,7 +136,14 @@ class StandaloneServer:
|
||||
"""Server implementation for standalone mode"""
|
||||
|
||||
def __init__(self):
|
||||
self.app = web.Application(logger=logger, middlewares=[cache_control])
|
||||
self.app = web.Application(
|
||||
logger=logger,
|
||||
middlewares=[cache_control],
|
||||
handler_args={
|
||||
"max_field_size": HEADER_SIZE_LIMIT,
|
||||
"max_line_size": HEADER_SIZE_LIMIT,
|
||||
},
|
||||
)
|
||||
self.instance = self # Make it compatible with PromptServer.instance pattern
|
||||
|
||||
# Ensure the app's access logger is configured to reduce verbosity
|
||||
@@ -218,54 +228,43 @@ class StandaloneServer:
|
||||
from py.lora_manager import LoraManager
|
||||
|
||||
def validate_settings():
|
||||
"""Validate that settings.json exists and has required configuration"""
|
||||
settings_path = ensure_settings_file(logger)
|
||||
if not os.path.exists(settings_path):
|
||||
logger.error("=" * 80)
|
||||
logger.error("CONFIGURATION ERROR: settings.json file not found!")
|
||||
logger.error("")
|
||||
logger.error("Expected location: %s", settings_path)
|
||||
logger.error("")
|
||||
logger.error("To run in standalone mode, you need to create a settings.json file.")
|
||||
logger.error("Please follow these steps:")
|
||||
logger.error("")
|
||||
logger.error("1. Copy the provided settings.json.example file to create a new file")
|
||||
logger.error(" named settings.json inside the LoRA Manager settings folder:")
|
||||
logger.error(" %s", get_settings_dir())
|
||||
logger.error("")
|
||||
logger.error("2. Edit settings.json to include your correct model folder paths")
|
||||
logger.error(" and CivitAI API key")
|
||||
logger.error("=" * 80)
|
||||
return False
|
||||
|
||||
# Check if settings.json has valid folder paths
|
||||
"""Initialize settings and log any startup warnings."""
|
||||
try:
|
||||
with open(settings_path, 'r', encoding='utf-8') as f:
|
||||
settings = json.load(f)
|
||||
|
||||
folder_paths = settings.get('folder_paths', {})
|
||||
has_valid_paths = False
|
||||
|
||||
for path_type in ['loras', 'checkpoints', 'embeddings']:
|
||||
paths = folder_paths.get(path_type, [])
|
||||
if paths and any(os.path.exists(p) for p in paths):
|
||||
has_valid_paths = True
|
||||
break
|
||||
|
||||
if not has_valid_paths:
|
||||
logger.warning("=" * 80)
|
||||
logger.warning("CONFIGURATION WARNING: No valid model folder paths found!")
|
||||
logger.warning("")
|
||||
logger.warning("Your settings.json exists but doesn't contain valid folder paths.")
|
||||
logger.warning("Please check and update the folder_paths section in settings.json")
|
||||
logger.warning("to include existing directories for your models.")
|
||||
logger.warning("=" * 80)
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error reading settings.json: {e}")
|
||||
from py.services.settings_manager import get_settings_manager
|
||||
|
||||
manager = get_settings_manager()
|
||||
except Exception as exc: # pragma: no cover - defensive logging
|
||||
logger.error("Failed to initialise settings manager: %s", exc, exc_info=True)
|
||||
return False
|
||||
|
||||
|
||||
messages = manager.get_startup_messages()
|
||||
if messages:
|
||||
logger.warning("=" * 80)
|
||||
logger.warning("Standalone mode is using fallback configuration values.")
|
||||
for message in messages:
|
||||
severity = (message.get("severity") or "info").lower()
|
||||
title = message.get("title")
|
||||
body = message.get("message") or ""
|
||||
details = message.get("details")
|
||||
location = message.get("settings_file") or manager.settings_file
|
||||
|
||||
text = f"{title}: {body}" if title else body
|
||||
log_method = logger.info
|
||||
if severity == "error":
|
||||
log_method = logger.error
|
||||
elif severity == "warning":
|
||||
log_method = logger.warning
|
||||
|
||||
log_method(text)
|
||||
if details:
|
||||
log_method("Details: %s", details)
|
||||
if location:
|
||||
log_method("Settings file: %s", location)
|
||||
|
||||
logger.warning("=" * 80)
|
||||
else:
|
||||
logger.info("Loaded settings from %s", manager.settings_file)
|
||||
|
||||
return True
|
||||
|
||||
class StandaloneLoraManager(LoraManager):
|
||||
|
||||
@@ -48,11 +48,16 @@ html, body {
|
||||
/* Composed Colors */
|
||||
--lora-accent: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h));
|
||||
--lora-surface: oklch(97% 0 0 / 0.95);
|
||||
--lora-border: oklch(90% 0.02 256 / 0.15);
|
||||
--lora-border: oklch(72% 0.03 256 / 0.45);
|
||||
--lora-text: oklch(95% 0.02 256);
|
||||
--lora-error: oklch(75% 0.32 29);
|
||||
--lora-error-bg: color-mix(in oklch, var(--lora-error) 20%, transparent);
|
||||
--lora-error-border: color-mix(in oklch, var(--lora-error) 50%, transparent);
|
||||
--lora-warning: oklch(var(--lora-warning-l) var(--lora-warning-c) var(--lora-warning-h));
|
||||
--lora-success: oklch(var(--lora-success-l) var(--lora-success-c) var(--lora-success-h));
|
||||
--badge-update-bg: oklch(72% 0.2 220);
|
||||
--badge-update-text: oklch(28% 0.03 220);
|
||||
--badge-update-glow: oklch(72% 0.2 220 / 0.28);
|
||||
|
||||
/* Spacing Scale */
|
||||
--space-1: calc(8px * 1);
|
||||
@@ -100,6 +105,11 @@ html[data-theme="light"] {
|
||||
--lora-border: oklch(90% 0.02 256 / 0.15);
|
||||
--lora-text: oklch(98% 0.02 256);
|
||||
--lora-warning: oklch(75% 0.25 80); /* Modified to be used with oklch() */
|
||||
--lora-error-bg: color-mix(in oklch, var(--lora-error) 15%, transparent);
|
||||
--lora-error-border: color-mix(in oklch, var(--lora-error) 40%, transparent);
|
||||
--badge-update-bg: oklch(62% 0.18 220);
|
||||
--badge-update-text: oklch(98% 0.02 240);
|
||||
--badge-update-glow: oklch(62% 0.18 220 / 0.4);
|
||||
}
|
||||
|
||||
body {
|
||||
|
||||
@@ -296,6 +296,18 @@
|
||||
min-height: 20px;
|
||||
}
|
||||
|
||||
.card-header-info {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.card-header-info .base-model-label {
|
||||
flex-shrink: 1;
|
||||
}
|
||||
|
||||
.card-actions i {
|
||||
margin-left: var(--space-1);
|
||||
cursor: pointer;
|
||||
@@ -422,6 +434,7 @@
|
||||
border-radius: var(--border-radius-xs);
|
||||
backdrop-filter: blur(2px);
|
||||
font-size: 0.85em;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
/* Style for version name */
|
||||
@@ -575,4 +588,26 @@
|
||||
15% { opacity: 1; transform: translateY(0); }
|
||||
85% { opacity: 1; transform: translateY(0); }
|
||||
100% { opacity: 0; transform: translateY(0); }
|
||||
}
|
||||
}
|
||||
|
||||
.model-card.has-update {
|
||||
border-color: color-mix(in oklab, var(--badge-update-bg) 60%, transparent);
|
||||
box-shadow: 0 0 0 1px color-mix(in oklab, var(--badge-update-bg) 45%, transparent);
|
||||
}
|
||||
|
||||
.model-update-badge {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
padding: 2px 10px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
background: var(--badge-update-bg);
|
||||
color: var(--badge-update-text);
|
||||
font-size: 0.7rem;
|
||||
font-weight: 600;
|
||||
letter-spacing: 0.04em;
|
||||
text-transform: uppercase;
|
||||
box-shadow: 0 4px 12px var(--badge-update-glow);
|
||||
border: 1px solid color-mix(in oklab, var(--badge-update-bg) 55%, transparent);
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
@@ -9,6 +9,42 @@
|
||||
border-bottom: 1px solid var(--lora-border);
|
||||
}
|
||||
|
||||
.modal-header-actions {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--space-2);
|
||||
flex-wrap: wrap;
|
||||
width: 100%;
|
||||
margin-bottom: var(--space-1);
|
||||
}
|
||||
|
||||
.modal-header-actions .license-restrictions {
|
||||
margin-left: auto;
|
||||
}
|
||||
|
||||
.license-restrictions {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
padding: 4px 0;
|
||||
}
|
||||
|
||||
.license-restrictions .license-icon {
|
||||
width: 22px;
|
||||
height: 22px;
|
||||
display: inline-block;
|
||||
background-color: var(--text-muted);
|
||||
-webkit-mask: var(--license-icon-image) center/contain no-repeat;
|
||||
mask: var(--license-icon-image) center/contain no-repeat;
|
||||
transition: background-color 0.2s ease, transform 0.2s ease;
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
.license-restrictions .license-icon:hover {
|
||||
background-color: var(--text-color);
|
||||
transform: translateY(-1px);
|
||||
}
|
||||
|
||||
/* Info Grid */
|
||||
.info-grid {
|
||||
display: grid;
|
||||
@@ -323,6 +359,10 @@
|
||||
}
|
||||
|
||||
.tab-btn {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: var(--space-1);
|
||||
padding: var(--space-1) var(--space-2);
|
||||
background: transparent;
|
||||
border: none;
|
||||
@@ -346,6 +386,51 @@
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.tab-btn .tab-label {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
.tab-btn .tab-badge {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
padding: 2px 8px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
background: var(--badge-update-bg);
|
||||
color: var(--badge-update-text);
|
||||
font-size: 0.68em;
|
||||
font-weight: 600;
|
||||
letter-spacing: 0.06em;
|
||||
text-transform: uppercase;
|
||||
box-shadow: 0 3px 10px var(--badge-update-glow);
|
||||
border: 1px solid color-mix(in oklab, var(--badge-update-bg) 55%, transparent);
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
.tab-badge--update {
|
||||
animation: tab-badge-pulse 2.8s ease-in-out infinite;
|
||||
}
|
||||
|
||||
.tab-btn--has-update:not(.active) {
|
||||
color: color-mix(in oklch, var(--text-color) 70%, var(--badge-update-bg) 30%);
|
||||
}
|
||||
|
||||
.tab-btn--has-update.active {
|
||||
border-bottom-color: var(--badge-update-bg);
|
||||
}
|
||||
|
||||
@keyframes tab-badge-pulse {
|
||||
0%, 100% {
|
||||
box-shadow: 0 3px 10px color-mix(in oklch, var(--badge-update-glow) 100%, transparent);
|
||||
transform: translateY(0);
|
||||
}
|
||||
50% {
|
||||
box-shadow: 0 5px 14px color-mix(in oklch, var(--badge-update-glow) 90%, transparent);
|
||||
transform: translateY(-1px);
|
||||
}
|
||||
}
|
||||
|
||||
.tab-content {
|
||||
position: relative;
|
||||
min-height: 100px;
|
||||
@@ -359,24 +444,306 @@
|
||||
display: block;
|
||||
}
|
||||
|
||||
.view-all-btn {
|
||||
.recipes-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 5px;
|
||||
padding: 6px 12px;
|
||||
background-color: var(--lora-accent);
|
||||
color: var(--lora-text);
|
||||
border: none;
|
||||
border-radius: var(--border-radius-sm);
|
||||
cursor: pointer;
|
||||
transition: background-color 0.2s;
|
||||
font-size: 13px;
|
||||
align-items: flex-start;
|
||||
justify-content: space-between;
|
||||
gap: var(--space-3);
|
||||
padding: var(--space-2) 0 var(--space-3);
|
||||
margin-bottom: var(--space-2);
|
||||
border-bottom: 1px solid var(--lora-border);
|
||||
}
|
||||
|
||||
.view-all-btn:hover {
|
||||
opacity: 0.9;
|
||||
.recipes-header__text {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 6px;
|
||||
max-width: 520px;
|
||||
}
|
||||
|
||||
.recipes-header__eyebrow {
|
||||
font-size: 0.75em;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.12em;
|
||||
font-weight: 600;
|
||||
color: var(--text-color);
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
.recipes-header__text h3 {
|
||||
margin: 0;
|
||||
font-size: 1.1em;
|
||||
line-height: 1.4;
|
||||
}
|
||||
|
||||
.recipes-header__description {
|
||||
margin: 0;
|
||||
font-size: 0.9em;
|
||||
line-height: 1.5;
|
||||
color: var(--text-color);
|
||||
opacity: 0.75;
|
||||
}
|
||||
|
||||
.recipes-header__view-all {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: var(--space-1);
|
||||
padding: 8px 14px;
|
||||
border: 1px solid oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.35);
|
||||
background: transparent;
|
||||
color: var(--lora-accent);
|
||||
border-radius: var(--border-radius-sm);
|
||||
cursor: pointer;
|
||||
font-size: 0.9em;
|
||||
font-weight: 600;
|
||||
transition: background 0.2s ease, border-color 0.2s ease, transform 0.2s ease;
|
||||
}
|
||||
|
||||
.recipes-header__view-all i {
|
||||
font-size: 0.85em;
|
||||
}
|
||||
|
||||
.recipes-header__view-all:hover,
|
||||
.recipes-header__view-all:focus-visible {
|
||||
background: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.15);
|
||||
border-color: var(--lora-accent);
|
||||
outline: none;
|
||||
transform: translateY(-1px);
|
||||
}
|
||||
|
||||
.recipes-header__view-all:active {
|
||||
transform: translateY(0);
|
||||
}
|
||||
|
||||
.recipes-card-grid {
|
||||
max-width: none;
|
||||
margin: var(--space-3) 0 0;
|
||||
padding: 0;
|
||||
grid-template-columns: repeat(auto-fill, minmax(240px, 1fr));
|
||||
gap: var(--space-3);
|
||||
row-gap: var(--space-3);
|
||||
}
|
||||
|
||||
.recipe-card {
|
||||
background: var(--lora-surface);
|
||||
border: 1px solid var(--lora-border);
|
||||
border-radius: var(--border-radius-base);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
overflow: hidden;
|
||||
min-height: 320px;
|
||||
transition: transform 0.2s ease, border-color 0.2s ease, box-shadow 0.2s ease;
|
||||
}
|
||||
|
||||
.recipe-card:hover {
|
||||
transform: translateY(-4px);
|
||||
border-color: oklch(var(--lora-accent-l) var(--lora-accent-c) var(--lora-accent-h) / 0.6);
|
||||
box-shadow: 0 16px 32px rgba(17, 17, 26, 0.18);
|
||||
}
|
||||
|
||||
.recipe-card:focus-visible {
|
||||
outline: 2px solid var(--lora-accent);
|
||||
outline-offset: 3px;
|
||||
}
|
||||
|
||||
.recipe-card__media {
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
aspect-ratio: 4 / 3;
|
||||
background: var(--lora-surface);
|
||||
}
|
||||
|
||||
.recipe-card__media img {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
object-fit: cover;
|
||||
transition: transform 0.25s ease;
|
||||
}
|
||||
|
||||
.recipe-card:hover .recipe-card__media img {
|
||||
transform: scale(1.02);
|
||||
}
|
||||
|
||||
.recipe-card__media::after {
|
||||
content: "";
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
height: 36%;
|
||||
background: linear-gradient(180deg, transparent 0%, rgba(12, 13, 24, 0.55) 100%);
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.recipe-card__media-top {
|
||||
position: absolute;
|
||||
top: var(--space-1);
|
||||
right: var(--space-1);
|
||||
display: flex;
|
||||
gap: var(--space-1);
|
||||
}
|
||||
|
||||
.recipe-card__copy {
|
||||
background: rgba(15, 21, 40, 0.6);
|
||||
border: none;
|
||||
border-radius: 999px;
|
||||
color: white;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
padding: 6px 10px;
|
||||
cursor: pointer;
|
||||
transition: background 0.2s ease, transform 0.2s ease, opacity 0.2s ease;
|
||||
}
|
||||
|
||||
.recipe-card__copy i {
|
||||
font-size: 0.85em;
|
||||
}
|
||||
|
||||
.recipe-card__copy:hover,
|
||||
.recipe-card__copy:focus-visible {
|
||||
background: rgba(15, 21, 40, 0.8);
|
||||
transform: translateY(-1px);
|
||||
outline: none;
|
||||
}
|
||||
|
||||
.recipe-card__copy:active {
|
||||
transform: translateY(0);
|
||||
}
|
||||
|
||||
[data-theme="light"] .recipe-card__copy {
|
||||
background: rgba(255, 255, 255, 0.85);
|
||||
color: rgba(17, 23, 41, 0.8);
|
||||
box-shadow: 0 4px 12px rgba(15, 23, 42, 0.08);
|
||||
}
|
||||
|
||||
[data-theme="light"] .recipe-card__copy:hover,
|
||||
[data-theme="light"] .recipe-card__copy:focus-visible {
|
||||
background: rgba(255, 255, 255, 0.95);
|
||||
}
|
||||
|
||||
.recipe-card__body {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-2);
|
||||
padding: var(--space-2);
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.recipe-card__title {
|
||||
margin: 0;
|
||||
font-size: 1.05em;
|
||||
line-height: 1.4;
|
||||
font-weight: 600;
|
||||
overflow: hidden;
|
||||
display: -webkit-box;
|
||||
-webkit-line-clamp: 2;
|
||||
-webkit-box-orient: vertical;
|
||||
}
|
||||
|
||||
.recipe-card__meta {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: var(--space-1);
|
||||
}
|
||||
|
||||
.recipe-card__badge {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
padding: 4px 10px;
|
||||
border-radius: 999px;
|
||||
font-size: 0.78em;
|
||||
font-weight: 600;
|
||||
line-height: 1;
|
||||
background: rgba(255, 255, 255, 0.08);
|
||||
color: var(--text-color);
|
||||
}
|
||||
|
||||
.recipe-card__badge i {
|
||||
font-size: 0.85em;
|
||||
}
|
||||
|
||||
.recipe-card__badge--base {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
color: var(--text-color);
|
||||
}
|
||||
|
||||
[data-theme="light"] .recipe-card__badge {
|
||||
background: rgba(15, 23, 42, 0.08);
|
||||
}
|
||||
|
||||
[data-theme="light"] .recipe-card__badge--base {
|
||||
background: rgba(15, 23, 42, 0.12);
|
||||
}
|
||||
|
||||
.recipe-card__badge--ready {
|
||||
background: rgba(34, 197, 94, 0.18);
|
||||
color: #4ade80;
|
||||
}
|
||||
|
||||
.recipe-card__badge--missing {
|
||||
background: rgba(234, 179, 8, 0.2);
|
||||
color: #facc15;
|
||||
}
|
||||
|
||||
.recipe-card__badge--empty {
|
||||
background: rgba(148, 163, 184, 0.18);
|
||||
color: #e2e8f0;
|
||||
}
|
||||
|
||||
[data-theme="light"] .recipe-card__badge--ready {
|
||||
color: #157347;
|
||||
background: rgba(76, 167, 120, 0.16);
|
||||
}
|
||||
|
||||
[data-theme="light"] .recipe-card__badge--missing {
|
||||
color: #9f580a;
|
||||
background: rgba(245, 199, 43, 0.22);
|
||||
}
|
||||
|
||||
[data-theme="light"] .recipe-card__badge--empty {
|
||||
color: rgba(71, 85, 105, 0.9);
|
||||
background: rgba(148, 163, 184, 0.2);
|
||||
}
|
||||
|
||||
.recipe-card__cta {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: var(--space-1);
|
||||
color: var(--lora-accent);
|
||||
font-size: 0.9em;
|
||||
font-weight: 600;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.recipe-card__cta i {
|
||||
font-size: 0.85em;
|
||||
transition: transform 0.2s ease;
|
||||
}
|
||||
|
||||
.recipe-card:hover .recipe-card__cta i {
|
||||
transform: translateX(4px);
|
||||
}
|
||||
|
||||
@media (max-width: 900px) {
|
||||
.recipes-header {
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
}
|
||||
|
||||
.recipes-header__view-all {
|
||||
align-self: flex-start;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 640px) {
|
||||
.recipes-card-grid {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/* Loading, error and empty states */
|
||||
.recipes-loading,
|
||||
.recipes-error,
|
||||
@@ -467,7 +834,7 @@
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 10px;
|
||||
margin-bottom: var(--space-1);
|
||||
margin-bottom: 0;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
@@ -491,4 +858,4 @@
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
}
|
||||
|
||||
389
static/css/components/lora-modal/versions.css
Normal file
389
static/css/components/lora-modal/versions.css
Normal file
@@ -0,0 +1,389 @@
|
||||
.model-versions-tab {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-3);
|
||||
padding: var(--space-2) 0;
|
||||
}
|
||||
|
||||
.versions-toolbar {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
align-items: flex-end;
|
||||
justify-content: space-between;
|
||||
gap: var(--space-2);
|
||||
padding: var(--space-2);
|
||||
background: color-mix(in oklch, var(--lora-surface) 70%, transparent);
|
||||
border: 1px solid var(--lora-border);
|
||||
border-radius: var(--border-radius-sm);
|
||||
}
|
||||
|
||||
.versions-toolbar-info h3 {
|
||||
margin: 0 0 4px;
|
||||
font-size: 1.05rem;
|
||||
font-weight: 600;
|
||||
color: var(--text-color);
|
||||
}
|
||||
|
||||
.sr-only {
|
||||
position: absolute;
|
||||
width: 1px;
|
||||
height: 1px;
|
||||
padding: 0;
|
||||
margin: -1px;
|
||||
overflow: hidden;
|
||||
clip: rect(0, 0, 0, 0);
|
||||
border: 0;
|
||||
}
|
||||
|
||||
.versions-toolbar-info p {
|
||||
margin: 0;
|
||||
font-size: 0.85rem;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
.versions-toolbar-info-heading {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--space-2);
|
||||
}
|
||||
|
||||
.versions-toolbar-actions {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: var(--space-1);
|
||||
}
|
||||
|
||||
.versions-toolbar-btn {
|
||||
appearance: none;
|
||||
border-radius: var(--border-radius-xs);
|
||||
padding: 8px 14px;
|
||||
font-size: 0.85rem;
|
||||
font-weight: 600;
|
||||
cursor: pointer;
|
||||
border: 1px solid transparent;
|
||||
transition: background-color 0.2s ease, border-color 0.2s ease, color 0.2s ease, transform 0.2s ease;
|
||||
}
|
||||
|
||||
.versions-toolbar-btn-primary {
|
||||
background: var(--lora-accent);
|
||||
color: #fff;
|
||||
border-color: color-mix(in oklch, var(--lora-accent) 70%, transparent);
|
||||
}
|
||||
|
||||
.versions-toolbar-btn-primary:hover:not(:disabled) {
|
||||
transform: translateY(-1px);
|
||||
background: color-mix(in oklch, var(--lora-accent) 85%, transparent);
|
||||
}
|
||||
|
||||
.versions-toolbar-btn-secondary {
|
||||
background: transparent;
|
||||
color: var(--text-muted);
|
||||
border-color: var(--border-color);
|
||||
}
|
||||
|
||||
.versions-toolbar-btn-secondary:hover:not(:disabled) {
|
||||
color: var(--text-color);
|
||||
}
|
||||
|
||||
.versions-filter-toggle {
|
||||
appearance: none;
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: var(--border-radius-sm);
|
||||
padding: 0;
|
||||
margin-bottom: 4px;
|
||||
width: 30px;
|
||||
height: 30px;
|
||||
background: color-mix(in oklch, var(--card-bg) 80%, var(--bg-color));
|
||||
align-self: center;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
color: var(--text-muted);
|
||||
transition: border-color 0.2s ease, background 0.2s ease, color 0.2s ease, transform 0.2s ease;
|
||||
position: relative;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.versions-filter-toggle i {
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
.versions-filter-toggle:hover:not(:disabled) {
|
||||
border-color: var(--text-color);
|
||||
color: var(--text-color);
|
||||
transform: translateY(-1px);
|
||||
}
|
||||
|
||||
.versions-filter-toggle[data-filter-active="true"] {
|
||||
border-color: color-mix(in oklch, var(--lora-accent) 65%, transparent);
|
||||
color: var(--lora-accent);
|
||||
background: color-mix(in oklch, var(--lora-accent) 20%, var(--card-bg) 80%);
|
||||
}
|
||||
|
||||
.versions-toolbar-btn:disabled {
|
||||
opacity: 0.6;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
.versions-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-2);
|
||||
}
|
||||
|
||||
.version-divider {
|
||||
height: 1px;
|
||||
background: var(--border-color);
|
||||
margin: var(--space-1) 0;
|
||||
}
|
||||
|
||||
.model-version-row {
|
||||
display: grid;
|
||||
grid-template-columns: 124px 1fr auto;
|
||||
align-items: center;
|
||||
gap: var(--space-2);
|
||||
padding: var(--space-2);
|
||||
background: color-mix(in oklch, var(--card-bg) 92%, var(--bg-color) 8%);
|
||||
border: 1px solid var(--lora-border);
|
||||
border-radius: var(--border-radius-sm);
|
||||
box-shadow: 0 1px 2px rgba(0, 0, 0, 0.04);
|
||||
transition: border-color 0.2s ease, box-shadow 0.2s ease, transform 0.2s ease;
|
||||
}
|
||||
|
||||
[data-theme="dark"] .model-version-row {
|
||||
background: color-mix(in oklch, var(--card-bg) 88%, black 12%);
|
||||
}
|
||||
|
||||
.model-version-row:hover {
|
||||
transform: translateY(-1px);
|
||||
box-shadow: 0 4px 16px rgba(0, 0, 0, 0.08);
|
||||
}
|
||||
|
||||
.model-version-row.is-clickable {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.model-version-row.is-current {
|
||||
border-color: var(--lora-accent);
|
||||
box-shadow: 0 0 0 1px color-mix(in oklch, var(--lora-accent) 65%, transparent),
|
||||
0 10px 22px rgba(0, 0, 0, 0.12);
|
||||
}
|
||||
|
||||
.version-media {
|
||||
width: 124px;
|
||||
height: 88px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
overflow: hidden;
|
||||
background: rgba(0, 0, 0, 0.03);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
border: 1px solid color-mix(in oklch, var(--border-color) 70%, transparent);
|
||||
}
|
||||
|
||||
.version-media img,
|
||||
.version-media video {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
object-fit: cover;
|
||||
}
|
||||
|
||||
.version-media img {
|
||||
/* Bias cropping toward the upper region to keep faces visible */
|
||||
object-position: center 20%;
|
||||
}
|
||||
|
||||
.version-media video {
|
||||
background: #000;
|
||||
}
|
||||
|
||||
.version-media-placeholder {
|
||||
font-size: 0.85rem;
|
||||
color: var(--text-muted);
|
||||
border-style: dashed;
|
||||
border-width: 1px;
|
||||
}
|
||||
|
||||
.version-details {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.version-title {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
font-weight: 600;
|
||||
font-size: 0.95rem;
|
||||
}
|
||||
|
||||
.versions-tab-version-name {
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
.version-badges {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 6px;
|
||||
}
|
||||
|
||||
.version-badge {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
padding: 3px 8px;
|
||||
border-radius: 999px;
|
||||
border: 1px solid transparent;
|
||||
font-size: 0.7rem;
|
||||
font-weight: 600;
|
||||
letter-spacing: 0.02em;
|
||||
}
|
||||
|
||||
.version-badge-info {
|
||||
background: color-mix(in oklch, var(--badge-update-bg) 25%, transparent);
|
||||
color: var(--badge-update-bg);
|
||||
border-color: color-mix(in oklch, var(--badge-update-bg) 55%, transparent);
|
||||
}
|
||||
|
||||
.version-badge-success {
|
||||
background: color-mix(in oklch, var(--lora-success) 25%, transparent);
|
||||
color: var(--lora-success);
|
||||
border-color: color-mix(in oklch, var(--lora-success) 50%, transparent);
|
||||
}
|
||||
|
||||
.version-badge-muted {
|
||||
background: color-mix(in oklch, var(--text-muted) 18%, transparent);
|
||||
color: var(--text-muted);
|
||||
border-color: color-mix(in oklch, var(--text-muted) 40%, transparent);
|
||||
}
|
||||
|
||||
.version-badge-current {
|
||||
background: color-mix(in oklch, var(--lora-accent) 22%, transparent);
|
||||
color: var(--lora-accent);
|
||||
border-color: color-mix(in oklch, var(--lora-accent) 55%, transparent);
|
||||
}
|
||||
|
||||
.version-meta {
|
||||
font-size: 0.8rem;
|
||||
color: var(--text-muted);
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.version-meta-item {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.version-meta-primary {
|
||||
font-weight: 600;
|
||||
color: color-mix(in oklch, var(--text-color) 88%, var(--lora-accent) 12%);
|
||||
}
|
||||
|
||||
.version-meta-separator {
|
||||
color: color-mix(in oklch, var(--text-muted) 90%, var(--text-color) 10%);
|
||||
}
|
||||
|
||||
.version-actions {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 6px;
|
||||
align-items: flex-end;
|
||||
}
|
||||
|
||||
.version-action {
|
||||
min-width: 128px;
|
||||
padding: 7px 12px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
border: 1px solid transparent;
|
||||
font-size: 0.8rem;
|
||||
font-weight: 600;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.2s ease, color 0.2s ease, border-color 0.2s ease, transform 0.2s ease;
|
||||
}
|
||||
|
||||
.version-action-primary {
|
||||
background: var(--lora-accent);
|
||||
color: #fff;
|
||||
border-color: color-mix(in oklch, var(--lora-accent) 65%, transparent);
|
||||
}
|
||||
|
||||
.version-action-primary:hover {
|
||||
transform: translateY(-1px);
|
||||
background: color-mix(in oklch, var(--lora-accent) 85%, transparent);
|
||||
}
|
||||
|
||||
.version-action-danger {
|
||||
background: transparent;
|
||||
border-color: color-mix(in oklch, var(--lora-error) 60%, transparent);
|
||||
color: var(--lora-error);
|
||||
}
|
||||
|
||||
.version-action-danger:hover {
|
||||
background: color-mix(in oklch, var(--lora-error) 12%, transparent);
|
||||
}
|
||||
|
||||
.version-action-ghost {
|
||||
background: transparent;
|
||||
border-color: var(--border-color);
|
||||
color: var(--text-color);
|
||||
}
|
||||
|
||||
.version-action-ghost:hover {
|
||||
background: color-mix(in oklch, var(--lora-surface) 35%, transparent);
|
||||
}
|
||||
|
||||
.version-action:disabled {
|
||||
opacity: 0.6;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
.versions-loading-state,
|
||||
.versions-empty,
|
||||
.versions-error {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 8px;
|
||||
padding: var(--space-3);
|
||||
border: 1px dashed var(--lora-border);
|
||||
border-radius: var(--border-radius-sm);
|
||||
color: var(--text-muted);
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.versions-error {
|
||||
border-style: solid;
|
||||
border-color: color-mix(in oklch, var(--lora-error) 45%, transparent);
|
||||
color: var(--lora-error);
|
||||
}
|
||||
|
||||
.versions-empty i,
|
||||
.versions-error i {
|
||||
font-size: 1.25rem;
|
||||
}
|
||||
|
||||
@media (max-width: 900px) {
|
||||
.model-version-row {
|
||||
grid-template-columns: 1fr;
|
||||
align-items: stretch;
|
||||
}
|
||||
|
||||
.version-actions {
|
||||
flex-direction: row;
|
||||
justify-content: flex-end;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.version-action {
|
||||
min-width: 0;
|
||||
}
|
||||
}
|
||||
@@ -315,7 +315,8 @@ button:disabled,
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.delete-preview img {
|
||||
.delete-preview img,
|
||||
.delete-preview video {
|
||||
width: 100%;
|
||||
height: auto;
|
||||
max-height: 150px;
|
||||
@@ -345,4 +346,4 @@ button:disabled,
|
||||
font-style: italic;
|
||||
margin-top: var(--space-1);
|
||||
text-align: center;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -233,6 +233,11 @@
|
||||
resize: vertical;
|
||||
}
|
||||
|
||||
.auto-organize-exclusions-input {
|
||||
width: 100%;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.priority-tags-input:focus {
|
||||
border-color: var(--lora-accent);
|
||||
outline: none;
|
||||
@@ -245,12 +250,24 @@
|
||||
|
||||
.priority-tags-header {
|
||||
align-items: center;
|
||||
justify-content: flex-start;
|
||||
}
|
||||
|
||||
.priority-tags-actions {
|
||||
.priority-tags-info {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: flex-end;
|
||||
gap: var(--space-1);
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.priority-tags-info label {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.auto-organize-exclusions-item {
|
||||
gap: var(--space-2);
|
||||
}
|
||||
|
||||
.priority-tags-example {
|
||||
|
||||
@@ -588,6 +588,26 @@
|
||||
padding-top: 4px; /* Add padding to prevent first item from being cut off when hovered */
|
||||
}
|
||||
|
||||
.recipe-resources-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 10px;
|
||||
flex: 1;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
.recipe-checkpoint-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-1);
|
||||
}
|
||||
|
||||
.version-divider {
|
||||
height: 1px;
|
||||
background: var(--border-color);
|
||||
margin: var(--space-1) 0;
|
||||
}
|
||||
|
||||
.recipe-lora-item {
|
||||
display: flex;
|
||||
gap: var(--space-2);
|
||||
@@ -614,6 +634,13 @@
|
||||
border-left: 4px solid var(--lora-accent);
|
||||
}
|
||||
|
||||
.recipe-lora-item.checkpoint-item {
|
||||
cursor: pointer;
|
||||
padding-top: 8px;
|
||||
padding-bottom: 8px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.recipe-lora-item.missing-locally {
|
||||
border-left: 4px solid var(--lora-error);
|
||||
}
|
||||
@@ -962,6 +989,10 @@
|
||||
z-index: 100;
|
||||
}
|
||||
|
||||
.badge-container .resource-action {
|
||||
margin-left: auto;
|
||||
}
|
||||
|
||||
/* Add styles for missing LoRAs download feature */
|
||||
.recipe-status.missing {
|
||||
position: relative;
|
||||
@@ -1004,3 +1035,61 @@
|
||||
.recipe-status.clickable:hover {
|
||||
background-color: rgba(var(--lora-warning-rgb, 255, 165, 0), 0.2);
|
||||
}
|
||||
|
||||
.recipe-checkpoint-meta {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 8px;
|
||||
align-items: center;
|
||||
font-size: 0.85em;
|
||||
margin-bottom: 2px;
|
||||
}
|
||||
|
||||
.recipe-checkpoint-meta .checkpoint-type {
|
||||
background: var(--lora-surface);
|
||||
padding: 2px 8px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
color: var(--text-color);
|
||||
}
|
||||
|
||||
.recipe-resource-actions {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
margin-top: 2px;
|
||||
}
|
||||
|
||||
.resource-action {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
padding: 5px 10px;
|
||||
border-radius: var(--border-radius-xs);
|
||||
border: 1px solid var(--border-color);
|
||||
background: var(--bg-color);
|
||||
color: var(--text-color);
|
||||
font-size: 0.9em;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.2s ease, border-color 0.2s ease, transform 0.2s ease;
|
||||
}
|
||||
|
||||
.resource-action.compact {
|
||||
padding: 4px 10px;
|
||||
font-size: 0.88em;
|
||||
}
|
||||
|
||||
.resource-action:hover {
|
||||
background: var(--lora-surface);
|
||||
transform: translateY(-1px);
|
||||
}
|
||||
|
||||
.resource-action.primary {
|
||||
background: var(--lora-accent);
|
||||
color: white;
|
||||
border-color: var(--lora-accent);
|
||||
}
|
||||
|
||||
.resource-action.primary:hover {
|
||||
background: color-mix(in oklch, var(--lora-accent), black 10%);
|
||||
}
|
||||
|
||||
@@ -235,6 +235,13 @@
|
||||
border-color: var(--lora-accent);
|
||||
}
|
||||
|
||||
/* Exclude state styling for filter tags */
|
||||
.filter-tag.exclude {
|
||||
background-color: var(--lora-error-bg);
|
||||
color: var(--lora-error);
|
||||
border-color: var(--lora-error-border);
|
||||
}
|
||||
|
||||
/* Tag filter styles */
|
||||
.tag-filter {
|
||||
display: flex;
|
||||
|
||||
@@ -21,6 +21,10 @@
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.folder-sidebar.hidden-by-setting {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
/* Visible state */
|
||||
.folder-sidebar.visible {
|
||||
transform: translateX(0);
|
||||
@@ -59,6 +63,10 @@
|
||||
pointer-events: all;
|
||||
}
|
||||
|
||||
.sidebar-hover-area.hidden-by-setting {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
.sidebar-hover-area.disabled {
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
@@ -12,6 +12,73 @@
|
||||
border-bottom: 1px solid var(--lora-border);
|
||||
}
|
||||
|
||||
.notification-tabs {
|
||||
display: flex;
|
||||
gap: var(--space-2);
|
||||
margin-bottom: var(--space-3);
|
||||
}
|
||||
|
||||
.notification-tab {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: var(--space-1);
|
||||
padding: 0.5rem 0.75rem;
|
||||
background: var(--lora-surface);
|
||||
border: 1px solid var(--lora-border);
|
||||
border-radius: var(--border-radius-sm);
|
||||
color: var(--text-color);
|
||||
cursor: pointer;
|
||||
transition: background 0.2s ease, border-color 0.2s ease;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.notification-tab:hover,
|
||||
.notification-tab.active {
|
||||
background: var(--lora-accent-light, rgba(0, 148, 255, 0.12));
|
||||
border-color: var(--lora-accent);
|
||||
color: var(--lora-accent-text, var(--text-color));
|
||||
}
|
||||
|
||||
.notification-tab-badge {
|
||||
display: none;
|
||||
min-width: 1.25rem;
|
||||
height: 1.25rem;
|
||||
padding: 0 0.4rem;
|
||||
border-radius: 999px;
|
||||
background: var(--lora-accent);
|
||||
color: #fff;
|
||||
font-size: 0.75rem;
|
||||
font-weight: 600;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.notification-tab-badge.is-dot {
|
||||
min-width: 0.5rem;
|
||||
width: 0.5rem;
|
||||
height: 0.5rem;
|
||||
padding: 0;
|
||||
border-radius: 50%;
|
||||
}
|
||||
|
||||
.notification-tab-badge.visible {
|
||||
display: inline-flex;
|
||||
}
|
||||
|
||||
.notification-panels {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-3);
|
||||
}
|
||||
|
||||
.notification-panel {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.notification-panel.active {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.update-icon {
|
||||
font-size: 1.8em;
|
||||
color: var(--lora-accent);
|
||||
@@ -165,6 +232,137 @@
|
||||
justify-content: flex-start;
|
||||
}
|
||||
|
||||
.banner-history {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-2);
|
||||
}
|
||||
|
||||
.banner-history h3 {
|
||||
margin: 0;
|
||||
font-size: 1.05rem;
|
||||
color: var(--lora-accent);
|
||||
}
|
||||
|
||||
.banner-history-empty {
|
||||
margin: 0;
|
||||
padding: var(--space-3);
|
||||
background: var(--lora-surface);
|
||||
border: 1px dashed var(--lora-border);
|
||||
border-radius: var(--border-radius-sm);
|
||||
text-align: center;
|
||||
color: var(--text-muted, rgba(0, 0, 0, 0.6));
|
||||
}
|
||||
|
||||
.banner-history-list {
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-2);
|
||||
}
|
||||
|
||||
.banner-history-item {
|
||||
border: 1px solid var(--lora-border);
|
||||
border-radius: var(--border-radius-sm);
|
||||
padding: var(--space-2);
|
||||
background: var(--card-bg, #fff);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-1);
|
||||
}
|
||||
|
||||
[data-theme="dark"] .banner-history-item {
|
||||
background: rgba(255, 255, 255, 0.03);
|
||||
}
|
||||
|
||||
.banner-history-title {
|
||||
margin: 0;
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
.banner-history-description {
|
||||
margin: 0;
|
||||
color: var(--text-color);
|
||||
opacity: 0.85;
|
||||
}
|
||||
|
||||
.banner-history-meta {
|
||||
display: flex;
|
||||
gap: var(--space-2);
|
||||
font-size: 0.85rem;
|
||||
color: var(--text-muted, rgba(0, 0, 0, 0.6));
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.banner-history-time {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.banner-history-status {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 0.35rem;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
}
|
||||
|
||||
.banner-history-status.active {
|
||||
color: var(--lora-success);
|
||||
}
|
||||
|
||||
.banner-history-status.dismissed {
|
||||
color: var(--lora-error);
|
||||
}
|
||||
|
||||
.banner-history-actions {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: var(--space-2);
|
||||
margin-top: var(--space-1);
|
||||
}
|
||||
|
||||
.banner-history-action {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 0.35rem;
|
||||
padding: 0.35rem 0.65rem;
|
||||
border-radius: var(--border-radius-sm);
|
||||
border: 1px solid var(--lora-border);
|
||||
text-decoration: none;
|
||||
font-size: 0.85rem;
|
||||
transition: background 0.2s ease, color 0.2s ease, border-color 0.2s ease;
|
||||
}
|
||||
|
||||
.banner-history-action i {
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
.banner-history-action.banner-history-action-primary {
|
||||
background: var(--lora-accent);
|
||||
border-color: var(--lora-accent);
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.banner-history-action.banner-history-action-secondary {
|
||||
background: var(--lora-surface);
|
||||
color: var(--text-color);
|
||||
}
|
||||
|
||||
.banner-history-action.banner-history-action-tertiary {
|
||||
background: transparent;
|
||||
border-style: dashed;
|
||||
}
|
||||
|
||||
.banner-history-action:hover {
|
||||
background: var(--lora-accent-light, rgba(0, 148, 255, 0.12));
|
||||
border-color: var(--lora-accent);
|
||||
color: var(--lora-accent-text, var(--text-color));
|
||||
}
|
||||
|
||||
/* Override toggle switch styles for update preferences */
|
||||
.update-preferences .toggle-switch {
|
||||
position: relative;
|
||||
|
||||
@@ -104,8 +104,22 @@
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.control-group button:disabled {
|
||||
cursor: not-allowed;
|
||||
opacity: 0.6;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.control-group button.loading,
|
||||
.dropdown-toggle.loading {
|
||||
cursor: wait;
|
||||
opacity: 0.7;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
/* Controls */
|
||||
.control-group button.favorite-filter {
|
||||
.control-group button.favorite-filter,
|
||||
.control-group button.update-filter {
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
}
|
||||
@@ -120,6 +134,30 @@
|
||||
color: #ffc107;
|
||||
}
|
||||
|
||||
.control-group button.update-filter i {
|
||||
margin-right: 4px;
|
||||
color: var(--lora-accent);
|
||||
}
|
||||
|
||||
.control-group button.update-filter.active {
|
||||
background: var(--lora-accent);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.control-group button.update-filter.active i {
|
||||
color: white;
|
||||
}
|
||||
|
||||
.update-filter-group .dropdown-main.update-filter.active + .dropdown-toggle {
|
||||
background: var(--lora-accent);
|
||||
border-color: var(--lora-accent);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.update-filter-group .dropdown-main.update-filter.active + .dropdown-toggle i {
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
/* Active state for buttons that can be toggled */
|
||||
.control-group button.active {
|
||||
background: var(--lora-accent);
|
||||
@@ -307,6 +345,9 @@
|
||||
border-top-left-radius: 0;
|
||||
border-bottom-left-radius: 0;
|
||||
padding: 0 !important;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.dropdown-menu {
|
||||
@@ -315,7 +356,8 @@
|
||||
left: 0;
|
||||
z-index: 1000;
|
||||
display: none;
|
||||
min-width: 230px;
|
||||
min-width: max(100%, max-content);
|
||||
width: max-content;
|
||||
padding: 5px 0;
|
||||
margin: 2px 0 0;
|
||||
font-size: 0.85em;
|
||||
@@ -339,6 +381,12 @@
|
||||
transition: background-color 0.2s ease;
|
||||
}
|
||||
|
||||
.dropdown-item.disabled {
|
||||
cursor: default;
|
||||
opacity: 0.6;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.dropdown-item:hover {
|
||||
background-color: oklch(var(--lora-accent) / 0.1);
|
||||
}
|
||||
|
||||
@@ -27,6 +27,7 @@
|
||||
@import 'components/lora-modal/preset-tags.css';
|
||||
@import 'components/lora-modal/showcase.css';
|
||||
@import 'components/lora-modal/triggerwords.css';
|
||||
@import 'components/lora-modal/versions.css';
|
||||
@import 'components/shared/edit-metadata.css';
|
||||
@import 'components/search-filter.css';
|
||||
@import 'components/bulk.css';
|
||||
@@ -55,4 +56,4 @@
|
||||
/* 使用已有的loading-spinner样式 */
|
||||
.initialization-notice .loading-spinner {
|
||||
margin-bottom: var(--space-2);
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user